| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 447 matching lines...) | |
| 458 __ push(descriptor->register_params_[i]); | 458 __ push(descriptor->register_params_[i]); |
| 459 } | 459 } |
| 460 ExternalReference miss = descriptor->miss_handler(); | 460 ExternalReference miss = descriptor->miss_handler(); |
| 461 __ CallExternalReference(miss, descriptor->register_param_count_); | 461 __ CallExternalReference(miss, descriptor->register_param_count_); |
| 462 } | 462 } |
| 463 | 463 |
| 464 __ ret(0); | 464 __ ret(0); |
| 465 } | 465 } |
| 466 | 466 |
| 467 | 467 |
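The truncated fragment above (old lines 458..464) spills the descriptor's register parameters to the stack and then calls the miss handler in the runtime. A minimal C++ model of that control flow, for orientation only; the `Value` type and the descriptor fields below are illustrative stand-ins, not V8's real declarations:

```cpp
#include <cstdio>
#include <vector>

// Hypothetical stand-ins for the fields the fragment uses
// (register_params_, register_param_count_, miss_handler).
using Value = int;
using MissHandler = void (*)(const std::vector<Value>& stack_args);

struct CodeStubInterfaceDescriptor {
  std::vector<Value> register_params_;  // values normally held in registers
  MissHandler miss_handler_;            // external reference into the runtime
};

// Shape of the fragment: spill every register parameter to the stack,
// then call the miss handler with that many stack arguments.
void CallMissHandler(const CodeStubInterfaceDescriptor& d) {
  std::vector<Value> stack;
  for (Value p : d.register_params_) stack.push_back(p);  // __ push(...)
  d.miss_handler_(stack);  // __ CallExternalReference(miss, count)
}

int main() {
  CodeStubInterfaceDescriptor d{
      {1, 2, 3},
      [](const std::vector<Value>& args) {
        std::printf("miss handler called with %zu args\n", args.size());
      }};
  CallMissHandler(d);
}
```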
| 468 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { | |
| 469 // Stack layout on entry: | |
| 470 // | |
| 471 // [esp + (1 * kPointerSize)]: function | |
| 472 // [esp + (2 * kPointerSize)]: serialized scope info | |
| 473 | |
| 474 // Try to allocate the context in new space. | |
| 475 Label gc; | |
| 476 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | |
| 477 __ Allocate(FixedArray::SizeFor(length), eax, ebx, ecx, &gc, TAG_OBJECT); | |
| 478 | |
| 479 // Get the function or sentinel from the stack. | |
| 480 __ mov(ecx, Operand(esp, 1 * kPointerSize)); | |
| 481 | |
| 482 // Get the serialized scope info from the stack. | |
| 483 __ mov(ebx, Operand(esp, 2 * kPointerSize)); | |
| 484 | |
| 485 // Set up the object header. | |
| 486 Factory* factory = masm->isolate()->factory(); | |
| 487 __ mov(FieldOperand(eax, HeapObject::kMapOffset), | |
| 488 factory->block_context_map()); | |
| 489 __ mov(FieldOperand(eax, Context::kLengthOffset), | |
| 490 Immediate(Smi::FromInt(length))); | |
| 491 | |
| 492 // If this block context is nested in the native context we get a smi | |
| 493 // sentinel instead of a function. The block context should get the | |
| 494 // canonical empty function of the native context as its closure which | |
| 495 // we still have to look up. | |
| 496 Label after_sentinel; | |
| 497 __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear); | |
| 498 if (FLAG_debug_code) { | |
| 499 __ cmp(ecx, 0); | |
| 500 __ Assert(equal, kExpected0AsASmiSentinel); | |
| 501 } | |
| 502 __ mov(ecx, GlobalObjectOperand()); | |
| 503 __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset)); | |
| 504 __ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX)); | |
| 505 __ bind(&after_sentinel); | |
| 506 | |
| 507 // Set up the fixed slots. | |
| 508 __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx); | |
| 509 __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi); | |
| 510 __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx); | |
| 511 | |
| 512 // Copy the global object from the previous context. | |
| 513 __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX)); | |
| 514 __ mov(ContextOperand(eax, Context::GLOBAL_OBJECT_INDEX), ebx); | |
| 515 | |
| 516 // Initialize the rest of the slots to the hole value. | |
| 517 if (slots_ == 1) { | |
| 518 __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS), | |
| 519 factory->the_hole_value()); | |
| 520 } else { | |
| 521 __ mov(ebx, factory->the_hole_value()); | |
| 522 for (int i = 0; i < slots_; i++) { | |
| 523 __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx); | |
| 524 } | |
| 525 } | |
| 526 | |
| 527 // Return and remove the on-stack parameters. | |
| 528 __ mov(esi, eax); | |
| 529 __ ret(2 * kPointerSize); | |
| 530 | |
| 531 // Need to collect. Call into runtime system. | |
| 532 __ bind(&gc); | |
| 533 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | |
| 534 } | |
| 535 | |
| 536 | |
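For readers skimming the deletion: FastNewBlockContextStub built a block context inline (the fast path) and fell back to Runtime::kPushBlockContext only when allocation failed, so removing it presumably routes block-context creation through the runtime instead. A minimal C++ model of the fast path the stub emitted, with hypothetical `Value`/`Context` stand-ins for V8's tagged values and context objects:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

using Value = intptr_t;           // stand-in for a tagged V8 value
constexpr Value kTheHole = -1;    // stand-in for the hole sentinel
constexpr Value kSmiSentinel = 0; // smi 0 marks "nested in native context"

struct Context {
  Value closure;              // Context::CLOSURE_INDEX
  const Context* previous;    // Context::PREVIOUS_INDEX (esi on entry)
  Value extension;            // Context::EXTENSION_INDEX: scope info lives here
  Value global_object;        // Context::GLOBAL_OBJECT_INDEX
  std::vector<Value> locals;  // the slots_ user slots
};

// Fast path of the deleted stub: allocate slots_ + MIN_CONTEXT_SLOTS words,
// resolve the smi-0 closure sentinel to the native context's canonical
// empty function, wire up the fixed slots, and fill the rest with the hole.
Context PushBlockContext(Value function, Value scope_info,
                         const Context& previous, Value native_closure,
                         int slots) {
  Context ctx;
  ctx.closure = (function == kSmiSentinel) ? native_closure : function;
  ctx.previous = &previous;
  ctx.extension = scope_info;
  ctx.global_object = previous.global_object;  // copied from previous context
  ctx.locals.assign(slots, kTheHole);          // remaining slots hold the hole
  return ctx;  // the stub also moved the new context into esi before ret
}

int main() {
  Context outer{/*closure=*/7, nullptr, /*extension=*/0, /*global=*/42, {}};
  Context block = PushBlockContext(kSmiSentinel, /*scope_info=*/99, outer,
                                   /*native_closure=*/5, /*slots=*/2);
  std::printf("closure=%ld locals=%zu\n", (long)block.closure,
              block.locals.size());
}
```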
| 537 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 468 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 538 // We don't allow a GC during a store buffer overflow so there is no need to | 469 // We don't allow a GC during a store buffer overflow so there is no need to |
| 539 // store the registers in any particular way, but we do have to store and | 470 // store the registers in any particular way, but we do have to store and |
| 540 // restore them. | 471 // restore them. |
| 541 __ pushad(); | 472 __ pushad(); |
| 542 if (save_doubles_ == kSaveFPRegs) { | 473 if (save_doubles_ == kSaveFPRegs) { |
| 543 CpuFeatureScope scope(masm, SSE2); | 474 CpuFeatureScope scope(masm, SSE2); |
| 544 __ sub(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters)); | 475 __ sub(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters)); |
| 545 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { | 476 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { |
| 546 XMMRegister reg = XMMRegister::from_code(i); | 477 XMMRegister reg = XMMRegister::from_code(i); |
| (...skipping 4949 matching lines...) | |
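The save-area arithmetic at the top of StoreBufferOverflowStub is easy to misread. A small sketch of the layout it reserves when save_doubles_ == kSaveFPRegs; the constants are the usual ia32 values, and the per-register store offset follows the common `[esp + i * kDoubleSize]` pattern (the stores themselves fall in the elided lines, so treat that offset as an assumption):

```cpp
#include <cstdio>

// Usual ia32 sizes; XMMRegister::kNumRegisters is 8 on ia32. All names
// here are illustrative constants, not V8's declarations.
constexpr int kPointerSize     = 4;
constexpr int kDoubleSize      = 8;
constexpr int kNumGPRegisters  = 8;  // pushad saves the 8 GP registers
constexpr int kNumXMMRegisters = 8;  // xmm0..xmm7

int main() {
  // pushad first saves the general-purpose registers (32 bytes), then
  // sub esp reserves one double-sized slot per XMM register.
  std::printf("GP save area : %d bytes\n", kNumGPRegisters * kPointerSize);
  std::printf("XMM save area: %d bytes\n", kNumXMMRegisters * kDoubleSize);
  // Inside the loop, xmm<i> presumably lands at [esp + i * kDoubleSize]:
  for (int i = 0; i < kNumXMMRegisters; i++)
    std::printf("xmm%d -> [esp + %2d]\n", i, i * kDoubleSize);
}
```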
| 5496 restore_context ? | 5427 restore_context ? |
| 5497 &context_restore_operand : NULL); | 5428 &context_restore_operand : NULL); |
| 5498 } | 5429 } |
| 5499 | 5430 |
| 5500 | 5431 |
| 5501 #undef __ | 5432 #undef __ |
| 5502 | 5433 |
| 5503 } } // namespace v8::internal | 5434 } } // namespace v8::internal |
| 5504 | 5435 |
| 5505 #endif // V8_TARGET_ARCH_IA32 | 5436 #endif // V8_TARGET_ARCH_IA32 |