| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 13 matching lines...) |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "v8.h" | 28 #include "v8.h" |
| 29 | 29 |
| 30 #if defined(V8_TARGET_ARCH_IA32) | 30 #if defined(V8_TARGET_ARCH_IA32) |
| 31 | 31 |
| 32 #include "ia32/lithium-codegen-ia32.h" | 32 #include "ia32/lithium-codegen-ia32.h" |
| 33 #include "code-stubs.h" | 33 #include "code-stubs.h" |
| 34 #include "deoptimizer.h" |
| 34 #include "stub-cache.h" | 35 #include "stub-cache.h" |
| 35 | 36 |
| 36 namespace v8 { | 37 namespace v8 { |
| 37 namespace internal { | 38 namespace internal { |
| 38 | 39 |
| 39 | 40 |
| 40 // When invoking builtins, we need to record the safepoint in the middle of | 41 // When invoking builtins, we need to record the safepoint in the middle of |
| 41 // the invoke instruction sequence generated by the macro assembler. | 42 // the invoke instruction sequence generated by the macro assembler. |
| 42 class SafepointGenerator : public PostCallGenerator { | 43 class SafepointGenerator : public PostCallGenerator { |
| 43 public: | 44 public: |
| 44 SafepointGenerator(LCodeGen* codegen, | 45 SafepointGenerator(LCodeGen* codegen, |
| 45 LPointerMap* pointers, | 46 LPointerMap* pointers, |
| 46 int deoptimization_index) | 47 int deoptimization_index) |
| 47 : codegen_(codegen), | 48 : codegen_(codegen), |
| 48 pointers_(pointers), | 49 pointers_(pointers), |
| 49 deoptimization_index_(deoptimization_index) {} | 50 deoptimization_index_(deoptimization_index) {} |
| 50 virtual ~SafepointGenerator() { } | 51 virtual ~SafepointGenerator() { } |
| 51 | 52 |
| 52 virtual void Generate() { | 53 virtual void Generate() { |
| 53 // Ensure that we have enough space in the reloc info to patch | |
| 54 // this with calls when doing deoptimization. | |
| 55 codegen_->EnsureRelocSpaceForDeoptimization(); | |
| 56 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 54 codegen_->RecordSafepoint(pointers_, deoptimization_index_); |
| 57 } | 55 } |
| 58 | 56 |
| 59 private: | 57 private: |
| 60 LCodeGen* codegen_; | 58 LCodeGen* codegen_; |
| 61 LPointerMap* pointers_; | 59 LPointerMap* pointers_; |
| 62 int deoptimization_index_; | 60 int deoptimization_index_; |
| 63 }; | 61 }; |
| 64 | 62 |
| 65 | 63 |
| 66 #define __ masm()-> | 64 #define __ masm()-> |
| 67 | 65 |
| 68 bool LCodeGen::GenerateCode() { | 66 bool LCodeGen::GenerateCode() { |
| 69 HPhase phase("Code generation", chunk()); | 67 HPhase phase("Code generation", chunk()); |
| 70 ASSERT(is_unused()); | 68 ASSERT(is_unused()); |
| 71 status_ = GENERATING; | 69 status_ = GENERATING; |
| 72 CpuFeatures::Scope scope(SSE2); | 70 CpuFeatures::Scope scope(SSE2); |
| 73 return GeneratePrologue() && | 71 return GeneratePrologue() && |
| 74 GenerateBody() && | 72 GenerateBody() && |
| 75 GenerateDeferredCode() && | 73 GenerateDeferredCode() && |
| 76 GenerateRelocPadding() && | |
| 77 GenerateSafepointTable(); | 74 GenerateSafepointTable(); |
| 78 } | 75 } |
| 79 | 76 |
| 80 | 77 |
| 81 void LCodeGen::FinishCode(Handle<Code> code) { | 78 void LCodeGen::FinishCode(Handle<Code> code) { |
| 82 ASSERT(is_done()); | 79 ASSERT(is_done()); |
| 83 code->set_stack_slots(StackSlotCount()); | 80 code->set_stack_slots(StackSlotCount()); |
| 84 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 81 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 85 PopulateDeoptimizationData(code); | 82 PopulateDeoptimizationData(code); |
| | 83 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); |
| 86 } | 84 } |
| 87 | 85 |
| 88 | 86 |
| 89 void LCodeGen::Abort(const char* format, ...) { | 87 void LCodeGen::Abort(const char* format, ...) { |
| 90 if (FLAG_trace_bailout) { | 88 if (FLAG_trace_bailout) { |
| 91 SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString()); | 89 SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString()); |
| 92 PrintF("Aborting LCodeGen in @\"%s\": ", *name); | 90 PrintF("Aborting LCodeGen in @\"%s\": ", *name); |
| 93 va_list arguments; | 91 va_list arguments; |
| 94 va_start(arguments, format); | 92 va_start(arguments, format); |
| 95 OS::VPrint(format, arguments); | 93 OS::VPrint(format, arguments); |
| (...skipping 277 matching lines...) |
| 373 environment->spilled_double_registers()[value->index()], | 371 environment->spilled_double_registers()[value->index()], |
| 374 false); | 372 false); |
| 375 } | 373 } |
| 376 } | 374 } |
| 377 | 375 |
| 378 AddToTranslation(translation, value, environment->HasTaggedValueAt(i)); | 376 AddToTranslation(translation, value, environment->HasTaggedValueAt(i)); |
| 379 } | 377 } |
| 380 } | 378 } |
| 381 | 379 |
| 382 | 380 |
| 383 void LCodeGen::EnsureRelocSpaceForDeoptimization() { | |
| 384 // Since we patch the reloc info with RUNTIME_ENTRY calls every | |
| 385 // patch site will take up 2 bytes + any pc-jumps. We are | |
| 386 // conservative and always reserve 6 bytes in case a simple pc-jump | |
| 387 // is not enough. | |
| 388 uint32_t pc_delta = | |
| 389 masm()->pc_offset() - deoptimization_reloc_size.last_pc_offset; | |
| 390 if (is_uintn(pc_delta, 6)) { | |
| 391 deoptimization_reloc_size.min_size += 2; | |
| 392 } else { | |
| 393 deoptimization_reloc_size.min_size += 6; | |
| 394 } | |
| 395 deoptimization_reloc_size.last_pc_offset = masm()->pc_offset(); | |
| 396 } | |
| 397 | |
| 398 | |
| 399 void LCodeGen::AddToTranslation(Translation* translation, | 381 void LCodeGen::AddToTranslation(Translation* translation, |
| 400 LOperand* op, | 382 LOperand* op, |
| 401 bool is_tagged) { | 383 bool is_tagged) { |
| 402 if (op == NULL) { | 384 if (op == NULL) { |
| 403 // TODO(twuerthinger): Introduce marker operands to indicate that this value | 385 // TODO(twuerthinger): Introduce marker operands to indicate that this value |
| 404 // is not present and must be reconstructed from the deoptimizer. Currently | 386 // is not present and must be reconstructed from the deoptimizer. Currently |
| 405 // this is only used for the arguments object. | 387 // this is only used for the arguments object. |
| 406 translation->StoreArgumentsObject(); | 388 translation->StoreArgumentsObject(); |
| 407 } else if (op->IsStackSlot()) { | 389 } else if (op->IsStackSlot()) { |
| 408 if (is_tagged) { | 390 if (is_tagged) { |
| (...skipping 33 matching lines...) |
| 442 bool adjusted) { | 424 bool adjusted) { |
| 443 ASSERT(instr != NULL); | 425 ASSERT(instr != NULL); |
| 444 LPointerMap* pointers = instr->pointer_map(); | 426 LPointerMap* pointers = instr->pointer_map(); |
| 445 RecordPosition(pointers->position()); | 427 RecordPosition(pointers->position()); |
| 446 | 428 |
| 447 if (!adjusted) { | 429 if (!adjusted) { |
| 448 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 430 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 449 } | 431 } |
| 450 __ call(code, mode); | 432 __ call(code, mode); |
| 451 | 433 |
| 452 EnsureRelocSpaceForDeoptimization(); | |
| 453 RegisterLazyDeoptimization(instr); | 434 RegisterLazyDeoptimization(instr); |
| 454 | 435 |
| 455 // Signal that we don't inline smi code before these stubs in the | 436 // Signal that we don't inline smi code before these stubs in the |
| 456 // optimizing code generator. | 437 // optimizing code generator. |
| 457 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || | 438 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
| 458 code->kind() == Code::COMPARE_IC) { | 439 code->kind() == Code::COMPARE_IC) { |
| 459 __ nop(); | 440 __ nop(); |
| 460 } | 441 } |
| 461 } | 442 } |
| 462 | 443 |
| 463 | 444 |
| 464 void LCodeGen::CallRuntime(const Runtime::Function* fun, | 445 void LCodeGen::CallRuntime(const Runtime::Function* fun, |
| 465 int argc, | 446 int argc, |
| 466 LInstruction* instr, | 447 LInstruction* instr, |
| 467 bool adjusted) { | 448 bool adjusted) { |
| 468 ASSERT(instr != NULL); | 449 ASSERT(instr != NULL); |
| 469 ASSERT(instr->HasPointerMap()); | 450 ASSERT(instr->HasPointerMap()); |
| 470 LPointerMap* pointers = instr->pointer_map(); | 451 LPointerMap* pointers = instr->pointer_map(); |
| 471 RecordPosition(pointers->position()); | 452 RecordPosition(pointers->position()); |
| 472 | 453 |
| 473 if (!adjusted) { | 454 if (!adjusted) { |
| 474 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 455 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 475 } | 456 } |
| 476 __ CallRuntime(fun, argc); | 457 __ CallRuntime(fun, argc); |
| 477 EnsureRelocSpaceForDeoptimization(); | 458 |
| 478 RegisterLazyDeoptimization(instr); | 459 RegisterLazyDeoptimization(instr); |
| 479 } | 460 } |
| 480 | 461 |
| 481 | 462 |
| 482 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 463 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { |
| 483 // Create the environment to bailout to. If the call has side effects | 464 // Create the environment to bailout to. If the call has side effects |
| 484 // execution has to continue after the call otherwise execution can continue | 465 // execution has to continue after the call otherwise execution can continue |
| 485 // from a previous bailout point repeating the call. | 466 // from a previous bailout point repeating the call. |
| 486 LEnvironment* deoptimization_environment; | 467 LEnvironment* deoptimization_environment; |
| 487 if (instr->HasDeoptimizationEnvironment()) { | 468 if (instr->HasDeoptimizationEnvironment()) { |
| (...skipping 2044 matching lines...) |
| 2532 | 2513 |
| 2533 LPointerMap* pointers = instr->pointer_map(); | 2514 LPointerMap* pointers = instr->pointer_map(); |
| 2534 RecordPosition(pointers->position()); | 2515 RecordPosition(pointers->position()); |
| 2535 | 2516 |
| 2536 // Invoke function. | 2517 // Invoke function. |
| 2537 if (*function == *info()->closure()) { | 2518 if (*function == *info()->closure()) { |
| 2538 __ CallSelf(); | 2519 __ CallSelf(); |
| 2539 } else { | 2520 } else { |
| 2540 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 2521 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); |
| 2541 } | 2522 } |
| 2542 EnsureRelocSpaceForDeoptimization(); | |
| 2543 | 2523 |
| 2544 // Setup deoptimization. | 2524 // Setup deoptimization. |
| 2545 RegisterLazyDeoptimization(instr); | 2525 RegisterLazyDeoptimization(instr); |
| 2546 } | 2526 } |
| 2547 | 2527 |
| 2548 | 2528 |
| 2549 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2529 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
| 2550 ASSERT(ToRegister(instr->result()).is(eax)); | 2530 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2551 __ mov(edi, instr->function()); | 2531 __ mov(edi, instr->function()); |
| 2552 CallKnownFunction(instr->function(), instr->arity(), instr); | 2532 CallKnownFunction(instr->function(), instr->arity(), instr); |
| (...skipping 1580 matching lines...) |
| 4133 ASSERT(osr_pc_offset_ == -1); | 4113 ASSERT(osr_pc_offset_ == -1); |
| 4134 osr_pc_offset_ = masm()->pc_offset(); | 4114 osr_pc_offset_ = masm()->pc_offset(); |
| 4135 } | 4115 } |
| 4136 | 4116 |
| 4137 | 4117 |
| 4138 #undef __ | 4118 #undef __ |
| 4139 | 4119 |
| 4140 } } // namespace v8::internal | 4120 } } // namespace v8::internal |
| 4141 | 4121 |
| 4142 #endif // V8_TARGET_ARCH_IA32 | 4122 #endif // V8_TARGET_ARCH_IA32 |
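Note on the deleted reservation logic: the removed LCodeGen::EnsureRelocSpaceForDeoptimization (old lines 383-396) grew a running byte budget at every call site so that lazy deoptimization could later patch calls over the safepoints. The sketch below models just that arithmetic as a standalone unit; the names ReservationEstimate and AddPatchSite are hypothetical and not V8 API, while the 6-bit delta check and the 2/6 byte constants mirror the deleted lines.

#include <cstdint>

// Hypothetical model of the deleted per-call-site accounting. Each
// patch site costs 2 bytes of reloc info when the pc-delta since the
// previous site fits in 6 bits, and is conservatively budgeted at
// 6 bytes otherwise, in case a simple pc-jump record is not enough.
struct ReservationEstimate {
  uint32_t min_size = 0;        // total reloc bytes to reserve
  uint32_t last_pc_offset = 0;  // pc offset of the previous patch site
};

void AddPatchSite(ReservationEstimate* estimate, uint32_t pc_offset) {
  uint32_t pc_delta = pc_offset - estimate->last_pc_offset;
  bool fits_in_6_bits = pc_delta < (1u << 6);  // mirrors is_uintn(pc_delta, 6)
  estimate->min_size += fits_in_6_bits ? 2 : 6;
  estimate->last_pc_offset = pc_offset;
}

After this patch the incremental accounting is gone: FinishCode makes a single Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code) pass over the finished code object (new line 83), so the individual call sites (old lines 452, 477, and 2542) no longer have to update an estimate.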