OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 22 matching lines...) | |
33 #include "stub-cache.h" | 33 #include "stub-cache.h" |
34 | 34 |
35 namespace v8 { | 35 namespace v8 { |
36 namespace internal { | 36 namespace internal { |
37 | 37 |
38 | 38 |
39 class SafepointGenerator : public CallWrapper { | 39 class SafepointGenerator : public CallWrapper { |
40 public: | 40 public: |
41 SafepointGenerator(LCodeGen* codegen, | 41 SafepointGenerator(LCodeGen* codegen, |
42 LPointerMap* pointers, | 42 LPointerMap* pointers, |
43 int deoptimization_index) | 43 Safepoint::DeoptMode mode) |
44 : codegen_(codegen), | 44 : codegen_(codegen), |
45 pointers_(pointers), | 45 pointers_(pointers), |
46 deoptimization_index_(deoptimization_index) { } | 46 deopt_mode_(mode) { } |
47 virtual ~SafepointGenerator() { } | 47 virtual ~SafepointGenerator() { } |
48 | 48 |
49 virtual void BeforeCall(int call_size) const { | 49 virtual void BeforeCall(int call_size) const { } |
50 ASSERT(call_size >= 0); | |
51 // Ensure that we have enough space after the previous safepoint position | |
52 // for the generated code there. | |
53 int call_end = codegen_->masm()->pc_offset() + call_size; | |
54 int prev_jump_end = | |
55 codegen_->LastSafepointEnd() + Deoptimizer::patch_size(); | |
56 if (call_end < prev_jump_end) { | |
57 int padding_size = prev_jump_end - call_end; | |
58 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); | |
59 while (padding_size > 0) { | |
60 codegen_->masm()->nop(); | |
61 padding_size -= Assembler::kInstrSize; | |
62 } | |
63 } | |
64 } | |
65 | 50 |
66 virtual void AfterCall() const { | 51 virtual void AfterCall() const { |
67 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 52 codegen_->RecordSafepoint(pointers_, deopt_mode_); |
68 } | 53 } |
69 | 54 |
70 private: | 55 private: |
71 LCodeGen* codegen_; | 56 LCodeGen* codegen_; |
72 LPointerMap* pointers_; | 57 LPointerMap* pointers_; |
73 int deoptimization_index_; | 58 Safepoint::DeoptMode deopt_mode_; |
74 }; | 59 }; |
75 | 60 |
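A note on the removed BeforeCall body above: the old hook padded with nops so that the gap between the end of the previous safepoint and the end of the upcoming call stayed at least Deoptimizer::patch_size(), leaving room for the lazy-deopt call patch (the same invariant behind the deferred-code padding loop removed further down). Judging from the review exchange later in this diff, that responsibility appears to move near the lazy-bailout point in a later revision not shown here. Below is a minimal standalone sketch of the arithmetic, using illustrative constants rather than V8's actual values:

```cpp
// Standalone sketch of the padding rule the removed BeforeCall enforced.
// kInstrSize / kPatchSize are stand-ins, not V8's real constants.
#include <cassert>
#include <cstdio>

const int kInstrSize = 4;   // ARM instructions are 4 bytes wide.
const int kPatchSize = 12;  // Stand-in for Deoptimizer::patch_size().

// Bytes of nop padding needed before a call of `call_size` bytes starting at
// `pc_offset`, so that the previous safepoint (ending at `prev_safepoint_end`)
// still has patch-size room before the call ends.
int PaddingBeforeCall(int pc_offset, int call_size, int prev_safepoint_end) {
  int call_end = pc_offset + call_size;
  int prev_jump_end = prev_safepoint_end + kPatchSize;
  int padding = (call_end < prev_jump_end) ? prev_jump_end - call_end : 0;
  assert(padding % kInstrSize == 0);  // emitted as whole nop instructions
  return padding;
}

int main() {
  std::printf("%d\n", PaddingBeforeCall(104, 4, 100));  // 4: one nop needed
  std::printf("%d\n", PaddingBeforeCall(200, 4, 100));  // 0: enough room
  return 0;
}
```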
76 | 61 |
77 #define __ masm()-> | 62 #define __ masm()-> |
78 | 63 |
79 bool LCodeGen::GenerateCode() { | 64 bool LCodeGen::GenerateCode() { |
80 HPhase phase("Code generation", chunk()); | 65 HPhase phase("Code generation", chunk()); |
81 ASSERT(is_unused()); | 66 ASSERT(is_unused()); |
82 status_ = GENERATING; | 67 status_ = GENERATING; |
83 CpuFeatures::Scope scope1(VFP3); | 68 CpuFeatures::Scope scope1(VFP3); |
(...skipping 12 matching lines...) | |
96 GenerateDeoptJumpTable() && | 81 GenerateDeoptJumpTable() && |
97 GenerateSafepointTable(); | 82 GenerateSafepointTable(); |
98 } | 83 } |
99 | 84 |
100 | 85 |
101 void LCodeGen::FinishCode(Handle<Code> code) { | 86 void LCodeGen::FinishCode(Handle<Code> code) { |
102 ASSERT(is_done()); | 87 ASSERT(is_done()); |
103 code->set_stack_slots(GetStackSlotCount()); | 88 code->set_stack_slots(GetStackSlotCount()); |
104 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 89 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
105 PopulateDeoptimizationData(code); | 90 PopulateDeoptimizationData(code); |
106 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); | |
107 } | 91 } |
108 | 92 |
109 | 93 |
110 void LCodeGen::Abort(const char* format, ...) { | 94 void LCodeGen::Abort(const char* format, ...) { |
111 if (FLAG_trace_bailout) { | 95 if (FLAG_trace_bailout) { |
112 SmartArrayPointer<char> name( | 96 SmartArrayPointer<char> name( |
113 info()->shared_info()->DebugName()->ToCString()); | 97 info()->shared_info()->DebugName()->ToCString()); |
114 PrintF("Aborting LCodeGen in @\"%s\": ", *name); | 98 PrintF("Aborting LCodeGen in @\"%s\": ", *name); |
115 va_list arguments; | 99 va_list arguments; |
116 va_start(arguments, format); | 100 va_start(arguments, format); |
(...skipping 76 matching lines...) | |
193 if (heap_slots > 0) { | 177 if (heap_slots > 0) { |
194 Comment(";;; Allocate local context"); | 178 Comment(";;; Allocate local context"); |
195 // Argument to NewContext is the function, which is in r1. | 179 // Argument to NewContext is the function, which is in r1. |
196 __ push(r1); | 180 __ push(r1); |
197 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 181 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
198 FastNewContextStub stub(heap_slots); | 182 FastNewContextStub stub(heap_slots); |
199 __ CallStub(&stub); | 183 __ CallStub(&stub); |
200 } else { | 184 } else { |
201 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 185 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
202 } | 186 } |
203 RecordSafepoint(Safepoint::kNoDeoptimizationIndex); | 187 RecordSafepoint(Safepoint::kNoLazyDeopt); |
204 // Context is returned in both r0 and cp. It replaces the context | 188 // Context is returned in both r0 and cp. It replaces the context |
205 // passed to us. It's saved in the stack and kept live in cp. | 189 // passed to us. It's saved in the stack and kept live in cp. |
206 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 190 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
207 // Copy any necessary parameters into the context. | 191 // Copy any necessary parameters into the context. |
208 int num_parameters = scope()->num_parameters(); | 192 int num_parameters = scope()->num_parameters(); |
209 for (int i = 0; i < num_parameters; i++) { | 193 for (int i = 0; i < num_parameters; i++) { |
210 Variable* var = scope()->parameter(i); | 194 Variable* var = scope()->parameter(i); |
211 if (var->IsContextSlot()) { | 195 if (var->IsContextSlot()) { |
212 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 196 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
213 (num_parameters - 1 - i) * kPointerSize; | 197 (num_parameters - 1 - i) * kPointerSize; |
(...skipping 32 matching lines...) | |
246 | 230 |
247 if (emit_instructions) { | 231 if (emit_instructions) { |
248 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); | 232 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); |
249 instr->CompileToNative(this); | 233 instr->CompileToNative(this); |
250 } | 234 } |
251 } | 235 } |
252 return !is_aborted(); | 236 return !is_aborted(); |
253 } | 237 } |
254 | 238 |
255 | 239 |
256 LInstruction* LCodeGen::GetNextInstruction() { | |
257 if (current_instruction_ < instructions_->length() - 1) { | |
258 return instructions_->at(current_instruction_ + 1); | |
259 } else { | |
260 return NULL; | |
261 } | |
262 } | |
263 | |
264 | |
265 bool LCodeGen::GenerateDeferredCode() { | 240 bool LCodeGen::GenerateDeferredCode() { |
266 ASSERT(is_generating()); | 241 ASSERT(is_generating()); |
267 if (deferred_.length() > 0) { | 242 if (deferred_.length() > 0) { |
268 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 243 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
269 LDeferredCode* code = deferred_[i]; | 244 LDeferredCode* code = deferred_[i]; |
270 __ bind(code->entry()); | 245 __ bind(code->entry()); |
271 Comment(";;; Deferred code @%d: %s.", | 246 Comment(";;; Deferred code @%d: %s.", |
272 code->instruction_index(), | 247 code->instruction_index(), |
273 code->instr()->Mnemonic()); | 248 code->instr()->Mnemonic()); |
274 code->Generate(); | 249 code->Generate(); |
275 __ jmp(code->exit()); | 250 __ jmp(code->exit()); |
276 } | 251 } |
277 | |
278 // Pad code to ensure that the last piece of deferred code have | |
279 // room for lazy bailout. | |
280 while ((masm()->pc_offset() - LastSafepointEnd()) | |
281 < Deoptimizer::patch_size()) { | |
282 __ nop(); | |
Vyacheslav Egorov (Chromium)
2011/11/15 12:03:56
Maybe add an assertion to check that we always hav
fschneider
2011/11/15 13:35:24
Done.
| |
283 } | |
284 } | 252 } |
285 | 253 |
286 // Force constant pool emission at the end of the deferred code to make | 254 // Force constant pool emission at the end of the deferred code to make |
287 // sure that no constant pools are emitted after. | 255 // sure that no constant pools are emitted after. |
288 masm()->CheckConstPool(true, false); | 256 masm()->CheckConstPool(true, false); |
289 | 257 |
290 return !is_aborted(); | 258 return !is_aborted(); |
291 } | 259 } |
292 | 260 |
293 | 261 |
(...skipping 265 matching lines...) | |
559 | 527 |
560 | 528 |
561 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 529 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
562 RelocInfo::Mode mode, | 530 RelocInfo::Mode mode, |
563 LInstruction* instr, | 531 LInstruction* instr, |
564 SafepointMode safepoint_mode) { | 532 SafepointMode safepoint_mode) { |
565 ASSERT(instr != NULL); | 533 ASSERT(instr != NULL); |
566 LPointerMap* pointers = instr->pointer_map(); | 534 LPointerMap* pointers = instr->pointer_map(); |
567 RecordPosition(pointers->position()); | 535 RecordPosition(pointers->position()); |
568 __ Call(code, mode); | 536 __ Call(code, mode); |
569 RegisterLazyDeoptimization(instr, safepoint_mode); | 537 RecordSafepointWithLazyDeopt(instr, safepoint_mode); |
570 | 538 |
571 // Signal that we don't inline smi code before these stubs in the | 539 // Signal that we don't inline smi code before these stubs in the |
572 // optimizing code generator. | 540 // optimizing code generator. |
573 if (code->kind() == Code::BINARY_OP_IC || | 541 if (code->kind() == Code::BINARY_OP_IC || |
574 code->kind() == Code::COMPARE_IC) { | 542 code->kind() == Code::COMPARE_IC) { |
575 __ nop(); | 543 __ nop(); |
576 } | 544 } |
577 } | 545 } |
578 | 546 |
579 | 547 |
580 void LCodeGen::CallRuntime(const Runtime::Function* function, | 548 void LCodeGen::CallRuntime(const Runtime::Function* function, |
581 int num_arguments, | 549 int num_arguments, |
582 LInstruction* instr) { | 550 LInstruction* instr) { |
583 ASSERT(instr != NULL); | 551 ASSERT(instr != NULL); |
584 LPointerMap* pointers = instr->pointer_map(); | 552 LPointerMap* pointers = instr->pointer_map(); |
585 ASSERT(pointers != NULL); | 553 ASSERT(pointers != NULL); |
586 RecordPosition(pointers->position()); | 554 RecordPosition(pointers->position()); |
587 | 555 |
588 __ CallRuntime(function, num_arguments); | 556 __ CallRuntime(function, num_arguments); |
589 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); | 557 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
590 } | 558 } |
591 | 559 |
592 | 560 |
593 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 561 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
594 int argc, | 562 int argc, |
595 LInstruction* instr) { | 563 LInstruction* instr) { |
596 __ CallRuntimeSaveDoubles(id); | 564 __ CallRuntimeSaveDoubles(id); |
597 RecordSafepointWithRegisters( | 565 RecordSafepointWithRegisters( |
598 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); | 566 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); |
599 } | 567 } |
600 | 568 |
601 | 569 |
602 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, | 570 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, |
603 SafepointMode safepoint_mode) { | 571 Safepoint::DeoptMode mode) { |
604 // Create the environment to bailout to. If the call has side effects | |
605 // execution has to continue after the call otherwise execution can continue | |
606 // from a previous bailout point repeating the call. | |
607 LEnvironment* deoptimization_environment; | |
608 if (instr->HasDeoptimizationEnvironment()) { | |
609 deoptimization_environment = instr->deoptimization_environment(); | |
610 } else { | |
611 deoptimization_environment = instr->environment(); | |
612 } | |
613 | |
614 RegisterEnvironmentForDeoptimization(deoptimization_environment); | |
615 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | |
616 RecordSafepoint(instr->pointer_map(), | |
617 deoptimization_environment->deoptimization_index()); | |
618 } else { | |
619 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
620 RecordSafepointWithRegisters( | |
621 instr->pointer_map(), | |
622 0, | |
623 deoptimization_environment->deoptimization_index()); | |
624 } | |
625 } | |
626 | |
627 | |
628 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { | |
629 if (!environment->HasBeenRegistered()) { | 572 if (!environment->HasBeenRegistered()) { |
630 // Physical stack frame layout: | 573 // Physical stack frame layout: |
631 // -x ............. -4 0 ..................................... y | 574 // -x ............. -4 0 ..................................... y |
632 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 575 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
633 | 576 |
634 // Layout of the environment: | 577 // Layout of the environment: |
635 // 0 ..................................................... size-1 | 578 // 0 ..................................................... size-1 |
636 // [parameters] [locals] [expression stack including arguments] | 579 // [parameters] [locals] [expression stack including arguments] |
637 | 580 |
638 // Layout of the translation: | 581 // Layout of the translation: |
639 // 0 ........................................................ size - 1 + 4 | 582 // 0 ........................................................ size - 1 + 4 |
640 // [expression stack including arguments] [locals] [4 words] [parameters] | 583 // [expression stack including arguments] [locals] [4 words] [parameters] |
641 // |>------------ translation_size ------------<| | 584 // |>------------ translation_size ------------<| |
642 | 585 |
643 int frame_count = 0; | 586 int frame_count = 0; |
644 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { | 587 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { |
645 ++frame_count; | 588 ++frame_count; |
646 } | 589 } |
647 Translation translation(&translations_, frame_count); | 590 Translation translation(&translations_, frame_count); |
648 WriteTranslation(environment, &translation); | 591 WriteTranslation(environment, &translation); |
649 int deoptimization_index = deoptimizations_.length(); | 592 int deoptimization_index = deoptimizations_.length(); |
650 environment->Register(deoptimization_index, translation.index()); | 593 int pc_offset = masm()->pc_offset(); |
594 environment->Register(deoptimization_index, | |
595 translation.index(), | |
596 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | |
651 deoptimizations_.Add(environment); | 597 deoptimizations_.Add(environment); |
652 } | 598 } |
653 } | 599 } |
654 | 600 |
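To make the environment-registration change above concrete, here is a small self-contained sketch (hypothetical types, not V8's LEnvironment or Safepoint::DeoptMode) of the two pieces this diff touches: counting the chain of outer environments to size the translation, and recording the current pc offset only when the safepoint can deoptimize lazily:

```cpp
// Illustrative stand-ins for LEnvironment / Safepoint::DeoptMode.
#include <cstdio>
#include <vector>

enum class DeoptMode { kNoLazyDeopt, kLazyDeopt };

struct Environment {
  Environment* outer = nullptr;
  bool registered = false;
  int deopt_index = -1;
  int pc_offset = -1;  // stays -1 for eager deopts
};

void RegisterForDeopt(Environment* env, DeoptMode mode, int current_pc,
                      std::vector<Environment*>* deoptimizations) {
  if (env->registered) return;
  // The translation needs one frame per environment in the outer chain.
  int frame_count = 0;
  for (Environment* e = env; e != nullptr; e = e->outer) ++frame_count;
  env->deopt_index = static_cast<int>(deoptimizations->size());
  // Only a lazy-deopt safepoint needs a return pc; eager deopts jump
  // straight to a deoptimization entry.
  env->pc_offset = (mode == DeoptMode::kLazyDeopt) ? current_pc : -1;
  env->registered = true;
  deoptimizations->push_back(env);
  std::printf("frames=%d index=%d pc=%d\n", frame_count, env->deopt_index,
              env->pc_offset);
}

int main() {
  Environment outer, inner;
  inner.outer = &outer;  // e.g. an inlined frame inside an outer frame
  std::vector<Environment*> deopts;
  RegisterForDeopt(&inner, DeoptMode::kLazyDeopt, 128, &deopts);
  return 0;
}
```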
655 | 601 |
656 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { | 602 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { |
657 RegisterEnvironmentForDeoptimization(environment); | 603 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
658 ASSERT(environment->HasBeenRegistered()); | 604 ASSERT(environment->HasBeenRegistered()); |
659 int id = environment->deoptimization_index(); | 605 int id = environment->deoptimization_index(); |
660 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); | 606 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); |
661 ASSERT(entry != NULL); | 607 ASSERT(entry != NULL); |
662 if (entry == NULL) { | 608 if (entry == NULL) { |
663 Abort("bailout was not prepared"); | 609 Abort("bailout was not prepared"); |
664 return; | 610 return; |
665 } | 611 } |
666 | 612 |
667 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM. | 613 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM. |
(...skipping 41 matching lines...) | |
709 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); | 655 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); |
710 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); | 656 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); |
711 | 657 |
712 // Populate the deoptimization entries. | 658 // Populate the deoptimization entries. |
713 for (int i = 0; i < length; i++) { | 659 for (int i = 0; i < length; i++) { |
714 LEnvironment* env = deoptimizations_[i]; | 660 LEnvironment* env = deoptimizations_[i]; |
715 data->SetAstId(i, Smi::FromInt(env->ast_id())); | 661 data->SetAstId(i, Smi::FromInt(env->ast_id())); |
716 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); | 662 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); |
717 data->SetArgumentsStackHeight(i, | 663 data->SetArgumentsStackHeight(i, |
718 Smi::FromInt(env->arguments_stack_height())); | 664 Smi::FromInt(env->arguments_stack_height())); |
665 data->SetPc(i, Smi::FromInt(env->pc_offset())); | |
719 } | 666 } |
720 code->set_deoptimization_data(*data); | 667 code->set_deoptimization_data(*data); |
721 } | 668 } |
722 | 669 |
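The only functional addition in PopulateDeoptimizationData is the per-entry pc offset. A hypothetical sketch of what each entry now carries (plain structs; the real table stores Smis in V8's DeoptimizationInputData): the AST id, translation index and argument stack height as before, plus the code offset recorded when the environment was registered, or -1 for eager deopts:

```cpp
#include <cstdio>
#include <vector>

// Illustrative per-entry record, not V8's actual layout.
struct DeoptEntry {
  int ast_id;
  int translation_index;
  int arguments_stack_height;
  int pc_offset;  // -1 for eager deopts, call-site offset for lazy ones
};

int main() {
  std::vector<DeoptEntry> table = {
      {12, 0, 0, -1},   // eager: deoptimizer jumps straight to its entry
      {47, 9, 2, 132},  // lazy: execution resumes at code offset 132
  };
  for (const DeoptEntry& e : table) {
    std::printf("ast=%d trans=%d args=%d pc=%d\n", e.ast_id,
                e.translation_index, e.arguments_stack_height, e.pc_offset);
  }
  return 0;
}
```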
723 | 670 |
724 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { | 671 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { |
725 int result = deoptimization_literals_.length(); | 672 int result = deoptimization_literals_.length(); |
726 for (int i = 0; i < deoptimization_literals_.length(); ++i) { | 673 for (int i = 0; i < deoptimization_literals_.length(); ++i) { |
727 if (deoptimization_literals_[i].is_identical_to(literal)) return i; | 674 if (deoptimization_literals_[i].is_identical_to(literal)) return i; |
728 } | 675 } |
(...skipping 11 matching lines...) | |
740 for (int i = 0, length = inlined_closures->length(); | 687 for (int i = 0, length = inlined_closures->length(); |
741 i < length; | 688 i < length; |
742 i++) { | 689 i++) { |
743 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 690 DefineDeoptimizationLiteral(inlined_closures->at(i)); |
744 } | 691 } |
745 | 692 |
746 inlined_function_count_ = deoptimization_literals_.length(); | 693 inlined_function_count_ = deoptimization_literals_.length(); |
747 } | 694 } |
748 | 695 |
749 | 696 |
697 void LCodeGen::RecordSafepointWithLazyDeopt( | |
698 LInstruction* instr, SafepointMode safepoint_mode) { | |
699 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | |
700 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt); | |
701 } else { | |
702 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
703 RecordSafepointWithRegisters( | |
704 instr->pointer_map(), 0, Safepoint::kLazyDeopt); | |
705 } | |
706 } | |
707 | |
708 | |
750 void LCodeGen::RecordSafepoint( | 709 void LCodeGen::RecordSafepoint( |
751 LPointerMap* pointers, | 710 LPointerMap* pointers, |
752 Safepoint::Kind kind, | 711 Safepoint::Kind kind, |
753 int arguments, | 712 int arguments, |
754 int deoptimization_index) { | 713 Safepoint::DeoptMode deopt_mode) { |
755 ASSERT(expected_safepoint_kind_ == kind); | 714 ASSERT(expected_safepoint_kind_ == kind); |
756 | 715 |
757 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); | 716 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); |
758 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 717 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
759 kind, arguments, deoptimization_index); | 718 kind, arguments, deopt_mode); |
760 for (int i = 0; i < operands->length(); i++) { | 719 for (int i = 0; i < operands->length(); i++) { |
761 LOperand* pointer = operands->at(i); | 720 LOperand* pointer = operands->at(i); |
762 if (pointer->IsStackSlot()) { | 721 if (pointer->IsStackSlot()) { |
763 safepoint.DefinePointerSlot(pointer->index()); | 722 safepoint.DefinePointerSlot(pointer->index()); |
764 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 723 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
765 safepoint.DefinePointerRegister(ToRegister(pointer)); | 724 safepoint.DefinePointerRegister(ToRegister(pointer)); |
766 } | 725 } |
767 } | 726 } |
768 if (kind & Safepoint::kWithRegisters) { | 727 if (kind & Safepoint::kWithRegisters) { |
769 // Register cp always contains a pointer to the context. | 728 // Register cp always contains a pointer to the context. |
770 safepoint.DefinePointerRegister(cp); | 729 safepoint.DefinePointerRegister(cp); |
771 } | 730 } |
772 } | 731 } |
773 | 732 |
774 | 733 |
775 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 734 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
776 int deoptimization_index) { | 735 Safepoint::DeoptMode deopt_mode) { |
777 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); | 736 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode); |
778 } | 737 } |
779 | 738 |
780 | 739 |
781 void LCodeGen::RecordSafepoint(int deoptimization_index) { | 740 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { |
782 LPointerMap empty_pointers(RelocInfo::kNoPosition); | 741 LPointerMap empty_pointers(RelocInfo::kNoPosition); |
783 RecordSafepoint(&empty_pointers, deoptimization_index); | 742 RecordSafepoint(&empty_pointers, deopt_mode); |
784 } | 743 } |
785 | 744 |
786 | 745 |
787 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, | 746 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, |
788 int arguments, | 747 int arguments, |
789 int deoptimization_index) { | 748 Safepoint::DeoptMode deopt_mode) { |
790 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, | 749 RecordSafepoint( |
791 deoptimization_index); | 750 pointers, Safepoint::kWithRegisters, arguments, deopt_mode); |
792 } | 751 } |
793 | 752 |
794 | 753 |
795 void LCodeGen::RecordSafepointWithRegistersAndDoubles( | 754 void LCodeGen::RecordSafepointWithRegistersAndDoubles( |
796 LPointerMap* pointers, | 755 LPointerMap* pointers, |
797 int arguments, | 756 int arguments, |
798 int deoptimization_index) { | 757 Safepoint::DeoptMode deopt_mode) { |
799 RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments, | 758 RecordSafepoint( |
800 deoptimization_index); | 759 pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode); |
801 } | 760 } |
802 | 761 |
803 | 762 |
804 void LCodeGen::RecordPosition(int position) { | 763 void LCodeGen::RecordPosition(int position) { |
805 if (position == RelocInfo::kNoPosition) return; | 764 if (position == RelocInfo::kNoPosition) return; |
806 masm()->positions_recorder()->RecordPosition(position); | 765 masm()->positions_recorder()->RecordPosition(position); |
807 } | 766 } |
808 | 767 |
809 | 768 |
810 void LCodeGen::DoLabel(LLabel* label) { | 769 void LCodeGen::DoLabel(LLabel* label) { |
(...skipping 14 matching lines...) Expand all Loading... | |
825 | 784 |
826 | 785 |
827 void LCodeGen::DoGap(LGap* gap) { | 786 void LCodeGen::DoGap(LGap* gap) { |
828 for (int i = LGap::FIRST_INNER_POSITION; | 787 for (int i = LGap::FIRST_INNER_POSITION; |
829 i <= LGap::LAST_INNER_POSITION; | 788 i <= LGap::LAST_INNER_POSITION; |
830 i++) { | 789 i++) { |
831 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); | 790 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); |
832 LParallelMove* move = gap->GetParallelMove(inner_pos); | 791 LParallelMove* move = gap->GetParallelMove(inner_pos); |
833 if (move != NULL) DoParallelMove(move); | 792 if (move != NULL) DoParallelMove(move); |
834 } | 793 } |
835 | |
836 LInstruction* next = GetNextInstruction(); | |
837 if (next != NULL && next->IsLazyBailout()) { | |
838 int pc = masm()->pc_offset(); | |
839 safepoints_.SetPcAfterGap(pc); | |
840 } | |
841 } | 794 } |
842 | 795 |
843 | 796 |
844 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { | 797 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { |
845 DoGap(instr); | 798 DoGap(instr); |
846 } | 799 } |
847 | 800 |
848 | 801 |
849 void LCodeGen::DoParameter(LParameter* instr) { | 802 void LCodeGen::DoParameter(LParameter* instr) { |
850 // Nothing to do. | 803 // Nothing to do. |
(...skipping 287 matching lines...) | |
1138 __ mov(r0, right); | 1091 __ mov(r0, right); |
1139 } else { | 1092 } else { |
1140 ASSERT(!left.is(r0) && !right.is(r0)); | 1093 ASSERT(!left.is(r0) && !right.is(r0)); |
1141 __ mov(r0, right); | 1094 __ mov(r0, right); |
1142 __ mov(r1, left); | 1095 __ mov(r1, left); |
1143 } | 1096 } |
1144 BinaryOpStub stub(op, OVERWRITE_LEFT); | 1097 BinaryOpStub stub(op, OVERWRITE_LEFT); |
1145 __ CallStub(&stub); | 1098 __ CallStub(&stub); |
1146 RecordSafepointWithRegistersAndDoubles(instr->pointer_map(), | 1099 RecordSafepointWithRegistersAndDoubles(instr->pointer_map(), |
1147 0, | 1100 0, |
1148 Safepoint::kNoDeoptimizationIndex); | 1101 Safepoint::kNoLazyDeopt); |
1149 // Overwrite the stored value of r0 with the result of the stub. | 1102 // Overwrite the stored value of r0 with the result of the stub. |
1150 __ StoreToSafepointRegistersAndDoublesSlot(r0, r0); | 1103 __ StoreToSafepointRegistersAndDoublesSlot(r0, r0); |
1151 } | 1104 } |
1152 | 1105 |
1153 | 1106 |
1154 void LCodeGen::DoMulI(LMulI* instr) { | 1107 void LCodeGen::DoMulI(LMulI* instr) { |
1155 Register scratch = scratch0(); | 1108 Register scratch = scratch0(); |
1156 Register result = ToRegister(instr->result()); | 1109 Register result = ToRegister(instr->result()); |
1157 // Note that result may alias left. | 1110 // Note that result may alias left. |
1158 Register left = ToRegister(instr->InputAt(0)); | 1111 Register left = ToRegister(instr->InputAt(0)); |
(...skipping 896 matching lines...) | |
2055 } | 2008 } |
2056 | 2009 |
2057 | 2010 |
2058 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 2011 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
2059 class DeferredInstanceOfKnownGlobal: public LDeferredCode { | 2012 class DeferredInstanceOfKnownGlobal: public LDeferredCode { |
2060 public: | 2013 public: |
2061 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 2014 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, |
2062 LInstanceOfKnownGlobal* instr) | 2015 LInstanceOfKnownGlobal* instr) |
2063 : LDeferredCode(codegen), instr_(instr) { } | 2016 : LDeferredCode(codegen), instr_(instr) { } |
2064 virtual void Generate() { | 2017 virtual void Generate() { |
2065 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); | 2018 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); |
2066 } | 2019 } |
2067 virtual LInstruction* instr() { return instr_; } | 2020 virtual LInstruction* instr() { return instr_; } |
2068 Label* map_check() { return &map_check_; } | 2021 Label* map_check() { return &map_check_; } |
2069 private: | 2022 private: |
2070 LInstanceOfKnownGlobal* instr_; | 2023 LInstanceOfKnownGlobal* instr_; |
2071 Label map_check_; | 2024 Label map_check_; |
2072 }; | 2025 }; |
2073 | 2026 |
2074 DeferredInstanceOfKnownGlobal* deferred; | 2027 DeferredInstanceOfKnownGlobal* deferred; |
2075 deferred = new DeferredInstanceOfKnownGlobal(this, instr); | 2028 deferred = new DeferredInstanceOfKnownGlobal(this, instr); |
(...skipping 46 matching lines...) | |
2122 __ bind(&false_result); | 2075 __ bind(&false_result); |
2123 __ LoadRoot(result, Heap::kFalseValueRootIndex); | 2076 __ LoadRoot(result, Heap::kFalseValueRootIndex); |
2124 | 2077 |
2125 // Here result has either true or false. Deferred code also produces true or | 2078 // Here result has either true or false. Deferred code also produces true or |
2126 // false object. | 2079 // false object. |
2127 __ bind(deferred->exit()); | 2080 __ bind(deferred->exit()); |
2128 __ bind(&done); | 2081 __ bind(&done); |
2129 } | 2082 } |
2130 | 2083 |
2131 | 2084 |
2132 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 2085 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
2133 Label* map_check) { | 2086 Label* map_check) { |
2134 Register result = ToRegister(instr->result()); | 2087 Register result = ToRegister(instr->result()); |
2135 ASSERT(result.is(r0)); | 2088 ASSERT(result.is(r0)); |
2136 | 2089 |
2137 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | 2090 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
2138 flags = static_cast<InstanceofStub::Flags>( | 2091 flags = static_cast<InstanceofStub::Flags>( |
2139 flags | InstanceofStub::kArgsInRegisters); | 2092 flags | InstanceofStub::kArgsInRegisters); |
2140 flags = static_cast<InstanceofStub::Flags>( | 2093 flags = static_cast<InstanceofStub::Flags>( |
2141 flags | InstanceofStub::kCallSiteInlineCheck); | 2094 flags | InstanceofStub::kCallSiteInlineCheck); |
2142 flags = static_cast<InstanceofStub::Flags>( | 2095 flags = static_cast<InstanceofStub::Flags>( |
2143 flags | InstanceofStub::kReturnTrueFalseObject); | 2096 flags | InstanceofStub::kReturnTrueFalseObject); |
(...skipping 11 matching lines...) | |
2155 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; | 2108 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; |
2156 Label before_push_delta; | 2109 Label before_push_delta; |
2157 __ bind(&before_push_delta); | 2110 __ bind(&before_push_delta); |
2158 __ BlockConstPoolFor(kAdditionalDelta); | 2111 __ BlockConstPoolFor(kAdditionalDelta); |
2159 __ mov(temp, Operand(delta * kPointerSize)); | 2112 __ mov(temp, Operand(delta * kPointerSize)); |
2160 __ StoreToSafepointRegisterSlot(temp, temp); | 2113 __ StoreToSafepointRegisterSlot(temp, temp); |
2161 CallCodeGeneric(stub.GetCode(), | 2114 CallCodeGeneric(stub.GetCode(), |
2162 RelocInfo::CODE_TARGET, | 2115 RelocInfo::CODE_TARGET, |
2163 instr, | 2116 instr, |
2164 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 2117 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
2118 ASSERT(instr->HasDeoptimizationEnvironment()); | |
2119 LEnvironment* env = instr->deoptimization_environment(); | |
2120 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
2165 // Put the result value into the result register slot and | 2121 // Put the result value into the result register slot and |
2166 // restore all registers. | 2122 // restore all registers. |
2167 __ StoreToSafepointRegisterSlot(result, result); | 2123 __ StoreToSafepointRegisterSlot(result, result); |
2168 } | 2124 } |
2169 | 2125 |
2170 | 2126 |
2171 static Condition ComputeCompareCondition(Token::Value op) { | 2127 static Condition ComputeCompareCondition(Token::Value op) { |
2172 switch (op) { | 2128 switch (op) { |
2173 case Token::EQ_STRICT: | 2129 case Token::EQ_STRICT: |
2174 case Token::EQ: | 2130 case Token::EQ: |
(...skipping 596 matching lines...) | |
2771 __ b(eq, &invoke); | 2727 __ b(eq, &invoke); |
2772 __ bind(&loop); | 2728 __ bind(&loop); |
2773 __ ldr(scratch, MemOperand(elements, length, LSL, 2)); | 2729 __ ldr(scratch, MemOperand(elements, length, LSL, 2)); |
2774 __ push(scratch); | 2730 __ push(scratch); |
2775 __ sub(length, length, Operand(1), SetCC); | 2731 __ sub(length, length, Operand(1), SetCC); |
2776 __ b(ne, &loop); | 2732 __ b(ne, &loop); |
2777 | 2733 |
2778 __ bind(&invoke); | 2734 __ bind(&invoke); |
2779 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 2735 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
2780 LPointerMap* pointers = instr->pointer_map(); | 2736 LPointerMap* pointers = instr->pointer_map(); |
2781 LEnvironment* env = instr->deoptimization_environment(); | |
2782 RecordPosition(pointers->position()); | 2737 RecordPosition(pointers->position()); |
2783 RegisterEnvironmentForDeoptimization(env); | 2738 SafepointGenerator safepoint_generator( |
2784 SafepointGenerator safepoint_generator(this, | 2739 this, pointers, Safepoint::kLazyDeopt); |
2785 pointers, | |
2786 env->deoptimization_index()); | |
2787 // The number of arguments is stored in receiver which is r0, as expected | 2740 // The number of arguments is stored in receiver which is r0, as expected |
2788 // by InvokeFunction. | 2741 // by InvokeFunction. |
2789 v8::internal::ParameterCount actual(receiver); | 2742 v8::internal::ParameterCount actual(receiver); |
2790 __ InvokeFunction(function, actual, CALL_FUNCTION, | 2743 __ InvokeFunction(function, actual, CALL_FUNCTION, |
2791 safepoint_generator, CALL_AS_METHOD); | 2744 safepoint_generator, CALL_AS_METHOD); |
2792 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2745 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2793 } | 2746 } |
2794 | 2747 |
2795 | 2748 |
2796 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 2749 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
(...skipping 61 matching lines...) | |
2858 | 2811 |
2859 LPointerMap* pointers = instr->pointer_map(); | 2812 LPointerMap* pointers = instr->pointer_map(); |
2860 RecordPosition(pointers->position()); | 2813 RecordPosition(pointers->position()); |
2861 | 2814 |
2862 // Invoke function. | 2815 // Invoke function. |
2863 __ SetCallKind(r5, call_kind); | 2816 __ SetCallKind(r5, call_kind); |
2864 __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 2817 __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); |
2865 __ Call(ip); | 2818 __ Call(ip); |
2866 | 2819 |
2867 // Setup deoptimization. | 2820 // Setup deoptimization. |
2868 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); | 2821 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
2869 | 2822 |
2870 // Restore context. | 2823 // Restore context. |
2871 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2824 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2872 } | 2825 } |
2873 | 2826 |
2874 | 2827 |
2875 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2828 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
2876 ASSERT(ToRegister(instr->result()).is(r0)); | 2829 ASSERT(ToRegister(instr->result()).is(r0)); |
2877 __ mov(r1, Operand(instr->function())); | 2830 __ mov(r1, Operand(instr->function())); |
2878 CallKnownFunction(instr->function(), | 2831 CallKnownFunction(instr->function(), |
(...skipping 344 matching lines...) | |
3223 UNREACHABLE(); | 3176 UNREACHABLE(); |
3224 } | 3177 } |
3225 } | 3178 } |
3226 | 3179 |
3227 | 3180 |
3228 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3181 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
3229 ASSERT(ToRegister(instr->function()).is(r1)); | 3182 ASSERT(ToRegister(instr->function()).is(r1)); |
3230 ASSERT(instr->HasPointerMap()); | 3183 ASSERT(instr->HasPointerMap()); |
3231 ASSERT(instr->HasDeoptimizationEnvironment()); | 3184 ASSERT(instr->HasDeoptimizationEnvironment()); |
3232 LPointerMap* pointers = instr->pointer_map(); | 3185 LPointerMap* pointers = instr->pointer_map(); |
3233 LEnvironment* env = instr->deoptimization_environment(); | |
3234 RecordPosition(pointers->position()); | 3186 RecordPosition(pointers->position()); |
3235 RegisterEnvironmentForDeoptimization(env); | 3187 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
3236 SafepointGenerator generator(this, pointers, env->deoptimization_index()); | |
3237 ParameterCount count(instr->arity()); | 3188 ParameterCount count(instr->arity()); |
3238 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); | 3189 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); |
3239 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3190 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
3240 } | 3191 } |
3241 | 3192 |
3242 | 3193 |
3243 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 3194 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { |
3244 ASSERT(ToRegister(instr->result()).is(r0)); | 3195 ASSERT(ToRegister(instr->result()).is(r0)); |
3245 | 3196 |
3246 int arity = instr->arity(); | 3197 int arity = instr->arity(); |
(...skipping 1304 matching lines...) | |
4551 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); | 4502 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); |
4552 | 4503 |
4553 // Check the marker in the calling frame. | 4504 // Check the marker in the calling frame. |
4554 __ bind(&check_frame_marker); | 4505 __ bind(&check_frame_marker); |
4555 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); | 4506 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); |
4556 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); | 4507 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); |
4557 } | 4508 } |
4558 | 4509 |
4559 | 4510 |
4560 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 4511 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
4561 // No code for lazy bailout instruction. Used to capture environment after a | 4512 ASSERT(instr->HasEnvironment()); |
4562 // call for populating the safepoint data with deoptimization data. | 4513 LEnvironment* env = instr->environment(); |
4514 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
4515 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
Vyacheslav Egorov (Chromium)
2011/11/15 12:03:56
I think it should emit nop padding (the one remove
fschneider
2011/11/15 13:35:24
Done.
| |
4563 } | 4516 } |
4564 | 4517 |
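DoLazyBailout is no longer a no-op: it registers the environment for lazy deoptimization and then ties that environment's deoptimization index to the safepoint recorded for the preceding call. The review exchange above suggests the final revision also emits nop padding here to preserve patch-size room; that revision is not shown in this excerpt. Below is a rough standalone sketch of the index-recording half, with hypothetical types rather than V8's safepoint table:

```cpp
#include <cassert>
#include <vector>

// Sketch only: the safepoint for the call is defined first without a
// concrete deopt index; the LLazyBailout that follows fills it in.
struct SafepointEntry {
  int pc;
  int deopt_index;  // -1 until a lazy bailout claims this safepoint
};

class SafepointTable {
 public:
  void DefineSafepoint(int pc) { entries_.push_back({pc, -1}); }
  void RecordLazyDeoptimizationIndex(int index) {
    assert(!entries_.empty());
    entries_.back().deopt_index = index;
  }
  const std::vector<SafepointEntry>& entries() const { return entries_; }

 private:
  std::vector<SafepointEntry> entries_;
};

int main() {
  SafepointTable table;
  table.DefineSafepoint(/*pc=*/40);        // safepoint for the call site
  table.RecordLazyDeoptimizationIndex(3);  // LLazyBailout registered env #3
  assert(table.entries().back().deopt_index == 3);
  return 0;
}
```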
4565 | 4518 |
4566 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 4519 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
4567 DeoptimizeIf(al, instr->environment()); | 4520 DeoptimizeIf(al, instr->environment()); |
4568 } | 4521 } |
4569 | 4522 |
4570 | 4523 |
4571 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { | 4524 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { |
4572 Register object = ToRegister(instr->object()); | 4525 Register object = ToRegister(instr->object()); |
4573 Register key = ToRegister(instr->key()); | 4526 Register key = ToRegister(instr->key()); |
4574 Register strict = scratch0(); | 4527 Register strict = scratch0(); |
4575 __ mov(strict, Operand(Smi::FromInt(strict_mode_flag()))); | 4528 __ mov(strict, Operand(Smi::FromInt(strict_mode_flag()))); |
4576 __ Push(object, key, strict); | 4529 __ Push(object, key, strict); |
4577 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 4530 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
4578 LPointerMap* pointers = instr->pointer_map(); | 4531 LPointerMap* pointers = instr->pointer_map(); |
4579 LEnvironment* env = instr->deoptimization_environment(); | |
4580 RecordPosition(pointers->position()); | 4532 RecordPosition(pointers->position()); |
4581 RegisterEnvironmentForDeoptimization(env); | 4533 SafepointGenerator safepoint_generator( |
4582 SafepointGenerator safepoint_generator(this, | 4534 this, pointers, Safepoint::kLazyDeopt); |
4583 pointers, | |
4584 env->deoptimization_index()); | |
4585 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); | 4535 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); |
4586 } | 4536 } |
4587 | 4537 |
4588 | 4538 |
4589 void LCodeGen::DoIn(LIn* instr) { | 4539 void LCodeGen::DoIn(LIn* instr) { |
4590 Register obj = ToRegister(instr->object()); | 4540 Register obj = ToRegister(instr->object()); |
4591 Register key = ToRegister(instr->key()); | 4541 Register key = ToRegister(instr->key()); |
4592 __ Push(key, obj); | 4542 __ Push(key, obj); |
4593 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 4543 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
4594 LPointerMap* pointers = instr->pointer_map(); | 4544 LPointerMap* pointers = instr->pointer_map(); |
4595 LEnvironment* env = instr->deoptimization_environment(); | |
4596 RecordPosition(pointers->position()); | 4545 RecordPosition(pointers->position()); |
4597 RegisterEnvironmentForDeoptimization(env); | 4546 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt); |
4598 SafepointGenerator safepoint_generator(this, | |
4599 pointers, | |
4600 env->deoptimization_index()); | |
4601 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); | 4547 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); |
4602 } | 4548 } |
4603 | 4549 |
4604 | 4550 |
4605 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 4551 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
4606 { | 4552 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
4607 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 4553 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
4608 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 4554 RecordSafepointWithLazyDeopt( |
4609 RegisterLazyDeoptimization( | 4555 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
4610 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 4556 ASSERT(instr->HasEnvironment()); |
4611 } | 4557 LEnvironment* env = instr->environment(); |
4612 | 4558 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
4613 // The gap code includes the restoring of the safepoint registers. | |
4614 int pc = masm()->pc_offset(); | |
4615 safepoints_.SetPcAfterGap(pc); | |
4616 } | 4559 } |
4617 | 4560 |
4618 | 4561 |
4619 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 4562 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
4620 class DeferredStackCheck: public LDeferredCode { | 4563 class DeferredStackCheck: public LDeferredCode { |
4621 public: | 4564 public: |
4622 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) | 4565 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) |
4623 : LDeferredCode(codegen), instr_(instr) { } | 4566 : LDeferredCode(codegen), instr_(instr) { } |
4624 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } | 4567 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } |
4625 virtual LInstruction* instr() { return instr_; } | 4568 virtual LInstruction* instr() { return instr_; } |
4626 private: | 4569 private: |
4627 LStackCheck* instr_; | 4570 LStackCheck* instr_; |
4628 }; | 4571 }; |
4629 | 4572 |
4573 ASSERT(instr->HasEnvironment()); | |
4574 LEnvironment* env = instr->environment(); | |
4630 if (instr->hydrogen()->is_function_entry()) { | 4575 if (instr->hydrogen()->is_function_entry()) { |
4631 // Perform stack overflow check. | 4576 // Perform stack overflow check. |
4632 Label done; | 4577 Label done; |
4633 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 4578 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
4634 __ cmp(sp, Operand(ip)); | 4579 __ cmp(sp, Operand(ip)); |
4635 __ b(hs, &done); | 4580 __ b(hs, &done); |
4636 StackCheckStub stub; | 4581 StackCheckStub stub; |
4637 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4582 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
4638 __ bind(&done); | 4583 __ bind(&done); |
4584 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
4585 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
4639 } else { | 4586 } else { |
4640 ASSERT(instr->hydrogen()->is_backwards_branch()); | 4587 ASSERT(instr->hydrogen()->is_backwards_branch()); |
4641 // Perform stack overflow check if this goto needs it before jumping. | 4588 // Perform stack overflow check if this goto needs it before jumping. |
4642 DeferredStackCheck* deferred_stack_check = | 4589 DeferredStackCheck* deferred_stack_check = |
4643 new DeferredStackCheck(this, instr); | 4590 new DeferredStackCheck(this, instr); |
4644 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 4591 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
4645 __ cmp(sp, Operand(ip)); | 4592 __ cmp(sp, Operand(ip)); |
4646 __ b(lo, deferred_stack_check->entry()); | 4593 __ b(lo, deferred_stack_check->entry()); |
4647 __ bind(instr->done_label()); | 4594 __ bind(instr->done_label()); |
4648 deferred_stack_check->SetExit(instr->done_label()); | 4595 deferred_stack_check->SetExit(instr->done_label()); |
4596 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
4597 // Don't record a deoptimization index for the safepoint here. | |
4598 // This will be done explicitly when emitting call and the safepoint in | |
4599 // the deferred code. | |
4649 } | 4600 } |
4650 } | 4601 } |
4651 | 4602 |
4652 | 4603 |
4653 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 4604 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
4654 // This is a pseudo-instruction that ensures that the environment here is | 4605 // This is a pseudo-instruction that ensures that the environment here is |
4655 // properly registered for deoptimization and records the assembler's PC | 4606 // properly registered for deoptimization and records the assembler's PC |
4656 // offset. | 4607 // offset. |
4657 LEnvironment* environment = instr->environment(); | 4608 LEnvironment* environment = instr->environment(); |
4658 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), | 4609 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), |
4659 instr->SpilledDoubleRegisterArray()); | 4610 instr->SpilledDoubleRegisterArray()); |
4660 | 4611 |
4661 // If the environment were already registered, we would have no way of | 4612 // If the environment were already registered, we would have no way of |
4662 // backpatching it with the spill slot operands. | 4613 // backpatching it with the spill slot operands. |
4663 ASSERT(!environment->HasBeenRegistered()); | 4614 ASSERT(!environment->HasBeenRegistered()); |
4664 RegisterEnvironmentForDeoptimization(environment); | 4615 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
4665 ASSERT(osr_pc_offset_ == -1); | 4616 ASSERT(osr_pc_offset_ == -1); |
4666 osr_pc_offset_ = masm()->pc_offset(); | 4617 osr_pc_offset_ = masm()->pc_offset(); |
4667 } | 4618 } |
4668 | 4619 |
4669 | 4620 |
4670 | 4621 |
4671 | 4622 |
4672 #undef __ | 4623 #undef __ |
4673 | 4624 |
4674 } } // namespace v8::internal | 4625 } } // namespace v8::internal |