Chromium Code Reviews

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 8492004: Fix lazy deoptimization at HInvokeFunction and enable target-recording call-function stub. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: added nop-padding and assertions on all platforms (created 9 years, 1 month ago)
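
The core of this patch set is the nop-padding for lazy deoptimization: after every point that can deoptimize lazily, the generated code must leave at least Deoptimizer::patch_size() bytes before the next such point, so the deoptimizer can later overwrite that region with a call to a deoptimization entry. A minimal sketch of the invariant, using a hypothetical SketchAssembler in place of the real V8 Assembler:

    // Sketch only, not the V8 sources: a stand-in assembler with just the two
    // operations the padding logic needs.
    struct SketchAssembler {
      static const int kInstrSize = 4;  // every ARM instruction is 4 bytes
      int pc = 0;
      int pc_offset() const { return pc; }
      void nop() { pc += kInstrSize; }
    };

    // Pad with nops until the previous lazy-deopt point is followed by at
    // least patch_size bytes of code, so a call can be patched in later.
    int EnsureSpace(SketchAssembler* masm, int last_lazy_deopt_pc, int patch_size) {
      int current_pc = masm->pc_offset();
      if (current_pc < last_lazy_deopt_pc + patch_size) {
        int padding = last_lazy_deopt_pc + patch_size - current_pc;
        while (padding > 0) {
          masm->nop();
          padding -= SketchAssembler::kInstrSize;
        }
      }
      return current_pc;  // the caller records this as the new last lazy-deopt pc
    }

The new LCodeGen::EnsureSpaceForLazyDeopt() further down does exactly this check against last_lazy_deopt_pc_, and replaces the per-call padding that the old SafepointGenerator::BeforeCall() used to emit.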
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 22 matching lines...)
33 #include "stub-cache.h" 33 #include "stub-cache.h"
34 34
35 namespace v8 { 35 namespace v8 {
36 namespace internal { 36 namespace internal {
37 37
38 38
39 class SafepointGenerator : public CallWrapper { 39 class SafepointGenerator : public CallWrapper {
40 public: 40 public:
41 SafepointGenerator(LCodeGen* codegen, 41 SafepointGenerator(LCodeGen* codegen,
42 LPointerMap* pointers, 42 LPointerMap* pointers,
43 int deoptimization_index) 43 Safepoint::DeoptMode mode)
44 : codegen_(codegen), 44 : codegen_(codegen),
45 pointers_(pointers), 45 pointers_(pointers),
46 deoptimization_index_(deoptimization_index) { } 46 deopt_mode_(mode) { }
47 virtual ~SafepointGenerator() { } 47 virtual ~SafepointGenerator() { }
48 48
49 virtual void BeforeCall(int call_size) const { 49 virtual void BeforeCall(int call_size) const { }
50 ASSERT(call_size >= 0);
51 // Ensure that we have enough space after the previous safepoint position
52 // for the generated code there.
53 int call_end = codegen_->masm()->pc_offset() + call_size;
54 int prev_jump_end =
55 codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
56 if (call_end < prev_jump_end) {
57 int padding_size = prev_jump_end - call_end;
58 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
59 while (padding_size > 0) {
60 codegen_->masm()->nop();
61 padding_size -= Assembler::kInstrSize;
62 }
63 }
64 }
65 50
66 virtual void AfterCall() const { 51 virtual void AfterCall() const {
67 codegen_->RecordSafepoint(pointers_, deoptimization_index_); 52 codegen_->RecordSafepoint(pointers_, deopt_mode_);
68 } 53 }
69 54
70 private: 55 private:
71 LCodeGen* codegen_; 56 LCodeGen* codegen_;
72 LPointerMap* pointers_; 57 LPointerMap* pointers_;
73 int deoptimization_index_; 58 Safepoint::DeoptMode deopt_mode_;
74 }; 59 };
75 60
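
With this change SafepointGenerator takes a Safepoint::DeoptMode instead of a precomputed deoptimization index, and BeforeCall() no longer pads the code; that job moves to the new EnsureSpaceForLazyDeopt() introduced later in this file. A call site then looks like the new DoInvokeFunction further down:

    SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
    ParameterCount count(instr->arity());
    __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);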
76 61
77 #define __ masm()-> 62 #define __ masm()->
78 63
79 bool LCodeGen::GenerateCode() { 64 bool LCodeGen::GenerateCode() {
80 HPhase phase("Code generation", chunk()); 65 HPhase phase("Code generation", chunk());
81 ASSERT(is_unused()); 66 ASSERT(is_unused());
82 status_ = GENERATING; 67 status_ = GENERATING;
83 CpuFeatures::Scope scope1(VFP3); 68 CpuFeatures::Scope scope1(VFP3);
(...skipping 12 matching lines...)
96 GenerateDeoptJumpTable() && 81 GenerateDeoptJumpTable() &&
97 GenerateSafepointTable(); 82 GenerateSafepointTable();
98 } 83 }
99 84
100 85
101 void LCodeGen::FinishCode(Handle<Code> code) { 86 void LCodeGen::FinishCode(Handle<Code> code) {
102 ASSERT(is_done()); 87 ASSERT(is_done());
103 code->set_stack_slots(GetStackSlotCount()); 88 code->set_stack_slots(GetStackSlotCount());
104 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); 89 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
105 PopulateDeoptimizationData(code); 90 PopulateDeoptimizationData(code);
106 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
107 } 91 }
108 92
109 93
110 void LCodeGen::Abort(const char* format, ...) { 94 void LCodeGen::Abort(const char* format, ...) {
111 if (FLAG_trace_bailout) { 95 if (FLAG_trace_bailout) {
112 SmartArrayPointer<char> name( 96 SmartArrayPointer<char> name(
113 info()->shared_info()->DebugName()->ToCString()); 97 info()->shared_info()->DebugName()->ToCString());
114 PrintF("Aborting LCodeGen in @\"%s\": ", *name); 98 PrintF("Aborting LCodeGen in @\"%s\": ", *name);
115 va_list arguments; 99 va_list arguments;
116 va_start(arguments, format); 100 va_start(arguments, format);
(...skipping 76 matching lines...)
193 if (heap_slots > 0) { 177 if (heap_slots > 0) {
194 Comment(";;; Allocate local context"); 178 Comment(";;; Allocate local context");
195 // Argument to NewContext is the function, which is in r1. 179 // Argument to NewContext is the function, which is in r1.
196 __ push(r1); 180 __ push(r1);
197 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 181 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
198 FastNewContextStub stub(heap_slots); 182 FastNewContextStub stub(heap_slots);
199 __ CallStub(&stub); 183 __ CallStub(&stub);
200 } else { 184 } else {
201 __ CallRuntime(Runtime::kNewFunctionContext, 1); 185 __ CallRuntime(Runtime::kNewFunctionContext, 1);
202 } 186 }
203 RecordSafepoint(Safepoint::kNoDeoptimizationIndex); 187 RecordSafepoint(Safepoint::kNoLazyDeopt);
204 // Context is returned in both r0 and cp. It replaces the context 188 // Context is returned in both r0 and cp. It replaces the context
205 // passed to us. It's saved in the stack and kept live in cp. 189 // passed to us. It's saved in the stack and kept live in cp.
206 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 190 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
207 // Copy any necessary parameters into the context. 191 // Copy any necessary parameters into the context.
208 int num_parameters = scope()->num_parameters(); 192 int num_parameters = scope()->num_parameters();
209 for (int i = 0; i < num_parameters; i++) { 193 for (int i = 0; i < num_parameters; i++) {
210 Variable* var = scope()->parameter(i); 194 Variable* var = scope()->parameter(i);
211 if (var->IsContextSlot()) { 195 if (var->IsContextSlot()) {
212 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 196 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
213 (num_parameters - 1 - i) * kPointerSize; 197 (num_parameters - 1 - i) * kPointerSize;
(...skipping 28 matching lines...)
242 if (instr->IsLabel()) { 226 if (instr->IsLabel()) {
243 LLabel* label = LLabel::cast(instr); 227 LLabel* label = LLabel::cast(instr);
244 emit_instructions = !label->HasReplacement(); 228 emit_instructions = !label->HasReplacement();
245 } 229 }
246 230
247 if (emit_instructions) { 231 if (emit_instructions) {
248 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); 232 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
249 instr->CompileToNative(this); 233 instr->CompileToNative(this);
250 } 234 }
251 } 235 }
236 EnsureSpaceForLazyDeopt();
252 return !is_aborted(); 237 return !is_aborted();
253 } 238 }
254 239
255 240
256 LInstruction* LCodeGen::GetNextInstruction() {
257 if (current_instruction_ < instructions_->length() - 1) {
258 return instructions_->at(current_instruction_ + 1);
259 } else {
260 return NULL;
261 }
262 }
263
264
265 bool LCodeGen::GenerateDeferredCode() { 241 bool LCodeGen::GenerateDeferredCode() {
266 ASSERT(is_generating()); 242 ASSERT(is_generating());
267 if (deferred_.length() > 0) { 243 if (deferred_.length() > 0) {
268 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 244 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
269 LDeferredCode* code = deferred_[i]; 245 LDeferredCode* code = deferred_[i];
270 __ bind(code->entry()); 246 __ bind(code->entry());
271 Comment(";;; Deferred code @%d: %s.", 247 Comment(";;; Deferred code @%d: %s.",
272 code->instruction_index(), 248 code->instruction_index(),
273 code->instr()->Mnemonic()); 249 code->instr()->Mnemonic());
274 code->Generate(); 250 code->Generate();
275 __ jmp(code->exit()); 251 __ jmp(code->exit());
276 } 252 }
277
278 // Pad code to ensure that the last piece of deferred code have
279 // room for lazy bailout.
280 while ((masm()->pc_offset() - LastSafepointEnd())
281 < Deoptimizer::patch_size()) {
282 __ nop();
283 }
284 } 253 }
285 254
286 // Force constant pool emission at the end of the deferred code to make 255 // Force constant pool emission at the end of the deferred code to make
287 // sure that no constant pools are emitted after. 256 // sure that no constant pools are emitted after.
288 masm()->CheckConstPool(true, false); 257 masm()->CheckConstPool(true, false);
289 258
290 return !is_aborted(); 259 return !is_aborted();
291 } 260 }
292 261
293 262
(...skipping 265 matching lines...)
559 528
560 529
561 void LCodeGen::CallCodeGeneric(Handle<Code> code, 530 void LCodeGen::CallCodeGeneric(Handle<Code> code,
562 RelocInfo::Mode mode, 531 RelocInfo::Mode mode,
563 LInstruction* instr, 532 LInstruction* instr,
564 SafepointMode safepoint_mode) { 533 SafepointMode safepoint_mode) {
565 ASSERT(instr != NULL); 534 ASSERT(instr != NULL);
566 LPointerMap* pointers = instr->pointer_map(); 535 LPointerMap* pointers = instr->pointer_map();
567 RecordPosition(pointers->position()); 536 RecordPosition(pointers->position());
568 __ Call(code, mode); 537 __ Call(code, mode);
569 RegisterLazyDeoptimization(instr, safepoint_mode); 538 RecordSafepointWithLazyDeopt(instr, safepoint_mode);
570 539
571 // Signal that we don't inline smi code before these stubs in the 540 // Signal that we don't inline smi code before these stubs in the
572 // optimizing code generator. 541 // optimizing code generator.
573 if (code->kind() == Code::BINARY_OP_IC || 542 if (code->kind() == Code::BINARY_OP_IC ||
574 code->kind() == Code::COMPARE_IC) { 543 code->kind() == Code::COMPARE_IC) {
575 __ nop(); 544 __ nop();
576 } 545 }
577 } 546 }
578 547
579 548
580 void LCodeGen::CallRuntime(const Runtime::Function* function, 549 void LCodeGen::CallRuntime(const Runtime::Function* function,
581 int num_arguments, 550 int num_arguments,
582 LInstruction* instr) { 551 LInstruction* instr) {
583 ASSERT(instr != NULL); 552 ASSERT(instr != NULL);
584 LPointerMap* pointers = instr->pointer_map(); 553 LPointerMap* pointers = instr->pointer_map();
585 ASSERT(pointers != NULL); 554 ASSERT(pointers != NULL);
586 RecordPosition(pointers->position()); 555 RecordPosition(pointers->position());
587 556
588 __ CallRuntime(function, num_arguments); 557 __ CallRuntime(function, num_arguments);
589 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); 558 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
590 } 559 }
591 560
592 561
593 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, 562 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
594 int argc, 563 int argc,
595 LInstruction* instr) { 564 LInstruction* instr) {
596 __ CallRuntimeSaveDoubles(id); 565 __ CallRuntimeSaveDoubles(id);
597 RecordSafepointWithRegisters( 566 RecordSafepointWithRegisters(
598 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); 567 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
599 } 568 }
600 569
601 570
602 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, 571 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
603 SafepointMode safepoint_mode) { 572 Safepoint::DeoptMode mode) {
604 // Create the environment to bailout to. If the call has side effects
605 // execution has to continue after the call otherwise execution can continue
606 // from a previous bailout point repeating the call.
607 LEnvironment* deoptimization_environment;
608 if (instr->HasDeoptimizationEnvironment()) {
609 deoptimization_environment = instr->deoptimization_environment();
610 } else {
611 deoptimization_environment = instr->environment();
612 }
613
614 RegisterEnvironmentForDeoptimization(deoptimization_environment);
615 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
616 RecordSafepoint(instr->pointer_map(),
617 deoptimization_environment->deoptimization_index());
618 } else {
619 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
620 RecordSafepointWithRegisters(
621 instr->pointer_map(),
622 0,
623 deoptimization_environment->deoptimization_index());
624 }
625 }
626
627
628 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
629 if (!environment->HasBeenRegistered()) { 573 if (!environment->HasBeenRegistered()) {
630 // Physical stack frame layout: 574 // Physical stack frame layout:
631 // -x ............. -4 0 ..................................... y 575 // -x ............. -4 0 ..................................... y
632 // [incoming arguments] [spill slots] [pushed outgoing arguments] 576 // [incoming arguments] [spill slots] [pushed outgoing arguments]
633 577
634 // Layout of the environment: 578 // Layout of the environment:
635 // 0 ..................................................... size-1 579 // 0 ..................................................... size-1
636 // [parameters] [locals] [expression stack including arguments] 580 // [parameters] [locals] [expression stack including arguments]
637 581
638 // Layout of the translation: 582 // Layout of the translation:
639 // 0 ........................................................ size - 1 + 4 583 // 0 ........................................................ size - 1 + 4
640 // [expression stack including arguments] [locals] [4 words] [parameters] 584 // [expression stack including arguments] [locals] [4 words] [parameters]
641 // |>------------ translation_size ------------<| 585 // |>------------ translation_size ------------<|
642 586
643 int frame_count = 0; 587 int frame_count = 0;
644 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { 588 for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
645 ++frame_count; 589 ++frame_count;
646 } 590 }
647 Translation translation(&translations_, frame_count); 591 Translation translation(&translations_, frame_count);
648 WriteTranslation(environment, &translation); 592 WriteTranslation(environment, &translation);
649 int deoptimization_index = deoptimizations_.length(); 593 int deoptimization_index = deoptimizations_.length();
650 environment->Register(deoptimization_index, translation.index()); 594 int pc_offset = masm()->pc_offset();
595 environment->Register(deoptimization_index,
596 translation.index(),
597 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
651 deoptimizations_.Add(environment); 598 deoptimizations_.Add(environment);
652 } 599 }
653 } 600 }
654 601
655 602
656 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { 603 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
657 RegisterEnvironmentForDeoptimization(environment); 604 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
658 ASSERT(environment->HasBeenRegistered()); 605 ASSERT(environment->HasBeenRegistered());
659 int id = environment->deoptimization_index(); 606 int id = environment->deoptimization_index();
660 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); 607 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
661 ASSERT(entry != NULL); 608 ASSERT(entry != NULL);
662 if (entry == NULL) { 609 if (entry == NULL) {
663 Abort("bailout was not prepared"); 610 Abort("bailout was not prepared");
664 return; 611 return;
665 } 612 }
666 613
667 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM. 614 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM.
(...skipping 41 matching lines...)
709 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); 656 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
710 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); 657 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
711 658
712 // Populate the deoptimization entries. 659 // Populate the deoptimization entries.
713 for (int i = 0; i < length; i++) { 660 for (int i = 0; i < length; i++) {
714 LEnvironment* env = deoptimizations_[i]; 661 LEnvironment* env = deoptimizations_[i];
715 data->SetAstId(i, Smi::FromInt(env->ast_id())); 662 data->SetAstId(i, Smi::FromInt(env->ast_id()));
716 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); 663 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
717 data->SetArgumentsStackHeight(i, 664 data->SetArgumentsStackHeight(i,
718 Smi::FromInt(env->arguments_stack_height())); 665 Smi::FromInt(env->arguments_stack_height()));
666 data->SetPc(i, Smi::FromInt(env->pc_offset()));
719 } 667 }
720 code->set_deoptimization_data(*data); 668 code->set_deoptimization_data(*data);
721 } 669 }
722 670
723 671
724 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { 672 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
725 int result = deoptimization_literals_.length(); 673 int result = deoptimization_literals_.length();
726 for (int i = 0; i < deoptimization_literals_.length(); ++i) { 674 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
727 if (deoptimization_literals_[i].is_identical_to(literal)) return i; 675 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
728 } 676 }
(...skipping 11 matching lines...)
740 for (int i = 0, length = inlined_closures->length(); 688 for (int i = 0, length = inlined_closures->length();
741 i < length; 689 i < length;
742 i++) { 690 i++) {
743 DefineDeoptimizationLiteral(inlined_closures->at(i)); 691 DefineDeoptimizationLiteral(inlined_closures->at(i));
744 } 692 }
745 693
746 inlined_function_count_ = deoptimization_literals_.length(); 694 inlined_function_count_ = deoptimization_literals_.length();
747 } 695 }
748 696
749 697
698 void LCodeGen::RecordSafepointWithLazyDeopt(
699 LInstruction* instr, SafepointMode safepoint_mode) {
700 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
701 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
702 } else {
703 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
704 RecordSafepointWithRegisters(
705 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
706 }
707 }
708
709
750 void LCodeGen::RecordSafepoint( 710 void LCodeGen::RecordSafepoint(
751 LPointerMap* pointers, 711 LPointerMap* pointers,
752 Safepoint::Kind kind, 712 Safepoint::Kind kind,
753 int arguments, 713 int arguments,
754 int deoptimization_index) { 714 Safepoint::DeoptMode deopt_mode) {
755 ASSERT(expected_safepoint_kind_ == kind); 715 ASSERT(expected_safepoint_kind_ == kind);
756 716
757 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); 717 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
758 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 718 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
759 kind, arguments, deoptimization_index); 719 kind, arguments, deopt_mode);
760 for (int i = 0; i < operands->length(); i++) { 720 for (int i = 0; i < operands->length(); i++) {
761 LOperand* pointer = operands->at(i); 721 LOperand* pointer = operands->at(i);
762 if (pointer->IsStackSlot()) { 722 if (pointer->IsStackSlot()) {
763 safepoint.DefinePointerSlot(pointer->index()); 723 safepoint.DefinePointerSlot(pointer->index());
764 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 724 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
765 safepoint.DefinePointerRegister(ToRegister(pointer)); 725 safepoint.DefinePointerRegister(ToRegister(pointer));
766 } 726 }
767 } 727 }
768 if (kind & Safepoint::kWithRegisters) { 728 if (kind & Safepoint::kWithRegisters) {
769 // Register cp always contains a pointer to the context. 729 // Register cp always contains a pointer to the context.
770 safepoint.DefinePointerRegister(cp); 730 safepoint.DefinePointerRegister(cp);
771 } 731 }
772 } 732 }
773 733
774 734
775 void LCodeGen::RecordSafepoint(LPointerMap* pointers, 735 void LCodeGen::RecordSafepoint(LPointerMap* pointers,
776 int deoptimization_index) { 736 Safepoint::DeoptMode deopt_mode) {
777 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); 737 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
778 } 738 }
779 739
780 740
781 void LCodeGen::RecordSafepoint(int deoptimization_index) { 741 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
782 LPointerMap empty_pointers(RelocInfo::kNoPosition); 742 LPointerMap empty_pointers(RelocInfo::kNoPosition);
783 RecordSafepoint(&empty_pointers, deoptimization_index); 743 RecordSafepoint(&empty_pointers, deopt_mode);
784 } 744 }
785 745
786 746
787 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, 747 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
788 int arguments, 748 int arguments,
789 int deoptimization_index) { 749 Safepoint::DeoptMode deopt_mode) {
790 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, 750 RecordSafepoint(
791 deoptimization_index); 751 pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
792 } 752 }
793 753
794 754
795 void LCodeGen::RecordSafepointWithRegistersAndDoubles( 755 void LCodeGen::RecordSafepointWithRegistersAndDoubles(
796 LPointerMap* pointers, 756 LPointerMap* pointers,
797 int arguments, 757 int arguments,
798 int deoptimization_index) { 758 Safepoint::DeoptMode deopt_mode) {
799 RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments, 759 RecordSafepoint(
800 deoptimization_index); 760 pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode);
801 } 761 }
802 762
803 763
804 void LCodeGen::RecordPosition(int position) { 764 void LCodeGen::RecordPosition(int position) {
805 if (position == RelocInfo::kNoPosition) return; 765 if (position == RelocInfo::kNoPosition) return;
806 masm()->positions_recorder()->RecordPosition(position); 766 masm()->positions_recorder()->RecordPosition(position);
807 } 767 }
808 768
809 769
810 void LCodeGen::DoLabel(LLabel* label) { 770 void LCodeGen::DoLabel(LLabel* label) {
(...skipping 14 matching lines...)
825 785
826 786
827 void LCodeGen::DoGap(LGap* gap) { 787 void LCodeGen::DoGap(LGap* gap) {
828 for (int i = LGap::FIRST_INNER_POSITION; 788 for (int i = LGap::FIRST_INNER_POSITION;
829 i <= LGap::LAST_INNER_POSITION; 789 i <= LGap::LAST_INNER_POSITION;
830 i++) { 790 i++) {
831 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); 791 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
832 LParallelMove* move = gap->GetParallelMove(inner_pos); 792 LParallelMove* move = gap->GetParallelMove(inner_pos);
833 if (move != NULL) DoParallelMove(move); 793 if (move != NULL) DoParallelMove(move);
834 } 794 }
835
836 LInstruction* next = GetNextInstruction();
837 if (next != NULL && next->IsLazyBailout()) {
838 int pc = masm()->pc_offset();
839 safepoints_.SetPcAfterGap(pc);
840 }
841 } 795 }
842 796
843 797
844 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { 798 void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
845 DoGap(instr); 799 DoGap(instr);
846 } 800 }
847 801
848 802
849 void LCodeGen::DoParameter(LParameter* instr) { 803 void LCodeGen::DoParameter(LParameter* instr) {
850 // Nothing to do. 804 // Nothing to do.
(...skipping 287 matching lines...)
1138 __ mov(r0, right); 1092 __ mov(r0, right);
1139 } else { 1093 } else {
1140 ASSERT(!left.is(r0) && !right.is(r0)); 1094 ASSERT(!left.is(r0) && !right.is(r0));
1141 __ mov(r0, right); 1095 __ mov(r0, right);
1142 __ mov(r1, left); 1096 __ mov(r1, left);
1143 } 1097 }
1144 BinaryOpStub stub(op, OVERWRITE_LEFT); 1098 BinaryOpStub stub(op, OVERWRITE_LEFT);
1145 __ CallStub(&stub); 1099 __ CallStub(&stub);
1146 RecordSafepointWithRegistersAndDoubles(instr->pointer_map(), 1100 RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
1147 0, 1101 0,
1148 Safepoint::kNoDeoptimizationIndex); 1102 Safepoint::kNoLazyDeopt);
1149 // Overwrite the stored value of r0 with the result of the stub. 1103 // Overwrite the stored value of r0 with the result of the stub.
1150 __ StoreToSafepointRegistersAndDoublesSlot(r0, r0); 1104 __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
1151 } 1105 }
1152 1106
1153 1107
1154 void LCodeGen::DoMulI(LMulI* instr) { 1108 void LCodeGen::DoMulI(LMulI* instr) {
1155 Register scratch = scratch0(); 1109 Register scratch = scratch0();
1156 Register result = ToRegister(instr->result()); 1110 Register result = ToRegister(instr->result());
1157 // Note that result may alias left. 1111 // Note that result may alias left.
1158 Register left = ToRegister(instr->InputAt(0)); 1112 Register left = ToRegister(instr->InputAt(0));
(...skipping 896 matching lines...)
2055 } 2009 }
2056 2010
2057 2011
2058 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { 2012 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
2059 class DeferredInstanceOfKnownGlobal: public LDeferredCode { 2013 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
2060 public: 2014 public:
2061 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, 2015 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
2062 LInstanceOfKnownGlobal* instr) 2016 LInstanceOfKnownGlobal* instr)
2063 : LDeferredCode(codegen), instr_(instr) { } 2017 : LDeferredCode(codegen), instr_(instr) { }
2064 virtual void Generate() { 2018 virtual void Generate() {
2065 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); 2019 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
2066 } 2020 }
2067 virtual LInstruction* instr() { return instr_; } 2021 virtual LInstruction* instr() { return instr_; }
2068 Label* map_check() { return &map_check_; } 2022 Label* map_check() { return &map_check_; }
2069 private: 2023 private:
2070 LInstanceOfKnownGlobal* instr_; 2024 LInstanceOfKnownGlobal* instr_;
2071 Label map_check_; 2025 Label map_check_;
2072 }; 2026 };
2073 2027
2074 DeferredInstanceOfKnownGlobal* deferred; 2028 DeferredInstanceOfKnownGlobal* deferred;
2075 deferred = new DeferredInstanceOfKnownGlobal(this, instr); 2029 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
(...skipping 46 matching lines...)
2122 __ bind(&false_result); 2076 __ bind(&false_result);
2123 __ LoadRoot(result, Heap::kFalseValueRootIndex); 2077 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2124 2078
2125 // Here result has either true or false. Deferred code also produces true or 2079 // Here result has either true or false. Deferred code also produces true or
2126 // false object. 2080 // false object.
2127 __ bind(deferred->exit()); 2081 __ bind(deferred->exit());
2128 __ bind(&done); 2082 __ bind(&done);
2129 } 2083 }
2130 2084
2131 2085
2132 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, 2086 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2133 Label* map_check) { 2087 Label* map_check) {
2134 Register result = ToRegister(instr->result()); 2088 Register result = ToRegister(instr->result());
2135 ASSERT(result.is(r0)); 2089 ASSERT(result.is(r0));
2136 2090
2137 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; 2091 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2138 flags = static_cast<InstanceofStub::Flags>( 2092 flags = static_cast<InstanceofStub::Flags>(
2139 flags | InstanceofStub::kArgsInRegisters); 2093 flags | InstanceofStub::kArgsInRegisters);
2140 flags = static_cast<InstanceofStub::Flags>( 2094 flags = static_cast<InstanceofStub::Flags>(
2141 flags | InstanceofStub::kCallSiteInlineCheck); 2095 flags | InstanceofStub::kCallSiteInlineCheck);
2142 flags = static_cast<InstanceofStub::Flags>( 2096 flags = static_cast<InstanceofStub::Flags>(
2143 flags | InstanceofStub::kReturnTrueFalseObject); 2097 flags | InstanceofStub::kReturnTrueFalseObject);
(...skipping 11 matching lines...)
2155 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; 2109 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
2156 Label before_push_delta; 2110 Label before_push_delta;
2157 __ bind(&before_push_delta); 2111 __ bind(&before_push_delta);
2158 __ BlockConstPoolFor(kAdditionalDelta); 2112 __ BlockConstPoolFor(kAdditionalDelta);
2159 __ mov(temp, Operand(delta * kPointerSize)); 2113 __ mov(temp, Operand(delta * kPointerSize));
2160 __ StoreToSafepointRegisterSlot(temp, temp); 2114 __ StoreToSafepointRegisterSlot(temp, temp);
2161 CallCodeGeneric(stub.GetCode(), 2115 CallCodeGeneric(stub.GetCode(),
2162 RelocInfo::CODE_TARGET, 2116 RelocInfo::CODE_TARGET,
2163 instr, 2117 instr,
2164 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 2118 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
2119 ASSERT(instr->HasDeoptimizationEnvironment());
2120 LEnvironment* env = instr->deoptimization_environment();
2121 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2165 // Put the result value into the result register slot and 2122 // Put the result value into the result register slot and
2166 // restore all registers. 2123 // restore all registers.
2167 __ StoreToSafepointRegisterSlot(result, result); 2124 __ StoreToSafepointRegisterSlot(result, result);
2168 } 2125 }
2169 2126
2170 2127
2171 static Condition ComputeCompareCondition(Token::Value op) { 2128 static Condition ComputeCompareCondition(Token::Value op) {
2172 switch (op) { 2129 switch (op) {
2173 case Token::EQ_STRICT: 2130 case Token::EQ_STRICT:
2174 case Token::EQ: 2131 case Token::EQ:
(...skipping 596 matching lines...)
2771 __ b(eq, &invoke); 2728 __ b(eq, &invoke);
2772 __ bind(&loop); 2729 __ bind(&loop);
2773 __ ldr(scratch, MemOperand(elements, length, LSL, 2)); 2730 __ ldr(scratch, MemOperand(elements, length, LSL, 2));
2774 __ push(scratch); 2731 __ push(scratch);
2775 __ sub(length, length, Operand(1), SetCC); 2732 __ sub(length, length, Operand(1), SetCC);
2776 __ b(ne, &loop); 2733 __ b(ne, &loop);
2777 2734
2778 __ bind(&invoke); 2735 __ bind(&invoke);
2779 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 2736 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2780 LPointerMap* pointers = instr->pointer_map(); 2737 LPointerMap* pointers = instr->pointer_map();
2781 LEnvironment* env = instr->deoptimization_environment();
2782 RecordPosition(pointers->position()); 2738 RecordPosition(pointers->position());
2783 RegisterEnvironmentForDeoptimization(env); 2739 SafepointGenerator safepoint_generator(
2784 SafepointGenerator safepoint_generator(this, 2740 this, pointers, Safepoint::kLazyDeopt);
2785 pointers,
2786 env->deoptimization_index());
2787 // The number of arguments is stored in receiver which is r0, as expected 2741 // The number of arguments is stored in receiver which is r0, as expected
2788 // by InvokeFunction. 2742 // by InvokeFunction.
2789 v8::internal::ParameterCount actual(receiver); 2743 v8::internal::ParameterCount actual(receiver);
2790 __ InvokeFunction(function, actual, CALL_FUNCTION, 2744 __ InvokeFunction(function, actual, CALL_FUNCTION,
2791 safepoint_generator, CALL_AS_METHOD); 2745 safepoint_generator, CALL_AS_METHOD);
2792 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2746 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2793 } 2747 }
2794 2748
2795 2749
2796 void LCodeGen::DoPushArgument(LPushArgument* instr) { 2750 void LCodeGen::DoPushArgument(LPushArgument* instr) {
(...skipping 61 matching lines...)
2858 2812
2859 LPointerMap* pointers = instr->pointer_map(); 2813 LPointerMap* pointers = instr->pointer_map();
2860 RecordPosition(pointers->position()); 2814 RecordPosition(pointers->position());
2861 2815
2862 // Invoke function. 2816 // Invoke function.
2863 __ SetCallKind(r5, call_kind); 2817 __ SetCallKind(r5, call_kind);
2864 __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); 2818 __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2865 __ Call(ip); 2819 __ Call(ip);
2866 2820
2867 // Setup deoptimization. 2821 // Setup deoptimization.
2868 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); 2822 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
2869 2823
2870 // Restore context. 2824 // Restore context.
2871 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2825 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2872 } 2826 }
2873 2827
2874 2828
2875 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 2829 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2876 ASSERT(ToRegister(instr->result()).is(r0)); 2830 ASSERT(ToRegister(instr->result()).is(r0));
2877 __ mov(r1, Operand(instr->function())); 2831 __ mov(r1, Operand(instr->function()));
2878 CallKnownFunction(instr->function(), 2832 CallKnownFunction(instr->function(),
(...skipping 344 matching lines...)
3223 UNREACHABLE(); 3177 UNREACHABLE();
3224 } 3178 }
3225 } 3179 }
3226 3180
3227 3181
3228 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { 3182 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3229 ASSERT(ToRegister(instr->function()).is(r1)); 3183 ASSERT(ToRegister(instr->function()).is(r1));
3230 ASSERT(instr->HasPointerMap()); 3184 ASSERT(instr->HasPointerMap());
3231 ASSERT(instr->HasDeoptimizationEnvironment()); 3185 ASSERT(instr->HasDeoptimizationEnvironment());
3232 LPointerMap* pointers = instr->pointer_map(); 3186 LPointerMap* pointers = instr->pointer_map();
3233 LEnvironment* env = instr->deoptimization_environment();
3234 RecordPosition(pointers->position()); 3187 RecordPosition(pointers->position());
3235 RegisterEnvironmentForDeoptimization(env); 3188 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3236 SafepointGenerator generator(this, pointers, env->deoptimization_index());
3237 ParameterCount count(instr->arity()); 3189 ParameterCount count(instr->arity());
3238 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); 3190 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3239 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3191 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3240 } 3192 }
3241 3193
3242 3194
3243 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 3195 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
3244 ASSERT(ToRegister(instr->result()).is(r0)); 3196 ASSERT(ToRegister(instr->result()).is(r0));
3245 3197
3246 int arity = instr->arity(); 3198 int arity = instr->arity();
(...skipping 1306 matching lines...)
4553 __ b(ne, &check_frame_marker); 4505 __ b(ne, &check_frame_marker);
4554 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); 4506 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));
4555 4507
4556 // Check the marker in the calling frame. 4508 // Check the marker in the calling frame.
4557 __ bind(&check_frame_marker); 4509 __ bind(&check_frame_marker);
4558 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); 4510 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
4559 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 4511 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
4560 } 4512 }
4561 4513
4562 4514
4515 void LCodeGen::EnsureSpaceForLazyDeopt() {
4516 // Ensure that we have enough space after the previous lazy-bailout
4517 // instruction for patching the code here.
4518 int current_pc = masm()->pc_offset();
4519 int patch_size = Deoptimizer::patch_size();
4520 if (current_pc < last_lazy_deopt_pc_ + patch_size) {
4521 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
4522 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
4523 while (padding_size > 0) {
4524 __ nop();
4525 padding_size -= Assembler::kInstrSize;
4526 }
4527 }
4528 last_lazy_deopt_pc_ = current_pc;
4529 }
4530
4531
4563 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { 4532 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
4564 // No code for lazy bailout instruction. Used to capture environment after a 4533 EnsureSpaceForLazyDeopt();
4565 // call for populating the safepoint data with deoptimization data. 4534 ASSERT(instr->HasEnvironment());
4535 LEnvironment* env = instr->environment();
4536 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4537 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4566 } 4538 }
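
Taken together, the bookkeeping for a lazy-deopt point in this patch is the three steps visible in DoLazyBailout above (env here stands for instr->environment()):

    EnsureSpaceForLazyDeopt();  // pad so this point can be patched with a call
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());

The stack-check path splits these steps between DoStackCheck and DoDeferredStackCheck, which is why the backwards-branch case registers the environment in the main code but records the deoptimization index only in the deferred code.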
4567 4539
4568 4540
4569 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { 4541 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
4570 DeoptimizeIf(al, instr->environment()); 4542 DeoptimizeIf(al, instr->environment());
4571 } 4543 }
4572 4544
4573 4545
4574 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { 4546 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
4575 Register object = ToRegister(instr->object()); 4547 Register object = ToRegister(instr->object());
4576 Register key = ToRegister(instr->key()); 4548 Register key = ToRegister(instr->key());
4577 Register strict = scratch0(); 4549 Register strict = scratch0();
4578 __ mov(strict, Operand(Smi::FromInt(strict_mode_flag()))); 4550 __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
4579 __ Push(object, key, strict); 4551 __ Push(object, key, strict);
4580 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 4552 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4581 LPointerMap* pointers = instr->pointer_map(); 4553 LPointerMap* pointers = instr->pointer_map();
4582 LEnvironment* env = instr->deoptimization_environment();
4583 RecordPosition(pointers->position()); 4554 RecordPosition(pointers->position());
4584 RegisterEnvironmentForDeoptimization(env); 4555 SafepointGenerator safepoint_generator(
4585 SafepointGenerator safepoint_generator(this, 4556 this, pointers, Safepoint::kLazyDeopt);
4586 pointers,
4587 env->deoptimization_index());
4588 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); 4557 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
4589 } 4558 }
4590 4559
4591 4560
4592 void LCodeGen::DoIn(LIn* instr) { 4561 void LCodeGen::DoIn(LIn* instr) {
4593 Register obj = ToRegister(instr->object()); 4562 Register obj = ToRegister(instr->object());
4594 Register key = ToRegister(instr->key()); 4563 Register key = ToRegister(instr->key());
4595 __ Push(key, obj); 4564 __ Push(key, obj);
4596 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 4565 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4597 LPointerMap* pointers = instr->pointer_map(); 4566 LPointerMap* pointers = instr->pointer_map();
4598 LEnvironment* env = instr->deoptimization_environment();
4599 RecordPosition(pointers->position()); 4567 RecordPosition(pointers->position());
4600 RegisterEnvironmentForDeoptimization(env); 4568 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
4601 SafepointGenerator safepoint_generator(this,
4602 pointers,
4603 env->deoptimization_index());
4604 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); 4569 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
4605 } 4570 }
4606 4571
4607 4572
4608 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { 4573 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
4609 { 4574 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4610 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4575 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
4611 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); 4576 RecordSafepointWithLazyDeopt(
4612 RegisterLazyDeoptimization( 4577 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
4613 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 4578 ASSERT(instr->HasEnvironment());
4614 } 4579 LEnvironment* env = instr->environment();
4615 4580 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4616 // The gap code includes the restoring of the safepoint registers.
4617 int pc = masm()->pc_offset();
4618 safepoints_.SetPcAfterGap(pc);
4619 } 4581 }
4620 4582
4621 4583
4622 void LCodeGen::DoStackCheck(LStackCheck* instr) { 4584 void LCodeGen::DoStackCheck(LStackCheck* instr) {
4623 class DeferredStackCheck: public LDeferredCode { 4585 class DeferredStackCheck: public LDeferredCode {
4624 public: 4586 public:
4625 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) 4587 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4626 : LDeferredCode(codegen), instr_(instr) { } 4588 : LDeferredCode(codegen), instr_(instr) { }
4627 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } 4589 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4628 virtual LInstruction* instr() { return instr_; } 4590 virtual LInstruction* instr() { return instr_; }
4629 private: 4591 private:
4630 LStackCheck* instr_; 4592 LStackCheck* instr_;
4631 }; 4593 };
4632 4594
4595 ASSERT(instr->HasEnvironment());
4596 LEnvironment* env = instr->environment();
4597 // There is no LLazyBailout instruction for stack-checks. We have to
4598 // prepare for lazy deoptimization explicitly here.
4633 if (instr->hydrogen()->is_function_entry()) { 4599 if (instr->hydrogen()->is_function_entry()) {
4634 // Perform stack overflow check. 4600 // Perform stack overflow check.
4635 Label done; 4601 Label done;
4636 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 4602 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
4637 __ cmp(sp, Operand(ip)); 4603 __ cmp(sp, Operand(ip));
4638 __ b(hs, &done); 4604 __ b(hs, &done);
4639 StackCheckStub stub; 4605 StackCheckStub stub;
4640 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 4606 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4607 EnsureSpaceForLazyDeopt();
4641 __ bind(&done); 4608 __ bind(&done);
4609 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4610 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4642 } else { 4611 } else {
4643 ASSERT(instr->hydrogen()->is_backwards_branch()); 4612 ASSERT(instr->hydrogen()->is_backwards_branch());
4644 // Perform stack overflow check if this goto needs it before jumping. 4613 // Perform stack overflow check if this goto needs it before jumping.
4645 DeferredStackCheck* deferred_stack_check = 4614 DeferredStackCheck* deferred_stack_check =
4646 new DeferredStackCheck(this, instr); 4615 new DeferredStackCheck(this, instr);
4647 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 4616 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
4648 __ cmp(sp, Operand(ip)); 4617 __ cmp(sp, Operand(ip));
4649 __ b(lo, deferred_stack_check->entry()); 4618 __ b(lo, deferred_stack_check->entry());
4619 EnsureSpaceForLazyDeopt();
4650 __ bind(instr->done_label()); 4620 __ bind(instr->done_label());
4651 deferred_stack_check->SetExit(instr->done_label()); 4621 deferred_stack_check->SetExit(instr->done_label());
4622 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4623 // Don't record a deoptimization index for the safepoint here.
4624 // This will be done explicitly when emitting call and the safepoint in
4625 // the deferred code.
4652 } 4626 }
4653 } 4627 }
4654 4628
4655 4629
4656 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 4630 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
4657 // This is a pseudo-instruction that ensures that the environment here is 4631 // This is a pseudo-instruction that ensures that the environment here is
4658 // properly registered for deoptimization and records the assembler's PC 4632 // properly registered for deoptimization and records the assembler's PC
4659 // offset. 4633 // offset.
4660 LEnvironment* environment = instr->environment(); 4634 LEnvironment* environment = instr->environment();
4661 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), 4635 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
4662 instr->SpilledDoubleRegisterArray()); 4636 instr->SpilledDoubleRegisterArray());
4663 4637
4664 // If the environment were already registered, we would have no way of 4638 // If the environment were already registered, we would have no way of
4665 // backpatching it with the spill slot operands. 4639 // backpatching it with the spill slot operands.
4666 ASSERT(!environment->HasBeenRegistered()); 4640 ASSERT(!environment->HasBeenRegistered());
4667 RegisterEnvironmentForDeoptimization(environment); 4641 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
4668 ASSERT(osr_pc_offset_ == -1); 4642 ASSERT(osr_pc_offset_ == -1);
4669 osr_pc_offset_ = masm()->pc_offset(); 4643 osr_pc_offset_ = masm()->pc_offset();
4670 } 4644 }
4671 4645
4672 4646
4673 4647
4674 4648
4675 #undef __ 4649 #undef __
4676 4650
4677 } } // namespace v8::internal 4651 } } // namespace v8::internal