Chromium Code Reviews

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 9215010: Merge r10006, r10087 and r10126 from bleeding edge to the 3.6 branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/3.6/
Patch Set: Created 8 years, 11 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 22 matching lines...)
33 #include "stub-cache.h" 33 #include "stub-cache.h"
34 34
35 namespace v8 { 35 namespace v8 {
36 namespace internal { 36 namespace internal {
37 37
38 38
39 class SafepointGenerator : public CallWrapper { 39 class SafepointGenerator : public CallWrapper {
40 public: 40 public:
41 SafepointGenerator(LCodeGen* codegen, 41 SafepointGenerator(LCodeGen* codegen,
42 LPointerMap* pointers, 42 LPointerMap* pointers,
43 int deoptimization_index) 43 Safepoint::DeoptMode mode)
44 : codegen_(codegen), 44 : codegen_(codegen),
45 pointers_(pointers), 45 pointers_(pointers),
46 deoptimization_index_(deoptimization_index) { } 46 deopt_mode_(mode) { }
47 virtual ~SafepointGenerator() { } 47 virtual ~SafepointGenerator() { }
48 48
49 virtual void BeforeCall(int call_size) const { 49 virtual void BeforeCall(int call_size) const { }
50 ASSERT(call_size >= 0);
51 // Ensure that we have enough space after the previous safepoint position
52 // for the generated code there.
53 int call_end = codegen_->masm()->pc_offset() + call_size;
54 int prev_jump_end =
55 codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
56 if (call_end < prev_jump_end) {
57 int padding_size = prev_jump_end - call_end;
58 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
59 while (padding_size > 0) {
60 codegen_->masm()->nop();
61 padding_size -= Assembler::kInstrSize;
62 }
63 }
64 }
65 50
66 virtual void AfterCall() const { 51 virtual void AfterCall() const {
67 codegen_->RecordSafepoint(pointers_, deoptimization_index_); 52 codegen_->RecordSafepoint(pointers_, deopt_mode_);
68 } 53 }
69 54
70 private: 55 private:
71 LCodeGen* codegen_; 56 LCodeGen* codegen_;
72 LPointerMap* pointers_; 57 LPointerMap* pointers_;
73 int deoptimization_index_; 58 Safepoint::DeoptMode deopt_mode_;
74 }; 59 };
75 60
76 61
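Aside (not part of the patch): the hunk above drops the explicit deoptimization index and the nop padding that BeforeCall used to emit, and instead tags the safepoint recorded in AfterCall with a Safepoint::DeoptMode; padding is handled centrally by the new EnsureSpaceForLazyDeopt further down in this file. A minimal sketch of the resulting call-wrapper shape, using placeholder names (ModeTag, SafepointRecorder, AfterCallSafepoint) rather than the real V8 types:

    // Illustrative sketch only -- placeholder names, not the V8 classes.
    enum class ModeTag { kNoLazyDeopt, kLazyDeopt };

    class SafepointRecorder {
     public:
      // Stands in for LCodeGen::RecordSafepoint(pointers, mode).
      void RecordSafepoint(ModeTag mode) { last_mode_ = mode; }
      ModeTag last_mode_ = ModeTag::kNoLazyDeopt;
    };

    class AfterCallSafepoint {
     public:
      AfterCallSafepoint(SafepointRecorder* recorder, ModeTag mode)
          : recorder_(recorder), mode_(mode) {}
      // No padding before the call any more; only the post-call safepoint.
      void BeforeCall(int /*call_size*/) const {}
      void AfterCall() const { recorder_->RecordSafepoint(mode_); }
     private:
      SafepointRecorder* recorder_;
      ModeTag mode_;
    };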
77 #define __ masm()-> 62 #define __ masm()->
78 63
79 bool LCodeGen::GenerateCode() { 64 bool LCodeGen::GenerateCode() {
80 HPhase phase("Code generation", chunk()); 65 HPhase phase("Code generation", chunk());
81 ASSERT(is_unused()); 66 ASSERT(is_unused());
82 status_ = GENERATING; 67 status_ = GENERATING;
83 CpuFeatures::Scope scope1(VFP3); 68 CpuFeatures::Scope scope1(VFP3);
84 CpuFeatures::Scope scope2(ARMv7); 69 CpuFeatures::Scope scope2(ARMv7);
85 return GeneratePrologue() && 70 return GeneratePrologue() &&
86 GenerateBody() && 71 GenerateBody() &&
87 GenerateDeferredCode() && 72 GenerateDeferredCode() &&
88 GenerateDeoptJumpTable() && 73 GenerateDeoptJumpTable() &&
89 GenerateSafepointTable(); 74 GenerateSafepointTable();
90 } 75 }
91 76
92 77
93 void LCodeGen::FinishCode(Handle<Code> code) { 78 void LCodeGen::FinishCode(Handle<Code> code) {
94 ASSERT(is_done()); 79 ASSERT(is_done());
95 code->set_stack_slots(GetStackSlotCount()); 80 code->set_stack_slots(GetStackSlotCount());
96 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); 81 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
97 PopulateDeoptimizationData(code); 82 PopulateDeoptimizationData(code);
98 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
99 } 83 }
100 84
101 85
102 void LCodeGen::Abort(const char* format, ...) { 86 void LCodeGen::Abort(const char* format, ...) {
103 if (FLAG_trace_bailout) { 87 if (FLAG_trace_bailout) {
104 SmartArrayPointer<char> name( 88 SmartArrayPointer<char> name(
105 info()->shared_info()->DebugName()->ToCString()); 89 info()->shared_info()->DebugName()->ToCString());
106 PrintF("Aborting LCodeGen in @\"%s\": ", *name); 90 PrintF("Aborting LCodeGen in @\"%s\": ", *name);
107 va_list arguments; 91 va_list arguments;
108 va_start(arguments, format); 92 va_start(arguments, format);
(...skipping 76 matching lines...)
185 if (heap_slots > 0) { 169 if (heap_slots > 0) {
186 Comment(";;; Allocate local context"); 170 Comment(";;; Allocate local context");
187 // Argument to NewContext is the function, which is in r1. 171 // Argument to NewContext is the function, which is in r1.
188 __ push(r1); 172 __ push(r1);
189 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 173 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
190 FastNewContextStub stub(heap_slots); 174 FastNewContextStub stub(heap_slots);
191 __ CallStub(&stub); 175 __ CallStub(&stub);
192 } else { 176 } else {
193 __ CallRuntime(Runtime::kNewFunctionContext, 1); 177 __ CallRuntime(Runtime::kNewFunctionContext, 1);
194 } 178 }
195 RecordSafepoint(Safepoint::kNoDeoptimizationIndex); 179 RecordSafepoint(Safepoint::kNoLazyDeopt);
196 // Context is returned in both r0 and cp. It replaces the context 180 // Context is returned in both r0 and cp. It replaces the context
197 // passed to us. It's saved in the stack and kept live in cp. 181 // passed to us. It's saved in the stack and kept live in cp.
198 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 182 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
199 // Copy any necessary parameters into the context. 183 // Copy any necessary parameters into the context.
200 int num_parameters = scope()->num_parameters(); 184 int num_parameters = scope()->num_parameters();
201 for (int i = 0; i < num_parameters; i++) { 185 for (int i = 0; i < num_parameters; i++) {
202 Variable* var = scope()->parameter(i); 186 Variable* var = scope()->parameter(i);
203 if (var->IsContextSlot()) { 187 if (var->IsContextSlot()) {
204 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 188 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
205 (num_parameters - 1 - i) * kPointerSize; 189 (num_parameters - 1 - i) * kPointerSize;
(...skipping 30 matching lines...)
236 if (instr->IsLabel()) { 220 if (instr->IsLabel()) {
237 LLabel* label = LLabel::cast(instr); 221 LLabel* label = LLabel::cast(instr);
238 emit_instructions = !label->HasReplacement(); 222 emit_instructions = !label->HasReplacement();
239 } 223 }
240 224
241 if (emit_instructions) { 225 if (emit_instructions) {
242 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); 226 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
243 instr->CompileToNative(this); 227 instr->CompileToNative(this);
244 } 228 }
245 } 229 }
230 EnsureSpaceForLazyDeopt();
246 return !is_aborted(); 231 return !is_aborted();
247 } 232 }
248 233
249 234
250 LInstruction* LCodeGen::GetNextInstruction() {
251 if (current_instruction_ < instructions_->length() - 1) {
252 return instructions_->at(current_instruction_ + 1);
253 } else {
254 return NULL;
255 }
256 }
257
258
259 bool LCodeGen::GenerateDeferredCode() { 235 bool LCodeGen::GenerateDeferredCode() {
260 ASSERT(is_generating()); 236 ASSERT(is_generating());
261 if (deferred_.length() > 0) { 237 if (deferred_.length() > 0) {
262 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 238 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
263 LDeferredCode* code = deferred_[i]; 239 LDeferredCode* code = deferred_[i];
264 __ bind(code->entry()); 240 __ bind(code->entry());
265 code->Generate(); 241 code->Generate();
266 __ jmp(code->exit()); 242 __ jmp(code->exit());
267 } 243 }
268
269 // Pad code to ensure that the last piece of deferred code have
270 // room for lazy bailout.
271 while ((masm()->pc_offset() - LastSafepointEnd())
272 < Deoptimizer::patch_size()) {
273 __ nop();
274 }
275 } 244 }
276 245
277 // Force constant pool emission at the end of the deferred code to make 246 // Force constant pool emission at the end of the deferred code to make
278 // sure that no constant pools are emitted after. 247 // sure that no constant pools are emitted after.
279 masm()->CheckConstPool(true, false); 248 masm()->CheckConstPool(true, false);
280 249
281 return !is_aborted(); 250 return !is_aborted();
282 } 251 }
283 252
284 253
(...skipping 259 matching lines...)
544 513
545 514
546 void LCodeGen::CallCodeGeneric(Handle<Code> code, 515 void LCodeGen::CallCodeGeneric(Handle<Code> code,
547 RelocInfo::Mode mode, 516 RelocInfo::Mode mode,
548 LInstruction* instr, 517 LInstruction* instr,
549 SafepointMode safepoint_mode) { 518 SafepointMode safepoint_mode) {
550 ASSERT(instr != NULL); 519 ASSERT(instr != NULL);
551 LPointerMap* pointers = instr->pointer_map(); 520 LPointerMap* pointers = instr->pointer_map();
552 RecordPosition(pointers->position()); 521 RecordPosition(pointers->position());
553 __ Call(code, mode); 522 __ Call(code, mode);
554 RegisterLazyDeoptimization(instr, safepoint_mode); 523 RecordSafepointWithLazyDeopt(instr, safepoint_mode);
555 524
556 // Signal that we don't inline smi code before these stubs in the 525 // Signal that we don't inline smi code before these stubs in the
557 // optimizing code generator. 526 // optimizing code generator.
558 if (code->kind() == Code::BINARY_OP_IC || 527 if (code->kind() == Code::BINARY_OP_IC ||
559 code->kind() == Code::COMPARE_IC) { 528 code->kind() == Code::COMPARE_IC) {
560 __ nop(); 529 __ nop();
561 } 530 }
562 } 531 }
563 532
564 533
565 void LCodeGen::CallRuntime(const Runtime::Function* function, 534 void LCodeGen::CallRuntime(const Runtime::Function* function,
566 int num_arguments, 535 int num_arguments,
567 LInstruction* instr) { 536 LInstruction* instr) {
568 ASSERT(instr != NULL); 537 ASSERT(instr != NULL);
569 LPointerMap* pointers = instr->pointer_map(); 538 LPointerMap* pointers = instr->pointer_map();
570 ASSERT(pointers != NULL); 539 ASSERT(pointers != NULL);
571 RecordPosition(pointers->position()); 540 RecordPosition(pointers->position());
572 541
573 __ CallRuntime(function, num_arguments); 542 __ CallRuntime(function, num_arguments);
574 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); 543 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
575 } 544 }
576 545
577 546
578 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, 547 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
579 int argc, 548 int argc,
580 LInstruction* instr) { 549 LInstruction* instr) {
581 __ CallRuntimeSaveDoubles(id); 550 __ CallRuntimeSaveDoubles(id);
582 RecordSafepointWithRegisters( 551 RecordSafepointWithRegisters(
583 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); 552 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
584 } 553 }
585 554
586 555
587 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, 556 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
588 SafepointMode safepoint_mode) { 557 Safepoint::DeoptMode mode) {
589 // Create the environment to bailout to. If the call has side effects
590 // execution has to continue after the call otherwise execution can continue
591 // from a previous bailout point repeating the call.
592 LEnvironment* deoptimization_environment;
593 if (instr->HasDeoptimizationEnvironment()) {
594 deoptimization_environment = instr->deoptimization_environment();
595 } else {
596 deoptimization_environment = instr->environment();
597 }
598
599 RegisterEnvironmentForDeoptimization(deoptimization_environment);
600 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
601 RecordSafepoint(instr->pointer_map(),
602 deoptimization_environment->deoptimization_index());
603 } else {
604 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
605 RecordSafepointWithRegisters(
606 instr->pointer_map(),
607 0,
608 deoptimization_environment->deoptimization_index());
609 }
610 }
611
612
613 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
614 if (!environment->HasBeenRegistered()) { 558 if (!environment->HasBeenRegistered()) {
615 // Physical stack frame layout: 559 // Physical stack frame layout:
616 // -x ............. -4 0 ..................................... y 560 // -x ............. -4 0 ..................................... y
617 // [incoming arguments] [spill slots] [pushed outgoing arguments] 561 // [incoming arguments] [spill slots] [pushed outgoing arguments]
618 562
619 // Layout of the environment: 563 // Layout of the environment:
620 // 0 ..................................................... size-1 564 // 0 ..................................................... size-1
621 // [parameters] [locals] [expression stack including arguments] 565 // [parameters] [locals] [expression stack including arguments]
622 566
623 // Layout of the translation: 567 // Layout of the translation:
624 // 0 ........................................................ size - 1 + 4 568 // 0 ........................................................ size - 1 + 4
625 // [expression stack including arguments] [locals] [4 words] [parameters] 569 // [expression stack including arguments] [locals] [4 words] [parameters]
626 // |>------------ translation_size ------------<| 570 // |>------------ translation_size ------------<|
627 571
628 int frame_count = 0; 572 int frame_count = 0;
629 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { 573 for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
630 ++frame_count; 574 ++frame_count;
631 } 575 }
632 Translation translation(&translations_, frame_count); 576 Translation translation(&translations_, frame_count);
633 WriteTranslation(environment, &translation); 577 WriteTranslation(environment, &translation);
634 int deoptimization_index = deoptimizations_.length(); 578 int deoptimization_index = deoptimizations_.length();
635 environment->Register(deoptimization_index, translation.index()); 579 int pc_offset = masm()->pc_offset();
580 environment->Register(deoptimization_index,
581 translation.index(),
582 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
636 deoptimizations_.Add(environment); 583 deoptimizations_.Add(environment);
637 } 584 }
638 } 585 }
639 586
640 587
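Aside (not part of the patch): RegisterEnvironmentForDeoptimization above now takes a Safepoint::DeoptMode and, for kLazyDeopt, also stores the current pc offset with the environment; PopulateDeoptimizationData later copies that offset into the deopt data via SetPc. A rough standalone model of that bookkeeping, with assumed names (DeoptRecord, RegisterEnvironmentSketch):

    // Rough model of the per-environment bookkeeping; names are assumptions.
    #include <vector>

    struct DeoptRecord {
      int translation_index;
      int pc_offset;  // return-address offset for lazy deopt, -1 otherwise
    };

    // Returns the deoptimization index assigned to the new record,
    // mirroring "index = deoptimizations_.length(); ...; Add(environment)".
    int RegisterEnvironmentSketch(std::vector<DeoptRecord>* deopts,
                                  int translation_index,
                                  int current_pc_offset,
                                  bool lazy_deopt) {
      DeoptRecord record;
      record.translation_index = translation_index;
      record.pc_offset = lazy_deopt ? current_pc_offset : -1;
      deopts->push_back(record);
      return static_cast<int>(deopts->size()) - 1;
    }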
641 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { 588 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
642 RegisterEnvironmentForDeoptimization(environment); 589 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
643 ASSERT(environment->HasBeenRegistered()); 590 ASSERT(environment->HasBeenRegistered());
644 int id = environment->deoptimization_index(); 591 int id = environment->deoptimization_index();
645 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); 592 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
646 ASSERT(entry != NULL); 593 ASSERT(entry != NULL);
647 if (entry == NULL) { 594 if (entry == NULL) {
648 Abort("bailout was not prepared"); 595 Abort("bailout was not prepared");
649 return; 596 return;
650 } 597 }
651 598
652 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM. 599 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM.
(...skipping 41 matching lines...)
694 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); 641 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
695 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); 642 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
696 643
697 // Populate the deoptimization entries. 644 // Populate the deoptimization entries.
698 for (int i = 0; i < length; i++) { 645 for (int i = 0; i < length; i++) {
699 LEnvironment* env = deoptimizations_[i]; 646 LEnvironment* env = deoptimizations_[i];
700 data->SetAstId(i, Smi::FromInt(env->ast_id())); 647 data->SetAstId(i, Smi::FromInt(env->ast_id()));
701 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); 648 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
702 data->SetArgumentsStackHeight(i, 649 data->SetArgumentsStackHeight(i,
703 Smi::FromInt(env->arguments_stack_height())); 650 Smi::FromInt(env->arguments_stack_height()));
651 data->SetPc(i, Smi::FromInt(env->pc_offset()));
704 } 652 }
705 code->set_deoptimization_data(*data); 653 code->set_deoptimization_data(*data);
706 } 654 }
707 655
708 656
709 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { 657 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
710 int result = deoptimization_literals_.length(); 658 int result = deoptimization_literals_.length();
711 for (int i = 0; i < deoptimization_literals_.length(); ++i) { 659 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
712 if (deoptimization_literals_[i].is_identical_to(literal)) return i; 660 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
713 } 661 }
(...skipping 11 matching lines...)
725 for (int i = 0, length = inlined_closures->length(); 673 for (int i = 0, length = inlined_closures->length();
726 i < length; 674 i < length;
727 i++) { 675 i++) {
728 DefineDeoptimizationLiteral(inlined_closures->at(i)); 676 DefineDeoptimizationLiteral(inlined_closures->at(i));
729 } 677 }
730 678
731 inlined_function_count_ = deoptimization_literals_.length(); 679 inlined_function_count_ = deoptimization_literals_.length();
732 } 680 }
733 681
734 682
683 void LCodeGen::RecordSafepointWithLazyDeopt(
684 LInstruction* instr, SafepointMode safepoint_mode) {
685 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
686 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
687 } else {
688 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
689 RecordSafepointWithRegisters(
690 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
691 }
692 }
693
694
735 void LCodeGen::RecordSafepoint( 695 void LCodeGen::RecordSafepoint(
736 LPointerMap* pointers, 696 LPointerMap* pointers,
737 Safepoint::Kind kind, 697 Safepoint::Kind kind,
738 int arguments, 698 int arguments,
739 int deoptimization_index) { 699 Safepoint::DeoptMode deopt_mode) {
740 ASSERT(expected_safepoint_kind_ == kind); 700 ASSERT(expected_safepoint_kind_ == kind);
741 701
742 const ZoneList<LOperand*>* operands = pointers->operands(); 702 const ZoneList<LOperand*>* operands = pointers->operands();
743 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 703 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
744 kind, arguments, deoptimization_index); 704 kind, arguments, deopt_mode);
745 for (int i = 0; i < operands->length(); i++) { 705 for (int i = 0; i < operands->length(); i++) {
746 LOperand* pointer = operands->at(i); 706 LOperand* pointer = operands->at(i);
747 if (pointer->IsStackSlot()) { 707 if (pointer->IsStackSlot()) {
748 safepoint.DefinePointerSlot(pointer->index()); 708 safepoint.DefinePointerSlot(pointer->index());
749 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 709 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
750 safepoint.DefinePointerRegister(ToRegister(pointer)); 710 safepoint.DefinePointerRegister(ToRegister(pointer));
751 } 711 }
752 } 712 }
753 if (kind & Safepoint::kWithRegisters) { 713 if (kind & Safepoint::kWithRegisters) {
754 // Register cp always contains a pointer to the context. 714 // Register cp always contains a pointer to the context.
755 safepoint.DefinePointerRegister(cp); 715 safepoint.DefinePointerRegister(cp);
756 } 716 }
757 } 717 }
758 718
759 719
760 void LCodeGen::RecordSafepoint(LPointerMap* pointers, 720 void LCodeGen::RecordSafepoint(LPointerMap* pointers,
761 int deoptimization_index) { 721 Safepoint::DeoptMode deopt_mode) {
762 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); 722 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
763 } 723 }
764 724
765 725
766 void LCodeGen::RecordSafepoint(int deoptimization_index) { 726 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
767 LPointerMap empty_pointers(RelocInfo::kNoPosition); 727 LPointerMap empty_pointers(RelocInfo::kNoPosition);
768 RecordSafepoint(&empty_pointers, deoptimization_index); 728 RecordSafepoint(&empty_pointers, deopt_mode);
769 } 729 }
770 730
771 731
772 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, 732 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
773 int arguments, 733 int arguments,
774 int deoptimization_index) { 734 Safepoint::DeoptMode deopt_mode) {
775 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, 735 RecordSafepoint(
776 deoptimization_index); 736 pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
777 } 737 }
778 738
779 739
780 void LCodeGen::RecordSafepointWithRegistersAndDoubles( 740 void LCodeGen::RecordSafepointWithRegistersAndDoubles(
781 LPointerMap* pointers, 741 LPointerMap* pointers,
782 int arguments, 742 int arguments,
783 int deoptimization_index) { 743 Safepoint::DeoptMode deopt_mode) {
784 RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments, 744 RecordSafepoint(
785 deoptimization_index); 745 pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode);
786 } 746 }
787 747
788 748
789 void LCodeGen::RecordPosition(int position) { 749 void LCodeGen::RecordPosition(int position) {
790 if (position == RelocInfo::kNoPosition) return; 750 if (position == RelocInfo::kNoPosition) return;
791 masm()->positions_recorder()->RecordPosition(position); 751 masm()->positions_recorder()->RecordPosition(position);
792 } 752 }
793 753
794 754
795 void LCodeGen::DoLabel(LLabel* label) { 755 void LCodeGen::DoLabel(LLabel* label) {
(...skipping 14 matching lines...)
810 770
811 771
812 void LCodeGen::DoGap(LGap* gap) { 772 void LCodeGen::DoGap(LGap* gap) {
813 for (int i = LGap::FIRST_INNER_POSITION; 773 for (int i = LGap::FIRST_INNER_POSITION;
814 i <= LGap::LAST_INNER_POSITION; 774 i <= LGap::LAST_INNER_POSITION;
815 i++) { 775 i++) {
816 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); 776 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
817 LParallelMove* move = gap->GetParallelMove(inner_pos); 777 LParallelMove* move = gap->GetParallelMove(inner_pos);
818 if (move != NULL) DoParallelMove(move); 778 if (move != NULL) DoParallelMove(move);
819 } 779 }
820
821 LInstruction* next = GetNextInstruction();
822 if (next != NULL && next->IsLazyBailout()) {
823 int pc = masm()->pc_offset();
824 safepoints_.SetPcAfterGap(pc);
825 }
826 } 780 }
827 781
828 782
829 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { 783 void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
830 DoGap(instr); 784 DoGap(instr);
831 } 785 }
832 786
833 787
834 void LCodeGen::DoParameter(LParameter* instr) { 788 void LCodeGen::DoParameter(LParameter* instr) {
835 // Nothing to do. 789 // Nothing to do.
(...skipping 286 matching lines...)
1122 __ mov(r0, right); 1076 __ mov(r0, right);
1123 } else { 1077 } else {
1124 ASSERT(!left.is(r0) && !right.is(r0)); 1078 ASSERT(!left.is(r0) && !right.is(r0));
1125 __ mov(r0, right); 1079 __ mov(r0, right);
1126 __ mov(r1, left); 1080 __ mov(r1, left);
1127 } 1081 }
1128 BinaryOpStub stub(op, OVERWRITE_LEFT); 1082 BinaryOpStub stub(op, OVERWRITE_LEFT);
1129 __ CallStub(&stub); 1083 __ CallStub(&stub);
1130 RecordSafepointWithRegistersAndDoubles(instr->pointer_map(), 1084 RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
1131 0, 1085 0,
1132 Safepoint::kNoDeoptimizationIndex); 1086 Safepoint::kNoLazyDeopt);
1133 // Overwrite the stored value of r0 with the result of the stub. 1087 // Overwrite the stored value of r0 with the result of the stub.
1134 __ StoreToSafepointRegistersAndDoublesSlot(r0, r0); 1088 __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
1135 } 1089 }
1136 1090
1137 1091
1138 void LCodeGen::DoMulI(LMulI* instr) { 1092 void LCodeGen::DoMulI(LMulI* instr) {
1139 Register scratch = scratch0(); 1093 Register scratch = scratch0();
1140 Register result = ToRegister(instr->result()); 1094 Register result = ToRegister(instr->result());
1141 // Note that result may alias left. 1095 // Note that result may alias left.
1142 Register left = ToRegister(instr->InputAt(0)); 1096 Register left = ToRegister(instr->InputAt(0));
(...skipping 864 matching lines...)
2007 } 1961 }
2008 1962
2009 1963
2010 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { 1964 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
2011 class DeferredInstanceOfKnownGlobal: public LDeferredCode { 1965 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
2012 public: 1966 public:
2013 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, 1967 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
2014 LInstanceOfKnownGlobal* instr) 1968 LInstanceOfKnownGlobal* instr)
2015 : LDeferredCode(codegen), instr_(instr) { } 1969 : LDeferredCode(codegen), instr_(instr) { }
2016 virtual void Generate() { 1970 virtual void Generate() {
2017 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); 1971 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
2018 } 1972 }
2019 1973
2020 Label* map_check() { return &map_check_; } 1974 Label* map_check() { return &map_check_; }
2021 1975
2022 private: 1976 private:
2023 LInstanceOfKnownGlobal* instr_; 1977 LInstanceOfKnownGlobal* instr_;
2024 Label map_check_; 1978 Label map_check_;
2025 }; 1979 };
2026 1980
2027 DeferredInstanceOfKnownGlobal* deferred; 1981 DeferredInstanceOfKnownGlobal* deferred;
(...skipping 47 matching lines...)
2075 __ bind(&false_result); 2029 __ bind(&false_result);
2076 __ LoadRoot(result, Heap::kFalseValueRootIndex); 2030 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2077 2031
2078 // Here result has either true or false. Deferred code also produces true or 2032 // Here result has either true or false. Deferred code also produces true or
2079 // false object. 2033 // false object.
2080 __ bind(deferred->exit()); 2034 __ bind(deferred->exit());
2081 __ bind(&done); 2035 __ bind(&done);
2082 } 2036 }
2083 2037
2084 2038
2085 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, 2039 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2086 Label* map_check) { 2040 Label* map_check) {
2087 Register result = ToRegister(instr->result()); 2041 Register result = ToRegister(instr->result());
2088 ASSERT(result.is(r0)); 2042 ASSERT(result.is(r0));
2089 2043
2090 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; 2044 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2091 flags = static_cast<InstanceofStub::Flags>( 2045 flags = static_cast<InstanceofStub::Flags>(
2092 flags | InstanceofStub::kArgsInRegisters); 2046 flags | InstanceofStub::kArgsInRegisters);
2093 flags = static_cast<InstanceofStub::Flags>( 2047 flags = static_cast<InstanceofStub::Flags>(
2094 flags | InstanceofStub::kCallSiteInlineCheck); 2048 flags | InstanceofStub::kCallSiteInlineCheck);
2095 flags = static_cast<InstanceofStub::Flags>( 2049 flags = static_cast<InstanceofStub::Flags>(
2096 flags | InstanceofStub::kReturnTrueFalseObject); 2050 flags | InstanceofStub::kReturnTrueFalseObject);
(...skipping 11 matching lines...)
2108 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; 2062 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
2109 Label before_push_delta; 2063 Label before_push_delta;
2110 __ bind(&before_push_delta); 2064 __ bind(&before_push_delta);
2111 __ BlockConstPoolFor(kAdditionalDelta); 2065 __ BlockConstPoolFor(kAdditionalDelta);
2112 __ mov(temp, Operand(delta * kPointerSize)); 2066 __ mov(temp, Operand(delta * kPointerSize));
2113 __ StoreToSafepointRegisterSlot(temp, temp); 2067 __ StoreToSafepointRegisterSlot(temp, temp);
2114 CallCodeGeneric(stub.GetCode(), 2068 CallCodeGeneric(stub.GetCode(),
2115 RelocInfo::CODE_TARGET, 2069 RelocInfo::CODE_TARGET,
2116 instr, 2070 instr,
2117 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 2071 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
2072 ASSERT(instr->HasDeoptimizationEnvironment());
2073 LEnvironment* env = instr->deoptimization_environment();
2074 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2118 // Put the result value into the result register slot and 2075 // Put the result value into the result register slot and
2119 // restore all registers. 2076 // restore all registers.
2120 __ StoreToSafepointRegisterSlot(result, result); 2077 __ StoreToSafepointRegisterSlot(result, result);
2121 } 2078 }
2122 2079
2123 2080
2124 static Condition ComputeCompareCondition(Token::Value op) { 2081 static Condition ComputeCompareCondition(Token::Value op) {
2125 switch (op) { 2082 switch (op) {
2126 case Token::EQ_STRICT: 2083 case Token::EQ_STRICT:
2127 case Token::EQ: 2084 case Token::EQ:
(...skipping 577 matching lines...)
2705 __ b(eq, &invoke); 2662 __ b(eq, &invoke);
2706 __ bind(&loop); 2663 __ bind(&loop);
2707 __ ldr(scratch, MemOperand(elements, length, LSL, 2)); 2664 __ ldr(scratch, MemOperand(elements, length, LSL, 2));
2708 __ push(scratch); 2665 __ push(scratch);
2709 __ sub(length, length, Operand(1), SetCC); 2666 __ sub(length, length, Operand(1), SetCC);
2710 __ b(ne, &loop); 2667 __ b(ne, &loop);
2711 2668
2712 __ bind(&invoke); 2669 __ bind(&invoke);
2713 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 2670 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2714 LPointerMap* pointers = instr->pointer_map(); 2671 LPointerMap* pointers = instr->pointer_map();
2715 LEnvironment* env = instr->deoptimization_environment();
2716 RecordPosition(pointers->position()); 2672 RecordPosition(pointers->position());
2717 RegisterEnvironmentForDeoptimization(env); 2673 SafepointGenerator safepoint_generator(
2718 SafepointGenerator safepoint_generator(this, 2674 this, pointers, Safepoint::kLazyDeopt);
2719 pointers,
2720 env->deoptimization_index());
2721 // The number of arguments is stored in receiver which is r0, as expected 2675 // The number of arguments is stored in receiver which is r0, as expected
2722 // by InvokeFunction. 2676 // by InvokeFunction.
2723 v8::internal::ParameterCount actual(receiver); 2677 v8::internal::ParameterCount actual(receiver);
2724 __ InvokeFunction(function, actual, CALL_FUNCTION, 2678 __ InvokeFunction(function, actual, CALL_FUNCTION,
2725 safepoint_generator, CALL_AS_METHOD); 2679 safepoint_generator, CALL_AS_METHOD);
2726 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2680 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2727 } 2681 }
2728 2682
2729 2683
2730 void LCodeGen::DoPushArgument(LPushArgument* instr) { 2684 void LCodeGen::DoPushArgument(LPushArgument* instr) {
(...skipping 61 matching lines...)
2792 2746
2793 LPointerMap* pointers = instr->pointer_map(); 2747 LPointerMap* pointers = instr->pointer_map();
2794 RecordPosition(pointers->position()); 2748 RecordPosition(pointers->position());
2795 2749
2796 // Invoke function. 2750 // Invoke function.
2797 __ SetCallKind(r5, call_kind); 2751 __ SetCallKind(r5, call_kind);
2798 __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); 2752 __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2799 __ Call(ip); 2753 __ Call(ip);
2800 2754
2801 // Setup deoptimization. 2755 // Setup deoptimization.
2802 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); 2756 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
2803 2757
2804 // Restore context. 2758 // Restore context.
2805 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2759 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2806 } 2760 }
2807 2761
2808 2762
2809 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 2763 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2810 ASSERT(ToRegister(instr->result()).is(r0)); 2764 ASSERT(ToRegister(instr->result()).is(r0));
2811 __ mov(r1, Operand(instr->function())); 2765 __ mov(r1, Operand(instr->function()));
2812 CallKnownFunction(instr->function(), 2766 CallKnownFunction(instr->function(),
(...skipping 343 matching lines...)
3156 UNREACHABLE(); 3110 UNREACHABLE();
3157 } 3111 }
3158 } 3112 }
3159 3113
3160 3114
3161 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { 3115 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3162 ASSERT(ToRegister(instr->function()).is(r1)); 3116 ASSERT(ToRegister(instr->function()).is(r1));
3163 ASSERT(instr->HasPointerMap()); 3117 ASSERT(instr->HasPointerMap());
3164 ASSERT(instr->HasDeoptimizationEnvironment()); 3118 ASSERT(instr->HasDeoptimizationEnvironment());
3165 LPointerMap* pointers = instr->pointer_map(); 3119 LPointerMap* pointers = instr->pointer_map();
3166 LEnvironment* env = instr->deoptimization_environment();
3167 RecordPosition(pointers->position()); 3120 RecordPosition(pointers->position());
3168 RegisterEnvironmentForDeoptimization(env); 3121 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3169 SafepointGenerator generator(this, pointers, env->deoptimization_index());
3170 ParameterCount count(instr->arity()); 3122 ParameterCount count(instr->arity());
3171 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); 3123 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3172 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3124 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3173 } 3125 }
3174 3126
3175 3127
3176 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 3128 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
3177 ASSERT(ToRegister(instr->result()).is(r0)); 3129 ASSERT(ToRegister(instr->result()).is(r0));
3178 3130
3179 int arity = instr->arity(); 3131 int arity = instr->arity();
(...skipping 1216 matching lines...)
4396 __ b(ne, &check_frame_marker); 4348 __ b(ne, &check_frame_marker);
4397 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); 4349 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));
4398 4350
4399 // Check the marker in the calling frame. 4351 // Check the marker in the calling frame.
4400 __ bind(&check_frame_marker); 4352 __ bind(&check_frame_marker);
4401 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); 4353 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
4402 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 4354 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
4403 } 4355 }
4404 4356
4405 4357
4358 void LCodeGen::EnsureSpaceForLazyDeopt() {
4359 // Ensure that we have enough space after the previous lazy-bailout
4360 // instruction for patching the code here.
4361 int current_pc = masm()->pc_offset();
4362 int patch_size = Deoptimizer::patch_size();
4363 if (current_pc < last_lazy_deopt_pc_ + patch_size) {
4364 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
4365 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
4366 while (padding_size > 0) {
4367 __ nop();
4368 padding_size -= Assembler::kInstrSize;
4369 }
4370 }
4371 last_lazy_deopt_pc_ = masm()->pc_offset();
4372 }
4373
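Aside (not part of the patch): the helper above pads with nops until at least Deoptimizer::patch_size() bytes separate the current position from the last lazy-deopt point, so the lazy deoptimizer can later patch in its call sequence without overwriting the following code. A hedged, standalone version of just the padding arithmetic (the parameters stand in for the real ARM values):

    // Standalone sketch of the padding computation; the parameters stand in
    // for masm()->pc_offset(), last_lazy_deopt_pc_, Deoptimizer::patch_size()
    // and Assembler::kInstrSize.
    int NopsNeededForLazyDeopt(int current_pc, int last_lazy_deopt_pc,
                               int patch_size, int instr_size) {
      if (current_pc >= last_lazy_deopt_pc + patch_size) return 0;
      int padding_bytes = last_lazy_deopt_pc + patch_size - current_pc;
      // The real code asserts this is a multiple of the instruction size
      // and emits one nop per remaining instruction slot.
      return padding_bytes / instr_size;
    }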
4374
4406 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { 4375 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
4407 // No code for lazy bailout instruction. Used to capture environment after a 4376 EnsureSpaceForLazyDeopt();
4408 // call for populating the safepoint data with deoptimization data. 4377 ASSERT(instr->HasEnvironment());
4378 LEnvironment* env = instr->environment();
4379 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4380 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4409 } 4381 }
4410 4382
4411 4383
4412 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { 4384 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
4413 DeoptimizeIf(al, instr->environment()); 4385 DeoptimizeIf(al, instr->environment());
4414 } 4386 }
4415 4387
4416 4388
4417 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { 4389 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
4418 Register object = ToRegister(instr->object()); 4390 Register object = ToRegister(instr->object());
4419 Register key = ToRegister(instr->key()); 4391 Register key = ToRegister(instr->key());
4420 Register strict = scratch0(); 4392 Register strict = scratch0();
4421 __ mov(strict, Operand(Smi::FromInt(strict_mode_flag()))); 4393 __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
4422 __ Push(object, key, strict); 4394 __ Push(object, key, strict);
4423 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 4395 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4424 LPointerMap* pointers = instr->pointer_map(); 4396 LPointerMap* pointers = instr->pointer_map();
4425 LEnvironment* env = instr->deoptimization_environment();
4426 RecordPosition(pointers->position()); 4397 RecordPosition(pointers->position());
4427 RegisterEnvironmentForDeoptimization(env); 4398 SafepointGenerator safepoint_generator(
4428 SafepointGenerator safepoint_generator(this, 4399 this, pointers, Safepoint::kLazyDeopt);
4429 pointers,
4430 env->deoptimization_index());
4431 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); 4400 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
4432 } 4401 }
4433 4402
4434 4403
4435 void LCodeGen::DoIn(LIn* instr) { 4404 void LCodeGen::DoIn(LIn* instr) {
4436 Register obj = ToRegister(instr->object()); 4405 Register obj = ToRegister(instr->object());
4437 Register key = ToRegister(instr->key()); 4406 Register key = ToRegister(instr->key());
4438 __ Push(key, obj); 4407 __ Push(key, obj);
4439 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 4408 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4440 LPointerMap* pointers = instr->pointer_map(); 4409 LPointerMap* pointers = instr->pointer_map();
4441 LEnvironment* env = instr->deoptimization_environment();
4442 RecordPosition(pointers->position()); 4410 RecordPosition(pointers->position());
4443 RegisterEnvironmentForDeoptimization(env); 4411 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
4444 SafepointGenerator safepoint_generator(this,
4445 pointers,
4446 env->deoptimization_index());
4447 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); 4412 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
4448 } 4413 }
4449 4414
4450 4415
4451 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { 4416 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
4452 { 4417 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4453 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4418 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
4454 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); 4419 RecordSafepointWithLazyDeopt(
4455 RegisterLazyDeoptimization( 4420 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
4456 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 4421 ASSERT(instr->HasEnvironment());
4457 } 4422 LEnvironment* env = instr->environment();
4458 4423 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4459 // The gap code includes the restoring of the safepoint registers.
4460 int pc = masm()->pc_offset();
4461 safepoints_.SetPcAfterGap(pc);
4462 } 4424 }
4463 4425
4464 4426
4465 void LCodeGen::DoStackCheck(LStackCheck* instr) { 4427 void LCodeGen::DoStackCheck(LStackCheck* instr) {
4466 class DeferredStackCheck: public LDeferredCode { 4428 class DeferredStackCheck: public LDeferredCode {
4467 public: 4429 public:
4468 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) 4430 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4469 : LDeferredCode(codegen), instr_(instr) { } 4431 : LDeferredCode(codegen), instr_(instr) { }
4470 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } 4432 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4471 private: 4433 private:
4472 LStackCheck* instr_; 4434 LStackCheck* instr_;
4473 }; 4435 };
4474 4436
4437 ASSERT(instr->HasEnvironment());
4438 LEnvironment* env = instr->environment();
4439 // There is no LLazyBailout instruction for stack-checks. We have to
4440 // prepare for lazy deoptimization explicitly here.
4475 if (instr->hydrogen()->is_function_entry()) { 4441 if (instr->hydrogen()->is_function_entry()) {
4476 // Perform stack overflow check. 4442 // Perform stack overflow check.
4477 Label done; 4443 Label done;
4478 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 4444 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
4479 __ cmp(sp, Operand(ip)); 4445 __ cmp(sp, Operand(ip));
4480 __ b(hs, &done); 4446 __ b(hs, &done);
4481 StackCheckStub stub; 4447 StackCheckStub stub;
4482 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 4448 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4449 EnsureSpaceForLazyDeopt();
4483 __ bind(&done); 4450 __ bind(&done);
4451 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4452 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4484 } else { 4453 } else {
4485 ASSERT(instr->hydrogen()->is_backwards_branch()); 4454 ASSERT(instr->hydrogen()->is_backwards_branch());
4486 // Perform stack overflow check if this goto needs it before jumping. 4455 // Perform stack overflow check if this goto needs it before jumping.
4487 DeferredStackCheck* deferred_stack_check = 4456 DeferredStackCheck* deferred_stack_check =
4488 new DeferredStackCheck(this, instr); 4457 new DeferredStackCheck(this, instr);
4489 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 4458 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
4490 __ cmp(sp, Operand(ip)); 4459 __ cmp(sp, Operand(ip));
4491 __ b(lo, deferred_stack_check->entry()); 4460 __ b(lo, deferred_stack_check->entry());
4461 EnsureSpaceForLazyDeopt();
4492 __ bind(instr->done_label()); 4462 __ bind(instr->done_label());
4493 deferred_stack_check->SetExit(instr->done_label()); 4463 deferred_stack_check->SetExit(instr->done_label());
4464 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4465 // Don't record a deoptimization index for the safepoint here.
4466 // This will be done explicitly when emitting call and the safepoint in
4467 // the deferred code.
4494 } 4468 }
4495 } 4469 }
4496 4470
4497 4471
4498 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 4472 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
4499 // This is a pseudo-instruction that ensures that the environment here is 4473 // This is a pseudo-instruction that ensures that the environment here is
4500 // properly registered for deoptimization and records the assembler's PC 4474 // properly registered for deoptimization and records the assembler's PC
4501 // offset. 4475 // offset.
4502 LEnvironment* environment = instr->environment(); 4476 LEnvironment* environment = instr->environment();
4503 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), 4477 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
4504 instr->SpilledDoubleRegisterArray()); 4478 instr->SpilledDoubleRegisterArray());
4505 4479
4506 // If the environment were already registered, we would have no way of 4480 // If the environment were already registered, we would have no way of
4507 // backpatching it with the spill slot operands. 4481 // backpatching it with the spill slot operands.
4508 ASSERT(!environment->HasBeenRegistered()); 4482 ASSERT(!environment->HasBeenRegistered());
4509 RegisterEnvironmentForDeoptimization(environment); 4483 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
4510 ASSERT(osr_pc_offset_ == -1); 4484 ASSERT(osr_pc_offset_ == -1);
4511 osr_pc_offset_ = masm()->pc_offset(); 4485 osr_pc_offset_ = masm()->pc_offset();
4512 } 4486 }
4513 4487
4514 4488
4515 4489
4516 4490
4517 #undef __ 4491 #undef __
4518 4492
4519 } } // namespace v8::internal 4493 } } // namespace v8::internal