Chromium Code Reviews

Side by Side Diff: src/mips/lithium-codegen-mips.cc

Issue 8587008: MIPS: Fix lazy deoptimization at HInvokeFunction and enable target-recording call-function stub. (Closed)
Patch Set: Created 9 years, 1 month ago
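A central part of this patch is how room for lazy deoptimization is reserved. The old code padded with nops in SafepointGenerator::BeforeCall to keep space after the previous safepoint; the new code removes that and instead adds LCodeGen::EnsureSpaceForLazyDeopt() (see the diff below), which pads the instruction stream only at the points that can actually be patched with a lazy bailout, and records safepoints with a Safepoint::DeoptMode instead of a raw deoptimization index. The following is a minimal, standalone sketch of the padding rule only; it is not V8 code, PaddingNops is a hypothetical helper, and the 16-byte patch size is purely illustrative (what is taken from the patch is that MIPS instructions are Assembler::kInstrSize = 4 bytes and that the deoptimizer needs Deoptimizer::patch_size() bytes of patchable space per site).

#include <cassert>
#include <cstdio>

// Number of nop instructions to emit before the next lazy-deopt site so the
// deoptimizer can later overwrite patch_size bytes at the previous site
// without running into this one.
int PaddingNops(int current_pc, int last_lazy_deopt_pc, int patch_size,
                int instr_size = 4) {
  if (current_pc >= last_lazy_deopt_pc + patch_size) return 0;
  int padding = last_lazy_deopt_pc + patch_size - current_pc;
  assert(padding % instr_size == 0);
  return padding / instr_size;
}

int main() {
  // Two lazy-deopt sites only 8 bytes apart, with an assumed 16-byte patch
  // size, need two 4-byte nops between them.
  std::printf("%d\n", PaddingNops(8, 0, 16));  // prints 2
  return 0;
}

In the patch itself, EnsureSpaceForLazyDeopt() emits the nops and updates last_lazy_deopt_pc_, while call sites record their safepoints with Safepoint::kLazyDeopt and attach the deoptimization index afterwards through RecordLazyDeoptimizationIndex().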
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 22 matching lines...)
33 #include "stub-cache.h" 33 #include "stub-cache.h"
34 34
35 namespace v8 { 35 namespace v8 {
36 namespace internal { 36 namespace internal {
37 37
38 38
39 class SafepointGenerator : public CallWrapper { 39 class SafepointGenerator : public CallWrapper {
40 public: 40 public:
41 SafepointGenerator(LCodeGen* codegen, 41 SafepointGenerator(LCodeGen* codegen,
42 LPointerMap* pointers, 42 LPointerMap* pointers,
43 int deoptimization_index) 43 Safepoint::DeoptMode mode)
44 : codegen_(codegen), 44 : codegen_(codegen),
45 pointers_(pointers), 45 pointers_(pointers),
46 deoptimization_index_(deoptimization_index) { } 46 deopt_mode_(mode) { }
47 virtual ~SafepointGenerator() { } 47 virtual ~SafepointGenerator() { }
48 48
49 virtual void BeforeCall(int call_size) const { 49 virtual void BeforeCall(int call_size) const { }
50 ASSERT(call_size >= 0);
51 // Ensure that we have enough space after the previous safepoint position
52 // for the generated code there.
53 int call_end = codegen_->masm()->pc_offset() + call_size;
54 int prev_jump_end =
55 codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
56 if (call_end < prev_jump_end) {
57 int padding_size = prev_jump_end - call_end;
58 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
59 while (padding_size > 0) {
60 codegen_->masm()->nop();
61 padding_size -= Assembler::kInstrSize;
62 }
63 }
64 }
65 50
66 virtual void AfterCall() const { 51 virtual void AfterCall() const {
67 codegen_->RecordSafepoint(pointers_, deoptimization_index_); 52 codegen_->RecordSafepoint(pointers_, deopt_mode_);
68 } 53 }
69 54
70 private: 55 private:
71 LCodeGen* codegen_; 56 LCodeGen* codegen_;
72 LPointerMap* pointers_; 57 LPointerMap* pointers_;
73 int deoptimization_index_; 58 Safepoint::DeoptMode deopt_mode_;
74 }; 59 };
75 60
76 61
77 #define __ masm()-> 62 #define __ masm()->
78 63
79 bool LCodeGen::GenerateCode() { 64 bool LCodeGen::GenerateCode() {
80 HPhase phase("Code generation", chunk()); 65 HPhase phase("Code generation", chunk());
81 ASSERT(is_unused()); 66 ASSERT(is_unused());
82 status_ = GENERATING; 67 status_ = GENERATING;
83 CpuFeatures::Scope scope(FPU); 68 CpuFeatures::Scope scope(FPU);
(...skipping 10 matching lines...)
94 GenerateDeferredCode() && 79 GenerateDeferredCode() &&
95 GenerateSafepointTable(); 80 GenerateSafepointTable();
96 } 81 }
97 82
98 83
99 void LCodeGen::FinishCode(Handle<Code> code) { 84 void LCodeGen::FinishCode(Handle<Code> code) {
100 ASSERT(is_done()); 85 ASSERT(is_done());
101 code->set_stack_slots(GetStackSlotCount()); 86 code->set_stack_slots(GetStackSlotCount());
102 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); 87 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
103 PopulateDeoptimizationData(code); 88 PopulateDeoptimizationData(code);
104 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
105 } 89 }
106 90
107 91
108 void LCodeGen::Abort(const char* format, ...) { 92 void LCodeGen::Abort(const char* format, ...) {
109 if (FLAG_trace_bailout) { 93 if (FLAG_trace_bailout) {
110 SmartArrayPointer<char> name( 94 SmartArrayPointer<char> name(
111 info()->shared_info()->DebugName()->ToCString()); 95 info()->shared_info()->DebugName()->ToCString());
112 PrintF("Aborting LCodeGen in @\"%s\": ", *name); 96 PrintF("Aborting LCodeGen in @\"%s\": ", *name);
113 va_list arguments; 97 va_list arguments;
114 va_start(arguments, format); 98 va_start(arguments, format);
(...skipping 76 matching lines...)
191 if (heap_slots > 0) { 175 if (heap_slots > 0) {
192 Comment(";;; Allocate local context"); 176 Comment(";;; Allocate local context");
193 // Argument to NewContext is the function, which is in a1. 177 // Argument to NewContext is the function, which is in a1.
194 __ push(a1); 178 __ push(a1);
195 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 179 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
196 FastNewContextStub stub(heap_slots); 180 FastNewContextStub stub(heap_slots);
197 __ CallStub(&stub); 181 __ CallStub(&stub);
198 } else { 182 } else {
199 __ CallRuntime(Runtime::kNewFunctionContext, 1); 183 __ CallRuntime(Runtime::kNewFunctionContext, 1);
200 } 184 }
201 RecordSafepoint(Safepoint::kNoDeoptimizationIndex); 185 RecordSafepoint(Safepoint::kNoLazyDeopt);
202 // Context is returned in both v0 and cp. It replaces the context 186 // Context is returned in both v0 and cp. It replaces the context
203 // passed to us. It's saved in the stack and kept live in cp. 187 // passed to us. It's saved in the stack and kept live in cp.
204 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 188 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
205 // Copy any necessary parameters into the context. 189 // Copy any necessary parameters into the context.
206 int num_parameters = scope()->num_parameters(); 190 int num_parameters = scope()->num_parameters();
207 for (int i = 0; i < num_parameters; i++) { 191 for (int i = 0; i < num_parameters; i++) {
208 Variable* var = scope()->parameter(i); 192 Variable* var = scope()->parameter(i);
209 if (var->IsContextSlot()) { 193 if (var->IsContextSlot()) {
210 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 194 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
211 (num_parameters - 1 - i) * kPointerSize; 195 (num_parameters - 1 - i) * kPointerSize;
212 // Load parameter from stack. 196 // Load parameter from stack.
213 __ lw(a0, MemOperand(fp, parameter_offset)); 197 __ lw(a0, MemOperand(fp, parameter_offset));
214 // Store it in the context. 198 // Store it in the context.
215 MemOperand target = ContextOperand(cp, var->index()); 199 MemOperand target = ContextOperand(cp, var->index());
216 __ sw(a0, target); 200 __ sw(a0, target);
217 // Update the write barrier. This clobbers a3 and a0. 201 // Update the write barrier. This clobbers a3 and a0.
218 __ RecordWriteContextSlot( 202 __ RecordWriteContextSlot(
219 cp, target.offset(), a0, a3, kRAHasBeenSaved, kSaveFPRegs); 203 cp, target.offset(), a0, a3, kRAHasBeenSaved, kSaveFPRegs);
220 } 204 }
221 } 205 }
222 Comment(";;; End allocate local context"); 206 Comment(";;; End allocate local context");
223 } 207 }
224 208
225 // Trace the call. 209 // Trace the call.
226 if (FLAG_trace) { 210 if (FLAG_trace) {
227 __ CallRuntime(Runtime::kTraceEnter, 0); 211 __ CallRuntime(Runtime::kTraceEnter, 0);
228 } 212 }
213 EnsureSpaceForLazyDeopt();
229 return !is_aborted(); 214 return !is_aborted();
230 } 215 }
231 216
232 217
233 bool LCodeGen::GenerateBody() { 218 bool LCodeGen::GenerateBody() {
234 ASSERT(is_generating()); 219 ASSERT(is_generating());
235 bool emit_instructions = true; 220 bool emit_instructions = true;
236 for (current_instruction_ = 0; 221 for (current_instruction_ = 0;
237 !is_aborted() && current_instruction_ < instructions_->length(); 222 !is_aborted() && current_instruction_ < instructions_->length();
238 current_instruction_++) { 223 current_instruction_++) {
239 LInstruction* instr = instructions_->at(current_instruction_); 224 LInstruction* instr = instructions_->at(current_instruction_);
240 if (instr->IsLabel()) { 225 if (instr->IsLabel()) {
241 LLabel* label = LLabel::cast(instr); 226 LLabel* label = LLabel::cast(instr);
242 emit_instructions = !label->HasReplacement(); 227 emit_instructions = !label->HasReplacement();
243 } 228 }
244 229
245 if (emit_instructions) { 230 if (emit_instructions) {
246 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); 231 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
247 instr->CompileToNative(this); 232 instr->CompileToNative(this);
248 } 233 }
249 } 234 }
250 return !is_aborted(); 235 return !is_aborted();
251 } 236 }
252 237
253 238
254 LInstruction* LCodeGen::GetNextInstruction() {
255 if (current_instruction_ < instructions_->length() - 1) {
256 return instructions_->at(current_instruction_ + 1);
257 } else {
258 return NULL;
259 }
260 }
261
262
263 bool LCodeGen::GenerateDeferredCode() { 239 bool LCodeGen::GenerateDeferredCode() {
264 ASSERT(is_generating()); 240 ASSERT(is_generating());
265 if (deferred_.length() > 0) { 241 if (deferred_.length() > 0) {
266 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 242 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
267 LDeferredCode* code = deferred_[i]; 243 LDeferredCode* code = deferred_[i];
268 __ bind(code->entry()); 244 __ bind(code->entry());
269 Comment(";;; Deferred code @%d: %s.", 245 Comment(";;; Deferred code @%d: %s.",
270 code->instruction_index(), 246 code->instruction_index(),
271 code->instr()->Mnemonic()); 247 code->instr()->Mnemonic());
272 code->Generate(); 248 code->Generate();
273 __ jmp(code->exit()); 249 __ jmp(code->exit());
274 } 250 }
275
276 // Pad code to ensure that the last piece of deferred code have
277 // room for lazy bailout.
278 while ((masm()->pc_offset() - LastSafepointEnd())
279 < Deoptimizer::patch_size()) {
280 __ nop();
281 }
282 } 251 }
283 // Deferred code is the last part of the instruction sequence. Mark 252 // Deferred code is the last part of the instruction sequence. Mark
284 // the generated code as done unless we bailed out. 253 // the generated code as done unless we bailed out.
285 if (!is_aborted()) status_ = DONE; 254 if (!is_aborted()) status_ = DONE;
286 return !is_aborted(); 255 return !is_aborted();
287 } 256 }
288 257
289 258
290 bool LCodeGen::GenerateDeoptJumpTable() { 259 bool LCodeGen::GenerateDeoptJumpTable() {
291 // TODO(plind): not clear that this will have advantage for MIPS. 260 // TODO(plind): not clear that this will have advantage for MIPS.
(...skipping 235 matching lines...)
527 496
528 497
529 void LCodeGen::CallCodeGeneric(Handle<Code> code, 498 void LCodeGen::CallCodeGeneric(Handle<Code> code,
530 RelocInfo::Mode mode, 499 RelocInfo::Mode mode,
531 LInstruction* instr, 500 LInstruction* instr,
532 SafepointMode safepoint_mode) { 501 SafepointMode safepoint_mode) {
533 ASSERT(instr != NULL); 502 ASSERT(instr != NULL);
534 LPointerMap* pointers = instr->pointer_map(); 503 LPointerMap* pointers = instr->pointer_map();
535 RecordPosition(pointers->position()); 504 RecordPosition(pointers->position());
536 __ Call(code, mode); 505 __ Call(code, mode);
537 RegisterLazyDeoptimization(instr, safepoint_mode); 506 RecordSafepointWithLazyDeopt(instr, safepoint_mode);
538 } 507 }
539 508
540 509
541 void LCodeGen::CallRuntime(const Runtime::Function* function, 510 void LCodeGen::CallRuntime(const Runtime::Function* function,
542 int num_arguments, 511 int num_arguments,
543 LInstruction* instr) { 512 LInstruction* instr) {
544 ASSERT(instr != NULL); 513 ASSERT(instr != NULL);
545 LPointerMap* pointers = instr->pointer_map(); 514 LPointerMap* pointers = instr->pointer_map();
546 ASSERT(pointers != NULL); 515 ASSERT(pointers != NULL);
547 RecordPosition(pointers->position()); 516 RecordPosition(pointers->position());
548 517
549 __ CallRuntime(function, num_arguments); 518 __ CallRuntime(function, num_arguments);
550 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); 519 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
551 } 520 }
552 521
553 522
554 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, 523 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
555 int argc, 524 int argc,
556 LInstruction* instr) { 525 LInstruction* instr) {
557 __ CallRuntimeSaveDoubles(id); 526 __ CallRuntimeSaveDoubles(id);
558 RecordSafepointWithRegisters( 527 RecordSafepointWithRegisters(
559 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); 528 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
560 } 529 }
561 530
562 531
563 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, 532 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
564 SafepointMode safepoint_mode) { 533 Safepoint::DeoptMode mode) {
565 // Create the environment to bailout to. If the call has side effects
566 // execution has to continue after the call otherwise execution can continue
567 // from a previous bailout point repeating the call.
568 LEnvironment* deoptimization_environment;
569 if (instr->HasDeoptimizationEnvironment()) {
570 deoptimization_environment = instr->deoptimization_environment();
571 } else {
572 deoptimization_environment = instr->environment();
573 }
574
575 RegisterEnvironmentForDeoptimization(deoptimization_environment);
576 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
577 RecordSafepoint(instr->pointer_map(),
578 deoptimization_environment->deoptimization_index());
579 } else {
580 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
581 RecordSafepointWithRegisters(
582 instr->pointer_map(),
583 0,
584 deoptimization_environment->deoptimization_index());
585 }
586 }
587
588
589 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
590 if (!environment->HasBeenRegistered()) { 534 if (!environment->HasBeenRegistered()) {
591 // Physical stack frame layout: 535 // Physical stack frame layout:
592 // -x ............. -4 0 ..................................... y 536 // -x ............. -4 0 ..................................... y
593 // [incoming arguments] [spill slots] [pushed outgoing arguments] 537 // [incoming arguments] [spill slots] [pushed outgoing arguments]
594 538
595 // Layout of the environment: 539 // Layout of the environment:
596 // 0 ..................................................... size-1 540 // 0 ..................................................... size-1
597 // [parameters] [locals] [expression stack including arguments] 541 // [parameters] [locals] [expression stack including arguments]
598 542
599 // Layout of the translation: 543 // Layout of the translation:
600 // 0 ........................................................ size - 1 + 4 544 // 0 ........................................................ size - 1 + 4
601 // [expression stack including arguments] [locals] [4 words] [parameters] 545 // [expression stack including arguments] [locals] [4 words] [parameters]
602 // |>------------ translation_size ------------<| 546 // |>------------ translation_size ------------<|
603 547
604 int frame_count = 0; 548 int frame_count = 0;
605 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { 549 for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
606 ++frame_count; 550 ++frame_count;
607 } 551 }
608 Translation translation(&translations_, frame_count); 552 Translation translation(&translations_, frame_count);
609 WriteTranslation(environment, &translation); 553 WriteTranslation(environment, &translation);
610 int deoptimization_index = deoptimizations_.length(); 554 int deoptimization_index = deoptimizations_.length();
611 environment->Register(deoptimization_index, translation.index()); 555 int pc_offset = masm()->pc_offset();
556 environment->Register(deoptimization_index,
557 translation.index(),
558 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
612 deoptimizations_.Add(environment); 559 deoptimizations_.Add(environment);
613 } 560 }
614 } 561 }
615 562
616 563
617 void LCodeGen::DeoptimizeIf(Condition cc, 564 void LCodeGen::DeoptimizeIf(Condition cc,
618 LEnvironment* environment, 565 LEnvironment* environment,
619 Register src1, 566 Register src1,
620 const Operand& src2) { 567 const Operand& src2) {
621 RegisterEnvironmentForDeoptimization(environment); 568 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
622 ASSERT(environment->HasBeenRegistered()); 569 ASSERT(environment->HasBeenRegistered());
623 int id = environment->deoptimization_index(); 570 int id = environment->deoptimization_index();
624 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); 571 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
625 ASSERT(entry != NULL); 572 ASSERT(entry != NULL);
626 if (entry == NULL) { 573 if (entry == NULL) {
627 Abort("bailout was not prepared"); 574 Abort("bailout was not prepared");
628 return; 575 return;
629 } 576 }
630 577
631 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. 578 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS.
(...skipping 44 matching lines...)
676 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); 623 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
677 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); 624 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
678 625
679 // Populate the deoptimization entries. 626 // Populate the deoptimization entries.
680 for (int i = 0; i < length; i++) { 627 for (int i = 0; i < length; i++) {
681 LEnvironment* env = deoptimizations_[i]; 628 LEnvironment* env = deoptimizations_[i];
682 data->SetAstId(i, Smi::FromInt(env->ast_id())); 629 data->SetAstId(i, Smi::FromInt(env->ast_id()));
683 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); 630 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
684 data->SetArgumentsStackHeight(i, 631 data->SetArgumentsStackHeight(i,
685 Smi::FromInt(env->arguments_stack_height())); 632 Smi::FromInt(env->arguments_stack_height()));
633 data->SetPc(i, Smi::FromInt(env->pc_offset()));
686 } 634 }
687 code->set_deoptimization_data(*data); 635 code->set_deoptimization_data(*data);
688 } 636 }
689 637
690 638
691 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { 639 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
692 int result = deoptimization_literals_.length(); 640 int result = deoptimization_literals_.length();
693 for (int i = 0; i < deoptimization_literals_.length(); ++i) { 641 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
694 if (deoptimization_literals_[i].is_identical_to(literal)) return i; 642 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
695 } 643 }
(...skipping 11 matching lines...)
707 for (int i = 0, length = inlined_closures->length(); 655 for (int i = 0, length = inlined_closures->length();
708 i < length; 656 i < length;
709 i++) { 657 i++) {
710 DefineDeoptimizationLiteral(inlined_closures->at(i)); 658 DefineDeoptimizationLiteral(inlined_closures->at(i));
711 } 659 }
712 660
713 inlined_function_count_ = deoptimization_literals_.length(); 661 inlined_function_count_ = deoptimization_literals_.length();
714 } 662 }
715 663
716 664
665 void LCodeGen::RecordSafepointWithLazyDeopt(
666 LInstruction* instr, SafepointMode safepoint_mode) {
667 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
668 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
669 } else {
670 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
671 RecordSafepointWithRegisters(
672 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
673 }
674 }
675
676
717 void LCodeGen::RecordSafepoint( 677 void LCodeGen::RecordSafepoint(
718 LPointerMap* pointers, 678 LPointerMap* pointers,
719 Safepoint::Kind kind, 679 Safepoint::Kind kind,
720 int arguments, 680 int arguments,
721 int deoptimization_index) { 681 Safepoint::DeoptMode deopt_mode) {
722 ASSERT(expected_safepoint_kind_ == kind); 682 ASSERT(expected_safepoint_kind_ == kind);
723 683
724 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); 684 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
725 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 685 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
726 kind, arguments, deoptimization_index); 686 kind, arguments, deopt_mode);
727 for (int i = 0; i < operands->length(); i++) { 687 for (int i = 0; i < operands->length(); i++) {
728 LOperand* pointer = operands->at(i); 688 LOperand* pointer = operands->at(i);
729 if (pointer->IsStackSlot()) { 689 if (pointer->IsStackSlot()) {
730 safepoint.DefinePointerSlot(pointer->index()); 690 safepoint.DefinePointerSlot(pointer->index());
731 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 691 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
732 safepoint.DefinePointerRegister(ToRegister(pointer)); 692 safepoint.DefinePointerRegister(ToRegister(pointer));
733 } 693 }
734 } 694 }
735 if (kind & Safepoint::kWithRegisters) { 695 if (kind & Safepoint::kWithRegisters) {
736 // Register cp always contains a pointer to the context. 696 // Register cp always contains a pointer to the context.
737 safepoint.DefinePointerRegister(cp); 697 safepoint.DefinePointerRegister(cp);
738 } 698 }
739 } 699 }
740 700
741 701
742 void LCodeGen::RecordSafepoint(LPointerMap* pointers, 702 void LCodeGen::RecordSafepoint(LPointerMap* pointers,
743 int deoptimization_index) { 703 Safepoint::DeoptMode deopt_mode) {
744 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); 704 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
745 } 705 }
746 706
747 707
748 void LCodeGen::RecordSafepoint(int deoptimization_index) { 708 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
749 LPointerMap empty_pointers(RelocInfo::kNoPosition); 709 LPointerMap empty_pointers(RelocInfo::kNoPosition);
750 RecordSafepoint(&empty_pointers, deoptimization_index); 710 RecordSafepoint(&empty_pointers, deopt_mode);
751 } 711 }
752 712
753 713
754 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, 714 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
755 int arguments, 715 int arguments,
756 int deoptimization_index) { 716 Safepoint::DeoptMode deopt_mode) {
757 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, 717 RecordSafepoint(
758 deoptimization_index); 718 pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
759 } 719 }
760 720
761 721
762 void LCodeGen::RecordSafepointWithRegistersAndDoubles( 722 void LCodeGen::RecordSafepointWithRegistersAndDoubles(
763 LPointerMap* pointers, 723 LPointerMap* pointers,
764 int arguments, 724 int arguments,
765 int deoptimization_index) { 725 Safepoint::DeoptMode deopt_mode) {
766 RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments, 726 RecordSafepoint(
767 deoptimization_index); 727 pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode);
768 } 728 }
769 729
770 730
771 void LCodeGen::RecordPosition(int position) { 731 void LCodeGen::RecordPosition(int position) {
772 if (position == RelocInfo::kNoPosition) return; 732 if (position == RelocInfo::kNoPosition) return;
773 masm()->positions_recorder()->RecordPosition(position); 733 masm()->positions_recorder()->RecordPosition(position);
774 } 734 }
775 735
776 736
777 void LCodeGen::DoLabel(LLabel* label) { 737 void LCodeGen::DoLabel(LLabel* label) {
(...skipping 14 matching lines...)
792 752
793 753
794 void LCodeGen::DoGap(LGap* gap) { 754 void LCodeGen::DoGap(LGap* gap) {
795 for (int i = LGap::FIRST_INNER_POSITION; 755 for (int i = LGap::FIRST_INNER_POSITION;
796 i <= LGap::LAST_INNER_POSITION; 756 i <= LGap::LAST_INNER_POSITION;
797 i++) { 757 i++) {
798 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); 758 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
799 LParallelMove* move = gap->GetParallelMove(inner_pos); 759 LParallelMove* move = gap->GetParallelMove(inner_pos);
800 if (move != NULL) DoParallelMove(move); 760 if (move != NULL) DoParallelMove(move);
801 } 761 }
802
803 LInstruction* next = GetNextInstruction();
804 if (next != NULL && next->IsLazyBailout()) {
805 int pc = masm()->pc_offset();
806 safepoints_.SetPcAfterGap(pc);
807 }
808 } 762 }
809 763
810 764
811 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { 765 void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
812 DoGap(instr); 766 DoGap(instr);
813 } 767 }
814 768
815 769
816 void LCodeGen::DoParameter(LParameter* instr) { 770 void LCodeGen::DoParameter(LParameter* instr) {
817 // Nothing to do. 771 // Nothing to do.
(...skipping 1116 matching lines...)
1934 } 1888 }
1935 1889
1936 1890
1937 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { 1891 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1938 class DeferredInstanceOfKnownGlobal: public LDeferredCode { 1892 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1939 public: 1893 public:
1940 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, 1894 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1941 LInstanceOfKnownGlobal* instr) 1895 LInstanceOfKnownGlobal* instr)
1942 : LDeferredCode(codegen), instr_(instr) { } 1896 : LDeferredCode(codegen), instr_(instr) { }
1943 virtual void Generate() { 1897 virtual void Generate() {
1944 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); 1898 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
1945 } 1899 }
1946 virtual LInstruction* instr() { return instr_; } 1900 virtual LInstruction* instr() { return instr_; }
1947 Label* map_check() { return &map_check_; } 1901 Label* map_check() { return &map_check_; }
1948 1902
1949 private: 1903 private:
1950 LInstanceOfKnownGlobal* instr_; 1904 LInstanceOfKnownGlobal* instr_;
1951 Label map_check_; 1905 Label map_check_;
1952 }; 1906 };
1953 1907
1954 DeferredInstanceOfKnownGlobal* deferred; 1908 DeferredInstanceOfKnownGlobal* deferred;
(...skipping 47 matching lines...)
2002 __ bind(&false_result); 1956 __ bind(&false_result);
2003 __ LoadRoot(result, Heap::kFalseValueRootIndex); 1957 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2004 1958
2005 // Here result has either true or false. Deferred code also produces true or 1959 // Here result has either true or false. Deferred code also produces true or
2006 // false object. 1960 // false object.
2007 __ bind(deferred->exit()); 1961 __ bind(deferred->exit());
2008 __ bind(&done); 1962 __ bind(&done);
2009 } 1963 }
2010 1964
2011 1965
2012 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, 1966 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2013 Label* map_check) { 1967 Label* map_check) {
2014 Register result = ToRegister(instr->result()); 1968 Register result = ToRegister(instr->result());
2015 ASSERT(result.is(v0)); 1969 ASSERT(result.is(v0));
2016 1970
2017 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; 1971 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2018 flags = static_cast<InstanceofStub::Flags>( 1972 flags = static_cast<InstanceofStub::Flags>(
2019 flags | InstanceofStub::kArgsInRegisters); 1973 flags | InstanceofStub::kArgsInRegisters);
2020 flags = static_cast<InstanceofStub::Flags>( 1974 flags = static_cast<InstanceofStub::Flags>(
2021 flags | InstanceofStub::kCallSiteInlineCheck); 1975 flags | InstanceofStub::kCallSiteInlineCheck);
2022 flags = static_cast<InstanceofStub::Flags>( 1976 flags = static_cast<InstanceofStub::Flags>(
2023 flags | InstanceofStub::kReturnTrueFalseObject); 1977 flags | InstanceofStub::kReturnTrueFalseObject);
(...skipping 13 matching lines...)
2037 __ bind(&before_push_delta); 1991 __ bind(&before_push_delta);
2038 { 1992 {
2039 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); 1993 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
2040 __ li(temp, Operand(delta * kPointerSize), true); 1994 __ li(temp, Operand(delta * kPointerSize), true);
2041 __ StoreToSafepointRegisterSlot(temp, temp); 1995 __ StoreToSafepointRegisterSlot(temp, temp);
2042 } 1996 }
2043 CallCodeGeneric(stub.GetCode(), 1997 CallCodeGeneric(stub.GetCode(),
2044 RelocInfo::CODE_TARGET, 1998 RelocInfo::CODE_TARGET,
2045 instr, 1999 instr,
2046 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 2000 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
2001 ASSERT(instr->HasDeoptimizationEnvironment());
2002 LEnvironment* env = instr->deoptimization_environment();
2003 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2047 // Put the result value into the result register slot and 2004 // Put the result value into the result register slot and
2048 // restore all registers. 2005 // restore all registers.
2049 __ StoreToSafepointRegisterSlot(result, result); 2006 __ StoreToSafepointRegisterSlot(result, result);
2050 } 2007 }
2051 2008
2052 2009
2053 static Condition ComputeCompareCondition(Token::Value op) { 2010 static Condition ComputeCompareCondition(Token::Value op) {
2054 switch (op) { 2011 switch (op) {
2055 case Token::EQ_STRICT: 2012 case Token::EQ_STRICT:
2056 case Token::EQ: 2013 case Token::EQ:
(...skipping 606 matching lines...)
2663 __ Addu(scratch, elements, scratch); 2620 __ Addu(scratch, elements, scratch);
2664 __ lw(scratch, MemOperand(scratch)); 2621 __ lw(scratch, MemOperand(scratch));
2665 __ push(scratch); 2622 __ push(scratch);
2666 __ Subu(length, length, Operand(1)); 2623 __ Subu(length, length, Operand(1));
2667 __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg)); 2624 __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg));
2668 __ sll(scratch, length, 2); 2625 __ sll(scratch, length, 2);
2669 2626
2670 __ bind(&invoke); 2627 __ bind(&invoke);
2671 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 2628 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2672 LPointerMap* pointers = instr->pointer_map(); 2629 LPointerMap* pointers = instr->pointer_map();
2673 LEnvironment* env = instr->deoptimization_environment();
2674 RecordPosition(pointers->position()); 2630 RecordPosition(pointers->position());
2675 RegisterEnvironmentForDeoptimization(env); 2631 SafepointGenerator safepoint_generator(
2676 SafepointGenerator safepoint_generator(this, 2632 this, pointers, Safepoint::kLazyDeopt);
2677 pointers,
2678 env->deoptimization_index());
2679 // The number of arguments is stored in receiver which is a0, as expected 2633 // The number of arguments is stored in receiver which is a0, as expected
2680 // by InvokeFunction. 2634 // by InvokeFunction.
2681 v8::internal::ParameterCount actual(receiver); 2635 v8::internal::ParameterCount actual(receiver);
2682 __ InvokeFunction(function, actual, CALL_FUNCTION, 2636 __ InvokeFunction(function, actual, CALL_FUNCTION,
2683 safepoint_generator, CALL_AS_METHOD); 2637 safepoint_generator, CALL_AS_METHOD);
2684 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2638 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2685 } 2639 }
2686 2640
2687 2641
2688 void LCodeGen::DoPushArgument(LPushArgument* instr) { 2642 void LCodeGen::DoPushArgument(LPushArgument* instr) {
(...skipping 62 matching lines...)
2751 2705
2752 LPointerMap* pointers = instr->pointer_map(); 2706 LPointerMap* pointers = instr->pointer_map();
2753 RecordPosition(pointers->position()); 2707 RecordPosition(pointers->position());
2754 2708
2755 // Invoke function. 2709 // Invoke function.
2756 __ SetCallKind(t1, call_kind); 2710 __ SetCallKind(t1, call_kind);
2757 __ lw(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); 2711 __ lw(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
2758 __ Call(at); 2712 __ Call(at);
2759 2713
2760 // Setup deoptimization. 2714 // Setup deoptimization.
2761 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); 2715 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
2762 2716
2763 // Restore context. 2717 // Restore context.
2764 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2718 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2765 } 2719 }
2766 2720
2767 2721
2768 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 2722 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2769 ASSERT(ToRegister(instr->result()).is(v0)); 2723 ASSERT(ToRegister(instr->result()).is(v0));
2770 __ mov(a0, v0); 2724 __ mov(a0, v0);
2771 __ li(a1, Operand(instr->function())); 2725 __ li(a1, Operand(instr->function()));
(...skipping 356 matching lines...)
3128 UNREACHABLE(); 3082 UNREACHABLE();
3129 } 3083 }
3130 } 3084 }
3131 3085
3132 3086
3133 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { 3087 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3134 ASSERT(ToRegister(instr->function()).is(a1)); 3088 ASSERT(ToRegister(instr->function()).is(a1));
3135 ASSERT(instr->HasPointerMap()); 3089 ASSERT(instr->HasPointerMap());
3136 ASSERT(instr->HasDeoptimizationEnvironment()); 3090 ASSERT(instr->HasDeoptimizationEnvironment());
3137 LPointerMap* pointers = instr->pointer_map(); 3091 LPointerMap* pointers = instr->pointer_map();
3138 LEnvironment* env = instr->deoptimization_environment();
3139 RecordPosition(pointers->position()); 3092 RecordPosition(pointers->position());
3140 RegisterEnvironmentForDeoptimization(env); 3093 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3141 SafepointGenerator generator(this, pointers, env->deoptimization_index());
3142 ParameterCount count(instr->arity()); 3094 ParameterCount count(instr->arity());
3143 __ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); 3095 __ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3144 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3096 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3145 } 3097 }
3146 3098
3147 3099
3148 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 3100 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
3149 ASSERT(ToRegister(instr->result()).is(v0)); 3101 ASSERT(ToRegister(instr->result()).is(v0));
3150 3102
3151 int arity = instr->arity(); 3103 int arity = instr->arity();
(...skipping 1359 matching lines...)
4511 __ Branch(&check_frame_marker, ne, temp2, 4463 __ Branch(&check_frame_marker, ne, temp2,
4512 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 4464 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4513 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); 4465 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));
4514 4466
4515 // Check the marker in the calling frame. 4467 // Check the marker in the calling frame.
4516 __ bind(&check_frame_marker); 4468 __ bind(&check_frame_marker);
4517 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); 4469 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
4518 } 4470 }
4519 4471
4520 4472
4473 void LCodeGen::EnsureSpaceForLazyDeopt() {
4474 // Ensure that we have enough space after the previous lazy-bailout
4475 // instruction for patching the code here.
4476 int current_pc = masm()->pc_offset();
4477 int patch_size = Deoptimizer::patch_size();
4478 if (current_pc < last_lazy_deopt_pc_ + patch_size) {
4479 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
4480 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
4481 while (padding_size > 0) {
4482 __ nop();
4483 padding_size -= Assembler::kInstrSize;
4484 }
4485 }
4486 last_lazy_deopt_pc_ = current_pc;
4487 }
4488
4489
4521 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { 4490 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
4522 // No code for lazy bailout instruction. Used to capture environment after a 4491 EnsureSpaceForLazyDeopt();
4523 // call for populating the safepoint data with deoptimization data. 4492 ASSERT(instr->HasEnvironment());
4493 LEnvironment* env = instr->environment();
4494 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4495 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4524 } 4496 }
4525 4497
4526 4498
4527 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { 4499 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
4528 DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg)); 4500 DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg));
4529 } 4501 }
4530 4502
4531 4503
4532 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { 4504 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
4533 Register object = ToRegister(instr->object()); 4505 Register object = ToRegister(instr->object());
4534 Register key = ToRegister(instr->key()); 4506 Register key = ToRegister(instr->key());
4535 Register strict = scratch0(); 4507 Register strict = scratch0();
4536 __ li(strict, Operand(Smi::FromInt(strict_mode_flag()))); 4508 __ li(strict, Operand(Smi::FromInt(strict_mode_flag())));
4537 __ Push(object, key, strict); 4509 __ Push(object, key, strict);
4538 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 4510 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4539 LPointerMap* pointers = instr->pointer_map(); 4511 LPointerMap* pointers = instr->pointer_map();
4540 LEnvironment* env = instr->deoptimization_environment();
4541 RecordPosition(pointers->position()); 4512 RecordPosition(pointers->position());
4542 RegisterEnvironmentForDeoptimization(env); 4513 SafepointGenerator safepoint_generator(
4543 SafepointGenerator safepoint_generator(this, 4514 this, pointers, Safepoint::kLazyDeopt);
4544 pointers,
4545 env->deoptimization_index());
4546 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); 4515 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
4547 } 4516 }
4548 4517
4549 4518
4550 void LCodeGen::DoIn(LIn* instr) { 4519 void LCodeGen::DoIn(LIn* instr) {
4551 Register obj = ToRegister(instr->object()); 4520 Register obj = ToRegister(instr->object());
4552 Register key = ToRegister(instr->key()); 4521 Register key = ToRegister(instr->key());
4553 __ Push(key, obj); 4522 __ Push(key, obj);
4554 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 4523 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4555 LPointerMap* pointers = instr->pointer_map(); 4524 LPointerMap* pointers = instr->pointer_map();
4556 LEnvironment* env = instr->deoptimization_environment();
4557 RecordPosition(pointers->position()); 4525 RecordPosition(pointers->position());
4558 RegisterEnvironmentForDeoptimization(env); 4526 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
4559 SafepointGenerator safepoint_generator(this,
4560 pointers,
4561 env->deoptimization_index());
4562 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); 4527 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
4563 } 4528 }
4564 4529
4565 4530
4566 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { 4531 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
4567 { 4532 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4568 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4533 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
4569 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); 4534 RecordSafepointWithLazyDeopt(
4570 RegisterLazyDeoptimization( 4535 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
4571 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 4536 ASSERT(instr->HasEnvironment());
4572 } 4537 LEnvironment* env = instr->environment();
4573 4538 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4574 // The gap code includes the restoring of the safepoint registers.
4575 int pc = masm()->pc_offset();
4576 safepoints_.SetPcAfterGap(pc);
4577 } 4539 }
4578 4540
4579 4541
4580 void LCodeGen::DoStackCheck(LStackCheck* instr) { 4542 void LCodeGen::DoStackCheck(LStackCheck* instr) {
4581 class DeferredStackCheck: public LDeferredCode { 4543 class DeferredStackCheck: public LDeferredCode {
4582 public: 4544 public:
4583 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) 4545 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4584 : LDeferredCode(codegen), instr_(instr) { } 4546 : LDeferredCode(codegen), instr_(instr) { }
4585 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } 4547 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4586 virtual LInstruction* instr() { return instr_; } 4548 virtual LInstruction* instr() { return instr_; }
4587 private: 4549 private:
4588 LStackCheck* instr_; 4550 LStackCheck* instr_;
4589 }; 4551 };
4590 4552
4553 ASSERT(instr->HasEnvironment());
4554 LEnvironment* env = instr->environment();
4555 // There is no LLazyBailout instruction for stack-checks. We have to
4556 // prepare for lazy deoptimization explicitly here.
4591 if (instr->hydrogen()->is_function_entry()) { 4557 if (instr->hydrogen()->is_function_entry()) {
4592 // Perform stack overflow check. 4558 // Perform stack overflow check.
4593 Label done; 4559 Label done;
4594 __ LoadRoot(at, Heap::kStackLimitRootIndex); 4560 __ LoadRoot(at, Heap::kStackLimitRootIndex);
4595 __ Branch(&done, hs, sp, Operand(at)); 4561 __ Branch(&done, hs, sp, Operand(at));
4596 StackCheckStub stub; 4562 StackCheckStub stub;
4597 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 4563 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4564 EnsureSpaceForLazyDeopt();
4598 __ bind(&done); 4565 __ bind(&done);
4566 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4567 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4599 } else { 4568 } else {
4600 ASSERT(instr->hydrogen()->is_backwards_branch()); 4569 ASSERT(instr->hydrogen()->is_backwards_branch());
4601 // Perform stack overflow check if this goto needs it before jumping. 4570 // Perform stack overflow check if this goto needs it before jumping.
4602 DeferredStackCheck* deferred_stack_check = 4571 DeferredStackCheck* deferred_stack_check =
4603 new DeferredStackCheck(this, instr); 4572 new DeferredStackCheck(this, instr);
4604 __ LoadRoot(at, Heap::kStackLimitRootIndex); 4573 __ LoadRoot(at, Heap::kStackLimitRootIndex);
4605 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at)); 4574 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at));
4575 EnsureSpaceForLazyDeopt();
4606 __ bind(instr->done_label()); 4576 __ bind(instr->done_label());
4607 deferred_stack_check->SetExit(instr->done_label()); 4577 deferred_stack_check->SetExit(instr->done_label());
4578 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4579 // Don't record a deoptimization index for the safepoint here.
4580 // This will be done explicitly when emitting call and the safepoint in
4581 // the deferred code.
4608 } 4582 }
4609 } 4583 }
4610 4584
4611 4585
4612 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 4586 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
4613 // This is a pseudo-instruction that ensures that the environment here is 4587 // This is a pseudo-instruction that ensures that the environment here is
4614 // properly registered for deoptimization and records the assembler's PC 4588 // properly registered for deoptimization and records the assembler's PC
4615 // offset. 4589 // offset.
4616 LEnvironment* environment = instr->environment(); 4590 LEnvironment* environment = instr->environment();
4617 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), 4591 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
4618 instr->SpilledDoubleRegisterArray()); 4592 instr->SpilledDoubleRegisterArray());
4619 4593
4620 // If the environment were already registered, we would have no way of 4594 // If the environment were already registered, we would have no way of
4621 // backpatching it with the spill slot operands. 4595 // backpatching it with the spill slot operands.
4622 ASSERT(!environment->HasBeenRegistered()); 4596 ASSERT(!environment->HasBeenRegistered());
4623 RegisterEnvironmentForDeoptimization(environment); 4597 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
4624 ASSERT(osr_pc_offset_ == -1); 4598 ASSERT(osr_pc_offset_ == -1);
4625 osr_pc_offset_ = masm()->pc_offset(); 4599 osr_pc_offset_ = masm()->pc_offset();
4626 } 4600 }
4627 4601
4628 4602
4629 #undef __ 4603 #undef __
4630 4604
4631 } } // namespace v8::internal 4605 } } // namespace v8::internal