OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 22 matching lines...) Expand all Loading... |
33 #include "stub-cache.h" | 33 #include "stub-cache.h" |
34 | 34 |
35 namespace v8 { | 35 namespace v8 { |
36 namespace internal { | 36 namespace internal { |
37 | 37 |
38 | 38 |
39 class SafepointGenerator : public CallWrapper { | 39 class SafepointGenerator : public CallWrapper { |
40 public: | 40 public: |
41 SafepointGenerator(LCodeGen* codegen, | 41 SafepointGenerator(LCodeGen* codegen, |
42 LPointerMap* pointers, | 42 LPointerMap* pointers, |
43 int deoptimization_index) | 43 Safepoint::DeoptMode mode) |
44 : codegen_(codegen), | 44 : codegen_(codegen), |
45 pointers_(pointers), | 45 pointers_(pointers), |
46 deoptimization_index_(deoptimization_index) { } | 46 deopt_mode_(mode) { } |
47 virtual ~SafepointGenerator() { } | 47 virtual ~SafepointGenerator() { } |
48 | 48 |
49 virtual void BeforeCall(int call_size) const { | 49 virtual void BeforeCall(int call_size) const { } |
50 ASSERT(call_size >= 0); | |
51 // Ensure that we have enough space after the previous safepoint position | |
52 // for the generated code there. | |
53 int call_end = codegen_->masm()->pc_offset() + call_size; | |
54 int prev_jump_end = | |
55 codegen_->LastSafepointEnd() + Deoptimizer::patch_size(); | |
56 if (call_end < prev_jump_end) { | |
57 int padding_size = prev_jump_end - call_end; | |
58 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); | |
59 while (padding_size > 0) { | |
60 codegen_->masm()->nop(); | |
61 padding_size -= Assembler::kInstrSize; | |
62 } | |
63 } | |
64 } | |
65 | 50 |
66 virtual void AfterCall() const { | 51 virtual void AfterCall() const { |
67 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 52 codegen_->RecordSafepoint(pointers_, deopt_mode_); |
68 } | 53 } |
69 | 54 |
70 private: | 55 private: |
71 LCodeGen* codegen_; | 56 LCodeGen* codegen_; |
72 LPointerMap* pointers_; | 57 LPointerMap* pointers_; |
73 int deoptimization_index_; | 58 Safepoint::DeoptMode deopt_mode_; |
74 }; | 59 }; |
75 | 60 |
76 | 61 |
77 #define __ masm()-> | 62 #define __ masm()-> |
78 | 63 |
79 bool LCodeGen::GenerateCode() { | 64 bool LCodeGen::GenerateCode() { |
80 HPhase phase("Code generation", chunk()); | 65 HPhase phase("Code generation", chunk()); |
81 ASSERT(is_unused()); | 66 ASSERT(is_unused()); |
82 status_ = GENERATING; | 67 status_ = GENERATING; |
83 CpuFeatures::Scope scope(FPU); | 68 CpuFeatures::Scope scope(FPU); |
(...skipping 10 matching lines...) Expand all Loading... |
94 GenerateDeferredCode() && | 79 GenerateDeferredCode() && |
95 GenerateSafepointTable(); | 80 GenerateSafepointTable(); |
96 } | 81 } |
97 | 82 |
98 | 83 |
99 void LCodeGen::FinishCode(Handle<Code> code) { | 84 void LCodeGen::FinishCode(Handle<Code> code) { |
100 ASSERT(is_done()); | 85 ASSERT(is_done()); |
101 code->set_stack_slots(GetStackSlotCount()); | 86 code->set_stack_slots(GetStackSlotCount()); |
102 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 87 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
103 PopulateDeoptimizationData(code); | 88 PopulateDeoptimizationData(code); |
104 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); | |
105 } | 89 } |
106 | 90 |
107 | 91 |
108 void LCodeGen::Abort(const char* format, ...) { | 92 void LCodeGen::Abort(const char* format, ...) { |
109 if (FLAG_trace_bailout) { | 93 if (FLAG_trace_bailout) { |
110 SmartArrayPointer<char> name( | 94 SmartArrayPointer<char> name( |
111 info()->shared_info()->DebugName()->ToCString()); | 95 info()->shared_info()->DebugName()->ToCString()); |
112 PrintF("Aborting LCodeGen in @\"%s\": ", *name); | 96 PrintF("Aborting LCodeGen in @\"%s\": ", *name); |
113 va_list arguments; | 97 va_list arguments; |
114 va_start(arguments, format); | 98 va_start(arguments, format); |
(...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
191 if (heap_slots > 0) { | 175 if (heap_slots > 0) { |
192 Comment(";;; Allocate local context"); | 176 Comment(";;; Allocate local context"); |
193 // Argument to NewContext is the function, which is in a1. | 177 // Argument to NewContext is the function, which is in a1. |
194 __ push(a1); | 178 __ push(a1); |
195 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 179 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
196 FastNewContextStub stub(heap_slots); | 180 FastNewContextStub stub(heap_slots); |
197 __ CallStub(&stub); | 181 __ CallStub(&stub); |
198 } else { | 182 } else { |
199 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 183 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
200 } | 184 } |
201 RecordSafepoint(Safepoint::kNoDeoptimizationIndex); | 185 RecordSafepoint(Safepoint::kNoLazyDeopt); |
202 // Context is returned in both v0 and cp. It replaces the context | 186 // Context is returned in both v0 and cp. It replaces the context |
203 // passed to us. It's saved in the stack and kept live in cp. | 187 // passed to us. It's saved in the stack and kept live in cp. |
204 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 188 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
205 // Copy any necessary parameters into the context. | 189 // Copy any necessary parameters into the context. |
206 int num_parameters = scope()->num_parameters(); | 190 int num_parameters = scope()->num_parameters(); |
207 for (int i = 0; i < num_parameters; i++) { | 191 for (int i = 0; i < num_parameters; i++) { |
208 Variable* var = scope()->parameter(i); | 192 Variable* var = scope()->parameter(i); |
209 if (var->IsContextSlot()) { | 193 if (var->IsContextSlot()) { |
210 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 194 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
211 (num_parameters - 1 - i) * kPointerSize; | 195 (num_parameters - 1 - i) * kPointerSize; |
212 // Load parameter from stack. | 196 // Load parameter from stack. |
213 __ lw(a0, MemOperand(fp, parameter_offset)); | 197 __ lw(a0, MemOperand(fp, parameter_offset)); |
214 // Store it in the context. | 198 // Store it in the context. |
215 MemOperand target = ContextOperand(cp, var->index()); | 199 MemOperand target = ContextOperand(cp, var->index()); |
216 __ sw(a0, target); | 200 __ sw(a0, target); |
217 // Update the write barrier. This clobbers a3 and a0. | 201 // Update the write barrier. This clobbers a3 and a0. |
218 __ RecordWriteContextSlot( | 202 __ RecordWriteContextSlot( |
219 cp, target.offset(), a0, a3, kRAHasBeenSaved, kSaveFPRegs); | 203 cp, target.offset(), a0, a3, kRAHasBeenSaved, kSaveFPRegs); |
220 } | 204 } |
221 } | 205 } |
222 Comment(";;; End allocate local context"); | 206 Comment(";;; End allocate local context"); |
223 } | 207 } |
224 | 208 |
225 // Trace the call. | 209 // Trace the call. |
226 if (FLAG_trace) { | 210 if (FLAG_trace) { |
227 __ CallRuntime(Runtime::kTraceEnter, 0); | 211 __ CallRuntime(Runtime::kTraceEnter, 0); |
228 } | 212 } |
| 213 EnsureSpaceForLazyDeopt(); |
229 return !is_aborted(); | 214 return !is_aborted(); |
230 } | 215 } |
231 | 216 |
232 | 217 |
233 bool LCodeGen::GenerateBody() { | 218 bool LCodeGen::GenerateBody() { |
234 ASSERT(is_generating()); | 219 ASSERT(is_generating()); |
235 bool emit_instructions = true; | 220 bool emit_instructions = true; |
236 for (current_instruction_ = 0; | 221 for (current_instruction_ = 0; |
237 !is_aborted() && current_instruction_ < instructions_->length(); | 222 !is_aborted() && current_instruction_ < instructions_->length(); |
238 current_instruction_++) { | 223 current_instruction_++) { |
239 LInstruction* instr = instructions_->at(current_instruction_); | 224 LInstruction* instr = instructions_->at(current_instruction_); |
240 if (instr->IsLabel()) { | 225 if (instr->IsLabel()) { |
241 LLabel* label = LLabel::cast(instr); | 226 LLabel* label = LLabel::cast(instr); |
242 emit_instructions = !label->HasReplacement(); | 227 emit_instructions = !label->HasReplacement(); |
243 } | 228 } |
244 | 229 |
245 if (emit_instructions) { | 230 if (emit_instructions) { |
246 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); | 231 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); |
247 instr->CompileToNative(this); | 232 instr->CompileToNative(this); |
248 } | 233 } |
249 } | 234 } |
250 return !is_aborted(); | 235 return !is_aborted(); |
251 } | 236 } |
252 | 237 |
253 | 238 |
254 LInstruction* LCodeGen::GetNextInstruction() { | |
255 if (current_instruction_ < instructions_->length() - 1) { | |
256 return instructions_->at(current_instruction_ + 1); | |
257 } else { | |
258 return NULL; | |
259 } | |
260 } | |
261 | |
262 | |
263 bool LCodeGen::GenerateDeferredCode() { | 239 bool LCodeGen::GenerateDeferredCode() { |
264 ASSERT(is_generating()); | 240 ASSERT(is_generating()); |
265 if (deferred_.length() > 0) { | 241 if (deferred_.length() > 0) { |
266 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 242 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
267 LDeferredCode* code = deferred_[i]; | 243 LDeferredCode* code = deferred_[i]; |
268 __ bind(code->entry()); | 244 __ bind(code->entry()); |
269 Comment(";;; Deferred code @%d: %s.", | 245 Comment(";;; Deferred code @%d: %s.", |
270 code->instruction_index(), | 246 code->instruction_index(), |
271 code->instr()->Mnemonic()); | 247 code->instr()->Mnemonic()); |
272 code->Generate(); | 248 code->Generate(); |
273 __ jmp(code->exit()); | 249 __ jmp(code->exit()); |
274 } | 250 } |
275 | |
276 // Pad code to ensure that the last piece of deferred code have | |
277 // room for lazy bailout. | |
278 while ((masm()->pc_offset() - LastSafepointEnd()) | |
279 < Deoptimizer::patch_size()) { | |
280 __ nop(); | |
281 } | |
282 } | 251 } |
283 // Deferred code is the last part of the instruction sequence. Mark | 252 // Deferred code is the last part of the instruction sequence. Mark |
284 // the generated code as done unless we bailed out. | 253 // the generated code as done unless we bailed out. |
285 if (!is_aborted()) status_ = DONE; | 254 if (!is_aborted()) status_ = DONE; |
286 return !is_aborted(); | 255 return !is_aborted(); |
287 } | 256 } |
288 | 257 |
289 | 258 |
290 bool LCodeGen::GenerateDeoptJumpTable() { | 259 bool LCodeGen::GenerateDeoptJumpTable() { |
291 // TODO(plind): not clear that this will have advantage for MIPS. | 260 // TODO(plind): not clear that this will have advantage for MIPS. |
(...skipping 235 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
527 | 496 |
528 | 497 |
529 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 498 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
530 RelocInfo::Mode mode, | 499 RelocInfo::Mode mode, |
531 LInstruction* instr, | 500 LInstruction* instr, |
532 SafepointMode safepoint_mode) { | 501 SafepointMode safepoint_mode) { |
533 ASSERT(instr != NULL); | 502 ASSERT(instr != NULL); |
534 LPointerMap* pointers = instr->pointer_map(); | 503 LPointerMap* pointers = instr->pointer_map(); |
535 RecordPosition(pointers->position()); | 504 RecordPosition(pointers->position()); |
536 __ Call(code, mode); | 505 __ Call(code, mode); |
537 RegisterLazyDeoptimization(instr, safepoint_mode); | 506 RecordSafepointWithLazyDeopt(instr, safepoint_mode); |
538 } | 507 } |
539 | 508 |
540 | 509 |
541 void LCodeGen::CallRuntime(const Runtime::Function* function, | 510 void LCodeGen::CallRuntime(const Runtime::Function* function, |
542 int num_arguments, | 511 int num_arguments, |
543 LInstruction* instr) { | 512 LInstruction* instr) { |
544 ASSERT(instr != NULL); | 513 ASSERT(instr != NULL); |
545 LPointerMap* pointers = instr->pointer_map(); | 514 LPointerMap* pointers = instr->pointer_map(); |
546 ASSERT(pointers != NULL); | 515 ASSERT(pointers != NULL); |
547 RecordPosition(pointers->position()); | 516 RecordPosition(pointers->position()); |
548 | 517 |
549 __ CallRuntime(function, num_arguments); | 518 __ CallRuntime(function, num_arguments); |
550 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); | 519 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
551 } | 520 } |
552 | 521 |
553 | 522 |
554 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 523 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
555 int argc, | 524 int argc, |
556 LInstruction* instr) { | 525 LInstruction* instr) { |
557 __ CallRuntimeSaveDoubles(id); | 526 __ CallRuntimeSaveDoubles(id); |
558 RecordSafepointWithRegisters( | 527 RecordSafepointWithRegisters( |
559 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); | 528 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); |
560 } | 529 } |
561 | 530 |
562 | 531 |
563 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, | 532 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, |
564 SafepointMode safepoint_mode) { | 533 Safepoint::DeoptMode mode) { |
565 // Create the environment to bailout to. If the call has side effects | |
566 // execution has to continue after the call otherwise execution can continue | |
567 // from a previous bailout point repeating the call. | |
568 LEnvironment* deoptimization_environment; | |
569 if (instr->HasDeoptimizationEnvironment()) { | |
570 deoptimization_environment = instr->deoptimization_environment(); | |
571 } else { | |
572 deoptimization_environment = instr->environment(); | |
573 } | |
574 | |
575 RegisterEnvironmentForDeoptimization(deoptimization_environment); | |
576 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | |
577 RecordSafepoint(instr->pointer_map(), | |
578 deoptimization_environment->deoptimization_index()); | |
579 } else { | |
580 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
581 RecordSafepointWithRegisters( | |
582 instr->pointer_map(), | |
583 0, | |
584 deoptimization_environment->deoptimization_index()); | |
585 } | |
586 } | |
587 | |
588 | |
589 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { | |
590 if (!environment->HasBeenRegistered()) { | 534 if (!environment->HasBeenRegistered()) { |
591 // Physical stack frame layout: | 535 // Physical stack frame layout: |
592 // -x ............. -4 0 ..................................... y | 536 // -x ............. -4 0 ..................................... y |
593 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 537 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
594 | 538 |
595 // Layout of the environment: | 539 // Layout of the environment: |
596 // 0 ..................................................... size-1 | 540 // 0 ..................................................... size-1 |
597 // [parameters] [locals] [expression stack including arguments] | 541 // [parameters] [locals] [expression stack including arguments] |
598 | 542 |
599 // Layout of the translation: | 543 // Layout of the translation: |
600 // 0 ........................................................ size - 1 + 4 | 544 // 0 ........................................................ size - 1 + 4 |
601 // [expression stack including arguments] [locals] [4 words] [parameters] | 545 // [expression stack including arguments] [locals] [4 words] [parameters] |
602 // |>------------ translation_size ------------<| | 546 // |>------------ translation_size ------------<| |
603 | 547 |
604 int frame_count = 0; | 548 int frame_count = 0; |
605 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { | 549 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { |
606 ++frame_count; | 550 ++frame_count; |
607 } | 551 } |
608 Translation translation(&translations_, frame_count); | 552 Translation translation(&translations_, frame_count); |
609 WriteTranslation(environment, &translation); | 553 WriteTranslation(environment, &translation); |
610 int deoptimization_index = deoptimizations_.length(); | 554 int deoptimization_index = deoptimizations_.length(); |
611 environment->Register(deoptimization_index, translation.index()); | 555 int pc_offset = masm()->pc_offset(); |
| 556 environment->Register(deoptimization_index, |
| 557 translation.index(), |
| 558 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
612 deoptimizations_.Add(environment); | 559 deoptimizations_.Add(environment); |
613 } | 560 } |
614 } | 561 } |
615 | 562 |
616 | 563 |
617 void LCodeGen::DeoptimizeIf(Condition cc, | 564 void LCodeGen::DeoptimizeIf(Condition cc, |
618 LEnvironment* environment, | 565 LEnvironment* environment, |
619 Register src1, | 566 Register src1, |
620 const Operand& src2) { | 567 const Operand& src2) { |
621 RegisterEnvironmentForDeoptimization(environment); | 568 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
622 ASSERT(environment->HasBeenRegistered()); | 569 ASSERT(environment->HasBeenRegistered()); |
623 int id = environment->deoptimization_index(); | 570 int id = environment->deoptimization_index(); |
624 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); | 571 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); |
625 ASSERT(entry != NULL); | 572 ASSERT(entry != NULL); |
626 if (entry == NULL) { | 573 if (entry == NULL) { |
627 Abort("bailout was not prepared"); | 574 Abort("bailout was not prepared"); |
628 return; | 575 return; |
629 } | 576 } |
630 | 577 |
631 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. | 578 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
676 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); | 623 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); |
677 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); | 624 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); |
678 | 625 |
679 // Populate the deoptimization entries. | 626 // Populate the deoptimization entries. |
680 for (int i = 0; i < length; i++) { | 627 for (int i = 0; i < length; i++) { |
681 LEnvironment* env = deoptimizations_[i]; | 628 LEnvironment* env = deoptimizations_[i]; |
682 data->SetAstId(i, Smi::FromInt(env->ast_id())); | 629 data->SetAstId(i, Smi::FromInt(env->ast_id())); |
683 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); | 630 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); |
684 data->SetArgumentsStackHeight(i, | 631 data->SetArgumentsStackHeight(i, |
685 Smi::FromInt(env->arguments_stack_height())); | 632 Smi::FromInt(env->arguments_stack_height())); |
| 633 data->SetPc(i, Smi::FromInt(env->pc_offset())); |
686 } | 634 } |
687 code->set_deoptimization_data(*data); | 635 code->set_deoptimization_data(*data); |
688 } | 636 } |
689 | 637 |
690 | 638 |
691 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { | 639 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { |
692 int result = deoptimization_literals_.length(); | 640 int result = deoptimization_literals_.length(); |
693 for (int i = 0; i < deoptimization_literals_.length(); ++i) { | 641 for (int i = 0; i < deoptimization_literals_.length(); ++i) { |
694 if (deoptimization_literals_[i].is_identical_to(literal)) return i; | 642 if (deoptimization_literals_[i].is_identical_to(literal)) return i; |
695 } | 643 } |
(...skipping 11 matching lines...) Expand all Loading... |
707 for (int i = 0, length = inlined_closures->length(); | 655 for (int i = 0, length = inlined_closures->length(); |
708 i < length; | 656 i < length; |
709 i++) { | 657 i++) { |
710 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 658 DefineDeoptimizationLiteral(inlined_closures->at(i)); |
711 } | 659 } |
712 | 660 |
713 inlined_function_count_ = deoptimization_literals_.length(); | 661 inlined_function_count_ = deoptimization_literals_.length(); |
714 } | 662 } |
715 | 663 |
716 | 664 |
| 665 void LCodeGen::RecordSafepointWithLazyDeopt( |
| 666 LInstruction* instr, SafepointMode safepoint_mode) { |
| 667 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { |
| 668 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt); |
| 669 } else { |
| 670 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
| 671 RecordSafepointWithRegisters( |
| 672 instr->pointer_map(), 0, Safepoint::kLazyDeopt); |
| 673 } |
| 674 } |
| 675 |
| 676 |
717 void LCodeGen::RecordSafepoint( | 677 void LCodeGen::RecordSafepoint( |
718 LPointerMap* pointers, | 678 LPointerMap* pointers, |
719 Safepoint::Kind kind, | 679 Safepoint::Kind kind, |
720 int arguments, | 680 int arguments, |
721 int deoptimization_index) { | 681 Safepoint::DeoptMode deopt_mode) { |
722 ASSERT(expected_safepoint_kind_ == kind); | 682 ASSERT(expected_safepoint_kind_ == kind); |
723 | 683 |
724 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); | 684 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); |
725 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 685 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
726 kind, arguments, deoptimization_index); | 686 kind, arguments, deopt_mode); |
727 for (int i = 0; i < operands->length(); i++) { | 687 for (int i = 0; i < operands->length(); i++) { |
728 LOperand* pointer = operands->at(i); | 688 LOperand* pointer = operands->at(i); |
729 if (pointer->IsStackSlot()) { | 689 if (pointer->IsStackSlot()) { |
730 safepoint.DefinePointerSlot(pointer->index()); | 690 safepoint.DefinePointerSlot(pointer->index()); |
731 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 691 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
732 safepoint.DefinePointerRegister(ToRegister(pointer)); | 692 safepoint.DefinePointerRegister(ToRegister(pointer)); |
733 } | 693 } |
734 } | 694 } |
735 if (kind & Safepoint::kWithRegisters) { | 695 if (kind & Safepoint::kWithRegisters) { |
736 // Register cp always contains a pointer to the context. | 696 // Register cp always contains a pointer to the context. |
737 safepoint.DefinePointerRegister(cp); | 697 safepoint.DefinePointerRegister(cp); |
738 } | 698 } |
739 } | 699 } |
740 | 700 |
741 | 701 |
742 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 702 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
743 int deoptimization_index) { | 703 Safepoint::DeoptMode deopt_mode) { |
744 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); | 704 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode); |
745 } | 705 } |
746 | 706 |
747 | 707 |
748 void LCodeGen::RecordSafepoint(int deoptimization_index) { | 708 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { |
749 LPointerMap empty_pointers(RelocInfo::kNoPosition); | 709 LPointerMap empty_pointers(RelocInfo::kNoPosition); |
750 RecordSafepoint(&empty_pointers, deoptimization_index); | 710 RecordSafepoint(&empty_pointers, deopt_mode); |
751 } | 711 } |
752 | 712 |
753 | 713 |
754 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, | 714 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, |
755 int arguments, | 715 int arguments, |
756 int deoptimization_index) { | 716 Safepoint::DeoptMode deopt_mode) { |
757 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, | 717 RecordSafepoint( |
758 deoptimization_index); | 718 pointers, Safepoint::kWithRegisters, arguments, deopt_mode); |
759 } | 719 } |
760 | 720 |
761 | 721 |
762 void LCodeGen::RecordSafepointWithRegistersAndDoubles( | 722 void LCodeGen::RecordSafepointWithRegistersAndDoubles( |
763 LPointerMap* pointers, | 723 LPointerMap* pointers, |
764 int arguments, | 724 int arguments, |
765 int deoptimization_index) { | 725 Safepoint::DeoptMode deopt_mode) { |
766 RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments, | 726 RecordSafepoint( |
767 deoptimization_index); | 727 pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode); |
768 } | 728 } |
769 | 729 |
770 | 730 |
771 void LCodeGen::RecordPosition(int position) { | 731 void LCodeGen::RecordPosition(int position) { |
772 if (position == RelocInfo::kNoPosition) return; | 732 if (position == RelocInfo::kNoPosition) return; |
773 masm()->positions_recorder()->RecordPosition(position); | 733 masm()->positions_recorder()->RecordPosition(position); |
774 } | 734 } |
775 | 735 |
776 | 736 |
777 void LCodeGen::DoLabel(LLabel* label) { | 737 void LCodeGen::DoLabel(LLabel* label) { |
(...skipping 14 matching lines...) Expand all Loading... |
792 | 752 |
793 | 753 |
794 void LCodeGen::DoGap(LGap* gap) { | 754 void LCodeGen::DoGap(LGap* gap) { |
795 for (int i = LGap::FIRST_INNER_POSITION; | 755 for (int i = LGap::FIRST_INNER_POSITION; |
796 i <= LGap::LAST_INNER_POSITION; | 756 i <= LGap::LAST_INNER_POSITION; |
797 i++) { | 757 i++) { |
798 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); | 758 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); |
799 LParallelMove* move = gap->GetParallelMove(inner_pos); | 759 LParallelMove* move = gap->GetParallelMove(inner_pos); |
800 if (move != NULL) DoParallelMove(move); | 760 if (move != NULL) DoParallelMove(move); |
801 } | 761 } |
802 | |
803 LInstruction* next = GetNextInstruction(); | |
804 if (next != NULL && next->IsLazyBailout()) { | |
805 int pc = masm()->pc_offset(); | |
806 safepoints_.SetPcAfterGap(pc); | |
807 } | |
808 } | 762 } |
809 | 763 |
810 | 764 |
811 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { | 765 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { |
812 DoGap(instr); | 766 DoGap(instr); |
813 } | 767 } |
814 | 768 |
815 | 769 |
816 void LCodeGen::DoParameter(LParameter* instr) { | 770 void LCodeGen::DoParameter(LParameter* instr) { |
817 // Nothing to do. | 771 // Nothing to do. |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
867 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { | 821 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { |
868 // Nothing to do. | 822 // Nothing to do. |
869 } | 823 } |
870 | 824 |
871 | 825 |
872 void LCodeGen::DoModI(LModI* instr) { | 826 void LCodeGen::DoModI(LModI* instr) { |
873 Register scratch = scratch0(); | 827 Register scratch = scratch0(); |
874 const Register left = ToRegister(instr->InputAt(0)); | 828 const Register left = ToRegister(instr->InputAt(0)); |
875 const Register result = ToRegister(instr->result()); | 829 const Register result = ToRegister(instr->result()); |
876 | 830 |
877 // p2constant holds the right side value if it's a power of 2 constant. | 831 Label done; |
878 // In other cases it is 0. | |
879 int32_t p2constant = 0; | |
880 | 832 |
881 if (instr->InputAt(1)->IsConstantOperand()) { | 833 if (instr->hydrogen()->HasPowerOf2Divisor()) { |
882 p2constant = ToInteger32(LConstantOperand::cast(instr->InputAt(1))); | 834 Register scratch = scratch0(); |
883 if (p2constant % 2 != 0) { | 835 ASSERT(!left.is(scratch)); |
884 p2constant = 0; | 836 __ mov(scratch, left); |
885 } | 837 int32_t p2constant = HConstant::cast( |
886 // Result always takes the sign of the dividend (left). | 838 instr->hydrogen()->right())->Integer32Value(); |
887 p2constant = abs(p2constant); | 839 ASSERT(p2constant != 0); |
| 840 // Result always takes the sign of the dividend (left). |
| 841 p2constant = abs(p2constant); |
| 842 |
| 843 Label positive_dividend; |
| 844 __ Branch(USE_DELAY_SLOT, &positive_dividend, ge, left, Operand(zero_reg)); |
| 845 __ subu(result, zero_reg, left); |
| 846 __ And(result, result, p2constant - 1); |
| 847 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 848 DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg)); |
| 849 } |
| 850 __ Branch(USE_DELAY_SLOT, &done); |
| 851 __ subu(result, zero_reg, result); |
| 852 __ bind(&positive_dividend); |
| 853 __ And(result, scratch, p2constant - 1); |
| 854 } else { |
| 855 // div runs in the background while we check for special cases. |
| 856 Register right = EmitLoadRegister(instr->InputAt(1), scratch); |
| 857 __ div(left, right); |
| 858 |
| 859 // Check for x % 0. |
| 860 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { |
| 861 DeoptimizeIf(eq, instr->environment(), right, Operand(zero_reg)); |
| 862 } |
| 863 |
| 864 __ Branch(USE_DELAY_SLOT, &done, ge, left, Operand(zero_reg)); |
| 865 __ mfhi(result); |
| 866 |
| 867 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 868 DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg)); |
| 869 } |
888 } | 870 } |
889 | 871 __ bind(&done); |
890 // div runs in the background while we check for special cases. | |
891 Register right = EmitLoadRegister(instr->InputAt(1), scratch); | |
892 __ div(left, right); | |
893 | |
894 // Check for x % 0. | |
895 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { | |
896 DeoptimizeIf(eq, instr->environment(), right, Operand(zero_reg)); | |
897 } | |
898 | |
899 Label skip_div, do_div; | |
900 if (p2constant != 0) { | |
901 // Fall back to the result of the div instruction if we could have sign | |
902 // problems. | |
903 __ Branch(&do_div, lt, left, Operand(zero_reg)); | |
904 // Modulo by masking. | |
905 __ And(scratch, left, p2constant - 1); | |
906 __ Branch(&skip_div); | |
907 } | |
908 | |
909 __ bind(&do_div); | |
910 __ mfhi(scratch); | |
911 __ bind(&skip_div); | |
912 | |
913 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | |
914 // Result always takes the sign of the dividend (left). | |
915 Label done; | |
916 __ Branch(USE_DELAY_SLOT, &done, ge, left, Operand(zero_reg)); | |
917 __ mov(result, scratch); | |
918 DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg)); | |
919 __ bind(&done); | |
920 } else { | |
921 __ Move(result, scratch); | |
922 } | |
923 } | 872 } |
924 | 873 |
925 | 874 |
926 void LCodeGen::DoDivI(LDivI* instr) { | 875 void LCodeGen::DoDivI(LDivI* instr) { |
927 const Register left = ToRegister(instr->InputAt(0)); | 876 const Register left = ToRegister(instr->InputAt(0)); |
928 const Register right = ToRegister(instr->InputAt(1)); | 877 const Register right = ToRegister(instr->InputAt(1)); |
929 const Register result = ToRegister(instr->result()); | 878 const Register result = ToRegister(instr->result()); |
930 | 879 |
931 // On MIPS div is asynchronous - it will run in the background while we | 880 // On MIPS div is asynchronous - it will run in the background while we |
932 // check for special cases. | 881 // check for special cases. |
(...skipping 746 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1679 __ lw(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); | 1628 __ lw(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); |
1680 __ lbu(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset)); | 1629 __ lbu(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset)); |
1681 __ And(scratch, scratch, 1 << Map::kIsUndetectable); | 1630 __ And(scratch, scratch, 1 << Map::kIsUndetectable); |
1682 EmitBranch(true_block, false_block, ne, scratch, Operand(zero_reg)); | 1631 EmitBranch(true_block, false_block, ne, scratch, Operand(zero_reg)); |
1683 } | 1632 } |
1684 } | 1633 } |
1685 | 1634 |
1686 | 1635 |
1687 Condition LCodeGen::EmitIsObject(Register input, | 1636 Condition LCodeGen::EmitIsObject(Register input, |
1688 Register temp1, | 1637 Register temp1, |
| 1638 Register temp2, |
1689 Label* is_not_object, | 1639 Label* is_not_object, |
1690 Label* is_object) { | 1640 Label* is_object) { |
1691 Register temp2 = scratch0(); | |
1692 __ JumpIfSmi(input, is_not_object); | 1641 __ JumpIfSmi(input, is_not_object); |
1693 | 1642 |
1694 __ LoadRoot(temp2, Heap::kNullValueRootIndex); | 1643 __ LoadRoot(temp2, Heap::kNullValueRootIndex); |
1695 __ Branch(is_object, eq, input, Operand(temp2)); | 1644 __ Branch(is_object, eq, input, Operand(temp2)); |
1696 | 1645 |
1697 // Load map. | 1646 // Load map. |
1698 __ lw(temp1, FieldMemOperand(input, HeapObject::kMapOffset)); | 1647 __ lw(temp1, FieldMemOperand(input, HeapObject::kMapOffset)); |
1699 // Undetectable objects behave like undefined. | 1648 // Undetectable objects behave like undefined. |
1700 __ lbu(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset)); | 1649 __ lbu(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset)); |
1701 __ And(temp2, temp2, Operand(1 << Map::kIsUndetectable)); | 1650 __ And(temp2, temp2, Operand(1 << Map::kIsUndetectable)); |
(...skipping 12 matching lines...) Expand all Loading... |
1714 Register reg = ToRegister(instr->InputAt(0)); | 1663 Register reg = ToRegister(instr->InputAt(0)); |
1715 Register temp1 = ToRegister(instr->TempAt(0)); | 1664 Register temp1 = ToRegister(instr->TempAt(0)); |
1716 Register temp2 = scratch0(); | 1665 Register temp2 = scratch0(); |
1717 | 1666 |
1718 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1667 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
1719 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1668 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
1720 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 1669 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
1721 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 1670 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
1722 | 1671 |
1723 Condition true_cond = | 1672 Condition true_cond = |
1724 EmitIsObject(reg, temp1, false_label, true_label); | 1673 EmitIsObject(reg, temp1, temp2, false_label, true_label); |
1725 | 1674 |
1726 EmitBranch(true_block, false_block, true_cond, temp2, | 1675 EmitBranch(true_block, false_block, true_cond, temp2, |
1727 Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 1676 Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
1728 } | 1677 } |
1729 | 1678 |
1730 | 1679 |
1731 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { | 1680 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { |
1732 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1681 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
1733 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1682 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
1734 | 1683 |
(...skipping 199 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1934 } | 1883 } |
1935 | 1884 |
1936 | 1885 |
1937 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 1886 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
1938 class DeferredInstanceOfKnownGlobal: public LDeferredCode { | 1887 class DeferredInstanceOfKnownGlobal: public LDeferredCode { |
1939 public: | 1888 public: |
1940 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 1889 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, |
1941 LInstanceOfKnownGlobal* instr) | 1890 LInstanceOfKnownGlobal* instr) |
1942 : LDeferredCode(codegen), instr_(instr) { } | 1891 : LDeferredCode(codegen), instr_(instr) { } |
1943 virtual void Generate() { | 1892 virtual void Generate() { |
1944 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); | 1893 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); |
1945 } | 1894 } |
1946 virtual LInstruction* instr() { return instr_; } | 1895 virtual LInstruction* instr() { return instr_; } |
1947 Label* map_check() { return &map_check_; } | 1896 Label* map_check() { return &map_check_; } |
1948 | 1897 |
1949 private: | 1898 private: |
1950 LInstanceOfKnownGlobal* instr_; | 1899 LInstanceOfKnownGlobal* instr_; |
1951 Label map_check_; | 1900 Label map_check_; |
1952 }; | 1901 }; |
1953 | 1902 |
1954 DeferredInstanceOfKnownGlobal* deferred; | 1903 DeferredInstanceOfKnownGlobal* deferred; |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2002 __ bind(&false_result); | 1951 __ bind(&false_result); |
2003 __ LoadRoot(result, Heap::kFalseValueRootIndex); | 1952 __ LoadRoot(result, Heap::kFalseValueRootIndex); |
2004 | 1953 |
2005 // Here result has either true or false. Deferred code also produces true or | 1954 // Here result has either true or false. Deferred code also produces true or |
2006 // false object. | 1955 // false object. |
2007 __ bind(deferred->exit()); | 1956 __ bind(deferred->exit()); |
2008 __ bind(&done); | 1957 __ bind(&done); |
2009 } | 1958 } |
2010 | 1959 |
2011 | 1960 |
2012 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 1961 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
2013 Label* map_check) { | 1962 Label* map_check) { |
2014 Register result = ToRegister(instr->result()); | 1963 Register result = ToRegister(instr->result()); |
2015 ASSERT(result.is(v0)); | 1964 ASSERT(result.is(v0)); |
2016 | 1965 |
2017 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | 1966 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
2018 flags = static_cast<InstanceofStub::Flags>( | 1967 flags = static_cast<InstanceofStub::Flags>( |
2019 flags | InstanceofStub::kArgsInRegisters); | 1968 flags | InstanceofStub::kArgsInRegisters); |
2020 flags = static_cast<InstanceofStub::Flags>( | 1969 flags = static_cast<InstanceofStub::Flags>( |
2021 flags | InstanceofStub::kCallSiteInlineCheck); | 1970 flags | InstanceofStub::kCallSiteInlineCheck); |
2022 flags = static_cast<InstanceofStub::Flags>( | 1971 flags = static_cast<InstanceofStub::Flags>( |
2023 flags | InstanceofStub::kReturnTrueFalseObject); | 1972 flags | InstanceofStub::kReturnTrueFalseObject); |
(...skipping 13 matching lines...) Expand all Loading... |
2037 __ bind(&before_push_delta); | 1986 __ bind(&before_push_delta); |
2038 { | 1987 { |
2039 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 1988 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
2040 __ li(temp, Operand(delta * kPointerSize), true); | 1989 __ li(temp, Operand(delta * kPointerSize), true); |
2041 __ StoreToSafepointRegisterSlot(temp, temp); | 1990 __ StoreToSafepointRegisterSlot(temp, temp); |
2042 } | 1991 } |
2043 CallCodeGeneric(stub.GetCode(), | 1992 CallCodeGeneric(stub.GetCode(), |
2044 RelocInfo::CODE_TARGET, | 1993 RelocInfo::CODE_TARGET, |
2045 instr, | 1994 instr, |
2046 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 1995 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
| 1996 ASSERT(instr->HasDeoptimizationEnvironment()); |
| 1997 LEnvironment* env = instr->deoptimization_environment(); |
| 1998 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
2047 // Put the result value into the result register slot and | 1999 // Put the result value into the result register slot and |
2048 // restore all registers. | 2000 // restore all registers. |
2049 __ StoreToSafepointRegisterSlot(result, result); | 2001 __ StoreToSafepointRegisterSlot(result, result); |
2050 } | 2002 } |
2051 | 2003 |
2052 | 2004 |
2053 static Condition ComputeCompareCondition(Token::Value op) { | 2005 static Condition ComputeCompareCondition(Token::Value op) { |
2054 switch (op) { | 2006 switch (op) { |
2055 case Token::EQ_STRICT: | 2007 case Token::EQ_STRICT: |
2056 case Token::EQ: | 2008 case Token::EQ: |
(...skipping 606 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2663 __ Addu(scratch, elements, scratch); | 2615 __ Addu(scratch, elements, scratch); |
2664 __ lw(scratch, MemOperand(scratch)); | 2616 __ lw(scratch, MemOperand(scratch)); |
2665 __ push(scratch); | 2617 __ push(scratch); |
2666 __ Subu(length, length, Operand(1)); | 2618 __ Subu(length, length, Operand(1)); |
2667 __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg)); | 2619 __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg)); |
2668 __ sll(scratch, length, 2); | 2620 __ sll(scratch, length, 2); |
2669 | 2621 |
2670 __ bind(&invoke); | 2622 __ bind(&invoke); |
2671 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 2623 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
2672 LPointerMap* pointers = instr->pointer_map(); | 2624 LPointerMap* pointers = instr->pointer_map(); |
2673 LEnvironment* env = instr->deoptimization_environment(); | |
2674 RecordPosition(pointers->position()); | 2625 RecordPosition(pointers->position()); |
2675 RegisterEnvironmentForDeoptimization(env); | 2626 SafepointGenerator safepoint_generator( |
2676 SafepointGenerator safepoint_generator(this, | 2627 this, pointers, Safepoint::kLazyDeopt); |
2677 pointers, | |
2678 env->deoptimization_index()); | |
2679 // The number of arguments is stored in receiver which is a0, as expected | 2628 // The number of arguments is stored in receiver which is a0, as expected |
2680 // by InvokeFunction. | 2629 // by InvokeFunction. |
2681 v8::internal::ParameterCount actual(receiver); | 2630 v8::internal::ParameterCount actual(receiver); |
2682 __ InvokeFunction(function, actual, CALL_FUNCTION, | 2631 __ InvokeFunction(function, actual, CALL_FUNCTION, |
2683 safepoint_generator, CALL_AS_METHOD); | 2632 safepoint_generator, CALL_AS_METHOD); |
2684 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2633 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2685 } | 2634 } |
2686 | 2635 |
2687 | 2636 |
2688 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 2637 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2751 | 2700 |
2752 LPointerMap* pointers = instr->pointer_map(); | 2701 LPointerMap* pointers = instr->pointer_map(); |
2753 RecordPosition(pointers->position()); | 2702 RecordPosition(pointers->position()); |
2754 | 2703 |
2755 // Invoke function. | 2704 // Invoke function. |
2756 __ SetCallKind(t1, call_kind); | 2705 __ SetCallKind(t1, call_kind); |
2757 __ lw(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | 2706 __ lw(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); |
2758 __ Call(at); | 2707 __ Call(at); |
2759 | 2708 |
2760 // Setup deoptimization. | 2709 // Setup deoptimization. |
2761 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); | 2710 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
2762 | 2711 |
2763 // Restore context. | 2712 // Restore context. |
2764 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2713 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2765 } | 2714 } |
2766 | 2715 |
2767 | 2716 |
2768 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2717 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
2769 ASSERT(ToRegister(instr->result()).is(v0)); | 2718 ASSERT(ToRegister(instr->result()).is(v0)); |
2770 __ mov(a0, v0); | 2719 __ mov(a0, v0); |
2771 __ li(a1, Operand(instr->function())); | 2720 __ li(a1, Operand(instr->function())); |
(...skipping 356 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3128 UNREACHABLE(); | 3077 UNREACHABLE(); |
3129 } | 3078 } |
3130 } | 3079 } |
3131 | 3080 |
3132 | 3081 |
3133 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3082 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
3134 ASSERT(ToRegister(instr->function()).is(a1)); | 3083 ASSERT(ToRegister(instr->function()).is(a1)); |
3135 ASSERT(instr->HasPointerMap()); | 3084 ASSERT(instr->HasPointerMap()); |
3136 ASSERT(instr->HasDeoptimizationEnvironment()); | 3085 ASSERT(instr->HasDeoptimizationEnvironment()); |
3137 LPointerMap* pointers = instr->pointer_map(); | 3086 LPointerMap* pointers = instr->pointer_map(); |
3138 LEnvironment* env = instr->deoptimization_environment(); | |
3139 RecordPosition(pointers->position()); | 3087 RecordPosition(pointers->position()); |
3140 RegisterEnvironmentForDeoptimization(env); | 3088 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
3141 SafepointGenerator generator(this, pointers, env->deoptimization_index()); | |
3142 ParameterCount count(instr->arity()); | 3089 ParameterCount count(instr->arity()); |
3143 __ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); | 3090 __ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); |
3144 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3091 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
3145 } | 3092 } |
3146 | 3093 |
3147 | 3094 |
3148 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 3095 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { |
3149 ASSERT(ToRegister(instr->result()).is(v0)); | 3096 ASSERT(ToRegister(instr->result()).is(v0)); |
3150 | 3097 |
3151 int arity = instr->arity(); | 3098 int arity = instr->arity(); |
(...skipping 1359 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4511 __ Branch(&check_frame_marker, ne, temp2, | 4458 __ Branch(&check_frame_marker, ne, temp2, |
4512 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 4459 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
4513 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); | 4460 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); |
4514 | 4461 |
4515 // Check the marker in the calling frame. | 4462 // Check the marker in the calling frame. |
4516 __ bind(&check_frame_marker); | 4463 __ bind(&check_frame_marker); |
4517 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); | 4464 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); |
4518 } | 4465 } |
4519 | 4466 |
4520 | 4467 |
// Pads the instruction stream with nops so that at least
// Deoptimizer::patch_size() bytes separate the previous lazy-bailout
// point from the current position, presumably so that patching one
// lazy-deopt site cannot overwrite its neighbor — confirm against
// Deoptimizer::patch_size() semantics.
void LCodeGen::EnsureSpaceForLazyDeopt() {
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  int patch_size = Deoptimizer::patch_size();
  if (current_pc < last_lazy_deopt_pc_ + patch_size) {
    int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
    // MIPS instructions are fixed-width, so the pad must be a whole
    // number of instructions.
    ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
    while (padding_size > 0) {
      __ nop();
      padding_size -= Assembler::kInstrSize;
    }
  }
  // NOTE(review): this records the offset captured BEFORE the padding
  // was emitted; verify the intent is not masm()->pc_offset() here.
  last_lazy_deopt_pc_ = current_pc;
}
| 4483 |
| 4484 |
// Captures the environment after a preceding call so the safepoint data
// can map that call to a deoptimization entry.  Emits no code of its
// own beyond the nop padding from EnsureSpaceForLazyDeopt().
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  EnsureSpaceForLazyDeopt();
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // Register the environment for lazy deoptimization, then attach its
  // deoptimization index to the safepoint table.
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
4525 | 4492 |
4526 | 4493 |
// Unconditional deoptimization: condition 'al' with zero_reg compared
// against zero always takes the deopt exit.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg));
}
4530 | 4497 |
4531 | 4498 |
// Implements 'delete object[key]' by pushing the object, the key and
// the current strict-mode flag and invoking the DELETE builtin.
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  Register strict = scratch0();
  // The strict-mode flag is passed as a Smi.
  __ li(strict, Operand(Smi::FromInt(strict_mode_flag())));
  __ Push(object, key, strict);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  // The generator records a lazy-deopt-capable safepoint when
  // InvokeBuiltin emits the call.
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}
4548 | 4512 |
4549 | 4513 |
4550 void LCodeGen::DoIn(LIn* instr) { | 4514 void LCodeGen::DoIn(LIn* instr) { |
4551 Register obj = ToRegister(instr->object()); | 4515 Register obj = ToRegister(instr->object()); |
4552 Register key = ToRegister(instr->key()); | 4516 Register key = ToRegister(instr->key()); |
4553 __ Push(key, obj); | 4517 __ Push(key, obj); |
4554 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 4518 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
4555 LPointerMap* pointers = instr->pointer_map(); | 4519 LPointerMap* pointers = instr->pointer_map(); |
4556 LEnvironment* env = instr->deoptimization_environment(); | |
4557 RecordPosition(pointers->position()); | 4520 RecordPosition(pointers->position()); |
4558 RegisterEnvironmentForDeoptimization(env); | 4521 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt); |
4559 SafepointGenerator safepoint_generator(this, | |
4560 pointers, | |
4561 env->deoptimization_index()); | |
4562 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); | 4522 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); |
4563 } | 4523 } |
4564 | 4524 |
4565 | 4525 |
// Out-of-line part of DoStackCheck: calls the StackGuard runtime
// function with all registers saved and records a safepoint that
// supports lazy deoptimization.
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithLazyDeopt(
      instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // The environment was registered in DoStackCheck; here we only attach
  // its deoptimization index to the safepoint recorded above.
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
4578 | 4535 |
4579 | 4536 |
// Emits a stack-limit check, either at function entry (inline call to
// StackCheckStub) or at a backwards branch (deferred runtime call via
// DoDeferredStackCheck).
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Deferred code that performs the actual runtime stack-guard call.
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LStackCheck* instr_;
  };

  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // There is no LLazyBailout instruction for stack-checks. We have to
  // prepare for lazy deoptimization explicitly here.
  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    // Skip the stub call while sp is at or above the stack limit.
    __ Branch(&done, hs, sp, Operand(at));
    StackCheckStub stub;
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    EnsureSpaceForLazyDeopt();
    __ bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.
    DeferredStackCheck* deferred_stack_check =
        new DeferredStackCheck(this, instr);
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    // Jump to the deferred runtime call when sp is below the limit.
    __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at));
    EnsureSpaceForLazyDeopt();
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    // Don't record a deoptimization index for the safepoint here.
    // This will be done explicitly when emitting call and the safepoint in
    // the deferred code.
  }
}
4610 | 4579 |
4611 | 4580 |
// This is a pseudo-instruction that ensures that the environment here is
// properly registered for deoptimization and records the assembler's PC
// offset.  Emits no machine code.
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  LEnvironment* environment = instr->environment();
  // Attach the spilled-register layout so the OSR entry can reconstruct
  // register state from spill slots.
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  // Only one OSR entry per compiled function is expected.
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
4627 | 4596 |
4628 | 4597 |
4629 #undef __ | 4598 #undef __ |
4630 | 4599 |
4631 } } // namespace v8::internal | 4600 } } // namespace v8::internal |
OLD | NEW |