OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 26 matching lines...) | |
37 namespace v8 { | 37 namespace v8 { |
38 namespace internal { | 38 namespace internal { |
39 | 39 |
40 | 40 |
41 // When invoking builtins, we need to record the safepoint in the middle of | 41 // When invoking builtins, we need to record the safepoint in the middle of |
42 // the invoke instruction sequence generated by the macro assembler. | 42 // the invoke instruction sequence generated by the macro assembler. |
43 class SafepointGenerator : public CallWrapper { | 43 class SafepointGenerator : public CallWrapper { |
44 public: | 44 public: |
45 SafepointGenerator(LCodeGen* codegen, | 45 SafepointGenerator(LCodeGen* codegen, |
46 LPointerMap* pointers, | 46 LPointerMap* pointers, |
47 int deoptimization_index) | 47 Safepoint::DeoptMode mode) |
48 : codegen_(codegen), | 48 : codegen_(codegen), |
49 pointers_(pointers), | 49 pointers_(pointers), |
50 deoptimization_index_(deoptimization_index) {} | 50 deopt_mode_(mode) {} |
51 virtual ~SafepointGenerator() { } | 51 virtual ~SafepointGenerator() { } |
52 | 52 |
53 virtual void BeforeCall(int call_size) const {} | 53 virtual void BeforeCall(int call_size) const {} |
54 | 54 |
55 virtual void AfterCall() const { | 55 virtual void AfterCall() const { |
56 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 56 codegen_->RecordSafepoint(pointers_, deopt_mode_); |
57 } | 57 } |
58 | 58 |
59 private: | 59 private: |
60 LCodeGen* codegen_; | 60 LCodeGen* codegen_; |
61 LPointerMap* pointers_; | 61 LPointerMap* pointers_; |
62 int deoptimization_index_; | 62 Safepoint::DeoptMode deopt_mode_; |
63 }; | 63 }; |
64 | 64 |
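For context, the generator above is consumed by the macro assembler as a CallWrapper, so AfterCall() fires right after the emitted call instruction and records the safepoint at that pc. A typical use on the NEW side, taken from the DoInvokeFunction hunk further down in this CL (no new names introduced):

  SafepointGenerator generator(
      this, pointers, Safepoint::kLazyDeopt);
  ParameterCount count(instr->arity());
  __ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);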
65 | 65 |
66 #define __ masm()-> | 66 #define __ masm()-> |
67 | 67 |
68 bool LCodeGen::GenerateCode() { | 68 bool LCodeGen::GenerateCode() { |
69 HPhase phase("Code generation", chunk()); | 69 HPhase phase("Code generation", chunk()); |
70 ASSERT(is_unused()); | 70 ASSERT(is_unused()); |
71 status_ = GENERATING; | 71 status_ = GENERATING; |
72 CpuFeatures::Scope scope(SSE2); | 72 CpuFeatures::Scope scope(SSE2); |
(...skipping 141 matching lines...) | |
214 if (heap_slots > 0) { | 214 if (heap_slots > 0) { |
215 Comment(";;; Allocate local context"); | 215 Comment(";;; Allocate local context"); |
216 // Argument to NewContext is the function, which is still in edi. | 216 // Argument to NewContext is the function, which is still in edi. |
217 __ push(edi); | 217 __ push(edi); |
218 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 218 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
219 FastNewContextStub stub(heap_slots); | 219 FastNewContextStub stub(heap_slots); |
220 __ CallStub(&stub); | 220 __ CallStub(&stub); |
221 } else { | 221 } else { |
222 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 222 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
223 } | 223 } |
224 RecordSafepoint(Safepoint::kNoDeoptimizationIndex); | 224 RecordSafepoint(Safepoint::kNoLazyDeopt); |
225 // Context is returned in both eax and esi. It replaces the context | 225 // Context is returned in both eax and esi. It replaces the context |
226 // passed to us. It's saved in the stack and kept live in esi. | 226 // passed to us. It's saved in the stack and kept live in esi. |
227 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); | 227 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); |
228 | 228 |
229 // Copy parameters into context if necessary. | 229 // Copy parameters into context if necessary. |
230 int num_parameters = scope()->num_parameters(); | 230 int num_parameters = scope()->num_parameters(); |
231 for (int i = 0; i < num_parameters; i++) { | 231 for (int i = 0; i < num_parameters; i++) { |
232 Variable* var = scope()->parameter(i); | 232 Variable* var = scope()->parameter(i); |
233 if (var->IsContextSlot()) { | 233 if (var->IsContextSlot()) { |
234 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 234 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
(...skipping 38 matching lines...) | |
273 | 273 |
274 if (emit_instructions) { | 274 if (emit_instructions) { |
275 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); | 275 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); |
276 instr->CompileToNative(this); | 276 instr->CompileToNative(this); |
277 } | 277 } |
278 } | 278 } |
279 return !is_aborted(); | 279 return !is_aborted(); |
280 } | 280 } |
281 | 281 |
282 | 282 |
283 LInstruction* LCodeGen::GetNextInstruction() { | |
284 if (current_instruction_ < instructions_->length() - 1) { | |
285 return instructions_->at(current_instruction_ + 1); | |
286 } else { | |
287 return NULL; | |
288 } | |
289 } | |
290 | |
291 | |
292 bool LCodeGen::GenerateDeferredCode() { | 283 bool LCodeGen::GenerateDeferredCode() { |
293 ASSERT(is_generating()); | 284 ASSERT(is_generating()); |
294 if (deferred_.length() > 0) { | 285 if (deferred_.length() > 0) { |
295 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 286 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
296 LDeferredCode* code = deferred_[i]; | 287 LDeferredCode* code = deferred_[i]; |
297 __ bind(code->entry()); | 288 __ bind(code->entry()); |
298 Comment(";;; Deferred code @%d: %s.", | 289 Comment(";;; Deferred code @%d: %s.", |
299 code->instruction_index(), | 290 code->instruction_index(), |
300 code->instr()->Mnemonic()); | 291 code->instr()->Mnemonic()); |
301 code->Generate(); | 292 code->Generate(); |
302 __ jmp(code->exit()); | 293 __ jmp(code->exit()); |
303 } | 294 } |
304 | |
305 // Pad code to ensure that the last piece of deferred code have | |
306 // room for lazy bailout. | |
307 while ((masm()->pc_offset() - LastSafepointEnd()) | |
308 < Deoptimizer::patch_size()) { | |
309 __ nop(); | |
310 } | |
311 } | 295 } |
312 | 296 |
313 // Deferred code is the last part of the instruction sequence. Mark | 297 // Deferred code is the last part of the instruction sequence. Mark |
314 // the generated code as done unless we bailed out. | 298 // the generated code as done unless we bailed out. |
315 if (!is_aborted()) status_ = DONE; | 299 if (!is_aborted()) status_ = DONE; |
316 return !is_aborted(); | 300 return !is_aborted(); |
317 } | 301 } |
318 | 302 |
319 | 303 |
320 bool LCodeGen::GenerateSafepointTable() { | 304 bool LCodeGen::GenerateSafepointTable() { |
(...skipping 158 matching lines...) | |
479 } | 463 } |
480 | 464 |
481 | 465 |
482 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 466 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
483 RelocInfo::Mode mode, | 467 RelocInfo::Mode mode, |
484 LInstruction* instr, | 468 LInstruction* instr, |
485 SafepointMode safepoint_mode) { | 469 SafepointMode safepoint_mode) { |
486 ASSERT(instr != NULL); | 470 ASSERT(instr != NULL); |
487 LPointerMap* pointers = instr->pointer_map(); | 471 LPointerMap* pointers = instr->pointer_map(); |
488 RecordPosition(pointers->position()); | 472 RecordPosition(pointers->position()); |
489 | |
490 __ call(code, mode); | 473 __ call(code, mode); |
491 | 474 RecordSafepointWithLazyDeopt(instr, safepoint_mode); |
492 RegisterLazyDeoptimization(instr, safepoint_mode); | |
493 | 475 |
494 // Signal that we don't inline smi code before these stubs in the | 476 // Signal that we don't inline smi code before these stubs in the |
495 // optimizing code generator. | 477 // optimizing code generator. |
496 if (code->kind() == Code::BINARY_OP_IC || | 478 if (code->kind() == Code::BINARY_OP_IC || |
497 code->kind() == Code::COMPARE_IC) { | 479 code->kind() == Code::COMPARE_IC) { |
498 __ nop(); | 480 __ nop(); |
499 } | 481 } |
500 } | 482 } |
501 | 483 |
502 | 484 |
503 void LCodeGen::CallCode(Handle<Code> code, | 485 void LCodeGen::CallCode(Handle<Code> code, |
504 RelocInfo::Mode mode, | 486 RelocInfo::Mode mode, |
505 LInstruction* instr) { | 487 LInstruction* instr) { |
506 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); | 488 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); |
507 } | 489 } |
508 | 490 |
509 | 491 |
510 void LCodeGen::CallRuntime(const Runtime::Function* fun, | 492 void LCodeGen::CallRuntime(const Runtime::Function* fun, |
511 int argc, | 493 int argc, |
512 LInstruction* instr) { | 494 LInstruction* instr) { |
513 ASSERT(instr != NULL); | 495 ASSERT(instr != NULL); |
514 ASSERT(instr->HasPointerMap()); | 496 ASSERT(instr->HasPointerMap()); |
515 LPointerMap* pointers = instr->pointer_map(); | 497 LPointerMap* pointers = instr->pointer_map(); |
516 RecordPosition(pointers->position()); | 498 RecordPosition(pointers->position()); |
517 | 499 |
518 __ CallRuntime(fun, argc); | 500 __ CallRuntime(fun, argc); |
519 | 501 |
520 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); | 502 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
521 } | 503 } |
522 | 504 |
523 | 505 |
524 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 506 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
525 int argc, | 507 int argc, |
526 LInstruction* instr, | 508 LInstruction* instr, |
527 LOperand* context) { | 509 LOperand* context) { |
528 if (context->IsRegister()) { | 510 if (context->IsRegister()) { |
529 if (!ToRegister(context).is(esi)) { | 511 if (!ToRegister(context).is(esi)) { |
530 __ mov(esi, ToRegister(context)); | 512 __ mov(esi, ToRegister(context)); |
531 } | 513 } |
532 } else if (context->IsStackSlot()) { | 514 } else if (context->IsStackSlot()) { |
533 __ mov(esi, ToOperand(context)); | 515 __ mov(esi, ToOperand(context)); |
534 } else if (context->IsConstantOperand()) { | 516 } else if (context->IsConstantOperand()) { |
535 Handle<Object> literal = | 517 Handle<Object> literal = |
536 chunk_->LookupLiteral(LConstantOperand::cast(context)); | 518 chunk_->LookupLiteral(LConstantOperand::cast(context)); |
537 LoadHeapObject(esi, Handle<Context>::cast(literal)); | 519 LoadHeapObject(esi, Handle<Context>::cast(literal)); |
538 } else { | 520 } else { |
539 UNREACHABLE(); | 521 UNREACHABLE(); |
540 } | 522 } |
541 | 523 |
542 __ CallRuntimeSaveDoubles(id); | 524 __ CallRuntimeSaveDoubles(id); |
543 RecordSafepointWithRegisters( | 525 RecordSafepointWithRegisters( |
544 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); | 526 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); |
545 } | 527 } |
546 | 528 |
547 | 529 |
548 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, | 530 void LCodeGen::RegisterEnvironmentForDeoptimization( |
549 SafepointMode safepoint_mode) { | 531 LEnvironment* environment, Safepoint::DeoptMode mode) { |
550 // Create the environment to bailout to. If the call has side effects | |
551 // execution has to continue after the call otherwise execution can continue | |
552 // from a previous bailout point repeating the call. | |
553 LEnvironment* deoptimization_environment; | |
554 if (instr->HasDeoptimizationEnvironment()) { | |
555 deoptimization_environment = instr->deoptimization_environment(); | |
556 } else { | |
557 deoptimization_environment = instr->environment(); | |
558 } | |
559 | |
560 RegisterEnvironmentForDeoptimization(deoptimization_environment); | |
561 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | |
562 RecordSafepoint(instr->pointer_map(), | |
563 deoptimization_environment->deoptimization_index()); | |
564 } else { | |
565 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
566 RecordSafepointWithRegisters( | |
567 instr->pointer_map(), | |
568 0, | |
569 deoptimization_environment->deoptimization_index()); | |
570 } | |
571 } | |
572 | |
573 | |
574 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { | |
575 if (!environment->HasBeenRegistered()) { | 532 if (!environment->HasBeenRegistered()) { |
576 // Physical stack frame layout: | 533 // Physical stack frame layout: |
577 // -x ............. -4 0 ..................................... y | 534 // -x ............. -4 0 ..................................... y |
578 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 535 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
579 | 536 |
580 // Layout of the environment: | 537 // Layout of the environment: |
581 // 0 ..................................................... size-1 | 538 // 0 ..................................................... size-1 |
582 // [parameters] [locals] [expression stack including arguments] | 539 // [parameters] [locals] [expression stack including arguments] |
583 | 540 |
584 // Layout of the translation: | 541 // Layout of the translation: |
585 // 0 ........................................................ size - 1 + 4 | 542 // 0 ........................................................ size - 1 + 4 |
586 // [expression stack including arguments] [locals] [4 words] [parameters] | 543 // [expression stack including arguments] [locals] [4 words] [parameters] |
587 // |>------------ translation_size ------------<| | 544 // |>------------ translation_size ------------<| |
588 | 545 |
589 int frame_count = 0; | 546 int frame_count = 0; |
590 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { | 547 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { |
591 ++frame_count; | 548 ++frame_count; |
592 } | 549 } |
593 Translation translation(&translations_, frame_count); | 550 Translation translation(&translations_, frame_count); |
594 WriteTranslation(environment, &translation); | 551 WriteTranslation(environment, &translation); |
595 int deoptimization_index = deoptimizations_.length(); | 552 int deoptimization_index = deoptimizations_.length(); |
596 environment->Register(deoptimization_index, translation.index()); | 553 int pc_offset = masm()->pc_offset(); |
554 environment->Register(deoptimization_index, | |
555 translation.index(), | |
556 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | |
597 deoptimizations_.Add(environment); | 557 deoptimizations_.Add(environment); |
598 } | 558 } |
599 } | 559 } |
600 | 560 |
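Note on Safepoint::DeoptMode: the raw deoptimization index that used to be threaded through here is replaced by a mode flag, and an environment registered at a lazy-deopt point additionally remembers the current pc offset (eager deopts keep -1). A minimal sketch of the declaration this code assumes; the real one lives in safepoint-table.h, which is not part of this file, and the enumerator order here is a guess:

  class Safepoint {
   public:
    enum DeoptMode {
      kNoLazyDeopt,  // no lazy deoptimization can happen at this safepoint
      kLazyDeopt     // call may be patched for lazy bailout; record the pc
    };
    // ...
  };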
601 | 561 |
602 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { | 562 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { |
603 RegisterEnvironmentForDeoptimization(environment); | 563 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
604 ASSERT(environment->HasBeenRegistered()); | 564 ASSERT(environment->HasBeenRegistered()); |
605 int id = environment->deoptimization_index(); | 565 int id = environment->deoptimization_index(); |
606 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); | 566 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); |
607 ASSERT(entry != NULL); | 567 ASSERT(entry != NULL); |
608 if (entry == NULL) { | 568 if (entry == NULL) { |
609 Abort("bailout was not prepared"); | 569 Abort("bailout was not prepared"); |
610 return; | 570 return; |
611 } | 571 } |
612 | 572 |
613 if (FLAG_deopt_every_n_times != 0) { | 573 if (FLAG_deopt_every_n_times != 0) { |
(...skipping 59 matching lines...) | |
673 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); | 633 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); |
674 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); | 634 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); |
675 | 635 |
676 // Populate the deoptimization entries. | 636 // Populate the deoptimization entries. |
677 for (int i = 0; i < length; i++) { | 637 for (int i = 0; i < length; i++) { |
678 LEnvironment* env = deoptimizations_[i]; | 638 LEnvironment* env = deoptimizations_[i]; |
679 data->SetAstId(i, Smi::FromInt(env->ast_id())); | 639 data->SetAstId(i, Smi::FromInt(env->ast_id())); |
680 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); | 640 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); |
681 data->SetArgumentsStackHeight(i, | 641 data->SetArgumentsStackHeight(i, |
682 Smi::FromInt(env->arguments_stack_height())); | 642 Smi::FromInt(env->arguments_stack_height())); |
643 data->SetPc(i, Smi::FromInt(env->pc_offset())); | |
683 } | 644 } |
684 code->set_deoptimization_data(*data); | 645 code->set_deoptimization_data(*data); |
685 } | 646 } |
686 | 647 |
687 | 648 |
688 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { | 649 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { |
689 int result = deoptimization_literals_.length(); | 650 int result = deoptimization_literals_.length(); |
690 for (int i = 0; i < deoptimization_literals_.length(); ++i) { | 651 for (int i = 0; i < deoptimization_literals_.length(); ++i) { |
691 if (deoptimization_literals_[i].is_identical_to(literal)) return i; | 652 if (deoptimization_literals_[i].is_identical_to(literal)) return i; |
692 } | 653 } |
(...skipping 11 matching lines...) | |
704 for (int i = 0, length = inlined_closures->length(); | 665 for (int i = 0, length = inlined_closures->length(); |
705 i < length; | 666 i < length; |
706 i++) { | 667 i++) { |
707 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 668 DefineDeoptimizationLiteral(inlined_closures->at(i)); |
708 } | 669 } |
709 | 670 |
710 inlined_function_count_ = deoptimization_literals_.length(); | 671 inlined_function_count_ = deoptimization_literals_.length(); |
711 } | 672 } |
712 | 673 |
713 | 674 |
675 void LCodeGen::RecordSafepointWithLazyDeopt( | |
676 LInstruction* instr, SafepointMode safepoint_mode) { | |
677 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | |
678 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt); | |
679 } else { | |
680 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
681 RecordSafepointWithRegisters( | |
682 instr->pointer_map(), 0, Safepoint::kLazyDeopt); | |
683 } | |
684 } | |
685 | |
686 | |
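Both safepoint modes funnel through the helper above. For reference, the two call patterns in this CL look like this (copied from the CallCodeGeneric and DoDeferredStackCheck hunks, nothing new is introduced):

  // Plain call site (CallCodeGeneric):
  __ call(code, mode);
  RecordSafepointWithLazyDeopt(instr, safepoint_mode);

  // Runtime call with safepoint registers saved (DoDeferredStackCheck):
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithLazyDeopt(
      instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);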
714 void LCodeGen::RecordSafepoint( | 687 void LCodeGen::RecordSafepoint( |
715 LPointerMap* pointers, | 688 LPointerMap* pointers, |
716 Safepoint::Kind kind, | 689 Safepoint::Kind kind, |
717 int arguments, | 690 int arguments, |
718 int deoptimization_index) { | 691 Safepoint::DeoptMode deopt_mode) { |
719 ASSERT(kind == expected_safepoint_kind_); | 692 ASSERT(kind == expected_safepoint_kind_); |
720 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); | 693 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); |
721 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 694 Safepoint safepoint = |
722 kind, arguments, deoptimization_index); | 695 safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode); |
723 for (int i = 0; i < operands->length(); i++) { | 696 for (int i = 0; i < operands->length(); i++) { |
724 LOperand* pointer = operands->at(i); | 697 LOperand* pointer = operands->at(i); |
725 if (pointer->IsStackSlot()) { | 698 if (pointer->IsStackSlot()) { |
726 safepoint.DefinePointerSlot(pointer->index()); | 699 safepoint.DefinePointerSlot(pointer->index()); |
727 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 700 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
728 safepoint.DefinePointerRegister(ToRegister(pointer)); | 701 safepoint.DefinePointerRegister(ToRegister(pointer)); |
729 } | 702 } |
730 } | 703 } |
731 } | 704 } |
732 | 705 |
733 | 706 |
734 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 707 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
735 int deoptimization_index) { | 708 Safepoint::DeoptMode mode) { |
736 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); | 709 RecordSafepoint(pointers, Safepoint::kSimple, 0, mode); |
737 } | 710 } |
738 | 711 |
739 | 712 |
740 void LCodeGen::RecordSafepoint(int deoptimization_index) { | 713 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode mode) { |
741 LPointerMap empty_pointers(RelocInfo::kNoPosition); | 714 LPointerMap empty_pointers(RelocInfo::kNoPosition); |
742 RecordSafepoint(&empty_pointers, deoptimization_index); | 715 RecordSafepoint(&empty_pointers, mode); |
743 } | 716 } |
744 | 717 |
745 | 718 |
746 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, | 719 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, |
747 int arguments, | 720 int arguments, |
748 int deoptimization_index) { | 721 Safepoint::DeoptMode mode) { |
749 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, | 722 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, mode); |
750 deoptimization_index); | |
751 } | 723 } |
752 | 724 |
753 | 725 |
754 void LCodeGen::RecordPosition(int position) { | 726 void LCodeGen::RecordPosition(int position) { |
755 if (position == RelocInfo::kNoPosition) return; | 727 if (position == RelocInfo::kNoPosition) return; |
756 masm()->positions_recorder()->RecordPosition(position); | 728 masm()->positions_recorder()->RecordPosition(position); |
757 } | 729 } |
758 | 730 |
759 | 731 |
760 void LCodeGen::DoLabel(LLabel* label) { | 732 void LCodeGen::DoLabel(LLabel* label) { |
(...skipping 14 matching lines...) | |
775 | 747 |
776 | 748 |
777 void LCodeGen::DoGap(LGap* gap) { | 749 void LCodeGen::DoGap(LGap* gap) { |
778 for (int i = LGap::FIRST_INNER_POSITION; | 750 for (int i = LGap::FIRST_INNER_POSITION; |
779 i <= LGap::LAST_INNER_POSITION; | 751 i <= LGap::LAST_INNER_POSITION; |
780 i++) { | 752 i++) { |
781 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); | 753 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); |
782 LParallelMove* move = gap->GetParallelMove(inner_pos); | 754 LParallelMove* move = gap->GetParallelMove(inner_pos); |
783 if (move != NULL) DoParallelMove(move); | 755 if (move != NULL) DoParallelMove(move); |
784 } | 756 } |
785 | |
786 LInstruction* next = GetNextInstruction(); | |
787 if (next != NULL && next->IsLazyBailout()) { | |
788 int pc = masm()->pc_offset(); | |
789 safepoints_.SetPcAfterGap(pc); | |
790 } | |
791 } | 757 } |
792 | 758 |
793 | 759 |
794 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { | 760 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { |
795 DoGap(instr); | 761 DoGap(instr); |
796 } | 762 } |
797 | 763 |
798 | 764 |
799 void LCodeGen::DoParameter(LParameter* instr) { | 765 void LCodeGen::DoParameter(LParameter* instr) { |
800 // Nothing to do. | 766 // Nothing to do. |
(...skipping 1120 matching lines...) | |
1921 } | 1887 } |
1922 | 1888 |
1923 | 1889 |
1924 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 1890 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
1925 class DeferredInstanceOfKnownGlobal: public LDeferredCode { | 1891 class DeferredInstanceOfKnownGlobal: public LDeferredCode { |
1926 public: | 1892 public: |
1927 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 1893 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, |
1928 LInstanceOfKnownGlobal* instr) | 1894 LInstanceOfKnownGlobal* instr) |
1929 : LDeferredCode(codegen), instr_(instr) { } | 1895 : LDeferredCode(codegen), instr_(instr) { } |
1930 virtual void Generate() { | 1896 virtual void Generate() { |
1931 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); | 1897 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); |
1932 } | 1898 } |
1933 virtual LInstruction* instr() { return instr_; } | 1899 virtual LInstruction* instr() { return instr_; } |
1934 Label* map_check() { return &map_check_; } | 1900 Label* map_check() { return &map_check_; } |
1935 private: | 1901 private: |
1936 LInstanceOfKnownGlobal* instr_; | 1902 LInstanceOfKnownGlobal* instr_; |
1937 Label map_check_; | 1903 Label map_check_; |
1938 }; | 1904 }; |
1939 | 1905 |
1940 DeferredInstanceOfKnownGlobal* deferred; | 1906 DeferredInstanceOfKnownGlobal* deferred; |
1941 deferred = new DeferredInstanceOfKnownGlobal(this, instr); | 1907 deferred = new DeferredInstanceOfKnownGlobal(this, instr); |
(...skipping 34 matching lines...) | |
1976 __ bind(&false_result); | 1942 __ bind(&false_result); |
1977 __ mov(ToRegister(instr->result()), factory()->false_value()); | 1943 __ mov(ToRegister(instr->result()), factory()->false_value()); |
1978 | 1944 |
1979 // Here result has either true or false. Deferred code also produces true or | 1945 // Here result has either true or false. Deferred code also produces true or |
1980 // false object. | 1946 // false object. |
1981 __ bind(deferred->exit()); | 1947 __ bind(deferred->exit()); |
1982 __ bind(&done); | 1948 __ bind(&done); |
1983 } | 1949 } |
1984 | 1950 |
1985 | 1951 |
1986 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 1952 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
1987 Label* map_check) { | 1953 Label* map_check) { |
1988 PushSafepointRegistersScope scope(this); | 1954 PushSafepointRegistersScope scope(this); |
1989 | 1955 |
1990 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | 1956 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
1991 flags = static_cast<InstanceofStub::Flags>( | 1957 flags = static_cast<InstanceofStub::Flags>( |
1992 flags | InstanceofStub::kArgsInRegisters); | 1958 flags | InstanceofStub::kArgsInRegisters); |
1993 flags = static_cast<InstanceofStub::Flags>( | 1959 flags = static_cast<InstanceofStub::Flags>( |
1994 flags | InstanceofStub::kCallSiteInlineCheck); | 1960 flags | InstanceofStub::kCallSiteInlineCheck); |
1995 flags = static_cast<InstanceofStub::Flags>( | 1961 flags = static_cast<InstanceofStub::Flags>( |
1996 flags | InstanceofStub::kReturnTrueFalseObject); | 1962 flags | InstanceofStub::kReturnTrueFalseObject); |
1997 InstanceofStub stub(flags); | 1963 InstanceofStub stub(flags); |
1998 | 1964 |
1999 // Get the temp register reserved by the instruction. This needs to be a | 1965 // Get the temp register reserved by the instruction. This needs to be a |
2000 // register which is pushed last by PushSafepointRegisters as top of the | 1966 // register which is pushed last by PushSafepointRegisters as top of the |
2001 // stack is used to pass the offset to the location of the map check to | 1967 // stack is used to pass the offset to the location of the map check to |
2002 // the stub. | 1968 // the stub. |
2003 Register temp = ToRegister(instr->TempAt(0)); | 1969 Register temp = ToRegister(instr->TempAt(0)); |
2004 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); | 1970 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); |
2005 __ mov(InstanceofStub::right(), Immediate(instr->function())); | 1971 __ mov(InstanceofStub::right(), Immediate(instr->function())); |
2006 static const int kAdditionalDelta = 13; | 1972 static const int kAdditionalDelta = 13; |
2007 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 1973 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
2008 __ mov(temp, Immediate(delta)); | 1974 __ mov(temp, Immediate(delta)); |
2009 __ StoreToSafepointRegisterSlot(temp, temp); | 1975 __ StoreToSafepointRegisterSlot(temp, temp); |
2010 CallCodeGeneric(stub.GetCode(), | 1976 CallCodeGeneric(stub.GetCode(), |
2011 RelocInfo::CODE_TARGET, | 1977 RelocInfo::CODE_TARGET, |
2012 instr, | 1978 instr, |
2013 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 1979 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
1980 ASSERT(instr->HasDeoptimizationEnvironment()); | |
1981 LEnvironment* env = instr->deoptimization_environment(); | |
1982 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
1983 | |
2014 // Put the result value into the eax slot and restore all registers. | 1984 // Put the result value into the eax slot and restore all registers. |
2015 __ StoreToSafepointRegisterSlot(eax, eax); | 1985 __ StoreToSafepointRegisterSlot(eax, eax); |
2016 } | 1986 } |
2017 | 1987 |
2018 | 1988 |
2019 static Condition ComputeCompareCondition(Token::Value op) { | 1989 static Condition ComputeCompareCondition(Token::Value op) { |
2020 switch (op) { | 1990 switch (op) { |
2021 case Token::EQ_STRICT: | 1991 case Token::EQ_STRICT: |
2022 case Token::EQ: | 1992 case Token::EQ: |
2023 return equal; | 1993 return equal; |
(...skipping 585 matching lines...) | |
2609 __ j(zero, &invoke, Label::kNear); | 2579 __ j(zero, &invoke, Label::kNear); |
2610 __ bind(&loop); | 2580 __ bind(&loop); |
2611 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); | 2581 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); |
2612 __ dec(length); | 2582 __ dec(length); |
2613 __ j(not_zero, &loop); | 2583 __ j(not_zero, &loop); |
2614 | 2584 |
2615 // Invoke the function. | 2585 // Invoke the function. |
2616 __ bind(&invoke); | 2586 __ bind(&invoke); |
2617 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 2587 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
2618 LPointerMap* pointers = instr->pointer_map(); | 2588 LPointerMap* pointers = instr->pointer_map(); |
2619 LEnvironment* env = instr->deoptimization_environment(); | |
2620 RecordPosition(pointers->position()); | 2589 RecordPosition(pointers->position()); |
2621 RegisterEnvironmentForDeoptimization(env); | 2590 SafepointGenerator safepoint_generator( |
2622 SafepointGenerator safepoint_generator(this, | 2591 this, pointers, Safepoint::kLazyDeopt); |
2623 pointers, | |
2624 env->deoptimization_index()); | |
2625 ParameterCount actual(eax); | 2592 ParameterCount actual(eax); |
2626 __ InvokeFunction(function, actual, CALL_FUNCTION, | 2593 __ InvokeFunction(function, actual, CALL_FUNCTION, |
2627 safepoint_generator, CALL_AS_METHOD); | 2594 safepoint_generator, CALL_AS_METHOD); |
2628 } | 2595 } |
2629 | 2596 |
2630 | 2597 |
2631 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 2598 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
2632 LOperand* argument = instr->InputAt(0); | 2599 LOperand* argument = instr->InputAt(0); |
2633 if (argument->IsConstantOperand()) { | 2600 if (argument->IsConstantOperand()) { |
2634 __ push(ToImmediate(argument)); | 2601 __ push(ToImmediate(argument)); |
(...skipping 62 matching lines...) | |
2697 RecordPosition(pointers->position()); | 2664 RecordPosition(pointers->position()); |
2698 | 2665 |
2699 // Invoke function. | 2666 // Invoke function. |
2700 __ SetCallKind(ecx, call_kind); | 2667 __ SetCallKind(ecx, call_kind); |
2701 if (*function == *info()->closure()) { | 2668 if (*function == *info()->closure()) { |
2702 __ CallSelf(); | 2669 __ CallSelf(); |
2703 } else { | 2670 } else { |
2704 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 2671 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); |
2705 } | 2672 } |
2706 | 2673 |
2707 // Setup deoptimization. | 2674 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
2708 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); | |
2709 } | 2675 } |
2710 | 2676 |
2711 | 2677 |
2712 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2678 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
2713 ASSERT(ToRegister(instr->result()).is(eax)); | 2679 ASSERT(ToRegister(instr->result()).is(eax)); |
2714 __ mov(edi, instr->function()); | 2680 __ mov(edi, instr->function()); |
2715 CallKnownFunction(instr->function(), | 2681 CallKnownFunction(instr->function(), |
2716 instr->arity(), | 2682 instr->arity(), |
2717 instr, | 2683 instr, |
2718 CALL_AS_METHOD); | 2684 CALL_AS_METHOD); |
(...skipping 355 matching lines...) | |
3074 } | 3040 } |
3075 } | 3041 } |
3076 | 3042 |
3077 | 3043 |
3078 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3044 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
3079 ASSERT(ToRegister(instr->context()).is(esi)); | 3045 ASSERT(ToRegister(instr->context()).is(esi)); |
3080 ASSERT(ToRegister(instr->function()).is(edi)); | 3046 ASSERT(ToRegister(instr->function()).is(edi)); |
3081 ASSERT(instr->HasPointerMap()); | 3047 ASSERT(instr->HasPointerMap()); |
3082 ASSERT(instr->HasDeoptimizationEnvironment()); | 3048 ASSERT(instr->HasDeoptimizationEnvironment()); |
3083 LPointerMap* pointers = instr->pointer_map(); | 3049 LPointerMap* pointers = instr->pointer_map(); |
3084 LEnvironment* env = instr->deoptimization_environment(); | |
3085 RecordPosition(pointers->position()); | 3050 RecordPosition(pointers->position()); |
3086 RegisterEnvironmentForDeoptimization(env); | 3051 SafepointGenerator generator( |
3087 SafepointGenerator generator(this, pointers, env->deoptimization_index()); | 3052 this, pointers, Safepoint::kLazyDeopt); |
3088 ParameterCount count(instr->arity()); | 3053 ParameterCount count(instr->arity()); |
3089 __ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); | 3054 __ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); |
3090 } | 3055 } |
3091 | 3056 |
3092 | 3057 |
3093 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 3058 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { |
3094 ASSERT(ToRegister(instr->context()).is(esi)); | 3059 ASSERT(ToRegister(instr->context()).is(esi)); |
3095 ASSERT(ToRegister(instr->key()).is(ecx)); | 3060 ASSERT(ToRegister(instr->key()).is(ecx)); |
3096 ASSERT(ToRegister(instr->result()).is(eax)); | 3061 ASSERT(ToRegister(instr->result()).is(eax)); |
3097 | 3062 |
(...skipping 549 matching lines...) | |
3647 // integer value. | 3612 // integer value. |
3648 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); | 3613 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); |
3649 // NumberTagI and NumberTagD use the context from the frame, rather than | 3614 // NumberTagI and NumberTagD use the context from the frame, rather than |
3650 // the environment's HContext or HInlinedContext value. | 3615 // the environment's HContext or HInlinedContext value. |
3651 // They only call Runtime::kAllocateHeapNumber. | 3616 // They only call Runtime::kAllocateHeapNumber. |
3652 // The corresponding HChange instructions are added in a phase that does | 3617 // The corresponding HChange instructions are added in a phase that does |
3653 // not have easy access to the local context. | 3618 // not have easy access to the local context. |
3654 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3619 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
3655 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 3620 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
3656 RecordSafepointWithRegisters( | 3621 RecordSafepointWithRegisters( |
3657 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | 3622 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
3658 if (!reg.is(eax)) __ mov(reg, eax); | 3623 if (!reg.is(eax)) __ mov(reg, eax); |
3659 | 3624 |
3660 // Done. Put the value in xmm0 into the value of the allocated heap | 3625 // Done. Put the value in xmm0 into the value of the allocated heap |
3661 // number. | 3626 // number. |
3662 __ bind(&done); | 3627 __ bind(&done); |
3663 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); | 3628 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); |
3664 __ StoreToSafepointRegisterSlot(reg, reg); | 3629 __ StoreToSafepointRegisterSlot(reg, reg); |
3665 } | 3630 } |
3666 | 3631 |
3667 | 3632 |
(...skipping 31 matching lines...) | |
3699 __ Set(reg, Immediate(0)); | 3664 __ Set(reg, Immediate(0)); |
3700 | 3665 |
3701 PushSafepointRegistersScope scope(this); | 3666 PushSafepointRegistersScope scope(this); |
3702 // NumberTagI and NumberTagD use the context from the frame, rather than | 3667 // NumberTagI and NumberTagD use the context from the frame, rather than |
3703 // the environment's HContext or HInlinedContext value. | 3668 // the environment's HContext or HInlinedContext value. |
3704 // They only call Runtime::kAllocateHeapNumber. | 3669 // They only call Runtime::kAllocateHeapNumber. |
3705 // The corresponding HChange instructions are added in a phase that does | 3670 // The corresponding HChange instructions are added in a phase that does |
3706 // not have easy access to the local context. | 3671 // not have easy access to the local context. |
3707 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3672 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
3708 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 3673 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
3709 RecordSafepointWithRegisters(instr->pointer_map(), 0, | 3674 RecordSafepointWithRegisters( |
3710 Safepoint::kNoDeoptimizationIndex); | 3675 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
3711 __ StoreToSafepointRegisterSlot(reg, eax); | 3676 __ StoreToSafepointRegisterSlot(reg, eax); |
3712 } | 3677 } |
3713 | 3678 |
3714 | 3679 |
3715 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 3680 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
3716 LOperand* input = instr->InputAt(0); | 3681 LOperand* input = instr->InputAt(0); |
3717 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 3682 ASSERT(input->IsRegister() && input->Equals(instr->result())); |
3718 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 3683 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); |
3719 __ SmiTag(ToRegister(input)); | 3684 __ SmiTag(ToRegister(input)); |
3720 } | 3685 } |
(...skipping 723 matching lines...) | |
4444 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); | 4409 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); |
4445 | 4410 |
4446 // Check the marker in the calling frame. | 4411 // Check the marker in the calling frame. |
4447 __ bind(&check_frame_marker); | 4412 __ bind(&check_frame_marker); |
4448 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 4413 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), |
4449 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); | 4414 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); |
4450 } | 4415 } |
4451 | 4416 |
4452 | 4417 |
4453 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 4418 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
4454 // No code for lazy bailout instruction. Used to capture environment after a | 4419 ASSERT(instr->HasEnvironment()); |
4455 // call for populating the safepoint data with deoptimization data. | 4420 LEnvironment* env = instr->environment(); |
4421 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
4422 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
Vyacheslav Egorov (Chromium) 2011/11/15 12:03:56:
Are we sure we always have enough space between la
fschneider 2011/11/15 13:35:24:
Done.
4456 } | 4423 } |
4457 | 4424 |
4458 | 4425 |
4459 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 4426 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
4460 DeoptimizeIf(no_condition, instr->environment()); | 4427 DeoptimizeIf(no_condition, instr->environment()); |
4461 } | 4428 } |
4462 | 4429 |
4463 | 4430 |
4464 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { | 4431 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { |
4465 LOperand* obj = instr->object(); | 4432 LOperand* obj = instr->object(); |
4466 LOperand* key = instr->key(); | 4433 LOperand* key = instr->key(); |
4467 __ push(ToOperand(obj)); | 4434 __ push(ToOperand(obj)); |
4468 if (key->IsConstantOperand()) { | 4435 if (key->IsConstantOperand()) { |
4469 __ push(ToImmediate(key)); | 4436 __ push(ToImmediate(key)); |
4470 } else { | 4437 } else { |
4471 __ push(ToOperand(key)); | 4438 __ push(ToOperand(key)); |
4472 } | 4439 } |
4473 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 4440 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
4474 LPointerMap* pointers = instr->pointer_map(); | 4441 LPointerMap* pointers = instr->pointer_map(); |
4475 LEnvironment* env = instr->deoptimization_environment(); | |
4476 RecordPosition(pointers->position()); | 4442 RecordPosition(pointers->position()); |
4477 RegisterEnvironmentForDeoptimization(env); | |
4478 // Create safepoint generator that will also ensure enough space in the | 4443 // Create safepoint generator that will also ensure enough space in the |
4479 // reloc info for patching in deoptimization (since this is invoking a | 4444 // reloc info for patching in deoptimization (since this is invoking a |
4480 // builtin) | 4445 // builtin) |
4481 SafepointGenerator safepoint_generator(this, | 4446 SafepointGenerator safepoint_generator( |
4482 pointers, | 4447 this, pointers, Safepoint::kLazyDeopt); |
4483 env->deoptimization_index()); | |
4484 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); | 4448 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); |
4485 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); | 4449 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); |
4486 } | 4450 } |
4487 | 4451 |
4488 | 4452 |
4489 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 4453 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
4490 { | 4454 PushSafepointRegistersScope scope(this); |
4491 PushSafepointRegistersScope scope(this); | 4455 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
4492 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 4456 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
4493 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 4457 RecordSafepointWithLazyDeopt( |
4494 RegisterLazyDeoptimization( | 4458 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
4495 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 4459 ASSERT(instr->HasEnvironment()); |
4496 } | 4460 LEnvironment* env = instr->environment(); |
4497 | 4461 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
4498 // The gap code includes the restoring of the safepoint registers. | |
4499 int pc = masm()->pc_offset(); | |
4500 safepoints_.SetPcAfterGap(pc); | |
4501 } | 4462 } |
4502 | 4463 |
4503 | 4464 |
4504 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 4465 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
4505 class DeferredStackCheck: public LDeferredCode { | 4466 class DeferredStackCheck: public LDeferredCode { |
4506 public: | 4467 public: |
4507 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) | 4468 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) |
4508 : LDeferredCode(codegen), instr_(instr) { } | 4469 : LDeferredCode(codegen), instr_(instr) { } |
4509 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } | 4470 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } |
4510 virtual LInstruction* instr() { return instr_; } | 4471 virtual LInstruction* instr() { return instr_; } |
4511 private: | 4472 private: |
4512 LStackCheck* instr_; | 4473 LStackCheck* instr_; |
4513 }; | 4474 }; |
4514 | 4475 |
4476 ASSERT(instr->HasEnvironment()); | |
4477 LEnvironment* env = instr->environment(); | |
4515 if (instr->hydrogen()->is_function_entry()) { | 4478 if (instr->hydrogen()->is_function_entry()) { |
4516 // Perform stack overflow check. | 4479 // Perform stack overflow check. |
4517 Label done; | 4480 Label done; |
4518 ExternalReference stack_limit = | 4481 ExternalReference stack_limit = |
4519 ExternalReference::address_of_stack_limit(isolate()); | 4482 ExternalReference::address_of_stack_limit(isolate()); |
4520 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 4483 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
4521 __ j(above_equal, &done, Label::kNear); | 4484 __ j(above_equal, &done, Label::kNear); |
4522 | 4485 |
4523 ASSERT(instr->context()->IsRegister()); | 4486 ASSERT(instr->context()->IsRegister()); |
4524 ASSERT(ToRegister(instr->context()).is(esi)); | 4487 ASSERT(ToRegister(instr->context()).is(esi)); |
4525 StackCheckStub stub; | 4488 StackCheckStub stub; |
4526 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4489 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
4527 __ bind(&done); | 4490 __ bind(&done); |
4491 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
4492 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
4528 } else { | 4493 } else { |
4529 ASSERT(instr->hydrogen()->is_backwards_branch()); | 4494 ASSERT(instr->hydrogen()->is_backwards_branch()); |
4530 // Perform stack overflow check if this goto needs it before jumping. | 4495 // Perform stack overflow check if this goto needs it before jumping. |
4531 DeferredStackCheck* deferred_stack_check = | 4496 DeferredStackCheck* deferred_stack_check = |
4532 new DeferredStackCheck(this, instr); | 4497 new DeferredStackCheck(this, instr); |
4533 ExternalReference stack_limit = | 4498 ExternalReference stack_limit = |
4534 ExternalReference::address_of_stack_limit(isolate()); | 4499 ExternalReference::address_of_stack_limit(isolate()); |
4535 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 4500 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
4536 __ j(below, deferred_stack_check->entry()); | 4501 __ j(below, deferred_stack_check->entry()); |
4537 __ bind(instr->done_label()); | 4502 __ bind(instr->done_label()); |
4538 deferred_stack_check->SetExit(instr->done_label()); | 4503 deferred_stack_check->SetExit(instr->done_label()); |
4504 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
4505 // Don't record a deoptimization index for the safepoint here. | |
4506 // This will be done explicitly when emitting call and the safepoint in | |
4507 // the deferred code. | |
4539 } | 4508 } |
4540 } | 4509 } |
4541 | 4510 |
4542 | 4511 |
4543 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 4512 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
4544 // This is a pseudo-instruction that ensures that the environment here is | 4513 // This is a pseudo-instruction that ensures that the environment here is |
4545 // properly registered for deoptimization and records the assembler's PC | 4514 // properly registered for deoptimization and records the assembler's PC |
4546 // offset. | 4515 // offset. |
4547 LEnvironment* environment = instr->environment(); | 4516 LEnvironment* environment = instr->environment(); |
4548 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), | 4517 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), |
4549 instr->SpilledDoubleRegisterArray()); | 4518 instr->SpilledDoubleRegisterArray()); |
4550 | 4519 |
4551 // If the environment were already registered, we would have no way of | 4520 // If the environment were already registered, we would have no way of |
4552 // backpatching it with the spill slot operands. | 4521 // backpatching it with the spill slot operands. |
4553 ASSERT(!environment->HasBeenRegistered()); | 4522 ASSERT(!environment->HasBeenRegistered()); |
4554 RegisterEnvironmentForDeoptimization(environment); | 4523 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
4555 ASSERT(osr_pc_offset_ == -1); | 4524 ASSERT(osr_pc_offset_ == -1); |
4556 osr_pc_offset_ = masm()->pc_offset(); | 4525 osr_pc_offset_ = masm()->pc_offset(); |
4557 } | 4526 } |
4558 | 4527 |
4559 | 4528 |
4560 void LCodeGen::DoIn(LIn* instr) { | 4529 void LCodeGen::DoIn(LIn* instr) { |
4561 LOperand* obj = instr->object(); | 4530 LOperand* obj = instr->object(); |
4562 LOperand* key = instr->key(); | 4531 LOperand* key = instr->key(); |
4563 if (key->IsConstantOperand()) { | 4532 if (key->IsConstantOperand()) { |
4564 __ push(ToImmediate(key)); | 4533 __ push(ToImmediate(key)); |
4565 } else { | 4534 } else { |
4566 __ push(ToOperand(key)); | 4535 __ push(ToOperand(key)); |
4567 } | 4536 } |
4568 if (obj->IsConstantOperand()) { | 4537 if (obj->IsConstantOperand()) { |
4569 __ push(ToImmediate(obj)); | 4538 __ push(ToImmediate(obj)); |
4570 } else { | 4539 } else { |
4571 __ push(ToOperand(obj)); | 4540 __ push(ToOperand(obj)); |
4572 } | 4541 } |
4573 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 4542 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
4574 LPointerMap* pointers = instr->pointer_map(); | 4543 LPointerMap* pointers = instr->pointer_map(); |
4575 LEnvironment* env = instr->deoptimization_environment(); | |
4576 RecordPosition(pointers->position()); | 4544 RecordPosition(pointers->position()); |
4577 RegisterEnvironmentForDeoptimization(env); | 4545 SafepointGenerator safepoint_generator( |
4578 // Create safepoint generator that will also ensure enough space in the | 4546 this, pointers, Safepoint::kLazyDeopt); |
4579 // reloc info for patching in deoptimization (since this is invoking a | |
4580 // builtin) | |
4581 SafepointGenerator safepoint_generator(this, | |
4582 pointers, | |
4583 env->deoptimization_index()); | |
4584 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); | 4547 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); |
4585 } | 4548 } |
4586 | 4549 |
4587 | 4550 |
4588 #undef __ | 4551 #undef __ |
4589 | 4552 |
4590 } } // namespace v8::internal | 4553 } } // namespace v8::internal |
4591 | 4554 |
4592 #endif // V8_TARGET_ARCH_IA32 | 4555 #endif // V8_TARGET_ARCH_IA32 |