| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 674 matching lines...) |
| 685 RelocInfo::Mode mode, | 685 RelocInfo::Mode mode, |
| 686 LInstruction* instr) { | 686 LInstruction* instr) { |
| 687 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); | 687 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); |
| 688 } | 688 } |
| 689 | 689 |
| 690 | 690 |
| 691 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 691 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
| 692 RelocInfo::Mode mode, | 692 RelocInfo::Mode mode, |
| 693 LInstruction* instr, | 693 LInstruction* instr, |
| 694 SafepointMode safepoint_mode) { | 694 SafepointMode safepoint_mode) { |
| 695 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | |
| 696 ASSERT(instr != NULL); | 695 ASSERT(instr != NULL); |
| 697 __ Call(code, mode); | 696 __ Call(code, mode); |
| 698 RecordSafepointWithLazyDeopt(instr, safepoint_mode); | 697 RecordSafepointWithLazyDeopt(instr, safepoint_mode); |
| 699 } | 698 } |
| 700 | 699 |
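Note: the deleted EnsureSpaceForLazyDeopt call means CallCodeGeneric no longer pads before every code call; together with the EnsureSpaceForLazyDeopt hunk further down, the padding and the last_lazy_deopt_pc_ update now appear to happen only at the sites where a lazy deopt is actually recorded. A sketch of the call-site effect (abbreviated, not verbatim from this file):

    // OLD: the helper padded, and each caller then had to do a second step:
    //   EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
    //   ... emit call, record safepoint ...
    //   last_lazy_deopt_pc_ = masm()->pc_offset();
    // NEW: a single call both pads and records the pc:
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());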
| 701 | 700 |
| 702 void LCodeGen::CallRuntime(const Runtime::Function* function, | 701 void LCodeGen::CallRuntime(const Runtime::Function* function, |
| 703 int num_arguments, | 702 int num_arguments, |
| 704 LInstruction* instr, | 703 LInstruction* instr, |
| 705 SaveFPRegsMode save_doubles) { | 704 SaveFPRegsMode save_doubles) { |
| (...skipping 4862 matching lines...) |
| 5568 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 5567 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 5569 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); | 5568 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); |
| 5570 | 5569 |
| 5571 // Check the marker in the calling frame. | 5570 // Check the marker in the calling frame. |
| 5572 __ bind(&check_frame_marker); | 5571 __ bind(&check_frame_marker); |
| 5573 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); | 5572 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); |
| 5574 } | 5573 } |
| 5575 | 5574 |
| 5576 | 5575 |
| 5577 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { | 5576 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { |
| 5578 if (info()->IsStub()) return; | 5577 if (!info()->IsStub()) { |
| 5579 // Ensure that we have enough space after the previous lazy-bailout | 5578 // Ensure that we have enough space after the previous lazy-bailout |
| 5580 // instruction for patching the code here. | 5579 // instruction for patching the code here. |
| 5581 int current_pc = masm()->pc_offset(); | 5580 int current_pc = masm()->pc_offset(); |
| 5582 if (current_pc < last_lazy_deopt_pc_ + space_needed) { | 5581 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
| 5583 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 5582 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 5584 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); | 5583 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); |
| 5585 while (padding_size > 0) { | 5584 while (padding_size > 0) { |
| 5586 __ nop(); | 5585 __ nop(); |
| 5587 padding_size -= Assembler::kInstrSize; | 5586 padding_size -= Assembler::kInstrSize; |
| | 5587 } |
| 5588 } | 5588 } |
| 5589 } | 5589 } |
| | 5590 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5590 } | 5591 } |
| 5591 | 5592 |
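For reviewers new to this invariant: the function guarantees at least space_needed bytes of code after the previously recorded lazy-deopt pc, so the deoptimizer can later patch a call sequence over that region without clobbering what follows. The NEW version also takes over recording last_lazy_deopt_pc_, which the OLD code left to each caller. A self-contained sketch of the arithmetic, with plain ints standing in for the assembler state (illustrative only, not V8 API; kInstrSize is 4 as on MIPS):

    #include <cassert>
    #include <cstdio>

    const int kInstrSize = 4;     // MIPS instructions are 4 bytes
    int pc_offset = 0;            // stands in for masm()->pc_offset()
    int last_lazy_deopt_pc = 0;   // stands in for last_lazy_deopt_pc_

    void EnsureSpaceForLazyDeopt(int space_needed, bool is_stub) {
      if (!is_stub) {
        if (pc_offset < last_lazy_deopt_pc + space_needed) {
          int padding_size = last_lazy_deopt_pc + space_needed - pc_offset;
          assert(padding_size % kInstrSize == 0);
          while (padding_size > 0) {
            pc_offset += kInstrSize;       // models __ nop()
            padding_size -= kInstrSize;
          }
        }
      }
      last_lazy_deopt_pc = pc_offset;      // NEW behavior: recorded here
    }

    int main() {
      pc_offset = 8;                       // two instructions since last site
      EnsureSpaceForLazyDeopt(16, false);  // need 16 bytes after pc 0
      printf("pc=%d last=%d\n", pc_offset, last_lazy_deopt_pc);  // pc=16 last=16
      return 0;
    }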
| 5592 | 5593 |
| 5593 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5594 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 5594 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5595 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5595 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5596 ASSERT(instr->HasEnvironment()); | 5596 ASSERT(instr->HasEnvironment()); |
| 5597 LEnvironment* env = instr->environment(); | 5597 LEnvironment* env = instr->environment(); |
| 5598 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5598 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5599 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5599 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5600 } | 5600 } |
| 5601 | 5601 |
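DoLazyBailout shows the consolidation at a call site: the OLD body followed the padding call with an explicit last_lazy_deopt_pc_ assignment, while the NEW body drops that line because the helper records the pc itself. The same two-step-to-one-step reduction recurs in both arms of DoStackCheck below. Condensed from the right-hand column:

    // NEW DoLazyBailout body, condensed:
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());  // pads and records pc
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());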
| 5602 | 5602 |
| 5603 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5603 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 5604 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5604 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
| 5605 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5605 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
| (...skipping 52 matching lines...) |
| 5658 // Perform stack overflow check. | 5658 // Perform stack overflow check. |
| 5659 Label done; | 5659 Label done; |
| 5660 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 5660 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 5661 __ Branch(&done, hs, sp, Operand(at)); | 5661 __ Branch(&done, hs, sp, Operand(at)); |
| 5662 ASSERT(instr->context()->IsRegister()); | 5662 ASSERT(instr->context()->IsRegister()); |
| 5663 ASSERT(ToRegister(instr->context()).is(cp)); | 5663 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5664 CallCode(isolate()->builtins()->StackCheck(), | 5664 CallCode(isolate()->builtins()->StackCheck(), |
| 5665 RelocInfo::CODE_TARGET, | 5665 RelocInfo::CODE_TARGET, |
| 5666 instr); | 5666 instr); |
| 5667 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5667 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5668 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5669 __ bind(&done); | 5668 __ bind(&done); |
| 5670 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5669 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5671 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5670 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5672 } else { | 5671 } else { |
| 5673 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5672 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| 5674 // Perform stack overflow check if this goto needs it before jumping. | 5673 // Perform stack overflow check if this goto needs it before jumping. |
| 5675 DeferredStackCheck* deferred_stack_check = | 5674 DeferredStackCheck* deferred_stack_check = |
| 5676 new(zone()) DeferredStackCheck(this, instr); | 5675 new(zone()) DeferredStackCheck(this, instr); |
| 5677 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 5676 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 5678 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at)); | 5677 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at)); |
| 5679 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5678 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5680 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5681 __ bind(instr->done_label()); | 5679 __ bind(instr->done_label()); |
| 5682 deferred_stack_check->SetExit(instr->done_label()); | 5680 deferred_stack_check->SetExit(instr->done_label()); |
| 5683 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5681 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5684 // Don't record a deoptimization index for the safepoint here. | 5682 // Don't record a deoptimization index for the safepoint here. |
| 5685 // This will be done explicitly when emitting call and the safepoint in | 5683 // This will be done explicitly when emitting call and the safepoint in |
| 5686 // the deferred code. | 5684 // the deferred code. |
| 5687 } | 5685 } |
| 5688 } | 5686 } |
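In DoStackCheck the ordering is the point: padding is ensured before the label is bound (__ bind(&done) in the function-entry arm, instr->done_label() in the backwards-branch arm), which keeps the patchable lazy-deopt region clear of the fall-through code after the label. Condensed NEW sequence for the function-entry arm, taken from the right-hand column (the backwards-branch arm is symmetric):

    CallCode(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET, instr);
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());  // reserve patch region
    __ bind(&done);                            // bind only after space is reserved
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());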
| 5689 | 5687 |
| 5690 | 5688 |
| (...skipping 100 matching lines...) |
| 5791 __ Subu(scratch, result, scratch); | 5789 __ Subu(scratch, result, scratch); |
| 5792 __ lw(result, FieldMemOperand(scratch, | 5790 __ lw(result, FieldMemOperand(scratch, |
| 5793 FixedArray::kHeaderSize - kPointerSize)); | 5791 FixedArray::kHeaderSize - kPointerSize)); |
| 5794 __ bind(&done); | 5792 __ bind(&done); |
| 5795 } | 5793 } |
| 5796 | 5794 |
| 5797 | 5795 |
| 5798 #undef __ | 5796 #undef __ |
| 5799 | 5797 |
| 5800 } } // namespace v8::internal | 5798 } } // namespace v8::internal |