| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 702 matching lines...) |
| 713 TargetAddressStorageMode storage_mode) { | 713 TargetAddressStorageMode storage_mode) { |
| 714 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode); | 714 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode); |
| 715 } | 715 } |
| 716 | 716 |
| 717 | 717 |
| 718 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 718 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
| 719 RelocInfo::Mode mode, | 719 RelocInfo::Mode mode, |
| 720 LInstruction* instr, | 720 LInstruction* instr, |
| 721 SafepointMode safepoint_mode, | 721 SafepointMode safepoint_mode, |
| 722 TargetAddressStorageMode storage_mode) { | 722 TargetAddressStorageMode storage_mode) { |
| 723 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | |
| 724 ASSERT(instr != NULL); | 723 ASSERT(instr != NULL); |
| 725 // Block literal pool emission to ensure nop indicating no inlined smi code | 724 // Block literal pool emission to ensure nop indicating no inlined smi code |
| 726 // is in the correct position. | 725 // is in the correct position. |
| 727 Assembler::BlockConstPoolScope block_const_pool(masm()); | 726 Assembler::BlockConstPoolScope block_const_pool(masm()); |
| 728 __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode); | 727 __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode); |
| 729 RecordSafepointWithLazyDeopt(instr, safepoint_mode); | 728 RecordSafepointWithLazyDeopt(instr, safepoint_mode); |
| 730 | 729 |
| 731 // Signal that we don't inline smi code before these stubs in the | 730 // Signal that we don't inline smi code before these stubs in the |
| 732 // optimizing code generator. | 731 // optimizing code generator. |
| 733 if (code->kind() == Code::BINARY_OP_IC || | 732 if (code->kind() == Code::BINARY_OP_IC || |
| (...skipping 4838 matching lines...) |
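The hunk above drops the eager EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()) call from the top of CallCodeGeneric: padding for lazy deoptimization now happens only at the sites that actually record a lazy-deopt point (DoLazyBailout and DoStackCheck further down), where the rewritten helper also records the new pc. A minimal sketch of why that saves padding, using made-up sizes and a toy assembler with deliberately simplified bookkeeping, not the real Masm/Deoptimizer API:

    #include <cstdio>

    // Toy stand-ins: kPatchSize plays the role of Deoptimizer::patch_size(),
    // Masm the role of the ARM macro-assembler.
    constexpr int kInstrSize = 4;
    constexpr int kPatchSize = 3 * kInstrSize;

    struct Masm {
      int pc = 0;    // bytes emitted so far
      int nops = 0;  // padding instructions emitted
      void Emit() { pc += kInstrSize; }      // one real instruction
      void Pad(int* last) {                  // EnsureSpaceForLazyDeopt analogue
        while (pc < *last + kPatchSize) { pc += kInstrSize; ++nops; }
        *last = pc;
      }
    };

    int main() {
      // Two back-to-back calls; only the second records a lazy-deopt point.
      Masm old_scheme, new_scheme;
      int old_last = 0, new_last = 0;

      old_scheme.Pad(&old_last); old_scheme.Emit();  // padded call #1
      old_scheme.Pad(&old_last); old_scheme.Emit();  // padded call #2

      new_scheme.Emit();                             // plain call #1
      new_scheme.Pad(&new_last); new_scheme.Emit();  // call #2, at the bailout

      printf("eager padding: %d nops, on-demand: %d nops\n",
             old_scheme.nops, new_scheme.nops);      // prints 5 vs. 2
    }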
| 5572 __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 5571 __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 5573 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset), eq); | 5572 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset), eq); |
| 5574 | 5573 |
| 5575 // Check the marker in the calling frame. | 5574 // Check the marker in the calling frame. |
| 5576 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); | 5575 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); |
| 5577 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); | 5576 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); |
| 5578 } | 5577 } |
| 5579 | 5578 |
| 5580 | 5579 |
| 5581 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { | 5580 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { |
| 5582 if (info()->IsStub()) return; | 5581 if (!info()->IsStub()) { |
| 5583 // Ensure that we have enough space after the previous lazy-bailout | 5582 // Ensure that we have enough space after the previous lazy-bailout |
| 5584 // instruction for patching the code here. | 5583 // instruction for patching the code here. |
| 5585 int current_pc = masm()->pc_offset(); | 5584 int current_pc = masm()->pc_offset(); |
| 5586 if (current_pc < last_lazy_deopt_pc_ + space_needed) { | 5585 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
| 5587 // Block literal pool emission for duration of padding. | 5586 // Block literal pool emission for duration of padding. |
| 5588 Assembler::BlockConstPoolScope block_const_pool(masm()); | 5587 Assembler::BlockConstPoolScope block_const_pool(masm()); |
| 5589 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 5588 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 5590 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); | 5589 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); |
| 5591 while (padding_size > 0) { | 5590 while (padding_size > 0) { |
| 5592 __ nop(); | 5591 __ nop(); |
| 5593 padding_size -= Assembler::kInstrSize; | 5592 padding_size -= Assembler::kInstrSize; |
| 5593 } |
| 5594 } | 5594 } |
| 5595 } | 5595 } |
| 5596 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5596 } | 5597 } |
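The rewritten EnsureSpaceForLazyDeopt above folds the bookkeeping into the helper: padding is still skipped for stubs, but last_lazy_deopt_pc_ is now recorded here unconditionally instead of at each call site (the old version returned early for stubs without touching it). A self-contained sketch of the padding arithmetic, with a hypothetical FakeMasm and 4 bytes standing in for Assembler::kInstrSize:

    #include <cassert>
    #include <cstdio>
    #include <vector>

    constexpr int kInstrSize = 4;  // fixed-width ARM instruction size

    struct FakeMasm {
      std::vector<int> instrs;
      int pc_offset() const { return static_cast<int>(instrs.size()) * kInstrSize; }
      void nop() { instrs.push_back(0); }  // placeholder for __ nop()
    };

    // Mirrors the NEW column above: pad with nops until at least space_needed
    // bytes separate us from the previous lazy-deopt point, then record the
    // current pc as the new one (done unconditionally, even for stubs).
    void EnsureSpaceForLazyDeopt(FakeMasm* masm, int* last_lazy_deopt_pc,
                                 int space_needed, bool is_stub) {
      if (!is_stub) {
        int current_pc = masm->pc_offset();
        if (current_pc < *last_lazy_deopt_pc + space_needed) {
          int padding_size = *last_lazy_deopt_pc + space_needed - current_pc;
          assert(padding_size % kInstrSize == 0);
          while (padding_size > 0) {
            masm->nop();
            padding_size -= kInstrSize;
          }
        }
      }
      *last_lazy_deopt_pc = masm->pc_offset();
    }

    int main() {
      FakeMasm masm;
      int last_lazy_deopt_pc = 0;
      masm.nop();  // pretend one instruction follows the previous deopt point
      EnsureSpaceForLazyDeopt(&masm, &last_lazy_deopt_pc,
                              /*space_needed=*/3 * kInstrSize, /*is_stub=*/false);
      printf("pc=%d last=%d\n", masm.pc_offset(), last_lazy_deopt_pc);  // 12 12
    }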
| 5597 | 5598 |
| 5598 | 5599 |
| 5599 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5600 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 5600 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5601 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5601 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5602 ASSERT(instr->HasEnvironment()); | 5602 ASSERT(instr->HasEnvironment()); |
| 5603 LEnvironment* env = instr->environment(); | 5603 LEnvironment* env = instr->environment(); |
| 5604 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5604 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5605 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5605 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5606 } | 5606 } |
| 5607 | 5607 |
| 5608 | 5608 |
| 5609 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5609 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 5610 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5610 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
| 5611 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5611 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
| (...skipping 54 matching lines...) |
| 5666 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5666 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 5667 __ cmp(sp, Operand(ip)); | 5667 __ cmp(sp, Operand(ip)); |
| 5668 __ b(hs, &done); | 5668 __ b(hs, &done); |
| 5669 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); | 5669 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); |
| 5670 ASSERT(instr->context()->IsRegister()); | 5670 ASSERT(instr->context()->IsRegister()); |
| 5671 ASSERT(ToRegister(instr->context()).is(cp)); | 5671 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5672 CallCode(isolate()->builtins()->StackCheck(), | 5672 CallCode(isolate()->builtins()->StackCheck(), |
| 5673 RelocInfo::CODE_TARGET, | 5673 RelocInfo::CODE_TARGET, |
| 5674 instr); | 5674 instr); |
| 5675 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5675 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5676 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5677 __ bind(&done); | 5676 __ bind(&done); |
| 5678 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5677 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5679 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5678 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5680 } else { | 5679 } else { |
| 5681 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5680 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| 5682 // Perform stack overflow check if this goto needs it before jumping. | 5681 // Perform stack overflow check if this goto needs it before jumping. |
| 5683 DeferredStackCheck* deferred_stack_check = | 5682 DeferredStackCheck* deferred_stack_check = |
| 5684 new(zone()) DeferredStackCheck(this, instr); | 5683 new(zone()) DeferredStackCheck(this, instr); |
| 5685 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5684 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 5686 __ cmp(sp, Operand(ip)); | 5685 __ cmp(sp, Operand(ip)); |
| 5687 __ b(lo, deferred_stack_check->entry()); | 5686 __ b(lo, deferred_stack_check->entry()); |
| 5688 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5687 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5689 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5690 __ bind(instr->done_label()); | 5688 __ bind(instr->done_label()); |
| 5691 deferred_stack_check->SetExit(instr->done_label()); | 5689 deferred_stack_check->SetExit(instr->done_label()); |
| 5692 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5690 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5693 // Don't record a deoptimization index for the safepoint here. | 5691 // Don't record a deoptimization index for the safepoint here. |
| 5694 // This will be done explicitly when emitting call and the safepoint in | 5692 // This will be done explicitly when emitting call and the safepoint in |
| 5695 // the deferred code. | 5693 // the deferred code. |
| 5696 } | 5694 } |
| 5697 } | 5695 } |
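With the update moved into the helper, the call sites above (DoLazyBailout and both DoStackCheck branches) shrink to a single EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()) call; the manual last_lazy_deopt_pc_ = masm()->pc_offset() follow-ups are deleted, so the invariant can no longer be broken by a forgotten second line. A hypothetical call-site sketch, with kPatchSize standing in for Deoptimizer::patch_size():

    #include <cassert>

    constexpr int kInstrSize = 4;
    constexpr int kPatchSize = 3 * kInstrSize;  // assumed patch size

    // Minimal stand-in for the helper: pads to the patch distance and
    // records the new lazy-deopt pc itself.
    int EnsureSpaceForLazyDeopt(int pc, int* last_lazy_deopt_pc) {
      if (pc < *last_lazy_deopt_pc + kPatchSize) pc = *last_lazy_deopt_pc + kPatchSize;
      *last_lazy_deopt_pc = pc;
      return pc;
    }

    int main() {
      int last = 0;
      int pc = kInstrSize;  // some code emitted since the previous deopt point
      // A DoLazyBailout-style site is now one call; the old explicit
      // "last_lazy_deopt_pc_ = masm()->pc_offset();" follow-up is gone.
      pc = EnsureSpaceForLazyDeopt(pc, &last);
      assert(pc == last && pc >= kPatchSize);
      return 0;
    }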
| 5698 | 5696 |
| 5699 | 5697 |
| (...skipping 102 matching lines...) |
| 5802 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5800 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
| 5803 __ ldr(result, FieldMemOperand(scratch, | 5801 __ ldr(result, FieldMemOperand(scratch, |
| 5804 FixedArray::kHeaderSize - kPointerSize)); | 5802 FixedArray::kHeaderSize - kPointerSize)); |
| 5805 __ bind(&done); | 5803 __ bind(&done); |
| 5806 } | 5804 } |
| 5807 | 5805 |
| 5808 | 5806 |
| 5809 #undef __ | 5807 #undef __ |
| 5810 | 5808 |
| 5811 } } // namespace v8::internal | 5809 } } // namespace v8::internal |