OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 58 matching lines...) |
69 GenerateJumpTable() && | 69 GenerateJumpTable() && |
70 GenerateSafepointTable(); | 70 GenerateSafepointTable(); |
71 } | 71 } |
72 | 72 |
73 | 73 |
74 void LCodeGen::FinishCode(Handle<Code> code) { | 74 void LCodeGen::FinishCode(Handle<Code> code) { |
75 DCHECK(is_done()); | 75 DCHECK(is_done()); |
76 code->set_stack_slots(GetStackSlotCount()); | 76 code->set_stack_slots(GetStackSlotCount()); |
77 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 77 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
78 PopulateDeoptimizationData(code); | 78 PopulateDeoptimizationData(code); |
79 if (!info()->IsStub()) { | 79 if (info()->ShouldEnsureSpaceForLazyDeopt()) { |
80 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); | 80 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); |
81 } | 81 } |
82 } | 82 } |
83 | 83 |
84 | 84 |
85 #ifdef _MSC_VER | 85 #ifdef _MSC_VER |
86 void LCodeGen::MakeSureStackPagesMapped(int offset) { | 86 void LCodeGen::MakeSureStackPagesMapped(int offset) { |
87 const int kPageSize = 4 * KB; | 87 const int kPageSize = 4 * KB; |
88 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { | 88 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { |
89 __ mov(Operand(esp, offset), eax); | 89 __ mov(Operand(esp, offset), eax); |
(...skipping 399 matching lines...) |
489 | 489 |
490 // Deferred code is the last part of the instruction sequence. Mark | 490 // Deferred code is the last part of the instruction sequence. Mark |
491 // the generated code as done unless we bailed out. | 491 // the generated code as done unless we bailed out. |
492 if (!is_aborted()) status_ = DONE; | 492 if (!is_aborted()) status_ = DONE; |
493 return !is_aborted(); | 493 return !is_aborted(); |
494 } | 494 } |
495 | 495 |
496 | 496 |
497 bool LCodeGen::GenerateSafepointTable() { | 497 bool LCodeGen::GenerateSafepointTable() { |
498 DCHECK(is_done()); | 498 DCHECK(is_done()); |
499 if (!info()->IsStub()) { | 499 if (info()->ShouldEnsureSpaceForLazyDeopt()) { |
500 // For lazy deoptimization we need space to patch a call after every call. | 500 // For lazy deoptimization we need space to patch a call after every call. |
501 // Ensure there is always space for such patching, even if the code ends | 501 // Ensure there is always space for such patching, even if the code ends |
502 // in a call. | 502 // in a call. |
503 int target_offset = masm()->pc_offset() + Deoptimizer::patch_size(); | 503 int target_offset = masm()->pc_offset() + Deoptimizer::patch_size(); |
504 while (masm()->pc_offset() < target_offset) { | 504 while (masm()->pc_offset() < target_offset) { |
505 masm()->nop(); | 505 masm()->nop(); |
506 } | 506 } |
507 } | 507 } |
508 safepoints_.Emit(masm(), GetStackSlotCount()); | 508 safepoints_.Emit(masm(), GetStackSlotCount()); |
509 return !is_aborted(); | 509 return !is_aborted(); |
(...skipping 308 matching lines...) |
818 } | 818 } |
819 | 819 |
820 | 820 |
821 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, | 821 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, |
822 Deoptimizer::DeoptReason deopt_reason, | 822 Deoptimizer::DeoptReason deopt_reason, |
823 Deoptimizer::BailoutType bailout_type) { | 823 Deoptimizer::BailoutType bailout_type) { |
824 LEnvironment* environment = instr->environment(); | 824 LEnvironment* environment = instr->environment(); |
825 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 825 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
826 DCHECK(environment->HasBeenRegistered()); | 826 DCHECK(environment->HasBeenRegistered()); |
827 int id = environment->deoptimization_index(); | 827 int id = environment->deoptimization_index(); |
828 DCHECK(info()->IsOptimizing() || info()->IsStub()); | |
829 Address entry = | 828 Address entry = |
830 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 829 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
831 if (entry == NULL) { | 830 if (entry == NULL) { |
832 Abort(kBailoutWasNotPrepared); | 831 Abort(kBailoutWasNotPrepared); |
833 return; | 832 return; |
834 } | 833 } |
835 | 834 |
836 if (DeoptEveryNTimes()) { | 835 if (DeoptEveryNTimes()) { |
837 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); | 836 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); |
838 Label no_deopt; | 837 Label no_deopt; |
(...skipping 4640 matching lines...) |
5479 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); | 5478 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); |
5480 | 5479 |
5481 // Check the marker in the calling frame. | 5480 // Check the marker in the calling frame. |
5482 __ bind(&check_frame_marker); | 5481 __ bind(&check_frame_marker); |
5483 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 5482 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), |
5484 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); | 5483 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); |
5485 } | 5484 } |
5486 | 5485 |
5487 | 5486 |
5488 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { | 5487 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { |
5489 if (!info()->IsStub()) { | 5488 if (info()->ShouldEnsureSpaceForLazyDeopt()) { |
5490 // Ensure that we have enough space after the previous lazy-bailout | 5489 // Ensure that we have enough space after the previous lazy-bailout |
5491 // instruction for patching the code here. | 5490 // instruction for patching the code here. |
5492 int current_pc = masm()->pc_offset(); | 5491 int current_pc = masm()->pc_offset(); |
5493 if (current_pc < last_lazy_deopt_pc_ + space_needed) { | 5492 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
5494 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 5493 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
5495 __ Nop(padding_size); | 5494 __ Nop(padding_size); |
5496 } | 5495 } |
5497 } | 5496 } |
5498 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5497 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5499 } | 5498 } |
(...skipping 250 matching lines...) |
5750 RecordSafepoint(Safepoint::kNoLazyDeopt); | 5749 RecordSafepoint(Safepoint::kNoLazyDeopt); |
5751 } | 5750 } |
5752 | 5751 |
5753 | 5752 |
5754 #undef __ | 5753 #undef __ |
5755 | 5754 |
5756 } // namespace internal | 5755 } // namespace internal |
5757 } // namespace v8 | 5756 } // namespace v8 |
5758 | 5757 |
5759 #endif // V8_TARGET_ARCH_IA32 | 5758 #endif // V8_TARGET_ARCH_IA32 |
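
Editor's note on the hunks above: each one replaces the !info()->IsStub() guard with info()->ShouldEnsureSpaceForLazyDeopt(), so the nop padding that reserves room to patch in a lazy-deoptimization call is controlled by a single predicate on the compilation info instead of the stub check. The standalone C++ sketch below models only that padding behavior; it is not V8 code. FakeAssembler, kPatchSize, and the should_ensure_space parameter are hypothetical stand-ins for MacroAssembler, Deoptimizer::patch_size(), and the new predicate.

// pad_for_lazy_deopt.cc -- illustrative sketch only, not V8 code.
#include <cstdio>
#include <vector>

namespace {

// Assumed size (in bytes) of the call that would be patched in later.
constexpr int kPatchSize = 5;

// Minimal stand-in for an assembler: it only tracks emitted bytes.
class FakeAssembler {
 public:
  int pc_offset() const { return static_cast<int>(buffer_.size()); }
  void nop() { buffer_.push_back(0x90); }  // single-byte x86 nop

 private:
  std::vector<unsigned char> buffer_;
};

// Mirrors the guarded padding pattern from EnsureSpaceForLazyDeopt():
// if patching space is required and the previous lazy-deopt site is too
// close, emit nops until there is room for a kPatchSize-byte call.
void EnsureSpaceForLazyDeopt(FakeAssembler* masm, bool should_ensure_space,
                             int* last_lazy_deopt_pc, int space_needed) {
  if (should_ensure_space) {
    int current_pc = masm->pc_offset();
    if (current_pc < *last_lazy_deopt_pc + space_needed) {
      int padding = *last_lazy_deopt_pc + space_needed - current_pc;
      for (int i = 0; i < padding; ++i) masm->nop();
    }
  }
  *last_lazy_deopt_pc = masm->pc_offset();
}

}  // namespace

int main() {
  FakeAssembler masm;
  int last_lazy_deopt_pc = 0;

  masm.nop();  // only one byte emitted since the previous lazy-deopt site
  EnsureSpaceForLazyDeopt(&masm, /*should_ensure_space=*/true,
                          &last_lazy_deopt_pc, kPatchSize);
  // Four padding nops were added, so a 5-byte call can be patched in later.
  std::printf("pc after padding: %d (needs >= %d)\n", masm.pc_offset(),
              kPatchSize);
  return 0;
}

With should_ensure_space set to false the function only advances last_lazy_deopt_pc, which is the behavior the diff selects for code where lazy-deopt patching is never needed.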