Index: src/compiler/arm64/code-generator-arm64.cc |
diff --git a/src/compiler/arm64/code-generator-arm64.cc b/src/compiler/arm64/code-generator-arm64.cc |
index c8d5f26d0dc6b3303623a9343a0cab8d69dd5405..c9c2686677d8c342d334b1dd7cca876f85dbbf9b 100644 |
--- a/src/compiler/arm64/code-generator-arm64.cc |
+++ b/src/compiler/arm64/code-generator-arm64.cc |
@@ -137,6 +137,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
break; |
} |
case kArchCallCodeObject: { |
+ EnsureSpaceForLazyDeopt(); |
if (instr->InputAt(0)->IsImmediate()) { |
__ Call(Handle<Code>::cast(i.InputHeapObject(0)), |
RelocInfo::CODE_TARGET); |
@@ -149,6 +150,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
break; |
} |
case kArchCallJSFunction: { |
+ EnsureSpaceForLazyDeopt(); |
Register func = i.InputRegister(0); |
if (FLAG_debug_code) { |
// Check the function's context matches the context argument. |
@@ -854,6 +856,29 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source, |
void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); } |
+ |
+void CodeGenerator::EnsureSpaceForLazyDeopt() { |
+ int space_needed = Deoptimizer::patch_size(); |
+ if (!linkage()->info()->IsStub()) { |
+ // Ensure that we have enough space after the previous lazy-bailout |
+ // instruction for patching the code here. |
+ intptr_t current_pc = masm()->pc_offset(); |
+ |
+ if (current_pc < (last_lazy_deopt_pc_ + space_needed)) { |
+ int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
Benedikt Meurer
2014/09/22 07:23:37
ptrdiff_t is wrong here, should be int.
|
+ DCHECK((padding_size % kInstructionSize) == 0); |
+ InstructionAccurateScope instruction_accurate( |
+ masm(), padding_size / kInstructionSize); |
+ |
+ while (padding_size > 0) { |
+ __ nop(); |
+ padding_size -= kInstructionSize; |
+ } |
+ } |
+ } |
+ MarkLazyDeoptSite(); |
+} |
+ |
#undef __ |
} // namespace compiler |