Index: src/compiler/arm64/code-generator-arm64.cc
diff --git a/src/compiler/arm64/code-generator-arm64.cc b/src/compiler/arm64/code-generator-arm64.cc
index ffadd35c78cdd914363f09af42a2bbe9aeb9bfcf..4a9893f3b742b1d405bf581b936c159bb684679a 100644
--- a/src/compiler/arm64/code-generator-arm64.cc
+++ b/src/compiler/arm64/code-generator-arm64.cc
@@ -132,6 +132,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   InstructionCode opcode = instr->opcode();
   switch (ArchOpcodeField::decode(opcode)) {
     case kArchCallCodeObject: {
+      EnsureSpaceForLazyDeopt();
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                 RelocInfo::CODE_TARGET);
@@ -144,6 +145,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchCallJSFunction: {
+      EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
@@ -844,6 +846,29 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,
 
 void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); }
 
+
+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+  int space_needed = Deoptimizer::patch_size();
+  if (!linkage()->info()->IsStub()) {
+    // Ensure that we have enough space after the previous lazy-bailout
+    // instruction for patching the code here.
+    intptr_t current_pc = masm()->pc_offset();
+
+    if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
+      intptr_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+      DCHECK((padding_size % kInstructionSize) == 0);
+      InstructionAccurateScope instruction_accurate(
+          masm(), padding_size / kInstructionSize);
+
+      while (padding_size > 0) {
+        __ nop();
+        padding_size -= kInstructionSize;
+      }
+    }
+  }
+  MarkLazyDeoptSite();
+}
+
 #undef __
 
 }  // namespace compiler
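
The added EnsureSpaceForLazyDeopt() pads with nops whenever the code emitted since the last lazy-deopt site is smaller than the deoptimizer's patch size, so that the call emitted at that site can later be patched without overwriting the instructions that follow. Below is a minimal standalone sketch of just that padding arithmetic; the constants and offsets (a 4-byte kInstructionSize and a hypothetical patch size) are assumed values for illustration, not V8's Deoptimizer or MacroAssembler APIs.

// Sketch of the nop-padding arithmetic from EnsureSpaceForLazyDeopt(),
// using assumed values rather than the real V8 constants and APIs.
#include <cassert>
#include <cstdio>

int main() {
  const int kInstructionSize = 4;                 // assumed: ARM64 instructions are 4 bytes
  const int space_needed = 4 * kInstructionSize;  // hypothetical Deoptimizer::patch_size()
  int last_lazy_deopt_pc = 100;                   // offset of the previous lazy-deopt site
  int current_pc = 108;                           // current offset in the generated code

  // Same check as the patch: is there room to patch a call at the previous
  // lazy-deopt site without it overlapping the code about to be emitted here?
  if (current_pc < last_lazy_deopt_pc + space_needed) {
    int padding_size = last_lazy_deopt_pc + space_needed - current_pc;
    assert(padding_size % kInstructionSize == 0);
    // The patch emits one nop per remaining instruction slot; here we just count them.
    printf("would emit %d nop(s)\n", padding_size / kInstructionSize);
  }
  return 0;
}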