Index: src/compiler/arm/code-generator-arm.cc
diff --git a/src/compiler/arm/code-generator-arm.cc b/src/compiler/arm/code-generator-arm.cc
index aa004e0ecdf8be0a4a3dd1e23de9058373444dc4..c995d11b1a96a09efcf38daec6778dafecbf668f 100644
--- a/src/compiler/arm/code-generator-arm.cc
+++ b/src/compiler/arm/code-generator-arm.cc
@@ -137,6 +137,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
 
   switch (ArchOpcodeField::decode(instr->opcode())) {
     case kArchCallCodeObject: {
+      EnsureSpaceForLazyDeopt();
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                 RelocInfo::CODE_TARGET);
@@ -150,6 +151,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchCallJSFunction: {
+      EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
@@ -843,6 +845,27 @@ void CodeGenerator::AddNopForSmiCodeInlining() {
   // On 32-bit ARM we do not insert nops for inlined Smi code.
 }
 
+
+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+  int space_needed = Deoptimizer::patch_size();
+  if (!linkage()->info()->IsStub()) {
+    // Ensure that we have enough space after the previous lazy-bailout
+    // instruction for patching the code here.
+    int current_pc = masm()->pc_offset();
+    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+      // Block literal pool emission for duration of padding.
+      v8::internal::Assembler::BlockConstPoolScope block_const_pool(masm());
+      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+      DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
+      while (padding_size > 0) {
+        __ nop();
+        padding_size -= v8::internal::Assembler::kInstrSize;
+      }
+    }
+  }
+  MarkLazyDeoptSite();
+}
+
 #undef __
 
 }  // namespace compiler
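
Note on the padding added above: EnsureSpaceForLazyDeopt() guarantees that at least Deoptimizer::patch_size() bytes separate the previous lazy-deopt site from the current call, emitting 4-byte ARM nops while constant-pool emission is blocked. The sketch below is a minimal standalone illustration of that arithmetic only, not V8 code; NopsNeeded(), the local kInstrSize constant, and the example offsets are hypothetical stand-ins.

// Standalone sketch of the padding rule used by EnsureSpaceForLazyDeopt(),
// reduced to plain arithmetic. Names and values here are illustrative only.
#include <cassert>
#include <cstdio>

namespace {

constexpr int kInstrSize = 4;  // Every ARM instruction is 4 bytes wide.

// Returns how many nops must be emitted so that at least patch_size bytes
// separate the previous lazy-deopt site from the current pc offset.
int NopsNeeded(int last_lazy_deopt_pc, int current_pc, int patch_size) {
  if (current_pc >= last_lazy_deopt_pc + patch_size) return 0;  // Enough room.
  int padding_size = last_lazy_deopt_pc + patch_size - current_pc;
  assert(padding_size % kInstrSize == 0);  // Whole number of instructions.
  return padding_size / kInstrSize;
}

}  // namespace

int main() {
  // Example: previous lazy-deopt site at offset 120, current pc at 128, and
  // the deoptimizer needs 16 bytes to patch in a call: two nops are required.
  std::printf("nops needed: %d\n", NopsNeeded(120, 128, 16));
  return 0;
}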