Chromium Code Reviews

Index: src/arm64/lithium-codegen-arm64.cc
diff --git a/src/arm64/lithium-codegen-arm64.cc b/src/arm64/lithium-codegen-arm64.cc
index ddc8731c9ff96ad767d5723d7108b7c67b60b2af..196a5400c38eab21051b53618454e00495f2dea4 100644
--- a/src/arm64/lithium-codegen-arm64.cc
+++ b/src/arm64/lithium-codegen-arm64.cc
@@ -864,55 +864,55 @@ bool LCodeGen::GenerateJumpTable() {
       // address and add an immediate offset.
       __ Mov(entry_offset, entry - base);
-      // The last entry can fall through into `call_deopt_entry`, avoiding a
-      // branch.
-      bool last_entry = (i + 1) == length;
-
       if (table_entry->needs_frame) {
         DCHECK(!info()->saves_caller_doubles());
-        if (!needs_frame.is_bound()) {
-          // This variant of deopt can only be used with stubs. Since we don't
-          // have a function pointer to install in the stack frame that we're
-          // building, install a special marker there instead.
-          DCHECK(info()->IsStub());
-
-          UseScratchRegisterScope temps(masm());
-          Register stub_marker = temps.AcquireX();
-          __ Bind(&needs_frame);
-          __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB));
-          __ Push(lr, fp, cp, stub_marker);
-          __ Add(fp, __ StackPointer(), 2 * kPointerSize);
-          if (!last_entry) __ B(&call_deopt_entry);
-        } else {
-          // Reuse the existing needs_frame code.
-          __ B(&needs_frame);
-        }
+        // Save lr before Bl, fp will be adjusted in the needs_frame code.
+        __ Push(lr, fp, cp);

jbramley, 2015/03/10 13:11:18:
    We can save some space here by pushing (lr, fp) he
loislo, 2015/03/10 14:42:18:
    I'll do this in https://codereview.chromium.org/99

+        // Reuse the existing needs_frame code.
+        __ Bl(&needs_frame);
       } else if (info()->saves_caller_doubles()) {
         DCHECK(info()->IsStub());
-        if (!restore_caller_doubles.is_bound()) {
-          __ Bind(&restore_caller_doubles);
-          RestoreCallerDoubles();
-          if (!last_entry) __ B(&call_deopt_entry);
-        } else {
-          // Reuse the existing restore_caller_doubles code.
-          __ B(&restore_caller_doubles);
-        }
+        // Reuse the existing restore_caller_doubles code.
+        __ Bl(&restore_caller_doubles);
       } else {
         // There is nothing special to do, so just continue to the second-level
         // table.
-        if (!last_entry) __ B(&call_deopt_entry);
+        __ Bl(&call_deopt_entry);
       }
+      bool last_entry = (i + 1) == length;
       masm()->CheckConstPool(false, last_entry);
     }
+    if (needs_frame.is_linked()) {
+      // This variant of deopt can only be used with stubs. Since we don't
+      // have a function pointer to install in the stack frame that we're
+      // building, install a special marker there instead.
+      DCHECK(info()->IsStub());
+
+      UseScratchRegisterScope temps(masm());
+      Register stub_marker = temps.AcquireX();
+      __ Bind(&needs_frame);
+      __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB));
+      __ Push(stub_marker);
+      __ Add(fp, __ StackPointer(), 2 * kPointerSize);
+      if (restore_caller_doubles.is_linked()) {
+        __ B(&call_deopt_entry);
+      }
+    }
+
+    if (restore_caller_doubles.is_linked()) {
+      __ Bind(&restore_caller_doubles);
+      RestoreCallerDoubles();
+    }
+
     // Generate common code for calling the second-level deopt table.
     Register deopt_entry = temps.AcquireX();
     __ Bind(&call_deopt_entry);
     __ Mov(deopt_entry, Operand(reinterpret_cast<uint64_t>(base),
                                 RelocInfo::RUNTIME_ENTRY));
     __ Add(deopt_entry, deopt_entry, entry_offset);
-    __ Call(deopt_entry);
+    __ Br(deopt_entry);
   }

   // Force constant pool emission at the end of the deopt jump table to make
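
This hunk restructures the deopt jump table: instead of binding the needs_frame and restore_caller_doubles snippets inline at the first entry that uses them (and letting the last entry fall through into call_deopt_entry), every entry now only loads its offset and branch-links (Bl) to shared code emitted once after the loop, and the common tail ends with a plain Br into the second-level table. Below is a minimal standalone C++ sketch of that control-flow shape only; it is not V8 MacroAssembler code, and kBase and kEntryStride are made-up illustrative values. The per-entry Push(lr, fp, cp) is omitted.

// Minimal standalone sketch (plain C++, not V8 code) of the control flow the
// patched loop emits: small per-entry code, one shared frame-building block,
// one shared base+offset computation.
#include <cstdint>
#include <cstdio>

namespace {

constexpr std::uintptr_t kBase = 0x1000;  // assumed start of the second-level deopt table

// Common tail bound at call_deopt_entry: materialise the table base once and
// add the per-entry offset before the final branch (Mov + Add + Br).
void CallDeoptEntry(std::uintptr_t entry_offset) {
  std::uintptr_t deopt_entry = kBase + entry_offset;
  std::printf("branch to deopt entry at 0x%llx\n",
              static_cast<unsigned long long>(deopt_entry));
}

// Shared needs_frame code, emitted once after the loop: install the STUB
// marker and frame pointer, then continue to the common tail.
void NeedsFrame(std::uintptr_t entry_offset) {
  std::puts("push STUB marker, set fp");
  CallDeoptEntry(entry_offset);
}

}  // namespace

int main() {
  constexpr std::uintptr_t kEntryStride = 8;  // assumed size of one table entry
  for (std::uintptr_t i = 0; i < 3; ++i) {
    // Each jump-table entry only loads its own offset and branch-links (Bl)
    // to the shared code instead of duplicating it or relying on the last
    // entry falling through.
    NeedsFrame(i * kEntryStride);
  }
  return 0;
}

The point of the sketch is only the shape of the control flow: per-entry code stays short and constant-size, while the frame setup and the base-plus-offset computation each exist exactly once.
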
@@ -1152,7 +1152,7 @@ void LCodeGen::DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr,
 void LCodeGen::DeoptimizeIfNotHeapNumber(Register object, LInstruction* instr) {
   __ CompareObjectMap(object, Heap::kHeapNumberMapRootIndex);
-  DeoptimizeIf(ne, instr, Deoptimizer::kNotHeapNumber);
+  DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber);
 }
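
The second hunk only renames the bailout reason used by DeoptimizeIfNotHeapNumber from kNotHeapNumber to kNotAHeapNumber; the check itself is unchanged: compare the object's map against the heap-number map and deoptimize on ne. Below is a hypothetical standalone sketch of that check in plain C++; Map, HeapObject, and kHeapNumberMap are stand-ins, not V8 types.

// Hypothetical sketch (plain C++, not V8 code) of the check performed by
// DeoptimizeIfNotHeapNumber: compare the object's map against the canonical
// heap-number map and request deoptimization with the renamed reason.
#include <cstdio>

enum class DeoptReason { kNotAHeapNumber };

struct Map { int instance_type; };
struct HeapObject { const Map* map; };

constexpr Map kHeapNumberMap{1};  // stand-in for Heap::kHeapNumberMapRootIndex

// Returns true when the caller should deoptimize, reporting the reason.
bool DeoptimizeIfNotHeapNumber(const HeapObject& object, DeoptReason* reason) {
  if (object.map != &kHeapNumberMap) {  // CompareObjectMap(...); condition ne
    *reason = DeoptReason::kNotAHeapNumber;
    return true;
  }
  return false;
}

int main() {
  const Map string_map{2};
  const HeapObject number{&kHeapNumberMap};
  const HeapObject string{&string_map};
  DeoptReason reason;
  std::printf("number deopts: %d\n", DeoptimizeIfNotHeapNumber(number, &reason));
  std::printf("string deopts: %d\n", DeoptimizeIfNotHeapNumber(string, &reason));
  return 0;
}
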