Index: src/compiler/arm64/code-generator-arm64.cc
diff --git a/src/compiler/arm64/code-generator-arm64.cc b/src/compiler/arm64/code-generator-arm64.cc
index 511e08c02d9df37c538a2862580a52f60a04547b..9c3483c3c0d73abb7e096dcf8837264dd51b9552 100644
--- a/src/compiler/arm64/code-generator-arm64.cc
+++ b/src/compiler/arm64/code-generator-arm64.cc
@@ -207,6 +207,15 @@ class Arm64OperandConverter final : public InstructionOperandConverter {
     DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
     FrameOffset offset =
         linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame());
+    if (offset.from_frame_pointer()) {
+      int from_sp =
+          offset.offset() + (frame()->GetSpToFpSlotCount() * kPointerSize);
+      // Convert FP-offsets to SP-offsets if it results in better code.
+      if (Assembler::IsImmLSUnscaled(from_sp) ||
+          Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) {
+        offset = FrameOffset::FromStackPointer(from_sp);
+      }
+    }
     return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp,
                       offset.offset());
   }
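
Why the SP-relative form can give better code: spill slots sit below fp, so their
fp-relative byte offsets are typically negative, and on ARM64 a negative offset
only fits the signed 9-bit unscaled (LDUR/STUR) form, while a non-negative
offset that is a multiple of the access size can use the unsigned 12-bit scaled
form. A minimal self-contained sketch of the fit test, assuming 8-byte
pointers; the predicates model, but do not reuse, Assembler::IsImmLSUnscaled
and IsImmLSScaled:

#include <cstdint>

constexpr int64_t kPointerSizeModel = 8;  // assumption: 64-bit pointers

// LDUR/STUR: signed 9-bit byte offset.
bool FitsUnscaled(int64_t offset) { return offset >= -256 && offset <= 255; }

// LDR/STR (doubleword): unsigned 12-bit offset in units of 8 bytes.
bool FitsScaledDoubleWord(int64_t offset) {
  return offset >= 0 && offset <= 4095 * 8 && offset % 8 == 0;
}

// Mirrors the decision in the hunk above: rebase to SP when the rebased
// offset still fits a single load/store instruction.
bool PreferSpRelative(int64_t fp_offset, int64_t sp_to_fp_slots) {
  const int64_t from_sp = fp_offset + sp_to_fp_slots * kPointerSizeModel;
  return FitsUnscaled(from_sp) || FitsScaledDoubleWord(from_sp);
}
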
@@ -431,6 +440,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
         __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
         __ Call(target);
       }
+      frame()->ClearOutgoingParameterSlots();
       RecordCallPosition(instr);
       break;
     }
@@ -444,6 +454,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
         __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
         __ Jump(target);
       }
+      frame()->ClearOutgoingParameterSlots();
       break;
     }
     case kArchCallJSFunction: {
@@ -459,6 +470,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       }
       __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
       __ Call(x10);
+      frame()->ClearOutgoingParameterSlots();
       RecordCallPosition(instr);
       break;
     }
@@ -475,6 +487,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       AssembleDeconstructActivationRecord();
       __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
       __ Jump(x10);
+      frame()->ClearOutgoingParameterSlots();
       break;
     }
     case kArchLazyBailout: {
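
Each call and tail call above now resets the frame's count of outgoing
parameter slots once the arguments have been consumed. A sketch of the
bookkeeping these hunks assume; FrameModel is an illustrative stand-in, not
V8's Frame class:

class FrameModel {
 public:
  // Raised when kArm64ClaimForCallArguments reserves slots (see below).
  void AllocateOutgoingParameterSlots(int count) { outgoing_slots_ += count; }
  // Reset after the call, once the argument slots are no longer live, so
  // the SP-to-FP distance used by the offset conversion in the first hunk
  // stays accurate.
  void ClearOutgoingParameterSlots() { outgoing_slots_ = 0; }
  int GetOutgoingParameterSlotCount() const { return outgoing_slots_; }

 private:
  int outgoing_slots_ = 0;
};
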
@@ -497,6 +510,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
         Register func = i.InputRegister(0);
         __ CallCFunction(func, num_parameters, 0);
       }
+      // CallCFunction only supports register arguments, so we never need to
+      // call frame()->ClearOutgoingParameterSlots() here.
+      DCHECK(frame()->GetOutgoingParameterSlotCount() == 0);
       break;
     }
     case kArchJmp:
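
In terms of the FrameModel sketch above, the added DCHECK pins down the
invariant: no claim runs before a C call, because the arguments all travel in
registers, so nothing has raised the count:

#include <cassert>

void CheckNoOutgoingStackArgs(const FrameModel& frame) {
  // Mirrors the DCHECK above: register-only arguments mean no slots were
  // ever allocated for this call.
  assert(frame.GetOutgoingParameterSlotCount() == 0);
}
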
@@ -760,8 +776,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
     case kArm64CompareAndBranch32:
       // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
       break;
-    case kArm64Claim: {
+    case kArm64ClaimForCallArguments: {
       __ Claim(i.InputInt32(0));
+      frame()->AllocateOutgoingParameterSlots(i.InputInt32(0));
       break;
     }
     case kArm64Poke: {
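
The rename makes the pairing explicit: a claim for call arguments both moves
the stack pointer and records the slot count, and every such claim is matched
by a clear after the call. A hypothetical emit sequence in terms of the
FrameModel sketch; the assembler lines are elided as comments:

void EmitCallWithTwoStackArgs(FrameModel* frame) {
  const int arg_slots = 2;               // example value
  // __ Claim(arg_slots);                // sp -= arg_slots * kPointerSize
  frame->AllocateOutgoingParameterSlots(arg_slots);
  // __ Poke(...);                       // store each outgoing argument
  // __ Call(target);
  frame->ClearOutgoingParameterSlots();  // the slots are dead again
}
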
@@ -1169,13 +1186,6 @@
 }
 
 
-// TODO(dcarney): increase stack slots in frame once before first use.
-static int AlignedStackSlots(int stack_slots) {
-  if (stack_slots & 1) stack_slots++;
-  return stack_slots;
-}
-
-
 void CodeGenerator::AssemblePrologue() {
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
   if (descriptor->kind() == CallDescriptor::kCallAddress) {
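
The AlignedStackSlots helper can go because alignment is now handled once per
frame, in the prologue hunk below: when csp is the stack pointer it must stay
16-byte aligned at call boundaries, so with 8-byte slots the frame pads the
claimed area to an even slot count up front instead of rounding every claim. A
minimal model of that padding; PadToEvenSlots is a stand-in for
frame()->AlignSavedCalleeRegisterSlots(), which is assumed to return the
number of pad slots:

// 16-byte alignment with 8-byte slots means an even total slot count, so
// the pad is one slot exactly when the count so far is odd.
int PadToEvenSlots(int slot_count) { return slot_count & 1; }

// The prologue below then shrinks the stack in a single step:
//   stack_shrink_slots += PadToEvenSlots(...);  // only when on csp
//   __ Claim(stack_shrink_slots);               // sp -= slots * 8
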
@@ -1209,13 +1219,12 @@
     stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
   }
 
-  if (stack_shrink_slots > 0) {
-    Register sp = __ StackPointer();
-    if (!sp.Is(csp)) {
-      __ Sub(sp, sp, stack_shrink_slots * kPointerSize);
-    }
-    __ Sub(csp, csp, AlignedStackSlots(stack_shrink_slots) * kPointerSize);
+  if (csp.Is(masm()->StackPointer())) {
+    // The system stack pointer requires 16-byte alignment at function call
+    // boundaries.
+    stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots();
   }
+  __ Claim(stack_shrink_slots);
 
   // Save FP registers.
   CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,