Chromium Code Reviews

Unified Diff: src/compiler/arm64/code-generator-arm64.cc

Issue 1376173003: [arm64] Use SP-offset rather than FP-offset. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Rebase. Created 5 years, 1 month ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/compiler/code-generator.h"

 #include "src/arm64/frames-arm64.h"
 #include "src/arm64/macro-assembler-arm64.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/gap-resolver.h"
(...skipping 189 matching lines...)
     }
     UNREACHABLE();
     return Operand(-1);
   }

   MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const {
     DCHECK(op != NULL);
     DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
     FrameOffset offset =
         linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame());
+    if (offset.from_frame_pointer()) {
+      int from_sp =
+          offset.offset() + (frame()->GetSpToFpSlotCount() * kPointerSize);
+      // Convert FP-offsets to SP-offsets if it results in better code.
+      if (Assembler::IsImmLSUnscaled(from_sp) ||
+          Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) {
+        offset = FrameOffset::FromStackPointer(from_sp);
+      }
+    }
     return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp,
                       offset.offset());
   }
 };
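The rewrite added to ToMemOperand above is the heart of this change: a spill slot addressed relative to fp can instead be addressed relative to the stack pointer whenever the SP-relative immediate still fits an A64 load/store encoding. A minimal sketch of that arithmetic, assuming the usual A64 immediate forms (9-bit signed for unscaled LDUR/STUR, 12-bit unsigned scaled by 8 for double-word LDR/STR); the helper names here are invented for illustration, not taken from the assembler:

// Sketch only: mirrors the check above under assumed encoding limits.
#include <cstdint>

constexpr int kPointerSize = 8;  // 64-bit slots on arm64

bool FitsUnscaled(int64_t imm) {          // LDUR/STUR: 9-bit signed offset
  return imm >= -256 && imm <= 255;
}

bool FitsScaledDoubleWord(int64_t imm) {  // LDR/STR: 12-bit unsigned, scaled x8
  return imm >= 0 && imm % 8 == 0 && (imm / 8) < (1 << 12);
}

// Returns the SP-relative offset when it encodes directly, else keeps the
// FP-relative one. sp_to_fp_slots is the SP-to-FP distance in slots.
int64_t ChooseOffset(int64_t fp_offset, int sp_to_fp_slots, bool* use_sp) {
  int64_t from_sp = fp_offset + sp_to_fp_slots * kPointerSize;
  *use_sp = FitsUnscaled(from_sp) || FitsScaledDoubleWord(from_sp);
  return *use_sp ? from_sp : fp_offset;
}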


 namespace {

 class OutOfLineLoadNaN32 final : public OutOfLineCode {
  public:
(...skipping 204 matching lines...)
     case kArchCallCodeObject: {
       EnsureSpaceForLazyDeopt();
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                 RelocInfo::CODE_TARGET);
       } else {
         Register target = i.InputRegister(0);
         __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
         __ Call(target);
       }
+      frame()->ClearOutgoingParameterSlots();
       RecordCallPosition(instr);
       break;
     }
     case kArchTailCallCodeObject: {
       AssembleDeconstructActivationRecord();
       if (instr->InputAt(0)->IsImmediate()) {
         __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
                 RelocInfo::CODE_TARGET);
       } else {
         Register target = i.InputRegister(0);
         __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
         __ Jump(target);
       }
+      frame()->ClearOutgoingParameterSlots();
       break;
     }
     case kArchCallJSFunction: {
       EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
         UseScratchRegisterScope scope(masm());
         Register temp = scope.AcquireX();
         __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
         __ cmp(cp, temp);
         __ Assert(eq, kWrongFunctionContext);
       }
       __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
       __ Call(x10);
+      frame()->ClearOutgoingParameterSlots();
       RecordCallPosition(instr);
       break;
     }
     case kArchTailCallJSFunction: {
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
         UseScratchRegisterScope scope(masm());
         Register temp = scope.AcquireX();
         __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
         __ cmp(cp, temp);
         __ Assert(eq, kWrongFunctionContext);
       }
       AssembleDeconstructActivationRecord();
       __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
       __ Jump(x10);
+      frame()->ClearOutgoingParameterSlots();
       break;
     }
     case kArchLazyBailout: {
       EnsureSpaceForLazyDeopt();
       RecordCallPosition(instr);
       break;
     }
     case kArchPrepareCallCFunction:
       // We don't need kArchPrepareCallCFunction on arm64 as the instruction
       // selector already performs a Claim to reserve space on the stack and
       // guarantees correct alignment of the stack pointer.
       UNREACHABLE();
       break;
     case kArchCallCFunction: {
       int const num_parameters = MiscField::decode(instr->opcode());
       if (instr->InputAt(0)->IsImmediate()) {
         ExternalReference ref = i.InputExternalReference(0);
         __ CallCFunction(ref, num_parameters, 0);
       } else {
         Register func = i.InputRegister(0);
         __ CallCFunction(func, num_parameters, 0);
       }
+      // CallCFunction only supports register arguments so we never need to
+      // call frame()->ClearOutgoingParameterSlots() here.
+      DCHECK(frame()->GetOutgoingParameterSlotCount() == 0);
       break;
     }
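The frame()->ClearOutgoingParameterSlots() calls added after each call above are the other half of the bookkeeping: argument slots claimed for a call cease to exist once the call completes (or once we tail-call away), so the frame's SP-to-FP distance must shrink back accordingly. A hedged sketch of the interface these calls imply; the member names follow this diff, but the bodies and the composition of GetSpToFpSlotCount() are assumptions, not the real src/compiler/frame.h:

// Hypothetical shape of the frame bookkeeping used in this diff.
class FrameSketch {
 public:
  // kArm64ClaimForCallArguments records slots pushed for outgoing arguments.
  void AllocateOutgoingParameterSlots(int count) { outgoing_slots_ += count; }
  // Call sites reset the count once the callee has consumed the arguments.
  void ClearOutgoingParameterSlots() { outgoing_slots_ = 0; }
  int GetOutgoingParameterSlotCount() const { return outgoing_slots_; }
  // ToMemOperand adds this (in slots) to an FP-relative spill offset to form
  // the SP-relative one.
  int GetSpToFpSlotCount() const { return fixed_slots_ + outgoing_slots_; }

 private:
  int fixed_slots_ = 0;     // spill area + saved registers, set in the prologue
  int outgoing_slots_ = 0;  // nonzero only between a Claim and its call
};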
     case kArchJmp:
       AssembleArchJump(i.InputRpo(0));
       break;
     case kArchTableSwitch:
       AssembleArchTableSwitch(instr);
       break;
     case kArchLookupSwitch:
       AssembleArchLookupSwitch(instr);
(...skipping 243 matching lines...)
       __ Bfi(i.OutputRegister(), i.InputRegister(1), i.InputInt6(2),
              i.InputInt6(3));
       break;
     case kArm64TestAndBranch32:
     case kArm64TestAndBranch:
       // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch.
       break;
     case kArm64CompareAndBranch32:
       // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
       break;
-    case kArm64Claim: {
+    case kArm64ClaimForCallArguments: {
       __ Claim(i.InputInt32(0));
+      frame()->AllocateOutgoingParameterSlots(i.InputInt32(0));
       break;
     }
     case kArm64Poke: {
       Operand operand(i.InputInt32(1) * kPointerSize);
       __ Poke(i.InputRegister(0), operand);
       break;
     }
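Taken together, kArm64ClaimForCallArguments and kArm64Poke implement outgoing-argument setup: Claim moves the stack pointer down by the requested slot count (now also recording those slots in the frame), and Poke stores each argument at its SP-relative slot. An illustrative walk-through for a call with two stack arguments; the registers and encodings are invented for the example, not taken from real emitted code:

// Sketch: what the sequence amounts to for two 8-byte stack arguments.
//   Claim(2)                                    sub  sp, sp, #16
//   frame()->AllocateOutgoingParameterSlots(2)  // SP-to-FP distance +2 slots
//   Poke(x0, 0 * kPointerSize)                  str  x0, [sp, #0]
//   Poke(x1, 1 * kPointerSize)                  str  x1, [sp, #8]
//   <call>
//   frame()->ClearOutgoingParameterSlots()      // distance restored

While the two slots are outstanding, GetSpToFpSlotCount() grows by two, which is evidently how the SP-relative spill offsets computed in ToMemOperand stay correct across call setup.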
     case kArm64PokePair: {
       int slot = i.InputInt32(2) - 1;
       __ PokePair(i.InputRegister(1), i.InputRegister(0), slot * kPointerSize);
(...skipping 387 matching lines...)


 void CodeGenerator::AssembleDeoptimizerCall(
     int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
 }


-// TODO(dcarney): increase stack slots in frame once before first use.
-static int AlignedStackSlots(int stack_slots) {
-  if (stack_slots & 1) stack_slots++;
-  return stack_slots;
-}
-
-
 void CodeGenerator::AssemblePrologue() {
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
   if (descriptor->kind() == CallDescriptor::kCallAddress) {
     __ SetStackPointer(csp);
     __ Push(lr, fp);
     __ Mov(fp, csp);
   } else if (descriptor->IsJSFunctionCall()) {
     CompilationInfo* info = this->info();
     __ SetStackPointer(jssp);
     __ Prologue(info->IsCodePreAgingActive());
(...skipping 13 matching lines...)
     // frame is still on the stack. Optimized code uses OSR values directly
     // from the unoptimized frame. Thus, all that needs to be done is to
     // allocate the remaining stack slots.
     if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
     osr_pc_offset_ = __ pc_offset();
     // TODO(titzer): cannot address target function == local #-1
     __ ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
     stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
   }

-  if (stack_shrink_slots > 0) {
-    Register sp = __ StackPointer();
-    if (!sp.Is(csp)) {
-      __ Sub(sp, sp, stack_shrink_slots * kPointerSize);
-    }
-    __ Sub(csp, csp, AlignedStackSlots(stack_shrink_slots) * kPointerSize);
-  }
+  if (csp.Is(masm()->StackPointer())) {
+    // The system stack pointer requires 16-byte alignment at function call
+    // boundaries.
+    stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots();
+  }
+  __ Claim(stack_shrink_slots);
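This replaces the per-site alignment above, along with the deleted AlignedStackSlots() helper: when code runs on csp, the ABI requires the stack pointer to stay 16-byte aligned, so with 8-byte slots the claimed area must cover an even slot count. A sketch of the rounding AlignSavedCalleeRegisterSlots() is assumed to perform; the real helper lives in src/compiler/frame.h and its exact signature is not shown in this diff:

// Assumed behavior: pad the saved-register area to an even slot count and
// return the padding so the caller can claim it, as the new code above does.
int AlignSavedCalleeRegisterSlotsSketch(int* saved_register_slots) {
  int padding = *saved_register_slots & 1;  // 0 or 1 eight-byte slots
  *saved_register_slots += padding;         // e.g. 3 slots -> 4 slots (32 B)
  return padding;
}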

   // Save FP registers.
   CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                    descriptor->CalleeSavedFPRegisters());
   int saved_count = saves_fp.Count();
   if (saved_count != 0) {
     DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list());
     __ PushCPURegList(saves_fp);
     frame()->AllocateSavedCalleeRegisterSlots(saved_count *
                                               (kDoubleSize / kPointerSize));
(...skipping 231 matching lines...)
       padding_size -= kInstructionSize;
     }
   }
 }

 #undef __

 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
