OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" |
8 #include "src/arm64/macro-assembler-arm64.h" | 8 #include "src/arm64/macro-assembler-arm64.h" |
9 #include "src/compiler/code-generator-impl.h" | 9 #include "src/compiler/code-generator-impl.h" |
10 #include "src/compiler/gap-resolver.h" | 10 #include "src/compiler/gap-resolver.h" |
(...skipping 187 matching lines...)
198 UNREACHABLE(); // TODO(dcarney): RPO immediates on arm64. | 198 UNREACHABLE(); // TODO(dcarney): RPO immediates on arm64. |
199 break; | 199 break; |
200 } | 200 } |
201 UNREACHABLE(); | 201 UNREACHABLE(); |
202 return Operand(-1); | 202 return Operand(-1); |
203 } | 203 } |
204 | 204 |
205 MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const { | 205 MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const { |
206 DCHECK(op != NULL); | 206 DCHECK(op != NULL); |
207 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 207 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
208 FrameOffset offset = | 208 FrameOffset offset = linkage()->GetFrameOffset( |
209 linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame()); | 209 AllocatedOperand::cast(op)->index(), frame_access_state()); |
210 if (offset.from_frame_pointer()) { | 210 if (offset.from_frame_pointer()) { |
211 int from_sp = | 211 int from_sp = |
212 offset.offset() + (frame()->GetSpToFpSlotCount() * kPointerSize); | 212 offset.offset() + |
| 213 ((frame()->GetSpToFpSlotCount() + frame_access_state()->sp_delta()) * |
| 214 kPointerSize); |
213 // Convert FP-offsets to SP-offsets if it results in better code. | 215 // Convert FP-offsets to SP-offsets if it results in better code. |
214 if (Assembler::IsImmLSUnscaled(from_sp) || | 216 if (Assembler::IsImmLSUnscaled(from_sp) || |
215 Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) { | 217 Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) { |
216 offset = FrameOffset::FromStackPointer(from_sp); | 218 offset = FrameOffset::FromStackPointer(from_sp); |
217 } | 219 } |
218 } | 220 } |
219 return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp, | 221 return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp, |
220 offset.offset()); | 222 offset.offset()); |
221 } | 223 } |
222 }; | 224 }; |
(...skipping 247 matching lines...)
470 // Discard only slots that won't be used by new parameters. | 472 // Discard only slots that won't be used by new parameters. |
471 total_discarded_slots -= stack_param_delta; | 473 total_discarded_slots -= stack_param_delta; |
472 if (total_discarded_slots > 0) { | 474 if (total_discarded_slots > 0) { |
473 __ Add(jssp, jssp, Operand(total_discarded_slots * kPointerSize)); | 475 __ Add(jssp, jssp, Operand(total_discarded_slots * kPointerSize)); |
474 } | 476 } |
475 } else { | 477 } else { |
476 __ Mov(jssp, fp); | 478 __ Mov(jssp, fp); |
477 } | 479 } |
478 __ Pop(fp, lr); | 480 __ Pop(fp, lr); |
479 } | 481 } |
| 482 frame_access_state()->UseDefaultFrameAccess(); |
480 } | 483 } |
481 | 484 |
482 | 485 |
483 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 486 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
484 if (stack_param_delta > 0) { | 487 if (stack_param_delta > 0) { |
485 int total_discarded_slots = frame()->GetTotalFrameSlotCount(); | 488 int total_discarded_slots = frame()->GetTotalFrameSlotCount(); |
486 // Leave the PC and saved frame pointer on the stack. | 489 // Leave the PC and saved frame pointer on the stack. |
487 total_discarded_slots -= | 490 total_discarded_slots -= |
488 StandardFrameConstants::kFixedFrameSizeFromFp / kPointerSize; | 491 StandardFrameConstants::kFixedFrameSizeFromFp / kPointerSize; |
489 // Discard only slots that won't be used by new parameters. | 492 // Discard only slots that won't be used by new parameters. |
490 total_discarded_slots -= stack_param_delta; | 493 total_discarded_slots -= stack_param_delta; |
491 if (total_discarded_slots < 0) { | 494 if (total_discarded_slots < 0) { |
492 __ Sub(jssp, jssp, Operand(-total_discarded_slots * kPointerSize)); | 495 __ Sub(jssp, jssp, Operand(-total_discarded_slots * kPointerSize)); |
| 496 frame_access_state()->IncreaseSPDelta(-total_discarded_slots); |
493 } | 497 } |
494 } | 498 } |
| 499 frame_access_state()->UseSPToAccessFrame(); |
495 } | 500 } |
496 | 501 |
497 | 502 |
498 // Assembles an instruction after register allocation, producing machine code. | 503 // Assembles an instruction after register allocation, producing machine code. |
499 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 504 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
500 Arm64OperandConverter i(this, instr); | 505 Arm64OperandConverter i(this, instr); |
501 InstructionCode opcode = instr->opcode(); | 506 InstructionCode opcode = instr->opcode(); |
502 switch (ArchOpcodeField::decode(opcode)) { | 507 switch (ArchOpcodeField::decode(opcode)) { |
503 case kArchCallCodeObject: { | 508 case kArchCallCodeObject: { |
504 EnsureSpaceForLazyDeopt(); | 509 EnsureSpaceForLazyDeopt(); |
505 if (instr->InputAt(0)->IsImmediate()) { | 510 if (instr->InputAt(0)->IsImmediate()) { |
506 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 511 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), |
507 RelocInfo::CODE_TARGET); | 512 RelocInfo::CODE_TARGET); |
508 } else { | 513 } else { |
509 Register target = i.InputRegister(0); | 514 Register target = i.InputRegister(0); |
510 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); | 515 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); |
511 __ Call(target); | 516 __ Call(target); |
512 } | 517 } |
513 frame()->ClearOutgoingParameterSlots(); | 518 frame_access_state()->ClearSPDelta(); |
514 RecordCallPosition(instr); | 519 RecordCallPosition(instr); |
515 break; | 520 break; |
516 } | 521 } |
517 case kArchTailCallCodeObject: { | 522 case kArchTailCallCodeObject: { |
518 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 523 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
519 AssembleDeconstructActivationRecord(stack_param_delta); | 524 AssembleDeconstructActivationRecord(stack_param_delta); |
520 if (instr->InputAt(0)->IsImmediate()) { | 525 if (instr->InputAt(0)->IsImmediate()) { |
521 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), | 526 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), |
522 RelocInfo::CODE_TARGET); | 527 RelocInfo::CODE_TARGET); |
523 } else { | 528 } else { |
524 Register target = i.InputRegister(0); | 529 Register target = i.InputRegister(0); |
525 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); | 530 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); |
526 __ Jump(target); | 531 __ Jump(target); |
527 } | 532 } |
528 frame()->ClearOutgoingParameterSlots(); | 533 frame_access_state()->ClearSPDelta(); |
529 break; | 534 break; |
530 } | 535 } |
531 case kArchCallJSFunction: { | 536 case kArchCallJSFunction: { |
532 EnsureSpaceForLazyDeopt(); | 537 EnsureSpaceForLazyDeopt(); |
533 Register func = i.InputRegister(0); | 538 Register func = i.InputRegister(0); |
534 if (FLAG_debug_code) { | 539 if (FLAG_debug_code) { |
535 // Check the function's context matches the context argument. | 540 // Check the function's context matches the context argument. |
536 UseScratchRegisterScope scope(masm()); | 541 UseScratchRegisterScope scope(masm()); |
537 Register temp = scope.AcquireX(); | 542 Register temp = scope.AcquireX(); |
538 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); | 543 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); |
539 __ cmp(cp, temp); | 544 __ cmp(cp, temp); |
540 __ Assert(eq, kWrongFunctionContext); | 545 __ Assert(eq, kWrongFunctionContext); |
541 } | 546 } |
542 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 547 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
543 __ Call(x10); | 548 __ Call(x10); |
544 frame()->ClearOutgoingParameterSlots(); | 549 frame_access_state()->ClearSPDelta(); |
545 RecordCallPosition(instr); | 550 RecordCallPosition(instr); |
546 break; | 551 break; |
547 } | 552 } |
548 case kArchTailCallJSFunction: { | 553 case kArchTailCallJSFunction: { |
549 Register func = i.InputRegister(0); | 554 Register func = i.InputRegister(0); |
550 if (FLAG_debug_code) { | 555 if (FLAG_debug_code) { |
551 // Check the function's context matches the context argument. | 556 // Check the function's context matches the context argument. |
552 UseScratchRegisterScope scope(masm()); | 557 UseScratchRegisterScope scope(masm()); |
553 Register temp = scope.AcquireX(); | 558 Register temp = scope.AcquireX(); |
554 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); | 559 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); |
555 __ cmp(cp, temp); | 560 __ cmp(cp, temp); |
556 __ Assert(eq, kWrongFunctionContext); | 561 __ Assert(eq, kWrongFunctionContext); |
557 } | 562 } |
558 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 563 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
559 AssembleDeconstructActivationRecord(stack_param_delta); | 564 AssembleDeconstructActivationRecord(stack_param_delta); |
560 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 565 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
561 __ Jump(x10); | 566 __ Jump(x10); |
562 frame()->ClearOutgoingParameterSlots(); | 567 frame_access_state()->ClearSPDelta(); |
563 break; | 568 break; |
564 } | 569 } |
565 case kArchLazyBailout: { | 570 case kArchLazyBailout: { |
566 EnsureSpaceForLazyDeopt(); | 571 EnsureSpaceForLazyDeopt(); |
567 RecordCallPosition(instr); | 572 RecordCallPosition(instr); |
568 break; | 573 break; |
569 } | 574 } |
570 case kArchPrepareCallCFunction: | 575 case kArchPrepareCallCFunction: |
571 // We don't need kArchPrepareCallCFunction on arm64 as the instruction | 576 // We don't need kArchPrepareCallCFunction on arm64 as the instruction |
572 // selector already performs a Claim to reserve space on the stack and | 577 // selector already performs a Claim to reserve space on the stack and |
573 // guarantees correct alignment of the stack pointer. | 578 // guarantees correct alignment of the stack pointer. |
574 UNREACHABLE(); | 579 UNREACHABLE(); |
575 break; | 580 break; |
576 case kArchPrepareTailCall: | 581 case kArchPrepareTailCall: |
577 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); | 582 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); |
578 break; | 583 break; |
579 case kArchCallCFunction: { | 584 case kArchCallCFunction: { |
580 int const num_parameters = MiscField::decode(instr->opcode()); | 585 int const num_parameters = MiscField::decode(instr->opcode()); |
581 if (instr->InputAt(0)->IsImmediate()) { | 586 if (instr->InputAt(0)->IsImmediate()) { |
582 ExternalReference ref = i.InputExternalReference(0); | 587 ExternalReference ref = i.InputExternalReference(0); |
583 __ CallCFunction(ref, num_parameters, 0); | 588 __ CallCFunction(ref, num_parameters, 0); |
584 } else { | 589 } else { |
585 Register func = i.InputRegister(0); | 590 Register func = i.InputRegister(0); |
586 __ CallCFunction(func, num_parameters, 0); | 591 __ CallCFunction(func, num_parameters, 0); |
587 } | 592 } |
588 // CallCFunction only supports register arguments so we never need to call | 593 // CallCFunction only supports register arguments so we never need to call |
589 // frame()->ClearOutgoingParameterSlots() here. | 594 // frame()->ClearOutgoingParameterSlots() here. |
590 DCHECK(frame()->GetOutgoingParameterSlotCount() == 0); | 595 DCHECK(frame_access_state()->sp_delta() == 0); |
591 break; | 596 break; |
592 } | 597 } |
593 case kArchJmp: | 598 case kArchJmp: |
594 AssembleArchJump(i.InputRpo(0)); | 599 AssembleArchJump(i.InputRpo(0)); |
595 break; | 600 break; |
596 case kArchTableSwitch: | 601 case kArchTableSwitch: |
597 AssembleArchTableSwitch(instr); | 602 AssembleArchTableSwitch(instr); |
598 break; | 603 break; |
599 case kArchLookupSwitch: | 604 case kArchLookupSwitch: |
600 AssembleArchLookupSwitch(instr); | 605 AssembleArchLookupSwitch(instr); |
(...skipping 265 matching lines...)
866 break; | 871 break; |
867 case kArm64TestAndBranch32: | 872 case kArm64TestAndBranch32: |
868 case kArm64TestAndBranch: | 873 case kArm64TestAndBranch: |
869 // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch. | 874 // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch. |
870 break; | 875 break; |
871 case kArm64CompareAndBranch32: | 876 case kArm64CompareAndBranch32: |
872 // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch. | 877 // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch. |
873 break; | 878 break; |
874 case kArm64ClaimForCallArguments: { | 879 case kArm64ClaimForCallArguments: { |
875 __ Claim(i.InputInt32(0)); | 880 __ Claim(i.InputInt32(0)); |
876 frame()->AllocateOutgoingParameterSlots(i.InputInt32(0)); | 881 frame_access_state()->IncreaseSPDelta(i.InputInt32(0)); |
877 break; | 882 break; |
878 } | 883 } |
879 case kArm64Poke: { | 884 case kArm64Poke: { |
880 Operand operand(i.InputInt32(1) * kPointerSize); | 885 Operand operand(i.InputInt32(1) * kPointerSize); |
881 __ Poke(i.InputRegister(0), operand); | 886 __ Poke(i.InputRegister(0), operand); |
882 break; | 887 break; |
883 } | 888 } |
884 case kArm64PokePair: { | 889 case kArm64PokePair: { |
885 int slot = i.InputInt32(2) - 1; | 890 int slot = i.InputInt32(2) - 1; |
886 __ PokePair(i.InputRegister(1), i.InputRegister(0), slot * kPointerSize); | 891 __ PokePair(i.InputRegister(1), i.InputRegister(0), slot * kPointerSize); |
(...skipping 389 matching lines...)
1276 void CodeGenerator::AssemblePrologue() { | 1281 void CodeGenerator::AssemblePrologue() { |
1277 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1282 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
1278 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1283 if (descriptor->kind() == CallDescriptor::kCallAddress) { |
1279 __ SetStackPointer(csp); | 1284 __ SetStackPointer(csp); |
1280 __ Push(lr, fp); | 1285 __ Push(lr, fp); |
1281 __ Mov(fp, csp); | 1286 __ Mov(fp, csp); |
1282 } else if (descriptor->IsJSFunctionCall()) { | 1287 } else if (descriptor->IsJSFunctionCall()) { |
1283 CompilationInfo* info = this->info(); | 1288 CompilationInfo* info = this->info(); |
1284 __ SetStackPointer(jssp); | 1289 __ SetStackPointer(jssp); |
1285 __ Prologue(info->IsCodePreAgingActive()); | 1290 __ Prologue(info->IsCodePreAgingActive()); |
1286 } else if (needs_frame_) { | 1291 } else if (frame()->needs_frame()) { |
1287 __ SetStackPointer(jssp); | 1292 __ SetStackPointer(jssp); |
1288 __ StubPrologue(); | 1293 __ StubPrologue(); |
1289 } else { | 1294 } else { |
1290 frame()->SetElidedFrameSizeInSlots(0); | 1295 frame()->SetElidedFrameSizeInSlots(0); |
1291 } | 1296 } |
| 1297 frame_access_state()->UseDefaultFrameAccess(); |
1292 | 1298 |
1293 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1299 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
1294 if (info()->is_osr()) { | 1300 if (info()->is_osr()) { |
1295 // TurboFan OSR-compiled functions cannot be entered directly. | 1301 // TurboFan OSR-compiled functions cannot be entered directly. |
1296 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1302 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
1297 | 1303 |
1298 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1304 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
1299 // frame is still on the stack. Optimized code uses OSR values directly from | 1305 // frame is still on the stack. Optimized code uses OSR values directly from |
1300 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1306 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
1301 // remaining stack slots. | 1307 // remaining stack slots. |
(...skipping 49 matching lines...)
1351 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, | 1357 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, |
1352 descriptor->CalleeSavedFPRegisters()); | 1358 descriptor->CalleeSavedFPRegisters()); |
1353 if (saves_fp.Count() != 0) { | 1359 if (saves_fp.Count() != 0) { |
1354 __ PopCPURegList(saves_fp); | 1360 __ PopCPURegList(saves_fp); |
1355 } | 1361 } |
1356 | 1362 |
1357 int pop_count = static_cast<int>(descriptor->StackParameterCount()); | 1363 int pop_count = static_cast<int>(descriptor->StackParameterCount()); |
1358 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1364 if (descriptor->kind() == CallDescriptor::kCallAddress) { |
1359 __ Mov(csp, fp); | 1365 __ Mov(csp, fp); |
1360 __ Pop(fp, lr); | 1366 __ Pop(fp, lr); |
1361 } else if (descriptor->IsJSFunctionCall() || needs_frame_) { | 1367 } else if (descriptor->IsJSFunctionCall() || frame()->needs_frame()) { |
1362 // Canonicalize JSFunction return sites for now. | 1368 // Canonicalize JSFunction return sites for now. |
1363 if (return_label_.is_bound()) { | 1369 if (return_label_.is_bound()) { |
1364 __ B(&return_label_); | 1370 __ B(&return_label_); |
1365 return; | 1371 return; |
1366 } else { | 1372 } else { |
1367 __ Bind(&return_label_); | 1373 __ Bind(&return_label_); |
1368 __ Mov(jssp, fp); | 1374 __ Mov(jssp, fp); |
1369 __ Pop(fp, lr); | 1375 __ Pop(fp, lr); |
1370 } | 1376 } |
1371 } | 1377 } |
(...skipping 184 matching lines...)
1556 padding_size -= kInstructionSize; | 1562 padding_size -= kInstructionSize; |
1557 } | 1563 } |
1558 } | 1564 } |
1559 } | 1565 } |
1560 | 1566 |
1561 #undef __ | 1567 #undef __ |
1562 | 1568 |
1563 } // namespace compiler | 1569 } // namespace compiler |
1564 } // namespace internal | 1570 } // namespace internal |
1565 } // namespace v8 | 1571 } // namespace v8 |