| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 | 6 |
| 7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" |
| 8 #include "src/arm64/macro-assembler-arm64.h" | 8 #include "src/arm64/macro-assembler-arm64.h" |
| 9 #include "src/ast/scopes.h" | 9 #include "src/ast/scopes.h" |
| 10 #include "src/compiler/code-generator-impl.h" | 10 #include "src/compiler/code-generator-impl.h" |
| (...skipping 523 matching lines...) | |
| 534 void CodeGenerator::AssembleDeconstructFrame() { | 534 void CodeGenerator::AssembleDeconstructFrame() { |
| 535 const CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 535 const CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 536 if (descriptor->IsCFunctionCall() || descriptor->UseNativeStack()) { | 536 if (descriptor->IsCFunctionCall() || descriptor->UseNativeStack()) { |
| 537 __ Mov(csp, fp); | 537 __ Mov(csp, fp); |
| 538 } else { | 538 } else { |
| 539 __ Mov(jssp, fp); | 539 __ Mov(jssp, fp); |
| 540 } | 540 } |
| 541 __ Pop(fp, lr); | 541 __ Pop(fp, lr); |
| 542 } | 542 } |
| 543 | 543 |
| 544 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 544 void CodeGenerator::AssemblePrepareTailCall() { |
| 545 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | |
| 546 if (sp_slot_delta > 0) { | |
| 547 __ Drop(sp_slot_delta); | |
| 548 } | |
| 549 frame_access_state()->SetFrameAccessToDefault(); | |
| 550 } | |
| 551 | |
| 552 | |
| 553 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | |
| 554 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | |
| 555 if (sp_slot_delta < 0) { | |
| 556 __ Claim(-sp_slot_delta); | |
| 557 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); | |
| 558 } | |
| 559 if (frame_access_state()->has_frame()) { | 545 if (frame_access_state()->has_frame()) { |
| 560 __ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); | 546 __ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); |
| 561 __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 547 __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 562 } | 548 } |
| 563 frame_access_state()->SetFrameAccessToSP(); | 549 frame_access_state()->SetFrameAccessToSP(); |
| 564 } | 550 } |
| 565 | 551 |
| 566 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, | 552 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, |
| 567 Register scratch1, | 553 Register scratch1, |
| 568 Register scratch2, | 554 Register scratch2, |
| (...skipping 12 matching lines...) | |
| 581 __ Ldr(caller_args_count_reg, | 567 __ Ldr(caller_args_count_reg, |
| 582 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 568 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 583 __ SmiUntag(caller_args_count_reg); | 569 __ SmiUntag(caller_args_count_reg); |
| 584 | 570 |
| 585 ParameterCount callee_args_count(args_reg); | 571 ParameterCount callee_args_count(args_reg); |
| 586 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, | 572 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, |
| 587 scratch3); | 573 scratch3); |
| 588 __ bind(&done); | 574 __ bind(&done); |
| 589 } | 575 } |
| 590 | 576 |
| 577 namespace { |
| 578 |
| 579 void AdjustStackPointerForTailCall(MacroAssembler* masm, |
| 580 FrameAccessState* state, |
| 581 int new_slot_above_sp, |
| 582 bool allow_shrinkage = true) { |
| 583 int current_sp_offset = state->GetSPToFPSlotCount() + |
| 584 StandardFrameConstants::kFixedSlotCountAboveFp; |
| 585 int stack_slot_delta = new_slot_above_sp - current_sp_offset; |
| 586 if (stack_slot_delta > 0) { |
| 587 masm->Claim(stack_slot_delta); |
| 588 state->IncreaseSPDelta(stack_slot_delta); |
| 589 } else if (allow_shrinkage && stack_slot_delta < 0) { |
| 590 masm->Drop(-stack_slot_delta); |
| 591 state->IncreaseSPDelta(stack_slot_delta); |
| 592 } |
| 593 } |
| 594 |
| 595 } // namespace |
| 596 |
| 597 void CodeGenerator::AssembleTailCallBeforeGap(Instruction* instr, |
| 598 int first_unused_stack_slot) { |
| 599 AdjustStackPointerForTailCall(masm(), frame_access_state(), |
| 600 first_unused_stack_slot, false); |
| 601 } |
| 602 |
| 603 void CodeGenerator::AssembleTailCallAfterGap(Instruction* instr, |
| 604 int first_unused_stack_slot) { |
| 605 AdjustStackPointerForTailCall(masm(), frame_access_state(), |
| 606 first_unused_stack_slot); |
| 607 } |
| 608 |
| 591 // Assembles an instruction after register allocation, producing machine code. | 609 // Assembles an instruction after register allocation, producing machine code. |
| 592 CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( | 610 CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( |
| 593 Instruction* instr) { | 611 Instruction* instr) { |
| 594 Arm64OperandConverter i(this, instr); | 612 Arm64OperandConverter i(this, instr); |
| 595 InstructionCode opcode = instr->opcode(); | 613 InstructionCode opcode = instr->opcode(); |
| 596 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode); | 614 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode); |
| 597 switch (arch_opcode) { | 615 switch (arch_opcode) { |
| 598 case kArchCallCodeObject: { | 616 case kArchCallCodeObject: { |
| 599 EnsureSpaceForLazyDeopt(); | 617 EnsureSpaceForLazyDeopt(); |
| 600 if (instr->InputAt(0)->IsImmediate()) { | 618 if (instr->InputAt(0)->IsImmediate()) { |
| (...skipping 15 matching lines...) | |
| 616 } | 634 } |
| 617 if (flags & CallDescriptor::kRestoreCSP) { | 635 if (flags & CallDescriptor::kRestoreCSP) { |
| 618 __ Mov(csp, jssp); | 636 __ Mov(csp, jssp); |
| 619 __ AssertCspAligned(); | 637 __ AssertCspAligned(); |
| 620 } | 638 } |
| 621 frame_access_state()->ClearSPDelta(); | 639 frame_access_state()->ClearSPDelta(); |
| 622 break; | 640 break; |
| 623 } | 641 } |
| 624 case kArchTailCallCodeObjectFromJSFunction: | 642 case kArchTailCallCodeObjectFromJSFunction: |
| 625 case kArchTailCallCodeObject: { | 643 case kArchTailCallCodeObject: { |
| 626 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | |
| 627 AssembleDeconstructActivationRecord(stack_param_delta); | |
| 628 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) { | 644 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) { |
| 629 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, | 645 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, |
| 630 i.TempRegister(0), i.TempRegister(1), | 646 i.TempRegister(0), i.TempRegister(1), |
| 631 i.TempRegister(2)); | 647 i.TempRegister(2)); |
| 632 } | 648 } |
| 633 if (instr->InputAt(0)->IsImmediate()) { | 649 if (instr->InputAt(0)->IsImmediate()) { |
| 634 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), | 650 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), |
| 635 RelocInfo::CODE_TARGET); | 651 RelocInfo::CODE_TARGET); |
| 636 } else { | 652 } else { |
| 637 Register target = i.InputRegister(0); | 653 Register target = i.InputRegister(0); |
| 638 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); | 654 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); |
| 639 __ Jump(target); | 655 __ Jump(target); |
| 640 } | 656 } |
| 641 frame_access_state()->ClearSPDelta(); | 657 frame_access_state()->ClearSPDelta(); |
| 658 frame_access_state()->SetFrameAccessToDefault(); |
| 642 break; | 659 break; |
| 643 } | 660 } |
| 644 case kArchTailCallAddress: { | 661 case kArchTailCallAddress: { |
| 645 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | |
| 646 AssembleDeconstructActivationRecord(stack_param_delta); | |
| 647 CHECK(!instr->InputAt(0)->IsImmediate()); | 662 CHECK(!instr->InputAt(0)->IsImmediate()); |
| 648 __ Jump(i.InputRegister(0)); | 663 __ Jump(i.InputRegister(0)); |
| 649 frame_access_state()->ClearSPDelta(); | 664 frame_access_state()->ClearSPDelta(); |
| 665 frame_access_state()->SetFrameAccessToDefault(); |
| 650 break; | 666 break; |
| 651 } | 667 } |
| 652 case kArchCallJSFunction: { | 668 case kArchCallJSFunction: { |
| 653 EnsureSpaceForLazyDeopt(); | 669 EnsureSpaceForLazyDeopt(); |
| 654 Register func = i.InputRegister(0); | 670 Register func = i.InputRegister(0); |
| 655 if (FLAG_debug_code) { | 671 if (FLAG_debug_code) { |
| 656 // Check the function's context matches the context argument. | 672 // Check the function's context matches the context argument. |
| 657 UseScratchRegisterScope scope(masm()); | 673 UseScratchRegisterScope scope(masm()); |
| 658 Register temp = scope.AcquireX(); | 674 Register temp = scope.AcquireX(); |
| 659 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); | 675 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); |
| (...skipping 22 matching lines...) | |
| 682 case kArchTailCallJSFunction: { | 698 case kArchTailCallJSFunction: { |
| 683 Register func = i.InputRegister(0); | 699 Register func = i.InputRegister(0); |
| 684 if (FLAG_debug_code) { | 700 if (FLAG_debug_code) { |
| 685 // Check the function's context matches the context argument. | 701 // Check the function's context matches the context argument. |
| 686 UseScratchRegisterScope scope(masm()); | 702 UseScratchRegisterScope scope(masm()); |
| 687 Register temp = scope.AcquireX(); | 703 Register temp = scope.AcquireX(); |
| 688 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); | 704 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); |
| 689 __ cmp(cp, temp); | 705 __ cmp(cp, temp); |
| 690 __ Assert(eq, kWrongFunctionContext); | 706 __ Assert(eq, kWrongFunctionContext); |
| 691 } | 707 } |
| 692 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | |
| 693 AssembleDeconstructActivationRecord(stack_param_delta); | |
| 694 if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) { | 708 if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) { |
| 695 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, | 709 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, |
| 696 i.TempRegister(0), i.TempRegister(1), | 710 i.TempRegister(0), i.TempRegister(1), |
| 697 i.TempRegister(2)); | 711 i.TempRegister(2)); |
| 698 } | 712 } |
| 699 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 713 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
| 700 __ Jump(x10); | 714 __ Jump(x10); |
| 701 frame_access_state()->ClearSPDelta(); | 715 frame_access_state()->ClearSPDelta(); |
| 716 frame_access_state()->SetFrameAccessToDefault(); |
| 702 break; | 717 break; |
| 703 } | 718 } |
| 704 case kArchPrepareCallCFunction: | 719 case kArchPrepareCallCFunction: |
| 705 // We don't need kArchPrepareCallCFunction on arm64 as the instruction | 720 // We don't need kArchPrepareCallCFunction on arm64 as the instruction |
| 706 // selector already performs a Claim to reserve space on the stack and | 721 // selector already performs a Claim to reserve space on the stack and |
| 707 // guarantees correct alignment of the stack pointer. | 722 // guarantees correct alignment of the stack pointer. |
| 708 UNREACHABLE(); | 723 UNREACHABLE(); |
| 709 break; | 724 break; |
| 710 case kArchPrepareTailCall: | 725 case kArchPrepareTailCall: |
| 711 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); | 726 AssemblePrepareTailCall(); |
| 712 break; | 727 break; |
| 713 case kArchCallCFunction: { | 728 case kArchCallCFunction: { |
| 714 int const num_parameters = MiscField::decode(instr->opcode()); | 729 int const num_parameters = MiscField::decode(instr->opcode()); |
| 715 if (instr->InputAt(0)->IsImmediate()) { | 730 if (instr->InputAt(0)->IsImmediate()) { |
| 716 ExternalReference ref = i.InputExternalReference(0); | 731 ExternalReference ref = i.InputExternalReference(0); |
| 717 __ CallCFunction(ref, num_parameters, 0); | 732 __ CallCFunction(ref, num_parameters, 0); |
| 718 } else { | 733 } else { |
| 719 Register func = i.InputRegister(0); | 734 Register func = i.InputRegister(0); |
| 720 __ CallCFunction(func, num_parameters, 0); | 735 __ CallCFunction(func, num_parameters, 0); |
| 721 } | 736 } |
| (...skipping 1300 matching lines...) | |
| 2022 padding_size -= kInstructionSize; | 2037 padding_size -= kInstructionSize; |
| 2023 } | 2038 } |
| 2024 } | 2039 } |
| 2025 } | 2040 } |
| 2026 | 2041 |
| 2027 #undef __ | 2042 #undef __ |
| 2028 | 2043 |
| 2029 } // namespace compiler | 2044 } // namespace compiler |
| 2030 } // namespace internal | 2045 } // namespace internal |
| 2031 } // namespace v8 | 2046 } // namespace v8 |
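
To make the new helper's stack arithmetic concrete, here is a minimal standalone sketch (not part of the patch) of the slot-delta computation that AdjustStackPointerForTailCall performs before a tail call. The plain-integer stand-ins for FrameAccessState::GetSPToFPSlotCount() and StandardFrameConstants::kFixedSlotCountAboveFp, and the constant's value, are illustrative assumptions; the real helper emits Claim/Drop through the MacroAssembler.

// Illustrative sketch only, not part of the patch: models the slot-delta
// arithmetic used by AdjustStackPointerForTailCall with plain integers.
// sp_to_fp_slot_count stands in for FrameAccessState::GetSPToFPSlotCount(),
// and kFixedSlotCountAboveFp for StandardFrameConstants::kFixedSlotCountAboveFp
// (the value 2 below is assumed for the example).
#include <cstdio>

constexpr int kFixedSlotCountAboveFp = 2;  // assumed value for illustration

// Positive result -> Claim (grow the stack), negative -> Drop (shrink it),
// zero -> no adjustment needed before the tail call.
int StackSlotDelta(int sp_to_fp_slot_count, int first_unused_stack_slot) {
  int current_sp_offset = sp_to_fp_slot_count + kFixedSlotCountAboveFp;
  return first_unused_stack_slot - current_sp_offset;
}

int main() {
  // SP is 3 slots below the FP-relative fixed area; the tail call wants the
  // first unused slot at offset 7, so the generator would Claim 2 slots.
  int delta = StackSlotDelta(3, 7);
  std::printf("delta = %d -> %s\n", delta,
              delta > 0 ? "Claim" : (delta < 0 ? "Drop" : "no adjustment"));
  return 0;
}

Note that the patch calls the helper with allow_shrinkage = false in AssembleTailCallBeforeGap and with the default (true) in AssembleTailCallAfterGap, so the stack only grows ahead of the gap moves and any excess is released after they complete.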