Chromium Code Reviews

Side by Side Diff: src/compiler/ppc/code-generator-ppc.cc

Issue 1484913003: PPC: [turbofan] Add general support for sp-based frame access (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years ago
OLD | NEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler/code-generator.h" 5 #include "src/compiler/code-generator.h"
6 6
7 #include "src/ast/scopes.h" 7 #include "src/ast/scopes.h"
8 #include "src/compiler/code-generator-impl.h" 8 #include "src/compiler/code-generator-impl.h"
9 #include "src/compiler/gap-resolver.h" 9 #include "src/compiler/gap-resolver.h"
10 #include "src/compiler/node-matchers.h" 10 #include "src/compiler/node-matchers.h"
(...skipping 83 matching lines...)
94 return MemOperand(r0); 94 return MemOperand(r0);
95 } 95 }
96 96
97 MemOperand MemoryOperand(AddressingMode* mode, size_t first_index = 0) { 97 MemOperand MemoryOperand(AddressingMode* mode, size_t first_index = 0) {
98 return MemoryOperand(mode, &first_index); 98 return MemoryOperand(mode, &first_index);
99 } 99 }
100 100
101 MemOperand ToMemOperand(InstructionOperand* op) const { 101 MemOperand ToMemOperand(InstructionOperand* op) const {
102 DCHECK(op != NULL); 102 DCHECK(op != NULL);
103 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); 103 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
104 FrameOffset offset = 104 FrameOffset offset = frame_access_state()->GetFrameOffset(
105 linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame()); 105 AllocatedOperand::cast(op)->index());
106 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); 106 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
107 } 107 }
108 }; 108 };
109 109
110 110
111 static inline bool HasRegisterInput(Instruction* instr, size_t index) { 111 static inline bool HasRegisterInput(Instruction* instr, size_t index) {
112 return instr->InputAt(index)->IsRegister(); 112 return instr->InputAt(index)->IsRegister();
113 } 113 }
114 114
115 115
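The ToMemOperand() change above routes spill-slot addressing through frame_access_state()->GetFrameOffset(), which decides between sp- and fp-relative offsets. Below is a minimal standalone sketch of that idea; the class name, fields, and offset arithmetic are hypothetical illustrations, not the real FrameAccessState.

#include <cstdio>

struct FrameOffset {
  bool from_sp;
  int offset;  // byte offset relative to sp or fp
};

// Hypothetical stand-in for FrameAccessState: it tracks whether the frame is
// currently addressed through sp or fp, and how many extra slots have been
// pushed since the frame was built.
struct SketchFrameAccessState {
  bool access_via_sp = false;
  int sp_delta = 0;     // extra pointer-sized slots pushed below the frame
  int frame_slots = 4;  // example frame size in slots

  FrameOffset GetFrameOffset(int spill_slot) const {
    const int kPointerSize = 8;  // example: 64-bit build
    if (access_via_sp) {
      // sp-relative: compensate for everything pushed since frame setup.
      return {true, (frame_slots - spill_slot + sp_delta) * kPointerSize};
    }
    // fp-relative: fixed negative offset from the frame pointer.
    return {false, -(spill_slot + 1) * kPointerSize};
  }
};

int main() {
  SketchFrameAccessState state;
  FrameOffset via_fp = state.GetFrameOffset(0);
  state.access_via_sp = true;
  state.sp_delta = 2;  // e.g. two pushes made while preparing a call
  FrameOffset via_sp = state.GetFrameOffset(0);
  std::printf("fp-relative %d, sp-relative %d\n", via_fp.offset, via_sp.offset);
  return 0;
}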
(...skipping 487 matching lines...)
603 __ bind(&done); \ 603 __ bind(&done); \
604 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ 604 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \
605 } while (0) 605 } while (0)
606 606
607 607
608 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { 608 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
609 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); 609 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
610 if (sp_slot_delta > 0) { 610 if (sp_slot_delta > 0) {
611 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0); 611 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0);
612 } 612 }
613 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); 613 if (frame()->needs_frame()) {
614 int spill_slots = frame()->GetSpillSlotCount();
615 bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0;
616 if (has_frame) {
617 if (FLAG_enable_embedded_constant_pool) { 614 if (FLAG_enable_embedded_constant_pool) {
618 __ Pop(r0, fp, kConstantPoolRegister); 615 __ Pop(r0, fp, kConstantPoolRegister);
619 } else { 616 } else {
620 __ Pop(r0, fp); 617 __ Pop(r0, fp);
621 } 618 }
622 __ mtlr(r0); 619 __ mtlr(r0);
623 } 620 }
621 frame_access_state()->SetFrameAccessToDefault();
624 } 622 }
625 623
626 624
627 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { 625 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
628 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); 626 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
629 if (sp_slot_delta < 0) { 627 if (sp_slot_delta < 0) {
630 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0); 628 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0);
629 frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
631 } 630 }
631 frame_access_state()->SetFrameAccessToSP();
632 } 632 }
633 633
634 634
635 // Assembles an instruction after register allocation, producing machine code. 635 // Assembles an instruction after register allocation, producing machine code.
636 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { 636 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
637 PPCOperandConverter i(this, instr); 637 PPCOperandConverter i(this, instr);
638 ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode()); 638 ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode());
639 639
640 switch (opcode) { 640 switch (opcode) {
641 case kArchCallCodeObject: { 641 case kArchCallCodeObject: {
642 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( 642 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
643 masm()); 643 masm());
644 EnsureSpaceForLazyDeopt(); 644 EnsureSpaceForLazyDeopt();
645 if (HasRegisterInput(instr, 0)) { 645 if (HasRegisterInput(instr, 0)) {
646 __ addi(ip, i.InputRegister(0), 646 __ addi(ip, i.InputRegister(0),
647 Operand(Code::kHeaderSize - kHeapObjectTag)); 647 Operand(Code::kHeaderSize - kHeapObjectTag));
648 __ Call(ip); 648 __ Call(ip);
649 } else { 649 } else {
650 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), 650 __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
651 RelocInfo::CODE_TARGET); 651 RelocInfo::CODE_TARGET);
652 } 652 }
653 RecordCallPosition(instr); 653 RecordCallPosition(instr);
654 DCHECK_EQ(LeaveRC, i.OutputRCBit()); 654 DCHECK_EQ(LeaveRC, i.OutputRCBit());
655 frame_access_state()->ClearSPDelta();
655 break; 656 break;
656 } 657 }
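For kArchCallCodeObject the input register holds a tagged Code object, so the entry point is computed as target + Code::kHeaderSize - kHeapObjectTag. A tiny worked example of that arithmetic follows; the constant values are illustrative placeholders, not V8's actual layout constants.

#include <cstdint>
#include <cstdio>

int main() {
  const uintptr_t kHeapObjectTag = 1;    // example tag value
  const uintptr_t kCodeHeaderSize = 96;  // example Code header size in bytes
  uintptr_t code_start = 0x10000;        // untagged Code object address
  uintptr_t tagged = code_start + kHeapObjectTag;  // what the register holds
  // Adding (header size - tag) lands exactly on the first instruction.
  uintptr_t entry = tagged + (kCodeHeaderSize - kHeapObjectTag);
  std::printf("entry matches code_start + header: %d\n",
              entry == code_start + kCodeHeaderSize);
  return 0;
}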
657 case kArchTailCallCodeObject: { 658 case kArchTailCallCodeObject: {
658 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); 659 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
659 AssembleDeconstructActivationRecord(stack_param_delta); 660 AssembleDeconstructActivationRecord(stack_param_delta);
660 if (HasRegisterInput(instr, 0)) { 661 if (HasRegisterInput(instr, 0)) {
661 __ addi(ip, i.InputRegister(0), 662 __ addi(ip, i.InputRegister(0),
662 Operand(Code::kHeaderSize - kHeapObjectTag)); 663 Operand(Code::kHeaderSize - kHeapObjectTag));
663 __ Jump(ip); 664 __ Jump(ip);
664 } else { 665 } else {
665 // We cannot use the constant pool to load the target since 666 // We cannot use the constant pool to load the target since
666 // we've already restored the caller's frame. 667 // we've already restored the caller's frame.
667 ConstantPoolUnavailableScope constant_pool_unavailable(masm()); 668 ConstantPoolUnavailableScope constant_pool_unavailable(masm());
668 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), 669 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
669 RelocInfo::CODE_TARGET); 670 RelocInfo::CODE_TARGET);
670 } 671 }
671 DCHECK_EQ(LeaveRC, i.OutputRCBit()); 672 DCHECK_EQ(LeaveRC, i.OutputRCBit());
673 frame_access_state()->ClearSPDelta();
672 break; 674 break;
673 } 675 }
674 case kArchCallJSFunction: { 676 case kArchCallJSFunction: {
675 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( 677 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
676 masm()); 678 masm());
677 EnsureSpaceForLazyDeopt(); 679 EnsureSpaceForLazyDeopt();
678 Register func = i.InputRegister(0); 680 Register func = i.InputRegister(0);
679 if (FLAG_debug_code) { 681 if (FLAG_debug_code) {
680 // Check the function's context matches the context argument. 682 // Check the function's context matches the context argument.
681 __ LoadP(kScratchReg, 683 __ LoadP(kScratchReg,
682 FieldMemOperand(func, JSFunction::kContextOffset)); 684 FieldMemOperand(func, JSFunction::kContextOffset));
683 __ cmp(cp, kScratchReg); 685 __ cmp(cp, kScratchReg);
684 __ Assert(eq, kWrongFunctionContext); 686 __ Assert(eq, kWrongFunctionContext);
685 } 687 }
686 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); 688 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
687 __ Call(ip); 689 __ Call(ip);
688 RecordCallPosition(instr); 690 RecordCallPosition(instr);
689 DCHECK_EQ(LeaveRC, i.OutputRCBit()); 691 DCHECK_EQ(LeaveRC, i.OutputRCBit());
692 frame_access_state()->ClearSPDelta();
690 break; 693 break;
691 } 694 }
692 case kArchTailCallJSFunction: { 695 case kArchTailCallJSFunction: {
693 Register func = i.InputRegister(0); 696 Register func = i.InputRegister(0);
694 if (FLAG_debug_code) { 697 if (FLAG_debug_code) {
695 // Check the function's context matches the context argument. 698 // Check the function's context matches the context argument.
696 __ LoadP(kScratchReg, 699 __ LoadP(kScratchReg,
697 FieldMemOperand(func, JSFunction::kContextOffset)); 700 FieldMemOperand(func, JSFunction::kContextOffset));
698 __ cmp(cp, kScratchReg); 701 __ cmp(cp, kScratchReg);
699 __ Assert(eq, kWrongFunctionContext); 702 __ Assert(eq, kWrongFunctionContext);
700 } 703 }
701 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); 704 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
702 AssembleDeconstructActivationRecord(stack_param_delta); 705 AssembleDeconstructActivationRecord(stack_param_delta);
703 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); 706 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
704 __ Jump(ip); 707 __ Jump(ip);
705 DCHECK_EQ(LeaveRC, i.OutputRCBit()); 708 DCHECK_EQ(LeaveRC, i.OutputRCBit());
709 frame_access_state()->ClearSPDelta();
706 break; 710 break;
707 } 711 }
708 case kArchLazyBailout: { 712 case kArchLazyBailout: {
709 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( 713 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
710 masm()); 714 masm());
711 EnsureSpaceForLazyDeopt(); 715 EnsureSpaceForLazyDeopt();
712 RecordCallPosition(instr); 716 RecordCallPosition(instr);
713 break; 717 break;
714 } 718 }
715 case kArchPrepareCallCFunction: { 719 case kArchPrepareCallCFunction: {
716 int const num_parameters = MiscField::decode(instr->opcode()); 720 int const num_parameters = MiscField::decode(instr->opcode());
717 __ PrepareCallCFunction(num_parameters, kScratchReg); 721 __ PrepareCallCFunction(num_parameters, kScratchReg);
722 // Frame alignment requires using FP-relative frame addressing.
723 frame_access_state()->SetFrameAccessToFP();
718 break; 724 break;
719 } 725 }
720 case kArchPrepareTailCall: 726 case kArchPrepareTailCall:
721 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); 727 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
722 break; 728 break;
723 case kArchCallCFunction: { 729 case kArchCallCFunction: {
724 int const num_parameters = MiscField::decode(instr->opcode()); 730 int const num_parameters = MiscField::decode(instr->opcode());
725 if (instr->InputAt(0)->IsImmediate()) { 731 if (instr->InputAt(0)->IsImmediate()) {
726 ExternalReference ref = i.InputExternalReference(0); 732 ExternalReference ref = i.InputExternalReference(0);
727 __ CallCFunction(ref, num_parameters); 733 __ CallCFunction(ref, num_parameters);
728 } else { 734 } else {
729 Register func = i.InputRegister(0); 735 Register func = i.InputRegister(0);
730 __ CallCFunction(func, num_parameters); 736 __ CallCFunction(func, num_parameters);
731 } 737 }
738 frame_access_state()->SetFrameAccessToDefault();
739 frame_access_state()->ClearSPDelta();
732 break; 740 break;
733 } 741 }
734 case kArchJmp: 742 case kArchJmp:
735 AssembleArchJump(i.InputRpo(0)); 743 AssembleArchJump(i.InputRpo(0));
736 DCHECK_EQ(LeaveRC, i.OutputRCBit()); 744 DCHECK_EQ(LeaveRC, i.OutputRCBit());
737 break; 745 break;
738 case kArchLookupSwitch: 746 case kArchLookupSwitch:
739 AssembleArchLookupSwitch(instr); 747 AssembleArchLookupSwitch(instr);
740 DCHECK_EQ(LeaveRC, i.OutputRCBit()); 748 DCHECK_EQ(LeaveRC, i.OutputRCBit());
741 break; 749 break;
(...skipping 319 matching lines...)
1061 __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit()); 1069 __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
1062 } else { 1070 } else {
1063 __ andi(r0, i.InputRegister(0), i.InputImmediate(1)); 1071 __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
1064 } 1072 }
1065 DCHECK_EQ(SetRC, i.OutputRCBit()); 1073 DCHECK_EQ(SetRC, i.OutputRCBit());
1066 break; 1074 break;
1067 #endif 1075 #endif
1068 case kPPC_Push: 1076 case kPPC_Push:
1069 if (instr->InputAt(0)->IsDoubleRegister()) { 1077 if (instr->InputAt(0)->IsDoubleRegister()) {
1070 __ stfdu(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); 1078 __ stfdu(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize));
1079 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1071 } else { 1080 } else {
1072 __ Push(i.InputRegister(0)); 1081 __ Push(i.InputRegister(0));
1082 frame_access_state()->IncreaseSPDelta(1);
1073 } 1083 }
1074 DCHECK_EQ(LeaveRC, i.OutputRCBit()); 1084 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1075 break; 1085 break;
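The new IncreaseSPDelta calls in kPPC_Push record how many pointer-sized slots each push consumes, so later sp-relative spill accesses stay correct. The double-register case works out as kDoubleSize / kPointerSize; the snippet below just illustrates that arithmetic with the usual constant values.

#include <cstdio>
#include <initializer_list>

int main() {
  const int kDoubleSize = 8;
  // kPointerSize is 8 on ppc64 builds and 4 on 32-bit PPC builds.
  for (int kPointerSize : {8, 4}) {
    std::printf("kPointerSize=%d -> delta=%d slots\n", kPointerSize,
                kDoubleSize / kPointerSize);
  }
  return 0;
}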
1076 case kPPC_PushFrame: { 1086 case kPPC_PushFrame: {
1077 int num_slots = i.InputInt32(1); 1087 int num_slots = i.InputInt32(1);
1078 if (instr->InputAt(0)->IsDoubleRegister()) { 1088 if (instr->InputAt(0)->IsDoubleRegister()) {
1079 __ stfdu(i.InputDoubleRegister(0), 1089 __ stfdu(i.InputDoubleRegister(0),
1080 MemOperand(sp, -num_slots * kPointerSize)); 1090 MemOperand(sp, -num_slots * kPointerSize));
1081 } else { 1091 } else {
1082 __ StorePU(i.InputRegister(0), 1092 __ StorePU(i.InputRegister(0),
(...skipping 359 matching lines...)
1442 void CodeGenerator::AssembleDeoptimizerCall( 1452 void CodeGenerator::AssembleDeoptimizerCall(
1443 int deoptimization_id, Deoptimizer::BailoutType bailout_type) { 1453 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
1444 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( 1454 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
1445 isolate(), deoptimization_id, bailout_type); 1455 isolate(), deoptimization_id, bailout_type);
1446 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); 1456 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
1447 } 1457 }
1448 1458
1449 1459
1450 void CodeGenerator::AssemblePrologue() { 1460 void CodeGenerator::AssemblePrologue() {
1451 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); 1461 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
1452 1462 if (descriptor->IsCFunctionCall()) {
1453 if (descriptor->kind() == CallDescriptor::kCallAddress) {
1454 __ function_descriptor(); 1463 __ function_descriptor();
1455 __ mflr(r0); 1464 __ mflr(r0);
1456 if (FLAG_enable_embedded_constant_pool) { 1465 if (FLAG_enable_embedded_constant_pool) {
1457 __ Push(r0, fp, kConstantPoolRegister); 1466 __ Push(r0, fp, kConstantPoolRegister);
1458 // Adjust FP to point to saved FP. 1467 // Adjust FP to point to saved FP.
1459 __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset)); 1468 __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
1460 } else { 1469 } else {
1461 __ Push(r0, fp); 1470 __ Push(r0, fp);
1462 __ mr(fp, sp); 1471 __ mr(fp, sp);
1463 } 1472 }
1464 } else if (descriptor->IsJSFunctionCall()) { 1473 } else if (descriptor->IsJSFunctionCall()) {
1465 CompilationInfo* info = this->info(); 1474 CompilationInfo* info = this->info();
1466 __ Prologue(info->IsCodePreAgingActive()); 1475 __ Prologue(info->IsCodePreAgingActive());
1467 } else if (needs_frame_) { 1476 } else if (frame()->needs_frame()) {
1468 __ StubPrologue(); 1477 __ StubPrologue();
1469 } else { 1478 } else {
1470 frame()->SetElidedFrameSizeInSlots(0); 1479 frame()->SetElidedFrameSizeInSlots(0);
1471 } 1480 }
1481 frame_access_state()->SetFrameAccessToDefault();
1472 1482
1473 int stack_shrink_slots = frame()->GetSpillSlotCount(); 1483 int stack_shrink_slots = frame()->GetSpillSlotCount();
1474 if (info()->is_osr()) { 1484 if (info()->is_osr()) {
1475 // TurboFan OSR-compiled functions cannot be entered directly. 1485 // TurboFan OSR-compiled functions cannot be entered directly.
1476 __ Abort(kShouldNotDirectlyEnterOsrFunction); 1486 __ Abort(kShouldNotDirectlyEnterOsrFunction);
1477 1487
1478 // Unoptimized code jumps directly to this entrypoint while the unoptimized 1488 // Unoptimized code jumps directly to this entrypoint while the unoptimized
1479 // frame is still on the stack. Optimized code uses OSR values directly from 1489 // frame is still on the stack. Optimized code uses OSR values directly from
1480 // the unoptimized frame. Thus, all that needs to be done is to allocate the 1490 // the unoptimized frame. Thus, all that needs to be done is to allocate the
1481 // remaining stack slots. 1491 // remaining stack slots.
(...skipping 49 matching lines...)
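The reworked prologue above chooses frame construction from the call descriptor and frame()->needs_frame() rather than the old needs_frame_ flag. This is a simplified sketch of that decision order only; it is not the real CodeGenerator and the helper name is hypothetical.

#include <cassert>

enum class FramePrologue { kNativeFrame, kJSPrologue, kStubPrologue, kElided };

// Hypothetical helper mirroring the order of checks in AssemblePrologue().
FramePrologue ChoosePrologue(bool is_c_function_call, bool is_js_function_call,
                             bool needs_frame) {
  if (is_c_function_call) return FramePrologue::kNativeFrame;
  if (is_js_function_call) return FramePrologue::kJSPrologue;
  if (needs_frame) return FramePrologue::kStubPrologue;
  return FramePrologue::kElided;  // no frame: sp-based access is required
}

int main() {
  assert(ChoosePrologue(true, false, false) == FramePrologue::kNativeFrame);
  assert(ChoosePrologue(false, false, true) == FramePrologue::kStubPrologue);
  assert(ChoosePrologue(false, false, false) == FramePrologue::kElided);
  return 0;
}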
1531 if (saves != 0) { 1541 if (saves != 0) {
1532 __ MultiPop(saves); 1542 __ MultiPop(saves);
1533 } 1543 }
1534 1544
1535 // Restore double registers. 1545 // Restore double registers.
1536 const RegList double_saves = descriptor->CalleeSavedFPRegisters(); 1546 const RegList double_saves = descriptor->CalleeSavedFPRegisters();
1537 if (double_saves != 0) { 1547 if (double_saves != 0) {
1538 __ MultiPopDoubles(double_saves); 1548 __ MultiPopDoubles(double_saves);
1539 } 1549 }
1540 1550
1541 if (descriptor->kind() == CallDescriptor::kCallAddress) { 1551 if (descriptor->IsCFunctionCall()) {
1542 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize); 1552 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize);
1543 } else if (descriptor->IsJSFunctionCall() || needs_frame_) { 1553 } else if (frame()->needs_frame()) {
1544 // Canonicalize JSFunction return sites for now. 1554 // Canonicalize JSFunction return sites for now.
1545 if (return_label_.is_bound()) { 1555 if (return_label_.is_bound()) {
1546 __ b(&return_label_); 1556 __ b(&return_label_);
1547 return; 1557 return;
1548 } else { 1558 } else {
1549 __ bind(&return_label_); 1559 __ bind(&return_label_);
1550 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize); 1560 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize);
1551 } 1561 }
1552 } else { 1562 } else {
1553 __ Drop(pop_count); 1563 __ Drop(pop_count);
(...skipping 205 matching lines...)
1759 padding_size -= v8::internal::Assembler::kInstrSize; 1769 padding_size -= v8::internal::Assembler::kInstrSize;
1760 } 1770 }
1761 } 1771 }
1762 } 1772 }
1763 1773
1764 #undef __ 1774 #undef __
1765 1775
1766 } // namespace compiler 1776 } // namespace compiler
1767 } // namespace internal 1777 } // namespace internal
1768 } // namespace v8 1778 } // namespace v8