OLD | NEW |
---|---|
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #ifndef VM_ASSEMBLER_ARM64_H_ | 5 #ifndef VM_ASSEMBLER_ARM64_H_ |
6 #define VM_ASSEMBLER_ARM64_H_ | 6 #define VM_ASSEMBLER_ARM64_H_ |
7 | 7 |
8 #ifndef VM_ASSEMBLER_H_ | 8 #ifndef VM_ASSEMBLER_H_ |
9 #error Do not include assembler_arm64.h directly; use assembler.h instead. | 9 #error Do not include assembler_arm64.h directly; use assembler.h instead. |
10 #endif | 10 #endif |
(...skipping 122 matching lines...) | |
133 } else { | 133 } else { |
134 ASSERT((at == PreIndex) || (at == PostIndex)); | 134 ASSERT((at == PreIndex) || (at == PostIndex)); |
135 return Utils::IsInt(9, offset); | 135 return Utils::IsInt(9, offset); |
136 } | 136 } |
137 } | 137 } |
138 | 138 |
139 // PC-relative load address. | 139 // PC-relative load address. |
140 static Address PC(int32_t pc_off) { | 140 static Address PC(int32_t pc_off) { |
141 ASSERT(CanHoldOffset(pc_off, PCOffset)); | 141 ASSERT(CanHoldOffset(pc_off, PCOffset)); |
142 Address addr; | 142 Address addr; |
143 addr.encoding_ = (((pc_off >> 2) & kImm19Mask) << kImm19Shift); | 143 addr.encoding_ = (((pc_off >> 2) << kImm19Shift) & kImm19Mask); |
144 addr.base_ = kNoRegister; | 144 addr.base_ = kNoRegister; |
145 addr.type_ = PCOffset; | 145 addr.type_ = PCOffset; |
146 return addr; | 146 return addr; |
147 } | 147 } |
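
Note on the reordering in Address::PC: masking after the shift only makes sense if kImm19Mask is now defined at its field position (bits 23:5) rather than as a bare 19-bit value; the new "instr & ~kImm19Mask" in EncodeImm19BranchOffset further down points the same way. A minimal standalone sketch under that assumption (the constants here are illustrative, not the VM's headers):

    #include <cassert>
    #include <cstdint>

    constexpr int32_t kImm19Shift = 5;                       // assumed field position
    constexpr int32_t kImm19Mask = 0x7ffff << kImm19Shift;   // mask already shifted into place

    // New form: move the word offset into the field, then trim to the field width.
    int32_t EncodeImm19(int32_t byte_off) {
      return ((byte_off >> 2) << kImm19Shift) & kImm19Mask;
    }

    int main() {
      // The old form '((off >> 2) & kImm19Mask) << kImm19Shift' would pre-mask with the
      // field-position mask and lose the low bits of the immediate.
      assert(EncodeImm19(8) == (2 << kImm19Shift));
      return 0;
    }
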
148 | 148 |
149 // Base register rn with offset rm. rm is sign-extended according to ext. | 149 // Base register rn with offset rm. rm is sign-extended according to ext. |
150 // If ext is UXTX, rm may be optionally scaled by the | 150 // If ext is UXTX, rm may be optionally scaled by the |
151 // Log2OperandSize (specified by the instruction). | 151 // Log2OperandSize (specified by the instruction). |
152 Address(Register rn, Register rm, Extend ext = UXTX, bool scaled = false) { | 152 Address(Register rn, Register rm, Extend ext = UXTX, bool scaled = false) { |
153 ASSERT((rn != R31) && (rn != ZR)); | 153 ASSERT((rn != R31) && (rn != ZR)); |
(...skipping 417 matching lines...) | |
571 void neg(Register rd, Register rm) { | 571 void neg(Register rd, Register rm) { |
572 sub(rd, ZR, Operand(rm)); | 572 sub(rd, ZR, Operand(rm)); |
573 } | 573 } |
574 void negs(Register rd, Register rm) { | 574 void negs(Register rd, Register rm) { |
575 subs(rd, ZR, Operand(rm)); | 575 subs(rd, ZR, Operand(rm)); |
576 } | 576 } |
577 void mul(Register rd, Register rn, Register rm) { | 577 void mul(Register rd, Register rn, Register rm) { |
578 madd(rd, rn, rm, ZR); | 578 madd(rd, rn, rm, ZR); |
579 } | 579 } |
580 void Push(Register reg) { | 580 void Push(Register reg) { |
581 ASSERT(reg != PP); // Only push PP with PushPP(). | |
581 str(reg, Address(SP, -1 * kWordSize, Address::PreIndex)); | 582 str(reg, Address(SP, -1 * kWordSize, Address::PreIndex)); |
582 } | 583 } |
583 void Pop(Register reg) { | 584 void Pop(Register reg) { |
585 ASSERT(reg != PP); // Only pop PP with PopPP(). | |
584 ldr(reg, Address(SP, 1 * kWordSize, Address::PostIndex)); | 586 ldr(reg, Address(SP, 1 * kWordSize, Address::PostIndex)); |
585 } | 587 } |
588 void PushPP() { | |
589 // Add the heap object tag back to PP before putting it on the stack. | |
590 add(PP, PP, Operand(kHeapObjectTag)); | |
591 str(PP, Address(SP, -1 * kWordSize, Address::PreIndex)); | |
592 } | |
593 void PopPP() { | |
594 ldr(PP, Address(SP, 1 * kWordSize, Address::PostIndex)); | |
595 sub(PP, PP, Operand(kHeapObjectTag)); | |
596 } | |
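
Note on the PushPP/PopPP pair: while in the PP register the pool pointer is kept untagged (LoadPoolPointer below strips the tag so pool loads use word-aligned offsets), so the tag is added back before PP is spilled and removed again after it is reloaded. A tiny sketch of that round-trip, assuming kHeapObjectTag is 1:

    #include <cassert>
    #include <cstdint>

    constexpr uint64_t kTagSketch = 1;  // assumed value of kHeapObjectTag

    uint64_t RetagBeforeSpill(uint64_t untagged_pp) { return untagged_pp + kTagSketch; }  // PushPP: add, then str
    uint64_t UntagAfterReload(uint64_t tagged_pp)   { return tagged_pp - kTagSketch; }    // PopPP: ldr, then sub

    int main() {
      const uint64_t pp = 0x1000;  // illustrative untagged pool pointer (word-aligned)
      assert(UntagAfterReload(RetagBeforeSpill(pp)) == pp);
      return 0;
    }
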
586 void tst(Register rn, Operand o) { | 597 void tst(Register rn, Operand o) { |
587 ands(ZR, rn, o); | 598 ands(ZR, rn, o); |
588 } | 599 } |
589 void tsti(Register rn, uint64_t imm) { | 600 void tsti(Register rn, uint64_t imm) { |
590 andis(ZR, rn, imm); | 601 andis(ZR, rn, imm); |
591 } | 602 } |
592 | 603 |
604 // Branching to ExternalLabels. | |
605 void Branch(const ExternalLabel* label) { | |
606 LoadExternalLabel(TMP, label, kPatchable, PP); | |
607 br(TMP); | |
608 } | |
609 | |
610 void BranchPatchable(const ExternalLabel* label) { | |
611 LoadPatchableImmediate(TMP, label->address()); | |
612 br(TMP); | |
613 } | |
614 | |
615 void BranchLink(const ExternalLabel* label, Register pp) { | |
616 if (Isolate::Current() == Dart::vm_isolate()) { | |
617 LoadImmediate(TMP, label->address(), kNoRegister); | |
618 blr(TMP); | |
619 } else { | |
620 LoadExternalLabel(TMP, label, kNotPatchable, pp); | |
621 blr(TMP); | |
622 } | |
623 } | |
624 | |
625 void BranchLinkPatchable(const ExternalLabel* label) { | |
626 LoadExternalLabel(TMP, label, kPatchable, PP); | |
627 blr(TMP); | |
628 } | |
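
Note on the new branch helpers: BranchLinkPatchable routes the target through an object-pool slot loaded via PP so the patcher can rewrite the pool entry later; BranchLink does the same with a non-patchable entry, except when generating code for the VM isolate, which has no pool pointer and therefore takes a raw immediate. (BranchPatchable instead emits a patchable full-width immediate load.) A standalone sketch of the link-call decision, as a model of the logic above rather than VM API:

    // Models the target strategy chosen by BranchLink/BranchLinkPatchable.
    enum class CallTarget { kRawImmediate, kPoolEntry, kPatchablePoolEntry };

    CallTarget ClassifyCall(bool generating_vm_isolate_code, bool patchable) {
      if (patchable) return CallTarget::kPatchablePoolEntry;             // BranchLinkPatchable
      if (generating_vm_isolate_code) return CallTarget::kRawImmediate;  // no PP available
      return CallTarget::kPoolEntry;                                     // BranchLink via pp
    }

    int main() { return ClassifyCall(false, true) == CallTarget::kPatchablePoolEntry ? 0 : 1; }
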
629 | |
593 // Object pool, loading from pool, etc. | 630 // Object pool, loading from pool, etc. |
594 void LoadPoolPointer(Register pp) { | 631 void LoadPoolPointer(Register pp) { |
595 const intptr_t object_pool_pc_dist = | 632 const intptr_t object_pool_pc_dist = |
596 Instructions::HeaderSize() - Instructions::object_pool_offset() + | 633 Instructions::HeaderSize() - Instructions::object_pool_offset() + |
597 CodeSize(); | 634 CodeSize(); |
598 // PP <- Read(PC - object_pool_pc_dist). | 635 // PP <- Read(PC - object_pool_pc_dist). |
599 ldr(pp, Address::PC(-object_pool_pc_dist)); | 636 ldr(pp, Address::PC(-object_pool_pc_dist)); |
637 // Remove tag so that offsets are aligned for loads from pp. | |
638 sub(pp, pp, Operand(kHeapObjectTag)); | |
zra 2014/04/17 21:27:27 When in the PP register, the object pool pointer i…
regis 2014/04/17 21:42:28 Sounds good. How about you merge most of this comm…
zra 2014/04/17 21:56:32 Done.
| |
600 } | 639 } |
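
Note on LoadPoolPointer: the pc-relative load reaches back over the code emitted so far and the Instructions header to the object_pool_ field, then strips the heap-object tag so later pool loads use aligned offsets. A worked sketch of the distance computation with purely illustrative layout numbers (the real values come from Instructions::HeaderSize() and Instructions::object_pool_offset()):

    #include <cassert>
    #include <cstdint>

    int main() {
      // Illustrative numbers only; not the VM's actual layout.
      const int64_t header_size = 32;   // Instructions::HeaderSize(): object start -> first instruction
      const int64_t pool_offset = 8;    // Instructions::object_pool_offset(): object start -> object_pool_ field
      const int64_t code_size   = 64;   // CodeSize(): bytes emitted so far, i.e. current PC - first instruction

      // The current PC is (header_size + code_size) bytes past the object start, so
      // the pool field sits (header_size - pool_offset + code_size) bytes behind it.
      const int64_t pc_dist = header_size - pool_offset + code_size;
      assert(pc_dist == 88);            // ldr(pp, Address::PC(-pc_dist)) then reads object_pool_
      return 0;
    }
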
601 | 640 |
602 enum Patchability { | 641 enum Patchability { |
603 kPatchable, | 642 kPatchable, |
604 kNotPatchable, | 643 kNotPatchable, |
605 }; | 644 }; |
606 | 645 |
607 void LoadWordFromPoolOffset(Register dst, Register pp, uint32_t offset); | 646 void LoadWordFromPoolOffset(Register dst, Register pp, uint32_t offset); |
647 intptr_t FindExternalLabel(const ExternalLabel* label, | |
648 Patchability patchable); | |
608 intptr_t FindObject(const Object& obj, Patchability patchable); | 649 intptr_t FindObject(const Object& obj, Patchability patchable); |
609 intptr_t FindImmediate(int64_t imm); | 650 intptr_t FindImmediate(int64_t imm); |
610 bool CanLoadObjectFromPool(const Object& object); | 651 bool CanLoadObjectFromPool(const Object& object); |
611 bool CanLoadImmediateFromPool(int64_t imm, Register pp); | 652 bool CanLoadImmediateFromPool(int64_t imm, Register pp); |
653 void LoadExternalLabel(Register dst, const ExternalLabel* label, | |
654 Patchability patchable, Register pp); | |
612 void LoadObject(Register dst, const Object& obj, Register pp); | 655 void LoadObject(Register dst, const Object& obj, Register pp); |
656 void LoadDecodableImmediate(Register reg, int64_t imm, Register pp); | |
657 void LoadPatchableImmediate(Register reg, int64_t imm); | |
613 void LoadImmediate(Register reg, int64_t imm, Register pp); | 658 void LoadImmediate(Register reg, int64_t imm, Register pp); |
614 | 659 |
615 private: | 660 private: |
616 AssemblerBuffer buffer_; // Contains position independent code. | 661 AssemblerBuffer buffer_; // Contains position independent code. |
617 | 662 |
618 // Objects and patchable jump targets. | 663 // Objects and patchable jump targets. |
619 GrowableObjectArray& object_pool_; | 664 GrowableObjectArray& object_pool_; |
620 | 665 |
621 // Patchability of pool entries. | 666 // Patchability of pool entries. |
622 GrowableArray<Patchability> patchable_pool_entries_; | 667 GrowableArray<Patchability> patchable_pool_entries_; |
(...skipping 136 matching lines...) | |
759 const int32_t encoding = | 804 const int32_t encoding = |
760 op | size | s | | 805 op | size | s | |
761 (static_cast<int32_t>(rd) << kRdShift) | | 806 (static_cast<int32_t>(rd) << kRdShift) | |
762 (static_cast<int32_t>(rn) << kRnShift) | | 807 (static_cast<int32_t>(rn) << kRnShift) | |
763 o.encoding(); | 808 o.encoding(); |
764 Emit(encoding); | 809 Emit(encoding); |
765 } | 810 } |
766 | 811 |
767 int32_t EncodeImm19BranchOffset(int64_t imm, int32_t instr) { | 812 int32_t EncodeImm19BranchOffset(int64_t imm, int32_t instr) { |
768 const int32_t imm32 = static_cast<int32_t>(imm); | 813 const int32_t imm32 = static_cast<int32_t>(imm); |
769 const int32_t off = (((imm32 >> 2) & kImm19Mask) << kImm19Shift); | 814 const int32_t off = (((imm32 >> 2) << kImm19Shift) & kImm19Mask); |
770 return (instr & ~(kImm19Mask << kImm19Shift)) | off; | 815 return (instr & ~kImm19Mask) | off; |
771 } | 816 } |
772 | 817 |
773 int64_t DecodeImm19BranchOffset(int32_t instr) { | 818 int64_t DecodeImm19BranchOffset(int32_t instr) { |
774 const int32_t off = (((instr >> kImm19Shift) & kImm19Mask) << 13) >> 13; | 819 const int32_t off = (((instr & kImm19Mask) >> kImm19Shift) << 13) >> 13; |
775 return static_cast<int64_t>(off); | 820 return static_cast<int64_t>(off); |
776 } | 821 } |
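
Note on DecodeImm19BranchOffset: the "<< 13 >> 13" pair is the usual sign-extension idiom for a 19-bit field held in a signed 32-bit integer (13 + 19 = 32). A standalone sketch:

    #include <cassert>
    #include <cstdint>

    // Shift the 19-bit field up to the sign bit, then arithmetically shift back.
    // Relies on two's-complement wrap and arithmetic right shift, as the VM code does.
    int32_t SignExtend19(int32_t field19) {
      return (field19 << 13) >> 13;
    }

    int main() {
      assert(SignExtend19(0x7ffff) == -1);   // all-ones 19-bit value is -1
      assert(SignExtend19(0x00001) == 1);
      return 0;
    }
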
777 | 822 |
778 void EmitCompareAndBranch(CompareAndBranchOp op, Register rt, int64_t imm, | 823 void EmitCompareAndBranch(CompareAndBranchOp op, Register rt, int64_t imm, |
779 OperandSize sz) { | 824 OperandSize sz) { |
780 ASSERT((sz == kDoubleWord) || (sz == kWord)); | 825 ASSERT((sz == kDoubleWord) || (sz == kWord)); |
781 ASSERT(Utils::IsInt(21, imm) && ((imm & 0x3) == 0)); | 826 ASSERT(Utils::IsInt(21, imm) && ((imm & 0x3) == 0)); |
782 const int32_t size = (sz == kDoubleWord) ? B31 : 0; | 827 const int32_t size = (sz == kDoubleWord) ? B31 : 0; |
783 const int32_t encoded_offset = EncodeImm19BranchOffset(imm, 0); | 828 const int32_t encoded_offset = EncodeImm19BranchOffset(imm, 0); |
784 const int32_t encoding = | 829 const int32_t encoding = |
785 op | size | | 830 op | size | |
786 (static_cast<int32_t>(rt) << kRtShift) | | 831 (static_cast<int32_t>(rt) << kRtShift) | |
787 encoded_offset; | 832 encoded_offset; |
788 Emit(encoding); | 833 Emit(encoding); |
789 } | 834 } |
790 | 835 |
791 void EmitConditionalBranch(ConditionalBranchOp op, Condition cond, | 836 void EmitConditionalBranch(ConditionalBranchOp op, Condition cond, |
792 int64_t imm) { | 837 int64_t imm) { |
793 ASSERT(Utils::IsInt(21, imm) && ((imm & 0x3) == 0)); | 838 ASSERT(Utils::IsInt(21, imm) && ((imm & 0x3) == 0)); |
794 const int32_t encoding = | 839 const int32_t encoding = |
795 op | | 840 op | |
796 (static_cast<int32_t>(cond) << kCondShift) | | 841 (static_cast<int32_t>(cond) << kCondShift) | |
797 (((imm >> 2) & kImm19Mask) << kImm19Shift); | 842 (((imm >> 2) << kImm19Shift) & kImm19Mask); |
798 Emit(encoding); | 843 Emit(encoding); |
799 } | 844 } |
800 | 845 |
801 bool CanEncodeImm19BranchOffset(int64_t offset) { | 846 bool CanEncodeImm19BranchOffset(int64_t offset) { |
802 ASSERT(Utils::IsAligned(offset, 4)); | 847 ASSERT(Utils::IsAligned(offset, 4)); |
803 return Utils::IsInt(19, offset); | 848 return Utils::IsInt(19, offset); |
804 } | 849 } |
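
For reference, imm19 holds a signed word offset, so the architectural reach of these branches is about +/-1 MB (+/-2^18 instructions of 4 bytes each); the guard above checks the byte offset against 19 bits, i.e. roughly +/-256 KB, which is more conservative than that. A quick arithmetic check:

    #include <cstdint>

    constexpr int64_t kImm19Words = int64_t{1} << 18;   // +/- 2^18 instructions
    constexpr int64_t kImm19Bytes = kImm19Words * 4;    // +/- 1 MB architectural reach
    static_assert(kImm19Bytes == (int64_t{1} << 20), "imm19 spans +/- 1 MB of byte offsets");

    constexpr int64_t kGuardBytes = int64_t{1} << 18;   // IsInt(19, byte_offset) => +/- 2^18 bytes
    static_assert(kGuardBytes == 256 * 1024, "the check above allows roughly +/- 256 KB");
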
805 | 850 |
806 // TODO(zra): Implement far branches. Requires loading large immediates. | 851 // TODO(zra): Implement far branches. Requires loading large immediates. |
807 void EmitBranch(ConditionalBranchOp op, Condition cond, Label* label) { | 852 void EmitBranch(ConditionalBranchOp op, Condition cond, Label* label) { |
(...skipping 53 matching lines...) | |
861 (static_cast<int32_t>(rt) << kRtShift) | | 906 (static_cast<int32_t>(rt) << kRtShift) | |
862 a.encoding(); | 907 a.encoding(); |
863 Emit(encoding); | 908 Emit(encoding); |
864 } | 909 } |
865 | 910 |
866 void EmitPCRelOp(PCRelOp op, Register rd, int64_t imm) { | 911 void EmitPCRelOp(PCRelOp op, Register rd, int64_t imm) { |
867 ASSERT(Utils::IsInt(21, imm)); | 912 ASSERT(Utils::IsInt(21, imm)); |
868 ASSERT((rd != R31) && (rd != SP)); | 913 ASSERT((rd != R31) && (rd != SP)); |
869 const Register crd = ConcreteRegister(rd); | 914 const Register crd = ConcreteRegister(rd); |
870 const int32_t loimm = (imm & 0x3) << 29; | 915 const int32_t loimm = (imm & 0x3) << 29; |
871 const int32_t hiimm = ((imm >> 2) & kImm19Mask) << kImm19Shift; | 916 const int32_t hiimm = ((imm >> 2) << kImm19Shift) & kImm19Mask; |
872 const int32_t encoding = | 917 const int32_t encoding = |
873 op | loimm | hiimm | | 918 op | loimm | hiimm | |
874 (static_cast<int32_t>(crd) << kRdShift); | 919 (static_cast<int32_t>(crd) << kRdShift); |
875 Emit(encoding); | 920 Emit(encoding); |
876 } | 921 } |
877 | 922 |
878 void EmitMiscDP2Source(MiscDP2SourceOp op, | 923 void EmitMiscDP2Source(MiscDP2SourceOp op, |
879 Register rd, Register rn, Register rm, | 924 Register rd, Register rn, Register rm, |
880 OperandSize sz) { | 925 OperandSize sz) { |
881 ASSERT((rd != SP) && (rn != SP) && (rm != SP)); | 926 ASSERT((rd != SP) && (rn != SP) && (rm != SP)); |
(...skipping 27 matching lines...) | |
909 Emit(encoding); | 954 Emit(encoding); |
910 } | 955 } |
911 | 956 |
912 DISALLOW_ALLOCATION(); | 957 DISALLOW_ALLOCATION(); |
913 DISALLOW_COPY_AND_ASSIGN(Assembler); | 958 DISALLOW_COPY_AND_ASSIGN(Assembler); |
914 }; | 959 }; |
915 | 960 |
916 } // namespace dart | 961 } // namespace dart |
917 | 962 |
918 #endif // VM_ASSEMBLER_ARM64_H_ | 963 #endif // VM_ASSEMBLER_ARM64_H_ |