OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #ifndef VM_ASSEMBLER_ARM64_H_ | 5 #ifndef VM_ASSEMBLER_ARM64_H_ |
6 #define VM_ASSEMBLER_ARM64_H_ | 6 #define VM_ASSEMBLER_ARM64_H_ |
7 | 7 |
8 #ifndef VM_ASSEMBLER_H_ | 8 #ifndef VM_ASSEMBLER_H_ |
9 #error Do not include assembler_arm64.h directly; use assembler.h instead. | 9 #error Do not include assembler_arm64.h directly; use assembler.h instead. |
10 #endif | 10 #endif |
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
133 } else { | 133 } else { |
134 ASSERT((at == PreIndex) || (at == PostIndex)); | 134 ASSERT((at == PreIndex) || (at == PostIndex)); |
135 return Utils::IsInt(9, offset); | 135 return Utils::IsInt(9, offset); |
136 } | 136 } |
137 } | 137 } |
138 | 138 |
139 // PC-relative load address. | 139 // PC-relative load address. |
140 static Address PC(int32_t pc_off) { | 140 static Address PC(int32_t pc_off) { |
141 ASSERT(CanHoldOffset(pc_off, PCOffset)); | 141 ASSERT(CanHoldOffset(pc_off, PCOffset)); |
142 Address addr; | 142 Address addr; |
143 addr.encoding_ = (((pc_off >> 2) & kImm19Mask) << kImm19Shift); | 143 addr.encoding_ = (((pc_off >> 2) << kImm19Shift) & kImm19Mask); |
144 addr.base_ = kNoRegister; | 144 addr.base_ = kNoRegister; |
145 addr.type_ = PCOffset; | 145 addr.type_ = PCOffset; |
146 return addr; | 146 return addr; |
147 } | 147 } |
148 | 148 |
149 // Base register rn with offset rm. rm is sign-extended according to ext. | 149 // Base register rn with offset rm. rm is sign-extended according to ext. |
150 // If ext is UXTX, rm may be optionally scaled by the | 150 // If ext is UXTX, rm may be optionally scaled by the |
151 // Log2OperandSize (specified by the instruction). | 151 // Log2OperandSize (specified by the instruction). |
152 Address(Register rn, Register rm, Extend ext = UXTX, bool scaled = false) { | 152 Address(Register rn, Register rm, Extend ext = UXTX, bool scaled = false) { |
153 ASSERT((rn != R31) && (rn != ZR)); | 153 ASSERT((rn != R31) && (rn != ZR)); |
(...skipping 417 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
  // rd <- -rm. Alias: subtract rm from the zero register.
  void neg(Register rd, Register rm) {
    sub(rd, ZR, Operand(rm));
  }
  // rd <- -rm, also setting the condition flags (flag-setting subtract
  // from the zero register).
  void negs(Register rd, Register rm) {
    subs(rd, ZR, Operand(rm));
  }
  // rd <- rn * rm. Alias of multiply-add with the zero register as addend.
  void mul(Register rd, Register rn, Register rm) {
    madd(rd, rn, rm, ZR);
  }
  // Pushes reg onto the stack: pre-decrements SP by one word, then stores.
  void Push(Register reg) {
    ASSERT(reg != PP);  // Only push PP with PushPP().
    str(reg, Address(SP, -1 * kWordSize, Address::PreIndex));
  }
  // Pops the word on top of the stack into reg (SP post-incremented by one
  // word).
  void Pop(Register reg) {
    ASSERT(reg != PP);  // Only pop PP with PopPP().
    ldr(reg, Address(SP, 1 * kWordSize, Address::PostIndex));
  }
  // Pushes the pool pointer. PP is kept untagged while in the register
  // (see LoadPoolPointer), so the heap object tag is restored before the
  // value goes on the stack.
  // NOTE(review): this leaves PP itself tagged after the push — callers
  // appear expected to reload or pop PP before using it again; confirm.
  void PushPP() {
    // Add the heap object tag back to PP before putting it on the stack.
    add(PP, PP, Operand(kHeapObjectTag));
    str(PP, Address(SP, -1 * kWordSize, Address::PreIndex));
  }
  // Pops the (tagged) pool pointer from the stack and strips the heap
  // object tag, restoring the untagged in-register form of PP (see
  // LoadPoolPointer).
  void PopPP() {
    ldr(PP, Address(SP, 1 * kWordSize, Address::PostIndex));
    sub(PP, PP, Operand(kHeapObjectTag));
  }
  // Sets condition flags according to rn & o; the result is discarded
  // (flag-setting AND into the zero register).
  void tst(Register rn, Operand o) {
    ands(ZR, rn, o);
  }
  // Immediate form of tst: sets flags according to rn & imm.
  void tsti(Register rn, uint64_t imm) {
    andis(ZR, rn, imm);
  }
592 | 603 |
  // Branching to ExternalLabels.

  // Jumps to label: the target address is loaded into TMP from a patchable
  // object pool entry (via PP), then branched to.
  void Branch(const ExternalLabel* label) {
    LoadExternalLabel(TMP, label, kPatchable, PP);
    br(TMP);
  }
| 609 |
  // Jumps to label using a fixed-length patchable immediate load of the
  // target address into TMP (no object pool involved).
  void BranchPatchable(const ExternalLabel* label) {
    LoadPatchableImmediate(TMP, label->address());
    br(TMP);
  }
| 614 |
| 615 void BranchLink(const ExternalLabel* label, Register pp) { |
| 616 if (Isolate::Current() == Dart::vm_isolate()) { |
| 617 LoadImmediate(TMP, label->address(), kNoRegister); |
| 618 blr(TMP); |
| 619 } else { |
| 620 LoadExternalLabel(TMP, label, kNotPatchable, pp); |
| 621 blr(TMP); |
| 622 } |
| 623 } |
| 624 |
  // Calls (branch-and-link through TMP) the code at label; the target is
  // loaded from a patchable object pool entry via PP so the call site can
  // be retargeted later.
  void BranchLinkPatchable(const ExternalLabel* label) {
    LoadExternalLabel(TMP, label, kPatchable, PP);
    blr(TMP);
  }
| 629 |
  // Object pool, loading from pool, etc.

  // Loads the object pool pointer into pp with a PC-relative load, using
  // the fixed distance between the current code position and the pool
  // field in the Instructions header, then untags it.
  void LoadPoolPointer(Register pp) {
    const intptr_t object_pool_pc_dist =
        Instructions::HeaderSize() - Instructions::object_pool_offset() +
        CodeSize();
    // PP <- Read(PC - object_pool_pc_dist).
    ldr(pp, Address::PC(-object_pool_pc_dist));

    // When in the PP register, the pool pointer is untagged. When we
    // push it on the stack with PushPP it is tagged again. PopPP then untags
    // when restoring from the stack. This will make loading from the object
    // pool only one instruction for the first 4096 entries. Otherwise, because
    // the offset wouldn't be aligned, it would always be at least two
    // instructions.
    sub(pp, pp, Operand(kHeapObjectTag));
  }
601 | 646 |
  // Whether an object pool entry's value may be patched after code
  // generation (cf. patchable_pool_entries_ below).
  enum Patchability {
    kPatchable,
    kNotPatchable,
  };
606 | 651 |
607 void LoadWordFromPoolOffset(Register dst, Register pp, uint32_t offset); | 652 void LoadWordFromPoolOffset(Register dst, Register pp, uint32_t offset); |
| 653 intptr_t FindExternalLabel(const ExternalLabel* label, |
| 654 Patchability patchable); |
608 intptr_t FindObject(const Object& obj, Patchability patchable); | 655 intptr_t FindObject(const Object& obj, Patchability patchable); |
609 intptr_t FindImmediate(int64_t imm); | 656 intptr_t FindImmediate(int64_t imm); |
610 bool CanLoadObjectFromPool(const Object& object); | 657 bool CanLoadObjectFromPool(const Object& object); |
611 bool CanLoadImmediateFromPool(int64_t imm, Register pp); | 658 bool CanLoadImmediateFromPool(int64_t imm, Register pp); |
| 659 void LoadExternalLabel(Register dst, const ExternalLabel* label, |
| 660 Patchability patchable, Register pp); |
612 void LoadObject(Register dst, const Object& obj, Register pp); | 661 void LoadObject(Register dst, const Object& obj, Register pp); |
| 662 void LoadDecodableImmediate(Register reg, int64_t imm, Register pp); |
| 663 void LoadPatchableImmediate(Register reg, int64_t imm); |
613 void LoadImmediate(Register reg, int64_t imm, Register pp); | 664 void LoadImmediate(Register reg, int64_t imm, Register pp); |
614 | 665 |
615 private: | 666 private: |
616 AssemblerBuffer buffer_; // Contains position independent code. | 667 AssemblerBuffer buffer_; // Contains position independent code. |
617 | 668 |
618 // Objects and patchable jump targets. | 669 // Objects and patchable jump targets. |
619 GrowableObjectArray& object_pool_; | 670 GrowableObjectArray& object_pool_; |
620 | 671 |
621 // Patchability of pool entries. | 672 // Patchability of pool entries. |
622 GrowableArray<Patchability> patchable_pool_entries_; | 673 GrowableArray<Patchability> patchable_pool_entries_; |
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
759 const int32_t encoding = | 810 const int32_t encoding = |
760 op | size | s | | 811 op | size | s | |
761 (static_cast<int32_t>(rd) << kRdShift) | | 812 (static_cast<int32_t>(rd) << kRdShift) | |
762 (static_cast<int32_t>(rn) << kRnShift) | | 813 (static_cast<int32_t>(rn) << kRnShift) | |
763 o.encoding(); | 814 o.encoding(); |
764 Emit(encoding); | 815 Emit(encoding); |
765 } | 816 } |
766 | 817 |
767 int32_t EncodeImm19BranchOffset(int64_t imm, int32_t instr) { | 818 int32_t EncodeImm19BranchOffset(int64_t imm, int32_t instr) { |
768 const int32_t imm32 = static_cast<int32_t>(imm); | 819 const int32_t imm32 = static_cast<int32_t>(imm); |
769 const int32_t off = (((imm32 >> 2) & kImm19Mask) << kImm19Shift); | 820 const int32_t off = (((imm32 >> 2) << kImm19Shift) & kImm19Mask); |
770 return (instr & ~(kImm19Mask << kImm19Shift)) | off; | 821 return (instr & ~kImm19Mask) | off; |
771 } | 822 } |
772 | 823 |
773 int64_t DecodeImm19BranchOffset(int32_t instr) { | 824 int64_t DecodeImm19BranchOffset(int32_t instr) { |
774 const int32_t off = (((instr >> kImm19Shift) & kImm19Shift) << 13) >> 13; | 825 const int32_t off = (((instr >> kImm19Shift) & kImm19Shift) << 13) >> 13; |
775 return static_cast<int64_t>(off); | 826 return static_cast<int64_t>(off); |
776 } | 827 } |
777 | 828 |
  // Emits a compare-and-branch instruction on register rt with the given
  // byte offset imm (signed 21-bit, 4-byte aligned, stored as imm19 word
  // offset). sz selects the 64-bit (kDoubleWord, sf bit B31 set) or 32-bit
  // (kWord) register view.
  void EmitCompareAndBranch(CompareAndBranchOp op, Register rt, int64_t imm,
                            OperandSize sz) {
    ASSERT((sz == kDoubleWord) || (sz == kWord));
    ASSERT(Utils::IsInt(21, imm) && ((imm & 0x3) == 0));
    const int32_t size = (sz == kDoubleWord) ? B31 : 0;
    // Encode the offset into an otherwise-zero word, then OR in the rest.
    const int32_t encoded_offset = EncodeImm19BranchOffset(imm, 0);
    const int32_t encoding =
        op | size |
        (static_cast<int32_t>(rt) << kRtShift) |
        encoded_offset;
    Emit(encoding);
  }
790 | 841 |
  // Emits a conditional branch on condition cond with the given byte
  // offset imm (signed 21-bit, 4-byte aligned). The offset is stored as a
  // word offset (imm >> 2) in the imm19 field; kImm19Mask is the in-place
  // (already shifted) mask for that field.
  void EmitConditionalBranch(ConditionalBranchOp op, Condition cond,
                             int64_t imm) {
    ASSERT(Utils::IsInt(21, imm) && ((imm & 0x3) == 0));
    const int32_t encoding =
        op |
        (static_cast<int32_t>(cond) << kCondShift) |
        (((imm >> 2) << kImm19Shift) & kImm19Mask);
    Emit(encoding);
  }
800 | 851 |
801 bool CanEncodeImm19BranchOffset(int64_t offset) { | 852 bool CanEncodeImm19BranchOffset(int64_t offset) { |
802 ASSERT(Utils::IsAligned(offset, 4)); | 853 ASSERT(Utils::IsAligned(offset, 4)); |
803 return Utils::IsInt(19, offset); | 854 return Utils::IsInt(19, offset); |
804 } | 855 } |
805 | 856 |
806 // TODO(zra): Implement far branches. Requires loading large immediates. | 857 // TODO(zra): Implement far branches. Requires loading large immediates. |
807 void EmitBranch(ConditionalBranchOp op, Condition cond, Label* label) { | 858 void EmitBranch(ConditionalBranchOp op, Condition cond, Label* label) { |
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
861 (static_cast<int32_t>(rt) << kRtShift) | | 912 (static_cast<int32_t>(rt) << kRtShift) | |
862 a.encoding(); | 913 a.encoding(); |
863 Emit(encoding); | 914 Emit(encoding); |
864 } | 915 } |
865 | 916 |
  // Emits a PC-relative address computation (PCRelOp, presumably the
  // ADR/ADRP family — confirm against the opcode definitions) into rd.
  // The signed 21-bit immediate is split per the encoding: the low 2 bits
  // go at bit 29 (immlo) and the high 19 bits into the imm19 field
  // (immhi); kImm19Mask is the in-place (already shifted) mask.
  void EmitPCRelOp(PCRelOp op, Register rd, int64_t imm) {
    ASSERT(Utils::IsInt(21, imm));
    ASSERT((rd != R31) && (rd != SP));
    const Register crd = ConcreteRegister(rd);
    const int32_t loimm = (imm & 0x3) << 29;
    const int32_t hiimm = ((imm >> 2) << kImm19Shift) & kImm19Mask;
    const int32_t encoding =
        op | loimm | hiimm |
        (static_cast<int32_t>(crd) << kRdShift);
    Emit(encoding);
  }
877 | 928 |
878 void EmitMiscDP2Source(MiscDP2SourceOp op, | 929 void EmitMiscDP2Source(MiscDP2SourceOp op, |
879 Register rd, Register rn, Register rm, | 930 Register rd, Register rn, Register rm, |
880 OperandSize sz) { | 931 OperandSize sz) { |
881 ASSERT((rd != SP) && (rn != SP) && (rm != SP)); | 932 ASSERT((rd != SP) && (rn != SP) && (rm != SP)); |
(...skipping 27 matching lines...) Expand all Loading... |
909 Emit(encoding); | 960 Emit(encoding); |
910 } | 961 } |
911 | 962 |
912 DISALLOW_ALLOCATION(); | 963 DISALLOW_ALLOCATION(); |
913 DISALLOW_COPY_AND_ASSIGN(Assembler); | 964 DISALLOW_COPY_AND_ASSIGN(Assembler); |
914 }; | 965 }; |
915 | 966 |
916 } // namespace dart | 967 } // namespace dart |
917 | 968 |
918 #endif // VM_ASSEMBLER_ARM64_H_ | 969 #endif // VM_ASSEMBLER_ARM64_H_ |
OLD | NEW |