OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ | 5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ |
6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ | 6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ |
7 | 7 |
8 #include "src/arm64/assembler-arm64.h" | 8 #include "src/arm64/assembler-arm64.h" |
9 #include "src/assembler.h" | 9 #include "src/assembler.h" |
10 #include "src/debug/debug.h" | 10 #include "src/debug/debug.h" |
(...skipping 39 matching lines...)
50 return reg_size; | 50 return reg_size; |
51 } | 51 } |
52 | 52 |
53 | 53 |
54 inline int CPURegister::SizeInBytes() const { | 54 inline int CPURegister::SizeInBytes() const { |
55 DCHECK(IsValid()); | 55 DCHECK(IsValid()); |
56 DCHECK(SizeInBits() % 8 == 0); | 56 DCHECK(SizeInBits() % 8 == 0); |
57 return reg_size / 8; | 57 return reg_size / 8; |
58 } | 58 } |
59 | 59 |
| 60 inline bool CPURegister::Is8Bits() const { |
| 61 DCHECK(IsValid()); |
| 62 return reg_size == 8; |
| 63 } |
| 64 |
| 65 inline bool CPURegister::Is16Bits() const { |
| 66 DCHECK(IsValid()); |
| 67 return reg_size == 16; |
| 68 } |
60 | 69 |
61 inline bool CPURegister::Is32Bits() const { | 70 inline bool CPURegister::Is32Bits() const { |
62 DCHECK(IsValid()); | 71 DCHECK(IsValid()); |
63 return reg_size == 32; | 72 return reg_size == 32; |
64 } | 73 } |
65 | 74 |
66 | 75 |
67 inline bool CPURegister::Is64Bits() const { | 76 inline bool CPURegister::Is64Bits() const { |
68 DCHECK(IsValid()); | 77 DCHECK(IsValid()); |
69 return reg_size == 64; | 78 return reg_size == 64; |
70 } | 79 } |
71 | 80 |
| 81 inline bool CPURegister::Is128Bits() const { |
| 82 DCHECK(IsValid()); |
| 83 return reg_size == 128; |
| 84 } |
72 | 85 |
73 inline bool CPURegister::IsValid() const { | 86 inline bool CPURegister::IsValid() const { |
74 if (IsValidRegister() || IsValidFPRegister()) { | 87 if (IsValidRegister() || IsValidVRegister()) { |
75 DCHECK(!IsNone()); | 88 DCHECK(!IsNone()); |
76 return true; | 89 return true; |
77 } else { | 90 } else { |
78 DCHECK(IsNone()); | 91 DCHECK(IsNone()); |
79 return false; | 92 return false; |
80 } | 93 } |
81 } | 94 } |
82 | 95 |
83 | 96 |
84 inline bool CPURegister::IsValidRegister() const { | 97 inline bool CPURegister::IsValidRegister() const { |
85 return IsRegister() && | 98 return IsRegister() && |
86 ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) && | 99 ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) && |
87 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); | 100 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); |
88 } | 101 } |
89 | 102 |
90 | 103 inline bool CPURegister::IsValidVRegister() const { |
91 inline bool CPURegister::IsValidFPRegister() const { | 104 return IsVRegister() && |
92 return IsFPRegister() && | 105 ((reg_size == kBRegSizeInBits) || (reg_size == kHRegSizeInBits) || |
93 ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) && | 106 (reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits) || |
94 (reg_code < kNumberOfFPRegisters); | 107 (reg_size == kQRegSizeInBits)) && |
| 108 (reg_code < kNumberOfVRegisters); |
95 } | 109 } |
96 | 110 |
97 | |
98 inline bool CPURegister::IsNone() const { | 111 inline bool CPURegister::IsNone() const { |
99 // kNoRegister types should always have size 0 and code 0. | 112 // kNoRegister types should always have size 0 and code 0. |
100 DCHECK((reg_type != kNoRegister) || (reg_code == 0)); | 113 DCHECK((reg_type != kNoRegister) || (reg_code == 0)); |
101 DCHECK((reg_type != kNoRegister) || (reg_size == 0)); | 114 DCHECK((reg_type != kNoRegister) || (reg_size == 0)); |
102 | 115 |
103 return reg_type == kNoRegister; | 116 return reg_type == kNoRegister; |
104 } | 117 } |
105 | 118 |
106 | 119 |
107 inline bool CPURegister::Is(const CPURegister& other) const { | 120 inline bool CPURegister::Is(const CPURegister& other) const { |
108 DCHECK(IsValidOrNone() && other.IsValidOrNone()); | 121 DCHECK(IsValidOrNone() && other.IsValidOrNone()); |
109 return Aliases(other) && (reg_size == other.reg_size); | 122 return Aliases(other) && (reg_size == other.reg_size); |
110 } | 123 } |
111 | 124 |
112 | 125 |
113 inline bool CPURegister::Aliases(const CPURegister& other) const { | 126 inline bool CPURegister::Aliases(const CPURegister& other) const { |
114 DCHECK(IsValidOrNone() && other.IsValidOrNone()); | 127 DCHECK(IsValidOrNone() && other.IsValidOrNone()); |
115 return (reg_code == other.reg_code) && (reg_type == other.reg_type); | 128 return (reg_code == other.reg_code) && (reg_type == other.reg_type); |
116 } | 129 } |
117 | 130 |
118 | 131 |
119 inline bool CPURegister::IsRegister() const { | 132 inline bool CPURegister::IsRegister() const { |
120 return reg_type == kRegister; | 133 return reg_type == kRegister; |
121 } | 134 } |
122 | 135 |
123 | 136 inline bool CPURegister::IsVRegister() const { return reg_type == kVRegister; } |
124 inline bool CPURegister::IsFPRegister() const { | |
125 return reg_type == kFPRegister; | |
126 } | |
127 | |
128 | 137 |
129 inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const { | 138 inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const { |
130 return (reg_size == other.reg_size) && (reg_type == other.reg_type); | 139 return (reg_size == other.reg_size) && (reg_type == other.reg_type); |
131 } | 140 } |
132 | 141 |
133 | 142 |
134 inline bool CPURegister::IsValidOrNone() const { | 143 inline bool CPURegister::IsValidOrNone() const { |
135 return IsValid() || IsNone(); | 144 return IsValid() || IsNone(); |
136 } | 145 } |
137 | 146 |
(...skipping 55 matching lines...)
193 DCHECK(IsValid()); | 202 DCHECK(IsValid()); |
194 DCHECK(CPURegister::Create(code, size_, type_).IsValid()); | 203 DCHECK(CPURegister::Create(code, size_, type_).IsValid()); |
195 list_ &= ~(1UL << code); | 204 list_ &= ~(1UL << code); |
196 } | 205 } |
197 | 206 |
198 | 207 |
199 inline Register Register::XRegFromCode(unsigned code) { | 208 inline Register Register::XRegFromCode(unsigned code) { |
200 if (code == kSPRegInternalCode) { | 209 if (code == kSPRegInternalCode) { |
201 return csp; | 210 return csp; |
202 } else { | 211 } else { |
203 DCHECK(code < kNumberOfRegisters); | 212 DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters)); |
204 return Register::Create(code, kXRegSizeInBits); | 213 return Register::Create(code, kXRegSizeInBits); |
205 } | 214 } |
206 } | 215 } |
207 | 216 |
208 | 217 |
209 inline Register Register::WRegFromCode(unsigned code) { | 218 inline Register Register::WRegFromCode(unsigned code) { |
210 if (code == kSPRegInternalCode) { | 219 if (code == kSPRegInternalCode) { |
211 return wcsp; | 220 return wcsp; |
212 } else { | 221 } else { |
213 DCHECK(code < kNumberOfRegisters); | 222 DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters)); |
214 return Register::Create(code, kWRegSizeInBits); | 223 return Register::Create(code, kWRegSizeInBits); |
215 } | 224 } |
216 } | 225 } |
217 | 226 |
218 | 227 inline VRegister VRegister::BRegFromCode(unsigned code) { |
219 inline FPRegister FPRegister::SRegFromCode(unsigned code) { | 228 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
220 DCHECK(code < kNumberOfFPRegisters); | 229 return VRegister::Create(code, kBRegSizeInBits); |
221 return FPRegister::Create(code, kSRegSizeInBits); | |
222 } | 230 } |
223 | 231 |
224 | 232 inline VRegister VRegister::HRegFromCode(unsigned code) { |
225 inline FPRegister FPRegister::DRegFromCode(unsigned code) { | 233 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
226 DCHECK(code < kNumberOfFPRegisters); | 234 return VRegister::Create(code, kHRegSizeInBits); |
227 return FPRegister::Create(code, kDRegSizeInBits); | |
228 } | 235 } |
229 | 236 |
| 237 inline VRegister VRegister::SRegFromCode(unsigned code) { |
| 238 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
| 239 return VRegister::Create(code, kSRegSizeInBits); |
| 240 } |
| 241 |
| 242 inline VRegister VRegister::DRegFromCode(unsigned code) { |
| 243 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
| 244 return VRegister::Create(code, kDRegSizeInBits); |
| 245 } |
| 246 |
| 247 inline VRegister VRegister::QRegFromCode(unsigned code) { |
| 248 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
| 249 return VRegister::Create(code, kQRegSizeInBits); |
| 250 } |
| 251 |
| 252 inline VRegister VRegister::VRegFromCode(unsigned code) { |
| 253 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
| 254 return VRegister::Create(code, kVRegSizeInBits); |
| 255 } |
230 | 256 |
231 inline Register CPURegister::W() const { | 257 inline Register CPURegister::W() const { |
232 DCHECK(IsValidRegister()); | 258 DCHECK(IsValidRegister()); |
233 return Register::WRegFromCode(reg_code); | 259 return Register::WRegFromCode(reg_code); |
234 } | 260 } |
235 | 261 |
236 | 262 |
237 inline Register CPURegister::X() const { | 263 inline Register CPURegister::X() const { |
238 DCHECK(IsValidRegister()); | 264 DCHECK(IsValidRegister()); |
239 return Register::XRegFromCode(reg_code); | 265 return Register::XRegFromCode(reg_code); |
240 } | 266 } |
241 | 267 |
| 268 inline VRegister CPURegister::V() const { |
| 269 DCHECK(IsValidVRegister()); |
| 270 return VRegister::VRegFromCode(reg_code); |
| 271 } |
242 | 272 |
243 inline FPRegister CPURegister::S() const { | 273 inline VRegister CPURegister::B() const { |
244 DCHECK(IsValidFPRegister()); | 274 DCHECK(IsValidVRegister()); |
245 return FPRegister::SRegFromCode(reg_code); | 275 return VRegister::BRegFromCode(reg_code); |
| 276 } |
| 277 |
| 278 inline VRegister CPURegister::H() const { |
| 279 DCHECK(IsValidVRegister()); |
| 280 return VRegister::HRegFromCode(reg_code); |
| 281 } |
| 282 |
| 283 inline VRegister CPURegister::S() const { |
| 284 DCHECK(IsValidVRegister()); |
| 285 return VRegister::SRegFromCode(reg_code); |
| 286 } |
| 287 |
| 288 inline VRegister CPURegister::D() const { |
| 289 DCHECK(IsValidVRegister()); |
| 290 return VRegister::DRegFromCode(reg_code); |
| 291 } |
| 292 |
| 293 inline VRegister CPURegister::Q() const { |
| 294 DCHECK(IsValidVRegister()); |
| 295 return VRegister::QRegFromCode(reg_code); |
246 } | 296 } |
247 | 297 |
248 | 298 |
249 inline FPRegister CPURegister::D() const { | |
250 DCHECK(IsValidFPRegister()); | |
251 return FPRegister::DRegFromCode(reg_code); | |
252 } | |
253 | |
254 | |
255 // Immediate. | 299 // Immediate. |
256 // Default initializer is for int types | 300 // Default initializer is for int types |
257 template<typename T> | 301 template<typename T> |
258 struct ImmediateInitializer { | 302 struct ImmediateInitializer { |
259 static const bool kIsIntType = true; | 303 static const bool kIsIntType = true; |
260 static inline RelocInfo::Mode rmode_for(T) { | 304 static inline RelocInfo::Mode rmode_for(T) { |
261 return sizeof(T) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32; | 305 return sizeof(T) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32; |
262 } | 306 } |
263 static inline int64_t immediate_for(T t) { | 307 static inline int64_t immediate_for(T t) { |
264 STATIC_ASSERT(sizeof(T) <= 8); | 308 STATIC_ASSERT(sizeof(T) <= 8); |
(...skipping 219 matching lines...)
484 | 528 |
485 MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode) | 529 MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode) |
486 : base_(base), addrmode_(addrmode) { | 530 : base_(base), addrmode_(addrmode) { |
487 DCHECK(base.Is64Bits() && !base.IsZero()); | 531 DCHECK(base.Is64Bits() && !base.IsZero()); |
488 | 532 |
489 if (offset.IsImmediate()) { | 533 if (offset.IsImmediate()) { |
490 offset_ = offset.ImmediateValue(); | 534 offset_ = offset.ImmediateValue(); |
491 | 535 |
492 regoffset_ = NoReg; | 536 regoffset_ = NoReg; |
493 } else if (offset.IsShiftedRegister()) { | 537 } else if (offset.IsShiftedRegister()) { |
494 DCHECK(addrmode == Offset); | 538 DCHECK((addrmode == Offset) || (addrmode == PostIndex)); |
495 | 539 |
496 regoffset_ = offset.reg(); | 540 regoffset_ = offset.reg(); |
497 shift_ = offset.shift(); | 541 shift_ = offset.shift(); |
498 shift_amount_ = offset.shift_amount(); | 542 shift_amount_ = offset.shift_amount(); |
499 | 543 |
500 extend_ = NO_EXTEND; | 544 extend_ = NO_EXTEND; |
501 offset_ = 0; | 545 offset_ = 0; |
502 | 546 |
503 // These assertions match those in the shifted-register constructor. | 547 // These assertions match those in the shifted-register constructor. |
504 DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP()); | 548 DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP()); |
(...skipping 365 matching lines...)
870 StaticVisitor::VisitRuntimeEntry(this); | 914 StaticVisitor::VisitRuntimeEntry(this); |
871 } | 915 } |
872 } | 916 } |
873 | 917 |
874 | 918 |
875 LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) { | 919 LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) { |
876 DCHECK(rt.IsValid()); | 920 DCHECK(rt.IsValid()); |
877 if (rt.IsRegister()) { | 921 if (rt.IsRegister()) { |
878 return rt.Is64Bits() ? LDR_x : LDR_w; | 922 return rt.Is64Bits() ? LDR_x : LDR_w; |
879 } else { | 923 } else { |
880 DCHECK(rt.IsFPRegister()); | 924 DCHECK(rt.IsVRegister()); |
881 return rt.Is64Bits() ? LDR_d : LDR_s; | 925 switch (rt.SizeInBits()) { |
| 926 case kBRegSizeInBits: |
| 927 return LDR_b; |
| 928 case kHRegSizeInBits: |
| 929 return LDR_h; |
| 930 case kSRegSizeInBits: |
| 931 return LDR_s; |
| 932 case kDRegSizeInBits: |
| 933 return LDR_d; |
| 934 default: |
| 935 DCHECK(rt.IsQ()); |
| 936 return LDR_q; |
| 937 } |
882 } | 938 } |
883 } | 939 } |
884 | 940 |
885 | |
886 LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt, | |
887 const CPURegister& rt2) { | |
888 DCHECK(AreSameSizeAndType(rt, rt2)); | |
889 USE(rt2); | |
890 if (rt.IsRegister()) { | |
891 return rt.Is64Bits() ? LDP_x : LDP_w; | |
892 } else { | |
893 DCHECK(rt.IsFPRegister()); | |
894 return rt.Is64Bits() ? LDP_d : LDP_s; | |
895 } | |
896 } | |
897 | |
898 | 941 |
899 LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) { | 942 LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) { |
900 DCHECK(rt.IsValid()); | 943 DCHECK(rt.IsValid()); |
901 if (rt.IsRegister()) { | 944 if (rt.IsRegister()) { |
902 return rt.Is64Bits() ? STR_x : STR_w; | 945 return rt.Is64Bits() ? STR_x : STR_w; |
903 } else { | 946 } else { |
904 DCHECK(rt.IsFPRegister()); | 947 DCHECK(rt.IsVRegister()); |
905 return rt.Is64Bits() ? STR_d : STR_s; | 948 switch (rt.SizeInBits()) { |
| 949 case kBRegSizeInBits: |
| 950 return STR_b; |
| 951 case kHRegSizeInBits: |
| 952 return STR_h; |
| 953 case kSRegSizeInBits: |
| 954 return STR_s; |
| 955 case kDRegSizeInBits: |
| 956 return STR_d; |
| 957 default: |
| 958 DCHECK(rt.IsQ()); |
| 959 return STR_q; |
| 960 } |
906 } | 961 } |
907 } | 962 } |
908 | 963 |
| 964 LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt, |
| 965 const CPURegister& rt2) { |
| 966 DCHECK_EQ(STP_w | LoadStorePairLBit, LDP_w); |
| 967 return static_cast<LoadStorePairOp>(StorePairOpFor(rt, rt2) | |
| 968 LoadStorePairLBit); |
| 969 } |
909 | 970 |
910 LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt, | 971 LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt, |
911 const CPURegister& rt2) { | 972 const CPURegister& rt2) { |
912 DCHECK(AreSameSizeAndType(rt, rt2)); | 973 DCHECK(AreSameSizeAndType(rt, rt2)); |
913 USE(rt2); | 974 USE(rt2); |
914 if (rt.IsRegister()) { | 975 if (rt.IsRegister()) { |
915 return rt.Is64Bits() ? STP_x : STP_w; | 976 return rt.Is64Bits() ? STP_x : STP_w; |
916 } else { | 977 } else { |
917 DCHECK(rt.IsFPRegister()); | 978 DCHECK(rt.IsVRegister()); |
918 return rt.Is64Bits() ? STP_d : STP_s; | 979 switch (rt.SizeInBits()) { |
| 980 case kSRegSizeInBits: |
| 981 return STP_s; |
| 982 case kDRegSizeInBits: |
| 983 return STP_d; |
| 984 default: |
| 985 DCHECK(rt.IsQ()); |
| 986 return STP_q; |
| 987 } |
919 } | 988 } |
920 } | 989 } |
921 | 990 |
922 | 991 |
923 LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) { | 992 LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) { |
924 if (rt.IsRegister()) { | 993 if (rt.IsRegister()) { |
925 return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit; | 994 return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit; |
926 } else { | 995 } else { |
927 DCHECK(rt.IsFPRegister()); | 996 DCHECK(rt.IsVRegister()); |
928 return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit; | 997 return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit; |
929 } | 998 } |
930 } | 999 } |
931 | 1000 |
932 | 1001 |
933 int Assembler::LinkAndGetInstructionOffsetTo(Label* label) { | 1002 int Assembler::LinkAndGetInstructionOffsetTo(Label* label) { |
934 DCHECK(kStartOfLabelLinkChain == 0); | 1003 DCHECK(kStartOfLabelLinkChain == 0); |
935 int offset = LinkAndGetByteOffsetTo(label); | 1004 int offset = LinkAndGetByteOffsetTo(label); |
936 DCHECK(IsAligned(offset, kInstructionSize)); | 1005 DCHECK(IsAligned(offset, kInstructionSize)); |
937 return offset >> kInstructionSizeLog2; | 1006 return offset >> kInstructionSizeLog2; |
(...skipping 163 matching lines...)
1101 DCHECK(is_uint12(imm12)); | 1170 DCHECK(is_uint12(imm12)); |
1102 return imm12 << ImmLSUnsigned_offset; | 1171 return imm12 << ImmLSUnsigned_offset; |
1103 } | 1172 } |
1104 | 1173 |
1105 | 1174 |
1106 Instr Assembler::ImmLS(int imm9) { | 1175 Instr Assembler::ImmLS(int imm9) { |
1107 DCHECK(is_int9(imm9)); | 1176 DCHECK(is_int9(imm9)); |
1108 return truncate_to_int9(imm9) << ImmLS_offset; | 1177 return truncate_to_int9(imm9) << ImmLS_offset; |
1109 } | 1178 } |
1110 | 1179 |
1111 | 1180 Instr Assembler::ImmLSPair(int imm7, unsigned size) { |
1112 Instr Assembler::ImmLSPair(int imm7, LSDataSize size) { | 1181 DCHECK_EQ((imm7 >> size) << size, imm7); |
1113 DCHECK(((imm7 >> size) << size) == imm7); | |
1114 int scaled_imm7 = imm7 >> size; | 1182 int scaled_imm7 = imm7 >> size; |
1115 DCHECK(is_int7(scaled_imm7)); | 1183 DCHECK(is_int7(scaled_imm7)); |
1116 return truncate_to_int7(scaled_imm7) << ImmLSPair_offset; | 1184 return truncate_to_int7(scaled_imm7) << ImmLSPair_offset; |
1117 } | 1185 } |
1118 | 1186 |
1119 | 1187 |
1120 Instr Assembler::ImmShiftLS(unsigned shift_amount) { | 1188 Instr Assembler::ImmShiftLS(unsigned shift_amount) { |
1121 DCHECK(is_uint1(shift_amount)); | 1189 DCHECK(is_uint1(shift_amount)); |
1122 return shift_amount << ImmShiftLS_offset; | 1190 return shift_amount << ImmShiftLS_offset; |
1123 } | 1191 } |
(...skipping 21 matching lines...)
1145 DCHECK(is_uint2(imm2)); | 1213 DCHECK(is_uint2(imm2)); |
1146 return imm2 << ImmBarrierDomain_offset; | 1214 return imm2 << ImmBarrierDomain_offset; |
1147 } | 1215 } |
1148 | 1216 |
1149 | 1217 |
1150 Instr Assembler::ImmBarrierType(int imm2) { | 1218 Instr Assembler::ImmBarrierType(int imm2) { |
1151 DCHECK(is_uint2(imm2)); | 1219 DCHECK(is_uint2(imm2)); |
1152 return imm2 << ImmBarrierType_offset; | 1220 return imm2 << ImmBarrierType_offset; |
1153 } | 1221 } |
1154 | 1222 |
1155 | 1223 unsigned Assembler::CalcLSDataSize(LoadStoreOp op) { |
1156 LSDataSize Assembler::CalcLSDataSize(LoadStoreOp op) { | 1224 DCHECK((LSSize_offset + LSSize_width) == (kInstructionSize * 8)); |
1157 DCHECK((SizeLS_offset + SizeLS_width) == (kInstructionSize * 8)); | 1225 unsigned size = static_cast<Instr>(op >> LSSize_offset); |
1158 return static_cast<LSDataSize>(op >> SizeLS_offset); | 1226 if ((op & LSVector_mask) != 0) { |
| 1227 // Vector register memory operations encode the access size in the "size" |
| 1228 // and "opc" fields. |
| 1229 if ((size == 0) && ((op & LSOpc_mask) >> LSOpc_offset) >= 2) { |
| 1230 size = kQRegSizeLog2; |
| 1231 } |
| 1232 } |
| 1233 return size; |
1159 } | 1234 } |
1160 | 1235 |
1161 | 1236 |
1162 Instr Assembler::ImmMoveWide(int imm) { | 1237 Instr Assembler::ImmMoveWide(int imm) { |
1163 DCHECK(is_uint16(imm)); | 1238 DCHECK(is_uint16(imm)); |
1164 return imm << ImmMoveWide_offset; | 1239 return imm << ImmMoveWide_offset; |
1165 } | 1240 } |
1166 | 1241 |
1167 | 1242 |
1168 Instr Assembler::ShiftMoveWide(int shift) { | 1243 Instr Assembler::ShiftMoveWide(int shift) { |
1169 DCHECK(is_uint2(shift)); | 1244 DCHECK(is_uint2(shift)); |
1170 return shift << ShiftMoveWide_offset; | 1245 return shift << ShiftMoveWide_offset; |
1171 } | 1246 } |
1172 | 1247 |
1173 | 1248 Instr Assembler::FPType(VRegister fd) { return fd.Is64Bits() ? FP64 : FP32; } |
1174 Instr Assembler::FPType(FPRegister fd) { | |
1175 return fd.Is64Bits() ? FP64 : FP32; | |
1176 } | |
1177 | |
1178 | 1249 |
1179 Instr Assembler::FPScale(unsigned scale) { | 1250 Instr Assembler::FPScale(unsigned scale) { |
1180 DCHECK(is_uint6(scale)); | 1251 DCHECK(is_uint6(scale)); |
1181 return scale << FPScale_offset; | 1252 return scale << FPScale_offset; |
1182 } | 1253 } |
1183 | 1254 |
1184 | 1255 |
1185 const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const { | 1256 const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const { |
1186 return reg.Is64Bits() ? xzr : wzr; | 1257 return reg.Is64Bits() ? xzr : wzr; |
1187 } | 1258 } |
(...skipping 26 matching lines...)
1214 | 1285 |
1215 void Assembler::ClearRecordedAstId() { | 1286 void Assembler::ClearRecordedAstId() { |
1216 recorded_ast_id_ = TypeFeedbackId::None(); | 1287 recorded_ast_id_ = TypeFeedbackId::None(); |
1217 } | 1288 } |
1218 | 1289 |
1219 | 1290 |
1220 } // namespace internal | 1291 } // namespace internal |
1221 } // namespace v8 | 1292 } // namespace v8 |
1222 | 1293 |
1223 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_ | 1294 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_ |