OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ | 5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ |
6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ | 6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ |
7 | 7 |
8 #include "src/arm64/assembler-arm64.h" | 8 #include "src/arm64/assembler-arm64.h" |
9 #include "src/assembler.h" | 9 #include "src/assembler.h" |
10 #include "src/debug/debug.h" | 10 #include "src/debug/debug.h" |
(...skipping 39 matching lines...) |
50 return reg_size; | 50 return reg_size; |
51 } | 51 } |
52 | 52 |
53 | 53 |
54 inline int CPURegister::SizeInBytes() const { | 54 inline int CPURegister::SizeInBytes() const { |
55 DCHECK(IsValid()); | 55 DCHECK(IsValid()); |
56 DCHECK(SizeInBits() % 8 == 0); | 56 DCHECK(SizeInBits() % 8 == 0); |
57 return reg_size / 8; | 57 return reg_size / 8; |
58 } | 58 } |
59 | 59 |
60 inline bool CPURegister::Is8Bits() const { | |
61 DCHECK(IsValid()); | |
62 return reg_size == 8; | |
63 } | |
64 | |
65 inline bool CPURegister::Is16Bits() const { | |
66 DCHECK(IsValid()); | |
67 return reg_size == 16; | |
68 } | |
69 | 60 |
70 inline bool CPURegister::Is32Bits() const { | 61 inline bool CPURegister::Is32Bits() const { |
71 DCHECK(IsValid()); | 62 DCHECK(IsValid()); |
72 return reg_size == 32; | 63 return reg_size == 32; |
73 } | 64 } |
74 | 65 |
75 | 66 |
76 inline bool CPURegister::Is64Bits() const { | 67 inline bool CPURegister::Is64Bits() const { |
77 DCHECK(IsValid()); | 68 DCHECK(IsValid()); |
78 return reg_size == 64; | 69 return reg_size == 64; |
79 } | 70 } |
80 | 71 |
81 inline bool CPURegister::Is128Bits() const { | |
82 DCHECK(IsValid()); | |
83 return reg_size == 128; | |
84 } | |
85 | 72 |
86 inline bool CPURegister::IsValid() const { | 73 inline bool CPURegister::IsValid() const { |
87 if (IsValidRegister() || IsValidVRegister()) { | 74 if (IsValidRegister() || IsValidFPRegister()) { |
88 DCHECK(!IsNone()); | 75 DCHECK(!IsNone()); |
89 return true; | 76 return true; |
90 } else { | 77 } else { |
91 DCHECK(IsNone()); | 78 DCHECK(IsNone()); |
92 return false; | 79 return false; |
93 } | 80 } |
94 } | 81 } |
95 | 82 |
96 | 83 |
97 inline bool CPURegister::IsValidRegister() const { | 84 inline bool CPURegister::IsValidRegister() const { |
98 return IsRegister() && | 85 return IsRegister() && |
99 ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) && | 86 ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) && |
100 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); | 87 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); |
101 } | 88 } |
102 | 89 |
103 inline bool CPURegister::IsValidVRegister() const { | 90 |
104 return IsVRegister() && | 91 inline bool CPURegister::IsValidFPRegister() const { |
105 ((reg_size == kBRegSizeInBits) || (reg_size == kHRegSizeInBits) || | 92 return IsFPRegister() && |
106 (reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits) || | 93 ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) && |
107 (reg_size == kQRegSizeInBits)) && | 94 (reg_code < kNumberOfFPRegisters); |
108 (reg_code < kNumberOfVRegisters); | |
109 } | 95 } |
110 | 96 |
| 97 |
111 inline bool CPURegister::IsNone() const { | 98 inline bool CPURegister::IsNone() const { |
112 // kNoRegister types should always have size 0 and code 0. | 99 // kNoRegister types should always have size 0 and code 0. |
113 DCHECK((reg_type != kNoRegister) || (reg_code == 0)); | 100 DCHECK((reg_type != kNoRegister) || (reg_code == 0)); |
114 DCHECK((reg_type != kNoRegister) || (reg_size == 0)); | 101 DCHECK((reg_type != kNoRegister) || (reg_size == 0)); |
115 | 102 |
116 return reg_type == kNoRegister; | 103 return reg_type == kNoRegister; |
117 } | 104 } |
118 | 105 |
119 | 106 |
120 inline bool CPURegister::Is(const CPURegister& other) const { | 107 inline bool CPURegister::Is(const CPURegister& other) const { |
121 DCHECK(IsValidOrNone() && other.IsValidOrNone()); | 108 DCHECK(IsValidOrNone() && other.IsValidOrNone()); |
122 return Aliases(other) && (reg_size == other.reg_size); | 109 return Aliases(other) && (reg_size == other.reg_size); |
123 } | 110 } |
124 | 111 |
125 | 112 |
126 inline bool CPURegister::Aliases(const CPURegister& other) const { | 113 inline bool CPURegister::Aliases(const CPURegister& other) const { |
127 DCHECK(IsValidOrNone() && other.IsValidOrNone()); | 114 DCHECK(IsValidOrNone() && other.IsValidOrNone()); |
128 return (reg_code == other.reg_code) && (reg_type == other.reg_type); | 115 return (reg_code == other.reg_code) && (reg_type == other.reg_type); |
129 } | 116 } |
130 | 117 |
131 | 118 |
132 inline bool CPURegister::IsRegister() const { | 119 inline bool CPURegister::IsRegister() const { |
133 return reg_type == kRegister; | 120 return reg_type == kRegister; |
134 } | 121 } |
135 | 122 |
136 inline bool CPURegister::IsVRegister() const { return reg_type == kVRegister; } | 123 |
| 124 inline bool CPURegister::IsFPRegister() const { |
| 125 return reg_type == kFPRegister; |
| 126 } |
| 127 |
137 | 128 |
138 inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const { | 129 inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const { |
139 return (reg_size == other.reg_size) && (reg_type == other.reg_type); | 130 return (reg_size == other.reg_size) && (reg_type == other.reg_type); |
140 } | 131 } |
141 | 132 |
142 | 133 |
143 inline bool CPURegister::IsValidOrNone() const { | 134 inline bool CPURegister::IsValidOrNone() const { |
144 return IsValid() || IsNone(); | 135 return IsValid() || IsNone(); |
145 } | 136 } |
146 | 137 |
(...skipping 55 matching lines...) |
202 DCHECK(IsValid()); | 193 DCHECK(IsValid()); |
203 DCHECK(CPURegister::Create(code, size_, type_).IsValid()); | 194 DCHECK(CPURegister::Create(code, size_, type_).IsValid()); |
204 list_ &= ~(1UL << code); | 195 list_ &= ~(1UL << code); |
205 } | 196 } |
206 | 197 |
207 | 198 |
208 inline Register Register::XRegFromCode(unsigned code) { | 199 inline Register Register::XRegFromCode(unsigned code) { |
209 if (code == kSPRegInternalCode) { | 200 if (code == kSPRegInternalCode) { |
210 return csp; | 201 return csp; |
211 } else { | 202 } else { |
212 DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters)); | 203 DCHECK(code < kNumberOfRegisters); |
213 return Register::Create(code, kXRegSizeInBits); | 204 return Register::Create(code, kXRegSizeInBits); |
214 } | 205 } |
215 } | 206 } |
216 | 207 |
217 | 208 |
218 inline Register Register::WRegFromCode(unsigned code) { | 209 inline Register Register::WRegFromCode(unsigned code) { |
219 if (code == kSPRegInternalCode) { | 210 if (code == kSPRegInternalCode) { |
220 return wcsp; | 211 return wcsp; |
221 } else { | 212 } else { |
222 DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters)); | 213 DCHECK(code < kNumberOfRegisters); |
223 return Register::Create(code, kWRegSizeInBits); | 214 return Register::Create(code, kWRegSizeInBits); |
224 } | 215 } |
225 } | 216 } |
226 | 217 |
227 inline VRegister VRegister::BRegFromCode(unsigned code) { | 218 |
228 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); | 219 inline FPRegister FPRegister::SRegFromCode(unsigned code) { |
229 return VRegister::Create(code, kBRegSizeInBits); | 220 DCHECK(code < kNumberOfFPRegisters); |
| 221 return FPRegister::Create(code, kSRegSizeInBits); |
230 } | 222 } |
231 | 223 |
232 inline VRegister VRegister::HRegFromCode(unsigned code) { | 224 |
233 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); | 225 inline FPRegister FPRegister::DRegFromCode(unsigned code) { |
234 return VRegister::Create(code, kHRegSizeInBits); | 226 DCHECK(code < kNumberOfFPRegisters); |
| 227 return FPRegister::Create(code, kDRegSizeInBits); |
235 } | 228 } |
236 | 229 |
237 inline VRegister VRegister::SRegFromCode(unsigned code) { | |
238 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); | |
239 return VRegister::Create(code, kSRegSizeInBits); | |
240 } | |
241 | |
242 inline VRegister VRegister::DRegFromCode(unsigned code) { | |
243 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); | |
244 return VRegister::Create(code, kDRegSizeInBits); | |
245 } | |
246 | |
247 inline VRegister VRegister::QRegFromCode(unsigned code) { | |
248 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); | |
249 return VRegister::Create(code, kQRegSizeInBits); | |
250 } | |
251 | |
252 inline VRegister VRegister::VRegFromCode(unsigned code) { | |
253 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); | |
254 return VRegister::Create(code, kVRegSizeInBits); | |
255 } | |
256 | 230 |
257 inline Register CPURegister::W() const { | 231 inline Register CPURegister::W() const { |
258 DCHECK(IsValidRegister()); | 232 DCHECK(IsValidRegister()); |
259 return Register::WRegFromCode(reg_code); | 233 return Register::WRegFromCode(reg_code); |
260 } | 234 } |
261 | 235 |
262 | 236 |
263 inline Register CPURegister::X() const { | 237 inline Register CPURegister::X() const { |
264 DCHECK(IsValidRegister()); | 238 DCHECK(IsValidRegister()); |
265 return Register::XRegFromCode(reg_code); | 239 return Register::XRegFromCode(reg_code); |
266 } | 240 } |
267 | 241 |
268 inline VRegister CPURegister::V() const { | |
269 DCHECK(IsValidVRegister()); | |
270 return VRegister::VRegFromCode(reg_code); | |
271 } | |
272 | 242 |
273 inline VRegister CPURegister::B() const { | 243 inline FPRegister CPURegister::S() const { |
274 DCHECK(IsValidVRegister()); | 244 DCHECK(IsValidFPRegister()); |
275 return VRegister::BRegFromCode(reg_code); | 245 return FPRegister::SRegFromCode(reg_code); |
276 } | |
277 | |
278 inline VRegister CPURegister::H() const { | |
279 DCHECK(IsValidVRegister()); | |
280 return VRegister::HRegFromCode(reg_code); | |
281 } | |
282 | |
283 inline VRegister CPURegister::S() const { | |
284 DCHECK(IsValidVRegister()); | |
285 return VRegister::SRegFromCode(reg_code); | |
286 } | |
287 | |
288 inline VRegister CPURegister::D() const { | |
289 DCHECK(IsValidVRegister()); | |
290 return VRegister::DRegFromCode(reg_code); | |
291 } | |
292 | |
293 inline VRegister CPURegister::Q() const { | |
294 DCHECK(IsValidVRegister()); | |
295 return VRegister::QRegFromCode(reg_code); | |
296 } | 246 } |
297 | 247 |
298 | 248 |
| 249 inline FPRegister CPURegister::D() const { |
| 250 DCHECK(IsValidFPRegister()); |
| 251 return FPRegister::DRegFromCode(reg_code); |
| 252 } |
| 253 |
| 254 |
299 // Immediate. | 255 // Immediate. |
300 // Default initializer is for int types | 256 // Default initializer is for int types |
301 template<typename T> | 257 template<typename T> |
302 struct ImmediateInitializer { | 258 struct ImmediateInitializer { |
303 static const bool kIsIntType = true; | 259 static const bool kIsIntType = true; |
304 static inline RelocInfo::Mode rmode_for(T) { | 260 static inline RelocInfo::Mode rmode_for(T) { |
305 return sizeof(T) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32; | 261 return sizeof(T) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32; |
306 } | 262 } |
307 static inline int64_t immediate_for(T t) { | 263 static inline int64_t immediate_for(T t) { |
308 STATIC_ASSERT(sizeof(T) <= 8); | 264 STATIC_ASSERT(sizeof(T) <= 8); |
(...skipping 219 matching lines...) |
528 | 484 |
529 MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode) | 485 MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode) |
530 : base_(base), addrmode_(addrmode) { | 486 : base_(base), addrmode_(addrmode) { |
531 DCHECK(base.Is64Bits() && !base.IsZero()); | 487 DCHECK(base.Is64Bits() && !base.IsZero()); |
532 | 488 |
533 if (offset.IsImmediate()) { | 489 if (offset.IsImmediate()) { |
534 offset_ = offset.ImmediateValue(); | 490 offset_ = offset.ImmediateValue(); |
535 | 491 |
536 regoffset_ = NoReg; | 492 regoffset_ = NoReg; |
537 } else if (offset.IsShiftedRegister()) { | 493 } else if (offset.IsShiftedRegister()) { |
538 DCHECK((addrmode == Offset) || (addrmode == PostIndex)); | 494 DCHECK(addrmode == Offset); |
539 | 495 |
540 regoffset_ = offset.reg(); | 496 regoffset_ = offset.reg(); |
541 shift_ = offset.shift(); | 497 shift_ = offset.shift(); |
542 shift_amount_ = offset.shift_amount(); | 498 shift_amount_ = offset.shift_amount(); |
543 | 499 |
544 extend_ = NO_EXTEND; | 500 extend_ = NO_EXTEND; |
545 offset_ = 0; | 501 offset_ = 0; |
546 | 502 |
547 // These assertions match those in the shifted-register constructor. | 503 // These assertions match those in the shifted-register constructor. |
548 DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP()); | 504 DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP()); |
(...skipping 365 matching lines...) |
914 StaticVisitor::VisitRuntimeEntry(this); | 870 StaticVisitor::VisitRuntimeEntry(this); |
915 } | 871 } |
916 } | 872 } |
917 | 873 |
918 | 874 |
919 LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) { | 875 LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) { |
920 DCHECK(rt.IsValid()); | 876 DCHECK(rt.IsValid()); |
921 if (rt.IsRegister()) { | 877 if (rt.IsRegister()) { |
922 return rt.Is64Bits() ? LDR_x : LDR_w; | 878 return rt.Is64Bits() ? LDR_x : LDR_w; |
923 } else { | 879 } else { |
924 DCHECK(rt.IsVRegister()); | 880 DCHECK(rt.IsFPRegister()); |
925 switch (rt.SizeInBits()) { | 881 return rt.Is64Bits() ? LDR_d : LDR_s; |
926 case kBRegSizeInBits: | |
927 return LDR_b; | |
928 case kHRegSizeInBits: | |
929 return LDR_h; | |
930 case kSRegSizeInBits: | |
931 return LDR_s; | |
932 case kDRegSizeInBits: | |
933 return LDR_d; | |
934 default: | |
935 DCHECK(rt.IsQ()); | |
936 return LDR_q; | |
937 } | |
938 } | 882 } |
939 } | 883 } |
940 | 884 |
| 885 |
| 886 LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt, |
| 887 const CPURegister& rt2) { |
| 888 DCHECK(AreSameSizeAndType(rt, rt2)); |
| 889 USE(rt2); |
| 890 if (rt.IsRegister()) { |
| 891 return rt.Is64Bits() ? LDP_x : LDP_w; |
| 892 } else { |
| 893 DCHECK(rt.IsFPRegister()); |
| 894 return rt.Is64Bits() ? LDP_d : LDP_s; |
| 895 } |
| 896 } |
| 897 |
941 | 898 |
942 LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) { | 899 LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) { |
943 DCHECK(rt.IsValid()); | 900 DCHECK(rt.IsValid()); |
944 if (rt.IsRegister()) { | 901 if (rt.IsRegister()) { |
945 return rt.Is64Bits() ? STR_x : STR_w; | 902 return rt.Is64Bits() ? STR_x : STR_w; |
946 } else { | 903 } else { |
947 DCHECK(rt.IsVRegister()); | 904 DCHECK(rt.IsFPRegister()); |
948 switch (rt.SizeInBits()) { | 905 return rt.Is64Bits() ? STR_d : STR_s; |
949 case kBRegSizeInBits: | |
950 return STR_b; | |
951 case kHRegSizeInBits: | |
952 return STR_h; | |
953 case kSRegSizeInBits: | |
954 return STR_s; | |
955 case kDRegSizeInBits: | |
956 return STR_d; | |
957 default: | |
958 DCHECK(rt.IsQ()); | |
959 return STR_q; | |
960 } | |
961 } | 906 } |
962 } | 907 } |
963 | 908 |
964 LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt, | |
965 const CPURegister& rt2) { | |
966 DCHECK_EQ(STP_w | LoadStorePairLBit, LDP_w); | |
967 return static_cast<LoadStorePairOp>(StorePairOpFor(rt, rt2) | | |
968 LoadStorePairLBit); | |
969 } | |
970 | 909 |
971 LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt, | 910 LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt, |
972 const CPURegister& rt2) { | 911 const CPURegister& rt2) { |
973 DCHECK(AreSameSizeAndType(rt, rt2)); | 912 DCHECK(AreSameSizeAndType(rt, rt2)); |
974 USE(rt2); | 913 USE(rt2); |
975 if (rt.IsRegister()) { | 914 if (rt.IsRegister()) { |
976 return rt.Is64Bits() ? STP_x : STP_w; | 915 return rt.Is64Bits() ? STP_x : STP_w; |
977 } else { | 916 } else { |
978 DCHECK(rt.IsVRegister()); | 917 DCHECK(rt.IsFPRegister()); |
979 switch (rt.SizeInBits()) { | 918 return rt.Is64Bits() ? STP_d : STP_s; |
980 case kSRegSizeInBits: | |
981 return STP_s; | |
982 case kDRegSizeInBits: | |
983 return STP_d; | |
984 default: | |
985 DCHECK(rt.IsQ()); | |
986 return STP_q; | |
987 } | |
988 } | 919 } |
989 } | 920 } |
990 | 921 |
991 | 922 |
992 LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) { | 923 LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) { |
993 if (rt.IsRegister()) { | 924 if (rt.IsRegister()) { |
994 return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit; | 925 return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit; |
995 } else { | 926 } else { |
996 DCHECK(rt.IsVRegister()); | 927 DCHECK(rt.IsFPRegister()); |
997 return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit; | 928 return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit; |
998 } | 929 } |
999 } | 930 } |
1000 | 931 |
1001 | 932 |
1002 int Assembler::LinkAndGetInstructionOffsetTo(Label* label) { | 933 int Assembler::LinkAndGetInstructionOffsetTo(Label* label) { |
1003 DCHECK(kStartOfLabelLinkChain == 0); | 934 DCHECK(kStartOfLabelLinkChain == 0); |
1004 int offset = LinkAndGetByteOffsetTo(label); | 935 int offset = LinkAndGetByteOffsetTo(label); |
1005 DCHECK(IsAligned(offset, kInstructionSize)); | 936 DCHECK(IsAligned(offset, kInstructionSize)); |
1006 return offset >> kInstructionSizeLog2; | 937 return offset >> kInstructionSizeLog2; |
(...skipping 163 matching lines...) |
1170 DCHECK(is_uint12(imm12)); | 1101 DCHECK(is_uint12(imm12)); |
1171 return imm12 << ImmLSUnsigned_offset; | 1102 return imm12 << ImmLSUnsigned_offset; |
1172 } | 1103 } |
1173 | 1104 |
1174 | 1105 |
1175 Instr Assembler::ImmLS(int imm9) { | 1106 Instr Assembler::ImmLS(int imm9) { |
1176 DCHECK(is_int9(imm9)); | 1107 DCHECK(is_int9(imm9)); |
1177 return truncate_to_int9(imm9) << ImmLS_offset; | 1108 return truncate_to_int9(imm9) << ImmLS_offset; |
1178 } | 1109 } |
1179 | 1110 |
1180 Instr Assembler::ImmLSPair(int imm7, unsigned size) { | 1111 |
1181 DCHECK_EQ((imm7 >> size) << size, imm7); | 1112 Instr Assembler::ImmLSPair(int imm7, LSDataSize size) { |
| 1113 DCHECK(((imm7 >> size) << size) == imm7); |
1182 int scaled_imm7 = imm7 >> size; | 1114 int scaled_imm7 = imm7 >> size; |
1183 DCHECK(is_int7(scaled_imm7)); | 1115 DCHECK(is_int7(scaled_imm7)); |
1184 return truncate_to_int7(scaled_imm7) << ImmLSPair_offset; | 1116 return truncate_to_int7(scaled_imm7) << ImmLSPair_offset; |
1185 } | 1117 } |
1186 | 1118 |
1187 | 1119 |
1188 Instr Assembler::ImmShiftLS(unsigned shift_amount) { | 1120 Instr Assembler::ImmShiftLS(unsigned shift_amount) { |
1189 DCHECK(is_uint1(shift_amount)); | 1121 DCHECK(is_uint1(shift_amount)); |
1190 return shift_amount << ImmShiftLS_offset; | 1122 return shift_amount << ImmShiftLS_offset; |
1191 } | 1123 } |
(...skipping 21 matching lines...) |
1213 DCHECK(is_uint2(imm2)); | 1145 DCHECK(is_uint2(imm2)); |
1214 return imm2 << ImmBarrierDomain_offset; | 1146 return imm2 << ImmBarrierDomain_offset; |
1215 } | 1147 } |
1216 | 1148 |
1217 | 1149 |
1218 Instr Assembler::ImmBarrierType(int imm2) { | 1150 Instr Assembler::ImmBarrierType(int imm2) { |
1219 DCHECK(is_uint2(imm2)); | 1151 DCHECK(is_uint2(imm2)); |
1220 return imm2 << ImmBarrierType_offset; | 1152 return imm2 << ImmBarrierType_offset; |
1221 } | 1153 } |
1222 | 1154 |
1223 unsigned Assembler::CalcLSDataSize(LoadStoreOp op) { | 1155 |
1224 DCHECK((LSSize_offset + LSSize_width) == (kInstructionSize * 8)); | 1156 LSDataSize Assembler::CalcLSDataSize(LoadStoreOp op) { |
1225 unsigned size = static_cast<Instr>(op >> LSSize_offset); | 1157 DCHECK((SizeLS_offset + SizeLS_width) == (kInstructionSize * 8)); |
1226 if ((op & LSVector_mask) != 0) { | 1158 return static_cast<LSDataSize>(op >> SizeLS_offset); |
1227 // Vector register memory operations encode the access size in the "size" | |
1228 // and "opc" fields. | |
1229 if ((size == 0) && ((op & LSOpc_mask) >> LSOpc_offset) >= 2) { | |
1230 size = kQRegSizeLog2; | |
1231 } | |
1232 } | |
1233 return size; | |
1234 } | 1159 } |
1235 | 1160 |
1236 | 1161 |
1237 Instr Assembler::ImmMoveWide(int imm) { | 1162 Instr Assembler::ImmMoveWide(int imm) { |
1238 DCHECK(is_uint16(imm)); | 1163 DCHECK(is_uint16(imm)); |
1239 return imm << ImmMoveWide_offset; | 1164 return imm << ImmMoveWide_offset; |
1240 } | 1165 } |
1241 | 1166 |
1242 | 1167 |
1243 Instr Assembler::ShiftMoveWide(int shift) { | 1168 Instr Assembler::ShiftMoveWide(int shift) { |
1244 DCHECK(is_uint2(shift)); | 1169 DCHECK(is_uint2(shift)); |
1245 return shift << ShiftMoveWide_offset; | 1170 return shift << ShiftMoveWide_offset; |
1246 } | 1171 } |
1247 | 1172 |
1248 Instr Assembler::FPType(VRegister fd) { return fd.Is64Bits() ? FP64 : FP32; } | 1173 |
| 1174 Instr Assembler::FPType(FPRegister fd) { |
| 1175 return fd.Is64Bits() ? FP64 : FP32; |
| 1176 } |
| 1177 |
1249 | 1178 |
1250 Instr Assembler::FPScale(unsigned scale) { | 1179 Instr Assembler::FPScale(unsigned scale) { |
1251 DCHECK(is_uint6(scale)); | 1180 DCHECK(is_uint6(scale)); |
1252 return scale << FPScale_offset; | 1181 return scale << FPScale_offset; |
1253 } | 1182 } |
1254 | 1183 |
1255 | 1184 |
1256 const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const { | 1185 const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const { |
1257 return reg.Is64Bits() ? xzr : wzr; | 1186 return reg.Is64Bits() ? xzr : wzr; |
1258 } | 1187 } |
(...skipping 26 matching lines...) |
1285 | 1214 |
1286 void Assembler::ClearRecordedAstId() { | 1215 void Assembler::ClearRecordedAstId() { |
1287 recorded_ast_id_ = TypeFeedbackId::None(); | 1216 recorded_ast_id_ = TypeFeedbackId::None(); |
1288 } | 1217 } |
1289 | 1218 |
1290 | 1219 |
1291 } // namespace internal | 1220 } // namespace internal |
1292 } // namespace v8 | 1221 } // namespace v8 |
1293 | 1222 |
1294 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_ | 1223 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_ |