OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ | 5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ |
6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ | 6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ |
7 | 7 |
8 #include "src/arm64/assembler-arm64.h" | 8 #include "src/arm64/assembler-arm64.h" |
9 #include "src/assembler.h" | 9 #include "src/assembler.h" |
10 #include "src/debug/debug.h" | 10 #include "src/debug/debug.h" |
(...skipping 39 matching lines...)
50 return reg_size; | 50 return reg_size; |
51 } | 51 } |
52 | 52 |
53 | 53 |
54 inline int CPURegister::SizeInBytes() const { | 54 inline int CPURegister::SizeInBytes() const { |
55 DCHECK(IsValid()); | 55 DCHECK(IsValid()); |
56 DCHECK(SizeInBits() % 8 == 0); | 56 DCHECK(SizeInBits() % 8 == 0); |
57 return reg_size / 8; | 57 return reg_size / 8; |
58 } | 58 } |
59 | 59 |
| 60 inline bool CPURegister::Is8Bits() const { |
| 61 DCHECK(IsValid()); |
| 62 return reg_size == 8; |
| 63 } |
| 64 |
| 65 inline bool CPURegister::Is16Bits() const { |
| 66 DCHECK(IsValid()); |
| 67 return reg_size == 16; |
| 68 } |
60 | 69 |
61 inline bool CPURegister::Is32Bits() const { | 70 inline bool CPURegister::Is32Bits() const { |
62 DCHECK(IsValid()); | 71 DCHECK(IsValid()); |
63 return reg_size == 32; | 72 return reg_size == 32; |
64 } | 73 } |
65 | 74 |
66 | 75 |
67 inline bool CPURegister::Is64Bits() const { | 76 inline bool CPURegister::Is64Bits() const { |
68 DCHECK(IsValid()); | 77 DCHECK(IsValid()); |
69 return reg_size == 64; | 78 return reg_size == 64; |
70 } | 79 } |
71 | 80 |
| 81 inline bool CPURegister::Is128Bits() const { |
| 82 DCHECK(IsValid()); |
| 83 return reg_size == 128; |
| 84 } |
72 | 85 |
73 inline bool CPURegister::IsValid() const { | 86 inline bool CPURegister::IsValid() const { |
74 if (IsValidRegister() || IsValidFPRegister()) { | 87 if (IsValidRegister() || IsValidVRegister()) { |
75 DCHECK(!IsNone()); | 88 DCHECK(!IsNone()); |
76 return true; | 89 return true; |
77 } else { | 90 } else { |
78 DCHECK(IsNone()); | 91 DCHECK(IsNone()); |
79 return false; | 92 return false; |
80 } | 93 } |
81 } | 94 } |
82 | 95 |
83 | 96 |
84 inline bool CPURegister::IsValidRegister() const { | 97 inline bool CPURegister::IsValidRegister() const { |
85 return IsRegister() && | 98 return IsRegister() && |
86 ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) && | 99 ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) && |
87 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); | 100 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); |
88 } | 101 } |
89 | 102 |
90 | 103 inline bool CPURegister::IsValidVRegister() const { |
91 inline bool CPURegister::IsValidFPRegister() const { | 104 return IsVRegister() && |
92 return IsFPRegister() && | 105 ((reg_size == kBRegSizeInBits) || (reg_size == kHRegSizeInBits) || |
93 ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) && | 106 (reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits) || |
94 (reg_code < kNumberOfFPRegisters); | 107 (reg_size == kQRegSizeInBits)) && |
| 108 (reg_code < kNumberOfVRegisters); |
95 } | 109 } |
96 | 110 |
97 | |
98 inline bool CPURegister::IsNone() const { | 111 inline bool CPURegister::IsNone() const { |
99 // kNoRegister types should always have size 0 and code 0. | 112 // kNoRegister types should always have size 0 and code 0. |
100 DCHECK((reg_type != kNoRegister) || (reg_code == 0)); | 113 DCHECK((reg_type != kNoRegister) || (reg_code == 0)); |
101 DCHECK((reg_type != kNoRegister) || (reg_size == 0)); | 114 DCHECK((reg_type != kNoRegister) || (reg_size == 0)); |
102 | 115 |
103 return reg_type == kNoRegister; | 116 return reg_type == kNoRegister; |
104 } | 117 } |
105 | 118 |
106 | 119 |
107 inline bool CPURegister::Is(const CPURegister& other) const { | 120 inline bool CPURegister::Is(const CPURegister& other) const { |
108 DCHECK(IsValidOrNone() && other.IsValidOrNone()); | 121 DCHECK(IsValidOrNone() && other.IsValidOrNone()); |
109 return Aliases(other) && (reg_size == other.reg_size); | 122 return Aliases(other) && (reg_size == other.reg_size); |
110 } | 123 } |
111 | 124 |
112 | 125 |
113 inline bool CPURegister::Aliases(const CPURegister& other) const { | 126 inline bool CPURegister::Aliases(const CPURegister& other) const { |
114 DCHECK(IsValidOrNone() && other.IsValidOrNone()); | 127 DCHECK(IsValidOrNone() && other.IsValidOrNone()); |
115 return (reg_code == other.reg_code) && (reg_type == other.reg_type); | 128 return (reg_code == other.reg_code) && (reg_type == other.reg_type); |
116 } | 129 } |
117 | 130 |
118 | 131 |
119 inline bool CPURegister::IsRegister() const { | 132 inline bool CPURegister::IsRegister() const { |
120 return reg_type == kRegister; | 133 return reg_type == kRegister; |
121 } | 134 } |
122 | 135 |
123 | 136 inline bool CPURegister::IsVRegister() const { return reg_type == kVRegister; } |
124 inline bool CPURegister::IsFPRegister() const { | |
125 return reg_type == kFPRegister; | |
126 } | |
127 | |
128 | 137 |
129 inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const { | 138 inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const { |
130 return (reg_size == other.reg_size) && (reg_type == other.reg_type); | 139 return (reg_size == other.reg_size) && (reg_type == other.reg_type); |
131 } | 140 } |
132 | 141 |
133 | 142 |
134 inline bool CPURegister::IsValidOrNone() const { | 143 inline bool CPURegister::IsValidOrNone() const { |
135 return IsValid() || IsNone(); | 144 return IsValid() || IsNone(); |
136 } | 145 } |
137 | 146 |
(...skipping 55 matching lines...)
193 DCHECK(IsValid()); | 202 DCHECK(IsValid()); |
194 DCHECK(CPURegister::Create(code, size_, type_).IsValid()); | 203 DCHECK(CPURegister::Create(code, size_, type_).IsValid()); |
195 list_ &= ~(1UL << code); | 204 list_ &= ~(1UL << code); |
196 } | 205 } |
197 | 206 |
198 | 207 |
199 inline Register Register::XRegFromCode(unsigned code) { | 208 inline Register Register::XRegFromCode(unsigned code) { |
200 if (code == kSPRegInternalCode) { | 209 if (code == kSPRegInternalCode) { |
201 return csp; | 210 return csp; |
202 } else { | 211 } else { |
203 DCHECK(code < kNumberOfRegisters); | 212 DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters)); |
204 return Register::Create(code, kXRegSizeInBits); | 213 return Register::Create(code, kXRegSizeInBits); |
205 } | 214 } |
206 } | 215 } |
207 | 216 |
208 | 217 |
209 inline Register Register::WRegFromCode(unsigned code) { | 218 inline Register Register::WRegFromCode(unsigned code) { |
210 if (code == kSPRegInternalCode) { | 219 if (code == kSPRegInternalCode) { |
211 return wcsp; | 220 return wcsp; |
212 } else { | 221 } else { |
213 DCHECK(code < kNumberOfRegisters); | 222 DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters)); |
214 return Register::Create(code, kWRegSizeInBits); | 223 return Register::Create(code, kWRegSizeInBits); |
215 } | 224 } |
216 } | 225 } |
217 | 226 |
218 | 227 inline VRegister VRegister::BRegFromCode(unsigned code) { |
219 inline FPRegister FPRegister::SRegFromCode(unsigned code) { | 228 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
220 DCHECK(code < kNumberOfFPRegisters); | 229 return VRegister::Create(code, kBRegSizeInBits); |
221 return FPRegister::Create(code, kSRegSizeInBits); | |
222 } | 230 } |
223 | 231 |
224 | 232 inline VRegister VRegister::HRegFromCode(unsigned code) { |
225 inline FPRegister FPRegister::DRegFromCode(unsigned code) { | 233 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
226 DCHECK(code < kNumberOfFPRegisters); | 234 return VRegister::Create(code, kHRegSizeInBits); |
227 return FPRegister::Create(code, kDRegSizeInBits); | |
228 } | 235 } |
229 | 236 |
| 237 inline VRegister VRegister::SRegFromCode(unsigned code) { |
| 238 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
| 239 return VRegister::Create(code, kSRegSizeInBits); |
| 240 } |
| 241 |
| 242 inline VRegister VRegister::DRegFromCode(unsigned code) { |
| 243 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
| 244 return VRegister::Create(code, kDRegSizeInBits); |
| 245 } |
| 246 |
| 247 inline VRegister VRegister::QRegFromCode(unsigned code) { |
| 248 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
| 249 return VRegister::Create(code, kQRegSizeInBits); |
| 250 } |
| 251 |
| 252 inline VRegister VRegister::VRegFromCode(unsigned code) { |
| 253 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters)); |
| 254 return VRegister::Create(code, kVRegSizeInBits); |
| 255 } |
230 | 256 |
231 inline Register CPURegister::W() const { | 257 inline Register CPURegister::W() const { |
232 DCHECK(IsValidRegister()); | 258 DCHECK(IsValidRegister()); |
233 return Register::WRegFromCode(reg_code); | 259 return Register::WRegFromCode(reg_code); |
234 } | 260 } |
235 | 261 |
236 | 262 |
237 inline Register CPURegister::X() const { | 263 inline Register CPURegister::X() const { |
238 DCHECK(IsValidRegister()); | 264 DCHECK(IsValidRegister()); |
239 return Register::XRegFromCode(reg_code); | 265 return Register::XRegFromCode(reg_code); |
240 } | 266 } |
241 | 267 |
| 268 inline VRegister CPURegister::V() const { |
| 269 DCHECK(IsValidVRegister()); |
| 270 return VRegister::VRegFromCode(reg_code); |
| 271 } |
242 | 272 |
243 inline FPRegister CPURegister::S() const { | 273 inline VRegister CPURegister::B() const { |
244 DCHECK(IsValidFPRegister()); | 274 DCHECK(IsValidVRegister()); |
245 return FPRegister::SRegFromCode(reg_code); | 275 return VRegister::BRegFromCode(reg_code); |
| 276 } |
| 277 |
| 278 inline VRegister CPURegister::H() const { |
| 279 DCHECK(IsValidVRegister()); |
| 280 return VRegister::HRegFromCode(reg_code); |
| 281 } |
| 282 |
| 283 inline VRegister CPURegister::S() const { |
| 284 DCHECK(IsValidVRegister()); |
| 285 return VRegister::SRegFromCode(reg_code); |
| 286 } |
| 287 |
| 288 inline VRegister CPURegister::D() const { |
| 289 DCHECK(IsValidVRegister()); |
| 290 return VRegister::DRegFromCode(reg_code); |
| 291 } |
| 292 |
| 293 inline VRegister CPURegister::Q() const { |
| 294 DCHECK(IsValidVRegister()); |
| 295 return VRegister::QRegFromCode(reg_code); |
246 } | 296 } |
247 | 297 |
248 | 298 |
249 inline FPRegister CPURegister::D() const { | |
250 DCHECK(IsValidFPRegister()); | |
251 return FPRegister::DRegFromCode(reg_code); | |
252 } | |
253 | |
254 | |
255 // Immediate. | 299 // Immediate. |
256 // Default initializer is for int types | 300 // Default initializer is for int types |
257 template<typename T> | 301 template<typename T> |
258 struct ImmediateInitializer { | 302 struct ImmediateInitializer { |
259 static const bool kIsIntType = true; | 303 static const bool kIsIntType = true; |
260 static inline RelocInfo::Mode rmode_for(T) { | 304 static inline RelocInfo::Mode rmode_for(T) { |
261 return sizeof(T) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32; | 305 return sizeof(T) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32; |
262 } | 306 } |
263 static inline int64_t immediate_for(T t) { | 307 static inline int64_t immediate_for(T t) { |
264 STATIC_ASSERT(sizeof(T) <= 8); | 308 STATIC_ASSERT(sizeof(T) <= 8); |
(...skipping 219 matching lines...)
484 | 528 |
485 MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode) | 529 MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode) |
486 : base_(base), addrmode_(addrmode) { | 530 : base_(base), addrmode_(addrmode) { |
487 DCHECK(base.Is64Bits() && !base.IsZero()); | 531 DCHECK(base.Is64Bits() && !base.IsZero()); |
488 | 532 |
489 if (offset.IsImmediate()) { | 533 if (offset.IsImmediate()) { |
490 offset_ = offset.ImmediateValue(); | 534 offset_ = offset.ImmediateValue(); |
491 | 535 |
492 regoffset_ = NoReg; | 536 regoffset_ = NoReg; |
493 } else if (offset.IsShiftedRegister()) { | 537 } else if (offset.IsShiftedRegister()) { |
494 DCHECK(addrmode == Offset); | 538 DCHECK((addrmode == Offset) || (addrmode == PostIndex)); |
495 | 539 |
496 regoffset_ = offset.reg(); | 540 regoffset_ = offset.reg(); |
497 shift_ = offset.shift(); | 541 shift_ = offset.shift(); |
498 shift_amount_ = offset.shift_amount(); | 542 shift_amount_ = offset.shift_amount(); |
499 | 543 |
500 extend_ = NO_EXTEND; | 544 extend_ = NO_EXTEND; |
501 offset_ = 0; | 545 offset_ = 0; |
502 | 546 |
503 // These assertions match those in the shifted-register constructor. | 547 // These assertions match those in the shifted-register constructor. |
504 DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP()); | 548 DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP()); |
(...skipping 374 matching lines...)
879 StaticVisitor::VisitRuntimeEntry(this); | 923 StaticVisitor::VisitRuntimeEntry(this); |
880 } | 924 } |
881 } | 925 } |
882 | 926 |
883 | 927 |
884 LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) { | 928 LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) { |
885 DCHECK(rt.IsValid()); | 929 DCHECK(rt.IsValid()); |
886 if (rt.IsRegister()) { | 930 if (rt.IsRegister()) { |
887 return rt.Is64Bits() ? LDR_x : LDR_w; | 931 return rt.Is64Bits() ? LDR_x : LDR_w; |
888 } else { | 932 } else { |
889 DCHECK(rt.IsFPRegister()); | 933 DCHECK(rt.IsVRegister()); |
890 return rt.Is64Bits() ? LDR_d : LDR_s; | 934 switch (rt.SizeInBits()) { |
| 935 case kBRegSizeInBits: |
| 936 return LDR_b; |
| 937 case kHRegSizeInBits: |
| 938 return LDR_h; |
| 939 case kSRegSizeInBits: |
| 940 return LDR_s; |
| 941 case kDRegSizeInBits: |
| 942 return LDR_d; |
| 943 default: |
| 944 DCHECK(rt.IsQ()); |
| 945 return LDR_q; |
| 946 } |
891 } | 947 } |
892 } | 948 } |
893 | 949 |
894 | |
895 LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt, | |
896 const CPURegister& rt2) { | |
897 DCHECK(AreSameSizeAndType(rt, rt2)); | |
898 USE(rt2); | |
899 if (rt.IsRegister()) { | |
900 return rt.Is64Bits() ? LDP_x : LDP_w; | |
901 } else { | |
902 DCHECK(rt.IsFPRegister()); | |
903 return rt.Is64Bits() ? LDP_d : LDP_s; | |
904 } | |
905 } | |
906 | |
907 | 950 |
908 LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) { | 951 LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) { |
909 DCHECK(rt.IsValid()); | 952 DCHECK(rt.IsValid()); |
910 if (rt.IsRegister()) { | 953 if (rt.IsRegister()) { |
911 return rt.Is64Bits() ? STR_x : STR_w; | 954 return rt.Is64Bits() ? STR_x : STR_w; |
912 } else { | 955 } else { |
913 DCHECK(rt.IsFPRegister()); | 956 DCHECK(rt.IsVRegister()); |
914 return rt.Is64Bits() ? STR_d : STR_s; | 957 switch (rt.SizeInBits()) { |
| 958 case kBRegSizeInBits: |
| 959 return STR_b; |
| 960 case kHRegSizeInBits: |
| 961 return STR_h; |
| 962 case kSRegSizeInBits: |
| 963 return STR_s; |
| 964 case kDRegSizeInBits: |
| 965 return STR_d; |
| 966 default: |
| 967 DCHECK(rt.IsQ()); |
| 968 return STR_q; |
| 969 } |
915 } | 970 } |
916 } | 971 } |
917 | 972 |
| 973 LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt, |
| 974 const CPURegister& rt2) { |
| 975 DCHECK_EQ(STP_w | LoadStorePairLBit, LDP_w); |
| 976 return static_cast<LoadStorePairOp>(StorePairOpFor(rt, rt2) | |
| 977 LoadStorePairLBit); |
| 978 } |
918 | 979 |
919 LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt, | 980 LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt, |
920 const CPURegister& rt2) { | 981 const CPURegister& rt2) { |
921 DCHECK(AreSameSizeAndType(rt, rt2)); | 982 DCHECK(AreSameSizeAndType(rt, rt2)); |
922 USE(rt2); | 983 USE(rt2); |
923 if (rt.IsRegister()) { | 984 if (rt.IsRegister()) { |
924 return rt.Is64Bits() ? STP_x : STP_w; | 985 return rt.Is64Bits() ? STP_x : STP_w; |
925 } else { | 986 } else { |
926 DCHECK(rt.IsFPRegister()); | 987 DCHECK(rt.IsVRegister()); |
927 return rt.Is64Bits() ? STP_d : STP_s; | 988 switch (rt.SizeInBits()) { |
| 989 case kSRegSizeInBits: |
| 990 return STP_s; |
| 991 case kDRegSizeInBits: |
| 992 return STP_d; |
| 993 default: |
| 994 DCHECK(rt.IsQ()); |
| 995 return STP_q; |
| 996 } |
928 } | 997 } |
929 } | 998 } |
930 | 999 |
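The rewritten LoadPairOpFor relies on the store-pair and load-pair encodings differing only in the load/store-pair L bit, so the load opcode can be derived from the matching store opcode instead of duplicating the size switch. A minimal standalone sketch of that relationship, using the architectural "signed offset" encodings rather than V8's enum values (the constants below are illustrative assumptions, not taken from the patch):

#include <cassert>
#include <cstdint>

// Assumed values: bit 22 is the L bit of the AArch64 load/store-pair group,
// and these are the STP/LDP (32-bit, signed offset) base encodings.
constexpr uint32_t kPairLBit = 1u << 22;
constexpr uint32_t kStpW = 0x29000000;  // STP <Wt>, <Wt2>, [<Xn|SP>, #imm]
constexpr uint32_t kLdpW = 0x29400000;  // LDP <Wt>, <Wt2>, [<Xn|SP>, #imm]

int main() {
  // The DCHECK_EQ(STP_w | LoadStorePairLBit, LDP_w) above expresses exactly
  // this invariant: setting the L bit turns a store-pair into a load-pair.
  assert((kStpW | kPairLBit) == kLdpW);
  return 0;
}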
931 | 1000 |
932 LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) { | 1001 LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) { |
933 if (rt.IsRegister()) { | 1002 if (rt.IsRegister()) { |
934 return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit; | 1003 return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit; |
935 } else { | 1004 } else { |
936 DCHECK(rt.IsFPRegister()); | 1005 DCHECK(rt.IsVRegister()); |
937 return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit; | 1006 return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit; |
938 } | 1007 } |
939 } | 1008 } |
940 | 1009 |
941 | 1010 |
942 int Assembler::LinkAndGetInstructionOffsetTo(Label* label) { | 1011 int Assembler::LinkAndGetInstructionOffsetTo(Label* label) { |
943 DCHECK(kStartOfLabelLinkChain == 0); | 1012 DCHECK(kStartOfLabelLinkChain == 0); |
944 int offset = LinkAndGetByteOffsetTo(label); | 1013 int offset = LinkAndGetByteOffsetTo(label); |
945 DCHECK(IsAligned(offset, kInstructionSize)); | 1014 DCHECK(IsAligned(offset, kInstructionSize)); |
946 return offset >> kInstructionSizeLog2; | 1015 return offset >> kInstructionSizeLog2; |
(...skipping 163 matching lines...)
1110 DCHECK(is_uint12(imm12)); | 1179 DCHECK(is_uint12(imm12)); |
1111 return imm12 << ImmLSUnsigned_offset; | 1180 return imm12 << ImmLSUnsigned_offset; |
1112 } | 1181 } |
1113 | 1182 |
1114 | 1183 |
1115 Instr Assembler::ImmLS(int imm9) { | 1184 Instr Assembler::ImmLS(int imm9) { |
1116 DCHECK(is_int9(imm9)); | 1185 DCHECK(is_int9(imm9)); |
1117 return truncate_to_int9(imm9) << ImmLS_offset; | 1186 return truncate_to_int9(imm9) << ImmLS_offset; |
1118 } | 1187 } |
1119 | 1188 |
1120 | 1189 Instr Assembler::ImmLSPair(int imm7, unsigned size) { |
1121 Instr Assembler::ImmLSPair(int imm7, LSDataSize size) { | 1190 DCHECK_EQ((imm7 >> size) << size, imm7); |
1122 DCHECK(((imm7 >> size) << size) == imm7); | |
1123 int scaled_imm7 = imm7 >> size; | 1191 int scaled_imm7 = imm7 >> size; |
1124 DCHECK(is_int7(scaled_imm7)); | 1192 DCHECK(is_int7(scaled_imm7)); |
1125 return truncate_to_int7(scaled_imm7) << ImmLSPair_offset; | 1193 return truncate_to_int7(scaled_imm7) << ImmLSPair_offset; |
1126 } | 1194 } |
1127 | 1195 |
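ImmLSPair stores the pair offset scaled down by the access size. As a worked example (assuming, as in the architectural LDP/STP encodings, that the imm7 field occupies bits [21:15]; the constant below is not one of the patch's named constants):

#include <cassert>
#include <cstdint>

constexpr unsigned kImmLSPairOffset = 15;  // assumed imm7 field position

uint32_t EncodePairOffset(int byte_offset, unsigned size_log2) {
  assert(((byte_offset >> size_log2) << size_log2) == byte_offset);  // aligned
  int scaled = byte_offset >> size_log2;
  assert(scaled >= -64 && scaled <= 63);  // must fit in a signed imm7
  return (static_cast<uint32_t>(scaled) & 0x7F) << kImmLSPairOffset;
}

int main() {
  // STP x0, x1, [sp, #16]: size_log2 is 3 for X registers, so the field holds 2.
  assert(EncodePairOffset(16, 3) == (2u << kImmLSPairOffset));
  return 0;
}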
1128 | 1196 |
1129 Instr Assembler::ImmShiftLS(unsigned shift_amount) { | 1197 Instr Assembler::ImmShiftLS(unsigned shift_amount) { |
1130 DCHECK(is_uint1(shift_amount)); | 1198 DCHECK(is_uint1(shift_amount)); |
1131 return shift_amount << ImmShiftLS_offset; | 1199 return shift_amount << ImmShiftLS_offset; |
1132 } | 1200 } |
(...skipping 21 matching lines...)
1154 DCHECK(is_uint2(imm2)); | 1222 DCHECK(is_uint2(imm2)); |
1155 return imm2 << ImmBarrierDomain_offset; | 1223 return imm2 << ImmBarrierDomain_offset; |
1156 } | 1224 } |
1157 | 1225 |
1158 | 1226 |
1159 Instr Assembler::ImmBarrierType(int imm2) { | 1227 Instr Assembler::ImmBarrierType(int imm2) { |
1160 DCHECK(is_uint2(imm2)); | 1228 DCHECK(is_uint2(imm2)); |
1161 return imm2 << ImmBarrierType_offset; | 1229 return imm2 << ImmBarrierType_offset; |
1162 } | 1230 } |
1163 | 1231 |
1164 | 1232 unsigned Assembler::CalcLSDataSize(LoadStoreOp op) { |
1165 LSDataSize Assembler::CalcLSDataSize(LoadStoreOp op) { | 1233 DCHECK((LSSize_offset + LSSize_width) == (kInstructionSize * 8)); |
1166 DCHECK((SizeLS_offset + SizeLS_width) == (kInstructionSize * 8)); | 1234 unsigned size = static_cast<Instr>(op >> LSSize_offset); |
1167 return static_cast<LSDataSize>(op >> SizeLS_offset); | 1235 if ((op & LSVector_mask) != 0) { |
| 1236 // Vector register memory operations encode the access size in the "size" |
| 1237 // and "opc" fields. |
| 1238 if ((size == 0) && ((op & LSOpc_mask) >> LSOpc_offset) >= 2) { |
| 1239 size = kQRegSizeLog2; |
| 1240 } |
| 1241 } |
| 1242 return size; |
1168 } | 1243 } |
1169 | 1244 |
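The new CalcLSDataSize reads the log2 access size from the instruction's size field, then special-cases Q-register accesses, which reuse size == 00 and are distinguished by the opc field. A self-contained sketch under assumed field positions (size at bits 31:30, the V bit at 26, opc at bits 23:22; these are not V8's named constants):

#include <cassert>
#include <cstdint>

constexpr unsigned kSizeOffset = 30;       // assumed "size" field position
constexpr uint32_t kVectorBit = 1u << 26;  // assumed V (SIMD&FP) bit
constexpr uint32_t kOpcMask = 3u << 22;    // assumed "opc" field
constexpr unsigned kOpcOffset = 22;
constexpr unsigned kQRegSizeLog2 = 4;      // a Q register is 16 bytes

unsigned SizeLog2For(uint32_t op) {
  unsigned size = op >> kSizeOffset;
  if ((op & kVectorBit) != 0 && size == 0 &&
      ((op & kOpcMask) >> kOpcOffset) >= 2) {
    size = kQRegSizeLog2;  // LDR/STR Qt: size == 00 with opc == 1x
  }
  return size;
}

int main() {
  assert(SizeLog2For(0x3D400000) == 0);  // LDR Bt, [Xn, #imm] (unsigned offset)
  assert(SizeLog2For(0x3DC00000) == 4);  // LDR Qt, [Xn, #imm] (unsigned offset)
  return 0;
}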
1170 | 1245 |
1171 Instr Assembler::ImmMoveWide(int imm) { | 1246 Instr Assembler::ImmMoveWide(int imm) { |
1172 DCHECK(is_uint16(imm)); | 1247 DCHECK(is_uint16(imm)); |
1173 return imm << ImmMoveWide_offset; | 1248 return imm << ImmMoveWide_offset; |
1174 } | 1249 } |
1175 | 1250 |
1176 | 1251 |
1177 Instr Assembler::ShiftMoveWide(int shift) { | 1252 Instr Assembler::ShiftMoveWide(int shift) { |
1178 DCHECK(is_uint2(shift)); | 1253 DCHECK(is_uint2(shift)); |
1179 return shift << ShiftMoveWide_offset; | 1254 return shift << ShiftMoveWide_offset; |
1180 } | 1255 } |
1181 | 1256 |
1182 | 1257 Instr Assembler::FPType(VRegister fd) { return fd.Is64Bits() ? FP64 : FP32; } |
1183 Instr Assembler::FPType(FPRegister fd) { | |
1184 return fd.Is64Bits() ? FP64 : FP32; | |
1185 } | |
1186 | |
1187 | 1258 |
1188 Instr Assembler::FPScale(unsigned scale) { | 1259 Instr Assembler::FPScale(unsigned scale) { |
1189 DCHECK(is_uint6(scale)); | 1260 DCHECK(is_uint6(scale)); |
1190 return scale << FPScale_offset; | 1261 return scale << FPScale_offset; |
1191 } | 1262 } |
1192 | 1263 |
1193 | 1264 |
1194 const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const { | 1265 const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const { |
1195 return reg.Is64Bits() ? xzr : wzr; | 1266 return reg.Is64Bits() ? xzr : wzr; |
1196 } | 1267 } |
(...skipping 26 matching lines...)
1223 | 1294 |
1224 void Assembler::ClearRecordedAstId() { | 1295 void Assembler::ClearRecordedAstId() { |
1225 recorded_ast_id_ = TypeFeedbackId::None(); | 1296 recorded_ast_id_ = TypeFeedbackId::None(); |
1226 } | 1297 } |
1227 | 1298 |
1228 | 1299 |
1229 } // namespace internal | 1300 } // namespace internal |
1230 } // namespace v8 | 1301 } // namespace v8 |
1231 | 1302 |
1232 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_ | 1303 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_ |