| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_ARM64_ASSEMBLER_ARM64_H_ | 5 #ifndef V8_ARM64_ASSEMBLER_ARM64_H_ |
| 6 #define V8_ARM64_ASSEMBLER_ARM64_H_ | 6 #define V8_ARM64_ASSEMBLER_ARM64_H_ |
| 7 | 7 |
| 8 #include <deque> | 8 #include <deque> |
| 9 #include <list> | 9 #include <list> |
| 10 #include <map> | 10 #include <map> |
| (...skipping 45 matching lines...) | |
| 56 V(q0) V(q1) V(q2) V(q3) V(q4) V(q5) V(q6) V(q7) \ | 56 V(q0) V(q1) V(q2) V(q3) V(q4) V(q5) V(q6) V(q7) \ |
| 57 V(q8) V(q9) V(q10) V(q11) V(q12) V(q13) V(q14) V(q15) | 57 V(q8) V(q9) V(q10) V(q11) V(q12) V(q13) V(q14) V(q15) |
| 58 | 58 |
| 59 #define ALLOCATABLE_DOUBLE_REGISTERS(R) \ | 59 #define ALLOCATABLE_DOUBLE_REGISTERS(R) \ |
| 60 R(d0) R(d1) R(d2) R(d3) R(d4) R(d5) R(d6) R(d7) \ | 60 R(d0) R(d1) R(d2) R(d3) R(d4) R(d5) R(d6) R(d7) \ |
| 61 R(d8) R(d9) R(d10) R(d11) R(d12) R(d13) R(d14) R(d16) \ | 61 R(d8) R(d9) R(d10) R(d11) R(d12) R(d13) R(d14) R(d16) \ |
| 62 R(d17) R(d18) R(d19) R(d20) R(d21) R(d22) R(d23) R(d24) \ | 62 R(d17) R(d18) R(d19) R(d20) R(d21) R(d22) R(d23) R(d24) \ |
| 63 R(d25) R(d26) R(d27) R(d28) | 63 R(d25) R(d26) R(d27) R(d28) |
| 64 // clang-format on | 64 // clang-format on |
| 65 | 65 |
| 66 static const int kRegListSizeInBits = sizeof(RegList) * kBitsPerByte; | 66 constexpr int kRegListSizeInBits = sizeof(RegList) * kBitsPerByte; |
| 67 | |
| 68 | 67 |
| 69 // Some CPURegister methods can return Register and FPRegister types, so we | 68 // Some CPURegister methods can return Register and FPRegister types, so we |
| 70 // need to declare them in advance. | 69 // need to declare them in advance. |
| 71 struct Register; | 70 struct Register; |
| 72 struct FPRegister; | 71 struct FPRegister; |
| 73 | 72 |
| 74 | 73 |
| 75 struct CPURegister { | 74 struct CPURegister { |
| 76 enum Code { | 75 enum Code { |
| 77 #define REGISTER_CODE(R) kCode_##R, | 76 #define REGISTER_CODE(R) kCode_##R, |
| 78 GENERAL_REGISTERS(REGISTER_CODE) | 77 GENERAL_REGISTERS(REGISTER_CODE) |
| 79 #undef REGISTER_CODE | 78 #undef REGISTER_CODE |
| 80 kAfterLast, | 79 kAfterLast, |
| 81 kCode_no_reg = -1 | 80 kCode_no_reg = -1 |
| 82 }; | 81 }; |
| 83 | 82 |
| 84 enum RegisterType { | 83 enum RegisterType { |
| 85 // The kInvalid value is used to detect uninitialized static instances, | 84 // The kInvalid value is used to detect uninitialized static instances, |
| 86 // which are always zero-initialized before any constructors are called. | 85 // which are always zero-initialized before any constructors are called. |
| 87 kInvalid = 0, | 86 kInvalid = 0, |
| 88 kRegister, | 87 kRegister, |
| 89 kFPRegister, | 88 kFPRegister, |
| 90 kNoRegister | 89 kNoRegister |
| 91 }; | 90 }; |
| 92 | 91 |
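The comment above about kInvalid relies on the C++ guarantee that objects with static storage duration are zero-initialized before any dynamic initialization runs, so a static register's type field reads as kInvalid (0) until a real initializer executes. A minimal standalone sketch of that idea, using a simplified stand-in struct rather than the real CPURegister:

```cpp
#include <cassert>

// Simplified stand-in for CPURegister, for illustration only.
struct FakeCPURegister {
  enum RegisterType { kInvalid = 0, kRegister, kFPRegister, kNoRegister };
  int reg_code;
  int reg_size;
  RegisterType reg_type;
};

// Statics are zero-initialized before any constructors run, so reg_type
// is kInvalid (0) until an initializer assigns something else.
static FakeCPURegister g_reg;

int main() {
  assert(g_reg.reg_type == FakeCPURegister::kInvalid);
  return 0;
}
```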
| 92 constexpr CPURegister() : CPURegister(0, 0, CPURegister::kNoRegister) {} |
| 93 |
| 94 constexpr CPURegister(int reg_code, int reg_size, RegisterType reg_type) |
| 95 : reg_code(reg_code), reg_size(reg_size), reg_type(reg_type) {} |
| 96 |
| 93 static CPURegister Create(int code, int size, RegisterType type) { | 97 static CPURegister Create(int code, int size, RegisterType type) { |
| 94 CPURegister r = {code, size, type}; | 98 CPURegister r = {code, size, type}; |
| 95 return r; | 99 return r; |
| 96 } | 100 } |
| 97 | 101 |
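With the new constexpr constructors, a CPURegister can be built as a true compile-time constant instead of relying on the zero-initialization trick. A hedged sketch of what this enables, assuming the CPURegister definition from this diff is in scope (the helper name and the 64-bit size constant are illustrative, not from the header):

```cpp
// Sketch only: assumes the CPURegister from this diff; 64 stands in for
// kXRegSizeInBits.
constexpr int kAssumedXRegSizeInBits = 64;

constexpr CPURegister MakeX(int code) {
  return CPURegister(code, kAssumedXRegSizeInBits, CPURegister::kRegister);
}

// Evaluated entirely at compile time; no static initializer is emitted.
constexpr CPURegister kScratch = MakeX(16);
static_assert(kScratch.reg_code == 16, "compile-time construction works");
```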
| 98 int code() const; | 102 int code() const; |
| 99 RegisterType type() const; | 103 RegisterType type() const; |
| 100 RegList Bit() const; | 104 RegList Bit() const; |
| 101 int SizeInBits() const; | 105 int SizeInBits() const; |
| 102 int SizeInBytes() const; | 106 int SizeInBytes() const; |
| (...skipping 28 matching lines...) | |
| 131 int reg_size; | 135 int reg_size; |
| 132 RegisterType reg_type; | 136 RegisterType reg_type; |
| 133 }; | 137 }; |
| 134 | 138 |
| 135 | 139 |
| 136 struct Register : public CPURegister { | 140 struct Register : public CPURegister { |
| 137 static Register Create(int code, int size) { | 141 static Register Create(int code, int size) { |
| 138 return Register(CPURegister::Create(code, size, CPURegister::kRegister)); | 142 return Register(CPURegister::Create(code, size, CPURegister::kRegister)); |
| 139 } | 143 } |
| 140 | 144 |
| 141 Register() { | 145 constexpr Register() : CPURegister() {} |
| 142 reg_code = 0; | |
| 143 reg_size = 0; | |
| 144 reg_type = CPURegister::kNoRegister; | |
| 145 } | |
| 146 | 146 |
| 147 explicit Register(const CPURegister& r) { | 147 constexpr explicit Register(const CPURegister& r) : CPURegister(r) {} |
| 148 reg_code = r.reg_code; | |
| 149 reg_size = r.reg_size; | |
| 150 reg_type = r.reg_type; | |
| 151 DCHECK(IsValidOrNone()); | |
| 152 } | |
| 153 | |
| 154 Register(const Register& r) { // NOLINT(runtime/explicit) | |
| 155 reg_code = r.reg_code; | |
| 156 reg_size = r.reg_size; | |
| 157 reg_type = r.reg_type; | |
| 158 DCHECK(IsValidOrNone()); | |
| 159 } | |
| 160 | 148 |
| 161 bool IsValid() const { | 149 bool IsValid() const { |
| 162 DCHECK(IsRegister() || IsNone()); | 150 DCHECK(IsRegister() || IsNone()); |
| 163 return IsValidRegister(); | 151 return IsValidRegister(); |
| 164 } | 152 } |
| 165 | 153 |
| 166 static Register XRegFromCode(unsigned code); | 154 static Register XRegFromCode(unsigned code); |
| 167 static Register WRegFromCode(unsigned code); | 155 static Register WRegFromCode(unsigned code); |
| 168 | 156 |
| 169 // Start of V8 compatibility section --------------------- | 157 // Start of V8 compatibility section --------------------- |
| 170 // These members are necessary for compilation. | 158 // These members are necessary for compilation. |
| 171 // A few of them may be unused for now. | 159 // A few of them may be unused for now. |
| 172 | 160 |
| 173 static const int kNumRegisters = kNumberOfRegisters; | 161 static constexpr int kNumRegisters = kNumberOfRegisters; |
| 174 STATIC_ASSERT(kNumRegisters == Code::kAfterLast); | 162 STATIC_ASSERT(kNumRegisters == Code::kAfterLast); |
| 175 static int NumRegisters() { return kNumRegisters; } | 163 static int NumRegisters() { return kNumRegisters; } |
| 176 | 164 |
| 177 // We allow crankshaft to use the following registers: | 165 // We allow crankshaft to use the following registers: |
| 178 // - x0 to x15 | 166 // - x0 to x15 |
| 179 // - x18 to x24 | 167 // - x18 to x24 |
| 180 // - x27 (also context) | 168 // - x27 (also context) |
| 181 // | 169 // |
| 182 // TODO(all): Register x25 is currently free and could be available for | 170 // TODO(all): Register x25 is currently free and could be available for |
| 183 // crankshaft, but we don't use it as we might use it as a per function | 171 // crankshaft, but we don't use it as we might use it as a per function |
| 184 // literal pool pointer in the future. | 172 // literal pool pointer in the future. |
| 185 // | 173 // |
| 186 // TODO(all): Consider storing cp in x25 to have only two ranges. | 174 // TODO(all): Consider storing cp in x25 to have only two ranges. |
| 187 // We split allocatable registers in three ranges called | 175 // We split allocatable registers in three ranges called |
| 188 // - "low range" | 176 // - "low range" |
| 189 // - "high range" | 177 // - "high range" |
| 190 // - "context" | 178 // - "context" |
| 191 | 179 |
| 192 static Register from_code(int code) { | 180 static Register from_code(int code) { |
| 193 // Always return an X register. | 181 // Always return an X register. |
| 194 return Register::Create(code, kXRegSizeInBits); | 182 return Register::Create(code, kXRegSizeInBits); |
| 195 } | 183 } |
| 196 | 184 |
| 197 // End of V8 compatibility section ----------------------- | 185 // End of V8 compatibility section ----------------------- |
| 198 }; | 186 }; |
| 199 | 187 |
| 200 static const bool kSimpleFPAliasing = true; | 188 constexpr bool kSimpleFPAliasing = true; |
| 201 static const bool kSimdMaskRegisters = false; | 189 constexpr bool kSimdMaskRegisters = false; |
| 202 | 190 |
| 203 struct FPRegister : public CPURegister { | 191 struct FPRegister : public CPURegister { |
| 204 enum Code { | 192 enum Code { |
| 205 #define REGISTER_CODE(R) kCode_##R, | 193 #define REGISTER_CODE(R) kCode_##R, |
| 206 DOUBLE_REGISTERS(REGISTER_CODE) | 194 DOUBLE_REGISTERS(REGISTER_CODE) |
| 207 #undef REGISTER_CODE | 195 #undef REGISTER_CODE |
| 208 kAfterLast, | 196 kAfterLast, |
| 209 kCode_no_reg = -1 | 197 kCode_no_reg = -1 |
| 210 }; | 198 }; |
| 211 | 199 |
| 212 static FPRegister Create(int code, int size) { | 200 static FPRegister Create(int code, int size) { |
| 213 return FPRegister( | 201 return FPRegister( |
| 214 CPURegister::Create(code, size, CPURegister::kFPRegister)); | 202 CPURegister::Create(code, size, CPURegister::kFPRegister)); |
| 215 } | 203 } |
| 216 | 204 |
| 217 FPRegister() { | 205 constexpr FPRegister() : CPURegister() {} |
| 218 reg_code = 0; | |
| 219 reg_size = 0; | |
| 220 reg_type = CPURegister::kNoRegister; | |
| 221 } | |
| 222 | 206 |
| 223 explicit FPRegister(const CPURegister& r) { | 207 constexpr explicit FPRegister(const CPURegister& r) : CPURegister(r) {} |
| 224 reg_code = r.reg_code; | |
| 225 reg_size = r.reg_size; | |
| 226 reg_type = r.reg_type; | |
| 227 DCHECK(IsValidOrNone()); | |
| 228 } | |
| 229 | |
| 230 FPRegister(const FPRegister& r) { // NOLINT(runtime/explicit) | |
| 231 reg_code = r.reg_code; | |
| 232 reg_size = r.reg_size; | |
| 233 reg_type = r.reg_type; | |
| 234 DCHECK(IsValidOrNone()); | |
| 235 } | |
| 236 | 208 |
| 237 bool IsValid() const { | 209 bool IsValid() const { |
| 238 DCHECK(IsFPRegister() || IsNone()); | 210 DCHECK(IsFPRegister() || IsNone()); |
| 239 return IsValidFPRegister(); | 211 return IsValidFPRegister(); |
| 240 } | 212 } |
| 241 | 213 |
| 242 static FPRegister SRegFromCode(unsigned code); | 214 static FPRegister SRegFromCode(unsigned code); |
| 243 static FPRegister DRegFromCode(unsigned code); | 215 static FPRegister DRegFromCode(unsigned code); |
| 244 | 216 |
| 245 // Start of V8 compatibility section --------------------- | 217 // Start of V8 compatibility section --------------------- |
| 246 static const int kMaxNumRegisters = kNumberOfFPRegisters; | 218 static constexpr int kMaxNumRegisters = kNumberOfFPRegisters; |
| 247 STATIC_ASSERT(kMaxNumRegisters == Code::kAfterLast); | 219 STATIC_ASSERT(kMaxNumRegisters == Code::kAfterLast); |
| 248 | 220 |
| 249 // Crankshaft can use all the FP registers except: | 221 // Crankshaft can use all the FP registers except: |
| 250 // - d15 which is used to keep the 0 double value | 222 // - d15 which is used to keep the 0 double value |
| 251 // - d30 which is used in crankshaft as a double scratch register | 223 // - d30 which is used in crankshaft as a double scratch register |
| 252 // - d31 which is used in the MacroAssembler as a double scratch register | 224 // - d31 which is used in the MacroAssembler as a double scratch register |
| 253 static FPRegister from_code(int code) { | 225 static FPRegister from_code(int code) { |
| 254 // Always return a D register. | 226 // Always return a D register. |
| 255 return FPRegister::Create(code, kDRegSizeInBits); | 227 return FPRegister::Create(code, kDRegSizeInBits); |
| 256 } | 228 } |
| 257 // End of V8 compatibility section ----------------------- | 229 // End of V8 compatibility section ----------------------- |
| 258 }; | 230 }; |
| 259 | 231 |
| 260 | 232 |
| 261 STATIC_ASSERT(sizeof(CPURegister) == sizeof(Register)); | 233 STATIC_ASSERT(sizeof(CPURegister) == sizeof(Register)); |
| 262 STATIC_ASSERT(sizeof(CPURegister) == sizeof(FPRegister)); | 234 STATIC_ASSERT(sizeof(CPURegister) == sizeof(FPRegister)); |
| 263 | 235 |
| 264 | 236 #define DEFINE_REGISTER(register_class, name, code, size, type) \ |
| 265 #if defined(ARM64_DEFINE_REG_STATICS) | 237 constexpr register_class name { CPURegister(code, size, type) } |
| 266 #define INITIALIZE_REGISTER(register_class, name, code, size, type) \ | |
| 267 const CPURegister init_##register_class##_##name = {code, size, type}; \ | |
| 268 const register_class& name = *reinterpret_cast<const register_class*>( \ | |
| 269 &init_##register_class##_##name) | |
| 270 #define ALIAS_REGISTER(register_class, alias, name) \ | |
| 271 const register_class& alias = *reinterpret_cast<const register_class*>( \ | |
| 272 &init_##register_class##_##name) | |
| 273 #else | |
| 274 #define INITIALIZE_REGISTER(register_class, name, code, size, type) \ | |
| 275 extern const register_class& name | |
| 276 #define ALIAS_REGISTER(register_class, alias, name) \ | 238 #define ALIAS_REGISTER(register_class, alias, name) \ |
| 277 extern const register_class& alias | 239 constexpr register_class alias = name |
| 278 #endif // defined(ARM64_DEFINE_REG_STATICS) | |
| 279 | 240 |
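For readers comparing the two schemes: the old INITIALIZE_REGISTER emitted a const reference aliased onto a zero-initialized instance via reinterpret_cast, while the new DEFINE_REGISTER produces a plain constexpr object and ALIAS_REGISTER becomes a constexpr copy. A hand-written expansion of the new macros for one register and one alias, assuming the Register/CPURegister definitions and kXRegSizeInBits from this header:

```cpp
// DEFINE_REGISTER(Register, x16, 16, kXRegSizeInBits, CPURegister::kRegister);
// expands (plus the use-site semicolon) to roughly:
constexpr Register x16{CPURegister(16, kXRegSizeInBits, CPURegister::kRegister)};

// ALIAS_REGISTER(Register, ip0, x16); expands to a constexpr copy:
constexpr Register ip0 = x16;
```

This works because the diff removes the user-defined copy constructors, so Register's implicitly defaulted copy constructor is constexpr.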
| 280 // No*Reg is used to indicate an unused argument, or an error case. Note that | 241 // No*Reg is used to indicate an unused argument, or an error case. Note that |
| 281 // these all compare equal (using the Is() method). The Register and FPRegister | 242 // these all compare equal (using the Is() method). The Register and FPRegister |
| 282 // variants are provided for convenience. | 243 // variants are provided for convenience. |
| 283 INITIALIZE_REGISTER(Register, NoReg, 0, 0, CPURegister::kNoRegister); | 244 DEFINE_REGISTER(Register, NoReg, 0, 0, CPURegister::kNoRegister); |
| 284 INITIALIZE_REGISTER(FPRegister, NoFPReg, 0, 0, CPURegister::kNoRegister); | 245 DEFINE_REGISTER(FPRegister, NoFPReg, 0, 0, CPURegister::kNoRegister); |
| 285 INITIALIZE_REGISTER(CPURegister, NoCPUReg, 0, 0, CPURegister::kNoRegister); | 246 DEFINE_REGISTER(CPURegister, NoCPUReg, 0, 0, CPURegister::kNoRegister); |
| 286 | 247 |
| 287 // v8 compatibility. | 248 // v8 compatibility. |
| 288 INITIALIZE_REGISTER(Register, no_reg, 0, 0, CPURegister::kNoRegister); | 249 DEFINE_REGISTER(Register, no_reg, 0, 0, CPURegister::kNoRegister); |
| 289 | 250 |
| 290 #define DEFINE_REGISTERS(N) \ | 251 #define DEFINE_REGISTERS(N) \ |
| 291 INITIALIZE_REGISTER(Register, w##N, N, \ | 252 DEFINE_REGISTER(Register, w##N, N, kWRegSizeInBits, CPURegister::kRegister); \ |
| 292 kWRegSizeInBits, CPURegister::kRegister); \ | 253 DEFINE_REGISTER(Register, x##N, N, kXRegSizeInBits, CPURegister::kRegister); |
| 293 INITIALIZE_REGISTER(Register, x##N, N, \ | |
| 294 kXRegSizeInBits, CPURegister::kRegister); | |
| 295 GENERAL_REGISTER_CODE_LIST(DEFINE_REGISTERS) | 254 GENERAL_REGISTER_CODE_LIST(DEFINE_REGISTERS) |
| 296 #undef DEFINE_REGISTERS | 255 #undef DEFINE_REGISTERS |
| 297 | 256 |
| 298 INITIALIZE_REGISTER(Register, wcsp, kSPRegInternalCode, kWRegSizeInBits, | 257 DEFINE_REGISTER(Register, wcsp, kSPRegInternalCode, kWRegSizeInBits, |
| 299 CPURegister::kRegister); | 258 CPURegister::kRegister); |
| 300 INITIALIZE_REGISTER(Register, csp, kSPRegInternalCode, kXRegSizeInBits, | 259 DEFINE_REGISTER(Register, csp, kSPRegInternalCode, kXRegSizeInBits, |
| 301 CPURegister::kRegister); | 260 CPURegister::kRegister); |
| 302 | 261 |
| 303 #define DEFINE_FPREGISTERS(N) \ | 262 #define DEFINE_FPREGISTERS(N) \ |
| 304 INITIALIZE_REGISTER(FPRegister, s##N, N, \ | 263 DEFINE_REGISTER(FPRegister, s##N, N, kSRegSizeInBits, \ |
| 305 kSRegSizeInBits, CPURegister::kFPRegister); \ | 264 CPURegister::kFPRegister); \ |
| 306 INITIALIZE_REGISTER(FPRegister, d##N, N, \ | 265 DEFINE_REGISTER(FPRegister, d##N, N, kDRegSizeInBits, \ |
| 307 kDRegSizeInBits, CPURegister::kFPRegister); | 266 CPURegister::kFPRegister); |
| 308 GENERAL_REGISTER_CODE_LIST(DEFINE_FPREGISTERS) | 267 GENERAL_REGISTER_CODE_LIST(DEFINE_FPREGISTERS) |
| 309 #undef DEFINE_FPREGISTERS | 268 #undef DEFINE_FPREGISTERS |
| 310 | 269 |
| 311 #undef INITIALIZE_REGISTER | 270 #undef DEFINE_REGISTER |
| 312 | 271 |
| 313 // Register aliases. | 272 // Register aliases. |
| 314 ALIAS_REGISTER(Register, ip0, x16); | 273 ALIAS_REGISTER(Register, ip0, x16); |
| 315 ALIAS_REGISTER(Register, ip1, x17); | 274 ALIAS_REGISTER(Register, ip1, x17); |
| 316 ALIAS_REGISTER(Register, wip0, w16); | 275 ALIAS_REGISTER(Register, wip0, w16); |
| 317 ALIAS_REGISTER(Register, wip1, w17); | 276 ALIAS_REGISTER(Register, wip1, w17); |
| 318 // Root register. | 277 // Root register. |
| 319 ALIAS_REGISTER(Register, root, x26); | 278 ALIAS_REGISTER(Register, root, x26); |
| 320 ALIAS_REGISTER(Register, rr, x26); | 279 ALIAS_REGISTER(Register, rr, x26); |
| 321 // Context pointer register. | 280 // Context pointer register. |
| (...skipping 237 matching lines...) | |
| 559 private: | 518 private: |
| 560 void InitializeHandle(Handle<Object> value); | 519 void InitializeHandle(Handle<Object> value); |
| 561 | 520 |
| 562 int64_t value_; | 521 int64_t value_; |
| 563 RelocInfo::Mode rmode_; | 522 RelocInfo::Mode rmode_; |
| 564 }; | 523 }; |
| 565 | 524 |
| 566 | 525 |
| 567 // ----------------------------------------------------------------------------- | 526 // ----------------------------------------------------------------------------- |
| 568 // Operands. | 527 // Operands. |
| 569 const int kSmiShift = kSmiTagSize + kSmiShiftSize; | 528 constexpr int kSmiShift = kSmiTagSize + kSmiShiftSize; |
| 570 const uint64_t kSmiShiftMask = (1UL << kSmiShift) - 1; | 529 constexpr uint64_t kSmiShiftMask = (1UL << kSmiShift) - 1; |
| 571 | 530 |
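For context on these two constants: on 64-bit arm64 builds of this era, V8 stores a Smi's 32-bit payload in the upper half of the tagged word, so with a tag size of 1 and a shift size of 31 the total shift is 32 and the mask covers the low 32 bits. The concrete values below are an assumption about that configuration, not something stated in this diff; the snippet just checks the arithmetic:

```cpp
#include <cstdint>
#include <cstdio>

// Assumed values for a 64-bit arm64 build without pointer compression.
constexpr int kAssumedSmiTagSize = 1;
constexpr int kAssumedSmiShiftSize = 31;
constexpr int kAssumedSmiShift = kAssumedSmiTagSize + kAssumedSmiShiftSize;  // 32
constexpr uint64_t kAssumedSmiShiftMask = (1ULL << kAssumedSmiShift) - 1;    // 0xffffffff

int main() {
  // Encoding places the 32-bit integer payload in the upper half of the word.
  int32_t value = -42;
  uint64_t tagged =
      static_cast<uint64_t>(static_cast<int64_t>(value)) << kAssumedSmiShift;
  // Decoding is an arithmetic shift back down.
  int64_t decoded = static_cast<int64_t>(tagged) >> kAssumedSmiShift;
  std::printf("shift=%d mask=%llx decoded=%lld\n", kAssumedSmiShift,
              static_cast<unsigned long long>(kAssumedSmiShiftMask),
              static_cast<long long>(decoded));
  return 0;
}
```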
| 572 // Represents an operand in a machine instruction. | 531 // Represents an operand in a machine instruction. |
| 573 class Operand { | 532 class Operand { |
| 574 // TODO(all): If necessary, study in more detail which methods | 533 // TODO(all): If necessary, study in more detail which methods |
| 575 // TODO(all): should be inlined or not. | 534 // TODO(all): should be inlined or not. |
| 576 public: | 535 public: |
| 577 // rm, {<shift> {#<shift_amount>}} | 536 // rm, {<shift> {#<shift_amount>}} |
| 578 // where <shift> is one of {LSL, LSR, ASR, ROR}. | 537 // where <shift> is one of {LSL, LSR, ASR, ROR}. |
| 579 // <shift_amount> is uint6_t. | 538 // <shift_amount> is uint6_t. |
| 580 // This is allowed to be an implicit constructor because Operand is | 539 // This is allowed to be an implicit constructor because Operand is |
| (...skipping 248 matching lines...) | |
| 829 inline static void deserialization_set_special_target_at( | 788 inline static void deserialization_set_special_target_at( |
| 830 Isolate* isolate, Address constant_pool_entry, Code* code, | 789 Isolate* isolate, Address constant_pool_entry, Code* code, |
| 831 Address target); | 790 Address target); |
| 832 | 791 |
| 833 // This sets the internal reference at the pc. | 792 // This sets the internal reference at the pc. |
| 834 inline static void deserialization_set_target_internal_reference_at( | 793 inline static void deserialization_set_target_internal_reference_at( |
| 835 Isolate* isolate, Address pc, Address target, | 794 Isolate* isolate, Address pc, Address target, |
| 836 RelocInfo::Mode mode = RelocInfo::INTERNAL_REFERENCE); | 795 RelocInfo::Mode mode = RelocInfo::INTERNAL_REFERENCE); |
| 837 | 796 |
| 838 // All addresses in the constant pool are the same size as pointers. | 797 // All addresses in the constant pool are the same size as pointers. |
| 839 static const int kSpecialTargetSize = kPointerSize; | 798 static constexpr int kSpecialTargetSize = kPointerSize; |
| 840 | 799 |
| 841 // The sizes of the call sequences emitted by MacroAssembler::Call. | 800 // The sizes of the call sequences emitted by MacroAssembler::Call. |
| 842 // Wherever possible, use MacroAssembler::CallSize instead of these constants, | 801 // Wherever possible, use MacroAssembler::CallSize instead of these constants, |
| 843 // as it will choose the correct value for a given relocation mode. | 802 // as it will choose the correct value for a given relocation mode. |
| 844 // | 803 // |
| 845 // Without relocation: | 804 // Without relocation: |
| 846 // movz temp, #(target & 0x000000000000ffff) | 805 // movz temp, #(target & 0x000000000000ffff) |
| 847 // movk temp, #(target & 0x00000000ffff0000) | 806 // movk temp, #(target & 0x00000000ffff0000) |
| 848 // movk temp, #(target & 0x0000ffff00000000) | 807 // movk temp, #(target & 0x0000ffff00000000) |
| 849 // blr temp | 808 // blr temp |
| 850 // | 809 // |
| 851 // With relocation: | 810 // With relocation: |
| 852 // ldr temp, =target | 811 // ldr temp, =target |
| 853 // blr temp | 812 // blr temp |
| 854 static const int kCallSizeWithoutRelocation = 4 * kInstructionSize; | 813 static constexpr int kCallSizeWithoutRelocation = 4 * kInstructionSize; |
| 855 static const int kCallSizeWithRelocation = 2 * kInstructionSize; | 814 static constexpr int kCallSizeWithRelocation = 2 * kInstructionSize; |
| 856 | 815 |
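As the comment notes, callers should prefer MacroAssembler::CallSize, which picks between these two constants based on the relocation mode. A minimal sketch of that selection, using illustrative names rather than the real MacroAssembler signature:

```cpp
// Illustrative only: mirrors the comment above, not the actual CallSize API.
constexpr int kAssumedInstructionSize = 4;  // bytes per arm64 instruction
constexpr int kSketchCallSizeWithoutRelocation = 4 * kAssumedInstructionSize;
constexpr int kSketchCallSizeWithRelocation = 2 * kAssumedInstructionSize;

// movz/movk/movk/blr when no relocation info is needed; otherwise a
// literal load plus blr.
inline int SketchCallSize(bool needs_relocation) {
  return needs_relocation ? kSketchCallSizeWithRelocation
                          : kSketchCallSizeWithoutRelocation;
}
```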
| 857 // Size of the generated code in bytes | 816 // Size of the generated code in bytes |
| 858 uint64_t SizeOfGeneratedCode() const { | 817 uint64_t SizeOfGeneratedCode() const { |
| 859 DCHECK((pc_ >= buffer_) && (pc_ < (buffer_ + buffer_size_))); | 818 DCHECK((pc_ >= buffer_) && (pc_ < (buffer_ + buffer_size_))); |
| 860 return pc_ - buffer_; | 819 return pc_ - buffer_; |
| 861 } | 820 } |
| 862 | 821 |
| 863 // Return the code size generated from label to the current position. | 822 // Return the code size generated from label to the current position. |
| 864 uint64_t SizeOfCodeGeneratedSince(const Label* label) { | 823 uint64_t SizeOfCodeGeneratedSince(const Label* label) { |
| 865 DCHECK(label->is_bound()); | 824 DCHECK(label->is_bound()); |
| (...skipping 11 matching lines...) | |
| 877 DCHECK(size >= 0); | 836 DCHECK(size >= 0); |
| 878 DCHECK(static_cast<uint64_t>(size) == SizeOfCodeGeneratedSince(label)); | 837 DCHECK(static_cast<uint64_t>(size) == SizeOfCodeGeneratedSince(label)); |
| 879 } | 838 } |
| 880 | 839 |
| 881 // Return the number of instructions generated from label to the | 840 // Return the number of instructions generated from label to the |
| 882 // current position. | 841 // current position. |
| 883 uint64_t InstructionsGeneratedSince(const Label* label) { | 842 uint64_t InstructionsGeneratedSince(const Label* label) { |
| 884 return SizeOfCodeGeneratedSince(label) / kInstructionSize; | 843 return SizeOfCodeGeneratedSince(label) / kInstructionSize; |
| 885 } | 844 } |
| 886 | 845 |
| 887 static const int kPatchDebugBreakSlotAddressOffset = 0; | 846 static constexpr int kPatchDebugBreakSlotAddressOffset = 0; |
| 888 | 847 |
| 889 // Number of instructions necessary to be able to later patch it to a call. | 848 // Number of instructions necessary to be able to later patch it to a call. |
| 890 static const int kDebugBreakSlotInstructions = 5; | 849 static constexpr int kDebugBreakSlotInstructions = 5; |
| 891 static const int kDebugBreakSlotLength = | 850 static constexpr int kDebugBreakSlotLength = |
| 892 kDebugBreakSlotInstructions * kInstructionSize; | 851 kDebugBreakSlotInstructions * kInstructionSize; |
| 893 | 852 |
| 894 // Prevent constant pool emission until EndBlockConstPool is called. | 853 // Prevent constant pool emission until EndBlockConstPool is called. |
| 895 // Calls to this function can be nested but must be followed by an equal | 854 // Calls to this function can be nested but must be followed by an equal |
| 896 // number of calls to EndBlockConstPool. | 855 // number of calls to EndBlockConstPool. |
| 897 void StartBlockConstPool(); | 856 void StartBlockConstPool(); |
| 898 | 857 |
| 899 // Resume constant pool emission. Needs to be called as many times as | 858 // Resume constant pool emission. Needs to be called as many times as |
| 900 // StartBlockConstPool to have an effect. | 859 // StartBlockConstPool to have an effect. |
| 901 void EndBlockConstPool(); | 860 void EndBlockConstPool(); |
| 902 | 861 |
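Because StartBlockConstPool and EndBlockConstPool must stay balanced even across early returns, call sites typically pair them with a small RAII scope. The assembler has its own scope helper for this; the class below is only a hedged sketch of the pattern, not the real API:

```cpp
// Sketch of an RAII guard that keeps Start/End calls balanced.
class ConstPoolBlockGuard {
 public:
  explicit ConstPoolBlockGuard(Assembler* assm) : assm_(assm) {
    assm_->StartBlockConstPool();
  }
  ~ConstPoolBlockGuard() { assm_->EndBlockConstPool(); }

 private:
  Assembler* assm_;
};

// Usage: constant pool emission is blocked for the whole scope, so a
// multi-instruction sequence that must stay contiguous cannot be split.
// {
//   ConstPoolBlockGuard guard(&assm);
//   // ... emit the sequence ...
// }
```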
| (...skipping 937 matching lines...) | |
| 1840 // which can at most reach to specified pc. | 1799 // which can at most reach to specified pc. |
| 1841 bool ShouldEmitVeneer(int max_reachable_pc, | 1800 bool ShouldEmitVeneer(int max_reachable_pc, |
| 1842 int margin = kVeneerDistanceMargin); | 1801 int margin = kVeneerDistanceMargin); |
| 1843 bool ShouldEmitVeneers(int margin = kVeneerDistanceMargin) { | 1802 bool ShouldEmitVeneers(int margin = kVeneerDistanceMargin) { |
| 1844 return ShouldEmitVeneer(unresolved_branches_first_limit(), margin); | 1803 return ShouldEmitVeneer(unresolved_branches_first_limit(), margin); |
| 1845 } | 1804 } |
| 1846 | 1805 |
| 1847 // The maximum code size generated for a veneer. Currently one branch | 1806 // The maximum code size generated for a veneer. Currently one branch |
| 1848 // instruction. This is for code size checking purposes, and can be extended | 1807 // instruction. This is for code size checking purposes, and can be extended |
| 1849 // in the future for example if we decide to add nops between the veneers. | 1808 // in the future for example if we decide to add nops between the veneers. |
| 1850 static const int kMaxVeneerCodeSize = 1 * kInstructionSize; | 1809 static constexpr int kMaxVeneerCodeSize = 1 * kInstructionSize; |
| 1851 | 1810 |
| 1852 void RecordVeneerPool(int location_offset, int size); | 1811 void RecordVeneerPool(int location_offset, int size); |
| 1853 // Emits veneers for branches that are approaching their maximum range. | 1812 // Emits veneers for branches that are approaching their maximum range. |
| 1854 // If need_protection is true, the veneers are protected by a branch jumping | 1813 // If need_protection is true, the veneers are protected by a branch jumping |
| 1855 // over the code. | 1814 // over the code. |
| 1856 void EmitVeneers(bool force_emit, bool need_protection, | 1815 void EmitVeneers(bool force_emit, bool need_protection, |
| 1857 int margin = kVeneerDistanceMargin); | 1816 int margin = kVeneerDistanceMargin); |
| 1858 void EmitVeneersGuard() { EmitPoolGuard(); } | 1817 void EmitVeneersGuard() { EmitPoolGuard(); } |
| 1859 // Checks whether veneers need to be emitted at this point. | 1818 // Checks whether veneers need to be emitted at this point. |
| 1860 // If force_emit is set, a veneer is generated for *all* unresolved branches. | 1819 // If force_emit is set, a veneer is generated for *all* unresolved branches. |
| (...skipping 132 matching lines...) | |
| 1993 | 1952 |
| 1994 // Label helpers. | 1953 // Label helpers. |
| 1995 | 1954 |
| 1996 // Return an offset for a label-referencing instruction, typically a branch. | 1955 // Return an offset for a label-referencing instruction, typically a branch. |
| 1997 int LinkAndGetByteOffsetTo(Label* label); | 1956 int LinkAndGetByteOffsetTo(Label* label); |
| 1998 | 1957 |
| 1999 // This is the same as LinkAndGetByteOffsetTo, but return an offset | 1958 // This is the same as LinkAndGetByteOffsetTo, but return an offset |
| 2000 // suitable for fields that take instruction offsets. | 1959 // suitable for fields that take instruction offsets. |
| 2001 inline int LinkAndGetInstructionOffsetTo(Label* label); | 1960 inline int LinkAndGetInstructionOffsetTo(Label* label); |
| 2002 | 1961 |
| 2003 static const int kStartOfLabelLinkChain = 0; | 1962 static constexpr int kStartOfLabelLinkChain = 0; |
| 2004 | 1963 |
| 2005 // Verify that a label's link chain is intact. | 1964 // Verify that a label's link chain is intact. |
| 2006 void CheckLabelLinkChain(Label const * label); | 1965 void CheckLabelLinkChain(Label const * label); |
| 2007 | 1966 |
| 2008 void RecordLiteral(int64_t imm, unsigned size); | 1967 void RecordLiteral(int64_t imm, unsigned size); |
| 2009 | 1968 |
| 2010 // Postpone the generation of the constant pool for the specified number of | 1969 // Postpone the generation of the constant pool for the specified number of |
| 2011 // instructions. | 1970 // instructions. |
| 2012 void BlockConstPoolFor(int instructions); | 1971 void BlockConstPoolFor(int instructions); |
| 2013 | 1972 |
| (...skipping 40 matching lines...) | |
| 2054 // If a pool needs to be emitted before code generation is finished a branch | 2013 // If a pool needs to be emitted before code generation is finished a branch |
| 2055 // over the emitted pool will be inserted. | 2014 // over the emitted pool will be inserted. |
| 2056 | 2015 |
| 2057 // Constants in the pool may be addresses of functions that get relocated; | 2016 // Constants in the pool may be addresses of functions that get relocated; |
| 2058 // if so, a relocation info entry is associated with the constant pool entry. | 2017 // if so, a relocation info entry is associated with the constant pool entry. |
| 2059 | 2018 |
| 2060 // Repeated checking whether the constant pool should be emitted is rather | 2019 // Repeated checking whether the constant pool should be emitted is rather |
| 2061 // expensive. By default we only check again once a number of instructions | 2020 // expensive. By default we only check again once a number of instructions |
| 2062 // has been generated. That also means that the sizing of the buffers is not | 2021 // has been generated. That also means that the sizing of the buffers is not |
| 2063 // an exact science, and that we rely on some slop to not overrun buffers. | 2022 // an exact science, and that we rely on some slop to not overrun buffers. |
| 2064 static const int kCheckConstPoolInterval = 128; | 2023 static constexpr int kCheckConstPoolInterval = 128; |
| 2065 | 2024 |
| 2066 // Distance to first use after which a pool will be emitted. Pool entries | 2025 // Distance to first use after which a pool will be emitted. Pool entries |
| 2067 // are accessed with a pc-relative load, therefore this cannot be more than | 2026 // are accessed with a pc-relative load, therefore this cannot be more than |
| 2068 // 1 * MB. Since constant pool emission checks are interval based this value | 2027 // 1 * MB. Since constant pool emission checks are interval based this value |
| 2069 // is an approximation. | 2028 // is an approximation. |
| 2070 static const int kApproxMaxDistToConstPool = 64 * KB; | 2029 static constexpr int kApproxMaxDistToConstPool = 64 * KB; |
| 2071 | 2030 |
| 2072 // Number of pool entries after which a pool will be emitted. Since constant | 2031 // Number of pool entries after which a pool will be emitted. Since constant |
| 2073 // pool emission checks are interval based this value is an approximation. | 2032 // pool emission checks are interval based this value is an approximation. |
| 2074 static const int kApproxMaxPoolEntryCount = 512; | 2033 static constexpr int kApproxMaxPoolEntryCount = 512; |
| 2075 | 2034 |
| 2076 // Emission of the constant pool may be blocked in some code sequences. | 2035 // Emission of the constant pool may be blocked in some code sequences. |
| 2077 int const_pool_blocked_nesting_; // Block emission if this is not zero. | 2036 int const_pool_blocked_nesting_; // Block emission if this is not zero. |
| 2078 int no_const_pool_before_; // Block emission before this pc offset. | 2037 int no_const_pool_before_; // Block emission before this pc offset. |
| 2079 | 2038 |
| 2080 // Emission of the veneer pools may be blocked in some code sequences. | 2039 // Emission of the veneer pools may be blocked in some code sequences. |
| 2081 int veneer_pool_blocked_nesting_; // Block emission if this is not zero. | 2040 int veneer_pool_blocked_nesting_; // Block emission if this is not zero. |
| 2082 | 2041 |
| 2083 // Relocation info generation | 2042 // Relocation info generation |
| 2084 // Each relocation is encoded as a variable size value | 2043 // Each relocation is encoded as a variable size value |
| 2085 static const int kMaxRelocSize = RelocInfoWriter::kMaxSize; | 2044 static constexpr int kMaxRelocSize = RelocInfoWriter::kMaxSize; |
| 2086 RelocInfoWriter reloc_info_writer; | 2045 RelocInfoWriter reloc_info_writer; |
| 2087 // Internal reference positions, required for (potential) patching in | 2046 // Internal reference positions, required for (potential) patching in |
| 2088 // GrowBuffer(); contains only those internal references whose labels | 2047 // GrowBuffer(); contains only those internal references whose labels |
| 2089 // are already bound. | 2048 // are already bound. |
| 2090 std::deque<int> internal_reference_positions_; | 2049 std::deque<int> internal_reference_positions_; |
| 2091 | 2050 |
| 2092 // Relocation info records are also used during code generation as temporary | 2051 // Relocation info records are also used during code generation as temporary |
| 2093 // containers for constants and code target addresses until they are emitted | 2052 // containers for constants and code target addresses until they are emitted |
| 2094 // to the constant pool. These pending relocation info records are temporarily | 2053 // to the constant pool. These pending relocation info records are temporarily |
| 2095 // stored in a separate buffer until a constant pool is emitted. | 2054 // stored in a separate buffer until a constant pool is emitted. |
| (...skipping 18 matching lines...) | |
| 2114 DCHECK(recorded_ast_id_.IsNone()); | 2073 DCHECK(recorded_ast_id_.IsNone()); |
| 2115 recorded_ast_id_ = ast_id; | 2074 recorded_ast_id_ = ast_id; |
| 2116 } | 2075 } |
| 2117 | 2076 |
| 2118 // Code generation | 2077 // Code generation |
| 2119 // The relocation writer's position is at least kGap bytes below the end of | 2078 // The relocation writer's position is at least kGap bytes below the end of |
| 2120 // the generated instructions. This is so that multi-instruction sequences do | 2079 // the generated instructions. This is so that multi-instruction sequences do |
| 2121 // not have to check for overflow. The same is true for writes of large | 2080 // not have to check for overflow. The same is true for writes of large |
| 2122 // relocation info entries, and debug strings encoded in the instruction | 2081 // relocation info entries, and debug strings encoded in the instruction |
| 2123 // stream. | 2082 // stream. |
| 2124 static const int kGap = 128; | 2083 static constexpr int kGap = 128; |
| 2125 | 2084 |
| 2126 public: | 2085 public: |
| 2127 class FarBranchInfo { | 2086 class FarBranchInfo { |
| 2128 public: | 2087 public: |
| 2129 FarBranchInfo(int offset, Label* label) | 2088 FarBranchInfo(int offset, Label* label) |
| 2130 : pc_offset_(offset), label_(label) {} | 2089 : pc_offset_(offset), label_(label) {} |
| 2131 // Offset of the branch in the code generation buffer. | 2090 // Offset of the branch in the code generation buffer. |
| 2132 int pc_offset_; | 2091 int pc_offset_; |
| 2133 // The label branched to. | 2092 // The label branched to. |
| 2134 Label* label_; | 2093 Label* label_; |
| 2135 }; | 2094 }; |
| 2136 | 2095 |
| 2137 protected: | 2096 protected: |
| 2138 // Information about unresolved (forward) branches. | 2097 // Information about unresolved (forward) branches. |
| 2139 // The Assembler is only allowed to delete out-of-date information from here | 2098 // The Assembler is only allowed to delete out-of-date information from here |
| 2140 // after a label is bound. The MacroAssembler uses this information to | 2099 // after a label is bound. The MacroAssembler uses this information to |
| 2141 // generate veneers. | 2100 // generate veneers. |
| 2142 // | 2101 // |
| 2143 // The second member gives information about the unresolved branch. The first | 2102 // The second member gives information about the unresolved branch. The first |
| 2144 // member of the pair is the maximum offset that the branch can reach in the | 2103 // member of the pair is the maximum offset that the branch can reach in the |
| 2145 // buffer. The map is sorted according to this reachable offset, allowing us to | 2104 // buffer. The map is sorted according to this reachable offset, allowing us to |
| 2146 // easily check when veneers need to be emitted. | 2105 // easily check when veneers need to be emitted. |
| 2147 // Note that the maximum reachable offset (first member of the pairs) should | 2106 // Note that the maximum reachable offset (first member of the pairs) should |
| 2148 // always be positive but has the same type as the return value for | 2107 // always be positive but has the same type as the return value for |
| 2149 // pc_offset() for convenience. | 2108 // pc_offset() for convenience. |
| 2150 std::multimap<int, FarBranchInfo> unresolved_branches_; | 2109 std::multimap<int, FarBranchInfo> unresolved_branches_; |
| 2151 | 2110 |
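Since std::multimap keeps its keys ordered, the entry with the smallest maximum-reachable offset always sits at begin(), which is exactly what unresolved_branches_first_limit() below relies on. A standalone sketch of that pattern, with FarBranchInfo reduced to the two fields shown above:

```cpp
#include <cassert>
#include <map>

struct SketchFarBranchInfo {
  int pc_offset_;   // where the branch was emitted
  void* label_;     // the label branched to (opaque here)
};

int main() {
  // Keyed by the maximum pc the branch can reach; kept in ascending order.
  std::multimap<int, SketchFarBranchInfo> unresolved;
  unresolved.emplace(4096, SketchFarBranchInfo{100, nullptr});
  unresolved.emplace(2048, SketchFarBranchInfo{40, nullptr});
  unresolved.emplace(8192, SketchFarBranchInfo{200, nullptr});

  // The most urgent deadline (smallest reachable limit) is always first,
  // so deciding whether a veneer is needed is a single lookup.
  assert(unresolved.begin()->first == 2048);
  return 0;
}
```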
| 2152 // We generate a veneer for a branch if we reach within this distance of the | 2111 // We generate a veneer for a branch if we reach within this distance of the |
| 2153 // limit of the range. | 2112 // limit of the range. |
| 2154 static const int kVeneerDistanceMargin = 1 * KB; | 2113 static constexpr int kVeneerDistanceMargin = 1 * KB; |
| 2155 // The factor of 2 is a finger in the air guess. With a default margin of | 2114 // The factor of 2 is a finger in the air guess. With a default margin of |
| 2156 // 1KB, that leaves us an additional 256 instructions to avoid generating a | 2115 // 1KB, that leaves us an additional 256 instructions to avoid generating a |
| 2157 // protective branch. | 2116 // protective branch. |
| 2158 static const int kVeneerNoProtectionFactor = 2; | 2117 static constexpr int kVeneerNoProtectionFactor = 2; |
| 2159 static const int kVeneerDistanceCheckMargin = | 2118 static constexpr int kVeneerDistanceCheckMargin = |
| 2160 kVeneerNoProtectionFactor * kVeneerDistanceMargin; | 2119 kVeneerNoProtectionFactor * kVeneerDistanceMargin; |
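The "additional 256 instructions" figure in the comment falls straight out of the constants: one extra kilobyte of margin divided by 4-byte arm64 instructions. A tiny compile-time check of that arithmetic, with KB and the instruction size written out as assumptions rather than taken from the headers:

```cpp
// Assumed arm64 values; the header's KB and kInstructionSize should agree.
constexpr int kAssumedKB = 1024;
constexpr int kAssumedInstrSize = 4;

constexpr int kSketchVeneerDistanceMargin = 1 * kAssumedKB;
constexpr int kSketchVeneerNoProtectionFactor = 2;
constexpr int kSketchVeneerDistanceCheckMargin =
    kSketchVeneerNoProtectionFactor * kSketchVeneerDistanceMargin;  // 2048 bytes

// The margin beyond kVeneerDistanceMargin is one more KB, i.e. the
// "additional 256 instructions" mentioned in the comment above.
static_assert((kSketchVeneerDistanceCheckMargin - kSketchVeneerDistanceMargin) /
                      kAssumedInstrSize ==
                  256,
              "margin arithmetic");
```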
| 2161 int unresolved_branches_first_limit() const { | 2120 int unresolved_branches_first_limit() const { |
| 2162 DCHECK(!unresolved_branches_.empty()); | 2121 DCHECK(!unresolved_branches_.empty()); |
| 2163 return unresolved_branches_.begin()->first; | 2122 return unresolved_branches_.begin()->first; |
| 2164 } | 2123 } |
| 2165 // This is similar to next_constant_pool_check_ and helps reduce the overhead | 2124 // This is similar to next_constant_pool_check_ and helps reduce the overhead |
| 2166 // of checking for veneer pools. | 2125 // of checking for veneer pools. |
| 2167 // It is maintained to the closest unresolved branch limit minus the maximum | 2126 // It is maintained to the closest unresolved branch limit minus the maximum |
| 2168 // veneer margin (or kMaxInt if there are no unresolved branches). | 2127 // veneer margin (or kMaxInt if there are no unresolved branches). |
| 2169 int next_veneer_pool_check_; | 2128 int next_veneer_pool_check_; |
| 2170 | 2129 |
| (...skipping 43 matching lines...) | |
| 2214 // Verify we have generated the number of instructions we expected. | 2173 // Verify we have generated the number of instructions we expected. |
| 2215 DCHECK((pc_offset() + kGap) == buffer_size_); | 2174 DCHECK((pc_offset() + kGap) == buffer_size_); |
| 2216 // Verify no relocation information has been emitted. | 2175 // Verify no relocation information has been emitted. |
| 2217 DCHECK(IsConstPoolEmpty()); | 2176 DCHECK(IsConstPoolEmpty()); |
| 2218 // Flush the Instruction cache. | 2177 // Flush the Instruction cache. |
| 2219 size_t length = buffer_size_ - kGap; | 2178 size_t length = buffer_size_ - kGap; |
| 2220 Assembler::FlushICache(isolate(), buffer_, length); | 2179 Assembler::FlushICache(isolate(), buffer_, length); |
| 2221 } | 2180 } |
| 2222 | 2181 |
| 2223 // See definition of PatchAdrFar() for details. | 2182 // See definition of PatchAdrFar() for details. |
| 2224 static const int kAdrFarPatchableNNops = 2; | 2183 static constexpr int kAdrFarPatchableNNops = 2; |
| 2225 static const int kAdrFarPatchableNInstrs = kAdrFarPatchableNNops + 2; | 2184 static constexpr int kAdrFarPatchableNInstrs = kAdrFarPatchableNNops + 2; |
| 2226 void PatchAdrFar(int64_t target_offset); | 2185 void PatchAdrFar(int64_t target_offset); |
| 2227 }; | 2186 }; |
| 2228 | 2187 |
| 2229 | 2188 |
| 2230 class EnsureSpace BASE_EMBEDDED { | 2189 class EnsureSpace BASE_EMBEDDED { |
| 2231 public: | 2190 public: |
| 2232 explicit EnsureSpace(Assembler* assembler) { | 2191 explicit EnsureSpace(Assembler* assembler) { |
| 2233 assembler->CheckBufferSpace(); | 2192 assembler->CheckBufferSpace(); |
| 2234 } | 2193 } |
| 2235 }; | 2194 }; |
| 2236 | 2195 |
| 2237 } // namespace internal | 2196 } // namespace internal |
| 2238 } // namespace v8 | 2197 } // namespace v8 |
| 2239 | 2198 |
| 2240 #endif // V8_ARM64_ASSEMBLER_ARM64_H_ | 2199 #endif // V8_ARM64_ASSEMBLER_ARM64_H_ |