OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
57 inline MemOperand FieldMemOperand(Register object, int offset); | 57 inline MemOperand FieldMemOperand(Register object, int offset); |
58 inline MemOperand UntagSmiFieldMemOperand(Register object, int offset); | 58 inline MemOperand UntagSmiFieldMemOperand(Register object, int offset); |
59 | 59 |
60 // Generate a MemOperand for loading a SMI from memory. | 60 // Generate a MemOperand for loading a SMI from memory. |
61 inline MemOperand UntagSmiMemOperand(Register object, int offset); | 61 inline MemOperand UntagSmiMemOperand(Register object, int offset); |
62 | 62 |
63 | 63 |
64 // ---------------------------------------------------------------------------- | 64 // ---------------------------------------------------------------------------- |
65 // MacroAssembler | 65 // MacroAssembler |
66 | 66 |
| 67 enum BranchType { |
| 68 // Copies of architectural conditions. |
| 69 // The associated conditions can be used in place of those, the code will |
| 70 // take care of reinterpreting them with the correct type. |
| 71 integer_eq = eq, |
| 72 integer_ne = ne, |
| 73 integer_hs = hs, |
| 74 integer_lo = lo, |
| 75 integer_mi = mi, |
| 76 integer_pl = pl, |
| 77 integer_vs = vs, |
| 78 integer_vc = vc, |
| 79 integer_hi = hi, |
| 80 integer_ls = ls, |
| 81 integer_ge = ge, |
| 82 integer_lt = lt, |
| 83 integer_gt = gt, |
| 84 integer_le = le, |
| 85 integer_al = al, |
| 86 integer_nv = nv, |
| 87 |
| 88 // These two are *different* from the architectural codes al and nv. |
| 89 // 'always' is used to generate unconditional branches. |
| 90 // 'never' is used to not generate a branch (generally as the inverse |
 | 91 //   branch type of 'always'). |
| 92 always, never, |
| 93 // cbz and cbnz |
| 94 reg_zero, reg_not_zero, |
| 95 // tbz and tbnz |
| 96 reg_bit_clear, reg_bit_set, |
| 97 |
| 98 // Aliases. |
| 99 kBranchTypeFirstCondition = eq, |
| 100 kBranchTypeLastCondition = nv, |
| 101 kBranchTypeFirstUsingReg = reg_zero, |
| 102 kBranchTypeFirstUsingBit = reg_bit_clear |
| 103 }; |
| 104 |
| 105 inline BranchType InvertBranchType(BranchType type) { |
| 106 if (kBranchTypeFirstCondition <= type && type <= kBranchTypeLastCondition) { |
| 107 return static_cast<BranchType>( |
| 108 InvertCondition(static_cast<Condition>(type))); |
| 109 } else { |
| 110 return static_cast<BranchType>(type ^ 1); |
| 111 } |
| 112 } |
| 113 |
67 enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET }; | 114 enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET }; |
68 enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK }; | 115 enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK }; |
69 enum LinkRegisterStatus { kLRHasNotBeenSaved, kLRHasBeenSaved }; | 116 enum LinkRegisterStatus { kLRHasNotBeenSaved, kLRHasBeenSaved }; |
70 enum TargetAddressStorageMode { | 117 enum TargetAddressStorageMode { |
71 CAN_INLINE_TARGET_ADDRESS, | 118 CAN_INLINE_TARGET_ADDRESS, |
72 NEVER_INLINE_TARGET_ADDRESS | 119 NEVER_INLINE_TARGET_ADDRESS |
73 }; | 120 }; |
74 enum UntagMode { kNotSpeculativeUntag, kSpeculativeUntag }; | 121 enum UntagMode { kNotSpeculativeUntag, kSpeculativeUntag }; |
75 enum ArrayHasHoles { kArrayCantHaveHoles, kArrayCanHaveHoles }; | 122 enum ArrayHasHoles { kArrayCantHaveHoles, kArrayCanHaveHoles }; |
76 enum CopyHint { kCopyUnknown, kCopyShort, kCopyLong }; | 123 enum CopyHint { kCopyUnknown, kCopyShort, kCopyLong }; |
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
205 LoadStoreOp op); | 252 LoadStoreOp op); |
206 | 253 |
207 // V8-specific load/store helpers. | 254 // V8-specific load/store helpers. |
208 void Load(const Register& rt, const MemOperand& addr, Representation r); | 255 void Load(const Register& rt, const MemOperand& addr, Representation r); |
209 void Store(const Register& rt, const MemOperand& addr, Representation r); | 256 void Store(const Register& rt, const MemOperand& addr, Representation r); |
210 | 257 |
211 // Remaining instructions are simple pass-through calls to the assembler. | 258 // Remaining instructions are simple pass-through calls to the assembler. |
212 inline void Adr(const Register& rd, Label* label); | 259 inline void Adr(const Register& rd, Label* label); |
213 inline void Asr(const Register& rd, const Register& rn, unsigned shift); | 260 inline void Asr(const Register& rd, const Register& rn, unsigned shift); |
214 inline void Asr(const Register& rd, const Register& rn, const Register& rm); | 261 inline void Asr(const Register& rd, const Register& rn, const Register& rm); |
| 262 |
| 263 // Branch type inversion relies on these relations. |
| 264 STATIC_ASSERT((reg_zero == (reg_not_zero ^ 1)) && |
| 265 (reg_bit_clear == (reg_bit_set ^ 1)) && |
| 266 (always == (never ^ 1))); |
| 267 |
| 268 void B(Label* label, BranchType type, Register reg = NoReg, int bit = -1); |
| 269 |
215 inline void B(Label* label); | 270 inline void B(Label* label); |
216 inline void B(Condition cond, Label* label); | 271 inline void B(Condition cond, Label* label); |
217 void B(Label* label, Condition cond); | 272 void B(Label* label, Condition cond); |
218 inline void Bfi(const Register& rd, | 273 inline void Bfi(const Register& rd, |
219 const Register& rn, | 274 const Register& rn, |
220 unsigned lsb, | 275 unsigned lsb, |
221 unsigned width); | 276 unsigned width); |
222 inline void Bfxil(const Register& rd, | 277 inline void Bfxil(const Register& rd, |
223 const Register& rn, | 278 const Register& rn, |
224 unsigned lsb, | 279 unsigned lsb, |
(...skipping 2035 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2260 #error "Unsupported option" | 2315 #error "Unsupported option" |
2261 #define CODE_COVERAGE_STRINGIFY(x) #x | 2316 #define CODE_COVERAGE_STRINGIFY(x) #x |
2262 #define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x) | 2317 #define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x) |
2263 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__) | 2318 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__) |
2264 #define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm-> | 2319 #define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm-> |
2265 #else | 2320 #else |
2266 #define ACCESS_MASM(masm) masm-> | 2321 #define ACCESS_MASM(masm) masm-> |
2267 #endif | 2322 #endif |
2268 | 2323 |
2269 #endif // V8_A64_MACRO_ASSEMBLER_A64_H_ | 2324 #endif // V8_A64_MACRO_ASSEMBLER_A64_H_ |
OLD | NEW |