OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 5 #ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
6 #define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 6 #define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
(...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
123 // code generator after register allocation which assembler method to call. | 123 // code generator after register allocation which assembler method to call. |
124 // | 124 // |
125 // We use the following local notation for addressing modes: | 125 // We use the following local notation for addressing modes: |
126 // | 126 // |
127 // R = register | 127 // R = register |
128 // O = register or stack slot | 128 // O = register or stack slot |
129 // D = double register | 129 // D = double register |
130 // I = immediate (handle, external, int32) | 130 // I = immediate (handle, external, int32) |
131 // MRI = [register + immediate] | 131 // MRI = [register + immediate] |
132 // MRR = [register + register] | 132 // MRR = [register + register] |
133 #define TARGET_ADDRESSING_MODE_LIST(V) \ | 133 #define TARGET_ADDRESSING_MODE_LIST(V) \ |
134 V(MRI) /* [%r0 + K] */ \ | 134 V(MRI) /* [%r0 + K] */ \ |
135 V(MRR) /* [%r0 + %r1] */ \ | 135 V(MRR) /* [%r0 + %r1] */ \ |
136 V(Operand2_R_LSL_I) /* %r0 LSL K */ \ | 136 V(Operand2_R_LSL_I) /* %r0 LSL K */ \ |
137 V(Operand2_R_LSR_I) /* %r0 LSR K */ \ | 137 V(Operand2_R_LSR_I) /* %r0 LSR K */ \ |
138 V(Operand2_R_ASR_I) /* %r0 ASR K */ \ | 138 V(Operand2_R_ASR_I) /* %r0 ASR K */ \ |
139 V(Operand2_R_ROR_I) /* %r0 ROR K */ | 139 V(Operand2_R_ROR_I) /* %r0 ROR K */ \ |
| 140 V(Operand2_R_UXTB) /* %r0 UXTB (unsigned extend byte) */ \ |
| 141 V(Operand2_R_UXTH) /* %r0 UXTH (unsigned extend halfword) */ |
140 | 142 |
141 } // namespace compiler | 143 } // namespace compiler |
142 } // namespace internal | 144 } // namespace internal |
143 } // namespace v8 | 145 } // namespace v8 |
144 | 146 |
145 #endif // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 147 #endif // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
OLD | NEW |