OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ | 5 #ifndef V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ |
6 #define V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ | 6 #define V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
(...skipping 118 matching lines...)
129 V(ArmFloat32x4Neg) \ | 129 V(ArmFloat32x4Neg) \ |
130 V(ArmFloat32x4Add) \ | 130 V(ArmFloat32x4Add) \ |
131 V(ArmFloat32x4Sub) \ | 131 V(ArmFloat32x4Sub) \ |
132 V(ArmFloat32x4Eq) \ | 132 V(ArmFloat32x4Eq) \ |
133 V(ArmFloat32x4Ne) \ | 133 V(ArmFloat32x4Ne) \ |
134 V(ArmInt32x4Splat) \ | 134 V(ArmInt32x4Splat) \ |
135 V(ArmInt32x4ExtractLane) \ | 135 V(ArmInt32x4ExtractLane) \ |
136 V(ArmInt32x4ReplaceLane) \ | 136 V(ArmInt32x4ReplaceLane) \ |
137 V(ArmInt32x4FromFloat32x4) \ | 137 V(ArmInt32x4FromFloat32x4) \ |
138 V(ArmUint32x4FromFloat32x4) \ | 138 V(ArmUint32x4FromFloat32x4) \ |
| 139 V(ArmInt32x4Neg) \ |
139 V(ArmInt32x4Add) \ | 140 V(ArmInt32x4Add) \ |
140 V(ArmInt32x4Sub) \ | 141 V(ArmInt32x4Sub) \ |
| 142 V(ArmInt32x4Mul) \ |
| 143 V(ArmInt32x4Min) \ |
| 144 V(ArmInt32x4Max) \ |
141 V(ArmInt32x4Eq) \ | 145 V(ArmInt32x4Eq) \ |
142 V(ArmInt32x4Ne) \ | 146 V(ArmInt32x4Ne) \ |
143 V(ArmSimd32x4Select) | 147 V(ArmInt32x4Gt) \ |
| 148 V(ArmInt32x4Ge) \ |
| 149 V(ArmUint32x4Gt) \ |
| 150 V(ArmUint32x4Ge) \ |
| 151 V(ArmSimd32x4Select) \ |
| 152 V(ArmInt16x8Splat) \ |
| 153 V(ArmInt16x8ExtractLane) \ |
| 154 V(ArmInt16x8ReplaceLane) \ |
| 155 V(ArmInt16x8Neg) \ |
| 156 V(ArmInt16x8Add) \ |
| 157 V(ArmInt16x8Sub) \ |
| 158 V(ArmInt16x8Mul) \ |
| 159 V(ArmInt16x8Min) \ |
| 160 V(ArmInt16x8Max) \ |
| 161 V(ArmInt16x8Eq) \ |
| 162 V(ArmInt16x8Ne) \ |
| 163 V(ArmInt16x8Gt) \ |
| 164 V(ArmInt16x8Ge) \ |
| 165 V(ArmUint16x8Gt) \ |
| 166 V(ArmUint16x8Ge) \ |
| 167 V(ArmInt8x16Splat) \ |
| 168 V(ArmInt8x16ExtractLane) \ |
| 169 V(ArmInt8x16ReplaceLane) \ |
| 170 V(ArmInt8x16Neg) \ |
| 171 V(ArmInt8x16Add) \ |
| 172 V(ArmInt8x16Sub) \ |
| 173 V(ArmInt8x16Mul) \ |
| 174 V(ArmInt8x16Min) \ |
| 175 V(ArmInt8x16Max) \ |
| 176 V(ArmInt8x16Eq) \ |
| 177 V(ArmInt8x16Ne) \ |
| 178 V(ArmInt8x16Gt) \ |
| 179 V(ArmInt8x16Ge) \ |
| 180 V(ArmUint8x16Gt) \ |
| 181 V(ArmUint8x16Ge) |
144 | 182 |
145 // Addressing modes represent the "shape" of inputs to an instruction. | 183 // Addressing modes represent the "shape" of inputs to an instruction. |
146 // Many instructions support multiple addressing modes. Addressing modes | 184 // Many instructions support multiple addressing modes. Addressing modes |
147 // are encoded into the InstructionCode of the instruction and tell the | 185 // are encoded into the InstructionCode of the instruction and tell the |
148 // code generator after register allocation which assembler method to call. | 186 // code generator after register allocation which assembler method to call. |
149 #define TARGET_ADDRESSING_MODE_LIST(V) \ | 187 #define TARGET_ADDRESSING_MODE_LIST(V) \ |
150 V(Offset_RI) /* [%r0 + K] */ \ | 188 V(Offset_RI) /* [%r0 + K] */ \ |
151 V(Offset_RR) /* [%r0 + %r1] */ \ | 189 V(Offset_RR) /* [%r0 + %r1] */ \ |
152 V(Operand2_I) /* K */ \ | 190 V(Operand2_I) /* K */ \ |
153 V(Operand2_R) /* %r0 */ \ | 191 V(Operand2_R) /* %r0 */ \ |
154 V(Operand2_R_ASR_I) /* %r0 ASR K */ \ | 192 V(Operand2_R_ASR_I) /* %r0 ASR K */ \ |
155 V(Operand2_R_LSL_I) /* %r0 LSL K */ \ | 193 V(Operand2_R_LSL_I) /* %r0 LSL K */ \ |
156 V(Operand2_R_LSR_I) /* %r0 LSR K */ \ | 194 V(Operand2_R_LSR_I) /* %r0 LSR K */ \ |
157 V(Operand2_R_ROR_I) /* %r0 ROR K */ \ | 195 V(Operand2_R_ROR_I) /* %r0 ROR K */ \ |
158 V(Operand2_R_ASR_R) /* %r0 ASR %r1 */ \ | 196 V(Operand2_R_ASR_R) /* %r0 ASR %r1 */ \ |
159 V(Operand2_R_LSL_R) /* %r0 LSL %r1 */ \ | 197 V(Operand2_R_LSL_R) /* %r0 LSL %r1 */ \ |
160 V(Operand2_R_LSR_R) /* %r0 LSR %r1 */ \ | 198 V(Operand2_R_LSR_R) /* %r0 LSR %r1 */ \ |
161 V(Operand2_R_ROR_R) /* %r0 ROR %r1 */ | 199 V(Operand2_R_ROR_R) /* %r0 ROR %r1 */ |
162 | 200 |
163 } // namespace compiler | 201 } // namespace compiler |
164 } // namespace internal | 202 } // namespace internal |
165 } // namespace v8 | 203 } // namespace v8 |
166 | 204 |
167 #endif // V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ | 205 #endif // V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ |
OLD | NEW |