| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ | 5 #ifndef V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ |
| 6 #define V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ | 6 #define V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| (...skipping 89 matching lines...) |
| 100 V(ArmVmovHighU32F64) \ | 100 V(ArmVmovHighU32F64) \ |
| 101 V(ArmVmovHighF64U32) \ | 101 V(ArmVmovHighF64U32) \ |
| 102 V(ArmVmovF64U32U32) \ | 102 V(ArmVmovF64U32U32) \ |
| 103 V(ArmVmovU32U32F64) \ | 103 V(ArmVmovU32U32F64) \ |
| 104 V(ArmVldrF32) \ | 104 V(ArmVldrF32) \ |
| 105 V(ArmVstrF32) \ | 105 V(ArmVstrF32) \ |
| 106 V(ArmVldrF64) \ | 106 V(ArmVldrF64) \ |
| 107 V(ArmVld1F64) \ | 107 V(ArmVld1F64) \ |
| 108 V(ArmVstrF64) \ | 108 V(ArmVstrF64) \ |
| 109 V(ArmVst1F64) \ | 109 V(ArmVst1F64) \ |
| | 110 V(ArmVld1S128) \ |
| | 111 V(ArmVst1S128) \ |
| 110 V(ArmFloat32Max) \ | 112 V(ArmFloat32Max) \ |
| 111 V(ArmFloat64Max) \ | 113 V(ArmFloat64Max) \ |
| 112 V(ArmFloat32Min) \ | 114 V(ArmFloat32Min) \ |
| 113 V(ArmFloat64Min) \ | 115 V(ArmFloat64Min) \ |
| 114 V(ArmFloat64SilenceNaN) \ | 116 V(ArmFloat64SilenceNaN) \ |
| 115 V(ArmLdrb) \ | 117 V(ArmLdrb) \ |
| 116 V(ArmLdrsb) \ | 118 V(ArmLdrsb) \ |
| 117 V(ArmStrb) \ | 119 V(ArmStrb) \ |
| 118 V(ArmLdrh) \ | 120 V(ArmLdrh) \ |
| 119 V(ArmLdrsh) \ | 121 V(ArmLdrsh) \ |
| (...skipping 86 matching lines...) |
| 206 V(ArmInt8x16LessThan) \ | 208 V(ArmInt8x16LessThan) \ |
| 207 V(ArmInt8x16LessThanOrEqual) \ | 209 V(ArmInt8x16LessThanOrEqual) \ |
| 208 V(ArmUint8x16ShiftRightByScalar) \ | 210 V(ArmUint8x16ShiftRightByScalar) \ |
| 209 V(ArmUint8x16AddSaturate) \ | 211 V(ArmUint8x16AddSaturate) \ |
| 210 V(ArmUint8x16SubSaturate) \ | 212 V(ArmUint8x16SubSaturate) \ |
| 211 V(ArmUint8x16Min) \ | 213 V(ArmUint8x16Min) \ |
| 212 V(ArmUint8x16Max) \ | 214 V(ArmUint8x16Max) \ |
| 213 V(ArmUint8x16LessThan) \ | 215 V(ArmUint8x16LessThan) \ |
| 214 V(ArmUint8x16LessThanOrEqual) \ | 216 V(ArmUint8x16LessThanOrEqual) \ |
| 215 V(ArmSimd128Zero) \ | 217 V(ArmSimd128Zero) \ |
| 216 V(ArmSimd128Load) \ | |
| 217 V(ArmSimd128Store) \ | |
| 218 V(ArmSimd128And) \ | 218 V(ArmSimd128And) \ |
| 219 V(ArmSimd128Or) \ | 219 V(ArmSimd128Or) \ |
| 220 V(ArmSimd128Xor) \ | 220 V(ArmSimd128Xor) \ |
| 221 V(ArmSimd128Not) \ | 221 V(ArmSimd128Not) \ |
| 222 V(ArmSimd128Select) \ | 222 V(ArmSimd128Select) \ |
| 223 V(ArmSimd1x4AnyTrue) \ | 223 V(ArmSimd1x4AnyTrue) \ |
| 224 V(ArmSimd1x4AllTrue) \ | 224 V(ArmSimd1x4AllTrue) \ |
| 225 V(ArmSimd1x8AnyTrue) \ | 225 V(ArmSimd1x8AnyTrue) \ |
| 226 V(ArmSimd1x8AllTrue) \ | 226 V(ArmSimd1x8AllTrue) \ |
| 227 V(ArmSimd1x16AnyTrue) \ | 227 V(ArmSimd1x16AnyTrue) \ |
| (...skipping 15 matching lines...) |
| 243 V(Operand2_R_ASR_R) /* %r0 ASR %r1 */ \ | 243 V(Operand2_R_ASR_R) /* %r0 ASR %r1 */ \ |
| 244 V(Operand2_R_LSL_R) /* %r0 LSL %r1 */ \ | 244 V(Operand2_R_LSL_R) /* %r0 LSL %r1 */ \ |
| 245 V(Operand2_R_LSR_R) /* %r0 LSR %r1 */ \ | 245 V(Operand2_R_LSR_R) /* %r0 LSR %r1 */ \ |
| 246 V(Operand2_R_ROR_R) /* %r0 ROR %r1 */ | 246 V(Operand2_R_ROR_R) /* %r0 ROR %r1 */ |
| 247 | 247 |
| 248 } // namespace compiler | 248 } // namespace compiler |
| 249 } // namespace internal | 249 } // namespace internal |
| 250 } // namespace v8 | 250 } // namespace v8 |
| 251 | 251 |
| 252 #endif // V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ | 252 #endif // V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_ |
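For context, the `V(...)` rows above are entries in an X-macro opcode list: the header defines the list once, and client code expands it with different definitions of `V` to generate the opcode enum and related tables. Below is a minimal sketch of that pattern, assuming hypothetical macro and entry names rather than the exact V8 definitions:

```cpp
// Minimal, self-contained sketch of the X-macro pattern used by this header.
// EXAMPLE_OPCODE_LIST and its three entries are illustrative placeholders,
// not the actual V8 TARGET_ARCH_OPCODE_LIST definition.
#include <iostream>

#define EXAMPLE_OPCODE_LIST(V) \
  V(ArmVld1S128)               \
  V(ArmVst1S128)               \
  V(ArmSimd128And)

// First expansion: generate an enumerator k<Name> for every list entry.
#define DECLARE_OPCODE(Name) k##Name,
enum ExampleOpcode { EXAMPLE_OPCODE_LIST(DECLARE_OPCODE) kOpcodeCount };
#undef DECLARE_OPCODE

// Second expansion: generate a parallel table of printable opcode names.
#define OPCODE_NAME(Name) #Name,
const char* const kOpcodeNames[] = {EXAMPLE_OPCODE_LIST(OPCODE_NAME)};
#undef OPCODE_NAME

int main() {
  // Prints "ArmVst1S128": the enum value indexes the name table.
  std::cout << kOpcodeNames[kArmVst1S128] << "\n";
  return 0;
}
```

With this pattern, adding entries such as `ArmVld1S128` and `ArmVst1S128` to the list (and dropping `ArmSimd128Load`/`ArmSimd128Store`) is enough for every expansion site to pick up the new opcodes automatically.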