| OLD | NEW |
| --- | --- |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 5 #ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
| 6 #define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 6 #define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| 11 | 11 |
| 12 // ARM64-specific opcodes that specify which assembly sequence to emit. | 12 // ARM64-specific opcodes that specify which assembly sequence to emit. |
| 13 // Most opcodes specify a single instruction. | 13 // Most opcodes specify a single instruction. |
| 14 #define TARGET_ARCH_OPCODE_LIST(V) \ | 14 #define TARGET_ARCH_OPCODE_LIST(V) \ |
| 15 V(Arm64Add) \ | 15 V(Arm64Add) \ |
| 16 V(Arm64Add32) \ | 16 V(Arm64Add32) \ |
| 17 V(Arm64And) \ | 17 V(Arm64And) \ |
| 18 V(Arm64And32) \ | 18 V(Arm64And32) \ |
| 19 V(Arm64Bic) \ |
| 20 V(Arm64Bic32) \ |
| 19 V(Arm64Cmp) \ | 21 V(Arm64Cmp) \ |
| 20 V(Arm64Cmp32) \ | 22 V(Arm64Cmp32) \ |
| 21 V(Arm64Cmn) \ | 23 V(Arm64Cmn) \ |
| 22 V(Arm64Cmn32) \ | 24 V(Arm64Cmn32) \ |
| 23 V(Arm64Tst) \ | 25 V(Arm64Tst) \ |
| 24 V(Arm64Tst32) \ | 26 V(Arm64Tst32) \ |
| 25 V(Arm64Or) \ | 27 V(Arm64Or) \ |
| 26 V(Arm64Or32) \ | 28 V(Arm64Or32) \ |
| 27 V(Arm64Xor) \ | 29 V(Arm64Orn) \ |
| 28 V(Arm64Xor32) \ | 30 V(Arm64Orn32) \ |
| 31 V(Arm64Eor) \ |
| 32 V(Arm64Eor32) \ |
| 33 V(Arm64Eon) \ |
| 34 V(Arm64Eon32) \ |
| 29 V(Arm64Sub) \ | 35 V(Arm64Sub) \ |
| 30 V(Arm64Sub32) \ | 36 V(Arm64Sub32) \ |
| 31 V(Arm64Mul) \ | 37 V(Arm64Mul) \ |
| 32 V(Arm64Mul32) \ | 38 V(Arm64Mul32) \ |
| 33 V(Arm64Idiv) \ | 39 V(Arm64Idiv) \ |
| 34 V(Arm64Idiv32) \ | 40 V(Arm64Idiv32) \ |
| 35 V(Arm64Udiv) \ | 41 V(Arm64Udiv) \ |
| 36 V(Arm64Udiv32) \ | 42 V(Arm64Udiv32) \ |
| 37 V(Arm64Imod) \ | 43 V(Arm64Imod) \ |
| 38 V(Arm64Imod32) \ | 44 V(Arm64Imod32) \ |
| (...skipping 59 matching lines...) | (...skipping 59 matching lines...) |
| 98 // MRR = [register + register] | 104 // MRR = [register + register] |
| 99 #define TARGET_ADDRESSING_MODE_LIST(V) \ | 105 #define TARGET_ADDRESSING_MODE_LIST(V) \ |
| 100 V(MRI) /* [%r0 + K] */ \ | 106 V(MRI) /* [%r0 + K] */ \ |
| 101 V(MRR) /* [%r0 + %r1] */ | 107 V(MRR) /* [%r0 + %r1] */ |
| 102 | 108 |
| 103 } // namespace compiler | 109 } // namespace compiler |
| 104 } // namespace internal | 110 } // namespace internal |
| 105 } // namespace v8 | 111 } // namespace v8 |
| 106 | 112 |
| 107 #endif // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 113 #endif // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
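The substance of this diff is wider logical-opcode coverage: Arm64Xor/Arm64Xor32 are renamed to Arm64Eor/Arm64Eor32 to match the ARM64 mnemonic for exclusive-or, and opcodes for the inverted-second-operand forms BIC, ORN, and EON are added. Their semantics, expressed as a plain C++ sketch (the helper names are illustrative, not V8 code):

```cpp
#include <cstdint>

// ARM64 logical operations with an inverted second operand, shown on the
// 32-bit forms; the 64-bit opcodes behave identically on uint64_t.
uint32_t Bic32(uint32_t a, uint32_t b) { return a & ~b; }  // bit clear
uint32_t Orn32(uint32_t a, uint32_t b) { return a | ~b; }  // or-not
uint32_t Eor32(uint32_t a, uint32_t b) { return a ^ b; }   // exclusive or
uint32_t Eon32(uint32_t a, uint32_t b) { return a ^ ~b; }  // exclusive or-not
```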
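TARGET_ARCH_OPCODE_LIST is an X-macro: it applies a caller-supplied macro V to every opcode, so the single list above can generate an enum, a name table, switch cases, and so on. A minimal, self-contained sketch of the pattern (the DEMO_* names and three-entry list are illustrative, not V8's actual consumers):

```cpp
#include <cstdio>

// Illustrative stand-in for TARGET_ARCH_OPCODE_LIST.
#define DEMO_OPCODE_LIST(V) \
  V(Arm64Add)               \
  V(Arm64Add32)             \
  V(Arm64And)

// First expansion: one enumerator per opcode.
enum DemoOpcode {
#define DECLARE_OPCODE(Name) k##Name,
  DEMO_OPCODE_LIST(DECLARE_OPCODE)
#undef DECLARE_OPCODE
  kDemoOpcodeCount
};

// Second expansion: a parallel table of printable names.
static const char* const kDemoOpcodeNames[] = {
#define OPCODE_NAME(Name) #Name,
    DEMO_OPCODE_LIST(OPCODE_NAME)
#undef OPCODE_NAME
};

int main() {
  for (int i = 0; i < kDemoOpcodeCount; ++i) {
    std::printf("%d -> %s\n", i, kDemoOpcodeNames[i]);
  }
  return 0;
}
```

V8's real consumers live in the shared instruction-codes machinery, which expands the target list together with the architecture-independent opcodes; the sketch only shows the shape of that expansion.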
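TARGET_ADDRESSING_MODE_LIST names the two operand "shapes" a memory access can take on this backend: MRI, a base register plus a constant offset, and MRR, a base register plus an index register. In ARM64 assembly these correspond to forms like `ldr x0, [x1, #8]` versus `ldr x0, [x1, x2]`. A hedged sketch of how a selector might pick between them (the types and helper are hypothetical, not V8's API):

```cpp
#include <cstdint>
#include <optional>

// Hypothetical types mirroring the MRI/MRR split; none of these names are V8's.
enum class AddressingMode { kMRI, kMRR };  // [%r0 + K] vs. [%r0 + %r1]

struct MemOperand {
  AddressingMode mode;
  int base_reg;                   // always present
  std::optional<int64_t> offset;  // set for kMRI
  std::optional<int> index_reg;   // set for kMRR
};

// If the index is a constant that fits the instruction's immediate field,
// fold it into the operand (MRI); otherwise keep it in a register (MRR).
// The 0..4095 range is illustrative; real limits depend on the access size.
MemOperand SelectMemOperand(int base_reg, bool index_is_constant,
                            int64_t constant, int index_reg) {
  if (index_is_constant && constant >= 0 && constant < 4096) {
    return {AddressingMode::kMRI, base_reg, constant, std::nullopt};
  }
  return {AddressingMode::kMRR, base_reg, std::nullopt, index_reg};
}
```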