OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_COMPILER_S390_INSTRUCTION_CODES_S390_H_ | 5 #ifndef V8_COMPILER_S390_INSTRUCTION_CODES_S390_H_ |
6 #define V8_COMPILER_S390_INSTRUCTION_CODES_S390_H_ | 6 #define V8_COMPILER_S390_INSTRUCTION_CODES_S390_H_ |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
11 | 11 |
12 // S390-specific opcodes that specify which assembly sequence to emit. | 12 // S390-specific opcodes that specify which assembly sequence to emit. |
13 // Most opcodes specify a single instruction. | 13 // Most opcodes specify a single instruction. |
14 #define TARGET_ARCH_OPCODE_LIST(V) \ | 14 #define TARGET_ARCH_OPCODE_LIST(V) \ |
15 V(S390_And) \ | 15 V(S390_And32) \ |
16 V(S390_Or) \ | 16 V(S390_And64) \ |
17 V(S390_Xor) \ | 17 V(S390_Or32) \ |
| 18 V(S390_Or64) \ |
| 19 V(S390_Xor32) \ |
| 20 V(S390_Xor64) \ |
18 V(S390_ShiftLeft32) \ | 21 V(S390_ShiftLeft32) \ |
19 V(S390_ShiftLeft64) \ | 22 V(S390_ShiftLeft64) \ |
20 V(S390_ShiftLeftPair) \ | 23 V(S390_ShiftLeftPair) \ |
21 V(S390_ShiftRight32) \ | 24 V(S390_ShiftRight32) \ |
22 V(S390_ShiftRight64) \ | 25 V(S390_ShiftRight64) \ |
23 V(S390_ShiftRightPair) \ | 26 V(S390_ShiftRightPair) \ |
24 V(S390_ShiftRightArith32) \ | 27 V(S390_ShiftRightArith32) \ |
25 V(S390_ShiftRightArith64) \ | 28 V(S390_ShiftRightArith64) \ |
26 V(S390_ShiftRightArithPair) \ | 29 V(S390_ShiftRightArithPair) \ |
27 V(S390_RotRight32) \ | 30 V(S390_RotRight32) \ |
28 V(S390_RotRight64) \ | 31 V(S390_RotRight64) \ |
29 V(S390_Not) \ | 32 V(S390_Not32) \ |
| 33 V(S390_Not64) \ |
30 V(S390_RotLeftAndMask32) \ | 34 V(S390_RotLeftAndMask32) \ |
31 V(S390_RotLeftAndClear64) \ | 35 V(S390_RotLeftAndClear64) \ |
32 V(S390_RotLeftAndClearLeft64) \ | 36 V(S390_RotLeftAndClearLeft64) \ |
33 V(S390_RotLeftAndClearRight64) \ | 37 V(S390_RotLeftAndClearRight64) \ |
34 V(S390_Add) \ | 38 V(S390_Add) \ |
35 V(S390_AddWithOverflow32) \ | 39 V(S390_AddWithOverflow32) \ |
36 V(S390_AddPair) \ | 40 V(S390_AddPair) \ |
37 V(S390_AddFloat) \ | 41 V(S390_AddFloat) \ |
38 V(S390_AddDouble) \ | 42 V(S390_AddDouble) \ |
39 V(S390_Sub) \ | 43 V(S390_Sub) \ |
(...skipping 121 matching lines...) |
161 // MRR = [register + register] | 165 // MRR = [register + register] |
162 #define TARGET_ADDRESSING_MODE_LIST(V) \ | 166 #define TARGET_ADDRESSING_MODE_LIST(V) \ |
163 V(MRI) /* [%r0 + K] */ \ | 167 V(MRI) /* [%r0 + K] */ \ |
164 V(MRR) /* [%r0 + %r1] */ | 168 V(MRR) /* [%r0 + %r1] */ |
165 | 169 |
166 } // namespace compiler | 170 } // namespace compiler |
167 } // namespace internal | 171 } // namespace internal |
168 } // namespace v8 | 172 } // namespace v8 |
169 | 173 |
170 #endif // V8_COMPILER_S390_INSTRUCTION_CODES_S390_H_ | 174 #endif // V8_COMPILER_S390_INSTRUCTION_CODES_S390_H_ |