OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 5 #ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
6 #define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 6 #define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
11 | 11 |
12 // ARM64-specific opcodes that specify which assembly sequence to emit. | 12 // ARM64-specific opcodes that specify which assembly sequence to emit. |
13 // Most opcodes specify a single instruction. | 13 // Most opcodes specify a single instruction. |
14 #define TARGET_ARCH_OPCODE_LIST(V) \ | 14 #define TARGET_ARCH_OPCODE_LIST(V) \ |
15 V(Arm64Add) \ | 15 V(Arm64Add, kNoOpcodeFlags) \ |
16 V(Arm64Add32) \ | 16 V(Arm64Add32, kNoOpcodeFlags) \ |
17 V(Arm64And) \ | 17 V(Arm64And, kNoOpcodeFlags) \ |
18 V(Arm64And32) \ | 18 V(Arm64And32, kNoOpcodeFlags) \ |
19 V(Arm64Bic) \ | 19 V(Arm64Bic, kNoOpcodeFlags) \ |
20 V(Arm64Bic32) \ | 20 V(Arm64Bic32, kNoOpcodeFlags) \ |
21 V(Arm64Clz32) \ | 21 V(Arm64Clz32, kNoOpcodeFlags) \ |
22 V(Arm64Cmp) \ | 22 V(Arm64Cmp, kNoOpcodeFlags) \ |
23 V(Arm64Cmp32) \ | 23 V(Arm64Cmp32, kNoOpcodeFlags) \ |
24 V(Arm64Cmn) \ | 24 V(Arm64Cmn, kNoOpcodeFlags) \ |
25 V(Arm64Cmn32) \ | 25 V(Arm64Cmn32, kNoOpcodeFlags) \ |
26 V(Arm64Tst) \ | 26 V(Arm64Tst, kNoOpcodeFlags) \ |
27 V(Arm64Tst32) \ | 27 V(Arm64Tst32, kNoOpcodeFlags) \ |
28 V(Arm64Or) \ | 28 V(Arm64Or, kNoOpcodeFlags) \ |
29 V(Arm64Or32) \ | 29 V(Arm64Or32, kNoOpcodeFlags) \ |
30 V(Arm64Orn) \ | 30 V(Arm64Orn, kNoOpcodeFlags) \ |
31 V(Arm64Orn32) \ | 31 V(Arm64Orn32, kNoOpcodeFlags) \ |
32 V(Arm64Eor) \ | 32 V(Arm64Eor, kNoOpcodeFlags) \ |
33 V(Arm64Eor32) \ | 33 V(Arm64Eor32, kNoOpcodeFlags) \ |
34 V(Arm64Eon) \ | 34 V(Arm64Eon, kNoOpcodeFlags) \ |
35 V(Arm64Eon32) \ | 35 V(Arm64Eon32, kNoOpcodeFlags) \ |
36 V(Arm64Sub) \ | 36 V(Arm64Sub, kNoOpcodeFlags) \ |
37 V(Arm64Sub32) \ | 37 V(Arm64Sub32, kNoOpcodeFlags) \ |
38 V(Arm64Mul) \ | 38 V(Arm64Mul, kNoOpcodeFlags) \ |
39 V(Arm64Mul32) \ | 39 V(Arm64Mul32, kNoOpcodeFlags) \ |
40 V(Arm64Smull) \ | 40 V(Arm64Smull, kNoOpcodeFlags) \ |
41 V(Arm64Umull) \ | 41 V(Arm64Umull, kNoOpcodeFlags) \ |
42 V(Arm64Madd) \ | 42 V(Arm64Madd, kNoOpcodeFlags) \ |
43 V(Arm64Madd32) \ | 43 V(Arm64Madd32, kNoOpcodeFlags) \ |
44 V(Arm64Msub) \ | 44 V(Arm64Msub, kNoOpcodeFlags) \ |
45 V(Arm64Msub32) \ | 45 V(Arm64Msub32, kNoOpcodeFlags) \ |
46 V(Arm64Mneg) \ | 46 V(Arm64Mneg, kNoOpcodeFlags) \ |
47 V(Arm64Mneg32) \ | 47 V(Arm64Mneg32, kNoOpcodeFlags) \ |
48 V(Arm64Idiv) \ | 48 V(Arm64Idiv, kNoOpcodeFlags) \ |
49 V(Arm64Idiv32) \ | 49 V(Arm64Idiv32, kNoOpcodeFlags) \ |
50 V(Arm64Udiv) \ | 50 V(Arm64Udiv, kNoOpcodeFlags) \ |
51 V(Arm64Udiv32) \ | 51 V(Arm64Udiv32, kNoOpcodeFlags) \ |
52 V(Arm64Imod) \ | 52 V(Arm64Imod, kNoOpcodeFlags) \ |
53 V(Arm64Imod32) \ | 53 V(Arm64Imod32, kNoOpcodeFlags) \ |
54 V(Arm64Umod) \ | 54 V(Arm64Umod, kNoOpcodeFlags) \ |
55 V(Arm64Umod32) \ | 55 V(Arm64Umod32, kNoOpcodeFlags) \ |
56 V(Arm64Not) \ | 56 V(Arm64Not, kNoOpcodeFlags) \ |
57 V(Arm64Not32) \ | 57 V(Arm64Not32, kNoOpcodeFlags) \ |
58 V(Arm64Neg) \ | 58 V(Arm64Neg, kNoOpcodeFlags) \ |
59 V(Arm64Neg32) \ | 59 V(Arm64Neg32, kNoOpcodeFlags) \ |
60 V(Arm64Lsl) \ | 60 V(Arm64Lsl, kNoOpcodeFlags) \ |
61 V(Arm64Lsl32) \ | 61 V(Arm64Lsl32, kNoOpcodeFlags) \ |
62 V(Arm64Lsr) \ | 62 V(Arm64Lsr, kNoOpcodeFlags) \ |
63 V(Arm64Lsr32) \ | 63 V(Arm64Lsr32, kNoOpcodeFlags) \ |
64 V(Arm64Asr) \ | 64 V(Arm64Asr, kNoOpcodeFlags) \ |
65 V(Arm64Asr32) \ | 65 V(Arm64Asr32, kNoOpcodeFlags) \ |
66 V(Arm64Ror) \ | 66 V(Arm64Ror, kNoOpcodeFlags) \ |
67 V(Arm64Ror32) \ | 67 V(Arm64Ror32, kNoOpcodeFlags) \ |
68 V(Arm64Mov32) \ | 68 V(Arm64Mov32, kNoOpcodeFlags) \ |
69 V(Arm64Sxtb32) \ | 69 V(Arm64Sxtb32, kNoOpcodeFlags) \ |
70 V(Arm64Sxth32) \ | 70 V(Arm64Sxth32, kNoOpcodeFlags) \ |
71 V(Arm64Sxtw) \ | 71 V(Arm64Sxtw, kNoOpcodeFlags) \ |
72 V(Arm64Sbfx32) \ | 72 V(Arm64Sbfx32, kNoOpcodeFlags) \ |
73 V(Arm64Ubfx) \ | 73 V(Arm64Ubfx, kNoOpcodeFlags) \ |
74 V(Arm64Ubfx32) \ | 74 V(Arm64Ubfx32, kNoOpcodeFlags) \ |
75 V(Arm64Ubfiz32) \ | 75 V(Arm64Ubfiz32, kNoOpcodeFlags) \ |
76 V(Arm64Bfi) \ | 76 V(Arm64Bfi, kNoOpcodeFlags) \ |
77 V(Arm64TestAndBranch32) \ | 77 V(Arm64TestAndBranch32, kIsBlockTerminator) \ |
78 V(Arm64TestAndBranch) \ | 78 V(Arm64TestAndBranch, kIsBlockTerminator) \ |
79 V(Arm64CompareAndBranch32) \ | 79 V(Arm64CompareAndBranch32, kIsBlockTerminator) \ |
80 V(Arm64Claim) \ | 80 V(Arm64Claim, kHasSideEffect) \ |
81 V(Arm64Poke) \ | 81 V(Arm64Poke, kHasSideEffect) \ |
82 V(Arm64PokePair) \ | 82 V(Arm64PokePair, kHasSideEffect) \ |
83 V(Arm64Float32Cmp) \ | 83 V(Arm64Float32Cmp, kNoOpcodeFlags) \ |
84 V(Arm64Float32Add) \ | 84 V(Arm64Float32Add, kNoOpcodeFlags) \ |
85 V(Arm64Float32Sub) \ | 85 V(Arm64Float32Sub, kNoOpcodeFlags) \ |
86 V(Arm64Float32Mul) \ | 86 V(Arm64Float32Mul, kNoOpcodeFlags) \ |
87 V(Arm64Float32Div) \ | 87 V(Arm64Float32Div, kNoOpcodeFlags) \ |
88 V(Arm64Float32Max) \ | 88 V(Arm64Float32Max, kNoOpcodeFlags) \ |
89 V(Arm64Float32Min) \ | 89 V(Arm64Float32Min, kNoOpcodeFlags) \ |
90 V(Arm64Float32Abs) \ | 90 V(Arm64Float32Abs, kNoOpcodeFlags) \ |
91 V(Arm64Float32Sqrt) \ | 91 V(Arm64Float32Sqrt, kNoOpcodeFlags) \ |
92 V(Arm64Float64Cmp) \ | 92 V(Arm64Float64Cmp, kNoOpcodeFlags) \ |
93 V(Arm64Float64Add) \ | 93 V(Arm64Float64Add, kNoOpcodeFlags) \ |
94 V(Arm64Float64Sub) \ | 94 V(Arm64Float64Sub, kNoOpcodeFlags) \ |
95 V(Arm64Float64Mul) \ | 95 V(Arm64Float64Mul, kNoOpcodeFlags) \ |
96 V(Arm64Float64Div) \ | 96 V(Arm64Float64Div, kNoOpcodeFlags) \ |
97 V(Arm64Float64Mod) \ | 97 V(Arm64Float64Mod, kNoOpcodeFlags) \ |
98 V(Arm64Float64Max) \ | 98 V(Arm64Float64Max, kNoOpcodeFlags) \ |
99 V(Arm64Float64Min) \ | 99 V(Arm64Float64Min, kNoOpcodeFlags) \ |
100 V(Arm64Float64Abs) \ | 100 V(Arm64Float64Abs, kNoOpcodeFlags) \ |
101 V(Arm64Float64Neg) \ | 101 V(Arm64Float64Neg, kNoOpcodeFlags) \ |
102 V(Arm64Float64Sqrt) \ | 102 V(Arm64Float64Sqrt, kNoOpcodeFlags) \ |
103 V(Arm64Float64RoundDown) \ | 103 V(Arm64Float64RoundDown, kNoOpcodeFlags) \ |
104 V(Arm64Float64RoundTiesAway) \ | 104 V(Arm64Float64RoundTiesAway, kNoOpcodeFlags) \ |
105 V(Arm64Float64RoundTruncate) \ | 105 V(Arm64Float64RoundTruncate, kNoOpcodeFlags) \ |
106 V(Arm64Float64RoundUp) \ | 106 V(Arm64Float64RoundUp, kNoOpcodeFlags) \ |
107 V(Arm64Float32ToFloat64) \ | 107 V(Arm64Float32ToFloat64, kNoOpcodeFlags) \ |
108 V(Arm64Float64ToFloat32) \ | 108 V(Arm64Float64ToFloat32, kNoOpcodeFlags) \ |
109 V(Arm64Float64ToInt32) \ | 109 V(Arm64Float64ToInt32, kNoOpcodeFlags) \ |
110 V(Arm64Float64ToUint32) \ | 110 V(Arm64Float64ToUint32, kNoOpcodeFlags) \ |
111 V(Arm64Int32ToFloat64) \ | 111 V(Arm64Int32ToFloat64, kNoOpcodeFlags) \ |
112 V(Arm64Uint32ToFloat64) \ | 112 V(Arm64Uint32ToFloat64, kNoOpcodeFlags) \ |
113 V(Arm64Float64ExtractLowWord32) \ | 113 V(Arm64Float64ExtractLowWord32, kNoOpcodeFlags) \ |
114 V(Arm64Float64ExtractHighWord32) \ | 114 V(Arm64Float64ExtractHighWord32, kNoOpcodeFlags) \ |
115 V(Arm64Float64InsertLowWord32) \ | 115 V(Arm64Float64InsertLowWord32, kNoOpcodeFlags) \ |
116 V(Arm64Float64InsertHighWord32) \ | 116 V(Arm64Float64InsertHighWord32, kNoOpcodeFlags) \ |
117 V(Arm64Float64MoveU64) \ | 117 V(Arm64Float64MoveU64, kNoOpcodeFlags) \ |
118 V(Arm64U64MoveFloat64) \ | 118 V(Arm64U64MoveFloat64, kNoOpcodeFlags) \ |
119 V(Arm64LdrS) \ | 119 V(Arm64LdrS, kIsLoadOperation) \ |
120 V(Arm64StrS) \ | 120 V(Arm64StrS, kHasSideEffect) \ |
121 V(Arm64LdrD) \ | 121 V(Arm64LdrD, kIsLoadOperation) \ |
122 V(Arm64StrD) \ | 122 V(Arm64StrD, kHasSideEffect) \ |
123 V(Arm64Ldrb) \ | 123 V(Arm64Ldrb, kIsLoadOperation) \ |
124 V(Arm64Ldrsb) \ | 124 V(Arm64Ldrsb, kIsLoadOperation) \ |
125 V(Arm64Strb) \ | 125 V(Arm64Strb, kHasSideEffect) \ |
126 V(Arm64Ldrh) \ | 126 V(Arm64Ldrh, kIsLoadOperation) \ |
127 V(Arm64Ldrsh) \ | 127 V(Arm64Ldrsh, kIsLoadOperation) \ |
128 V(Arm64Strh) \ | 128 V(Arm64Strh, kHasSideEffect) \ |
129 V(Arm64LdrW) \ | 129 V(Arm64LdrW, kIsLoadOperation) \ |
130 V(Arm64StrW) \ | 130 V(Arm64StrW, kHasSideEffect) \ |
131 V(Arm64Ldr) \ | 131 V(Arm64Ldr, kIsLoadOperation) \ |
132 V(Arm64Str) \ | 132 V(Arm64Str, kHasSideEffect) \ |
133 V(Arm64StoreWriteBarrier) | 133 V(Arm64StoreWriteBarrier, kHasSideEffect) |
Jarin
2015/10/26 15:12:35
As discussed offline, it would be better the flags
baptiste.afsa1
2015/10/27 16:00:23
Done.
| |
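For readers unfamiliar with the pattern under review: the NEW side attaches a flag to each opcode in the `TARGET_ARCH_OPCODE_LIST` X-macro. Below is a minimal, self-contained sketch (not V8's actual code; the list is trimmed and the flag bit values are assumptions) of how such a list can be expanded twice, once into the opcode enum and once into a parallel flags table that a scheduler could query.

```cpp
#include <cstdio>

// Hypothetical flag bits; the names mirror those used in the diff above.
enum OpcodeFlag {
  kNoOpcodeFlags     = 0,
  kIsBlockTerminator = 1 << 0,
  kHasSideEffect     = 1 << 1,
  kIsLoadOperation   = 1 << 2,
};

// Trimmed-down stand-in for TARGET_ARCH_OPCODE_LIST.
#define SAMPLE_OPCODE_LIST(V)               \
  V(Arm64Add, kNoOpcodeFlags)               \
  V(Arm64Ldr, kIsLoadOperation)             \
  V(Arm64Str, kHasSideEffect)               \
  V(Arm64TestAndBranch, kIsBlockTerminator)

// First expansion: the opcode enum.
enum ArchOpcode {
#define DECLARE_OPCODE(Name, flags) k##Name,
  SAMPLE_OPCODE_LIST(DECLARE_OPCODE)
#undef DECLARE_OPCODE
  kOpcodeCount
};

// Second expansion: a flags table indexed by opcode.
static const int kOpcodeFlags[] = {
#define DECLARE_FLAGS(Name, flags) flags,
  SAMPLE_OPCODE_LIST(DECLARE_FLAGS)
#undef DECLARE_FLAGS
};

int main() {
  // A scheduler could, for example, ask whether an opcode is a load.
  std::printf("Arm64Ldr is load: %d\n",
              (kOpcodeFlags[kArm64Ldr] & kIsLoadOperation) != 0);
  return 0;
}
```

The review comment above suggests the flags belong elsewhere, so this sketch only illustrates the per-opcode-flag pattern shown in this patch set, not the final shape of the change.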
134 | 134 |
135 | 135 |
136 // Addressing modes represent the "shape" of inputs to an instruction. | 136 // Addressing modes represent the "shape" of inputs to an instruction. |
137 // Many instructions support multiple addressing modes. Addressing modes | 137 // Many instructions support multiple addressing modes. Addressing modes |
138 // are encoded into the InstructionCode of the instruction and tell the | 138 // are encoded into the InstructionCode of the instruction and tell the |
139 // code generator after register allocation which assembler method to call. | 139 // code generator after register allocation which assembler method to call. |
140 // | 140 // |
141 // We use the following local notation for addressing modes: | 141 // We use the following local notation for addressing modes: |
142 // | 142 // |
143 // R = register | 143 // R = register |
(...skipping 12 matching lines...) | |
156 V(Operand2_R_UXTB) /* %r0 UXTB (unsigned extend byte) */ \ | 156 V(Operand2_R_UXTB) /* %r0 UXTB (unsigned extend byte) */ \ |
157 V(Operand2_R_UXTH) /* %r0 UXTH (unsigned extend halfword) */ \ | 157 V(Operand2_R_UXTH) /* %r0 UXTH (unsigned extend halfword) */ \ |
158 V(Operand2_R_SXTB) /* %r0 SXTB (signed extend byte) */ \ | 158 V(Operand2_R_SXTB) /* %r0 SXTB (signed extend byte) */ \ |
159 V(Operand2_R_SXTH) /* %r0 SXTH (signed extend halfword) */ | 159 V(Operand2_R_SXTH) /* %r0 SXTH (signed extend halfword) */ |
160 | 160 |
161 } // namespace internal | 161 } // namespace internal |
162 } // namespace compiler | 162 } // namespace compiler |
163 } // namespace v8 | 163 } // namespace v8 |
164 | 164 |
165 #endif // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 165 #endif // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
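The comment block above notes that addressing modes are encoded into the InstructionCode alongside the opcode. As a minimal sketch (the bit layout below is hypothetical, not V8's actual encoding), packing and unpacking could look like this:

```cpp
#include <cstdint>

using InstructionCode = uint32_t;

// Hypothetical field widths: low 8 bits for the opcode, next bits for the mode.
constexpr int kOpcodeBits = 8;
constexpr int kAddressingModeShift = kOpcodeBits;

constexpr InstructionCode Encode(uint32_t opcode, uint32_t addressing_mode) {
  return opcode | (addressing_mode << kAddressingModeShift);
}

constexpr uint32_t OpcodeOf(InstructionCode code) {
  return code & ((1u << kOpcodeBits) - 1);
}

constexpr uint32_t AddressingModeOf(InstructionCode code) {
  return code >> kAddressingModeShift;
}

static_assert(OpcodeOf(Encode(42, 3)) == 42, "opcode round-trips");
static_assert(AddressingModeOf(Encode(42, 3)) == 3, "mode round-trips");
```

After register allocation, the code generator can switch on `AddressingModeOf(code)` to pick the assembler method, which is the role the comment describes for these modes.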