OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ | 5 #ifndef V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ |
6 #define V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ | 6 #define V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
(...skipping 114 matching lines...)
125 V(X64Movsxwl) \ | 125 V(X64Movsxwl) \ |
126 V(X64Movzxwl) \ | 126 V(X64Movzxwl) \ |
127 V(X64Movsxwq) \ | 127 V(X64Movsxwq) \ |
128 V(X64Movzxwq) \ | 128 V(X64Movzxwq) \ |
129 V(X64Movw) \ | 129 V(X64Movw) \ |
130 V(X64Movl) \ | 130 V(X64Movl) \ |
131 V(X64Movsxlq) \ | 131 V(X64Movsxlq) \ |
132 V(X64Movq) \ | 132 V(X64Movq) \ |
133 V(X64Movsd) \ | 133 V(X64Movsd) \ |
134 V(X64Movss) \ | 134 V(X64Movss) \ |
| 135 V(X64Movdqu) \ |
135 V(X64BitcastFI) \ | 136 V(X64BitcastFI) \ |
136 V(X64BitcastDL) \ | 137 V(X64BitcastDL) \ |
137 V(X64BitcastIF) \ | 138 V(X64BitcastIF) \ |
138 V(X64BitcastLD) \ | 139 V(X64BitcastLD) \ |
139 V(X64Lea32) \ | 140 V(X64Lea32) \ |
140 V(X64Lea) \ | 141 V(X64Lea) \ |
141 V(X64Dec32) \ | 142 V(X64Dec32) \ |
142 V(X64Inc32) \ | 143 V(X64Inc32) \ |
143 V(X64Push) \ | 144 V(X64Push) \ |
144 V(X64Poke) \ | 145 V(X64Poke) \ |
145 V(X64StackCheck) \ | 146 V(X64StackCheck) \ |
146 V(X64I32x4Splat) \ | 147 V(X64I32x4Splat) \ |
147 V(X64I32x4ExtractLane) \ | 148 V(X64I32x4ExtractLane) \ |
148 V(X64I32x4ReplaceLane) \ | 149 V(X64I32x4ReplaceLane) \ |
149 V(X64I32x4Shl) \ | 150 V(X64I32x4Shl) \ |
150 V(X64I32x4ShrS) \ | 151 V(X64I32x4ShrS) \ |
151 V(X64I32x4Add) \ | 152 V(X64I32x4Add) \ |
| 153 V(X64I32x4AddHoriz) \ |
152 V(X64I32x4Sub) \ | 154 V(X64I32x4Sub) \ |
153 V(X64I32x4Mul) \ | 155 V(X64I32x4Mul) \ |
154 V(X64I32x4MinS) \ | 156 V(X64I32x4MinS) \ |
155 V(X64I32x4MaxS) \ | 157 V(X64I32x4MaxS) \ |
156 V(X64I32x4Eq) \ | 158 V(X64I32x4Eq) \ |
157 V(X64I32x4Ne) \ | 159 V(X64I32x4Ne) \ |
158 V(X64I32x4ShrU) \ | 160 V(X64I32x4ShrU) \ |
159 V(X64I32x4MinU) \ | 161 V(X64I32x4MinU) \ |
160 V(X64I32x4MaxU) \ | 162 V(X64I32x4MaxU) \ |
161 V(X64I16x8Splat) \ | 163 V(X64I16x8Splat) \ |
162 V(X64I16x8ExtractLane) \ | 164 V(X64I16x8ExtractLane) \ |
163 V(X64I16x8ReplaceLane) \ | 165 V(X64I16x8ReplaceLane) \ |
164 V(X64I16x8Shl) \ | 166 V(X64I16x8Shl) \ |
165 V(X64I16x8ShrS) \ | 167 V(X64I16x8ShrS) \ |
166 V(X64I16x8Add) \ | 168 V(X64I16x8Add) \ |
167 V(X64I16x8AddSaturateS) \ | 169 V(X64I16x8AddSaturateS) \ |
| 170 V(X64I16x8AddHoriz) \ |
168 V(X64I16x8Sub) \ | 171 V(X64I16x8Sub) \ |
169 V(X64I16x8SubSaturateS) \ | 172 V(X64I16x8SubSaturateS) \ |
170 V(X64I16x8Mul) \ | 173 V(X64I16x8Mul) \ |
171 V(X64I16x8MinS) \ | 174 V(X64I16x8MinS) \ |
172 V(X64I16x8MaxS) \ | 175 V(X64I16x8MaxS) \ |
173 V(X64I16x8Eq) \ | 176 V(X64I16x8Eq) \ |
174 V(X64I16x8Ne) \ | 177 V(X64I16x8Ne) \ |
175 V(X64I16x8ShrU) \ | 178 V(X64I16x8ShrU) \ |
176 V(X64I16x8AddSaturateU) \ | 179 V(X64I16x8AddSaturateU) \ |
177 V(X64I16x8SubSaturateU) \ | 180 V(X64I16x8SubSaturateU) \ |
178 V(X64I16x8MinU) \ | 181 V(X64I16x8MinU) \ |
179 V(X64I16x8MaxU) \ | 182 V(X64I16x8MaxU) \ |
180 V(X64I8x16Splat) \ | 183 V(X64I8x16Splat) \ |
181 V(X64I8x16ExtractLane) \ | 184 V(X64I8x16ExtractLane) \ |
182 V(X64I8x16ReplaceLane) \ | 185 V(X64I8x16ReplaceLane) \ |
183 V(X64I8x16Add) \ | 186 V(X64I8x16Add) \ |
184 V(X64I8x16AddSaturateS) \ | 187 V(X64I8x16AddSaturateS) \ |
185 V(X64I8x16Sub) \ | 188 V(X64I8x16Sub) \ |
186 V(X64I8x16SubSaturateS) \ | 189 V(X64I8x16SubSaturateS) \ |
187 V(X64I8x16MinS) \ | 190 V(X64I8x16MinS) \ |
188 V(X64I8x16MaxS) \ | 191 V(X64I8x16MaxS) \ |
189 V(X64I8x16Eq) \ | 192 V(X64I8x16Eq) \ |
190 V(X64I8x16Ne) \ | 193 V(X64I8x16Ne) \ |
191 V(X64I8x16AddSaturateU) \ | 194 V(X64I8x16AddSaturateU) \ |
192 V(X64I8x16SubSaturateU) \ | 195 V(X64I8x16SubSaturateU) \ |
193 V(X64I8x16MinU) \ | 196 V(X64I8x16MinU) \ |
194 V(X64I8x16MaxU) \ | 197 V(X64I8x16MaxU) \ |
| 198 V(X64S128And) \ |
| 199 V(X64S128Or) \ |
| 200 V(X64S128Xor) \ |
| 201 V(X64S128Not) \ |
195 V(X64S128Select) \ | 202 V(X64S128Select) \ |
196 V(X64S128Zero) | 203 V(X64S128Zero) |
197 | 204 |
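Note: the V(...) entries above form an X-macro list; as a minimal sketch of how such a list is typically consumed (the expansion site below is illustrative, not the actual V8 code):

    // Each list entry V(Name) becomes an enum constant kName.
    #define DECLARE_OPCODE(Name) k##Name,
    enum ExampleArchOpcode {
      TARGET_ARCH_OPCODE_LIST(DECLARE_OPCODE)
      kExampleOpcodeCount  // trails the list, so it equals the opcode count
    };
    #undef DECLARE_OPCODE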
198 // Addressing modes represent the "shape" of inputs to an instruction. | 205 // Addressing modes represent the "shape" of inputs to an instruction. |
199 // Many instructions support multiple addressing modes. Addressing modes | 206 // Many instructions support multiple addressing modes. Addressing modes |
200 // are encoded into the InstructionCode of the instruction and tell the | 207 // are encoded into the InstructionCode of the instruction and tell the |
201 // code generator after register allocation which assembler method to call. | 208 // code generator after register allocation which assembler method to call. |
202 // | 209 // |
203 // We use the following local notation for addressing modes: | 210 // We use the following local notation for addressing modes: |
204 // | 211 // |
(...skipping 23 matching lines...)
228 V(M8I) /* [ %r2*8 + K] */ \ | 235 V(M8I) /* [ %r2*8 + K] */ \ |
229 V(Root) /* [%root + K] */ | 236 V(Root) /* [%root + K] */ |
230 | 237 |
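The comment above says the addressing mode is encoded into the InstructionCode and consulted by the code generator after register allocation; a minimal sketch of a consumer, assuming generated kMode_* enum names and covering only the two modes visible above:

    // Sketch: both addressing modes shown above carry a constant
    // displacement K, decoded from the instruction's opcode word.
    enum ExampleAddressingMode { kMode_M8I, kMode_Root };

    bool HasConstantDisplacement(ExampleAddressingMode mode) {
      switch (mode) {
        case kMode_M8I:   // [%r2*8 + K]: scaled index register plus K
        case kMode_Root:  // [%root + K]: root register plus K
          return true;
      }
      return false;
    }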
231 enum X64MemoryProtection { kUnprotected = 0, kProtected = 1 }; | 238 enum X64MemoryProtection { kUnprotected = 0, kProtected = 1 }; |
232 | 239 |
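On X64MemoryProtection: a flag like kProtected typically rides along with the opcode inside a single instruction word; a minimal sketch of one way to pack it, assuming an illustrative bit position (V8 itself uses its BitField helpers, and the layout below is not the real InstructionCode encoding):

    #include <cstdint>

    constexpr uint32_t kProtectedBit = 1u << 24;  // assumed bit, illustrative only

    constexpr uint32_t EncodeProtection(uint32_t code, X64MemoryProtection p) {
      return p == kProtected ? (code | kProtectedBit) : code;
    }

    constexpr X64MemoryProtection DecodeProtection(uint32_t code) {
      return (code & kProtectedBit) ? kProtected : kUnprotected;
    }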
233 } // namespace compiler | 240 } // namespace compiler |
234 } // namespace internal | 241 } // namespace internal |
235 } // namespace v8 | 242 } // namespace v8 |
236 | 243 |
237 #endif // V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ | 244 #endif // V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ |