OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 5 #ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
6 #define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 6 #define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
(...skipping 55 matching lines...)
66 V(Arm64Ror32) \ | 66 V(Arm64Ror32) \ |
67 V(Arm64Mov32) \ | 67 V(Arm64Mov32) \ |
68 V(Arm64Sxtb32) \ | 68 V(Arm64Sxtb32) \ |
69 V(Arm64Sxth32) \ | 69 V(Arm64Sxth32) \ |
70 V(Arm64Sxtw) \ | 70 V(Arm64Sxtw) \ |
71 V(Arm64Sbfx32) \ | 71 V(Arm64Sbfx32) \ |
72 V(Arm64Ubfx) \ | 72 V(Arm64Ubfx) \ |
73 V(Arm64Ubfx32) \ | 73 V(Arm64Ubfx32) \ |
74 V(Arm64Ubfiz32) \ | 74 V(Arm64Ubfiz32) \ |
75 V(Arm64Bfi) \ | 75 V(Arm64Bfi) \ |
| 76 V(Arm64Rbit) \ |
| 77 V(Arm64Rbit32) \ |
76 V(Arm64TestAndBranch32) \ | 78 V(Arm64TestAndBranch32) \ |
77 V(Arm64TestAndBranch) \ | 79 V(Arm64TestAndBranch) \ |
78 V(Arm64CompareAndBranch32) \ | 80 V(Arm64CompareAndBranch32) \ |
79 V(Arm64ClaimCSP) \ | 81 V(Arm64ClaimCSP) \ |
80 V(Arm64ClaimJSSP) \ | 82 V(Arm64ClaimJSSP) \ |
81 V(Arm64PokeCSP) \ | 83 V(Arm64PokeCSP) \ |
82 V(Arm64PokeJSSP) \ | 84 V(Arm64PokeJSSP) \ |
83 V(Arm64PokePair) \ | 85 V(Arm64PokePair) \ |
84 V(Arm64Float32Cmp) \ | 86 V(Arm64Float32Cmp) \ |
85 V(Arm64Float32Add) \ | 87 V(Arm64Float32Add) \ |
(...skipping 56 matching lines...)
142 V(Arm64Ldrsb) \ | 144 V(Arm64Ldrsb) \ |
143 V(Arm64Strb) \ | 145 V(Arm64Strb) \ |
144 V(Arm64Ldrh) \ | 146 V(Arm64Ldrh) \ |
145 V(Arm64Ldrsh) \ | 147 V(Arm64Ldrsh) \ |
146 V(Arm64Strh) \ | 148 V(Arm64Strh) \ |
147 V(Arm64LdrW) \ | 149 V(Arm64LdrW) \ |
148 V(Arm64StrW) \ | 150 V(Arm64StrW) \ |
149 V(Arm64Ldr) \ | 151 V(Arm64Ldr) \ |
150 V(Arm64Str) | 152 V(Arm64Str) |
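Note on the two opcodes this patch adds: Arm64Rbit and Arm64Rbit32 correspond to the AArch64 RBIT instruction, which reverses the bit order of its source register (bit 0 swaps with bit 63, or with bit 31 in the 32-bit form). A minimal, self-contained sketch of the 32-bit semantics, for illustration only (Rbit32 is a hypothetical helper, not V8 code):

```cpp
#include <cstdint>
#include <cstdio>

// Illustration of what the AArch64 RBIT instruction computes in its
// 32-bit form: bit i of the input becomes bit (31 - i) of the output.
uint32_t Rbit32(uint32_t x) {
  uint32_t r = 0;
  for (int i = 0; i < 32; ++i) {
    r |= ((x >> i) & 1u) << (31 - i);
  }
  return r;
}

int main() {
  printf("%08x\n", Rbit32(0x00000001u));  // prints 80000000
  printf("%08x\n", Rbit32(0x000000f0u));  // prints 0f000000
}
```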
151 | 153 |
152 | |
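For context, the V(...) entries above are an X-macro list: common compiler code expands the list into the ArchOpcode enum (and a matching name table), so adding V(Arm64Rbit) makes a kArm64Rbit case available to the instruction selector and code generator. A sketch of the pattern, with the macro names assumed rather than copied from V8:

```cpp
// X-macro pattern (illustrative): each V(Name) entry in the list
// becomes an enumerator kName when the list is expanded.
#define TARGET_ARCH_OPCODE_LIST(V) \
  V(Arm64Rbit)                     \
  V(Arm64Rbit32)

#define DECLARE_ARCH_OPCODE(Name) k##Name,
enum ArchOpcode { TARGET_ARCH_OPCODE_LIST(DECLARE_ARCH_OPCODE) };
#undef DECLARE_ARCH_OPCODE
```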
153 // Addressing modes represent the "shape" of inputs to an instruction. | 154 // Addressing modes represent the "shape" of inputs to an instruction. |
154 // Many instructions support multiple addressing modes. Addressing modes | 155 // Many instructions support multiple addressing modes. Addressing modes |
155 // are encoded into the InstructionCode of the instruction and tell the | 156 // are encoded into the InstructionCode of the instruction and tell the |
156 // code generator after register allocation which assembler method to call. | 157 // code generator after register allocation which assembler method to call. |
157 // | 158 // |
158 // We use the following local notation for addressing modes: | 159 // We use the following local notation for addressing modes: |
159 // | 160 // |
160 // R = register | 161 // R = register |
161 // O = register or stack slot | 162 // O = register or stack slot |
162 // D = double register | 163 // D = double register |
(...skipping 12 matching lines...)
175 V(Operand2_R_SXTB) /* %r0 SXTB (signed extend byte) */ \ | 176 V(Operand2_R_SXTB) /* %r0 SXTB (signed extend byte) */ \ |
176 V(Operand2_R_SXTH) /* %r0 SXTH (signed extend halfword) */ | 177 V(Operand2_R_SXTH) /* %r0 SXTH (signed extend halfword) */ |
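The comment above notes that addressing modes are encoded into the InstructionCode; a minimal sketch of that packing follows, with an assumed field layout (V8's real layout uses BitField helpers in the shared instruction-codes.h):

```cpp
#include <cstdint>

// Illustrative packing: the opcode and the addressing mode occupy
// disjoint bit fields of one integer, so the code generator can recover
// both after register allocation. The field width is an assumption.
using InstructionCode = uint32_t;
constexpr int kOpcodeBits = 9;

constexpr InstructionCode Encode(uint32_t opcode, uint32_t mode) {
  return opcode | (mode << kOpcodeBits);
}
constexpr uint32_t ArchOpcodeOf(InstructionCode code) {
  return code & ((1u << kOpcodeBits) - 1u);
}
constexpr uint32_t AddressingModeOf(InstructionCode code) {
  return code >> kOpcodeBits;
}

static_assert(ArchOpcodeOf(Encode(42, 3)) == 42, "opcode round-trips");
static_assert(AddressingModeOf(Encode(42, 3)) == 3, "mode round-trips");
```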
177 | 178 |
178 enum ResetJSSPAfterCall { kNoResetJSSP, kResetJSSP }; | 179 enum ResetJSSPAfterCall { kNoResetJSSP, kResetJSSP }; |
179 | 180 |
180 } // namespace compiler | 181 } // namespace compiler |
181 } // namespace internal | 182 } // namespace internal |
182 } // namespace v8 | 183 } // namespace v8 |
183 | 184 |
184 #endif // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 185 #endif // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |