| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 5 #ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
| 6 #define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 6 #define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| (...skipping 63 matching lines...) |
| 74 V(Arm64Ubfx32) \ | 74 V(Arm64Ubfx32) \ |
| 75 V(Arm64Ubfiz32) \ | 75 V(Arm64Ubfiz32) \ |
| 76 V(Arm64Bfi) \ | 76 V(Arm64Bfi) \ |
| 77 V(Arm64TestAndBranch32) \ | 77 V(Arm64TestAndBranch32) \ |
| 78 V(Arm64TestAndBranch) \ | 78 V(Arm64TestAndBranch) \ |
| 79 V(Arm64CompareAndBranch32) \ | 79 V(Arm64CompareAndBranch32) \ |
| 80 V(Arm64Claim) \ | 80 V(Arm64Claim) \ |
| 81 V(Arm64Poke) \ | 81 V(Arm64Poke) \ |
| 82 V(Arm64PokePair) \ | 82 V(Arm64PokePair) \ |
| 83 V(Arm64Float32Cmp) \ | 83 V(Arm64Float32Cmp) \ |
| 84 V(Arm64Float32CmpAndFloat32Sel) \ |
| 84 V(Arm64Float32Add) \ | 85 V(Arm64Float32Add) \ |
| 85 V(Arm64Float32Sub) \ | 86 V(Arm64Float32Sub) \ |
| 86 V(Arm64Float32Mul) \ | 87 V(Arm64Float32Mul) \ |
| 87 V(Arm64Float32Div) \ | 88 V(Arm64Float32Div) \ |
| 88 V(Arm64Float32Max) \ | 89 V(Arm64Float32Max) \ |
| 89 V(Arm64Float32Min) \ | 90 V(Arm64Float32Min) \ |
| 90 V(Arm64Float32Abs) \ | 91 V(Arm64Float32Abs) \ |
| 91 V(Arm64Float32Sqrt) \ | 92 V(Arm64Float32Sqrt) \ |
| 92 V(Arm64Float64Cmp) \ | 93 V(Arm64Float64Cmp) \ |
| 94 V(Arm64Float64CmpAndFloat64Sel) \ |
| 93 V(Arm64Float64Add) \ | 95 V(Arm64Float64Add) \ |
| 94 V(Arm64Float64Sub) \ | 96 V(Arm64Float64Sub) \ |
| 95 V(Arm64Float64Mul) \ | 97 V(Arm64Float64Mul) \ |
| 96 V(Arm64Float64Div) \ | 98 V(Arm64Float64Div) \ |
| 97 V(Arm64Float64Mod) \ | 99 V(Arm64Float64Mod) \ |
| 98 V(Arm64Float64Max) \ | 100 V(Arm64Float64Max) \ |
| 99 V(Arm64Float64Min) \ | 101 V(Arm64Float64Min) \ |
| 100 V(Arm64Float64Abs) \ | 102 V(Arm64Float64Abs) \ |
| 101 V(Arm64Float64Neg) \ | 103 V(Arm64Float64Neg) \ |
| 102 V(Arm64Float64Sqrt) \ | 104 V(Arm64Float64Sqrt) \ |
| (...skipping 52 matching lines...) |
| 155 V(Operand2_R_UXTB) /* %r0 UXTB (unsigned extend byte) */ \ | 157 V(Operand2_R_UXTB) /* %r0 UXTB (unsigned extend byte) */ \ |
| 156 V(Operand2_R_UXTH) /* %r0 UXTH (unsigned extend halfword) */ \ | 158 V(Operand2_R_UXTH) /* %r0 UXTH (unsigned extend halfword) */ \ |
| 157 V(Operand2_R_SXTB) /* %r0 SXTB (signed extend byte) */ \ | 159 V(Operand2_R_SXTB) /* %r0 SXTB (signed extend byte) */ \ |
| 158 V(Operand2_R_SXTH) /* %r0 SXTH (signed extend halfword) */ | 160 V(Operand2_R_SXTH) /* %r0 SXTH (signed extend halfword) */ |
| 159 | 161 |
| 160 } // namespace compiler | 162 } // namespace compiler |
| 161 } // namespace internal | 163 } // namespace internal |
| 162 } // namespace v8 | 164 } // namespace v8 |
| 163 | 165 |
| 164 #endif // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ | 166 #endif // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_ |
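
For context on the two opcodes this change adds (Arm64Float32CmpAndFloat32Sel and Arm64Float64CmpAndFloat64Sel): judging only by their names, they appear to describe a fused floating-point compare feeding a conditional select, which ARM64 can express as an fcmp followed by an fcsel. A minimal C++ sketch of the source-level pattern such an opcode would presumably cover (illustration only, not part of this change and not how the instruction selector is written):

#include <cstdio>

// Illustration only: the compare-and-select pattern, i.e.
// (lhs < rhs) ? if_true : if_false, which a fused opcode could lower
// to fcmp + fcsel without materialising the comparison result in a
// general-purpose register.
static float SelectIfLess(float lhs, float rhs, float if_true, float if_false) {
  return lhs < rhs ? if_true : if_false;
}

int main() {
  std::printf("%f\n", SelectIfLess(1.0f, 2.0f, 10.0f, 20.0f));  // prints 10.000000
  return 0;
}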