Index: src/compiler/arm64/instruction-codes-arm64.h |
diff --git a/src/compiler/arm64/instruction-codes-arm64.h b/src/compiler/arm64/instruction-codes-arm64.h |
index c2a52af7cbbb5281331c310cd5a77a877739af4e..de5cd816836afe28050ef60565908c46c391d034 100644 |
--- a/src/compiler/arm64/instruction-codes-arm64.h |
+++ b/src/compiler/arm64/instruction-codes-arm64.h |
@@ -11,126 +11,126 @@ namespace compiler { |
// ARM64-specific opcodes that specify which assembly sequence to emit. |
// Most opcodes specify a single instruction. |
-#define TARGET_ARCH_OPCODE_LIST(V) \ |
- V(Arm64Add) \ |
- V(Arm64Add32) \ |
- V(Arm64And) \ |
- V(Arm64And32) \ |
- V(Arm64Bic) \ |
- V(Arm64Bic32) \ |
- V(Arm64Clz32) \ |
- V(Arm64Cmp) \ |
- V(Arm64Cmp32) \ |
- V(Arm64Cmn) \ |
- V(Arm64Cmn32) \ |
- V(Arm64Tst) \ |
- V(Arm64Tst32) \ |
- V(Arm64Or) \ |
- V(Arm64Or32) \ |
- V(Arm64Orn) \ |
- V(Arm64Orn32) \ |
- V(Arm64Eor) \ |
- V(Arm64Eor32) \ |
- V(Arm64Eon) \ |
- V(Arm64Eon32) \ |
- V(Arm64Sub) \ |
- V(Arm64Sub32) \ |
- V(Arm64Mul) \ |
- V(Arm64Mul32) \ |
- V(Arm64Smull) \ |
- V(Arm64Umull) \ |
- V(Arm64Madd) \ |
- V(Arm64Madd32) \ |
- V(Arm64Msub) \ |
- V(Arm64Msub32) \ |
- V(Arm64Mneg) \ |
- V(Arm64Mneg32) \ |
- V(Arm64Idiv) \ |
- V(Arm64Idiv32) \ |
- V(Arm64Udiv) \ |
- V(Arm64Udiv32) \ |
- V(Arm64Imod) \ |
- V(Arm64Imod32) \ |
- V(Arm64Umod) \ |
- V(Arm64Umod32) \ |
- V(Arm64Not) \ |
- V(Arm64Not32) \ |
- V(Arm64Neg) \ |
- V(Arm64Neg32) \ |
- V(Arm64Lsl) \ |
- V(Arm64Lsl32) \ |
- V(Arm64Lsr) \ |
- V(Arm64Lsr32) \ |
- V(Arm64Asr) \ |
- V(Arm64Asr32) \ |
- V(Arm64Ror) \ |
- V(Arm64Ror32) \ |
- V(Arm64Mov32) \ |
- V(Arm64Sxtb32) \ |
- V(Arm64Sxth32) \ |
- V(Arm64Sxtw) \ |
- V(Arm64Sbfx32) \ |
- V(Arm64Ubfx) \ |
- V(Arm64Ubfx32) \ |
- V(Arm64Ubfiz32) \ |
- V(Arm64Bfi) \ |
- V(Arm64TestAndBranch32) \ |
- V(Arm64TestAndBranch) \ |
- V(Arm64CompareAndBranch32) \ |
- V(Arm64Claim) \ |
- V(Arm64Poke) \ |
- V(Arm64PokePair) \ |
- V(Arm64Float32Cmp) \ |
- V(Arm64Float32Add) \ |
- V(Arm64Float32Sub) \ |
- V(Arm64Float32Mul) \ |
- V(Arm64Float32Div) \ |
- V(Arm64Float32Max) \ |
- V(Arm64Float32Min) \ |
- V(Arm64Float32Abs) \ |
- V(Arm64Float32Sqrt) \ |
- V(Arm64Float64Cmp) \ |
- V(Arm64Float64Add) \ |
- V(Arm64Float64Sub) \ |
- V(Arm64Float64Mul) \ |
- V(Arm64Float64Div) \ |
- V(Arm64Float64Mod) \ |
- V(Arm64Float64Max) \ |
- V(Arm64Float64Min) \ |
- V(Arm64Float64Abs) \ |
- V(Arm64Float64Neg) \ |
- V(Arm64Float64Sqrt) \ |
- V(Arm64Float64RoundDown) \ |
- V(Arm64Float64RoundTiesAway) \ |
- V(Arm64Float64RoundTruncate) \ |
- V(Arm64Float64RoundUp) \ |
- V(Arm64Float32ToFloat64) \ |
- V(Arm64Float64ToFloat32) \ |
- V(Arm64Float64ToInt32) \ |
- V(Arm64Float64ToUint32) \ |
- V(Arm64Int32ToFloat64) \ |
- V(Arm64Uint32ToFloat64) \ |
- V(Arm64Float64ExtractLowWord32) \ |
- V(Arm64Float64ExtractHighWord32) \ |
- V(Arm64Float64InsertLowWord32) \ |
- V(Arm64Float64InsertHighWord32) \ |
- V(Arm64Float64MoveU64) \ |
- V(Arm64U64MoveFloat64) \ |
- V(Arm64LdrS) \ |
- V(Arm64StrS) \ |
- V(Arm64LdrD) \ |
- V(Arm64StrD) \ |
- V(Arm64Ldrb) \ |
- V(Arm64Ldrsb) \ |
- V(Arm64Strb) \ |
- V(Arm64Ldrh) \ |
- V(Arm64Ldrsh) \ |
- V(Arm64Strh) \ |
- V(Arm64LdrW) \ |
- V(Arm64StrW) \ |
- V(Arm64Ldr) \ |
- V(Arm64Str) \ |
- V(Arm64StoreWriteBarrier) |
+#define TARGET_ARCH_OPCODE_LIST(V) \ |
+ V(Arm64Add, kNoOpcodeFlags) \ |
+ V(Arm64Add32, kNoOpcodeFlags) \ |
+ V(Arm64And, kNoOpcodeFlags) \ |
+ V(Arm64And32, kNoOpcodeFlags) \ |
+ V(Arm64Bic, kNoOpcodeFlags) \ |
+ V(Arm64Bic32, kNoOpcodeFlags) \ |
+ V(Arm64Clz32, kNoOpcodeFlags) \ |
+ V(Arm64Cmp, kNoOpcodeFlags) \ |
+ V(Arm64Cmp32, kNoOpcodeFlags) \ |
+ V(Arm64Cmn, kNoOpcodeFlags) \ |
+ V(Arm64Cmn32, kNoOpcodeFlags) \ |
+ V(Arm64Tst, kNoOpcodeFlags) \ |
+ V(Arm64Tst32, kNoOpcodeFlags) \ |
+ V(Arm64Or, kNoOpcodeFlags) \ |
+ V(Arm64Or32, kNoOpcodeFlags) \ |
+ V(Arm64Orn, kNoOpcodeFlags) \ |
+ V(Arm64Orn32, kNoOpcodeFlags) \ |
+ V(Arm64Eor, kNoOpcodeFlags) \ |
+ V(Arm64Eor32, kNoOpcodeFlags) \ |
+ V(Arm64Eon, kNoOpcodeFlags) \ |
+ V(Arm64Eon32, kNoOpcodeFlags) \ |
+ V(Arm64Sub, kNoOpcodeFlags) \ |
+ V(Arm64Sub32, kNoOpcodeFlags) \ |
+ V(Arm64Mul, kNoOpcodeFlags) \ |
+ V(Arm64Mul32, kNoOpcodeFlags) \ |
+ V(Arm64Smull, kNoOpcodeFlags) \ |
+ V(Arm64Umull, kNoOpcodeFlags) \ |
+ V(Arm64Madd, kNoOpcodeFlags) \ |
+ V(Arm64Madd32, kNoOpcodeFlags) \ |
+ V(Arm64Msub, kNoOpcodeFlags) \ |
+ V(Arm64Msub32, kNoOpcodeFlags) \ |
+ V(Arm64Mneg, kNoOpcodeFlags) \ |
+ V(Arm64Mneg32, kNoOpcodeFlags) \ |
+ V(Arm64Idiv, kNoOpcodeFlags) \ |
+ V(Arm64Idiv32, kNoOpcodeFlags) \ |
+ V(Arm64Udiv, kNoOpcodeFlags) \ |
+ V(Arm64Udiv32, kNoOpcodeFlags) \ |
+ V(Arm64Imod, kNoOpcodeFlags) \ |
+ V(Arm64Imod32, kNoOpcodeFlags) \ |
+ V(Arm64Umod, kNoOpcodeFlags) \ |
+ V(Arm64Umod32, kNoOpcodeFlags) \ |
+ V(Arm64Not, kNoOpcodeFlags) \ |
+ V(Arm64Not32, kNoOpcodeFlags) \ |
+ V(Arm64Neg, kNoOpcodeFlags) \ |
+ V(Arm64Neg32, kNoOpcodeFlags) \ |
+ V(Arm64Lsl, kNoOpcodeFlags) \ |
+ V(Arm64Lsl32, kNoOpcodeFlags) \ |
+ V(Arm64Lsr, kNoOpcodeFlags) \ |
+ V(Arm64Lsr32, kNoOpcodeFlags) \ |
+ V(Arm64Asr, kNoOpcodeFlags) \ |
+ V(Arm64Asr32, kNoOpcodeFlags) \ |
+ V(Arm64Ror, kNoOpcodeFlags) \ |
+ V(Arm64Ror32, kNoOpcodeFlags) \ |
+ V(Arm64Mov32, kNoOpcodeFlags) \ |
+ V(Arm64Sxtb32, kNoOpcodeFlags) \ |
+ V(Arm64Sxth32, kNoOpcodeFlags) \ |
+ V(Arm64Sxtw, kNoOpcodeFlags) \ |
+ V(Arm64Sbfx32, kNoOpcodeFlags) \ |
+ V(Arm64Ubfx, kNoOpcodeFlags) \ |
+ V(Arm64Ubfx32, kNoOpcodeFlags) \ |
+ V(Arm64Ubfiz32, kNoOpcodeFlags) \ |
+ V(Arm64Bfi, kNoOpcodeFlags) \ |
+ V(Arm64TestAndBranch32, kIsBlockTerminator) \ |
+ V(Arm64TestAndBranch, kIsBlockTerminator) \ |
+ V(Arm64CompareAndBranch32, kIsBlockTerminator) \ |
+ V(Arm64Claim, kHasSideEffect) \ |
+ V(Arm64Poke, kHasSideEffect) \ |
+ V(Arm64PokePair, kHasSideEffect) \ |
+ V(Arm64Float32Cmp, kNoOpcodeFlags) \ |
+ V(Arm64Float32Add, kNoOpcodeFlags) \ |
+ V(Arm64Float32Sub, kNoOpcodeFlags) \ |
+ V(Arm64Float32Mul, kNoOpcodeFlags) \ |
+ V(Arm64Float32Div, kNoOpcodeFlags) \ |
+ V(Arm64Float32Max, kNoOpcodeFlags) \ |
+ V(Arm64Float32Min, kNoOpcodeFlags) \ |
+ V(Arm64Float32Abs, kNoOpcodeFlags) \ |
+ V(Arm64Float32Sqrt, kNoOpcodeFlags) \ |
+ V(Arm64Float64Cmp, kNoOpcodeFlags) \ |
+ V(Arm64Float64Add, kNoOpcodeFlags) \ |
+ V(Arm64Float64Sub, kNoOpcodeFlags) \ |
+ V(Arm64Float64Mul, kNoOpcodeFlags) \ |
+ V(Arm64Float64Div, kNoOpcodeFlags) \ |
+ V(Arm64Float64Mod, kNoOpcodeFlags) \ |
+ V(Arm64Float64Max, kNoOpcodeFlags) \ |
+ V(Arm64Float64Min, kNoOpcodeFlags) \ |
+ V(Arm64Float64Abs, kNoOpcodeFlags) \ |
+ V(Arm64Float64Neg, kNoOpcodeFlags) \ |
+ V(Arm64Float64Sqrt, kNoOpcodeFlags) \ |
+ V(Arm64Float64RoundDown, kNoOpcodeFlags) \ |
+ V(Arm64Float64RoundTiesAway, kNoOpcodeFlags) \ |
+ V(Arm64Float64RoundTruncate, kNoOpcodeFlags) \ |
+ V(Arm64Float64RoundUp, kNoOpcodeFlags) \ |
+ V(Arm64Float32ToFloat64, kNoOpcodeFlags) \ |
+ V(Arm64Float64ToFloat32, kNoOpcodeFlags) \ |
+ V(Arm64Float64ToInt32, kNoOpcodeFlags) \ |
+ V(Arm64Float64ToUint32, kNoOpcodeFlags) \ |
+ V(Arm64Int32ToFloat64, kNoOpcodeFlags) \ |
+ V(Arm64Uint32ToFloat64, kNoOpcodeFlags) \ |
+ V(Arm64Float64ExtractLowWord32, kNoOpcodeFlags) \ |
+ V(Arm64Float64ExtractHighWord32, kNoOpcodeFlags) \ |
+ V(Arm64Float64InsertLowWord32, kNoOpcodeFlags) \ |
+ V(Arm64Float64InsertHighWord32, kNoOpcodeFlags) \ |
+ V(Arm64Float64MoveU64, kNoOpcodeFlags) \ |
+ V(Arm64U64MoveFloat64, kNoOpcodeFlags) \ |
+ V(Arm64LdrS, kIsLoadOperation) \ |
+ V(Arm64StrS, kHasSideEffect) \ |
+ V(Arm64LdrD, kIsLoadOperation) \ |
+ V(Arm64StrD, kHasSideEffect) \ |
+ V(Arm64Ldrb, kIsLoadOperation) \ |
+ V(Arm64Ldrsb, kIsLoadOperation) \ |
+ V(Arm64Strb, kHasSideEffect) \ |
+ V(Arm64Ldrh, kIsLoadOperation) \ |
+ V(Arm64Ldrsh, kIsLoadOperation) \ |
+ V(Arm64Strh, kHasSideEffect) \ |
+ V(Arm64LdrW, kIsLoadOperation) \ |
+ V(Arm64StrW, kHasSideEffect) \ |
+ V(Arm64Ldr, kIsLoadOperation) \ |
+ V(Arm64Str, kHasSideEffect) \ |
+ V(Arm64StoreWriteBarrier, kHasSideEffect) |
Jarin
2015/10/26 15:12:35
As discussed offline, it would be better if the flags
baptiste.afsa1
2015/10/27 16:00:23
Done.
|
// Addressing modes represent the "shape" of inputs to an instruction. |