Index: src/compiler/arm/instruction-selector-arm.cc
diff --git a/src/compiler/arm/instruction-selector-arm.cc b/src/compiler/arm/instruction-selector-arm.cc
index a2d83e6db21e03d776d5f54906561134884401ab..47a19db9cd82484d59b070ac1c7a0f2682445b04 100644
--- a/src/compiler/arm/instruction-selector-arm.cc
+++ b/src/compiler/arm/instruction-selector-arm.cc
@@ -2234,90 +2234,99 @@ void InstructionSelector::VisitAtomicExchange(Node* node) {
 V(Simd1x8Zero) \
 V(Simd1x16Zero)
-#define SIMD_UNOP_LIST(V) \
- V(Float32x4FromInt32x4, kArmFloat32x4FromInt32x4) \
- V(Float32x4FromUint32x4, kArmFloat32x4FromUint32x4) \
- V(Float32x4Abs, kArmFloat32x4Abs) \
- V(Float32x4Neg, kArmFloat32x4Neg) \
- V(Int32x4FromFloat32x4, kArmInt32x4FromFloat32x4) \
- V(Uint32x4FromFloat32x4, kArmUint32x4FromFloat32x4) \
- V(Int32x4Neg, kArmInt32x4Neg) \
- V(Int16x8Neg, kArmInt16x8Neg) \
- V(Int8x16Neg, kArmInt8x16Neg) \
- V(Simd128Not, kArmSimd128Not) \
- V(Simd1x4Not, kArmSimd128Not) \
- V(Simd1x4AnyTrue, kArmSimd1x4AnyTrue) \
- V(Simd1x4AllTrue, kArmSimd1x4AllTrue) \
- V(Simd1x8Not, kArmSimd128Not) \
- V(Simd1x8AnyTrue, kArmSimd1x8AnyTrue) \
- V(Simd1x8AllTrue, kArmSimd1x8AllTrue) \
- V(Simd1x16Not, kArmSimd128Not) \
- V(Simd1x16AnyTrue, kArmSimd1x16AnyTrue) \
+#define SIMD_UNOP_LIST(V) \
+ V(Float32x4FromInt32x4, kArmFloat32x4FromInt32x4) \
+ V(Float32x4FromUint32x4, kArmFloat32x4FromUint32x4) \
+ V(Float32x4Abs, kArmFloat32x4Abs) \
+ V(Float32x4Neg, kArmFloat32x4Neg) \
+ V(Float32x4RecipApprox, kArmFloat32x4RecipApprox) \
+ V(Float32x4RecipSqrtApprox, kArmFloat32x4RecipSqrtApprox) \
+ V(Int32x4FromFloat32x4, kArmInt32x4FromFloat32x4) \
+ V(Uint32x4FromFloat32x4, kArmUint32x4FromFloat32x4) \
+ V(Int32x4Neg, kArmInt32x4Neg) \
+ V(Int16x8Neg, kArmInt16x8Neg) \
+ V(Int8x16Neg, kArmInt8x16Neg) \
+ V(Simd128Not, kArmSimd128Not) \
+ V(Simd1x4Not, kArmSimd128Not) \
+ V(Simd1x4AnyTrue, kArmSimd1x4AnyTrue) \
+ V(Simd1x4AllTrue, kArmSimd1x4AllTrue) \
+ V(Simd1x8Not, kArmSimd128Not) \
+ V(Simd1x8AnyTrue, kArmSimd1x8AnyTrue) \
+ V(Simd1x8AllTrue, kArmSimd1x8AllTrue) \
+ V(Simd1x16Not, kArmSimd128Not) \
+ V(Simd1x16AnyTrue, kArmSimd1x16AnyTrue) \
 V(Simd1x16AllTrue, kArmSimd1x16AllTrue)
-#define SIMD_BINOP_LIST(V) \
- V(Float32x4Add, kArmFloat32x4Add) \
- V(Float32x4Sub, kArmFloat32x4Sub) \
- V(Float32x4Equal, kArmFloat32x4Equal) \
- V(Float32x4NotEqual, kArmFloat32x4NotEqual) \
- V(Int32x4Add, kArmInt32x4Add) \
- V(Int32x4Sub, kArmInt32x4Sub) \
- V(Int32x4Mul, kArmInt32x4Mul) \
- V(Int32x4Min, kArmInt32x4Min) \
- V(Int32x4Max, kArmInt32x4Max) \
- V(Int32x4Equal, kArmInt32x4Equal) \
- V(Int32x4NotEqual, kArmInt32x4NotEqual) \
- V(Int32x4GreaterThan, kArmInt32x4GreaterThan) \
- V(Int32x4GreaterThanOrEqual, kArmInt32x4GreaterThanOrEqual) \
- V(Uint32x4Min, kArmUint32x4Min) \
- V(Uint32x4Max, kArmUint32x4Max) \
- V(Uint32x4GreaterThan, kArmUint32x4GreaterThan) \
- V(Uint32x4GreaterThanOrEqual, kArmUint32x4GreaterThanOrEqual) \
- V(Int16x8Add, kArmInt16x8Add) \
- V(Int16x8AddSaturate, kArmInt16x8AddSaturate) \
- V(Int16x8Sub, kArmInt16x8Sub) \
- V(Int16x8SubSaturate, kArmInt16x8SubSaturate) \
- V(Int16x8Mul, kArmInt16x8Mul) \
- V(Int16x8Min, kArmInt16x8Min) \
- V(Int16x8Max, kArmInt16x8Max) \
- V(Int16x8Equal, kArmInt16x8Equal) \
- V(Int16x8NotEqual, kArmInt16x8NotEqual) \
- V(Int16x8GreaterThan, kArmInt16x8GreaterThan) \
- V(Int16x8GreaterThanOrEqual, kArmInt16x8GreaterThanOrEqual) \
- V(Uint16x8AddSaturate, kArmUint16x8AddSaturate) \
- V(Uint16x8SubSaturate, kArmUint16x8SubSaturate) \
- V(Uint16x8Min, kArmUint16x8Min) \
- V(Uint16x8Max, kArmUint16x8Max) \
- V(Uint16x8GreaterThan, kArmUint16x8GreaterThan) \
- V(Uint16x8GreaterThanOrEqual, kArmUint16x8GreaterThanOrEqual) \
- V(Int8x16Add, kArmInt8x16Add) \
- V(Int8x16AddSaturate, kArmInt8x16AddSaturate) \
- V(Int8x16Sub, kArmInt8x16Sub) \
- V(Int8x16SubSaturate, kArmInt8x16SubSaturate) \
- V(Int8x16Mul, kArmInt8x16Mul) \
- V(Int8x16Min, kArmInt8x16Min) \
- V(Int8x16Max, kArmInt8x16Max) \
- V(Int8x16Equal, kArmInt8x16Equal) \
- V(Int8x16NotEqual, kArmInt8x16NotEqual) \
- V(Int8x16GreaterThan, kArmInt8x16GreaterThan) \
- V(Int8x16GreaterThanOrEqual, kArmInt8x16GreaterThanOrEqual) \
- V(Uint8x16AddSaturate, kArmUint8x16AddSaturate) \
- V(Uint8x16SubSaturate, kArmUint8x16SubSaturate) \
- V(Uint8x16Min, kArmUint8x16Min) \
- V(Uint8x16Max, kArmUint8x16Max) \
- V(Uint8x16GreaterThan, kArmUint8x16GreaterThan) \
- V(Uint8x16GreaterThanOrEqual, kArmUint8x16GreaterThanOrEqual) \
- V(Simd128And, kArmSimd128And) \
- V(Simd128Or, kArmSimd128Or) \
- V(Simd128Xor, kArmSimd128Xor) \
- V(Simd1x4And, kArmSimd128And) \
- V(Simd1x4Or, kArmSimd128Or) \
- V(Simd1x4Xor, kArmSimd128Xor) \
- V(Simd1x8And, kArmSimd128And) \
- V(Simd1x8Or, kArmSimd128Or) \
- V(Simd1x8Xor, kArmSimd128Xor) \
- V(Simd1x16And, kArmSimd128And) \
- V(Simd1x16Or, kArmSimd128Or) \
+#define SIMD_BINOP_LIST(V) \
+ V(Float32x4Add, kArmFloat32x4Add) \
+ V(Float32x4Sub, kArmFloat32x4Sub) \
+ V(Float32x4Mul, kArmFloat32x4Mul) \
+ V(Float32x4Min, kArmFloat32x4Min) \
+ V(Float32x4Max, kArmFloat32x4Max) \
+ V(Float32x4RecipRefine, kArmFloat32x4RecipRefine) \
+ V(Float32x4RecipSqrtRefine, kArmFloat32x4RecipSqrtRefine) \
+ V(Float32x4Equal, kArmFloat32x4Equal) \
+ V(Float32x4NotEqual, kArmFloat32x4NotEqual) \
+ V(Float32x4LessThan, kArmFloat32x4LessThan) \
+ V(Float32x4LessThanOrEqual, kArmFloat32x4LessThanOrEqual) \
+ V(Int32x4Add, kArmInt32x4Add) \
+ V(Int32x4Sub, kArmInt32x4Sub) \
+ V(Int32x4Mul, kArmInt32x4Mul) \
+ V(Int32x4Min, kArmInt32x4Min) \
+ V(Int32x4Max, kArmInt32x4Max) \
+ V(Int32x4Equal, kArmInt32x4Equal) \
+ V(Int32x4NotEqual, kArmInt32x4NotEqual) \
+ V(Int32x4LessThan, kArmInt32x4LessThan) \
+ V(Int32x4LessThanOrEqual, kArmInt32x4LessThanOrEqual) \
+ V(Uint32x4Min, kArmUint32x4Min) \
+ V(Uint32x4Max, kArmUint32x4Max) \
+ V(Uint32x4LessThan, kArmUint32x4LessThan) \
+ V(Uint32x4LessThanOrEqual, kArmUint32x4LessThanOrEqual) \
+ V(Int16x8Add, kArmInt16x8Add) \
+ V(Int16x8AddSaturate, kArmInt16x8AddSaturate) \
+ V(Int16x8Sub, kArmInt16x8Sub) \
+ V(Int16x8SubSaturate, kArmInt16x8SubSaturate) \
+ V(Int16x8Mul, kArmInt16x8Mul) \
+ V(Int16x8Min, kArmInt16x8Min) \
+ V(Int16x8Max, kArmInt16x8Max) \
+ V(Int16x8Equal, kArmInt16x8Equal) \
+ V(Int16x8NotEqual, kArmInt16x8NotEqual) \
+ V(Int16x8LessThan, kArmInt16x8LessThan) \
+ V(Int16x8LessThanOrEqual, kArmInt16x8LessThanOrEqual) \
+ V(Uint16x8AddSaturate, kArmUint16x8AddSaturate) \
+ V(Uint16x8SubSaturate, kArmUint16x8SubSaturate) \
+ V(Uint16x8Min, kArmUint16x8Min) \
+ V(Uint16x8Max, kArmUint16x8Max) \
+ V(Uint16x8LessThan, kArmUint16x8LessThan) \
+ V(Uint16x8LessThanOrEqual, kArmUint16x8LessThanOrEqual) \
+ V(Int8x16Add, kArmInt8x16Add) \
+ V(Int8x16AddSaturate, kArmInt8x16AddSaturate) \
+ V(Int8x16Sub, kArmInt8x16Sub) \
+ V(Int8x16SubSaturate, kArmInt8x16SubSaturate) \
+ V(Int8x16Mul, kArmInt8x16Mul) \
+ V(Int8x16Min, kArmInt8x16Min) \
+ V(Int8x16Max, kArmInt8x16Max) \
+ V(Int8x16Equal, kArmInt8x16Equal) \
+ V(Int8x16NotEqual, kArmInt8x16NotEqual) \
+ V(Int8x16LessThan, kArmInt8x16LessThan) \
+ V(Int8x16LessThanOrEqual, kArmInt8x16LessThanOrEqual) \
+ V(Uint8x16AddSaturate, kArmUint8x16AddSaturate) \
+ V(Uint8x16SubSaturate, kArmUint8x16SubSaturate) \
+ V(Uint8x16Min, kArmUint8x16Min) \
+ V(Uint8x16Max, kArmUint8x16Max) \
+ V(Uint8x16LessThan, kArmUint8x16LessThan) \
+ V(Uint8x16LessThanOrEqual, kArmUint8x16LessThanOrEqual) \
+ V(Simd128And, kArmSimd128And) \
+ V(Simd128Or, kArmSimd128Or) \
+ V(Simd128Xor, kArmSimd128Xor) \
+ V(Simd1x4And, kArmSimd128And) \
+ V(Simd1x4Or, kArmSimd128Or) \
+ V(Simd1x4Xor, kArmSimd128Xor) \
+ V(Simd1x8And, kArmSimd128And) \
+ V(Simd1x8Or, kArmSimd128Or) \
+ V(Simd1x8Xor, kArmSimd128Xor) \
+ V(Simd1x16And, kArmSimd128And) \
+ V(Simd1x16Or, kArmSimd128Or) \
 V(Simd1x16Xor, kArmSimd128Xor)
 #define SIMD_SHIFT_OP_LIST(V) \