Index: src/compiler/arm64/instruction-selector-arm64.cc
diff --git a/src/compiler/arm64/instruction-selector-arm64.cc b/src/compiler/arm64/instruction-selector-arm64.cc
index e81c5801bc2615348e25c16edea2c66b0bd025ee..87ed89645f321ac7991c3c5871118f00e93faa18 100644
--- a/src/compiler/arm64/instruction-selector-arm64.cc
+++ b/src/compiler/arm64/instruction-selector-arm64.cc
@@ -1069,17 +1069,22 @@ void InstructionSelector::VisitFloat64Add(Node* node) {
 void InstructionSelector::VisitFloat64Sub(Node* node) {
   Arm64OperandGenerator g(this);
   Float64BinopMatcher m(node);
-  if (m.left().IsMinusZero() && m.right().IsFloat64RoundDown() &&
-      CanCover(m.node(), m.right().node())) {
-    if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
-        CanCover(m.right().node(), m.right().InputAt(0))) {
-      Float64BinopMatcher mright0(m.right().InputAt(0));
-      if (mright0.left().IsMinusZero()) {
-        Emit(kArm64Float64RoundUp, g.DefineAsRegister(node),
-             g.UseRegister(mright0.right().node()));
-        return;
+  if (m.left().IsMinusZero()) {
+    if (m.right().IsFloat64RoundDown() &&
+        CanCover(m.node(), m.right().node())) {
+      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
+          CanCover(m.right().node(), m.right().InputAt(0))) {
+        Float64BinopMatcher mright0(m.right().InputAt(0));
+        if (mright0.left().IsMinusZero()) {
+          Emit(kArm64Float64RoundUp, g.DefineAsRegister(node),
+               g.UseRegister(mright0.right().node()));
+          return;
+        }
       }
     }
+    Emit(kArm64Float64Neg, g.DefineAsRegister(node),
+         g.UseRegister(m.right().node()));
+    return;
   }
   VisitRRRFloat64(this, kArm64Float64Sub, node);
 }
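
Note on the transformation: the restructured code still matches the ceil
pattern (-0.0 - Float64RoundDown(-0.0 - x) lowers to kArm64Float64RoundUp,
a single frintp), and additionally lowers a bare -0.0 - x to
kArm64Float64Neg (fneg) instead of a generic fsub. This relies on the
identities ceil(x) == -floor(-x) and -0.0 - x == -x, which hold for every
double including both signed zeros (NaN results can differ in bit pattern,
but not observably in JavaScript). A minimal standalone sketch checking
those identities follows; it is plain C++ for illustration, not V8 code,
and the sample values are arbitrary:

#include <cmath>
#include <cstdio>

int main() {
  // Cover both signs, an exact integer, and the signed zeros.
  const double samples[] = {1.25, -1.25, 2.0, 0.0, -0.0};
  for (double x : samples) {
    // The graph shape matched above: -0.0 - RoundDown(-0.0 - x).
    double via_sub = -0.0 - std::floor(-0.0 - x);
    std::printf("x=%5.2f  -0-floor(-0-x)=%5.2f  ceil(x)=%5.2f"
                "  -0-x=%5.2f  -x=%5.2f\n",
                x, via_sub, std::ceil(x), -0.0 - x, -x);
  }
  return 0;
}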