Index: src/compiler/arm64/instruction-selector-arm64.cc
diff --git a/src/compiler/arm64/instruction-selector-arm64.cc b/src/compiler/arm64/instruction-selector-arm64.cc
index 78b24c5af6b2c45b2c8524d99464b7b77362f606..28ac51fc22e2f853ac67e87d16064f43d1c3550b 100644
--- a/src/compiler/arm64/instruction-selector-arm64.cc
+++ b/src/compiler/arm64/instruction-selector-arm64.cc
@@ -1740,34 +1740,7 @@ void InstructionSelector::VisitFloat32Sub(Node* node) {
   VisitRRR(this, kArm64Float32Sub, node);
 }
 
-void InstructionSelector::VisitFloat32SubPreserveNan(Node* node) {
-  VisitRRR(this, kArm64Float32Sub, node);
-}
-
 void InstructionSelector::VisitFloat64Sub(Node* node) {
-  Arm64OperandGenerator g(this);
-  Float64BinopMatcher m(node);
-  if (m.left().IsMinusZero()) {
-    if (m.right().IsFloat64RoundDown() &&
-        CanCover(m.node(), m.right().node())) {
-      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
-          CanCover(m.right().node(), m.right().InputAt(0))) {
-        Float64BinopMatcher mright0(m.right().InputAt(0));
-        if (mright0.left().IsMinusZero()) {
-          Emit(kArm64Float64RoundUp, g.DefineAsRegister(node),
-               g.UseRegister(mright0.right().node()));
-          return;
-        }
-      }
-    }
-    Emit(kArm64Float64Neg, g.DefineAsRegister(node),
-         g.UseRegister(m.right().node()));
-    return;
-  }
-  VisitRRR(this, kArm64Float64Sub, node);
-}
-
-void InstructionSelector::VisitFloat64SubPreserveNan(Node* node) {
   VisitRRR(this, kArm64Float64Sub, node);
 }
 