OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
6 #include "src/base/bits.h" | 6 #include "src/base/bits.h" |
7 #include "src/compiler/instruction-selector-impl.h" | 7 #include "src/compiler/instruction-selector-impl.h" |
8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
9 #include "src/compiler/node-properties.h" | 9 #include "src/compiler/node-properties.h" |
10 | 10 |
(...skipping 1187 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1198 if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) { | 1198 if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) { |
1199 Float64BinopMatcher mright(m.right().node()); | 1199 Float64BinopMatcher mright(m.right().node()); |
1200 Emit(kArmVmlaF64, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()), | 1200 Emit(kArmVmlaF64, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()), |
1201 g.UseRegister(mright.left().node()), | 1201 g.UseRegister(mright.left().node()), |
1202 g.UseRegister(mright.right().node())); | 1202 g.UseRegister(mright.right().node())); |
1203 return; | 1203 return; |
1204 } | 1204 } |
1205 VisitRRR(this, kArmVaddF64, node); | 1205 VisitRRR(this, kArmVaddF64, node); |
1206 } | 1206 } |
1207 | 1207 |
1208 | 1208 namespace { |
1209 void InstructionSelector::VisitFloat32Sub(Node* node) { | 1209 void VisitFloat32SubHelper(InstructionSelector* selector, Node* node, |
titzer
2016/05/12 13:02:47
If you only factor out the part of the helper rela[… reviewer comment truncated in this capture]
| |
1210 ArmOperandGenerator g(this); | 1210 bool preserveNan) { |
1211 ArmOperandGenerator g(selector); | |
1211 Float32BinopMatcher m(node); | 1212 Float32BinopMatcher m(node); |
1212 if (m.left().IsMinusZero()) { | 1213 if (!preserveNan && m.left().IsMinusZero()) { |
1213 Emit(kArmVnegF32, g.DefineAsRegister(node), | 1214 selector->Emit(kArmVnegF32, g.DefineAsRegister(node), |
1214 g.UseRegister(m.right().node())); | 1215 g.UseRegister(m.right().node())); |
1215 return; | 1216 return; |
1216 } | 1217 } |
1217 if (m.right().IsFloat32Mul() && CanCover(node, m.right().node())) { | 1218 if (m.right().IsFloat32Mul() && selector->CanCover(node, m.right().node())) { |
1218 Float32BinopMatcher mright(m.right().node()); | 1219 Float32BinopMatcher mright(m.right().node()); |
1219 Emit(kArmVmlsF32, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()), | 1220 selector->Emit(kArmVmlsF32, g.DefineSameAsFirst(node), |
1220 g.UseRegister(mright.left().node()), | 1221 g.UseRegister(m.left().node()), |
1221 g.UseRegister(mright.right().node())); | 1222 g.UseRegister(mright.left().node()), |
1223 g.UseRegister(mright.right().node())); | |
1222 return; | 1224 return; |
1223 } | 1225 } |
1224 VisitRRR(this, kArmVsubF32, node); | 1226 VisitRRR(selector, kArmVsubF32, node); |
1225 } | 1227 } |
1226 | 1228 |
1227 | 1229 void VisitFloat64SubHelper(InstructionSelector* selector, Node* node, |
1228 void InstructionSelector::VisitFloat64Sub(Node* node) { | 1230 bool preserveNan) { |
1229 ArmOperandGenerator g(this); | 1231 ArmOperandGenerator g(selector); |
1230 Float64BinopMatcher m(node); | 1232 Float64BinopMatcher m(node); |
1231 if (m.left().IsMinusZero()) { | 1233 if (!preserveNan && m.left().IsMinusZero()) { |
1232 if (m.right().IsFloat64RoundDown() && | 1234 if (m.right().IsFloat64RoundDown() && |
1233 CanCover(m.node(), m.right().node())) { | 1235 selector->CanCover(m.node(), m.right().node())) { |
1234 if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub && | 1236 if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub && |
1235 CanCover(m.right().node(), m.right().InputAt(0))) { | 1237 selector->CanCover(m.right().node(), m.right().InputAt(0))) { |
1236 Float64BinopMatcher mright0(m.right().InputAt(0)); | 1238 Float64BinopMatcher mright0(m.right().InputAt(0)); |
1237 if (mright0.left().IsMinusZero()) { | 1239 if (mright0.left().IsMinusZero()) { |
1238 Emit(kArmVrintpF64, g.DefineAsRegister(node), | 1240 selector->Emit(kArmVrintpF64, g.DefineAsRegister(node), |
1239 g.UseRegister(mright0.right().node())); | 1241 g.UseRegister(mright0.right().node())); |
1240 return; | 1242 return; |
1241 } | 1243 } |
1242 } | 1244 } |
1243 } | 1245 } |
1244 Emit(kArmVnegF64, g.DefineAsRegister(node), | 1246 selector->Emit(kArmVnegF64, g.DefineAsRegister(node), |
1245 g.UseRegister(m.right().node())); | 1247 g.UseRegister(m.right().node())); |
1246 return; | 1248 return; |
1247 } | 1249 } |
1248 if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) { | 1250 if (m.right().IsFloat64Mul() && selector->CanCover(node, m.right().node())) { |
1249 Float64BinopMatcher mright(m.right().node()); | 1251 Float64BinopMatcher mright(m.right().node()); |
1250 Emit(kArmVmlsF64, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()), | 1252 selector->Emit(kArmVmlsF64, g.DefineSameAsFirst(node), |
1251 g.UseRegister(mright.left().node()), | 1253 g.UseRegister(m.left().node()), |
1252 g.UseRegister(mright.right().node())); | 1254 g.UseRegister(mright.left().node()), |
1255 g.UseRegister(mright.right().node())); | |
1253 return; | 1256 return; |
1254 } | 1257 } |
1255 VisitRRR(this, kArmVsubF64, node); | 1258 VisitRRR(selector, kArmVsubF64, node); |
1259 } | |
1260 } // namespace | |
1261 | |
1262 void InstructionSelector::VisitFloat32Sub(Node* node) { | |
1263 VisitFloat32SubHelper(this, node, false); | |
1256 } | 1264 } |
1257 | 1265 |
1266 void InstructionSelector::VisitFloat32SubPreserveNan(Node* node) { | |
1267 VisitFloat32SubHelper(this, node, true); | |
1268 } | |
1269 | |
1270 void InstructionSelector::VisitFloat64Sub(Node* node) { | |
1271 VisitFloat64SubHelper(this, node, false); | |
1272 } | |
1273 | |
1274 void InstructionSelector::VisitFloat64SubPreserveNan(Node* node) { | |
1275 VisitFloat64SubHelper(this, node, true); | |
1276 } | |
1258 | 1277 |
1259 void InstructionSelector::VisitFloat32Mul(Node* node) { | 1278 void InstructionSelector::VisitFloat32Mul(Node* node) { |
1260 VisitRRR(this, kArmVmulF32, node); | 1279 VisitRRR(this, kArmVmulF32, node); |
1261 } | 1280 } |
1262 | 1281 |
1263 | 1282 |
1264 void InstructionSelector::VisitFloat64Mul(Node* node) { | 1283 void InstructionSelector::VisitFloat64Mul(Node* node) { |
1265 VisitRRR(this, kArmVmulF64, node); | 1284 VisitRRR(this, kArmVmulF64, node); |
1266 } | 1285 } |
1267 | 1286 |
(...skipping 622 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1890 MachineOperatorBuilder::kFloat32Max | | 1909 MachineOperatorBuilder::kFloat32Max | |
1891 MachineOperatorBuilder::kFloat64Min | | 1910 MachineOperatorBuilder::kFloat64Min | |
1892 MachineOperatorBuilder::kFloat64Max; | 1911 MachineOperatorBuilder::kFloat64Max; |
1893 } | 1912 } |
1894 return flags; | 1913 return flags; |
1895 } | 1914 } |
1896 | 1915 |
1897 } // namespace compiler | 1916 } // namespace compiler |
1898 } // namespace internal | 1917 } // namespace internal |
1899 } // namespace v8 | 1918 } // namespace v8 |
OLD | NEW |