| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/instruction-selector-impl.h" | 5 #include "src/compiler/instruction-selector-impl.h" |
| 6 #include "src/compiler/node-matchers.h" | 6 #include "src/compiler/node-matchers.h" |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| (...skipping 176 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 187 } else if (commutative && g.CanBeImmediate(left)) { | 187 } else if (commutative && g.CanBeImmediate(left)) { |
| 188 selector->Emit(opcode, g.DefineSameAsFirst(node), g.Use(right), | 188 selector->Emit(opcode, g.DefineSameAsFirst(node), g.Use(right), |
| 189 g.UseImmediate(left)); | 189 g.UseImmediate(left)); |
| 190 } else { | 190 } else { |
| 191 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), | 191 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), |
| 192 g.Use(right)); | 192 g.Use(right)); |
| 193 } | 193 } |
| 194 } | 194 } |
| 195 | 195 |
| 196 | 196 |
// Shared emitter for binops that also produce an overflow bit (e.g.
// Int32AddWithOverflow).  Emits a single instruction with up to two
// outputs: the arithmetic value and, when requested, the overflow
// condition via the flags continuation fields.
static void VisitBinopWithOverflow(InstructionSelector* selector, Node* node,
                                   InstructionCode opcode) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand* inputs[2];
  size_t input_count = 0;
  InstructionOperand* outputs[2];
  size_t output_count = 0;

  // TODO(turbofan): match complex addressing modes.
  // TODO(turbofan): if commutative, pick the non-live-in operand as the left as
  // this might be the last use and therefore its register can be reused.
  if (g.CanBeImmediate(m.right().node())) {
    // Right operand encodes as an immediate; the left may then be any
    // operand kind.
    inputs[input_count++] = g.Use(m.left().node());
    inputs[input_count++] = g.UseImmediate(m.right().node());
  } else {
    // NOTE(review): left is forced into a register, presumably because the
    // x64 two-address form overwrites it with the result (see the
    // DefineSameAsFirst below) — confirm against the code generator.
    inputs[input_count++] = g.UseRegister(m.left().node());
    inputs[input_count++] = g.Use(m.right().node());
  }

  // Define outputs depending on the projections.
  // projections[0] is the value use and projections[1] the overflow use;
  // either slot may be NULL when that projection of the node is unused.
  Node* projections[2];
  node->CollectProjections(ARRAY_SIZE(projections), projections);
  if (projections[0]) {
    // The value result reuses the first input's register.
    outputs[output_count++] = g.DefineSameAsFirst(projections[0]);
  }
  if (projections[1]) {
    // Ask for the flags to be set and the overflow condition materialized.
    opcode |= FlagsModeField::encode(kFlags_set);
    opcode |= FlagsConditionField::encode(kOverflow);
    // If the value output already claimed the same-as-first slot, the
    // overflow bit gets a fresh register; otherwise it takes that slot.
    outputs[output_count++] =
        (projections[0] ? g.DefineAsRegister(projections[1])
                        : g.DefineSameAsFirst(projections[1]));
  }

  // At least one projection must be live (output_count stays 0 otherwise),
  // and neither count may exceed the fixed-size arrays above.
  ASSERT_NE(0, input_count);
  ASSERT_NE(0, output_count);
  ASSERT_GE(ARRAY_SIZE(inputs), input_count);
  ASSERT_GE(ARRAY_SIZE(outputs), output_count);

  selector->Emit(opcode, output_count, outputs, input_count, inputs);
}
| 238 |
| 239 |
| 197 void InstructionSelector::VisitWord32And(Node* node) { | 240 void InstructionSelector::VisitWord32And(Node* node) { |
| 198 VisitBinop(this, node, kX64And32, true); | 241 VisitBinop(this, node, kX64And32, true); |
| 199 } | 242 } |
| 200 | 243 |
| 201 | 244 |
| 202 void InstructionSelector::VisitWord64And(Node* node) { | 245 void InstructionSelector::VisitWord64And(Node* node) { |
| 203 VisitBinop(this, node, kX64And, true); | 246 VisitBinop(this, node, kX64And, true); |
| 204 } | 247 } |
| 205 | 248 |
| 206 | 249 |
| (...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 318 void InstructionSelector::VisitWord64Sar(Node* node) { | 361 void InstructionSelector::VisitWord64Sar(Node* node) { |
| 319 VisitWord64Shift(this, node, kX64Sar); | 362 VisitWord64Shift(this, node, kX64Sar); |
| 320 } | 363 } |
| 321 | 364 |
| 322 | 365 |
| 323 void InstructionSelector::VisitInt32Add(Node* node) { | 366 void InstructionSelector::VisitInt32Add(Node* node) { |
| 324 VisitBinop(this, node, kX64Add32, true); | 367 VisitBinop(this, node, kX64Add32, true); |
| 325 } | 368 } |
| 326 | 369 |
| 327 | 370 |
| 371 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { |
| 372 VisitBinopWithOverflow(this, node, kX64Add32); |
| 373 } |
| 374 |
| 375 |
| 328 void InstructionSelector::VisitInt64Add(Node* node) { | 376 void InstructionSelector::VisitInt64Add(Node* node) { |
| 329 VisitBinop(this, node, kX64Add, true); | 377 VisitBinop(this, node, kX64Add, true); |
| 330 } | 378 } |
| 331 | 379 |
| 332 | 380 |
| 333 template <typename T> | 381 template <typename T> |
| 334 static void VisitSub(InstructionSelector* selector, Node* node, | 382 static void VisitSub(InstructionSelector* selector, Node* node, |
| 335 ArchOpcode sub_opcode, ArchOpcode neg_opcode) { | 383 ArchOpcode sub_opcode, ArchOpcode neg_opcode) { |
| 336 X64OperandGenerator g(selector); | 384 X64OperandGenerator g(selector); |
| 337 BinopMatcher<IntMatcher<T>, IntMatcher<T> > m(node); | 385 BinopMatcher<IntMatcher<T>, IntMatcher<T> > m(node); |
| (...skipping 335 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 673 if (descriptor->kind() == CallDescriptor::kCallAddress && | 721 if (descriptor->kind() == CallDescriptor::kCallAddress && |
| 674 buffer.pushed_count > 0) { | 722 buffer.pushed_count > 0) { |
| 675 ASSERT(deoptimization == NULL && continuation == NULL); | 723 ASSERT(deoptimization == NULL && continuation == NULL); |
| 676 Emit(kPopStack | MiscField::encode(buffer.pushed_count), NULL); | 724 Emit(kPopStack | MiscField::encode(buffer.pushed_count), NULL); |
| 677 } | 725 } |
| 678 } | 726 } |
| 679 | 727 |
| 680 } // namespace compiler | 728 } // namespace compiler |
| 681 } // namespace internal | 729 } // namespace internal |
| 682 } // namespace v8 | 730 } // namespace v8 |
| OLD | NEW |