OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/generic-node-inl.h" | 5 #include "src/compiler/generic-node-inl.h" |
6 #include "src/compiler/instruction-selector-impl.h" | 6 #include "src/compiler/instruction-selector-impl.h" |
7 #include "src/compiler/node-matchers.h" | 7 #include "src/compiler/node-matchers.h" |
8 | 8 |
9 namespace v8 { | 9 namespace v8 { |
10 namespace internal { | 10 namespace internal { |
(...skipping 416 matching lines...)
427 } | 427 } |
428 | 428 |
429 } // namespace | 429 } // namespace |
430 | 430 |
431 | 431 |
432 void InstructionSelector::VisitInt32Add(Node* node) { | 432 void InstructionSelector::VisitInt32Add(Node* node) { |
433 // Try to match the Add to a leal pattern | 433 // Try to match the Add to a leal pattern |
434 ScaledWithOffset32Matcher m(node); | 434 ScaledWithOffset32Matcher m(node); |
435 X64OperandGenerator g(this); | 435 X64OperandGenerator g(this); |
436 if (m.matches() && (m.constant() == NULL || g.CanBeImmediate(m.constant()))) { | 436 if (m.matches() && (m.constant() == NULL || g.CanBeImmediate(m.constant()))) { |
| 437 // The add can be represented as a "leal", but there may be a smaller |
| 438 // representation that is better and no more expensive. |
| 439 if (m.offset() != NULL) { |
| 440 if (m.scaled() == NULL) { |
| 441 if (!IsLive(m.offset())) { |
| 442 // If the add is of the form (r1 + immediate) and the non-constant |
| 443 // input to the add is owned by the add, then it doesn't need to be |
| 444 // preserved across the operation, so use more compact, |
| 445 // source-register-overwriting versions when they are available and |
| 446 // smaller, e.g. "incl" and "decl". |
| 447 int32_t value = |
| 448 m.constant() == NULL ? 0 : OpParameter<int32_t>(m.constant()); |
| 449 if (value == 1) { |
| 450 Emit(kX64Inc32, g.DefineSameAsFirst(node), |
| 451 g.UseRegister(m.offset())); |
| 452 return; |
| 453 } else if (value == -1) { |
| 454 Emit(kX64Dec32, g.DefineSameAsFirst(node), |
| 455 g.UseRegister(m.offset())); |
| 456 return; |
| 457 } |
| 458 } |
| 459 } |
| 460 } |
| 461 |
437 InstructionOperand* inputs[4]; | 462 InstructionOperand* inputs[4]; |
438 size_t input_count = 0; | 463 size_t input_count = 0; |
439 | |
440 AddressingMode mode = GenerateMemoryOperandInputs( | 464 AddressingMode mode = GenerateMemoryOperandInputs( |
441 &g, m.scaled(), m.scale_exponent(), m.offset(), m.constant(), inputs, | 465 &g, m.scaled(), m.scale_exponent(), m.offset(), m.constant(), inputs, |
442 &input_count); | 466 &input_count); |
443 | 467 |
444 DCHECK_NE(0, static_cast<int>(input_count)); | 468 DCHECK_NE(0, static_cast<int>(input_count)); |
445 DCHECK_GE(arraysize(inputs), input_count); | 469 DCHECK_GE(arraysize(inputs), input_count); |
446 | 470 |
447 InstructionOperand* outputs[1]; | 471 InstructionOperand* outputs[1]; |
448 outputs[0] = g.DefineAsRegister(node); | 472 outputs[0] = g.DefineAsRegister(node); |
449 | 473 |
(...skipping 11 matching lines...)
461 VisitBinop(this, node, kX64Add); | 485 VisitBinop(this, node, kX64Add); |
462 } | 486 } |
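As a reading aid for the new VisitInt32Add special case above: a minimal standalone sketch of the decision it implements for the (register + constant) shape with no scaled input. The names AddChoice and SelectAddImmediate are illustrative only, not part of this CL, and the encoding sizes in the comments are typical x86-64 figures rather than anything the selector measures.

  #include <cstdint>

  // Which x64 instruction the selector prefers for "dst = reg + imm".
  enum class AddChoice { kInc32, kDec32, kLea32 };

  AddChoice SelectAddImmediate(int32_t imm, bool reg_is_live_afterwards) {
    if (!reg_is_live_afterwards) {
      // The input register may be overwritten, so the two-byte "incl"/"decl"
      // encodings are never larger than the alternatives.
      if (imm == 1) return AddChoice::kInc32;
      if (imm == -1) return AddChoice::kDec32;
    }
    // "leal" (typically three or more bytes) writes a fresh register, so it
    // also covers the case where the input must stay live.
    return AddChoice::kLea32;
  }

This mirrors the IsLive(m.offset()) check and the value == 1 / value == -1 branches in the hunk, with the generic "leal" emission as the fallback.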
463 | 487 |
464 | 488 |
465 void InstructionSelector::VisitInt32Sub(Node* node) { | 489 void InstructionSelector::VisitInt32Sub(Node* node) { |
466 X64OperandGenerator g(this); | 490 X64OperandGenerator g(this); |
467 Int32BinopMatcher m(node); | 491 Int32BinopMatcher m(node); |
468 if (m.left().Is(0)) { | 492 if (m.left().Is(0)) { |
469 Emit(kX64Neg32, g.DefineSameAsFirst(node), g.UseRegister(m.right().node())); | 493 Emit(kX64Neg32, g.DefineSameAsFirst(node), g.UseRegister(m.right().node())); |
470 } else { | 494 } else { |
| 495 if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) { |
| 496 // If the non-constant input is owned by the subtract, using a "decl" or |
| 497 // "incl" that overwrites that input is smaller and probably an overall |
| 498 // win. |
| 499 if (!IsLive(m.left().node())) { |
| 500 if (m.right().Value() == 1) { |
| 501 Emit(kX64Dec32, g.DefineSameAsFirst(node), |
| 502 g.UseRegister(m.left().node())); |
| 503 return; |
| 504 } |
| 505 if (m.right().Value() == -1) { |
| 506 Emit(kX64Inc32, g.DefineSameAsFirst(node), |
| 507 g.UseRegister(m.left().node())); |
| 508 return; |
| 509 } |
| 510 } else { |
| 511 // Special handling for subtraction of a constant where the non-constant |
| 512 // input is used elsewhere: use a "leal" instead, which avoids the gap |
| 513 // move that would copy the input into the sub's destination register. |
| 514 Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI), |
| 515 g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 516 g.TempImmediate(-m.right().Value())); |
| 517 return; |
| 518 } |
| 519 } |
471 VisitBinop(this, node, kX64Sub32); | 520 VisitBinop(this, node, kX64Sub32); |
472 } | 521 } |
473 } | 522 } |
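Similarly, a sketch of the case split VisitInt32Sub now performs once the right operand is known to fit in an immediate; SubChoice and SelectSubImmediate are hypothetical names used only for this illustration.

  #include <cstdint>

  // Which x64 instruction the selector prefers for "dst = reg - imm".
  enum class SubChoice { kDec32, kInc32, kLea32, kSub32 };

  SubChoice SelectSubImmediate(int32_t imm, bool reg_is_live_afterwards) {
    if (!reg_is_live_afterwards) {
      if (imm == 1) return SubChoice::kDec32;   // overwrite the input: "decl"
      if (imm == -1) return SubChoice::kInc32;  // overwrite the input: "incl"
      return SubChoice::kSub32;                 // plain two-address "subl"
    }
    // The input is still needed elsewhere: "leal" with the negated immediate
    // writes a separate destination register and avoids the gap move a
    // two-address "subl" would need to copy the input into its destination.
    return SubChoice::kLea32;
  }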
474 | 523 |
475 | 524 |
476 void InstructionSelector::VisitInt64Sub(Node* node) { | 525 void InstructionSelector::VisitInt64Sub(Node* node) { |
477 X64OperandGenerator g(this); | 526 X64OperandGenerator g(this); |
478 Int64BinopMatcher m(node); | 527 Int64BinopMatcher m(node); |
479 if (m.left().Is(0)) { | 528 if (m.left().Is(0)) { |
480 Emit(kX64Neg, g.DefineSameAsFirst(node), g.UseRegister(m.right().node())); | 529 Emit(kX64Neg, g.DefineSameAsFirst(node), g.UseRegister(m.right().node())); |
(...skipping 690 matching lines...)
1171 return MachineOperatorBuilder::kFloat64Floor | | 1220 return MachineOperatorBuilder::kFloat64Floor | |
1172 MachineOperatorBuilder::kFloat64Ceil | | 1221 MachineOperatorBuilder::kFloat64Ceil | |
1173 MachineOperatorBuilder::kFloat64RoundTruncate; | 1222 MachineOperatorBuilder::kFloat64RoundTruncate; |
1174 } | 1223 } |
1175 return MachineOperatorBuilder::kNoFlags; | 1224 return MachineOperatorBuilder::kNoFlags; |
1176 } | 1225 } |
1177 | 1226 |
1178 } // namespace compiler | 1227 } // namespace compiler |
1179 } // namespace internal | 1228 } // namespace internal |
1180 } // namespace v8 | 1229 } // namespace v8 |