Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <algorithm> | 5 #include <algorithm> |
| 6 | 6 |
| 7 #include "src/base/adapters.h" | 7 #include "src/base/adapters.h" |
| 8 #include "src/compiler/instruction-selector-impl.h" | 8 #include "src/compiler/instruction-selector-impl.h" |
| 9 #include "src/compiler/node-matchers.h" | 9 #include "src/compiler/node-matchers.h" |
| 10 #include "src/compiler/node-properties.h" | 10 #include "src/compiler/node-properties.h" |
| (...skipping 45 matching lines...) | |
| 56 return rep == MachineRepresentation::kWord16; | 56 return rep == MachineRepresentation::kWord16; |
| 57 case kX64Cmp8: | 57 case kX64Cmp8: |
| 58 case kX64Test8: | 58 case kX64Test8: |
| 59 return rep == MachineRepresentation::kWord8; | 59 return rep == MachineRepresentation::kWord8; |
| 60 default: | 60 default: |
| 61 break; | 61 break; |
| 62 } | 62 } |
| 63 return false; | 63 return false; |
| 64 } | 64 } |
| 65 | 65 |
| 66 AddressingMode EnsureHasDisplacement( | |
| 67 const BaseWithIndexAndDisplacement64Matcher& matcher, | |
| 68 AddressingMode mode) { | |
| 69 if (matcher.displacement() != nullptr) { | |
| 70 return mode; | |
| 71 } | |
| 72 DCHECK(mode == kMode_MR || (kMode_MR1 <= mode && mode <= kMode_MR8) || | |
| Benedikt Meurer (2016/04/01 08:33:00): As discussed offline, please do a switch here. And | |
| epertoso (2016/04/01 09:16:44): Done. | |
| 73 (kMode_M4 == mode || kMode_M8 == mode)); | |
| 74 return mode == kMode_MR ? kMode_MRI : static_cast<AddressingMode>(mode + 4); | |
| 75 } | |
| 76 | |
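The review thread above asks for a switch in `EnsureHasDisplacement` in place of the DCHECK plus `mode + 4` arithmetic, which silently depends on the layout of the `AddressingMode` enum. A minimal sketch of what that switch could look like, written against the x64 addressing-mode names in `instruction-codes-x64.h` (`kMode_MR1I` … `kMode_M8I`); the version that actually landed in a later patch set may differ:

```cpp
// Sketch only, not the landed patch. Maps each displacement-less addressing
// mode produced by the matcher to the corresponding mode that carries an
// immediate displacement, making the legal inputs explicit instead of
// relying on the "+ 4" distance between the two enum groups.
AddressingMode EnsureHasDisplacement(
    const BaseWithIndexAndDisplacement64Matcher& matcher,
    AddressingMode mode) {
  if (matcher.displacement() != nullptr) return mode;
  switch (mode) {
    case kMode_MR:
      return kMode_MRI;
    case kMode_MR1:
      return kMode_MR1I;
    case kMode_MR2:
      return kMode_MR2I;
    case kMode_MR4:
      return kMode_MR4I;
    case kMode_MR8:
      return kMode_MR8I;
    case kMode_M4:
      return kMode_M4I;
    case kMode_M8:
      return kMode_M8I;
    default:
      UNREACHABLE();  // The matcher never produces the remaining modes here.
      return mode;
  }
}
```

Spelling the modes out makes the legal inputs visible at a glance and removes the coupling to the enum ordering that the original arithmetic relied on.

| OLD | NEW |
|---|---|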
| 66 AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent, | 77 AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent, |
| 67 Node* base, Node* displacement, | 78 Node* base, Node* displacement, |
| 68 InstructionOperand inputs[], | 79 InstructionOperand inputs[], |
| 69 size_t* input_count) { | 80 size_t* input_count) { |
| 70 AddressingMode mode = kMode_MRI; | 81 AddressingMode mode = kMode_MRI; |
| 71 if (base != nullptr) { | 82 if (base != nullptr) { |
| 72 inputs[(*input_count)++] = UseRegister(base); | 83 inputs[(*input_count)++] = UseRegister(base); |
| 73 if (index != nullptr) { | 84 if (index != nullptr) { |
| 74 DCHECK(scale_exponent >= 0 && scale_exponent <= 3); | 85 DCHECK(scale_exponent >= 0 && scale_exponent <= 3); |
| 75 inputs[(*input_count)++] = UseRegister(index); | 86 inputs[(*input_count)++] = UseRegister(index); |
| (...skipping 535 matching lines...) | |
| 611 } else if (mleft.right().Is(24) && m.right().Is(24)) { | 622 } else if (mleft.right().Is(24) && m.right().Is(24)) { |
| 612 Emit(kX64Movsxbl, g.DefineAsRegister(node), g.Use(mleft.left().node())); | 623 Emit(kX64Movsxbl, g.DefineAsRegister(node), g.Use(mleft.left().node())); |
| 613 return; | 624 return; |
| 614 } | 625 } |
| 615 } | 626 } |
| 616 VisitWord32Shift(this, node, kX64Sar32); | 627 VisitWord32Shift(this, node, kX64Sar32); |
| 617 } | 628 } |
| 618 | 629 |
| 619 | 630 |
| 620 void InstructionSelector::VisitWord64Sar(Node* node) { | 631 void InstructionSelector::VisitWord64Sar(Node* node) { |
| 632 X64OperandGenerator g(this); | |
| 633 Int64BinopMatcher m(node); | |
| 634 if (CanCover(m.node(), m.left().node()) && m.left().IsLoad() && | |
| 635 m.right().Is(32)) { | |
| 636 // Just load and sign-extend the interesting 4 bytes instead. This happens, | |
| 637 // for example, when we're loading and untagging SMIs. | |
| 638 BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(), true); | |
| 639 if (mleft.matches() && (mleft.displacement() == nullptr || | |
| 640 g.CanBeImmediate(mleft.displacement()))) { | |
| 641 size_t input_count = 0; | |
| 642 InstructionOperand inputs[3]; | |
| 643 AddressingMode mode = g.GetEffectiveAddressMemoryOperand( | |
| 644 m.left().node(), inputs, &input_count); | |
| 645 mode = g.EnsureHasDisplacement(mleft, mode); | |
| 646 if (mleft.displacement() == nullptr) { | |
| 647 inputs[input_count++] = ImmediateOperand(ImmediateOperand::INLINE, 4); | |
| 648 } else { | |
| 649 ImmediateOperand* op = ImmediateOperand::cast(&inputs[input_count - 1]); | |
| 650 int32_t displacement = sequence()->GetImmediate(op).ToInt32(); | |
| 651 *op = ImmediateOperand(ImmediateOperand::INLINE, displacement + 4); | |
| 652 } | |
| 653 InstructionOperand outputs[] = {g.DefineAsRegister(node)}; | |
| 654 InstructionCode code = kX64Movsxlq | AddressingModeField::encode(mode); | |
| 655 Emit(code, 1, outputs, input_count, inputs); | |
| 656 return; | |
| 657 } | |
| 658 } | |
| 621 VisitWord64Shift(this, node, kX64Sar); | 659 VisitWord64Shift(this, node, kX64Sar); |
| 622 } | 660 } |
| 623 | 661 |
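The new branch in `VisitWord64Sar` above rewrites `Word64Sar(Load(...), 32)` (the pattern produced when loading and untagging a SMI) into a single `kX64Movsxlq` whose displacement is bumped by 4, or, when the matched address had no displacement, given an inline immediate of 4. A standalone sketch, not V8 code and with illustrative names only, of why that rewrite is sound on little-endian x64:

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>

// What the unoptimized sequence computes: an 8-byte load followed by a right
// shift by 32 (">>" on a negative int64_t is an arithmetic shift on the x64
// compilers relevant here, matching the machine-level sar).
int64_t LoadThenSar32(const void* p) {
  int64_t v;
  std::memcpy(&v, p, sizeof v);
  return v >> 32;
}

// What the selected kX64Movsxlq computes: a 4-byte load from displacement + 4
// (the upper half of the value on a little-endian machine), sign-extended to
// 64 bits.
int64_t NarrowSignExtendingLoad(const void* p) {
  int32_t hi;
  std::memcpy(&hi, static_cast<const char*>(p) + 4, sizeof hi);
  return static_cast<int64_t>(hi);
}

int main() {
  const int64_t samples[] = {0, -1, INT64_MIN, INT64_MAX, 0x0000000100000000};
  for (int64_t s : samples) {
    assert(LoadThenSar32(&s) == NarrowSignExtendingLoad(&s));
  }
  return 0;
}
```

The same observation explains the `displacement + 4` adjustment applied to the existing immediate operand in the code above.

| OLD | NEW |
|---|---|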
| 624 | 662 |
| 625 void InstructionSelector::VisitWord32Ror(Node* node) { | 663 void InstructionSelector::VisitWord32Ror(Node* node) { |
| 626 VisitWord32Shift(this, node, kX64Ror32); | 664 VisitWord32Shift(this, node, kX64Ror32); |
| 627 } | 665 } |
| 628 | 666 |
| 629 | 667 |
| 630 void InstructionSelector::VisitWord64Ror(Node* node) { | 668 void InstructionSelector::VisitWord64Ror(Node* node) { |
| (...skipping 1341 matching lines...) | |
| 1972 MachineOperatorBuilder::kFloat64RoundTruncate | | 2010 MachineOperatorBuilder::kFloat64RoundTruncate | |
| 1973 MachineOperatorBuilder::kFloat32RoundTiesEven | | 2011 MachineOperatorBuilder::kFloat32RoundTiesEven | |
| 1974 MachineOperatorBuilder::kFloat64RoundTiesEven; | 2012 MachineOperatorBuilder::kFloat64RoundTiesEven; |
| 1975 } | 2013 } |
| 1976 return flags; | 2014 return flags; |
| 1977 } | 2015 } |
| 1978 | 2016 |
| 1979 } // namespace compiler | 2017 } // namespace compiler |
| 1980 } // namespace internal | 2018 } // namespace internal |
| 1981 } // namespace v8 | 2019 } // namespace v8 |