OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/instruction-selector-impl.h" | 5 #include "src/compiler/instruction-selector-impl.h" |
6 #include "src/compiler/node-matchers.h" | 6 #include "src/compiler/node-matchers.h" |
7 #include "src/compiler/node-properties.h" | 7 #include "src/compiler/node-properties.h" |
8 | 8 |
9 namespace v8 { | 9 namespace v8 { |
10 namespace internal { | 10 namespace internal { |
(...skipping 642 matching lines...) | |
653 // 32 bits anyway. | 653 // 32 bits anyway. |
654 Emit(kArm64Lsl, g.DefineAsRegister(node), | 654 Emit(kArm64Lsl, g.DefineAsRegister(node), |
655 g.UseRegister(m.left().node()->InputAt(0)), | 655 g.UseRegister(m.left().node()->InputAt(0)), |
656 g.UseImmediate(m.right().node())); | 656 g.UseImmediate(m.right().node())); |
657 return; | 657 return; |
658 } | 658 } |
659 VisitRRO(this, kArm64Lsl, node, kShift64Imm); | 659 VisitRRO(this, kArm64Lsl, node, kShift64Imm); |
660 } | 660 } |
661 | 661 |
662 | 662 |
663 bool TryEmitBitfieldExtract32(InstructionSelector* selector, Node* node) { | |
Benedikt Meurer (2015/05/08 04:04:59): Nit: put this function into an anonymous namespace | |
664 Arm64OperandGenerator g(selector); | |
665 Int32BinopMatcher m(node); | |
666 if (selector->CanCover(node, m.left().node()) && m.left().IsWord32Shl()) { | |
667 // Select Ubfx or Sbfx for (x << (K & 0x1f)) OP (K & 0x1f), where | |
668 // OP is >>> or >> and (K & 0x1f) != 0. | |
669 Int32BinopMatcher mleft(m.left().node()); | |
670 if (mleft.right().HasValue() && m.right().HasValue() && | |
671 (mleft.right().Value() & 0x1f) == (m.right().Value() & 0x1f)) { | |
672 DCHECK(m.IsWord32Shr() || m.IsWord32Sar()); | |
673 ArchOpcode opcode = m.IsWord32Sar() ? kArm64Sbfx32 : kArm64Ubfx32; | |
674 | |
675 int right_val = m.right().Value() & 0x1f; | |
676 DCHECK_NE(right_val, 0); | |
677 | |
678 selector->Emit(opcode, g.DefineAsRegister(node), | |
679 g.UseRegister(mleft.left().node()), g.TempImmediate(0), | |
680 g.TempImmediate(32 - right_val)); | |
681 return true; | |
682 } | |
683 } | |
684 return false; | |
685 } | |
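
A minimal sketch of the reviewer's nit above, assuming TryEmitBitfieldExtract32 has no callers outside this translation unit: the same helper wrapped in an unnamed namespace so it gets internal linkage.

```cpp
// Sketch only: the helper from the CL, moved into an anonymous namespace as
// suggested in the review comment. The body is unchanged from the new code above.
namespace {

bool TryEmitBitfieldExtract32(InstructionSelector* selector, Node* node) {
  Arm64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  if (selector->CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
    // Select Ubfx or Sbfx for (x << (K & 0x1f)) OP (K & 0x1f), where
    // OP is >>> or >> and (K & 0x1f) != 0.
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue() && m.right().HasValue() &&
        (mleft.right().Value() & 0x1f) == (m.right().Value() & 0x1f)) {
      DCHECK(m.IsWord32Shr() || m.IsWord32Sar());
      ArchOpcode opcode = m.IsWord32Sar() ? kArm64Sbfx32 : kArm64Ubfx32;

      int right_val = m.right().Value() & 0x1f;
      DCHECK_NE(right_val, 0);

      selector->Emit(opcode, g.DefineAsRegister(node),
                     g.UseRegister(mleft.left().node()), g.TempImmediate(0),
                     g.TempImmediate(32 - right_val));
      return true;
    }
  }
  return false;
}

}  // namespace
```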
686 | |
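
For reference, a worked instance of the pattern the helper matches; the shift amounts are chosen for illustration and are not taken from the CL.

```cpp
// (x << 24) >> 24  (arithmetic shift right):
//   right_val = 24 & 0x1f = 24, opcode = kArm64Sbfx32
//   emits  Sbfx32 dst, x, #0, #(32 - 24)  ->  sign-extends the low 8 bits of x.
//
// (x << 16) >>> 16  (logical shift right):
//   right_val = 16, opcode = kArm64Ubfx32
//   emits  Ubfx32 dst, x, #0, #16  ->  zero-extends the low 16 bits of x.
```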
687 | |
663 void InstructionSelector::VisitWord32Shr(Node* node) { | 688 void InstructionSelector::VisitWord32Shr(Node* node) { |
664 Arm64OperandGenerator g(this); | |
665 Int32BinopMatcher m(node); | 689 Int32BinopMatcher m(node); |
666 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) { | 690 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) { |
667 uint32_t lsb = m.right().Value(); | 691 uint32_t lsb = m.right().Value(); |
668 Int32BinopMatcher mleft(m.left().node()); | 692 Int32BinopMatcher mleft(m.left().node()); |
669 if (mleft.right().HasValue()) { | 693 if (mleft.right().HasValue()) { |
670 uint32_t mask = (mleft.right().Value() >> lsb) << lsb; | 694 uint32_t mask = (mleft.right().Value() >> lsb) << lsb; |
671 uint32_t mask_width = base::bits::CountPopulation32(mask); | 695 uint32_t mask_width = base::bits::CountPopulation32(mask); |
672 uint32_t mask_msb = base::bits::CountLeadingZeros32(mask); | 696 uint32_t mask_msb = base::bits::CountLeadingZeros32(mask); |
673 // Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is | 697 // Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is |
674 // shifted into the least-significant bits. | 698 // shifted into the least-significant bits. |
675 if ((mask_msb + mask_width + lsb) == 32) { | 699 if ((mask_msb + mask_width + lsb) == 32) { |
700 Arm64OperandGenerator g(this); | |
676 DCHECK_EQ(lsb, base::bits::CountTrailingZeros32(mask)); | 701 DCHECK_EQ(lsb, base::bits::CountTrailingZeros32(mask)); |
677 Emit(kArm64Ubfx32, g.DefineAsRegister(node), | 702 Emit(kArm64Ubfx32, g.DefineAsRegister(node), |
678 g.UseRegister(mleft.left().node()), g.TempImmediate(lsb), | 703 g.UseRegister(mleft.left().node()), g.TempImmediate(lsb), |
679 g.TempImmediate(mask_width)); | 704 g.TempImmediate(mask_width)); |
680 return; | 705 return; |
681 } | 706 } |
682 } | 707 } |
708 } else if (TryEmitBitfieldExtract32(this, node)) { | |
709 return; | |
683 } | 710 } |
684 VisitRRO(this, kArm64Lsr32, node, kShift32Imm); | 711 VisitRRO(this, kArm64Lsr32, node, kShift32Imm); |
685 } | 712 } |
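
A worked instance of the Shr(And(x, mask), imm) case handled above, with an illustrative mask and shift not taken from the CL.

```cpp
// Shr(And(x, 0x0000ff00), 8):
//   lsb = 8, mask = (0xff00 >> 8) << 8 = 0xff00
//   mask_width = 8 (popcount), mask_msb = 16 (leading zeros), 16 + 8 + 8 == 32
//   emits  Ubfx32 dst, x, #8, #8  ->  extracts bits [15:8] of x into dst.
```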
686 | 713 |
687 | 714 |
688 void InstructionSelector::VisitWord64Shr(Node* node) { | 715 void InstructionSelector::VisitWord64Shr(Node* node) { |
689 Arm64OperandGenerator g(this); | 716 Arm64OperandGenerator g(this); |
690 Int64BinopMatcher m(node); | 717 Int64BinopMatcher m(node); |
691 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) { | 718 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) { |
692 uint64_t lsb = m.right().Value(); | 719 uint64_t lsb = m.right().Value(); |
(...skipping 11 matching lines...) | |
704 g.TempImmediate(mask_width)); | 731 g.TempImmediate(mask_width)); |
705 return; | 732 return; |
706 } | 733 } |
707 } | 734 } |
708 } | 735 } |
709 VisitRRO(this, kArm64Lsr, node, kShift64Imm); | 736 VisitRRO(this, kArm64Lsr, node, kShift64Imm); |
710 } | 737 } |
711 | 738 |
712 | 739 |
713 void InstructionSelector::VisitWord32Sar(Node* node) { | 740 void InstructionSelector::VisitWord32Sar(Node* node) { |
714 Arm64OperandGenerator g(this); | 741 if (TryEmitBitfieldExtract32(this, node)) { |
715 Int32BinopMatcher m(node); | 742 return; |
716 // Select Sxth/Sxtb for (x << K) >> K where K is 16 or 24. | |
717 if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) { | |
718 Int32BinopMatcher mleft(m.left().node()); | |
719 if (mleft.right().Is(16) && m.right().Is(16)) { | |
720 Emit(kArm64Sxth32, g.DefineAsRegister(node), | |
721 g.UseRegister(mleft.left().node())); | |
722 return; | |
723 } else if (mleft.right().Is(24) && m.right().Is(24)) { | |
724 Emit(kArm64Sxtb32, g.DefineAsRegister(node), | |
725 g.UseRegister(mleft.left().node())); | |
726 return; | |
727 } | |
728 } | 743 } |
729 VisitRRO(this, kArm64Asr32, node, kShift32Imm); | 744 VisitRRO(this, kArm64Asr32, node, kShift32Imm); |
730 } | 745 } |
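
One possible reading of the VisitWord32Sar change, based only on the two versions shown here: the Sxth32/Sxtb32 special cases are subsumed by the shared helper, which also covers other equal, non-zero shift amounts.

```cpp
// Old selection:                      New selection (via TryEmitBitfieldExtract32):
//   (x << 16) >> 16  ->  Sxth32 x       (x << 16) >> 16  ->  Sbfx32 x, #0, #16
//   (x << 24) >> 24  ->  Sxtb32 x       (x << 24) >> 24  ->  Sbfx32 x, #0, #8
//   other equal K    ->  Lsl + Asr      (x << K)  >> K   ->  Sbfx32 x, #0, #(32 - K),
//                                                            for any (K & 0x1f) != 0
```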
731 | 746 |
732 | 747 |
733 void InstructionSelector::VisitWord64Sar(Node* node) { | 748 void InstructionSelector::VisitWord64Sar(Node* node) { |
734 VisitRRO(this, kArm64Asr, node, kShift64Imm); | 749 VisitRRO(this, kArm64Asr, node, kShift64Imm); |
735 } | 750 } |
736 | 751 |
737 | 752 |
(...skipping 1143 matching lines...) | |
1881 MachineOperatorBuilder::kFloat64RoundTruncate | | 1896 MachineOperatorBuilder::kFloat64RoundTruncate | |
1882 MachineOperatorBuilder::kFloat64RoundTiesAway | | 1897 MachineOperatorBuilder::kFloat64RoundTiesAway | |
1883 MachineOperatorBuilder::kWord32ShiftIsSafe | | 1898 MachineOperatorBuilder::kWord32ShiftIsSafe | |
1884 MachineOperatorBuilder::kInt32DivIsSafe | | 1899 MachineOperatorBuilder::kInt32DivIsSafe | |
1885 MachineOperatorBuilder::kUint32DivIsSafe; | 1900 MachineOperatorBuilder::kUint32DivIsSafe; |
1886 } | 1901 } |
1887 | 1902 |
1888 } // namespace compiler | 1903 } // namespace compiler |
1889 } // namespace internal | 1904 } // namespace internal |
1890 } // namespace v8 | 1905 } // namespace v8 |