| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
| 6 #include "src/compiler/instruction-selector-impl.h" | 6 #include "src/compiler/instruction-selector-impl.h" |
| 7 #include "src/compiler/node-matchers.h" | 7 #include "src/compiler/node-matchers.h" |
| 8 #include "src/compiler/node-properties.h" | 8 #include "src/compiler/node-properties.h" |
| 9 #include "src/s390/frames-s390.h" | 9 #include "src/s390/frames-s390.h" |
| 10 | 10 |
| (...skipping 526 matching lines...) |
| 537 case MachineRepresentation::kNone: | 537 case MachineRepresentation::kNone: |
| 538 UNREACHABLE(); | 538 UNREACHABLE(); |
| 539 return; | 539 return; |
| 540 } | 540 } |
| 541 AddressingMode addressingMode = kMode_MRR; | 541 AddressingMode addressingMode = kMode_MRR; |
| 542 Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(), | 542 Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(), |
| 543 g.UseRegister(base), g.UseRegister(offset), | 543 g.UseRegister(base), g.UseRegister(offset), |
| 544 g.UseOperand(length, kUint32Imm), g.UseRegister(value)); | 544 g.UseOperand(length, kUint32Imm), g.UseRegister(value)); |
| 545 } | 545 } |
| 546 | 546 |
| | 547 #if 0 |
| 547 static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) { | 548 static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) { |
| 548 int mask_width = base::bits::CountPopulation32(value); | 549 int mask_width = base::bits::CountPopulation32(value); |
| 549 int mask_msb = base::bits::CountLeadingZeros32(value); | 550 int mask_msb = base::bits::CountLeadingZeros32(value); |
| 550 int mask_lsb = base::bits::CountTrailingZeros32(value); | 551 int mask_lsb = base::bits::CountTrailingZeros32(value); |
| 551 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32)) | 552 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32)) |
| 552 return false; | 553 return false; |
| 553 *mb = mask_lsb + mask_width - 1; | 554 *mb = mask_lsb + mask_width - 1; |
| 554 *me = mask_lsb; | 555 *me = mask_lsb; |
| 555 return true; | 556 return true; |
| 556 } | 557 } |
| | 558 #endif |
| 557 | 559 |
| 558 #if V8_TARGET_ARCH_S390X | 560 #if V8_TARGET_ARCH_S390X |
| 559 static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) { | 561 static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) { |
| 560 int mask_width = base::bits::CountPopulation64(value); | 562 int mask_width = base::bits::CountPopulation64(value); |
| 561 int mask_msb = base::bits::CountLeadingZeros64(value); | 563 int mask_msb = base::bits::CountLeadingZeros64(value); |
| 562 int mask_lsb = base::bits::CountTrailingZeros64(value); | 564 int mask_lsb = base::bits::CountTrailingZeros64(value); |
| 563 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64)) | 565 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64)) |
| 564 return false; | 566 return false; |
| 565 *mb = mask_lsb + mask_width - 1; | 567 *mb = mask_lsb + mask_width - 1; |
| 566 *me = mask_lsb; | 568 *me = mask_lsb; |
| 567 return true; | 569 return true; |
| 568 } | 570 } |
| 569 #endif | 571 #endif |
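Note: the two helpers above (the 32-bit one now dead under `#if 0`, and the 64-bit one still used below) test whether a constant's set bits form one unbroken run and, if so, report the run's highest (`mb`) and lowest (`me`) bit positions, counting from bit 0 at the LSB. A standalone sketch of the same test, not part of this patch, using GCC/Clang builtins as stand-ins for the `base::bits` helpers:

```cpp
#include <cstdint>
#include <cstdio>

// Sketch of IsContiguousMask32: a mask is contiguous when the zeros above
// the run, the run itself, and the zeros below it account for all 32 bits.
static bool IsContiguousMask32Sketch(uint32_t value, int* mb, int* me) {
  if (value == 0) return false;            // the builtins are undefined for 0
  int width = __builtin_popcount(value);   // number of set bits
  int hi_zeros = __builtin_clz(value);     // zeros above the run
  int lo_zeros = __builtin_ctz(value);     // zeros below the run
  if (hi_zeros + width + lo_zeros != 32) return false;
  *mb = lo_zeros + width - 1;              // highest set bit
  *me = lo_zeros;                          // lowest set bit
  return true;
}

int main() {
  int mb, me;
  if (IsContiguousMask32Sketch(0x00FF0000u, &mb, &me))
    printf("0x00FF0000: mb=%d me=%d\n", mb, me);  // mb=23 me=16
  if (!IsContiguousMask32Sketch(0x00FF00FFu, &mb, &me))
    printf("0x00FF00FF: two runs, rejected\n");
  return 0;
}
```

The 64-bit variant applies the same test over 64 bits.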
| 570 | 572 |
| 571 void InstructionSelector::VisitWord32And(Node* node) { | 573 void InstructionSelector::VisitWord32And(Node* node) { |
| 572 S390OperandGenerator g(this); | |
| 573 Int32BinopMatcher m(node); | |
| 574 int mb = 0; | |
| 575 int me = 0; | |
| 576 if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) { | |
| 577 int sh = 0; | |
| 578 Node* left = m.left().node(); | |
| 579 if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) && | |
| 580 CanCover(node, left)) { | |
| 581 Int32BinopMatcher mleft(m.left().node()); | |
| 582 if (mleft.right().IsInRange(0, 31)) { | |
| 583 left = mleft.left().node(); | |
| 584 sh = mleft.right().Value(); | |
| 585 if (m.left().IsWord32Shr()) { | |
| 586 // Adjust the mask such that it doesn't include any rotated bits. | |
| 587 if (mb > 31 - sh) mb = 31 - sh; | |
| 588 sh = (32 - sh) & 0x1f; | |
| 589 } else { | |
| 590 // Adjust the mask such that it doesn't include any rotated bits. | |
| 591 if (me < sh) me = sh; | |
| 592 } | |
| 593 } | |
| 594 } | |
| 595 if (mb >= me) { | |
| 596 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node), | |
| 597 g.UseRegister(left), g.TempImmediate(sh), g.TempImmediate(mb), | |
| 598 g.TempImmediate(me)); | |
| 599 return; | |
| 600 } | |
| 601 } | |
| 602 VisitBinop<Int32BinopMatcher>(this, node, kS390_And32, kUint32Imm); | 574 VisitBinop<Int32BinopMatcher>(this, node, kS390_And32, kUint32Imm); |
| 603 } | 575 } |
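The block removed above folded a `Word32And` of a shifted value with a contiguous mask into a single `kS390_RotLeftAndMask32`. The identity it relied on (also behind the `VisitWord32Shr` removal further down) is that `(x >> sh) & mask` equals rotating `x` left by `(32 - sh) & 0x1f` and keeping only the mask bits, once `mb` is clamped to `31 - sh` so no rotated-in low bits survive. A standalone sketch of that equivalence, not part of this patch:

```cpp
#include <cassert>
#include <cstdint>

static uint32_t RotateLeft32(uint32_t x, int n) {
  n &= 31;
  return n == 0 ? x : (x << n) | (x >> (32 - n));
}

// Keep bits me..mb (inclusive, bit 0 = LSB) and clear everything else.
static uint32_t KeepBits(uint32_t x, int mb, int me) {
  uint32_t high = (mb == 31) ? ~0u : ((1u << (mb + 1)) - 1);
  return x & high & ~((1u << me) - 1);
}

int main() {
  uint32_t x = 0xDEADBEEFu;
  // (x >> 8) & 0xFF: the mask gives mb = 7, me = 0; mb stays 7 since
  // 7 <= 31 - 8, and the rotate amount becomes (32 - 8) & 0x1f = 24.
  uint32_t via_shift = (x >> 8) & 0xFFu;
  uint32_t via_rotate = KeepBits(RotateLeft32(x, 24), 7, 0);
  assert(via_shift == via_rotate);  // both are 0xBE
  return 0;
}
```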
| 604 | 576 |
| 605 #if V8_TARGET_ARCH_S390X | 577 #if V8_TARGET_ARCH_S390X |
| 606 void InstructionSelector::VisitWord64And(Node* node) { | 578 void InstructionSelector::VisitWord64And(Node* node) { |
| 607 S390OperandGenerator g(this); | 579 S390OperandGenerator g(this); |
| 608 Int64BinopMatcher m(node); | 580 Int64BinopMatcher m(node); |
| 609 int mb = 0; | 581 int mb = 0; |
| 610 int me = 0; | 582 int me = 0; |
| 611 if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) { | 583 if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) { |
| (...skipping 71 matching lines...) |
| 683 Int64BinopMatcher m(node); | 655 Int64BinopMatcher m(node); |
| 684 if (m.right().Is(-1)) { | 656 if (m.right().Is(-1)) { |
| 685 Emit(kS390_Not64, g.DefineAsRegister(node), g.UseRegister(m.left().node())); | 657 Emit(kS390_Not64, g.DefineAsRegister(node), g.UseRegister(m.left().node())); |
| 686 } else { | 658 } else { |
| 687 VisitBinop<Int64BinopMatcher>(this, node, kS390_Xor64, kUint32Imm); | 659 VisitBinop<Int64BinopMatcher>(this, node, kS390_Xor64, kUint32Imm); |
| 688 } | 660 } |
| 689 } | 661 } |
| 690 #endif | 662 #endif |
| 691 | 663 |
| 692 void InstructionSelector::VisitWord32Shl(Node* node) { | 664 void InstructionSelector::VisitWord32Shl(Node* node) { |
| 693 S390OperandGenerator g(this); | |
| 694 Int32BinopMatcher m(node); | |
| 695 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) { | |
| 696 Int32BinopMatcher mleft(m.left().node()); | |
| 697 int sh = m.right().Value(); | |
| 698 int mb; | |
| 699 int me; | |
| 700 if (mleft.right().HasValue() && | |
| 701 IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) { | |
| 702 // Adjust the mask such that it doesn't include any rotated bits. | |
| 703 if (me < sh) me = sh; | |
| 704 if (mb >= me) { | |
| 705 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node), | |
| 706 g.UseRegister(mleft.left().node()), g.TempImmediate(sh), | |
| 707 g.TempImmediate(mb), g.TempImmediate(me)); | |
| 708 return; | |
| 709 } | |
| 710 } | |
| 711 } | |
| 712 VisitRRO(this, kS390_ShiftLeft32, node, kShift32Imm); | 665 VisitRRO(this, kS390_ShiftLeft32, node, kShift32Imm); |
| 713 } | 666 } |
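The removed `VisitWord32Shl` pattern is the mirror image: `(x & m) << sh`, when `m << sh` is still a contiguous mask, equals rotating `x` left by `sh` and masking with `m << sh` (with `me` clamped up to `sh` so rotated-in low bits are dropped). A standalone sketch under the same assumptions as above, not part of this patch:

```cpp
#include <cstdint>
#include <cstdio>

static uint32_t RotateLeft32(uint32_t x, int n) {
  n &= 31;
  return n == 0 ? x : (x << n) | (x >> (32 - n));
}

int main() {
  uint32_t x = 0xDEADBEEFu;
  int sh = 8;
  uint32_t m = 0xFFu;                                      // contiguous mask
  uint32_t via_shift = (x & m) << sh;                      // original pattern
  uint32_t via_rotate = RotateLeft32(x, sh) & (m << sh);   // keeps bits 15..8
  printf("%08x %08x\n", via_shift, via_rotate);            // both: 0000ef00
  return 0;
}
```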
| 714 | 667 |
| 715 #if V8_TARGET_ARCH_S390X | 668 #if V8_TARGET_ARCH_S390X |
| 716 void InstructionSelector::VisitWord64Shl(Node* node) { | 669 void InstructionSelector::VisitWord64Shl(Node* node) { |
| 717 S390OperandGenerator g(this); | 670 S390OperandGenerator g(this); |
| 718 Int64BinopMatcher m(node); | 671 Int64BinopMatcher m(node); |
| 719 // TODO(mbrandy): eliminate left sign extension if right >= 32 | 672 // TODO(mbrandy): eliminate left sign extension if right >= 32 |
| 720 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) { | 673 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) { |
| 721 Int64BinopMatcher mleft(m.left().node()); | 674 Int64BinopMatcher mleft(m.left().node()); |
| (...skipping 28 matching lines...) |
| 750 return; | 703 return; |
| 751 } | 704 } |
| 752 } | 705 } |
| 753 } | 706 } |
| 754 } | 707 } |
| 755 VisitRRO(this, kS390_ShiftLeft64, node, kShift64Imm); | 708 VisitRRO(this, kS390_ShiftLeft64, node, kShift64Imm); |
| 756 } | 709 } |
| 757 #endif | 710 #endif |
| 758 | 711 |
| 759 void InstructionSelector::VisitWord32Shr(Node* node) { | 712 void InstructionSelector::VisitWord32Shr(Node* node) { |
| 760 S390OperandGenerator g(this); | |
| 761 Int32BinopMatcher m(node); | |
| 762 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) { | |
| 763 Int32BinopMatcher mleft(m.left().node()); | |
| 764 int sh = m.right().Value(); | |
| 765 int mb; | |
| 766 int me; | |
| 767 if (mleft.right().HasValue() && | |
| 768 IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) { | |
| 769 // Adjust the mask such that it doesn't include any rotated bits. | |
| 770 if (mb > 31 - sh) mb = 31 - sh; | |
| 771 sh = (32 - sh) & 0x1f; | |
| 772 if (mb >= me) { | |
| 773 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node), | |
| 774 g.UseRegister(mleft.left().node()), g.TempImmediate(sh), | |
| 775 g.TempImmediate(mb), g.TempImmediate(me)); | |
| 776 return; | |
| 777 } | |
| 778 } | |
| 779 } | |
| 780 VisitRRO(this, kS390_ShiftRight32, node, kShift32Imm); | 713 VisitRRO(this, kS390_ShiftRight32, node, kShift32Imm); |
| 781 } | 714 } |
| 782 | 715 |
| 783 #if V8_TARGET_ARCH_S390X | 716 #if V8_TARGET_ARCH_S390X |
| 784 void InstructionSelector::VisitWord64Shr(Node* node) { | 717 void InstructionSelector::VisitWord64Shr(Node* node) { |
| 785 S390OperandGenerator g(this); | 718 S390OperandGenerator g(this); |
| 786 Int64BinopMatcher m(node); | 719 Int64BinopMatcher m(node); |
| 787 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) { | 720 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) { |
| 788 Int64BinopMatcher mleft(m.left().node()); | 721 Int64BinopMatcher mleft(m.left().node()); |
| 789 int sh = m.right().Value(); | 722 int sh = m.right().Value(); |
| (...skipping 1323 matching lines...) |
| 2113 // static | 2046 // static |
| 2114 MachineOperatorBuilder::AlignmentRequirements | 2047 MachineOperatorBuilder::AlignmentRequirements |
| 2115 InstructionSelector::AlignmentRequirements() { | 2048 InstructionSelector::AlignmentRequirements() { |
| 2116 return MachineOperatorBuilder::AlignmentRequirements:: | 2049 return MachineOperatorBuilder::AlignmentRequirements:: |
| 2117 FullUnalignedAccessSupport(); | 2050 FullUnalignedAccessSupport(); |
| 2118 } | 2051 } |
| 2119 | 2052 |
| 2120 } // namespace compiler | 2053 } // namespace compiler |
| 2121 } // namespace internal | 2054 } // namespace internal |
| 2122 } // namespace v8 | 2055 } // namespace v8 |