| Index: src/compiler/mips64/instruction-selector-mips64.cc
|
| diff --git a/src/compiler/mips64/instruction-selector-mips64.cc b/src/compiler/mips64/instruction-selector-mips64.cc
|
| index 6cb337f97355b639b972590526626fb7c0f17911..2a0e9c9d225b0053ab41fa41b6a23478c62e6b34 100644
|
| --- a/src/compiler/mips64/instruction-selector-mips64.cc
|
| +++ b/src/compiler/mips64/instruction-selector-mips64.cc
|
| @@ -269,17 +269,16 @@ void InstructionSelector::VisitWord32And(Node* node) {
|
| uint32_t mask = m.right().Value();
|
| uint32_t mask_width = base::bits::CountPopulation32(mask);
|
| uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
|
| - if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
|
| - // The mask must be contiguous, and occupy the least-significant bits.
|
| - DCHECK_EQ(0u, base::bits::CountTrailingZeros32(mask));
|
| -
|
| - // Select Ext for And(Shr(x, imm), mask) where the mask is in the least
|
| - // significant bits.
|
| + uint32_t mask_lsb = base::bits::CountTrailingZeros32(mask);
|
| + if ((mask_width != 0) && (mask_msb + mask_width + mask_lsb == 32)) {
|
| + // The mask must be contiguous (one run of set bits), but need not start at bit 0.
|
| + // Select Ext for And(Shr(x, imm), mask) where the contiguous mask may
|
| + // sit at any bit position, not only in the least-significant bits.
|
| Int32BinopMatcher mleft(m.left().node());
|
| if (mleft.right().HasValue()) {
|
| // Any shift value can match; int32 shifts use `value % 32`.
|
| uint32_t lsb = mleft.right().Value() & 0x1f;
|
| -
|
| + lsb = lsb + mask_lsb;
|
| // Ext cannot extract bits past the register size, however since
|
| // shifting the original value would have introduced some zeros we can
|
| // still use Ext with a smaller mask and the remaining bits will be
|
| @@ -319,23 +318,21 @@ void InstructionSelector::VisitWord64And(Node* node) {
|
| uint64_t mask = m.right().Value();
|
| uint32_t mask_width = base::bits::CountPopulation64(mask);
|
| uint32_t mask_msb = base::bits::CountLeadingZeros64(mask);
|
| - if ((mask_width != 0) && (mask_msb + mask_width == 64)) {
|
| - // The mask must be contiguous, and occupy the least-significant bits.
|
| - DCHECK_EQ(0u, base::bits::CountTrailingZeros64(mask));
|
| -
|
| - // Select Dext for And(Shr(x, imm), mask) where the mask is in the least
|
| - // significant bits.
|
| + uint32_t mask_lsb = base::bits::CountTrailingZeros64(mask);
|
| + if ((mask_width != 0) && (mask_msb + mask_width + mask_lsb == 64)) {
|
| + // The mask must be contiguous (one run of set bits), but need not start at bit 0.
|
| + // Select Dext for And(Shr(x, imm), mask) where the contiguous mask may
|
| + // sit at any bit position, not only in the least-significant bits.
|
| Int64BinopMatcher mleft(m.left().node());
|
| if (mleft.right().HasValue()) {
|
| // Any shift value can match; int64 shifts use `value % 64`.
|
| uint32_t lsb = static_cast<uint32_t>(mleft.right().Value() & 0x3f);
|
| -
|
| + lsb = lsb + mask_lsb;
|
| // Dext cannot extract bits past the register size, however since
|
| // shifting the original value would have introduced some zeros we can
|
| // still use Dext with a smaller mask and the remaining bits will be
|
| // zeros.
|
| if (lsb + mask_width > 64) mask_width = 64 - lsb;
|
| -
|
| Emit(kMips64Dext, g.DefineAsRegister(node),
|
| g.UseRegister(mleft.left().node()), g.TempImmediate(lsb),
|
| g.TempImmediate(static_cast<int32_t>(mask_width)));
|
|
|