OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <algorithm> | 5 #include <algorithm> |
6 | 6 |
7 #include "src/base/adapters.h" | 7 #include "src/base/adapters.h" |
8 #include "src/compiler/instruction-selector-impl.h" | 8 #include "src/compiler/instruction-selector-impl.h" |
9 #include "src/compiler/node-matchers.h" | 9 #include "src/compiler/node-matchers.h" |
10 #include "src/compiler/node-properties.h" | 10 #include "src/compiler/node-properties.h" |
(...skipping 52 matching lines...)
63 case kX64Test8: | 63 case kX64Test8: |
64 return rep == MachineRepresentation::kWord8; | 64 return rep == MachineRepresentation::kWord8; |
65 default: | 65 default: |
66 break; | 66 break; |
67 } | 67 } |
68 return false; | 68 return false; |
69 } | 69 } |
70 | 70 |
71 AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent, | 71 AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent, |
72 Node* base, Node* displacement, | 72 Node* base, Node* displacement, |
| 73 DisplacementMode displacement_mode, |
73 InstructionOperand inputs[], | 74 InstructionOperand inputs[], |
74 size_t* input_count) { | 75 size_t* input_count) { |
75 AddressingMode mode = kMode_MRI; | 76 AddressingMode mode = kMode_MRI; |
76 if (base != nullptr) { | 77 if (base != nullptr) { |
77 inputs[(*input_count)++] = UseRegister(base); | 78 inputs[(*input_count)++] = UseRegister(base); |
78 if (index != nullptr) { | 79 if (index != nullptr) { |
79 DCHECK(scale_exponent >= 0 && scale_exponent <= 3); | 80 DCHECK(scale_exponent >= 0 && scale_exponent <= 3); |
80 inputs[(*input_count)++] = UseRegister(index); | 81 inputs[(*input_count)++] = UseRegister(index); |
81 if (displacement != nullptr) { | 82 if (displacement != nullptr) { |
82 inputs[(*input_count)++] = UseImmediate(displacement); | 83 inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement |
| 84 ? UseNegatedImmediate(displacement) |
| 85 : UseImmediate(displacement); |
83 static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I, | 86 static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I, |
84 kMode_MR4I, kMode_MR8I}; | 87 kMode_MR4I, kMode_MR8I}; |
85 mode = kMRnI_modes[scale_exponent]; | 88 mode = kMRnI_modes[scale_exponent]; |
86 } else { | 89 } else { |
87 static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2, | 90 static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2, |
88 kMode_MR4, kMode_MR8}; | 91 kMode_MR4, kMode_MR8}; |
89 mode = kMRn_modes[scale_exponent]; | 92 mode = kMRn_modes[scale_exponent]; |
90 } | 93 } |
91 } else { | 94 } else { |
92 if (displacement == nullptr) { | 95 if (displacement == nullptr) { |
93 mode = kMode_MR; | 96 mode = kMode_MR; |
94 } else { | 97 } else { |
95 inputs[(*input_count)++] = UseImmediate(displacement); | 98 inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement |
| 99 ? UseNegatedImmediate(displacement) |
| 100 : UseImmediate(displacement); |
96 mode = kMode_MRI; | 101 mode = kMode_MRI; |
97 } | 102 } |
98 } | 103 } |
99 } else { | 104 } else { |
100 DCHECK_NOT_NULL(index); | 105 DCHECK_NOT_NULL(index); |
101 DCHECK(scale_exponent >= 0 && scale_exponent <= 3); | 106 DCHECK(scale_exponent >= 0 && scale_exponent <= 3); |
102 inputs[(*input_count)++] = UseRegister(index); | 107 inputs[(*input_count)++] = UseRegister(index); |
103 if (displacement != nullptr) { | 108 if (displacement != nullptr) { |
104 inputs[(*input_count)++] = UseImmediate(displacement); | 109 inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement |
| 110 ? UseNegatedImmediate(displacement) |
| 111 : UseImmediate(displacement); |
105 static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I, | 112 static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I, |
106 kMode_M4I, kMode_M8I}; | 113 kMode_M4I, kMode_M8I}; |
107 mode = kMnI_modes[scale_exponent]; | 114 mode = kMnI_modes[scale_exponent]; |
108 } else { | 115 } else { |
109 static const AddressingMode kMn_modes[] = {kMode_MR, kMode_MR1, | 116 static const AddressingMode kMn_modes[] = {kMode_MR, kMode_MR1, |
110 kMode_M4, kMode_M8}; | 117 kMode_M4, kMode_M8}; |
111 mode = kMn_modes[scale_exponent]; | 118 mode = kMn_modes[scale_exponent]; |
112 if (mode == kMode_MR1) { | 119 if (mode == kMode_MR1) { |
113 // [%r1 + %r1*1] has a smaller encoding than [%r1*2+0] | 120 // [%r1 + %r1*1] has a smaller encoding than [%r1*2+0] |
114 inputs[(*input_count)++] = UseRegister(index); | 121 inputs[(*input_count)++] = UseRegister(index); |
115 } | 122 } |
116 } | 123 } |
117 } | 124 } |
118 return mode; | 125 return mode; |
119 } | 126 } |
120 | 127 |
121 AddressingMode GetEffectiveAddressMemoryOperand(Node* operand, | 128 AddressingMode GetEffectiveAddressMemoryOperand(Node* operand, |
122 InstructionOperand inputs[], | 129 InstructionOperand inputs[], |
123 size_t* input_count) { | 130 size_t* input_count) { |
124 BaseWithIndexAndDisplacement64Matcher m(operand, true); | 131 BaseWithIndexAndDisplacement64Matcher m(operand, true); |
125 DCHECK(m.matches()); | 132 DCHECK(m.matches()); |
126 if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) { | 133 if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) { |
127 return GenerateMemoryOperandInputs(m.index(), m.scale(), m.base(), | 134 return GenerateMemoryOperandInputs( |
128 m.displacement(), inputs, input_count); | 135 m.index(), m.scale(), m.base(), m.displacement(), |
| 136 m.displacement_mode(), inputs, input_count); |
129 } else { | 137 } else { |
130 inputs[(*input_count)++] = UseRegister(operand->InputAt(0)); | 138 inputs[(*input_count)++] = UseRegister(operand->InputAt(0)); |
131 inputs[(*input_count)++] = UseRegister(operand->InputAt(1)); | 139 inputs[(*input_count)++] = UseRegister(operand->InputAt(1)); |
132 return kMode_MR1; | 140 return kMode_MR1; |
133 } | 141 } |
134 } | 142 } |
135 | 143 |
136 bool CanBeBetterLeftOperand(Node* node) const { | 144 bool CanBeBetterLeftOperand(Node* node) const { |
137 return !selector()->IsLive(node); | 145 return !selector()->IsLive(node); |
138 } | 146 } |
(...skipping 396 matching lines...)
535 Int64BinopMatcher mright(right); | 543 Int64BinopMatcher mright(right); |
536 if (mright.right().Is(0x3F)) { | 544 if (mright.right().Is(0x3F)) { |
537 right = mright.left().node(); | 545 right = mright.left().node(); |
538 } | 546 } |
539 } | 547 } |
540 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), | 548 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), |
541 g.UseFixed(right, rcx)); | 549 g.UseFixed(right, rcx)); |
542 } | 550 } |
543 } | 551 } |
544 | 552 |
545 | |
546 void EmitLea(InstructionSelector* selector, InstructionCode opcode, | 553 void EmitLea(InstructionSelector* selector, InstructionCode opcode, |
547 Node* result, Node* index, int scale, Node* base, | 554 Node* result, Node* index, int scale, Node* base, |
548 Node* displacement) { | 555 Node* displacement, DisplacementMode displacement_mode) { |
549 X64OperandGenerator g(selector); | 556 X64OperandGenerator g(selector); |
550 | 557 |
551 InstructionOperand inputs[4]; | 558 InstructionOperand inputs[4]; |
552 size_t input_count = 0; | 559 size_t input_count = 0; |
553 AddressingMode mode = g.GenerateMemoryOperandInputs( | 560 AddressingMode mode = |
554 index, scale, base, displacement, inputs, &input_count); | 561 g.GenerateMemoryOperandInputs(index, scale, base, displacement, |
| 562 displacement_mode, inputs, &input_count); |
555 | 563 |
556 DCHECK_NE(0u, input_count); | 564 DCHECK_NE(0u, input_count); |
557 DCHECK_GE(arraysize(inputs), input_count); | 565 DCHECK_GE(arraysize(inputs), input_count); |
558 | 566 |
559 InstructionOperand outputs[1]; | 567 InstructionOperand outputs[1]; |
560 outputs[0] = g.DefineAsRegister(result); | 568 outputs[0] = g.DefineAsRegister(result); |
561 | 569 |
562 opcode = AddressingModeField::encode(mode) | opcode; | 570 opcode = AddressingModeField::encode(mode) | opcode; |
563 | 571 |
564 selector->Emit(opcode, 1, outputs, input_count, inputs); | 572 selector->Emit(opcode, 1, outputs, input_count, inputs); |
565 } | 573 } |
566 | 574 |
567 } // namespace | 575 } // namespace |
568 | 576 |
569 | 577 |
570 void InstructionSelector::VisitWord32Shl(Node* node) { | 578 void InstructionSelector::VisitWord32Shl(Node* node) { |
571 Int32ScaleMatcher m(node, true); | 579 Int32ScaleMatcher m(node, true); |
572 if (m.matches()) { | 580 if (m.matches()) { |
573 Node* index = node->InputAt(0); | 581 Node* index = node->InputAt(0); |
574 Node* base = m.power_of_two_plus_one() ? index : nullptr; | 582 Node* base = m.power_of_two_plus_one() ? index : nullptr; |
575 EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr); | 583 EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr, |
| 584 kPositiveDisplacement); |
576 return; | 585 return; |
577 } | 586 } |
578 VisitWord32Shift(this, node, kX64Shl32); | 587 VisitWord32Shift(this, node, kX64Shl32); |
579 } | 588 } |
580 | 589 |
581 | 590 |
582 void InstructionSelector::VisitWord64Shl(Node* node) { | 591 void InstructionSelector::VisitWord64Shl(Node* node) { |
583 X64OperandGenerator g(this); | 592 X64OperandGenerator g(this); |
584 Int64BinopMatcher m(node); | 593 Int64BinopMatcher m(node); |
585 if ((m.left().IsChangeInt32ToInt64() || m.left().IsChangeUint32ToUint64()) && | 594 if ((m.left().IsChangeInt32ToInt64() || m.left().IsChangeUint32ToUint64()) && |
(...skipping 163 matching lines...)
749 | 758 |
750 | 759 |
751 void InstructionSelector::VisitInt32Add(Node* node) { | 760 void InstructionSelector::VisitInt32Add(Node* node) { |
752 X64OperandGenerator g(this); | 761 X64OperandGenerator g(this); |
753 | 762 |
754 // Try to match the Add to a leal pattern | 763 // Try to match the Add to a leal pattern |
755 BaseWithIndexAndDisplacement32Matcher m(node); | 764 BaseWithIndexAndDisplacement32Matcher m(node); |
756 if (m.matches() && | 765 if (m.matches() && |
757 (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) { | 766 (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) { |
758 EmitLea(this, kX64Lea32, node, m.index(), m.scale(), m.base(), | 767 EmitLea(this, kX64Lea32, node, m.index(), m.scale(), m.base(), |
759 m.displacement()); | 768 m.displacement(), m.displacement_mode()); |
760 return; | 769 return; |
761 } | 770 } |
762 | 771 |
763 // No leal pattern match, use addl | 772 // No leal pattern match, use addl |
764 VisitBinop(this, node, kX64Add32); | 773 VisitBinop(this, node, kX64Add32); |
765 } | 774 } |
766 | 775 |
767 | 776 |
768 void InstructionSelector::VisitInt64Add(Node* node) { | 777 void InstructionSelector::VisitInt64Add(Node* node) { |
769 VisitBinop(this, node, kX64Add); | 778 VisitBinop(this, node, kX64Add); |
(...skipping 104 matching lines...)
874 } | 883 } |
875 | 884 |
876 } // namespace | 885 } // namespace |
877 | 886 |
878 | 887 |
879 void InstructionSelector::VisitInt32Mul(Node* node) { | 888 void InstructionSelector::VisitInt32Mul(Node* node) { |
880 Int32ScaleMatcher m(node, true); | 889 Int32ScaleMatcher m(node, true); |
881 if (m.matches()) { | 890 if (m.matches()) { |
882 Node* index = node->InputAt(0); | 891 Node* index = node->InputAt(0); |
883 Node* base = m.power_of_two_plus_one() ? index : nullptr; | 892 Node* base = m.power_of_two_plus_one() ? index : nullptr; |
884 EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr); | 893 EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr, |
| 894 kPositiveDisplacement); |
885 return; | 895 return; |
886 } | 896 } |
887 VisitMul(this, node, kX64Imul32); | 897 VisitMul(this, node, kX64Imul32); |
888 } | 898 } |
889 | 899 |
890 | 900 |
891 void InstructionSelector::VisitInt64Mul(Node* node) { | 901 void InstructionSelector::VisitInt64Mul(Node* node) { |
892 VisitMul(this, node, kX64Imul); | 902 VisitMul(this, node, kX64Imul); |
893 } | 903 } |
894 | 904 |
(...skipping 1241 matching lines...)
2136 // static | 2146 // static |
2137 MachineOperatorBuilder::AlignmentRequirements | 2147 MachineOperatorBuilder::AlignmentRequirements |
2138 InstructionSelector::AlignmentRequirements() { | 2148 InstructionSelector::AlignmentRequirements() { |
2139 return MachineOperatorBuilder::AlignmentRequirements:: | 2149 return MachineOperatorBuilder::AlignmentRequirements:: |
2140 FullUnalignedAccessSupport(); | 2150 FullUnalignedAccessSupport(); |
2141 } | 2151 } |
2142 | 2152 |
2143 } // namespace compiler | 2153 } // namespace compiler |
2144 } // namespace internal | 2154 } // namespace internal |
2145 } // namespace v8 | 2155 } // namespace v8 |