OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
6 #include "src/base/bits.h" | 6 #include "src/base/bits.h" |
7 #include "src/compiler/instruction-selector-impl.h" | 7 #include "src/compiler/instruction-selector-impl.h" |
8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
9 #include "src/compiler/node-properties.h" | 9 #include "src/compiler/node-properties.h" |
10 | 10 |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
52 bool CanBeImmediate(int64_t value, InstructionCode opcode) { | 52 bool CanBeImmediate(int64_t value, InstructionCode opcode) { |
53 switch (ArchOpcodeField::decode(opcode)) { | 53 switch (ArchOpcodeField::decode(opcode)) { |
54 case kMips64Shl: | 54 case kMips64Shl: |
55 case kMips64Sar: | 55 case kMips64Sar: |
56 case kMips64Shr: | 56 case kMips64Shr: |
57 return is_uint5(value); | 57 return is_uint5(value); |
58 case kMips64Dshl: | 58 case kMips64Dshl: |
59 case kMips64Dsar: | 59 case kMips64Dsar: |
60 case kMips64Dshr: | 60 case kMips64Dshr: |
61 return is_uint6(value); | 61 return is_uint6(value); |
| 62 case kMips64Add: |
| 63 case kMips64And32: |
| 64 case kMips64And: |
| 65 case kMips64Dadd: |
| 66 case kMips64Or32: |
| 67 case kMips64Or: |
62 case kMips64Xor: | 68 case kMips64Xor: |
63 return is_uint16(value); | 69 return is_uint16(value); |
64 case kMips64Ldc1: | 70 case kMips64Ldc1: |
65 case kMips64Sdc1: | 71 case kMips64Sdc1: |
66 return is_int16(value + kIntSize); | 72 return is_int16(value + kIntSize); |
67 default: | 73 default: |
68 return is_int16(value); | 74 return is_int16(value); |
69 } | 75 } |
70 } | 76 } |
71 | 77 |
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
166 DCHECK(is_int32(m.immediate())); | 172 DCHECK(is_int32(m.immediate())); |
167 inputs[1] = g.TempImmediate(static_cast<int32_t>(m.immediate())); | 173 inputs[1] = g.TempImmediate(static_cast<int32_t>(m.immediate())); |
168 InstructionOperand outputs[] = {g.DefineAsRegister(node)}; | 174 InstructionOperand outputs[] = {g.DefineAsRegister(node)}; |
169 selector->Emit(opcode, arraysize(outputs), outputs, arraysize(inputs), | 175 selector->Emit(opcode, arraysize(outputs), outputs, arraysize(inputs), |
170 inputs); | 176 inputs); |
171 return true; | 177 return true; |
172 } | 178 } |
173 return false; | 179 return false; |
174 } | 180 } |
175 | 181 |
| 182 bool TryMatchImmediate(InstructionSelector* selector, |
| 183 InstructionCode* opcode_return, Node* node, |
| 184 size_t* input_count_return, InstructionOperand* inputs) { |
| 185 Mips64OperandGenerator g(selector); |
| 186 if (g.CanBeImmediate(node, *opcode_return)) { |
| 187 *opcode_return |= AddressingModeField::encode(kMode_MRI); |
| 188 inputs[0] = g.UseImmediate(node); |
| 189 *input_count_return = 1; |
| 190 return true; |
| 191 } |
| 192 return false; |
| 193 } |
| 194 |
176 static void VisitBinop(InstructionSelector* selector, Node* node, | 195 static void VisitBinop(InstructionSelector* selector, Node* node, |
177 InstructionCode opcode, FlagsContinuation* cont) { | 196 InstructionCode opcode, bool has_reverse_opcode, |
| 197 InstructionCode reverse_opcode, |
| 198 FlagsContinuation* cont) { |
178 Mips64OperandGenerator g(selector); | 199 Mips64OperandGenerator g(selector); |
179 Int32BinopMatcher m(node); | 200 Int32BinopMatcher m(node); |
180 InstructionOperand inputs[4]; | 201 InstructionOperand inputs[4]; |
181 size_t input_count = 0; | 202 size_t input_count = 0; |
182 InstructionOperand outputs[2]; | 203 InstructionOperand outputs[2]; |
183 size_t output_count = 0; | 204 size_t output_count = 0; |
184 | 205 |
185 inputs[input_count++] = g.UseRegister(m.left().node()); | 206 if (TryMatchImmediate(selector, &opcode, m.right().node(), &input_count, |
186 inputs[input_count++] = g.UseOperand(m.right().node(), opcode); | 207 &inputs[1])) { |
| 208 inputs[0] = g.UseRegister(m.left().node()); |
| 209 input_count++; |
 | 210 } else if ( |
 | 211 has_reverse_opcode && |
| 212 TryMatchImmediate(selector, &reverse_opcode, m.left().node(), |
| 213 &input_count, &inputs[1])) { |
| 214 inputs[0] = g.UseRegister(m.right().node()); |
| 215 opcode = reverse_opcode; |
| 216 input_count++; |
| 217 } else { |
| 218 inputs[input_count++] = g.UseRegister(m.left().node()); |
| 219 inputs[input_count++] = g.UseOperand(m.right().node(), opcode); |
| 220 } |
187 | 221 |
188 if (cont->IsBranch()) { | 222 if (cont->IsBranch()) { |
189 inputs[input_count++] = g.Label(cont->true_block()); | 223 inputs[input_count++] = g.Label(cont->true_block()); |
190 inputs[input_count++] = g.Label(cont->false_block()); | 224 inputs[input_count++] = g.Label(cont->false_block()); |
191 } | 225 } |
192 | 226 |
193 if (cont->IsDeoptimize()) { | 227 if (cont->IsDeoptimize()) { |
194 // If we can deoptimize as a result of the binop, we need to make sure that | 228 // If we can deoptimize as a result of the binop, we need to make sure that |
195 // the deopt inputs are not overwritten by the binop result. One way | 229 // the deopt inputs are not overwritten by the binop result. One way |
196 // to achieve that is to declare the output register as same-as-first. | 230 // to achieve that is to declare the output register as same-as-first. |
(...skipping 12 matching lines...) Expand all Loading... |
209 | 243 |
210 opcode = cont->Encode(opcode); | 244 opcode = cont->Encode(opcode); |
211 if (cont->IsDeoptimize()) { | 245 if (cont->IsDeoptimize()) { |
212 selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs, | 246 selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs, |
213 cont->reason(), cont->frame_state()); | 247 cont->reason(), cont->frame_state()); |
214 } else { | 248 } else { |
215 selector->Emit(opcode, output_count, outputs, input_count, inputs); | 249 selector->Emit(opcode, output_count, outputs, input_count, inputs); |
216 } | 250 } |
217 } | 251 } |
218 | 252 |
| 253 static void VisitBinop(InstructionSelector* selector, Node* node, |
| 254 InstructionCode opcode, bool has_reverse_opcode, |
| 255 InstructionCode reverse_opcode) { |
| 256 FlagsContinuation cont; |
| 257 VisitBinop(selector, node, opcode, has_reverse_opcode, reverse_opcode, &cont); |
| 258 } |
| 259 |
| 260 static void VisitBinop(InstructionSelector* selector, Node* node, |
| 261 InstructionCode opcode, FlagsContinuation* cont) { |
| 262 VisitBinop(selector, node, opcode, false, kArchNop, cont); |
| 263 } |
219 | 264 |
220 static void VisitBinop(InstructionSelector* selector, Node* node, | 265 static void VisitBinop(InstructionSelector* selector, Node* node, |
221 InstructionCode opcode) { | 266 InstructionCode opcode) { |
222 FlagsContinuation cont; | 267 VisitBinop(selector, node, opcode, false, kArchNop); |
223 VisitBinop(selector, node, opcode, &cont); | |
224 } | 268 } |
225 | 269 |
226 void EmitLoad(InstructionSelector* selector, Node* node, InstructionCode opcode, | 270 void EmitLoad(InstructionSelector* selector, Node* node, InstructionCode opcode, |
227 Node* output = nullptr) { | 271 Node* output = nullptr) { |
228 Mips64OperandGenerator g(selector); | 272 Mips64OperandGenerator g(selector); |
229 Node* base = node->InputAt(0); | 273 Node* base = node->InputAt(0); |
230 Node* index = node->InputAt(1); | 274 Node* index = node->InputAt(1); |
231 | 275 |
232 if (g.CanBeImmediate(index, opcode)) { | 276 if (g.CanBeImmediate(index, opcode)) { |
233 selector->Emit(opcode | AddressingModeField::encode(kMode_MRI), | 277 selector->Emit(opcode | AddressingModeField::encode(kMode_MRI), |
(...skipping 174 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
408 uint32_t msb = base::bits::CountLeadingZeros32(~mask); | 452 uint32_t msb = base::bits::CountLeadingZeros32(~mask); |
409 if (shift != 0 && shift != 32 && msb + shift == 32) { | 453 if (shift != 0 && shift != 32 && msb + shift == 32) { |
410 // Insert zeros for (x >> K) << K => x & ~(2^K - 1) expression reduction | 454 // Insert zeros for (x >> K) << K => x & ~(2^K - 1) expression reduction |
411 // and remove constant loading of inverted mask. | 455 // and remove constant loading of inverted mask. |
412 Emit(kMips64Ins, g.DefineSameAsFirst(node), | 456 Emit(kMips64Ins, g.DefineSameAsFirst(node), |
413 g.UseRegister(m.left().node()), g.TempImmediate(0), | 457 g.UseRegister(m.left().node()), g.TempImmediate(0), |
414 g.TempImmediate(shift)); | 458 g.TempImmediate(shift)); |
415 return; | 459 return; |
416 } | 460 } |
417 } | 461 } |
418 VisitBinop(this, node, kMips64And32); | 462 VisitBinop(this, node, kMips64And32, true, kMips64And32); |
419 } | 463 } |
420 | 464 |
421 | 465 |
422 void InstructionSelector::VisitWord64And(Node* node) { | 466 void InstructionSelector::VisitWord64And(Node* node) { |
423 Mips64OperandGenerator g(this); | 467 Mips64OperandGenerator g(this); |
424 Int64BinopMatcher m(node); | 468 Int64BinopMatcher m(node); |
425 if (m.left().IsWord64Shr() && CanCover(node, m.left().node()) && | 469 if (m.left().IsWord64Shr() && CanCover(node, m.left().node()) && |
426 m.right().HasValue()) { | 470 m.right().HasValue()) { |
427 uint64_t mask = m.right().Value(); | 471 uint64_t mask = m.right().Value(); |
428 uint32_t mask_width = base::bits::CountPopulation64(mask); | 472 uint32_t mask_width = base::bits::CountPopulation64(mask); |
(...skipping 30 matching lines...) Expand all Loading... |
459 if (shift != 0 && shift < 32 && msb + shift == 64) { | 503 if (shift != 0 && shift < 32 && msb + shift == 64) { |
460 // Insert zeros for (x >> K) << K => x & ~(2^K - 1) expression reduction | 504 // Insert zeros for (x >> K) << K => x & ~(2^K - 1) expression reduction |
461 // and remove constant loading of inverted mask. Dins cannot insert bits | 505 // and remove constant loading of inverted mask. Dins cannot insert bits |
462 // past word size, so shifts smaller than 32 are covered. | 506 // past word size, so shifts smaller than 32 are covered. |
463 Emit(kMips64Dins, g.DefineSameAsFirst(node), | 507 Emit(kMips64Dins, g.DefineSameAsFirst(node), |
464 g.UseRegister(m.left().node()), g.TempImmediate(0), | 508 g.UseRegister(m.left().node()), g.TempImmediate(0), |
465 g.TempImmediate(shift)); | 509 g.TempImmediate(shift)); |
466 return; | 510 return; |
467 } | 511 } |
468 } | 512 } |
469 VisitBinop(this, node, kMips64And); | 513 VisitBinop(this, node, kMips64And, true, kMips64And); |
470 } | 514 } |
471 | 515 |
472 | 516 |
473 void InstructionSelector::VisitWord32Or(Node* node) { | 517 void InstructionSelector::VisitWord32Or(Node* node) { |
474 VisitBinop(this, node, kMips64Or32); | 518 VisitBinop(this, node, kMips64Or32, true, kMips64Or32); |
475 } | 519 } |
476 | 520 |
477 | 521 |
478 void InstructionSelector::VisitWord64Or(Node* node) { | 522 void InstructionSelector::VisitWord64Or(Node* node) { |
479 VisitBinop(this, node, kMips64Or); | 523 VisitBinop(this, node, kMips64Or, true, kMips64Or); |
480 } | 524 } |
481 | 525 |
482 | 526 |
483 void InstructionSelector::VisitWord32Xor(Node* node) { | 527 void InstructionSelector::VisitWord32Xor(Node* node) { |
484 Int32BinopMatcher m(node); | 528 Int32BinopMatcher m(node); |
485 if (m.left().IsWord32Or() && CanCover(node, m.left().node()) && | 529 if (m.left().IsWord32Or() && CanCover(node, m.left().node()) && |
486 m.right().Is(-1)) { | 530 m.right().Is(-1)) { |
487 Int32BinopMatcher mleft(m.left().node()); | 531 Int32BinopMatcher mleft(m.left().node()); |
488 if (!mleft.right().HasValue()) { | 532 if (!mleft.right().HasValue()) { |
489 Mips64OperandGenerator g(this); | 533 Mips64OperandGenerator g(this); |
490 Emit(kMips64Nor32, g.DefineAsRegister(node), | 534 Emit(kMips64Nor32, g.DefineAsRegister(node), |
491 g.UseRegister(mleft.left().node()), | 535 g.UseRegister(mleft.left().node()), |
492 g.UseRegister(mleft.right().node())); | 536 g.UseRegister(mleft.right().node())); |
493 return; | 537 return; |
494 } | 538 } |
495 } | 539 } |
496 if (m.right().Is(-1)) { | 540 if (m.right().Is(-1)) { |
497 // Use Nor for bit negation and eliminate constant loading for xori. | 541 // Use Nor for bit negation and eliminate constant loading for xori. |
498 Mips64OperandGenerator g(this); | 542 Mips64OperandGenerator g(this); |
499 Emit(kMips64Nor32, g.DefineAsRegister(node), g.UseRegister(m.left().node()), | 543 Emit(kMips64Nor32, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
500 g.TempImmediate(0)); | 544 g.TempImmediate(0)); |
501 return; | 545 return; |
502 } | 546 } |
503 VisitBinop(this, node, kMips64Xor32); | 547 VisitBinop(this, node, kMips64Xor32, true, kMips64Xor32); |
504 } | 548 } |
505 | 549 |
506 | 550 |
507 void InstructionSelector::VisitWord64Xor(Node* node) { | 551 void InstructionSelector::VisitWord64Xor(Node* node) { |
508 Int64BinopMatcher m(node); | 552 Int64BinopMatcher m(node); |
509 if (m.left().IsWord64Or() && CanCover(node, m.left().node()) && | 553 if (m.left().IsWord64Or() && CanCover(node, m.left().node()) && |
510 m.right().Is(-1)) { | 554 m.right().Is(-1)) { |
511 Int64BinopMatcher mleft(m.left().node()); | 555 Int64BinopMatcher mleft(m.left().node()); |
512 if (!mleft.right().HasValue()) { | 556 if (!mleft.right().HasValue()) { |
513 Mips64OperandGenerator g(this); | 557 Mips64OperandGenerator g(this); |
514 Emit(kMips64Nor, g.DefineAsRegister(node), | 558 Emit(kMips64Nor, g.DefineAsRegister(node), |
515 g.UseRegister(mleft.left().node()), | 559 g.UseRegister(mleft.left().node()), |
516 g.UseRegister(mleft.right().node())); | 560 g.UseRegister(mleft.right().node())); |
517 return; | 561 return; |
518 } | 562 } |
519 } | 563 } |
520 if (m.right().Is(-1)) { | 564 if (m.right().Is(-1)) { |
521 // Use Nor for bit negation and eliminate constant loading for xori. | 565 // Use Nor for bit negation and eliminate constant loading for xori. |
522 Mips64OperandGenerator g(this); | 566 Mips64OperandGenerator g(this); |
523 Emit(kMips64Nor, g.DefineAsRegister(node), g.UseRegister(m.left().node()), | 567 Emit(kMips64Nor, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
524 g.TempImmediate(0)); | 568 g.TempImmediate(0)); |
525 return; | 569 return; |
526 } | 570 } |
527 VisitBinop(this, node, kMips64Xor); | 571 VisitBinop(this, node, kMips64Xor, true, kMips64Xor); |
528 } | 572 } |
529 | 573 |
530 | 574 |
531 void InstructionSelector::VisitWord32Shl(Node* node) { | 575 void InstructionSelector::VisitWord32Shl(Node* node) { |
532 Int32BinopMatcher m(node); | 576 Int32BinopMatcher m(node); |
533 if (m.left().IsWord32And() && CanCover(node, m.left().node()) && | 577 if (m.left().IsWord32And() && CanCover(node, m.left().node()) && |
534 m.right().IsInRange(1, 31)) { | 578 m.right().IsInRange(1, 31)) { |
535 Mips64OperandGenerator g(this); | 579 Mips64OperandGenerator g(this); |
536 Int32BinopMatcher mleft(m.left().node()); | 580 Int32BinopMatcher mleft(m.left().node()); |
537 // Match Word32Shl(Word32And(x, mask), imm) to Shl where the mask is | 581 // Match Word32Shl(Word32And(x, mask), imm) to Shl where the mask is |
(...skipping 229 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
767 CanCover(node, m.right().node()) && CanCover(node, m.left().node())) { | 811 CanCover(node, m.right().node()) && CanCover(node, m.left().node())) { |
768 Int32BinopMatcher mleft(m.left().node()); | 812 Int32BinopMatcher mleft(m.left().node()); |
769 if (mleft.right().HasValue()) { | 813 if (mleft.right().HasValue()) { |
770 int32_t shift_value = static_cast<int32_t>(mleft.right().Value()); | 814 int32_t shift_value = static_cast<int32_t>(mleft.right().Value()); |
771 Emit(kMips64Lsa, g.DefineAsRegister(node), | 815 Emit(kMips64Lsa, g.DefineAsRegister(node), |
772 g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()), | 816 g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()), |
773 g.TempImmediate(shift_value)); | 817 g.TempImmediate(shift_value)); |
774 return; | 818 return; |
775 } | 819 } |
776 } | 820 } |
777 VisitBinop(this, node, kMips64Add); | 821 VisitBinop(this, node, kMips64Add, true, kMips64Add); |
778 } | 822 } |
779 | 823 |
780 | 824 |
781 void InstructionSelector::VisitInt64Add(Node* node) { | 825 void InstructionSelector::VisitInt64Add(Node* node) { |
782 Mips64OperandGenerator g(this); | 826 Mips64OperandGenerator g(this); |
783 Int64BinopMatcher m(node); | 827 Int64BinopMatcher m(node); |
784 | 828 |
785 // Select Dlsa for (left + (left_of_right << imm)). | 829 // Select Dlsa for (left + (left_of_right << imm)). |
786 if (m.right().opcode() == IrOpcode::kWord64Shl && | 830 if (m.right().opcode() == IrOpcode::kWord64Shl && |
787 CanCover(node, m.left().node()) && CanCover(node, m.right().node())) { | 831 CanCover(node, m.left().node()) && CanCover(node, m.right().node())) { |
(...skipping 13 matching lines...) Expand all Loading... |
801 Int64BinopMatcher mleft(m.left().node()); | 845 Int64BinopMatcher mleft(m.left().node()); |
802 if (mleft.right().HasValue()) { | 846 if (mleft.right().HasValue()) { |
803 int32_t shift_value = static_cast<int32_t>(mleft.right().Value()); | 847 int32_t shift_value = static_cast<int32_t>(mleft.right().Value()); |
804 Emit(kMips64Dlsa, g.DefineAsRegister(node), | 848 Emit(kMips64Dlsa, g.DefineAsRegister(node), |
805 g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()), | 849 g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()), |
806 g.TempImmediate(shift_value)); | 850 g.TempImmediate(shift_value)); |
807 return; | 851 return; |
808 } | 852 } |
809 } | 853 } |
810 | 854 |
811 VisitBinop(this, node, kMips64Dadd); | 855 VisitBinop(this, node, kMips64Dadd, true, kMips64Dadd); |
812 } | 856 } |
813 | 857 |
814 | 858 |
815 void InstructionSelector::VisitInt32Sub(Node* node) { | 859 void InstructionSelector::VisitInt32Sub(Node* node) { |
816 VisitBinop(this, node, kMips64Sub); | 860 VisitBinop(this, node, kMips64Sub); |
817 } | 861 } |
818 | 862 |
819 | 863 |
820 void InstructionSelector::VisitInt64Sub(Node* node) { | 864 void InstructionSelector::VisitInt64Sub(Node* node) { |
821 VisitBinop(this, node, kMips64Dsub); | 865 VisitBinop(this, node, kMips64Dsub); |
(...skipping 1670 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2492 } else { | 2536 } else { |
2493 DCHECK(kArchVariant == kMips64r2); | 2537 DCHECK(kArchVariant == kMips64r2); |
2494 return MachineOperatorBuilder::AlignmentRequirements:: | 2538 return MachineOperatorBuilder::AlignmentRequirements:: |
2495 NoUnalignedAccessSupport(); | 2539 NoUnalignedAccessSupport(); |
2496 } | 2540 } |
2497 } | 2541 } |
2498 | 2542 |
2499 } // namespace compiler | 2543 } // namespace compiler |
2500 } // namespace internal | 2544 } // namespace internal |
2501 } // namespace v8 | 2545 } // namespace v8 |
OLD | NEW |