OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/generic-node-inl.h" | 5 #include "src/compiler/generic-node-inl.h" |
6 #include "src/compiler/instruction-selector-impl.h" | 6 #include "src/compiler/instruction-selector-impl.h" |
7 #include "src/compiler/node-matchers.h" | 7 #include "src/compiler/node-matchers.h" |
8 | 8 |
9 namespace v8 { | 9 namespace v8 { |
10 namespace internal { | 10 namespace internal { |
(...skipping 416 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// Selects the 32-bit rotate-right instruction for a Word32Ror node,
// delegating operand selection to the shared shift helper.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitWord32Shift(this, node, kX64Ror32);
}
430 | 430 |
431 | 431 |
// Selects the 64-bit rotate-right instruction for a Word64Ror node,
// delegating operand selection to the shared shift helper.
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitWord64Shift(this, node, kX64Ror);
}
435 | 435 |
436 | 436 |
// Tries to cover "left + constant" with a single lea when |left| matches
// the lea multiply pattern (see LeaMultiplyMatcher).  Returns true and
// emits the lea on success; returns false (emitting nothing) when the
// right operand is not a constant that fits the 32-bit displacement
// field, or when the left operand does not match.
static bool TryEmitLeaMultAdd(InstructionSelector* selector, Node* node,
                              ArchOpcode opcode) {
  int32_t displacement_value;
  Node* left;
  {
    Int32BinopMatcher m32(node);
    left = m32.left().node();
    if (m32.right().HasValue()) {
      displacement_value = m32.right().Value();
    } else {
      // Not a 32-bit constant; retry as a 64-bit match.  The constant
      // must still fit in the addressing mode's 32-bit displacement.
      Int64BinopMatcher m64(node);
      if (!m64.right().HasValue()) {
        return false;
      }
      int64_t value_64 = m64.right().Value();
      displacement_value = static_cast<int32_t>(value_64);
      // Bail out if truncation to int32 lost information.
      if (displacement_value != value_64) return false;
    }
  }
  LeaMultiplyMatcher lmm(left);
  if (!lmm.Matches()) return false;
  AddressingMode mode;
  size_t input_count;
  X64OperandGenerator g(selector);
  InstructionOperand* index = g.UseRegister(lmm.Left());
  InstructionOperand* displacement = g.TempImmediate(displacement_value);
  // Pre-filled for the two-input case {index, displacement}; slot 1 is
  // patched to |index| below when three inputs are needed.
  InstructionOperand* inputs[] = {index, displacement, displacement};
  if (lmm.Displacement() != 0) {
    // NOTE(review): a nonzero matcher displacement presumably means the
    // multiplier needs the register added once more (e.g. *3 as
    // reg + reg*2), hence base + index*scale + imm — confirm against
    // LeaMultiplyMatcher.
    input_count = 3;
    inputs[1] = index;
    mode = kMode_MR1I;
  } else {
    input_count = 2;
    mode = kMode_M1I;
  }
  // Fold the matched power of two into the addressing mode's scale.
  mode = AdjustAddressingMode(mode, lmm.Power());
  InstructionOperand* outputs[] = {g.DefineAsRegister(node)};
  selector->Emit(opcode | AddressingModeField::encode(mode), 1, outputs,
                 input_count, inputs);
  return true;
}
| 478 |
| 479 |
437 void InstructionSelector::VisitInt32Add(Node* node) { | 480 void InstructionSelector::VisitInt32Add(Node* node) { |
| 481 if (TryEmitLeaMultAdd(this, node, kX64Lea32)) return; |
438 VisitBinop(this, node, kX64Add32); | 482 VisitBinop(this, node, kX64Add32); |
439 } | 483 } |
440 | 484 |
441 | 485 |
442 void InstructionSelector::VisitInt64Add(Node* node) { | 486 void InstructionSelector::VisitInt64Add(Node* node) { |
| 487 if (TryEmitLeaMultAdd(this, node, kX64Lea)) return; |
443 VisitBinop(this, node, kX64Add); | 488 VisitBinop(this, node, kX64Add); |
444 } | 489 } |
445 | 490 |
446 | 491 |
447 void InstructionSelector::VisitInt32Sub(Node* node) { | 492 void InstructionSelector::VisitInt32Sub(Node* node) { |
448 X64OperandGenerator g(this); | 493 X64OperandGenerator g(this); |
449 Int32BinopMatcher m(node); | 494 Int32BinopMatcher m(node); |
450 if (m.left().Is(0)) { | 495 if (m.left().Is(0)) { |
451 Emit(kX64Neg32, g.DefineSameAsFirst(node), g.Use(m.right().node())); | 496 Emit(kX64Neg32, g.DefineSameAsFirst(node), g.Use(m.right().node())); |
452 } else { | 497 } else { |
453 VisitBinop(this, node, kX64Sub32); | 498 VisitBinop(this, node, kX64Sub32); |
454 } | 499 } |
455 } | 500 } |
456 | 501 |
457 | 502 |
458 void InstructionSelector::VisitInt64Sub(Node* node) { | 503 void InstructionSelector::VisitInt64Sub(Node* node) { |
459 X64OperandGenerator g(this); | 504 X64OperandGenerator g(this); |
460 Int64BinopMatcher m(node); | 505 Int64BinopMatcher m(node); |
461 if (m.left().Is(0)) { | 506 if (m.left().Is(0)) { |
462 Emit(kX64Neg, g.DefineSameAsFirst(node), g.Use(m.right().node())); | 507 Emit(kX64Neg, g.DefineSameAsFirst(node), g.Use(m.right().node())); |
463 } else { | 508 } else { |
464 VisitBinop(this, node, kX64Sub); | 509 VisitBinop(this, node, kX64Sub); |
465 } | 510 } |
466 } | 511 } |
467 | 512 |
468 | 513 |
// Tries to cover a multiply node with a single lea when the multiplier
// matches a scaled addressing mode (see LeaMultiplyMatcher).  Returns
// true and emits the lea on success; returns false (emitting nothing)
// otherwise.
static bool TryEmitLeaMult(InstructionSelector* selector, Node* node,
                           ArchOpcode opcode) {
  LeaMultiplyMatcher lea(node);
  // Try to match lea.
  if (!lea.Matches()) return false;
  AddressingMode mode;
  size_t input_count;
  X64OperandGenerator g(selector);
  InstructionOperand* left = g.UseRegister(lea.Left());
  // Pre-filled for the two-input case {base, index}; the one-input case
  // uses only slot 0 as the scaled index.
  InstructionOperand* inputs[] = {left, left};
  if (lea.Displacement() != 0) {
    // NOTE(review): a nonzero matcher displacement presumably means the
    // multiplier needs the register added once more (e.g. *3 as
    // reg + reg*2), hence base + index*scale — confirm against
    // LeaMultiplyMatcher.
    input_count = 2;
    mode = kMode_MR1;
  } else {
    input_count = 1;
    mode = kMode_M1;
  }
  // Fold the matched power of two into the addressing mode's scale.
  mode = AdjustAddressingMode(mode, lea.Power());
  InstructionOperand* outputs[] = {g.DefineAsRegister(node)};
  selector->Emit(opcode | AddressingModeField::encode(mode), 1, outputs,
                 input_count, inputs);
  return true;
}
| 537 |
| 538 |
469 static void VisitMul(InstructionSelector* selector, Node* node, | 539 static void VisitMul(InstructionSelector* selector, Node* node, |
470 ArchOpcode opcode) { | 540 ArchOpcode opcode) { |
471 X64OperandGenerator g(selector); | 541 X64OperandGenerator g(selector); |
472 LeaMultiplyMatcher lea(node); | 542 Int32BinopMatcher m(node); |
473 // Try to match lea. | 543 Node* left = m.left().node(); |
474 if (lea.Matches()) { | 544 Node* right = m.right().node(); |
475 switch (opcode) { | 545 if (g.CanBeImmediate(right)) { |
476 case kX64Imul32: | 546 selector->Emit(opcode, g.DefineAsRegister(node), g.Use(left), |
477 opcode = kX64Lea32; | 547 g.UseImmediate(right)); |
478 break; | 548 } else { |
479 case kX64Imul: | 549 if (g.CanBeBetterLeftOperand(right)) { |
480 opcode = kX64Lea; | 550 std::swap(left, right); |
481 break; | |
482 default: | |
483 UNREACHABLE(); | |
484 } | 551 } |
485 AddressingMode mode; | 552 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), |
486 size_t input_count; | 553 g.Use(right)); |
487 InstructionOperand* left = g.UseRegister(lea.Left()); | |
488 InstructionOperand* inputs[] = {left, left}; | |
489 if (lea.Displacement() != 0) { | |
490 input_count = 2; | |
491 mode = kMode_MR1; | |
492 } else { | |
493 input_count = 1; | |
494 mode = kMode_M1; | |
495 } | |
496 mode = AdjustAddressingMode(mode, lea.Power()); | |
497 InstructionOperand* outputs[] = {g.DefineAsRegister(node)}; | |
498 selector->Emit(opcode | AddressingModeField::encode(mode), 1, outputs, | |
499 input_count, inputs); | |
500 } else { | |
501 Int32BinopMatcher m(node); | |
502 Node* left = m.left().node(); | |
503 Node* right = m.right().node(); | |
504 if (g.CanBeImmediate(right)) { | |
505 selector->Emit(opcode, g.DefineAsRegister(node), g.Use(left), | |
506 g.UseImmediate(right)); | |
507 } else { | |
508 if (g.CanBeBetterLeftOperand(right)) { | |
509 std::swap(left, right); | |
510 } | |
511 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), | |
512 g.Use(right)); | |
513 } | |
514 } | 554 } |
515 } | 555 } |
516 | 556 |
517 | 557 |
518 void InstructionSelector::VisitInt32Mul(Node* node) { | 558 void InstructionSelector::VisitInt32Mul(Node* node) { |
| 559 if (TryEmitLeaMult(this, node, kX64Lea32)) return; |
519 VisitMul(this, node, kX64Imul32); | 560 VisitMul(this, node, kX64Imul32); |
520 } | 561 } |
521 | 562 |
522 | 563 |
523 void InstructionSelector::VisitInt64Mul(Node* node) { | 564 void InstructionSelector::VisitInt64Mul(Node* node) { |
| 565 if (TryEmitLeaMult(this, node, kX64Lea)) return; |
524 VisitMul(this, node, kX64Imul); | 566 VisitMul(this, node, kX64Imul); |
525 } | 567 } |
526 | 568 |
527 | 569 |
528 static void VisitDiv(InstructionSelector* selector, Node* node, | 570 static void VisitDiv(InstructionSelector* selector, Node* node, |
529 ArchOpcode opcode) { | 571 ArchOpcode opcode) { |
530 X64OperandGenerator g(selector); | 572 X64OperandGenerator g(selector); |
531 InstructionOperand* temps[] = {g.TempRegister(rdx)}; | 573 InstructionOperand* temps[] = {g.TempRegister(rdx)}; |
532 selector->Emit( | 574 selector->Emit( |
533 opcode, g.DefineAsFixed(node, rax), g.UseFixed(node->InputAt(0), rax), | 575 opcode, g.DefineAsFixed(node, rax), g.UseFixed(node->InputAt(0), rax), |
(...skipping 297 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
831 call_instr->MarkAsCall(); | 873 call_instr->MarkAsCall(); |
832 if (deoptimization != NULL) { | 874 if (deoptimization != NULL) { |
833 DCHECK(continuation != NULL); | 875 DCHECK(continuation != NULL); |
834 call_instr->MarkAsControl(); | 876 call_instr->MarkAsControl(); |
835 } | 877 } |
836 } | 878 } |
837 | 879 |
838 } // namespace compiler | 880 } // namespace compiler |
839 } // namespace internal | 881 } // namespace internal |
840 } // namespace v8 | 882 } // namespace v8 |
OLD | NEW |