| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/bits.h" | 5 #include "src/base/bits.h" |
| 6 #include "src/compiler/instruction-selector-impl.h" | 6 #include "src/compiler/instruction-selector-impl.h" |
| 7 #include "src/compiler/node-matchers.h" | 7 #include "src/compiler/node-matchers.h" |
| | 8 #include "src/compiler/node-properties.h" |
| 8 | 9 |
| 9 namespace v8 { | 10 namespace v8 { |
| 10 namespace internal { | 11 namespace internal { |
| 11 namespace compiler { | 12 namespace compiler { |
| 12 | 13 |
| 13 #define TRACE_UNIMPL() \ | 14 #define TRACE_UNIMPL() \ |
| 14 PrintF("UNIMPLEMENTED instr_sel: %s at line %d\n", __FUNCTION__, __LINE__) | 15 PrintF("UNIMPLEMENTED instr_sel: %s at line %d\n", __FUNCTION__, __LINE__) |
| 15 | 16 |
| 16 #define TRACE() PrintF("instr_sel: %s at line %d\n", __FUNCTION__, __LINE__) | 17 #define TRACE() PrintF("instr_sel: %s at line %d\n", __FUNCTION__, __LINE__) |
| 17 | 18 |
| (...skipping 443 matching lines...) |
| 461 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); | 462 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); |
| 462 | 463 |
| 463 // Compute InstructionOperands for inputs and outputs. | 464 // Compute InstructionOperands for inputs and outputs. |
| 464 InitializeCallBuffer(node, &buffer, true, false); | 465 InitializeCallBuffer(node, &buffer, true, false); |
| 465 // Possibly align stack here for functions. | 466 // Possibly align stack here for functions. |
| 466 int push_count = buffer.pushed_nodes.size(); | 467 int push_count = buffer.pushed_nodes.size(); |
| 467 if (push_count > 0) { | 468 if (push_count > 0) { |
| 468 Emit(kMipsStackClaim | MiscField::encode(push_count), NULL); | 469 Emit(kMipsStackClaim | MiscField::encode(push_count), NULL); |
| 469 } | 470 } |
| 470 int slot = buffer.pushed_nodes.size() - 1; | 471 int slot = buffer.pushed_nodes.size() - 1; |
| 471 for (NodeVectorRIter input = buffer.pushed_nodes.rbegin(); | 472 for (auto i = buffer.pushed_nodes.rbegin(); i != buffer.pushed_nodes.rend(); |
| 472 input != buffer.pushed_nodes.rend(); input++) { | 473 ++i) { |
| 473 Emit(kMipsStoreToStackSlot | MiscField::encode(slot), NULL, | 474 Emit(kMipsStoreToStackSlot | MiscField::encode(slot), nullptr, |
| 474 g.UseRegister(*input)); | 475 g.UseRegister(*i)); |
| 475 slot--; | 476 slot--; |
| 476 } | 477 } |
| 477 | 478 |
| 478 // Select the appropriate opcode based on the call type. | 479 // Select the appropriate opcode based on the call type. |
| 479 InstructionCode opcode; | 480 InstructionCode opcode; |
| 480 switch (descriptor->kind()) { | 481 switch (descriptor->kind()) { |
| 481 case CallDescriptor::kCallCodeObject: { | 482 case CallDescriptor::kCallCodeObject: { |
| 482 opcode = kArchCallCodeObject; | 483 opcode = kArchCallCodeObject; |
| 483 break; | 484 break; |
| 484 } | 485 } |
| (...skipping 199 matching lines...) |
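The argument-push loop in the hunk above (new lines 471–477) swaps the `NodeVectorRIter` typedef for `auto` and `NULL` for `nullptr` while storing call arguments into stack slots from last to first. A minimal stand-alone sketch of that reverse-iteration idiom, with plain `int`s standing in for `Node*` and a print in place of `Emit()` (all names here are illustrative, not V8 API):

```cpp
#include <cstdio>
#include <vector>

// Stand-in for Emit(kMipsStoreToStackSlot | MiscField::encode(slot), ...).
static void EmitStoreToStackSlot(int slot, int node_id) {
  std::printf("store node %d -> stack slot %d\n", node_id, slot);
}

int main() {
  std::vector<int> pushed_nodes = {10, 11, 12};  // arguments in push order
  int slot = static_cast<int>(pushed_nodes.size()) - 1;
  // Walk the pushed nodes back to front so the last argument lands in the
  // highest slot; `auto` replaces the old named reverse-iterator typedef.
  for (auto i = pushed_nodes.rbegin(); i != pushed_nodes.rend(); ++i) {
    EmitStoreToStackSlot(slot, *i);
    --slot;
  }
  return 0;
}
```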
| 684 return VisitFloat64Compare(selector, value, cont); | 685 return VisitFloat64Compare(selector, value, cont); |
| 685 case IrOpcode::kFloat64LessThan: | 686 case IrOpcode::kFloat64LessThan: |
| 686 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan); | 687 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan); |
| 687 return VisitFloat64Compare(selector, value, cont); | 688 return VisitFloat64Compare(selector, value, cont); |
| 688 case IrOpcode::kFloat64LessThanOrEqual: | 689 case IrOpcode::kFloat64LessThanOrEqual: |
| 689 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual); | 690 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual); |
| 690 return VisitFloat64Compare(selector, value, cont); | 691 return VisitFloat64Compare(selector, value, cont); |
| 691 case IrOpcode::kProjection: | 692 case IrOpcode::kProjection: |
| 692 // Check if this is the overflow output projection of an | 693 // Check if this is the overflow output projection of an |
| 693 // <Operation>WithOverflow node. | 694 // <Operation>WithOverflow node. |
| 694 if (OpParameter<size_t>(value) == 1u) { | 695 if (ProjectionIndexOf(value->op()) == 1u) { |
| 695 // We cannot combine the <Operation>WithOverflow with this branch | 696 // We cannot combine the <Operation>WithOverflow with this branch |
| 696 // unless the 0th projection (the use of the actual value of the | 697 // unless the 0th projection (the use of the actual value of the |
| 697 // <Operation> is either NULL, which means there's no use of the | 698 // <Operation> is either NULL, which means there's no use of the |
| 698 // actual value, or was already defined, which means it is scheduled | 699 // actual value, or was already defined, which means it is scheduled |
| 699 // *AFTER* this branch). | 700 // *AFTER* this branch). |
| 700 Node* const node = value->InputAt(0); | 701 Node* const node = value->InputAt(0); |
| 701 Node* const result = node->FindProjection(0); | 702 Node* const result = NodeProperties::FindProjection(node, 0); |
| 702 if (!result || selector->IsDefined(result)) { | 703 if (!result || selector->IsDefined(result)) { |
| 703 switch (node->opcode()) { | 704 switch (node->opcode()) { |
| 704 case IrOpcode::kInt32AddWithOverflow: | 705 case IrOpcode::kInt32AddWithOverflow: |
| 705 cont->OverwriteAndNegateIfEqual(kOverflow); | 706 cont->OverwriteAndNegateIfEqual(kOverflow); |
| 706 return VisitBinop(selector, node, kMipsAddOvf, cont); | 707 return VisitBinop(selector, node, kMipsAddOvf, cont); |
| 707 case IrOpcode::kInt32SubWithOverflow: | 708 case IrOpcode::kInt32SubWithOverflow: |
| 708 cont->OverwriteAndNegateIfEqual(kOverflow); | 709 cont->OverwriteAndNegateIfEqual(kOverflow); |
| 709 return VisitBinop(selector, node, kMipsSubOvf, cont); | 710 return VisitBinop(selector, node, kMipsSubOvf, cont); |
| 710 default: | 711 default: |
| 711 break; | 712 break; |
| (...skipping 59 matching lines...) |
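The projection check above switches to the helpers `ProjectionIndexOf(value->op())` and `NodeProperties::FindProjection(node, 0)`. A simplified, hypothetical model of what such a lookup does and why it gates the branch fusion (the `Node` struct and functions below are stand-ins, not the real V8 types):

```cpp
#include <cstddef>
#include <vector>

// Hypothetical, simplified stand-ins for V8's node graph: each node records
// its uses; projection nodes carry the output index they select.
struct Node {
  bool is_projection = false;
  size_t projection_index = 0;   // meaningful only when is_projection is true
  std::vector<Node*> uses;       // nodes consuming this node's outputs
};

// Return the projection of `node` with the given index, or nullptr if that
// output is never used -- the contract the selector relies on above.
static Node* FindProjection(Node* node, size_t index) {
  for (Node* use : node->uses) {
    if (use->is_projection && use->projection_index == index) return use;
  }
  return nullptr;
}

// The branch on the overflow bit (projection 1) can be folded into the
// arithmetic instruction only if the value output (projection 0) is unused
// or already scheduled before the branch.
static bool CanCombineWithBranch(Node* with_overflow, bool value_is_defined) {
  Node* value = FindProjection(with_overflow, 0);
  return value == nullptr || value_is_defined;
}

int main() {
  Node add_ovf;              // e.g. an Int32AddWithOverflow node
  Node ovf;
  ovf.is_projection = true;
  ovf.projection_index = 1;  // only the overflow bit is used
  add_ovf.uses.push_back(&ovf);
  return CanCombineWithBranch(&add_ovf, /*value_is_defined=*/false) ? 0 : 1;
}
```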
| 771 } | 772 } |
| 772 | 773 |
| 773 | 774 |
| 774 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) { | 775 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) { |
| 775 FlagsContinuation cont(kUnsignedLessThanOrEqual, node); | 776 FlagsContinuation cont(kUnsignedLessThanOrEqual, node); |
| 776 VisitWordCompare(this, node, &cont); | 777 VisitWordCompare(this, node, &cont); |
| 777 } | 778 } |
| 778 | 779 |
| 779 | 780 |
| 780 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { | 781 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { |
| 781 if (Node* ovf = node->FindProjection(1)) { | 782 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { |
| 782 FlagsContinuation cont(kOverflow, ovf); | 783 FlagsContinuation cont(kOverflow, ovf); |
| 783 return VisitBinop(this, node, kMipsAddOvf, &cont); | 784 return VisitBinop(this, node, kMipsAddOvf, &cont); |
| 784 } | 785 } |
| 785 FlagsContinuation cont; | 786 FlagsContinuation cont; |
| 786 VisitBinop(this, node, kMipsAddOvf, &cont); | 787 VisitBinop(this, node, kMipsAddOvf, &cont); |
| 787 } | 788 } |
| 788 | 789 |
| 789 | 790 |
| 790 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { | 791 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { |
| 791 if (Node* ovf = node->FindProjection(1)) { | 792 if (Node* ovf = NodeProperties::FindProjection(node, 1)) { |
| 792 FlagsContinuation cont(kOverflow, ovf); | 793 FlagsContinuation cont(kOverflow, ovf); |
| 793 return VisitBinop(this, node, kMipsSubOvf, &cont); | 794 return VisitBinop(this, node, kMipsSubOvf, &cont); |
| 794 } | 795 } |
| 795 FlagsContinuation cont; | 796 FlagsContinuation cont; |
| 796 VisitBinop(this, node, kMipsSubOvf, &cont); | 797 VisitBinop(this, node, kMipsSubOvf, &cont); |
| 797 } | 798 } |
| 798 | 799 |
| 799 | 800 |
| 800 void InstructionSelector::VisitFloat64Equal(Node* node) { | 801 void InstructionSelector::VisitFloat64Equal(Node* node) { |
| 801 FlagsContinuation cont(kEqual, node); | 802 FlagsContinuation cont(kEqual, node); |
| (...skipping 20 matching lines...) |
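`VisitInt32AddWithOverflow`/`VisitInt32SubWithOverflow` above first look for projection 1: if the overflow bit is consumed they attach a `kOverflow` continuation to the binop, otherwise they emit the plain arithmetic. For reference, the value/overflow pair such a node must produce can be modelled portably as below; this is only an illustration of the semantics, not how `kMipsAddOvf` is actually implemented:

```cpp
#include <cstdint>
#include <cstdio>

// The two outputs of Int32AddWithOverflow: projection 0 (value) and
// projection 1 (overflow bit).
struct AddResult {
  int32_t value;
  bool overflow;
};

static AddResult Int32AddWithOverflow(int32_t a, int32_t b) {
  // Add in uint32_t for well-defined wrap-around; converting back to int32_t
  // is two's-complement wrapping on the platforms V8 targets.
  uint32_t sum = static_cast<uint32_t>(a) + static_cast<uint32_t>(b);
  int32_t value = static_cast<int32_t>(sum);
  // Signed overflow iff the operands share a sign and the result's sign
  // differs from it.
  bool overflow = ((a < 0) == (b < 0)) && ((value < 0) != (a < 0));
  return {value, overflow};
}

int main() {
  AddResult r = Int32AddWithOverflow(INT32_MAX, 1);
  std::printf("value=%d overflow=%d\n", r.value, r.overflow);  // wraps, 1
  return 0;
}
```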
| 822 return MachineOperatorBuilder::kFloat64Floor | | 823 return MachineOperatorBuilder::kFloat64Floor | |
| 823 MachineOperatorBuilder::kFloat64Ceil | | 824 MachineOperatorBuilder::kFloat64Ceil | |
| 824 MachineOperatorBuilder::kFloat64RoundTruncate; | 825 MachineOperatorBuilder::kFloat64RoundTruncate; |
| 825 } | 826 } |
| 826 return MachineOperatorBuilder::kNoFlags; | 827 return MachineOperatorBuilder::kNoFlags; |
| 827 } | 828 } |
| 828 | 829 |
| 829 } // namespace compiler | 830 } // namespace compiler |
| 830 } // namespace internal | 831 } // namespace internal |
| 831 } // namespace v8 | 832 } // namespace v8 |
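The final hunk returns either the union of the three float64 rounding flags or `kNoFlags`; `MachineOperatorBuilder::Flags` is a plain bit set combined with `|`, which is why the values can be OR'd into one return value. A toy version of the same pattern, with illustrative names rather than the real V8 enum:

```cpp
#include <cstdio>

// Hypothetical flag set mirroring the MachineOperatorBuilder::Flags idea:
// each optional machine operator gets one bit, and the backend advertises
// the union of what it supports.
enum Flags : unsigned {
  kNoFlags = 0u,
  kFloat64Floor = 1u << 0,
  kFloat64Ceil = 1u << 1,
  kFloat64RoundTruncate = 1u << 2,
};

static unsigned SupportedFlags(bool has_fp64_rounding) {
  if (has_fp64_rounding) {
    return kFloat64Floor | kFloat64Ceil | kFloat64RoundTruncate;
  }
  return kNoFlags;
}

int main() {
  unsigned flags = SupportedFlags(true);
  std::printf("floor supported: %d\n", (flags & kFloat64Floor) != 0);
  return 0;
}
```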