| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/instruction-selector.h" | 5 #include "src/compiler/instruction-selector.h" |
| 6 | 6 |
| 7 #include "src/compiler/instruction-selector-impl.h" | 7 #include "src/compiler/instruction-selector-impl.h" |
| 8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
| 9 #include "src/compiler/node-properties-inl.h" | 9 #include "src/compiler/node-properties-inl.h" |
| 10 #include "src/compiler/pipeline.h" | 10 #include "src/compiler/pipeline.h" |
| 11 | 11 |
| 12 namespace v8 { | 12 namespace v8 { |
| 13 namespace internal { | 13 namespace internal { |
| 14 namespace compiler { | 14 namespace compiler { |
| 15 | 15 |
| 16 InstructionSelector::InstructionSelector(InstructionSequence* sequence, | 16 InstructionSelector::InstructionSelector(InstructionSequence* sequence, |
| 17 SourcePositionTable* source_positions) | 17 SourcePositionTable* source_positions) |
| 18 : zone_(sequence->isolate()), | 18 : zone_(sequence->isolate()), |
| 19 sequence_(sequence), | 19 sequence_(sequence), |
| 20 source_positions_(source_positions), | 20 source_positions_(source_positions), |
| 21 current_block_(NULL), | 21 current_block_(NULL), |
| 22 instructions_(InstructionDeque::allocator_type(zone())), | 22 instructions_(InstructionDeque::allocator_type(zone())), |
| 23 defined_(graph()->NodeCount(), false, BoolVector::allocator_type(zone())), |
| 23 used_(graph()->NodeCount(), false, BoolVector::allocator_type(zone())) {} | 24 used_(graph()->NodeCount(), false, BoolVector::allocator_type(zone())) {} |
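The added defined_ vector gives the selector a second per-node bit alongside used_: both are sized to graph()->NodeCount() up front, so the IsDefined/MarkAsDefined accessors below are O(1) indexed lookups with no allocation during selection. A minimal standalone sketch of this bookkeeping pattern (the Node type here is a simplified stand-in, not the real V8 class):

    #include <cassert>
    #include <vector>

    typedef int NodeId;

    struct Node {
      NodeId id;
    };

    // Sketch of the selector's per-node bookkeeping: one bit per node id,
    // both vectors allocated once from the graph's node count.
    class SelectorState {
     public:
      explicit SelectorState(size_t node_count)
          : defined_(node_count, false), used_(node_count, false) {}

      bool IsDefined(const Node* node) const {
        assert(node->id >= 0 &&
               node->id < static_cast<NodeId>(defined_.size()));
        return defined_[node->id];
      }
      void MarkAsDefined(const Node* node) { defined_[node->id] = true; }

     private:
      std::vector<bool> defined_;
      std::vector<bool> used_;
    };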
| 24 | 25 |
| 25 | 26 |
| 26 void InstructionSelector::SelectInstructions() { | 27 void InstructionSelector::SelectInstructions() { |
| 27 // Mark the inputs of all phis in loop headers as used. | 28 // Mark the inputs of all phis in loop headers as used. |
| 28 BasicBlockVector* blocks = schedule()->rpo_order(); | 29 BasicBlockVector* blocks = schedule()->rpo_order(); |
| 29 for (BasicBlockVectorIter i = blocks->begin(); i != blocks->end(); ++i) { | 30 for (BasicBlockVectorIter i = blocks->begin(); i != blocks->end(); ++i) { |
| 30 BasicBlock* block = *i; | 31 BasicBlock* block = *i; |
| 31 if (!block->IsLoopHeader()) continue; | 32 if (!block->IsLoopHeader()) continue; |
| 32 ASSERT_NE(0, block->PredecessorCount()); | 33 ASSERT_NE(0, block->PredecessorCount()); |
| (...skipping 109 matching lines...) |
| 142 block->deferred_ == current_block_->deferred_; | 143 block->deferred_ == current_block_->deferred_; |
| 143 } | 144 } |
| 144 | 145 |
| 145 | 146 |
| 146 bool InstructionSelector::CanCover(Node* user, Node* node) const { | 147 bool InstructionSelector::CanCover(Node* user, Node* node) const { |
| 147 return node->OwnedBy(user) && | 148 return node->OwnedBy(user) && |
| 148 schedule()->block(node) == schedule()->block(user); | 149 schedule()->block(node) == schedule()->block(user); |
| 149 } | 150 } |
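CanCover is the gate for instruction fusion: a node may be folded into its user's instruction only when the user is its sole consumer and no basic-block boundary separates the two, since a multi-use or cross-block node still needs a standalone definition of its own. Restated as a self-contained sketch (simplified placeholder types, not the real Node/Schedule API):

    // Placeholder types standing in for compiler::Node and the schedule.
    struct BasicBlock;

    struct Node {
      const Node* sole_user;    // the single user, or nullptr if several uses
      const BasicBlock* block;  // the block this node was scheduled into
    };

    // A node can be covered (fused into its user's instruction) only if
    // the user owns all of its uses and both live in the same block.
    bool CanCover(const Node* user, const Node* node) {
      return node->sole_user == user && node->block == user->block;
    }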
| 150 | 151 |
| 151 | 152 |
| 153 bool InstructionSelector::IsDefined(Node* node) const { |
| 154 ASSERT_NOT_NULL(node); |
| 155 NodeId id = node->id(); |
| 156 ASSERT(id >= 0); |
| 157 ASSERT(id < static_cast<NodeId>(defined_.size())); |
| 158 return defined_[id]; |
| 159 } |
| 160 |
| 161 |
| 162 void InstructionSelector::MarkAsDefined(Node* node) { |
| 163 ASSERT_NOT_NULL(node); |
| 164 NodeId id = node->id(); |
| 165 ASSERT(id >= 0); |
| 166 ASSERT(id < static_cast<NodeId>(defined_.size())); |
| 167 defined_[id] = true; |
| 168 } |
| 169 |
| 170 |
| 152 bool InstructionSelector::IsUsed(Node* node) const { | 171 bool InstructionSelector::IsUsed(Node* node) const { |
| 153 if (!node->op()->HasProperty(Operator::kEliminatable)) return true; | 172 if (!node->op()->HasProperty(Operator::kEliminatable)) return true; |
| 154 NodeId id = node->id(); | 173 NodeId id = node->id(); |
| 155 ASSERT(id >= 0); | 174 ASSERT(id >= 0); |
| 156 ASSERT(id < static_cast<NodeId>(used_.size())); | 175 ASSERT(id < static_cast<NodeId>(used_.size())); |
| 157 return used_[id]; | 176 return used_[id]; |
| 158 } | 177 } |
| 159 | 178 |
| 160 | 179 |
| 161 void InstructionSelector::MarkAsUsed(Node* node) { | 180 void InstructionSelector::MarkAsUsed(Node* node) { |
| (...skipping 178 matching lines...) |
| 340 // Generate code for the block control "top down", but schedule the code | 359 // Generate code for the block control "top down", but schedule the code |
| 341 // "bottom up". | 360 // "bottom up". |
| 342 VisitControl(block); | 361 VisitControl(block); |
| 343 std::reverse(instructions_.begin() + current_block_end, instructions_.end()); | 362 std::reverse(instructions_.begin() + current_block_end, instructions_.end()); |
| 344 | 363 |
| 345 // Visit code in reverse control flow order, because architecture-specific | 364 // Visit code in reverse control flow order, because architecture-specific |
| 346 // matching may cover more than one node at a time. | 365 // matching may cover more than one node at a time. |
| 347 for (BasicBlock::reverse_iterator i = block->rbegin(); i != block->rend(); | 366 for (BasicBlock::reverse_iterator i = block->rbegin(); i != block->rend(); |
| 348 ++i) { | 367 ++i) { |
| 349 Node* node = *i; | 368 Node* node = *i; |
| 350 if (!IsUsed(node)) continue; | 369 // Skip nodes that are unused or already defined. |
| 370 if (!IsUsed(node) || IsDefined(node)) continue; |
| 351 // Generate code for this node "top down", but schedule the code "bottom | 371 // Generate code for this node "top down", but schedule the code "bottom |
| 352 // up". | 372 // up". |
| 353 size_t current_node_end = instructions_.size(); | 373 size_t current_node_end = instructions_.size(); |
| 354 VisitNode(node); | 374 VisitNode(node); |
| 355 std::reverse(instructions_.begin() + current_node_end, instructions_.end()); | 375 std::reverse(instructions_.begin() + current_node_end, instructions_.end()); |
| 356 } | 376 } |
| 357 | 377 |
| 358 // We're done with the block. | 378 // We're done with the block. |
| 359 // TODO(bmeurer): We should not mutate the schedule. | 379 // TODO(bmeurer): We should not mutate the schedule. |
| 360 block->code_end_ = current_block_end; | 380 block->code_end_ = current_block_end; |
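The two std::reverse calls implement the "generate top down, schedule bottom up" comments: each visit appends its instructions to instructions_, and only the freshly appended suffix is flipped, so earlier code keeps its order while the newest chunk ends up scheduled back to front. A runnable sketch of the suffix-reversal idiom (strings stand in for Instruction objects):

    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    int main() {
      std::vector<std::string> instructions;
      instructions.push_back("earlier code");  // already scheduled, untouched

      // Visiting a branch node "top down": the branch is emitted first,
      // then the compare it covers for its input.
      size_t current_node_end = instructions.size();
      instructions.push_back("branch-on-flags");
      instructions.push_back("compare");

      // Reverse only the suffix emitted for this node, yielding the
      // executable order: compare first, then the branch.
      std::reverse(instructions.begin() + current_node_end,
                   instructions.end());

      for (size_t i = 0; i < instructions.size(); ++i) {
        std::cout << instructions[i] << "\n";
      }
      // Prints: earlier code / compare / branch-on-flags
    }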
| (...skipping 262 matching lines...) |
| 623 void InstructionSelector::VisitWord64Equal(Node* node) { | 643 void InstructionSelector::VisitWord64Equal(Node* node) { |
| 624 FlagsContinuation cont(kEqual, node); | 644 FlagsContinuation cont(kEqual, node); |
| 625 Int64BinopMatcher m(node); | 645 Int64BinopMatcher m(node); |
| 626 if (m.right().Is(0)) { | 646 if (m.right().Is(0)) { |
| 627 return VisitWord64Test(m.left().node(), &cont); | 647 return VisitWord64Test(m.left().node(), &cont); |
| 628 } | 648 } |
| 629 VisitWord64Compare(node, &cont); | 649 VisitWord64Compare(node, &cont); |
| 630 } | 650 } |
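VisitWord64Equal is a small matcher-driven peephole: an equality whose right operand is the constant 0 is lowered to a test of the left operand instead of a full compare, since the flags fall out of the value itself. The same shape, sketched standalone with a toy matcher in place of Int64BinopMatcher:

    #include <cstdint>
    #include <cstdio>

    // Toy matcher: in the real selector, Int64BinopMatcher classifies the
    // operands of a binary node; here both sides are plain immediates.
    struct BinopMatcher {
      int64_t left;
      int64_t right;
      bool RightIs(int64_t value) const { return right == value; }
    };

    void VisitEqual(const BinopMatcher& m) {
      if (m.RightIs(0)) {
        // x == 0: a plain test sets the needed flags without a constant.
        std::printf("test %lld, %lld\n", (long long)m.left, (long long)m.left);
        return;
      }
      std::printf("cmp %lld, %lld\n", (long long)m.left, (long long)m.right);
    }

    int main() {
      BinopMatcher eq_zero = {42, 0};
      BinopMatcher eq_seven = {42, 7};
      VisitEqual(eq_zero);   // test 42, 42
      VisitEqual(eq_seven);  // cmp 42, 7
    }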
| 631 | 651 |
| 632 | 652 |
| 653 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { |
| 654 if (Node* ovf = node->FindProjection(1)) { |
| 655 FlagsContinuation cont(kOverflow, ovf); |
| 656 return VisitInt32AddWithOverflow(node, &cont); |
| 657 } |
| 658 FlagsContinuation cont; |
| 659 VisitInt32AddWithOverflow(node, &cont); |
| 660 } |
| 661 |
| 662 |
| 663 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { |
| 664 if (Node* ovf = node->FindProjection(1)) { |
| 665 FlagsContinuation cont(kOverflow, ovf); |
| 666 return VisitInt32SubWithOverflow(node, &cont); |
| 667 } |
| 668 FlagsContinuation cont; |
| 669 VisitInt32SubWithOverflow(node, &cont); |
| 670 } |
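Both *WithOverflow wrappers share one dispatch shape: if the overflow projection (output index 1) has a user, the operation is selected with a kOverflow continuation that materializes the flag into that projection; with no such user, an empty continuation selects the plain arithmetic and only the value output stays live. A hedged stand-in for the pattern (these types merely imitate FlagsContinuation, they are not the real API):

    #include <cstdio>

    struct Node {
      Node* overflow_use;  // non-null if projection 1 of this node has a user
    };

    // Imitation of FlagsContinuation: either no flags are needed, or the
    // overflow flag must be materialized into a result node.
    struct FlagsContinuation {
      bool set_flags;
      Node* result;
      FlagsContinuation() : set_flags(false), result(nullptr) {}
      explicit FlagsContinuation(Node* ovf) : set_flags(true), result(ovf) {}
    };

    void VisitAddWithOverflow(Node* node, FlagsContinuation* cont) {
      std::printf(cont->set_flags ? "add; capture overflow flag\n"
                                  : "add; overflow unobserved\n");
    }

    void VisitAddWithOverflow(Node* node) {
      if (Node* ovf = node->overflow_use) {
        FlagsContinuation cont(ovf);  // someone reads the overflow bit
        return VisitAddWithOverflow(node, &cont);
      }
      FlagsContinuation cont;  // only the value result is used
      VisitAddWithOverflow(node, &cont);
    }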
| 671 |
| 672 |
| 633 void InstructionSelector::VisitInt64LessThan(Node* node) { | 673 void InstructionSelector::VisitInt64LessThan(Node* node) { |
| 634 FlagsContinuation cont(kSignedLessThan, node); | 674 FlagsContinuation cont(kSignedLessThan, node); |
| 635 VisitWord64Compare(node, &cont); | 675 VisitWord64Compare(node, &cont); |
| 636 } | 676 } |
| 637 | 677 |
| 638 | 678 |
| 639 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) { | 679 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) { |
| 640 FlagsContinuation cont(kSignedLessThanOrEqual, node); | 680 FlagsContinuation cont(kSignedLessThanOrEqual, node); |
| 641 VisitWord64Compare(node, &cont); | 681 VisitWord64Compare(node, &cont); |
| 642 } | 682 } |
| (...skipping 98 matching lines...) |
| 741 | 781 |
| 742 void InstructionSelector::VisitPhi(Node* node) { | 782 void InstructionSelector::VisitPhi(Node* node) { |
| 743 // TODO(bmeurer): Emit a PhiInstruction here. | 783 // TODO(bmeurer): Emit a PhiInstruction here. |
| 744 for (InputIter i = node->inputs().begin(); i != node->inputs().end(); ++i) { | 784 for (InputIter i = node->inputs().begin(); i != node->inputs().end(); ++i) { |
| 745 MarkAsUsed(*i); | 785 MarkAsUsed(*i); |
| 746 } | 786 } |
| 747 } | 787 } |
| 748 | 788 |
| 749 | 789 |
| 750 void InstructionSelector::VisitProjection(Node* node) { | 790 void InstructionSelector::VisitProjection(Node* node) { |
| 751 for (InputIter i = node->inputs().begin(); i != node->inputs().end(); ++i) { | 791 OperandGenerator g(this); |
| 752 MarkAsUsed(*i); | 792 Node* value = node->InputAt(0); |
| 793 switch (value->opcode()) { |
| 794 case IrOpcode::kInt32AddWithOverflow: |
| 795 case IrOpcode::kInt32SubWithOverflow: |
| 796 if (OpParameter<int32_t>(node) == 0) { |
| 797 Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value)); |
| 798 } else { |
| 799 ASSERT_EQ(1, OpParameter<int32_t>(node)); |
| 800 MarkAsUsed(value); |
| 801 } |
| 802 break; |
| 803 default: |
| 804 break; |
| 753 } | 805 } |
| 754 } | 806 } |
| 755 | 807 |
| 756 | 808 |
| 757 void InstructionSelector::VisitConstant(Node* node) { | 809 void InstructionSelector::VisitConstant(Node* node) { |
| 758 // We must emit a NOP here because every live range needs a defining | 810 // We must emit a NOP here because every live range needs a defining |
| 759 // instruction in the register allocator. | 811 // instruction in the register allocator. |
| 760 OperandGenerator g(this); | 812 OperandGenerator g(this); |
| 761 Emit(kArchNop, g.DefineAsConstant(node)); | 813 Emit(kArchNop, g.DefineAsConstant(node)); |
| 762 } | 814 } |
| (...skipping 79 matching lines...) |
| 842 return VisitWord64Compare(value, &cont); | 894 return VisitWord64Compare(value, &cont); |
| 843 case IrOpcode::kFloat64Equal: | 895 case IrOpcode::kFloat64Equal: |
| 844 cont.OverwriteAndNegateIfEqual(kUnorderedEqual); | 896 cont.OverwriteAndNegateIfEqual(kUnorderedEqual); |
| 845 return VisitFloat64Compare(value, &cont); | 897 return VisitFloat64Compare(value, &cont); |
| 846 case IrOpcode::kFloat64LessThan: | 898 case IrOpcode::kFloat64LessThan: |
| 847 cont.OverwriteAndNegateIfEqual(kUnorderedLessThan); | 899 cont.OverwriteAndNegateIfEqual(kUnorderedLessThan); |
| 848 return VisitFloat64Compare(value, &cont); | 900 return VisitFloat64Compare(value, &cont); |
| 849 case IrOpcode::kFloat64LessThanOrEqual: | 901 case IrOpcode::kFloat64LessThanOrEqual: |
| 850 cont.OverwriteAndNegateIfEqual(kUnorderedLessThanOrEqual); | 902 cont.OverwriteAndNegateIfEqual(kUnorderedLessThanOrEqual); |
| 851 return VisitFloat64Compare(value, &cont); | 903 return VisitFloat64Compare(value, &cont); |
| 904 case IrOpcode::kProjection: |
| 905 // Check if this is the overflow output projection of an |
| 906 // <Operation>WithOverflow node. |
| 907 if (OpParameter<int32_t>(value) == 1) { |
| 908 // We cannot combine the <Operation>WithOverflow with this branch |
| 909 // unless the 0th projection (the use of the actual value of the |
| 910 // <Operation>) is either NULL, which means there's no use of the |
| 911 // actual value, or was already defined, which means it is scheduled |
| 912 // *AFTER* this branch. |
| 913 Node* node = value->InputAt(0); |
| 914 Node* result = node->FindProjection(0); |
| 915 if (result == NULL || IsDefined(result)) { |
| 916 switch (node->opcode()) { |
| 917 case IrOpcode::kInt32AddWithOverflow: |
| 918 cont.OverwriteAndNegateIfEqual(kOverflow); |
| 919 return VisitInt32AddWithOverflow(node, &cont); |
| 920 case IrOpcode::kInt32SubWithOverflow: |
| 921 cont.OverwriteAndNegateIfEqual(kOverflow); |
| 922 return VisitInt32SubWithOverflow(node, &cont); |
| 923 default: |
| 924 break; |
| 925 } |
| 926 } |
| 927 } |
| 928 break; |
| 852 default: | 929 default: |
| 853 break; | 930 break; |
| 854 } | 931 } |
| 855 } | 932 } |
| 856 | 933 |
| 857 // Branch could not be combined with a compare, emit compare against 0. | 934 // Branch could not be combined with a compare, emit compare against 0. |
| 858 VisitWord32Test(value, &cont); | 935 VisitWord32Test(value, &cont); |
| 859 } | 936 } |
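The result == NULL || IsDefined(result) guard leans on the bottom-up visit order from VisitBlock: a value projection already marked defined was selected earlier in this walk and therefore lands *after* the branch in the final schedule, so folding the flag-setting operation into the branch cannot strand it; a still-undefined projection with uses would lose its definition. Compactly restated (stand-in types, mirroring the logic rather than the real API):

    struct Node;

    // Fusing <Operation>WithOverflow into the branch is legal only if the
    // value projection has no uses at all (NULL) or was already selected,
    // i.e. it is scheduled after this branch once the block is reversed.
    bool CanFuseOverflowIntoBranch(const Node* value_projection,
                                   bool value_already_defined) {
      return value_projection == nullptr || value_already_defined;
    }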
| 860 | 937 |
| 861 | 938 |
| (...skipping 16 matching lines...) |
| 878 ASSERT(deopt->op()->opcode() == IrOpcode::kDeoptimize); | 955 ASSERT(deopt->op()->opcode() == IrOpcode::kDeoptimize); |
| 879 Node* state = deopt->InputAt(0); | 956 Node* state = deopt->InputAt(0); |
| 880 ASSERT(state->op()->opcode() == IrOpcode::kFrameState); | 957 ASSERT(state->op()->opcode() == IrOpcode::kFrameState); |
| 881 FrameStateDescriptor descriptor = OpParameter<FrameStateDescriptor>(state); | 958 FrameStateDescriptor descriptor = OpParameter<FrameStateDescriptor>(state); |
| 882 // TODO(jarin) We should also add an instruction input for every input to | 959 // TODO(jarin) We should also add an instruction input for every input to |
| 883 // the framestate node (and recurse for the inlined framestates). | 960 // the framestate node (and recurse for the inlined framestates). |
| 884 int deoptimization_id = sequence()->AddDeoptimizationEntry(descriptor); | 961 int deoptimization_id = sequence()->AddDeoptimizationEntry(descriptor); |
| 885 Emit(kArchDeoptimize | MiscField::encode(deoptimization_id), NULL); | 962 Emit(kArchDeoptimize | MiscField::encode(deoptimization_id), NULL); |
| 886 } | 963 } |
| 887 | 964 |
| 965 |
| 888 #if !V8_TURBOFAN_TARGET | 966 #if !V8_TURBOFAN_TARGET |
| 889 | 967 |
| 890 #define DECLARE_UNIMPLEMENTED_SELECTOR(x) \ | 968 #define DECLARE_UNIMPLEMENTED_SELECTOR(x) \ |
| 891 void InstructionSelector::Visit##x(Node* node) { UNIMPLEMENTED(); } | 969 void InstructionSelector::Visit##x(Node* node) { UNIMPLEMENTED(); } |
| 892 MACHINE_OP_LIST(DECLARE_UNIMPLEMENTED_SELECTOR) | 970 MACHINE_OP_LIST(DECLARE_UNIMPLEMENTED_SELECTOR) |
| 893 #undef DECLARE_UNIMPLEMENTED_SELECTOR | 971 #undef DECLARE_UNIMPLEMENTED_SELECTOR |
| 894 | 972 |
| 895 | 973 |
| 974 void InstructionSelector::VisitInt32AddWithOverflow(Node* node, |
| 975 FlagsContinuation* cont) { |
| 976 UNIMPLEMENTED(); |
| 977 } |
| 978 |
| 979 |
| 980 void InstructionSelector::VisitInt32SubWithOverflow(Node* node, |
| 981 FlagsContinuation* cont) { |
| 982 UNIMPLEMENTED(); |
| 983 } |
| 984 |
| 985 |
| 896 void InstructionSelector::VisitWord32Test(Node* node, FlagsContinuation* cont) { | 986 void InstructionSelector::VisitWord32Test(Node* node, FlagsContinuation* cont) { |
| 897 UNIMPLEMENTED(); | 987 UNIMPLEMENTED(); |
| 898 } | 988 } |
| 899 | 989 |
| 900 | 990 |
| 901 void InstructionSelector::VisitWord32Compare(Node* node, | 991 void InstructionSelector::VisitWord32Compare(Node* node, |
| 902 FlagsContinuation* cont) { | 992 FlagsContinuation* cont) { |
| 903 UNIMPLEMENTED(); | 993 UNIMPLEMENTED(); |
| 904 } | 994 } |
| 905 | 995 |
| 906 | 996 |
| 907 void InstructionSelector::VisitFloat64Compare(Node* node, | 997 void InstructionSelector::VisitFloat64Compare(Node* node, |
| 908 FlagsContinuation* cont) { | 998 FlagsContinuation* cont) { |
| 909 UNIMPLEMENTED(); | 999 UNIMPLEMENTED(); |
| 910 } | 1000 } |
| 911 | 1001 |
| 912 | 1002 |
| 913 void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, | 1003 void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, |
| 914 BasicBlock* deoptimization) {} | 1004 BasicBlock* deoptimization) {} |
| 915 | 1005 |
| 916 #endif | 1006 #endif // !V8_TURBOFAN_TARGET |
| 917 | 1007 |
| 918 } // namespace compiler | 1008 } // namespace compiler |
| 919 } // namespace internal | 1009 } // namespace internal |
| 920 } // namespace v8 | 1010 } // namespace v8 |