OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/register-allocator.h" | 5 #include "src/compiler/register-allocator.h" |
6 | 6 |
7 #include "src/compiler/linkage.h" | 7 #include "src/compiler/linkage.h" |
8 #include "src/hydrogen.h" | 8 #include "src/hydrogen.h" |
9 #include "src/string-stream.h" | 9 #include "src/string-stream.h" |
10 | 10 |
(...skipping 726 matching lines...) |
737 for (int i = start; i <= end; ++i) { | 737 for (int i = start; i <= end; ++i) { |
738 if (code()->IsGapAt(i)) { | 738 if (code()->IsGapAt(i)) { |
739 Instruction* instr = NULL; | 739 Instruction* instr = NULL; |
740 Instruction* prev_instr = NULL; | 740 Instruction* prev_instr = NULL; |
741 if (i < end) instr = InstructionAt(i + 1); | 741 if (i < end) instr = InstructionAt(i + 1); |
742 if (i > start) prev_instr = InstructionAt(i - 1); | 742 if (i > start) prev_instr = InstructionAt(i - 1); |
743 MeetConstraintsBetween(prev_instr, instr, i); | 743 MeetConstraintsBetween(prev_instr, instr, i); |
744 if (!AllocationOk()) return; | 744 if (!AllocationOk()) return; |
745 } | 745 } |
746 } | 746 } |
| 747 |
| 748 // Meet register constraints for the instruction at the end of the block. |
| 749 if (!code()->IsGapAt(end)) { |
| 750 MeetRegisterConstraintsForLastInstructionInBlock(block); |
| 751 } |
| 752 } |
| 753 |
| 754 |
| 755 void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock( |
| 756 BasicBlock* block) { |
| 757 int end = block->last_instruction_index(); |
| 758 Instruction* last_instruction = InstructionAt(end); |
| 759 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { |
| 760 InstructionOperand* output_operand = last_instruction->OutputAt(i); |
| 761 DCHECK(!output_operand->IsConstant()); |
| 762 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand); |
| 763 int output_vreg = output->virtual_register(); |
| 764 LiveRange* range = LiveRangeFor(output_vreg); |
| 765 bool assigned = false; |
| 766 if (output->HasFixedPolicy()) { |
| 767 AllocateFixed(output, -1, false); |
| 768 // This value is produced on the stack, we never need to spill it. |
| 769 if (output->IsStackSlot()) { |
| 770 range->SetSpillOperand(output); |
| 771 range->SetSpillStartIndex(end); |
| 772 assigned = true; |
| 773 } |
| 774 |
| 775 BasicBlock::Successors successors = block->successors(); |
| 776 for (BasicBlock::Successors::iterator succ = successors.begin(); |
| 777 succ != successors.end(); ++succ) { |
| 778 DCHECK((*succ)->PredecessorCount() == 1); |
| 779 int gap_index = (*succ)->first_instruction_index() + 1; |
| 780 DCHECK(code()->IsGapAt(gap_index)); |
| 781 |
| 782 // Create an unconstrained operand for the same virtual register |
| 783 // and insert a gap move from the fixed output to the operand. |
| 784 UnallocatedOperand* output_copy = |
| 785 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY); |
| 786 output_copy->set_virtual_register(output_vreg); |
| 787 |
| 788 code()->AddGapMove(gap_index, output, output_copy); |
| 789 } |
| 790 } |
| 791 |
| 792 if (!assigned) { |
| 793 BasicBlock::Successors successors = block->successors(); |
| 794 for (BasicBlock::Successors::iterator succ = successors.begin(); |
| 795 succ != successors.end(); ++succ) { |
| 796 DCHECK((*succ)->PredecessorCount() == 1); |
| 797 int gap_index = (*succ)->first_instruction_index() + 1; |
| 798 range->SetSpillStartIndex(gap_index); |
| 799 |
| 800 // This move to spill operand is not a real use. Liveness analysis |
| 801 // and splitting of live ranges do not account for it. |
| 802 // Thus it should be inserted to a lifetime position corresponding to |
| 803 // the instruction end. |
| 804 GapInstruction* gap = code()->GapAt(gap_index); |
| 805 ParallelMove* move = |
| 806 gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone()); |
| 807 move->AddMove(output, range->GetSpillOperand(), code_zone()); |
| 808 } |
| 809 } |
| 810 } |
747 } | 811 } |
748 | 812 |
749 | 813 |
750 void RegisterAllocator::MeetConstraintsBetween(Instruction* first, | 814 void RegisterAllocator::MeetConstraintsBetween(Instruction* first, |
751 Instruction* second, | 815 Instruction* second, |
752 int gap_index) { | 816 int gap_index) { |
753 if (first != NULL) { | 817 if (first != NULL) { |
754 // Handle fixed temporaries. | 818 // Handle fixed temporaries. |
755 for (size_t i = 0; i < first->TempCount(); i++) { | 819 for (size_t i = 0; i < first->TempCount(); i++) { |
756 UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i)); | 820 UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i)); |
(...skipping 22 matching lines...) |
779 | 843 |
780 // This value is produced on the stack, we never need to spill it. | 844 // This value is produced on the stack, we never need to spill it. |
781 if (first_output->IsStackSlot()) { | 845 if (first_output->IsStackSlot()) { |
782 range->SetSpillOperand(first_output); | 846 range->SetSpillOperand(first_output); |
783 range->SetSpillStartIndex(gap_index - 1); | 847 range->SetSpillStartIndex(gap_index - 1); |
784 assigned = true; | 848 assigned = true; |
785 } | 849 } |
786 code()->AddGapMove(gap_index, first_output, output_copy); | 850 code()->AddGapMove(gap_index, first_output, output_copy); |
787 } | 851 } |
788 | 852 |
| 853 // Make sure we add a gap move for spilling (if we have not done |
| 854 // so already). |
789 if (!assigned) { | 855 if (!assigned) { |
790 range->SetSpillStartIndex(gap_index); | 856 range->SetSpillStartIndex(gap_index); |
791 | 857 |
792 // This move to spill operand is not a real use. Liveness analysis | 858 // This move to spill operand is not a real use. Liveness analysis |
793 // and splitting of live ranges do not account for it. | 859 // and splitting of live ranges do not account for it. |
794 // Thus it should be inserted to a lifetime position corresponding to | 860 // Thus it should be inserted to a lifetime position corresponding to |
795 // the instruction end. | 861 // the instruction end. |
796 GapInstruction* gap = code()->GapAt(gap_index); | 862 GapInstruction* gap = code()->GapAt(gap_index); |
797 ParallelMove* move = | 863 ParallelMove* move = |
798 gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone()); | 864 gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone()); |
(...skipping 1358 matching lines...) |
2157 allocator_zone_start_allocation_size_; | 2223 allocator_zone_start_allocation_size_; |
2158 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size); | 2224 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size); |
2159 } | 2225 } |
2160 #ifdef DEBUG | 2226 #ifdef DEBUG |
2161 if (allocator_ != NULL) allocator_->Verify(); | 2227 if (allocator_ != NULL) allocator_->Verify(); |
2162 #endif | 2228 #endif |
2163 } | 2229 } |
2164 } | 2230 } |
2165 } | 2231 } |
2166 } // namespace v8::internal::compiler | 2232 } // namespace v8::internal::compiler |
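
Note: the following is a minimal, self-contained sketch (not V8's real API; every type and name below is a hypothetical stand-in) of the pattern the new MeetRegisterConstraintsForLastInstructionInBlock introduces: when a block's last instruction produces its output in a fixed location, each successor block (assumed here, as in the patch's DCHECK, to have exactly one predecessor) receives a gap move from that fixed output into an unconstrained copy of the same virtual register, so later allocation passes are free to place the value.

#include <cstdio>
#include <vector>

// Hypothetical, simplified stand-ins for V8's operand/gap-move machinery.
struct Operand {
  int vreg;    // virtual register number
  bool fixed;  // true if pinned to a fixed register or stack slot
};
struct GapMove { Operand from, to; };
struct Block {
  std::vector<Block*> successors;
  std::vector<GapMove> entry_moves;  // parallel moves at block entry ("gap")
};

// For a fixed output defined by the last instruction of 'block', add a move
// in every successor from the fixed location to an unconstrained copy.
void MeetFixedOutputConstraint(Block* block, const Operand& output) {
  for (Block* succ : block->successors) {
    Operand copy{output.vreg, /*fixed=*/false};  // same vreg, any location
    succ->entry_moves.push_back(GapMove{output, copy});
  }
}

int main() {
  Block a, b, pred;
  pred.successors = {&a, &b};
  MeetFixedOutputConstraint(&pred, Operand{7, /*fixed=*/true});
  std::printf("gap moves: %zu and %zu\n", a.entry_moves.size(),
              b.entry_moves.size());  // prints: gap moves: 1 and 1
  return 0;
}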