| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
| 15 // | 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "lithium-allocator.h" | 28 #include "lithium-allocator-inl.h" |
| 29 | 29 |
| 30 #include "hydrogen.h" | 30 #include "hydrogen.h" |
| 31 #include "string-stream.h" | 31 #include "string-stream.h" |
| 32 | 32 |
| 33 #if V8_TARGET_ARCH_IA32 | 33 #if V8_TARGET_ARCH_IA32 |
| 34 #include "ia32/lithium-ia32.h" | 34 #include "ia32/lithium-ia32.h" |
| 35 #elif V8_TARGET_ARCH_X64 | 35 #elif V8_TARGET_ARCH_X64 |
| 36 #include "x64/lithium-x64.h" | 36 #include "x64/lithium-x64.h" |
| 37 #elif V8_TARGET_ARCH_ARM | 37 #elif V8_TARGET_ARCH_ARM |
| 38 #include "arm/lithium-arm.h" | 38 #include "arm/lithium-arm.h" |
| (...skipping 25 matching lines...) Expand all Loading... |
| 64 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { | 64 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { |
| 65 return a.Value() < b.Value() ? a : b; | 65 return a.Value() < b.Value() ? a : b; |
| 66 } | 66 } |
| 67 | 67 |
| 68 | 68 |
| 69 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { | 69 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { |
| 70 return a.Value() > b.Value() ? a : b; | 70 return a.Value() > b.Value() ? a : b; |
| 71 } | 71 } |
| 72 | 72 |
| 73 | 73 |
| 74 void LOperand::PrintTo(StringStream* stream) { | 74 UsePosition::UsePosition(LifetimePosition pos, LOperand* operand) |
| 75 LUnallocated* unalloc = NULL; | 75 : operand_(operand), |
| 76 switch (kind()) { | 76 hint_(NULL), |
| 77 case INVALID: | 77 pos_(pos), |
| 78 break; | 78 next_(NULL), |
| 79 case UNALLOCATED: | 79 requires_reg_(false), |
| 80 unalloc = LUnallocated::cast(this); | 80 register_beneficial_(true) { |
| 81 stream->Add("v%d", unalloc->virtual_register()); | 81 if (operand_ != NULL && operand_->IsUnallocated()) { |
| 82 switch (unalloc->policy()) { | 82 LUnallocated* unalloc = LUnallocated::cast(operand_); |
| 83 case LUnallocated::NONE: | 83 requires_reg_ = unalloc->HasRegisterPolicy(); |
| 84 break; | 84 register_beneficial_ = !unalloc->HasAnyPolicy(); |
| 85 case LUnallocated::FIXED_REGISTER: { | |
| 86 const char* register_name = | |
| 87 Register::AllocationIndexToString(unalloc->fixed_index()); | |
| 88 stream->Add("(=%s)", register_name); | |
| 89 break; | |
| 90 } | |
| 91 case LUnallocated::FIXED_DOUBLE_REGISTER: { | |
| 92 const char* double_register_name = | |
| 93 DoubleRegister::AllocationIndexToString(unalloc->fixed_index()); | |
| 94 stream->Add("(=%s)", double_register_name); | |
| 95 break; | |
| 96 } | |
| 97 case LUnallocated::FIXED_SLOT: | |
| 98 stream->Add("(=%dS)", unalloc->fixed_index()); | |
| 99 break; | |
| 100 case LUnallocated::MUST_HAVE_REGISTER: | |
| 101 stream->Add("(R)"); | |
| 102 break; | |
| 103 case LUnallocated::WRITABLE_REGISTER: | |
| 104 stream->Add("(WR)"); | |
| 105 break; | |
| 106 case LUnallocated::SAME_AS_FIRST_INPUT: | |
| 107 stream->Add("(1)"); | |
| 108 break; | |
| 109 case LUnallocated::ANY: | |
| 110 stream->Add("(-)"); | |
| 111 break; | |
| 112 case LUnallocated::IGNORE: | |
| 113 stream->Add("(0)"); | |
| 114 break; | |
| 115 } | |
| 116 break; | |
| 117 case CONSTANT_OPERAND: | |
| 118 stream->Add("[constant:%d]", index()); | |
| 119 break; | |
| 120 case STACK_SLOT: | |
| 121 stream->Add("[stack:%d]", index()); | |
| 122 break; | |
| 123 case DOUBLE_STACK_SLOT: | |
| 124 stream->Add("[double_stack:%d]", index()); | |
| 125 break; | |
| 126 case REGISTER: | |
| 127 stream->Add("[%s|R]", Register::AllocationIndexToString(index())); | |
| 128 break; | |
| 129 case DOUBLE_REGISTER: | |
| 130 stream->Add("[%s|R]", DoubleRegister::AllocationIndexToString(index())); | |
| 131 break; | |
| 132 case ARGUMENT: | |
| 133 stream->Add("[arg:%d]", index()); | |
| 134 break; | |
| 135 } | 85 } |
| 136 } | 86 ASSERT(pos_.IsValid()); |
| 137 | |
| 138 int LOperand::VirtualRegister() { | |
| 139 LUnallocated* unalloc = LUnallocated::cast(this); | |
| 140 return unalloc->virtual_register(); | |
| 141 } | 87 } |
| 142 | 88 |
| 143 | 89 |
| 90 bool UsePosition::HasHint() const { |
| 91 return hint_ != NULL && !hint_->IsUnallocated(); |
| 92 } |
| 93 |
| 94 |
| 144 bool UsePosition::RequiresRegister() const { | 95 bool UsePosition::RequiresRegister() const { |
| 145 return requires_reg_; | 96 return requires_reg_; |
| 146 } | 97 } |
| 147 | 98 |
| 148 | 99 |
| 149 bool UsePosition::RegisterIsBeneficial() const { | 100 bool UsePosition::RegisterIsBeneficial() const { |
| 150 return register_beneficial_; | 101 return register_beneficial_; |
| 151 } | 102 } |
| 152 | 103 |
| 153 | 104 |
| (...skipping 29 matching lines...) Expand all Loading... |
| 183 } | 134 } |
| 184 current_interval = current_interval->next(); | 135 current_interval = current_interval->next(); |
| 185 } | 136 } |
| 186 return false; | 137 return false; |
| 187 } | 138 } |
| 188 | 139 |
| 189 | 140 |
| 190 #endif | 141 #endif |
| 191 | 142 |
| 192 | 143 |
| 144 LiveRange::LiveRange(int id) |
| 145 : id_(id), |
| 146 spilled_(false), |
| 147 assigned_register_(kInvalidAssignment), |
| 148 assigned_register_kind_(NONE), |
| 149 last_interval_(NULL), |
| 150 first_interval_(NULL), |
| 151 first_pos_(NULL), |
| 152 parent_(NULL), |
| 153 next_(NULL), |
| 154 current_interval_(NULL), |
| 155 last_processed_use_(NULL), |
| 156 spill_start_index_(kMaxInt) { |
| 157 spill_operand_ = new LUnallocated(LUnallocated::IGNORE); |
| 158 } |
| 159 |
| 160 |
| 161 void LiveRange::set_assigned_register(int reg, RegisterKind register_kind) { |
| 162 ASSERT(!HasRegisterAssigned() && !IsSpilled()); |
| 163 assigned_register_ = reg; |
| 164 assigned_register_kind_ = register_kind; |
| 165 ConvertOperands(); |
| 166 } |
| 167 |
| 168 |
| 169 void LiveRange::MakeSpilled() { |
| 170 ASSERT(!IsSpilled()); |
| 171 ASSERT(TopLevel()->HasAllocatedSpillOperand()); |
| 172 spilled_ = true; |
| 173 assigned_register_ = kInvalidAssignment; |
| 174 ConvertOperands(); |
| 175 } |
| 176 |
| 177 |
| 178 bool LiveRange::HasAllocatedSpillOperand() const { |
| 179 return spill_operand_ != NULL && !spill_operand_->IsUnallocated(); |
| 180 } |
| 181 |
| 182 |
| 183 void LiveRange::SetSpillOperand(LOperand* operand) { |
| 184 ASSERT(!operand->IsUnallocated()); |
| 185 ASSERT(spill_operand_ != NULL); |
| 186 ASSERT(spill_operand_->IsUnallocated()); |
| 187 spill_operand_->ConvertTo(operand->kind(), operand->index()); |
| 188 } |
| 189 |
| 190 |
| 193 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { | 191 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { |
| 194 UsePosition* use_pos = last_processed_use_; | 192 UsePosition* use_pos = last_processed_use_; |
| 195 if (use_pos == NULL) use_pos = first_pos(); | 193 if (use_pos == NULL) use_pos = first_pos(); |
| 196 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) { | 194 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) { |
| 197 use_pos = use_pos->next(); | 195 use_pos = use_pos->next(); |
| 198 } | 196 } |
| 199 last_processed_use_ = use_pos; | 197 last_processed_use_ = use_pos; |
| 200 return use_pos; | 198 return use_pos; |
| 201 } | 199 } |
| 202 | 200 |
| (...skipping 324 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 527 } else { | 525 } else { |
| 528 b = b->next(); | 526 b = b->next(); |
| 529 } | 527 } |
| 530 } | 528 } |
| 531 return LifetimePosition::Invalid(); | 529 return LifetimePosition::Invalid(); |
| 532 } | 530 } |
| 533 | 531 |
| 534 | 532 |
| 535 void LAllocator::InitializeLivenessAnalysis() { | 533 void LAllocator::InitializeLivenessAnalysis() { |
| 536 // Initialize the live_in sets for each block to NULL. | 534 // Initialize the live_in sets for each block to NULL. |
| 537 int block_count = graph()->blocks()->length(); | 535 int block_count = graph_->blocks()->length(); |
| 538 live_in_sets_.Initialize(block_count); | 536 live_in_sets_.Initialize(block_count); |
| 539 live_in_sets_.AddBlock(NULL, block_count); | 537 live_in_sets_.AddBlock(NULL, block_count); |
| 540 } | 538 } |
| 541 | 539 |
| 542 | 540 |
| 543 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) { | 541 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) { |
| 544 // Compute live out for the given block, except not including backward | 542 // Compute live out for the given block, except not including backward |
| 545 // successor edges. | 543 // successor edges. |
| 546 BitVector* live_out = new BitVector(next_virtual_register_); | 544 BitVector* live_out = new BitVector(next_virtual_register_); |
| 547 | 545 |
| (...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 608 int reg_index = operand->fixed_index(); | 606 int reg_index = operand->fixed_index(); |
| 609 operand->ConvertTo(LOperand::REGISTER, reg_index); | 607 operand->ConvertTo(LOperand::REGISTER, reg_index); |
| 610 } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) { | 608 } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) { |
| 611 int reg_index = operand->fixed_index(); | 609 int reg_index = operand->fixed_index(); |
| 612 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index); | 610 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index); |
| 613 } else { | 611 } else { |
| 614 UNREACHABLE(); | 612 UNREACHABLE(); |
| 615 } | 613 } |
| 616 if (is_tagged) { | 614 if (is_tagged) { |
| 617 TraceAlloc("Fixed reg is tagged at %d\n", pos); | 615 TraceAlloc("Fixed reg is tagged at %d\n", pos); |
| 618 LInstruction* instr = chunk_->instructions()->at(pos); | 616 LInstruction* instr = InstructionAt(pos); |
| 619 if (instr->HasPointerMap()) { | 617 if (instr->HasPointerMap()) { |
| 620 instr->pointer_map()->RecordPointer(operand); | 618 instr->pointer_map()->RecordPointer(operand); |
| 621 } | 619 } |
| 622 } | 620 } |
| 623 return operand; | 621 return operand; |
| 624 } | 622 } |
| 625 | 623 |
| 626 | 624 |
| 627 LiveRange* LAllocator::FixedLiveRangeFor(int index) { | 625 LiveRange* LAllocator::FixedLiveRangeFor(int index) { |
| 628 if (index >= fixed_live_ranges_.length()) { | 626 if (index >= fixed_live_ranges_.length()) { |
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 663 } | 661 } |
| 664 LiveRange* result = live_ranges_[index]; | 662 LiveRange* result = live_ranges_[index]; |
| 665 if (result == NULL) { | 663 if (result == NULL) { |
| 666 result = new LiveRange(index); | 664 result = new LiveRange(index); |
| 667 live_ranges_[index] = result; | 665 live_ranges_[index] = result; |
| 668 } | 666 } |
| 669 return result; | 667 return result; |
| 670 } | 668 } |
| 671 | 669 |
| 672 | 670 |
| 673 LGap* LAllocator::GetLastGap(HBasicBlock* block) const { | 671 LGap* LAllocator::GetLastGap(HBasicBlock* block) { |
| 674 int last_instruction = block->last_instruction_index(); | 672 int last_instruction = block->last_instruction_index(); |
| 675 int index = chunk_->NearestGapPos(last_instruction); | 673 int index = chunk_->NearestGapPos(last_instruction); |
| 676 return chunk_->GetGapAt(index); | 674 return GapAt(index); |
| 677 } | 675 } |
| 678 | 676 |
| 679 | 677 |
| 680 HPhi* LAllocator::LookupPhi(LOperand* operand) const { | 678 HPhi* LAllocator::LookupPhi(LOperand* operand) const { |
| 681 if (!operand->IsUnallocated()) return NULL; | 679 if (!operand->IsUnallocated()) return NULL; |
| 682 int index = operand->VirtualRegister(); | 680 int index = operand->VirtualRegister(); |
| 683 HValue* instr = graph()->LookupValue(index); | 681 HValue* instr = graph_->LookupValue(index); |
| 684 if (instr != NULL && instr->IsPhi()) { | 682 if (instr != NULL && instr->IsPhi()) { |
| 685 return HPhi::cast(instr); | 683 return HPhi::cast(instr); |
| 686 } | 684 } |
| 687 return NULL; | 685 return NULL; |
| 688 } | 686 } |
| 689 | 687 |
| 690 | 688 |
| 691 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) { | 689 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) { |
| 692 if (operand->IsUnallocated()) { | 690 if (operand->IsUnallocated()) { |
| 693 return LiveRangeFor(LUnallocated::cast(operand)->virtual_register()); | 691 return LiveRangeFor(LUnallocated::cast(operand)->virtual_register()); |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 732 LUnallocated* unalloc_operand = LUnallocated::cast(operand); | 730 LUnallocated* unalloc_operand = LUnallocated::cast(operand); |
| 733 range->AddUsePosition(position, unalloc_operand)->set_hint(hint); | 731 range->AddUsePosition(position, unalloc_operand)->set_hint(hint); |
| 734 } | 732 } |
| 735 range->AddUseInterval(block_start, position); | 733 range->AddUseInterval(block_start, position); |
| 736 } | 734 } |
| 737 | 735 |
| 738 | 736 |
| 739 void LAllocator::AddConstraintsGapMove(int index, | 737 void LAllocator::AddConstraintsGapMove(int index, |
| 740 LOperand* from, | 738 LOperand* from, |
| 741 LOperand* to) { | 739 LOperand* to) { |
| 742 LGap* gap = chunk_->GetGapAt(index); | 740 LGap* gap = GapAt(index); |
| 743 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); | 741 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); |
| 744 if (from->IsUnallocated()) { | 742 if (from->IsUnallocated()) { |
| 745 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); | 743 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); |
| 746 for (int i = 0; i < move_operands->length(); ++i) { | 744 for (int i = 0; i < move_operands->length(); ++i) { |
| 747 LMoveOperands cur = move_operands->at(i); | 745 LMoveOperands cur = move_operands->at(i); |
| 748 LOperand* cur_to = cur.to(); | 746 LOperand* cur_to = cur.destination(); |
| 749 if (cur_to->IsUnallocated()) { | 747 if (cur_to->IsUnallocated()) { |
| 750 if (cur_to->VirtualRegister() == from->VirtualRegister()) { | 748 if (cur_to->VirtualRegister() == from->VirtualRegister()) { |
| 751 move->AddMove(cur.from(), to); | 749 move->AddMove(cur.source(), to); |
| 752 return; | 750 return; |
| 753 } | 751 } |
| 754 } | 752 } |
| 755 } | 753 } |
| 756 } | 754 } |
| 757 move->AddMove(from, to); | 755 move->AddMove(from, to); |
| 758 } | 756 } |
| 759 | 757 |
| 760 | 758 |
| 761 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) { | 759 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) { |
| 762 int start = block->first_instruction_index(); | 760 int start = block->first_instruction_index(); |
| 763 int end = block->last_instruction_index(); | 761 int end = block->last_instruction_index(); |
| 764 for (int i = start; i <= end; ++i) { | 762 for (int i = start; i <= end; ++i) { |
| 765 if (chunk_->IsGapAt(i)) { | 763 if (IsGapAt(i)) { |
| 766 InstructionSummary* summary = NULL; | 764 LInstruction* instr = NULL; |
| 767 InstructionSummary* prev_summary = NULL; | 765 LInstruction* prev_instr = NULL; |
| 768 if (i < end) summary = GetSummary(i + 1); | 766 if (i < end) instr = InstructionAt(i + 1); |
| 769 if (i > start) prev_summary = GetSummary(i - 1); | 767 if (i > start) prev_instr = InstructionAt(i - 1); |
| 770 MeetConstraintsBetween(prev_summary, summary, i); | 768 MeetConstraintsBetween(prev_instr, instr, i); |
| 771 } | 769 } |
| 772 } | 770 } |
| 773 } | 771 } |
| 774 | 772 |
| 775 | 773 |
| 776 void LAllocator::MeetConstraintsBetween(InstructionSummary* first, | 774 void LAllocator::MeetConstraintsBetween(LInstruction* first, |
| 777 InstructionSummary* second, | 775 LInstruction* second, |
| 778 int gap_index) { | 776 int gap_index) { |
| 779 // Handle fixed temporaries. | 777 // Handle fixed temporaries. |
| 780 if (first != NULL) { | 778 if (first != NULL) { |
| 781 for (int i = 0; i < first->TempCount(); ++i) { | 779 for (TempIterator it(first); it.HasNext(); it.Advance()) { |
| 782 LUnallocated* temp = LUnallocated::cast(first->TempAt(i)); | 780 LUnallocated* temp = LUnallocated::cast(it.Next()); |
| 783 if (temp->HasFixedPolicy()) { | 781 if (temp->HasFixedPolicy()) { |
| 784 AllocateFixed(temp, gap_index - 1, false); | 782 AllocateFixed(temp, gap_index - 1, false); |
| 785 } | 783 } |
| 786 } | 784 } |
| 787 } | 785 } |
| 788 | 786 |
| 789 // Handle fixed output operand. | 787 // Handle fixed output operand. |
| 790 if (first != NULL && first->Output() != NULL) { | 788 if (first != NULL && first->Output() != NULL) { |
| 791 LUnallocated* first_output = LUnallocated::cast(first->Output()); | 789 LUnallocated* first_output = LUnallocated::cast(first->Output()); |
| 792 LiveRange* range = LiveRangeFor(first_output->VirtualRegister()); | 790 LiveRange* range = LiveRangeFor(first_output->VirtualRegister()); |
| (...skipping 12 matching lines...) Expand all Loading... |
| 805 chunk_->AddGapMove(gap_index, first_output, output_copy); | 803 chunk_->AddGapMove(gap_index, first_output, output_copy); |
| 806 } | 804 } |
| 807 | 805 |
| 808 if (!assigned) { | 806 if (!assigned) { |
| 809 range->SetSpillStartIndex(gap_index); | 807 range->SetSpillStartIndex(gap_index); |
| 810 | 808 |
| 811 // This move to spill operand is not a real use. Liveness analysis | 809 // This move to spill operand is not a real use. Liveness analysis |
| 812 // and splitting of live ranges do not account for it. | 810 // and splitting of live ranges do not account for it. |
| 813 // Thus it should be inserted to a lifetime position corresponding to | 811 // Thus it should be inserted to a lifetime position corresponding to |
| 814 // the instruction end. | 812 // the instruction end. |
| 815 LGap* gap = chunk_->GetGapAt(gap_index); | 813 LGap* gap = GapAt(gap_index); |
| 816 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE); | 814 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE); |
| 817 move->AddMove(first_output, range->GetSpillOperand()); | 815 move->AddMove(first_output, range->GetSpillOperand()); |
| 818 } | 816 } |
| 819 } | 817 } |
| 820 | 818 |
| 821 // Handle fixed input operands of second instruction. | 819 // Handle fixed input operands of second instruction. |
| 822 if (second != NULL) { | 820 if (second != NULL) { |
| 823 for (int i = 0; i < second->InputCount(); ++i) { | 821 for (UseIterator it(second); it.HasNext(); it.Advance()) { |
| 824 LUnallocated* cur_input = LUnallocated::cast(second->InputAt(i)); | 822 LUnallocated* cur_input = LUnallocated::cast(it.Next()); |
| 825 if (cur_input->HasFixedPolicy()) { | 823 if (cur_input->HasFixedPolicy()) { |
| 826 LUnallocated* input_copy = cur_input->CopyUnconstrained(); | 824 LUnallocated* input_copy = cur_input->CopyUnconstrained(); |
| 827 bool is_tagged = HasTaggedValue(cur_input->VirtualRegister()); | 825 bool is_tagged = HasTaggedValue(cur_input->VirtualRegister()); |
| 828 AllocateFixed(cur_input, gap_index + 1, is_tagged); | 826 AllocateFixed(cur_input, gap_index + 1, is_tagged); |
| 829 AddConstraintsGapMove(gap_index, input_copy, cur_input); | 827 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
| 830 } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) { | 828 } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) { |
| 829 // The live range of writable input registers always goes until the end |
| 830 // of the instruction. |
| 831 ASSERT(!cur_input->IsUsedAtStart()); |
| 832 |
| 831 LUnallocated* input_copy = cur_input->CopyUnconstrained(); | 833 LUnallocated* input_copy = cur_input->CopyUnconstrained(); |
| 832 cur_input->set_virtual_register(next_virtual_register_++); | 834 cur_input->set_virtual_register(next_virtual_register_++); |
| 833 | 835 |
| 834 if (RequiredRegisterKind(input_copy->virtual_register()) == | 836 if (RequiredRegisterKind(input_copy->virtual_register()) == |
| 835 DOUBLE_REGISTERS) { | 837 DOUBLE_REGISTERS) { |
| 836 double_artificial_registers_.Add( | 838 double_artificial_registers_.Add( |
| 837 cur_input->virtual_register() - first_artificial_register_); | 839 cur_input->virtual_register() - first_artificial_register_); |
| 838 } | 840 } |
| 839 | 841 |
| 840 second->AddTemp(cur_input); | |
| 841 AddConstraintsGapMove(gap_index, input_copy, cur_input); | 842 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
| 842 } | 843 } |
| 843 } | 844 } |
| 844 } | 845 } |
| 845 | 846 |
| 846 // Handle "output same as input" for second instruction. | 847 // Handle "output same as input" for second instruction. |
| 847 if (second != NULL && second->Output() != NULL) { | 848 if (second != NULL && second->Output() != NULL) { |
| 848 LUnallocated* second_output = LUnallocated::cast(second->Output()); | 849 LUnallocated* second_output = LUnallocated::cast(second->Output()); |
| 849 if (second_output->HasSameAsInputPolicy()) { | 850 if (second_output->HasSameAsInputPolicy()) { |
| 850 LUnallocated* cur_input = LUnallocated::cast(second->InputAt(0)); | 851 LUnallocated* cur_input = LUnallocated::cast(second->FirstInput()); |
| 851 int output_vreg = second_output->VirtualRegister(); | 852 int output_vreg = second_output->VirtualRegister(); |
| 852 int input_vreg = cur_input->VirtualRegister(); | 853 int input_vreg = cur_input->VirtualRegister(); |
| 853 | 854 |
| 854 LUnallocated* input_copy = cur_input->CopyUnconstrained(); | 855 LUnallocated* input_copy = cur_input->CopyUnconstrained(); |
| 855 cur_input->set_virtual_register(second_output->virtual_register()); | 856 cur_input->set_virtual_register(second_output->virtual_register()); |
| 856 AddConstraintsGapMove(gap_index, input_copy, cur_input); | 857 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
| 857 | 858 |
| 858 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { | 859 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { |
| 859 int index = gap_index + 1; | 860 int index = gap_index + 1; |
| 860 LInstruction* instr = chunk_->instructions()->at(index); | 861 LInstruction* instr = InstructionAt(index); |
| 861 if (instr->HasPointerMap()) { | 862 if (instr->HasPointerMap()) { |
| 862 instr->pointer_map()->RecordPointer(input_copy); | 863 instr->pointer_map()->RecordPointer(input_copy); |
| 863 } | 864 } |
| 864 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { | 865 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { |
| 865 // The input is assumed to immediately have a tagged representation, | 866 // The input is assumed to immediately have a tagged representation, |
| 866 // before the pointer map can be used. I.e. the pointer map at the | 867 // before the pointer map can be used. I.e. the pointer map at the |
| 867 // instruction will include the output operand (whose value at the | 868 // instruction will include the output operand (whose value at the |
| 868 // beginning of the instruction is equal to the input operand). If | 869 // beginning of the instruction is equal to the input operand). If |
| 869 // this is not desired, then the pointer map at this instruction needs | 870 // this is not desired, then the pointer map at this instruction needs |
| 870 // to be adjusted manually. | 871 // to be adjusted manually. |
| 871 } | 872 } |
| 872 } | 873 } |
| 873 } | 874 } |
| 874 } | 875 } |
| 875 | 876 |
| 876 | 877 |
| 877 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) { | 878 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) { |
| 878 int block_start = block->first_instruction_index(); | 879 int block_start = block->first_instruction_index(); |
| 879 int index = block->last_instruction_index(); | 880 int index = block->last_instruction_index(); |
| 880 | 881 |
| 881 LifetimePosition block_start_position = | 882 LifetimePosition block_start_position = |
| 882 LifetimePosition::FromInstructionIndex(block_start); | 883 LifetimePosition::FromInstructionIndex(block_start); |
| 883 | 884 |
| 884 while (index >= block_start) { | 885 while (index >= block_start) { |
| 885 LifetimePosition curr_position = | 886 LifetimePosition curr_position = |
| 886 LifetimePosition::FromInstructionIndex(index); | 887 LifetimePosition::FromInstructionIndex(index); |
| 887 | 888 |
| 888 if (chunk_->IsGapAt(index)) { | 889 if (IsGapAt(index)) { |
| 889 // We have a gap at this position. | 890 // We have a gap at this position. |
| 890 LGap* gap = chunk_->GetGapAt(index); | 891 LGap* gap = GapAt(index); |
| 891 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); | 892 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); |
| 892 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); | 893 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); |
| 893 for (int i = 0; i < move_operands->length(); ++i) { | 894 for (int i = 0; i < move_operands->length(); ++i) { |
| 894 LMoveOperands* cur = &move_operands->at(i); | 895 LMoveOperands* cur = &move_operands->at(i); |
| 895 if (cur->IsIgnored()) continue; | 896 if (cur->IsIgnored()) continue; |
| 896 LOperand* from = cur->from(); | 897 LOperand* from = cur->source(); |
| 897 LOperand* to = cur->to(); | 898 LOperand* to = cur->destination(); |
| 898 HPhi* phi = LookupPhi(to); | 899 HPhi* phi = LookupPhi(to); |
| 899 LOperand* hint = to; | 900 LOperand* hint = to; |
| 900 if (phi != NULL) { | 901 if (phi != NULL) { |
| 901 // This is a phi resolving move. | 902 // This is a phi resolving move. |
| 902 if (!phi->block()->IsLoopHeader()) { | 903 if (!phi->block()->IsLoopHeader()) { |
| 903 hint = LiveRangeFor(phi->id())->FirstHint(); | 904 hint = LiveRangeFor(phi->id())->FirstHint(); |
| 904 } | 905 } |
| 905 } else { | 906 } else { |
| 906 if (to->IsUnallocated()) { | 907 if (to->IsUnallocated()) { |
| 907 if (live->Contains(to->VirtualRegister())) { | 908 if (live->Contains(to->VirtualRegister())) { |
| 908 Define(curr_position, to, from); | 909 Define(curr_position, to, from); |
| 909 live->Remove(to->VirtualRegister()); | 910 live->Remove(to->VirtualRegister()); |
| 910 } else { | 911 } else { |
| 911 cur->Eliminate(); | 912 cur->Eliminate(); |
| 912 continue; | 913 continue; |
| 913 } | 914 } |
| 914 } else { | 915 } else { |
| 915 Define(curr_position, to, from); | 916 Define(curr_position, to, from); |
| 916 } | 917 } |
| 917 } | 918 } |
| 918 Use(block_start_position, curr_position, from, hint); | 919 Use(block_start_position, curr_position, from, hint); |
| 919 if (from->IsUnallocated()) { | 920 if (from->IsUnallocated()) { |
| 920 live->Add(from->VirtualRegister()); | 921 live->Add(from->VirtualRegister()); |
| 921 } | 922 } |
| 922 } | 923 } |
| 923 } else { | 924 } else { |
| 924 ASSERT(!chunk_->IsGapAt(index)); | 925 ASSERT(!IsGapAt(index)); |
| 925 InstructionSummary* summary = GetSummary(index); | 926 LInstruction* instr = InstructionAt(index); |
| 926 | 927 |
| 927 if (summary != NULL) { | 928 if (instr != NULL) { |
| 928 LOperand* output = summary->Output(); | 929 LOperand* output = instr->Output(); |
| 929 if (output != NULL) { | 930 if (output != NULL) { |
| 930 if (output->IsUnallocated()) live->Remove(output->VirtualRegister()); | 931 if (output->IsUnallocated()) live->Remove(output->VirtualRegister()); |
| 931 Define(curr_position, output, NULL); | 932 Define(curr_position, output, NULL); |
| 932 } | 933 } |
| 933 | 934 |
| 934 if (summary->IsCall()) { | 935 if (instr->IsMarkedAsCall()) { |
| 935 for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) { | 936 for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) { |
| 936 if (output == NULL || !output->IsRegister() || | 937 if (output == NULL || !output->IsRegister() || |
| 937 output->index() != i) { | 938 output->index() != i) { |
| 938 LiveRange* range = FixedLiveRangeFor(i); | 939 LiveRange* range = FixedLiveRangeFor(i); |
| 939 range->AddUseInterval(curr_position, | 940 range->AddUseInterval(curr_position, |
| 940 curr_position.InstructionEnd()); | 941 curr_position.InstructionEnd()); |
| 941 } | 942 } |
| 942 } | 943 } |
| 944 } |
| 945 |
| 946 if (instr->IsMarkedAsCall() || instr->IsMarkedAsSaveDoubles()) { |
| 943 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) { | 947 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) { |
| 944 if (output == NULL || !output->IsDoubleRegister() || | 948 if (output == NULL || !output->IsDoubleRegister() || |
| 945 output->index() != i) { | 949 output->index() != i) { |
| 946 LiveRange* range = FixedDoubleLiveRangeFor(i); | 950 LiveRange* range = FixedDoubleLiveRangeFor(i); |
| 947 range->AddUseInterval(curr_position, | 951 range->AddUseInterval(curr_position, |
| 948 curr_position.InstructionEnd()); | 952 curr_position.InstructionEnd()); |
| 949 } | 953 } |
| 950 } | 954 } |
| 951 } | 955 } |
| 952 | 956 |
| 953 for (int i = 0; i < summary->InputCount(); ++i) { | 957 for (UseIterator it(instr); it.HasNext(); it.Advance()) { |
| 954 LOperand* input = summary->InputAt(i); | 958 LOperand* input = it.Next(); |
| 955 | 959 |
| 956 LifetimePosition use_pos; | 960 LifetimePosition use_pos; |
| 957 if (input->IsUnallocated() && | 961 if (input->IsUnallocated() && |
| 958 LUnallocated::cast(input)->IsUsedAtStart()) { | 962 LUnallocated::cast(input)->IsUsedAtStart()) { |
| 959 use_pos = curr_position; | 963 use_pos = curr_position; |
| 960 } else { | 964 } else { |
| 961 use_pos = curr_position.InstructionEnd(); | 965 use_pos = curr_position.InstructionEnd(); |
| 962 } | 966 } |
| 963 | 967 |
| 964 Use(block_start_position, use_pos, input, NULL); | 968 Use(block_start_position, use_pos, input, NULL); |
| 965 if (input->IsUnallocated()) live->Add(input->VirtualRegister()); | 969 if (input->IsUnallocated()) live->Add(input->VirtualRegister()); |
| 966 } | 970 } |
| 967 | 971 |
| 968 for (int i = 0; i < summary->TempCount(); ++i) { | 972 for (TempIterator it(instr); it.HasNext(); it.Advance()) { |
| 969 LOperand* temp = summary->TempAt(i); | 973 LOperand* temp = it.Next(); |
| 970 if (summary->IsCall()) { | 974 if (instr->IsMarkedAsCall()) { |
| 971 if (temp->IsRegister()) continue; | 975 if (temp->IsRegister()) continue; |
| 972 if (temp->IsUnallocated()) { | 976 if (temp->IsUnallocated()) { |
| 973 LUnallocated* temp_unalloc = LUnallocated::cast(temp); | 977 LUnallocated* temp_unalloc = LUnallocated::cast(temp); |
| 974 if (temp_unalloc->HasFixedPolicy()) { | 978 if (temp_unalloc->HasFixedPolicy()) { |
| 975 continue; | 979 continue; |
| 976 } | 980 } |
| 977 } | 981 } |
| 978 } | 982 } |
| 979 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL); | 983 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL); |
| 980 Define(curr_position, temp, NULL); | 984 Define(curr_position, temp, NULL); |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1031 AllocateGeneralRegisters(); | 1035 AllocateGeneralRegisters(); |
| 1032 AllocateDoubleRegisters(); | 1036 AllocateDoubleRegisters(); |
| 1033 PopulatePointerMaps(); | 1037 PopulatePointerMaps(); |
| 1034 if (has_osr_entry_) ProcessOsrEntry(); | 1038 if (has_osr_entry_) ProcessOsrEntry(); |
| 1035 ConnectRanges(); | 1039 ConnectRanges(); |
| 1036 ResolveControlFlow(); | 1040 ResolveControlFlow(); |
| 1037 } | 1041 } |
| 1038 | 1042 |
| 1039 | 1043 |
| 1040 void LAllocator::MeetRegisterConstraints() { | 1044 void LAllocator::MeetRegisterConstraints() { |
| 1041 HPhase phase("Register constraints", chunk()); | 1045 HPhase phase("Register constraints", chunk_); |
| 1042 first_artificial_register_ = next_virtual_register_; | 1046 first_artificial_register_ = next_virtual_register_; |
| 1043 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); | 1047 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); |
| 1044 for (int i = 0; i < blocks->length(); ++i) { | 1048 for (int i = 0; i < blocks->length(); ++i) { |
| 1045 HBasicBlock* block = blocks->at(i); | 1049 HBasicBlock* block = blocks->at(i); |
| 1046 MeetRegisterConstraints(block); | 1050 MeetRegisterConstraints(block); |
| 1047 } | 1051 } |
| 1048 } | 1052 } |
| 1049 | 1053 |
| 1050 | 1054 |
| 1051 void LAllocator::ResolvePhis() { | 1055 void LAllocator::ResolvePhis() { |
| 1052 HPhase phase("Resolve phis", chunk()); | 1056 HPhase phase("Resolve phis", chunk_); |
| 1053 | 1057 |
| 1054 // Process the blocks in reverse order. | 1058 // Process the blocks in reverse order. |
| 1055 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); | 1059 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); |
| 1056 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) { | 1060 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) { |
| 1057 HBasicBlock* block = blocks->at(block_id); | 1061 HBasicBlock* block = blocks->at(block_id); |
| 1058 ResolvePhis(block); | 1062 ResolvePhis(block); |
| 1059 } | 1063 } |
| 1060 } | 1064 } |
| 1061 | 1065 |
| 1062 | 1066 |
| 1063 void LAllocator::ResolveControlFlow(LiveRange* range, | 1067 void LAllocator::ResolveControlFlow(LiveRange* range, |
| 1064 HBasicBlock* block, | 1068 HBasicBlock* block, |
| 1065 HBasicBlock* pred) { | 1069 HBasicBlock* pred) { |
| 1066 LifetimePosition pred_end = | 1070 LifetimePosition pred_end = |
| 1067 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()). | 1071 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); |
| 1068 PrevInstruction(); | |
| 1069 | |
| 1070 LifetimePosition cur_start = | 1072 LifetimePosition cur_start = |
| 1071 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); | 1073 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); |
| 1072 LiveRange* pred_cover = NULL; | 1074 LiveRange* pred_cover = NULL; |
| 1073 LiveRange* cur_cover = NULL; | 1075 LiveRange* cur_cover = NULL; |
| 1074 LiveRange* cur_range = range; | 1076 LiveRange* cur_range = range; |
| 1075 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { | 1077 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { |
| 1076 if (cur_range->CanCover(cur_start)) { | 1078 if (cur_range->CanCover(cur_start)) { |
| 1077 ASSERT(cur_cover == NULL); | 1079 ASSERT(cur_cover == NULL); |
| 1078 cur_cover = cur_range; | 1080 cur_cover = cur_range; |
| 1079 } | 1081 } |
| 1080 if (cur_range->CanCover(pred_end)) { | 1082 if (cur_range->CanCover(pred_end)) { |
| 1081 ASSERT(pred_cover == NULL); | 1083 ASSERT(pred_cover == NULL); |
| 1082 pred_cover = cur_range; | 1084 pred_cover = cur_range; |
| 1083 } | 1085 } |
| 1084 cur_range = cur_range->next(); | 1086 cur_range = cur_range->next(); |
| 1085 } | 1087 } |
| 1086 | 1088 |
| 1087 if (cur_cover->IsSpilled()) return; | 1089 if (cur_cover->IsSpilled()) return; |
| 1088 ASSERT(pred_cover != NULL && cur_cover != NULL); | 1090 ASSERT(pred_cover != NULL && cur_cover != NULL); |
| 1089 if (pred_cover != cur_cover) { | 1091 if (pred_cover != cur_cover) { |
| 1090 LOperand* pred_op = pred_cover->CreateAssignedOperand(); | 1092 LOperand* pred_op = pred_cover->CreateAssignedOperand(); |
| 1091 LOperand* cur_op = cur_cover->CreateAssignedOperand(); | 1093 LOperand* cur_op = cur_cover->CreateAssignedOperand(); |
| 1092 if (!pred_op->Equals(cur_op)) { | 1094 if (!pred_op->Equals(cur_op)) { |
| 1093 LGap* gap = NULL; | 1095 LGap* gap = NULL; |
| 1094 if (block->predecessors()->length() == 1) { | 1096 if (block->predecessors()->length() == 1) { |
| 1095 gap = chunk_->GetGapAt(block->first_instruction_index()); | 1097 gap = GapAt(block->first_instruction_index()); |
| 1096 } else { | 1098 } else { |
| 1097 ASSERT(pred->end()->SecondSuccessor() == NULL); | 1099 ASSERT(pred->end()->SecondSuccessor() == NULL); |
| 1098 gap = GetLastGap(pred); | 1100 gap = GetLastGap(pred); |
| 1099 } | 1101 } |
| 1100 gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op); | 1102 gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op); |
| 1101 } | 1103 } |
| 1102 } | 1104 } |
| 1103 } | 1105 } |
| 1104 | 1106 |
| 1105 | 1107 |
| 1106 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) { | 1108 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) { |
| 1107 int index = pos.InstructionIndex(); | 1109 int index = pos.InstructionIndex(); |
| 1108 if (chunk_->IsGapAt(index)) { | 1110 if (IsGapAt(index)) { |
| 1109 LGap* gap = chunk_->GetGapAt(index); | 1111 LGap* gap = GapAt(index); |
| 1110 return gap->GetOrCreateParallelMove( | 1112 return gap->GetOrCreateParallelMove( |
| 1111 pos.IsInstructionStart() ? LGap::START : LGap::END); | 1113 pos.IsInstructionStart() ? LGap::START : LGap::END); |
| 1112 } | 1114 } |
| 1113 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); | 1115 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); |
| 1114 return chunk_->GetGapAt(gap_pos)->GetOrCreateParallelMove( | 1116 return GapAt(gap_pos)->GetOrCreateParallelMove( |
| 1115 (gap_pos < index) ? LGap::AFTER : LGap::BEFORE); | 1117 (gap_pos < index) ? LGap::AFTER : LGap::BEFORE); |
| 1116 } | 1118 } |
| 1117 | 1119 |
| 1118 | 1120 |
| 1119 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) { | 1121 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) { |
| 1120 LGap* gap = chunk_->GetGapAt(chunk_->NearestGapPos(pos.InstructionIndex())); | 1122 LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex())); |
| 1121 return gap->block(); | 1123 return gap->block(); |
| 1122 } | 1124 } |
| 1123 | 1125 |
| 1124 | 1126 |
| 1125 void LAllocator::ConnectRanges() { | 1127 void LAllocator::ConnectRanges() { |
| 1126 HPhase phase("Connect ranges", this); | 1128 HPhase phase("Connect ranges", this); |
| 1127 for (int i = 0; i < live_ranges()->length(); ++i) { | 1129 for (int i = 0; i < live_ranges()->length(); ++i) { |
| 1128 LiveRange* first_range = live_ranges()->at(i); | 1130 LiveRange* first_range = live_ranges()->at(i); |
| 1129 if (first_range == NULL || first_range->parent() != NULL) continue; | 1131 if (first_range == NULL || first_range->parent() != NULL) continue; |
| 1130 | 1132 |
| (...skipping 26 matching lines...) Expand all Loading... |
| 1157 | 1159 |
| 1158 | 1160 |
| 1159 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const { | 1161 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const { |
| 1160 if (block->predecessors()->length() != 1) return false; | 1162 if (block->predecessors()->length() != 1) return false; |
| 1161 return block->predecessors()->first()->block_id() == block->block_id() - 1; | 1163 return block->predecessors()->first()->block_id() == block->block_id() - 1; |
| 1162 } | 1164 } |
| 1163 | 1165 |
| 1164 | 1166 |
| 1165 void LAllocator::ResolveControlFlow() { | 1167 void LAllocator::ResolveControlFlow() { |
| 1166 HPhase phase("Resolve control flow", this); | 1168 HPhase phase("Resolve control flow", this); |
| 1167 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); | 1169 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); |
| 1168 for (int block_id = 1; block_id < blocks->length(); ++block_id) { | 1170 for (int block_id = 1; block_id < blocks->length(); ++block_id) { |
| 1169 HBasicBlock* block = blocks->at(block_id); | 1171 HBasicBlock* block = blocks->at(block_id); |
| 1170 if (CanEagerlyResolveControlFlow(block)) continue; | 1172 if (CanEagerlyResolveControlFlow(block)) continue; |
| 1171 BitVector* live = live_in_sets_[block->block_id()]; | 1173 BitVector* live = live_in_sets_[block->block_id()]; |
| 1172 BitVector::Iterator iterator(live); | 1174 BitVector::Iterator iterator(live); |
| 1173 while (!iterator.Done()) { | 1175 while (!iterator.Done()) { |
| 1174 int operand_index = iterator.Current(); | 1176 int operand_index = iterator.Current(); |
| 1175 for (int i = 0; i < block->predecessors()->length(); ++i) { | 1177 for (int i = 0; i < block->predecessors()->length(); ++i) { |
| 1176 HBasicBlock* cur = block->predecessors()->at(i); | 1178 HBasicBlock* cur = block->predecessors()->at(i); |
| 1177 LiveRange* cur_range = LiveRangeFor(operand_index); | 1179 LiveRange* cur_range = LiveRangeFor(operand_index); |
| 1178 ResolveControlFlow(cur_range, block, cur); | 1180 ResolveControlFlow(cur_range, block, cur); |
| 1179 } | 1181 } |
| 1180 iterator.Advance(); | 1182 iterator.Advance(); |
| 1181 } | 1183 } |
| 1182 } | 1184 } |
| 1183 } | 1185 } |
| 1184 | 1186 |
| 1185 | 1187 |
| 1186 void LAllocator::BuildLiveRanges() { | 1188 void LAllocator::BuildLiveRanges() { |
| 1187 HPhase phase("Build live ranges", this); | 1189 HPhase phase("Build live ranges", this); |
| 1188 InitializeLivenessAnalysis(); | 1190 InitializeLivenessAnalysis(); |
| 1189 // Process the blocks in reverse order. | 1191 // Process the blocks in reverse order. |
| 1190 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); | 1192 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); |
| 1191 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) { | 1193 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) { |
| 1192 HBasicBlock* block = blocks->at(block_id); | 1194 HBasicBlock* block = blocks->at(block_id); |
| 1193 BitVector* live = ComputeLiveOut(block); | 1195 BitVector* live = ComputeLiveOut(block); |
| 1194 // Initially consider all live_out values live for the entire block. We | 1196 // Initially consider all live_out values live for the entire block. We |
| 1195 // will shorten these intervals if necessary. | 1197 // will shorten these intervals if necessary. |
| 1196 AddInitialIntervals(block, live); | 1198 AddInitialIntervals(block, live); |
| 1197 | 1199 |
| 1198 // Process the instructions in reverse order, generating and killing | 1200 // Process the instructions in reverse order, generating and killing |
| 1199 // live values. | 1201 // live values. |
| 1200 ProcessInstructions(block, live); | 1202 ProcessInstructions(block, live); |
| 1201 // All phi output operands are killed by this block. | 1203 // All phi output operands are killed by this block. |
| 1202 const ZoneList<HPhi*>* phis = block->phis(); | 1204 const ZoneList<HPhi*>* phis = block->phis(); |
| 1203 for (int i = 0; i < phis->length(); ++i) { | 1205 for (int i = 0; i < phis->length(); ++i) { |
| 1204 // The live range interval already ends at the first instruction of the | 1206 // The live range interval already ends at the first instruction of the |
| 1205 // block. | 1207 // block. |
| 1206 HPhi* phi = phis->at(i); | 1208 HPhi* phi = phis->at(i); |
| 1207 live->Remove(phi->id()); | 1209 live->Remove(phi->id()); |
| 1208 | 1210 |
| 1209 LOperand* hint = NULL; | 1211 LOperand* hint = NULL; |
| 1210 LOperand* phi_operand = NULL; | 1212 LOperand* phi_operand = NULL; |
| 1211 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0)); | 1213 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0)); |
| 1212 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); | 1214 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); |
| 1213 for (int j = 0; j < move->move_operands()->length(); ++j) { | 1215 for (int j = 0; j < move->move_operands()->length(); ++j) { |
| 1214 LOperand* to = move->move_operands()->at(j).to(); | 1216 LOperand* to = move->move_operands()->at(j).destination(); |
| 1215 if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) { | 1217 if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) { |
| 1216 hint = move->move_operands()->at(j).from(); | 1218 hint = move->move_operands()->at(j).source(); |
| 1217 phi_operand = to; | 1219 phi_operand = to; |
| 1218 break; | 1220 break; |
| 1219 } | 1221 } |
| 1220 } | 1222 } |
| 1221 ASSERT(hint != NULL); | 1223 ASSERT(hint != NULL); |
| 1222 | 1224 |
| 1223 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( | 1225 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( |
| 1224 block->first_instruction_index()); | 1226 block->first_instruction_index()); |
| 1225 Define(block_start, phi_operand, hint); | 1227 Define(block_start, phi_operand, hint); |
| 1226 } | 1228 } |
| 1227 | 1229 |
| 1228 // Now live is live_in for this block except not including values live | 1230 // Now live is live_in for this block except not including values live |
| 1229 // out on backward successor edges. | 1231 // out on backward successor edges. |
| 1230 live_in_sets_[block_id] = live; | 1232 live_in_sets_[block_id] = live; |
| 1231 | 1233 |
| 1232 // If this block is a loop header go back and patch up the necessary | 1234 // If this block is a loop header go back and patch up the necessary |
| 1233 // predecessor blocks. | 1235 // predecessor blocks. |
| 1234 if (block->IsLoopHeader()) { | 1236 if (block->IsLoopHeader()) { |
| 1235 // TODO(kmillikin): Need to be able to get the last block of the loop | 1237 // TODO(kmillikin): Need to be able to get the last block of the loop |
| 1236 // in the loop information. Add a live range stretching from the first | 1238 // in the loop information. Add a live range stretching from the first |
| 1237 // loop instruction to the last for each value live on entry to the | 1239 // loop instruction to the last for each value live on entry to the |
| 1238 // header. | 1240 // header. |
| 1239 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge(); | 1241 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge(); |
| 1240 BitVector::Iterator iterator(live); | 1242 BitVector::Iterator iterator(live); |
| 1241 LifetimePosition start = LifetimePosition::FromInstructionIndex( | 1243 LifetimePosition start = LifetimePosition::FromInstructionIndex( |
| 1242 block->first_instruction_index()); | 1244 block->first_instruction_index()); |
| 1243 LifetimePosition end = LifetimePosition::FromInstructionIndex( | 1245 LifetimePosition end = LifetimePosition::FromInstructionIndex( |
| 1244 back_edge->last_instruction_index()); | 1246 back_edge->last_instruction_index()).NextInstruction(); |
| 1245 while (!iterator.Done()) { | 1247 while (!iterator.Done()) { |
| 1246 int operand_index = iterator.Current(); | 1248 int operand_index = iterator.Current(); |
| 1247 LiveRange* range = LiveRangeFor(operand_index); | 1249 LiveRange* range = LiveRangeFor(operand_index); |
| 1248 range->EnsureInterval(start, end); | 1250 range->EnsureInterval(start, end); |
| 1249 iterator.Advance(); | 1251 iterator.Advance(); |
| 1250 } | 1252 } |
| 1251 | 1253 |
| 1252 for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) { | 1254 for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) { |
| 1253 live_in_sets_[i]->Union(*live); | 1255 live_in_sets_[i]->Union(*live); |
| 1254 } | 1256 } |
| 1255 } | 1257 } |
| 1256 | 1258 |
| 1257 #ifdef DEBUG | 1259 #ifdef DEBUG |
| 1258 if (block_id == 0) { | 1260 if (block_id == 0) { |
| 1259 BitVector::Iterator iterator(live); | 1261 BitVector::Iterator iterator(live); |
| 1260 bool found = false; | 1262 bool found = false; |
| 1261 while (!iterator.Done()) { | 1263 while (!iterator.Done()) { |
| 1262 found = true; | 1264 found = true; |
| 1263 int operand_index = iterator.Current(); | 1265 int operand_index = iterator.Current(); |
| 1264 PrintF("Function: %s\n", | 1266 PrintF("Function: %s\n", |
| 1265 *graph()->info()->function()->debug_name()->ToCString()); | 1267 *graph_->info()->function()->debug_name()->ToCString()); |
| 1266 PrintF("Value %d used before first definition!\n", operand_index); | 1268 PrintF("Value %d used before first definition!\n", operand_index); |
| 1267 LiveRange* range = LiveRangeFor(operand_index); | 1269 LiveRange* range = LiveRangeFor(operand_index); |
| 1268 PrintF("First use is at %d\n", range->first_pos()->pos().Value()); | 1270 PrintF("First use is at %d\n", range->first_pos()->pos().Value()); |
| 1269 iterator.Advance(); | 1271 iterator.Advance(); |
| 1270 } | 1272 } |
| 1271 ASSERT(!found); | 1273 ASSERT(!found); |
| 1272 } | 1274 } |
| 1273 #endif | 1275 #endif |
| 1274 } | 1276 } |
| 1275 } | 1277 } |
| (...skipping 184 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1460 LiveRange* current = unhandled_live_ranges_.RemoveLast(); | 1462 LiveRange* current = unhandled_live_ranges_.RemoveLast(); |
| 1461 ASSERT(UnhandledIsSorted()); | 1463 ASSERT(UnhandledIsSorted()); |
| 1462 LifetimePosition position = current->Start(); | 1464 LifetimePosition position = current->Start(); |
| 1463 TraceAlloc("Processing interval %d start=%d\n", | 1465 TraceAlloc("Processing interval %d start=%d\n", |
| 1464 current->id(), | 1466 current->id(), |
| 1465 position.Value()); | 1467 position.Value()); |
| 1466 | 1468 |
| 1467 if (current->HasAllocatedSpillOperand()) { | 1469 if (current->HasAllocatedSpillOperand()) { |
| 1468 TraceAlloc("Live range %d already has a spill operand\n", current->id()); | 1470 TraceAlloc("Live range %d already has a spill operand\n", current->id()); |
| 1469 LifetimePosition next_pos = position; | 1471 LifetimePosition next_pos = position; |
| 1470 if (chunk_->IsGapAt(next_pos.InstructionIndex())) { | 1472 if (IsGapAt(next_pos.InstructionIndex())) { |
| 1471 next_pos = next_pos.NextInstruction(); | 1473 next_pos = next_pos.NextInstruction(); |
| 1472 } | 1474 } |
| 1473 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); | 1475 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); |
| 1474 // If the range already has a spill operand and it doesn't need a | 1476 // If the range already has a spill operand and it doesn't need a |
| 1475 // register immediately, split it and spill the first part of the range. | 1477 // register immediately, split it and spill the first part of the range. |
| 1476 if (pos == NULL) { | 1478 if (pos == NULL) { |
| 1477 Spill(current); | 1479 Spill(current); |
| 1478 continue; | 1480 continue; |
| 1479 } else if (pos->pos().Value() > | 1481 } else if (pos->pos().Value() > |
| 1480 current->Start().NextInstruction().Value()) { | 1482 current->Start().NextInstruction().Value()) { |
| (...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1547 void LAllocator::TraceAlloc(const char* msg, ...) { | 1549 void LAllocator::TraceAlloc(const char* msg, ...) { |
| 1548 if (FLAG_trace_alloc) { | 1550 if (FLAG_trace_alloc) { |
| 1549 va_list arguments; | 1551 va_list arguments; |
| 1550 va_start(arguments, msg); | 1552 va_start(arguments, msg); |
| 1551 OS::VPrint(msg, arguments); | 1553 OS::VPrint(msg, arguments); |
| 1552 va_end(arguments); | 1554 va_end(arguments); |
| 1553 } | 1555 } |
| 1554 } | 1556 } |
| 1555 | 1557 |
| 1556 | 1558 |
| 1557 void LAllocator::RecordUse(HValue* value, LUnallocated* operand) { | |
| 1558 operand->set_virtual_register(value->id()); | |
| 1559 current_summary()->AddInput(operand); | |
| 1560 } | |
| 1561 | |
| 1562 | |
| 1563 bool LAllocator::HasTaggedValue(int virtual_register) const { | 1559 bool LAllocator::HasTaggedValue(int virtual_register) const { |
| 1564 HValue* value = graph()->LookupValue(virtual_register); | 1560 HValue* value = graph_->LookupValue(virtual_register); |
| 1565 if (value == NULL) return false; | 1561 if (value == NULL) return false; |
| 1566 return value->representation().IsTagged(); | 1562 return value->representation().IsTagged(); |
| 1567 } | 1563 } |
| 1568 | 1564 |
| 1569 | 1565 |
| 1570 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const { | 1566 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const { |
| 1571 if (virtual_register < first_artificial_register_) { | 1567 if (virtual_register < first_artificial_register_) { |
| 1572 HValue* value = graph()->LookupValue(virtual_register); | 1568 HValue* value = graph_->LookupValue(virtual_register); |
| 1573 if (value != NULL && value->representation().IsDouble()) { | 1569 if (value != NULL && value->representation().IsDouble()) { |
| 1574 return DOUBLE_REGISTERS; | 1570 return DOUBLE_REGISTERS; |
| 1575 } | 1571 } |
| 1576 } else if (double_artificial_registers_.Contains( | 1572 } else if (double_artificial_registers_.Contains( |
| 1577 virtual_register - first_artificial_register_)) { | 1573 virtual_register - first_artificial_register_)) { |
| 1578 return DOUBLE_REGISTERS; | 1574 return DOUBLE_REGISTERS; |
| 1579 } | 1575 } |
| 1580 | 1576 |
| 1581 return GENERAL_REGISTERS; | 1577 return GENERAL_REGISTERS; |
| 1582 } | 1578 } |
| 1583 | 1579 |
| 1584 | 1580 |
| 1585 void LAllocator::MarkAsCall() { | |
| 1586 // Call instructions can use only fixed registers as | |
| 1587 // temporaries and outputs because all registers | |
| 1588 // are blocked by the calling convention. | |
| 1589 // Inputs can use either fixed register or have a short lifetime (be | |
| 1590 // used at start of the instruction). | |
| 1591 InstructionSummary* summary = current_summary(); | |
| 1592 #ifdef DEBUG | |
| 1593 ASSERT(summary->Output() == NULL || | |
| 1594 LUnallocated::cast(summary->Output())->HasFixedPolicy() || | |
| 1595 !LUnallocated::cast(summary->Output())->HasRegisterPolicy()); | |
| 1596 for (int i = 0; i < summary->InputCount(); i++) { | |
| 1597 ASSERT(LUnallocated::cast(summary->InputAt(i))->HasFixedPolicy() || | |
| 1598 LUnallocated::cast(summary->InputAt(i))->IsUsedAtStart() || | |
| 1599 !LUnallocated::cast(summary->InputAt(i))->HasRegisterPolicy()); | |
| 1600 } | |
| 1601 for (int i = 0; i < summary->TempCount(); i++) { | |
| 1602 ASSERT(LUnallocated::cast(summary->TempAt(i))->HasFixedPolicy() || | |
| 1603 !LUnallocated::cast(summary->TempAt(i))->HasRegisterPolicy()); | |
| 1604 } | |
| 1605 #endif | |
| 1606 summary->MarkAsCall(); | |
| 1607 } | |
| 1608 | |
| 1609 | |
| 1610 void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) { | 1581 void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) { |
| 1611 operand->set_virtual_register(instr->id()); | 1582 operand->set_virtual_register(instr->id()); |
| 1612 current_summary()->SetOutput(operand); | |
| 1613 } | 1583 } |
| 1614 | 1584 |
| 1615 | 1585 |
| 1616 void LAllocator::RecordTemporary(LUnallocated* operand) { | 1586 void LAllocator::RecordTemporary(LUnallocated* operand) { |
| 1617 ASSERT(next_virtual_register_ < LUnallocated::kMaxVirtualRegisters); | 1587 ASSERT(next_virtual_register_ < LUnallocated::kMaxVirtualRegisters); |
| 1618 if (!operand->HasFixedPolicy()) { | 1588 if (!operand->HasFixedPolicy()) { |
| 1619 operand->set_virtual_register(next_virtual_register_++); | 1589 operand->set_virtual_register(next_virtual_register_++); |
| 1620 } | 1590 } |
| 1621 current_summary()->AddTemp(operand); | 1591 } |
| 1592 |
| 1593 |
| 1594 void LAllocator::RecordUse(HValue* value, LUnallocated* operand) { |
| 1595 operand->set_virtual_register(value->id()); |
| 1622 } | 1596 } |
| 1623 | 1597 |
| 1624 | 1598 |
| 1625 int LAllocator::max_initial_value_ids() { | 1599 int LAllocator::max_initial_value_ids() { |
| 1626 return LUnallocated::kMaxVirtualRegisters / 32; | 1600 return LUnallocated::kMaxVirtualRegisters / 32; |
| 1627 } | 1601 } |
| 1628 | 1602 |
| 1629 | 1603 |
| 1630 void LAllocator::BeginInstruction() { | |
| 1631 if (next_summary_ == NULL) { | |
| 1632 next_summary_ = new InstructionSummary(); | |
| 1633 } | |
| 1634 summary_stack_.Add(next_summary_); | |
| 1635 next_summary_ = NULL; | |
| 1636 } | |
| 1637 | |
| 1638 | |
| 1639 void LAllocator::SummarizeInstruction(int index) { | |
| 1640 InstructionSummary* sum = summary_stack_.RemoveLast(); | |
| 1641 if (summaries_.length() <= index) { | |
| 1642 summaries_.AddBlock(NULL, index + 1 - summaries_.length()); | |
| 1643 } | |
| 1644 ASSERT(summaries_[index] == NULL); | |
| 1645 if (sum->Output() != NULL || sum->InputCount() > 0 || sum->TempCount() > 0) { | |
| 1646 summaries_[index] = sum; | |
| 1647 } else { | |
| 1648 next_summary_ = sum; | |
| 1649 } | |
| 1650 } | |
| 1651 | |
| 1652 | |
| 1653 void LAllocator::OmitInstruction() { | |
| 1654 summary_stack_.RemoveLast(); | |
| 1655 } | |
| 1656 | |
| 1657 | |
| 1658 void LAllocator::AddToActive(LiveRange* range) { | 1604 void LAllocator::AddToActive(LiveRange* range) { |
| 1659 TraceAlloc("Add live range %d to active\n", range->id()); | 1605 TraceAlloc("Add live range %d to active\n", range->id()); |
| 1660 active_live_ranges_.Add(range); | 1606 active_live_ranges_.Add(range); |
| 1661 } | 1607 } |
| 1662 | 1608 |
| 1663 | 1609 |
| 1664 void LAllocator::AddToInactive(LiveRange* range) { | 1610 void LAllocator::AddToInactive(LiveRange* range) { |
| 1665 TraceAlloc("Add live range %d to inactive\n", range->id()); | 1611 TraceAlloc("Add live range %d to inactive\n", range->id()); |
| 1666 inactive_live_ranges_.Add(range); | 1612 inactive_live_ranges_.Add(range); |
| 1667 } | 1613 } |
| (...skipping 325 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1993 InactiveToHandled(range); | 1939 InactiveToHandled(range); |
| 1994 --i; | 1940 --i; |
| 1995 } | 1941 } |
| 1996 } | 1942 } |
| 1997 } | 1943 } |
| 1998 } | 1944 } |
| 1999 | 1945 |
| 2000 | 1946 |
| 2001 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { | 1947 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { |
| 2002 return pos.IsInstructionStart() && | 1948 return pos.IsInstructionStart() && |
| 2003 chunk_->instructions()->at(pos.InstructionIndex())->IsLabel(); | 1949 InstructionAt(pos.InstructionIndex())->IsLabel(); |
| 2004 } | |
| 2005 | |
| 2006 | |
| 2007 void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) { | |
| 2008 UsePosition* prev_pos = prev->AddUsePosition( | |
| 2009 LifetimePosition::FromInstructionIndex(pos)); | |
| 2010 UsePosition* next_pos = next->AddUsePosition( | |
| 2011 LifetimePosition::FromInstructionIndex(pos)); | |
| 2012 LOperand* prev_operand = prev_pos->operand(); | |
| 2013 LOperand* next_operand = next_pos->operand(); | |
| 2014 LGap* gap = chunk_->GetGapAt(pos); | |
| 2015 gap->GetOrCreateParallelMove(LGap::START)-> | |
| 2016 AddMove(prev_operand, next_operand); | |
| 2017 next_pos->set_hint(prev_operand); | |
| 2018 } | 1950 } |
| 2019 | 1951 |
| 2020 | 1952 |
| 2021 LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) { | 1953 LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) { |
| 2022 ASSERT(!range->IsFixed()); | 1954 ASSERT(!range->IsFixed()); |
| 2023 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); | 1955 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); |
| 2024 | 1956 |
| 2025 if (pos.Value() <= range->Start().Value()) return range; | 1957 if (pos.Value() <= range->Start().Value()) return range; |
| 2026 | 1958 |
| 1959 // We can't properly connect live ranges if a split occurred at the end |
| 1960 // of a control instruction. |
| 1961 ASSERT(pos.IsInstructionStart() || |
| 1962 !chunk_->instructions()->at(pos.InstructionIndex())->IsControl()); |
| 1963 |
| 2027 LiveRange* result = LiveRangeFor(next_virtual_register_++); | 1964 LiveRange* result = LiveRangeFor(next_virtual_register_++); |
| 2028 range->SplitAt(pos, result); | 1965 range->SplitAt(pos, result); |
| 2029 return result; | 1966 return result; |
| 2030 } | 1967 } |
| 2031 | 1968 |
| 2032 | 1969 |
| 2033 LiveRange* LAllocator::SplitBetween(LiveRange* range, | 1970 LiveRange* LAllocator::SplitBetween(LiveRange* range, |
| 2034 LifetimePosition start, | 1971 LifetimePosition start, |
| 2035 LifetimePosition end) { | 1972 LifetimePosition end) { |
| 2036 ASSERT(!range->IsFixed()); | 1973 ASSERT(!range->IsFixed()); |
| (...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2137 LiveRange* current = live_ranges()->at(i); | 2074 LiveRange* current = live_ranges()->at(i); |
| 2138 if (current != NULL) current->Verify(); | 2075 if (current != NULL) current->Verify(); |
| 2139 } | 2076 } |
| 2140 } | 2077 } |
| 2141 | 2078 |
| 2142 | 2079 |
| 2143 #endif | 2080 #endif |
| 2144 | 2081 |
| 2145 | 2082 |
| 2146 } } // namespace v8::internal | 2083 } } // namespace v8::internal |
| OLD | NEW |