| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/linkage.h" | 5 #include "src/compiler/linkage.h" |
| 6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
| 7 #include "src/string-stream.h" | 7 #include "src/string-stream.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| (...skipping 151 matching lines...) |
| 162 } | 162 } |
| 163 | 163 |
| 164 | 164 |
| 165 void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence, | 165 void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence, |
| 166 InstructionOperand* op, | 166 InstructionOperand* op, |
| 167 bool might_be_duplicated) { | 167 bool might_be_duplicated) { |
| 168 DCHECK(!IsChild()); | 168 DCHECK(!IsChild()); |
| 169 auto zone = sequence->zone(); | 169 auto zone = sequence->zone(); |
| 170 for (auto to_spill = spills_at_definition_; to_spill != nullptr; | 170 for (auto to_spill = spills_at_definition_; to_spill != nullptr; |
| 171 to_spill = to_spill->next) { | 171 to_spill = to_spill->next) { |
| 172 auto gap = sequence->GapAt(to_spill->gap_index); | 172 auto instr = sequence->InstructionAt(to_spill->gap_index); |
| 173 auto move = gap->GetOrCreateParallelMove(GapInstruction::START, zone); | 173 auto move = instr->GetOrCreateParallelMove(Instruction::START, zone); |
| 174 // Skip insertion if it's possible that the move exists already as a | 174 // Skip insertion if it's possible that the move exists already as a |
| 175 // constraint move from a fixed output register to a slot. | 175 // constraint move from a fixed output register to a slot. |
| 176 if (might_be_duplicated) { | 176 if (might_be_duplicated) { |
| 177 bool found = false; | 177 bool found = false; |
| 178 auto move_ops = move->move_operands(); | 178 auto move_ops = move->move_operands(); |
| 179 for (auto move_op = move_ops->begin(); move_op != move_ops->end(); | 179 for (auto move_op = move_ops->begin(); move_op != move_ops->end(); |
| 180 ++move_op) { | 180 ++move_op) { |
| 181 if (move_op->IsEliminated()) continue; | 181 if (move_op->IsEliminated()) continue; |
| 182 if (move_op->source()->Equals(to_spill->operand) && | 182 if (move_op->source()->Equals(to_spill->operand) && |
| 183 move_op->destination()->Equals(op)) { | 183 move_op->destination()->Equals(op)) { |
| (...skipping 73 matching lines...) |
| 257 } | 257 } |
| 258 return pos; | 258 return pos; |
| 259 } | 259 } |
| 260 | 260 |
| 261 | 261 |
| 262 bool LiveRange::CanBeSpilled(LifetimePosition pos) { | 262 bool LiveRange::CanBeSpilled(LifetimePosition pos) { |
| 263 // We cannot spill a live range that has a use requiring a register | 263 // We cannot spill a live range that has a use requiring a register |
| 264 // at the current or the immediate next position. | 264 // at the current or the immediate next position. |
| 265 auto use_pos = NextRegisterPosition(pos); | 265 auto use_pos = NextRegisterPosition(pos); |
| 266 if (use_pos == nullptr) return true; | 266 if (use_pos == nullptr) return true; |
| 267 return use_pos->pos().Value() > | 267 return use_pos->pos().Value() > pos.NextStart().End().Value(); |
| 268 pos.NextInstruction().InstructionEnd().Value(); | |
| 269 } | 268 } |
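A note on the position model this patch switches to, since the CanBeSpilled hunk above is the clearest instance of the rewrite (NextInstruction().InstructionEnd() becoming NextStart().End()). The sketch below is reconstructed from the call sites in this change (GapFromInstructionIndex, InstructionFromInstructionIndex, IsGapPosition, NextStart, PrevStart, NextFullStart); the constants and method bodies are an assumption rather than a copy of the real header. The idea: each instruction index now owns four consecutive positions, two for the gap in front of it and two for the instruction itself.

    // Sketch, assuming four lifetime positions per instruction index:
    //   index*4 + 0  gap start          index*4 + 2  instruction start
    //   index*4 + 1  gap end            index*4 + 3  instruction end
    class LifetimePosition {
     public:
      static const int kHalfStep = 2;
      static const int kStep = 4;
      static LifetimePosition GapFromInstructionIndex(int index) {
        return LifetimePosition(index * kStep);
      }
      static LifetimePosition InstructionFromInstructionIndex(int index) {
        return LifetimePosition(index * kStep + kHalfStep);
      }
      bool IsGapPosition() const { return (value_ & 0x2) == 0; }
      bool IsInstructionPosition() const { return !IsGapPosition(); }
      bool IsStart() const { return (value_ & 0x1) == 0; }
      LifetimePosition Start() const { return LifetimePosition(value_ & ~1); }
      LifetimePosition End() const { return LifetimePosition(Start().value_ + 1); }
      LifetimePosition NextStart() const {
        return LifetimePosition(Start().value_ + kHalfStep);
      }
      LifetimePosition PrevStart() const {
        return LifetimePosition(Start().value_ - kHalfStep);
      }
      LifetimePosition NextFullStart() const {
        return LifetimePosition((value_ & ~3) + kStep);  // next gap start
      }
      int ToInstructionIndex() const { return value_ / kStep; }
      int Value() const { return value_; }

     private:
      explicit LifetimePosition(int value) : value_(value) {}
      int value_;
    };

Under that encoding, the new CanBeSpilled condition reads: the next use requiring a register must fall strictly after the position pair immediately following pos.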
| 270 | 269 |
| 271 | 270 |
| 272 InstructionOperand* LiveRange::GetAssignedOperand( | 271 InstructionOperand* LiveRange::GetAssignedOperand( |
| 273 InstructionOperandCache* cache) const { | 272 InstructionOperandCache* cache) const { |
| 274 if (HasRegisterAssigned()) { | 273 if (HasRegisterAssigned()) { |
| 275 DCHECK(!IsSpilled()); | 274 DCHECK(!IsSpilled()); |
| 276 switch (Kind()) { | 275 switch (Kind()) { |
| 277 case GENERAL_REGISTERS: | 276 case GENERAL_REGISTERS: |
| 278 return cache->RegisterOperand(assigned_register()); | 277 return cache->RegisterOperand(assigned_register()); |
| (...skipping 399 matching lines...) |
| 678 } | 677 } |
| 679 } | 678 } |
| 680 return live_out; | 679 return live_out; |
| 681 } | 680 } |
| 682 | 681 |
| 683 | 682 |
| 684 void RegisterAllocator::AddInitialIntervals(const InstructionBlock* block, | 683 void RegisterAllocator::AddInitialIntervals(const InstructionBlock* block, |
| 685 BitVector* live_out) { | 684 BitVector* live_out) { |
| 686 // Add an interval that includes the entire block to the live range for | 685 // Add an interval that includes the entire block to the live range for |
| 687 // each live_out value. | 686 // each live_out value. |
| 688 auto start = | 687 auto start = LifetimePosition::GapFromInstructionIndex( |
| 689 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); | 688 block->first_instruction_index()); |
| 690 auto end = LifetimePosition::FromInstructionIndex( | 689 auto end = LifetimePosition::InstructionFromInstructionIndex( |
| 691 block->last_instruction_index()).NextInstruction(); | 690 block->last_instruction_index()).NextStart(); |
| 692 BitVector::Iterator iterator(live_out); | 691 BitVector::Iterator iterator(live_out); |
| 693 while (!iterator.Done()) { | 692 while (!iterator.Done()) { |
| 694 int operand_index = iterator.Current(); | 693 int operand_index = iterator.Current(); |
| 695 auto range = LiveRangeFor(operand_index); | 694 auto range = LiveRangeFor(operand_index); |
| 696 range->AddUseInterval(start, end, local_zone()); | 695 range->AddUseInterval(start, end, local_zone()); |
| 697 iterator.Advance(); | 696 iterator.Advance(); |
| 698 } | 697 } |
| 699 } | 698 } |
| 700 | 699 |
| 701 | 700 |
| (...skipping 71 matching lines...) |
| 773 } | 772 } |
| 774 auto result = live_ranges()[index]; | 773 auto result = live_ranges()[index]; |
| 775 if (result == nullptr) { | 774 if (result == nullptr) { |
| 776 result = NewLiveRange(index); | 775 result = NewLiveRange(index); |
| 777 live_ranges()[index] = result; | 776 live_ranges()[index] = result; |
| 778 } | 777 } |
| 779 return result; | 778 return result; |
| 780 } | 779 } |
| 781 | 780 |
| 782 | 781 |
| 783 GapInstruction* RegisterAllocator::GetLastGap(const InstructionBlock* block) { | 782 Instruction* RegisterAllocator::GetLastInstruction( |
| 784 int last_instruction = block->last_instruction_index(); | 783 const InstructionBlock* block) { |
| 785 return code()->GapAt(last_instruction - 1); | 784 return code()->InstructionAt(block->last_instruction_index()); |
| 786 } | 785 } |
| 787 | 786 |
| 788 | 787 |
| 789 LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) { | 788 LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) { |
| 790 if (operand->IsUnallocated()) { | 789 if (operand->IsUnallocated()) { |
| 791 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register()); | 790 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register()); |
| 791 } else if (operand->IsConstant()) { |
| 792 return LiveRangeFor(ConstantOperand::cast(operand)->index()); |
| 792 } else if (operand->IsRegister()) { | 793 } else if (operand->IsRegister()) { |
| 793 return FixedLiveRangeFor(operand->index()); | 794 return FixedLiveRangeFor(operand->index()); |
| 794 } else if (operand->IsDoubleRegister()) { | 795 } else if (operand->IsDoubleRegister()) { |
| 795 return FixedDoubleLiveRangeFor(operand->index()); | 796 return FixedDoubleLiveRangeFor(operand->index()); |
| 796 } else { | 797 } else { |
| 797 return nullptr; | 798 return nullptr; |
| 798 } | 799 } |
| 799 } | 800 } |
| 800 | 801 |
| 801 | 802 |
| 802 void RegisterAllocator::Define(LifetimePosition position, | 803 void RegisterAllocator::Define(LifetimePosition position, |
| 803 InstructionOperand* operand, | 804 InstructionOperand* operand, |
| 804 InstructionOperand* hint) { | 805 InstructionOperand* hint) { |
| 805 auto range = LiveRangeFor(operand); | 806 auto range = LiveRangeFor(operand); |
| 806 if (range == nullptr) return; | 807 if (range == nullptr) return; |
| 807 | 808 |
| 808 if (range->IsEmpty() || range->Start().Value() > position.Value()) { | 809 if (range->IsEmpty() || range->Start().Value() > position.Value()) { |
| 809 // Can happen if there is a definition without use. | 810 // Can happen if there is a definition without use. |
| 810 range->AddUseInterval(position, position.NextInstruction(), local_zone()); | 811 range->AddUseInterval(position, position.NextStart(), local_zone()); |
| 811 range->AddUsePosition(position.NextInstruction(), nullptr, nullptr, | 812 range->AddUsePosition(position.NextStart(), nullptr, nullptr, local_zone()); |
| 812 local_zone()); | |
| 813 } else { | 813 } else { |
| 814 range->ShortenTo(position); | 814 range->ShortenTo(position); |
| 815 } | 815 } |
| 816 | 816 |
| 817 if (operand->IsUnallocated()) { | 817 if (operand->IsUnallocated()) { |
| 818 auto unalloc_operand = UnallocatedOperand::cast(operand); | 818 auto unalloc_operand = UnallocatedOperand::cast(operand); |
| 819 range->AddUsePosition(position, unalloc_operand, hint, local_zone()); | 819 range->AddUsePosition(position, unalloc_operand, hint, local_zone()); |
| 820 } | 820 } |
| 821 } | 821 } |
| 822 | 822 |
| 823 | 823 |
| 824 void RegisterAllocator::Use(LifetimePosition block_start, | 824 void RegisterAllocator::Use(LifetimePosition block_start, |
| 825 LifetimePosition position, | 825 LifetimePosition position, |
| 826 InstructionOperand* operand, | 826 InstructionOperand* operand, |
| 827 InstructionOperand* hint) { | 827 InstructionOperand* hint) { |
| 828 auto range = LiveRangeFor(operand); | 828 auto range = LiveRangeFor(operand); |
| 829 if (range == nullptr) return; | 829 if (range == nullptr) return; |
| 830 if (operand->IsUnallocated()) { | 830 if (operand->IsUnallocated()) { |
| 831 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); | 831 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); |
| 832 range->AddUsePosition(position, unalloc_operand, hint, local_zone()); | 832 range->AddUsePosition(position, unalloc_operand, hint, local_zone()); |
| 833 } | 833 } |
| 834 range->AddUseInterval(block_start, position, local_zone()); | 834 range->AddUseInterval(block_start, position, local_zone()); |
| 835 } | 835 } |
| 836 | 836 |
| 837 | 837 |
| 838 void RegisterAllocator::AddGapMove(int index, | 838 void RegisterAllocator::AddGapMove(int index, Instruction::GapPosition position, |
| 839 GapInstruction::InnerPosition position, | |
| 840 InstructionOperand* from, | 839 InstructionOperand* from, |
| 841 InstructionOperand* to) { | 840 InstructionOperand* to) { |
| 842 auto gap = code()->GapAt(index); | 841 auto instr = code()->InstructionAt(index); |
| 843 auto move = gap->GetOrCreateParallelMove(position, code_zone()); | 842 auto move = instr->GetOrCreateParallelMove(position, code_zone()); |
| 844 move->AddMove(from, to, code_zone()); | 843 move->AddMove(from, to, code_zone()); |
| 845 } | 844 } |
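With GapInstruction folded into Instruction, parallel moves now hang off the instruction itself, addressed by Instruction::GapPosition (START, END); per the encoding sketched earlier, both slots sit in the gap ahead of the instruction, with START moves ordered before END moves. A hedged sketch of how the rewritten AddGapMove is meant to be called (the helper name and operands are invented for illustration):

    // Hypothetical call site: schedule a move in the gap in front of the
    // instruction at `index`, the same way AddGapMove above does.
    void ScheduleMove(InstructionSequence* code, Zone* zone, int index,
                      InstructionOperand* from, InstructionOperand* to) {
      Instruction* instr = code->InstructionAt(index);
      ParallelMove* move =
          instr->GetOrCreateParallelMove(Instruction::START, zone);
      move->AddMove(from, to, zone);  // executes before `instr` runs
    }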
| 846 | 845 |
| 847 | 846 |
| 848 static bool AreUseIntervalsIntersecting(UseInterval* interval1, | 847 static bool AreUseIntervalsIntersecting(UseInterval* interval1, |
| 849 UseInterval* interval2) { | 848 UseInterval* interval2) { |
| 850 while (interval1 != nullptr && interval2 != nullptr) { | 849 while (interval1 != nullptr && interval2 != nullptr) { |
| 851 if (interval1->start().Value() < interval2->start().Value()) { | 850 if (interval1->start().Value() < interval2->start().Value()) { |
| 852 if (interval1->end().Value() > interval2->start().Value()) { | 851 if (interval1->end().Value() > interval2->start().Value()) { |
| 853 return true; | 852 return true; |
| (...skipping 163 matching lines...) |
| 1017 auto phi = lookup->second.phi; | 1016 auto phi = lookup->second.phi; |
| 1018 auto block = lookup->second.block; | 1017 auto block = lookup->second.block; |
| 1019 // Count the number of spilled operands. | 1018 // Count the number of spilled operands. |
| 1020 size_t spilled_count = 0; | 1019 size_t spilled_count = 0; |
| 1021 LiveRange* first_op = nullptr; | 1020 LiveRange* first_op = nullptr; |
| 1022 for (size_t i = 0; i < phi->operands().size(); i++) { | 1021 for (size_t i = 0; i < phi->operands().size(); i++) { |
| 1023 int op = phi->operands()[i]; | 1022 int op = phi->operands()[i]; |
| 1024 LiveRange* op_range = LiveRangeFor(op); | 1023 LiveRange* op_range = LiveRangeFor(op); |
| 1025 if (op_range->GetSpillRange() == nullptr) continue; | 1024 if (op_range->GetSpillRange() == nullptr) continue; |
| 1026 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); | 1025 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); |
| 1027 auto pred_end = | 1026 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( |
| 1028 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); | 1027 pred->last_instruction_index()); |
| 1029 while (op_range != nullptr && !op_range->CanCover(pred_end)) { | 1028 while (op_range != nullptr && !op_range->CanCover(pred_end)) { |
| 1030 op_range = op_range->next(); | 1029 op_range = op_range->next(); |
| 1031 } | 1030 } |
| 1032 if (op_range != nullptr && op_range->IsSpilled()) { | 1031 if (op_range != nullptr && op_range->IsSpilled()) { |
| 1033 spilled_count++; | 1032 spilled_count++; |
| 1034 if (first_op == nullptr) { | 1033 if (first_op == nullptr) { |
| 1035 first_op = op_range->TopLevel(); | 1034 first_op = op_range->TopLevel(); |
| 1036 } | 1035 } |
| 1037 } | 1036 } |
| 1038 } | 1037 } |
| (...skipping 22 matching lines...) |
| 1061 // same spill slot. | 1060 // same spill slot. |
| 1062 if (num_merged * 2 <= phi->operands().size() || | 1061 if (num_merged * 2 <= phi->operands().size() || |
| 1063 AreUseIntervalsIntersecting(first_op_spill->interval(), | 1062 AreUseIntervalsIntersecting(first_op_spill->interval(), |
| 1064 range->first_interval())) { | 1063 range->first_interval())) { |
| 1065 return false; | 1064 return false; |
| 1066 } | 1065 } |
| 1067 | 1066 |
| 1068 // If the range does not need register soon, spill it to the merged | 1067 // If the range does not need register soon, spill it to the merged |
| 1069 // spill range. | 1068 // spill range. |
| 1070 auto next_pos = range->Start(); | 1069 auto next_pos = range->Start(); |
| 1071 if (code()->IsGapAt(next_pos.InstructionIndex())) { | 1070 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart(); |
| 1072 next_pos = next_pos.NextInstruction(); | |
| 1073 } | |
| 1074 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | 1071 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); |
| 1075 if (pos == nullptr) { | 1072 if (pos == nullptr) { |
| 1076 auto spill_range = range->TopLevel()->HasSpillRange() | 1073 auto spill_range = range->TopLevel()->HasSpillRange() |
| 1077 ? range->TopLevel()->GetSpillRange() | 1074 ? range->TopLevel()->GetSpillRange() |
| 1078 : AssignSpillRangeToLiveRange(range->TopLevel()); | 1075 : AssignSpillRangeToLiveRange(range->TopLevel()); |
| 1079 CHECK(first_op_spill->TryMerge(spill_range)); | 1076 CHECK(first_op_spill->TryMerge(spill_range)); |
| 1080 Spill(range); | 1077 Spill(range); |
| 1081 return true; | 1078 return true; |
| 1082 } else if (pos->pos().Value() > range->Start().NextInstruction().Value()) { | 1079 } else if (pos->pos().Value() > range->Start().NextStart().Value()) { |
| 1083 auto spill_range = range->TopLevel()->HasSpillRange() | 1080 auto spill_range = range->TopLevel()->HasSpillRange() |
| 1084 ? range->TopLevel()->GetSpillRange() | 1081 ? range->TopLevel()->GetSpillRange() |
| 1085 : AssignSpillRangeToLiveRange(range->TopLevel()); | 1082 : AssignSpillRangeToLiveRange(range->TopLevel()); |
| 1086 CHECK(first_op_spill->TryMerge(spill_range)); | 1083 CHECK(first_op_spill->TryMerge(spill_range)); |
| 1087 SpillBetween(range, range->Start(), pos->pos()); | 1084 SpillBetween(range, range->Start(), pos->pos()); |
| 1088 DCHECK(UnhandledIsSorted()); | 1085 DCHECK(UnhandledIsSorted()); |
| 1089 return true; | 1086 return true; |
| 1090 } | 1087 } |
| 1091 return false; | 1088 return false; |
| 1092 } | 1089 } |
| 1093 | 1090 |
| 1094 | 1091 |
| 1095 void RegisterAllocator::MeetRegisterConstraints(const InstructionBlock* block) { | 1092 void RegisterAllocator::MeetRegisterConstraints(const InstructionBlock* block) { |
| 1096 int start = block->first_instruction_index(); | 1093 int start = block->first_instruction_index(); |
| 1097 int end = block->last_instruction_index(); | 1094 int end = block->last_instruction_index(); |
| 1098 DCHECK_NE(-1, start); | 1095 DCHECK_NE(-1, start); |
| 1099 for (int i = start; i <= end; ++i) { | 1096 for (int i = start; i <= end; ++i) { |
| 1100 if (code()->IsGapAt(i)) { | 1097 MeetConstraintsBefore(i); |
| 1101 Instruction* instr = nullptr; | 1098 if (i != end) MeetConstraintsAfter(i); |
| 1102 Instruction* prev_instr = nullptr; | |
| 1103 if (i < end) instr = InstructionAt(i + 1); | |
| 1104 if (i > start) prev_instr = InstructionAt(i - 1); | |
| 1105 MeetConstraintsBetween(prev_instr, instr, i); | |
| 1106 } | |
| 1107 } | 1099 } |
| 1108 | |
| 1109 // Meet register constraints for the instruction in the end. | 1100 // Meet register constraints for the instruction in the end. |
| 1110 if (!code()->IsGapAt(end)) { | 1101 MeetRegisterConstraintsForLastInstructionInBlock(block); |
| 1111 MeetRegisterConstraintsForLastInstructionInBlock(block); | |
| 1112 } | |
| 1113 } | 1102 } |
| 1114 | 1103 |
| 1115 | 1104 |
| 1116 void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock( | 1105 void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock( |
| 1117 const InstructionBlock* block) { | 1106 const InstructionBlock* block) { |
| 1118 int end = block->last_instruction_index(); | 1107 int end = block->last_instruction_index(); |
| 1119 auto last_instruction = InstructionAt(end); | 1108 auto last_instruction = InstructionAt(end); |
| 1120 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { | 1109 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { |
| 1121 auto output_operand = last_instruction->OutputAt(i); | 1110 auto output_operand = last_instruction->OutputAt(i); |
| 1122 DCHECK(!output_operand->IsConstant()); | 1111 DCHECK(!output_operand->IsConstant()); |
| 1123 auto output = UnallocatedOperand::cast(output_operand); | 1112 auto output = UnallocatedOperand::cast(output_operand); |
| 1124 int output_vreg = output->virtual_register(); | 1113 int output_vreg = output->virtual_register(); |
| 1125 auto range = LiveRangeFor(output_vreg); | 1114 auto range = LiveRangeFor(output_vreg); |
| 1126 bool assigned = false; | 1115 bool assigned = false; |
| 1127 if (output->HasFixedPolicy()) { | 1116 if (output->HasFixedPolicy()) { |
| 1128 AllocateFixed(output, -1, false); | 1117 AllocateFixed(output, -1, false); |
| 1129 // This value is produced on the stack, we never need to spill it. | 1118 // This value is produced on the stack, we never need to spill it. |
| 1130 if (output->IsStackSlot()) { | 1119 if (output->IsStackSlot()) { |
| 1131 DCHECK(output->index() < frame_->GetSpillSlotCount()); | 1120 DCHECK(output->index() < frame_->GetSpillSlotCount()); |
| 1132 range->SetSpillOperand(output); | 1121 range->SetSpillOperand(output); |
| 1133 range->SetSpillStartIndex(end); | 1122 range->SetSpillStartIndex(end); |
| 1134 assigned = true; | 1123 assigned = true; |
| 1135 } | 1124 } |
| 1136 | 1125 |
| 1137 for (auto succ : block->successors()) { | 1126 for (auto succ : block->successors()) { |
| 1138 const InstructionBlock* successor = code()->InstructionBlockAt(succ); | 1127 const InstructionBlock* successor = code()->InstructionBlockAt(succ); |
| 1139 DCHECK(successor->PredecessorCount() == 1); | 1128 DCHECK(successor->PredecessorCount() == 1); |
| 1140 int gap_index = successor->first_instruction_index(); | 1129 int gap_index = successor->first_instruction_index(); |
| 1141 DCHECK(code()->IsGapAt(gap_index)); | |
| 1142 | |
| 1143 // Create an unconstrained operand for the same virtual register | 1130 // Create an unconstrained operand for the same virtual register |
| 1144 // and insert a gap move from the fixed output to the operand. | 1131 // and insert a gap move from the fixed output to the operand. |
| 1145 UnallocatedOperand* output_copy = | 1132 UnallocatedOperand* output_copy = |
| 1146 UnallocatedOperand(UnallocatedOperand::ANY, output_vreg) | 1133 UnallocatedOperand(UnallocatedOperand::ANY, output_vreg) |
| 1147 .Copy(code_zone()); | 1134 .Copy(code_zone()); |
| 1148 AddGapMove(gap_index, GapInstruction::START, output, output_copy); | 1135 AddGapMove(gap_index, Instruction::START, output, output_copy); |
| 1149 } | 1136 } |
| 1150 } | 1137 } |
| 1151 | 1138 |
| 1152 if (!assigned) { | 1139 if (!assigned) { |
| 1153 for (auto succ : block->successors()) { | 1140 for (auto succ : block->successors()) { |
| 1154 const InstructionBlock* successor = code()->InstructionBlockAt(succ); | 1141 const InstructionBlock* successor = code()->InstructionBlockAt(succ); |
| 1155 DCHECK(successor->PredecessorCount() == 1); | 1142 DCHECK(successor->PredecessorCount() == 1); |
| 1156 int gap_index = successor->first_instruction_index(); | 1143 int gap_index = successor->first_instruction_index(); |
| 1157 range->SpillAtDefinition(local_zone(), gap_index, output); | 1144 range->SpillAtDefinition(local_zone(), gap_index, output); |
| 1158 range->SetSpillStartIndex(gap_index); | 1145 range->SetSpillStartIndex(gap_index); |
| 1159 } | 1146 } |
| 1160 } | 1147 } |
| 1161 } | 1148 } |
| 1162 } | 1149 } |
| 1163 | 1150 |
| 1164 | 1151 |
| 1165 void RegisterAllocator::MeetConstraintsBetween(Instruction* first, | 1152 void RegisterAllocator::MeetConstraintsAfter(int instr_index) { |
| 1166 Instruction* second, | 1153 auto first = InstructionAt(instr_index); |
| 1167 int gap_index) { | 1154 // Handle fixed temporaries. |
| 1168 if (first != nullptr) { | 1155 for (size_t i = 0; i < first->TempCount(); i++) { |
| 1169 // Handle fixed temporaries. | 1156 auto temp = UnallocatedOperand::cast(first->TempAt(i)); |
| 1170 for (size_t i = 0; i < first->TempCount(); i++) { | 1157 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false); |
| 1171 auto temp = UnallocatedOperand::cast(first->TempAt(i)); | 1158 } |
| 1172 if (temp->HasFixedPolicy()) { | 1159 // Handle constant/fixed output operands. |
| 1173 AllocateFixed(temp, gap_index - 1, false); | 1160 for (size_t i = 0; i < first->OutputCount(); i++) { |
| 1161 InstructionOperand* output = first->OutputAt(i); |
| 1162 if (output->IsConstant()) { |
| 1163 int output_vreg = output->index(); |
| 1164 auto range = LiveRangeFor(output_vreg); |
| 1165 range->SetSpillStartIndex(instr_index + 1); |
| 1166 range->SetSpillOperand(output); |
| 1167 continue; |
| 1168 } |
| 1169 auto first_output = UnallocatedOperand::cast(output); |
| 1170 auto range = LiveRangeFor(first_output->virtual_register()); |
| 1171 bool assigned = false; |
| 1172 if (first_output->HasFixedPolicy()) { |
| 1173 auto output_copy = first_output->CopyUnconstrained(code_zone()); |
| 1174 bool is_tagged = HasTaggedValue(first_output->virtual_register()); |
| 1175 AllocateFixed(first_output, instr_index, is_tagged); |
| 1176 |
| 1177 // This value is produced on the stack, we never need to spill it. |
| 1178 if (first_output->IsStackSlot()) { |
| 1179 DCHECK(first_output->index() < frame_->GetSpillSlotCount()); |
| 1180 range->SetSpillOperand(first_output); |
| 1181 range->SetSpillStartIndex(instr_index + 1); |
| 1182 assigned = true; |
| 1174 } | 1183 } |
| 1184 AddGapMove(instr_index + 1, Instruction::START, first_output, |
| 1185 output_copy); |
| 1175 } | 1186 } |
| 1176 | 1187 // Make sure we add a gap move for spilling (if we have not done |
| 1177 // Handle constant/fixed output operands. | 1188 // so already). |
| 1178 for (size_t i = 0; i < first->OutputCount(); i++) { | 1189 if (!assigned) { |
| 1179 InstructionOperand* output = first->OutputAt(i); | 1190 range->SpillAtDefinition(local_zone(), instr_index + 1, first_output); |
| 1180 if (output->IsConstant()) { | 1191 range->SetSpillStartIndex(instr_index + 1); |
| 1181 int output_vreg = output->index(); | |
| 1182 auto range = LiveRangeFor(output_vreg); | |
| 1183 range->SetSpillStartIndex(gap_index - 1); | |
| 1184 range->SetSpillOperand(output); | |
| 1185 } else { | |
| 1186 auto first_output = UnallocatedOperand::cast(output); | |
| 1187 auto range = LiveRangeFor(first_output->virtual_register()); | |
| 1188 bool assigned = false; | |
| 1189 if (first_output->HasFixedPolicy()) { | |
| 1190 auto output_copy = first_output->CopyUnconstrained(code_zone()); | |
| 1191 bool is_tagged = HasTaggedValue(first_output->virtual_register()); | |
| 1192 AllocateFixed(first_output, gap_index, is_tagged); | |
| 1193 | |
| 1194 // This value is produced on the stack, we never need to spill it. | |
| 1195 if (first_output->IsStackSlot()) { | |
| 1196 DCHECK(first_output->index() < frame_->GetSpillSlotCount()); | |
| 1197 range->SetSpillOperand(first_output); | |
| 1198 range->SetSpillStartIndex(gap_index - 1); | |
| 1199 assigned = true; | |
| 1200 } | |
| 1201 AddGapMove(gap_index, GapInstruction::START, first_output, | |
| 1202 output_copy); | |
| 1203 } | |
| 1204 | |
| 1205 // Make sure we add a gap move for spilling (if we have not done | |
| 1206 // so already). | |
| 1207 if (!assigned) { | |
| 1208 range->SpillAtDefinition(local_zone(), gap_index, first_output); | |
| 1209 range->SetSpillStartIndex(gap_index); | |
| 1210 } | |
| 1211 } | |
| 1212 } | |
| 1213 } | |
| 1214 | |
| 1215 if (second != nullptr) { | |
| 1216 // Handle fixed input operands of second instruction. | |
| 1217 for (size_t i = 0; i < second->InputCount(); i++) { | |
| 1218 auto input = second->InputAt(i); | |
| 1219 if (input->IsImmediate()) continue; // Ignore immediates. | |
| 1220 auto cur_input = UnallocatedOperand::cast(input); | |
| 1221 if (cur_input->HasFixedPolicy()) { | |
| 1222 auto input_copy = cur_input->CopyUnconstrained(code_zone()); | |
| 1223 bool is_tagged = HasTaggedValue(cur_input->virtual_register()); | |
| 1224 AllocateFixed(cur_input, gap_index + 1, is_tagged); | |
| 1225 AddGapMove(gap_index, GapInstruction::END, input_copy, cur_input); | |
| 1226 } | |
| 1227 } | |
| 1228 | |
| 1229 // Handle "output same as input" for second instruction. | |
| 1230 for (size_t i = 0; i < second->OutputCount(); i++) { | |
| 1231 auto output = second->OutputAt(i); | |
| 1232 if (!output->IsUnallocated()) continue; | |
| 1233 auto second_output = UnallocatedOperand::cast(output); | |
| 1234 if (second_output->HasSameAsInputPolicy()) { | |
| 1235 DCHECK(i == 0); // Only valid for first output. | |
| 1236 UnallocatedOperand* cur_input = | |
| 1237 UnallocatedOperand::cast(second->InputAt(0)); | |
| 1238 int output_vreg = second_output->virtual_register(); | |
| 1239 int input_vreg = cur_input->virtual_register(); | |
| 1240 | |
| 1241 auto input_copy = cur_input->CopyUnconstrained(code_zone()); | |
| 1242 cur_input->set_virtual_register(second_output->virtual_register()); | |
| 1243 AddGapMove(gap_index, GapInstruction::END, input_copy, cur_input); | |
| 1244 | |
| 1245 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { | |
| 1246 int index = gap_index + 1; | |
| 1247 Instruction* instr = InstructionAt(index); | |
| 1248 if (instr->HasPointerMap()) { | |
| 1249 instr->pointer_map()->RecordPointer(input_copy, code_zone()); | |
| 1250 } | |
| 1251 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { | |
| 1252 // The input is assumed to immediately have a tagged representation, | |
| 1253 // before the pointer map can be used. I.e. the pointer map at the | |
| 1254 // instruction will include the output operand (whose value at the | |
| 1255 // beginning of the instruction is equal to the input operand). If | |
| 1256 // this is not desired, then the pointer map at this instruction needs | |
| 1257 // to be adjusted manually. | |
| 1258 } | |
| 1259 } | |
| 1260 } | 1192 } |
| 1261 } | 1193 } |
| 1262 } | 1194 } |
| 1195 |
| 1196 |
| 1197 void RegisterAllocator::MeetConstraintsBefore(int instr_index) { |
| 1198 auto second = InstructionAt(instr_index); |
| 1199 // Handle fixed input operands of second instruction. |
| 1200 for (size_t i = 0; i < second->InputCount(); i++) { |
| 1201 auto input = second->InputAt(i); |
| 1202 if (input->IsImmediate()) continue; // Ignore immediates. |
| 1203 auto cur_input = UnallocatedOperand::cast(input); |
| 1204 if (cur_input->HasFixedPolicy()) { |
| 1205 auto input_copy = cur_input->CopyUnconstrained(code_zone()); |
| 1206 bool is_tagged = HasTaggedValue(cur_input->virtual_register()); |
| 1207 AllocateFixed(cur_input, instr_index, is_tagged); |
| 1208 AddGapMove(instr_index, Instruction::END, input_copy, cur_input); |
| 1209 } |
| 1210 } |
| 1211 // Handle "output same as input" for second instruction. |
| 1212 for (size_t i = 0; i < second->OutputCount(); i++) { |
| 1213 auto output = second->OutputAt(i); |
| 1214 if (!output->IsUnallocated()) continue; |
| 1215 auto second_output = UnallocatedOperand::cast(output); |
| 1216 if (!second_output->HasSameAsInputPolicy()) continue; |
| 1217 DCHECK(i == 0); // Only valid for first output. |
| 1218 UnallocatedOperand* cur_input = |
| 1219 UnallocatedOperand::cast(second->InputAt(0)); |
| 1220 int output_vreg = second_output->virtual_register(); |
| 1221 int input_vreg = cur_input->virtual_register(); |
| 1222 auto input_copy = cur_input->CopyUnconstrained(code_zone()); |
| 1223 cur_input->set_virtual_register(second_output->virtual_register()); |
| 1224 AddGapMove(instr_index, Instruction::END, input_copy, cur_input); |
| 1225 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { |
| 1226 if (second->HasPointerMap()) { |
| 1227 second->pointer_map()->RecordPointer(input_copy, code_zone()); |
| 1228 } |
| 1229 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { |
| 1230 // The input is assumed to immediately have a tagged representation, |
| 1231 // before the pointer map can be used. I.e. the pointer map at the |
| 1232 // instruction will include the output operand (whose value at the |
| 1233 // beginning of the instruction is equal to the input operand). If |
| 1234 // this is not desired, then the pointer map at this instruction needs |
| 1235 // to be adjusted manually. |
| 1236 } |
| 1237 } |
| 1238 } |
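Splitting the old MeetConstraintsBetween into MeetConstraintsAfter and MeetConstraintsBefore also pins down the indices: for an instruction at index i, fixed inputs are satisfied in i's own gap, while fixed outputs are copied away in the gap of i + 1. A placement sketch grounded in the two AddGapMove calls above (operand names invented):

    // MeetConstraintsBefore(i): the value is in place before i executes.
    AddGapMove(i, Instruction::END, input_copy, fixed_input);
    // MeetConstraintsAfter(i): the fixed register is freed right after i
    // produced the value -- hence the `if (i != end)` guard in
    // MeetRegisterConstraints.
    AddGapMove(i + 1, Instruction::START, fixed_output, output_copy);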
| 1263 | 1239 |
| 1264 | 1240 |
| 1265 bool RegisterAllocator::IsOutputRegisterOf(Instruction* instr, int index) { | 1241 bool RegisterAllocator::IsOutputRegisterOf(Instruction* instr, int index) { |
| 1266 for (size_t i = 0; i < instr->OutputCount(); i++) { | 1242 for (size_t i = 0; i < instr->OutputCount(); i++) { |
| 1267 auto output = instr->OutputAt(i); | 1243 auto output = instr->OutputAt(i); |
| 1268 if (output->IsRegister() && output->index() == index) return true; | 1244 if (output->IsRegister() && output->index() == index) return true; |
| 1269 } | 1245 } |
| 1270 return false; | 1246 return false; |
| 1271 } | 1247 } |
| 1272 | 1248 |
| 1273 | 1249 |
| 1274 bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr, | 1250 bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr, |
| 1275 int index) { | 1251 int index) { |
| 1276 for (size_t i = 0; i < instr->OutputCount(); i++) { | 1252 for (size_t i = 0; i < instr->OutputCount(); i++) { |
| 1277 auto output = instr->OutputAt(i); | 1253 auto output = instr->OutputAt(i); |
| 1278 if (output->IsDoubleRegister() && output->index() == index) return true; | 1254 if (output->IsDoubleRegister() && output->index() == index) return true; |
| 1279 } | 1255 } |
| 1280 return false; | 1256 return false; |
| 1281 } | 1257 } |
| 1282 | 1258 |
| 1283 | 1259 |
| 1284 void RegisterAllocator::ProcessInstructions(const InstructionBlock* block, | 1260 void RegisterAllocator::ProcessInstructions(const InstructionBlock* block, |
| 1285 BitVector* live) { | 1261 BitVector* live) { |
| 1286 int block_start = block->first_instruction_index(); | 1262 int block_start = block->first_instruction_index(); |
| 1287 auto block_start_position = | 1263 auto block_start_position = |
| 1288 LifetimePosition::FromInstructionIndex(block_start); | 1264 LifetimePosition::GapFromInstructionIndex(block_start); |
| 1289 | 1265 |
| 1290 for (int index = block->last_instruction_index(); index >= block_start; | 1266 for (int index = block->last_instruction_index(); index >= block_start; |
| 1291 index--) { | 1267 index--) { |
| 1292 auto curr_position = LifetimePosition::FromInstructionIndex(index); | 1268 auto curr_position = |
| 1269 LifetimePosition::InstructionFromInstructionIndex(index); |
| 1293 auto instr = InstructionAt(index); | 1270 auto instr = InstructionAt(index); |
| 1294 DCHECK(instr != nullptr); | 1271 DCHECK(instr != nullptr); |
| 1295 if (instr->IsGapMoves()) { | 1272 DCHECK(curr_position.IsInstructionPosition()); |
| 1296 // Process the moves of the gap instruction, making their sources live. | 1273 // Process output, inputs, and temps of this instruction. |
| 1297 auto gap = code()->GapAt(index); | 1274 for (size_t i = 0; i < instr->OutputCount(); i++) { |
| 1298 const GapInstruction::InnerPosition kPositions[] = { | 1275 auto output = instr->OutputAt(i); |
| 1299 GapInstruction::END, GapInstruction::START}; | 1276 if (output->IsUnallocated()) { |
| 1300 for (auto position : kPositions) { | 1277 // Unsupported. |
| 1301 auto move = gap->GetParallelMove(position); | 1278 DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy()); |
| 1302 if (move == nullptr) continue; | 1279 int out_vreg = UnallocatedOperand::cast(output)->virtual_register(); |
| 1303 if (position == GapInstruction::END) { | 1280 live->Remove(out_vreg); |
| 1304 curr_position = curr_position.InstructionEnd(); | 1281 } else if (output->IsConstant()) { |
| 1305 } else { | 1282 int out_vreg = output->index(); |
| 1306 curr_position = curr_position.InstructionStart(); | 1283 live->Remove(out_vreg); |
| 1284 } |
| 1285 Define(curr_position, output, nullptr); |
| 1286 } |
| 1287 |
| 1288 if (instr->ClobbersRegisters()) { |
| 1289 for (int i = 0; i < config()->num_general_registers(); ++i) { |
| 1290 if (!IsOutputRegisterOf(instr, i)) { |
| 1291 auto range = FixedLiveRangeFor(i); |
| 1292 range->AddUseInterval(curr_position, curr_position.End(), |
| 1293 local_zone()); |
| 1307 } | 1294 } |
| 1308 auto move_ops = move->move_operands(); | 1295 } |
| 1309 for (auto cur = move_ops->begin(); cur != move_ops->end(); ++cur) { | 1296 } |
| 1310 auto from = cur->source(); | 1297 |
| 1311 auto to = cur->destination(); | 1298 if (instr->ClobbersDoubleRegisters()) { |
| 1312 auto hint = to; | 1299 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { |
| 1313 if (to->IsUnallocated()) { | 1300 if (!IsOutputDoubleRegisterOf(instr, i)) { |
| 1314 int to_vreg = UnallocatedOperand::cast(to)->virtual_register(); | 1301 auto range = FixedDoubleLiveRangeFor(i); |
| 1315 auto to_range = LiveRangeFor(to_vreg); | 1302 range->AddUseInterval(curr_position, curr_position.End(), |
| 1316 if (to_range->is_phi()) { | 1303 local_zone()); |
| 1317 DCHECK(!FLAG_turbo_delay_ssa_decon); | 1304 } |
| 1318 if (to_range->is_non_loop_phi()) { | 1305 } |
| 1319 hint = to_range->current_hint_operand(); | 1306 } |
| 1320 } | 1307 |
| 1321 } else { | 1308 for (size_t i = 0; i < instr->InputCount(); i++) { |
| 1322 if (live->Contains(to_vreg)) { | 1309 auto input = instr->InputAt(i); |
| 1323 Define(curr_position, to, from); | 1310 if (input->IsImmediate()) continue; // Ignore immediates. |
| 1324 live->Remove(to_vreg); | 1311 LifetimePosition use_pos; |
| 1325 } else { | 1312 if (input->IsUnallocated() && |
| 1326 cur->Eliminate(); | 1313 UnallocatedOperand::cast(input)->IsUsedAtStart()) { |
| 1327 continue; | 1314 use_pos = curr_position; |
| 1328 } | 1315 } else { |
| 1329 } | 1316 use_pos = curr_position.End(); |
| 1330 } else { | 1317 } |
| 1331 Define(curr_position, to, from); | 1318 |
| 1332 } | 1319 if (input->IsUnallocated()) { |
| 1333 Use(block_start_position, curr_position, from, hint); | 1320 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input); |
| 1334 if (from->IsUnallocated()) { | 1321 int vreg = unalloc->virtual_register(); |
| 1335 live->Add(UnallocatedOperand::cast(from)->virtual_register()); | 1322 live->Add(vreg); |
| 1323 if (unalloc->HasSlotPolicy()) { |
| 1324 LiveRangeFor(vreg)->set_has_slot_use(true); |
| 1325 } |
| 1326 } |
| 1327 Use(block_start_position, use_pos, input, nullptr); |
| 1328 } |
| 1329 |
| 1330 for (size_t i = 0; i < instr->TempCount(); i++) { |
| 1331 auto temp = instr->TempAt(i); |
| 1332 // Unsupported. |
| 1333 DCHECK_IMPLIES(temp->IsUnallocated(), |
| 1334 !UnallocatedOperand::cast(temp)->HasSlotPolicy()); |
| 1335 if (instr->ClobbersTemps()) { |
| 1336 if (temp->IsRegister()) continue; |
| 1337 if (temp->IsUnallocated()) { |
| 1338 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp); |
| 1339 if (temp_unalloc->HasFixedPolicy()) { |
| 1340 continue; |
| 1336 } | 1341 } |
| 1337 } | 1342 } |
| 1338 } | 1343 } |
| 1339 } else { | 1344 Use(block_start_position, curr_position.End(), temp, nullptr); |
| 1340 // Process output, inputs, and temps of this non-gap instruction. | 1345 Define(curr_position, temp, nullptr); |
| 1341 for (size_t i = 0; i < instr->OutputCount(); i++) { | 1346 } |
| 1342 auto output = instr->OutputAt(i); | 1347 |
| 1343 if (output->IsUnallocated()) { | 1348 // Process the moves of the instruction's gaps, making their sources live. |
| 1344 // Unsupported. | 1349 const Instruction::GapPosition kPositions[] = {Instruction::END, |
| 1345 DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy()); | 1350 Instruction::START}; |
| 1346 int out_vreg = UnallocatedOperand::cast(output)->virtual_register(); | 1351 curr_position = curr_position.PrevStart(); |
| 1347 live->Remove(out_vreg); | 1352 DCHECK(curr_position.IsGapPosition()); |
| 1348 } else if (output->IsConstant()) { | 1353 for (auto position : kPositions) { |
| 1349 int out_vreg = output->index(); | 1354 auto move = instr->GetParallelMove(position); |
| 1350 live->Remove(out_vreg); | 1355 if (move == nullptr) continue; |
| 1351 } | 1356 if (position == Instruction::END) { |
| 1352 Define(curr_position, output, nullptr); | 1357 curr_position = curr_position.End(); |
| 1358 } else { |
| 1359 curr_position = curr_position.Start(); |
| 1353 } | 1360 } |
| 1354 | 1361 auto move_ops = move->move_operands(); |
| 1355 if (instr->ClobbersRegisters()) { | 1362 for (auto cur = move_ops->begin(); cur != move_ops->end(); ++cur) { |
| 1356 for (int i = 0; i < config()->num_general_registers(); ++i) { | 1363 auto from = cur->source(); |
| 1357 if (!IsOutputRegisterOf(instr, i)) { | 1364 auto to = cur->destination(); |
| 1358 auto range = FixedLiveRangeFor(i); | 1365 auto hint = to; |
| 1359 range->AddUseInterval(curr_position, curr_position.InstructionEnd(), | 1366 if (to->IsUnallocated()) { |
| 1360 local_zone()); | 1367 int to_vreg = UnallocatedOperand::cast(to)->virtual_register(); |
| 1361 } | 1368 auto to_range = LiveRangeFor(to_vreg); |
| 1362 } | 1369 if (to_range->is_phi()) { |
| 1363 } | 1370 DCHECK(!FLAG_turbo_delay_ssa_decon); |
| 1364 | 1371 if (to_range->is_non_loop_phi()) { |
| 1365 if (instr->ClobbersDoubleRegisters()) { | 1372 hint = to_range->current_hint_operand(); |
| 1366 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { | 1373 } |
| 1367 if (!IsOutputDoubleRegisterOf(instr, i)) { | 1374 } else { |
| 1368 auto range = FixedDoubleLiveRangeFor(i); | 1375 if (live->Contains(to_vreg)) { |
| 1369 range->AddUseInterval(curr_position, curr_position.InstructionEnd(), | 1376 Define(curr_position, to, from); |
| 1370 local_zone()); | 1377 live->Remove(to_vreg); |
| 1371 } | 1378 } else { |
| 1372 } | 1379 cur->Eliminate(); |
| 1373 } | |
| 1374 | |
| 1375 for (size_t i = 0; i < instr->InputCount(); i++) { | |
| 1376 auto input = instr->InputAt(i); | |
| 1377 if (input->IsImmediate()) continue; // Ignore immediates. | |
| 1378 LifetimePosition use_pos; | |
| 1379 if (input->IsUnallocated() && | |
| 1380 UnallocatedOperand::cast(input)->IsUsedAtStart()) { | |
| 1381 use_pos = curr_position; | |
| 1382 } else { | |
| 1383 use_pos = curr_position.InstructionEnd(); | |
| 1384 } | |
| 1385 | |
| 1386 if (input->IsUnallocated()) { | |
| 1387 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input); | |
| 1388 int vreg = unalloc->virtual_register(); | |
| 1389 live->Add(vreg); | |
| 1390 if (unalloc->HasSlotPolicy()) { | |
| 1391 LiveRangeFor(vreg)->set_has_slot_use(true); | |
| 1392 } | |
| 1393 } | |
| 1394 Use(block_start_position, use_pos, input, nullptr); | |
| 1395 } | |
| 1396 | |
| 1397 for (size_t i = 0; i < instr->TempCount(); i++) { | |
| 1398 auto temp = instr->TempAt(i); | |
| 1399 // Unsupported. | |
| 1400 DCHECK_IMPLIES(temp->IsUnallocated(), | |
| 1401 !UnallocatedOperand::cast(temp)->HasSlotPolicy()); | |
| 1402 if (instr->ClobbersTemps()) { | |
| 1403 if (temp->IsRegister()) continue; | |
| 1404 if (temp->IsUnallocated()) { | |
| 1405 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp); | |
| 1406 if (temp_unalloc->HasFixedPolicy()) { | |
| 1407 continue; | 1380 continue; |
| 1408 } | 1381 } |
| 1409 } | 1382 } |
| 1383 } else { |
| 1384 Define(curr_position, to, from); |
| 1410 } | 1385 } |
| 1411 Use(block_start_position, curr_position.InstructionEnd(), temp, | 1386 Use(block_start_position, curr_position, from, hint); |
| 1412 nullptr); | 1387 if (from->IsUnallocated()) { |
| 1413 Define(curr_position, temp, nullptr); | 1388 live->Add(UnallocatedOperand::cast(from)->virtual_register()); |
| 1389 } |
| 1414 } | 1390 } |
| 1415 } | 1391 } |
| 1416 } | 1392 } |
| 1417 } | 1393 } |
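The restructured ProcessInstructions now visits each index in a fixed backward order: the instruction's outputs, clobbers, inputs, and temps at the instruction position first, then the two gap-move slots in front of it. A condensed sketch of the position bookkeeping, assuming the four-positions-per-index encoding sketched after the CanBeSpilled hunk:

    // Backward liveness walk for one instruction index, mirroring the loop above.
    auto curr_position =
        LifetimePosition::InstructionFromInstructionIndex(index);  // index*4 + 2
    // ...Define() outputs, Use() inputs and temps at curr_position...
    curr_position = curr_position.PrevStart();  // index*4: the gap in front
    // Gap moves are then processed at END (index*4 + 1) before START (index*4),
    // i.e. latest position first, eliminating moves whose destination is dead.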
| 1418 | 1394 |
| 1419 | 1395 |
| 1420 void RegisterAllocator::ResolvePhis(const InstructionBlock* block) { | 1396 void RegisterAllocator::ResolvePhis(const InstructionBlock* block) { |
| 1421 for (auto phi : block->phis()) { | 1397 for (auto phi : block->phis()) { |
| 1422 int phi_vreg = phi->virtual_register(); | 1398 int phi_vreg = phi->virtual_register(); |
| 1423 auto res = | 1399 auto res = |
| 1424 phi_map_.insert(std::make_pair(phi_vreg, PhiMapValue(phi, block))); | 1400 phi_map_.insert(std::make_pair(phi_vreg, PhiMapValue(phi, block))); |
| 1425 DCHECK(res.second); | 1401 DCHECK(res.second); |
| 1426 USE(res); | 1402 USE(res); |
| 1427 auto& output = phi->output(); | 1403 auto& output = phi->output(); |
| 1428 if (!FLAG_turbo_delay_ssa_decon) { | 1404 if (!FLAG_turbo_delay_ssa_decon) { |
| 1429 for (size_t i = 0; i < phi->operands().size(); ++i) { | 1405 for (size_t i = 0; i < phi->operands().size(); ++i) { |
| 1430 InstructionBlock* cur_block = | 1406 InstructionBlock* cur_block = |
| 1431 code()->InstructionBlockAt(block->predecessors()[i]); | 1407 code()->InstructionBlockAt(block->predecessors()[i]); |
| 1432 AddGapMove(cur_block->last_instruction_index() - 1, GapInstruction::END, | 1408 AddGapMove(cur_block->last_instruction_index(), Instruction::END, |
| 1433 &phi->inputs()[i], &output); | 1409 &phi->inputs()[i], &output); |
| 1434 DCHECK(!InstructionAt(cur_block->last_instruction_index()) | 1410 DCHECK(!InstructionAt(cur_block->last_instruction_index()) |
| 1435 ->HasPointerMap()); | 1411 ->HasPointerMap()); |
| 1436 } | 1412 } |
| 1437 } | 1413 } |
| 1438 auto live_range = LiveRangeFor(phi_vreg); | 1414 auto live_range = LiveRangeFor(phi_vreg); |
| 1439 int gap_index = block->first_instruction_index(); | 1415 int gap_index = block->first_instruction_index(); |
| 1440 live_range->SpillAtDefinition(local_zone(), gap_index, &output); | 1416 live_range->SpillAtDefinition(local_zone(), gap_index, &output); |
| 1441 live_range->SetSpillStartIndex(gap_index); | 1417 live_range->SetSpillStartIndex(gap_index); |
| 1442 // We use the phi-ness of some nodes in some later heuristics. | 1418 // We use the phi-ness of some nodes in some later heuristics. |
| (...skipping 14 matching lines...) Expand all Loading... |
| 1457 // Process the blocks in reverse order. | 1433 // Process the blocks in reverse order. |
| 1458 for (auto i = code()->instruction_blocks().rbegin(); | 1434 for (auto i = code()->instruction_blocks().rbegin(); |
| 1459 i != code()->instruction_blocks().rend(); ++i) { | 1435 i != code()->instruction_blocks().rend(); ++i) { |
| 1460 ResolvePhis(*i); | 1436 ResolvePhis(*i); |
| 1461 } | 1437 } |
| 1462 } | 1438 } |
| 1463 | 1439 |
| 1464 | 1440 |
| 1465 const InstructionBlock* RegisterAllocator::GetInstructionBlock( | 1441 const InstructionBlock* RegisterAllocator::GetInstructionBlock( |
| 1466 LifetimePosition pos) { | 1442 LifetimePosition pos) { |
| 1467 return code()->GetInstructionBlock(pos.InstructionIndex()); | 1443 return code()->GetInstructionBlock(pos.ToInstructionIndex()); |
| 1468 } | 1444 } |
| 1469 | 1445 |
| 1470 | 1446 |
| 1471 void RegisterAllocator::ConnectRanges() { | 1447 void RegisterAllocator::ConnectRanges() { |
| 1472 ZoneMap<std::pair<ParallelMove*, InstructionOperand*>, InstructionOperand*> | 1448 ZoneMap<std::pair<ParallelMove*, InstructionOperand*>, InstructionOperand*> |
| 1473 delayed_insertion_map(local_zone()); | 1449 delayed_insertion_map(local_zone()); |
| 1474 for (auto first_range : live_ranges()) { | 1450 for (auto first_range : live_ranges()) { |
| 1475 if (first_range == nullptr || first_range->IsChild()) continue; | 1451 if (first_range == nullptr || first_range->IsChild()) continue; |
| 1476 for (auto second_range = first_range->next(); second_range != nullptr; | 1452 for (auto second_range = first_range->next(); second_range != nullptr; |
| 1477 first_range = second_range, second_range = second_range->next()) { | 1453 first_range = second_range, second_range = second_range->next()) { |
| 1478 auto pos = second_range->Start(); | 1454 auto pos = second_range->Start(); |
| 1479 // Add gap move if the two live ranges touch and there is no block | 1455 // Add gap move if the two live ranges touch and there is no block |
| 1480 // boundary. | 1456 // boundary. |
| 1481 if (second_range->IsSpilled()) continue; | 1457 if (second_range->IsSpilled()) continue; |
| 1482 if (first_range->End().Value() != pos.Value()) continue; | 1458 if (first_range->End().Value() != pos.Value()) continue; |
| 1483 if (IsBlockBoundary(pos) && | 1459 if (IsBlockBoundary(pos) && |
| 1484 !CanEagerlyResolveControlFlow(GetInstructionBlock(pos))) { | 1460 !CanEagerlyResolveControlFlow(GetInstructionBlock(pos))) { |
| 1485 continue; | 1461 continue; |
| 1486 } | 1462 } |
| 1487 auto prev_operand = first_range->GetAssignedOperand(operand_cache()); | 1463 auto prev_operand = first_range->GetAssignedOperand(operand_cache()); |
| 1488 auto cur_operand = second_range->GetAssignedOperand(operand_cache()); | 1464 auto cur_operand = second_range->GetAssignedOperand(operand_cache()); |
| 1489 if (prev_operand->Equals(cur_operand)) continue; | 1465 if (prev_operand->Equals(cur_operand)) continue; |
| 1490 int index = pos.InstructionIndex(); | |
| 1491 bool delay_insertion = false; | 1466 bool delay_insertion = false; |
| 1492 GapInstruction::InnerPosition gap_pos; | 1467 Instruction::GapPosition gap_pos; |
| 1493 int gap_index = index; | 1468 int gap_index = pos.ToInstructionIndex(); |
| 1494 if (code()->IsGapAt(index)) { | 1469 if (pos.IsGapPosition()) { |
| 1495 gap_pos = pos.IsInstructionStart() ? GapInstruction::START | 1470 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END; |
| 1496 : GapInstruction::END; | |
| 1497 } else { | 1471 } else { |
| 1498 gap_index = pos.IsInstructionStart() ? (index - 1) : (index + 1); | 1472 if (pos.IsStart()) { |
| 1499 delay_insertion = gap_index < index; | 1473 delay_insertion = true; |
| 1500 gap_pos = delay_insertion ? GapInstruction::END : GapInstruction::START; | 1474 } else { |
| 1475 gap_index++; |
| 1476 } |
| 1477 gap_pos = delay_insertion ? Instruction::END : Instruction::START; |
| 1501 } | 1478 } |
| 1502 auto move = code()->GapAt(gap_index)->GetOrCreateParallelMove( | 1479 auto move = code()->InstructionAt(gap_index)->GetOrCreateParallelMove( |
| 1503 gap_pos, code_zone()); | 1480 gap_pos, code_zone()); |
| 1504 if (!delay_insertion) { | 1481 if (!delay_insertion) { |
| 1505 move->AddMove(prev_operand, cur_operand, code_zone()); | 1482 move->AddMove(prev_operand, cur_operand, code_zone()); |
| 1506 } else { | 1483 } else { |
| 1507 delayed_insertion_map.insert( | 1484 delayed_insertion_map.insert( |
| 1508 std::make_pair(std::make_pair(move, prev_operand), cur_operand)); | 1485 std::make_pair(std::make_pair(move, prev_operand), cur_operand)); |
| 1509 } | 1486 } |
| 1510 } | 1487 } |
| 1511 } | 1488 } |
| 1512 if (delayed_insertion_map.empty()) return; | 1489 if (delayed_insertion_map.empty()) return; |
| (...skipping 92 matching lines...) |
| 1605 if (position.Value() < bound->end_.Value()) return bound; | 1582 if (position.Value() < bound->end_.Value()) return bound; |
| 1606 DCHECK(left_index < current_index); | 1583 DCHECK(left_index < current_index); |
| 1607 left_index = current_index; | 1584 left_index = current_index; |
| 1608 } else { | 1585 } else { |
| 1609 right_index = current_index; | 1586 right_index = current_index; |
| 1610 } | 1587 } |
| 1611 } | 1588 } |
| 1612 } | 1589 } |
| 1613 | 1590 |
| 1614 LiveRangeBound* FindPred(const InstructionBlock* pred) { | 1591 LiveRangeBound* FindPred(const InstructionBlock* pred) { |
| 1615 auto pred_end = | 1592 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( |
| 1616 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); | 1593 pred->last_instruction_index()); |
| 1617 return Find(pred_end); | 1594 return Find(pred_end); |
| 1618 } | 1595 } |
| 1619 | 1596 |
| 1620 LiveRangeBound* FindSucc(const InstructionBlock* succ) { | 1597 LiveRangeBound* FindSucc(const InstructionBlock* succ) { |
| 1621 auto succ_start = | 1598 auto succ_start = LifetimePosition::GapFromInstructionIndex( |
| 1622 LifetimePosition::FromInstructionIndex(succ->first_instruction_index()); | 1599 succ->first_instruction_index()); |
| 1623 return Find(succ_start); | 1600 return Find(succ_start); |
| 1624 } | 1601 } |
| 1625 | 1602 |
| 1626 void Find(const InstructionBlock* block, const InstructionBlock* pred, | 1603 void Find(const InstructionBlock* block, const InstructionBlock* pred, |
| 1627 FindResult* result) const { | 1604 FindResult* result) const { |
| 1628 auto pred_end = | 1605 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( |
| 1629 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); | 1606 pred->last_instruction_index()); |
| 1630 auto bound = Find(pred_end); | 1607 auto bound = Find(pred_end); |
| 1631 result->pred_cover_ = bound->range_; | 1608 result->pred_cover_ = bound->range_; |
| 1632 auto cur_start = LifetimePosition::FromInstructionIndex( | 1609 auto cur_start = LifetimePosition::GapFromInstructionIndex( |
| 1633 block->first_instruction_index()); | 1610 block->first_instruction_index()); |
| 1634 // Common case. | 1611 // Common case. |
| 1635 if (bound->CanCover(cur_start)) { | 1612 if (bound->CanCover(cur_start)) { |
| 1636 result->cur_cover_ = bound->range_; | 1613 result->cur_cover_ = bound->range_; |
| 1637 return; | 1614 return; |
| 1638 } | 1615 } |
| 1639 result->cur_cover_ = Find(cur_start)->range_; | 1616 result->cur_cover_ = Find(cur_start)->range_; |
| 1640 DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr); | 1617 DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr); |
| 1641 } | 1618 } |
| 1642 | 1619 |
| (...skipping 85 matching lines...) |
| 1728 } | 1705 } |
| 1729 } | 1706 } |
| 1730 | 1707 |
| 1731 | 1708 |
| 1732 void RegisterAllocator::ResolveControlFlow(const InstructionBlock* block, | 1709 void RegisterAllocator::ResolveControlFlow(const InstructionBlock* block, |
| 1733 InstructionOperand* cur_op, | 1710 InstructionOperand* cur_op, |
| 1734 const InstructionBlock* pred, | 1711 const InstructionBlock* pred, |
| 1735 InstructionOperand* pred_op) { | 1712 InstructionOperand* pred_op) { |
| 1736 if (pred_op->Equals(cur_op)) return; | 1713 if (pred_op->Equals(cur_op)) return; |
| 1737 int gap_index; | 1714 int gap_index; |
| 1738 GapInstruction::InnerPosition position; | 1715 Instruction::GapPosition position; |
| 1739 if (block->PredecessorCount() == 1) { | 1716 if (block->PredecessorCount() == 1) { |
| 1740 gap_index = block->first_instruction_index(); | 1717 gap_index = block->first_instruction_index(); |
| 1741 position = GapInstruction::START; | 1718 position = Instruction::START; |
| 1742 } else { | 1719 } else { |
| 1743 DCHECK(pred->SuccessorCount() == 1); | 1720 DCHECK(pred->SuccessorCount() == 1); |
| 1744 DCHECK(!InstructionAt(pred->last_instruction_index())->HasPointerMap()); | 1721 DCHECK(!InstructionAt(pred->last_instruction_index())->HasPointerMap()); |
| 1745 gap_index = pred->last_instruction_index() - 1; | 1722 gap_index = pred->last_instruction_index(); |
| 1746 position = GapInstruction::END; | 1723 position = Instruction::END; |
| 1747 } | 1724 } |
| 1748 AddGapMove(gap_index, position, pred_op, cur_op); | 1725 AddGapMove(gap_index, position, pred_op, cur_op); |
| 1749 } | 1726 } |
| 1750 | 1727 |
| 1751 | 1728 |
| 1752 void RegisterAllocator::BuildLiveRanges() { | 1729 void RegisterAllocator::BuildLiveRanges() { |
| 1753 // Process the blocks in reverse order. | 1730 // Process the blocks in reverse order. |
| 1754 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0; | 1731 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0; |
| 1755 --block_id) { | 1732 --block_id) { |
| 1756 auto block = code()->InstructionBlockAt(RpoNumber::FromInt(block_id)); | 1733 auto block = code()->InstructionBlockAt(RpoNumber::FromInt(block_id)); |
| 1757 auto live = ComputeLiveOut(block); | 1734 auto live = ComputeLiveOut(block); |
| 1758 // Initially consider all live_out values live for the entire block. We | 1735 // Initially consider all live_out values live for the entire block. We |
| 1759 // will shorten these intervals if necessary. | 1736 // will shorten these intervals if necessary. |
| 1760 AddInitialIntervals(block, live); | 1737 AddInitialIntervals(block, live); |
| 1761 | 1738 |
| 1762 // Process the instructions in reverse order, generating and killing | 1739 // Process the instructions in reverse order, generating and killing |
| 1763 // live values. | 1740 // live values. |
| 1764 ProcessInstructions(block, live); | 1741 ProcessInstructions(block, live); |
| 1765 // All phi output operands are killed by this block. | 1742 // All phi output operands are killed by this block. |
| 1766 for (auto phi : block->phis()) { | 1743 for (auto phi : block->phis()) { |
| 1767 // The live range interval already ends at the first instruction of the | 1744 // The live range interval already ends at the first instruction of the |
| 1768 // block. | 1745 // block. |
| 1769 int phi_vreg = phi->virtual_register(); | 1746 int phi_vreg = phi->virtual_register(); |
| 1770 live->Remove(phi_vreg); | 1747 live->Remove(phi_vreg); |
| 1771 if (!FLAG_turbo_delay_ssa_decon) { | 1748 if (!FLAG_turbo_delay_ssa_decon) { |
| 1772 InstructionOperand* hint = nullptr; | 1749 InstructionOperand* hint = nullptr; |
| 1773 InstructionOperand* phi_operand = nullptr; | 1750 InstructionOperand* phi_operand = nullptr; |
| 1774 auto gap = | 1751 auto instr = GetLastInstruction( |
| 1775 GetLastGap(code()->InstructionBlockAt(block->predecessors()[0])); | 1752 code()->InstructionBlockAt(block->predecessors()[0])); |
| 1776 auto move = | 1753 auto move = instr->GetParallelMove(Instruction::END); |
| 1777 gap->GetOrCreateParallelMove(GapInstruction::END, code_zone()); | |
| 1778 for (int j = 0; j < move->move_operands()->length(); ++j) { | 1754 for (int j = 0; j < move->move_operands()->length(); ++j) { |
| 1779 auto to = move->move_operands()->at(j).destination(); | 1755 auto to = move->move_operands()->at(j).destination(); |
| 1780 if (to->IsUnallocated() && | 1756 if (to->IsUnallocated() && |
| 1781 UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) { | 1757 UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) { |
| 1782 hint = move->move_operands()->at(j).source(); | 1758 hint = move->move_operands()->at(j).source(); |
| 1783 phi_operand = to; | 1759 phi_operand = to; |
| 1784 break; | 1760 break; |
| 1785 } | 1761 } |
| 1786 } | 1762 } |
| 1787 DCHECK(hint != nullptr); | 1763 DCHECK(hint != nullptr); |
| 1788 auto block_start = LifetimePosition::FromInstructionIndex( | 1764 auto block_start = LifetimePosition::GapFromInstructionIndex( |
| 1789 block->first_instruction_index()); | 1765 block->first_instruction_index()); |
| 1790 Define(block_start, phi_operand, hint); | 1766 Define(block_start, phi_operand, hint); |
| 1791 } | 1767 } |
| 1792 } | 1768 } |
| 1793 | 1769 |
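The phi handling above looks through the first predecessor's END parallel move for the move whose destination is the phi's virtual register, and records that move's source as an allocation hint. A stripped-down sketch of the scan, with simplified stand-in types:

    #include <vector>

    // Simplified stand-ins for the operand and move types.
    struct Operand { bool is_unallocated; int virtual_register; };
    struct Move { Operand* source; Operand* destination; };

    // Returns the source of the move that writes the phi's virtual register,
    // to be used as a register-allocation hint; nullptr if there is none
    // (the allocator DCHECKs that this cannot happen).
    Operand* FindPhiHint(const std::vector<Move>& end_moves, int phi_vreg) {
      for (const Move& move : end_moves) {
        Operand* to = move.destination;
        if (to->is_unallocated && to->virtual_register == phi_vreg) {
          return move.source;  // The phi input flowing in along this edge.
        }
      }
      return nullptr;
    }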
| 1794 // Now live is the live_in set for this block, except that it does not | 1770 // Now live is the live_in set for this block, except that it does not |
| 1795 // yet include values live out on backward successor edges. | 1771 // yet include values live out on backward successor edges. |
| 1796 live_in_sets_[block_id] = live; | 1772 live_in_sets_[block_id] = live; |
| 1797 | 1773 |
| 1798 if (block->IsLoopHeader()) { | 1774 if (block->IsLoopHeader()) { |
| 1799 // Add a live range stretching from the first loop instruction to the last | 1775 // Add a live range stretching from the first loop instruction to the last |
| 1800 // for each value live on entry to the header. | 1776 // for each value live on entry to the header. |
| 1801 BitVector::Iterator iterator(live); | 1777 BitVector::Iterator iterator(live); |
| 1802 auto start = LifetimePosition::FromInstructionIndex( | 1778 auto start = LifetimePosition::GapFromInstructionIndex( |
| 1803 block->first_instruction_index()); | 1779 block->first_instruction_index()); |
| 1804 auto end = LifetimePosition::FromInstructionIndex( | 1780 auto end = LifetimePosition::GapFromInstructionIndex( |
| 1805 code()->LastLoopInstructionIndex(block)).NextInstruction(); | 1781 code()->LastLoopInstructionIndex(block)).NextFullStart(); |
| 1806 while (!iterator.Done()) { | 1782 while (!iterator.Done()) { |
| 1807 int operand_index = iterator.Current(); | 1783 int operand_index = iterator.Current(); |
| 1808 auto range = LiveRangeFor(operand_index); | 1784 auto range = LiveRangeFor(operand_index); |
| 1809 range->EnsureInterval(start, end, local_zone()); | 1785 range->EnsureInterval(start, end, local_zone()); |
| 1810 iterator.Advance(); | 1786 iterator.Advance(); |
| 1811 } | 1787 } |
| 1812 // Insert all values into the live in sets of all blocks in the loop. | 1788 // Insert all values into the live in sets of all blocks in the loop. |
| 1813 for (int i = block->rpo_number().ToInt() + 1; | 1789 for (int i = block->rpo_number().ToInt() + 1; |
| 1814 i < block->loop_end().ToInt(); ++i) { | 1790 i < block->loop_end().ToInt(); ++i) { |
| 1815 live_in_sets_[i]->Union(*live); | 1791 live_in_sets_[i]->Union(*live); |
| (...skipping 10 matching lines...) |
| 1826 } | 1802 } |
| 1827 // TODO(bmeurer): This is a horrible hack to make sure that for constant | 1803 // TODO(bmeurer): This is a horrible hack to make sure that for constant |
| 1828 // live ranges, every use requires the constant to be in a register. | 1804 // live ranges, every use requires the constant to be in a register. |
| 1829 // Without this hack, all uses with "any" policy would get the constant | 1805 // Without this hack, all uses with "any" policy would get the constant |
| 1830 // operand assigned. | 1806 // operand assigned. |
| 1831 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) { | 1807 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) { |
| 1832 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next_) { | 1808 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next_) { |
| 1833 if (pos->type() == UsePositionType::kRequiresSlot) continue; | 1809 if (pos->type() == UsePositionType::kRequiresSlot) continue; |
| 1834 UsePositionType new_type = UsePositionType::kAny; | 1810 UsePositionType new_type = UsePositionType::kAny; |
| 1835 // Can't mark phis as needing a register. | 1811 // Can't mark phis as needing a register. |
| 1836 if (!code() | 1812 if (!pos->pos().IsGapPosition()) { |
| 1837 ->InstructionAt(pos->pos().InstructionIndex()) | |
| 1838 ->IsGapMoves()) { | |
| 1839 new_type = UsePositionType::kRequiresRegister; | 1813 new_type = UsePositionType::kRequiresRegister; |
| 1840 } | 1814 } |
| 1841 pos->set_type(new_type, true); | 1815 pos->set_type(new_type, true); |
| 1842 } | 1816 } |
| 1843 } | 1817 } |
| 1844 } | 1818 } |
| 1845 } | 1819 } |
| 1846 | 1820 |
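BuildLiveRanges is a textbook backward liveness pass: blocks in reverse order, instructions in reverse within each block, and for loop headers every value live on entry is kept live across the whole loop body. A compact sketch of the loop-header widening step, under simplified stand-in types:

    #include <set>
    #include <vector>

    struct Interval { int start, end; };

    struct LiveRange {
      std::vector<Interval> intervals;
      // Simplified: just append; the real allocator merges overlaps.
      void EnsureInterval(int start, int end) {
        intervals.push_back({start, end});
      }
    };

    // For a loop header, stretch every value live on entry across the whole
    // loop, and union the live set into the live-in set of every body block.
    void WidenForLoop(const std::set<int>& live, int loop_start, int loop_end,
                      std::vector<LiveRange>& ranges,
                      std::vector<std::set<int>>& live_in_sets,
                      int header_rpo, int loop_end_rpo) {
      for (int vreg : live) ranges[vreg].EnsureInterval(loop_start, loop_end);
      for (int i = header_rpo + 1; i < loop_end_rpo; ++i) {
        live_in_sets[i].insert(live.begin(), live.end());
      }
    }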
| 1847 | 1821 |
| 1848 bool RegisterAllocator::ExistsUseWithoutDefinition() { | 1822 bool RegisterAllocator::ExistsUseWithoutDefinition() { |
| (...skipping 38 matching lines...) |
| 1887 for (LiveRange* range : live_ranges()) { | 1861 for (LiveRange* range : live_ranges()) { |
| 1888 if (range == nullptr) continue; | 1862 if (range == nullptr) continue; |
| 1889 // Iterate over the first parts of multi-part live ranges. | 1863 // Iterate over the first parts of multi-part live ranges. |
| 1890 if (range->IsChild()) continue; | 1864 if (range->IsChild()) continue; |
| 1891 // Skip non-reference values. | 1865 // Skip non-reference values. |
| 1892 if (!HasTaggedValue(range->id())) continue; | 1866 if (!HasTaggedValue(range->id())) continue; |
| 1893 // Skip empty live ranges. | 1867 // Skip empty live ranges. |
| 1894 if (range->IsEmpty()) continue; | 1868 if (range->IsEmpty()) continue; |
| 1895 | 1869 |
| 1896 // Find the extent of the range and its children. | 1870 // Find the extent of the range and its children. |
| 1897 int start = range->Start().InstructionIndex(); | 1871 int start = range->Start().ToInstructionIndex(); |
| 1898 int end = 0; | 1872 int end = 0; |
| 1899 for (auto cur = range; cur != nullptr; cur = cur->next()) { | 1873 for (auto cur = range; cur != nullptr; cur = cur->next()) { |
| 1900 auto this_end = cur->End(); | 1874 auto this_end = cur->End(); |
| 1901 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex(); | 1875 if (this_end.ToInstructionIndex() > end) |
| 1902 DCHECK(cur->Start().InstructionIndex() >= start); | 1876 end = this_end.ToInstructionIndex(); |
| 1877 DCHECK(cur->Start().ToInstructionIndex() >= start); |
| 1903 } | 1878 } |
| 1904 | 1879 |
| 1905 // Most of the ranges are in order, but not all. When a range starts | 1880 // Most of the ranges are in order, but not all. When a range starts |
| 1906 // before its predecessor, reset first_it so we don't miss any safe points. | 1881 // before its predecessor, reset first_it so we don't miss any safe points. |
| 1907 if (start < last_range_start) first_it = pointer_maps->begin(); | 1882 if (start < last_range_start) first_it = pointer_maps->begin(); |
| 1908 last_range_start = start; | 1883 last_range_start = start; |
| 1909 | 1884 |
| 1910 // Step across all the safe points that are before the start of this range, | 1885 // Step across all the safe points that are before the start of this range, |
| 1911 // recording how far we stepped so the next range need not repeat the walk. | 1886 // recording how far we stepped so the next range need not repeat the walk. |
| 1912 for (; first_it != pointer_maps->end(); ++first_it) { | 1887 for (; first_it != pointer_maps->end(); ++first_it) { |
| 1913 auto map = *first_it; | 1888 auto map = *first_it; |
| 1914 if (map->instruction_position() >= start) break; | 1889 if (map->instruction_position() >= start) break; |
| 1915 } | 1890 } |
| 1916 | 1891 |
| 1917 // Step through the safe points to see whether they are in the range. | 1892 // Step through the safe points to see whether they are in the range. |
| 1918 for (auto it = first_it; it != pointer_maps->end(); ++it) { | 1893 for (auto it = first_it; it != pointer_maps->end(); ++it) { |
| 1919 auto map = *it; | 1894 auto map = *it; |
| 1920 int safe_point = map->instruction_position(); | 1895 int safe_point = map->instruction_position(); |
| 1921 | 1896 |
| 1922 // The safe points are sorted so we can stop searching here. | 1897 // The safe points are sorted so we can stop searching here. |
| 1923 if (safe_point - 1 > end) break; | 1898 if (safe_point - 1 > end) break; |
| 1924 | 1899 |
| 1925 // Advance to the next active range that covers the current | 1900 // Advance to the next active range that covers the current |
| 1926 // safe point position. | 1901 // safe point position. |
| 1927 auto safe_point_pos = LifetimePosition::FromInstructionIndex(safe_point); | 1902 auto safe_point_pos = |
| 1903 LifetimePosition::InstructionFromInstructionIndex(safe_point); |
| 1928 auto cur = range; | 1904 auto cur = range; |
| 1929 while (cur != nullptr && !cur->Covers(safe_point_pos)) { | 1905 while (cur != nullptr && !cur->Covers(safe_point_pos)) { |
| 1930 cur = cur->next(); | 1906 cur = cur->next(); |
| 1931 } | 1907 } |
| 1932 if (cur == nullptr) continue; | 1908 if (cur == nullptr) continue; |
| 1933 | 1909 |
| 1934 // Check if the live range is spilled and the safe point is after | 1910 // Check if the live range is spilled and the safe point is after |
| 1935 // the spill position. | 1911 // the spill position. |
| 1936 if (range->HasSpillOperand() && | 1912 if (range->HasSpillOperand() && |
| 1937 safe_point >= range->spill_start_index() && | 1913 safe_point >= range->spill_start_index() && |
| (...skipping 69 matching lines...) |
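The reference-map pass above exploits the fact that pointer maps are sorted and live ranges are mostly sorted by start: it keeps a persistent iterator into the maps and rewinds it only when a range steps backwards. A sketch of that mostly-monotonic two-pointer walk, with simplified stand-in types:

    #include <vector>

    struct PointerMap { int instruction_position; };
    struct RangeExtent { int start, end; };  // Extent of a range + children.

    // Calls visit(range, map) for every safe point that falls inside a range;
    // maps must be sorted, ranges need only be mostly sorted.
    template <typename Visitor>
    void WalkSafePoints(const std::vector<PointerMap>& maps,
                        const std::vector<RangeExtent>& ranges, Visitor visit) {
      auto first_it = maps.begin();
      int last_range_start = 0;
      for (const RangeExtent& range : ranges) {
        // Rewind when a range steps backwards so no safe point is missed.
        if (range.start < last_range_start) first_it = maps.begin();
        last_range_start = range.start;
        // Skip safe points before this range, remembering how far we got.
        while (first_it != maps.end() &&
               first_it->instruction_position < range.start) {
          ++first_it;
        }
        // Safe points are sorted, so stop once we pass the range's end.
        for (auto it = first_it; it != maps.end(); ++it) {
          if (it->instruction_position - 1 > range.end) break;
          visit(range, *it);
        }
      }
    }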
| 2007 DCHECK(UnhandledIsSorted()); | 1983 DCHECK(UnhandledIsSorted()); |
| 2008 auto position = current->Start(); | 1984 auto position = current->Start(); |
| 2009 #ifdef DEBUG | 1985 #ifdef DEBUG |
| 2010 allocation_finger_ = position; | 1986 allocation_finger_ = position; |
| 2011 #endif | 1987 #endif |
| 2012 TRACE("Processing interval %d start=%d\n", current->id(), position.Value()); | 1988 TRACE("Processing interval %d start=%d\n", current->id(), position.Value()); |
| 2013 | 1989 |
| 2014 if (!current->HasNoSpillType()) { | 1990 if (!current->HasNoSpillType()) { |
| 2015 TRACE("Live range %d already has a spill operand\n", current->id()); | 1991 TRACE("Live range %d already has a spill operand\n", current->id()); |
| 2016 auto next_pos = position; | 1992 auto next_pos = position; |
| 2017 if (code()->IsGapAt(next_pos.InstructionIndex())) { | 1993 if (next_pos.IsGapPosition()) { |
| 2018 next_pos = next_pos.NextInstruction(); | 1994 next_pos = next_pos.NextStart(); |
| 2019 } | 1995 } |
| 2020 auto pos = current->NextUsePositionRegisterIsBeneficial(next_pos); | 1996 auto pos = current->NextUsePositionRegisterIsBeneficial(next_pos); |
| 2021 // If the range already has a spill operand and it doesn't need a | 1997 // If the range already has a spill operand and it doesn't need a |
| 2022 // register immediately, split it and spill the first part of the range. | 1998 // register immediately, split it and spill the first part of the range. |
| 2023 if (pos == nullptr) { | 1999 if (pos == nullptr) { |
| 2024 Spill(current); | 2000 Spill(current); |
| 2025 continue; | 2001 continue; |
| 2026 } else if (pos->pos().Value() > | 2002 } else if (pos->pos().Value() > current->Start().NextStart().Value()) { |
| 2027 current->Start().NextInstruction().Value()) { | |
| 2028 // Do not spill the live range eagerly if the first use that benefits from | 2003 // Do not spill the live range eagerly if the first use that benefits from |
| 2029 // a register is too close to the start of the live range. | 2004 // a register is too close to the start of the live range. |
| 2030 SpillBetween(current, current->Start(), pos->pos()); | 2005 SpillBetween(current, current->Start(), pos->pos()); |
| 2031 DCHECK(UnhandledIsSorted()); | 2006 DCHECK(UnhandledIsSorted()); |
| 2032 continue; | 2007 continue; |
| 2033 } | 2008 } |
| 2034 } | 2009 } |
| 2035 | 2010 |
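The branch above is the usual linear-scan shortcut for ranges that already own a spill slot: with no register-beneficial use at all, spill the whole range; with the first such use far enough away, spill only the prefix up to it; otherwise compete for a register. A sketch of just that decision, with hypothetical names and integer positions:

    enum class Action { kSpillWhole, kSpillPrefix, kTryAllocate };

    // next_beneficial_use is the position of the first use that benefits
    // from a register, or -1 if there is none.
    Action DecideForPresplitRange(int start, int next_beneficial_use) {
      if (next_beneficial_use < 0) {
        return Action::kSpillWhole;   // No use needs a register: spill it all.
      }
      if (next_beneficial_use > start + 1) {
        return Action::kSpillPrefix;  // Spill [start, first beneficial use).
      }
      return Action::kTryAllocate;    // The use is imminent; seek a register.
    }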
| 2036 if (TryReuseSpillForPhi(current)) continue; | 2011 if (TryReuseSpillForPhi(current)) continue; |
| 2037 | 2012 |
| (...skipping 151 matching lines...) |
| 2189 | 2164 |
| 2190 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { | 2165 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { |
| 2191 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2166 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2192 | 2167 |
| 2193 for (int i = 0; i < num_registers_; i++) { | 2168 for (int i = 0; i < num_registers_; i++) { |
| 2194 free_until_pos[i] = LifetimePosition::MaxPosition(); | 2169 free_until_pos[i] = LifetimePosition::MaxPosition(); |
| 2195 } | 2170 } |
| 2196 | 2171 |
| 2197 for (auto cur_active : active_live_ranges()) { | 2172 for (auto cur_active : active_live_ranges()) { |
| 2198 free_until_pos[cur_active->assigned_register()] = | 2173 free_until_pos[cur_active->assigned_register()] = |
| 2199 LifetimePosition::FromInstructionIndex(0); | 2174 LifetimePosition::GapFromInstructionIndex(0); |
| 2200 } | 2175 } |
| 2201 | 2176 |
| 2202 for (auto cur_inactive : inactive_live_ranges()) { | 2177 for (auto cur_inactive : inactive_live_ranges()) { |
| 2203 DCHECK(cur_inactive->End().Value() > current->Start().Value()); | 2178 DCHECK(cur_inactive->End().Value() > current->Start().Value()); |
| 2204 auto next_intersection = cur_inactive->FirstIntersection(current); | 2179 auto next_intersection = cur_inactive->FirstIntersection(current); |
| 2205 if (!next_intersection.IsValid()) continue; | 2180 if (!next_intersection.IsValid()) continue; |
| 2206 int cur_reg = cur_inactive->assigned_register(); | 2181 int cur_reg = cur_inactive->assigned_register(); |
| 2207 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); | 2182 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); |
| 2208 } | 2183 } |
| 2209 | 2184 |
| (...skipping 59 matching lines...) |
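TryAllocateFreeReg follows the classic linear-scan recipe: every register starts free until "infinity", active ranges pin their register at position 0, inactive ranges cap theirs at the first intersection with current, and the register that stays free the longest wins. A sketch under simplified stand-in types:

    #include <algorithm>
    #include <limits>
    #include <utility>
    #include <vector>

    struct ActiveRange { int reg; };
    struct InactiveRange { int reg; int first_intersection; };  // -1 if none.

    // Returns {register, free-until position} for the register that stays
    // free the longest in this simplified model.
    std::pair<int, int> FindFreeRegister(
        int num_registers, const std::vector<ActiveRange>& active,
        const std::vector<InactiveRange>& inactive) {
      std::vector<int> free_until(num_registers,
                                  std::numeric_limits<int>::max());
      for (const auto& r : active) free_until[r.reg] = 0;  // Occupied now.
      for (const auto& r : inactive) {
        if (r.first_intersection < 0) continue;  // Never conflicts.
        free_until[r.reg] = std::min(free_until[r.reg], r.first_intersection);
      }
      int best = 0;
      for (int i = 1; i < num_registers; ++i) {
        if (free_until[i] > free_until[best]) best = i;
      }
      return {best, free_until[best]};
    }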
| 2269 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2244 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2270 | 2245 |
| 2271 for (int i = 0; i < num_registers_; i++) { | 2246 for (int i = 0; i < num_registers_; i++) { |
| 2272 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); | 2247 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); |
| 2273 } | 2248 } |
| 2274 | 2249 |
| 2275 for (auto range : active_live_ranges()) { | 2250 for (auto range : active_live_ranges()) { |
| 2276 int cur_reg = range->assigned_register(); | 2251 int cur_reg = range->assigned_register(); |
| 2277 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { | 2252 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { |
| 2278 block_pos[cur_reg] = use_pos[cur_reg] = | 2253 block_pos[cur_reg] = use_pos[cur_reg] = |
| 2279 LifetimePosition::FromInstructionIndex(0); | 2254 LifetimePosition::GapFromInstructionIndex(0); |
| 2280 } else { | 2255 } else { |
| 2281 auto next_use = | 2256 auto next_use = |
| 2282 range->NextUsePositionRegisterIsBeneficial(current->Start()); | 2257 range->NextUsePositionRegisterIsBeneficial(current->Start()); |
| 2283 if (next_use == nullptr) { | 2258 if (next_use == nullptr) { |
| 2284 use_pos[cur_reg] = range->End(); | 2259 use_pos[cur_reg] = range->End(); |
| 2285 } else { | 2260 } else { |
| 2286 use_pos[cur_reg] = next_use->pos(); | 2261 use_pos[cur_reg] = next_use->pos(); |
| 2287 } | 2262 } |
| 2288 } | 2263 } |
| 2289 } | 2264 } |
| (...skipping 23 matching lines...) |
| 2313 if (pos.Value() < register_use->pos().Value()) { | 2288 if (pos.Value() < register_use->pos().Value()) { |
| 2314 // All registers are blocked before the first use that requires a register. | 2289 // All registers are blocked before the first use that requires a register. |
| 2315 // Spill the starting part of the live range up to that use. | 2290 // Spill the starting part of the live range up to that use. |
| 2316 SpillBetween(current, current->Start(), register_use->pos()); | 2291 SpillBetween(current, current->Start(), register_use->pos()); |
| 2317 return; | 2292 return; |
| 2318 } | 2293 } |
| 2319 | 2294 |
| 2320 if (block_pos[reg].Value() < current->End().Value()) { | 2295 if (block_pos[reg].Value() < current->End().Value()) { |
| 2321 // The register becomes blocked before the current range ends. Split before | 2296 // The register becomes blocked before the current range ends. Split before |
| 2322 // that position. | 2297 // that position. |
| 2323 LiveRange* tail = SplitBetween(current, current->Start(), | 2298 LiveRange* tail = |
| 2324 block_pos[reg].InstructionStart()); | 2299 SplitBetween(current, current->Start(), block_pos[reg].Start()); |
| 2325 AddToUnhandledSorted(tail); | 2300 AddToUnhandledSorted(tail); |
| 2326 } | 2301 } |
| 2327 | 2302 |
| 2328 // Register reg is not blocked for the whole range. | 2303 // Register reg is not blocked for the whole range. |
| 2329 DCHECK(block_pos[reg].Value() >= current->End().Value()); | 2304 DCHECK(block_pos[reg].Value() >= current->End().Value()); |
| 2330 TRACE("Assigning blocked reg %s to live range %d\n", RegisterName(reg), | 2305 TRACE("Assigning blocked reg %s to live range %d\n", RegisterName(reg), |
| 2331 current->id()); | 2306 current->id()); |
| 2332 SetLiveRangeAssignedRegister(current, reg); | 2307 SetLiveRangeAssignedRegister(current, reg); |
| 2333 | 2308 |
| 2334 // This register was not free. Thus we need to find and spill | 2309 // This register was not free. Thus we need to find and spill |
| 2335 // parts of active and inactive live ranges that use the same register | 2310 // parts of active and inactive live ranges that use the same register |
| 2336 // at the same lifetime positions as current. | 2311 // at the same lifetime positions as current. |
| 2337 SplitAndSpillIntersecting(current); | 2312 SplitAndSpillIntersecting(current); |
| 2338 } | 2313 } |
| 2339 | 2314 |
| 2340 | 2315 |
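AllocateBlockedReg mirrors the free-register case with two arrays: use_pos (when the occupant next needs the register, or 0 if it cannot be spilled) and block_pos (hard blockage by fixed ranges). Assuming the chosen register maximizes use_pos, the three outcomes shown above reduce to this sketch with hypothetical names:

    enum class Outcome {
      kSpillUpToFirstUse, kSplitCurrentAndAssign, kAssignWhole
    };

    // first_register_use is current's first use that requires a register;
    // current_end is the end of the current live range.
    Outcome DecideBlockedReg(int use_pos, int block_pos,
                             int first_register_use, int current_end) {
      if (use_pos < first_register_use) {
        // Every register is reused before current's first register use:
        // cheaper to spill current up to that use than to evict anyone.
        return Outcome::kSpillUpToFirstUse;
      }
      if (block_pos < current_end) {
        // A fixed range blocks the register part way: split current there,
        // assign the register to the head, requeue the tail.
        return Outcome::kSplitCurrentAndAssign;
      }
      // The register is only softly occupied across the whole range: take
      // it and split/spill the intersecting active and inactive ranges.
      return Outcome::kAssignWhole;
    }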
| 2341 static const InstructionBlock* GetContainingLoop( | 2316 static const InstructionBlock* GetContainingLoop( |
| 2342 const InstructionSequence* sequence, const InstructionBlock* block) { | 2317 const InstructionSequence* sequence, const InstructionBlock* block) { |
| 2343 auto index = block->loop_header(); | 2318 auto index = block->loop_header(); |
| 2344 if (!index.IsValid()) return nullptr; | 2319 if (!index.IsValid()) return nullptr; |
| 2345 return sequence->InstructionBlockAt(index); | 2320 return sequence->InstructionBlockAt(index); |
| 2346 } | 2321 } |
| 2347 | 2322 |
| 2348 | 2323 |
| 2349 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( | 2324 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( |
| 2350 LiveRange* range, LifetimePosition pos) { | 2325 LiveRange* range, LifetimePosition pos) { |
| 2351 auto block = GetInstructionBlock(pos.InstructionStart()); | 2326 auto block = GetInstructionBlock(pos.Start()); |
| 2352 auto loop_header = | 2327 auto loop_header = |
| 2353 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); | 2328 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); |
| 2354 | 2329 |
| 2355 if (loop_header == nullptr) return pos; | 2330 if (loop_header == nullptr) return pos; |
| 2356 | 2331 |
| 2357 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); | 2332 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); |
| 2358 | 2333 |
| 2359 while (loop_header != nullptr) { | 2334 while (loop_header != nullptr) { |
| 2360 // We are going to spill the live range inside the loop. | 2335 // We are going to spill the live range inside the loop. |
| 2361 // If possible, move the spill position backwards to the loop header; | 2336 // If possible, move the spill position backwards to the loop header; |
| 2362 // this reduces the number of memory moves on the back edge. | 2337 // this reduces the number of memory moves on the back edge. |
| 2363 auto loop_start = LifetimePosition::FromInstructionIndex( | 2338 auto loop_start = LifetimePosition::GapFromInstructionIndex( |
| 2364 loop_header->first_instruction_index()); | 2339 loop_header->first_instruction_index()); |
| 2365 | 2340 |
| 2366 if (range->Covers(loop_start)) { | 2341 if (range->Covers(loop_start)) { |
| 2367 if (prev_use == nullptr || prev_use->pos().Value() < loop_start.Value()) { | 2342 if (prev_use == nullptr || prev_use->pos().Value() < loop_start.Value()) { |
| 2368 // No register-beneficial use inside the loop before pos. | 2343 // No register-beneficial use inside the loop before pos. |
| 2369 pos = loop_start; | 2344 pos = loop_start; |
| 2370 } | 2345 } |
| 2371 } | 2346 } |
| 2372 | 2347 |
| 2373 // Try hoisting out to an outer loop. | 2348 // Try hoisting out to an outer loop. |
| (...skipping 46 matching lines...) |
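The loop above hoists a spill that would land inside a loop up to each enclosing loop header the range covers, as long as no register-beneficial use sits between the header and the original position. A sketch with hypothetical stand-in types:

    #include <functional>

    // Each loop header records its first instruction's position and its
    // enclosing header (nullptr at the outermost loop).
    struct LoopHeader {
      int loop_start;
      const LoopHeader* outer;
    };

    // prev_beneficial_use is the last register-beneficial use before pos,
    // or -1 if there is none.
    int HoistSpillPosition(int pos, const LoopHeader* loop_header,
                           int prev_beneficial_use,
                           const std::function<bool(int)>& range_covers) {
      while (loop_header != nullptr) {
        int loop_start = loop_header->loop_start;
        // Hoist only if the range is live at the header and no use that
        // prefers a register occurs between the header and the old position.
        if (range_covers(loop_start) && prev_beneficial_use < loop_start) {
          pos = loop_start;
        }
        loop_header = loop_header->outer;  // Try the next enclosing loop.
      }
      return pos;
    }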
| 2420 } | 2395 } |
| 2421 InactiveToHandled(range); | 2396 InactiveToHandled(range); |
| 2422 --i; | 2397 --i; |
| 2423 } | 2398 } |
| 2424 } | 2399 } |
| 2425 } | 2400 } |
| 2426 } | 2401 } |
| 2427 | 2402 |
| 2428 | 2403 |
| 2429 bool RegisterAllocator::IsBlockBoundary(LifetimePosition pos) { | 2404 bool RegisterAllocator::IsBlockBoundary(LifetimePosition pos) { |
| 2430 return pos.IsInstructionStart() && | 2405 return pos.IsFullStart() && |
| 2431 code()->GetInstructionBlock(pos.InstructionIndex())->code_start() == | 2406 code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() == |
| 2432 pos.InstructionIndex(); | 2407 pos.ToInstructionIndex(); |
| 2433 } | 2408 } |
| 2434 | 2409 |
| 2435 | 2410 |
| 2436 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, | 2411 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, |
| 2437 LifetimePosition pos) { | 2412 LifetimePosition pos) { |
| 2438 DCHECK(!range->IsFixed()); | 2413 DCHECK(!range->IsFixed()); |
| 2439 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); | 2414 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); |
| 2440 | 2415 |
| 2441 if (pos.Value() <= range->Start().Value()) return range; | 2416 if (pos.Value() <= range->Start().Value()) return range; |
| 2442 | 2417 |
| 2443 // We can't properly connect live ranges if the split occurred at the end | 2418 // We can't properly connect live ranges if the split occurred at the end |
| 2444 // of a block. | 2419 // of a block. |
| 2445 DCHECK(pos.IsInstructionStart() || | 2420 DCHECK(pos.IsStart() || pos.IsGapPosition() || |
| 2446 (code()->GetInstructionBlock(pos.InstructionIndex())) | 2421 (code()->GetInstructionBlock(pos.ToInstructionIndex())) |
| 2447 ->last_instruction_index() != pos.InstructionIndex()); | 2422 ->last_instruction_index() != pos.ToInstructionIndex()); |
| 2448 | 2423 |
| 2449 int vreg = GetVirtualRegister(); | 2424 int vreg = GetVirtualRegister(); |
| 2450 auto result = LiveRangeFor(vreg); | 2425 auto result = LiveRangeFor(vreg); |
| 2451 range->SplitAt(pos, result, local_zone()); | 2426 range->SplitAt(pos, result, local_zone()); |
| 2452 return result; | 2427 return result; |
| 2453 } | 2428 } |
| 2454 | 2429 |
| 2455 | 2430 |
| 2456 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, | 2431 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, |
| 2457 LifetimePosition start, | 2432 LifetimePosition start, |
| 2458 LifetimePosition end) { | 2433 LifetimePosition end) { |
| 2459 DCHECK(!range->IsFixed()); | 2434 DCHECK(!range->IsFixed()); |
| 2460 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(), | 2435 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(), |
| 2461 start.Value(), end.Value()); | 2436 start.Value(), end.Value()); |
| 2462 | 2437 |
| 2463 auto split_pos = FindOptimalSplitPos(start, end); | 2438 auto split_pos = FindOptimalSplitPos(start, end); |
| 2464 DCHECK(split_pos.Value() >= start.Value()); | 2439 DCHECK(split_pos.Value() >= start.Value()); |
| 2465 return SplitRangeAt(range, split_pos); | 2440 return SplitRangeAt(range, split_pos); |
| 2466 } | 2441 } |
| 2467 | 2442 |
| 2468 | 2443 |
| 2469 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, | 2444 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, |
| 2470 LifetimePosition end) { | 2445 LifetimePosition end) { |
| 2471 int start_instr = start.InstructionIndex(); | 2446 int start_instr = start.ToInstructionIndex(); |
| 2472 int end_instr = end.InstructionIndex(); | 2447 int end_instr = end.ToInstructionIndex(); |
| 2473 DCHECK(start_instr <= end_instr); | 2448 DCHECK(start_instr <= end_instr); |
| 2474 | 2449 |
| 2475 // We have no choice. | 2450 // We have no choice. |
| 2476 if (start_instr == end_instr) return end; | 2451 if (start_instr == end_instr) return end; |
| 2477 | 2452 |
| 2478 auto start_block = GetInstructionBlock(start); | 2453 auto start_block = GetInstructionBlock(start); |
| 2479 auto end_block = GetInstructionBlock(end); | 2454 auto end_block = GetInstructionBlock(end); |
| 2480 | 2455 |
| 2481 if (end_block == start_block) { | 2456 if (end_block == start_block) { |
| 2482 // The interval is split in the same basic block. Split at the latest | 2457 // The interval is split in the same basic block. Split at the latest |
| 2483 // possible position. | 2458 // possible position. |
| 2484 return end; | 2459 return end; |
| 2485 } | 2460 } |
| 2486 | 2461 |
| 2487 auto block = end_block; | 2462 auto block = end_block; |
| 2488 // Find header of outermost loop. | 2463 // Find header of outermost loop. |
| 2489 // TODO(titzer): fix redundancy below. | 2464 // TODO(titzer): fix redundancy below. |
| 2490 while (GetContainingLoop(code(), block) != nullptr && | 2465 while (GetContainingLoop(code(), block) != nullptr && |
| 2491 GetContainingLoop(code(), block)->rpo_number().ToInt() > | 2466 GetContainingLoop(code(), block)->rpo_number().ToInt() > |
| 2492 start_block->rpo_number().ToInt()) { | 2467 start_block->rpo_number().ToInt()) { |
| 2493 block = GetContainingLoop(code(), block); | 2468 block = GetContainingLoop(code(), block); |
| 2494 } | 2469 } |
| 2495 | 2470 |
| 2496 // We did not find any suitable outer loop. Split at the latest possible | 2471 // We did not find any suitable outer loop. Split at the latest possible |
| 2497 // position unless end_block is a loop header itself. | 2472 // position unless end_block is a loop header itself. |
| 2498 if (block == end_block && !end_block->IsLoopHeader()) return end; | 2473 if (block == end_block && !end_block->IsLoopHeader()) return end; |
| 2499 | 2474 |
| 2500 return LifetimePosition::FromInstructionIndex( | 2475 return LifetimePosition::GapFromInstructionIndex( |
| 2501 block->first_instruction_index()); | 2476 block->first_instruction_index()); |
| 2502 } | 2477 } |
| 2503 | 2478 |
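FindOptimalSplitPos prefers the start of the outermost loop header that still sits below start_block, so the reconnecting move lands outside the loop instead of running on every iteration. A sketch with simplified block records:

    // Simplified block record: RPO number, enclosing loop header (nullptr
    // if not inside a loop), loop-header flag, first instruction index.
    struct Block {
      int rpo;
      const Block* containing_loop;
      bool is_loop_header;
      int first_instruction_index;
    };

    // Walk outward from end_block through enclosing loops that still start
    // after start_block; split at that header, otherwise as late as possible.
    int OptimalSplitPos(const Block* start_block, const Block* end_block,
                        int end_pos) {
      const Block* block = end_block;
      while (block->containing_loop != nullptr &&
             block->containing_loop->rpo > start_block->rpo) {
        block = block->containing_loop;
      }
      if (block == end_block && !end_block->is_loop_header) return end_pos;
      return block->first_instruction_index;
    }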
| 2504 | 2479 |
| 2505 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { | 2480 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { |
| 2506 auto second_part = SplitRangeAt(range, pos); | 2481 auto second_part = SplitRangeAt(range, pos); |
| 2507 Spill(second_part); | 2482 Spill(second_part); |
| 2508 } | 2483 } |
| 2509 | 2484 |
| 2510 | 2485 |
| 2511 void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, | 2486 void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, |
| 2512 LifetimePosition end) { | 2487 LifetimePosition end) { |
| 2513 SpillBetweenUntil(range, start, start, end); | 2488 SpillBetweenUntil(range, start, start, end); |
| 2514 } | 2489 } |
| 2515 | 2490 |
| 2516 | 2491 |
| 2517 void RegisterAllocator::SpillBetweenUntil(LiveRange* range, | 2492 void RegisterAllocator::SpillBetweenUntil(LiveRange* range, |
| 2518 LifetimePosition start, | 2493 LifetimePosition start, |
| 2519 LifetimePosition until, | 2494 LifetimePosition until, |
| 2520 LifetimePosition end) { | 2495 LifetimePosition end) { |
| 2521 CHECK(start.Value() < end.Value()); | 2496 CHECK(start.Value() < end.Value()); |
| 2522 auto second_part = SplitRangeAt(range, start); | 2497 auto second_part = SplitRangeAt(range, start); |
| 2523 | 2498 |
| 2524 if (second_part->Start().Value() < end.Value()) { | 2499 if (second_part->Start().Value() < end.Value()) { |
| 2525 // The split result intersects with [start, end[. | 2500 // The split result intersects with [start, end[. |
| 2526 // Split it at a position in ]start+1, end[, spill the middle part, | 2501 // Split it at a position in ]start+1, end[, spill the middle part, |
| 2527 // and put the rest on the unhandled list. | 2502 // and put the rest on the unhandled list. |
| 2528 auto third_part_end = end.PrevInstruction().InstructionEnd(); | 2503 auto third_part_end = end.PrevStart().End(); |
| 2529 if (IsBlockBoundary(end.InstructionStart())) { | 2504 if (IsBlockBoundary(end.Start())) { |
| 2530 third_part_end = end.InstructionStart(); | 2505 third_part_end = end.Start(); |
| 2531 } | 2506 } |
| 2532 auto third_part = SplitBetween( | 2507 auto third_part = SplitBetween( |
| 2533 second_part, Max(second_part->Start().InstructionEnd(), until), | 2508 second_part, Max(second_part->Start().End(), until), third_part_end); |
| 2534 third_part_end); | |
| 2535 | 2509 |
| 2536 DCHECK(third_part != second_part); | 2510 DCHECK(third_part != second_part); |
| 2537 | 2511 |
| 2538 Spill(second_part); | 2512 Spill(second_part); |
| 2539 AddToUnhandledSorted(third_part); | 2513 AddToUnhandledSorted(third_part); |
| 2540 } else { | 2514 } else { |
| 2541 // The split result does not intersect with [start, end[. | 2515 // The split result does not intersect with [start, end[. |
| 2542 // Nothing to spill. Just put it on the unhandled list as a whole. | 2516 // Nothing to spill. Just put it on the unhandled list as a whole. |
| 2543 AddToUnhandledSorted(second_part); | 2517 AddToUnhandledSorted(second_part); |
| 2544 } | 2518 } |
| (...skipping 34 matching lines...) |
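SpillBetweenUntil thus carves the range into up to three pieces: an untouched head before start, a spilled middle, and a tail that returns to the unhandled list, with the tail's split point pulled back to the block boundary when end starts a block. A coarse sketch of the interval arithmetic, ignoring the gap/instruction sub-positions that LifetimePosition actually encodes:

    #include <algorithm>

    // Plain integer positions stand in for LifetimePositions.
    struct SpillPlan {
      int spill_from;    // Start of the middle piece committed to the stack.
      int requeue_from;  // Earliest split point for the requeued tail.
      int requeue_to;    // Latest split point for the requeued tail.
    };

    SpillPlan PlanSpillBetweenUntil(int start, int until, int end,
                                    bool end_is_block_boundary) {
      SpillPlan plan;
      plan.spill_from = start;
      // Split the tail just before end, unless end opens a new block, in
      // which case the boundary itself is the natural split point.
      plan.requeue_to = end_is_block_boundary ? end : end - 1;
      // Never split before `until`; the caller wants the head spilled at
      // least that far.
      plan.requeue_from = std::max(start + 1, until);
      return plan;
    }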
| 2579 } else { | 2553 } else { |
| 2580 DCHECK(range->Kind() == GENERAL_REGISTERS); | 2554 DCHECK(range->Kind() == GENERAL_REGISTERS); |
| 2581 assigned_registers_->Add(reg); | 2555 assigned_registers_->Add(reg); |
| 2582 } | 2556 } |
| 2583 range->set_assigned_register(reg, operand_cache()); | 2557 range->set_assigned_register(reg, operand_cache()); |
| 2584 } | 2558 } |
| 2585 | 2559 |
| 2586 } // namespace compiler | 2560 } // namespace compiler |
| 2587 } // namespace internal | 2561 } // namespace internal |
| 2588 } // namespace v8 | 2562 } // namespace v8 |