| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/live-range-separator.h" | 5 #include "src/compiler/live-range-separator.h" |
| 6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| 11 | 11 |
| 12 | 12 |
| 13 #define TRACE(...) \ | 13 #define TRACE(...) \ |
| 14 do { \ | 14 do { \ |
| 15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ | 15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
| 16 } while (false) | 16 } while (false) |
| 17 | 17 |
| 18 | 18 |
| 19 namespace { | 19 namespace { |
| 20 | 20 |
| 21 // Starting from a deferred block, find the last consecutive deferred block. | |
| 22 RpoNumber GetLastDeferredBlock(const InstructionBlock *block, | |
| 23 const InstructionSequence *code) { | |
| 24 DCHECK(block->IsDeferred()); | |
| 25 RpoNumber first = block->rpo_number(); | |
| 26 | |
| 27 RpoNumber last = first; | |
| 28 for (int i = first.ToInt(); i < code->InstructionBlockCount(); ++i) { | |
| 29 RpoNumber at_i = RpoNumber::FromInt(i); | |
| 30 const InstructionBlock *block_at_i = code->InstructionBlockAt(at_i); | |
| 31 if (!block_at_i->IsDeferred()) break; | |
| 32 last = at_i; | |
| 33 } | |
| 34 | |
| 35 return last; | |
| 36 } | |
| 37 | |
| 38 | |
| 39 // Delimits consecutive deferred block sequences. | |
| 40 void AssociateDeferredBlockSequences(InstructionSequence *code) { | |
| 41 for (int blk_id = 0; blk_id < code->InstructionBlockCount(); ++blk_id) { | |
| 42 InstructionBlock *block = | |
| 43 code->InstructionBlockAt(RpoNumber::FromInt(blk_id)); | |
| 44 if (!block->IsDeferred()) continue; | |
| 45 RpoNumber last = GetLastDeferredBlock(block, code); | |
| 46 block->set_last_deferred(last); | |
| 47 // We know last is still deferred, and that last + 1 is not (or is an | |
| 48 // invalid index). So skip over last + 1 and continue from last + 2. This | |
| 49 // way, we visit each block exactly once, and the total complexity of this | |
| 50 // function is O(n), n being the number of blocks. | |
| 51 blk_id = last.ToInt() + 1; | |
| 52 } | |
| 53 } | |
| 54 | |
| 55 | 21 |
| 56 void CreateSplinter(TopLevelLiveRange *range, RegisterAllocationData *data, | 22 void CreateSplinter(TopLevelLiveRange *range, RegisterAllocationData *data, |
| 57 LifetimePosition first_cut, LifetimePosition last_cut) { | 23 LifetimePosition first_cut, LifetimePosition last_cut) { |
| 58 DCHECK(!range->IsSplinter()); | 24 DCHECK(!range->IsSplinter()); |
| 59 // We can ignore ranges that live solely in deferred blocks. | 25 // We can ignore ranges that live solely in deferred blocks. |
| 60 // If a range ends right at the end of a deferred block, it is marked by | 26 // If a range ends right at the end of a deferred block, it is marked by |
| 61 // the range builder as ending at gap start of the next block - since the | 27 // the range builder as ending at gap start of the next block - since the |
| 62 // end is a position where the variable isn't live. We need to take that | 28 // end is a position where the variable isn't live. We need to take that |
| 63 // into consideration. | 29 // into consideration. |
| 64 LifetimePosition max_allowed_end = last_cut.NextFullStart(); | 30 LifetimePosition max_allowed_end = last_cut.NextFullStart(); |
| (...skipping 16 matching lines...) |
| 81 TopLevelLiveRange *result = data->NextLiveRange(range->machine_type()); | 47 TopLevelLiveRange *result = data->NextLiveRange(range->machine_type()); |
| 82 DCHECK_NULL(data->live_ranges()[result->vreg()]); | 48 DCHECK_NULL(data->live_ranges()[result->vreg()]); |
| 83 data->live_ranges()[result->vreg()] = result; | 49 data->live_ranges()[result->vreg()] = result; |
| 84 | 50 |
| 85 Zone *zone = data->allocation_zone(); | 51 Zone *zone = data->allocation_zone(); |
| 86 range->Splinter(start, end, result, zone); | 52 range->Splinter(start, end, result, zone); |
| 87 } | 53 } |
| 88 } | 54 } |
| 89 | 55 |
| 90 | 56 |
| 91 // Splinter all ranges live inside successive deferred blocks. | 57 int FirstInstruction(const UseInterval *interval) { |
| 92 // No control flow analysis is performed. After the register allocation, we will | 58 LifetimePosition start = interval->start(); |
| 93 // merge the splinters back into the original ranges, and then rely on the | 59 int ret = start.ToInstructionIndex(); |
| 94 // range connector to properly connect them. | 60 if (start.IsInstructionPosition() && start.IsEnd()) { |
| 95 void SplinterRangesInDeferredBlocks(RegisterAllocationData *data) { | 61 ++ret; |
| 96 InstructionSequence *code = data->code(); | 62 } |
| 97 int code_block_count = code->InstructionBlockCount(); | 63 return ret; |
| 98 Zone *zone = data->allocation_zone(); | 64 } |
| 99 ZoneVector<BitVector *> &in_sets = data->live_in_sets(); | |
| 100 | 65 |
| 101 for (int i = 0; i < code_block_count; ++i) { | |
| 102 InstructionBlock *block = code->InstructionBlockAt(RpoNumber::FromInt(i)); | |
| 103 if (!block->IsDeferred()) continue; | |
| 104 | 66 |
| 105 RpoNumber last_deferred = block->last_deferred(); | 67 int LastInstruction(const UseInterval *interval) { |
| 106 // last_deferred + 1 is not deferred, so no point in visiting it. | 68 LifetimePosition end = interval->end(); |
| 107 i = last_deferred.ToInt() + 1; | 69 int ret = end.ToInstructionIndex(); |
| 70 if (end.IsGapPosition() || end.IsStart()) { |
| 71 --ret; |
| 72 } |
| 73 return ret; |
| 74 } |
| 108 | 75 |
| 109 LifetimePosition first_cut = LifetimePosition::GapFromInstructionIndex( | |
| 110 block->first_instruction_index()); | |
| 111 | 76 |
| 112 LifetimePosition last_cut = LifetimePosition::GapFromInstructionIndex( | 77 void SplinterLiveRange(TopLevelLiveRange *range, RegisterAllocationData *data) { |
| 113 static_cast<int>(code->instructions().size())); | 78 const InstructionSequence *code = data->code(); |
| 79 UseInterval *interval = range->first_interval(); |
| 114 | 80 |
| 115 const BitVector *in_set = in_sets[block->rpo_number().ToInt()]; | 81 LifetimePosition first_cut = LifetimePosition::Invalid(); |
| 116 BitVector ranges_to_splinter(*in_set, zone); | 82 LifetimePosition last_cut = LifetimePosition::Invalid(); |
| 117 InstructionBlock *last = code->InstructionBlockAt(last_deferred); | 83 |
| 118 for (int deferred_id = block->rpo_number().ToInt(); | 84 while (interval != nullptr) { |
| 119 deferred_id <= last->rpo_number().ToInt(); ++deferred_id) { | 85 UseInterval *next_interval = interval->next(); |
| 120 const BitVector *ins = in_sets[deferred_id]; | 86 const InstructionBlock *first_block = |
| 121 ranges_to_splinter.Union(*ins); | 87 code->GetInstructionBlock(FirstInstruction(interval)); |
| 122 const BitVector *outs = LiveRangeBuilder::ComputeLiveOut( | 88 const InstructionBlock *last_block = |
| 123 code->InstructionBlockAt(RpoNumber::FromInt(deferred_id)), data); | 89 code->GetInstructionBlock(LastInstruction(interval)); |
| 124 ranges_to_splinter.Union(*outs); | 90 int first_block_nr = first_block->rpo_number().ToInt(); |
| 91 int last_block_nr = last_block->rpo_number().ToInt(); |
| 92 for (int block_id = first_block_nr; block_id <= last_block_nr; ++block_id) { |
| 93 const InstructionBlock *current_block = |
| 94 code->InstructionBlockAt(RpoNumber::FromInt(block_id)); |
| 95 if (current_block->IsDeferred()) { |
| 96 if (!first_cut.IsValid()) { |
| 97 first_cut = LifetimePosition::GapFromInstructionIndex( |
| 98 current_block->first_instruction_index()); |
| 99 } |
| 100 last_cut = LifetimePosition::GapFromInstructionIndex( |
| 101 current_block->last_instruction_index()); |
| 102 } else { |
| 103 if (first_cut.IsValid()) { |
| 104 CreateSplinter(range, data, first_cut, last_cut); |
| 105 first_cut = LifetimePosition::Invalid(); |
| 106 last_cut = LifetimePosition::Invalid(); |
| 107 } |
| 108 } |
| 125 } | 109 } |
| 126 | 110 interval = next_interval; |
| 127 int last_index = last->last_instruction_index(); | 111 } |
| 128 if (code->InstructionAt(last_index)->opcode() == | 112 // When the range ends in deferred blocks, first_cut will be valid here. |
| 129 ArchOpcode::kArchDeoptimize) { | 113 if (first_cut.IsValid()) { |
| 130 ++last_index; | 114 CreateSplinter(range, data, first_cut, range->End()); |
| 131 } | |
| 132 last_cut = LifetimePosition::GapFromInstructionIndex(last_index); | |
| 133 | |
| 134 BitVector::Iterator iterator(&ranges_to_splinter); | |
| 135 | |
| 136 while (!iterator.Done()) { | |
| 137 int range_id = iterator.Current(); | |
| 138 iterator.Advance(); | |
| 139 | |
| 140 TopLevelLiveRange *range = data->live_ranges()[range_id]; | |
| 141 CreateSplinter(range, data, first_cut, last_cut); | |
| 142 } | |
| 143 } | 115 } |
| 144 } | 116 } |
| 145 } // namespace | 117 } // namespace |
| 146 | 118 |
| 147 | 119 |
| 148 void LiveRangeSeparator::Splinter() { | 120 void LiveRangeSeparator::Splinter() { |
| 149 AssociateDeferredBlockSequences(data()->code()); | 121 size_t virt_reg_count = data()->live_ranges().size(); |
| 150 SplinterRangesInDeferredBlocks(data()); | 122 for (size_t vreg = 0; vreg < virt_reg_count; ++vreg) { |
| 123 TopLevelLiveRange *range = data()->live_ranges()[vreg]; |
| 124 if (range == nullptr || range->IsEmpty() || range->IsSplinter()) { |
| 125 continue; |
| 126 } |
| 127 SplinterLiveRange(range, data()); |
| 128 } |
| 151 } | 129 } |
| 152 | 130 |
| 153 | 131 |
| 154 void LiveRangeMerger::Merge() { | 132 void LiveRangeMerger::Merge() { |
| 155 int live_range_count = static_cast<int>(data()->live_ranges().size()); | 133 int live_range_count = static_cast<int>(data()->live_ranges().size()); |
| 156 for (int i = 0; i < live_range_count; ++i) { | 134 for (int i = 0; i < live_range_count; ++i) { |
| 157 TopLevelLiveRange *range = data()->live_ranges()[i]; | 135 TopLevelLiveRange *range = data()->live_ranges()[i]; |
| 158 if (range == nullptr || range->IsEmpty() || !range->IsSplinter()) { | 136 if (range == nullptr || range->IsEmpty() || !range->IsSplinter()) { |
| 159 continue; | 137 continue; |
| 160 } | 138 } |
| 161 TopLevelLiveRange *splinter_parent = range->splintered_from(); | 139 TopLevelLiveRange *splinter_parent = range->splintered_from(); |
| 162 | 140 |
| 163 int to_remove = range->vreg(); | 141 int to_remove = range->vreg(); |
| 164 splinter_parent->Merge(range, data()->allocation_zone()); | 142 splinter_parent->Merge(range, data()->allocation_zone()); |
| 165 data()->live_ranges()[to_remove] = nullptr; | 143 data()->live_ranges()[to_remove] = nullptr; |
| 166 } | 144 } |
| 167 } | 145 } |
| 168 | 146 |
| 169 | 147 |
| 170 } // namespace compiler | 148 } // namespace compiler |
| 171 } // namespace internal | 149 } // namespace internal |
| 172 } // namespace v8 | 150 } // namespace v8 |
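
The NEW side replaces the block-sequence bookkeeping (AssociateDeferredBlockSequences plus the live-in/live-out bitvector unions) with a direct walk over each top-level range's use intervals, cutting a splinter out of every maximal run of deferred blocks the range crosses. The following standalone sketch is not V8 code: `Block`, `Cut`, and `CollectDeferredCuts` are illustrative names, and plain instruction indices stand in for `LifetimePosition` gap positions. It only shows the first_cut/last_cut bookkeeping that the new `SplinterLiveRange` performs for each interval:

```cpp
// Minimal sketch of the deferred-block cut collection in the new
// SplinterLiveRange. Types and indices are simplified: the real code walks
// UseIntervals and works with LifetimePosition gap positions.
#include <cstdio>
#include <vector>

struct Block {
  int first_instruction;  // index of the block's first instruction
  int last_instruction;   // index of the block's last instruction
  bool is_deferred;       // true for rarely executed (deferred) blocks
};

struct Cut {
  int first;  // first instruction of a maximal deferred run
  int last;   // last instruction of that run
};

// Scan the blocks covered by one live interval and emit one Cut per maximal
// run of deferred blocks, mirroring the first_cut/last_cut logic in the patch.
std::vector<Cut> CollectDeferredCuts(const std::vector<Block>& blocks,
                                     int first_block, int last_block) {
  std::vector<Cut> cuts;
  int first_cut = -1;  // -1 plays the role of LifetimePosition::Invalid()
  int last_cut = -1;
  for (int b = first_block; b <= last_block; ++b) {
    if (blocks[b].is_deferred) {
      if (first_cut < 0) first_cut = blocks[b].first_instruction;
      last_cut = blocks[b].last_instruction;
    } else if (first_cut >= 0) {
      cuts.push_back({first_cut, last_cut});  // deferred run just ended
      first_cut = last_cut = -1;
    }
  }
  if (first_cut >= 0) {
    // The interval ends while still inside deferred code; close the open run.
    cuts.push_back({first_cut, last_cut});
  }
  return cuts;
}

int main() {
  // Blocks 1-2 and 4 are deferred, so an interval spanning blocks 0..4
  // should yield two cuts.
  std::vector<Block> blocks = {{0, 3, false},
                               {4, 7, true},
                               {8, 9, true},
                               {10, 13, false},
                               {14, 15, true}};
  for (const Cut& c : CollectDeferredCuts(blocks, 0, 4)) {
    std::printf("splinter covering instructions [%d, %d]\n", c.first, c.last);
  }
  return 0;
}
```

In the actual change, each such cut pair feeds `CreateSplinter`, and the check after the interval loop handles ranges that end inside deferred code, where the run is still open when the walk finishes.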