| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/coalesced-live-ranges.h" | 5 #include "src/compiler/coalesced-live-ranges.h" |
| 6 #include "src/compiler/greedy-allocator.h" | 6 #include "src/compiler/greedy-allocator.h" |
| 7 #include "src/compiler/register-allocator.h" | 7 #include "src/compiler/register-allocator.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| 11 namespace compiler { | 11 namespace compiler { |
| 12 | 12 |
| 13 #define TRACE(...) \ | |
| 14 do { \ | |
| 15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ | |
| 16 } while (false) | |
| 17 | 13 |
| 18 | 14 LiveRangeConflictIterator::LiveRangeConflictIterator(const LiveRange* range, |
| 19 const float CoalescedLiveRanges::kAllocatedRangeMultiplier = 10.0; | 15 IntervalStore* storage) |
| 20 | 16 : query_(range->first_interval()), |
| 21 void CoalescedLiveRanges::AllocateRange(LiveRange* range) { | 17 pos_(storage->end()), |
| 22 UpdateWeightAtAllocation(range); | 18 intervals_(storage) { |
| 23 for (auto interval = range->first_interval(); interval != nullptr; | 19 MovePosAndQueryToFirstConflict(); |
| 24 interval = interval->next()) { | |
| 25 storage().insert({interval->start(), interval->end(), range}); | |
| 26 } | |
| 27 } | 20 } |
| 28 | 21 |
| 29 | 22 |
| 30 void CoalescedLiveRanges::Remove(LiveRange* range) { | 23 LiveRange* LiveRangeConflictIterator::Current() const { |
| 31 for (auto interval = range->first_interval(); interval != nullptr; | 24 if (IsFinished()) return nullptr; |
| 32 interval = interval->next()) { | 25 return pos_->range_; |
| 33 storage().erase({interval->start(), interval->end(), nullptr}); | |
| 34 } | |
| 35 range->UnsetAssignedRegister(); | |
| 36 } | 26 } |
| 37 | 27 |
| 38 | 28 |
| 39 float CoalescedLiveRanges::GetMaximumConflictingWeight( | 29 void LiveRangeConflictIterator::MovePosToFirstConflictForQuery() { |
| 40 const LiveRange* range) const { | 30 DCHECK(query_ != nullptr); |
| 41 float ret = LiveRange::kInvalidWeight; | 31 auto end = intervals_->end(); |
| 42 auto end = storage().end(); | 32 LifetimePosition q_start = query_->start(); |
| 43 for (auto query = range->first_interval(); query != nullptr; | 33 LifetimePosition q_end = query_->end(); |
| 44 query = query->next()) { | |
| 45 auto conflict = GetFirstConflict(query); | |
| 46 | 34 |
| 47 if (conflict == end) continue; | 35 if (intervals_->empty() || intervals_->rbegin()->end_ <= q_start || |
| 48 for (; QueryIntersectsAllocatedInterval(query, conflict); ++conflict) { | 36 intervals_->begin()->start_ >= q_end) { |
| 49 // It is possible we'll visit the same range multiple times, because | 37 pos_ = end; |
| 50 // successive (not necessarily consecutive) intervals belong to the same | 38 return; |
| 51 // range, or because different intervals of the query range have the same | |
| 52 // range as conflict. | |
| 53 DCHECK_NE(conflict->range->weight(), LiveRange::kInvalidWeight); | |
| 54 ret = Max(ret, conflict->range->weight()); | |
| 55 if (ret == LiveRange::kMaxWeight) break; | |
| 56 } | |
| 57 } | |
| 58 return ret; | |
| 59 } | |
| 60 | |
| 61 | |
| 62 void CoalescedLiveRanges::EvictAndRescheduleConflicts( | |
| 63 LiveRange* range, AllocationScheduler* scheduler) { | |
| 64 auto end = storage().end(); | |
| 65 | |
| 66 for (auto query = range->first_interval(); query != nullptr; | |
| 67 query = query->next()) { | |
| 68 auto conflict = GetFirstConflict(query); | |
| 69 if (conflict == end) continue; | |
| 70 while (QueryIntersectsAllocatedInterval(query, conflict)) { | |
| 71 LiveRange* range_to_evict = conflict->range; | |
| 72 // Bypass successive intervals belonging to the same range, because we're | |
| 73 // about to remove this range, and we don't want the storage iterator to | |
| 74 // become invalid. | |
| 75 while (conflict != end && conflict->range == range_to_evict) { | |
| 76 ++conflict; | |
| 77 } | |
| 78 | |
| 79 DCHECK(range_to_evict->HasRegisterAssigned()); | |
| 80 CHECK(!range_to_evict->IsFixed()); | |
| 81 Remove(range_to_evict); | |
| 82 UpdateWeightAtEviction(range_to_evict); | |
| 83 TRACE("Evicted range %d.\n", range_to_evict->id()); | |
| 84 scheduler->Schedule(range_to_evict); | |
| 85 } | |
| 86 } | |
| 87 } | |
| 88 | |
| 89 | |
| 90 bool CoalescedLiveRanges::VerifyAllocationsAreValid() const { | |
| 91 LifetimePosition last_end = LifetimePosition::GapFromInstructionIndex(0); | |
| 92 for (auto i : storage_) { | |
| 93 if (i.start < last_end) { | |
| 94 return false; | |
| 95 } | |
| 96 last_end = i.end; | |
| 97 } | |
| 98 return true; | |
| 99 } | |
| 100 | |
| 101 | |
| 102 void CoalescedLiveRanges::UpdateWeightAtAllocation(LiveRange* range) { | |
| 103 DCHECK_NE(range->weight(), LiveRange::kInvalidWeight); | |
| 104 range->set_weight(range->weight() * kAllocatedRangeMultiplier); | |
| 105 } | |
| 106 | |
| 107 | |
| 108 void CoalescedLiveRanges::UpdateWeightAtEviction(LiveRange* range) { | |
| 109 DCHECK_NE(range->weight(), LiveRange::kInvalidWeight); | |
| 110 range->set_weight(range->weight() / kAllocatedRangeMultiplier); | |
| 111 } | |
| 112 | |
| 113 | |
| 114 CoalescedLiveRanges::interval_iterator CoalescedLiveRanges::GetFirstConflict( | |
| 115 const UseInterval* query) const { | |
| 116 DCHECK(query != nullptr); | |
| 117 auto end = storage().end(); | |
| 118 LifetimePosition q_start = query->start(); | |
| 119 LifetimePosition q_end = query->end(); | |
| 120 | |
| 121 if (storage().empty() || storage().rbegin()->end <= q_start || | |
| 122 storage().begin()->start >= q_end) { | |
| 123 return end; | |
| 124 } | 39 } |
| 125 | 40 |
| 126 auto ret = storage().upper_bound(AsAllocatedInterval(q_start)); | 41 pos_ = intervals_->upper_bound(AsAllocatedInterval(q_start)); |
| 127 // ret is either at the end (no start strictly greater than q_start) or | 42 // pos is either at the end (no start strictly greater than q_start) or |
| 128 // at some position with the aforementioned property. In either case, the | 43 // at some position with the aforementioned property. In either case, the |
| 129 // allocated interval before this one may intersect our query: | 44 // allocated interval before this one may intersect our query: |
| 130 // either because, although it starts before this query's start, it ends | 45 // either because, although it starts before this query's start, it ends |
| 131 // after; or because it starts exactly at the query start. So unless we're | 46 // after; or because it starts exactly at the query start. So unless we're |
| 132 // right at the beginning of the storage - meaning the first allocated | 47 // right at the beginning of the storage - meaning the first allocated |
| 133 // interval is also starting after this query's start - see what's behind. | 48 // interval is also starting after this query's start - see what's behind. |
| 134 if (ret != storage().begin()) { | 49 if (pos_ != intervals_->begin()) { |
| 135 --ret; | 50 --pos_; |
| 136 if (!QueryIntersectsAllocatedInterval(query, ret)) { | 51 if (!QueryIntersectsAllocatedInterval()) { |
| 137 // The interval behind wasn't intersecting, so move back. | 52 // The interval behind wasn't intersecting, so move back. |
| 138 ++ret; | 53 ++pos_; |
| 139 } | 54 } |
| 140 } | 55 } |
| 141 if (ret != end && QueryIntersectsAllocatedInterval(query, ret)) return ret; | 56 if (pos_ == end || !QueryIntersectsAllocatedInterval()) { |
| 142 return end; | 57 pos_ = end; |
|  | 58   } |
| 143 } | 59 } |
| 144 | 60 |
| 145 | 61 |
| 62 void LiveRangeConflictIterator::MovePosAndQueryToFirstConflict() { |
| 63 auto end = intervals_->end(); |
| 64 for (; query_ != nullptr; query_ = query_->next()) { |
| 65 MovePosToFirstConflictForQuery(); |
| 66 if (pos_ != end) { |
| 67 DCHECK(QueryIntersectsAllocatedInterval()); |
| 68 return; |
| 69 } |
| 70 } |
| 71 |
| 72 Invalidate(); |
| 73 } |
| 74 |
| 75 |
| 76 void LiveRangeConflictIterator::IncrementPosAndSkipOverRepetitions() { |
| 77 auto end = intervals_->end(); |
| 78 DCHECK(pos_ != end); |
| 79 LiveRange* current_conflict = Current(); |
| 80 while (pos_ != end && pos_->range_ == current_conflict) { |
| 81 ++pos_; |
| 82 } |
| 83 } |
| 84 |
| 85 |
| 86 LiveRange* LiveRangeConflictIterator::InternalGetNext(bool clean_behind) { |
| 87 if (IsFinished()) return nullptr; |
| 88 |
| 89 LiveRange* to_clear = Current(); |
| 90 IncrementPosAndSkipOverRepetitions(); |
| 91 // At this point, pos_ is either at the end, or on an interval that doesn't |
| 92 // correspond to the same range as to_clear. This interval may not even be |
| 93 // a conflict. |
| 94 if (clean_behind) { |
| 95 // Since we parked pos_ on an iterator that won't be affected by removal, |
| 96 // we can safely delete to_clear's intervals. |
| 97 for (auto interval = to_clear->first_interval(); interval != nullptr; |
| 98 interval = interval->next()) { |
| 99 AllocatedInterval erase_key(interval->start(), interval->end(), nullptr); |
| 100 intervals_->erase(erase_key); |
| 101 } |
| 102 } |
| 103 // We may have parked pos_ at the end, or on a non-conflict. In that case, |
| 104 // move to the next query and reinitialize pos and query. This may invalidate |
| 105 // the iterator, if no more conflicts are available. |
| 106 if (!QueryIntersectsAllocatedInterval()) { |
| 107 query_ = query_->next(); |
| 108 MovePosAndQueryToFirstConflict(); |
| 109 } |
| 110 return Current(); |
| 111 } |
| 112 |
| 113 |
| 114 LiveRangeConflictIterator CoalescedLiveRanges::GetConflicts( |
| 115 const LiveRange* range) { |
| 116 return LiveRangeConflictIterator(range, &intervals()); |
| 117 } |
| 118 |
| 119 |
| 120 void CoalescedLiveRanges::AllocateRange(LiveRange* range) { |
| 121 for (auto interval = range->first_interval(); interval != nullptr; |
| 122 interval = interval->next()) { |
| 123 AllocatedInterval to_insert(interval->start(), interval->end(), range); |
| 124 intervals().insert(to_insert); |
| 125 } |
| 126 } |
| 127 |
| 128 |
| 129 bool CoalescedLiveRanges::VerifyAllocationsAreValidForTesting() const { |
| 130 LifetimePosition last_end = LifetimePosition::GapFromInstructionIndex(0); |
| 131 for (auto i : intervals_) { |
| 132 if (i.start_ < last_end) { |
| 133 return false; |
| 134 } |
| 135 last_end = i.end_; |
| 136 } |
| 137 return true; |
| 138 } |
| 139 |
| 140 |
| 146 } // namespace compiler | 141 } // namespace compiler |
| 147 } // namespace internal | 142 } // namespace internal |
| 148 } // namespace v8 | 143 } // namespace v8 |
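
Usage sketch (not part of the CL): the diff replaces the old `GetFirstConflict()` scan and its weight bookkeeping with a `LiveRangeConflictIterator` that walks every conflict of a query range and can erase the conflict it just reported. Below is a minimal sketch of how a caller such as the greedy allocator might drive it, assuming the header exposes `GetNext()` and `RemoveCurrentAndGetNext()` as thin wrappers over `InternalGetNext(false)` and `InternalGetNext(true)`; those wrapper names and the `EvictConflictsSketch` helper are illustrative assumptions, not code from this change.

```cpp
// Illustrative sketch only; nothing below is part of this CL. Assumed
// context: compiled next to the greedy allocator inside
// v8::internal::compiler, with GetNext() / RemoveCurrentAndGetNext()
// assumed to forward to InternalGetNext(false) / InternalGetNext(true).
#include "src/compiler/coalesced-live-ranges.h"
#include "src/compiler/greedy-allocator.h"
#include "src/compiler/register-allocator.h"

namespace v8 {
namespace internal {
namespace compiler {

// Hypothetical helper: evict every live range that conflicts with
// |candidate| on the register this CoalescedLiveRanges models, and hand
// the evicted ranges back to the scheduler for another allocation attempt.
void EvictConflictsSketch(CoalescedLiveRanges* occupancy, LiveRange* candidate,
                          AllocationScheduler* scheduler) {
  LiveRangeConflictIterator conflicts = occupancy->GetConflicts(candidate);
  // Current() yields the first conflict (or nullptr if there is none);
  // RemoveCurrentAndGetNext() advances past all intervals of the current
  // conflict, erases them from the shared interval store, and returns the
  // next conflicting range, so erasing never invalidates the iterator.
  for (LiveRange* conflict = conflicts.Current(); conflict != nullptr;
       conflict = conflicts.RemoveCurrentAndGetNext()) {
    DCHECK(conflict->HasRegisterAssigned());
    conflict->UnsetAssignedRegister();
    scheduler->Schedule(conflict);
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8
```

The reason deletion while iterating is safe is visible in `InternalGetNext(bool clean_behind)`: the iterator first parks `pos_` past every interval belonging to the range being cleared and only then erases that range's intervals, mirroring the "bypass successive intervals belonging to the same range" trick the old `EvictAndRescheduleConflicts` used.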