// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COALESCED_LIVE_RANGES_H_
#define V8_COALESCED_LIVE_RANGES_H_

#include "src/compiler/register-allocator.h"
#include "src/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

// A LiveRange with a calculated weight.
// TODO(mtrofin): we may be calculating the weight multiple times. Consider
  [Benedikt Meurer, 2015/06/25 04:41:06]
  I'd prefer to have it on the LiveRange instead. Th…

  [Mircea Trofin, 2015/06/25 23:06:22]
  Done.
// moving to LiveRange as a member.
struct AllocatedRange {
  [titzer, 2015/06/24 23:00:07]
  WeightedLiveRange? Should be a class if it has pri…

  [Mircea Trofin, 2015/06/25 23:06:22]
  Removed the type, by virtue of moving weight to LiveRange.
  AllocatedRange(LiveRange* range, float weight)
      : range_(range), weight_(weight) {}

  LiveRange* live_range() const { return range_; }
  float weight() const { return weight_; }
  static AllocatedRange Invalid() { return AllocatedRange(); }

 private:
  friend class CoalescedLiveRanges;
  AllocatedRange() : range_(nullptr), weight_(0.0) {}
  LiveRange* range_;
  float weight_;
};


class AllocationScheduler;

// Collection of live ranges allocated to the same register.
// It supports efficiently finding all conflicts for a given, non-allocated
// range. See AllocatedInterval.
// Allocated live ranges do not intersect. At most, individual use intervals
// touch. We store, for a live range, an AllocatedInterval corresponding to
// each of that range's UseIntervals. We keep the list of AllocatedIntervals
// sorted by starts. Then, given the non-intersecting property, we know that
// consecutive AllocatedIntervals have the property that the "smaller"'s end
// is less than or equal to the "larger"'s start.
// This allows for quick (logarithmic complexity) identification of the first
// AllocatedInterval to conflict with a given LiveRange, and then for
// efficient traversal of conflicts.
class CoalescedLiveRanges : public ZoneObject {
 public:
  explicit CoalescedLiveRanges(Zone* zone) : storage_(zone) {}
  void clear() { storage_.clear(); }

  bool empty() const { return storage_.empty(); }

  // Returns kInvalidWeight if there are no conflicts, or the largest weight
  // of a range conflicting with the given range.
  float GetMaximumConflictingWeight(const LiveRange* range) const;

  // Evicts all conflicts of the given range, and reschedules them with the
  // provided scheduler.
  void EvictAndRescheduleConflicts(LiveRange* range,
                                   AllocationScheduler* scheduler);

  // Allocates a range with a pre-calculated candidate weight.
  void AllocateRange(LiveRange* range, float weight);

  // Allocate using maximum weight - fixed ranges cannot be re-allocated,
  // and thus must always win the weight comparisons.
  void AllocateFixedRange(LiveRange* range);

  // TODO(mtrofin): remove this in favor of comprehensive unit tests.
  bool VerifyAllocationsAreValid() const;

  static const float kMaxWeight;
  static const float kAllocatedRangeMultiplier;
  static const float kInvalidWeight;

 private:
  // Storage detail for CoalescedLiveRanges.
  struct AllocatedInterval {
    LifetimePosition start;
    LifetimePosition end;
    AllocatedRange range;
    bool operator<(const AllocatedInterval& other) const {
      return start < other.start;
    }
  };
  typedef ZoneSet<AllocatedInterval> IntervalStore;
  typedef IntervalStore::const_iterator interval_iterator;

  IntervalStore& storage() { return storage_; }
  const IntervalStore& storage() const { return storage_; }

  // Augment the weight of a range that is about to be allocated.
  static float GetAllocatedRangeWeight(float weight);

  // Intersection utilities.
  static bool Intersects(LifetimePosition a_start, LifetimePosition a_end,
                         LifetimePosition b_start, LifetimePosition b_end) {
    return a_start < b_end && b_start < a_end;
  }
  static AllocatedInterval AsAllocatedInterval(LifetimePosition pos) {
    return {pos, LifetimePosition::Invalid(), AllocatedRange::Invalid()};
  }

  bool QueryIntersectsAllocatedInterval(const UseInterval* query,
                                        interval_iterator& pos) const {
    DCHECK(query != nullptr);
    return pos != storage().end() &&
           Intersects(query->start(), query->end(), pos->start, pos->end);
  }

  void Remove(LiveRange* range);

  // Get the first interval intersecting query. Since the intervals are
  // sorted, subsequent intervals intersecting query follow.
  interval_iterator GetFirstConflict(const UseInterval* query) const;

  IntervalStore storage_;
  DISALLOW_COPY_AND_ASSIGN(CoalescedLiveRanges);
};


}  // namespace compiler
}  // namespace internal
}  // namespace v8
#endif  // V8_COALESCED_LIVE_RANGES_H_
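As a concrete illustration of the scheme the class comment describes, non-overlapping intervals kept sorted by start, a logarithmic search for the first conflicting interval, and eviction decided by comparing a candidate's weight against the largest conflicting weight (with fixed ranges pinned at the maximum weight so they always win), here is a minimal, self-contained sketch in plain C++. It uses std::set and integer positions instead of V8's ZoneSet and LifetimePosition; Interval, FirstConflict, MaxConflictingWeight, and the constant values are illustrative names and choices, not the implementation behind this header.

// --------------------------------------------------------------------------
// Illustrative sketch only; not part of coalesced-live-ranges.h.
// --------------------------------------------------------------------------
#include <cfloat>
#include <cstdio>
#include <iterator>
#include <set>

// One allocated interval, half-open [start, end), tagged with the weight of
// the range it came from. Stored intervals never overlap (the invariant the
// class comment relies on), so ordering by start alone is enough.
struct Interval {
  int start;
  int end;
  float weight;
  bool operator<(const Interval& other) const { return start < other.start; }
};

using IntervalSet = std::set<Interval>;

constexpr float kInvalidWeight = -1.0f;  // "no conflict" sentinel
constexpr float kMaxWeight = FLT_MAX;    // fixed ranges always win comparisons

// First stored interval intersecting [q_start, q_end), or end() if none.
// Mirrors the idea behind GetFirstConflict: binary-search the sorted starts,
// then check the single predecessor that might still cover q_start.
IntervalSet::const_iterator FirstConflict(const IntervalSet& allocated,
                                          int q_start, int q_end) {
  auto it = allocated.lower_bound(Interval{q_start, q_start, 0.0f});
  if (it != allocated.begin()) {
    auto prev = std::prev(it);
    if (prev->end > q_start) return prev;  // predecessor still covers q_start
  }
  if (it != allocated.end() && it->start < q_end) return it;
  return allocated.end();
}

// Largest weight among intervals conflicting with [q_start, q_end), or
// kInvalidWeight if there is no conflict. Because the intervals are sorted
// and non-overlapping, all conflicts follow the first one contiguously.
float MaxConflictingWeight(const IntervalSet& allocated, int q_start,
                           int q_end) {
  float max_weight = kInvalidWeight;
  for (auto it = FirstConflict(allocated, q_start, q_end);
       it != allocated.end() && it->start < q_end; ++it) {
    if (it->weight > max_weight) max_weight = it->weight;
  }
  return max_weight;
}

int main() {
  IntervalSet allocated = {{0, 10, 1.0f}, {10, 20, 3.0f}, {25, 30, kMaxWeight}};
  // A candidate covering [8, 27) conflicts with all three intervals; the
  // fixed one pins the maximum, so no finite candidate weight can evict it.
  float conflict = MaxConflictingWeight(allocated, 8, 27);
  std::printf("max conflicting weight: %g\n", conflict);
  std::printf("candidate with weight 2.5 may evict: %s\n",
              (conflict != kInvalidWeight && 2.5f > conflict) ? "yes" : "no");
  return 0;
}

In the real allocator the weight itself comes from elsewhere (and, judging by GetAllocatedRangeWeight and kAllocatedRangeMultiplier above, is presumably augmented once a range is actually allocated); the sketch simply treats it as given.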