OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_GREEDY_ALLOCATOR_H_ | 5 #ifndef V8_GREEDY_ALLOCATOR_H_ |
6 #define V8_GREEDY_ALLOCATOR_H_ | 6 #define V8_GREEDY_ALLOCATOR_H_ |
7 | 7 |
8 #include "src/compiler/coalesced-live-ranges.h" | 8 #include "src/compiler/coalesced-live-ranges.h" |
9 #include "src/compiler/register-allocator.h" | 9 #include "src/compiler/register-allocator.h" |
10 #include "src/zone-containers.h" | 10 #include "src/zone-containers.h" |
11 | 11 |
12 namespace v8 { | 12 namespace v8 { |
13 namespace internal { | 13 namespace internal { |
14 namespace compiler { | 14 namespace compiler { |
15 | 15 |
16 | 16 |
17 // The object of allocation scheduling. At minimum, this is a LiveRange, but | 17 // The object of allocation scheduling. At minimum, this is a LiveRange, but |
18 // we may extend this to groups of LiveRanges. It has to be comparable. | 18 // we may extend this to groups of LiveRanges. It has to be comparable. |
19 class AllocationCandidate { | 19 class AllocationCandidate { |
20 public: | 20 public: |
21 explicit AllocationCandidate(LiveRange* range) : range_(range) {} | 21 explicit AllocationCandidate(LiveRange* range) |
| 22 : is_group_(false), size_(range->GetSize()) { |
| 23 candidate_.range_ = range; |
| 24 } |
| 25 |
| 26 explicit AllocationCandidate(LiveRangeGroup* ranges) |
| 27 : is_group_(true), size_(CalculateGroupSize(ranges)) { |
| 28 candidate_.group_ = ranges; |
| 29 } |
22 | 30 |
23 // Strict ordering operators | 31 // Strict ordering operators |
24 bool operator<(const AllocationCandidate& other) const { | 32 bool operator<(const AllocationCandidate& other) const { |
25 return range_->GetSize() < other.range_->GetSize(); | 33 return size() < other.size(); |
26 } | 34 } |
27 | 35 |
28 bool operator>(const AllocationCandidate& other) const { | 36 bool operator>(const AllocationCandidate& other) const { |
29 return range_->GetSize() > other.range_->GetSize(); | 37 return size() > other.size(); |
30 } | 38 } |
31 | 39 |
32 LiveRange* live_range() const { return range_; } | 40 bool is_group() const { return is_group_; } |
| 41 LiveRange* live_range() const { return candidate_.range_; } |
| 42 LiveRangeGroup* group() const { return candidate_.group_; } |
33 | 43 |
34 private: | 44 private: |
35 LiveRange* range_; | 45 unsigned CalculateGroupSize(LiveRangeGroup* group) { |
| 46 unsigned ret = 0; |
| 47 for (LiveRange* range : group->ranges()) { |
| 48 ret += range->GetSize(); |
| 49 } |
| 50 return ret; |
| 51 } |
| 52 |
| 53 unsigned size() const { return size_; } |
| 54 bool is_group_; |
| 55 unsigned size_; |
| 56 union { |
| 57 LiveRange* range_; |
| 58 LiveRangeGroup* group_; |
| 59 } candidate_; |
36 }; | 60 }; |
37 | 61 |
38 | 62 |
39 // Schedule processing (allocating) of AllocationCandidates. | 63 // Schedule processing (allocating) of AllocationCandidates. |
40 class AllocationScheduler final : ZoneObject { | 64 class AllocationScheduler final : ZoneObject { |
41 public: | 65 public: |
42 explicit AllocationScheduler(Zone* zone) : queue_(zone) {} | 66 explicit AllocationScheduler(Zone* zone) : queue_(zone) {} |
43 void Schedule(LiveRange* range); | 67 void Schedule(LiveRange* range); |
| 68 void Schedule(LiveRangeGroup* group); |
44 AllocationCandidate GetNext(); | 69 AllocationCandidate GetNext(); |
45 bool empty() const { return queue_.empty(); } | 70 bool empty() const { return queue_.empty(); } |
46 | 71 |
47 private: | 72 private: |
48 typedef ZonePriorityQueue<AllocationCandidate> ScheduleQueue; | 73 typedef ZonePriorityQueue<AllocationCandidate> ScheduleQueue; |
49 ScheduleQueue queue_; | 74 ScheduleQueue queue_; |
50 | 75 |
51 DISALLOW_COPY_AND_ASSIGN(AllocationScheduler); | 76 DISALLOW_COPY_AND_ASSIGN(AllocationScheduler); |
52 }; | 77 }; |
53 | 78 |
(...skipping 24 matching lines...) Expand all Loading... |
  // Scheduling state and the per-register conflict sets (indexed by
  // register id).
  AllocationScheduler& scheduler() { return scheduler_; }
  CoalescedLiveRanges* current_allocations(unsigned i) {
    return allocations_[i];
  }

  CoalescedLiveRanges* current_allocations(unsigned i) const {
    return allocations_[i];
  }

  Zone* local_zone() const { return local_zone_; }
  ZoneVector<LiveRangeGroup*>& groups() { return groups_; }
  const ZoneVector<LiveRangeGroup*>& groups() const { return groups_; }

  // Insert fixed ranges.
  void PreallocateFixedRanges();

  // Build the groups() vector of ranges to be allocated together.
  // NOTE(review): the grouping policy lives in the .cc file — confirm there.
  void GroupLiveRanges();

  // Schedule unassigned live ranges for allocation.
  void ScheduleAllocationCandidates();

  // Commit `range` to register `reg_id`: first update its weight (see
  // UpdateWeightAtAllocation, declared elsewhere in this class), then record
  // it in that register's conflict set. Order matters: the weight must be
  // final before the range is stored.
  void AllocateRegisterToRange(unsigned reg_id, LiveRange* range) {
    UpdateWeightAtAllocation(range);
    current_allocations(reg_id)->AllocateRange(range);
  }
  // Evict and reschedule conflicts of a given range, at a given register.
  void EvictAndRescheduleConflicts(unsigned reg_id, const LiveRange* range);

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand();

  // Dispatch on the candidate kind: a single live range or a group.
  void TryAllocateCandidate(const AllocationCandidate& candidate);
  void TryAllocateLiveRange(LiveRange* range);
  void TryAllocateGroup(LiveRangeGroup* group);

  // A range is handled by this allocator only if it is non-null, non-empty,
  // and its kind matches the mode this allocator instance was created for.
  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Calculate the weight of a candidate for allocation.
  void EnsureValidRangeWeight(LiveRange* range);

  // Calculate the new weight of a range that is about to be allocated.
  float GetAllocatedRangeWeight(float candidate_weight);

  // Returns kInvalidWeight if there are no conflicts, or the largest weight of
  // a range conflicting with the given range, at the given register.
  float GetMaximumConflictingWeight(unsigned reg_id,
                                    const LiveRange* range) const;

  // Group overload of the above: returns kInvalidWeight if there are no
  // conflicts, or the largest weight of a range conflicting with the given
  // group, at the given register.
  float GetMaximumConflictingWeight(unsigned reg_id,
                                    const LiveRangeGroup* group,
                                    float group_weight) const;

  // This is the extension point for splitting heuristics.
  void SplitOrSpillBlockedRange(LiveRange* range);

  // Find a good position where to fill, after a range was spilled after a call.
  LifetimePosition FindSplitPositionAfterCall(const LiveRange* range,
                                              int call_index);
  // Split a range around all calls it passes over. Returns true if any changes
  // were made, or false if no calls were found.
  bool TrySplitAroundCalls(LiveRange* range);
(...skipping 10 matching lines...) Expand all Loading... |
  // Presumably splits `range` around its first call site and returns the
  // piece left to retry for allocation — TODO(review): confirm against the
  // definition in the .cc file.
  LiveRange* GetRemainderAfterSplittingAroundFirstCall(LiveRange* range);

  // Necessary heuristic: spill when all else failed.
  void SpillRangeAsLastResort(LiveRange* range);

  // Final commit of a register assignment to a range.
  void AssignRangeToRegister(int reg_id, LiveRange* range);

  Zone* local_zone_;                              // allocator-local scratch zone
  ZoneVector<CoalescedLiveRanges*> allocations_;  // conflict set per register id
  AllocationScheduler scheduler_;
  ZoneVector<LiveRangeGroup*> groups_;            // populated by GroupLiveRanges()

  DISALLOW_COPY_AND_ASSIGN(GreedyAllocator);
};
157 } // namespace compiler | 194 } // namespace compiler |
158 } // namespace internal | 195 } // namespace internal |
159 } // namespace v8 | 196 } // namespace v8 |
160 #endif // V8_GREEDY_ALLOCATOR_H_ | 197 #endif // V8_GREEDY_ALLOCATOR_H_ |
OLD | NEW |