OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/greedy-allocator.h" | 5 #include "src/compiler/greedy-allocator.h" |
6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
11 | 11 |
12 | 12 |
13 #define TRACE(...) \ | 13 #define TRACE(...) \ |
14 do { \ | 14 do { \ |
15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ | 15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
16 } while (false) | 16 } while (false) |
17 | 17 |
18 | 18 |
19 const float GreedyAllocator::kAllocatedRangeMultiplier = 10.0; | 19 const float GreedyAllocator::kAllocatedRangeMultiplier = 10.0; |
20 | 20 |
21 | 21 |
22 namespace { | 22 namespace { |
23 | 23 |
24 | 24 |
25 void UpdateOperands(LiveRange* range, RegisterAllocationData* data) { | 25 void UpdateOperands(TopLevelLiveRange* range, RegisterAllocationData* data) { |
26 int reg_id = range->assigned_register(); | 26 int reg_id = range->assigned_register(); |
27 range->SetUseHints(reg_id); | 27 range->SetUseHints(reg_id); |
28 if (range->is_phi()) { | 28 if (range->is_phi()) { |
29 data->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id); | 29 data->GetPhiMapValueFor(range)->set_assigned_register(reg_id); |
30 } | 30 } |
31 } | 31 } |
32 | 32 |
33 | 33 |
34 LiveRange* Split(LiveRange* range, RegisterAllocationData* data, | 34 LiveRange* Split(LiveRange* range, RegisterAllocationData* data, |
35 LifetimePosition pos) { | 35 LifetimePosition pos) { |
36 DCHECK(range->Start() < pos && pos < range->End()); | 36 DCHECK(range->Start() < pos && pos < range->End()); |
37 DCHECK(pos.IsStart() || pos.IsGapPosition() || | 37 DCHECK(pos.IsStart() || pos.IsGapPosition() || |
38 (data->code() | 38 (data->code() |
39 ->GetInstructionBlock(pos.ToInstructionIndex()) | 39 ->GetInstructionBlock(pos.ToInstructionIndex()) |
40 ->last_instruction_index() != pos.ToInstructionIndex())); | 40 ->last_instruction_index() != pos.ToInstructionIndex())); |
41 LiveRange* result = data->NewChildRangeFor(range); | 41 LiveRange* result = range->SplitAt(pos, data->allocation_zone()); |
42 range->SplitAt(pos, result, data->allocation_zone()); | |
43 return result; | 42 return result; |
44 } | 43 } |
45 | 44 |
46 | 45 |
47 // TODO(mtrofin): explain why splitting in gap START is always OK. | 46 // TODO(mtrofin): explain why splitting in gap START is always OK. |
48 LifetimePosition GetSplitPositionForInstruction(const LiveRange* range, | 47 LifetimePosition GetSplitPositionForInstruction(const LiveRange* range, |
49 const InstructionSequence* code, | 48 const InstructionSequence* code, |
50 int instruction_index) { | 49 int instruction_index) { |
51 LifetimePosition ret = LifetimePosition::Invalid(); | 50 LifetimePosition ret = LifetimePosition::Invalid(); |
52 | 51 |
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
110 | 109 |
111 AllocationCandidate AllocationScheduler::GetNext() { | 110 AllocationCandidate AllocationScheduler::GetNext() { |
112 DCHECK(!queue_.empty()); | 111 DCHECK(!queue_.empty()); |
113 AllocationCandidate ret = queue_.top(); | 112 AllocationCandidate ret = queue_.top(); |
114 queue_.pop(); | 113 queue_.pop(); |
115 return ret; | 114 return ret; |
116 } | 115 } |
117 | 116 |
118 | 117 |
119 void AllocationScheduler::Schedule(LiveRange* range) { | 118 void AllocationScheduler::Schedule(LiveRange* range) { |
120 TRACE("Scheduling live range %d.\n", range->id()); | 119 TRACE("Scheduling live range %d:%d.\n", range->TopLevel()->vreg(), |
| 120 range->relative_id()); |
121 queue_.push(AllocationCandidate(range)); | 121 queue_.push(AllocationCandidate(range)); |
122 } | 122 } |
123 | 123 |
124 GreedyAllocator::GreedyAllocator(RegisterAllocationData* data, | 124 GreedyAllocator::GreedyAllocator(RegisterAllocationData* data, |
125 RegisterKind kind, Zone* local_zone) | 125 RegisterKind kind, Zone* local_zone) |
126 : RegisterAllocator(data, kind), | 126 : RegisterAllocator(data, kind), |
127 local_zone_(local_zone), | 127 local_zone_(local_zone), |
128 allocations_(local_zone), | 128 allocations_(local_zone), |
129 scheduler_(local_zone) {} | 129 scheduler_(local_zone) {} |
130 | 130 |
131 | 131 |
132 void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) { | 132 void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) { |
133 TRACE("Assigning register %s to live range %d\n", RegisterName(reg_id), | 133 TRACE("Assigning register %s to live range %d:%d\n", RegisterName(reg_id), |
134 range->id()); | 134 range->TopLevel()->vreg(), range->relative_id()); |
135 | 135 |
136 DCHECK(!range->HasRegisterAssigned()); | 136 DCHECK(!range->HasRegisterAssigned()); |
137 | 137 |
138 AllocateRegisterToRange(reg_id, range); | 138 AllocateRegisterToRange(reg_id, range); |
139 | 139 |
140 TRACE("Assigning %s to range %d\n", RegisterName(reg_id), range->id()); | 140 TRACE("Assigning %s to range %d:%d.\n", RegisterName(reg_id), |
| 141 range->TopLevel()->vreg(), range->relative_id()); |
141 range->set_assigned_register(reg_id); | 142 range->set_assigned_register(reg_id); |
142 } | 143 } |
143 | 144 |
144 | 145 |
145 void GreedyAllocator::PreallocateFixedRanges() { | 146 void GreedyAllocator::PreallocateFixedRanges() { |
146 allocations_.resize(num_registers()); | 147 allocations_.resize(num_registers()); |
147 for (int i = 0; i < num_registers(); i++) { | 148 for (int i = 0; i < num_registers(); i++) { |
148 allocations_[i] = new (local_zone()) CoalescedLiveRanges(local_zone()); | 149 allocations_[i] = new (local_zone()) CoalescedLiveRanges(local_zone()); |
149 } | 150 } |
150 | 151 |
151 for (LiveRange* fixed_range : GetFixedRegisters()) { | 152 for (LiveRange* fixed_range : GetFixedRegisters()) { |
152 if (fixed_range != nullptr) { | 153 if (fixed_range != nullptr) { |
153 DCHECK_EQ(mode(), fixed_range->kind()); | 154 DCHECK_EQ(mode(), fixed_range->kind()); |
154 DCHECK(fixed_range->IsFixed()); | 155 DCHECK(fixed_range->TopLevel()->IsFixed()); |
155 | 156 |
156 int reg_nr = fixed_range->assigned_register(); | 157 int reg_nr = fixed_range->assigned_register(); |
157 EnsureValidRangeWeight(fixed_range); | 158 EnsureValidRangeWeight(fixed_range); |
158 AllocateRegisterToRange(reg_nr, fixed_range); | 159 AllocateRegisterToRange(reg_nr, fixed_range); |
159 } | 160 } |
160 } | 161 } |
161 } | 162 } |
162 | 163 |
163 | 164 |
164 void GreedyAllocator::ScheduleAllocationCandidates() { | 165 void GreedyAllocator::ScheduleAllocationCandidates() { |
165 for (auto range : data()->live_ranges()) { | 166 for (auto range : data()->live_ranges()) { |
166 if (CanProcessRange(range) && !range->spilled()) { | 167 if (CanProcessRange(range) && !range->spilled()) { |
167 scheduler().Schedule(range); | 168 scheduler().Schedule(range); |
168 } | 169 } |
169 } | 170 } |
170 } | 171 } |
171 | 172 |
172 | 173 |
173 void GreedyAllocator::TryAllocateCandidate( | 174 void GreedyAllocator::TryAllocateCandidate( |
174 const AllocationCandidate& candidate) { | 175 const AllocationCandidate& candidate) { |
175 // At this point, this is just a live range. TODO: groups. | 176 // At this point, this is just a live range. TODO: groups. |
176 TryAllocateLiveRange(candidate.live_range()); | 177 TryAllocateLiveRange(candidate.live_range()); |
177 } | 178 } |
178 | 179 |
179 | 180 |
180 void GreedyAllocator::TryAllocateLiveRange(LiveRange* range) { | 181 void GreedyAllocator::TryAllocateLiveRange(LiveRange* range) { |
181 // TODO(mtrofin): once we introduce groups, we'll want to first try and | 182 // TODO(mtrofin): once we introduce groups, we'll want to first try and |
182 // allocate at the preferred register. | 183 // allocate at the preferred register. |
183 TRACE("Attempting to allocate live range %d\n", range->id()); | 184 TRACE("Attempting to allocate live range %d:%d.\n", range->TopLevel()->vreg(), |
| 185 range->relative_id()); |
184 int free_reg = -1; | 186 int free_reg = -1; |
185 int evictable_reg = -1; | 187 int evictable_reg = -1; |
186 EnsureValidRangeWeight(range); | 188 EnsureValidRangeWeight(range); |
187 DCHECK(range->weight() != LiveRange::kInvalidWeight); | 189 DCHECK(range->weight() != LiveRange::kInvalidWeight); |
188 | 190 |
189 float smallest_weight = LiveRange::kMaxWeight; | 191 float smallest_weight = LiveRange::kMaxWeight; |
190 | 192 |
191 // Seek either the first free register, or, from the set of registers | 193 // Seek either the first free register, or, from the set of registers |
192 // where the maximum conflict is lower than the candidate's weight, the one | 194 // where the maximum conflict is lower than the candidate's weight, the one |
193 // with the smallest such weight. | 195 // with the smallest such weight. |
194 for (int i = 0; i < num_registers(); i++) { | 196 for (int i = 0; i < num_registers(); i++) { |
195 float max_conflict_weight = GetMaximumConflictingWeight(i, range); | 197 float max_conflict_weight = GetMaximumConflictingWeight(i, range); |
196 if (max_conflict_weight == LiveRange::kInvalidWeight) { | 198 if (max_conflict_weight == LiveRange::kInvalidWeight) { |
197 free_reg = i; | 199 free_reg = i; |
198 break; | 200 break; |
199 } | 201 } |
200 if (max_conflict_weight < range->weight() && | 202 if (max_conflict_weight < range->weight() && |
201 max_conflict_weight < smallest_weight) { | 203 max_conflict_weight < smallest_weight) { |
202 smallest_weight = max_conflict_weight; | 204 smallest_weight = max_conflict_weight; |
203 evictable_reg = i; | 205 evictable_reg = i; |
204 } | 206 } |
205 } | 207 } |
206 | 208 |
207 // We have a free register, so we use it. | 209 // We have a free register, so we use it. |
208 if (free_reg >= 0) { | 210 if (free_reg >= 0) { |
209 TRACE("Found free register %s for live range %d\n", RegisterName(free_reg), | 211 TRACE("Found free register %s for live range %d:%d.\n", |
210 range->id()); | 212 RegisterName(free_reg), range->TopLevel()->vreg(), |
| 213 range->relative_id()); |
211 AssignRangeToRegister(free_reg, range); | 214 AssignRangeToRegister(free_reg, range); |
212 return; | 215 return; |
213 } | 216 } |
214 | 217 |
215 // We found a register to perform evictions, so we evict and allocate our | 218 // We found a register to perform evictions, so we evict and allocate our |
216 // candidate. | 219 // candidate. |
217 if (evictable_reg >= 0) { | 220 if (evictable_reg >= 0) { |
218 TRACE("Found evictable register %s for live range %d\n", | 221 TRACE("Found evictable register %s for live range %d:%d.\n", |
219 RegisterName(free_reg), range->id()); | 222 RegisterName(evictable_reg), range->TopLevel()->vreg(), |
| 223 range->relative_id()); |
220 EvictAndRescheduleConflicts(evictable_reg, range); | 224 EvictAndRescheduleConflicts(evictable_reg, range); |
221 AssignRangeToRegister(evictable_reg, range); | 225 AssignRangeToRegister(evictable_reg, range); |
222 return; | 226 return; |
223 } | 227 } |
224 | 228 |
225 // The range needs to be split or spilled. | 229 // The range needs to be split or spilled. |
226 SplitOrSpillBlockedRange(range); | 230 SplitOrSpillBlockedRange(range); |
227 } | 231 } |
228 | 232 |
229 | 233 |
230 void GreedyAllocator::EvictAndRescheduleConflicts(unsigned reg_id, | 234 void GreedyAllocator::EvictAndRescheduleConflicts(unsigned reg_id, |
231 const LiveRange* range) { | 235 const LiveRange* range) { |
232 auto conflicts = current_allocations(reg_id)->GetConflicts(range); | 236 auto conflicts = current_allocations(reg_id)->GetConflicts(range); |
233 for (LiveRange* conflict = conflicts.Current(); conflict != nullptr; | 237 for (LiveRange* conflict = conflicts.Current(); conflict != nullptr; |
234 conflict = conflicts.RemoveCurrentAndGetNext()) { | 238 conflict = conflicts.RemoveCurrentAndGetNext()) { |
235 DCHECK(conflict->HasRegisterAssigned()); | 239 DCHECK(conflict->HasRegisterAssigned()); |
236 CHECK(!conflict->IsFixed()); | 240 CHECK(!conflict->TopLevel()->IsFixed()); |
237 conflict->UnsetAssignedRegister(); | 241 conflict->UnsetAssignedRegister(); |
238 UpdateWeightAtEviction(conflict); | 242 UpdateWeightAtEviction(conflict); |
239 scheduler().Schedule(conflict); | 243 scheduler().Schedule(conflict); |
240 TRACE("Evicted range %d.\n", conflict->id()); | 244 TRACE("Evicted range %d:%d.\n", conflict->TopLevel()->vreg(), |
| 245 conflict->relative_id()); |
241 } | 246 } |
242 } | 247 } |
243 | 248 |
244 | 249 |
245 void GreedyAllocator::SplitAndSpillRangesDefinedByMemoryOperand() { | 250 void GreedyAllocator::SplitAndSpillRangesDefinedByMemoryOperand() { |
246 size_t initial_range_count = data()->live_ranges().size(); | 251 size_t initial_range_count = data()->live_ranges().size(); |
247 for (size_t i = 0; i < initial_range_count; ++i) { | 252 for (size_t i = 0; i < initial_range_count; ++i) { |
248 auto range = data()->live_ranges()[i]; | 253 auto range = data()->live_ranges()[i]; |
249 if (!CanProcessRange(range)) continue; | 254 if (!CanProcessRange(range)) continue; |
250 if (range->HasNoSpillType()) continue; | 255 if (range->HasNoSpillType()) continue; |
251 | 256 |
252 LifetimePosition start = range->Start(); | 257 LifetimePosition start = range->Start(); |
253 TRACE("Live range %d is defined by a spill operand.\n", range->id()); | 258 TRACE("Live range %d:%d is defined by a spill operand.\n", |
| 259 range->TopLevel()->vreg(), range->relative_id()); |
254 auto next_pos = start; | 260 auto next_pos = start; |
255 if (next_pos.IsGapPosition()) { | 261 if (next_pos.IsGapPosition()) { |
256 next_pos = next_pos.NextStart(); | 262 next_pos = next_pos.NextStart(); |
257 } | 263 } |
258 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | 264 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); |
259 // If the range already has a spill operand and it doesn't need a | 265 // If the range already has a spill operand and it doesn't need a |
260 // register immediately, split it and spill the first part of the range. | 266 // register immediately, split it and spill the first part of the range. |
261 if (pos == nullptr) { | 267 if (pos == nullptr) { |
262 Spill(range); | 268 Spill(range); |
263 } else if (pos->pos() > range->Start().NextStart()) { | 269 } else if (pos->pos() > range->Start().NextStart()) { |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
328 return ret; | 334 return ret; |
329 } | 335 } |
330 | 336 |
331 | 337 |
332 void GreedyAllocator::EnsureValidRangeWeight(LiveRange* range) { | 338 void GreedyAllocator::EnsureValidRangeWeight(LiveRange* range) { |
333 // The live range weight will be invalidated when ranges are created or split. | 339 // The live range weight will be invalidated when ranges are created or split. |
334 // Otherwise, it is consistently updated when the range is allocated or | 340 // Otherwise, it is consistently updated when the range is allocated or |
335 // unallocated. | 341 // unallocated. |
336 if (range->weight() != LiveRange::kInvalidWeight) return; | 342 if (range->weight() != LiveRange::kInvalidWeight) return; |
337 | 343 |
338 if (range->IsFixed()) { | 344 if (range->TopLevel()->IsFixed()) { |
339 range->set_weight(LiveRange::kMaxWeight); | 345 range->set_weight(LiveRange::kMaxWeight); |
340 return; | 346 return; |
341 } | 347 } |
342 if (!IsProgressPossible(range, code())) { | 348 if (!IsProgressPossible(range, code())) { |
343 range->set_weight(LiveRange::kMaxWeight); | 349 range->set_weight(LiveRange::kMaxWeight); |
344 return; | 350 return; |
345 } | 351 } |
346 | 352 |
347 float use_count = 0.0; | 353 float use_count = 0.0; |
348 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next()) { | 354 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next()) { |
(...skipping 23 matching lines...) Expand all Loading... |
372 scheduler().Schedule(range); | 378 scheduler().Schedule(range); |
373 return; | 379 return; |
374 } | 380 } |
375 SpillRangeAsLastResort(range); | 381 SpillRangeAsLastResort(range); |
376 } | 382 } |
377 | 383 |
378 | 384 |
379 } // namespace compiler | 385 } // namespace compiler |
380 } // namespace internal | 386 } // namespace internal |
381 } // namespace v8 | 387 } // namespace v8 |
OLD | NEW |