| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/greedy-allocator.h" | 5 #include "src/compiler/greedy-allocator.h" |
| 6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| 11 | 11 |
| 12 | 12 |
| 13 #define TRACE(...) \ | 13 #define TRACE(...) \ |
| 14 do { \ | 14 do { \ |
| 15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ | 15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
| 16 } while (false) | 16 } while (false) |
| 17 | 17 |
| 18 | 18 |
| 19 const float GreedyAllocator::kAllocatedRangeMultiplier = 10.0; | 19 const float GreedyAllocator::kAllocatedRangeMultiplier = 10.0; |
| 20 | 20 |
| 21 | 21 |
| 22 namespace { | 22 namespace { |
| 23 | 23 |
| 24 | 24 void UpdateOperands(LiveRange* range, RegisterAllocationData* data) { |
| 25 void UpdateOperands(TopLevelLiveRange* range, RegisterAllocationData* data) { | |
| 26 int reg_id = range->assigned_register(); | 25 int reg_id = range->assigned_register(); |
| 27 range->SetUseHints(reg_id); | 26 range->SetUseHints(reg_id); |
| 28 if (range->is_phi()) { | 27 if (range->IsTopLevel() && range->TopLevel()->is_phi()) { |
| 29 data->GetPhiMapValueFor(range)->set_assigned_register(reg_id); | 28 data->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg_id); |
| 30 } | 29 } |
| 31 } | 30 } |
| 32 | 31 |
| 33 | 32 |
| 34 LiveRange* Split(LiveRange* range, RegisterAllocationData* data, | 33 LiveRange* Split(LiveRange* range, RegisterAllocationData* data, |
| 35 LifetimePosition pos) { | 34 LifetimePosition pos) { |
| 36 DCHECK(range->Start() < pos && pos < range->End()); | 35 DCHECK(range->Start() < pos && pos < range->End()); |
| 37 DCHECK(pos.IsStart() || pos.IsGapPosition() || | 36 DCHECK(pos.IsStart() || pos.IsGapPosition() || |
| 38 (data->code() | 37 (data->code() |
| 39 ->GetInstructionBlock(pos.ToInstructionIndex()) | 38 ->GetInstructionBlock(pos.ToInstructionIndex()) |
| (...skipping 93 matching lines...) | |
| 133 TRACE("Assigning register %s to live range %d:%d\n", RegisterName(reg_id), | 132 TRACE("Assigning register %s to live range %d:%d\n", RegisterName(reg_id), |
| 134 range->TopLevel()->vreg(), range->relative_id()); | 133 range->TopLevel()->vreg(), range->relative_id()); |
| 135 | 134 |
| 136 DCHECK(!range->HasRegisterAssigned()); | 135 DCHECK(!range->HasRegisterAssigned()); |
| 137 | 136 |
| 138 AllocateRegisterToRange(reg_id, range); | 137 AllocateRegisterToRange(reg_id, range); |
| 139 | 138 |
| 140 TRACE("Assigning %s to range %d%d.\n", RegisterName(reg_id), | 139 TRACE("Assigning %s to range %d%d.\n", RegisterName(reg_id), |
| 141 range->TopLevel()->vreg(), range->relative_id()); | 140 range->TopLevel()->vreg(), range->relative_id()); |
| 142 range->set_assigned_register(reg_id); | 141 range->set_assigned_register(reg_id); |
| | 142 UpdateOperands(range, data()); |
| 143 } | 143 } |
| 144 | 144 |
| 145 | 145 |
| 146 void GreedyAllocator::PreallocateFixedRanges() { | 146 void GreedyAllocator::PreallocateFixedRanges() { |
| 147 allocations_.resize(num_registers()); | 147 allocations_.resize(num_registers()); |
| 148 for (int i = 0; i < num_registers(); i++) { | 148 for (int i = 0; i < num_registers(); i++) { |
| 149 allocations_[i] = new (local_zone()) CoalescedLiveRanges(local_zone()); | 149 allocations_[i] = new (local_zone()) CoalescedLiveRanges(local_zone()); |
| 150 } | 150 } |
| 151 | 151 |
| 152 for (LiveRange* fixed_range : GetFixedRegisters()) { | 152 for (LiveRange* fixed_range : GetFixedRegisters()) { |
| (...skipping 29 matching lines...) | |
| 182 } | 182 } |
| 183 | 183 |
| 184 | 184 |
| 185 void GreedyAllocator::TryAllocateLiveRange(LiveRange* range) { | 185 void GreedyAllocator::TryAllocateLiveRange(LiveRange* range) { |
| 186 // TODO(mtrofin): once we introduce groups, we'll want to first try and | 186 // TODO(mtrofin): once we introduce groups, we'll want to first try and |
| 187 // allocate at the preferred register. | 187 // allocate at the preferred register. |
| 188 TRACE("Attempting to allocate live range %d:%d.\n", range->TopLevel()->vreg(), | 188 TRACE("Attempting to allocate live range %d:%d.\n", range->TopLevel()->vreg(), |
| 189 range->relative_id()); | 189 range->relative_id()); |
| 190 int free_reg = -1; | 190 int free_reg = -1; |
| 191 int evictable_reg = -1; | 191 int evictable_reg = -1; |
| | 192 int hinted_reg = -1; |
| | 193 |
| 192 EnsureValidRangeWeight(range); | 194 EnsureValidRangeWeight(range); |
| 193 DCHECK(range->weight() != LiveRange::kInvalidWeight); | 195 DCHECK(range->weight() != LiveRange::kInvalidWeight); |
| 194 | 196 |
| 195 float smallest_weight = LiveRange::kMaxWeight; | 197 // Can we allocate at the hinted register? |
| 196 | 198 if (range->FirstHintPosition(&hinted_reg) != nullptr) { |
| 197 // Seek either the first free register, or, from the set of registers | 199 DCHECK(hinted_reg >= 0); |
| 198 // where the maximum conflict is lower than the candidate's weight, the one | 200 float max_conflict_weight = GetMaximumConflictingWeight(hinted_reg, range); |
| 199 // with the smallest such weight. | |
| 200 for (int i = 0; i < num_registers(); i++) { | |
| 201 float max_conflict_weight = GetMaximumConflictingWeight(i, range); | |
| 202 if (max_conflict_weight == LiveRange::kInvalidWeight) { | 201 if (max_conflict_weight == LiveRange::kInvalidWeight) { |
| 203 free_reg = i; | 202 free_reg = hinted_reg; |
| 204 break; | 203 } else if (max_conflict_weight < range->weight()) { |
| 205 } | 204 evictable_reg = hinted_reg; |
| 206 if (max_conflict_weight < range->weight() && | |
| 207 max_conflict_weight < smallest_weight) { | |
| 208 smallest_weight = max_conflict_weight; | |
| 209 evictable_reg = i; | |
| 210 } | 205 } |
| 211 } | 206 } |
| 212 | 207 |
| | 208 if (free_reg < 0 && evictable_reg < 0) { |
| | 209 // There was no hinted reg, or we cannot allocate there. |
| | 210 float smallest_weight = LiveRange::kMaxWeight; |
| | 211 |
| | 212 // Seek either the first free register, or, from the set of registers |
| | 213 // where the maximum conflict is lower than the candidate's weight, the one |
| | 214 // with the smallest such weight. |
| | 215 for (int i = 0; i < num_registers(); i++) { |
| | 216 // Skip unnecessarily re-visiting the hinted register, if any. |
| | 217 if (i == hinted_reg) continue; |
| | 218 float max_conflict_weight = GetMaximumConflictingWeight(i, range); |
| | 219 if (max_conflict_weight == LiveRange::kInvalidWeight) { |
| | 220 free_reg = i; |
| | 221 break; |
| | 222 } |
| | 223 if (max_conflict_weight < range->weight() && |
| | 224 max_conflict_weight < smallest_weight) { |
| | 225 smallest_weight = max_conflict_weight; |
| | 226 evictable_reg = i; |
| | 227 } |
| | 228 } |
| | 229 } |
| | 230 |
| 213 // We have a free register, so we use it. | 231 // We have a free register, so we use it. |
| 214 if (free_reg >= 0) { | 232 if (free_reg >= 0) { |
| 215 TRACE("Found free register %s for live range %d:%d.\n", | 233 TRACE("Found free register %s for live range %d:%d.\n", |
| 216 RegisterName(free_reg), range->TopLevel()->vreg(), | 234 RegisterName(free_reg), range->TopLevel()->vreg(), |
| 217 range->relative_id()); | 235 range->relative_id()); |
| 218 AssignRangeToRegister(free_reg, range); | 236 AssignRangeToRegister(free_reg, range); |
| 219 return; | 237 return; |
| 220 } | 238 } |
| 221 | 239 |
| 222 // We found a register to perform evictions, so we evict and allocate our | 240 // We found a register to perform evictions, so we evict and allocate our |
| (...skipping 74 matching lines...) | |
| 297 | 315 |
| 298 SplitAndSpillRangesDefinedByMemoryOperand(); | 316 SplitAndSpillRangesDefinedByMemoryOperand(); |
| 299 PreallocateFixedRanges(); | 317 PreallocateFixedRanges(); |
| 300 ScheduleAllocationCandidates(); | 318 ScheduleAllocationCandidates(); |
| 301 | 319 |
| 302 while (!scheduler().empty()) { | 320 while (!scheduler().empty()) { |
| 303 AllocationCandidate candidate = scheduler().GetNext(); | 321 AllocationCandidate candidate = scheduler().GetNext(); |
| 304 TryAllocateCandidate(candidate); | 322 TryAllocateCandidate(candidate); |
| 305 } | 323 } |
| 306 | 324 |
| 307 | |
| 308 // We do not rely on the hint mechanism used by LinearScan, so no need to | |
| 309 // actively update/reset operands until the end. | |
| 310 for (auto range : data()->live_ranges()) { | |
| 311 if (CanProcessRange(range) && range->HasRegisterAssigned()) { | |
| 312 UpdateOperands(range, data()); | |
| 313 } | |
| 314 } | |
| 315 | |
| 316 for (size_t i = 0; i < allocations_.size(); ++i) { | 325 for (size_t i = 0; i < allocations_.size(); ++i) { |
| 317 if (!allocations_[i]->empty()) { | 326 if (!allocations_[i]->empty()) { |
| 318 data()->MarkAllocated(mode(), static_cast<int>(i)); | 327 data()->MarkAllocated(mode(), static_cast<int>(i)); |
| 319 } | 328 } |
| 320 } | 329 } |
| 321 allocations_.clear(); | 330 allocations_.clear(); |
| 322 | 331 |
| 323 TRACE("End allocating function %s with the Greedy Allocator\n", | 332 TRACE("End allocating function %s with the Greedy Allocator\n", |
| 324 data()->debug_name()); | 333 data()->debug_name()); |
| 325 } | 334 } |
| (...skipping 58 matching lines...) | |
| 384 scheduler().Schedule(range); | 393 scheduler().Schedule(range); |
| 385 return; | 394 return; |
| 386 } | 395 } |
| 387 SpillRangeAsLastResort(range); | 396 SpillRangeAsLastResort(range); |
| 388 } | 397 } |
| 389 | 398 |
| 390 | 399 |
| 391 } // namespace compiler | 400 } // namespace compiler |
| 392 } // namespace internal | 401 } // namespace internal |
| 393 } // namespace v8 | 402 } // namespace v8 |
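For reference, the core behavioral change in TryAllocateLiveRange is that the hinted register is consulted before the full register scan. Below is a minimal, self-contained sketch of that selection order, assuming a caller-supplied conflict-weight oracle; PickRegister, PickResult, and kInvalidWeight are hypothetical names for illustration, not part of V8's API.

```cpp
#include <functional>
#include <limits>

namespace sketch {

// Marker meaning "no conflicting allocation at this register" (an assumption
// of this sketch, standing in for LiveRange::kInvalidWeight).
constexpr float kInvalidWeight = -1.0f;

struct PickResult {
  int free_reg = -1;       // register with no conflicting allocation
  int evictable_reg = -1;  // register whose conflicts are all lighter than ours
};

// conflict_weight(reg) returns kInvalidWeight when reg is free, otherwise the
// heaviest weight among the ranges already occupying reg.
PickResult PickRegister(int num_registers, int hinted_reg, float range_weight,
                        const std::function<float(int)>& conflict_weight) {
  PickResult result;

  // 1) Try the hinted register first, if any.
  if (hinted_reg >= 0) {
    float w = conflict_weight(hinted_reg);
    if (w == kInvalidWeight) {
      result.free_reg = hinted_reg;
    } else if (w < range_weight) {
      result.evictable_reg = hinted_reg;
    }
  }

  // 2) Otherwise fall back to scanning all registers: take the first free one,
  //    or the evictable one with the smallest maximum conflicting weight.
  if (result.free_reg < 0 && result.evictable_reg < 0) {
    float smallest_weight = std::numeric_limits<float>::max();
    for (int i = 0; i < num_registers; ++i) {
      if (i == hinted_reg) continue;  // already examined above
      float w = conflict_weight(i);
      if (w == kInvalidWeight) {
        result.free_reg = i;
        break;
      }
      if (w < range_weight && w < smallest_weight) {
        smallest_weight = w;
        result.evictable_reg = i;
      }
    }
  }
  return result;
}

}  // namespace sketch
```

Checking the hint first lets the greedy allocator pick up the move-elision benefit of hints without paying for a full scan when the hint is usable; the fallback loop is the same first-free-or-cheapest-to-evict search as before, merely skipping the already-examined hinted register.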
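A second small sketch, using hypothetical toy types rather than V8's classes: the operand update now runs directly from AssignRangeToRegister instead of in a final pass over all live ranges, so it must accept child ranges and only touch the phi map when handed a top-level phi range.

```cpp
#include <unordered_map>

namespace sketch {

struct ToyRange {
  int assigned_register = -1;
  ToyRange* top_level = nullptr;  // points to itself for a top-level range
  bool is_phi = false;
  bool IsTopLevel() const { return top_level == this; }
};

struct ToyPhiMapValue {
  int assigned_register = -1;
};

// Called right after a register is assigned (the analogue of calling
// UpdateOperands from AssignRangeToRegister). Child ranges skip the phi map;
// only a top-level phi range records its register there. The use-hint
// propagation done by the real code is elided here.
void UpdateOperandsEagerly(
    ToyRange* range,
    std::unordered_map<ToyRange*, ToyPhiMapValue>* phi_map) {
  int reg_id = range->assigned_register;
  if (range->IsTopLevel() && range->is_phi) {
    (*phi_map)[range].assigned_register = reg_id;
  }
}

}  // namespace sketch
```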