OLD | NEW |
(Empty) | |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "src/compiler/greedy-allocator.h" |
| 6 #include "src/compiler/register-allocator.h" |
| 7 |
| 8 namespace v8 { |
| 9 namespace internal { |
| 10 namespace compiler { |
| 11 |
| 12 #define TRACE(...) \ |
| 13 do { \ |
| 14 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
| 15 } while (false) |
| 16 |
| 17 |
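| // Tracks the set of live ranges assigned to one register. Each range's use
| // intervals are stored in a splay tree keyed by [start, end), so Find() can
| // locate a previously inserted range that overlaps a given interval.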
| 18 class CoalescedLiveRanges : public ZoneObject { |
| 19 public: |
| 20 explicit CoalescedLiveRanges(Zone* zone) : storage_(zone) {} |
| 21 |
| 22 LiveRange* Find(UseInterval* query) { |
| 23 ZoneSplayTree<Config>::Locator locator; |
| 24 |
| 25 if (storage_.Find(GetKey(query), &locator)) { |
| 26 return locator.value(); |
| 27 } |
| 28 return nullptr; |
| 29 } |
| 30 |
| 31 // TODO(mtrofin): Change to return void if we do not care whether the
| 32 // interval was previously added.
| 33 bool Insert(LiveRange* range) { |
| 34 auto* interval = range->first_interval(); |
| 35 while (interval != nullptr) { |
| 36 if (!Insert(interval, range)) return false; |
| 37 interval = interval->next(); |
| 38 } |
| 39 return true; |
| 40 } |
| 41 |
| 42 bool Remove(LiveRange* range) { |
| 43 bool ret = false; |
| 44 auto* segment = range->first_interval(); |
| 45 while (segment != nullptr) { |
| 46 ret |= Remove(segment); |
| 47 segment = segment->next(); |
| 48 } |
| 49 return ret; |
| 50 } |
| 51 |
| 52 bool IsEmpty() { return storage_.is_empty(); } |
| 53 |
| 54 private: |
| 55 struct Config { |
| 56 typedef std::pair<int, int> Key; |
| 57 typedef LiveRange* Value; |
| 58 static const Key kNoKey; |
| 59 static Value NoValue() { return nullptr; } |
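| // Two keys compare as equal whenever the intervals [first, second) overlap,
| // so the tree only holds disjoint intervals and overlap lookups succeed.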
| 60 static int Compare(const Key& a, const Key& b) { |
| 61 if (a.second <= b.first) return -1; |
| 62 if (a.first >= b.second) return 1; |
| 63 return 0; |
| 64 } |
| 65 }; |
| 66 |
| 67 Config::Key GetKey(UseInterval* interval) { |
| 68 if (interval == nullptr) return std::make_pair(0, 0); |
| 69 return std::make_pair(interval->start().value(), interval->end().value()); |
| 70 } |
| 71 |
| 72 // TODO(mtrofin): Change to return void if we do not care whether the
| 73 // interval was previously added.
| 74 bool Insert(UseInterval* interval, LiveRange* range) { |
| 75 ZoneSplayTree<Config>::Locator locator; |
| 76 bool ret = storage_.Insert(GetKey(interval), &locator); |
| 77 if (ret) locator.set_value(range); |
| 78 return ret; |
| 79 } |
| 80 |
| 81 bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); } |
| 82 |
| 83 ZoneSplayTree<Config> storage_; |
| 84 DISALLOW_COPY_AND_ASSIGN(CoalescedLiveRanges); |
| 85 }; |
| 86 |
| 87 |
| 88 const std::pair<int, int> CoalescedLiveRanges::Config::kNoKey = {0, 0}; |
| 89 |
| 90 GreedyAllocator::GreedyAllocator(RegisterAllocationData* data, |
| 91 RegisterKind kind, Zone* local_zone) |
| 92 : RegisterAllocator(data, kind), |
| 93 local_zone_(local_zone), |
| 94 allocations_(local_zone), |
| 95 queue_(local_zone) {} |
| 96 |
| 97 |
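| // Returns the total length (in lifetime positions) covered by the range's
| // use intervals; used as the priority when the range is enqueued.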
| 98 unsigned GreedyAllocator::GetLiveRangeSize(LiveRange* range) { |
| 99 auto interval = range->first_interval(); |
| 100 if (interval == nullptr) return 0; |
| 101 |
| 102 unsigned size = 0; |
| 103 while (interval != nullptr) { |
| 104 size += (interval->end().value() - interval->start().value()); |
| 105 interval = interval->next(); |
| 106 } |
| 107 |
| 108 return size; |
| 109 } |
| 110 |
| 111 |
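| // Records the range in allocations_[reg_id] and marks the range (and its
| // phi, if it is one) as assigned to that register.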
| 112 void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) { |
| 113 allocations_[reg_id]->Insert(range); |
| 114 if (range->HasRegisterAssigned()) { |
| 115 DCHECK_EQ(reg_id, range->assigned_register()); |
| 116 return; |
| 117 } |
| 118 range->set_assigned_register(reg_id); |
| 119 range->SetUseHints(reg_id); |
| 120 if (range->is_phi()) { |
| 121 data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id); |
| 122 } |
| 123 } |
| 124 |
| 125 |
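| // Heuristic cost of spilling the range: use count divided by range size,
| // tripled when the first hint is a register. Fixed ranges and ranges with
| // no progressing split position are treated as unspillable (max weight).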
| 126 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) { |
| 127 InstructionOperand* first_hint = nullptr; |
| 128 if (range->FirstHintPosition() != nullptr) { |
| 129 first_hint = range->FirstHintPosition()->operand(); |
| 130 } |
| 131 |
| 132 if (range->IsFixed()) return std::numeric_limits<float>::max(); |
| 133 bool spill; |
| 134 if (!FindProgressingSplitPosition(range, &spill).IsValid()) |
| 135 return std::numeric_limits<float>::max(); |
| 136 |
| 137 float multiplier = 1.0; |
| 138 if (first_hint != nullptr && first_hint->IsRegister()) { |
| 139 multiplier = 3.0; |
| 140 } |
| 141 |
| 142 unsigned use_count = 0; |
| 143 auto* pos = range->first_pos(); |
| 144 while (pos != nullptr) { |
| 145 use_count++; |
| 146 pos = pos->next(); |
| 147 } |
| 148 |
| 149 unsigned range_size = GetLiveRangeSize(range); |
| 150 DCHECK_NE(0U, range_size); |
| 151 |
| 152 return multiplier * static_cast<float>(use_count) / |
| 153 static_cast<float>(range_size); |
| 154 } |
| 155 |
| 156 |
| 157 float GreedyAllocator::CalculateMaxSpillWeight( |
| 158 const ZoneSet<LiveRange*>& ranges) { |
| 159 float max = 0.0; |
| 160 for (auto* r : ranges) { |
| 161 max = std::max(max, CalculateSpillWeight(r)); |
| 162 } |
| 163 return max; |
| 164 } |
| 165 |
| 166 |
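| // Undoes an assignment: removes the range from its register's allocation
| // set and clears the assigned register and use hints.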
| 167 void GreedyAllocator::Evict(LiveRange* range) { |
| 168 bool removed = allocations_[range->assigned_register()]->Remove(range); |
| 169 CHECK(removed); |
| 170 range->UnsetUseHints(); |
| 171 range->UnsetAssignedRegister(); |
| 172 if (range->is_phi()) { |
| 173 data()->GetPhiMapValueFor(range->id())->UnsetAssignedRegister(); |
| 174 } |
| 175 } |
| 176 |
| 177 |
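| // Assigns reg_id to the range if none of its use intervals overlap ranges
| // already allocated to reg_id; otherwise collects the overlapping ranges in
| // *conflicting and leaves the range unassigned.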
| 178 bool GreedyAllocator::TryAllocatePhysicalRegister( |
| 179 unsigned reg_id, LiveRange* range, ZoneSet<LiveRange*>* conflicting) { |
| 180 auto* segment = range->first_interval(); |
| 181 |
| 182 auto* alloc_info = allocations_[reg_id]; |
| 183 while (segment != nullptr) { |
| 184 if (auto* existing = alloc_info->Find(segment)) { |
| 185 DCHECK(existing->HasRegisterAssigned()); |
| 186 conflicting->insert(existing); |
| 187 } |
| 188 segment = segment->next(); |
| 189 } |
| 190 if (!conflicting->empty()) return false; |
| 191 // No conflicts means we can safely allocate this register to this range. |
| 192 AssignRangeToRegister(reg_id, range); |
| 193 return true; |
| 194 } |
| 195 |
| 196 |
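| // Returns the register suggested by the range's first hint, or -1 if none.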
| 197 int GreedyAllocator::GetHintedRegister(LiveRange* range) { |
| 198 int reg; |
| 199 if (range->FirstHintPosition(&reg) != nullptr) {
| 200 return reg; |
| 201 } |
| 202 return -1; |
| 203 } |
| 204 |
| 205 |
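| // Tries the fixed register first, then the hinted one, then every remaining
| // candidate. On failure, *conflicting accumulates the ranges that blocked
| // each candidate register.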
| 206 bool GreedyAllocator::TryAllocate(LiveRange* current, |
| 207 ZoneSet<LiveRange*>* conflicting) { |
| 208 if (current->IsFixed()) { |
| 209 return TryAllocatePhysicalRegister(current->assigned_register(), current, |
| 210 conflicting); |
| 211 } |
| 212 |
| 213 int hinted_reg_id = GetHintedRegister(current); |
| 214 if (hinted_reg_id >= 0) { |
| 215 if (TryAllocatePhysicalRegister(hinted_reg_id, current, conflicting)) { |
| 216 return true; |
| 217 } |
| 218 } |
| 219 |
| 220 ZoneSet<LiveRange*> local_conflicts(local_zone()); |
| 221 for (unsigned candidate_reg = 0; candidate_reg < allocations_.size(); |
| 222 candidate_reg++) { |
| 223 if (hinted_reg_id >= 0 && |
| 224 candidate_reg == static_cast<size_t>(hinted_reg_id)) |
| 225 continue; |
| 226 local_conflicts.clear(); |
| 227 if (TryAllocatePhysicalRegister(candidate_reg, current, &local_conflicts)) { |
| 228 conflicting->clear(); |
| 229 return true; |
| 230 } else { |
| 231 conflicting->insert(local_conflicts.begin(), local_conflicts.end()); |
| 232 } |
| 233 } |
| 234 return false; |
| 235 } |
| 236 |
| 237 |
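| // Splits the range at 'start'. If the tail still intersects [start, end),
| // spills the part up to a point between 'until' and 'end' and returns the
| // remainder for re-processing; otherwise returns the tail untouched.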
| 238 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range, |
| 239 LifetimePosition start, |
| 240 LifetimePosition until, |
| 241 LifetimePosition end) { |
| 242 CHECK(start < end); |
| 243 auto second_part = SplitRangeAt(range, start); |
| 244 |
| 245 if (second_part->Start() < end) { |
| 246 // The split result intersects with [start, end[. |
| 247 // Split it at a position in ]start+1, end[, spill the middle part
| 248 // and return the rest for re-processing.
| 249 auto third_part_end = end.PrevStart().End(); |
| 250 if (data()->IsBlockBoundary(end.Start())) { |
| 251 third_part_end = end.Start(); |
| 252 } |
| 253 auto third_part = SplitBetween( |
| 254 second_part, Max(second_part->Start().End(), until), third_part_end); |
| 255 |
| 256 DCHECK(third_part != second_part); |
| 257 |
| 258 Spill(second_part); |
| 259 return third_part; |
| 260 } else { |
| 261 // The split result does not intersect with [start, end[. |
| 262 // Nothing to spill. Just return it for re-processing. |
| 263 return second_part; |
| 264 } |
| 265 } |
| 266 |
| 267 |
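| // Adds a non-empty range to the allocation queue, keyed by its size.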
| 268 void GreedyAllocator::Enqueue(LiveRange* range) { |
| 269 if (range == nullptr || range->IsEmpty()) return; |
| 270 unsigned size = GetLiveRangeSize(range); |
| 271 TRACE("Enqueuing range %d\n", range->id()); |
| 272 queue_.push(std::make_pair(size, range)); |
| 273 } |
| 274 |
| 275 |
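| // For ranges that already have a spill operand: spill outright if no use
| // benefits from a register, or spill only the head up to the first such use.
| // Returns true if the range was handled, false if it still needs allocation.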
| 276 bool GreedyAllocator::HandleSpillOperands(LiveRange* range) { |
| 277 auto position = range->Start(); |
| 278 TRACE("Processing interval %d start=%d\n", range->id(), position.value()); |
| 279 |
| 280 if (!range->HasNoSpillType()) { |
| 281 TRACE("Live range %d already has a spill operand\n", range->id()); |
| 282 auto next_pos = position; |
| 283 if (next_pos.IsGapPosition()) { |
| 284 next_pos = next_pos.NextStart(); |
| 285 } |
| 286 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); |
| 287 // If the range already has a spill operand and it doesn't need a |
| 288 // register immediately, split it and spill the first part of the range. |
| 289 if (pos == nullptr) { |
| 290 Spill(range); |
| 291 return true; |
| 292 } else if (pos->pos() > range->Start().NextStart()) { |
| 293 // Do not spill the live range eagerly if the use position that can
| 294 // benefit from the register is too close to the start of the live range.
| 295 auto* remainder = SpillBetweenUntil(range, position, position, pos->pos());
| 296 Enqueue(remainder);
| 297 return true; |
| 298 } |
| 299 } |
| 300 return false; |
| 301 } |
| 302 |
| 303 |
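| // Main driver: enqueues every unallocated range of this register kind, seeds
| // allocations_ with the fixed ranges, then repeatedly dequeues ranges and
| // allocates them, evicting cheaper conflicts or splitting/spilling when a
| // range is blocked.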
| 304 void GreedyAllocator::AllocateRegisters() { |
| 305 for (auto range : data()->live_ranges()) { |
| 306 if (range == nullptr) continue; |
| 307 if (range->kind() == mode()) { |
| 308 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); |
| 309 TRACE("Enqueueing live range %d to priority queue \n", range->id()); |
| 310 Enqueue(range); |
| 311 } |
| 312 } |
| 313 |
| 314 allocations_.resize(num_registers()); |
| 315 for (int i = 0; i < num_registers(); i++) { |
| 316 allocations_[i] = new (local_zone()) CoalescedLiveRanges(local_zone()); |
| 317 } |
| 318 |
| 319 for (auto* current : GetFixedRegisters()) { |
| 320 if (current != nullptr) { |
| 321 DCHECK_EQ(mode(), current->kind()); |
| 322 int reg_nr = current->assigned_register(); |
| 323 bool inserted = allocations_[reg_nr]->Insert(current); |
| 324 CHECK(inserted); |
| 325 } |
| 326 } |
| 327 |
| 328 while (!queue_.empty()) { |
| 329 auto current_pair = queue_.top(); |
| 330 queue_.pop(); |
| 331 auto current = current_pair.second; |
| 332 if (HandleSpillOperands(current)) { |
| 333 continue; |
| 334 } |
| 335 bool spill = false; |
| 336 ZoneSet<LiveRange*> conflicting(local_zone()); |
| 337 if (!TryAllocate(current, &conflicting)) { |
| 338 DCHECK(!conflicting.empty()); |
| 339 float this_weight = std::numeric_limits<float>::max(); |
| 340 LifetimePosition split_pos = |
| 341 FindProgressingSplitPosition(current, &spill); |
| 342 if (split_pos.IsValid()) { |
| 343 this_weight = CalculateSpillWeight(current); |
| 344 } |
| 345 |
| 346 bool evicted = false; |
| 347 for (auto* conflict : conflicting) { |
| 348 if (CalculateSpillWeight(conflict) < this_weight) { |
| 349 Evict(conflict); |
| 350 Enqueue(conflict); |
| 351 evicted = true; |
| 352 } |
| 353 } |
| 354 if (evicted) { |
| 355 conflicting.clear(); |
| 356 TryAllocate(current, &conflicting); |
| 357 } |
| 358 if (!conflicting.empty()) { |
| 359 DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start())); |
| 360 DCHECK(split_pos.IsValid()); |
| 361 AllocateBlockedRange(current, split_pos, spill); |
| 362 } |
| 363 } |
| 364 } |
| 365 |
| 366 for (size_t i = 0; i < allocations_.size(); ++i) { |
| 367 if (!allocations_[i]->IsEmpty()) { |
| 368 data()->MarkAllocated(mode(), static_cast<int>(i)); |
| 369 } |
| 370 } |
| 371 } |
| 372 |
| 373 |
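| // Returns a legal split position at or before 'pos': 'pos' itself at block
| // boundaries, otherwise the end of the previous instruction.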
| 374 LifetimePosition GreedyAllocator::GetSplittablePos(LifetimePosition pos) { |
| 375 auto ret = pos.PrevStart().End(); |
| 376 if (data()->IsBlockBoundary(pos.Start())) { |
| 377 ret = pos.Start(); |
| 378 } |
| 379 DCHECK(ret <= pos); |
| 380 return ret; |
| 381 } |
| 382 |
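| // Finds a split position that guarantees progress. If the range has no use
| // requiring a register, returns a spill position (*is_spill_pos = true).
| // Otherwise returns a position strictly inside the range just before the
| // next register use, skipping tightly clustered uses, or Invalid() if none.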
| 383 LifetimePosition GreedyAllocator::FindProgressingSplitPosition( |
| 384 LiveRange* range, bool* is_spill_pos) { |
| 385 auto start = range->Start(); |
| 386 auto end = range->End(); |
| 387 |
| 388 UsePosition* next_reg_use = range->first_pos(); |
| 389 while (next_reg_use != nullptr && |
| 390 next_reg_use->type() != UsePositionType::kRequiresRegister) { |
| 391 next_reg_use = next_reg_use->next(); |
| 392 } |
| 393 |
| 394 if (next_reg_use == nullptr) { |
| 395 *is_spill_pos = true; |
| 396 auto ret = FindOptimalSpillingPos(range, start); |
| 397 DCHECK(ret.IsValid()); |
| 398 return ret; |
| 399 } |
| 400 |
| 401 *is_spill_pos = false; |
| 402 auto reg_pos = next_reg_use->pos(); |
| 403 auto correct_pos = GetSplittablePos(reg_pos); |
| 404 if (start < correct_pos && correct_pos < end) { |
| 405 return correct_pos; |
| 406 } |
| 407 |
| 408 if (correct_pos >= end) { |
| 409 return LifetimePosition::Invalid(); |
| 410 } |
| 411 |
| 412 // correct_pos must be at or before start. Find the next use position.
| 413 next_reg_use = next_reg_use->next(); |
| 414 auto reference = reg_pos; |
| 415 while (next_reg_use != nullptr) { |
| 416 auto pos = next_reg_use->pos(); |
| 417 // Skip over tight successive uses. |
| 418 if (reference.NextStart() < pos) { |
| 419 break; |
| 420 } |
| 421 reference = pos; |
| 422 next_reg_use = next_reg_use->next(); |
| 423 } |
| 424 |
| 425 if (next_reg_use == nullptr) { |
| 426 // While there may not be another use, we may still have space in the range |
| 427 // to clip off. |
| 428 correct_pos = reference.NextStart(); |
| 429 if (start < correct_pos && correct_pos < end) { |
| 430 return correct_pos; |
| 431 } |
| 432 return LifetimePosition::Invalid(); |
| 433 } |
| 434 |
| 435 correct_pos = GetSplittablePos(next_reg_use->pos()); |
| 436 if (start < correct_pos && correct_pos < end) { |
| 437 DCHECK(reference < correct_pos); |
| 438 return correct_pos; |
| 439 } |
| 440 return LifetimePosition::Invalid(); |
| 441 } |
| 442 |
| 443 |
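| // Splits a blocked range at 'pos': the tail is spilled (when 'spill' is set)
| // or re-enqueued, and the head is re-enqueued if the split produced a new
| // piece.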
| 444 void GreedyAllocator::AllocateBlockedRange(LiveRange* current, |
| 445 LifetimePosition pos, bool spill) { |
| 446 auto tail = SplitRangeAt(current, pos); |
| 447 if (spill) { |
| 448 Spill(tail); |
| 449 } else { |
| 450 Enqueue(tail); |
| 451 } |
| 452 if (tail != current) { |
| 453 Enqueue(current); |
| 454 } |
| 455 } |
| 456 |
| 457 |
| 458 } // namespace compiler |
| 459 } // namespace internal |
| 460 } // namespace v8 |