| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
| 6 #include "src/compiler/linkage.h" | 6 #include "src/compiler/linkage.h" |
| 7 #include "src/compiler/register-allocator.h" | 7 #include "src/compiler/register-allocator.h" |
| 8 #include "src/string-stream.h" | 8 #include "src/string-stream.h" |
| 9 | 9 |
| 10 namespace v8 { | 10 namespace v8 { |
| (...skipping 14 matching lines...) |
| 25 v->erase(it); | 25 v->erase(it); |
| 26 } | 26 } |
| 27 | 27 |
| 28 | 28 |
| 29 int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) { | 29 int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) { |
| 30 return kind == DOUBLE_REGISTERS ? cfg->num_aliased_double_registers() | 30 return kind == DOUBLE_REGISTERS ? cfg->num_aliased_double_registers() |
| 31 : cfg->num_general_registers(); | 31 : cfg->num_general_registers(); |
| 32 } | 32 } |
| 33 | 33 |
| 34 | 34 |
| 35 const ZoneVector<LiveRange*>& GetFixedRegisters( | |
| 36 const RegisterAllocationData* data, RegisterKind kind) { | |
| 37 return kind == DOUBLE_REGISTERS ? data->fixed_double_live_ranges() | |
| 38 : data->fixed_live_ranges(); | |
| 39 } | |
| 40 | |
| 41 | |
| 42 const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence, | 35 const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence, |
| 43 const InstructionBlock* block) { | 36 const InstructionBlock* block) { |
| 44 auto index = block->loop_header(); | 37 auto index = block->loop_header(); |
| 45 if (!index.IsValid()) return nullptr; | 38 if (!index.IsValid()) return nullptr; |
| 46 return sequence->InstructionBlockAt(index); | 39 return sequence->InstructionBlockAt(index); |
| 47 } | 40 } |
| 48 | 41 |
| 49 | 42 |
| 50 const InstructionBlock* GetInstructionBlock(const InstructionSequence* code, | 43 const InstructionBlock* GetInstructionBlock(const InstructionSequence* code, |
| 51 LifetimePosition pos) { | 44 LifetimePosition pos) { |
| 52 return code->GetInstructionBlock(pos.ToInstructionIndex()); | 45 return code->GetInstructionBlock(pos.ToInstructionIndex()); |
| 53 } | 46 } |
| 54 | 47 |
| 55 | 48 |
| 56 bool IsBlockBoundary(const InstructionSequence* code, LifetimePosition pos) { | |
| 57 return pos.IsFullStart() && | |
| 58 code->GetInstructionBlock(pos.ToInstructionIndex())->code_start() == | |
| 59 pos.ToInstructionIndex(); | |
| 60 } | |
| 61 | |
| 62 | |
| 63 Instruction* GetLastInstruction(InstructionSequence* code, | 49 Instruction* GetLastInstruction(InstructionSequence* code, |
| 64 const InstructionBlock* block) { | 50 const InstructionBlock* block) { |
| 65 return code->InstructionAt(block->last_instruction_index()); | 51 return code->InstructionAt(block->last_instruction_index()); |
| 66 } | 52 } |
| 67 | 53 |
| 68 | 54 |
| 69 bool IsOutputRegisterOf(Instruction* instr, int index) { | 55 bool IsOutputRegisterOf(Instruction* instr, int index) { |
| 70 for (size_t i = 0; i < instr->OutputCount(); i++) { | 56 for (size_t i = 0; i < instr->OutputCount(); i++) { |
| 71 auto output = instr->OutputAt(i); | 57 auto output = instr->OutputAt(i); |
| 72 if (output->IsRegister() && | 58 if (output->IsRegister() && |
| (...skipping 991 matching lines...) |
| 1064 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) { | 1050 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) { |
| 1065 if (kind == DOUBLE_REGISTERS) { | 1051 if (kind == DOUBLE_REGISTERS) { |
| 1066 assigned_double_registers_->Add(index); | 1052 assigned_double_registers_->Add(index); |
| 1067 } else { | 1053 } else { |
| 1068 DCHECK(kind == GENERAL_REGISTERS); | 1054 DCHECK(kind == GENERAL_REGISTERS); |
| 1069 assigned_registers_->Add(index); | 1055 assigned_registers_->Add(index); |
| 1070 } | 1056 } |
| 1071 } | 1057 } |
| 1072 | 1058 |
| 1073 | 1059 |
| 1060 bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const { |
| 1061 return pos.IsFullStart() && |
| 1062 code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() == |
| 1063 pos.ToInstructionIndex(); |
| 1064 } |
| 1065 |
| 1066 |
| 1074 ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data) | 1067 ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data) |
| 1075 : data_(data) {} | 1068 : data_(data) {} |
| 1076 | 1069 |
| 1077 | 1070 |
| 1078 InstructionOperand* ConstraintBuilder::AllocateFixed( | 1071 InstructionOperand* ConstraintBuilder::AllocateFixed( |
| 1079 UnallocatedOperand* operand, int pos, bool is_tagged) { | 1072 UnallocatedOperand* operand, int pos, bool is_tagged) { |
| 1080 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register()); | 1073 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register()); |
| 1081 DCHECK(operand->HasFixedPolicy()); | 1074 DCHECK(operand->HasFixedPolicy()); |
| 1082 InstructionOperand allocated; | 1075 InstructionOperand allocated; |
| 1083 MachineType machine_type = InstructionSequence::DefaultRepresentation(); | 1076 MachineType machine_type = InstructionSequence::DefaultRepresentation(); |
| (...skipping 762 matching lines...) |
| 1846 DCHECK(!range->spilled()); | 1839 DCHECK(!range->spilled()); |
| 1847 TRACE("Spilling live range %d\n", range->id()); | 1840 TRACE("Spilling live range %d\n", range->id()); |
| 1848 auto first = range->TopLevel(); | 1841 auto first = range->TopLevel(); |
| 1849 if (first->HasNoSpillType()) { | 1842 if (first->HasNoSpillType()) { |
| 1850 data()->AssignSpillRangeToLiveRange(first); | 1843 data()->AssignSpillRangeToLiveRange(first); |
| 1851 } | 1844 } |
| 1852 range->Spill(); | 1845 range->Spill(); |
| 1853 } | 1846 } |
| 1854 | 1847 |
| 1855 | 1848 |
| 1849 const ZoneVector<LiveRange*>& RegisterAllocator::GetFixedRegisters() const { |
| 1850 return mode() == DOUBLE_REGISTERS ? data()->fixed_double_live_ranges() |
| 1851 : data()->fixed_live_ranges(); |
| 1852 } |
| 1853 |
| 1854 |
| 1856 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data, | 1855 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data, |
| 1857 RegisterKind kind, Zone* local_zone) | 1856 RegisterKind kind, Zone* local_zone) |
| 1858 : RegisterAllocator(data, kind), | 1857 : RegisterAllocator(data, kind), |
| 1859 unhandled_live_ranges_(local_zone), | 1858 unhandled_live_ranges_(local_zone), |
| 1860 active_live_ranges_(local_zone), | 1859 active_live_ranges_(local_zone), |
| 1861 inactive_live_ranges_(local_zone) { | 1860 inactive_live_ranges_(local_zone) { |
| 1862 unhandled_live_ranges().reserve( | 1861 unhandled_live_ranges().reserve( |
| 1863 static_cast<size_t>(code()->VirtualRegisterCount() * 2)); | 1862 static_cast<size_t>(code()->VirtualRegisterCount() * 2)); |
| 1864 active_live_ranges().reserve(8); | 1863 active_live_ranges().reserve(8); |
| 1865 inactive_live_ranges().reserve(8); | 1864 inactive_live_ranges().reserve(8); |
| (...skipping 11 matching lines...) |
| 1877 | 1876 |
| 1878 for (auto range : data()->live_ranges()) { | 1877 for (auto range : data()->live_ranges()) { |
| 1879 if (range == nullptr) continue; | 1878 if (range == nullptr) continue; |
| 1880 if (range->kind() == mode()) { | 1879 if (range->kind() == mode()) { |
| 1881 AddToUnhandledUnsorted(range); | 1880 AddToUnhandledUnsorted(range); |
| 1882 } | 1881 } |
| 1883 } | 1882 } |
| 1884 SortUnhandled(); | 1883 SortUnhandled(); |
| 1885 DCHECK(UnhandledIsSorted()); | 1884 DCHECK(UnhandledIsSorted()); |
| 1886 | 1885 |
| 1887 auto& fixed_ranges = GetFixedRegisters(data(), mode()); | 1886 auto& fixed_ranges = GetFixedRegisters(); |
| 1888 for (auto current : fixed_ranges) { | 1887 for (auto current : fixed_ranges) { |
| 1889 if (current != nullptr) { | 1888 if (current != nullptr) { |
| 1890 DCHECK_EQ(mode(), current->kind()); | 1889 DCHECK_EQ(mode(), current->kind()); |
| 1891 AddToInactive(current); | 1890 AddToInactive(current); |
| 1892 } | 1891 } |
| 1893 } | 1892 } |
| 1894 | 1893 |
| 1895 while (!unhandled_live_ranges().empty()) { | 1894 while (!unhandled_live_ranges().empty()) { |
| 1896 DCHECK(UnhandledIsSorted()); | 1895 DCHECK(UnhandledIsSorted()); |
| 1897 auto current = unhandled_live_ranges().back(); | 1896 auto current = unhandled_live_ranges().back(); |
| (...skipping 475 matching lines...) |
| 2373 LifetimePosition until, | 2372 LifetimePosition until, |
| 2374 LifetimePosition end) { | 2373 LifetimePosition end) { |
| 2375 CHECK(start < end); | 2374 CHECK(start < end); |
| 2376 auto second_part = SplitRangeAt(range, start); | 2375 auto second_part = SplitRangeAt(range, start); |
| 2377 | 2376 |
| 2378 if (second_part->Start() < end) { | 2377 if (second_part->Start() < end) { |
| 2379 // The split result intersects with [start, end[. | 2378 // The split result intersects with [start, end[. |
| 2380 // Split it at a position inside ]start+1, end[, spill the middle part | 2379 // Split it at a position inside ]start+1, end[, spill the middle part |
| 2381 // and put the rest on unhandled. | 2380 // and put the rest on unhandled. |
| 2382 auto third_part_end = end.PrevStart().End(); | 2381 auto third_part_end = end.PrevStart().End(); |
| 2383 if (IsBlockBoundary(code(), end.Start())) { | 2382 if (data()->IsBlockBoundary(end.Start())) { |
| 2384 third_part_end = end.Start(); | 2383 third_part_end = end.Start(); |
| 2385 } | 2384 } |
| 2386 auto third_part = SplitBetween( | 2385 auto third_part = SplitBetween( |
| 2387 second_part, Max(second_part->Start().End(), until), third_part_end); | 2386 second_part, Max(second_part->Start().End(), until), third_part_end); |
| 2388 | 2387 |
| 2389 DCHECK(third_part != second_part); | 2388 DCHECK(third_part != second_part); |
| 2390 | 2389 |
| 2391 Spill(second_part); | 2390 Spill(second_part); |
| 2392 AddToUnhandledSorted(third_part); | 2391 AddToUnhandledSorted(third_part); |
| 2393 } else { | 2392 } else { |
| 2394 // The split result does not intersect with [start, end[. | 2393 // The split result does not intersect with [start, end[. |
| 2395 // Nothing to spill. Just put it on unhandled as a whole. | 2394 // Nothing to spill. Just put it on unhandled as a whole. |
| 2396 AddToUnhandledSorted(second_part); | 2395 AddToUnhandledSorted(second_part); |
| 2397 } | 2396 } |
| 2398 } | 2397 } |
| 2399 | 2398 |
| 2400 | 2399 |
| 2401 class CoalescedLiveRanges : public ZoneObject { | |
| 2402 public: | |
| 2403 explicit CoalescedLiveRanges(Zone* zone) : storage_(zone) {} | |
| 2404 | |
| 2405 LiveRange* Find(UseInterval* query) { | |
| 2406 ZoneSplayTree<Config>::Locator locator; | |
| 2407 | |
| 2408 if (storage_.Find(GetKey(query), &locator)) { | |
| 2409 return locator.value(); | |
| 2410 } | |
| 2411 return nullptr; | |
| 2412 } | |
| 2413 | |
| 2414 // TODO(mtrofin): Change to return void if we do not care whether the interval | |
| 2415 // was previously added. | |
| 2416 bool Insert(LiveRange* range) { | |
| 2417 auto* interval = range->first_interval(); | |
| 2418 while (interval != nullptr) { | |
| 2419 if (!Insert(interval, range)) return false; | |
| 2420 interval = interval->next(); | |
| 2421 } | |
| 2422 return true; | |
| 2423 } | |
| 2424 | |
| 2425 bool Remove(LiveRange* range) { | |
| 2426 bool ret = false; | |
| 2427 auto* segment = range->first_interval(); | |
| 2428 while (segment != nullptr) { | |
| 2429 ret |= Remove(segment); | |
| 2430 segment = segment->next(); | |
| 2431 } | |
| 2432 return ret; | |
| 2433 } | |
| 2434 | |
| 2435 bool IsEmpty() { return storage_.is_empty(); } | |
| 2436 | |
| 2437 private: | |
| 2438 struct Config { | |
| 2439 typedef std::pair<int, int> Key; | |
| 2440 typedef LiveRange* Value; | |
| 2441 static const Key kNoKey; | |
| 2442 static Value NoValue() { return nullptr; } | |
| 2443 static int Compare(const Key& a, const Key& b) { | |
| 2444 if (a.second <= b.first) return -1; | |
| 2445 if (a.first >= b.second) return 1; | |
| 2446 return 0; | |
| 2447 } | |
| 2448 }; | |
| 2449 | |
| 2450 Config::Key GetKey(UseInterval* interval) { | |
| 2451 if (interval == nullptr) return std::make_pair(0, 0); | |
| 2452 return std::make_pair(interval->start().value(), interval->end().value()); | |
| 2453 } | |
| 2454 | |
| 2455 // TODO(mtrofin): Change to return void if we do not care whether the interval | |
| 2456 // was previously added. | |
| 2457 bool Insert(UseInterval* interval, LiveRange* range) { | |
| 2458 ZoneSplayTree<Config>::Locator locator; | |
| 2459 bool ret = storage_.Insert(GetKey(interval), &locator); | |
| 2460 if (ret) locator.set_value(range); | |
| 2461 return ret; | |
| 2462 } | |
| 2463 | |
| 2464 bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); } | |
| 2465 | |
| 2466 ZoneSplayTree<Config> storage_; | |
| 2467 DISALLOW_COPY_AND_ASSIGN(CoalescedLiveRanges); | |
| 2468 }; | |
| 2469 | |
| 2470 | |
| 2471 const std::pair<int, int> CoalescedLiveRanges::Config::kNoKey = {0, 0}; | |
| 2472 | |
| 2473 GreedyAllocator::GreedyAllocator(RegisterAllocationData* data, | |
| 2474 RegisterKind kind, Zone* local_zone) | |
| 2475 : RegisterAllocator(data, kind), | |
| 2476 local_zone_(local_zone), | |
| 2477 allocations_(local_zone), | |
| 2478 queue_(local_zone) {} | |
| 2479 | |
| 2480 | |
| 2481 unsigned GreedyAllocator::GetLiveRangeSize(LiveRange* range) { | |
| 2482 auto interval = range->first_interval(); | |
| 2483 if (interval == nullptr) return 0; | |
| 2484 | |
| 2485 unsigned size = 0; | |
| 2486 while (interval != nullptr) { | |
| 2487 size += (interval->end().value() - interval->start().value()); | |
| 2488 interval = interval->next(); | |
| 2489 } | |
| 2490 | |
| 2491 return size; | |
| 2492 } | |
| 2493 | |
| 2494 | |
| 2495 void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) { | |
| 2496 allocations_[reg_id]->Insert(range); | |
| 2497 if (range->HasRegisterAssigned()) { | |
| 2498 DCHECK_EQ(reg_id, range->assigned_register()); | |
| 2499 return; | |
| 2500 } | |
| 2501 range->set_assigned_register(reg_id); | |
| 2502 range->SetUseHints(reg_id); | |
| 2503 if (range->is_phi()) { | |
| 2504 data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id); | |
| 2505 } | |
| 2506 } | |
| 2507 | |
| 2508 | |
| 2509 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) { | |
| 2510 InstructionOperand* first_hint = nullptr; | |
| 2511 if (range->FirstHintPosition() != nullptr) { | |
| 2512 first_hint = range->FirstHintPosition()->operand(); | |
| 2513 } | |
| 2514 | |
| 2515 if (range->IsFixed()) return std::numeric_limits<float>::max(); | |
| 2516 bool spill; | |
| 2517 if (!FindProgressingSplitPosition(range, &spill).IsValid()) | |
| 2518 return std::numeric_limits<float>::max(); | |
| 2519 | |
| 2520 float multiplier = 1.0; | |
| 2521 if (first_hint != nullptr && first_hint->IsRegister()) { | |
| 2522 multiplier = 3.0; | |
| 2523 } | |
| 2524 | |
| 2525 unsigned use_count = 0; | |
| 2526 auto* pos = range->first_pos(); | |
| 2527 while (pos != nullptr) { | |
| 2528 use_count++; | |
| 2529 pos = pos->next(); | |
| 2530 } | |
| 2531 | |
| 2532 unsigned range_size = GetLiveRangeSize(range); | |
| 2533 DCHECK_NE(0U, range_size); | |
| 2534 | |
| 2535 return multiplier * static_cast<float>(use_count) / | |
| 2536 static_cast<float>(range_size); | |
| 2537 } | |
| 2538 | |
| 2539 | |
| 2540 float GreedyAllocator::CalculateMaxSpillWeight( | |
| 2541 const ZoneSet<LiveRange*>& ranges) { | |
| 2542 float max = 0.0; | |
| 2543 for (auto* r : ranges) { | |
| 2544 max = std::max(max, CalculateSpillWeight(r)); | |
| 2545 } | |
| 2546 return max; | |
| 2547 } | |
| 2548 | |
| 2549 | |
| 2550 void GreedyAllocator::Evict(LiveRange* range) { | |
| 2551 bool removed = allocations_[range->assigned_register()]->Remove(range); | |
| 2552 CHECK(removed); | |
| 2553 range->UnsetUseHints(); | |
| 2554 range->UnsetAssignedRegister(); | |
| 2555 if (range->is_phi()) { | |
| 2556 data()->GetPhiMapValueFor(range->id())->UnsetAssignedRegister(); | |
| 2557 } | |
| 2558 } | |
| 2559 | |
| 2560 | |
| 2561 bool GreedyAllocator::TryAllocatePhysicalRegister( | |
| 2562 unsigned reg_id, LiveRange* range, ZoneSet<LiveRange*>* conflicting) { | |
| 2563 auto* segment = range->first_interval(); | |
| 2564 | |
| 2565 auto* alloc_info = allocations_[reg_id]; | |
| 2566 while (segment != nullptr) { | |
| 2567 if (auto* existing = alloc_info->Find(segment)) { | |
| 2568 DCHECK(existing->HasRegisterAssigned()); | |
| 2569 conflicting->insert(existing); | |
| 2570 } | |
| 2571 segment = segment->next(); | |
| 2572 } | |
| 2573 if (!conflicting->empty()) return false; | |
| 2574 // No conflicts means we can safely allocate this register to this range. | |
| 2575 AssignRangeToRegister(reg_id, range); | |
| 2576 return true; | |
| 2577 } | |
| 2578 | |
| 2579 | |
| 2580 int GreedyAllocator::GetHintedRegister(LiveRange* range) { | |
| 2581 int reg; | |
| 2582 if (range->FirstHintPosition(®) != nullptr) { | |
| 2583 return reg; | |
| 2584 } | |
| 2585 return -1; | |
| 2586 } | |
| 2587 | |
| 2588 | |
| 2589 bool GreedyAllocator::TryAllocate(LiveRange* current, | |
| 2590 ZoneSet<LiveRange*>* conflicting) { | |
| 2591 if (current->IsFixed()) { | |
| 2592 return TryAllocatePhysicalRegister(current->assigned_register(), current, | |
| 2593 conflicting); | |
| 2594 } | |
| 2595 | |
| 2596 int hinted_reg_id = GetHintedRegister(current); | |
| 2597 if (hinted_reg_id >= 0) { | |
| 2598 if (TryAllocatePhysicalRegister(hinted_reg_id, current, conflicting)) { | |
| 2599 return true; | |
| 2600 } | |
| 2601 } | |
| 2602 | |
| 2603 ZoneSet<LiveRange*> local_conflicts(local_zone()); | |
| 2604 for (unsigned candidate_reg = 0; candidate_reg < allocations_.size(); | |
| 2605 candidate_reg++) { | |
| 2606 if (hinted_reg_id >= 0 && | |
| 2607 candidate_reg == static_cast<size_t>(hinted_reg_id)) | |
| 2608 continue; | |
| 2609 local_conflicts.clear(); | |
| 2610 if (TryAllocatePhysicalRegister(candidate_reg, current, &local_conflicts)) { | |
| 2611 conflicting->clear(); | |
| 2612 return true; | |
| 2613 } else { | |
| 2614 conflicting->insert(local_conflicts.begin(), local_conflicts.end()); | |
| 2615 } | |
| 2616 } | |
| 2617 return false; | |
| 2618 } | |
| 2619 | |
| 2620 | |
| 2621 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range, | |
| 2622 LifetimePosition start, | |
| 2623 LifetimePosition until, | |
| 2624 LifetimePosition end) { | |
| 2625 CHECK(start < end); | |
| 2626 auto second_part = SplitRangeAt(range, start); | |
| 2627 | |
| 2628 if (second_part->Start() < end) { | |
| 2629 // The split result intersects with [start, end[. | |
| 2630 // Split it at a position inside ]start+1, end[, spill the middle part | |
| 2631 // and return the rest for re-processing. | |
| 2632 auto third_part_end = end.PrevStart().End(); | |
| 2633 if (IsBlockBoundary(code(), end.Start())) { | |
| 2634 third_part_end = end.Start(); | |
| 2635 } | |
| 2636 auto third_part = SplitBetween( | |
| 2637 second_part, Max(second_part->Start().End(), until), third_part_end); | |
| 2638 | |
| 2639 DCHECK(third_part != second_part); | |
| 2640 | |
| 2641 Spill(second_part); | |
| 2642 return third_part; | |
| 2643 } else { | |
| 2644 // The split result does not intersect with [start, end[. | |
| 2645 // Nothing to spill. Just return it for re-processing. | |
| 2646 return second_part; | |
| 2647 } | |
| 2648 } | |
| 2649 | |
| 2650 | |
| 2651 void GreedyAllocator::Enqueue(LiveRange* range) { | |
| 2652 if (range == nullptr || range->IsEmpty()) return; | |
| 2653 unsigned size = GetLiveRangeSize(range); | |
| 2654 TRACE("Enqueuing range %d\n", range->id()); | |
| 2655 queue_.push(std::make_pair(size, range)); | |
| 2656 } | |
| 2657 | |
| 2658 | |
| 2659 bool GreedyAllocator::HandleSpillOperands(LiveRange* range) { | |
| 2660 auto position = range->Start(); | |
| 2661 TRACE("Processing interval %d start=%d\n", range->id(), position.value()); | |
| 2662 | |
| 2663 if (!range->HasNoSpillType()) { | |
| 2664 TRACE("Live range %d already has a spill operand\n", range->id()); | |
| 2665 auto next_pos = position; | |
| 2666 if (next_pos.IsGapPosition()) { | |
| 2667 next_pos = next_pos.NextStart(); | |
| 2668 } | |
| 2669 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | |
| 2670 // If the range already has a spill operand and it doesn't need a | |
| 2671 // register immediately, split it and spill the first part of the range. | |
| 2672 if (pos == nullptr) { | |
| 2673 Spill(range); | |
| 2674 return true; | |
| 2675 } else if (pos->pos() > range->Start().NextStart()) { | |
| 2676 // Do not spill the live range eagerly if the use position that can benefit | |
| 2677 // from the register is too close to the start of the live range. | |
| 2678 auto* remainder = SpillBetweenUntil(range, position, position, pos->pos()); | |
| 2679 Enqueue(remainder); | |
| 2680 return true; | |
| 2681 } | |
| 2682 } | |
| 2683 return TryReuseSpillForPhi(range); | |
| 2684 } | |
| 2685 | |
| 2686 | |
| 2687 void GreedyAllocator::AllocateRegisters() { | |
| 2688 for (auto range : data()->live_ranges()) { | |
| 2689 if (range == nullptr) continue; | |
| 2690 if (range->kind() == mode()) { | |
| 2691 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); | |
| 2692 TRACE("Enqueueing live range %d to priority queue \n", range->id()); | |
| 2693 Enqueue(range); | |
| 2694 } | |
| 2695 } | |
| 2696 | |
| 2697 allocations_.resize(num_registers()); | |
| 2698 for (int i = 0; i < num_registers(); i++) { | |
| 2699 allocations_[i] = new (local_zone()) CoalescedLiveRanges(local_zone()); | |
| 2700 } | |
| 2701 | |
| 2702 for (auto* current : GetFixedRegisters(data(), mode())) { | |
| 2703 if (current != nullptr) { | |
| 2704 DCHECK_EQ(mode(), current->kind()); | |
| 2705 int reg_nr = current->assigned_register(); | |
| 2706 bool inserted = allocations_[reg_nr]->Insert(current); | |
| 2707 CHECK(inserted); | |
| 2708 } | |
| 2709 } | |
| 2710 | |
| 2711 while (!queue_.empty()) { | |
| 2712 auto current_pair = queue_.top(); | |
| 2713 queue_.pop(); | |
| 2714 auto current = current_pair.second; | |
| 2715 if (HandleSpillOperands(current)) { | |
| 2716 continue; | |
| 2717 } | |
| 2718 bool spill = false; | |
| 2719 ZoneSet<LiveRange*> conflicting(local_zone()); | |
| 2720 if (!TryAllocate(current, &conflicting)) { | |
| 2721 DCHECK(!conflicting.empty()); | |
| 2722 float this_weight = std::numeric_limits<float>::max(); | |
| 2723 LifetimePosition split_pos = | |
| 2724 FindProgressingSplitPosition(current, &spill); | |
| 2725 if (split_pos.IsValid()) { | |
| 2726 this_weight = CalculateSpillWeight(current); | |
| 2727 } | |
| 2728 | |
| 2729 bool evicted = false; | |
| 2730 for (auto* conflict : conflicting) { | |
| 2731 if (CalculateSpillWeight(conflict) < this_weight) { | |
| 2732 Evict(conflict); | |
| 2733 Enqueue(conflict); | |
| 2734 evicted = true; | |
| 2735 } | |
| 2736 } | |
| 2737 if (evicted) { | |
| 2738 conflicting.clear(); | |
| 2739 TryAllocate(current, &conflicting); | |
| 2740 } | |
| 2741 if (!conflicting.empty()) { | |
| 2742 DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start())); | |
| 2743 DCHECK(split_pos.IsValid()); | |
| 2744 AllocateBlockedRange(current, split_pos, spill); | |
| 2745 } | |
| 2746 } | |
| 2747 } | |
| 2748 | |
| 2749 for (size_t i = 0; i < allocations_.size(); ++i) { | |
| 2750 if (!allocations_[i]->IsEmpty()) { | |
| 2751 data()->MarkAllocated(mode(), static_cast<int>(i)); | |
| 2752 } | |
| 2753 } | |
| 2754 } | |
| 2755 | |
| 2756 | |
| 2757 LifetimePosition GreedyAllocator::GetSplittablePos(LifetimePosition pos) { | |
| 2758 auto ret = pos.PrevStart().End(); | |
| 2759 if (IsBlockBoundary(code(), pos.Start())) { | |
| 2760 ret = pos.Start(); | |
| 2761 } | |
| 2762 DCHECK(ret <= pos); | |
| 2763 return ret; | |
| 2764 } | |
| 2765 | |
| 2766 LifetimePosition GreedyAllocator::FindProgressingSplitPosition( | |
| 2767 LiveRange* range, bool* is_spill_pos) { | |
| 2768 auto start = range->Start(); | |
| 2769 auto end = range->End(); | |
| 2770 | |
| 2771 UsePosition* next_reg_use = range->first_pos(); | |
| 2772 while (next_reg_use != nullptr && | |
| 2773 next_reg_use->type() != UsePositionType::kRequiresRegister) { | |
| 2774 next_reg_use = next_reg_use->next(); | |
| 2775 } | |
| 2776 | |
| 2777 if (next_reg_use == nullptr) { | |
| 2778 *is_spill_pos = true; | |
| 2779 auto ret = FindOptimalSpillingPos(range, start); | |
| 2780 DCHECK(ret.IsValid()); | |
| 2781 return ret; | |
| 2782 } | |
| 2783 | |
| 2784 *is_spill_pos = false; | |
| 2785 auto reg_pos = next_reg_use->pos(); | |
| 2786 auto correct_pos = GetSplittablePos(reg_pos); | |
| 2787 if (start < correct_pos && correct_pos < end) { | |
| 2788 return correct_pos; | |
| 2789 } | |
| 2790 | |
| 2791 if (correct_pos >= end) { | |
| 2792 return LifetimePosition::Invalid(); | |
| 2793 } | |
| 2794 | |
| 2795 // correct_pos must be at or before start. Find the next use position. | |
| 2796 next_reg_use = next_reg_use->next(); | |
| 2797 auto reference = reg_pos; | |
| 2798 while (next_reg_use != nullptr) { | |
| 2799 auto pos = next_reg_use->pos(); | |
| 2800 // Skip over tight successive uses. | |
| 2801 if (reference.NextStart() < pos) { | |
| 2802 break; | |
| 2803 } | |
| 2804 reference = pos; | |
| 2805 next_reg_use = next_reg_use->next(); | |
| 2806 } | |
| 2807 | |
| 2808 if (next_reg_use == nullptr) { | |
| 2809 // While there may not be another use, we may still have space in the range | |
| 2810 // to clip off. | |
| 2811 correct_pos = reference.NextStart(); | |
| 2812 if (start < correct_pos && correct_pos < end) { | |
| 2813 return correct_pos; | |
| 2814 } | |
| 2815 return LifetimePosition::Invalid(); | |
| 2816 } | |
| 2817 | |
| 2818 correct_pos = GetSplittablePos(next_reg_use->pos()); | |
| 2819 if (start < correct_pos && correct_pos < end) { | |
| 2820 DCHECK(reference < correct_pos); | |
| 2821 return correct_pos; | |
| 2822 } | |
| 2823 return LifetimePosition::Invalid(); | |
| 2824 } | |
| 2825 | |
| 2826 | |
| 2827 void GreedyAllocator::AllocateBlockedRange(LiveRange* current, | |
| 2828 LifetimePosition pos, bool spill) { | |
| 2829 auto tail = SplitRangeAt(current, pos); | |
| 2830 if (spill) { | |
| 2831 Spill(tail); | |
| 2832 } else { | |
| 2833 Enqueue(tail); | |
| 2834 } | |
| 2835 if (tail != current) { | |
| 2836 Enqueue(current); | |
| 2837 } | |
| 2838 } | |
| 2839 | |
| 2840 | |
| 2841 SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data) | 2400 SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data) |
| 2842 : data_(data) {} | 2401 : data_(data) {} |
| 2843 | 2402 |
| 2844 | 2403 |
| 2845 void SpillSlotLocator::LocateSpillSlots() { | 2404 void SpillSlotLocator::LocateSpillSlots() { |
| 2846 auto code = data()->code(); | 2405 auto code = data()->code(); |
| 2847 for (auto range : data()->live_ranges()) { | 2406 for (auto range : data()->live_ranges()) { |
| 2848 if (range == nullptr || range->IsEmpty() || range->IsChild()) continue; | 2407 if (range == nullptr || range->IsEmpty() || range->IsChild()) continue; |
| 2849 // We care only about ranges which spill in the frame. | 2408 // We care only about ranges which spill in the frame. |
| 2850 if (!range->HasSpillRange()) continue; | 2409 if (!range->HasSpillRange()) continue; |
| 2851 auto spills = range->spills_at_definition(); | 2410 auto spills = range->spills_at_definition(); |
| 2852 DCHECK_NOT_NULL(spills); | 2411 DCHECK_NOT_NULL(spills); |
| 2853 for (; spills != nullptr; spills = spills->next) { | 2412 for (; spills != nullptr; spills = spills->next) { |
| 2854 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame(); | 2413 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame(); |
| 2855 } | 2414 } |
| 2856 } | 2415 } |
| 2857 } | 2416 } |
| 2858 | 2417 |
| 2859 | 2418 |
| 2860 bool GreedyAllocator::TryReuseSpillForPhi(LiveRange* range) { | |
| 2861 if (range->IsChild() || !range->is_phi()) return false; | |
| 2862 DCHECK(!range->HasSpillOperand()); | |
| 2863 | |
| 2864 auto phi_map_value = data()->GetPhiMapValueFor(range->id()); | |
| 2865 auto phi = phi_map_value->phi(); | |
| 2866 auto block = phi_map_value->block(); | |
| 2867 // Count the number of spilled operands. | |
| 2868 size_t spilled_count = 0; | |
| 2869 LiveRange* first_op = nullptr; | |
| 2870 for (size_t i = 0; i < phi->operands().size(); i++) { | |
| 2871 int op = phi->operands()[i]; | |
| 2872 LiveRange* op_range = LiveRangeFor(op); | |
| 2873 if (!op_range->HasSpillRange()) continue; | |
| 2874 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); | |
| 2875 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( | |
| 2876 pred->last_instruction_index()); | |
| 2877 while (op_range != nullptr && !op_range->CanCover(pred_end)) { | |
| 2878 op_range = op_range->next(); | |
| 2879 } | |
| 2880 if (op_range != nullptr && op_range->spilled()) { | |
| 2881 spilled_count++; | |
| 2882 if (first_op == nullptr) { | |
| 2883 first_op = op_range->TopLevel(); | |
| 2884 } | |
| 2885 } | |
| 2886 } | |
| 2887 | |
| 2888 // Only continue if more than half of the operands are spilled. | |
| 2889 if (spilled_count * 2 <= phi->operands().size()) { | |
| 2890 return false; | |
| 2891 } | |
| 2892 | |
| 2893 // Try to merge the spilled operands and count the number of merged spilled | |
| 2894 // operands. | |
| 2895 DCHECK(first_op != nullptr); | |
| 2896 auto first_op_spill = first_op->GetSpillRange(); | |
| 2897 size_t num_merged = 1; | |
| 2898 for (size_t i = 1; i < phi->operands().size(); i++) { | |
| 2899 int op = phi->operands()[i]; | |
| 2900 auto op_range = LiveRangeFor(op); | |
| 2901 if (!op_range->HasSpillRange()) continue; | |
| 2902 auto op_spill = op_range->GetSpillRange(); | |
| 2903 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) { | |
| 2904 num_merged++; | |
| 2905 } | |
| 2906 } | |
| 2907 | |
| 2908 // Only continue if enough operands could be merged into the | |
| 2909 // same spill slot. | |
| 2910 if (num_merged * 2 <= phi->operands().size() || | |
| 2911 AreUseIntervalsIntersecting(first_op_spill->interval(), | |
| 2912 range->first_interval())) { | |
| 2913 return false; | |
| 2914 } | |
| 2915 | |
| 2916 // If the range does not need a register soon, spill it to the merged | |
| 2917 // spill range. | |
| 2918 auto next_pos = range->Start(); | |
| 2919 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart(); | |
| 2920 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | |
| 2921 if (pos == nullptr) { | |
| 2922 auto spill_range = | |
| 2923 range->TopLevel()->HasSpillRange() | |
| 2924 ? range->TopLevel()->GetSpillRange() | |
| 2925 : data()->AssignSpillRangeToLiveRange(range->TopLevel()); | |
| 2926 bool merged = first_op_spill->TryMerge(spill_range); | |
| 2927 CHECK(merged); | |
| 2928 Spill(range); | |
| 2929 return true; | |
| 2930 } else if (pos->pos() > range->Start().NextStart()) { | |
| 2931 auto spill_range = | |
| 2932 range->TopLevel()->HasSpillRange() | |
| 2933 ? range->TopLevel()->GetSpillRange() | |
| 2934 : data()->AssignSpillRangeToLiveRange(range->TopLevel()); | |
| 2935 bool merged = first_op_spill->TryMerge(spill_range); | |
| 2936 CHECK(merged); | |
| 2937 Enqueue( | |
| 2938 SpillBetweenUntil(range, range->Start(), range->Start(), pos->pos())); | |
| 2939 return true; | |
| 2940 } | |
| 2941 return false; | |
| 2942 } | |
| 2943 | |
| 2944 | |
| 2945 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {} | 2419 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {} |
| 2946 | 2420 |
| 2947 | 2421 |
| 2948 void OperandAssigner::AssignSpillSlots() { | 2422 void OperandAssigner::AssignSpillSlots() { |
| 2949 auto& spill_ranges = data()->spill_ranges(); | 2423 auto& spill_ranges = data()->spill_ranges(); |
| 2950 // Merge disjoint spill ranges. | 2424 // Merge disjoint spill ranges. |
| 2951 for (size_t i = 0; i < spill_ranges.size(); i++) { | 2425 for (size_t i = 0; i < spill_ranges.size(); i++) { |
| 2952 auto range = spill_ranges[i]; | 2426 auto range = spill_ranges[i]; |
| 2953 if (range->IsEmpty()) continue; | 2427 if (range->IsEmpty()) continue; |
| 2954 for (size_t j = i + 1; j < spill_ranges.size(); j++) { | 2428 for (size_t j = i + 1; j < spill_ranges.size(); j++) { |
| (...skipping 367 matching lines...) |
| 3322 DelayedInsertionMap delayed_insertion_map(local_zone); | 2796 DelayedInsertionMap delayed_insertion_map(local_zone); |
| 3323 for (auto first_range : data()->live_ranges()) { | 2797 for (auto first_range : data()->live_ranges()) { |
| 3324 if (first_range == nullptr || first_range->IsChild()) continue; | 2798 if (first_range == nullptr || first_range->IsChild()) continue; |
| 3325 for (auto second_range = first_range->next(); second_range != nullptr; | 2799 for (auto second_range = first_range->next(); second_range != nullptr; |
| 3326 first_range = second_range, second_range = second_range->next()) { | 2800 first_range = second_range, second_range = second_range->next()) { |
| 3327 auto pos = second_range->Start(); | 2801 auto pos = second_range->Start(); |
| 3328 // Add gap move if the two live ranges touch and there is no block | 2802 // Add gap move if the two live ranges touch and there is no block |
| 3329 // boundary. | 2803 // boundary. |
| 3330 if (second_range->spilled()) continue; | 2804 if (second_range->spilled()) continue; |
| 3331 if (first_range->End() != pos) continue; | 2805 if (first_range->End() != pos) continue; |
| 3332 if (IsBlockBoundary(code(), pos) && | 2806 if (data()->IsBlockBoundary(pos) && |
| 3333 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) { | 2807 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) { |
| 3334 continue; | 2808 continue; |
| 3335 } | 2809 } |
| 3336 auto prev_operand = first_range->GetAssignedOperand(); | 2810 auto prev_operand = first_range->GetAssignedOperand(); |
| 3337 auto cur_operand = second_range->GetAssignedOperand(); | 2811 auto cur_operand = second_range->GetAssignedOperand(); |
| 3338 if (prev_operand.Equals(cur_operand)) continue; | 2812 if (prev_operand.Equals(cur_operand)) continue; |
| 3339 bool delay_insertion = false; | 2813 bool delay_insertion = false; |
| 3340 Instruction::GapPosition gap_pos; | 2814 Instruction::GapPosition gap_pos; |
| 3341 int gap_index = pos.ToInstructionIndex(); | 2815 int gap_index = pos.ToInstructionIndex(); |
| 3342 if (pos.IsGapPosition()) { | 2816 if (pos.IsGapPosition()) { |
| (...skipping 44 matching lines...) |
| 3387 auto eliminate = moves->PrepareInsertAfter(move); | 2861 auto eliminate = moves->PrepareInsertAfter(move); |
| 3388 to_insert.push_back(move); | 2862 to_insert.push_back(move); |
| 3389 if (eliminate != nullptr) to_eliminate.push_back(eliminate); | 2863 if (eliminate != nullptr) to_eliminate.push_back(eliminate); |
| 3390 } | 2864 } |
| 3391 } | 2865 } |
| 3392 | 2866 |
| 3393 | 2867 |
| 3394 } // namespace compiler | 2868 } // namespace compiler |
| 3395 } // namespace internal | 2869 } // namespace internal |
| 3396 } // namespace v8 | 2870 } // namespace v8 |
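A side note on the data structure removed above: CoalescedLiveRanges keys a splay tree by use intervals and makes Config::Compare return 0 whenever two intervals overlap, so Find with any query interval lands directly on a conflicting allocated range. Below is a minimal sketch of that comparator trick, assuming std::map in place of V8's ZoneSplayTree; the names (Interval, OverlapCompare, allocation) are illustrative, not from the patch:

```cpp
// Sketch only: overlap-as-equivalence interval keys, in the spirit of
// CoalescedLiveRanges::Config::Compare, built on std::map.
#include <cassert>
#include <map>
#include <utility>

using Interval = std::pair<int, int>;  // [start, end)

struct OverlapCompare {
  // a < b only when a ends at or before b's start; overlapping intervals
  // compare equivalent. This is a valid ordering only while the stored
  // keys stay pairwise disjoint, which insertion enforces by rejecting
  // conflicting intervals.
  bool operator()(const Interval& a, const Interval& b) const {
    return a.second <= b.first;
  }
};

int main() {
  std::map<Interval, int, OverlapCompare> allocation;  // interval -> vreg id
  assert(allocation.emplace(Interval{0, 10}, 1).second);   // inserted
  assert(allocation.emplace(Interval{20, 30}, 2).second);  // disjoint: inserted
  assert(!allocation.emplace(Interval{5, 25}, 3).second);  // overlap: rejected
  // A lookup with any overlapping query finds the stored conflict,
  // mirroring CoalescedLiveRanges::Find(query).
  auto it = allocation.find(Interval{8, 12});
  assert(it != allocation.end() && it->second == 1);
  return 0;
}
```

The same invariant explains why the original code can get away with kNoKey = {0, 0} and a Compare that returns 0 on overlap: equivalence is only ever tested between a query and the disjoint stored intervals, never among overlapping stored keys.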