Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
| 6 #include "src/compiler/linkage.h" | 6 #include "src/compiler/linkage.h" |
| 7 #include "src/compiler/register-allocator.h" | 7 #include "src/compiler/register-allocator.h" |
| 8 #include "src/string-stream.h" | 8 #include "src/string-stream.h" |
| 9 | 9 |
| 10 namespace v8 { | 10 namespace v8 { |
| 11 namespace internal { | 11 namespace internal { |
| 12 namespace compiler { | 12 namespace compiler { |
| 13 | 13 |
| 14 #define TRACE(...) \ | 14 #define TRACE(...) \ |
| 15 do { \ | 15 do { \ |
| 16 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ | 16 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
| 17 } while (false) | 17 } while (false) |
| 18 | 18 |
| 19 | |
| 20 class CoallescedLiveRanges : public ZoneObject { | |
|
dcarney 2015/04/22 07:07:24:
please move this class below the last LinearScanAllocator function
| 21 public: | |
| 22 explicit CoallescedLiveRanges(Zone* zone) : storage_(zone) {} | |
| 23 | |
| 24 LiveRange* Find(UseInterval* query) { | |
| 25 ZoneSplayTree<Config>::Locator locator; | |
| 26 | |
| 27 if (storage_.Find(GetKey(query), &locator)) { | |
| 28 return locator.value(); | |
| 29 } | |
| 30 return nullptr; | |
| 31 } | |
| 32 | |
| 33 // TODO(mtrofin): Change to return void if we do not care whether the | |
| 34 // interval was previously added. | |
| 35 bool Insert(LiveRange* range) { | |
| 36 auto* interval = range->first_interval(); | |
| 37 while (interval != nullptr) { | |
| 38 if (!Insert(interval, range)) return false; | |
| 39 interval = interval->next(); | |
| 40 } | |
| 41 return true; | |
| 42 } | |
| 43 | |
| 44 bool Remove(LiveRange* range) { | |
| 45 bool ret = false; | |
| 46 auto* segment = range->first_interval(); | |
| 47 while (segment != nullptr) { | |
| 48 ret |= Remove(segment); | |
| 49 segment = segment->next(); | |
| 50 } | |
| 51 return ret; | |
| 52 } | |
| 53 | |
| 54 bool IsEmpty() { return storage_.is_empty(); } | |
| 55 | |
| 56 private: | |
| 57 struct Config { | |
| 58 typedef std::pair<int, int> Key; | |
| 59 typedef LiveRange* Value; | |
| 60 static const Key kNoKey; | |
| 61 static Value NoValue() { return nullptr; } | |
| 62 static int Compare(const Key& a, const Key& b) { | |
| 63 if (a.second <= b.first) return -1; | |
| 64 if (a.first >= b.second) return 1; | |
| 65 return 0; | |
| 66 } | |
| 67 }; | |
| 68 | |
| 69 Config::Key GetKey(UseInterval* interval) { | |
| 70 if (interval == nullptr) return std::make_pair(0, 0); | |
| 71 return std::make_pair(interval->start().Value(), interval->end().Value()); | |
| 72 } | |
| 73 | |
| 74 // TODO(mtrofin): Change to return void if we do not care whether the | |
| 75 // interval was previously added. | |
| 76 bool Insert(UseInterval* interval, LiveRange* range) { | |
| 77 ZoneSplayTree<Config>::Locator locator; | |
| 78 bool ret = storage_.Insert(GetKey(interval), &locator); | |
| 79 if (ret) locator.set_value(range); | |
| 80 return ret; | |
| 81 } | |
| 82 | |
| 83 bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); } | |
| 84 | |
| 85 ZoneSplayTree<Config> storage_; | |
| 86 DISALLOW_COPY_AND_ASSIGN(CoallescedLiveRanges); | |
| 87 }; | |
| 88 | |
| 89 | |
| 90 const std::pair<int, int> CoallescedLiveRanges::Config::kNoKey = | |
| 91 std::make_pair<int, int>(0, 0); | |
| 92 | |
| 93 | |
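
An aside on `Config::Compare` above: it deliberately reports any two overlapping intervals as "equal", so a splay-tree `Find` on a query interval surfaces a conflicting live range instead of requiring an exact key match. A minimal standalone sketch of that ordering (plain `std::pair` keys, not part of this CL):

```cpp
#include <cassert>
#include <utility>

using Key = std::pair<int, int>;  // half-open interval [start, end)

// Mirrors Config::Compare: disjoint intervals are ordered by position,
// overlapping intervals compare equal.
int Compare(const Key& a, const Key& b) {
  if (a.second <= b.first) return -1;  // a lies entirely before b
  if (a.first >= b.second) return 1;   // a lies entirely after b
  return 0;                            // a and b overlap
}

int main() {
  assert(Compare({0, 5}, {5, 9}) == -1);  // adjacent but disjoint
  assert(Compare({5, 9}, {0, 5}) == 1);
  assert(Compare({2, 6}, {4, 9}) == 0);   // overlap reads as "equal"
  // This is only a consistent ordering while the stored keys stay
  // pairwise disjoint, which Insert() enforces by failing on conflicts.
  return 0;
}
```
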
| 19 namespace { | 94 namespace { |
| 20 | 95 |
| 21 inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { | 96 inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { |
| 22 return a.Value() < b.Value() ? a : b; | 97 return a.Value() < b.Value() ? a : b; |
| 23 } | 98 } |
| 24 | 99 |
| 25 | 100 |
| 26 inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { | 101 inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { |
| 27 return a.Value() > b.Value() ? a : b; | 102 return a.Value() > b.Value() ? a : b; |
| 28 } | 103 } |
| (...skipping 1426 matching lines...) | |
| 1455 } | 1530 } |
| 1456 | 1531 |
| 1457 | 1532 |
| 1458 void LiveRangeBuilder::Verify() const { | 1533 void LiveRangeBuilder::Verify() const { |
| 1459 for (auto current : data()->live_ranges()) { | 1534 for (auto current : data()->live_ranges()) { |
| 1460 if (current != nullptr) current->Verify(); | 1535 if (current != nullptr) current->Verify(); |
| 1461 } | 1536 } |
| 1462 } | 1537 } |
| 1463 | 1538 |
| 1464 | 1539 |
| 1465 RegisterAllocator::RegisterAllocator(RegisterAllocationData* data) | 1540 RegisterAllocator::RegisterAllocator(RegisterAllocationData* data, |
| 1466 : data_(data) {} | 1541 RegisterKind kind) |
| 1542 : data_(data), | |
| 1543 mode_(kind), | |
| 1544 num_registers_(GetRegisterCount(data->config(), kind)) {} | |
| 1467 | 1545 |
| 1468 | 1546 |
| 1469 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, | 1547 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, |
| 1470 LifetimePosition pos) { | 1548 LifetimePosition pos) { |
| 1471 DCHECK(!range->IsFixed()); | 1549 DCHECK(!range->IsFixed()); |
| 1472 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); | 1550 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); |
| 1473 | 1551 |
| 1474 if (pos.Value() <= range->Start().Value()) return range; | 1552 if (pos.Value() <= range->Start().Value()) return range; |
| 1475 | 1553 |
| 1476 // We can't properly connect live ranges if splitting occurred at the end | 1554 // We can't properly connect live ranges if splitting occurred at the end |
| (...skipping 96 matching lines...) | |
| 1573 auto first = range->TopLevel(); | 1651 auto first = range->TopLevel(); |
| 1574 if (first->HasNoSpillType()) { | 1652 if (first->HasNoSpillType()) { |
| 1575 data()->AssignSpillRangeToLiveRange(first); | 1653 data()->AssignSpillRangeToLiveRange(first); |
| 1576 } | 1654 } |
| 1577 range->MakeSpilled(); | 1655 range->MakeSpilled(); |
| 1578 } | 1656 } |
| 1579 | 1657 |
| 1580 | 1658 |
| 1581 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data, | 1659 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data, |
| 1582 RegisterKind kind, Zone* local_zone) | 1660 RegisterKind kind, Zone* local_zone) |
| 1583 : RegisterAllocator(data), | 1661 : RegisterAllocator(data, kind), |
| 1584 mode_(kind), | |
| 1585 num_registers_(GetRegisterCount(data->config(), kind)), | |
| 1586 unhandled_live_ranges_(local_zone), | 1662 unhandled_live_ranges_(local_zone), |
| 1587 active_live_ranges_(local_zone), | 1663 active_live_ranges_(local_zone), |
| 1588 inactive_live_ranges_(local_zone) { | 1664 inactive_live_ranges_(local_zone) { |
| 1589 unhandled_live_ranges().reserve( | 1665 unhandled_live_ranges().reserve( |
| 1590 static_cast<size_t>(code()->VirtualRegisterCount() * 2)); | 1666 static_cast<size_t>(code()->VirtualRegisterCount() * 2)); |
| 1591 active_live_ranges().reserve(8); | 1667 active_live_ranges().reserve(8); |
| 1592 inactive_live_ranges().reserve(8); | 1668 inactive_live_ranges().reserve(8); |
| 1593 // TryAllocateFreeReg and AllocateBlockedReg assume this | 1669 // TryAllocateFreeReg and AllocateBlockedReg assume this |
| 1594 // when allocating local arrays. | 1670 // when allocating local arrays. |
| 1595 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= | 1671 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= |
| 1596 this->data()->config()->num_general_registers()); | 1672 this->data()->config()->num_general_registers()); |
| 1597 } | 1673 } |
| 1598 | 1674 |
| 1599 | 1675 |
| 1600 void LinearScanAllocator::AllocateRegisters() { | 1676 void LinearScanAllocator::AllocateRegisters() { |
| 1601 DCHECK(unhandled_live_ranges().empty()); | 1677 DCHECK(unhandled_live_ranges().empty()); |
| 1602 DCHECK(active_live_ranges().empty()); | 1678 DCHECK(active_live_ranges().empty()); |
| 1603 DCHECK(inactive_live_ranges().empty()); | 1679 DCHECK(inactive_live_ranges().empty()); |
| 1604 | 1680 |
| 1605 for (auto range : data()->live_ranges()) { | 1681 for (auto range : data()->live_ranges()) { |
| 1606 if (range == nullptr) continue; | 1682 if (range == nullptr) continue; |
| 1607 if (range->Kind() == mode_) { | 1683 if (range->Kind() == mode()) { |
| 1608 AddToUnhandledUnsorted(range); | 1684 AddToUnhandledUnsorted(range); |
| 1609 } | 1685 } |
| 1610 } | 1686 } |
| 1611 SortUnhandled(); | 1687 SortUnhandled(); |
| 1612 DCHECK(UnhandledIsSorted()); | 1688 DCHECK(UnhandledIsSorted()); |
| 1613 | 1689 |
| 1614 auto& fixed_ranges = GetFixedRegisters(data(), mode_); | 1690 auto& fixed_ranges = GetFixedRegisters(data(), mode()); |
| 1615 for (auto current : fixed_ranges) { | 1691 for (auto current : fixed_ranges) { |
| 1616 if (current != nullptr) { | 1692 if (current != nullptr) { |
| 1617 DCHECK_EQ(mode_, current->Kind()); | 1693 DCHECK_EQ(mode(), current->Kind()); |
| 1618 AddToInactive(current); | 1694 AddToInactive(current); |
| 1619 } | 1695 } |
| 1620 } | 1696 } |
| 1621 | 1697 |
| 1622 while (!unhandled_live_ranges().empty()) { | 1698 while (!unhandled_live_ranges().empty()) { |
| 1623 DCHECK(UnhandledIsSorted()); | 1699 DCHECK(UnhandledIsSorted()); |
| 1624 auto current = unhandled_live_ranges().back(); | 1700 auto current = unhandled_live_ranges().back(); |
| 1625 unhandled_live_ranges().pop_back(); | 1701 unhandled_live_ranges().pop_back(); |
| 1626 DCHECK(UnhandledIsSorted()); | 1702 DCHECK(UnhandledIsSorted()); |
| 1627 auto position = current->Start(); | 1703 auto position = current->Start(); |
| (...skipping 52 matching lines...) | |
| 1680 bool result = TryAllocateFreeReg(current); | 1756 bool result = TryAllocateFreeReg(current); |
| 1681 if (!result) AllocateBlockedReg(current); | 1757 if (!result) AllocateBlockedReg(current); |
| 1682 if (current->HasRegisterAssigned()) { | 1758 if (current->HasRegisterAssigned()) { |
| 1683 AddToActive(current); | 1759 AddToActive(current); |
| 1684 } | 1760 } |
| 1685 } | 1761 } |
| 1686 } | 1762 } |
| 1687 | 1763 |
| 1688 | 1764 |
| 1689 const char* LinearScanAllocator::RegisterName(int allocation_index) const { | 1765 const char* LinearScanAllocator::RegisterName(int allocation_index) const { |
| 1690 if (mode_ == GENERAL_REGISTERS) { | 1766 if (mode() == GENERAL_REGISTERS) { |
| 1691 return data()->config()->general_register_name(allocation_index); | 1767 return data()->config()->general_register_name(allocation_index); |
| 1692 } else { | 1768 } else { |
| 1693 return data()->config()->double_register_name(allocation_index); | 1769 return data()->config()->double_register_name(allocation_index); |
| 1694 } | 1770 } |
| 1695 } | 1771 } |
| 1696 | 1772 |
| 1697 | 1773 |
| 1698 void LinearScanAllocator::AddToActive(LiveRange* range) { | 1774 void LinearScanAllocator::AddToActive(LiveRange* range) { |
| 1699 TRACE("Add live range %d to active\n", range->id()); | 1775 TRACE("Add live range %d to active\n", range->id()); |
| 1700 active_live_ranges().push_back(range); | 1776 active_live_ranges().push_back(range); |
| (...skipping 822 matching lines...) | |
| 2523 moves = it->first.first; | 2599 moves = it->first.first; |
| 2524 } | 2600 } |
| 2525 // Gather all MoveOperands for a single ParallelMove. | 2601 // Gather all MoveOperands for a single ParallelMove. |
| 2526 auto move = new (code_zone()) MoveOperands(it->first.second, it->second); | 2602 auto move = new (code_zone()) MoveOperands(it->first.second, it->second); |
| 2527 auto eliminate = moves->PrepareInsertAfter(move); | 2603 auto eliminate = moves->PrepareInsertAfter(move); |
| 2528 to_insert.push_back(move); | 2604 to_insert.push_back(move); |
| 2529 if (eliminate != nullptr) to_eliminate.push_back(eliminate); | 2605 if (eliminate != nullptr) to_eliminate.push_back(eliminate); |
| 2530 } | 2606 } |
| 2531 } | 2607 } |
| 2532 | 2608 |
| 2609 | |
| 2610 GreedyAllocator::GreedyAllocator(RegisterAllocationData* data, | |
|
dcarney 2015/04/22 07:07:24:
please move all functions for this class above the
| 2611 RegisterKind kind, Zone* local_zone) | |
| 2612 : RegisterAllocator(data, kind), | |
| 2613 allocations_(local_zone), | |
| 2614 queue_(local_zone) {} | |
| 2615 | |
| 2616 | |
| 2617 unsigned GreedyAllocator::GetLiveRangeSize(LiveRange* range) { | |
| 2618 auto interval = range->first_interval(); | |
| 2619 if (interval == nullptr) return 0; | |
| 2620 | |
| 2621 unsigned size = 0; | |
| 2622 while (interval != nullptr) { | |
| 2623 size += (interval->end().Value() - interval->start().Value()); | |
| 2624 interval = interval->next(); | |
| 2625 } | |
| 2626 | |
| 2627 return size; | |
| 2628 } | |
| 2629 | |
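
A standalone illustration (hypothetical `Interval` type, not this CL's) of the metric `GetLiveRangeSize` computes above: the sum of the individual interval lengths, so lifetime holes between intervals contribute nothing:

```cpp
#include <cassert>

// Hypothetical stand-in for the UseInterval chain.
struct Interval {
  int start, end;        // half-open [start, end)
  const Interval* next;  // next interval in the live range, or nullptr
};

unsigned LiveRangeSize(const Interval* interval) {
  unsigned size = 0;
  for (; interval != nullptr; interval = interval->next) {
    size += static_cast<unsigned>(interval->end - interval->start);
  }
  return size;
}

int main() {
  // Intervals [0,4) and [10,12) with a hole between them: the size is
  // 4 + 2 = 6, not 12 - 0 = 12.
  Interval tail{10, 12, nullptr};
  Interval head{0, 4, &tail};
  assert(LiveRangeSize(&head) == 6);
  return 0;
}
```
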
| 2630 | |
| 2631 void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) { | |
| 2632 allocations_[reg_id]->Insert(range); | |
| 2633 if (range->HasRegisterAssigned()) { | |
| 2634 DCHECK_EQ(reg_id, range->assigned_register()); | |
| 2635 return; | |
| 2636 } | |
| 2637 range->set_assigned_register(reg_id); | |
| 2638 } | |
| 2639 | |
| 2640 | |
| 2641 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) { | |
| 2642 if (range->IsFixed()) return std::numeric_limits<float>::max(); | |
| 2643 | |
| 2644 if (range->FirstHint() != nullptr && range->FirstHint()->IsRegister()) { | |
| 2645 return std::numeric_limits<float>::max(); | |
| 2646 } | |
| 2647 | |
| 2648 unsigned use_count = 0; | |
| 2649 auto* pos = range->first_pos(); | |
| 2650 while (pos != nullptr) { | |
| 2651 use_count++; | |
| 2652 pos = pos->next(); | |
| 2653 } | |
| 2654 | |
| 2655 // GetLiveRangeSize is DCHECKed below to be non-zero. | |
| 2656 unsigned range_size = GetLiveRangeSize(range); | |
| 2657 DCHECK_NE(0, range_size); | |
| 2658 | |
| 2659 return static_cast<float>(use_count) / static_cast<float>(range_size); | |
| 2660 } | |
| 2661 | |
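
To make the heuristic in `CalculateSpillWeight` concrete, a hedged standalone restatement (all names hypothetical): unevictable ranges get infinite weight, and everything else is scored as uses per unit of lifetime, so short, densely used ranges are the most expensive to evict:

```cpp
#include <cassert>
#include <limits>

float SpillWeight(bool is_fixed, bool hinted_to_register,
                  unsigned use_count, unsigned range_size) {
  // Fixed ranges and ranges hinted into a register must never lose an
  // eviction contest.
  if (is_fixed || hinted_to_register) {
    return std::numeric_limits<float>::max();
  }
  // Use density: uses per unit of live-range size.
  return static_cast<float>(use_count) / static_cast<float>(range_size);
}

int main() {
  // 3 uses across 6 positions (0.5) outweighs 2 uses across 20 (0.1):
  // when the two collide, the sparser range is the eviction victim.
  assert(SpillWeight(false, false, 3, 6) >
         SpillWeight(false, false, 2, 20));
  return 0;
}
```
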
| 2662 | |
| 2663 float GreedyAllocator::CalculateMaxSpillWeight( | |
| 2664 const ZoneSet<LiveRange*>& ranges) { | |
| 2665 float max = 0.0; | |
| 2666 for (auto* r : ranges) { | |
| 2667 max = std::max(max, CalculateSpillWeight(r)); | |
| 2668 } | |
| 2669 return max; | |
| 2670 } | |
| 2671 | |
| 2672 | |
| 2673 void GreedyAllocator::Evict(LiveRange* range) { | |
| 2674 bool removed = allocations_[range->assigned_register()]->Remove(range); | |
| 2675 CHECK(removed); | |
| 2676 } | |
| 2677 | |
| 2678 | |
| 2679 bool GreedyAllocator::TryAllocatePhysicalRegister( | |
| 2680 unsigned reg_id, LiveRange* range, ZoneSet<LiveRange*>* conflicting) { | |
| 2681 auto* segment = range->first_interval(); | |
| 2682 | |
| 2683 auto* alloc_info = allocations_[reg_id]; | |
| 2684 while (segment != nullptr) { | |
| 2685 if (auto* existing = alloc_info->Find(segment)) { | |
| 2686 DCHECK(existing->HasRegisterAssigned()); | |
| 2687 conflicting->insert(existing); | |
| 2688 } | |
| 2689 segment = segment->next(); | |
| 2690 } | |
| 2691 if (!conflicting->empty()) return false; | |
| 2692 // No conflicts means we can safely allocate this register to this range. | |
| 2693 AssignRangeToRegister(reg_id, range); | |
| 2694 return true; | |
| 2695 } | |
| 2696 | |
| 2697 bool GreedyAllocator::TryAllocate(LiveRange* current, | |
| 2698 ZoneSet<LiveRange*>* conflicting) { | |
| 2699 if (current->HasSpillOperand()) { | |
| 2700 Spill(current); | |
| 2701 return true; | |
| 2702 } | |
| 2703 if (current->IsFixed()) { | |
| 2704 return TryAllocatePhysicalRegister(current->assigned_register(), current, | |
| 2705 conflicting); | |
| 2706 } | |
| 2707 | |
| 2708 if (current->HasRegisterAssigned()) { | |
| 2709 int reg_id = current->assigned_register(); | |
| 2710 return TryAllocatePhysicalRegister(reg_id, current, conflicting); | |
| 2711 } | |
| 2712 | |
| 2713 for (unsigned candidate_reg = 0; candidate_reg < allocations_.size(); | |
| 2714 candidate_reg++) { | |
| 2715 if (TryAllocatePhysicalRegister(candidate_reg, current, conflicting)) { | |
| 2716 conflicting->clear(); | |
| 2717 return true; | |
| 2718 } | |
| 2719 } | |
| 2720 return false; | |
| 2721 } | |
| 2722 | |
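
A hedged summary of the `TryAllocate` decision cascade above, reduced to a tiny classifier (hypothetical and simplified; it ignores how the conflict set is threaded through the probes): ranges with a spill operand are spilled outright, fixed and already-assigned ranges must be placed in their one register, and only unconstrained ranges go through the first-fit scan over all registers:

```cpp
#include <cassert>

enum class Decision { kSpill, kMustUseAssignedRegister, kFirstFitScan };

Decision Classify(bool has_spill_operand, bool is_fixed,
                  bool has_register_assigned) {
  if (has_spill_operand) return Decision::kSpill;  // spill and done
  if (is_fixed || has_register_assigned) {
    return Decision::kMustUseAssignedRegister;     // one candidate only
  }
  return Decision::kFirstFitScan;                  // try every register
}

int main() {
  assert(Classify(true, false, false) == Decision::kSpill);
  assert(Classify(false, true, false) == Decision::kMustUseAssignedRegister);
  assert(Classify(false, false, true) == Decision::kMustUseAssignedRegister);
  assert(Classify(false, false, false) == Decision::kFirstFitScan);
  return 0;
}
```
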
| 2723 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range, | |
| 2724 LifetimePosition start, | |
| 2725 LifetimePosition until, | |
| 2726 LifetimePosition end) { | |
| 2727 CHECK(start.Value() < end.Value()); | |
| 2728 auto second_part = SplitRangeAt(range, start); | |
| 2729 | |
| 2730 if (second_part->Start().Value() < end.Value()) { | |
| 2731 // The split result intersects with [start, end[. | |
| 2732 // Split it at a position in ]start+1, end[, spill the middle part, | |
| 2733 // and re-queue the rest. | |
| 2734 auto third_part_end = end.PrevStart().End(); | |
| 2735 if (IsBlockBoundary(code(), end.Start())) { | |
| 2736 third_part_end = end.Start(); | |
| 2737 } | |
| 2738 auto third_part = SplitBetween( | |
| 2739 second_part, Max(second_part->Start().End(), until), third_part_end); | |
| 2740 | |
| 2741 DCHECK(third_part != second_part); | |
| 2742 | |
| 2743 Spill(second_part); | |
| 2744 return third_part; | |
| 2745 } else { | |
| 2746 // The split result does not intersect with [start, end[. | |
| 2747 // Nothing to spill. Just return it for re-processing. | |
| 2748 return second_part; | |
| 2749 } | |
| 2750 } | |
| 2751 | |
| 2752 | |
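
The shape of `SpillBetweenUntil` above, reduced to integer positions (a rough sketch under stated assumptions, not this CL's logic: it ignores gap/instruction position granularity and block-boundary adjustment, and it takes the lower bound where `SplitBetween` would search for a good split point): cut at `start`; if the second piece still reaches into `[start, end[`, cut again no earlier than `until`, spill only the middle piece, and hand the tail back for re-processing:

```cpp
#include <algorithm>
#include <cassert>

struct SplitPlan {
  int spill_from, spill_to;  // middle piece to spill; empty if equal
  int requeue_from;          // start of the piece handed back
};

SplitPlan Plan(int range_start, int start, int until, int end) {
  int second_start = std::max(range_start, start);
  if (second_start >= end) {
    // The split result does not intersect [start, end[: nothing to spill.
    return {second_start, second_start, second_start};
  }
  int third_start = std::max(second_start + 1, until);
  return {second_start, third_start, third_start};
}

int main() {
  // A range starting at 0, spilled from 10, usable again from 20, with
  // the window of interest closing at 30: spill [10, 20), requeue at 20.
  SplitPlan p = Plan(0, 10, 20, 30);
  assert(p.spill_from == 10 && p.spill_to == 20 && p.requeue_from == 20);
  return 0;
}
```
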
| 2753 void GreedyAllocator::Enqueue(LiveRange* range) { | |
| 2754 if (range == nullptr || range->IsEmpty()) return; | |
| 2755 unsigned size = GetLiveRangeSize(range); | |
| 2756 queue_.push(std::make_pair(size, range)); | |
| 2757 } | |
| 2758 | |
| 2759 | |
| 2760 // TODO(mtrofin): consolidate with identical code segment in | |
| 2761 // LinearScanAllocator::AllocateRegisters | |
| 2762 bool GreedyAllocator::HandleSpillOperands(LiveRange* range) { | |
| 2763 auto position = range->Start(); | |
| 2764 TRACE("Processing interval %d start=%d\n", range->id(), position.Value()); | |
| 2765 | |
| 2766 if (!range->HasNoSpillType()) { | |
| 2767 TRACE("Live range %d already has a spill operand\n", range->id()); | |
| 2768 auto next_pos = position; | |
| 2769 if (next_pos.IsGapPosition()) { | |
| 2770 next_pos = next_pos.NextStart(); | |
| 2771 } | |
| 2772 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | |
| 2773 // If the range already has a spill operand and it doesn't need a | |
| 2774 // register immediately, split it and spill the first part of the range. | |
| 2775 if (pos == nullptr) { | |
| 2776 Spill(range); | |
| 2777 return true; | |
| 2778 } else if (pos->pos().Value() > range->Start().NextStart().Value()) { | |
| 2779 // Do not spill the live range eagerly if the use position that can benefit | |
| 2780 // from the register is too close to the start of the live range. | |
| 2781 auto* remainder = SpillBetweenUntil(range, position, position, pos->pos()); | |
| 2782 Enqueue(remainder); | |
| 2783 return true; | |
| 2784 } | |
| 2785 } | |
| 2786 return false; | |
| 2787 // TODO(mtrofin): Do we need this? | |
| 2788 // return (TryReuseSpillForPhi(range)); | |
| 2789 } | |
| 2790 | |
| 2791 | |
| 2792 void GreedyAllocator::AllocateRegisters() { | |
| 2793 for (auto range : data()->live_ranges()) { | |
| 2794 if (range == nullptr) continue; | |
| 2795 if (range->Kind() == mode()) { | |
| 2796 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | |
| 2797 TRACE("Enqueueing live range %d to priority queue \n", range->id()); | |
| 2798 Enqueue(range); | |
| 2799 } | |
| 2800 } | |
| 2801 | |
| 2802 allocations_.resize(num_registers()); | |
| 2803 auto* zone = data()->allocation_zone(); | |
| 2804 for (int i = 0; i < num_registers(); i++) { | |
| 2805 allocations_[i] = new (zone) CoallescedLiveRanges(zone); | |
| 2806 } | |
| 2807 | |
| 2808 for (auto* current : GetFixedRegisters(data(), mode())) { | |
| 2809 if (current != nullptr) { | |
| 2810 DCHECK_EQ(mode(), current->Kind()); | |
| 2811 int reg_nr = current->assigned_register(); | |
| 2812 bool inserted = allocations_[reg_nr]->Insert(current); | |
| 2813 CHECK(inserted); | |
| 2814 } | |
| 2815 } | |
| 2816 | |
| 2817 while (!queue_.empty()) { | |
| 2818 auto current_pair = queue_.top(); | |
| 2819 queue_.pop(); | |
| 2820 auto current = current_pair.second; | |
| 2821 if (HandleSpillOperands(current)) continue; | |
| 2822 ZoneSet<LiveRange*> conflicting(zone); | |
| 2823 if (!TryAllocate(current, &conflicting)) { | |
| 2824 DCHECK(!conflicting.empty()); | |
| 2825 float this_max = CalculateSpillWeight(current); | |
| 2826 float max_conflicting = CalculateMaxSpillWeight(conflicting); | |
| 2827 if (max_conflicting < this_max) { | |
| 2828 for (auto* conflict : conflicting) { | |
| 2829 Evict(conflict); | |
| 2830 Enqueue(conflict); | |
| 2831 } | |
| 2832 conflicting.clear(); | |
| 2833 bool allocated = TryAllocate(current, &conflicting); | |
| 2834 CHECK(allocated); | |
| 2835 DCHECK(conflicting.empty()); | |
| 2836 } else { | |
| 2837 DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start())); | |
| 2838 bool allocated = AllocateBlockedRange(current, conflicting); | |
| 2839 CHECK(allocated); | |
| 2840 } | |
| 2841 } | |
| 2842 } | |
| 2843 | |
| 2844 BitVector* assigned_registers = new (zone) BitVector(num_registers(), zone); | |
| 2845 for (size_t i = 0; i < allocations_.size(); ++i) { | |
| 2846 if (!allocations_[i]->IsEmpty()) { | |
| 2847 assigned_registers->Add(static_cast<int>(i)); | |
| 2848 } | |
| 2849 } | |
| 2850 data()->frame()->SetAllocatedRegisters(assigned_registers); | |
| 2851 } | |
| 2852 | |
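
One more hedged sketch, compressing the conflict-resolution step of the main loop above (hypothetical names; bare weights stand in for live ranges): conflicts are evicted and re-queued only when the incoming range is strictly heavier than the heaviest conflict; on a tie or a loss, the incoming range is the one that gets the blocked-range treatment:

```cpp
#include <algorithm>
#include <cassert>
#include <vector>

// True if `current` should displace its conflicts (evict and re-enqueue
// them, then retry); false if `current` is split/spilled instead.
bool ShouldEvictConflicts(float current_weight,
                          const std::vector<float>& conflict_weights) {
  float max_conflicting = 0.0f;
  for (float w : conflict_weights) {
    max_conflicting = std::max(max_conflicting, w);
  }
  return max_conflicting < current_weight;
}

int main() {
  // A hot range (0.5) displaces two colder incumbents (0.1 and 0.2)...
  assert(ShouldEvictConflicts(0.5f, {0.1f, 0.2f}));
  // ...but a tie does not: equal weight leaves the incumbents in place.
  assert(!ShouldEvictConflicts(0.2f, {0.2f}));
  return 0;
}
```
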
| 2853 | |
| 2854 bool GreedyAllocator::AllocateBlockedRange( | |
| 2855 LiveRange* current, const ZoneSet<LiveRange*>& conflicts) { | |
| 2856 auto register_use = current->NextRegisterPosition(current->Start()); | |
| 2857 if (register_use == nullptr) { | |
| 2858 // There is no use in the current live range that requires a register. | |
| 2859 // We can just spill it. | |
| 2860 Spill(current); | |
| 2861 return true; | |
| 2862 } | |
| 2863 | |
| 2864 auto second_part = SplitRangeAt(current, register_use->pos()); | |
| 2865 Spill(second_part); | |
| 2866 | |
| 2867 return true; | |
| 2868 } | |
| 2869 | |
| 2870 | |
| 2533 } // namespace compiler | 2871 } // namespace compiler |
| 2534 } // namespace internal | 2872 } // namespace internal |
| 2535 } // namespace v8 | 2873 } // namespace v8 |