Chromium Code Reviews

Unified diff: src/compiler/register-allocator.cc

Issue 1061923005: Reland: Introducing the LLVM greedy register allocator. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Redo after refactoring (created 5 years, 8 months ago)
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/adapters.h"
#include "src/compiler/linkage.h"
#include "src/compiler/register-allocator.h"
#include "src/string-stream.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(...)                             \
  do {                                         \
    if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
  } while (false)

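An aside for readers new to the codebase: the do { ... } while (false) wrapper makes the macro expansion a single statement, so TRACE composes safely with unbraced if/else. A minimal standalone illustration (the flag name below is a stand-in, not V8's):

#include <cstdio>

static bool trace_enabled = true;  // stand-in for FLAG_trace_alloc

#define TRACE(...)                                 \
  do {                                             \
    if (trace_enabled) std::printf(__VA_ARGS__);   \
  } while (false)

int main() {
  if (trace_enabled)
    TRACE("allocating\n");  // expands to exactly one statement...
  else                      // ...so this else still binds to the outer if
    std::printf("tracing off\n");
}
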
+
+class CoallescedLiveRanges : public ZoneObject {
+ public:
+  explicit CoallescedLiveRanges(Zone* zone) : storage_(zone) {}
+
+  LiveRange* Find(UseInterval* query) {
+    ZoneSplayTree<Config>::Locator locator;
+
+    if (storage_.Find(GetKey(query), &locator)) {
+      return locator.value();
+    }
+    return nullptr;
+  }
+
+  // TODO(mtrofin): Change to return void if we do not care whether the
+  // interval was previously added.
+  bool Insert(LiveRange* range) {
+    auto* interval = range->first_interval();
+    while (interval != nullptr) {
+      if (!Insert(interval, range)) return false;
+      interval = interval->next();
+    }
+    return true;
+  }
+
+  bool Remove(LiveRange* range) {
+    bool ret = false;
+    auto* segment = range->first_interval();
+    while (segment != nullptr) {
+      ret |= Remove(segment);
+      segment = segment->next();
+    }
+    return ret;
+  }
+
+  bool IsEmpty() { return storage_.is_empty(); }
+
+ private:
+  struct Config {
+    typedef std::pair<int, int> Key;
+    typedef LiveRange* Value;
+    static const Key kNoKey;
+    static Value NoValue() { return nullptr; }
+    static int Compare(const Key& a, const Key& b) {
+      if (a.second <= b.first) return -1;
+      if (a.first >= b.second) return 1;
+      return 0;
+    }
+  };
+
+  Config::Key GetKey(UseInterval* interval) {
+    if (interval == nullptr) return std::make_pair(0, 0);
+    return std::make_pair(interval->start().Value(), interval->end().Value());
+  }
+
+  // TODO(mtrofin): Change to return void if we do not care whether the
+  // interval was previously added.
+  bool Insert(UseInterval* interval, LiveRange* range) {
+    ZoneSplayTree<Config>::Locator locator;
+    bool ret = storage_.Insert(GetKey(interval), &locator);
+    if (ret) locator.set_value(range);
+    return ret;
+  }
+
+  bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); }
+
+  ZoneSplayTree<Config> storage_;
+  DISALLOW_COPY_AND_ASSIGN(CoallescedLiveRanges);
+};
+
+
+const std::pair<int, int> CoallescedLiveRanges::Config::kNoKey =
+    std::make_pair<int, int>(0, 0);
+
+
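A note on the new data structure: the splay tree is keyed on [start, end) pairs, and Config::Compare reports two keys as equal exactly when the intervals overlap. That is what lets Find return the live range occupying a register for any intersecting interval, and what makes Insert fail on a genuine conflict. A self-contained sketch of the comparator's behavior (plain C++, no V8 types):

#include <cassert>
#include <utility>

using Key = std::pair<int, int>;  // a [start, end) use interval

int Compare(const Key& a, const Key& b) {
  if (a.second <= b.first) return -1;  // a ends before b starts
  if (a.first >= b.second) return 1;   // a starts after b ends
  return 0;                            // the intervals overlap
}

int main() {
  assert(Compare({0, 4}, {4, 8}) == -1);  // half-open: adjacent, no overlap
  assert(Compare({2, 6}, {4, 8}) == 0);   // overlap counts as an equal key
  assert(Compare({9, 12}, {4, 8}) == 1);
}
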
namespace {

inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}


void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
  auto it = std::find(v->begin(), v->end(), range);
  DCHECK(it != v->end());
  v->erase(it);
}

+
+static int GetRegisterCount(const RegisterConfiguration* cfg,
+                            RegisterKind kind) {
+  return kind == DOUBLE_REGISTERS ? cfg->num_aliased_double_registers()
+                                  : cfg->num_general_registers();
+}
+
+
+static const ZoneVector<LiveRange*>& GetFixedRegisters(
+    const RegisterAllocationData* data, RegisterKind kind) {
+  return kind == DOUBLE_REGISTERS ? data->fixed_double_live_ranges()
+                                  : data->fixed_live_ranges();
+}
+
}  // namespace


UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
                         InstructionOperand* hint)
    : operand_(operand), hint_(hint), pos_(pos), next_(nullptr), flags_(0) {
  bool register_beneficial = true;
  UsePositionType type = UsePositionType::kAny;
  if (operand_ != nullptr && operand_->IsUnallocated()) {
    const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
(...skipping 1377 matching lines...)
  for (auto current : data()->live_ranges()) {
    if (current != nullptr) current->Verify();
  }
}


LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
                                         RegisterKind kind)
    : data_(data),
      mode_(kind),
-      num_registers_(kind == DOUBLE_REGISTERS
-                         ? config()->num_aliased_double_registers()
-                         : config()->num_general_registers()),
+      num_registers_(GetRegisterCount(data->config(), kind)),
      unhandled_live_ranges_(allocation_zone()),
      active_live_ranges_(allocation_zone()),
      inactive_live_ranges_(allocation_zone()) {
  unhandled_live_ranges().reserve(
      static_cast<size_t>(code()->VirtualRegisterCount() * 2));
  active_live_ranges().reserve(8);
  inactive_live_ranges().reserve(8);
  // TryAllocateFreeReg and AllocateBlockedReg assume this
  // when allocating local arrays.
  DCHECK(RegisterConfiguration::kMaxDoubleRegisters >=
         config()->num_general_registers());
}


void LinearScanAllocator::AllocateRegisters() {
  DCHECK(unhandled_live_ranges().empty());

  for (auto range : live_ranges()) {
    if (range == nullptr) continue;
    if (range->Kind() == mode_) {
      AddToUnhandledUnsorted(range);
    }
  }
  SortUnhandled();
  DCHECK(UnhandledIsSorted());

  DCHECK(active_live_ranges().empty());
  DCHECK(inactive_live_ranges().empty());

-  if (mode_ == DOUBLE_REGISTERS) {
-    for (auto current : fixed_double_live_ranges()) {
-      if (current != nullptr) AddToInactive(current);
-    }
-  } else {
-    DCHECK(mode_ == GENERAL_REGISTERS);
-    for (auto current : fixed_live_ranges()) {
-      if (current != nullptr) AddToInactive(current);
-    }
-  }
+  auto& fixed_ranges = GetFixedRegisters(data(), mode_);
+  for (auto current : fixed_ranges) {
+    if (current != nullptr) {
+      DCHECK_EQ(mode_, current->Kind());
+      AddToInactive(current);
+    }
+  }

  while (!unhandled_live_ranges().empty()) {
    DCHECK(UnhandledIsSorted());
    auto current = unhandled_live_ranges().back();
    unhandled_live_ranges().pop_back();
    DCHECK(UnhandledIsSorted());
    auto position = current->Start();
#ifdef DEBUG
    allocation_finger_ = position;
#endif
    TRACE("Processing interval %d start=%d\n", current->id(), position.Value());

    if (!current->HasNoSpillType()) {
      TRACE("Live range %d already has a spill operand\n", current->id());
      auto next_pos = position;
      if (next_pos.IsGapPosition()) {
        next_pos = next_pos.NextStart();
      }
      auto pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part of the range.
      if (pos == nullptr) {
-        Spill(current);
+        data()->Spill(current);
        continue;
      } else if (pos->pos().Value() > current->Start().NextStart().Value()) {
        // Do not spill the live range eagerly if the use position that can
        // benefit from the register is too close to the start of the live
        // range.
        SpillBetween(current, current->Start(), pos->pos());
        DCHECK(UnhandledIsSorted());
        continue;
      }
    }
(...skipping 186 matching lines...)
  auto pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at the position where it becomes blocked.
-    auto tail = SplitRangeAt(current, pos);
+    auto tail = data()->SplitRangeAt(current, pos);
    AddToUnhandledSorted(tail);
  }

  // Register reg is available at the range start and is free until
  // the range end.
  DCHECK(pos.Value() >= current->End().Value());
  TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg),
        current->id());
  SetLiveRangeAssignedRegister(current, reg);

  return true;
}


void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
  auto register_use = current->NextRegisterPosition(current->Start());
  if (register_use == nullptr) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
-    Spill(current);
+    data()->Spill(current);
    return;
  }

  LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters];
  LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters];

  for (int i = 0; i < num_registers_; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

(...skipping 39 matching lines...)
    // All registers are blocked before the first use that requires a register.
    // Spill the starting part of the live range up to that use.
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end. Split before that
    // position.
    LiveRange* tail =
-        SplitBetween(current, current->Start(), block_pos[reg].Start());
+        data()->SplitBetween(current, current->Start(), block_pos[reg].Start());
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  DCHECK(block_pos[reg].Value() >= current->End().Value());
  TRACE("Assigning blocked reg %s to live range %d\n", RegisterName(reg),
        current->id());
  SetLiveRangeAssignedRegister(current, reg);

  // This register was not free. Thus we need to find and spill
(...skipping 153 matching lines...)
  auto next_pos = range->Start();
  if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
  auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
  if (pos == nullptr) {
    auto spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    CHECK(merged);
-    Spill(range);
+    data()->Spill(range);
    return true;
  } else if (pos->pos().Value() > range->Start().NextStart().Value()) {
    auto spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    CHECK(merged);
    SpillBetween(range, range->Start(), pos->pos());
    DCHECK(UnhandledIsSorted());
    return true;
  }
  return false;
}


-LiveRange* LinearScanAllocator::SplitRangeAt(LiveRange* range,
-                                             LifetimePosition pos) {
+LiveRange* RegisterAllocationData::SplitRangeAt(LiveRange* range,
+                                                LifetimePosition pos) {
  DCHECK(!range->IsFixed());
  TRACE("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect live ranges if splitting occurred at the end
  // of a block.
  DCHECK(pos.IsStart() || pos.IsGapPosition() ||
         (code()->GetInstructionBlock(pos.ToInstructionIndex()))
                 ->last_instruction_index() != pos.ToInstructionIndex());

  int vreg = GetVirtualRegister();
  auto result = LiveRangeFor(vreg);
  range->SplitAt(pos, result, allocation_zone());
  return result;
}


-LiveRange* LinearScanAllocator::SplitBetween(LiveRange* range,
-                                             LifetimePosition start,
-                                             LifetimePosition end) {
+LiveRange* RegisterAllocationData::SplitBetween(LiveRange* range,
+                                                LifetimePosition start,
+                                                LifetimePosition end) {
  DCHECK(!range->IsFixed());
  TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(),
        start.Value(), end.Value());

  auto split_pos = FindOptimalSplitPos(start, end);
  DCHECK(split_pos.Value() >= start.Value());
  return SplitRangeAt(range, split_pos);
}


-LifetimePosition LinearScanAllocator::FindOptimalSplitPos(
+LifetimePosition RegisterAllocationData::FindOptimalSplitPos(
    LifetimePosition start, LifetimePosition end) {
  int start_instr = start.ToInstructionIndex();
  int end_instr = end.ToInstructionIndex();
  DCHECK(start_instr <= end_instr);

  // We have no choice.
  if (start_instr == end_instr) return end;

  auto start_block = GetInstructionBlock(start);
  auto end_block = GetInstructionBlock(end);
(...skipping 16 matching lines...)
  // We did not find any suitable outer loop. Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  return LifetimePosition::GapFromInstructionIndex(
      block->first_instruction_index());
}


void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
-  auto second_part = SplitRangeAt(range, pos);
-  Spill(second_part);
+  auto second_part = data()->SplitRangeAt(range, pos);
+  data()->Spill(second_part);
}


void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
                                       LifetimePosition end) {
  SpillBetweenUntil(range, start, start, end);
}


void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
                                            LifetimePosition start,
                                            LifetimePosition until,
                                            LifetimePosition end) {
  CHECK(start.Value() < end.Value());
-  auto second_part = SplitRangeAt(range, start);
+  auto second_part = data()->SplitRangeAt(range, start);

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at a position between ]start+1, end[, spill the middle part,
    // and put the rest back on unhandled.
    auto third_part_end = end.PrevStart().End();
    if (data()->IsBlockBoundary(end.Start())) {
      third_part_end = end.Start();
    }
-    auto third_part = SplitBetween(
+    auto third_part = data()->SplitBetween(
        second_part, Max(second_part->Start().End(), until), third_part_end);

    DCHECK(third_part != second_part);

-    Spill(second_part);
+    data()->Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it back on unhandled as a whole.
    AddToUnhandledSorted(second_part);
  }
}


-void LinearScanAllocator::Spill(LiveRange* range) {
+void RegisterAllocationData::Spill(LiveRange* range) {
  DCHECK(!range->IsSpilled());
  TRACE("Spilling live range %d\n", range->id());
  auto first = range->TopLevel();
  if (first->HasNoSpillType()) {
-    data()->AssignSpillRangeToLiveRange(first);
+    AssignSpillRangeToLiveRange(first);
  }
  range->MakeSpilled();
}


OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}


void OperandAssigner::AssignSpillSlots() {
  auto& spill_ranges = data()->spill_ranges();
(...skipping 397 matching lines...)
      moves = it->first.first;
    }
    // Gather all MoveOperands for a single ParallelMove.
    auto move = new (code_zone()) MoveOperands(it->first.second, it->second);
    auto eliminate = moves->PrepareInsertAfter(move);
    to_insert.push_back(move);
    if (eliminate != nullptr) to_eliminate.push_back(eliminate);
  }
}

+
+unsigned GreedyAllocator::GetLiveRangeSize(LiveRange* range) {
+  auto interval = range->first_interval();
+  if (interval == nullptr) return 0;
+
+  unsigned size = 0;
+  while (interval != nullptr) {
+    size += (interval->end().Value() - interval->start().Value());
+    interval = interval->next();
+  }
+
+  DCHECK_NE(0, size);
+  return size;
+}
+
+GreedyAllocator::GreedyAllocator(RegisterAllocationData* data,
+                                 RegisterKind kind)
+    : data_(data),
+      mode_(kind),
+      num_registers_(GetRegisterCount(data->config(), kind)),
+      allocations_(data->allocation_zone()),
+      queue_(data->allocation_zone()) {}
+
+
+void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) {
+  allocations_[reg_id]->Insert(range);
+  if (range->HasRegisterAssigned()) {
+    DCHECK_EQ(reg_id, range->assigned_register());
+    return;
+  }
+  range->set_assigned_register(reg_id);
+}
+
+
+float GreedyAllocator::CalculateSpillWeight(LiveRange* range) {
+  if (range->IsFixed()) return std::numeric_limits<float>::max();
+
+  if (range->FirstHint() != nullptr && range->FirstHint()->IsRegister()) {
+    return std::numeric_limits<float>::max();
+  }
+
+  unsigned use_count = 0;
+  auto* pos = range->first_pos();
+  while (pos != nullptr) {
+    use_count++;
+    pos = pos->next();
+  }
+
+  // GetLiveRangeSize is DCHECK-ed to not be 0.
jvoung (off chromium) 2015/04/21 23:34:06 GetLiveRangeSize can be 0 if range->first_interval() is null.
Mircea Trofin 2015/04/22 04:37:33 Thanks! Moved the check here.
+  return static_cast<float>(use_count) /
+         static_cast<float>(GetLiveRangeSize(range));
+}
+
+
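To make the heuristic concrete, here is a hedged, self-contained sketch of the spill weight with the zero-size guard the review asks for; the types below are stand-ins invented for illustration, not V8's:

#include <limits>

struct Interval { int start, end; Interval* next; };
struct Range { bool fixed; bool register_hinted; int uses; Interval* first; };

// weight = uses / covered length; fixed or register-hinted ranges are
// treated as unspillable (infinite weight).
float SpillWeight(const Range& r) {
  if (r.fixed || r.register_hinted) return std::numeric_limits<float>::max();
  unsigned size = 0;
  for (const Interval* i = r.first; i != nullptr; i = i->next)
    size += static_cast<unsigned>(i->end - i->start);
  if (size == 0) return 0.0f;  // the guard: an empty range would divide by zero
  return static_cast<float>(r.uses) / static_cast<float>(size);
}

int main() {
  Interval i{10, 30, nullptr};
  Range r{false, false, 4, &i};
  return SpillWeight(r) == 0.2f ? 0 : 1;  // 4 uses over 20 positions
}
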
+float GreedyAllocator::CalculateMaxSpillWeight(
+    const ZoneSet<LiveRange*>& ranges) {
+  float max = 0.0f;
+  for (auto* r : ranges) {
+    max = std::max(max, CalculateSpillWeight(r));
+  }
+  return max;
+}
+
+
+void GreedyAllocator::Evict(LiveRange* range) {
+  bool removed = allocations_[range->assigned_register()]->Remove(range);
+  DCHECK(removed);
+}
+
+
+bool GreedyAllocator::TryAllocatePhysicalRegister(
+    unsigned reg_id, LiveRange* range, ZoneSet<LiveRange*>* conflicting) {
+  auto* segment = range->first_interval();
+
+  auto* alloc_info = allocations_[reg_id];
+  while (segment != nullptr) {
+    if (auto* existing = alloc_info->Find(segment)) {
+      DCHECK(existing->HasRegisterAssigned());
+      conflicting->insert(existing);
+    }
+    segment = segment->next();
+  }
+  if (!conflicting->empty()) return false;
+  // No conflicts means we can safely allocate this register to this range.
+  AssignRangeToRegister(reg_id, range);
+  return true;
+}
+
+bool GreedyAllocator::TryAllocate(LiveRange* current,
+                                  ZoneSet<LiveRange*>* conflicting) {
+  if (current->HasSpillOperand()) {
+    data()->Spill(current);
+    return true;
+  }
+  if (current->IsFixed()) {
+    return TryAllocatePhysicalRegister(current->assigned_register(), current,
+                                       conflicting);
+  }
+
+  if (current->HasRegisterAssigned()) {
+    int reg_id = current->assigned_register();
+    return TryAllocatePhysicalRegister(reg_id, current, conflicting);
+  }
+
+  for (unsigned candidate_reg = 0; candidate_reg < allocations_.size();
+       candidate_reg++) {
+    if (TryAllocatePhysicalRegister(candidate_reg, current, conflicting)) {
+      conflicting->clear();
+      return true;
+    }
+  }
+  return false;
+}
+
+LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range,
+                                              LifetimePosition start,
+                                              LifetimePosition until,
+                                              LifetimePosition end) {
+  CHECK(start.Value() < end.Value());
+  auto second_part = data()->SplitRangeAt(range, start);
+
+  if (second_part->Start().Value() < end.Value()) {
+    // The split result intersects with [start, end[.
+    // Split it at a position between ]start+1, end[, spill the middle part,
+    // and put the rest back on unhandled.
+    auto third_part_end = end.PrevStart().End();
+    if (data()->IsBlockBoundary(end.Start())) {
+      third_part_end = end.Start();
+    }
+    auto third_part = data()->SplitBetween(
+        second_part, Max(second_part->Start().End(), until), third_part_end);
+
+    DCHECK(third_part != second_part);
+
+    data()->Spill(second_part);
+    return third_part;
+  } else {
+    // The split result does not intersect with [start, end[.
+    // Nothing to spill. Just put it back on unhandled as a whole.
jvoung (off chromium) 2015/04/21 23:34:06 nit: This comment seems to come from the LinearScanAllocator version.
Mircea Trofin 2015/04/22 04:37:33 Most likely this copy of the API will be deleted…
+    return second_part;
+  }
+}
+
+
+void GreedyAllocator::Enqueue(LiveRange* range) {
+  if (range == nullptr || range->IsEmpty()) return;
+  unsigned size = GetLiveRangeSize(range);
+  queue_.push(std::make_pair(size, range));
+}
+
+
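For orientation: queue_ holds (size, range) pairs, and assuming it is a max-priority queue, which the push of std::make_pair(size, range) suggests, the allocator services the largest remaining live range first. A standalone sketch of that ordering, with plain ints standing in for LiveRange pointers:

#include <cassert>
#include <queue>
#include <utility>

int main() {
  // (live range size, virtual register id); std::pair compares size first.
  std::priority_queue<std::pair<unsigned, int>> queue;
  queue.push({12, 1});
  queue.push({40, 2});
  queue.push({7, 3});
  assert(queue.top().second == 2);  // the largest range is allocated first
  return 0;
}
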
+// TODO(mtrofin): consolidate with the identical code segment in
+// LinearScanAllocator::AllocateRegisters.
+bool GreedyAllocator::HandleSpillOperands(LiveRange* range) {
+  auto position = range->Start();
+  TRACE("Processing interval %d start=%d\n", range->id(), position.Value());
+
+  if (!range->HasNoSpillType()) {
+    TRACE("Live range %d already has a spill operand\n", range->id());
+    auto next_pos = position;
+    if (next_pos.IsGapPosition()) {
+      next_pos = next_pos.NextStart();
+    }
+    auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
+    // If the range already has a spill operand and it doesn't need a
+    // register immediately, split it and spill the first part of the range.
+    if (pos == nullptr) {
+      data()->Spill(range);
+      return true;
+    } else if (pos->pos().Value() > range->Start().NextStart().Value()) {
+      // Do not spill the live range eagerly if the use position that can
+      // benefit from the register is too close to the start of the live
+      // range.
+      auto* remainder =
+          SpillBetweenUntil(range, position, position, pos->pos());
+      Enqueue(remainder);
+      return true;
+    }
+  }
+  return false;
+  // TODO(mtrofin): Do we need this?
+  // return (TryReuseSpillForPhi(range));
+}
+
+
+void GreedyAllocator::AllocateRegisters() {
+  for (auto range : data()->live_ranges()) {
+    if (range == nullptr) continue;
+    if (range->Kind() == mode_) {
+      DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
+      TRACE("Enqueueing live range %d to priority queue\n", range->id());
+      Enqueue(range);
+    }
+  }
+
+  allocations_.resize(num_registers_);
+  auto* zone = data()->allocation_zone();
+  for (int i = 0; i < num_registers_; i++) {
+    allocations_[i] = new (zone) CoallescedLiveRanges(zone);
+  }
+
+  for (auto* current : GetFixedRegisters(data(), mode_)) {
+    if (current != nullptr) {
+      DCHECK_EQ(mode_, current->Kind());
+      int reg_nr = current->assigned_register();
+      bool inserted = allocations_[reg_nr]->Insert(current);
+      CHECK(inserted);
+    }
+  }
+
+  while (!queue_.empty()) {
+    auto current_pair = queue_.top();
+    queue_.pop();
+    auto current = current_pair.second;
+    if (HandleSpillOperands(current)) continue;
+    ZoneSet<LiveRange*> conflicting(zone);
+    if (!TryAllocate(current, &conflicting)) {
+      DCHECK(conflicting.size());
jvoung (off chromium) 2015/04/21 23:34:06 Is there an !empty() method to check instead of size()?
Mircea Trofin 2015/04/22 04:37:33 Done.
+      float this_max = CalculateSpillWeight(current);
+      float max_conflicting = CalculateMaxSpillWeight(conflicting);
+      if (max_conflicting < this_max) {
+        for (auto* conflict : conflicting) {
+          Evict(conflict);
+          Enqueue(conflict);
+        }
+        conflicting.clear();
+        bool allocated = TryAllocate(current, &conflicting);
+        CHECK(allocated);
+        DCHECK_EQ(0, conflicting.size());
jvoung (off chromium) 2015/04/21 23:34:06 conflicting.empty()?
Mircea Trofin 2015/04/22 04:37:33 Done.
+      } else {
+        DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start()));
+        bool allocated = AllocateBlockedRange(current, conflicting);
+        CHECK(allocated);
+      }
+    }
+  }
+
+  BitVector* assigned_registers = new (zone) BitVector(num_registers_, zone);
+  for (size_t i = 0; i < allocations_.size(); ++i) {
+    if (!allocations_[i]->IsEmpty()) {
+      assigned_registers->Add(static_cast<int>(i));
+    }
+  }
+  data()->frame()->SetAllocatedRegisters(assigned_registers);
+}
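The policy driving the loop above is weight-based eviction: the incoming range may displace its conflicts only when it is strictly hotter than the costliest of them; otherwise the incoming range is the one split or spilled. A toy walkthrough with made-up weights (not taken from the patch):

#include <algorithm>
#include <cassert>
#include <vector>

// Returns true when the incoming range should evict all conflicts.
bool ShouldEvict(float incoming, const std::vector<float>& conflicts) {
  float max_conflicting =
      *std::max_element(conflicts.begin(), conflicts.end());
  return max_conflicting < incoming;
}

int main() {
  // Weights are uses/size, as computed by CalculateSpillWeight.
  assert(ShouldEvict(0.50f, {0.25f, 0.40f}));   // hotter: evict and retry
  assert(!ShouldEvict(0.30f, {0.25f, 0.40f}));  // cooler: split/spill itself
  return 0;
}
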
+
+
+bool GreedyAllocator::AllocateBlockedRange(
+    LiveRange* current, const ZoneSet<LiveRange*>& conflicts) {
+  auto register_use = current->NextRegisterPosition(current->Start());
+  if (register_use == nullptr) {
+    // There is no use in the current live range that requires a register.
+    // We can just spill it.
+    data()->Spill(current);
+    return true;
+  }
+
+  auto second_part = data()->SplitRangeAt(current, register_use->pos());
+  data()->Spill(second_part);
+
+  return true;
+}
+
+
}  // namespace compiler
}  // namespace internal
}  // namespace v8
