| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
| 6 #include "src/compiler/linkage.h" | 6 #include "src/compiler/linkage.h" |
| 7 #include "src/compiler/register-allocator.h" | 7 #include "src/compiler/register-allocator.h" |
| 8 #include "src/string-stream.h" | 8 #include "src/string-stream.h" |
| 9 | 9 |
| 10 namespace v8 { | 10 namespace v8 { |
| (...skipping 1444 matching lines...) |
| 1455 } | 1455 } |
| 1456 | 1456 |
| 1457 | 1457 |
| 1458 void LiveRangeBuilder::Verify() const { | 1458 void LiveRangeBuilder::Verify() const { |
| 1459 for (auto current : data()->live_ranges()) { | 1459 for (auto current : data()->live_ranges()) { |
| 1460 if (current != nullptr) current->Verify(); | 1460 if (current != nullptr) current->Verify(); |
| 1461 } | 1461 } |
| 1462 } | 1462 } |
| 1463 | 1463 |
| 1464 | 1464 |
| 1465 RegisterAllocator::RegisterAllocator(RegisterAllocationData* data) |
| 1466 : data_(data) {} |
| 1467 |
| 1468 |
| 1469 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, |
| 1470 LifetimePosition pos) { |
| 1471 DCHECK(!range->IsFixed()); |
| 1472 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); |
| 1473 |
| 1474 if (pos.Value() <= range->Start().Value()) return range; |
| 1475 |
| 1476 // We can't properly connect live ranges if splitting occurred at the |
| 1477 // end of a block. |
| 1478 DCHECK(pos.IsStart() || pos.IsGapPosition() || |
| 1479 (GetInstructionBlock(code(), pos)->last_instruction_index() != |
| 1480 pos.ToInstructionIndex())); |
| 1481 |
| 1482 int vreg = code()->NextVirtualRegister(); |
| 1483 auto result = LiveRangeFor(vreg); |
| 1484 range->SplitAt(pos, result, allocation_zone()); |
| 1485 return result; |
| 1486 } |
| 1487 |
| 1488 |
| 1489 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, |
| 1490 LifetimePosition start, |
| 1491 LifetimePosition end) { |
| 1492 DCHECK(!range->IsFixed()); |
| 1493 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(), |
| 1494 start.Value(), end.Value()); |
| 1495 |
| 1496 auto split_pos = FindOptimalSplitPos(start, end); |
| 1497 DCHECK(split_pos.Value() >= start.Value()); |
| 1498 return SplitRangeAt(range, split_pos); |
| 1499 } |
| 1500 |
| 1501 |
| 1502 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, |
| 1503 LifetimePosition end) { |
| 1504 int start_instr = start.ToInstructionIndex(); |
| 1505 int end_instr = end.ToInstructionIndex(); |
| 1506 DCHECK(start_instr <= end_instr); |
| 1507 |
| 1508 // We have no choice: start and end fall in the same instruction. |
| 1509 if (start_instr == end_instr) return end; |
| 1510 |
| 1511 auto start_block = GetInstructionBlock(code(), start); |
| 1512 auto end_block = GetInstructionBlock(code(), end); |
| 1513 |
| 1514 if (end_block == start_block) { |
| 1515 // The interval is split in the same basic block. Split at the latest |
| 1516 // possible position. |
| 1517 return end; |
| 1518 } |
| 1519 |
| 1520 auto block = end_block; |
| 1521 // Find the header of the outermost loop that starts after start_block. |
| 1522 // TODO(titzer): fix redundancy below. |
| 1523 while (GetContainingLoop(code(), block) != nullptr && |
| 1524 GetContainingLoop(code(), block)->rpo_number().ToInt() > |
| 1525 start_block->rpo_number().ToInt()) { |
| 1526 block = GetContainingLoop(code(), block); |
| 1527 } |
| 1528 |
| 1529 // We did not find any suitable outer loop. Split at the latest possible |
| 1530 // position unless end_block is a loop header itself. |
| 1531 if (block == end_block && !end_block->IsLoopHeader()) return end; |
| 1532 |
| 1533 return LifetimePosition::GapFromInstructionIndex( |
| 1534 block->first_instruction_index()); |
| 1535 } |
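
The heuristic above prefers to split as late as possible, but hoists the split up to the header of the outermost loop that begins after start_block, so that the resulting spill/fill moves stay out of loop bodies. Below is a minimal standalone sketch of that walk; the Block struct, its field names, and the use of plain instruction indices instead of LifetimePositions are simplifying assumptions for illustration, not V8's actual InstructionBlock API.

// Hypothetical simplified stand-in for InstructionBlock; field names are
// assumptions for illustration only.
struct Block {
  const Block* loop_parent;      // header of the innermost containing loop, or nullptr
  int rpo_number;                // reverse-post-order index of the block
  int first_instruction_index;   // index of the block's first instruction
  bool is_loop_header;
};

// Sketch of the FindOptimalSplitPos walk, with instruction indices standing
// in for LifetimePositions.
int FindSplitInstruction(const Block* start_block, const Block* end_block,
                         int start_instr, int end_instr) {
  if (start_instr == end_instr) return end_instr;   // no choice
  if (start_block == end_block) return end_instr;   // same block: split late
  // Climb to the outermost loop header that comes after start_block in RPO.
  const Block* block = end_block;
  while (block->loop_parent != nullptr &&
         block->loop_parent->rpo_number > start_block->rpo_number) {
    block = block->loop_parent;
  }
  // No enclosing loop to hoist out of: split as late as possible.
  if (block == end_block && !end_block->is_loop_header) return end_instr;
  // Otherwise split at the gap before the (outermost) loop header.
  return block->first_instruction_index;
}
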
| 1536 |
| 1537 |
| 1538 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( |
| 1539 LiveRange* range, LifetimePosition pos) { |
| 1540 auto block = GetInstructionBlock(code(), pos.Start()); |
| 1541 auto loop_header = |
| 1542 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); |
| 1543 |
| 1544 if (loop_header == nullptr) return pos; |
| 1545 |
| 1546 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); |
| 1547 |
| 1548 while (loop_header != nullptr) { |
| 1549 // We are going to spill the live range inside the loop. |
| 1550 // If possible, try to move the spilling position backwards to the loop |
| 1551 // header; this reduces the number of memory moves on the back edge. |
| 1552 auto loop_start = LifetimePosition::GapFromInstructionIndex( |
| 1553 loop_header->first_instruction_index()); |
| 1554 |
| 1555 if (range->Covers(loop_start)) { |
| 1556 if (prev_use == nullptr || prev_use->pos().Value() < loop_start.Value()) { |
| 1557 // No register-beneficial use inside the loop before pos. |
| 1558 pos = loop_start; |
| 1559 } |
| 1560 } |
| 1561 |
| 1562 // Try hoisting out to an outer loop. |
| 1563 loop_header = GetContainingLoop(code(), loop_header); |
| 1564 } |
| 1565 |
| 1566 return pos; |
| 1567 } |
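
FindOptimalSpillingPos applies the same loop-aware idea to spills: walk outward through the enclosing loop headers and, whenever the range is live at a header and has no register-beneficial use between that header and pos, hoist the spill up to the header so the loop's back edge carries no memory moves. A rough sketch under assumed, simplified types (interval-based ranges and a plain loop-parent chain instead of V8's LiveRange and InstructionBlock):

// Hypothetical simplified stand-ins; names and fields are assumptions.
struct LoopInfo {
  const LoopInfo* parent;        // enclosing loop, or nullptr if outermost
  int first_instruction_index;   // where the loop header starts
};

struct IntervalRange {
  int start;  // first position covered by the live range
  int end;    // one past the last covered position
  bool Covers(int pos) const { return pos >= start && pos < end; }
};

// prev_beneficial_use: last use before `pos` that prefers a register, or -1
// if there is none (mirrors PreviousUsePositionRegisterIsBeneficial).
int HoistSpillPosition(const IntervalRange& range, const LoopInfo* loop,
                       int prev_beneficial_use, int pos) {
  for (; loop != nullptr; loop = loop->parent) {
    int loop_start = loop->first_instruction_index;
    // Only hoist if the range is live at the header and no use in between
    // would rather have stayed in a register.
    if (range.Covers(loop_start) && prev_beneficial_use < loop_start) {
      pos = loop_start;
    }
  }
  return pos;
}
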
| 1568 |
| 1569 |
| 1570 void RegisterAllocator::Spill(LiveRange* range) { |
| 1571 DCHECK(!range->IsSpilled()); |
| 1572 TRACE("Spilling live range %d\n", range->id()); |
| 1573 auto first = range->TopLevel(); |
| 1574 if (first->HasNoSpillType()) { |
| 1575 data()->AssignSpillRangeToLiveRange(first); |
| 1576 } |
| 1577 range->MakeSpilled(); |
| 1578 } |
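
Note that Spill records the spill information on the top-level range, so every split child of the same virtual register ends up sharing one spill location; only the child passed in is marked as spilled. A toy illustration of that invariant follows, where an int slot index stands in for the SpillRange that AssignSpillRangeToLiveRange actually creates; the ToyRange type and ToySpill function are invented for this sketch.

#include <cassert>

// Hypothetical stand-ins for TopLevel()/spill bookkeeping; not V8's API.
struct ToyRange {
  ToyRange* top_level;   // points to itself for the top-level range
  int spill_slot = -1;   // -1 means no spill location assigned yet
  bool spilled = false;
};

void ToySpill(ToyRange* range, int next_free_slot) {
  assert(!range->spilled);
  ToyRange* first = range->top_level;
  if (first->spill_slot == -1) first->spill_slot = next_free_slot;  // assign once
  range->spilled = true;  // only this child is marked spilled
}
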
| 1579 |
| 1580 |
| 1465 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data, | 1581 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data, |
| 1466 RegisterKind kind, Zone* local_zone) | 1582 RegisterKind kind, Zone* local_zone) |
| 1467 : data_(data), | 1583 : RegisterAllocator(data), |
| 1468 mode_(kind), | 1584 mode_(kind), |
| 1469 num_registers_(GetRegisterCount(data->config(), kind)), | 1585 num_registers_(GetRegisterCount(data->config(), kind)), |
| 1470 unhandled_live_ranges_(local_zone), | 1586 unhandled_live_ranges_(local_zone), |
| 1471 active_live_ranges_(local_zone), | 1587 active_live_ranges_(local_zone), |
| 1472 inactive_live_ranges_(local_zone) { | 1588 inactive_live_ranges_(local_zone) { |
| 1473 unhandled_live_ranges().reserve( | 1589 unhandled_live_ranges().reserve( |
| 1474 static_cast<size_t>(code()->VirtualRegisterCount() * 2)); | 1590 static_cast<size_t>(code()->VirtualRegisterCount() * 2)); |
| 1475 active_live_ranges().reserve(8); | 1591 active_live_ranges().reserve(8); |
| 1476 inactive_live_ranges().reserve(8); | 1592 inactive_live_ranges().reserve(8); |
| 1477 // TryAllocateFreeReg and AllocateBlockedReg assume this | 1593 // TryAllocateFreeReg and AllocateBlockedReg assume this |
| (...skipping 340 matching lines...) |
| 1818 current->id()); | 1934 current->id()); |
| 1819 data()->SetLiveRangeAssignedRegister(current, reg); | 1935 data()->SetLiveRangeAssignedRegister(current, reg); |
| 1820 | 1936 |
| 1821 // This register was not free. Thus we need to find and spill | 1937 // This register was not free. Thus we need to find and spill |
| 1822 // parts of active and inactive live regions that use the same register | 1938 // parts of active and inactive live regions that use the same register |
| 1823 // at the same lifetime positions as current. | 1939 // at the same lifetime positions as current. |
| 1824 SplitAndSpillIntersecting(current); | 1940 SplitAndSpillIntersecting(current); |
| 1825 } | 1941 } |
| 1826 | 1942 |
| 1827 | 1943 |
| 1828 LifetimePosition LinearScanAllocator::FindOptimalSpillingPos( | |
| 1829 LiveRange* range, LifetimePosition pos) { | |
| 1830 auto block = GetInstructionBlock(code(), pos.Start()); | |
| 1831 auto loop_header = | |
| 1832 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); | |
| 1833 | |
| 1834 if (loop_header == nullptr) return pos; | |
| 1835 | |
| 1836 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); | |
| 1837 | |
| 1838 while (loop_header != nullptr) { | |
| 1839 // We are going to spill live range inside the loop. | |
| 1840 // If possible try to move spilling position backwards to loop header. | |
| 1841 // This will reduce number of memory moves on the back edge. | |
| 1842 auto loop_start = LifetimePosition::GapFromInstructionIndex( | |
| 1843 loop_header->first_instruction_index()); | |
| 1844 | |
| 1845 if (range->Covers(loop_start)) { | |
| 1846 if (prev_use == nullptr || prev_use->pos().Value() < loop_start.Value()) { | |
| 1847 // No register beneficial use inside the loop before the pos. | |
| 1848 pos = loop_start; | |
| 1849 } | |
| 1850 } | |
| 1851 | |
| 1852 // Try hoisting out to an outer loop. | |
| 1853 loop_header = GetContainingLoop(code(), loop_header); | |
| 1854 } | |
| 1855 | |
| 1856 return pos; | |
| 1857 } | |
| 1858 | |
| 1859 | |
| 1860 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) { | 1944 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) { |
| 1861 DCHECK(current->HasRegisterAssigned()); | 1945 DCHECK(current->HasRegisterAssigned()); |
| 1862 int reg = current->assigned_register(); | 1946 int reg = current->assigned_register(); |
| 1863 auto split_pos = current->Start(); | 1947 auto split_pos = current->Start(); |
| 1864 for (size_t i = 0; i < active_live_ranges().size(); ++i) { | 1948 for (size_t i = 0; i < active_live_ranges().size(); ++i) { |
| 1865 auto range = active_live_ranges()[i]; | 1949 auto range = active_live_ranges()[i]; |
| 1866 if (range->assigned_register() == reg) { | 1950 if (range->assigned_register() == reg) { |
| 1867 auto next_pos = range->NextRegisterPosition(current->Start()); | 1951 auto next_pos = range->NextRegisterPosition(current->Start()); |
| 1868 auto spill_pos = FindOptimalSpillingPos(range, split_pos); | 1952 auto spill_pos = FindOptimalSpillingPos(range, split_pos); |
| 1869 if (next_pos == nullptr) { | 1953 if (next_pos == nullptr) { |
| (...skipping 114 matching lines...) |
| 1984 bool merged = first_op_spill->TryMerge(spill_range); | 2068 bool merged = first_op_spill->TryMerge(spill_range); |
| 1985 CHECK(merged); | 2069 CHECK(merged); |
| 1986 SpillBetween(range, range->Start(), pos->pos()); | 2070 SpillBetween(range, range->Start(), pos->pos()); |
| 1987 DCHECK(UnhandledIsSorted()); | 2071 DCHECK(UnhandledIsSorted()); |
| 1988 return true; | 2072 return true; |
| 1989 } | 2073 } |
| 1990 return false; | 2074 return false; |
| 1991 } | 2075 } |
| 1992 | 2076 |
| 1993 | 2077 |
| 1994 LiveRange* LinearScanAllocator::SplitRangeAt(LiveRange* range, | |
| 1995 LifetimePosition pos) { | |
| 1996 DCHECK(!range->IsFixed()); | |
| 1997 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); | |
| 1998 | |
| 1999 if (pos.Value() <= range->Start().Value()) return range; | |
| 2000 | |
| 2001 // We can't properly connect liveranges if splitting occurred at the end | |
| 2002 // a block. | |
| 2003 DCHECK(pos.IsStart() || pos.IsGapPosition() || | |
| 2004 (GetInstructionBlock(code(), pos)->last_instruction_index() != | |
| 2005 pos.ToInstructionIndex())); | |
| 2006 | |
| 2007 int vreg = code()->NextVirtualRegister(); | |
| 2008 auto result = LiveRangeFor(vreg); | |
| 2009 range->SplitAt(pos, result, allocation_zone()); | |
| 2010 return result; | |
| 2011 } | |
| 2012 | |
| 2013 | |
| 2014 LiveRange* LinearScanAllocator::SplitBetween(LiveRange* range, | |
| 2015 LifetimePosition start, | |
| 2016 LifetimePosition end) { | |
| 2017 DCHECK(!range->IsFixed()); | |
| 2018 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(), | |
| 2019 start.Value(), end.Value()); | |
| 2020 | |
| 2021 auto split_pos = FindOptimalSplitPos(start, end); | |
| 2022 DCHECK(split_pos.Value() >= start.Value()); | |
| 2023 return SplitRangeAt(range, split_pos); | |
| 2024 } | |
| 2025 | |
| 2026 | |
| 2027 LifetimePosition LinearScanAllocator::FindOptimalSplitPos( | |
| 2028 LifetimePosition start, LifetimePosition end) { | |
| 2029 int start_instr = start.ToInstructionIndex(); | |
| 2030 int end_instr = end.ToInstructionIndex(); | |
| 2031 DCHECK(start_instr <= end_instr); | |
| 2032 | |
| 2033 // We have no choice | |
| 2034 if (start_instr == end_instr) return end; | |
| 2035 | |
| 2036 auto start_block = GetInstructionBlock(code(), start); | |
| 2037 auto end_block = GetInstructionBlock(code(), end); | |
| 2038 | |
| 2039 if (end_block == start_block) { | |
| 2040 // The interval is split in the same basic block. Split at the latest | |
| 2041 // possible position. | |
| 2042 return end; | |
| 2043 } | |
| 2044 | |
| 2045 auto block = end_block; | |
| 2046 // Find header of outermost loop. | |
| 2047 // TODO(titzer): fix redundancy below. | |
| 2048 while (GetContainingLoop(code(), block) != nullptr && | |
| 2049 GetContainingLoop(code(), block)->rpo_number().ToInt() > | |
| 2050 start_block->rpo_number().ToInt()) { | |
| 2051 block = GetContainingLoop(code(), block); | |
| 2052 } | |
| 2053 | |
| 2054 // We did not find any suitable outer loop. Split at the latest possible | |
| 2055 // position unless end_block is a loop header itself. | |
| 2056 if (block == end_block && !end_block->IsLoopHeader()) return end; | |
| 2057 | |
| 2058 return LifetimePosition::GapFromInstructionIndex( | |
| 2059 block->first_instruction_index()); | |
| 2060 } | |
| 2061 | |
| 2062 | |
| 2063 void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { | 2078 void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { |
| 2064 auto second_part = SplitRangeAt(range, pos); | 2079 auto second_part = SplitRangeAt(range, pos); |
| 2065 Spill(second_part); | 2080 Spill(second_part); |
| 2066 } | 2081 } |
| 2067 | 2082 |
| 2068 | 2083 |
| 2069 void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start, | 2084 void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start, |
| 2070 LifetimePosition end) { | 2085 LifetimePosition end) { |
| 2071 SpillBetweenUntil(range, start, start, end); | 2086 SpillBetweenUntil(range, start, start, end); |
| 2072 } | 2087 } |
| (...skipping 22 matching lines...) |
| 2095 Spill(second_part); | 2110 Spill(second_part); |
| 2096 AddToUnhandledSorted(third_part); | 2111 AddToUnhandledSorted(third_part); |
| 2097 } else { | 2112 } else { |
| 2098 // The split result does not intersect with [start, end[. | 2113 // The split result does not intersect with [start, end[. |
| 2099 // Nothing to spill. Just put it to unhandled as whole. | 2114 // Nothing to spill. Just put it to unhandled as whole. |
| 2100 AddToUnhandledSorted(second_part); | 2115 AddToUnhandledSorted(second_part); |
| 2101 } | 2116 } |
| 2102 } | 2117 } |
| 2103 | 2118 |
| 2104 | 2119 |
| 2105 void LinearScanAllocator::Spill(LiveRange* range) { | |
| 2106 DCHECK(!range->IsSpilled()); | |
| 2107 TRACE("Spilling live range %d\n", range->id()); | |
| 2108 auto first = range->TopLevel(); | |
| 2109 if (first->HasNoSpillType()) { | |
| 2110 data()->AssignSpillRangeToLiveRange(first); | |
| 2111 } | |
| 2112 range->MakeSpilled(); | |
| 2113 } | |
| 2114 | |
| 2115 | |
| 2116 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {} | 2120 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {} |
| 2117 | 2121 |
| 2118 | 2122 |
| 2119 void OperandAssigner::AssignSpillSlots() { | 2123 void OperandAssigner::AssignSpillSlots() { |
| 2120 auto& spill_ranges = data()->spill_ranges(); | 2124 auto& spill_ranges = data()->spill_ranges(); |
| 2121 // Merge disjoint spill ranges | 2125 // Merge disjoint spill ranges |
| 2122 for (size_t i = 0; i < spill_ranges.size(); i++) { | 2126 for (size_t i = 0; i < spill_ranges.size(); i++) { |
| 2123 auto range = spill_ranges[i]; | 2127 auto range = spill_ranges[i]; |
| 2124 if (range->IsEmpty()) continue; | 2128 if (range->IsEmpty()) continue; |
| 2125 for (size_t j = i + 1; j < spill_ranges.size(); j++) { | 2129 for (size_t j = i + 1; j < spill_ranges.size(); j++) { |
| (...skipping 396 matching lines...) |
| 2522 auto move = new (code_zone()) MoveOperands(it->first.second, it->second); | 2526 auto move = new (code_zone()) MoveOperands(it->first.second, it->second); |
| 2523 auto eliminate = moves->PrepareInsertAfter(move); | 2527 auto eliminate = moves->PrepareInsertAfter(move); |
| 2524 to_insert.push_back(move); | 2528 to_insert.push_back(move); |
| 2525 if (eliminate != nullptr) to_eliminate.push_back(eliminate); | 2529 if (eliminate != nullptr) to_eliminate.push_back(eliminate); |
| 2526 } | 2530 } |
| 2527 } | 2531 } |
| 2528 | 2532 |
| 2529 } // namespace compiler | 2533 } // namespace compiler |
| 2530 } // namespace internal | 2534 } // namespace internal |
| 2531 } // namespace v8 | 2535 } // namespace v8 |