Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/linkage.h" | 5 #include "src/compiler/linkage.h" |
| 6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
| 7 #include "src/string-stream.h" | 7 #include "src/string-stream.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| 11 namespace compiler { | 11 namespace compiler { |
| 12 | 12 |
| 13 const std::pair<int, int> | |
|
dcarney
2015/04/17 07:22:45
this should go under the #define
also, does this c
Mircea Trofin
2015/04/17 19:23:33
This is a constexpr, so it is evaluated at compile
| |
| 14 RegisterAllocatorGreedy::RegisterAllocationInfo::Config::kNoKey = | |
| 15 std::make_pair(0, 0); | |
| 16 | |
| 13 #define TRACE(...) \ | 17 #define TRACE(...) \ |
| 14 do { \ | 18 do { \ |
| 15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ | 19 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
| 16 } while (false) | 20 } while (false) |
| 17 | 21 |
| 18 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { | 22 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { |
| 19 return a.Value() < b.Value() ? a : b; | 23 return a.Value() < b.Value() ? a : b; |
| 20 } | 24 } |
| 21 | 25 |
| 22 | 26 |
| (...skipping 551 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 574 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, | 578 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, |
| 575 Zone* zone, Frame* frame, | 579 Zone* zone, Frame* frame, |
| 576 InstructionSequence* code, | 580 InstructionSequence* code, |
| 577 const char* debug_name) | 581 const char* debug_name) |
| 578 : local_zone_(zone), | 582 : local_zone_(zone), |
| 579 frame_(frame), | 583 frame_(frame), |
| 580 code_(code), | 584 code_(code), |
| 581 debug_name_(debug_name), | 585 debug_name_(debug_name), |
| 582 config_(config), | 586 config_(config), |
| 583 phi_map_(local_zone()), | 587 phi_map_(local_zone()), |
| 584 live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()), | |
| 585 live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()), | 588 live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()), |
| 586 fixed_live_ranges_(this->config()->num_general_registers(), nullptr, | 589 fixed_live_ranges_(this->config()->num_general_registers(), nullptr, |
| 587 local_zone()), | 590 local_zone()), |
| 588 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr, | 591 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr, |
| 589 local_zone()), | 592 local_zone()), |
| 593 spill_ranges_(local_zone()), | |
| 594 live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()) { | |
| 595 spill_ranges().reserve(8); | |
| 596 assigned_registers_ = | |
| 597 new (code_zone()) BitVector(config->num_general_registers(), code_zone()); | |
| 598 assigned_double_registers_ = new (code_zone()) | |
| 599 BitVector(config->num_aliased_double_registers(), code_zone()); | |
| 600 frame->SetAllocatedRegisters(assigned_registers_); | |
| 601 frame->SetAllocatedDoubleRegisters(assigned_double_registers_); | |
| 602 } | |
| 603 | |
| 604 RegisterAllocatorLinear::RegisterAllocatorLinear( | |
| 605 const RegisterConfiguration* config, Zone* zone, Frame* frame, | |
| 606 InstructionSequence* code, const char* debug_name) | |
| 607 : RegisterAllocator(config, zone, frame, code, debug_name), | |
| 590 unhandled_live_ranges_(local_zone()), | 608 unhandled_live_ranges_(local_zone()), |
| 591 active_live_ranges_(local_zone()), | 609 active_live_ranges_(local_zone()), |
| 592 inactive_live_ranges_(local_zone()), | 610 inactive_live_ranges_(local_zone()), |
| 593 spill_ranges_(local_zone()), | |
| 594 mode_(UNALLOCATED_REGISTERS), | 611 mode_(UNALLOCATED_REGISTERS), |
| 595 num_registers_(-1) { | 612 num_registers_(-1) { |
| 596 DCHECK(this->config()->num_general_registers() <= | 613 DCHECK(this->config()->num_general_registers() <= |
| 597 RegisterConfiguration::kMaxGeneralRegisters); | 614 RegisterConfiguration::kMaxGeneralRegisters); |
| 598 DCHECK(this->config()->num_double_registers() <= | 615 DCHECK(this->config()->num_double_registers() <= |
| 599 RegisterConfiguration::kMaxDoubleRegisters); | 616 RegisterConfiguration::kMaxDoubleRegisters); |
| 600 // TryAllocateFreeReg and AllocateBlockedReg assume this | 617 // TryAllocateFreeReg and AllocateBlockedReg assume this |
| 601 // when allocating local arrays. | 618 // when allocating local arrays. |
| 602 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= | 619 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= |
| 603 this->config()->num_general_registers()); | 620 this->config()->num_general_registers()); |
| 604 unhandled_live_ranges().reserve( | 621 unhandled_live_ranges().reserve( |
| 605 static_cast<size_t>(code->VirtualRegisterCount() * 2)); | 622 static_cast<size_t>(code->VirtualRegisterCount() * 2)); |
| 606 active_live_ranges().reserve(8); | 623 active_live_ranges().reserve(8); |
| 607 inactive_live_ranges().reserve(8); | 624 inactive_live_ranges().reserve(8); |
| 608 spill_ranges().reserve(8); | |
| 609 assigned_registers_ = | |
| 610 new (code_zone()) BitVector(config->num_general_registers(), code_zone()); | |
| 611 assigned_double_registers_ = new (code_zone()) | |
| 612 BitVector(config->num_aliased_double_registers(), code_zone()); | |
| 613 frame->SetAllocatedRegisters(assigned_registers_); | |
| 614 frame->SetAllocatedDoubleRegisters(assigned_double_registers_); | |
| 615 } | 625 } |
| 616 | 626 |
| 617 | 627 |
| 618 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) { | 628 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) { |
| 619 // Compute live out for the given block, except not including backward | 629 // Compute live out for the given block, except not including backward |
| 620 // successor edges. | 630 // successor edges. |
| 621 auto live_out = new (local_zone()) | 631 auto live_out = new (local_zone()) |
| 622 BitVector(code()->VirtualRegisterCount(), local_zone()); | 632 BitVector(code()->VirtualRegisterCount(), local_zone()); |
| 623 | 633 |
| 624 // Process all successor blocks. | 634 // Process all successor blocks. |
| (...skipping 341 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 966 } | 976 } |
| 967 | 977 |
| 968 | 978 |
| 969 SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) { | 979 SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) { |
| 970 auto spill_range = new (local_zone()) SpillRange(range, local_zone()); | 980 auto spill_range = new (local_zone()) SpillRange(range, local_zone()); |
| 971 spill_ranges().push_back(spill_range); | 981 spill_ranges().push_back(spill_range); |
| 972 return spill_range; | 982 return spill_range; |
| 973 } | 983 } |
| 974 | 984 |
| 975 | 985 |
| 976 bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) { | 986 bool RegisterAllocatorLinear::TryReuseSpillForPhi(LiveRange* range) { |
| 977 if (range->IsChild() || !range->is_phi()) return false; | 987 if (range->IsChild() || !range->is_phi()) return false; |
| 978 DCHECK(!range->HasSpillOperand()); | 988 DCHECK(!range->HasSpillOperand()); |
| 979 | 989 |
| 980 auto lookup = phi_map_.find(range->id()); | 990 auto lookup = phi_map().find(range->id()); |
| 981 DCHECK(lookup != phi_map_.end()); | 991 DCHECK(lookup != phi_map().end()); |
| 982 auto phi = lookup->second->phi; | 992 auto phi = lookup->second->phi; |
| 983 auto block = lookup->second->block; | 993 auto block = lookup->second->block; |
| 984 // Count the number of spilled operands. | 994 // Count the number of spilled operands. |
| 985 size_t spilled_count = 0; | 995 size_t spilled_count = 0; |
| 986 LiveRange* first_op = nullptr; | 996 LiveRange* first_op = nullptr; |
| 987 for (size_t i = 0; i < phi->operands().size(); i++) { | 997 for (size_t i = 0; i < phi->operands().size(); i++) { |
| 988 int op = phi->operands()[i]; | 998 int op = phi->operands()[i]; |
| 989 LiveRange* op_range = LiveRangeFor(op); | 999 LiveRange* op_range = LiveRangeFor(op); |
| 990 if (op_range->GetSpillRange() == nullptr) continue; | 1000 if (op_range->GetSpillRange() == nullptr) continue; |
| 991 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); | 1001 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); |
| (...skipping 806 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1798 bool RegisterAllocator::SafePointsAreInOrder() const { | 1808 bool RegisterAllocator::SafePointsAreInOrder() const { |
| 1799 int safe_point = 0; | 1809 int safe_point = 0; |
| 1800 for (auto map : *code()->reference_maps()) { | 1810 for (auto map : *code()->reference_maps()) { |
| 1801 if (safe_point > map->instruction_position()) return false; | 1811 if (safe_point > map->instruction_position()) return false; |
| 1802 safe_point = map->instruction_position(); | 1812 safe_point = map->instruction_position(); |
| 1803 } | 1813 } |
| 1804 return true; | 1814 return true; |
| 1805 } | 1815 } |
| 1806 | 1816 |
| 1807 | 1817 |
| 1818 unsigned RegisterAllocatorGreedy::GetLiveRangeSize(LiveRange* range) { | |
| 1819 auto interval = range->first_interval(); | |
| 1820 if (!interval) return 0; | |
| 1821 | |
| 1822 unsigned size = 0; | |
| 1823 while (interval) { | |
| 1824 size += (interval->end().Value() - interval->start().Value()); | |
| 1825 interval = interval->next(); | |
| 1826 } | |
| 1827 | |
| 1828 DCHECK(size); | |
| 1829 return size; | |
| 1830 } | |
| 1831 | |
| 1832 | |
| 1833 void RegisterAllocatorGreedy::Enqueue(LiveRange* range) { | |
| 1834 if (!range || range->IsEmpty()) return; | |
| 1835 unsigned size = GetLiveRangeSize(range); | |
| 1836 queue_.push(std::make_pair(size, range)); | |
| 1837 } | |
| 1838 | |
| 1839 | |
| 1840 // TODO(mtrofin): consolidate with identical code segment in | |
| 1841 // RegisterAllocatorLinear::AllocateRegisters | |
| 1842 bool RegisterAllocatorGreedy::HandleSpillOperands(LiveRange* range) { | |
| 1843 auto position = range->Start(); | |
| 1844 TRACE("Processing interval %d start=%d\n", range->id(), position.Value()); | |
| 1845 | |
| 1846 if (!range->HasNoSpillType()) { | |
| 1847 TRACE("Live range %d already has a spill operand\n", range->id()); | |
| 1848 auto next_pos = position; | |
| 1849 if (next_pos.IsGapPosition()) { | |
| 1850 next_pos = next_pos.NextStart(); | |
| 1851 } | |
| 1852 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | |
| 1853 // If the range already has a spill operand and it doesn't need a | |
| 1854 // register immediately, split it and spill the first part of the range. | |
| 1855 if (pos == nullptr) { | |
| 1856 Spill(range); | |
| 1857 return true; | |
| 1858 } else if (pos->pos().Value() > range->Start().NextStart().Value()) { | |
| 1859 // Do not spill live range eagerly if use position that can benefit from | |
| 1860 // the register is too close to the start of live range. | |
| 1861 auto* reminder = SpillBetweenUntil(range, position, position, pos->pos()); | |
| 1862 Enqueue(reminder); | |
| 1863 return true; | |
| 1864 } | |
| 1865 } | |
| 1866 return true; | |
| 1867 // TODO(mtrofin): Do we need this? | |
| 1868 // return (TryReuseSpillForPhi(range)); | |
| 1869 } | |
| 1870 | |
| 1871 | |
| 1872 void RegisterAllocatorGreedy::AllocateRegisters(RegisterKind mode) { | |
| 1873 // TODO(mtrofin): support for double registers | |
| 1874 DCHECK(mode == GENERAL_REGISTERS); | |
| 1875 | |
| 1876 for (auto range : live_ranges()) { | |
| 1877 if (range == nullptr) continue; | |
| 1878 if (range->Kind() == mode) { | |
| 1879 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | |
| 1880 TRACE("Enqueueing live range %d to priority queue \n", range->id()); | |
| 1881 Enqueue(range); | |
| 1882 } | |
| 1883 } | |
| 1884 | |
| 1885 for (int i = 0; i < config()->kMaxGeneralRegisters; i++) { | |
| 1886 allocations_[i] = new RegisterAllocationInfo(local_zone()); | |
|
dcarney
2015/04/17 07:22:45
this must be zone allocated - in fact pretty much
Mircea Trofin
2015/04/17 19:23:33
Done.
| |
| 1887 } | |
| 1888 | |
| 1889 for (auto* current : fixed_live_ranges()) { | |
| 1890 if (current != nullptr) { | |
| 1891 int regNr = current->assigned_register(); | |
| 1892 CHECK((allocations_[regNr]->insert(current))); | |
| 1893 } | |
| 1894 } | |
| 1895 | |
| 1896 while (!queue_.empty()) { | |
| 1897 auto currentPair = queue_.top(); | |
| 1898 queue_.pop(); | |
| 1899 auto current = currentPair.second; | |
| 1900 if (HandleSpillOperands(current)) continue; | |
| 1901 ZoneSet<LiveRange*> conflicting(local_zone()); | |
| 1902 if (!TryAllocate(current, conflicting)) { | |
| 1903 DCHECK(conflicting.size()); | |
| 1904 float thisMax = CalculateSpillWeight(current); | |
| 1905 float maxConflicting = CalculateMaxSpillWeight(conflicting); | |
| 1906 if (maxConflicting < thisMax) { | |
| 1907 for (auto* conflict : conflicting) { | |
| 1908 Evict(conflict); | |
| 1909 Enqueue(conflict); | |
| 1910 } | |
| 1911 conflicting.clear(); | |
| 1912 CHECK(TryAllocate(current, conflicting)); | |
| 1913 DCHECK(conflicting.size() == 0); | |
| 1914 } else { | |
| 1915 DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start())); | |
| 1916 CHECK(AllocateBlockedRange(current, conflicting)); | |
| 1917 } | |
| 1918 } | |
| 1919 } | |
| 1920 for (unsigned i = 0; i < allocations_.size(); i++) { | |
| 1921 if (!allocations_[i]->isEmpty()) { | |
| 1922 assigned_registers()->Add(i); | |
| 1923 } | |
| 1924 } | |
| 1925 } | |
| 1926 | |
| 1927 | |
| 1928 bool RegisterAllocatorGreedy::AllocateBlockedRange( | |
| 1929 LiveRange* current, ZoneSet<LiveRange*>& conflicts) { | |
| 1930 auto register_use = current->NextRegisterPosition(current->Start()); | |
| 1931 if (register_use == nullptr) { | |
| 1932 // There is no use in the current live range that requires a register. | |
| 1933 // We can just spill it. | |
| 1934 Spill(current); | |
| 1935 return true; | |
| 1936 } | |
| 1937 | |
| 1938 auto second_part = SplitRangeAt(current, register_use->pos()); | |
| 1939 Spill(second_part); | |
| 1940 | |
| 1941 return true; | |
| 1942 } | |
| 1943 | |
| 1944 | |
| 1808 void RegisterAllocator::PopulateReferenceMaps() { | 1945 void RegisterAllocator::PopulateReferenceMaps() { |
| 1809 DCHECK(SafePointsAreInOrder()); | 1946 DCHECK(SafePointsAreInOrder()); |
| 1810 | 1947 |
| 1811 // Iterate over all safe point positions and record a pointer | 1948 // Iterate over all safe point positions and record a pointer |
| 1812 // for all spilled live ranges at this point. | 1949 // for all spilled live ranges at this point. |
| 1813 int last_range_start = 0; | 1950 int last_range_start = 0; |
| 1814 auto reference_maps = code()->reference_maps(); | 1951 auto reference_maps = code()->reference_maps(); |
| 1815 ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); | 1952 ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); |
| 1816 for (LiveRange* range : live_ranges()) { | 1953 for (LiveRange* range : live_ranges()) { |
| 1817 if (range == nullptr) continue; | 1954 if (range == nullptr) continue; |
| (...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1879 cur->id(), cur->Start().Value(), safe_point); | 2016 cur->id(), cur->Start().Value(), safe_point); |
| 1880 auto operand = cur->GetAssignedOperand(); | 2017 auto operand = cur->GetAssignedOperand(); |
| 1881 DCHECK(!operand.IsStackSlot()); | 2018 DCHECK(!operand.IsStackSlot()); |
| 1882 map->RecordReference(operand); | 2019 map->RecordReference(operand); |
| 1883 } | 2020 } |
| 1884 } | 2021 } |
| 1885 } | 2022 } |
| 1886 } | 2023 } |
| 1887 | 2024 |
| 1888 | 2025 |
| 1889 void RegisterAllocator::AllocateGeneralRegisters() { | 2026 void RegisterAllocatorLinear::AllocateGeneralRegisters() { |
| 1890 num_registers_ = config()->num_general_registers(); | 2027 num_registers_ = config()->num_general_registers(); |
| 1891 mode_ = GENERAL_REGISTERS; | 2028 mode_ = GENERAL_REGISTERS; |
| 1892 AllocateRegisters(); | 2029 AllocateRegisters(); |
| 1893 } | 2030 } |
| 1894 | 2031 |
| 1895 | 2032 |
| 1896 void RegisterAllocator::AllocateDoubleRegisters() { | 2033 void RegisterAllocatorLinear::AllocateDoubleRegisters() { |
| 1897 num_registers_ = config()->num_aliased_double_registers(); | 2034 num_registers_ = config()->num_aliased_double_registers(); |
| 1898 mode_ = DOUBLE_REGISTERS; | 2035 mode_ = DOUBLE_REGISTERS; |
| 1899 AllocateRegisters(); | 2036 AllocateRegisters(); |
| 1900 } | 2037 } |
| 1901 | 2038 |
| 1902 | 2039 |
| 1903 void RegisterAllocator::AllocateRegisters() { | 2040 void RegisterAllocatorLinear::AllocateRegisters() { |
| 1904 DCHECK(unhandled_live_ranges().empty()); | 2041 DCHECK(unhandled_live_ranges().empty()); |
| 1905 | 2042 |
| 1906 for (auto range : live_ranges()) { | 2043 for (auto range : live_ranges()) { |
| 1907 if (range == nullptr) continue; | 2044 if (range == nullptr) continue; |
| 1908 if (range->Kind() == mode_) { | 2045 if (range->Kind() == mode_) { |
| 1909 AddToUnhandledUnsorted(range); | 2046 AddToUnhandledUnsorted(range); |
| 1910 } | 2047 } |
| 1911 } | 2048 } |
| 1912 SortUnhandled(); | 2049 SortUnhandled(); |
| 1913 DCHECK(UnhandledIsSorted()); | 2050 DCHECK(UnhandledIsSorted()); |
| (...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1994 if (current->HasRegisterAssigned()) { | 2131 if (current->HasRegisterAssigned()) { |
| 1995 AddToActive(current); | 2132 AddToActive(current); |
| 1996 } | 2133 } |
| 1997 } | 2134 } |
| 1998 | 2135 |
| 1999 active_live_ranges().clear(); | 2136 active_live_ranges().clear(); |
| 2000 inactive_live_ranges().clear(); | 2137 inactive_live_ranges().clear(); |
| 2001 } | 2138 } |
| 2002 | 2139 |
| 2003 | 2140 |
| 2004 const char* RegisterAllocator::RegisterName(int allocation_index) { | 2141 const char* RegisterAllocatorLinear::RegisterName(int allocation_index) { |
| 2005 if (mode_ == GENERAL_REGISTERS) { | 2142 if (mode_ == GENERAL_REGISTERS) { |
| 2006 return config()->general_register_name(allocation_index); | 2143 return config()->general_register_name(allocation_index); |
| 2007 } else { | 2144 } else { |
| 2008 return config()->double_register_name(allocation_index); | 2145 return config()->double_register_name(allocation_index); |
| 2009 } | 2146 } |
| 2010 } | 2147 } |
| 2011 | 2148 |
| 2012 | 2149 |
| 2013 bool RegisterAllocator::HasTaggedValue(int virtual_register) const { | 2150 bool RegisterAllocator::HasTaggedValue(int virtual_register) const { |
| 2014 return code()->IsReference(virtual_register); | 2151 return code()->IsReference(virtual_register); |
| 2015 } | 2152 } |
| 2016 | 2153 |
| 2017 | 2154 |
| 2018 RegisterKind RegisterAllocator::RequiredRegisterKind( | 2155 RegisterKind RegisterAllocator::RequiredRegisterKind( |
| 2019 int virtual_register) const { | 2156 int virtual_register) const { |
| 2020 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS | 2157 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS |
| 2021 : GENERAL_REGISTERS; | 2158 : GENERAL_REGISTERS; |
| 2022 } | 2159 } |
| 2023 | 2160 |
| 2024 | 2161 |
| 2025 void RegisterAllocator::AddToActive(LiveRange* range) { | 2162 void RegisterAllocatorLinear::AddToActive(LiveRange* range) { |
| 2026 TRACE("Add live range %d to active\n", range->id()); | 2163 TRACE("Add live range %d to active\n", range->id()); |
| 2027 active_live_ranges().push_back(range); | 2164 active_live_ranges().push_back(range); |
| 2028 } | 2165 } |
| 2029 | 2166 |
| 2030 | 2167 |
| 2031 void RegisterAllocator::AddToInactive(LiveRange* range) { | 2168 void RegisterAllocatorLinear::AddToInactive(LiveRange* range) { |
| 2032 TRACE("Add live range %d to inactive\n", range->id()); | 2169 TRACE("Add live range %d to inactive\n", range->id()); |
| 2033 inactive_live_ranges().push_back(range); | 2170 inactive_live_ranges().push_back(range); |
| 2034 } | 2171 } |
| 2035 | 2172 |
| 2036 | 2173 |
| 2037 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { | 2174 void RegisterAllocatorLinear::AddToUnhandledSorted(LiveRange* range) { |
| 2038 if (range == nullptr || range->IsEmpty()) return; | 2175 if (range == nullptr || range->IsEmpty()) return; |
| 2039 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | 2176 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| 2040 DCHECK(allocation_finger_.Value() <= range->Start().Value()); | 2177 DCHECK(allocation_finger_.Value() <= range->Start().Value()); |
| 2041 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; | 2178 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; |
| 2042 --i) { | 2179 --i) { |
| 2043 auto cur_range = unhandled_live_ranges().at(i); | 2180 auto cur_range = unhandled_live_ranges().at(i); |
| 2044 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; | 2181 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; |
| 2045 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1); | 2182 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1); |
| 2046 auto it = unhandled_live_ranges().begin() + (i + 1); | 2183 auto it = unhandled_live_ranges().begin() + (i + 1); |
| 2047 unhandled_live_ranges().insert(it, range); | 2184 unhandled_live_ranges().insert(it, range); |
| 2048 DCHECK(UnhandledIsSorted()); | 2185 DCHECK(UnhandledIsSorted()); |
| 2049 return; | 2186 return; |
| 2050 } | 2187 } |
| 2051 TRACE("Add live range %d to unhandled at start\n", range->id()); | 2188 TRACE("Add live range %d to unhandled at start\n", range->id()); |
| 2052 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); | 2189 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); |
| 2053 DCHECK(UnhandledIsSorted()); | 2190 DCHECK(UnhandledIsSorted()); |
| 2054 } | 2191 } |
| 2055 | 2192 |
| 2056 | 2193 |
| 2057 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { | 2194 void RegisterAllocatorLinear::AddToUnhandledUnsorted(LiveRange* range) { |
| 2058 if (range == nullptr || range->IsEmpty()) return; | 2195 if (range == nullptr || range->IsEmpty()) return; |
| 2059 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | 2196 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| 2060 TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); | 2197 TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); |
| 2061 unhandled_live_ranges().push_back(range); | 2198 unhandled_live_ranges().push_back(range); |
| 2062 } | 2199 } |
| 2063 | 2200 |
| 2064 | 2201 |
| 2065 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { | 2202 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { |
| 2066 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); | 2203 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); |
| 2067 if (a->ShouldBeAllocatedBefore(b)) return false; | 2204 if (a->ShouldBeAllocatedBefore(b)) return false; |
| 2068 if (b->ShouldBeAllocatedBefore(a)) return true; | 2205 if (b->ShouldBeAllocatedBefore(a)) return true; |
| 2069 return a->id() < b->id(); | 2206 return a->id() < b->id(); |
| 2070 } | 2207 } |
| 2071 | 2208 |
| 2072 | 2209 |
| 2073 // Sort the unhandled live ranges so that the ranges to be processed first are | 2210 // Sort the unhandled live ranges so that the ranges to be processed first are |
| 2074 // at the end of the array list. This is convenient for the register allocation | 2211 // at the end of the array list. This is convenient for the register allocation |
| 2075 // algorithm because it is efficient to remove elements from the end. | 2212 // algorithm because it is efficient to remove elements from the end. |
| 2076 void RegisterAllocator::SortUnhandled() { | 2213 void RegisterAllocatorLinear::SortUnhandled() { |
| 2077 TRACE("Sort unhandled\n"); | 2214 TRACE("Sort unhandled\n"); |
| 2078 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), | 2215 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), |
| 2079 &UnhandledSortHelper); | 2216 &UnhandledSortHelper); |
| 2080 } | 2217 } |
| 2081 | 2218 |
| 2082 | 2219 |
| 2083 bool RegisterAllocator::UnhandledIsSorted() { | 2220 bool RegisterAllocatorLinear::UnhandledIsSorted() { |
| 2084 size_t len = unhandled_live_ranges().size(); | 2221 size_t len = unhandled_live_ranges().size(); |
| 2085 for (size_t i = 1; i < len; i++) { | 2222 for (size_t i = 1; i < len; i++) { |
| 2086 auto a = unhandled_live_ranges().at(i - 1); | 2223 auto a = unhandled_live_ranges().at(i - 1); |
| 2087 auto b = unhandled_live_ranges().at(i); | 2224 auto b = unhandled_live_ranges().at(i); |
| 2088 if (a->Start().Value() < b->Start().Value()) return false; | 2225 if (a->Start().Value() < b->Start().Value()) return false; |
| 2089 } | 2226 } |
| 2090 return true; | 2227 return true; |
| 2091 } | 2228 } |
| 2092 | 2229 |
| 2093 | 2230 |
| 2094 void RegisterAllocator::ActiveToHandled(LiveRange* range) { | 2231 void RegisterAllocatorLinear::ActiveToHandled(LiveRange* range) { |
| 2095 RemoveElement(&active_live_ranges(), range); | 2232 RemoveElement(&active_live_ranges(), range); |
| 2096 TRACE("Moving live range %d from active to handled\n", range->id()); | 2233 TRACE("Moving live range %d from active to handled\n", range->id()); |
| 2097 } | 2234 } |
| 2098 | 2235 |
| 2099 | 2236 |
| 2100 void RegisterAllocator::ActiveToInactive(LiveRange* range) { | 2237 void RegisterAllocatorLinear::ActiveToInactive(LiveRange* range) { |
| 2101 RemoveElement(&active_live_ranges(), range); | 2238 RemoveElement(&active_live_ranges(), range); |
| 2102 inactive_live_ranges().push_back(range); | 2239 inactive_live_ranges().push_back(range); |
| 2103 TRACE("Moving live range %d from active to inactive\n", range->id()); | 2240 TRACE("Moving live range %d from active to inactive\n", range->id()); |
| 2104 } | 2241 } |
| 2105 | 2242 |
| 2106 | 2243 |
| 2107 void RegisterAllocator::InactiveToHandled(LiveRange* range) { | 2244 void RegisterAllocatorLinear::InactiveToHandled(LiveRange* range) { |
| 2108 RemoveElement(&inactive_live_ranges(), range); | 2245 RemoveElement(&inactive_live_ranges(), range); |
| 2109 TRACE("Moving live range %d from inactive to handled\n", range->id()); | 2246 TRACE("Moving live range %d from inactive to handled\n", range->id()); |
| 2110 } | 2247 } |
| 2111 | 2248 |
| 2112 | 2249 |
| 2113 void RegisterAllocator::InactiveToActive(LiveRange* range) { | 2250 void RegisterAllocatorLinear::InactiveToActive(LiveRange* range) { |
| 2114 RemoveElement(&inactive_live_ranges(), range); | 2251 RemoveElement(&inactive_live_ranges(), range); |
| 2115 active_live_ranges().push_back(range); | 2252 active_live_ranges().push_back(range); |
| 2116 TRACE("Moving live range %d from inactive to active\n", range->id()); | 2253 TRACE("Moving live range %d from inactive to active\n", range->id()); |
| 2117 } | 2254 } |
| 2118 | 2255 |
| 2119 | 2256 |
| 2120 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { | 2257 RegisterAllocatorGreedy::RegisterAllocatorGreedy( |
| 2258 const RegisterConfiguration* config, Zone* local_zone, Frame* frame, | |
| 2259 InstructionSequence* code, const char* debug_name) | |
| 2260 : RegisterAllocator(config, local_zone, frame, code, debug_name), | |
| 2261 allocations_(local_zone) {} | |
| 2262 | |
| 2263 | |
| 2264 void RegisterAllocatorGreedy::AllocateGeneralRegisters() { | |
| 2265 AllocateRegisters(RegisterKind::GENERAL_REGISTERS); | |
| 2266 } | |
| 2267 | |
| 2268 | |
| 2269 void RegisterAllocatorGreedy::AllocateDoubleRegisters() { | |
| 2270 AllocateRegisters(RegisterKind::DOUBLE_REGISTERS); | |
| 2271 } | |
| 2272 | |
| 2273 | |
| 2274 void RegisterAllocatorGreedy::AssignRangeToRegister(unsigned regId, | |
| 2275 LiveRange* range) { | |
| 2276 allocations_[regId]->insert(range); | |
| 2277 if (range->HasRegisterAssigned()) { | |
| 2278 DCHECK((unsigned)(range->assigned_register()) == regId); | |
| 2279 return; | |
| 2280 } | |
| 2281 range->set_assigned_register(regId); | |
| 2282 } | |
| 2283 | |
| 2284 | |
| 2285 float RegisterAllocatorGreedy::CalculateSpillWeight(LiveRange* range) { | |
| 2286 if (range->IsFixed()) return std::numeric_limits<float>::max(); | |
| 2287 | |
| 2288 if (range->FirstHint() && range->FirstHint()->IsRegister()) | |
| 2289 return std::numeric_limits<float>::max(); | |
| 2290 | |
| 2291 unsigned useCount = 0; | |
| 2292 auto* pos = range->first_pos(); | |
| 2293 while (pos) { | |
|
dcarney
2015/04/17 07:22:45
while (pos != nullptr)
nullptr comparisons tend t
Mircea Trofin
2015/04/17 19:23:33
Done.
| |
| 2294 useCount++; | |
| 2295 pos = pos->next(); | |
| 2296 } | |
| 2297 | |
| 2298 // GetLiveRangeSize is DCHECK-ed to not be 0 | |
| 2299 return static_cast<float>(useCount) / | |
| 2300 static_cast<float>(GetLiveRangeSize(range)); | |
| 2301 } | |
| 2302 | |
| 2303 | |
| 2304 float RegisterAllocatorGreedy::CalculateMaxSpillWeight( | |
| 2305 ZoneSet<LiveRange*> ranges) { | |
| 2306 float max = 0.0; | |
| 2307 for (auto* r : ranges) { | |
| 2308 float localMax = CalculateSpillWeight(r); | |
| 2309 if (localMax > max) max = localMax; | |
| 2310 } | |
| 2311 return max; | |
| 2312 } | |
| 2313 | |
| 2314 | |
| 2315 void RegisterAllocatorGreedy::Evict(LiveRange* range) { | |
| 2316 allocations_[range->assigned_register()]->remove(range); | |
| 2317 } | |
| 2318 | |
| 2319 | |
| 2320 bool RegisterAllocatorGreedy::TryAllocatePhysicalRegister( | |
| 2321 unsigned regId, LiveRange* range, ZoneSet<LiveRange*>& conflicting) { | |
|
dcarney
2015/04/17 07:22:45
v8 would generally uses reg_id, not regId here
Mircea Trofin
2015/04/17 19:23:33
Done.
| |
| 2322 auto* segment = range->first_interval(); | |
| 2323 | |
| 2324 RegisterAllocationInfo* allocInfo = allocations_[regId]; | |
| 2325 while (segment) { | |
| 2326 LiveRange* existing; | |
| 2327 if (allocInfo->find(segment, existing)) { | |
| 2328 DCHECK(existing->HasRegisterAssigned()); | |
| 2329 conflicting.insert(existing); | |
| 2330 } | |
| 2331 segment = segment->next(); | |
| 2332 } | |
| 2333 if (conflicting.size()) return false; | |
|
dcarney
2015/04/17 07:22:45
we tend to make compare with 0 explicit, should an
Mircea Trofin
2015/04/17 19:23:33
Done.
| |
| 2334 // good to insert | |
| 2335 AssignRangeToRegister(regId, range); | |
| 2336 return true; | |
| 2337 } | |
| 2338 | |
| 2339 bool RegisterAllocatorGreedy::TryAllocate(LiveRange* current, | |
| 2340 ZoneSet<LiveRange*>& conflicting) { | |
| 2341 if (current->HasSpillOperand()) { | |
| 2342 Spill(current); | |
| 2343 return true; | |
| 2344 } | |
| 2345 if (current->IsFixed()) | |
|
dcarney
2015/04/17 07:22:45
need braces for conditions that are not on the sam
Mircea Trofin
2015/04/17 19:23:33
Done.
| |
| 2346 return TryAllocatePhysicalRegister(current->assigned_register(), current, | |
| 2347 conflicting); | |
| 2348 | |
| 2349 if (current->HasRegisterAssigned()) { | |
| 2350 int regId = current->assigned_register(); | |
| 2351 return TryAllocatePhysicalRegister(regId, current, conflicting); | |
| 2352 } | |
| 2353 | |
| 2354 for (unsigned candidateReg = 0; candidateReg < allocations_.size(); | |
|
dcarney
2015/04/17 07:22:45
candidate_reg
Mircea Trofin
2015/04/17 19:23:33
Done.
| |
| 2355 candidateReg++) { | |
| 2356 if (TryAllocatePhysicalRegister(candidateReg, current, conflicting)) { | |
| 2357 conflicting.clear(); | |
| 2358 return true; | |
| 2359 } | |
| 2360 } | |
| 2361 return false; | |
| 2362 } | |
| 2363 | |
| 2364 | |
| 2365 bool RegisterAllocatorLinear::TryAllocateFreeReg(LiveRange* current) { | |
| 2121 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2366 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2122 | 2367 |
| 2123 for (int i = 0; i < num_registers_; i++) { | 2368 for (int i = 0; i < num_registers_; i++) { |
| 2124 free_until_pos[i] = LifetimePosition::MaxPosition(); | 2369 free_until_pos[i] = LifetimePosition::MaxPosition(); |
| 2125 } | 2370 } |
| 2126 | 2371 |
| 2127 for (auto cur_active : active_live_ranges()) { | 2372 for (auto cur_active : active_live_ranges()) { |
| 2128 free_until_pos[cur_active->assigned_register()] = | 2373 free_until_pos[cur_active->assigned_register()] = |
| 2129 LifetimePosition::GapFromInstructionIndex(0); | 2374 LifetimePosition::GapFromInstructionIndex(0); |
| 2130 } | 2375 } |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2179 // the range end. | 2424 // the range end. |
| 2180 DCHECK(pos.Value() >= current->End().Value()); | 2425 DCHECK(pos.Value() >= current->End().Value()); |
| 2181 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), | 2426 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), |
| 2182 current->id()); | 2427 current->id()); |
| 2183 SetLiveRangeAssignedRegister(current, reg); | 2428 SetLiveRangeAssignedRegister(current, reg); |
| 2184 | 2429 |
| 2185 return true; | 2430 return true; |
| 2186 } | 2431 } |
| 2187 | 2432 |
| 2188 | 2433 |
| 2189 void RegisterAllocator::AllocateBlockedReg(LiveRange* current) { | 2434 void RegisterAllocatorLinear::AllocateBlockedReg(LiveRange* current) { |
| 2190 auto register_use = current->NextRegisterPosition(current->Start()); | 2435 auto register_use = current->NextRegisterPosition(current->Start()); |
| 2191 if (register_use == nullptr) { | 2436 if (register_use == nullptr) { |
| 2192 // There is no use in the current live range that requires a register. | 2437 // There is no use in the current live range that requires a register. |
| 2193 // We can just spill it. | 2438 // We can just spill it. |
| 2194 Spill(current); | 2439 Spill(current); |
| 2195 return; | 2440 return; |
| 2196 } | 2441 } |
| 2197 | 2442 |
| 2198 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2443 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2199 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2444 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| (...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2269 | 2514 |
| 2270 | 2515 |
| 2271 static const InstructionBlock* GetContainingLoop( | 2516 static const InstructionBlock* GetContainingLoop( |
| 2272 const InstructionSequence* sequence, const InstructionBlock* block) { | 2517 const InstructionSequence* sequence, const InstructionBlock* block) { |
| 2273 auto index = block->loop_header(); | 2518 auto index = block->loop_header(); |
| 2274 if (!index.IsValid()) return nullptr; | 2519 if (!index.IsValid()) return nullptr; |
| 2275 return sequence->InstructionBlockAt(index); | 2520 return sequence->InstructionBlockAt(index); |
| 2276 } | 2521 } |
| 2277 | 2522 |
| 2278 | 2523 |
| 2279 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( | 2524 LifetimePosition RegisterAllocatorLinear::FindOptimalSpillingPos( |
| 2280 LiveRange* range, LifetimePosition pos) { | 2525 LiveRange* range, LifetimePosition pos) { |
| 2281 auto block = GetInstructionBlock(pos.Start()); | 2526 auto block = GetInstructionBlock(pos.Start()); |
| 2282 auto loop_header = | 2527 auto loop_header = |
| 2283 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); | 2528 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); |
| 2284 | 2529 |
| 2285 if (loop_header == nullptr) return pos; | 2530 if (loop_header == nullptr) return pos; |
| 2286 | 2531 |
| 2287 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); | 2532 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); |
| 2288 | 2533 |
| 2289 while (loop_header != nullptr) { | 2534 while (loop_header != nullptr) { |
| (...skipping 11 matching lines...) Expand all Loading... | |
| 2301 } | 2546 } |
| 2302 | 2547 |
| 2303 // Try hoisting out to an outer loop. | 2548 // Try hoisting out to an outer loop. |
| 2304 loop_header = GetContainingLoop(code(), loop_header); | 2549 loop_header = GetContainingLoop(code(), loop_header); |
| 2305 } | 2550 } |
| 2306 | 2551 |
| 2307 return pos; | 2552 return pos; |
| 2308 } | 2553 } |
| 2309 | 2554 |
| 2310 | 2555 |
| 2311 void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) { | 2556 void RegisterAllocatorLinear::SplitAndSpillIntersecting(LiveRange* current) { |
| 2312 DCHECK(current->HasRegisterAssigned()); | 2557 DCHECK(current->HasRegisterAssigned()); |
| 2313 int reg = current->assigned_register(); | 2558 int reg = current->assigned_register(); |
| 2314 auto split_pos = current->Start(); | 2559 auto split_pos = current->Start(); |
| 2315 for (size_t i = 0; i < active_live_ranges().size(); ++i) { | 2560 for (size_t i = 0; i < active_live_ranges().size(); ++i) { |
| 2316 auto range = active_live_ranges()[i]; | 2561 auto range = active_live_ranges()[i]; |
| 2317 if (range->assigned_register() == reg) { | 2562 if (range->assigned_register() == reg) { |
| 2318 auto next_pos = range->NextRegisterPosition(current->Start()); | 2563 auto next_pos = range->NextRegisterPosition(current->Start()); |
| 2319 auto spill_pos = FindOptimalSpillingPos(range, split_pos); | 2564 auto spill_pos = FindOptimalSpillingPos(range, split_pos); |
| 2320 if (next_pos == nullptr) { | 2565 if (next_pos == nullptr) { |
| 2321 SpillAfter(range, spill_pos); | 2566 SpillAfter(range, spill_pos); |
| (...skipping 103 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2425 | 2670 |
| 2426 // We did not find any suitable outer loop. Split at the latest possible | 2671 // We did not find any suitable outer loop. Split at the latest possible |
| 2427 // position unless end_block is a loop header itself. | 2672 // position unless end_block is a loop header itself. |
| 2428 if (block == end_block && !end_block->IsLoopHeader()) return end; | 2673 if (block == end_block && !end_block->IsLoopHeader()) return end; |
| 2429 | 2674 |
| 2430 return LifetimePosition::GapFromInstructionIndex( | 2675 return LifetimePosition::GapFromInstructionIndex( |
| 2431 block->first_instruction_index()); | 2676 block->first_instruction_index()); |
| 2432 } | 2677 } |
| 2433 | 2678 |
| 2434 | 2679 |
| 2435 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { | 2680 void RegisterAllocatorLinear::SpillAfter(LiveRange* range, |
| 2681 LifetimePosition pos) { | |
| 2436 auto second_part = SplitRangeAt(range, pos); | 2682 auto second_part = SplitRangeAt(range, pos); |
| 2437 Spill(second_part); | 2683 Spill(second_part); |
| 2438 } | 2684 } |
| 2439 | 2685 |
| 2440 | 2686 |
| 2441 void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, | 2687 void RegisterAllocatorLinear::SpillBetween(LiveRange* range, |
| 2442 LifetimePosition end) { | 2688 LifetimePosition start, |
| 2689 LifetimePosition end) { | |
| 2443 SpillBetweenUntil(range, start, start, end); | 2690 SpillBetweenUntil(range, start, start, end); |
| 2444 } | 2691 } |
| 2445 | 2692 |
| 2446 | 2693 |
// Splits |range| at |start|; if the tail reaches into [start, end[, the
// overlapping middle piece (beginning no earlier than |until|) is spilled and
// the remainder after |end| is re-queued on the unhandled list. Otherwise the
// whole tail is re-queued unspilled.
void RegisterAllocatorLinear::SpillBetweenUntil(LiveRange* range,
                                                LifetimePosition start,
                                                LifetimePosition until,
                                                LifetimePosition end) {
  CHECK(start.Value() < end.Value());
  auto second_part = SplitRangeAt(range, start);

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at position between ]start+1, end[, spill the middle part
    // and put the rest to unhandled.
    auto third_part_end = end.PrevStart().End();
    if (IsBlockBoundary(end.Start())) {
      // Do not let the spilled piece straddle a block boundary; cut exactly
      // at the boundary instead of just before |end|.
      third_part_end = end.Start();
    }
    // The second split point must lie after |until| (the range stays in a
    // register at least that long) and after the first split.
    auto third_part = SplitBetween(
        second_part, Max(second_part->Start().End(), until), third_part_end);

    DCHECK(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it to unhandled as whole.
    AddToUnhandledSorted(second_part);
  }
}
| 2475 | 2722 |
// Greedy-allocator variant of SpillBetweenUntil: splits |range| at |start|
// and spills the piece overlapping [start, end[ (beginning no earlier than
// |until|). Unlike the linear version, the unspilled remainder is *returned*
// to the caller for re-processing instead of being pushed onto an unhandled
// list.
LiveRange* RegisterAllocatorGreedy::SpillBetweenUntil(LiveRange* range,
                                                      LifetimePosition start,
                                                      LifetimePosition until,
                                                      LifetimePosition end) {
  CHECK(start.Value() < end.Value());
  auto second_part = SplitRangeAt(range, start);

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at a position between ]start+1, end[, spill the middle part
    // and return the rest to the caller.
    auto third_part_end = end.PrevStart().End();
    if (IsBlockBoundary(end.Start())) {
      // Cut exactly at the block boundary rather than just before |end|.
      third_part_end = end.Start();
    }
    // The second split point must lie after |until| and after the first
    // split.
    auto third_part = SplitBetween(
        second_part, Max(second_part->Start().End(), until), third_part_end);

    DCHECK(third_part != second_part);

    Spill(second_part);
    return third_part;
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill; return the whole tail unspilled.
    return second_part;
  }
}
| 2751 | |
| 2476 | 2752 |
| 2477 void RegisterAllocator::Spill(LiveRange* range) { | 2753 void RegisterAllocator::Spill(LiveRange* range) { |
| 2478 DCHECK(!range->IsSpilled()); | 2754 DCHECK(!range->IsSpilled()); |
| 2479 TRACE("Spilling live range %d\n", range->id()); | 2755 TRACE("Spilling live range %d\n", range->id()); |
| 2480 auto first = range->TopLevel(); | 2756 auto first = range->TopLevel(); |
| 2481 if (first->HasNoSpillType()) { | 2757 if (first->HasNoSpillType()) { |
| 2482 AssignSpillRangeToLiveRange(first); | 2758 AssignSpillRangeToLiveRange(first); |
| 2483 } | 2759 } |
| 2484 range->MakeSpilled(); | 2760 range->MakeSpilled(); |
| 2485 } | 2761 } |
| 2486 | 2762 |
| 2487 | 2763 |
| 2488 int RegisterAllocator::RegisterCount() const { return num_registers_; } | 2764 int RegisterAllocatorLinear::RegisterCount() const { return num_registers_; } |
| 2489 | 2765 |
| 2490 | 2766 |
| 2491 #ifdef DEBUG | 2767 #ifdef DEBUG |
| 2492 | 2768 |
| 2493 | 2769 |
| 2494 void RegisterAllocator::Verify() const { | 2770 void RegisterAllocatorLinear::Verify() const { |
| 2495 for (auto current : live_ranges()) { | 2771 for (auto current : live_ranges()) { |
| 2496 if (current != nullptr) current->Verify(); | 2772 if (current != nullptr) current->Verify(); |
| 2497 } | 2773 } |
| 2498 } | 2774 } |
| 2499 | 2775 |
| 2500 | 2776 |
| 2501 #endif | 2777 #endif |
| 2502 | 2778 |
| 2503 | 2779 |
// Commits register |reg| to |range|: records the register in the used-set
// matching the range's kind, stores the assignment on the range, and rewrites
// all of the range's use positions to refer to the assigned operand.
void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
                                                     int reg) {
  if (range->Kind() == DOUBLE_REGISTERS) {
    assigned_double_registers_->Add(reg);
  } else {
    // Only two register kinds exist; anything not DOUBLE must be GENERAL.
    DCHECK(range->Kind() == GENERAL_REGISTERS);
    assigned_registers_->Add(reg);
  }
  range->set_assigned_register(reg);
  auto assignment = range->GetAssignedOperand();
  // TODO(dcarney): stop aliasing hint operands.
  range->ConvertUsesToOperand(assignment, nullptr);
  // Phi ranges additionally need the assignment propagated to the phi input.
  if (range->is_phi()) AssignPhiInput(range, assignment);
}
| 2518 | 2794 |
| 2519 } // namespace compiler | 2795 } // namespace compiler |
| 2520 } // namespace internal | 2796 } // namespace internal |
| 2521 } // namespace v8 | 2797 } // namespace v8 |
| OLD | NEW |