Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/linkage.h" | 5 #include "src/compiler/linkage.h" |
| 6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
| 7 #include "src/string-stream.h" | 7 #include "src/string-stream.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| 11 namespace compiler { | 11 namespace compiler { |
| 12 | 12 |
| 13 | |
| 13 #define TRACE(...) \ | 14 #define TRACE(...) \ |
| 14 do { \ | 15 do { \ |
| 15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ | 16 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
| 16 } while (false) | 17 } while (false) |
| 17 | 18 |
| 19 | |
| 20 class RegisterAllocationInfo : public ZoneObject { | |
| 21 public: | |
| 22 explicit RegisterAllocationInfo(Zone* zone) : storage_(zone) {} | |
| 23 | |
| 24 bool Find(UseInterval* query, LiveRange** result) { | |
| 25 ZoneSplayTree<Config>::Locator locator; | |
| 26 bool ret = storage_.Find(GetKey(query), &locator); | |
| 27 if (ret) { | |
| 28 *result = locator.value(); | |
| 29 } | |
| 30 return ret; | |
| 31 } | |
| 32 | |
| 33 bool Insert(LiveRange* range) { | |
| 34 auto* interval = range->first_interval(); | |
| 35 while (interval) { | |
| 36 if (!Insert(interval, range)) return false; | |
| 37 interval = interval->next(); | |
| 38 } | |
| 39 return true; | |
| 40 } | |
| 41 | |
| 42 bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); } | |
| 43 | |
| 44 bool Remove(LiveRange* range) { | |
| 45 bool ret = false; | |
| 46 auto* segment = range->first_interval(); | |
| 47 while (segment) { | |
| 48 ret |= Remove(segment); | |
| 49 segment = segment->next(); | |
| 50 } | |
| 51 return ret; | |
| 52 } | |
| 53 | |
| 54 bool IsEmpty() { return storage_.is_empty(); } | |
| 55 | |
| 56 private: | |
| 57 struct Config { | |
| 58 typedef std::pair<int, int> Key; | |
| 59 typedef LiveRange* Value; | |
| 60 static const Key kNoKey; | |
| 61 static Value NoValue() { return nullptr; } | |
| 62 static int Compare(const Key& a, const Key& b) { | |
| 63 if (a.second <= b.first) return -1; | |
| 64 if (a.first >= b.second) return 1; | |
| 65 return 0; | |
| 66 } | |
| 67 }; | |
| 68 | |
| 69 Config::Key GetKey(UseInterval* interval) { | |
| 70 if (!interval) return std::make_pair(0, 0); | |
| 71 return std::make_pair(interval->start().Value(), interval->end().Value()); | |
| 72 } | |
| 73 bool Insert(UseInterval* interval, LiveRange* range) { | |
| 74 ZoneSplayTree<Config>::Locator locator; | |
| 75 bool ret = storage_.Insert(GetKey(interval), &locator); | |
| 76 if (ret) locator.set_value(range); | |
| 77 return ret; | |
| 78 } | |
| 79 | |
| 80 ZoneSplayTree<Config> storage_; | |
| 81 DISALLOW_COPY_AND_ASSIGN(RegisterAllocationInfo); | |
| 82 }; | |
| 83 | |
| 84 | |
| 85 const std::pair<int, int> RegisterAllocationInfo::Config::kNoKey = | |
| 86 std::make_pair<int, int>(0, 0); | |
| 87 | |
| 88 | |
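
The `Compare` above is the heart of this conflict map: keys are half-open `[start, end)` intervals, and any two overlapping keys compare equal, so a plain tree lookup doubles as interval-conflict detection. A self-contained sketch of that ordering, outside the `ZoneSplayTree` machinery:

```cpp
#include <cassert>
#include <utility>

// Half-open [start, end) interval keys: returning 0 for any overlap makes a
// tree Find() on a query interval report the live range already occupying a
// conflicting slot. This is not a total order over arbitrary intervals; it is
// only consistent because Insert() refuses to store two overlapping keys, so
// stored keys stay pairwise disjoint.
static int Compare(std::pair<int, int> a, std::pair<int, int> b) {
  if (a.second <= b.first) return -1;  // a ends before b starts.
  if (a.first >= b.second) return 1;   // a starts after b ends.
  return 0;                            // Overlap: treated as equal.
}

int main() {
  assert(Compare({0, 4}, {4, 8}) == -1);  // Adjacent intervals do not conflict.
  assert(Compare({0, 5}, {4, 8}) == 0);   // Overlap is reported as a match.
  assert(Compare({6, 9}, {2, 6}) == 1);
  return 0;
}
```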
| 18 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { | 89 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { |
| 19 return a.Value() < b.Value() ? a : b; | 90 return a.Value() < b.Value() ? a : b; |
| 20 } | 91 } |
| 21 | 92 |
| 22 | 93 |
| 23 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { | 94 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { |
| 24 return a.Value() > b.Value() ? a : b; | 95 return a.Value() > b.Value() ? a : b; |
| 25 } | 96 } |
| 26 | 97 |
| 27 | 98 |
| (...skipping 546 matching lines...) | |
| 574 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, | 645 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, |
| 575 Zone* zone, Frame* frame, | 646 Zone* zone, Frame* frame, |
| 576 InstructionSequence* code, | 647 InstructionSequence* code, |
| 577 const char* debug_name) | 648 const char* debug_name) |
| 578 : local_zone_(zone), | 649 : local_zone_(zone), |
| 579 frame_(frame), | 650 frame_(frame), |
| 580 code_(code), | 651 code_(code), |
| 581 debug_name_(debug_name), | 652 debug_name_(debug_name), |
| 582 config_(config), | 653 config_(config), |
| 583 phi_map_(local_zone()), | 654 phi_map_(local_zone()), |
| 584 live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()), | |
| 585 live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()), | 655 live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()), |
| 586 fixed_live_ranges_(this->config()->num_general_registers(), nullptr, | 656 fixed_live_ranges_(this->config()->num_general_registers(), nullptr, |
| 587 local_zone()), | 657 local_zone()), |
| 588 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr, | 658 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr, |
| 589 local_zone()), | 659 local_zone()), |
| 660 spill_ranges_(local_zone()), | |
| 661 live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()) { | |
| 662 spill_ranges().reserve(8); | |
| 663 assigned_registers_ = | |
| 664 new (code_zone()) BitVector(config->num_general_registers(), code_zone()); | |
| 665 assigned_double_registers_ = new (code_zone()) | |
| 666 BitVector(config->num_aliased_double_registers(), code_zone()); | |
| 667 frame->SetAllocatedRegisters(assigned_registers_); | |
| 668 frame->SetAllocatedDoubleRegisters(assigned_double_registers_); | |
| 669 } | |
| 670 | |
| 671 RegisterAllocatorLinear::RegisterAllocatorLinear( | |
| 672 const RegisterConfiguration* config, Zone* zone, Frame* frame, | |
| 673 InstructionSequence* code, const char* debug_name) | |
| 674 : RegisterAllocator(config, zone, frame, code, debug_name), | |
| 590 unhandled_live_ranges_(local_zone()), | 675 unhandled_live_ranges_(local_zone()), |
| 591 active_live_ranges_(local_zone()), | 676 active_live_ranges_(local_zone()), |
| 592 inactive_live_ranges_(local_zone()), | 677 inactive_live_ranges_(local_zone()), |
| 593 spill_ranges_(local_zone()), | |
| 594 mode_(UNALLOCATED_REGISTERS), | 678 mode_(UNALLOCATED_REGISTERS), |
| 595 num_registers_(-1) { | 679 num_registers_(-1) { |
| 596 DCHECK(this->config()->num_general_registers() <= | 680 DCHECK(this->config()->num_general_registers() <= |
| 597 RegisterConfiguration::kMaxGeneralRegisters); | 681 RegisterConfiguration::kMaxGeneralRegisters); |
| 598 DCHECK(this->config()->num_double_registers() <= | 682 DCHECK(this->config()->num_double_registers() <= |
| 599 RegisterConfiguration::kMaxDoubleRegisters); | 683 RegisterConfiguration::kMaxDoubleRegisters); |
| 600 // TryAllocateFreeReg and AllocateBlockedReg assume this | 684 // TryAllocateFreeReg and AllocateBlockedReg assume this |
| 601 // when allocating local arrays. | 685 // when allocating local arrays. |
| 602 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= | 686 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= |
| 603 this->config()->num_general_registers()); | 687 this->config()->num_general_registers()); |
| 604 unhandled_live_ranges().reserve( | 688 unhandled_live_ranges().reserve( |
| 605 static_cast<size_t>(code->VirtualRegisterCount() * 2)); | 689 static_cast<size_t>(code->VirtualRegisterCount() * 2)); |
| 606 active_live_ranges().reserve(8); | 690 active_live_ranges().reserve(8); |
| 607 inactive_live_ranges().reserve(8); | 691 inactive_live_ranges().reserve(8); |
| 608 spill_ranges().reserve(8); | |
| 609 assigned_registers_ = | |
| 610 new (code_zone()) BitVector(config->num_general_registers(), code_zone()); | |
| 611 assigned_double_registers_ = new (code_zone()) | |
| 612 BitVector(config->num_aliased_double_registers(), code_zone()); | |
| 613 frame->SetAllocatedRegisters(assigned_registers_); | |
| 614 frame->SetAllocatedDoubleRegisters(assigned_double_registers_); | |
| 615 } | 692 } |
| 616 | 693 |
| 617 | 694 |
| 618 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) { | 695 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) { |
| 619 // Compute live out for the given block, except not including backward | 696 // Compute live out for the given block, except not including backward |
| 620 // successor edges. | 697 // successor edges. |
| 621 auto live_out = new (local_zone()) | 698 auto live_out = new (local_zone()) |
| 622 BitVector(code()->VirtualRegisterCount(), local_zone()); | 699 BitVector(code()->VirtualRegisterCount(), local_zone()); |
| 623 | 700 |
| 624 // Process all successor blocks. | 701 // Process all successor blocks. |
| (...skipping 341 matching lines...) | |
| 966 } | 1043 } |
| 967 | 1044 |
| 968 | 1045 |
| 969 SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) { | 1046 SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) { |
| 970 auto spill_range = new (local_zone()) SpillRange(range, local_zone()); | 1047 auto spill_range = new (local_zone()) SpillRange(range, local_zone()); |
| 971 spill_ranges().push_back(spill_range); | 1048 spill_ranges().push_back(spill_range); |
| 972 return spill_range; | 1049 return spill_range; |
| 973 } | 1050 } |
| 974 | 1051 |
| 975 | 1052 |
| 976 bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) { | 1053 bool RegisterAllocatorLinear::TryReuseSpillForPhi(LiveRange* range) { |
| 977 if (range->IsChild() || !range->is_phi()) return false; | 1054 if (range->IsChild() || !range->is_phi()) return false; |
| 978 DCHECK(!range->HasSpillOperand()); | 1055 DCHECK(!range->HasSpillOperand()); |
| 979 | 1056 |
| 980 auto lookup = phi_map_.find(range->id()); | 1057 auto lookup = phi_map().find(range->id()); |
| 981 DCHECK(lookup != phi_map_.end()); | 1058 DCHECK(lookup != phi_map().end()); |
| 982 auto phi = lookup->second->phi; | 1059 auto phi = lookup->second->phi; |
| 983 auto block = lookup->second->block; | 1060 auto block = lookup->second->block; |
| 984 // Count the number of spilled operands. | 1061 // Count the number of spilled operands. |
| 985 size_t spilled_count = 0; | 1062 size_t spilled_count = 0; |
| 986 LiveRange* first_op = nullptr; | 1063 LiveRange* first_op = nullptr; |
| 987 for (size_t i = 0; i < phi->operands().size(); i++) { | 1064 for (size_t i = 0; i < phi->operands().size(); i++) { |
| 988 int op = phi->operands()[i]; | 1065 int op = phi->operands()[i]; |
| 989 LiveRange* op_range = LiveRangeFor(op); | 1066 LiveRange* op_range = LiveRangeFor(op); |
| 990 if (op_range->GetSpillRange() == nullptr) continue; | 1067 if (op_range->GetSpillRange() == nullptr) continue; |
| 991 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); | 1068 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); |
| (...skipping 806 matching lines...) | |
| 1798 bool RegisterAllocator::SafePointsAreInOrder() const { | 1875 bool RegisterAllocator::SafePointsAreInOrder() const { |
| 1799 int safe_point = 0; | 1876 int safe_point = 0; |
| 1800 for (auto map : *code()->reference_maps()) { | 1877 for (auto map : *code()->reference_maps()) { |
| 1801 if (safe_point > map->instruction_position()) return false; | 1878 if (safe_point > map->instruction_position()) return false; |
| 1802 safe_point = map->instruction_position(); | 1879 safe_point = map->instruction_position(); |
| 1803 } | 1880 } |
| 1804 return true; | 1881 return true; |
| 1805 } | 1882 } |
| 1806 | 1883 |
| 1807 | 1884 |
| 1885 unsigned RegisterAllocatorGreedy::GetLiveRangeSize(LiveRange* range) { | |
| 1886 auto interval = range->first_interval(); | |
| 1887 if (!interval) return 0; | |
| 1888 | |
| 1889 unsigned size = 0; | |
| 1890 while (interval) { | |
| 1891 size += (interval->end().Value() - interval->start().Value()); | |
| 1892 interval = interval->next(); | |
| 1893 } | |
| 1894 | |
| 1895 DCHECK(size); | |
| 1896 return size; | |
| 1897 } | |
| 1898 | |
| 1899 | |
| 1900 void RegisterAllocatorGreedy::Enqueue(LiveRange* range) { | |
| 1901 if (!range || range->IsEmpty()) return; | |
| 1902 unsigned size = GetLiveRangeSize(range); | |
| 1903 queue_.push(std::make_pair(size, range)); | |
| 1904 } | |
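
A note on the queue above: with a priority queue keyed on `std::pair<unsigned, LiveRange*>` and the default comparator (an assumption; the patch only shows `queue_(local_zone)`), pairs compare lexicographically, so `top()` yields the live range with the largest size first. A minimal sketch of that behavior, with `int` standing in for `LiveRange*`:

```cpp
#include <queue>
#include <utility>

int main() {
  // Pairs compare lexicographically; the largest first element surfaces first.
  std::priority_queue<std::pair<unsigned, int>> queue;
  queue.push({3, 100});
  queue.push({7, 200});
  queue.push({5, 300});
  // queue.top().second == 200: the range with size 7 is dequeued first.
  return 0;
}
```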
| 1905 | |
| 1906 | |
| 1907 // TODO(mtrofin): consolidate with identical code segment in | |
| 1908 // RegisterAllocatorLinear::AllocateRegisters | |
| 1909 bool RegisterAllocatorGreedy::HandleSpillOperands(LiveRange* range) { | |
| 1910 auto position = range->Start(); | |
| 1911 TRACE("Processing interval %d start=%d\n", range->id(), position.Value()); | |
| 1912 | |
| 1913 if (!range->HasNoSpillType()) { | |
| 1914 TRACE("Live range %d already has a spill operand\n", range->id()); | |
| 1915 auto next_pos = position; | |
| 1916 if (next_pos.IsGapPosition()) { | |
| 1917 next_pos = next_pos.NextStart(); | |
| 1918 } | |
| 1919 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | |

dcarney (2015/04/18 19:01:33): NextUsePositionRegisterIsBeneficial is not available to the greedy allocator.
Mircea Trofin (2015/04/19 04:02:04): Acknowledged. This will need a rewrite anyway.
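
Since the thread leaves the missing API unresolved, here is one way the greedy pass could recover the same answer by walking the use-position list itself. A sketch only: `NextBeneficialUse` is a hypothetical helper, and `RegisterIsBeneficial()` on `UsePosition` is assumed to behave as the linear allocator's predicate does.

```cpp
// Hypothetical stand-in for NextUsePositionRegisterIsBeneficial: return the
// first use at or after `start` that would benefit from a register.
UsePosition* NextBeneficialUse(LiveRange* range, LifetimePosition start) {
  for (UsePosition* pos = range->first_pos(); pos != nullptr;
       pos = pos->next()) {
    if (pos->pos().Value() >= start.Value() && pos->RegisterIsBeneficial()) {
      return pos;
    }
  }
  return nullptr;
}
```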
| 1920 // If the range already has a spill operand and it doesn't need a | |
| 1921 // register immediately, split it and spill the first part of the range. | |
| 1922 if (pos == nullptr) { | |
| 1923 Spill(range); | |
| 1924 return true; | |
| 1925 } else if (pos->pos().Value() > range->Start().NextStart().Value()) { | |
| 1926 // Do not spill live range eagerly if use position that can benefit from | |
| 1927 // the register is too close to the start of live range. | |
| 1928 auto* remainder = SpillBetweenUntil(range, position, position, pos->pos()); | |
| 1929 Enqueue(remainder); | |
| 1930 return true; | |
| 1931 } | |
| 1932 } | |
| 1933 return false; | |
| 1934 // TODO(mtrofin): Do we need this? | |
| 1935 // return (TryReuseSpillForPhi(range)); | |
| 1936 } | |
| 1937 | |
| 1938 | |
| 1939 void RegisterAllocatorGreedy::AllocateRegisters(RegisterKind mode) { | |
| 1940 // TODO(mtrofin): support for double registers | |
| 1941 DCHECK(mode == GENERAL_REGISTERS); | |
| 1942 | |
| 1943 for (auto range : live_ranges()) { | |
| 1944 if (range == nullptr) continue; | |
| 1945 if (range->Kind() == mode) { | |
| 1946 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | |
| 1947 TRACE("Enqueueing live range %d to priority queue \n", range->id()); | |
| 1948 Enqueue(range); | |
| 1949 } | |
| 1950 } | |
| 1951 | |
| 1952 allocations_.resize(config()->kMaxGeneralRegisters); | |

dcarney (2015/04/18 19:01:32): config()->num_general_registers() is the number of registers actually available in this configuration; use that here instead of kMaxGeneralRegisters.
Mircea Trofin (2015/04/19 04:02:04): Done.
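
A minimal sketch of the fix the thread agrees on: size the table by the configured register count instead of the architectural maximum.

```cpp
// Size per-register state by what this configuration actually exposes,
// not by RegisterConfiguration::kMaxGeneralRegisters.
int num_regs = config()->num_general_registers();
allocations_.resize(num_regs);
for (int i = 0; i < num_regs; ++i) {
  allocations_[i] = new (local_zone()) RegisterAllocationInfo(local_zone());
}
```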
| 1953 for (int i = 0; i < config()->kMaxGeneralRegisters; i++) { | |
| 1954 allocations_[i] = new (local_zone()) RegisterAllocationInfo(local_zone()); | |
| 1955 } | |
| 1956 | |
| 1957 for (auto* current : fixed_live_ranges()) { | |
| 1958 if (current != nullptr) { | |
| 1959 int regNr = current->assigned_register(); | |
| 1960 CHECK((allocations_[regNr]->Insert(current))); | |

dcarney (2015/04/18 19:01:33): generally we ensure CHECK should be convertible to a DCHECK, so the condition must not have side effects.
Mircea Trofin (2015/04/19 04:02:04): Done.
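
The requested change, sketched: hoist the mutating call out of the assertion macro so the CHECK could be downgraded to a DCHECK without silently dropping the insertion.

```cpp
// Keep the side effect outside the macro; assert only on the result.
bool inserted = allocations_[regNr]->Insert(current);
CHECK(inserted);
```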
| 1961 } | |
| 1962 } | |
| 1963 | |
| 1964 while (!queue_.empty()) { | |
| 1965 auto currentPair = queue_.top(); | |
| 1966 queue_.pop(); | |
| 1967 auto current = currentPair.second; | |
| 1968 if (HandleSpillOperands(current)) continue; | |
| 1969 ZoneSet<LiveRange*> conflicting(local_zone()); | |
| 1970 if (!TryAllocate(current, &conflicting)) { | |
| 1971 DCHECK(conflicting.size()); | |
| 1972 float thisMax = CalculateSpillWeight(current); | |
| 1973 float maxConflicting = CalculateMaxSpillWeight(conflicting); | |
| 1974 if (maxConflicting < thisMax) { | |
| 1975 for (auto* conflict : conflicting) { | |
| 1976 Evict(conflict); | |
| 1977 Enqueue(conflict); | |
| 1978 } | |
| 1979 conflicting.clear(); | |
| 1980 CHECK(TryAllocate(current, &conflicting)); | |
| 1981 DCHECK(conflicting.size() == 0); | |
| 1982 } else { | |
| 1983 DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start())); | |
| 1984 CHECK(AllocateBlockedRange(current, conflicting)); | |

dcarney (2015/04/18 19:01:33): Here and 2 lines above: CHECK issue again.
Mircea Trofin (2015/04/19 04:02:04): Done.
| 1985 } | |
| 1986 } | |
| 1987 } | |
| 1988 for (unsigned i = 0; i < allocations_.size(); i++) { | |

dcarney (2015/04/18 19:01:33): size_t here and in other places when iterating to a container's size().
Mircea Trofin (2015/04/19 04:02:04): Done.
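
The loop as the review suggests it, with the index type matched to the container; the static_cast assumes BitVector::Add takes an int.

```cpp
for (size_t i = 0; i < allocations_.size(); ++i) {
  if (!allocations_[i]->IsEmpty()) {
    assigned_registers()->Add(static_cast<int>(i));
  }
}
```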
| 1989 if (!allocations_[i]->IsEmpty()) { | |
| 1990 assigned_registers()->Add(i); | |
| 1991 } | |
| 1992 } | |
| 1993 } | |
| 1994 | |
| 1995 | |
| 1996 bool RegisterAllocatorGreedy::AllocateBlockedRange( | |
| 1997 LiveRange* current, const ZoneSet<LiveRange*>& conflicts) { | |
| 1998 auto register_use = current->NextRegisterPosition(current->Start()); | |

dcarney (2015/04/18 19:01:33): NextRegisterPosition is also unavailable to the greedy allocator.
Mircea Trofin (2015/04/19 04:02:04): Acknowledged.
| 1999 if (register_use == nullptr) { | |
| 2000 // There is no use in the current live range that requires a register. | |
| 2001 // We can just spill it. | |
| 2002 Spill(current); | |
| 2003 return true; | |
| 2004 } | |
| 2005 | |
| 2006 auto second_part = SplitRangeAt(current, register_use->pos()); | |
| 2007 Spill(second_part); | |
| 2008 | |
| 2009 return true; | |
| 2010 } | |
| 2011 | |
| 2012 | |
| 1808 void RegisterAllocator::PopulateReferenceMaps() { | 2013 void RegisterAllocator::PopulateReferenceMaps() { |
| 1809 DCHECK(SafePointsAreInOrder()); | 2014 DCHECK(SafePointsAreInOrder()); |
| 1810 | 2015 |
| 1811 // Iterate over all safe point positions and record a pointer | 2016 // Iterate over all safe point positions and record a pointer |
| 1812 // for all spilled live ranges at this point. | 2017 // for all spilled live ranges at this point. |
| 1813 int last_range_start = 0; | 2018 int last_range_start = 0; |
| 1814 auto reference_maps = code()->reference_maps(); | 2019 auto reference_maps = code()->reference_maps(); |
| 1815 ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); | 2020 ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); |
| 1816 for (LiveRange* range : live_ranges()) { | 2021 for (LiveRange* range : live_ranges()) { |
| 1817 if (range == nullptr) continue; | 2022 if (range == nullptr) continue; |
| (...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1879 cur->id(), cur->Start().Value(), safe_point); | 2084 cur->id(), cur->Start().Value(), safe_point); |
| 1880 auto operand = cur->GetAssignedOperand(); | 2085 auto operand = cur->GetAssignedOperand(); |
| 1881 DCHECK(!operand.IsStackSlot()); | 2086 DCHECK(!operand.IsStackSlot()); |
| 1882 map->RecordReference(operand); | 2087 map->RecordReference(operand); |
| 1883 } | 2088 } |
| 1884 } | 2089 } |
| 1885 } | 2090 } |
| 1886 } | 2091 } |
| 1887 | 2092 |
| 1888 | 2093 |
| 1889 void RegisterAllocator::AllocateGeneralRegisters() { | 2094 void RegisterAllocatorLinear::AllocateGeneralRegisters() { |
| 1890 num_registers_ = config()->num_general_registers(); | 2095 num_registers_ = config()->num_general_registers(); |
| 1891 mode_ = GENERAL_REGISTERS; | 2096 mode_ = GENERAL_REGISTERS; |
| 1892 AllocateRegisters(); | 2097 AllocateRegisters(); |
| 1893 } | 2098 } |
| 1894 | 2099 |
| 1895 | 2100 |
| 1896 void RegisterAllocator::AllocateDoubleRegisters() { | 2101 void RegisterAllocatorLinear::AllocateDoubleRegisters() { |
| 1897 num_registers_ = config()->num_aliased_double_registers(); | 2102 num_registers_ = config()->num_aliased_double_registers(); |
| 1898 mode_ = DOUBLE_REGISTERS; | 2103 mode_ = DOUBLE_REGISTERS; |
| 1899 AllocateRegisters(); | 2104 AllocateRegisters(); |
| 1900 } | 2105 } |
| 1901 | 2106 |
| 1902 | 2107 |
| 1903 void RegisterAllocator::AllocateRegisters() { | 2108 void RegisterAllocatorLinear::AllocateRegisters() { |
| 1904 DCHECK(unhandled_live_ranges().empty()); | 2109 DCHECK(unhandled_live_ranges().empty()); |
| 1905 | 2110 |
| 1906 for (auto range : live_ranges()) { | 2111 for (auto range : live_ranges()) { |
| 1907 if (range == nullptr) continue; | 2112 if (range == nullptr) continue; |
| 1908 if (range->Kind() == mode_) { | 2113 if (range->Kind() == mode_) { |
| 1909 AddToUnhandledUnsorted(range); | 2114 AddToUnhandledUnsorted(range); |
| 1910 } | 2115 } |
| 1911 } | 2116 } |
| 1912 SortUnhandled(); | 2117 SortUnhandled(); |
| 1913 DCHECK(UnhandledIsSorted()); | 2118 DCHECK(UnhandledIsSorted()); |
| (...skipping 80 matching lines...) | |
| 1994 if (current->HasRegisterAssigned()) { | 2199 if (current->HasRegisterAssigned()) { |
| 1995 AddToActive(current); | 2200 AddToActive(current); |
| 1996 } | 2201 } |
| 1997 } | 2202 } |
| 1998 | 2203 |
| 1999 active_live_ranges().clear(); | 2204 active_live_ranges().clear(); |
| 2000 inactive_live_ranges().clear(); | 2205 inactive_live_ranges().clear(); |
| 2001 } | 2206 } |
| 2002 | 2207 |
| 2003 | 2208 |
| 2004 const char* RegisterAllocator::RegisterName(int allocation_index) { | 2209 const char* RegisterAllocatorLinear::RegisterName(int allocation_index) { |
| 2005 if (mode_ == GENERAL_REGISTERS) { | 2210 if (mode_ == GENERAL_REGISTERS) { |
| 2006 return config()->general_register_name(allocation_index); | 2211 return config()->general_register_name(allocation_index); |
| 2007 } else { | 2212 } else { |
| 2008 return config()->double_register_name(allocation_index); | 2213 return config()->double_register_name(allocation_index); |
| 2009 } | 2214 } |
| 2010 } | 2215 } |
| 2011 | 2216 |
| 2012 | 2217 |
| 2013 bool RegisterAllocator::HasTaggedValue(int virtual_register) const { | 2218 bool RegisterAllocator::HasTaggedValue(int virtual_register) const { |
| 2014 return code()->IsReference(virtual_register); | 2219 return code()->IsReference(virtual_register); |
| 2015 } | 2220 } |
| 2016 | 2221 |
| 2017 | 2222 |
| 2018 RegisterKind RegisterAllocator::RequiredRegisterKind( | 2223 RegisterKind RegisterAllocator::RequiredRegisterKind( |
| 2019 int virtual_register) const { | 2224 int virtual_register) const { |
| 2020 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS | 2225 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS |
| 2021 : GENERAL_REGISTERS; | 2226 : GENERAL_REGISTERS; |
| 2022 } | 2227 } |
| 2023 | 2228 |
| 2024 | 2229 |
| 2025 void RegisterAllocator::AddToActive(LiveRange* range) { | 2230 void RegisterAllocatorLinear::AddToActive(LiveRange* range) { |
| 2026 TRACE("Add live range %d to active\n", range->id()); | 2231 TRACE("Add live range %d to active\n", range->id()); |
| 2027 active_live_ranges().push_back(range); | 2232 active_live_ranges().push_back(range); |
| 2028 } | 2233 } |
| 2029 | 2234 |
| 2030 | 2235 |
| 2031 void RegisterAllocator::AddToInactive(LiveRange* range) { | 2236 void RegisterAllocatorLinear::AddToInactive(LiveRange* range) { |
| 2032 TRACE("Add live range %d to inactive\n", range->id()); | 2237 TRACE("Add live range %d to inactive\n", range->id()); |
| 2033 inactive_live_ranges().push_back(range); | 2238 inactive_live_ranges().push_back(range); |
| 2034 } | 2239 } |
| 2035 | 2240 |
| 2036 | 2241 |
| 2037 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { | 2242 void RegisterAllocatorLinear::AddToUnhandledSorted(LiveRange* range) { |
| 2038 if (range == nullptr || range->IsEmpty()) return; | 2243 if (range == nullptr || range->IsEmpty()) return; |
| 2039 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | 2244 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| 2040 DCHECK(allocation_finger_.Value() <= range->Start().Value()); | 2245 DCHECK(allocation_finger_.Value() <= range->Start().Value()); |
| 2041 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; | 2246 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; |
| 2042 --i) { | 2247 --i) { |
| 2043 auto cur_range = unhandled_live_ranges().at(i); | 2248 auto cur_range = unhandled_live_ranges().at(i); |
| 2044 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; | 2249 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; |
| 2045 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1); | 2250 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1); |
| 2046 auto it = unhandled_live_ranges().begin() + (i + 1); | 2251 auto it = unhandled_live_ranges().begin() + (i + 1); |
| 2047 unhandled_live_ranges().insert(it, range); | 2252 unhandled_live_ranges().insert(it, range); |
| 2048 DCHECK(UnhandledIsSorted()); | 2253 DCHECK(UnhandledIsSorted()); |
| 2049 return; | 2254 return; |
| 2050 } | 2255 } |
| 2051 TRACE("Add live range %d to unhandled at start\n", range->id()); | 2256 TRACE("Add live range %d to unhandled at start\n", range->id()); |
| 2052 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); | 2257 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); |
| 2053 DCHECK(UnhandledIsSorted()); | 2258 DCHECK(UnhandledIsSorted()); |
| 2054 } | 2259 } |
| 2055 | 2260 |
| 2056 | 2261 |
| 2057 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { | 2262 void RegisterAllocatorLinear::AddToUnhandledUnsorted(LiveRange* range) { |
| 2058 if (range == nullptr || range->IsEmpty()) return; | 2263 if (range == nullptr || range->IsEmpty()) return; |
| 2059 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | 2264 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| 2060 TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); | 2265 TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); |
| 2061 unhandled_live_ranges().push_back(range); | 2266 unhandled_live_ranges().push_back(range); |
| 2062 } | 2267 } |
| 2063 | 2268 |
| 2064 | 2269 |
| 2065 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { | 2270 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { |
| 2066 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); | 2271 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); |
| 2067 if (a->ShouldBeAllocatedBefore(b)) return false; | 2272 if (a->ShouldBeAllocatedBefore(b)) return false; |
| 2068 if (b->ShouldBeAllocatedBefore(a)) return true; | 2273 if (b->ShouldBeAllocatedBefore(a)) return true; |
| 2069 return a->id() < b->id(); | 2274 return a->id() < b->id(); |
| 2070 } | 2275 } |
| 2071 | 2276 |
| 2072 | 2277 |
| 2073 // Sort the unhandled live ranges so that the ranges to be processed first are | 2278 // Sort the unhandled live ranges so that the ranges to be processed first are |
| 2074 // at the end of the array list. This is convenient for the register allocation | 2279 // at the end of the array list. This is convenient for the register allocation |
| 2075 // algorithm because it is efficient to remove elements from the end. | 2280 // algorithm because it is efficient to remove elements from the end. |
| 2076 void RegisterAllocator::SortUnhandled() { | 2281 void RegisterAllocatorLinear::SortUnhandled() { |
| 2077 TRACE("Sort unhandled\n"); | 2282 TRACE("Sort unhandled\n"); |
| 2078 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), | 2283 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), |
| 2079 &UnhandledSortHelper); | 2284 &UnhandledSortHelper); |
| 2080 } | 2285 } |
| 2081 | 2286 |
| 2082 | 2287 |
| 2083 bool RegisterAllocator::UnhandledIsSorted() { | 2288 bool RegisterAllocatorLinear::UnhandledIsSorted() { |
| 2084 size_t len = unhandled_live_ranges().size(); | 2289 size_t len = unhandled_live_ranges().size(); |
| 2085 for (size_t i = 1; i < len; i++) { | 2290 for (size_t i = 1; i < len; i++) { |
| 2086 auto a = unhandled_live_ranges().at(i - 1); | 2291 auto a = unhandled_live_ranges().at(i - 1); |
| 2087 auto b = unhandled_live_ranges().at(i); | 2292 auto b = unhandled_live_ranges().at(i); |
| 2088 if (a->Start().Value() < b->Start().Value()) return false; | 2293 if (a->Start().Value() < b->Start().Value()) return false; |
| 2089 } | 2294 } |
| 2090 return true; | 2295 return true; |
| 2091 } | 2296 } |
| 2092 | 2297 |
| 2093 | 2298 |
| 2094 void RegisterAllocator::ActiveToHandled(LiveRange* range) { | 2299 void RegisterAllocatorLinear::ActiveToHandled(LiveRange* range) { |
| 2095 RemoveElement(&active_live_ranges(), range); | 2300 RemoveElement(&active_live_ranges(), range); |
| 2096 TRACE("Moving live range %d from active to handled\n", range->id()); | 2301 TRACE("Moving live range %d from active to handled\n", range->id()); |
| 2097 } | 2302 } |
| 2098 | 2303 |
| 2099 | 2304 |
| 2100 void RegisterAllocator::ActiveToInactive(LiveRange* range) { | 2305 void RegisterAllocatorLinear::ActiveToInactive(LiveRange* range) { |
| 2101 RemoveElement(&active_live_ranges(), range); | 2306 RemoveElement(&active_live_ranges(), range); |
| 2102 inactive_live_ranges().push_back(range); | 2307 inactive_live_ranges().push_back(range); |
| 2103 TRACE("Moving live range %d from active to inactive\n", range->id()); | 2308 TRACE("Moving live range %d from active to inactive\n", range->id()); |
| 2104 } | 2309 } |
| 2105 | 2310 |
| 2106 | 2311 |
| 2107 void RegisterAllocator::InactiveToHandled(LiveRange* range) { | 2312 void RegisterAllocatorLinear::InactiveToHandled(LiveRange* range) { |
| 2108 RemoveElement(&inactive_live_ranges(), range); | 2313 RemoveElement(&inactive_live_ranges(), range); |
| 2109 TRACE("Moving live range %d from inactive to handled\n", range->id()); | 2314 TRACE("Moving live range %d from inactive to handled\n", range->id()); |
| 2110 } | 2315 } |
| 2111 | 2316 |
| 2112 | 2317 |
| 2113 void RegisterAllocator::InactiveToActive(LiveRange* range) { | 2318 void RegisterAllocatorLinear::InactiveToActive(LiveRange* range) { |
| 2114 RemoveElement(&inactive_live_ranges(), range); | 2319 RemoveElement(&inactive_live_ranges(), range); |
| 2115 active_live_ranges().push_back(range); | 2320 active_live_ranges().push_back(range); |
| 2116 TRACE("Moving live range %d from inactive to active\n", range->id()); | 2321 TRACE("Moving live range %d from inactive to active\n", range->id()); |
| 2117 } | 2322 } |
| 2118 | 2323 |
| 2119 | 2324 |
| 2120 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { | 2325 RegisterAllocatorGreedy::RegisterAllocatorGreedy( |
| 2326 const RegisterConfiguration* config, Zone* local_zone, Frame* frame, | |
| 2327 InstructionSequence* code, const char* debug_name) | |
| 2328 : RegisterAllocator(config, local_zone, frame, code, debug_name), | |
| 2329 allocations_(local_zone), | |
| 2330 queue_(local_zone) {} | |
| 2331 | |
| 2332 | |
| 2333 void RegisterAllocatorGreedy::AllocateGeneralRegisters() { | |
| 2334 AllocateRegisters(RegisterKind::GENERAL_REGISTERS); | |
| 2335 } | |
| 2336 | |
| 2337 | |
| 2338 void RegisterAllocatorGreedy::AllocateDoubleRegisters() { | |
| 2339 AllocateRegisters(RegisterKind::DOUBLE_REGISTERS); | |
| 2340 } | |
| 2341 | |
| 2342 | |
| 2343 void RegisterAllocatorGreedy::AssignRangeToRegister(unsigned reg_id, | |
| 2344 LiveRange* range) { | |
| 2345 allocations_[reg_id]->Insert(range); | |
| 2346 if (range->HasRegisterAssigned()) { | |
| 2347 DCHECK((unsigned)(range->assigned_register()) == reg_id); | |

dcarney (2015/04/18 19:01:33): chromium style guide dictates that you must use static_cast, not C-style casts.
Mircea Trofin (2015/04/19 04:02:04): Done.
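
One Chromium-style form of that assertion; a DCHECK_EQ also reports both operands on failure.

```cpp
// static_cast per the Chromium style guide; both sides are unsigned.
DCHECK_EQ(reg_id, static_cast<unsigned>(range->assigned_register()));
```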
| 2348 return; | |
| 2349 } | |
| 2350 range->set_assigned_register(reg_id); | |
| 2351 } | |
| 2352 | |
| 2353 | |
| 2354 float RegisterAllocatorGreedy::CalculateSpillWeight(LiveRange* range) { | |
| 2355 if (range->IsFixed()) return std::numeric_limits<float>::max(); | |
| 2356 | |
| 2357 if (range->FirstHint() && range->FirstHint()->IsRegister()) | |

dcarney (2015/04/18 19:01:33): Need braces on a multiline if.
Mircea Trofin (2015/04/19 04:02:04): Done.
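
With the braces the review asks for:

```cpp
if (range->FirstHint() != nullptr && range->FirstHint()->IsRegister()) {
  return std::numeric_limits<float>::max();
}
```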
| 2358 return std::numeric_limits<float>::max(); | |
| 2359 | |
| 2360 unsigned useCount = 0; | |
| 2361 auto* pos = range->first_pos(); | |
| 2362 while (pos != nullptr) { | |
| 2363 useCount++; | |
| 2364 pos = pos->next(); | |
| 2365 } | |
| 2366 | |
| 2367 // GetLiveRangeSize is DCHECK-ed to be non-zero. | |
| 2368 return static_cast<float>(useCount) / | |
| 2369 static_cast<float>(GetLiveRangeSize(range)); | |
| 2370 } | |
| 2371 | |
| 2372 | |
| 2373 float RegisterAllocatorGreedy::CalculateMaxSpillWeight( | |
| 2374 const ZoneSet<LiveRange*>& ranges) { | |
| 2375 float max = 0.0; | |
| 2376 for (auto* r : ranges) { | |
| 2377 float localMax = CalculateSpillWeight(r); | |

dcarney (2015/04/18 19:01:33): max = std::max(max, CalculateSpillWeight(r));
Mircea Trofin (2015/04/19 04:02:04): Done.
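
The function rewritten with dcarney's suggested form, which also needs an `<algorithm>` include:

```cpp
#include <algorithm>  // For std::max.

float RegisterAllocatorGreedy::CalculateMaxSpillWeight(
    const ZoneSet<LiveRange*>& ranges) {
  float max = 0.0f;
  for (auto* r : ranges) {
    max = std::max(max, CalculateSpillWeight(r));
  }
  return max;
}
```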
| 2378 if (localMax > max) max = localMax; | |
| 2379 } | |
| 2380 return max; | |
| 2381 } | |
| 2382 | |
| 2383 | |
| 2384 void RegisterAllocatorGreedy::Evict(LiveRange* range) { | |
| 2385 allocations_[range->assigned_register()]->Remove(range); | |
| 2386 } | |
| 2387 | |
| 2388 | |
| 2389 bool RegisterAllocatorGreedy::TryAllocatePhysicalRegister( | |
| 2390 unsigned reg_id, LiveRange* range, ZoneSet<LiveRange*>* conflicting) { | |
| 2391 auto* segment = range->first_interval(); | |
| 2392 | |
| 2393 RegisterAllocationInfo* allocInfo = allocations_[reg_id]; | |
| 2394 while (segment) { | |
| 2395 LiveRange* existing; | |
| 2396 if (allocInfo->Find(segment, &existing)) { | |
| 2397 DCHECK(existing->HasRegisterAssigned()); | |
| 2398 conflicting->insert(existing); | |
| 2399 } | |
| 2400 segment = segment->next(); | |
| 2401 } | |
| 2402 if (!conflicting->empty()) return false; | |
| 2403 // good to insert | |

dcarney (2015/04/18 19:01:33): Comments in v8 typically start with a capital and end with a period.
Mircea Trofin (2015/04/19 04:02:04): Done.
| 2404 AssignRangeToRegister(reg_id, range); | |
| 2405 return true; | |
| 2406 } | |
| 2407 | |
| 2408 bool RegisterAllocatorGreedy::TryAllocate(LiveRange* current, | |
| 2409 ZoneSet<LiveRange*>* conflicting) { | |
| 2410 if (current->HasSpillOperand()) { | |
| 2411 Spill(current); | |
| 2412 return true; | |
| 2413 } | |
| 2414 if (current->IsFixed()) { | |
| 2415 return TryAllocatePhysicalRegister(current->assigned_register(), current, | |
| 2416 conflicting); | |
| 2417 } | |
| 2418 | |
| 2419 if (current->HasRegisterAssigned()) { | |
| 2420 int reg_id = current->assigned_register(); | |
| 2421 return TryAllocatePhysicalRegister(reg_id, current, conflicting); | |
| 2422 } | |
| 2423 | |
| 2424 for (unsigned candidate_reg = 0; candidate_reg < allocations_.size(); | |
| 2425 candidate_reg++) { | |
| 2426 if (TryAllocatePhysicalRegister(candidate_reg, current, conflicting)) { | |
| 2427 conflicting->clear(); | |
| 2428 return true; | |
| 2429 } | |
| 2430 } | |
| 2431 return false; | |
| 2432 } | |
| 2433 | |
| 2434 | |
| 2435 bool RegisterAllocatorLinear::TryAllocateFreeReg(LiveRange* current) { | |
| 2121 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2436 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2122 | 2437 |
| 2123 for (int i = 0; i < num_registers_; i++) { | 2438 for (int i = 0; i < num_registers_; i++) { |
| 2124 free_until_pos[i] = LifetimePosition::MaxPosition(); | 2439 free_until_pos[i] = LifetimePosition::MaxPosition(); |
| 2125 } | 2440 } |
| 2126 | 2441 |
| 2127 for (auto cur_active : active_live_ranges()) { | 2442 for (auto cur_active : active_live_ranges()) { |
| 2128 free_until_pos[cur_active->assigned_register()] = | 2443 free_until_pos[cur_active->assigned_register()] = |
| 2129 LifetimePosition::GapFromInstructionIndex(0); | 2444 LifetimePosition::GapFromInstructionIndex(0); |
| 2130 } | 2445 } |
| (...skipping 48 matching lines...) | |
| 2179 // the range end. | 2494 // the range end. |
| 2180 DCHECK(pos.Value() >= current->End().Value()); | 2495 DCHECK(pos.Value() >= current->End().Value()); |
| 2181 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), | 2496 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), |
| 2182 current->id()); | 2497 current->id()); |
| 2183 SetLiveRangeAssignedRegister(current, reg); | 2498 SetLiveRangeAssignedRegister(current, reg); |
| 2184 | 2499 |
| 2185 return true; | 2500 return true; |
| 2186 } | 2501 } |
| 2187 | 2502 |
| 2188 | 2503 |
| 2189 void RegisterAllocator::AllocateBlockedReg(LiveRange* current) { | 2504 void RegisterAllocatorLinear::AllocateBlockedReg(LiveRange* current) { |
| 2190 auto register_use = current->NextRegisterPosition(current->Start()); | 2505 auto register_use = current->NextRegisterPosition(current->Start()); |
| 2191 if (register_use == nullptr) { | 2506 if (register_use == nullptr) { |
| 2192 // There is no use in the current live range that requires a register. | 2507 // There is no use in the current live range that requires a register. |
| 2193 // We can just spill it. | 2508 // We can just spill it. |
| 2194 Spill(current); | 2509 Spill(current); |
| 2195 return; | 2510 return; |
| 2196 } | 2511 } |
| 2197 | 2512 |
| 2198 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2513 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2199 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2514 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| (...skipping 69 matching lines...) | |
| 2269 | 2584 |
| 2270 | 2585 |
| 2271 static const InstructionBlock* GetContainingLoop( | 2586 static const InstructionBlock* GetContainingLoop( |
| 2272 const InstructionSequence* sequence, const InstructionBlock* block) { | 2587 const InstructionSequence* sequence, const InstructionBlock* block) { |
| 2273 auto index = block->loop_header(); | 2588 auto index = block->loop_header(); |
| 2274 if (!index.IsValid()) return nullptr; | 2589 if (!index.IsValid()) return nullptr; |
| 2275 return sequence->InstructionBlockAt(index); | 2590 return sequence->InstructionBlockAt(index); |
| 2276 } | 2591 } |
| 2277 | 2592 |
| 2278 | 2593 |
| 2279 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( | 2594 LifetimePosition RegisterAllocatorLinear::FindOptimalSpillingPos( |
| 2280 LiveRange* range, LifetimePosition pos) { | 2595 LiveRange* range, LifetimePosition pos) { |
| 2281 auto block = GetInstructionBlock(pos.Start()); | 2596 auto block = GetInstructionBlock(pos.Start()); |
| 2282 auto loop_header = | 2597 auto loop_header = |
| 2283 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); | 2598 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); |
| 2284 | 2599 |
| 2285 if (loop_header == nullptr) return pos; | 2600 if (loop_header == nullptr) return pos; |
| 2286 | 2601 |
| 2287 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); | 2602 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); |
| 2288 | 2603 |
| 2289 while (loop_header != nullptr) { | 2604 while (loop_header != nullptr) { |
| (...skipping 11 matching lines...) | |
| 2301 } | 2616 } |
| 2302 | 2617 |
| 2303 // Try hoisting out to an outer loop. | 2618 // Try hoisting out to an outer loop. |
| 2304 loop_header = GetContainingLoop(code(), loop_header); | 2619 loop_header = GetContainingLoop(code(), loop_header); |
| 2305 } | 2620 } |
| 2306 | 2621 |
| 2307 return pos; | 2622 return pos; |
| 2308 } | 2623 } |
| 2309 | 2624 |
| 2310 | 2625 |
| 2311 void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) { | 2626 void RegisterAllocatorLinear::SplitAndSpillIntersecting(LiveRange* current) { |
| 2312 DCHECK(current->HasRegisterAssigned()); | 2627 DCHECK(current->HasRegisterAssigned()); |
| 2313 int reg = current->assigned_register(); | 2628 int reg = current->assigned_register(); |
| 2314 auto split_pos = current->Start(); | 2629 auto split_pos = current->Start(); |
| 2315 for (size_t i = 0; i < active_live_ranges().size(); ++i) { | 2630 for (size_t i = 0; i < active_live_ranges().size(); ++i) { |
| 2316 auto range = active_live_ranges()[i]; | 2631 auto range = active_live_ranges()[i]; |
| 2317 if (range->assigned_register() == reg) { | 2632 if (range->assigned_register() == reg) { |
| 2318 auto next_pos = range->NextRegisterPosition(current->Start()); | 2633 auto next_pos = range->NextRegisterPosition(current->Start()); |
| 2319 auto spill_pos = FindOptimalSpillingPos(range, split_pos); | 2634 auto spill_pos = FindOptimalSpillingPos(range, split_pos); |
| 2320 if (next_pos == nullptr) { | 2635 if (next_pos == nullptr) { |
| 2321 SpillAfter(range, spill_pos); | 2636 SpillAfter(range, spill_pos); |
| (...skipping 103 matching lines...) | |
| 2425 | 2740 |
| 2426 // We did not find any suitable outer loop. Split at the latest possible | 2741 // We did not find any suitable outer loop. Split at the latest possible |
| 2427 // position unless end_block is a loop header itself. | 2742 // position unless end_block is a loop header itself. |
| 2428 if (block == end_block && !end_block->IsLoopHeader()) return end; | 2743 if (block == end_block && !end_block->IsLoopHeader()) return end; |
| 2429 | 2744 |
| 2430 return LifetimePosition::GapFromInstructionIndex( | 2745 return LifetimePosition::GapFromInstructionIndex( |
| 2431 block->first_instruction_index()); | 2746 block->first_instruction_index()); |
| 2432 } | 2747 } |
| 2433 | 2748 |
| 2434 | 2749 |
| 2435 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { | 2750 void RegisterAllocatorLinear::SpillAfter(LiveRange* range, |
| 2751 LifetimePosition pos) { | |
| 2436 auto second_part = SplitRangeAt(range, pos); | 2752 auto second_part = SplitRangeAt(range, pos); |
| 2437 Spill(second_part); | 2753 Spill(second_part); |
| 2438 } | 2754 } |
| 2439 | 2755 |
| 2440 | 2756 |
| 2441 void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, | 2757 void RegisterAllocatorLinear::SpillBetween(LiveRange* range, |
| 2442 LifetimePosition end) { | 2758 LifetimePosition start, |
| 2759 LifetimePosition end) { | |
| 2443 SpillBetweenUntil(range, start, start, end); | 2760 SpillBetweenUntil(range, start, start, end); |
| 2444 } | 2761 } |
| 2445 | 2762 |
| 2446 | 2763 |
| 2447 void RegisterAllocator::SpillBetweenUntil(LiveRange* range, | 2764 void RegisterAllocatorLinear::SpillBetweenUntil(LiveRange* range, |
| 2448 LifetimePosition start, | 2765 LifetimePosition start, |
| 2449 LifetimePosition until, | 2766 LifetimePosition until, |
| 2450 LifetimePosition end) { | 2767 LifetimePosition end) { |
| 2451 CHECK(start.Value() < end.Value()); | 2768 CHECK(start.Value() < end.Value()); |
| 2452 auto second_part = SplitRangeAt(range, start); | 2769 auto second_part = SplitRangeAt(range, start); |
| 2453 | 2770 |
| 2454 if (second_part->Start().Value() < end.Value()) { | 2771 if (second_part->Start().Value() < end.Value()) { |
| 2455 // The split result intersects with [start, end[. | 2772 // The split result intersects with [start, end[. |
| 2456 // Split it at position between ]start+1, end[, spill the middle part | 2773 // Split it at position between ]start+1, end[, spill the middle part |
| 2457 // and put the rest to unhandled. | 2774 // and put the rest to unhandled. |
| 2458 auto third_part_end = end.PrevStart().End(); | 2775 auto third_part_end = end.PrevStart().End(); |
| 2459 if (IsBlockBoundary(end.Start())) { | 2776 if (IsBlockBoundary(end.Start())) { |
| 2460 third_part_end = end.Start(); | 2777 third_part_end = end.Start(); |
| 2461 } | 2778 } |
| 2462 auto third_part = SplitBetween( | 2779 auto third_part = SplitBetween( |
| 2463 second_part, Max(second_part->Start().End(), until), third_part_end); | 2780 second_part, Max(second_part->Start().End(), until), third_part_end); |
| 2464 | 2781 |
| 2465 DCHECK(third_part != second_part); | 2782 DCHECK(third_part != second_part); |
| 2466 | 2783 |
| 2467 Spill(second_part); | 2784 Spill(second_part); |
| 2468 AddToUnhandledSorted(third_part); | 2785 AddToUnhandledSorted(third_part); |
| 2469 } else { | 2786 } else { |
| 2470 // The split result does not intersect with [start, end[. | 2787 // The split result does not intersect with [start, end[. |
| 2471 // Nothing to spill. Just put it to unhandled as whole. | 2788 // Nothing to spill. Just put it to unhandled as whole. |
| 2472 AddToUnhandledSorted(second_part); | 2789 AddToUnhandledSorted(second_part); |
| 2473 } | 2790 } |
| 2474 } | 2791 } |
| 2475 | 2792 |
| 2793 LiveRange* RegisterAllocatorGreedy::SpillBetweenUntil(LiveRange* range, | |
| 2794 LifetimePosition start, | |
| 2795 LifetimePosition until, | |
| 2796 LifetimePosition end) { | |
| 2797 CHECK(start.Value() < end.Value()); | |
| 2798 auto second_part = SplitRangeAt(range, start); | |
| 2799 | |
| 2800 if (second_part->Start().Value() < end.Value()) { | |
| 2801 // The split result intersects with [start, end[. | |
| 2802 // Split it at position between ]start+1, end[, spill the middle part | |
| 2803 // and put the rest to unhandled. | |
| 2804 auto third_part_end = end.PrevStart().End(); | |
| 2805 if (IsBlockBoundary(end.Start())) { | |
| 2806 third_part_end = end.Start(); | |
| 2807 } | |
| 2808 auto third_part = SplitBetween( | |
| 2809 second_part, Max(second_part->Start().End(), until), third_part_end); | |
| 2810 | |
| 2811 DCHECK(third_part != second_part); | |
| 2812 | |
| 2813 Spill(second_part); | |
| 2814 return third_part; | |
| 2815 } else { | |
| 2816 // The split result does not intersect with [start, end[. | |
| 2817 // Nothing to spill. Just put it to unhandled as whole. | |
| 2818 return second_part; | |
| 2819 } | |
| 2820 } | |
| 2821 | |
| 2476 | 2822 |
| 2477 void RegisterAllocator::Spill(LiveRange* range) { | 2823 void RegisterAllocator::Spill(LiveRange* range) { |
| 2478 DCHECK(!range->IsSpilled()); | 2824 DCHECK(!range->IsSpilled()); |
| 2479 TRACE("Spilling live range %d\n", range->id()); | 2825 TRACE("Spilling live range %d\n", range->id()); |
| 2480 auto first = range->TopLevel(); | 2826 auto first = range->TopLevel(); |
| 2481 if (first->HasNoSpillType()) { | 2827 if (first->HasNoSpillType()) { |
| 2482 AssignSpillRangeToLiveRange(first); | 2828 AssignSpillRangeToLiveRange(first); |
| 2483 } | 2829 } |
| 2484 range->MakeSpilled(); | 2830 range->MakeSpilled(); |
| 2485 } | 2831 } |
| 2486 | 2832 |
| 2487 | 2833 |
| 2488 int RegisterAllocator::RegisterCount() const { return num_registers_; } | 2834 int RegisterAllocatorLinear::RegisterCount() const { return num_registers_; } |
| 2489 | 2835 |
| 2490 | 2836 |
| 2491 #ifdef DEBUG | 2837 #ifdef DEBUG |
| 2492 | 2838 |
| 2493 | 2839 |
| 2494 void RegisterAllocator::Verify() const { | 2840 void RegisterAllocatorLinear::Verify() const { |
| 2495 for (auto current : live_ranges()) { | 2841 for (auto current : live_ranges()) { |
| 2496 if (current != nullptr) current->Verify(); | 2842 if (current != nullptr) current->Verify(); |
| 2497 } | 2843 } |
| 2498 } | 2844 } |
| 2499 | 2845 |
| 2500 | 2846 |
| 2501 #endif | 2847 #endif |
| 2502 | 2848 |
| 2503 | 2849 |
| 2504 void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range, | 2850 void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range, |
| 2505 int reg) { | 2851 int reg) { |
| 2506 if (range->Kind() == DOUBLE_REGISTERS) { | 2852 if (range->Kind() == DOUBLE_REGISTERS) { |
| 2507 assigned_double_registers_->Add(reg); | 2853 assigned_double_registers_->Add(reg); |
| 2508 } else { | 2854 } else { |
| 2509 DCHECK(range->Kind() == GENERAL_REGISTERS); | 2855 DCHECK(range->Kind() == GENERAL_REGISTERS); |
| 2510 assigned_registers_->Add(reg); | 2856 assigned_registers_->Add(reg); |
| 2511 } | 2857 } |
| 2512 range->set_assigned_register(reg); | 2858 range->set_assigned_register(reg); |
| 2513 auto assignment = range->GetAssignedOperand(); | 2859 auto assignment = range->GetAssignedOperand(); |
| 2514 // TODO(dcarney): stop aliasing hint operands. | 2860 // TODO(dcarney): stop aliasing hint operands. |
| 2515 range->ConvertUsesToOperand(assignment, nullptr); | 2861 range->ConvertUsesToOperand(assignment, nullptr); |
| 2516 if (range->is_phi()) AssignPhiInput(range, assignment); | 2862 if (range->is_phi()) AssignPhiInput(range, assignment); |
| 2517 } | 2863 } |
| 2518 | 2864 |
| 2519 } // namespace compiler | 2865 } // namespace compiler |
| 2520 } // namespace internal | 2866 } // namespace internal |
| 2521 } // namespace v8 | 2867 } // namespace v8 |