Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(50)

Side by Side Diff: src/compiler/register-allocator.cc

Issue 1061923005: Reland: Introducing the LLVM greedy register allocator. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/compiler/register-allocator.h ('k') | src/flag-definitions.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler/linkage.h" 5 #include "src/compiler/linkage.h"
6 #include "src/compiler/register-allocator.h" 6 #include "src/compiler/register-allocator.h"
7 #include "src/string-stream.h" 7 #include "src/string-stream.h"
8 8
9 namespace v8 { 9 namespace v8 {
10 namespace internal { 10 namespace internal {
11 namespace compiler { 11 namespace compiler {
12 12
13
13 #define TRACE(...) \ 14 #define TRACE(...) \
14 do { \ 15 do { \
15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ 16 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
16 } while (false) 17 } while (false)
17 18
19
20 class RegisterAllocationInfo : public ZoneObject {
dcarney 2015/04/19 12:21:01 this is a pretty generic name, to be avoided if possible
Mircea Trofin 2015/04/19 16:13:38 I share the sentiment :) I'll try and find a better name.
21 public:
22 explicit RegisterAllocationInfo(Zone* zone) : storage_(zone) {}
23
24 bool Find(UseInterval* query, LiveRange** result) {
dcarney 2015/04/19 12:21:01 what about just returning a live range that could
Mircea Trofin 2015/04/19 16:13:38 I agree - changed it.
25 ZoneSplayTree<Config>::Locator locator;
26 bool ret = storage_.Find(GetKey(query), &locator);
27 if (ret) {
28 *result = locator.value();
29 }
30 return ret;
31 }
32
33 bool Insert(LiveRange* range) {
34 auto* interval = range->first_interval();
35 while (interval != nullptr) {
36 if (!Insert(interval, range)) return false;
dcarney 2015/04/19 12:21:01 if i'm reading the rest of this cl correctly, this
Mircea Trofin 2015/04/19 16:13:38 SplayTree.Insert returns false if we already inser
37 interval = interval->next();
38 }
39 return true;
40 }
41
42 bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); }
dcarney 2015/04/19 12:21:01 this should probably be private since i can't imag
Mircea Trofin 2015/04/19 16:13:38 Done.
43
44 bool Remove(LiveRange* range) {
45 bool ret = false;
46 auto* segment = range->first_interval();
47 while (segment != nullptr) {
48 ret |= Remove(segment);
dcarney 2015/04/19 12:21:01 again, the intervals must all be in there somewher
Mircea Trofin 2015/04/19 16:13:38 I added that logic in the uses of the API, to avoi
49 segment = segment->next();
50 }
51 return ret;
52 }
53
54 bool IsEmpty() { return storage_.is_empty(); }
55
56 private:
57 struct Config {
58 typedef std::pair<int, int> Key;
59 typedef LiveRange* Value;
60 static const Key kNoKey;
61 static Value NoValue() { return nullptr; }
62 static int Compare(const Key& a, const Key& b) {
63 if (a.second <= b.first) return -1;
64 if (a.first >= b.second) return 1;
65 return 0;
66 }
67 };
68
69 Config::Key GetKey(UseInterval* interval) {
70 if (!interval) return std::make_pair(0, 0);
dcarney 2015/04/19 12:21:01 interval != nullptr
71 return std::make_pair(interval->start().Value(), interval->end().Value());
72 }
73 bool Insert(UseInterval* interval, LiveRange* range) {
74 ZoneSplayTree<Config>::Locator locator;
75 bool ret = storage_.Insert(GetKey(interval), &locator);
dcarney 2015/04/19 12:21:01 here you should be able to DCHEK ret as well, no?
Mircea Trofin 2015/04/19 16:13:38 No, it just means we'd already inserted that. This
76 if (ret) locator.set_value(range);
77 return ret;
78 }
79
80 ZoneSplayTree<Config> storage_;
81 DISALLOW_COPY_AND_ASSIGN(RegisterAllocationInfo);
82 };
83
84
85 const std::pair<int, int> RegisterAllocationInfo::Config::kNoKey =
86 std::make_pair<int, int>(0, 0);
87
88
18 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { 89 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
19 return a.Value() < b.Value() ? a : b; 90 return a.Value() < b.Value() ? a : b;
20 } 91 }
21 92
22 93
23 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { 94 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
24 return a.Value() > b.Value() ? a : b; 95 return a.Value() > b.Value() ? a : b;
25 } 96 }
26 97
27 98
(...skipping 546 matching lines...) Expand 10 before | Expand all | Expand 10 after
574 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, 645 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config,
575 Zone* zone, Frame* frame, 646 Zone* zone, Frame* frame,
576 InstructionSequence* code, 647 InstructionSequence* code,
577 const char* debug_name) 648 const char* debug_name)
578 : local_zone_(zone), 649 : local_zone_(zone),
579 frame_(frame), 650 frame_(frame),
580 code_(code), 651 code_(code),
581 debug_name_(debug_name), 652 debug_name_(debug_name),
582 config_(config), 653 config_(config),
583 phi_map_(local_zone()), 654 phi_map_(local_zone()),
584 live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()),
585 live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()), 655 live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()),
586 fixed_live_ranges_(this->config()->num_general_registers(), nullptr, 656 fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
587 local_zone()), 657 local_zone()),
588 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr, 658 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
589 local_zone()), 659 local_zone()),
660 spill_ranges_(local_zone()),
661 live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()) {
662 spill_ranges().reserve(8);
663 assigned_registers_ =
664 new (code_zone()) BitVector(config->num_general_registers(), code_zone());
665 assigned_double_registers_ = new (code_zone())
666 BitVector(config->num_aliased_double_registers(), code_zone());
667 frame->SetAllocatedRegisters(assigned_registers_);
668 frame->SetAllocatedDoubleRegisters(assigned_double_registers_);
669 }
670
671 LinearScanAllocator::LinearScanAllocator(const RegisterConfiguration* config,
672 Zone* zone, Frame* frame,
673 InstructionSequence* code,
674 const char* debug_name)
675 : RegisterAllocator(config, zone, frame, code, debug_name),
590 unhandled_live_ranges_(local_zone()), 676 unhandled_live_ranges_(local_zone()),
591 active_live_ranges_(local_zone()), 677 active_live_ranges_(local_zone()),
592 inactive_live_ranges_(local_zone()), 678 inactive_live_ranges_(local_zone()),
593 spill_ranges_(local_zone()),
594 mode_(UNALLOCATED_REGISTERS), 679 mode_(UNALLOCATED_REGISTERS),
595 num_registers_(-1) { 680 num_registers_(-1) {
596 DCHECK(this->config()->num_general_registers() <= 681 DCHECK(this->config()->num_general_registers() <=
597 RegisterConfiguration::kMaxGeneralRegisters); 682 RegisterConfiguration::kMaxGeneralRegisters);
598 DCHECK(this->config()->num_double_registers() <= 683 DCHECK(this->config()->num_double_registers() <=
599 RegisterConfiguration::kMaxDoubleRegisters); 684 RegisterConfiguration::kMaxDoubleRegisters);
600 // TryAllocateFreeReg and AllocateBlockedReg assume this 685 // TryAllocateFreeReg and AllocateBlockedReg assume this
601 // when allocating local arrays. 686 // when allocating local arrays.
602 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= 687 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >=
603 this->config()->num_general_registers()); 688 this->config()->num_general_registers());
604 unhandled_live_ranges().reserve( 689 unhandled_live_ranges().reserve(
605 static_cast<size_t>(code->VirtualRegisterCount() * 2)); 690 static_cast<size_t>(code->VirtualRegisterCount() * 2));
606 active_live_ranges().reserve(8); 691 active_live_ranges().reserve(8);
607 inactive_live_ranges().reserve(8); 692 inactive_live_ranges().reserve(8);
608 spill_ranges().reserve(8);
609 assigned_registers_ =
610 new (code_zone()) BitVector(config->num_general_registers(), code_zone());
611 assigned_double_registers_ = new (code_zone())
612 BitVector(config->num_aliased_double_registers(), code_zone());
613 frame->SetAllocatedRegisters(assigned_registers_);
614 frame->SetAllocatedDoubleRegisters(assigned_double_registers_);
615 } 693 }
616 694
617 695
618 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) { 696 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) {
619 // Compute live out for the given block, except not including backward 697 // Compute live out for the given block, except not including backward
620 // successor edges. 698 // successor edges.
621 auto live_out = new (local_zone()) 699 auto live_out = new (local_zone())
622 BitVector(code()->VirtualRegisterCount(), local_zone()); 700 BitVector(code()->VirtualRegisterCount(), local_zone());
623 701
624 // Process all successor blocks. 702 // Process all successor blocks.
(...skipping 341 matching lines...) Expand 10 before | Expand all | Expand 10 after
966 } 1044 }
967 1045
968 1046
969 SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) { 1047 SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) {
970 auto spill_range = new (local_zone()) SpillRange(range, local_zone()); 1048 auto spill_range = new (local_zone()) SpillRange(range, local_zone());
971 spill_ranges().push_back(spill_range); 1049 spill_ranges().push_back(spill_range);
972 return spill_range; 1050 return spill_range;
973 } 1051 }
974 1052
975 1053
976 bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) { 1054 bool LinearScanAllocator::TryReuseSpillForPhi(LiveRange* range) {
977 if (range->IsChild() || !range->is_phi()) return false; 1055 if (range->IsChild() || !range->is_phi()) return false;
978 DCHECK(!range->HasSpillOperand()); 1056 DCHECK(!range->HasSpillOperand());
979 1057
980 auto lookup = phi_map_.find(range->id()); 1058 auto lookup = phi_map().find(range->id());
981 DCHECK(lookup != phi_map_.end()); 1059 DCHECK(lookup != phi_map().end());
982 auto phi = lookup->second->phi; 1060 auto phi = lookup->second->phi;
983 auto block = lookup->second->block; 1061 auto block = lookup->second->block;
984 // Count the number of spilled operands. 1062 // Count the number of spilled operands.
985 size_t spilled_count = 0; 1063 size_t spilled_count = 0;
986 LiveRange* first_op = nullptr; 1064 LiveRange* first_op = nullptr;
987 for (size_t i = 0; i < phi->operands().size(); i++) { 1065 for (size_t i = 0; i < phi->operands().size(); i++) {
988 int op = phi->operands()[i]; 1066 int op = phi->operands()[i];
989 LiveRange* op_range = LiveRangeFor(op); 1067 LiveRange* op_range = LiveRangeFor(op);
990 if (op_range->GetSpillRange() == nullptr) continue; 1068 if (op_range->GetSpillRange() == nullptr) continue;
991 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); 1069 auto pred = code()->InstructionBlockAt(block->predecessors()[i]);
(...skipping 806 matching lines...) Expand 10 before | Expand all | Expand 10 after
1798 bool RegisterAllocator::SafePointsAreInOrder() const { 1876 bool RegisterAllocator::SafePointsAreInOrder() const {
1799 int safe_point = 0; 1877 int safe_point = 0;
1800 for (auto map : *code()->reference_maps()) { 1878 for (auto map : *code()->reference_maps()) {
1801 if (safe_point > map->instruction_position()) return false; 1879 if (safe_point > map->instruction_position()) return false;
1802 safe_point = map->instruction_position(); 1880 safe_point = map->instruction_position();
1803 } 1881 }
1804 return true; 1882 return true;
1805 } 1883 }
1806 1884
1807 1885
1886 unsigned GreedyAllocator::GetLiveRangeSize(LiveRange* range) {
1887 auto interval = range->first_interval();
1888 if (interval == nullptr) return 0;
1889
1890 unsigned size = 0;
1891 while (interval != nullptr) {
1892 size += (interval->end().Value() - interval->start().Value());
1893 interval = interval->next();
1894 }
1895
1896 DCHECK(size);
dcarney 2015/04/19 12:21:01 size != 0
Mircea Trofin 2015/04/19 16:13:38 not DCHECK_NE(0, size)? I went with this, in the s
dcarney 2015/04/19 18:56:54 yeah, DCHECK_NE is better if it compiles. for siz
1897 return size;
1898 }
1899
1900
1901 void GreedyAllocator::Enqueue(LiveRange* range) {
1902 if (!range || range->IsEmpty()) return;
dcarney 2015/04/19 12:21:01 range == nullptr
Mircea Trofin 2015/04/19 16:13:38 Done.
1903 unsigned size = GetLiveRangeSize(range);
1904 queue_.push(std::make_pair(size, range));
1905 }
1906
1907
1908 // TODO(mtrofin): consolidate with identical code segment in
1909 // LinearScanAllocator::AllocateRegisters
1910 bool GreedyAllocator::HandleSpillOperands(LiveRange* range) {
1911 auto position = range->Start();
1912 TRACE("Processing interval %d start=%d\n", range->id(), position.Value());
1913
1914 if (!range->HasNoSpillType()) {
1915 TRACE("Live range %d already has a spill operand\n", range->id());
1916 auto next_pos = position;
1917 if (next_pos.IsGapPosition()) {
1918 next_pos = next_pos.NextStart();
1919 }
1920 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
1921 // If the range already has a spill operand and it doesn't need a
1922 // register immediately, split it and spill the first part of the range.
1923 if (pos == nullptr) {
1924 Spill(range);
1925 return true;
1926 } else if (pos->pos().Value() > range->Start().NextStart().Value()) {
1927 // Do not spill live range eagerly if use position that can benefit from
1928 // the register is too close to the start of live range.
1929 auto* reminder = SpillBetweenUntil(range, position, position, pos->pos());
1930 Enqueue(reminder);
1931 return true;
1932 }
1933 }
1934 return false;
1935 // TODO(mtrofin): Do we need this?
1936 // return (TryReuseSpillForPhi(range));
1937 }
1938
1939
1940 void GreedyAllocator::AllocateRegisters(RegisterKind mode) {
1941 // TODO(mtrofin): support for double registers
1942 DCHECK_EQ(GENERAL_REGISTERS, mode);
1943
1944 for (auto range : live_ranges()) {
1945 if (range == nullptr) continue;
1946 if (range->Kind() == mode) {
1947 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
1948 TRACE("Enqueueing live range %d to priority queue \n", range->id());
1949 Enqueue(range);
1950 }
1951 }
1952
1953 int reg_count = mode == GENERAL_REGISTERS ? config()->num_general_registers()
dcarney 2015/04/19 12:21:01 maybe you want to use num_registers_ like in the l
Mircea Trofin 2015/04/19 16:13:38 I am a bit uneasy with the existing pattern, actua
dcarney 2015/04/19 18:56:54 the current design splits the allocation of double
1954 : config()->num_double_registers();
1955
1956 allocations_.resize(reg_count);
1957 for (int i = 0; i < reg_count; i++) {
1958 allocations_[i] = new (local_zone()) RegisterAllocationInfo(local_zone());
1959 }
1960
1961 for (auto* current : fixed_live_ranges()) {
dcarney 2015/04/19 12:21:01 i think if you just switch between fixed_live_rang
Mircea Trofin 2015/04/19 16:13:38 Done - something along those lines.
1962 if (current != nullptr) {
1963 int reg_nr = current->assigned_register();
1964 bool inserted = allocations_[reg_nr]->Insert(current);
1965 CHECK(inserted);
1966 }
1967 }
1968
1969 while (!queue_.empty()) {
1970 auto current_pair = queue_.top();
1971 queue_.pop();
1972 auto current = current_pair.second;
1973 if (HandleSpillOperands(current)) continue;
1974 ZoneSet<LiveRange*> conflicting(local_zone());
1975 if (!TryAllocate(current, &conflicting)) {
1976 DCHECK(conflicting.size());
1977 float this_max = CalculateSpillWeight(current);
1978 float max_conflicting = CalculateMaxSpillWeight(conflicting);
1979 if (max_conflicting < this_max) {
1980 for (auto* conflict : conflicting) {
1981 Evict(conflict);
1982 Enqueue(conflict);
1983 }
1984 conflicting.clear();
1985 bool allocated = TryAllocate(current, &conflicting);
1986 CHECK(allocated);
1987 DCHECK_EQ(0, conflicting.size());
1988 } else {
1989 DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start()));
1990 bool allocated = AllocateBlockedRange(current, conflicting);
1991 CHECK(allocated);
1992 }
1993 }
1994 }
1995 for (size_t i = 0; i < allocations_.size(); ++i) {
1996 if (!allocations_[i]->IsEmpty()) {
1997 assigned_registers()->Add(i);
1998 }
1999 }
2000 }
2001
2002
2003 bool GreedyAllocator::AllocateBlockedRange(
2004 LiveRange* current, const ZoneSet<LiveRange*>& conflicts) {
2005 auto register_use = current->NextRegisterPosition(current->Start());
2006 if (register_use == nullptr) {
2007 // There is no use in the current live range that requires a register.
2008 // We can just spill it.
2009 Spill(current);
2010 return true;
2011 }
2012
2013 auto second_part = SplitRangeAt(current, register_use->pos());
2014 Spill(second_part);
2015
2016 return true;
2017 }
2018
2019
1808 void RegisterAllocator::PopulateReferenceMaps() { 2020 void RegisterAllocator::PopulateReferenceMaps() {
1809 DCHECK(SafePointsAreInOrder()); 2021 DCHECK(SafePointsAreInOrder());
1810 2022
1811 // Iterate over all safe point positions and record a pointer 2023 // Iterate over all safe point positions and record a pointer
1812 // for all spilled live ranges at this point. 2024 // for all spilled live ranges at this point.
1813 int last_range_start = 0; 2025 int last_range_start = 0;
1814 auto reference_maps = code()->reference_maps(); 2026 auto reference_maps = code()->reference_maps();
1815 ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); 2027 ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
1816 for (LiveRange* range : live_ranges()) { 2028 for (LiveRange* range : live_ranges()) {
1817 if (range == nullptr) continue; 2029 if (range == nullptr) continue;
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after
1879 cur->id(), cur->Start().Value(), safe_point); 2091 cur->id(), cur->Start().Value(), safe_point);
1880 auto operand = cur->GetAssignedOperand(); 2092 auto operand = cur->GetAssignedOperand();
1881 DCHECK(!operand.IsStackSlot()); 2093 DCHECK(!operand.IsStackSlot());
1882 map->RecordReference(operand); 2094 map->RecordReference(operand);
1883 } 2095 }
1884 } 2096 }
1885 } 2097 }
1886 } 2098 }
1887 2099
1888 2100
1889 void RegisterAllocator::AllocateGeneralRegisters() { 2101 void LinearScanAllocator::AllocateGeneralRegisters() {
1890 num_registers_ = config()->num_general_registers(); 2102 num_registers_ = config()->num_general_registers();
1891 mode_ = GENERAL_REGISTERS; 2103 mode_ = GENERAL_REGISTERS;
1892 AllocateRegisters(); 2104 AllocateRegisters();
1893 } 2105 }
1894 2106
1895 2107
1896 void RegisterAllocator::AllocateDoubleRegisters() { 2108 void LinearScanAllocator::AllocateDoubleRegisters() {
1897 num_registers_ = config()->num_aliased_double_registers(); 2109 num_registers_ = config()->num_aliased_double_registers();
1898 mode_ = DOUBLE_REGISTERS; 2110 mode_ = DOUBLE_REGISTERS;
1899 AllocateRegisters(); 2111 AllocateRegisters();
1900 } 2112 }
1901 2113
1902 2114
1903 void RegisterAllocator::AllocateRegisters() { 2115 void LinearScanAllocator::AllocateRegisters() {
1904 DCHECK(unhandled_live_ranges().empty()); 2116 DCHECK(unhandled_live_ranges().empty());
1905 2117
1906 for (auto range : live_ranges()) { 2118 for (auto range : live_ranges()) {
1907 if (range == nullptr) continue; 2119 if (range == nullptr) continue;
1908 if (range->Kind() == mode_) { 2120 if (range->Kind() == mode_) {
1909 AddToUnhandledUnsorted(range); 2121 AddToUnhandledUnsorted(range);
1910 } 2122 }
1911 } 2123 }
1912 SortUnhandled(); 2124 SortUnhandled();
1913 DCHECK(UnhandledIsSorted()); 2125 DCHECK(UnhandledIsSorted());
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after
1994 if (current->HasRegisterAssigned()) { 2206 if (current->HasRegisterAssigned()) {
1995 AddToActive(current); 2207 AddToActive(current);
1996 } 2208 }
1997 } 2209 }
1998 2210
1999 active_live_ranges().clear(); 2211 active_live_ranges().clear();
2000 inactive_live_ranges().clear(); 2212 inactive_live_ranges().clear();
2001 } 2213 }
2002 2214
2003 2215
2004 const char* RegisterAllocator::RegisterName(int allocation_index) { 2216 const char* LinearScanAllocator::RegisterName(int allocation_index) {
2005 if (mode_ == GENERAL_REGISTERS) { 2217 if (mode_ == GENERAL_REGISTERS) {
2006 return config()->general_register_name(allocation_index); 2218 return config()->general_register_name(allocation_index);
2007 } else { 2219 } else {
2008 return config()->double_register_name(allocation_index); 2220 return config()->double_register_name(allocation_index);
2009 } 2221 }
2010 } 2222 }
2011 2223
2012 2224
2013 bool RegisterAllocator::HasTaggedValue(int virtual_register) const { 2225 bool RegisterAllocator::HasTaggedValue(int virtual_register) const {
2014 return code()->IsReference(virtual_register); 2226 return code()->IsReference(virtual_register);
2015 } 2227 }
2016 2228
2017 2229
2018 RegisterKind RegisterAllocator::RequiredRegisterKind( 2230 RegisterKind RegisterAllocator::RequiredRegisterKind(
2019 int virtual_register) const { 2231 int virtual_register) const {
2020 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS 2232 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS
2021 : GENERAL_REGISTERS; 2233 : GENERAL_REGISTERS;
2022 } 2234 }
2023 2235
2024 2236
2025 void RegisterAllocator::AddToActive(LiveRange* range) { 2237 void LinearScanAllocator::AddToActive(LiveRange* range) {
2026 TRACE("Add live range %d to active\n", range->id()); 2238 TRACE("Add live range %d to active\n", range->id());
2027 active_live_ranges().push_back(range); 2239 active_live_ranges().push_back(range);
2028 } 2240 }
2029 2241
2030 2242
2031 void RegisterAllocator::AddToInactive(LiveRange* range) { 2243 void LinearScanAllocator::AddToInactive(LiveRange* range) {
2032 TRACE("Add live range %d to inactive\n", range->id()); 2244 TRACE("Add live range %d to inactive\n", range->id());
2033 inactive_live_ranges().push_back(range); 2245 inactive_live_ranges().push_back(range);
2034 } 2246 }
2035 2247
2036 2248
2037 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { 2249 void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
2038 if (range == nullptr || range->IsEmpty()) return; 2250 if (range == nullptr || range->IsEmpty()) return;
2039 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); 2251 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
2040 DCHECK(allocation_finger_.Value() <= range->Start().Value()); 2252 DCHECK(allocation_finger_.Value() <= range->Start().Value());
2041 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; 2253 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0;
2042 --i) { 2254 --i) {
2043 auto cur_range = unhandled_live_ranges().at(i); 2255 auto cur_range = unhandled_live_ranges().at(i);
2044 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; 2256 if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
2045 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1); 2257 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1);
2046 auto it = unhandled_live_ranges().begin() + (i + 1); 2258 auto it = unhandled_live_ranges().begin() + (i + 1);
2047 unhandled_live_ranges().insert(it, range); 2259 unhandled_live_ranges().insert(it, range);
2048 DCHECK(UnhandledIsSorted()); 2260 DCHECK(UnhandledIsSorted());
2049 return; 2261 return;
2050 } 2262 }
2051 TRACE("Add live range %d to unhandled at start\n", range->id()); 2263 TRACE("Add live range %d to unhandled at start\n", range->id());
2052 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); 2264 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
2053 DCHECK(UnhandledIsSorted()); 2265 DCHECK(UnhandledIsSorted());
2054 } 2266 }
2055 2267
2056 2268
2057 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { 2269 void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
2058 if (range == nullptr || range->IsEmpty()) return; 2270 if (range == nullptr || range->IsEmpty()) return;
2059 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); 2271 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
2060 TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); 2272 TRACE("Add live range %d to unhandled unsorted at end\n", range->id());
2061 unhandled_live_ranges().push_back(range); 2273 unhandled_live_ranges().push_back(range);
2062 } 2274 }
2063 2275
2064 2276
2065 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { 2277 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
2066 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); 2278 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
2067 if (a->ShouldBeAllocatedBefore(b)) return false; 2279 if (a->ShouldBeAllocatedBefore(b)) return false;
2068 if (b->ShouldBeAllocatedBefore(a)) return true; 2280 if (b->ShouldBeAllocatedBefore(a)) return true;
2069 return a->id() < b->id(); 2281 return a->id() < b->id();
2070 } 2282 }
2071 2283
2072 2284
2073 // Sort the unhandled live ranges so that the ranges to be processed first are 2285 // Sort the unhandled live ranges so that the ranges to be processed first are
2074 // at the end of the array list. This is convenient for the register allocation 2286 // at the end of the array list. This is convenient for the register allocation
2075 // algorithm because it is efficient to remove elements from the end. 2287 // algorithm because it is efficient to remove elements from the end.
2076 void RegisterAllocator::SortUnhandled() { 2288 void LinearScanAllocator::SortUnhandled() {
2077 TRACE("Sort unhandled\n"); 2289 TRACE("Sort unhandled\n");
2078 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), 2290 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
2079 &UnhandledSortHelper); 2291 &UnhandledSortHelper);
2080 } 2292 }
2081 2293
2082 2294
2083 bool RegisterAllocator::UnhandledIsSorted() { 2295 bool LinearScanAllocator::UnhandledIsSorted() {
2084 size_t len = unhandled_live_ranges().size(); 2296 size_t len = unhandled_live_ranges().size();
2085 for (size_t i = 1; i < len; i++) { 2297 for (size_t i = 1; i < len; i++) {
2086 auto a = unhandled_live_ranges().at(i - 1); 2298 auto a = unhandled_live_ranges().at(i - 1);
2087 auto b = unhandled_live_ranges().at(i); 2299 auto b = unhandled_live_ranges().at(i);
2088 if (a->Start().Value() < b->Start().Value()) return false; 2300 if (a->Start().Value() < b->Start().Value()) return false;
2089 } 2301 }
2090 return true; 2302 return true;
2091 } 2303 }
2092 2304
2093 2305
2094 void RegisterAllocator::ActiveToHandled(LiveRange* range) { 2306 void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
2095 RemoveElement(&active_live_ranges(), range); 2307 RemoveElement(&active_live_ranges(), range);
2096 TRACE("Moving live range %d from active to handled\n", range->id()); 2308 TRACE("Moving live range %d from active to handled\n", range->id());
2097 } 2309 }
2098 2310
2099 2311
2100 void RegisterAllocator::ActiveToInactive(LiveRange* range) { 2312 void LinearScanAllocator::ActiveToInactive(LiveRange* range) {
2101 RemoveElement(&active_live_ranges(), range); 2313 RemoveElement(&active_live_ranges(), range);
2102 inactive_live_ranges().push_back(range); 2314 inactive_live_ranges().push_back(range);
2103 TRACE("Moving live range %d from active to inactive\n", range->id()); 2315 TRACE("Moving live range %d from active to inactive\n", range->id());
2104 } 2316 }
2105 2317
2106 2318
2107 void RegisterAllocator::InactiveToHandled(LiveRange* range) { 2319 void LinearScanAllocator::InactiveToHandled(LiveRange* range) {
2108 RemoveElement(&inactive_live_ranges(), range); 2320 RemoveElement(&inactive_live_ranges(), range);
2109 TRACE("Moving live range %d from inactive to handled\n", range->id()); 2321 TRACE("Moving live range %d from inactive to handled\n", range->id());
2110 } 2322 }
2111 2323
2112 2324
2113 void RegisterAllocator::InactiveToActive(LiveRange* range) { 2325 void LinearScanAllocator::InactiveToActive(LiveRange* range) {
2114 RemoveElement(&inactive_live_ranges(), range); 2326 RemoveElement(&inactive_live_ranges(), range);
2115 active_live_ranges().push_back(range); 2327 active_live_ranges().push_back(range);
2116 TRACE("Moving live range %d from inactive to active\n", range->id()); 2328 TRACE("Moving live range %d from inactive to active\n", range->id());
2117 } 2329 }
2118 2330
2119 2331
2120 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { 2332 GreedyAllocator::GreedyAllocator(const RegisterConfiguration* config,
2333 Zone* local_zone, Frame* frame,
2334 InstructionSequence* code,
2335 const char* debug_name)
2336 : RegisterAllocator(config, local_zone, frame, code, debug_name),
2337 allocations_(local_zone),
2338 queue_(local_zone) {}
2339
2340
2341 void GreedyAllocator::AllocateGeneralRegisters() {
2342 AllocateRegisters(RegisterKind::GENERAL_REGISTERS);
2343 }
2344
2345
2346 void GreedyAllocator::AllocateDoubleRegisters() {
2347 AllocateRegisters(RegisterKind::DOUBLE_REGISTERS);
2348 }
2349
2350
2351 void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) {
2352 allocations_[reg_id]->Insert(range);
2353 if (range->HasRegisterAssigned()) {
2354 DCHECK_EQ(reg_id, range->assigned_register());
2355 return;
2356 }
2357 range->set_assigned_register(reg_id);
2358 }
2359
2360
2361 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) {
2362 if (range->IsFixed()) return std::numeric_limits<float>::max();
2363
2364 if (range->FirstHint() != nullptr && range->FirstHint()->IsRegister()) {
2365 return std::numeric_limits<float>::max();
2366 }
2367
2368 unsigned use_count = 0;
2369 auto* pos = range->first_pos();
2370 while (pos != nullptr) {
2371 use_count++;
2372 pos = pos->next();
2373 }
2374
2375 // GetLiveRangeSize is DCHECK-ed to not be 0
2376 return static_cast<float>(use_count) /
2377 static_cast<float>(GetLiveRangeSize(range));
2378 }
2379
2380
2381 float GreedyAllocator::CalculateMaxSpillWeight(
2382 const ZoneSet<LiveRange*>& ranges) {
2383 float max = 0.0;
2384 for (auto* r : ranges) {
2385 max = std::max(max, CalculateSpillWeight(r));
2386 }
2387 return max;
2388 }
2389
2390
2391 void GreedyAllocator::Evict(LiveRange* range) {
2392 allocations_[range->assigned_register()]->Remove(range);
2393 }
2394
2395
2396 bool GreedyAllocator::TryAllocatePhysicalRegister(
2397 unsigned reg_id, LiveRange* range, ZoneSet<LiveRange*>* conflicting) {
2398 auto* segment = range->first_interval();
2399
2400 RegisterAllocationInfo* alloc_info = allocations_[reg_id];
2401 while (segment != nullptr) {
2402 LiveRange* existing;
2403 if (alloc_info->Find(segment, &existing)) {
2404 DCHECK(existing->HasRegisterAssigned());
2405 conflicting->insert(existing);
2406 }
2407 segment = segment->next();
2408 }
2409 if (!conflicting->empty()) return false;
2410 // No conflicts means we can safely allocate this register to this range.
2411 AssignRangeToRegister(reg_id, range);
2412 return true;
2413 }
2414
2415 bool GreedyAllocator::TryAllocate(LiveRange* current,
2416 ZoneSet<LiveRange*>* conflicting) {
2417 if (current->HasSpillOperand()) {
2418 Spill(current);
2419 return true;
2420 }
2421 if (current->IsFixed()) {
2422 return TryAllocatePhysicalRegister(current->assigned_register(), current,
2423 conflicting);
2424 }
2425
2426 if (current->HasRegisterAssigned()) {
2427 int reg_id = current->assigned_register();
2428 return TryAllocatePhysicalRegister(reg_id, current, conflicting);
2429 }
2430
2431 for (unsigned candidate_reg = 0; candidate_reg < allocations_.size();
2432 candidate_reg++) {
2433 if (TryAllocatePhysicalRegister(candidate_reg, current, conflicting)) {
2434 conflicting->clear();
2435 return true;
2436 }
2437 }
2438 return false;
2439 }
2440
2441
2442 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
2121 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; 2443 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters];
2122 2444
2123 for (int i = 0; i < num_registers_; i++) { 2445 for (int i = 0; i < num_registers_; i++) {
2124 free_until_pos[i] = LifetimePosition::MaxPosition(); 2446 free_until_pos[i] = LifetimePosition::MaxPosition();
2125 } 2447 }
2126 2448
2127 for (auto cur_active : active_live_ranges()) { 2449 for (auto cur_active : active_live_ranges()) {
2128 free_until_pos[cur_active->assigned_register()] = 2450 free_until_pos[cur_active->assigned_register()] =
2129 LifetimePosition::GapFromInstructionIndex(0); 2451 LifetimePosition::GapFromInstructionIndex(0);
2130 } 2452 }
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
2179 // the range end. 2501 // the range end.
2180 DCHECK(pos.Value() >= current->End().Value()); 2502 DCHECK(pos.Value() >= current->End().Value());
2181 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), 2503 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg),
2182 current->id()); 2504 current->id());
2183 SetLiveRangeAssignedRegister(current, reg); 2505 SetLiveRangeAssignedRegister(current, reg);
2184 2506
2185 return true; 2507 return true;
2186 } 2508 }
2187 2509
2188 2510
2189 void RegisterAllocator::AllocateBlockedReg(LiveRange* current) { 2511 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
2190 auto register_use = current->NextRegisterPosition(current->Start()); 2512 auto register_use = current->NextRegisterPosition(current->Start());
2191 if (register_use == nullptr) { 2513 if (register_use == nullptr) {
2192 // There is no use in the current live range that requires a register. 2514 // There is no use in the current live range that requires a register.
2193 // We can just spill it. 2515 // We can just spill it.
2194 Spill(current); 2516 Spill(current);
2195 return; 2517 return;
2196 } 2518 }
2197 2519
2198 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; 2520 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters];
2199 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; 2521 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters];
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
2269 2591
2270 2592
2271 static const InstructionBlock* GetContainingLoop( 2593 static const InstructionBlock* GetContainingLoop(
2272 const InstructionSequence* sequence, const InstructionBlock* block) { 2594 const InstructionSequence* sequence, const InstructionBlock* block) {
2273 auto index = block->loop_header(); 2595 auto index = block->loop_header();
2274 if (!index.IsValid()) return nullptr; 2596 if (!index.IsValid()) return nullptr;
2275 return sequence->InstructionBlockAt(index); 2597 return sequence->InstructionBlockAt(index);
2276 } 2598 }
2277 2599
2278 2600
2279 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( 2601 LifetimePosition LinearScanAllocator::FindOptimalSpillingPos(
2280 LiveRange* range, LifetimePosition pos) { 2602 LiveRange* range, LifetimePosition pos) {
2281 auto block = GetInstructionBlock(pos.Start()); 2603 auto block = GetInstructionBlock(pos.Start());
2282 auto loop_header = 2604 auto loop_header =
2283 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); 2605 block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
2284 2606
2285 if (loop_header == nullptr) return pos; 2607 if (loop_header == nullptr) return pos;
2286 2608
2287 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); 2609 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos);
2288 2610
2289 while (loop_header != nullptr) { 2611 while (loop_header != nullptr) {
(...skipping 11 matching lines...) Expand all
2301 } 2623 }
2302 2624
2303 // Try hoisting out to an outer loop. 2625 // Try hoisting out to an outer loop.
2304 loop_header = GetContainingLoop(code(), loop_header); 2626 loop_header = GetContainingLoop(code(), loop_header);
2305 } 2627 }
2306 2628
2307 return pos; 2629 return pos;
2308 } 2630 }
2309 2631
2310 2632
2311 void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) { 2633 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
2312 DCHECK(current->HasRegisterAssigned()); 2634 DCHECK(current->HasRegisterAssigned());
2313 int reg = current->assigned_register(); 2635 int reg = current->assigned_register();
2314 auto split_pos = current->Start(); 2636 auto split_pos = current->Start();
2315 for (size_t i = 0; i < active_live_ranges().size(); ++i) { 2637 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2316 auto range = active_live_ranges()[i]; 2638 auto range = active_live_ranges()[i];
2317 if (range->assigned_register() == reg) { 2639 if (range->assigned_register() == reg) {
2318 auto next_pos = range->NextRegisterPosition(current->Start()); 2640 auto next_pos = range->NextRegisterPosition(current->Start());
2319 auto spill_pos = FindOptimalSpillingPos(range, split_pos); 2641 auto spill_pos = FindOptimalSpillingPos(range, split_pos);
2320 if (next_pos == nullptr) { 2642 if (next_pos == nullptr) {
2321 SpillAfter(range, spill_pos); 2643 SpillAfter(range, spill_pos);
(...skipping 103 matching lines...) Expand 10 before | Expand all | Expand 10 after
2425 2747
2426 // We did not find any suitable outer loop. Split at the latest possible 2748 // We did not find any suitable outer loop. Split at the latest possible
2427 // position unless end_block is a loop header itself. 2749 // position unless end_block is a loop header itself.
2428 if (block == end_block && !end_block->IsLoopHeader()) return end; 2750 if (block == end_block && !end_block->IsLoopHeader()) return end;
2429 2751
2430 return LifetimePosition::GapFromInstructionIndex( 2752 return LifetimePosition::GapFromInstructionIndex(
2431 block->first_instruction_index()); 2753 block->first_instruction_index());
2432 } 2754 }
2433 2755
2434 2756
2435 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { 2757 void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2436 auto second_part = SplitRangeAt(range, pos); 2758 auto second_part = SplitRangeAt(range, pos);
2437 Spill(second_part); 2759 Spill(second_part);
2438 } 2760 }
2439 2761
2440 2762
2441 void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, 2763 void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
2442 LifetimePosition end) { 2764 LifetimePosition end) {
2443 SpillBetweenUntil(range, start, start, end); 2765 SpillBetweenUntil(range, start, start, end);
2444 } 2766 }
2445 2767
2446 2768
2447 void RegisterAllocator::SpillBetweenUntil(LiveRange* range, 2769 void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
2448 LifetimePosition start, 2770 LifetimePosition start,
2449 LifetimePosition until, 2771 LifetimePosition until,
2450 LifetimePosition end) { 2772 LifetimePosition end) {
2451 CHECK(start.Value() < end.Value()); 2773 CHECK(start.Value() < end.Value());
2452 auto second_part = SplitRangeAt(range, start); 2774 auto second_part = SplitRangeAt(range, start);
2453 2775
2454 if (second_part->Start().Value() < end.Value()) { 2776 if (second_part->Start().Value() < end.Value()) {
2455 // The split result intersects with [start, end[. 2777 // The split result intersects with [start, end[.
2456 // Split it at position between ]start+1, end[, spill the middle part 2778 // Split it at position between ]start+1, end[, spill the middle part
2457 // and put the rest to unhandled. 2779 // and put the rest to unhandled.
2458 auto third_part_end = end.PrevStart().End(); 2780 auto third_part_end = end.PrevStart().End();
2459 if (IsBlockBoundary(end.Start())) { 2781 if (IsBlockBoundary(end.Start())) {
2460 third_part_end = end.Start(); 2782 third_part_end = end.Start();
2461 } 2783 }
2462 auto third_part = SplitBetween( 2784 auto third_part = SplitBetween(
2463 second_part, Max(second_part->Start().End(), until), third_part_end); 2785 second_part, Max(second_part->Start().End(), until), third_part_end);
2464 2786
2465 DCHECK(third_part != second_part); 2787 DCHECK(third_part != second_part);
2466 2788
2467 Spill(second_part); 2789 Spill(second_part);
2468 AddToUnhandledSorted(third_part); 2790 AddToUnhandledSorted(third_part);
2469 } else { 2791 } else {
2470 // The split result does not intersect with [start, end[. 2792 // The split result does not intersect with [start, end[.
2471 // Nothing to spill. Just put it to unhandled as whole. 2793 // Nothing to spill. Just put it to unhandled as whole.
2472 AddToUnhandledSorted(second_part); 2794 AddToUnhandledSorted(second_part);
2473 } 2795 }
2474 } 2796 }
2475 2797
2798 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range,
2799 LifetimePosition start,
2800 LifetimePosition until,
2801 LifetimePosition end) {
2802 CHECK(start.Value() < end.Value());
2803 auto second_part = SplitRangeAt(range, start);
2804
2805 if (second_part->Start().Value() < end.Value()) {
2806 // The split result intersects with [start, end[.
2807 // Split it at position between ]start+1, end[, spill the middle part
2808 // and put the rest to unhandled.
2809 auto third_part_end = end.PrevStart().End();
2810 if (IsBlockBoundary(end.Start())) {
2811 third_part_end = end.Start();
2812 }
2813 auto third_part = SplitBetween(
2814 second_part, Max(second_part->Start().End(), until), third_part_end);
2815
2816 DCHECK(third_part != second_part);
2817
2818 Spill(second_part);
2819 return third_part;
2820 } else {
2821 // The split result does not intersect with [start, end[.
2822 // Nothing to spill. Just put it to unhandled as whole.
2823 return second_part;
2824 }
2825 }
2826
2476 2827
2477 void RegisterAllocator::Spill(LiveRange* range) { 2828 void RegisterAllocator::Spill(LiveRange* range) {
2478 DCHECK(!range->IsSpilled()); 2829 DCHECK(!range->IsSpilled());
2479 TRACE("Spilling live range %d\n", range->id()); 2830 TRACE("Spilling live range %d\n", range->id());
2480 auto first = range->TopLevel(); 2831 auto first = range->TopLevel();
2481 if (first->HasNoSpillType()) { 2832 if (first->HasNoSpillType()) {
2482 AssignSpillRangeToLiveRange(first); 2833 AssignSpillRangeToLiveRange(first);
2483 } 2834 }
2484 range->MakeSpilled(); 2835 range->MakeSpilled();
2485 } 2836 }
2486 2837
2487 2838
2488 int RegisterAllocator::RegisterCount() const { return num_registers_; } 2839 int LinearScanAllocator::RegisterCount() const { return num_registers_; }
2489 2840
2490 2841
#ifdef DEBUG


// Debug-only sanity check: ask every live range to verify its own
// invariants.
void LinearScanAllocator::Verify() const {
  for (auto range : live_ranges()) {
    if (range != nullptr) range->Verify();
  }
}


#endif
2502 2853
2503 2854
2504 void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range, 2855 void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
2505 int reg) { 2856 int reg) {
2506 if (range->Kind() == DOUBLE_REGISTERS) { 2857 if (range->Kind() == DOUBLE_REGISTERS) {
2507 assigned_double_registers_->Add(reg); 2858 assigned_double_registers_->Add(reg);
2508 } else { 2859 } else {
2509 DCHECK(range->Kind() == GENERAL_REGISTERS); 2860 DCHECK(range->Kind() == GENERAL_REGISTERS);
2510 assigned_registers_->Add(reg); 2861 assigned_registers_->Add(reg);
2511 } 2862 }
2512 range->set_assigned_register(reg); 2863 range->set_assigned_register(reg);
2513 auto assignment = range->GetAssignedOperand(); 2864 auto assignment = range->GetAssignedOperand();
2514 // TODO(dcarney): stop aliasing hint operands. 2865 // TODO(dcarney): stop aliasing hint operands.
2515 range->ConvertUsesToOperand(assignment, nullptr); 2866 range->ConvertUsesToOperand(assignment, nullptr);
2516 if (range->is_phi()) AssignPhiInput(range, assignment); 2867 if (range->is_phi()) AssignPhiInput(range, assignment);
2517 } 2868 }
2518 2869
2519 } // namespace compiler 2870 } // namespace compiler
2520 } // namespace internal 2871 } // namespace internal
2521 } // namespace v8 2872 } // namespace v8
OLDNEW
« no previous file with comments | « src/compiler/register-allocator.h ('k') | src/flag-definitions.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698