Chromium Code Reviews| Index: src/compiler/register-allocator.cc |
| diff --git a/src/compiler/register-allocator.cc b/src/compiler/register-allocator.cc |
| index 9d745d0097f4e627707a0349f006c6073db7592d..c1b90373470dccc209b34e31f016ec1ac2b194d6 100644 |
| --- a/src/compiler/register-allocator.cc |
| +++ b/src/compiler/register-allocator.cc |
| @@ -10,11 +10,82 @@ namespace v8 { |
| namespace internal { |
| namespace compiler { |
| + |
| #define TRACE(...) \ |
| do { \ |
| if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
| } while (false) |
| + |
| +class RegisterAllocationInfo : public ZoneObject { |
| + public: |
| + explicit RegisterAllocationInfo(Zone* zone) : storage_(zone) {} |
| + |
| + bool Find(UseInterval* query, LiveRange** result) { |
| + ZoneSplayTree<Config>::Locator locator; |
| + bool ret = storage_.Find(GetKey(query), &locator); |
| + if (ret) { |
| + *result = locator.value(); |
| + } |
| + return ret; |
| + } |
| + |
| + bool Insert(LiveRange* range) { |
| + auto* interval = range->first_interval(); |
| + while (interval) { |
| + if (!Insert(interval, range)) return false; |
| + interval = interval->next(); |
| + } |
| + return true; |
| + } |
| + |
| + bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); } |
| + |
| + bool Remove(LiveRange* range) { |
| + bool ret = false; |
| + auto* segment = range->first_interval(); |
| + while (segment) { |
| + ret |= Remove(segment); |
| + segment = segment->next(); |
| + } |
| + return ret; |
| + } |
| + |
| + bool IsEmpty() { return storage_.is_empty(); } |
| + |
| + private: |
| + struct Config { |
| + typedef std::pair<int, int> Key; |
| + typedef LiveRange* Value; |
| + static const Key kNoKey; |
| + static Value NoValue() { return nullptr; } |
| + static int Compare(const Key& a, const Key& b) { |
| + if (a.second <= b.first) return -1; |
| + if (a.first >= b.second) return 1; |
| + return 0; |
| + } |
| + }; |
| + |
| + Config::Key GetKey(UseInterval* interval) { |
| + if (!interval) return std::make_pair(0, 0); |
| + return std::make_pair(interval->start().Value(), interval->end().Value()); |
| + } |
| + bool Insert(UseInterval* interval, LiveRange* range) { |
| + ZoneSplayTree<Config>::Locator locator; |
| + bool ret = storage_.Insert(GetKey(interval), &locator); |
| + if (ret) locator.set_value(range); |
| + return ret; |
| + } |
| + |
| + ZoneSplayTree<Config> storage_; |
| + DISALLOW_COPY_AND_ASSIGN(RegisterAllocationInfo); |
| +}; |
| + |
| + |
| +const std::pair<int, int> RegisterAllocationInfo::Config::kNoKey = |
| + std::make_pair<int, int>(0, 0); |
| + |
| + |
| static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { |
| return a.Value() < b.Value() ? a : b; |
| } |
| @@ -581,16 +652,29 @@ RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, |
| debug_name_(debug_name), |
| config_(config), |
| phi_map_(local_zone()), |
| - live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()), |
| live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()), |
| fixed_live_ranges_(this->config()->num_general_registers(), nullptr, |
| local_zone()), |
| fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr, |
| local_zone()), |
| + spill_ranges_(local_zone()), |
| + live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()) { |
| + spill_ranges().reserve(8); |
| + assigned_registers_ = |
| + new (code_zone()) BitVector(config->num_general_registers(), code_zone()); |
| + assigned_double_registers_ = new (code_zone()) |
| + BitVector(config->num_aliased_double_registers(), code_zone()); |
| + frame->SetAllocatedRegisters(assigned_registers_); |
| + frame->SetAllocatedDoubleRegisters(assigned_double_registers_); |
| +} |
| + |
| +RegisterAllocatorLinear::RegisterAllocatorLinear( |
| + const RegisterConfiguration* config, Zone* zone, Frame* frame, |
| + InstructionSequence* code, const char* debug_name) |
| + : RegisterAllocator(config, zone, frame, code, debug_name), |
| unhandled_live_ranges_(local_zone()), |
| active_live_ranges_(local_zone()), |
| inactive_live_ranges_(local_zone()), |
| - spill_ranges_(local_zone()), |
| mode_(UNALLOCATED_REGISTERS), |
| num_registers_(-1) { |
| DCHECK(this->config()->num_general_registers() <= |
| @@ -605,13 +689,6 @@ RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, |
| static_cast<size_t>(code->VirtualRegisterCount() * 2)); |
| active_live_ranges().reserve(8); |
| inactive_live_ranges().reserve(8); |
| - spill_ranges().reserve(8); |
| - assigned_registers_ = |
| - new (code_zone()) BitVector(config->num_general_registers(), code_zone()); |
| - assigned_double_registers_ = new (code_zone()) |
| - BitVector(config->num_aliased_double_registers(), code_zone()); |
| - frame->SetAllocatedRegisters(assigned_registers_); |
| - frame->SetAllocatedDoubleRegisters(assigned_double_registers_); |
| } |
| @@ -973,12 +1050,12 @@ SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) { |
| } |
| -bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) { |
| +bool RegisterAllocatorLinear::TryReuseSpillForPhi(LiveRange* range) { |
| if (range->IsChild() || !range->is_phi()) return false; |
| DCHECK(!range->HasSpillOperand()); |
| - auto lookup = phi_map_.find(range->id()); |
| - DCHECK(lookup != phi_map_.end()); |
| + auto lookup = phi_map().find(range->id()); |
| + DCHECK(lookup != phi_map().end()); |
| auto phi = lookup->second->phi; |
| auto block = lookup->second->block; |
| // Count the number of spilled operands. |
| @@ -1805,6 +1882,134 @@ bool RegisterAllocator::SafePointsAreInOrder() const { |
| } |
| +unsigned RegisterAllocatorGreedy::GetLiveRangeSize(LiveRange* range) { |
| + auto interval = range->first_interval(); |
| + if (!interval) return 0; |
| + |
| + unsigned size = 0; |
| + while (interval) { |
| + size += (interval->end().Value() - interval->start().Value()); |
| + interval = interval->next(); |
| + } |
| + |
| + DCHECK(size); |
| + return size; |
| +} |
| + |
| + |
| +void RegisterAllocatorGreedy::Enqueue(LiveRange* range) { |
| + if (!range || range->IsEmpty()) return; |
| + unsigned size = GetLiveRangeSize(range); |
| + queue_.push(std::make_pair(size, range)); |
| +} |
| + |
| + |
| +// TODO(mtrofin): consolidate with identical code segment in |
| +// RegisterAllocatorLinear::AllocateRegisters |
| +bool RegisterAllocatorGreedy::HandleSpillOperands(LiveRange* range) { |
| + auto position = range->Start(); |
| + TRACE("Processing interval %d start=%d\n", range->id(), position.Value()); |
| + |
| + if (!range->HasNoSpillType()) { |
| + TRACE("Live range %d already has a spill operand\n", range->id()); |
| + auto next_pos = position; |
| + if (next_pos.IsGapPosition()) { |
| + next_pos = next_pos.NextStart(); |
| + } |
| + auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); |
|
dcarney
2015/04/18 19:01:33
NextUsePositionRegisterIsBeneficial is not available to the greedy allocator.
Mircea Trofin
2015/04/19 04:02:04
Acknowledged.
This will need a rewrite anyway.
|
| + // If the range already has a spill operand and it doesn't need a |
| + // register immediately, split it and spill the first part of the range. |
| + if (pos == nullptr) { |
| + Spill(range); |
| + return true; |
| + } else if (pos->pos().Value() > range->Start().NextStart().Value()) { |
| + // Do not spill live range eagerly if use position that can benefit from |
| + // the register is too close to the start of live range. |
| + auto* reminder = SpillBetweenUntil(range, position, position, pos->pos()); |
| + Enqueue(reminder); |
| + return true; |
| + } |
| + } |
| + return false; |
| + // TODO(mtrofin): Do we need this? |
| + // return (TryReuseSpillForPhi(range)); |
| +} |
| + |
| + |
| +void RegisterAllocatorGreedy::AllocateRegisters(RegisterKind mode) { |
| + // TODO(mtrofin): support for double registers |
| + DCHECK(mode == GENERAL_REGISTERS); |
| + |
| + for (auto range : live_ranges()) { |
| + if (range == nullptr) continue; |
| + if (range->Kind() == mode) { |
| + DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| + TRACE("Enqueueing live range %d to priority queue \n", range->id()); |
| + Enqueue(range); |
| + } |
| + } |
| + |
| + allocations_.resize(config()->kMaxGeneralRegisters); |
|
dcarney
2015/04/18 19:01:32
config()->num_general_registers() is the number of general registers actually available on this configuration; use it here instead of kMaxGeneralRegisters.
Mircea Trofin
2015/04/19 04:02:04
Done.
|
| + for (int i = 0; i < config()->kMaxGeneralRegisters; i++) { |
| + allocations_[i] = new (local_zone()) RegisterAllocationInfo(local_zone()); |
| + } |
| + |
| + for (auto* current : fixed_live_ranges()) { |
| + if (current != nullptr) { |
| + int regNr = current->assigned_register(); |
| + CHECK((allocations_[regNr]->Insert(current))); |
|
dcarney
2015/04/18 19:01:33
generally we ensure the condition of a CHECK is side-effect free, so that the CHECK is convertible to a DCHECK.
Mircea Trofin
2015/04/19 04:02:04
Done.
|
| + } |
| + } |
| + |
| + while (!queue_.empty()) { |
| + auto currentPair = queue_.top(); |
| + queue_.pop(); |
| + auto current = currentPair.second; |
| + if (HandleSpillOperands(current)) continue; |
| + ZoneSet<LiveRange*> conflicting(local_zone()); |
| + if (!TryAllocate(current, &conflicting)) { |
| + DCHECK(conflicting.size()); |
| + float thisMax = CalculateSpillWeight(current); |
| + float maxConflicting = CalculateMaxSpillWeight(conflicting); |
| + if (maxConflicting < thisMax) { |
| + for (auto* conflict : conflicting) { |
| + Evict(conflict); |
| + Enqueue(conflict); |
| + } |
| + conflicting.clear(); |
| + CHECK(TryAllocate(current, &conflicting)); |
| + DCHECK(conflicting.size() == 0); |
| + } else { |
| + DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start())); |
| + CHECK(AllocateBlockedRange(current, conflicting)); |
|
dcarney
2015/04/18 19:01:33
here and 2 lines above CHECK issue again
Mircea Trofin
2015/04/19 04:02:04
Done.
|
| + } |
| + } |
| + } |
| + for (unsigned i = 0; i < allocations_.size(); i++) { |
|
dcarney
2015/04/18 19:01:33
use size_t here and in other places when iterating up to a container's size().
Mircea Trofin
2015/04/19 04:02:04
Done.
|
| + if (!allocations_[i]->IsEmpty()) { |
| + assigned_registers()->Add(i); |
| + } |
| + } |
| +} |
| + |
| + |
| +bool RegisterAllocatorGreedy::AllocateBlockedRange( |
| + LiveRange* current, const ZoneSet<LiveRange*>& conflicts) { |
| + auto register_use = current->NextRegisterPosition(current->Start()); |
|
dcarney
2015/04/18 19:01:33
NextRegisterPosition is also unavailable to the greedy allocator.
Mircea Trofin
2015/04/19 04:02:04
Acknowledged.
|
| + if (register_use == nullptr) { |
| + // There is no use in the current live range that requires a register. |
| + // We can just spill it. |
| + Spill(current); |
| + return true; |
| + } |
| + |
| + auto second_part = SplitRangeAt(current, register_use->pos()); |
| + Spill(second_part); |
| + |
| + return true; |
| +} |
| + |
| + |
| void RegisterAllocator::PopulateReferenceMaps() { |
| DCHECK(SafePointsAreInOrder()); |
| @@ -1886,21 +2091,21 @@ void RegisterAllocator::PopulateReferenceMaps() { |
| } |
| -void RegisterAllocator::AllocateGeneralRegisters() { |
| +void RegisterAllocatorLinear::AllocateGeneralRegisters() { |
| num_registers_ = config()->num_general_registers(); |
| mode_ = GENERAL_REGISTERS; |
| AllocateRegisters(); |
| } |
| -void RegisterAllocator::AllocateDoubleRegisters() { |
| +void RegisterAllocatorLinear::AllocateDoubleRegisters() { |
| num_registers_ = config()->num_aliased_double_registers(); |
| mode_ = DOUBLE_REGISTERS; |
| AllocateRegisters(); |
| } |
| -void RegisterAllocator::AllocateRegisters() { |
| +void RegisterAllocatorLinear::AllocateRegisters() { |
| DCHECK(unhandled_live_ranges().empty()); |
| for (auto range : live_ranges()) { |
| @@ -2001,7 +2206,7 @@ void RegisterAllocator::AllocateRegisters() { |
| } |
| -const char* RegisterAllocator::RegisterName(int allocation_index) { |
| +const char* RegisterAllocatorLinear::RegisterName(int allocation_index) { |
| if (mode_ == GENERAL_REGISTERS) { |
| return config()->general_register_name(allocation_index); |
| } else { |
| @@ -2022,19 +2227,19 @@ RegisterKind RegisterAllocator::RequiredRegisterKind( |
| } |
| -void RegisterAllocator::AddToActive(LiveRange* range) { |
| +void RegisterAllocatorLinear::AddToActive(LiveRange* range) { |
| TRACE("Add live range %d to active\n", range->id()); |
| active_live_ranges().push_back(range); |
| } |
| -void RegisterAllocator::AddToInactive(LiveRange* range) { |
| +void RegisterAllocatorLinear::AddToInactive(LiveRange* range) { |
| TRACE("Add live range %d to inactive\n", range->id()); |
| inactive_live_ranges().push_back(range); |
| } |
| -void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { |
| +void RegisterAllocatorLinear::AddToUnhandledSorted(LiveRange* range) { |
| if (range == nullptr || range->IsEmpty()) return; |
| DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| DCHECK(allocation_finger_.Value() <= range->Start().Value()); |
| @@ -2054,7 +2259,7 @@ void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { |
| } |
| -void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { |
| +void RegisterAllocatorLinear::AddToUnhandledUnsorted(LiveRange* range) { |
| if (range == nullptr || range->IsEmpty()) return; |
| DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); |
| @@ -2073,14 +2278,14 @@ static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { |
| // Sort the unhandled live ranges so that the ranges to be processed first are |
| // at the end of the array list. This is convenient for the register allocation |
| // algorithm because it is efficient to remove elements from the end. |
| -void RegisterAllocator::SortUnhandled() { |
| +void RegisterAllocatorLinear::SortUnhandled() { |
| TRACE("Sort unhandled\n"); |
| std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), |
| &UnhandledSortHelper); |
| } |
| -bool RegisterAllocator::UnhandledIsSorted() { |
| +bool RegisterAllocatorLinear::UnhandledIsSorted() { |
| size_t len = unhandled_live_ranges().size(); |
| for (size_t i = 1; i < len; i++) { |
| auto a = unhandled_live_ranges().at(i - 1); |
| @@ -2091,33 +2296,143 @@ bool RegisterAllocator::UnhandledIsSorted() { |
| } |
| -void RegisterAllocator::ActiveToHandled(LiveRange* range) { |
| +void RegisterAllocatorLinear::ActiveToHandled(LiveRange* range) { |
| RemoveElement(&active_live_ranges(), range); |
| TRACE("Moving live range %d from active to handled\n", range->id()); |
| } |
| -void RegisterAllocator::ActiveToInactive(LiveRange* range) { |
| +void RegisterAllocatorLinear::ActiveToInactive(LiveRange* range) { |
| RemoveElement(&active_live_ranges(), range); |
| inactive_live_ranges().push_back(range); |
| TRACE("Moving live range %d from active to inactive\n", range->id()); |
| } |
| -void RegisterAllocator::InactiveToHandled(LiveRange* range) { |
| +void RegisterAllocatorLinear::InactiveToHandled(LiveRange* range) { |
| RemoveElement(&inactive_live_ranges(), range); |
| TRACE("Moving live range %d from inactive to handled\n", range->id()); |
| } |
| -void RegisterAllocator::InactiveToActive(LiveRange* range) { |
| +void RegisterAllocatorLinear::InactiveToActive(LiveRange* range) { |
| RemoveElement(&inactive_live_ranges(), range); |
| active_live_ranges().push_back(range); |
| TRACE("Moving live range %d from inactive to active\n", range->id()); |
| } |
| -bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { |
| +RegisterAllocatorGreedy::RegisterAllocatorGreedy( |
| + const RegisterConfiguration* config, Zone* local_zone, Frame* frame, |
| + InstructionSequence* code, const char* debug_name) |
| + : RegisterAllocator(config, local_zone, frame, code, debug_name), |
| + allocations_(local_zone), |
| + queue_(local_zone) {} |
| + |
| + |
| +void RegisterAllocatorGreedy::AllocateGeneralRegisters() { |
| + AllocateRegisters(RegisterKind::GENERAL_REGISTERS); |
| +} |
| + |
| + |
| +void RegisterAllocatorGreedy::AllocateDoubleRegisters() { |
| + AllocateRegisters(RegisterKind::DOUBLE_REGISTERS); |
| +} |
| + |
| + |
| +void RegisterAllocatorGreedy::AssignRangeToRegister(unsigned reg_id, |
| + LiveRange* range) { |
| + allocations_[reg_id]->Insert(range); |
| + if (range->HasRegisterAssigned()) { |
| + DCHECK((unsigned)(range->assigned_register()) == reg_id); |
|
dcarney
2015/04/18 19:01:33
chromium style guide dictates that you must use static_cast here rather than a C-style cast.
Mircea Trofin
2015/04/19 04:02:04
Done.
|
| + return; |
| + } |
| + range->set_assigned_register(reg_id); |
| +} |
| + |
| + |
| +float RegisterAllocatorGreedy::CalculateSpillWeight(LiveRange* range) { |
| + if (range->IsFixed()) return std::numeric_limits<float>::max(); |
| + |
| + if (range->FirstHint() && range->FirstHint()->IsRegister()) |
|
dcarney
2015/04/18 19:01:33
need braces on multiline if
Mircea Trofin
2015/04/19 04:02:04
Done.
|
| + return std::numeric_limits<float>::max(); |
| + |
| + unsigned useCount = 0; |
| + auto* pos = range->first_pos(); |
| + while (pos != nullptr) { |
| + useCount++; |
| + pos = pos->next(); |
| + } |
| + |
| + // GetLiveRangeSize is DCHECK-ed to not be 0 |
| + return static_cast<float>(useCount) / |
| + static_cast<float>(GetLiveRangeSize(range)); |
| +} |
| + |
| + |
| +float RegisterAllocatorGreedy::CalculateMaxSpillWeight( |
| + const ZoneSet<LiveRange*>& ranges) { |
| + float max = 0.0; |
| + for (auto* r : ranges) { |
| + float localMax = CalculateSpillWeight(r); |
|
dcarney
2015/04/18 19:01:33
max = std::max(max, CalculateSpillWeight(r));
Mircea Trofin
2015/04/19 04:02:04
Done.
|
| + if (localMax > max) max = localMax; |
| + } |
| + return max; |
| +} |
| + |
| + |
| +void RegisterAllocatorGreedy::Evict(LiveRange* range) { |
| + allocations_[range->assigned_register()]->Remove(range); |
| +} |
| + |
| + |
| +bool RegisterAllocatorGreedy::TryAllocatePhysicalRegister( |
| + unsigned reg_id, LiveRange* range, ZoneSet<LiveRange*>* conflicting) { |
| + auto* segment = range->first_interval(); |
| + |
| + RegisterAllocationInfo* allocInfo = allocations_[reg_id]; |
| + while (segment) { |
| + LiveRange* existing; |
| + if (allocInfo->Find(segment, &existing)) { |
| + DCHECK(existing->HasRegisterAssigned()); |
| + conflicting->insert(existing); |
| + } |
| + segment = segment->next(); |
| + } |
| + if (!conflicting->empty()) return false; |
| + // good to insert |
|
dcarney
2015/04/18 19:01:33
comments in v8 typically start with a capital letter and end with a period.
Mircea Trofin
2015/04/19 04:02:04
Done.
|
| + AssignRangeToRegister(reg_id, range); |
| + return true; |
| +} |
| + |
| +bool RegisterAllocatorGreedy::TryAllocate(LiveRange* current, |
| + ZoneSet<LiveRange*>* conflicting) { |
| + if (current->HasSpillOperand()) { |
| + Spill(current); |
| + return true; |
| + } |
| + if (current->IsFixed()) { |
| + return TryAllocatePhysicalRegister(current->assigned_register(), current, |
| + conflicting); |
| + } |
| + |
| + if (current->HasRegisterAssigned()) { |
| + int reg_id = current->assigned_register(); |
| + return TryAllocatePhysicalRegister(reg_id, current, conflicting); |
| + } |
| + |
| + for (unsigned candidate_reg = 0; candidate_reg < allocations_.size(); |
| + candidate_reg++) { |
| + if (TryAllocatePhysicalRegister(candidate_reg, current, conflicting)) { |
| + conflicting->clear(); |
| + return true; |
| + } |
| + } |
| + return false; |
| +} |
| + |
| + |
| +bool RegisterAllocatorLinear::TryAllocateFreeReg(LiveRange* current) { |
| LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| for (int i = 0; i < num_registers_; i++) { |
| @@ -2186,7 +2501,7 @@ bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { |
| } |
| -void RegisterAllocator::AllocateBlockedReg(LiveRange* current) { |
| +void RegisterAllocatorLinear::AllocateBlockedReg(LiveRange* current) { |
| auto register_use = current->NextRegisterPosition(current->Start()); |
| if (register_use == nullptr) { |
| // There is no use in the current live range that requires a register. |
| @@ -2276,7 +2591,7 @@ static const InstructionBlock* GetContainingLoop( |
| } |
| -LifetimePosition RegisterAllocator::FindOptimalSpillingPos( |
| +LifetimePosition RegisterAllocatorLinear::FindOptimalSpillingPos( |
| LiveRange* range, LifetimePosition pos) { |
| auto block = GetInstructionBlock(pos.Start()); |
| auto loop_header = |
| @@ -2308,7 +2623,7 @@ LifetimePosition RegisterAllocator::FindOptimalSpillingPos( |
| } |
| -void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) { |
| +void RegisterAllocatorLinear::SplitAndSpillIntersecting(LiveRange* current) { |
| DCHECK(current->HasRegisterAssigned()); |
| int reg = current->assigned_register(); |
| auto split_pos = current->Start(); |
| @@ -2432,22 +2747,24 @@ LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, |
| } |
| -void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { |
| +void RegisterAllocatorLinear::SpillAfter(LiveRange* range, |
| + LifetimePosition pos) { |
| auto second_part = SplitRangeAt(range, pos); |
| Spill(second_part); |
| } |
| -void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, |
| - LifetimePosition end) { |
| +void RegisterAllocatorLinear::SpillBetween(LiveRange* range, |
| + LifetimePosition start, |
| + LifetimePosition end) { |
| SpillBetweenUntil(range, start, start, end); |
| } |
| -void RegisterAllocator::SpillBetweenUntil(LiveRange* range, |
| - LifetimePosition start, |
| - LifetimePosition until, |
| - LifetimePosition end) { |
| +void RegisterAllocatorLinear::SpillBetweenUntil(LiveRange* range, |
| + LifetimePosition start, |
| + LifetimePosition until, |
| + LifetimePosition end) { |
| CHECK(start.Value() < end.Value()); |
| auto second_part = SplitRangeAt(range, start); |
| @@ -2473,6 +2790,35 @@ void RegisterAllocator::SpillBetweenUntil(LiveRange* range, |
| } |
| } |
| +LiveRange* RegisterAllocatorGreedy::SpillBetweenUntil(LiveRange* range, |
| + LifetimePosition start, |
| + LifetimePosition until, |
| + LifetimePosition end) { |
| + CHECK(start.Value() < end.Value()); |
| + auto second_part = SplitRangeAt(range, start); |
| + |
| + if (second_part->Start().Value() < end.Value()) { |
| + // The split result intersects with [start, end[. |
| + // Split it at position between ]start+1, end[, spill the middle part |
| + // and put the rest to unhandled. |
| + auto third_part_end = end.PrevStart().End(); |
| + if (IsBlockBoundary(end.Start())) { |
| + third_part_end = end.Start(); |
| + } |
| + auto third_part = SplitBetween( |
| + second_part, Max(second_part->Start().End(), until), third_part_end); |
| + |
| + DCHECK(third_part != second_part); |
| + |
| + Spill(second_part); |
| + return third_part; |
| + } else { |
| + // The split result does not intersect with [start, end[. |
| + // Nothing to spill. Just put it to unhandled as whole. |
| + return second_part; |
| + } |
| +} |
| + |
| void RegisterAllocator::Spill(LiveRange* range) { |
| DCHECK(!range->IsSpilled()); |
| @@ -2485,13 +2831,13 @@ void RegisterAllocator::Spill(LiveRange* range) { |
| } |
| -int RegisterAllocator::RegisterCount() const { return num_registers_; } |
| +int RegisterAllocatorLinear::RegisterCount() const { return num_registers_; } |
| #ifdef DEBUG |
| -void RegisterAllocator::Verify() const { |
| +void RegisterAllocatorLinear::Verify() const { |
| for (auto current : live_ranges()) { |
| if (current != nullptr) current->Verify(); |
| } |