Index: src/compiler/register-allocator.cc |
diff --git a/src/compiler/register-allocator.cc b/src/compiler/register-allocator.cc |
index 9d745d0097f4e627707a0349f006c6073db7592d..a7f9adb4a93d1a09bbcb1837dc21c1fde9f37534 100644 |
--- a/src/compiler/register-allocator.cc |
+++ b/src/compiler/register-allocator.cc |
@@ -10,6 +10,10 @@ namespace v8 { |
namespace internal { |
namespace compiler { |
+const std::pair<int, int> |
dcarney
2015/04/17 07:22:45
this should go under the #define
also, does this c
Mircea Trofin
2015/04/17 19:23:33
This is a constexpr, so it is evaluated at compile
|
+ RegisterAllocatorGreedy::RegisterAllocationInfo::Config::kNoKey = |
+ std::make_pair(0, 0); |
+ |
#define TRACE(...) \ |
do { \ |
if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
@@ -581,16 +585,29 @@ RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, |
debug_name_(debug_name), |
config_(config), |
phi_map_(local_zone()), |
- live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()), |
live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()), |
fixed_live_ranges_(this->config()->num_general_registers(), nullptr, |
local_zone()), |
fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr, |
local_zone()), |
+ spill_ranges_(local_zone()), |
+ live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()) { |
+ spill_ranges().reserve(8); |
+ assigned_registers_ = |
+ new (code_zone()) BitVector(config->num_general_registers(), code_zone()); |
+ assigned_double_registers_ = new (code_zone()) |
+ BitVector(config->num_aliased_double_registers(), code_zone()); |
+ frame->SetAllocatedRegisters(assigned_registers_); |
+ frame->SetAllocatedDoubleRegisters(assigned_double_registers_); |
+} |
+ |
+RegisterAllocatorLinear::RegisterAllocatorLinear( |
+ const RegisterConfiguration* config, Zone* zone, Frame* frame, |
+ InstructionSequence* code, const char* debug_name) |
+ : RegisterAllocator(config, zone, frame, code, debug_name), |
unhandled_live_ranges_(local_zone()), |
active_live_ranges_(local_zone()), |
inactive_live_ranges_(local_zone()), |
- spill_ranges_(local_zone()), |
mode_(UNALLOCATED_REGISTERS), |
num_registers_(-1) { |
DCHECK(this->config()->num_general_registers() <= |
@@ -605,13 +622,6 @@ RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, |
static_cast<size_t>(code->VirtualRegisterCount() * 2)); |
active_live_ranges().reserve(8); |
inactive_live_ranges().reserve(8); |
- spill_ranges().reserve(8); |
- assigned_registers_ = |
- new (code_zone()) BitVector(config->num_general_registers(), code_zone()); |
- assigned_double_registers_ = new (code_zone()) |
- BitVector(config->num_aliased_double_registers(), code_zone()); |
- frame->SetAllocatedRegisters(assigned_registers_); |
- frame->SetAllocatedDoubleRegisters(assigned_double_registers_); |
} |
@@ -973,12 +983,12 @@ SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) { |
} |
-bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) { |
+bool RegisterAllocatorLinear::TryReuseSpillForPhi(LiveRange* range) { |
if (range->IsChild() || !range->is_phi()) return false; |
DCHECK(!range->HasSpillOperand()); |
- auto lookup = phi_map_.find(range->id()); |
- DCHECK(lookup != phi_map_.end()); |
+ auto lookup = phi_map().find(range->id()); |
+ DCHECK(lookup != phi_map().end()); |
auto phi = lookup->second->phi; |
auto block = lookup->second->block; |
// Count the number of spilled operands. |
@@ -1805,6 +1815,133 @@ bool RegisterAllocator::SafePointsAreInOrder() const { |
} |
+unsigned RegisterAllocatorGreedy::GetLiveRangeSize(LiveRange* range) { |
+ auto interval = range->first_interval(); |
+ if (!interval) return 0; |
+ |
+ unsigned size = 0; |
+ while (interval) { |
+ size += (interval->end().Value() - interval->start().Value()); |
+ interval = interval->next(); |
+ } |
+ |
+ DCHECK(size); |
+ return size; |
+} |
+ |
+ |
+void RegisterAllocatorGreedy::Enqueue(LiveRange* range) { |
+ if (!range || range->IsEmpty()) return; |
+ unsigned size = GetLiveRangeSize(range); |
+ queue_.push(std::make_pair(size, range)); |
+} |
+ |
+ |
+// TODO(mtrofin): consolidate with identical code segment in |
+// RegisterAllocatorLinear::AllocateRegisters |
+bool RegisterAllocatorGreedy::HandleSpillOperands(LiveRange* range) { |
+ auto position = range->Start(); |
+ TRACE("Processing interval %d start=%d\n", range->id(), position.Value()); |
+ |
+ if (!range->HasNoSpillType()) { |
+ TRACE("Live range %d already has a spill operand\n", range->id()); |
+ auto next_pos = position; |
+ if (next_pos.IsGapPosition()) { |
+ next_pos = next_pos.NextStart(); |
+ } |
+ auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); |
+ // If the range already has a spill operand and it doesn't need a |
+ // register immediately, split it and spill the first part of the range. |
+ if (pos == nullptr) { |
+ Spill(range); |
+ return true; |
+ } else if (pos->pos().Value() > range->Start().NextStart().Value()) { |
+      // Do not spill the live range eagerly if the use position that can |
+      // benefit from the register is too close to the start of the live range. |
+ auto* reminder = SpillBetweenUntil(range, position, position, pos->pos()); |
+ Enqueue(reminder); |
+ return true; |
+ } |
+ } |
+ return true; |
+ // TODO(mtrofin): Do we need this? |
+ // return (TryReuseSpillForPhi(range)); |
+} |
+ |
+ |
+void RegisterAllocatorGreedy::AllocateRegisters(RegisterKind mode) { |
+ // TODO(mtrofin): support for double registers |
+ DCHECK(mode == GENERAL_REGISTERS); |
+ |
+ for (auto range : live_ranges()) { |
+ if (range == nullptr) continue; |
+ if (range->Kind() == mode) { |
+ DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
+ TRACE("Enqueueing live range %d to priority queue \n", range->id()); |
+ Enqueue(range); |
+ } |
+ } |
+ |
+ for (int i = 0; i < config()->kMaxGeneralRegisters; i++) { |
+ allocations_[i] = new RegisterAllocationInfo(local_zone()); |
dcarney
2015/04/17 07:22:45
this must be zone allocated - in fact pretty much
Mircea Trofin
2015/04/17 19:23:33
Done.
|
+ } |
+ |
+ for (auto* current : fixed_live_ranges()) { |
+ if (current != nullptr) { |
+ int regNr = current->assigned_register(); |
+ CHECK((allocations_[regNr]->insert(current))); |
+ } |
+ } |
+ |
+ while (!queue_.empty()) { |
+ auto currentPair = queue_.top(); |
+ queue_.pop(); |
+ auto current = currentPair.second; |
+ if (HandleSpillOperands(current)) continue; |
+ ZoneSet<LiveRange*> conflicting(local_zone()); |
+ if (!TryAllocate(current, conflicting)) { |
+ DCHECK(conflicting.size()); |
+ float thisMax = CalculateSpillWeight(current); |
+ float maxConflicting = CalculateMaxSpillWeight(conflicting); |
+ if (maxConflicting < thisMax) { |
+ for (auto* conflict : conflicting) { |
+ Evict(conflict); |
+ Enqueue(conflict); |
+ } |
+ conflicting.clear(); |
+ CHECK(TryAllocate(current, conflicting)); |
+ DCHECK(conflicting.size() == 0); |
+ } else { |
+ DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start())); |
+ CHECK(AllocateBlockedRange(current, conflicting)); |
+ } |
+ } |
+ } |
+ for (unsigned i = 0; i < allocations_.size(); i++) { |
+ if (!allocations_[i]->isEmpty()) { |
+ assigned_registers()->Add(i); |
+ } |
+ } |
+} |
+ |
+ |
+bool RegisterAllocatorGreedy::AllocateBlockedRange( |
+ LiveRange* current, ZoneSet<LiveRange*>& conflicts) { |
+ auto register_use = current->NextRegisterPosition(current->Start()); |
+ if (register_use == nullptr) { |
+ // There is no use in the current live range that requires a register. |
+ // We can just spill it. |
+ Spill(current); |
+ return true; |
+ } |
+ |
+ auto second_part = SplitRangeAt(current, register_use->pos()); |
+ Spill(second_part); |
+ |
+ return true; |
+} |
+ |
+ |
void RegisterAllocator::PopulateReferenceMaps() { |
DCHECK(SafePointsAreInOrder()); |
@@ -1886,21 +2023,21 @@ void RegisterAllocator::PopulateReferenceMaps() { |
} |
-void RegisterAllocator::AllocateGeneralRegisters() { |
+void RegisterAllocatorLinear::AllocateGeneralRegisters() { |
num_registers_ = config()->num_general_registers(); |
mode_ = GENERAL_REGISTERS; |
AllocateRegisters(); |
} |
-void RegisterAllocator::AllocateDoubleRegisters() { |
+void RegisterAllocatorLinear::AllocateDoubleRegisters() { |
num_registers_ = config()->num_aliased_double_registers(); |
mode_ = DOUBLE_REGISTERS; |
AllocateRegisters(); |
} |
-void RegisterAllocator::AllocateRegisters() { |
+void RegisterAllocatorLinear::AllocateRegisters() { |
DCHECK(unhandled_live_ranges().empty()); |
for (auto range : live_ranges()) { |
@@ -2001,7 +2138,7 @@ void RegisterAllocator::AllocateRegisters() { |
} |
-const char* RegisterAllocator::RegisterName(int allocation_index) { |
+const char* RegisterAllocatorLinear::RegisterName(int allocation_index) { |
if (mode_ == GENERAL_REGISTERS) { |
return config()->general_register_name(allocation_index); |
} else { |
@@ -2022,19 +2159,19 @@ RegisterKind RegisterAllocator::RequiredRegisterKind( |
} |
-void RegisterAllocator::AddToActive(LiveRange* range) { |
+void RegisterAllocatorLinear::AddToActive(LiveRange* range) { |
TRACE("Add live range %d to active\n", range->id()); |
active_live_ranges().push_back(range); |
} |
-void RegisterAllocator::AddToInactive(LiveRange* range) { |
+void RegisterAllocatorLinear::AddToInactive(LiveRange* range) { |
TRACE("Add live range %d to inactive\n", range->id()); |
inactive_live_ranges().push_back(range); |
} |
-void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { |
+void RegisterAllocatorLinear::AddToUnhandledSorted(LiveRange* range) { |
if (range == nullptr || range->IsEmpty()) return; |
DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
DCHECK(allocation_finger_.Value() <= range->Start().Value()); |
@@ -2054,7 +2191,7 @@ void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { |
} |
-void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { |
+void RegisterAllocatorLinear::AddToUnhandledUnsorted(LiveRange* range) { |
if (range == nullptr || range->IsEmpty()) return; |
DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); |
@@ -2073,14 +2210,14 @@ static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { |
// Sort the unhandled live ranges so that the ranges to be processed first are |
// at the end of the array list. This is convenient for the register allocation |
// algorithm because it is efficient to remove elements from the end. |
-void RegisterAllocator::SortUnhandled() { |
+void RegisterAllocatorLinear::SortUnhandled() { |
TRACE("Sort unhandled\n"); |
std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), |
&UnhandledSortHelper); |
} |
-bool RegisterAllocator::UnhandledIsSorted() { |
+bool RegisterAllocatorLinear::UnhandledIsSorted() { |
size_t len = unhandled_live_ranges().size(); |
for (size_t i = 1; i < len; i++) { |
auto a = unhandled_live_ranges().at(i - 1); |
@@ -2091,33 +2228,141 @@ bool RegisterAllocator::UnhandledIsSorted() { |
} |
-void RegisterAllocator::ActiveToHandled(LiveRange* range) { |
+void RegisterAllocatorLinear::ActiveToHandled(LiveRange* range) { |
RemoveElement(&active_live_ranges(), range); |
TRACE("Moving live range %d from active to handled\n", range->id()); |
} |
-void RegisterAllocator::ActiveToInactive(LiveRange* range) { |
+void RegisterAllocatorLinear::ActiveToInactive(LiveRange* range) { |
RemoveElement(&active_live_ranges(), range); |
inactive_live_ranges().push_back(range); |
TRACE("Moving live range %d from active to inactive\n", range->id()); |
} |
-void RegisterAllocator::InactiveToHandled(LiveRange* range) { |
+void RegisterAllocatorLinear::InactiveToHandled(LiveRange* range) { |
RemoveElement(&inactive_live_ranges(), range); |
TRACE("Moving live range %d from inactive to handled\n", range->id()); |
} |
-void RegisterAllocator::InactiveToActive(LiveRange* range) { |
+void RegisterAllocatorLinear::InactiveToActive(LiveRange* range) { |
RemoveElement(&inactive_live_ranges(), range); |
active_live_ranges().push_back(range); |
TRACE("Moving live range %d from inactive to active\n", range->id()); |
} |
-bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { |
+RegisterAllocatorGreedy::RegisterAllocatorGreedy( |
+ const RegisterConfiguration* config, Zone* local_zone, Frame* frame, |
+ InstructionSequence* code, const char* debug_name) |
+ : RegisterAllocator(config, local_zone, frame, code, debug_name), |
+ allocations_(local_zone) {} |
+ |
+ |
+void RegisterAllocatorGreedy::AllocateGeneralRegisters() { |
+ AllocateRegisters(RegisterKind::GENERAL_REGISTERS); |
+} |
+ |
+ |
+void RegisterAllocatorGreedy::AllocateDoubleRegisters() { |
+ AllocateRegisters(RegisterKind::DOUBLE_REGISTERS); |
+} |
+ |
+ |
+void RegisterAllocatorGreedy::AssignRangeToRegister(unsigned regId, |
+ LiveRange* range) { |
+ allocations_[regId]->insert(range); |
+ if (range->HasRegisterAssigned()) { |
+ DCHECK((unsigned)(range->assigned_register()) == regId); |
+ return; |
+ } |
+ range->set_assigned_register(regId); |
+} |
+ |
+ |
+float RegisterAllocatorGreedy::CalculateSpillWeight(LiveRange* range) { |
+ if (range->IsFixed()) return std::numeric_limits<float>::max(); |
+ |
+ if (range->FirstHint() && range->FirstHint()->IsRegister()) |
+ return std::numeric_limits<float>::max(); |
+ |
+ unsigned useCount = 0; |
+ auto* pos = range->first_pos(); |
+ while (pos) { |
dcarney
2015/04/17 07:22:45
while (pos != nullptr)
nullptr comparisons tend t
Mircea Trofin
2015/04/17 19:23:33
Done.
|
+ useCount++; |
+ pos = pos->next(); |
+ } |
+ |
+ // GetLiveRangeSize is DCHECK-ed to not be 0 |
+ return static_cast<float>(useCount) / |
+ static_cast<float>(GetLiveRangeSize(range)); |
+} |
+ |
+ |
+float RegisterAllocatorGreedy::CalculateMaxSpillWeight( |
+ ZoneSet<LiveRange*> ranges) { |
+ float max = 0.0; |
+ for (auto* r : ranges) { |
+ float localMax = CalculateSpillWeight(r); |
+ if (localMax > max) max = localMax; |
+ } |
+ return max; |
+} |
+ |
+ |
+void RegisterAllocatorGreedy::Evict(LiveRange* range) { |
+ allocations_[range->assigned_register()]->remove(range); |
+} |
+ |
+ |
+bool RegisterAllocatorGreedy::TryAllocatePhysicalRegister( |
+ unsigned regId, LiveRange* range, ZoneSet<LiveRange*>& conflicting) { |
dcarney
2015/04/17 07:22:45
v8 would generally uses reg_id, not regId here
Mircea Trofin
2015/04/17 19:23:33
Done.
|
+ auto* segment = range->first_interval(); |
+ |
+ RegisterAllocationInfo* allocInfo = allocations_[regId]; |
+ while (segment) { |
+ LiveRange* existing; |
+ if (allocInfo->find(segment, existing)) { |
+ DCHECK(existing->HasRegisterAssigned()); |
+ conflicting.insert(existing); |
+ } |
+ segment = segment->next(); |
+ } |
+ if (conflicting.size()) return false; |
dcarney
2015/04/17 07:22:45
we tend to make compare with 0 explicit, should an
Mircea Trofin
2015/04/17 19:23:33
Done.
|
+  // No conflicts found, so the range can be assigned to this register. |
+ AssignRangeToRegister(regId, range); |
+ return true; |
+} |
+ |
+bool RegisterAllocatorGreedy::TryAllocate(LiveRange* current, |
+ ZoneSet<LiveRange*>& conflicting) { |
+ if (current->HasSpillOperand()) { |
+ Spill(current); |
+ return true; |
+ } |
+ if (current->IsFixed()) |
dcarney
2015/04/17 07:22:45
need braces for conditions that are not on the sam
Mircea Trofin
2015/04/17 19:23:33
Done.
|
+ return TryAllocatePhysicalRegister(current->assigned_register(), current, |
+ conflicting); |
+ |
+ if (current->HasRegisterAssigned()) { |
+ int regId = current->assigned_register(); |
+ return TryAllocatePhysicalRegister(regId, current, conflicting); |
+ } |
+ |
+ for (unsigned candidateReg = 0; candidateReg < allocations_.size(); |
dcarney
2015/04/17 07:22:45
candidate_reg
Mircea Trofin
2015/04/17 19:23:33
Done.
|
+ candidateReg++) { |
+ if (TryAllocatePhysicalRegister(candidateReg, current, conflicting)) { |
+ conflicting.clear(); |
+ return true; |
+ } |
+ } |
+ return false; |
+} |
+ |
+ |
+bool RegisterAllocatorLinear::TryAllocateFreeReg(LiveRange* current) { |
LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
for (int i = 0; i < num_registers_; i++) { |
@@ -2186,7 +2431,7 @@ bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { |
} |
-void RegisterAllocator::AllocateBlockedReg(LiveRange* current) { |
+void RegisterAllocatorLinear::AllocateBlockedReg(LiveRange* current) { |
auto register_use = current->NextRegisterPosition(current->Start()); |
if (register_use == nullptr) { |
// There is no use in the current live range that requires a register. |
@@ -2276,7 +2521,7 @@ static const InstructionBlock* GetContainingLoop( |
} |
-LifetimePosition RegisterAllocator::FindOptimalSpillingPos( |
+LifetimePosition RegisterAllocatorLinear::FindOptimalSpillingPos( |
LiveRange* range, LifetimePosition pos) { |
auto block = GetInstructionBlock(pos.Start()); |
auto loop_header = |
@@ -2308,7 +2553,7 @@ LifetimePosition RegisterAllocator::FindOptimalSpillingPos( |
} |
-void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) { |
+void RegisterAllocatorLinear::SplitAndSpillIntersecting(LiveRange* current) { |
DCHECK(current->HasRegisterAssigned()); |
int reg = current->assigned_register(); |
auto split_pos = current->Start(); |
@@ -2432,22 +2677,24 @@ LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, |
} |
-void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { |
+void RegisterAllocatorLinear::SpillAfter(LiveRange* range, |
+ LifetimePosition pos) { |
auto second_part = SplitRangeAt(range, pos); |
Spill(second_part); |
} |
-void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, |
- LifetimePosition end) { |
+void RegisterAllocatorLinear::SpillBetween(LiveRange* range, |
+ LifetimePosition start, |
+ LifetimePosition end) { |
SpillBetweenUntil(range, start, start, end); |
} |
-void RegisterAllocator::SpillBetweenUntil(LiveRange* range, |
- LifetimePosition start, |
- LifetimePosition until, |
- LifetimePosition end) { |
+void RegisterAllocatorLinear::SpillBetweenUntil(LiveRange* range, |
+ LifetimePosition start, |
+ LifetimePosition until, |
+ LifetimePosition end) { |
CHECK(start.Value() < end.Value()); |
auto second_part = SplitRangeAt(range, start); |
@@ -2473,6 +2720,35 @@ void RegisterAllocator::SpillBetweenUntil(LiveRange* range, |
} |
} |
+LiveRange* RegisterAllocatorGreedy::SpillBetweenUntil(LiveRange* range, |
+ LifetimePosition start, |
+ LifetimePosition until, |
+ LifetimePosition end) { |
+ CHECK(start.Value() < end.Value()); |
+ auto second_part = SplitRangeAt(range, start); |
+ |
+ if (second_part->Start().Value() < end.Value()) { |
+ // The split result intersects with [start, end[. |
+ // Split it at position between ]start+1, end[, spill the middle part |
+ // and put the rest to unhandled. |
+ auto third_part_end = end.PrevStart().End(); |
+ if (IsBlockBoundary(end.Start())) { |
+ third_part_end = end.Start(); |
+ } |
+ auto third_part = SplitBetween( |
+ second_part, Max(second_part->Start().End(), until), third_part_end); |
+ |
+ DCHECK(third_part != second_part); |
+ |
+ Spill(second_part); |
+ return third_part; |
+ } else { |
+ // The split result does not intersect with [start, end[. |
+ // Nothing to spill. Just put it to unhandled as whole. |
+ return second_part; |
+ } |
+} |
+ |
void RegisterAllocator::Spill(LiveRange* range) { |
DCHECK(!range->IsSpilled()); |
@@ -2485,13 +2761,13 @@ void RegisterAllocator::Spill(LiveRange* range) { |
} |
-int RegisterAllocator::RegisterCount() const { return num_registers_; } |
+int RegisterAllocatorLinear::RegisterCount() const { return num_registers_; } |
#ifdef DEBUG |
-void RegisterAllocator::Verify() const { |
+void RegisterAllocatorLinear::Verify() const { |
for (auto current : live_ranges()) { |
if (current != nullptr) current->Verify(); |
} |