Index: src/lithium-allocator.cc
diff --git a/src/lithium-allocator.cc b/src/lithium-allocator.cc
index a36f7de5c8e310683144d26f8e7155577280810a..ee90bc0ade784f78398c022357dbe18653ee01c1 100644
--- a/src/lithium-allocator.cc
+++ b/src/lithium-allocator.cc
@@ -124,7 +124,8 @@ LiveRange::LiveRange(int id, Zone* zone)
       last_processed_use_(NULL),
       current_hint_operand_(NULL),
       spill_operand_(new(zone) LOperand()),
-      spill_start_index_(kMaxInt) { }
+      spill_start_index_(kMaxInt),
+      spill_range_id_(SpillRange::INVALID_ID) { }
 
 
 void LiveRange::set_assigned_register(int reg, Zone* zone) {
@@ -139,13 +140,16 @@ void LiveRange::MakeSpilled(Zone* zone) {
   ASSERT(TopLevel()->HasAllocatedSpillOperand());
   spilled_ = true;
   assigned_register_ = kInvalidAssignment;
-  ConvertOperands(zone);
+  if (spill_range_id_ == SpillRange::INVALID_ID) {
+    ConvertOperands(zone);
+  }
 }
 
 
 bool LiveRange::HasAllocatedSpillOperand() const {
   ASSERT(spill_operand_ != NULL);
-  return !spill_operand_->IsIgnored();
+  return !spill_operand_->IsIgnored() ||
+      spill_range_id_ != SpillRange::INVALID_ID;
 }
 
 
@@ -157,6 +161,19 @@ void LiveRange::SetSpillOperand(LOperand* operand) {
 }
 
 
+void LiveRange::CommitSpillOperand(LOperand* operand) {
+  ASSERT(spill_range_id_ != SpillRange::INVALID_ID);
+  ASSERT(!IsChild());
+  spill_range_id_ = SpillRange::INVALID_ID;
+  SetSpillOperand(operand);
+  for (LiveRange* range = this; range != NULL; range = range->next()) {
+    if (range->IsSpilled()) {
+      range->ConvertUsesToOperand(operand);
+    }
+  }
+}
+
+
 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
   UsePosition* use_pos = last_processed_use_;
   if (use_pos == NULL) use_pos = first_pos();
@@ -461,8 +478,7 @@ void LiveRange::AddUsePosition(LifetimePosition pos,
 }
 
 
-void LiveRange::ConvertOperands(Zone* zone) {
-  LOperand* op = CreateAssignedOperand(zone);
+void LiveRange::ConvertUsesToOperand(LOperand* op) {
   UsePosition* use_pos = first_pos();
   while (use_pos != NULL) {
     ASSERT(Start().Value() <= use_pos->pos().Value() &&
@@ -478,6 +494,12 @@ void LiveRange::ConvertOperands(Zone* zone) {
 }
 
 
+void LiveRange::ConvertOperands(Zone* zone) {
+  LOperand* op = CreateAssignedOperand(zone);
+  ConvertUsesToOperand(op);
+}
+
+
 bool LiveRange::CanCover(LifetimePosition position) const {
   if (IsEmpty()) return false;
   return Start().Value() <= position.Value() &&
@@ -536,6 +558,7 @@ LAllocator::LAllocator(int num_values, HGraph* graph)
       active_live_ranges_(8, zone()),
       inactive_live_ranges_(8, zone()),
       reusable_slots_(8, zone()),
+      spill_ranges_(8, zone()),
       next_virtual_register_(num_values),
       first_artificial_register_(num_values),
       mode_(UNALLOCATED_REGISTERS),
@@ -1031,7 +1054,7 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
   for (int i = 0; i < phis->length(); ++i) {
     HPhi* phi = phis->at(i);
     LUnallocated* phi_operand =
-        new(chunk()->zone()) LUnallocated(LUnallocated::NONE);
+        new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
     phi_operand->set_virtual_register(phi->id());
     for (int j = 0; j < phi->OperandCount(); ++j) {
       HValue* op = phi->OperandAt(j);
@@ -1098,6 +1121,7 @@ bool LAllocator::Allocate(LChunk* chunk) {
   if (!AllocationOk()) return false;
   AllocateDoubleRegisters();
   if (!AllocationOk()) return false;
+  ReuseSpillSlots();
   PopulatePointerMaps();
   ConnectRanges();
   ResolveControlFlow();
@@ -1105,6 +1129,134 @@ bool LAllocator::Allocate(LChunk* chunk) {
 }
 
 
+static bool AreUseIntervalsIntersecting(
+    UseInterval* interval1, UseInterval* interval2) {
+  while (interval1 != NULL && interval2 != NULL) {
titzer, 2014/06/17 09:01:34:
  I suppose we could speed this interval check up wi…
Jarin, 2014/06/24 21:49:58:
  Yes, the asymptotic complexity is indeed bad, but…
+    if (interval1->start().Value() < interval2->start().Value()) {
+      if (interval1->end().Value() > interval2->start().Value()) {
+        return true;
+      }
+      interval1 = interval1->next();
+    } else {
+      if (interval2->end().Value() > interval1->start().Value()) {
+        return true;
+      }
+      interval2 = interval2->next();
+    }
+  }
+  return false;
+}
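
For intuition on the complexity discussed above: a single call walks each of the two sorted, disjoint interval lists at most once, so the test itself is O(n + m); the quadratic behaviour the reviewers discuss comes from invoking it for many pairs of spill ranges. Below is a minimal standalone sketch of the same sweep, where the Interval struct and the driver are illustrative stand-ins, not V8's UseInterval API.

    #include <cstdio>

    // Half-open interval [start, end) in a sorted, non-overlapping list.
    struct Interval {
      int start;
      int end;
      Interval* next;
    };

    static bool Intersecting(Interval* a, Interval* b) {
      while (a != NULL && b != NULL) {
        // Whichever interval starts first can only overlap the other's head.
        if (a->start < b->start) {
          if (a->end > b->start) return true;
          a = a->next;
        } else {
          if (b->end > a->start) return true;
          b = b->next;
        }
      }
      return false;
    }

    int main() {
      Interval a2 = {9, 12, NULL}, a1 = {4, 6, &a2};  // [4,6) -> [9,12)
      Interval b2 = {8, 10, NULL}, b1 = {1, 3, &b2};  // [1,3) -> [8,10)
      std::printf("%d\n", Intersecting(&a1, &b1));    // Prints 1: [9,12) hits [8,10).
      return 0;
    }
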
+
+
+SpillRange::SpillRange(LiveRange* range, int id, Zone* zone)
+    : id_(id),
+      live_ranges_(1, zone) {
+  UseInterval* src = range->first_interval();
+  UseInterval* result = NULL;
+  UseInterval* node = NULL;
+  // Copy the nodes.
+  while (src != NULL) {
+    UseInterval* new_node = new(zone) UseInterval(src->start(), src->end());
+    if (result == NULL) {
+      result = new_node;
+    } else {
+      node->set_next(new_node);
+    }
+    node = new_node;
+    src = src->next();
+  }
+  use_interval_ = result;
+  live_ranges_.Add(range, zone);
+  ASSERT(range->GetSpillRangeId() == SpillRange::INVALID_ID);
+  range->SetSpillRangeId(id);
+}
+
+
+void SpillRange::Swap(UseInterval*& a, UseInterval*& b) {
titzer, 2014/06/17 09:01:34:
  You can replace uses of this with std::swap
Jarin, 2014/06/24 21:49:58:
  Done.
+  UseInterval* temp = a;
+  a = b;
+  b = temp;
+}
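
As the thread above records, this helper duplicates std::swap (available from <algorithm>, or <utility> since C++11) for pointer lvalues, so its call sites can simply read std::swap(current, other) and the helper can be deleted; the author confirmed making that replacement.
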
+
+
+bool SpillRange::TryMerge(SpillRange* other, Zone* zone) {
+  if (Kind() == other->Kind() &&
+      !AreUseIntervalsIntersecting(use_interval_, other->use_interval_)) {
+    MergeDisjointIntervals(other->use_interval_, zone);
+    other->use_interval_ = NULL;
+
+    for (int i = 0; i < other->live_ranges_.length(); i++) {
+      ASSERT(other->live_ranges_.at(i)->GetSpillRangeId() == other->id());
+      other->live_ranges_.at(i)->SetSpillRangeId(id_);
+    }
+
+    live_ranges_.AddAll(other->live_ranges_, zone);
+    other->live_ranges_.Clear();
+
+    return true;
+  }
+  return false;
+}
+
+
+void SpillRange::SetOperand(LOperand* op) {
+  for (int i = 0; i < live_ranges_.length(); i++) {
+    ASSERT(live_ranges_.at(i)->GetSpillRangeId() == id());
+    live_ranges_.at(i)->CommitSpillOperand(op);
+  }
+}
+
+
+void SpillRange::MergeDisjointIntervals(UseInterval* other, Zone* zone) {
+  UseInterval* tail = NULL;
+  UseInterval* current = use_interval_;
+  while (other != NULL) {
+    // Make sure the 'current' list starts first.
+    if (current == NULL || current->start().Value() > other->start().Value()) {
+      Swap(current, other);
+    }
+
+    // Check disjointness.
+    ASSERT(other == NULL || current->end().Value() <= other->start().Value());
+
+    // Append the 'current' node to the result accumulator and move forward.
+    if (tail == NULL) {
+      use_interval_ = current;
+    } else {
+      tail->set_next(current);
+    }
+    tail = current;
+    current = current->next();
+  }
+  // The other list is empty, so we are done.
+}
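
The splice above allocates nothing: each iteration detaches whichever list's head starts earlier and links it onto the growing result. Here is a standalone sketch of the same destructive merge, using the same illustrative Interval struct as the earlier sketch (again, not V8 code):

    #include <algorithm>  // std::swap
    #include <cstddef>    // NULL

    struct Interval { int start; int end; Interval* next; };

    // Merges two sorted, mutually disjoint lists in place; returns the head.
    static Interval* MergeDisjoint(Interval* current, Interval* other) {
      Interval* head = NULL;
      Interval* tail = NULL;
      while (other != NULL) {
        // Keep 'current' on the list whose next interval starts first.
        if (current == NULL || current->start > other->start) {
          std::swap(current, other);
        }
        // Splice the earliest remaining interval onto the result.
        if (tail == NULL) {
          head = current;
        } else {
          tail->next = current;
        }
        tail = current;
        current = current->next;
      }
      // 'other' is exhausted; any remainder of 'current' is already linked
      // behind 'tail', or the merge was trivial and 'current' is the result.
      return head != NULL ? head : current;
    }
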
+
+
+void LAllocator::ReuseSpillSlots() {
+  // Merge disjoint spill ranges.
+  for (int i = 0; i < spill_ranges_.length(); i++) {
+    SpillRange* range = spill_ranges_.at(i);
+    if (!range->IsEmpty()) {
+      for (int j = i + 1; j < spill_ranges_.length(); j++) {
+        SpillRange* other = spill_ranges_.at(j);
+        if (!other->IsEmpty()) {
+          range->TryMerge(spill_ranges_.at(j), zone());
+        }
+      }
+    }
+  }
+
+  // Allocate slots for the merged spill ranges.
+  for (int i = 0; i < spill_ranges_.length(); i++) {
+    SpillRange* range = spill_ranges_.at(i);
+    if (!range->IsEmpty()) {
+      LOperand* op = chunk_->GetNextSpillSlot(range->Kind());
+      range->SetOperand(op);
+    }
+  }
+}
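
Note the interplay with TryMerge here: a successful merge moves the loser's intervals and live ranges into the winner and leaves the loser empty, so the IsEmpty() guards skip it from then on and each surviving merged group gets exactly one stack slot. The pairwise loop is quadratic in the number of spill ranges, which is the trade-off acknowledged in the review thread above.
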
+
+
 void LAllocator::MeetRegisterConstraints() {
   LAllocatorPhase phase("L_Register constraints", this);
   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
@@ -1495,6 +1647,104 @@ void LAllocator::AllocateDoubleRegisters() {
 }
 
 
+bool LAllocator::TryReuseSpillForPhi(LiveRange* range) {
+  ASSERT(!range->HasAllocatedSpillOperand());
+  if (range->IsChild()) {
+    return false;
+  }
+  HValue* instr = graph_->LookupValue(range->id());
+  if (instr == NULL || !instr->IsPhi()) {
+    return false;
+  }
+
+  // Count the number of spilled operands.
+  HPhi* phi = HPhi::cast(instr);
+  int spilled_count = 0;
+  LiveRange* first_op = NULL;
+  for (int i = 0; i < phi->OperandCount(); i++) {
+    HValue* op = phi->OperandAt(i);
+    LiveRange* op_range = LiveRangeFor(op->id());
+    if (op_range->GetSpillRangeId() != SpillRange::INVALID_ID) {
+      HBasicBlock* pred = instr->block()->predecessors()->at(i);
+      LifetimePosition pred_end = LifetimePosition::FromInstructionIndex(
+          pred->last_instruction_index());
+      while (op_range != NULL && !op_range->CanCover(pred_end)) {
+        op_range = op_range->next();
+      }
+      if (op_range != NULL && op_range->IsSpilled()) {
+        spilled_count++;
+        if (first_op == NULL) {
+          first_op = op_range->TopLevel();
+        }
+      }
+    }
+  }
+
+  // Only continue if more than half of the operands are spilled.
+  if (spilled_count * 2 <= phi->OperandCount()) {
+    return false;
+  }
+
+  // Try to merge the spilled operands and count the number of merged spilled
+  // operands.
+  ASSERT(first_op != NULL);
+  SpillRange* first_op_spill = spill_ranges_.at(first_op->GetSpillRangeId());
+  int num_merged = 1;
+  for (int i = 1; i < phi->OperandCount(); i++) {
+    HValue* op = phi->OperandAt(i);
+    LiveRange* op_range = LiveRangeFor(op->id());
+    int spill_range_id = op_range->GetSpillRangeId();
+    if (spill_range_id != SpillRange::INVALID_ID) {
+      SpillRange* op_spill = spill_ranges_.at(spill_range_id);
+      if (op_spill->id() == first_op_spill->id() ||
+          first_op_spill->TryMerge(op_spill, zone())) {
+        num_merged++;
+      }
+    }
+  }
+
+  // Only continue if enough operands could be merged to the same spill slot.
+  if (num_merged * 2 <= phi->OperandCount() ||
+      AreUseIntervalsIntersecting(first_op_spill->interval(),
+                                  range->first_interval())) {
+    return false;
+  }
+
+  // If the range does not need a register soon, spill it to the merged
+  // spill range.
+  LifetimePosition next_pos = range->Start();
+  if (IsGapAt(next_pos.InstructionIndex())) {
+    next_pos = next_pos.NextInstruction();
+  }
+  UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
+  if (pos == NULL) {
+    SpillRange* spill_range = AssignSpillRangeToLiveRange(range->TopLevel());
+    CHECK(first_op_spill->TryMerge(spill_range, zone()));
+    Spill(range);
+    return true;
+  } else if (pos->pos().Value() >
+             range->Start().NextInstruction().Value()) {
+    SpillRange* spill_range = AssignSpillRangeToLiveRange(range->TopLevel());
+    CHECK(first_op_spill->TryMerge(spill_range, zone()));
+    SpillBetween(range, range->Start(), pos->pos());
+    if (!AllocationOk()) return false;
+    ASSERT(UnhandledIsSorted());
+    return true;
+  }
+
+  return false;
+}
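
To make the two majority checks concrete: a phi with five operands passes the first check only if at least three of them are spilled (3 * 2 = 6 > 5), and passes the second only if at least three ended up merged into first_op_spill's range; the AreUseIntervalsIntersecting test then confirms the phi's own live range can share that slot without overlapping any range already assigned to it.
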
+
+
+SpillRange* LAllocator::AssignSpillRangeToLiveRange(LiveRange* range) {
+  int spill_id = spill_ranges_.length();
+  SpillRange* spill_range = new(zone()) SpillRange(range, spill_id, zone());
+  spill_ranges_.Add(spill_range, zone());
+  return spill_range;
+}
+
+
 void LAllocator::AllocateRegisters() {
   ASSERT(unhandled_live_ranges_.is_empty());
 
@@ -1564,6 +1814,11 @@ void LAllocator::AllocateRegisters() {
       }
     }
 
+    if (TryReuseSpillForPhi(current)) {
+      continue;
+    }
+    if (!AllocationOk()) return;
+
     for (int i = 0; i < active_live_ranges_.length(); ++i) {
       LiveRange* cur_active = active_live_ranges_.at(i);
       if (cur_active->End().Value() <= position.Value()) {
@@ -1714,36 +1969,10 @@ bool LAllocator::UnhandledIsSorted() {
 }
 
 
-void LAllocator::FreeSpillSlot(LiveRange* range) {
-  // Check that we are the last range.
-  if (range->next() != NULL) return;
-
-  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
-
-  int index = range->TopLevel()->GetSpillOperand()->index();
-  if (index >= 0) {
-    reusable_slots_.Add(range, zone());
-  }
-}
-
-
-LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
-  if (reusable_slots_.is_empty()) return NULL;
-  if (reusable_slots_.first()->End().Value() >
-      range->TopLevel()->Start().Value()) {
-    return NULL;
-  }
-  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
-  reusable_slots_.Remove(0);
-  return result;
-}
-
-
 void LAllocator::ActiveToHandled(LiveRange* range) {
   ASSERT(active_live_ranges_.Contains(range));
   active_live_ranges_.RemoveElement(range);
   TraceAlloc("Moving live range %d from active to handled\n", range->id());
-  FreeSpillSlot(range);
 }
 
 
@@ -1759,7 +1988,6 @@ void LAllocator::InactiveToHandled(LiveRange* range) {
   ASSERT(inactive_live_ranges_.Contains(range));
   inactive_live_ranges_.RemoveElement(range);
   TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
-  FreeSpillSlot(range);
 }
 
 
@@ -1843,7 +2071,6 @@ bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
     AddToUnhandledSorted(tail);
   }
-
   // Register reg is available at the range start and is free until
   // the range end.
   ASSERT(pos.Value() >= current->End().Value());
@@ -2151,9 +2378,7 @@ void LAllocator::Spill(LiveRange* range) {
   LiveRange* first = range->TopLevel();
 
   if (!first->HasAllocatedSpillOperand()) {
-    LOperand* op = TryReuseSpillSlot(range);
-    if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
-    first->SetSpillOperand(op);
+    AssignSpillRangeToLiveRange(first);
   }
   range->MakeSpilled(chunk()->zone());
 }
} |