Chromium Code Reviews

| Index: src/lithium-allocator.cc |
| diff --git a/src/lithium-allocator.cc b/src/lithium-allocator.cc |
| index 91a98112b61bd60a50b56d98f8ca4eebacdf01ff..42828dbd32bc9a2a2cc6cd71c763b527cc62e57c 100644 |
| --- a/src/lithium-allocator.cc |
| +++ b/src/lithium-allocator.cc |
| @@ -139,7 +139,8 @@ LiveRange::LiveRange(int id, Zone* zone) |
| current_interval_(NULL), |
| last_processed_use_(NULL), |
| spill_operand_(new(zone) LOperand()), |
| - spill_start_index_(kMaxInt) { } |
| + spill_start_index_(kMaxInt), |
| + spill_source_(NULL) { } |
| void LiveRange::set_assigned_register(int reg, |
| @@ -541,6 +542,7 @@ LAllocator::LAllocator(int num_values, HGraph* graph) |
| active_live_ranges_(8, zone_), |
| inactive_live_ranges_(8, zone_), |
| reusable_slots_(8, zone_), |
| + pending_spilled_(8, zone_), |
| next_virtual_register_(num_values), |
| first_artificial_register_(num_values), |
| mode_(GENERAL_REGISTERS), |
| @@ -798,7 +800,6 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first, |
| if (first != NULL && first->Output() != NULL) { |
| LUnallocated* first_output = LUnallocated::cast(first->Output()); |
| LiveRange* range = LiveRangeFor(first_output->virtual_register()); |
| - bool assigned = false; |
| if (first_output->HasFixedPolicy()) { |
| LUnallocated* output_copy = first_output->CopyUnconstrained(zone()); |
| bool is_tagged = HasTaggedValue(first_output->virtual_register()); |
| @@ -807,22 +808,27 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first, |
| // This value is produced on the stack, we never need to spill it. |
| if (first_output->IsStackSlot()) { |
| range->SetSpillOperand(first_output); |
| - range->SetSpillStartIndex(gap_index - 1); |
| - assigned = true; |
| + range->set_spill_start_index(gap_index - 1); |
| } |
| chunk_->AddGapMove(gap_index, first_output, output_copy); |
| } |
| - if (!assigned) { |
| - range->SetSpillStartIndex(gap_index); |
| + if (!range->HasAllocatedSpillOperand()) { |
| + // Instructions that are emitted at uses produce values multiple times |
| + // from different spill sources. There is, however, no dominance relation |
| + // between different occurrences of the same constant and thus we have to |
| + // emit spill stores eagerly at all of them. |
| + if (range->spill_source() != NULL) { |
| + if (range->HasPendingSpillStore()) { |
| + EmitSpillStore(range); |
| + range->DisableSpillStoreSinking(); |
| + } |
| - // This move to spill operand is not a real use. Liveness analysis |
| - // and splitting of live ranges do not account for it. |
| - // Thus it should be inserted to a lifetime position corresponding to |
| - // the instruction end. |
| - LGap* gap = GapAt(gap_index); |
| - LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone()); |
| - move->AddMove(first_output, range->GetSpillOperand(), zone()); |
| + EmitSpillStore(gap_index, first_output, range->GetSpillOperand()); |
| + } else { |
| + range->set_spill_start_index(gap_index); |
| + range->set_spill_source(first_output); |
| + } |
| } |
| } |
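The hunk above relies on several new LiveRange members (spill_source(), set_spill_source(), HasPendingSpillStore(), DisableSpillStoreSinking()) whose declarations live in lithium-allocator.h and are not part of this file's diff. A minimal sketch of the bookkeeping they imply, assuming HasPendingSpillStore() simply means "a spill source has been recorded and the corresponding store has not been emitted yet":

// Sketch only: the real declarations are in lithium-allocator.h. The member
// names are taken from the diff above; the exact semantics of
// HasPendingSpillStore() are an assumption.
#include <cstddef>

class LOperand;

class SpillStoreBookkeeping {
 public:
  SpillStoreBookkeeping() : spill_source_(NULL), sinking_disabled_(false) {}

  LOperand* spill_source() const { return spill_source_; }
  void set_spill_source(LOperand* source) { spill_source_ = source; }

  // A spill store is pending once a source operand has been recorded and the
  // corresponding gap move has not been emitted yet.
  bool HasPendingSpillStore() const {
    return spill_source_ != NULL && !sinking_disabled_;
  }

  // Called after a store has been emitted eagerly, so later definitions of
  // the same value do not try to sink it again.
  void DisableSpillStoreSinking() { sinking_disabled_ = true; }

 private:
  LOperand* spill_source_;
  bool sinking_disabled_;
};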
| @@ -1054,10 +1060,61 @@ void LAllocator::ResolvePhis(HBasicBlock* block) { |
| } |
| LiveRange* live_range = LiveRangeFor(phi->id()); |
| - LLabel* label = chunk_->GetLabel(phi->block()->block_id()); |
| - label->GetOrCreateParallelMove(LGap::START, zone())-> |
| - AddMove(phi_operand, live_range->GetSpillOperand(), zone()); |
| - live_range->SetSpillStartIndex(phi->block()->first_instruction_index()); |
| + live_range->set_spill_start_index(phi->block()->first_instruction_index()); |
| + live_range->set_spill_source(phi_operand); |
| + } |
| +} |
| + |
| + |
| +void LAllocator::TrySinkSpillStoreTo(LiveRange* range, HBasicBlock* block) { |
| + LifetimePosition pos = LifetimePosition::FromInstructionIndex( |
| + block->first_instruction_index()); |
| + |
| + if (!range->Covers(pos)) { |
| + // For now we limit sinking to the case where the initial live range |
| + // extends beyond the loop and covers the position to which we are trying |
| + // to sink the spill store. |
| + return; |
| + } |
| + |
| + // Check that selected position dominates all spilled siblings. |
| + for (LiveRange* sibling = range; sibling != NULL; sibling = sibling->next()) { |
| + if (!sibling->IsSpilled()) { |
| + continue; |
| + } |
| + |
| + if (pos.Value() > sibling->Start().Value()) { |
| + return; |
| + } |
| + |
| + HBasicBlock* sibling_block = GetBlock(sibling->Start()); |
| + if ((block != sibling_block) && !block->Dominates(sibling_block)) { |
| + return; |
| + } |
| + } |
| + |
| + // Sink the spill store. |
| + range->set_spill_source(range->CreateAssignedOperand(zone())); |
| + range->set_spill_start_index(pos.InstructionIndex()); |
| +} |
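Read as a predicate, TrySinkSpillStoreTo accepts a candidate position only if the parent range is still live there, the position is not later than the start of any spilled sibling, and its block dominates every block in which a spilled sibling starts. A self-contained toy model of that check (Block, Sibling and the integer positions below are simplified stand-ins, not V8 types):

#include <cstddef>
#include <vector>

struct Block {
  const Block* idom;  // immediate dominator, NULL for the entry block
  bool Dominates(const Block* other) const {
    for (const Block* b = other; b != NULL; b = b->idom) {
      if (b == this) return true;
    }
    return false;
  }
};

struct Sibling {
  bool spilled;
  int start;           // lifetime position at which this sibling begins
  const Block* block;  // block containing that position
};

// pos/block describe the candidate sink point (e.g. a loop exit block);
// parent_covers_pos stands for range->Covers(pos) in the real code.
bool CanSinkSpillStoreTo(int pos, const Block* block, bool parent_covers_pos,
                         const std::vector<Sibling>& siblings) {
  if (!parent_covers_pos) return false;  // value must still be live there
  for (size_t i = 0; i < siblings.size(); i++) {
    const Sibling& s = siblings[i];
    if (!s.spilled) continue;
    if (pos > s.start) return false;  // the store would happen too late
    if (block != s.block && !block->Dominates(s.block)) return false;
  }
  return true;
}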
| + |
| + |
| +void LAllocator::SinkSpillStores() { |
| + for (int i = 0; i < pending_spilled_.length(); i++) { |
| + LiveRange* range = pending_spilled_[i]; |
| + |
| + HBasicBlock* range_block = GetBlock(range->Start()); |
| + |
| + HBasicBlock* loop_header = range_block->IsLoopHeader() ? |
| + range_block : range_block->parent_loop_header(); |
| + ASSERT(loop_header != NULL); |
| + |
| + HBasicBlock* block = loop_header->loop_information()->exit_block(); |
danno  2013/01/10 15:33:27
How about a loop here that tries to spill to the o
Vyacheslav Egorov (Google)  2013/01/25 13:49:17
Done.
| + if (block != NULL) { |
| + TrySinkSpillStoreTo(range, block); |
| + } |
| + |
| + EmitSpillStore(range); |
| } |
| } |
| @@ -1073,6 +1130,11 @@ bool LAllocator::Allocate(LChunk* chunk) { |
| if (!AllocationOk()) return false; |
| AllocateDoubleRegisters(); |
| if (!AllocationOk()) return false; |
| + |
| + // Sink pending spill stores down to loop exits. Should be done before |
| + // pointer maps are populated. |
| + SinkSpillStores(); |
| + |
| PopulatePointerMaps(); |
| if (has_osr_entry_) ProcessOsrEntry(); |
| ConnectRanges(); |
| @@ -2092,11 +2154,49 @@ void LAllocator::Spill(LiveRange* range) { |
| LOperand* op = TryReuseSpillSlot(range); |
| if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS); |
| first->SetSpillOperand(op); |
| + |
| + // This is the first time we are spilling a sibling of this range. |
| + // If the spill store was not eagerly emitted at the parent's start then we need |
danno  2013/01/10 15:33:27
nit: not eagerly emitted
Vyacheslav Egorov (Google)  2013/01/25 13:49:17
Done.
| + // to either emit it eagerly or postpone it for the spill store sinking pass. |
| + if (first->HasPendingSpillStore()) { |
| + HBasicBlock* first_block = GetBlock(first->Start()); |
| + HBasicBlock* loop_header = first_block->IsLoopHeader() ? |
| + first_block : first_block->parent_loop_header(); |
| + if ((first != range) && |
| + (loop_header != NULL) && |
| + (GetBlock(range->Start())->parent_loop_header() != loop_header)) { |
danno  2013/01/10 15:33:27
Why don't you have to handle the case there GetBlo
Vyacheslav Egorov (Google)  2013/01/25 13:49:17
Done.
| + // If the range was defined inside the loop but is spilled outside then |
| + // we should try to sink the corresponding spill store out of the loop to |
| + // reduce the number of memory writes. |
danno  2013/01/10 15:33:27
You check that it's a different loop, but don't yo
Vyacheslav Egorov (Google)  2013/01/25 13:49:17
We are always moving spilling *up*, so it can't go
| + pending_spilled_.Add(first, zone_); |
| + } else { |
| + EmitSpillStore(first); |
| + } |
| + } |
| } |
| range->MakeSpilled(zone_); |
| } |
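In Spill() above, the pending spill store of the parent range is handed to the sinking pass only when the spilled range is a later sibling (first != range), the parent was defined inside a loop, and the sibling being spilled starts outside that loop; in every other case the store is emitted eagerly at the definition. A condensed restatement of that decision, using hypothetical parameter names rather than the patch's own identifiers:

// Condensed restatement of the branch in LAllocator::Spill above; the
// parameter names are hypothetical stand-ins for values the allocator
// already has at hand, not real V8 identifiers.
#include <cstddef>

struct LoopHeaderStub;  // stand-in for HBasicBlock*

bool ShouldDeferSpillStore(bool spilling_a_sibling,             // first != range
                           const LoopHeaderStub* def_loop,      // loop around the definition block
                           const LoopHeaderStub* spill_loop) {  // loop around the spilled sibling
  // Hand the store to the sinking pass only when the value is defined inside
  // a loop and the sibling being spilled starts under a different (or no)
  // loop header; otherwise emit the spill store eagerly at the definition.
  return spilling_a_sibling && def_loop != NULL && spill_loop != def_loop;
}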
| +void LAllocator::EmitSpillStore(LiveRange* range) { |
| + ASSERT(range->HasPendingSpillStore()); |
| + EmitSpillStore(range->spill_start_index(), |
| + range->spill_source(), |
| + range->GetSpillOperand()); |
| +} |
| + |
| + |
| +void LAllocator::EmitSpillStore(int pos, LOperand* source, LOperand* spill) { |
| + // This move to spill operand is not a real use. Liveness analysis |
| + // and splitting of live ranges do not account for it. |
| + // Thus it should be inserted to a lifetime position corresponding to |
| + // the instruction end. |
| + LGap* gap = GapAt(pos); |
| + LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone()); |
| + move->AddMove(source, spill, zone()); |
| +} |
| + |
| + |
| int LAllocator::RegisterCount() const { |
| return num_registers_; |
| } |