| Index: src/lithium-allocator.cc
|
| diff --git a/src/lithium-allocator.cc b/src/lithium-allocator.cc
|
| index 91a98112b61bd60a50b56d98f8ca4eebacdf01ff..8a3dac83bd802fbdcb0854d9d9347019d63c9695 100644
|
| --- a/src/lithium-allocator.cc
|
| +++ b/src/lithium-allocator.cc
|
| @@ -139,7 +139,8 @@ LiveRange::LiveRange(int id, Zone* zone)
|
| current_interval_(NULL),
|
| last_processed_use_(NULL),
|
| spill_operand_(new(zone) LOperand()),
|
| - spill_start_index_(kMaxInt) { }
|
| + spill_start_index_(kMaxInt),
|
| + spill_source_(NULL) { }
|
|
|
|
|
| void LiveRange::set_assigned_register(int reg,
|
| @@ -541,6 +542,7 @@ LAllocator::LAllocator(int num_values, HGraph* graph)
|
| active_live_ranges_(8, zone_),
|
| inactive_live_ranges_(8, zone_),
|
| reusable_slots_(8, zone_),
|
| + pending_spilled_(8, zone_),
|
| next_virtual_register_(num_values),
|
| first_artificial_register_(num_values),
|
| mode_(GENERAL_REGISTERS),
|
| @@ -798,7 +800,6 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
|
| if (first != NULL && first->Output() != NULL) {
|
| LUnallocated* first_output = LUnallocated::cast(first->Output());
|
| LiveRange* range = LiveRangeFor(first_output->virtual_register());
|
| - bool assigned = false;
|
| if (first_output->HasFixedPolicy()) {
|
| LUnallocated* output_copy = first_output->CopyUnconstrained(zone());
|
| bool is_tagged = HasTaggedValue(first_output->virtual_register());
|
| @@ -807,22 +808,27 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
|
| // This value is produced on the stack, we never need to spill it.
|
| if (first_output->IsStackSlot()) {
|
| range->SetSpillOperand(first_output);
|
| - range->SetSpillStartIndex(gap_index - 1);
|
| - assigned = true;
|
| + range->set_spill_start_index(gap_index - 1);
|
| }
|
| chunk_->AddGapMove(gap_index, first_output, output_copy);
|
| }
|
|
|
| - if (!assigned) {
|
| - range->SetSpillStartIndex(gap_index);
|
| + if (!range->HasAllocatedSpillOperand()) {
|
| + // Instructions that are emitted at uses produce values multiple times
|
| + // from different spill sources. There is however no dominance relation
|
| + // between different occurrences of the same constant and thus we have to
|
| + // emit spill stores eagerly at all of them.
|
| + if (range->spill_source() != NULL) {
|
| + if (range->HasPendingSpillStore()) {
|
| + EmitSpillStore(range);
|
| + range->DisableSpillStoreSinking();
|
| + }
|
|
|
| - // This move to spill operand is not a real use. Liveness analysis
|
| - // and splitting of live ranges do not account for it.
|
| - // Thus it should be inserted to a lifetime position corresponding to
|
| - // the instruction end.
|
| - LGap* gap = GapAt(gap_index);
|
| - LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone());
|
| - move->AddMove(first_output, range->GetSpillOperand(), zone());
|
| + EmitSpillStore(gap_index, first_output, range->GetSpillOperand());
|
| + } else {
|
| + range->set_spill_start_index(gap_index);
|
| + range->set_spill_source(first_output);
|
| + }
|
| }
|
| }
|
|
|
| @@ -1054,10 +1060,70 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
|
| }
|
|
|
| LiveRange* live_range = LiveRangeFor(phi->id());
|
| - LLabel* label = chunk_->GetLabel(phi->block()->block_id());
|
| - label->GetOrCreateParallelMove(LGap::START, zone())->
|
| - AddMove(phi_operand, live_range->GetSpillOperand(), zone());
|
| - live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
|
| + live_range->set_spill_start_index(phi->block()->first_instruction_index());
|
| + live_range->set_spill_source(phi_operand);
|
| + }
|
| +}
|
| +
|
| +
|
| +bool LAllocator::TrySinkSpillStoreTo(LiveRange* range, HBasicBlock* block) {
|
| + LifetimePosition pos = LifetimePosition::FromInstructionIndex(
|
| + block->first_instruction_index());
|
| +
|
| + if (!range->Covers(pos)) {
|
| + // For now we are limiting sinking to the case when the initial live range
|
| + // extends beyond the loop and covers the position to which we are trying
|
| + // to sink the spill store.
|
| + return false;
|
| + }
|
| +
|
| + // Check that selected position dominates all spilled siblings.
|
| + for (LiveRange* sibling = range; sibling != NULL; sibling = sibling->next()) {
|
| + if (!sibling->IsSpilled()) {
|
| + continue;
|
| + }
|
| +
|
| + if (pos.Value() > sibling->Start().Value()) {
|
| + return false;
|
| + }
|
| +
|
| + HBasicBlock* sibling_block = GetBlock(sibling->Start());
|
| + if ((block != sibling_block) && !block->Dominates(sibling_block)) {
|
| + return false;
|
| + }
|
| + }
|
| +
|
| + range->set_spill_start_index(pos.InstructionIndex());
|
| + return true;
|
| +}
|
| +
|
| +
|
| +void LAllocator::SinkSpillStores() {
|
| + for (int i = 0; i < pending_spilled_.length(); i++) {
|
| + LiveRange* range = pending_spilled_[i];
|
| +
|
| + HBasicBlock* range_block = GetBlock(range->Start());
|
| +
|
| + HBasicBlock* loop_header = range_block->IsLoopHeader() ?
|
| + range_block : range_block->parent_loop_header();
|
| +
|
| + bool replace_spill_source = false;
|
| + while (loop_header != NULL) {
|
| + HBasicBlock* block = loop_header->loop_information()->exit_block();
|
| + if (block == NULL || !TrySinkSpillStoreTo(range, block)) {
|
| + break;
|
| + }
|
| +
|
| + replace_spill_source = true;
|
| + loop_header = loop_header->parent_loop_header();
|
| + }
|
| +
|
| + if (replace_spill_source) {
|
| + // Spill store was sunk down. Replace spill source.
|
| + range->set_spill_source(range->CreateAssignedOperand(zone()));
|
| + }
|
| +
|
| + EmitSpillStore(range);
|
| }
|
| }
|
|
|
| @@ -1073,6 +1139,11 @@ bool LAllocator::Allocate(LChunk* chunk) {
|
| if (!AllocationOk()) return false;
|
| AllocateDoubleRegisters();
|
| if (!AllocationOk()) return false;
|
| +
|
| + // Sink pending spill stores down to loop exits. Should be done before
|
| + // pointer maps are populated.
|
| + SinkSpillStores();
|
| +
|
| PopulatePointerMaps();
|
| if (has_osr_entry_) ProcessOsrEntry();
|
| ConnectRanges();
|
| @@ -2092,11 +2163,47 @@ void LAllocator::Spill(LiveRange* range) {
|
| LOperand* op = TryReuseSpillSlot(range);
|
| if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
|
| first->SetSpillOperand(op);
|
| +
|
| + // This is the first time we are spilling a sibling of this range.
|
| + // If the spill store was not eagerly emitted at the parent's start then we need
|
| + // to either emit it eagerly or postpone it for spill store sinking pass.
|
| + if (first->HasPendingSpillStore()) {
|
| + HBasicBlock* loop_header = GetBlock(first->Start())->LoopHeader();
|
| + if ((first != range) &&
|
| + (loop_header != NULL) &&
|
| + (GetBlock(range->Start())->LoopHeader() != loop_header)) {
|
| + // If the range was defined inside the loop but is spilled outside then
|
| + // we should try to sink corresponding spill store out of the loop to
|
| + // reduce amount of memory writes.
|
| + pending_spilled_.Add(first, zone_);
|
| + } else {
|
| + EmitSpillStore(first);
|
| + }
|
| + }
|
| }
|
| range->MakeSpilled(zone_);
|
| }
|
|
|
|
|
| +void LAllocator::EmitSpillStore(LiveRange* range) {
|
| + ASSERT(range->HasPendingSpillStore());
|
| + EmitSpillStore(range->spill_start_index(),
|
| + range->spill_source(),
|
| + range->GetSpillOperand());
|
| +}
|
| +
|
| +
|
| +void LAllocator::EmitSpillStore(int pos, LOperand* source, LOperand* spill) {
|
| + // This move to spill operand is not a real use. Liveness analysis
|
| + // and splitting of live ranges do not account for it.
|
| + // Thus it should be inserted to a lifetime position corresponding to
|
| + // the instruction end.
|
| + LGap* gap = GapAt(pos);
|
| + LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone());
|
| + move->AddMove(source, spill, zone());
|
| +}
|
| +
|
| +
|
| int LAllocator::RegisterCount() const {
|
| return num_registers_;
|
| }
|
|
|