Index: src/compiler/register-allocator.cc |
diff --git a/src/compiler/register-allocator.cc b/src/compiler/register-allocator.cc |
index 0dc76000f784771856dd10e29daedddb3b2ad27e..84c0b44a2cfcb5f4480ebac53c9aa9aa8f1cff41 100644 |
--- a/src/compiler/register-allocator.cc |
+++ b/src/compiler/register-allocator.cc |
@@ -692,7 +692,8 @@ TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineType machine_type) |
spill_start_index_(kMaxInt), |
last_child_(this), |
last_pos_(nullptr), |
- splinter_(nullptr) { |
+ splinter_(nullptr), |
+ elidable_def_(nullptr) { |
bits_ |= SpillTypeField::encode(SpillType::kNoSpillType); |
} |
@@ -1625,6 +1626,9 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { |
if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false); |
} |
// Handle constant/fixed output operands. |
+ bool is_prespilled_parameter = |
+ first->arch_opcode() == ArchOpcode::kArchNop && first->OutputCount() == 2; |
Jarin
2015/11/12 12:08:33
This looks really brittle because we do not even c
Mircea Trofin
2015/11/12 18:01:35
I see. I thought we used ArchNop just for introduc
|
+ |
for (size_t i = 0; i < first->OutputCount(); i++) { |
InstructionOperand* output = first->OutputAt(i); |
if (output->IsConstant()) { |
@@ -1643,6 +1647,8 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { |
UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg); |
bool is_tagged = code()->IsReference(output_vreg); |
AllocateFixed(first_output, instr_index, is_tagged); |
+ MoveOperands* def = data()->AddGapMove( |
+ instr_index + 1, Instruction::START, *first_output, output_copy); |
// This value is produced on the stack, we never need to spill it. |
if (first_output->IsStackSlot()) { |
@@ -1651,9 +1657,19 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { |
range->SetSpillOperand(LocationOperand::cast(first_output)); |
range->SetSpillStartIndex(instr_index + 1); |
assigned = true; |
+ } else if (is_prespilled_parameter) { |
+      // We need the def so that the live range has the shape expected by |
+      // the rest of the pipeline. Upon assigning operands, if this move |
+      // ends up assigning the register value to the stack slot, we may |
+      // delete it. |
Jarin
2015/11/12 12:08:33
To be honest, this MarkElidableDef business seems
Mircea Trofin
2015/11/12 18:01:35
Here's what happens with a vanilla fixed register:
|
+ range->MarkElidableDef(def); |
+ UnallocatedOperand* spill_op = |
+ UnallocatedOperand::cast(first->OutputAt(1)); |
+ AllocateFixed(spill_op, instr_index, is_tagged); |
+ range->SetSpillOperand(spill_op); |
+ range->SetSpillStartIndex(instr_index + 1); |
+ break; |
} |
- data()->AddGapMove(instr_index + 1, Instruction::START, *first_output, |
- output_copy); |
} |
// Make sure we add a gap move for spilling (if we have not done |
// so already). |
@@ -3016,6 +3032,11 @@ void OperandAssigner::CommitAssignment() { |
range->ConvertUsesToOperand(assigned, spill_operand); |
} |
+ MoveOperands* move = top_range->elidable_def(); |
+ if (move != nullptr && move->destination().Equals(spill_operand)) { |
+ move->Eliminate(); |
+ } |
+ |
if (!spill_operand.IsInvalid()) { |
// If this top level range has a child spilled in a deferred block, we use |
// the range and control flow connection mechanism instead of spilling at |
@@ -3073,6 +3094,7 @@ void ReferenceMapPopulator::PopulateReferenceMaps() { |
if (!data()->IsReference(range)) continue; |
// Skip empty live ranges. |
if (range->IsEmpty()) continue; |
+ if (range->IsReferenceAccountedByCaller()) continue; |
// Find the extent of the range and its children. |
int start = range->Start().ToInstructionIndex(); |