Index: src/compiler/move-optimizer.cc
diff --git a/src/compiler/move-optimizer.cc b/src/compiler/move-optimizer.cc
index f7abcf462d6f6f17f6c78ac568d378de7e6de3f2..2c4c720d263b50bd909acd5800b2eccd8df6bc48 100644
--- a/src/compiler/move-optimizer.cc
+++ b/src/compiler/move-optimizer.cc
@@ -10,6 +10,11 @@ namespace compiler {
 namespace {
+typedef std::pair<InstructionOperand, InstructionOperand> MoveKey;
+typedef ZoneMap<MoveKey, unsigned> MoveMap;
+typedef ZoneSet<InstructionOperand> OperandSet;
+
+
 bool GapsCanMoveOver(Instruction* instr) {
   DCHECK(!instr->IsGapMoves());
   return instr->IsSourcePosition() || instr->IsNop();
@@ -48,6 +53,10 @@ void MoveOptimizer::Run() {
   for (auto* block : code()->instruction_blocks()) {
     CompressBlock(block);
   }
+  for (auto block : code()->instruction_blocks()) {
+    if (block->PredecessorCount() <= 1) continue;
+    OptimizeMerge(block);
+  }
   for (auto gap : to_finalize_) {
     FinalizeMoves(gap);
   }
@@ -130,6 +139,107 @@ void MoveOptimizer::CompressBlock(InstructionBlock* block) {
 }
+GapInstruction* MoveOptimizer::LastGap(InstructionBlock* block) {
+  int gap_index = block->last_instruction_index() - 1;
+  auto instr = code()->instructions()[gap_index];
+  return GapInstruction::cast(instr);
+}
+
+
+void MoveOptimizer::OptimizeMerge(InstructionBlock* block) {
+  DCHECK(block->PredecessorCount() > 1);
+  // Ensure that the last instruction in each incoming block doesn't contain
+  // anything that would prevent moving gap moves across it.
+  for (auto pred_index : block->predecessors()) {
+    auto pred = code()->InstructionBlockAt(pred_index);
+    auto last_instr = code()->instructions()[pred->last_instruction_index()];
+    DCHECK(!last_instr->IsGapMoves());
+    if (last_instr->IsSourcePosition()) continue;
+    if (last_instr->IsCall()) return;
+    if (last_instr->TempCount() != 0) return;
+    if (last_instr->OutputCount() != 0) return;
+    for (size_t i = 0; i < last_instr->InputCount(); ++i) {
+      auto op = last_instr->InputAt(i);
+      if (!op->IsConstant() && !op->IsImmediate()) return;
+    }
+  }
+  // TODO(dcarney): pass a ZonePool down for this?
+  MoveMap move_map(local_zone());
+  size_t correct_counts = 0;
+  // Accumulate set of shared moves.
+  for (auto pred_index : block->predecessors()) {
+    auto pred = code()->InstructionBlockAt(pred_index);
+    auto gap = LastGap(pred);
+    if (gap->parallel_moves()[0] == nullptr ||
+        gap->parallel_moves()[0]->move_operands()->is_empty()) {
+      return;
+    }
+    auto move_ops = gap->parallel_moves()[0]->move_operands();
+    for (auto op = move_ops->begin(); op != move_ops->end(); ++op) {
+      if (op->IsRedundant()) continue;
+      auto src = *op->source();
+      auto dst = *op->destination();
+      MoveKey key = {src, dst};
+      auto res = move_map.insert(std::make_pair(key, 1));
+      if (!res.second) {
+        res.first->second++;
+        if (res.first->second == block->PredecessorCount()) {
+          correct_counts++;
+        }
+      }
+    }
+  }
+  if (move_map.empty() || correct_counts != move_map.size()) return;
+  // Find insertion point.
+  GapInstruction* gap = nullptr;
+  for (int i = block->first_instruction_index();
+       i <= block->last_instruction_index(); ++i) {
+    auto instr = code()->instructions()[i];
+    if (instr->IsGapMoves()) {
+      gap = GapInstruction::cast(instr);
+      continue;
+    }
+    if (!GapsCanMoveOver(instr)) break;
+  }
+  DCHECK(gap != nullptr);
+  bool gap_initialized = true;
+  if (gap->parallel_moves()[0] == nullptr ||
+      gap->parallel_moves()[0]->move_operands()->is_empty()) {
+    to_finalize_.push_back(gap);
+  } else {
+    // Will compress after insertion.
+    gap_initialized = false;
+    std::swap(gap->parallel_moves()[0], gap->parallel_moves()[1]);
+  }
+  auto move = gap->GetOrCreateParallelMove(
+      static_cast<GapInstruction::InnerPosition>(0), code_zone());
+  // Delete relevant entries in predecessors and move everything to block.
+  bool first_iteration = true;
+  for (auto pred_index : block->predecessors()) {
+    auto pred = code()->InstructionBlockAt(pred_index);
+    auto gap = LastGap(pred);
brucedawson
2015/04/06 18:05:31
This line of code is worrisome because it shadows the outer 'gap' variable declared earlier in this function.
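
For context on the comment above: inside this loop, 'auto gap = LastGap(pred);' hides the insertion-point 'gap' found earlier in OptimizeMerge, and that outer 'gap' is used again once the loop has finished (in the CompressMoves call below). The stand-alone sketch that follows, with purely illustrative names that are not part of this patch, shows the shadowing pattern; compilers flag it under -Wshadow, and renaming the inner variable (for example to 'pred_gap') would keep the two objects unmistakable.

#include <iostream>
#include <vector>

int main() {
  std::vector<int> preds = {1, 2, 3};
  int gap = 0;                 // analogous to the insertion-point gap
  for (int pred : preds) {
    int gap = pred * 10;       // shadows the outer 'gap' inside the loop body
    std::cout << gap << '\n';  // prints 10, 20, 30: the per-predecessor value
  }
  std::cout << gap << '\n';    // prints 0: the outer 'gap' was never touched
  return 0;
}
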
+    auto move_ops = gap->parallel_moves()[0]->move_operands();
+    for (auto op = move_ops->begin(); op != move_ops->end(); ++op) {
+      if (op->IsRedundant()) continue;
+      MoveKey key = {*op->source(), *op->destination()};
+      auto it = move_map.find(key);
+      USE(it);
+      DCHECK(it != move_map.end());
+      if (first_iteration) {
+        move->AddMove(op->source(), op->destination(), code_zone());
+      }
+      op->Eliminate();
+    }
+    first_iteration = false;
+  }
+  // Compress.
+  if (!gap_initialized) {
+    CompressMoves(&temp_vector_0(), gap->parallel_moves()[0],
+                  gap->parallel_moves()[1]);
+  }
+}
+
+
 // Split multiple loads of the same constant or stack slot off into the second
 // slot and keep remaining moves in the first slot.
 void MoveOptimizer::FinalizeMoves(GapInstruction* gap) {