OLD | NEW |
(Empty) | |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "src/compiler/move-optimizer.h" |
| 6 |
| 7 namespace v8 { |
| 8 namespace internal { |
| 9 namespace compiler { |
| 10 |
// Constructs a move optimizer over |code|. |local_zone| backs the two
// scratch vectors (temp_vector_0_/temp_vector_1_) reused across passes so
// they are not reallocated per block.
MoveOptimizer::MoveOptimizer(Zone* local_zone, InstructionSequence* code)
    : local_zone_(local_zone),
      code_(code),
      temp_vector_0_(local_zone),
      temp_vector_1_(local_zone) {}
| 16 |
| 17 |
// Entry point. For each instruction block, merges runs of consecutive gap
// instructions so that all pending moves accumulate in parallel-move slot 0
// of the most recent gap (|prev_gap|), then hands that gap to FinalizeMoves
// whenever a real instruction (or the block end) terminates the run.
void MoveOptimizer::Run() {
  // First smash all consecutive moves into the left most move slot.
  for (auto* block : code()->instruction_blocks()) {
    GapInstruction* prev_gap = nullptr;
    for (int index = block->code_start(); index < block->code_end(); ++index) {
      auto instr = code()->instructions()[index];
      if (!instr->IsGapMoves()) {
        // Source positions and nops are transparent: the accumulated gap may
        // be carried across them. Any other instruction ends the run.
        if (instr->IsSourcePosition() || instr->IsNop()) continue;
        FinalizeMoves(&temp_vector_0_, &temp_vector_1_, prev_gap);
        prev_gap = nullptr;
        continue;
      }
      auto gap = GapInstruction::cast(instr);
      // Find first non-empty slot.
      int i = GapInstruction::FIRST_INNER_POSITION;
      for (; i <= GapInstruction::LAST_INNER_POSITION; i++) {
        auto move = gap->parallel_moves()[i];
        if (move == nullptr) continue;
        auto move_ops = move->move_operands();
        auto op = move_ops->begin();
        for (; op != move_ops->end(); ++op) {
          if (!op->IsRedundant()) break;
        }
        if (op == move_ops->end()) {
          move_ops->Rewind(0);  // Clear this redundant move.
        } else {
          break;  // Found index of first non-redundant move.
        }
      }
      // Nothing to do here.
      if (i == GapInstruction::LAST_INNER_POSITION + 1) {
        if (prev_gap != nullptr) {
          // Slide prev_gap down so we always know where to look for it.
          std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]);
          prev_gap = gap;
        }
        continue;
      }
      // Move the first non-empty gap to position 0.
      std::swap(gap->parallel_moves()[0], gap->parallel_moves()[i]);
      auto left = gap->parallel_moves()[0];
      // Compress everything into position 0.
      for (++i; i <= GapInstruction::LAST_INNER_POSITION; ++i) {
        auto move = gap->parallel_moves()[i];
        if (move == nullptr) continue;
        CompressMoves(&temp_vector_0_, left, move);
      }
      if (prev_gap != nullptr) {
        // Smash left into prev_gap, killing left.
        auto pred_moves = prev_gap->parallel_moves()[0];
        CompressMoves(&temp_vector_0_, pred_moves, left);
        // Keep the merged moves on the latest gap (slot 0) so the next
        // iteration always finds them in one known place.
        std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]);
      }
      prev_gap = gap;
    }
    // Flush moves still pending when the block ends on a gap run.
    FinalizeMoves(&temp_vector_0_, &temp_vector_1_, prev_gap);
  }
}
| 76 |
| 77 |
| 78 static MoveOperands* PrepareInsertAfter(ParallelMove* left, MoveOperands* move, |
| 79 Zone* zone) { |
| 80 auto move_ops = left->move_operands(); |
| 81 MoveOperands* replacement = nullptr; |
| 82 MoveOperands* to_eliminate = nullptr; |
| 83 for (auto curr = move_ops->begin(); curr != move_ops->end(); ++curr) { |
| 84 if (curr->IsEliminated()) continue; |
| 85 if (curr->destination()->Equals(move->source())) { |
| 86 DCHECK_EQ(nullptr, replacement); |
| 87 replacement = curr; |
| 88 if (to_eliminate != nullptr) break; |
| 89 } else if (curr->destination()->Equals(move->destination())) { |
| 90 DCHECK_EQ(nullptr, to_eliminate); |
| 91 to_eliminate = curr; |
| 92 if (replacement != nullptr) break; |
| 93 } |
| 94 } |
| 95 DCHECK(!(replacement == to_eliminate && replacement != nullptr)); |
| 96 if (replacement != nullptr) { |
| 97 auto new_source = new (zone) InstructionOperand( |
| 98 replacement->source()->kind(), replacement->source()->index()); |
| 99 move->set_source(new_source); |
| 100 } |
| 101 return to_eliminate; |
| 102 } |
| 103 |
| 104 |
| 105 void MoveOptimizer::CompressMoves(MoveOpVector* eliminated, ParallelMove* left, |
| 106 ParallelMove* right) { |
| 107 DCHECK(eliminated->empty()); |
| 108 auto move_ops = right->move_operands(); |
| 109 // Modify the right moves in place and collect moves that will be killed by |
| 110 // merging the two gaps. |
| 111 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { |
| 112 if (op->IsRedundant()) continue; |
| 113 MoveOperands* to_eliminate = PrepareInsertAfter(left, op, code_zone()); |
| 114 if (to_eliminate != nullptr) { |
| 115 eliminated->push_back(to_eliminate); |
| 116 } |
| 117 } |
| 118 // Eliminate dead moves. Must happen before insertion of new moves as the |
| 119 // contents of eliminated are pointers into a list. |
| 120 for (auto to_eliminate : *eliminated) { |
| 121 to_eliminate->Eliminate(); |
| 122 } |
| 123 eliminated->clear(); |
| 124 // Add all possibly modified moves from right side. |
| 125 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { |
| 126 if (op->IsRedundant()) continue; |
| 127 left->move_operands()->Add(*op, code_zone()); |
| 128 } |
| 129 // Nuke right. |
| 130 move_ops->Rewind(0); |
| 131 } |
| 132 |
| 133 |
// Cleans up the merged gap: eliminates redundant moves, and when several
// moves load the same constant/stack-slot source, keeps one load in slot 0
// and rewrites the duplicates as copies from that load's destination, placed
// in slot 1 (which executes after slot 0). |loads| and |new_moves| are
// caller-provided scratch vectors; both must be empty on entry and are left
// empty on exit.
void MoveOptimizer::FinalizeMoves(MoveOpVector* loads, MoveOpVector* new_moves,
                                  GapInstruction* gap) {
  DCHECK(loads->empty());
  DCHECK(new_moves->empty());
  if (gap == nullptr) return;
  // Split multiple loads of the same constant or stack slot off into the second
  // slot and keep remaining moves in the first slot.
  auto move_ops = gap->parallel_moves()[0]->move_operands();
  for (auto move = move_ops->begin(); move != move_ops->end(); ++move) {
    if (move->IsRedundant()) {
      move->Eliminate();
      continue;
    }
    // Only loads from a constant or a (double) stack slot are candidates.
    if (!(move->source()->IsConstant() || move->source()->IsStackSlot() ||
          move->source()->IsDoubleStackSlot()))
      continue;
    // Search for existing move to this slot.
    MoveOperands* found = nullptr;
    for (auto load : *loads) {
      if (load->source()->Equals(move->source())) {
        found = load;
        break;
      }
    }
    // Not found so insert.
    if (found == nullptr) {
      loads->push_back(move);
      // Replace source with copy for later use.
      auto dest = move->destination();
      move->set_destination(new (code_zone())
                            InstructionOperand(dest->kind(), dest->index()));
      continue;
    }
    // Prefer a register destination over a stack-slot one as the shared
    // source: if |move|'s destination is not a stack slot but |found|'s is,
    // swap their roles.
    if ((found->destination()->IsStackSlot() ||
         found->destination()->IsDoubleStackSlot()) &&
        !(move->destination()->IsStackSlot() ||
          move->destination()->IsDoubleStackSlot())) {
      // Found a better source for this load. Smash it in place to affect other
      // loads that have already been split.
      InstructionOperand::Kind found_kind = found->destination()->kind();
      int found_index = found->destination()->index();
      auto next_dest =
          new (code_zone()) InstructionOperand(found_kind, found_index);
      auto dest = move->destination();
      // In-place ConvertTo updates every move already aliasing this operand.
      found->destination()->ConvertTo(dest->kind(), dest->index());
      move->set_destination(next_dest);
    }
    // move from load destination.
    move->set_source(found->destination());
    new_moves->push_back(move);
  }
  loads->clear();
  if (new_moves->empty()) return;
  // Insert all new moves into slot 1.
  auto slot_1 = gap->GetOrCreateParallelMove(
      static_cast<GapInstruction::InnerPosition>(1), code_zone());
  DCHECK(slot_1->move_operands()->is_empty());
  // Grow slot 1 with placeholder moves in one allocation, then swap the
  // split moves into the placeholders.
  slot_1->move_operands()->AddBlock(MoveOperands(nullptr, nullptr),
                                    static_cast<int>(new_moves->size()),
                                    code_zone());
  auto it = slot_1->move_operands()->begin();
  for (auto new_move : *new_moves) {
    std::swap(*new_move, *it);
    ++it;
  }
  DCHECK_EQ(it, slot_1->move_operands()->end());
  new_moves->clear();
}
| 202 |
| 203 } // namespace compiler |
| 204 } // namespace internal |
| 205 } // namespace v8 |
OLD | NEW |