Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/move-optimizer.h" | 5 #include "src/compiler/move-optimizer.h" |
| 6 | 6 |
| 7 namespace v8 { | 7 namespace v8 { |
| 8 namespace internal { | 8 namespace internal { |
| 9 namespace compiler { | 9 namespace compiler { |
| 10 | 10 |
| 11 namespace { | 11 namespace { |
| 12 | 12 |
| | 13 typedef std::pair<InstructionOperand, InstructionOperand> MoveKey; |
| | 14 typedef ZoneMap<MoveKey, unsigned> MoveMap; |
| | 15 typedef ZoneSet<InstructionOperand> OperandSet; |
| | 16 |
| | 17 |
| 13 bool GapsCanMoveOver(Instruction* instr) { | 18 bool GapsCanMoveOver(Instruction* instr) { |
| 14 DCHECK(!instr->IsGapMoves()); | 19 DCHECK(!instr->IsGapMoves()); |
| 15 return instr->IsSourcePosition() || instr->IsNop(); | 20 return instr->IsSourcePosition() || instr->IsNop(); |
| 16 } | 21 } |
| 17 | 22 |
| 18 | 23 |
| 19 int FindFirstNonEmptySlot(GapInstruction* gap) { | 24 int FindFirstNonEmptySlot(GapInstruction* gap) { |
| 20 int i = GapInstruction::FIRST_INNER_POSITION; | 25 int i = GapInstruction::FIRST_INNER_POSITION; |
| 21 for (; i <= GapInstruction::LAST_INNER_POSITION; i++) { | 26 for (; i <= GapInstruction::LAST_INNER_POSITION; i++) { |
| 22 auto move = gap->parallel_moves()[i]; | 27 auto move = gap->parallel_moves()[i]; |
| (...skipping 18 matching lines...) | |
| 41 code_(code), | 46 code_(code), |
| 42 to_finalize_(local_zone), | 47 to_finalize_(local_zone), |
| 43 temp_vector_0_(local_zone), | 48 temp_vector_0_(local_zone), |
| 44 temp_vector_1_(local_zone) {} | 49 temp_vector_1_(local_zone) {} |
| 45 | 50 |
| 46 | 51 |
| 47 void MoveOptimizer::Run() { | 52 void MoveOptimizer::Run() { |
| 48 for (auto* block : code()->instruction_blocks()) { | 53 for (auto* block : code()->instruction_blocks()) { |
| 49 CompressBlock(block); | 54 CompressBlock(block); |
| 50 } | 55 } |
| | 56 for (auto block : code()->instruction_blocks()) { |
| | 57 if (block->PredecessorCount() <= 1) continue; |
| | 58 OptimizeMerge(block); |
| | 59 } |
| 51 for (auto gap : to_finalize_) { | 60 for (auto gap : to_finalize_) { |
| 52 FinalizeMoves(gap); | 61 FinalizeMoves(gap); |
| 53 } | 62 } |
| 54 } | 63 } |
| 55 | 64 |
| 56 | 65 |
| 57 void MoveOptimizer::CompressMoves(MoveOpVector* eliminated, ParallelMove* left, | 66 void MoveOptimizer::CompressMoves(MoveOpVector* eliminated, ParallelMove* left, |
| 58 ParallelMove* right) { | 67 ParallelMove* right) { |
| 59 DCHECK(eliminated->empty()); | 68 DCHECK(eliminated->empty()); |
| 60 auto move_ops = right->move_operands(); | 69 auto move_ops = right->move_operands(); |
| (...skipping 62 matching lines...) | |
| 123 CompressMoves(&temp_vector, pred_moves, left); | 132 CompressMoves(&temp_vector, pred_moves, left); |
| 124 // Slide prev_gap down so we always know where to look for it. | 133 // Slide prev_gap down so we always know where to look for it. |
| 125 std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]); | 134 std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]); |
| 126 } | 135 } |
| 127 prev_gap = gap; | 136 prev_gap = gap; |
| 128 } | 137 } |
| 129 if (prev_gap != nullptr) to_finalize_.push_back(prev_gap); | 138 if (prev_gap != nullptr) to_finalize_.push_back(prev_gap); |
| 130 } | 139 } |
| 131 | 140 |
| 132 | 141 |
| | 142 GapInstruction* MoveOptimizer::LastGap(InstructionBlock* block) { |
| | 143 int gap_index = block->last_instruction_index() - 1; |
| | 144 auto instr = code()->instructions()[gap_index]; |
| | 145 return GapInstruction::cast(instr); |
| | 146 } |
| | 147 |
| | 148 |
| | 149 void MoveOptimizer::OptimizeMerge(InstructionBlock* block) { |
| | 150 DCHECK(block->PredecessorCount() > 1); |
| | 151 // Ensure that the last instruction in each incoming block doesn't contain |
| | 152 // anything that would prevent moving gap moves across it. |
| | 153 for (auto pred_index : block->predecessors()) { |
| | 154 auto pred = code()->InstructionBlockAt(pred_index); |
| | 155 auto last_instr = code()->instructions()[pred->last_instruction_index()]; |
| | 156 DCHECK(!last_instr->IsGapMoves()); |
| | 157 if (last_instr->IsSourcePosition()) continue; |
| | 158 if (last_instr->IsCall()) return; |
| | 159 if (last_instr->TempCount() != 0) return; |
| | 160 if (last_instr->OutputCount() != 0) return; |
| | 161 for (size_t i = 0; i < last_instr->InputCount(); ++i) { |
| | 162 auto op = last_instr->InputAt(i); |
| | 163 if (!op->IsConstant() && !op->IsImmediate()) return; |
| | 164 } |
| | 165 } |
| | 166 // TODO(dcarney): pass a ZonePool down for this? |
| | 167 MoveMap move_map(local_zone()); |
| | 168 size_t correct_counts = 0; |
| | 169 // Accumulate set of shared moves. |
| | 170 for (auto pred_index : block->predecessors()) { |
| | 171 auto pred = code()->InstructionBlockAt(pred_index); |
| | 172 auto gap = LastGap(pred); |
| | 173 if (gap->parallel_moves()[0] == nullptr || |
| | 174 gap->parallel_moves()[0]->move_operands()->is_empty()) { |
| | 175 return; |
| | 176 } |
| | 177 auto move_ops = gap->parallel_moves()[0]->move_operands(); |
| | 178 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { |
| | 179 if (op->IsRedundant()) continue; |
| | 180 auto src = *op->source(); |
| | 181 auto dst = *op->destination(); |
| | 182 MoveKey key = {src, dst}; |
| | 183 auto res = move_map.insert(std::make_pair(key, 1)); |
| | 184 if (!res.second) { |
| | 185 res.first->second++; |
| | 186 if (res.first->second == block->PredecessorCount()) { |
| | 187 correct_counts++; |
| | 188 } |
| | 189 } |
| | 190 } |
| | 191 } |
| | 192 if (move_map.empty() || correct_counts != move_map.size()) return; |
| | 193 // Find insertion point. |
| | 194 GapInstruction* gap = nullptr; |
| | 195 for (int i = block->first_instruction_index(); |
| | 196 i <= block->last_instruction_index(); ++i) { |
| | 197 auto instr = code()->instructions()[i]; |
| | 198 if (instr->IsGapMoves()) { |
| | 199 gap = GapInstruction::cast(instr); |
| | 200 continue; |
| | 201 } |
| | 202 if (!GapsCanMoveOver(instr)) break; |
| | 203 } |
| | 204 DCHECK(gap != nullptr); |
| | 205 bool gap_initialized = true; |
| | 206 if (gap->parallel_moves()[0] == nullptr || |
| | 207 gap->parallel_moves()[0]->move_operands()->is_empty()) { |
| | 208 to_finalize_.push_back(gap); |
| | 209 } else { |
| | 210 // Will compress after insertion. |
| | 211 gap_initialized = false; |
| | 212 std::swap(gap->parallel_moves()[0], gap->parallel_moves()[1]); |
| | 213 } |
| | 214 auto move = gap->GetOrCreateParallelMove( |
| | 215 static_cast<GapInstruction::InnerPosition>(0), code_zone()); |
| | 216 // Delete relevant entries in predecessors and move everything to block. |
| | 217 bool first_iteration = true; |
| | 218 for (auto pred_index : block->predecessors()) { |
| | 219 auto pred = code()->InstructionBlockAt(pred_index); |
| | 220 auto gap = LastGap(pred); |
|
brucedawson
2015/04/06 18:05:31
This line of code is worrisome because it shadows the outer gap variable.

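For readers skimming the patch, here is a minimal standalone sketch of the shadowing pattern the comment points at (Gap and LastGapOf are simplified stand-ins, not the V8 classes); renaming the inner variable is one possible way to keep both gaps distinct:

```cpp
// Standalone illustration only; simplified types, not GapInstruction or
// MoveOptimizer::LastGap from the patch.
#include <iostream>
#include <vector>

struct Gap { int id; };

Gap* LastGapOf(std::vector<Gap>& pred_gaps, size_t i) { return &pred_gaps[i]; }

void OptimizeMergeSketch(Gap* insertion_gap, std::vector<Gap>& pred_gaps) {
  Gap* gap = insertion_gap;  // outer 'gap': the insertion point in the block
  for (size_t i = 0; i < pred_gaps.size(); ++i) {
    // The patch reuses the name 'gap' here, shadowing the outer variable;
    // a distinct name keeps both visible and reads less ambiguously.
    Gap* pred_gap = LastGapOf(pred_gaps, i);
    std::cout << "hoisting moves from gap " << pred_gap->id
              << " into gap " << gap->id << "\n";
  }
}

int main() {
  std::vector<Gap> pred_gaps = {{1}, {2}};
  Gap insertion_gap{0};
  OptimizeMergeSketch(&insertion_gap, pred_gaps);
  return 0;
}
```

In the patch itself the shadowing looks harmless, since the loop body only touches the predecessor's gap, but a rename would remove any doubt when reading the code.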
| | 221 auto move_ops = gap->parallel_moves()[0]->move_operands(); |
| | 222 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { |
| | 223 if (op->IsRedundant()) continue; |
| | 224 MoveKey key = {*op->source(), *op->destination()}; |
| | 225 auto it = move_map.find(key); |
| | 226 USE(it); |
| | 227 DCHECK(it != move_map.end()); |
| | 228 if (first_iteration) { |
| | 229 move->AddMove(op->source(), op->destination(), code_zone()); |
| | 230 } |
| | 231 op->Eliminate(); |
| | 232 } |
| | 233 first_iteration = false; |
| | 234 } |
| | 235 // Compress. |
| | 236 if (!gap_initialized) { |
| | 237 CompressMoves(&temp_vector_0(), gap->parallel_moves()[0], |
| | 238 gap->parallel_moves()[1]); |
| | 239 } |
| | 240 } |
| | 241 |
| | 242 |
| 133 // Split multiple loads of the same constant or stack slot off into the second | 243 // Split multiple loads of the same constant or stack slot off into the second |
| 134 // slot and keep remaining moves in the first slot. | 244 // slot and keep remaining moves in the first slot. |
| 135 void MoveOptimizer::FinalizeMoves(GapInstruction* gap) { | 245 void MoveOptimizer::FinalizeMoves(GapInstruction* gap) { |
| 136 auto loads = temp_vector_0(); | 246 auto loads = temp_vector_0(); |
| 137 DCHECK(loads.empty()); | 247 DCHECK(loads.empty()); |
| 138 auto new_moves = temp_vector_1(); | 248 auto new_moves = temp_vector_1(); |
| 139 DCHECK(new_moves.empty()); | 249 DCHECK(new_moves.empty()); |
| 140 auto move_ops = gap->parallel_moves()[0]->move_operands(); | 250 auto move_ops = gap->parallel_moves()[0]->move_operands(); |
| 141 for (auto move = move_ops->begin(); move != move_ops->end(); ++move) { | 251 for (auto move = move_ops->begin(); move != move_ops->end(); ++move) { |
| 142 if (move->IsRedundant()) { | 252 if (move->IsRedundant()) { |
| (...skipping 52 matching lines...) | |
| 195 std::swap(*new_move, *it); | 305 std::swap(*new_move, *it); |
| 196 ++it; | 306 ++it; |
| 197 } | 307 } |
| 198 DCHECK_EQ(it, slot_1->move_operands()->end()); | 308 DCHECK_EQ(it, slot_1->move_operands()->end()); |
| 199 new_moves.clear(); | 309 new_moves.clear(); |
| 200 } | 310 } |
| 201 | 311 |
| 202 } // namespace compiler | 312 } // namespace compiler |
| 203 } // namespace internal | 313 } // namespace internal |
| 204 } // namespace v8 | 314 } // namespace v8 |
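The merge optimization added by this patch can be summarized as: if every predecessor of a join block ends its last gap with exactly the same moves, emit those moves once in the join block and erase them from the predecessors. Below is a simplified standalone sketch of that idea with invented types (Operand, Move, MoveList); it ignores redundant moves, the parallel-move slots, and the insertion-point search that the real OptimizeMerge performs.

```cpp
// Simplified sketch of the hoisting idea; Operand/Move/MoveList are invented
// stand-ins, not V8's InstructionOperand or ParallelMove.
#include <iostream>
#include <map>
#include <utility>
#include <vector>

using Operand = int;
using Move = std::pair<Operand, Operand>;  // (source, destination)
using MoveList = std::vector<Move>;

// Hoist moves into the merge block only when every predecessor ends with the
// same moves (mirroring the correct_counts check in the patch). Assumes a
// move appears at most once per predecessor.
void HoistCommonMoves(std::vector<MoveList>* predecessors, MoveList* merge_block) {
  if (predecessors->size() < 2) return;
  std::map<Move, size_t> counts;
  for (const auto& pred : *predecessors) {
    for (const auto& m : pred) counts[m]++;
  }
  if (counts.empty()) return;
  for (const auto& entry : counts) {
    if (entry.second != predecessors->size()) return;  // not shared by all
  }
  // Emit each shared move once after the join and drop it from every edge
  // (the patch does this with AddMove on the gap and Eliminate on the source).
  for (const auto& entry : counts) merge_block->push_back(entry.first);
  for (auto& pred : *predecessors) pred.clear();
}

int main() {
  std::vector<MoveList> predecessors = {{{1, 2}, {3, 4}}, {{3, 4}, {1, 2}}};
  MoveList merge_block;
  HoistCommonMoves(&predecessors, &merge_block);
  std::cout << "hoisted " << merge_block.size() << " moves\n";  // prints 2
  return 0;
}
```

The payoff is that the shared moves are emitted once after the join instead of once on every incoming edge, which is what the accumulate/eliminate loops in OptimizeMerge implement on V8's ParallelMove structures.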