OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/move-optimizer.h" | 5 #include "src/compiler/move-optimizer.h" |
6 | 6 |
7 namespace v8 { | 7 namespace v8 { |
8 namespace internal { | 8 namespace internal { |
9 namespace compiler { | 9 namespace compiler { |
10 | 10 |
11 namespace { | 11 namespace { |
12 | 12 |
13 typedef std::pair<InstructionOperand, InstructionOperand> MoveKey; | 13 typedef std::pair<InstructionOperand, InstructionOperand> MoveKey; |
14 typedef ZoneMap<MoveKey, unsigned> MoveMap; | 14 typedef ZoneMap<MoveKey, unsigned> MoveMap; |
15 typedef ZoneSet<InstructionOperand> OperandSet; | 15 typedef ZoneSet<InstructionOperand> OperandSet; |
16 | 16 |
17 | 17 |
18 bool GapsCanMoveOver(Instruction* instr) { return instr->IsNop(); } | 18 bool GapsCanMoveOver(Instruction* instr) { return instr->IsNop(); } |
19 | 19 |
20 | 20 |
21 int FindFirstNonEmptySlot(Instruction* instr) { | 21 int FindFirstNonEmptySlot(Instruction* instr) { |
22 int i = Instruction::FIRST_GAP_POSITION; | 22 int i = Instruction::FIRST_GAP_POSITION; |
23 for (; i <= Instruction::LAST_GAP_POSITION; i++) { | 23 for (; i <= Instruction::LAST_GAP_POSITION; i++) { |
24 auto move = instr->parallel_moves()[i]; | 24 auto moves = instr->parallel_moves()[i]; |
25 if (move == nullptr) continue; | 25 if (moves == nullptr) continue; |
26 auto move_ops = move->move_operands(); | 26 for (auto move : *moves) { |
27 auto op = move_ops->begin(); | 27 if (!move->IsRedundant()) return i; |
28 for (; op != move_ops->end(); ++op) { | 28 move->Eliminate(); |
29 if (!op->IsRedundant()) break; | |
30 op->Eliminate(); | |
31 } | 29 } |
32 if (op != move_ops->end()) break; // Found non-redundant move. | 30 moves->clear(); // Clear this redundant move. |
33 move_ops->Rewind(0); // Clear this redundant move. | |
34 } | 31 } |
35 return i; | 32 return i; |
36 } | 33 } |
37 | 34 |
38 } // namespace | 35 } // namespace |
39 | 36 |
40 | 37 |
41 MoveOptimizer::MoveOptimizer(Zone* local_zone, InstructionSequence* code) | 38 MoveOptimizer::MoveOptimizer(Zone* local_zone, InstructionSequence* code) |
42 : local_zone_(local_zone), | 39 : local_zone_(local_zone), |
43 code_(code), | 40 code_(code), |
(...skipping 12 matching lines...) Expand all Loading... |
56 } | 53 } |
57 for (auto gap : to_finalize_) { | 54 for (auto gap : to_finalize_) { |
58 FinalizeMoves(gap); | 55 FinalizeMoves(gap); |
59 } | 56 } |
60 } | 57 } |
61 | 58 |
62 | 59 |
63 void MoveOptimizer::CompressMoves(MoveOpVector* eliminated, ParallelMove* left, | 60 void MoveOptimizer::CompressMoves(MoveOpVector* eliminated, ParallelMove* left, |
64 ParallelMove* right) { | 61 ParallelMove* right) { |
65 DCHECK(eliminated->empty()); | 62 DCHECK(eliminated->empty()); |
66 auto move_ops = right->move_operands(); | 63 if (!left->empty()) { |
67 if (!left->move_operands()->is_empty()) { | |
68 // Modify the right moves in place and collect moves that will be killed by | 64 // Modify the right moves in place and collect moves that will be killed by |
69 // merging the two gaps. | 65 // merging the two gaps. |
70 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { | 66 for (auto move : *right) { |
71 if (op->IsRedundant()) continue; | 67 if (move->IsRedundant()) continue; |
72 auto to_eliminate = left->PrepareInsertAfter(op); | 68 auto to_eliminate = left->PrepareInsertAfter(move); |
73 if (to_eliminate != nullptr) eliminated->push_back(to_eliminate); | 69 if (to_eliminate != nullptr) eliminated->push_back(to_eliminate); |
74 } | 70 } |
75 // Eliminate dead moves. Must happen before insertion of new moves as the | 71 // Eliminate dead moves. |
76 // contents of eliminated are pointers into a list. | |
77 for (auto to_eliminate : *eliminated) { | 72 for (auto to_eliminate : *eliminated) { |
78 to_eliminate->Eliminate(); | 73 to_eliminate->Eliminate(); |
79 } | 74 } |
80 eliminated->clear(); | 75 eliminated->clear(); |
81 } | 76 } |
82 // Add all possibly modified moves from right side. | 77 // Add all possibly modified moves from right side. |
83 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { | 78 for (auto move : *right) { |
84 if (op->IsRedundant()) continue; | 79 if (move->IsRedundant()) continue; |
85 left->move_operands()->Add(*op, code_zone()); | 80 left->push_back(move); |
86 } | 81 } |
87 // Nuke right. | 82 // Nuke right. |
88 move_ops->Rewind(0); | 83 right->clear(); |
89 } | 84 } |
90 | 85 |
91 | 86 |
92 // Smash all consecutive moves into the left most move slot and accumulate them | 87 // Smash all consecutive moves into the left most move slot and accumulate them |
93 // as much as possible across instructions. | 88 // as much as possible across instructions. |
94 void MoveOptimizer::CompressBlock(InstructionBlock* block) { | 89 void MoveOptimizer::CompressBlock(InstructionBlock* block) { |
95 auto temp_vector = temp_vector_0(); | 90 auto temp_vector = temp_vector_0(); |
96 DCHECK(temp_vector.empty()); | 91 DCHECK(temp_vector.empty()); |
97 Instruction* prev_instr = nullptr; | 92 Instruction* prev_instr = nullptr; |
98 for (int index = block->code_start(); index < block->code_end(); ++index) { | 93 for (int index = block->code_start(); index < block->code_end(); ++index) { |
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
152 } | 147 } |
153 } | 148 } |
154 // TODO(dcarney): pass a ZonePool down for this? | 149 // TODO(dcarney): pass a ZonePool down for this? |
155 MoveMap move_map(local_zone()); | 150 MoveMap move_map(local_zone()); |
156 size_t correct_counts = 0; | 151 size_t correct_counts = 0; |
157 // Accumulate set of shared moves. | 152 // Accumulate set of shared moves. |
158 for (auto pred_index : block->predecessors()) { | 153 for (auto pred_index : block->predecessors()) { |
159 auto pred = code()->InstructionBlockAt(pred_index); | 154 auto pred = code()->InstructionBlockAt(pred_index); |
160 auto instr = LastInstruction(pred); | 155 auto instr = LastInstruction(pred); |
161 if (instr->parallel_moves()[0] == nullptr || | 156 if (instr->parallel_moves()[0] == nullptr || |
162 instr->parallel_moves()[0]->move_operands()->is_empty()) { | 157 instr->parallel_moves()[0]->empty()) { |
163 return; | 158 return; |
164 } | 159 } |
165 auto move_ops = instr->parallel_moves()[0]->move_operands(); | 160 for (auto move : *instr->parallel_moves()[0]) { |
166 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { | 161 if (move->IsRedundant()) continue; |
167 if (op->IsRedundant()) continue; | 162 auto src = move->source(); |
168 auto src = *op->source(); | 163 auto dst = move->destination(); |
169 auto dst = *op->destination(); | |
170 MoveKey key = {src, dst}; | 164 MoveKey key = {src, dst}; |
171 auto res = move_map.insert(std::make_pair(key, 1)); | 165 auto res = move_map.insert(std::make_pair(key, 1)); |
172 if (!res.second) { | 166 if (!res.second) { |
173 res.first->second++; | 167 res.first->second++; |
174 if (res.first->second == block->PredecessorCount()) { | 168 if (res.first->second == block->PredecessorCount()) { |
175 correct_counts++; | 169 correct_counts++; |
176 } | 170 } |
177 } | 171 } |
178 } | 172 } |
179 } | 173 } |
180 if (move_map.empty() || correct_counts != move_map.size()) return; | 174 if (move_map.empty() || correct_counts != move_map.size()) return; |
181 // Find insertion point. | 175 // Find insertion point. |
182 Instruction* instr = nullptr; | 176 Instruction* instr = nullptr; |
183 for (int i = block->first_instruction_index(); | 177 for (int i = block->first_instruction_index(); |
184 i <= block->last_instruction_index(); ++i) { | 178 i <= block->last_instruction_index(); ++i) { |
185 instr = code()->instructions()[i]; | 179 instr = code()->instructions()[i]; |
186 if (!GapsCanMoveOver(instr) || !instr->AreMovesRedundant()) break; | 180 if (!GapsCanMoveOver(instr) || !instr->AreMovesRedundant()) break; |
187 } | 181 } |
188 DCHECK(instr != nullptr); | 182 DCHECK(instr != nullptr); |
189 bool gap_initialized = true; | 183 bool gap_initialized = true; |
190 if (instr->parallel_moves()[0] == nullptr || | 184 if (instr->parallel_moves()[0] == nullptr || |
191 instr->parallel_moves()[0]->move_operands()->is_empty()) { | 185 instr->parallel_moves()[0]->empty()) { |
192 to_finalize_.push_back(instr); | 186 to_finalize_.push_back(instr); |
193 } else { | 187 } else { |
194 // Will compress after insertion. | 188 // Will compress after insertion. |
195 gap_initialized = false; | 189 gap_initialized = false; |
196 std::swap(instr->parallel_moves()[0], instr->parallel_moves()[1]); | 190 std::swap(instr->parallel_moves()[0], instr->parallel_moves()[1]); |
197 } | 191 } |
198 auto move = instr->GetOrCreateParallelMove( | 192 auto moves = instr->GetOrCreateParallelMove( |
199 static_cast<Instruction::GapPosition>(0), code_zone()); | 193 static_cast<Instruction::GapPosition>(0), code_zone()); |
200 // Delete relevant entries in predecessors and move everything to block. | 194 // Delete relevant entries in predecessors and move everything to block. |
201 bool first_iteration = true; | 195 bool first_iteration = true; |
202 for (auto pred_index : block->predecessors()) { | 196 for (auto pred_index : block->predecessors()) { |
203 auto pred = code()->InstructionBlockAt(pred_index); | 197 auto pred = code()->InstructionBlockAt(pred_index); |
204 auto move_ops = LastInstruction(pred)->parallel_moves()[0]->move_operands(); | 198 for (auto move : *LastInstruction(pred)->parallel_moves()[0]) { |
205 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { | 199 if (move->IsRedundant()) continue; |
206 if (op->IsRedundant()) continue; | 200 MoveKey key = {move->source(), move->destination()}; |
207 MoveKey key = {*op->source(), *op->destination()}; | |
208 auto it = move_map.find(key); | 201 auto it = move_map.find(key); |
209 USE(it); | 202 USE(it); |
210 DCHECK(it != move_map.end()); | 203 DCHECK(it != move_map.end()); |
211 if (first_iteration) { | 204 if (first_iteration) { |
212 move->AddMove(op->source(), op->destination(), code_zone()); | 205 moves->AddMove(move->source(), move->destination()); |
213 } | 206 } |
214 op->Eliminate(); | 207 move->Eliminate(); |
215 } | 208 } |
216 first_iteration = false; | 209 first_iteration = false; |
217 } | 210 } |
218 // Compress. | 211 // Compress. |
219 if (!gap_initialized) { | 212 if (!gap_initialized) { |
220 CompressMoves(&temp_vector_0(), instr->parallel_moves()[0], | 213 CompressMoves(&temp_vector_0(), instr->parallel_moves()[0], |
221 instr->parallel_moves()[1]); | 214 instr->parallel_moves()[1]); |
222 } | 215 } |
223 } | 216 } |
224 | 217 |
225 | 218 |
| 219 namespace { |
| 220 |
| 221 bool IsSlot(const InstructionOperand& op) { |
| 222 return op.IsStackSlot() || op.IsDoubleStackSlot(); |
| 223 } |
| 224 |
| 225 |
| 226 bool LoadCompare(const MoveOperands* a, const MoveOperands* b) { |
| 227 if (a->source() != b->source()) return a->source() < b->source(); |
| 228 if (IsSlot(a->destination()) && !IsSlot(b->destination())) return false; |
| 229 if (!IsSlot(a->destination()) && IsSlot(b->destination())) return true; |
| 230 return a->destination() < b->destination(); |
| 231 } |
| 232 |
| 233 } // namespace |
| 234 |
| 235 |
226 // Split multiple loads of the same constant or stack slot off into the second | 236 // Split multiple loads of the same constant or stack slot off into the second |
227 // slot and keep remaining moves in the first slot. | 237 // slot and keep remaining moves in the first slot. |
228 void MoveOptimizer::FinalizeMoves(Instruction* instr) { | 238 void MoveOptimizer::FinalizeMoves(Instruction* instr) { |
229 auto loads = temp_vector_0(); | 239 auto loads = temp_vector_0(); |
230 DCHECK(loads.empty()); | 240 DCHECK(loads.empty()); |
231 auto new_moves = temp_vector_1(); | 241 // Find all the loads. |
232 DCHECK(new_moves.empty()); | 242 for (auto move : *instr->parallel_moves()[0]) { |
233 auto move_ops = instr->parallel_moves()[0]->move_operands(); | 243 if (move->IsRedundant()) continue; |
234 for (auto move = move_ops->begin(); move != move_ops->end(); ++move) { | 244 if (move->source().IsConstant() || IsSlot(move->source())) { |
235 if (move->IsRedundant()) { | 245 loads.push_back(move); |
236 move->Eliminate(); | 246 } |
| 247 } |
| 248 if (loads.empty()) return; |
| 249 // Group the loads by source, moving the preferred destination to the |
| 250 // beginning of the group. |
| 251 std::sort(loads.begin(), loads.end(), LoadCompare); |
| 252 MoveOperands* group_begin = nullptr; |
| 253 for (auto load : loads) { |
| 254 // New group. |
| 255 if (group_begin == nullptr || load->source() != group_begin->source()) { |
| 256 group_begin = load; |
237 continue; | 257 continue; |
238 } | 258 } |
239 if (!(move->source()->IsConstant() || move->source()->IsStackSlot() || | 259 // Nothing to be gained from splitting here. |
240 move->source()->IsDoubleStackSlot())) | 260 if (IsSlot(group_begin->destination())) continue; |
241 continue; | 261 // Insert new move into slot 1. |
242 // Search for existing move to this slot. | 262 auto slot_1 = instr->GetOrCreateParallelMove( |
243 MoveOperands* found = nullptr; | 263 static_cast<Instruction::GapPosition>(1), code_zone()); |
244 for (auto load : loads) { | 264 slot_1->AddMove(group_begin->destination(), load->destination()); |
245 if (load->source()->Equals(move->source())) { | 265 load->Eliminate(); |
246 found = load; | |
247 break; | |
248 } | |
249 } | |
250 // Not found so insert. | |
251 if (found == nullptr) { | |
252 loads.push_back(move); | |
253 // Replace source with copy for later use. | |
254 auto dest = move->destination(); | |
255 move->set_destination(InstructionOperand::New(code_zone(), *dest)); | |
256 continue; | |
257 } | |
258 if ((found->destination()->IsStackSlot() || | |
259 found->destination()->IsDoubleStackSlot()) && | |
260 !(move->destination()->IsStackSlot() || | |
261 move->destination()->IsDoubleStackSlot())) { | |
262 // Found a better source for this load. Smash it in place to affect other | |
263 // loads that have already been split. | |
264 auto next_dest = | |
265 InstructionOperand::New(code_zone(), *found->destination()); | |
266 auto dest = move->destination(); | |
267 InstructionOperand::ReplaceWith(found->destination(), dest); | |
268 move->set_destination(next_dest); | |
269 } | |
270 // move from load destination. | |
271 move->set_source(found->destination()); | |
272 new_moves.push_back(move); | |
273 } | 266 } |
274 loads.clear(); | 267 loads.clear(); |
275 if (new_moves.empty()) return; | |
276 // Insert all new moves into slot 1. | |
277 auto slot_1 = instr->GetOrCreateParallelMove( | |
278 static_cast<Instruction::GapPosition>(1), code_zone()); | |
279 DCHECK(slot_1->move_operands()->is_empty()); | |
280 slot_1->move_operands()->AddBlock(MoveOperands(nullptr, nullptr), | |
281 static_cast<int>(new_moves.size()), | |
282 code_zone()); | |
283 auto it = slot_1->move_operands()->begin(); | |
284 for (auto new_move : new_moves) { | |
285 std::swap(*new_move, *it); | |
286 ++it; | |
287 } | |
288 DCHECK_EQ(it, slot_1->move_operands()->end()); | |
289 new_moves.clear(); | |
290 } | 268 } |
291 | 269 |
292 } // namespace compiler | 270 } // namespace compiler |
293 } // namespace internal | 271 } // namespace internal |
294 } // namespace v8 | 272 } // namespace v8 |
OLD | NEW |