Chromium Code Reviews

Side by Side Diff: src/compiler/move-optimizer.cc

Issue 755323011: [turbofan] optimize moves into merges (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 11 months ago
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/move-optimizer.h"
6
7 namespace v8 {
8 namespace internal {
9 namespace compiler {
10
11 namespace {
12
13 struct MoveKey {
Benedikt Meurer 2015/01/28 05:18:51 Once the issues mentioned below are addressed, how …
dcarney 2015/02/02 09:23:33 Done.
14 InstructionOperand src;
15 InstructionOperand dst;
16 };
17
18 struct OperandLess {
Benedikt Meurer 2015/01/28 05:18:51 Can you add an operator< (and operator>) to InstructionOperand …
dcarney 2015/02/02 09:23:33 now that it's 64 bits, it might be a little weird, …
19 bool operator()(const InstructionOperand a,
20 const InstructionOperand b) const {
21 if (a.kind() == b.kind()) return a.index() < b.index();
22 return a.kind() < b.kind();
23 }
24 };
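[Editor's note] The comparator above orders operands by (kind, index). Below is a minimal standalone sketch of the reviewer's suggestion of moving that ordering onto the operand type itself as operator< / operator>. The ToyOperand struct and its int fields are stand-ins invented for illustration only; this is not the InstructionOperand change that actually landed.

```cpp
// Hedged sketch, not V8 source: a toy operand type carrying the same
// (kind, index) lexicographic ordering that OperandLess encodes, expressed as
// member operator< / operator> per the review suggestion.
#include <cassert>

struct ToyOperand {
  int kind;   // stands in for InstructionOperand::Kind
  int index;  // stands in for the operand's index

  bool operator<(const ToyOperand& that) const {
    if (kind != that.kind) return kind < that.kind;  // kind is the major key
    return index < that.index;                       // index breaks ties
  }
  bool operator>(const ToyOperand& that) const { return that < *this; }
};

int main() {
  ToyOperand a{0, 3}, b{0, 7}, c{1, 0};
  assert(a < b);  // same kind: ordered by index
  assert(b < c);  // different kind: ordered by kind
  assert(c > a);
  return 0;
}
```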
25
26 struct MoveKeyLess {
27 bool operator()(const MoveKey& a, const MoveKey& b) const {
28 OperandLess operand_less;
29 if (a.src.Equals(&b.src)) return operand_less(a.dst, b.dst);
30 return operand_less(a.src, b.src);
31 }
32 };
33
34 typedef std::set<InstructionOperand, std::less<InstructionOperand>,
Benedikt Meurer 2015/01/28 05:18:51 I guess this shouldn't be std::less<InstructionOperand> …
dcarney 2015/02/02 09:23:33 Done.
35 zone_allocator<InstructionOperand>> OperandSetSuper;
36 typedef std::map<MoveKey, unsigned, MoveKeyLess,
Benedikt Meurer 2015/01/28 05:18:51 How about adding ZoneSet and ZoneMap to zone-containers …
dcarney 2015/02/02 09:23:33 Done.
37 zone_allocator<std::pair<const MoveKey, unsigned>>>
38 MoveMapSuper;
39
40
41 class MoveMap : public MoveMapSuper {
42 public:
43 explicit MoveMap(Zone* zone)
44 : MoveMapSuper(key_compare(), allocator_type(zone)) {}
45 };
46
47
48 class OperandSet : public OperandSetSuper {
49 public:
50 explicit OperandSet(Zone* zone)
51 : OperandSetSuper(key_compare(), allocator_type(zone)) {}
52 };
53
54 } // namespace
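[Editor's note] For the zone-container suggestion in the review thread above, here is a self-contained sketch of the adapter pattern: zone-backed std::set / std::map wrappers whose constructors take only the zone, with the comparator defaulted. The BumpZone and ZoneAllocator types below are simplified stand-ins written for this example; they are not v8's Zone or zone_allocator, and the real zone-containers.h may differ in detail.

```cpp
#include <cstddef>
#include <functional>
#include <map>
#include <set>
#include <utility>
#include <vector>

// Simplified arena: every allocation lives until the zone is destroyed.
class BumpZone {
 public:
  ~BumpZone() {
    for (void* block : blocks_) ::operator delete(block);
  }
  void* Allocate(std::size_t bytes) {
    void* block = ::operator new(bytes);
    blocks_.push_back(block);
    return block;
  }

 private:
  std::vector<void*> blocks_;
};

// Minimal C++11 allocator that hands out zone memory and never frees early.
template <typename T>
class ZoneAllocator {
 public:
  using value_type = T;
  explicit ZoneAllocator(BumpZone* zone) : zone_(zone) {}
  template <typename U>
  ZoneAllocator(const ZoneAllocator<U>& other) : zone_(other.zone_) {}
  T* allocate(std::size_t n) {
    return static_cast<T*>(zone_->Allocate(n * sizeof(T)));
  }
  void deallocate(T*, std::size_t) {}  // reclaimed when the zone dies
  template <typename U>
  bool operator==(const ZoneAllocator<U>& o) const { return zone_ == o.zone_; }
  template <typename U>
  bool operator!=(const ZoneAllocator<U>& o) const { return zone_ != o.zone_; }

  BumpZone* zone_;
};

// The adapters the review asks for: call sites stay as terse as the
// MoveMap / OperandSet wrappers above, passing only the zone.
template <typename K, typename Compare = std::less<K>>
class ZoneSet : public std::set<K, Compare, ZoneAllocator<K>> {
 public:
  explicit ZoneSet(BumpZone* zone)
      : std::set<K, Compare, ZoneAllocator<K>>(Compare(),
                                               ZoneAllocator<K>(zone)) {}
};

template <typename K, typename V, typename Compare = std::less<K>>
class ZoneMap
    : public std::map<K, V, Compare, ZoneAllocator<std::pair<const K, V>>> {
 public:
  explicit ZoneMap(BumpZone* zone)
      : std::map<K, V, Compare, ZoneAllocator<std::pair<const K, V>>>(
            Compare(), ZoneAllocator<std::pair<const K, V>>(zone)) {}
};

int main() {
  BumpZone zone;
  ZoneMap<int, unsigned> move_counts(&zone);
  move_counts.insert(std::make_pair(7, 1u));
  ZoneSet<int> operands(&zone);
  operands.insert(7);
  return (move_counts.size() == 1 && operands.size() == 1) ? 0 : 1;
}
```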
55
56
11 MoveOptimizer::MoveOptimizer(Zone* local_zone, InstructionSequence* code)
12 : local_zone_(local_zone),
13 code_(code),
60 to_finalize_(local_zone),
14 temp_vector_0_(local_zone),
15 temp_vector_1_(local_zone) {}
16
17
18 void MoveOptimizer::Run() {

OLD:
19 // First smash all consecutive moves into the left most move slot.
20 for (auto* block : code()->instruction_blocks()) {
21 GapInstruction* prev_gap = nullptr;
22 for (int index = block->code_start(); index < block->code_end(); ++index) {
23 auto instr = code()->instructions()[index];
24 if (!instr->IsGapMoves()) {
25 if (instr->IsSourcePosition() || instr->IsNop()) continue;
26 FinalizeMoves(&temp_vector_0_, &temp_vector_1_, prev_gap);
27 prev_gap = nullptr;
28 continue;
29 }
30 auto gap = GapInstruction::cast(instr);
31 // Find first non-empty slot.
32 int i = GapInstruction::FIRST_INNER_POSITION;
33 for (; i <= GapInstruction::LAST_INNER_POSITION; i++) {
34 auto move = gap->parallel_moves()[i];
35 if (move == nullptr) continue;
36 auto move_ops = move->move_operands();
37 auto op = move_ops->begin();
38 for (; op != move_ops->end(); ++op) {
39 if (!op->IsRedundant()) break;
40 }
41 if (op == move_ops->end()) {
42 move_ops->Rewind(0); // Clear this redundant move.
43 } else {
44 break; // Found index of first non-redundant move.
45 }
46 }
47 // Nothing to do here.
48 if (i == GapInstruction::LAST_INNER_POSITION + 1) {
49 if (prev_gap != nullptr) {
50 // Slide prev_gap down so we always know where to look for it.
51 std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]);
52 prev_gap = gap;
53 }
54 continue;
55 }
56 // Move the first non-empty gap to position 0.
57 std::swap(gap->parallel_moves()[0], gap->parallel_moves()[i]);
58 auto left = gap->parallel_moves()[0];
59 // Compress everything into position 0.
60 for (++i; i <= GapInstruction::LAST_INNER_POSITION; ++i) {
61 auto move = gap->parallel_moves()[i];
62 if (move == nullptr) continue;
63 CompressMoves(&temp_vector_0_, left, move);
64 }
65 if (prev_gap != nullptr) {
66 // Smash left into prev_gap, killing left.
67 auto pred_moves = prev_gap->parallel_moves()[0];
68 CompressMoves(&temp_vector_0_, pred_moves, left);
69 std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]);
70 }
71 prev_gap = gap;
72 }
73 FinalizeMoves(&temp_vector_0_, &temp_vector_1_, prev_gap);
74 }
75 }

NEW:
65 void MoveOptimizer::Run() {
66 for (auto* block : code()->instruction_blocks()) {
67 CompressBlock(block);
68 }
69 for (auto block : code()->instruction_blocks()) {
70 if (block->PredecessorCount() <= 1) continue;
71 OptimizeMerge(block);
72 }
73 for (auto gap : to_finalize_) {
74 FinalizeMoves(gap);
75 }
76 }
77
78
79 static MoveOperands* PrepareInsertAfter(ParallelMove* left, MoveOperands* move,
80 Zone* zone) {
81 auto move_ops = left->move_operands();
82 MoveOperands* replacement = nullptr;
83 MoveOperands* to_eliminate = nullptr;
84 for (auto curr = move_ops->begin(); curr != move_ops->end(); ++curr) {
(...skipping 15 matching lines...)
100 move->set_source(new_source);
101 }
102 return to_eliminate;
103 }
104
105
106 void MoveOptimizer::CompressMoves(MoveOpVector* eliminated, ParallelMove* left,
107 ParallelMove* right) {
108 DCHECK(eliminated->empty());
109 auto move_ops = right->move_operands();

OLD:
109 // Modify the right moves in place and collect moves that will be killed by
110 // merging the two gaps.
111 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) {
112 if (op->IsRedundant()) continue;
113 MoveOperands* to_eliminate = PrepareInsertAfter(left, op, code_zone());
114 if (to_eliminate != nullptr) {
115 eliminated->push_back(to_eliminate);
116 }
117 }
118 // Eliminate dead moves. Must happen before insertion of new moves as the
119 // contents of eliminated are pointers into a list.
120 for (auto to_eliminate : *eliminated) {
121 to_eliminate->Eliminate();
122 }
123 eliminated->clear();

NEW:
110 if (!left->move_operands()->is_empty()) {
111 // Modify the right moves in place and collect moves that will be killed by
112 // merging the two gaps.
113 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) {
114 if (op->IsRedundant()) continue;
115 MoveOperands* to_eliminate = PrepareInsertAfter(left, op, code_zone());
116 if (to_eliminate != nullptr) eliminated->push_back(to_eliminate);
117 }
118 // Eliminate dead moves. Must happen before insertion of new moves as the
119 // contents of eliminated are pointers into a list.
120 for (auto to_eliminate : *eliminated) {
121 to_eliminate->Eliminate();
122 }
123 eliminated->clear();
124 }

125 // Add all possibly modified moves from right side.
126 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) {
127 if (op->IsRedundant()) continue;
128 left->move_operands()->Add(*op, code_zone());
129 }
130 // Nuke right.
131 move_ops->Rewind(0);
132 }
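[Editor's note] As a plain illustration of what the merge above does (toy types, not V8's ParallelMove/MoveOperands): folding a later gap into an earlier one re-sources later moves that read a location the earlier gap wrote, and drops earlier moves whose destination the later gap overwrites. This only models the intent of CompressMoves/PrepareInsertAfter; the real pass rewrites MoveOperands in place.

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

// Toy parallel move: dst receives the value held in src before the gap runs.
struct ToyMove {
  int src;
  int dst;
};

// Fold `right` into `left`: first rewrite right's sources and mark left moves
// that right kills (cf. PrepareInsertAfter, done before any insertion), then
// drop the dead moves and append the rewritten right moves.
void CompressInto(std::vector<ToyMove>* left, std::vector<ToyMove>* right) {
  std::vector<bool> dead(left->size(), false);
  for (auto& r : *right) {
    for (std::size_t i = 0; i < left->size(); ++i) {
      if ((*left)[i].dst == r.src) r.src = (*left)[i].src;  // forward the value
      if ((*left)[i].dst == r.dst) dead[i] = true;          // overwritten later
    }
  }
  std::vector<ToyMove> merged;
  for (std::size_t i = 0; i < left->size(); ++i) {
    if (!dead[i]) merged.push_back((*left)[i]);
  }
  for (const auto& r : *right) merged.push_back(r);
  left->swap(merged);
  right->clear();  // "nuke right"
}

int main() {
  std::vector<ToyMove> left = {{1, 2}};           // gap A: r2 <- r1
  std::vector<ToyMove> right = {{2, 3}, {4, 2}};  // gap B: r3 <- r2, r2 <- r4
  CompressInto(&left, &right);
  // r3 now takes its value straight from r1, and gap A's write to r2 is dead.
  assert(left.size() == 2);
  assert(left[0].src == 1 && left[0].dst == 3);
  assert(left[1].src == 4 && left[1].dst == 2);
  assert(right.empty());
  return 0;
}
```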
133
134
OLD:
134 void MoveOptimizer::FinalizeMoves(MoveOpVector* loads, MoveOpVector* new_moves,
135 GapInstruction* gap) {
136 DCHECK(loads->empty());
137 DCHECK(new_moves->empty());
138 if (gap == nullptr) return;
139 // Split multiple loads of the same constant or stack slot off into the second
140 // slot and keep remaining moves in the first slot.

NEW:
135 static bool GapsCanMoveOver(Instruction* instr) {
Benedikt Meurer 2015/01/28 05:18:51 Nit: use anonymous namespace instead of static.
136 DCHECK(!instr->IsGapMoves());
137 return instr->IsSourcePosition() || instr->IsNop();
138 }
139
140
141 static int FindFirstNonEmptySlot(GapInstruction* gap) {
Benedikt Meurer 2015/01/28 05:18:51 Nit: use anonymous namespace instead of static.
dcarney 2015/02/02 09:23:33 Done.
142 int i = GapInstruction::FIRST_INNER_POSITION;
143 for (; i <= GapInstruction::LAST_INNER_POSITION; i++) {
144 auto move = gap->parallel_moves()[i];
145 if (move == nullptr) continue;
146 auto move_ops = move->move_operands();
147 auto op = move_ops->begin();
148 for (; op != move_ops->end(); ++op) {
149 if (!op->IsRedundant()) break;
150 op->Eliminate();
151 }
152 if (op != move_ops->end()) break; // Found non-redundant move.
153 move_ops->Rewind(0); // Clear this redundant move.
154 }
155 return i;
156 }
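[Editor's note] Both review nits above ask for these helpers to live in an unnamed namespace rather than be marked static. A tiny generic sketch of that style follows; the helper name and body are invented for illustration, not the landed code.

```cpp
#include <cassert>

namespace {

// File-local helper: the unnamed namespace gives it internal linkage, so no
// `static` keyword is needed -- the style the review asks for.
bool IsTrivial(int move_count) { return move_count == 0; }

}  // namespace

int main() {
  assert(IsTrivial(0));
  assert(!IsTrivial(3));
  return 0;
}
```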
157
158
159 // Smash all consecutive moves into the left most move slot and accumulate them
160 // as much as possible across instructions.
161 void MoveOptimizer::CompressBlock(InstructionBlock* block) {
162 auto temp_vector = temp_vector_0();
163 DCHECK(temp_vector.empty());
164 GapInstruction* prev_gap = nullptr;
165 for (int index = block->code_start(); index < block->code_end(); ++index) {
166 auto instr = code()->instructions()[index];
167 if (!instr->IsGapMoves()) {
168 if (GapsCanMoveOver(instr)) continue;
169 if (prev_gap != nullptr) to_finalize_.push_back(prev_gap);
170 prev_gap = nullptr;
171 continue;
172 }
173 auto gap = GapInstruction::cast(instr);
174 int i = FindFirstNonEmptySlot(gap);
175 // Nothing to do here.
176 if (i == GapInstruction::LAST_INNER_POSITION + 1) {
177 if (prev_gap != nullptr) {
178 // Slide prev_gap down so we always know where to look for it.
179 std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]);
180 prev_gap = gap;
181 }
182 continue;
183 }
184 // Move the first non-empty gap to position 0.
185 std::swap(gap->parallel_moves()[0], gap->parallel_moves()[i]);
186 auto left = gap->parallel_moves()[0];
187 // Compress everything into position 0.
188 for (++i; i <= GapInstruction::LAST_INNER_POSITION; ++i) {
189 auto move = gap->parallel_moves()[i];
190 if (move == nullptr) continue;
191 CompressMoves(&temp_vector, left, move);
192 }
193 if (prev_gap != nullptr) {
194 // Smash left into prev_gap, killing left.
195 auto pred_moves = prev_gap->parallel_moves()[0];
196 CompressMoves(&temp_vector, pred_moves, left);
197 // Slide prev_gap down so we always know where to look for it.
198 std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]);
199 }
200 prev_gap = gap;
201 }
202 if (prev_gap != nullptr) to_finalize_.push_back(prev_gap);
203 }
204
205
206 GapInstruction* MoveOptimizer::LastGap(InstructionBlock* block) {
207 int gap_index = block->last_instruction_index() - 1;
208 auto instr = code()->instructions()[gap_index];
209 return GapInstruction::cast(instr);
210 }
211
212
213 void MoveOptimizer::OptimizeMerge(InstructionBlock* block) {
214 DCHECK(block->PredecessorCount() > 1);
215 // Ensure that the last instruction in all incoming blocks don't contain
216 // things that would prevent moving gap moves across them.
217 for (auto pred_index : block->predecessors()) {
218 auto pred = code()->InstructionBlockAt(pred_index);
219 auto last_instr = code()->instructions()[pred->last_instruction_index()];
220 DCHECK(!last_instr->IsGapMoves());
221 if (last_instr->IsSourcePosition()) continue;
222 if (last_instr->IsCall()) return;
223 if (last_instr->TempCount() != 0) return;
224 if (last_instr->OutputCount() != 0) return;
225 for (size_t i = 0; i < last_instr->InputCount(); ++i) {
226 auto op = last_instr->InputAt(i);
227 if (!op->IsConstant() && !op->IsImmediate()) return;
228 }
229 }
230 // TODO(dcarney): pass a ZonePool down for this?
231 MoveMap move_map(local_zone());
232 size_t correct_counts = 0;
233 // Accumulate set of shared moves.
234 for (auto pred_index : block->predecessors()) {
235 auto pred = code()->InstructionBlockAt(pred_index);
236 auto gap = LastGap(pred);
237 if (gap->parallel_moves()[0] == nullptr ||
238 gap->parallel_moves()[0]->move_operands()->is_empty()) {
239 return;
240 }
241 auto move_ops = gap->parallel_moves()[0]->move_operands();
242 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) {
243 if (op->IsRedundant()) continue;
244 auto src = *op->source();
245 auto dst = *op->destination();
246 MoveKey key = {src, dst};
247 auto res = move_map.insert(std::make_pair(key, 1));
248 if (!res.second) {
249 res.first->second++;
250 if (res.first->second == block->PredecessorCount()) {
251 correct_counts++;
252 }
253 }
254 }
255 }
256 if (move_map.empty() || correct_counts != move_map.size()) return;
257 // Find insertion point.
258 GapInstruction* gap = nullptr;
259 for (int i = block->first_instruction_index();
260 i <= block->last_instruction_index(); ++i) {
261 auto instr = code()->instructions()[i];
262 if (instr->IsGapMoves()) {
263 gap = GapInstruction::cast(instr);
264 continue;
265 }
266 if (!GapsCanMoveOver(instr)) break;
267 }
268 DCHECK(gap != nullptr);
269 bool gap_initialized = true;
270 if (gap->parallel_moves()[0] == nullptr ||
271 gap->parallel_moves()[0]->move_operands()->is_empty()) {
272 to_finalize_.push_back(gap);
273 } else {
274 // Will compress after insertion.
275 gap_initialized = false;
276 std::swap(gap->parallel_moves()[0], gap->parallel_moves()[1]);
277 }
278 auto move = gap->GetOrCreateParallelMove(
279 static_cast<GapInstruction::InnerPosition>(0), code_zone());
280 // Delete relevant entries in predecessors and move everything to block.
281 bool first_iteration = true;
282 for (auto pred_index : block->predecessors()) {
283 auto pred = code()->InstructionBlockAt(pred_index);
284 auto gap = LastGap(pred);
285 auto move_ops = gap->parallel_moves()[0]->move_operands();
286 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) {
287 if (op->IsRedundant()) continue;
288 MoveKey key = {*op->source(), *op->destination()};
289 auto it = move_map.find(key);
290 DCHECK(it != move_map.end());
291 if (first_iteration) {
292 move->AddMove(op->source(), op->destination(), code_zone());
293 }
294 op->Eliminate();
295 }
296 first_iteration = false;
297 }
298 // Compress.
299 if (!gap_initialized) {
300 MoveOpVector temp_vector(local_zone());
301 CompressMoves(&temp_vector, gap->parallel_moves()[0],
302 gap->parallel_moves()[1]);
303 }
304 }
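[Editor's note] The core of OptimizeMerge above is a counting argument: a (source, destination) pair may be hoisted into the merge block only when the identical move appears in the last gap of every predecessor. The toy below models just that counting step with plain ints instead of InstructionOperand; note the real pass additionally gives up unless every non-redundant predecessor move is shared.

```cpp
#include <cassert>
#include <cstddef>
#include <map>
#include <utility>
#include <vector>

using ToyOperandId = int;
using ToyMoveKey = std::pair<ToyOperandId, ToyOperandId>;  // (source, destination)

// Count identical moves across predecessors and keep those present in all of
// them -- the move_map / correct_counts idea from OptimizeMerge.
std::vector<ToyMoveKey> SharedMoves(
    const std::vector<std::vector<ToyMoveKey>>& predecessor_gaps) {
  std::map<ToyMoveKey, std::size_t> counts;
  for (const auto& gap : predecessor_gaps) {
    for (const auto& move : gap) counts[move]++;
  }
  std::vector<ToyMoveKey> shared;
  for (const auto& entry : counts) {
    if (entry.second == predecessor_gaps.size()) shared.push_back(entry.first);
  }
  return shared;
}

int main() {
  // Both predecessors move 1 -> 2; only the first also moves 3 -> 4.
  std::vector<std::vector<ToyMoveKey>> preds = {{{1, 2}, {3, 4}}, {{1, 2}}};
  auto shared = SharedMoves(preds);
  assert(shared.size() == 1);
  assert(shared[0] == ToyMoveKey(1, 2));
  return 0;
}
```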
305
306
307 // Split multiple loads of the same constant or stack slot off into the second
308 // slot and keep remaining moves in the first slot.
309 void MoveOptimizer::FinalizeMoves(GapInstruction* gap) {
310 auto loads = temp_vector_0();
311 DCHECK(loads.empty());
312 auto new_moves = temp_vector_1();
313 DCHECK(new_moves.empty());
314 auto move_ops = gap->parallel_moves()[0]->move_operands();
315 for (auto move = move_ops->begin(); move != move_ops->end(); ++move) {
316 if (move->IsRedundant()) {
317 move->Eliminate();
318 continue;
319 }
320 if (!(move->source()->IsConstant() || move->source()->IsStackSlot() ||
321 move->source()->IsDoubleStackSlot()))
322 continue;
323 // Search for existing move to this slot.
324 MoveOperands* found = nullptr;
OLD: 152 for (auto load : *loads) {
NEW: 325 for (auto load : loads) {
326 if (load->source()->Equals(move->source())) {
327 found = load;
328 break;
329 }
330 }
331 // Not found so insert.
332 if (found == nullptr) {
OLD: 160 loads->push_back(move);
NEW: 333 loads.push_back(move);
334 // Replace source with copy for later use.
335 auto dest = move->destination();
336 move->set_destination(new (code_zone())
337 InstructionOperand(dest->kind(), dest->index()));
338 continue;
339 }
340 if ((found->destination()->IsStackSlot() ||
341 found->destination()->IsDoubleStackSlot()) &&
342 !(move->destination()->IsStackSlot() ||
343 move->destination()->IsDoubleStackSlot())) {
344 // Found a better source for this load. Smash it in place to affect other
345 // loads that have already been split.
346 InstructionOperand::Kind found_kind = found->destination()->kind();
347 int found_index = found->destination()->index();
348 auto next_dest =
349 new (code_zone()) InstructionOperand(found_kind, found_index);
350 auto dest = move->destination();
351 found->destination()->ConvertTo(dest->kind(), dest->index());
352 move->set_destination(next_dest);
353 }
354 // move from load destination.
355 move->set_source(found->destination());
OLD: 183 new_moves->push_back(move);
NEW: 356 new_moves.push_back(move);
357 }
OLD: 185 loads->clear();
NEW: 358 loads.clear();
OLD: 186 if (new_moves->empty()) return;
NEW: 359 if (new_moves.empty()) return;
360 // Insert all new moves into slot 1.
361 auto slot_1 = gap->GetOrCreateParallelMove(
362 static_cast<GapInstruction::InnerPosition>(1), code_zone());
363 DCHECK(slot_1->move_operands()->is_empty());
364 slot_1->move_operands()->AddBlock(MoveOperands(nullptr, nullptr),
OLD: 192 static_cast<int>(new_moves->size()),
NEW: 365 static_cast<int>(new_moves.size()),
366 code_zone());
367 auto it = slot_1->move_operands()->begin();
OLD: 195 for (auto new_move : *new_moves) {
NEW: 368 for (auto new_move : new_moves) {
369 std::swap(*new_move, *it);
370 ++it;
371 }
372 DCHECK_EQ(it, slot_1->move_operands()->end());
OLD: 200 new_moves->clear();
NEW: 373 new_moves.clear();
374 }
375
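[Editor's note] The rewriting step of FinalizeMoves above can be summarised with toy data (ints instead of operands): when several moves load from the same constant or stack slot, only the first keeps the original source; the rest are re-pointed at the first move's destination. Only that re-sourcing is modelled here; the preference for stack-slot destinations and the insertion of the rewritten moves into gap slot 1 are left out.

```cpp
#include <cassert>
#include <map>
#include <utility>
#include <vector>

using ToyOperand = int;
struct ToyMove {
  ToyOperand src;
  ToyOperand dst;
};

// Keep the first load from each source; later loads of the same source become
// copies from that first load's destination (cf. the `found` handling above).
std::vector<ToyMove> SplitRepeatedLoads(std::vector<ToyMove> moves) {
  std::map<ToyOperand, ToyOperand> first_dest;  // source -> first destination
  for (auto& move : moves) {
    auto it = first_dest.find(move.src);
    if (it == first_dest.end()) {
      first_dest.insert(std::make_pair(move.src, move.dst));
    } else {
      move.src = it->second;  // re-source the duplicate load
    }
  }
  return moves;
}

int main() {
  // The constant "7" is loaded into destinations 10 and 11.
  std::vector<ToyMove> moves = {{7, 10}, {7, 11}};
  auto rewritten = SplitRepeatedLoads(moves);
  assert(rewritten[0].src == 7 && rewritten[0].dst == 10);
  assert(rewritten[1].src == 10 && rewritten[1].dst == 11);  // now copies from 10
  return 0;
}
```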
376 } // namespace compiler
377 } // namespace internal
378 } // namespace v8
