OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/move-optimizer.h" | 5 #include "src/compiler/move-optimizer.h" |
6 | 6 |
7 namespace v8 { | 7 namespace v8 { |
8 namespace internal { | 8 namespace internal { |
9 namespace compiler { | 9 namespace compiler { |
10 | 10 |
11 MoveOptimizer::MoveOptimizer(Zone* local_zone, InstructionSequence* code) | 11 namespace { |
12 : local_zone_(local_zone), | |
13 code_(code), | |
14 temp_vector_0_(local_zone), | |
15 temp_vector_1_(local_zone) {} | |
16 | 12 |
17 | 13 MoveOperands* PrepareInsertAfter(ParallelMove* left, MoveOperands* move, |
18 void MoveOptimizer::Run() { | 14 Zone* zone) { |
19 // First smash all consecutive moves into the left most move slot. | |
20 for (auto* block : code()->instruction_blocks()) { | |
21 GapInstruction* prev_gap = nullptr; | |
22 for (int index = block->code_start(); index < block->code_end(); ++index) { | |
23 auto instr = code()->instructions()[index]; | |
24 if (!instr->IsGapMoves()) { | |
25 if (instr->IsSourcePosition() || instr->IsNop()) continue; | |
26 FinalizeMoves(&temp_vector_0_, &temp_vector_1_, prev_gap); | |
27 prev_gap = nullptr; | |
28 continue; | |
29 } | |
30 auto gap = GapInstruction::cast(instr); | |
31 // Find first non-empty slot. | |
32 int i = GapInstruction::FIRST_INNER_POSITION; | |
33 for (; i <= GapInstruction::LAST_INNER_POSITION; i++) { | |
34 auto move = gap->parallel_moves()[i]; | |
35 if (move == nullptr) continue; | |
36 auto move_ops = move->move_operands(); | |
37 auto op = move_ops->begin(); | |
38 for (; op != move_ops->end(); ++op) { | |
39 if (!op->IsRedundant()) break; | |
40 } | |
41 if (op == move_ops->end()) { | |
42 move_ops->Rewind(0); // Clear this redundant move. | |
43 } else { | |
44 break; // Found index of first non-redundant move. | |
45 } | |
46 } | |
47 // Nothing to do here. | |
48 if (i == GapInstruction::LAST_INNER_POSITION + 1) { | |
49 if (prev_gap != nullptr) { | |
50 // Slide prev_gap down so we always know where to look for it. | |
51 std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]); | |
52 prev_gap = gap; | |
53 } | |
54 continue; | |
55 } | |
56 // Move the first non-empty gap to position 0. | |
57 std::swap(gap->parallel_moves()[0], gap->parallel_moves()[i]); | |
58 auto left = gap->parallel_moves()[0]; | |
59 // Compress everything into position 0. | |
60 for (++i; i <= GapInstruction::LAST_INNER_POSITION; ++i) { | |
61 auto move = gap->parallel_moves()[i]; | |
62 if (move == nullptr) continue; | |
63 CompressMoves(&temp_vector_0_, left, move); | |
64 } | |
65 if (prev_gap != nullptr) { | |
66 // Smash left into prev_gap, killing left. | |
67 auto pred_moves = prev_gap->parallel_moves()[0]; | |
68 CompressMoves(&temp_vector_0_, pred_moves, left); | |
69 std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]); | |
70 } | |
71 prev_gap = gap; | |
72 } | |
73 FinalizeMoves(&temp_vector_0_, &temp_vector_1_, prev_gap); | |
74 } | |
75 } | |
76 | |
77 | |
78 static MoveOperands* PrepareInsertAfter(ParallelMove* left, MoveOperands* move, | |
79 Zone* zone) { | |
80 auto move_ops = left->move_operands(); | 15 auto move_ops = left->move_operands(); |
81 MoveOperands* replacement = nullptr; | 16 MoveOperands* replacement = nullptr; |
82 MoveOperands* to_eliminate = nullptr; | 17 MoveOperands* to_eliminate = nullptr; |
83 for (auto curr = move_ops->begin(); curr != move_ops->end(); ++curr) { | 18 for (auto curr = move_ops->begin(); curr != move_ops->end(); ++curr) { |
84 if (curr->IsEliminated()) continue; | 19 if (curr->IsEliminated()) continue; |
85 if (curr->destination()->Equals(move->source())) { | 20 if (curr->destination()->Equals(move->source())) { |
86 DCHECK_EQ(nullptr, replacement); | 21 DCHECK_EQ(nullptr, replacement); |
87 replacement = curr; | 22 replacement = curr; |
88 if (to_eliminate != nullptr) break; | 23 if (to_eliminate != nullptr) break; |
89 } else if (curr->destination()->Equals(move->destination())) { | 24 } else if (curr->destination()->Equals(move->destination())) { |
90 DCHECK_EQ(nullptr, to_eliminate); | 25 DCHECK_EQ(nullptr, to_eliminate); |
91 to_eliminate = curr; | 26 to_eliminate = curr; |
92 if (replacement != nullptr) break; | 27 if (replacement != nullptr) break; |
93 } | 28 } |
94 } | 29 } |
95 DCHECK(!(replacement == to_eliminate && replacement != nullptr)); | 30 DCHECK(!(replacement == to_eliminate && replacement != nullptr)); |
96 if (replacement != nullptr) { | 31 if (replacement != nullptr) { |
97 auto new_source = new (zone) InstructionOperand( | 32 auto new_source = new (zone) InstructionOperand( |
98 replacement->source()->kind(), replacement->source()->index()); | 33 replacement->source()->kind(), replacement->source()->index()); |
99 move->set_source(new_source); | 34 move->set_source(new_source); |
100 } | 35 } |
101 return to_eliminate; | 36 return to_eliminate; |
102 } | 37 } |
103 | 38 |
104 | 39 |
| 40 bool GapsCanMoveOver(Instruction* instr) { |
| 41 DCHECK(!instr->IsGapMoves()); |
| 42 return instr->IsSourcePosition() || instr->IsNop(); |
| 43 } |
| 44 |
| 45 |
| 46 int FindFirstNonEmptySlot(GapInstruction* gap) { |
| 47 int i = GapInstruction::FIRST_INNER_POSITION; |
| 48 for (; i <= GapInstruction::LAST_INNER_POSITION; i++) { |
| 49 auto move = gap->parallel_moves()[i]; |
| 50 if (move == nullptr) continue; |
| 51 auto move_ops = move->move_operands(); |
| 52 auto op = move_ops->begin(); |
| 53 for (; op != move_ops->end(); ++op) { |
| 54 if (!op->IsRedundant()) break; |
| 55 op->Eliminate(); |
| 56 } |
| 57 if (op != move_ops->end()) break; // Found non-redundant move. |
| 58 move_ops->Rewind(0); // Clear this redundant move. |
| 59 } |
| 60 return i; |
| 61 } |
| 62 |
| 63 } // namespace |
| 64 |
| 65 |
| 66 MoveOptimizer::MoveOptimizer(Zone* local_zone, InstructionSequence* code) |
| 67 : local_zone_(local_zone), |
| 68 code_(code), |
| 69 to_finalize_(local_zone), |
| 70 temp_vector_0_(local_zone), |
| 71 temp_vector_1_(local_zone) {} |
| 72 |
| 73 |
| 74 void MoveOptimizer::Run() { |
| 75 for (auto* block : code()->instruction_blocks()) { |
| 76 CompressBlock(block); |
| 77 } |
| 78 for (auto gap : to_finalize_) { |
| 79 FinalizeMoves(gap); |
| 80 } |
| 81 } |
| 82 |
| 83 |
105 void MoveOptimizer::CompressMoves(MoveOpVector* eliminated, ParallelMove* left, | 84 void MoveOptimizer::CompressMoves(MoveOpVector* eliminated, ParallelMove* left, |
106 ParallelMove* right) { | 85 ParallelMove* right) { |
107 DCHECK(eliminated->empty()); | 86 DCHECK(eliminated->empty()); |
108 auto move_ops = right->move_operands(); | 87 auto move_ops = right->move_operands(); |
109 // Modify the right moves in place and collect moves that will be killed by | 88 if (!left->move_operands()->is_empty()) { |
110 // merging the two gaps. | 89 // Modify the right moves in place and collect moves that will be killed by |
111 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { | 90 // merging the two gaps. |
112 if (op->IsRedundant()) continue; | 91 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { |
113 MoveOperands* to_eliminate = PrepareInsertAfter(left, op, code_zone()); | 92 if (op->IsRedundant()) continue; |
114 if (to_eliminate != nullptr) { | 93 MoveOperands* to_eliminate = PrepareInsertAfter(left, op, code_zone()); |
115 eliminated->push_back(to_eliminate); | 94 if (to_eliminate != nullptr) eliminated->push_back(to_eliminate); |
116 } | 95 } |
| 96 // Eliminate dead moves. Must happen before insertion of new moves as the |
| 97 // contents of eliminated are pointers into a list. |
| 98 for (auto to_eliminate : *eliminated) { |
| 99 to_eliminate->Eliminate(); |
| 100 } |
| 101 eliminated->clear(); |
117 } | 102 } |
118 // Eliminate dead moves. Must happen before insertion of new moves as the | |
119 // contents of eliminated are pointers into a list. | |
120 for (auto to_eliminate : *eliminated) { | |
121 to_eliminate->Eliminate(); | |
122 } | |
123 eliminated->clear(); | |
124 // Add all possibly modified moves from right side. | 103 // Add all possibly modified moves from right side. |
125 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { | 104 for (auto op = move_ops->begin(); op != move_ops->end(); ++op) { |
126 if (op->IsRedundant()) continue; | 105 if (op->IsRedundant()) continue; |
127 left->move_operands()->Add(*op, code_zone()); | 106 left->move_operands()->Add(*op, code_zone()); |
128 } | 107 } |
129 // Nuke right. | 108 // Nuke right. |
130 move_ops->Rewind(0); | 109 move_ops->Rewind(0); |
131 } | 110 } |
132 | 111 |
133 | 112 |
134 void MoveOptimizer::FinalizeMoves(MoveOpVector* loads, MoveOpVector* new_moves, | 113 // Smash all consecutive moves into the left most move slot and accumulate them |
135 GapInstruction* gap) { | 114 // as much as possible across instructions. |
136 DCHECK(loads->empty()); | 115 void MoveOptimizer::CompressBlock(InstructionBlock* block) { |
137 DCHECK(new_moves->empty()); | 116 auto temp_vector = temp_vector_0(); |
138 if (gap == nullptr) return; | 117 DCHECK(temp_vector.empty()); |
139 // Split multiple loads of the same constant or stack slot off into the second | 118 GapInstruction* prev_gap = nullptr; |
140 // slot and keep remaining moves in the first slot. | 119 for (int index = block->code_start(); index < block->code_end(); ++index) { |
| 120 auto instr = code()->instructions()[index]; |
| 121 if (!instr->IsGapMoves()) { |
| 122 if (GapsCanMoveOver(instr)) continue; |
| 123 if (prev_gap != nullptr) to_finalize_.push_back(prev_gap); |
| 124 prev_gap = nullptr; |
| 125 continue; |
| 126 } |
| 127 auto gap = GapInstruction::cast(instr); |
| 128 int i = FindFirstNonEmptySlot(gap); |
| 129 // Nothing to do here. |
| 130 if (i == GapInstruction::LAST_INNER_POSITION + 1) { |
| 131 if (prev_gap != nullptr) { |
| 132 // Slide prev_gap down so we always know where to look for it. |
| 133 std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]); |
| 134 prev_gap = gap; |
| 135 } |
| 136 continue; |
| 137 } |
| 138 // Move the first non-empty gap to position 0. |
| 139 std::swap(gap->parallel_moves()[0], gap->parallel_moves()[i]); |
| 140 auto left = gap->parallel_moves()[0]; |
| 141 // Compress everything into position 0. |
| 142 for (++i; i <= GapInstruction::LAST_INNER_POSITION; ++i) { |
| 143 auto move = gap->parallel_moves()[i]; |
| 144 if (move == nullptr) continue; |
| 145 CompressMoves(&temp_vector, left, move); |
| 146 } |
| 147 if (prev_gap != nullptr) { |
| 148 // Smash left into prev_gap, killing left. |
| 149 auto pred_moves = prev_gap->parallel_moves()[0]; |
| 150 CompressMoves(&temp_vector, pred_moves, left); |
| 151 // Slide prev_gap down so we always know where to look for it. |
| 152 std::swap(prev_gap->parallel_moves()[0], gap->parallel_moves()[0]); |
| 153 } |
| 154 prev_gap = gap; |
| 155 } |
| 156 if (prev_gap != nullptr) to_finalize_.push_back(prev_gap); |
| 157 } |
| 158 |
| 159 |
| 160 // Split multiple loads of the same constant or stack slot off into the second |
| 161 // slot and keep remaining moves in the first slot. |
| 162 void MoveOptimizer::FinalizeMoves(GapInstruction* gap) { |
| 163 auto loads = temp_vector_0(); |
| 164 DCHECK(loads.empty()); |
| 165 auto new_moves = temp_vector_1(); |
| 166 DCHECK(new_moves.empty()); |
141 auto move_ops = gap->parallel_moves()[0]->move_operands(); | 167 auto move_ops = gap->parallel_moves()[0]->move_operands(); |
142 for (auto move = move_ops->begin(); move != move_ops->end(); ++move) { | 168 for (auto move = move_ops->begin(); move != move_ops->end(); ++move) { |
143 if (move->IsRedundant()) { | 169 if (move->IsRedundant()) { |
144 move->Eliminate(); | 170 move->Eliminate(); |
145 continue; | 171 continue; |
146 } | 172 } |
147 if (!(move->source()->IsConstant() || move->source()->IsStackSlot() || | 173 if (!(move->source()->IsConstant() || move->source()->IsStackSlot() || |
148 move->source()->IsDoubleStackSlot())) | 174 move->source()->IsDoubleStackSlot())) |
149 continue; | 175 continue; |
150 // Search for existing move to this slot. | 176 // Search for existing move to this slot. |
151 MoveOperands* found = nullptr; | 177 MoveOperands* found = nullptr; |
152 for (auto load : *loads) { | 178 for (auto load : loads) { |
153 if (load->source()->Equals(move->source())) { | 179 if (load->source()->Equals(move->source())) { |
154 found = load; | 180 found = load; |
155 break; | 181 break; |
156 } | 182 } |
157 } | 183 } |
158 // Not found so insert. | 184 // Not found so insert. |
159 if (found == nullptr) { | 185 if (found == nullptr) { |
160 loads->push_back(move); | 186 loads.push_back(move); |
161 // Replace source with copy for later use. | 187 // Replace source with copy for later use. |
162 auto dest = move->destination(); | 188 auto dest = move->destination(); |
163 move->set_destination(new (code_zone()) | 189 move->set_destination(new (code_zone()) |
164 InstructionOperand(dest->kind(), dest->index())); | 190 InstructionOperand(dest->kind(), dest->index())); |
165 continue; | 191 continue; |
166 } | 192 } |
167 if ((found->destination()->IsStackSlot() || | 193 if ((found->destination()->IsStackSlot() || |
168 found->destination()->IsDoubleStackSlot()) && | 194 found->destination()->IsDoubleStackSlot()) && |
169 !(move->destination()->IsStackSlot() || | 195 !(move->destination()->IsStackSlot() || |
170 move->destination()->IsDoubleStackSlot())) { | 196 move->destination()->IsDoubleStackSlot())) { |
171 // Found a better source for this load. Smash it in place to affect other | 197 // Found a better source for this load. Smash it in place to affect other |
172 // loads that have already been split. | 198 // loads that have already been split. |
173 InstructionOperand::Kind found_kind = found->destination()->kind(); | 199 InstructionOperand::Kind found_kind = found->destination()->kind(); |
174 int found_index = found->destination()->index(); | 200 int found_index = found->destination()->index(); |
175 auto next_dest = | 201 auto next_dest = |
176 new (code_zone()) InstructionOperand(found_kind, found_index); | 202 new (code_zone()) InstructionOperand(found_kind, found_index); |
177 auto dest = move->destination(); | 203 auto dest = move->destination(); |
178 found->destination()->ConvertTo(dest->kind(), dest->index()); | 204 found->destination()->ConvertTo(dest->kind(), dest->index()); |
179 move->set_destination(next_dest); | 205 move->set_destination(next_dest); |
180 } | 206 } |
181 // move from load destination. | 207 // move from load destination. |
182 move->set_source(found->destination()); | 208 move->set_source(found->destination()); |
183 new_moves->push_back(move); | 209 new_moves.push_back(move); |
184 } | 210 } |
185 loads->clear(); | 211 loads.clear(); |
186 if (new_moves->empty()) return; | 212 if (new_moves.empty()) return; |
187 // Insert all new moves into slot 1. | 213 // Insert all new moves into slot 1. |
188 auto slot_1 = gap->GetOrCreateParallelMove( | 214 auto slot_1 = gap->GetOrCreateParallelMove( |
189 static_cast<GapInstruction::InnerPosition>(1), code_zone()); | 215 static_cast<GapInstruction::InnerPosition>(1), code_zone()); |
190 DCHECK(slot_1->move_operands()->is_empty()); | 216 DCHECK(slot_1->move_operands()->is_empty()); |
191 slot_1->move_operands()->AddBlock(MoveOperands(nullptr, nullptr), | 217 slot_1->move_operands()->AddBlock(MoveOperands(nullptr, nullptr), |
192 static_cast<int>(new_moves->size()), | 218 static_cast<int>(new_moves.size()), |
193 code_zone()); | 219 code_zone()); |
194 auto it = slot_1->move_operands()->begin(); | 220 auto it = slot_1->move_operands()->begin(); |
195 for (auto new_move : *new_moves) { | 221 for (auto new_move : new_moves) { |
196 std::swap(*new_move, *it); | 222 std::swap(*new_move, *it); |
197 ++it; | 223 ++it; |
198 } | 224 } |
199 DCHECK_EQ(it, slot_1->move_operands()->end()); | 225 DCHECK_EQ(it, slot_1->move_operands()->end()); |
200 new_moves->clear(); | 226 new_moves.clear(); |
201 } | 227 } |
202 | 228 |
203 } // namespace compiler | 229 } // namespace compiler |
204 } // namespace internal | 230 } // namespace internal |
205 } // namespace v8 | 231 } // namespace v8 |
OLD | NEW |