// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/x64/lithium-codegen-x64.h"
#include "src/x64/lithium-gap-resolver-x64.h"

namespace v8 {
namespace internal {

LGapResolver::LGapResolver(LCodeGen* owner)
    : cgen_(owner), moves_(32, owner->zone()) {}


void LGapResolver::Resolve(LParallelMove* parallel_move) {
  DCHECK(moves_.is_empty());
  // Build up a worklist of moves.
  BuildInitialMoveList(parallel_move);

  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands move = moves_[i];
    // Skip constants to perform them last. They don't block other moves
    // and skipping such moves with register destinations keeps those
    // registers free for the whole algorithm.
    if (!move.IsEliminated() && !move.source()->IsConstantOperand()) {
      PerformMove(i);
    }
  }

  // Perform the moves with constant sources.
  for (int i = 0; i < moves_.length(); ++i) {
    if (!moves_[i].IsEliminated()) {
      DCHECK(moves_[i].source()->IsConstantOperand());
      EmitMove(i);
    }
  }

  moves_.Rewind(0);
}
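
// Illustrative example of the ordering in Resolve(), using hypothetical
// operands: for the parallel move { rax -> [stack slot], #42 -> rax }, the
// constant load into rax is deferred to the second loop, so rax still holds
// its previous value while the non-constant moves (and any cycle-breaking
// swaps) are emitted.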


void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
  // Perform a linear sweep of the moves to add them to the initial list of
  // moves to perform, ignoring any move that is redundant (the source is
  // the same as the destination, the destination is ignored and
  // unallocated, or the move was already eliminated).
  const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
  for (int i = 0; i < moves->length(); ++i) {
    LMoveOperands move = moves->at(i);
    if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
  }
  Verify();
}
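
// For example, a move whose source and destination are the same register or
// stack slot, or whose destination is an ignored unallocated operand, is
// dropped here and never reaches the worklist.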


void LGapResolver::PerformMove(int index) {
  // Each call to this function performs a move and deletes it from the move
  // graph. We first recursively perform any move blocking this one. We
  // mark a move as "pending" on entry to PerformMove in order to detect
  // cycles in the move graph. We use operand swaps to resolve cycles,
  // which means that a call to PerformMove could change any source operand
  // in the move graph.

  DCHECK(!moves_[index].IsPending());
  DCHECK(!moves_[index].IsRedundant());

  // Clear this move's destination to indicate a pending move. The actual
  // destination is saved in a stack-allocated local. Recursion may allow
  // multiple moves to be pending.
  DCHECK(moves_[index].source() != NULL);  // Or else it will look eliminated.
  LOperand* destination = moves_[index].destination();
  moves_[index].set_destination(NULL);

  // Perform a depth-first traversal of the move graph to resolve
  // dependencies. Any unperformed, unpending move with a source the same
  // as this one's destination blocks this one so recursively perform all
  // such moves.
  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands other_move = moves_[i];
    if (other_move.Blocks(destination) && !other_move.IsPending()) {
      // Though PerformMove can change any source operand in the move graph,
      // this call cannot create a blocking move via a swap (this loop does
      // not miss any). Assume there is a non-blocking move with source A
      // and this move is blocked on source B and there is a swap of A and
      // B. Then A and B must be involved in the same cycle (or they would
      // not be swapped). Since this move's destination is B and there is
      // only a single incoming edge to an operand, this move must also be
      // involved in the same cycle. In that case, the blocking move will
      // be created but will be "pending" when we return from PerformMove.
      PerformMove(i);
    }
  }

  // We are about to resolve this move and don't need it marked as
  // pending, so restore its destination.
  moves_[index].set_destination(destination);

  // This move's source may have changed due to swaps to resolve cycles and
  // so it may now be the last move in the cycle. If so remove it.
  if (moves_[index].source()->Equals(destination)) {
    moves_[index].Eliminate();
    return;
  }

  // The move may be blocked on a (at most one) pending move, in which case
  // we have a cycle. Search for such a blocking move and perform a swap to
  // resolve it.
  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands other_move = moves_[i];
    if (other_move.Blocks(destination)) {
      DCHECK(other_move.IsPending());
      EmitSwap(index);
      return;
    }
  }

  // This move is not blocked.
  EmitMove(index);
}
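
// Illustrative trace of the cycle handling above, using hypothetical
// registers: for the parallel move { rax -> rbx, rbx -> rax },
// PerformMove(0) marks move 0 as pending and recurses into move 1, whose
// source rbx is move 0's destination. PerformMove(1) then finds move 0
// blocking its own destination rax while pending, so it resolves the cycle
// with EmitSwap(1) (an xchgq of rax and rbx); the fix-up loop in EmitSwap
// rewrites move 0's source from rax to rbx. Back in PerformMove(0), the
// restored move reads rbx -> rbx and is eliminated as redundant.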


void LGapResolver::Verify() {
#ifdef ENABLE_SLOW_DCHECKS
  // No operand should be the destination for more than one move.
  for (int i = 0; i < moves_.length(); ++i) {
    LOperand* destination = moves_[i].destination();
    for (int j = i + 1; j < moves_.length(); ++j) {
      SLOW_DCHECK(!destination->Equals(moves_[j].destination()));
    }
  }
#endif
}


#define __ ACCESS_MASM(cgen_->masm())


void LGapResolver::EmitMove(int index) {
  LOperand* source = moves_[index].source();
  LOperand* destination = moves_[index].destination();

  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    Register src = cgen_->ToRegister(source);
    if (destination->IsRegister()) {
      Register dst = cgen_->ToRegister(destination);
      __ movp(dst, src);
    } else {
      DCHECK(destination->IsStackSlot());
      Operand dst = cgen_->ToOperand(destination);
      __ movp(dst, src);
    }

  } else if (source->IsStackSlot()) {
    Operand src = cgen_->ToOperand(source);
    if (destination->IsRegister()) {
      Register dst = cgen_->ToRegister(destination);
      __ movp(dst, src);
    } else {
      DCHECK(destination->IsStackSlot());
      Operand dst = cgen_->ToOperand(destination);
      __ movp(kScratchRegister, src);
      __ movp(dst, kScratchRegister);
    }

  } else if (source->IsConstantOperand()) {
    LConstantOperand* constant_source = LConstantOperand::cast(source);
    if (destination->IsRegister()) {
      Register dst = cgen_->ToRegister(destination);
      if (cgen_->IsSmiConstant(constant_source)) {
        __ Move(dst, cgen_->ToSmi(constant_source));
      } else if (cgen_->IsInteger32Constant(constant_source)) {
        int32_t constant = cgen_->ToInteger32(constant_source);
        // Do sign extension only for constants used as de-hoisted array
        // keys. Other constants only need zero extension, which saves 2
        // bytes.
        if (cgen_->IsDehoistedKeyConstant(constant_source)) {
          __ Set(dst, constant);
        } else {
          __ Set(dst, static_cast<uint32_t>(constant));
        }
      } else {
        __ Move(dst, cgen_->ToHandle(constant_source));
      }
    } else if (destination->IsDoubleRegister()) {
      double v = cgen_->ToDouble(constant_source);
      uint64_t int_val = bit_cast<uint64_t, double>(v);
      XMMRegister dst = cgen_->ToDoubleRegister(destination);
      if (int_val == 0) {
        __ Xorpd(dst, dst);
      } else {
        __ Set(kScratchRegister, int_val);
        __ Movq(dst, kScratchRegister);
      }
    } else {
      DCHECK(destination->IsStackSlot());
      Operand dst = cgen_->ToOperand(destination);
      if (cgen_->IsSmiConstant(constant_source)) {
        __ Move(dst, cgen_->ToSmi(constant_source));
      } else if (cgen_->IsInteger32Constant(constant_source)) {
        // Do sign extension to 64 bits when stored into stack slot.
        __ movp(dst, Immediate(cgen_->ToInteger32(constant_source)));
      } else {
        __ Move(kScratchRegister, cgen_->ToHandle(constant_source));
        __ movp(dst, kScratchRegister);
      }
    }

  } else if (source->IsDoubleRegister()) {
    XMMRegister src = cgen_->ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      __ movaps(cgen_->ToDoubleRegister(destination), src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ Movsd(cgen_->ToOperand(destination), src);
    }
  } else if (source->IsDoubleStackSlot()) {
    Operand src = cgen_->ToOperand(source);
    if (destination->IsDoubleRegister()) {
      __ Movsd(cgen_->ToDoubleRegister(destination), src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ Movsd(xmm0, src);
      __ Movsd(cgen_->ToOperand(destination), xmm0);
    }
  } else {
    UNREACHABLE();
  }

  moves_[index].Eliminate();
}
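
// Note on the stack-slot-to-stack-slot cases above: x64 has no direct
// memory-to-memory mov, so those moves are staged through kScratchRegister
// for general-purpose slots and through xmm0 for double slots.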


void LGapResolver::EmitSwap(int index) {
  LOperand* source = moves_[index].source();
  LOperand* destination = moves_[index].destination();

  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister() && destination->IsRegister()) {
    // Swap two general-purpose registers.
    Register src = cgen_->ToRegister(source);
    Register dst = cgen_->ToRegister(destination);
    __ xchgq(dst, src);

  } else if ((source->IsRegister() && destination->IsStackSlot()) ||
             (source->IsStackSlot() && destination->IsRegister())) {
    // Swap a general-purpose register and a stack slot.
    Register reg =
        cgen_->ToRegister(source->IsRegister() ? source : destination);
    Operand mem =
        cgen_->ToOperand(source->IsRegister() ? destination : source);
    __ movp(kScratchRegister, mem);
    __ movp(mem, reg);
    __ movp(reg, kScratchRegister);

  } else if ((source->IsStackSlot() && destination->IsStackSlot()) ||
             (source->IsDoubleStackSlot() &&
              destination->IsDoubleStackSlot())) {
    // Swap two stack slots or two double stack slots.
    Operand src = cgen_->ToOperand(source);
    Operand dst = cgen_->ToOperand(destination);
    __ Movsd(xmm0, src);
    __ movp(kScratchRegister, dst);
    __ Movsd(dst, xmm0);
    __ movp(src, kScratchRegister);

  } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) {
    // Swap two double registers.
    XMMRegister source_reg = cgen_->ToDoubleRegister(source);
    XMMRegister destination_reg = cgen_->ToDoubleRegister(destination);
    __ movaps(xmm0, source_reg);
    __ movaps(source_reg, destination_reg);
    __ movaps(destination_reg, xmm0);

  } else if (source->IsDoubleRegister() || destination->IsDoubleRegister()) {
    // Swap a double register and a double stack slot.
    DCHECK((source->IsDoubleRegister() && destination->IsDoubleStackSlot()) ||
           (source->IsDoubleStackSlot() && destination->IsDoubleRegister()));
    XMMRegister reg = cgen_->ToDoubleRegister(source->IsDoubleRegister()
                                                  ? source
                                                  : destination);
    LOperand* other = source->IsDoubleRegister() ? destination : source;
    DCHECK(other->IsDoubleStackSlot());
    Operand other_operand = cgen_->ToOperand(other);
    __ Movsd(xmm0, other_operand);
    __ Movsd(other_operand, reg);
    __ movaps(reg, xmm0);

  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }

  // The swap of source and destination has executed a move from source to
  // destination.
  moves_[index].Eliminate();

  // Any unperformed (including pending) move with a source equal to either
  // this move's source or destination needs to have its source changed to
  // reflect the state of affairs after the swap.
  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands other_move = moves_[i];
    if (other_move.Blocks(source)) {
      moves_[i].set_source(destination);
    } else if (other_move.Blocks(destination)) {
      moves_[i].set_source(source);
    }
  }
}
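
// Illustrative note on the source-rewriting loop above: a cycle of n moves
// is resolved with n - 1 swaps. For the hypothetical parallel move
// { rax -> rbx, rbx -> rcx, rcx -> rax }, the innermost recursion swaps rcx
// and rax and rewrites the remaining pending moves to read
// { rcx -> rbx, rbx -> rcx }; one further swap plus the redundant-move
// elimination in PerformMove then finishes the cycle.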

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64