OLD | NEW |
| (Empty) |
1 // Copyright 2012 the V8 project authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "src/lithium-allocator.h" | |
6 | |
7 #include "src/hydrogen.h" | |
8 #include "src/lithium-inl.h" | |
9 #include "src/lithium-allocator-inl.h" | |
10 #include "src/register-configuration.h" | |
11 #include "src/string-stream.h" | |
12 | |
13 namespace v8 { | |
14 namespace internal { | |
15 | |
16 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { | |
17 return a.Value() < b.Value() ? a : b; | |
18 } | |
19 | |
20 | |
21 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { | |
22 return a.Value() > b.Value() ? a : b; | |
23 } | |
24 | |
25 | |
// A single use of a value at lifetime position |pos|. |operand| is the
// (possibly unallocated) operand of the use; |hint| is an optional operand
// whose location is preferred when assigning a register. For unallocated
// operands, register requirements are cached from the operand's policy.
UsePosition::UsePosition(LifetimePosition pos,
                         LOperand* operand,
                         LOperand* hint)
    : operand_(operand),
      hint_(hint),
      pos_(pos),
      next_(NULL),
      requires_reg_(false),
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {
    LUnallocated* unalloc = LUnallocated::cast(operand_);
    // A register (or double-register) policy forces this use into a register.
    requires_reg_ = unalloc->HasRegisterPolicy() ||
        unalloc->HasDoubleRegisterPolicy();
    // With an "any" policy the use is indifferent to its location, so a
    // register is not even beneficial.
    register_beneficial_ = !unalloc->HasAnyPolicy();
  }
  DCHECK(pos_.IsValid());
}
43 | |
44 | |
45 bool UsePosition::HasHint() const { | |
46 return hint_ != NULL && !hint_->IsUnallocated(); | |
47 } | |
48 | |
49 | |
// True when this use must live in a register (the unallocated operand it
// was built from had a register or double-register policy).
bool UsePosition::RequiresRegister() const {
  return requires_reg_;
}
53 | |
54 | |
// True unless the originating operand had an "any" policy, i.e. a register
// would be profitable even though it is not strictly required.
bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}
58 | |
59 | |
// Splits [start, end[ at |pos|, which must lie strictly inside the interval,
// leaving this interval as [start, pos[ chained to a new [pos, end[.
void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos.Value() != start().Value());
  UseInterval* after = new(zone) UseInterval(pos, end_);
  // Splice the tail interval in directly after this one.
  after->next_ = next_;
  next_ = after;
  end_ = pos;
}
67 | |
68 | |
69 #ifdef DEBUG | |
70 | |
71 | |
72 void LiveRange::Verify() const { | |
73 UsePosition* cur = first_pos_; | |
74 while (cur != NULL) { | |
75 DCHECK(Start().Value() <= cur->pos().Value() && | |
76 cur->pos().Value() <= End().Value()); | |
77 cur = cur->next(); | |
78 } | |
79 } | |
80 | |
81 | |
82 bool LiveRange::HasOverlap(UseInterval* target) const { | |
83 UseInterval* current_interval = first_interval_; | |
84 while (current_interval != NULL) { | |
85 // Intervals overlap if the start of one is contained in the other. | |
86 if (current_interval->Contains(target->start()) || | |
87 target->Contains(current_interval->start())) { | |
88 return true; | |
89 } | |
90 current_interval = current_interval->next(); | |
91 } | |
92 return false; | |
93 } | |
94 | |
95 | |
96 #endif | |
97 | |
98 | |
// Creates an empty live range with the given id: not spilled, no assigned
// register, no intervals and no use positions. spill_operand_ is a fresh
// placeholder LOperand (IsIgnored() until SetSpillOperand converts it), and
// spill_start_index_ starts at kMaxInt so any real index lowers it.
LiveRange::LiveRange(int id, Zone* zone)
    : id_(id),
      spilled_(false),
      kind_(UNALLOCATED_REGISTERS),
      assigned_register_(kInvalidAssignment),
      last_interval_(NULL),
      first_interval_(NULL),
      first_pos_(NULL),
      parent_(NULL),
      next_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      current_hint_operand_(NULL),
      spill_operand_(new (zone) LOperand()),
      spill_start_index_(kMaxInt) {}
114 | |
115 | |
// Assigns |reg| to this range and rewrites all of its use operands to that
// register. Only legal while the range is neither assigned nor spilled.
void LiveRange::set_assigned_register(int reg, Zone* zone) {
  DCHECK(!HasRegisterAssigned() && !IsSpilled());
  assigned_register_ = reg;
  ConvertOperands(zone);
}
121 | |
122 | |
// Marks this range as spilled and rewrites its use operands to the spill
// location. The top-level range must already have a concrete spill operand,
// and any previously assigned register is dropped.
void LiveRange::MakeSpilled(Zone* zone) {
  DCHECK(!IsSpilled());
  DCHECK(TopLevel()->HasAllocatedSpillOperand());
  spilled_ = true;
  assigned_register_ = kInvalidAssignment;
  ConvertOperands(zone);
}
130 | |
131 | |
// True once SetSpillOperand has converted the placeholder: a fresh spill
// operand is IsIgnored() until a real location is assigned.
bool LiveRange::HasAllocatedSpillOperand() const {
  DCHECK(spill_operand_ != NULL);
  return !spill_operand_->IsIgnored();
}
136 | |
137 | |
// Fixes the spill location of this range to |operand|'s concrete location.
// May be called at most once (the placeholder must still be ignored), and
// only with an already-allocated operand.
void LiveRange::SetSpillOperand(LOperand* operand) {
  DCHECK(!operand->IsUnallocated());
  DCHECK(spill_operand_ != NULL);
  DCHECK(spill_operand_->IsIgnored());
  // Convert the placeholder in place so existing pointers to it stay valid.
  spill_operand_->ConvertTo(operand->kind(), operand->index());
}
144 | |
145 | |
146 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { | |
147 UsePosition* use_pos = last_processed_use_; | |
148 if (use_pos == NULL) use_pos = first_pos(); | |
149 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) { | |
150 use_pos = use_pos->next(); | |
151 } | |
152 last_processed_use_ = use_pos; | |
153 return use_pos; | |
154 } | |
155 | |
156 | |
157 UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial( | |
158 LifetimePosition start) { | |
159 UsePosition* pos = NextUsePosition(start); | |
160 while (pos != NULL && !pos->RegisterIsBeneficial()) { | |
161 pos = pos->next(); | |
162 } | |
163 return pos; | |
164 } | |
165 | |
166 | |
167 UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial( | |
168 LifetimePosition start) { | |
169 UsePosition* pos = first_pos(); | |
170 UsePosition* prev = NULL; | |
171 while (pos != NULL && pos->pos().Value() < start.Value()) { | |
172 if (pos->RegisterIsBeneficial()) prev = pos; | |
173 pos = pos->next(); | |
174 } | |
175 return prev; | |
176 } | |
177 | |
178 | |
179 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) { | |
180 UsePosition* pos = NextUsePosition(start); | |
181 while (pos != NULL && !pos->RequiresRegister()) { | |
182 pos = pos->next(); | |
183 } | |
184 return pos; | |
185 } | |
186 | |
187 | |
188 bool LiveRange::CanBeSpilled(LifetimePosition pos) { | |
189 // We cannot spill a live range that has a use requiring a register | |
190 // at the current or the immediate next position. | |
191 UsePosition* use_pos = NextRegisterPosition(pos); | |
192 if (use_pos == NULL) return true; | |
193 return | |
194 use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value(); | |
195 } | |
196 | |
197 | |
// Materializes this range's current location as an operand: the assigned
// (double) register, the top-level range's spill operand, or — if neither
// has been decided yet — a fresh unconstrained unallocated operand tagged
// with this range's virtual register id.
LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    DCHECK(!IsSpilled());
    switch (Kind()) {
      case GENERAL_REGISTERS:
        op = LRegister::Create(assigned_register(), zone);
        break;
      case DOUBLE_REGISTERS:
        op = LDoubleRegister::Create(assigned_register(), zone);
        break;
      default:
        UNREACHABLE();
    }
  } else if (IsSpilled()) {
    DCHECK(!HasRegisterAssigned());
    // Spill slots are always owned by the top-level (parent) range.
    op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}
223 | |
224 | |
// Returns the interval from which a forward search for |position| may
// start. The cached current_interval_ is reused when it does not begin
// past |position|; otherwise the cache is invalidated and the search
// restarts at the first interval.
UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}
234 | |
235 | |
// Advances the cached search-start interval to |to_start_of|, but only if
// it begins at or before |but_not_past| and strictly after the currently
// cached interval — the cache may only ever move forward.
void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  // An invalid position compares below every valid start, so an empty cache
  // always accepts the candidate.
  LifetimePosition start =
      current_interval_ == NULL ? LifetimePosition::Invalid()
                                : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}
247 | |
248 | |
// Splits this range at |position|: intervals and use positions at or after
// the split point are moved into |result| (which must be empty), and
// |result| is linked into the chain of split children right after this
// range. The top-level parent and register kind are propagated to the
// child.
void LiveRange::SplitAt(LifetimePosition position,
                        LiveRange* result,
                        Zone* zone) {
  DCHECK(Start().Value() < position.Value());
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start().Value() == position.Value()) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). Use at this position should be attributed to
    // the split child because split child owns use interval covering it.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    // Otherwise a use exactly at |position| stays with the first part.
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = NULL;
  current_interval_ = NULL;

  // Link the new live range in the chain before any of the other
  // ranges linked from the range before the split.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->kind_ = result->parent_->kind_;
  result->next_ = next_;
  next_ = result;

#ifdef DEBUG
  Verify();
  result->Verify();
#endif
}
334 | |
335 | |
336 // This implements an ordering on live ranges so that they are ordered by their | |
337 // start positions. This is needed for the correctness of the register | |
338 // allocation algorithm. If two live ranges start at the same offset then there | |
339 // is a tie breaker based on where the value is first used. This part of the | |
340 // ordering is merely a heuristic. | |
341 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const { | |
342 LifetimePosition start = Start(); | |
343 LifetimePosition other_start = other->Start(); | |
344 if (start.Value() == other_start.Value()) { | |
345 UsePosition* pos = first_pos(); | |
346 if (pos == NULL) return false; | |
347 UsePosition* other_pos = other->first_pos(); | |
348 if (other_pos == NULL) return true; | |
349 return pos->pos().Value() < other_pos->pos().Value(); | |
350 } | |
351 return start.Value() < other_start.Value(); | |
352 } | |
353 | |
354 | |
// Moves the start of the first interval forward to |start|. Called once the
// defining instruction of the value is seen (see LAllocator::Define), after
// uses have already conservatively extended the range to the block start.
void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  DCHECK(first_interval_ != NULL);
  DCHECK(first_interval_->start().Value() <= start.Value());
  DCHECK(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}
362 | |
363 | |
// Guarantees the range covers [start, end[. Existing leading intervals that
// begin inside the new interval are removed and absorbed into it; the
// merged interval extends to the farthest end seen (at least |end|) and is
// prepended to the interval chain.
void LiveRange::EnsureInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  // Drop every interval that starts within [start, end[, remembering the
  // farthest end so no coverage is lost.
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    // Everything was absorbed: the new interval is also the last one.
    last_interval_ = new_interval;
  }
}
387 | |
388 | |
// Prepends the interval [start, end[ to this range, merging it with the
// current first interval when the two touch or overlap. Intervals are built
// back-to-front, so new intervals never come after the existing first one.
void LiveRange::AddUseInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    // First interval of the range.
    UseInterval* interval = new(zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      // Adjacent: extend the existing first interval backwards.
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      // Disjoint: prepend a fresh interval.
      UseInterval* interval = new(zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // Order of instruction's processing (see ProcessInstructions) guarantees
      // that each new use interval either precedes or intersects with
      // last added interval.
      DCHECK(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}
417 | |
418 | |
// Inserts a new use position, keeping the use list sorted by lifetime
// position. If no earlier use carries a hint and the new use does, its hint
// becomes the range's current hint operand.
void LiveRange::AddUsePosition(LifetimePosition pos,
                               LOperand* operand,
                               LOperand* hint,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
  UsePosition* prev_hint = NULL;  // Last hinted use before the insert point.
  UsePosition* prev = NULL;       // Insertion point: last use before |pos|.
  UsePosition* current = first_pos_;
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    // New head of the list.
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  if (prev_hint == NULL && use_pos->HasHint()) {
    current_hint_operand_ = hint;
  }
}
448 | |
449 | |
450 void LiveRange::ConvertOperands(Zone* zone) { | |
451 LOperand* op = CreateAssignedOperand(zone); | |
452 UsePosition* use_pos = first_pos(); | |
453 while (use_pos != NULL) { | |
454 DCHECK(Start().Value() <= use_pos->pos().Value() && | |
455 use_pos->pos().Value() <= End().Value()); | |
456 | |
457 if (use_pos->HasOperand()) { | |
458 DCHECK(op->IsRegister() || op->IsDoubleRegister() || | |
459 !use_pos->RequiresRegister()); | |
460 use_pos->operand()->ConvertTo(op->kind(), op->index()); | |
461 } | |
462 use_pos = use_pos->next(); | |
463 } | |
464 } | |
465 | |
466 | |
467 bool LiveRange::CanCover(LifetimePosition position) const { | |
468 if (IsEmpty()) return false; | |
469 return Start().Value() <= position.Value() && | |
470 position.Value() < End().Value(); | |
471 } | |
472 | |
473 | |
// Precise membership test: |position| must fall inside one of the use
// intervals, not merely between Start() and End() (lifetime holes return
// false). Advances the cached search cursor as a side effect.
bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    // Intervals are sorted by start position.
    DCHECK(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    // Past |position| without containing it: it lies in a hole.
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}
488 | |
489 | |
// Returns the first lifetime position at which this range and |other| are
// simultaneously live, or an invalid position if they never overlap.
// Walks both sorted interval chains in lock-step, always advancing the
// chain whose current interval starts earlier.
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    // Once either cursor passes the other range's end, no overlap remains.
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}
512 | |
513 | |
// Sets up an allocator for |graph| with |num_values| pre-existing virtual
// registers. List sizes are heuristic initial capacities; fixed live ranges
// are created lazily. mode_ (UNALLOCATED_REGISTERS) and num_registers_ (-1)
// are sentinel values here.
LAllocator::LAllocator(int num_values, HGraph* graph)
    : chunk_(NULL),
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(UNALLOCATED_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) {}
531 | |
532 | |
// Resets live_in_sets_ to one NULL entry per basic block; the actual sets
// are computed lazily during liveness analysis.
void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}
539 | |
540 | |
// Computes the live-out set of |block| as the union of the successors'
// live-in sets plus the phi inputs flowing along each successor edge.
// Backward edges contribute nothing yet since their live-in sets are still
// NULL at this point.
BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block, except not including backward
  // successor edges.
  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());

  // Process all successor blocks.
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    // Add values live on entry to the successor. Note the successor's
    // live_in will not be computed yet for backwards edges.
    HBasicBlock* successor = it.Current();
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      // Constant phi inputs need no live range.
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }
  }

  return live_out;
}
568 | |
569 | |
// Seeds every value in |live_out| with a use interval spanning the whole
// block; intervals are shortened later when the defining instruction is
// found (see Define/ShortenTo).
void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end, zone());
    iterator.Advance();
  }
}
586 | |
587 | |
// Synthetic (negative) live-range id for fixed double register |index|,
// offset past the ids used for fixed general registers so the two fixed
// id spaces never collide.
int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kNumRegisters;
}
591 | |
592 | |
// Converts an unallocated operand with a fixed policy in place into its
// concrete location (stack slot, register, or double register). If the
// value is tagged, the operand is also recorded in the pointer map of the
// instruction at |pos| so the GC can find it.
LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  DCHECK(operand->HasFixedPolicy());
  if (operand->HasFixedSlotPolicy()) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->HasFixedDoubleRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand, chunk()->zone());
    }
  }
  return operand;
}
618 | |
619 | |
// Returns (creating on first request) the live range that represents the
// fixed uses of general register |index|. The range gets a synthetic id
// (FixedLiveRangeID) satisfying IsFixed() and is pre-assigned its register.
LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  DCHECK(index < Register::kNumRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    DCHECK(result->IsFixed());
    result->kind_ = GENERAL_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_live_ranges_[index] = result;
  }
  return result;
}
632 | |
633 | |
// Double-register counterpart of FixedLiveRangeFor: returns (creating on
// first request) the fixed live range for double register |index|.
LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  DCHECK(index < DoubleRegister::kMaxNumRegisters);
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
                                   chunk()->zone());
    DCHECK(result->IsFixed());
    result->kind_ = DOUBLE_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}
647 | |
648 | |
// Returns the live range for virtual register |index|, growing the table
// with NULL entries and creating the range on demand.
LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(index, chunk()->zone());
    live_ranges_[index] = result;
  }
  return result;
}
660 | |
661 | |
662 LGap* LAllocator::GetLastGap(HBasicBlock* block) { | |
663 int last_instruction = block->last_instruction_index(); | |
664 int index = chunk_->NearestGapPos(last_instruction); | |
665 return GapAt(index); | |
666 } | |
667 | |
668 | |
669 HPhi* LAllocator::LookupPhi(LOperand* operand) const { | |
670 if (!operand->IsUnallocated()) return NULL; | |
671 int index = LUnallocated::cast(operand)->virtual_register(); | |
672 HValue* instr = graph_->LookupValue(index); | |
673 if (instr != NULL && instr->IsPhi()) { | |
674 return HPhi::cast(instr); | |
675 } | |
676 return NULL; | |
677 } | |
678 | |
679 | |
680 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) { | |
681 if (operand->IsUnallocated()) { | |
682 return LiveRangeFor(LUnallocated::cast(operand)->virtual_register()); | |
683 } else if (operand->IsRegister()) { | |
684 return FixedLiveRangeFor(operand->index()); | |
685 } else if (operand->IsDoubleRegister()) { | |
686 return FixedDoubleLiveRangeFor(operand->index()); | |
687 } else { | |
688 return NULL; | |
689 } | |
690 } | |
691 | |
692 | |
// Records the definition of |operand|'s value at |position|. If uses were
// already seen (the backwards walk visits uses first), the range is
// shortened to start at the definition; a definition without uses gets a
// minimal one-instruction interval. For unallocated operands the definition
// is also recorded as a use position carrying |hint|.
void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // Can happen if there is a definition without use.
    range->AddUseInterval(position, position.NextInstruction(), zone());
    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
}
712 | |
713 | |
// Records a use of |operand| at |position|, conservatively extending the
// live range back to |block_start|; the range is trimmed later when the
// definition is found (Define/ShortenTo). Unallocated operands also get a
// use position carrying |hint|.
void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
  range->AddUseInterval(block_start, position, zone());
}
726 | |
727 | |
// Adds the move |from| -> |to| to the START parallel move of gap |index|.
// If |from|'s virtual register is already the destination of another move
// in this gap, the new move is chained from that move's source instead of
// from |from| itself.
void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                     chunk()->zone());
  if (from->IsUnallocated()) {
    // Look for an existing move whose destination is the same virtual
    // register as |from|.
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
        if (LUnallocated::cast(cur_to)->virtual_register() ==
            LUnallocated::cast(from)->virtual_register()) {
          move->AddMove(cur.source(), to, chunk()->zone());
          return;
        }
      }
    }
  }
  move->AddMove(from, to, chunk()->zone());
}
750 | |
751 | |
// Applies MeetConstraintsBetween at every gap of |block|, passing the
// instructions on either side of the gap (NULL at the block boundaries).
// Stops immediately once allocation has failed.
void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  if (start == -1) return;
  for (int i = start; i <= end; ++i) {
    if (IsGapAt(i)) {
      LInstruction* instr = NULL;
      LInstruction* prev_instr = NULL;
      if (i < end) instr = InstructionAt(i + 1);
      if (i > start) prev_instr = InstructionAt(i - 1);
      MeetConstraintsBetween(prev_instr, instr, i);
      if (!AllocationOk()) return;
    }
  }
}
767 | |
768 | |
// Inserts the gap moves and fixed-location conversions required by the
// operand policies of the instructions around gap |gap_index|: |first|
// (before the gap) contributes fixed temps and its output; |second| (after
// the gap) contributes fixed inputs, writable-register inputs, and a
// possible "output same as input" constraint.
void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (TempIterator it(first); !it.Done(); it.Advance()) {
      LUnallocated* temp = LUnallocated::cast(it.Current());
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      // Keep an unconstrained copy so the rest of the code can still refer
      // to the value by virtual register; move the fixed location into it.
      LUnallocated* output_copy = first_output->CopyUnconstrained(
          chunk()->zone());
      bool is_tagged = HasTaggedValue(first_output->virtual_register());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack, we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it.
      // Thus it should be inserted to a lifetime position corresponding to
      // the instruction end.
      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
                                                         chunk()->zone());
      move->AddMove(first_output, range->GetSpillOperand(),
                    chunk()->zone());
    }
  }

  // Handle fixed input operands of second instruction.
  if (second != NULL) {
    for (UseIterator it(second); !it.Done(); it.Advance()) {
      LUnallocated* cur_input = LUnallocated::cast(it.Current());
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->HasWritableRegisterPolicy()) {
        // The live range of writable input registers always goes until the end
        // of the instruction.
        DCHECK(!cur_input->IsUsedAtStart());

        // Split the value into a fresh artificial virtual register so the
        // clobbered copy is distinct from the original value.
        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        int vreg = GetVirtualRegister();
        if (!AllocationOk()) return;
        cur_input->set_virtual_register(vreg);

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        }

        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" for second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
      int output_vreg = second_output->virtual_register();
      int input_vreg = cur_input->virtual_register();

      // Retarget the input to the output's virtual register and move the
      // original value into it across the gap.
      LUnallocated* input_copy = cur_input->CopyUnconstrained(
          chunk()->zone());
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        // Keep the (tagged) input visible to the GC across the instruction.
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to immediately have a tagged representation,
        // before the pointer map can be used. I.e. the pointer map at the
        // instruction will include the output operand (whose value at the
        // beginning of the instruction is equal to the input operand). If
        // this is not desired, then the pointer map at this instruction needs
        // to be adjusted manually.
      }
    }
  }
}
880 | |
881 | |
// Walks the instructions of |block| backwards, creating definitions and use
// intervals for the live ranges.  |live| is the block's live-out set and is
// updated in place (defined values removed, used values added) so that on
// return it holds the block's live-in set.
void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->current_hint_operand();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
              Define(curr_position, to, from);
              live->Remove(LUnallocated::cast(to)->virtual_register());
            } else {
              // Destination is not live: the move is dead and can be
              // eliminated without processing its source.
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(LUnallocated::cast(from)->virtual_register());
        }
      }
    } else {
      DCHECK(!IsGapAt(index));
      LInstruction* instr = InstructionAt(index);

      if (instr != NULL) {
        LOperand* output = instr->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) {
            live->Remove(LUnallocated::cast(output)->virtual_register());
          }
          Define(curr_position, output, NULL);
        }

        if (instr->ClobbersRegisters()) {
          // Block every allocatable general register (except the one holding
          // the output, if any) for the duration of this instruction by
          // giving its fixed live range a one-instruction use interval.
          for (int i = 0; i < Register::kNumRegisters; ++i) {
            if (Register::from_code(i).IsAllocatable()) {
              if (output == NULL || !output->IsRegister() ||
                  output->index() != i) {
                LiveRange* range = FixedLiveRangeFor(i);
                range->AddUseInterval(curr_position,
                                      curr_position.InstructionEnd(), zone());
              }
            }
          }
        }

        if (instr->ClobbersDoubleRegisters(isolate())) {
          // Same as above, for the allocatable double registers.
          for (int i = 0; i < DoubleRegister::kMaxNumRegisters; ++i) {
            if (DoubleRegister::from_code(i).IsAllocatable()) {
              if (output == NULL || !output->IsDoubleRegister() ||
                  output->index() != i) {
                LiveRange* range = FixedDoubleLiveRangeFor(i);
                range->AddUseInterval(curr_position,
                                      curr_position.InstructionEnd(), zone());
              }
            }
          }
        }

        for (UseIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* input = it.Current();

          // Inputs used only at the start of the instruction may share a
          // register with the output; others must live to the end.
          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) {
            live->Add(LUnallocated::cast(input)->virtual_register());
          }
        }

        for (TempIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* temp = it.Current();
          // Temps of a clobbering instruction are already covered by the
          // fixed use intervals added above, unless they have a non-fixed
          // allocation policy.
          if (instr->ClobbersTemps()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);

          if (temp->IsUnallocated()) {
            LUnallocated* temp_unalloc = LUnallocated::cast(temp);
            if (temp_unalloc->HasDoubleRegisterPolicy()) {
              double_artificial_registers_.Add(
                  temp_unalloc->virtual_register() - first_artificial_register_,
                  zone());
            }
          }
        }
      }
    }

    index = index - 1;
  }
}
1014 | |
1015 | |
// Lowers each phi in |block| to explicit gap moves: one move per predecessor
// edge (operand -> phi's virtual register) plus a move at the block label
// from the phi operand into the phi's spill slot.
void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand =
        new (chunk()->zone()) LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        DCHECK(!op->EmitAtUses());
        LUnallocated* unalloc =
            new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without any special processing as in
      // the AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction.
      // Some branch instructions (e.g. loops' back edges)
      // can potentially cause a GC so they have a pointer map.
      // By inserting a move we essentially create a copy of a
      // value which is invisible to PopulatePointerMaps(), because we store
      // it into a location different from the operand of a live range
      // covering a branch instruction.
      // Thus we need to manually record a pointer.
      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
          branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
        }
      }
    }

    // Spill the phi at the block entry so that all predecessor moves have a
    // common destination; the spill range starts with the block.
    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}
1069 | |
1070 | |
// Runs all phases of the linear-scan register allocator on |chunk|.
// Returns false if allocation failed (AllocationOk() became false, e.g. too
// many live ranges); in that case the chunk is unusable for code generation.
// The phase order is significant: constraints and phis must be resolved
// before live ranges are built, and pointer maps must be populated before
// the connecting moves are inserted.
bool LAllocator::Allocate(LChunk* chunk) {
  DCHECK(chunk_ == NULL);
  chunk_ = static_cast<LPlatformChunk*>(chunk);
  assigned_registers_ =
      new (chunk->zone()) BitVector(Register::kNumRegisters, chunk->zone());
  assigned_double_registers_ = new (chunk->zone())
      BitVector(DoubleRegister::kMaxNumRegisters, chunk->zone());
  MeetRegisterConstraints();
  if (!AllocationOk()) return false;
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  if (!AllocationOk()) return false;
  AllocateDoubleRegisters();
  if (!AllocationOk()) return false;
  PopulatePointerMaps();
  ConnectRanges();
  ResolveControlFlow();
  return true;
}
1091 | |
1092 | |
// Applies per-block register constraints for the whole graph, stopping
// early if allocation has already failed.
void LAllocator::MeetRegisterConstraints() {
  LAllocatorPhase phase("L_Register constraints", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
    if (!AllocationOk()) return;
  }
}
1102 | |
1103 | |
// Lowers all phis in the graph to gap moves, block by block.
void LAllocator::ResolvePhis() {
  LAllocatorPhase phase("L_Resolve phis", this);

  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    ResolvePhis(block);
  }
}
1114 | |
1115 | |
// Connects the two pieces of |range| that cover the end of predecessor
// |pred| and the start of |block|.  If the value lives in different
// locations on the two sides of the edge, a gap move is inserted (at the
// block entry for a single-predecessor block, otherwise at the
// predecessor's last gap).
void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* block,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
  LifetimePosition cur_start =
      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  // Find the child ranges covering the edge's two end points.
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      DCHECK(cur_cover == NULL);
      cur_cover = cur_range;
    }
    if (cur_range->CanCover(pred_end)) {
      DCHECK(pred_cover == NULL);
      pred_cover = cur_range;
    }
    cur_range = cur_range->next();
  }

  // NOTE(review): cur_cover is dereferenced here before the DCHECK below
  // verifies it is non-NULL; the search above is expected to always find it.
  if (cur_cover->IsSpilled()) return;
  DCHECK(pred_cover != NULL && cur_cover != NULL);
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
    LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
    if (!pred_op->Equals(cur_op)) {
      LGap* gap = NULL;
      if (block->predecessors()->length() == 1) {
        gap = GapAt(block->first_instruction_index());
      } else {
        DCHECK(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);

        // We are going to insert a move before the branch instruction.
        // Some branch instructions (e.g. loops' back edges)
        // can potentially cause a GC so they have a pointer map.
        // By inserting a move we essentially create a copy of a
        // value which is invisible to PopulatePointerMaps(), because we store
        // it into a location different from the operand of a live range
        // covering a branch instruction.
        // Thus we need to manually record a pointer.
        LInstruction* branch = InstructionAt(pred->last_instruction_index());
        if (branch->HasPointerMap()) {
          if (HasTaggedValue(range->id())) {
            branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
          } else if (!cur_op->IsDoubleStackSlot() &&
                     !cur_op->IsDoubleRegister()) {
            branch->pointer_map()->RemovePointer(cur_op);
          }
        }
      }
      gap->GetOrCreateParallelMove(
          LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
                                                 chunk()->zone());
    }
  }
}
1175 | |
1176 | |
// Returns the parallel move used to connect two adjacent pieces of a live
// range split at |pos|.  If |pos| is a gap, its START or END move is used;
// otherwise the move is attached to the neighboring gap (the previous gap's
// AFTER move for an instruction start, the next gap's BEFORE move for an
// instruction end).
LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (IsGapAt(index)) {
    LGap* gap = GapAt(index);
    return gap->GetOrCreateParallelMove(
        pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
  }
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return GapAt(gap_pos)->GetOrCreateParallelMove(
      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
}
1188 | |
1189 | |
// Returns the basic block containing lifetime position |pos|, found via the
// nearest gap at or before the position.
HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();
}
1194 | |
1195 | |
// Inserts gap moves between consecutive pieces of each split live range
// when the pieces touch and live in different locations.  Moves across
// block boundaries are left to ResolveControlFlow() unless the boundary can
// be resolved eagerly (single fall-through predecessor).
void LAllocator::ConnectRanges() {
  LAllocatorPhase phase("L_Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    // Only walk top-level ranges; children are reached via next().
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        // Add gap move if the two live ranges touch and there is no block
        // boundary.
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          }
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand(
                chunk()->zone());
            LOperand* cur_operand = second_range->CreateAssignedOperand(
                chunk()->zone());
            move->AddMove(prev_operand, cur_operand,
                          chunk()->zone());
          }
        }
      }

      first_range = second_range;
      second_range = second_range->next();
    }
  }
}
1231 | |
1232 | |
1233 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const { | |
1234 if (block->predecessors()->length() != 1) return false; | |
1235 return block->predecessors()->first()->block_id() == block->block_id() - 1; | |
1236 } | |
1237 | |
1238 | |
1239 void LAllocator::ResolveControlFlow() { | |
1240 LAllocatorPhase phase("L_Resolve control flow", this); | |
1241 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); | |
1242 for (int block_id = 1; block_id < blocks->length(); ++block_id) { | |
1243 HBasicBlock* block = blocks->at(block_id); | |
1244 if (CanEagerlyResolveControlFlow(block)) continue; | |
1245 BitVector* live = live_in_sets_[block->block_id()]; | |
1246 BitVector::Iterator iterator(live); | |
1247 while (!iterator.Done()) { | |
1248 int operand_index = iterator.Current(); | |
1249 for (int i = 0; i < block->predecessors()->length(); ++i) { | |
1250 HBasicBlock* cur = block->predecessors()->at(i); | |
1251 LiveRange* cur_range = LiveRangeFor(operand_index); | |
1252 ResolveControlFlow(cur_range, block, cur); | |
1253 } | |
1254 iterator.Advance(); | |
1255 } | |
1256 } | |
1257 } | |
1258 | |
1259 | |
// Computes live ranges for all values via a backwards dataflow walk over
// the blocks: each block's live-out set seeds whole-block intervals which
// ProcessInstructions() then shortens.  Phi outputs are killed at their
// block, and values live into a loop header are kept live through the
// entire loop body.
void LAllocator::BuildLiveRanges() {
  LAllocatorPhase phase("L_Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of the
      // block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      // Find the move targeting this phi in the first predecessor's last
      // gap; its source becomes the allocation hint for the phi's range.
      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() &&
            LUnallocated::cast(to)->virtual_register() == phi->id()) {
          hint = move->move_operands()->at(j).source();
          phi_operand = to;
          break;
        }
      }
      DCHECK(hint != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }

    // Now live is live_in for this block except not including values live
    // out on backward successor edges.
    live_in_sets_[block_id] = live;

    // If this block is a loop header go back and patch up the necessary
    // predecessor blocks.
    if (block->IsLoopHeader()) {
      // TODO(kmillikin): Need to be able to get the last block of the loop
      // in the loop information. Add a live range stretching from the first
      // loop instruction to the last for each value live on entry to the
      // header.
      HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
      BitVector::Iterator iterator(live);
      LifetimePosition start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      LifetimePosition end = LifetimePosition::FromInstructionIndex(
          back_edge->last_instruction_index()).NextInstruction();
      while (!iterator.Done()) {
        int operand_index = iterator.Current();
        LiveRange* range = LiveRangeFor(operand_index);
        range->EnsureInterval(start, end, zone());
        iterator.Advance();
      }

      // All blocks within the loop body inherit the header's live-in set.
      for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
        live_in_sets_[i]->Union(*live);
      }
    }

#ifdef DEBUG
    if (block_id == 0) {
      // Anything still live at the entry block was used before it was ever
      // defined; print the offending values before asserting.
      BitVector::Iterator iterator(live);
      bool found = false;
      while (!iterator.Done()) {
        found = true;
        int operand_index = iterator.Current();
        {
          AllowHandleDereference allow_deref;
          PrintF("Function: %s\n", chunk_->info()->GetDebugName().get());
        }
        PrintF("Value %d used before first definition!\n", operand_index);
        LiveRange* range = LiveRangeFor(operand_index);
        PrintF("First use is at %d\n", range->first_pos()->pos().Value());
        iterator.Advance();
      }
      DCHECK(!found);
    }
#endif
  }

  // Stamp each range with the register kind it needs (general vs double).
  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
    }
  }
}
1360 | |
1361 | |
1362 bool LAllocator::SafePointsAreInOrder() const { | |
1363 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); | |
1364 int safe_point = 0; | |
1365 for (int i = 0; i < pointer_maps->length(); ++i) { | |
1366 LPointerMap* map = pointer_maps->at(i); | |
1367 if (safe_point > map->lithium_position()) return false; | |
1368 safe_point = map->lithium_position(); | |
1369 } | |
1370 return true; | |
1371 } | |
1372 | |
1373 | |
// Records, for every safe point (pointer map), the locations of all tagged
// values live at that point — both spill slots and registers — so the GC
// can find and update them.
void LAllocator::PopulatePointerMaps() {
  LAllocatorPhase phase("L_Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  DCHECK(SafePointsAreInOrder());

  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    // Iterate over the first parts of multi-part live ranges.
    if (range->parent() != NULL) continue;
    // Skip non-pointer values.
    if (!HasTaggedValue(range->id())) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().InstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      DCHECK(cur->Start().InstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all.  Keep an eye on when
    // they step backwards and reset the first_safe_point_index so we don't
    // miss any safe points.
    if (start < last_range_start) {
      first_safe_point_index = 0;
    }
    last_range_start = start;

    // Step across all the safe points that are before the start of this range,
    // recording how far we step in order to save doing this for the next range.
    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;
    }

    // Step through the safe points to see whether they are in the range.
    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::FromInstructionIndex(safe_point);
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos)) {
        cur = cur->next();
      }
      if (cur == NULL) continue;

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
      }

      // A value may simultaneously live in its spill slot and a register;
      // record the register location too when the piece is not spilled.
      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
        DCHECK(!operand->IsStackSlot());
        map->RecordPointer(operand, chunk()->zone());
      }
    }
  }
}
1460 | |
1461 | |
1462 void LAllocator::AllocateGeneralRegisters() { | |
1463 LAllocatorPhase phase("L_Allocate general registers", this); | |
1464 num_registers_ = | |
1465 RegisterConfiguration::ArchDefault()->num_allocatable_general_registers(); | |
1466 allocatable_register_codes_ = | |
1467 RegisterConfiguration::ArchDefault()->allocatable_general_codes(); | |
1468 mode_ = GENERAL_REGISTERS; | |
1469 AllocateRegisters(); | |
1470 } | |
1471 | |
1472 | |
1473 void LAllocator::AllocateDoubleRegisters() { | |
1474 LAllocatorPhase phase("L_Allocate double registers", this); | |
1475 num_registers_ = | |
1476 RegisterConfiguration::ArchDefault()->num_allocatable_double_registers(); | |
1477 allocatable_register_codes_ = | |
1478 RegisterConfiguration::ArchDefault()->allocatable_double_codes(); | |
1479 mode_ = DOUBLE_REGISTERS; | |
1480 AllocateRegisters(); | |
1481 } | |
1482 | |
1483 | |
// Core linear-scan loop for the current mode_ (general or double
// registers).  Processes the unhandled ranges in start order, maintaining
// active/inactive sets, and either assigns a free register, evicts a
// blocked one, or spills.
void LAllocator::AllocateRegisters() {
  DCHECK(unhandled_live_ranges_.is_empty());

  // Collect all ranges of the current register kind.
  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (live_ranges_[i]->Kind() == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      }
    }
  }
  SortUnhandled();
  DCHECK(UnhandledIsSorted());

  DCHECK(reusable_slots_.is_empty());
  DCHECK(active_live_ranges_.is_empty());
  DCHECK(inactive_live_ranges_.is_empty());

  // Fixed ranges (pre-colored registers) start out inactive.
  if (mode_ == DOUBLE_REGISTERS) {
    for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  } else {
    DCHECK(mode_ == GENERAL_REGISTERS);
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges_.is_empty()) {
    DCHECK(UnhandledIsSorted());
    // Ranges are sorted so the one to process next is at the end.
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    DCHECK(UnhandledIsSorted());
    LifetimePosition position = current->Start();
#ifdef DEBUG
    allocation_finger_ = position;
#endif
    TraceAlloc("Processing interval %d start=%d\n",
               current->id(),
               position.Value());

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      }
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part of the range.
      if (pos == NULL) {
        Spill(current);
        continue;
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        // Do not spill live range eagerly if use position that can benefit from
        // the register is too close to the start of live range.
        SpillBetween(current, current->Start(), pos->pos());
        if (!AllocationOk()) return;
        DCHECK(UnhandledIsSorted());
        continue;
      }
    }

    // Retire active ranges that ended, and demote those with a lifetime
    // hole at the current position.
    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    // Retire inactive ranges that ended, and promote those that cover the
    // current position again.
    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled());

    // Try a free register first; fall back to evicting a blocked one.
    bool result = TryAllocateFreeReg(current);
    if (!AllocationOk()) return;

    if (!result) AllocateBlockedReg(current);
    if (!AllocationOk()) return;

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  reusable_slots_.Rewind(0);
  active_live_ranges_.Rewind(0);
  inactive_live_ranges_.Rewind(0);
}
1592 | |
1593 | |
1594 const char* LAllocator::RegisterName(int allocation_index) { | |
1595 if (mode_ == GENERAL_REGISTERS) { | |
1596 return Register::from_code(allocation_index).ToString(); | |
1597 } else { | |
1598 return DoubleRegister::from_code(allocation_index).ToString(); | |
1599 } | |
1600 } | |
1601 | |
1602 | |
1603 void LAllocator::TraceAlloc(const char* msg, ...) { | |
1604 if (FLAG_trace_alloc) { | |
1605 va_list arguments; | |
1606 va_start(arguments, msg); | |
1607 base::OS::VPrint(msg, arguments); | |
1608 va_end(arguments); | |
1609 } | |
1610 } | |
1611 | |
1612 | |
1613 bool LAllocator::HasTaggedValue(int virtual_register) const { | |
1614 HValue* value = graph_->LookupValue(virtual_register); | |
1615 if (value == NULL) return false; | |
1616 return value->representation().IsTagged() && !value->type().IsSmi(); | |
1617 } | |
1618 | |
1619 | |
1620 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const { | |
1621 if (virtual_register < first_artificial_register_) { | |
1622 HValue* value = graph_->LookupValue(virtual_register); | |
1623 if (value != NULL && value->representation().IsDouble()) { | |
1624 return DOUBLE_REGISTERS; | |
1625 } | |
1626 } else if (double_artificial_registers_.Contains( | |
1627 virtual_register - first_artificial_register_)) { | |
1628 return DOUBLE_REGISTERS; | |
1629 } | |
1630 | |
1631 return GENERAL_REGISTERS; | |
1632 } | |
1633 | |
1634 | |
// Adds |range| to the set of ranges currently occupying a register.
void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range, zone());
}
1639 | |
1640 | |
// Adds |range| to the set of ranges that hold a register but have a
// lifetime hole at the current allocation position.
void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range, zone());
}
1645 | |
1646 | |
// Inserts |range| into the unhandled list while keeping its sort order
// (ranges to process first at the end).  Scans backwards from the end
// since new ranges typically start near the allocation finger.
void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
  DCHECK(allocation_finger_.Value() <= range->Start().Value());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range, zone());
      DCHECK(UnhandledIsSorted());
      return;
    }
  }
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range, zone());
  DCHECK(UnhandledIsSorted());
}
1664 | |
1665 | |
// Appends |range| to the unhandled list without maintaining sort order;
// the caller must invoke SortUnhandled() before allocation proceeds.
void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range, zone());
}
1672 | |
1673 | |
1674 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) { | |
1675 DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) || | |
1676 !(*b)->ShouldBeAllocatedBefore(*a)); | |
1677 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1; | |
1678 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1; | |
1679 return (*a)->id() - (*b)->id(); | |
1680 } | |
1681 | |
1682 | |
// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the array list. This is convenient for the register allocation
// algorithm because it is efficient to remove elements from the end.
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}
1690 | |
1691 | |
1692 bool LAllocator::UnhandledIsSorted() { | |
1693 int len = unhandled_live_ranges_.length(); | |
1694 for (int i = 1; i < len; i++) { | |
1695 LiveRange* a = unhandled_live_ranges_.at(i - 1); | |
1696 LiveRange* b = unhandled_live_ranges_.at(i); | |
1697 if (a->Start().Value() < b->Start().Value()) return false; | |
1698 } | |
1699 return true; | |
1700 } | |
1701 | |
1702 | |
// If |range| is the last piece of its top-level range and that range was
// assigned a spill slot, remembers the range so TryReuseSpillSlot can hand
// the slot to a later, non-overlapping range.
void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  // Only non-negative slot indices are recycled.  NOTE(review): negative
  // indices presumably denote fixed/special slots -- confirm before relying
  // on this.
  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    reusable_slots_.Add(range, zone());
  }
}
1714 | |
1715 | |
// Returns a recycled spill slot for |range|, or NULL if none is available.
// Only the head of reusable_slots_ is considered; it is reusable only if its
// owning range ends no later than |range|'s top level starts, so the two
// never occupy the slot at the same time.
LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  if (reusable_slots_.is_empty()) return NULL;
  if (reusable_slots_.first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
  reusable_slots_.Remove(0);
  return result;
}
1726 | |
1727 | |
1728 void LAllocator::ActiveToHandled(LiveRange* range) { | |
1729 DCHECK(active_live_ranges_.Contains(range)); | |
1730 active_live_ranges_.RemoveElement(range); | |
1731 TraceAlloc("Moving live range %d from active to handled\n", range->id()); | |
1732 FreeSpillSlot(range); | |
1733 } | |
1734 | |
1735 | |
1736 void LAllocator::ActiveToInactive(LiveRange* range) { | |
1737 DCHECK(active_live_ranges_.Contains(range)); | |
1738 active_live_ranges_.RemoveElement(range); | |
1739 inactive_live_ranges_.Add(range, zone()); | |
1740 TraceAlloc("Moving live range %d from active to inactive\n", range->id()); | |
1741 } | |
1742 | |
1743 | |
1744 void LAllocator::InactiveToHandled(LiveRange* range) { | |
1745 DCHECK(inactive_live_ranges_.Contains(range)); | |
1746 inactive_live_ranges_.RemoveElement(range); | |
1747 TraceAlloc("Moving live range %d from inactive to handled\n", range->id()); | |
1748 FreeSpillSlot(range); | |
1749 } | |
1750 | |
1751 | |
1752 void LAllocator::InactiveToActive(LiveRange* range) { | |
1753 DCHECK(inactive_live_ranges_.Contains(range)); | |
1754 inactive_live_ranges_.RemoveElement(range); | |
1755 active_live_ranges_.Add(range, zone()); | |
1756 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); | |
1757 } | |
1758 | |
1759 | |
// Tries to give |current| a register without evicting any other live range.
// For every register it computes the position up to which the register is
// free, honors the range's register hint if the hinted register stays free
// long enough, and otherwise picks the register that remains free longest,
// splitting |current| where the register becomes blocked.  Returns false if
// every register is blocked already at the range start.
bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  // free_until_pos is sized for the larger register file; general-purpose
  // allocation simply uses a prefix of it.
  DCHECK(DoubleRegister::kMaxNumRegisters >= Register::kNumRegisters);

  LifetimePosition free_until_pos[DoubleRegister::kMaxNumRegisters];

  for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  // Registers held by active ranges are not free at all.
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  // Registers held by inactive ranges are free until their first
  // intersection with |current|.
  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    DCHECK(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  // Prefer the hinted register when it is free for the whole range; this
  // avoids a move at the hint site.
  LOperand* hint = current->FirstHint();
  if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
    int register_index = hint->index();
    TraceAlloc(
        "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
        RegisterName(register_index),
        free_until_pos[register_index].Value(),
        current->id(),
        current->End().Value());

    // The desired register is free until the end of the current live range.
    if (free_until_pos[register_index].Value() >= current->End().Value()) {
      TraceAlloc("Assigning preferred reg %s to live range %d\n",
                 RegisterName(register_index),
                 current->id());
      SetLiveRangeAssignedRegister(current, register_index);
      return true;
    }
  }

  // Find the register which stays free for the longest time.
  int reg = allocatable_register_codes_[0];
  for (int i = 1; i < RegisterCount(); ++i) {
    int code = allocatable_register_codes_[i];
    if (free_until_pos[code].Value() > free_until_pos[reg].Value()) {
      reg = code;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at position where it becomes blocked.
    LiveRange* tail = SplitRangeAt(current, pos);
    if (!AllocationOk()) return false;
    AddToUnhandledSorted(tail);
  }


  // Register reg is available at the range start and is free until
  // the range end.
  DCHECK(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg);

  return true;
}
1840 | |
1841 | |
// Assigns a register to |current| even though none is free for its whole
// lifetime.  Picks the register whose next use by its current holders is
// furthest away; if even that use comes before |current|'s first use that
// requires a register, |current| is spilled up to that use instead.
// Otherwise the register is taken and the conflicting holders are evicted
// via SplitAndSpillIntersecting.
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }


  // use_pos[r]: next position at which register r is needed by a spillable
  // holder.  block_pos[r]: position at which r becomes hard-blocked by a
  // fixed or unspillable holder.
  LifetimePosition use_pos[DoubleRegister::kMaxNumRegisters];
  LifetimePosition block_pos[DoubleRegister::kMaxNumRegisters];

  for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      // The holder cannot be evicted: the register is blocked outright.
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    DCHECK(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      // Clamp the use position: a fixed range can never be evicted, so the
      // register is effectively unusable from block_pos onwards.
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  // Pick the register whose next use is furthest away.
  int reg = allocatable_register_codes_[0];
  for (int i = 1; i < RegisterCount(); ++i) {
    int code = allocatable_register_codes_[i];
    if (use_pos[code].Value() > use_pos[reg].Value()) {
      reg = code;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill starting part of live range up to that use.
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end. Split before that
    // position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    if (!AllocationOk()) return;
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  DCHECK(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg);

  // This register was not free. Thus we need to find and spill
  // parts of active and inactive live regions that use the same register
  // at the same lifetime positions as current.
  SplitAndSpillIntersecting(current);
}
1929 | |
1930 | |
// Chooses where to insert the spill for |range| given that it must be
// spilled at or before |pos|.  If |pos| lies inside a loop, the range covers
// the loop start, and there is no register-beneficial use between the loop
// start and |pos|, the spill is hoisted to the loop header -- walking
// outwards through nested loops -- so the memory move is not repeated on
// every back edge.
LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
                                                    LifetimePosition pos) {
  HBasicBlock* block = GetBlock(pos.InstructionStart());
  HBasicBlock* loop_header =
      block->IsLoopHeader() ? block : block->parent_loop_header();

  // Not inside any loop: |pos| is already fine.
  if (loop_header == NULL) return pos;

  UsePosition* prev_use =
      range->PreviousUsePositionRegisterIsBeneficial(pos);

  while (loop_header != NULL) {
    // We are going to spill live range inside the loop.
    // If possible try to move spilling position backwards to loop header.
    // This will reduce number of memory moves on the back edge.
    LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
        loop_header->first_instruction_index());

    if (range->Covers(loop_start)) {
      if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
        // No register beneficial use inside the loop before the pos.
        pos = loop_start;
      }
    }

    // Try hoisting out to an outer loop.
    loop_header = loop_header->parent_loop_header();
  }

  return pos;
}
1962 | |
1963 | |
// Evicts the conflicting parts of every live range that holds the register
// just force-assigned to |current| by AllocateBlockedReg.  Conflicting
// active ranges are split and spilled (hoisting the spill out of loops when
// profitable) and retired to handled; conflicting non-fixed inactive ranges
// are spilled around their intersection with |current|.
void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  DCHECK(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
      if (next_pos == NULL) {
        // No further use requires a register: spill the rest outright.
        SpillAfter(range, spill_pos);
      } else {
        // When spilling between spill_pos and next_pos ensure that the range
        // remains spilled at least until the start of the current live range.
        // This guarantees that we will not introduce new unhandled ranges that
        // start before the current range as this violates allocation invariant
        // and will lead to an inconsistent state of active and inactive
        // live-ranges: ranges are allocated in order of their start positions,
        // ranges are retired from active/inactive when the start of the
        // current live-range is larger than their end.
        SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
      }
      if (!AllocationOk()) return;
      ActiveToHandled(range);
      --i;  // Compensate for the removal from active_live_ranges_.
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    DCHECK(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        if (!AllocationOk()) return;
        InactiveToHandled(range);
        --i;  // Compensate for the removal from inactive_live_ranges_.
      }
    }
  }
}
2012 | |
2013 | |
2014 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { | |
2015 return pos.IsInstructionStart() && | |
2016 InstructionAt(pos.InstructionIndex())->IsLabel(); | |
2017 } | |
2018 | |
2019 | |
// Splits |range| at |pos| and returns the tail part.  If |pos| is at or
// before the range start no split is needed and |range| itself is returned.
// Returns NULL when the allocator runs out of virtual registers, which
// aborts allocation (callers must check AllocationOk()).
LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
  DCHECK(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect liveranges if split occured at the end
  // of control instruction.
  DCHECK(pos.IsInstructionStart() ||
         !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());

  // The tail becomes a fresh live range with its own virtual register.
  int vreg = GetVirtualRegister();
  if (!AllocationOk()) return NULL;
  LiveRange* result = LiveRangeFor(vreg);
  range->SplitAt(pos, result, zone());
  return result;
}
2037 | |
2038 | |
// Splits |range| at a position chosen inside [start, end] by
// FindOptimalSplitPos (preferring loop boundaries) and returns the tail.
LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  DCHECK(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  DCHECK(split_pos.Value() >= start.Value());
  return SplitRangeAt(range, split_pos);
}
2052 | |
2053 | |
// Picks a split position inside [start, end].  Splits as late as possible,
// except when the interval crosses a loop boundary: then it returns the first
// instruction of the outermost loop header inside the interval, so that the
// split (and any resulting spill moves) lands outside the loop body.
LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  DCHECK(start_instr <= end_instr);

  // We have no choice
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find header of outermost loop.
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  // We did not find any suitable outer loop. Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}
2086 | |
2087 | |
2088 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { | |
2089 LiveRange* second_part = SplitRangeAt(range, pos); | |
2090 if (!AllocationOk()) return; | |
2091 Spill(second_part); | |
2092 } | |
2093 | |
2094 | |
// Spills |range| on the interval [start, end[ with no extra minimum spill
// extent (i.e. SpillBetweenUntil with until == start).
void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  SpillBetweenUntil(range, start, start, end);
}
2100 | |
2101 | |
// Spills |range| on [start, end[, keeping it spilled at least until |until|.
// Callers use |until| to guarantee the unspilled tail does not start before
// the range currently being allocated (see SplitAndSpillIntersecting).  The
// tail after the spilled part is re-queued as unhandled.
void LAllocator::SpillBetweenUntil(LiveRange* range,
                                   LifetimePosition start,
                                   LifetimePosition until,
                                   LifetimePosition end) {
  CHECK(start.Value() < end.Value());
  LiveRange* second_part = SplitRangeAt(range, start);
  if (!AllocationOk()) return;

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at position between ]start+1, end[, spill the middle part
    // and put the rest to unhandled.
    LiveRange* third_part = SplitBetween(
        second_part,
        Max(second_part->Start().InstructionEnd(), until),
        end.PrevInstruction().InstructionEnd());
    if (!AllocationOk()) return;

    DCHECK(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it to unhandled as whole.
    AddToUnhandledSorted(second_part);
  }
}
2130 | |
2131 | |
// Marks |range| as spilled.  The stack slot lives on the top-level range and
// is assigned lazily: first a recycled slot is tried, then a fresh slot of
// the range's kind is taken from the chunk.
void LAllocator::Spill(LiveRange* range) {
  DCHECK(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
    first->SetSpillOperand(op);
  }
  range->MakeSpilled(chunk()->zone());
}
2144 | |
2145 | |
// Number of registers the allocator may currently choose from.
// NOTE(review): num_registers_ is set outside this chunk -- presumably per
// allocation mode (general vs. double registers); confirm at the setter.
int LAllocator::RegisterCount() const {
  return num_registers_;
}
2149 | |
2150 | |
2151 #ifdef DEBUG | |
2152 | |
2153 | |
// Debug-only consistency check: runs LiveRange::Verify on every non-NULL
// live range.
void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}
2160 | |
2161 | |
2162 #endif | |
2163 | |
2164 | |
// Opens a named compilation phase around register allocation.  When
// --hydrogen-stats is on, snapshots the allocator zone's allocation size so
// the destructor can report the memory this phase consumed.
LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
    : CompilationPhase(name, allocator->graph()->info()),
      allocator_(allocator) {
  if (FLAG_hydrogen_stats) {
    allocator_zone_start_allocation_size_ =
        allocator->zone()->allocation_size();
  }
}
2173 | |
2174 | |
// Closes the phase: reports zone memory growth to the hydrogen statistics,
// emits lithium/live-range trace output when requested, and in debug builds
// verifies all live ranges.
LAllocatorPhase::~LAllocatorPhase() {
  if (FLAG_hydrogen_stats) {
    // Memory consumed by this phase = zone growth since the constructor ran.
    size_t size = allocator_->zone()->allocation_size() -
                  allocator_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size);
  }

  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
    isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
  }

#ifdef DEBUG
  if (allocator_ != NULL) allocator_->Verify();
#endif
}
2191 | |
2192 | |
2193 } // namespace internal | |
2194 } // namespace v8 | |
OLD | NEW |