OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/hydrogen.h" | 7 #include "src/hydrogen.h" |
8 #include "src/lithium-inl.h" | 8 #include "src/lithium-inl.h" |
9 #include "src/lithium-allocator-inl.h" | 9 #include "src/lithium-allocator-inl.h" |
10 #include "src/string-stream.h" | 10 #include "src/string-stream.h" |
(...skipping 19 matching lines...) |
30 pos_(pos), | 30 pos_(pos), |
31 next_(NULL), | 31 next_(NULL), |
32 requires_reg_(false), | 32 requires_reg_(false), |
33 register_beneficial_(true) { | 33 register_beneficial_(true) { |
34 if (operand_ != NULL && operand_->IsUnallocated()) { | 34 if (operand_ != NULL && operand_->IsUnallocated()) { |
35 LUnallocated* unalloc = LUnallocated::cast(operand_); | 35 LUnallocated* unalloc = LUnallocated::cast(operand_); |
36 requires_reg_ = unalloc->HasRegisterPolicy() || | 36 requires_reg_ = unalloc->HasRegisterPolicy() || |
37 unalloc->HasDoubleRegisterPolicy(); | 37 unalloc->HasDoubleRegisterPolicy(); |
38 register_beneficial_ = !unalloc->HasAnyPolicy(); | 38 register_beneficial_ = !unalloc->HasAnyPolicy(); |
39 } | 39 } |
40 ASSERT(pos_.IsValid()); | 40 DCHECK(pos_.IsValid()); |
41 } | 41 } |
42 | 42 |
43 | 43 |
44 bool UsePosition::HasHint() const { | 44 bool UsePosition::HasHint() const { |
45 return hint_ != NULL && !hint_->IsUnallocated(); | 45 return hint_ != NULL && !hint_->IsUnallocated(); |
46 } | 46 } |
47 | 47 |
48 | 48 |
49 bool UsePosition::RequiresRegister() const { | 49 bool UsePosition::RequiresRegister() const { |
50 return requires_reg_; | 50 return requires_reg_; |
51 } | 51 } |
52 | 52 |
53 | 53 |
54 bool UsePosition::RegisterIsBeneficial() const { | 54 bool UsePosition::RegisterIsBeneficial() const { |
55 return register_beneficial_; | 55 return register_beneficial_; |
56 } | 56 } |
57 | 57 |
58 | 58 |
59 void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) { | 59 void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) { |
60 ASSERT(Contains(pos) && pos.Value() != start().Value()); | 60 DCHECK(Contains(pos) && pos.Value() != start().Value()); |
61 UseInterval* after = new(zone) UseInterval(pos, end_); | 61 UseInterval* after = new(zone) UseInterval(pos, end_); |
62 after->next_ = next_; | 62 after->next_ = next_; |
63 next_ = after; | 63 next_ = after; |
64 end_ = pos; | 64 end_ = pos; |
65 } | 65 } |
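For reference, a minimal standalone sketch of the same split operation, with a simplified Interval struct and plain heap allocation standing in for UseInterval and zone allocation; it is an illustration, not code from the file under review:

struct Interval {
  int start;
  int end;
  Interval* next;
};

// Split [start, end) at pos into [start, pos) -> [pos, end), keeping the rest
// of the chain attached to the new node (the caller guarantees
// start < pos < end, mirroring the DCHECK above).
void SplitIntervalAt(Interval* interval, int pos) {
  Interval* after = new Interval{pos, interval->end, interval->next};
  interval->next = after;
  interval->end = pos;
}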
66 | 66 |
67 | 67 |
68 #ifdef DEBUG | 68 #ifdef DEBUG |
69 | 69 |
70 | 70 |
71 void LiveRange::Verify() const { | 71 void LiveRange::Verify() const { |
72 UsePosition* cur = first_pos_; | 72 UsePosition* cur = first_pos_; |
73 while (cur != NULL) { | 73 while (cur != NULL) { |
74 ASSERT(Start().Value() <= cur->pos().Value() && | 74 DCHECK(Start().Value() <= cur->pos().Value() && |
75 cur->pos().Value() <= End().Value()); | 75 cur->pos().Value() <= End().Value()); |
76 cur = cur->next(); | 76 cur = cur->next(); |
77 } | 77 } |
78 } | 78 } |
79 | 79 |
80 | 80 |
81 bool LiveRange::HasOverlap(UseInterval* target) const { | 81 bool LiveRange::HasOverlap(UseInterval* target) const { |
82 UseInterval* current_interval = first_interval_; | 82 UseInterval* current_interval = first_interval_; |
83 while (current_interval != NULL) { | 83 while (current_interval != NULL) { |
84 // Intervals overlap if the start of one is contained in the other. | 84 // Intervals overlap if the start of one is contained in the other. |
(...skipping 21 matching lines...) |
106 parent_(NULL), | 106 parent_(NULL), |
107 next_(NULL), | 107 next_(NULL), |
108 current_interval_(NULL), | 108 current_interval_(NULL), |
109 last_processed_use_(NULL), | 109 last_processed_use_(NULL), |
110 current_hint_operand_(NULL), | 110 current_hint_operand_(NULL), |
111 spill_operand_(new(zone) LOperand()), | 111 spill_operand_(new(zone) LOperand()), |
112 spill_start_index_(kMaxInt) { } | 112 spill_start_index_(kMaxInt) { } |
113 | 113 |
114 | 114 |
115 void LiveRange::set_assigned_register(int reg, Zone* zone) { | 115 void LiveRange::set_assigned_register(int reg, Zone* zone) { |
116 ASSERT(!HasRegisterAssigned() && !IsSpilled()); | 116 DCHECK(!HasRegisterAssigned() && !IsSpilled()); |
117 assigned_register_ = reg; | 117 assigned_register_ = reg; |
118 ConvertOperands(zone); | 118 ConvertOperands(zone); |
119 } | 119 } |
120 | 120 |
121 | 121 |
122 void LiveRange::MakeSpilled(Zone* zone) { | 122 void LiveRange::MakeSpilled(Zone* zone) { |
123 ASSERT(!IsSpilled()); | 123 DCHECK(!IsSpilled()); |
124 ASSERT(TopLevel()->HasAllocatedSpillOperand()); | 124 DCHECK(TopLevel()->HasAllocatedSpillOperand()); |
125 spilled_ = true; | 125 spilled_ = true; |
126 assigned_register_ = kInvalidAssignment; | 126 assigned_register_ = kInvalidAssignment; |
127 ConvertOperands(zone); | 127 ConvertOperands(zone); |
128 } | 128 } |
129 | 129 |
130 | 130 |
131 bool LiveRange::HasAllocatedSpillOperand() const { | 131 bool LiveRange::HasAllocatedSpillOperand() const { |
132 ASSERT(spill_operand_ != NULL); | 132 DCHECK(spill_operand_ != NULL); |
133 return !spill_operand_->IsIgnored(); | 133 return !spill_operand_->IsIgnored(); |
134 } | 134 } |
135 | 135 |
136 | 136 |
137 void LiveRange::SetSpillOperand(LOperand* operand) { | 137 void LiveRange::SetSpillOperand(LOperand* operand) { |
138 ASSERT(!operand->IsUnallocated()); | 138 DCHECK(!operand->IsUnallocated()); |
139 ASSERT(spill_operand_ != NULL); | 139 DCHECK(spill_operand_ != NULL); |
140 ASSERT(spill_operand_->IsIgnored()); | 140 DCHECK(spill_operand_->IsIgnored()); |
141 spill_operand_->ConvertTo(operand->kind(), operand->index()); | 141 spill_operand_->ConvertTo(operand->kind(), operand->index()); |
142 } | 142 } |
143 | 143 |
144 | 144 |
145 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { | 145 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { |
146 UsePosition* use_pos = last_processed_use_; | 146 UsePosition* use_pos = last_processed_use_; |
147 if (use_pos == NULL) use_pos = first_pos(); | 147 if (use_pos == NULL) use_pos = first_pos(); |
148 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) { | 148 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) { |
149 use_pos = use_pos->next(); | 149 use_pos = use_pos->next(); |
150 } | 150 } |
(...skipping 39 matching lines...) |
190 UsePosition* use_pos = NextRegisterPosition(pos); | 190 UsePosition* use_pos = NextRegisterPosition(pos); |
191 if (use_pos == NULL) return true; | 191 if (use_pos == NULL) return true; |
192 return | 192 return |
193 use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value(); | 193 use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value(); |
194 } | 194 } |
195 | 195 |
196 | 196 |
197 LOperand* LiveRange::CreateAssignedOperand(Zone* zone) { | 197 LOperand* LiveRange::CreateAssignedOperand(Zone* zone) { |
198 LOperand* op = NULL; | 198 LOperand* op = NULL; |
199 if (HasRegisterAssigned()) { | 199 if (HasRegisterAssigned()) { |
200 ASSERT(!IsSpilled()); | 200 DCHECK(!IsSpilled()); |
201 switch (Kind()) { | 201 switch (Kind()) { |
202 case GENERAL_REGISTERS: | 202 case GENERAL_REGISTERS: |
203 op = LRegister::Create(assigned_register(), zone); | 203 op = LRegister::Create(assigned_register(), zone); |
204 break; | 204 break; |
205 case DOUBLE_REGISTERS: | 205 case DOUBLE_REGISTERS: |
206 op = LDoubleRegister::Create(assigned_register(), zone); | 206 op = LDoubleRegister::Create(assigned_register(), zone); |
207 break; | 207 break; |
208 default: | 208 default: |
209 UNREACHABLE(); | 209 UNREACHABLE(); |
210 } | 210 } |
211 } else if (IsSpilled()) { | 211 } else if (IsSpilled()) { |
212 ASSERT(!HasRegisterAssigned()); | 212 DCHECK(!HasRegisterAssigned()); |
213 op = TopLevel()->GetSpillOperand(); | 213 op = TopLevel()->GetSpillOperand(); |
214 ASSERT(!op->IsUnallocated()); | 214 DCHECK(!op->IsUnallocated()); |
215 } else { | 215 } else { |
216 LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE); | 216 LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE); |
217 unalloc->set_virtual_register(id_); | 217 unalloc->set_virtual_register(id_); |
218 op = unalloc; | 218 op = unalloc; |
219 } | 219 } |
220 return op; | 220 return op; |
221 } | 221 } |
222 | 222 |
223 | 223 |
224 UseInterval* LiveRange::FirstSearchIntervalForPosition( | 224 UseInterval* LiveRange::FirstSearchIntervalForPosition( |
(...skipping 16 matching lines...) |
241 : current_interval_->start(); | 241 : current_interval_->start(); |
242 if (to_start_of->start().Value() > start.Value()) { | 242 if (to_start_of->start().Value() > start.Value()) { |
243 current_interval_ = to_start_of; | 243 current_interval_ = to_start_of; |
244 } | 244 } |
245 } | 245 } |
246 | 246 |
247 | 247 |
248 void LiveRange::SplitAt(LifetimePosition position, | 248 void LiveRange::SplitAt(LifetimePosition position, |
249 LiveRange* result, | 249 LiveRange* result, |
250 Zone* zone) { | 250 Zone* zone) { |
251 ASSERT(Start().Value() < position.Value()); | 251 DCHECK(Start().Value() < position.Value()); |
252 ASSERT(result->IsEmpty()); | 252 DCHECK(result->IsEmpty()); |
253 // Find the last interval that ends before the position. If the | 253 // Find the last interval that ends before the position. If the |
254 // position is contained in one of the intervals in the chain, we | 254 // position is contained in one of the intervals in the chain, we |
255 // split that interval and use the first part. | 255 // split that interval and use the first part. |
256 UseInterval* current = FirstSearchIntervalForPosition(position); | 256 UseInterval* current = FirstSearchIntervalForPosition(position); |
257 | 257 |
258 // If the split position coincides with the beginning of a use interval | 258 // If the split position coincides with the beginning of a use interval |
259 // we need to split use positions in a special way. | 259 // we need to split use positions in a special way. |
260 bool split_at_start = false; | 260 bool split_at_start = false; |
261 | 261 |
262 if (current->start().Value() == position.Value()) { | 262 if (current->start().Value() == position.Value()) { |
(...skipping 83 matching lines...) |
346 UsePosition* other_pos = other->first_pos(); | 346 UsePosition* other_pos = other->first_pos(); |
347 if (other_pos == NULL) return true; | 347 if (other_pos == NULL) return true; |
348 return pos->pos().Value() < other_pos->pos().Value(); | 348 return pos->pos().Value() < other_pos->pos().Value(); |
349 } | 349 } |
350 return start.Value() < other_start.Value(); | 350 return start.Value() < other_start.Value(); |
351 } | 351 } |
352 | 352 |
353 | 353 |
354 void LiveRange::ShortenTo(LifetimePosition start) { | 354 void LiveRange::ShortenTo(LifetimePosition start) { |
355 LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value()); | 355 LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value()); |
356 ASSERT(first_interval_ != NULL); | 356 DCHECK(first_interval_ != NULL); |
357 ASSERT(first_interval_->start().Value() <= start.Value()); | 357 DCHECK(first_interval_->start().Value() <= start.Value()); |
358 ASSERT(start.Value() < first_interval_->end().Value()); | 358 DCHECK(start.Value() < first_interval_->end().Value()); |
359 first_interval_->set_start(start); | 359 first_interval_->set_start(start); |
360 } | 360 } |
361 | 361 |
362 | 362 |
363 void LiveRange::EnsureInterval(LifetimePosition start, | 363 void LiveRange::EnsureInterval(LifetimePosition start, |
364 LifetimePosition end, | 364 LifetimePosition end, |
365 Zone* zone) { | 365 Zone* zone) { |
366 LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n", | 366 LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n", |
367 id_, | 367 id_, |
368 start.Value(), | 368 start.Value(), |
(...skipping 31 matching lines...) |
400 if (end.Value() == first_interval_->start().Value()) { | 400 if (end.Value() == first_interval_->start().Value()) { |
401 first_interval_->set_start(start); | 401 first_interval_->set_start(start); |
402 } else if (end.Value() < first_interval_->start().Value()) { | 402 } else if (end.Value() < first_interval_->start().Value()) { |
403 UseInterval* interval = new(zone) UseInterval(start, end); | 403 UseInterval* interval = new(zone) UseInterval(start, end); |
404 interval->set_next(first_interval_); | 404 interval->set_next(first_interval_); |
405 first_interval_ = interval; | 405 first_interval_ = interval; |
406 } else { | 406 } else { |
407 // The order of instruction processing (see ProcessInstructions) guarantees | 407 // The order of instruction processing (see ProcessInstructions) guarantees |
408 // that each new use interval either precedes or intersects with | 408 // that each new use interval either precedes or intersects with |
409 // the last added interval. | 409 // the last added interval. |
410 ASSERT(start.Value() < first_interval_->end().Value()); | 410 DCHECK(start.Value() < first_interval_->end().Value()); |
411 first_interval_->start_ = Min(start, first_interval_->start_); | 411 first_interval_->start_ = Min(start, first_interval_->start_); |
412 first_interval_->end_ = Max(end, first_interval_->end_); | 412 first_interval_->end_ = Max(end, first_interval_->end_); |
413 } | 413 } |
414 } | 414 } |
415 } | 415 } |
416 | 416 |
417 | 417 |
418 void LiveRange::AddUsePosition(LifetimePosition pos, | 418 void LiveRange::AddUsePosition(LifetimePosition pos, |
419 LOperand* operand, | 419 LOperand* operand, |
420 LOperand* hint, | 420 LOperand* hint, |
(...skipping 22 matching lines...) |
443 if (prev_hint == NULL && use_pos->HasHint()) { | 443 if (prev_hint == NULL && use_pos->HasHint()) { |
444 current_hint_operand_ = hint; | 444 current_hint_operand_ = hint; |
445 } | 445 } |
446 } | 446 } |
447 | 447 |
448 | 448 |
449 void LiveRange::ConvertOperands(Zone* zone) { | 449 void LiveRange::ConvertOperands(Zone* zone) { |
450 LOperand* op = CreateAssignedOperand(zone); | 450 LOperand* op = CreateAssignedOperand(zone); |
451 UsePosition* use_pos = first_pos(); | 451 UsePosition* use_pos = first_pos(); |
452 while (use_pos != NULL) { | 452 while (use_pos != NULL) { |
453 ASSERT(Start().Value() <= use_pos->pos().Value() && | 453 DCHECK(Start().Value() <= use_pos->pos().Value() && |
454 use_pos->pos().Value() <= End().Value()); | 454 use_pos->pos().Value() <= End().Value()); |
455 | 455 |
456 if (use_pos->HasOperand()) { | 456 if (use_pos->HasOperand()) { |
457 ASSERT(op->IsRegister() || op->IsDoubleRegister() || | 457 DCHECK(op->IsRegister() || op->IsDoubleRegister() || |
458 !use_pos->RequiresRegister()); | 458 !use_pos->RequiresRegister()); |
459 use_pos->operand()->ConvertTo(op->kind(), op->index()); | 459 use_pos->operand()->ConvertTo(op->kind(), op->index()); |
460 } | 460 } |
461 use_pos = use_pos->next(); | 461 use_pos = use_pos->next(); |
462 } | 462 } |
463 } | 463 } |
464 | 464 |
465 | 465 |
466 bool LiveRange::CanCover(LifetimePosition position) const { | 466 bool LiveRange::CanCover(LifetimePosition position) const { |
467 if (IsEmpty()) return false; | 467 if (IsEmpty()) return false; |
468 return Start().Value() <= position.Value() && | 468 return Start().Value() <= position.Value() && |
469 position.Value() < End().Value(); | 469 position.Value() < End().Value(); |
470 } | 470 } |
471 | 471 |
472 | 472 |
473 bool LiveRange::Covers(LifetimePosition position) { | 473 bool LiveRange::Covers(LifetimePosition position) { |
474 if (!CanCover(position)) return false; | 474 if (!CanCover(position)) return false; |
475 UseInterval* start_search = FirstSearchIntervalForPosition(position); | 475 UseInterval* start_search = FirstSearchIntervalForPosition(position); |
476 for (UseInterval* interval = start_search; | 476 for (UseInterval* interval = start_search; |
477 interval != NULL; | 477 interval != NULL; |
478 interval = interval->next()) { | 478 interval = interval->next()) { |
479 ASSERT(interval->next() == NULL || | 479 DCHECK(interval->next() == NULL || |
480 interval->next()->start().Value() >= interval->start().Value()); | 480 interval->next()->start().Value() >= interval->start().Value()); |
481 AdvanceLastProcessedMarker(interval, position); | 481 AdvanceLastProcessedMarker(interval, position); |
482 if (interval->Contains(position)) return true; | 482 if (interval->Contains(position)) return true; |
483 if (interval->start().Value() > position.Value()) return false; | 483 if (interval->start().Value() > position.Value()) return false; |
484 } | 484 } |
485 return false; | 485 return false; |
486 } | 486 } |
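A rough standalone sketch of the containment test Covers() performs, assuming the intervals are kept sorted by start and are half-open as [start, end); the caching through FirstSearchIntervalForPosition and AdvanceLastProcessedMarker is deliberately left out:

#include <utility>
#include <vector>

// Illustration only: walk sorted, disjoint [start, end) intervals.
bool CoversPosition(const std::vector<std::pair<int, int>>& intervals,
                    int position) {
  for (const auto& interval : intervals) {
    // Once an interval starts past the position, later ones cannot match.
    if (interval.first > position) return false;
    if (position < interval.second) return true;  // start <= position < end.
  }
  return false;
}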
487 | 487 |
488 | 488 |
489 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) { | 489 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) { |
(...skipping 97 matching lines...) |
587 | 587 |
588 int LAllocator::FixedDoubleLiveRangeID(int index) { | 588 int LAllocator::FixedDoubleLiveRangeID(int index) { |
589 return -index - 1 - Register::kMaxNumAllocatableRegisters; | 589 return -index - 1 - Register::kMaxNumAllocatableRegisters; |
590 } | 590 } |
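A small worked illustration of this ID encoding, assuming (as the offset suggests) that the general-register fixed IDs produced by FixedLiveRangeID occupy -1 downward; kMaxGeneral is an invented stand-in for Register::kMaxNumAllocatableRegisters:

// Illustration only; the register count is an assumption for the example.
constexpr int kMaxGeneral = 16;

constexpr int FixedDoubleID(int index) {
  return -index - 1 - kMaxGeneral;  // Same formula as FixedDoubleLiveRangeID.
}

// Double-register IDs start below the general-register ID space, so the two
// negative ranges never collide.
static_assert(FixedDoubleID(0) == -17, "first double-register fixed ID");
static_assert(FixedDoubleID(3) == -20, "IDs decrease as the index grows");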
591 | 591 |
592 | 592 |
593 LOperand* LAllocator::AllocateFixed(LUnallocated* operand, | 593 LOperand* LAllocator::AllocateFixed(LUnallocated* operand, |
594 int pos, | 594 int pos, |
595 bool is_tagged) { | 595 bool is_tagged) { |
596 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register()); | 596 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register()); |
597 ASSERT(operand->HasFixedPolicy()); | 597 DCHECK(operand->HasFixedPolicy()); |
598 if (operand->HasFixedSlotPolicy()) { | 598 if (operand->HasFixedSlotPolicy()) { |
599 operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index()); | 599 operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index()); |
600 } else if (operand->HasFixedRegisterPolicy()) { | 600 } else if (operand->HasFixedRegisterPolicy()) { |
601 int reg_index = operand->fixed_register_index(); | 601 int reg_index = operand->fixed_register_index(); |
602 operand->ConvertTo(LOperand::REGISTER, reg_index); | 602 operand->ConvertTo(LOperand::REGISTER, reg_index); |
603 } else if (operand->HasFixedDoubleRegisterPolicy()) { | 603 } else if (operand->HasFixedDoubleRegisterPolicy()) { |
604 int reg_index = operand->fixed_register_index(); | 604 int reg_index = operand->fixed_register_index(); |
605 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index); | 605 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index); |
606 } else { | 606 } else { |
607 UNREACHABLE(); | 607 UNREACHABLE(); |
608 } | 608 } |
609 if (is_tagged) { | 609 if (is_tagged) { |
610 TraceAlloc("Fixed reg is tagged at %d\n", pos); | 610 TraceAlloc("Fixed reg is tagged at %d\n", pos); |
611 LInstruction* instr = InstructionAt(pos); | 611 LInstruction* instr = InstructionAt(pos); |
612 if (instr->HasPointerMap()) { | 612 if (instr->HasPointerMap()) { |
613 instr->pointer_map()->RecordPointer(operand, chunk()->zone()); | 613 instr->pointer_map()->RecordPointer(operand, chunk()->zone()); |
614 } | 614 } |
615 } | 615 } |
616 return operand; | 616 return operand; |
617 } | 617 } |
618 | 618 |
619 | 619 |
620 LiveRange* LAllocator::FixedLiveRangeFor(int index) { | 620 LiveRange* LAllocator::FixedLiveRangeFor(int index) { |
621 ASSERT(index < Register::kMaxNumAllocatableRegisters); | 621 DCHECK(index < Register::kMaxNumAllocatableRegisters); |
622 LiveRange* result = fixed_live_ranges_[index]; | 622 LiveRange* result = fixed_live_ranges_[index]; |
623 if (result == NULL) { | 623 if (result == NULL) { |
624 result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone()); | 624 result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone()); |
625 ASSERT(result->IsFixed()); | 625 DCHECK(result->IsFixed()); |
626 result->kind_ = GENERAL_REGISTERS; | 626 result->kind_ = GENERAL_REGISTERS; |
627 SetLiveRangeAssignedRegister(result, index); | 627 SetLiveRangeAssignedRegister(result, index); |
628 fixed_live_ranges_[index] = result; | 628 fixed_live_ranges_[index] = result; |
629 } | 629 } |
630 return result; | 630 return result; |
631 } | 631 } |
632 | 632 |
633 | 633 |
634 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) { | 634 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) { |
635 ASSERT(index < DoubleRegister::NumAllocatableRegisters()); | 635 DCHECK(index < DoubleRegister::NumAllocatableRegisters()); |
636 LiveRange* result = fixed_double_live_ranges_[index]; | 636 LiveRange* result = fixed_double_live_ranges_[index]; |
637 if (result == NULL) { | 637 if (result == NULL) { |
638 result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index), | 638 result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index), |
639 chunk()->zone()); | 639 chunk()->zone()); |
640 ASSERT(result->IsFixed()); | 640 DCHECK(result->IsFixed()); |
641 result->kind_ = DOUBLE_REGISTERS; | 641 result->kind_ = DOUBLE_REGISTERS; |
642 SetLiveRangeAssignedRegister(result, index); | 642 SetLiveRangeAssignedRegister(result, index); |
643 fixed_double_live_ranges_[index] = result; | 643 fixed_double_live_ranges_[index] = result; |
644 } | 644 } |
645 return result; | 645 return result; |
646 } | 646 } |
647 | 647 |
648 | 648 |
649 LiveRange* LAllocator::LiveRangeFor(int index) { | 649 LiveRange* LAllocator::LiveRangeFor(int index) { |
650 if (index >= live_ranges_.length()) { | 650 if (index >= live_ranges_.length()) { |
(...skipping 169 matching lines...) |
820 LUnallocated* cur_input = LUnallocated::cast(it.Current()); | 820 LUnallocated* cur_input = LUnallocated::cast(it.Current()); |
821 if (cur_input->HasFixedPolicy()) { | 821 if (cur_input->HasFixedPolicy()) { |
822 LUnallocated* input_copy = cur_input->CopyUnconstrained( | 822 LUnallocated* input_copy = cur_input->CopyUnconstrained( |
823 chunk()->zone()); | 823 chunk()->zone()); |
824 bool is_tagged = HasTaggedValue(cur_input->virtual_register()); | 824 bool is_tagged = HasTaggedValue(cur_input->virtual_register()); |
825 AllocateFixed(cur_input, gap_index + 1, is_tagged); | 825 AllocateFixed(cur_input, gap_index + 1, is_tagged); |
826 AddConstraintsGapMove(gap_index, input_copy, cur_input); | 826 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
827 } else if (cur_input->HasWritableRegisterPolicy()) { | 827 } else if (cur_input->HasWritableRegisterPolicy()) { |
828 // The live range of writable input registers always goes until the end | 828 // The live range of writable input registers always goes until the end |
829 // of the instruction. | 829 // of the instruction. |
830 ASSERT(!cur_input->IsUsedAtStart()); | 830 DCHECK(!cur_input->IsUsedAtStart()); |
831 | 831 |
832 LUnallocated* input_copy = cur_input->CopyUnconstrained( | 832 LUnallocated* input_copy = cur_input->CopyUnconstrained( |
833 chunk()->zone()); | 833 chunk()->zone()); |
834 int vreg = GetVirtualRegister(); | 834 int vreg = GetVirtualRegister(); |
835 if (!AllocationOk()) return; | 835 if (!AllocationOk()) return; |
836 cur_input->set_virtual_register(vreg); | 836 cur_input->set_virtual_register(vreg); |
837 | 837 |
838 if (RequiredRegisterKind(input_copy->virtual_register()) == | 838 if (RequiredRegisterKind(input_copy->virtual_register()) == |
839 DOUBLE_REGISTERS) { | 839 DOUBLE_REGISTERS) { |
840 double_artificial_registers_.Add( | 840 double_artificial_registers_.Add( |
(...skipping 79 matching lines...) |
920 } else { | 920 } else { |
921 Define(curr_position, to, from); | 921 Define(curr_position, to, from); |
922 } | 922 } |
923 } | 923 } |
924 Use(block_start_position, curr_position, from, hint); | 924 Use(block_start_position, curr_position, from, hint); |
925 if (from->IsUnallocated()) { | 925 if (from->IsUnallocated()) { |
926 live->Add(LUnallocated::cast(from)->virtual_register()); | 926 live->Add(LUnallocated::cast(from)->virtual_register()); |
927 } | 927 } |
928 } | 928 } |
929 } else { | 929 } else { |
930 ASSERT(!IsGapAt(index)); | 930 DCHECK(!IsGapAt(index)); |
931 LInstruction* instr = InstructionAt(index); | 931 LInstruction* instr = InstructionAt(index); |
932 | 932 |
933 if (instr != NULL) { | 933 if (instr != NULL) { |
934 LOperand* output = instr->Output(); | 934 LOperand* output = instr->Output(); |
935 if (output != NULL) { | 935 if (output != NULL) { |
936 if (output->IsUnallocated()) { | 936 if (output->IsUnallocated()) { |
937 live->Remove(LUnallocated::cast(output)->virtual_register()); | 937 live->Remove(LUnallocated::cast(output)->virtual_register()); |
938 } | 938 } |
939 Define(curr_position, output, NULL); | 939 Define(curr_position, output, NULL); |
940 } | 940 } |
(...skipping 77 matching lines...) |
1018 LUnallocated* phi_operand = | 1018 LUnallocated* phi_operand = |
1019 new(chunk()->zone()) LUnallocated(LUnallocated::NONE); | 1019 new(chunk()->zone()) LUnallocated(LUnallocated::NONE); |
1020 phi_operand->set_virtual_register(phi->id()); | 1020 phi_operand->set_virtual_register(phi->id()); |
1021 for (int j = 0; j < phi->OperandCount(); ++j) { | 1021 for (int j = 0; j < phi->OperandCount(); ++j) { |
1022 HValue* op = phi->OperandAt(j); | 1022 HValue* op = phi->OperandAt(j); |
1023 LOperand* operand = NULL; | 1023 LOperand* operand = NULL; |
1024 if (op->IsConstant() && op->EmitAtUses()) { | 1024 if (op->IsConstant() && op->EmitAtUses()) { |
1025 HConstant* constant = HConstant::cast(op); | 1025 HConstant* constant = HConstant::cast(op); |
1026 operand = chunk_->DefineConstantOperand(constant); | 1026 operand = chunk_->DefineConstantOperand(constant); |
1027 } else { | 1027 } else { |
1028 ASSERT(!op->EmitAtUses()); | 1028 DCHECK(!op->EmitAtUses()); |
1029 LUnallocated* unalloc = | 1029 LUnallocated* unalloc = |
1030 new(chunk()->zone()) LUnallocated(LUnallocated::ANY); | 1030 new(chunk()->zone()) LUnallocated(LUnallocated::ANY); |
1031 unalloc->set_virtual_register(op->id()); | 1031 unalloc->set_virtual_register(op->id()); |
1032 operand = unalloc; | 1032 operand = unalloc; |
1033 } | 1033 } |
1034 HBasicBlock* cur_block = block->predecessors()->at(j); | 1034 HBasicBlock* cur_block = block->predecessors()->at(j); |
1035 // The gap move must be added without any special processing as in | 1035 // The gap move must be added without any special processing as in |
1036 // the AddConstraintsGapMove. | 1036 // the AddConstraintsGapMove. |
1037 chunk_->AddGapMove(cur_block->last_instruction_index() - 1, | 1037 chunk_->AddGapMove(cur_block->last_instruction_index() - 1, |
1038 operand, | 1038 operand, |
(...skipping 21 matching lines...) |
1060 LiveRange* live_range = LiveRangeFor(phi->id()); | 1060 LiveRange* live_range = LiveRangeFor(phi->id()); |
1061 LLabel* label = chunk_->GetLabel(phi->block()->block_id()); | 1061 LLabel* label = chunk_->GetLabel(phi->block()->block_id()); |
1062 label->GetOrCreateParallelMove(LGap::START, chunk()->zone())-> | 1062 label->GetOrCreateParallelMove(LGap::START, chunk()->zone())-> |
1063 AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone()); | 1063 AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone()); |
1064 live_range->SetSpillStartIndex(phi->block()->first_instruction_index()); | 1064 live_range->SetSpillStartIndex(phi->block()->first_instruction_index()); |
1065 } | 1065 } |
1066 } | 1066 } |
1067 | 1067 |
1068 | 1068 |
1069 bool LAllocator::Allocate(LChunk* chunk) { | 1069 bool LAllocator::Allocate(LChunk* chunk) { |
1070 ASSERT(chunk_ == NULL); | 1070 DCHECK(chunk_ == NULL); |
1071 chunk_ = static_cast<LPlatformChunk*>(chunk); | 1071 chunk_ = static_cast<LPlatformChunk*>(chunk); |
1072 assigned_registers_ = | 1072 assigned_registers_ = |
1073 new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(), | 1073 new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(), |
1074 chunk->zone()); | 1074 chunk->zone()); |
1075 assigned_double_registers_ = | 1075 assigned_double_registers_ = |
1076 new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(), | 1076 new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(), |
1077 chunk->zone()); | 1077 chunk->zone()); |
1078 MeetRegisterConstraints(); | 1078 MeetRegisterConstraints(); |
1079 if (!AllocationOk()) return false; | 1079 if (!AllocationOk()) return false; |
1080 ResolvePhis(); | 1080 ResolvePhis(); |
(...skipping 37 matching lines...) |
1118 HBasicBlock* pred) { | 1118 HBasicBlock* pred) { |
1119 LifetimePosition pred_end = | 1119 LifetimePosition pred_end = |
1120 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); | 1120 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); |
1121 LifetimePosition cur_start = | 1121 LifetimePosition cur_start = |
1122 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); | 1122 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); |
1123 LiveRange* pred_cover = NULL; | 1123 LiveRange* pred_cover = NULL; |
1124 LiveRange* cur_cover = NULL; | 1124 LiveRange* cur_cover = NULL; |
1125 LiveRange* cur_range = range; | 1125 LiveRange* cur_range = range; |
1126 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { | 1126 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { |
1127 if (cur_range->CanCover(cur_start)) { | 1127 if (cur_range->CanCover(cur_start)) { |
1128 ASSERT(cur_cover == NULL); | 1128 DCHECK(cur_cover == NULL); |
1129 cur_cover = cur_range; | 1129 cur_cover = cur_range; |
1130 } | 1130 } |
1131 if (cur_range->CanCover(pred_end)) { | 1131 if (cur_range->CanCover(pred_end)) { |
1132 ASSERT(pred_cover == NULL); | 1132 DCHECK(pred_cover == NULL); |
1133 pred_cover = cur_range; | 1133 pred_cover = cur_range; |
1134 } | 1134 } |
1135 cur_range = cur_range->next(); | 1135 cur_range = cur_range->next(); |
1136 } | 1136 } |
1137 | 1137 |
1138 if (cur_cover->IsSpilled()) return; | 1138 if (cur_cover->IsSpilled()) return; |
1139 ASSERT(pred_cover != NULL && cur_cover != NULL); | 1139 DCHECK(pred_cover != NULL && cur_cover != NULL); |
1140 if (pred_cover != cur_cover) { | 1140 if (pred_cover != cur_cover) { |
1141 LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone()); | 1141 LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone()); |
1142 LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone()); | 1142 LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone()); |
1143 if (!pred_op->Equals(cur_op)) { | 1143 if (!pred_op->Equals(cur_op)) { |
1144 LGap* gap = NULL; | 1144 LGap* gap = NULL; |
1145 if (block->predecessors()->length() == 1) { | 1145 if (block->predecessors()->length() == 1) { |
1146 gap = GapAt(block->first_instruction_index()); | 1146 gap = GapAt(block->first_instruction_index()); |
1147 } else { | 1147 } else { |
1148 ASSERT(pred->end()->SecondSuccessor() == NULL); | 1148 DCHECK(pred->end()->SecondSuccessor() == NULL); |
1149 gap = GetLastGap(pred); | 1149 gap = GetLastGap(pred); |
1150 | 1150 |
1151 // We are going to insert a move before the branch instruction. | 1151 // We are going to insert a move before the branch instruction. |
1152 // Some branch instructions (e.g. loops' back edges) | 1152 // Some branch instructions (e.g. loops' back edges) |
1153 // can potentially cause a GC so they have a pointer map. | 1153 // can potentially cause a GC so they have a pointer map. |
1154 // By inserting a move we essentially create a copy of a | 1154 // By inserting a move we essentially create a copy of a |
1155 // value which is invisible to PopulatePointerMaps(), because we store | 1155 // value which is invisible to PopulatePointerMaps(), because we store |
1156 // it into a location different from the operand of a live range | 1156 // it into a location different from the operand of a live range |
1157 // covering a branch instruction. | 1157 // covering a branch instruction. |
1158 // Thus we need to manually record a pointer. | 1158 // Thus we need to manually record a pointer. |
(...skipping 128 matching lines...) |
1287 chunk()->zone()); | 1287 chunk()->zone()); |
1288 for (int j = 0; j < move->move_operands()->length(); ++j) { | 1288 for (int j = 0; j < move->move_operands()->length(); ++j) { |
1289 LOperand* to = move->move_operands()->at(j).destination(); | 1289 LOperand* to = move->move_operands()->at(j).destination(); |
1290 if (to->IsUnallocated() && | 1290 if (to->IsUnallocated() && |
1291 LUnallocated::cast(to)->virtual_register() == phi->id()) { | 1291 LUnallocated::cast(to)->virtual_register() == phi->id()) { |
1292 hint = move->move_operands()->at(j).source(); | 1292 hint = move->move_operands()->at(j).source(); |
1293 phi_operand = to; | 1293 phi_operand = to; |
1294 break; | 1294 break; |
1295 } | 1295 } |
1296 } | 1296 } |
1297 ASSERT(hint != NULL); | 1297 DCHECK(hint != NULL); |
1298 | 1298 |
1299 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( | 1299 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( |
1300 block->first_instruction_index()); | 1300 block->first_instruction_index()); |
1301 Define(block_start, phi_operand, hint); | 1301 Define(block_start, phi_operand, hint); |
1302 } | 1302 } |
1303 | 1303 |
1304 // Now live is live_in for this block except not including values live | 1304 // Now live is live_in for this block except not including values live |
1305 // out on backward successor edges. | 1305 // out on backward successor edges. |
1306 live_in_sets_[block_id] = live; | 1306 live_in_sets_[block_id] = live; |
1307 | 1307 |
(...skipping 26 matching lines...) |
1334 if (block_id == 0) { | 1334 if (block_id == 0) { |
1335 BitVector::Iterator iterator(live); | 1335 BitVector::Iterator iterator(live); |
1336 bool found = false; | 1336 bool found = false; |
1337 while (!iterator.Done()) { | 1337 while (!iterator.Done()) { |
1338 found = true; | 1338 found = true; |
1339 int operand_index = iterator.Current(); | 1339 int operand_index = iterator.Current(); |
1340 if (chunk_->info()->IsStub()) { | 1340 if (chunk_->info()->IsStub()) { |
1341 CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey(); | 1341 CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey(); |
1342 PrintF("Function: %s\n", CodeStub::MajorName(major_key, false)); | 1342 PrintF("Function: %s\n", CodeStub::MajorName(major_key, false)); |
1343 } else { | 1343 } else { |
1344 ASSERT(chunk_->info()->IsOptimizing()); | 1344 DCHECK(chunk_->info()->IsOptimizing()); |
1345 AllowHandleDereference allow_deref; | 1345 AllowHandleDereference allow_deref; |
1346 PrintF("Function: %s\n", | 1346 PrintF("Function: %s\n", |
1347 chunk_->info()->function()->debug_name()->ToCString().get()); | 1347 chunk_->info()->function()->debug_name()->ToCString().get()); |
1348 } | 1348 } |
1349 PrintF("Value %d used before first definition!\n", operand_index); | 1349 PrintF("Value %d used before first definition!\n", operand_index); |
1350 LiveRange* range = LiveRangeFor(operand_index); | 1350 LiveRange* range = LiveRangeFor(operand_index); |
1351 PrintF("First use is at %d\n", range->first_pos()->pos().Value()); | 1351 PrintF("First use is at %d\n", range->first_pos()->pos().Value()); |
1352 iterator.Advance(); | 1352 iterator.Advance(); |
1353 } | 1353 } |
1354 ASSERT(!found); | 1354 DCHECK(!found); |
1355 } | 1355 } |
1356 #endif | 1356 #endif |
1357 } | 1357 } |
1358 | 1358 |
1359 for (int i = 0; i < live_ranges_.length(); ++i) { | 1359 for (int i = 0; i < live_ranges_.length(); ++i) { |
1360 if (live_ranges_[i] != NULL) { | 1360 if (live_ranges_[i] != NULL) { |
1361 live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id()); | 1361 live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id()); |
1362 } | 1362 } |
1363 } | 1363 } |
1364 } | 1364 } |
1365 | 1365 |
1366 | 1366 |
1367 bool LAllocator::SafePointsAreInOrder() const { | 1367 bool LAllocator::SafePointsAreInOrder() const { |
1368 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); | 1368 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); |
1369 int safe_point = 0; | 1369 int safe_point = 0; |
1370 for (int i = 0; i < pointer_maps->length(); ++i) { | 1370 for (int i = 0; i < pointer_maps->length(); ++i) { |
1371 LPointerMap* map = pointer_maps->at(i); | 1371 LPointerMap* map = pointer_maps->at(i); |
1372 if (safe_point > map->lithium_position()) return false; | 1372 if (safe_point > map->lithium_position()) return false; |
1373 safe_point = map->lithium_position(); | 1373 safe_point = map->lithium_position(); |
1374 } | 1374 } |
1375 return true; | 1375 return true; |
1376 } | 1376 } |
1377 | 1377 |
1378 | 1378 |
1379 void LAllocator::PopulatePointerMaps() { | 1379 void LAllocator::PopulatePointerMaps() { |
1380 LAllocatorPhase phase("L_Populate pointer maps", this); | 1380 LAllocatorPhase phase("L_Populate pointer maps", this); |
1381 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); | 1381 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); |
1382 | 1382 |
1383 ASSERT(SafePointsAreInOrder()); | 1383 DCHECK(SafePointsAreInOrder()); |
1384 | 1384 |
1385 // Iterate over all safe point positions and record a pointer | 1385 // Iterate over all safe point positions and record a pointer |
1386 // for all spilled live ranges at this point. | 1386 // for all spilled live ranges at this point. |
1387 int first_safe_point_index = 0; | 1387 int first_safe_point_index = 0; |
1388 int last_range_start = 0; | 1388 int last_range_start = 0; |
1389 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) { | 1389 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) { |
1390 LiveRange* range = live_ranges()->at(range_idx); | 1390 LiveRange* range = live_ranges()->at(range_idx); |
1391 if (range == NULL) continue; | 1391 if (range == NULL) continue; |
1392 // Iterate over the first parts of multi-part live ranges. | 1392 // Iterate over the first parts of multi-part live ranges. |
1393 if (range->parent() != NULL) continue; | 1393 if (range->parent() != NULL) continue; |
1394 // Skip non-pointer values. | 1394 // Skip non-pointer values. |
1395 if (!HasTaggedValue(range->id())) continue; | 1395 if (!HasTaggedValue(range->id())) continue; |
1396 // Skip empty live ranges. | 1396 // Skip empty live ranges. |
1397 if (range->IsEmpty()) continue; | 1397 if (range->IsEmpty()) continue; |
1398 | 1398 |
1399 // Find the extent of the range and its children. | 1399 // Find the extent of the range and its children. |
1400 int start = range->Start().InstructionIndex(); | 1400 int start = range->Start().InstructionIndex(); |
1401 int end = 0; | 1401 int end = 0; |
1402 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) { | 1402 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) { |
1403 LifetimePosition this_end = cur->End(); | 1403 LifetimePosition this_end = cur->End(); |
1404 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex(); | 1404 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex(); |
1405 ASSERT(cur->Start().InstructionIndex() >= start); | 1405 DCHECK(cur->Start().InstructionIndex() >= start); |
1406 } | 1406 } |
1407 | 1407 |
1408 // Most of the ranges are in order, but not all. Keep an eye on when | 1408 // Most of the ranges are in order, but not all. Keep an eye on when |
1409 // they step backwards and reset the first_safe_point_index so we don't | 1409 // they step backwards and reset the first_safe_point_index so we don't |
1410 // miss any safe points. | 1410 // miss any safe points. |
1411 if (start < last_range_start) { | 1411 if (start < last_range_start) { |
1412 first_safe_point_index = 0; | 1412 first_safe_point_index = 0; |
1413 } | 1413 } |
1414 last_range_start = start; | 1414 last_range_start = start; |
1415 | 1415 |
(...skipping 33 matching lines...) |
1449 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n", | 1449 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n", |
1450 range->id(), range->spill_start_index(), safe_point); | 1450 range->id(), range->spill_start_index(), safe_point); |
1451 map->RecordPointer(range->GetSpillOperand(), chunk()->zone()); | 1451 map->RecordPointer(range->GetSpillOperand(), chunk()->zone()); |
1452 } | 1452 } |
1453 | 1453 |
1454 if (!cur->IsSpilled()) { | 1454 if (!cur->IsSpilled()) { |
1455 TraceAlloc("Pointer in register for range %d (start at %d) " | 1455 TraceAlloc("Pointer in register for range %d (start at %d) " |
1456 "at safe point %d\n", | 1456 "at safe point %d\n", |
1457 cur->id(), cur->Start().Value(), safe_point); | 1457 cur->id(), cur->Start().Value(), safe_point); |
1458 LOperand* operand = cur->CreateAssignedOperand(chunk()->zone()); | 1458 LOperand* operand = cur->CreateAssignedOperand(chunk()->zone()); |
1459 ASSERT(!operand->IsStackSlot()); | 1459 DCHECK(!operand->IsStackSlot()); |
1460 map->RecordPointer(operand, chunk()->zone()); | 1460 map->RecordPointer(operand, chunk()->zone()); |
1461 } | 1461 } |
1462 } | 1462 } |
1463 } | 1463 } |
1464 } | 1464 } |
1465 | 1465 |
1466 | 1466 |
1467 void LAllocator::AllocateGeneralRegisters() { | 1467 void LAllocator::AllocateGeneralRegisters() { |
1468 LAllocatorPhase phase("L_Allocate general registers", this); | 1468 LAllocatorPhase phase("L_Allocate general registers", this); |
1469 num_registers_ = Register::NumAllocatableRegisters(); | 1469 num_registers_ = Register::NumAllocatableRegisters(); |
1470 mode_ = GENERAL_REGISTERS; | 1470 mode_ = GENERAL_REGISTERS; |
1471 AllocateRegisters(); | 1471 AllocateRegisters(); |
1472 } | 1472 } |
1473 | 1473 |
1474 | 1474 |
1475 void LAllocator::AllocateDoubleRegisters() { | 1475 void LAllocator::AllocateDoubleRegisters() { |
1476 LAllocatorPhase phase("L_Allocate double registers", this); | 1476 LAllocatorPhase phase("L_Allocate double registers", this); |
1477 num_registers_ = DoubleRegister::NumAllocatableRegisters(); | 1477 num_registers_ = DoubleRegister::NumAllocatableRegisters(); |
1478 mode_ = DOUBLE_REGISTERS; | 1478 mode_ = DOUBLE_REGISTERS; |
1479 AllocateRegisters(); | 1479 AllocateRegisters(); |
1480 } | 1480 } |
1481 | 1481 |
1482 | 1482 |
1483 void LAllocator::AllocateRegisters() { | 1483 void LAllocator::AllocateRegisters() { |
1484 ASSERT(unhandled_live_ranges_.is_empty()); | 1484 DCHECK(unhandled_live_ranges_.is_empty()); |
1485 | 1485 |
1486 for (int i = 0; i < live_ranges_.length(); ++i) { | 1486 for (int i = 0; i < live_ranges_.length(); ++i) { |
1487 if (live_ranges_[i] != NULL) { | 1487 if (live_ranges_[i] != NULL) { |
1488 if (live_ranges_[i]->Kind() == mode_) { | 1488 if (live_ranges_[i]->Kind() == mode_) { |
1489 AddToUnhandledUnsorted(live_ranges_[i]); | 1489 AddToUnhandledUnsorted(live_ranges_[i]); |
1490 } | 1490 } |
1491 } | 1491 } |
1492 } | 1492 } |
1493 SortUnhandled(); | 1493 SortUnhandled(); |
1494 ASSERT(UnhandledIsSorted()); | 1494 DCHECK(UnhandledIsSorted()); |
1495 | 1495 |
1496 ASSERT(reusable_slots_.is_empty()); | 1496 DCHECK(reusable_slots_.is_empty()); |
1497 ASSERT(active_live_ranges_.is_empty()); | 1497 DCHECK(active_live_ranges_.is_empty()); |
1498 ASSERT(inactive_live_ranges_.is_empty()); | 1498 DCHECK(inactive_live_ranges_.is_empty()); |
1499 | 1499 |
1500 if (mode_ == DOUBLE_REGISTERS) { | 1500 if (mode_ == DOUBLE_REGISTERS) { |
1501 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) { | 1501 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) { |
1502 LiveRange* current = fixed_double_live_ranges_.at(i); | 1502 LiveRange* current = fixed_double_live_ranges_.at(i); |
1503 if (current != NULL) { | 1503 if (current != NULL) { |
1504 AddToInactive(current); | 1504 AddToInactive(current); |
1505 } | 1505 } |
1506 } | 1506 } |
1507 } else { | 1507 } else { |
1508 ASSERT(mode_ == GENERAL_REGISTERS); | 1508 DCHECK(mode_ == GENERAL_REGISTERS); |
1509 for (int i = 0; i < fixed_live_ranges_.length(); ++i) { | 1509 for (int i = 0; i < fixed_live_ranges_.length(); ++i) { |
1510 LiveRange* current = fixed_live_ranges_.at(i); | 1510 LiveRange* current = fixed_live_ranges_.at(i); |
1511 if (current != NULL) { | 1511 if (current != NULL) { |
1512 AddToInactive(current); | 1512 AddToInactive(current); |
1513 } | 1513 } |
1514 } | 1514 } |
1515 } | 1515 } |
1516 | 1516 |
1517 while (!unhandled_live_ranges_.is_empty()) { | 1517 while (!unhandled_live_ranges_.is_empty()) { |
1518 ASSERT(UnhandledIsSorted()); | 1518 DCHECK(UnhandledIsSorted()); |
1519 LiveRange* current = unhandled_live_ranges_.RemoveLast(); | 1519 LiveRange* current = unhandled_live_ranges_.RemoveLast(); |
1520 ASSERT(UnhandledIsSorted()); | 1520 DCHECK(UnhandledIsSorted()); |
1521 LifetimePosition position = current->Start(); | 1521 LifetimePosition position = current->Start(); |
1522 #ifdef DEBUG | 1522 #ifdef DEBUG |
1523 allocation_finger_ = position; | 1523 allocation_finger_ = position; |
1524 #endif | 1524 #endif |
1525 TraceAlloc("Processing interval %d start=%d\n", | 1525 TraceAlloc("Processing interval %d start=%d\n", |
1526 current->id(), | 1526 current->id(), |
1527 position.Value()); | 1527 position.Value()); |
1528 | 1528 |
1529 if (current->HasAllocatedSpillOperand()) { | 1529 if (current->HasAllocatedSpillOperand()) { |
1530 TraceAlloc("Live range %d already has a spill operand\n", current->id()); | 1530 TraceAlloc("Live range %d already has a spill operand\n", current->id()); |
1531 LifetimePosition next_pos = position; | 1531 LifetimePosition next_pos = position; |
1532 if (IsGapAt(next_pos.InstructionIndex())) { | 1532 if (IsGapAt(next_pos.InstructionIndex())) { |
1533 next_pos = next_pos.NextInstruction(); | 1533 next_pos = next_pos.NextInstruction(); |
1534 } | 1534 } |
1535 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); | 1535 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); |
1536 // If the range already has a spill operand and it doesn't need a | 1536 // If the range already has a spill operand and it doesn't need a |
1537 // register immediately, split it and spill the first part of the range. | 1537 // register immediately, split it and spill the first part of the range. |
1538 if (pos == NULL) { | 1538 if (pos == NULL) { |
1539 Spill(current); | 1539 Spill(current); |
1540 continue; | 1540 continue; |
1541 } else if (pos->pos().Value() > | 1541 } else if (pos->pos().Value() > |
1542 current->Start().NextInstruction().Value()) { | 1542 current->Start().NextInstruction().Value()) { |
1543 // Do not spill live range eagerly if use position that can benefit from | 1543 // Do not spill live range eagerly if use position that can benefit from |
1544 // the register is too close to the start of live range. | 1544 // the register is too close to the start of live range. |
1545 SpillBetween(current, current->Start(), pos->pos()); | 1545 SpillBetween(current, current->Start(), pos->pos()); |
1546 if (!AllocationOk()) return; | 1546 if (!AllocationOk()) return; |
1547 ASSERT(UnhandledIsSorted()); | 1547 DCHECK(UnhandledIsSorted()); |
1548 continue; | 1548 continue; |
1549 } | 1549 } |
1550 } | 1550 } |
1551 | 1551 |
1552 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 1552 for (int i = 0; i < active_live_ranges_.length(); ++i) { |
1553 LiveRange* cur_active = active_live_ranges_.at(i); | 1553 LiveRange* cur_active = active_live_ranges_.at(i); |
1554 if (cur_active->End().Value() <= position.Value()) { | 1554 if (cur_active->End().Value() <= position.Value()) { |
1555 ActiveToHandled(cur_active); | 1555 ActiveToHandled(cur_active); |
1556 --i; // The live range was removed from the list of active live ranges. | 1556 --i; // The live range was removed from the list of active live ranges. |
1557 } else if (!cur_active->Covers(position)) { | 1557 } else if (!cur_active->Covers(position)) { |
1558 ActiveToInactive(cur_active); | 1558 ActiveToInactive(cur_active); |
1559 --i; // The live range was removed from the list of active live ranges. | 1559 --i; // The live range was removed from the list of active live ranges. |
1560 } | 1560 } |
1561 } | 1561 } |
1562 | 1562 |
1563 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { | 1563 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { |
1564 LiveRange* cur_inactive = inactive_live_ranges_.at(i); | 1564 LiveRange* cur_inactive = inactive_live_ranges_.at(i); |
1565 if (cur_inactive->End().Value() <= position.Value()) { | 1565 if (cur_inactive->End().Value() <= position.Value()) { |
1566 InactiveToHandled(cur_inactive); | 1566 InactiveToHandled(cur_inactive); |
1567 --i; // Live range was removed from the list of inactive live ranges. | 1567 --i; // Live range was removed from the list of inactive live ranges. |
1568 } else if (cur_inactive->Covers(position)) { | 1568 } else if (cur_inactive->Covers(position)) { |
1569 InactiveToActive(cur_inactive); | 1569 InactiveToActive(cur_inactive); |
1570 --i; // Live range was removed from the list of inactive live ranges. | 1570 --i; // Live range was removed from the list of inactive live ranges. |
1571 } | 1571 } |
1572 } | 1572 } |
1573 | 1573 |
1574 ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled()); | 1574 DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled()); |
1575 | 1575 |
1576 bool result = TryAllocateFreeReg(current); | 1576 bool result = TryAllocateFreeReg(current); |
1577 if (!AllocationOk()) return; | 1577 if (!AllocationOk()) return; |
1578 | 1578 |
1579 if (!result) AllocateBlockedReg(current); | 1579 if (!result) AllocateBlockedReg(current); |
1580 if (!AllocationOk()) return; | 1580 if (!AllocationOk()) return; |
1581 | 1581 |
1582 if (current->HasRegisterAssigned()) { | 1582 if (current->HasRegisterAssigned()) { |
1583 AddToActive(current); | 1583 AddToActive(current); |
1584 } | 1584 } |
(...skipping 53 matching lines...) |
1638 | 1638 |
1639 | 1639 |
1640 void LAllocator::AddToInactive(LiveRange* range) { | 1640 void LAllocator::AddToInactive(LiveRange* range) { |
1641 TraceAlloc("Add live range %d to inactive\n", range->id()); | 1641 TraceAlloc("Add live range %d to inactive\n", range->id()); |
1642 inactive_live_ranges_.Add(range, zone()); | 1642 inactive_live_ranges_.Add(range, zone()); |
1643 } | 1643 } |
1644 | 1644 |
1645 | 1645 |
1646 void LAllocator::AddToUnhandledSorted(LiveRange* range) { | 1646 void LAllocator::AddToUnhandledSorted(LiveRange* range) { |
1647 if (range == NULL || range->IsEmpty()) return; | 1647 if (range == NULL || range->IsEmpty()) return; |
1648 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled()); | 1648 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
1649 ASSERT(allocation_finger_.Value() <= range->Start().Value()); | 1649 DCHECK(allocation_finger_.Value() <= range->Start().Value()); |
1650 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) { | 1650 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) { |
1651 LiveRange* cur_range = unhandled_live_ranges_.at(i); | 1651 LiveRange* cur_range = unhandled_live_ranges_.at(i); |
1652 if (range->ShouldBeAllocatedBefore(cur_range)) { | 1652 if (range->ShouldBeAllocatedBefore(cur_range)) { |
1653 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1); | 1653 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1); |
1654 unhandled_live_ranges_.InsertAt(i + 1, range, zone()); | 1654 unhandled_live_ranges_.InsertAt(i + 1, range, zone()); |
1655 ASSERT(UnhandledIsSorted()); | 1655 DCHECK(UnhandledIsSorted()); |
1656 return; | 1656 return; |
1657 } | 1657 } |
1658 } | 1658 } |
1659 TraceAlloc("Add live range %d to unhandled at start\n", range->id()); | 1659 TraceAlloc("Add live range %d to unhandled at start\n", range->id()); |
1660 unhandled_live_ranges_.InsertAt(0, range, zone()); | 1660 unhandled_live_ranges_.InsertAt(0, range, zone()); |
1661 ASSERT(UnhandledIsSorted()); | 1661 DCHECK(UnhandledIsSorted()); |
1662 } | 1662 } |
1663 | 1663 |
1664 | 1664 |
1665 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) { | 1665 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) { |
1666 if (range == NULL || range->IsEmpty()) return; | 1666 if (range == NULL || range->IsEmpty()) return; |
1667 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled()); | 1667 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
1668 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id()); | 1668 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id()); |
1669 unhandled_live_ranges_.Add(range, zone()); | 1669 unhandled_live_ranges_.Add(range, zone()); |
1670 } | 1670 } |
1671 | 1671 |
1672 | 1672 |
1673 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) { | 1673 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) { |
1674 ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) || | 1674 DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) || |
1675 !(*b)->ShouldBeAllocatedBefore(*a)); | 1675 !(*b)->ShouldBeAllocatedBefore(*a)); |
1676 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1; | 1676 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1; |
1677 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1; | 1677 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1; |
1678 return (*a)->id() - (*b)->id(); | 1678 return (*a)->id() - (*b)->id(); |
1679 } | 1679 } |
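A sketch of the effect of this comparator, under the simplifying assumption that ShouldBeAllocatedBefore reduces to comparing start positions (the real predicate also breaks ties on first use positions, as seen earlier); std::sort and std::vector stand in for the ZoneList sort used by SortUnhandled:

#include <algorithm>
#include <vector>

struct Range { int start; int id; };  // Simplified stand-in for LiveRange.

// true => a is placed earlier in the array, i.e. it will be processed later.
static bool SortsEarlier(const Range& a, const Range& b) {
  if (a.start != b.start) return a.start > b.start;
  return a.id > b.id;
}

void SortUnhandledSketch(std::vector<Range>* unhandled) {
  std::sort(unhandled->begin(), unhandled->end(), SortsEarlier);
  // unhandled->back() now holds the range with the smallest start, so the
  // allocator can repeatedly pop from the end, as RemoveLast does below.
}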
1680 | 1680 |
1681 | 1681 |
1682 // Sort the unhandled live ranges so that the ranges to be processed first are | 1682 // Sort the unhandled live ranges so that the ranges to be processed first are |
1683 // at the end of the array list. This is convenient for the register allocation | 1683 // at the end of the array list. This is convenient for the register allocation |
1684 // algorithm because it is efficient to remove elements from the end. | 1684 // algorithm because it is efficient to remove elements from the end. |
(...skipping 33 matching lines...) |
1718 range->TopLevel()->Start().Value()) { | 1718 range->TopLevel()->Start().Value()) { |
1719 return NULL; | 1719 return NULL; |
1720 } | 1720 } |
1721 LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand(); | 1721 LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand(); |
1722 reusable_slots_.Remove(0); | 1722 reusable_slots_.Remove(0); |
1723 return result; | 1723 return result; |
1724 } | 1724 } |
1725 | 1725 |
1726 | 1726 |
1727 void LAllocator::ActiveToHandled(LiveRange* range) { | 1727 void LAllocator::ActiveToHandled(LiveRange* range) { |
1728 ASSERT(active_live_ranges_.Contains(range)); | 1728 DCHECK(active_live_ranges_.Contains(range)); |
1729 active_live_ranges_.RemoveElement(range); | 1729 active_live_ranges_.RemoveElement(range); |
1730 TraceAlloc("Moving live range %d from active to handled\n", range->id()); | 1730 TraceAlloc("Moving live range %d from active to handled\n", range->id()); |
1731 FreeSpillSlot(range); | 1731 FreeSpillSlot(range); |
1732 } | 1732 } |
1733 | 1733 |
1734 | 1734 |
1735 void LAllocator::ActiveToInactive(LiveRange* range) { | 1735 void LAllocator::ActiveToInactive(LiveRange* range) { |
1736 ASSERT(active_live_ranges_.Contains(range)); | 1736 DCHECK(active_live_ranges_.Contains(range)); |
1737 active_live_ranges_.RemoveElement(range); | 1737 active_live_ranges_.RemoveElement(range); |
1738 inactive_live_ranges_.Add(range, zone()); | 1738 inactive_live_ranges_.Add(range, zone()); |
1739 TraceAlloc("Moving live range %d from active to inactive\n", range->id()); | 1739 TraceAlloc("Moving live range %d from active to inactive\n", range->id()); |
1740 } | 1740 } |
1741 | 1741 |
1742 | 1742 |
1743 void LAllocator::InactiveToHandled(LiveRange* range) { | 1743 void LAllocator::InactiveToHandled(LiveRange* range) { |
1744 ASSERT(inactive_live_ranges_.Contains(range)); | 1744 DCHECK(inactive_live_ranges_.Contains(range)); |
1745 inactive_live_ranges_.RemoveElement(range); | 1745 inactive_live_ranges_.RemoveElement(range); |
1746 TraceAlloc("Moving live range %d from inactive to handled\n", range->id()); | 1746 TraceAlloc("Moving live range %d from inactive to handled\n", range->id()); |
1747 FreeSpillSlot(range); | 1747 FreeSpillSlot(range); |
1748 } | 1748 } |
1749 | 1749 |
1750 | 1750 |
1751 void LAllocator::InactiveToActive(LiveRange* range) { | 1751 void LAllocator::InactiveToActive(LiveRange* range) { |
1752 ASSERT(inactive_live_ranges_.Contains(range)); | 1752 DCHECK(inactive_live_ranges_.Contains(range)); |
1753 inactive_live_ranges_.RemoveElement(range); | 1753 inactive_live_ranges_.RemoveElement(range); |
1754 active_live_ranges_.Add(range, zone()); | 1754 active_live_ranges_.Add(range, zone()); |
1755 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); | 1755 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); |
1756 } | 1756 } |
1757 | 1757 |
1758 | 1758 |
1759 // TryAllocateFreeReg and AllocateBlockedReg assume this | 1759 // TryAllocateFreeReg and AllocateBlockedReg assume this |
1760 // when allocating local arrays. | 1760 // when allocating local arrays. |
1761 STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >= | 1761 STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >= |
1762 Register::kMaxNumAllocatableRegisters); | 1762 Register::kMaxNumAllocatableRegisters); |
1763 | 1763 |
1764 | 1764 |
1765 bool LAllocator::TryAllocateFreeReg(LiveRange* current) { | 1765 bool LAllocator::TryAllocateFreeReg(LiveRange* current) { |
1766 LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters]; | 1766 LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters]; |
1767 | 1767 |
1768 for (int i = 0; i < num_registers_; i++) { | 1768 for (int i = 0; i < num_registers_; i++) { |
1769 free_until_pos[i] = LifetimePosition::MaxPosition(); | 1769 free_until_pos[i] = LifetimePosition::MaxPosition(); |
1770 } | 1770 } |
1771 | 1771 |
1772 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 1772 for (int i = 0; i < active_live_ranges_.length(); ++i) { |
1773 LiveRange* cur_active = active_live_ranges_.at(i); | 1773 LiveRange* cur_active = active_live_ranges_.at(i); |
1774 free_until_pos[cur_active->assigned_register()] = | 1774 free_until_pos[cur_active->assigned_register()] = |
1775 LifetimePosition::FromInstructionIndex(0); | 1775 LifetimePosition::FromInstructionIndex(0); |
1776 } | 1776 } |
1777 | 1777 |
1778 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { | 1778 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { |
1779 LiveRange* cur_inactive = inactive_live_ranges_.at(i); | 1779 LiveRange* cur_inactive = inactive_live_ranges_.at(i); |
1780 ASSERT(cur_inactive->End().Value() > current->Start().Value()); | 1780 DCHECK(cur_inactive->End().Value() > current->Start().Value()); |
1781 LifetimePosition next_intersection = | 1781 LifetimePosition next_intersection = |
1782 cur_inactive->FirstIntersection(current); | 1782 cur_inactive->FirstIntersection(current); |
1783 if (!next_intersection.IsValid()) continue; | 1783 if (!next_intersection.IsValid()) continue; |
1784 int cur_reg = cur_inactive->assigned_register(); | 1784 int cur_reg = cur_inactive->assigned_register(); |
1785 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); | 1785 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); |
1786 } | 1786 } |
1787 | 1787 |
1788 LOperand* hint = current->FirstHint(); | 1788 LOperand* hint = current->FirstHint(); |
1789 if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) { | 1789 if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) { |
1790 int register_index = hint->index(); | 1790 int register_index = hint->index(); |
(...skipping 33 matching lines...)
1824 // Register reg is available at the range start but becomes blocked before | 1824 // Register reg is available at the range start but becomes blocked before |
1825 // the range end. Split current at the position where it becomes blocked. | 1825 // the range end. Split current at the position where it becomes blocked. |
1826 LiveRange* tail = SplitRangeAt(current, pos); | 1826 LiveRange* tail = SplitRangeAt(current, pos); |
1827 if (!AllocationOk()) return false; | 1827 if (!AllocationOk()) return false; |
1828 AddToUnhandledSorted(tail); | 1828 AddToUnhandledSorted(tail); |
1829 } | 1829 } |
1830 | 1830 |
1831 | 1831 |
1832 // Register reg is available at the range start and is free until | 1832 // Register reg is available at the range start and is free until |
1833 // the range end. | 1833 // the range end. |
1834 ASSERT(pos.Value() >= current->End().Value()); | 1834 DCHECK(pos.Value() >= current->End().Value()); |
1835 TraceAlloc("Assigning free reg %s to live range %d\n", | 1835 TraceAlloc("Assigning free reg %s to live range %d\n", |
1836 RegisterName(reg), | 1836 RegisterName(reg), |
1837 current->id()); | 1837 current->id()); |
1838 SetLiveRangeAssignedRegister(current, reg); | 1838 SetLiveRangeAssignedRegister(current, reg); |
1839 | 1839 |
1840 return true; | 1840 return true; |
1841 } | 1841 } |
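TryAllocateFreeReg above computes, for each register, the position up to which it stays free and then picks the register whose free window is longest, splitting `current` if that window ends before the range does. The following is a simplified, self-contained sketch of that "free until" scan under assumed toy types (`Interval`, integer positions); the real allocator additionally folds in register hints and intersections with inactive ranges.

#include <climits>
#include <cstdio>
#include <vector>

// Illustrative half-open interval [start, end) with its assigned register.
struct Interval {
  int start, end, reg;
};

// Returns the register chosen for `current`, or -1 if none is free at its
// start. `num_regs` plays the role of num_registers_ in the allocator.
int TryAllocateFreeReg(const Interval& current,
                       const std::vector<Interval>& active, int num_regs) {
  std::vector<int> free_until(num_regs, INT_MAX);

  // Registers held by active ranges are not free at all (position 0).
  for (const Interval& a : active) free_until[a.reg] = 0;

  // Pick the register that stays free the longest.
  int best = 0;
  for (int r = 1; r < num_regs; ++r) {
    if (free_until[r] > free_until[best]) best = r;
  }

  if (free_until[best] <= current.start) return -1;  // nothing free: caller spills
  // If free_until[best] were smaller than current.end, the real allocator
  // would split `current` at that position; the head still gets the register.
  return best;
}

int main() {
  std::vector<Interval> active = {{0, 10, 0}, {4, 20, 1}};
  Interval current = {6, 16, -1};
  std::printf("assigned reg: %d\n", TryAllocateFreeReg(current, active, 4));
  return 0;
}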
1842 | 1842 |
1843 | 1843 |
1844 void LAllocator::AllocateBlockedReg(LiveRange* current) { | 1844 void LAllocator::AllocateBlockedReg(LiveRange* current) { |
(...skipping 25 matching lines...)
1870 if (next_use == NULL) { | 1870 if (next_use == NULL) { |
1871 use_pos[cur_reg] = range->End(); | 1871 use_pos[cur_reg] = range->End(); |
1872 } else { | 1872 } else { |
1873 use_pos[cur_reg] = next_use->pos(); | 1873 use_pos[cur_reg] = next_use->pos(); |
1874 } | 1874 } |
1875 } | 1875 } |
1876 } | 1876 } |
1877 | 1877 |
1878 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { | 1878 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { |
1879 LiveRange* range = inactive_live_ranges_.at(i); | 1879 LiveRange* range = inactive_live_ranges_.at(i); |
1880 ASSERT(range->End().Value() > current->Start().Value()); | 1880 DCHECK(range->End().Value() > current->Start().Value()); |
1881 LifetimePosition next_intersection = range->FirstIntersection(current); | 1881 LifetimePosition next_intersection = range->FirstIntersection(current); |
1882 if (!next_intersection.IsValid()) continue; | 1882 if (!next_intersection.IsValid()) continue; |
1883 int cur_reg = range->assigned_register(); | 1883 int cur_reg = range->assigned_register(); |
1884 if (range->IsFixed()) { | 1884 if (range->IsFixed()) { |
1885 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); | 1885 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); |
1886 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); | 1886 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); |
1887 } else { | 1887 } else { |
1888 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); | 1888 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); |
1889 } | 1889 } |
1890 } | 1890 } |
(...skipping 18 matching lines...)
1909 // Register becomes blocked before the current range end. Split before that | 1909 // Register becomes blocked before the current range end. Split before that |
1910 // position. | 1910 // position. |
1911 LiveRange* tail = SplitBetween(current, | 1911 LiveRange* tail = SplitBetween(current, |
1912 current->Start(), | 1912 current->Start(), |
1913 block_pos[reg].InstructionStart()); | 1913 block_pos[reg].InstructionStart()); |
1914 if (!AllocationOk()) return; | 1914 if (!AllocationOk()) return; |
1915 AddToUnhandledSorted(tail); | 1915 AddToUnhandledSorted(tail); |
1916 } | 1916 } |
1917 | 1917 |
1918 // Register reg is not blocked for the whole range. | 1918 // Register reg is not blocked for the whole range. |
1919 ASSERT(block_pos[reg].Value() >= current->End().Value()); | 1919 DCHECK(block_pos[reg].Value() >= current->End().Value()); |
1920 TraceAlloc("Assigning blocked reg %s to live range %d\n", | 1920 TraceAlloc("Assigning blocked reg %s to live range %d\n", |
1921 RegisterName(reg), | 1921 RegisterName(reg), |
1922 current->id()); | 1922 current->id()); |
1923 SetLiveRangeAssignedRegister(current, reg); | 1923 SetLiveRangeAssignedRegister(current, reg); |
1924 | 1924 |
1925 // This register was not free. Thus we need to find and spill | 1925 // This register was not free. Thus we need to find and spill |
1926 // parts of active and inactive live regions that use the same register | 1926 // parts of active and inactive live regions that use the same register |
1927 // at the same lifetime positions as current. | 1927 // at the same lifetime positions as current. |
1928 SplitAndSpillIntersecting(current); | 1928 SplitAndSpillIntersecting(current); |
1929 } | 1929 } |
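AllocateBlockedReg above chooses the register whose competing ranges can wait the longest for it, and spills `current` instead when every holder needs its register sooner. A condensed sketch of that decision with illustrative types (fixed ranges, which also cap `block_pos`, are left out):

#include <algorithm>
#include <climits>
#include <cstdio>
#include <vector>

struct ActiveRange {
  int reg;       // register the range currently holds
  int next_use;  // next position at which it actually needs that register
};

// Returns the register to take from its current holder for a range whose first
// register use is at `current_first_use`, or -1 if it is cheaper to spill the
// new range itself.
int PickBlockedReg(const std::vector<ActiveRange>& active, int num_regs,
                   int current_first_use) {
  std::vector<int> use_pos(num_regs, INT_MAX);
  for (const ActiveRange& a : active) {
    use_pos[a.reg] = std::min(use_pos[a.reg], a.next_use);
  }

  // Choose the register whose holder can wait the longest for it.
  int reg = 0;
  for (int r = 1; r < num_regs; ++r) {
    if (use_pos[r] > use_pos[reg]) reg = r;
  }

  // If every holder needs its register before the new range does, spilling the
  // new range is cheaper (the real code splits it before its first register use).
  if (use_pos[reg] < current_first_use) return -1;
  return reg;  // caller assigns `reg` and splits/spills the intersecting ranges
}

int main() {
  std::vector<ActiveRange> active = {{0, 12}, {1, 40}, {2, 7}};
  std::printf("evict the holder of reg %d\n", PickBlockedReg(active, 3, 10));
  return 0;
}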
(...skipping 26 matching lines...)
1956 | 1956 |
1957 // Try hoisting out to an outer loop. | 1957 // Try hoisting out to an outer loop. |
1958 loop_header = loop_header->parent_loop_header(); | 1958 loop_header = loop_header->parent_loop_header(); |
1959 } | 1959 } |
1960 | 1960 |
1961 return pos; | 1961 return pos; |
1962 } | 1962 } |
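The tail of FindOptimalSpillingPos above hoists the spill point out of enclosing loops so the reload does not land inside a hot loop body. A small sketch of that walk over a hypothetical block/loop-header structure, assuming (for illustration only) that the spill can be hoisted whenever the range is already live before the loop:

#include <cstdio>

// Illustrative basic block: each block knows its innermost enclosing loop.
struct Block {
  int loop_start;                   // position just before the loop body begins
  const Block* parent_loop_header;  // next enclosing loop, or nullptr
};

// Walk outward through the enclosing loops; whenever the range is already live
// before a loop, hoist the spill to just before that loop so the reload does
// not happen on every iteration.
int FindSpillingPos(const Block* loop_header, int range_start, int pos) {
  while (loop_header != nullptr) {
    if (range_start < loop_header->loop_start) {
      pos = loop_header->loop_start;
    }
    loop_header = loop_header->parent_loop_header;  // try hoisting further out
  }
  return pos;
}

int main() {
  Block outer = {10, nullptr};
  Block inner = {20, &outer};
  // A range live since position 5, about to be spilled at position 27 inside
  // the inner loop, gets its spill hoisted to position 10 (before both loops).
  std::printf("spill at %d\n", FindSpillingPos(&inner, 5, 27));
  return 0;
}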
1963 | 1963 |
1964 | 1964 |
1965 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) { | 1965 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) { |
1966 ASSERT(current->HasRegisterAssigned()); | 1966 DCHECK(current->HasRegisterAssigned()); |
1967 int reg = current->assigned_register(); | 1967 int reg = current->assigned_register(); |
1968 LifetimePosition split_pos = current->Start(); | 1968 LifetimePosition split_pos = current->Start(); |
1969 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 1969 for (int i = 0; i < active_live_ranges_.length(); ++i) { |
1970 LiveRange* range = active_live_ranges_[i]; | 1970 LiveRange* range = active_live_ranges_[i]; |
1971 if (range->assigned_register() == reg) { | 1971 if (range->assigned_register() == reg) { |
1972 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); | 1972 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); |
1973 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos); | 1973 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos); |
1974 if (next_pos == NULL) { | 1974 if (next_pos == NULL) { |
1975 SpillAfter(range, spill_pos); | 1975 SpillAfter(range, spill_pos); |
1976 } else { | 1976 } else { |
1977 // When spilling between spill_pos and next_pos, ensure that the range | 1977 // When spilling between spill_pos and next_pos, ensure that the range |
1978 // remains spilled at least until the start of the current live range. | 1978 // remains spilled at least until the start of the current live range. |
1979 // This guarantees that we will not introduce new unhandled ranges that | 1979 // This guarantees that we will not introduce new unhandled ranges that |
1980 // start before the current range, as that would violate the allocation | 1980 // start before the current range, as that would violate the allocation |
1981 // invariant and lead to an inconsistent state of the active and inactive | 1981 // invariant and lead to an inconsistent state of the active and inactive |
1982 // live ranges: ranges are allocated in order of their start positions, | 1982 // live ranges: ranges are allocated in order of their start positions, |
1983 // and are retired from active/inactive when the start of the current | 1983 // and are retired from active/inactive when the start of the current |
1984 // live range is larger than their end. | 1984 // live range is larger than their end. |
1985 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos()); | 1985 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos()); |
1986 } | 1986 } |
1987 if (!AllocationOk()) return; | 1987 if (!AllocationOk()) return; |
1988 ActiveToHandled(range); | 1988 ActiveToHandled(range); |
1989 --i; | 1989 --i; |
1990 } | 1990 } |
1991 } | 1991 } |
1992 | 1992 |
1993 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { | 1993 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { |
1994 LiveRange* range = inactive_live_ranges_[i]; | 1994 LiveRange* range = inactive_live_ranges_[i]; |
1995 ASSERT(range->End().Value() > current->Start().Value()); | 1995 DCHECK(range->End().Value() > current->Start().Value()); |
1996 if (range->assigned_register() == reg && !range->IsFixed()) { | 1996 if (range->assigned_register() == reg && !range->IsFixed()) { |
1997 LifetimePosition next_intersection = range->FirstIntersection(current); | 1997 LifetimePosition next_intersection = range->FirstIntersection(current); |
1998 if (next_intersection.IsValid()) { | 1998 if (next_intersection.IsValid()) { |
1999 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); | 1999 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); |
2000 if (next_pos == NULL) { | 2000 if (next_pos == NULL) { |
2001 SpillAfter(range, split_pos); | 2001 SpillAfter(range, split_pos); |
2002 } else { | 2002 } else { |
2003 next_intersection = Min(next_intersection, next_pos->pos()); | 2003 next_intersection = Min(next_intersection, next_pos->pos()); |
2004 SpillBetween(range, split_pos, next_intersection); | 2004 SpillBetween(range, split_pos, next_intersection); |
2005 } | 2005 } |
2006 if (!AllocationOk()) return; | 2006 if (!AllocationOk()) return; |
2007 InactiveToHandled(range); | 2007 InactiveToHandled(range); |
2008 --i; | 2008 --i; |
2009 } | 2009 } |
2010 } | 2010 } |
2011 } | 2011 } |
2012 } | 2012 } |
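SplitAndSpillIntersecting evicts every active or inactive range holding the register just assigned to `current`, spilling each at least until `current`'s start so the ordering invariant described in the comment above is preserved. A toy sketch of the eviction loop over active ranges (illustrative containers; SpillAfter/SpillBetweenUntil are reduced to log lines):

#include <cstdio>
#include <vector>

struct Range {
  int id, reg, next_reg_use;  // next_reg_use < 0 means "no further register use"
};

// Evict every active range that shares `reg` with the range being allocated,
// which starts at `current_start`. Ranges with no further register use are
// simply spilled; the others stay spilled at least until current_start, which
// preserves the "handle ranges in order of start position" invariant.
void SplitAndSpillIntersecting(std::vector<Range>& active, int reg,
                               int current_start) {
  for (size_t i = 0; i < active.size();) {
    if (active[i].reg != reg) {
      ++i;
      continue;
    }
    if (active[i].next_reg_use < 0) {
      std::printf("spill range %d after %d\n", active[i].id, current_start);
    } else {
      std::printf("spill range %d until at least %d, reload before %d\n",
                  active[i].id, current_start, active[i].next_reg_use);
    }
    active.erase(active.begin() + i);  // ActiveToHandled: drop from the active set
  }
}

int main() {
  std::vector<Range> active = {{1, 2, -1}, {2, 2, 31}, {3, 0, 12}};
  SplitAndSpillIntersecting(active, 2, 18);
  return 0;
}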
2013 | 2013 |
2014 | 2014 |
2015 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { | 2015 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { |
2016 return pos.IsInstructionStart() && | 2016 return pos.IsInstructionStart() && |
2017 InstructionAt(pos.InstructionIndex())->IsLabel(); | 2017 InstructionAt(pos.InstructionIndex())->IsLabel(); |
2018 } | 2018 } |
2019 | 2019 |
2020 | 2020 |
2021 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) { | 2021 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) { |
2022 ASSERT(!range->IsFixed()); | 2022 DCHECK(!range->IsFixed()); |
2023 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); | 2023 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); |
2024 | 2024 |
2025 if (pos.Value() <= range->Start().Value()) return range; | 2025 if (pos.Value() <= range->Start().Value()) return range; |
2026 | 2026 |
2027 // We can't properly connect live ranges if the split occurred at the end | 2027 // We can't properly connect live ranges if the split occurred at the end |
2028 // of a control instruction. | 2028 // of a control instruction. |
2029 ASSERT(pos.IsInstructionStart() || | 2029 DCHECK(pos.IsInstructionStart() || |
2030 !chunk_->instructions()->at(pos.InstructionIndex())->IsControl()); | 2030 !chunk_->instructions()->at(pos.InstructionIndex())->IsControl()); |
2031 | 2031 |
2032 int vreg = GetVirtualRegister(); | 2032 int vreg = GetVirtualRegister(); |
2033 if (!AllocationOk()) return NULL; | 2033 if (!AllocationOk()) return NULL; |
2034 LiveRange* result = LiveRangeFor(vreg); | 2034 LiveRange* result = LiveRangeFor(vreg); |
2035 range->SplitAt(pos, result, zone()); | 2035 range->SplitAt(pos, result, zone()); |
2036 return result; | 2036 return result; |
2037 } | 2037 } |
2038 | 2038 |
2039 | 2039 |
2040 LiveRange* LAllocator::SplitBetween(LiveRange* range, | 2040 LiveRange* LAllocator::SplitBetween(LiveRange* range, |
2041 LifetimePosition start, | 2041 LifetimePosition start, |
2042 LifetimePosition end) { | 2042 LifetimePosition end) { |
2043 ASSERT(!range->IsFixed()); | 2043 DCHECK(!range->IsFixed()); |
2044 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", | 2044 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", |
2045 range->id(), | 2045 range->id(), |
2046 start.Value(), | 2046 start.Value(), |
2047 end.Value()); | 2047 end.Value()); |
2048 | 2048 |
2049 LifetimePosition split_pos = FindOptimalSplitPos(start, end); | 2049 LifetimePosition split_pos = FindOptimalSplitPos(start, end); |
2050 ASSERT(split_pos.Value() >= start.Value()); | 2050 DCHECK(split_pos.Value() >= start.Value()); |
2051 return SplitRangeAt(range, split_pos); | 2051 return SplitRangeAt(range, split_pos); |
2052 } | 2052 } |
2053 | 2053 |
2054 | 2054 |
2055 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start, | 2055 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start, |
2056 LifetimePosition end) { | 2056 LifetimePosition end) { |
2057 int start_instr = start.InstructionIndex(); | 2057 int start_instr = start.InstructionIndex(); |
2058 int end_instr = end.InstructionIndex(); | 2058 int end_instr = end.InstructionIndex(); |
2059 ASSERT(start_instr <= end_instr); | 2059 DCHECK(start_instr <= end_instr); |
2060 | 2060 |
2061 // We have no choice | 2061 // We have no choice |
2062 if (start_instr == end_instr) return end; | 2062 if (start_instr == end_instr) return end; |
2063 | 2063 |
2064 HBasicBlock* start_block = GetBlock(start); | 2064 HBasicBlock* start_block = GetBlock(start); |
2065 HBasicBlock* end_block = GetBlock(end); | 2065 HBasicBlock* end_block = GetBlock(end); |
2066 | 2066 |
2067 if (end_block == start_block) { | 2067 if (end_block == start_block) { |
2068 // The interval is split in the same basic block. Split at the latest | 2068 // The interval is split in the same basic block. Split at the latest |
2069 // possible position. | 2069 // possible position. |
(...skipping 41 matching lines...)
2111 if (second_part->Start().Value() < end.Value()) { | 2111 if (second_part->Start().Value() < end.Value()) { |
2112 // The split result intersects with [start, end[. | 2112 // The split result intersects with [start, end[. |
2113 // Split it at a position in ]start+1, end[, spill the middle part | 2113 // Split it at a position in ]start+1, end[, spill the middle part |
2114 // and put the rest back on the unhandled list. | 2114 // and put the rest back on the unhandled list. |
2115 LiveRange* third_part = SplitBetween( | 2115 LiveRange* third_part = SplitBetween( |
2116 second_part, | 2116 second_part, |
2117 Max(second_part->Start().InstructionEnd(), until), | 2117 Max(second_part->Start().InstructionEnd(), until), |
2118 end.PrevInstruction().InstructionEnd()); | 2118 end.PrevInstruction().InstructionEnd()); |
2119 if (!AllocationOk()) return; | 2119 if (!AllocationOk()) return; |
2120 | 2120 |
2121 ASSERT(third_part != second_part); | 2121 DCHECK(third_part != second_part); |
2122 | 2122 |
2123 Spill(second_part); | 2123 Spill(second_part); |
2124 AddToUnhandledSorted(third_part); | 2124 AddToUnhandledSorted(third_part); |
2125 } else { | 2125 } else { |
2126 // The split result does not intersect with [start, end[. | 2126 // The split result does not intersect with [start, end[. |
2127 // Nothing to spill. Just put it to unhandled as whole. | 2127 // Nothing to spill. Just put it to unhandled as whole. |
2128 AddToUnhandledSorted(second_part); | 2128 AddToUnhandledSorted(second_part); |
2129 } | 2129 } |
2130 } | 2130 } |
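SpillBetweenUntil splits the range at `start`; if the split-off part still reaches into [start, end[ it is split again, the middle piece is spilled, and the remainder is re-queued. A schematic sketch of that three-way split on plain half-open intervals (positions simplified to integers, so this shows the shape of the logic, not the actual position arithmetic):

#include <algorithm>
#include <cstdio>

struct Interval {
  int start, end;  // half-open [start, end)
};

// Spill the portion of `range` that falls inside [spill_start, spill_end):
// the head keeps its register, the middle piece is spilled, and any tail that
// survives past spill_end is re-queued for allocation.
void SpillBetween(Interval range, int spill_start, int spill_end) {
  Interval head = {range.start, spill_start};
  Interval rest = {spill_start, range.end};
  if (rest.start < spill_end) {
    int cut = std::min(spill_end, rest.end);
    std::printf("head [%d,%d) keeps reg, spill [%d,%d)", head.start, head.end,
                rest.start, cut);
    if (cut < rest.end) std::printf(", re-queue [%d,%d)", cut, rest.end);
    std::printf("\n");
  } else {
    // The split-off part starts after the spill window: nothing to spill, the
    // whole tail simply goes back on the unhandled list.
    std::printf("head [%d,%d) keeps reg, re-queue [%d,%d)\n", head.start,
                head.end, rest.start, rest.end);
  }
}

int main() {
  SpillBetween({2, 40}, 10, 25);  // tail intersects the window: spill the middle
  return 0;
}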
2131 | 2131 |
2132 | 2132 |
2133 void LAllocator::Spill(LiveRange* range) { | 2133 void LAllocator::Spill(LiveRange* range) { |
2134 ASSERT(!range->IsSpilled()); | 2134 DCHECK(!range->IsSpilled()); |
2135 TraceAlloc("Spilling live range %d\n", range->id()); | 2135 TraceAlloc("Spilling live range %d\n", range->id()); |
2136 LiveRange* first = range->TopLevel(); | 2136 LiveRange* first = range->TopLevel(); |
2137 | 2137 |
2138 if (!first->HasAllocatedSpillOperand()) { | 2138 if (!first->HasAllocatedSpillOperand()) { |
2139 LOperand* op = TryReuseSpillSlot(range); | 2139 LOperand* op = TryReuseSpillSlot(range); |
2140 if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind()); | 2140 if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind()); |
2141 first->SetSpillOperand(op); | 2141 first->SetSpillOperand(op); |
2142 } | 2142 } |
2143 range->MakeSpilled(chunk()->zone()); | 2143 range->MakeSpilled(chunk()->zone()); |
2144 } | 2144 } |
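Spill above assigns a stack slot to the top-level range, preferring a slot released by an already-handled range (TryReuseSpillSlot) over growing the frame (GetNextSpillSlot). A compact sketch of that reuse policy using a hypothetical `SpillSlotPool`; the real allocator additionally keys reuse on the range's kind and end position:

#include <cstdio>
#include <vector>

// Toy pool of stack slots: freed slots are handed out again before new ones
// are created, which is the policy TryReuseSpillSlot/GetNextSpillSlot express.
class SpillSlotPool {
 public:
  int Acquire() {
    if (!free_slots_.empty()) {
      int slot = free_slots_.back();  // reuse a slot from a handled range
      free_slots_.pop_back();
      return slot;
    }
    return next_slot_++;              // otherwise grow the stack frame
  }
  void Release(int slot) { free_slots_.push_back(slot); }

 private:
  std::vector<int> free_slots_;
  int next_slot_ = 0;
};

int main() {
  SpillSlotPool pool;
  int a = pool.Acquire();  // slot 0
  int b = pool.Acquire();  // slot 1
  pool.Release(a);         // the range using slot 0 was handled (FreeSpillSlot)
  int c = pool.Acquire();  // reuses slot 0 instead of creating slot 2
  std::printf("a=%d b=%d c=%d\n", a, b, c);
  return 0;
}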
(...skipping 40 matching lines...)
2185 isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_); | 2185 isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_); |
2186 } | 2186 } |
2187 | 2187 |
2188 #ifdef DEBUG | 2188 #ifdef DEBUG |
2189 if (allocator_ != NULL) allocator_->Verify(); | 2189 if (allocator_ != NULL) allocator_->Verify(); |
2190 #endif | 2190 #endif |
2191 } | 2191 } |
2192 | 2192 |
2193 | 2193 |
2194 } } // namespace v8::internal | 2194 } } // namespace v8::internal |