OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
54 } | 54 } |
55 | 55 |
56 DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND) | 56 DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND) |
57 DEFINE_OPERAND_CACHE(LStackSlot, STACK_SLOT) | 57 DEFINE_OPERAND_CACHE(LStackSlot, STACK_SLOT) |
58 DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT) | 58 DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT) |
59 DEFINE_OPERAND_CACHE(LRegister, REGISTER) | 59 DEFINE_OPERAND_CACHE(LRegister, REGISTER) |
60 DEFINE_OPERAND_CACHE(LDoubleRegister, DOUBLE_REGISTER) | 60 DEFINE_OPERAND_CACHE(LDoubleRegister, DOUBLE_REGISTER) |
61 | 61 |
62 #undef DEFINE_OPERAND_CACHE | 62 #undef DEFINE_OPERAND_CACHE |
63 | 63 |
64 | |
65 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { | 64 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { |
66 return a.Value() < b.Value() ? a : b; | 65 return a.Value() < b.Value() ? a : b; |
67 } | 66 } |
68 | 67 |
69 | 68 |
70 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { | 69 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { |
71 return a.Value() > b.Value() ? a : b; | 70 return a.Value() > b.Value() ? a : b; |
72 } | 71 } |
73 | 72 |
74 | 73 |
(...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
240 UsePosition* pos = first_pos_; | 239 UsePosition* pos = first_pos_; |
241 while (pos != NULL && !pos->HasHint()) pos = pos->next(); | 240 while (pos != NULL && !pos->HasHint()) pos = pos->next(); |
242 return pos; | 241 return pos; |
243 } | 242 } |
244 | 243 |
245 | 244 |
246 LOperand* LiveRange::CreateAssignedOperand() { | 245 LOperand* LiveRange::CreateAssignedOperand() { |
247 LOperand* op = NULL; | 246 LOperand* op = NULL; |
248 if (HasRegisterAssigned()) { | 247 if (HasRegisterAssigned()) { |
249 ASSERT(!IsSpilled()); | 248 ASSERT(!IsSpilled()); |
250 if (assigned_double_) { | 249 if (assigned_register_class_ == DOUBLE_REGISTERS) { |
Kevin Millikin (Chromium)
2010/12/10 07:39:19
Could use your IsDouble() predicate here.
Vyacheslav Egorov (Chromium)
2010/12/10 14:18:27
Done.
| |
251 op = LDoubleRegister::Create(assigned_register()); | 250 op = LDoubleRegister::Create(assigned_register()); |
252 } else { | 251 } else { |
253 op = LRegister::Create(assigned_register()); | 252 op = LRegister::Create(assigned_register()); |
254 } | 253 } |
255 } else if (IsSpilled()) { | 254 } else if (IsSpilled()) { |
256 ASSERT(!HasRegisterAssigned()); | 255 ASSERT(!HasRegisterAssigned()); |
257 op = TopLevel()->GetSpillOperand(); | 256 op = TopLevel()->GetSpillOperand(); |
258 ASSERT(!op->IsUnallocated()); | 257 ASSERT(!op->IsUnallocated()); |
259 } else { | 258 } else { |
260 LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE); | 259 LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE); |
(...skipping 22 matching lines...) Expand all Loading... | |
283 LifetimePosition start = | 282 LifetimePosition start = |
284 current_interval_ == NULL ? LifetimePosition::Invalid() | 283 current_interval_ == NULL ? LifetimePosition::Invalid() |
285 : current_interval_->start(); | 284 : current_interval_->start(); |
286 if (to_start_of->start().Value() > start.Value()) { | 285 if (to_start_of->start().Value() > start.Value()) { |
287 current_interval_ = to_start_of; | 286 current_interval_ = to_start_of; |
288 } | 287 } |
289 } | 288 } |
290 | 289 |
291 | 290 |
292 void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) { | 291 void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) { |
293 ASSERT(Start().Value() <= position.Value()); | 292 ASSERT(Start().Value() < position.Value()); |
294 ASSERT(result->IsEmpty()); | 293 ASSERT(result->IsEmpty()); |
295 // Find the last interval that ends before the position. If the | 294 // Find the last interval that ends before the position. If the |
296 // position is contained in one of the intervals in the chain, we | 295 // position is contained in one of the intervals in the chain, we |
297 // split that interval and use the first part. | 296 // split that interval and use the first part. |
298 UseInterval* current = FirstSearchIntervalForPosition(position); | 297 UseInterval* current = FirstSearchIntervalForPosition(position); |
299 while (current != NULL) { | 298 while (current != NULL) { |
300 if (current->Contains(position)) { | 299 if (current->Contains(position)) { |
301 current->SplitAt(position); | 300 current->SplitAt(position); |
302 break; | 301 break; |
303 } | 302 } |
(...skipping 314 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
618 LiveRange* LAllocator::FixedLiveRangeFor(int index) { | 617 LiveRange* LAllocator::FixedLiveRangeFor(int index) { |
619 if (index >= fixed_live_ranges_.length()) { | 618 if (index >= fixed_live_ranges_.length()) { |
620 fixed_live_ranges_.AddBlock(NULL, | 619 fixed_live_ranges_.AddBlock(NULL, |
621 index - fixed_live_ranges_.length() + 1); | 620 index - fixed_live_ranges_.length() + 1); |
622 } | 621 } |
623 | 622 |
624 LiveRange* result = fixed_live_ranges_[index]; | 623 LiveRange* result = fixed_live_ranges_[index]; |
625 if (result == NULL) { | 624 if (result == NULL) { |
626 result = new LiveRange(FixedLiveRangeID(index)); | 625 result = new LiveRange(FixedLiveRangeID(index)); |
627 ASSERT(result->IsFixed()); | 626 ASSERT(result->IsFixed()); |
628 result->set_assigned_register(index, false); | 627 result->set_assigned_register(index, CPU_REGISTERS); |
629 fixed_live_ranges_[index] = result; | 628 fixed_live_ranges_[index] = result; |
630 } | 629 } |
631 return result; | 630 return result; |
632 } | 631 } |
633 | 632 |
634 | 633 |
635 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) { | 634 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) { |
636 if (index >= fixed_double_live_ranges_.length()) { | 635 if (index >= fixed_double_live_ranges_.length()) { |
637 fixed_double_live_ranges_.AddBlock(NULL, | 636 fixed_double_live_ranges_.AddBlock(NULL, |
638 index - fixed_double_live_ranges_.length() + 1); | 637 index - fixed_double_live_ranges_.length() + 1); |
639 } | 638 } |
640 | 639 |
641 LiveRange* result = fixed_double_live_ranges_[index]; | 640 LiveRange* result = fixed_double_live_ranges_[index]; |
642 if (result == NULL) { | 641 if (result == NULL) { |
643 result = new LiveRange(FixedDoubleLiveRangeID(index)); | 642 result = new LiveRange(FixedDoubleLiveRangeID(index)); |
644 ASSERT(result->IsFixed()); | 643 ASSERT(result->IsFixed()); |
645 result->set_assigned_register(index, true); | 644 result->set_assigned_register(index, DOUBLE_REGISTERS); |
646 fixed_double_live_ranges_[index] = result; | 645 fixed_double_live_ranges_[index] = result; |
647 } | 646 } |
648 return result; | 647 return result; |
649 } | 648 } |
650 | 649 |
651 LiveRange* LAllocator::LiveRangeFor(int index) { | 650 LiveRange* LAllocator::LiveRangeFor(int index) { |
652 if (index >= live_ranges_.length()) { | 651 if (index >= live_ranges_.length()) { |
653 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1); | 652 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1); |
654 } | 653 } |
655 LiveRange* result = live_ranges_[index]; | 654 LiveRange* result = live_ranges_[index]; |
(...skipping 595 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1251 PrintF("First use is at %d\n", range->first_pos()->pos().Value()); | 1250 PrintF("First use is at %d\n", range->first_pos()->pos().Value()); |
1252 iterator.Advance(); | 1251 iterator.Advance(); |
1253 } | 1252 } |
1254 ASSERT(!found); | 1253 ASSERT(!found); |
1255 } | 1254 } |
1256 #endif | 1255 #endif |
1257 } | 1256 } |
1258 } | 1257 } |
1259 | 1258 |
1260 | 1259 |
1261 void LAllocator::AllocateGeneralRegisters() { | |
1262 HPhase phase("Allocate general registers", this); | |
1263 num_registers_ = Register::kNumAllocatableRegisters; | |
1264 mode_ = CPU_REGISTERS; | |
1265 AllocateRegisters(); | |
1266 } | |
1267 | |
1268 | |
1269 bool LAllocator::SafePointsAreInOrder() const { | 1260 bool LAllocator::SafePointsAreInOrder() const { |
1270 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); | 1261 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); |
1271 int safe_point = 0; | 1262 int safe_point = 0; |
1272 for (int i = 0; i < pointer_maps->length(); ++i) { | 1263 for (int i = 0; i < pointer_maps->length(); ++i) { |
1273 LPointerMap* map = pointer_maps->at(i); | 1264 LPointerMap* map = pointer_maps->at(i); |
1274 if (safe_point > map->lithium_position()) return false; | 1265 if (safe_point > map->lithium_position()) return false; |
1275 safe_point = map->lithium_position(); | 1266 safe_point = map->lithium_position(); |
1276 } | 1267 } |
1277 return true; | 1268 return true; |
1278 } | 1269 } |
(...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1390 instruction->MarkSpilledDoubleRegister(reg_index, spill_operand); | 1381 instruction->MarkSpilledDoubleRegister(reg_index, spill_operand); |
1391 } else { | 1382 } else { |
1392 instruction->MarkSpilledRegister(reg_index, spill_operand); | 1383 instruction->MarkSpilledRegister(reg_index, spill_operand); |
1393 } | 1384 } |
1394 } | 1385 } |
1395 } | 1386 } |
1396 } | 1387 } |
1397 } | 1388 } |
1398 | 1389 |
1399 | 1390 |
1391 void LAllocator::AllocateGeneralRegisters() { | |
1392 HPhase phase("Allocate general registers", this); | |
1393 num_registers_ = Register::kNumAllocatableRegisters; | |
1394 mode_ = CPU_REGISTERS; | |
1395 AllocateRegisters(); | |
1396 } | |
1397 | |
1398 | |
1400 void LAllocator::AllocateDoubleRegisters() { | 1399 void LAllocator::AllocateDoubleRegisters() { |
1401 HPhase phase("Allocate double registers", this); | 1400 HPhase phase("Allocate double registers", this); |
1402 num_registers_ = DoubleRegister::kNumAllocatableRegisters; | 1401 num_registers_ = DoubleRegister::kNumAllocatableRegisters; |
1403 mode_ = XMM_REGISTERS; | 1402 mode_ = DOUBLE_REGISTERS; |
1404 AllocateRegisters(); | 1403 AllocateRegisters(); |
1405 } | 1404 } |
1406 | 1405 |
1407 | 1406 |
1408 void LAllocator::AllocateRegisters() { | 1407 void LAllocator::AllocateRegisters() { |
1409 ASSERT(mode_ != NONE); | 1408 ASSERT(mode_ != NONE); |
1410 reusable_slots_.Clear(); | 1409 reusable_slots_.Clear(); |
1411 | 1410 |
1412 for (int i = 0; i < live_ranges_.length(); ++i) { | 1411 for (int i = 0; i < live_ranges_.length(); ++i) { |
1413 if (live_ranges_[i] != NULL) { | 1412 if (live_ranges_[i] != NULL) { |
1414 if (HasDoubleValue(live_ranges_[i]->id()) == (mode_ == XMM_REGISTERS)) { | 1413 if (RequiredRegisterClass(live_ranges_[i]->id()) == mode_) { |
1415 AddToUnhandledUnsorted(live_ranges_[i]); | 1414 AddToUnhandledUnsorted(live_ranges_[i]); |
1416 } | 1415 } |
1417 } | 1416 } |
1418 } | 1417 } |
1419 SortUnhandled(); | 1418 SortUnhandled(); |
1420 ASSERT(UnhandledIsSorted()); | 1419 ASSERT(UnhandledIsSorted()); |
1421 | 1420 |
1422 ASSERT(active_live_ranges_.is_empty()); | 1421 ASSERT(active_live_ranges_.is_empty()); |
1423 ASSERT(inactive_live_ranges_.is_empty()); | 1422 ASSERT(inactive_live_ranges_.is_empty()); |
1424 | 1423 |
1425 if (mode_ == XMM_REGISTERS) { | 1424 if (mode_ == DOUBLE_REGISTERS) { |
1426 for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) { | 1425 for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) { |
1427 LiveRange* current = fixed_double_live_ranges_.at(i); | 1426 LiveRange* current = fixed_double_live_ranges_.at(i); |
1428 if (current != NULL) { | 1427 if (current != NULL) { |
1429 AddToInactive(current); | 1428 AddToInactive(current); |
1430 } | 1429 } |
1431 } | 1430 } |
1432 } else { | 1431 } else { |
1433 for (int i = 0; i < fixed_live_ranges_.length(); ++i) { | 1432 for (int i = 0; i < fixed_live_ranges_.length(); ++i) { |
1434 LiveRange* current = fixed_live_ranges_.at(i); | 1433 LiveRange* current = fixed_live_ranges_.at(i); |
1435 if (current != NULL) { | 1434 if (current != NULL) { |
(...skipping 20 matching lines...) Expand all Loading... | |
1456 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); | 1455 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); |
1457 // If the range already has a spill operand and it doesn't need a | 1456 // If the range already has a spill operand and it doesn't need a |
1458 // register immediately, split it and spill the first part of the range. | 1457 // register immediately, split it and spill the first part of the range. |
1459 if (pos == NULL) { | 1458 if (pos == NULL) { |
1460 Spill(current); | 1459 Spill(current); |
1461 continue; | 1460 continue; |
1462 } else if (pos->pos().Value() > | 1461 } else if (pos->pos().Value() > |
1463 current->Start().NextInstruction().Value()) { | 1462 current->Start().NextInstruction().Value()) { |
1464 // Do not spill live range eagerly if use position that can benefit from | 1463 // Do not spill live range eagerly if use position that can benefit from |
1465 // the register is too close to the start of live range. | 1464 // the register is too close to the start of live range. |
1466 LiveRange* part = Split(current, | 1465 SpillBetween(current, current->Start(), pos->pos()); |
1467 current->Start().NextInstruction(), | |
1468 pos->pos()); | |
1469 Spill(current); | |
1470 AddToUnhandledSorted(part); | |
1471 ASSERT(UnhandledIsSorted()); | 1466 ASSERT(UnhandledIsSorted()); |
1472 continue; | 1467 continue; |
1473 } | 1468 } |
1474 } | 1469 } |
1475 | 1470 |
1476 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 1471 for (int i = 0; i < active_live_ranges_.length(); ++i) { |
1477 LiveRange* cur_active = active_live_ranges_.at(i); | 1472 LiveRange* cur_active = active_live_ranges_.at(i); |
1478 if (cur_active->End().Value() <= position.Value()) { | 1473 if (cur_active->End().Value() <= position.Value()) { |
1479 ActiveToHandled(cur_active); | 1474 ActiveToHandled(cur_active); |
1480 --i; // The live range was removed from the list of active live ranges. | 1475 --i; // The live range was removed from the list of active live ranges. |
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1537 } | 1532 } |
1538 | 1533 |
1539 | 1534 |
1540 bool LAllocator::HasTaggedValue(int virtual_register) const { | 1535 bool LAllocator::HasTaggedValue(int virtual_register) const { |
1541 HValue* value = graph()->LookupValue(virtual_register); | 1536 HValue* value = graph()->LookupValue(virtual_register); |
1542 if (value == NULL) return false; | 1537 if (value == NULL) return false; |
1543 return value->representation().IsTagged(); | 1538 return value->representation().IsTagged(); |
1544 } | 1539 } |
1545 | 1540 |
1546 | 1541 |
1547 bool LAllocator::HasDoubleValue(int virtual_register) const { | 1542 RegistersClass LAllocator::RequiredRegisterClass(int virtual_register) const { |
1548 HValue* value = graph()->LookupValue(virtual_register); | 1543 HValue* value = graph()->LookupValue(virtual_register); |
1549 if (value == NULL) return false; | 1544 if (value != NULL && value->representation().IsDouble()) { |
1550 return value->representation().IsDouble(); | 1545 return DOUBLE_REGISTERS; |
1546 } | |
1547 return CPU_REGISTERS; | |
1551 } | 1548 } |
1552 | 1549 |
1553 | 1550 |
1554 void LAllocator::MarkAsCall() { | 1551 void LAllocator::MarkAsCall() { |
1555 current_summary()->MarkAsCall(); | 1552 current_summary()->MarkAsCall(); |
1556 } | 1553 } |
1557 | 1554 |
1558 | 1555 |
1559 void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) { | 1556 void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) { |
1560 operand->set_virtual_register(instr->id()); | 1557 operand->set_virtual_register(instr->id()); |
(...skipping 160 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1721 | 1718 |
1722 | 1719 |
1723 void LAllocator::InactiveToActive(LiveRange* range) { | 1720 void LAllocator::InactiveToActive(LiveRange* range) { |
1724 ASSERT(inactive_live_ranges_.Contains(range)); | 1721 ASSERT(inactive_live_ranges_.Contains(range)); |
1725 inactive_live_ranges_.RemoveElement(range); | 1722 inactive_live_ranges_.RemoveElement(range); |
1726 active_live_ranges_.Add(range); | 1723 active_live_ranges_.Add(range); |
1727 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); | 1724 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); |
1728 } | 1725 } |
1729 | 1726 |
1730 | 1727 |
1728 // TryAllocateFreeReg and AllocateBlockedReg assume this | |
1729 // when allocating local arrays. | |
1730 STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >= | |
1731 Register::kNumAllocatableRegisters); | |
1732 | |
1733 | |
1731 bool LAllocator::TryAllocateFreeReg(LiveRange* current) { | 1734 bool LAllocator::TryAllocateFreeReg(LiveRange* current) { |
1732 LifetimePosition max_pos = LifetimePosition::FromInstructionIndex( | 1735 LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters]; |
1733 chunk_->instructions()->length() + 1); | 1736 |
1734 ASSERT(DoubleRegister::kNumAllocatableRegisters >= | 1737 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) { |
1735 Register::kNumAllocatableRegisters); | 1738 free_until_pos[i] = LifetimePosition::MaxPosition(); |
1736 EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters> | 1739 } |
1737 free_pos(max_pos); | 1740 |
1738 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 1741 for (int i = 0; i < active_live_ranges_.length(); ++i) { |
1739 LiveRange* cur_active = active_live_ranges_.at(i); | 1742 LiveRange* cur_active = active_live_ranges_.at(i); |
1740 free_pos[cur_active->assigned_register()] = | 1743 free_until_pos[cur_active->assigned_register()] = |
1741 LifetimePosition::FromInstructionIndex(0); | 1744 LifetimePosition::FromInstructionIndex(0); |
1742 } | 1745 } |
1743 | 1746 |
1744 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { | 1747 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { |
1745 LiveRange* cur_inactive = inactive_live_ranges_.at(i); | 1748 LiveRange* cur_inactive = inactive_live_ranges_.at(i); |
1746 ASSERT(cur_inactive->End().Value() > current->Start().Value()); | 1749 ASSERT(cur_inactive->End().Value() > current->Start().Value()); |
1747 LifetimePosition next_intersection = | 1750 LifetimePosition next_intersection = |
1748 cur_inactive->FirstIntersection(current); | 1751 cur_inactive->FirstIntersection(current); |
1749 if (!next_intersection.IsValid()) continue; | 1752 if (!next_intersection.IsValid()) continue; |
1750 int cur_reg = cur_inactive->assigned_register(); | 1753 int cur_reg = cur_inactive->assigned_register(); |
1751 free_pos[cur_reg] = Min(free_pos[cur_reg], next_intersection); | 1754 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); |
1752 } | 1755 } |
1753 | 1756 |
1754 UsePosition* pos = current->FirstPosWithHint(); | 1757 UsePosition* hinted_use = current->FirstPosWithHint(); |
1755 if (pos != NULL) { | 1758 if (hinted_use != NULL) { |
1756 LOperand* hint = pos->hint(); | 1759 LOperand* hint = hinted_use->hint(); |
1757 if (hint->IsRegister() || hint->IsDoubleRegister()) { | 1760 if (hint->IsRegister() || hint->IsDoubleRegister()) { |
1758 int register_index = hint->index(); | 1761 int register_index = hint->index(); |
1759 TraceAlloc("Found reg hint %d for live range %d (free [%d, end %d[)\n", | 1762 TraceAlloc( |
1760 register_index, | 1763 "Found reg hint %d (free until [%d) for live range %d (end %d[).\n", |
1761 current->id(), | 1764 register_index, |
Kevin Millikin (Chromium)
2010/12/10 07:39:19
Maybe it's not helpful here, but there's probably
Vyacheslav Egorov (Chromium)
2010/12/10 14:18:27
Yes, I know. I was just lazy to introduce helper (
| |
1762 free_pos[register_index].Value(), | 1765 free_until_pos[register_index].Value(), |
1763 current->End().Value()); | 1766 current->id(), |
1764 if (free_pos[register_index].Value() >= current->End().Value()) { | 1767 current->End().Value()); |
1768 | |
1769 // The desired register is free until the end of the current live range. | |
Kevin Millikin (Chromium)
2010/12/10 07:39:19
The desired....until the end...
Vyacheslav Egorov (Chromium)
2010/12/10 14:18:27
Done.
| |
1770 if (free_until_pos[register_index].Value() >= current->End().Value()) { | |
1765 TraceAlloc("Assigning preferred reg %d to live range %d\n", | 1771 TraceAlloc("Assigning preferred reg %d to live range %d\n", |
1766 register_index, | 1772 register_index, |
1767 current->id()); | 1773 current->id()); |
1768 current->set_assigned_register(register_index, mode_ == XMM_REGISTERS); | 1774 current->set_assigned_register(register_index, mode_); |
1769 return true; | 1775 return true; |
1770 } | 1776 } |
1771 } | 1777 } |
1772 } | 1778 } |
1773 | 1779 |
1774 int max_reg = 0; | 1780 // Find the register that stays free for the longest time. |
Kevin Millikin (Chromium)
2010/12/10 07:39:19
Find the register
Vyacheslav Egorov (Chromium)
2010/12/10 14:18:27
Done.
| |
1781 int reg = 0; | |
1775 for (int i = 1; i < RegisterCount(); ++i) { | 1782 for (int i = 1; i < RegisterCount(); ++i) { |
1776 if (free_pos[i].Value() > free_pos[max_reg].Value()) { | 1783 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) { |
1777 max_reg = i; | 1784 reg = i; |
1778 } | 1785 } |
1779 } | 1786 } |
1780 | 1787 |
1781 if (free_pos[max_reg].InstructionIndex() == 0) { | 1788 LifetimePosition pos = free_until_pos[reg]; |
1789 | |
1790 if (pos.Value() <= current->Start().Value()) { | |
1791 // All registers are blocked. | |
1782 return false; | 1792 return false; |
1783 } else if (free_pos[max_reg].Value() >= current->End().Value()) { | |
1784 TraceAlloc("Assigning reg %d to live range %d\n", max_reg, current->id()); | |
1785 current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS); | |
1786 } else { | |
1787 // Split the interval before first use position of max_reg and never split | |
1788 // it interval at its start position. | |
1789 LifetimePosition pos = free_pos[max_reg]; | |
1790 if (pos.Value() <= current->Start().Value()) return false; | |
1791 LiveRange* second_range = Split(current, pos); | |
1792 AddToUnhandledSorted(second_range); | |
1793 current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS); | |
1794 } | 1793 } |
1795 | 1794 |
1795 if (pos.Value() < current->End().Value()) { | |
1796 // Register reg is available at the range start but becomes blocked before | |
1797 // the range end. Split current at the position where it becomes blocked. | |
1798 LiveRange* tail = SplitAt(current, pos); | |
1799 AddToUnhandledSorted(tail); | |
1800 } | |
1801 | |
1802 | |
1803 // Register reg is available at the range start and is free until | |
1804 // the range end. | |
1805 ASSERT(pos.Value() >= current->End().Value()); | |
1806 TraceAlloc("Assigning reg %d to live range %d\n", reg, current->id()); | |
1807 current->set_assigned_register(reg, mode_); | |
1808 | |
1796 return true; | 1809 return true; |
1797 } | 1810 } |
1798 | 1811 |
1799 | 1812 |
1800 void LAllocator::AllocateBlockedReg(LiveRange* current) { | 1813 void LAllocator::AllocateBlockedReg(LiveRange* current) { |
1801 LifetimePosition max_pos = | 1814 UsePosition* register_use = current->NextRegisterPosition(current->Start()); |
1802 LifetimePosition::FromInstructionIndex( | 1815 if (register_use == NULL) { |
1803 chunk_->instructions()->length() + 1); | 1816 // There is no use in currect live range that requires register. |
Kevin Millikin (Chromium)
2010/12/10 07:39:19
the current live range that requires a register
Vyacheslav Egorov (Chromium)
2010/12/10 14:18:27
Done.
| |
1804 ASSERT(DoubleRegister::kNumAllocatableRegisters >= | 1817 // We can just spill it. |
1805 Register::kNumAllocatableRegisters); | 1818 Spill(current); |
1806 EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters> | 1819 return; |
1807 use_pos(max_pos); | 1820 } |
1808 EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters> | 1821 |
1809 block_pos(max_pos); | 1822 |
1823 LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters]; | |
1824 LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters]; | |
1825 | |
1826 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) { | |
1827 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); | |
1828 } | |
1810 | 1829 |
1811 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 1830 for (int i = 0; i < active_live_ranges_.length(); ++i) { |
1812 LiveRange* range = active_live_ranges_[i]; | 1831 LiveRange* range = active_live_ranges_[i]; |
1813 int cur_reg = range->assigned_register(); | 1832 int cur_reg = range->assigned_register(); |
1814 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { | 1833 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { |
1815 block_pos[cur_reg] = use_pos[cur_reg] = | 1834 block_pos[cur_reg] = use_pos[cur_reg] = |
1816 LifetimePosition::FromInstructionIndex(0); | 1835 LifetimePosition::FromInstructionIndex(0); |
1817 } else { | 1836 } else { |
1818 UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial( | 1837 UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial( |
1819 current->Start()); | 1838 current->Start()); |
(...skipping 12 matching lines...) Expand all Loading... | |
1832 if (!next_intersection.IsValid()) continue; | 1851 if (!next_intersection.IsValid()) continue; |
1833 int cur_reg = range->assigned_register(); | 1852 int cur_reg = range->assigned_register(); |
1834 if (range->IsFixed()) { | 1853 if (range->IsFixed()) { |
1835 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); | 1854 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); |
1836 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); | 1855 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); |
1837 } else { | 1856 } else { |
1838 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); | 1857 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); |
1839 } | 1858 } |
1840 } | 1859 } |
1841 | 1860 |
1842 int max_reg = 0; | 1861 int reg = 0; |
1843 for (int i = 1; i < RegisterCount(); ++i) { | 1862 for (int i = 1; i < RegisterCount(); ++i) { |
1844 if (use_pos[i].Value() > use_pos[max_reg].Value()) { | 1863 if (use_pos[i].Value() > use_pos[reg].Value()) { |
1845 max_reg = i; | 1864 reg = i; |
1846 } | 1865 } |
1847 } | 1866 } |
1848 | 1867 |
1849 UsePosition* first_usage = current->NextRegisterPosition(current->Start()); | 1868 LifetimePosition pos = use_pos[reg]; |
1850 if (first_usage == NULL) { | |
1851 Spill(current); | |
1852 } else if (use_pos[max_reg].Value() < first_usage->pos().Value()) { | |
1853 SplitAndSpill(current, current->Start(), first_usage->pos()); | |
1854 } else { | |
1855 if (block_pos[max_reg].Value() < current->End().Value()) { | |
1856 // Split current before blocked position. | |
1857 LiveRange* second_range = Split(current, | |
1858 current->Start(), | |
1859 block_pos[max_reg]); | |
1860 AddToUnhandledSorted(second_range); | |
1861 } | |
1862 | 1869 |
1863 current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS); | 1870 if (pos.Value() < register_use->pos().Value()) { |
1864 SplitAndSpillIntersecting(current); | 1871 // All registers are blocked before the first use that requires register. |
Kevin Millikin (Chromium)
2010/12/10 07:39:19
that requires a register. Spill the starting part
Vyacheslav Egorov (Chromium)
2010/12/10 14:18:27
Done.
| |
1872 // Spill the starting part of the live range up to that use. | |
1873 // | |
1874 // Corner case: the first use position is equal to the start of the range. | |
Kevin Millikin (Chromium)
2010/12/10 07:39:19
the first use...the start of the range
Vyacheslav Egorov (Chromium)
2010/12/10 14:18:27
Done.
| |
1875 // In this case we have nothing to spill and SpillBetween will just return | |
1876 // this range to the list of unhandled ones. This means that we have unsatisfiable | |
1877 // register constraints and we have to abort to avoid an infinite loop. | |
1878 CHECK(current->Start().Value() < register_use->pos().Value()); | |
Kevin Millikin (Chromium)
2010/12/10 07:39:19
We should consider what to do here. CHECK will cr
Vyacheslav Egorov (Chromium)
2010/12/10 14:18:27
I replaced check with assert.
Yes. Gracefully bai
| |
1879 SpillBetween(current, current->Start(), register_use->pos()); | |
1880 return; | |
1865 } | 1881 } |
1882 | |
1883 if (block_pos[reg].Value() < current->End().Value()) { | |
1884 // The register becomes blocked before the current range end. Split before that | |
1885 // position. | |
1886 LiveRange* tail = SplitBetween(current, | |
1887 current->Start(), | |
1888 block_pos[reg].InstructionStart()); | |
1889 AddToUnhandledSorted(tail); | |
1890 } | |
1891 | |
1892 // Register reg is not blocked for the whole range. | |
1893 ASSERT(block_pos[reg].Value() >= current->End().Value()); | |
1894 TraceAlloc("Assigning reg %d to live range %d\n", reg, current->id()); | |
1895 current->set_assigned_register(reg, mode_); | |
1896 | |
1897 // This register was not free. Thus we need to find and spill | |
1898 // parts of active and inactive live regions that use the same register | |
1899 // at the same lifetime positions as current. | |
1900 SplitAndSpillIntersecting(current); | |
1866 } | 1901 } |
1867 | 1902 |
1868 | 1903 |
// Evicts every active/inactive live range that competes with |current| for
// its newly assigned register. Each conflicting range is spilled starting at
// current's start; if it has a later use that requires a register, only the
// interval up to that use is spilled and the remainder is requeued.
void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      if (next_pos == NULL) {
        // No further use needs a register: spill the rest of the range.
        SpillAfter(range, split_pos);
      } else {
        // Spill only until the next position that must be in a register.
        SpillBetween(range, split_pos, next_pos->pos());
      }
      // ActiveToHandled removes index i from active_live_ranges_, so step
      // back to not skip the element swapped into this slot.
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          // Only spill up to whichever comes first: the actual conflict
          // with current or the next mandatory register use.
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        // Same removal-while-iterating pattern as the active list above.
        InactiveToHandled(range);
        --i;
      }
    }
  }
}
1906 | 1941 |
1907 | 1942 |
1908 LiveRange* LAllocator::Split(LiveRange* range, | 1943 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { |
1909 LifetimePosition start, | 1944 return pos.IsInstructionStart() && |
1910 LifetimePosition end) { | 1945 chunk_->instructions()->at(pos.InstructionIndex())->IsLabel(); |
1946 } | |
1947 | |
1948 | |
1949 void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) { | |
1950 UsePosition* prev_pos = prev->AddUsePosition( | |
1951 LifetimePosition::FromInstructionIndex(pos)); | |
1952 UsePosition* next_pos = next->AddUsePosition( | |
1953 LifetimePosition::FromInstructionIndex(pos)); | |
1954 LOperand* prev_operand = prev_pos->operand(); | |
1955 LOperand* next_operand = next_pos->operand(); | |
1956 LGap* gap = chunk_->GetGapAt(pos); | |
1957 gap->GetOrCreateParallelMove(LGap::START)-> | |
1958 AddMove(prev_operand, next_operand); | |
1959 next_pos->set_hint(prev_operand); | |
1960 } | |
1961 | |
1962 | |
1963 LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) { | |
1911 ASSERT(!range->IsFixed()); | 1964 ASSERT(!range->IsFixed()); |
1912 TraceAlloc("Splitting live range %d in position between [%d, %d[\n", | 1965 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); |
1966 | |
1967 if (pos.Value() <= range->Start().Value()) return range; | |
1968 | |
1969 LiveRange* result = LiveRangeFor(next_virtual_register_++); | |
1970 range->SplitAt(pos, result); | |
1971 return result; | |
1972 } | |
1973 | |
1974 | |
1975 LiveRange* LAllocator::SplitBetween(LiveRange* range, | |
1976 LifetimePosition start, | |
1977 LifetimePosition end) { | |
1978 ASSERT(!range->IsFixed()); | |
1979 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", | |
1913 range->id(), | 1980 range->id(), |
1914 start.Value(), | 1981 start.Value(), |
1915 end.Value()); | 1982 end.Value()); |
1916 | 1983 |
1917 LifetimePosition split_pos = FindOptimalSplitPos( | 1984 LifetimePosition split_pos = FindOptimalSplitPos(start, end); |
1918 start, end.PrevInstruction().InstructionEnd()); | |
1919 ASSERT(split_pos.Value() >= start.Value()); | 1985 ASSERT(split_pos.Value() >= start.Value()); |
1920 return Split(range, split_pos); | 1986 return SplitAt(range, split_pos); |
1921 } | 1987 } |
1922 | 1988 |
1923 | 1989 |
1924 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start, | 1990 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start, |
1925 LifetimePosition end) { | 1991 LifetimePosition end) { |
1926 int start_instr = start.InstructionIndex(); | 1992 int start_instr = start.InstructionIndex(); |
1927 int end_instr = end.InstructionIndex(); | 1993 int end_instr = end.InstructionIndex(); |
1928 ASSERT(start_instr <= end_instr); | 1994 ASSERT(start_instr <= end_instr); |
1929 | 1995 |
1930 // We have no choice | 1996 // We have no choice |
1931 if (start_instr == end_instr) return end; | 1997 if (start_instr == end_instr) return end; |
1932 | 1998 |
1933 HBasicBlock* end_block = GetBlock(start); | 1999 HBasicBlock* end_block = GetBlock(start); |
1934 HBasicBlock* start_block = GetBlock(end); | 2000 HBasicBlock* start_block = GetBlock(end); |
1935 | 2001 |
1936 if (end_block == start_block) { | 2002 if (end_block == start_block) { |
1937 // The interval is split in the same basic block. Split at latest possible | 2003 // The interval is split in the same basic block. Split at latest possible |
1938 // position. | 2004 // position. |
1939 return end; | 2005 return end; |
1940 } | 2006 } |
1941 | 2007 |
1942 HBasicBlock* block = end_block; | 2008 HBasicBlock* block = end_block; |
1943 // Move to the most outside loop header. | 2009 // Find header of outermost loop. |
1944 while (block->parent_loop_header() != NULL && | 2010 while (block->parent_loop_header() != NULL && |
1945 block->parent_loop_header()->block_id() > start_block->block_id()) { | 2011 block->parent_loop_header()->block_id() > start_block->block_id()) { |
1946 block = block->parent_loop_header(); | 2012 block = block->parent_loop_header(); |
1947 } | 2013 } |
1948 | 2014 |
1949 if (block == end_block) { | 2015 if (block == end_block) return end; |
1950 return end; | |
1951 } | |
1952 | 2016 |
1953 return LifetimePosition::FromInstructionIndex( | 2017 return LifetimePosition::FromInstructionIndex( |
1954 block->first_instruction_index()); | 2018 block->first_instruction_index()); |
1955 } | 2019 } |
1956 | 2020 |
1957 | 2021 |
1958 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { | 2022 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { |
1959 return pos.IsInstructionStart() && | 2023 LiveRange* second_part = SplitAt(range, pos); |
1960 chunk_->instructions()->at(pos.InstructionIndex())->IsLabel(); | 2024 Spill(second_part); |
1961 } | 2025 } |
1962 | 2026 |
1963 | 2027 |
1964 void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) { | 2028 void LAllocator::SpillBetween(LiveRange* range, |
1965 UsePosition* prev_pos = prev->AddUsePosition( | 2029 LifetimePosition start, |
1966 LifetimePosition::FromInstructionIndex(pos)); | 2030 LifetimePosition end) { |
1967 UsePosition* next_pos = next->AddUsePosition( | 2031 ASSERT(start.Value() < end.Value()); |
1968 LifetimePosition::FromInstructionIndex(pos)); | 2032 LiveRange* second_part = SplitAt(range, start); |
1969 LOperand* prev_operand = prev_pos->operand(); | |
1970 LOperand* next_operand = next_pos->operand(); | |
1971 LGap* gap = chunk_->GetGapAt(pos); | |
1972 gap->GetOrCreateParallelMove(LGap::START)-> | |
1973 AddMove(prev_operand, next_operand); | |
1974 next_pos->set_hint(prev_operand); | |
1975 } | |
1976 | 2033 |
2034 if (second_part->Start().Value() < end.Value()) { | |
2035 // Split result intersects with [start, end[. | |
2036 // Split it at position between ]start+1, end[, spill middle part | |
2037 // and put the rest to unhandled. | |
2038 LiveRange* third_part = SplitBetween( | |
2039 second_part, | |
2040 second_part->Start().InstructionEnd(), | |
2041 end.PrevInstruction().InstructionEnd()); | |
1977 | 2042 |
1978 LiveRange* LAllocator::Split(LiveRange* range, LifetimePosition pos) { | 2043 ASSERT(third_part != second_part); |
1979 ASSERT(!range->IsFixed()); | |
1980 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); | |
1981 if (pos.Value() <= range->Start().Value()) { | |
1982 return range; | |
1983 } | |
1984 LiveRange* result = LiveRangeFor(next_virtual_register_++); | |
1985 range->SplitAt(pos, result); | |
1986 return result; | |
1987 } | |
1988 | 2044 |
1989 | 2045 Spill(second_part); |
1990 void LAllocator::SplitAndSpill(LiveRange* range, | |
1991 LifetimePosition start, | |
1992 LifetimePosition end) { | |
1993 // We have an interval range and want to make sure that it is | |
1994 // spilled at start and at most spilled until end. | |
1995 ASSERT(start.Value() < end.Value()); | |
1996 LiveRange* tail_part = Split(range, start); | |
1997 if (tail_part->Start().Value() < end.Value()) { | |
1998 LiveRange* third_part = Split(tail_part, | |
1999 tail_part->Start().NextInstruction(), | |
2000 end); | |
2001 Spill(tail_part); | |
2002 ASSERT(third_part != tail_part); | |
2003 AddToUnhandledSorted(third_part); | 2046 AddToUnhandledSorted(third_part); |
2004 } else { | 2047 } else { |
2005 AddToUnhandledSorted(tail_part); | 2048 // Split result does not intersect with [start, end[. |
2049 // Nothing to spill. Just put split result to unhandled as whole. | |
2050 AddToUnhandledSorted(second_part); | |
2006 } | 2051 } |
2007 } | 2052 } |
2008 | 2053 |
2009 | 2054 |
2010 void LAllocator::SplitAndSpill(LiveRange* range, LifetimePosition at) { | |
2011 LiveRange* second_part = Split(range, at); | |
2012 Spill(second_part); | |
2013 } | |
2014 | |
2015 | |
2016 void LAllocator::Spill(LiveRange* range) { | 2055 void LAllocator::Spill(LiveRange* range) { |
2017 ASSERT(!range->IsSpilled()); | 2056 ASSERT(!range->IsSpilled()); |
2018 TraceAlloc("Spilling live range %d\n", range->id()); | 2057 TraceAlloc("Spilling live range %d\n", range->id()); |
2019 LiveRange* first = range->TopLevel(); | 2058 LiveRange* first = range->TopLevel(); |
2020 | 2059 |
2021 if (!first->HasAllocatedSpillOperand()) { | 2060 if (!first->HasAllocatedSpillOperand()) { |
2022 LOperand* op = TryReuseSpillSlot(range); | 2061 LOperand* op = TryReuseSpillSlot(range); |
2023 if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == XMM_REGISTERS); | 2062 if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS); |
2024 first->SetSpillOperand(op); | 2063 first->SetSpillOperand(op); |
2025 } | 2064 } |
2026 range->MakeSpilled(); | 2065 range->MakeSpilled(); |
2027 } | 2066 } |
2028 | 2067 |
2029 | 2068 |
// Returns the number of registers available for the current allocation
// mode (set elsewhere from num_registers_).
int LAllocator::RegisterCount() const {
  return num_registers_;
}
2033 | 2072 |
2034 | 2073 |
#ifdef DEBUG


// Debug-only consistency check: runs LiveRange::Verify() on every created
// live range. Entries in the list may be NULL and are skipped.
void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif
2047 | 2086 |
2048 | 2087 |
2049 } } // namespace v8::internal | 2088 } } // namespace v8::internal |
OLD | NEW |