Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(217)

Side by Side Diff: src/lithium-allocator.cc

Issue 6580038: [Isolates] Merge from bleeding_edge, revisions 5934-6100. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/isolates/
Patch Set: '' Created 9 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/lithium-allocator.h ('k') | src/log.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 229 matching lines...) Expand 10 before | Expand all | Expand 10 after
240 UsePosition* pos = first_pos_; 240 UsePosition* pos = first_pos_;
241 while (pos != NULL && !pos->HasHint()) pos = pos->next(); 241 while (pos != NULL && !pos->HasHint()) pos = pos->next();
242 return pos; 242 return pos;
243 } 243 }
244 244
245 245
246 LOperand* LiveRange::CreateAssignedOperand() { 246 LOperand* LiveRange::CreateAssignedOperand() {
247 LOperand* op = NULL; 247 LOperand* op = NULL;
248 if (HasRegisterAssigned()) { 248 if (HasRegisterAssigned()) {
249 ASSERT(!IsSpilled()); 249 ASSERT(!IsSpilled());
250 if (assigned_double_) { 250 if (IsDouble()) {
251 op = LDoubleRegister::Create(assigned_register()); 251 op = LDoubleRegister::Create(assigned_register());
252 } else { 252 } else {
253 op = LRegister::Create(assigned_register()); 253 op = LRegister::Create(assigned_register());
254 } 254 }
255 } else if (IsSpilled()) { 255 } else if (IsSpilled()) {
256 ASSERT(!HasRegisterAssigned()); 256 ASSERT(!HasRegisterAssigned());
257 op = TopLevel()->GetSpillOperand(); 257 op = TopLevel()->GetSpillOperand();
258 ASSERT(!op->IsUnallocated()); 258 ASSERT(!op->IsUnallocated());
259 } else { 259 } else {
260 LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE); 260 LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
(...skipping 22 matching lines...) Expand all
283 LifetimePosition start = 283 LifetimePosition start =
284 current_interval_ == NULL ? LifetimePosition::Invalid() 284 current_interval_ == NULL ? LifetimePosition::Invalid()
285 : current_interval_->start(); 285 : current_interval_->start();
286 if (to_start_of->start().Value() > start.Value()) { 286 if (to_start_of->start().Value() > start.Value()) {
287 current_interval_ = to_start_of; 287 current_interval_ = to_start_of;
288 } 288 }
289 } 289 }
290 290
291 291
292 void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) { 292 void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) {
293 ASSERT(Start().Value() <= position.Value()); 293 ASSERT(Start().Value() < position.Value());
294 ASSERT(result->IsEmpty()); 294 ASSERT(result->IsEmpty());
295 // Find the last interval that ends before the position. If the 295 // Find the last interval that ends before the position. If the
296 // position is contained in one of the intervals in the chain, we 296 // position is contained in one of the intervals in the chain, we
297 // split that interval and use the first part. 297 // split that interval and use the first part.
298 UseInterval* current = FirstSearchIntervalForPosition(position); 298 UseInterval* current = FirstSearchIntervalForPosition(position);
299
300 // If the split position coincides with the beginning of a use interval
 301 // If the split position coincides with the beginning of a use interval
302 bool split_at_start = false;
303
299 while (current != NULL) { 304 while (current != NULL) {
300 if (current->Contains(position)) { 305 if (current->Contains(position)) {
301 current->SplitAt(position); 306 current->SplitAt(position);
302 break; 307 break;
303 } 308 }
304 UseInterval* next = current->next(); 309 UseInterval* next = current->next();
305 if (next->start().Value() >= position.Value()) break; 310 if (next->start().Value() >= position.Value()) {
311 split_at_start = (next->start().Value() == position.Value());
312 break;
313 }
306 current = next; 314 current = next;
307 } 315 }
308 316
309 // Partition original use intervals to the two live ranges. 317 // Partition original use intervals to the two live ranges.
310 UseInterval* before = current; 318 UseInterval* before = current;
311 UseInterval* after = before->next(); 319 UseInterval* after = before->next();
312 result->last_interval_ = (last_interval_ == before) 320 result->last_interval_ = (last_interval_ == before)
313 ? after // Only interval in the range after split. 321 ? after // Only interval in the range after split.
314 : last_interval_; // Last interval of the original range. 322 : last_interval_; // Last interval of the original range.
315 result->first_interval_ = after; 323 result->first_interval_ = after;
316 last_interval_ = before; 324 last_interval_ = before;
317 325
318 // Find the last use position before the split and the first use 326 // Find the last use position before the split and the first use
319 // position after it. 327 // position after it.
320 UsePosition* use_after = first_pos_; 328 UsePosition* use_after = first_pos_;
321 UsePosition* use_before = NULL; 329 UsePosition* use_before = NULL;
322 while (use_after != NULL && use_after->pos().Value() <= position.Value()) { 330 if (split_at_start) {
323 use_before = use_after; 331 // The split position coincides with the beginning of a use interval (the
324 use_after = use_after->next(); 332 // end of a lifetime hole). Use at this position should be attributed to
 333 // the split child because the split child owns the use interval covering it.
334 while (use_after != NULL && use_after->pos().Value() < position.Value()) {
335 use_before = use_after;
336 use_after = use_after->next();
337 }
338 } else {
339 while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
340 use_before = use_after;
341 use_after = use_after->next();
342 }
325 } 343 }
326 344
327 // Partition original use positions to the two live ranges. 345 // Partition original use positions to the two live ranges.
328 if (use_before != NULL) { 346 if (use_before != NULL) {
329 use_before->next_ = NULL; 347 use_before->next_ = NULL;
330 } else { 348 } else {
331 first_pos_ = NULL; 349 first_pos_ = NULL;
332 } 350 }
333 result->first_pos_ = use_after; 351 result->first_pos_ = use_after;
334 352
(...skipping 166 matching lines...) Expand 10 before | Expand all | Expand 10 after
501 UseInterval* a = FirstSearchIntervalForPosition(b->start()); 519 UseInterval* a = FirstSearchIntervalForPosition(b->start());
502 while (a != NULL && b != NULL) { 520 while (a != NULL && b != NULL) {
503 if (a->start().Value() > other->End().Value()) break; 521 if (a->start().Value() > other->End().Value()) break;
504 if (b->start().Value() > End().Value()) break; 522 if (b->start().Value() > End().Value()) break;
505 LifetimePosition cur_intersection = a->Intersect(b); 523 LifetimePosition cur_intersection = a->Intersect(b);
506 if (cur_intersection.IsValid()) { 524 if (cur_intersection.IsValid()) {
507 return cur_intersection; 525 return cur_intersection;
508 } 526 }
509 if (a->start().Value() < b->start().Value()) { 527 if (a->start().Value() < b->start().Value()) {
510 a = a->next(); 528 a = a->next();
511 if (a == NULL && a->start().Value() > other->End().Value()) break; 529 if (a == NULL || a->start().Value() > other->End().Value()) break;
512 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); 530 AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
513 } else { 531 } else {
514 b = b->next(); 532 b = b->next();
515 } 533 }
516 } 534 }
517 return LifetimePosition::Invalid(); 535 return LifetimePosition::Invalid();
518 } 536 }
519 537
520 538
521 void LAllocator::InitializeLivenessAnalysis() { 539 void LAllocator::InitializeLivenessAnalysis() {
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
560 } 578 }
561 579
562 580
563 void LAllocator::AddInitialIntervals(HBasicBlock* block, 581 void LAllocator::AddInitialIntervals(HBasicBlock* block,
564 BitVector* live_out) { 582 BitVector* live_out) {
565 // Add an interval that includes the entire block to the live range for 583 // Add an interval that includes the entire block to the live range for
566 // each live_out value. 584 // each live_out value.
567 LifetimePosition start = LifetimePosition::FromInstructionIndex( 585 LifetimePosition start = LifetimePosition::FromInstructionIndex(
568 block->first_instruction_index()); 586 block->first_instruction_index());
569 LifetimePosition end = LifetimePosition::FromInstructionIndex( 587 LifetimePosition end = LifetimePosition::FromInstructionIndex(
570 block->last_instruction_index()); 588 block->last_instruction_index()).NextInstruction();
571 BitVector::Iterator iterator(live_out); 589 BitVector::Iterator iterator(live_out);
572 while (!iterator.Done()) { 590 while (!iterator.Done()) {
573 int operand_index = iterator.Current(); 591 int operand_index = iterator.Current();
574 LiveRange* range = LiveRangeFor(operand_index); 592 LiveRange* range = LiveRangeFor(operand_index);
575 if (!range->IsEmpty() && 593 range->AddUseInterval(start, end);
576 range->Start().Value() == end.NextInstruction().Value()) {
577 range->AddUseInterval(start, end.NextInstruction());
578 } else {
579 range->AddUseInterval(start, end);
580 }
581 iterator.Advance(); 594 iterator.Advance();
582 } 595 }
583 } 596 }
584 597
585 598
586 int LAllocator::FixedDoubleLiveRangeID(int index) { 599 int LAllocator::FixedDoubleLiveRangeID(int index) {
587 return -index - 1 - Register::kNumAllocatableRegisters; 600 return -index - 1 - Register::kNumAllocatableRegisters;
588 } 601 }
589 602
590 603
(...skipping 27 matching lines...) Expand all
618 LiveRange* LAllocator::FixedLiveRangeFor(int index) { 631 LiveRange* LAllocator::FixedLiveRangeFor(int index) {
619 if (index >= fixed_live_ranges_.length()) { 632 if (index >= fixed_live_ranges_.length()) {
620 fixed_live_ranges_.AddBlock(NULL, 633 fixed_live_ranges_.AddBlock(NULL,
621 index - fixed_live_ranges_.length() + 1); 634 index - fixed_live_ranges_.length() + 1);
622 } 635 }
623 636
624 LiveRange* result = fixed_live_ranges_[index]; 637 LiveRange* result = fixed_live_ranges_[index];
625 if (result == NULL) { 638 if (result == NULL) {
626 result = new LiveRange(FixedLiveRangeID(index)); 639 result = new LiveRange(FixedLiveRangeID(index));
627 ASSERT(result->IsFixed()); 640 ASSERT(result->IsFixed());
628 result->set_assigned_register(index, false); 641 result->set_assigned_register(index, GENERAL_REGISTERS);
629 fixed_live_ranges_[index] = result; 642 fixed_live_ranges_[index] = result;
630 } 643 }
631 return result; 644 return result;
632 } 645 }
633 646
634 647
635 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) { 648 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
636 if (index >= fixed_double_live_ranges_.length()) { 649 if (index >= fixed_double_live_ranges_.length()) {
637 fixed_double_live_ranges_.AddBlock(NULL, 650 fixed_double_live_ranges_.AddBlock(NULL,
638 index - fixed_double_live_ranges_.length() + 1); 651 index - fixed_double_live_ranges_.length() + 1);
639 } 652 }
640 653
641 LiveRange* result = fixed_double_live_ranges_[index]; 654 LiveRange* result = fixed_double_live_ranges_[index];
642 if (result == NULL) { 655 if (result == NULL) {
643 result = new LiveRange(FixedDoubleLiveRangeID(index)); 656 result = new LiveRange(FixedDoubleLiveRangeID(index));
644 ASSERT(result->IsFixed()); 657 ASSERT(result->IsFixed());
645 result->set_assigned_register(index, true); 658 result->set_assigned_register(index, DOUBLE_REGISTERS);
646 fixed_double_live_ranges_[index] = result; 659 fixed_double_live_ranges_[index] = result;
647 } 660 }
648 return result; 661 return result;
649 } 662 }
650 663
651 LiveRange* LAllocator::LiveRangeFor(int index) { 664 LiveRange* LAllocator::LiveRangeFor(int index) {
652 if (index >= live_ranges_.length()) { 665 if (index >= live_ranges_.length()) {
653 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1); 666 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
654 } 667 }
655 LiveRange* result = live_ranges_[index]; 668 LiveRange* result = live_ranges_[index];
(...skipping 297 matching lines...) Expand 10 before | Expand all | Expand 10 after
953 LOperand* temp = summary->TempAt(i); 966 LOperand* temp = summary->TempAt(i);
954 if (summary->IsCall()) { 967 if (summary->IsCall()) {
955 if (temp->IsRegister()) continue; 968 if (temp->IsRegister()) continue;
956 if (temp->IsUnallocated()) { 969 if (temp->IsUnallocated()) {
957 LUnallocated* temp_unalloc = LUnallocated::cast(temp); 970 LUnallocated* temp_unalloc = LUnallocated::cast(temp);
958 if (temp_unalloc->HasFixedPolicy()) { 971 if (temp_unalloc->HasFixedPolicy()) {
959 continue; 972 continue;
960 } 973 }
961 } 974 }
962 } 975 }
963 Use(block_start_position, curr_position, temp, NULL); 976 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
964 Define(curr_position.PrevInstruction(), temp, NULL); 977 Define(curr_position, temp, NULL);
965 } 978 }
966 } 979 }
967 } 980 }
968 981
969 index = index - 1; 982 index = index - 1;
970 } 983 }
971 } 984 }
972 985
973 986
974 void LAllocator::ResolvePhis(HBasicBlock* block) { 987 void LAllocator::ResolvePhis(HBasicBlock* block) {
(...skipping 276 matching lines...) Expand 10 before | Expand all | Expand 10 after
1251 PrintF("First use is at %d\n", range->first_pos()->pos().Value()); 1264 PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1252 iterator.Advance(); 1265 iterator.Advance();
1253 } 1266 }
1254 ASSERT(!found); 1267 ASSERT(!found);
1255 } 1268 }
1256 #endif 1269 #endif
1257 } 1270 }
1258 } 1271 }
1259 1272
1260 1273
1261 void LAllocator::AllocateGeneralRegisters() {
1262 HPhase phase("Allocate general registers", this);
1263 num_registers_ = Register::kNumAllocatableRegisters;
1264 mode_ = CPU_REGISTERS;
1265 AllocateRegisters();
1266 }
1267
1268
1269 bool LAllocator::SafePointsAreInOrder() const { 1274 bool LAllocator::SafePointsAreInOrder() const {
1270 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); 1275 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1271 int safe_point = 0; 1276 int safe_point = 0;
1272 for (int i = 0; i < pointer_maps->length(); ++i) { 1277 for (int i = 0; i < pointer_maps->length(); ++i) {
1273 LPointerMap* map = pointer_maps->at(i); 1278 LPointerMap* map = pointer_maps->at(i);
1274 if (safe_point > map->lithium_position()) return false; 1279 if (safe_point > map->lithium_position()) return false;
1275 safe_point = map->lithium_position(); 1280 safe_point = map->lithium_position();
1276 } 1281 }
1277 return true; 1282 return true;
1278 } 1283 }
(...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after
1390 instruction->MarkSpilledDoubleRegister(reg_index, spill_operand); 1395 instruction->MarkSpilledDoubleRegister(reg_index, spill_operand);
1391 } else { 1396 } else {
1392 instruction->MarkSpilledRegister(reg_index, spill_operand); 1397 instruction->MarkSpilledRegister(reg_index, spill_operand);
1393 } 1398 }
1394 } 1399 }
1395 } 1400 }
1396 } 1401 }
1397 } 1402 }
1398 1403
1399 1404
1405 void LAllocator::AllocateGeneralRegisters() {
1406 HPhase phase("Allocate general registers", this);
1407 num_registers_ = Register::kNumAllocatableRegisters;
1408 mode_ = GENERAL_REGISTERS;
1409 AllocateRegisters();
1410 }
1411
1412
1400 void LAllocator::AllocateDoubleRegisters() { 1413 void LAllocator::AllocateDoubleRegisters() {
1401 HPhase phase("Allocate double registers", this); 1414 HPhase phase("Allocate double registers", this);
1402 num_registers_ = DoubleRegister::kNumAllocatableRegisters; 1415 num_registers_ = DoubleRegister::kNumAllocatableRegisters;
1403 mode_ = XMM_REGISTERS; 1416 mode_ = DOUBLE_REGISTERS;
1404 AllocateRegisters(); 1417 AllocateRegisters();
1405 } 1418 }
1406 1419
1407 1420
1408 void LAllocator::AllocateRegisters() { 1421 void LAllocator::AllocateRegisters() {
1409 ASSERT(mode_ != NONE); 1422 ASSERT(mode_ != NONE);
1410 reusable_slots_.Clear(); 1423 reusable_slots_.Clear();
1411 1424
1412 for (int i = 0; i < live_ranges_.length(); ++i) { 1425 for (int i = 0; i < live_ranges_.length(); ++i) {
1413 if (live_ranges_[i] != NULL) { 1426 if (live_ranges_[i] != NULL) {
1414 if (HasDoubleValue(live_ranges_[i]->id()) == (mode_ == XMM_REGISTERS)) { 1427 if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
1415 AddToUnhandledUnsorted(live_ranges_[i]); 1428 AddToUnhandledUnsorted(live_ranges_[i]);
1416 } 1429 }
1417 } 1430 }
1418 } 1431 }
1419 SortUnhandled(); 1432 SortUnhandled();
1420 ASSERT(UnhandledIsSorted()); 1433 ASSERT(UnhandledIsSorted());
1421 1434
1422 ASSERT(active_live_ranges_.is_empty()); 1435 ASSERT(active_live_ranges_.is_empty());
1423 ASSERT(inactive_live_ranges_.is_empty()); 1436 ASSERT(inactive_live_ranges_.is_empty());
1424 1437
1425 if (mode_ == XMM_REGISTERS) { 1438 if (mode_ == DOUBLE_REGISTERS) {
1426 for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) { 1439 for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
1427 LiveRange* current = fixed_double_live_ranges_.at(i); 1440 LiveRange* current = fixed_double_live_ranges_.at(i);
1428 if (current != NULL) { 1441 if (current != NULL) {
1429 AddToInactive(current); 1442 AddToInactive(current);
1430 } 1443 }
1431 } 1444 }
1432 } else { 1445 } else {
1433 for (int i = 0; i < fixed_live_ranges_.length(); ++i) { 1446 for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
1434 LiveRange* current = fixed_live_ranges_.at(i); 1447 LiveRange* current = fixed_live_ranges_.at(i);
1435 if (current != NULL) { 1448 if (current != NULL) {
(...skipping 20 matching lines...) Expand all
1456 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); 1469 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
1457 // If the range already has a spill operand and it doesn't need a 1470 // If the range already has a spill operand and it doesn't need a
1458 // register immediately, split it and spill the first part of the range. 1471 // register immediately, split it and spill the first part of the range.
1459 if (pos == NULL) { 1472 if (pos == NULL) {
1460 Spill(current); 1473 Spill(current);
1461 continue; 1474 continue;
1462 } else if (pos->pos().Value() > 1475 } else if (pos->pos().Value() >
1463 current->Start().NextInstruction().Value()) { 1476 current->Start().NextInstruction().Value()) {
1464 // Do not spill live range eagerly if use position that can benefit from 1477 // Do not spill live range eagerly if use position that can benefit from
1465 // the register is too close to the start of live range. 1478 // the register is too close to the start of live range.
1466 LiveRange* part = Split(current, 1479 SpillBetween(current, current->Start(), pos->pos());
1467 current->Start().NextInstruction(),
1468 pos->pos());
1469 Spill(current);
1470 AddToUnhandledSorted(part);
1471 ASSERT(UnhandledIsSorted()); 1480 ASSERT(UnhandledIsSorted());
1472 continue; 1481 continue;
1473 } 1482 }
1474 } 1483 }
1475 1484
1476 for (int i = 0; i < active_live_ranges_.length(); ++i) { 1485 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1477 LiveRange* cur_active = active_live_ranges_.at(i); 1486 LiveRange* cur_active = active_live_ranges_.at(i);
1478 if (cur_active->End().Value() <= position.Value()) { 1487 if (cur_active->End().Value() <= position.Value()) {
1479 ActiveToHandled(cur_active); 1488 ActiveToHandled(cur_active);
1480 --i; // The live range was removed from the list of active live ranges. 1489 --i; // The live range was removed from the list of active live ranges.
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
1514 1523
1515 void LAllocator::Setup() { 1524 void LAllocator::Setup() {
1516 LConstantOperand::SetupCache(); 1525 LConstantOperand::SetupCache();
1517 LStackSlot::SetupCache(); 1526 LStackSlot::SetupCache();
1518 LDoubleStackSlot::SetupCache(); 1527 LDoubleStackSlot::SetupCache();
1519 LRegister::SetupCache(); 1528 LRegister::SetupCache();
1520 LDoubleRegister::SetupCache(); 1529 LDoubleRegister::SetupCache();
1521 } 1530 }
1522 1531
1523 1532
1533 const char* LAllocator::RegisterName(int allocation_index) {
1534 ASSERT(mode_ != NONE);
1535 if (mode_ == GENERAL_REGISTERS) {
1536 return Register::AllocationIndexToString(allocation_index);
1537 } else {
1538 return DoubleRegister::AllocationIndexToString(allocation_index);
1539 }
1540 }
1541
1542
1524 void LAllocator::TraceAlloc(const char* msg, ...) { 1543 void LAllocator::TraceAlloc(const char* msg, ...) {
1525 if (FLAG_trace_alloc) { 1544 if (FLAG_trace_alloc) {
1526 va_list arguments; 1545 va_list arguments;
1527 va_start(arguments, msg); 1546 va_start(arguments, msg);
1528 OS::VPrint(msg, arguments); 1547 OS::VPrint(msg, arguments);
1529 va_end(arguments); 1548 va_end(arguments);
1530 } 1549 }
1531 } 1550 }
1532 1551
1533 1552
1534 void LAllocator::RecordUse(HValue* value, LUnallocated* operand) { 1553 void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
1535 operand->set_virtual_register(value->id()); 1554 operand->set_virtual_register(value->id());
1536 current_summary()->AddInput(operand); 1555 current_summary()->AddInput(operand);
1537 } 1556 }
1538 1557
1539 1558
1540 bool LAllocator::HasTaggedValue(int virtual_register) const { 1559 bool LAllocator::HasTaggedValue(int virtual_register) const {
1541 HValue* value = graph()->LookupValue(virtual_register); 1560 HValue* value = graph()->LookupValue(virtual_register);
1542 if (value == NULL) return false; 1561 if (value == NULL) return false;
1543 return value->representation().IsTagged(); 1562 return value->representation().IsTagged();
1544 } 1563 }
1545 1564
1546 1565
1547 bool LAllocator::HasDoubleValue(int virtual_register) const { 1566 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
1548 HValue* value = graph()->LookupValue(virtual_register); 1567 HValue* value = graph()->LookupValue(virtual_register);
1549 if (value == NULL) return false; 1568 if (value != NULL && value->representation().IsDouble()) {
1550 return value->representation().IsDouble(); 1569 return DOUBLE_REGISTERS;
1570 }
1571 return GENERAL_REGISTERS;
1551 } 1572 }
1552 1573
1553 1574
1554 void LAllocator::MarkAsCall() { 1575 void LAllocator::MarkAsCall() {
1555 current_summary()->MarkAsCall(); 1576 current_summary()->MarkAsCall();
1556 } 1577 }
1557 1578
1558 1579
1559 void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) { 1580 void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
1560 operand->set_virtual_register(instr->id()); 1581 operand->set_virtual_register(instr->id());
(...skipping 160 matching lines...) Expand 10 before | Expand all | Expand 10 after
1721 1742
1722 1743
1723 void LAllocator::InactiveToActive(LiveRange* range) { 1744 void LAllocator::InactiveToActive(LiveRange* range) {
1724 ASSERT(inactive_live_ranges_.Contains(range)); 1745 ASSERT(inactive_live_ranges_.Contains(range));
1725 inactive_live_ranges_.RemoveElement(range); 1746 inactive_live_ranges_.RemoveElement(range);
1726 active_live_ranges_.Add(range); 1747 active_live_ranges_.Add(range);
1727 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); 1748 TraceAlloc("Moving live range %d from inactive to active\n", range->id());
1728 } 1749 }
1729 1750
1730 1751
1752 // TryAllocateFreeReg and AllocateBlockedReg assume this
1753 // when allocating local arrays.
1754 STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
1755 Register::kNumAllocatableRegisters);
1756
1757
1731 bool LAllocator::TryAllocateFreeReg(LiveRange* current) { 1758 bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
1732 LifetimePosition max_pos = LifetimePosition::FromInstructionIndex( 1759 LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];
1733 chunk_->instructions()->length() + 1); 1760
1734 ASSERT(DoubleRegister::kNumAllocatableRegisters >= 1761 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
1735 Register::kNumAllocatableRegisters); 1762 free_until_pos[i] = LifetimePosition::MaxPosition();
1736 EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters> 1763 }
1737 free_pos(max_pos); 1764
1738 for (int i = 0; i < active_live_ranges_.length(); ++i) { 1765 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1739 LiveRange* cur_active = active_live_ranges_.at(i); 1766 LiveRange* cur_active = active_live_ranges_.at(i);
1740 free_pos[cur_active->assigned_register()] = 1767 free_until_pos[cur_active->assigned_register()] =
1741 LifetimePosition::FromInstructionIndex(0); 1768 LifetimePosition::FromInstructionIndex(0);
1742 } 1769 }
1743 1770
1744 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { 1771 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1745 LiveRange* cur_inactive = inactive_live_ranges_.at(i); 1772 LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1746 ASSERT(cur_inactive->End().Value() > current->Start().Value()); 1773 ASSERT(cur_inactive->End().Value() > current->Start().Value());
1747 LifetimePosition next_intersection = 1774 LifetimePosition next_intersection =
1748 cur_inactive->FirstIntersection(current); 1775 cur_inactive->FirstIntersection(current);
1749 if (!next_intersection.IsValid()) continue; 1776 if (!next_intersection.IsValid()) continue;
1750 int cur_reg = cur_inactive->assigned_register(); 1777 int cur_reg = cur_inactive->assigned_register();
1751 free_pos[cur_reg] = Min(free_pos[cur_reg], next_intersection); 1778 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
1752 } 1779 }
1753 1780
1754 UsePosition* pos = current->FirstPosWithHint(); 1781 UsePosition* hinted_use = current->FirstPosWithHint();
1755 if (pos != NULL) { 1782 if (hinted_use != NULL) {
1756 LOperand* hint = pos->hint(); 1783 LOperand* hint = hinted_use->hint();
1757 if (hint->IsRegister() || hint->IsDoubleRegister()) { 1784 if (hint->IsRegister() || hint->IsDoubleRegister()) {
1758 int register_index = hint->index(); 1785 int register_index = hint->index();
1759 TraceAlloc("Found reg hint %d for live range %d (free [%d, end %d[)\n", 1786 TraceAlloc(
1760 register_index, 1787 "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
1761 current->id(), 1788 RegisterName(register_index),
1762 free_pos[register_index].Value(), 1789 free_until_pos[register_index].Value(),
1763 current->End().Value()); 1790 current->id(),
1764 if (free_pos[register_index].Value() >= current->End().Value()) { 1791 current->End().Value());
1765 TraceAlloc("Assigning preferred reg %d to live range %d\n", 1792
1766 register_index, 1793 // The desired register is free until the end of the current live range.
1794 if (free_until_pos[register_index].Value() >= current->End().Value()) {
1795 TraceAlloc("Assigning preferred reg %s to live range %d\n",
1796 RegisterName(register_index),
1767 current->id()); 1797 current->id());
1768 current->set_assigned_register(register_index, mode_ == XMM_REGISTERS); 1798 current->set_assigned_register(register_index, mode_);
1769 return true; 1799 return true;
1770 } 1800 }
1771 } 1801 }
1772 } 1802 }
1773 1803
1774 int max_reg = 0; 1804 // Find the register which stays free for the longest time.
1805 int reg = 0;
1775 for (int i = 1; i < RegisterCount(); ++i) { 1806 for (int i = 1; i < RegisterCount(); ++i) {
1776 if (free_pos[i].Value() > free_pos[max_reg].Value()) { 1807 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
1777 max_reg = i; 1808 reg = i;
1778 } 1809 }
1779 } 1810 }
1780 1811
1781 if (free_pos[max_reg].InstructionIndex() == 0) { 1812 LifetimePosition pos = free_until_pos[reg];
1813
1814 if (pos.Value() <= current->Start().Value()) {
1815 // All registers are blocked.
1782 return false; 1816 return false;
1783 } else if (free_pos[max_reg].Value() >= current->End().Value()) {
1784 TraceAlloc("Assigning reg %d to live range %d\n", max_reg, current->id());
1785 current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS);
1786 } else {
1787 // Split the interval at the nearest gap and never split an interval at its
1788 // start position.
1789 LifetimePosition pos =
1790 LifetimePosition::FromInstructionIndex(
1791 chunk_->NearestGapPos(free_pos[max_reg].InstructionIndex()));
1792 if (pos.Value() <= current->Start().Value()) return false;
1793 LiveRange* second_range = Split(current, pos);
1794 AddToUnhandledSorted(second_range);
1795 current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS);
1796 } 1817 }
1797 1818
1819 if (pos.Value() < current->End().Value()) {
1820 // Register reg is available at the range start but becomes blocked before
1821 // the range end. Split current at position where it becomes blocked.
1822 LiveRange* tail = SplitAt(current, pos);
1823 AddToUnhandledSorted(tail);
1824 }
1825
1826
1827 // Register reg is available at the range start and is free until
1828 // the range end.
1829 ASSERT(pos.Value() >= current->End().Value());
1830 TraceAlloc("Assigning free reg %s to live range %d\n",
1831 RegisterName(reg),
1832 current->id());
1833 current->set_assigned_register(reg, mode_);
1834
1798 return true; 1835 return true;
1799 } 1836 }
1800 1837
1801 1838
1802 void LAllocator::AllocateBlockedReg(LiveRange* current) { 1839 void LAllocator::AllocateBlockedReg(LiveRange* current) {
1803 LifetimePosition max_pos = 1840 UsePosition* register_use = current->NextRegisterPosition(current->Start());
1804 LifetimePosition::FromInstructionIndex( 1841 if (register_use == NULL) {
1805 chunk_->instructions()->length() + 1); 1842 // There is no use in the current live range that requires a register.
1806 ASSERT(DoubleRegister::kNumAllocatableRegisters >= 1843 // We can just spill it.
1807 Register::kNumAllocatableRegisters); 1844 Spill(current);
1808 EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters> 1845 return;
1809 use_pos(max_pos); 1846 }
1810 EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters> 1847
1811 block_pos(max_pos); 1848
1849 LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
1850 LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];
1851
1852 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
1853 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
1854 }
1812 1855
1813 for (int i = 0; i < active_live_ranges_.length(); ++i) { 1856 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1814 LiveRange* range = active_live_ranges_[i]; 1857 LiveRange* range = active_live_ranges_[i];
1815 int cur_reg = range->assigned_register(); 1858 int cur_reg = range->assigned_register();
1816 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { 1859 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
1817 block_pos[cur_reg] = use_pos[cur_reg] = 1860 block_pos[cur_reg] = use_pos[cur_reg] =
1818 LifetimePosition::FromInstructionIndex(0); 1861 LifetimePosition::FromInstructionIndex(0);
1819 } else { 1862 } else {
1820 UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial( 1863 UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
1821 current->Start()); 1864 current->Start());
(...skipping 12 matching lines...) Expand all
1834 if (!next_intersection.IsValid()) continue; 1877 if (!next_intersection.IsValid()) continue;
1835 int cur_reg = range->assigned_register(); 1878 int cur_reg = range->assigned_register();
1836 if (range->IsFixed()) { 1879 if (range->IsFixed()) {
1837 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); 1880 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
1838 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); 1881 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
1839 } else { 1882 } else {
1840 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); 1883 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
1841 } 1884 }
1842 } 1885 }
1843 1886
1844 int max_reg = 0; 1887 int reg = 0;
1845 for (int i = 1; i < RegisterCount(); ++i) { 1888 for (int i = 1; i < RegisterCount(); ++i) {
1846 if (use_pos[i].Value() > use_pos[max_reg].Value()) { 1889 if (use_pos[i].Value() > use_pos[reg].Value()) {
1847 max_reg = i; 1890 reg = i;
1848 } 1891 }
1849 } 1892 }
1850 1893
1851 UsePosition* first_usage = current->NextRegisterPosition(current->Start()); 1894 LifetimePosition pos = use_pos[reg];
1852 if (first_usage == NULL) {
1853 Spill(current);
1854 } else if (use_pos[max_reg].Value() < first_usage->pos().Value()) {
1855 SplitAndSpill(current, current->Start(), first_usage->pos());
1856 } else {
1857 if (block_pos[max_reg].Value() < current->End().Value()) {
1858 // Split current before blocked position.
1859 LiveRange* second_range = Split(current,
1860 current->Start(),
1861 block_pos[max_reg]);
1862 AddToUnhandledSorted(second_range);
1863 }
1864 1895
1865 current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS); 1896 if (pos.Value() < register_use->pos().Value()) {
1866 SplitAndSpillIntersecting(current); 1897 // All registers are blocked before the first use that requires a register.
1898 // Spill starting part of live range up to that use.
1899 //
1900 // Corner case: the first use position is equal to the start of the range.
1901 // In this case we have nothing to spill and SpillBetween will just return
1902 // this range to the list of unhandled ones. This will lead to the infinite
1903 // loop.
1904 ASSERT(current->Start().Value() < register_use->pos().Value());
1905 SpillBetween(current, current->Start(), register_use->pos());
1906 return;
1867 } 1907 }
1908
1909 if (block_pos[reg].Value() < current->End().Value()) {
1910 // Register becomes blocked before the current range end. Split before that
1911 // position.
1912 LiveRange* tail = SplitBetween(current,
1913 current->Start(),
1914 block_pos[reg].InstructionStart());
1915 AddToUnhandledSorted(tail);
1916 }
1917
1918 // Register reg is not blocked for the whole range.
1919 ASSERT(block_pos[reg].Value() >= current->End().Value());
1920 TraceAlloc("Assigning blocked reg %s to live range %d\n",
1921 RegisterName(reg),
1922 current->id());
1923 current->set_assigned_register(reg, mode_);
1924
1925 // This register was not free. Thus we need to find and spill
1926 // parts of active and inactive live regions that use the same register
1927 // at the same lifetime positions as current.
1928 SplitAndSpillIntersecting(current);
1868 } 1929 }
1869 1930
1870 1931
1871 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) { 1932 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
1872 ASSERT(current->HasRegisterAssigned()); 1933 ASSERT(current->HasRegisterAssigned());
1873 int reg = current->assigned_register(); 1934 int reg = current->assigned_register();
1874 LifetimePosition split_pos = 1935 LifetimePosition split_pos = current->Start();
1875 LifetimePosition::FromInstructionIndex(
1876 chunk_->NearestGapPos(current->Start().InstructionIndex()));
1877 for (int i = 0; i < active_live_ranges_.length(); ++i) { 1936 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1878 LiveRange* range = active_live_ranges_[i]; 1937 LiveRange* range = active_live_ranges_[i];
1879 if (range->assigned_register() == reg) { 1938 if (range->assigned_register() == reg) {
1880 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); 1939 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1881 if (next_pos == NULL) { 1940 if (next_pos == NULL) {
1882 SplitAndSpill(range, split_pos); 1941 SpillAfter(range, split_pos);
1883 } else { 1942 } else {
1884 SplitAndSpill(range, split_pos, next_pos->pos()); 1943 SpillBetween(range, split_pos, next_pos->pos());
1885 } 1944 }
1886 ActiveToHandled(range); 1945 ActiveToHandled(range);
1887 --i; 1946 --i;
1888 } 1947 }
1889 } 1948 }
1890 1949
1891 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { 1950 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1892 LiveRange* range = inactive_live_ranges_[i]; 1951 LiveRange* range = inactive_live_ranges_[i];
1893 ASSERT(range->End().Value() > current->Start().Value()); 1952 ASSERT(range->End().Value() > current->Start().Value());
1894 if (range->assigned_register() == reg && !range->IsFixed()) { 1953 if (range->assigned_register() == reg && !range->IsFixed()) {
1895 LifetimePosition next_intersection = range->FirstIntersection(current); 1954 LifetimePosition next_intersection = range->FirstIntersection(current);
1896 if (next_intersection.IsValid()) { 1955 if (next_intersection.IsValid()) {
1897 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); 1956 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1898 if (next_pos == NULL) { 1957 if (next_pos == NULL) {
1899 SplitAndSpill(range, split_pos); 1958 SpillAfter(range, split_pos);
1900 } else { 1959 } else {
1901 next_intersection = Min(next_intersection, next_pos->pos()); 1960 next_intersection = Min(next_intersection, next_pos->pos());
1902 SplitAndSpill(range, split_pos, next_intersection); 1961 SpillBetween(range, split_pos, next_intersection);
1903 } 1962 }
1904 InactiveToHandled(range); 1963 InactiveToHandled(range);
1905 --i; 1964 --i;
1906 } 1965 }
1907 } 1966 }
1908 } 1967 }
1909 } 1968 }
1910 1969
1911 1970
1912 LiveRange* LAllocator::Split(LiveRange* range, 1971 bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
1913 LifetimePosition start, 1972 return pos.IsInstructionStart() &&
1914 LifetimePosition end) { 1973 chunk_->instructions()->at(pos.InstructionIndex())->IsLabel();
1974 }
1975
1976
1977 void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) {
1978 UsePosition* prev_pos = prev->AddUsePosition(
1979 LifetimePosition::FromInstructionIndex(pos));
1980 UsePosition* next_pos = next->AddUsePosition(
1981 LifetimePosition::FromInstructionIndex(pos));
1982 LOperand* prev_operand = prev_pos->operand();
1983 LOperand* next_operand = next_pos->operand();
1984 LGap* gap = chunk_->GetGapAt(pos);
1985 gap->GetOrCreateParallelMove(LGap::START)->
1986 AddMove(prev_operand, next_operand);
1987 next_pos->set_hint(prev_operand);
1988 }
1989
1990
1991 LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
1915 ASSERT(!range->IsFixed()); 1992 ASSERT(!range->IsFixed());
1916 TraceAlloc("Splitting live range %d in position between [%d, %d[\n", 1993 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
1994
1995 if (pos.Value() <= range->Start().Value()) return range;
1996
1997 LiveRange* result = LiveRangeFor(next_virtual_register_++);
1998 range->SplitAt(pos, result);
1999 return result;
2000 }
2001
2002
2003 LiveRange* LAllocator::SplitBetween(LiveRange* range,
2004 LifetimePosition start,
2005 LifetimePosition end) {
2006 ASSERT(!range->IsFixed());
2007 TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
1917 range->id(), 2008 range->id(),
1918 start.Value(), 2009 start.Value(),
1919 end.Value()); 2010 end.Value());
1920 2011
1921 LifetimePosition split_pos = FindOptimalSplitPos( 2012 LifetimePosition split_pos = FindOptimalSplitPos(start, end);
1922 start, end.PrevInstruction().InstructionEnd());
1923 ASSERT(split_pos.Value() >= start.Value()); 2013 ASSERT(split_pos.Value() >= start.Value());
1924 return Split(range, split_pos); 2014 return SplitAt(range, split_pos);
1925 } 2015 }
1926 2016
1927 2017
1928 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start, 2018 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
1929 LifetimePosition end) { 2019 LifetimePosition end) {
1930 int start_instr = start.InstructionIndex(); 2020 int start_instr = start.InstructionIndex();
1931 int end_instr = end.InstructionIndex(); 2021 int end_instr = end.InstructionIndex();
1932 ASSERT(start_instr <= end_instr); 2022 ASSERT(start_instr <= end_instr);
1933 2023
1934 // We have no choice 2024 // We have no choice
1935 if (start_instr == end_instr) return end; 2025 if (start_instr == end_instr) return end;
1936 2026
1937 HBasicBlock* end_block = GetBlock(start); 2027 HBasicBlock* end_block = GetBlock(start);
1938 HBasicBlock* start_block = GetBlock(end); 2028 HBasicBlock* start_block = GetBlock(end);
1939 2029
1940 if (end_block == start_block) { 2030 if (end_block == start_block) {
1941 // The interval is split in the same basic block. Split at latest possible 2031 // The interval is split in the same basic block. Split at latest possible
1942 // position. 2032 // position.
1943 return end; 2033 return end;
1944 } 2034 }
1945 2035
1946 HBasicBlock* block = end_block; 2036 HBasicBlock* block = end_block;
1947 // Move to the most outside loop header. 2037 // Find header of outermost loop.
1948 while (block->parent_loop_header() != NULL && 2038 while (block->parent_loop_header() != NULL &&
1949 block->parent_loop_header()->block_id() > start_block->block_id()) { 2039 block->parent_loop_header()->block_id() > start_block->block_id()) {
1950 block = block->parent_loop_header(); 2040 block = block->parent_loop_header();
1951 } 2041 }
1952 2042
1953 if (block == end_block) { 2043 if (block == end_block) return end;
1954 return end;
1955 }
1956 2044
1957 return LifetimePosition::FromInstructionIndex( 2045 return LifetimePosition::FromInstructionIndex(
1958 block->first_instruction_index()); 2046 block->first_instruction_index());
1959 } 2047 }
1960 2048
1961 2049
1962 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { 2050 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
1963 return pos.IsInstructionStart() && 2051 LiveRange* second_part = SplitAt(range, pos);
1964 chunk_->instructions()->at(pos.InstructionIndex())->IsLabel(); 2052 Spill(second_part);
1965 } 2053 }
1966 2054
1967 2055
1968 void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) { 2056 void LAllocator::SpillBetween(LiveRange* range,
1969 UsePosition* prev_pos = prev->AddUsePosition( 2057 LifetimePosition start,
1970 LifetimePosition::FromInstructionIndex(pos)); 2058 LifetimePosition end) {
1971 UsePosition* next_pos = next->AddUsePosition( 2059 ASSERT(start.Value() < end.Value());
1972 LifetimePosition::FromInstructionIndex(pos)); 2060 LiveRange* second_part = SplitAt(range, start);
1973 LOperand* prev_operand = prev_pos->operand();
1974 LOperand* next_operand = next_pos->operand();
1975 LGap* gap = chunk_->GetGapAt(pos);
1976 gap->GetOrCreateParallelMove(LGap::START)->
1977 AddMove(prev_operand, next_operand);
1978 next_pos->set_hint(prev_operand);
1979 }
1980 2061
2062 if (second_part->Start().Value() < end.Value()) {
2063 // The split result intersects with [start, end[.
2064 // Split it at position between ]start+1, end[, spill the middle part
2065 // and put the rest to unhandled.
2066 LiveRange* third_part = SplitBetween(
2067 second_part,
2068 second_part->Start().InstructionEnd(),
2069 end.PrevInstruction().InstructionEnd());
1981 2070
1982 LiveRange* LAllocator::Split(LiveRange* range, LifetimePosition pos) { 2071 ASSERT(third_part != second_part);
1983 ASSERT(!range->IsFixed());
1984 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
1985 if (pos.Value() <= range->Start().Value()) {
1986 return range;
1987 }
1988 LiveRange* result = LiveRangeFor(next_virtual_register_++);
1989 range->SplitAt(pos, result);
1990 return result;
1991 }
1992 2072
1993 2073 Spill(second_part);
1994 void LAllocator::SplitAndSpill(LiveRange* range,
1995 LifetimePosition start,
1996 LifetimePosition end) {
1997 // We have an interval range and want to make sure that it is
1998 // spilled at start and at most spilled until end.
1999 ASSERT(start.Value() < end.Value());
2000 LiveRange* tail_part = Split(range, start);
2001 if (tail_part->Start().Value() < end.Value()) {
2002 LiveRange* third_part = Split(tail_part,
2003 tail_part->Start().NextInstruction(),
2004 end);
2005 Spill(tail_part);
2006 ASSERT(third_part != tail_part);
2007 AddToUnhandledSorted(third_part); 2074 AddToUnhandledSorted(third_part);
2008 } else { 2075 } else {
2009 AddToUnhandledSorted(tail_part); 2076 // The split result does not intersect with [start, end[.
2077 // Nothing to spill. Just put it to unhandled as whole.
2078 AddToUnhandledSorted(second_part);
2010 } 2079 }
2011 } 2080 }
2012 2081
2013 2082
2014 void LAllocator::SplitAndSpill(LiveRange* range, LifetimePosition at) {
2015 at = LifetimePosition::FromInstructionIndex(
2016 chunk_->NearestGapPos(at.InstructionIndex()));
2017 LiveRange* second_part = Split(range, at);
2018 Spill(second_part);
2019 }
2020
2021
2022 void LAllocator::Spill(LiveRange* range) { 2083 void LAllocator::Spill(LiveRange* range) {
2023 ASSERT(!range->IsSpilled()); 2084 ASSERT(!range->IsSpilled());
2024 TraceAlloc("Spilling live range %d\n", range->id()); 2085 TraceAlloc("Spilling live range %d\n", range->id());
2025 LiveRange* first = range->TopLevel(); 2086 LiveRange* first = range->TopLevel();
2026 2087
2027 if (!first->HasAllocatedSpillOperand()) { 2088 if (!first->HasAllocatedSpillOperand()) {
2028 LOperand* op = TryReuseSpillSlot(range); 2089 LOperand* op = TryReuseSpillSlot(range);
2029 if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == XMM_REGISTERS); 2090 if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
2030 first->SetSpillOperand(op); 2091 first->SetSpillOperand(op);
2031 } 2092 }
2032 range->MakeSpilled(); 2093 range->MakeSpilled();
2033 } 2094 }
2034 2095
2035 2096
2036 int LAllocator::RegisterCount() const { 2097 int LAllocator::RegisterCount() const {
2037 return num_registers_; 2098 return num_registers_;
2038 } 2099 }
2039 2100
2040 2101
2041 #ifdef DEBUG 2102 #ifdef DEBUG
2042 2103
2043 2104
2044 void LAllocator::Verify() const { 2105 void LAllocator::Verify() const {
2045 for (int i = 0; i < live_ranges()->length(); ++i) { 2106 for (int i = 0; i < live_ranges()->length(); ++i) {
2046 LiveRange* current = live_ranges()->at(i); 2107 LiveRange* current = live_ranges()->at(i);
2047 if (current != NULL) current->Verify(); 2108 if (current != NULL) current->Verify();
2048 } 2109 }
2049 } 2110 }
2050 2111
2051 2112
2052 #endif 2113 #endif
2053 2114
2054 2115
2055 } } // namespace v8::internal 2116 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/lithium-allocator.h ('k') | src/log.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698