Chromium Code Reviews

Side by Side Diff: src/compiler/register-allocator.cc

Issue 1087983007: [turbofan] Cleanup register allocator a little after split. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 8 months ago
OLD | NEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/base/adapters.h" 5 #include "src/base/adapters.h"
6 #include "src/compiler/linkage.h" 6 #include "src/compiler/linkage.h"
7 #include "src/compiler/register-allocator.h" 7 #include "src/compiler/register-allocator.h"
8 #include "src/string-stream.h" 8 #include "src/string-stream.h"
9 9
10 namespace v8 { 10 namespace v8 {
(...skipping 16 matching lines...)
27 return a.Value() > b.Value() ? a : b; 27 return a.Value() > b.Value() ? a : b;
28 } 28 }
29 29
30 30
31 void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) { 31 void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
32 auto it = std::find(v->begin(), v->end(), range); 32 auto it = std::find(v->begin(), v->end(), range);
33 DCHECK(it != v->end()); 33 DCHECK(it != v->end());
34 v->erase(it); 34 v->erase(it);
35 } 35 }
36 36
37
38 int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
39 return kind == DOUBLE_REGISTERS ? cfg->num_aliased_double_registers()
40 : cfg->num_general_registers();
41 }
42
43
44 const ZoneVector<LiveRange*>& GetFixedRegisters(
45 const RegisterAllocationData* data, RegisterKind kind) {
46 return kind == DOUBLE_REGISTERS ? data->fixed_double_live_ranges()
47 : data->fixed_live_ranges();
48 }
49
50
51 const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
52 const InstructionBlock* block) {
53 auto index = block->loop_header();
54 if (!index.IsValid()) return nullptr;
55 return sequence->InstructionBlockAt(index);
56 }
57
58
59 const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
60 LifetimePosition pos) {
61 return code->GetInstructionBlock(pos.ToInstructionIndex());
62 }
63
64
65 bool IsBlockBoundary(const InstructionSequence* code, LifetimePosition pos) {
66 return pos.IsFullStart() &&
67 code->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
68 pos.ToInstructionIndex();
69 }
70
71
37 } // namespace 72 } // namespace
38 73
39 74
40 UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand, 75 UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
41 InstructionOperand* hint) 76 InstructionOperand* hint)
42 : operand_(operand), hint_(hint), pos_(pos), next_(nullptr), flags_(0) { 77 : operand_(operand), hint_(hint), pos_(pos), next_(nullptr), flags_(0) {
43 bool register_beneficial = true; 78 bool register_beneficial = true;
44 UsePositionType type = UsePositionType::kAny; 79 UsePositionType type = UsePositionType::kAny;
45 if (operand_ != nullptr && operand_->IsUnallocated()) { 80 if (operand_ != nullptr && operand_->IsUnallocated()) {
46 const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_); 81 const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
(...skipping 36 matching lines...)
83 struct LiveRange::SpillAtDefinitionList : ZoneObject { 118 struct LiveRange::SpillAtDefinitionList : ZoneObject {
84 SpillAtDefinitionList(int gap_index, InstructionOperand* operand, 119 SpillAtDefinitionList(int gap_index, InstructionOperand* operand,
85 SpillAtDefinitionList* next) 120 SpillAtDefinitionList* next)
86 : gap_index(gap_index), operand(operand), next(next) {} 121 : gap_index(gap_index), operand(operand), next(next) {}
87 const int gap_index; 122 const int gap_index;
88 InstructionOperand* const operand; 123 InstructionOperand* const operand;
89 SpillAtDefinitionList* const next; 124 SpillAtDefinitionList* const next;
90 }; 125 };
91 126
92 127
93 LiveRange::LiveRange(int id, Zone* zone) 128 LiveRange::LiveRange(int id)
94 : id_(id), 129 : id_(id),
95 spilled_(false), 130 spilled_(false),
96 has_slot_use_(false), 131 has_slot_use_(false),
97 is_phi_(false), 132 is_phi_(false),
98 is_non_loop_phi_(false), 133 is_non_loop_phi_(false),
99 kind_(UNALLOCATED_REGISTERS), 134 kind_(UNALLOCATED_REGISTERS),
100 assigned_register_(kInvalidAssignment), 135 assigned_register_(kInvalidAssignment),
101 last_interval_(nullptr), 136 last_interval_(nullptr),
102 first_interval_(nullptr), 137 first_interval_(nullptr),
103 first_pos_(nullptr), 138 first_pos_(nullptr),
(...skipping 258 matching lines...)
362 } else { 397 } else {
363 while (use_after != nullptr && 398 while (use_after != nullptr &&
364 use_after->pos().Value() <= position.Value()) { 399 use_after->pos().Value() <= position.Value()) {
365 use_before = use_after; 400 use_before = use_after;
366 use_after = use_after->next(); 401 use_after = use_after->next();
367 } 402 }
368 } 403 }
369 404
370 // Partition original use positions to the two live ranges. 405 // Partition original use positions to the two live ranges.
371 if (use_before != nullptr) { 406 if (use_before != nullptr) {
372 use_before->next_ = nullptr; 407 use_before->set_next(nullptr);
373 } else { 408 } else {
374 first_pos_ = nullptr; 409 first_pos_ = nullptr;
375 } 410 }
376 result->first_pos_ = use_after; 411 result->first_pos_ = use_after;
377 412
378 // Discard cached iteration state. It might be pointing 413 // Discard cached iteration state. It might be pointing
379 // to the use that no longer belongs to this live range. 414 // to the use that no longer belongs to this live range.
380 last_processed_use_ = nullptr; 415 last_processed_use_ = nullptr;
381 current_interval_ = nullptr; 416 current_interval_ = nullptr;
382 417
(...skipping 46 matching lines...)
429 auto new_end = end; 464 auto new_end = end;
430 while (first_interval_ != nullptr && 465 while (first_interval_ != nullptr &&
431 first_interval_->start().Value() <= end.Value()) { 466 first_interval_->start().Value() <= end.Value()) {
432 if (first_interval_->end().Value() > end.Value()) { 467 if (first_interval_->end().Value() > end.Value()) {
433 new_end = first_interval_->end(); 468 new_end = first_interval_->end();
434 } 469 }
435 first_interval_ = first_interval_->next(); 470 first_interval_ = first_interval_->next();
436 } 471 }
437 472
438 auto new_interval = new (zone) UseInterval(start, new_end); 473 auto new_interval = new (zone) UseInterval(start, new_end);
439 new_interval->next_ = first_interval_; 474 new_interval->set_next(first_interval_);
440 first_interval_ = new_interval; 475 first_interval_ = new_interval;
441 if (new_interval->next() == nullptr) { 476 if (new_interval->next() == nullptr) {
442 last_interval_ = new_interval; 477 last_interval_ = new_interval;
443 } 478 }
444 } 479 }
445 480
446 481
447 void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end, 482 void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end,
448 Zone* zone) { 483 Zone* zone) {
449 TRACE("Add to live range %d interval [%d %d[\n", id_, start.Value(), 484 TRACE("Add to live range %d interval [%d %d[\n", id_, start.Value(),
450 end.Value()); 485 end.Value());
451 if (first_interval_ == nullptr) { 486 if (first_interval_ == nullptr) {
452 auto interval = new (zone) UseInterval(start, end); 487 auto interval = new (zone) UseInterval(start, end);
453 first_interval_ = interval; 488 first_interval_ = interval;
454 last_interval_ = interval; 489 last_interval_ = interval;
455 } else { 490 } else {
456 if (end.Value() == first_interval_->start().Value()) { 491 if (end.Value() == first_interval_->start().Value()) {
457 first_interval_->set_start(start); 492 first_interval_->set_start(start);
458 } else if (end.Value() < first_interval_->start().Value()) { 493 } else if (end.Value() < first_interval_->start().Value()) {
459 auto interval = new (zone) UseInterval(start, end); 494 auto interval = new (zone) UseInterval(start, end);
460 interval->set_next(first_interval_); 495 interval->set_next(first_interval_);
461 first_interval_ = interval; 496 first_interval_ = interval;
462 } else { 497 } else {
463 // Order of instruction's processing (see ProcessInstructions) guarantees 498 // Order of instruction's processing (see ProcessInstructions) guarantees
464 // that each new use interval either precedes or intersects with 499 // that each new use interval either precedes or intersects with
465 // last added interval. 500 // last added interval.
466 DCHECK(start.Value() < first_interval_->end().Value()); 501 DCHECK(start.Value() < first_interval_->end().Value());
467 first_interval_->start_ = Min(start, first_interval_->start_); 502 first_interval_->set_start(Min(start, first_interval_->start()));
468 first_interval_->end_ = Max(end, first_interval_->end_); 503 first_interval_->set_end(Max(end, first_interval_->end()));
469 } 504 }
470 } 505 }
471 } 506 }
472 507
473 508
474 void LiveRange::AddUsePosition(LifetimePosition pos, 509 void LiveRange::AddUsePosition(LifetimePosition pos,
475 InstructionOperand* operand, 510 InstructionOperand* operand,
476 InstructionOperand* hint, Zone* zone) { 511 InstructionOperand* hint, Zone* zone) {
477 TRACE("Add to live range %d use position %d\n", id_, pos.Value()); 512 TRACE("Add to live range %d use position %d\n", id_, pos.Value());
478 auto use_pos = new (zone) UsePosition(pos, operand, hint); 513 auto use_pos = new (zone) UsePosition(pos, operand, hint);
479 UsePosition* prev_hint = nullptr; 514 UsePosition* prev_hint = nullptr;
480 UsePosition* prev = nullptr; 515 UsePosition* prev = nullptr;
481 auto current = first_pos_; 516 auto current = first_pos_;
482 while (current != nullptr && current->pos().Value() < pos.Value()) { 517 while (current != nullptr && current->pos().Value() < pos.Value()) {
483 prev_hint = current->HasHint() ? current : prev_hint; 518 prev_hint = current->HasHint() ? current : prev_hint;
484 prev = current; 519 prev = current;
485 current = current->next(); 520 current = current->next();
486 } 521 }
487 522
488 if (prev == nullptr) { 523 if (prev == nullptr) {
489 use_pos->set_next(first_pos_); 524 use_pos->set_next(first_pos_);
490 first_pos_ = use_pos; 525 first_pos_ = use_pos;
491 } else { 526 } else {
492 use_pos->next_ = prev->next_; 527 use_pos->set_next(prev->next());
493 prev->next_ = use_pos; 528 prev->set_next(use_pos);
494 } 529 }
495 530
496 if (prev_hint == nullptr && use_pos->HasHint()) { 531 if (prev_hint == nullptr && use_pos->HasHint()) {
497 current_hint_operand_ = hint; 532 current_hint_operand_ = hint;
498 } 533 }
499 } 534 }
500 535
501 536
502 void LiveRange::ConvertUsesToOperand(const InstructionOperand& op, 537 void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
503 InstructionOperand* spill_op) { 538 InstructionOperand* spill_op) {
(...skipping 237 matching lines...)
741 DCHECK(range->is_phi()); 776 DCHECK(range->is_phi());
742 auto it = phi_map().find(range->id()); 777 auto it = phi_map().find(range->id());
743 DCHECK(it != phi_map().end()); 778 DCHECK(it != phi_map().end());
744 for (auto move : it->second->incoming_moves) { 779 for (auto move : it->second->incoming_moves) {
745 move->set_destination(assignment); 780 move->set_destination(assignment);
746 } 781 }
747 } 782 }
748 783
749 784
750 LiveRange* RegisterAllocationData::NewLiveRange(int index) { 785 LiveRange* RegisterAllocationData::NewLiveRange(int index) {
751 // The LiveRange object itself can go in the local zone, but the 786 return new (allocation_zone()) LiveRange(index);
752 // InstructionOperand needs to go in the code zone, since it may survive
753 // register allocation.
754 return new (allocation_zone()) LiveRange(index, code_zone());
755 } 787 }
756 788
757 789
758 bool RegisterAllocationData::ExistsUseWithoutDefinition() { 790 bool RegisterAllocationData::ExistsUseWithoutDefinition() {
759 bool found = false; 791 bool found = false;
760 BitVector::Iterator iterator(live_in_sets()[0]); 792 BitVector::Iterator iterator(live_in_sets()[0]);
761 while (!iterator.Done()) { 793 while (!iterator.Done()) {
762 found = true; 794 found = true;
763 int operand_index = iterator.Current(); 795 int operand_index = iterator.Current();
764 PrintF("Register allocator error: live v%d reached first block.\n", 796 PrintF("Register allocator error: live v%d reached first block.\n",
(...skipping 623 matching lines...)
1388 range->set_kind(RequiredRegisterKind(range->id())); 1420 range->set_kind(RequiredRegisterKind(range->id()));
1389 // Give slots to all ranges with a non fixed slot use. 1421 // Give slots to all ranges with a non fixed slot use.
1390 if (range->has_slot_use() && range->HasNoSpillType()) { 1422 if (range->has_slot_use() && range->HasNoSpillType()) {
1391 data()->AssignSpillRangeToLiveRange(range); 1423 data()->AssignSpillRangeToLiveRange(range);
1392 } 1424 }
1393 // TODO(bmeurer): This is a horrible hack to make sure that for constant 1425 // TODO(bmeurer): This is a horrible hack to make sure that for constant
1394 // live ranges, every use requires the constant to be in a register. 1426 // live ranges, every use requires the constant to be in a register.
1395 // Without this hack, all uses with "any" policy would get the constant 1427 // Without this hack, all uses with "any" policy would get the constant
1396 // operand assigned. 1428 // operand assigned.
1397 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) { 1429 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
1398 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next_) { 1430 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next()) {
1399 if (pos->type() == UsePositionType::kRequiresSlot) continue; 1431 if (pos->type() == UsePositionType::kRequiresSlot) continue;
1400 UsePositionType new_type = UsePositionType::kAny; 1432 UsePositionType new_type = UsePositionType::kAny;
1401 // Can't mark phis as needing a register. 1433 // Can't mark phis as needing a register.
1402 if (!pos->pos().IsGapPosition()) { 1434 if (!pos->pos().IsGapPosition()) {
1403 new_type = UsePositionType::kRequiresRegister; 1435 new_type = UsePositionType::kRequiresRegister;
1404 } 1436 }
1405 pos->set_type(new_type, true); 1437 pos->set_type(new_type, true);
1406 } 1438 }
1407 } 1439 }
1408 } 1440 }
(...skipping 12 matching lines...)
1421 1453
1422 1454
1423 void LiveRangeBuilder::Verify() const { 1455 void LiveRangeBuilder::Verify() const {
1424 for (auto current : data()->live_ranges()) { 1456 for (auto current : data()->live_ranges()) {
1425 if (current != nullptr) current->Verify(); 1457 if (current != nullptr) current->Verify();
1426 } 1458 }
1427 } 1459 }
1428 1460
1429 1461
1430 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data, 1462 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
1431 RegisterKind kind) 1463 RegisterKind kind, Zone* local_zone)
1432 : data_(data), 1464 : data_(data),
1433 mode_(kind), 1465 mode_(kind),
1434 num_registers_(kind == DOUBLE_REGISTERS 1466 num_registers_(GetRegisterCount(data->config(), kind)),
1435 ? config()->num_aliased_double_registers() 1467 unhandled_live_ranges_(local_zone),
1436 : config()->num_general_registers()), 1468 active_live_ranges_(local_zone),
1437 unhandled_live_ranges_(allocation_zone()), 1469 inactive_live_ranges_(local_zone) {
1438 active_live_ranges_(allocation_zone()),
1439 inactive_live_ranges_(allocation_zone()) {
1440 unhandled_live_ranges().reserve( 1470 unhandled_live_ranges().reserve(
1441 static_cast<size_t>(code()->VirtualRegisterCount() * 2)); 1471 static_cast<size_t>(code()->VirtualRegisterCount() * 2));
1442 active_live_ranges().reserve(8); 1472 active_live_ranges().reserve(8);
1443 inactive_live_ranges().reserve(8); 1473 inactive_live_ranges().reserve(8);
1444 // TryAllocateFreeReg and AllocateBlockedReg assume this 1474 // TryAllocateFreeReg and AllocateBlockedReg assume this
1445 // when allocating local arrays. 1475 // when allocating local arrays.
1446 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= 1476 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >=
1447 config()->num_general_registers()); 1477 config()->num_general_registers());
1448 } 1478 }
1449 1479
1450 1480
1451 void LinearScanAllocator::AllocateRegisters() { 1481 void LinearScanAllocator::AllocateRegisters() {
1452 DCHECK(unhandled_live_ranges().empty()); 1482 DCHECK(unhandled_live_ranges().empty());
1453 1483
1454 for (auto range : live_ranges()) { 1484 for (auto range : live_ranges()) {
1455 if (range == nullptr) continue; 1485 if (range == nullptr) continue;
1456 if (range->Kind() == mode_) { 1486 if (range->Kind() == mode_) {
1457 AddToUnhandledUnsorted(range); 1487 AddToUnhandledUnsorted(range);
1458 } 1488 }
1459 } 1489 }
1460 SortUnhandled(); 1490 SortUnhandled();
1461 DCHECK(UnhandledIsSorted()); 1491 DCHECK(UnhandledIsSorted());
1462 1492
1463 DCHECK(active_live_ranges().empty()); 1493 DCHECK(active_live_ranges().empty());
1464 DCHECK(inactive_live_ranges().empty()); 1494 DCHECK(inactive_live_ranges().empty());
1465 1495
1466 if (mode_ == DOUBLE_REGISTERS) { 1496 auto& fixed_ranges = GetFixedRegisters(data(), mode_);
1467 for (auto current : fixed_double_live_ranges()) { 1497 for (auto current : fixed_ranges) {
1468 if (current != nullptr) AddToInactive(current); 1498 if (current != nullptr) {
1469 } 1499 DCHECK_EQ(mode_, current->Kind());
1470 } else { 1500 AddToInactive(current);
1471 DCHECK(mode_ == GENERAL_REGISTERS);
1472 for (auto current : fixed_live_ranges()) {
1473 if (current != nullptr) AddToInactive(current);
1474 } 1501 }
1475 } 1502 }
1476 1503
1477 while (!unhandled_live_ranges().empty()) { 1504 while (!unhandled_live_ranges().empty()) {
1478 DCHECK(UnhandledIsSorted()); 1505 DCHECK(UnhandledIsSorted());
1479 auto current = unhandled_live_ranges().back(); 1506 auto current = unhandled_live_ranges().back();
1480 unhandled_live_ranges().pop_back(); 1507 unhandled_live_ranges().pop_back();
1481 DCHECK(UnhandledIsSorted()); 1508 DCHECK(UnhandledIsSorted());
1482 auto position = current->Start(); 1509 auto position = current->Start();
1483 #ifdef DEBUG 1510 #ifdef DEBUG
(...skipping 53 matching lines...)
1537 if (current->HasRegisterAssigned()) { 1564 if (current->HasRegisterAssigned()) {
1538 AddToActive(current); 1565 AddToActive(current);
1539 } 1566 }
1540 } 1567 }
1541 1568
1542 active_live_ranges().clear(); 1569 active_live_ranges().clear();
1543 inactive_live_ranges().clear(); 1570 inactive_live_ranges().clear();
1544 } 1571 }
1545 1572
1546 1573
1547 const char* LinearScanAllocator::RegisterName(int allocation_index) { 1574 const char* LinearScanAllocator::RegisterName(int allocation_index) const {
1548 if (mode_ == GENERAL_REGISTERS) { 1575 if (mode_ == GENERAL_REGISTERS) {
1549 return config()->general_register_name(allocation_index); 1576 return config()->general_register_name(allocation_index);
1550 } else { 1577 } else {
1551 return config()->double_register_name(allocation_index); 1578 return config()->double_register_name(allocation_index);
1552 } 1579 }
1553 } 1580 }
1554 1581
1555 1582
1556 void LinearScanAllocator::AddToActive(LiveRange* range) { 1583 void LinearScanAllocator::AddToActive(LiveRange* range) {
1557 TRACE("Add live range %d to active\n", range->id()); 1584 TRACE("Add live range %d to active\n", range->id());
(...skipping 86 matching lines...)
1644 void LinearScanAllocator::InactiveToActive(LiveRange* range) { 1671 void LinearScanAllocator::InactiveToActive(LiveRange* range) {
1645 RemoveElement(&inactive_live_ranges(), range); 1672 RemoveElement(&inactive_live_ranges(), range);
1646 active_live_ranges().push_back(range); 1673 active_live_ranges().push_back(range);
1647 TRACE("Moving live range %d from inactive to active\n", range->id()); 1674 TRACE("Moving live range %d from inactive to active\n", range->id());
1648 } 1675 }
1649 1676
1650 1677
1651 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) { 1678 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
1652 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; 1679 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters];
1653 1680
1654 for (int i = 0; i < num_registers_; i++) { 1681 for (int i = 0; i < num_registers(); i++) {
1655 free_until_pos[i] = LifetimePosition::MaxPosition(); 1682 free_until_pos[i] = LifetimePosition::MaxPosition();
1656 } 1683 }
1657 1684
1658 for (auto cur_active : active_live_ranges()) { 1685 for (auto cur_active : active_live_ranges()) {
1659 free_until_pos[cur_active->assigned_register()] = 1686 free_until_pos[cur_active->assigned_register()] =
1660 LifetimePosition::GapFromInstructionIndex(0); 1687 LifetimePosition::GapFromInstructionIndex(0);
1661 } 1688 }
1662 1689
1663 for (auto cur_inactive : inactive_live_ranges()) { 1690 for (auto cur_inactive : inactive_live_ranges()) {
1664 DCHECK(cur_inactive->End().Value() > current->Start().Value()); 1691 DCHECK(cur_inactive->End().Value() > current->Start().Value());
1665 auto next_intersection = cur_inactive->FirstIntersection(current); 1692 auto next_intersection = cur_inactive->FirstIntersection(current);
1666 if (!next_intersection.IsValid()) continue; 1693 if (!next_intersection.IsValid()) continue;
1667 int cur_reg = cur_inactive->assigned_register(); 1694 int cur_reg = cur_inactive->assigned_register();
1668 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); 1695 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
1669 } 1696 }
1670 1697
1671 auto hint = current->FirstHint(); 1698 auto hint = current->FirstHint();
1672 if (hint != nullptr && (hint->IsRegister() || hint->IsDoubleRegister())) { 1699 if (hint != nullptr && (hint->IsRegister() || hint->IsDoubleRegister())) {
1673 int register_index = AllocatedOperand::cast(hint)->index(); 1700 int register_index = AllocatedOperand::cast(hint)->index();
1674 TRACE("Found reg hint %s (free until [%d) for live range %d (end %d[).\n", 1701 TRACE("Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
1675 RegisterName(register_index), free_until_pos[register_index].Value(), 1702 RegisterName(register_index), free_until_pos[register_index].Value(),
1676 current->id(), current->End().Value()); 1703 current->id(), current->End().Value());
1677 1704
1678 // The desired register is free until the end of the current live range. 1705 // The desired register is free until the end of the current live range.
1679 if (free_until_pos[register_index].Value() >= current->End().Value()) { 1706 if (free_until_pos[register_index].Value() >= current->End().Value()) {
1680 TRACE("Assigning preferred reg %s to live range %d\n", 1707 TRACE("Assigning preferred reg %s to live range %d\n",
1681 RegisterName(register_index), current->id()); 1708 RegisterName(register_index), current->id());
1682 SetLiveRangeAssignedRegister(current, register_index); 1709 data()->SetLiveRangeAssignedRegister(current, register_index);
1683 return true; 1710 return true;
1684 } 1711 }
1685 } 1712 }
1686 1713
1687 // Find the register which stays free for the longest time. 1714 // Find the register which stays free for the longest time.
1688 int reg = 0; 1715 int reg = 0;
1689 for (int i = 1; i < RegisterCount(); ++i) { 1716 for (int i = 1; i < num_registers(); ++i) {
1690 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) { 1717 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
1691 reg = i; 1718 reg = i;
1692 } 1719 }
1693 } 1720 }
1694 1721
1695 auto pos = free_until_pos[reg]; 1722 auto pos = free_until_pos[reg];
1696 1723
1697 if (pos.Value() <= current->Start().Value()) { 1724 if (pos.Value() <= current->Start().Value()) {
1698 // All registers are blocked. 1725 // All registers are blocked.
1699 return false; 1726 return false;
1700 } 1727 }
1701 1728
1702 if (pos.Value() < current->End().Value()) { 1729 if (pos.Value() < current->End().Value()) {
1703 // Register reg is available at the range start but becomes blocked before 1730 // Register reg is available at the range start but becomes blocked before
1704 // the range end. Split current at position where it becomes blocked. 1731 // the range end. Split current at position where it becomes blocked.
1705 auto tail = SplitRangeAt(current, pos); 1732 auto tail = SplitRangeAt(current, pos);
1706 AddToUnhandledSorted(tail); 1733 AddToUnhandledSorted(tail);
1707 } 1734 }
1708 1735
1709 // Register reg is available at the range start and is free until 1736 // Register reg is available at the range start and is free until
1710 // the range end. 1737 // the range end.
1711 DCHECK(pos.Value() >= current->End().Value()); 1738 DCHECK(pos.Value() >= current->End().Value());
1712 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), 1739 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg),
1713 current->id()); 1740 current->id());
1714 SetLiveRangeAssignedRegister(current, reg); 1741 data()->SetLiveRangeAssignedRegister(current, reg);
1715 1742
1716 return true; 1743 return true;
1717 } 1744 }
1718 1745
1719 1746
1720 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) { 1747 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
1721 auto register_use = current->NextRegisterPosition(current->Start()); 1748 auto register_use = current->NextRegisterPosition(current->Start());
1722 if (register_use == nullptr) { 1749 if (register_use == nullptr) {
1723 // There is no use in the current live range that requires a register. 1750 // There is no use in the current live range that requires a register.
1724 // We can just spill it. 1751 // We can just spill it.
1725 Spill(current); 1752 Spill(current);
1726 return; 1753 return;
1727 } 1754 }
1728 1755
1729 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; 1756 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters];
1730 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; 1757 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters];
1731 1758
1732 for (int i = 0; i < num_registers_; i++) { 1759 for (int i = 0; i < num_registers(); i++) {
1733 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); 1760 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
1734 } 1761 }
1735 1762
1736 for (auto range : active_live_ranges()) { 1763 for (auto range : active_live_ranges()) {
1737 int cur_reg = range->assigned_register(); 1764 int cur_reg = range->assigned_register();
1738 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { 1765 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
1739 block_pos[cur_reg] = use_pos[cur_reg] = 1766 block_pos[cur_reg] = use_pos[cur_reg] =
1740 LifetimePosition::GapFromInstructionIndex(0); 1767 LifetimePosition::GapFromInstructionIndex(0);
1741 } else { 1768 } else {
1742 auto next_use = 1769 auto next_use =
(...skipping 13 matching lines...)
1756 int cur_reg = range->assigned_register(); 1783 int cur_reg = range->assigned_register();
1757 if (range->IsFixed()) { 1784 if (range->IsFixed()) {
1758 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); 1785 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
1759 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); 1786 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
1760 } else { 1787 } else {
1761 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); 1788 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
1762 } 1789 }
1763 } 1790 }
1764 1791
1765 int reg = 0; 1792 int reg = 0;
1766 for (int i = 1; i < RegisterCount(); ++i) { 1793 for (int i = 1; i < num_registers(); ++i) {
1767 if (use_pos[i].Value() > use_pos[reg].Value()) { 1794 if (use_pos[i].Value() > use_pos[reg].Value()) {
1768 reg = i; 1795 reg = i;
1769 } 1796 }
1770 } 1797 }
1771 1798
1772 auto pos = use_pos[reg]; 1799 auto pos = use_pos[reg];
1773 1800
1774 if (pos.Value() < register_use->pos().Value()) { 1801 if (pos.Value() < register_use->pos().Value()) {
1775 // All registers are blocked before the first use that requires a register. 1802 // All registers are blocked before the first use that requires a register.
1776 // Spill starting part of live range up to that use. 1803 // Spill starting part of live range up to that use.
1777 SpillBetween(current, current->Start(), register_use->pos()); 1804 SpillBetween(current, current->Start(), register_use->pos());
1778 return; 1805 return;
1779 } 1806 }
1780 1807
1781 if (block_pos[reg].Value() < current->End().Value()) { 1808 if (block_pos[reg].Value() < current->End().Value()) {
1782 // Register becomes blocked before the current range end. Split before that 1809 // Register becomes blocked before the current range end. Split before that
1783 // position. 1810 // position.
1784 LiveRange* tail = 1811 LiveRange* tail =
1785 SplitBetween(current, current->Start(), block_pos[reg].Start()); 1812 SplitBetween(current, current->Start(), block_pos[reg].Start());
1786 AddToUnhandledSorted(tail); 1813 AddToUnhandledSorted(tail);
1787 } 1814 }
1788 1815
1789 // Register reg is not blocked for the whole range. 1816 // Register reg is not blocked for the whole range.
1790 DCHECK(block_pos[reg].Value() >= current->End().Value()); 1817 DCHECK(block_pos[reg].Value() >= current->End().Value());
1791 TRACE("Assigning blocked reg %s to live range %d\n", RegisterName(reg), 1818 TRACE("Assigning blocked reg %s to live range %d\n", RegisterName(reg),
1792 current->id()); 1819 current->id());
1793 SetLiveRangeAssignedRegister(current, reg); 1820 data()->SetLiveRangeAssignedRegister(current, reg);
1794 1821
1795 // This register was not free. Thus we need to find and spill 1822 // This register was not free. Thus we need to find and spill
1796 // parts of active and inactive live regions that use the same register 1823 // parts of active and inactive live regions that use the same register
1797 // at the same lifetime positions as current. 1824 // at the same lifetime positions as current.
1798 SplitAndSpillIntersecting(current); 1825 SplitAndSpillIntersecting(current);
1799 } 1826 }
1800 1827
1801 1828
1802 static const InstructionBlock* GetContainingLoop(
1803 const InstructionSequence* sequence, const InstructionBlock* block) {
1804 auto index = block->loop_header();
1805 if (!index.IsValid()) return nullptr;
1806 return sequence->InstructionBlockAt(index);
1807 }
1808
1809
1810 LifetimePosition LinearScanAllocator::FindOptimalSpillingPos( 1829 LifetimePosition LinearScanAllocator::FindOptimalSpillingPos(
1811 LiveRange* range, LifetimePosition pos) { 1830 LiveRange* range, LifetimePosition pos) {
1812 auto block = GetInstructionBlock(pos.Start()); 1831 auto block = GetInstructionBlock(code(), pos.Start());
1813 auto loop_header = 1832 auto loop_header =
1814 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); 1833 block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
1815 1834
1816 if (loop_header == nullptr) return pos; 1835 if (loop_header == nullptr) return pos;
1817 1836
1818 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); 1837 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos);
1819 1838
1820 while (loop_header != nullptr) { 1839 while (loop_header != nullptr) {
1821 // We are going to spill live range inside the loop. 1840 // We are going to spill live range inside the loop.
1822 // If possible try to move spilling position backwards to loop header. 1841 // If possible try to move spilling position backwards to loop header.
(...skipping 71 matching lines...)
1894 auto lookup = phi_map().find(range->id()); 1913 auto lookup = phi_map().find(range->id());
1895 DCHECK(lookup != phi_map().end()); 1914 DCHECK(lookup != phi_map().end());
1896 auto phi = lookup->second->phi; 1915 auto phi = lookup->second->phi;
1897 auto block = lookup->second->block; 1916 auto block = lookup->second->block;
1898 // Count the number of spilled operands. 1917 // Count the number of spilled operands.
1899 size_t spilled_count = 0; 1918 size_t spilled_count = 0;
1900 LiveRange* first_op = nullptr; 1919 LiveRange* first_op = nullptr;
1901 for (size_t i = 0; i < phi->operands().size(); i++) { 1920 for (size_t i = 0; i < phi->operands().size(); i++) {
1902 int op = phi->operands()[i]; 1921 int op = phi->operands()[i];
1903 LiveRange* op_range = LiveRangeFor(op); 1922 LiveRange* op_range = LiveRangeFor(op);
1904 if (op_range->GetSpillRange() == nullptr) continue; 1923 if (!op_range->HasSpillRange()) continue;
1905 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); 1924 auto pred = code()->InstructionBlockAt(block->predecessors()[i]);
1906 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( 1925 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
1907 pred->last_instruction_index()); 1926 pred->last_instruction_index());
1908 while (op_range != nullptr && !op_range->CanCover(pred_end)) { 1927 while (op_range != nullptr && !op_range->CanCover(pred_end)) {
1909 op_range = op_range->next(); 1928 op_range = op_range->next();
1910 } 1929 }
1911 if (op_range != nullptr && op_range->IsSpilled()) { 1930 if (op_range != nullptr && op_range->IsSpilled()) {
1912 spilled_count++; 1931 spilled_count++;
1913 if (first_op == nullptr) { 1932 if (first_op == nullptr) {
1914 first_op = op_range->TopLevel(); 1933 first_op = op_range->TopLevel();
1915 } 1934 }
1916 } 1935 }
1917 } 1936 }
1918 1937
1919 // Only continue if more than half of the operands are spilled. 1938 // Only continue if more than half of the operands are spilled.
1920 if (spilled_count * 2 <= phi->operands().size()) { 1939 if (spilled_count * 2 <= phi->operands().size()) {
1921 return false; 1940 return false;
1922 } 1941 }
1923 1942
1924 // Try to merge the spilled operands and count the number of merged spilled 1943 // Try to merge the spilled operands and count the number of merged spilled
1925 // operands. 1944 // operands.
1926 DCHECK(first_op != nullptr); 1945 DCHECK(first_op != nullptr);
1927 auto first_op_spill = first_op->GetSpillRange(); 1946 auto first_op_spill = first_op->GetSpillRange();
1928 size_t num_merged = 1; 1947 size_t num_merged = 1;
1929 for (size_t i = 1; i < phi->operands().size(); i++) { 1948 for (size_t i = 1; i < phi->operands().size(); i++) {
1930 int op = phi->operands()[i]; 1949 int op = phi->operands()[i];
1931 auto op_range = LiveRangeFor(op); 1950 auto op_range = LiveRangeFor(op);
1951 if (!op_range->HasSpillRange()) continue;
1932 auto op_spill = op_range->GetSpillRange(); 1952 auto op_spill = op_range->GetSpillRange();
1933 if (op_spill != nullptr && 1953 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
1934 (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill))) {
1935 num_merged++; 1954 num_merged++;
1936 } 1955 }
1937 } 1956 }
1938 1957
1939 // Only continue if enough operands could be merged to the 1958 // Only continue if enough operands could be merged to the
1940 // same spill slot. 1959 // same spill slot.
1941 if (num_merged * 2 <= phi->operands().size() || 1960 if (num_merged * 2 <= phi->operands().size() ||
1942 AreUseIntervalsIntersecting(first_op_spill->interval(), 1961 AreUseIntervalsIntersecting(first_op_spill->interval(),
1943 range->first_interval())) { 1962 range->first_interval())) {
1944 return false; 1963 return false;
(...skipping 31 matching lines...)
1976 LiveRange* LinearScanAllocator::SplitRangeAt(LiveRange* range, 1995 LiveRange* LinearScanAllocator::SplitRangeAt(LiveRange* range,
1977 LifetimePosition pos) { 1996 LifetimePosition pos) {
1978 DCHECK(!range->IsFixed()); 1997 DCHECK(!range->IsFixed());
1979 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); 1998 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value());
1980 1999
1981 if (pos.Value() <= range->Start().Value()) return range; 2000 if (pos.Value() <= range->Start().Value()) return range;
1982 2001
1983 // We can't properly connect liveranges if splitting occurred at the end 2002 // We can't properly connect liveranges if splitting occurred at the end
1984 // a block. 2003 // a block.
1985 DCHECK(pos.IsStart() || pos.IsGapPosition() || 2004 DCHECK(pos.IsStart() || pos.IsGapPosition() ||
1986 (code()->GetInstructionBlock(pos.ToInstructionIndex())) 2005 (GetInstructionBlock(code(), pos)->last_instruction_index() !=
1987 ->last_instruction_index() != pos.ToInstructionIndex()); 2006 pos.ToInstructionIndex()));
1988 2007
1989 int vreg = GetVirtualRegister(); 2008 int vreg = code()->NextVirtualRegister();
1990 auto result = LiveRangeFor(vreg); 2009 auto result = LiveRangeFor(vreg);
1991 range->SplitAt(pos, result, allocation_zone()); 2010 range->SplitAt(pos, result, allocation_zone());
1992 return result; 2011 return result;
1993 } 2012 }
1994 2013
1995 2014
1996 LiveRange* LinearScanAllocator::SplitBetween(LiveRange* range, 2015 LiveRange* LinearScanAllocator::SplitBetween(LiveRange* range,
1997 LifetimePosition start, 2016 LifetimePosition start,
1998 LifetimePosition end) { 2017 LifetimePosition end) {
1999 DCHECK(!range->IsFixed()); 2018 DCHECK(!range->IsFixed());
2000 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(), 2019 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(),
2001 start.Value(), end.Value()); 2020 start.Value(), end.Value());
2002 2021
2003 auto split_pos = FindOptimalSplitPos(start, end); 2022 auto split_pos = FindOptimalSplitPos(start, end);
2004 DCHECK(split_pos.Value() >= start.Value()); 2023 DCHECK(split_pos.Value() >= start.Value());
2005 return SplitRangeAt(range, split_pos); 2024 return SplitRangeAt(range, split_pos);
2006 } 2025 }
2007 2026
2008 2027
2009 LifetimePosition LinearScanAllocator::FindOptimalSplitPos( 2028 LifetimePosition LinearScanAllocator::FindOptimalSplitPos(
2010 LifetimePosition start, LifetimePosition end) { 2029 LifetimePosition start, LifetimePosition end) {
2011 int start_instr = start.ToInstructionIndex(); 2030 int start_instr = start.ToInstructionIndex();
2012 int end_instr = end.ToInstructionIndex(); 2031 int end_instr = end.ToInstructionIndex();
2013 DCHECK(start_instr <= end_instr); 2032 DCHECK(start_instr <= end_instr);
2014 2033
2015 // We have no choice 2034 // We have no choice
2016 if (start_instr == end_instr) return end; 2035 if (start_instr == end_instr) return end;
2017 2036
2018 auto start_block = GetInstructionBlock(start); 2037 auto start_block = GetInstructionBlock(code(), start);
2019 auto end_block = GetInstructionBlock(end); 2038 auto end_block = GetInstructionBlock(code(), end);
2020 2039
2021 if (end_block == start_block) { 2040 if (end_block == start_block) {
2022 // The interval is split in the same basic block. Split at the latest 2041 // The interval is split in the same basic block. Split at the latest
2023 // possible position. 2042 // possible position.
2024 return end; 2043 return end;
2025 } 2044 }
2026 2045
2027 auto block = end_block; 2046 auto block = end_block;
2028 // Find header of outermost loop. 2047 // Find header of outermost loop.
2029 // TODO(titzer): fix redundancy below. 2048 // TODO(titzer): fix redundancy below.
(...skipping 29 matching lines...)
2059 LifetimePosition until, 2078 LifetimePosition until,
2060 LifetimePosition end) { 2079 LifetimePosition end) {
2061 CHECK(start.Value() < end.Value()); 2080 CHECK(start.Value() < end.Value());
2062 auto second_part = SplitRangeAt(range, start); 2081 auto second_part = SplitRangeAt(range, start);
2063 2082
2064 if (second_part->Start().Value() < end.Value()) { 2083 if (second_part->Start().Value() < end.Value()) {
2065 // The split result intersects with [start, end[. 2084 // The split result intersects with [start, end[.
2066 // Split it at position between ]start+1, end[, spill the middle part 2085 // Split it at position between ]start+1, end[, spill the middle part
2067 // and put the rest to unhandled. 2086 // and put the rest to unhandled.
2068 auto third_part_end = end.PrevStart().End(); 2087 auto third_part_end = end.PrevStart().End();
2069 if (data()->IsBlockBoundary(end.Start())) { 2088 if (IsBlockBoundary(code(), end.Start())) {
2070 third_part_end = end.Start(); 2089 third_part_end = end.Start();
2071 } 2090 }
2072 auto third_part = SplitBetween( 2091 auto third_part = SplitBetween(
2073 second_part, Max(second_part->Start().End(), until), third_part_end); 2092 second_part, Max(second_part->Start().End(), until), third_part_end);
2074 2093
2075 DCHECK(third_part != second_part); 2094 DCHECK(third_part != second_part);
2076 2095
2077 Spill(second_part); 2096 Spill(second_part);
2078 AddToUnhandledSorted(third_part); 2097 AddToUnhandledSorted(third_part);
2079 } else { 2098 } else {
(...skipping 254 matching lines...)
2334 private: 2353 private:
2335 size_t length_; 2354 size_t length_;
2336 LiveRangeBound* start_; 2355 LiveRangeBound* start_;
2337 2356
2338 DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray); 2357 DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
2339 }; 2358 };
2340 2359
2341 2360
2342 class LiveRangeFinder { 2361 class LiveRangeFinder {
2343 public: 2362 public:
2344 explicit LiveRangeFinder(const RegisterAllocationData* data) 2363 explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
2345 : data_(data), 2364 : data_(data),
2346 bounds_length_(static_cast<int>(data_->live_ranges().size())), 2365 bounds_length_(static_cast<int>(data_->live_ranges().size())),
2347 bounds_(data_->allocation_zone()->NewArray<LiveRangeBoundArray>( 2366 bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
2348 bounds_length_)) { 2367 zone_(zone) {
2349 for (int i = 0; i < bounds_length_; ++i) { 2368 for (int i = 0; i < bounds_length_; ++i) {
2350 new (&bounds_[i]) LiveRangeBoundArray(); 2369 new (&bounds_[i]) LiveRangeBoundArray();
2351 } 2370 }
2352 } 2371 }
2353 2372
2354 LiveRangeBoundArray* ArrayFor(int operand_index) { 2373 LiveRangeBoundArray* ArrayFor(int operand_index) {
2355 DCHECK(operand_index < bounds_length_); 2374 DCHECK(operand_index < bounds_length_);
2356 auto range = data_->live_ranges()[operand_index]; 2375 auto range = data_->live_ranges()[operand_index];
2357 DCHECK(range != nullptr && !range->IsEmpty()); 2376 DCHECK(range != nullptr && !range->IsEmpty());
2358 auto array = &bounds_[operand_index]; 2377 auto array = &bounds_[operand_index];
2359 if (array->ShouldInitialize()) { 2378 if (array->ShouldInitialize()) {
2360 array->Initialize(data_->allocation_zone(), range); 2379 array->Initialize(zone_, range);
2361 } 2380 }
2362 return array; 2381 return array;
2363 } 2382 }
2364 2383
2365 private: 2384 private:
2366 const RegisterAllocationData* const data_; 2385 const RegisterAllocationData* const data_;
2367 const int bounds_length_; 2386 const int bounds_length_;
2368 LiveRangeBoundArray* const bounds_; 2387 LiveRangeBoundArray* const bounds_;
2388 Zone* const zone_;
2369 2389
2370 DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder); 2390 DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
2371 }; 2391 };
2372 2392
2373 } // namespace 2393 } // namespace
2374 2394
2375 2395
2376 LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data) 2396 LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
2377 : data_(data) {} 2397 : data_(data) {}
2378 2398
2379 2399
2380 bool LiveRangeConnector::CanEagerlyResolveControlFlow( 2400 bool LiveRangeConnector::CanEagerlyResolveControlFlow(
2381 const InstructionBlock* block) const { 2401 const InstructionBlock* block) const {
2382 if (block->PredecessorCount() != 1) return false; 2402 if (block->PredecessorCount() != 1) return false;
2383 return block->predecessors()[0].IsNext(block->rpo_number()); 2403 return block->predecessors()[0].IsNext(block->rpo_number());
2384 } 2404 }
2385 2405
2386 2406
2387 void LiveRangeConnector::ResolveControlFlow() { 2407 void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
2388 // Lazily linearize live ranges in memory for fast lookup. 2408 // Lazily linearize live ranges in memory for fast lookup.
2389 LiveRangeFinder finder(data()); 2409 LiveRangeFinder finder(data(), local_zone);
2390 auto& live_in_sets = data()->live_in_sets(); 2410 auto& live_in_sets = data()->live_in_sets();
2391 for (auto block : code()->instruction_blocks()) { 2411 for (auto block : code()->instruction_blocks()) {
2392 if (CanEagerlyResolveControlFlow(block)) continue; 2412 if (CanEagerlyResolveControlFlow(block)) continue;
2393 auto live = live_in_sets[block->rpo_number().ToInt()]; 2413 auto live = live_in_sets[block->rpo_number().ToInt()];
2394 BitVector::Iterator iterator(live); 2414 BitVector::Iterator iterator(live);
2395 while (!iterator.Done()) { 2415 while (!iterator.Done()) {
2396 auto* array = finder.ArrayFor(iterator.Current()); 2416 auto* array = finder.ArrayFor(iterator.Current());
2397 for (auto pred : block->predecessors()) { 2417 for (auto pred : block->predecessors()) {
2398 FindResult result; 2418 FindResult result;
2399 const auto* pred_block = code()->InstructionBlockAt(pred); 2419 const auto* pred_block = code()->InstructionBlockAt(pred);
(...skipping 27 matching lines...)
2427 DCHECK(!data() 2447 DCHECK(!data()
2428 ->InstructionAt(pred->last_instruction_index()) 2448 ->InstructionAt(pred->last_instruction_index())
2429 ->HasReferenceMap()); 2449 ->HasReferenceMap());
2430 gap_index = pred->last_instruction_index(); 2450 gap_index = pred->last_instruction_index();
2431 position = Instruction::END; 2451 position = Instruction::END;
2432 } 2452 }
2433 data()->AddGapMove(gap_index, position, pred_op, cur_op); 2453 data()->AddGapMove(gap_index, position, pred_op, cur_op);
2434 } 2454 }
2435 2455
2436 2456
2437 void LiveRangeConnector::ConnectRanges(Zone* temp_zone) { 2457 void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
2438 ZoneMap<std::pair<ParallelMove*, InstructionOperand>, InstructionOperand> 2458 ZoneMap<std::pair<ParallelMove*, InstructionOperand>, InstructionOperand>
2439 delayed_insertion_map(temp_zone); 2459 delayed_insertion_map(local_zone);
2440 for (auto first_range : data()->live_ranges()) { 2460 for (auto first_range : data()->live_ranges()) {
2441 if (first_range == nullptr || first_range->IsChild()) continue; 2461 if (first_range == nullptr || first_range->IsChild()) continue;
2442 for (auto second_range = first_range->next(); second_range != nullptr; 2462 for (auto second_range = first_range->next(); second_range != nullptr;
2443 first_range = second_range, second_range = second_range->next()) { 2463 first_range = second_range, second_range = second_range->next()) {
2444 auto pos = second_range->Start(); 2464 auto pos = second_range->Start();
2445 // Add gap move if the two live ranges touch and there is no block 2465 // Add gap move if the two live ranges touch and there is no block
2446 // boundary. 2466 // boundary.
2447 if (second_range->IsSpilled()) continue; 2467 if (second_range->IsSpilled()) continue;
2448 if (first_range->End().Value() != pos.Value()) continue; 2468 if (first_range->End().Value() != pos.Value()) continue;
2449 if (data()->IsBlockBoundary(pos) && 2469 if (IsBlockBoundary(code(), pos) &&
2450 !CanEagerlyResolveControlFlow(GetInstructionBlock(pos))) { 2470 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
2451 continue; 2471 continue;
2452 } 2472 }
2453 auto prev_operand = first_range->GetAssignedOperand(); 2473 auto prev_operand = first_range->GetAssignedOperand();
2454 auto cur_operand = second_range->GetAssignedOperand(); 2474 auto cur_operand = second_range->GetAssignedOperand();
2455 if (prev_operand == cur_operand) continue; 2475 if (prev_operand == cur_operand) continue;
2456 bool delay_insertion = false; 2476 bool delay_insertion = false;
2457 Instruction::GapPosition gap_pos; 2477 Instruction::GapPosition gap_pos;
2458 int gap_index = pos.ToInstructionIndex(); 2478 int gap_index = pos.ToInstructionIndex();
2459 if (pos.IsGapPosition()) { 2479 if (pos.IsGapPosition()) {
2460 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END; 2480 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
(...skipping 10 matching lines...)
2471 if (!delay_insertion) { 2491 if (!delay_insertion) {
2472 move->AddMove(prev_operand, cur_operand); 2492 move->AddMove(prev_operand, cur_operand);
2473 } else { 2493 } else {
2474 delayed_insertion_map.insert( 2494 delayed_insertion_map.insert(
2475 std::make_pair(std::make_pair(move, prev_operand), cur_operand)); 2495 std::make_pair(std::make_pair(move, prev_operand), cur_operand));
2476 } 2496 }
2477 } 2497 }
2478 } 2498 }
2479 if (delayed_insertion_map.empty()) return; 2499 if (delayed_insertion_map.empty()) return;
2480 // Insert all the moves which should occur after the stored move. 2500 // Insert all the moves which should occur after the stored move.
2481 ZoneVector<MoveOperands*> to_insert(temp_zone); 2501 ZoneVector<MoveOperands*> to_insert(local_zone);
2482 ZoneVector<MoveOperands*> to_eliminate(temp_zone); 2502 ZoneVector<MoveOperands*> to_eliminate(local_zone);
2483 to_insert.reserve(4); 2503 to_insert.reserve(4);
2484 to_eliminate.reserve(4); 2504 to_eliminate.reserve(4);
2485 auto moves = delayed_insertion_map.begin()->first.first; 2505 auto moves = delayed_insertion_map.begin()->first.first;
2486 for (auto it = delayed_insertion_map.begin();; ++it) { 2506 for (auto it = delayed_insertion_map.begin();; ++it) {
2487 bool done = it == delayed_insertion_map.end(); 2507 bool done = it == delayed_insertion_map.end();
2488 if (done || it->first.first != moves) { 2508 if (done || it->first.first != moves) {
2489 // Commit the MoveOperands for current ParallelMove. 2509 // Commit the MoveOperands for current ParallelMove.
2490 for (auto move : to_eliminate) { 2510 for (auto move : to_eliminate) {
2491 move->Eliminate(); 2511 move->Eliminate();
2492 } 2512 }
(...skipping 10 matching lines...)
2503 auto move = new (code_zone()) MoveOperands(it->first.second, it->second); 2523 auto move = new (code_zone()) MoveOperands(it->first.second, it->second);
2504 auto eliminate = moves->PrepareInsertAfter(move); 2524 auto eliminate = moves->PrepareInsertAfter(move);
2505 to_insert.push_back(move); 2525 to_insert.push_back(move);
2506 if (eliminate != nullptr) to_eliminate.push_back(eliminate); 2526 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
2507 } 2527 }
2508 } 2528 }
2509 2529
2510 } // namespace compiler 2530 } // namespace compiler
2511 } // namespace internal 2531 } // namespace internal
2512 } // namespace v8 2532 } // namespace v8