Chromium Code Reviews

Diff: src/compiler/register-allocator.cc

Issue 1094063002: [turbofan] split register allocator into little pieces (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 8 months ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/compiler/linkage.h"
 #include "src/compiler/register-allocator.h"
 #include "src/string-stream.h"

 namespace v8 {
 namespace internal {
 namespace compiler {

 #define TRACE(...)                             \
   do {                                         \
     if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
   } while (false)

-static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
+namespace {
+
+inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
   return a.Value() < b.Value() ? a : b;
 }


-static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
+inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
   return a.Value() > b.Value() ? a : b;
 }


-static void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
+void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
   auto it = std::find(v->begin(), v->end(), range);
   DCHECK(it != v->end());
   v->erase(it);
 }

+}  // namespace
+

 UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
                          InstructionOperand* hint)
     : operand_(operand), hint_(hint), pos_(pos), next_(nullptr), flags_(0) {
   bool register_beneficial = true;
   UsePositionType type = UsePositionType::kAny;
   if (operand_ != nullptr && operand_->IsUnallocated()) {
     const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
     if (unalloc->HasRegisterPolicy()) {
       type = UsePositionType::kRequiresRegister;
(...skipping 34 matching lines...)
 struct LiveRange::SpillAtDefinitionList : ZoneObject {
   SpillAtDefinitionList(int gap_index, InstructionOperand* operand,
                         SpillAtDefinitionList* next)
       : gap_index(gap_index), operand(operand), next(next) {}
   const int gap_index;
   InstructionOperand* const operand;
   SpillAtDefinitionList* const next;
 };


-#ifdef DEBUG
+LiveRange::LiveRange(int id, Zone* zone)
+    : id_(id),
+      spilled_(false),
+      has_slot_use_(false),
+      is_phi_(false),
+      is_non_loop_phi_(false),
+      kind_(UNALLOCATED_REGISTERS),
+      assigned_register_(kInvalidAssignment),
+      last_interval_(nullptr),
+      first_interval_(nullptr),
+      first_pos_(nullptr),
+      parent_(nullptr),
+      next_(nullptr),
+      current_interval_(nullptr),
+      last_processed_use_(nullptr),
+      current_hint_operand_(nullptr),
+      spill_start_index_(kMaxInt),
+      spill_type_(SpillType::kNoSpillType),
+      spill_operand_(nullptr),
+      spills_at_definition_(nullptr) {}


 void LiveRange::Verify() const {
   UsePosition* cur = first_pos_;
   while (cur != nullptr) {
-    DCHECK(Start().Value() <= cur->pos().Value() &&
+    CHECK(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
     cur = cur->next();
   }
 }


 bool LiveRange::HasOverlap(UseInterval* target) const {
   UseInterval* current_interval = first_interval_;
   while (current_interval != nullptr) {
     // Intervals overlap if the start of one is contained in the other.
     if (current_interval->Contains(target->start()) ||
         target->Contains(current_interval->start())) {
       return true;
     }
     current_interval = current_interval->next();
   }
   return false;
 }


-#endif
-
-
-LiveRange::LiveRange(int id, Zone* zone)
-    : id_(id),
-      spilled_(false),
-      has_slot_use_(false),
-      is_phi_(false),
-      is_non_loop_phi_(false),
-      kind_(UNALLOCATED_REGISTERS),
-      assigned_register_(kInvalidAssignment),
-      last_interval_(nullptr),
-      first_interval_(nullptr),
-      first_pos_(nullptr),
-      parent_(nullptr),
-      next_(nullptr),
-      current_interval_(nullptr),
-      last_processed_use_(nullptr),
-      current_hint_operand_(nullptr),
-      spill_start_index_(kMaxInt),
-      spill_type_(SpillType::kNoSpillType),
-      spill_operand_(nullptr),
-      spills_at_definition_(nullptr) {}
-
-
 void LiveRange::set_assigned_register(int reg) {
   DCHECK(!HasRegisterAssigned() && !IsSpilled());
   assigned_register_ = reg;
 }


 void LiveRange::MakeSpilled() {
   DCHECK(!IsSpilled());
   DCHECK(!TopLevel()->HasNoSpillType());
   spilled_ = true;
(...skipping 414 matching lines...)
       if (a == nullptr || a->start().Value() > other->End().Value()) break;
       AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
     } else {
       b = b->next();
     }
   }
   return LifetimePosition::Invalid();
 }


-RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config,
-                                     Zone* zone, Frame* frame,
-                                     InstructionSequence* code,
-                                     const char* debug_name)
+static bool AreUseIntervalsIntersecting(UseInterval* interval1,
+                                        UseInterval* interval2) {
+  while (interval1 != nullptr && interval2 != nullptr) {
+    if (interval1->start().Value() < interval2->start().Value()) {
+      if (interval1->end().Value() > interval2->start().Value()) {
+        return true;
+      }
+      interval1 = interval1->next();
+    } else {
+      if (interval2->end().Value() > interval1->start().Value()) {
+        return true;
+      }
+      interval2 = interval2->next();
+    }
+  }
+  return false;
+}
+
+
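AreUseIntervalsIntersecting above is a two-pointer sweep over two sorted interval lists: always advance the list whose current interval starts first, and report an intersection as soon as that interval's end passes the other's start. A standalone sketch of the same idea, not part of the patch, using a hypothetical Interval struct and plain ints in place of V8's UseInterval and LifetimePosition:

#include <cstdio>

// Hypothetical stand-in for UseInterval: a sorted singly linked list
// of half-open [start, end) intervals.
struct Interval {
  int start;
  int end;
  Interval* next;
};

// Same control flow as AreUseIntervalsIntersecting: advance whichever
// list currently starts first; overlap is detected when that interval
// ends strictly after the other one starts.
bool Intersect(Interval* a, Interval* b) {
  while (a != nullptr && b != nullptr) {
    if (a->start < b->start) {
      if (a->end > b->start) return true;
      a = a->next;
    } else {
      if (b->end > a->start) return true;
      b = b->next;
    }
  }
  return false;  // One list ran out without any overlap.
}

int main() {
  Interval a2{10, 12, nullptr}, a1{0, 4, &a2};  // [0,4) -> [10,12)
  Interval b2{12, 14, nullptr}, b1{4, 6, &b2};  // [4,6) -> [12,14)
  std::printf("%d\n", Intersect(&a1, &b1));     // 0: touching is not overlap
  Interval c1{11, 13, nullptr};                 // [11,13) overlaps [10,12)
  std::printf("%d\n", Intersect(&a1, &c1));     // 1
}

Note the strict comparisons: with half-open intervals, a range that ends exactly where another begins does not intersect it, which is what lets adjacent spill ranges merge below.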
+SpillRange::SpillRange(LiveRange* range, Zone* zone) : live_ranges_(zone) {
+  auto src = range->first_interval();
+  UseInterval* result = nullptr;
+  UseInterval* node = nullptr;
+  // Copy the nodes
+  while (src != nullptr) {
+    auto new_node = new (zone) UseInterval(src->start(), src->end());
+    if (result == nullptr) {
+      result = new_node;
+    } else {
+      node->set_next(new_node);
+    }
+    node = new_node;
+    src = src->next();
+  }
+  use_interval_ = result;
+  live_ranges().push_back(range);
+  end_position_ = node->end();
+  DCHECK(!range->HasSpillRange());
+  range->SetSpillRange(this);
+}
+
+
+bool SpillRange::IsIntersectingWith(SpillRange* other) const {
+  if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
+      this->End().Value() <= other->use_interval_->start().Value() ||
+      other->End().Value() <= this->use_interval_->start().Value()) {
+    return false;
+  }
+  return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
+}
+
+
+bool SpillRange::TryMerge(SpillRange* other) {
+  if (Kind() != other->Kind() || IsIntersectingWith(other)) return false;
+
+  auto max = LifetimePosition::MaxPosition();
+  if (End().Value() < other->End().Value() &&
+      other->End().Value() != max.Value()) {
+    end_position_ = other->End();
+  }
+  other->end_position_ = max;
+
+  MergeDisjointIntervals(other->use_interval_);
+  other->use_interval_ = nullptr;
+
+  for (auto range : other->live_ranges()) {
+    DCHECK(range->GetSpillRange() == other);
+    range->SetSpillRange(this);
+  }
+
+  live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
+                       other->live_ranges().end());
+  other->live_ranges().clear();
+
+  return true;
+}
+
+
+void SpillRange::SetOperand(AllocatedOperand* op) {
+  for (auto range : live_ranges()) {
+    DCHECK(range->GetSpillRange() == this);
+    range->CommitSpillOperand(op);
+  }
+}
+
+
+void SpillRange::MergeDisjointIntervals(UseInterval* other) {
+  UseInterval* tail = nullptr;
+  auto current = use_interval_;
+  while (other != nullptr) {
+    // Make sure the 'current' list starts first
+    if (current == nullptr ||
+        current->start().Value() > other->start().Value()) {
+      std::swap(current, other);
+    }
+    // Check disjointness
+    DCHECK(other == nullptr ||
+           current->end().Value() <= other->start().Value());
+    // Append the 'current' node to the result accumulator and move forward
+    if (tail == nullptr) {
+      use_interval_ = current;
+    } else {
+      tail->set_next(current);
+    }
+    tail = current;
+    current = current->next();
+  }
+  // Other list is empty => we are done
+}
+
+
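MergeDisjointIntervals is an in-place merge of two sorted linked lists, with the extra precondition (the DCHECK) that intervals from the two lists never overlap; the std::swap keeps 'current' pointing at whichever list has the earlier head. The same control flow on a generic list, as a sketch under assumed types (Node and MergeSorted are illustrative, not V8 API):

#include <cstdio>
#include <utility>

struct Node {
  int value;
  Node* next;
};

// Mirrors the shape of SpillRange::MergeDisjointIntervals: swap so that
// 'current' always holds the smaller head, append that node to the
// accumulated tail, and advance.
Node* MergeSorted(Node* current, Node* other) {
  Node* head = current;
  Node* tail = nullptr;
  while (other != nullptr) {
    if (current == nullptr || current->value > other->value) {
      std::swap(current, other);  // Make sure 'current' starts first.
    }
    if (tail == nullptr) {
      head = current;             // First appended node becomes the head.
    } else {
      tail->next = current;
    }
    tail = current;
    current = current->next;
  }
  // 'other' is exhausted; the rest of 'current' is already linked in.
  return head;
}

int main() {
  Node a3{9, nullptr}, a2{5, &a3}, a1{1, &a2};  // 1 -> 5 -> 9
  Node b2{7, nullptr}, b1{3, &b2};              // 3 -> 7
  for (Node* n = MergeSorted(&a1, &b1); n != nullptr; n = n->next) {
    std::printf("%d ", n->value);               // Prints: 1 3 5 7 9
  }
  std::printf("\n");
}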
+LiveRangeBuilder::LiveRangeBuilder(const RegisterConfiguration* config,
+                                   Zone* zone, Frame* frame,
+                                   InstructionSequence* code,
+                                   const char* debug_name)
     : local_zone_(zone),
       frame_(frame),
       code_(code),
       debug_name_(debug_name),
       config_(config),
       phi_map_(local_zone()),
       live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()),
       live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()),
       fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
                          local_zone()),
       fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
                                 local_zone()),
-      unhandled_live_ranges_(local_zone()),
-      active_live_ranges_(local_zone()),
-      inactive_live_ranges_(local_zone()),
       spill_ranges_(local_zone()),
-      mode_(UNALLOCATED_REGISTERS),
-      num_registers_(-1) {
+      assigned_registers_(nullptr),
+      assigned_double_registers_(nullptr) {
   DCHECK(this->config()->num_general_registers() <=
          RegisterConfiguration::kMaxGeneralRegisters);
   DCHECK(this->config()->num_double_registers() <=
          RegisterConfiguration::kMaxDoubleRegisters);
-  // TryAllocateFreeReg and AllocateBlockedReg assume this
-  // when allocating local arrays.
-  DCHECK(RegisterConfiguration::kMaxDoubleRegisters >=
-         this->config()->num_general_registers());
-  unhandled_live_ranges().reserve(
-      static_cast<size_t>(code->VirtualRegisterCount() * 2));
-  active_live_ranges().reserve(8);
-  inactive_live_ranges().reserve(8);
   spill_ranges().reserve(8);
-  assigned_registers_ =
-      new (code_zone()) BitVector(config->num_general_registers(), code_zone());
+  assigned_registers_ = new (code_zone())
+      BitVector(this->config()->num_general_registers(), code_zone());
   assigned_double_registers_ = new (code_zone())
-      BitVector(config->num_aliased_double_registers(), code_zone());
-  frame->SetAllocatedRegisters(assigned_registers_);
-  frame->SetAllocatedDoubleRegisters(assigned_double_registers_);
+      BitVector(this->config()->num_aliased_double_registers(), code_zone());
+  this->frame()->SetAllocatedRegisters(assigned_registers_);
+  this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
 }


-BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) {
+BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block) {
   // Compute live out for the given block, except not including backward
   // successor edges.
   auto live_out = new (local_zone())
       BitVector(code()->VirtualRegisterCount(), local_zone());

   // Process all successor blocks.
   for (auto succ : block->successors()) {
     // Add values live on entry to the successor. Note the successor's
     // live_in will not be computed yet for backwards edges.
     auto live_in = live_in_sets_[succ.ToSize()];
     if (live_in != nullptr) live_out->Union(*live_in);

     // All phi input operands corresponding to this successor edge are live
     // out from this block.
     auto successor = code()->InstructionBlockAt(succ);
     size_t index = successor->PredecessorIndexOf(block->rpo_number());
     DCHECK(index < successor->PredecessorCount());
     for (auto phi : successor->phis()) {
       live_out->Add(phi->operands()[index]);
     }
   }
   return live_out;
 }
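ComputeLiveOut is the usual backward-dataflow rule: a virtual register is live out of a block if it is live into some successor, or if it is the phi input this block contributes along a successor edge; on backedges the successor's live_in is still null on this pass and contributes nothing. A toy version over std::bitset, with hypothetical Block and Phi structs standing in for InstructionBlock and PhiInstruction:

#include <bitset>
#include <cstddef>
#include <vector>

constexpr std::size_t kMaxVregs = 64;
using LiveSet = std::bitset<kMaxVregs>;

// Hypothetical stand-ins for PhiInstruction / InstructionBlock.
struct Phi {
  std::vector<int> operands;  // One virtual register per predecessor.
};
struct Block {
  std::vector<int> successors;    // Successor block ids.
  std::vector<int> predecessors;  // Predecessor block ids.
  std::vector<Phi> phis;
};

// live_out(b) = union over successors s of live_in(s), plus the phi
// operand that b supplies along the edge b -> s.
LiveSet ComputeLiveOut(const std::vector<Block>& blocks,
                       const std::vector<const LiveSet*>& live_in,
                       int block_id) {
  LiveSet live_out;
  const Block& block = blocks[block_id];
  for (int succ_id : block.successors) {
    // live_in is nullptr for not-yet-visited backedge targets,
    // matching the nullptr check in the real code.
    if (live_in[succ_id] != nullptr) live_out |= *live_in[succ_id];
    const Block& succ = blocks[succ_id];
    // Which predecessor slot of 'succ' does this block occupy?
    std::size_t index = 0;
    while (succ.predecessors[index] != block_id) ++index;
    for (const Phi& phi : succ.phis) {
      live_out.set(phi.operands[index]);  // Our phi input is live out.
    }
  }
  return live_out;
}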


-void RegisterAllocator::AddInitialIntervals(const InstructionBlock* block,
+void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
                                            BitVector* live_out) {
   // Add an interval that includes the entire block to the live range for
   // each live_out value.
   auto start = LifetimePosition::GapFromInstructionIndex(
       block->first_instruction_index());
   auto end = LifetimePosition::InstructionFromInstructionIndex(
       block->last_instruction_index()).NextStart();
   BitVector::Iterator iterator(live_out);
   while (!iterator.Done()) {
     int operand_index = iterator.Current();
     auto range = LiveRangeFor(operand_index);
     range->AddUseInterval(start, end, local_zone());
     iterator.Advance();
   }
 }


-int RegisterAllocator::FixedDoubleLiveRangeID(int index) {
+int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) {
   return -index - 1 - config()->num_general_registers();
 }


-InstructionOperand* RegisterAllocator::AllocateFixed(
-    UnallocatedOperand* operand, int pos, bool is_tagged) {
+InstructionOperand* LiveRangeBuilder::AllocateFixed(UnallocatedOperand* operand,
+                                                    int pos, bool is_tagged) {
   TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
   DCHECK(operand->HasFixedPolicy());
   InstructionOperand allocated;
   if (operand->HasFixedSlotPolicy()) {
     allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT,
                                  operand->fixed_slot_index());
   } else if (operand->HasFixedRegisterPolicy()) {
     allocated = AllocatedOperand(AllocatedOperand::REGISTER,
                                  operand->fixed_register_index());
   } else if (operand->HasFixedDoubleRegisterPolicy()) {
     allocated = AllocatedOperand(AllocatedOperand::DOUBLE_REGISTER,
                                  operand->fixed_register_index());
   } else {
     UNREACHABLE();
   }
   InstructionOperand::ReplaceWith(operand, &allocated);
   if (is_tagged) {
     TRACE("Fixed reg is tagged at %d\n", pos);
     auto instr = InstructionAt(pos);
     if (instr->HasReferenceMap()) {
       instr->reference_map()->RecordReference(*operand);
     }
   }
   return operand;
 }


-LiveRange* RegisterAllocator::NewLiveRange(int index) {
+LiveRange* LiveRangeBuilder::NewLiveRange(int index) {
   // The LiveRange object itself can go in the local zone, but the
   // InstructionOperand needs to go in the code zone, since it may survive
   // register allocation.
   return new (local_zone()) LiveRange(index, code_zone());
 }


-LiveRange* RegisterAllocator::FixedLiveRangeFor(int index) {
+LiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
   DCHECK(index < config()->num_general_registers());
   auto result = fixed_live_ranges()[index];
   if (result == nullptr) {
     result = NewLiveRange(FixedLiveRangeID(index));
     DCHECK(result->IsFixed());
-    result->kind_ = GENERAL_REGISTERS;
+    result->set_kind(GENERAL_REGISTERS);
     SetLiveRangeAssignedRegister(result, index);
     fixed_live_ranges()[index] = result;
   }
   return result;
 }


-LiveRange* RegisterAllocator::FixedDoubleLiveRangeFor(int index) {
+LiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) {
   DCHECK(index < config()->num_aliased_double_registers());
   auto result = fixed_double_live_ranges()[index];
   if (result == nullptr) {
     result = NewLiveRange(FixedDoubleLiveRangeID(index));
     DCHECK(result->IsFixed());
-    result->kind_ = DOUBLE_REGISTERS;
+    result->set_kind(DOUBLE_REGISTERS);
     SetLiveRangeAssignedRegister(result, index);
     fixed_double_live_ranges()[index] = result;
   }
   return result;
 }


-LiveRange* RegisterAllocator::LiveRangeFor(int index) {
+LiveRange* LiveRangeBuilder::LiveRangeFor(int index) {
   if (index >= static_cast<int>(live_ranges().size())) {
     live_ranges().resize(index + 1, nullptr);
   }
   auto result = live_ranges()[index];
   if (result == nullptr) {
     result = NewLiveRange(index);
     live_ranges()[index] = result;
   }
   return result;
 }


-Instruction* RegisterAllocator::GetLastInstruction(
+Instruction* LiveRangeBuilder::GetLastInstruction(
     const InstructionBlock* block) {
   return code()->InstructionAt(block->last_instruction_index());
 }


-LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) {
+LiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
   if (operand->IsUnallocated()) {
     return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register());
   } else if (operand->IsConstant()) {
     return LiveRangeFor(ConstantOperand::cast(operand)->virtual_register());
   } else if (operand->IsRegister()) {
     return FixedLiveRangeFor(RegisterOperand::cast(operand)->index());
   } else if (operand->IsDoubleRegister()) {
     return FixedDoubleLiveRangeFor(
         DoubleRegisterOperand::cast(operand)->index());
   } else {
     return nullptr;
   }
 }


-void RegisterAllocator::Define(LifetimePosition position,
+void LiveRangeBuilder::Define(LifetimePosition position,
                               InstructionOperand* operand,
                               InstructionOperand* hint) {
   auto range = LiveRangeFor(operand);
   if (range == nullptr) return;

   if (range->IsEmpty() || range->Start().Value() > position.Value()) {
     // Can happen if there is a definition without use.
     range->AddUseInterval(position, position.NextStart(), local_zone());
     range->AddUsePosition(position.NextStart(), nullptr, nullptr, local_zone());
   } else {
     range->ShortenTo(position);
   }

   if (operand->IsUnallocated()) {
     auto unalloc_operand = UnallocatedOperand::cast(operand);
     range->AddUsePosition(position, unalloc_operand, hint, local_zone());
   }
 }


-void RegisterAllocator::Use(LifetimePosition block_start,
+void LiveRangeBuilder::Use(LifetimePosition block_start,
                            LifetimePosition position,
                            InstructionOperand* operand,
                            InstructionOperand* hint) {
   auto range = LiveRangeFor(operand);
   if (range == nullptr) return;
   if (operand->IsUnallocated()) {
     UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
     range->AddUsePosition(position, unalloc_operand, hint, local_zone());
   }
   range->AddUseInterval(block_start, position, local_zone());
 }


-MoveOperands* RegisterAllocator::AddGapMove(int index,
+MoveOperands* LiveRangeBuilder::AddGapMove(int index,
                                            Instruction::GapPosition position,
                                            const InstructionOperand& from,
                                            const InstructionOperand& to) {
   auto instr = code()->InstructionAt(index);
   auto moves = instr->GetOrCreateParallelMove(position, code_zone());
   return moves->AddMove(from, to);
 }


-static bool AreUseIntervalsIntersecting(UseInterval* interval1,
-                                        UseInterval* interval2) {
-  while (interval1 != nullptr && interval2 != nullptr) {
-    if (interval1->start().Value() < interval2->start().Value()) {
-      if (interval1->end().Value() > interval2->start().Value()) {
-        return true;
-      }
-      interval1 = interval1->next();
-    } else {
-      if (interval2->end().Value() > interval1->start().Value()) {
-        return true;
-      }
-      interval2 = interval2->next();
-    }
-  }
-  return false;
-}
-
-
-SpillRange::SpillRange(LiveRange* range, Zone* zone) : live_ranges_(zone) {
-  auto src = range->first_interval();
-  UseInterval* result = nullptr;
-  UseInterval* node = nullptr;
-  // Copy the nodes
-  while (src != nullptr) {
-    auto new_node = new (zone) UseInterval(src->start(), src->end());
-    if (result == nullptr) {
-      result = new_node;
-    } else {
-      node->set_next(new_node);
-    }
-    node = new_node;
-    src = src->next();
-  }
-  use_interval_ = result;
-  live_ranges().push_back(range);
-  end_position_ = node->end();
-  DCHECK(!range->HasSpillRange());
-  range->SetSpillRange(this);
-}
-
-
-bool SpillRange::IsIntersectingWith(SpillRange* other) const {
-  if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
-      this->End().Value() <= other->use_interval_->start().Value() ||
-      other->End().Value() <= this->use_interval_->start().Value()) {
-    return false;
-  }
-  return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
-}
-
-
-bool SpillRange::TryMerge(SpillRange* other) {
-  if (Kind() != other->Kind() || IsIntersectingWith(other)) return false;
-
-  auto max = LifetimePosition::MaxPosition();
-  if (End().Value() < other->End().Value() &&
-      other->End().Value() != max.Value()) {
-    end_position_ = other->End();
-  }
-  other->end_position_ = max;
-
-  MergeDisjointIntervals(other->use_interval_);
-  other->use_interval_ = nullptr;
-
-  for (auto range : other->live_ranges()) {
-    DCHECK(range->GetSpillRange() == other);
-    range->SetSpillRange(this);
-  }
-
-  live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
-                       other->live_ranges().end());
-  other->live_ranges().clear();
-
-  return true;
-}
-
-
-void SpillRange::SetOperand(AllocatedOperand* op) {
-  for (auto range : live_ranges()) {
-    DCHECK(range->GetSpillRange() == this);
-    range->CommitSpillOperand(op);
-  }
-}
-
-
-void SpillRange::MergeDisjointIntervals(UseInterval* other) {
-  UseInterval* tail = nullptr;
-  auto current = use_interval_;
-  while (other != nullptr) {
-    // Make sure the 'current' list starts first
-    if (current == nullptr ||
-        current->start().Value() > other->start().Value()) {
-      std::swap(current, other);
-    }
-    // Check disjointness
-    DCHECK(other == nullptr ||
-           current->end().Value() <= other->start().Value());
-    // Append the 'current' node to the result accumulator and move forward
-    if (tail == nullptr) {
-      use_interval_ = current;
-    } else {
-      tail->set_next(current);
-    }
-    tail = current;
-    current = current->next();
-  }
-  // Other list is empty => we are done
-}
-
-
-void RegisterAllocator::AssignSpillSlots() {
-  // Merge disjoint spill ranges
-  for (size_t i = 0; i < spill_ranges().size(); i++) {
-    auto range = spill_ranges()[i];
-    if (range->IsEmpty()) continue;
-    for (size_t j = i + 1; j < spill_ranges().size(); j++) {
-      auto other = spill_ranges()[j];
-      if (!other->IsEmpty()) {
-        range->TryMerge(other);
-      }
-    }
-  }
-
-  // Allocate slots for the merged spill ranges.
-  for (auto range : spill_ranges()) {
-    if (range->IsEmpty()) continue;
-    // Allocate a new operand referring to the spill slot.
-    auto kind = range->Kind();
-    int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS);
-    auto op_kind = kind == DOUBLE_REGISTERS
-                       ? AllocatedOperand::DOUBLE_STACK_SLOT
-                       : AllocatedOperand::STACK_SLOT;
-    auto op = AllocatedOperand::New(code_zone(), op_kind, index);
-    range->SetOperand(op);
-  }
-}
-
-
-void RegisterAllocator::CommitAssignment() {
-  for (auto range : live_ranges()) {
-    if (range == nullptr || range->IsEmpty()) continue;
-    InstructionOperand* spill_operand = nullptr;
-    if (!range->TopLevel()->HasNoSpillType()) {
-      spill_operand = range->TopLevel()->GetSpillOperand();
-    }
-    auto assigned = range->GetAssignedOperand();
-    range->ConvertUsesToOperand(assigned, spill_operand);
-    if (range->is_phi()) AssignPhiInput(range, assigned);
-    if (!range->IsChild() && spill_operand != nullptr) {
-      range->CommitSpillsAtDefinition(code(), spill_operand,
-                                      range->has_slot_use());
-    }
-  }
-}
-
-
-SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) {
+SpillRange* LiveRangeBuilder::AssignSpillRangeToLiveRange(LiveRange* range) {
   auto spill_range = new (local_zone()) SpillRange(range, local_zone());
   spill_ranges().push_back(spill_range);
   return spill_range;
 }


-bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) {
-  if (range->IsChild() || !range->is_phi()) return false;
-  DCHECK(!range->HasSpillOperand());
-
-  auto lookup = phi_map_.find(range->id());
-  DCHECK(lookup != phi_map_.end());
-  auto phi = lookup->second->phi;
-  auto block = lookup->second->block;
-  // Count the number of spilled operands.
-  size_t spilled_count = 0;
-  LiveRange* first_op = nullptr;
-  for (size_t i = 0; i < phi->operands().size(); i++) {
-    int op = phi->operands()[i];
-    LiveRange* op_range = LiveRangeFor(op);
-    if (op_range->GetSpillRange() == nullptr) continue;
-    auto pred = code()->InstructionBlockAt(block->predecessors()[i]);
-    auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
-        pred->last_instruction_index());
-    while (op_range != nullptr && !op_range->CanCover(pred_end)) {
-      op_range = op_range->next();
-    }
-    if (op_range != nullptr && op_range->IsSpilled()) {
-      spilled_count++;
-      if (first_op == nullptr) {
-        first_op = op_range->TopLevel();
-      }
-    }
-  }
-
-  // Only continue if more than half of the operands are spilled.
-  if (spilled_count * 2 <= phi->operands().size()) {
-    return false;
-  }
-
-  // Try to merge the spilled operands and count the number of merged spilled
-  // operands.
-  DCHECK(first_op != nullptr);
-  auto first_op_spill = first_op->GetSpillRange();
-  size_t num_merged = 1;
-  for (size_t i = 1; i < phi->operands().size(); i++) {
-    int op = phi->operands()[i];
-    auto op_range = LiveRangeFor(op);
-    auto op_spill = op_range->GetSpillRange();
-    if (op_spill != nullptr &&
-        (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill))) {
-      num_merged++;
-    }
-  }
-
-  // Only continue if enough operands could be merged to the
-  // same spill slot.
-  if (num_merged * 2 <= phi->operands().size() ||
-      AreUseIntervalsIntersecting(first_op_spill->interval(),
-                                  range->first_interval())) {
-    return false;
-  }
-
-  // If the range does not need register soon, spill it to the merged
-  // spill range.
-  auto next_pos = range->Start();
-  if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
-  auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
-  if (pos == nullptr) {
-    auto spill_range = range->TopLevel()->HasSpillRange()
-                           ? range->TopLevel()->GetSpillRange()
-                           : AssignSpillRangeToLiveRange(range->TopLevel());
-    CHECK(first_op_spill->TryMerge(spill_range));
-    Spill(range);
-    return true;
-  } else if (pos->pos().Value() > range->Start().NextStart().Value()) {
-    auto spill_range = range->TopLevel()->HasSpillRange()
-                           ? range->TopLevel()->GetSpillRange()
-                           : AssignSpillRangeToLiveRange(range->TopLevel());
-    CHECK(first_op_spill->TryMerge(spill_range));
-    SpillBetween(range, range->Start(), pos->pos());
-    DCHECK(UnhandledIsSorted());
-    return true;
-  }
-  return false;
-}
-
-
-void RegisterAllocator::MeetRegisterConstraints(const InstructionBlock* block) {
+void LiveRangeBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
   int start = block->first_instruction_index();
   int end = block->last_instruction_index();
   DCHECK_NE(-1, start);
   for (int i = start; i <= end; ++i) {
     MeetConstraintsBefore(i);
     if (i != end) MeetConstraintsAfter(i);
   }
   // Meet register constraints for the instruction in the end.
   MeetRegisterConstraintsForLastInstructionInBlock(block);
 }


-void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock(
+void LiveRangeBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
     const InstructionBlock* block) {
   int end = block->last_instruction_index();
   auto last_instruction = InstructionAt(end);
   for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
     auto output_operand = last_instruction->OutputAt(i);
     DCHECK(!output_operand->IsConstant());
     auto output = UnallocatedOperand::cast(output_operand);
     int output_vreg = output->virtual_register();
     auto range = LiveRangeFor(output_vreg);
     bool assigned = false;
(...skipping 25 matching lines...)
         DCHECK(successor->PredecessorCount() == 1);
         int gap_index = successor->first_instruction_index();
         range->SpillAtDefinition(local_zone(), gap_index, output);
         range->SetSpillStartIndex(gap_index);
       }
     }
   }
 }


-void RegisterAllocator::MeetConstraintsAfter(int instr_index) {
+void LiveRangeBuilder::MeetConstraintsAfter(int instr_index) {
   auto first = InstructionAt(instr_index);
   // Handle fixed temporaries.
   for (size_t i = 0; i < first->TempCount(); i++) {
     auto temp = UnallocatedOperand::cast(first->TempAt(i));
     if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false);
   }
   // Handle constant/fixed output operands.
   for (size_t i = 0; i < first->OutputCount(); i++) {
     InstructionOperand* output = first->OutputAt(i);
     if (output->IsConstant()) {
       int output_vreg = ConstantOperand::cast(output)->virtual_register();
       auto range = LiveRangeFor(output_vreg);
       range->SetSpillStartIndex(instr_index + 1);
       range->SetSpillOperand(output);
       continue;
     }
     auto first_output = UnallocatedOperand::cast(output);
     auto range = LiveRangeFor(first_output->virtual_register());
     bool assigned = false;
     if (first_output->HasFixedPolicy()) {
       int output_vreg = first_output->virtual_register();
       UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
-      bool is_tagged = HasTaggedValue(output_vreg);
+      bool is_tagged = IsReference(output_vreg);
       AllocateFixed(first_output, instr_index, is_tagged);

       // This value is produced on the stack, we never need to spill it.
       if (first_output->IsStackSlot()) {
         DCHECK(StackSlotOperand::cast(first_output)->index() <
                frame_->GetSpillSlotCount());
         range->SetSpillOperand(StackSlotOperand::cast(first_output));
         range->SetSpillStartIndex(instr_index + 1);
         assigned = true;
       }
       AddGapMove(instr_index + 1, Instruction::START, *first_output,
                  output_copy);
     }
     // Make sure we add a gap move for spilling (if we have not done
     // so already).
     if (!assigned) {
       range->SpillAtDefinition(local_zone(), instr_index + 1, first_output);
       range->SetSpillStartIndex(instr_index + 1);
     }
   }
 }


-void RegisterAllocator::MeetConstraintsBefore(int instr_index) {
+void LiveRangeBuilder::MeetConstraintsBefore(int instr_index) {
   auto second = InstructionAt(instr_index);
   // Handle fixed input operands of second instruction.
   for (size_t i = 0; i < second->InputCount(); i++) {
     auto input = second->InputAt(i);
     if (input->IsImmediate()) continue;  // Ignore immediates.
     auto cur_input = UnallocatedOperand::cast(input);
     if (cur_input->HasFixedPolicy()) {
       int input_vreg = cur_input->virtual_register();
       UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
-      bool is_tagged = HasTaggedValue(input_vreg);
+      bool is_tagged = IsReference(input_vreg);
       AllocateFixed(cur_input, instr_index, is_tagged);
       AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
     }
   }
   // Handle "output same as input" for second instruction.
   for (size_t i = 0; i < second->OutputCount(); i++) {
     auto output = second->OutputAt(i);
     if (!output->IsUnallocated()) continue;
     auto second_output = UnallocatedOperand::cast(output);
     if (!second_output->HasSameAsInputPolicy()) continue;
     DCHECK(i == 0);  // Only valid for first output.
     UnallocatedOperand* cur_input =
         UnallocatedOperand::cast(second->InputAt(0));
     int output_vreg = second_output->virtual_register();
     int input_vreg = cur_input->virtual_register();
     UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
     cur_input->set_virtual_register(second_output->virtual_register());
     AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
-    if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
+    if (IsReference(input_vreg) && !IsReference(output_vreg)) {
       if (second->HasReferenceMap()) {
         second->reference_map()->RecordReference(input_copy);
       }
-    } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
+    } else if (!IsReference(input_vreg) && IsReference(output_vreg)) {
       // The input is assumed to immediately have a tagged representation,
       // before the pointer map can be used. I.e. the pointer map at the
       // instruction will include the output operand (whose value at the
       // beginning of the instruction is equal to the input operand). If
       // this is not desired, then the pointer map at this instruction needs
       // to be adjusted manually.
     }
   }
 }


-bool RegisterAllocator::IsOutputRegisterOf(Instruction* instr, int index) {
+bool LiveRangeBuilder::IsOutputRegisterOf(Instruction* instr, int index) {
   for (size_t i = 0; i < instr->OutputCount(); i++) {
     auto output = instr->OutputAt(i);
     if (output->IsRegister() && RegisterOperand::cast(output)->index() == index)
       return true;
   }
   return false;
 }


-bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr,
-                                                 int index) {
+bool LiveRangeBuilder::IsOutputDoubleRegisterOf(Instruction* instr, int index) {
   for (size_t i = 0; i < instr->OutputCount(); i++) {
     auto output = instr->OutputAt(i);
     if (output->IsDoubleRegister() &&
         DoubleRegisterOperand::cast(output)->index() == index)
       return true;
   }
   return false;
 }


-void RegisterAllocator::ProcessInstructions(const InstructionBlock* block,
+void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
                                            BitVector* live) {
   int block_start = block->first_instruction_index();
   auto block_start_position =
       LifetimePosition::GapFromInstructionIndex(block_start);

   for (int index = block->last_instruction_index(); index >= block_start;
        index--) {
     auto curr_position =
         LifetimePosition::InstructionFromInstructionIndex(index);
     auto instr = InstructionAt(index);
     DCHECK(instr != nullptr);
(...skipping 112 matching lines...)
1355 Use(block_start_position, curr_position, &from, hint); 1213 Use(block_start_position, curr_position, &from, hint);
1356 if (from.IsUnallocated()) { 1214 if (from.IsUnallocated()) {
1357 live->Add(UnallocatedOperand::cast(from).virtual_register()); 1215 live->Add(UnallocatedOperand::cast(from).virtual_register());
1358 } 1216 }
1359 } 1217 }
1360 } 1218 }
1361 } 1219 }
1362 } 1220 }
1363 1221
1364 1222
1365 void RegisterAllocator::ResolvePhis(const InstructionBlock* block) { 1223 void LiveRangeBuilder::ResolvePhis(const InstructionBlock* block) {
1366 for (auto phi : block->phis()) { 1224 for (auto phi : block->phis()) {
1367 int phi_vreg = phi->virtual_register(); 1225 int phi_vreg = phi->virtual_register();
1368 auto map_value = new (local_zone()) PhiMapValue(phi, block, local_zone()); 1226 auto map_value = new (local_zone()) PhiMapValue(phi, block, local_zone());
1369 auto res = phi_map_.insert(std::make_pair(phi_vreg, map_value)); 1227 auto res = phi_map_.insert(std::make_pair(phi_vreg, map_value));
1370 DCHECK(res.second); 1228 DCHECK(res.second);
1371 USE(res); 1229 USE(res);
1372 auto& output = phi->output(); 1230 auto& output = phi->output();
1373 for (size_t i = 0; i < phi->operands().size(); ++i) { 1231 for (size_t i = 0; i < phi->operands().size(); ++i) {
1374 InstructionBlock* cur_block = 1232 InstructionBlock* cur_block =
1375 code()->InstructionBlockAt(block->predecessors()[i]); 1233 code()->InstructionBlockAt(block->predecessors()[i]);
1376 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]); 1234 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]);
1377 auto move = AddGapMove(cur_block->last_instruction_index(), 1235 auto move = AddGapMove(cur_block->last_instruction_index(),
1378 Instruction::END, input, output); 1236 Instruction::END, input, output);
1379 map_value->incoming_moves.push_back(move); 1237 map_value->incoming_moves.push_back(move);
1380 DCHECK(!InstructionAt(cur_block->last_instruction_index()) 1238 DCHECK(!InstructionAt(cur_block->last_instruction_index())
1381 ->HasReferenceMap()); 1239 ->HasReferenceMap());
1382 } 1240 }
1383 auto live_range = LiveRangeFor(phi_vreg); 1241 auto live_range = LiveRangeFor(phi_vreg);
1384 int gap_index = block->first_instruction_index(); 1242 int gap_index = block->first_instruction_index();
1385 live_range->SpillAtDefinition(local_zone(), gap_index, &output); 1243 live_range->SpillAtDefinition(local_zone(), gap_index, &output);
1386 live_range->SetSpillStartIndex(gap_index); 1244 live_range->SetSpillStartIndex(gap_index);
1387 // We use the phi-ness of some nodes in some later heuristics. 1245 // We use the phi-ness of some nodes in some later heuristics.
1388 live_range->set_is_phi(true); 1246 live_range->set_is_phi(true);
1389 live_range->set_is_non_loop_phi(!block->IsLoopHeader()); 1247 live_range->set_is_non_loop_phi(!block->IsLoopHeader());
1390 } 1248 }
1391 } 1249 }
1392 1250
1393 1251
1394 void RegisterAllocator::AssignPhiInput(LiveRange* range, 1252 void LiveRangeBuilder::AssignPhiInput(LiveRange* range,
1395 const InstructionOperand& assignment) { 1253 const InstructionOperand& assignment) {
1396 DCHECK(range->is_phi()); 1254 DCHECK(range->is_phi());
1397 auto it = phi_map_.find(range->id()); 1255 auto it = phi_map_.find(range->id());
1398 DCHECK(it != phi_map_.end()); 1256 DCHECK(it != phi_map_.end());
1399 for (auto move : it->second->incoming_moves) { 1257 for (auto move : it->second->incoming_moves) {
1400 move->set_destination(assignment); 1258 move->set_destination(assignment);
1401 } 1259 }
1402 } 1260 }
1403 1261
1404 1262
1405 void RegisterAllocator::MeetRegisterConstraints() { 1263 void LiveRangeBuilder::MeetRegisterConstraints() {
1406 for (auto block : code()->instruction_blocks()) { 1264 for (auto block : code()->instruction_blocks()) {
1407 MeetRegisterConstraints(block); 1265 MeetRegisterConstraints(block);
1408 } 1266 }
1409 } 1267 }
1410 1268
1411 1269
1412 void RegisterAllocator::ResolvePhis() { 1270 void LiveRangeBuilder::ResolvePhis() {
1413 // Process the blocks in reverse order. 1271 // Process the blocks in reverse order.
1414 for (auto i = code()->instruction_blocks().rbegin(); 1272 for (auto i = code()->instruction_blocks().rbegin();
1415 i != code()->instruction_blocks().rend(); ++i) { 1273 i != code()->instruction_blocks().rend(); ++i) {
1416 ResolvePhis(*i); 1274 ResolvePhis(*i);
1417 } 1275 }
1418 } 1276 }
1419 1277
1420 1278
1421 const InstructionBlock* RegisterAllocator::GetInstructionBlock( 1279 void LiveRangeBuilder::BuildLiveRanges() {
1422 LifetimePosition pos) {
1423 return code()->GetInstructionBlock(pos.ToInstructionIndex());
1424 }
1425
1426
1427 void RegisterAllocator::ConnectRanges() {
1428 ZoneMap<std::pair<ParallelMove*, InstructionOperand>, InstructionOperand>
1429 delayed_insertion_map(local_zone());
1430 for (auto first_range : live_ranges()) {
1431 if (first_range == nullptr || first_range->IsChild()) continue;
1432 for (auto second_range = first_range->next(); second_range != nullptr;
1433 first_range = second_range, second_range = second_range->next()) {
1434 auto pos = second_range->Start();
1435 // Add gap move if the two live ranges touch and there is no block
1436 // boundary.
1437 if (second_range->IsSpilled()) continue;
1438 if (first_range->End().Value() != pos.Value()) continue;
1439 if (IsBlockBoundary(pos) &&
1440 !CanEagerlyResolveControlFlow(GetInstructionBlock(pos))) {
1441 continue;
1442 }
1443 auto prev_operand = first_range->GetAssignedOperand();
1444 auto cur_operand = second_range->GetAssignedOperand();
1445 if (prev_operand == cur_operand) continue;
1446 bool delay_insertion = false;
1447 Instruction::GapPosition gap_pos;
1448 int gap_index = pos.ToInstructionIndex();
1449 if (pos.IsGapPosition()) {
1450 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
1451 } else {
1452 if (pos.IsStart()) {
1453 delay_insertion = true;
1454 } else {
1455 gap_index++;
1456 }
1457 gap_pos = delay_insertion ? Instruction::END : Instruction::START;
1458 }
1459 auto move = code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
1460 gap_pos, code_zone());
1461 if (!delay_insertion) {
1462 move->AddMove(prev_operand, cur_operand);
1463 } else {
1464 delayed_insertion_map.insert(
1465 std::make_pair(std::make_pair(move, prev_operand), cur_operand));
1466 }
1467 }
1468 }
1469 if (delayed_insertion_map.empty()) return;
1470 // Insert all the moves which should occur after the stored move.
1471 ZoneVector<MoveOperands*> to_insert(local_zone());
1472 ZoneVector<MoveOperands*> to_eliminate(local_zone());
1473 to_insert.reserve(4);
1474 to_eliminate.reserve(4);
1475 auto moves = delayed_insertion_map.begin()->first.first;
1476 for (auto it = delayed_insertion_map.begin();; ++it) {
1477 bool done = it == delayed_insertion_map.end();
1478 if (done || it->first.first != moves) {
1479 // Commit the MoveOperands for current ParallelMove.
1480 for (auto move : to_eliminate) {
1481 move->Eliminate();
1482 }
1483 for (auto move : to_insert) {
1484 moves->push_back(move);
1485 }
1486 if (done) break;
1487 // Reset state.
1488 to_eliminate.clear();
1489 to_insert.clear();
1490 moves = it->first.first;
1491 }
1492 // Gather all MoveOperands for a single ParallelMove.
1493 auto move = new (code_zone()) MoveOperands(it->first.second, it->second);
1494 auto eliminate = moves->PrepareInsertAfter(move);
1495 to_insert.push_back(move);
1496 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
1497 }
1498 }
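The commit loop above flushes the final batch through the same branch that detects a key change, which is easy to get wrong when re-implementing. A minimal standalone sketch of the pattern, using plain C++ containers and illustrative names rather than the V8 types:

#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

int main() {
  // Key: (group, sequence). Entries sharing a group are committed together.
  std::map<std::pair<int, int>, std::string> grouped = {
      {{1, 0}, "a"}, {{1, 1}, "b"}, {{2, 0}, "c"}};
  std::vector<std::string> batch;
  int current_group = grouped.begin()->first.first;
  for (auto it = grouped.begin();; ++it) {
    bool done = it == grouped.end();
    if (done || it->first.first != current_group) {
      // Commit the buffered entries for the group that just ended.
      std::cout << "commit group " << current_group << ":";
      for (const auto& v : batch) std::cout << ' ' << v;
      std::cout << '\n';
      if (done) break;
      batch.clear();
      current_group = it->first.first;
    }
    batch.push_back(it->second);
  }
  return 0;
}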
1499
1500
1501 bool RegisterAllocator::CanEagerlyResolveControlFlow(
1502 const InstructionBlock* block) const {
1503 if (block->PredecessorCount() != 1) return false;
1504 return block->predecessors()[0].IsNext(block->rpo_number());
1505 }
1506
1507
1508 namespace {
1509
1510 class LiveRangeBound {
1511 public:
1512 explicit LiveRangeBound(const LiveRange* range)
1513 : range_(range), start_(range->Start()), end_(range->End()) {
1514 DCHECK(!range->IsEmpty());
1515 }
1516
1517 bool CanCover(LifetimePosition position) {
1518 return start_.Value() <= position.Value() &&
1519 position.Value() < end_.Value();
1520 }
1521
1522 const LiveRange* const range_;
1523 const LifetimePosition start_;
1524 const LifetimePosition end_;
1525
1526 private:
1527 DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
1528 };
1529
1530
1531 struct FindResult {
1532 const LiveRange* cur_cover_;
1533 const LiveRange* pred_cover_;
1534 };
1535
1536
1537 class LiveRangeBoundArray {
1538 public:
1539 LiveRangeBoundArray() : length_(0), start_(nullptr) {}
1540
1541 bool ShouldInitialize() { return start_ == nullptr; }
1542
1543 void Initialize(Zone* zone, const LiveRange* const range) {
1544 size_t length = 0;
1545 for (auto i = range; i != nullptr; i = i->next()) length++;
1546 start_ = zone->NewArray<LiveRangeBound>(length);
1547 length_ = length;
1548 auto curr = start_;
1549 for (auto i = range; i != nullptr; i = i->next(), ++curr) {
1550 new (curr) LiveRangeBound(i);
1551 }
1552 }
1553
1554 LiveRangeBound* Find(const LifetimePosition position) const {
1555 size_t left_index = 0;
1556 size_t right_index = length_;
1557 while (true) {
1558 size_t current_index = left_index + (right_index - left_index) / 2;
1559 DCHECK(right_index > current_index);
1560 auto bound = &start_[current_index];
1561 if (bound->start_.Value() <= position.Value()) {
1562 if (position.Value() < bound->end_.Value()) return bound;
1563 DCHECK(left_index < current_index);
1564 left_index = current_index;
1565 } else {
1566 right_index = current_index;
1567 }
1568 }
1569 }
1570
1571 LiveRangeBound* FindPred(const InstructionBlock* pred) {
1572 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
1573 pred->last_instruction_index());
1574 return Find(pred_end);
1575 }
1576
1577 LiveRangeBound* FindSucc(const InstructionBlock* succ) {
1578 auto succ_start = LifetimePosition::GapFromInstructionIndex(
1579 succ->first_instruction_index());
1580 return Find(succ_start);
1581 }
1582
1583 void Find(const InstructionBlock* block, const InstructionBlock* pred,
1584 FindResult* result) const {
1585 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
1586 pred->last_instruction_index());
1587 auto bound = Find(pred_end);
1588 result->pred_cover_ = bound->range_;
1589 auto cur_start = LifetimePosition::GapFromInstructionIndex(
1590 block->first_instruction_index());
1591 // Common case.
1592 if (bound->CanCover(cur_start)) {
1593 result->cur_cover_ = bound->range_;
1594 return;
1595 }
1596 result->cur_cover_ = Find(cur_start)->range_;
1597 DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
1598 }
1599
1600 private:
1601 size_t length_;
1602 LiveRangeBound* start_;
1603
1604 DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
1605 };
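Find() above relies on the bounds forming a sorted sequence of disjoint half-open intervals, so a binary search pins down the unique covering bound. A self-contained sketch of the same search with hypothetical standalone types (unlike the V8 version, which asserts the position is always covered, this one returns nullptr for gaps):

#include <cassert>
#include <cstddef>
#include <vector>

struct Bound { int start; int end; };  // half-open [start, end)

const Bound* FindCovering(const std::vector<Bound>& bounds, int pos) {
  size_t left = 0, right = bounds.size();
  while (left < right) {
    size_t mid = left + (right - left) / 2;
    const Bound& b = bounds[mid];
    if (b.start <= pos) {
      if (pos < b.end) return &b;  // covered by this bound
      left = mid + 1;              // pos lies to the right
    } else {
      right = mid;                 // pos lies to the left
    }
  }
  return nullptr;  // pos falls in a hole between bounds
}

int main() {
  std::vector<Bound> bounds = {{0, 4}, {6, 9}, {9, 12}};
  assert(FindCovering(bounds, 7)->start == 6);
  assert(FindCovering(bounds, 5) == nullptr);
  return 0;
}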
1606
1607
1608 class LiveRangeFinder {
1609 public:
1610 explicit LiveRangeFinder(const RegisterAllocator& allocator)
1611 : allocator_(allocator),
1612 bounds_length_(static_cast<int>(allocator.live_ranges().size())),
1613 bounds_(allocator.local_zone()->NewArray<LiveRangeBoundArray>(
1614 bounds_length_)) {
1615 for (int i = 0; i < bounds_length_; ++i) {
1616 new (&bounds_[i]) LiveRangeBoundArray();
1617 }
1618 }
1619
1620 LiveRangeBoundArray* ArrayFor(int operand_index) {
1621 DCHECK(operand_index < bounds_length_);
1622 auto range = allocator_.live_ranges()[operand_index];
1623 DCHECK(range != nullptr && !range->IsEmpty());
1624 auto array = &bounds_[operand_index];
1625 if (array->ShouldInitialize()) {
1626 array->Initialize(allocator_.local_zone(), range);
1627 }
1628 return array;
1629 }
1630
1631 private:
1632 const RegisterAllocator& allocator_;
1633 const int bounds_length_;
1634 LiveRangeBoundArray* const bounds_;
1635
1636 DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
1637 };
1638
1639 } // namespace
1640
1641
1642 void RegisterAllocator::ResolveControlFlow() {
1643 // Lazily linearize live ranges in memory for fast lookup.
1644 LiveRangeFinder finder(*this);
1645 for (auto block : code()->instruction_blocks()) {
1646 if (CanEagerlyResolveControlFlow(block)) continue;
1647 auto live = live_in_sets_[block->rpo_number().ToInt()];
1648 BitVector::Iterator iterator(live);
1649 while (!iterator.Done()) {
1650 auto* array = finder.ArrayFor(iterator.Current());
1651 for (auto pred : block->predecessors()) {
1652 FindResult result;
1653 const auto* pred_block = code()->InstructionBlockAt(pred);
1654 array->Find(block, pred_block, &result);
1655 if (result.cur_cover_ == result.pred_cover_ ||
1656 result.cur_cover_->IsSpilled())
1657 continue;
1658 auto pred_op = result.pred_cover_->GetAssignedOperand();
1659 auto cur_op = result.cur_cover_->GetAssignedOperand();
1660 if (pred_op == cur_op) continue;
1661 ResolveControlFlow(block, cur_op, pred_block, pred_op);
1662 }
1663 iterator.Advance();
1664 }
1665 }
1666 }
1667
1668
1669 void RegisterAllocator::ResolveControlFlow(const InstructionBlock* block,
1670 const InstructionOperand& cur_op,
1671 const InstructionBlock* pred,
1672 const InstructionOperand& pred_op) {
1673 DCHECK(pred_op != cur_op);
1674 int gap_index;
1675 Instruction::GapPosition position;
1676 if (block->PredecessorCount() == 1) {
1677 gap_index = block->first_instruction_index();
1678 position = Instruction::START;
1679 } else {
1680 DCHECK(pred->SuccessorCount() == 1);
1681 DCHECK(!InstructionAt(pred->last_instruction_index())->HasReferenceMap());
1682 gap_index = pred->last_instruction_index();
1683 position = Instruction::END;
1684 }
1685 AddGapMove(gap_index, position, pred_op, cur_op);
1686 }
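The placement rule encoded above, as a standalone sketch with hypothetical types: a connecting move goes at the START gap of the successor when the edge is unique (single predecessor), otherwise at the END gap of the predecessor, which the DCHECKs require to have a single successor because critical edges were split earlier.

#include <cassert>

enum class GapPos { kStart, kEnd };

struct Placement { int instruction_index; GapPos pos; };

Placement PlaceConnectingMove(int block_first_index, int block_pred_count,
                              int pred_last_index, int pred_succ_count) {
  if (block_pred_count == 1) {
    return {block_first_index, GapPos::kStart};  // unique incoming edge
  }
  assert(pred_succ_count == 1);  // no critical edges at this point
  return {pred_last_index, GapPos::kEnd};
}

int main() {
  Placement p = PlaceConnectingMove(10, 2, 7, 1);
  assert(p.instruction_index == 7 && p.pos == GapPos::kEnd);
  return 0;
}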
1687
1688
1689 void RegisterAllocator::BuildLiveRanges() {
1690 // Process the blocks in reverse order. 1280 // Process the blocks in reverse order.
1691 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0; 1281 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
1692 --block_id) { 1282 --block_id) {
1693 auto block = code()->InstructionBlockAt(RpoNumber::FromInt(block_id)); 1283 auto block = code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
1694 auto live = ComputeLiveOut(block); 1284 auto live = ComputeLiveOut(block);
1695 // Initially consider all live_out values live for the entire block. We 1285 // Initially consider all live_out values live for the entire block. We
1696 // will shorten these intervals if necessary. 1286 // will shorten these intervals if necessary.
1697 AddInitialIntervals(block, live); 1287 AddInitialIntervals(block, live);
1698 1288
1699 // Process the instructions in reverse order, generating and killing 1289 // Process the instructions in reverse order, generating and killing
(...skipping 43 matching lines...)
1743 // Insert all values into the live in sets of all blocks in the loop. 1333 // Insert all values into the live in sets of all blocks in the loop.
1744 for (int i = block->rpo_number().ToInt() + 1; 1334 for (int i = block->rpo_number().ToInt() + 1;
1745 i < block->loop_end().ToInt(); ++i) { 1335 i < block->loop_end().ToInt(); ++i) {
1746 live_in_sets_[i]->Union(*live); 1336 live_in_sets_[i]->Union(*live);
1747 } 1337 }
1748 } 1338 }
1749 } 1339 }
1750 1340
1751 for (auto range : live_ranges()) { 1341 for (auto range : live_ranges()) {
1752 if (range == nullptr) continue; 1342 if (range == nullptr) continue;
1753 range->kind_ = RequiredRegisterKind(range->id()); 1343 range->set_kind(RequiredRegisterKind(range->id()));
1754 // Give slots to all ranges with a non-fixed slot use. 1344
1755 if (range->has_slot_use() && range->HasNoSpillType()) { 1345 if (range->has_slot_use() && range->HasNoSpillType()) {
1756 AssignSpillRangeToLiveRange(range); 1346 AssignSpillRangeToLiveRange(range);
1757 } 1347 }
1758 // TODO(bmeurer): This is a horrible hack to make sure that for constant 1348 // TODO(bmeurer): This is a horrible hack to make sure that for constant
1759 // live ranges, every use requires the constant to be in a register. 1349 // live ranges, every use requires the constant to be in a register.
1760 // Without this hack, all uses with "any" policy would get the constant 1350 // Without this hack, all uses with "any" policy would get the constant
1761 // operand assigned. 1351 // operand assigned.
1762 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) { 1352 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
1763 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next_) { 1353 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next_) {
1764 if (pos->type() == UsePositionType::kRequiresSlot) continue; 1354 if (pos->type() == UsePositionType::kRequiresSlot) continue;
1765 UsePositionType new_type = UsePositionType::kAny; 1355 UsePositionType new_type = UsePositionType::kAny;
1766 // Can't mark phis as needing a register. 1356 // Can't mark phis as needing a register.
1767 if (!pos->pos().IsGapPosition()) { 1357 if (!pos->pos().IsGapPosition()) {
1768 new_type = UsePositionType::kRequiresRegister; 1358 new_type = UsePositionType::kRequiresRegister;
1769 } 1359 }
1770 pos->set_type(new_type, true); 1360 pos->set_type(new_type, true);
1771 } 1361 }
1772 } 1362 }
1773 } 1363 }
1364
1365 #ifdef DEBUG
1366 Verify();
1367 #endif
1774 } 1368 }
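BuildLiveRanges walks blocks and instructions backwards: everything live out of a block is initially assumed live across it, then definitions terminate intervals and uses extend them. A compact sketch of that per-block step over plain bitsets (hypothetical standalone types, not the V8 ones):

#include <bitset>
#include <vector>

constexpr size_t kMaxVregs = 64;
using LiveSet = std::bitset<kMaxVregs>;

struct Instr { std::vector<int> defs, uses; };

LiveSet ComputeLiveIn(const std::vector<Instr>& block, LiveSet live_out) {
  LiveSet live = live_out;
  for (auto it = block.rbegin(); it != block.rend(); ++it) {
    for (int d : it->defs) live.reset(d);  // a definition ends the interval
    for (int u : it->uses) live.set(u);    // a use extends it upward
  }
  return live;  // whatever survives to the block entry is live-in
}

int main() {
  std::vector<Instr> block = {{{2}, {0, 1}}, {{3}, {2}}};
  LiveSet out;
  out.set(3);
  LiveSet in = ComputeLiveIn(block, out);
  // v3 and v2 are defined inside the block; v0/v1 flow in from above.
  return (in.test(0) && in.test(1) && !in.test(2) && !in.test(3)) ? 0 : 1;
}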
1775 1369
1776 1370
1777 bool RegisterAllocator::ExistsUseWithoutDefinition() { 1371 bool LiveRangeBuilder::ExistsUseWithoutDefinition() {
1778 bool found = false; 1372 bool found = false;
1779 BitVector::Iterator iterator(live_in_sets_[0]); 1373 BitVector::Iterator iterator(live_in_sets_[0]);
1780 while (!iterator.Done()) { 1374 while (!iterator.Done()) {
1781 found = true; 1375 found = true;
1782 int operand_index = iterator.Current(); 1376 int operand_index = iterator.Current();
1783 PrintF("Register allocator error: live v%d reached first block.\n", 1377 PrintF("Register allocator error: live v%d reached first block.\n",
1784 operand_index); 1378 operand_index);
1785 LiveRange* range = LiveRangeFor(operand_index); 1379 LiveRange* range = LiveRangeFor(operand_index);
1786 PrintF(" (first use is at %d)\n", range->first_pos()->pos().Value()); 1380 PrintF(" (first use is at %d)\n", range->first_pos()->pos().Value());
1787 if (debug_name() == nullptr) { 1381 if (debug_name() == nullptr) {
1788 PrintF("\n"); 1382 PrintF("\n");
1789 } else { 1383 } else {
1790 PrintF(" (function: %s)\n", debug_name()); 1384 PrintF(" (function: %s)\n", debug_name());
1791 } 1385 }
1792 iterator.Advance(); 1386 iterator.Advance();
1793 } 1387 }
1794 return found; 1388 return found;
1795 } 1389 }
1796 1390
1797 1391
1798 bool RegisterAllocator::SafePointsAreInOrder() const { 1392 bool LiveRangeBuilder::IsBlockBoundary(LifetimePosition pos) {
1799 int safe_point = 0; 1393 return pos.IsFullStart() &&
1800 for (auto map : *code()->reference_maps()) { 1394 code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
1801 if (safe_point > map->instruction_position()) return false; 1395 pos.ToInstructionIndex();
1802 safe_point = map->instruction_position();
1803 }
1804 return true;
1805 } 1396 }
1806 1397
1807 1398
1808 void RegisterAllocator::PopulateReferenceMaps() { 1399 RegisterKind LiveRangeBuilder::RequiredRegisterKind(
1809 DCHECK(SafePointsAreInOrder()); 1400 int virtual_register) const {
1401 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS
1402 : GENERAL_REGISTERS;
1403 }
1810 1404
1811 // Iterate over all safe point positions and record a pointer
1812 // for all spilled live ranges at this point.
1813 int last_range_start = 0;
1814 auto reference_maps = code()->reference_maps();
1815 ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
1816 for (LiveRange* range : live_ranges()) {
1817 if (range == nullptr) continue;
1818 // Iterate over the first parts of multi-part live ranges.
1819 if (range->IsChild()) continue;
1820 // Skip non-reference values.
1821 if (!HasTaggedValue(range->id())) continue;
1822 // Skip empty live ranges.
1823 if (range->IsEmpty()) continue;
1824 1405
1825 // Find the extent of the range and its children. 1406 void LiveRangeBuilder::Verify() const {
1826 int start = range->Start().ToInstructionIndex(); 1407 for (auto current : live_ranges()) {
1827 int end = 0; 1408 if (current != nullptr) current->Verify();
1828 for (auto cur = range; cur != nullptr; cur = cur->next()) {
1829 auto this_end = cur->End();
1830 if (this_end.ToInstructionIndex() > end)
1831 end = this_end.ToInstructionIndex();
1832 DCHECK(cur->Start().ToInstructionIndex() >= start);
1833 }
1834
1835 // Most of the ranges are in order, but not all. Keep an eye on when they
1836 // step backwards and reset the first_it so we don't miss any safe points.
1837 if (start < last_range_start) first_it = reference_maps->begin();
1838 last_range_start = start;
1839
1840 // Step across all the safe points that are before the start of this range,
1841 // recording how far we step in order to save doing this for the next range.
1842 for (; first_it != reference_maps->end(); ++first_it) {
1843 auto map = *first_it;
1844 if (map->instruction_position() >= start) break;
1845 }
1846
1847 // Step through the safe points to see whether they are in the range.
1848 for (auto it = first_it; it != reference_maps->end(); ++it) {
1849 auto map = *it;
1850 int safe_point = map->instruction_position();
1851
1852 // The safe points are sorted so we can stop searching here.
1853 if (safe_point - 1 > end) break;
1854
1855 // Advance to the next active range that covers the current
1856 // safe point position.
1857 auto safe_point_pos =
1858 LifetimePosition::InstructionFromInstructionIndex(safe_point);
1859 auto cur = range;
1860 while (cur != nullptr && !cur->Covers(safe_point_pos)) {
1861 cur = cur->next();
1862 }
1863 if (cur == nullptr) continue;
1864
1865 // Check if the live range is spilled and the safe point is after
1866 // the spill position.
1867 if (range->HasSpillOperand() &&
1868 safe_point >= range->spill_start_index() &&
1869 !range->GetSpillOperand()->IsConstant()) {
1870 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
1871 range->id(), range->spill_start_index(), safe_point);
1872 map->RecordReference(*range->GetSpillOperand());
1873 }
1874
1875 if (!cur->IsSpilled()) {
1876 TRACE(
1877 "Pointer in register for range %d (start at %d) "
1878 "at safe point %d\n",
1879 cur->id(), cur->Start().Value(), safe_point);
1880 auto operand = cur->GetAssignedOperand();
1881 DCHECK(!operand.IsStackSlot());
1882 map->RecordReference(operand);
1883 }
1884 }
1885 } 1409 }
1886 } 1410 }
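The scan above carries a cursor (first_it) into the sorted safe-point list from one range to the next and only rewinds when range starts step backwards. A standalone sketch of that resumable two-pointer walk over hypothetical plain data:

#include <cstddef>
#include <vector>

struct Range { int start; int end; };

// Returns, per range, the safe points falling inside [start, end].
std::vector<std::vector<int>> SafePointsPerRange(
    const std::vector<Range>& ranges, const std::vector<int>& safe_points) {
  std::vector<std::vector<int>> result(ranges.size());
  size_t first_it = 0;
  int last_start = 0;
  for (size_t r = 0; r < ranges.size(); ++r) {
    if (ranges[r].start < last_start) first_it = 0;  // rare backwards step
    last_start = ranges[r].start;
    // Skip safe points before this range; the cursor persists across ranges.
    while (first_it < safe_points.size() &&
           safe_points[first_it] < ranges[r].start) {
      ++first_it;
    }
    for (size_t i = first_it; i < safe_points.size(); ++i) {
      if (safe_points[i] > ranges[r].end) break;  // sorted: stop early
      result[r].push_back(safe_points[i]);
    }
  }
  return result;
}

int main() {
  auto per_range = SafePointsPerRange({{0, 5}, {3, 9}}, {1, 4, 8});
  // per_range[0] == {1, 4}; per_range[1] == {4, 8}.
  return (per_range[0].size() == 2 && per_range[1].size() == 2) ? 0 : 1;
}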
1887 1411
1888 1412
1889 void RegisterAllocator::AllocateGeneralRegisters() { 1413 void LiveRangeBuilder::SetLiveRangeAssignedRegister(LiveRange* range, int reg) {
1890 num_registers_ = config()->num_general_registers(); 1414 if (range->Kind() == DOUBLE_REGISTERS) {
1891 mode_ = GENERAL_REGISTERS; 1415 assigned_double_registers_->Add(reg);
1892 AllocateRegisters(); 1416 } else {
1417 DCHECK(range->Kind() == GENERAL_REGISTERS);
1418 assigned_registers_->Add(reg);
1419 }
1420 range->set_assigned_register(reg);
1421 auto assignment = range->GetAssignedOperand();
1422 // TODO(dcarney): stop aliasing hint operands.
1423 range->ConvertUsesToOperand(assignment, nullptr);
1424 if (range->is_phi()) AssignPhiInput(range, assignment);
1893 } 1425 }
1894 1426
1895 1427
1896 void RegisterAllocator::AllocateDoubleRegisters() { 1428 LinearScanAllocator::LinearScanAllocator(LiveRangeBuilder* live_range_builder,
1897 num_registers_ = config()->num_aliased_double_registers(); 1429 RegisterKind kind)
1898 mode_ = DOUBLE_REGISTERS; 1430 : live_range_builder_(live_range_builder),
1899 AllocateRegisters(); 1431 mode_(kind),
1432 num_registers_(kind == DOUBLE_REGISTERS
1433 ? config()->num_aliased_double_registers()
1434 : config()->num_general_registers()),
1435 unhandled_live_ranges_(local_zone()),
1436 active_live_ranges_(local_zone()),
1437 inactive_live_ranges_(local_zone()) {
1438 unhandled_live_ranges().reserve(
1439 static_cast<size_t>(code()->VirtualRegisterCount() * 2));
1440 active_live_ranges().reserve(8);
1441 inactive_live_ranges().reserve(8);
1442 // TryAllocateFreeReg and AllocateBlockedReg assume this
1443 // when allocating local arrays.
1444 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >=
1445 config()->num_general_registers());
1900 } 1446 }
1901 1447
1902 1448
1903 void RegisterAllocator::AllocateRegisters() { 1449 void LinearScanAllocator::AllocateRegisters() {
1904 DCHECK(unhandled_live_ranges().empty()); 1450 DCHECK(unhandled_live_ranges().empty());
1905 1451
1906 for (auto range : live_ranges()) { 1452 for (auto range : live_ranges()) {
1907 if (range == nullptr) continue; 1453 if (range == nullptr) continue;
1908 if (range->Kind() == mode_) { 1454 if (range->Kind() == mode_) {
1909 AddToUnhandledUnsorted(range); 1455 AddToUnhandledUnsorted(range);
1910 } 1456 }
1911 } 1457 }
1912 SortUnhandled(); 1458 SortUnhandled();
1913 DCHECK(UnhandledIsSorted()); 1459 DCHECK(UnhandledIsSorted());
1914 1460
1915 DCHECK(active_live_ranges().empty()); 1461 DCHECK(active_live_ranges().empty());
1916 DCHECK(inactive_live_ranges().empty()); 1462 DCHECK(inactive_live_ranges().empty());
1917 1463
1918 if (mode_ == DOUBLE_REGISTERS) { 1464 if (mode_ == DOUBLE_REGISTERS) {
1919 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { 1465 for (auto current : fixed_double_live_ranges()) {
1920 auto current = fixed_double_live_ranges()[i]; 1466 if (current != nullptr) AddToInactive(current);
1921 if (current != nullptr) {
1922 AddToInactive(current);
1923 }
1924 } 1467 }
1925 } else { 1468 } else {
1926 DCHECK(mode_ == GENERAL_REGISTERS); 1469 DCHECK(mode_ == GENERAL_REGISTERS);
1927 for (auto current : fixed_live_ranges()) { 1470 for (auto current : fixed_live_ranges()) {
1928 if (current != nullptr) { 1471 if (current != nullptr) AddToInactive(current);
1929 AddToInactive(current);
1930 }
1931 } 1472 }
1932 } 1473 }
1933 1474
1934 while (!unhandled_live_ranges().empty()) { 1475 while (!unhandled_live_ranges().empty()) {
1935 DCHECK(UnhandledIsSorted()); 1476 DCHECK(UnhandledIsSorted());
1936 auto current = unhandled_live_ranges().back(); 1477 auto current = unhandled_live_ranges().back();
1937 unhandled_live_ranges().pop_back(); 1478 unhandled_live_ranges().pop_back();
1938 DCHECK(UnhandledIsSorted()); 1479 DCHECK(UnhandledIsSorted());
1939 auto position = current->Start(); 1480 auto position = current->Start();
1940 #ifdef DEBUG 1481 #ifdef DEBUG
(...skipping 53 matching lines...)
1994 if (current->HasRegisterAssigned()) { 1535 if (current->HasRegisterAssigned()) {
1995 AddToActive(current); 1536 AddToActive(current);
1996 } 1537 }
1997 } 1538 }
1998 1539
1999 active_live_ranges().clear(); 1540 active_live_ranges().clear();
2000 inactive_live_ranges().clear(); 1541 inactive_live_ranges().clear();
2001 } 1542 }
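The overall control flow above is classic linear scan. A heavily condensed standalone sketch (hypothetical types; the real code also maintains inactive and fixed ranges, splits ranges, and reuses spill slots): pop the range with the smallest start, retire expired active ranges, then hand out a free register.

#include <algorithm>
#include <cstddef>
#include <vector>

struct Range { int start; int end; int reg = -1; };

void LinearScan(std::vector<Range*>& unhandled, int num_registers) {
  // Sorted descending by start so the next range to process is at the back.
  std::sort(unhandled.begin(), unhandled.end(),
            [](Range* a, Range* b) { return a->start > b->start; });
  std::vector<Range*> active;
  std::vector<bool> reg_in_use(num_registers, false);
  while (!unhandled.empty()) {
    Range* current = unhandled.back();
    unhandled.pop_back();
    // Retire active ranges that ended at or before the current position.
    for (size_t i = 0; i < active.size();) {
      if (active[i]->end <= current->start) {
        reg_in_use[active[i]->reg] = false;
        active[i] = active.back();
        active.pop_back();
      } else {
        ++i;
      }
    }
    for (int r = 0; r < num_registers; ++r) {
      if (!reg_in_use[r]) {
        current->reg = r;
        reg_in_use[r] = true;
        active.push_back(current);
        break;
      }
    }
    // If no register was free, current->reg stays -1: spilled in this sketch.
  }
}

int main() {
  Range a{0, 6}, b{1, 3}, c{4, 8};
  std::vector<Range*> unhandled = {&a, &b, &c};
  LinearScan(unhandled, 2);
  // a and b overlap on [1,3), so both registers are used; c reuses b's.
  return (a.reg != -1 && b.reg != -1 && c.reg != -1) ? 0 : 1;
}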
2002 1543
2003 1544
2004 const char* RegisterAllocator::RegisterName(int allocation_index) { 1545 const char* LinearScanAllocator::RegisterName(int allocation_index) {
2005 if (mode_ == GENERAL_REGISTERS) { 1546 if (mode_ == GENERAL_REGISTERS) {
2006 return config()->general_register_name(allocation_index); 1547 return config()->general_register_name(allocation_index);
2007 } else { 1548 } else {
2008 return config()->double_register_name(allocation_index); 1549 return config()->double_register_name(allocation_index);
2009 } 1550 }
2010 } 1551 }
2011 1552
2012 1553
2013 bool RegisterAllocator::HasTaggedValue(int virtual_register) const { 1554 void LinearScanAllocator::AddToActive(LiveRange* range) {
2014 return code()->IsReference(virtual_register);
2015 }
2016
2017
2018 RegisterKind RegisterAllocator::RequiredRegisterKind(
2019 int virtual_register) const {
2020 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS
2021 : GENERAL_REGISTERS;
2022 }
2023
2024
2025 void RegisterAllocator::AddToActive(LiveRange* range) {
2026 TRACE("Add live range %d to active\n", range->id()); 1555 TRACE("Add live range %d to active\n", range->id());
2027 active_live_ranges().push_back(range); 1556 active_live_ranges().push_back(range);
2028 } 1557 }
2029 1558
2030 1559
2031 void RegisterAllocator::AddToInactive(LiveRange* range) { 1560 void LinearScanAllocator::AddToInactive(LiveRange* range) {
2032 TRACE("Add live range %d to inactive\n", range->id()); 1561 TRACE("Add live range %d to inactive\n", range->id());
2033 inactive_live_ranges().push_back(range); 1562 inactive_live_ranges().push_back(range);
2034 } 1563 }
2035 1564
2036 1565
2037 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { 1566 void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
2038 if (range == nullptr || range->IsEmpty()) return; 1567 if (range == nullptr || range->IsEmpty()) return;
2039 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); 1568 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
2040 DCHECK(allocation_finger_.Value() <= range->Start().Value()); 1569 DCHECK(allocation_finger_.Value() <= range->Start().Value());
2041 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; 1570 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0;
2042 --i) { 1571 --i) {
2043 auto cur_range = unhandled_live_ranges().at(i); 1572 auto cur_range = unhandled_live_ranges().at(i);
2044 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; 1573 if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
2045 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1); 1574 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1);
2046 auto it = unhandled_live_ranges().begin() + (i + 1); 1575 auto it = unhandled_live_ranges().begin() + (i + 1);
2047 unhandled_live_ranges().insert(it, range); 1576 unhandled_live_ranges().insert(it, range);
2048 DCHECK(UnhandledIsSorted()); 1577 DCHECK(UnhandledIsSorted());
2049 return; 1578 return;
2050 } 1579 }
2051 TRACE("Add live range %d to unhandled at start\n", range->id()); 1580 TRACE("Add live range %d to unhandled at start\n", range->id());
2052 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); 1581 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
2053 DCHECK(UnhandledIsSorted()); 1582 DCHECK(UnhandledIsSorted());
2054 } 1583 }
2055 1584
2056 1585
2057 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { 1586 void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
2058 if (range == nullptr || range->IsEmpty()) return; 1587 if (range == nullptr || range->IsEmpty()) return;
2059 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); 1588 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
2060 TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); 1589 TRACE("Add live range %d to unhandled unsorted at end\n", range->id());
2061 unhandled_live_ranges().push_back(range); 1590 unhandled_live_ranges().push_back(range);
2062 } 1591 }
2063 1592
2064 1593
2065 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { 1594 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
2066 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); 1595 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
2067 if (a->ShouldBeAllocatedBefore(b)) return false; 1596 if (a->ShouldBeAllocatedBefore(b)) return false;
2068 if (b->ShouldBeAllocatedBefore(a)) return true; 1597 if (b->ShouldBeAllocatedBefore(a)) return true;
2069 return a->id() < b->id(); 1598 return a->id() < b->id();
2070 } 1599 }
2071 1600
2072 1601
2073 // Sort the unhandled live ranges so that the ranges to be processed first are 1602 // Sort the unhandled live ranges so that the ranges to be processed first are
2074 // at the end of the vector. This is convenient for the register allocation 1603
2075 // algorithm because it is efficient to remove elements from the end. 1604 // algorithm because it is efficient to remove elements from the end.
2076 void RegisterAllocator::SortUnhandled() { 1605 void LinearScanAllocator::SortUnhandled() {
2077 TRACE("Sort unhandled\n"); 1606 TRACE("Sort unhandled\n");
2078 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), 1607 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
2079 &UnhandledSortHelper); 1608 &UnhandledSortHelper);
2080 } 1609 }
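A tiny standalone illustration of the ordering contract (hypothetical types): after sorting with this comparator, the range to be allocated first sits at the back of the vector, so the allocator pops it in O(1).

#include <algorithm>
#include <cassert>
#include <vector>

struct Range { int id; int start; };

int main() {
  std::vector<Range> unhandled = {{0, 4}, {1, 1}, {2, 7}};
  std::sort(unhandled.begin(), unhandled.end(),
            [](const Range& a, const Range& b) {
              if (a.start != b.start) return a.start > b.start;  // later first
              return a.id < b.id;  // deterministic tie-break, as above
            });
  assert(unhandled.back().start == 1);  // earliest start is processed first
  return 0;
}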
2081 1610
2082 1611
2083 bool RegisterAllocator::UnhandledIsSorted() { 1612 bool LinearScanAllocator::UnhandledIsSorted() {
2084 size_t len = unhandled_live_ranges().size(); 1613 size_t len = unhandled_live_ranges().size();
2085 for (size_t i = 1; i < len; i++) { 1614 for (size_t i = 1; i < len; i++) {
2086 auto a = unhandled_live_ranges().at(i - 1); 1615 auto a = unhandled_live_ranges().at(i - 1);
2087 auto b = unhandled_live_ranges().at(i); 1616 auto b = unhandled_live_ranges().at(i);
2088 if (a->Start().Value() < b->Start().Value()) return false; 1617 if (a->Start().Value() < b->Start().Value()) return false;
2089 } 1618 }
2090 return true; 1619 return true;
2091 } 1620 }
2092 1621
2093 1622
2094 void RegisterAllocator::ActiveToHandled(LiveRange* range) { 1623 void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
2095 RemoveElement(&active_live_ranges(), range); 1624 RemoveElement(&active_live_ranges(), range);
2096 TRACE("Moving live range %d from active to handled\n", range->id()); 1625 TRACE("Moving live range %d from active to handled\n", range->id());
2097 } 1626 }
2098 1627
2099 1628
2100 void RegisterAllocator::ActiveToInactive(LiveRange* range) { 1629 void LinearScanAllocator::ActiveToInactive(LiveRange* range) {
2101 RemoveElement(&active_live_ranges(), range); 1630 RemoveElement(&active_live_ranges(), range);
2102 inactive_live_ranges().push_back(range); 1631 inactive_live_ranges().push_back(range);
2103 TRACE("Moving live range %d from active to inactive\n", range->id()); 1632 TRACE("Moving live range %d from active to inactive\n", range->id());
2104 } 1633 }
2105 1634
2106 1635
2107 void RegisterAllocator::InactiveToHandled(LiveRange* range) { 1636 void LinearScanAllocator::InactiveToHandled(LiveRange* range) {
2108 RemoveElement(&inactive_live_ranges(), range); 1637 RemoveElement(&inactive_live_ranges(), range);
2109 TRACE("Moving live range %d from inactive to handled\n", range->id()); 1638 TRACE("Moving live range %d from inactive to handled\n", range->id());
2110 } 1639 }
2111 1640
2112 1641
2113 void RegisterAllocator::InactiveToActive(LiveRange* range) { 1642 void LinearScanAllocator::InactiveToActive(LiveRange* range) {
2114 RemoveElement(&inactive_live_ranges(), range); 1643 RemoveElement(&inactive_live_ranges(), range);
2115 active_live_ranges().push_back(range); 1644 active_live_ranges().push_back(range);
2116 TRACE("Moving live range %d from inactive to active\n", range->id()); 1645 TRACE("Moving live range %d from inactive to active\n", range->id());
2117 } 1646 }
2118 1647
2119 1648
2120 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { 1649 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
2121 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; 1650 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters];
2122 1651
2123 for (int i = 0; i < num_registers_; i++) { 1652 for (int i = 0; i < num_registers_; i++) {
2124 free_until_pos[i] = LifetimePosition::MaxPosition(); 1653 free_until_pos[i] = LifetimePosition::MaxPosition();
2125 } 1654 }
2126 1655
2127 for (auto cur_active : active_live_ranges()) { 1656 for (auto cur_active : active_live_ranges()) {
2128 free_until_pos[cur_active->assigned_register()] = 1657 free_until_pos[cur_active->assigned_register()] =
2129 LifetimePosition::GapFromInstructionIndex(0); 1658 LifetimePosition::GapFromInstructionIndex(0);
2130 } 1659 }
(...skipping 48 matching lines...)
2179 // the range end. 1708 // the range end.
2180 DCHECK(pos.Value() >= current->End().Value()); 1709 DCHECK(pos.Value() >= current->End().Value());
2181 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), 1710 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg),
2182 current->id()); 1711 current->id());
2183 SetLiveRangeAssignedRegister(current, reg); 1712 SetLiveRangeAssignedRegister(current, reg);
2184 1713
2185 return true; 1714 return true;
2186 } 1715 }
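Condensed sketch of the probe above, with hypothetical standalone data: every register starts out free forever, active ranges pin theirs to "busy now", inactive ranges clamp theirs to the next intersection, and the register with the furthest horizon wins. A horizon that ends before the current range does means the caller must split at that point.

#include <cstddef>
#include <utility>
#include <vector>

// Returns {register, free_until}. A free_until earlier than the current
// range's end means only a prefix can be allocated; the range is split there.
std::pair<int, int> PickFreeRegister(const std::vector<int>& free_until_pos) {
  int reg = 0;
  for (int i = 1; i < static_cast<int>(free_until_pos.size()); ++i) {
    if (free_until_pos[i] > free_until_pos[reg]) reg = i;
  }
  return {reg, free_until_pos[reg]};
}

int main() {
  // Registers 0 and 1 are blocked now; register 2 is free until position 20.
  std::vector<int> free_until = {0, 0, 20};
  auto [reg, until] = PickFreeRegister(free_until);
  return (reg == 2 && until == 20) ? 0 : 1;
}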
2187 1716
2188 1717
2189 void RegisterAllocator::AllocateBlockedReg(LiveRange* current) { 1718 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
2190 auto register_use = current->NextRegisterPosition(current->Start()); 1719 auto register_use = current->NextRegisterPosition(current->Start());
2191 if (register_use == nullptr) { 1720 if (register_use == nullptr) {
2192 // There is no use in the current live range that requires a register. 1721 // There is no use in the current live range that requires a register.
2193 // We can just spill it. 1722 // We can just spill it.
2194 Spill(current); 1723 Spill(current);
2195 return; 1724 return;
2196 } 1725 }
2197 1726
2198 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; 1727 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters];
2199 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; 1728 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters];
(...skipping 69 matching lines...)
2269 1798
2270 1799
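The body of AllocateBlockedReg is elided in this diff; the decision it makes follows the standard linear-scan blocked-register heuristic, sketched roughly below with hypothetical standalone types. Pick the register whose competing uses are furthest away, then either spill the current range (even the best register is needed before our first register use), evict and assign outright, or split the current range at the blocking position.

#include <vector>

enum class Action { kSpillCurrent, kAssignRegister, kSplitCurrentThenAssign };

Action DecideBlockedReg(const std::vector<int>& use_pos,
                        const std::vector<int>& block_pos,
                        int first_register_use, int current_end,
                        int* reg_out) {
  // Pick the register whose next competing use is furthest away.
  int reg = 0;
  for (int i = 1; i < static_cast<int>(use_pos.size()); ++i) {
    if (use_pos[i] > use_pos[reg]) reg = i;
  }
  *reg_out = reg;
  if (use_pos[reg] < first_register_use) {
    // Cheaper to spill the current range up to its first register use.
    return Action::kSpillCurrent;
  }
  if (block_pos[reg] > current_end) {
    // No fixed range blocks it for our whole lifetime: evict and take it.
    return Action::kAssignRegister;
  }
  // Hard-blocked partway through: split the current range there first.
  return Action::kSplitCurrentThenAssign;
}

int main() {
  int reg = -1;
  // Best register's next competing use (8) comes after our first need (5),
  // and nothing hard-blocks it before the range ends (10): assign it.
  Action a = DecideBlockedReg({2, 8}, {20, 20}, 5, 10, &reg);
  return (a == Action::kAssignRegister && reg == 1) ? 0 : 1;
}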
2271 static const InstructionBlock* GetContainingLoop( 1800 static const InstructionBlock* GetContainingLoop(
2272 const InstructionSequence* sequence, const InstructionBlock* block) { 1801 const InstructionSequence* sequence, const InstructionBlock* block) {
2273 auto index = block->loop_header(); 1802 auto index = block->loop_header();
2274 if (!index.IsValid()) return nullptr; 1803 if (!index.IsValid()) return nullptr;
2275 return sequence->InstructionBlockAt(index); 1804 return sequence->InstructionBlockAt(index);
2276 } 1805 }
2277 1806
2278 1807
2279 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( 1808 LifetimePosition LinearScanAllocator::FindOptimalSpillingPos(
2280 LiveRange* range, LifetimePosition pos) { 1809 LiveRange* range, LifetimePosition pos) {
2281 auto block = GetInstructionBlock(pos.Start()); 1810 auto block = GetInstructionBlock(pos.Start());
2282 auto loop_header = 1811 auto loop_header =
2283 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); 1812 block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
2284 1813
2285 if (loop_header == nullptr) return pos; 1814 if (loop_header == nullptr) return pos;
2286 1815
2287 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); 1816 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos);
2288 1817
2289 while (loop_header != nullptr) { 1818 while (loop_header != nullptr) {
(...skipping 11 matching lines...)
2301 } 1830 }
2302 1831
2303 // Try hoisting out to an outer loop. 1832 // Try hoisting out to an outer loop.
2304 loop_header = GetContainingLoop(code(), loop_header); 1833 loop_header = GetContainingLoop(code(), loop_header);
2305 } 1834 }
2306 1835
2307 return pos; 1836 return pos;
2308 } 1837 }
2309 1838
2310 1839
2311 void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) { 1840 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
2312 DCHECK(current->HasRegisterAssigned()); 1841 DCHECK(current->HasRegisterAssigned());
2313 int reg = current->assigned_register(); 1842 int reg = current->assigned_register();
2314 auto split_pos = current->Start(); 1843 auto split_pos = current->Start();
2315 for (size_t i = 0; i < active_live_ranges().size(); ++i) { 1844 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2316 auto range = active_live_ranges()[i]; 1845 auto range = active_live_ranges()[i];
2317 if (range->assigned_register() == reg) { 1846 if (range->assigned_register() == reg) {
2318 auto next_pos = range->NextRegisterPosition(current->Start()); 1847 auto next_pos = range->NextRegisterPosition(current->Start());
2319 auto spill_pos = FindOptimalSpillingPos(range, split_pos); 1848 auto spill_pos = FindOptimalSpillingPos(range, split_pos);
2320 if (next_pos == nullptr) { 1849 if (next_pos == nullptr) {
2321 SpillAfter(range, spill_pos); 1850 SpillAfter(range, spill_pos);
(...skipping 27 matching lines...)
2349 SpillBetween(range, split_pos, next_intersection); 1878 SpillBetween(range, split_pos, next_intersection);
2350 } 1879 }
2351 InactiveToHandled(range); 1880 InactiveToHandled(range);
2352 --i; 1881 --i;
2353 } 1882 }
2354 } 1883 }
2355 } 1884 }
2356 } 1885 }
2357 1886
2358 1887
2359 bool RegisterAllocator::IsBlockBoundary(LifetimePosition pos) { 1888 bool LinearScanAllocator::TryReuseSpillForPhi(LiveRange* range) {
2360 return pos.IsFullStart() && 1889 if (range->IsChild() || !range->is_phi()) return false;
2361 code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() == 1890 DCHECK(!range->HasSpillOperand());
2362 pos.ToInstructionIndex(); 1891
1892 auto lookup = phi_map().find(range->id());
1893 DCHECK(lookup != phi_map().end());
1894 auto phi = lookup->second->phi;
1895 auto block = lookup->second->block;
1896 // Count the number of spilled operands.
1897 size_t spilled_count = 0;
1898 LiveRange* first_op = nullptr;
1899 for (size_t i = 0; i < phi->operands().size(); i++) {
1900 int op = phi->operands()[i];
1901 LiveRange* op_range = LiveRangeFor(op);
1902 if (op_range->GetSpillRange() == nullptr) continue;
1903 auto pred = code()->InstructionBlockAt(block->predecessors()[i]);
1904 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
1905 pred->last_instruction_index());
1906 while (op_range != nullptr && !op_range->CanCover(pred_end)) {
1907 op_range = op_range->next();
1908 }
1909 if (op_range != nullptr && op_range->IsSpilled()) {
1910 spilled_count++;
1911 if (first_op == nullptr) {
1912 first_op = op_range->TopLevel();
1913 }
1914 }
1915 }
1916
1917 // Only continue if more than half of the operands are spilled.
1918 if (spilled_count * 2 <= phi->operands().size()) {
1919 return false;
1920 }
1921
1922 // Try to merge the spilled operands and count the number of merged spilled
1923 // operands.
1924 DCHECK(first_op != nullptr);
1925 auto first_op_spill = first_op->GetSpillRange();
1926 size_t num_merged = 1;
1927 for (size_t i = 1; i < phi->operands().size(); i++) {
1928 int op = phi->operands()[i];
1929 auto op_range = LiveRangeFor(op);
1930 auto op_spill = op_range->GetSpillRange();
1931 if (op_spill != nullptr &&
1932 (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill))) {
1933 num_merged++;
1934 }
1935 }
1936
1937 // Only continue if enough operands could be merged into the
1938 // same spill slot.
1939 if (num_merged * 2 <= phi->operands().size() ||
1940 AreUseIntervalsIntersecting(first_op_spill->interval(),
1941 range->first_interval())) {
1942 return false;
1943 }
1944
1945 // If the range does not need a register soon, spill it to the merged
1946 // spill range.
1947 auto next_pos = range->Start();
1948 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
1949 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
1950 if (pos == nullptr) {
1951 auto spill_range = range->TopLevel()->HasSpillRange()
1952 ? range->TopLevel()->GetSpillRange()
1953 : live_range_builder()->AssignSpillRangeToLiveRange(
1954 range->TopLevel());
1955 bool merged = first_op_spill->TryMerge(spill_range);
1956 CHECK(merged);
1957 Spill(range);
1958 return true;
1959 } else if (pos->pos().Value() > range->Start().NextStart().Value()) {
1960 auto spill_range = range->TopLevel()->HasSpillRange()
1961 ? range->TopLevel()->GetSpillRange()
1962 : live_range_builder()->AssignSpillRangeToLiveRange(
1963 range->TopLevel());
1964 bool merged = first_op_spill->TryMerge(spill_range);
1965 CHECK(merged);
1966 SpillBetween(range, range->Start(), pos->pos());
1967 DCHECK(UnhandledIsSorted());
1968 return true;
1969 }
1970 return false;
2363 } 1971 }
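The two gating checks above are strict-majority tests written as multiplications to avoid integer division: reuse is attempted only when strictly more than half of the phi's inputs are already spilled, and committed only when strictly more than half could be merged into one slot. In standalone form:

#include <cstddef>

bool MajorityStrict(size_t count, size_t total) {
  // Equivalent to count > total / 2 without the rounding pitfalls.
  return count * 2 > total;
}

int main() {
  // 3 of 5 operands spilled: proceed. 2 of 4 merged: give up.
  return (MajorityStrict(3, 5) && !MajorityStrict(2, 4)) ? 0 : 1;
}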
2364 1972
2365 1973
2366 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, 1974 LiveRange* LinearScanAllocator::SplitRangeAt(LiveRange* range,
2367 LifetimePosition pos) { 1975 LifetimePosition pos) {
2368 DCHECK(!range->IsFixed()); 1976 DCHECK(!range->IsFixed());
2369 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); 1977 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value());
2370 1978
2371 if (pos.Value() <= range->Start().Value()) return range; 1979 if (pos.Value() <= range->Start().Value()) return range;
2372 1980
2373 // We can't properly connect live ranges if splitting occurred at the end 1981
2374 // of a block. 1982
2375 DCHECK(pos.IsStart() || pos.IsGapPosition() || 1983 DCHECK(pos.IsStart() || pos.IsGapPosition() ||
2376 (code()->GetInstructionBlock(pos.ToInstructionIndex())) 1984 (code()->GetInstructionBlock(pos.ToInstructionIndex()))
2377 ->last_instruction_index() != pos.ToInstructionIndex()); 1985 ->last_instruction_index() != pos.ToInstructionIndex());
2378 1986
2379 int vreg = GetVirtualRegister(); 1987 int vreg = GetVirtualRegister();
2380 auto result = LiveRangeFor(vreg); 1988 auto result = LiveRangeFor(vreg);
2381 range->SplitAt(pos, result, local_zone()); 1989 range->SplitAt(pos, result, local_zone());
2382 return result; 1990 return result;
2383 } 1991 }
2384 1992
2385 1993
2386 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, 1994 LiveRange* LinearScanAllocator::SplitBetween(LiveRange* range,
2387 LifetimePosition start, 1995 LifetimePosition start,
2388 LifetimePosition end) { 1996 LifetimePosition end) {
2389 DCHECK(!range->IsFixed()); 1997 DCHECK(!range->IsFixed());
2390 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(), 1998 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(),
2391 start.Value(), end.Value()); 1999 start.Value(), end.Value());
2392 2000
2393 auto split_pos = FindOptimalSplitPos(start, end); 2001 auto split_pos = FindOptimalSplitPos(start, end);
2394 DCHECK(split_pos.Value() >= start.Value()); 2002 DCHECK(split_pos.Value() >= start.Value());
2395 return SplitRangeAt(range, split_pos); 2003 return SplitRangeAt(range, split_pos);
2396 } 2004 }
2397 2005
2398 2006
2399 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, 2007 LifetimePosition LinearScanAllocator::FindOptimalSplitPos(
2400 LifetimePosition end) { 2008 LifetimePosition start, LifetimePosition end) {
2401 int start_instr = start.ToInstructionIndex(); 2009 int start_instr = start.ToInstructionIndex();
2402 int end_instr = end.ToInstructionIndex(); 2010 int end_instr = end.ToInstructionIndex();
2403 DCHECK(start_instr <= end_instr); 2011 DCHECK(start_instr <= end_instr);
2404 2012
2405 // We have no choice: start and end fall in the same instruction. 2013
2406 if (start_instr == end_instr) return end; 2014 if (start_instr == end_instr) return end;
2407 2015
2408 auto start_block = GetInstructionBlock(start); 2016 auto start_block = GetInstructionBlock(start);
2409 auto end_block = GetInstructionBlock(end); 2017 auto end_block = GetInstructionBlock(end);
2410 2018
(...skipping 14 matching lines...)
2425 2033
2426 // We did not find any suitable outer loop. Split at the latest possible 2034 // We did not find any suitable outer loop. Split at the latest possible
2427 // position unless end_block is a loop header itself. 2035 // position unless end_block is a loop header itself.
2428 if (block == end_block && !end_block->IsLoopHeader()) return end; 2036 if (block == end_block && !end_block->IsLoopHeader()) return end;
2429 2037
2430 return LifetimePosition::GapFromInstructionIndex( 2038 return LifetimePosition::GapFromInstructionIndex(
2431 block->first_instruction_index()); 2039 block->first_instruction_index());
2432 } 2040 }
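A standalone sketch of the hoisting search above (hypothetical types): walk outward through the loop nest of the end block and remember the outermost loop header that still lies after the start position. Splitting at that block's first instruction keeps the connecting move out of the loop body.

#include <cstddef>

struct Block {
  int first_instruction_index;
  const Block* loop_header;  // nullptr when not inside any loop
};

int FindSplitIndex(int start_index, int end_index, const Block* end_block) {
  int best = end_index;  // default: split as late as possible
  for (const Block* loop = end_block->loop_header; loop != nullptr;
       loop = loop->loop_header) {
    if (loop->first_instruction_index <= start_index) break;  // too early
    best = loop->first_instruction_index;  // hoist above this loop
  }
  return best;
}

int main() {
  Block outer{4, nullptr};
  Block inner{10, &outer};
  Block end{15, &inner};
  // Start at 2: both loop headers come after it; hoist to the outer one (4).
  return FindSplitIndex(2, 15, &end) == 4 ? 0 : 1;
}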
2433 2041
2434 2042
2435 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { 2043 void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2436 auto second_part = SplitRangeAt(range, pos); 2044 auto second_part = SplitRangeAt(range, pos);
2437 Spill(second_part); 2045 Spill(second_part);
2438 } 2046 }
2439 2047
2440 2048
2441 void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, 2049 void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
2442 LifetimePosition end) { 2050 LifetimePosition end) {
2443 SpillBetweenUntil(range, start, start, end); 2051 SpillBetweenUntil(range, start, start, end);
2444 } 2052 }
2445 2053
2446 2054
2447 void RegisterAllocator::SpillBetweenUntil(LiveRange* range, 2055 void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
2448 LifetimePosition start, 2056 LifetimePosition start,
2449 LifetimePosition until, 2057 LifetimePosition until,
2450 LifetimePosition end) { 2058 LifetimePosition end) {
2451 CHECK(start.Value() < end.Value()); 2059 CHECK(start.Value() < end.Value());
2452 auto second_part = SplitRangeAt(range, start); 2060 auto second_part = SplitRangeAt(range, start);
2453 2061
2454 if (second_part->Start().Value() < end.Value()) { 2062 if (second_part->Start().Value() < end.Value()) {
2455 // The split result intersects with [start, end[. 2063 // The split result intersects with [start, end[.
2456 // Split it at a position between ]start+1, end[, spill the middle part 2064
2457 // and put the rest back on the unhandled list. 2065
2458 auto third_part_end = end.PrevStart().End(); 2066 auto third_part_end = end.PrevStart().End();
2459 if (IsBlockBoundary(end.Start())) { 2067 if (live_range_builder()->IsBlockBoundary(end.Start())) {
2460 third_part_end = end.Start(); 2068 third_part_end = end.Start();
2461 } 2069 }
2462 auto third_part = SplitBetween( 2070 auto third_part = SplitBetween(
2463 second_part, Max(second_part->Start().End(), until), third_part_end); 2071 second_part, Max(second_part->Start().End(), until), third_part_end);
2464 2072
2465 DCHECK(third_part != second_part); 2073 DCHECK(third_part != second_part);
2466 2074
2467 Spill(second_part); 2075 Spill(second_part);
2468 AddToUnhandledSorted(third_part); 2076 AddToUnhandledSorted(third_part);
2469 } else { 2077 } else {
2470 // The split result does not intersect with [start, end[. 2078 // The split result does not intersect with [start, end[.
2471 // Nothing to spill. Just put it back on the unhandled list whole. 2079
2472 AddToUnhandledSorted(second_part); 2080 AddToUnhandledSorted(second_part);
2473 } 2081 }
2474 } 2082 }
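The three-way split above, sketched over plain intervals with hypothetical types (the real code picks an optimal split point inside ]start+1, end[ rather than exactly at end): the head keeps its register, the middle is spilled, and the tail is re-queued for allocation.

#include <cassert>

struct Interval { int from; int to; };

struct SplitResult { Interval head, spilled, tail; };

SplitResult SpillBetweenSketch(Interval range, int start, int end) {
  assert(range.from < start && start < end && end < range.to);
  return {{range.from, start},  // stays in its register
          {start, end},         // spilled middle part
          {end, range.to}};     // goes back to the unhandled queue
}

int main() {
  SplitResult r = SpillBetweenSketch({0, 20}, 5, 12);
  assert(r.spilled.from == 5 && r.spilled.to == 12 && r.tail.to == 20);
  return 0;
}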
2475 2083
2476 2084
2477 void RegisterAllocator::Spill(LiveRange* range) { 2085 void LinearScanAllocator::Spill(LiveRange* range) {
2478 DCHECK(!range->IsSpilled()); 2086 DCHECK(!range->IsSpilled());
2479 TRACE("Spilling live range %d\n", range->id()); 2087 TRACE("Spilling live range %d\n", range->id());
2480 auto first = range->TopLevel(); 2088 auto first = range->TopLevel();
2481 if (first->HasNoSpillType()) { 2089 if (first->HasNoSpillType()) {
2482 AssignSpillRangeToLiveRange(first); 2090 live_range_builder()->AssignSpillRangeToLiveRange(first);
2483 } 2091 }
2484 range->MakeSpilled(); 2092 range->MakeSpilled();
2485 } 2093 }
2486 2094
2487 2095
2488 int RegisterAllocator::RegisterCount() const { return num_registers_; } 2096 OperandAssigner::OperandAssigner(LiveRangeBuilder* live_range_builder)
2489 2097 : live_range_builder_(live_range_builder) {}
2490 2098
2491 #ifdef DEBUG 2099
2492 2100 void OperandAssigner::AssignSpillSlots() {
2493 2101 auto& spill_ranges = live_range_builder()->spill_ranges();
2494 void RegisterAllocator::Verify() const { 2102 // Merge disjoint spill ranges
2495 for (auto current : live_ranges()) { 2103 for (size_t i = 0; i < spill_ranges.size(); i++) {
2496 if (current != nullptr) current->Verify(); 2104 auto range = spill_ranges[i];
2497 } 2105 if (range->IsEmpty()) continue;
2498 } 2106 for (size_t j = i + 1; j < spill_ranges.size(); j++) {
2499 2107 auto other = spill_ranges[j];
2500 2108 if (!other->IsEmpty()) {
2501 #endif 2109 range->TryMerge(other);
2502 2110 }
2503 2111 }
2504 void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range, 2112 }
2505 int reg) { 2113 // Allocate slots for the merged spill ranges.
2506 if (range->Kind() == DOUBLE_REGISTERS) { 2114 for (auto range : spill_ranges) {
2507 assigned_double_registers_->Add(reg); 2115 if (range->IsEmpty()) continue;
2116 // Allocate a new operand referring to the spill slot.
2117 auto kind = range->Kind();
2118 int index = live_range_builder()->frame()->AllocateSpillSlot(
2119 kind == DOUBLE_REGISTERS);
2120 auto op_kind = kind == DOUBLE_REGISTERS
2121 ? AllocatedOperand::DOUBLE_STACK_SLOT
2122 : AllocatedOperand::STACK_SLOT;
2123 auto op = AllocatedOperand::New(live_range_builder()->code_zone(), op_kind,
2124 index);
2125 range->SetOperand(op);
2126 }
2127 }
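The quadratic pass above merges any two spill ranges whose live intervals never intersect, letting them share one stack slot; a successful merge empties the donor so later iterations skip it. A standalone sketch of that invariant with hypothetical types:

#include <utility>
#include <vector>

struct SpillRange {
  std::vector<std::pair<int, int>> intervals;  // half-open [from, to)
  bool IsEmpty() const { return intervals.empty(); }
  bool IntersectsWith(const SpillRange& other) const {
    for (auto a : intervals)
      for (auto b : other.intervals)
        if (a.first < b.second && b.first < a.second) return true;
    return false;
  }
  bool TryMerge(SpillRange* other) {
    if (IntersectsWith(*other)) return false;
    intervals.insert(intervals.end(), other->intervals.begin(),
                     other->intervals.end());
    other->intervals.clear();  // donor now empty: one slot serves both
    return true;
  }
};

int main() {
  SpillRange a{{{0, 5}}}, b{{{7, 9}}}, c{{{4, 8}}};
  bool merged_ab = a.TryMerge(&b);  // disjoint: b shares a's slot
  bool merged_ac = a.TryMerge(&c);  // overlaps [4,5) and [7,8): refused
  return (merged_ab && !merged_ac && b.IsEmpty()) ? 0 : 1;
}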
2128
2129
2130 void OperandAssigner::CommitAssignment() {
2131 for (auto range : live_range_builder()->live_ranges()) {
2132 if (range == nullptr || range->IsEmpty()) continue;
2133 InstructionOperand* spill_operand = nullptr;
2134 if (!range->TopLevel()->HasNoSpillType()) {
2135 spill_operand = range->TopLevel()->GetSpillOperand();
2136 }
2137 auto assigned = range->GetAssignedOperand();
2138 range->ConvertUsesToOperand(assigned, spill_operand);
2139 if (range->is_phi()) live_range_builder()->AssignPhiInput(range, assigned);
2140 if (!range->IsChild() && spill_operand != nullptr) {
2141 range->CommitSpillsAtDefinition(live_range_builder()->code(),
2142 spill_operand, range->has_slot_use());
2143 }
2144 }
2145 }
2146
2147
2148 ReferenceMapPopulator::ReferenceMapPopulator(
2149 LiveRangeBuilder* live_range_builder)
2150 : live_range_builder_(live_range_builder) {}
2151
2152
2153 bool ReferenceMapPopulator::SafePointsAreInOrder() const {
2154 int safe_point = 0;
2155 for (auto map : *live_range_builder()->code()->reference_maps()) {
2156 if (safe_point > map->instruction_position()) return false;
2157 safe_point = map->instruction_position();
2158 }
2159 return true;
2160 }
2161
2162
2163 void ReferenceMapPopulator::PopulateReferenceMaps() {
2164 DCHECK(SafePointsAreInOrder());
2165
2166 // Iterate over all safe point positions and record a pointer
2167 // for all spilled live ranges at this point.
2168 int last_range_start = 0;
2169 auto reference_maps = live_range_builder()->code()->reference_maps();
2170 ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
2171 for (LiveRange* range : live_range_builder()->live_ranges()) {
2172 if (range == nullptr) continue;
2173 // Iterate over the first parts of multi-part live ranges.
2174 if (range->IsChild()) continue;
2175 // Skip non-reference values.
2176 if (!IsReference(range->id())) continue;
2177 // Skip empty live ranges.
2178 if (range->IsEmpty()) continue;
2179
2180 // Find the extent of the range and its children.
2181 int start = range->Start().ToInstructionIndex();
2182 int end = 0;
2183 for (auto cur = range; cur != nullptr; cur = cur->next()) {
2184 auto this_end = cur->End();
2185 if (this_end.ToInstructionIndex() > end)
2186 end = this_end.ToInstructionIndex();
2187 DCHECK(cur->Start().ToInstructionIndex() >= start);
2188 }
2189
2190 // Most of the ranges are in order, but not all. Keep an eye on when they
2191 // step backwards and reset the first_it so we don't miss any safe points.
2192 if (start < last_range_start) first_it = reference_maps->begin();
2193 last_range_start = start;
2194
2195 // Step across all the safe points that are before the start of this range,
2196 // recording how far we step in order to save doing this for the next range.
2197 for (; first_it != reference_maps->end(); ++first_it) {
2198 auto map = *first_it;
2199 if (map->instruction_position() >= start) break;
2200 }
2201
2202 // Step through the safe points to see whether they are in the range.
2203 for (auto it = first_it; it != reference_maps->end(); ++it) {
2204 auto map = *it;
2205 int safe_point = map->instruction_position();
2206
2207 // The safe points are sorted so we can stop searching here.
2208 if (safe_point - 1 > end) break;
2209
2210 // Advance to the next active range that covers the current
2211 // safe point position.
2212 auto safe_point_pos =
2213 LifetimePosition::InstructionFromInstructionIndex(safe_point);
2214 auto cur = range;
2215 while (cur != nullptr && !cur->Covers(safe_point_pos)) {
2216 cur = cur->next();
2217 }
2218 if (cur == nullptr) continue;
2219
2220 // Check if the live range is spilled and the safe point is after
2221 // the spill position.
2222 if (range->HasSpillOperand() &&
2223 safe_point >= range->spill_start_index() &&
2224 !range->GetSpillOperand()->IsConstant()) {
2225 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
2226 range->id(), range->spill_start_index(), safe_point);
2227 map->RecordReference(*range->GetSpillOperand());
2228 }
2229
2230 if (!cur->IsSpilled()) {
2231 TRACE(
2232 "Pointer in register for range %d (start at %d) "
2233 "at safe point %d\n",
2234 cur->id(), cur->Start().Value(), safe_point);
2235 auto operand = cur->GetAssignedOperand();
2236 DCHECK(!operand.IsStackSlot());
2237 map->RecordReference(operand);
2238 }
2239 }
2240 }
2241 }
2242
2243
2244 namespace {
2245
2246 class LiveRangeBound {
2247 public:
2248 explicit LiveRangeBound(const LiveRange* range)
2249 : range_(range), start_(range->Start()), end_(range->End()) {
2250 DCHECK(!range->IsEmpty());
2251 }
2252
2253 bool CanCover(LifetimePosition position) {
2254 return start_.Value() <= position.Value() &&
2255 position.Value() < end_.Value();
2256 }
2257
2258 const LiveRange* const range_;
2259 const LifetimePosition start_;
2260 const LifetimePosition end_;
2261
2262 private:
2263 DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
2264 };
2265
2266
2267 struct FindResult {
2268 const LiveRange* cur_cover_;
2269 const LiveRange* pred_cover_;
2270 };
2271
2272
2273 class LiveRangeBoundArray {
2274 public:
2275 LiveRangeBoundArray() : length_(0), start_(nullptr) {}
2276
2277 bool ShouldInitialize() { return start_ == nullptr; }
2278
2279 void Initialize(Zone* zone, const LiveRange* const range) {
2280 size_t length = 0;
2281 for (auto i = range; i != nullptr; i = i->next()) length++;
2282 start_ = zone->NewArray<LiveRangeBound>(length);
2283 length_ = length;
2284 auto curr = start_;
2285 for (auto i = range; i != nullptr; i = i->next(), ++curr) {
2286 new (curr) LiveRangeBound(i);
2287 }
2288 }
2289
2290 LiveRangeBound* Find(const LifetimePosition position) const {
2291 size_t left_index = 0;
2292 size_t right_index = length_;
2293 while (true) {
2294 size_t current_index = left_index + (right_index - left_index) / 2;
2295 DCHECK(right_index > current_index);
2296 auto bound = &start_[current_index];
2297 if (bound->start_.Value() <= position.Value()) {
2298 if (position.Value() < bound->end_.Value()) return bound;
2299 DCHECK(left_index < current_index);
2300 left_index = current_index;
2301 } else {
2302 right_index = current_index;
2303 }
2304 }
2305 }
2306
2307 LiveRangeBound* FindPred(const InstructionBlock* pred) {
2308 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
2309 pred->last_instruction_index());
2310 return Find(pred_end);
2311 }
2312
2313 LiveRangeBound* FindSucc(const InstructionBlock* succ) {
2314 auto succ_start = LifetimePosition::GapFromInstructionIndex(
2315 succ->first_instruction_index());
2316 return Find(succ_start);
2317 }
2318
2319 void Find(const InstructionBlock* block, const InstructionBlock* pred,
2320 FindResult* result) const {
2321 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
2322 pred->last_instruction_index());
2323 auto bound = Find(pred_end);
2324 result->pred_cover_ = bound->range_;
2325 auto cur_start = LifetimePosition::GapFromInstructionIndex(
2326 block->first_instruction_index());
2327 // Common case.
2328 if (bound->CanCover(cur_start)) {
2329 result->cur_cover_ = bound->range_;
2330 return;
2331 }
2332 result->cur_cover_ = Find(cur_start)->range_;
2333 DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
2334 }
2335
2336 private:
2337 size_t length_;
2338 LiveRangeBound* start_;
2339
2340 DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
2341 };
2342
2343
2344 class LiveRangeFinder {
2345 public:
2346 explicit LiveRangeFinder(const LiveRangeBuilder& builder)
2347 : builder_(builder),
2348 bounds_length_(static_cast<int>(builder.live_ranges().size())),
2349 bounds_(builder.local_zone()->NewArray<LiveRangeBoundArray>(
2350 bounds_length_)) {
2351 for (int i = 0; i < bounds_length_; ++i) {
2352 new (&bounds_[i]) LiveRangeBoundArray();
2353 }
2354 }
2355
2356 LiveRangeBoundArray* ArrayFor(int operand_index) {
2357 DCHECK(operand_index < bounds_length_);
2358 auto range = builder_.live_ranges()[operand_index];
2359 DCHECK(range != nullptr && !range->IsEmpty());
2360 auto array = &bounds_[operand_index];
2361 if (array->ShouldInitialize()) {
2362 array->Initialize(builder_.local_zone(), range);
2363 }
2364 return array;
2365 }
2366
2367 private:
2368 const LiveRangeBuilder& builder_;
2369 const int bounds_length_;
2370 LiveRangeBoundArray* const bounds_;
2371
2372 DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
2373 };
2374
2375 } // namespace
2376
2377
2378 LiveRangeConnector::LiveRangeConnector(LiveRangeBuilder* live_range_builder)
2379 : live_range_builder_(live_range_builder) {}
2380
2381
2382 bool LiveRangeConnector::CanEagerlyResolveControlFlow(
2383 const InstructionBlock* block) const {
2384 if (block->PredecessorCount() != 1) return false;
2385 return block->predecessors()[0].IsNext(block->rpo_number());
2386 }
2387
2388
2389 void LiveRangeConnector::ResolveControlFlow() {
2390 // Lazily linearize live ranges in memory for fast lookup.
2391 LiveRangeFinder finder(*live_range_builder());
2392 auto& live_in_sets = live_range_builder()->live_in_sets();
2393 for (auto block : code()->instruction_blocks()) {
2394 if (CanEagerlyResolveControlFlow(block)) continue;
2395 auto live = live_in_sets[block->rpo_number().ToInt()];
2396 BitVector::Iterator iterator(live);
2397 while (!iterator.Done()) {
2398 auto* array = finder.ArrayFor(iterator.Current());
2399 for (auto pred : block->predecessors()) {
2400 FindResult result;
2401 const auto* pred_block = code()->InstructionBlockAt(pred);
2402 array->Find(block, pred_block, &result);
2403 if (result.cur_cover_ == result.pred_cover_ ||
2404 result.cur_cover_->IsSpilled())
2405 continue;
2406 auto pred_op = result.pred_cover_->GetAssignedOperand();
2407 auto cur_op = result.cur_cover_->GetAssignedOperand();
2408 if (pred_op == cur_op) continue;
2409 ResolveControlFlow(block, cur_op, pred_block, pred_op);
2410 }
2411 iterator.Advance();
2412 }
2413 }
2414 }
2415
2416
2417 void LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
2418 const InstructionOperand& cur_op,
2419 const InstructionBlock* pred,
2420 const InstructionOperand& pred_op) {
2421 DCHECK(pred_op != cur_op);
2422 int gap_index;
2423 Instruction::GapPosition position;
2424 if (block->PredecessorCount() == 1) {
2425 gap_index = block->first_instruction_index();
2426 position = Instruction::START;
2508 } else { 2427 } else {
2509 DCHECK(range->Kind() == GENERAL_REGISTERS); 2428 DCHECK(pred->SuccessorCount() == 1);
2510 assigned_registers_->Add(reg); 2429 DCHECK(!live_range_builder()
2511 } 2430 ->InstructionAt(pred->last_instruction_index())
2512 range->set_assigned_register(reg); 2431 ->HasReferenceMap());
2513 auto assignment = range->GetAssignedOperand(); 2432 gap_index = pred->last_instruction_index();
2514 // TODO(dcarney): stop aliasing hint operands. 2433 position = Instruction::END;
2515 range->ConvertUsesToOperand(assignment, nullptr); 2434 }
2516 if (range->is_phi()) AssignPhiInput(range, assignment); 2435 live_range_builder()->AddGapMove(gap_index, position, pred_op, cur_op);
2436 }
2437
2438
2439 void LiveRangeConnector::ConnectRanges(Zone* temp_zone) {
2440 ZoneMap<std::pair<ParallelMove*, InstructionOperand>, InstructionOperand>
2441 delayed_insertion_map(temp_zone);
2442 for (auto first_range : live_range_builder()->live_ranges()) {
2443 if (first_range == nullptr || first_range->IsChild()) continue;
2444 for (auto second_range = first_range->next(); second_range != nullptr;
2445 first_range = second_range, second_range = second_range->next()) {
2446 auto pos = second_range->Start();
2447 // Add gap move if the two live ranges touch and there is no block
2448 // boundary.
2449 if (second_range->IsSpilled()) continue;
2450 if (first_range->End().Value() != pos.Value()) continue;
2451 if (live_range_builder()->IsBlockBoundary(pos) &&
2452 !CanEagerlyResolveControlFlow(GetInstructionBlock(pos))) {
2453 continue;
2454 }
2455 auto prev_operand = first_range->GetAssignedOperand();
2456 auto cur_operand = second_range->GetAssignedOperand();
2457 if (prev_operand == cur_operand) continue;
2458 bool delay_insertion = false;
2459 Instruction::GapPosition gap_pos;
2460 int gap_index = pos.ToInstructionIndex();
2461 if (pos.IsGapPosition()) {
2462 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
2463 } else {
2464 if (pos.IsStart()) {
2465 delay_insertion = true;
2466 } else {
2467 gap_index++;
2468 }
2469 gap_pos = delay_insertion ? Instruction::END : Instruction::START;
2470 }
2471 auto move = code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
2472 gap_pos, code_zone());
2473 if (!delay_insertion) {
2474 move->AddMove(prev_operand, cur_operand);
2475 } else {
2476 delayed_insertion_map.insert(
2477 std::make_pair(std::make_pair(move, prev_operand), cur_operand));
2478 }
2479 }
2480 }
2481 if (delayed_insertion_map.empty()) return;
2482 // Insert all the moves which should occur after the stored move.
2483 ZoneVector<MoveOperands*> to_insert(temp_zone);
2484 ZoneVector<MoveOperands*> to_eliminate(temp_zone);
2485 to_insert.reserve(4);
2486 to_eliminate.reserve(4);
2487 auto moves = delayed_insertion_map.begin()->first.first;
2488 for (auto it = delayed_insertion_map.begin();; ++it) {
2489 bool done = it == delayed_insertion_map.end();
2490 if (done || it->first.first != moves) {
2491 // Commit the MoveOperands for the current ParallelMove.
2492 for (auto move : to_eliminate) {
2493 move->Eliminate();
2494 }
2495 for (auto move : to_insert) {
2496 moves->push_back(move);
2497 }
2498 if (done) break;
2499 // Reset state.
2500 to_eliminate.clear();
2501 to_insert.clear();
2502 moves = it->first.first;
2503 }
2504 // Gather all MoveOperands for a single ParallelMove.
2505 auto move = new (code_zone()) MoveOperands(it->first.second, it->second);
2506 auto eliminate = moves->PrepareInsertAfter(move);
2507 to_insert.push_back(move);
2508 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
2509 }
2517 } 2510 }
2518 2511
2519 } // namespace compiler 2512 } // namespace compiler
2520 } // namespace internal 2513 } // namespace internal
2521 } // namespace v8 2514 } // namespace v8