Chromium Code Reviews

Side by Side Diff: src/compiler/register-allocator.cc

Issue 1094063002: [turbofan] split register allocator into little pieces (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 8 months ago
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler/linkage.h" 5 #include "src/compiler/linkage.h"
6 #include "src/compiler/register-allocator.h" 6 #include "src/compiler/register-allocator.h"
7 #include "src/string-stream.h" 7 #include "src/string-stream.h"
8 8
9 namespace v8 { 9 namespace v8 {
10 namespace internal { 10 namespace internal {
11 namespace compiler { 11 namespace compiler {
12 12
13 #define TRACE(...) \ 13 #define TRACE(...) \
14 do { \ 14 do { \
15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ 15 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
16 } while (false) 16 } while (false)
17 17
18 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { 18 namespace {
19
20 inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
19 return a.Value() < b.Value() ? a : b; 21 return a.Value() < b.Value() ? a : b;
20 } 22 }
21 23
22 24
23 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { 25 inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
24 return a.Value() > b.Value() ? a : b; 26 return a.Value() > b.Value() ? a : b;
25 } 27 }
26 28
27 29
28 static void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) { 30 void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
29 auto it = std::find(v->begin(), v->end(), range); 31 auto it = std::find(v->begin(), v->end(), range);
30 DCHECK(it != v->end()); 32 DCHECK(it != v->end());
31 v->erase(it); 33 v->erase(it);
32 } 34 }
33 35
36 } // namespace
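
Note: the anonymous namespace introduced here gives Min, Max, and RemoveElement internal linkage without the `static` keyword. A minimal, self-contained sketch of the same RemoveElement pattern (plain std::vector standing in for ZoneVector):

#include <algorithm>
#include <cassert>
#include <vector>

namespace {

// Erase the first occurrence of value; it must be present (like the DCHECK).
void RemoveElement(std::vector<int>* v, int value) {
  auto it = std::find(v->begin(), v->end(), value);
  assert(it != v->end());
  v->erase(it);
}

}  // namespace

int main() {
  std::vector<int> v = {1, 2, 3};
  RemoveElement(&v, 2);
  assert(v == (std::vector<int>{1, 3}));
}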
37
34 38
35 UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand, 39 UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
36 InstructionOperand* hint) 40 InstructionOperand* hint)
37 : operand_(operand), hint_(hint), pos_(pos), next_(nullptr), flags_(0) { 41 : operand_(operand), hint_(hint), pos_(pos), next_(nullptr), flags_(0) {
38 bool register_beneficial = true; 42 bool register_beneficial = true;
39 UsePositionType type = UsePositionType::kAny; 43 UsePositionType type = UsePositionType::kAny;
40 if (operand_ != nullptr && operand_->IsUnallocated()) { 44 if (operand_ != nullptr && operand_->IsUnallocated()) {
41 const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_); 45 const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
42 if (unalloc->HasRegisterPolicy()) { 46 if (unalloc->HasRegisterPolicy()) {
43 type = UsePositionType::kRequiresRegister; 47 type = UsePositionType::kRequiresRegister;
(...skipping 34 matching lines...)
78 struct LiveRange::SpillAtDefinitionList : ZoneObject { 82 struct LiveRange::SpillAtDefinitionList : ZoneObject {
79 SpillAtDefinitionList(int gap_index, InstructionOperand* operand, 83 SpillAtDefinitionList(int gap_index, InstructionOperand* operand,
80 SpillAtDefinitionList* next) 84 SpillAtDefinitionList* next)
81 : gap_index(gap_index), operand(operand), next(next) {} 85 : gap_index(gap_index), operand(operand), next(next) {}
82 const int gap_index; 86 const int gap_index;
83 InstructionOperand* const operand; 87 InstructionOperand* const operand;
84 SpillAtDefinitionList* const next; 88 SpillAtDefinitionList* const next;
85 }; 89 };
86 90
87 91
88 #ifdef DEBUG 92 LiveRange::LiveRange(int id, Zone* zone)
93 : id_(id),
94 spilled_(false),
95 has_slot_use_(false),
96 is_phi_(false),
97 is_non_loop_phi_(false),
98 kind_(UNALLOCATED_REGISTERS),
99 assigned_register_(kInvalidAssignment),
100 last_interval_(nullptr),
101 first_interval_(nullptr),
102 first_pos_(nullptr),
103 parent_(nullptr),
104 next_(nullptr),
105 current_interval_(nullptr),
106 last_processed_use_(nullptr),
107 current_hint_operand_(nullptr),
108 spill_start_index_(kMaxInt),
109 spill_type_(SpillType::kNoSpillType),
110 spill_operand_(nullptr),
111 spills_at_definition_(nullptr) {}
89 112
90 113
91 void LiveRange::Verify() const { 114 void LiveRange::Verify() const {
92 UsePosition* cur = first_pos_; 115 UsePosition* cur = first_pos_;
93 while (cur != nullptr) { 116 while (cur != nullptr) {
94 DCHECK(Start().Value() <= cur->pos().Value() && 117 CHECK(Start().Value() <= cur->pos().Value() &&
95 cur->pos().Value() <= End().Value()); 118 cur->pos().Value() <= End().Value());
96 cur = cur->next(); 119 cur = cur->next();
97 } 120 }
98 } 121 }
99 122
100 123
101 bool LiveRange::HasOverlap(UseInterval* target) const { 124 bool LiveRange::HasOverlap(UseInterval* target) const {
102 UseInterval* current_interval = first_interval_; 125 UseInterval* current_interval = first_interval_;
103 while (current_interval != nullptr) { 126 while (current_interval != nullptr) {
104 // Intervals overlap if the start of one is contained in the other. 127 // Intervals overlap if the start of one is contained in the other.
105 if (current_interval->Contains(target->start()) || 128 if (current_interval->Contains(target->start()) ||
106 target->Contains(current_interval->start())) { 129 target->Contains(current_interval->start())) {
107 return true; 130 return true;
108 } 131 }
109 current_interval = current_interval->next(); 132 current_interval = current_interval->next();
110 } 133 }
111 return false; 134 return false;
112 } 135 }
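
Note: because the use intervals of a range are kept sorted and pairwise disjoint, two intervals overlap exactly when the start of one lies inside the other, which is all the loop above tests. A self-contained sketch of the predicate (hypothetical Interval type, not the V8 classes):

struct Interval {
  int start, end;   // half-open: [start, end)
  Interval* next;   // next interval in the range, sorted by start
  bool Contains(int pos) const { return start <= pos && pos < end; }
};

// True if any interval in the list overlaps `target`.
bool HasOverlap(const Interval* list, const Interval& target) {
  for (const Interval* cur = list; cur != nullptr; cur = cur->next) {
    // Intervals overlap iff the start of one is contained in the other.
    if (cur->Contains(target.start) || target.Contains(cur->start)) {
      return true;
    }
  }
  return false;
}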
113 136
114 137
115 #endif
116
117
118 LiveRange::LiveRange(int id, Zone* zone)
119 : id_(id),
120 spilled_(false),
121 has_slot_use_(false),
122 is_phi_(false),
123 is_non_loop_phi_(false),
124 kind_(UNALLOCATED_REGISTERS),
125 assigned_register_(kInvalidAssignment),
126 last_interval_(nullptr),
127 first_interval_(nullptr),
128 first_pos_(nullptr),
129 parent_(nullptr),
130 next_(nullptr),
131 current_interval_(nullptr),
132 last_processed_use_(nullptr),
133 current_hint_operand_(nullptr),
134 spill_start_index_(kMaxInt),
135 spill_type_(SpillType::kNoSpillType),
136 spill_operand_(nullptr),
137 spills_at_definition_(nullptr) {}
138
139
140 void LiveRange::set_assigned_register(int reg) { 138 void LiveRange::set_assigned_register(int reg) {
141 DCHECK(!HasRegisterAssigned() && !IsSpilled()); 139 DCHECK(!HasRegisterAssigned() && !IsSpilled());
142 assigned_register_ = reg; 140 assigned_register_ = reg;
143 } 141 }
144 142
145 143
146 void LiveRange::MakeSpilled() { 144 void LiveRange::MakeSpilled() {
147 DCHECK(!IsSpilled()); 145 DCHECK(!IsSpilled());
148 DCHECK(!TopLevel()->HasNoSpillType()); 146 DCHECK(!TopLevel()->HasNoSpillType());
149 spilled_ = true; 147 spilled_ = true;
(...skipping 414 matching lines...)
564 if (a == nullptr || a->start().Value() > other->End().Value()) break; 562 if (a == nullptr || a->start().Value() > other->End().Value()) break;
565 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); 563 AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
566 } else { 564 } else {
567 b = b->next(); 565 b = b->next();
568 } 566 }
569 } 567 }
570 return LifetimePosition::Invalid(); 568 return LifetimePosition::Invalid();
571 } 569 }
572 570
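Note: the tail shown above is the end of FirstIntersection, a two-pointer walk over two sorted, disjoint interval lists that advances whichever list is behind and stops at the first overlap. A standalone sketch of that walk under the same assumptions (plain ints for positions; the skipped lines are not reproduced):

struct Interval {
  int start, end;  // half-open: [start, end)
  Interval* next;
};

// First position covered by both lists, or -1 if they never intersect.
int FirstIntersection(const Interval* a, const Interval* b) {
  while (a != nullptr && b != nullptr) {
    if (b->start < a->start) {
      const Interval* tmp = a; a = b; b = tmp;  // keep a starting first
    }
    if (b->start < a->end) return b->start;     // overlap begins at b->start
    a = a->next;                                // a ends before b starts
  }
  return -1;
}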
573 571
574 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config,
575 Zone* zone, Frame* frame,
576 InstructionSequence* code,
577 const char* debug_name)
578 : local_zone_(zone),
579 frame_(frame),
580 code_(code),
581 debug_name_(debug_name),
582 config_(config),
583 phi_map_(local_zone()),
584 live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()),
585 live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()),
586 fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
587 local_zone()),
588 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
589 local_zone()),
590 unhandled_live_ranges_(local_zone()),
591 active_live_ranges_(local_zone()),
592 inactive_live_ranges_(local_zone()),
593 spill_ranges_(local_zone()),
594 mode_(UNALLOCATED_REGISTERS),
595 num_registers_(-1) {
596 DCHECK(this->config()->num_general_registers() <=
597 RegisterConfiguration::kMaxGeneralRegisters);
598 DCHECK(this->config()->num_double_registers() <=
599 RegisterConfiguration::kMaxDoubleRegisters);
600 // TryAllocateFreeReg and AllocateBlockedReg assume this
601 // when allocating local arrays.
602 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >=
603 this->config()->num_general_registers());
604 unhandled_live_ranges().reserve(
605 static_cast<size_t>(code->VirtualRegisterCount() * 2));
606 active_live_ranges().reserve(8);
607 inactive_live_ranges().reserve(8);
608 spill_ranges().reserve(8);
609 assigned_registers_ =
610 new (code_zone()) BitVector(config->num_general_registers(), code_zone());
611 assigned_double_registers_ = new (code_zone())
612 BitVector(config->num_aliased_double_registers(), code_zone());
613 frame->SetAllocatedRegisters(assigned_registers_);
614 frame->SetAllocatedDoubleRegisters(assigned_double_registers_);
615 }
616
617
618 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) {
619 // Compute live out for the given block, excluding backward
620 // successor edges.
621 auto live_out = new (local_zone())
622 BitVector(code()->VirtualRegisterCount(), local_zone());
623
624 // Process all successor blocks.
625 for (auto succ : block->successors()) {
626 // Add values live on entry to the successor. Note the successor's
627 // live_in will not be computed yet for backwards edges.
628 auto live_in = live_in_sets_[succ.ToSize()];
629 if (live_in != nullptr) live_out->Union(*live_in);
630
631 // All phi input operands corresponding to this successor edge are live
632 // out from this block.
633 auto successor = code()->InstructionBlockAt(succ);
634 size_t index = successor->PredecessorIndexOf(block->rpo_number());
635 DCHECK(index < successor->PredecessorCount());
636 for (auto phi : successor->phis()) {
637 live_out->Add(phi->operands()[index]);
638 }
639 }
640 return live_out;
641 }
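
Note: ComputeLiveOut is the standard backward-dataflow step: a value is live out of a block if it is live into some successor, or if it flows into a successor phi along this predecessor edge (back-edge successors simply have no live_in yet and contribute nothing on the first pass). A minimal sketch with std::set standing in for BitVector (hypothetical names):

#include <cstddef>
#include <set>
#include <vector>

struct Phi {
  std::vector<int> operands;  // one virtual register per predecessor edge
};

struct Block {
  std::vector<int> successors;    // block ids
  std::vector<int> predecessors;  // block ids
  std::vector<Phi> phis;
};

std::set<int> ComputeLiveOut(const std::vector<Block>& blocks,
                             const std::vector<std::set<int>>& live_in,
                             int block_id) {
  std::set<int> live_out;
  for (int succ_id : blocks[block_id].successors) {
    // Values live on entry to the successor are live out of this block.
    live_out.insert(live_in[succ_id].begin(), live_in[succ_id].end());
    // So are the phi inputs that flow along this particular edge.
    const Block& succ = blocks[succ_id];
    std::size_t index = 0;
    while (succ.predecessors[index] != block_id) ++index;
    for (const Phi& phi : succ.phis) live_out.insert(phi.operands[index]);
  }
  return live_out;
}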
642
643
644 void RegisterAllocator::AddInitialIntervals(const InstructionBlock* block,
645 BitVector* live_out) {
646 // Add an interval that includes the entire block to the live range for
647 // each live_out value.
648 auto start = LifetimePosition::GapFromInstructionIndex(
649 block->first_instruction_index());
650 auto end = LifetimePosition::InstructionFromInstructionIndex(
651 block->last_instruction_index()).NextStart();
652 BitVector::Iterator iterator(live_out);
653 while (!iterator.Done()) {
654 int operand_index = iterator.Current();
655 auto range = LiveRangeFor(operand_index);
656 range->AddUseInterval(start, end, local_zone());
657 iterator.Advance();
658 }
659 }
660
661
662 int RegisterAllocator::FixedDoubleLiveRangeID(int index) {
663 return -index - 1 - config()->num_general_registers();
664 }
665
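Note: fixed ranges get negative IDs so they can never collide with virtual register numbers, which are non-negative; general registers come first and double registers continue past them. Assuming the symmetric encoding FixedLiveRangeID(index) == -index - 1 (that function is outside this hunk) and 16 general registers, the layout checks out:

// Hypothetical spot-check of the ID encoding; register count assumed.
constexpr int kNumGeneralRegisters = 16;
constexpr int FixedLiveRangeID(int index) { return -index - 1; }
constexpr int FixedDoubleLiveRangeID(int index) {
  return -index - 1 - kNumGeneralRegisters;
}
static_assert(FixedLiveRangeID(0) == -1, "general registers start at -1");
static_assert(FixedLiveRangeID(15) == -16, "and end at -16");
static_assert(FixedDoubleLiveRangeID(0) == -17,
              "double registers continue where general ones stop");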
666
667 InstructionOperand* RegisterAllocator::AllocateFixed(
668 UnallocatedOperand* operand, int pos, bool is_tagged) {
669 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
670 DCHECK(operand->HasFixedPolicy());
671 InstructionOperand allocated;
672 if (operand->HasFixedSlotPolicy()) {
673 allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT,
674 operand->fixed_slot_index());
675 } else if (operand->HasFixedRegisterPolicy()) {
676 allocated = AllocatedOperand(AllocatedOperand::REGISTER,
677 operand->fixed_register_index());
678 } else if (operand->HasFixedDoubleRegisterPolicy()) {
679 allocated = AllocatedOperand(AllocatedOperand::DOUBLE_REGISTER,
680 operand->fixed_register_index());
681 } else {
682 UNREACHABLE();
683 }
684 InstructionOperand::ReplaceWith(operand, &allocated);
685 if (is_tagged) {
686 TRACE("Fixed reg is tagged at %d\n", pos);
687 auto instr = InstructionAt(pos);
688 if (instr->HasReferenceMap()) {
689 instr->reference_map()->RecordReference(*operand);
690 }
691 }
692 return operand;
693 }
694
695
696 LiveRange* RegisterAllocator::NewLiveRange(int index) {
697 // The LiveRange object itself can go in the local zone, but the
698 // InstructionOperand needs to go in the code zone, since it may survive
699 // register allocation.
700 return new (local_zone()) LiveRange(index, code_zone());
701 }
702
703
704 LiveRange* RegisterAllocator::FixedLiveRangeFor(int index) {
705 DCHECK(index < config()->num_general_registers());
706 auto result = fixed_live_ranges()[index];
707 if (result == nullptr) {
708 result = NewLiveRange(FixedLiveRangeID(index));
709 DCHECK(result->IsFixed());
710 result->kind_ = GENERAL_REGISTERS;
711 SetLiveRangeAssignedRegister(result, index);
712 fixed_live_ranges()[index] = result;
713 }
714 return result;
715 }
716
717
718 LiveRange* RegisterAllocator::FixedDoubleLiveRangeFor(int index) {
719 DCHECK(index < config()->num_aliased_double_registers());
720 auto result = fixed_double_live_ranges()[index];
721 if (result == nullptr) {
722 result = NewLiveRange(FixedDoubleLiveRangeID(index));
723 DCHECK(result->IsFixed());
724 result->kind_ = DOUBLE_REGISTERS;
725 SetLiveRangeAssignedRegister(result, index);
726 fixed_double_live_ranges()[index] = result;
727 }
728 return result;
729 }
730
731
732 LiveRange* RegisterAllocator::LiveRangeFor(int index) {
733 if (index >= static_cast<int>(live_ranges().size())) {
734 live_ranges().resize(index + 1, nullptr);
735 }
736 auto result = live_ranges()[index];
737 if (result == nullptr) {
738 result = NewLiveRange(index);
739 live_ranges()[index] = result;
740 }
741 return result;
742 }
743
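Note: LiveRangeFor grows its table on demand and creates ranges lazily, so nothing has to pre-size the vector to the final virtual register count. The same pattern in miniature (illustrative types; V8 zone-allocates instead of using new):

#include <vector>

struct LiveRange {
  explicit LiveRange(int id) : id(id) {}
  int id;
};

std::vector<LiveRange*> live_ranges;

LiveRange* LiveRangeFor(int index) {
  if (index >= static_cast<int>(live_ranges.size())) {
    live_ranges.resize(index + 1, nullptr);  // gap entries stay nullptr
  }
  if (live_ranges[index] == nullptr) {
    live_ranges[index] = new LiveRange(index);  // created on first touch
  }
  return live_ranges[index];
}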
744
745 Instruction* RegisterAllocator::GetLastInstruction(
746 const InstructionBlock* block) {
747 return code()->InstructionAt(block->last_instruction_index());
748 }
749
750
751 LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) {
752 if (operand->IsUnallocated()) {
753 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register());
754 } else if (operand->IsConstant()) {
755 return LiveRangeFor(ConstantOperand::cast(operand)->virtual_register());
756 } else if (operand->IsRegister()) {
757 return FixedLiveRangeFor(RegisterOperand::cast(operand)->index());
758 } else if (operand->IsDoubleRegister()) {
759 return FixedDoubleLiveRangeFor(
760 DoubleRegisterOperand::cast(operand)->index());
761 } else {
762 return nullptr;
763 }
764 }
765
766
767 void RegisterAllocator::Define(LifetimePosition position,
768 InstructionOperand* operand,
769 InstructionOperand* hint) {
770 auto range = LiveRangeFor(operand);
771 if (range == nullptr) return;
772
773 if (range->IsEmpty() || range->Start().Value() > position.Value()) {
774 // Can happen if there is a definition without use.
775 range->AddUseInterval(position, position.NextStart(), local_zone());
776 range->AddUsePosition(position.NextStart(), nullptr, nullptr, local_zone());
777 } else {
778 range->ShortenTo(position);
779 }
780
781 if (operand->IsUnallocated()) {
782 auto unalloc_operand = UnallocatedOperand::cast(operand);
783 range->AddUsePosition(position, unalloc_operand, hint, local_zone());
784 }
785 }
786
787
788 void RegisterAllocator::Use(LifetimePosition block_start,
789 LifetimePosition position,
790 InstructionOperand* operand,
791 InstructionOperand* hint) {
792 auto range = LiveRangeFor(operand);
793 if (range == nullptr) return;
794 if (operand->IsUnallocated()) {
795 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
796 range->AddUsePosition(position, unalloc_operand, hint, local_zone());
797 }
798 range->AddUseInterval(block_start, position, local_zone());
799 }
800
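Note: Define and Use are the two primitives of the backward scan over a block: Use pessimistically extends a range from the block start to the use, and the Define encountered later either shortens the range to the definition point or, for a dead definition, leaves a one-position interval. A schematic version on a single flat interval (positions as plain ints; the real code keeps interval lists and use-position lists):

#include <algorithm>

struct Range {
  int start = -1, end = -1;  // one half-open interval; -1 means empty
  bool empty() const { return start < 0; }
};

// A use seen during the backward scan: cover [block_start, use_pos).
void Use(Range* r, int block_start, int use_pos) {
  if (r->empty()) {
    r->start = block_start;
    r->end = use_pos;
  } else {
    r->start = std::min(r->start, block_start);
    r->end = std::max(r->end, use_pos);
  }
}

// The definition, seen after all uses in the block.
void Define(Range* r, int def_pos) {
  if (r->empty() || r->start > def_pos) {
    r->start = def_pos;      // definition without a use:
    r->end = def_pos + 1;    // keep a minimal point interval
  } else {
    r->start = def_pos;      // ShortenTo: trim the pessimistic start
  }
}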
801
802 MoveOperands* RegisterAllocator::AddGapMove(int index,
803 Instruction::GapPosition position,
804 const InstructionOperand& from,
805 const InstructionOperand& to) {
806 auto instr = code()->InstructionAt(index);
807 auto moves = instr->GetOrCreateParallelMove(position, code_zone());
808 return moves->AddMove(from, to);
809 }
810
811
812 static bool AreUseIntervalsIntersecting(UseInterval* interval1, 572 static bool AreUseIntervalsIntersecting(UseInterval* interval1,
813 UseInterval* interval2) { 573 UseInterval* interval2) {
814 while (interval1 != nullptr && interval2 != nullptr) { 574 while (interval1 != nullptr && interval2 != nullptr) {
815 if (interval1->start().Value() < interval2->start().Value()) { 575 if (interval1->start().Value() < interval2->start().Value()) {
816 if (interval1->end().Value() > interval2->start().Value()) { 576 if (interval1->end().Value() > interval2->start().Value()) {
817 return true; 577 return true;
818 } 578 }
819 interval1 = interval1->next(); 579 interval1 = interval1->next();
820 } else { 580 } else {
821 if (interval2->end().Value() > interval1->start().Value()) { 581 if (interval2->end().Value() > interval1->start().Value()) {
(...skipping 91 matching lines...)
913 } else { 673 } else {
914 tail->set_next(current); 674 tail->set_next(current);
915 } 675 }
916 tail = current; 676 tail = current;
917 current = current->next(); 677 current = current->next();
918 } 678 }
919 // Other list is empty => we are done 679 // Other list is empty => we are done
920 } 680 }
921 681
922 682
923 void RegisterAllocator::AssignSpillSlots() { 683 RegisterAllocationData::RegisterAllocationData(
924 // Merge disjoint spill ranges 684 const RegisterConfiguration* config, Zone* zone, Frame* frame,
925 for (size_t i = 0; i < spill_ranges().size(); i++) { 685 InstructionSequence* code, const char* debug_name)
926 auto range = spill_ranges()[i]; 686 : allocation_zone_(zone),
927 if (range->IsEmpty()) continue; 687 frame_(frame),
928 for (size_t j = i + 1; j < spill_ranges().size(); j++) { 688 code_(code),
929 auto other = spill_ranges()[j]; 689 debug_name_(debug_name),
930 if (!other->IsEmpty()) { 690 config_(config),
931 range->TryMerge(other); 691 phi_map_(allocation_zone()),
932 } 692 live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
693 live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
694 allocation_zone()),
695 fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
696 allocation_zone()),
697 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
698 allocation_zone()),
699 spill_ranges_(allocation_zone()),
700 assigned_registers_(nullptr),
701 assigned_double_registers_(nullptr) {
702 DCHECK(this->config()->num_general_registers() <=
703 RegisterConfiguration::kMaxGeneralRegisters);
704 DCHECK(this->config()->num_double_registers() <=
705 RegisterConfiguration::kMaxDoubleRegisters);
706 spill_ranges().reserve(8);
707 assigned_registers_ = new (code_zone())
708 BitVector(this->config()->num_general_registers(), code_zone());
709 assigned_double_registers_ = new (code_zone())
710 BitVector(this->config()->num_aliased_double_registers(), code_zone());
711 this->frame()->SetAllocatedRegisters(assigned_registers_);
712 this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
713 }
714
715
716 LiveRange* RegisterAllocationData::LiveRangeFor(int index) {
717 if (index >= static_cast<int>(live_ranges().size())) {
718 live_ranges().resize(index + 1, nullptr);
719 }
720 auto result = live_ranges()[index];
721 if (result == nullptr) {
722 result = NewLiveRange(index);
723 live_ranges()[index] = result;
724 }
725 return result;
726 }
727
728
729 MoveOperands* RegisterAllocationData::AddGapMove(
730 int index, Instruction::GapPosition position,
731 const InstructionOperand& from, const InstructionOperand& to) {
732 auto instr = code()->InstructionAt(index);
733 auto moves = instr->GetOrCreateParallelMove(position, code_zone());
734 return moves->AddMove(from, to);
735 }
736
737
738 void RegisterAllocationData::AssignPhiInput(
739 LiveRange* range, const InstructionOperand& assignment) {
740 DCHECK(range->is_phi());
741 auto it = phi_map().find(range->id());
742 DCHECK(it != phi_map().end());
743 for (auto move : it->second->incoming_moves) {
744 move->set_destination(assignment);
745 }
746 }
747
748
749 LiveRange* RegisterAllocationData::NewLiveRange(int index) {
750 // The LiveRange object itself can go in the local zone, but the
751 // InstructionOperand needs to go in the code zone, since it may survive
752 // register allocation.
753 return new (allocation_zone()) LiveRange(index, code_zone());
754 }
755
756
757 bool RegisterAllocationData::ExistsUseWithoutDefinition() {
758 bool found = false;
759 BitVector::Iterator iterator(live_in_sets()[0]);
760 while (!iterator.Done()) {
761 found = true;
762 int operand_index = iterator.Current();
763 PrintF("Register allocator error: live v%d reached first block.\n",
764 operand_index);
765 LiveRange* range = LiveRangeFor(operand_index);
766 PrintF(" (first use is at %d)\n", range->first_pos()->pos().Value());
767 if (debug_name() == nullptr) {
768 PrintF("\n");
769 } else {
770 PrintF(" (function: %s)\n", debug_name());
933 } 771 }
934 } 772 iterator.Advance();
935 773 }
936 // Allocate slots for the merged spill ranges. 774 return found;
937 for (auto range : spill_ranges()) { 775 }
938 if (range->IsEmpty()) continue; 776
939 // Allocate a new operand referring to the spill slot. 777
940 auto kind = range->Kind(); 778 SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
941 int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS); 779 LiveRange* range) {
942 auto op_kind = kind == DOUBLE_REGISTERS 780 auto spill_range =
943 ? AllocatedOperand::DOUBLE_STACK_SLOT 781 new (allocation_zone()) SpillRange(range, allocation_zone());
944 : AllocatedOperand::STACK_SLOT;
945 auto op = AllocatedOperand::New(code_zone(), op_kind, index);
946 range->SetOperand(op);
947 }
948 }
949
950
951 void RegisterAllocator::CommitAssignment() {
952 for (auto range : live_ranges()) {
953 if (range == nullptr || range->IsEmpty()) continue;
954 InstructionOperand* spill_operand = nullptr;
955 if (!range->TopLevel()->HasNoSpillType()) {
956 spill_operand = range->TopLevel()->GetSpillOperand();
957 }
958 auto assigned = range->GetAssignedOperand();
959 range->ConvertUsesToOperand(assigned, spill_operand);
960 if (range->is_phi()) AssignPhiInput(range, assigned);
961 if (!range->IsChild() && spill_operand != nullptr) {
962 range->CommitSpillsAtDefinition(code(), spill_operand,
963 range->has_slot_use());
964 }
965 }
966 }
967
968
969 SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) {
970 auto spill_range = new (local_zone()) SpillRange(range, local_zone());
971 spill_ranges().push_back(spill_range); 782 spill_ranges().push_back(spill_range);
972 return spill_range; 783 return spill_range;
973 } 784 }
974 785
975 786
976 bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) { 787 void RegisterAllocationData::SetLiveRangeAssignedRegister(LiveRange* range,
977 if (range->IsChild() || !range->is_phi()) return false; 788 int reg) {
978 DCHECK(!range->HasSpillOperand()); 789 if (range->Kind() == DOUBLE_REGISTERS) {
979 790 assigned_double_registers_->Add(reg);
980 auto lookup = phi_map_.find(range->id()); 791 } else {
981 DCHECK(lookup != phi_map_.end()); 792 DCHECK(range->Kind() == GENERAL_REGISTERS);
982 auto phi = lookup->second->phi; 793 assigned_registers_->Add(reg);
983 auto block = lookup->second->block; 794 }
984 // Count the number of spilled operands. 795 range->set_assigned_register(reg);
985 size_t spilled_count = 0; 796 auto assignment = range->GetAssignedOperand();
986 LiveRange* first_op = nullptr; 797 // TODO(dcarney): stop aliasing hint operands.
987 for (size_t i = 0; i < phi->operands().size(); i++) { 798 range->ConvertUsesToOperand(assignment, nullptr);
988 int op = phi->operands()[i]; 799 if (range->is_phi()) AssignPhiInput(range, assignment);
989 LiveRange* op_range = LiveRangeFor(op); 800 }
990 if (op_range->GetSpillRange() == nullptr) continue; 801
991 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); 802
992 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( 803 LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data)
993 pred->last_instruction_index()); 804 : data_(data) {}
994 while (op_range != nullptr && !op_range->CanCover(pred_end)) { 805
995 op_range = op_range->next(); 806
807 BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block) {
808 // Compute live out for the given block, except not including backward
809 // successor edges.
810 auto live_out = new (allocation_zone())
811 BitVector(code()->VirtualRegisterCount(), allocation_zone());
812
813 // Process all successor blocks.
814 for (auto succ : block->successors()) {
815 // Add values live on entry to the successor. Note the successor's
816 // live_in will not be computed yet for backwards edges.
817 auto live_in = live_in_sets()[succ.ToSize()];
818 if (live_in != nullptr) live_out->Union(*live_in);
819
820 // All phi input operands corresponding to this successor edge are live
821 // out from this block.
822 auto successor = code()->InstructionBlockAt(succ);
823 size_t index = successor->PredecessorIndexOf(block->rpo_number());
824 DCHECK(index < successor->PredecessorCount());
825 for (auto phi : successor->phis()) {
826 live_out->Add(phi->operands()[index]);
996 } 827 }
997 if (op_range != nullptr && op_range->IsSpilled()) { 828 }
998 spilled_count++; 829 return live_out;
999 if (first_op == nullptr) { 830 }
1000 first_op = op_range->TopLevel(); 831
1001 } 832
833 void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
834 BitVector* live_out) {
835 // Add an interval that includes the entire block to the live range for
836 // each live_out value.
837 auto start = LifetimePosition::GapFromInstructionIndex(
838 block->first_instruction_index());
839 auto end = LifetimePosition::InstructionFromInstructionIndex(
840 block->last_instruction_index()).NextStart();
841 BitVector::Iterator iterator(live_out);
842 while (!iterator.Done()) {
843 int operand_index = iterator.Current();
844 auto range = LiveRangeFor(operand_index);
845 range->AddUseInterval(start, end, allocation_zone());
846 iterator.Advance();
847 }
848 }
849
850
851 Instruction* LiveRangeBuilder::GetLastInstruction(
852 const InstructionBlock* block) {
853 return code()->InstructionAt(block->last_instruction_index());
854 }
855
856
857 int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) {
858 return -index - 1 - config()->num_general_registers();
859 }
860
861
862 InstructionOperand* LiveRangeBuilder::AllocateFixed(UnallocatedOperand* operand,
863 int pos, bool is_tagged) {
864 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
865 DCHECK(operand->HasFixedPolicy());
866 InstructionOperand allocated;
867 if (operand->HasFixedSlotPolicy()) {
868 allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT,
869 operand->fixed_slot_index());
870 } else if (operand->HasFixedRegisterPolicy()) {
871 allocated = AllocatedOperand(AllocatedOperand::REGISTER,
872 operand->fixed_register_index());
873 } else if (operand->HasFixedDoubleRegisterPolicy()) {
874 allocated = AllocatedOperand(AllocatedOperand::DOUBLE_REGISTER,
875 operand->fixed_register_index());
876 } else {
877 UNREACHABLE();
878 }
879 InstructionOperand::ReplaceWith(operand, &allocated);
880 if (is_tagged) {
881 TRACE("Fixed reg is tagged at %d\n", pos);
882 auto instr = InstructionAt(pos);
883 if (instr->HasReferenceMap()) {
884 instr->reference_map()->RecordReference(*operand);
1002 } 885 }
1003 } 886 }
1004 887 return operand;
1005 // Only continue if more than half of the operands are spilled. 888 }
1006 if (spilled_count * 2 <= phi->operands().size()) { 889
1007 return false; 890
1008 } 891 LiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
1009 892 DCHECK(index < config()->num_general_registers());
1010 // Try to merge the spilled operands and count the number of merged spilled 893 auto result = fixed_live_ranges()[index];
1011 // operands. 894 if (result == nullptr) {
1012 DCHECK(first_op != nullptr); 895 result = data()->NewLiveRange(FixedLiveRangeID(index));
1013 auto first_op_spill = first_op->GetSpillRange(); 896 DCHECK(result->IsFixed());
1014 size_t num_merged = 1; 897 result->set_kind(GENERAL_REGISTERS);
1015 for (size_t i = 1; i < phi->operands().size(); i++) { 898 data()->SetLiveRangeAssignedRegister(result, index);
1016 int op = phi->operands()[i]; 899 fixed_live_ranges()[index] = result;
1017 auto op_range = LiveRangeFor(op); 900 }
1018 auto op_spill = op_range->GetSpillRange(); 901 return result;
1019 if (op_spill != nullptr && 902 }
1020 (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill))) { 903
1021 num_merged++; 904
1022 } 905 LiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) {
1023 } 906 DCHECK(index < config()->num_aliased_double_registers());
1024 907 auto result = fixed_double_live_ranges()[index];
1025 // Only continue if enough operands could be merged to the 908 if (result == nullptr) {
1026 // same spill slot. 909 result = data()->NewLiveRange(FixedDoubleLiveRangeID(index));
1027 if (num_merged * 2 <= phi->operands().size() || 910 DCHECK(result->IsFixed());
1028 AreUseIntervalsIntersecting(first_op_spill->interval(), 911 result->set_kind(DOUBLE_REGISTERS);
1029 range->first_interval())) { 912 data()->SetLiveRangeAssignedRegister(result, index);
1030 return false; 913 fixed_double_live_ranges()[index] = result;
1031 } 914 }
1032 915 return result;
1033 // If the range does not need register soon, spill it to the merged 916 }
1034 // spill range. 917
1035 auto next_pos = range->Start(); 918
1036 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart(); 919 LiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
1037 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); 920 if (operand->IsUnallocated()) {
1038 if (pos == nullptr) { 921 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register());
1039 auto spill_range = range->TopLevel()->HasSpillRange() 922 } else if (operand->IsConstant()) {
1040 ? range->TopLevel()->GetSpillRange() 923 return LiveRangeFor(ConstantOperand::cast(operand)->virtual_register());
1041 : AssignSpillRangeToLiveRange(range->TopLevel()); 924 } else if (operand->IsRegister()) {
1042 CHECK(first_op_spill->TryMerge(spill_range)); 925 return FixedLiveRangeFor(RegisterOperand::cast(operand)->index());
1043 Spill(range); 926 } else if (operand->IsDoubleRegister()) {
1044 return true; 927 return FixedDoubleLiveRangeFor(
1045 } else if (pos->pos().Value() > range->Start().NextStart().Value()) { 928 DoubleRegisterOperand::cast(operand)->index());
1046 auto spill_range = range->TopLevel()->HasSpillRange() 929 } else {
1047 ? range->TopLevel()->GetSpillRange() 930 return nullptr;
1048 : AssignSpillRangeToLiveRange(range->TopLevel()); 931 }
1049 CHECK(first_op_spill->TryMerge(spill_range)); 932 }
1050 SpillBetween(range, range->Start(), pos->pos()); 933
1051 DCHECK(UnhandledIsSorted()); 934
1052 return true; 935 void LiveRangeBuilder::Define(LifetimePosition position,
1053 } 936 InstructionOperand* operand,
1054 return false; 937 InstructionOperand* hint) {
1055 } 938 auto range = LiveRangeFor(operand);
1056 939 if (range == nullptr) return;
1057 940
1058 void RegisterAllocator::MeetRegisterConstraints(const InstructionBlock* block) { 941 if (range->IsEmpty() || range->Start().Value() > position.Value()) {
942 // Can happen if there is a definition without use.
943 range->AddUseInterval(position, position.NextStart(), allocation_zone());
944 range->AddUsePosition(position.NextStart(), nullptr, nullptr,
945 allocation_zone());
946 } else {
947 range->ShortenTo(position);
948 }
949
950 if (operand->IsUnallocated()) {
951 auto unalloc_operand = UnallocatedOperand::cast(operand);
952 range->AddUsePosition(position, unalloc_operand, hint, allocation_zone());
953 }
954 }
955
956
957 void LiveRangeBuilder::Use(LifetimePosition block_start,
958 LifetimePosition position,
959 InstructionOperand* operand,
960 InstructionOperand* hint) {
961 auto range = LiveRangeFor(operand);
962 if (range == nullptr) return;
963 if (operand->IsUnallocated()) {
964 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
965 range->AddUsePosition(position, unalloc_operand, hint, allocation_zone());
966 }
967 range->AddUseInterval(block_start, position, allocation_zone());
968 }
969
970
971 void LiveRangeBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
1059 int start = block->first_instruction_index(); 972 int start = block->first_instruction_index();
1060 int end = block->last_instruction_index(); 973 int end = block->last_instruction_index();
1061 DCHECK_NE(-1, start); 974 DCHECK_NE(-1, start);
1062 for (int i = start; i <= end; ++i) { 975 for (int i = start; i <= end; ++i) {
1063 MeetConstraintsBefore(i); 976 MeetConstraintsBefore(i);
1064 if (i != end) MeetConstraintsAfter(i); 977 if (i != end) MeetConstraintsAfter(i);
1065 } 978 }
1066 // Meet register constraints for the last instruction in the block. 979 // Meet register constraints for the last instruction in the block.
1067 MeetRegisterConstraintsForLastInstructionInBlock(block); 980 MeetRegisterConstraintsForLastInstructionInBlock(block);
1068 } 981 }
1069 982
1070 983
1071 void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock( 984 void LiveRangeBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
1072 const InstructionBlock* block) { 985 const InstructionBlock* block) {
1073 int end = block->last_instruction_index(); 986 int end = block->last_instruction_index();
1074 auto last_instruction = InstructionAt(end); 987 auto last_instruction = InstructionAt(end);
1075 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { 988 for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
1076 auto output_operand = last_instruction->OutputAt(i); 989 auto output_operand = last_instruction->OutputAt(i);
1077 DCHECK(!output_operand->IsConstant()); 990 DCHECK(!output_operand->IsConstant());
1078 auto output = UnallocatedOperand::cast(output_operand); 991 auto output = UnallocatedOperand::cast(output_operand);
1079 int output_vreg = output->virtual_register(); 992 int output_vreg = output->virtual_register();
1080 auto range = LiveRangeFor(output_vreg); 993 auto range = LiveRangeFor(output_vreg);
1081 bool assigned = false; 994 bool assigned = false;
1082 if (output->HasFixedPolicy()) { 995 if (output->HasFixedPolicy()) {
1083 AllocateFixed(output, -1, false); 996 AllocateFixed(output, -1, false);
1084 // This value is produced on the stack; we never need to spill it. 997 // This value is produced on the stack; we never need to spill it.
1085 if (output->IsStackSlot()) { 998 if (output->IsStackSlot()) {
1086 DCHECK(StackSlotOperand::cast(output)->index() < 999 DCHECK(StackSlotOperand::cast(output)->index() <
1087 frame_->GetSpillSlotCount()); 1000 data()->frame()->GetSpillSlotCount());
1088 range->SetSpillOperand(StackSlotOperand::cast(output)); 1001 range->SetSpillOperand(StackSlotOperand::cast(output));
1089 range->SetSpillStartIndex(end); 1002 range->SetSpillStartIndex(end);
1090 assigned = true; 1003 assigned = true;
1091 } 1004 }
1092 1005
1093 for (auto succ : block->successors()) { 1006 for (auto succ : block->successors()) {
1094 const InstructionBlock* successor = code()->InstructionBlockAt(succ); 1007 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1095 DCHECK(successor->PredecessorCount() == 1); 1008 DCHECK(successor->PredecessorCount() == 1);
1096 int gap_index = successor->first_instruction_index(); 1009 int gap_index = successor->first_instruction_index();
1097 // Create an unconstrained operand for the same virtual register 1010 // Create an unconstrained operand for the same virtual register
1098 // and insert a gap move from the fixed output to the operand. 1011 // and insert a gap move from the fixed output to the operand.
1099 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg); 1012 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1100 AddGapMove(gap_index, Instruction::START, *output, output_copy); 1013 data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
1101 } 1014 }
1102 } 1015 }
1103 1016
1104 if (!assigned) { 1017 if (!assigned) {
1105 for (auto succ : block->successors()) { 1018 for (auto succ : block->successors()) {
1106 const InstructionBlock* successor = code()->InstructionBlockAt(succ); 1019 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1107 DCHECK(successor->PredecessorCount() == 1); 1020 DCHECK(successor->PredecessorCount() == 1);
1108 int gap_index = successor->first_instruction_index(); 1021 int gap_index = successor->first_instruction_index();
1109 range->SpillAtDefinition(local_zone(), gap_index, output); 1022 range->SpillAtDefinition(allocation_zone(), gap_index, output);
1110 range->SetSpillStartIndex(gap_index); 1023 range->SetSpillStartIndex(gap_index);
1111 } 1024 }
1112 } 1025 }
1113 } 1026 }
1114 } 1027 }
1115 1028
1116 1029
1117 void RegisterAllocator::MeetConstraintsAfter(int instr_index) { 1030 void LiveRangeBuilder::MeetConstraintsAfter(int instr_index) {
1118 auto first = InstructionAt(instr_index); 1031 auto first = InstructionAt(instr_index);
1119 // Handle fixed temporaries. 1032 // Handle fixed temporaries.
1120 for (size_t i = 0; i < first->TempCount(); i++) { 1033 for (size_t i = 0; i < first->TempCount(); i++) {
1121 auto temp = UnallocatedOperand::cast(first->TempAt(i)); 1034 auto temp = UnallocatedOperand::cast(first->TempAt(i));
1122 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false); 1035 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false);
1123 } 1036 }
1124 // Handle constant/fixed output operands. 1037 // Handle constant/fixed output operands.
1125 for (size_t i = 0; i < first->OutputCount(); i++) { 1038 for (size_t i = 0; i < first->OutputCount(); i++) {
1126 InstructionOperand* output = first->OutputAt(i); 1039 InstructionOperand* output = first->OutputAt(i);
1127 if (output->IsConstant()) { 1040 if (output->IsConstant()) {
1128 int output_vreg = ConstantOperand::cast(output)->virtual_register(); 1041 int output_vreg = ConstantOperand::cast(output)->virtual_register();
1129 auto range = LiveRangeFor(output_vreg); 1042 auto range = LiveRangeFor(output_vreg);
1130 range->SetSpillStartIndex(instr_index + 1); 1043 range->SetSpillStartIndex(instr_index + 1);
1131 range->SetSpillOperand(output); 1044 range->SetSpillOperand(output);
1132 continue; 1045 continue;
1133 } 1046 }
1134 auto first_output = UnallocatedOperand::cast(output); 1047 auto first_output = UnallocatedOperand::cast(output);
1135 auto range = LiveRangeFor(first_output->virtual_register()); 1048 auto range = LiveRangeFor(first_output->virtual_register());
1136 bool assigned = false; 1049 bool assigned = false;
1137 if (first_output->HasFixedPolicy()) { 1050 if (first_output->HasFixedPolicy()) {
1138 int output_vreg = first_output->virtual_register(); 1051 int output_vreg = first_output->virtual_register();
1139 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg); 1052 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1140 bool is_tagged = HasTaggedValue(output_vreg); 1053 bool is_tagged = IsReference(output_vreg);
1141 AllocateFixed(first_output, instr_index, is_tagged); 1054 AllocateFixed(first_output, instr_index, is_tagged);
1142 1055
1143 // This value is produced on the stack; we never need to spill it. 1056 // This value is produced on the stack; we never need to spill it.
1144 if (first_output->IsStackSlot()) { 1057 if (first_output->IsStackSlot()) {
1145 DCHECK(StackSlotOperand::cast(first_output)->index() < 1058 DCHECK(StackSlotOperand::cast(first_output)->index() <
1146 frame_->GetSpillSlotCount()); 1059 data()->frame()->GetSpillSlotCount());
1147 range->SetSpillOperand(StackSlotOperand::cast(first_output)); 1060 range->SetSpillOperand(StackSlotOperand::cast(first_output));
1148 range->SetSpillStartIndex(instr_index + 1); 1061 range->SetSpillStartIndex(instr_index + 1);
1149 assigned = true; 1062 assigned = true;
1150 } 1063 }
1151 AddGapMove(instr_index + 1, Instruction::START, *first_output, 1064 data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
1152 output_copy); 1065 output_copy);
1153 } 1066 }
1154 // Make sure we add a gap move for spilling (if we have not done 1067 // Make sure we add a gap move for spilling (if we have not done
1155 // so already). 1068 // so already).
1156 if (!assigned) { 1069 if (!assigned) {
1157 range->SpillAtDefinition(local_zone(), instr_index + 1, first_output); 1070 range->SpillAtDefinition(allocation_zone(), instr_index + 1,
1071 first_output);
1158 range->SetSpillStartIndex(instr_index + 1); 1072 range->SetSpillStartIndex(instr_index + 1);
1159 } 1073 }
1160 } 1074 }
1161 } 1075 }
1162 1076
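Note: the fixed-output protocol used above is: pin the operand itself to the required register or slot (AllocateFixed), then add a parallel "gap" move at the START of the next instruction that copies the value into an unconstrained operand for the same virtual register, so all later uses are free of the constraint. A toy model of just the bookkeeping (hypothetical types, not the V8 API):

#include <string>
#include <vector>

struct Move { std::string from, to; };

// One pending move list per START gap; gap i runs before instruction i.
std::vector<std::vector<Move>> start_gap_moves(16);

void HandleFixedOutput(int vreg, const std::string& fixed_reg,
                       int instr_index) {
  // The defining instruction writes fixed_reg; immediately afterwards the
  // value is moved into an unconstrained home for the virtual register.
  start_gap_moves[instr_index + 1].push_back(
      {fixed_reg, "v" + std::to_string(vreg)});
}

int main() { HandleFixedOutput(7, "rax", 3); }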
1163 1077
1164 void RegisterAllocator::MeetConstraintsBefore(int instr_index) { 1078 void LiveRangeBuilder::MeetConstraintsBefore(int instr_index) {
1165 auto second = InstructionAt(instr_index); 1079 auto second = InstructionAt(instr_index);
1166 // Handle fixed input operands of second instruction. 1080 // Handle fixed input operands of second instruction.
1167 for (size_t i = 0; i < second->InputCount(); i++) { 1081 for (size_t i = 0; i < second->InputCount(); i++) {
1168 auto input = second->InputAt(i); 1082 auto input = second->InputAt(i);
1169 if (input->IsImmediate()) continue; // Ignore immediates. 1083 if (input->IsImmediate()) continue; // Ignore immediates.
1170 auto cur_input = UnallocatedOperand::cast(input); 1084 auto cur_input = UnallocatedOperand::cast(input);
1171 if (cur_input->HasFixedPolicy()) { 1085 if (cur_input->HasFixedPolicy()) {
1172 int input_vreg = cur_input->virtual_register(); 1086 int input_vreg = cur_input->virtual_register();
1173 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); 1087 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1174 bool is_tagged = HasTaggedValue(input_vreg); 1088 bool is_tagged = IsReference(input_vreg);
1175 AllocateFixed(cur_input, instr_index, is_tagged); 1089 AllocateFixed(cur_input, instr_index, is_tagged);
1176 AddGapMove(instr_index, Instruction::END, input_copy, *cur_input); 1090 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
1177 } 1091 }
1178 } 1092 }
1179 // Handle "output same as input" for second instruction. 1093 // Handle "output same as input" for second instruction.
1180 for (size_t i = 0; i < second->OutputCount(); i++) { 1094 for (size_t i = 0; i < second->OutputCount(); i++) {
1181 auto output = second->OutputAt(i); 1095 auto output = second->OutputAt(i);
1182 if (!output->IsUnallocated()) continue; 1096 if (!output->IsUnallocated()) continue;
1183 auto second_output = UnallocatedOperand::cast(output); 1097 auto second_output = UnallocatedOperand::cast(output);
1184 if (!second_output->HasSameAsInputPolicy()) continue; 1098 if (!second_output->HasSameAsInputPolicy()) continue;
1185 DCHECK(i == 0); // Only valid for first output. 1099 DCHECK(i == 0); // Only valid for first output.
1186 UnallocatedOperand* cur_input = 1100 UnallocatedOperand* cur_input =
1187 UnallocatedOperand::cast(second->InputAt(0)); 1101 UnallocatedOperand::cast(second->InputAt(0));
1188 int output_vreg = second_output->virtual_register(); 1102 int output_vreg = second_output->virtual_register();
1189 int input_vreg = cur_input->virtual_register(); 1103 int input_vreg = cur_input->virtual_register();
1190 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); 1104 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1191 cur_input->set_virtual_register(second_output->virtual_register()); 1105 cur_input->set_virtual_register(second_output->virtual_register());
1192 AddGapMove(instr_index, Instruction::END, input_copy, *cur_input); 1106 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
1193 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { 1107 if (IsReference(input_vreg) && !IsReference(output_vreg)) {
1194 if (second->HasReferenceMap()) { 1108 if (second->HasReferenceMap()) {
1195 second->reference_map()->RecordReference(input_copy); 1109 second->reference_map()->RecordReference(input_copy);
1196 } 1110 }
1197 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { 1111 } else if (!IsReference(input_vreg) && IsReference(output_vreg)) {
1198 // The input is assumed to immediately have a tagged representation, 1112 // The input is assumed to immediately have a tagged representation,
1199 // before the pointer map can be used. I.e. the pointer map at the 1113 // before the pointer map can be used. I.e. the pointer map at the
1200 // instruction will include the output operand (whose value at the 1114 // instruction will include the output operand (whose value at the
1201 // beginning of the instruction is equal to the input operand). If 1115 // beginning of the instruction is equal to the input operand). If
1202 // this is not desired, then the pointer map at this instruction needs 1116 // this is not desired, then the pointer map at this instruction needs
1203 // to be adjusted manually. 1117 // to be adjusted manually.
1204 } 1118 }
1205 } 1119 }
1206 } 1120 }
1207 1121
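Note: the "output same as input" branch is the classic two-address fixup: rename the constrained input to the output's virtual register, then insert a gap move at the END of the previous instruction that copies the original input value into that renamed operand, so other users of the input still see it intact. A runnable toy of the rewrite (simplified instruction model):

#include <cassert>
#include <vector>

struct Instr {
  int output;               // virtual register defined
  std::vector<int> inputs;  // virtual registers used
};

struct Move { int from, to; };

// Returns the gap move to execute just before the instruction.
Move MeetSameAsInput(Instr* instr) {
  int output_vreg = instr->output;
  int input_vreg = instr->inputs[0];  // only input 0 may carry the policy
  instr->inputs[0] = output_vreg;     // rename the constrained input
  return Move{input_vreg, output_vreg};
}

int main() {
  Instr add{7, {3, 4}};               // v7 = add v3, v4
  Move m = MeetSameAsInput(&add);     // becomes: v3 -> v7; v7 = add v7, v4
  assert(add.inputs[0] == 7 && m.from == 3 && m.to == 7);
}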
1208 1122
1209 bool RegisterAllocator::IsOutputRegisterOf(Instruction* instr, int index) { 1123 bool LiveRangeBuilder::IsOutputRegisterOf(Instruction* instr, int index) {
1210 for (size_t i = 0; i < instr->OutputCount(); i++) { 1124 for (size_t i = 0; i < instr->OutputCount(); i++) {
1211 auto output = instr->OutputAt(i); 1125 auto output = instr->OutputAt(i);
1212 if (output->IsRegister() && RegisterOperand::cast(output)->index() == index) 1126 if (output->IsRegister() && RegisterOperand::cast(output)->index() == index)
1213 return true; 1127 return true;
1214 } 1128 }
1215 return false; 1129 return false;
1216 } 1130 }
1217 1131
1218 1132
1219 bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr, 1133 bool LiveRangeBuilder::IsOutputDoubleRegisterOf(Instruction* instr, int index) {
1220 int index) {
1221 for (size_t i = 0; i < instr->OutputCount(); i++) { 1134 for (size_t i = 0; i < instr->OutputCount(); i++) {
1222 auto output = instr->OutputAt(i); 1135 auto output = instr->OutputAt(i);
1223 if (output->IsDoubleRegister() && 1136 if (output->IsDoubleRegister() &&
1224 DoubleRegisterOperand::cast(output)->index() == index) 1137 DoubleRegisterOperand::cast(output)->index() == index)
1225 return true; 1138 return true;
1226 } 1139 }
1227 return false; 1140 return false;
1228 } 1141 }
1229 1142
1230 1143
1231 void RegisterAllocator::ProcessInstructions(const InstructionBlock* block, 1144 void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
1232 BitVector* live) { 1145 BitVector* live) {
1233 int block_start = block->first_instruction_index(); 1146 int block_start = block->first_instruction_index();
1234 auto block_start_position = 1147 auto block_start_position =
1235 LifetimePosition::GapFromInstructionIndex(block_start); 1148 LifetimePosition::GapFromInstructionIndex(block_start);
1236 1149
1237 for (int index = block->last_instruction_index(); index >= block_start; 1150 for (int index = block->last_instruction_index(); index >= block_start;
1238 index--) { 1151 index--) {
1239 auto curr_position = 1152 auto curr_position =
1240 LifetimePosition::InstructionFromInstructionIndex(index); 1153 LifetimePosition::InstructionFromInstructionIndex(index);
1241 auto instr = InstructionAt(index); 1154 auto instr = InstructionAt(index);
1242 DCHECK(instr != nullptr); 1155 DCHECK(instr != nullptr);
(...skipping 11 matching lines...)
1254 live->Remove(out_vreg); 1167 live->Remove(out_vreg);
1255 } 1168 }
1256 Define(curr_position, output, nullptr); 1169 Define(curr_position, output, nullptr);
1257 } 1170 }
1258 1171
1259 if (instr->ClobbersRegisters()) { 1172 if (instr->ClobbersRegisters()) {
1260 for (int i = 0; i < config()->num_general_registers(); ++i) { 1173 for (int i = 0; i < config()->num_general_registers(); ++i) {
1261 if (!IsOutputRegisterOf(instr, i)) { 1174 if (!IsOutputRegisterOf(instr, i)) {
1262 auto range = FixedLiveRangeFor(i); 1175 auto range = FixedLiveRangeFor(i);
1263 range->AddUseInterval(curr_position, curr_position.End(), 1176 range->AddUseInterval(curr_position, curr_position.End(),
1264 local_zone()); 1177 allocation_zone());
1265 } 1178 }
1266 } 1179 }
1267 } 1180 }
1268 1181
1269 if (instr->ClobbersDoubleRegisters()) { 1182 if (instr->ClobbersDoubleRegisters()) {
1270 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { 1183 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) {
1271 if (!IsOutputDoubleRegisterOf(instr, i)) { 1184 if (!IsOutputDoubleRegisterOf(instr, i)) {
1272 auto range = FixedDoubleLiveRangeFor(i); 1185 auto range = FixedDoubleLiveRangeFor(i);
1273 range->AddUseInterval(curr_position, curr_position.End(), 1186 range->AddUseInterval(curr_position, curr_position.End(),
1274 local_zone()); 1187 allocation_zone());
1275 } 1188 }
1276 } 1189 }
1277 } 1190 }
1278 1191
1279 for (size_t i = 0; i < instr->InputCount(); i++) { 1192 for (size_t i = 0; i < instr->InputCount(); i++) {
1280 auto input = instr->InputAt(i); 1193 auto input = instr->InputAt(i);
1281 if (input->IsImmediate()) continue; // Ignore immediates. 1194 if (input->IsImmediate()) continue; // Ignore immediates.
1282 LifetimePosition use_pos; 1195 LifetimePosition use_pos;
1283 if (input->IsUnallocated() && 1196 if (input->IsUnallocated() &&
1284 UnallocatedOperand::cast(input)->IsUsedAtStart()) { 1197 UnallocatedOperand::cast(input)->IsUsedAtStart()) {
(...skipping 70 matching lines...)
1355 Use(block_start_position, curr_position, &from, hint); 1268 Use(block_start_position, curr_position, &from, hint);
1356 if (from.IsUnallocated()) { 1269 if (from.IsUnallocated()) {
1357 live->Add(UnallocatedOperand::cast(from).virtual_register()); 1270 live->Add(UnallocatedOperand::cast(from).virtual_register());
1358 } 1271 }
1359 } 1272 }
1360 } 1273 }
1361 } 1274 }
1362 } 1275 }
1363 1276
1364 1277
1365 void RegisterAllocator::ResolvePhis(const InstructionBlock* block) { 1278 void LiveRangeBuilder::ResolvePhis(const InstructionBlock* block) {
1366 for (auto phi : block->phis()) { 1279 for (auto phi : block->phis()) {
1367 int phi_vreg = phi->virtual_register(); 1280 int phi_vreg = phi->virtual_register();
1368 auto map_value = new (local_zone()) PhiMapValue(phi, block, local_zone()); 1281 auto map_value = new (allocation_zone())
1369 auto res = phi_map_.insert(std::make_pair(phi_vreg, map_value)); 1282 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
1283 auto res = phi_map().insert(std::make_pair(phi_vreg, map_value));
1370 DCHECK(res.second); 1284 DCHECK(res.second);
1371 USE(res); 1285 USE(res);
1372 auto& output = phi->output(); 1286 auto& output = phi->output();
1373 for (size_t i = 0; i < phi->operands().size(); ++i) { 1287 for (size_t i = 0; i < phi->operands().size(); ++i) {
1374 InstructionBlock* cur_block = 1288 InstructionBlock* cur_block =
1375 code()->InstructionBlockAt(block->predecessors()[i]); 1289 code()->InstructionBlockAt(block->predecessors()[i]);
1376 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]); 1290 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]);
1377 auto move = AddGapMove(cur_block->last_instruction_index(), 1291 auto move = data()->AddGapMove(cur_block->last_instruction_index(),
1378 Instruction::END, input, output); 1292 Instruction::END, input, output);
1379 map_value->incoming_moves.push_back(move); 1293 map_value->incoming_moves.push_back(move);
1380 DCHECK(!InstructionAt(cur_block->last_instruction_index()) 1294 DCHECK(!InstructionAt(cur_block->last_instruction_index())
1381 ->HasReferenceMap()); 1295 ->HasReferenceMap());
1382 } 1296 }
1383 auto live_range = LiveRangeFor(phi_vreg); 1297 auto live_range = LiveRangeFor(phi_vreg);
1384 int gap_index = block->first_instruction_index(); 1298 int gap_index = block->first_instruction_index();
1385 live_range->SpillAtDefinition(local_zone(), gap_index, &output); 1299 live_range->SpillAtDefinition(allocation_zone(), gap_index, &output);
1386 live_range->SetSpillStartIndex(gap_index); 1300 live_range->SetSpillStartIndex(gap_index);
1387 // We use the phi-ness of some nodes in some later heuristics. 1301 // We use the phi-ness of some nodes in some later heuristics.
1388 live_range->set_is_phi(true); 1302 live_range->set_is_phi(true);
1389 live_range->set_is_non_loop_phi(!block->IsLoopHeader()); 1303 live_range->set_is_non_loop_phi(!block->IsLoopHeader());
1390 } 1304 }
1391 } 1305 }
1392 1306
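Note: ResolvePhis lowers each phi to ordinary data movement: operand i becomes a gap move at the END position of the i-th predecessor's last instruction, copying that operand into the phi's output. A toy version of the per-phi loop (simplified types, not the V8 API):

#include <cstddef>
#include <vector>

struct Move { int from, to; };

// end_gap_moves[i] holds the moves at the END gap of instruction i.
void ResolvePhi(const std::vector<int>& predecessor_last_instr,
                int phi_output, const std::vector<int>& phi_operands,
                std::vector<std::vector<Move>>* end_gap_moves) {
  for (std::size_t i = 0; i < phi_operands.size(); ++i) {
    int gap_instr = predecessor_last_instr[i];
    // The value flowing in along edge i is copied into the phi's home.
    (*end_gap_moves)[gap_instr].push_back({phi_operands[i], phi_output});
  }
}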
1393 1307
1394 void RegisterAllocator::AssignPhiInput(LiveRange* range, 1308 void LiveRangeBuilder::MeetRegisterConstraints() {
1395 const InstructionOperand& assignment) {
1396 DCHECK(range->is_phi());
1397 auto it = phi_map_.find(range->id());
1398 DCHECK(it != phi_map_.end());
1399 for (auto move : it->second->incoming_moves) {
1400 move->set_destination(assignment);
1401 }
1402 }
1403
1404
1405 void RegisterAllocator::MeetRegisterConstraints() {
1406 for (auto block : code()->instruction_blocks()) { 1309 for (auto block : code()->instruction_blocks()) {
1407 MeetRegisterConstraints(block); 1310 MeetRegisterConstraints(block);
1408 } 1311 }
1409 } 1312 }
1410 1313
1411 1314
1412 void RegisterAllocator::ResolvePhis() { 1315 void LiveRangeBuilder::ResolvePhis() {
1413 // Process the blocks in reverse order. 1316 // Process the blocks in reverse order.
1414 for (auto i = code()->instruction_blocks().rbegin(); 1317 for (auto i = code()->instruction_blocks().rbegin();
1415 i != code()->instruction_blocks().rend(); ++i) { 1318 i != code()->instruction_blocks().rend(); ++i) {
1416 ResolvePhis(*i); 1319 ResolvePhis(*i);
1417 } 1320 }
1418 } 1321 }
1419 1322
1420 1323
1421 const InstructionBlock* RegisterAllocator::GetInstructionBlock( 1324 void LiveRangeBuilder::BuildLiveRanges() {
1422 LifetimePosition pos) {
1423 return code()->GetInstructionBlock(pos.ToInstructionIndex());
1424 }
1425
1426
1427 void RegisterAllocator::ConnectRanges() {
1428 ZoneMap<std::pair<ParallelMove*, InstructionOperand>, InstructionOperand>
1429 delayed_insertion_map(local_zone());
1430 for (auto first_range : live_ranges()) {
1431 if (first_range == nullptr || first_range->IsChild()) continue;
1432 for (auto second_range = first_range->next(); second_range != nullptr;
1433 first_range = second_range, second_range = second_range->next()) {
1434 auto pos = second_range->Start();
1435 // Add gap move if the two live ranges touch and there is no block
1436 // boundary.
1437 if (second_range->IsSpilled()) continue;
1438 if (first_range->End().Value() != pos.Value()) continue;
1439 if (IsBlockBoundary(pos) &&
1440 !CanEagerlyResolveControlFlow(GetInstructionBlock(pos))) {
1441 continue;
1442 }
1443 auto prev_operand = first_range->GetAssignedOperand();
1444 auto cur_operand = second_range->GetAssignedOperand();
1445 if (prev_operand == cur_operand) continue;
1446 bool delay_insertion = false;
1447 Instruction::GapPosition gap_pos;
1448 int gap_index = pos.ToInstructionIndex();
1449 if (pos.IsGapPosition()) {
1450 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
1451 } else {
1452 if (pos.IsStart()) {
1453 delay_insertion = true;
1454 } else {
1455 gap_index++;
1456 }
1457 gap_pos = delay_insertion ? Instruction::END : Instruction::START;
1458 }
1459 auto move = code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
1460 gap_pos, code_zone());
1461 if (!delay_insertion) {
1462 move->AddMove(prev_operand, cur_operand);
1463 } else {
1464 delayed_insertion_map.insert(
1465 std::make_pair(std::make_pair(move, prev_operand), cur_operand));
1466 }
1467 }
1468 }
1469 if (delayed_insertion_map.empty()) return;
1470 // Insert all the moves which should occur after the stored move.
1471 ZoneVector<MoveOperands*> to_insert(local_zone());
1472 ZoneVector<MoveOperands*> to_eliminate(local_zone());
1473 to_insert.reserve(4);
1474 to_eliminate.reserve(4);
1475 auto moves = delayed_insertion_map.begin()->first.first;
1476 for (auto it = delayed_insertion_map.begin();; ++it) {
1477 bool done = it == delayed_insertion_map.end();
1478 if (done || it->first.first != moves) {
1479 // Commit the MoveOperands for the current ParallelMove.
1480 for (auto move : to_eliminate) {
1481 move->Eliminate();
1482 }
1483 for (auto move : to_insert) {
1484 moves->push_back(move);
1485 }
1486 if (done) break;
1487 // Reset state.
1488 to_eliminate.clear();
1489 to_insert.clear();
1490 moves = it->first.first;
1491 }
1492 // Gather all MoveOperands for a single ParallelMove.
1493 auto move = new (code_zone()) MoveOperands(it->first.second, it->second);
1494 auto eliminate = moves->PrepareInsertAfter(move);
1495 to_insert.push_back(move);
1496 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
1497 }
1498 }
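
ConnectRanges above defers some insertions because adding moves to a ParallelMove that PrepareInsertAfter is still scanning would let the new moves interfere with the scan; the map groups pending moves by their target ParallelMove and commits each group at once. A minimal sketch of that two-phase pattern, where Operand and ParallelMove are simplified stand-ins rather than the V8 types:

// Sketch: two-phase commit of pending moves, grouped by their target move
// list, mirroring the delayed_insertion_map walk above.
#include <cstdio>
#include <map>
#include <utility>
#include <vector>

using Operand = int;
using ParallelMove = std::vector<std::pair<Operand, Operand>>;

int main() {
  ParallelMove pm1, pm2;
  // Key: (target move list, source operand); value: destination operand.
  std::map<std::pair<ParallelMove*, Operand>, Operand> delayed;
  delayed[{&pm1, 0}] = 1;
  delayed[{&pm1, 2}] = 3;
  delayed[{&pm2, 4}] = 5;

  std::vector<std::pair<Operand, Operand>> to_insert;
  auto* moves = delayed.begin()->first.first;
  for (auto it = delayed.begin();; ++it) {
    bool done = it == delayed.end();
    if (done || it->first.first != moves) {
      // Commit the batch for the current move list before switching groups.
      for (const auto& m : to_insert) moves->push_back(m);
      if (done) break;
      to_insert.clear();
      moves = it->first.first;
    }
    to_insert.push_back({it->first.second, it->second});
  }
  std::printf("pm1 has %zu moves, pm2 has %zu moves\n", pm1.size(), pm2.size());
}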
1499
1500
1501 bool RegisterAllocator::CanEagerlyResolveControlFlow(
1502 const InstructionBlock* block) const {
1503 if (block->PredecessorCount() != 1) return false;
1504 return block->predecessors()[0].IsNext(block->rpo_number());
1505 }
1506
1507
1508 namespace {
1509
1510 class LiveRangeBound {
1511 public:
1512 explicit LiveRangeBound(const LiveRange* range)
1513 : range_(range), start_(range->Start()), end_(range->End()) {
1514 DCHECK(!range->IsEmpty());
1515 }
1516
1517 bool CanCover(LifetimePosition position) {
1518 return start_.Value() <= position.Value() &&
1519 position.Value() < end_.Value();
1520 }
1521
1522 const LiveRange* const range_;
1523 const LifetimePosition start_;
1524 const LifetimePosition end_;
1525
1526 private:
1527 DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
1528 };
1529
1530
1531 struct FindResult {
1532 const LiveRange* cur_cover_;
1533 const LiveRange* pred_cover_;
1534 };
1535
1536
1537 class LiveRangeBoundArray {
1538 public:
1539 LiveRangeBoundArray() : length_(0), start_(nullptr) {}
1540
1541 bool ShouldInitialize() { return start_ == nullptr; }
1542
1543 void Initialize(Zone* zone, const LiveRange* const range) {
1544 size_t length = 0;
1545 for (auto i = range; i != nullptr; i = i->next()) length++;
1546 start_ = zone->NewArray<LiveRangeBound>(length);
1547 length_ = length;
1548 auto curr = start_;
1549 for (auto i = range; i != nullptr; i = i->next(), ++curr) {
1550 new (curr) LiveRangeBound(i);
1551 }
1552 }
1553
1554 LiveRangeBound* Find(const LifetimePosition position) const {
1555 size_t left_index = 0;
1556 size_t right_index = length_;
1557 while (true) {
1558 size_t current_index = left_index + (right_index - left_index) / 2;
1559 DCHECK(right_index > current_index);
1560 auto bound = &start_[current_index];
1561 if (bound->start_.Value() <= position.Value()) {
1562 if (position.Value() < bound->end_.Value()) return bound;
1563 DCHECK(left_index < current_index);
1564 left_index = current_index;
1565 } else {
1566 right_index = current_index;
1567 }
1568 }
1569 }
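
Find above is a binary search that relies on two invariants: the bounds of a live range's children are sorted by start position and pairwise disjoint, and the queried position is covered by one of them; without the coverage precondition the loop would not terminate. A self-contained sketch under those same assumptions, with an illustrative Bound struct:

// Sketch: binary search over sorted, pairwise-disjoint [start, end) bounds.
// Like the code above, it assumes some bound covers the queried position;
// otherwise the loop would not terminate.
#include <cassert>
#include <cstdio>
#include <vector>

struct Bound { int start, end; };  // covers [start, end)

const Bound* Find(const std::vector<Bound>& bounds, int pos) {
  size_t left = 0;
  size_t right = bounds.size();
  while (true) {
    size_t mid = left + (right - left) / 2;
    const Bound& b = bounds[mid];
    if (b.start <= pos) {
      if (pos < b.end) return &b;  // covered: done
      left = mid;                  // pos lies to the right of this bound
    } else {
      right = mid;                 // pos lies to the left of this bound
    }
  }
}

int main() {
  std::vector<Bound> bounds = {{0, 4}, {6, 9}, {12, 20}};
  assert(Find(bounds, 7)->start == 6);
  const Bound* b = Find(bounds, 13);
  std::printf("found [%d, %d)\n", b->start, b->end);
}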
1570
1571 LiveRangeBound* FindPred(const InstructionBlock* pred) {
1572 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
1573 pred->last_instruction_index());
1574 return Find(pred_end);
1575 }
1576
1577 LiveRangeBound* FindSucc(const InstructionBlock* succ) {
1578 auto succ_start = LifetimePosition::GapFromInstructionIndex(
1579 succ->first_instruction_index());
1580 return Find(succ_start);
1581 }
1582
1583 void Find(const InstructionBlock* block, const InstructionBlock* pred,
1584 FindResult* result) const {
1585 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
1586 pred->last_instruction_index());
1587 auto bound = Find(pred_end);
1588 result->pred_cover_ = bound->range_;
1589 auto cur_start = LifetimePosition::GapFromInstructionIndex(
1590 block->first_instruction_index());
1591 // Common case.
1592 if (bound->CanCover(cur_start)) {
1593 result->cur_cover_ = bound->range_;
1594 return;
1595 }
1596 result->cur_cover_ = Find(cur_start)->range_;
1597 DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
1598 }
1599
1600 private:
1601 size_t length_;
1602 LiveRangeBound* start_;
1603
1604 DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
1605 };
1606
1607
1608 class LiveRangeFinder {
1609 public:
1610 explicit LiveRangeFinder(const RegisterAllocator& allocator)
1611 : allocator_(allocator),
1612 bounds_length_(static_cast<int>(allocator.live_ranges().size())),
1613 bounds_(allocator.local_zone()->NewArray<LiveRangeBoundArray>(
1614 bounds_length_)) {
1615 for (int i = 0; i < bounds_length_; ++i) {
1616 new (&bounds_[i]) LiveRangeBoundArray();
1617 }
1618 }
1619
1620 LiveRangeBoundArray* ArrayFor(int operand_index) {
1621 DCHECK(operand_index < bounds_length_);
1622 auto range = allocator_.live_ranges()[operand_index];
1623 DCHECK(range != nullptr && !range->IsEmpty());
1624 auto array = &bounds_[operand_index];
1625 if (array->ShouldInitialize()) {
1626 array->Initialize(allocator_.local_zone(), range);
1627 }
1628 return array;
1629 }
1630
1631 private:
1632 const RegisterAllocator& allocator_;
1633 const int bounds_length_;
1634 LiveRangeBoundArray* const bounds_;
1635
1636 DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
1637 };
1638
1639 } // namespace
1640
1641
1642 void RegisterAllocator::ResolveControlFlow() {
1643 // Lazily linearize live ranges in memory for fast lookup.
1644 LiveRangeFinder finder(*this);
1645 for (auto block : code()->instruction_blocks()) {
1646 if (CanEagerlyResolveControlFlow(block)) continue;
1647 auto live = live_in_sets_[block->rpo_number().ToInt()];
1648 BitVector::Iterator iterator(live);
1649 while (!iterator.Done()) {
1650 auto* array = finder.ArrayFor(iterator.Current());
1651 for (auto pred : block->predecessors()) {
1652 FindResult result;
1653 const auto* pred_block = code()->InstructionBlockAt(pred);
1654 array->Find(block, pred_block, &result);
1655 if (result.cur_cover_ == result.pred_cover_ ||
1656 result.cur_cover_->IsSpilled())
1657 continue;
1658 auto pred_op = result.pred_cover_->GetAssignedOperand();
1659 auto cur_op = result.cur_cover_->GetAssignedOperand();
1660 if (pred_op == cur_op) continue;
1661 ResolveControlFlow(block, cur_op, pred_block, pred_op);
1662 }
1663 iterator.Advance();
1664 }
1665 }
1666 }
1667
1668
1669 void RegisterAllocator::ResolveControlFlow(const InstructionBlock* block,
1670 const InstructionOperand& cur_op,
1671 const InstructionBlock* pred,
1672 const InstructionOperand& pred_op) {
1673 DCHECK(pred_op != cur_op);
1674 int gap_index;
1675 Instruction::GapPosition position;
1676 if (block->PredecessorCount() == 1) {
1677 gap_index = block->first_instruction_index();
1678 position = Instruction::START;
1679 } else {
1680 DCHECK(pred->SuccessorCount() == 1);
1681 DCHECK(!InstructionAt(pred->last_instruction_index())->HasReferenceMap());
1682 gap_index = pred->last_instruction_index();
1683 position = Instruction::END;
1684 }
1685 AddGapMove(gap_index, position, pred_op, cur_op);
1686 }
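
The placement rule above works because the instruction graph has no critical edges at this point: an edge that needs a resolution move always has either a unique predecessor side (insert in the block's START gap) or a unique successor side (insert in the predecessor's END gap). A small sketch of just that decision, with an illustrative Block struct:

// Sketch: where a control-flow resolution move is inserted. Assumes
// critical edges were split earlier, so one side of the edge is unique.
#include <cassert>
#include <cstdio>
#include <utility>

struct Block { int first_index; int last_index; int preds; int succs; };

// Returns the instruction index plus whether the move belongs in the
// block's START gap (true) or the predecessor's END gap (false).
std::pair<int, bool> PlaceResolutionMove(const Block& block,
                                         const Block& pred) {
  if (block.preds == 1) return {block.first_index, true};
  assert(pred.succs == 1);  // the edge is unique on the predecessor side
  return {pred.last_index, false};
}

int main() {
  Block merge{10, 14, 2, 1};
  Block pred{4, 9, 1, 1};
  auto [index, at_start] = PlaceResolutionMove(merge, pred);
  std::printf("insert at %d (%s)\n", index, at_start ? "START" : "END");
}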
1687
1688
1689 void RegisterAllocator::BuildLiveRanges() {
1690 // Process the blocks in reverse order. 1325 // Process the blocks in reverse order.
1691 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0; 1326 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
1692 --block_id) { 1327 --block_id) {
1693 auto block = code()->InstructionBlockAt(RpoNumber::FromInt(block_id)); 1328 auto block = code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
1694 auto live = ComputeLiveOut(block); 1329 auto live = ComputeLiveOut(block);
1695 // Initially consider all live_out values live for the entire block. We 1330 // Initially consider all live_out values live for the entire block. We
1696 // will shorten these intervals if necessary. 1331 // will shorten these intervals if necessary.
1697 AddInitialIntervals(block, live); 1332 AddInitialIntervals(block, live);
1698 1333
1699 // Process the instructions in reverse order, generating and killing 1334 // Process the instructions in reverse order, generating and killing
(...skipping 17 matching lines...) Expand all
1717 } 1352 }
1718 } 1353 }
1719 DCHECK(hint != nullptr); 1354 DCHECK(hint != nullptr);
1720 auto block_start = LifetimePosition::GapFromInstructionIndex( 1355 auto block_start = LifetimePosition::GapFromInstructionIndex(
1721 block->first_instruction_index()); 1356 block->first_instruction_index());
1722 Define(block_start, &phi->output(), hint); 1357 Define(block_start, &phi->output(), hint);
1723 } 1358 }
1724 1359
1725 // Now live is live_in for this block, except that it does not include 1360 // Now live is live_in for this block, except that it does not include
1726 // values live out on backward successor edges. 1361 // values live out on backward successor edges.
1727 live_in_sets_[block_id] = live; 1362 live_in_sets()[block_id] = live;
1728 1363
1729 if (block->IsLoopHeader()) { 1364 if (block->IsLoopHeader()) {
1730 // Add a live range stretching from the first loop instruction to the last 1365 // Add a live range stretching from the first loop instruction to the last
1731 // for each value live on entry to the header. 1366 // for each value live on entry to the header.
1732 BitVector::Iterator iterator(live); 1367 BitVector::Iterator iterator(live);
1733 auto start = LifetimePosition::GapFromInstructionIndex( 1368 auto start = LifetimePosition::GapFromInstructionIndex(
1734 block->first_instruction_index()); 1369 block->first_instruction_index());
1735 auto end = LifetimePosition::GapFromInstructionIndex( 1370 auto end = LifetimePosition::GapFromInstructionIndex(
1736 code()->LastLoopInstructionIndex(block)).NextFullStart(); 1371 code()->LastLoopInstructionIndex(block)).NextFullStart();
1737 while (!iterator.Done()) { 1372 while (!iterator.Done()) {
1738 int operand_index = iterator.Current(); 1373 int operand_index = iterator.Current();
1739 auto range = LiveRangeFor(operand_index); 1374 auto range = LiveRangeFor(operand_index);
1740 range->EnsureInterval(start, end, local_zone()); 1375 range->EnsureInterval(start, end, allocation_zone());
1741 iterator.Advance(); 1376 iterator.Advance();
1742 } 1377 }
1743 // Insert all values into the live in sets of all blocks in the loop. 1378 // Insert all values into the live in sets of all blocks in the loop.
1744 for (int i = block->rpo_number().ToInt() + 1; 1379 for (int i = block->rpo_number().ToInt() + 1;
1745 i < block->loop_end().ToInt(); ++i) { 1380 i < block->loop_end().ToInt(); ++i) {
1746 live_in_sets_[i]->Union(*live); 1381 live_in_sets()[i]->Union(*live);
1747 } 1382 }
1748 } 1383 }
1749 } 1384 }
1750 1385
1751 for (auto range : live_ranges()) { 1386 for (auto range : live_ranges()) {
1752 if (range == nullptr) continue; 1387 if (range == nullptr) continue;
1753 range->kind_ = RequiredRegisterKind(range->id()); 1388 range->set_kind(RequiredRegisterKind(range->id()));
1754 // Give slots to all ranges with a non fixed slot use. 1389 // Give slots to all ranges with a non fixed slot use.
1755 if (range->has_slot_use() && range->HasNoSpillType()) { 1390 if (range->has_slot_use() && range->HasNoSpillType()) {
1756 AssignSpillRangeToLiveRange(range); 1391 data()->AssignSpillRangeToLiveRange(range);
1757 } 1392 }
1758 // TODO(bmeurer): This is a horrible hack to make sure that for constant 1393 // TODO(bmeurer): This is a horrible hack to make sure that for constant
1759 // live ranges, every use requires the constant to be in a register. 1394 // live ranges, every use requires the constant to be in a register.
1760 // Without this hack, all uses with "any" policy would get the constant 1395 // Without this hack, all uses with "any" policy would get the constant
1761 // operand assigned. 1396 // operand assigned.
1762 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) { 1397 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
1763 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next_) { 1398 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next_) {
1764 if (pos->type() == UsePositionType::kRequiresSlot) continue; 1399 if (pos->type() == UsePositionType::kRequiresSlot) continue;
1765 UsePositionType new_type = UsePositionType::kAny; 1400 UsePositionType new_type = UsePositionType::kAny;
1766 // Can't mark phis as needing a register. 1401 // Can't mark phis as needing a register.
1767 if (!pos->pos().IsGapPosition()) { 1402 if (!pos->pos().IsGapPosition()) {
1768 new_type = UsePositionType::kRequiresRegister; 1403 new_type = UsePositionType::kRequiresRegister;
1769 } 1404 }
1770 pos->set_type(new_type, true); 1405 pos->set_type(new_type, true);
1771 } 1406 }
1772 } 1407 }
1773 } 1408 }
1409
1410 #ifdef DEBUG
1411 Verify();
1412 #endif
1413 }
1414
1415
1416 RegisterKind LiveRangeBuilder::RequiredRegisterKind(
1417 int virtual_register) const {
1418 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS
1419 : GENERAL_REGISTERS;
1420 }
1421
1422
1423 void LiveRangeBuilder::Verify() const {
1424 for (auto current : data()->live_ranges()) {
1425 if (current != nullptr) current->Verify();
1426 }
1774 } 1427 }
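
The loop-header case in BuildLiveRanges is the conservative step: every value live on entry to the loop is kept live across the whole loop body, and then unioned into the live-in set of each block inside the loop. A sketch of that widening, with std::set standing in for BitVector and illustrative block indices:

// Sketch: the loop-header widening step from BuildLiveRanges.
#include <cstdio>
#include <set>
#include <vector>

int main() {
  // live_in[i] for blocks in RPO order; block 1 is a loop header and the
  // loop body spans blocks [1, 4).
  std::vector<std::set<int>> live_in(5);
  std::set<int> live_at_header = {7, 9};  // values live on entry to the loop
  int header = 1, loop_end = 4;

  // Every value live at the header is kept live through the whole loop...
  live_in[header] = live_at_header;
  // ...and unioned into the live-in set of every block inside the loop.
  for (int i = header + 1; i < loop_end; ++i)
    live_in[i].insert(live_at_header.begin(), live_at_header.end());

  std::printf("block 3 live-in size: %zu\n", live_in[3].size());
}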
1775 1428
1776 1429
1777 bool RegisterAllocator::ExistsUseWithoutDefinition() { 1430 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
1778 bool found = false; 1431 RegisterKind kind)
1779 BitVector::Iterator iterator(live_in_sets_[0]); 1432 : data_(data),
1780 while (!iterator.Done()) { 1433 mode_(kind),
1781 found = true; 1434 num_registers_(kind == DOUBLE_REGISTERS
1782 int operand_index = iterator.Current(); 1435 ? config()->num_aliased_double_registers()
1783 PrintF("Register allocator error: live v%d reached first block.\n", 1436 : config()->num_general_registers()),
1784 operand_index); 1437 unhandled_live_ranges_(allocation_zone()),
1785 LiveRange* range = LiveRangeFor(operand_index); 1438 active_live_ranges_(allocation_zone()),
1786 PrintF(" (first use is at %d)\n", range->first_pos()->pos().Value()); 1439 inactive_live_ranges_(allocation_zone()) {
1787 if (debug_name() == nullptr) { 1440 unhandled_live_ranges().reserve(
1788 PrintF("\n"); 1441 static_cast<size_t>(code()->VirtualRegisterCount() * 2));
1789 } else { 1442 active_live_ranges().reserve(8);
1790 PrintF(" (function: %s)\n", debug_name()); 1443 inactive_live_ranges().reserve(8);
1791 } 1444 // TryAllocateFreeReg and AllocateBlockedReg assume this
1792 iterator.Advance(); 1445 // when allocating local arrays.
1793 } 1446 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >=
1794 return found; 1447 config()->num_general_registers());
1795 } 1448 }
1796 1449
1797 1450
1798 bool RegisterAllocator::SafePointsAreInOrder() const { 1451 void LinearScanAllocator::AllocateRegisters() {
1799 int safe_point = 0;
1800 for (auto map : *code()->reference_maps()) {
1801 if (safe_point > map->instruction_position()) return false;
1802 safe_point = map->instruction_position();
1803 }
1804 return true;
1805 }
1806
1807
1808 void RegisterAllocator::PopulateReferenceMaps() {
1809 DCHECK(SafePointsAreInOrder());
1810
1811 // Iterate over all safe point positions and record a pointer
1812 // for all spilled live ranges at this point.
1813 int last_range_start = 0;
1814 auto reference_maps = code()->reference_maps();
1815 ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
1816 for (LiveRange* range : live_ranges()) {
1817 if (range == nullptr) continue;
1818 // Iterate over the first parts of multi-part live ranges.
1819 if (range->IsChild()) continue;
1820 // Skip non-reference values.
1821 if (!HasTaggedValue(range->id())) continue;
1822 // Skip empty live ranges.
1823 if (range->IsEmpty()) continue;
1824
1825 // Find the extent of the range and its children.
1826 int start = range->Start().ToInstructionIndex();
1827 int end = 0;
1828 for (auto cur = range; cur != nullptr; cur = cur->next()) {
1829 auto this_end = cur->End();
1830 if (this_end.ToInstructionIndex() > end)
1831 end = this_end.ToInstructionIndex();
1832 DCHECK(cur->Start().ToInstructionIndex() >= start);
1833 }
1834
1835 // Most of the ranges are in order, but not all. Keep an eye on when they
1836 // step backwards and reset the first_it so we don't miss any safe points.
1837 if (start < last_range_start) first_it = reference_maps->begin();
1838 last_range_start = start;
1839
1840 // Step across all the safe points that are before the start of this range,
1841 // recording how far we step in order to save doing this for the next range.
1842 for (; first_it != reference_maps->end(); ++first_it) {
1843 auto map = *first_it;
1844 if (map->instruction_position() >= start) break;
1845 }
1846
1847 // Step through the safe points to see whether they are in the range.
1848 for (auto it = first_it; it != reference_maps->end(); ++it) {
1849 auto map = *it;
1850 int safe_point = map->instruction_position();
1851
1852 // The safe points are sorted so we can stop searching here.
1853 if (safe_point - 1 > end) break;
1854
1855 // Advance to the next active range that covers the current
1856 // safe point position.
1857 auto safe_point_pos =
1858 LifetimePosition::InstructionFromInstructionIndex(safe_point);
1859 auto cur = range;
1860 while (cur != nullptr && !cur->Covers(safe_point_pos)) {
1861 cur = cur->next();
1862 }
1863 if (cur == nullptr) continue;
1864
1865 // Check if the live range is spilled and the safe point is after
1866 // the spill position.
1867 if (range->HasSpillOperand() &&
1868 safe_point >= range->spill_start_index() &&
1869 !range->GetSpillOperand()->IsConstant()) {
1870 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
1871 range->id(), range->spill_start_index(), safe_point);
1872 map->RecordReference(*range->GetSpillOperand());
1873 }
1874
1875 if (!cur->IsSpilled()) {
1876 TRACE(
1877 "Pointer in register for range %d (start at %d) "
1878 "at safe point %d\n",
1879 cur->id(), cur->Start().Value(), safe_point);
1880 auto operand = cur->GetAssignedOperand();
1881 DCHECK(!operand.IsStackSlot());
1882 map->RecordReference(operand);
1883 }
1884 }
1885 }
1886 }
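
PopulateReferenceMaps is effectively a merge-style sweep over two sorted sequences: the reference maps are sorted by instruction position, and the range starts are mostly sorted, so first_it only needs to rewind when a range starts earlier than its predecessor did. A sketch of that resumable sweep over plain integer positions (all values illustrative):

// Sketch: sweeping sorted safe points against mostly-sorted range extents
// with a resumable iterator that only rewinds on out-of-order starts.
#include <cstdio>
#include <utility>
#include <vector>

int main() {
  std::vector<int> safe_points = {2, 5, 9, 14};  // sorted by position
  // (start, end) extents of ranges, mostly in increasing start order.
  std::vector<std::pair<int, int>> ranges = {{1, 6}, {4, 10}, {3, 15}};

  size_t first_it = 0;
  int last_start = 0;
  for (auto [start, end] : ranges) {
    if (start < last_start) first_it = 0;  // rare backwards step: rewind
    last_start = start;
    // Skip safe points before this range; the next range resumes here.
    while (first_it < safe_points.size() && safe_points[first_it] < start)
      ++first_it;
    for (size_t it = first_it; it < safe_points.size(); ++it) {
      if (safe_points[it] - 1 > end) break;  // sorted: nothing later matches
      std::printf("range [%d,%d] visits safe point %d\n", start, end,
                  safe_points[it]);
    }
  }
}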
1887
1888
1889 void RegisterAllocator::AllocateGeneralRegisters() {
1890 num_registers_ = config()->num_general_registers();
1891 mode_ = GENERAL_REGISTERS;
1892 AllocateRegisters();
1893 }
1894
1895
1896 void RegisterAllocator::AllocateDoubleRegisters() {
1897 num_registers_ = config()->num_aliased_double_registers();
1898 mode_ = DOUBLE_REGISTERS;
1899 AllocateRegisters();
1900 }
1901
1902
1903 void RegisterAllocator::AllocateRegisters() {
1904 DCHECK(unhandled_live_ranges().empty()); 1452 DCHECK(unhandled_live_ranges().empty());
1905 1453
1906 for (auto range : live_ranges()) { 1454 for (auto range : live_ranges()) {
1907 if (range == nullptr) continue; 1455 if (range == nullptr) continue;
1908 if (range->Kind() == mode_) { 1456 if (range->Kind() == mode_) {
1909 AddToUnhandledUnsorted(range); 1457 AddToUnhandledUnsorted(range);
1910 } 1458 }
1911 } 1459 }
1912 SortUnhandled(); 1460 SortUnhandled();
1913 DCHECK(UnhandledIsSorted()); 1461 DCHECK(UnhandledIsSorted());
1914 1462
1915 DCHECK(active_live_ranges().empty()); 1463 DCHECK(active_live_ranges().empty());
1916 DCHECK(inactive_live_ranges().empty()); 1464 DCHECK(inactive_live_ranges().empty());
1917 1465
1918 if (mode_ == DOUBLE_REGISTERS) { 1466 if (mode_ == DOUBLE_REGISTERS) {
1919 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { 1467 for (auto current : fixed_double_live_ranges()) {
1920 auto current = fixed_double_live_ranges()[i]; 1468 if (current != nullptr) AddToInactive(current);
1921 if (current != nullptr) {
1922 AddToInactive(current);
1923 }
1924 } 1469 }
1925 } else { 1470 } else {
1926 DCHECK(mode_ == GENERAL_REGISTERS); 1471 DCHECK(mode_ == GENERAL_REGISTERS);
1927 for (auto current : fixed_live_ranges()) { 1472 for (auto current : fixed_live_ranges()) {
1928 if (current != nullptr) { 1473 if (current != nullptr) AddToInactive(current);
1929 AddToInactive(current);
1930 }
1931 } 1474 }
1932 } 1475 }
1933 1476
1934 while (!unhandled_live_ranges().empty()) { 1477 while (!unhandled_live_ranges().empty()) {
1935 DCHECK(UnhandledIsSorted()); 1478 DCHECK(UnhandledIsSorted());
1936 auto current = unhandled_live_ranges().back(); 1479 auto current = unhandled_live_ranges().back();
1937 unhandled_live_ranges().pop_back(); 1480 unhandled_live_ranges().pop_back();
1938 DCHECK(UnhandledIsSorted()); 1481 DCHECK(UnhandledIsSorted());
1939 auto position = current->Start(); 1482 auto position = current->Start();
1940 #ifdef DEBUG 1483 #ifdef DEBUG
(...skipping 53 matching lines...)
1994 if (current->HasRegisterAssigned()) { 1537 if (current->HasRegisterAssigned()) {
1995 AddToActive(current); 1538 AddToActive(current);
1996 } 1539 }
1997 } 1540 }
1998 1541
1999 active_live_ranges().clear(); 1542 active_live_ranges().clear();
2000 inactive_live_ranges().clear(); 1543 inactive_live_ranges().clear();
2001 } 1544 }
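
AllocateRegisters is the standard linear-scan driver: unhandled ranges are sorted by descending start so the next range to process pops off the back, and at each start position finished active ranges retire (the retirement and the active/inactive shuffling sit in the elided lines above). A compressed sketch of the driver shape, with the actual allocation decision deliberately elided:

// Sketch: the linear-scan driver loop over the unhandled/active sets.
#include <algorithm>
#include <cstdio>
#include <vector>

struct Range { int start, end, reg = -1; };

int main() {
  std::vector<Range> unhandled = {{0, 5}, {2, 8}, {6, 9}};
  // Descending start: the next range to process sits at the back.
  std::sort(unhandled.begin(), unhandled.end(),
            [](const Range& a, const Range& b) { return a.start > b.start; });
  std::vector<Range> active;
  int next_reg = 0;
  while (!unhandled.empty()) {
    Range current = unhandled.back();
    unhandled.pop_back();
    // Retire active ranges that ended at or before the current position.
    active.erase(std::remove_if(active.begin(), active.end(),
                                [&](const Range& r) {
                                  return r.end <= current.start;
                                }),
                 active.end());
    current.reg = next_reg++;  // allocation decision elided
    active.push_back(current);
    std::printf("range [%d,%d) -> r%d, %zu active\n", current.start,
                current.end, current.reg, active.size());
  }
}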
2002 1545
2003 1546
2004 const char* RegisterAllocator::RegisterName(int allocation_index) { 1547 const char* LinearScanAllocator::RegisterName(int allocation_index) {
2005 if (mode_ == GENERAL_REGISTERS) { 1548 if (mode_ == GENERAL_REGISTERS) {
2006 return config()->general_register_name(allocation_index); 1549 return config()->general_register_name(allocation_index);
2007 } else { 1550 } else {
2008 return config()->double_register_name(allocation_index); 1551 return config()->double_register_name(allocation_index);
2009 } 1552 }
2010 } 1553 }
2011 1554
2012 1555
2013 bool RegisterAllocator::HasTaggedValue(int virtual_register) const { 1556 void LinearScanAllocator::AddToActive(LiveRange* range) {
2014 return code()->IsReference(virtual_register);
2015 }
2016
2017
2018 RegisterKind RegisterAllocator::RequiredRegisterKind(
2019 int virtual_register) const {
2020 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS
2021 : GENERAL_REGISTERS;
2022 }
2023
2024
2025 void RegisterAllocator::AddToActive(LiveRange* range) {
2026 TRACE("Add live range %d to active\n", range->id()); 1557 TRACE("Add live range %d to active\n", range->id());
2027 active_live_ranges().push_back(range); 1558 active_live_ranges().push_back(range);
2028 } 1559 }
2029 1560
2030 1561
2031 void RegisterAllocator::AddToInactive(LiveRange* range) { 1562 void LinearScanAllocator::AddToInactive(LiveRange* range) {
2032 TRACE("Add live range %d to inactive\n", range->id()); 1563 TRACE("Add live range %d to inactive\n", range->id());
2033 inactive_live_ranges().push_back(range); 1564 inactive_live_ranges().push_back(range);
2034 } 1565 }
2035 1566
2036 1567
2037 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { 1568 void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
2038 if (range == nullptr || range->IsEmpty()) return; 1569 if (range == nullptr || range->IsEmpty()) return;
2039 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); 1570 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
2040 DCHECK(allocation_finger_.Value() <= range->Start().Value()); 1571 DCHECK(allocation_finger_.Value() <= range->Start().Value());
2041 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; 1572 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0;
2042 --i) { 1573 --i) {
2043 auto cur_range = unhandled_live_ranges().at(i); 1574 auto cur_range = unhandled_live_ranges().at(i);
2044 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; 1575 if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
2045 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1); 1576 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1);
2046 auto it = unhandled_live_ranges().begin() + (i + 1); 1577 auto it = unhandled_live_ranges().begin() + (i + 1);
2047 unhandled_live_ranges().insert(it, range); 1578 unhandled_live_ranges().insert(it, range);
2048 DCHECK(UnhandledIsSorted()); 1579 DCHECK(UnhandledIsSorted());
2049 return; 1580 return;
2050 } 1581 }
2051 TRACE("Add live range %d to unhandled at start\n", range->id()); 1582 TRACE("Add live range %d to unhandled at start\n", range->id());
2052 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); 1583 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
2053 DCHECK(UnhandledIsSorted()); 1584 DCHECK(UnhandledIsSorted());
2054 } 1585 }
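
AddToUnhandledSorted scans from the back because a newly split range usually starts near the current allocation position, which lives at the back of the descending-by-start vector. A sketch of that back-to-front insertion, where Range is an illustrative stand-in ordered by start only, without the real tie-breakers of ShouldBeAllocatedBefore:

// Sketch: insertion into a vector kept sorted by descending start position,
// scanning from the back, where the soonest-to-start ranges live.
#include <cstdio>
#include <vector>

struct Range { int start; };

void AddSorted(std::vector<Range>& v, Range r) {
  for (int i = static_cast<int>(v.size()) - 1; i >= 0; --i) {
    if (r.start >= v[i].start) continue;  // r is not due before v[i]
    v.insert(v.begin() + (i + 1), r);     // insert just behind v[i]
    return;
  }
  v.insert(v.begin(), r);  // r starts latest of all: goes to the front
}

int main() {
  std::vector<Range> v = {{9}, {6}, {3}};  // back pops first
  AddSorted(v, {5});
  for (const Range& r : v) std::printf("%d ", r.start);  // 9 6 5 3
  std::printf("\n");
}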
2055 1586
2056 1587
2057 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { 1588 void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
2058 if (range == nullptr || range->IsEmpty()) return; 1589 if (range == nullptr || range->IsEmpty()) return;
2059 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); 1590 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
2060 TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); 1591 TRACE("Add live range %d to unhandled unsorted at end\n", range->id());
2061 unhandled_live_ranges().push_back(range); 1592 unhandled_live_ranges().push_back(range);
2062 } 1593 }
2063 1594
2064 1595
2065 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { 1596 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
2066 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); 1597 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
2067 if (a->ShouldBeAllocatedBefore(b)) return false; 1598 if (a->ShouldBeAllocatedBefore(b)) return false;
2068 if (b->ShouldBeAllocatedBefore(a)) return true; 1599 if (b->ShouldBeAllocatedBefore(a)) return true;
2069 return a->id() < b->id(); 1600 return a->id() < b->id();
2070 } 1601 }
2071 1602
2072 1603
2073 // Sort the unhandled live ranges so that the ranges to be processed first are 1604 // Sort the unhandled live ranges so that the ranges to be processed first are
2074 // at the end of the array list. This is convenient for the register allocation 1605 // at the end of the array list. This is convenient for the register allocation
2075 // algorithm because it is efficient to remove elements from the end. 1606 // algorithm because it is efficient to remove elements from the end.
2076 void RegisterAllocator::SortUnhandled() { 1607 void LinearScanAllocator::SortUnhandled() {
2077 TRACE("Sort unhandled\n"); 1608 TRACE("Sort unhandled\n");
2078 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), 1609 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
2079 &UnhandledSortHelper); 1610 &UnhandledSortHelper);
2080 } 1611 }
2081 1612
2082 1613
2083 bool RegisterAllocator::UnhandledIsSorted() { 1614 bool LinearScanAllocator::UnhandledIsSorted() {
2084 size_t len = unhandled_live_ranges().size(); 1615 size_t len = unhandled_live_ranges().size();
2085 for (size_t i = 1; i < len; i++) { 1616 for (size_t i = 1; i < len; i++) {
2086 auto a = unhandled_live_ranges().at(i - 1); 1617 auto a = unhandled_live_ranges().at(i - 1);
2087 auto b = unhandled_live_ranges().at(i); 1618 auto b = unhandled_live_ranges().at(i);
2088 if (a->Start().Value() < b->Start().Value()) return false; 1619 if (a->Start().Value() < b->Start().Value()) return false;
2089 } 1620 }
2090 return true; 1621 return true;
2091 } 1622 }
2092 1623
2093 1624
2094 void RegisterAllocator::ActiveToHandled(LiveRange* range) { 1625 void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
2095 RemoveElement(&active_live_ranges(), range); 1626 RemoveElement(&active_live_ranges(), range);
2096 TRACE("Moving live range %d from active to handled\n", range->id()); 1627 TRACE("Moving live range %d from active to handled\n", range->id());
2097 } 1628 }
2098 1629
2099 1630
2100 void RegisterAllocator::ActiveToInactive(LiveRange* range) { 1631 void LinearScanAllocator::ActiveToInactive(LiveRange* range) {
2101 RemoveElement(&active_live_ranges(), range); 1632 RemoveElement(&active_live_ranges(), range);
2102 inactive_live_ranges().push_back(range); 1633 inactive_live_ranges().push_back(range);
2103 TRACE("Moving live range %d from active to inactive\n", range->id()); 1634 TRACE("Moving live range %d from active to inactive\n", range->id());
2104 } 1635 }
2105 1636
2106 1637
2107 void RegisterAllocator::InactiveToHandled(LiveRange* range) { 1638 void LinearScanAllocator::InactiveToHandled(LiveRange* range) {
2108 RemoveElement(&inactive_live_ranges(), range); 1639 RemoveElement(&inactive_live_ranges(), range);
2109 TRACE("Moving live range %d from inactive to handled\n", range->id()); 1640 TRACE("Moving live range %d from inactive to handled\n", range->id());
2110 } 1641 }
2111 1642
2112 1643
2113 void RegisterAllocator::InactiveToActive(LiveRange* range) { 1644 void LinearScanAllocator::InactiveToActive(LiveRange* range) {
2114 RemoveElement(&inactive_live_ranges(), range); 1645 RemoveElement(&inactive_live_ranges(), range);
2115 active_live_ranges().push_back(range); 1646 active_live_ranges().push_back(range);
2116 TRACE("Moving live range %d from inactive to active\n", range->id()); 1647 TRACE("Moving live range %d from inactive to active\n", range->id());
2117 } 1648 }
2118 1649
2119 1650
2120 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { 1651 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
2121 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; 1652 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters];
2122 1653
2123 for (int i = 0; i < num_registers_; i++) { 1654 for (int i = 0; i < num_registers_; i++) {
2124 free_until_pos[i] = LifetimePosition::MaxPosition(); 1655 free_until_pos[i] = LifetimePosition::MaxPosition();
2125 } 1656 }
2126 1657
2127 for (auto cur_active : active_live_ranges()) { 1658 for (auto cur_active : active_live_ranges()) {
2128 free_until_pos[cur_active->assigned_register()] = 1659 free_until_pos[cur_active->assigned_register()] =
2129 LifetimePosition::GapFromInstructionIndex(0); 1660 LifetimePosition::GapFromInstructionIndex(0);
2130 } 1661 }
(...skipping 48 matching lines...)
2179 // the range end. 1710 // the range end.
2180 DCHECK(pos.Value() >= current->End().Value()); 1711 DCHECK(pos.Value() >= current->End().Value());
2181 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), 1712 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg),
2182 current->id()); 1713 current->id());
2183 SetLiveRangeAssignedRegister(current, reg); 1714 SetLiveRangeAssignedRegister(current, reg);
2184 1715
2185 return true; 1716 return true;
2186 } 1717 }
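
TryAllocateFreeReg computes, for every register, the position up to which it stays free: an active range blocks its register from position zero, while an inactive range blocks only from its next intersection with the current range; the register that stays free longest wins, and if it is not free past the current range's end the allocator must split or fall back to AllocateBlockedReg (in the elided lines above). A sketch of the free_until_pos computation over plain integers:

// Sketch: computing free_until_pos per register and picking the winner.
#include <algorithm>
#include <cstdio>
#include <utility>
#include <vector>

constexpr int kNumRegs = 4;
constexpr int kMaxPos = 1 << 30;

int main() {
  int free_until[kNumRegs];
  std::fill(free_until, free_until + kNumRegs, kMaxPos);

  // Active ranges make their register unavailable from position 0 on.
  std::vector<int> active_regs = {0, 2};
  for (int reg : active_regs) free_until[reg] = 0;

  // Inactive ranges block their register only from the position where
  // they next intersect the current range.
  std::vector<std::pair<int, int>> inactive = {{1, 12}};  // (reg, intersection)
  for (auto [reg, pos] : inactive)
    free_until[reg] = std::min(free_until[reg], pos);

  int best = 0;
  for (int reg = 1; reg < kNumRegs; ++reg)
    if (free_until[reg] > free_until[best]) best = reg;

  int current_end = 10;
  if (free_until[best] >= current_end)
    std::printf("assign r%d for the whole range\n", best);
  else
    std::printf("r%d only free until %d: split or block\n", best,
                free_until[best]);
}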
2187 1718
2188 1719
2189 void RegisterAllocator::AllocateBlockedReg(LiveRange* current) { 1720 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
2190 auto register_use = current->NextRegisterPosition(current->Start()); 1721 auto register_use = current->NextRegisterPosition(current->Start());
2191 if (register_use == nullptr) { 1722 if (register_use == nullptr) {
2192 // There is no use in the current live range that requires a register. 1723 // There is no use in the current live range that requires a register.
2193 // We can just spill it. 1724 // We can just spill it.
2194 Spill(current); 1725 Spill(current);
2195 return; 1726 return;
2196 } 1727 }
2197 1728
2198 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; 1729 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters];
2199 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; 1730 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters];
(...skipping 69 matching lines...)
2269 1800
2270 1801
2271 static const InstructionBlock* GetContainingLoop( 1802 static const InstructionBlock* GetContainingLoop(
2272 const InstructionSequence* sequence, const InstructionBlock* block) { 1803 const InstructionSequence* sequence, const InstructionBlock* block) {
2273 auto index = block->loop_header(); 1804 auto index = block->loop_header();
2274 if (!index.IsValid()) return nullptr; 1805 if (!index.IsValid()) return nullptr;
2275 return sequence->InstructionBlockAt(index); 1806 return sequence->InstructionBlockAt(index);
2276 } 1807 }
2277 1808
2278 1809
2279 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( 1810 LifetimePosition LinearScanAllocator::FindOptimalSpillingPos(
2280 LiveRange* range, LifetimePosition pos) { 1811 LiveRange* range, LifetimePosition pos) {
2281 auto block = GetInstructionBlock(pos.Start()); 1812 auto block = GetInstructionBlock(pos.Start());
2282 auto loop_header = 1813 auto loop_header =
2283 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); 1814 block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
2284 1815
2285 if (loop_header == nullptr) return pos; 1816 if (loop_header == nullptr) return pos;
2286 1817
2287 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); 1818 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos);
2288 1819
2289 while (loop_header != nullptr) { 1820 while (loop_header != nullptr) {
(...skipping 11 matching lines...)
2301 } 1832 }
2302 1833
2303 // Try hoisting out to an outer loop. 1834 // Try hoisting out to an outer loop.
2304 loop_header = GetContainingLoop(code(), loop_header); 1835 loop_header = GetContainingLoop(code(), loop_header);
2305 } 1836 }
2306 1837
2307 return pos; 1838 return pos;
2308 } 1839 }
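
FindOptimalSpillingPos walks outward through the loops containing the intended spill position: as long as the range has had no register-beneficial use since a loop's start, the spill can be hoisted to just before that loop. A simplified sketch of the walk; the real code in the elided lines also compares against the loop header's gap position:

// Sketch: hoisting a spill position out of enclosing loops when no
// register-beneficial use has occurred since the loop started.
#include <cstdio>

struct Loop { int start; const Loop* parent; };

// prev_use: last position before 'pos' where a register pays off (-1 if none).
int OptimalSpillPos(int pos, int prev_use, const Loop* loop) {
  int best = pos;
  for (; loop != nullptr; loop = loop->parent) {
    if (prev_use >= loop->start) break;  // a use inside: stop hoisting
    best = loop->start;                  // spill before entering this loop
  }
  return best;
}

int main() {
  Loop outer{4, nullptr};
  Loop inner{10, &outer};
  std::printf("spill at %d\n", OptimalSpillPos(20, 2, &inner));   // hoists to 4
  std::printf("spill at %d\n", OptimalSpillPos(20, 11, &inner));  // stays at 20
}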
2309 1840
2310 1841
2311 void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) { 1842 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
2312 DCHECK(current->HasRegisterAssigned()); 1843 DCHECK(current->HasRegisterAssigned());
2313 int reg = current->assigned_register(); 1844 int reg = current->assigned_register();
2314 auto split_pos = current->Start(); 1845 auto split_pos = current->Start();
2315 for (size_t i = 0; i < active_live_ranges().size(); ++i) { 1846 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2316 auto range = active_live_ranges()[i]; 1847 auto range = active_live_ranges()[i];
2317 if (range->assigned_register() == reg) { 1848 if (range->assigned_register() == reg) {
2318 auto next_pos = range->NextRegisterPosition(current->Start()); 1849 auto next_pos = range->NextRegisterPosition(current->Start());
2319 auto spill_pos = FindOptimalSpillingPos(range, split_pos); 1850 auto spill_pos = FindOptimalSpillingPos(range, split_pos);
2320 if (next_pos == nullptr) { 1851 if (next_pos == nullptr) {
2321 SpillAfter(range, spill_pos); 1852 SpillAfter(range, spill_pos);
(...skipping 27 matching lines...)
2349 SpillBetween(range, split_pos, next_intersection); 1880 SpillBetween(range, split_pos, next_intersection);
2350 } 1881 }
2351 InactiveToHandled(range); 1882 InactiveToHandled(range);
2352 --i; 1883 --i;
2353 } 1884 }
2354 } 1885 }
2355 } 1886 }
2356 } 1887 }
2357 1888
2358 1889
2359 bool RegisterAllocator::IsBlockBoundary(LifetimePosition pos) { 1890 bool LinearScanAllocator::TryReuseSpillForPhi(LiveRange* range) {
2360 return pos.IsFullStart() && 1891 if (range->IsChild() || !range->is_phi()) return false;
2361 code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() == 1892 DCHECK(!range->HasSpillOperand());
2362 pos.ToInstructionIndex(); 1893
1894 auto lookup = phi_map().find(range->id());
1895 DCHECK(lookup != phi_map().end());
1896 auto phi = lookup->second->phi;
1897 auto block = lookup->second->block;
1898 // Count the number of spilled operands.
1899 size_t spilled_count = 0;
1900 LiveRange* first_op = nullptr;
1901 for (size_t i = 0; i < phi->operands().size(); i++) {
1902 int op = phi->operands()[i];
1903 LiveRange* op_range = LiveRangeFor(op);
1904 if (op_range->GetSpillRange() == nullptr) continue;
1905 auto pred = code()->InstructionBlockAt(block->predecessors()[i]);
1906 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
1907 pred->last_instruction_index());
1908 while (op_range != nullptr && !op_range->CanCover(pred_end)) {
1909 op_range = op_range->next();
1910 }
1911 if (op_range != nullptr && op_range->IsSpilled()) {
1912 spilled_count++;
1913 if (first_op == nullptr) {
1914 first_op = op_range->TopLevel();
1915 }
1916 }
1917 }
1918
1919 // Only continue if more than half of the operands are spilled.
1920 if (spilled_count * 2 <= phi->operands().size()) {
1921 return false;
1922 }
1923
1924 // Try to merge the spilled operands and count the number of merged spilled
1925 // operands.
1926 DCHECK(first_op != nullptr);
1927 auto first_op_spill = first_op->GetSpillRange();
1928 size_t num_merged = 1;
1929 for (size_t i = 1; i < phi->operands().size(); i++) {
1930 int op = phi->operands()[i];
1931 auto op_range = LiveRangeFor(op);
1932 auto op_spill = op_range->GetSpillRange();
1933 if (op_spill != nullptr &&
1934 (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill))) {
1935 num_merged++;
1936 }
1937 }
1938
1939 // Only continue if enough operands could be merged to the
1940 // same spill slot.
1941 if (num_merged * 2 <= phi->operands().size() ||
1942 AreUseIntervalsIntersecting(first_op_spill->interval(),
1943 range->first_interval())) {
1944 return false;
1945 }
1946
1947 // If the range does not need register soon, spill it to the merged
1948 // spill range.
1949 auto next_pos = range->Start();
1950 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
1951 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
1952 if (pos == nullptr) {
1953 auto spill_range =
1954 range->TopLevel()->HasSpillRange()
1955 ? range->TopLevel()->GetSpillRange()
1956 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
1957 bool merged = first_op_spill->TryMerge(spill_range);
1958 CHECK(merged);
1959 Spill(range);
1960 return true;
1961 } else if (pos->pos().Value() > range->Start().NextStart().Value()) {
1962 auto spill_range =
1963 range->TopLevel()->HasSpillRange()
1964 ? range->TopLevel()->GetSpillRange()
1965 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
1966 bool merged = first_op_spill->TryMerge(spill_range);
1967 CHECK(merged);
1968 SpillBetween(range, range->Start(), pos->pos());
1969 DCHECK(UnhandledIsSorted());
1970 return true;
1971 }
1972 return false;
2363 } 1973 }
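
TryReuseSpillForPhi is a heuristic with two gates: first, strictly more than half of the phi's inputs must already be spilled at their predecessors' ends; second, their spill ranges must merge into one slot that the phi's own range does not conflict with. A sketch of the first, counting gate only; the merge itself (TryMerge) and the conflict test come after this in the code above:

// Sketch: the "majority of phi inputs already spilled" test that gates
// spill-slot reuse for a phi.
#include <cstdio>
#include <vector>

int main() {
  // true = the corresponding phi operand is spilled at its predecessor's end.
  std::vector<bool> operand_spilled = {true, true, false, true, false};

  size_t spilled_count = 0;
  for (bool s : operand_spilled)
    if (s) spilled_count++;

  // Only worth reusing a slot if strictly more than half are spilled.
  if (spilled_count * 2 > operand_spilled.size())
    std::printf("reuse candidate: %zu of %zu inputs spilled\n", spilled_count,
                operand_spilled.size());
  else
    std::printf("not worth it\n");
}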
2364 1974
2365 1975
2366 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, 1976 LiveRange* LinearScanAllocator::SplitRangeAt(LiveRange* range,
2367 LifetimePosition pos) { 1977 LifetimePosition pos) {
2368 DCHECK(!range->IsFixed()); 1978 DCHECK(!range->IsFixed());
2369 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); 1979 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value());
2370 1980
2371 if (pos.Value() <= range->Start().Value()) return range; 1981 if (pos.Value() <= range->Start().Value()) return range;
2372 1982
2373 // We can't properly connect live ranges if splitting occurred at the end 1983 // We can't properly connect live ranges if splitting occurred at the end
2374 // of a block. 1984 // of a block.
2375 DCHECK(pos.IsStart() || pos.IsGapPosition() || 1985 DCHECK(pos.IsStart() || pos.IsGapPosition() ||
2376 (code()->GetInstructionBlock(pos.ToInstructionIndex())) 1986 (code()->GetInstructionBlock(pos.ToInstructionIndex()))
2377 ->last_instruction_index() != pos.ToInstructionIndex()); 1987 ->last_instruction_index() != pos.ToInstructionIndex());
2378 1988
2379 int vreg = GetVirtualRegister(); 1989 int vreg = GetVirtualRegister();
2380 auto result = LiveRangeFor(vreg); 1990 auto result = LiveRangeFor(vreg);
2381 range->SplitAt(pos, result, local_zone()); 1991 range->SplitAt(pos, result, allocation_zone());
2382 return result; 1992 return result;
2383 } 1993 }
2384 1994
2385 1995
2386 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, 1996 LiveRange* LinearScanAllocator::SplitBetween(LiveRange* range,
2387 LifetimePosition start, 1997 LifetimePosition start,
2388 LifetimePosition end) { 1998 LifetimePosition end) {
2389 DCHECK(!range->IsFixed()); 1999 DCHECK(!range->IsFixed());
2390 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(), 2000 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(),
2391 start.Value(), end.Value()); 2001 start.Value(), end.Value());
2392 2002
2393 auto split_pos = FindOptimalSplitPos(start, end); 2003 auto split_pos = FindOptimalSplitPos(start, end);
2394 DCHECK(split_pos.Value() >= start.Value()); 2004 DCHECK(split_pos.Value() >= start.Value());
2395 return SplitRangeAt(range, split_pos); 2005 return SplitRangeAt(range, split_pos);
2396 } 2006 }
2397 2007
2398 2008
2399 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, 2009 LifetimePosition LinearScanAllocator::FindOptimalSplitPos(
2400 LifetimePosition end) { 2010 LifetimePosition start, LifetimePosition end) {
2401 int start_instr = start.ToInstructionIndex(); 2011 int start_instr = start.ToInstructionIndex();
2402 int end_instr = end.ToInstructionIndex(); 2012 int end_instr = end.ToInstructionIndex();
2403 DCHECK(start_instr <= end_instr); 2013 DCHECK(start_instr <= end_instr);
2404 2014
2405 // We have no choice. 2015 // We have no choice.
2406 if (start_instr == end_instr) return end; 2016 if (start_instr == end_instr) return end;
2407 2017
2408 auto start_block = GetInstructionBlock(start); 2018 auto start_block = GetInstructionBlock(start);
2409 auto end_block = GetInstructionBlock(end); 2019 auto end_block = GetInstructionBlock(end);
2410 2020
(...skipping 14 matching lines...)
2425 2035
2426 // We did not find any suitable outer loop. Split at the latest possible 2036 // We did not find any suitable outer loop. Split at the latest possible
2427 // position unless end_block is a loop header itself. 2037 // position unless end_block is a loop header itself.
2428 if (block == end_block && !end_block->IsLoopHeader()) return end; 2038 if (block == end_block && !end_block->IsLoopHeader()) return end;
2429 2039
2430 return LifetimePosition::GapFromInstructionIndex( 2040 return LifetimePosition::GapFromInstructionIndex(
2431 block->first_instruction_index()); 2041 block->first_instruction_index());
2432 } 2042 }
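
FindOptimalSplitPos prefers to split on entry to the outermost loop lying strictly between start and end, so that the spilled half of a later split stays outside the loop; only when no such loop exists does it split as late as possible. A simplified sketch of the loop walk; the real code also special-cases end_block being a loop header itself:

// Sketch: picking a split position at the outermost loop boundary between
// start and end, falling back to the latest position.
#include <cstdio>

struct Block { int first_index; const Block* loop_header; };

int OptimalSplitPos(int start, int end, const Block* end_block) {
  if (start == end) return end;  // no choice
  const Block* block = end_block;
  // Walk out of every loop whose header starts after 'start'.
  for (const Block* h = block->loop_header; h != nullptr;
       h = h->loop_header) {
    if (h->first_index <= start) break;
    block = h;
  }
  if (block == end_block) return end;  // not inside a relevant loop
  return block->first_index;           // split on entry to the loop
}

int main() {
  Block outer{6, nullptr};
  Block inner{9, &outer};
  Block body{12, &inner};
  std::printf("split at %d\n", OptimalSplitPos(2, 14, &body));  // -> 6
}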
2433 2043
2434 2044
2435 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { 2045 void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2436 auto second_part = SplitRangeAt(range, pos); 2046 auto second_part = SplitRangeAt(range, pos);
2437 Spill(second_part); 2047 Spill(second_part);
2438 } 2048 }
2439 2049
2440 2050
2441 void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, 2051 void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
2442 LifetimePosition end) { 2052 LifetimePosition end) {
2443 SpillBetweenUntil(range, start, start, end); 2053 SpillBetweenUntil(range, start, start, end);
2444 } 2054 }
2445 2055
2446 2056
2447 void RegisterAllocator::SpillBetweenUntil(LiveRange* range, 2057 void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
2448 LifetimePosition start, 2058 LifetimePosition start,
2449 LifetimePosition until, 2059 LifetimePosition until,
2450 LifetimePosition end) { 2060 LifetimePosition end) {
2451 CHECK(start.Value() < end.Value()); 2061 CHECK(start.Value() < end.Value());
2452 auto second_part = SplitRangeAt(range, start); 2062 auto second_part = SplitRangeAt(range, start);
2453 2063
2454 if (second_part->Start().Value() < end.Value()) { 2064 if (second_part->Start().Value() < end.Value()) {
2455 // The split result intersects with [start, end[. 2065 // The split result intersects with [start, end[.
2456 // Split it at a position in ]start+1, end[, spill the middle part 2066 // Split it at a position in ]start+1, end[, spill the middle part
2457 // and put the rest to unhandled. 2067 // and put the rest to unhandled.
2458 auto third_part_end = end.PrevStart().End(); 2068 auto third_part_end = end.PrevStart().End();
2459 if (IsBlockBoundary(end.Start())) { 2069 if (data()->IsBlockBoundary(end.Start())) {
2460 third_part_end = end.Start(); 2070 third_part_end = end.Start();
2461 } 2071 }
2462 auto third_part = SplitBetween( 2072 auto third_part = SplitBetween(
2463 second_part, Max(second_part->Start().End(), until), third_part_end); 2073 second_part, Max(second_part->Start().End(), until), third_part_end);
2464 2074
2465 DCHECK(third_part != second_part); 2075 DCHECK(third_part != second_part);
2466 2076
2467 Spill(second_part); 2077 Spill(second_part);
2468 AddToUnhandledSorted(third_part); 2078 AddToUnhandledSorted(third_part);
2469 } else { 2079 } else {
2470 // The split result does not intersect with [start, end[. 2080 // The split result does not intersect with [start, end[.
2471 // Nothing to spill. Just put it to unhandled as whole. 2081 // Nothing to spill. Just put it to unhandled as whole.
2472 AddToUnhandledSorted(second_part); 2082 AddToUnhandledSorted(second_part);
2473 } 2083 }
2474 } 2084 }
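
SpillBetweenUntil can produce a three-way split: the head keeps its current assignment, the middle part covering the spill window is spilled, and the tail goes back on unhandled for another allocation attempt. A rough sketch of the interval arithmetic; the real middle/tail boundary is chosen by SplitBetween via FindOptimalSplitPos inside ]until, end[, and here the earliest legal point is taken purely for illustration:

// Sketch: the up-to-three-way split behind SpillBetweenUntil.
#include <algorithm>
#include <cstdio>

int main() {
  int range_start = 2, range_end = 20;  // extent of the range being spilled
  int start = 6, until = 8, end = 14;   // spill window parameters

  int second_start = std::max(range_start, start);  // first split point
  if (second_start < end) {
    // The split result intersects [start, end[: spill the middle part and
    // re-queue the tail as unhandled.
    int third_start = std::max(second_start + 1, until);
    std::printf("keep [%d,%d) spill [%d,%d) requeue [%d,%d)\n", range_start,
                second_start, second_start, third_start, third_start,
                range_end);
  } else {
    // No intersection: nothing to spill, re-queue the whole second part.
    std::printf("requeue [%d,%d) whole\n", second_start, range_end);
  }
}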
2475 2085
2476 2086
2477 void RegisterAllocator::Spill(LiveRange* range) { 2087 void LinearScanAllocator::Spill(LiveRange* range) {
2478 DCHECK(!range->IsSpilled()); 2088 DCHECK(!range->IsSpilled());
2479 TRACE("Spilling live range %d\n", range->id()); 2089 TRACE("Spilling live range %d\n", range->id());
2480 auto first = range->TopLevel(); 2090 auto first = range->TopLevel();
2481 if (first->HasNoSpillType()) { 2091 if (first->HasNoSpillType()) {
2482 AssignSpillRangeToLiveRange(first); 2092 data()->AssignSpillRangeToLiveRange(first);
2483 } 2093 }
2484 range->MakeSpilled(); 2094 range->MakeSpilled();
2485 } 2095 }
2486 2096
2487 2097
2488 int RegisterAllocator::RegisterCount() const { return num_registers_; } 2098 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
2489 2099
2490 2100
2491 #ifdef DEBUG 2101 void OperandAssigner::AssignSpillSlots() {
2492 2102 auto& spill_ranges = data()->spill_ranges();
2493 2103 // Merge disjoint spill ranges
2494 void RegisterAllocator::Verify() const { 2104 for (size_t i = 0; i < spill_ranges.size(); i++) {
2495 for (auto current : live_ranges()) { 2105 auto range = spill_ranges[i];
2496 if (current != nullptr) current->Verify(); 2106 if (range->IsEmpty()) continue;
2497 } 2107 for (size_t j = i + 1; j < spill_ranges.size(); j++) {
2498 } 2108 auto other = spill_ranges[j];
2499 2109 if (!other->IsEmpty()) {
2500 2110 range->TryMerge(other);
2501 #endif 2111 }
2502 2112 }
2503 2113 }
2504 void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range, 2114 // Allocate slots for the merged spill ranges.
2505 int reg) { 2115 for (auto range : spill_ranges) {
2506 if (range->Kind() == DOUBLE_REGISTERS) { 2116 if (range->IsEmpty()) continue;
2507 assigned_double_registers_->Add(reg); 2117 // Allocate a new operand referring to the spill slot.
2118 auto kind = range->Kind();
2119 int index = data()->frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS);
2120 auto op_kind = kind == DOUBLE_REGISTERS
2121 ? AllocatedOperand::DOUBLE_STACK_SLOT
2122 : AllocatedOperand::STACK_SLOT;
2123 auto op = AllocatedOperand::New(data()->code_zone(), op_kind, index);
2124 range->SetOperand(op);
2125 }
2126 }
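
AssignSpillSlots does a greedy quadratic pass: any two spill ranges whose use intervals never overlap are merged so they can share one stack slot, and only the surviving merged ranges get slots. A sketch with explicit interval lists, where SpillRange and Intersect are illustrative stand-ins rather than the V8 classes:

// Sketch: greedy pairwise merging of non-overlapping spill ranges so they
// can share a stack slot.
#include <cstdio>
#include <utility>
#include <vector>

struct SpillRange {
  std::vector<std::pair<int, int>> intervals;  // disjoint [start, end)
  int slot = -1;
  bool merged = false;
};

bool Intersect(const SpillRange& a, const SpillRange& b) {
  for (auto [s1, e1] : a.intervals)
    for (auto [s2, e2] : b.intervals)
      if (s1 < e2 && s2 < e1) return true;
  return false;
}

int main() {
  std::vector<SpillRange> ranges(3);
  ranges[0].intervals = {{0, 4}};
  ranges[1].intervals = {{6, 9}};
  ranges[2].intervals = {{2, 7}};

  for (size_t i = 0; i < ranges.size(); i++) {
    if (ranges[i].merged) continue;
    for (size_t j = i + 1; j < ranges.size(); j++) {
      if (!ranges[j].merged && !Intersect(ranges[i], ranges[j])) {
        // Merge j into i: the two are never live at the same time.
        ranges[i].intervals.insert(ranges[i].intervals.end(),
                                   ranges[j].intervals.begin(),
                                   ranges[j].intervals.end());
        ranges[j].merged = true;
      }
    }
  }
  int next_slot = 0;
  for (auto& r : ranges)
    if (!r.merged) r.slot = next_slot++;
  std::printf("slots used: %d\n", next_slot);
}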
2127
2128
2129 void OperandAssigner::CommitAssignment() {
2130 for (auto range : data()->live_ranges()) {
2131 if (range == nullptr || range->IsEmpty()) continue;
2132 InstructionOperand* spill_operand = nullptr;
2133 if (!range->TopLevel()->HasNoSpillType()) {
2134 spill_operand = range->TopLevel()->GetSpillOperand();
2135 }
2136 auto assigned = range->GetAssignedOperand();
2137 range->ConvertUsesToOperand(assigned, spill_operand);
2138 if (range->is_phi()) data()->AssignPhiInput(range, assigned);
2139 if (!range->IsChild() && spill_operand != nullptr) {
2140 range->CommitSpillsAtDefinition(data()->code(), spill_operand,
2141 range->has_slot_use());
2142 }
2143 }
2144 }
2145
2146
2147 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
2148 : data_(data) {}
2149
2150
2151 bool ReferenceMapPopulator::SafePointsAreInOrder() const {
2152 int safe_point = 0;
2153 for (auto map : *data()->code()->reference_maps()) {
2154 if (safe_point > map->instruction_position()) return false;
2155 safe_point = map->instruction_position();
2156 }
2157 return true;
2158 }
2159
2160
2161 void ReferenceMapPopulator::PopulateReferenceMaps() {
2162 DCHECK(SafePointsAreInOrder());
2163
2164 // Iterate over all safe point positions and record a pointer
2165 // for all spilled live ranges at this point.
2166 int last_range_start = 0;
2167 auto reference_maps = data()->code()->reference_maps();
2168 ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
2169 for (LiveRange* range : data()->live_ranges()) {
2170 if (range == nullptr) continue;
2171 // Iterate over the first parts of multi-part live ranges.
2172 if (range->IsChild()) continue;
2173 // Skip non-reference values.
2174 if (!IsReference(range->id())) continue;
2175 // Skip empty live ranges.
2176 if (range->IsEmpty()) continue;
2177
2178 // Find the extent of the range and its children.
2179 int start = range->Start().ToInstructionIndex();
2180 int end = 0;
2181 for (auto cur = range; cur != nullptr; cur = cur->next()) {
2182 auto this_end = cur->End();
2183 if (this_end.ToInstructionIndex() > end)
2184 end = this_end.ToInstructionIndex();
2185 DCHECK(cur->Start().ToInstructionIndex() >= start);
2186 }
2187
2188 // Most of the ranges are in order, but not all. Keep an eye on when they
2189 // step backwards and reset the first_it so we don't miss any safe points.
2190 if (start < last_range_start) first_it = reference_maps->begin();
2191 last_range_start = start;
2192
2193 // Step across all the safe points that are before the start of this range,
2194 // recording how far we step in order to save doing this for the next range.
2195 for (; first_it != reference_maps->end(); ++first_it) {
2196 auto map = *first_it;
2197 if (map->instruction_position() >= start) break;
2198 }
2199
2200 // Step through the safe points to see whether they are in the range.
2201 for (auto it = first_it; it != reference_maps->end(); ++it) {
2202 auto map = *it;
2203 int safe_point = map->instruction_position();
2204
2205 // The safe points are sorted so we can stop searching here.
2206 if (safe_point - 1 > end) break;
2207
2208 // Advance to the next active range that covers the current
2209 // safe point position.
2210 auto safe_point_pos =
2211 LifetimePosition::InstructionFromInstructionIndex(safe_point);
2212 auto cur = range;
2213 while (cur != nullptr && !cur->Covers(safe_point_pos)) {
2214 cur = cur->next();
2215 }
2216 if (cur == nullptr) continue;
2217
2218 // Check if the live range is spilled and the safe point is after
2219 // the spill position.
2220 if (range->HasSpillOperand() &&
2221 safe_point >= range->spill_start_index() &&
2222 !range->GetSpillOperand()->IsConstant()) {
2223 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
2224 range->id(), range->spill_start_index(), safe_point);
2225 map->RecordReference(*range->GetSpillOperand());
2226 }
2227
2228 if (!cur->IsSpilled()) {
2229 TRACE(
2230 "Pointer in register for range %d (start at %d) "
2231 "at safe point %d\n",
2232 cur->id(), cur->Start().Value(), safe_point);
2233 auto operand = cur->GetAssignedOperand();
2234 DCHECK(!operand.IsStackSlot());
2235 map->RecordReference(operand);
2236 }
2237 }
2238 }
2239 }
2240
2241
2242 namespace {
2243
2244 class LiveRangeBound {
2245 public:
2246 explicit LiveRangeBound(const LiveRange* range)
2247 : range_(range), start_(range->Start()), end_(range->End()) {
2248 DCHECK(!range->IsEmpty());
2249 }
2250
2251 bool CanCover(LifetimePosition position) {
2252 return start_.Value() <= position.Value() &&
2253 position.Value() < end_.Value();
2254 }
2255
2256 const LiveRange* const range_;
2257 const LifetimePosition start_;
2258 const LifetimePosition end_;
2259
2260 private:
2261 DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
2262 };
2263
2264
2265 struct FindResult {
2266 const LiveRange* cur_cover_;
2267 const LiveRange* pred_cover_;
2268 };
2269
2270
2271 class LiveRangeBoundArray {
2272 public:
2273 LiveRangeBoundArray() : length_(0), start_(nullptr) {}
2274
2275 bool ShouldInitialize() { return start_ == nullptr; }
2276
2277 void Initialize(Zone* zone, const LiveRange* const range) {
2278 size_t length = 0;
2279 for (auto i = range; i != nullptr; i = i->next()) length++;
2280 start_ = zone->NewArray<LiveRangeBound>(length);
2281 length_ = length;
2282 auto curr = start_;
2283 for (auto i = range; i != nullptr; i = i->next(), ++curr) {
2284 new (curr) LiveRangeBound(i);
2285 }
2286 }
2287
2288 LiveRangeBound* Find(const LifetimePosition position) const {
2289 size_t left_index = 0;
2290 size_t right_index = length_;
2291 while (true) {
2292 size_t current_index = left_index + (right_index - left_index) / 2;
2293 DCHECK(right_index > current_index);
2294 auto bound = &start_[current_index];
2295 if (bound->start_.Value() <= position.Value()) {
2296 if (position.Value() < bound->end_.Value()) return bound;
2297 DCHECK(left_index < current_index);
2298 left_index = current_index;
2299 } else {
2300 right_index = current_index;
2301 }
2302 }
2303 }
2304
2305 LiveRangeBound* FindPred(const InstructionBlock* pred) {
2306 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
2307 pred->last_instruction_index());
2308 return Find(pred_end);
2309 }
2310
2311 LiveRangeBound* FindSucc(const InstructionBlock* succ) {
2312 auto succ_start = LifetimePosition::GapFromInstructionIndex(
2313 succ->first_instruction_index());
2314 return Find(succ_start);
2315 }
2316
2317 void Find(const InstructionBlock* block, const InstructionBlock* pred,
2318 FindResult* result) const {
2319 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
2320 pred->last_instruction_index());
2321 auto bound = Find(pred_end);
2322 result->pred_cover_ = bound->range_;
2323 auto cur_start = LifetimePosition::GapFromInstructionIndex(
2324 block->first_instruction_index());
2325 // Common case.
2326 if (bound->CanCover(cur_start)) {
2327 result->cur_cover_ = bound->range_;
2328 return;
2329 }
2330 result->cur_cover_ = Find(cur_start)->range_;
2331 DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
2332 }
2333
2334 private:
2335 size_t length_;
2336 LiveRangeBound* start_;
2337
2338 DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
2339 };
2340
2341
2342 class LiveRangeFinder {
2343 public:
2344 explicit LiveRangeFinder(const RegisterAllocationData* data)
2345 : data_(data),
2346 bounds_length_(static_cast<int>(data_->live_ranges().size())),
2347 bounds_(data_->allocation_zone()->NewArray<LiveRangeBoundArray>(
2348 bounds_length_)) {
2349 for (int i = 0; i < bounds_length_; ++i) {
2350 new (&bounds_[i]) LiveRangeBoundArray();
2351 }
2352 }
2353
2354 LiveRangeBoundArray* ArrayFor(int operand_index) {
2355 DCHECK(operand_index < bounds_length_);
2356 auto range = data_->live_ranges()[operand_index];
2357 DCHECK(range != nullptr && !range->IsEmpty());
2358 auto array = &bounds_[operand_index];
2359 if (array->ShouldInitialize()) {
2360 array->Initialize(data_->allocation_zone(), range);
2361 }
2362 return array;
2363 }
2364
2365 private:
2366 const RegisterAllocationData* const data_;
2367 const int bounds_length_;
2368 LiveRangeBoundArray* const bounds_;
2369
2370 DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
2371 };
2372
2373 } // namespace
2374
2375
2376 LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
2377 : data_(data) {}
2378
2379
2380 bool LiveRangeConnector::CanEagerlyResolveControlFlow(
2381 const InstructionBlock* block) const {
2382 if (block->PredecessorCount() != 1) return false;
2383 return block->predecessors()[0].IsNext(block->rpo_number());
2384 }
2385
2386
2387 void LiveRangeConnector::ResolveControlFlow() {
2388 // Lazily linearize live ranges in memory for fast lookup.
2389 LiveRangeFinder finder(data());
2390 auto& live_in_sets = data()->live_in_sets();
2391 for (auto block : code()->instruction_blocks()) {
2392 if (CanEagerlyResolveControlFlow(block)) continue;
2393 auto live = live_in_sets[block->rpo_number().ToInt()];
2394 BitVector::Iterator iterator(live);
2395 while (!iterator.Done()) {
2396 auto* array = finder.ArrayFor(iterator.Current());
2397 for (auto pred : block->predecessors()) {
2398 FindResult result;
2399 const auto* pred_block = code()->InstructionBlockAt(pred);
2400 array->Find(block, pred_block, &result);
2401 if (result.cur_cover_ == result.pred_cover_ ||
2402 result.cur_cover_->IsSpilled())
2403 continue;
2404 auto pred_op = result.pred_cover_->GetAssignedOperand();
2405 auto cur_op = result.cur_cover_->GetAssignedOperand();
2406 if (pred_op == cur_op) continue;
2407 ResolveControlFlow(block, cur_op, pred_block, pred_op);
2408 }
2409 iterator.Advance();
2410 }
2411 }
2412 }
2413
2414
2415 void LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
2416 const InstructionOperand& cur_op,
2417 const InstructionBlock* pred,
2418 const InstructionOperand& pred_op) {
2419 DCHECK(pred_op != cur_op);
2420 int gap_index;
2421 Instruction::GapPosition position;
2422 if (block->PredecessorCount() == 1) {
2423 gap_index = block->first_instruction_index();
2424 position = Instruction::START;
2425 } else {
2426     DCHECK(pred->SuccessorCount() == 1);
2427     DCHECK(!data()
2428          ->InstructionAt(pred->last_instruction_index())
2429          ->HasReferenceMap());
2430     gap_index = pred->last_instruction_index();
2431     position = Instruction::END;
2432 }
2433 data()->AddGapMove(gap_index, position, pred_op, cur_op);
2434 }
(left diff column alongside the lines above, removed from the old file:)
2508 } else {
2509     DCHECK(range->Kind() == GENERAL_REGISTERS);
2510     assigned_registers_->Add(reg);
2511 }
2512 range->set_assigned_register(reg);
2513 auto assignment = range->GetAssignedOperand();
2514 // TODO(dcarney): stop aliasing hint operands.
2515 range->ConvertUsesToOperand(assignment, nullptr);
2516 if (range->is_phi()) AssignPhiInput(range, assignment);
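The placement rule in the if/else above relies on critical edges having been split earlier: either the target block has a single predecessor, so the move can go in the START gap of its first instruction, or the predecessor has a single successor, so the move can go in the END gap of its last instruction. A minimal sketch of the decision, with simplified stand-in types (assumed, not the V8 signatures):

struct MovePlacement {
  int instruction_index;
  bool at_block_start;  // START gap vs. END gap
};

MovePlacement PlaceResolutionMove(int predecessor_count,
                                  int block_first_instruction,
                                  int pred_last_instruction) {
  if (predecessor_count == 1) {
    return {block_first_instruction, true};  // Instruction::START
  }
  // Edge splitting guarantees a single-successor predecessor here.
  return {pred_last_instruction, false};  // Instruction::END
}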
2435
2436
2437 void LiveRangeConnector::ConnectRanges(Zone* temp_zone) {
2438 ZoneMap<std::pair<ParallelMove*, InstructionOperand>, InstructionOperand>
2439 delayed_insertion_map(temp_zone);
2440 for (auto first_range : data()->live_ranges()) {
2441 if (first_range == nullptr || first_range->IsChild()) continue;
2442 for (auto second_range = first_range->next(); second_range != nullptr;
2443 first_range = second_range, second_range = second_range->next()) {
2444 auto pos = second_range->Start();
2445 // Add gap move if the two live ranges touch and there is no block
2446 // boundary.
2447 if (second_range->IsSpilled()) continue;
2448 if (first_range->End().Value() != pos.Value()) continue;
2449 if (data()->IsBlockBoundary(pos) &&
2450 !CanEagerlyResolveControlFlow(GetInstructionBlock(pos))) {
2451 continue;
2452 }
2453 auto prev_operand = first_range->GetAssignedOperand();
2454 auto cur_operand = second_range->GetAssignedOperand();
2455 if (prev_operand == cur_operand) continue;
2456 bool delay_insertion = false;
2457 Instruction::GapPosition gap_pos;
2458 int gap_index = pos.ToInstructionIndex();
2459 if (pos.IsGapPosition()) {
2460 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
2461 } else {
2462 if (pos.IsStart()) {
2463 delay_insertion = true;
2464 } else {
2465 gap_index++;
2466 }
2467 gap_pos = delay_insertion ? Instruction::END : Instruction::START;
2468 }
2469 auto move = code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
2470 gap_pos, code_zone());
2471 if (!delay_insertion) {
2472 move->AddMove(prev_operand, cur_operand);
2473 } else {
2474 delayed_insertion_map.insert(
2475 std::make_pair(std::make_pair(move, prev_operand), cur_operand));
2476 }
2477 }
2478 }
2479 if (delayed_insertion_map.empty()) return;
2480 // Insert all the moves which should occur after the stored move.
2481 ZoneVector<MoveOperands*> to_insert(temp_zone);
2482 ZoneVector<MoveOperands*> to_eliminate(temp_zone);
2483 to_insert.reserve(4);
2484 to_eliminate.reserve(4);
2485 auto moves = delayed_insertion_map.begin()->first.first;
2486 for (auto it = delayed_insertion_map.begin();; ++it) {
2487 bool done = it == delayed_insertion_map.end();
2488 if (done || it->first.first != moves) {
2489 // Commit the MoveOperands for current ParallelMove.
2490 for (auto move : to_eliminate) {
2491 move->Eliminate();
2492 }
2493 for (auto move : to_insert) {
2494 moves->push_back(move);
2495 }
2496 if (done) break;
2497 // Reset state.
2498 to_eliminate.clear();
2499 to_insert.clear();
2500 moves = it->first.first;
2501 }
2502 // Gather all MoveOperands for a single ParallelMove.
2503 auto move = new (code_zone()) MoveOperands(it->first.second, it->second);
2504 auto eliminate = moves->PrepareInsertAfter(move);
2505 to_insert.push_back(move);
2506 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
2507 }
2508 }
2509
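A note on the bookkeeping in ConnectRanges above: moves that land at the start of an instruction rather than in a gap are deferred into delayed_insertion_map, keyed by (target ParallelMove, source operand). Because the map is ordered, all entries for one ParallelMove are contiguous and can be committed as a batch, with PrepareInsertAfter reporting any existing move the new one supersedes so it can be eliminated. A simplified sketch of just the grouping step, with std containers standing in for the zone structures (assumed):

#include <map>
#include <utility>
#include <vector>

struct Move { int from, to; };
using ParallelMove = std::vector<Move>;

// Commit delayed moves group by group; entries sharing a ParallelMove*
// are adjacent because the map orders by its key.
void CommitDelayed(std::map<std::pair<ParallelMove*, int>, int>* delayed) {
  for (auto it = delayed->begin(); it != delayed->end();) {
    ParallelMove* moves = it->first.first;  // start of the next group
    for (; it != delayed->end() && it->first.first == moves; ++it) {
      // it->first.second is the source, it->second the destination; the
      // real code also runs PrepareInsertAfter here and eliminates any
      // move the inserted one overwrites.
      moves->push_back(Move{it->first.second, it->second});
    }
  }
}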
2510 } // namespace compiler
2511 } // namespace internal
2512 } // namespace v8