Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(258)

Side by Side Diff: src/compiler/register-allocator.cc

Issue 1305393003: [turbofan] Deferred blocks splintering. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/compiler/register-allocator.h ('k') | src/flag-definitions.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/base/adapters.h" 5 #include "src/base/adapters.h"
6 #include "src/compiler/linkage.h" 6 #include "src/compiler/linkage.h"
7 #include "src/compiler/register-allocator.h" 7 #include "src/compiler/register-allocator.h"
8 #include "src/string-stream.h" 8 #include "src/string-stream.h"
9 9
10 namespace v8 { 10 namespace v8 {
(...skipping 235 matching lines...) Expand 10 before | Expand all | Expand 10 after
246 246
247 LiveRange::LiveRange(int id, MachineType machine_type) 247 LiveRange::LiveRange(int id, MachineType machine_type)
248 : id_(id), 248 : id_(id),
249 spill_start_index_(kMaxInt), 249 spill_start_index_(kMaxInt),
250 bits_(0), 250 bits_(0),
251 last_interval_(nullptr), 251 last_interval_(nullptr),
252 first_interval_(nullptr), 252 first_interval_(nullptr),
253 first_pos_(nullptr), 253 first_pos_(nullptr),
254 parent_(nullptr), 254 parent_(nullptr),
255 next_(nullptr), 255 next_(nullptr),
256 splintered_from_(nullptr),
256 spill_operand_(nullptr), 257 spill_operand_(nullptr),
257 spills_at_definition_(nullptr), 258 spills_at_definition_(nullptr),
258 current_interval_(nullptr), 259 current_interval_(nullptr),
259 last_processed_use_(nullptr), 260 last_processed_use_(nullptr),
260 current_hint_position_(nullptr), 261 current_hint_position_(nullptr),
261 size_(kInvalidSize), 262 size_(kInvalidSize),
262 weight_(kInvalidWeight), 263 weight_(kInvalidWeight),
263 spilled_in_deferred_block_(false) { 264 spilled_in_deferred_block_(false) {
264 DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type)); 265 DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type));
265 bits_ = SpillTypeField::encode(SpillType::kNoSpillType) | 266 bits_ = SpillTypeField::encode(SpillType::kNoSpillType) |
(...skipping 174 matching lines...) Expand 10 before | Expand all | Expand 10 after
440 441
441 void LiveRange::SetSpillOperand(InstructionOperand* operand) { 442 void LiveRange::SetSpillOperand(InstructionOperand* operand) {
442 DCHECK(HasNoSpillType()); 443 DCHECK(HasNoSpillType());
443 DCHECK(!operand->IsUnallocated() && !operand->IsImmediate()); 444 DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
444 set_spill_type(SpillType::kSpillOperand); 445 set_spill_type(SpillType::kSpillOperand);
445 spill_operand_ = operand; 446 spill_operand_ = operand;
446 } 447 }
447 448
448 449
449 void LiveRange::SetSpillRange(SpillRange* spill_range) { 450 void LiveRange::SetSpillRange(SpillRange* spill_range) {
450 DCHECK(HasNoSpillType() || HasSpillRange()); 451 DCHECK(!HasSpillOperand());
451 DCHECK(spill_range); 452 DCHECK(spill_range);
452 set_spill_type(SpillType::kSpillRange); 453 DCHECK(!IsChild());
453 spill_range_ = spill_range; 454 spill_range_ = spill_range;
454 } 455 }
455 456
456 457
457 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const { 458 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
458 UsePosition* use_pos = last_processed_use_; 459 UsePosition* use_pos = last_processed_use_;
459 if (use_pos == nullptr || use_pos->pos() > start) { 460 if (use_pos == nullptr || use_pos->pos() > start) {
460 use_pos = first_pos(); 461 use_pos = first_pos();
461 } 462 }
462 while (use_pos != nullptr && use_pos->pos() < start) { 463 while (use_pos != nullptr && use_pos->pos() < start) {
(...skipping 198 matching lines...) Expand 10 before | Expand all | Expand 10 after
661 // invalid at construction. 662 // invalid at construction.
662 size_ = kInvalidSize; 663 size_ = kInvalidSize;
663 weight_ = kInvalidWeight; 664 weight_ = kInvalidWeight;
664 #ifdef DEBUG 665 #ifdef DEBUG
665 Verify(); 666 Verify();
666 result->Verify(); 667 result->Verify();
667 #endif 668 #endif
668 } 669 }
669 670
670 671
// Carves the interval [start, end) out of this live range and moves it into
// |result|, leaving this range with a hole. Used to splinter code residing in
// deferred blocks out of a range so it can be allocated independently.
void LiveRange::Splinter(LifetimePosition start, LifetimePosition end,
                         LiveRange* result, Zone* zone) {
  DCHECK(start != Start() || end != End());
  DCHECK(start < end);

  result->set_spill_type(spill_type());

  if (start <= Start()) {
    // The splinter covers a prefix of this range: split off everything from
    // |end| onward into |result|, then detach it from our chain.
    DCHECK(end < End());
    SplitAt(end, result, zone);
    next_ = nullptr;
  } else if (end >= End()) {
    // The splinter covers a suffix: split at |start|; |result| gets the tail.
    DCHECK(start > Start());
    SplitAt(start, result, zone);
    next_ = nullptr;
  } else {
    // The splinter lies strictly inside this range: split twice, then stitch
    // the portion after |end| back onto this range so we keep both sides of
    // the hole as a single (discontinuous) range.
    DCHECK(start < End() && Start() < end);

    const int kInvalidId = std::numeric_limits<int>::max();

    SplitAt(start, result, zone);

    // Temporary holder for [end, End()); its intervals and use positions are
    // spliced back into this range below. Never registered anywhere, hence
    // the invalid id.
    LiveRange end_part(kInvalidId, this->machine_type());
    result->SplitAt(end, &end_part, zone);

    // Re-attach the tail's intervals after the hole.
    next_ = end_part.next_;
    last_interval_->set_next(end_part.first_interval_);
    last_interval_ = end_part.last_interval_;


    // Append the tail's use positions to the end of our use-position list.
    if (first_pos_ == nullptr) {
      first_pos_ = end_part.first_pos_;
    } else {
      UsePosition* pos = first_pos_;
      // Walk to the last use position.
      for (; pos->next() != nullptr; pos = pos->next()) {
      }
      pos->set_next(end_part.first_pos_);
    }
  }
  // The splinter becomes a stand-alone top-level range, linked back to this
  // range via SetSplinteredFrom.
  result->next_ = nullptr;
  result->parent_ = nullptr;

  result->SetSplinteredFrom(this);
}
716
717
// Records |splinter_parent| as the top-level range this splinter was carved
// from, and shares the parent's spill range unless this range already has a
// spill operand of its own.
void LiveRange::SetSplinteredFrom(LiveRange* splinter_parent) {
  // The splinter parent is always the original "Top".
  DCHECK(splinter_parent->Start() < Start());
  DCHECK(!splinter_parent->IsChild());

  splintered_from_ = splinter_parent;
  if (!HasSpillOperand()) {
    SetSpillRange(splinter_parent->spill_range_);
  }
}
728
729
// Links the (top-level) range |other| directly after this range, then fixes
// up parent pointers for |other|'s whole chain and reconciles the spill range
// decision on the top level.
void LiveRange::AppendChild(LiveRange* other) {
  DCHECK(!other->IsChild());
  next_ = other;

  other->UpdateParentForAllChildren(TopLevel());
  TopLevel()->UpdateSpillRangePostMerge(other);
}
737
738
// After |merged| has been merged into this top-level range: if this range has
// no spill decision yet but |merged| carries a spill range, adopt |merged|'s
// spill type and clear |merged|'s spill state so the spill range is owned by
// exactly one top-level range.
void LiveRange::UpdateSpillRangePostMerge(LiveRange* merged) {
  DCHECK(!IsChild());
  DCHECK(merged->TopLevel() == this);

  if (HasNoSpillType() && merged->HasSpillRange()) {
    set_spill_type(merged->spill_type());
    DCHECK(GetSpillRange()->live_ranges().size() > 0);
    // Detach the spill range from |merged|; reset its spill type directly on
    // bits_ since set_spill_type would be called on the wrong object here.
    merged->spill_range_ = nullptr;
    merged->bits_ = SpillTypeField::update(merged->bits_,
                                           LiveRange::SpillType::kNoSpillType);
  }
}
751
752
753 void LiveRange::UpdateParentForAllChildren(LiveRange* new_parent) {
754 LiveRange* child = this;
755 for (; child != nullptr; child = child->next()) {
756 child->parent_ = new_parent;
757 }
758 }
759
760
761 LiveRange* LiveRange::GetLastChild() {
762 LiveRange* ret = this;
763 for (; ret->next() != nullptr; ret = ret->next()) {
764 }
765 return ret;
766 }
767
768
// Merges the splintered top-level range |other| back into this top-level
// range, keeping the resulting chain ordered by start position.
void LiveRange::Merge(LiveRange* other, RegisterAllocationData* data) {
  DCHECK(!IsChild());
  DCHECK(!other->IsChild());
  DCHECK(Start() < other->Start());

  LiveRange* last_other = other->GetLastChild();
  LiveRange* last_me = GetLastChild();

  // Simple case: we just append at the end.
  if (last_me->End() <= other->Start()) return last_me->AppendChild(other);

  DCHECK(last_me->End() > last_other->End());

  // In the more general case, we need to find the ranges between which to
  // insert.
  LiveRange* insertion_point = this;
  // Advance to the last range that starts no later than |other|.
  for (; insertion_point->next() != nullptr &&
         insertion_point->next()->Start() <= other->Start();
       insertion_point = insertion_point->next()) {
  }

  // When we splintered the original range, we reconstituted the original range
  // into one range without children, but with discontinuities. To merge the
  // splinter back in, we need to split the range - or a child obtained after
  // register allocation splitting.
  LiveRange* after = insertion_point->next();
  if (insertion_point->End() > other->Start()) {
    // |insertion_point| overlaps |other|'s start: split it there and carry
    // its allocation decision (spill / assigned register) over to the tail.
    LiveRange* new_after = data->NewChildRangeFor(insertion_point);
    insertion_point->SplitAt(other->Start(), new_after,
                             data->allocation_zone());
    new_after->set_spilled(insertion_point->spilled());
    if (!new_after->spilled())
      new_after->set_assigned_register(insertion_point->assigned_register());
    after = new_after;
  }

  // Splice |other|'s chain between |insertion_point| and |after|, then fix up
  // parents and the top level's spill decision.
  last_other->next_ = after;
  insertion_point->next_ = other;
  other->UpdateParentForAllChildren(TopLevel());
  TopLevel()->UpdateSpillRangePostMerge(other);
}
810
811
671 // This implements an ordering on live ranges so that they are ordered by their 812 // This implements an ordering on live ranges so that they are ordered by their
672 // start positions. This is needed for the correctness of the register 813 // start positions. This is needed for the correctness of the register
673 // allocation algorithm. If two live ranges start at the same offset then there 814 // allocation algorithm. If two live ranges start at the same offset then there
674 // is a tie breaker based on where the value is first used. This part of the 815 // is a tie breaker based on where the value is first used. This part of the
675 // ordering is merely a heuristic. 816 // ordering is merely a heuristic.
676 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const { 817 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
677 LifetimePosition start = Start(); 818 LifetimePosition start = Start();
678 LifetimePosition other_start = other->Start(); 819 LifetimePosition other_start = other->Start();
679 if (start == other_start) { 820 if (start == other_start) {
680 UsePosition* pos = first_pos(); 821 UsePosition* pos = first_pos();
(...skipping 225 matching lines...) Expand 10 before | Expand all | Expand 10 after
906 os << '[' << interval->start() << ", " << interval->end() << ')' 1047 os << '[' << interval->start() << ", " << interval->end() << ')'
907 << std::endl; 1048 << std::endl;
908 interval = interval->next(); 1049 interval = interval->next();
909 } 1050 }
910 os << "}"; 1051 os << "}";
911 return os; 1052 return os;
912 } 1053 }
913 1054
914 1055
915 SpillRange::SpillRange(LiveRange* parent, Zone* zone) 1056 SpillRange::SpillRange(LiveRange* parent, Zone* zone)
916 : live_ranges_(zone), assigned_slot_(kUnassignedSlot) { 1057 : live_ranges_(zone),
1058 assigned_slot_(kUnassignedSlot),
1059 byte_width_(GetByteWidth(parent->machine_type())),
1060 kind_(parent->kind()) {
1061 // Spill ranges are created for top level, non-splintered ranges. This is so
1062 // that, when merging decisions are made, we consider the full extent of the
1063 // virtual register, and avoid clobbering it.
917 DCHECK(!parent->IsChild()); 1064 DCHECK(!parent->IsChild());
1065 DCHECK(!parent->IsSplinter());
918 UseInterval* result = nullptr; 1066 UseInterval* result = nullptr;
919 UseInterval* node = nullptr; 1067 UseInterval* node = nullptr;
920 // Copy the intervals for all ranges. 1068 // Copy the intervals for all ranges.
921 for (auto range = parent; range != nullptr; range = range->next()) { 1069 for (auto range = parent; range != nullptr; range = range->next()) {
922 auto src = range->first_interval(); 1070 auto src = range->first_interval();
923 while (src != nullptr) { 1071 while (src != nullptr) {
924 auto new_node = new (zone) UseInterval(src->start(), src->end()); 1072 auto new_node = new (zone) UseInterval(src->start(), src->end());
925 if (result == nullptr) { 1073 if (result == nullptr) {
926 result = new_node; 1074 result = new_node;
927 } else { 1075 } else {
928 node->set_next(new_node); 1076 node->set_next(new_node);
929 } 1077 }
930 node = new_node; 1078 node = new_node;
931 src = src->next(); 1079 src = src->next();
932 } 1080 }
933 } 1081 }
934 use_interval_ = result; 1082 use_interval_ = result;
935 live_ranges().push_back(parent); 1083 live_ranges().push_back(parent);
936 end_position_ = node->end(); 1084 end_position_ = node->end();
937 DCHECK(!parent->HasSpillRange());
938 parent->SetSpillRange(this); 1085 parent->SetSpillRange(this);
939 } 1086 }
940 1087
941 1088
942 int SpillRange::ByteWidth() const { 1089 int SpillRange::ByteWidth() const {
943 return GetByteWidth(live_ranges_[0]->machine_type()); 1090 return GetByteWidth(live_ranges_[0]->machine_type());
944 } 1091 }
945 1092
946 1093
947 bool SpillRange::IsIntersectingWith(SpillRange* other) const { 1094 bool SpillRange::IsIntersectingWith(SpillRange* other) const {
(...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after
1049 allocation_zone()), 1196 allocation_zone()),
1050 spill_ranges_(allocation_zone()), 1197 spill_ranges_(allocation_zone()),
1051 delayed_references_(allocation_zone()), 1198 delayed_references_(allocation_zone()),
1052 assigned_registers_(nullptr), 1199 assigned_registers_(nullptr),
1053 assigned_double_registers_(nullptr), 1200 assigned_double_registers_(nullptr),
1054 virtual_register_count_(code->VirtualRegisterCount()) { 1201 virtual_register_count_(code->VirtualRegisterCount()) {
1055 DCHECK(this->config()->num_general_registers() <= 1202 DCHECK(this->config()->num_general_registers() <=
1056 RegisterConfiguration::kMaxGeneralRegisters); 1203 RegisterConfiguration::kMaxGeneralRegisters);
1057 DCHECK(this->config()->num_double_registers() <= 1204 DCHECK(this->config()->num_double_registers() <=
1058 RegisterConfiguration::kMaxDoubleRegisters); 1205 RegisterConfiguration::kMaxDoubleRegisters);
1059 spill_ranges().reserve(8);
1060 assigned_registers_ = new (code_zone()) 1206 assigned_registers_ = new (code_zone())
1061 BitVector(this->config()->num_general_registers(), code_zone()); 1207 BitVector(this->config()->num_general_registers(), code_zone());
1062 assigned_double_registers_ = new (code_zone()) 1208 assigned_double_registers_ = new (code_zone())
1063 BitVector(this->config()->num_aliased_double_registers(), code_zone()); 1209 BitVector(this->config()->num_aliased_double_registers(), code_zone());
1064 this->frame()->SetAllocatedRegisters(assigned_registers_); 1210 this->frame()->SetAllocatedRegisters(assigned_registers_);
1065 this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_); 1211 this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1066 } 1212 }
1067 1213
1068 1214
1069 MoveOperands* RegisterAllocationData::AddGapMove( 1215 MoveOperands* RegisterAllocationData::AddGapMove(
(...skipping 23 matching lines...) Expand all
1093 return result; 1239 return result;
1094 } 1240 }
1095 1241
1096 1242
1097 LiveRange* RegisterAllocationData::NewLiveRange(int index, 1243 LiveRange* RegisterAllocationData::NewLiveRange(int index,
1098 MachineType machine_type) { 1244 MachineType machine_type) {
1099 return new (allocation_zone()) LiveRange(index, machine_type); 1245 return new (allocation_zone()) LiveRange(index, machine_type);
1100 } 1246 }
1101 1247
1102 1248
1249 LiveRange* RegisterAllocationData::NextLiveRange(MachineType machine_type) {
1250 int vreg = virtual_register_count_++;
1251 if (vreg >= static_cast<int>(live_ranges().size())) {
1252 live_ranges().resize(vreg + 1, nullptr);
1253 }
1254 LiveRange* ret = NewLiveRange(vreg, machine_type);
1255 return ret;
1256 }
1257
1258
1103 LiveRange* RegisterAllocationData::NewChildRangeFor(LiveRange* range) { 1259 LiveRange* RegisterAllocationData::NewChildRangeFor(LiveRange* range) {
1104 int vreg = virtual_register_count_++; 1260 auto child = NextLiveRange(range->machine_type());
1105 if (vreg >= static_cast<int>(live_ranges().size())) { 1261 DCHECK_NULL(live_ranges()[child->id()]);
1106 live_ranges().resize(vreg + 1, nullptr); 1262 live_ranges()[child->id()] = child;
1107 }
1108 auto child = new (allocation_zone()) LiveRange(vreg, range->machine_type());
1109 DCHECK_NULL(live_ranges()[vreg]);
1110 live_ranges()[vreg] = child;
1111 return child; 1263 return child;
1112 } 1264 }
1113 1265
1114 1266
1115 RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap( 1267 RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
1116 const InstructionBlock* block, PhiInstruction* phi) { 1268 const InstructionBlock* block, PhiInstruction* phi) {
1117 auto map_value = new (allocation_zone()) 1269 auto map_value = new (allocation_zone())
1118 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone()); 1270 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
1119 auto res = 1271 auto res =
1120 phi_map_.insert(std::make_pair(phi->virtual_register(), map_value)); 1272 phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
(...skipping 27 matching lines...) Expand all
1148 PrintF(" (function: %s)\n", debug_name()); 1300 PrintF(" (function: %s)\n", debug_name());
1149 } 1301 }
1150 iterator.Advance(); 1302 iterator.Advance();
1151 } 1303 }
1152 return found; 1304 return found;
1153 } 1305 }
1154 1306
1155 1307
1156 SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange( 1308 SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
1157 LiveRange* range) { 1309 LiveRange* range) {
1158 auto spill_range = 1310 DCHECK(!range->IsChild());
1159 new (allocation_zone()) SpillRange(range, allocation_zone()); 1311 DCHECK(!range->HasSpillOperand());
1160 spill_ranges().push_back(spill_range); 1312
1313 SpillRange* spill_range = range->GetAllocatedSpillRange();
1314 if (spill_range == nullptr) {
1315 DCHECK(!range->IsSplinter());
1316 spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
1317 }
1318 range->set_spill_type(LiveRange::SpillType::kSpillRange);
1319
1320 spill_ranges().insert(spill_range);
1161 return spill_range; 1321 return spill_range;
1162 } 1322 }
1163 1323
1324
1325 SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
1326 LiveRange* range) {
1327 DCHECK(!range->HasSpillOperand());
1328 DCHECK(!range->IsChild());
1329 DCHECK(!range->IsSplinter());
1330 auto spill_range =
1331 new (allocation_zone()) SpillRange(range, allocation_zone());
1332 return spill_range;
1333 }
1334
1164 1335
1165 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) { 1336 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) {
1166 if (kind == DOUBLE_REGISTERS) { 1337 if (kind == DOUBLE_REGISTERS) {
1167 assigned_double_registers_->Add(index); 1338 assigned_double_registers_->Add(index);
1168 } else { 1339 } else {
1169 DCHECK(kind == GENERAL_REGISTERS); 1340 DCHECK(kind == GENERAL_REGISTERS);
1170 assigned_registers_->Add(index); 1341 assigned_registers_->Add(index);
1171 } 1342 }
1172 } 1343 }
1173 1344
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
1223 OFStream os(stdout); 1394 OFStream os(stdout);
1224 PrintableInstructionOperand wrapper; 1395 PrintableInstructionOperand wrapper;
1225 wrapper.register_configuration_ = config(); 1396 wrapper.register_configuration_ = config();
1226 wrapper.op_ = move->destination(); 1397 wrapper.op_ = move->destination();
1227 os << wrapper << " = "; 1398 os << wrapper << " = ";
1228 wrapper.op_ = move->source(); 1399 wrapper.op_ = move->source();
1229 os << wrapper << std::endl; 1400 os << wrapper << std::endl;
1230 } 1401 }
1231 1402
1232 1403
1404 void RegisterAllocationData::Print(const SpillRange* spill_range) {
1405 OFStream os(stdout);
1406 os << "{" << std::endl;
1407 for (LiveRange* range : spill_range->live_ranges()) {
1408 os << range->id() << " ";
1409 }
1410 os << std::endl;
1411
1412 for (UseInterval* interval = spill_range->interval(); interval != nullptr;
1413 interval = interval->next()) {
1414 os << '[' << interval->start() << ", " << interval->end() << ')'
1415 << std::endl;
1416 }
1417 os << "}" << std::endl;
1418 }
1419
1420
1233 ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data) 1421 ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
1234 : data_(data) {} 1422 : data_(data) {}
1235 1423
1236 1424
1237 InstructionOperand* ConstraintBuilder::AllocateFixed( 1425 InstructionOperand* ConstraintBuilder::AllocateFixed(
1238 UnallocatedOperand* operand, int pos, bool is_tagged) { 1426 UnallocatedOperand* operand, int pos, bool is_tagged) {
1239 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register()); 1427 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
1240 DCHECK(operand->HasFixedPolicy()); 1428 DCHECK(operand->HasFixedPolicy());
1241 InstructionOperand allocated; 1429 InstructionOperand allocated;
1242 MachineType machine_type = InstructionSequence::DefaultRepresentation(); 1430 MachineType machine_type = InstructionSequence::DefaultRepresentation();
(...skipping 224 matching lines...) Expand 10 before | Expand all | Expand 10 after
1467 live_range->set_is_non_loop_phi(!block->IsLoopHeader()); 1655 live_range->set_is_non_loop_phi(!block->IsLoopHeader());
1468 } 1656 }
1469 } 1657 }
1470 1658
1471 1659
1472 LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data, 1660 LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
1473 Zone* local_zone) 1661 Zone* local_zone)
1474 : data_(data), phi_hints_(local_zone) {} 1662 : data_(data), phi_hints_(local_zone) {}
1475 1663
1476 1664
1477 BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block) { 1665 BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
1666 RegisterAllocationData* data) {
1478 // Compute live out for the given block, except not including backward 1667 // Compute live out for the given block, except not including backward
1479 // successor edges. 1668 // successor edges.
1480 auto live_out = new (allocation_zone()) 1669 Zone* zone = data->allocation_zone();
1481 BitVector(code()->VirtualRegisterCount(), allocation_zone()); 1670 const InstructionSequence* code = data->code();
1671
1672 auto live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);
1482 1673
1483 // Process all successor blocks. 1674 // Process all successor blocks.
1484 for (auto succ : block->successors()) { 1675 for (auto succ : block->successors()) {
1485 // Add values live on entry to the successor. Note the successor's 1676 // Add values live on entry to the successor.
1486 // live_in will not be computed yet for backwards edges. 1677 if (succ <= block->rpo_number()) continue;
1487 auto live_in = live_in_sets()[succ.ToSize()]; 1678 auto live_in = data->live_in_sets()[succ.ToSize()];
1488 if (live_in != nullptr) live_out->Union(*live_in); 1679 if (live_in != nullptr) live_out->Union(*live_in);
1489 1680
1490 // All phi input operands corresponding to this successor edge are live 1681 // All phi input operands corresponding to this successor edge are live
1491 // out from this block. 1682 // out from this block.
1492 auto successor = code()->InstructionBlockAt(succ); 1683 auto successor = code->InstructionBlockAt(succ);
1493 size_t index = successor->PredecessorIndexOf(block->rpo_number()); 1684 size_t index = successor->PredecessorIndexOf(block->rpo_number());
1494 DCHECK(index < successor->PredecessorCount()); 1685 DCHECK(index < successor->PredecessorCount());
1495 for (auto phi : successor->phis()) { 1686 for (auto phi : successor->phis()) {
1496 live_out->Add(phi->operands()[index]); 1687 live_out->Add(phi->operands()[index]);
1497 } 1688 }
1498 } 1689 }
1499 return live_out; 1690 return live_out;
1500 } 1691 }
1501 1692
1502 1693
(...skipping 324 matching lines...) Expand 10 before | Expand all | Expand 10 after
1827 live_in_sets()[i]->Union(*live); 2018 live_in_sets()[i]->Union(*live);
1828 } 2019 }
1829 } 2020 }
1830 2021
1831 2022
1832 void LiveRangeBuilder::BuildLiveRanges() { 2023 void LiveRangeBuilder::BuildLiveRanges() {
1833 // Process the blocks in reverse order. 2024 // Process the blocks in reverse order.
1834 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0; 2025 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
1835 --block_id) { 2026 --block_id) {
1836 auto block = code()->InstructionBlockAt(RpoNumber::FromInt(block_id)); 2027 auto block = code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
1837 auto live = ComputeLiveOut(block); 2028 auto live = ComputeLiveOut(block, data());
1838 // Initially consider all live_out values live for the entire block. We 2029 // Initially consider all live_out values live for the entire block. We
1839 // will shorten these intervals if necessary. 2030 // will shorten these intervals if necessary.
1840 AddInitialIntervals(block, live); 2031 AddInitialIntervals(block, live);
1841 // Process the instructions in reverse order, generating and killing 2032 // Process the instructions in reverse order, generating and killing
1842 // live values. 2033 // live values.
1843 ProcessInstructions(block, live); 2034 ProcessInstructions(block, live);
1844 // All phi output operands are killed by this block. 2035 // All phi output operands are killed by this block.
1845 ProcessPhis(block, live); 2036 ProcessPhis(block, live);
1846 // Now live is live_in for this block except not including values live 2037 // Now live is live_in for this block except not including values live
1847 // out on backward successor edges. 2038 // out on backward successor edges.
(...skipping 742 matching lines...) Expand 10 before | Expand all | Expand 10 after
2590 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame(); 2781 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
2591 } 2782 }
2592 } 2783 }
2593 } 2784 }
2594 2785
2595 2786
2596 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {} 2787 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
2597 2788
2598 2789
2599 void OperandAssigner::AssignSpillSlots() { 2790 void OperandAssigner::AssignSpillSlots() {
2600 auto& spill_ranges = data()->spill_ranges(); 2791 ZoneSet<SpillRange*>& spill_ranges = data()->spill_ranges();
2601 // Merge disjoint spill ranges 2792 // Merge disjoint spill ranges
2602 for (size_t i = 0; i < spill_ranges.size(); i++) { 2793 for (auto i = spill_ranges.begin(), end = spill_ranges.end(); i != end; ++i) {
2603 auto range = spill_ranges[i]; 2794 SpillRange* range = *i;
2604 if (range->IsEmpty()) continue; 2795 if (range->IsEmpty()) continue;
2605 for (size_t j = i + 1; j < spill_ranges.size(); j++) { 2796 auto j = i;
2606 auto other = spill_ranges[j]; 2797 j++;
2798 for (; j != end; ++j) {
2799 SpillRange* other = *j;
2607 if (!other->IsEmpty()) { 2800 if (!other->IsEmpty()) {
2608 range->TryMerge(other); 2801 range->TryMerge(other);
2609 } 2802 }
2610 } 2803 }
2611 } 2804 }
2612 // Allocate slots for the merged spill ranges. 2805 // Allocate slots for the merged spill ranges.
2613 for (auto range : spill_ranges) { 2806 for (auto range : spill_ranges) {
2614 if (range->IsEmpty()) continue; 2807 if (range->IsEmpty()) continue;
2615 // Allocate a new operand referring to the spill slot. 2808 // Allocate a new operand referring to the spill slot.
2616 int byte_width = range->ByteWidth(); 2809 int byte_width = range->ByteWidth();
(...skipping 442 matching lines...) Expand 10 before | Expand all | Expand 10 after
3059 auto eliminate = moves->PrepareInsertAfter(move); 3252 auto eliminate = moves->PrepareInsertAfter(move);
3060 to_insert.push_back(move); 3253 to_insert.push_back(move);
3061 if (eliminate != nullptr) to_eliminate.push_back(eliminate); 3254 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
3062 } 3255 }
3063 } 3256 }
3064 3257
3065 3258
3066 } // namespace compiler 3259 } // namespace compiler
3067 } // namespace internal 3260 } // namespace internal
3068 } // namespace v8 3261 } // namespace v8
OLDNEW
« no previous file with comments | « src/compiler/register-allocator.h ('k') | src/flag-definitions.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698