Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(343)

Side by Side Diff: src/compiler/register-allocator.cc

Issue 1105793002: Greedy reg alloc - better splitting (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/base/adapters.h" 5 #include "src/base/adapters.h"
6 #include "src/compiler/linkage.h" 6 #include "src/compiler/linkage.h"
7 #include "src/compiler/register-allocator.h" 7 #include "src/compiler/register-allocator.h"
8 #include "src/string-stream.h" 8 #include "src/string-stream.h"
9 9
10 namespace v8 { 10 namespace v8 {
(...skipping 186 matching lines...) Expand 10 before | Expand all | Expand 10 after
197 UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) { 197 UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
198 DCHECK(Contains(pos) && pos != start()); 198 DCHECK(Contains(pos) && pos != start());
199 auto after = new (zone) UseInterval(pos, end_); 199 auto after = new (zone) UseInterval(pos, end_);
200 after->next_ = next_; 200 after->next_ = next_;
201 next_ = nullptr; 201 next_ = nullptr;
202 end_ = pos; 202 end_ = pos;
203 return after; 203 return after;
204 } 204 }
205 205
206 206
207 std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
208 os << '@' << pos.ToInstructionIndex();
209 if (pos.IsGapPosition()) {
210 os << 'g';
211 } else {
212 os << 'i';
213 }
214 if (pos.IsStart()) {
215 os << 's';
216 } else {
217 os << 'e';
218 }
219 return os;
220 }
221
222
// Singly-linked list node recording a pending spill at a definition: the gap
// index where the spill move belongs and the operand to spill to. Nodes are
// zone-allocated and immutable after construction.
struct LiveRange::SpillAtDefinitionList : ZoneObject {
  SpillAtDefinitionList(int gap_index, InstructionOperand* operand,
                        SpillAtDefinitionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;
  InstructionOperand* const operand;
  SpillAtDefinitionList* const next;
};
215 231
216 232
(...skipping 637 matching lines...) Expand 10 before | Expand all | Expand 10 after
854 phi_map_(allocation_zone()), 870 phi_map_(allocation_zone()),
855 live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()), 871 live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
856 live_ranges_(code->VirtualRegisterCount() * 2, nullptr, 872 live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
857 allocation_zone()), 873 allocation_zone()),
858 fixed_live_ranges_(this->config()->num_general_registers(), nullptr, 874 fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
859 allocation_zone()), 875 allocation_zone()),
860 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr, 876 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
861 allocation_zone()), 877 allocation_zone()),
862 spill_ranges_(allocation_zone()), 878 spill_ranges_(allocation_zone()),
863 assigned_registers_(nullptr), 879 assigned_registers_(nullptr),
864 assigned_double_registers_(nullptr) { 880 assigned_double_registers_(nullptr),
881 dbgs_(stdout) {
865 DCHECK(this->config()->num_general_registers() <= 882 DCHECK(this->config()->num_general_registers() <=
866 RegisterConfiguration::kMaxGeneralRegisters); 883 RegisterConfiguration::kMaxGeneralRegisters);
867 DCHECK(this->config()->num_double_registers() <= 884 DCHECK(this->config()->num_double_registers() <=
868 RegisterConfiguration::kMaxDoubleRegisters); 885 RegisterConfiguration::kMaxDoubleRegisters);
869 spill_ranges().reserve(8); 886 spill_ranges().reserve(8);
870 assigned_registers_ = new (code_zone()) 887 assigned_registers_ = new (code_zone())
871 BitVector(this->config()->num_general_registers(), code_zone()); 888 BitVector(this->config()->num_general_registers(), code_zone());
872 assigned_double_registers_ = new (code_zone()) 889 assigned_double_registers_ = new (code_zone())
873 BitVector(this->config()->num_aliased_double_registers(), code_zone()); 890 BitVector(this->config()->num_aliased_double_registers(), code_zone());
874 this->frame()->SetAllocatedRegisters(assigned_registers_); 891 this->frame()->SetAllocatedRegisters(assigned_registers_);
875 this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_); 892 this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
876 } 893 }
877 894
878 895
879 LiveRange* RegisterAllocationData::LiveRangeFor(int index) { 896 LiveRange* RegisterAllocationData::LiveRangeFor(int index) {
880 if (index >= static_cast<int>(live_ranges().size())) { 897 if (index >= static_cast<int>(live_ranges().size())) {
881 live_ranges().resize(index + 1, nullptr); 898 live_ranges().resize(index + 1, nullptr);
882 } 899 }
883 auto result = live_ranges()[index]; 900 auto result = live_ranges()[index];
884 if (result == nullptr) { 901 if (result == nullptr) {
885 result = NewLiveRange(index); 902 result = NewLiveRange(index);
886 live_ranges()[index] = result; 903 live_ranges()[index] = result;
887 } 904 }
888 return result; 905 return result;
889 } 906 }
890 907
891 908
909 std::ostream& operator<<(std::ostream& os,
dcarney 2015/04/30 06:01:32 this should come after the live range class functi
Mircea Trofin 2015/04/30 18:16:49 Done.
910 const PrintableLiveRange printable_range) {
911 const LiveRange* range = printable_range.range_;
912 os << "Range: " << range->id() << " ";
913 if (range->is_phi()) os << "phi ";
914 if (range->is_non_loop_phi()) os << "nlphi ";
915
916 os << "{" << std::endl;
917 auto interval = range->first_interval();
918 auto use_pos = range->first_pos();
919 PrintableInstructionOperand pio;
920 pio.register_configuration_ = printable_range.register_configuration_;
921 while (use_pos != nullptr) {
922 pio.op_ = *use_pos->operand();
923 os << pio << use_pos->pos() << " ";
924 use_pos = use_pos->next();
925 }
926 os << std::endl;
927
928 while (interval != nullptr) {
929 os << '[' << interval->start() << ", " << interval->end() << ')'
930 << std::endl;
931 interval = interval->next();
932 }
933 os << "}";
934 return os;
935 }
936
937
892 MoveOperands* RegisterAllocationData::AddGapMove( 938 MoveOperands* RegisterAllocationData::AddGapMove(
893 int index, Instruction::GapPosition position, 939 int index, Instruction::GapPosition position,
894 const InstructionOperand& from, const InstructionOperand& to) { 940 const InstructionOperand& from, const InstructionOperand& to) {
895 auto instr = code()->InstructionAt(index); 941 auto instr = code()->InstructionAt(index);
896 auto moves = instr->GetOrCreateParallelMove(position, code_zone()); 942 auto moves = instr->GetOrCreateParallelMove(position, code_zone());
897 return moves->AddMove(from, to); 943 return moves->AddMove(from, to);
898 } 944 }
899 945
900 946
901 LiveRange* RegisterAllocationData::NewLiveRange(int index) { 947 LiveRange* RegisterAllocationData::NewLiveRange(int index) {
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after
956 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) { 1002 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) {
957 if (kind == DOUBLE_REGISTERS) { 1003 if (kind == DOUBLE_REGISTERS) {
958 assigned_double_registers_->Add(index); 1004 assigned_double_registers_->Add(index);
959 } else { 1005 } else {
960 DCHECK(kind == GENERAL_REGISTERS); 1006 DCHECK(kind == GENERAL_REGISTERS);
961 assigned_registers_->Add(index); 1007 assigned_registers_->Add(index);
962 } 1008 }
963 } 1009 }
964 1010
965 1011
// Debugging helpers: dump a readable representation of allocator data
// structures to the dbgs_ stream. Each simply routes through ToPrintable.
// NOTE(review): ALLOW_UNUSED_TYPE is conventionally applied to type
// declarations rather than functions — confirm this is the intended macro.
ALLOW_UNUSED_TYPE
void RegisterAllocationData::Print(Instruction* ins) {
  dbgs() << ToPrintable(ins) << std::endl;
}

ALLOW_UNUSED_TYPE
void RegisterAllocationData::Print(InstructionOperand op) {
  dbgs() << ToPrintable(op) << std::endl;
}

ALLOW_UNUSED_TYPE
void RegisterAllocationData::Print(InstructionSequence* seq) {
  dbgs() << ToPrintable(seq) << std::endl;
}

ALLOW_UNUSED_TYPE
void RegisterAllocationData::Print(LiveRange* range) {
  dbgs() << ToPrintable(range) << std::endl;
}
1031
1032
// ConstraintBuilder resolves operand policies (e.g. fixed register/slot, see
// AllocateFixed below) against the shared allocation data; |data| is
// borrowed, not owned.
ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
    : data_(data) {}
968 1035
969 1036
970 InstructionOperand* ConstraintBuilder::AllocateFixed( 1037 InstructionOperand* ConstraintBuilder::AllocateFixed(
971 UnallocatedOperand* operand, int pos, bool is_tagged) { 1038 UnallocatedOperand* operand, int pos, bool is_tagged) {
972 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register()); 1039 TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
973 DCHECK(operand->HasFixedPolicy()); 1040 DCHECK(operand->HasFixedPolicy());
974 InstructionOperand allocated; 1041 InstructionOperand allocated;
975 if (operand->HasFixedSlotPolicy()) { 1042 if (operand->HasFixedSlotPolicy()) {
(...skipping 1416 matching lines...) Expand 10 before | Expand all | Expand 10 after
2392 } 2459 }
2393 range->set_assigned_register(reg_id); 2460 range->set_assigned_register(reg_id);
2394 range->SetUseHints(reg_id); 2461 range->SetUseHints(reg_id);
2395 if (range->is_phi()) { 2462 if (range->is_phi()) {
2396 data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id); 2463 data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id);
2397 } 2464 }
2398 } 2465 }
2399 2466
2400 2467
2401 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) { 2468 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) {
2469 InstructionOperand* first_hint = nullptr;
2470 if (range->FirstHintPosition() != nullptr) {
2471 first_hint = range->FirstHintPosition()->operand();
2472 }
2473
2402 if (range->IsFixed()) return std::numeric_limits<float>::max(); 2474 if (range->IsFixed()) return std::numeric_limits<float>::max();
2475 bool spill;
2476 if (!FindProgressingSplitPosition(range, &spill).IsValid())
2477 return std::numeric_limits<float>::max();
2403 2478
2404 int hint_register; 2479 float multiplier = 1.0;
2405 if (range->FirstHintPosition(&hint_register) != nullptr) { 2480 if (first_hint != nullptr && first_hint->IsRegister()) {
2406 return std::numeric_limits<float>::max(); 2481 multiplier = 3.0;
2407 } 2482 }
2408 2483
2409 unsigned use_count = 0; 2484 unsigned use_count = 0;
2410 auto* pos = range->first_pos(); 2485 auto* pos = range->first_pos();
2411 while (pos != nullptr) { 2486 while (pos != nullptr) {
2412 use_count++; 2487 use_count++;
2413 pos = pos->next(); 2488 pos = pos->next();
2414 } 2489 }
2415 2490
2416 // GetLiveRangeSize is DCHECK-ed to not be 0
2417 unsigned range_size = GetLiveRangeSize(range); 2491 unsigned range_size = GetLiveRangeSize(range);
2418 DCHECK_NE(0U, range_size); 2492 DCHECK_NE(0U, range_size);
2419 2493
2420 return static_cast<float>(use_count) / static_cast<float>(range_size); 2494 return multiplier * static_cast<float>(use_count) /
2495 static_cast<float>(range_size);
2421 } 2496 }
2422 2497
2423 2498
2424 float GreedyAllocator::CalculateMaxSpillWeight( 2499 float GreedyAllocator::CalculateMaxSpillWeight(
2425 const ZoneSet<LiveRange*>& ranges) { 2500 const ZoneSet<LiveRange*>& ranges) {
2426 float max = 0.0; 2501 float max = 0.0;
2427 for (auto* r : ranges) { 2502 for (auto* r : ranges) {
2428 max = std::max(max, CalculateSpillWeight(r)); 2503 max = std::max(max, CalculateSpillWeight(r));
2429 } 2504 }
2430 return max; 2505 return max;
(...skipping 22 matching lines...) Expand all
2453 conflicting->insert(existing); 2528 conflicting->insert(existing);
2454 } 2529 }
2455 segment = segment->next(); 2530 segment = segment->next();
2456 } 2531 }
2457 if (!conflicting->empty()) return false; 2532 if (!conflicting->empty()) return false;
2458 // No conflicts means we can safely allocate this register to this range. 2533 // No conflicts means we can safely allocate this register to this range.
2459 AssignRangeToRegister(reg_id, range); 2534 AssignRangeToRegister(reg_id, range);
2460 return true; 2535 return true;
2461 } 2536 }
2462 2537
2538
2539 int GreedyAllocator::GetHintedRegister(LiveRange* range) {
2540 int reg;
2541 if (range->FirstHintPosition(&reg) != nullptr) {
2542 return reg;
2543 }
2544 return -1;
2545 }
2546
2547
// Tries to find a register for |current|. Order of attempts: the
// preassigned register for fixed ranges, then the hinted register (if any),
// then every other candidate. On overall failure, |conflicting| holds the
// union of ranges that blocked each attempted register; on success it is
// left empty.
bool GreedyAllocator::TryAllocate(LiveRange* current,
                                  ZoneSet<LiveRange*>* conflicting) {
  if (current->IsFixed()) {
    // A fixed range may only live in its preassigned register.
    return TryAllocatePhysicalRegister(current->assigned_register(), current,
                                       conflicting);
  }

  // Try the hinted register first. If it fails, its conflicts remain in
  // |conflicting| and the candidates below add theirs on top.
  int hinted_reg_id = GetHintedRegister(current);
  if (hinted_reg_id >= 0) {
    if (TryAllocatePhysicalRegister(hinted_reg_id, current, conflicting)) {
      return true;
    }
  }

  ZoneSet<LiveRange*> local_conflicts(data()->allocation_zone());
  for (unsigned candidate_reg = 0; candidate_reg < allocations_.size();
       candidate_reg++) {
    // The hinted register was already attempted above.
    if (hinted_reg_id >= 0 &&
        candidate_reg == static_cast<size_t>(hinted_reg_id))
      continue;
    local_conflicts.clear();
    if (TryAllocatePhysicalRegister(candidate_reg, current, &local_conflicts)) {
      conflicting->clear();
      return true;
    } else {
      conflicting->insert(local_conflicts.begin(), local_conflicts.end());
    }
  }
  return false;
}
2488 2578
2579
2489 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range, 2580 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range,
2490 LifetimePosition start, 2581 LifetimePosition start,
2491 LifetimePosition until, 2582 LifetimePosition until,
2492 LifetimePosition end) { 2583 LifetimePosition end) {
2493 CHECK(start < end); 2584 CHECK(start < end);
2494 auto second_part = SplitRangeAt(range, start); 2585 auto second_part = SplitRangeAt(range, start);
2495 2586
2496 if (second_part->Start() < end) { 2587 if (second_part->Start() < end) {
2497 // The split result intersects with [start, end[. 2588 // The split result intersects with [start, end[.
2498 // Split it at position between ]start+1, end[, spill the middle part 2589 // Split it at position between ]start+1, end[, spill the middle part
(...skipping 13 matching lines...) Expand all
2512 // The split result does not intersect with [start, end[. 2603 // The split result does not intersect with [start, end[.
2513 // Nothing to spill. Just return it for re-processing. 2604 // Nothing to spill. Just return it for re-processing.
2514 return second_part; 2605 return second_part;
2515 } 2606 }
2516 } 2607 }
2517 2608
2518 2609
2519 void GreedyAllocator::Enqueue(LiveRange* range) { 2610 void GreedyAllocator::Enqueue(LiveRange* range) {
2520 if (range == nullptr || range->IsEmpty()) return; 2611 if (range == nullptr || range->IsEmpty()) return;
2521 unsigned size = GetLiveRangeSize(range); 2612 unsigned size = GetLiveRangeSize(range);
2613 TRACE("Enqueuing range %d\n", range->id());
2522 queue_.push(std::make_pair(size, range)); 2614 queue_.push(std::make_pair(size, range));
2523 } 2615 }
2524 2616
2525 2617
2526 // TODO(mtrofin): consolidate with identical code segment in
2527 // LinearScanAllocator::AllocateRegisters
// Pre-allocation handling for ranges that already carry a spill type, plus
// phi spill-slot reuse. Returns true when the range was fully handled here
// and needs no register allocation pass.
bool GreedyAllocator::HandleSpillOperands(LiveRange* range) {
  auto position = range->Start();
  TRACE("Processing interval %d start=%d\n", range->id(), position.value());

  if (!range->HasNoSpillType()) {
    TRACE("Live range %d already has a spill operand\n", range->id());
    auto next_pos = position;
    // A use at a gap position is only serviceable from the next start.
    if (next_pos.IsGapPosition()) {
      next_pos = next_pos.NextStart();
    }
    auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
    // If the range already has a spill operand and it doesn't need a
    // register immediately, split it and spill the first part of the range.
    if (pos == nullptr) {
      // No use benefits from a register at all: spill the whole range.
      Spill(range);
      return true;
    } else if (pos->pos() > range->Start().NextStart()) {
      // Do not spill live range eagerly if use position that can benefit from
      // the register is too close to the start of live range.
      auto* reminder = SpillBetweenUntil(range, position, position, pos->pos());
      Enqueue(reminder);
      return true;
    }
  }
  return (TryReuseSpillForPhi(range));
}
2556 2644
2557 2645
// Main greedy allocation loop: seed the priority queue with all live ranges
// of this mode, pre-occupy fixed registers, then repeatedly take the
// top-priority range and either allocate it, evict cheaper conflicting
// ranges and retry, or split it at a progressing position.
void GreedyAllocator::AllocateRegisters() {
  for (auto range : data()->live_ranges()) {
    if (range == nullptr) continue;
    if (range->kind() == mode()) {
      DCHECK(!range->HasRegisterAssigned() && !range->spilled());
      TRACE("Enqueueing live range %d to priority queue \n", range->id());
      Enqueue(range);
    }
  }

  allocations_.resize(num_registers());
  // Per-register conflict sets come from the container's own zone.
  auto* zone = allocations_.get_allocator().zone();
  for (int i = 0; i < num_registers(); i++) {
    allocations_[i] = new (zone) CoallescedLiveRanges(zone);
  }

  // Fixed ranges occupy their registers up front.
  for (auto* current : GetFixedRegisters(data(), mode())) {
    if (current != nullptr) {
      DCHECK_EQ(mode(), current->kind());
      int reg_nr = current->assigned_register();
      bool inserted = allocations_[reg_nr]->Insert(current);
      CHECK(inserted);
    }
  }

  while (!queue_.empty()) {
    auto current_pair = queue_.top();
    queue_.pop();
    auto current = current_pair.second;
    if (HandleSpillOperands(current)) {
      continue;
    }
    bool spill = false;
    ZoneSet<LiveRange*> conflicting(zone);
    if (!TryAllocate(current, &conflicting)) {
      DCHECK(!conflicting.empty());
      // A range with no valid progressing split position cannot be split;
      // give it maximal weight so nothing is evicted on its behalf.
      float this_weight = std::numeric_limits<float>::max();
      LifetimePosition split_pos =
          FindProgressingSplitPosition(current, &spill);
      if (split_pos.IsValid()) {
        this_weight = CalculateSpillWeight(current);
      }

      // Evict every conflicting range strictly cheaper than the current one.
      bool evicted = false;
      for (auto* conflict : conflicting) {
        if (CalculateSpillWeight(conflict) < this_weight) {
          Evict(conflict);
          Enqueue(conflict);
          evicted = true;
        }
      }
      if (evicted) {
        // Retry with the evicted ranges gone.
        conflicting.clear();
        TryAllocate(current, &conflicting);
      }
      // Still blocked: split (and possibly spill) at the position found.
      if (!conflicting.empty()) {
        DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start()));
        DCHECK(split_pos.IsValid());
        AllocateBlockedRange(current, split_pos, spill);
      }
    }
  }

  // Publish which physical registers ended up in use.
  for (size_t i = 0; i < allocations_.size(); ++i) {
    if (!allocations_[i]->IsEmpty()) {
      data()->MarkAllocated(mode(), static_cast<int>(i));
    }
  }
}
2616 2715
2617 2716
// Finds a split position for |range| that guarantees progress (each piece
// strictly smaller than the original). When the range has no use requiring
// a register, sets |*is_spill_pos| to true and returns an optimal spilling
// position instead. Returns LifetimePosition::Invalid() when no progressing
// split exists.
LifetimePosition GreedyAllocator::FindProgressingSplitPosition(
    LiveRange* range, bool* is_spill_pos) {
  auto start = range->Start();
  auto end = range->End();

  // Find the first use that must sit in a register.
  UsePosition* next_reg_use = range->first_pos();
  while (next_reg_use != nullptr &&
         next_reg_use->type() != UsePositionType::kRequiresRegister) {
    next_reg_use = next_reg_use->next();
  }

  if (next_reg_use == nullptr) {
    // No register-requiring use: the range can simply be spilled.
    *is_spill_pos = true;
    auto ret = FindOptimalSpillingPos(range, start);
    DCHECK(ret.IsValid());
    return ret;
  }

  *is_spill_pos = false;
  auto ret = next_reg_use->pos();

  // Split just before the first register use if that leaves a non-empty head.
  if (start < ret.Prev()) {
    return ret.Prev();
  }

  // Register use at the very end of the range: split just before it.
  if (end == ret && start.Next() < end) {
    return end.Prev();
  }

  // Skip over a run of back-to-back register uses; no progressing split can
  // land inside such a blocked region, so aim just past it.
  auto blocked_pos = start;
  while (next_reg_use->next() != nullptr &&
         next_reg_use->next()->pos().Prev() <= next_reg_use->pos()) {
    next_reg_use = next_reg_use->next();
    blocked_pos = next_reg_use->pos();
    ret = blocked_pos.Next();
  }
  DCHECK(next_reg_use != nullptr);

  if (ret < end && start < ret) {
    return ret;
  }
  ret = ret.Next();

  // NOTE(review): verify this guard — once ret has advanced via Next() it
  // seems it should not compare equal to blocked_pos; confirm reachability.
  if (ret == blocked_pos) {
    return LifetimePosition::Invalid();
  }

  if (ret < end && start < ret) {
    return ret;
  }

  return LifetimePosition::Invalid();
}
2633 2770
2634 2771
2772 void GreedyAllocator::AllocateBlockedRange(LiveRange* current,
2773 LifetimePosition pos, bool spill) {
2774 auto tail = SplitRangeAt(current, pos);
2775 if (spill) {
2776 Spill(tail);
2777 } else {
2778 Enqueue(tail);
2779 }
2780 if (tail != current) {
2781 Enqueue(current);
2782 }
2783 }
2784
2785
// If most of a phi's inputs are already spilled and their spill ranges can
// be merged, spill the phi's own range into the same merged slot (when it
// does not need a register right away), saving moves at block boundaries.
// Returns true when the range was handled (spilled or split) here.
bool GreedyAllocator::TryReuseSpillForPhi(LiveRange* range) {
  if (range->IsChild() || !range->is_phi()) return false;
  DCHECK(!range->HasSpillOperand());

  auto phi_map_value = data()->GetPhiMapValueFor(range->id());
  auto phi = phi_map_value->phi();
  auto block = phi_map_value->block();
  // Count the number of spilled operands.
  size_t spilled_count = 0;
  LiveRange* first_op = nullptr;
  for (size_t i = 0; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];
    LiveRange* op_range = LiveRangeFor(op);
    if (!op_range->HasSpillRange()) continue;
    // Walk to the child range live at the end of the matching predecessor.
    auto pred = code()->InstructionBlockAt(block->predecessors()[i]);
    auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
        pred->last_instruction_index());
    while (op_range != nullptr && !op_range->CanCover(pred_end)) {
      op_range = op_range->next();
    }
    if (op_range != nullptr && op_range->spilled()) {
      spilled_count++;
      if (first_op == nullptr) {
        first_op = op_range->TopLevel();
      }
    }
  }

  // Only continue if more than half of the operands are spilled.
  if (spilled_count * 2 <= phi->operands().size()) {
    return false;
  }

  // Try to merge the spilled operands and count the number of merged spilled
  // operands.
  DCHECK(first_op != nullptr);
  auto first_op_spill = first_op->GetSpillRange();
  size_t num_merged = 1;
  for (size_t i = 1; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];
    auto op_range = LiveRangeFor(op);
    if (!op_range->HasSpillRange()) continue;
    auto op_spill = op_range->GetSpillRange();
    if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
      num_merged++;
    }
  }

  // Only continue if enough operands could be merged to the
  // same spill slot.
  if (num_merged * 2 <= phi->operands().size() ||
      AreUseIntervalsIntersecting(first_op_spill->interval(),
                                  range->first_interval())) {
    return false;
  }

  // If the range does not need register soon, spill it to the merged
  // spill range.
  auto next_pos = range->Start();
  if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
  auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
  if (pos == nullptr) {
    // No beneficial register use: spill the whole phi range into the slot.
    auto spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    CHECK(merged);
    Spill(range);
    return true;
  } else if (pos->pos() > range->Start().NextStart()) {
    // First beneficial use is far enough away: spill the head and re-queue
    // the remainder.
    auto spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    CHECK(merged);
    Enqueue(
        SpillBetweenUntil(range, range->Start(), range->Start(), pos->pos()));
    return true;
  }
  return false;
}
2869
2870
// OperandAssigner merges and assigns spill slots (see AssignSpillSlots) and
// commits final operand assignments; |data| is borrowed, not owned.
OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
2637 2873
2638 void OperandAssigner::AssignSpillSlots() { 2874 void OperandAssigner::AssignSpillSlots() {
2639 auto& spill_ranges = data()->spill_ranges(); 2875 auto& spill_ranges = data()->spill_ranges();
2640 // Merge disjoint spill ranges 2876 // Merge disjoint spill ranges
2641 for (size_t i = 0; i < spill_ranges.size(); i++) { 2877 for (size_t i = 0; i < spill_ranges.size(); i++) {
2642 auto range = spill_ranges[i]; 2878 auto range = spill_ranges[i];
2643 if (range->IsEmpty()) continue; 2879 if (range->IsEmpty()) continue;
2644 for (size_t j = i + 1; j < spill_ranges.size(); j++) { 2880 for (size_t j = i + 1; j < spill_ranges.size(); j++) {
(...skipping 24 matching lines...) Expand all
2669 InstructionOperand* spill_operand = nullptr; 2905 InstructionOperand* spill_operand = nullptr;
2670 if (!range->TopLevel()->HasNoSpillType()) { 2906 if (!range->TopLevel()->HasNoSpillType()) {
2671 spill_operand = range->TopLevel()->GetSpillOperand(); 2907 spill_operand = range->TopLevel()->GetSpillOperand();
2672 } 2908 }
2673 auto assigned = range->GetAssignedOperand(); 2909 auto assigned = range->GetAssignedOperand();
2674 range->ConvertUsesToOperand(assigned, spill_operand); 2910 range->ConvertUsesToOperand(assigned, spill_operand);
2675 if (range->is_phi()) { 2911 if (range->is_phi()) {
2676 data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned); 2912 data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned);
2677 } 2913 }
2678 if (!range->IsChild() && spill_operand != nullptr) { 2914 if (!range->IsChild() && spill_operand != nullptr) {
2679 range->CommitSpillsAtDefinition(data()->code(), spill_operand, 2915 range->CommitSpillsAtDefinition(
2680 range->has_slot_use()); 2916 data()->code(), spill_operand,
2917 range->has_slot_use() || range->spilled());
2681 } 2918 }
2682 } 2919 }
2683 } 2920 }
2684 2921
2685 2922
// ReferenceMapPopulator fills the instruction sequence's reference maps at
// safe points (see SafePointsAreInOrder); |data| is borrowed, not owned.
ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
    : data_(data) {}
2688 2925
2689 2926
2690 bool ReferenceMapPopulator::SafePointsAreInOrder() const { 2927 bool ReferenceMapPopulator::SafePointsAreInOrder() const {
(...skipping 352 matching lines...) Expand 10 before | Expand all | Expand 10 after
3043 auto eliminate = moves->PrepareInsertAfter(move); 3280 auto eliminate = moves->PrepareInsertAfter(move);
3044 to_insert.push_back(move); 3281 to_insert.push_back(move);
3045 if (eliminate != nullptr) to_eliminate.push_back(eliminate); 3282 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
3046 } 3283 }
3047 } 3284 }
3048 3285
3049 3286
3050 } // namespace compiler 3287 } // namespace compiler
3051 } // namespace internal 3288 } // namespace internal
3052 } // namespace v8 3289 } // namespace v8
OLDNEW
« src/compiler/register-allocator.h ('K') | « src/compiler/register-allocator.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698