Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(386)

Side by Side Diff: src/compiler/register-allocator.cc

Issue 1105793002: Greedy reg alloc - better splitting (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/compiler/register-allocator.h ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/base/adapters.h" 5 #include "src/base/adapters.h"
6 #include "src/compiler/linkage.h" 6 #include "src/compiler/linkage.h"
7 #include "src/compiler/register-allocator.h" 7 #include "src/compiler/register-allocator.h"
8 #include "src/string-stream.h" 8 #include "src/string-stream.h"
9 9
10 namespace v8 { 10 namespace v8 {
(...skipping 207 matching lines...) Expand 10 before | Expand all | Expand 10 after
// Splits this interval at |pos|, which must lie strictly inside it
// (DCHECK'd). This interval is truncated to [start, pos) and a newly
// zone-allocated interval covering [pos, old end) is returned; the new
// interval takes over this interval's next_ link and this one is detached.
UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos != start());
  auto after = new (zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = nullptr;
  end_ = pos;
  return after;
}
226 226
227 227
228 std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
229 os << '@' << pos.ToInstructionIndex();
230 if (pos.IsGapPosition()) {
231 os << 'g';
232 } else {
233 os << 'i';
234 }
235 if (pos.IsStart()) {
236 os << 's';
237 } else {
238 os << 'e';
239 }
240 return os;
241 }
242
243
// Node in a singly-linked list of deferred "spill at definition" requests:
// each entry records the gap where the spill move must be inserted and the
// operand holding the value to be spilled.
struct LiveRange::SpillAtDefinitionList : ZoneObject {
  SpillAtDefinitionList(int gap_index, InstructionOperand* operand,
                        SpillAtDefinitionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;                // Gap at which the spill move goes.
  InstructionOperand* const operand;  // Value to spill at that gap.
  SpillAtDefinitionList* const next;  // Next deferred spill, or nullptr.
};
236 252
237 253
(...skipping 521 matching lines...) Expand 10 before | Expand all | Expand 10 after
759 if (interval2->end() > interval1->start()) { 775 if (interval2->end() > interval1->start()) {
760 return true; 776 return true;
761 } 777 }
762 interval2 = interval2->next(); 778 interval2 = interval2->next();
763 } 779 }
764 } 780 }
765 return false; 781 return false;
766 } 782 }
767 783
768 784
785 std::ostream& operator<<(std::ostream& os,
786 const PrintableLiveRange& printable_range) {
787 const LiveRange* range = printable_range.range_;
788 os << "Range: " << range->id() << " ";
789 if (range->is_phi()) os << "phi ";
790 if (range->is_non_loop_phi()) os << "nlphi ";
791
792 os << "{" << std::endl;
793 auto interval = range->first_interval();
794 auto use_pos = range->first_pos();
795 PrintableInstructionOperand pio;
796 pio.register_configuration_ = printable_range.register_configuration_;
797 while (use_pos != nullptr) {
798 pio.op_ = *use_pos->operand();
799 os << pio << use_pos->pos() << " ";
800 use_pos = use_pos->next();
801 }
802 os << std::endl;
803
804 while (interval != nullptr) {
805 os << '[' << interval->start() << ", " << interval->end() << ')'
806 << std::endl;
807 interval = interval->next();
808 }
809 os << "}";
810 return os;
811 }
812
813
769 SpillRange::SpillRange(LiveRange* parent, Zone* zone) 814 SpillRange::SpillRange(LiveRange* parent, Zone* zone)
770 : live_ranges_(zone), assigned_slot_(kUnassignedSlot) { 815 : live_ranges_(zone), assigned_slot_(kUnassignedSlot) {
771 DCHECK(!parent->IsChild()); 816 DCHECK(!parent->IsChild());
772 UseInterval* result = nullptr; 817 UseInterval* result = nullptr;
773 UseInterval* node = nullptr; 818 UseInterval* node = nullptr;
774 // Copy the intervals for all ranges. 819 // Copy the intervals for all ranges.
775 for (auto range = parent; range != nullptr; range = range->next()) { 820 for (auto range = parent; range != nullptr; range = range->next()) {
776 auto src = range->first_interval(); 821 auto src = range->first_interval();
777 while (src != nullptr) { 822 while (src != nullptr) {
778 auto new_node = new (zone) UseInterval(src->start(), src->end()); 823 auto new_node = new (zone) UseInterval(src->start(), src->end());
(...skipping 1567 matching lines...) Expand 10 before | Expand all | Expand 10 after
2346 Spill(second_part); 2391 Spill(second_part);
2347 AddToUnhandledSorted(third_part); 2392 AddToUnhandledSorted(third_part);
2348 } else { 2393 } else {
2349 // The split result does not intersect with [start, end[. 2394 // The split result does not intersect with [start, end[.
2350 // Nothing to spill. Just put it to unhandled as whole. 2395 // Nothing to spill. Just put it to unhandled as whole.
2351 AddToUnhandledSorted(second_part); 2396 AddToUnhandledSorted(second_part);
2352 } 2397 }
2353 } 2398 }
2354 2399
2355 2400
2356 class CoallescedLiveRanges : public ZoneObject { 2401 class CoalescedLiveRanges : public ZoneObject {
2357 public: 2402 public:
2358 explicit CoallescedLiveRanges(Zone* zone) : storage_(zone) {} 2403 explicit CoalescedLiveRanges(Zone* zone) : storage_(zone) {}
2359 2404
  // Returns the live range stored under |query|'s key in the splay tree,
  // or nullptr if no interval with that key was inserted.
  LiveRange* Find(UseInterval* query) {
    ZoneSplayTree<Config>::Locator locator;

    if (storage_.Find(GetKey(query), &locator)) {
      return locator.value();
    }
    return nullptr;
  }
2368 2413
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
  // Maps |interval|'s key to |range| in the splay tree. Returns false —
  // leaving the tree unchanged — if that key is already present.
  bool Insert(UseInterval* interval, LiveRange* range) {
    ZoneSplayTree<Config>::Locator locator;
    bool ret = storage_.Insert(GetKey(interval), &locator);
    if (ret) locator.set_value(range);
    return ret;
  }
2418 2463
2419 bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); } 2464 bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); }
2420 2465
2421 ZoneSplayTree<Config> storage_; 2466 ZoneSplayTree<Config> storage_;
2422 DISALLOW_COPY_AND_ASSIGN(CoallescedLiveRanges); 2467 DISALLOW_COPY_AND_ASSIGN(CoalescedLiveRanges);
2423 }; 2468 };
2424 2469
2425 2470
2426 const std::pair<int, int> CoallescedLiveRanges::Config::kNoKey = {0, 0}; 2471 const std::pair<int, int> CoalescedLiveRanges::Config::kNoKey = {0, 0};
2427 2472
// All greedy-allocator scratch state (the per-register allocation maps and
// the priority queue) is allocated in |local_zone|, which only needs to
// outlive this allocation pass.
GreedyAllocator::GreedyAllocator(RegisterAllocationData* data,
                                 RegisterKind kind, Zone* local_zone)
    : RegisterAllocator(data, kind),
      local_zone_(local_zone),
      allocations_(local_zone),
      queue_(local_zone) {}
2433 2479
2434 2480
2435 unsigned GreedyAllocator::GetLiveRangeSize(LiveRange* range) { 2481 unsigned GreedyAllocator::GetLiveRangeSize(LiveRange* range) {
2436 auto interval = range->first_interval(); 2482 auto interval = range->first_interval();
2437 if (interval == nullptr) return 0; 2483 if (interval == nullptr) return 0;
2438 2484
2439 unsigned size = 0; 2485 unsigned size = 0;
2440 while (interval != nullptr) { 2486 while (interval != nullptr) {
(...skipping 13 matching lines...) Expand all
2454 } 2500 }
2455 range->set_assigned_register(reg_id); 2501 range->set_assigned_register(reg_id);
2456 range->SetUseHints(reg_id); 2502 range->SetUseHints(reg_id);
2457 if (range->is_phi()) { 2503 if (range->is_phi()) {
2458 data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id); 2504 data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id);
2459 } 2505 }
2460 } 2506 }
2461 2507
2462 2508
2463 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) { 2509 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) {
2510 InstructionOperand* first_hint = nullptr;
2511 if (range->FirstHintPosition() != nullptr) {
2512 first_hint = range->FirstHintPosition()->operand();
2513 }
2514
2464 if (range->IsFixed()) return std::numeric_limits<float>::max(); 2515 if (range->IsFixed()) return std::numeric_limits<float>::max();
2516 bool spill;
2517 if (!FindProgressingSplitPosition(range, &spill).IsValid())
2518 return std::numeric_limits<float>::max();
2465 2519
2466 int hint_register; 2520 float multiplier = 1.0;
2467 if (range->FirstHintPosition(&hint_register) != nullptr) { 2521 if (first_hint != nullptr && first_hint->IsRegister()) {
2468 return std::numeric_limits<float>::max(); 2522 multiplier = 3.0;
2469 } 2523 }
2470 2524
2471 unsigned use_count = 0; 2525 unsigned use_count = 0;
2472 auto* pos = range->first_pos(); 2526 auto* pos = range->first_pos();
2473 while (pos != nullptr) { 2527 while (pos != nullptr) {
2474 use_count++; 2528 use_count++;
2475 pos = pos->next(); 2529 pos = pos->next();
2476 } 2530 }
2477 2531
2478 // GetLiveRangeSize is DCHECK-ed to not be 0
2479 unsigned range_size = GetLiveRangeSize(range); 2532 unsigned range_size = GetLiveRangeSize(range);
2480 DCHECK_NE(0U, range_size); 2533 DCHECK_NE(0U, range_size);
2481 2534
2482 return static_cast<float>(use_count) / static_cast<float>(range_size); 2535 return multiplier * static_cast<float>(use_count) /
2536 static_cast<float>(range_size);
2483 } 2537 }
2484 2538
2485 2539
2486 float GreedyAllocator::CalculateMaxSpillWeight( 2540 float GreedyAllocator::CalculateMaxSpillWeight(
2487 const ZoneSet<LiveRange*>& ranges) { 2541 const ZoneSet<LiveRange*>& ranges) {
2488 float max = 0.0; 2542 float max = 0.0;
2489 for (auto* r : ranges) { 2543 for (auto* r : ranges) {
2490 max = std::max(max, CalculateSpillWeight(r)); 2544 max = std::max(max, CalculateSpillWeight(r));
2491 } 2545 }
2492 return max; 2546 return max;
(...skipping 22 matching lines...) Expand all
2515 conflicting->insert(existing); 2569 conflicting->insert(existing);
2516 } 2570 }
2517 segment = segment->next(); 2571 segment = segment->next();
2518 } 2572 }
2519 if (!conflicting->empty()) return false; 2573 if (!conflicting->empty()) return false;
2520 // No conflicts means we can safely allocate this register to this range. 2574 // No conflicts means we can safely allocate this register to this range.
2521 AssignRangeToRegister(reg_id, range); 2575 AssignRangeToRegister(reg_id, range);
2522 return true; 2576 return true;
2523 } 2577 }
2524 2578
2579
2580 int GreedyAllocator::GetHintedRegister(LiveRange* range) {
2581 int reg;
2582 if (range->FirstHintPosition(&reg) != nullptr) {
2583 return reg;
2584 }
2585 return -1;
2586 }
2587
2588
// Tries to find a register for |current|. Fixed ranges may only take their
// preassigned register. Otherwise the hinted register (if any) is tried
// first, then every other register in order. On failure, returns false with
// |conflicting| holding the union of the ranges that blocked each candidate
// register; on success, |conflicting| is left empty.
bool GreedyAllocator::TryAllocate(LiveRange* current,
                                  ZoneSet<LiveRange*>* conflicting) {
  if (current->IsFixed()) {
    return TryAllocatePhysicalRegister(current->assigned_register(), current,
                                       conflicting);
  }

  int hinted_reg_id = GetHintedRegister(current);
  if (hinted_reg_id >= 0) {
    if (TryAllocatePhysicalRegister(hinted_reg_id, current, conflicting)) {
      return true;
    }
  }

  ZoneSet<LiveRange*> local_conflicts(local_zone());
  for (unsigned candidate_reg = 0; candidate_reg < allocations_.size();
       candidate_reg++) {
    // The hinted register was already tried above.
    if (hinted_reg_id >= 0 &&
        candidate_reg == static_cast<size_t>(hinted_reg_id))
      continue;
    local_conflicts.clear();
    if (TryAllocatePhysicalRegister(candidate_reg, current, &local_conflicts)) {
      // Success: conflicts accumulated from earlier failed candidates no
      // longer matter.
      conflicting->clear();
      return true;
    } else {
      conflicting->insert(local_conflicts.begin(), local_conflicts.end());
    }
  }
  return false;
}
2550 2619
2620
2551 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range, 2621 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range,
2552 LifetimePosition start, 2622 LifetimePosition start,
2553 LifetimePosition until, 2623 LifetimePosition until,
2554 LifetimePosition end) { 2624 LifetimePosition end) {
2555 CHECK(start < end); 2625 CHECK(start < end);
2556 auto second_part = SplitRangeAt(range, start); 2626 auto second_part = SplitRangeAt(range, start);
2557 2627
2558 if (second_part->Start() < end) { 2628 if (second_part->Start() < end) {
2559 // The split result intersects with [start, end[. 2629 // The split result intersects with [start, end[.
2560 // Split it at position between ]start+1, end[, spill the middle part 2630 // Split it at position between ]start+1, end[, spill the middle part
(...skipping 13 matching lines...) Expand all
2574 // The split result does not intersect with [start, end[. 2644 // The split result does not intersect with [start, end[.
2575 // Nothing to spill. Just return it for re-processing. 2645 // Nothing to spill. Just return it for re-processing.
2576 return second_part; 2646 return second_part;
2577 } 2647 }
2578 } 2648 }
2579 2649
2580 2650
// Adds |range| to the allocation priority queue, keyed by its size.
// Null or empty ranges are silently ignored.
void GreedyAllocator::Enqueue(LiveRange* range) {
  if (range == nullptr || range->IsEmpty()) return;
  unsigned size = GetLiveRangeSize(range);
  TRACE("Enqueuing range %d\n", range->id());
  queue_.push(std::make_pair(size, range));
}
2586 2657
2587 2658
2588 // TODO(mtrofin): consolidate with identical code segment in
2589 // LinearScanAllocator::AllocateRegisters
2590 bool GreedyAllocator::HandleSpillOperands(LiveRange* range) { 2659 bool GreedyAllocator::HandleSpillOperands(LiveRange* range) {
2591 auto position = range->Start(); 2660 auto position = range->Start();
2592 TRACE("Processing interval %d start=%d\n", range->id(), position.value()); 2661 TRACE("Processing interval %d start=%d\n", range->id(), position.value());
2593 2662
2594 if (!range->HasNoSpillType()) { 2663 if (!range->HasNoSpillType()) {
2595 TRACE("Live range %d already has a spill operand\n", range->id()); 2664 TRACE("Live range %d already has a spill operand\n", range->id());
2596 auto next_pos = position; 2665 auto next_pos = position;
2597 if (next_pos.IsGapPosition()) { 2666 if (next_pos.IsGapPosition()) {
2598 next_pos = next_pos.NextStart(); 2667 next_pos = next_pos.NextStart();
2599 } 2668 }
2600 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); 2669 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
2601 // If the range already has a spill operand and it doesn't need a 2670 // If the range already has a spill operand and it doesn't need a
2602 // register immediately, split it and spill the first part of the range. 2671 // register immediately, split it and spill the first part of the range.
2603 if (pos == nullptr) { 2672 if (pos == nullptr) {
2604 Spill(range); 2673 Spill(range);
2605 return true; 2674 return true;
2606 } else if (pos->pos() > range->Start().NextStart()) { 2675 } else if (pos->pos() > range->Start().NextStart()) {
2607 // Do not spill live range eagerly if use position that can benefit from 2676 // Do not spill live range eagerly if use position that can benefit from
2608 // the register is too close to the start of live range. 2677 // the register is too close to the start of live range.
2609 auto* reminder = SpillBetweenUntil(range, position, position, pos->pos()); 2678 auto* reminder = SpillBetweenUntil(range, position, position, pos->pos());
2610 Enqueue(reminder); 2679 Enqueue(reminder);
2611 return true; 2680 return true;
2612 } 2681 }
2613 } 2682 }
2614 return false; 2683 return TryReuseSpillForPhi(range);
2615 // TODO(mtrofin): Do we need this?
2616 // return (TryReuseSpillForPhi(range));
2617 } 2684 }
2618 2685
2619 2686
// Main greedy allocation loop: enqueue all candidate ranges keyed by size,
// seed the per-register maps with the fixed ranges, then repeatedly pop the
// top range and either spill it, allocate it, evict cheaper conflicting
// ranges, or split it at a progressing position.
void GreedyAllocator::AllocateRegisters() {
  for (auto range : data()->live_ranges()) {
    if (range == nullptr) continue;
    if (range->kind() == mode()) {
      DCHECK(!range->HasRegisterAssigned() && !range->spilled());
      TRACE("Enqueueing live range %d to priority queue \n", range->id());
      Enqueue(range);
    }
  }

  allocations_.resize(num_registers());
  for (int i = 0; i < num_registers(); i++) {
    allocations_[i] = new (local_zone()) CoalescedLiveRanges(local_zone());
  }

  // Fixed ranges occupy their preassigned register unconditionally.
  for (auto* current : GetFixedRegisters(data(), mode())) {
    if (current != nullptr) {
      DCHECK_EQ(mode(), current->kind());
      int reg_nr = current->assigned_register();
      bool inserted = allocations_[reg_nr]->Insert(current);
      CHECK(inserted);
    }
  }

  while (!queue_.empty()) {
    auto current_pair = queue_.top();
    queue_.pop();
    auto current = current_pair.second;
    if (HandleSpillOperands(current)) {
      continue;
    }
    bool spill = false;
    ZoneSet<LiveRange*> conflicting(local_zone());
    if (!TryAllocate(current, &conflicting)) {
      DCHECK(!conflicting.empty());
      // A range with no progressing split position keeps the maximal
      // weight, so every conflict loses to it and gets evicted.
      float this_weight = std::numeric_limits<float>::max();
      LifetimePosition split_pos =
          FindProgressingSplitPosition(current, &spill);
      if (split_pos.IsValid()) {
        this_weight = CalculateSpillWeight(current);
      }

      // Evict and re-queue every conflicting range that is cheaper to
      // spill than |current|.
      bool evicted = false;
      for (auto* conflict : conflicting) {
        if (CalculateSpillWeight(conflict) < this_weight) {
          Evict(conflict);
          Enqueue(conflict);
          evicted = true;
        }
      }
      if (evicted) {
        conflicting.clear();
        TryAllocate(current, &conflicting);
      }
      // Still blocked after evictions: split at the progressing position,
      // spilling the tail when no use requires a register there.
      if (!conflicting.empty()) {
        DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start()));
        DCHECK(split_pos.IsValid());
        AllocateBlockedRange(current, split_pos, spill);
      }
    }
  }

  for (size_t i = 0; i < allocations_.size(); ++i) {
    if (!allocations_[i]->IsEmpty()) {
      data()->MarkAllocated(mode(), static_cast<int>(i));
    }
  }
}
2678 2755
2679 2756
// Rounds |pos| down to the nearest position where a split is legal: the end
// half of the previous start, or |pos|'s own start when it sits on a block
// boundary. The result never exceeds |pos| (DCHECK'd).
LifetimePosition GreedyAllocator::GetSplittablePos(LifetimePosition pos) {
  auto ret = pos.PrevStart().End();
  if (IsBlockBoundary(code(), pos.Start())) {
    ret = pos.Start();
  }
  DCHECK(ret <= pos);
  return ret;
}
2765
// Finds a split position for |range| that guarantees progress, i.e. both
// resulting pieces are strictly smaller than |range|. If the range has no
// use requiring a register, *is_spill_pos is set and an optimal spilling
// position is returned instead. Returns an invalid position when no
// progressing split exists.
LifetimePosition GreedyAllocator::FindProgressingSplitPosition(
    LiveRange* range, bool* is_spill_pos) {
  auto start = range->Start();
  auto end = range->End();

  // Locate the first use that must live in a register.
  UsePosition* next_reg_use = range->first_pos();
  while (next_reg_use != nullptr &&
         next_reg_use->type() != UsePositionType::kRequiresRegister) {
    next_reg_use = next_reg_use->next();
  }

  if (next_reg_use == nullptr) {
    // No register-requiring use: the whole range can be spilled.
    *is_spill_pos = true;
    auto ret = FindOptimalSpillingPos(range, start);
    DCHECK(ret.IsValid());
    return ret;
  }

  *is_spill_pos = false;
  auto reg_pos = next_reg_use->pos();
  auto correct_pos = GetSplittablePos(reg_pos);
  if (start < correct_pos && correct_pos < end) {
    return correct_pos;
  }

  if (correct_pos >= end) {
    return LifetimePosition::Invalid();
  }

  // Correct_pos must be at or before start. Find the next use position.
  next_reg_use = next_reg_use->next();
  auto reference = reg_pos;
  while (next_reg_use != nullptr) {
    auto pos = next_reg_use->pos();
    // Skip over tight successive uses.
    if (reference.NextStart() < pos) {
      break;
    }
    reference = pos;
    next_reg_use = next_reg_use->next();
  }

  if (next_reg_use == nullptr) {
    // While there may not be another use, we may still have space in the
    // range to clip off.
    correct_pos = reference.NextStart();
    if (start < correct_pos && correct_pos < end) {
      return correct_pos;
    }
    return LifetimePosition::Invalid();
  }

  correct_pos = GetSplittablePos(next_reg_use->pos());
  if (start < correct_pos && correct_pos < end) {
    DCHECK(reference < correct_pos);
    return correct_pos;
  }
  return LifetimePosition::Invalid();
}
2695 2825
2696 2826
2827 void GreedyAllocator::AllocateBlockedRange(LiveRange* current,
2828 LifetimePosition pos, bool spill) {
2829 auto tail = SplitRangeAt(current, pos);
2830 if (spill) {
2831 Spill(tail);
2832 } else {
2833 Enqueue(tail);
2834 }
2835 if (tail != current) {
2836 Enqueue(current);
2837 }
2838 }
2839
2840
// Trivial wrapper; all state lives in the shared RegisterAllocationData.
SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
    : data_(data) {}
2699 2843
2700 2844
2701 void SpillSlotLocator::LocateSpillSlots() { 2845 void SpillSlotLocator::LocateSpillSlots() {
2702 auto code = data()->code(); 2846 auto code = data()->code();
2703 for (auto range : data()->live_ranges()) { 2847 for (auto range : data()->live_ranges()) {
2704 if (range == nullptr || range->IsEmpty() || range->IsChild()) continue; 2848 if (range == nullptr || range->IsEmpty() || range->IsChild()) continue;
2705 // We care only about ranges which spill in the frame. 2849 // We care only about ranges which spill in the frame.
2706 if (!range->HasSpillRange()) continue; 2850 if (!range->HasSpillRange()) continue;
2707 auto spills = range->spills_at_definition(); 2851 auto spills = range->spills_at_definition();
2708 DCHECK_NOT_NULL(spills); 2852 DCHECK_NOT_NULL(spills);
2709 for (; spills != nullptr; spills = spills->next) { 2853 for (; spills != nullptr; spills = spills->next) {
2710 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame(); 2854 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
2711 } 2855 }
2712 } 2856 }
2713 } 2857 }
2714 2858
2715 2859
// For a top-level phi range, tries to reuse its operands' spill slots: when
// more than half of the phi's operands are spilled, and enough of their
// spill ranges can be merged into a single slot, the phi itself is spilled
// into (or scheduled to later spill into) that merged range. Returns true
// if |range| was fully handled here.
bool GreedyAllocator::TryReuseSpillForPhi(LiveRange* range) {
  if (range->IsChild() || !range->is_phi()) return false;
  DCHECK(!range->HasSpillOperand());

  auto phi_map_value = data()->GetPhiMapValueFor(range->id());
  auto phi = phi_map_value->phi();
  auto block = phi_map_value->block();
  // Count the number of spilled operands.
  size_t spilled_count = 0;
  LiveRange* first_op = nullptr;
  for (size_t i = 0; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];
    LiveRange* op_range = LiveRangeFor(op);
    if (!op_range->HasSpillRange()) continue;
    // Walk the operand's children to the piece live at the end of the
    // corresponding predecessor block.
    auto pred = code()->InstructionBlockAt(block->predecessors()[i]);
    auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
        pred->last_instruction_index());
    while (op_range != nullptr && !op_range->CanCover(pred_end)) {
      op_range = op_range->next();
    }
    if (op_range != nullptr && op_range->spilled()) {
      spilled_count++;
      if (first_op == nullptr) {
        first_op = op_range->TopLevel();
      }
    }
  }

  // Only continue if more than half of the operands are spilled.
  if (spilled_count * 2 <= phi->operands().size()) {
    return false;
  }

  // Try to merge the spilled operands and count the number of merged spilled
  // operands.
  DCHECK(first_op != nullptr);
  auto first_op_spill = first_op->GetSpillRange();
  size_t num_merged = 1;
  for (size_t i = 1; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];
    auto op_range = LiveRangeFor(op);
    if (!op_range->HasSpillRange()) continue;
    auto op_spill = op_range->GetSpillRange();
    if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
      num_merged++;
    }
  }

  // Only continue if enough operands could be merged to the
  // same spill slot.
  if (num_merged * 2 <= phi->operands().size() ||
      AreUseIntervalsIntersecting(first_op_spill->interval(),
                                  range->first_interval())) {
    return false;
  }

  // If the range does not need register soon, spill it to the merged
  // spill range.
  auto next_pos = range->Start();
  if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
  auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
  if (pos == nullptr) {
    // No beneficial register use at all: spill the whole phi range now.
    auto spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    CHECK(merged);
    Spill(range);
    return true;
  } else if (pos->pos() > range->Start().NextStart()) {
    // The first beneficial use is far enough away: spill the head and
    // re-queue the remainder for allocation.
    auto spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    CHECK(merged);
    Enqueue(
        SpillBetweenUntil(range, range->Start(), range->Start(), pos->pos()));
    return true;
  }
  return false;
}
2943
2944
// Trivial wrapper; operates on the shared RegisterAllocationData.
OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
2717 2946
2718 2947
2719 void OperandAssigner::AssignSpillSlots() { 2948 void OperandAssigner::AssignSpillSlots() {
2720 auto& spill_ranges = data()->spill_ranges(); 2949 auto& spill_ranges = data()->spill_ranges();
2721 // Merge disjoint spill ranges 2950 // Merge disjoint spill ranges
2722 for (size_t i = 0; i < spill_ranges.size(); i++) { 2951 for (size_t i = 0; i < spill_ranges.size(); i++) {
2723 auto range = spill_ranges[i]; 2952 auto range = spill_ranges[i];
2724 if (range->IsEmpty()) continue; 2953 if (range->IsEmpty()) continue;
2725 for (size_t j = i + 1; j < spill_ranges.size(); j++) { 2954 for (size_t j = i + 1; j < spill_ranges.size(); j++) {
(...skipping 22 matching lines...) Expand all
2748 spill_operand = *range->TopLevel()->GetSpillOperand(); 2977 spill_operand = *range->TopLevel()->GetSpillOperand();
2749 } else if (range->TopLevel()->HasSpillRange()) { 2978 } else if (range->TopLevel()->HasSpillRange()) {
2750 spill_operand = range->TopLevel()->GetSpillRangeOperand(); 2979 spill_operand = range->TopLevel()->GetSpillRangeOperand();
2751 } 2980 }
2752 auto assigned = range->GetAssignedOperand(); 2981 auto assigned = range->GetAssignedOperand();
2753 range->ConvertUsesToOperand(assigned, spill_operand); 2982 range->ConvertUsesToOperand(assigned, spill_operand);
2754 if (range->is_phi()) { 2983 if (range->is_phi()) {
2755 data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned); 2984 data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned);
2756 } 2985 }
2757 if (!range->IsChild() && !spill_operand.IsInvalid()) { 2986 if (!range->IsChild() && !spill_operand.IsInvalid()) {
2758 range->CommitSpillsAtDefinition(data()->code(), spill_operand, 2987 range->CommitSpillsAtDefinition(
2759 range->has_slot_use()); 2988 data()->code(), spill_operand,
2989 range->has_slot_use() || range->spilled());
2760 } 2990 }
2761 } 2991 }
2762 } 2992 }
2763 2993
2764 2994
// Trivial wrapper; operates on the shared RegisterAllocationData.
ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
    : data_(data) {}
2767 2997
2768 2998
2769 bool ReferenceMapPopulator::SafePointsAreInOrder() const { 2999 bool ReferenceMapPopulator::SafePointsAreInOrder() const {
(...skipping 387 matching lines...) Expand 10 before | Expand all | Expand 10 after
3157 auto eliminate = moves->PrepareInsertAfter(move); 3387 auto eliminate = moves->PrepareInsertAfter(move);
3158 to_insert.push_back(move); 3388 to_insert.push_back(move);
3159 if (eliminate != nullptr) to_eliminate.push_back(eliminate); 3389 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
3160 } 3390 }
3161 } 3391 }
3162 3392
3163 3393
3164 } // namespace compiler 3394 } // namespace compiler
3165 } // namespace internal 3395 } // namespace internal
3166 } // namespace v8 3396 } // namespace v8
OLDNEW
« no previous file with comments | « src/compiler/register-allocator.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698