Chromium Code Reviews

Side by Side Diff: src/compiler/register-allocator.cc

Issue 1105793002: Greedy reg alloc - better splitting (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/base/adapters.h" 5 #include "src/base/adapters.h"
6 #include "src/compiler/linkage.h" 6 #include "src/compiler/linkage.h"
7 #include "src/compiler/register-allocator.h" 7 #include "src/compiler/register-allocator.h"
8 #include "src/string-stream.h" 8 #include "src/string-stream.h"
9 9
10 namespace v8 { 10 namespace v8 {
(...skipping 871 matching lines...)
882 } 882 }
883 auto result = live_ranges()[index]; 883 auto result = live_ranges()[index];
884 if (result == nullptr) { 884 if (result == nullptr) {
885 result = NewLiveRange(index); 885 result = NewLiveRange(index);
886 live_ranges()[index] = result; 886 live_ranges()[index] = result;
887 } 887 }
888 return result; 888 return result;
889 } 889 }
890 890
891 891
892 #ifdef DEBUG
893 void RegisterAllocationData::Print(LiveRange* range) {
894 std::cout << "Range: " << range->id();
895 if (range->is_phi()) std::cout << "phi ";
896 if (range->is_non_loop_phi()) std::cout << "nlphi ";
897
898 std::cout << "{" << std::endl;
899 auto interval = range->first_interval();
900 auto use_pos = range->first_pos();
901 PrintableInstructionOperand pio;
902 pio.register_configuration_ = config();
903 while (use_pos != nullptr) {
904 pio.op_ = *use_pos->operand();
905 std::cout << use_pos->pos() << " " << pio << " ";
906 use_pos = use_pos->next();
907 }
908 std::cout << std::endl;
909
910 while (interval != nullptr) {
911 std::cout << '[' << interval->start() << ", " << interval->end() << ')'
912 << std::endl;
913 interval = interval->next();
914 }
915 std::cout << "}" << std::endl;
916 }
917
918
919 void RegisterAllocationData::Print(ZoneSet<LiveRange*>& conflicts) {
920 for (LiveRange* range : conflicts) {
921 std::cout << "Range: " << range->id() << std::endl;
922 Print(range);
923 std::cout << std::endl;
924 }
925 }
926
927
928 void RegisterAllocationData::Print(InstructionSequence* code) {
929 PrintableInstructionSequence prt = {config_, code};
930 std::cout << prt << std::endl;
931 }
932
933
934 void RegisterAllocationData::Print(Instruction* instr) {
935 PrintableInstruction prt = {config_, instr};
936 std::cout << prt << std::endl;
937 }
938
939
940 void RegisterAllocationData::Print(InstructionSequence* code,
941 InstructionBlock* block) {
942 for (int i = block->first_instruction_index();
943 i < block->last_instruction_index(); i++) {
944 PrintableInstruction prt = {config_, code->InstructionAt(i)};
945 std::cout << prt << std::endl;
946 }
947 }
948
949
950 void RegisterAllocationData::PrintOperators(LiveRange* range) {
951 PrintableInstructionOperand pio;
952 pio.register_configuration_ = config_;
953 auto use = range->first_pos();
954 while (use != nullptr) {
955 auto op = use->operand();
956 pio.op_ = *op;
957 std::cout << pio;
958 use = use->next();
959 if (use != nullptr) {
960 std::cout << "; ";
961 }
962 }
963 std::cout << std::endl;
964 }
965
966
967 void RegisterAllocationData::Print(LifetimePosition pos) {
968 std::cout << pos << std::endl;
969 }
970 #endif
971
972
892 MoveOperands* RegisterAllocationData::AddGapMove( 973 MoveOperands* RegisterAllocationData::AddGapMove(
893 int index, Instruction::GapPosition position, 974 int index, Instruction::GapPosition position,
894 const InstructionOperand& from, const InstructionOperand& to) { 975 const InstructionOperand& from, const InstructionOperand& to) {
895 auto instr = code()->InstructionAt(index); 976 auto instr = code()->InstructionAt(index);
896 auto moves = instr->GetOrCreateParallelMove(position, code_zone()); 977 auto moves = instr->GetOrCreateParallelMove(position, code_zone());
897 return moves->AddMove(from, to); 978 return moves->AddMove(from, to);
898 } 979 }
899 980
900 981
901 LiveRange* RegisterAllocationData::NewLiveRange(int index) { 982 LiveRange* RegisterAllocationData::NewLiveRange(int index) {
(...skipping 1490 matching lines...)
2392 } 2473 }
2393 range->set_assigned_register(reg_id); 2474 range->set_assigned_register(reg_id);
2394 range->SetUseHints(reg_id); 2475 range->SetUseHints(reg_id);
2395 if (range->is_phi()) { 2476 if (range->is_phi()) {
2396 data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id); 2477 data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id);
2397 } 2478 }
2398 } 2479 }
2399 2480
2400 2481
2401 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) { 2482 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) {
2483 InstructionOperand* first_hint = nullptr;
2484 if (range->FirstHintPosition() != nullptr) {
2485 first_hint = range->FirstHintPosition()->operand();
2486 }
2487
2402 if (range->IsFixed()) return std::numeric_limits<float>::max(); 2488 if (range->IsFixed()) return std::numeric_limits<float>::max();
2489 bool spill;
2490 if (!FindProgressingSplitPosition(range, &spill).IsValid())
2491 return std::numeric_limits<float>::max();
2403 2492
2404 int hint_register; 2493 float multiplier = 1.0;
2405 if (range->FirstHintPosition(&hint_register) != nullptr) { 2494 if (first_hint != nullptr && first_hint->IsRegister()) {
2406 return std::numeric_limits<float>::max(); 2495 multiplier = 3.0;
2407 } 2496 }
2408 2497
2409 unsigned use_count = 0; 2498 unsigned use_count = 0;
2410 auto* pos = range->first_pos(); 2499 auto* pos = range->first_pos();
2411 while (pos != nullptr) { 2500 while (pos != nullptr) {
2412 use_count++; 2501 use_count++;
2413 pos = pos->next(); 2502 pos = pos->next();
2414 } 2503 }
2415 2504
2416 // GetLiveRangeSize is DCHECK-ed to not be 0
2417 unsigned range_size = GetLiveRangeSize(range); 2505 unsigned range_size = GetLiveRangeSize(range);
2418 DCHECK_NE(0U, range_size); 2506 DCHECK_NE(0U, range_size);
2419 2507
2420 return static_cast<float>(use_count) / static_cast<float>(range_size); 2508 return multiplier * static_cast<float>(use_count) /
2509 static_cast<float>(range_size);
2421 } 2510 }
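
Note on the new weight: it is essentially multiplier * use_count / range_size, where the multiplier is 3.0 when the range's first hint is a register operand, and the weight degenerates to +infinity for fixed ranges and for ranges with no valid progressing split position, so those can never be picked as eviction victims. A minimal standalone sketch of the formula, using simplified stand-in fields rather than V8's LiveRange API:

// Standalone sketch of the spill-weight heuristic; the struct and its fields
// are simplified stand-ins, not V8's actual LiveRange API.
#include <limits>

struct SimpleRange {
  bool is_fixed;            // fixed ranges must never be spilled
  bool splittable;          // has a valid "progressing" split position
  bool hinted_to_register;  // first hint position is a register operand
  unsigned use_count;       // number of use positions
  unsigned size;            // live range size in lifetime positions (> 0)
};

float SpillWeight(const SimpleRange& r) {
  // Fixed or unsplittable ranges get infinite weight so they always outweigh
  // any conflicting range during eviction decisions.
  if (r.is_fixed || !r.splittable) return std::numeric_limits<float>::max();
  // Register-hinted ranges count as three times as expensive to spill.
  float multiplier = r.hinted_to_register ? 3.0f : 1.0f;
  return multiplier * static_cast<float>(r.use_count) /
         static_cast<float>(r.size);
}

For instance, a splittable range with 4 uses spread over 20 lifetime positions and a register hint would weigh 3.0 * 4 / 20 = 0.6.
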
2422 2511
2423 2512
2424 float GreedyAllocator::CalculateMaxSpillWeight( 2513 float GreedyAllocator::CalculateMaxSpillWeight(
2425 const ZoneSet<LiveRange*>& ranges) { 2514 const ZoneSet<LiveRange*>& ranges) {
2426 float max = 0.0; 2515 float max = 0.0;
2427 for (auto* r : ranges) { 2516 for (auto* r : ranges) {
2428 max = std::max(max, CalculateSpillWeight(r)); 2517 max = std::max(max, CalculateSpillWeight(r));
2429 } 2518 }
2430 return max; 2519 return max;
(...skipping 22 matching lines...)
2453 conflicting->insert(existing); 2542 conflicting->insert(existing);
2454 } 2543 }
2455 segment = segment->next(); 2544 segment = segment->next();
2456 } 2545 }
2457 if (!conflicting->empty()) return false; 2546 if (!conflicting->empty()) return false;
2458 // No conflicts means we can safely allocate this register to this range. 2547 // No conflicts means we can safely allocate this register to this range.
2459 AssignRangeToRegister(reg_id, range); 2548 AssignRangeToRegister(reg_id, range);
2460 return true; 2549 return true;
2461 } 2550 }
2462 2551
2552
2553 int GreedyAllocator::GetHintedRegister(LiveRange* range) {
2554 int reg;
2555 if (range->FirstHintPosition(&reg) != nullptr) {
2556 return reg;
2557 }
2558 return -1;
2559 }
2560
2561
2463 bool GreedyAllocator::TryAllocate(LiveRange* current, 2562 bool GreedyAllocator::TryAllocate(LiveRange* current,
2464 ZoneSet<LiveRange*>* conflicting) { 2563 ZoneSet<LiveRange*>* conflicting) {
2465 if (current->HasSpillOperand()) {
2466 Spill(current);
2467 return true;
2468 }
2469 if (current->IsFixed()) { 2564 if (current->IsFixed()) {
2470 return TryAllocatePhysicalRegister(current->assigned_register(), current, 2565 return TryAllocatePhysicalRegister(current->assigned_register(), current,
2471 conflicting); 2566 conflicting);
2472 } 2567 }
2473 2568
2474 if (current->HasRegisterAssigned()) { 2569 int hinted_reg_id = GetHintedRegister(current);
2475 int reg_id = current->assigned_register(); 2570 if (hinted_reg_id >= 0) {
2476 return TryAllocatePhysicalRegister(reg_id, current, conflicting); 2571 if (TryAllocatePhysicalRegister(hinted_reg_id, current, conflicting)) {
2572 return true;
2573 }
2477 } 2574 }
2478 2575
2576 ZoneSet<LiveRange*> local_conflicts(data()->allocation_zone());
dcarney 2015/04/29 18:18:35 the only things that should be allocated in the al
Mircea Trofin 2015/04/30 18:16:49 Done.
2479 for (unsigned candidate_reg = 0; candidate_reg < allocations_.size(); 2577 for (unsigned candidate_reg = 0; candidate_reg < allocations_.size();
2480 candidate_reg++) { 2578 candidate_reg++) {
2481 if (TryAllocatePhysicalRegister(candidate_reg, current, conflicting)) { 2579 if (hinted_reg_id >= 0 &&
2580 candidate_reg == static_cast<size_t>(hinted_reg_id))
2581 continue;
2582 local_conflicts.clear();
2583 if (TryAllocatePhysicalRegister(candidate_reg, current, &local_conflicts)) {
2482 conflicting->clear(); 2584 conflicting->clear();
2483 return true; 2585 return true;
2586 } else {
2587 conflicting->insert(local_conflicts.begin(), local_conflicts.end());
2484 } 2588 }
2485 } 2589 }
2486 return false; 2590 return false;
2487 } 2591 }
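
The probing order above is: a fixed range keeps its pre-assigned register, otherwise the hinted register (if any) is tried first, and only then every other register, with each failed attempt's conflicts merged into the caller's set so they can later be weighed for eviction. A condensed sketch of that control flow, with try_reg standing in for TryAllocatePhysicalRegister and simplified container types:

// Sketch only: Range is an opaque stand-in and try_reg plays the role of
// TryAllocatePhysicalRegister (true on success, otherwise it fills in the
// conflicting ranges for that register).
#include <functional>
#include <set>

struct Range;
using ConflictSet = std::set<Range*>;
using TryReg = std::function<bool(int reg, ConflictSet* conflicts)>;

bool TryAllocateSketch(int hinted_reg, int num_registers, TryReg try_reg,
                       ConflictSet* conflicting) {
  // 1. Honor the hint first so phi/move hints translate into fewer moves.
  if (hinted_reg >= 0 && try_reg(hinted_reg, conflicting)) return true;
  // 2. Probe the remaining registers, accumulating conflicts across all of
  //    them; on the first success the accumulated conflicts are irrelevant.
  for (int reg = 0; reg < num_registers; ++reg) {
    if (reg == hinted_reg) continue;
    ConflictSet local;
    if (try_reg(reg, &local)) {
      conflicting->clear();
      return true;
    }
    conflicting->insert(local.begin(), local.end());
  }
  return false;
}
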
2488 2592
2593
2489 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range, 2594 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range,
2490 LifetimePosition start, 2595 LifetimePosition start,
2491 LifetimePosition until, 2596 LifetimePosition until,
2492 LifetimePosition end) { 2597 LifetimePosition end) {
2493 CHECK(start < end); 2598 CHECK(start < end);
2494 auto second_part = SplitRangeAt(range, start); 2599 auto second_part = SplitRangeAt(range, start);
2495 2600
2496 if (second_part->Start() < end) { 2601 if (second_part->Start() < end) {
2497 // The split result intersects with [start, end[. 2602 // The split result intersects with [start, end[.
2498 // Split it at position between ]start+1, end[, spill the middle part 2603 // Split it at position between ]start+1, end[, spill the middle part
(...skipping 13 matching lines...)
2512 // The split result does not intersect with [start, end[. 2617 // The split result does not intersect with [start, end[.
2513 // Nothing to spill. Just return it for re-processing. 2618 // Nothing to spill. Just return it for re-processing.
2514 return second_part; 2619 return second_part;
2515 } 2620 }
2516 } 2621 }
2517 2622
2518 2623
2519 void GreedyAllocator::Enqueue(LiveRange* range) { 2624 void GreedyAllocator::Enqueue(LiveRange* range) {
2520 if (range == nullptr || range->IsEmpty()) return; 2625 if (range == nullptr || range->IsEmpty()) return;
2521 unsigned size = GetLiveRangeSize(range); 2626 unsigned size = GetLiveRangeSize(range);
2627 TRACE("Enqueuing range %d\n", range->id());
2522 queue_.push(std::make_pair(size, range)); 2628 queue_.push(std::make_pair(size, range));
2523 } 2629 }
2524 2630
2525 2631
2526 // TODO(mtrofin): consolidate with identical code segment in
2527 // LinearScanAllocator::AllocateRegisters
2528 bool GreedyAllocator::HandleSpillOperands(LiveRange* range) { 2632 bool GreedyAllocator::HandleSpillOperands(LiveRange* range) {
2529 auto position = range->Start(); 2633 auto position = range->Start();
2530 TRACE("Processing interval %d start=%d\n", range->id(), position.value()); 2634 TRACE("Processing interval %d start=%d\n", range->id(), position.value());
2531 2635
2532 if (!range->HasNoSpillType()) { 2636 if (!range->HasNoSpillType()) {
2533 TRACE("Live range %d already has a spill operand\n", range->id()); 2637 TRACE("Live range %d already has a spill operand\n", range->id());
2534 auto next_pos = position; 2638 auto next_pos = position;
2535 if (next_pos.IsGapPosition()) { 2639 if (next_pos.IsGapPosition()) {
2536 next_pos = next_pos.NextStart(); 2640 next_pos = next_pos.NextStart();
2537 } 2641 }
2538 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); 2642 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
2539 // If the range already has a spill operand and it doesn't need a 2643 // If the range already has a spill operand and it doesn't need a
2540 // register immediately, split it and spill the first part of the range. 2644 // register immediately, split it and spill the first part of the range.
2541 if (pos == nullptr) { 2645 if (pos == nullptr) {
2542 Spill(range); 2646 Spill(range);
2543 return true; 2647 return true;
2544 } else if (pos->pos() > range->Start().NextStart()) { 2648 } else if (pos->pos() > range->Start().NextStart()) {
2545 // Do not spill live range eagerly if use position that can benefit from 2649 // Do not spill live range eagerly if use position that can benefit from
2546 // the register is too close to the start of live range. 2650 // the register is too close to the start of live range.
2547 auto* reminder = SpillBetweenUntil(range, position, position, pos->pos()); 2651 auto* reminder = SpillBetweenUntil(range, position, position, pos->pos());
2548 Enqueue(reminder); 2652 Enqueue(reminder);
2549 return true; 2653 return true;
2550 } 2654 }
2551 } 2655 }
2552 return false; 2656 return (TryReuseSpillForPhi(range));
2553 // TODO(mtrofin): Do we need this?
2554 // return (TryReuseSpillForPhi(range));
2555 } 2657 }
2556 2658
2557 2659
2558 void GreedyAllocator::AllocateRegisters() { 2660 void GreedyAllocator::AllocateRegisters() {
2559 for (auto range : data()->live_ranges()) { 2661 for (auto range : data()->live_ranges()) {
2560 if (range == nullptr) continue; 2662 if (range == nullptr) continue;
2561 if (range->kind() == mode()) { 2663 if (range->kind() == mode()) {
2562 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); 2664 DCHECK(!range->HasRegisterAssigned() && !range->spilled());
2563 TRACE("Enqueueing live range %d to priority queue \n", range->id()); 2665 TRACE("Enqueueing live range %d to priority queue \n", range->id());
2564 Enqueue(range); 2666 Enqueue(range);
2565 } 2667 }
2566 } 2668 }
2567 2669
2568 allocations_.resize(num_registers()); 2670 allocations_.resize(num_registers());
2569 auto* zone = data()->allocation_zone(); 2671 auto* zone = allocations_.get_allocator().zone();
dcarney 2015/04/29 18:18:35 use local_zone() accessor here as well
Mircea Trofin 2015/04/30 18:16:49 Done.
2570 for (int i = 0; i < num_registers(); i++) { 2672 for (int i = 0; i < num_registers(); i++) {
2571 allocations_[i] = new (zone) CoallescedLiveRanges(zone); 2673 allocations_[i] = new (zone) CoallescedLiveRanges(zone);
2572 } 2674 }
2573 2675
2574 for (auto* current : GetFixedRegisters(data(), mode())) { 2676 for (auto* current : GetFixedRegisters(data(), mode())) {
2575 if (current != nullptr) { 2677 if (current != nullptr) {
2576 DCHECK_EQ(mode(), current->kind()); 2678 DCHECK_EQ(mode(), current->kind());
2577 int reg_nr = current->assigned_register(); 2679 int reg_nr = current->assigned_register();
2578 bool inserted = allocations_[reg_nr]->Insert(current); 2680 bool inserted = allocations_[reg_nr]->Insert(current);
2579 CHECK(inserted); 2681 CHECK(inserted);
2580 } 2682 }
2581 } 2683 }
2582 2684
2583 while (!queue_.empty()) { 2685 while (!queue_.empty()) {
2584 auto current_pair = queue_.top(); 2686 auto current_pair = queue_.top();
2585 queue_.pop(); 2687 queue_.pop();
2586 auto current = current_pair.second; 2688 auto current = current_pair.second;
2587 if (HandleSpillOperands(current)) continue; 2689 if (HandleSpillOperands(current)) {
2690 continue;
2691 }
2692 bool spill = false;
2588 ZoneSet<LiveRange*> conflicting(zone); 2693 ZoneSet<LiveRange*> conflicting(zone);
2589 if (!TryAllocate(current, &conflicting)) { 2694 if (!TryAllocate(current, &conflicting)) {
2590 DCHECK(!conflicting.empty()); 2695 DCHECK(!conflicting.empty());
2591 float this_max = CalculateSpillWeight(current); 2696 float this_weight = std::numeric_limits<float>::max();
2592 float max_conflicting = CalculateMaxSpillWeight(conflicting); 2697 LifetimePosition split_pos =
2593 if (max_conflicting < this_max) { 2698 FindProgressingSplitPosition(current, &spill);
2594 for (auto* conflict : conflicting) { 2699 if (split_pos.IsValid()) {
2700 this_weight = CalculateSpillWeight(current);
2701 }
2702
2703 bool evicted = false;
2704 for (auto* conflict : conflicting) {
2705 if (CalculateSpillWeight(conflict) < this_weight) {
2595 Evict(conflict); 2706 Evict(conflict);
2596 Enqueue(conflict); 2707 Enqueue(conflict);
2708 evicted = true;
2597 } 2709 }
2710 }
2711 if (evicted) {
2598 conflicting.clear(); 2712 conflicting.clear();
2599 bool allocated = TryAllocate(current, &conflicting); 2713 TryAllocate(current, &conflicting);
2600 CHECK(allocated); 2714 }
2601 DCHECK(conflicting.empty()); 2715 if (!conflicting.empty()) {
2602 } else {
2603 DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start())); 2716 DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start()));
2604 bool allocated = AllocateBlockedRange(current, conflicting); 2717 DCHECK(split_pos.IsValid());
2605 CHECK(allocated); 2718 AllocateBlockedRange(current, split_pos, spill);
2606 } 2719 }
2607 } 2720 }
2608 } 2721 }
2609 2722
2610 for (size_t i = 0; i < allocations_.size(); ++i) { 2723 for (size_t i = 0; i < allocations_.size(); ++i) {
2611 if (!allocations_[i]->IsEmpty()) { 2724 if (!allocations_[i]->IsEmpty()) {
2612 data()->MarkAllocated(mode(), static_cast<int>(i)); 2725 data()->MarkAllocated(mode(), static_cast<int>(i));
2613 } 2726 }
2614 } 2727 }
2615 } 2728 }
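
When TryAllocate fails, the loop above no longer compares a single maximum weight: the current range's weight is finite only if FindProgressingSplitPosition found a valid position, every conflicting range that is strictly lighter is evicted and re-queued, the allocation is retried, and anything still conflicting forces the current range itself to be split (or spilled) at that position. A condensed sketch of just the eviction decision, with Range reduced to the two properties the decision needs:

// Sketch only: spill_weight mirrors CalculateSpillWeight and
// has_progressing_split mirrors FindProgressingSplitPosition(...).IsValid().
#include <limits>
#include <vector>

struct Range {
  float spill_weight;
  bool has_progressing_split;
};

// Returns the conflicts that survive eviction. An empty result means the
// retried allocation should now succeed; a non-empty one means the current
// range must be split (or spilled) at its progressing split position.
std::vector<Range> EvictLighterConflicts(const Range& current,
                                         const std::vector<Range>& conflicts) {
  // A range with no progressing split position cannot be shrunk any further,
  // so it is treated as infinitely expensive to spill and outweighs any
  // splittable conflict. Fixed or unsplittable conflicts carry infinite
  // weight themselves and are therefore never evicted.
  const float this_weight = current.has_progressing_split
                                ? current.spill_weight
                                : std::numeric_limits<float>::max();
  std::vector<Range> survivors;
  for (const Range& conflict : conflicts) {
    if (conflict.spill_weight < this_weight) continue;  // evicted + re-queued
    survivors.push_back(conflict);
  }
  return survivors;
}
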
2616 2729
2617 2730
2618 bool GreedyAllocator::AllocateBlockedRange( 2731 LifetimePosition GreedyAllocator::FindProgressingSplitPosition(
2619 LiveRange* current, const ZoneSet<LiveRange*>& conflicts) { 2732 LiveRange* range, bool* is_spill_pos) {
2620 auto register_use = current->NextRegisterPosition(current->Start()); 2733 auto start = range->Start();
2621 if (register_use == nullptr) { 2734 auto end = range->End();
2622 // There is no use in the current live range that requires a register. 2735
2623 // We can just spill it. 2736 UsePosition* next_reg_use = range->first_pos();
2624 Spill(current); 2737 while (next_reg_use != nullptr &&
2625 return true; 2738 next_reg_use->type() != UsePositionType::kRequiresRegister) {
2739 next_reg_use = next_reg_use->next();
2626 } 2740 }
2627 2741
2628 auto second_part = SplitRangeAt(current, register_use->pos()); 2742 if (next_reg_use == nullptr) {
2629 Spill(second_part); 2743 *is_spill_pos = true;
2744 auto ret = FindOptimalSpillingPos(range, start);
2745 DCHECK(ret.IsValid());
2746 return ret;
2747 }
2630 2748
2631 return true; 2749 *is_spill_pos = false;
2750 auto ret = next_reg_use->pos();
2751
2752 if (start < ret.Prev()) {
2753 return ret.Prev();
2754 }
2755
2756 if (end == ret && start.Next() < end) {
2757 return end.Prev();
2758 }
2759
2760 auto blocked_pos = start;
2761 while (next_reg_use->next() != nullptr &&
2762 next_reg_use->next()->pos().Prev() <= next_reg_use->pos()) {
2763 next_reg_use = next_reg_use->next();
2764 blocked_pos = next_reg_use->pos();
2765 ret = blocked_pos.Next();
2766 }
2767 DCHECK(next_reg_use != nullptr);
2768
2769 if (ret < end && start < ret) {
2770 return ret;
2771 }
2772 ret = ret.Next();
2773
2774 if (ret == blocked_pos) {
2775 return LifetimePosition::Invalid();
2776 }
2777
2778 if (ret < end && start < ret) {
2779 return ret;
2780 }
2781
2782 return LifetimePosition::Invalid();
2632 } 2783 }
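
FindProgressingSplitPosition is the key new helper: when the range has no use that actually requires a register it reports an optimal spill position and sets *is_spill_pos; otherwise it looks for a split position strictly inside the range, so both halves are smaller than the original and repeated splitting always terminates. If no such position exists it returns LifetimePosition::Invalid() and the caller must evict instead. A toy illustration of that "strictly inside" invariant, with lifetime positions modeled as plain ints and kInvalid as a hypothetical sentinel:

// Toy model only: the real code uses LifetimePosition, not int, and derives
// the candidate from the next register-requiring use; kInvalid is a
// hypothetical stand-in for LifetimePosition::Invalid().
#include <cassert>

constexpr int kInvalid = -1;

int ProgressingSplitOrInvalid(int start, int end, int candidate) {
  assert(start < end);
  // Splitting at (or outside) either endpoint would leave one piece equal to
  // the whole range, so the allocator could re-split forever without making
  // progress; such candidates are rejected.
  if (candidate <= start || candidate >= end) return kInvalid;
  return candidate;
}

For example, a range [10, 20) whose first register use is at position 10 has no room to split before that use, so the candidate 10 is rejected here; the real function then searches past the run of back-to-back register uses before giving up.
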
2633 2784
2634 2785
2786 void GreedyAllocator::AllocateBlockedRange(LiveRange* current,
2787 LifetimePosition pos, bool spill) {
2788 auto tail = SplitRangeAt(current, pos);
2789 if (spill) {
2790 Spill(tail);
2791 } else {
2792 Enqueue(tail);
2793 }
2794 if (tail != current) {
2795 Enqueue(current);
2796 }
2797 }
2798
2799
2800 bool GreedyAllocator::TryReuseSpillForPhi(LiveRange* range) {
2801 if (range->IsChild() || !range->is_phi()) return false;
2802 DCHECK(!range->HasSpillOperand());
2803
2804 auto phi_map_value = data()->GetPhiMapValueFor(range->id());
2805 auto phi = phi_map_value->phi();
2806 auto block = phi_map_value->block();
2807 // Count the number of spilled operands.
2808 size_t spilled_count = 0;
2809 LiveRange* first_op = nullptr;
2810 for (size_t i = 0; i < phi->operands().size(); i++) {
2811 int op = phi->operands()[i];
2812 LiveRange* op_range = LiveRangeFor(op);
2813 if (!op_range->HasSpillRange()) continue;
2814 auto pred = code()->InstructionBlockAt(block->predecessors()[i]);
2815 auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
2816 pred->last_instruction_index());
2817 while (op_range != nullptr && !op_range->CanCover(pred_end)) {
2818 op_range = op_range->next();
2819 }
2820 if (op_range != nullptr && op_range->spilled()) {
2821 spilled_count++;
2822 if (first_op == nullptr) {
2823 first_op = op_range->TopLevel();
2824 }
2825 }
2826 }
2827
2828 // Only continue if more than half of the operands are spilled.
2829 if (spilled_count * 2 <= phi->operands().size()) {
2830 return false;
2831 }
2832
2833 // Try to merge the spilled operands and count the number of merged spilled
2834 // operands.
2835 DCHECK(first_op != nullptr);
2836 auto first_op_spill = first_op->GetSpillRange();
2837 size_t num_merged = 1;
2838 for (size_t i = 1; i < phi->operands().size(); i++) {
2839 int op = phi->operands()[i];
2840 auto op_range = LiveRangeFor(op);
2841 if (!op_range->HasSpillRange()) continue;
2842 auto op_spill = op_range->GetSpillRange();
2843 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
2844 num_merged++;
2845 }
2846 }
2847
2848 // Only continue if enough operands could be merged to the
2849 // same spill slot.
2850 if (num_merged * 2 <= phi->operands().size() ||
2851 AreUseIntervalsIntersecting(first_op_spill->interval(),
2852 range->first_interval())) {
2853 return false;
2854 }
2855
2856 // If the range does not need register soon, spill it to the merged
2857 // spill range.
2858 auto next_pos = range->Start();
2859 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
2860 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
2861 if (pos == nullptr) {
2862 auto spill_range =
2863 range->TopLevel()->HasSpillRange()
2864 ? range->TopLevel()->GetSpillRange()
2865 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
2866 bool merged = first_op_spill->TryMerge(spill_range);
2867 CHECK(merged);
2868 Spill(range);
2869 return true;
2870 } else if (pos->pos() > range->Start().NextStart()) {
2871 auto spill_range =
2872 range->TopLevel()->HasSpillRange()
2873 ? range->TopLevel()->GetSpillRange()
2874 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
2875 bool merged = first_op_spill->TryMerge(spill_range);
2876 CHECK(merged);
2877 Enqueue(
2878 SpillBetweenUntil(range, range->Start(), range->Start(), pos->pos()));
2879 return true;
2880 }
2881 return false;
2882 }
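
TryReuseSpillForPhi, now actually called from HandleSpillOperands above, tries to make a phi share its operands' spill slot: it bails out unless a strict majority of the operands are already spilled, and again unless a strict majority can be merged into one spill range whose intervals do not intersect the phi's, and it only spills (or defers) the phi if no register-beneficial use is imminent. A small sketch of the majority gating, using hypothetical counts rather than V8's phi data structures:

// Sketch only: the counts are hypothetical inputs, not derived from V8's
// PhiInstruction; the interval-intersection check is omitted.
#include <cstddef>

bool PhiSpillSlotLooksReusable(size_t operand_count, size_t spilled_count,
                               size_t merged_count) {
  // A strict majority of operands must already be spilled...
  if (spilled_count * 2 <= operand_count) return false;
  // ...and a strict majority must merge into a single spill range.
  if (merged_count * 2 <= operand_count) return false;
  return true;
}

Example: a 3-operand phi with 2 spilled operands that merge into the same spill range passes both checks (4 > 3); with only 1 spilled operand the first check fails and no reuse happens.
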
2883
2884
2635 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {} 2885 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
2636 2886
2637 2887
2638 void OperandAssigner::AssignSpillSlots() { 2888 void OperandAssigner::AssignSpillSlots() {
2639 auto& spill_ranges = data()->spill_ranges(); 2889 auto& spill_ranges = data()->spill_ranges();
2640 // Merge disjoint spill ranges 2890 // Merge disjoint spill ranges
2641 for (size_t i = 0; i < spill_ranges.size(); i++) { 2891 for (size_t i = 0; i < spill_ranges.size(); i++) {
2642 auto range = spill_ranges[i]; 2892 auto range = spill_ranges[i];
2643 if (range->IsEmpty()) continue; 2893 if (range->IsEmpty()) continue;
2644 for (size_t j = i + 1; j < spill_ranges.size(); j++) { 2894 for (size_t j = i + 1; j < spill_ranges.size(); j++) {
(...skipping 24 matching lines...)
2669 InstructionOperand* spill_operand = nullptr; 2919 InstructionOperand* spill_operand = nullptr;
2670 if (!range->TopLevel()->HasNoSpillType()) { 2920 if (!range->TopLevel()->HasNoSpillType()) {
2671 spill_operand = range->TopLevel()->GetSpillOperand(); 2921 spill_operand = range->TopLevel()->GetSpillOperand();
2672 } 2922 }
2673 auto assigned = range->GetAssignedOperand(); 2923 auto assigned = range->GetAssignedOperand();
2674 range->ConvertUsesToOperand(assigned, spill_operand); 2924 range->ConvertUsesToOperand(assigned, spill_operand);
2675 if (range->is_phi()) { 2925 if (range->is_phi()) {
2676 data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned); 2926 data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned);
2677 } 2927 }
2678 if (!range->IsChild() && spill_operand != nullptr) { 2928 if (!range->IsChild() && spill_operand != nullptr) {
2679 range->CommitSpillsAtDefinition(data()->code(), spill_operand, 2929 range->CommitSpillsAtDefinition(
2680 range->has_slot_use()); 2930 data()->code(), spill_operand,
2931 range->has_slot_use() || range->spilled());
2681 } 2932 }
2682 } 2933 }
2683 } 2934 }
2684 2935
2685 2936
2686 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data) 2937 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
2687 : data_(data) {} 2938 : data_(data) {}
2688 2939
2689 2940
2690 bool ReferenceMapPopulator::SafePointsAreInOrder() const { 2941 bool ReferenceMapPopulator::SafePointsAreInOrder() const {
(...skipping 352 matching lines...)
3043 auto eliminate = moves->PrepareInsertAfter(move); 3294 auto eliminate = moves->PrepareInsertAfter(move);
3044 to_insert.push_back(move); 3295 to_insert.push_back(move);
3045 if (eliminate != nullptr) to_eliminate.push_back(eliminate); 3296 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
3046 } 3297 }
3047 } 3298 }
3048 3299
3049 3300
3050 } // namespace compiler 3301 } // namespace compiler
3051 } // namespace internal 3302 } // namespace internal
3052 } // namespace v8 3303 } // namespace v8