Chromium Code Reviews

Side by Side Diff: src/compiler/register-allocator.cc

Issue 2347563004: [turbofan] Avoid large deopt blocks (Closed)
Patch Set: [turbofan] Avoid large deopt blocks | Created 4 years, 2 months ago
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/base/adapters.h" 5 #include "src/base/adapters.h"
6 #include "src/compiler/linkage.h" 6 #include "src/compiler/linkage.h"
7 #include "src/compiler/register-allocator.h" 7 #include "src/compiler/register-allocator.h"
8 #include "src/string-stream.h" 8 #include "src/string-stream.h"
9 9
10 namespace v8 { 10 namespace v8 {
(...skipping 324 matching lines...)
335 DCHECK(op.IsStackSlot() || op.IsFPStackSlot()); 335 DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
336 return UsePositionHintType::kNone; 336 return UsePositionHintType::kNone;
337 } 337 }
338 case InstructionOperand::INVALID: 338 case InstructionOperand::INVALID:
339 break; 339 break;
340 } 340 }
341 UNREACHABLE(); 341 UNREACHABLE();
342 return UsePositionHintType::kNone; 342 return UsePositionHintType::kNone;
343 } 343 }
344 344
345 void UsePosition::SetHint(UsePosition* use_pos) {
346 DCHECK_NOT_NULL(use_pos);
347 hint_ = use_pos;
348 flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
349 }
345 350
346 void UsePosition::ResolveHint(UsePosition* use_pos) { 351 void UsePosition::ResolveHint(UsePosition* use_pos) {
347 DCHECK_NOT_NULL(use_pos); 352 DCHECK_NOT_NULL(use_pos);
348 if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return; 353 if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
349 hint_ = use_pos; 354 hint_ = use_pos;
350 flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos); 355 flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
351 } 356 }
352 357
353 358
354 void UsePosition::set_type(UsePositionType type, bool register_beneficial) { 359 void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
(...skipping 219 matching lines...)
574 : current_interval_->start(); 579 : current_interval_->start();
575 if (to_start_of->start() > start) { 580 if (to_start_of->start() > start) {
576 current_interval_ = to_start_of; 581 current_interval_ = to_start_of;
577 } 582 }
578 } 583 }
579 584
580 585
581 LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) { 586 LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
582 int new_id = TopLevel()->GetNextChildId(); 587 int new_id = TopLevel()->GetNextChildId();
583 LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel()); 588 LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
584 DetachAt(position, child, zone); 589 // If we split, we do so because we're about to switch registers or move
590 // to/from a slot, so there's no value in connecting hints.
591 DetachAt(position, child, zone, DoNotConnectHints);
585 592
586 child->top_level_ = TopLevel(); 593 child->top_level_ = TopLevel();
587 child->next_ = next_; 594 child->next_ = next_;
588 next_ = child; 595 next_ = child;
589 return child; 596 return child;
590 } 597 }
591 598
592
593 UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result, 599 UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
594 Zone* zone) { 600 Zone* zone,
601 HintConnectionOption connect_hints) {
595 DCHECK(Start() < position); 602 DCHECK(Start() < position);
596 DCHECK(End() > position); 603 DCHECK(End() > position);
597 DCHECK(result->IsEmpty()); 604 DCHECK(result->IsEmpty());
598 // Find the last interval that ends before the position. If the 605 // Find the last interval that ends before the position. If the
599 // position is contained in one of the intervals in the chain, we 606 // position is contained in one of the intervals in the chain, we
600 // split that interval and use the first part. 607 // split that interval and use the first part.
601 UseInterval* current = FirstSearchIntervalForPosition(position); 608 UseInterval* current = FirstSearchIntervalForPosition(position);
602 609
603 // If the split position coincides with the beginning of a use interval 610 // If the split position coincides with the beginning of a use interval
604 // we need to split use positions in a special way. 611 // we need to split use positions in a special way.
(...skipping 58 matching lines...)
663 } else { 670 } else {
664 first_pos_ = nullptr; 671 first_pos_ = nullptr;
665 } 672 }
666 result->first_pos_ = use_after; 673 result->first_pos_ = use_after;
667 674
668 // Discard cached iteration state. It might be pointing 675 // Discard cached iteration state. It might be pointing
669 // to the use that no longer belongs to this live range. 676 // to the use that no longer belongs to this live range.
670 last_processed_use_ = nullptr; 677 last_processed_use_ = nullptr;
671 current_interval_ = nullptr; 678 current_interval_ = nullptr;
672 679
680 if (connect_hints == ConnectHints && use_before != nullptr &&
681 use_after != nullptr) {
682 use_after->SetHint(use_before);
683 }
673 #ifdef DEBUG 684 #ifdef DEBUG
674 VerifyChildStructure(); 685 VerifyChildStructure();
675 result->VerifyChildStructure(); 686 result->VerifyChildStructure();
676 #endif 687 #endif
677 return use_before; 688 return use_before;
678 } 689 }
679 690
680 691
681 void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) { 692 void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
682 LiveRange* child = this; 693 LiveRange* child = this;
(...skipping 222 matching lines...)
905 916
906 TopLevelLiveRange splinter_temp(-1, representation()); 917 TopLevelLiveRange splinter_temp(-1, representation());
907 UsePosition* last_in_splinter = nullptr; 918 UsePosition* last_in_splinter = nullptr;
908 // Live ranges defined in deferred blocks stay in deferred blocks, so we 919 // Live ranges defined in deferred blocks stay in deferred blocks, so we
909 // don't need to splinter them. That means that start should always be 920 // don't need to splinter them. That means that start should always be
910 // after the beginning of the range. 921 // after the beginning of the range.
911 DCHECK(start > Start()); 922 DCHECK(start > Start());
912 923
913 if (end >= End()) { 924 if (end >= End()) {
914 DCHECK(start > Start()); 925 DCHECK(start > Start());
915 DetachAt(start, &splinter_temp, zone); 926 DetachAt(start, &splinter_temp, zone, ConnectHints);
916 next_ = nullptr; 927 next_ = nullptr;
917 } else { 928 } else {
918 DCHECK(start < End() && Start() < end); 929 DCHECK(start < End() && Start() < end);
919 930
920 const int kInvalidId = std::numeric_limits<int>::max(); 931 const int kInvalidId = std::numeric_limits<int>::max();
921 932
922 UsePosition* last = DetachAt(start, &splinter_temp, zone); 933 UsePosition* last = DetachAt(start, &splinter_temp, zone, ConnectHints);
923 934
924 LiveRange end_part(kInvalidId, this->representation(), nullptr); 935 LiveRange end_part(kInvalidId, this->representation(), nullptr);
925 last_in_splinter = splinter_temp.DetachAt(end, &end_part, zone); 936 // The last chunk exits the deferred region, and we don't want to connect
937 // hints here, because the non-deferred region shouldn't be affected
938 // by allocation decisions on the deferred path.
939 last_in_splinter =
940 splinter_temp.DetachAt(end, &end_part, zone, DoNotConnectHints);
926 941
927 next_ = end_part.next_; 942 next_ = end_part.next_;
928 last_interval_->set_next(end_part.first_interval_); 943 last_interval_->set_next(end_part.first_interval_);
929 // The next splinter will happen either at or after the current interval. 944 // The next splinter will happen either at or after the current interval.
930 // We can optimize DetachAt by setting current_interval_ accordingly, 945 // We can optimize DetachAt by setting current_interval_ accordingly,
931 // which will then be picked up by FirstSearchIntervalForPosition. 946 // which will then be picked up by FirstSearchIntervalForPosition.
932 current_interval_ = last_interval_; 947 current_interval_ = last_interval_;
933 last_interval_ = end_part.last_interval_; 948 last_interval_ = end_part.last_interval_;
934 949
935 if (first_pos_ == nullptr) { 950 if (first_pos_ == nullptr) {
(...skipping 1458 matching lines...)
2394 (range->HasSpillRange() && !range->has_slot_use())) { 2409 (range->HasSpillRange() && !range->has_slot_use())) {
2395 continue; 2410 continue;
2396 } 2411 }
2397 LifetimePosition start = range->Start(); 2412 LifetimePosition start = range->Start();
2398 TRACE("Live range %d:%d is defined by a spill operand.\n", 2413 TRACE("Live range %d:%d is defined by a spill operand.\n",
2399 range->TopLevel()->vreg(), range->relative_id()); 2414 range->TopLevel()->vreg(), range->relative_id());
2400 LifetimePosition next_pos = start; 2415 LifetimePosition next_pos = start;
2401 if (next_pos.IsGapPosition()) { 2416 if (next_pos.IsGapPosition()) {
2402 next_pos = next_pos.NextStart(); 2417 next_pos = next_pos.NextStart();
2403 } 2418 }
2404 UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos); 2419
2420 // With splinters, we can be stricter and skip over positions where a
2421 // register would merely be beneficial rather than required.
2422 UsePosition* pos =
2423 range->IsSplinter()
2424 ? range->NextRegisterPosition(next_pos)
2425 : range->NextUsePositionRegisterIsBeneficial(next_pos);
2405 // If the range already has a spill operand and it doesn't need a 2426 // If the range already has a spill operand and it doesn't need a
2406 // register immediately, split it and spill the first part of the range. 2427 // register immediately, split it and spill the first part of the range.
2407 if (pos == nullptr) { 2428 if (pos == nullptr) {
2408 Spill(range); 2429 Spill(range);
2409 } else if (pos->pos() > range->Start().NextStart()) { 2430 } else if (pos->pos() > range->Start().NextStart()) {
2410 // Do not spill live range eagerly if use position that can benefit from 2431 // Do not spill live range eagerly if use position that can benefit from
2411 // the register is too close to the start of live range. 2432 // the register is too close to the start of live range.
2412 LifetimePosition split_pos = GetSplitPositionForInstruction( 2433 LifetimePosition split_pos = GetSplitPositionForInstruction(
2413 range, pos->pos().ToInstructionIndex()); 2434 range, pos->pos().ToInstructionIndex());
2414 // There is no place to split, so we can't split and spill. 2435 // There is no place to split, so we can't split and spill.
(...skipping 212 matching lines...)
2627 InactiveToHandled(cur_inactive); 2648 InactiveToHandled(cur_inactive);
2628 --i; // Live range was removed from the list of inactive live ranges. 2649 --i; // Live range was removed from the list of inactive live ranges.
2629 } else if (cur_inactive->Covers(position)) { 2650 } else if (cur_inactive->Covers(position)) {
2630 InactiveToActive(cur_inactive); 2651 InactiveToActive(cur_inactive);
2631 --i; // Live range was removed from the list of inactive live ranges. 2652 --i; // Live range was removed from the list of inactive live ranges.
2632 } 2653 }
2633 } 2654 }
2634 2655
2635 DCHECK(!current->HasRegisterAssigned() && !current->spilled()); 2656 DCHECK(!current->HasRegisterAssigned() && !current->spilled());
2636 2657
2637 bool result = TryAllocateFreeReg(current); 2658 ProcessCurrentRange(current);
2638 if (!result) AllocateBlockedReg(current);
2639 if (current->HasRegisterAssigned()) {
2640 AddToActive(current);
2641 }
2642 } 2659 }
2643 } 2660 }
2644 2661
2662 bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
2663 DCHECK(range->TopLevel()->IsSplinter());
2664 // If there was no hint, apply regular (hot path) heuristics.
2665 if (range->FirstHintPosition() == nullptr) return false;
2666 // If we can spill the whole range, great. Otherwise, split above the
2667 // first use needing a register and spill the top part.
2668 const UsePosition* next_reg = range->NextRegisterPosition(range->Start());
2669 if (next_reg == nullptr) {
2670 Spill(range);
2671 return true;
2672 } else if (next_reg->pos().PrevStart() > range->Start()) {
2673 LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
2674 AddToUnhandledSorted(tail);
2675 Spill(range);
2676 return true;
2677 }
2678 return false;
2679 }
2645 2680
2646 void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range, 2681 void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
2647 int reg) { 2682 int reg) {
2648 data()->MarkAllocated(range->representation(), reg); 2683 data()->MarkAllocated(range->representation(), reg);
2649 range->set_assigned_register(reg); 2684 range->set_assigned_register(reg);
2650 range->SetUseHints(reg); 2685 range->SetUseHints(reg);
2651 if (range->IsTopLevel() && range->TopLevel()->is_phi()) { 2686 if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
2652 data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg); 2687 data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
2653 } 2688 }
2654 } 2689 }
(...skipping 95 matching lines...)
2750 } 2785 }
2751 2786
2752 2787
2753 void LinearScanAllocator::InactiveToActive(LiveRange* range) { 2788 void LinearScanAllocator::InactiveToActive(LiveRange* range) {
2754 RemoveElement(&inactive_live_ranges(), range); 2789 RemoveElement(&inactive_live_ranges(), range);
2755 active_live_ranges().push_back(range); 2790 active_live_ranges().push_back(range);
2756 TRACE("Moving live range %d:%d from inactive to active\n", 2791 TRACE("Moving live range %d:%d from inactive to active\n",
2757 range->TopLevel()->vreg(), range->relative_id()); 2792 range->TopLevel()->vreg(), range->relative_id());
2758 } 2793 }
2759 2794
2795 void LinearScanAllocator::FindFreeRegistersForRange(
2796 LiveRange* range, Vector<LifetimePosition> positions) {
2797 int num_regs = num_registers();
2798 DCHECK_GE(positions.length(), num_regs);
2760 2799
2761 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
2762 int num_regs = num_registers();
2763 int num_codes = num_allocatable_registers();
2764 const int* codes = allocatable_register_codes();
2765
2766 LifetimePosition free_until_pos[RegisterConfiguration::kMaxFPRegisters];
2767 for (int i = 0; i < num_regs; i++) { 2800 for (int i = 0; i < num_regs; i++) {
2768 free_until_pos[i] = LifetimePosition::MaxPosition(); 2801 positions[i] = LifetimePosition::MaxPosition();
2769 } 2802 }
2770 2803
2771 for (LiveRange* cur_active : active_live_ranges()) { 2804 for (LiveRange* cur_active : active_live_ranges()) {
2772 int cur_reg = cur_active->assigned_register(); 2805 int cur_reg = cur_active->assigned_register();
2773 free_until_pos[cur_reg] = LifetimePosition::GapFromInstructionIndex(0); 2806 positions[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
2774 TRACE("Register %s is free until pos %d (1)\n", RegisterName(cur_reg), 2807 TRACE("Register %s is free until pos %d (1)\n", RegisterName(cur_reg),
2775 LifetimePosition::GapFromInstructionIndex(0).value()); 2808 LifetimePosition::GapFromInstructionIndex(0).value());
2776 } 2809 }
2777 2810
2778 for (LiveRange* cur_inactive : inactive_live_ranges()) { 2811 for (LiveRange* cur_inactive : inactive_live_ranges()) {
2779 DCHECK(cur_inactive->End() > current->Start()); 2812 DCHECK(cur_inactive->End() > range->Start());
2780 LifetimePosition next_intersection = 2813 LifetimePosition next_intersection = cur_inactive->FirstIntersection(range);
2781 cur_inactive->FirstIntersection(current);
2782 if (!next_intersection.IsValid()) continue; 2814 if (!next_intersection.IsValid()) continue;
2783 int cur_reg = cur_inactive->assigned_register(); 2815 int cur_reg = cur_inactive->assigned_register();
2784 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); 2816 positions[cur_reg] = Min(positions[cur_reg], next_intersection);
2785 TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg), 2817 TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
2786 Min(free_until_pos[cur_reg], next_intersection).value()); 2818 Min(positions[cur_reg], next_intersection).value());
2787 } 2819 }
2820 }
2788 2821
2822 // High-level register allocation summary:
2823 //
2824 // For regular, or hot (i.e. not splinter) ranges, we first attempt to
2825 // allocate the preferred (hint) register. If that is not possible,
2826 // we find a register that's free, and allocate that. If that's not possible,
2827 // we search for a register to steal from a range that was allocated. The
2828 // goal is to optimize for throughput by avoiding register-to-memory
2829 // moves, which are expensive.
2830 //
2831 // For splinters, the goal is to minimize the number of moves. First we try
2832 // to allocate the preferred register (more discussion follows). Failing that,
2833 // we bail out and spill as far as we can, unless the first use is at start,
2834 // in which case we apply the same behavior as we do for regular ranges.
2835 // If there is no hint, we apply the hot-path behavior.
2836 //
2837 // For the splinter, the hint register may come from:
2838 //
2839 // - the hot path (we set it at splintering time with SetHint). In this case, if
2840 // we cannot offer the hint register, spilling is better because it's at most
2841 // 1 move, while trying to find and offer another register is at least 1 move.
2842 //
2843 // - a constraint. If we cannot offer that register, it's because there is
2844 // some interference. Offering the hint register up to the point of the
2845 // interference would result in a move at the interference, plus a move
2846 // to satisfy the constraint. That is also the number of moves we'd make
2847 // if we spill, with the added possibility that the range is already
2848 // spilled, in which case we save one move (the spill).
2849 // Note that this can only be an input constraint; if it were an output
2850 // constraint, the range wouldn't be a splinter in the first place,
2851 // because that would mean the range is defined in a deferred block,
2852 // and we don't mark ranges defined in deferred blocks as splinters
2853 // (they live in deferred blocks only).
2854 //
2855 // - a phi. The same analysis as in the case of the input constraint applies.
2856 //
2857 void LinearScanAllocator::ProcessCurrentRange(LiveRange* current) {
2858 LifetimePosition free_until_pos_buff[RegisterConfiguration::kMaxFPRegisters];
2859 Vector<LifetimePosition> free_until_pos(
2860 free_until_pos_buff, RegisterConfiguration::kMaxFPRegisters);
2861 FindFreeRegistersForRange(current, free_until_pos);
2862 if (!TryAllocatePreferredReg(current, free_until_pos)) {
2863 if (current->TopLevel()->IsSplinter()) {
2864 if (TrySplitAndSpillSplinter(current)) return;
2865 }
2866 if (!TryAllocateFreeReg(current, free_until_pos)) {
2867 AllocateBlockedReg(current);
2868 }
2869 }
2870 if (current->HasRegisterAssigned()) {
2871 AddToActive(current);
2872 }
2873 }
2874
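
The summary comment above ProcessCurrentRange argues the decision order in prose. As a rough illustration only, here is a minimal, self-contained C++ sketch of that order (try the hint register first, otherwise the register that stays free the longest); it is not part of the patch, uses no V8 types, and all names in it (kNumRegs, FreeUntil, TryPreferred, TryAnyFree) are hypothetical, with plain ints standing in for LifetimePosition.

#include <array>
#include <climits>
#include <cstdio>

constexpr int kNumRegs = 4;
// Each entry says until which position a register stays free;
// INT_MAX means "free for as long as we can see".
using FreeUntil = std::array<int, kNumRegs>;

// Loosely mirrors TryAllocatePreferredReg: take the hint only if it stays
// free for the whole range.
bool TryPreferred(const FreeUntil& free_until, int hint, int range_end,
                  int* assigned) {
  if (hint >= 0 && free_until[hint] >= range_end) {
    *assigned = hint;
    return true;
  }
  return false;
}

// Loosely mirrors TryAllocateFreeReg: otherwise pick the register that stays
// free for the longest time, as long as it is free at the range start at all.
bool TryAnyFree(const FreeUntil& free_until, int range_start, int range_end,
                int* assigned) {
  int best = 0;
  for (int r = 1; r < kNumRegs; ++r) {
    if (free_until[r] > free_until[best]) best = r;
  }
  if (free_until[best] <= range_start) return false;  // everything is blocked
  *assigned = best;
  if (free_until[best] < range_end) {
    // The real allocator would split the range at free_until[best] here.
    std::printf("would split at position %d\n", free_until[best]);
  }
  return true;
}

int main() {
  FreeUntil free_until = {10, INT_MAX, 3, 25};
  int reg = -1;
  // The hint (register 2) is blocked too early for a range ending at 20,
  // so we fall through to the "free for longest" choice, register 1.
  if (!TryPreferred(free_until, /*hint=*/2, /*range_end=*/20, &reg)) {
    TryAnyFree(free_until, /*range_start=*/0, /*range_end=*/20, &reg);
  }
  std::printf("assigned r%d\n", reg);  // prints: assigned r1
  return 0;
}

If both steps fail, the patch falls back to AllocateBlockedReg, as in the pre-existing code.
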
2875 bool LinearScanAllocator::TryAllocatePreferredReg(
2876 LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
2789 int hint_register; 2877 int hint_register;
2790 if (current->FirstHintPosition(&hint_register) != nullptr) { 2878 if (current->FirstHintPosition(&hint_register) != nullptr) {
2791 TRACE( 2879 TRACE(
2792 "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n", 2880 "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
2793 RegisterName(hint_register), free_until_pos[hint_register].value(), 2881 RegisterName(hint_register), free_until_pos[hint_register].value(),
2794 current->TopLevel()->vreg(), current->relative_id(), 2882 current->TopLevel()->vreg(), current->relative_id(),
2795 current->End().value()); 2883 current->End().value());
2796 2884
2797 // The desired register is free until the end of the current live range. 2885 // The desired register is free until the end of the current live range.
2798 if (free_until_pos[hint_register] >= current->End()) { 2886 if (free_until_pos[hint_register] >= current->End()) {
2799 TRACE("Assigning preferred reg %s to live range %d:%d\n", 2887 TRACE("Assigning preferred reg %s to live range %d:%d\n",
2800 RegisterName(hint_register), current->TopLevel()->vreg(), 2888 RegisterName(hint_register), current->TopLevel()->vreg(),
2801 current->relative_id()); 2889 current->relative_id());
2802 SetLiveRangeAssignedRegister(current, hint_register); 2890 SetLiveRangeAssignedRegister(current, hint_register);
2803 return true; 2891 return true;
2804 } 2892 }
2805 } 2893 }
2894 return false;
2895 }
2896
2897 bool LinearScanAllocator::TryAllocateFreeReg(
2898 LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
2899 int num_codes = num_allocatable_registers();
2900 const int* codes = allocatable_register_codes();
2901 DCHECK_GE(free_until_pos.length(), num_codes);
2806 2902
2807 // Find the register which stays free for the longest time. 2903 // Find the register which stays free for the longest time.
2808 int reg = codes[0]; 2904 int reg = codes[0];
2809 for (int i = 1; i < num_codes; ++i) { 2905 for (int i = 1; i < num_codes; ++i) {
2810 int code = codes[i]; 2906 int code = codes[i];
2811 if (free_until_pos[code] > free_until_pos[reg]) { 2907 if (free_until_pos[code] > free_until_pos[reg]) {
2812 reg = code; 2908 reg = code;
2813 } 2909 }
2814 } 2910 }
2815 2911
(...skipping 14 matching lines...)
2830 // Register reg is available at the range start and is free until the range 2926 // Register reg is available at the range start and is free until the range
2831 // end. 2927 // end.
2832 DCHECK(pos >= current->End()); 2928 DCHECK(pos >= current->End());
2833 TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg), 2929 TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
2834 current->TopLevel()->vreg(), current->relative_id()); 2930 current->TopLevel()->vreg(), current->relative_id());
2835 SetLiveRangeAssignedRegister(current, reg); 2931 SetLiveRangeAssignedRegister(current, reg);
2836 2932
2837 return true; 2933 return true;
2838 } 2934 }
2839 2935
2840
2841 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) { 2936 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
2842 UsePosition* register_use = current->NextRegisterPosition(current->Start()); 2937 UsePosition* register_use = current->NextRegisterPosition(current->Start());
2843 if (register_use == nullptr) { 2938 if (register_use == nullptr) {
2844 // There is no use in the current live range that requires a register. 2939 // There is no use in the current live range that requires a register.
2845 // We can just spill it. 2940 // We can just spill it.
2846 Spill(current); 2941 Spill(current);
2847 return; 2942 return;
2848 } 2943 }
2849 2944
2850 int num_regs = num_registers(); 2945 int num_regs = num_registers();
(...skipping 769 matching lines...)
3620 } 3715 }
3621 } 3716 }
3622 } 3717 }
3623 } 3718 }
3624 } 3719 }
3625 3720
3626 3721
3627 } // namespace compiler 3722 } // namespace compiler
3628 } // namespace internal 3723 } // namespace internal
3629 } // namespace v8 3724 } // namespace v8
