Chromium Code Reviews

Diff: src/compiler/register-allocator.cc

Issue 2347563004: [turbofan] Avoid large deopt blocks (Closed)
Patch Set: refactoring (created 4 years, 2 months ago)
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/adapters.h"
#include "src/compiler/linkage.h"
#include "src/compiler/register-allocator.h"
#include "src/string-stream.h"

namespace v8 {
(...skipping 324 matching lines...)
      DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
      return UsePositionHintType::kNone;
    }
    case InstructionOperand::INVALID:
      break;
  }
  UNREACHABLE();
  return UsePositionHintType::kNone;
}

+void UsePosition::SetHint(UsePosition* use_pos) {
+  DCHECK_NOT_NULL(use_pos);
+  hint_ = use_pos;
+  flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
+}

void UsePosition::ResolveHint(UsePosition* use_pos) {
  DCHECK_NOT_NULL(use_pos);
  if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
  hint_ = use_pos;
  flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
}


void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
(...skipping 219 matching lines...)
                          : current_interval_->start();
  if (to_start_of->start() > start) {
    current_interval_ = to_start_of;
  }
}


LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
  int new_id = TopLevel()->GetNextChildId();
  LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
-  DetachAt(position, child, zone);
+  // If we split, we do so because we're about to switch registers or move
+  // to/from a slot, so there's no value in connecting hints.
+  DetachAt(position, child, zone, DoNotConnectHints);

  child->top_level_ = TopLevel();
  child->next_ = next_;
  next_ = child;
  return child;
}

-
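The DoNotConnectHints / ConnectHints values used above come from an option type this CL threads through DetachAt; its declaration belongs to src/compiler/register-allocator.h, which this CL also touches but which isn't shown in this file. A minimal sketch of the assumed shape:

    // Hypothetical reconstruction; the real declaration is in the header
    // part of this CL.
    enum HintConnectionOption { DoNotConnectHints, ConnectHints };

SplitAt always passes DoNotConnectHints because, per the comment above, a split only happens when the value is about to change location anyway; Splinter (below) connects hints where control enters the deferred region, but not where it leaves it.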
UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
-                                 Zone* zone) {
+                                 Zone* zone,
+                                 HintConnectionOption connect_hints) {
  DCHECK(Start() < position);
  DCHECK(End() > position);
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
(...skipping 58 matching lines...)
  } else {
    first_pos_ = nullptr;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = nullptr;
  current_interval_ = nullptr;

+  if (connect_hints == ConnectHints && use_before != nullptr &&
+      use_after != nullptr) {
+    use_after->SetHint(use_before);
+  }
#ifdef DEBUG
  VerifyChildStructure();
  result->VerifyChildStructure();
#endif
  return use_before;
}

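To see what the connect_hints path buys, here is a standalone model of the connection DetachAt now makes, using hypothetical types rather than V8's: the first use after the split point gets the last use before it as its hint, so when the detached tail is allocated later, the allocator tries the register the head ended up in first.

    // Toy model of DetachAt's hint connection (not V8 code).
    #include <cassert>
    #include <cstdio>

    struct Use {
      int pos;
      const Use* hint = nullptr;  // where this use would like to find its value
      Use* next = nullptr;
    };

    // Split a sorted use list at `position`. With connect_hints set, the first
    // use of the tail is hinted with the last use of the head, mirroring the
    // block guarded by `connect_hints == ConnectHints` above.
    Use* DetachUses(Use** head, int position, bool connect_hints) {
      Use* before = nullptr;
      Use* cur = *head;
      while (cur != nullptr && cur->pos < position) {
        before = cur;
        cur = cur->next;
      }
      Use* after = cur;
      if (before != nullptr) {
        before->next = nullptr;
      } else {
        *head = nullptr;
      }
      if (connect_hints && before != nullptr && after != nullptr) {
        after->hint = before;
      }
      return after;
    }

    int main() {
      Use u3{30};
      Use u2{20, nullptr, &u3};
      Use u1{10, nullptr, &u2};
      Use* head = &u1;
      Use* tail = DetachUses(&head, 25, /*connect_hints=*/true);
      assert(tail == &u3 && u3.hint == &u2);
      std::printf("use at %d is hinted by the use at %d\n", tail->pos,
                  tail->hint->pos);
      return 0;
    }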

void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
  LiveRange* child = this;
(...skipping 222 matching lines...)

  TopLevelLiveRange splinter_temp(-1, representation());
  UsePosition* last_in_splinter = nullptr;
  // Live ranges defined in deferred blocks stay in deferred blocks, so we
  // don't need to splinter them. That means that start should always be
  // after the beginning of the range.
  DCHECK(start > Start());

  if (end >= End()) {
    DCHECK(start > Start());
-    DetachAt(start, &splinter_temp, zone);
+    DetachAt(start, &splinter_temp, zone, ConnectHints);
    next_ = nullptr;
  } else {
    DCHECK(start < End() && Start() < end);

    const int kInvalidId = std::numeric_limits<int>::max();

-    UsePosition* last = DetachAt(start, &splinter_temp, zone);
+    UsePosition* last = DetachAt(start, &splinter_temp, zone, ConnectHints);

    LiveRange end_part(kInvalidId, this->representation(), nullptr);
-    last_in_splinter = splinter_temp.DetachAt(end, &end_part, zone);
+    // The last chunk exits the deferred region, and we don't want to connect
+    // hints here, because the non-deferred region shouldn't be affected
+    // by allocation decisions on the deferred path.
+    last_in_splinter =
+        splinter_temp.DetachAt(end, &end_part, zone, DoNotConnectHints);

    next_ = end_part.next_;
    last_interval_->set_next(end_part.first_interval_);
    // The next splinter will happen either at or after the current interval.
    // We can optimize DetachAt by setting current_interval_ accordingly,
    // which will then be picked up by FirstSearchIntervalForPosition.
    current_interval_ = last_interval_;
    last_interval_ = end_part.last_interval_;

    if (first_pos_ == nullptr) {
(...skipping 1458 matching lines...)
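For intuition, a toy model (plain integer positions, hypothetical numbers) of how Splinter carves the deferred region out of a range, following the two branches above: if the deferred region covers the tail, one detach at start suffices; otherwise the range is detached at start and again at end, with the piece after end reattached to the hot-path range.

    // Toy model of Splinter's interval bookkeeping (not V8 code).
    #include <cstdio>
    #include <utility>
    #include <vector>

    using Interval = std::pair<int, int>;  // [from, to)

    // Fills `hot` with the hot-path pieces and `splinter` with the deferred
    // piece of a range [range.first, range.second), for a deferred region
    // [start, end). Deferred code never defines the range, so start lies
    // strictly after the range begins (the DCHECK in the patch).
    void SplinterModel(Interval range, int start, int end,
                       std::vector<Interval>* hot, Interval* splinter) {
      if (end >= range.second) {
        // Deferred region covers the tail: detach once, at `start`.
        hot->push_back({range.first, start});
        *splinter = {start, range.second};
      } else {
        // Detach at `start`, then again at `end`; the tail rejoins the
        // hot-path range (hints are not connected at `end`).
        hot->push_back({range.first, start});
        hot->push_back({end, range.second});
        *splinter = {start, end};
      }
    }

    int main() {
      std::vector<Interval> hot;
      Interval splinter;
      SplinterModel({4, 40}, 16, 24, &hot, &splinter);
      for (auto iv : hot) std::printf("hot: [%d, %d)\n", iv.first, iv.second);
      std::printf("splinter: [%d, %d)\n", splinter.first, splinter.second);
      return 0;
    }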
        (range->HasSpillRange() && !range->has_slot_use())) {
      continue;
    }
    LifetimePosition start = range->Start();
    TRACE("Live range %d:%d is defined by a spill operand.\n",
          range->TopLevel()->vreg(), range->relative_id());
    LifetimePosition next_pos = start;
    if (next_pos.IsGapPosition()) {
      next_pos = next_pos.NextStart();
    }
-    UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
+
+    // With splinters, we can be more strict and skip over positions
+    // not strictly needing registers.
+    UsePosition* pos =
+        range->IsSplinter()
+            ? range->NextRegisterPosition(next_pos)
+            : range->NextUsePositionRegisterIsBeneficial(next_pos);
    // If the range already has a spill operand and it doesn't need a
    // register immediately, split it and spill the first part of the range.
    if (pos == nullptr) {
      Spill(range);
    } else if (pos->pos() > range->Start().NextStart()) {
      // Do not spill the live range eagerly if the use position that can
      // benefit from the register is too close to the start of the live range.
      LifetimePosition split_pos = GetSplitPositionForInstruction(
          range, pos->pos().ToInstructionIndex());
      // There is no place to split, so we can't split and spill.
(...skipping 212 matching lines...)
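The splinter case above stops only at uses that require a register, while the regular case also stops at uses that would merely benefit from one. A toy model of the two queries (hypothetical types; the real ones are methods on LiveRange):

    // Toy model of the two use-position queries (not V8 code).
    #include <cstdio>
    #include <vector>

    enum class UseKind { kAny, kRegisterBeneficial, kRequiresRegister };
    struct Use { int pos; UseKind kind; };

    const Use* NextRegisterPosition(const std::vector<Use>& uses, int from) {
      for (const Use& u : uses)
        if (u.pos >= from && u.kind == UseKind::kRequiresRegister) return &u;
      return nullptr;
    }

    const Use* NextUsePositionRegisterIsBeneficial(const std::vector<Use>& uses,
                                                   int from) {
      for (const Use& u : uses)
        if (u.pos >= from && u.kind != UseKind::kAny) return &u;
      return nullptr;
    }

    int main() {
      std::vector<Use> uses = {{8, UseKind::kRegisterBeneficial},
                               {14, UseKind::kRequiresRegister}};
      // A splinter skips the merely-beneficial use at 8 and stays spilled
      // until 14; a regular range would already split for the use at 8.
      std::printf("splinter stops at %d, regular at %d\n",
                  NextRegisterPosition(uses, 0)->pos,
                  NextUsePositionRegisterIsBeneficial(uses, 0)->pos);
      return 0;
    }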
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    DCHECK(!current->HasRegisterAssigned() && !current->spilled());

-    bool result = TryAllocateFreeReg(current);
-    if (!result) AllocateBlockedReg(current);
-    if (current->HasRegisterAssigned()) {
-      AddToActive(current);
-    }
+    ProcessCurrentRange(current);
  }
}

+bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
+  DCHECK(range->TopLevel()->IsSplinter());
+  // If we can spill the whole range, great. Otherwise, split above the
+  // first use needing a register and spill the top part.
+  const UsePosition* next_reg = range->NextRegisterPosition(range->Start());
+  if (next_reg == nullptr) {
+    Spill(range);
+    return true;
+  } else if (next_reg->pos().PrevStart() > range->Start()) {
+    LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
+    AddToUnhandledSorted(tail);
+    Spill(range);
+    return true;
+  }
+  return false;
+}

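A hedged sketch of the three outcomes above, using plain integers for lifetime positions (PrevStart() approximated as position minus one, kNoUse standing in for a null NextRegisterPosition result):

    // Toy model of the TrySplitAndSpillSplinter decision (not V8 code).
    #include <cstdio>

    const int kNoUse = -1;

    // Returns 1 if the splinter gets spilled (entirely, or after splitting
    // just before the first use that requires a register), 0 if it needs a
    // register immediately and must go through normal allocation.
    int TrySplitAndSpillSplinterModel(int range_start, int next_reg_use) {
      if (next_reg_use == kNoUse) return 1;  // Spill(range): no use needs a reg.
      int split_pos = next_reg_use - 1;      // stands in for pos().PrevStart()
      if (split_pos > range_start) {
        // SplitRangeAt + AddToUnhandledSorted(tail) + Spill(range): the head
        // stays on the stack until just before the register use.
        return 1;
      }
      return 0;  // register needed right away; caller keeps allocating
    }

    int main() {
      std::printf("%d %d %d\n",
                  TrySplitAndSpillSplinterModel(10, kNoUse),  // 1: spill all
                  TrySplitAndSpillSplinterModel(10, 20),      // 1: split+spill
                  TrySplitAndSpillSplinterModel(10, 10));     // 0: needs reg now
      return 0;
    }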
void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
                                                       int reg) {
  data()->MarkAllocated(range->representation(), reg);
  range->set_assigned_register(reg);
  range->SetUseHints(reg);
  if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
    data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
  }
}
(...skipping 95 matching lines...)
}


void LinearScanAllocator::InactiveToActive(LiveRange* range) {
  RemoveElement(&inactive_live_ranges(), range);
  active_live_ranges().push_back(range);
  TRACE("Moving live range %d:%d from inactive to active\n",
        range->TopLevel()->vreg(), range->relative_id());
}

+void LinearScanAllocator::FindFreeRegistersForRange(
+    LiveRange* range, Vector<LifetimePosition> positions) {
+  int num_regs = num_registers();
+  DCHECK_GE(positions.length(), num_regs);

-bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
-  int num_regs = num_registers();
-  int num_codes = num_allocatable_registers();
-  const int* codes = allocatable_register_codes();
-
-  LifetimePosition free_until_pos[RegisterConfiguration::kMaxFPRegisters];
  for (int i = 0; i < num_regs; i++) {
-    free_until_pos[i] = LifetimePosition::MaxPosition();
+    positions[i] = LifetimePosition::MaxPosition();
  }

  for (LiveRange* cur_active : active_live_ranges()) {
    int cur_reg = cur_active->assigned_register();
-    free_until_pos[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
+    positions[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
    TRACE("Register %s is free until pos %d (1)\n", RegisterName(cur_reg),
          LifetimePosition::GapFromInstructionIndex(0).value());
  }

  for (LiveRange* cur_inactive : inactive_live_ranges()) {
-    DCHECK(cur_inactive->End() > current->Start());
-    LifetimePosition next_intersection =
-        cur_inactive->FirstIntersection(current);
+    DCHECK(cur_inactive->End() > range->Start());
+    LifetimePosition next_intersection = cur_inactive->FirstIntersection(range);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
-    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
+    positions[cur_reg] = Min(positions[cur_reg], next_intersection);
    TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
-          Min(free_until_pos[cur_reg], next_intersection).value());
+          Min(positions[cur_reg], next_intersection).value());
  }
+}

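The function above fills positions[] so that each entry says how long the corresponding register stays free from the perspective of the current range: free forever by default, not free at all if held by an active range, and free until the first intersection if held by an inactive range. A standalone sketch with made-up data:

    // Toy model of the free-until computation (not V8 code).
    #include <algorithm>
    #include <cstdio>

    int main() {
      const int kMaxPosition = 1 << 30;  // stands in for MaxPosition()
      int positions[4];
      for (int i = 0; i < 4; i++) positions[i] = kMaxPosition;

      positions[1] = 0;  // r1: held by an active range, unavailable now

      // r3: held by an inactive range whose next intersection with the
      // current range is at position 24.
      int next_intersection = 24;
      positions[3] = std::min(positions[3], next_intersection);

      for (int i = 0; i < 4; i++)
        std::printf("r%d free until %d\n", i, positions[i]);
      return 0;
    }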
+void LinearScanAllocator::ProcessCurrentRange(LiveRange* current) {
+  LifetimePosition free_until_pos_buff[RegisterConfiguration::kMaxFPRegisters];
+  Vector<LifetimePosition> free_until_pos(
+      free_until_pos_buff, RegisterConfiguration::kMaxFPRegisters);
+  FindFreeRegistersForRange(current, free_until_pos);
+  if (!TryAllocatePreferredReg(current, free_until_pos)) {
+    if (current->TopLevel()->IsSplinter()) {
+      // Splinters have hints inserted when they are created. The hints
+      // are the operands on the hot path carrying the value into the
+      // deferred blocks region.
+      // For splinters, we don't want to insist on finding a register unless
+      // we need one right at the start.
+      // The goal is to reduce the number of moves, so if the hint isn't
Jarin 2016/10/07 11:10:53 I still think this is super confusing because you
Mircea Trofin 2016/10/11 03:36:49 Rewrote, separately analyzing each hint kind.
+      // met, trying to find another register would result in at least
+      // as many moves as spilling (or more, if other ranges need to
+      // elbow their way in).
+      // Tell the caller a free register wasn't allocated, and let the
+      // caller handle the splinter. We will spill/split aggressively.
+      // A more in-depth description may be found in the TurboFan
+      // Register Allocator design doc (see the v8 wiki).
+      if (TrySplitAndSpillSplinter(current)) return;
+    }
+    if (!TryAllocateFreeReg(current, free_until_pos)) {
+      AllocateBlockedReg(current);
+    }
+  }
+  if (current->HasRegisterAssigned()) {
+    AddToActive(current);
+  }
+}
+
+bool LinearScanAllocator::TryAllocatePreferredReg(
+    LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
  int hint_register;
  if (current->FirstHintPosition(&hint_register) != nullptr) {
    TRACE(
        "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
        RegisterName(hint_register), free_until_pos[hint_register].value(),
        current->TopLevel()->vreg(), current->relative_id(),
        current->End().value());

    // The desired register is free until the end of the current live range.
    if (free_until_pos[hint_register] >= current->End()) {
      TRACE("Assigning preferred reg %s to live range %d:%d\n",
            RegisterName(hint_register), current->TopLevel()->vreg(),
            current->relative_id());
      SetLiveRangeAssignedRegister(current, hint_register);
      return true;
    }
  }
+  return false;
+}
+
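This is where the hints installed by SetHint pay off: FirstHintPosition presumably scans the range's use positions for one carrying a hint (for a splinter, the operand on the hot path), and the hinted register is taken only if it stays free past the end of current, since a partially free hint register would just force another split later. A tiny sketch of the acceptance test, with made-up numbers:

    // Hedged sketch of the acceptance check (hypothetical values).
    #include <cstdio>

    int main() {
      int hint_register = 2;  // from FirstHintPosition, e.g. a SetHint hint
      int free_until = 40;    // free_until_pos[hint_register]
      int current_end = 30;   // current->End()
      if (free_until >= current_end) {
        std::printf("assign preferred r%d\n", hint_register);
      } else {
        std::printf("hint not met; fall back to free/blocked allocation\n");
      }
      return 0;
    }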
+bool LinearScanAllocator::TryAllocateFreeReg(
+    LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
+  int num_codes = num_allocatable_registers();
+  const int* codes = allocatable_register_codes();
+  DCHECK_GE(free_until_pos.length(), num_codes);

  // Find the register which stays free for the longest time.
  int reg = codes[0];
  for (int i = 1; i < num_codes; ++i) {
    int code = codes[i];
    if (free_until_pos[code] > free_until_pos[reg]) {
      reg = code;
    }
  }

(...skipping 14 matching lines...)
  // Register reg is available at the range start and is free until the range
  // end.
  DCHECK(pos >= current->End());
  TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
        current->TopLevel()->vreg(), current->relative_id());
  SetLiveRangeAssignedRegister(current, reg);

  return true;
}

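A worked toy example (made-up positions) of the selection above: pick the allocatable register with the largest free_until_pos, and take it outright only if it stays free past the end of current:

    // Toy model of the free-register choice (not V8 code).
    #include <cstdio>

    int main() {
      const int codes[3] = {0, 1, 2};        // allocatable register codes
      int free_until_pos[3] = {0, 35, 24};   // filled in by the caller

      int reg = codes[0];
      for (int i = 1; i < 3; ++i) {
        int code = codes[i];
        if (free_until_pos[code] > free_until_pos[reg]) reg = code;
      }

      int current_end = 30;
      if (free_until_pos[reg] >= current_end) {
        std::printf("assign free r%d\n", reg);  // r1 here: free until 35
      } else {
        std::printf("no register free long enough\n");
      }
      return 0;
    }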
-
void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == nullptr) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }

  int num_regs = num_registers();
(...skipping 769 matching lines...)
        }
      }
    }
  }
}


} // namespace compiler
} // namespace internal
} // namespace v8