Index: src/compiler/register-allocator.h |
diff --git a/src/compiler/register-allocator.h b/src/compiler/register-allocator.h |
index c537bbe7f93352720d659b87dc449843c16b3b27..ff4ddd0c741403528424b3afc66e259f6ff4acae 100644 |
--- a/src/compiler/register-allocator.h |
+++ b/src/compiler/register-allocator.h |
@@ -274,27 +274,26 @@ class UsePosition final : public ZoneObject { |
class SpillRange; |
class RegisterAllocationData; |
+class TopLevelLiveRange; |
// Representation of SSA values' live ranges as a collection of (continuous) |
// intervals over the instruction ordering. |
-class LiveRange final : public ZoneObject { |
+class LiveRange : public ZoneObject { |
public: |
- explicit LiveRange(int id, MachineType machine_type); |
- |
UseInterval* first_interval() const { return first_interval_; } |
UsePosition* first_pos() const { return first_pos_; } |
- LiveRange* parent() const { return parent_; } |
- LiveRange* TopLevel() { return (parent_ == nullptr) ? this : parent_; } |
- const LiveRange* TopLevel() const { |
- return (parent_ == nullptr) ? this : parent_; |
- } |
+ TopLevelLiveRange* TopLevel() { return top_level_; } |
+ const TopLevelLiveRange* TopLevel() const { return top_level_; } |
+ |
+ bool IsTopLevel() const; |
+ |
LiveRange* next() const { return next_; } |
- bool IsChild() const { return parent() != nullptr; } |
- int id() const { return id_; } |
- bool IsFixed() const { return id_ < 0; } |
+ |
+ int relative_id() const { return relative_id_; } |
+ |
bool IsEmpty() const { return first_interval() == nullptr; } |
+ |
InstructionOperand GetAssignedOperand() const; |
- int spill_start_index() const { return spill_start_index_; } |
MachineType machine_type() const { return MachineTypeField::decode(bits_); } |
@@ -310,22 +309,6 @@ class LiveRange final : public ZoneObject { |
RegisterKind kind() const; |
- // Correct only for parent. |
- bool is_phi() const { return IsPhiField::decode(bits_); } |
- void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); } |
- |
- // Correct only for parent. |
- bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); } |
- void set_is_non_loop_phi(bool value) { |
- bits_ = IsNonLoopPhiField::update(bits_, value); |
- } |
- |
- // Relevant only for parent. |
- bool has_slot_use() const { return HasSlotUseField::decode(bits_); } |
- void set_has_slot_use(bool value) { |
- bits_ = HasSlotUseField::update(bits_, value); |
- } |
- |
// Returns use position in this live range that follows both start |
// and last processed use position. |
UsePosition* NextUsePosition(LifetimePosition start) const; |
@@ -350,14 +333,18 @@ class LiveRange final : public ZoneObject { |
// Can this live range be spilled at this position. |
bool CanBeSpilled(LifetimePosition pos) const; |
- // Split this live range at the given position which must follow the start of |
- // the range. |
+ // Splitting primitive used by both SplitAt and Splinter. |
+ // Performs the split, but does not link the resulting ranges. |
+ // The given position must follow the start of the range. |
// All uses following the given position will be moved from this |
// live range to the result live range. |
- void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); |
- void Splinter(LifetimePosition start, LifetimePosition end, LiveRange* result, |
- Zone* zone); |
- void Merge(LiveRange* other, RegisterAllocationData* data); |
+ // The current range will terminate at position, while result will start from |
+ // position. |
+ void DetachAt(LifetimePosition position, LiveRange* result, Zone* zone); |
+ |
+ // Detaches at position, and then links the resulting ranges. Returns the |
+ // child, which starts at position. |
+ LiveRange* SplitAt(LifetimePosition position, Zone* zone); |
// Returns nullptr when no register is hinted, otherwise sets register_index. |
UsePosition* FirstHintPosition(int* register_index) const; |
@@ -381,7 +368,117 @@ class LiveRange final : public ZoneObject { |
return last_interval_->end(); |
} |
+ bool ShouldBeAllocatedBefore(const LiveRange* other) const; |
+ bool CanCover(LifetimePosition position) const; |
+ bool Covers(LifetimePosition position) const; |
+ LifetimePosition FirstIntersection(LiveRange* other) const; |
+ |
+ void Verify() const; |
+ |
+ void ConvertUsesToOperand(const InstructionOperand& op, |
+ const InstructionOperand& spill_op); |
+ void SetUseHints(int register_index); |
+ void UnsetUseHints() { SetUseHints(kUnassignedRegister); } |
+ |
+ // Used solely by the Greedy Allocator: |
+ unsigned GetSize(); |
+ float weight() const { return weight_; } |
+ void set_weight(float weight) { weight_ = weight; } |
+ |
+ static const int kInvalidSize = -1; |
+ static const float kInvalidWeight; |
+ static const float kMaxWeight; |
+ |
+ private: |
+ friend class TopLevelLiveRange; |
+ explicit LiveRange(int relative_id, MachineType machine_type, |
+ TopLevelLiveRange* top_level); |
+ |
+ void AppendAsChild(TopLevelLiveRange* other); |
+ void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level); |
+ |
+ void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } |
+ |
+ UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; |
+ void AdvanceLastProcessedMarker(UseInterval* to_start_of, |
+ LifetimePosition but_not_past) const; |
+ |
+ typedef BitField<bool, 0, 1> SpilledField; |
+ typedef BitField<int32_t, 6, 6> AssignedRegisterField; |
+ typedef BitField<MachineType, 12, 15> MachineTypeField; |
+ |
+ int relative_id_; |
+ uint32_t bits_; |
+ UseInterval* last_interval_; |
+ UseInterval* first_interval_; |
+ UsePosition* first_pos_; |
+ TopLevelLiveRange* top_level_; |
+ LiveRange* next_; |
+ // This is used as a cache; it does not affect correctness. |
+ mutable UseInterval* current_interval_; |
+ // This is used as a cache; it does not affect correctness. |
+ mutable UsePosition* last_processed_use_; |
+ // This is used as a cache; it is invalid outside of BuildLiveRanges. |
+ mutable UsePosition* current_hint_position_; |
+ |
+ // greedy: the number of LifetimePositions covered by this range. Used to |
+ // prioritize selecting live ranges for register assignment, as well as |
+ // in weight calculations. |
+ int size_; |
+ |
+ // greedy: a metric for resolving conflicts between ranges with an assigned |
+ // register and ranges that intersect them and need a register. |
+ float weight_; |
+ |
+ DISALLOW_COPY_AND_ASSIGN(LiveRange); |
+}; |
+ |
+ |
+class TopLevelLiveRange final : public LiveRange { |
+ public: |
+ explicit TopLevelLiveRange(int vreg, MachineType machine_type); |
+ int spill_start_index() const { return spill_start_index_; } |
+ |
+ bool IsFixed() const { return vreg_ < 0; } |
+ |
+ bool is_phi() const { return IsPhiField::decode(bits_); } |
+ void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); } |
+ |
+ bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); } |
+ void set_is_non_loop_phi(bool value) { |
+ bits_ = IsNonLoopPhiField::update(bits_, value); |
+ } |
+ |
+ bool has_slot_use() const { return HasSlotUseField::decode(bits_); } |
+ void set_has_slot_use(bool value) { |
+ bits_ = HasSlotUseField::update(bits_, value); |
+ } |
+ |
+ // Add a new interval or a new use position to this live range. |
+ void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone); |
+ void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone); |
+ void AddUsePosition(UsePosition* pos); |
+ |
+ // Shorten the most recently added interval by setting a new start. |
+ void ShortenTo(LifetimePosition start); |
+ |
+ // Detaches the region between start and end and attributes it to |
+ // result. |
+ // The current range is recorded as result's "splintered_from". No |
+ // parent/child relationship is established between this and result. |
+ void Splinter(LifetimePosition start, LifetimePosition end, |
+ TopLevelLiveRange* result, Zone* zone); |
+ |
+ // Assuming other was splintered from this range, splices other and its |
+ // children back into this range's sequence of children. |
+ void Merge(TopLevelLiveRange* other, RegisterAllocationData* data); |
+ |
+ // Spill range management. |
+ void SetSpillRange(SpillRange* spill_range); |
enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange }; |
+ void set_spill_type(SpillType value) { |
+ bits_ = SpillTypeField::update(bits_, value); |
+ } |
SpillType spill_type() const { return SpillTypeField::decode(bits_); } |
InstructionOperand* GetSpillOperand() const { |
DCHECK(spill_type() == SpillType::kSpillOperand); |
@@ -404,21 +501,21 @@ class LiveRange final : public ZoneObject { |
return spill_type() == SpillType::kSpillOperand; |
} |
bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; } |
- bool MayRequireSpillRange() const { |
- DCHECK(!IsChild() && !IsSplinter()); |
- return !HasSpillOperand() && spill_range_ == nullptr; |
- } |
AllocatedOperand GetSpillRangeOperand() const; |
void SpillAtDefinition(Zone* zone, int gap_index, |
InstructionOperand* operand); |
void SetSpillOperand(InstructionOperand* operand); |
- void SetSpillRange(SpillRange* spill_range); |
+ void SetSpillStartIndex(int start) { |
+ spill_start_index_ = Min(start, spill_start_index_); |
+ } |
+ |
+ void SetSplinteredFrom(TopLevelLiveRange* splinter_parent); |
void CommitSpillsAtDefinition(InstructionSequence* sequence, |
const InstructionOperand& operand, |
bool might_be_duplicated); |
- // This must be applied on top level ranges. |
+ |
// If all the children of this range are spilled in deferred blocks, and if |
// for any non-spilled child with a use position requiring a slot, that range |
// is contained in a deferred block, mark the range as |
@@ -429,29 +526,19 @@ class LiveRange final : public ZoneObject { |
bool TryCommitSpillInDeferredBlock(InstructionSequence* code, |
const InstructionOperand& spill_operand); |
- void SetSpillStartIndex(int start) { |
- spill_start_index_ = Min(start, spill_start_index_); |
+ TopLevelLiveRange* splintered_from() const { return splintered_from_; } |
+ bool IsSplinter() const { return splintered_from_ != nullptr; } |
+ bool MayRequireSpillRange() const { |
+ DCHECK(!IsSplinter()); |
+ return !HasSpillOperand() && spill_range_ == nullptr; |
} |
+ void UpdateSpillRangePostMerge(TopLevelLiveRange* merged); |
+ int vreg() const { return vreg_; } |
- bool ShouldBeAllocatedBefore(const LiveRange* other) const; |
- bool CanCover(LifetimePosition position) const; |
- bool Covers(LifetimePosition position) const; |
- LifetimePosition FirstIntersection(LiveRange* other) const; |
- |
- // Add a new interval or a new use position to this live range. |
- void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone); |
- void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone); |
- void AddUsePosition(UsePosition* pos); |
- |
- // Shorten the most recently added interval by setting a new start. |
- void ShortenTo(LifetimePosition start); |
- |
- void Verify() const; |
- |
- void ConvertUsesToOperand(const InstructionOperand& op, |
- const InstructionOperand& spill_op); |
- void SetUseHints(int register_index); |
- void UnsetUseHints() { SetUseHints(kUnassignedRegister); } |
+ int GetNextChildId() { return ++last_child_id_; } |
+ bool IsSpilledOnlyInDeferredBlocks() const { |
+ return spilled_in_deferred_blocks_; |
+ } |
struct SpillAtDefinitionList; |
@@ -459,91 +546,29 @@ class LiveRange final : public ZoneObject { |
return spills_at_definition_; |
} |
- // Used solely by the Greedy Allocator: |
- unsigned GetSize(); |
- float weight() const { return weight_; } |
- void set_weight(float weight) { weight_ = weight; } |
- |
- bool IsSpilledOnlyInDeferredBlocks() const { |
- return spilled_in_deferred_block_; |
- } |
- |
- static const int kInvalidSize = -1; |
- static const float kInvalidWeight; |
- static const float kMaxWeight; |
- |
- LiveRange* splintered_from() const { |
- DCHECK(!IsChild()); |
- return splintered_from_; |
- } |
- bool IsSplinter() const { |
- DCHECK(!IsChild()); |
- return splintered_from_ != nullptr; |
- } |
- |
- void set_spill_type(SpillType value) { |
- bits_ = SpillTypeField::update(bits_, value); |
- } |
- |
private: |
- void AppendChild(LiveRange* other); |
- void UpdateParentForAllChildren(LiveRange* new_parent); |
- void UpdateSpillRangePostMerge(LiveRange* merged); |
- |
- void SetSplinteredFrom(LiveRange* splinter_parent); |
- |
- |
- void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } |
- |
- UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; |
- void AdvanceLastProcessedMarker(UseInterval* to_start_of, |
- LifetimePosition but_not_past) const; |
- |
- LiveRange* GetLastChild(); |
- |
- typedef BitField<bool, 0, 1> SpilledField; |
typedef BitField<bool, 1, 1> HasSlotUseField; |
typedef BitField<bool, 2, 1> IsPhiField; |
typedef BitField<bool, 3, 1> IsNonLoopPhiField; |
typedef BitField<SpillType, 4, 2> SpillTypeField; |
- typedef BitField<int32_t, 6, 6> AssignedRegisterField; |
- typedef BitField<MachineType, 12, 15> MachineTypeField; |
- int id_; |
- int spill_start_index_; |
- uint32_t bits_; |
- UseInterval* last_interval_; |
- UseInterval* first_interval_; |
- UsePosition* first_pos_; |
- LiveRange* parent_; |
- LiveRange* next_; |
- LiveRange* splintered_from_; |
+ LiveRange* GetLastChild(); |
+ |
+ int vreg_; |
+ int last_child_id_; |
+ TopLevelLiveRange* splintered_from_; |
union { |
// Correct value determined by spill_type() |
InstructionOperand* spill_operand_; |
SpillRange* spill_range_; |
}; |
SpillAtDefinitionList* spills_at_definition_; |
- // This is used as a cache, it doesn't affect correctness. |
- mutable UseInterval* current_interval_; |
- // This is used as a cache, it doesn't affect correctness. |
- mutable UsePosition* last_processed_use_; |
- // This is used as a cache, it's invalid outside of BuildLiveRanges. |
- mutable UsePosition* current_hint_position_; |
- |
- // greedy: the number of LifetimePositions covered by this range. Used to |
- // prioritize selecting live ranges for register assignment, as well as |
- // in weight calculations. |
- int size_; |
- |
- // greedy: a metric for resolving conflicts between ranges with an assigned |
- // register and ranges that intersect them and need a register. |
- float weight_; |
- |
// TODO(mtrofin): generalize spilling after definition, currently specialized |
// just for spill in a single deferred block. |
- bool spilled_in_deferred_block_; |
- DISALLOW_COPY_AND_ASSIGN(LiveRange); |
+ bool spilled_in_deferred_blocks_; |
+ int spill_start_index_; |
+ |
+ DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange); |
}; |
@@ -560,7 +585,7 @@ std::ostream& operator<<(std::ostream& os, |
class SpillRange final : public ZoneObject { |
public: |
static const int kUnassignedSlot = -1; |
- SpillRange(LiveRange* range, Zone* zone); |
+ SpillRange(TopLevelLiveRange* range, Zone* zone); |
UseInterval* interval() const { return use_interval_; } |
// Currently, only 4 or 8 byte slots are supported. |
@@ -576,8 +601,10 @@ class SpillRange final : public ZoneObject { |
DCHECK_NE(kUnassignedSlot, assigned_slot_); |
return assigned_slot_; |
} |
- const ZoneVector<LiveRange*>& live_ranges() const { return live_ranges_; } |
- ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } |
+ const ZoneVector<TopLevelLiveRange*>& live_ranges() const { |
+ return live_ranges_; |
+ } |
+ ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; } |
int byte_width() const { return byte_width_; } |
RegisterKind kind() const { return kind_; } |
@@ -587,7 +614,7 @@ class SpillRange final : public ZoneObject { |
// Merge intervals, making sure the use intervals are sorted |
void MergeDisjointIntervals(UseInterval* other); |
- ZoneVector<LiveRange*> live_ranges_; |
+ ZoneVector<TopLevelLiveRange*> live_ranges_; |
UseInterval* use_interval_; |
LifetimePosition end_position_; |
int assigned_slot_; |
@@ -637,16 +664,20 @@ class RegisterAllocationData final : public ZoneObject { |
InstructionSequence* code, |
const char* debug_name = nullptr); |
- const ZoneVector<LiveRange*>& live_ranges() const { return live_ranges_; } |
- ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } |
- const ZoneVector<LiveRange*>& fixed_live_ranges() const { |
+ const ZoneVector<TopLevelLiveRange*>& live_ranges() const { |
+ return live_ranges_; |
+ } |
+ ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; } |
+ const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const { |
return fixed_live_ranges_; |
} |
- ZoneVector<LiveRange*>& fixed_live_ranges() { return fixed_live_ranges_; } |
- ZoneVector<LiveRange*>& fixed_double_live_ranges() { |
+ ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() { |
+ return fixed_live_ranges_; |
+ } |
+ ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() { |
return fixed_double_live_ranges_; |
} |
- const ZoneVector<LiveRange*>& fixed_double_live_ranges() const { |
+ const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const { |
return fixed_double_live_ranges_; |
} |
ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; } |
@@ -665,21 +696,20 @@ class RegisterAllocationData final : public ZoneObject { |
MachineType MachineTypeFor(int virtual_register); |
- LiveRange* LiveRangeFor(int index); |
+ TopLevelLiveRange* GetOrCreateLiveRangeFor(int index); |
// Creates a new live range. |
- LiveRange* NewLiveRange(int index, MachineType machine_type); |
- LiveRange* NextLiveRange(MachineType machine_type); |
- LiveRange* NewChildRangeFor(LiveRange* range); |
+ TopLevelLiveRange* NewLiveRange(int index, MachineType machine_type); |
+ TopLevelLiveRange* NextLiveRange(MachineType machine_type); |
- SpillRange* AssignSpillRangeToLiveRange(LiveRange* range); |
- SpillRange* CreateSpillRangeForLiveRange(LiveRange* range); |
+ SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range); |
+ SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range); |
MoveOperands* AddGapMove(int index, Instruction::GapPosition position, |
const InstructionOperand& from, |
const InstructionOperand& to); |
- bool IsReference(int virtual_register) const { |
- return code()->IsReference(virtual_register); |
+ bool IsReference(TopLevelLiveRange* top_range) const { |
+ return code()->IsReference(top_range->vreg()); |
} |
bool ExistsUseWithoutDefinition(); |
@@ -688,6 +718,7 @@ class RegisterAllocationData final : public ZoneObject { |
PhiMapValue* InitializePhiMap(const InstructionBlock* block, |
PhiInstruction* phi); |
+ PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range); |
PhiMapValue* GetPhiMapValueFor(int virtual_register); |
bool IsBlockBoundary(LifetimePosition pos) const; |
@@ -699,6 +730,8 @@ class RegisterAllocationData final : public ZoneObject { |
void Print(const SpillRange* spill_range); |
private: |
+ int GetNextLiveRangeId(); |
+ |
Zone* const allocation_zone_; |
Frame* const frame_; |
InstructionSequence* const code_; |
@@ -706,9 +739,9 @@ class RegisterAllocationData final : public ZoneObject { |
const RegisterConfiguration* const config_; |
PhiMap phi_map_; |
ZoneVector<BitVector*> live_in_sets_; |
- ZoneVector<LiveRange*> live_ranges_; |
- ZoneVector<LiveRange*> fixed_live_ranges_; |
- ZoneVector<LiveRange*> fixed_double_live_ranges_; |
+ ZoneVector<TopLevelLiveRange*> live_ranges_; |
+ ZoneVector<TopLevelLiveRange*> fixed_live_ranges_; |
+ ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_; |
ZoneSet<SpillRange*> spill_ranges_; |
DelayedReferences delayed_references_; |
BitVector* assigned_registers_; |
@@ -735,12 +768,6 @@ class ConstraintBuilder final : public ZoneObject { |
InstructionSequence* code() const { return data()->code(); } |
Zone* allocation_zone() const { return data()->allocation_zone(); } |
- Instruction* InstructionAt(int index) { return code()->InstructionAt(index); } |
- bool IsReference(int virtual_register) const { |
- return data()->IsReference(virtual_register); |
- } |
- LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } |
- |
InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos, |
bool is_tagged); |
void MeetRegisterConstraints(const InstructionBlock* block); |
@@ -775,8 +802,6 @@ class LiveRangeBuilder final : public ZoneObject { |
return data()->live_in_sets(); |
} |
- LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } |
- |
void Verify() const; |
// Liveness analysis support. |
@@ -787,8 +812,8 @@ class LiveRangeBuilder final : public ZoneObject { |
static int FixedLiveRangeID(int index) { return -index - 1; } |
int FixedDoubleLiveRangeID(int index); |
- LiveRange* FixedLiveRangeFor(int index); |
- LiveRange* FixedDoubleLiveRangeFor(int index); |
+ TopLevelLiveRange* FixedLiveRangeFor(int index); |
+ TopLevelLiveRange* FixedDoubleLiveRangeFor(int index); |
void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos); |
void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos); |
@@ -798,7 +823,7 @@ class LiveRangeBuilder final : public ZoneObject { |
UsePosition* NewUsePosition(LifetimePosition pos) { |
return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone); |
} |
- LiveRange* LiveRangeFor(InstructionOperand* operand); |
+ TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand); |
// Helper methods for building intervals. |
UsePosition* Define(LifetimePosition position, InstructionOperand* operand, |
void* hint, UsePositionHintType hint_type); |
@@ -832,8 +857,6 @@ class RegisterAllocator : public ZoneObject { |
Zone* allocation_zone() const { return data()->allocation_zone(); } |
- LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } |
- |
// Split the given range at the given position. |
// If range starts at or after the given position then the |
// original range is returned. |
@@ -859,7 +882,7 @@ class RegisterAllocator : public ZoneObject { |
LifetimePosition FindOptimalSpillingPos(LiveRange* range, |
LifetimePosition pos); |
- const ZoneVector<LiveRange*>& GetFixedRegisters() const; |
+ const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const; |
const char* RegisterName(int allocation_index) const; |
private: |
@@ -903,7 +926,7 @@ class LinearScanAllocator final : public RegisterAllocator { |
void InactiveToActive(LiveRange* range); |
// Helper methods for allocating registers. |
- bool TryReuseSpillForPhi(LiveRange* range); |
+ bool TryReuseSpillForPhi(TopLevelLiveRange* range); |
bool TryAllocateFreeReg(LiveRange* range); |
void AllocateBlockedReg(LiveRange* range); |
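
For orientation, the refactoring above splits the old LiveRange into a slim LiveRange plus a TopLevelLiveRange that owns the vreg, the child-id counter, splintering, and the spill bookkeeping. Below is a minimal, self-contained C++ sketch of that parent/child shape; the classes are simplified stand-ins for illustration only (intervals, use positions, machine types, and zone allocation are omitted), not the real V8 types.

// Simplified stand-ins for the classes in the patch above; illustrative only.
#include <cassert>
#include <iostream>

class TopLevelLiveRange;

class LiveRange {
 public:
  LiveRange(int relative_id, TopLevelLiveRange* top_level)
      : relative_id_(relative_id), top_level_(top_level), next_(nullptr) {}
  virtual ~LiveRange() = default;

  TopLevelLiveRange* TopLevel() const { return top_level_; }
  LiveRange* next() const { return next_; }
  int relative_id() const { return relative_id_; }
  bool IsTopLevel() const { return relative_id_ == 0; }

  // Detach-then-link, mirroring LiveRange::SplitAt in the header: the new
  // child is chained after the current range and numbered by the top level.
  LiveRange* SplitAt(int position);

 protected:
  int relative_id_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
};

class TopLevelLiveRange : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg)
      : LiveRange(0, this), vreg_(vreg), last_child_id_(0) {}
  int vreg() const { return vreg_; }
  int GetNextChildId() { return ++last_child_id_; }

 private:
  int vreg_;
  int last_child_id_;
};

LiveRange* LiveRange::SplitAt(int position) {
  // Every range in the chain is identified by (vreg, relative_id).
  LiveRange* child = new LiveRange(TopLevel()->GetNextChildId(), TopLevel());
  child->next_ = next_;
  next_ = child;
  (void)position;  // A real split would also move intervals/uses at or after position.
  return child;
}

int main() {
  TopLevelLiveRange v42(42);
  LiveRange* child = v42.SplitAt(10);  // first child gets relative_id 1
  assert(child->TopLevel() == &v42 && !child->IsTopLevel());
  std::cout << "vreg " << child->TopLevel()->vreg()
            << " child " << child->relative_id() << "\n";
  delete child;
  return 0;
}

In the real header, SplitAt additionally moves the use intervals and use positions following position into the child via DetachAt, as the comments in the patch describe.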