| Index: src/compiler/register-allocator.h
|
| diff --git a/src/compiler/register-allocator.h b/src/compiler/register-allocator.h
|
| index baffedd919292c93e68b263793f3424c30aa0aaa..0ce3c923cc94c4f61520a7f311920578e77642a1 100644
|
| --- a/src/compiler/register-allocator.h
|
| +++ b/src/compiler/register-allocator.h
|
| @@ -251,6 +251,7 @@ class UsePosition final : public ZoneObject {
|
| return hint_type() != UsePositionHintType::kUnresolved;
|
| }
|
| static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);
|
| + void dump_hint(std::ostream& os, const RegisterConfiguration* config);
|
|
|
| private:
|
| typedef BitField<UsePositionType, 0, 2> TypeField;
|
| @@ -269,6 +270,7 @@ class UsePosition final : public ZoneObject {
|
|
|
|
|
| class SpillRange;
|
| +class LiveRangeGroup;
|
|
|
|
|
| // Representation of SSA values' live ranges as a collection of (continuous)
|
| @@ -341,7 +343,10 @@ class LiveRange final : public ZoneObject {
|
| LifetimePosition start) const;
|
|
|
| // Can this live range be spilled at this position.
|
| - bool CanBeSpilled(LifetimePosition pos) const;
|
| + bool CanBeSpilled(LifetimePosition pos) const { return CanBeSplit(pos); }
|
| +
|
| + // Can this live range be split at this position.
|
| + bool CanBeSplit(LifetimePosition pos) const;
|
|
|
| // Split this live range at the given position which must follow the start of
|
| // the range.
|
| @@ -428,7 +433,29 @@ class LiveRange final : public ZoneObject {
|
| return spills_at_definition_;
|
| }
|
|
|
| + LiveRangeGroup* group() const { return group_; }
|
| + void SetGroup(LiveRangeGroup* grp) { group_ = grp; }
|
| +
|
| + float weight() const {
|
| + DCHECK_GE(weight_, 0.0);
|
| + return weight_;
|
| + }
|
| +
|
| + unsigned size() {
|
| + if (size_ < 0) RecalculateSize();
|
| + DCHECK_GT(size_, 0);
|
| + return static_cast<unsigned>(size_);
|
| + }
|
| +
|
| + void InvalidateWeightAndSize() {
|
| + weight_ = -1.0;
|
| + size_ = -1;
|
| + }
|
| + void RecalculateWeight(InstructionSequence* code);
|
| + LifetimePosition GetFirstSplittablePosition();
|
| +
|
| private:
|
| + void RecalculateSize();
|
| void set_spill_type(SpillType value) {
|
| bits_ = SpillTypeField::update(bits_, value);
|
| }
|
| @@ -455,6 +482,8 @@ class LiveRange final : public ZoneObject {
|
| UsePosition* first_pos_;
|
| LiveRange* parent_;
|
| LiveRange* next_;
|
| + float weight_;
|
| + int size_;
|
| union {
|
| // Correct value determined by spill_type()
|
| InstructionOperand* spill_operand_;
|
| @@ -468,10 +497,22 @@ class LiveRange final : public ZoneObject {
|
| // This is used as a cache, it's invalid outside of BuildLiveRanges.
|
| mutable UsePosition* current_hint_position_;
|
|
|
| + LiveRangeGroup* group_;
|
| DISALLOW_COPY_AND_ASSIGN(LiveRange);
|
| };
|
|
|
|
|
| +class LiveRangeGroup final : public ZoneObject {
|
| + public:
|
| + explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {}
|
| + ZoneSet<LiveRange*>& ranges() { return ranges_; }
|
| +
|
| + private:
|
| + ZoneSet<LiveRange*> ranges_;
|
| +
|
| + DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup);
|
| +};
|
| +
|
| struct PrintableLiveRange {
|
| const RegisterConfiguration* register_configuration_;
|
| const LiveRange* range_;
|
| @@ -497,6 +538,7 @@ class SpillRange final : public ZoneObject {
|
| DCHECK_EQ(kUnassignedSlot, assigned_slot_);
|
| assigned_slot_ = index;
|
| }
|
| + bool IsSlotAssigned() const { return assigned_slot_ != kUnassignedSlot; }
|
| int assigned_slot() {
|
| DCHECK_NE(kUnassignedSlot, assigned_slot_);
|
| return assigned_slot_;
|
| @@ -518,6 +560,9 @@ class SpillRange final : public ZoneObject {
|
| };
|
|
|
|
|
| +std::ostream& operator<<(std::ostream& os, SpillRange* range);
|
| +
|
| +
|
| class RegisterAllocationData final : public ZoneObject {
|
| public:
|
| class PhiMapValue : public ZoneObject {
|
| @@ -728,12 +773,33 @@ class LiveRangeBuilder final : public ZoneObject {
|
| DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
|
| };
|
|
|
| +struct AllocatorStats {
|
| + unsigned spills;
|
| + unsigned wins;
|
| + unsigned losses_no_eviction;
|
| + unsigned losses_after_eviction;
|
| + unsigned good_split_attempts;
|
| + unsigned good_split_successes;
|
| + unsigned good_split_smart_above;
|
| + unsigned good_split_smart_below;
|
| + unsigned good_split_above;
|
| + unsigned good_split_below;
|
| + unsigned groups_attempted;
|
| + unsigned groups_succeeded;
|
| + unsigned groups_allocated;
|
| + void reset() { *this = AllocatorStats(); }  // value-init zeroes all counters
|
| +};
|
| +
|
| +
|
| +std::ostream& operator<<(std::ostream& os, const AllocatorStats& stats);
|
| +
|
|
|
| class RegisterAllocator : public ZoneObject {
|
| public:
|
| explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);
|
|
|
| protected:
|
| + AllocatorStats stats_;
|
| RegisterAllocationData* data() const { return data_; }
|
| InstructionSequence* code() const { return data()->code(); }
|
| RegisterKind mode() const { return mode_; }
|
| @@ -767,6 +833,7 @@ class RegisterAllocator : public ZoneObject {
|
| // hoist spill position out to the point just before the loop.
|
| LifetimePosition FindOptimalSpillingPos(LiveRange* range,
|
| LifetimePosition pos);
|
| + const char* RegisterName(int allocation_index) const;
|
|
|
| private:
|
| RegisterAllocationData* const data_;
|
| @@ -786,8 +853,6 @@ class LinearScanAllocator final : public RegisterAllocator {
|
| void AllocateRegisters();
|
|
|
| private:
|
| - const char* RegisterName(int allocation_index) const;
|
| -
|
| ZoneVector<LiveRange*>& unhandled_live_ranges() {
|
| return unhandled_live_ranges_;
|
| }
|
| @@ -840,9 +905,91 @@ class LinearScanAllocator final : public RegisterAllocator {
|
| DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
|
| };
|
|
|
| +
|
| +// Storage detail for CoalescedLiveRanges.
|
| +struct AllocatedInterval {
|
| + LifetimePosition start;
|
| + LifetimePosition end;
|
| + LiveRange* range;
|
| + // Orders intervals by start. (std::binary_function base dropped: it is
|
| + // deprecated in C++11, removed in C++17, and its typedefs are unused.)
|
| + struct Comparer {
|
| + bool operator()(const AllocatedInterval& x,
|
| + const AllocatedInterval& y) const {
|
| + return x.start < y.start;
|
| + }
|
| + };
|
| +};
|
| +
|
| +
|
| +typedef ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>::const_iterator
|
| + interval_iterator;
|
| +
|
| +
|
| class CoalescedLiveRanges;
|
|
|
|
|
| +// Iterator over allocated live ranges (CoalescedLiveRanges) that conflict with
|
| +// a given one. See CoalescedLiveRanges::first_conflict.
|
| +// Note that iterating may produce non-consecutively repeating live ranges.
|
| +struct conflict_iterator {
|
| + friend class CoalescedLiveRanges;
|
| + conflict_iterator() : query_(nullptr), storage_(nullptr) {}
|
| +
|
| + LiveRange* operator->() { return pos_->range; }
|
| +
|
| + LiveRange* operator*() { return pos_->range; }
|
| +
|
| + conflict_iterator& operator++();
|
| + bool IsValid() const { return storage_ != nullptr; }
|
| +
|
| + bool operator==(const conflict_iterator& other) const {
|
| + // Two end (invalid) iterators must compare equal, matching operator!=.
|
| + if (!IsValid() || !other.IsValid()) return IsValid() == other.IsValid();
|
| + return query_ == other.query_ && pos_ == other.pos_ && storage_ == other.storage_;
|
| + }
|
| +
|
| + bool operator!=(const conflict_iterator& other) const {
|
| + return IsValid() ^ other.IsValid() || query_ != other.query_ ||
|
| + storage_ != other.storage_ ||
|
| + (storage_ != nullptr && pos_ != other.pos_);
|
| + }
|
| +
|
| + private:
|
| + static bool Intersects(LifetimePosition a_start, LifetimePosition a_end,
|
| + LifetimePosition b_start, LifetimePosition b_end) {
|
| + if (a_start == b_start) return true;
|
| + if (a_start < b_start) return a_end > b_start;
|
| + return Intersects(b_start, b_end, a_start, a_end);
|
| + }
|
| +
|
| + conflict_iterator(UseInterval* q,
|
| + ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>* s,
|
| + interval_iterator& it)
|
| + : query_(nullptr), storage_(nullptr), pos_(it) {}  // NOTE(review): q and s are ignored — looks like an end iterator that only carries pos_; confirm intent or init query_(q), storage_(s).
|
| +
|
| + conflict_iterator(
|
| + UseInterval* query,
|
| + ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>* storage)
|
| + : query_(query), storage_(storage) {
|
| + InitializeForNewQuery();
|
| + }
|
| +
|
| + void MoveToEnd() {
|
| + query_ = nullptr;
|
| + storage_ = nullptr;
|
| + }
|
| +
|
| + void InitializeForNewQuery();
|
| +
|
| + AllocatedInterval AsAllocatedInterval(LifetimePosition pos) {
|
| + return {pos, LifetimePosition::Invalid(), nullptr};
|
| + }
|
| +
|
| + UseInterval* query_;
|
| + ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>* storage_;
|
| + interval_iterator pos_;
|
| +};
|
| +
|
| +
|
| // A variant of the LLVM Greedy Register Allocator. See
|
| // http://blog.llvm.org/2011/09/greedy-register-allocation-in-llvm-30.html
|
| class GreedyAllocator final : public RegisterAllocator {
|
| @@ -853,36 +1000,86 @@ class GreedyAllocator final : public RegisterAllocator {
|
| void AllocateRegisters();
|
|
|
| private:
|
| - LifetimePosition GetSplittablePos(LifetimePosition pos);
|
| + void AllocateRange(LiveRange* current);
|
| + void AllocateGroup(LiveRangeGroup* grp);
|
| + bool TryAllocateGroup(LiveRangeGroup* grp);
|
| + bool TryAllocateGroupAtRegister(unsigned reg, LiveRangeGroup* grp);
|
| +
|
| + struct PQueueElem {
|
| + bool IsGroup() { return is_grp_; }
|
| + explicit PQueueElem(LiveRange* range) : is_grp_(false) {
|
| + storage_.range_ = range;
|
| + }
|
| +
|
| + explicit PQueueElem(LiveRangeGroup* grp) : is_grp_(true) {
|
| + storage_.grp_ = grp;
|
| + }
|
| +
|
| +
|
| + LiveRange* get_range() {
|
| + if (is_grp_) return nullptr;
|
| + return storage_.range_;
|
| + }
|
| +
|
| +
|
| + LiveRangeGroup* get_group() {
|
| + if (is_grp_) return storage_.grp_;
|
| + return nullptr;
|
| + }
|
| +
|
| + private:
|
| + bool is_grp_;
|
| + union {
|
| + LiveRange* range_;
|
| + LiveRangeGroup* grp_;
|
| + } storage_;
|
| + };
|
| +
|
| + void GroupAndEnqueue();
|
| const RegisterConfiguration* config() const { return data()->config(); }
|
| Zone* local_zone() const { return local_zone_; }
|
| - bool TryReuseSpillForPhi(LiveRange* range);
|
| - int GetHintedRegister(LiveRange* range);
|
|
|
| - typedef ZonePriorityQueue<std::pair<unsigned, LiveRange*>> PQueue;
|
| + // Max-heap order by priority (pair.first). (std::binary_function base
|
| + // dropped: deprecated in C++11, removed in C++17; typedefs unused.)
|
| + struct PQueueElemComparer {
|
| + bool operator()(const std::pair<unsigned, PQueueElem>& x,
|
| + const std::pair<unsigned, PQueueElem>& y) const {
|
| + return x.first < y.first;
|
| + }
|
| + };
|
| +
|
| + typedef ZonePriorityQueue<std::pair<unsigned, PQueueElem>, PQueueElemComparer>
|
| + PQueue;
|
|
|
| - unsigned GetLiveRangeSize(LiveRange* range);
|
| void Enqueue(LiveRange* range);
|
| + void Enqueue(LiveRangeGroup* group);
|
|
|
| void Evict(LiveRange* range);
|
| - float CalculateSpillWeight(LiveRange* range);
|
| - float CalculateMaxSpillWeight(const ZoneSet<LiveRange*>& ranges);
|
| + void EvictAll(int reg, const conflict_iterator& first_conflict);
|
| +
|
| + // Find the register with the minimum spill weight. If that is still higher
|
| + // or equal to competing, return -1.
|
| + int FindRegisterToEvictForRange(
|
| + conflict_iterator
|
| + all_conflicts[RegisterConfiguration::kMaxDoubleRegisters],
|
| + float competing);
|
|
|
| + // Same as above, for all the registers in a group.
|
| + int FindRegisterToEvictForGroup(LiveRangeGroup* grp, float competing);
|
|
|
| - bool TryAllocate(LiveRange* current, ZoneSet<LiveRange*>* conflicting);
|
| - bool TryAllocatePhysicalRegister(unsigned reg_id, LiveRange* range,
|
| - ZoneSet<LiveRange*>* conflicting);
|
| - bool HandleSpillOperands(LiveRange* range);
|
| - void AllocateBlockedRange(LiveRange* current, LifetimePosition pos,
|
| - bool spill);
|
| + template <typename TIter>
|
| + float CalculateMaxSpillWeight(const TIter& begin, const TIter& end);
|
| +
|
| + bool HandleSpillOperands(LiveRange* range, LiveRange** remaining);
|
| + void HandleBlockedRange(LiveRange* current);
|
| + bool TrySplitBeforeLoops(LiveRange* current);
|
| + bool TrySplitAfterLoops(LiveRange* current);
|
| + bool TrySplitAroundCalls(LiveRange* range);
|
|
|
| LiveRange* SpillBetweenUntil(LiveRange* range, LifetimePosition start,
|
| LifetimePosition until, LifetimePosition end);
|
| void AssignRangeToRegister(int reg_id, LiveRange* range);
|
|
|
| - LifetimePosition FindProgressingSplitPosition(LiveRange* range,
|
| - bool* is_spill_pos);
|
| -
|
| Zone* local_zone_;
|
| ZoneVector<CoalescedLiveRanges*> allocations_;
|
| PQueue queue_;
|
|
|