Chromium Code Reviews
| Index: runtime/vm/flow_graph_allocator.h |
| diff --git a/runtime/vm/flow_graph_allocator.h b/runtime/vm/flow_graph_allocator.h |
| index 7848d2b11822e7b5d6a25d69c61d9430cd34ba02..d563c5ab3b92379a92e55ccc22b4602e4387dbf5 100644 |
| --- a/runtime/vm/flow_graph_allocator.h |
| +++ b/runtime/vm/flow_graph_allocator.h |
| @@ -10,12 +10,16 @@ |
| namespace dart { |
| +class FlowGraphBuilder; |
| +class LiveRange; |
| +class UseInterval; |
| + |
| class FlowGraphAllocator : public ValueObject { |
| public: |
| - FlowGraphAllocator(const GrowableArray<BlockEntryInstr*>& postorder, |
| - intptr_t max_ssa_temp_index); |
| + FlowGraphAllocator(const GrowableArray<BlockEntryInstr*>& block_order, |
| + FlowGraphBuilder* builder); |
| - void ResolveConstraints(); |
| + void AllocateRegisters(); |
| // Build live-in and live-out sets for each block. |
| void AnalyzeLiveness(); |
| @@ -42,6 +46,8 @@ class FlowGraphAllocator : public ValueObject { |
| // Print results of liveness analysis. |
| void DumpLiveness(); |
| + FlowGraphBuilder* builder_; |
|
srdjan
2012/07/10 23:27:54
Move all instance fields together, to the end. The […comment truncated in capture]
Vyacheslav Egorov (Google)
2012/07/11 13:27:58
Done. Moved fields down. Added comments to each new field.
|
| + |
| // Live-out sets for each block. They contain indices of SSA values |
| // that are live out from this block: that is values that were either |
| // defined in this block or live into it and that are used in some |
| @@ -56,15 +62,211 @@ class FlowGraphAllocator : public ValueObject { |
| // that are used by this block or its successors. |
| GrowableArray<BitVector*> live_in_; |
| + const GrowableArray<BlockEntryInstr*>& block_order_; |
| const GrowableArray<BlockEntryInstr*>& postorder_; |
| // Number of virtual registers. Currently equal to the number of |
| // SSA values. |
| const intptr_t vreg_count_; |
| + void NumberInstructions(); |
| + |
| + LiveRange* GetLiveRange(intptr_t vreg); |
| + void BuildLiveRanges(); |
| + void PrintLiveRanges(); |
| + |
| + |
| + void UseValue(Instruction* instr, |
| + intptr_t def, |
| + intptr_t use, |
| + intptr_t vreg, |
| + Location* loc, |
| + bool use_at_end); |
| + void Define(Instruction* instr, intptr_t def, intptr_t vreg, Location* loc); |
| + |
| + void AddToUnallocated(UseInterval* chain); |
| + void BlockLocation(Location loc, intptr_t pos); |
| + |
| + bool AllocateFreeRegister(UseInterval* unallocated); |
| + void AssignFreeRegister(UseInterval* unallocated, Register reg); |
| + |
| + void FinalizeInterval(UseInterval* interval, Location loc); |
| + void AdvanceActiveIntervals(const intptr_t start); |
| + |
| + bool UnallocatedIsSorted(); |
| + void AllocateCPURegisters(); |
| + |
| + GrowableArray<LiveRange*> live_ranges_; |
| + |
| + GrowableArray<UseInterval*> unallocated_; |
| + |
| + UseInterval* cpu_regs_[kNumberOfCpuRegisters]; |
| + |
| DISALLOW_COPY_AND_ASSIGN(FlowGraphAllocator); |
| }; |
| + |
| +// UsePosition represents a single use of an SSA value by some instruction. |
| +// It points to a location slot which either tells the register allocator |
| +// where the instruction expects the value (if the slot contains a fixed |
| +// location) or asks the register allocator to allocate storage (register |
| +// or spill slot) for this use with certain properties (if the slot |
| +// contains an unallocated location). |
| +class UsePosition : public ZoneAllocated { |
| + public: |
| + static const intptr_t kFixedUse = 0x1; |
| + static const intptr_t kSameAsFirstUse = 0x2; |
| + static const intptr_t kInstructionTagMask = 0x3; |
|
srdjan
2012/07/10 23:27:54
If you make the upper three enum, you could use ty[…comment truncated in capture]
Vyacheslav Egorov (Google)
2012/07/11 13:27:58
Switched to enum.
This is a pretty ugly place. I[…comment truncated in capture]
|
| + static const intptr_t kPositionShift = 2; |
| + |
| + static intptr_t FlagForUse(Location* loc) { |
|
srdjan
2012/07/10 23:27:54
const Location&
Vyacheslav Egorov (Google)
2012/07/11 13:27:58
Done.
|
| + ASSERT(loc != NULL); |
| + if (loc->IsRegister()) return kFixedUse; |
| + if (loc->IsUnallocated() && |
| + loc->policy() == Location::kSameAsFirstInput) { |
|
srdjan
2012/07/10 23:27:54
Add parenthesis
Vyacheslav Egorov (Google)
2012/07/11 13:27:58
Done.
|
| + return kSameAsFirstUse; |
| + } |
| + return kInstructionTagMask; |
| + } |
| + |
| + // TODO(vegorov): we encode either position or instruction pointer |
| + // into the pos_ field to generate moves when needed to resolve |
| + // fixed or same-as-first constraints, but this looks ugly. |
| + UsePosition(Instruction* instr, |
| + intptr_t pos, |
| + UsePosition* next, |
| + Location* location_slot) |
| + : pos_(pos << kPositionShift), |
|
srdjan
2012/07/10 23:27:54
This shift is wasted if instr != NULL. Maybe set i[…comment truncated in capture]
Vyacheslav Egorov (Google)
2012/07/11 13:27:58
instr != NULL is considered an unlikely occasion t[…comment truncated in capture]
srdjan
2012/07/11 17:22:31
Add comment, otherwise 'somebody' may clean it up
|
| + location_slot_(location_slot), |
| + next_(next) { |
| + if (instr != NULL) { |
| + pos_ = reinterpret_cast<intptr_t>(instr) | FlagForUse(location_slot_); |
| + } |
| + ASSERT(this->pos() == pos); |
| + } |
| + |
| + // Tell the use that it should load the value from the given location. |
| + // If location slot for the use is flexible (unallocated) it will be updated |
| + // with the given location. Otherwise a move will be scheduled from the given |
| + // location to the location already stored in the slot. |
| + void AssignLocation(Location loc); |
| + |
| + Location* location_slot() const { return location_slot_; } |
| + void set_location_slot(Location* location_slot) { |
| + location_slot_ = location_slot; |
| + } |
| + |
| + void set_next(UsePosition* next) { next_ = next; } |
| + UsePosition* next() const { return next_; } |
| + |
| + intptr_t pos() const { |
| + if ((pos_ & kInstructionTagMask) != 0) { |
| + return instr()->pos(); |
| + } |
| + return pos_ >> kPositionShift; |
| + } |
| + |
| + Instruction* instr() const { |
| + ASSERT((pos_ & kInstructionTagMask) != 0); |
| + return reinterpret_cast<Instruction*>(pos_ & ~kInstructionTagMask); |
| + } |
| + |
| + bool HasHint() const { |
| + return (pos_ & kInstructionTagMask) == kFixedUse; |
| + } |
| + |
| + Location hint() const { |
| + ASSERT(HasHint()); |
| + ASSERT(location_slot()->IsRegister()); |
| + return *location_slot_; |
| + } |
| + |
| + private: |
| + intptr_t pos_; |
| + Location* location_slot_; |
| + UsePosition* next_; |
| +}; |
| + |
| + |
| +// UseInterval represents a holeless half open interval of liveness for a given |
| +// SSA value: [start, end). The interval can cover zero or more uses. |
| +// During the register allocation UseIntervals from different live ranges |
| + allocated to the same register will be chained together through the |
| + next_allocated_ field. |
| +// Note: currently all uses of the same SSA value are linked together into a |
| +// single list (and not split between UseIntervals). |
|
srdjan
2012/07/10 23:27:54
Please describe what the start and end values are (wha[…comment truncated in capture]
Vyacheslav Egorov (Google)
2012/07/11 13:27:58
Expanded comment. Added comment to NumberInstructions().
|
| +class UseInterval : public ZoneAllocated { |
| + public: |
| + UseInterval(intptr_t vreg, intptr_t start, intptr_t end, UseInterval* next) |
| + : vreg_(vreg), |
| + start_(start), |
| + end_(end), |
| + uses_((next == NULL) ? NULL : next->uses_), |
| + next_(next), |
| + next_allocated_(next) { } |
| + |
| + |
| + void AddUse(Instruction* instr, intptr_t pos, Location* loc); |
| + void Print(); |
| + |
| + intptr_t vreg() const { return vreg_; } |
| + intptr_t start() const { return start_; } |
| + intptr_t end() const { return end_; } |
| + UsePosition* first_use() const { return uses_; } |
| + UseInterval* next() const { return next_; } |
| + |
| + bool Contains(intptr_t pos) { |
|
srdjan
2012/07/10 23:27:54
const
Vyacheslav Egorov (Google)
2012/07/11 13:27:58
Done.
|
| + return (start() <= pos) && (pos < end()); |
| + } |
| + |
| + intptr_t Intersect(UseInterval* other); |
| + |
| + UseInterval* Split(intptr_t pos); |
| + |
| + void set_next_allocated(UseInterval* next_allocated) { |
| + next_allocated_ = next_allocated; |
| + } |
| + UseInterval* next_allocated() const { return next_allocated_; } |
| + |
| + private: |
| + friend class LiveRange; |
| + const intptr_t vreg_; |
| + |
| + intptr_t start_; |
| + intptr_t end_; |
| + |
| + UsePosition* uses_; |
| + |
| + UseInterval* next_; |
| + UseInterval* next_allocated_; |
| +}; |
| + |
| + |
| +// LiveRange represents a sequence of UseIntervals for a given SSA value. |
| +// TODO(vegorov): this class is actually redundant currently. |
| +class LiveRange : public ZoneAllocated { |
| + public: |
| + explicit LiveRange(intptr_t vreg) : vreg_(vreg), head_(NULL) { } |
| + |
| + void DefineAt(Instruction* instr, intptr_t pos, Location* loc); |
| + |
| + void UseAt(Instruction* instr, |
| + intptr_t def_pos, |
| + intptr_t use_pos, |
| + bool use_at_end, |
| + Location* loc); |
| + |
| + void AddUseInterval(intptr_t start, intptr_t end); |
| + |
| + void Print(); |
| + |
| + UseInterval* head() const { return head_; } |
| + |
| + private: |
| + const intptr_t vreg_; |
| + UseInterval* head_; |
| +}; |
| + |
| + |
| } // namespace dart |
| #endif // VM_FLOW_GRAPH_ALLOCATOR_H_ |