Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(803)

Side by Side Diff: runtime/vm/flow_graph_allocator.h

Issue 10800037: New linear scan allocator. (Closed) Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Created 8 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | runtime/vm/flow_graph_allocator.cc » ('j') | runtime/vm/flow_graph_allocator.cc » ('J')
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #ifndef VM_FLOW_GRAPH_ALLOCATOR_H_ 5 #ifndef VM_FLOW_GRAPH_ALLOCATOR_H_
6 #define VM_FLOW_GRAPH_ALLOCATOR_H_ 6 #define VM_FLOW_GRAPH_ALLOCATOR_H_
7 7
8 #include "vm/growable_array.h" 8 #include "vm/growable_array.h"
9 #include "vm/intermediate_language.h" 9 #include "vm/intermediate_language.h"
10 10
11 namespace dart { 11 namespace dart {
12 12
13 class AllocationFinger;
13 class FlowGraphBuilder; 14 class FlowGraphBuilder;
14 class LiveRange; 15 class LiveRange;
15 class UseInterval; 16 class UseInterval;
17 class UsePosition;
16 18
17 class FlowGraphAllocator : public ValueObject { 19 class FlowGraphAllocator : public ValueObject {
18 public: 20 public:
19 FlowGraphAllocator(const GrowableArray<BlockEntryInstr*>& block_order, 21 FlowGraphAllocator(const GrowableArray<BlockEntryInstr*>& block_order,
20 FlowGraphBuilder* builder); 22 FlowGraphBuilder* builder);
21 23
22 void AllocateRegisters(); 24 void AllocateRegisters();
23 25
24 // Build live-in and live-out sets for each block. 26 // Build live-in and live-out sets for each block.
25 void AnalyzeLiveness(); 27 void AnalyzeLiveness();
(...skipping 30 matching lines...) Expand all
56 // 58 //
57 // Having two positions allows us to capture non-trivial register 59 // Having two positions allows us to capture non-trivial register
58 // constraints in use intervals: for example we can declare that 60 // constraints in use intervals: for example we can declare that
59 // an input value is only used at the start of the instruction and 61 // an input value is only used at the start of the instruction and
60 // this might allow register allocator to allocate both this input 62 // this might allow register allocator to allocate both this input
61 // and output (or temp) to the same register if this is the last 63 // and output (or temp) to the same register if this is the last
62 // use of the value. 64 // use of the value.
63 // Additionally creates parallel moves at the joins' predecessors 65 // Additionally creates parallel moves at the joins' predecessors
64 // that will be used for phi resolution. 66 // that will be used for phi resolution.
65 void NumberInstructions(); 67 void NumberInstructions();
68 Instruction* InstructionAt(intptr_t pos);
69 bool IsBlockEntry(intptr_t pos);
66 70
67 LiveRange* GetLiveRange(intptr_t vreg); 71 LiveRange* GetLiveRange(intptr_t vreg);
68 void BuildLiveRanges(); 72 void BuildLiveRanges();
69 void PrintLiveRanges(); 73 void PrintLiveRanges();
70 74
71 // Register use of the given virtual register at lifetime position use_pos. 75 // Register use of the given virtual register at lifetime position use_pos.
72 // If definition position is unknown then start of the block containing 76 // If definition position is unknown then start of the block containing
73 // use_pos will be passed. 77 // use_pos will be passed.
74 void UseValue(Instruction* instr, 78 void UseValue(intptr_t def_pos, // Lifetime position for the definition.
75 intptr_t def_pos, // Lifetime position for the definition.
76 intptr_t use_pos, // Lifetime position for the use. 79 intptr_t use_pos, // Lifetime position for the use.
77 intptr_t vreg, 80 intptr_t vreg,
78 Location* loc, 81 Location* loc,
79 bool use_at_end); 82 bool use_at_end);
80 83
81 // Register definition of the given virtual register at lifetime position 84 // Register definition of the given virtual register at lifetime position
82 // def_pos. Existing use interval will be shortened to start at def_pos. 85 // def_pos. Existing use interval will be shortened to start at def_pos.
83 void Define(Instruction* instr, 86 void Define(intptr_t def_pos,
84 intptr_t def_pos,
85 intptr_t vreg, 87 intptr_t vreg,
86 Location* loc); 88 Location* loc);
87 89
88 void AddToUnallocated(UseInterval* chain); 90 void AddToUnallocated(LiveRange* range);
89 void BlockLocation(Location loc, intptr_t pos); 91 void BlockLocation(Location loc, intptr_t pos);
90 92
91 bool AllocateFreeRegister(UseInterval* unallocated); 93 bool AllocateFreeRegister(LiveRange* unallocated);
92 void AssignFreeRegister(UseInterval* unallocated, Register reg);
93 94
94 void FinalizeInterval(UseInterval* interval, Location loc); 95 void AllocateAnyRegister(LiveRange* unallocated);
96 bool UpdateFreeUntil(Register reg,
97 LiveRange* unallocated,
98 intptr_t* cur_free_until,
99 intptr_t* cur_blocked_at);
100
101 void AssignBlockedRegister(LiveRange* unallocated, Register reg);
102
103 intptr_t FirstIntersectionWithAllocated(Register reg,
104 LiveRange* unallocated);
105 bool EvictIntersection(LiveRange* allocated, LiveRange* unallocated);
106
107 void RemoveEvicted(Register reg, intptr_t first_evicted);
108
95 void AdvanceActiveIntervals(const intptr_t start); 109 void AdvanceActiveIntervals(const intptr_t start);
96 110
111 LiveRange* SplitBetween(LiveRange* range, intptr_t from, intptr_t to);
112
113 intptr_t AllocateSpillSlotFor(LiveRange* range);
114 void Spill(LiveRange* range);
115 void SpillAfter(LiveRange* range, intptr_t from);
116 void SpillBetween(LiveRange* range, intptr_t from, intptr_t to);
117
97 bool UnallocatedIsSorted(); 118 bool UnallocatedIsSorted();
98 void AllocateCPURegisters(); 119 void AllocateCPURegisters();
99 120
121 // Tell the use that it should load the value from the given location.
122 // If location slot for the use is flexible (unallocated) it will be updated
123 // with the given location. Otherwise a move will be scheduled from the given
124 // location to the location already stored in the slot.
125 void ConvertUseTo(UsePosition* use, Location loc);
126 void ConvertAllUses(LiveRange* range);
127
128
129 // Connect split siblings over non-linear control flow edges.
130 void ResolveControlFlow();
131 void ConnectSplitSiblings(LiveRange* range,
132 BlockEntryInstr* source_block,
133 BlockEntryInstr* target_block);
134
135
100 // TODO(vegorov): this field is used only to call Bailout. Remove when 136 // TODO(vegorov): this field is used only to call Bailout. Remove when
101 // all bailouts are gone. 137 // all bailouts are gone.
102 FlowGraphBuilder* builder_; 138 FlowGraphBuilder* builder_;
103 139
104 const GrowableArray<BlockEntryInstr*>& block_order_; 140 const GrowableArray<BlockEntryInstr*>& block_order_;
105 const GrowableArray<BlockEntryInstr*>& postorder_; 141 const GrowableArray<BlockEntryInstr*>& postorder_;
106 142
143 GrowableArray<Instruction*> instructions_;
144
107 // Live-out sets for each block. They contain indices of SSA values 145 // Live-out sets for each block. They contain indices of SSA values
108 // that are live out from this block: that is values that were either 146 // that are live out from this block: that is values that were either
109 // defined in this block or live into it and that are used in some 147 // defined in this block or live into it and that are used in some
110 // successor block. 148 // successor block.
111 GrowableArray<BitVector*> live_out_; 149 GrowableArray<BitVector*> live_out_;
112 150
113 // Kill sets for each block. They contain indices of SSA values that 151 // Kill sets for each block. They contain indices of SSA values that
114 // are defined by this block. 152 // are defined by this block.
115 GrowableArray<BitVector*> kill_; 153 GrowableArray<BitVector*> kill_;
116 154
117 // Live-in sets for each block. They contain indices of SSA values 155 // Live-in sets for each block. They contain indices of SSA values
118 // that are used by this block or its successors. 156 // that are used by this block or its successors.
119 GrowableArray<BitVector*> live_in_; 157 GrowableArray<BitVector*> live_in_;
120 158
121 // Number of virtual registers. Currently equal to the number of 159 // Number of virtual registers. Currently equal to the number of
122 // SSA values. 160 // SSA values.
123 const intptr_t vreg_count_; 161 const intptr_t vreg_count_;
124 162
125 // LiveRanges corresponding to SSA values. 163 // LiveRanges corresponding to SSA values.
126 GrowableArray<LiveRange*> live_ranges_; 164 GrowableArray<LiveRange*> live_ranges_;
127 165
128 // Worklist for register allocator. Always maintained sorted according 166 // Worklist for register allocator. Always maintained sorted according
129 // to ShouldBeAllocatedBefore predicate. 167 // to ShouldBeAllocatedBefore predicate.
130 GrowableArray<UseInterval*> unallocated_; 168 GrowableArray<LiveRange*> unallocated_;
131 169
132 // Per register lists of allocated UseIntervals, linked through 170 // Per register lists of allocated UseIntervals. Contains only those
133 // next_allocated field. Contains only those intervals that 171 // intervals that can be affected by future allocation decisions.
134 // can be affected by future allocation decisions. Those intervals 172 // Those intervals that end before the start of the current UseInterval are
135 // that end before the start of the current UseInterval are removed 173 // removed from this list and will not be affected.
136 // from this list and will not be affected. 174 GrowableArray<LiveRange*> cpu_regs_[kNumberOfCpuRegisters];
137 UseInterval* cpu_regs_[kNumberOfCpuRegisters]; 175
176 GrowableArray<intptr_t> spill_slots_;
177
178 bool blocked_cpu_regs_[kNumberOfCpuRegisters];
138 179
139 DISALLOW_COPY_AND_ASSIGN(FlowGraphAllocator); 180 DISALLOW_COPY_AND_ASSIGN(FlowGraphAllocator);
140 }; 181 };
141 182
142 183
143 // UsePosition represents a single use of an SSA value by some instruction. 184 // UsePosition represents a single use of an SSA value by some instruction.
144 // It points to a location slot which either tells register allocator 185 // It points to a location slot which either tells register allocator
145 // where instruction expects the value (if slot contains a fixed location) or 186 // where instruction expects the value (if slot contains a fixed location) or
146 // asks register allocator to allocate storage (register or spill slot) for 187 // asks register allocator to allocate storage (register or spill slot) for
147 // this use with certain properties (if slot contains an unallocated location). 188 // this use with certain properties (if slot contains an unallocated location).
148 class UsePosition : public ZoneAllocated { 189 class UsePosition : public ZoneAllocated {
149 public: 190 public:
150 enum UseFlag { 191 UsePosition(intptr_t pos,
151 kNoFlag = 0,
152 kFixedUse = 1,
153 kSameAsFirstUse = 2,
154 kOther = 3
155 };
156
157 static const intptr_t kUseFlagMask = 0x3;
158 static const intptr_t kPositionShift = 2;
159
160 static UseFlag FlagForUse(const Location& loc) {
161 if (loc.IsRegister()) return kFixedUse;
162 if (loc.IsUnallocated() && (loc.policy() == Location::kSameAsFirstInput)) {
163 return kSameAsFirstUse;
164 }
165 return kOther;
166 }
167
168 // TODO(vegorov): we encode either position or instruction pointer
169 // into the pos_ field to generate moves when needed to resolve
170 // fixed or same-as-first constraints, but this looks ugly.
171 UsePosition(Instruction* instr,
172 intptr_t pos,
173 UsePosition* next, 192 UsePosition* next,
174 Location* location_slot) 193 Location* location_slot)
175 : pos_(pos << kPositionShift), 194 : pos_(pos),
176 location_slot_(location_slot), 195 location_slot_(location_slot),
177 next_(next) { 196 next_(next) {
178 // Non-NULL instr is considered unlikely so we preinitialize pos_ field
179 // with an encoded position even if instr is not NULL.
180 if (instr != NULL) {
181 ASSERT(location_slot_ != NULL);
182 pos_ = reinterpret_cast<intptr_t>(instr) | FlagForUse(*location_slot_);
183 }
184 ASSERT(this->pos() == pos);
185 } 197 }
186 198
187 // Tell the use that it should load the value from the given location.
188 // If location slot for the use is flexible (unallocated) it will be updated
189 // with the given location. Otherwise a move will be scheduled from the given
190 // location to the location already stored in the slot.
191 void AssignLocation(Location loc);
192
193 Location* location_slot() const { return location_slot_; } 199 Location* location_slot() const { return location_slot_; }
194 void set_location_slot(Location* location_slot) { 200 void set_location_slot(Location* location_slot) {
195 location_slot_ = location_slot; 201 location_slot_ = location_slot;
196 } 202 }
197 203
198 void set_next(UsePosition* next) { next_ = next; } 204 void set_next(UsePosition* next) { next_ = next; }
199 UsePosition* next() const { return next_; } 205 UsePosition* next() const { return next_; }
200 206
201 intptr_t pos() const { 207 intptr_t pos() const { return pos_; }
202 if ((pos_ & kUseFlagMask) != kNoFlag) {
203 return instr()->lifetime_position();
204 }
205 return pos_ >> kPositionShift;
206 }
207
208 Instruction* instr() const {
209 ASSERT((pos_ & kUseFlagMask) != kNoFlag);
210 return reinterpret_cast<Instruction*>(pos_ & ~kUseFlagMask);
211 }
212 208
213 bool HasHint() const { 209 bool HasHint() const {
214 return (pos_ & kUseFlagMask) == kFixedUse; 210 return (location_slot() != NULL) && (location_slot()->IsRegister());
215 } 211 }
216 212
217 Location hint() const { 213 Location hint() const {
218 ASSERT(HasHint()); 214 ASSERT(HasHint());
219 ASSERT(location_slot()->IsRegister()); 215 return *location_slot();
220 return *location_slot_;
221 } 216 }
222 217
223 private: 218 private:
224 intptr_t pos_; 219 intptr_t pos_;
225 Location* location_slot_; 220 Location* location_slot_;
226 UsePosition* next_; 221 UsePosition* next_;
227 }; 222 };
228 223
229 224
230 // UseInterval represents a holeless half open interval of liveness for a given 225 // UseInterval represents a holeless half open interval of liveness for a given
231 // SSA value: [start, end) in terms of lifetime positions that 226 // SSA value: [start, end) in terms of lifetime positions that
232 // NumberInstructions assigns to instructions. Register allocator has to keep 227 // NumberInstructions assigns to instructions. Register allocator has to keep
233 // a value live in the register or in a spill slot from start position and until 228 // a value live in the register or in a spill slot from start position and until
234 // the end position. The interval can cover zero or more uses. 229 // the end position. The interval can cover zero or more uses.
235 // During the register allocation UseIntervals from different live ranges
236 // allocated to the same register will be chained together through
237 // next_allocated_ field.
238 // Note: currently all uses of the same SSA value are linked together into a 230 // Note: currently all uses of the same SSA value are linked together into a
239 // single list (and not split between UseIntervals). 231 // single list (and not split between UseIntervals).
240 class UseInterval : public ZoneAllocated { 232 class UseInterval : public ZoneAllocated {
241 public: 233 public:
242 UseInterval(intptr_t vreg, intptr_t start, intptr_t end, UseInterval* next) 234 UseInterval(intptr_t start, intptr_t end, UseInterval* next)
243 : vreg_(vreg), 235 : start_(start),
244 start_(start),
245 end_(end), 236 end_(end),
246 uses_((next == NULL) ? NULL : next->uses_), 237 next_(next) { }
247 next_(next),
248 next_allocated_(next) { }
249 238
250
251 void AddUse(Instruction* instr, intptr_t pos, Location* loc);
252 void Print(); 239 void Print();
253 240
254 intptr_t vreg() const { return vreg_; }
255 intptr_t start() const { return start_; } 241 intptr_t start() const { return start_; }
256 intptr_t end() const { return end_; } 242 intptr_t end() const { return end_; }
257 UsePosition* first_use() const { return uses_; }
258 UseInterval* next() const { return next_; } 243 UseInterval* next() const { return next_; }
259 244
260 bool Contains(intptr_t pos) const { 245 bool Contains(intptr_t pos) const {
261 return (start() <= pos) && (pos < end()); 246 return (start() <= pos) && (pos < end());
262 } 247 }
263 248
264 // Return the smallest position that is covered by both UseIntervals or 249 // Return the smallest position that is covered by both UseIntervals or
265 // kIllegalPosition if intervals do not intersect. 250 // kIllegalPosition if intervals do not intersect.
266 intptr_t Intersect(UseInterval* other); 251 intptr_t Intersect(UseInterval* other);
267 252
268 UseInterval* Split(intptr_t pos); 253 // UseInterval* Split(intptr_t pos);
srdjan 2012/07/19 22:54:39 Delete dead code.
Vyacheslav Egorov (Google) 2012/07/24 12:26:41 Done.
269
270 void set_next_allocated(UseInterval* next_allocated) {
271 next_allocated_ = next_allocated;
272 }
273 UseInterval* next_allocated() const { return next_allocated_; }
274 254
275 private: 255 private:
276 friend class LiveRange; 256 friend class LiveRange;
277 const intptr_t vreg_;
278 257
279 intptr_t start_; 258 intptr_t start_;
280 intptr_t end_; 259 intptr_t end_;
260 UseInterval* next_;
261 };
281 262
282 UsePosition* uses_;
283 263
284 UseInterval* next_; 264 class AllocationFinger : public ValueObject {
285 UseInterval* next_allocated_; 265 public:
266 AllocationFinger()
267 : first_pending_use_interval_(NULL),
268 first_register_use_(NULL),
269 first_register_beneficial_use_(NULL),
270 first_hinted_use_(NULL) {
271 }
272
273 void Initialize(LiveRange* range);
274 bool Advance(intptr_t start);
275
276 UseInterval* first_pending_use_interval() const {
277 return first_pending_use_interval_;
278 }
279
280 Location FirstHint();
281 UsePosition* FirstRegisterUse(intptr_t after_pos);
282 UsePosition* FirstRegisterBeneficialUse(intptr_t after_pos);
283
284 private:
285 UseInterval* first_pending_use_interval_;
286 UsePosition* first_register_use_;
287 UsePosition* first_register_beneficial_use_;
288 UsePosition* first_hinted_use_;
286 }; 289 };
287 290
288 291
289 // LiveRange represents a sequence of UseIntervals for a given SSA value. 292 // LiveRange represents a sequence of UseIntervals for a given SSA value.
290 // TODO(vegorov): this class is actually redundant currently.
291 class LiveRange : public ZoneAllocated { 293 class LiveRange : public ZoneAllocated {
292 public: 294 public:
293 explicit LiveRange(intptr_t vreg) : vreg_(vreg), head_(NULL) { } 295 explicit LiveRange(intptr_t vreg)
296 : vreg_(vreg),
297 uses_(NULL),
298 first_use_interval_(NULL),
299 last_use_interval_(NULL),
300 next_sibling_(NULL) {
srdjan 2012/07/19 22:54:39 , finger_() {
Vyacheslav Egorov (Google) 2012/07/24 12:26:41 Done.
301 }
294 302
295 void DefineAt(Instruction* instr, intptr_t pos, Location* loc); 303 static LiveRange* MakeTemp(intptr_t pos, Location* location_slot);
296 304
297 void UseAt(Instruction* instr, 305 intptr_t vreg() const { return vreg_; }
298 intptr_t def_pos, 306 LiveRange* next_sibling() const { return next_sibling_; }
307 UsePosition* first_use() const { return uses_; }
308 UseInterval* first_use_interval() const { return first_use_interval_; }
309 UseInterval* last_use_interval() const { return last_use_interval_; }
310 Location assigned_location() const { return assigned_location_; }
311 intptr_t Start() const { return first_use_interval()->start(); }
312 intptr_t End() const { return first_use_interval()->end(); }
313
314 AllocationFinger* finger() { return &finger_; }
315
316 void set_assigned_location(Location location) {
317 assigned_location_ = location;
318 }
319
320
321 void DefineAt(intptr_t pos, Location* loc);
322
323 void UseAt(intptr_t def_pos,
299 intptr_t use_pos, 324 intptr_t use_pos,
300 bool use_at_end, 325 bool use_at_end,
301 Location* loc); 326 Location* loc);
302 327
328 void AddUse(intptr_t pos, Location* location_slot);
303 void AddUseInterval(intptr_t start, intptr_t end); 329 void AddUseInterval(intptr_t start, intptr_t end);
304 330
305 void Print(); 331 void Print();
306 332
307 UseInterval* head() const { return head_; } 333 void AssignLocation(UseInterval* use, Location loc);
334
335 LiveRange* SplitAt(intptr_t pos);
336
337 bool CanCover(intptr_t pos) const {
338 return (Start() <= pos) && (pos < End());
339 }
308 340
309 private: 341 private:
342 LiveRange(intptr_t vreg,
343 UsePosition* uses,
344 UseInterval* first_use_interval,
345 UseInterval* last_use_interval,
346 LiveRange* next_sibling)
347 : vreg_(vreg),
348 uses_(uses),
349 first_use_interval_(first_use_interval),
350 last_use_interval_(last_use_interval),
351 next_sibling_(next_sibling) {
srdjan 2012/07/19 22:54:39 , finger_() {
Vyacheslav Egorov (Google) 2012/07/24 12:26:41 Done.
352 }
353
310 const intptr_t vreg_; 354 const intptr_t vreg_;
311 UseInterval* head_; 355 Location assigned_location_;
356
357 UsePosition* uses_;
358 UseInterval* first_use_interval_;
359 UseInterval* last_use_interval_;
360
361 LiveRange* next_sibling_;
362
363 AllocationFinger finger_;
312 }; 364 };
313 365
314 366
315 } // namespace dart 367 } // namespace dart
316 368
317 #endif // VM_FLOW_GRAPH_ALLOCATOR_H_ 369 #endif // VM_FLOW_GRAPH_ALLOCATOR_H_
OLDNEW
« no previous file with comments | « no previous file | runtime/vm/flow_graph_allocator.cc » ('j') | runtime/vm/flow_graph_allocator.cc » ('J')

Powered by Google App Engine
This is Rietveld 408576698