OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/flow_graph_allocator.h" | 5 #include "vm/flow_graph_allocator.h" |
6 | 6 |
7 #include "vm/bit_vector.h" | 7 #include "vm/bit_vector.h" |
8 #include "vm/intermediate_language.h" | |
9 #include "vm/il_printer.h" | |
10 #include "vm/flow_graph.h" | 8 #include "vm/flow_graph.h" |
11 #include "vm/flow_graph_compiler.h" | 9 #include "vm/flow_graph_compiler.h" |
| 10 #include "vm/il_printer.h" |
| 11 #include "vm/intermediate_language.h" |
12 #include "vm/log.h" | 12 #include "vm/log.h" |
13 #include "vm/parser.h" | 13 #include "vm/parser.h" |
14 #include "vm/stack_frame.h" | 14 #include "vm/stack_frame.h" |
15 | 15 |
16 namespace dart { | 16 namespace dart { |
17 | 17 |
18 #if defined(DEBUG) | 18 #if defined(DEBUG) |
19 #define TRACE_ALLOC(statement) \ | 19 #define TRACE_ALLOC(statement) \ |
20 do { \ | 20 do { \ |
21 if (FLAG_trace_ssa_allocator) statement; \ | 21 if (FLAG_trace_ssa_allocator) statement; \ |
22 } while (0) | 22 } while (0) |
23 #else | 23 #else |
24 #define TRACE_ALLOC(statement) | 24 #define TRACE_ALLOC(statement) |
25 #endif | 25 #endif |
26 | 26 |
27 | |
static const intptr_t kNoVirtualRegister = -1;
static const intptr_t kTempVirtualRegister = -2;
static const intptr_t kIllegalPosition = -1;
static const intptr_t kMaxPosition = 0x7FFFFFFF;
static const intptr_t kPairVirtualRegisterOffset = 1;

// A definition with a pair representation (kPairOfTagged) occupies two
// virtual register names. SSA index allocation reserves two indexes per
// definition and the second one is used only by pairs. Maps the first SSA
// index of a pair to its companion index.
static intptr_t ToSecondPairVreg(intptr_t vreg) {
  const intptr_t second_vreg = vreg + kPairVirtualRegisterOffset;
  return second_vreg;
}
43 | 42 |
44 | |
// Returns the smaller of two lifetime positions.
static intptr_t MinPosition(intptr_t a, intptr_t b) {
  if (b < a) return b;
  return a;
}
48 | 46 |
49 | |
// Even lifetime positions correspond to instruction starts.
static bool IsInstructionStartPosition(intptr_t pos) {
  const bool is_odd = (pos & 1) != 0;
  return !is_odd;
}
53 | 50 |
54 | |
// Odd lifetime positions correspond to instruction ends.
static bool IsInstructionEndPosition(intptr_t pos) {
  return (pos & 1) != 0;
}
58 | 54 |
59 | |
// Rounds |pos| down to the even (start) position of its instruction.
static intptr_t ToInstructionStart(intptr_t pos) {
  return pos - (pos & 1);
}
63 | 58 |
64 | |
// Rounds |pos| up to the odd (end) position of its instruction.
static intptr_t ToInstructionEnd(intptr_t pos) {
  const intptr_t end_pos = pos | 1;
  return end_pos;
}
68 | 62 |
69 | |
70 FlowGraphAllocator::FlowGraphAllocator(const FlowGraph& flow_graph, | 63 FlowGraphAllocator::FlowGraphAllocator(const FlowGraph& flow_graph, |
71 bool intrinsic_mode) | 64 bool intrinsic_mode) |
72 : flow_graph_(flow_graph), | 65 : flow_graph_(flow_graph), |
73 reaching_defs_(flow_graph), | 66 reaching_defs_(flow_graph), |
74 value_representations_(flow_graph.max_virtual_register_number()), | 67 value_representations_(flow_graph.max_virtual_register_number()), |
75 block_order_(flow_graph.reverse_postorder()), | 68 block_order_(flow_graph.reverse_postorder()), |
76 postorder_(flow_graph.postorder()), | 69 postorder_(flow_graph.postorder()), |
77 liveness_(flow_graph), | 70 liveness_(flow_graph), |
78 vreg_count_(flow_graph.max_virtual_register_number()), | 71 vreg_count_(flow_graph.max_virtual_register_number()), |
79 live_ranges_(flow_graph.max_virtual_register_number()), | 72 live_ranges_(flow_graph.max_virtual_register_number()), |
(...skipping 30 matching lines...) Expand all Loading... |
110 if (intrinsic_mode) { | 103 if (intrinsic_mode) { |
111 blocked_cpu_registers_[ARGS_DESC_REG] = true; | 104 blocked_cpu_registers_[ARGS_DESC_REG] = true; |
112 #if !defined(TARGET_ARCH_IA32) | 105 #if !defined(TARGET_ARCH_IA32) |
113 // Need to preserve CODE_REG to be able to store the PC marker | 106 // Need to preserve CODE_REG to be able to store the PC marker |
114 // and load the pool pointer. | 107 // and load the pool pointer. |
115 blocked_cpu_registers_[CODE_REG] = true; | 108 blocked_cpu_registers_[CODE_REG] = true; |
116 #endif | 109 #endif |
117 } | 110 } |
118 } | 111 } |
119 | 112 |
120 | |
121 static void DeepLiveness(MaterializeObjectInstr* mat, BitVector* live_in) { | 113 static void DeepLiveness(MaterializeObjectInstr* mat, BitVector* live_in) { |
122 if (mat->was_visited_for_liveness()) { | 114 if (mat->was_visited_for_liveness()) { |
123 return; | 115 return; |
124 } | 116 } |
125 mat->mark_visited_for_liveness(); | 117 mat->mark_visited_for_liveness(); |
126 | 118 |
127 for (intptr_t i = 0; i < mat->InputCount(); i++) { | 119 for (intptr_t i = 0; i < mat->InputCount(); i++) { |
128 if (!mat->InputAt(i)->BindsToConstant()) { | 120 if (!mat->InputAt(i)->BindsToConstant()) { |
129 Definition* defn = mat->InputAt(i)->definition(); | 121 Definition* defn = mat->InputAt(i)->definition(); |
130 MaterializeObjectInstr* inner_mat = defn->AsMaterializeObject(); | 122 MaterializeObjectInstr* inner_mat = defn->AsMaterializeObject(); |
131 if (inner_mat != NULL) { | 123 if (inner_mat != NULL) { |
132 DeepLiveness(inner_mat, live_in); | 124 DeepLiveness(inner_mat, live_in); |
133 } else { | 125 } else { |
134 intptr_t idx = defn->ssa_temp_index(); | 126 intptr_t idx = defn->ssa_temp_index(); |
135 live_in->Add(idx); | 127 live_in->Add(idx); |
136 } | 128 } |
137 } | 129 } |
138 } | 130 } |
139 } | 131 } |
140 | 132 |
141 | |
142 void SSALivenessAnalysis::ComputeInitialSets() { | 133 void SSALivenessAnalysis::ComputeInitialSets() { |
143 const intptr_t block_count = postorder_.length(); | 134 const intptr_t block_count = postorder_.length(); |
144 for (intptr_t i = 0; i < block_count; i++) { | 135 for (intptr_t i = 0; i < block_count; i++) { |
145 BlockEntryInstr* block = postorder_[i]; | 136 BlockEntryInstr* block = postorder_[i]; |
146 | 137 |
147 BitVector* kill = kill_[i]; | 138 BitVector* kill = kill_[i]; |
148 BitVector* live_in = live_in_[i]; | 139 BitVector* live_in = live_in_[i]; |
149 | 140 |
150 // Iterate backwards starting at the last instruction. | 141 // Iterate backwards starting at the last instruction. |
151 for (BackwardInstructionIterator it(block); !it.Done(); it.Advance()) { | 142 for (BackwardInstructionIterator it(block); !it.Done(); it.Advance()) { |
(...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
260 | 251 |
261 // Process initial definitions, ie, constants and incoming parameters. | 252 // Process initial definitions, ie, constants and incoming parameters. |
262 for (intptr_t i = 0; i < graph_entry_->initial_definitions()->length(); i++) { | 253 for (intptr_t i = 0; i < graph_entry_->initial_definitions()->length(); i++) { |
263 Definition* def = (*graph_entry_->initial_definitions())[i]; | 254 Definition* def = (*graph_entry_->initial_definitions())[i]; |
264 const intptr_t vreg = def->ssa_temp_index(); | 255 const intptr_t vreg = def->ssa_temp_index(); |
265 kill_[graph_entry_->postorder_number()]->Add(vreg); | 256 kill_[graph_entry_->postorder_number()]->Add(vreg); |
266 live_in_[graph_entry_->postorder_number()]->Remove(vreg); | 257 live_in_[graph_entry_->postorder_number()]->Remove(vreg); |
267 } | 258 } |
268 } | 259 } |
269 | 260 |
270 | |
271 UsePosition* LiveRange::AddUse(intptr_t pos, Location* location_slot) { | 261 UsePosition* LiveRange::AddUse(intptr_t pos, Location* location_slot) { |
272 ASSERT(location_slot != NULL); | 262 ASSERT(location_slot != NULL); |
273 ASSERT((first_use_interval_->start_ <= pos) && | 263 ASSERT((first_use_interval_->start_ <= pos) && |
274 (pos <= first_use_interval_->end_)); | 264 (pos <= first_use_interval_->end_)); |
275 if (uses_ != NULL) { | 265 if (uses_ != NULL) { |
276 if ((uses_->pos() == pos) && (uses_->location_slot() == location_slot)) { | 266 if ((uses_->pos() == pos) && (uses_->location_slot() == location_slot)) { |
277 return uses_; | 267 return uses_; |
278 } else if (uses_->pos() < pos) { | 268 } else if (uses_->pos() < pos) { |
279 // If an instruction at position P is using the same value both as | 269 // If an instruction at position P is using the same value both as |
280 // a fixed register input and a non-fixed input (in this order) we will | 270 // a fixed register input and a non-fixed input (in this order) we will |
(...skipping 15 matching lines...) Expand all Loading... |
296 | 286 |
297 insert_after->set_next( | 287 insert_after->set_next( |
298 new UsePosition(pos, insert_after->next(), location_slot)); | 288 new UsePosition(pos, insert_after->next(), location_slot)); |
299 return insert_after->next(); | 289 return insert_after->next(); |
300 } | 290 } |
301 } | 291 } |
302 uses_ = new UsePosition(pos, uses_, location_slot); | 292 uses_ = new UsePosition(pos, uses_, location_slot); |
303 return uses_; | 293 return uses_; |
304 } | 294 } |
305 | 295 |
306 | |
307 void LiveRange::AddSafepoint(intptr_t pos, LocationSummary* locs) { | 296 void LiveRange::AddSafepoint(intptr_t pos, LocationSummary* locs) { |
308 ASSERT(IsInstructionStartPosition(pos)); | 297 ASSERT(IsInstructionStartPosition(pos)); |
309 SafepointPosition* safepoint = | 298 SafepointPosition* safepoint = |
310 new SafepointPosition(ToInstructionEnd(pos), locs); | 299 new SafepointPosition(ToInstructionEnd(pos), locs); |
311 | 300 |
312 if (first_safepoint_ == NULL) { | 301 if (first_safepoint_ == NULL) { |
313 ASSERT(last_safepoint_ == NULL); | 302 ASSERT(last_safepoint_ == NULL); |
314 first_safepoint_ = last_safepoint_ = safepoint; | 303 first_safepoint_ = last_safepoint_ = safepoint; |
315 } else { | 304 } else { |
316 ASSERT(last_safepoint_ != NULL); | 305 ASSERT(last_safepoint_ != NULL); |
317 // We assume that safepoints list is sorted by position and that | 306 // We assume that safepoints list is sorted by position and that |
318 // safepoints are added in this order. | 307 // safepoints are added in this order. |
319 ASSERT(last_safepoint_->pos() < pos); | 308 ASSERT(last_safepoint_->pos() < pos); |
320 last_safepoint_->set_next(safepoint); | 309 last_safepoint_->set_next(safepoint); |
321 last_safepoint_ = safepoint; | 310 last_safepoint_ = safepoint; |
322 } | 311 } |
323 } | 312 } |
324 | 313 |
325 | |
326 void LiveRange::AddHintedUse(intptr_t pos, | 314 void LiveRange::AddHintedUse(intptr_t pos, |
327 Location* location_slot, | 315 Location* location_slot, |
328 Location* hint) { | 316 Location* hint) { |
329 ASSERT(hint != NULL); | 317 ASSERT(hint != NULL); |
330 AddUse(pos, location_slot)->set_hint(hint); | 318 AddUse(pos, location_slot)->set_hint(hint); |
331 } | 319 } |
332 | 320 |
333 | |
334 void LiveRange::AddUseInterval(intptr_t start, intptr_t end) { | 321 void LiveRange::AddUseInterval(intptr_t start, intptr_t end) { |
335 ASSERT(start < end); | 322 ASSERT(start < end); |
336 | 323 |
337 // Live ranges are being build by visiting instructions in post-order. | 324 // Live ranges are being build by visiting instructions in post-order. |
338 // This implies that use intervals will be prepended in a monotonically | 325 // This implies that use intervals will be prepended in a monotonically |
339 // decreasing order. | 326 // decreasing order. |
340 if (first_use_interval() != NULL) { | 327 if (first_use_interval() != NULL) { |
341 // If the first use interval and the use interval we are adding | 328 // If the first use interval and the use interval we are adding |
342 // touch then we can just extend the first interval to cover their | 329 // touch then we can just extend the first interval to cover their |
343 // union. | 330 // union. |
(...skipping 16 matching lines...) Expand all Loading... |
360 ASSERT(end < first_use_interval()->start()); | 347 ASSERT(end < first_use_interval()->start()); |
361 } | 348 } |
362 | 349 |
363 first_use_interval_ = new UseInterval(start, end, first_use_interval_); | 350 first_use_interval_ = new UseInterval(start, end, first_use_interval_); |
364 if (last_use_interval_ == NULL) { | 351 if (last_use_interval_ == NULL) { |
365 ASSERT(first_use_interval_->next() == NULL); | 352 ASSERT(first_use_interval_->next() == NULL); |
366 last_use_interval_ = first_use_interval_; | 353 last_use_interval_ = first_use_interval_; |
367 } | 354 } |
368 } | 355 } |
369 | 356 |
370 | |
371 void LiveRange::DefineAt(intptr_t pos) { | 357 void LiveRange::DefineAt(intptr_t pos) { |
372 // Live ranges are being build by visiting instructions in post-order. | 358 // Live ranges are being build by visiting instructions in post-order. |
373 // This implies that use intervals will be prepended in a monotonically | 359 // This implies that use intervals will be prepended in a monotonically |
374 // decreasing order. | 360 // decreasing order. |
375 // When we encounter a use of a value inside a block we optimistically | 361 // When we encounter a use of a value inside a block we optimistically |
376 // expand the first use interval to cover the block from the start | 362 // expand the first use interval to cover the block from the start |
377 // to the last use in the block and then we shrink it if we encounter | 363 // to the last use in the block and then we shrink it if we encounter |
378 // definition of the value inside the same block. | 364 // definition of the value inside the same block. |
379 if (first_use_interval_ == NULL) { | 365 if (first_use_interval_ == NULL) { |
380 // Definition without a use. | 366 // Definition without a use. |
381 first_use_interval_ = new UseInterval(pos, pos + 1, NULL); | 367 first_use_interval_ = new UseInterval(pos, pos + 1, NULL); |
382 last_use_interval_ = first_use_interval_; | 368 last_use_interval_ = first_use_interval_; |
383 } else { | 369 } else { |
384 // Shrink the first use interval. It was optimistically expanded to | 370 // Shrink the first use interval. It was optimistically expanded to |
385 // cover the block from the start to the last use in the block. | 371 // cover the block from the start to the last use in the block. |
386 ASSERT(first_use_interval_->start_ <= pos); | 372 ASSERT(first_use_interval_->start_ <= pos); |
387 first_use_interval_->start_ = pos; | 373 first_use_interval_->start_ = pos; |
388 } | 374 } |
389 } | 375 } |
390 | 376 |
391 | |
392 LiveRange* FlowGraphAllocator::GetLiveRange(intptr_t vreg) { | 377 LiveRange* FlowGraphAllocator::GetLiveRange(intptr_t vreg) { |
393 if (live_ranges_[vreg] == NULL) { | 378 if (live_ranges_[vreg] == NULL) { |
394 Representation rep = value_representations_[vreg]; | 379 Representation rep = value_representations_[vreg]; |
395 ASSERT(rep != kNoRepresentation); | 380 ASSERT(rep != kNoRepresentation); |
396 live_ranges_[vreg] = new LiveRange(vreg, rep); | 381 live_ranges_[vreg] = new LiveRange(vreg, rep); |
397 } | 382 } |
398 return live_ranges_[vreg]; | 383 return live_ranges_[vreg]; |
399 } | 384 } |
400 | 385 |
401 | |
402 LiveRange* FlowGraphAllocator::MakeLiveRangeForTemporary() { | 386 LiveRange* FlowGraphAllocator::MakeLiveRangeForTemporary() { |
403 // Representation does not matter for temps. | 387 // Representation does not matter for temps. |
404 Representation ignored = kNoRepresentation; | 388 Representation ignored = kNoRepresentation; |
405 LiveRange* range = new LiveRange(kTempVirtualRegister, ignored); | 389 LiveRange* range = new LiveRange(kTempVirtualRegister, ignored); |
406 #if defined(DEBUG) | 390 #if defined(DEBUG) |
407 temporaries_.Add(range); | 391 temporaries_.Add(range); |
408 #endif | 392 #endif |
409 return range; | 393 return range; |
410 } | 394 } |
411 | 395 |
412 | |
413 void FlowGraphAllocator::BlockRegisterLocation(Location loc, | 396 void FlowGraphAllocator::BlockRegisterLocation(Location loc, |
414 intptr_t from, | 397 intptr_t from, |
415 intptr_t to, | 398 intptr_t to, |
416 bool* blocked_registers, | 399 bool* blocked_registers, |
417 LiveRange** blocking_ranges) { | 400 LiveRange** blocking_ranges) { |
418 if (blocked_registers[loc.register_code()]) { | 401 if (blocked_registers[loc.register_code()]) { |
419 return; | 402 return; |
420 } | 403 } |
421 | 404 |
422 if (blocking_ranges[loc.register_code()] == NULL) { | 405 if (blocking_ranges[loc.register_code()] == NULL) { |
423 Representation ignored = kNoRepresentation; | 406 Representation ignored = kNoRepresentation; |
424 LiveRange* range = new LiveRange(kNoVirtualRegister, ignored); | 407 LiveRange* range = new LiveRange(kNoVirtualRegister, ignored); |
425 blocking_ranges[loc.register_code()] = range; | 408 blocking_ranges[loc.register_code()] = range; |
426 range->set_assigned_location(loc); | 409 range->set_assigned_location(loc); |
427 #if defined(DEBUG) | 410 #if defined(DEBUG) |
428 temporaries_.Add(range); | 411 temporaries_.Add(range); |
429 #endif | 412 #endif |
430 } | 413 } |
431 | 414 |
432 blocking_ranges[loc.register_code()]->AddUseInterval(from, to); | 415 blocking_ranges[loc.register_code()]->AddUseInterval(from, to); |
433 } | 416 } |
434 | 417 |
435 | |
436 // Block location from the start of the instruction to its end. | 418 // Block location from the start of the instruction to its end. |
437 void FlowGraphAllocator::BlockLocation(Location loc, | 419 void FlowGraphAllocator::BlockLocation(Location loc, |
438 intptr_t from, | 420 intptr_t from, |
439 intptr_t to) { | 421 intptr_t to) { |
440 if (loc.IsRegister()) { | 422 if (loc.IsRegister()) { |
441 BlockRegisterLocation(loc, from, to, blocked_cpu_registers_, cpu_regs_); | 423 BlockRegisterLocation(loc, from, to, blocked_cpu_registers_, cpu_regs_); |
442 #if defined(TARGET_ARCH_DBC) | 424 #if defined(TARGET_ARCH_DBC) |
443 last_used_register_ = | 425 last_used_register_ = |
444 Utils::Maximum(last_used_register_, loc.register_code()); | 426 Utils::Maximum(last_used_register_, loc.register_code()); |
445 #endif | 427 #endif |
446 } else if (loc.IsFpuRegister()) { | 428 } else if (loc.IsFpuRegister()) { |
447 BlockRegisterLocation(loc, from, to, blocked_fpu_registers_, fpu_regs_); | 429 BlockRegisterLocation(loc, from, to, blocked_fpu_registers_, fpu_regs_); |
448 } else { | 430 } else { |
449 UNREACHABLE(); | 431 UNREACHABLE(); |
450 } | 432 } |
451 } | 433 } |
452 | 434 |
453 | |
454 void LiveRange::Print() { | 435 void LiveRange::Print() { |
455 if (first_use_interval() == NULL) { | 436 if (first_use_interval() == NULL) { |
456 return; | 437 return; |
457 } | 438 } |
458 | 439 |
459 THR_Print(" live range v%" Pd " [%" Pd ", %" Pd ") in ", vreg(), Start(), | 440 THR_Print(" live range v%" Pd " [%" Pd ", %" Pd ") in ", vreg(), Start(), |
460 End()); | 441 End()); |
461 assigned_location().Print(); | 442 assigned_location().Print(); |
462 if (spill_slot_.HasStackIndex()) { | 443 if (spill_slot_.HasStackIndex()) { |
463 intptr_t stack_slot = spill_slot_.stack_index(); | 444 intptr_t stack_slot = spill_slot_.stack_index(); |
(...skipping 23 matching lines...) Expand all Loading... |
487 THR_Print("\n"); | 468 THR_Print("\n"); |
488 use_pos = use_pos->next(); | 469 use_pos = use_pos->next(); |
489 } | 470 } |
490 } | 471 } |
491 | 472 |
492 if (next_sibling() != NULL) { | 473 if (next_sibling() != NULL) { |
493 next_sibling()->Print(); | 474 next_sibling()->Print(); |
494 } | 475 } |
495 } | 476 } |
496 | 477 |
497 | |
498 void FlowGraphAllocator::PrintLiveRanges() { | 478 void FlowGraphAllocator::PrintLiveRanges() { |
499 #if defined(DEBUG) | 479 #if defined(DEBUG) |
500 for (intptr_t i = 0; i < temporaries_.length(); i++) { | 480 for (intptr_t i = 0; i < temporaries_.length(); i++) { |
501 temporaries_[i]->Print(); | 481 temporaries_[i]->Print(); |
502 } | 482 } |
503 #endif | 483 #endif |
504 | 484 |
505 for (intptr_t i = 0; i < live_ranges_.length(); i++) { | 485 for (intptr_t i = 0; i < live_ranges_.length(); i++) { |
506 if (live_ranges_[i] != NULL) { | 486 if (live_ranges_[i] != NULL) { |
507 live_ranges_[i]->Print(); | 487 live_ranges_[i]->Print(); |
508 } | 488 } |
509 } | 489 } |
510 } | 490 } |
511 | 491 |
512 | |
513 // Returns true if all uses of the given range inside the given loop | 492 // Returns true if all uses of the given range inside the given loop |
514 // have Any allocation policy. | 493 // have Any allocation policy. |
515 static bool HasOnlyUnconstrainedUsesInLoop(LiveRange* range, | 494 static bool HasOnlyUnconstrainedUsesInLoop(LiveRange* range, |
516 BlockInfo* loop_header) { | 495 BlockInfo* loop_header) { |
517 const intptr_t boundary = loop_header->last_block()->end_pos(); | 496 const intptr_t boundary = loop_header->last_block()->end_pos(); |
518 | 497 |
519 UsePosition* use = range->first_use(); | 498 UsePosition* use = range->first_use(); |
520 while ((use != NULL) && (use->pos() < boundary)) { | 499 while ((use != NULL) && (use->pos() < boundary)) { |
521 if (!use->location_slot()->Equals(Location::Any())) { | 500 if (!use->location_slot()->Equals(Location::Any())) { |
522 return false; | 501 return false; |
523 } | 502 } |
524 use = use->next(); | 503 use = use->next(); |
525 } | 504 } |
526 | 505 |
527 return true; | 506 return true; |
528 } | 507 } |
529 | 508 |
530 | |
531 // Returns true if all uses of the given range have Any allocation policy. | 509 // Returns true if all uses of the given range have Any allocation policy. |
532 static bool HasOnlyUnconstrainedUses(LiveRange* range) { | 510 static bool HasOnlyUnconstrainedUses(LiveRange* range) { |
533 UsePosition* use = range->first_use(); | 511 UsePosition* use = range->first_use(); |
534 while (use != NULL) { | 512 while (use != NULL) { |
535 if (!use->location_slot()->Equals(Location::Any())) { | 513 if (!use->location_slot()->Equals(Location::Any())) { |
536 return false; | 514 return false; |
537 } | 515 } |
538 use = use->next(); | 516 use = use->next(); |
539 } | 517 } |
540 return true; | 518 return true; |
541 } | 519 } |
542 | 520 |
543 | |
544 void FlowGraphAllocator::BuildLiveRanges() { | 521 void FlowGraphAllocator::BuildLiveRanges() { |
545 const intptr_t block_count = postorder_.length(); | 522 const intptr_t block_count = postorder_.length(); |
546 ASSERT(postorder_.Last()->IsGraphEntry()); | 523 ASSERT(postorder_.Last()->IsGraphEntry()); |
547 BitVector* current_interference_set = NULL; | 524 BitVector* current_interference_set = NULL; |
548 Zone* zone = flow_graph_.zone(); | 525 Zone* zone = flow_graph_.zone(); |
549 for (intptr_t i = 0; i < (block_count - 1); i++) { | 526 for (intptr_t i = 0; i < (block_count - 1); i++) { |
550 BlockEntryInstr* block = postorder_[i]; | 527 BlockEntryInstr* block = postorder_[i]; |
551 | 528 |
552 BlockInfo* block_info = BlockInfoAt(block->start_pos()); | 529 BlockInfo* block_info = BlockInfoAt(block->start_pos()); |
553 | 530 |
(...skipping 25 matching lines...) Expand all Loading... |
579 | 556 |
580 // Now process all instructions in reverse order. | 557 // Now process all instructions in reverse order. |
581 while (current != block) { | 558 while (current != block) { |
582 // Skip parallel moves that we insert while processing instructions. | 559 // Skip parallel moves that we insert while processing instructions. |
583 if (!current->IsParallelMove()) { | 560 if (!current->IsParallelMove()) { |
584 ProcessOneInstruction(block, current, current_interference_set); | 561 ProcessOneInstruction(block, current, current_interference_set); |
585 } | 562 } |
586 current = current->previous(); | 563 current = current->previous(); |
587 } | 564 } |
588 | 565 |
589 | |
590 // Check if any values live into the loop can be spilled for free. | 566 // Check if any values live into the loop can be spilled for free. |
591 if (block_info->is_loop_header()) { | 567 if (block_info->is_loop_header()) { |
592 current_interference_set = NULL; | 568 current_interference_set = NULL; |
593 for (BitVector::Iterator it(liveness_.GetLiveInSetAt(i)); !it.Done(); | 569 for (BitVector::Iterator it(liveness_.GetLiveInSetAt(i)); !it.Done(); |
594 it.Advance()) { | 570 it.Advance()) { |
595 LiveRange* range = GetLiveRange(it.Current()); | 571 LiveRange* range = GetLiveRange(it.Current()); |
596 if (HasOnlyUnconstrainedUsesInLoop(range, block_info)) { | 572 if (HasOnlyUnconstrainedUsesInLoop(range, block_info)) { |
597 range->MarkHasOnlyUnconstrainedUsesInLoop(block_info->loop_id()); | 573 range->MarkHasOnlyUnconstrainedUsesInLoop(block_info->loop_id()); |
598 } | 574 } |
599 } | 575 } |
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
640 for (intptr_t i = 0; i < graph_entry->initial_definitions()->length(); i++) { | 616 for (intptr_t i = 0; i < graph_entry->initial_definitions()->length(); i++) { |
641 Definition* defn = (*graph_entry->initial_definitions())[i]; | 617 Definition* defn = (*graph_entry->initial_definitions())[i]; |
642 ASSERT(!defn->HasPairRepresentation()); | 618 ASSERT(!defn->HasPairRepresentation()); |
643 LiveRange* range = GetLiveRange(defn->ssa_temp_index()); | 619 LiveRange* range = GetLiveRange(defn->ssa_temp_index()); |
644 range->AddUseInterval(graph_entry->start_pos(), graph_entry->end_pos()); | 620 range->AddUseInterval(graph_entry->start_pos(), graph_entry->end_pos()); |
645 range->DefineAt(graph_entry->start_pos()); | 621 range->DefineAt(graph_entry->start_pos()); |
646 ProcessInitialDefinition(defn, range, graph_entry); | 622 ProcessInitialDefinition(defn, range, graph_entry); |
647 } | 623 } |
648 } | 624 } |
649 | 625 |
650 | |
651 void FlowGraphAllocator::SplitInitialDefinitionAt(LiveRange* range, | 626 void FlowGraphAllocator::SplitInitialDefinitionAt(LiveRange* range, |
652 intptr_t pos) { | 627 intptr_t pos) { |
653 if (range->End() > pos) { | 628 if (range->End() > pos) { |
654 LiveRange* tail = range->SplitAt(pos); | 629 LiveRange* tail = range->SplitAt(pos); |
655 CompleteRange(tail, Location::kRegister); | 630 CompleteRange(tail, Location::kRegister); |
656 } | 631 } |
657 } | 632 } |
658 | 633 |
659 | |
660 void FlowGraphAllocator::ProcessInitialDefinition(Definition* defn, | 634 void FlowGraphAllocator::ProcessInitialDefinition(Definition* defn, |
661 LiveRange* range, | 635 LiveRange* range, |
662 BlockEntryInstr* block) { | 636 BlockEntryInstr* block) { |
663 #if defined(TARGET_ARCH_DBC) | 637 #if defined(TARGET_ARCH_DBC) |
664 if (block->IsCatchBlockEntry()) { | 638 if (block->IsCatchBlockEntry()) { |
665 if (defn->IsParameter()) { | 639 if (defn->IsParameter()) { |
666 // This must be in sync with FlowGraphCompiler::CatchEntryRegForVariable. | 640 // This must be in sync with FlowGraphCompiler::CatchEntryRegForVariable. |
667 ParameterInstr* param = defn->AsParameter(); | 641 ParameterInstr* param = defn->AsParameter(); |
668 intptr_t slot_index = param->index(); | 642 intptr_t slot_index = param->index(); |
669 AssignSafepoints(defn, range); | 643 AssignSafepoints(defn, range); |
(...skipping 110 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
780 // Note, all incoming parameters are assumed to be tagged. | 754 // Note, all incoming parameters are assumed to be tagged. |
781 MarkAsObjectAtSafepoints(range); | 755 MarkAsObjectAtSafepoints(range); |
782 } else if (defn->IsConstant() && block->IsCatchBlockEntry()) { | 756 } else if (defn->IsConstant() && block->IsCatchBlockEntry()) { |
783 // Constants at catch block entries consume spill slots. | 757 // Constants at catch block entries consume spill slots. |
784 spill_slots_.Add(range_end); | 758 spill_slots_.Add(range_end); |
785 quad_spill_slots_.Add(false); | 759 quad_spill_slots_.Add(false); |
786 untagged_spill_slots_.Add(false); | 760 untagged_spill_slots_.Add(false); |
787 } | 761 } |
788 } | 762 } |
789 | 763 |
790 | |
791 static Location::Kind RegisterKindFromPolicy(Location loc) { | 764 static Location::Kind RegisterKindFromPolicy(Location loc) { |
792 if (loc.policy() == Location::kRequiresFpuRegister) { | 765 if (loc.policy() == Location::kRequiresFpuRegister) { |
793 return Location::kFpuRegister; | 766 return Location::kFpuRegister; |
794 } else { | 767 } else { |
795 return Location::kRegister; | 768 return Location::kRegister; |
796 } | 769 } |
797 } | 770 } |
798 | 771 |
799 | |
800 static Location::Kind RegisterKindForResult(Instruction* instr) { | 772 static Location::Kind RegisterKindForResult(Instruction* instr) { |
801 const Representation rep = instr->representation(); | 773 const Representation rep = instr->representation(); |
802 #if !defined(TARGET_ARCH_DBC) | 774 #if !defined(TARGET_ARCH_DBC) |
803 if ((rep == kUnboxedDouble) || (rep == kUnboxedFloat32x4) || | 775 if ((rep == kUnboxedDouble) || (rep == kUnboxedFloat32x4) || |
804 (rep == kUnboxedInt32x4) || (rep == kUnboxedFloat64x2)) { | 776 (rep == kUnboxedInt32x4) || (rep == kUnboxedFloat64x2)) { |
805 return Location::kFpuRegister; | 777 return Location::kFpuRegister; |
806 } else { | 778 } else { |
807 return Location::kRegister; | 779 return Location::kRegister; |
808 } | 780 } |
809 #else | 781 #else |
810 // DBC supports only unboxed doubles and does not have distinguished FPU | 782 // DBC supports only unboxed doubles and does not have distinguished FPU |
811 // registers. | 783 // registers. |
812 ASSERT((rep != kUnboxedFloat32x4) && (rep != kUnboxedInt32x4) && | 784 ASSERT((rep != kUnboxedFloat32x4) && (rep != kUnboxedInt32x4) && |
813 (rep != kUnboxedFloat64x2)); | 785 (rep != kUnboxedFloat64x2)); |
814 return Location::kRegister; | 786 return Location::kRegister; |
815 #endif | 787 #endif |
816 } | 788 } |
817 | 789 |
818 | |
819 // | 790 // |
820 // When describing shape of live ranges in comments below we are going to use | 791 // When describing shape of live ranges in comments below we are going to use |
821 // the following notation: | 792 // the following notation: |
822 // | 793 // |
823 // B block entry | 794 // B block entry |
824 // g g' start and end of goto instruction | 795 // g g' start and end of goto instruction |
825 // i i' start and end of any other instruction | 796 // i i' start and end of any other instruction |
826 // j j' start and end of any other instruction | 797 // j j' start and end of any other instruction |
827 | 798 |
828 // - body of a use interval | 799 // - body of a use interval |
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
906 ->assigned_location_slot()); | 877 ->assigned_location_slot()); |
907 move->set_src(Location::PrefersRegister()); | 878 move->set_src(Location::PrefersRegister()); |
908 } | 879 } |
909 } | 880 } |
910 | 881 |
911 // Begin backward iteration with the instruction before the parallel | 882 // Begin backward iteration with the instruction before the parallel |
912 // move. | 883 // move. |
913 return goto_instr->previous(); | 884 return goto_instr->previous(); |
914 } | 885 } |
915 | 886 |
916 | |
917 void FlowGraphAllocator::ConnectIncomingPhiMoves(JoinEntryInstr* join) { | 887 void FlowGraphAllocator::ConnectIncomingPhiMoves(JoinEntryInstr* join) { |
918 // For join blocks we need to add destinations of phi resolution moves | 888 // For join blocks we need to add destinations of phi resolution moves |
919 // to phi's live range so that register allocator will fill them with moves. | 889 // to phi's live range so that register allocator will fill them with moves. |
920 | 890 |
921 // All uses are recorded at the start position in the block. | 891 // All uses are recorded at the start position in the block. |
922 const intptr_t pos = join->start_pos(); | 892 const intptr_t pos = join->start_pos(); |
923 const bool is_loop_header = BlockInfoAt(join->start_pos())->is_loop_header(); | 893 const bool is_loop_header = BlockInfoAt(join->start_pos())->is_loop_header(); |
924 intptr_t move_idx = 0; | 894 intptr_t move_idx = 0; |
925 for (PhiIterator it(join); !it.Done(); it.Advance()) { | 895 for (PhiIterator it(join); !it.Done(); it.Advance()) { |
926 PhiInstr* phi = it.Current(); | 896 PhiInstr* phi = it.Current(); |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
968 if (is_pair_phi) { | 938 if (is_pair_phi) { |
969 LiveRange* second_range = GetLiveRange(ToSecondPairVreg(vreg)); | 939 LiveRange* second_range = GetLiveRange(ToSecondPairVreg(vreg)); |
970 AssignSafepoints(phi, second_range); | 940 AssignSafepoints(phi, second_range); |
971 CompleteRange(second_range, RegisterKindForResult(phi)); | 941 CompleteRange(second_range, RegisterKindForResult(phi)); |
972 } | 942 } |
973 | 943 |
974 move_idx += is_pair_phi ? 2 : 1; | 944 move_idx += is_pair_phi ? 2 : 1; |
975 } | 945 } |
976 } | 946 } |
977 | 947 |
978 | |
979 void FlowGraphAllocator::ProcessEnvironmentUses(BlockEntryInstr* block, | 948 void FlowGraphAllocator::ProcessEnvironmentUses(BlockEntryInstr* block, |
980 Instruction* current) { | 949 Instruction* current) { |
981 ASSERT(current->env() != NULL); | 950 ASSERT(current->env() != NULL); |
982 Environment* env = current->env(); | 951 Environment* env = current->env(); |
983 while (env != NULL) { | 952 while (env != NULL) { |
984 // Any value mentioned in the deoptimization environment should survive | 953 // Any value mentioned in the deoptimization environment should survive |
985 // until the end of instruction but it does not need to be in the register. | 954 // until the end of instruction but it does not need to be in the register. |
986 // Expected shape of live range: | 955 // Expected shape of live range: |
987 // | 956 // |
988 // i i' | 957 // i i' |
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1050 range->AddUseInterval(block_start_pos, use_pos); | 1019 range->AddUseInterval(block_start_pos, use_pos); |
1051 range->AddUse(use_pos, &locations[i]); | 1020 range->AddUse(use_pos, &locations[i]); |
1052 } | 1021 } |
1053 } | 1022 } |
1054 | 1023 |
1055 env->set_locations(locations); | 1024 env->set_locations(locations); |
1056 env = env->outer(); | 1025 env = env->outer(); |
1057 } | 1026 } |
1058 } | 1027 } |
1059 | 1028 |
1060 | |
1061 void FlowGraphAllocator::ProcessMaterializationUses( | 1029 void FlowGraphAllocator::ProcessMaterializationUses( |
1062 BlockEntryInstr* block, | 1030 BlockEntryInstr* block, |
1063 const intptr_t block_start_pos, | 1031 const intptr_t block_start_pos, |
1064 const intptr_t use_pos, | 1032 const intptr_t use_pos, |
1065 MaterializeObjectInstr* mat) { | 1033 MaterializeObjectInstr* mat) { |
1066 // Materialization can occur several times in the same environment. | 1034 // Materialization can occur several times in the same environment. |
1067 // Check if we already processed this one. | 1035 // Check if we already processed this one. |
1068 if (mat->locations() != NULL) { | 1036 if (mat->locations() != NULL) { |
1069 return; // Already processed. | 1037 return; // Already processed. |
1070 } | 1038 } |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1104 def->AsMaterializeObject()); | 1072 def->AsMaterializeObject()); |
1105 } else { | 1073 } else { |
1106 locations[i] = Location::Any(); | 1074 locations[i] = Location::Any(); |
1107 LiveRange* range = GetLiveRange(def->ssa_temp_index()); | 1075 LiveRange* range = GetLiveRange(def->ssa_temp_index()); |
1108 range->AddUseInterval(block_start_pos, use_pos); | 1076 range->AddUseInterval(block_start_pos, use_pos); |
1109 range->AddUse(use_pos, &locations[i]); | 1077 range->AddUse(use_pos, &locations[i]); |
1110 } | 1078 } |
1111 } | 1079 } |
1112 } | 1080 } |
1113 | 1081 |
1114 | |
1115 void FlowGraphAllocator::ProcessOneInput(BlockEntryInstr* block, | 1082 void FlowGraphAllocator::ProcessOneInput(BlockEntryInstr* block, |
1116 intptr_t pos, | 1083 intptr_t pos, |
1117 Location* in_ref, | 1084 Location* in_ref, |
1118 Value* input, | 1085 Value* input, |
1119 intptr_t vreg, | 1086 intptr_t vreg, |
1120 RegisterSet* live_registers) { | 1087 RegisterSet* live_registers) { |
1121 ASSERT(in_ref != NULL); | 1088 ASSERT(in_ref != NULL); |
1122 ASSERT(!in_ref->IsPairLocation()); | 1089 ASSERT(!in_ref->IsPairLocation()); |
1123 ASSERT(input != NULL); | 1090 ASSERT(input != NULL); |
1124 ASSERT(block != NULL); | 1091 ASSERT(block != NULL); |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1168 // value -----* | 1135 // value -----* |
1169 // | 1136 // |
1170 range->AddUseInterval(block->start_pos(), pos + 1); | 1137 range->AddUseInterval(block->start_pos(), pos + 1); |
1171 range->AddUse(pos + 1, in_ref); | 1138 range->AddUse(pos + 1, in_ref); |
1172 } | 1139 } |
1173 } else { | 1140 } else { |
1174 ASSERT(in_ref->IsConstant()); | 1141 ASSERT(in_ref->IsConstant()); |
1175 } | 1142 } |
1176 } | 1143 } |
1177 | 1144 |
1178 | |
1179 void FlowGraphAllocator::ProcessOneOutput(BlockEntryInstr* block, | 1145 void FlowGraphAllocator::ProcessOneOutput(BlockEntryInstr* block, |
1180 intptr_t pos, | 1146 intptr_t pos, |
1181 Location* out, | 1147 Location* out, |
1182 Definition* def, | 1148 Definition* def, |
1183 intptr_t vreg, | 1149 intptr_t vreg, |
1184 bool output_same_as_first_input, | 1150 bool output_same_as_first_input, |
1185 Location* in_ref, | 1151 Location* in_ref, |
1186 Definition* input, | 1152 Definition* input, |
1187 intptr_t input_vreg, | 1153 intptr_t input_vreg, |
1188 BitVector* interference_set) { | 1154 BitVector* interference_set) { |
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1276 // Shorten live range to the point of definition and add use to be filled by | 1242 // Shorten live range to the point of definition and add use to be filled by |
1277 // allocator. | 1243 // allocator. |
1278 range->DefineAt(pos); | 1244 range->DefineAt(pos); |
1279 range->AddUse(pos, out); | 1245 range->AddUse(pos, out); |
1280 } | 1246 } |
1281 | 1247 |
1282 AssignSafepoints(def, range); | 1248 AssignSafepoints(def, range); |
1283 CompleteRange(range, RegisterKindForResult(def)); | 1249 CompleteRange(range, RegisterKindForResult(def)); |
1284 } | 1250 } |
1285 | 1251 |
1286 | |
1287 // Create and update live ranges corresponding to instruction's inputs, | 1252 // Create and update live ranges corresponding to instruction's inputs, |
1288 // temporaries and output. | 1253 // temporaries and output. |
1289 void FlowGraphAllocator::ProcessOneInstruction(BlockEntryInstr* block, | 1254 void FlowGraphAllocator::ProcessOneInstruction(BlockEntryInstr* block, |
1290 Instruction* current, | 1255 Instruction* current, |
1291 BitVector* interference_set) { | 1256 BitVector* interference_set) { |
1292 LocationSummary* locs = current->locs(); | 1257 LocationSummary* locs = current->locs(); |
1293 | 1258 |
1294 Definition* def = current->AsDefinition(); | 1259 Definition* def = current->AsDefinition(); |
1295 if ((def != NULL) && (def->AsConstant() != NULL)) { | 1260 if ((def != NULL) && (def->AsConstant() != NULL)) { |
1296 ASSERT(!def->HasPairRepresentation()); | 1261 ASSERT(!def->HasPairRepresentation()); |
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1426 BlockLocation(Location::RegisterLocation(static_cast<Register>(reg)), pos, | 1391 BlockLocation(Location::RegisterLocation(static_cast<Register>(reg)), pos, |
1427 pos + 1); | 1392 pos + 1); |
1428 } | 1393 } |
1429 | 1394 |
1430 for (intptr_t reg = 0; reg < kNumberOfFpuRegisters; reg++) { | 1395 for (intptr_t reg = 0; reg < kNumberOfFpuRegisters; reg++) { |
1431 BlockLocation( | 1396 BlockLocation( |
1432 Location::FpuRegisterLocation(static_cast<FpuRegister>(reg)), pos, | 1397 Location::FpuRegisterLocation(static_cast<FpuRegister>(reg)), pos, |
1433 pos + 1); | 1398 pos + 1); |
1434 } | 1399 } |
1435 | 1400 |
1436 | |
1437 #if defined(DEBUG) | 1401 #if defined(DEBUG) |
1438 // Verify that temps, inputs and output were specified as fixed | 1402 // Verify that temps, inputs and output were specified as fixed |
1439 // locations. Every register is blocked now so attempt to | 1403 // locations. Every register is blocked now so attempt to |
1440 // allocate will not succeed. | 1404 // allocate will not succeed. |
1441 for (intptr_t j = 0; j < locs->temp_count(); j++) { | 1405 for (intptr_t j = 0; j < locs->temp_count(); j++) { |
1442 ASSERT(!locs->temp(j).IsPairLocation()); | 1406 ASSERT(!locs->temp(j).IsPairLocation()); |
1443 ASSERT(!locs->temp(j).IsUnallocated()); | 1407 ASSERT(!locs->temp(j).IsUnallocated()); |
1444 } | 1408 } |
1445 | 1409 |
1446 for (intptr_t j = 0; j < locs->input_count(); j++) { | 1410 for (intptr_t j = 0; j < locs->input_count(); j++) { |
(...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1526 interference_set); | 1490 interference_set); |
1527 } else { | 1491 } else { |
1528 ProcessOneOutput(block, pos, out, def, def->ssa_temp_index(), | 1492 ProcessOneOutput(block, pos, out, def, def->ssa_temp_index(), |
1529 false, // output is not mapped to first input. | 1493 false, // output is not mapped to first input. |
1530 NULL, NULL, -1, // First input not needed. | 1494 NULL, NULL, -1, // First input not needed. |
1531 interference_set); | 1495 interference_set); |
1532 } | 1496 } |
1533 } | 1497 } |
1534 } | 1498 } |
1535 | 1499 |
1536 | |
1537 static ParallelMoveInstr* CreateParallelMoveBefore(Instruction* instr, | 1500 static ParallelMoveInstr* CreateParallelMoveBefore(Instruction* instr, |
1538 intptr_t pos) { | 1501 intptr_t pos) { |
1539 ASSERT(pos > 0); | 1502 ASSERT(pos > 0); |
1540 Instruction* prev = instr->previous(); | 1503 Instruction* prev = instr->previous(); |
1541 ParallelMoveInstr* move = prev->AsParallelMove(); | 1504 ParallelMoveInstr* move = prev->AsParallelMove(); |
1542 if ((move == NULL) || (move->lifetime_position() != pos)) { | 1505 if ((move == NULL) || (move->lifetime_position() != pos)) { |
1543 move = new ParallelMoveInstr(); | 1506 move = new ParallelMoveInstr(); |
1544 prev->LinkTo(move); | 1507 prev->LinkTo(move); |
1545 move->LinkTo(instr); | 1508 move->LinkTo(instr); |
1546 move->set_lifetime_position(pos); | 1509 move->set_lifetime_position(pos); |
1547 } | 1510 } |
1548 return move; | 1511 return move; |
1549 } | 1512 } |
1550 | 1513 |
1551 | |
1552 static ParallelMoveInstr* CreateParallelMoveAfter(Instruction* instr, | 1514 static ParallelMoveInstr* CreateParallelMoveAfter(Instruction* instr, |
1553 intptr_t pos) { | 1515 intptr_t pos) { |
1554 Instruction* next = instr->next(); | 1516 Instruction* next = instr->next(); |
1555 if (next->IsParallelMove() && (next->lifetime_position() == pos)) { | 1517 if (next->IsParallelMove() && (next->lifetime_position() == pos)) { |
1556 return next->AsParallelMove(); | 1518 return next->AsParallelMove(); |
1557 } | 1519 } |
1558 return CreateParallelMoveBefore(next, pos); | 1520 return CreateParallelMoveBefore(next, pos); |
1559 } | 1521 } |
1560 | 1522 |
1561 | |
1562 // Linearize the control flow graph. The chosen order will be used by the | 1523 // Linearize the control flow graph. The chosen order will be used by the |
1563 // linear-scan register allocator. Number most instructions with a pair of | 1524 // linear-scan register allocator. Number most instructions with a pair of |
1564 // numbers representing lifetime positions. Introduce explicit parallel | 1525 // numbers representing lifetime positions. Introduce explicit parallel |
1565 // move instructions in the predecessors of join nodes. The moves are used | 1526 // move instructions in the predecessors of join nodes. The moves are used |
1566 // for phi resolution. | 1527 // for phi resolution. |
1567 void FlowGraphAllocator::NumberInstructions() { | 1528 void FlowGraphAllocator::NumberInstructions() { |
1568 intptr_t pos = 0; | 1529 intptr_t pos = 0; |
1569 | 1530 |
1570 // The basic block order is reverse postorder. | 1531 // The basic block order is reverse postorder. |
1571 const intptr_t block_count = postorder_.length(); | 1532 const intptr_t block_count = postorder_.length(); |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1616 | 1577 |
1617 // Populate the ParallelMove with empty moves. | 1578 // Populate the ParallelMove with empty moves. |
1618 for (intptr_t j = 0; j < move_count; j++) { | 1579 for (intptr_t j = 0; j < move_count; j++) { |
1619 move->AddMove(Location::NoLocation(), Location::NoLocation()); | 1580 move->AddMove(Location::NoLocation(), Location::NoLocation()); |
1620 } | 1581 } |
1621 } | 1582 } |
1622 } | 1583 } |
1623 } | 1584 } |
1624 } | 1585 } |
1625 | 1586 |
1626 | |
1627 // Discover structural (reducible) loops nesting structure. | 1587 // Discover structural (reducible) loops nesting structure. |
1628 void FlowGraphAllocator::DiscoverLoops() { | 1588 void FlowGraphAllocator::DiscoverLoops() { |
1629 // This algorithm relies on the assumption that we emit blocks in reverse | 1589 // This algorithm relies on the assumption that we emit blocks in reverse |
1630 // postorder, so postorder number can be used to identify loop nesting. | 1590 // postorder, so postorder number can be used to identify loop nesting. |
1631 // | 1591 // |
1632 // TODO(vegorov): consider using a generic algorithm to correctly discover | 1592 // TODO(vegorov): consider using a generic algorithm to correctly discover |
1633 // both headers of reducible and irreducible loops. | 1593 // both headers of reducible and irreducible loops. |
1634 BlockInfo* current_loop = NULL; | 1594 BlockInfo* current_loop = NULL; |
1635 | 1595 |
1636 intptr_t loop_id = 0; // All loop headers have a unique id. | 1596 intptr_t loop_id = 0; // All loop headers have a unique id. |
(...skipping 29 matching lines...) Expand all Loading... |
1666 if (current_info == current_loop) { | 1626 if (current_info == current_loop) { |
1667 ASSERT(current_loop->is_loop_header()); | 1627 ASSERT(current_loop->is_loop_header()); |
1668 current_loop = current_info->loop(); | 1628 current_loop = current_info->loop(); |
1669 } else { | 1629 } else { |
1670 current_info->set_loop(current_loop); | 1630 current_info->set_loop(current_loop); |
1671 } | 1631 } |
1672 } | 1632 } |
1673 } | 1633 } |
1674 } | 1634 } |
1675 | 1635 |
1676 | |
1677 Instruction* FlowGraphAllocator::InstructionAt(intptr_t pos) const { | 1636 Instruction* FlowGraphAllocator::InstructionAt(intptr_t pos) const { |
1678 return instructions_[pos / 2]; | 1637 return instructions_[pos / 2]; |
1679 } | 1638 } |
1680 | 1639 |
1681 | |
1682 BlockInfo* FlowGraphAllocator::BlockInfoAt(intptr_t pos) const { | 1640 BlockInfo* FlowGraphAllocator::BlockInfoAt(intptr_t pos) const { |
1683 return block_info_[pos / 2]; | 1641 return block_info_[pos / 2]; |
1684 } | 1642 } |
1685 | 1643 |
1686 | |
1687 bool FlowGraphAllocator::IsBlockEntry(intptr_t pos) const { | 1644 bool FlowGraphAllocator::IsBlockEntry(intptr_t pos) const { |
1688 return IsInstructionStartPosition(pos) && InstructionAt(pos)->IsBlockEntry(); | 1645 return IsInstructionStartPosition(pos) && InstructionAt(pos)->IsBlockEntry(); |
1689 } | 1646 } |
1690 | 1647 |
1691 | |
1692 void AllocationFinger::Initialize(LiveRange* range) { | 1648 void AllocationFinger::Initialize(LiveRange* range) { |
1693 first_pending_use_interval_ = range->first_use_interval(); | 1649 first_pending_use_interval_ = range->first_use_interval(); |
1694 first_register_use_ = range->first_use(); | 1650 first_register_use_ = range->first_use(); |
1695 first_register_beneficial_use_ = range->first_use(); | 1651 first_register_beneficial_use_ = range->first_use(); |
1696 first_hinted_use_ = range->first_use(); | 1652 first_hinted_use_ = range->first_use(); |
1697 } | 1653 } |
1698 | 1654 |
1699 | |
1700 bool AllocationFinger::Advance(const intptr_t start) { | 1655 bool AllocationFinger::Advance(const intptr_t start) { |
1701 UseInterval* a = first_pending_use_interval_; | 1656 UseInterval* a = first_pending_use_interval_; |
1702 while (a != NULL && a->end() <= start) | 1657 while (a != NULL && a->end() <= start) |
1703 a = a->next(); | 1658 a = a->next(); |
1704 first_pending_use_interval_ = a; | 1659 first_pending_use_interval_ = a; |
1705 return (first_pending_use_interval_ == NULL); | 1660 return (first_pending_use_interval_ == NULL); |
1706 } | 1661 } |
1707 | 1662 |
1708 | |
1709 Location AllocationFinger::FirstHint() { | 1663 Location AllocationFinger::FirstHint() { |
1710 UsePosition* use = first_hinted_use_; | 1664 UsePosition* use = first_hinted_use_; |
1711 | 1665 |
1712 while (use != NULL) { | 1666 while (use != NULL) { |
1713 if (use->HasHint()) return use->hint(); | 1667 if (use->HasHint()) return use->hint(); |
1714 use = use->next(); | 1668 use = use->next(); |
1715 } | 1669 } |
1716 | 1670 |
1717 return Location::NoLocation(); | 1671 return Location::NoLocation(); |
1718 } | 1672 } |
1719 | 1673 |
1720 | |
1721 static UsePosition* FirstUseAfter(UsePosition* use, intptr_t after) { | 1674 static UsePosition* FirstUseAfter(UsePosition* use, intptr_t after) { |
1722 while ((use != NULL) && (use->pos() < after)) { | 1675 while ((use != NULL) && (use->pos() < after)) { |
1723 use = use->next(); | 1676 use = use->next(); |
1724 } | 1677 } |
1725 return use; | 1678 return use; |
1726 } | 1679 } |
1727 | 1680 |
1728 | |
1729 UsePosition* AllocationFinger::FirstRegisterUse(intptr_t after) { | 1681 UsePosition* AllocationFinger::FirstRegisterUse(intptr_t after) { |
1730 for (UsePosition* use = FirstUseAfter(first_register_use_, after); | 1682 for (UsePosition* use = FirstUseAfter(first_register_use_, after); |
1731 use != NULL; use = use->next()) { | 1683 use != NULL; use = use->next()) { |
1732 Location* loc = use->location_slot(); | 1684 Location* loc = use->location_slot(); |
1733 if (loc->IsUnallocated() && | 1685 if (loc->IsUnallocated() && |
1734 ((loc->policy() == Location::kRequiresRegister) || | 1686 ((loc->policy() == Location::kRequiresRegister) || |
1735 (loc->policy() == Location::kRequiresFpuRegister))) { | 1687 (loc->policy() == Location::kRequiresFpuRegister))) { |
1736 first_register_use_ = use; | 1688 first_register_use_ = use; |
1737 return use; | 1689 return use; |
1738 } | 1690 } |
1739 } | 1691 } |
1740 return NULL; | 1692 return NULL; |
1741 } | 1693 } |
1742 | 1694 |
1743 | |
1744 UsePosition* AllocationFinger::FirstRegisterBeneficialUse(intptr_t after) { | 1695 UsePosition* AllocationFinger::FirstRegisterBeneficialUse(intptr_t after) { |
1745 for (UsePosition* use = FirstUseAfter(first_register_beneficial_use_, after); | 1696 for (UsePosition* use = FirstUseAfter(first_register_beneficial_use_, after); |
1746 use != NULL; use = use->next()) { | 1697 use != NULL; use = use->next()) { |
1747 Location* loc = use->location_slot(); | 1698 Location* loc = use->location_slot(); |
1748 if (loc->IsUnallocated() && loc->IsRegisterBeneficial()) { | 1699 if (loc->IsUnallocated() && loc->IsRegisterBeneficial()) { |
1749 first_register_beneficial_use_ = use; | 1700 first_register_beneficial_use_ = use; |
1750 return use; | 1701 return use; |
1751 } | 1702 } |
1752 } | 1703 } |
1753 return NULL; | 1704 return NULL; |
1754 } | 1705 } |
1755 | 1706 |
1756 | |
1757 UsePosition* AllocationFinger::FirstInterferingUse(intptr_t after) { | 1707 UsePosition* AllocationFinger::FirstInterferingUse(intptr_t after) { |
1758 if (IsInstructionEndPosition(after)) { | 1708 if (IsInstructionEndPosition(after)) { |
1759 // If after is a position at the end of the instruction disregard | 1709 // If after is a position at the end of the instruction disregard |
1760 // any use occurring at it. | 1710 // any use occurring at it. |
1761 after += 1; | 1711 after += 1; |
1762 } | 1712 } |
1763 return FirstRegisterUse(after); | 1713 return FirstRegisterUse(after); |
1764 } | 1714 } |
1765 | 1715 |
1766 | |
1767 void AllocationFinger::UpdateAfterSplit(intptr_t first_use_after_split_pos) { | 1716 void AllocationFinger::UpdateAfterSplit(intptr_t first_use_after_split_pos) { |
1768 if ((first_register_use_ != NULL) && | 1717 if ((first_register_use_ != NULL) && |
1769 (first_register_use_->pos() >= first_use_after_split_pos)) { | 1718 (first_register_use_->pos() >= first_use_after_split_pos)) { |
1770 first_register_use_ = NULL; | 1719 first_register_use_ = NULL; |
1771 } | 1720 } |
1772 | 1721 |
1773 if ((first_register_beneficial_use_ != NULL) && | 1722 if ((first_register_beneficial_use_ != NULL) && |
1774 (first_register_beneficial_use_->pos() >= first_use_after_split_pos)) { | 1723 (first_register_beneficial_use_->pos() >= first_use_after_split_pos)) { |
1775 first_register_beneficial_use_ = NULL; | 1724 first_register_beneficial_use_ = NULL; |
1776 } | 1725 } |
1777 } | 1726 } |
1778 | 1727 |
1779 | |
1780 intptr_t UseInterval::Intersect(UseInterval* other) { | 1728 intptr_t UseInterval::Intersect(UseInterval* other) { |
1781 if (this->start() <= other->start()) { | 1729 if (this->start() <= other->start()) { |
1782 if (other->start() < this->end()) return other->start(); | 1730 if (other->start() < this->end()) return other->start(); |
1783 } else if (this->start() < other->end()) { | 1731 } else if (this->start() < other->end()) { |
1784 return this->start(); | 1732 return this->start(); |
1785 } | 1733 } |
1786 return kIllegalPosition; | 1734 return kIllegalPosition; |
1787 } | 1735 } |
1788 | 1736 |
1789 | |
1790 static intptr_t FirstIntersection(UseInterval* a, UseInterval* u) { | 1737 static intptr_t FirstIntersection(UseInterval* a, UseInterval* u) { |
1791 while (a != NULL && u != NULL) { | 1738 while (a != NULL && u != NULL) { |
1792 const intptr_t pos = a->Intersect(u); | 1739 const intptr_t pos = a->Intersect(u); |
1793 if (pos != kIllegalPosition) return pos; | 1740 if (pos != kIllegalPosition) return pos; |
1794 | 1741 |
1795 if (a->start() < u->start()) { | 1742 if (a->start() < u->start()) { |
1796 a = a->next(); | 1743 a = a->next(); |
1797 } else { | 1744 } else { |
1798 u = u->next(); | 1745 u = u->next(); |
1799 } | 1746 } |
1800 } | 1747 } |
1801 | 1748 |
1802 return kMaxPosition; | 1749 return kMaxPosition; |
1803 } | 1750 } |
1804 | 1751 |
1805 | |
// Splits the singly-linked list at *|head| around |split_pos|: positions
// that belong before the split stay in the original list and the first
// position after the split is returned as the head of the detached suffix.
// When |split_at_start| is true a position exactly at |split_pos| moves to
// the suffix; otherwise it stays in the prefix. PositionType must provide
// pos(), next() and set_next().
template <typename PositionType>
PositionType* SplitListOfPositions(PositionType** head,
                                   intptr_t split_pos,
                                   bool split_at_start) {
  PositionType* last_before_split = NULL;
  PositionType* pos = *head;
  while (pos != NULL) {
    const intptr_t current = pos->pos();
    const bool stays_in_prefix =
        split_at_start ? (current < split_pos) : (current <= split_pos);
    if (!stays_in_prefix) {
      break;
    }
    last_before_split = pos;
    pos = pos->next();
  }

  // Terminate the prefix (or empty it entirely when nothing precedes the
  // split) and hand back the suffix head.
  if (last_before_split == NULL) {
    *head = NULL;
  } else {
    last_before_split->set_next(NULL);
  }

  return pos;
}
1832 | 1778 |
1833 | |
1834 LiveRange* LiveRange::SplitAt(intptr_t split_pos) { | 1779 LiveRange* LiveRange::SplitAt(intptr_t split_pos) { |
1835 if (Start() == split_pos) return this; | 1780 if (Start() == split_pos) return this; |
1836 | 1781 |
1837 UseInterval* interval = finger_.first_pending_use_interval(); | 1782 UseInterval* interval = finger_.first_pending_use_interval(); |
1838 if (interval == NULL) { | 1783 if (interval == NULL) { |
1839 finger_.Initialize(this); | 1784 finger_.Initialize(this); |
1840 interval = finger_.first_pending_use_interval(); | 1785 interval = finger_.first_pending_use_interval(); |
1841 } | 1786 } |
1842 | 1787 |
1843 ASSERT(split_pos < End()); | 1788 ASSERT(split_pos < End()); |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1887 last_use_interval_ = last_before_split; | 1832 last_use_interval_ = last_before_split; |
1888 last_use_interval_->next_ = NULL; | 1833 last_use_interval_->next_ = NULL; |
1889 | 1834 |
1890 if (first_use_after_split != NULL) { | 1835 if (first_use_after_split != NULL) { |
1891 finger_.UpdateAfterSplit(first_use_after_split->pos()); | 1836 finger_.UpdateAfterSplit(first_use_after_split->pos()); |
1892 } | 1837 } |
1893 | 1838 |
1894 return next_sibling_; | 1839 return next_sibling_; |
1895 } | 1840 } |
1896 | 1841 |
1897 | |
1898 LiveRange* FlowGraphAllocator::SplitBetween(LiveRange* range, | 1842 LiveRange* FlowGraphAllocator::SplitBetween(LiveRange* range, |
1899 intptr_t from, | 1843 intptr_t from, |
1900 intptr_t to) { | 1844 intptr_t to) { |
1901 TRACE_ALLOC(THR_Print("split v%" Pd " [%" Pd ", %" Pd ") between [%" Pd | 1845 TRACE_ALLOC(THR_Print("split v%" Pd " [%" Pd ", %" Pd ") between [%" Pd |
1902 ", %" Pd ")\n", | 1846 ", %" Pd ")\n", |
1903 range->vreg(), range->Start(), range->End(), from, to)); | 1847 range->vreg(), range->Start(), range->End(), from, to)); |
1904 | 1848 |
1905 intptr_t split_pos = kIllegalPosition; | 1849 intptr_t split_pos = kIllegalPosition; |
1906 | 1850 |
1907 BlockInfo* split_block = BlockInfoAt(to); | 1851 BlockInfo* split_block = BlockInfoAt(to); |
(...skipping 18 matching lines...) Expand all Loading... |
1926 // instruction. | 1870 // instruction. |
1927 split_pos = ToInstructionStart(to) - 1; | 1871 split_pos = ToInstructionStart(to) - 1; |
1928 } | 1872 } |
1929 | 1873 |
1930 ASSERT(split_pos != kIllegalPosition); | 1874 ASSERT(split_pos != kIllegalPosition); |
1931 ASSERT(from < split_pos); | 1875 ASSERT(from < split_pos); |
1932 | 1876 |
1933 return range->SplitAt(split_pos); | 1877 return range->SplitAt(split_pos); |
1934 } | 1878 } |
1935 | 1879 |
1936 | |
1937 void FlowGraphAllocator::SpillBetween(LiveRange* range, | 1880 void FlowGraphAllocator::SpillBetween(LiveRange* range, |
1938 intptr_t from, | 1881 intptr_t from, |
1939 intptr_t to) { | 1882 intptr_t to) { |
1940 ASSERT(from < to); | 1883 ASSERT(from < to); |
1941 TRACE_ALLOC(THR_Print("spill v%" Pd " [%" Pd ", %" Pd ") " | 1884 TRACE_ALLOC(THR_Print("spill v%" Pd " [%" Pd ", %" Pd ") " |
1942 "between [%" Pd ", %" Pd ")\n", | 1885 "between [%" Pd ", %" Pd ")\n", |
1943 range->vreg(), range->Start(), range->End(), from, to)); | 1886 range->vreg(), range->Start(), range->End(), from, to)); |
1944 LiveRange* tail = range->SplitAt(from); | 1887 LiveRange* tail = range->SplitAt(from); |
1945 | 1888 |
1946 if (tail->Start() < to) { | 1889 if (tail->Start() < to) { |
1947 // There is an intersection of tail and [from, to). | 1890 // There is an intersection of tail and [from, to). |
1948 LiveRange* tail_tail = SplitBetween(tail, tail->Start(), to); | 1891 LiveRange* tail_tail = SplitBetween(tail, tail->Start(), to); |
1949 Spill(tail); | 1892 Spill(tail); |
1950 AddToUnallocated(tail_tail); | 1893 AddToUnallocated(tail_tail); |
1951 } else { | 1894 } else { |
1952 // No intersection between tail and [from, to). | 1895 // No intersection between tail and [from, to). |
1953 AddToUnallocated(tail); | 1896 AddToUnallocated(tail); |
1954 } | 1897 } |
1955 } | 1898 } |
1956 | 1899 |
1957 | |
1958 void FlowGraphAllocator::SpillAfter(LiveRange* range, intptr_t from) { | 1900 void FlowGraphAllocator::SpillAfter(LiveRange* range, intptr_t from) { |
1959 TRACE_ALLOC(THR_Print("spill v%" Pd " [%" Pd ", %" Pd ") after %" Pd "\n", | 1901 TRACE_ALLOC(THR_Print("spill v%" Pd " [%" Pd ", %" Pd ") after %" Pd "\n", |
1960 range->vreg(), range->Start(), range->End(), from)); | 1902 range->vreg(), range->Start(), range->End(), from)); |
1961 | 1903 |
1962 // When spilling the value inside the loop check if this spill can | 1904 // When spilling the value inside the loop check if this spill can |
1963 // be moved outside. | 1905 // be moved outside. |
1964 BlockInfo* block_info = BlockInfoAt(from); | 1906 BlockInfo* block_info = BlockInfoAt(from); |
1965 if (block_info->is_loop_header() || (block_info->loop() != NULL)) { | 1907 if (block_info->is_loop_header() || (block_info->loop() != NULL)) { |
1966 BlockInfo* loop_header = | 1908 BlockInfo* loop_header = |
1967 block_info->is_loop_header() ? block_info : block_info->loop(); | 1909 block_info->is_loop_header() ? block_info : block_info->loop(); |
1968 | 1910 |
1969 if ((range->Start() <= loop_header->entry()->start_pos()) && | 1911 if ((range->Start() <= loop_header->entry()->start_pos()) && |
1970 RangeHasOnlyUnconstrainedUsesInLoop(range, loop_header->loop_id())) { | 1912 RangeHasOnlyUnconstrainedUsesInLoop(range, loop_header->loop_id())) { |
1971 ASSERT(loop_header->entry()->start_pos() <= from); | 1913 ASSERT(loop_header->entry()->start_pos() <= from); |
1972 from = loop_header->entry()->start_pos(); | 1914 from = loop_header->entry()->start_pos(); |
1973 TRACE_ALLOC( | 1915 TRACE_ALLOC( |
1974 THR_Print(" moved spill position to loop header %" Pd "\n", from)); | 1916 THR_Print(" moved spill position to loop header %" Pd "\n", from)); |
1975 } | 1917 } |
1976 } | 1918 } |
1977 | 1919 |
1978 LiveRange* tail = range->SplitAt(from); | 1920 LiveRange* tail = range->SplitAt(from); |
1979 Spill(tail); | 1921 Spill(tail); |
1980 } | 1922 } |
1981 | 1923 |
1982 | |
1983 void FlowGraphAllocator::AllocateSpillSlotFor(LiveRange* range) { | 1924 void FlowGraphAllocator::AllocateSpillSlotFor(LiveRange* range) { |
1984 #if defined(TARGET_ARCH_DBC) | 1925 #if defined(TARGET_ARCH_DBC) |
1985 // There is no need to support spilling on DBC because we have a lot of | 1926 // There is no need to support spilling on DBC because we have a lot of |
1986 // registers and registers and spill-slots have the same performance | 1927 // registers and registers and spill-slots have the same performance |
1987 // characteristics. | 1928 // characteristics. |
1988 flow_graph_.parsed_function().Bailout("FlowGraphAllocator", "SPILL"); | 1929 flow_graph_.parsed_function().Bailout("FlowGraphAllocator", "SPILL"); |
1989 UNREACHABLE(); | 1930 UNREACHABLE(); |
1990 #endif | 1931 #endif |
1991 | 1932 |
1992 ASSERT(range->spill_slot().IsInvalid()); | 1933 ASSERT(range->spill_slot().IsInvalid()); |
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2070 } else { | 2011 } else { |
2071 ASSERT((range->representation() == kUnboxedDouble)); | 2012 ASSERT((range->representation() == kUnboxedDouble)); |
2072 location = Location::DoubleStackSlot(slot_idx); | 2013 location = Location::DoubleStackSlot(slot_idx); |
2073 } | 2014 } |
2074 range->set_spill_slot(location); | 2015 range->set_spill_slot(location); |
2075 } | 2016 } |
2076 | 2017 |
2077 spilled_.Add(range); | 2018 spilled_.Add(range); |
2078 } | 2019 } |
2079 | 2020 |
2080 | |
2081 void FlowGraphAllocator::MarkAsObjectAtSafepoints(LiveRange* range) { | 2021 void FlowGraphAllocator::MarkAsObjectAtSafepoints(LiveRange* range) { |
2082 intptr_t stack_index = range->spill_slot().stack_index(); | 2022 intptr_t stack_index = range->spill_slot().stack_index(); |
2083 ASSERT(stack_index >= 0); | 2023 ASSERT(stack_index >= 0); |
2084 | 2024 |
2085 while (range != NULL) { | 2025 while (range != NULL) { |
2086 for (SafepointPosition* safepoint = range->first_safepoint(); | 2026 for (SafepointPosition* safepoint = range->first_safepoint(); |
2087 safepoint != NULL; safepoint = safepoint->next()) { | 2027 safepoint != NULL; safepoint = safepoint->next()) { |
2088 // Mark the stack slot as having an object. | 2028 // Mark the stack slot as having an object. |
2089 safepoint->locs()->SetStackBit(stack_index); | 2029 safepoint->locs()->SetStackBit(stack_index); |
2090 } | 2030 } |
2091 range = range->next_sibling(); | 2031 range = range->next_sibling(); |
2092 } | 2032 } |
2093 } | 2033 } |
2094 | 2034 |
2095 | |
2096 void FlowGraphAllocator::Spill(LiveRange* range) { | 2035 void FlowGraphAllocator::Spill(LiveRange* range) { |
2097 LiveRange* parent = GetLiveRange(range->vreg()); | 2036 LiveRange* parent = GetLiveRange(range->vreg()); |
2098 if (parent->spill_slot().IsInvalid()) { | 2037 if (parent->spill_slot().IsInvalid()) { |
2099 AllocateSpillSlotFor(parent); | 2038 AllocateSpillSlotFor(parent); |
2100 if (range->representation() == kTagged) { | 2039 if (range->representation() == kTagged) { |
2101 MarkAsObjectAtSafepoints(parent); | 2040 MarkAsObjectAtSafepoints(parent); |
2102 } | 2041 } |
2103 } | 2042 } |
2104 range->set_assigned_location(parent->spill_slot()); | 2043 range->set_assigned_location(parent->spill_slot()); |
2105 ConvertAllUses(range); | 2044 ConvertAllUses(range); |
2106 } | 2045 } |
2107 | 2046 |
2108 | |
2109 intptr_t FlowGraphAllocator::FirstIntersectionWithAllocated( | 2047 intptr_t FlowGraphAllocator::FirstIntersectionWithAllocated( |
2110 intptr_t reg, | 2048 intptr_t reg, |
2111 LiveRange* unallocated) { | 2049 LiveRange* unallocated) { |
2112 intptr_t intersection = kMaxPosition; | 2050 intptr_t intersection = kMaxPosition; |
2113 for (intptr_t i = 0; i < registers_[reg]->length(); i++) { | 2051 for (intptr_t i = 0; i < registers_[reg]->length(); i++) { |
2114 LiveRange* allocated = (*registers_[reg])[i]; | 2052 LiveRange* allocated = (*registers_[reg])[i]; |
2115 if (allocated == NULL) continue; | 2053 if (allocated == NULL) continue; |
2116 | 2054 |
2117 UseInterval* allocated_head = | 2055 UseInterval* allocated_head = |
2118 allocated->finger()->first_pending_use_interval(); | 2056 allocated->finger()->first_pending_use_interval(); |
2119 if (allocated_head->start() >= intersection) continue; | 2057 if (allocated_head->start() >= intersection) continue; |
2120 | 2058 |
2121 const intptr_t pos = FirstIntersection( | 2059 const intptr_t pos = FirstIntersection( |
2122 unallocated->finger()->first_pending_use_interval(), allocated_head); | 2060 unallocated->finger()->first_pending_use_interval(), allocated_head); |
2123 if (pos < intersection) intersection = pos; | 2061 if (pos < intersection) intersection = pos; |
2124 } | 2062 } |
2125 return intersection; | 2063 return intersection; |
2126 } | 2064 } |
2127 | 2065 |
2128 | |
// Lazily create and seed the reaching-definitions set for |phi|.  The set
// initially contains the SSA temp indices of the phi's direct inputs (and
// their pair halves for pair representations).  If any input is itself a
// phi, the phi is queued on phis_ for fixpoint iteration in Compute().
void ReachingDefs::AddPhi(PhiInstr* phi) {
  if (phi->reaching_defs() == NULL) {
    Zone* zone = flow_graph_.zone();
    phi->set_reaching_defs(
        new (zone) BitVector(zone, flow_graph_.max_virtual_register_number()));

    // Compute the initial reaching-defs set from the phi's inputs.
    bool depends_on_phi = false;
    for (intptr_t i = 0; i < phi->InputCount(); i++) {
      Definition* input = phi->InputAt(i)->definition();
      if (input->IsPhi()) {
        depends_on_phi = true;
      }
      phi->reaching_defs()->Add(input->ssa_temp_index());
      if (phi->HasPairRepresentation()) {
        // Pair values occupy two virtual registers; track the second half too.
        phi->reaching_defs()->Add(ToSecondPairVreg(input->ssa_temp_index()));
      }
    }

    // If this phi depends on another phi then we need fix point iteration.
    if (depends_on_phi) phis_.Add(phi);
  }
}
2152 | 2089 |
2153 | |
2154 void ReachingDefs::Compute() { | 2090 void ReachingDefs::Compute() { |
2155 // Transitively collect all phis that are used by the given phi. | 2091 // Transitively collect all phis that are used by the given phi. |
2156 for (intptr_t i = 0; i < phis_.length(); i++) { | 2092 for (intptr_t i = 0; i < phis_.length(); i++) { |
2157 PhiInstr* phi = phis_[i]; | 2093 PhiInstr* phi = phis_[i]; |
2158 | 2094 |
2159 // Add all phis that affect this phi to the list. | 2095 // Add all phis that affect this phi to the list. |
2160 for (intptr_t i = 0; i < phi->InputCount(); i++) { | 2096 for (intptr_t i = 0; i < phi->InputCount(); i++) { |
2161 PhiInstr* input_phi = phi->InputAt(i)->definition()->AsPhi(); | 2097 PhiInstr* input_phi = phi->InputAt(i)->definition()->AsPhi(); |
2162 if (input_phi != NULL) { | 2098 if (input_phi != NULL) { |
2163 AddPhi(input_phi); | 2099 AddPhi(input_phi); |
(...skipping 14 matching lines...) Expand all Loading... |
2178 changed = true; | 2114 changed = true; |
2179 } | 2115 } |
2180 } | 2116 } |
2181 } | 2117 } |
2182 } | 2118 } |
2183 } while (changed); | 2119 } while (changed); |
2184 | 2120 |
2185 phis_.Clear(); | 2121 phis_.Clear(); |
2186 } | 2122 } |
2187 | 2123 |
2188 | |
2189 BitVector* ReachingDefs::Get(PhiInstr* phi) { | 2124 BitVector* ReachingDefs::Get(PhiInstr* phi) { |
2190 if (phi->reaching_defs() == NULL) { | 2125 if (phi->reaching_defs() == NULL) { |
2191 ASSERT(phis_.is_empty()); | 2126 ASSERT(phis_.is_empty()); |
2192 AddPhi(phi); | 2127 AddPhi(phi); |
2193 Compute(); | 2128 Compute(); |
2194 } | 2129 } |
2195 return phi->reaching_defs(); | 2130 return phi->reaching_defs(); |
2196 } | 2131 } |
2197 | 2132 |
2198 | |
2199 bool FlowGraphAllocator::AllocateFreeRegister(LiveRange* unallocated) { | 2133 bool FlowGraphAllocator::AllocateFreeRegister(LiveRange* unallocated) { |
2200 intptr_t candidate = kNoRegister; | 2134 intptr_t candidate = kNoRegister; |
2201 intptr_t free_until = 0; | 2135 intptr_t free_until = 0; |
2202 | 2136 |
2203 // If hint is available try hint first. | 2137 // If hint is available try hint first. |
2204 // TODO(vegorov): ensure that phis are hinted on the back edge. | 2138 // TODO(vegorov): ensure that phis are hinted on the back edge. |
2205 Location hint = unallocated->finger()->FirstHint(); | 2139 Location hint = unallocated->finger()->FirstHint(); |
2206 if (hint.IsMachineRegister()) { | 2140 if (hint.IsMachineRegister()) { |
2207 if (!blocked_registers_[hint.register_code()]) { | 2141 if (!blocked_registers_[hint.register_code()]) { |
2208 free_until = | 2142 free_until = |
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2318 | 2252 |
2319 registers_[candidate]->Add(unallocated); | 2253 registers_[candidate]->Add(unallocated); |
2320 unallocated->set_assigned_location(MakeRegisterLocation(candidate)); | 2254 unallocated->set_assigned_location(MakeRegisterLocation(candidate)); |
2321 #if defined(TARGET_ARCH_DBC) | 2255 #if defined(TARGET_ARCH_DBC) |
2322 last_used_register_ = Utils::Maximum(last_used_register_, candidate); | 2256 last_used_register_ = Utils::Maximum(last_used_register_, candidate); |
2323 #endif | 2257 #endif |
2324 | 2258 |
2325 return true; | 2259 return true; |
2326 } | 2260 } |
2327 | 2261 |
2328 | |
2329 bool FlowGraphAllocator::RangeHasOnlyUnconstrainedUsesInLoop(LiveRange* range, | 2262 bool FlowGraphAllocator::RangeHasOnlyUnconstrainedUsesInLoop(LiveRange* range, |
2330 intptr_t loop_id) { | 2263 intptr_t loop_id) { |
2331 if (range->vreg() >= 0) { | 2264 if (range->vreg() >= 0) { |
2332 LiveRange* parent = GetLiveRange(range->vreg()); | 2265 LiveRange* parent = GetLiveRange(range->vreg()); |
2333 return parent->HasOnlyUnconstrainedUsesInLoop(loop_id); | 2266 return parent->HasOnlyUnconstrainedUsesInLoop(loop_id); |
2334 } | 2267 } |
2335 return false; | 2268 return false; |
2336 } | 2269 } |
2337 | 2270 |
2338 | |
2339 bool FlowGraphAllocator::IsCheapToEvictRegisterInLoop(BlockInfo* loop, | 2271 bool FlowGraphAllocator::IsCheapToEvictRegisterInLoop(BlockInfo* loop, |
2340 intptr_t reg) { | 2272 intptr_t reg) { |
2341 const intptr_t loop_start = loop->entry()->start_pos(); | 2273 const intptr_t loop_start = loop->entry()->start_pos(); |
2342 const intptr_t loop_end = loop->last_block()->end_pos(); | 2274 const intptr_t loop_end = loop->last_block()->end_pos(); |
2343 | 2275 |
2344 for (intptr_t i = 0; i < registers_[reg]->length(); i++) { | 2276 for (intptr_t i = 0; i < registers_[reg]->length(); i++) { |
2345 LiveRange* allocated = (*registers_[reg])[i]; | 2277 LiveRange* allocated = (*registers_[reg])[i]; |
2346 | 2278 |
2347 UseInterval* interval = allocated->finger()->first_pending_use_interval(); | 2279 UseInterval* interval = allocated->finger()->first_pending_use_interval(); |
2348 if (interval->Contains(loop_start)) { | 2280 if (interval->Contains(loop_start)) { |
2349 if (!RangeHasOnlyUnconstrainedUsesInLoop(allocated, loop->loop_id())) { | 2281 if (!RangeHasOnlyUnconstrainedUsesInLoop(allocated, loop->loop_id())) { |
2350 return false; | 2282 return false; |
2351 } | 2283 } |
2352 } else if (interval->start() < loop_end) { | 2284 } else if (interval->start() < loop_end) { |
2353 return false; | 2285 return false; |
2354 } | 2286 } |
2355 } | 2287 } |
2356 | 2288 |
2357 return true; | 2289 return true; |
2358 } | 2290 } |
2359 | 2291 |
2360 | |
2361 bool FlowGraphAllocator::HasCheapEvictionCandidate(LiveRange* phi_range) { | 2292 bool FlowGraphAllocator::HasCheapEvictionCandidate(LiveRange* phi_range) { |
2362 ASSERT(phi_range->is_loop_phi()); | 2293 ASSERT(phi_range->is_loop_phi()); |
2363 | 2294 |
2364 BlockInfo* loop_header = BlockInfoAt(phi_range->Start()); | 2295 BlockInfo* loop_header = BlockInfoAt(phi_range->Start()); |
2365 ASSERT(loop_header->is_loop_header()); | 2296 ASSERT(loop_header->is_loop_header()); |
2366 ASSERT(phi_range->Start() == loop_header->entry()->start_pos()); | 2297 ASSERT(phi_range->Start() == loop_header->entry()->start_pos()); |
2367 | 2298 |
2368 for (intptr_t reg = 0; reg < NumberOfRegisters(); ++reg) { | 2299 for (intptr_t reg = 0; reg < NumberOfRegisters(); ++reg) { |
2369 if (blocked_registers_[reg]) continue; | 2300 if (blocked_registers_[reg]) continue; |
2370 if (IsCheapToEvictRegisterInLoop(loop_header, reg)) { | 2301 if (IsCheapToEvictRegisterInLoop(loop_header, reg)) { |
2371 return true; | 2302 return true; |
2372 } | 2303 } |
2373 } | 2304 } |
2374 | 2305 |
2375 return false; | 2306 return false; |
2376 } | 2307 } |
2377 | 2308 |
2378 | |
2379 void FlowGraphAllocator::AllocateAnyRegister(LiveRange* unallocated) { | 2309 void FlowGraphAllocator::AllocateAnyRegister(LiveRange* unallocated) { |
2380 // If a loop phi has no register uses we might still want to allocate it | 2310 // If a loop phi has no register uses we might still want to allocate it |
2381 // to the register to reduce amount of memory moves on the back edge. | 2311 // to the register to reduce amount of memory moves on the back edge. |
2382 // This is possible if there is a register blocked by a range that can be | 2312 // This is possible if there is a register blocked by a range that can be |
2383 // cheaply evicted i.e. it has no register beneficial uses inside the | 2313 // cheaply evicted i.e. it has no register beneficial uses inside the |
2384 // loop. | 2314 // loop. |
2385 UsePosition* register_use = | 2315 UsePosition* register_use = |
2386 unallocated->finger()->FirstRegisterUse(unallocated->Start()); | 2316 unallocated->finger()->FirstRegisterUse(unallocated->Start()); |
2387 if ((register_use == NULL) && | 2317 if ((register_use == NULL) && |
2388 !(unallocated->is_loop_phi() && HasCheapEvictionCandidate(unallocated))) { | 2318 !(unallocated->is_loop_phi() && HasCheapEvictionCandidate(unallocated))) { |
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2421 // Register is blocked before the end of the live range. Split the range | 2351 // Register is blocked before the end of the live range. Split the range |
2422 // at latest at blocked_at position. | 2352 // at latest at blocked_at position. |
2423 LiveRange* tail = | 2353 LiveRange* tail = |
2424 SplitBetween(unallocated, unallocated->Start(), blocked_at + 1); | 2354 SplitBetween(unallocated, unallocated->Start(), blocked_at + 1); |
2425 AddToUnallocated(tail); | 2355 AddToUnallocated(tail); |
2426 } | 2356 } |
2427 | 2357 |
2428 AssignNonFreeRegister(unallocated, candidate); | 2358 AssignNonFreeRegister(unallocated, candidate); |
2429 } | 2359 } |
2430 | 2360 |
2431 | |
2432 bool FlowGraphAllocator::UpdateFreeUntil(intptr_t reg, | 2361 bool FlowGraphAllocator::UpdateFreeUntil(intptr_t reg, |
2433 LiveRange* unallocated, | 2362 LiveRange* unallocated, |
2434 intptr_t* cur_free_until, | 2363 intptr_t* cur_free_until, |
2435 intptr_t* cur_blocked_at) { | 2364 intptr_t* cur_blocked_at) { |
2436 intptr_t free_until = kMaxPosition; | 2365 intptr_t free_until = kMaxPosition; |
2437 intptr_t blocked_at = kMaxPosition; | 2366 intptr_t blocked_at = kMaxPosition; |
2438 const intptr_t start = unallocated->Start(); | 2367 const intptr_t start = unallocated->Start(); |
2439 | 2368 |
2440 for (intptr_t i = 0; i < registers_[reg]->length(); i++) { | 2369 for (intptr_t i = 0; i < registers_[reg]->length(); i++) { |
2441 LiveRange* allocated = (*registers_[reg])[i]; | 2370 LiveRange* allocated = (*registers_[reg])[i]; |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2475 return false; | 2404 return false; |
2476 } | 2405 } |
2477 } | 2406 } |
2478 | 2407 |
2479 ASSERT(free_until > *cur_free_until); | 2408 ASSERT(free_until > *cur_free_until); |
2480 *cur_free_until = free_until; | 2409 *cur_free_until = free_until; |
2481 *cur_blocked_at = blocked_at; | 2410 *cur_blocked_at = blocked_at; |
2482 return true; | 2411 return true; |
2483 } | 2412 } |
2484 | 2413 |
2485 | |
2486 void FlowGraphAllocator::RemoveEvicted(intptr_t reg, intptr_t first_evicted) { | 2414 void FlowGraphAllocator::RemoveEvicted(intptr_t reg, intptr_t first_evicted) { |
2487 intptr_t to = first_evicted; | 2415 intptr_t to = first_evicted; |
2488 intptr_t from = first_evicted + 1; | 2416 intptr_t from = first_evicted + 1; |
2489 while (from < registers_[reg]->length()) { | 2417 while (from < registers_[reg]->length()) { |
2490 LiveRange* allocated = (*registers_[reg])[from++]; | 2418 LiveRange* allocated = (*registers_[reg])[from++]; |
2491 if (allocated != NULL) (*registers_[reg])[to++] = allocated; | 2419 if (allocated != NULL) (*registers_[reg])[to++] = allocated; |
2492 } | 2420 } |
2493 registers_[reg]->TruncateTo(to); | 2421 registers_[reg]->TruncateTo(to); |
2494 } | 2422 } |
2495 | 2423 |
2496 | |
// Assign |unallocated| to |reg|, evicting any evictable ranges currently
// occupying the register that intersect with it.
void FlowGraphAllocator::AssignNonFreeRegister(LiveRange* unallocated,
                                               intptr_t reg) {
  intptr_t first_evicted = -1;
  // Walk backwards so that first_evicted ends up at the smallest cleared
  // index, which RemoveEvicted uses as its compaction start.
  for (intptr_t i = registers_[reg]->length() - 1; i >= 0; i--) {
    LiveRange* allocated = (*registers_[reg])[i];
    if (allocated->vreg() < 0) continue;  // Can't be evicted.
    if (EvictIntersection(allocated, unallocated)) {
      // If allocated was not spilled convert all pending uses.
      if (allocated->assigned_location().IsMachineRegister()) {
        ASSERT(allocated->End() <= unallocated->Start());
        ConvertAllUses(allocated);
      }
      (*registers_[reg])[i] = NULL;
      first_evicted = i;
    }
  }

  // Remove evicted ranges from the array.
  if (first_evicted != -1) RemoveEvicted(reg, first_evicted);

  registers_[reg]->Add(unallocated);
  unallocated->set_assigned_location(MakeRegisterLocation(reg));
#if defined(TARGET_ARCH_DBC)
  // DBC encodes the frame size from the highest register touched.
  last_used_register_ = Utils::Maximum(last_used_register_, reg);
#endif
}
2523 | 2450 |
2524 | |
// Try to free the register held by |allocated| for |unallocated|.  Returns
// false when the two ranges do not actually intersect; otherwise spills
// (part of) |allocated| and returns true.
bool FlowGraphAllocator::EvictIntersection(LiveRange* allocated,
                                           LiveRange* unallocated) {
  UseInterval* first_unallocated =
      unallocated->finger()->first_pending_use_interval();
  const intptr_t intersection = FirstIntersection(
      allocated->finger()->first_pending_use_interval(), first_unallocated);
  if (intersection == kMaxPosition) return false;

  // Spill no later than the point where the contender becomes live.
  const intptr_t spill_position = first_unallocated->start();
  UsePosition* use = allocated->finger()->FirstInterferingUse(spill_position);
  if (use == NULL) {
    // No register uses after this point.
    SpillAfter(allocated, spill_position);
  } else {
    // A register use follows: spill only up to the position where the value
    // must be back in a register (the earlier of the intersection and that
    // use when spilling before the intersection).
    const intptr_t restore_position =
        (spill_position < intersection) ? MinPosition(intersection, use->pos())
                                        : use->pos();

    SpillBetween(allocated, spill_position, restore_position);
  }

  return true;
}
2548 | 2474 |
2549 | |
// Insert a parallel move |to| <- |from| at lifetime position |pos|,
// creating the hosting ParallelMoveInstr if one does not exist yet.
// Returns the newly added move operands.
MoveOperands* FlowGraphAllocator::AddMoveAt(intptr_t pos,
                                            Location to,
                                            Location from) {
  ASSERT(!IsBlockEntry(pos));

  if (pos < kNormalEntryPos) {
    ASSERT(pos > 0);
    // Parallel moves added to the GraphEntry (B0) will be added at the start
    // of the normal entry (B1)
    BlockEntryInstr* entry = InstructionAt(kNormalEntryPos)->AsBlockEntry();
    return entry->GetParallelMove()->AddMove(to, from);
  }

  Instruction* instr = InstructionAt(pos);

  // Even positions are instruction starts, odd positions fall after the
  // instruction; pick the matching parallel move.
  ParallelMoveInstr* parallel_move = NULL;
  if (IsInstructionStartPosition(pos)) {
    parallel_move = CreateParallelMoveBefore(instr, pos);
  } else {
    parallel_move = CreateParallelMoveAfter(instr, pos);
  }

  return parallel_move->AddMove(to, from);
}
2574 | 2499 |
2575 | |
2576 void FlowGraphAllocator::ConvertUseTo(UsePosition* use, Location loc) { | 2500 void FlowGraphAllocator::ConvertUseTo(UsePosition* use, Location loc) { |
2577 ASSERT(!loc.IsPairLocation()); | 2501 ASSERT(!loc.IsPairLocation()); |
2578 ASSERT(use->location_slot() != NULL); | 2502 ASSERT(use->location_slot() != NULL); |
2579 Location* slot = use->location_slot(); | 2503 Location* slot = use->location_slot(); |
2580 ASSERT(slot->IsUnallocated()); | 2504 ASSERT(slot->IsUnallocated()); |
2581 TRACE_ALLOC(THR_Print(" use at %" Pd " converted to ", use->pos())); | 2505 TRACE_ALLOC(THR_Print(" use at %" Pd " converted to ", use->pos())); |
2582 TRACE_ALLOC(loc.Print()); | 2506 TRACE_ALLOC(loc.Print()); |
2583 TRACE_ALLOC(THR_Print("\n")); | 2507 TRACE_ALLOC(THR_Print("\n")); |
2584 *slot = loc; | 2508 *slot = loc; |
2585 } | 2509 } |
2586 | 2510 |
2587 | |
2588 void FlowGraphAllocator::ConvertAllUses(LiveRange* range) { | 2511 void FlowGraphAllocator::ConvertAllUses(LiveRange* range) { |
2589 if (range->vreg() == kNoVirtualRegister) return; | 2512 if (range->vreg() == kNoVirtualRegister) return; |
2590 | 2513 |
2591 const Location loc = range->assigned_location(); | 2514 const Location loc = range->assigned_location(); |
2592 ASSERT(!loc.IsInvalid()); | 2515 ASSERT(!loc.IsInvalid()); |
2593 | 2516 |
2594 TRACE_ALLOC(THR_Print("range [%" Pd ", %" Pd ") " | 2517 TRACE_ALLOC(THR_Print("range [%" Pd ", %" Pd ") " |
2595 "for v%" Pd " has been allocated to ", | 2518 "for v%" Pd " has been allocated to ", |
2596 range->Start(), range->End(), range->vreg())); | 2519 range->Start(), range->End(), range->vreg())); |
2597 TRACE_ALLOC(loc.Print()); | 2520 TRACE_ALLOC(loc.Print()); |
(...skipping 15 matching lines...) Expand all Loading... |
2613 } | 2536 } |
2614 #else | 2537 #else |
2615 if (range->representation() == kTagged) { | 2538 if (range->representation() == kTagged) { |
2616 safepoint->locs()->SetStackBit(loc.reg()); | 2539 safepoint->locs()->SetStackBit(loc.reg()); |
2617 } | 2540 } |
2618 #endif // !defined(TARGET_ARCH_DBC) | 2541 #endif // !defined(TARGET_ARCH_DBC) |
2619 } | 2542 } |
2620 } | 2543 } |
2621 } | 2544 } |
2622 | 2545 |
2623 | |
// Advance the allocation fingers of all active ranges up to position
// |start|, retiring (and converting the uses of) every range whose finger
// reports it has no pending intervals left.
void FlowGraphAllocator::AdvanceActiveIntervals(const intptr_t start) {
  for (intptr_t reg = 0; reg < NumberOfRegisters(); reg++) {
    if (registers_[reg]->is_empty()) continue;

    intptr_t first_evicted = -1;
    // Walk backwards so first_evicted ends up at the lowest cleared index,
    // which RemoveEvicted uses as its compaction start.
    for (intptr_t i = registers_[reg]->length() - 1; i >= 0; i--) {
      LiveRange* range = (*registers_[reg])[i];
      if (range->finger()->Advance(start)) {
        // The range is fully processed: finalize its uses and retire it.
        ConvertAllUses(range);
        (*registers_[reg])[i] = NULL;
        first_evicted = i;
      }
    }

    if (first_evicted != -1) RemoveEvicted(reg, first_evicted);
  }
}
2641 | 2563 |
2642 | |
2643 bool LiveRange::Contains(intptr_t pos) const { | 2564 bool LiveRange::Contains(intptr_t pos) const { |
2644 if (!CanCover(pos)) return false; | 2565 if (!CanCover(pos)) return false; |
2645 | 2566 |
2646 for (UseInterval* interval = first_use_interval_; interval != NULL; | 2567 for (UseInterval* interval = first_use_interval_; interval != NULL; |
2647 interval = interval->next()) { | 2568 interval = interval->next()) { |
2648 if (interval->Contains(pos)) { | 2569 if (interval->Contains(pos)) { |
2649 return true; | 2570 return true; |
2650 } | 2571 } |
2651 } | 2572 } |
2652 | 2573 |
2653 return false; | 2574 return false; |
2654 } | 2575 } |
2655 | 2576 |
2656 | |
// Attach to |range| every safepoint position that the range covers.
// NOTE(review): the loop walks safepoints_ from the back and breaks at the
// first safepoint at or past the range's end — this presumably relies on
// the order in which safepoints_ was built relative to range processing;
// confirm against the collection site before changing.
void FlowGraphAllocator::AssignSafepoints(Definition* defn, LiveRange* range) {
  for (intptr_t i = safepoints_.length() - 1; i >= 0; i--) {
    Instruction* safepoint_instr = safepoints_[i];
    if (safepoint_instr == defn) {
      // The value is not live until after the definition is fully executed,
      // don't assign the safepoint inside the definition itself to
      // definition's liverange.
      continue;
    }

    const intptr_t pos = safepoint_instr->lifetime_position();
    if (range->End() <= pos) break;

    if (range->Contains(pos)) {
      range->AddSafepoint(pos, safepoint_instr->locs());
    }
  }
}
2675 | 2595 |
2676 | |
2677 static inline bool ShouldBeAllocatedBefore(LiveRange* a, LiveRange* b) { | 2596 static inline bool ShouldBeAllocatedBefore(LiveRange* a, LiveRange* b) { |
2678 // TODO(vegorov): consider first hint position when ordering live ranges. | 2597 // TODO(vegorov): consider first hint position when ordering live ranges. |
2679 return a->Start() <= b->Start(); | 2598 return a->Start() <= b->Start(); |
2680 } | 2599 } |
2681 | 2600 |
2682 | |
2683 static void AddToSortedListOfRanges(GrowableArray<LiveRange*>* list, | 2601 static void AddToSortedListOfRanges(GrowableArray<LiveRange*>* list, |
2684 LiveRange* range) { | 2602 LiveRange* range) { |
2685 range->finger()->Initialize(range); | 2603 range->finger()->Initialize(range); |
2686 | 2604 |
2687 if (list->is_empty()) { | 2605 if (list->is_empty()) { |
2688 list->Add(range); | 2606 list->Add(range); |
2689 return; | 2607 return; |
2690 } | 2608 } |
2691 | 2609 |
2692 for (intptr_t i = list->length() - 1; i >= 0; i--) { | 2610 for (intptr_t i = list->length() - 1; i >= 0; i--) { |
2693 if (ShouldBeAllocatedBefore(range, (*list)[i])) { | 2611 if (ShouldBeAllocatedBefore(range, (*list)[i])) { |
2694 list->InsertAt(i + 1, range); | 2612 list->InsertAt(i + 1, range); |
2695 return; | 2613 return; |
2696 } | 2614 } |
2697 } | 2615 } |
2698 list->InsertAt(0, range); | 2616 list->InsertAt(0, range); |
2699 } | 2617 } |
2700 | 2618 |
2701 | |
// Add a live range to the general unallocated worklist, keeping the list
// sorted by start position.
void FlowGraphAllocator::AddToUnallocated(LiveRange* range) {
  AddToSortedListOfRanges(&unallocated_, range);
}
2705 | 2622 |
2706 | |
2707 void FlowGraphAllocator::CompleteRange(LiveRange* range, Location::Kind kind) { | 2623 void FlowGraphAllocator::CompleteRange(LiveRange* range, Location::Kind kind) { |
2708 switch (kind) { | 2624 switch (kind) { |
2709 case Location::kRegister: | 2625 case Location::kRegister: |
2710 AddToSortedListOfRanges(&unallocated_cpu_, range); | 2626 AddToSortedListOfRanges(&unallocated_cpu_, range); |
2711 break; | 2627 break; |
2712 | 2628 |
2713 case Location::kFpuRegister: | 2629 case Location::kFpuRegister: |
2714 AddToSortedListOfRanges(&unallocated_xmm_, range); | 2630 AddToSortedListOfRanges(&unallocated_xmm_, range); |
2715 break; | 2631 break; |
2716 | 2632 |
2717 default: | 2633 default: |
2718 UNREACHABLE(); | 2634 UNREACHABLE(); |
2719 } | 2635 } |
2720 } | 2636 } |
2721 | 2637 |
2722 | |
#if defined(DEBUG)
// Debug-only invariant check: each entry of unallocated_ must be
// allocatable no earlier than its predecessor.
bool FlowGraphAllocator::UnallocatedIsSorted() {
  for (intptr_t i = unallocated_.length() - 1; i >= 1; i--) {
    LiveRange* later = unallocated_[i];
    LiveRange* earlier = unallocated_[i - 1];
    if (!ShouldBeAllocatedBefore(later, earlier)) {
      UNREACHABLE();
      return false;
    }
  }
  return true;
}
#endif
2736 | 2651 |
2737 | |
2738 void FlowGraphAllocator::PrepareForAllocation( | 2652 void FlowGraphAllocator::PrepareForAllocation( |
2739 Location::Kind register_kind, | 2653 Location::Kind register_kind, |
2740 intptr_t number_of_registers, | 2654 intptr_t number_of_registers, |
2741 const GrowableArray<LiveRange*>& unallocated, | 2655 const GrowableArray<LiveRange*>& unallocated, |
2742 LiveRange** blocking_ranges, | 2656 LiveRange** blocking_ranges, |
2743 bool* blocked_registers) { | 2657 bool* blocked_registers) { |
2744 register_kind_ = register_kind; | 2658 register_kind_ = register_kind; |
2745 number_of_registers_ = number_of_registers; | 2659 number_of_registers_ = number_of_registers; |
2746 | 2660 |
2747 blocked_registers_.Clear(); | 2661 blocked_registers_.Clear(); |
(...skipping 10 matching lines...) Expand all Loading... |
2758 ASSERT(registers_[reg]->is_empty()); | 2672 ASSERT(registers_[reg]->is_empty()); |
2759 | 2673 |
2760 LiveRange* range = blocking_ranges[reg]; | 2674 LiveRange* range = blocking_ranges[reg]; |
2761 if (range != NULL) { | 2675 if (range != NULL) { |
2762 range->finger()->Initialize(range); | 2676 range->finger()->Initialize(range); |
2763 registers_[reg]->Add(range); | 2677 registers_[reg]->Add(range); |
2764 } | 2678 } |
2765 } | 2679 } |
2766 } | 2680 } |
2767 | 2681 |
2768 | |
2769 void FlowGraphAllocator::AllocateUnallocatedRanges() { | 2682 void FlowGraphAllocator::AllocateUnallocatedRanges() { |
2770 #if defined(DEBUG) | 2683 #if defined(DEBUG) |
2771 ASSERT(UnallocatedIsSorted()); | 2684 ASSERT(UnallocatedIsSorted()); |
2772 #endif | 2685 #endif |
2773 | 2686 |
2774 while (!unallocated_.is_empty()) { | 2687 while (!unallocated_.is_empty()) { |
2775 LiveRange* range = unallocated_.RemoveLast(); | 2688 LiveRange* range = unallocated_.RemoveLast(); |
2776 const intptr_t start = range->Start(); | 2689 const intptr_t start = range->Start(); |
2777 TRACE_ALLOC(THR_Print("Processing live range for v%" Pd " " | 2690 TRACE_ALLOC(THR_Print("Processing live range for v%" Pd " " |
2778 "starting at %" Pd "\n", | 2691 "starting at %" Pd "\n", |
(...skipping 14 matching lines...) Expand all Loading... |
2793 } | 2706 } |
2794 | 2707 |
2795 // All allocation decisions were done. | 2708 // All allocation decisions were done. |
2796 ASSERT(unallocated_.is_empty()); | 2709 ASSERT(unallocated_.is_empty()); |
2797 | 2710 |
2798 // Finish allocation. | 2711 // Finish allocation. |
2799 AdvanceActiveIntervals(kMaxPosition); | 2712 AdvanceActiveIntervals(kMaxPosition); |
2800 TRACE_ALLOC(THR_Print("Allocation completed\n")); | 2713 TRACE_ALLOC(THR_Print("Allocation completed\n")); |
2801 } | 2714 } |
2802 | 2715 |
2803 | |
2804 bool FlowGraphAllocator::TargetLocationIsSpillSlot(LiveRange* range, | 2716 bool FlowGraphAllocator::TargetLocationIsSpillSlot(LiveRange* range, |
2805 Location target) { | 2717 Location target) { |
2806 if (target.IsStackSlot() || target.IsDoubleStackSlot() || | 2718 if (target.IsStackSlot() || target.IsDoubleStackSlot() || |
2807 target.IsConstant()) { | 2719 target.IsConstant()) { |
2808 ASSERT(GetLiveRange(range->vreg())->spill_slot().Equals(target)); | 2720 ASSERT(GetLiveRange(range->vreg())->spill_slot().Equals(target)); |
2809 return true; | 2721 return true; |
2810 } | 2722 } |
2811 return false; | 2723 return false; |
2812 } | 2724 } |
2813 | 2725 |
2814 | |
2815 void FlowGraphAllocator::ConnectSplitSiblings(LiveRange* parent, | 2726 void FlowGraphAllocator::ConnectSplitSiblings(LiveRange* parent, |
2816 BlockEntryInstr* source_block, | 2727 BlockEntryInstr* source_block, |
2817 BlockEntryInstr* target_block) { | 2728 BlockEntryInstr* target_block) { |
2818 TRACE_ALLOC(THR_Print("Connect v%" Pd " on the edge B%" Pd " -> B%" Pd "\n", | 2729 TRACE_ALLOC(THR_Print("Connect v%" Pd " on the edge B%" Pd " -> B%" Pd "\n", |
2819 parent->vreg(), source_block->block_id(), | 2730 parent->vreg(), source_block->block_id(), |
2820 target_block->block_id())); | 2731 target_block->block_id())); |
2821 if (parent->next_sibling() == NULL) { | 2732 if (parent->next_sibling() == NULL) { |
2822 // Nothing to connect. The whole range was allocated to the same location. | 2733 // Nothing to connect. The whole range was allocated to the same location. |
2823 TRACE_ALLOC(THR_Print("range v%" Pd " has no siblings\n", parent->vreg())); | 2734 TRACE_ALLOC(THR_Print("range v%" Pd " has no siblings\n", parent->vreg())); |
2824 return; | 2735 return; |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2874 | 2785 |
2875 Instruction* last = source_block->last_instruction(); | 2786 Instruction* last = source_block->last_instruction(); |
2876 if ((last->SuccessorCount() == 1) && !source_block->IsGraphEntry()) { | 2787 if ((last->SuccessorCount() == 1) && !source_block->IsGraphEntry()) { |
2877 ASSERT(last->IsGoto()); | 2788 ASSERT(last->IsGoto()); |
2878 last->AsGoto()->GetParallelMove()->AddMove(target, source); | 2789 last->AsGoto()->GetParallelMove()->AddMove(target, source); |
2879 } else { | 2790 } else { |
2880 target_block->GetParallelMove()->AddMove(target, source); | 2791 target_block->GetParallelMove()->AddMove(target, source); |
2881 } | 2792 } |
2882 } | 2793 } |
2883 | 2794 |
2884 | |
2885 void FlowGraphAllocator::ResolveControlFlow() { | 2795 void FlowGraphAllocator::ResolveControlFlow() { |
2886 // Resolve linear control flow between touching split siblings | 2796 // Resolve linear control flow between touching split siblings |
2887 // inside basic blocks. | 2797 // inside basic blocks. |
2888 for (intptr_t vreg = 0; vreg < live_ranges_.length(); vreg++) { | 2798 for (intptr_t vreg = 0; vreg < live_ranges_.length(); vreg++) { |
2889 LiveRange* range = live_ranges_[vreg]; | 2799 LiveRange* range = live_ranges_[vreg]; |
2890 if (range == NULL) continue; | 2800 if (range == NULL) continue; |
2891 | 2801 |
2892 while (range->next_sibling() != NULL) { | 2802 while (range->next_sibling() != NULL) { |
2893 LiveRange* sibling = range->next_sibling(); | 2803 LiveRange* sibling = range->next_sibling(); |
2894 TRACE_ALLOC(THR_Print("connecting [%" Pd ", %" Pd ") [", range->Start(), | 2804 TRACE_ALLOC(THR_Print("connecting [%" Pd ", %" Pd ") [", range->Start(), |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2930 range->assigned_location().IsDoubleStackSlot() || | 2840 range->assigned_location().IsDoubleStackSlot() || |
2931 range->assigned_location().IsConstant()) { | 2841 range->assigned_location().IsConstant()) { |
2932 ASSERT(range->assigned_location().Equals(range->spill_slot())); | 2842 ASSERT(range->assigned_location().Equals(range->spill_slot())); |
2933 } else { | 2843 } else { |
2934 AddMoveAt(range->Start() + 1, range->spill_slot(), | 2844 AddMoveAt(range->Start() + 1, range->spill_slot(), |
2935 range->assigned_location()); | 2845 range->assigned_location()); |
2936 } | 2846 } |
2937 } | 2847 } |
2938 } | 2848 } |
2939 | 2849 |
2940 | |
2941 static Representation RepresentationForRange(Representation definition_rep) { | 2850 static Representation RepresentationForRange(Representation definition_rep) { |
2942 if (definition_rep == kUnboxedMint) { | 2851 if (definition_rep == kUnboxedMint) { |
2943 // kUnboxedMint is split into two ranges, each of which are kUntagged. | 2852 // kUnboxedMint is split into two ranges, each of which are kUntagged. |
2944 return kUntagged; | 2853 return kUntagged; |
2945 } else if (definition_rep == kUnboxedUint32) { | 2854 } else if (definition_rep == kUnboxedUint32) { |
2946 // kUnboxedUint32 is untagged. | 2855 // kUnboxedUint32 is untagged. |
2947 return kUntagged; | 2856 return kUntagged; |
2948 } | 2857 } |
2949 return definition_rep; | 2858 return definition_rep; |
2950 } | 2859 } |
2951 | 2860 |
2952 | |
2953 void FlowGraphAllocator::CollectRepresentations() { | 2861 void FlowGraphAllocator::CollectRepresentations() { |
2954 // Parameters. | 2862 // Parameters. |
2955 GraphEntryInstr* graph_entry = flow_graph_.graph_entry(); | 2863 GraphEntryInstr* graph_entry = flow_graph_.graph_entry(); |
2956 for (intptr_t i = 0; i < graph_entry->initial_definitions()->length(); ++i) { | 2864 for (intptr_t i = 0; i < graph_entry->initial_definitions()->length(); ++i) { |
2957 Definition* def = (*graph_entry->initial_definitions())[i]; | 2865 Definition* def = (*graph_entry->initial_definitions())[i]; |
2958 value_representations_[def->ssa_temp_index()] = | 2866 value_representations_[def->ssa_temp_index()] = |
2959 RepresentationForRange(def->representation()); | 2867 RepresentationForRange(def->representation()); |
2960 ASSERT(!def->HasPairRepresentation()); | 2868 ASSERT(!def->HasPairRepresentation()); |
2961 } | 2869 } |
2962 | 2870 |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2999 RepresentationForRange(def->representation()); | 2907 RepresentationForRange(def->representation()); |
3000 if (def->HasPairRepresentation()) { | 2908 if (def->HasPairRepresentation()) { |
3001 value_representations_[ToSecondPairVreg(vreg)] = | 2909 value_representations_[ToSecondPairVreg(vreg)] = |
3002 RepresentationForRange(def->representation()); | 2910 RepresentationForRange(def->representation()); |
3003 } | 2911 } |
3004 } | 2912 } |
3005 } | 2913 } |
3006 } | 2914 } |
3007 } | 2915 } |
3008 | 2916 |
3009 | |
3010 void FlowGraphAllocator::AllocateRegisters() { | 2917 void FlowGraphAllocator::AllocateRegisters() { |
3011 CollectRepresentations(); | 2918 CollectRepresentations(); |
3012 | 2919 |
3013 liveness_.Analyze(); | 2920 liveness_.Analyze(); |
3014 | 2921 |
3015 NumberInstructions(); | 2922 NumberInstructions(); |
3016 | 2923 |
3017 DiscoverLoops(); | 2924 DiscoverLoops(); |
3018 | 2925 |
3019 #if defined(TARGET_ARCH_DBC) | 2926 #if defined(TARGET_ARCH_DBC) |
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3101 if (FLAG_support_il_printer) { | 3008 if (FLAG_support_il_printer) { |
3102 #ifndef PRODUCT | 3009 #ifndef PRODUCT |
3103 FlowGraphPrinter printer(flow_graph_, true); | 3010 FlowGraphPrinter printer(flow_graph_, true); |
3104 printer.PrintBlocks(); | 3011 printer.PrintBlocks(); |
3105 #endif | 3012 #endif |
3106 } | 3013 } |
3107 THR_Print("----------------------------------------------\n"); | 3014 THR_Print("----------------------------------------------\n"); |
3108 } | 3015 } |
3109 } | 3016 } |
3110 | 3017 |
3111 | |
3112 } // namespace dart | 3018 } // namespace dart |
OLD | NEW |