OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
6 #include "src/compiler/linkage.h" | 6 #include "src/compiler/linkage.h" |
7 #include "src/compiler/register-allocator.h" | 7 #include "src/compiler/register-allocator.h" |
8 #include "src/string-stream.h" | 8 #include "src/string-stream.h" |
9 | 9 |
10 namespace v8 { | 10 namespace v8 { |
(...skipping 212 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
223 } | 223 } |
224 if (pos.IsStart()) { | 224 if (pos.IsStart()) { |
225 os << 's'; | 225 os << 's'; |
226 } else { | 226 } else { |
227 os << 'e'; | 227 os << 'e'; |
228 } | 228 } |
229 return os; | 229 return os; |
230 } | 230 } |
231 | 231 |
232 | 232 |
233 struct LiveRange::SpillAtDefinitionList : ZoneObject { | |
234 SpillAtDefinitionList(int gap_index, InstructionOperand* operand, | |
235 SpillAtDefinitionList* next) | |
236 : gap_index(gap_index), operand(operand), next(next) {} | |
237 const int gap_index; | |
238 InstructionOperand* const operand; | |
239 SpillAtDefinitionList* const next; | |
240 }; | |
241 | |
242 | |
// Sentinel values for the lazily computed spill weight (weight_ is
// initialized to kInvalidWeight in the constructor and recomputed on demand).
const float LiveRange::kInvalidWeight = -1;
const float LiveRange::kMaxWeight = std::numeric_limits<float>::max();
245 | 235 |
246 | 236 |
// Creates an unallocated live range. |relative_id| distinguishes this range
// among the children of |top_level| (the top-level range itself is built with
// relative_id 0, see TopLevelLiveRange's constructor). Interval/use lists
// start empty; size_ and weight_ start as "not yet computed" sentinels.
LiveRange::LiveRange(int relative_id, MachineType machine_type,
                     TopLevelLiveRange* top_level)
    : relative_id_(relative_id),
      bits_(0),
      last_interval_(nullptr),
      first_interval_(nullptr),
      first_pos_(nullptr),
      top_level_(top_level),
      next_(nullptr),
      current_interval_(nullptr),
      last_processed_use_(nullptr),
      current_hint_position_(nullptr),
      size_(kInvalidSize),
      weight_(kInvalidWeight) {
  DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type));
  // Pack the "no register yet" marker and the machine type into the bit
  // field; TopLevelLiveRange ORs in additional fields (e.g. spill type).
  bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
          MachineTypeField::encode(machine_type);
}
270 | 255 |
271 | 256 |
272 void LiveRange::Verify() const { | 257 void LiveRange::Verify() const { |
273 // Walk the positions, verifying that each is in an interval. | 258 // Walk the positions, verifying that each is in an interval. |
274 auto interval = first_interval_; | 259 auto interval = first_interval_; |
275 for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { | 260 for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { |
276 CHECK(Start() <= pos->pos()); | 261 CHECK(Start() <= pos->pos()); |
277 CHECK(pos->pos() <= End()); | 262 CHECK(pos->pos() <= End()); |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
309 case kRepFloat32: | 294 case kRepFloat32: |
310 case kRepFloat64: | 295 case kRepFloat64: |
311 return DOUBLE_REGISTERS; | 296 return DOUBLE_REGISTERS; |
312 default: | 297 default: |
313 break; | 298 break; |
314 } | 299 } |
315 return GENERAL_REGISTERS; | 300 return GENERAL_REGISTERS; |
316 } | 301 } |
317 | 302 |
318 | 303 |
319 void LiveRange::SpillAtDefinition(Zone* zone, int gap_index, | |
320 InstructionOperand* operand) { | |
321 DCHECK(HasNoSpillType()); | |
322 spills_at_definition_ = new (zone) | |
323 SpillAtDefinitionList(gap_index, operand, spills_at_definition_); | |
324 } | |
325 | |
326 | |
327 bool LiveRange::TryCommitSpillInDeferredBlock( | |
328 InstructionSequence* code, const InstructionOperand& spill_operand) { | |
329 DCHECK(!IsChild()); | |
330 | |
331 if (!FLAG_turbo_preprocess_ranges || IsEmpty() || HasNoSpillType() || | |
332 spill_operand.IsConstant() || spill_operand.IsImmediate()) { | |
333 return false; | |
334 } | |
335 | |
336 int count = 0; | |
337 for (const LiveRange* child = this; child != nullptr; child = child->next()) { | |
338 int first_instr = child->Start().ToInstructionIndex(); | |
339 | |
340 // If the range starts at instruction end, the first instruction index is | |
341 // the next one. | |
342 if (!child->Start().IsGapPosition() && !child->Start().IsStart()) { | |
343 ++first_instr; | |
344 } | |
345 | |
346 // We only look at where the range starts. It doesn't matter where it ends: | |
347 // if it ends past this block, then either there is a phi there already, | |
348 // or ResolveControlFlow will adapt the last instruction gap of this block | |
349 // as if there were a phi. In either case, data flow will be correct. | |
350 const InstructionBlock* block = code->GetInstructionBlock(first_instr); | |
351 | |
352 // If we have slot uses in a subrange, bail out, because we need the value | |
353 // on the stack before that use. | |
354 bool has_slot_use = child->NextSlotPosition(child->Start()) != nullptr; | |
355 if (!block->IsDeferred()) { | |
356 if (child->spilled() || has_slot_use) { | |
357 TRACE( | |
358 "Live Range %d must be spilled at definition: found a " | |
359 "slot-requiring non-deferred child range %d.\n", | |
360 TopLevel()->id(), child->id()); | |
361 return false; | |
362 } | |
363 } else { | |
364 if (child->spilled() || has_slot_use) ++count; | |
365 } | |
366 } | |
367 if (count == 0) return false; | |
368 | |
369 spill_start_index_ = -1; | |
370 spilled_in_deferred_block_ = true; | |
371 | |
372 TRACE("Live Range %d will be spilled only in deferred blocks.\n", id()); | |
373 // If we have ranges that aren't spilled but require the operand on the stack, | |
374 // make sure we insert the spill. | |
375 for (const LiveRange* child = this; child != nullptr; child = child->next()) { | |
376 if (!child->spilled() && | |
377 child->NextSlotPosition(child->Start()) != nullptr) { | |
378 auto instr = code->InstructionAt(child->Start().ToInstructionIndex()); | |
379 // Insert spill at the end to let live range connections happen at START. | |
380 auto move = | |
381 instr->GetOrCreateParallelMove(Instruction::END, code->zone()); | |
382 InstructionOperand assigned = child->GetAssignedOperand(); | |
383 if (TopLevel()->has_slot_use()) { | |
384 bool found = false; | |
385 for (auto move_op : *move) { | |
386 if (move_op->IsEliminated()) continue; | |
387 if (move_op->source().Equals(assigned) && | |
388 move_op->destination().Equals(spill_operand)) { | |
389 found = true; | |
390 break; | |
391 } | |
392 } | |
393 if (found) continue; | |
394 } | |
395 | |
396 move->AddMove(assigned, spill_operand); | |
397 } | |
398 } | |
399 | |
400 return true; | |
401 } | |
402 | |
403 | |
404 void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence, | |
405 const InstructionOperand& op, | |
406 bool might_be_duplicated) { | |
407 DCHECK_IMPLIES(op.IsConstant(), spills_at_definition_ == nullptr); | |
408 DCHECK(!IsChild()); | |
409 auto zone = sequence->zone(); | |
410 | |
411 for (auto to_spill = spills_at_definition_; to_spill != nullptr; | |
412 to_spill = to_spill->next) { | |
413 auto instr = sequence->InstructionAt(to_spill->gap_index); | |
414 auto move = instr->GetOrCreateParallelMove(Instruction::START, zone); | |
415 // Skip insertion if it's possible that the move exists already as a | |
416 // constraint move from a fixed output register to a slot. | |
417 if (might_be_duplicated) { | |
418 bool found = false; | |
419 for (auto move_op : *move) { | |
420 if (move_op->IsEliminated()) continue; | |
421 if (move_op->source().Equals(*to_spill->operand) && | |
422 move_op->destination().Equals(op)) { | |
423 found = true; | |
424 break; | |
425 } | |
426 } | |
427 if (found) continue; | |
428 } | |
429 move->AddMove(*to_spill->operand, op); | |
430 } | |
431 } | |
432 | |
433 | |
434 UsePosition* LiveRange::FirstHintPosition(int* register_index) const { | 304 UsePosition* LiveRange::FirstHintPosition(int* register_index) const { |
435 for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { | 305 for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { |
436 if (pos->HintRegister(register_index)) return pos; | 306 if (pos->HintRegister(register_index)) return pos; |
437 } | 307 } |
438 return nullptr; | 308 return nullptr; |
439 } | 309 } |
440 | 310 |
441 | 311 |
442 void LiveRange::SetSpillOperand(InstructionOperand* operand) { | |
443 DCHECK(HasNoSpillType()); | |
444 DCHECK(!operand->IsUnallocated() && !operand->IsImmediate()); | |
445 set_spill_type(SpillType::kSpillOperand); | |
446 spill_operand_ = operand; | |
447 } | |
448 | |
449 | |
450 void LiveRange::SetSpillRange(SpillRange* spill_range) { | |
451 DCHECK(!HasSpillOperand()); | |
452 DCHECK(spill_range); | |
453 DCHECK(!IsChild()); | |
454 spill_range_ = spill_range; | |
455 } | |
456 | |
457 | |
458 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const { | 312 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const { |
459 UsePosition* use_pos = last_processed_use_; | 313 UsePosition* use_pos = last_processed_use_; |
460 if (use_pos == nullptr || use_pos->pos() > start) { | 314 if (use_pos == nullptr || use_pos->pos() > start) { |
461 use_pos = first_pos(); | 315 use_pos = first_pos(); |
462 } | 316 } |
463 while (use_pos != nullptr && use_pos->pos() < start) { | 317 while (use_pos != nullptr && use_pos->pos() < start) { |
464 use_pos = use_pos->next(); | 318 use_pos = use_pos->next(); |
465 } | 319 } |
466 last_processed_use_ = use_pos; | 320 last_processed_use_ = use_pos; |
467 return use_pos; | 321 return use_pos; |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
511 | 365 |
512 bool LiveRange::CanBeSpilled(LifetimePosition pos) const { | 366 bool LiveRange::CanBeSpilled(LifetimePosition pos) const { |
513 // We cannot spill a live range that has a use requiring a register | 367 // We cannot spill a live range that has a use requiring a register |
514 // at the current or the immediate next position. | 368 // at the current or the immediate next position. |
515 auto use_pos = NextRegisterPosition(pos); | 369 auto use_pos = NextRegisterPosition(pos); |
516 if (use_pos == nullptr) return true; | 370 if (use_pos == nullptr) return true; |
517 return use_pos->pos() > pos.NextStart().End(); | 371 return use_pos->pos() > pos.NextStart().End(); |
518 } | 372 } |
519 | 373 |
520 | 374 |
| 375 bool LiveRange::IsTopLevel() const { return top_level_ == this; } |
| 376 |
| 377 |
// Materializes the operand this range currently lives in: a (double)
// register when one is assigned, otherwise the top level's explicit spill
// operand, or failing that its spill-range stack slot.
InstructionOperand LiveRange::GetAssignedOperand() const {
  if (HasRegisterAssigned()) {
    DCHECK(!spilled());
    switch (kind()) {
      case GENERAL_REGISTERS:
        return RegisterOperand(machine_type(), assigned_register());
      case DOUBLE_REGISTERS:
        return DoubleRegisterOperand(machine_type(), assigned_register());
    }
  }
  // No register: the range must have been spilled somewhere.
  DCHECK(spilled());
  DCHECK(!HasRegisterAssigned());
  if (TopLevel()->HasSpillOperand()) {
    auto op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
    return *op;
  }
  return TopLevel()->GetSpillRangeOperand();
}
540 | 397 |
541 | 398 |
542 AllocatedOperand LiveRange::GetSpillRangeOperand() const { | |
543 auto spill_range = GetSpillRange(); | |
544 int index = spill_range->assigned_slot(); | |
545 switch (kind()) { | |
546 case GENERAL_REGISTERS: | |
547 return StackSlotOperand(machine_type(), index); | |
548 case DOUBLE_REGISTERS: | |
549 return DoubleStackSlotOperand(machine_type(), index); | |
550 } | |
551 UNREACHABLE(); | |
552 return StackSlotOperand(kMachNone, 0); | |
553 } | |
554 | |
555 | |
// Returns the interval from which a search for |position| should start,
// using the cached current_interval_ when it does not lie past |position|.
// The cache is cleared (and the search restarts at the head) when the
// cached interval already starts after |position|.
UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == nullptr) return first_interval_;
  if (current_interval_->start() > position) {
    current_interval_ = nullptr;
    return first_interval_;
  }
  return current_interval_;
}
565 | 408 |
566 | 409 |
// Advances the cached search interval to |to_start_of| so later searches
// skip already-visited intervals, but only when doing so does not move the
// cache past |but_not_past| or backwards.
void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == nullptr) return;
  if (to_start_of->start() > but_not_past) return;
  auto start = current_interval_ == nullptr ? LifetimePosition::Invalid()
                                            : current_interval_->start();
  // Only move the marker forward.
  if (to_start_of->start() > start) {
    current_interval_ = to_start_of;
  }
}
577 | 420 |
578 | 421 |
// Splits this range at |position|: this range keeps [Start(), position) and
// a newly allocated child range takes over the remainder. The child is
// linked into the chain immediately after this range and returned.
LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
  int new_id = TopLevel()->GetNextChildId();
  LiveRange* child = new (zone) LiveRange(new_id, machine_type(), TopLevel());
  DetachAt(position, child, zone);

  // NOTE(review): the constructor above already recorded TopLevel() as the
  // child's top level, so this assignment looks redundant — confirm against
  // DetachAt before removing it.
  child->top_level_ = TopLevel();
  child->next_ = next_;
  next_ = child;

  return child;
}
| 433 |
| 434 |
| 435 void LiveRange::DetachAt(LifetimePosition position, LiveRange* result, |
| 436 Zone* zone) { |
581 DCHECK(Start() < position); | 437 DCHECK(Start() < position); |
582 DCHECK(result->IsEmpty()); | 438 DCHECK(result->IsEmpty()); |
583 // Find the last interval that ends before the position. If the | 439 // Find the last interval that ends before the position. If the |
584 // position is contained in one of the intervals in the chain, we | 440 // position is contained in one of the intervals in the chain, we |
585 // split that interval and use the first part. | 441 // split that interval and use the first part. |
586 auto current = FirstSearchIntervalForPosition(position); | 442 auto current = FirstSearchIntervalForPosition(position); |
587 | 443 |
588 // If the split position coincides with the beginning of a use interval | 444 // If the split position coincides with the beginning of a use interval |
589 // we need to split use positons in a special way. | 445 // we need to split use positons in a special way. |
590 bool split_at_start = false; | 446 bool split_at_start = false; |
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
645 } else { | 501 } else { |
646 first_pos_ = nullptr; | 502 first_pos_ = nullptr; |
647 } | 503 } |
648 result->first_pos_ = use_after; | 504 result->first_pos_ = use_after; |
649 | 505 |
650 // Discard cached iteration state. It might be pointing | 506 // Discard cached iteration state. It might be pointing |
651 // to the use that no longer belongs to this live range. | 507 // to the use that no longer belongs to this live range. |
652 last_processed_use_ = nullptr; | 508 last_processed_use_ = nullptr; |
653 current_interval_ = nullptr; | 509 current_interval_ = nullptr; |
654 | 510 |
655 // Link the new live range in the chain before any of the other | |
656 // ranges linked from the range before the split. | |
657 result->parent_ = (parent_ == nullptr) ? this : parent_; | |
658 result->next_ = next_; | |
659 next_ = result; | |
660 | |
661 // Invalidate size and weight of this range. The child range has them | 511 // Invalidate size and weight of this range. The child range has them |
662 // invalid at construction. | 512 // invalid at construction. |
663 size_ = kInvalidSize; | 513 size_ = kInvalidSize; |
664 weight_ = kInvalidWeight; | 514 weight_ = kInvalidWeight; |
665 #ifdef DEBUG | 515 #ifdef DEBUG |
666 Verify(); | 516 Verify(); |
667 result->Verify(); | 517 result->Verify(); |
668 #endif | 518 #endif |
669 } | 519 } |
670 | 520 |
671 | 521 |
// Appends |other| (a formerly independent top-level range, e.g. a splinter
// being merged back) after this range, repointing all of |other|'s children
// at this chain's top level and reconciling the spill-range bookkeeping.
void LiveRange::AppendAsChild(TopLevelLiveRange* other) {
  next_ = other;

  other->UpdateParentForAllChildren(TopLevel());
  TopLevel()->UpdateSpillRangePostMerge(other);
}
| 528 |
| 529 |
| 530 void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) { |
| 531 LiveRange* child = this; |
| 532 for (; child != nullptr; child = child->next()) { |
| 533 child->top_level_ = new_top_level; |
| 534 } |
| 535 } |
| 536 |
| 537 |
// Rewrites every recorded use position to a concrete operand: uses that
// require a stack slot get |spill_op|, all other uses get |op|.
void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
                                     const InstructionOperand& spill_op) {
  for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) {
    DCHECK(Start() <= pos->pos() && pos->pos() <= End());
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot());
        InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
        break;
      case UsePositionType::kRequiresRegister:
        // Register-requiring uses also receive |op| — verify it is a
        // register before falling through to the common replacement.
        DCHECK(op.IsRegister() || op.IsDoubleRegister());
      // Fall through.
      case UsePositionType::kAny:
        InstructionOperand::ReplaceWith(pos->operand(), &op);
        break;
    }
  }
}
| 557 |
| 558 |
// This implements an ordering on live ranges so that they are ordered by
// their start positions. This is needed for the correctness of the register
// allocation algorithm. If two live ranges start at the same offset then
// there is a tie breaker based on where the value is first used. This part
// of the ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start == other_start) {
    // Tie break: a range with no uses sorts after one with uses; otherwise
    // the earlier first use wins.
    UsePosition* pos = first_pos();
    if (pos == nullptr) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == nullptr) return true;
    return pos->pos() < other_pos->pos();
  }
  return start < other_start;
}
| 576 |
| 577 |
| 578 void LiveRange::SetUseHints(int register_index) { |
| 579 for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) { |
| 580 if (!pos->HasOperand()) continue; |
| 581 switch (pos->type()) { |
| 582 case UsePositionType::kRequiresSlot: |
| 583 break; |
| 584 case UsePositionType::kRequiresRegister: |
| 585 case UsePositionType::kAny: |
| 586 pos->set_assigned_register(register_index); |
| 587 break; |
| 588 } |
| 589 } |
| 590 } |
| 591 |
| 592 |
| 593 bool LiveRange::CanCover(LifetimePosition position) const { |
| 594 if (IsEmpty()) return false; |
| 595 return Start() <= position && position < End(); |
| 596 } |
| 597 |
| 598 |
// Returns whether |position| lies inside one of this range's use intervals.
// CanCover only checks the overall [Start, End) hull, which may contain
// lifetime holes; this walks the intervals to check precisely.
bool LiveRange::Covers(LifetimePosition position) const {
  if (!CanCover(position)) return false;
  auto start_search = FirstSearchIntervalForPosition(position);
  for (auto interval = start_search; interval != nullptr;
       interval = interval->next()) {
    // Intervals are sorted by start position.
    DCHECK(interval->next() == nullptr ||
           interval->next()->start() >= interval->start());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    // Sorted order: once an interval starts past |position|, no later
    // interval can contain it.
    if (interval->start() > position) return false;
  }
  return false;
}
| 612 |
| 613 |
// Returns the first position at which this range's intervals overlap
// |other|'s, or an invalid position when the ranges are disjoint. Walks
// both sorted interval lists in lockstep, advancing whichever starts first.
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
  auto b = other->first_interval();
  if (b == nullptr) return LifetimePosition::Invalid();
  auto advance_last_processed_up_to = b->start();
  auto a = FirstSearchIntervalForPosition(b->start());
  while (a != nullptr && b != nullptr) {
    // Past the other range's end — no intersection possible.
    if (a->start() > other->End()) break;
    if (b->start() > End()) break;
    auto cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start() < b->start()) {
      a = a->next();
      if (a == nullptr || a->start() > other->End()) break;
      // Remember progress in the cached search marker, but never past the
      // point we started searching from.
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}
| 636 |
| 637 |
| 638 unsigned LiveRange::GetSize() { |
| 639 if (size_ == kInvalidSize) { |
| 640 size_ = 0; |
| 641 for (auto interval = first_interval(); interval != nullptr; |
| 642 interval = interval->next()) { |
| 643 size_ += (interval->end().value() - interval->start().value()); |
| 644 } |
| 645 } |
| 646 |
| 647 return static_cast<unsigned>(size_); |
| 648 } |
| 649 |
| 650 |
// Zone-allocated singly linked list node recording a pending spill: a move
// from |operand| to be inserted in the gap at |gap_index|. Built up by
// SpillAtDefinition and consumed by CommitSpillsAtDefinition.
struct TopLevelLiveRange::SpillAtDefinitionList : ZoneObject {
  SpillAtDefinitionList(int gap_index, InstructionOperand* operand,
                        SpillAtDefinitionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;
  InstructionOperand* const operand;
  SpillAtDefinitionList* const next;
};
| 659 |
| 660 |
// Creates the head of a live-range chain for virtual register |vreg|. The
// base-class constructor receives relative_id 0 and `this` as its own top
// level; children later get ids from last_child_id_ (via GetNextChildId).
TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineType machine_type)
    : LiveRange(0, machine_type, this),
      vreg_(vreg),
      last_child_id_(0),
      splintered_from_(nullptr),
      spill_operand_(nullptr),
      spills_at_definition_(nullptr),
      spilled_in_deferred_blocks_(false),
      spill_start_index_(kMaxInt) {
  // The base constructor already encoded register/machine-type fields; only
  // the spill type needs to be added for top-level ranges.
  bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
}
| 672 |
| 673 |
// Records that this range should be spilled from |operand| in the gap at
// |gap_index|. Entries are prepended to the pending list; the moves are
// actually inserted later by CommitSpillsAtDefinition.
void TopLevelLiveRange::SpillAtDefinition(Zone* zone, int gap_index,
                                          InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  spills_at_definition_ = new (zone)
      SpillAtDefinitionList(gap_index, operand, spills_at_definition_);
}
| 680 |
| 681 |
// If every spill / slot use of this range's children happens inside deferred
// blocks, commit the spill moves there instead of at the definition, keeping
// the store off the hot path. Returns true when the deferred-spill scheme is
// adopted; false leaves the usual spill-at-definition behavior in place.
bool TopLevelLiveRange::TryCommitSpillInDeferredBlock(
    InstructionSequence* code, const InstructionOperand& spill_operand) {
  if (!FLAG_turbo_preprocess_ranges || IsEmpty() || HasNoSpillType() ||
      spill_operand.IsConstant() || spill_operand.IsImmediate()) {
    return false;
  }

  // First pass: verify no non-deferred child is spilled or needs a slot, and
  // count how many deferred children actually need a spill.
  int count = 0;
  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    int first_instr = child->Start().ToInstructionIndex();

    // If the range starts at instruction end, the first instruction index is
    // the next one.
    if (!child->Start().IsGapPosition() && !child->Start().IsStart()) {
      ++first_instr;
    }

    // We only look at where the range starts. It doesn't matter where it ends:
    // if it ends past this block, then either there is a phi there already,
    // or ResolveControlFlow will adapt the last instruction gap of this block
    // as if there were a phi. In either case, data flow will be correct.
    const InstructionBlock* block = code->GetInstructionBlock(first_instr);

    // If we have slot uses in a subrange, bail out, because we need the value
    // on the stack before that use.
    bool has_slot_use = child->NextSlotPosition(child->Start()) != nullptr;
    if (!block->IsDeferred()) {
      if (child->spilled() || has_slot_use) {
        TRACE(
            "Live Range %d must be spilled at definition: found a "
            "slot-requiring non-deferred child range %d.\n",
            TopLevel()->vreg(), child->relative_id());
        return false;
      }
    } else {
      if (child->spilled() || has_slot_use) ++count;
    }
  }
  // Nothing to spill anywhere: keep the default behavior.
  if (count == 0) return false;

  spill_start_index_ = -1;
  spilled_in_deferred_blocks_ = true;

  TRACE("Live Range %d will be spilled only in deferred blocks.\n", vreg());
  // If we have ranges that aren't spilled but require the operand on the stack,
  // make sure we insert the spill.
  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    if (!child->spilled() &&
        child->NextSlotPosition(child->Start()) != nullptr) {
      auto instr = code->InstructionAt(child->Start().ToInstructionIndex());
      // Insert spill at the end to let live range connections happen at START.
      auto move =
          instr->GetOrCreateParallelMove(Instruction::END, code->zone());
      InstructionOperand assigned = child->GetAssignedOperand();
      if (TopLevel()->has_slot_use()) {
        // An equivalent move may already exist (e.g. as a constraint move);
        // avoid inserting a duplicate.
        bool found = false;
        for (auto move_op : *move) {
          if (move_op->IsEliminated()) continue;
          if (move_op->source().Equals(assigned) &&
              move_op->destination().Equals(spill_operand)) {
            found = true;
            break;
          }
        }
        if (found) continue;
      }

      move->AddMove(assigned, spill_operand);
    }
  }

  return true;
}
| 755 |
| 756 |
// Inserts the spill moves recorded by SpillAtDefinition: for each pending
// entry, a move from the recorded operand to |op| is added in the START gap
// of the recorded instruction. When |might_be_duplicated| is set, an
// equivalent existing move suppresses the insertion.
void TopLevelLiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence,
                                                 const InstructionOperand& op,
                                                 bool might_be_duplicated) {
  // Constants are never spilled, so no spill list should exist for them.
  DCHECK_IMPLIES(op.IsConstant(), spills_at_definition_ == nullptr);
  auto zone = sequence->zone();

  for (auto to_spill = spills_at_definition_; to_spill != nullptr;
       to_spill = to_spill->next) {
    auto instr = sequence->InstructionAt(to_spill->gap_index);
    auto move = instr->GetOrCreateParallelMove(Instruction::START, zone);
    // Skip insertion if it's possible that the move exists already as a
    // constraint move from a fixed output register to a slot.
    if (might_be_duplicated) {
      bool found = false;
      for (auto move_op : *move) {
        if (move_op->IsEliminated()) continue;
        if (move_op->source().Equals(*to_spill->operand) &&
            move_op->destination().Equals(op)) {
          found = true;
          break;
        }
      }
      if (found) continue;
    }
    move->AddMove(*to_spill->operand, op);
  }
}
| 784 |
| 785 |
// Assigns a fixed spill operand to this range and marks the spill type
// accordingly. Only legal while no spill decision has been made yet.
void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  // Unallocated or immediate operands cannot serve as spill destinations.
  DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
  set_spill_type(SpillType::kSpillOperand);
  spill_operand_ = operand;
}
| 792 |
| 793 |
// Associates this range with a shared spill range; mutually exclusive with
// an explicit spill operand.
void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
  DCHECK(!HasSpillOperand());
  DCHECK(spill_range);
  spill_range_ = spill_range;
}
| 799 |
| 800 |
// Builds the stack-slot operand for this range's assigned spill slot, using
// the slot flavor that matches the register kind.
AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
  auto spill_range = GetSpillRange();
  int index = spill_range->assigned_slot();
  switch (kind()) {
    case GENERAL_REGISTERS:
      return StackSlotOperand(machine_type(), index);
    case DOUBLE_REGISTERS:
      return DoubleStackSlotOperand(machine_type(), index);
  }
  // All enum values are handled above; this only silences the compiler.
  UNREACHABLE();
  return StackSlotOperand(kMachNone, 0);
}
| 813 |
| 814 |
// Carves [start, end) out of this range into |result|, which becomes an
// independent top-level range (a "splinter"). Three cases: the splinter is
// a prefix of this range, a suffix, or interior — in the interior case the
// tail beyond |end| is detached again and re-attached to this range.
void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
                                 TopLevelLiveRange* result, Zone* zone) {
  DCHECK(start != Start() || end != End());
  DCHECK(start < end);

  result->set_spill_type(spill_type());

  if (start <= Start()) {
    // Prefix splinter: everything up to |end| moves into |result|.
    DCHECK(end < End());
    DetachAt(end, result, zone);
    next_ = nullptr;
  } else if (end >= End()) {
    // Suffix splinter: everything from |start| on moves into |result|.
    DCHECK(start > Start());
    DetachAt(start, result, zone);
    next_ = nullptr;
  } else {
    // Interior splinter: detach [start, End()) into |result|, then detach
    // [end, ...) out of |result| into a temporary and graft that tail back
    // onto this range.
    DCHECK(start < End() && Start() < end);

    const int kInvalidId = std::numeric_limits<int>::max();

    DetachAt(start, result, zone);

    LiveRange end_part(kInvalidId, this->machine_type(), nullptr);
    result->DetachAt(end, &end_part, zone);

    next_ = end_part.next_;
    last_interval_->set_next(end_part.first_interval_);
    last_interval_ = end_part.last_interval_;


    if (first_pos_ == nullptr) {
      first_pos_ = end_part.first_pos_;
    } else {
      // Append the tail's use positions after our last use position.
      UsePosition* pos = first_pos_;
      for (; pos->next() != nullptr; pos = pos->next()) {
      }
      pos->set_next(end_part.first_pos_);
    }
  }
  // The splinter stands alone: it is its own top level with no children yet.
  result->next_ = nullptr;
  result->top_level_ = result;

  result->SetSplinteredFrom(this);
}
716 | 859 |
717 | 860 |
// Records the range this splinter was carved from, and inherits its spill
// range when the splinter has no spill operand of its own.
void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
  // The splinter parent is always the original "Top".
  DCHECK(splinter_parent->Start() < Start());

  splintered_from_ = splinter_parent;
  if (!HasSpillOperand()) {
    SetSpillRange(splinter_parent->spill_range_);
  }
}
728 | 870 |
729 | 871 |
730 void LiveRange::AppendChild(LiveRange* other) { | 872 void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) { |
731 DCHECK(!other->IsChild()); | |
732 next_ = other; | |
733 | |
734 other->UpdateParentForAllChildren(TopLevel()); | |
735 TopLevel()->UpdateSpillRangePostMerge(other); | |
736 } | |
737 | |
738 | |
739 void LiveRange::UpdateSpillRangePostMerge(LiveRange* merged) { | |
740 DCHECK(!IsChild()); | |
741 DCHECK(merged->TopLevel() == this); | 873 DCHECK(merged->TopLevel() == this); |
742 | 874 |
743 if (HasNoSpillType() && merged->HasSpillRange()) { | 875 if (HasNoSpillType() && merged->HasSpillRange()) { |
744 set_spill_type(merged->spill_type()); | 876 set_spill_type(merged->spill_type()); |
745 DCHECK(GetSpillRange()->live_ranges().size() > 0); | 877 DCHECK(GetSpillRange()->live_ranges().size() > 0); |
746 merged->spill_range_ = nullptr; | 878 merged->spill_range_ = nullptr; |
747 merged->bits_ = SpillTypeField::update(merged->bits_, | 879 merged->bits_ = |
748 LiveRange::SpillType::kNoSpillType); | 880 SpillTypeField::update(merged->bits_, SpillType::kNoSpillType); |
749 } | 881 } |
750 } | 882 } |
751 | 883 |
752 | 884 |
753 void LiveRange::UpdateParentForAllChildren(LiveRange* new_parent) { | 885 LiveRange* TopLevelLiveRange::GetLastChild() { |
754 LiveRange* child = this; | |
755 for (; child != nullptr; child = child->next()) { | |
756 child->parent_ = new_parent; | |
757 } | |
758 } | |
759 | |
760 | |
761 LiveRange* LiveRange::GetLastChild() { | |
762 LiveRange* ret = this; | 886 LiveRange* ret = this; |
763 for (; ret->next() != nullptr; ret = ret->next()) { | 887 for (; ret->next() != nullptr; ret = ret->next()) { |
764 } | 888 } |
765 return ret; | 889 return ret; |
766 } | 890 } |
767 | 891 |
768 | 892 |
769 void LiveRange::Merge(LiveRange* other, RegisterAllocationData* data) { | 893 void TopLevelLiveRange::Merge(TopLevelLiveRange* other, |
770 DCHECK(!IsChild()); | 894 RegisterAllocationData* data) { |
771 DCHECK(!other->IsChild()); | |
772 DCHECK(Start() < other->Start()); | 895 DCHECK(Start() < other->Start()); |
773 | 896 |
| 897 data->live_ranges()[other->vreg()] = nullptr; |
| 898 |
774 LiveRange* last_other = other->GetLastChild(); | 899 LiveRange* last_other = other->GetLastChild(); |
775 LiveRange* last_me = GetLastChild(); | 900 LiveRange* last_me = GetLastChild(); |
776 | 901 |
777 // Simple case: we just append at the end. | 902 // Simple case: we just append at the end. |
778 if (last_me->End() <= other->Start()) return last_me->AppendChild(other); | 903 if (last_me->End() <= other->Start()) return last_me->AppendAsChild(other); |
779 | 904 |
780 DCHECK(last_me->End() > last_other->End()); | 905 DCHECK(last_me->End() > last_other->End()); |
781 | 906 |
782 // In the more general case, we need to find the ranges between which to | 907 // In the more general case, we need to find the ranges between which to |
783 // insert. | 908 // insert. |
784 LiveRange* insertion_point = this; | 909 LiveRange* insertion_point = this; |
785 for (; insertion_point->next() != nullptr && | 910 for (; insertion_point->next() != nullptr && |
786 insertion_point->next()->Start() <= other->Start(); | 911 insertion_point->next()->Start() <= other->Start(); |
787 insertion_point = insertion_point->next()) { | 912 insertion_point = insertion_point->next()) { |
788 } | 913 } |
789 | 914 |
790 // When we splintered the original range, we reconstituted the original range | 915 // When we splintered the original range, we reconstituted the original range |
791 // into one range without children, but with discontinuities. To merge the | 916 // into one range without children, but with discontinuities. To merge the |
792 // splinter back in, we need to split the range - or a child obtained after | 917 // splinter back in, we need to split the range - or a child obtained after |
793 // register allocation splitting. | 918 // register allocation splitting. |
794 LiveRange* after = insertion_point->next(); | 919 LiveRange* after = insertion_point->next(); |
795 if (insertion_point->End() > other->Start()) { | 920 if (insertion_point->End() > other->Start()) { |
796 LiveRange* new_after = data->NewChildRangeFor(insertion_point); | 921 LiveRange* new_after = |
797 insertion_point->SplitAt(other->Start(), new_after, | 922 insertion_point->SplitAt(other->Start(), data->allocation_zone()); |
798 data->allocation_zone()); | |
799 new_after->set_spilled(insertion_point->spilled()); | 923 new_after->set_spilled(insertion_point->spilled()); |
800 if (!new_after->spilled()) | 924 if (!new_after->spilled()) |
801 new_after->set_assigned_register(insertion_point->assigned_register()); | 925 new_after->set_assigned_register(insertion_point->assigned_register()); |
802 after = new_after; | 926 after = new_after; |
803 } | 927 } |
804 | 928 |
805 last_other->next_ = after; | 929 last_other->next_ = after; |
806 insertion_point->next_ = other; | 930 insertion_point->next_ = other; |
807 other->UpdateParentForAllChildren(TopLevel()); | 931 other->UpdateParentForAllChildren(TopLevel()); |
808 TopLevel()->UpdateSpillRangePostMerge(other); | 932 TopLevel()->UpdateSpillRangePostMerge(other); |
809 } | 933 } |
810 | 934 |
811 | 935 |
812 // This implements an ordering on live ranges so that they are ordered by their | 936 void TopLevelLiveRange::ShortenTo(LifetimePosition start) { |
813 // start positions. This is needed for the correctness of the register | 937 TRACE("Shorten live range %d to [%d\n", vreg(), start.value()); |
814 // allocation algorithm. If two live ranges start at the same offset then there | |
815 // is a tie breaker based on where the value is first used. This part of the | |
816 // ordering is merely a heuristic. | |
817 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const { | |
818 LifetimePosition start = Start(); | |
819 LifetimePosition other_start = other->Start(); | |
820 if (start == other_start) { | |
821 UsePosition* pos = first_pos(); | |
822 if (pos == nullptr) return false; | |
823 UsePosition* other_pos = other->first_pos(); | |
824 if (other_pos == nullptr) return true; | |
825 return pos->pos() < other_pos->pos(); | |
826 } | |
827 return start < other_start; | |
828 } | |
829 | |
830 | |
831 void LiveRange::ShortenTo(LifetimePosition start) { | |
832 TRACE("Shorten live range %d to [%d\n", id_, start.value()); | |
833 DCHECK(first_interval_ != nullptr); | 938 DCHECK(first_interval_ != nullptr); |
834 DCHECK(first_interval_->start() <= start); | 939 DCHECK(first_interval_->start() <= start); |
835 DCHECK(start < first_interval_->end()); | 940 DCHECK(start < first_interval_->end()); |
836 first_interval_->set_start(start); | 941 first_interval_->set_start(start); |
837 } | 942 } |
838 | 943 |
839 | 944 |
840 void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end, | 945 void TopLevelLiveRange::EnsureInterval(LifetimePosition start, |
841 Zone* zone) { | 946 LifetimePosition end, Zone* zone) { |
842 TRACE("Ensure live range %d in interval [%d %d[\n", id_, start.value(), | 947 TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(), |
843 end.value()); | 948 end.value()); |
844 auto new_end = end; | 949 auto new_end = end; |
845 while (first_interval_ != nullptr && first_interval_->start() <= end) { | 950 while (first_interval_ != nullptr && first_interval_->start() <= end) { |
846 if (first_interval_->end() > end) { | 951 if (first_interval_->end() > end) { |
847 new_end = first_interval_->end(); | 952 new_end = first_interval_->end(); |
848 } | 953 } |
849 first_interval_ = first_interval_->next(); | 954 first_interval_ = first_interval_->next(); |
850 } | 955 } |
851 | 956 |
852 auto new_interval = new (zone) UseInterval(start, new_end); | 957 auto new_interval = new (zone) UseInterval(start, new_end); |
853 new_interval->set_next(first_interval_); | 958 new_interval->set_next(first_interval_); |
854 first_interval_ = new_interval; | 959 first_interval_ = new_interval; |
855 if (new_interval->next() == nullptr) { | 960 if (new_interval->next() == nullptr) { |
856 last_interval_ = new_interval; | 961 last_interval_ = new_interval; |
857 } | 962 } |
858 } | 963 } |
859 | 964 |
860 | 965 |
861 void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end, | 966 void TopLevelLiveRange::AddUseInterval(LifetimePosition start, |
862 Zone* zone) { | 967 LifetimePosition end, Zone* zone) { |
863 TRACE("Add to live range %d interval [%d %d[\n", id_, start.value(), | 968 TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(), |
864 end.value()); | 969 end.value()); |
865 if (first_interval_ == nullptr) { | 970 if (first_interval_ == nullptr) { |
866 auto interval = new (zone) UseInterval(start, end); | 971 auto interval = new (zone) UseInterval(start, end); |
867 first_interval_ = interval; | 972 first_interval_ = interval; |
868 last_interval_ = interval; | 973 last_interval_ = interval; |
869 } else { | 974 } else { |
870 if (end == first_interval_->start()) { | 975 if (end == first_interval_->start()) { |
871 first_interval_->set_start(start); | 976 first_interval_->set_start(start); |
872 } else if (end < first_interval_->start()) { | 977 } else if (end < first_interval_->start()) { |
873 auto interval = new (zone) UseInterval(start, end); | 978 auto interval = new (zone) UseInterval(start, end); |
874 interval->set_next(first_interval_); | 979 interval->set_next(first_interval_); |
875 first_interval_ = interval; | 980 first_interval_ = interval; |
876 } else { | 981 } else { |
877 // Order of instruction's processing (see ProcessInstructions) guarantees | 982 // Order of instruction's processing (see ProcessInstructions) guarantees |
878 // that each new use interval either precedes or intersects with | 983 // that each new use interval either precedes or intersects with |
879 // last added interval. | 984 // last added interval. |
880 DCHECK(start < first_interval_->end()); | 985 DCHECK(start < first_interval_->end()); |
881 first_interval_->set_start(Min(start, first_interval_->start())); | 986 first_interval_->set_start(Min(start, first_interval_->start())); |
882 first_interval_->set_end(Max(end, first_interval_->end())); | 987 first_interval_->set_end(Max(end, first_interval_->end())); |
883 } | 988 } |
884 } | 989 } |
885 } | 990 } |
886 | 991 |
887 | 992 |
888 void LiveRange::AddUsePosition(UsePosition* use_pos) { | 993 void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) { |
889 auto pos = use_pos->pos(); | 994 auto pos = use_pos->pos(); |
890 TRACE("Add to live range %d use position %d\n", id_, pos.value()); | 995 TRACE("Add to live range %d use position %d\n", vreg(), pos.value()); |
891 UsePosition* prev_hint = nullptr; | 996 UsePosition* prev_hint = nullptr; |
892 UsePosition* prev = nullptr; | 997 UsePosition* prev = nullptr; |
893 auto current = first_pos_; | 998 auto current = first_pos_; |
894 while (current != nullptr && current->pos() < pos) { | 999 while (current != nullptr && current->pos() < pos) { |
895 prev_hint = current->HasHint() ? current : prev_hint; | 1000 prev_hint = current->HasHint() ? current : prev_hint; |
896 prev = current; | 1001 prev = current; |
897 current = current->next(); | 1002 current = current->next(); |
898 } | 1003 } |
899 | 1004 |
900 if (prev == nullptr) { | 1005 if (prev == nullptr) { |
901 use_pos->set_next(first_pos_); | 1006 use_pos->set_next(first_pos_); |
902 first_pos_ = use_pos; | 1007 first_pos_ = use_pos; |
903 } else { | 1008 } else { |
904 use_pos->set_next(prev->next()); | 1009 use_pos->set_next(prev->next()); |
905 prev->set_next(use_pos); | 1010 prev->set_next(use_pos); |
906 } | 1011 } |
907 | 1012 |
908 if (prev_hint == nullptr && use_pos->HasHint()) { | 1013 if (prev_hint == nullptr && use_pos->HasHint()) { |
909 current_hint_position_ = use_pos; | 1014 current_hint_position_ = use_pos; |
910 } | 1015 } |
911 } | 1016 } |
912 | 1017 |
913 | 1018 |
914 void LiveRange::ConvertUsesToOperand(const InstructionOperand& op, | |
915 const InstructionOperand& spill_op) { | |
916 for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) { | |
917 DCHECK(Start() <= pos->pos() && pos->pos() <= End()); | |
918 if (!pos->HasOperand()) continue; | |
919 switch (pos->type()) { | |
920 case UsePositionType::kRequiresSlot: | |
921 DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot()); | |
922 InstructionOperand::ReplaceWith(pos->operand(), &spill_op); | |
923 break; | |
924 case UsePositionType::kRequiresRegister: | |
925 DCHECK(op.IsRegister() || op.IsDoubleRegister()); | |
926 // Fall through. | |
927 case UsePositionType::kAny: | |
928 InstructionOperand::ReplaceWith(pos->operand(), &op); | |
929 break; | |
930 } | |
931 } | |
932 } | |
933 | |
934 | |
935 void LiveRange::SetUseHints(int register_index) { | |
936 for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) { | |
937 if (!pos->HasOperand()) continue; | |
938 switch (pos->type()) { | |
939 case UsePositionType::kRequiresSlot: | |
940 break; | |
941 case UsePositionType::kRequiresRegister: | |
942 case UsePositionType::kAny: | |
943 pos->set_assigned_register(register_index); | |
944 break; | |
945 } | |
946 } | |
947 } | |
948 | |
949 | |
950 bool LiveRange::CanCover(LifetimePosition position) const { | |
951 if (IsEmpty()) return false; | |
952 return Start() <= position && position < End(); | |
953 } | |
954 | |
955 | |
956 bool LiveRange::Covers(LifetimePosition position) const { | |
957 if (!CanCover(position)) return false; | |
958 auto start_search = FirstSearchIntervalForPosition(position); | |
959 for (auto interval = start_search; interval != nullptr; | |
960 interval = interval->next()) { | |
961 DCHECK(interval->next() == nullptr || | |
962 interval->next()->start() >= interval->start()); | |
963 AdvanceLastProcessedMarker(interval, position); | |
964 if (interval->Contains(position)) return true; | |
965 if (interval->start() > position) return false; | |
966 } | |
967 return false; | |
968 } | |
969 | |
970 | |
971 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const { | |
972 auto b = other->first_interval(); | |
973 if (b == nullptr) return LifetimePosition::Invalid(); | |
974 auto advance_last_processed_up_to = b->start(); | |
975 auto a = FirstSearchIntervalForPosition(b->start()); | |
976 while (a != nullptr && b != nullptr) { | |
977 if (a->start() > other->End()) break; | |
978 if (b->start() > End()) break; | |
979 auto cur_intersection = a->Intersect(b); | |
980 if (cur_intersection.IsValid()) { | |
981 return cur_intersection; | |
982 } | |
983 if (a->start() < b->start()) { | |
984 a = a->next(); | |
985 if (a == nullptr || a->start() > other->End()) break; | |
986 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); | |
987 } else { | |
988 b = b->next(); | |
989 } | |
990 } | |
991 return LifetimePosition::Invalid(); | |
992 } | |
993 | |
994 | |
995 unsigned LiveRange::GetSize() { | |
996 if (size_ == kInvalidSize) { | |
997 size_ = 0; | |
998 for (auto interval = first_interval(); interval != nullptr; | |
999 interval = interval->next()) { | |
1000 size_ += (interval->end().value() - interval->start().value()); | |
1001 } | |
1002 } | |
1003 | |
1004 return static_cast<unsigned>(size_); | |
1005 } | |
1006 | |
1007 | |
1008 static bool AreUseIntervalsIntersecting(UseInterval* interval1, | 1019 static bool AreUseIntervalsIntersecting(UseInterval* interval1, |
1009 UseInterval* interval2) { | 1020 UseInterval* interval2) { |
1010 while (interval1 != nullptr && interval2 != nullptr) { | 1021 while (interval1 != nullptr && interval2 != nullptr) { |
1011 if (interval1->start() < interval2->start()) { | 1022 if (interval1->start() < interval2->start()) { |
1012 if (interval1->end() > interval2->start()) { | 1023 if (interval1->end() > interval2->start()) { |
1013 return true; | 1024 return true; |
1014 } | 1025 } |
1015 interval1 = interval1->next(); | 1026 interval1 = interval1->next(); |
1016 } else { | 1027 } else { |
1017 if (interval2->end() > interval1->start()) { | 1028 if (interval2->end() > interval1->start()) { |
1018 return true; | 1029 return true; |
1019 } | 1030 } |
1020 interval2 = interval2->next(); | 1031 interval2 = interval2->next(); |
1021 } | 1032 } |
1022 } | 1033 } |
1023 return false; | 1034 return false; |
1024 } | 1035 } |
1025 | 1036 |
1026 | 1037 |
1027 std::ostream& operator<<(std::ostream& os, | 1038 std::ostream& operator<<(std::ostream& os, |
1028 const PrintableLiveRange& printable_range) { | 1039 const PrintableLiveRange& printable_range) { |
1029 const LiveRange* range = printable_range.range_; | 1040 const LiveRange* range = printable_range.range_; |
1030 os << "Range: " << range->id() << " "; | 1041 os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id() |
1031 if (range->is_phi()) os << "phi "; | 1042 << " "; |
1032 if (range->is_non_loop_phi()) os << "nlphi "; | 1043 if (range->TopLevel()->is_phi()) os << "phi "; |
| 1044 if (range->TopLevel()->is_non_loop_phi()) os << "nlphi "; |
1033 | 1045 |
1034 os << "{" << std::endl; | 1046 os << "{" << std::endl; |
1035 auto interval = range->first_interval(); | 1047 auto interval = range->first_interval(); |
1036 auto use_pos = range->first_pos(); | 1048 auto use_pos = range->first_pos(); |
1037 PrintableInstructionOperand pio; | 1049 PrintableInstructionOperand pio; |
1038 pio.register_configuration_ = printable_range.register_configuration_; | 1050 pio.register_configuration_ = printable_range.register_configuration_; |
1039 while (use_pos != nullptr) { | 1051 while (use_pos != nullptr) { |
1040 pio.op_ = *use_pos->operand(); | 1052 pio.op_ = *use_pos->operand(); |
1041 os << pio << use_pos->pos() << " "; | 1053 os << pio << use_pos->pos() << " "; |
1042 use_pos = use_pos->next(); | 1054 use_pos = use_pos->next(); |
1043 } | 1055 } |
1044 os << std::endl; | 1056 os << std::endl; |
1045 | 1057 |
1046 while (interval != nullptr) { | 1058 while (interval != nullptr) { |
1047 os << '[' << interval->start() << ", " << interval->end() << ')' | 1059 os << '[' << interval->start() << ", " << interval->end() << ')' |
1048 << std::endl; | 1060 << std::endl; |
1049 interval = interval->next(); | 1061 interval = interval->next(); |
1050 } | 1062 } |
1051 os << "}"; | 1063 os << "}"; |
1052 return os; | 1064 return os; |
1053 } | 1065 } |
1054 | 1066 |
1055 | 1067 |
1056 SpillRange::SpillRange(LiveRange* parent, Zone* zone) | 1068 SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone) |
1057 : live_ranges_(zone), | 1069 : live_ranges_(zone), |
1058 assigned_slot_(kUnassignedSlot), | 1070 assigned_slot_(kUnassignedSlot), |
1059 byte_width_(GetByteWidth(parent->machine_type())), | 1071 byte_width_(GetByteWidth(parent->machine_type())), |
1060 kind_(parent->kind()) { | 1072 kind_(parent->kind()) { |
1061 // Spill ranges are created for top level, non-splintered ranges. This is so | 1073 // Spill ranges are created for top level, non-splintered ranges. This is so |
1062 // that, when merging decisions are made, we consider the full extent of the | 1074 // that, when merging decisions are made, we consider the full extent of the |
1063 // virtual register, and avoid clobbering it. | 1075 // virtual register, and avoid clobbering it. |
1064 DCHECK(!parent->IsChild()); | |
1065 DCHECK(!parent->IsSplinter()); | 1076 DCHECK(!parent->IsSplinter()); |
1066 UseInterval* result = nullptr; | 1077 UseInterval* result = nullptr; |
1067 UseInterval* node = nullptr; | 1078 UseInterval* node = nullptr; |
1068 // Copy the intervals for all ranges. | 1079 // Copy the intervals for all ranges. |
1069 for (auto range = parent; range != nullptr; range = range->next()) { | 1080 for (LiveRange* range = parent; range != nullptr; range = range->next()) { |
1070 auto src = range->first_interval(); | 1081 auto src = range->first_interval(); |
1071 while (src != nullptr) { | 1082 while (src != nullptr) { |
1072 auto new_node = new (zone) UseInterval(src->start(), src->end()); | 1083 auto new_node = new (zone) UseInterval(src->start(), src->end()); |
1073 if (result == nullptr) { | 1084 if (result == nullptr) { |
1074 result = new_node; | 1085 result = new_node; |
1075 } else { | 1086 } else { |
1076 node->set_next(new_node); | 1087 node->set_next(new_node); |
1077 } | 1088 } |
1078 node = new_node; | 1089 node = new_node; |
1079 src = src->next(); | 1090 src = src->next(); |
(...skipping 140 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1220 return moves->AddMove(from, to); | 1231 return moves->AddMove(from, to); |
1221 } | 1232 } |
1222 | 1233 |
1223 | 1234 |
1224 MachineType RegisterAllocationData::MachineTypeFor(int virtual_register) { | 1235 MachineType RegisterAllocationData::MachineTypeFor(int virtual_register) { |
1225 DCHECK_LT(virtual_register, code()->VirtualRegisterCount()); | 1236 DCHECK_LT(virtual_register, code()->VirtualRegisterCount()); |
1226 return code()->GetRepresentation(virtual_register); | 1237 return code()->GetRepresentation(virtual_register); |
1227 } | 1238 } |
1228 | 1239 |
1229 | 1240 |
1230 LiveRange* RegisterAllocationData::LiveRangeFor(int index) { | 1241 TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) { |
1231 if (index >= static_cast<int>(live_ranges().size())) { | 1242 if (index >= static_cast<int>(live_ranges().size())) { |
1232 live_ranges().resize(index + 1, nullptr); | 1243 live_ranges().resize(index + 1, nullptr); |
1233 } | 1244 } |
1234 auto result = live_ranges()[index]; | 1245 auto result = live_ranges()[index]; |
1235 if (result == nullptr) { | 1246 if (result == nullptr) { |
1236 result = NewLiveRange(index, MachineTypeFor(index)); | 1247 result = NewLiveRange(index, MachineTypeFor(index)); |
1237 live_ranges()[index] = result; | 1248 live_ranges()[index] = result; |
1238 } | 1249 } |
1239 return result; | 1250 return result; |
1240 } | 1251 } |
1241 | 1252 |
1242 | 1253 |
1243 LiveRange* RegisterAllocationData::NewLiveRange(int index, | 1254 TopLevelLiveRange* RegisterAllocationData::NewLiveRange( |
1244 MachineType machine_type) { | 1255 int index, MachineType machine_type) { |
1245 return new (allocation_zone()) LiveRange(index, machine_type); | 1256 return new (allocation_zone()) TopLevelLiveRange(index, machine_type); |
1246 } | 1257 } |
1247 | 1258 |
1248 | 1259 |
1249 LiveRange* RegisterAllocationData::NextLiveRange(MachineType machine_type) { | 1260 int RegisterAllocationData::GetNextLiveRangeId() { |
1250 int vreg = virtual_register_count_++; | 1261 int vreg = virtual_register_count_++; |
1251 if (vreg >= static_cast<int>(live_ranges().size())) { | 1262 if (vreg >= static_cast<int>(live_ranges().size())) { |
1252 live_ranges().resize(vreg + 1, nullptr); | 1263 live_ranges().resize(vreg + 1, nullptr); |
1253 } | 1264 } |
1254 LiveRange* ret = NewLiveRange(vreg, machine_type); | 1265 return vreg; |
| 1266 } |
| 1267 |
| 1268 |
| 1269 TopLevelLiveRange* RegisterAllocationData::NextLiveRange( |
| 1270 MachineType machine_type) { |
| 1271 int vreg = GetNextLiveRangeId(); |
| 1272 TopLevelLiveRange* ret = NewLiveRange(vreg, machine_type); |
1255 return ret; | 1273 return ret; |
1256 } | 1274 } |
1257 | 1275 |
1258 | 1276 |
1259 LiveRange* RegisterAllocationData::NewChildRangeFor(LiveRange* range) { | |
1260 auto child = NextLiveRange(range->machine_type()); | |
1261 DCHECK_NULL(live_ranges()[child->id()]); | |
1262 live_ranges()[child->id()] = child; | |
1263 return child; | |
1264 } | |
1265 | |
1266 | |
1267 RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap( | 1277 RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap( |
1268 const InstructionBlock* block, PhiInstruction* phi) { | 1278 const InstructionBlock* block, PhiInstruction* phi) { |
1269 auto map_value = new (allocation_zone()) | 1279 auto map_value = new (allocation_zone()) |
1270 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone()); | 1280 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone()); |
1271 auto res = | 1281 auto res = |
1272 phi_map_.insert(std::make_pair(phi->virtual_register(), map_value)); | 1282 phi_map_.insert(std::make_pair(phi->virtual_register(), map_value)); |
1273 DCHECK(res.second); | 1283 DCHECK(res.second); |
1274 USE(res); | 1284 USE(res); |
1275 return map_value; | 1285 return map_value; |
1276 } | 1286 } |
1277 | 1287 |
1278 | 1288 |
1279 RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor( | 1289 RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor( |
1280 int virtual_register) { | 1290 int virtual_register) { |
1281 auto it = phi_map_.find(virtual_register); | 1291 auto it = phi_map_.find(virtual_register); |
1282 DCHECK(it != phi_map_.end()); | 1292 DCHECK(it != phi_map_.end()); |
1283 return it->second; | 1293 return it->second; |
1284 } | 1294 } |
1285 | 1295 |
1286 | 1296 |
| 1297 RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor( |
| 1298 TopLevelLiveRange* top_range) { |
| 1299 return GetPhiMapValueFor(top_range->vreg()); |
| 1300 } |
| 1301 |
| 1302 |
1287 bool RegisterAllocationData::ExistsUseWithoutDefinition() { | 1303 bool RegisterAllocationData::ExistsUseWithoutDefinition() { |
1288 bool found = false; | 1304 bool found = false; |
1289 BitVector::Iterator iterator(live_in_sets()[0]); | 1305 BitVector::Iterator iterator(live_in_sets()[0]); |
1290 while (!iterator.Done()) { | 1306 while (!iterator.Done()) { |
1291 found = true; | 1307 found = true; |
1292 int operand_index = iterator.Current(); | 1308 int operand_index = iterator.Current(); |
1293 PrintF("Register allocator error: live v%d reached first block.\n", | 1309 PrintF("Register allocator error: live v%d reached first block.\n", |
1294 operand_index); | 1310 operand_index); |
1295 LiveRange* range = LiveRangeFor(operand_index); | 1311 LiveRange* range = GetOrCreateLiveRangeFor(operand_index); |
1296 PrintF(" (first use is at %d)\n", range->first_pos()->pos().value()); | 1312 PrintF(" (first use is at %d)\n", range->first_pos()->pos().value()); |
1297 if (debug_name() == nullptr) { | 1313 if (debug_name() == nullptr) { |
1298 PrintF("\n"); | 1314 PrintF("\n"); |
1299 } else { | 1315 } else { |
1300 PrintF(" (function: %s)\n", debug_name()); | 1316 PrintF(" (function: %s)\n", debug_name()); |
1301 } | 1317 } |
1302 iterator.Advance(); | 1318 iterator.Advance(); |
1303 } | 1319 } |
1304 return found; | 1320 return found; |
1305 } | 1321 } |
1306 | 1322 |
1307 | 1323 |
1308 SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange( | 1324 SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange( |
1309 LiveRange* range) { | 1325 TopLevelLiveRange* range) { |
1310 DCHECK(!range->IsChild()); | |
1311 DCHECK(!range->HasSpillOperand()); | 1326 DCHECK(!range->HasSpillOperand()); |
1312 | 1327 |
1313 SpillRange* spill_range = range->GetAllocatedSpillRange(); | 1328 SpillRange* spill_range = range->GetAllocatedSpillRange(); |
1314 if (spill_range == nullptr) { | 1329 if (spill_range == nullptr) { |
1315 DCHECK(!range->IsSplinter()); | 1330 DCHECK(!range->IsSplinter()); |
1316 spill_range = new (allocation_zone()) SpillRange(range, allocation_zone()); | 1331 spill_range = new (allocation_zone()) SpillRange(range, allocation_zone()); |
1317 } | 1332 } |
1318 range->set_spill_type(LiveRange::SpillType::kSpillRange); | 1333 range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange); |
1319 | 1334 |
1320 spill_ranges().insert(spill_range); | 1335 spill_ranges().insert(spill_range); |
1321 return spill_range; | 1336 return spill_range; |
1322 } | 1337 } |
1323 | 1338 |
1324 | 1339 |
1325 SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange( | 1340 SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange( |
1326 LiveRange* range) { | 1341 TopLevelLiveRange* range) { |
1327 DCHECK(!range->HasSpillOperand()); | 1342 DCHECK(!range->HasSpillOperand()); |
1328 DCHECK(!range->IsChild()); | |
1329 DCHECK(!range->IsSplinter()); | 1343 DCHECK(!range->IsSplinter()); |
1330 auto spill_range = | 1344 auto spill_range = |
1331 new (allocation_zone()) SpillRange(range, allocation_zone()); | 1345 new (allocation_zone()) SpillRange(range, allocation_zone()); |
1332 return spill_range; | 1346 return spill_range; |
1333 } | 1347 } |
1334 | 1348 |
1335 | 1349 |
1336 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) { | 1350 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) { |
1337 if (kind == DOUBLE_REGISTERS) { | 1351 if (kind == DOUBLE_REGISTERS) { |
1338 assigned_double_registers_->Add(index); | 1352 assigned_double_registers_->Add(index); |
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1397 wrapper.op_ = move->destination(); | 1411 wrapper.op_ = move->destination(); |
1398 os << wrapper << " = "; | 1412 os << wrapper << " = "; |
1399 wrapper.op_ = move->source(); | 1413 wrapper.op_ = move->source(); |
1400 os << wrapper << std::endl; | 1414 os << wrapper << std::endl; |
1401 } | 1415 } |
1402 | 1416 |
1403 | 1417 |
1404 void RegisterAllocationData::Print(const SpillRange* spill_range) { | 1418 void RegisterAllocationData::Print(const SpillRange* spill_range) { |
1405 OFStream os(stdout); | 1419 OFStream os(stdout); |
1406 os << "{" << std::endl; | 1420 os << "{" << std::endl; |
1407 for (LiveRange* range : spill_range->live_ranges()) { | 1421 for (TopLevelLiveRange* range : spill_range->live_ranges()) { |
1408 os << range->id() << " "; | 1422 os << range->vreg() << " "; |
1409 } | 1423 } |
1410 os << std::endl; | 1424 os << std::endl; |
1411 | 1425 |
1412 for (UseInterval* interval = spill_range->interval(); interval != nullptr; | 1426 for (UseInterval* interval = spill_range->interval(); interval != nullptr; |
1413 interval = interval->next()) { | 1427 interval = interval->next()) { |
1414 os << '[' << interval->start() << ", " << interval->end() << ')' | 1428 os << '[' << interval->start() << ", " << interval->end() << ')' |
1415 << std::endl; | 1429 << std::endl; |
1416 } | 1430 } |
1417 os << "}" << std::endl; | 1431 os << "}" << std::endl; |
1418 } | 1432 } |
(...skipping 25 matching lines...) Expand all Loading... |
1444 } else if (operand->HasFixedDoubleRegisterPolicy()) { | 1458 } else if (operand->HasFixedDoubleRegisterPolicy()) { |
1445 DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register); | 1459 DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register); |
1446 allocated = AllocatedOperand(AllocatedOperand::DOUBLE_REGISTER, | 1460 allocated = AllocatedOperand(AllocatedOperand::DOUBLE_REGISTER, |
1447 machine_type, operand->fixed_register_index()); | 1461 machine_type, operand->fixed_register_index()); |
1448 } else { | 1462 } else { |
1449 UNREACHABLE(); | 1463 UNREACHABLE(); |
1450 } | 1464 } |
1451 InstructionOperand::ReplaceWith(operand, &allocated); | 1465 InstructionOperand::ReplaceWith(operand, &allocated); |
1452 if (is_tagged) { | 1466 if (is_tagged) { |
1453 TRACE("Fixed reg is tagged at %d\n", pos); | 1467 TRACE("Fixed reg is tagged at %d\n", pos); |
1454 auto instr = InstructionAt(pos); | 1468 auto instr = code()->InstructionAt(pos); |
1455 if (instr->HasReferenceMap()) { | 1469 if (instr->HasReferenceMap()) { |
1456 instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand)); | 1470 instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand)); |
1457 } | 1471 } |
1458 } | 1472 } |
1459 return operand; | 1473 return operand; |
1460 } | 1474 } |
1461 | 1475 |
1462 | 1476 |
1463 void ConstraintBuilder::MeetRegisterConstraints() { | 1477 void ConstraintBuilder::MeetRegisterConstraints() { |
1464 for (auto block : code()->instruction_blocks()) { | 1478 for (auto block : code()->instruction_blocks()) { |
(...skipping 11 matching lines...) Expand all Loading... |
1476 if (i != end) MeetConstraintsAfter(i); | 1490 if (i != end) MeetConstraintsAfter(i); |
1477 } | 1491 } |
1478 // Meet register constraints for the instruction in the end. | 1492 // Meet register constraints for the instruction in the end. |
1479 MeetRegisterConstraintsForLastInstructionInBlock(block); | 1493 MeetRegisterConstraintsForLastInstructionInBlock(block); |
1480 } | 1494 } |
1481 | 1495 |
1482 | 1496 |
1483 void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock( | 1497 void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock( |
1484 const InstructionBlock* block) { | 1498 const InstructionBlock* block) { |
1485 int end = block->last_instruction_index(); | 1499 int end = block->last_instruction_index(); |
1486 auto last_instruction = InstructionAt(end); | 1500 auto last_instruction = code()->InstructionAt(end); |
1487 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { | 1501 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { |
1488 auto output_operand = last_instruction->OutputAt(i); | 1502 auto output_operand = last_instruction->OutputAt(i); |
1489 DCHECK(!output_operand->IsConstant()); | 1503 DCHECK(!output_operand->IsConstant()); |
1490 auto output = UnallocatedOperand::cast(output_operand); | 1504 auto output = UnallocatedOperand::cast(output_operand); |
1491 int output_vreg = output->virtual_register(); | 1505 int output_vreg = output->virtual_register(); |
1492 auto range = LiveRangeFor(output_vreg); | 1506 auto range = data()->GetOrCreateLiveRangeFor(output_vreg); |
1493 bool assigned = false; | 1507 bool assigned = false; |
1494 if (output->HasFixedPolicy()) { | 1508 if (output->HasFixedPolicy()) { |
1495 AllocateFixed(output, -1, false); | 1509 AllocateFixed(output, -1, false); |
1496 // This value is produced on the stack, we never need to spill it. | 1510 // This value is produced on the stack, we never need to spill it. |
1497 if (output->IsStackSlot()) { | 1511 if (output->IsStackSlot()) { |
1498 DCHECK(StackSlotOperand::cast(output)->index() < | 1512 DCHECK(StackSlotOperand::cast(output)->index() < |
1499 data()->frame()->GetSpillSlotCount()); | 1513 data()->frame()->GetSpillSlotCount()); |
1500 range->SetSpillOperand(StackSlotOperand::cast(output)); | 1514 range->SetSpillOperand(StackSlotOperand::cast(output)); |
1501 range->SetSpillStartIndex(end); | 1515 range->SetSpillStartIndex(end); |
1502 assigned = true; | 1516 assigned = true; |
(...skipping 17 matching lines...) Expand all Loading... |
1520 int gap_index = successor->first_instruction_index(); | 1534 int gap_index = successor->first_instruction_index(); |
1521 range->SpillAtDefinition(allocation_zone(), gap_index, output); | 1535 range->SpillAtDefinition(allocation_zone(), gap_index, output); |
1522 range->SetSpillStartIndex(gap_index); | 1536 range->SetSpillStartIndex(gap_index); |
1523 } | 1537 } |
1524 } | 1538 } |
1525 } | 1539 } |
1526 } | 1540 } |
1527 | 1541 |
1528 | 1542 |
1529 void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { | 1543 void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { |
1530 auto first = InstructionAt(instr_index); | 1544 auto first = code()->InstructionAt(instr_index); |
1531 // Handle fixed temporaries. | 1545 // Handle fixed temporaries. |
1532 for (size_t i = 0; i < first->TempCount(); i++) { | 1546 for (size_t i = 0; i < first->TempCount(); i++) { |
1533 auto temp = UnallocatedOperand::cast(first->TempAt(i)); | 1547 auto temp = UnallocatedOperand::cast(first->TempAt(i)); |
1534 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false); | 1548 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false); |
1535 } | 1549 } |
1536 // Handle constant/fixed output operands. | 1550 // Handle constant/fixed output operands. |
1537 for (size_t i = 0; i < first->OutputCount(); i++) { | 1551 for (size_t i = 0; i < first->OutputCount(); i++) { |
1538 InstructionOperand* output = first->OutputAt(i); | 1552 InstructionOperand* output = first->OutputAt(i); |
1539 if (output->IsConstant()) { | 1553 if (output->IsConstant()) { |
1540 int output_vreg = ConstantOperand::cast(output)->virtual_register(); | 1554 int output_vreg = ConstantOperand::cast(output)->virtual_register(); |
1541 auto range = LiveRangeFor(output_vreg); | 1555 auto range = data()->GetOrCreateLiveRangeFor(output_vreg); |
1542 range->SetSpillStartIndex(instr_index + 1); | 1556 range->SetSpillStartIndex(instr_index + 1); |
1543 range->SetSpillOperand(output); | 1557 range->SetSpillOperand(output); |
1544 continue; | 1558 continue; |
1545 } | 1559 } |
1546 auto first_output = UnallocatedOperand::cast(output); | 1560 auto first_output = UnallocatedOperand::cast(output); |
1547 auto range = LiveRangeFor(first_output->virtual_register()); | 1561 auto range = |
| 1562 data()->GetOrCreateLiveRangeFor(first_output->virtual_register()); |
1548 bool assigned = false; | 1563 bool assigned = false; |
1549 if (first_output->HasFixedPolicy()) { | 1564 if (first_output->HasFixedPolicy()) { |
1550 int output_vreg = first_output->virtual_register(); | 1565 int output_vreg = first_output->virtual_register(); |
1551 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg); | 1566 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg); |
1552 bool is_tagged = IsReference(output_vreg); | 1567 bool is_tagged = code()->IsReference(output_vreg); |
1553 AllocateFixed(first_output, instr_index, is_tagged); | 1568 AllocateFixed(first_output, instr_index, is_tagged); |
1554 | 1569 |
1555 // This value is produced on the stack, we never need to spill it. | 1570 // This value is produced on the stack, we never need to spill it. |
1556 if (first_output->IsStackSlot()) { | 1571 if (first_output->IsStackSlot()) { |
1557 DCHECK(StackSlotOperand::cast(first_output)->index() < | 1572 DCHECK(StackSlotOperand::cast(first_output)->index() < |
1558 data()->frame()->GetTotalFrameSlotCount()); | 1573 data()->frame()->GetTotalFrameSlotCount()); |
1559 range->SetSpillOperand(StackSlotOperand::cast(first_output)); | 1574 range->SetSpillOperand(StackSlotOperand::cast(first_output)); |
1560 range->SetSpillStartIndex(instr_index + 1); | 1575 range->SetSpillStartIndex(instr_index + 1); |
1561 assigned = true; | 1576 assigned = true; |
1562 } | 1577 } |
1563 data()->AddGapMove(instr_index + 1, Instruction::START, *first_output, | 1578 data()->AddGapMove(instr_index + 1, Instruction::START, *first_output, |
1564 output_copy); | 1579 output_copy); |
1565 } | 1580 } |
1566 // Make sure we add a gap move for spilling (if we have not done | 1581 // Make sure we add a gap move for spilling (if we have not done |
1567 // so already). | 1582 // so already). |
1568 if (!assigned) { | 1583 if (!assigned) { |
1569 range->SpillAtDefinition(allocation_zone(), instr_index + 1, | 1584 range->SpillAtDefinition(allocation_zone(), instr_index + 1, |
1570 first_output); | 1585 first_output); |
1571 range->SetSpillStartIndex(instr_index + 1); | 1586 range->SetSpillStartIndex(instr_index + 1); |
1572 } | 1587 } |
1573 } | 1588 } |
1574 } | 1589 } |
1575 | 1590 |
1576 | 1591 |
1577 void ConstraintBuilder::MeetConstraintsBefore(int instr_index) { | 1592 void ConstraintBuilder::MeetConstraintsBefore(int instr_index) { |
1578 auto second = InstructionAt(instr_index); | 1593 auto second = code()->InstructionAt(instr_index); |
1579 // Handle fixed input operands of second instruction. | 1594 // Handle fixed input operands of second instruction. |
1580 for (size_t i = 0; i < second->InputCount(); i++) { | 1595 for (size_t i = 0; i < second->InputCount(); i++) { |
1581 auto input = second->InputAt(i); | 1596 auto input = second->InputAt(i); |
1582 if (input->IsImmediate()) continue; // Ignore immediates. | 1597 if (input->IsImmediate()) continue; // Ignore immediates. |
1583 auto cur_input = UnallocatedOperand::cast(input); | 1598 auto cur_input = UnallocatedOperand::cast(input); |
1584 if (cur_input->HasFixedPolicy()) { | 1599 if (cur_input->HasFixedPolicy()) { |
1585 int input_vreg = cur_input->virtual_register(); | 1600 int input_vreg = cur_input->virtual_register(); |
1586 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); | 1601 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); |
1587 bool is_tagged = IsReference(input_vreg); | 1602 bool is_tagged = code()->IsReference(input_vreg); |
1588 AllocateFixed(cur_input, instr_index, is_tagged); | 1603 AllocateFixed(cur_input, instr_index, is_tagged); |
1589 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input); | 1604 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input); |
1590 } | 1605 } |
1591 } | 1606 } |
1592 // Handle "output same as input" for second instruction. | 1607 // Handle "output same as input" for second instruction. |
1593 for (size_t i = 0; i < second->OutputCount(); i++) { | 1608 for (size_t i = 0; i < second->OutputCount(); i++) { |
1594 auto output = second->OutputAt(i); | 1609 auto output = second->OutputAt(i); |
1595 if (!output->IsUnallocated()) continue; | 1610 if (!output->IsUnallocated()) continue; |
1596 auto second_output = UnallocatedOperand::cast(output); | 1611 auto second_output = UnallocatedOperand::cast(output); |
1597 if (!second_output->HasSameAsInputPolicy()) continue; | 1612 if (!second_output->HasSameAsInputPolicy()) continue; |
1598 DCHECK(i == 0); // Only valid for first output. | 1613 DCHECK(i == 0); // Only valid for first output. |
1599 UnallocatedOperand* cur_input = | 1614 UnallocatedOperand* cur_input = |
1600 UnallocatedOperand::cast(second->InputAt(0)); | 1615 UnallocatedOperand::cast(second->InputAt(0)); |
1601 int output_vreg = second_output->virtual_register(); | 1616 int output_vreg = second_output->virtual_register(); |
1602 int input_vreg = cur_input->virtual_register(); | 1617 int input_vreg = cur_input->virtual_register(); |
1603 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); | 1618 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); |
1604 cur_input->set_virtual_register(second_output->virtual_register()); | 1619 cur_input->set_virtual_register(second_output->virtual_register()); |
1605 auto gap_move = data()->AddGapMove(instr_index, Instruction::END, | 1620 auto gap_move = data()->AddGapMove(instr_index, Instruction::END, |
1606 input_copy, *cur_input); | 1621 input_copy, *cur_input); |
1607 if (IsReference(input_vreg) && !IsReference(output_vreg)) { | 1622 if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) { |
1608 if (second->HasReferenceMap()) { | 1623 if (second->HasReferenceMap()) { |
1609 RegisterAllocationData::DelayedReference delayed_reference = { | 1624 RegisterAllocationData::DelayedReference delayed_reference = { |
1610 second->reference_map(), &gap_move->source()}; | 1625 second->reference_map(), &gap_move->source()}; |
1611 data()->delayed_references().push_back(delayed_reference); | 1626 data()->delayed_references().push_back(delayed_reference); |
1612 } | 1627 } |
1613 } else if (!IsReference(input_vreg) && IsReference(output_vreg)) { | 1628 } else if (!code()->IsReference(input_vreg) && |
| 1629 code()->IsReference(output_vreg)) { |
1614 // The input is assumed to immediately have a tagged representation, | 1630 // The input is assumed to immediately have a tagged representation, |
1615 // before the pointer map can be used. I.e. the pointer map at the | 1631 // before the pointer map can be used. I.e. the pointer map at the |
1616 // instruction will include the output operand (whose value at the | 1632 // instruction will include the output operand (whose value at the |
1617 // beginning of the instruction is equal to the input operand). If | 1633 // beginning of the instruction is equal to the input operand). If |
1618 // this is not desired, then the pointer map at this instruction needs | 1634 // this is not desired, then the pointer map at this instruction needs |
1619 // to be adjusted manually. | 1635 // to be adjusted manually. |
1620 } | 1636 } |
1621 } | 1637 } |
1622 } | 1638 } |
1623 | 1639 |
(...skipping 12 matching lines...) Expand all Loading... |
1636 auto map_value = data()->InitializePhiMap(block, phi); | 1652 auto map_value = data()->InitializePhiMap(block, phi); |
1637 auto& output = phi->output(); | 1653 auto& output = phi->output(); |
1638 // Map the destination operands, so the commitment phase can find them. | 1654 // Map the destination operands, so the commitment phase can find them. |
1639 for (size_t i = 0; i < phi->operands().size(); ++i) { | 1655 for (size_t i = 0; i < phi->operands().size(); ++i) { |
1640 InstructionBlock* cur_block = | 1656 InstructionBlock* cur_block = |
1641 code()->InstructionBlockAt(block->predecessors()[i]); | 1657 code()->InstructionBlockAt(block->predecessors()[i]); |
1642 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]); | 1658 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]); |
1643 auto move = data()->AddGapMove(cur_block->last_instruction_index(), | 1659 auto move = data()->AddGapMove(cur_block->last_instruction_index(), |
1644 Instruction::END, input, output); | 1660 Instruction::END, input, output); |
1645 map_value->AddOperand(&move->destination()); | 1661 map_value->AddOperand(&move->destination()); |
1646 DCHECK(!InstructionAt(cur_block->last_instruction_index()) | 1662 DCHECK(!code() |
| 1663 ->InstructionAt(cur_block->last_instruction_index()) |
1647 ->HasReferenceMap()); | 1664 ->HasReferenceMap()); |
1648 } | 1665 } |
1649 auto live_range = LiveRangeFor(phi_vreg); | 1666 auto live_range = data()->GetOrCreateLiveRangeFor(phi_vreg); |
1650 int gap_index = block->first_instruction_index(); | 1667 int gap_index = block->first_instruction_index(); |
1651 live_range->SpillAtDefinition(allocation_zone(), gap_index, &output); | 1668 live_range->SpillAtDefinition(allocation_zone(), gap_index, &output); |
1652 live_range->SetSpillStartIndex(gap_index); | 1669 live_range->SetSpillStartIndex(gap_index); |
1653 // We use the phi-ness of some nodes in some later heuristics. | 1670 // We use the phi-ness of some nodes in some later heuristics. |
1654 live_range->set_is_phi(true); | 1671 live_range->set_is_phi(true); |
1655 live_range->set_is_non_loop_phi(!block->IsLoopHeader()); | 1672 live_range->set_is_non_loop_phi(!block->IsLoopHeader()); |
1656 } | 1673 } |
1657 } | 1674 } |
1658 | 1675 |
1659 | 1676 |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1695 BitVector* live_out) { | 1712 BitVector* live_out) { |
1696 // Add an interval that includes the entire block to the live range for | 1713 // Add an interval that includes the entire block to the live range for |
1697 // each live_out value. | 1714 // each live_out value. |
1698 auto start = LifetimePosition::GapFromInstructionIndex( | 1715 auto start = LifetimePosition::GapFromInstructionIndex( |
1699 block->first_instruction_index()); | 1716 block->first_instruction_index()); |
1700 auto end = LifetimePosition::InstructionFromInstructionIndex( | 1717 auto end = LifetimePosition::InstructionFromInstructionIndex( |
1701 block->last_instruction_index()).NextStart(); | 1718 block->last_instruction_index()).NextStart(); |
1702 BitVector::Iterator iterator(live_out); | 1719 BitVector::Iterator iterator(live_out); |
1703 while (!iterator.Done()) { | 1720 while (!iterator.Done()) { |
1704 int operand_index = iterator.Current(); | 1721 int operand_index = iterator.Current(); |
1705 auto range = LiveRangeFor(operand_index); | 1722 auto range = data()->GetOrCreateLiveRangeFor(operand_index); |
1706 range->AddUseInterval(start, end, allocation_zone()); | 1723 range->AddUseInterval(start, end, allocation_zone()); |
1707 iterator.Advance(); | 1724 iterator.Advance(); |
1708 } | 1725 } |
1709 } | 1726 } |
1710 | 1727 |
1711 | 1728 |
1712 int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) { | 1729 int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) { |
1713 return -index - 1 - config()->num_general_registers(); | 1730 return -index - 1 - config()->num_general_registers(); |
1714 } | 1731 } |
1715 | 1732 |
1716 | 1733 |
1717 LiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) { | 1734 TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) { |
1718 DCHECK(index < config()->num_general_registers()); | 1735 DCHECK(index < config()->num_general_registers()); |
1719 auto result = data()->fixed_live_ranges()[index]; | 1736 auto result = data()->fixed_live_ranges()[index]; |
1720 if (result == nullptr) { | 1737 if (result == nullptr) { |
1721 result = data()->NewLiveRange(FixedLiveRangeID(index), | 1738 result = data()->NewLiveRange(FixedLiveRangeID(index), |
1722 InstructionSequence::DefaultRepresentation()); | 1739 InstructionSequence::DefaultRepresentation()); |
1723 DCHECK(result->IsFixed()); | 1740 DCHECK(result->IsFixed()); |
1724 result->set_assigned_register(index); | 1741 result->set_assigned_register(index); |
1725 data()->MarkAllocated(GENERAL_REGISTERS, index); | 1742 data()->MarkAllocated(GENERAL_REGISTERS, index); |
1726 data()->fixed_live_ranges()[index] = result; | 1743 data()->fixed_live_ranges()[index] = result; |
1727 } | 1744 } |
1728 return result; | 1745 return result; |
1729 } | 1746 } |
1730 | 1747 |
1731 | 1748 |
1732 LiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) { | 1749 TopLevelLiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) { |
1733 DCHECK(index < config()->num_aliased_double_registers()); | 1750 DCHECK(index < config()->num_aliased_double_registers()); |
1734 auto result = data()->fixed_double_live_ranges()[index]; | 1751 auto result = data()->fixed_double_live_ranges()[index]; |
1735 if (result == nullptr) { | 1752 if (result == nullptr) { |
1736 result = data()->NewLiveRange(FixedDoubleLiveRangeID(index), kRepFloat64); | 1753 result = data()->NewLiveRange(FixedDoubleLiveRangeID(index), kRepFloat64); |
1737 DCHECK(result->IsFixed()); | 1754 DCHECK(result->IsFixed()); |
1738 result->set_assigned_register(index); | 1755 result->set_assigned_register(index); |
1739 data()->MarkAllocated(DOUBLE_REGISTERS, index); | 1756 data()->MarkAllocated(DOUBLE_REGISTERS, index); |
1740 data()->fixed_double_live_ranges()[index] = result; | 1757 data()->fixed_double_live_ranges()[index] = result; |
1741 } | 1758 } |
1742 return result; | 1759 return result; |
1743 } | 1760 } |
1744 | 1761 |
1745 | 1762 |
1746 LiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) { | 1763 TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) { |
1747 if (operand->IsUnallocated()) { | 1764 if (operand->IsUnallocated()) { |
1748 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register()); | 1765 return data()->GetOrCreateLiveRangeFor( |
| 1766 UnallocatedOperand::cast(operand)->virtual_register()); |
1749 } else if (operand->IsConstant()) { | 1767 } else if (operand->IsConstant()) { |
1750 return LiveRangeFor(ConstantOperand::cast(operand)->virtual_register()); | 1768 return data()->GetOrCreateLiveRangeFor( |
| 1769 ConstantOperand::cast(operand)->virtual_register()); |
1751 } else if (operand->IsRegister()) { | 1770 } else if (operand->IsRegister()) { |
1752 return FixedLiveRangeFor(RegisterOperand::cast(operand)->index()); | 1771 return FixedLiveRangeFor(RegisterOperand::cast(operand)->index()); |
1753 } else if (operand->IsDoubleRegister()) { | 1772 } else if (operand->IsDoubleRegister()) { |
1754 return FixedDoubleLiveRangeFor( | 1773 return FixedDoubleLiveRangeFor( |
1755 DoubleRegisterOperand::cast(operand)->index()); | 1774 DoubleRegisterOperand::cast(operand)->index()); |
1756 } else { | 1775 } else { |
1757 return nullptr; | 1776 return nullptr; |
1758 } | 1777 } |
1759 } | 1778 } |
1760 | 1779 |
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1870 use_pos = curr_position; | 1889 use_pos = curr_position; |
1871 } else { | 1890 } else { |
1872 use_pos = curr_position.End(); | 1891 use_pos = curr_position.End(); |
1873 } | 1892 } |
1874 | 1893 |
1875 if (input->IsUnallocated()) { | 1894 if (input->IsUnallocated()) { |
1876 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input); | 1895 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input); |
1877 int vreg = unalloc->virtual_register(); | 1896 int vreg = unalloc->virtual_register(); |
1878 live->Add(vreg); | 1897 live->Add(vreg); |
1879 if (unalloc->HasSlotPolicy()) { | 1898 if (unalloc->HasSlotPolicy()) { |
1880 LiveRangeFor(vreg)->set_has_slot_use(true); | 1899 data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true); |
1881 } | 1900 } |
1882 } | 1901 } |
1883 Use(block_start_position, use_pos, input); | 1902 Use(block_start_position, use_pos, input); |
1884 } | 1903 } |
1885 | 1904 |
1886 for (size_t i = 0; i < instr->TempCount(); i++) { | 1905 for (size_t i = 0; i < instr->TempCount(); i++) { |
1887 auto temp = instr->TempAt(i); | 1906 auto temp = instr->TempAt(i); |
1888 // Unsupported. | 1907 // Unsupported. |
1889 DCHECK_IMPLIES(temp->IsUnallocated(), | 1908 DCHECK_IMPLIES(temp->IsUnallocated(), |
1890 !UnallocatedOperand::cast(temp)->HasSlotPolicy()); | 1909 !UnallocatedOperand::cast(temp)->HasSlotPolicy()); |
(...skipping 25 matching lines...) Expand all Loading... |
1916 } | 1935 } |
1917 for (auto cur : *move) { | 1936 for (auto cur : *move) { |
1918 auto& from = cur->source(); | 1937 auto& from = cur->source(); |
1919 auto& to = cur->destination(); | 1938 auto& to = cur->destination(); |
1920 void* hint = &to; | 1939 void* hint = &to; |
1921 UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to); | 1940 UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to); |
1922 UsePosition* to_use = nullptr; | 1941 UsePosition* to_use = nullptr; |
1923 int phi_vreg = -1; | 1942 int phi_vreg = -1; |
1924 if (to.IsUnallocated()) { | 1943 if (to.IsUnallocated()) { |
1925 int to_vreg = UnallocatedOperand::cast(to).virtual_register(); | 1944 int to_vreg = UnallocatedOperand::cast(to).virtual_register(); |
1926 auto to_range = LiveRangeFor(to_vreg); | 1945 auto to_range = data()->GetOrCreateLiveRangeFor(to_vreg); |
1927 if (to_range->is_phi()) { | 1946 if (to_range->is_phi()) { |
1928 phi_vreg = to_vreg; | 1947 phi_vreg = to_vreg; |
1929 if (to_range->is_non_loop_phi()) { | 1948 if (to_range->is_non_loop_phi()) { |
1930 hint = to_range->current_hint_position(); | 1949 hint = to_range->current_hint_position(); |
1931 hint_type = hint == nullptr ? UsePositionHintType::kNone | 1950 hint_type = hint == nullptr ? UsePositionHintType::kNone |
1932 : UsePositionHintType::kUsePos; | 1951 : UsePositionHintType::kUsePos; |
1933 } else { | 1952 } else { |
1934 hint_type = UsePositionHintType::kPhi; | 1953 hint_type = UsePositionHintType::kPhi; |
1935 hint = data()->GetPhiMapValueFor(to_vreg); | 1954 hint = data()->GetPhiMapValueFor(to_vreg); |
1936 } | 1955 } |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2001 DCHECK(block->IsLoopHeader()); | 2020 DCHECK(block->IsLoopHeader()); |
2002 // Add a live range stretching from the first loop instruction to the last | 2021 // Add a live range stretching from the first loop instruction to the last |
2003 // for each value live on entry to the header. | 2022 // for each value live on entry to the header. |
2004 BitVector::Iterator iterator(live); | 2023 BitVector::Iterator iterator(live); |
2005 auto start = LifetimePosition::GapFromInstructionIndex( | 2024 auto start = LifetimePosition::GapFromInstructionIndex( |
2006 block->first_instruction_index()); | 2025 block->first_instruction_index()); |
2007 auto end = LifetimePosition::GapFromInstructionIndex( | 2026 auto end = LifetimePosition::GapFromInstructionIndex( |
2008 code()->LastLoopInstructionIndex(block)).NextFullStart(); | 2027 code()->LastLoopInstructionIndex(block)).NextFullStart(); |
2009 while (!iterator.Done()) { | 2028 while (!iterator.Done()) { |
2010 int operand_index = iterator.Current(); | 2029 int operand_index = iterator.Current(); |
2011 auto range = LiveRangeFor(operand_index); | 2030 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index); |
2012 range->EnsureInterval(start, end, allocation_zone()); | 2031 range->EnsureInterval(start, end, allocation_zone()); |
2013 iterator.Advance(); | 2032 iterator.Advance(); |
2014 } | 2033 } |
2015 // Insert all values into the live in sets of all blocks in the loop. | 2034 // Insert all values into the live in sets of all blocks in the loop. |
2016 for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt(); | 2035 for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt(); |
2017 ++i) { | 2036 ++i) { |
2018 live_in_sets()[i]->Union(*live); | 2037 live_in_sets()[i]->Union(*live); |
2019 } | 2038 } |
2020 } | 2039 } |
2021 | 2040 |
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2083 if (it == phi_hints_.end()) return; | 2102 if (it == phi_hints_.end()) return; |
2084 DCHECK(!it->second->IsResolved()); | 2103 DCHECK(!it->second->IsResolved()); |
2085 it->second->ResolveHint(use_pos); | 2104 it->second->ResolveHint(use_pos); |
2086 } | 2105 } |
2087 | 2106 |
2088 | 2107 |
2089 void LiveRangeBuilder::Verify() const { | 2108 void LiveRangeBuilder::Verify() const { |
2090 for (auto& hint : phi_hints_) { | 2109 for (auto& hint : phi_hints_) { |
2091 CHECK(hint.second->IsResolved()); | 2110 CHECK(hint.second->IsResolved()); |
2092 } | 2111 } |
2093 for (auto current : data()->live_ranges()) { | 2112 for (LiveRange* current : data()->live_ranges()) { |
2094 if (current != nullptr) current->Verify(); | 2113 if (current != nullptr) current->Verify(); |
2095 } | 2114 } |
2096 } | 2115 } |
2097 | 2116 |
2098 | 2117 |
2099 RegisterAllocator::RegisterAllocator(RegisterAllocationData* data, | 2118 RegisterAllocator::RegisterAllocator(RegisterAllocationData* data, |
2100 RegisterKind kind) | 2119 RegisterKind kind) |
2101 : data_(data), | 2120 : data_(data), |
2102 mode_(kind), | 2121 mode_(kind), |
2103 num_registers_(GetRegisterCount(data->config(), kind)) {} | 2122 num_registers_(GetRegisterCount(data->config(), kind)) {} |
2104 | 2123 |
2105 | 2124 |
2106 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, | 2125 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, |
2107 LifetimePosition pos) { | 2126 LifetimePosition pos) { |
2108 DCHECK(!range->IsFixed()); | 2127 DCHECK(!range->TopLevel()->IsFixed()); |
2109 TRACE("Splitting live range %d at %d\n", range->id(), pos.value()); | 2128 TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(), |
| 2129 range->relative_id(), pos.value()); |
2110 | 2130 |
2111 if (pos <= range->Start()) return range; | 2131 if (pos <= range->Start()) return range; |
2112 | 2132 |
2113 // We can't properly connect liveranges if splitting occurred at the end | 2133 // We can't properly connect liveranges if splitting occurred at the end |
2114 // a block. | 2134 // a block. |
2115 DCHECK(pos.IsStart() || pos.IsGapPosition() || | 2135 DCHECK(pos.IsStart() || pos.IsGapPosition() || |
2116 (GetInstructionBlock(code(), pos)->last_instruction_index() != | 2136 (GetInstructionBlock(code(), pos)->last_instruction_index() != |
2117 pos.ToInstructionIndex())); | 2137 pos.ToInstructionIndex())); |
2118 | 2138 |
2119 auto result = data()->NewChildRangeFor(range); | 2139 LiveRange* result = range->SplitAt(pos, allocation_zone()); |
2120 range->SplitAt(pos, result, allocation_zone()); | |
2121 return result; | 2140 return result; |
2122 } | 2141 } |
2123 | 2142 |
2124 | 2143 |
2125 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, | 2144 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, |
2126 LifetimePosition start, | 2145 LifetimePosition start, |
2127 LifetimePosition end) { | 2146 LifetimePosition end) { |
2128 DCHECK(!range->IsFixed()); | 2147 DCHECK(!range->TopLevel()->IsFixed()); |
2129 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(), | 2148 TRACE("Splitting live range %d:%d in position between [%d, %d]\n", |
2130 start.value(), end.value()); | 2149 range->TopLevel()->vreg(), range->relative_id(), start.value(), |
| 2150 end.value()); |
2131 | 2151 |
2132 auto split_pos = FindOptimalSplitPos(start, end); | 2152 auto split_pos = FindOptimalSplitPos(start, end); |
2133 DCHECK(split_pos >= start); | 2153 DCHECK(split_pos >= start); |
2134 return SplitRangeAt(range, split_pos); | 2154 return SplitRangeAt(range, split_pos); |
2135 } | 2155 } |
2136 | 2156 |
2137 | 2157 |
2138 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, | 2158 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, |
2139 LifetimePosition end) { | 2159 LifetimePosition end) { |
2140 int start_instr = start.ToInstructionIndex(); | 2160 int start_instr = start.ToInstructionIndex(); |
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2198 // Try hoisting out to an outer loop. | 2218 // Try hoisting out to an outer loop. |
2199 loop_header = GetContainingLoop(code(), loop_header); | 2219 loop_header = GetContainingLoop(code(), loop_header); |
2200 } | 2220 } |
2201 | 2221 |
2202 return pos; | 2222 return pos; |
2203 } | 2223 } |
2204 | 2224 |
2205 | 2225 |
2206 void RegisterAllocator::Spill(LiveRange* range) { | 2226 void RegisterAllocator::Spill(LiveRange* range) { |
2207 DCHECK(!range->spilled()); | 2227 DCHECK(!range->spilled()); |
2208 TRACE("Spilling live range %d\n", range->id()); | 2228 TopLevelLiveRange* first = range->TopLevel(); |
2209 auto first = range->TopLevel(); | 2229 TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id()); |
| 2230 |
2210 if (first->HasNoSpillType()) { | 2231 if (first->HasNoSpillType()) { |
2211 data()->AssignSpillRangeToLiveRange(first); | 2232 data()->AssignSpillRangeToLiveRange(first); |
2212 } | 2233 } |
2213 range->Spill(); | 2234 range->Spill(); |
2214 } | 2235 } |
2215 | 2236 |
2216 | 2237 |
2217 const ZoneVector<LiveRange*>& RegisterAllocator::GetFixedRegisters() const { | 2238 const ZoneVector<TopLevelLiveRange*>& RegisterAllocator::GetFixedRegisters() |
| 2239 const { |
2218 return mode() == DOUBLE_REGISTERS ? data()->fixed_double_live_ranges() | 2240 return mode() == DOUBLE_REGISTERS ? data()->fixed_double_live_ranges() |
2219 : data()->fixed_live_ranges(); | 2241 : data()->fixed_live_ranges(); |
2220 } | 2242 } |
2221 | 2243 |
2222 | 2244 |
2223 const char* RegisterAllocator::RegisterName(int allocation_index) const { | 2245 const char* RegisterAllocator::RegisterName(int allocation_index) const { |
2224 if (mode() == GENERAL_REGISTERS) { | 2246 if (mode() == GENERAL_REGISTERS) { |
2225 return data()->config()->general_register_name(allocation_index); | 2247 return data()->config()->general_register_name(allocation_index); |
2226 } else { | 2248 } else { |
2227 return data()->config()->double_register_name(allocation_index); | 2249 return data()->config()->double_register_name(allocation_index); |
(...skipping 16 matching lines...) Expand all Loading... |
2244 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= | 2266 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= |
2245 this->data()->config()->num_general_registers()); | 2267 this->data()->config()->num_general_registers()); |
2246 } | 2268 } |
2247 | 2269 |
2248 | 2270 |
2249 void LinearScanAllocator::AllocateRegisters() { | 2271 void LinearScanAllocator::AllocateRegisters() { |
2250 DCHECK(unhandled_live_ranges().empty()); | 2272 DCHECK(unhandled_live_ranges().empty()); |
2251 DCHECK(active_live_ranges().empty()); | 2273 DCHECK(active_live_ranges().empty()); |
2252 DCHECK(inactive_live_ranges().empty()); | 2274 DCHECK(inactive_live_ranges().empty()); |
2253 | 2275 |
2254 for (auto range : data()->live_ranges()) { | 2276 for (LiveRange* range : data()->live_ranges()) { |
2255 if (range == nullptr) continue; | 2277 if (range == nullptr) continue; |
2256 if (range->kind() == mode()) { | 2278 if (range->kind() == mode()) { |
2257 AddToUnhandledUnsorted(range); | 2279 AddToUnhandledUnsorted(range); |
2258 } | 2280 } |
2259 } | 2281 } |
2260 SortUnhandled(); | 2282 SortUnhandled(); |
2261 DCHECK(UnhandledIsSorted()); | 2283 DCHECK(UnhandledIsSorted()); |
2262 | 2284 |
2263 auto& fixed_ranges = GetFixedRegisters(); | 2285 auto& fixed_ranges = GetFixedRegisters(); |
2264 for (auto current : fixed_ranges) { | 2286 for (auto current : fixed_ranges) { |
2265 if (current != nullptr) { | 2287 if (current != nullptr) { |
2266 DCHECK_EQ(mode(), current->kind()); | 2288 DCHECK_EQ(mode(), current->kind()); |
2267 AddToInactive(current); | 2289 AddToInactive(current); |
2268 } | 2290 } |
2269 } | 2291 } |
2270 | 2292 |
2271 while (!unhandled_live_ranges().empty()) { | 2293 while (!unhandled_live_ranges().empty()) { |
2272 DCHECK(UnhandledIsSorted()); | 2294 DCHECK(UnhandledIsSorted()); |
2273 auto current = unhandled_live_ranges().back(); | 2295 auto current = unhandled_live_ranges().back(); |
2274 unhandled_live_ranges().pop_back(); | 2296 unhandled_live_ranges().pop_back(); |
2275 DCHECK(UnhandledIsSorted()); | 2297 DCHECK(UnhandledIsSorted()); |
2276 auto position = current->Start(); | 2298 auto position = current->Start(); |
2277 #ifdef DEBUG | 2299 #ifdef DEBUG |
2278 allocation_finger_ = position; | 2300 allocation_finger_ = position; |
2279 #endif | 2301 #endif |
2280 TRACE("Processing interval %d start=%d\n", current->id(), position.value()); | 2302 TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(), |
| 2303 current->relative_id(), position.value()); |
2281 | 2304 |
2282 if (!current->HasNoSpillType()) { | 2305 if (current->IsTopLevel() && !current->TopLevel()->HasNoSpillType()) { |
2283 TRACE("Live range %d already has a spill operand\n", current->id()); | 2306 TRACE("Live range %d:%d already has a spill operand\n", |
| 2307 current->TopLevel()->vreg(), current->relative_id()); |
2284 auto next_pos = position; | 2308 auto next_pos = position; |
2285 if (next_pos.IsGapPosition()) { | 2309 if (next_pos.IsGapPosition()) { |
2286 next_pos = next_pos.NextStart(); | 2310 next_pos = next_pos.NextStart(); |
2287 } | 2311 } |
2288 auto pos = current->NextUsePositionRegisterIsBeneficial(next_pos); | 2312 auto pos = current->NextUsePositionRegisterIsBeneficial(next_pos); |
2289 // If the range already has a spill operand and it doesn't need a | 2313 // If the range already has a spill operand and it doesn't need a |
2290 // register immediately, split it and spill the first part of the range. | 2314 // register immediately, split it and spill the first part of the range. |
2291 if (pos == nullptr) { | 2315 if (pos == nullptr) { |
2292 Spill(current); | 2316 Spill(current); |
2293 continue; | 2317 continue; |
2294 } else if (pos->pos() > current->Start().NextStart()) { | 2318 } else if (pos->pos() > current->Start().NextStart()) { |
2295 // Do not spill live range eagerly if use position that can benefit from | 2319 // Do not spill live range eagerly if use position that can benefit from |
2296 // the register is too close to the start of live range. | 2320 // the register is too close to the start of live range. |
2297 SpillBetween(current, current->Start(), pos->pos()); | 2321 SpillBetween(current, current->Start(), pos->pos()); |
2298 DCHECK(UnhandledIsSorted()); | 2322 DCHECK(UnhandledIsSorted()); |
2299 continue; | 2323 continue; |
2300 } | 2324 } |
2301 } | 2325 } |
2302 | 2326 |
2303 if (TryReuseSpillForPhi(current)) continue; | 2327 if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel())) |
| 2328 continue; |
2304 | 2329 |
2305 for (size_t i = 0; i < active_live_ranges().size(); ++i) { | 2330 for (size_t i = 0; i < active_live_ranges().size(); ++i) { |
2306 auto cur_active = active_live_ranges()[i]; | 2331 auto cur_active = active_live_ranges()[i]; |
2307 if (cur_active->End() <= position) { | 2332 if (cur_active->End() <= position) { |
2308 ActiveToHandled(cur_active); | 2333 ActiveToHandled(cur_active); |
2309 --i; // The live range was removed from the list of active live ranges. | 2334 --i; // The live range was removed from the list of active live ranges. |
2310 } else if (!cur_active->Covers(position)) { | 2335 } else if (!cur_active->Covers(position)) { |
2311 ActiveToInactive(cur_active); | 2336 ActiveToInactive(cur_active); |
2312 --i; // The live range was removed from the list of active live ranges. | 2337 --i; // The live range was removed from the list of active live ranges. |
2313 } | 2338 } |
(...skipping 19 matching lines...) Expand all Loading... |
2333 } | 2358 } |
2334 } | 2359 } |
2335 } | 2360 } |
2336 | 2361 |
2337 | 2362 |
2338 void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range, | 2363 void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range, |
2339 int reg) { | 2364 int reg) { |
2340 data()->MarkAllocated(range->kind(), reg); | 2365 data()->MarkAllocated(range->kind(), reg); |
2341 range->set_assigned_register(reg); | 2366 range->set_assigned_register(reg); |
2342 range->SetUseHints(reg); | 2367 range->SetUseHints(reg); |
2343 if (range->is_phi()) { | 2368 if (range->IsTopLevel() && range->TopLevel()->is_phi()) { |
2344 data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg); | 2369 data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg); |
2345 } | 2370 } |
2346 } | 2371 } |
2347 | 2372 |
2348 | 2373 |
2349 void LinearScanAllocator::AddToActive(LiveRange* range) { | 2374 void LinearScanAllocator::AddToActive(LiveRange* range) { |
2350 TRACE("Add live range %d to active\n", range->id()); | 2375 TRACE("Add live range %d:%d to active\n", range->TopLevel()->vreg(), |
| 2376 range->relative_id()); |
2351 active_live_ranges().push_back(range); | 2377 active_live_ranges().push_back(range); |
2352 } | 2378 } |
2353 | 2379 |
2354 | 2380 |
2355 void LinearScanAllocator::AddToInactive(LiveRange* range) { | 2381 void LinearScanAllocator::AddToInactive(LiveRange* range) { |
2356 TRACE("Add live range %d to inactive\n", range->id()); | 2382 TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(), |
| 2383 range->relative_id()); |
2357 inactive_live_ranges().push_back(range); | 2384 inactive_live_ranges().push_back(range); |
2358 } | 2385 } |
2359 | 2386 |
2360 | 2387 |
2361 void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) { | 2388 void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) { |
2362 if (range == nullptr || range->IsEmpty()) return; | 2389 if (range == nullptr || range->IsEmpty()) return; |
2363 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); | 2390 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); |
2364 DCHECK(allocation_finger_ <= range->Start()); | 2391 DCHECK(allocation_finger_ <= range->Start()); |
2365 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; | 2392 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; |
2366 --i) { | 2393 --i) { |
2367 auto cur_range = unhandled_live_ranges().at(i); | 2394 auto cur_range = unhandled_live_ranges().at(i); |
2368 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; | 2395 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; |
2369 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1); | 2396 TRACE("Add live range %d:%d to unhandled at %d\n", |
| 2397 range->TopLevel()->vreg(), range->relative_id(), i + 1); |
2370 auto it = unhandled_live_ranges().begin() + (i + 1); | 2398 auto it = unhandled_live_ranges().begin() + (i + 1); |
2371 unhandled_live_ranges().insert(it, range); | 2399 unhandled_live_ranges().insert(it, range); |
2372 DCHECK(UnhandledIsSorted()); | 2400 DCHECK(UnhandledIsSorted()); |
2373 return; | 2401 return; |
2374 } | 2402 } |
2375 TRACE("Add live range %d to unhandled at start\n", range->id()); | 2403 TRACE("Add live range %d:%d to unhandled at start\n", |
| 2404 range->TopLevel()->vreg(), range->relative_id()); |
2376 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); | 2405 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); |
2377 DCHECK(UnhandledIsSorted()); | 2406 DCHECK(UnhandledIsSorted()); |
2378 } | 2407 } |
2379 | 2408 |
2380 | 2409 |
2381 void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) { | 2410 void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) { |
2382 if (range == nullptr || range->IsEmpty()) return; | 2411 if (range == nullptr || range->IsEmpty()) return; |
2383 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); | 2412 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); |
2384 TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); | 2413 TRACE("Add live range %d:%d to unhandled unsorted at end\n", |
| 2414 range->TopLevel()->vreg(), range->relative_id()); |
2385 unhandled_live_ranges().push_back(range); | 2415 unhandled_live_ranges().push_back(range); |
2386 } | 2416 } |
2387 | 2417 |
2388 | 2418 |
2389 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { | 2419 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { |
2390 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); | 2420 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); |
2391 if (a->ShouldBeAllocatedBefore(b)) return false; | 2421 if (a->ShouldBeAllocatedBefore(b)) return false; |
2392 if (b->ShouldBeAllocatedBefore(a)) return true; | 2422 if (b->ShouldBeAllocatedBefore(a)) return true; |
2393 return a->id() < b->id(); | 2423 return a->TopLevel()->vreg() < b->TopLevel()->vreg(); |
2394 } | 2424 } |
2395 | 2425 |
2396 | 2426 |
2397 // Sort the unhandled live ranges so that the ranges to be processed first are | 2427 // Sort the unhandled live ranges so that the ranges to be processed first are |
2398 // at the end of the array list. This is convenient for the register allocation | 2428 // at the end of the array list. This is convenient for the register allocation |
2399 // algorithm because it is efficient to remove elements from the end. | 2429 // algorithm because it is efficient to remove elements from the end. |
2400 void LinearScanAllocator::SortUnhandled() { | 2430 void LinearScanAllocator::SortUnhandled() { |
2401 TRACE("Sort unhandled\n"); | 2431 TRACE("Sort unhandled\n"); |
2402 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), | 2432 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), |
2403 &UnhandledSortHelper); | 2433 &UnhandledSortHelper); |
2404 } | 2434 } |
2405 | 2435 |
2406 | 2436 |
2407 bool LinearScanAllocator::UnhandledIsSorted() { | 2437 bool LinearScanAllocator::UnhandledIsSorted() { |
2408 size_t len = unhandled_live_ranges().size(); | 2438 size_t len = unhandled_live_ranges().size(); |
2409 for (size_t i = 1; i < len; i++) { | 2439 for (size_t i = 1; i < len; i++) { |
2410 auto a = unhandled_live_ranges().at(i - 1); | 2440 auto a = unhandled_live_ranges().at(i - 1); |
2411 auto b = unhandled_live_ranges().at(i); | 2441 auto b = unhandled_live_ranges().at(i); |
2412 if (a->Start() < b->Start()) return false; | 2442 if (a->Start() < b->Start()) return false; |
2413 } | 2443 } |
2414 return true; | 2444 return true; |
2415 } | 2445 } |
2416 | 2446 |
2417 | 2447 |
2418 void LinearScanAllocator::ActiveToHandled(LiveRange* range) { | 2448 void LinearScanAllocator::ActiveToHandled(LiveRange* range) { |
2419 RemoveElement(&active_live_ranges(), range); | 2449 RemoveElement(&active_live_ranges(), range); |
2420 TRACE("Moving live range %d from active to handled\n", range->id()); | 2450 TRACE("Moving live range %d:%d from active to handled\n", |
| 2451 range->TopLevel()->vreg(), range->relative_id()); |
2421 } | 2452 } |
2422 | 2453 |
2423 | 2454 |
2424 void LinearScanAllocator::ActiveToInactive(LiveRange* range) { | 2455 void LinearScanAllocator::ActiveToInactive(LiveRange* range) { |
2425 RemoveElement(&active_live_ranges(), range); | 2456 RemoveElement(&active_live_ranges(), range); |
2426 inactive_live_ranges().push_back(range); | 2457 inactive_live_ranges().push_back(range); |
2427 TRACE("Moving live range %d from active to inactive\n", range->id()); | 2458 TRACE("Moving live range %d:%d from active to inactive\n", |
| 2459 range->TopLevel()->vreg(), range->relative_id()); |
2428 } | 2460 } |
2429 | 2461 |
2430 | 2462 |
2431 void LinearScanAllocator::InactiveToHandled(LiveRange* range) { | 2463 void LinearScanAllocator::InactiveToHandled(LiveRange* range) { |
2432 RemoveElement(&inactive_live_ranges(), range); | 2464 RemoveElement(&inactive_live_ranges(), range); |
2433 TRACE("Moving live range %d from inactive to handled\n", range->id()); | 2465 TRACE("Moving live range %d:%d from inactive to handled\n", |
| 2466 range->TopLevel()->vreg(), range->relative_id()); |
2434 } | 2467 } |
2435 | 2468 |
2436 | 2469 |
2437 void LinearScanAllocator::InactiveToActive(LiveRange* range) { | 2470 void LinearScanAllocator::InactiveToActive(LiveRange* range) { |
2438 RemoveElement(&inactive_live_ranges(), range); | 2471 RemoveElement(&inactive_live_ranges(), range); |
2439 active_live_ranges().push_back(range); | 2472 active_live_ranges().push_back(range); |
2440 TRACE("Moving live range %d from inactive to active\n", range->id()); | 2473 TRACE("Moving live range %d:%d from inactive to active\n", |
| 2474 range->TopLevel()->vreg(), range->relative_id()); |
2441 } | 2475 } |
2442 | 2476 |
2443 | 2477 |
2444 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) { | 2478 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) { |
2445 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2479 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
2446 | 2480 |
2447 for (int i = 0; i < num_registers(); i++) { | 2481 for (int i = 0; i < num_registers(); i++) { |
2448 free_until_pos[i] = LifetimePosition::MaxPosition(); | 2482 free_until_pos[i] = LifetimePosition::MaxPosition(); |
2449 } | 2483 } |
2450 | 2484 |
2451 for (auto cur_active : active_live_ranges()) { | 2485 for (auto cur_active : active_live_ranges()) { |
2452 free_until_pos[cur_active->assigned_register()] = | 2486 free_until_pos[cur_active->assigned_register()] = |
2453 LifetimePosition::GapFromInstructionIndex(0); | 2487 LifetimePosition::GapFromInstructionIndex(0); |
2454 } | 2488 } |
2455 | 2489 |
2456 for (auto cur_inactive : inactive_live_ranges()) { | 2490 for (auto cur_inactive : inactive_live_ranges()) { |
2457 DCHECK(cur_inactive->End() > current->Start()); | 2491 DCHECK(cur_inactive->End() > current->Start()); |
2458 auto next_intersection = cur_inactive->FirstIntersection(current); | 2492 auto next_intersection = cur_inactive->FirstIntersection(current); |
2459 if (!next_intersection.IsValid()) continue; | 2493 if (!next_intersection.IsValid()) continue; |
2460 int cur_reg = cur_inactive->assigned_register(); | 2494 int cur_reg = cur_inactive->assigned_register(); |
2461 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); | 2495 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); |
2462 } | 2496 } |
2463 | 2497 |
2464 int hint_register; | 2498 int hint_register; |
2465 if (current->FirstHintPosition(&hint_register) != nullptr) { | 2499 if (current->FirstHintPosition(&hint_register) != nullptr) { |
2466 TRACE("Found reg hint %s (free until [%d) for live range %d (end %d[).\n", | 2500 TRACE( |
2467 RegisterName(hint_register), free_until_pos[hint_register].value(), | 2501 "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n", |
2468 current->id(), current->End().value()); | 2502 RegisterName(hint_register), free_until_pos[hint_register].value(), |
| 2503 current->TopLevel()->vreg(), current->relative_id(), |
| 2504 current->End().value()); |
2469 | 2505 |
2470 // The desired register is free until the end of the current live range. | 2506 // The desired register is free until the end of the current live range. |
2471 if (free_until_pos[hint_register] >= current->End()) { | 2507 if (free_until_pos[hint_register] >= current->End()) { |
2472 TRACE("Assigning preferred reg %s to live range %d\n", | 2508 TRACE("Assigning preferred reg %s to live range %d:%d\n", |
2473 RegisterName(hint_register), current->id()); | 2509 RegisterName(hint_register), current->TopLevel()->vreg(), |
| 2510 current->relative_id()); |
2474 SetLiveRangeAssignedRegister(current, hint_register); | 2511 SetLiveRangeAssignedRegister(current, hint_register); |
2475 return true; | 2512 return true; |
2476 } | 2513 } |
2477 } | 2514 } |
2478 | 2515 |
2479 // Find the register which stays free for the longest time. | 2516 // Find the register which stays free for the longest time. |
2480 int reg = 0; | 2517 int reg = 0; |
2481 for (int i = 1; i < num_registers(); ++i) { | 2518 for (int i = 1; i < num_registers(); ++i) { |
2482 if (free_until_pos[i] > free_until_pos[reg]) { | 2519 if (free_until_pos[i] > free_until_pos[reg]) { |
2483 reg = i; | 2520 reg = i; |
(...skipping 10 matching lines...) Expand all Loading... |
2494 if (pos < current->End()) { | 2531 if (pos < current->End()) { |
2495 // Register reg is available at the range start but becomes blocked before | 2532 // Register reg is available at the range start but becomes blocked before |
2496 // the range end. Split current at position where it becomes blocked. | 2533 // the range end. Split current at position where it becomes blocked. |
2497 auto tail = SplitRangeAt(current, pos); | 2534 auto tail = SplitRangeAt(current, pos); |
2498 AddToUnhandledSorted(tail); | 2535 AddToUnhandledSorted(tail); |
2499 } | 2536 } |
2500 | 2537 |
2501 // Register reg is available at the range start and is free until | 2538 // Register reg is available at the range start and is free until |
2502 // the range end. | 2539 // the range end. |
2503 DCHECK(pos >= current->End()); | 2540 DCHECK(pos >= current->End()); |
2504 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), | 2541 TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg), |
2505 current->id()); | 2542 current->TopLevel()->vreg(), current->relative_id()); |
2506 SetLiveRangeAssignedRegister(current, reg); | 2543 SetLiveRangeAssignedRegister(current, reg); |
2507 | 2544 |
2508 return true; | 2545 return true; |
2509 } | 2546 } |
2510 | 2547 |
2511 | 2548 |
2512 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) { | 2549 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) { |
2513 auto register_use = current->NextRegisterPosition(current->Start()); | 2550 auto register_use = current->NextRegisterPosition(current->Start()); |
2514 if (register_use == nullptr) { | 2551 if (register_use == nullptr) { |
2515 // There is no use in the current live range that requires a register. | 2552 // There is no use in the current live range that requires a register. |
2516 // We can just spill it. | 2553 // We can just spill it. |
2517 Spill(current); | 2554 Spill(current); |
2518 return; | 2555 return; |
2519 } | 2556 } |
2520 | 2557 |
2521 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2558 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
2522 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2559 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
2523 | 2560 |
2524 for (int i = 0; i < num_registers(); i++) { | 2561 for (int i = 0; i < num_registers(); i++) { |
2525 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); | 2562 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); |
2526 } | 2563 } |
2527 | 2564 |
2528 for (auto range : active_live_ranges()) { | 2565 for (auto range : active_live_ranges()) { |
2529 int cur_reg = range->assigned_register(); | 2566 int cur_reg = range->assigned_register(); |
2530 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { | 2567 if (range->TopLevel()->IsFixed() || |
| 2568 !range->CanBeSpilled(current->Start())) { |
2531 block_pos[cur_reg] = use_pos[cur_reg] = | 2569 block_pos[cur_reg] = use_pos[cur_reg] = |
2532 LifetimePosition::GapFromInstructionIndex(0); | 2570 LifetimePosition::GapFromInstructionIndex(0); |
2533 } else { | 2571 } else { |
2534 auto next_use = | 2572 auto next_use = |
2535 range->NextUsePositionRegisterIsBeneficial(current->Start()); | 2573 range->NextUsePositionRegisterIsBeneficial(current->Start()); |
2536 if (next_use == nullptr) { | 2574 if (next_use == nullptr) { |
2537 use_pos[cur_reg] = range->End(); | 2575 use_pos[cur_reg] = range->End(); |
2538 } else { | 2576 } else { |
2539 use_pos[cur_reg] = next_use->pos(); | 2577 use_pos[cur_reg] = next_use->pos(); |
2540 } | 2578 } |
2541 } | 2579 } |
2542 } | 2580 } |
2543 | 2581 |
2544 for (auto range : inactive_live_ranges()) { | 2582 for (auto range : inactive_live_ranges()) { |
2545 DCHECK(range->End() > current->Start()); | 2583 DCHECK(range->End() > current->Start()); |
2546 auto next_intersection = range->FirstIntersection(current); | 2584 auto next_intersection = range->FirstIntersection(current); |
2547 if (!next_intersection.IsValid()) continue; | 2585 if (!next_intersection.IsValid()) continue; |
2548 int cur_reg = range->assigned_register(); | 2586 int cur_reg = range->assigned_register(); |
2549 if (range->IsFixed()) { | 2587 if (range->TopLevel()->IsFixed()) { |
2550 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); | 2588 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); |
2551 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); | 2589 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); |
2552 } else { | 2590 } else { |
2553 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); | 2591 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); |
2554 } | 2592 } |
2555 } | 2593 } |
2556 | 2594 |
2557 int reg = 0; | 2595 int reg = 0; |
2558 for (int i = 1; i < num_registers(); ++i) { | 2596 for (int i = 1; i < num_registers(); ++i) { |
2559 if (use_pos[i] > use_pos[reg]) { | 2597 if (use_pos[i] > use_pos[reg]) { |
(...skipping 13 matching lines...) Expand all Loading... |
2573 if (block_pos[reg] < current->End()) { | 2611 if (block_pos[reg] < current->End()) { |
2574 // Register becomes blocked before the current range end. Split before that | 2612 // Register becomes blocked before the current range end. Split before that |
2575 // position. | 2613 // position. |
2576 LiveRange* tail = | 2614 LiveRange* tail = |
2577 SplitBetween(current, current->Start(), block_pos[reg].Start()); | 2615 SplitBetween(current, current->Start(), block_pos[reg].Start()); |
2578 AddToUnhandledSorted(tail); | 2616 AddToUnhandledSorted(tail); |
2579 } | 2617 } |
2580 | 2618 |
2581 // Register reg is not blocked for the whole range. | 2619 // Register reg is not blocked for the whole range. |
2582 DCHECK(block_pos[reg] >= current->End()); | 2620 DCHECK(block_pos[reg] >= current->End()); |
2583 TRACE("Assigning blocked reg %s to live range %d\n", RegisterName(reg), | 2621 TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg), |
2584 current->id()); | 2622 current->TopLevel()->vreg(), current->relative_id()); |
2585 SetLiveRangeAssignedRegister(current, reg); | 2623 SetLiveRangeAssignedRegister(current, reg); |
2586 | 2624 |
2587 // This register was not free. Thus we need to find and spill | 2625 // This register was not free. Thus we need to find and spill |
2588 // parts of active and inactive live regions that use the same register | 2626 // parts of active and inactive live regions that use the same register |
2589 // at the same lifetime positions as current. | 2627 // at the same lifetime positions as current. |
2590 SplitAndSpillIntersecting(current); | 2628 SplitAndSpillIntersecting(current); |
2591 } | 2629 } |
2592 | 2630 |
2593 | 2631 |
2594 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) { | 2632 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) { |
(...skipping 19 matching lines...) Expand all Loading... |
2614 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos()); | 2652 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos()); |
2615 } | 2653 } |
2616 ActiveToHandled(range); | 2654 ActiveToHandled(range); |
2617 --i; | 2655 --i; |
2618 } | 2656 } |
2619 } | 2657 } |
2620 | 2658 |
2621 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) { | 2659 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) { |
2622 auto range = inactive_live_ranges()[i]; | 2660 auto range = inactive_live_ranges()[i]; |
2623 DCHECK(range->End() > current->Start()); | 2661 DCHECK(range->End() > current->Start()); |
2624 if (range->assigned_register() == reg && !range->IsFixed()) { | 2662 if (range->assigned_register() == reg && !range->TopLevel()->IsFixed()) { |
2625 LifetimePosition next_intersection = range->FirstIntersection(current); | 2663 LifetimePosition next_intersection = range->FirstIntersection(current); |
2626 if (next_intersection.IsValid()) { | 2664 if (next_intersection.IsValid()) { |
2627 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); | 2665 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); |
2628 if (next_pos == nullptr) { | 2666 if (next_pos == nullptr) { |
2629 SpillAfter(range, split_pos); | 2667 SpillAfter(range, split_pos); |
2630 } else { | 2668 } else { |
2631 next_intersection = Min(next_intersection, next_pos->pos()); | 2669 next_intersection = Min(next_intersection, next_pos->pos()); |
2632 SpillBetween(range, split_pos, next_intersection); | 2670 SpillBetween(range, split_pos, next_intersection); |
2633 } | 2671 } |
2634 InactiveToHandled(range); | 2672 InactiveToHandled(range); |
2635 --i; | 2673 --i; |
2636 } | 2674 } |
2637 } | 2675 } |
2638 } | 2676 } |
2639 } | 2677 } |
2640 | 2678 |
2641 | 2679 |
2642 bool LinearScanAllocator::TryReuseSpillForPhi(LiveRange* range) { | 2680 bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) { |
2643 if (range->IsChild() || !range->is_phi()) return false; | 2681 if (!range->is_phi()) return false; |
| 2682 |
2644 DCHECK(!range->HasSpillOperand()); | 2683 DCHECK(!range->HasSpillOperand()); |
2645 auto phi_map_value = data()->GetPhiMapValueFor(range->id()); | 2684 auto phi_map_value = data()->GetPhiMapValueFor(range); |
2646 auto phi = phi_map_value->phi(); | 2685 auto phi = phi_map_value->phi(); |
2647 auto block = phi_map_value->block(); | 2686 auto block = phi_map_value->block(); |
2648 // Count the number of spilled operands. | 2687 // Count the number of spilled operands. |
2649 size_t spilled_count = 0; | 2688 size_t spilled_count = 0; |
2650 LiveRange* first_op = nullptr; | 2689 LiveRange* first_op = nullptr; |
2651 for (size_t i = 0; i < phi->operands().size(); i++) { | 2690 for (size_t i = 0; i < phi->operands().size(); i++) { |
2652 int op = phi->operands()[i]; | 2691 int op = phi->operands()[i]; |
2653 LiveRange* op_range = LiveRangeFor(op); | 2692 LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op); |
2654 if (!op_range->HasSpillRange()) continue; | 2693 if (!op_range->TopLevel()->HasSpillRange()) continue; |
2655 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); | 2694 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); |
2656 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( | 2695 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( |
2657 pred->last_instruction_index()); | 2696 pred->last_instruction_index()); |
2658 while (op_range != nullptr && !op_range->CanCover(pred_end)) { | 2697 while (op_range != nullptr && !op_range->CanCover(pred_end)) { |
2659 op_range = op_range->next(); | 2698 op_range = op_range->next(); |
2660 } | 2699 } |
2661 if (op_range != nullptr && op_range->spilled()) { | 2700 if (op_range != nullptr && op_range->spilled()) { |
2662 spilled_count++; | 2701 spilled_count++; |
2663 if (first_op == nullptr) { | 2702 if (first_op == nullptr) { |
2664 first_op = op_range->TopLevel(); | 2703 first_op = op_range->TopLevel(); |
2665 } | 2704 } |
2666 } | 2705 } |
2667 } | 2706 } |
2668 | 2707 |
2669 // Only continue if more than half of the operands are spilled. | 2708 // Only continue if more than half of the operands are spilled. |
2670 if (spilled_count * 2 <= phi->operands().size()) { | 2709 if (spilled_count * 2 <= phi->operands().size()) { |
2671 return false; | 2710 return false; |
2672 } | 2711 } |
2673 | 2712 |
2674 // Try to merge the spilled operands and count the number of merged spilled | 2713 // Try to merge the spilled operands and count the number of merged spilled |
2675 // operands. | 2714 // operands. |
2676 DCHECK(first_op != nullptr); | 2715 DCHECK(first_op != nullptr); |
2677 auto first_op_spill = first_op->GetSpillRange(); | 2716 auto first_op_spill = first_op->TopLevel()->GetSpillRange(); |
2678 size_t num_merged = 1; | 2717 size_t num_merged = 1; |
2679 for (size_t i = 1; i < phi->operands().size(); i++) { | 2718 for (size_t i = 1; i < phi->operands().size(); i++) { |
2680 int op = phi->operands()[i]; | 2719 int op = phi->operands()[i]; |
2681 auto op_range = LiveRangeFor(op); | 2720 auto op_range = data()->GetOrCreateLiveRangeFor(op); |
2682 if (!op_range->HasSpillRange()) continue; | 2721 if (!op_range->HasSpillRange()) continue; |
2683 auto op_spill = op_range->GetSpillRange(); | 2722 auto op_spill = op_range->GetSpillRange(); |
2684 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) { | 2723 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) { |
2685 num_merged++; | 2724 num_merged++; |
2686 } | 2725 } |
2687 } | 2726 } |
2688 | 2727 |
2689 // Only continue if enough operands could be merged to the | 2728 // Only continue if enough operands could be merged to the |
2690 // same spill slot. | 2729 // same spill slot. |
2691 if (num_merged * 2 <= phi->operands().size() || | 2730 if (num_merged * 2 <= phi->operands().size() || |
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2764 } | 2803 } |
2765 } | 2804 } |
2766 | 2805 |
2767 | 2806 |
2768 SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data) | 2807 SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data) |
2769 : data_(data) {} | 2808 : data_(data) {} |
2770 | 2809 |
2771 | 2810 |
2772 void SpillSlotLocator::LocateSpillSlots() { | 2811 void SpillSlotLocator::LocateSpillSlots() { |
2773 auto code = data()->code(); | 2812 auto code = data()->code(); |
2774 for (auto range : data()->live_ranges()) { | 2813 for (TopLevelLiveRange* range : data()->live_ranges()) { |
2775 if (range == nullptr || range->IsEmpty() || range->IsChild()) continue; | 2814 if (range == nullptr || range->IsEmpty()) continue; |
2776 // We care only about ranges which spill in the frame. | 2815 // We care only about ranges which spill in the frame. |
2777 if (!range->HasSpillRange()) continue; | 2816 if (!range->HasSpillRange()) continue; |
2778 auto spills = range->spills_at_definition(); | 2817 auto spills = range->spills_at_definition(); |
2779 DCHECK_NOT_NULL(spills); | 2818 DCHECK_NOT_NULL(spills); |
2780 for (; spills != nullptr; spills = spills->next) { | 2819 for (; spills != nullptr; spills = spills->next) { |
2781 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame(); | 2820 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame(); |
2782 } | 2821 } |
2783 } | 2822 } |
2784 } | 2823 } |
2785 | 2824 |
(...skipping 21 matching lines...) Expand all Loading... |
2807 if (range->IsEmpty()) continue; | 2846 if (range->IsEmpty()) continue; |
2808 // Allocate a new operand referring to the spill slot. | 2847 // Allocate a new operand referring to the spill slot. |
2809 int byte_width = range->ByteWidth(); | 2848 int byte_width = range->ByteWidth(); |
2810 int index = data()->frame()->AllocateSpillSlot(byte_width); | 2849 int index = data()->frame()->AllocateSpillSlot(byte_width); |
2811 range->set_assigned_slot(index); | 2850 range->set_assigned_slot(index); |
2812 } | 2851 } |
2813 } | 2852 } |
2814 | 2853 |
2815 | 2854 |
2816 void OperandAssigner::CommitAssignment() { | 2855 void OperandAssigner::CommitAssignment() { |
2817 for (auto range : data()->live_ranges()) { | 2856 for (TopLevelLiveRange* top_range : data()->live_ranges()) { |
2818 if (range == nullptr || range->IsEmpty()) continue; | 2857 if (top_range == nullptr || top_range->IsEmpty()) continue; |
2819 InstructionOperand spill_operand; | 2858 InstructionOperand spill_operand; |
2820 if (range->TopLevel()->HasSpillOperand()) { | 2859 if (top_range->HasSpillOperand()) { |
2821 spill_operand = *range->TopLevel()->GetSpillOperand(); | 2860 spill_operand = *top_range->TopLevel()->GetSpillOperand(); |
2822 } else if (range->TopLevel()->HasSpillRange()) { | 2861 } else if (top_range->TopLevel()->HasSpillRange()) { |
2823 spill_operand = range->TopLevel()->GetSpillRangeOperand(); | 2862 spill_operand = top_range->TopLevel()->GetSpillRangeOperand(); |
2824 } | 2863 } |
2825 auto assigned = range->GetAssignedOperand(); | 2864 if (top_range->is_phi()) { |
2826 range->ConvertUsesToOperand(assigned, spill_operand); | 2865 data()->GetPhiMapValueFor(top_range)->CommitAssignment( |
2827 if (range->is_phi()) { | 2866 top_range->GetAssignedOperand()); |
2828 data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned); | |
2829 } | 2867 } |
2830 if (!range->IsChild() && !spill_operand.IsInvalid()) { | 2868 for (LiveRange* range = top_range; range != nullptr; |
| 2869 range = range->next()) { |
| 2870 auto assigned = range->GetAssignedOperand(); |
| 2871 range->ConvertUsesToOperand(assigned, spill_operand); |
| 2872 } |
| 2873 |
| 2874 if (!spill_operand.IsInvalid()) { |
2831 // If this top level range has a child spilled in a deferred block, we use | 2875 // If this top level range has a child spilled in a deferred block, we use |
2832 // the range and control flow connection mechanism instead of spilling at | 2876 // the range and control flow connection mechanism instead of spilling at |
2833 // definition. Refer to the ConnectLiveRanges and ResolveControlFlow | 2877 // definition. Refer to the ConnectLiveRanges and ResolveControlFlow |
2834 // phases. Normally, when we spill at definition, we do not insert a | 2878 // phases. Normally, when we spill at definition, we do not insert a |
2835 // connecting move when a successor child range is spilled - because the | 2879 // connecting move when a successor child range is spilled - because the |
2836 // spilled range picks up its value from the slot which was assigned at | 2880 // spilled range picks up its value from the slot which was assigned at |
2837 // definition. For ranges that are determined to spill only in deferred | 2881 // definition. For ranges that are determined to spill only in deferred |
2838 // blocks, we let ConnectLiveRanges and ResolveControlFlow insert such | 2882 // blocks, we let ConnectLiveRanges and ResolveControlFlow insert such |
2839 // moves between ranges. Because of how the ranges are split around | 2883 // moves between ranges. Because of how the ranges are split around |
2840 // deferred blocks, this amounts to spilling and filling inside such | 2884 // deferred blocks, this amounts to spilling and filling inside such |
2841 // blocks. | 2885 // blocks. |
2842 if (!range->TryCommitSpillInDeferredBlock(data()->code(), | 2886 if (!top_range->TryCommitSpillInDeferredBlock(data()->code(), |
2843 spill_operand)) { | 2887 spill_operand)) { |
2844 // Spill at definition if the range isn't spilled only in deferred | 2888 // Spill at definition if the range isn't spilled only in deferred |
2845 // blocks. | 2889 // blocks. |
2846 range->CommitSpillsAtDefinition( | 2890 top_range->CommitSpillsAtDefinition( |
2847 data()->code(), spill_operand, | 2891 data()->code(), spill_operand, |
2848 range->has_slot_use() || range->spilled()); | 2892 top_range->has_slot_use() || top_range->spilled()); |
2849 } | 2893 } |
2850 } | 2894 } |
2851 } | 2895 } |
2852 } | 2896 } |
2853 | 2897 |
2854 | 2898 |
2855 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data) | 2899 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data) |
2856 : data_(data) {} | 2900 : data_(data) {} |
2857 | 2901 |
2858 | 2902 |
(...skipping 12 matching lines...) Expand all Loading... |
2871 // Map all delayed references. | 2915 // Map all delayed references. |
2872 for (auto& delayed_reference : data()->delayed_references()) { | 2916 for (auto& delayed_reference : data()->delayed_references()) { |
2873 delayed_reference.map->RecordReference( | 2917 delayed_reference.map->RecordReference( |
2874 AllocatedOperand::cast(*delayed_reference.operand)); | 2918 AllocatedOperand::cast(*delayed_reference.operand)); |
2875 } | 2919 } |
2876 // Iterate over all safe point positions and record a pointer | 2920 // Iterate over all safe point positions and record a pointer |
2877 // for all spilled live ranges at this point. | 2921 // for all spilled live ranges at this point. |
2878 int last_range_start = 0; | 2922 int last_range_start = 0; |
2879 auto reference_maps = data()->code()->reference_maps(); | 2923 auto reference_maps = data()->code()->reference_maps(); |
2880 ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); | 2924 ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); |
2881 for (LiveRange* range : data()->live_ranges()) { | 2925 for (TopLevelLiveRange* range : data()->live_ranges()) { |
2882 if (range == nullptr) continue; | 2926 if (range == nullptr) continue; |
2883 // Iterate over the first parts of multi-part live ranges. | |
2884 if (range->IsChild()) continue; | |
2885 // Skip non-reference values. | 2927 // Skip non-reference values. |
2886 if (!data()->IsReference(range->id())) continue; | 2928 if (!data()->IsReference(range)) continue; |
2887 // Skip empty live ranges. | 2929 // Skip empty live ranges. |
2888 if (range->IsEmpty()) continue; | 2930 if (range->IsEmpty()) continue; |
2889 | 2931 |
2890 // Find the extent of the range and its children. | 2932 // Find the extent of the range and its children. |
2891 int start = range->Start().ToInstructionIndex(); | 2933 int start = range->Start().ToInstructionIndex(); |
2892 int end = 0; | 2934 int end = 0; |
2893 for (auto cur = range; cur != nullptr; cur = cur->next()) { | 2935 for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) { |
2894 auto this_end = cur->End(); | 2936 auto this_end = cur->End(); |
2895 if (this_end.ToInstructionIndex() > end) | 2937 if (this_end.ToInstructionIndex() > end) |
2896 end = this_end.ToInstructionIndex(); | 2938 end = this_end.ToInstructionIndex(); |
2897 DCHECK(cur->Start().ToInstructionIndex() >= start); | 2939 DCHECK(cur->Start().ToInstructionIndex() >= start); |
2898 } | 2940 } |
2899 | 2941 |
2900 // Most of the ranges are in order, but not all. Keep an eye on when they | 2942 // Most of the ranges are in order, but not all. Keep an eye on when they |
2901 // step backwards and reset the first_it so we don't miss any safe points. | 2943 // step backwards and reset the first_it so we don't miss any safe points. |
2902 if (start < last_range_start) first_it = reference_maps->begin(); | 2944 if (start < last_range_start) first_it = reference_maps->begin(); |
2903 last_range_start = start; | 2945 last_range_start = start; |
(...skipping 24 matching lines...) Expand all Loading... |
2928 auto map = *it; | 2970 auto map = *it; |
2929 int safe_point = map->instruction_position(); | 2971 int safe_point = map->instruction_position(); |
2930 | 2972 |
2931 // The safe points are sorted so we can stop searching here. | 2973 // The safe points are sorted so we can stop searching here. |
2932 if (safe_point - 1 > end) break; | 2974 if (safe_point - 1 > end) break; |
2933 | 2975 |
2934 // Advance to the next active range that covers the current | 2976 // Advance to the next active range that covers the current |
2935 // safe point position. | 2977 // safe point position. |
2936 auto safe_point_pos = | 2978 auto safe_point_pos = |
2937 LifetimePosition::InstructionFromInstructionIndex(safe_point); | 2979 LifetimePosition::InstructionFromInstructionIndex(safe_point); |
2938 auto cur = range; | 2980 LiveRange* cur = range; |
2939 while (cur != nullptr && !cur->Covers(safe_point_pos)) { | 2981 while (cur != nullptr && !cur->Covers(safe_point_pos)) { |
2940 cur = cur->next(); | 2982 cur = cur->next(); |
2941 } | 2983 } |
2942 if (cur == nullptr) continue; | 2984 if (cur == nullptr) continue; |
2943 | 2985 |
2944 // Check if the live range is spilled and the safe point is after | 2986 // Check if the live range is spilled and the safe point is after |
2945 // the spill position. | 2987 // the spill position. |
2946 int spill_index = range->IsSpilledOnlyInDeferredBlocks() | 2988 int spill_index = range->IsSpilledOnlyInDeferredBlocks() |
2947 ? cur->Start().ToInstructionIndex() | 2989 ? cur->Start().ToInstructionIndex() |
2948 : range->spill_start_index(); | 2990 : range->spill_start_index(); |
2949 | 2991 |
2950 if (!spill_operand.IsInvalid() && safe_point >= spill_index) { | 2992 if (!spill_operand.IsInvalid() && safe_point >= spill_index) { |
2951 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n", | 2993 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n", |
2952 range->id(), spill_index, safe_point); | 2994 range->vreg(), spill_index, safe_point); |
2953 map->RecordReference(AllocatedOperand::cast(spill_operand)); | 2995 map->RecordReference(AllocatedOperand::cast(spill_operand)); |
2954 } | 2996 } |
2955 | 2997 |
2956 if (!cur->spilled()) { | 2998 if (!cur->spilled()) { |
2957 TRACE( | 2999 TRACE( |
2958 "Pointer in register for range %d (start at %d) " | 3000 "Pointer in register for range %d:%d (start at %d) " |
2959 "at safe point %d\n", | 3001 "at safe point %d\n", |
2960 cur->id(), cur->Start().value(), safe_point); | 3002 range->vreg(), cur->relative_id(), cur->Start().value(), |
| 3003 safe_point); |
2961 auto operand = cur->GetAssignedOperand(); | 3004 auto operand = cur->GetAssignedOperand(); |
2962 DCHECK(!operand.IsStackSlot()); | 3005 DCHECK(!operand.IsStackSlot()); |
2963 DCHECK_EQ(kRepTagged, AllocatedOperand::cast(operand).machine_type()); | 3006 DCHECK_EQ(kRepTagged, AllocatedOperand::cast(operand).machine_type()); |
2964 map->RecordReference(AllocatedOperand::cast(operand)); | 3007 map->RecordReference(AllocatedOperand::cast(operand)); |
2965 } | 3008 } |
2966 } | 3009 } |
2967 } | 3010 } |
2968 } | 3011 } |
2969 | 3012 |
2970 | 3013 |
(...skipping 206 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3177 ->HasReferenceMap()); | 3220 ->HasReferenceMap()); |
3178 gap_index = pred->last_instruction_index(); | 3221 gap_index = pred->last_instruction_index(); |
3179 position = Instruction::END; | 3222 position = Instruction::END; |
3180 } | 3223 } |
3181 data()->AddGapMove(gap_index, position, pred_op, cur_op); | 3224 data()->AddGapMove(gap_index, position, pred_op, cur_op); |
3182 } | 3225 } |
3183 | 3226 |
3184 | 3227 |
3185 void LiveRangeConnector::ConnectRanges(Zone* local_zone) { | 3228 void LiveRangeConnector::ConnectRanges(Zone* local_zone) { |
3186 DelayedInsertionMap delayed_insertion_map(local_zone); | 3229 DelayedInsertionMap delayed_insertion_map(local_zone); |
3187 for (auto first_range : data()->live_ranges()) { | 3230 for (TopLevelLiveRange* top_range : data()->live_ranges()) { |
3188 if (first_range == nullptr || first_range->IsChild()) continue; | 3231 if (top_range == nullptr) continue; |
3189 bool connect_spilled = first_range->IsSpilledOnlyInDeferredBlocks(); | 3232 bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks(); |
3190 for (auto second_range = first_range->next(); second_range != nullptr; | 3233 LiveRange* first_range = top_range; |
| 3234 for (LiveRange *second_range = first_range->next(); second_range != nullptr; |
3191 first_range = second_range, second_range = second_range->next()) { | 3235 first_range = second_range, second_range = second_range->next()) { |
3192 auto pos = second_range->Start(); | 3236 auto pos = second_range->Start(); |
3193 // Add gap move if the two live ranges touch and there is no block | 3237 // Add gap move if the two live ranges touch and there is no block |
3194 // boundary. | 3238 // boundary. |
3195 if (!connect_spilled && second_range->spilled()) continue; | 3239 if (!connect_spilled && second_range->spilled()) continue; |
3196 if (first_range->End() != pos) continue; | 3240 if (first_range->End() != pos) continue; |
3197 if (data()->IsBlockBoundary(pos) && | 3241 if (data()->IsBlockBoundary(pos) && |
3198 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) { | 3242 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) { |
3199 continue; | 3243 continue; |
3200 } | 3244 } |
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3252 auto eliminate = moves->PrepareInsertAfter(move); | 3296 auto eliminate = moves->PrepareInsertAfter(move); |
3253 to_insert.push_back(move); | 3297 to_insert.push_back(move); |
3254 if (eliminate != nullptr) to_eliminate.push_back(eliminate); | 3298 if (eliminate != nullptr) to_eliminate.push_back(eliminate); |
3255 } | 3299 } |
3256 } | 3300 } |
3257 | 3301 |
3258 | 3302 |
3259 } // namespace compiler | 3303 } // namespace compiler |
3260 } // namespace internal | 3304 } // namespace internal |
3261 } // namespace v8 | 3305 } // namespace v8 |
OLD | NEW |