OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/greedy-allocator.h" | 5 #include "src/compiler/greedy-allocator.h" |
6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
56 LifetimePosition ret = LifetimePosition::Invalid(); | 56 LifetimePosition ret = LifetimePosition::Invalid(); |
57 | 57 |
58 ret = LifetimePosition::GapFromInstructionIndex(instruction_index); | 58 ret = LifetimePosition::GapFromInstructionIndex(instruction_index); |
59 if (range->Start() >= ret || ret >= range->End()) { | 59 if (range->Start() >= ret || ret >= range->End()) { |
60 return LifetimePosition::Invalid(); | 60 return LifetimePosition::Invalid(); |
61 } | 61 } |
62 return ret; | 62 return ret; |
63 } | 63 } |
64 | 64 |
65 | 65 |
66 int GetFirstGapIndex(const UseInterval* interval) { | |
67 LifetimePosition start = interval->start(); | |
68 int ret = start.ToInstructionIndex(); | |
69 return ret; | |
70 } | |
71 | |
72 | |
73 int GetLastGapIndex(const UseInterval* interval) { | |
74 LifetimePosition end = interval->end(); | |
75 return end.ToInstructionIndex(); | |
76 } | |
77 | |
78 | |
79 // Basic heuristic for advancing the algorithm, if any other splitting heuristic | |
80 // failed. | |
81 LifetimePosition GetLastResortSplitPosition(const LiveRange* range, | |
82 const InstructionSequence* code) { | |
83 if (range->first_interval()->next() != nullptr) { | |
84 return range->first_interval()->next()->start(); | |
85 } | |
86 | |
87 UseInterval* interval = range->first_interval(); | |
88 int first = GetFirstGapIndex(interval); | |
89 int last = GetLastGapIndex(interval); | |
90 if (first == last) return LifetimePosition::Invalid(); | |
91 | |
92 // TODO(mtrofin): determine why we can't just split somewhere arbitrary | 
93 // within the range, e.g. its middle. | 
94 for (UsePosition* pos = range->first_pos(); pos != nullptr; | |
95 pos = pos->next()) { | |
96 if (pos->type() != UsePositionType::kRequiresRegister) continue; | |
97 LifetimePosition before = | |
98 GetSplitPositionForInstruction(range, pos->pos().ToInstructionIndex()); | |
99 if (before.IsValid()) return before; | |
100 LifetimePosition after = GetSplitPositionForInstruction( | |
101 range, pos->pos().ToInstructionIndex() + 1); | |
102 if (after.IsValid()) return after; | |
103 } | |
104 return LifetimePosition::Invalid(); | |
105 } | |
106 | |
107 | |
108 bool IsProgressPossible(const LiveRange* range, | |
109 const InstructionSequence* code) { | |
110 return range->CanBeSpilled(range->Start()) || | |
111 GetLastResortSplitPosition(range, code).IsValid(); | |
112 } | |
113 } // namespace | 66 } // namespace |
114 | 67 |
115 | 68 |
116 AllocationCandidate AllocationScheduler::GetNext() { | 69 AllocationCandidate AllocationScheduler::GetNext() { |
117 DCHECK(!queue_.empty()); | 70 DCHECK(!queue_.empty()); |
118 AllocationCandidate ret = queue_.top(); | 71 AllocationCandidate ret = queue_.top(); |
119 queue_.pop(); | 72 queue_.pop(); |
120 return ret; | 73 return ret; |
121 } | 74 } |
122 | 75 |
(...skipping 426 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
549 void GreedyAllocator::EnsureValidRangeWeight(LiveRange* range) { | 502 void GreedyAllocator::EnsureValidRangeWeight(LiveRange* range) { |
550 // The live range weight will be invalidated when ranges are created or split. | 503 // The live range weight will be invalidated when ranges are created or split. |
551 // Otherwise, it is consistently updated when the range is allocated or | 504 // Otherwise, it is consistently updated when the range is allocated or |
552 // unallocated. | 505 // unallocated. |
553 if (range->weight() != LiveRange::kInvalidWeight) return; | 506 if (range->weight() != LiveRange::kInvalidWeight) return; |
554 | 507 |
555 if (range->TopLevel()->IsFixed()) { | 508 if (range->TopLevel()->IsFixed()) { |
556 range->set_weight(LiveRange::kMaxWeight); | 509 range->set_weight(LiveRange::kMaxWeight); |
557 return; | 510 return; |
558 } | 511 } |
559 if (!IsProgressPossible(range, code())) { | 512 if (!IsProgressPossible(range)) { |
560 range->set_weight(LiveRange::kMaxWeight); | 513 range->set_weight(LiveRange::kMaxWeight); |
561 return; | 514 return; |
562 } | 515 } |
563 | 516 |
564 float use_count = 0.0; | 517 float use_count = 0.0; |
565 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next()) { | 518 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next()) { |
566 ++use_count; | 519 ++use_count; |
567 } | 520 } |
568 range->set_weight(use_count / static_cast<float>(range->GetSize())); | 521 range->set_weight(use_count / static_cast<float>(range->GetSize())); |
569 } | 522 } |
(...skipping 105 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
675 pos = GetSplitPositionForInstruction(range, pos.ToInstructionIndex()); | 628 pos = GetSplitPositionForInstruction(range, pos.ToInstructionIndex()); |
676 return pos; | 629 return pos; |
677 } | 630 } |
678 | 631 |
679 | 632 |
680 void GreedyAllocator::SplitOrSpillBlockedRange(LiveRange* range) { | 633 void GreedyAllocator::SplitOrSpillBlockedRange(LiveRange* range) { |
681 if (TrySplitAroundCalls(range)) return; | 634 if (TrySplitAroundCalls(range)) return; |
682 | 635 |
683 LifetimePosition pos = FindSplitPositionBeforeLoops(range); | 636 LifetimePosition pos = FindSplitPositionBeforeLoops(range); |
684 | 637 |
685 if (!pos.IsValid()) pos = GetLastResortSplitPosition(range, code()); | 638 if (!pos.IsValid()) pos = GetLastResortSplitPosition(range); |
686 if (pos.IsValid()) { | 639 if (pos.IsValid()) { |
687 LiveRange* tail = Split(range, data(), pos); | 640 LiveRange* tail = Split(range, data(), pos); |
688 DCHECK(tail != range); | 641 DCHECK(tail != range); |
689 scheduler().Schedule(tail); | 642 scheduler().Schedule(tail); |
690 scheduler().Schedule(range); | 643 scheduler().Schedule(range); |
691 return; | 644 return; |
692 } | 645 } |
693 SpillRangeAsLastResort(range); | 646 SpillRangeAsLastResort(range); |
694 } | 647 } |
695 | 648 |
696 | 649 |
| 650 // Basic heuristic for advancing the algorithm, used when every other |
| 651 // splitting heuristic has failed. |
| 652 LifetimePosition GreedyAllocator::GetLastResortSplitPosition( |
| 653 const LiveRange* range) { |
| 654 LifetimePosition previous = range->Start(); |
| 655 for (UsePosition *pos = range->NextRegisterPosition(previous); pos != nullptr; |
| 656 previous = previous.NextFullStart(), |
| 657 pos = range->NextRegisterPosition(previous)) { |
| 658 LifetimePosition optimal = FindOptimalSplitPos(previous, pos->pos()); |
| 659 LifetimePosition before = |
| 660 GetSplitPositionForInstruction(range, optimal.ToInstructionIndex()); |
| 661 if (before.IsValid()) return before; |
| 662 LifetimePosition after = GetSplitPositionForInstruction( |
| 663 range, pos->pos().ToInstructionIndex() + 1); |
| 664 if (after.IsValid()) return after; |
| 665 } |
| 666 return LifetimePosition::Invalid(); |
| 667 } |
| 668 |
| 669 |
| 670 bool GreedyAllocator::IsProgressPossible(const LiveRange* range) { |
| 671 return range->CanBeSpilled(range->Start()) || |
| 672 GetLastResortSplitPosition(range).IsValid(); |
| 673 } |
| 674 |
697 } // namespace compiler | 675 } // namespace compiler |
698 } // namespace internal | 676 } // namespace internal |
699 } // namespace v8 | 677 } // namespace v8 |
OLD | NEW |