OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/live-range-separator.h" | 5 #include "src/compiler/live-range-separator.h" |
6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
(...skipping 35 matching lines...) |
46 block->set_last_deferred(last); | 46 block->set_last_deferred(last); |
47 // We know last is still deferred, and that last + 1 is not (or is an | 47 // We know last is still deferred, and that last + 1 is not (or is an |
48 // invalid index). So skip over last + 1 and continue from last + 2. This | 48 // invalid index). So skip over last + 1 and continue from last + 2. This |
49 // way, we visit each block exactly once, and the total complexity of this | 49 // way, we visit each block exactly once, and the total complexity of this |
50 // function is O(n), n being the number of blocks. | 50 // function is O(n), n being the number of blocks. |
51 blk_id = last.ToInt() + 1; | 51 blk_id = last.ToInt() + 1; |
52 } | 52 } |
53 } | 53 } |
54 | 54 |
55 | 55 |
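The O(n) claim in the comment above follows from the skip pattern: each iteration either moves one block forward or jumps past an entire deferred run. A minimal standalone sketch of that traversal, with a plain bool vector standing in for the instruction blocks (names and types hypothetical):

    #include <vector>

    // For each run of consecutive deferred blocks, record at the run's first
    // block the index of its last block (-1 for non-deferred blocks).
    std::vector<int> MarkLastDeferred(const std::vector<bool> &deferred) {
      int n = static_cast<int>(deferred.size());
      std::vector<int> last_of_run(n, -1);
      for (int blk_id = 0; blk_id < n; ++blk_id) {
        if (!deferred[blk_id]) continue;
        int last = blk_id;
        while (last + 1 < n && deferred[last + 1]) ++last;  // extend over the run
        last_of_run[blk_id] = last;
        // last + 1 is known non-deferred (or past the end), so resume from
        // last + 2 via the loop increment: every block is visited once, O(n).
        blk_id = last + 1;
      }
      return last_of_run;
    }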
56 // If the live range has a liveness hole right between start and end, | |
57 // we don't need to splinter it. | |
58 bool IsIntervalAlreadyExcluded(const LiveRange *range, LifetimePosition start, | |
59 LifetimePosition end) { | |
60 for (UseInterval *interval = range->first_interval(); interval != nullptr; | |
61 interval = interval->next()) { | |
62 if (interval->start() <= start && start < interval->end()) return false; | |
63 if (interval->start() < end && end <= interval->end()) return false; | |
64 } | |
65 return true; | |
66 } | |
67 | |
68 | |
69 void CreateSplinter(TopLevelLiveRange *range, RegisterAllocationData *data, | 56 void CreateSplinter(TopLevelLiveRange *range, RegisterAllocationData *data, |
70 LifetimePosition first_cut, LifetimePosition last_cut) { | 57 LifetimePosition first_cut, LifetimePosition last_cut) { |
71 DCHECK(!range->IsSplinter()); | 58 DCHECK(!range->IsSplinter()); |
72 // We can ignore ranges that live solely in deferred blocks. | 59 // We can ignore ranges that live solely in deferred blocks. |
73 // If a range ends right at the end of a deferred block, it is marked by | 60 // If a range ends right at the end of a deferred block, it is marked by |
74 // the range builder as ending at the gap start of the next block, since the | 61 // the range builder as ending at the gap start of the next block, since the |
75 // end is a position where the variable isn't live. We need to take that | 62 // end is a position where the variable isn't live. We need to take that |
76 // into consideration. | 63 // into consideration. |
77 LifetimePosition max_allowed_end = last_cut.NextFullStart(); | 64 LifetimePosition max_allowed_end = last_cut.NextFullStart(); |
78 | 65 |
79 if (first_cut <= range->Start() && max_allowed_end >= range->End()) { | 66 if (first_cut <= range->Start() && max_allowed_end >= range->End()) { |
80 return; | 67 return; |
81 } | 68 } |
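As a worked instance of the early-return test above, here is a plain-int sketch with hypothetical position values; the only encoding detail relied on is that NextFullStart() lands one full instruction past last_cut:

    // Hypothetical positions (plain ints standing in for LifetimePosition).
    bool FullyContainedExample() {
      int first_cut = 40;        // gap start of the first deferred instruction
      int last_cut = 80;         // gap start of the last deferred instruction
      int max_allowed_end = 84;  // last_cut.NextFullStart(): next instruction's gap
      // A range that the builder ends exactly at the next block's gap start:
      int range_start = 44, range_end = 84;
      // true: the range lives only in deferred code, so no splinter is made.
      return first_cut <= range_start && max_allowed_end >= range_end;
    }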
82 | 69 |
83 LifetimePosition start = Max(first_cut, range->Start()); | 70 LifetimePosition start = Max(first_cut, range->Start()); |
84 LifetimePosition end = Min(last_cut, range->End()); | 71 LifetimePosition end = Min(last_cut, range->End()); |
85 // Skip ranges that have a hole where the deferred block(s) are. | |
86 if (IsIntervalAlreadyExcluded(range, start, end)) return; | |
87 | 72 |
88 if (start < end) { | 73 if (start < end) { |
89 // Ensure the original range has an associated spill range before it gets | 74 // Ensure the original range has an associated spill range before it gets |
90 // splintered. Splinters will point to it. This way, when attempting to | 75 // splintered. Splinters will point to it. This way, when attempting to |
91 // reuse the splinters' spill slots during allocation, we avoid clobbering | 76 // reuse the splinters' spill slots during allocation, we avoid clobbering |
92 // such slots. | 77 // such slots. |
93 if (range->MayRequireSpillRange()) { | 78 if (range->MayRequireSpillRange()) { |
94 data->CreateSpillRangeForLiveRange(range); | 79 data->CreateSpillRangeForLiveRange(range); |
95 } | 80 } |
96 TopLevelLiveRange *result = data->NextLiveRange(range->machine_type()); | 81 TopLevelLiveRange *result = data->NextLiveRange(range->machine_type()); |
(...skipping 26 matching lines...) |
123 | 108 |
124 LifetimePosition first_cut = LifetimePosition::GapFromInstructionIndex( | 109 LifetimePosition first_cut = LifetimePosition::GapFromInstructionIndex( |
125 block->first_instruction_index()); | 110 block->first_instruction_index()); |
126 | 111 |
127 LifetimePosition last_cut = LifetimePosition::GapFromInstructionIndex( | 112 LifetimePosition last_cut = LifetimePosition::GapFromInstructionIndex( |
128 static_cast<int>(code->instructions().size())); | 113 static_cast<int>(code->instructions().size())); |
129 | 114 |
130 const BitVector *in_set = in_sets[block->rpo_number().ToInt()]; | 115 const BitVector *in_set = in_sets[block->rpo_number().ToInt()]; |
131 InstructionBlock *last = code->InstructionBlockAt(last_deferred); | 116 InstructionBlock *last = code->InstructionBlockAt(last_deferred); |
132 const BitVector *out_set = LiveRangeBuilder::ComputeLiveOut(last, data); | 117 const BitVector *out_set = LiveRangeBuilder::ComputeLiveOut(last, data); |
133 last_cut = LifetimePosition::GapFromInstructionIndex( | 118 int last_index = last->last_instruction_index(); |
134 last->last_instruction_index()); | 119 if (code->InstructionAt(last_index)->opcode() == |
| 120 ArchOpcode::kArchDeoptimize) { |
| 121 ++last_index; |
| 122 } |
| 123 last_cut = LifetimePosition::GapFromInstructionIndex(last_index); |
135 | 124 |
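The new lines 118-123 bump the cut one instruction past a trailing deoptimize. A rough sketch of the effect, assuming V8's four-positions-per-instruction LifetimePosition encoding (gap start at 4*i, instruction start at 4*i + 2; the exact constants are an assumption here):

    // Hypothetical index of a block-ending kArchDeoptimize instruction.
    int CutAfterDeopt(int last_index, bool ends_in_deopt) {
      // Without the bump, the splinter would end at the deopt's own gap
      // (4 * last_index), leaving a use at the call itself
      // (4 * last_index + 2) outside the splintered deferred region.
      if (ends_in_deopt) ++last_index;  // mirrors the ++last_index above
      return 4 * last_index;            // GapFromInstructionIndex(last_index)
    }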
136 BitVector ranges_to_splinter(*in_set, zone); | 125 BitVector ranges_to_splinter(*in_set, zone); |
137 ranges_to_splinter.Union(*out_set); | 126 ranges_to_splinter.Union(*out_set); |
138 BitVector::Iterator iterator(&ranges_to_splinter); | 127 BitVector::Iterator iterator(&ranges_to_splinter); |
139 | 128 |
140 while (!iterator.Done()) { | 129 while (!iterator.Done()) { |
141 int range_id = iterator.Current(); | 130 int range_id = iterator.Current(); |
142 iterator.Advance(); | 131 iterator.Advance(); |
143 | 132 |
144 TopLevelLiveRange *range = data->live_ranges()[range_id]; | 133 TopLevelLiveRange *range = data->live_ranges()[range_id]; |
(...skipping 20 matching lines...) |
165 TopLevelLiveRange *splinter_parent = range->splintered_from(); | 154 TopLevelLiveRange *splinter_parent = range->splintered_from(); |
166 | 155 |
167 splinter_parent->Merge(range, data()); | 156 splinter_parent->Merge(range, data()); |
168 } | 157 } |
169 } | 158 } |
170 | 159 |
171 | 160 |
172 } // namespace compiler | 161 } // namespace compiler |
173 } // namespace internal | 162 } // namespace internal |
174 } // namespace v8 | 163 } // namespace v8 |