OLD | NEW |
| (Empty) |
1 // Copyright 2015 the V8 project authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "src/compiler/preprocess-live-ranges.h" | |
6 #include "src/compiler/register-allocator.h" | |
7 | |
8 namespace v8 { | |
9 namespace internal { | |
10 namespace compiler { | |
11 | |
12 | |
// Prints allocator trace output when the --trace_alloc flag is enabled;
// expands to a no-op statement otherwise. The do/while(false) wrapper makes
// the macro behave like a single statement in if/else bodies.
#define TRACE(...)                             \
  do {                                         \
    if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
  } while (false)
17 | |
18 | |
19 namespace { | |
20 | |
21 LiveRange* Split(LiveRange* range, RegisterAllocationData* data, | |
22 LifetimePosition pos) { | |
23 DCHECK(range->Start() < pos && pos < range->End()); | |
24 DCHECK(pos.IsStart() || pos.IsGapPosition() || | |
25 (data->code() | |
26 ->GetInstructionBlock(pos.ToInstructionIndex()) | |
27 ->last_instruction_index() != pos.ToInstructionIndex())); | |
28 LiveRange* result = data->NewChildRangeFor(range); | |
29 range->SplitAt(pos, result, data->allocation_zone()); | |
30 TRACE("Split range %d(v%d) @%d => %d.\n", range->id(), | |
31 range->TopLevel()->id(), pos.ToInstructionIndex(), result->id()); | |
32 return result; | |
33 } | |
34 | |
35 | |
36 LifetimePosition GetSplitPositionForInstruction(const LiveRange* range, | |
37 int instruction_index) { | |
38 LifetimePosition ret = LifetimePosition::Invalid(); | |
39 | |
40 ret = LifetimePosition::GapFromInstructionIndex(instruction_index); | |
41 if (range->Start() >= ret || ret >= range->End()) { | |
42 return LifetimePosition::Invalid(); | |
43 } | |
44 return ret; | |
45 } | |
46 | |
47 | |
48 LiveRange* SplitRangeAfterBlock(LiveRange* range, RegisterAllocationData* data, | |
49 const InstructionBlock* block) { | |
50 const InstructionSequence* code = data->code(); | |
51 int last_index = block->last_instruction_index(); | |
52 int outside_index = static_cast<int>(code->instructions().size()); | |
53 bool has_handler = false; | |
54 for (auto successor_id : block->successors()) { | |
55 const InstructionBlock* successor = code->InstructionBlockAt(successor_id); | |
56 if (successor->IsHandler()) { | |
57 has_handler = true; | |
58 } | |
59 outside_index = Min(outside_index, successor->first_instruction_index()); | |
60 } | |
61 int split_at = has_handler ? outside_index : last_index; | |
62 LifetimePosition after_block = | |
63 GetSplitPositionForInstruction(range, split_at); | |
64 | |
65 if (after_block.IsValid()) { | |
66 return Split(range, data, after_block); | |
67 } | |
68 | |
69 return range; | |
70 } | |
71 | |
72 | |
73 int GetFirstInstructionIndex(const UseInterval* interval) { | |
74 int ret = interval->start().ToInstructionIndex(); | |
75 if (!interval->start().IsGapPosition() && !interval->start().IsStart()) { | |
76 ++ret; | |
77 } | |
78 return ret; | |
79 } | |
80 | |
81 | |
82 bool DoesSubsequenceClobber(const InstructionSequence* code, int start, | |
83 int end) { | |
84 for (int i = start; i <= end; ++i) { | |
85 if (code->InstructionAt(i)->IsCall()) return true; | |
86 } | |
87 return false; | |
88 } | |
89 | |
90 | |
// Splits |range| around deferred blocks that contain calls, so that the
// pieces living inside such blocks end up in their own child live ranges.
// Presumably this lets the allocator spill just those pieces without
// penalizing the hot (non-deferred) path — NOTE(review): confirm against the
// allocator's spill heuristics.
void SplitRangeAtDeferredBlocksWithCalls(LiveRange* range,
                                         RegisterAllocationData* data) {
  DCHECK(!range->IsFixed());
  DCHECK(!range->spilled());
  // Ranges with an explicit spill operand are skipped entirely.
  if (range->TopLevel()->HasSpillOperand()) {
    TRACE(
        "Skipping deferred block analysis for live range %d because it has a "
        "spill operand.\n",
        range->TopLevel()->id());
    return;
  }

  const InstructionSequence* code = data->code();
  LiveRange* current_subrange = range;

  UseInterval* interval = current_subrange->first_interval();

  while (interval != nullptr) {
    // Inclusive instruction index bounds covered by this use interval.
    int first_index = GetFirstInstructionIndex(interval);
    int last_index = interval->end().ToInstructionIndex();

    // Clamp: the interval's end may point past the last instruction, which
    // would make the per-block lookups below go out of bounds.
    if (last_index > code->LastInstructionIndex()) {
      last_index = code->LastInstructionIndex();
    }

    // Advance the walk now: if a split happens below, |interval| is
    // re-fetched from the new sub-range instead.
    interval = interval->next();

    // Walk the interval block by block.
    for (int index = first_index; index <= last_index;) {
      const InstructionBlock* block = code->GetInstructionBlock(index);
      int last_block_index = static_cast<int>(block->last_instruction_index());
      // The portion of this block actually covered by the interval.
      int last_covered_index = Min(last_index, last_block_index);
      int working_index = index;
      // Pre-advance |index| to the start of the next block.
      index = block->last_instruction_index() + 1;

      // Only deferred blocks whose covered portion contains a call are
      // worth splitting around.
      if (!block->IsDeferred() ||
          !DoesSubsequenceClobber(code, working_index, last_covered_index)) {
        continue;
      }

      TRACE("Deferred block B%d clobbers range %d(v%d).\n",
            block->rpo_number().ToInt(), current_subrange->id(),
            current_subrange->TopLevel()->id());
      // First cut: split off everything from the covered block start
      // onwards. If the sub-range already begins inside the block (no valid
      // split position), keep working on the sub-range itself.
      LifetimePosition block_start =
          GetSplitPositionForInstruction(current_subrange, working_index);
      LiveRange* block_and_after = nullptr;
      if (block_start.IsValid()) {
        block_and_after = Split(current_subrange, data, block_start);
      } else {
        block_and_after = current_subrange;
      }
      // Second cut: split again right after the block, isolating the
      // deferred portion in its own live range.
      LiveRange* next = SplitRangeAfterBlock(block_and_after, data, block);
      // Splitting restructured the interval list; restart the interval walk
      // from the new trailing sub-range and leave the inner block loop.
      if (next != current_subrange) interval = next->first_interval();
      current_subrange = next;
      break;
    }
  }
}
148 } | |
149 | |
150 | |
// Entry point of the preprocessing phase; currently its only step is
// splitting live ranges around deferred blocks that contain calls.
void PreprocessLiveRanges::PreprocessRanges() {
  SplitRangesAroundDeferredBlocks();
}
154 | |
155 | |
156 void PreprocessLiveRanges::SplitRangesAroundDeferredBlocks() { | |
157 size_t live_range_count = data()->live_ranges().size(); | |
158 for (size_t i = 0; i < live_range_count; i++) { | |
159 LiveRange* range = data()->live_ranges()[i]; | |
160 if (range != nullptr && !range->IsEmpty() && !range->spilled() && | |
161 !range->IsFixed() && !range->IsChild()) { | |
162 SplitRangeAtDeferredBlocksWithCalls(range, data()); | |
163 } | |
164 } | |
165 } | |
166 | |
167 } // namespace compiler | |
168 } // namespace internal | |
169 } // namespace v8 | |
OLD | NEW |