Chromium Code Reviews

Unified diff: src/compiler/greedy-allocator.cc

Issue 1242123006: [turbofan] Deferred block spilling heuristic - first step. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Tight splitting. Created 5 years, 4 months ago.
 // Copyright 2015 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/compiler/greedy-allocator.h"
 #include "src/compiler/register-allocator.h"

 namespace v8 {
 namespace internal {
 namespace compiler {
(...skipping 22 matching lines...)
   DCHECK(pos.IsStart() || pos.IsGapPosition() ||
          (data->code()
               ->GetInstructionBlock(pos.ToInstructionIndex())
               ->last_instruction_index() != pos.ToInstructionIndex()));
   LiveRange* result = data->NewChildRangeFor(range);
   range->SplitAt(pos, result, data->allocation_zone());
   return result;
 }


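+// Spills |range|, first assigning a spill range to its top-level range if it
+// does not have one yet.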
+void SpillToStackSlot(LiveRange* range, RegisterAllocationData* data) {
+  DCHECK(!range->spilled());
+  TRACE("Spilling live range %d\n", range->id());
+  auto first = range->TopLevel();
+  if (first->HasNoSpillType()) {
+    data->AssignSpillRangeToLiveRange(first);
+  }
+  range->Spill();
+}
+
+
 // TODO(mtrofin): explain why splitting in gap START is always OK.
 LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
                                                 const InstructionSequence* code,
                                                 int instruction_index) {
   LifetimePosition ret = LifetimePosition::Invalid();

   ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
   if (range->Start() >= ret || ret >= range->End()) {
     return LifetimePosition::Invalid();
   }
   return ret;
 }


 int GetFirstGapIndex(const UseInterval* interval) {
   LifetimePosition start = interval->start();
   int ret = start.ToInstructionIndex();
   return ret;
 }


 int GetLastGapIndex(const UseInterval* interval) {
   LifetimePosition end = interval->end();
   return end.ToInstructionIndex();
 }


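+// Index of the first instruction whose start position is covered by
+// |interval|.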
+int GetFirstInstructionIndex(const UseInterval* interval) {
+  int ret = interval->start().ToInstructionIndex();
+  if (!interval->start().IsGapPosition() && !interval->start().IsStart()) {
+    ++ret;
+  }
+  return ret;
+}
+
+
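+// Index of the last instruction still covered by |interval|.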
+int GetLastInstructionIndex(const UseInterval* interval) {
+  int ret = interval->end().ToInstructionIndex();
+  if (interval->end().IsGapPosition() || interval->end().IsStart()) {
+    // The interval ends at a gap or at an instruction's start, so the last
+    // instruction it covers is the previous one.
+    --ret;
+  }
+  return ret;
+}
+
+
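+// First position after the call at |call_pos| at which |range| may be split.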
+LifetimePosition GetFirstSplitPosFollowingCall(const LiveRange* range,
+                                               const InstructionSequence* code,
+                                               int call_pos) {
+  int next_pos = call_pos + 1;
+  const InstructionBlock* block = code->GetInstructionBlock(call_pos);
+  if (next_pos == block->last_instruction_index()) {
+    // This may be a throwing call. Look through the successors for a
+    // handler. If there is one, pick the next position as the closest block
+    // start among the successors:
+    // - if the next block is the handler, splitting at its first position
+    //   ensures a "fill" move instruction. The normal control flow block
+    //   will fail LiveRangeConnector::CanEagerlyResolveControlFlow during
+    //   the LiveRangeConnector::ResolveControlFlow phase, which ensures a
+    //   fill move for it, too.
+    // - if the next block is the normal flow, the same argument holds with
+    //   the block roles reversed.
+    // If there is no handler, just use call_pos + 1.
+    int outside_pos = static_cast<int>(code->instructions().size());
+    int alternative_next_pos = outside_pos;
+    bool found_handler = false;
+
+    for (RpoNumber successor_id : block->successors()) {
+      const InstructionBlock* successor =
+          code->InstructionBlockAt(successor_id);
+      if (successor->IsHandler()) {
+        DCHECK(!found_handler);
+        found_handler = true;
+      }
+      int first_succ_instruction = successor->first_instruction_index();
+      alternative_next_pos = Min(alternative_next_pos, first_succ_instruction);
+    }
+    if (found_handler) {
+      DCHECK_NE(outside_pos, alternative_next_pos);
+      next_pos = alternative_next_pos;
+    }
+  }
+  return GetSplitPositionForInstruction(range, code, next_pos);
+}
+
+
+// Split the live range before a call, spill the part that covers the call,
+// and split again right after the call, so the value can be refilled into a
+// register at its next use.
+bool TrySplittingAroundCalls(LiveRange* range, RegisterAllocationData* data,
+                             AllocationScheduler* scheduler) {
+  TRACE("Attempting to split range %d around calls.\n", range->id());
+  const InstructionSequence* code = data->code();
+
+  for (auto interval = range->first_interval(); interval != nullptr;
+       interval = interval->next()) {
+    int first_index = GetFirstInstructionIndex(interval);
+    int last_index = GetLastInstructionIndex(interval);
+    for (int index = first_index; index <= last_index; ++index) {
+      auto instr = code->InstructionAt(index);
+      if (!instr->IsCall()) continue;
+      LifetimePosition before_call =
+          GetSplitPositionForInstruction(range, code, index);
+      if (!before_call.IsValid()) continue;
+      TRACE("Found call at instruction %d for range %d.\n", index,
+            range->id());
+      LiveRange* call_part = Split(range, data, before_call);
+      scheduler->Schedule(range);
+
+      LifetimePosition after_call =
+          GetFirstSplitPosFollowingCall(call_part, code, index);
+      if (after_call.IsValid()) {
+        TRACE("Splitting after call at instruction %d for range %d.\n",
+              after_call.ToInstructionIndex(), call_part->id());
+        LiveRange* to_refill = Split(call_part, data, after_call);
+        scheduler->Schedule(to_refill);
+      }
+      SpillToStackSlot(call_part, data);
+      return true;
+    }
+  }
+  return false;
+}
+
+
 // Basic heuristic for advancing the algorithm, if any other splitting heuristic
 // failed.
 LifetimePosition GetLastResortSplitPosition(const LiveRange* range,
                                             const InstructionSequence* code) {
   if (range->first_interval()->next() != nullptr) {
     return range->first_interval()->next()->start();
   }

   UseInterval* interval = range->first_interval();
   int first = GetFirstGapIndex(interval);
(...skipping 157 matching lines...)
     LifetimePosition start = range->Start();
     TRACE("Live range %d is defined by a spill operand.\n", range->id());
     auto next_pos = start;
     if (next_pos.IsGapPosition()) {
       next_pos = next_pos.NextStart();
     }
     auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
     // If the range already has a spill operand and it doesn't need a
     // register immediately, split it and spill the first part of the range.
     if (pos == nullptr) {
-      Spill(range);
+      SpillToStackSlot(range, data());
     } else if (pos->pos() > range->Start().NextStart()) {
       // Do not spill live range eagerly if use position that can benefit from
       // the register is too close to the start of live range.
-      auto split_pos = pos->pos();
-      if (data()->IsBlockBoundary(split_pos.Start())) {
-        split_pos = split_pos.Start();
-      } else {
-        split_pos = split_pos.PrevStart().End();
-      }
-      Split(range, data(), split_pos);
-      Spill(range);
+      auto split_pos = GetSplitPositionForInstruction(
+          range, code(), pos->pos().ToInstructionIndex());
+      if (split_pos.IsValid()) {
+        Split(range, data(), split_pos);
+        SpillToStackSlot(range, data());
+      }
     }
   }
 }


 void GreedyAllocator::AllocateRegisters() {
   CHECK(scheduler().empty());
   CHECK(allocations_.empty());

   TRACE("Begin allocating function %s with the Greedy Allocator\n",
(...skipping 50 matching lines...)
   }
   range->set_weight(use_count / static_cast<float>(range->GetSize()));
 }


 void GreedyAllocator::SpillRangeAsLastResort(LiveRange* range) {
   LifetimePosition start = range->Start();
   CHECK(range->CanBeSpilled(start));

   DCHECK(range->NextRegisterPosition(start) == nullptr);
-  Spill(range);
+  SpillToStackSlot(range, data());
 }


 void GreedyAllocator::SplitOrSpillBlockedRange(LiveRange* range) {
+  if (TrySplittingAroundCalls(range, data(), &scheduler())) return;
+
   // TODO(mtrofin): replace the call below with the entry point selecting
   // heuristics, once they exist, out of which GLRSP is the last one.
   auto pos = GetLastResortSplitPosition(range, code());
   if (pos.IsValid()) {
     LiveRange* tail = Split(range, data(), pos);
     DCHECK(tail != range);
     scheduler().Schedule(tail);
     scheduler().Schedule(range);
     return;
   }
   SpillRangeAsLastResort(range);
 }


 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
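
For intuition, the shape TrySplittingAroundCalls produces can be sketched with a toy model. This is an illustration only, not part of the patch; ToyRange and SplitAt are hypothetical stand-ins for V8's LiveRange and Split. The head keeps competing for a register, the middle piece covering the call is spilled, and the tail is rescheduled so it can be refilled at its next use.

// Minimal self-contained sketch of the split-around-calls shape.
// ToyRange and SplitAt are hypothetical names, not V8 APIs.
#include <cstdio>

struct ToyRange {
  int start;            // first instruction covered, inclusive
  int end;              // last instruction covered, exclusive
  bool spilled = false;
};

// Split |r| at |pos| and return the new tail covering [pos, old end).
ToyRange SplitAt(ToyRange* r, int pos) {
  ToyRange tail{pos, r->end};
  r->end = pos;
  return tail;
}

int main() {
  // A value live over instructions [0, 10), with a call at instruction 4.
  ToyRange range{0, 10};
  const int call_index = 4;

  // Split before the call, then right after it; spill only the middle piece
  // that covers the call. Head and tail keep competing for registers.
  ToyRange call_part = SplitAt(&range, call_index);
  ToyRange to_refill = SplitAt(&call_part, call_index + 1);
  call_part.spilled = true;

  std::printf("head [%d,%d) stays register-allocated\n", range.start,
              range.end);
  std::printf("call [%d,%d) spilled=%d\n", call_part.start, call_part.end,
              static_cast<int>(call_part.spilled));
  std::printf("tail [%d,%d) rescheduled, refilled at next use\n",
              to_refill.start, to_refill.end);
}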