Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/greedy-allocator.h" | 5 #include "src/compiler/greedy-allocator.h" |
| 6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| 11 | 11 |
| 12 #define TRACE(...) \ | 12 #define TRACE(...) \ |
| 13 do { \ | 13 do { \ |
| 14 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ | 14 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
| 15 } while (false) | 15 } while (false) |
| 16 | 16 |
| 17 | |
| 18 namespace { | 17 namespace { |
| 19 | 18 |
| 20 | 19 |
| 21 void UpdateOperands(LiveRange* range, RegisterAllocationData* data) { | 20 void UpdateOperands(LiveRange* range, RegisterAllocationData* data) { |
| 22 int reg_id = range->assigned_register(); | 21 int reg_id = range->assigned_register(); |
| 23 range->SetUseHints(reg_id); | 22 range->SetUseHints(reg_id); |
| 24 if (range->is_phi()) { | 23 if (range->is_phi()) { |
| 25 data->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id); | 24 data->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id); |
| 26 } | 25 } |
| 27 } | 26 } |
| 28 | 27 |
| 29 | 28 |
| 30 LiveRange* Split(LiveRange* range, RegisterAllocationData* data, | 29 LiveRange* Split(LiveRange* range, RegisterAllocationData* data, |
| 31 LifetimePosition pos) { | 30 LifetimePosition pos) { |
| 32 DCHECK(range->Start() < pos && pos < range->End()); | 31 DCHECK(range->Start() < pos && pos < range->End()); |
| 33 DCHECK(pos.IsStart() || pos.IsGapPosition() || | 32 DCHECK(pos.IsStart() || pos.IsGapPosition() || |
| 34 (data->code() | 33 (data->code() |
| 35 ->GetInstructionBlock(pos.ToInstructionIndex()) | 34 ->GetInstructionBlock(pos.ToInstructionIndex()) |
| 36 ->last_instruction_index() != pos.ToInstructionIndex())); | 35 ->last_instruction_index() != pos.ToInstructionIndex())); |
| 37 LiveRange* result = data->NewChildRangeFor(range); | 36 LiveRange* result = data->NewChildRangeFor(range); |
| 38 range->SplitAt(pos, result, data->allocation_zone()); | 37 range->SplitAt(pos, result, data->allocation_zone()); |
| 39 return result; | 38 return result; |
| 40 } | 39 } |
| 41 | 40 |
| 42 | 41 |
| 42 void SpillToStackSlot(LiveRange* range, RegisterAllocationData* data) { | |
| 43 DCHECK(!range->spilled()); | |
| 44 TRACE("Spilling live range %d\n", range->id()); | |
| 45 auto first = range->TopLevel(); | |
| 46 if (first->HasNoSpillType()) { | |
| 47 data->AssignSpillRangeToLiveRange(first); | |
| 48 } | |
| 49 range->Spill(); | |
| 50 } | |
| 51 | |
| 52 | |
| 43 // TODO(mtrofin): explain why splitting in gap START is always OK. | 53 // TODO(mtrofin): explain why splitting in gap START is always OK. |
| 44 LifetimePosition GetSplitPositionForInstruction(const LiveRange* range, | 54 LifetimePosition GetSplitPositionForInstruction(const LiveRange* range, |
| 45 const InstructionSequence* code, | 55 const InstructionSequence* code, |
| 46 int instruction_index) { | 56 int instruction_index) { |
| 47 LifetimePosition ret = LifetimePosition::Invalid(); | 57 LifetimePosition ret = LifetimePosition::Invalid(); |
| 48 | 58 |
| 49 ret = LifetimePosition::GapFromInstructionIndex(instruction_index); | 59 ret = LifetimePosition::GapFromInstructionIndex(instruction_index); |
| 50 if (range->Start() >= ret || ret >= range->End()) { | 60 if (range->Start() >= ret || ret >= range->End()) { |
| 51 return LifetimePosition::Invalid(); | 61 return LifetimePosition::Invalid(); |
| 52 } | 62 } |
| 53 return ret; | 63 return ret; |
| 54 } | 64 } |
| 55 | 65 |
| 56 | 66 |
| 57 int GetFirstGapIndex(const UseInterval* interval) { | 67 int GetFirstGapIndex(const UseInterval* interval) { |
| 58 LifetimePosition start = interval->start(); | 68 LifetimePosition start = interval->start(); |
| 59 int ret = start.ToInstructionIndex(); | 69 int ret = start.ToInstructionIndex(); |
| 60 return ret; | 70 return ret; |
| 61 } | 71 } |
| 62 | 72 |
| 63 | 73 |
| 64 int GetLastGapIndex(const UseInterval* interval) { | 74 int GetLastGapIndex(const UseInterval* interval) { |
| 65 LifetimePosition end = interval->end(); | 75 LifetimePosition end = interval->end(); |
| 66 return end.ToInstructionIndex(); | 76 return end.ToInstructionIndex(); |
| 67 } | 77 } |
| 68 | 78 |
| 69 | 79 |
| 80 int GetFirstInstructionIndex(const UseInterval* interval) { | |
| 81 int ret = interval->start().ToInstructionIndex(); | |
| 82 if (!interval->start().IsGapPosition() && !interval->start().IsStart()) { | |
| 83 ++ret; | |
| 84 } | |
| 85 return ret; | |
| 86 } | |
| 87 | |
| 88 | |
| 89 int GetLastInstructionIndex(const UseInterval* interval) { | |
| 90 int ret = interval->end().ToInstructionIndex(); | |
| 91 if (interval->end().IsGapPosition() || interval->end().IsStart()) { | |
| 92 // Meaning, this is either a gap or instruction start | |
| 93 --ret; | |
| 94 } | |
| 95 return ret; | |
| 96 } | |
| 97 | |
| 98 | |
| 99 // Split before a call that requires parameters on stack, and spill until | |
| 100 // the first position requiring the parameter back in a register. | |
| 101 bool TrySplittingAroundCalls(LiveRange* range, RegisterAllocationData* data, | |
| 102 AllocationScheduler* scheduler) { | |
| 103 UsePosition* use = range->first_pos(); | |
| 104 | |
| 105 for (auto interval = range->first_interval(); interval != nullptr; | |
| 106 interval = interval->next()) { | |
| 107 int first_index = GetFirstInstructionIndex(interval); | |
| 108 int last_index = GetLastInstructionIndex(interval); | |
| 109 for (int index = first_index; index <= last_index; ++index) { | |
| 110 auto instr = data->code()->InstructionAt(index); | |
| 111 if (!instr->IsCall()) continue; | |
| 112 while (use != nullptr && use->pos().ToInstructionIndex() < index) { | |
| 113 use = use->next(); | |
| 114 } | |
| 115 if (use != nullptr && use->pos().ToInstructionIndex() == index && | |
| 116 use->type() == UsePositionType::kRequiresRegister) | |
| 117 continue; | |
| 118 LifetimePosition before_call = | |
| 119 GetSplitPositionForInstruction(range, data->code(), index); | |
| 120 if (!before_call.IsValid()) continue; | |
| 121 LiveRange* call_part = Split(range, data, before_call); | |
| 122 scheduler->Schedule(range); | |
| 123 | |
| 124 for (auto next_use = use; next_use != nullptr; | |
| 125 next_use = next_use->next()) { | |
| 126 if (next_use->pos() <= before_call && !next_use->RegisterIsBeneficial()) | |
| 127 continue; | |
| 128 int instr_to_split = | |
| 129 index + 1; // next_use->pos().ToInstructionIndex(); | |
| |

> **Mircea Trofin** — 2015/07/23 02:20:04:
> If we change `instr_to_split = next_use->pos().ToInstructionIndex()` … *(comment truncated in this capture)*

| |
| 130 LifetimePosition after_call = GetSplitPositionForInstruction( | |
| 131 call_part, data->code(), instr_to_split); | |
| 132 if (after_call.IsValid()) { | |
| 133 LiveRange* keep = Split(call_part, data, after_call); | |
| 134 scheduler->Schedule(keep); | |
| 135 break; | |
| 136 } | |
| 137 } | |
| 138 | |
| 139 SpillToStackSlot(call_part, data); | |
| 140 return true; | |
| 141 } | |
| 142 } | |
| 143 return false; | |
| 144 } | |
| 145 | |
| 146 | |
| 70 // Basic heuristic for advancing the algorithm, if any other splitting heuristic | 147 // Basic heuristic for advancing the algorithm, if any other splitting heuristic |
| 71 // failed. | 148 // failed. |
| 72 LifetimePosition GetLastResortSplitPosition(const LiveRange* range, | 149 LifetimePosition GetLastResortSplitPosition(const LiveRange* range, |
| 73 const InstructionSequence* code) { | 150 const InstructionSequence* code) { |
| 74 if (range->first_interval()->next() != nullptr) { | 151 if (range->first_interval()->next() != nullptr) { |
| 75 return range->first_interval()->next()->start(); | 152 return range->first_interval()->next()->start(); |
| 76 } | 153 } |
| 77 | 154 |
| 78 UseInterval* interval = range->first_interval(); | 155 UseInterval* interval = range->first_interval(); |
| 79 int first = GetFirstGapIndex(interval); | 156 int first = GetFirstGapIndex(interval); |
| *(…skipping 157 matching lines…)* | |
| 237 LifetimePosition start = range->Start(); | 314 LifetimePosition start = range->Start(); |
| 238 TRACE("Live range %d is defined by a spill operand.\n", range->id()); | 315 TRACE("Live range %d is defined by a spill operand.\n", range->id()); |
| 239 auto next_pos = start; | 316 auto next_pos = start; |
| 240 if (next_pos.IsGapPosition()) { | 317 if (next_pos.IsGapPosition()) { |
| 241 next_pos = next_pos.NextStart(); | 318 next_pos = next_pos.NextStart(); |
| 242 } | 319 } |
| 243 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | 320 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); |
| 244 // If the range already has a spill operand and it doesn't need a | 321 // If the range already has a spill operand and it doesn't need a |
| 245 // register immediately, split it and spill the first part of the range. | 322 // register immediately, split it and spill the first part of the range. |
| 246 if (pos == nullptr) { | 323 if (pos == nullptr) { |
| 247 Spill(range); | 324 SpillToStackSlot(range, data()); |
| 248 } else if (pos->pos() > range->Start().NextStart()) { | 325 } else if (pos->pos() > range->Start().NextStart()) { |
| 249 // Do not spill live range eagerly if use position that can benefit from | 326 // Do not spill live range eagerly if use position that can benefit from |
| 250 // the register is too close to the start of live range. | 327 // the register is too close to the start of live range. |
| 251 auto split_pos = pos->pos(); | 328 auto split_pos = GetSplitPositionForInstruction( |
| 252 if (data()->IsBlockBoundary(split_pos.Start())) { | 329 range, code(), pos->pos().ToInstructionIndex()); |
| 253 split_pos = split_pos.Start(); | 330 if (split_pos.IsValid()) { |
| 254 } else { | 331 Split(range, data(), split_pos); |
| 255 split_pos = split_pos.PrevStart().End(); | 332 SpillToStackSlot(range, data()); |
| 256 } | 333 } |
| 257 Split(range, data(), split_pos); | |
| 258 Spill(range); | |
| 259 } | 334 } |
| 260 } | 335 } |
| 261 } | 336 } |
| 262 | 337 |
| 263 | 338 |
| 264 void GreedyAllocator::AllocateRegisters() { | 339 void GreedyAllocator::AllocateRegisters() { |
| 265 CHECK(scheduler().empty()); | 340 CHECK(scheduler().empty()); |
| 266 CHECK(allocations_.empty()); | 341 CHECK(allocations_.empty()); |
| 267 | 342 |
| 268 TRACE("Begin allocating function %s with the Greedy Allocator\n", | 343 TRACE("Begin allocating function %s with the Greedy Allocator\n", |
| *(…skipping 50 matching lines…)* | |
| 319 } | 394 } |
| 320 range->set_weight(use_count / static_cast<float>(range->GetSize())); | 395 range->set_weight(use_count / static_cast<float>(range->GetSize())); |
| 321 } | 396 } |
| 322 | 397 |
| 323 | 398 |
| 324 void GreedyAllocator::SpillRangeAsLastResort(LiveRange* range) { | 399 void GreedyAllocator::SpillRangeAsLastResort(LiveRange* range) { |
| 325 LifetimePosition start = range->Start(); | 400 LifetimePosition start = range->Start(); |
| 326 CHECK(range->CanBeSpilled(start)); | 401 CHECK(range->CanBeSpilled(start)); |
| 327 | 402 |
| 328 DCHECK(range->NextRegisterPosition(start) == nullptr); | 403 DCHECK(range->NextRegisterPosition(start) == nullptr); |
| 329 Spill(range); | 404 SpillToStackSlot(range, data()); |
| 330 } | 405 } |
| 331 | 406 |
| 332 | 407 |
| 333 void GreedyAllocator::SplitOrSpillBlockedRange(LiveRange* range) { | 408 void GreedyAllocator::SplitOrSpillBlockedRange(LiveRange* range) { |
| 409 if (TrySplittingAroundCalls(range, data(), &scheduler())) return; | |
| 410 | |
| 334 // TODO(mtrofin): replace the call below with the entry point selecting | 411 // TODO(mtrofin): replace the call below with the entry point selecting |
| 335 // heuristics, once they exist, out of which GLRSP is the last one. | 412 // heuristics, once they exist, out of which GLRSP is the last one. |
| 336 auto pos = GetLastResortSplitPosition(range, code()); | 413 auto pos = GetLastResortSplitPosition(range, code()); |
| 337 if (pos.IsValid()) { | 414 if (pos.IsValid()) { |
| 338 LiveRange* tail = Split(range, data(), pos); | 415 LiveRange* tail = Split(range, data(), pos); |
| 339 DCHECK(tail != range); | 416 DCHECK(tail != range); |
| 340 scheduler().Schedule(tail); | 417 scheduler().Schedule(tail); |
| 341 scheduler().Schedule(range); | 418 scheduler().Schedule(range); |
| 342 return; | 419 return; |
| 343 } | 420 } |
| 344 SpillRangeAsLastResort(range); | 421 SpillRangeAsLastResort(range); |
| 345 } | 422 } |
| 346 | 423 |
| 347 | 424 |
| 348 } // namespace compiler | 425 } // namespace compiler |
| 349 } // namespace internal | 426 } // namespace internal |
| 350 } // namespace v8 | 427 } // namespace v8 |
| OLD | NEW |