| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/greedy-allocator.h" | 5 #include "src/compiler/greedy-allocator.h" |
| 6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| (...skipping 32 matching lines...) |
| 43 DCHECK(range->Start() < pos && pos < range->End()); | 43 DCHECK(range->Start() < pos && pos < range->End()); |
| 44 DCHECK(pos.IsStart() || pos.IsGapPosition() || | 44 DCHECK(pos.IsStart() || pos.IsGapPosition() || |
| 45 (data->code() | 45 (data->code() |
| 46 ->GetInstructionBlock(pos.ToInstructionIndex()) | 46 ->GetInstructionBlock(pos.ToInstructionIndex()) |
| 47 ->last_instruction_index() != pos.ToInstructionIndex())); | 47 ->last_instruction_index() != pos.ToInstructionIndex())); |
| 48 LiveRange* result = range->SplitAt(pos, data->allocation_zone()); | 48 LiveRange* result = range->SplitAt(pos, data->allocation_zone()); |
| 49 return result; | 49 return result; |
| 50 } | 50 } |
| 51 | 51 |
| 52 | 52 |
| 53 // TODO(mtrofin): explain why splitting in gap START is always OK. | |
| 54 LifetimePosition GetSplitPositionForInstruction(const LiveRange* range, | |
| 55 int instruction_index) { | |
| 56 LifetimePosition ret = LifetimePosition::Invalid(); | |
| 57 | |
| 58 ret = LifetimePosition::GapFromInstructionIndex(instruction_index); | |
| 59 if (range->Start() >= ret || ret >= range->End()) { | |
| 60 return LifetimePosition::Invalid(); | |
| 61 } | |
| 62 return ret; | |
| 63 } | |
| 64 | |
| 65 | |
| 66 } // namespace | 53 } // namespace |
| 67 | 54 |
| 68 | 55 |
| 69 AllocationCandidate AllocationScheduler::GetNext() { | 56 AllocationCandidate AllocationScheduler::GetNext() { |
| 70 DCHECK(!queue_.empty()); | 57 DCHECK(!queue_.empty()); |
| 71 AllocationCandidate ret = queue_.top(); | 58 AllocationCandidate ret = queue_.top(); |
| 72 queue_.pop(); | 59 queue_.pop(); |
| 73 return ret; | 60 return ret; |
| 74 } | 61 } |
| 75 | 62 |
| (...skipping 291 matching lines...) |
| 367 conflict->UnsetAssignedRegister(); | 354 conflict->UnsetAssignedRegister(); |
| 368 UnsetOperands(conflict, data()); | 355 UnsetOperands(conflict, data()); |
| 369 UpdateWeightAtEviction(conflict); | 356 UpdateWeightAtEviction(conflict); |
| 370 scheduler().Schedule(conflict); | 357 scheduler().Schedule(conflict); |
| 371 TRACE("Evicted range %d%d.\n", conflict->TopLevel()->vreg(), | 358 TRACE("Evicted range %d%d.\n", conflict->TopLevel()->vreg(), |
| 372 conflict->relative_id()); | 359 conflict->relative_id()); |
| 373 } | 360 } |
| 374 } | 361 } |
| 375 | 362 |
| 376 | 363 |
| 377 void GreedyAllocator::SplitAndSpillRangesDefinedByMemoryOperand() { | |
| 378 size_t initial_range_count = data()->live_ranges().size(); | |
| 379 for (size_t i = 0; i < initial_range_count; ++i) { | |
| 380 TopLevelLiveRange* range = data()->live_ranges()[i]; | |
| 381 if (!CanProcessRange(range)) continue; | |
| 382 if (!range->HasSpillOperand()) continue; | |
| 383 | |
| 384 LifetimePosition start = range->Start(); | |
| 385 TRACE("Live range %d:%d is defined by a spill operand.\n", | |
| 386 range->TopLevel()->vreg(), range->relative_id()); | |
| 387 auto next_pos = start; | |
| 388 if (next_pos.IsGapPosition()) { | |
| 389 next_pos = next_pos.NextStart(); | |
| 390 } | |
| 391 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | |
| 392 // If the range already has a spill operand and it doesn't need a | |
| 393 // register immediately, split it and spill the first part of the range. | |
| 394 if (pos == nullptr) { | |
| 395 Spill(range); | |
| 396 } else if (pos->pos() > range->Start().NextStart()) { | |
| 397 // Do not spill the live range eagerly if the use position that can | |
| 398 // benefit from the register is too close to the start of the live range. | |
| 399 auto split_pos = GetSplitPositionForInstruction( | |
| 400 range, pos->pos().ToInstructionIndex()); | |
| 401 // There is no place to split, so we can't split and spill. | |
| 402 if (!split_pos.IsValid()) continue; | |
| 403 | |
| 404 split_pos = | |
| 405 FindOptimalSplitPos(range->Start().NextFullStart(), split_pos); | |
| 406 | |
| 407 Split(range, data(), split_pos); | |
| 408 Spill(range); | |
| 409 } | |
| 410 } | |
| 411 } | |
| 412 | |
| 413 | |
| 414 void GreedyAllocator::AllocateRegisters() { | 364 void GreedyAllocator::AllocateRegisters() { |
| 415 CHECK(scheduler().empty()); | 365 CHECK(scheduler().empty()); |
| 416 CHECK(allocations_.empty()); | 366 CHECK(allocations_.empty()); |
| 417 | 367 |
| 418 TRACE("Begin allocating function %s with the Greedy Allocator\n", | 368 TRACE("Begin allocating function %s with the Greedy Allocator\n", |
| 419 data()->debug_name()); | 369 data()->debug_name()); |
| 420 | 370 |
| 421 SplitAndSpillRangesDefinedByMemoryOperand(); | 371 SplitAndSpillRangesDefinedByMemoryOperand(); |
| 422 GroupLiveRanges(); | 372 GroupLiveRanges(); |
| 423 ScheduleAllocationCandidates(); | 373 ScheduleAllocationCandidates(); |
| (...skipping 246 matching lines...) |
| 670 | 620 |
| 671 | 621 |
| 672 bool GreedyAllocator::IsProgressPossible(const LiveRange* range) { | 622 bool GreedyAllocator::IsProgressPossible(const LiveRange* range) { |
| 673 return range->CanBeSpilled(range->Start()) || | 623 return range->CanBeSpilled(range->Start()) || |
| 674 GetLastResortSplitPosition(range).IsValid(); | 624 GetLastResortSplitPosition(range).IsValid(); |
| 675 } | 625 } |
| 676 | 626 |
| 677 } // namespace compiler | 627 } // namespace compiler |
| 678 } // namespace internal | 628 } // namespace internal |
| 679 } // namespace v8 | 629 } // namespace v8 |
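Side note on the first helper this CL deletes from greedy-allocator.cc: `GetSplitPositionForInstruction` only ever proposes the gap START slot before an instruction, and returns an invalid position unless that slot falls strictly inside the range. That also suggests one answer to the TODO at old line 53: a gap position always carries parallel-move slots, so a split there can always place its connecting move (my reading of the code, not stated in the CL). Below is a minimal standalone sketch of that bounds check; `ToyLifetimePosition` is a hypothetical stand-in for V8's `LifetimePosition`, and the 4-slots-per-instruction encoding is an assumption, not taken from this CL.

```cpp
// Standalone sketch, NOT V8 code: models a lifetime position as
// instruction_index * kStep, where each instruction owns kStep = 4 slots
// (gap start, gap end, instruction start, instruction end).
#include <cassert>
#include <cstdio>

class ToyLifetimePosition {
 public:
  static constexpr int kStep = 4;  // assumed slots per instruction
  static ToyLifetimePosition Invalid() { return ToyLifetimePosition(-1); }
  static ToyLifetimePosition GapFromInstructionIndex(int index) {
    return ToyLifetimePosition(index * kStep);  // the gap START slot
  }
  bool IsValid() const { return value_ >= 0; }
  bool operator>=(ToyLifetimePosition other) const {
    return value_ >= other.value_;
  }
  int value() const { return value_; }

 private:
  explicit ToyLifetimePosition(int value) : value_(value) {}
  int value_;
};

// Mirrors the deleted helper: a usable split point must fall strictly
// inside (start, end); anything at or outside the bounds is rejected.
ToyLifetimePosition GetSplitPositionForInstruction(ToyLifetimePosition start,
                                                   ToyLifetimePosition end,
                                                   int instruction_index) {
  ToyLifetimePosition ret =
      ToyLifetimePosition::GapFromInstructionIndex(instruction_index);
  if (start >= ret || ret >= end) return ToyLifetimePosition::Invalid();
  return ret;
}

int main() {
  ToyLifetimePosition start = ToyLifetimePosition::GapFromInstructionIndex(2);
  ToyLifetimePosition end = ToyLifetimePosition::GapFromInstructionIndex(10);
  assert(!GetSplitPositionForInstruction(start, end, 2).IsValid());   // == start
  assert(GetSplitPositionForInstruction(start, end, 5).IsValid());    // inside
  assert(!GetSplitPositionForInstruction(start, end, 10).IsValid());  // == end
  std::printf("split at slot %d\n",
              GetSplitPositionForInstruction(start, end, 5).value());
  return 0;
}
```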
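The other deleted function, `SplitAndSpillRangesDefinedByMemoryOperand`, encodes a three-way decision for every range defined by a spill operand (this hunk only shows the deletion; the logic presumably lives on in the shared register-allocator code). Here is a hedged sketch of just that decision; `ToyRange` and `Decide` are hypothetical stand-ins, not V8 API.

```cpp
// Illustrative decision logic only; every range passed to Decide is
// assumed to be defined by a memory (spill) operand.
#include <cstdio>

enum class Action { kSpillWhole, kLeaveForAllocation, kSplitThenSpillHead };

struct ToyRange {
  int start;                 // position where the range is defined
  int first_beneficial_use;  // first use that wants a register; -1 if none
};

Action Decide(const ToyRange& r) {
  if (r.first_beneficial_use < 0) {
    // No use ever benefits from a register: spill the whole range now.
    return Action::kSpillWhole;
  }
  if (r.first_beneficial_use <= r.start + 1) {
    // The register-hungry use sits right at the definition; splitting
    // would leave no head worth spilling, so let allocation handle it.
    return Action::kLeaveForAllocation;
  }
  // Split before the beneficial use and spill only the head.
  return Action::kSplitThenSpillHead;
}

int main() {
  std::printf("%d %d %d\n",
              static_cast<int>(Decide({0, -1})),  // 0: spill whole
              static_cast<int>(Decide({0, 1})),   // 1: leave alone
              static_cast<int>(Decide({0, 8})));  // 2: split, spill head
  return 0;
}
```

The split-then-spill arm is cheap by construction: the head of such a range already has its value in memory at the definition, so spilling it adds no store.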