OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/greedy-allocator.h" | 5 #include "src/compiler/greedy-allocator.h" |
6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
(...skipping 372 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
383 | 383 |
384 void GreedyAllocator::SpillRangeAsLastResort(LiveRange* range) { | 384 void GreedyAllocator::SpillRangeAsLastResort(LiveRange* range) { |
385 LifetimePosition start = range->Start(); | 385 LifetimePosition start = range->Start(); |
386 CHECK(range->CanBeSpilled(start)); | 386 CHECK(range->CanBeSpilled(start)); |
387 | 387 |
388 DCHECK(range->NextRegisterPosition(start) == nullptr); | 388 DCHECK(range->NextRegisterPosition(start) == nullptr); |
389 Spill(range); | 389 Spill(range); |
390 } | 390 } |
391 | 391 |
392 | 392 |
// Walks |range|'s use intervals looking for the first instruction that is a
// call. If one is found, the range is split around that call: the part before
// the call is re-scheduled for allocation, the part covering the call is
// spilled (a call clobbers all allocatable registers), and the part after the
// call — if any — is returned so the caller can continue processing it.
// Returns |range| itself when no call is covered, and nullptr when there is
// nothing live past the call.
LiveRange* GreedyAllocator::GetRemainderAfterSplittingAroundFirstCall(
    LiveRange* range) {
  LiveRange* ret = range;
  for (UseInterval* interval = range->first_interval(); interval != nullptr;
       interval = interval->next()) {
    LifetimePosition start = interval->start();
    LifetimePosition end = interval->end();
    // If the interval starts at instruction end, then the first instruction
    // in the interval is the next one.
    int first_full_instruction = (start.IsGapPosition() || start.IsStart())
                                     ? start.ToInstructionIndex()
                                     : start.ToInstructionIndex() + 1;
    // If the interval ends in a gap or at instruction start, then the last
    // instruction is the previous one.
    int last_full_instruction = (end.IsGapPosition() || end.IsStart())
                                    ? end.ToInstructionIndex() - 1
                                    : end.ToInstructionIndex();

    for (int instruction_index = first_full_instruction;
         instruction_index <= last_full_instruction; ++instruction_index) {
      if (!code()->InstructionAt(instruction_index)->IsCall()) continue;

      // Split off everything before the call. If no valid split position
      // exists (the range effectively starts at the call), the whole range
      // plays the role of the "covering the call" piece.
      LifetimePosition before =
          GetSplitPositionForInstruction(range, instruction_index);
      LiveRange* second_part =
          before.IsValid() ? Split(range, data(), before) : range;

      // The head (before the call) goes back to the scheduler for another
      // allocation attempt.
      if (range != second_part) scheduler().Schedule(range);

      LifetimePosition after =
          FindSplitPositionAfterCall(second_part, instruction_index);

      if (after.IsValid()) {
        ret = Split(second_part, data(), after);
      } else {
        // No register-requiring use after the call: nothing remains.
        ret = nullptr;
      }
      // The piece overlapping the call can never hold a register; spill it.
      Spill(second_part);
      return ret;
    }
  }
  return ret;
}
| 436 |
| 437 |
| 438 bool GreedyAllocator::TrySplitAroundCalls(LiveRange* range) { |
| 439 bool modified = false; |
| 440 |
| 441 while (range != nullptr) { |
| 442 LiveRange* remainder = GetRemainderAfterSplittingAroundFirstCall(range); |
| 443 // If we performed no modification, we're done. |
| 444 if (remainder == range) { |
| 445 break; |
| 446 } |
| 447 // We performed a modification. |
| 448 modified = true; |
| 449 range = remainder; |
| 450 } |
| 451 // If we have a remainder and we made modifications, it means the remainder |
| 452 // has no calls and we should schedule it for further processing. If we made |
| 453 // no modifications, we will just return false, because we want the algorithm |
| 454 // to make progress by trying some other heuristic. |
| 455 if (modified && range != nullptr) { |
| 456 DCHECK(!range->spilled()); |
| 457 DCHECK(!range->HasRegisterAssigned()); |
| 458 scheduler().Schedule(range); |
| 459 } |
| 460 return modified; |
| 461 } |
| 462 |
| 463 |
| 464 LifetimePosition GreedyAllocator::FindSplitPositionAfterCall( |
| 465 const LiveRange* range, int call_index) { |
| 466 LifetimePosition after_call = |
| 467 Max(range->Start(), |
| 468 LifetimePosition::GapFromInstructionIndex(call_index + 1)); |
| 469 UsePosition* next_use = range->NextRegisterPosition(after_call); |
| 470 if (!next_use) return LifetimePosition::Invalid(); |
| 471 |
| 472 LifetimePosition split_pos = FindOptimalSplitPos(after_call, next_use->pos()); |
| 473 split_pos = |
| 474 GetSplitPositionForInstruction(range, split_pos.ToInstructionIndex()); |
| 475 return split_pos; |
| 476 } |
| 477 |
| 478 |
393 void GreedyAllocator::SplitOrSpillBlockedRange(LiveRange* range) { | 479 void GreedyAllocator::SplitOrSpillBlockedRange(LiveRange* range) { |
394 // TODO(mtrofin): replace the call below with the entry point selecting | 480 if (TrySplitAroundCalls(range)) return; |
395 // heuristics, once they exist, out of which GLRSP is the last one. | |
396 auto pos = GetLastResortSplitPosition(range, code()); | 481 auto pos = GetLastResortSplitPosition(range, code()); |
397 if (pos.IsValid()) { | 482 if (pos.IsValid()) { |
398 LiveRange* tail = Split(range, data(), pos); | 483 LiveRange* tail = Split(range, data(), pos); |
399 DCHECK(tail != range); | 484 DCHECK(tail != range); |
400 scheduler().Schedule(tail); | 485 scheduler().Schedule(tail); |
401 scheduler().Schedule(range); | 486 scheduler().Schedule(range); |
402 return; | 487 return; |
403 } | 488 } |
404 SpillRangeAsLastResort(range); | 489 SpillRangeAsLastResort(range); |
405 } | 490 } |
406 | 491 |
407 | 492 |
408 } // namespace compiler | 493 } // namespace compiler |
409 } // namespace internal | 494 } // namespace internal |
410 } // namespace v8 | 495 } // namespace v8 |
OLD | NEW |