Index: src/compiler/register-allocator.cc
diff --git a/src/compiler/register-allocator.cc b/src/compiler/register-allocator.cc
index 8de847e1a75211c779dfc8ee4d7237cd83100929..0410b417f84136a52c80b1a5940cd1651d99c2f2 100644
--- a/src/compiler/register-allocator.cc
+++ b/src/compiler/register-allocator.cc
@@ -2443,6 +2443,28 @@
     TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
           current->relative_id(), position.value());
 
+    if (current->IsTopLevel() && !current->TopLevel()->HasNoSpillType()) {
+      TRACE("Live range %d:%d already has a spill operand\n",
+            current->TopLevel()->vreg(), current->relative_id());
+      auto next_pos = position;
+      if (next_pos.IsGapPosition()) {
+        next_pos = next_pos.NextStart();
+      }
+      auto pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
+      // If the range already has a spill operand and it doesn't need a
+      // register immediately, split it and spill the first part of the range.
+      if (pos == nullptr) {
+        Spill(current);
+        continue;
+      } else if (pos->pos() > current->Start().NextStart()) {
+        // Do not spill live range eagerly if use position that can benefit from
+        // the register is too close to the start of live range.
+        SpillBetween(current, current->Start(), pos->pos());
+        DCHECK(UnhandledIsSorted());
+        continue;
+      }
+    }
+
     if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
       continue;
 
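In plain terms, the new block handles ranges that already carry a spill operand when they are popped from the unhandled list: if no upcoming use benefits from a register, the whole range is spilled; if the first beneficial use sits far enough past the range start, only the prefix up to that use is spilled via SpillBetween(); otherwise the range falls through to normal register allocation. The sketch below is a minimal, self-contained model of that three-way decision, not V8 code: LiveRangeSketch, DecideEagerSpill, the Decision enum, and plain integer positions are made-up stand-ins for V8's LiveRange, LifetimePosition, and UsePosition.

// Hypothetical sketch of the decision in the hunk above; names and integer
// positions are stand-ins, not the V8 types used in the patch.
#include <cstdio>
#include <optional>

struct LiveRangeSketch {
  int start;                                // models current->Start()
  std::optional<int> first_beneficial_use;  // models NextUsePositionRegisterIsBeneficial()
  bool has_spill_operand;                   // models !HasNoSpillType()
};

enum class Decision { kSpillWholeRange, kSpillPrefixOnly, kTryAllocate };

Decision DecideEagerSpill(const LiveRangeSketch& range) {
  if (!range.has_spill_operand) return Decision::kTryAllocate;
  // No use wants a register: spill the entire range, like Spill(current).
  if (!range.first_beneficial_use) return Decision::kSpillWholeRange;
  // The first beneficial use is not right at the start: spill only the
  // prefix, like SpillBetween(current, current->Start(), pos->pos()).
  // "start + 1" stands in for current->Start().NextStart().
  if (*range.first_beneficial_use > range.start + 1)
    return Decision::kSpillPrefixOnly;
  // The beneficial use is too close to the start: try for a register.
  return Decision::kTryAllocate;
}

int main() {
  LiveRangeSketch r{/*start=*/10, /*first_beneficial_use=*/20,
                    /*has_spill_operand=*/true};
  std::printf("decision=%d\n", static_cast<int>(DecideEagerSpill(r)));  // prints 1
}

Spilling only the first part leaves the rest of the range to be processed normally, so a register is only requested where a use actually benefits from one, which matches the intent stated in the patch comments.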