| Index: src/lithium-allocator.cc
|
| diff --git a/src/lithium-allocator.cc b/src/lithium-allocator.cc
|
| index 7d09d179d2dce29a06715d7339f49d89f96b0b37..513a67c7c839dbe4c63e7eeda18f6bc8c93cf820 100644
|
| --- a/src/lithium-allocator.cc
|
| +++ b/src/lithium-allocator.cc
|
| @@ -247,7 +247,7 @@ LOperand* LiveRange::CreateAssignedOperand() {
|
| LOperand* op = NULL;
|
| if (HasRegisterAssigned()) {
|
| ASSERT(!IsSpilled());
|
| - if (assigned_double_) {
|
| + if (IsDouble()) {
|
| op = LDoubleRegister::Create(assigned_register());
|
| } else {
|
| op = LRegister::Create(assigned_register());
|
| @@ -290,7 +290,7 @@ void LiveRange::AdvanceLastProcessedMarker(
|
|
|
|
|
| void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) {
|
| - ASSERT(Start().Value() <= position.Value());
|
| + ASSERT(Start().Value() < position.Value());
|
| ASSERT(result->IsEmpty());
|
| // Find the last interval that ends before the position. If the
|
| // position is contained in one of the intervals in the chain, we
|
| @@ -625,7 +625,7 @@ LiveRange* LAllocator::FixedLiveRangeFor(int index) {
|
| if (result == NULL) {
|
| result = new LiveRange(FixedLiveRangeID(index));
|
| ASSERT(result->IsFixed());
|
| - result->set_assigned_register(index, false);
|
| + result->set_assigned_register(index, GENERAL_REGISTERS);
|
| fixed_live_ranges_[index] = result;
|
| }
|
| return result;
|
| @@ -642,7 +642,7 @@ LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
|
| if (result == NULL) {
|
| result = new LiveRange(FixedDoubleLiveRangeID(index));
|
| ASSERT(result->IsFixed());
|
| - result->set_assigned_register(index, true);
|
| + result->set_assigned_register(index, DOUBLE_REGISTERS);
|
| fixed_double_live_ranges_[index] = result;
|
| }
|
| return result;
|
| @@ -1258,14 +1258,6 @@ void LAllocator::BuildLiveRanges() {
|
| }
|
|
|
|
|
| -void LAllocator::AllocateGeneralRegisters() {
|
| - HPhase phase("Allocate general registers", this);
|
| - num_registers_ = Register::kNumAllocatableRegisters;
|
| - mode_ = CPU_REGISTERS;
|
| - AllocateRegisters();
|
| -}
|
| -
|
| -
|
| bool LAllocator::SafePointsAreInOrder() const {
|
| const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
|
| int safe_point = 0;
|
| @@ -1397,10 +1389,18 @@ void LAllocator::ProcessOsrEntry() {
|
| }
|
|
|
|
|
| +void LAllocator::AllocateGeneralRegisters() {
|
| + HPhase phase("Allocate general registers", this);
|
| + num_registers_ = Register::kNumAllocatableRegisters;
|
| + mode_ = GENERAL_REGISTERS;
|
| + AllocateRegisters();
|
| +}
|
| +
|
| +
|
| void LAllocator::AllocateDoubleRegisters() {
|
| HPhase phase("Allocate double registers", this);
|
| num_registers_ = DoubleRegister::kNumAllocatableRegisters;
|
| - mode_ = XMM_REGISTERS;
|
| + mode_ = DOUBLE_REGISTERS;
|
| AllocateRegisters();
|
| }
|
|
|
| @@ -1411,7 +1411,7 @@ void LAllocator::AllocateRegisters() {
|
|
|
| for (int i = 0; i < live_ranges_.length(); ++i) {
|
| if (live_ranges_[i] != NULL) {
|
| - if (HasDoubleValue(live_ranges_[i]->id()) == (mode_ == XMM_REGISTERS)) {
|
| + if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
|
| AddToUnhandledUnsorted(live_ranges_[i]);
|
| }
|
| }
|
| @@ -1422,7 +1422,7 @@ void LAllocator::AllocateRegisters() {
|
| ASSERT(active_live_ranges_.is_empty());
|
| ASSERT(inactive_live_ranges_.is_empty());
|
|
|
| - if (mode_ == XMM_REGISTERS) {
|
| + if (mode_ == DOUBLE_REGISTERS) {
|
| for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
|
| LiveRange* current = fixed_double_live_ranges_.at(i);
|
| if (current != NULL) {
|
| @@ -1463,11 +1463,7 @@ void LAllocator::AllocateRegisters() {
|
| current->Start().NextInstruction().Value()) {
|
| // Do not spill live range eagerly if use position that can benefit from
|
| // the register is too close to the start of live range.
|
| - LiveRange* part = Split(current,
|
| - current->Start().NextInstruction(),
|
| - pos->pos());
|
| - Spill(current);
|
| - AddToUnhandledSorted(part);
|
| + SpillBetween(current, current->Start(), pos->pos());
|
| ASSERT(UnhandledIsSorted());
|
| continue;
|
| }
|
| @@ -1521,6 +1517,16 @@ void LAllocator::Setup() {
|
| }
|
|
|
|
|
| +const char* LAllocator::RegisterName(int allocation_index) {
|
| + ASSERT(mode_ != NONE);
|
| + if (mode_ == GENERAL_REGISTERS) {
|
| + return Register::AllocationIndexToString(allocation_index);
|
| + } else {
|
| + return DoubleRegister::AllocationIndexToString(allocation_index);
|
| + }
|
| +}
|
| +
|
| +
|
| void LAllocator::TraceAlloc(const char* msg, ...) {
|
| if (FLAG_trace_alloc) {
|
| va_list arguments;
|
| @@ -1544,10 +1550,12 @@ bool LAllocator::HasTaggedValue(int virtual_register) const {
|
| }
|
|
|
|
|
| -bool LAllocator::HasDoubleValue(int virtual_register) const {
|
| +RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
|
| HValue* value = graph()->LookupValue(virtual_register);
|
| - if (value == NULL) return false;
|
| - return value->representation().IsDouble();
|
| + if (value != NULL && value->representation().IsDouble()) {
|
| + return DOUBLE_REGISTERS;
|
| + }
|
| + return GENERAL_REGISTERS;
|
| }
|
|
|
|
|
| @@ -1728,16 +1736,22 @@ void LAllocator::InactiveToActive(LiveRange* range) {
|
| }
|
|
|
|
|
| +// TryAllocateFreeReg and AllocateBlockedReg assume this
|
| +// when allocating local arrays.
|
| +STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
|
| + Register::kNumAllocatableRegisters);
|
| +
|
| +
|
| bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
|
| - LifetimePosition max_pos = LifetimePosition::FromInstructionIndex(
|
| - chunk_->instructions()->length() + 1);
|
| - ASSERT(DoubleRegister::kNumAllocatableRegisters >=
|
| - Register::kNumAllocatableRegisters);
|
| - EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters>
|
| - free_pos(max_pos);
|
| + LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];
|
| +
|
| + for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
|
| + free_until_pos[i] = LifetimePosition::MaxPosition();
|
| + }
|
| +
|
| for (int i = 0; i < active_live_ranges_.length(); ++i) {
|
| LiveRange* cur_active = active_live_ranges_.at(i);
|
| - free_pos[cur_active->assigned_register()] =
|
| + free_until_pos[cur_active->assigned_register()] =
|
| LifetimePosition::FromInstructionIndex(0);
|
| }
|
|
|
| @@ -1748,65 +1762,83 @@ bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
|
| cur_inactive->FirstIntersection(current);
|
| if (!next_intersection.IsValid()) continue;
|
| int cur_reg = cur_inactive->assigned_register();
|
| - free_pos[cur_reg] = Min(free_pos[cur_reg], next_intersection);
|
| + free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
|
| }
|
|
|
| - UsePosition* pos = current->FirstPosWithHint();
|
| - if (pos != NULL) {
|
| - LOperand* hint = pos->hint();
|
| + UsePosition* hinted_use = current->FirstPosWithHint();
|
| + if (hinted_use != NULL) {
|
| + LOperand* hint = hinted_use->hint();
|
| if (hint->IsRegister() || hint->IsDoubleRegister()) {
|
| int register_index = hint->index();
|
| - TraceAlloc("Found reg hint %d for live range %d (free [%d, end %d[)\n",
|
| - register_index,
|
| - current->id(),
|
| - free_pos[register_index].Value(),
|
| - current->End().Value());
|
| - if (free_pos[register_index].Value() >= current->End().Value()) {
|
| - TraceAlloc("Assigning preferred reg %d to live range %d\n",
|
| - register_index,
|
| + TraceAlloc(
|
| + "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
|
| + RegisterName(register_index),
|
| + free_until_pos[register_index].Value(),
|
| + current->id(),
|
| + current->End().Value());
|
| +
|
| + // The desired register is free until the end of the current live range.
|
| + if (free_until_pos[register_index].Value() >= current->End().Value()) {
|
| + TraceAlloc("Assigning preferred reg %s to live range %d\n",
|
| + RegisterName(register_index),
|
| current->id());
|
| - current->set_assigned_register(register_index, mode_ == XMM_REGISTERS);
|
| + current->set_assigned_register(register_index, mode_);
|
| return true;
|
| }
|
| }
|
| }
|
|
|
| - int max_reg = 0;
|
| + // Find the register which stays free for the longest time.
|
| + int reg = 0;
|
| for (int i = 1; i < RegisterCount(); ++i) {
|
| - if (free_pos[i].Value() > free_pos[max_reg].Value()) {
|
| - max_reg = i;
|
| + if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
|
| + reg = i;
|
| }
|
| }
|
|
|
| - if (free_pos[max_reg].InstructionIndex() == 0) {
|
| + LifetimePosition pos = free_until_pos[reg];
|
| +
|
| + if (pos.Value() <= current->Start().Value()) {
|
| + // All registers are blocked.
|
| return false;
|
| - } else if (free_pos[max_reg].Value() >= current->End().Value()) {
|
| - TraceAlloc("Assigning reg %d to live range %d\n", max_reg, current->id());
|
| - current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS);
|
| - } else {
|
| - // Split the interval before first use position of max_reg and never split
|
| - // it interval at its start position.
|
| - LifetimePosition pos = free_pos[max_reg];
|
| - if (pos.Value() <= current->Start().Value()) return false;
|
| - LiveRange* second_range = Split(current, pos);
|
| - AddToUnhandledSorted(second_range);
|
| - current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS);
|
| }
|
|
|
| + if (pos.Value() < current->End().Value()) {
|
| + // Register reg is available at the range start but becomes blocked before
|
| + // the range end. Split current at the position where it becomes blocked.
|
| + LiveRange* tail = SplitAt(current, pos);
|
| + AddToUnhandledSorted(tail);
|
| + }
|
| +
|
| +
|
| + // Register reg is available at the range start and is free until
|
| + // the range end.
|
| + ASSERT(pos.Value() >= current->End().Value());
|
| + TraceAlloc("Assigning reg %s to live range %d\n",
|
| + RegisterName(reg),
|
| + current->id());
|
| + current->set_assigned_register(reg, mode_);
|
| +
|
| return true;
|
| }
|
|
|
|
|
| void LAllocator::AllocateBlockedReg(LiveRange* current) {
|
| - LifetimePosition max_pos =
|
| - LifetimePosition::FromInstructionIndex(
|
| - chunk_->instructions()->length() + 1);
|
| - ASSERT(DoubleRegister::kNumAllocatableRegisters >=
|
| - Register::kNumAllocatableRegisters);
|
| - EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters>
|
| - use_pos(max_pos);
|
| - EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters>
|
| - block_pos(max_pos);
|
| + UsePosition* register_use = current->NextRegisterPosition(current->Start());
|
| + if (register_use == NULL) {
|
| + // There is no use in the current live range that requires a register.
|
| + // We can just spill it.
|
| + Spill(current);
|
| + return;
|
| + }
|
| +
|
| +
|
| + LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
|
| + LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];
|
| +
|
| + for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
|
| + use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
|
| + }
|
|
|
| for (int i = 0; i < active_live_ranges_.length(); ++i) {
|
| LiveRange* range = active_live_ranges_[i];
|
| @@ -1839,30 +1871,48 @@ void LAllocator::AllocateBlockedReg(LiveRange* current) {
|
| }
|
| }
|
|
|
| - int max_reg = 0;
|
| + int reg = 0;
|
| for (int i = 1; i < RegisterCount(); ++i) {
|
| - if (use_pos[i].Value() > use_pos[max_reg].Value()) {
|
| - max_reg = i;
|
| + if (use_pos[i].Value() > use_pos[reg].Value()) {
|
| + reg = i;
|
| }
|
| }
|
|
|
| - UsePosition* first_usage = current->NextRegisterPosition(current->Start());
|
| - if (first_usage == NULL) {
|
| - Spill(current);
|
| - } else if (use_pos[max_reg].Value() < first_usage->pos().Value()) {
|
| - SplitAndSpill(current, current->Start(), first_usage->pos());
|
| - } else {
|
| - if (block_pos[max_reg].Value() < current->End().Value()) {
|
| - // Split current before blocked position.
|
| - LiveRange* second_range = Split(current,
|
| - current->Start(),
|
| - block_pos[max_reg]);
|
| - AddToUnhandledSorted(second_range);
|
| - }
|
| + LifetimePosition pos = use_pos[reg];
|
| +
|
| + if (pos.Value() < register_use->pos().Value()) {
|
| + // All registers are blocked before the first use that requires a register.
|
| + // Spill starting part of live range up to that use.
|
| + //
|
| + // Corner case: the first use position is equal to the start of the range.
|
| + // In this case we have nothing to spill and SpillBetween will just return
|
| + // this range to the list of unhandled ones. This will lead to an infinite
|
| + // loop.
|
| + ASSERT(current->Start().Value() < register_use->pos().Value());
|
| + SpillBetween(current, current->Start(), register_use->pos());
|
| + return;
|
| + }
|
|
|
| - current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS);
|
| - SplitAndSpillIntersecting(current);
|
| + if (block_pos[reg].Value() < current->End().Value()) {
|
| + // Register becomes blocked before the current range end. Split before that
|
| + // position.
|
| + LiveRange* tail = SplitBetween(current,
|
| + current->Start(),
|
| + block_pos[reg].InstructionStart());
|
| + AddToUnhandledSorted(tail);
|
| }
|
| +
|
| + // Register reg is not blocked for the whole range.
|
| + ASSERT(block_pos[reg].Value() >= current->End().Value());
|
| + TraceAlloc("Assigning reg %s to live range %d\n",
|
| + RegisterName(reg),
|
| + current->id());
|
| + current->set_assigned_register(reg, mode_);
|
| +
|
| + // This register was not free. Thus we need to find and spill
|
| + // parts of active and inactive live ranges that use the same register
|
| + // at the same lifetime positions as current.
|
| + SplitAndSpillIntersecting(current);
|
| }
|
|
|
|
|
| @@ -1875,9 +1925,9 @@ void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
|
| if (range->assigned_register() == reg) {
|
| UsePosition* next_pos = range->NextRegisterPosition(current->Start());
|
| if (next_pos == NULL) {
|
| - SplitAndSpill(range, split_pos);
|
| + SpillAfter(range, split_pos);
|
| } else {
|
| - SplitAndSpill(range, split_pos, next_pos->pos());
|
| + SpillBetween(range, split_pos, next_pos->pos());
|
| }
|
| ActiveToHandled(range);
|
| --i;
|
| @@ -1892,10 +1942,10 @@ void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
|
| if (next_intersection.IsValid()) {
|
| UsePosition* next_pos = range->NextRegisterPosition(current->Start());
|
| if (next_pos == NULL) {
|
| - SplitAndSpill(range, split_pos);
|
| + SpillAfter(range, split_pos);
|
| } else {
|
| next_intersection = Min(next_intersection, next_pos->pos());
|
| - SplitAndSpill(range, split_pos, next_intersection);
|
| + SpillBetween(range, split_pos, next_intersection);
|
| }
|
| InactiveToHandled(range);
|
| --i;
|
| @@ -1905,19 +1955,50 @@ void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
|
| }
|
|
|
|
|
| -LiveRange* LAllocator::Split(LiveRange* range,
|
| - LifetimePosition start,
|
| - LifetimePosition end) {
|
| +bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
|
| + return pos.IsInstructionStart() &&
|
| + chunk_->instructions()->at(pos.InstructionIndex())->IsLabel();
|
| +}
|
| +
|
| +
|
| +void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) {
|
| + UsePosition* prev_pos = prev->AddUsePosition(
|
| + LifetimePosition::FromInstructionIndex(pos));
|
| + UsePosition* next_pos = next->AddUsePosition(
|
| + LifetimePosition::FromInstructionIndex(pos));
|
| + LOperand* prev_operand = prev_pos->operand();
|
| + LOperand* next_operand = next_pos->operand();
|
| + LGap* gap = chunk_->GetGapAt(pos);
|
| + gap->GetOrCreateParallelMove(LGap::START)->
|
| + AddMove(prev_operand, next_operand);
|
| + next_pos->set_hint(prev_operand);
|
| +}
|
| +
|
| +
|
| +LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
|
| + ASSERT(!range->IsFixed());
|
| + TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
|
| +
|
| + if (pos.Value() <= range->Start().Value()) return range;
|
| +
|
| + LiveRange* result = LiveRangeFor(next_virtual_register_++);
|
| + range->SplitAt(pos, result);
|
| + return result;
|
| +}
|
| +
|
| +
|
| +LiveRange* LAllocator::SplitBetween(LiveRange* range,
|
| + LifetimePosition start,
|
| + LifetimePosition end) {
|
| ASSERT(!range->IsFixed());
|
| - TraceAlloc("Splitting live range %d in position between [%d, %d[\n",
|
| + TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
|
| range->id(),
|
| start.Value(),
|
| end.Value());
|
|
|
| - LifetimePosition split_pos = FindOptimalSplitPos(
|
| - start, end.PrevInstruction().InstructionEnd());
|
| + LifetimePosition split_pos = FindOptimalSplitPos(start, end);
|
| ASSERT(split_pos.Value() >= start.Value());
|
| - return Split(range, split_pos);
|
| + return SplitAt(range, split_pos);
|
| }
|
|
|
|
|
| @@ -1940,79 +2021,52 @@ LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
|
| }
|
|
|
| HBasicBlock* block = end_block;
|
| - // Move to the most outside loop header.
|
| + // Find header of outermost loop.
|
| while (block->parent_loop_header() != NULL &&
|
| block->parent_loop_header()->block_id() > start_block->block_id()) {
|
| block = block->parent_loop_header();
|
| }
|
|
|
| - if (block == end_block) {
|
| - return end;
|
| - }
|
| + if (block == end_block) return end;
|
|
|
| return LifetimePosition::FromInstructionIndex(
|
| block->first_instruction_index());
|
| }
|
|
|
|
|
| -bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
|
| - return pos.IsInstructionStart() &&
|
| - chunk_->instructions()->at(pos.InstructionIndex())->IsLabel();
|
| +void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
|
| + LiveRange* second_part = SplitAt(range, pos);
|
| + Spill(second_part);
|
| }
|
|
|
|
|
| -void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) {
|
| - UsePosition* prev_pos = prev->AddUsePosition(
|
| - LifetimePosition::FromInstructionIndex(pos));
|
| - UsePosition* next_pos = next->AddUsePosition(
|
| - LifetimePosition::FromInstructionIndex(pos));
|
| - LOperand* prev_operand = prev_pos->operand();
|
| - LOperand* next_operand = next_pos->operand();
|
| - LGap* gap = chunk_->GetGapAt(pos);
|
| - gap->GetOrCreateParallelMove(LGap::START)->
|
| - AddMove(prev_operand, next_operand);
|
| - next_pos->set_hint(prev_operand);
|
| -}
|
| -
|
| +void LAllocator::SpillBetween(LiveRange* range,
|
| + LifetimePosition start,
|
| + LifetimePosition end) {
|
| + ASSERT(start.Value() < end.Value());
|
| + LiveRange* second_part = SplitAt(range, start);
|
|
|
| -LiveRange* LAllocator::Split(LiveRange* range, LifetimePosition pos) {
|
| - ASSERT(!range->IsFixed());
|
| - TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
|
| - if (pos.Value() <= range->Start().Value()) {
|
| - return range;
|
| - }
|
| - LiveRange* result = LiveRangeFor(next_virtual_register_++);
|
| - range->SplitAt(pos, result);
|
| - return result;
|
| -}
|
| + if (second_part->Start().Value() < end.Value()) {
|
| + // The split result intersects with [start, end[.
|
| + // Split it at a position between ]start+1, end[, spill the middle part
|
| + // and put the rest on the unhandled list.
|
| + LiveRange* third_part = SplitBetween(
|
| + second_part,
|
| + second_part->Start().InstructionEnd(),
|
| + end.PrevInstruction().InstructionEnd());
|
|
|
| + ASSERT(third_part != second_part);
|
|
|
| -void LAllocator::SplitAndSpill(LiveRange* range,
|
| - LifetimePosition start,
|
| - LifetimePosition end) {
|
| - // We have an interval range and want to make sure that it is
|
| - // spilled at start and at most spilled until end.
|
| - ASSERT(start.Value() < end.Value());
|
| - LiveRange* tail_part = Split(range, start);
|
| - if (tail_part->Start().Value() < end.Value()) {
|
| - LiveRange* third_part = Split(tail_part,
|
| - tail_part->Start().NextInstruction(),
|
| - end);
|
| - Spill(tail_part);
|
| - ASSERT(third_part != tail_part);
|
| + Spill(second_part);
|
| AddToUnhandledSorted(third_part);
|
| } else {
|
| - AddToUnhandledSorted(tail_part);
|
| + // The split result does not intersect with [start, end[.
|
| + // Nothing to spill. Just put it on the unhandled list as a whole.
|
| + AddToUnhandledSorted(second_part);
|
| }
|
| }
|
|
|
|
|
| -void LAllocator::SplitAndSpill(LiveRange* range, LifetimePosition at) {
|
| - LiveRange* second_part = Split(range, at);
|
| - Spill(second_part);
|
| -}
|
| -
|
| -
|
| void LAllocator::Spill(LiveRange* range) {
|
| ASSERT(!range->IsSpilled());
|
| TraceAlloc("Spilling live range %d\n", range->id());
|
| @@ -2020,7 +2074,7 @@ void LAllocator::Spill(LiveRange* range) {
|
|
|
| if (!first->HasAllocatedSpillOperand()) {
|
| LOperand* op = TryReuseSpillSlot(range);
|
| - if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == XMM_REGISTERS);
|
| + if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
|
| first->SetSpillOperand(op);
|
| }
|
| range->MakeSpilled();
|
|
|