Index: src/compiler/register-allocator.cc
diff --git a/src/compiler/register-allocator.cc b/src/compiler/register-allocator.cc
index 119e11f031c372f8a49732dc8940975b33d180c4..4647938df6293dd052ce1bcc52605b6362e3f74a 100644
--- a/src/compiler/register-allocator.cc
+++ b/src/compiler/register-allocator.cc
@@ -33,7 +33,7 @@ int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
 
 int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
                                 RegisterKind kind) {
-  return kind == FP_REGISTERS ? cfg->num_allocatable_aliased_double_registers()
+  return kind == FP_REGISTERS ? cfg->num_allocatable_double_registers()
                               : cfg->num_allocatable_general_registers();
 }
 
@@ -64,25 +64,33 @@ Instruction* GetLastInstruction(InstructionSequence* code,
   return code->InstructionAt(block->last_instruction_index());
 }
 
-
-bool IsOutputRegisterOf(Instruction* instr, Register reg) {
+bool IsOutputRegisterOf(Instruction* instr, int code) {
   for (size_t i = 0; i < instr->OutputCount(); i++) {
     InstructionOperand* output = instr->OutputAt(i);
     if (output->IsRegister() &&
-        LocationOperand::cast(output)->GetRegister().is(reg)) {
+        LocationOperand::cast(output)->register_code() == code) {
       return true;
     }
   }
   return false;
 }
 
-
-bool IsOutputDoubleRegisterOf(Instruction* instr, DoubleRegister reg) {
+bool IsOutputFPRegisterOf(Instruction* instr, MachineRepresentation rep,
+                          int code) {
   for (size_t i = 0; i < instr->OutputCount(); i++) {
     InstructionOperand* output = instr->OutputAt(i);
-    if (output->IsFPRegister() &&
-        LocationOperand::cast(output)->GetDoubleRegister().is(reg)) {
-      return true;
+    if (output->IsFPRegister()) {
+      const LocationOperand* op = LocationOperand::cast(output);
+      const RegisterConfiguration* config =
+          RegisterConfiguration::ArchDefault(RegisterConfiguration::TURBOFAN);
+      if (config->fp_aliasing_kind() != RegisterConfiguration::COMBINE) {
+        if (op->register_code() == code) return true;
+      } else {
+        if (config->AreAliases(op->representation(), op->register_code(), rep,
+                               code)) {
+          return true;
+        }
+      }
     }
   }
   return false;
@@ -319,11 +327,7 @@ bool UsePosition::HintRegister(int* register_code) const {
     case UsePositionHintType::kOperand: {
       InstructionOperand* operand =
           reinterpret_cast<InstructionOperand*>(hint_);
-      int assigned_register =
-          operand->IsRegister()
-              ? LocationOperand::cast(operand)->GetRegister().code()
-              : LocationOperand::cast(operand)->GetDoubleRegister().code();
-      *register_code = assigned_register;
+      *register_code = LocationOperand::cast(operand)->register_code();
       return true;
     }
     case UsePositionHintType::kPhi: {
@@ -1254,12 +1258,6 @@ SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
   parent->SetSpillRange(this);
 }
 
-
-int SpillRange::ByteWidth() const {
-  return GetByteWidth(live_ranges_[0]->representation());
-}
-
-
 bool SpillRange::IsIntersectingWith(SpillRange* other) const {
   if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
       this->End() <= other->use_interval_->start() ||
@@ -1362,7 +1360,6 @@ void RegisterAllocationData::PhiMapValue::CommitAssignment(
   }
 }
 
-
 RegisterAllocationData::RegisterAllocationData(
     const RegisterConfiguration* config, Zone* zone, Frame* frame,
     InstructionSequence* code, const char* debug_name)
@@ -1378,6 +1375,8 @@ RegisterAllocationData::RegisterAllocationData(
                    allocation_zone()),
       fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
                          allocation_zone()),
+      fixed_float_live_ranges_(this->config()->num_float_registers(), nullptr,
+                               allocation_zone()),
       fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
                                 allocation_zone()),
       spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
@@ -1553,17 +1552,30 @@ SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
   return spill_range;
 }
 
-
-void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) {
-  if (kind == FP_REGISTERS) {
-    assigned_double_registers_->Add(index);
-  } else {
-    DCHECK(kind == GENERAL_REGISTERS);
-    assigned_registers_->Add(index);
+void RegisterAllocationData::MarkAllocated(MachineRepresentation rep,
+                                           int index) {
+  switch (rep) {
+    case MachineRepresentation::kFloat32:
+      if (config()->fp_aliasing_kind() == RegisterConfiguration::COMBINE) {
+        int alias_base_index = -1;
+        int aliases = config()->GetAliases(
+            rep, index, MachineRepresentation::kFloat64, &alias_base_index);
+        while (aliases--) {
+          int aliased_reg = alias_base_index + aliases;
+          assigned_double_registers_->Add(aliased_reg);
+        }
+      }
+      break;
+    case MachineRepresentation::kFloat64:
+      assigned_double_registers_->Add(index);
+      break;
+    default:
+      DCHECK(!IsFloatingPoint(rep));
+      assigned_registers_->Add(index);
+      break;
   }
 }
 
-
 bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
   return pos.IsFullStart() &&
          code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
@@ -1877,42 +1889,62 @@ void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
   }
 }
 
-
-int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) {
-  return -index - 1 - config()->num_general_registers();
+int LiveRangeBuilder::FixedFPLiveRangeID(int index, MachineRepresentation rep) {
+  switch (rep) {
+    case MachineRepresentation::kFloat32:
+      return -index - 1 - config()->num_general_registers();
+    case MachineRepresentation::kFloat64:
+      return -index - 1 - config()->num_general_registers() -
+             config()->num_float_registers();
+    default:
+      break;
+  }
+  UNREACHABLE();
+  return 0;
 }
 
-
 TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
   DCHECK(index < config()->num_general_registers());
   TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
   if (result == nullptr) {
-    result = data()->NewLiveRange(FixedLiveRangeID(index),
-                                  InstructionSequence::DefaultRepresentation());
+    MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
+    result = data()->NewLiveRange(FixedLiveRangeID(index), rep);
     DCHECK(result->IsFixed());
     result->set_assigned_register(index);
-    data()->MarkAllocated(GENERAL_REGISTERS, index);
+    data()->MarkAllocated(rep, index);
    data()->fixed_live_ranges()[index] = result;
   }
   return result;
 }
 
-
-TopLevelLiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) {
-  DCHECK(index < config()->num_double_registers());
-  TopLevelLiveRange* result = data()->fixed_double_live_ranges()[index];
-  if (result == nullptr) {
-    result = data()->NewLiveRange(FixedDoubleLiveRangeID(index),
-                                  MachineRepresentation::kFloat64);
-    DCHECK(result->IsFixed());
-    result->set_assigned_register(index);
-    data()->MarkAllocated(FP_REGISTERS, index);
-    data()->fixed_double_live_ranges()[index] = result;
+TopLevelLiveRange* LiveRangeBuilder::FixedFPLiveRangeFor(
+    int index, MachineRepresentation rep) {
+  TopLevelLiveRange* result = nullptr;
+  if (rep == MachineRepresentation::kFloat64) {
+    DCHECK(index < config()->num_double_registers());
+    result = data()->fixed_double_live_ranges()[index];
+    if (result == nullptr) {
+      result = data()->NewLiveRange(FixedFPLiveRangeID(index, rep), rep);
+      DCHECK(result->IsFixed());
+      result->set_assigned_register(index);
+      data()->MarkAllocated(rep, index);
+      data()->fixed_double_live_ranges()[index] = result;
+    }
+  } else {
+    DCHECK(rep == MachineRepresentation::kFloat32);
+    DCHECK(index < config()->num_float_registers());
+    result = data()->fixed_float_live_ranges()[index];
+    if (result == nullptr) {
+      result = data()->NewLiveRange(FixedFPLiveRangeID(index, rep), rep);
+      DCHECK(result->IsFixed());
+      result->set_assigned_register(index);
+      data()->MarkAllocated(rep, index);
+      data()->fixed_float_live_ranges()[index] = result;
+    }
   }
   return result;
 }
 
-
 TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
   if (operand->IsUnallocated()) {
     return data()->GetOrCreateLiveRangeFor(
@@ -1924,8 +1956,8 @@ TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
     return FixedLiveRangeFor(
         LocationOperand::cast(operand)->GetRegister().code());
   } else if (operand->IsFPRegister()) {
-    return FixedDoubleLiveRangeFor(
-        LocationOperand::cast(operand)->GetDoubleRegister().code());
+    LocationOperand* op = LocationOperand::cast(operand);
+    return FixedFPLiveRangeFor(op->register_code(), op->representation());
   } else {
     return nullptr;
   }
@@ -2021,7 +2053,7 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
     if (instr->ClobbersRegisters()) {
       for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
         int code = config()->GetAllocatableGeneralCode(i);
-        if (!IsOutputRegisterOf(instr, Register::from_code(code))) {
+        if (!IsOutputRegisterOf(instr, code)) {
           TopLevelLiveRange* range = FixedLiveRangeFor(code);
           range->AddUseInterval(curr_position, curr_position.End(),
                                 allocation_zone());
@@ -2030,11 +2062,22 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
       }
     }
 
     if (instr->ClobbersDoubleRegisters()) {
-      for (int i = 0; i < config()->num_allocatable_aliased_double_registers();
-           ++i) {
+      for (int i = 0; i < config()->num_allocatable_double_registers(); ++i) {
         int code = config()->GetAllocatableDoubleCode(i);
-        if (!IsOutputDoubleRegisterOf(instr, DoubleRegister::from_code(code))) {
-          TopLevelLiveRange* range = FixedDoubleLiveRangeFor(code);
+        if (!IsOutputFPRegisterOf(instr, MachineRepresentation::kFloat64,
+                                  code)) {
+          TopLevelLiveRange* range =
+              FixedFPLiveRangeFor(code, MachineRepresentation::kFloat64);
+          range->AddUseInterval(curr_position, curr_position.End(),
+                                allocation_zone());
+        }
+      }
+      for (int i = 0; i < config()->num_allocatable_float_registers(); ++i) {
+        int code = config()->GetAllocatableFloatCode(i);
+        if (!IsOutputFPRegisterOf(instr, MachineRepresentation::kFloat32,
+                                  code)) {
+          TopLevelLiveRange* range =
+              FixedFPLiveRangeFor(code, MachineRepresentation::kFloat32);
           range->AddUseInterval(curr_position, curr_position.End(),
                                 allocation_zone());
         }
@@ -2381,8 +2424,10 @@ RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
       num_allocatable_registers_(
           GetAllocatableRegisterCount(data->config(), kind)),
       allocatable_register_codes_(
-          GetAllocatableRegisterCodes(data->config(), kind)) {}
-
+          GetAllocatableRegisterCodes(data->config(), kind)),
+      no_combining_(kind != FP_REGISTERS ||
+                    data->config()->fp_aliasing_kind() !=
+                        RegisterConfiguration::COMBINE) {}
 
 LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
     const LiveRange* range, int instruction_index) {
@@ -2552,14 +2597,6 @@ void RegisterAllocator::Spill(LiveRange* range) {
   range->Spill();
 }
 
-
-const ZoneVector<TopLevelLiveRange*>& RegisterAllocator::GetFixedRegisters()
-    const {
-  return mode() == FP_REGISTERS ? data()->fixed_double_live_ranges()
-                                : data()->fixed_live_ranges();
-}
-
-
 const char* RegisterAllocator::RegisterName(int register_code) const {
   if (mode() == GENERAL_REGISTERS) {
     return data()->config()->GetGeneralRegisterName(register_code);
@@ -2606,11 +2643,16 @@ void LinearScanAllocator::AllocateRegisters() {
   SortUnhandled();
   DCHECK(UnhandledIsSorted());
 
-  auto& fixed_ranges = GetFixedRegisters();
-  for (TopLevelLiveRange* current : fixed_ranges) {
-    if (current != nullptr) {
-      DCHECK_EQ(mode(), current->kind());
-      AddToInactive(current);
+  if (mode() == GENERAL_REGISTERS) {
+    for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
+      if (current != nullptr) AddToInactive(current);
+    }
+  } else {
+    for (TopLevelLiveRange* current : data()->fixed_float_live_ranges()) {
+      if (current != nullptr) AddToInactive(current);
+    }
+    for (TopLevelLiveRange* current : data()->fixed_double_live_ranges()) {
+      if (current != nullptr) AddToInactive(current);
     }
   }
 
@@ -2664,7 +2706,7 @@
 
 void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
                                                        int reg) {
-  data()->MarkAllocated(range->kind(), reg);
+  data()->MarkAllocated(range->representation(), reg);
   range->set_assigned_register(reg);
   range->SetUseHints(reg);
   if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
@@ -2778,18 +2820,37 @@ void LinearScanAllocator::InactiveToActive(LiveRange* range)
 
 
 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
+  int num_regs = num_registers();
+  int num_codes = num_allocatable_registers();
+  const int* codes = allocatable_register_codes();
+  if (!no_combining() &&
+      (current->representation() == MachineRepresentation::kFloat32)) {
+    num_regs = data()->config()->num_float_registers();
+    num_codes = data()->config()->num_allocatable_float_registers();
+    codes = data()->config()->allocatable_float_codes();
+  }
   LifetimePosition free_until_pos[RegisterConfiguration::kMaxFPRegisters];
-
-  for (int i = 0; i < num_registers(); i++) {
+  for (int i = 0; i < num_regs; i++) {
     free_until_pos[i] = LifetimePosition::MaxPosition();
   }
 
   for (LiveRange* cur_active : active_live_ranges()) {
-    free_until_pos[cur_active->assigned_register()] =
-        LifetimePosition::GapFromInstructionIndex(0);
-    TRACE("Register %s is free until pos %d (1)\n",
-          RegisterName(cur_active->assigned_register()),
-          LifetimePosition::GapFromInstructionIndex(0).value());
+    int cur_reg = cur_active->assigned_register();
+    if (no_combining()) {
+      free_until_pos[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
+      TRACE("Register %s is free until pos %d (1)\n", RegisterName(cur_reg),
+            LifetimePosition::GapFromInstructionIndex(0).value());
+    } else {
+      int alias_base_index = -1;
+      int aliases = data()->config()->GetAliases(
+          cur_active->representation(), cur_reg, current->representation(),
+          &alias_base_index);
+      while (aliases--) {
+        int aliased_reg = alias_base_index + aliases;
+        free_until_pos[aliased_reg] =
+            LifetimePosition::GapFromInstructionIndex(0);
+      }
+    }
   }
 
   for (LiveRange* cur_inactive : inactive_live_ranges()) {
@@ -2798,9 +2859,21 @@ bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
         cur_inactive->FirstIntersection(current);
     if (!next_intersection.IsValid()) continue;
     int cur_reg = cur_inactive->assigned_register();
-    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
-    TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
-          Min(free_until_pos[cur_reg], next_intersection).value());
+    if (no_combining()) {
+      free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
+      TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
+            Min(free_until_pos[cur_reg], next_intersection).value());
+    } else {
+      int alias_base_index = -1;
+      int aliases = data()->config()->GetAliases(
+          cur_inactive->representation(), cur_reg, current->representation(),
+          &alias_base_index);
+      while (aliases--) {
+        int aliased_reg = alias_base_index + aliases;
+        free_until_pos[aliased_reg] =
+            Min(free_until_pos[aliased_reg], next_intersection);
+      }
+    }
   }
 
   int hint_register;
@@ -2822,9 +2895,9 @@ bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
   }
 
   // Find the register which stays free for the longest time.
-  int reg = allocatable_register_code(0);
-  for (int i = 1; i < num_allocatable_registers(); ++i) {
-    int code = allocatable_register_code(i);
+  int reg = codes[0];
+  for (int i = 1; i < num_codes; ++i) {
+    int code = codes[i];
     if (free_until_pos[code] > free_until_pos[reg]) {
       reg = code;
     }
@@ -2844,8 +2917,8 @@ bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
     AddToUnhandledSorted(tail);
   }
 
-  // Register reg is available at the range start and is free until
-  // the range end.
+  // Register reg is available at the range start and is free until the range
+  // end.
   DCHECK(pos >= current->End());
   TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
         current->TopLevel()->vreg(), current->relative_id());
@@ -2864,26 +2937,58 @@ void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
     return;
   }
 
+  int num_regs = num_registers();
+  int num_codes = num_allocatable_registers();
+  const int* codes = allocatable_register_codes();
+  if (!no_combining() &&
+      (current->representation() == MachineRepresentation::kFloat32)) {
+    num_regs = data()->config()->num_float_registers();
+    num_codes = data()->config()->num_allocatable_float_registers();
+    codes = data()->config()->allocatable_float_codes();
+  }
+
   LifetimePosition use_pos[RegisterConfiguration::kMaxFPRegisters];
   LifetimePosition block_pos[RegisterConfiguration::kMaxFPRegisters];
-
-  for (int i = 0; i < num_registers(); i++) {
+  for (int i = 0; i < num_regs; i++) {
     use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
   }
 
   for (LiveRange* range : active_live_ranges()) {
     int cur_reg = range->assigned_register();
-    if (range->TopLevel()->IsFixed() ||
-        !range->CanBeSpilled(current->Start())) {
-      block_pos[cur_reg] = use_pos[cur_reg] =
-          LifetimePosition::GapFromInstructionIndex(0);
-    } else {
-      UsePosition* next_use =
-          range->NextUsePositionRegisterIsBeneficial(current->Start());
-      if (next_use == nullptr) {
-        use_pos[cur_reg] = range->End();
+    bool is_fixed_or_cant_spill =
+        range->TopLevel()->IsFixed() || !range->CanBeSpilled(current->Start());
+    if (no_combining()) {
+      if (is_fixed_or_cant_spill) {
+        block_pos[cur_reg] = use_pos[cur_reg] =
+            LifetimePosition::GapFromInstructionIndex(0);
      } else {
-        use_pos[cur_reg] = next_use->pos();
+        UsePosition* next_use =
+            range->NextUsePositionRegisterIsBeneficial(current->Start());
+        if (next_use == nullptr) {
+          use_pos[cur_reg] = range->End();
+        } else {
+          use_pos[cur_reg] = next_use->pos();
+        }
+      }
+    } else {
+      int alias_base_index = -1;
+      int aliases = data()->config()->GetAliases(
+          range->representation(), cur_reg, current->representation(),
+          &alias_base_index);
+      while (aliases--) {
+        int aliased_reg = alias_base_index + aliases;
+        if (is_fixed_or_cant_spill) {
+          block_pos[aliased_reg] = use_pos[aliased_reg] =
+              LifetimePosition::GapFromInstructionIndex(0);
+        } else {
+          UsePosition* next_use =
+              range->NextUsePositionRegisterIsBeneficial(current->Start());
+          if (next_use == nullptr) {
+            use_pos[aliased_reg] = range->End();
+          } else {
+            use_pos[aliased_reg] = next_use->pos();
+          }
+        }
      }
    }
  }
@@ -2893,17 +2998,36 @@ void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
     LifetimePosition next_intersection = range->FirstIntersection(current);
     if (!next_intersection.IsValid()) continue;
     int cur_reg = range->assigned_register();
-    if (range->TopLevel()->IsFixed()) {
-      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
-      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
+    bool is_fixed = range->TopLevel()->IsFixed();
+    if (no_combining()) {
+      if (is_fixed) {
+        block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
+        use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
+      } else {
+        use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
+      }
     } else {
-      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
+      int alias_base_index = -1;
+      int aliases = data()->config()->GetAliases(
+          range->representation(), cur_reg, current->representation(),
+          &alias_base_index);
+      while (aliases--) {
+        int aliased_reg = alias_base_index + aliases;
+        if (is_fixed) {
+          block_pos[aliased_reg] =
+              Min(block_pos[aliased_reg], next_intersection);
+          use_pos[aliased_reg] =
+              Min(block_pos[aliased_reg], use_pos[aliased_reg]);
+        } else {
+          use_pos[aliased_reg] = Min(use_pos[aliased_reg], next_intersection);
+        }
+      }
     }
   }
 
-  int reg = allocatable_register_code(0);
-  for (int i = 1; i < num_allocatable_registers(); ++i) {
-    int code = allocatable_register_code(i);
+  int reg = codes[0];
+  for (int i = 1; i < num_codes; ++i) {
+    int code = codes[i];
     if (use_pos[code] > use_pos[reg]) {
       reg = code;
     }
@@ -2949,45 +3073,61 @@ void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
   LifetimePosition split_pos = current->Start();
   for (size_t i = 0; i < active_live_ranges().size(); ++i) {
     LiveRange* range = active_live_ranges()[i];
-    if (range->assigned_register() == reg) {
-      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
-      LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
-      if (next_pos == nullptr) {
-        SpillAfter(range, spill_pos);
-      } else {
-        // When spilling between spill_pos and next_pos ensure that the range
-        // remains spilled at least until the start of the current live range.
-        // This guarantees that we will not introduce new unhandled ranges that
-        // start before the current range as this violates allocation invariant
-        // and will lead to an inconsistent state of active and inactive
-        // live-ranges: ranges are allocated in order of their start positions,
-        // ranges are retired from active/inactive when the start of the
-        // current live-range is larger than their end.
-        DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
-                                                          next_pos->pos()));
-        SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
+    if (no_combining()) {
+      if (range->assigned_register() != reg) continue;
+    } else {
+      if (!data()->config()->AreAliases(current->representation(), reg,
+                                        range->representation(),
+                                        range->assigned_register())) {
+        continue;
       }
-      ActiveToHandled(range);
-      --i;
     }
+
+    UsePosition* next_pos = range->NextRegisterPosition(current->Start());
+    LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
+    if (next_pos == nullptr) {
+      SpillAfter(range, spill_pos);
+    } else {
+      // When spilling between spill_pos and next_pos ensure that the range
+      // remains spilled at least until the start of the current live range.
+      // This guarantees that we will not introduce new unhandled ranges that
+      // start before the current range as this violates allocation invariants
+      // and will lead to an inconsistent state of active and inactive
+      // live-ranges: ranges are allocated in order of their start positions,
+      // ranges are retired from active/inactive when the start of the
+      // current live-range is larger than their end.
+      DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
+                                                        next_pos->pos()));
+      SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
+    }
+    ActiveToHandled(range);
+    --i;
   }
 
   for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
     LiveRange* range = inactive_live_ranges()[i];
     DCHECK(range->End() > current->Start());
-    if (range->assigned_register() == reg && !range->TopLevel()->IsFixed()) {
-      LifetimePosition next_intersection = range->FirstIntersection(current);
-      if (next_intersection.IsValid()) {
-        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
-        if (next_pos == nullptr) {
-          SpillAfter(range, split_pos);
-        } else {
-          next_intersection = Min(next_intersection, next_pos->pos());
-          SpillBetween(range, split_pos, next_intersection);
-        }
-        InactiveToHandled(range);
-        --i;
+    if (range->TopLevel()->IsFixed()) continue;
+    if (no_combining()) {
+      if (range->assigned_register() != reg) continue;
+    } else {
+      if (!data()->config()->AreAliases(current->representation(), reg,
+                                        range->representation(),
+                                        range->assigned_register()))
+        continue;
+    }
+
+    LifetimePosition next_intersection = range->FirstIntersection(current);
+    if (next_intersection.IsValid()) {
+      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
+      if (next_pos == nullptr) {
+        SpillAfter(range, split_pos);
+      } else {
+        next_intersection = Min(next_intersection, next_pos->pos());
+        SpillBetween(range, split_pos, next_intersection);
       }
+      InactiveToHandled(range);
+      --i;
     }
   }
 }
@@ -3167,8 +3307,7 @@ void OperandAssigner::AssignSpillSlots() {
     if (range == nullptr || range->IsEmpty()) continue;
     // Allocate a new operand referring to the spill slot.
     if (!range->HasSlot()) {
-      int byte_width = range->ByteWidth();
-      int index = data()->frame()->AllocateSpillSlot(byte_width);
+      int index = data()->frame()->AllocateSpillSlot(range->byte_width());
      range->set_assigned_slot(index);
    }
  }
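
Note on the aliasing arithmetic the patch relies on: under the COMBINE aliasing scheme, RegisterConfiguration::GetAliases and AreAliases translate between kFloat32 and kFloat64 register codes so the allocator can mark every overlapping register as busy. The standalone sketch below is illustrative only (the helper and its exact semantics are assumptions, not part of the patch or the V8 API); it models the ARM-style layout where one double register overlaps two consecutive single registers, for example d0 = {s0, s1}.

#include <cassert>

// Illustrative only: models COMBINE-style aliasing where one kFloat64
// register overlaps two kFloat32 registers. Returns the number of aliases
// and writes the lowest aliased code of the other representation to
// *alias_base_index, mirroring how GetAliases is used in the patch.
int GetFloatAliasesSketch(bool from_float32, int code, int* alias_base_index) {
  if (from_float32) {
    // A float32 register is contained in exactly one float64 register.
    *alias_base_index = code / 2;
    return 1;
  }
  // A float64 register covers two consecutive float32 registers.
  *alias_base_index = code * 2;
  return 2;
}

int main() {
  int base = -1;
  // s3 lives inside d1.
  assert(GetFloatAliasesSketch(true, 3, &base) == 1 && base == 1);
  // d2 covers s4 and s5.
  assert(GetFloatAliasesSketch(false, 2, &base) == 2 && base == 4);
  return 0;
}

TryAllocateFreeReg, AllocateBlockedReg, and SplitAndSpillIntersecting apply this kind of mapping whenever the active or inactive range has a different floating point representation than the range being allocated.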