Index: src/compiler/register-allocator.cc
diff --git a/src/compiler/register-allocator.cc b/src/compiler/register-allocator.cc
index 49229d5872cd22b2aac4aaaed8392163f26e5776..9badf7931cdb1696baefec2730968aab59245673 100644
--- a/src/compiler/register-allocator.cc
+++ b/src/compiler/register-allocator.cc
@@ -89,6 +89,27 @@ bool IsOutputDoubleRegisterOf(Instruction* instr, int index) {
   return false;
 }
 
+
+// TODO(dcarney): fix frame to allow frame accesses to half size location.
+int GetByteWidth(MachineType machine_type) {
+  DCHECK_EQ(RepresentationOf(machine_type), machine_type);
+  switch (machine_type) {
+    case kRepBit:
+    case kRepWord8:
+    case kRepWord16:
+    case kRepWord32:
+    case kRepTagged:
+      return kPointerSize;
+    case kRepFloat32:
+    case kRepWord64:
+    case kRepFloat64:
+      return 8;
+    default:
+      UNREACHABLE();
+      return 0;
+  }
+}
+
 } // namespace
 
 
@@ -214,7 +235,7 @@ struct LiveRange::SpillAtDefinitionList : ZoneObject {
 };
 
 
-LiveRange::LiveRange(int id)
+LiveRange::LiveRange(int id, MachineType machine_type)
     : id_(id),
       spill_start_index_(kMaxInt),
       bits_(0),
@@ -228,8 +249,10 @@ LiveRange::LiveRange(int id)
       current_interval_(nullptr),
       last_processed_use_(nullptr),
       current_hint_position_(nullptr) {
+  DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type));
   bits_ = SpillTypeField::encode(SpillType::kNoSpillType) |
-          AssignedRegisterField::encode(kUnassignedRegister);
+          AssignedRegisterField::encode(kUnassignedRegister) |
+          MachineTypeField::encode(machine_type);
 }
 
 
@@ -268,6 +291,18 @@ void LiveRange::Spill() {
 }
 
 
+RegisterKind LiveRange::kind() const {
+  switch (RepresentationOf(machine_type())) {
+    case kRepFloat32:
+    case kRepFloat64:
+      return DOUBLE_REGISTERS;
+    default:
+      break;
+  }
+  return GENERAL_REGISTERS;
+}
+
+
 void LiveRange::SpillAtDefinition(Zone* zone, int gap_index,
                                   InstructionOperand* operand) {
   DCHECK(HasNoSpillType());
@@ -277,9 +312,9 @@ void LiveRange::SpillAtDefinition(Zone* zone, int gap_index,
 
 
 void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence,
-                                         InstructionOperand* op,
+                                         const InstructionOperand& op,
                                          bool might_be_duplicated) {
-  DCHECK_IMPLIES(op->IsConstant(), spills_at_definition_ == nullptr);
+  DCHECK_IMPLIES(op.IsConstant(), spills_at_definition_ == nullptr);
   DCHECK(!IsChild());
   auto zone = sequence->zone();
   for (auto to_spill = spills_at_definition_; to_spill != nullptr;
@@ -292,15 +327,15 @@ void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence,
       bool found = false;
       for (auto move_op : *move) {
         if (move_op->IsEliminated()) continue;
-        if (move_op->source() == *to_spill->operand &&
-            move_op->destination() == *op) {
+        if (move_op->source().Equals(*to_spill->operand) &&
+            move_op->destination().Equals(op)) {
           found = true;
           break;
         }
       }
       if (found) continue;
     }
-    move->AddMove(*to_spill->operand, *op);
+    move->AddMove(*to_spill->operand, op);
   }
 }
 
@@ -329,14 +364,6 @@ void LiveRange::SetSpillRange(SpillRange* spill_range) {
 }
 
 
-void LiveRange::CommitSpillOperand(AllocatedOperand* operand) {
-  DCHECK(HasSpillRange());
-  DCHECK(!IsChild());
-  set_spill_type(SpillType::kSpillOperand);
-  spill_operand_ = operand;
-}
-
-
 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
   UsePosition* use_pos = last_processed_use_;
   if (use_pos == nullptr || use_pos->pos() > start) {
@@ -395,18 +422,33 @@ InstructionOperand LiveRange::GetAssignedOperand() const {
     DCHECK(!spilled());
     switch (kind()) {
       case GENERAL_REGISTERS:
-        return RegisterOperand(assigned_register());
+        return RegisterOperand(machine_type(), assigned_register());
       case DOUBLE_REGISTERS:
-        return DoubleRegisterOperand(assigned_register());
-      default:
-        UNREACHABLE();
+        return DoubleRegisterOperand(machine_type(), assigned_register());
     }
   }
   DCHECK(spilled());
   DCHECK(!HasRegisterAssigned());
-  auto op = TopLevel()->GetSpillOperand();
-  DCHECK(!op->IsUnallocated());
-  return *op;
+  if (TopLevel()->HasSpillOperand()) {
+    auto op = TopLevel()->GetSpillOperand();
+    DCHECK(!op->IsUnallocated());
+    return *op;
+  }
+  return TopLevel()->GetSpillRangeOperand();
+}
+
+
+AllocatedOperand LiveRange::GetSpillRangeOperand() const {
+  auto spill_range = GetSpillRange();
+  int index = spill_range->assigned_slot();
+  switch (kind()) {
+    case GENERAL_REGISTERS:
+      return StackSlotOperand(machine_type(), index);
+    case DOUBLE_REGISTERS:
+      return DoubleStackSlotOperand(machine_type(), index);
+  }
+  UNREACHABLE();
+  return StackSlotOperand(kMachNone, 0);
 }
 
 
@@ -512,7 +554,6 @@ void LiveRange::SplitAt(LifetimePosition position, LiveRange* result,
   // Link the new live range in the chain before any of the other
   // ranges linked from the range before the split.
   result->parent_ = (parent_ == nullptr) ? this : parent_;
-  result->set_kind(result->parent_->kind());
   result->next_ = next_;
   next_ = result;
 
@@ -626,15 +667,14 @@ void LiveRange::AddUsePosition(UsePosition* use_pos) {
 
 
 void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
-                                     InstructionOperand* spill_op) {
+                                     const InstructionOperand& spill_op) {
   for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) {
     DCHECK(Start() <= pos->pos() && pos->pos() <= End());
     if (!pos->HasOperand()) continue;
     switch (pos->type()) {
       case UsePositionType::kRequiresSlot:
-        if (spill_op != nullptr) {
-          InstructionOperand::ReplaceWith(pos->operand(), spill_op);
-        }
+        DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot());
+        InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
         break;
       case UsePositionType::kRequiresRegister:
         DCHECK(op.IsRegister() || op.IsDoubleRegister());
@@ -726,7 +766,8 @@ static bool AreUseIntervalsIntersecting(UseInterval* interval1,
 }
 
 
-SpillRange::SpillRange(LiveRange* parent, Zone* zone) : live_ranges_(zone) {
+SpillRange::SpillRange(LiveRange* parent, Zone* zone)
+    : live_ranges_(zone), assigned_slot_(kUnassignedSlot) {
   DCHECK(!parent->IsChild());
   UseInterval* result = nullptr;
   UseInterval* node = nullptr;
@@ -752,6 +793,11 @@ SpillRange::SpillRange(LiveRange* parent, Zone* zone) : live_ranges_(zone) {
 }
 
 
+int SpillRange::ByteWidth() const {
+  return GetByteWidth(live_ranges_[0]->machine_type());
+}
+
+
 bool SpillRange::IsIntersectingWith(SpillRange* other) const {
   if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
       this->End() <= other->use_interval_->start() ||
@@ -763,7 +809,11 @@ bool SpillRange::IsIntersectingWith(SpillRange* other) const {
 
 
 bool SpillRange::TryMerge(SpillRange* other) {
-  if (kind() != other->kind() || IsIntersectingWith(other)) return false;
+  // TODO(dcarney): byte widths should be compared here not kinds.
+  if (live_ranges_[0]->kind() != other->live_ranges_[0]->kind() ||
+      IsIntersectingWith(other)) {
+    return false;
+  }
 
   auto max = LifetimePosition::MaxPosition();
   if (End() < other->End() && other->End() != max) {
@@ -787,14 +837,6 @@ bool SpillRange::TryMerge(SpillRange* other) {
 }
 
 
-void SpillRange::SetOperand(AllocatedOperand* op) {
-  for (auto range : live_ranges()) {
-    DCHECK(range->GetSpillRange() == this);
-    range->CommitSpillOperand(op);
-  }
-}
-
-
 void SpillRange::MergeDisjointIntervals(UseInterval* other) {
   UseInterval* tail = nullptr;
   auto current = use_interval_;
@@ -861,7 +903,8 @@ RegisterAllocationData::RegisterAllocationData(
                     allocation_zone()),
       spill_ranges_(allocation_zone()),
       assigned_registers_(nullptr),
-      assigned_double_registers_(nullptr) {
+      assigned_double_registers_(nullptr),
+      virtual_register_count_(code->VirtualRegisterCount()) {
   DCHECK(this->config()->num_general_registers() <=
          RegisterConfiguration::kMaxGeneralRegisters);
   DCHECK(this->config()->num_double_registers() <=
@@ -876,30 +919,49 @@ RegisterAllocationData::RegisterAllocationData(
 }
 
 
+MoveOperands* RegisterAllocationData::AddGapMove(
+    int index, Instruction::GapPosition position,
+    const InstructionOperand& from, const InstructionOperand& to) {
+  auto instr = code()->InstructionAt(index);
+  auto moves = instr->GetOrCreateParallelMove(position, code_zone());
+  return moves->AddMove(from, to);
+}
+
+
+MachineType RegisterAllocationData::MachineTypeFor(int virtual_register) {
+  DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
+  return code()->GetRepresentation(virtual_register);
+}
+
+
 LiveRange* RegisterAllocationData::LiveRangeFor(int index) {
   if (index >= static_cast<int>(live_ranges().size())) {
     live_ranges().resize(index + 1, nullptr);
   }
   auto result = live_ranges()[index];
   if (result == nullptr) {
-    result = NewLiveRange(index);
+    result = NewLiveRange(index, MachineTypeFor(index));
     live_ranges()[index] = result;
   }
   return result;
 }
 
 
-MoveOperands* RegisterAllocationData::AddGapMove(
-    int index, Instruction::GapPosition position,
-    const InstructionOperand& from, const InstructionOperand& to) {
-  auto instr = code()->InstructionAt(index);
-  auto moves = instr->GetOrCreateParallelMove(position, code_zone());
-  return moves->AddMove(from, to);
+LiveRange* RegisterAllocationData::NewLiveRange(int index,
+                                                MachineType machine_type) {
+  return new (allocation_zone()) LiveRange(index, machine_type);
 }
 
 
-LiveRange* RegisterAllocationData::NewLiveRange(int index) {
-  return new (allocation_zone()) LiveRange(index);
+LiveRange* RegisterAllocationData::NewChildRangeFor(LiveRange* range) {
+  int vreg = virtual_register_count_++;
+  if (vreg >= static_cast<int>(live_ranges().size())) {
+    live_ranges().resize(vreg + 1, nullptr);
+  }
+  auto child = new (allocation_zone()) LiveRange(vreg, range->machine_type());
+  DCHECK_NULL(live_ranges()[vreg]);
+  live_ranges()[vreg] = child;
+  return child;
 }
 
 
@@ -972,15 +1034,21 @@ InstructionOperand* ConstraintBuilder::AllocateFixed(
   TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
   DCHECK(operand->HasFixedPolicy());
   InstructionOperand allocated;
+  MachineType machine_type = InstructionSequence::DefaultRepresentation();
+  int virtual_register = operand->virtual_register();
+  if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
+    machine_type = data()->MachineTypeFor(virtual_register);
+  }
   if (operand->HasFixedSlotPolicy()) {
-    allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT,
+    allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, machine_type,
                                  operand->fixed_slot_index());
   } else if (operand->HasFixedRegisterPolicy()) {
-    allocated = AllocatedOperand(AllocatedOperand::REGISTER,
+    allocated = AllocatedOperand(AllocatedOperand::REGISTER, machine_type,
                                  operand->fixed_register_index());
   } else if (operand->HasFixedDoubleRegisterPolicy()) {
+    DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
     allocated = AllocatedOperand(AllocatedOperand::DOUBLE_REGISTER,
-                                 operand->fixed_register_index());
+                                 machine_type, operand->fixed_register_index());
   } else {
     UNREACHABLE();
   }
@@ -1248,9 +1316,9 @@ LiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
   DCHECK(index < config()->num_general_registers());
   auto result = data()->fixed_live_ranges()[index];
   if (result == nullptr) {
-    result = data()->NewLiveRange(FixedLiveRangeID(index));
+    result = data()->NewLiveRange(FixedLiveRangeID(index),
+                                  InstructionSequence::DefaultRepresentation());
     DCHECK(result->IsFixed());
-    result->set_kind(GENERAL_REGISTERS);
     result->set_assigned_register(index);
     data()->MarkAllocated(GENERAL_REGISTERS, index);
     data()->fixed_live_ranges()[index] = result;
@@ -1263,9 +1331,8 @@ LiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) {
   DCHECK(index < config()->num_aliased_double_registers());
   auto result = data()->fixed_double_live_ranges()[index];
   if (result == nullptr) {
-    result = data()->NewLiveRange(FixedDoubleLiveRangeID(index));
+    result = data()->NewLiveRange(FixedDoubleLiveRangeID(index), kRepFloat64);
     DCHECK(result->IsFixed());
-    result->set_kind(DOUBLE_REGISTERS);
     result->set_assigned_register(index);
     data()->MarkAllocated(DOUBLE_REGISTERS, index);
     data()->fixed_double_live_ranges()[index] = result;
@@ -1565,7 +1632,6 @@ void LiveRangeBuilder::BuildLiveRanges() {
   // Postprocess the ranges.
   for (auto range : data()->live_ranges()) {
     if (range == nullptr) continue;
-    range->set_kind(RequiredRegisterKind(range->id()));
     // Give slots to all ranges with a non fixed slot use.
     if (range->has_slot_use() && range->HasNoSpillType()) {
      data()->AssignSpillRangeToLiveRange(range);
@@ -1610,13 +1676,6 @@ void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
 }
 
 
-RegisterKind LiveRangeBuilder::RequiredRegisterKind(
-    int virtual_register) const {
-  return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS
-                                              : GENERAL_REGISTERS;
-}
-
-
 void LiveRangeBuilder::Verify() const {
   for (auto& hint : phi_hints_) {
     CHECK(hint.second->IsResolved());
@@ -1647,8 +1706,7 @@ LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
          (GetInstructionBlock(code(), pos)->last_instruction_index() !=
           pos.ToInstructionIndex()));
 
-  int vreg = code()->NextVirtualRegister();
-  auto result = LiveRangeFor(vreg);
+  auto result = data()->NewChildRangeFor(range);
   range->SplitAt(pos, result, allocation_zone());
   return result;
 }
@@ -2652,13 +2710,9 @@ void OperandAssigner::AssignSpillSlots() {
   for (auto range : spill_ranges) {
     if (range->IsEmpty()) continue;
     // Allocate a new operand referring to the spill slot.
-    auto kind = range->kind();
-    int index = data()->frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS);
-    auto op_kind = kind == DOUBLE_REGISTERS
-                       ? AllocatedOperand::DOUBLE_STACK_SLOT
-                       : AllocatedOperand::STACK_SLOT;
-    auto op = AllocatedOperand::New(data()->code_zone(), op_kind, index);
-    range->SetOperand(op);
+    int byte_width = range->ByteWidth();
+    int index = data()->frame()->AllocateSpillSlot(byte_width);
+    range->set_assigned_slot(index);
   }
 }
 
@@ -2666,16 +2720,18 @@
 void OperandAssigner::CommitAssignment() {
   for (auto range : data()->live_ranges()) {
     if (range == nullptr || range->IsEmpty()) continue;
-    InstructionOperand* spill_operand = nullptr;
-    if (!range->TopLevel()->HasNoSpillType()) {
-      spill_operand = range->TopLevel()->GetSpillOperand();
+    InstructionOperand spill_operand;
+    if (range->TopLevel()->HasSpillOperand()) {
+      spill_operand = *range->TopLevel()->GetSpillOperand();
+    } else if (range->TopLevel()->HasSpillRange()) {
+      spill_operand = range->TopLevel()->GetSpillRangeOperand();
     }
     auto assigned = range->GetAssignedOperand();
     range->ConvertUsesToOperand(assigned, spill_operand);
     if (range->is_phi()) {
       data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned);
     }
-    if (!range->IsChild() && spill_operand != nullptr) {
+    if (!range->IsChild() && !spill_operand.IsInvalid()) {
       range->CommitSpillsAtDefinition(data()->code(), spill_operand,
                                       range->has_slot_use());
     }
@@ -2756,12 +2812,21 @@ void ReferenceMapPopulator::PopulateReferenceMaps() {
 
     // Check if the live range is spilled and the safe point is after
     // the spill position.
-    if (range->HasSpillOperand() &&
-        safe_point >= range->spill_start_index() &&
-        !range->GetSpillOperand()->IsConstant()) {
+    if (((range->HasSpillOperand() &&
+          !range->GetSpillOperand()->IsConstant()) ||
+         range->HasSpillRange()) &&
+        safe_point >= range->spill_start_index()) {
       TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
             range->id(), range->spill_start_index(), safe_point);
-      map->RecordReference(*range->GetSpillOperand());
+      InstructionOperand operand;
+      if (range->HasSpillOperand()) {
+        operand = *range->GetSpillOperand();
+      } else {
+        operand = range->GetSpillRangeOperand();
+      }
+      DCHECK(operand.IsStackSlot());
+      DCHECK_EQ(kRepTagged, AllocatedOperand::cast(operand).machine_type());
+      map->RecordReference(operand);
     }
 
     if (!cur->spilled()) {
@@ -2771,6 +2836,7 @@ void ReferenceMapPopulator::PopulateReferenceMaps() {
             cur->id(), cur->Start().value(), safe_point);
       auto operand = cur->GetAssignedOperand();
       DCHECK(!operand.IsStackSlot());
+      DCHECK_EQ(kRepTagged, AllocatedOperand::cast(operand).machine_type());
       map->RecordReference(operand);
     }
   }
@@ -2909,6 +2975,24 @@ class LiveRangeFinder {
   DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
 };
 
+
+typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;
+
+
+struct DelayedInsertionMapCompare {
+  bool operator()(const DelayedInsertionMapKey& a,
+                  const DelayedInsertionMapKey& b) {
+    if (a.first == b.first) {
+      return a.second.Compare(b.second);
+    }
+    return a.first < b.first;
+  }
+};
+
+
+typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
+                DelayedInsertionMapCompare> DelayedInsertionMap;
+
 } // namespace
 
 
@@ -2942,7 +3026,7 @@ void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
           continue;
         auto pred_op = result.pred_cover_->GetAssignedOperand();
         auto cur_op = result.cur_cover_->GetAssignedOperand();
-        if (pred_op == cur_op) continue;
+        if (pred_op.Equals(cur_op)) continue;
         ResolveControlFlow(block, cur_op, pred_block, pred_op);
       }
       iterator.Advance();
@@ -2955,7 +3039,7 @@ void LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
                                             const InstructionOperand& cur_op,
                                             const InstructionBlock* pred,
                                             const InstructionOperand& pred_op) {
-  DCHECK(pred_op != cur_op);
+  DCHECK(!pred_op.Equals(cur_op));
   int gap_index;
   Instruction::GapPosition position;
   if (block->PredecessorCount() == 1) {
@@ -2974,8 +3058,7 @@ void LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
 
 
 void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
-  ZoneMap<std::pair<ParallelMove*, InstructionOperand>, InstructionOperand>
-      delayed_insertion_map(local_zone);
+  DelayedInsertionMap delayed_insertion_map(local_zone);
   for (auto first_range : data()->live_ranges()) {
     if (first_range == nullptr || first_range->IsChild()) continue;
     for (auto second_range = first_range->next(); second_range != nullptr;
@@ -2991,7 +3074,7 @@ void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
       }
       auto prev_operand = first_range->GetAssignedOperand();
       auto cur_operand = second_range->GetAssignedOperand();
-      if (prev_operand == cur_operand) continue;
+      if (prev_operand.Equals(cur_operand)) continue;
       bool delay_insertion = false;
       Instruction::GapPosition gap_pos;
      int gap_index = pos.ToInstructionIndex();