| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
| 6 #include "src/compiler/linkage.h" | 6 #include "src/compiler/linkage.h" |
| 7 #include "src/compiler/register-allocator.h" | 7 #include "src/compiler/register-allocator.h" |
| 8 #include "src/string-stream.h" | 8 #include "src/string-stream.h" |
| 9 | 9 |
| 10 namespace v8 { | 10 namespace v8 { |
| (...skipping 238 matching lines...) |
| 249 first_interval_(nullptr), | 249 first_interval_(nullptr), |
| 250 first_pos_(nullptr), | 250 first_pos_(nullptr), |
| 251 parent_(nullptr), | 251 parent_(nullptr), |
| 252 next_(nullptr), | 252 next_(nullptr), |
| 253 spill_operand_(nullptr), | 253 spill_operand_(nullptr), |
| 254 spills_at_definition_(nullptr), | 254 spills_at_definition_(nullptr), |
| 255 current_interval_(nullptr), | 255 current_interval_(nullptr), |
| 256 last_processed_use_(nullptr), | 256 last_processed_use_(nullptr), |
| 257 current_hint_position_(nullptr), | 257 current_hint_position_(nullptr), |
| 258 size_(kInvalidSize), | 258 size_(kInvalidSize), |
| 259 weight_(kInvalidWeight) { | 259 weight_(kInvalidWeight), |
| 260 spilled_in_deferred_block_(false) { |
| 260 DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type)); | 261 DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type)); |
| 261 bits_ = SpillTypeField::encode(SpillType::kNoSpillType) | | 262 bits_ = SpillTypeField::encode(SpillType::kNoSpillType) | |
| 262 AssignedRegisterField::encode(kUnassignedRegister) | | 263 AssignedRegisterField::encode(kUnassignedRegister) | |
| 263 MachineTypeField::encode(machine_type); | 264 MachineTypeField::encode(machine_type); |
| 264 } | 265 } |
| 265 | 266 |
| 266 | 267 |
| 267 void LiveRange::Verify() const { | 268 void LiveRange::Verify() const { |
| 268 // Walk the positions, verifying that each is in an interval. | 269 // Walk the positions, verifying that each is in an interval. |
| 269 auto interval = first_interval_; | 270 auto interval = first_interval_; |
| (...skipping 42 matching lines...) |
| 312 | 313 |
| 313 | 314 |
| 314 void LiveRange::SpillAtDefinition(Zone* zone, int gap_index, | 315 void LiveRange::SpillAtDefinition(Zone* zone, int gap_index, |
| 315 InstructionOperand* operand) { | 316 InstructionOperand* operand) { |
| 316 DCHECK(HasNoSpillType()); | 317 DCHECK(HasNoSpillType()); |
| 317 spills_at_definition_ = new (zone) | 318 spills_at_definition_ = new (zone) |
| 318 SpillAtDefinitionList(gap_index, operand, spills_at_definition_); | 319 SpillAtDefinitionList(gap_index, operand, spills_at_definition_); |
| 319 } | 320 } |
| 320 | 321 |
| 321 | 322 |
| 323 bool LiveRange::TryCommitSpillInDeferredBlock( |
| 324 InstructionSequence* code, const InstructionOperand& spill_operand) { |
| 325 DCHECK(!IsChild()); |
| 326 |
| 327 if (!FLAG_turbo_preprocess_ranges || IsEmpty() || HasNoSpillType() || |
| 328 spill_operand.IsConstant() || spill_operand.IsImmediate()) { |
| 329 return false; |
| 330 } |
| 331 |
| 332 int count = 0; |
| 333 for (const LiveRange* child = this; child != nullptr; child = child->next()) { |
| 334 int first_instr = child->Start().ToInstructionIndex(); |
| 335 |
| 336 // If the range starts at instruction end, the first instruction index is |
| 337 // the next one. |
| 338 if (!child->Start().IsGapPosition() && !child->Start().IsStart()) { |
| 339 ++first_instr; |
| 340 } |
| 341 |
| 342 // We only look at where the range starts. It doesn't matter where it ends: |
| 343 // if it ends past this block, then either there is a phi there already, |
| 344 // or ResolveControlFlow will adapt the last instruction gap of this block |
| 345 // as if there were a phi. In either case, data flow will be correct. |
| 346 const InstructionBlock* block = code->GetInstructionBlock(first_instr); |
| 347 |
| 348 // If we have slot uses in a subrange, bail out, because we need the value |
| 349 // on the stack before that use. |
| 350 bool has_slot_use = child->NextSlotPosition(child->Start()) != nullptr; |
| 351 if (!block->IsDeferred()) { |
| 352 if (child->spilled() || has_slot_use) { |
| 353 TRACE( |
| 354 "Live Range %d must be spilled at definition: found a " |
| 355 "slot-requiring non-deferred child range %d.\n", |
| 356 TopLevel()->id(), child->id()); |
| 357 return false; |
| 358 } |
| 359 } else { |
| 360 if (child->spilled() || has_slot_use) ++count; |
| 361 } |
| 362 } |
| 363 if (count == 0) return false; |
| 364 |
| 365 spill_start_index_ = -1; |
| 366 spilled_in_deferred_block_ = true; |
| 367 |
| 368 TRACE("Live Range %d will be spilled only in deferred blocks.\n", id()); |
| 369 // If we have ranges that aren't spilled but require the operand on the stack, |
| 370 // make sure we insert the spill. |
| 371 for (const LiveRange* child = this; child != nullptr; child = child->next()) { |
| 372 if (!child->spilled() && |
| 373 child->NextSlotPosition(child->Start()) != nullptr) { |
| 374 auto instr = code->InstructionAt(child->Start().ToInstructionIndex()); |
| 375 // Insert spill at the end to let live range connections happen at START. |
| 376 auto move = |
| 377 instr->GetOrCreateParallelMove(Instruction::END, code->zone()); |
| 378 InstructionOperand assigned = child->GetAssignedOperand(); |
| 379 if (TopLevel()->has_slot_use()) { |
| 380 bool found = false; |
| 381 for (auto move_op : *move) { |
| 382 if (move_op->IsEliminated()) continue; |
| 383 if (move_op->source().Equals(assigned) && |
| 384 move_op->destination().Equals(spill_operand)) { |
| 385 found = true; |
| 386 break; |
| 387 } |
| 388 } |
| 389 if (found) continue; |
| 390 } |
| 391 |
| 392 move->AddMove(assigned, spill_operand); |
| 393 } |
| 394 } |
| 395 |
| 396 return true; |
| 397 } |
| 398 |
| 399 |
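The new TryCommitSpillInDeferredBlock above boils down to one predicate over the child ranges. A minimal standalone sketch of that rule follows; the ChildInfo struct and CanSpillOnlyInDeferredBlocks helper are illustrative names rather than V8 API, and the early bail-outs on FLAG_turbo_preprocess_ranges, empty ranges, and constant/immediate spill operands are omitted:

    #include <vector>

    // Simplified model of the rule in TryCommitSpillInDeferredBlock: the spill
    // can be deferred only if every child range that is spilled or needs a
    // stack slot starts in a deferred block, and at least one such child exists.
    struct ChildInfo {
      bool starts_in_deferred_block;
      bool spilled_or_has_slot_use;
    };

    bool CanSpillOnlyInDeferredBlocks(const std::vector<ChildInfo>& children) {
      int deferred_spills = 0;
      for (const ChildInfo& child : children) {
        if (!child.spilled_or_has_slot_use) continue;
        // A slot-requiring child in a non-deferred block forces a spill at
        // definition, exactly as the TRACE message above reports.
        if (!child.starts_in_deferred_block) return false;
        ++deferred_spills;
      }
      return deferred_spills > 0;
    }

When the predicate holds, the second loop in the function still inserts an explicit spill move at Instruction::END for any unspilled child that has a slot use, so those uses see the value on the stack.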
| 322 void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence, | 400 void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence, |
| 323 const InstructionOperand& op, | 401 const InstructionOperand& op, |
| 324 bool might_be_duplicated) { | 402 bool might_be_duplicated) { |
| 325 DCHECK_IMPLIES(op.IsConstant(), spills_at_definition_ == nullptr); | 403 DCHECK_IMPLIES(op.IsConstant(), spills_at_definition_ == nullptr); |
| 326 DCHECK(!IsChild()); | 404 DCHECK(!IsChild()); |
| 327 auto zone = sequence->zone(); | 405 auto zone = sequence->zone(); |
| 406 |
| 328 for (auto to_spill = spills_at_definition_; to_spill != nullptr; | 407 for (auto to_spill = spills_at_definition_; to_spill != nullptr; |
| 329 to_spill = to_spill->next) { | 408 to_spill = to_spill->next) { |
| 330 auto instr = sequence->InstructionAt(to_spill->gap_index); | 409 auto instr = sequence->InstructionAt(to_spill->gap_index); |
| 331 auto move = instr->GetOrCreateParallelMove(Instruction::START, zone); | 410 auto move = instr->GetOrCreateParallelMove(Instruction::START, zone); |
| 332 // Skip insertion if it's possible that the move exists already as a | 411 // Skip insertion if it's possible that the move exists already as a |
| 333 // constraint move from a fixed output register to a slot. | 412 // constraint move from a fixed output register to a slot. |
| 334 if (might_be_duplicated) { | 413 if (might_be_duplicated) { |
| 335 bool found = false; | 414 bool found = false; |
| 336 for (auto move_op : *move) { | 415 for (auto move_op : *move) { |
| 337 if (move_op->IsEliminated()) continue; | 416 if (move_op->IsEliminated()) continue; |
| (...skipping 71 matching lines...) |
| 409 | 488 |
| 410 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const { | 489 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const { |
| 411 UsePosition* pos = NextUsePosition(start); | 490 UsePosition* pos = NextUsePosition(start); |
| 412 while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) { | 491 while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) { |
| 413 pos = pos->next(); | 492 pos = pos->next(); |
| 414 } | 493 } |
| 415 return pos; | 494 return pos; |
| 416 } | 495 } |
| 417 | 496 |
| 418 | 497 |
| 498 UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const { |
| 499 for (UsePosition* pos = NextUsePosition(start); pos != nullptr; |
| 500 pos = pos->next()) { |
| 501 if (pos->type() != UsePositionType::kRequiresSlot) continue; |
| 502 return pos; |
| 503 } |
| 504 return nullptr; |
| 505 } |
| 506 |
| 507 |
| 419 bool LiveRange::CanBeSpilled(LifetimePosition pos) const { | 508 bool LiveRange::CanBeSpilled(LifetimePosition pos) const { |
| 420 // We cannot spill a live range that has a use requiring a register | 509 // We cannot spill a live range that has a use requiring a register |
| 421 // at the current or the immediate next position. | 510 // at the current or the immediate next position. |
| 422 auto use_pos = NextRegisterPosition(pos); | 511 auto use_pos = NextRegisterPosition(pos); |
| 423 if (use_pos == nullptr) return true; | 512 if (use_pos == nullptr) return true; |
| 424 return use_pos->pos() > pos.NextStart().End(); | 513 return use_pos->pos() > pos.NextStart().End(); |
| 425 } | 514 } |
| 426 | 515 |
| 427 | 516 |
| 428 InstructionOperand LiveRange::GetAssignedOperand() const { | 517 InstructionOperand LiveRange::GetAssignedOperand() const { |
| (...skipping 1108 matching lines...) |
| 1537 auto output = instr->OutputAt(i); | 1626 auto output = instr->OutputAt(i); |
| 1538 if (output->IsUnallocated()) { | 1627 if (output->IsUnallocated()) { |
| 1539 // Unsupported. | 1628 // Unsupported. |
| 1540 DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy()); | 1629 DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy()); |
| 1541 int out_vreg = UnallocatedOperand::cast(output)->virtual_register(); | 1630 int out_vreg = UnallocatedOperand::cast(output)->virtual_register(); |
| 1542 live->Remove(out_vreg); | 1631 live->Remove(out_vreg); |
| 1543 } else if (output->IsConstant()) { | 1632 } else if (output->IsConstant()) { |
| 1544 int out_vreg = ConstantOperand::cast(output)->virtual_register(); | 1633 int out_vreg = ConstantOperand::cast(output)->virtual_register(); |
| 1545 live->Remove(out_vreg); | 1634 live->Remove(out_vreg); |
| 1546 } | 1635 } |
| 1547 Define(curr_position, output); | 1636 if (block->IsHandler() && index == block_start) { |
| 1637 // The register defined here is blocked from gap start - it is the |
| 1638 // exception value. |
| 1639 // TODO(mtrofin): should we explore an explicit opcode for |
| 1640 // the first instruction in the handler? |
| 1641 Define(LifetimePosition::GapFromInstructionIndex(index), output); |
| 1642 } else { |
| 1643 Define(curr_position, output); |
| 1644 } |
| 1548 } | 1645 } |
| 1549 | 1646 |
| 1550 if (instr->ClobbersRegisters()) { | 1647 if (instr->ClobbersRegisters()) { |
| 1551 for (int i = 0; i < config()->num_general_registers(); ++i) { | 1648 for (int i = 0; i < config()->num_general_registers(); ++i) { |
| 1552 if (!IsOutputRegisterOf(instr, i)) { | 1649 if (!IsOutputRegisterOf(instr, i)) { |
| 1553 auto range = FixedLiveRangeFor(i); | 1650 auto range = FixedLiveRangeFor(i); |
| 1554 range->AddUseInterval(curr_position, curr_position.End(), | 1651 range->AddUseInterval(curr_position, curr_position.End(), |
| 1555 allocation_zone()); | 1652 allocation_zone()); |
| 1556 } | 1653 } |
| 1557 } | 1654 } |
| (...skipping 967 matching lines...) |
| 2525 spill_operand = *range->TopLevel()->GetSpillOperand(); | 2622 spill_operand = *range->TopLevel()->GetSpillOperand(); |
| 2526 } else if (range->TopLevel()->HasSpillRange()) { | 2623 } else if (range->TopLevel()->HasSpillRange()) { |
| 2527 spill_operand = range->TopLevel()->GetSpillRangeOperand(); | 2624 spill_operand = range->TopLevel()->GetSpillRangeOperand(); |
| 2528 } | 2625 } |
| 2529 auto assigned = range->GetAssignedOperand(); | 2626 auto assigned = range->GetAssignedOperand(); |
| 2530 range->ConvertUsesToOperand(assigned, spill_operand); | 2627 range->ConvertUsesToOperand(assigned, spill_operand); |
| 2531 if (range->is_phi()) { | 2628 if (range->is_phi()) { |
| 2532 data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned); | 2629 data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned); |
| 2533 } | 2630 } |
| 2534 if (!range->IsChild() && !spill_operand.IsInvalid()) { | 2631 if (!range->IsChild() && !spill_operand.IsInvalid()) { |
| 2535 range->CommitSpillsAtDefinition( | 2632 // If this top level range has a child spilled in a deferred block, we use |
| 2536 data()->code(), spill_operand, | 2633 // the range and control flow connection mechanism instead of spilling at |
| 2537 range->has_slot_use() || range->spilled()); | 2634 // definition. Refer to the ConnectLiveRanges and ResolveControlFlow |
| 2635 // phases. Normally, when we spill at definition, we do not insert a |
| 2636 // connecting move when a successor child range is spilled - because the |
| 2637 // spilled range picks up its value from the slot which was assigned at |
| 2638 // definition. For ranges that are determined to spill only in deferred |
| 2639 // blocks, we let ConnectLiveRanges and ResolveControlFlow insert such |
| 2640 // moves between ranges. Because of how the ranges are split around |
| 2641 // deferred blocks, this amounts to spilling and filling inside such |
| 2642 // blocks. |
| 2643 if (!range->TryCommitSpillInDeferredBlock(data()->code(), |
| 2644 spill_operand)) { |
| 2645 // Spill at definition if the range isn't spilled only in deferred |
| 2646 // blocks. |
| 2647 range->CommitSpillsAtDefinition( |
| 2648 data()->code(), spill_operand, |
| 2649 range->has_slot_use() || range->spilled()); |
| 2650 } |
| 2538 } | 2651 } |
| 2539 } | 2652 } |
| 2540 } | 2653 } |
| 2541 | 2654 |
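The comment block added to CommitAssignment above describes a two-way dispatch: prefer spilling only in deferred blocks, and fall back to the existing spill-at-definition path otherwise. A hedged sketch of that dispatch, with the two strategies passed in as callables since the real member functions live on LiveRange (all names here are illustrative, not V8 API):

    #include <functional>

    // Try the deferred-block strategy first; only when it declines do we
    // insert the eager spill move at the definition gap.
    void CommitSpillSketch(
        const std::function<bool()>& try_spill_in_deferred_blocks,
        const std::function<void(bool)>& spill_at_definition,
        bool has_slot_use, bool spilled) {
      if (try_spill_in_deferred_blocks()) {
        // Spills and fills will be materialized later by ConnectLiveRanges and
        // ResolveControlFlow, inside the deferred blocks themselves.
        return;
      }
      // The hint mirrors might_be_duplicated above: the move may already exist
      // as a constraint move from a fixed output register to the slot.
      spill_at_definition(has_slot_use || spilled);
    }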
| 2542 | 2655 |
| 2543 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data) | 2656 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data) |
| 2544 : data_(data) {} | 2657 : data_(data) {} |
| 2545 | 2658 |
| 2546 | 2659 |
| 2547 bool ReferenceMapPopulator::SafePointsAreInOrder() const { | 2660 bool ReferenceMapPopulator::SafePointsAreInOrder() const { |
| (...skipping 76 matching lines...) |
| 2624 auto safe_point_pos = | 2737 auto safe_point_pos = |
| 2625 LifetimePosition::InstructionFromInstructionIndex(safe_point); | 2738 LifetimePosition::InstructionFromInstructionIndex(safe_point); |
| 2626 auto cur = range; | 2739 auto cur = range; |
| 2627 while (cur != nullptr && !cur->Covers(safe_point_pos)) { | 2740 while (cur != nullptr && !cur->Covers(safe_point_pos)) { |
| 2628 cur = cur->next(); | 2741 cur = cur->next(); |
| 2629 } | 2742 } |
| 2630 if (cur == nullptr) continue; | 2743 if (cur == nullptr) continue; |
| 2631 | 2744 |
| 2632 // Check if the live range is spilled and the safe point is after | 2745 // Check if the live range is spilled and the safe point is after |
| 2633 // the spill position. | 2746 // the spill position. |
| 2634 if (!spill_operand.IsInvalid() && | 2747 int spill_index = range->IsSpilledOnlyInDeferredBlocks() |
| 2635 safe_point >= range->spill_start_index()) { | 2748 ? cur->Start().ToInstructionIndex() |
| 2749 : range->spill_start_index(); |
| 2750 |
| 2751 if (!spill_operand.IsInvalid() && safe_point >= spill_index) { |
| 2636 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n", | 2752 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n", |
| 2637 range->id(), range->spill_start_index(), safe_point); | 2753 range->id(), spill_index, safe_point); |
| 2638 map->RecordReference(AllocatedOperand::cast(spill_operand)); | 2754 map->RecordReference(AllocatedOperand::cast(spill_operand)); |
| 2639 } | 2755 } |
| 2640 | 2756 |
| 2641 if (!cur->spilled()) { | 2757 if (!cur->spilled()) { |
| 2642 TRACE( | 2758 TRACE( |
| 2643 "Pointer in register for range %d (start at %d) " | 2759 "Pointer in register for range %d (start at %d) " |
| 2644 "at safe point %d\n", | 2760 "at safe point %d\n", |
| 2645 cur->id(), cur->Start().value(), safe_point); | 2761 cur->id(), cur->Start().value(), safe_point); |
| 2646 auto operand = cur->GetAssignedOperand(); | 2762 auto operand = cur->GetAssignedOperand(); |
| 2647 DCHECK(!operand.IsStackSlot()); | 2763 DCHECK(!operand.IsStackSlot()); |
| (...skipping 176 matching lines...) |
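For reference-map population, the spill_index change above only adjusts which instruction index marks the point from which the stack slot is known to hold the value. A minimal sketch of that selection (function and parameter names are illustrative):

    // For a range spilled only in deferred blocks, the slot is only guaranteed
    // to hold the value from the start of the covering child range, because
    // the spill happens inside the deferred block; otherwise the top-level
    // spill_start_index() still applies.
    int SpillIndexForSafePoint(bool spilled_only_in_deferred_blocks,
                               int covering_child_start_instruction,
                               int top_level_spill_start_index) {
      return spilled_only_in_deferred_blocks ? covering_child_start_instruction
                                             : top_level_spill_start_index;
    }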
| 2824 if (CanEagerlyResolveControlFlow(block)) continue; | 2940 if (CanEagerlyResolveControlFlow(block)) continue; |
| 2825 auto live = live_in_sets[block->rpo_number().ToInt()]; | 2941 auto live = live_in_sets[block->rpo_number().ToInt()]; |
| 2826 BitVector::Iterator iterator(live); | 2942 BitVector::Iterator iterator(live); |
| 2827 while (!iterator.Done()) { | 2943 while (!iterator.Done()) { |
| 2828 auto* array = finder.ArrayFor(iterator.Current()); | 2944 auto* array = finder.ArrayFor(iterator.Current()); |
| 2829 for (auto pred : block->predecessors()) { | 2945 for (auto pred : block->predecessors()) { |
| 2830 FindResult result; | 2946 FindResult result; |
| 2831 const auto* pred_block = code()->InstructionBlockAt(pred); | 2947 const auto* pred_block = code()->InstructionBlockAt(pred); |
| 2832 array->Find(block, pred_block, &result); | 2948 array->Find(block, pred_block, &result); |
| 2833 if (result.cur_cover_ == result.pred_cover_ || | 2949 if (result.cur_cover_ == result.pred_cover_ || |
| 2834 result.cur_cover_->spilled()) | 2950 (!result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() && |
| 2951 result.cur_cover_->spilled())) |
| 2835 continue; | 2952 continue; |
| 2836 auto pred_op = result.pred_cover_->GetAssignedOperand(); | 2953 auto pred_op = result.pred_cover_->GetAssignedOperand(); |
| 2837 auto cur_op = result.cur_cover_->GetAssignedOperand(); | 2954 auto cur_op = result.cur_cover_->GetAssignedOperand(); |
| 2838 if (pred_op.Equals(cur_op)) continue; | 2955 if (pred_op.Equals(cur_op)) continue; |
| 2839 ResolveControlFlow(block, cur_op, pred_block, pred_op); | 2956 ResolveControlFlow(block, cur_op, pred_block, pred_op); |
| 2840 } | 2957 } |
| 2841 iterator.Advance(); | 2958 iterator.Advance(); |
| 2842 } | 2959 } |
| 2843 } | 2960 } |
| 2844 } | 2961 } |
| (...skipping 18 matching lines...) |
| 2863 position = Instruction::END; | 2980 position = Instruction::END; |
| 2864 } | 2981 } |
| 2865 data()->AddGapMove(gap_index, position, pred_op, cur_op); | 2982 data()->AddGapMove(gap_index, position, pred_op, cur_op); |
| 2866 } | 2983 } |
| 2867 | 2984 |
| 2868 | 2985 |
| 2869 void LiveRangeConnector::ConnectRanges(Zone* local_zone) { | 2986 void LiveRangeConnector::ConnectRanges(Zone* local_zone) { |
| 2870 DelayedInsertionMap delayed_insertion_map(local_zone); | 2987 DelayedInsertionMap delayed_insertion_map(local_zone); |
| 2871 for (auto first_range : data()->live_ranges()) { | 2988 for (auto first_range : data()->live_ranges()) { |
| 2872 if (first_range == nullptr || first_range->IsChild()) continue; | 2989 if (first_range == nullptr || first_range->IsChild()) continue; |
| 2990 bool connect_spilled = first_range->IsSpilledOnlyInDeferredBlocks(); |
| 2873 for (auto second_range = first_range->next(); second_range != nullptr; | 2991 for (auto second_range = first_range->next(); second_range != nullptr; |
| 2874 first_range = second_range, second_range = second_range->next()) { | 2992 first_range = second_range, second_range = second_range->next()) { |
| 2875 auto pos = second_range->Start(); | 2993 auto pos = second_range->Start(); |
| 2876 // Add gap move if the two live ranges touch and there is no block | 2994 // Add gap move if the two live ranges touch and there is no block |
| 2877 // boundary. | 2995 // boundary. |
| 2878 if (second_range->spilled()) continue; | 2996 if (!connect_spilled && second_range->spilled()) continue; |
| 2879 if (first_range->End() != pos) continue; | 2997 if (first_range->End() != pos) continue; |
| 2880 if (data()->IsBlockBoundary(pos) && | 2998 if (data()->IsBlockBoundary(pos) && |
| 2881 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) { | 2999 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) { |
| 2882 continue; | 3000 continue; |
| 2883 } | 3001 } |
| 2884 auto prev_operand = first_range->GetAssignedOperand(); | 3002 auto prev_operand = first_range->GetAssignedOperand(); |
| 2885 auto cur_operand = second_range->GetAssignedOperand(); | 3003 auto cur_operand = second_range->GetAssignedOperand(); |
| 2886 if (prev_operand.Equals(cur_operand)) continue; | 3004 if (prev_operand.Equals(cur_operand)) continue; |
| 2887 bool delay_insertion = false; | 3005 bool delay_insertion = false; |
| 2888 Instruction::GapPosition gap_pos; | 3006 Instruction::GapPosition gap_pos; |
| (...skipping 46 matching lines...) |
| 2935 auto eliminate = moves->PrepareInsertAfter(move); | 3053 auto eliminate = moves->PrepareInsertAfter(move); |
| 2936 to_insert.push_back(move); | 3054 to_insert.push_back(move); |
| 2937 if (eliminate != nullptr) to_eliminate.push_back(eliminate); | 3055 if (eliminate != nullptr) to_eliminate.push_back(eliminate); |
| 2938 } | 3056 } |
| 2939 } | 3057 } |
| 2940 | 3058 |
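The connect_spilled flag in ConnectRanges above is the counterpart of the earlier decision: for ranges spilled only in deferred blocks, the connector no longer skips moves into spilled child ranges, so the spill and later fill materialize as gap moves inside the deferred blocks. A hedged sketch of the changed skip condition (names are illustrative, not V8 API):

    // Previously a gap move between touching child ranges was always skipped
    // when the successor child was spilled; now it is still emitted when the
    // top-level range spills only in deferred blocks.
    bool ShouldSkipConnectingMove(bool top_level_spills_only_in_deferred_blocks,
                                  bool second_child_is_spilled) {
      return second_child_is_spilled &&
             !top_level_spills_only_in_deferred_blocks;
    }

ResolveControlFlow applies the analogous relaxation, so control-flow edges into and out of deferred blocks also receive the necessary moves.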
| 2941 | 3059 |
| 2942 } // namespace compiler | 3060 } // namespace compiler |
| 2943 } // namespace internal | 3061 } // namespace internal |
| 2944 } // namespace v8 | 3062 } // namespace v8 |