Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/linkage.h" | 5 #include "src/compiler/linkage.h" |
| 6 #include "src/compiler/pipeline-statistics.h" | 6 #include "src/compiler/pipeline-statistics.h" |
| 7 #include "src/compiler/register-allocator.h" | 7 #include "src/compiler/register-allocator.h" |
| 8 #include "src/string-stream.h" | 8 #include "src/string-stream.h" |
| 9 | 9 |
| 10 namespace v8 { | 10 namespace v8 { |
| (...skipping 181 matching lines...) | |
| 192 bool LiveRange::CanBeSpilled(LifetimePosition pos) { | 192 bool LiveRange::CanBeSpilled(LifetimePosition pos) { |
| 193 // We cannot spill a live range that has a use requiring a register | 193 // We cannot spill a live range that has a use requiring a register |
| 194 // at the current or the immediate next position. | 194 // at the current or the immediate next position. |
| 195 UsePosition* use_pos = NextRegisterPosition(pos); | 195 UsePosition* use_pos = NextRegisterPosition(pos); |
| 196 if (use_pos == NULL) return true; | 196 if (use_pos == NULL) return true; |
| 197 return use_pos->pos().Value() > | 197 return use_pos->pos().Value() > |
| 198 pos.NextInstruction().InstructionEnd().Value(); | 198 pos.NextInstruction().InstructionEnd().Value(); |
| 199 } | 199 } |
| 200 | 200 |
| 201 | 201 |
| 202 InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) { | 202 InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) const { |
| 203 InstructionOperand* op = NULL; | 203 InstructionOperand* op = NULL; |
| 204 if (HasRegisterAssigned()) { | 204 if (HasRegisterAssigned()) { |
| 205 DCHECK(!IsSpilled()); | 205 DCHECK(!IsSpilled()); |
| 206 switch (Kind()) { | 206 switch (Kind()) { |
| 207 case GENERAL_REGISTERS: | 207 case GENERAL_REGISTERS: |
| 208 op = RegisterOperand::Create(assigned_register(), zone); | 208 op = RegisterOperand::Create(assigned_register(), zone); |
| 209 break; | 209 break; |
| 210 case DOUBLE_REGISTERS: | 210 case DOUBLE_REGISTERS: |
| 211 op = DoubleRegisterOperand::Create(assigned_register(), zone); | 211 op = DoubleRegisterOperand::Create(assigned_register(), zone); |
| 212 break; | 212 break; |
| (...skipping 287 matching lines...) | |
| 500 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); | 500 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); |
| 501 } else { | 501 } else { |
| 502 b = b->next(); | 502 b = b->next(); |
| 503 } | 503 } |
| 504 } | 504 } |
| 505 return LifetimePosition::Invalid(); | 505 return LifetimePosition::Invalid(); |
| 506 } | 506 } |
| 507 | 507 |
| 508 | 508 |
| 509 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, | 509 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, |
| 510 Zone* local_zone, Frame* frame, | 510 Zone* zone, Frame* frame, |
| 511 InstructionSequence* code, | 511 InstructionSequence* code, |
| 512 const char* debug_name) | 512 const char* debug_name) |
| 513 : zone_(local_zone), | 513 : local_zone_(zone), |
| 514 frame_(frame), | 514 frame_(frame), |
| 515 code_(code), | 515 code_(code), |
| 516 debug_name_(debug_name), | 516 debug_name_(debug_name), |
| 517 config_(config), | 517 config_(config), |
| 518 live_in_sets_(code->InstructionBlockCount(), zone()), | 518 live_in_sets_(code->InstructionBlockCount(), local_zone()), |
| 519 live_ranges_(code->VirtualRegisterCount() * 2, zone()), | 519 live_ranges_(code->VirtualRegisterCount() * 2, local_zone()), |
| 520 fixed_live_ranges_(this->config()->num_general_registers(), NULL, zone()), | 520 fixed_live_ranges_(this->config()->num_general_registers(), NULL, |
| 521 local_zone()), | |
| 521 fixed_double_live_ranges_(this->config()->num_double_registers(), NULL, | 522 fixed_double_live_ranges_(this->config()->num_double_registers(), NULL, |
| 522 zone()), | 523 local_zone()), |
| 523 unhandled_live_ranges_(code->VirtualRegisterCount() * 2, zone()), | 524 unhandled_live_ranges_(code->VirtualRegisterCount() * 2, local_zone()), |
| 524 active_live_ranges_(8, zone()), | 525 active_live_ranges_(8, local_zone()), |
| 525 inactive_live_ranges_(8, zone()), | 526 inactive_live_ranges_(8, local_zone()), |
| 526 reusable_slots_(8, zone()), | 527 reusable_slots_(8, local_zone()), |
| 527 mode_(UNALLOCATED_REGISTERS), | 528 mode_(UNALLOCATED_REGISTERS), |
| 528 num_registers_(-1), | 529 num_registers_(-1), |
| 529 allocation_ok_(true) { | 530 allocation_ok_(true) { |
| 530 DCHECK(this->config()->num_general_registers() <= | 531 DCHECK(this->config()->num_general_registers() <= |
| 531 RegisterConfiguration::kMaxGeneralRegisters); | 532 RegisterConfiguration::kMaxGeneralRegisters); |
| 532 DCHECK(this->config()->num_double_registers() <= | 533 DCHECK(this->config()->num_double_registers() <= |
| 533 RegisterConfiguration::kMaxDoubleRegisters); | 534 RegisterConfiguration::kMaxDoubleRegisters); |
| 534 // TryAllocateFreeReg and AllocateBlockedReg assume this | 535 // TryAllocateFreeReg and AllocateBlockedReg assume this |
| 535 // when allocating local arrays. | 536 // when allocating local arrays. |
| 536 DCHECK(this->config()->num_double_registers() >= | 537 DCHECK(this->config()->num_double_registers() >= |
| 537 this->config()->num_general_registers()); | 538 this->config()->num_general_registers()); |
| 538 } | 539 } |
| 539 | 540 |
| 540 | 541 |
| 541 void RegisterAllocator::InitializeLivenessAnalysis() { | 542 void RegisterAllocator::InitializeLivenessAnalysis() { |
| 542 // Initialize the live_in sets for each block to NULL. | 543 // Initialize the live_in sets for each block to NULL. |
| 543 int block_count = code()->InstructionBlockCount(); | 544 int block_count = code()->InstructionBlockCount(); |
| 544 live_in_sets_.Initialize(block_count, zone()); | 545 live_in_sets_.Initialize(block_count, local_zone()); |
| 545 live_in_sets_.AddBlock(NULL, block_count, zone()); | 546 live_in_sets_.AddBlock(NULL, block_count, local_zone()); |
| 546 } | 547 } |
| 547 | 548 |
| 548 | 549 |
| 549 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) { | 550 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) { |
| 550 // Compute live out for the given block, except not including backward | 551 // Compute live out for the given block, except not including backward |
| 551 // successor edges. | 552 // successor edges. |
| 552 BitVector* live_out = | 553 BitVector* live_out = new (local_zone()) |
| 553 new (zone()) BitVector(code()->VirtualRegisterCount(), zone()); | 554 BitVector(code()->VirtualRegisterCount(), local_zone()); |
| 554 | 555 |
| 555 // Process all successor blocks. | 556 // Process all successor blocks. |
| 556 for (auto succ : block->successors()) { | 557 for (auto succ : block->successors()) { |
| 557 // Add values live on entry to the successor. Note the successor's | 558 // Add values live on entry to the successor. Note the successor's |
| 558 // live_in will not be computed yet for backwards edges. | 559 // live_in will not be computed yet for backwards edges. |
| 559 BitVector* live_in = live_in_sets_[static_cast<int>(succ.ToSize())]; | 560 BitVector* live_in = live_in_sets_[static_cast<int>(succ.ToSize())]; |
| 560 if (live_in != NULL) live_out->Union(*live_in); | 561 if (live_in != NULL) live_out->Union(*live_in); |
| 561 | 562 |
| 562 // All phi input operands corresponding to this successor edge are live | 563 // All phi input operands corresponding to this successor edge are live |
| 563 // out from this block. | 564 // out from this block. |
| (...skipping 13 matching lines...) | |
| 577 // Add an interval that includes the entire block to the live range for | 578 // Add an interval that includes the entire block to the live range for |
| 578 // each live_out value. | 579 // each live_out value. |
| 579 LifetimePosition start = | 580 LifetimePosition start = |
| 580 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); | 581 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); |
| 581 LifetimePosition end = LifetimePosition::FromInstructionIndex( | 582 LifetimePosition end = LifetimePosition::FromInstructionIndex( |
| 582 block->last_instruction_index()).NextInstruction(); | 583 block->last_instruction_index()).NextInstruction(); |
| 583 BitVector::Iterator iterator(live_out); | 584 BitVector::Iterator iterator(live_out); |
| 584 while (!iterator.Done()) { | 585 while (!iterator.Done()) { |
| 585 int operand_index = iterator.Current(); | 586 int operand_index = iterator.Current(); |
| 586 LiveRange* range = LiveRangeFor(operand_index); | 587 LiveRange* range = LiveRangeFor(operand_index); |
| 587 range->AddUseInterval(start, end, zone()); | 588 range->AddUseInterval(start, end, local_zone()); |
| 588 iterator.Advance(); | 589 iterator.Advance(); |
| 589 } | 590 } |
| 590 } | 591 } |
| 591 | 592 |
| 592 | 593 |
| 593 int RegisterAllocator::FixedDoubleLiveRangeID(int index) { | 594 int RegisterAllocator::FixedDoubleLiveRangeID(int index) { |
| 594 return -index - 1 - config()->num_general_registers(); | 595 return -index - 1 - config()->num_general_registers(); |
| 595 } | 596 } |
| 596 | 597 |
| 597 | 598 |
| (...skipping 25 matching lines...) | |
| 623 | 624 |
| 624 | 625 |
| 625 LiveRange* RegisterAllocator::FixedLiveRangeFor(int index) { | 626 LiveRange* RegisterAllocator::FixedLiveRangeFor(int index) { |
| 626 DCHECK(index < config()->num_general_registers()); | 627 DCHECK(index < config()->num_general_registers()); |
| 627 LiveRange* result = fixed_live_ranges_[index]; | 628 LiveRange* result = fixed_live_ranges_[index]; |
| 628 if (result == NULL) { | 629 if (result == NULL) { |
| 629 // TODO(titzer): add a utility method to allocate a new LiveRange: | 630 // TODO(titzer): add a utility method to allocate a new LiveRange: |
| 630 // The LiveRange object itself can go in this zone, but the | 631 // The LiveRange object itself can go in this zone, but the |
| 631 // InstructionOperand needs | 632 // InstructionOperand needs |
| 632 // to go in the code zone, since it may survive register allocation. | 633 // to go in the code zone, since it may survive register allocation. |
| 633 result = new (zone()) LiveRange(FixedLiveRangeID(index), code_zone()); | 634 result = new (local_zone()) LiveRange(FixedLiveRangeID(index), code_zone()); |
| 634 DCHECK(result->IsFixed()); | 635 DCHECK(result->IsFixed()); |
| 635 result->kind_ = GENERAL_REGISTERS; | 636 result->kind_ = GENERAL_REGISTERS; |
| 636 SetLiveRangeAssignedRegister(result, index); | 637 SetLiveRangeAssignedRegister(result, index); |
| 637 fixed_live_ranges_[index] = result; | 638 fixed_live_ranges_[index] = result; |
| 638 } | 639 } |
| 639 return result; | 640 return result; |
| 640 } | 641 } |
| 641 | 642 |
| 642 | 643 |
| 643 LiveRange* RegisterAllocator::FixedDoubleLiveRangeFor(int index) { | 644 LiveRange* RegisterAllocator::FixedDoubleLiveRangeFor(int index) { |
| 644 DCHECK(index < config()->num_aliased_double_registers()); | 645 DCHECK(index < config()->num_aliased_double_registers()); |
| 645 LiveRange* result = fixed_double_live_ranges_[index]; | 646 LiveRange* result = fixed_double_live_ranges_[index]; |
| 646 if (result == NULL) { | 647 if (result == NULL) { |
| 647 result = new (zone()) LiveRange(FixedDoubleLiveRangeID(index), code_zone()); | 648 result = new (local_zone()) |
| 649 LiveRange(FixedDoubleLiveRangeID(index), code_zone()); | |
| 648 DCHECK(result->IsFixed()); | 650 DCHECK(result->IsFixed()); |
| 649 result->kind_ = DOUBLE_REGISTERS; | 651 result->kind_ = DOUBLE_REGISTERS; |
| 650 SetLiveRangeAssignedRegister(result, index); | 652 SetLiveRangeAssignedRegister(result, index); |
| 651 fixed_double_live_ranges_[index] = result; | 653 fixed_double_live_ranges_[index] = result; |
| 652 } | 654 } |
| 653 return result; | 655 return result; |
| 654 } | 656 } |
| 655 | 657 |
| 656 | 658 |
| 657 LiveRange* RegisterAllocator::LiveRangeFor(int index) { | 659 LiveRange* RegisterAllocator::LiveRangeFor(int index) { |
| 658 if (index >= live_ranges_.length()) { | 660 if (index >= live_ranges_.length()) { |
| 659 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone()); | 661 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, |
| 662 local_zone()); | |
| 660 } | 663 } |
| 661 LiveRange* result = live_ranges_[index]; | 664 LiveRange* result = live_ranges_[index]; |
| 662 if (result == NULL) { | 665 if (result == NULL) { |
| 663 result = new (zone()) LiveRange(index, code_zone()); | 666 result = new (local_zone()) LiveRange(index, code_zone()); |
| 664 live_ranges_[index] = result; | 667 live_ranges_[index] = result; |
| 665 } | 668 } |
| 666 return result; | 669 return result; |
| 667 } | 670 } |
| 668 | 671 |
| 669 | 672 |
| 670 GapInstruction* RegisterAllocator::GetLastGap(const InstructionBlock* block) { | 673 GapInstruction* RegisterAllocator::GetLastGap(const InstructionBlock* block) { |
| 671 int last_instruction = block->last_instruction_index(); | 674 int last_instruction = block->last_instruction_index(); |
| 672 return code()->GapAt(last_instruction - 1); | 675 return code()->GapAt(last_instruction - 1); |
| 673 } | 676 } |
| (...skipping 13 matching lines...) | |
| 687 | 690 |
| 688 | 691 |
| 689 void RegisterAllocator::Define(LifetimePosition position, | 692 void RegisterAllocator::Define(LifetimePosition position, |
| 690 InstructionOperand* operand, | 693 InstructionOperand* operand, |
| 691 InstructionOperand* hint) { | 694 InstructionOperand* hint) { |
| 692 LiveRange* range = LiveRangeFor(operand); | 695 LiveRange* range = LiveRangeFor(operand); |
| 693 if (range == NULL) return; | 696 if (range == NULL) return; |
| 694 | 697 |
| 695 if (range->IsEmpty() || range->Start().Value() > position.Value()) { | 698 if (range->IsEmpty() || range->Start().Value() > position.Value()) { |
| 696 // Can happen if there is a definition without use. | 699 // Can happen if there is a definition without use. |
| 697 range->AddUseInterval(position, position.NextInstruction(), zone()); | 700 range->AddUseInterval(position, position.NextInstruction(), local_zone()); |
| 698 range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone()); | 701 range->AddUsePosition(position.NextInstruction(), NULL, NULL, local_zone()); |
| 699 } else { | 702 } else { |
| 700 range->ShortenTo(position); | 703 range->ShortenTo(position); |
| 701 } | 704 } |
| 702 | 705 |
| 703 if (operand->IsUnallocated()) { | 706 if (operand->IsUnallocated()) { |
| 704 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); | 707 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); |
| 705 range->AddUsePosition(position, unalloc_operand, hint, zone()); | 708 range->AddUsePosition(position, unalloc_operand, hint, local_zone()); |
| 706 } | 709 } |
| 707 } | 710 } |
| 708 | 711 |
| 709 | 712 |
| 710 void RegisterAllocator::Use(LifetimePosition block_start, | 713 void RegisterAllocator::Use(LifetimePosition block_start, |
| 711 LifetimePosition position, | 714 LifetimePosition position, |
| 712 InstructionOperand* operand, | 715 InstructionOperand* operand, |
| 713 InstructionOperand* hint) { | 716 InstructionOperand* hint) { |
| 714 LiveRange* range = LiveRangeFor(operand); | 717 LiveRange* range = LiveRangeFor(operand); |
| 715 if (range == NULL) return; | 718 if (range == NULL) return; |
| 716 if (operand->IsUnallocated()) { | 719 if (operand->IsUnallocated()) { |
| 717 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); | 720 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); |
| 718 range->AddUsePosition(position, unalloc_operand, hint, zone()); | 721 range->AddUsePosition(position, unalloc_operand, hint, local_zone()); |
| 719 } | 722 } |
| 720 range->AddUseInterval(block_start, position, zone()); | 723 range->AddUseInterval(block_start, position, local_zone()); |
| 721 } | 724 } |
| 722 | 725 |
| 723 | 726 |
| 724 void RegisterAllocator::AddConstraintsGapMove(int index, | 727 void RegisterAllocator::AddConstraintsGapMove(int index, |
| 725 InstructionOperand* from, | 728 InstructionOperand* from, |
| 726 InstructionOperand* to) { | 729 InstructionOperand* to) { |
| 727 GapInstruction* gap = code()->GapAt(index); | 730 GapInstruction* gap = code()->GapAt(index); |
| 728 ParallelMove* move = | 731 ParallelMove* move = |
| 729 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); | 732 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); |
| 730 if (from->IsUnallocated()) { | 733 if (from->IsUnallocated()) { |
| (...skipping 285 matching lines...) | |
| 1016 live->Remove(out_vreg); | 1019 live->Remove(out_vreg); |
| 1017 } | 1020 } |
| 1018 Define(curr_position, output, NULL); | 1021 Define(curr_position, output, NULL); |
| 1019 } | 1022 } |
| 1020 | 1023 |
| 1021 if (instr->ClobbersRegisters()) { | 1024 if (instr->ClobbersRegisters()) { |
| 1022 for (int i = 0; i < config()->num_general_registers(); ++i) { | 1025 for (int i = 0; i < config()->num_general_registers(); ++i) { |
| 1023 if (!IsOutputRegisterOf(instr, i)) { | 1026 if (!IsOutputRegisterOf(instr, i)) { |
| 1024 LiveRange* range = FixedLiveRangeFor(i); | 1027 LiveRange* range = FixedLiveRangeFor(i); |
| 1025 range->AddUseInterval(curr_position, curr_position.InstructionEnd(), | 1028 range->AddUseInterval(curr_position, curr_position.InstructionEnd(), |
| 1026 zone()); | 1029 local_zone()); |
| 1027 } | 1030 } |
| 1028 } | 1031 } |
| 1029 } | 1032 } |
| 1030 | 1033 |
| 1031 if (instr->ClobbersDoubleRegisters()) { | 1034 if (instr->ClobbersDoubleRegisters()) { |
| 1032 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { | 1035 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { |
| 1033 if (!IsOutputDoubleRegisterOf(instr, i)) { | 1036 if (!IsOutputDoubleRegisterOf(instr, i)) { |
| 1034 LiveRange* range = FixedDoubleLiveRangeFor(i); | 1037 LiveRange* range = FixedDoubleLiveRangeFor(i); |
| 1035 range->AddUseInterval(curr_position, curr_position.InstructionEnd(), | 1038 range->AddUseInterval(curr_position, curr_position.InstructionEnd(), |
| 1036 zone()); | 1039 local_zone()); |
| 1037 } | 1040 } |
| 1038 } | 1041 } |
| 1039 } | 1042 } |
| 1040 | 1043 |
| 1041 for (size_t i = 0; i < instr->InputCount(); i++) { | 1044 for (size_t i = 0; i < instr->InputCount(); i++) { |
| 1042 InstructionOperand* input = instr->InputAt(i); | 1045 InstructionOperand* input = instr->InputAt(i); |
| 1043 if (input->IsImmediate()) continue; // Ignore immediates. | 1046 if (input->IsImmediate()) continue; // Ignore immediates. |
| 1044 LifetimePosition use_pos; | 1047 LifetimePosition use_pos; |
| 1045 if (input->IsUnallocated() && | 1048 if (input->IsUnallocated() && |
| 1046 UnallocatedOperand::cast(input)->IsUsedAtStart()) { | 1049 UnallocatedOperand::cast(input)->IsUsedAtStart()) { |
| (...skipping 106 matching lines...) | |
| 1153 PhaseScope phase_scope(stats, "resolve control flow"); | 1156 PhaseScope phase_scope(stats, "resolve control flow"); |
| 1154 ResolveControlFlow(); | 1157 ResolveControlFlow(); |
| 1155 } | 1158 } |
| 1156 frame()->SetAllocatedRegisters(assigned_registers_); | 1159 frame()->SetAllocatedRegisters(assigned_registers_); |
| 1157 frame()->SetAllocatedDoubleRegisters(assigned_double_registers_); | 1160 frame()->SetAllocatedDoubleRegisters(assigned_double_registers_); |
| 1158 return true; | 1161 return true; |
| 1159 } | 1162 } |
| 1160 | 1163 |
| 1161 | 1164 |
| 1162 void RegisterAllocator::MeetRegisterConstraints() { | 1165 void RegisterAllocator::MeetRegisterConstraints() { |
| 1163 for (int i = 0; i < code()->InstructionBlockCount(); ++i) { | 1166 for (auto block : code()->instruction_blocks()) { |
| 1164 MeetRegisterConstraints( | 1167 MeetRegisterConstraints(block); |
| 1165 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(i))); | |
| 1166 if (!AllocationOk()) return; | 1168 if (!AllocationOk()) return; |
| 1167 } | 1169 } |
| 1168 } | 1170 } |
| 1169 | 1171 |
| 1170 | 1172 |
| 1171 void RegisterAllocator::ResolvePhis() { | 1173 void RegisterAllocator::ResolvePhis() { |
| 1172 // Process the blocks in reverse order. | 1174 // Process the blocks in reverse order. |
| 1173 for (int i = code()->InstructionBlockCount() - 1; i >= 0; --i) { | 1175 for (auto i = code()->instruction_blocks().rbegin(); |
| 1174 ResolvePhis(code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(i))); | 1176 i != code()->instruction_blocks().rend(); ++i) { |
| 1177 ResolvePhis(*i); | |
| 1175 } | 1178 } |
| 1176 } | 1179 } |
| 1177 | 1180 |
| 1178 | |
| 1179 void RegisterAllocator::ResolveControlFlow(LiveRange* range, | |
| 1180 const InstructionBlock* block, | |
| 1181 const InstructionBlock* pred) { | |
| 1182 LifetimePosition pred_end = | |
| 1183 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); | |
| 1184 LifetimePosition cur_start = | |
| 1185 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); | |
| 1186 LiveRange* pred_cover = NULL; | |
| 1187 LiveRange* cur_cover = NULL; | |
| 1188 LiveRange* cur_range = range; | |
| 1189 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { | |
| 1190 if (cur_range->CanCover(cur_start)) { | |
| 1191 DCHECK(cur_cover == NULL); | |
| 1192 cur_cover = cur_range; | |
| 1193 } | |
| 1194 if (cur_range->CanCover(pred_end)) { | |
| 1195 DCHECK(pred_cover == NULL); | |
| 1196 pred_cover = cur_range; | |
| 1197 } | |
| 1198 cur_range = cur_range->next(); | |
| 1199 } | |
| 1200 | |
| 1201 if (cur_cover->IsSpilled()) return; | |
| 1202 DCHECK(pred_cover != NULL && cur_cover != NULL); | |
| 1203 if (pred_cover != cur_cover) { | |
| 1204 InstructionOperand* pred_op = | |
| 1205 pred_cover->CreateAssignedOperand(code_zone()); | |
| 1206 InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone()); | |
| 1207 if (!pred_op->Equals(cur_op)) { | |
| 1208 GapInstruction* gap = NULL; | |
| 1209 if (block->PredecessorCount() == 1) { | |
| 1210 gap = code()->GapAt(block->first_instruction_index()); | |
| 1211 } else { | |
| 1212 DCHECK(pred->SuccessorCount() == 1); | |
| 1213 gap = GetLastGap(pred); | |
| 1214 | |
| 1215 Instruction* branch = InstructionAt(pred->last_instruction_index()); | |
| 1216 DCHECK(!branch->HasPointerMap()); | |
| 1217 USE(branch); | |
| 1218 } | |
| 1219 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()) | |
| 1220 ->AddMove(pred_op, cur_op, code_zone()); | |
| 1221 } | |
| 1222 } | |
| 1223 } | |
| 1224 | |
| 1225 | 1181 |
| 1226 ParallelMove* RegisterAllocator::GetConnectingParallelMove( | 1182 ParallelMove* RegisterAllocator::GetConnectingParallelMove( |
| 1227 LifetimePosition pos) { | 1183 LifetimePosition pos) { |
| 1228 int index = pos.InstructionIndex(); | 1184 int index = pos.InstructionIndex(); |
| 1229 if (code()->IsGapAt(index)) { | 1185 if (code()->IsGapAt(index)) { |
| 1230 GapInstruction* gap = code()->GapAt(index); | 1186 GapInstruction* gap = code()->GapAt(index); |
| 1231 return gap->GetOrCreateParallelMove( | 1187 return gap->GetOrCreateParallelMove( |
| 1232 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END, | 1188 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END, |
| 1233 code_zone()); | 1189 code_zone()); |
| 1234 } | 1190 } |
| (...skipping 46 matching lines...) | |
| 1281 } | 1237 } |
| 1282 | 1238 |
| 1283 | 1239 |
| 1284 bool RegisterAllocator::CanEagerlyResolveControlFlow( | 1240 bool RegisterAllocator::CanEagerlyResolveControlFlow( |
| 1285 const InstructionBlock* block) const { | 1241 const InstructionBlock* block) const { |
| 1286 if (block->PredecessorCount() != 1) return false; | 1242 if (block->PredecessorCount() != 1) return false; |
| 1287 return block->predecessors()[0].IsNext(block->rpo_number()); | 1243 return block->predecessors()[0].IsNext(block->rpo_number()); |
| 1288 } | 1244 } |
| 1289 | 1245 |
| 1290 | 1246 |
| 1247 namespace { | |
| 1248 | |
| 1249 class LiveRangeBound { | |
| 1250 public: | |
| 1251 explicit LiveRangeBound(const LiveRange* range) | |
| 1252 : range_(range), start_(range->Start()), end_(range->End()) { | |
| 1253 DCHECK(!range->IsEmpty()); | |
| 1254 } | |
| 1255 | |
| 1256 bool CanCover(LifetimePosition position) { | |
| 1257 return start_.Value() <= position.Value() && | |
| 1258 position.Value() < end_.Value(); | |
| 1259 } | |
| 1260 | |
| 1261 const LiveRange* const range_; | |
| 1262 const LifetimePosition start_; | |
| 1263 const LifetimePosition end_; | |
| 1264 | |
| 1265 private: | |
| 1266 DISALLOW_COPY_AND_ASSIGN(LiveRangeBound); | |
| 1267 }; | |
| 1268 | |
| 1269 | |
| 1270 struct FindResult { | |
| 1271 const LiveRange* cur_cover_; | |
| 1272 const LiveRange* pred_cover_; | |
| 1273 }; | |
| 1274 | |
| 1275 | |
| 1276 class LiveRangeBoundArray { | |
| 1277 public: | |
| 1278 LiveRangeBoundArray() : length_(0), start_(nullptr) {} | |
| 1279 | |
| 1280 bool ShouldInitialize() { return start_ == NULL; } | |
| 1281 | |
| 1282 void Initialize(Zone* zone, const LiveRange* const range) { | |
| 1283 size_t length = 0; | |
| 1284 for (const LiveRange* i = range; i != NULL; i = i->next()) length++; | |
| 1285 start_ = zone->NewArray<LiveRangeBound>(static_cast<int>(length)); | |
| 1286 length_ = length; | |
| 1287 LiveRangeBound* curr = start_; | |
| 1288 for (const LiveRange* i = range; i != NULL; i = i->next(), ++curr) { | |
| 1289 new (curr) LiveRangeBound(i); | |
| 1290 } | |
| 1291 } | |
| 1292 | |
| 1293 LiveRangeBound* Find(const LifetimePosition position) const { | |
| 1294 size_t left_index = 0; | |
| 1295 size_t right_index = length_; | |
| 1296 while (true) { | |
| 1297 size_t current_index = left_index + (right_index - left_index) / 2; | |
| 1298 DCHECK(right_index > current_index); | |
| 1299 LiveRangeBound* bound = &start_[current_index]; | |
| 1300 if (bound->start_.Value() <= position.Value()) { | |
| 1301 if (position.Value() < bound->end_.Value()) return bound; | |
| 1302 DCHECK(left_index < current_index); | |
| 1303 left_index = current_index; | |
| 1304 } else { | |
| *Jarin, 2014/11/05 12:30:18: Thanks, this looks much better!* | |
| 1305 right_index = current_index; | |
| 1306 } | |
| 1307 } | |
| 1308 } | |
| 1309 | |
| 1310 void Find(const InstructionBlock* block, const InstructionBlock* pred, | |
| 1311 FindResult* result) const { | |
| 1312 const LifetimePosition pred_end = | |
| 1313 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); | |
| 1314 LiveRangeBound* bound = Find(pred_end); | |
| 1315 result->pred_cover_ = bound->range_; | |
| 1316 const LifetimePosition cur_start = LifetimePosition::FromInstructionIndex( | |
| 1317 block->first_instruction_index()); | |
| 1318 // Common case. | |
| 1319 if (bound->CanCover(cur_start)) { | |
| 1320 result->cur_cover_ = bound->range_; | |
| 1321 return; | |
| 1322 } | |
| 1323 result->cur_cover_ = Find(cur_start)->range_; | |
| 1324 DCHECK(result->pred_cover_ != NULL && result->cur_cover_ != NULL); | |
| 1325 } | |
| 1326 | |
| 1327 private: | |
| 1328 size_t length_; | |
| 1329 LiveRangeBound* start_; | |
| 1330 | |
| 1331 DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray); | |
| 1332 }; | |
| 1333 | |
| 1334 | |
| 1335 class LiveRangeFinder { | |
| 1336 public: | |
| 1337 explicit LiveRangeFinder(const RegisterAllocator& allocator) | |
| 1338 : allocator_(allocator), | |
| 1339 bounds_length_(allocator.live_ranges().length()), | |
| 1340 bounds_(allocator.local_zone()->NewArray<LiveRangeBoundArray>( | |
| 1341 bounds_length_)) { | |
| 1342 for (int i = 0; i < bounds_length_; ++i) { | |
| 1343 new (&bounds_[i]) LiveRangeBoundArray(); | |
| 1344 } | |
| 1345 } | |
| 1346 | |
| 1347 LiveRangeBoundArray* ArrayFor(int operand_index) { | |
| 1348 DCHECK(operand_index < bounds_length_); | |
| 1349 const LiveRange* range = allocator_.live_ranges()[operand_index]; | |
| 1350 DCHECK(range != nullptr && !range->IsEmpty()); | |
| 1351 LiveRangeBoundArray* array = &bounds_[operand_index]; | |
| 1352 if (array->ShouldInitialize()) { | |
| 1353 array->Initialize(allocator_.local_zone(), range); | |
| 1354 } | |
| 1355 return array; | |
| 1356 } | |
| 1357 | |
| 1358 private: | |
| 1359 const RegisterAllocator& allocator_; | |
| 1360 const int bounds_length_; | |
| 1361 LiveRangeBoundArray* const bounds_; | |
| 1362 | |
| 1363 DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder); | |
| 1364 }; | |
| 1365 | |
| 1366 } // namespace | |
| 1367 | |
| 1368 | |
| 1291 void RegisterAllocator::ResolveControlFlow() { | 1369 void RegisterAllocator::ResolveControlFlow() { |
| 1292 for (int block_id = 1; block_id < code()->InstructionBlockCount(); | 1370 // Lazily linearize live ranges in memory for fast lookup. |
| 1293 ++block_id) { | 1371 LiveRangeFinder finder(*this); |
| 1294 const InstructionBlock* block = | 1372 for (auto block : code()->instruction_blocks()) { |
| 1295 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id)); | |
| 1296 if (CanEagerlyResolveControlFlow(block)) continue; | 1373 if (CanEagerlyResolveControlFlow(block)) continue; |
| 1297 BitVector* live = live_in_sets_[block->rpo_number().ToInt()]; | 1374 BitVector* live = live_in_sets_[block->rpo_number().ToInt()]; |
| 1298 BitVector::Iterator iterator(live); | 1375 BitVector::Iterator iterator(live); |
| 1299 while (!iterator.Done()) { | 1376 while (!iterator.Done()) { |
| 1300 int operand_index = iterator.Current(); | 1377 LiveRangeBoundArray* array = finder.ArrayFor(iterator.Current()); |
| 1301 for (auto pred : block->predecessors()) { | 1378 for (auto pred : block->predecessors()) { |
| 1302 const InstructionBlock* cur = code()->InstructionBlockAt(pred); | 1379 FindResult result; |
| 1303 LiveRange* cur_range = LiveRangeFor(operand_index); | 1380 const InstructionBlock* pred_block = code()->InstructionBlockAt(pred); |
| 1304 ResolveControlFlow(cur_range, block, cur); | 1381 array->Find(block, pred_block, &result); |
| 1382 if (result.cur_cover_ == result.pred_cover_ || | |
| 1383 result.cur_cover_->IsSpilled()) | |
| 1384 continue; | |
| 1385 ResolveControlFlow(block, result.cur_cover_, pred_block, | |
| 1386 result.pred_cover_); | |
| 1305 } | 1387 } |
| 1306 iterator.Advance(); | 1388 iterator.Advance(); |
| 1307 } | 1389 } |
| 1308 } | 1390 } |
| 1309 } | 1391 } |
| 1310 | 1392 |
| 1311 | 1393 |
| 1394 void RegisterAllocator::ResolveControlFlow(const InstructionBlock* block, | |
| 1395 const LiveRange* cur_cover, | |
| 1396 const InstructionBlock* pred, | |
| 1397 const LiveRange* pred_cover) { | |
| 1398 InstructionOperand* pred_op = pred_cover->CreateAssignedOperand(code_zone()); | |
| 1399 InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone()); | |
| 1400 if (!pred_op->Equals(cur_op)) { | |
| 1401 GapInstruction* gap = NULL; | |
| 1402 if (block->PredecessorCount() == 1) { | |
| 1403 gap = code()->GapAt(block->first_instruction_index()); | |
| 1404 } else { | |
| 1405 DCHECK(pred->SuccessorCount() == 1); | |
| 1406 gap = GetLastGap(pred); | |
| 1407 | |
| 1408 Instruction* branch = InstructionAt(pred->last_instruction_index()); | |
| 1409 DCHECK(!branch->HasPointerMap()); | |
| 1410 USE(branch); | |
| 1411 } | |
| 1412 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()) | |
| 1413 ->AddMove(pred_op, cur_op, code_zone()); | |
| 1414 } | |
| 1415 } | |
| 1416 | |
| 1417 | |
| 1312 void RegisterAllocator::BuildLiveRanges() { | 1418 void RegisterAllocator::BuildLiveRanges() { |
| 1313 InitializeLivenessAnalysis(); | 1419 InitializeLivenessAnalysis(); |
| 1314 // Process the blocks in reverse order. | 1420 // Process the blocks in reverse order. |
| 1315 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0; | 1421 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0; |
| 1316 --block_id) { | 1422 --block_id) { |
| 1317 const InstructionBlock* block = | 1423 const InstructionBlock* block = |
| 1318 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id)); | 1424 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id)); |
| 1319 BitVector* live = ComputeLiveOut(block); | 1425 BitVector* live = ComputeLiveOut(block); |
| 1320 // Initially consider all live_out values live for the entire block. We | 1426 // Initially consider all live_out values live for the entire block. We |
| 1321 // will shorten these intervals if necessary. | 1427 // will shorten these intervals if necessary. |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1364 // for each value live on entry to the header. | 1470 // for each value live on entry to the header. |
| 1365 BitVector::Iterator iterator(live); | 1471 BitVector::Iterator iterator(live); |
| 1366 LifetimePosition start = LifetimePosition::FromInstructionIndex( | 1472 LifetimePosition start = LifetimePosition::FromInstructionIndex( |
| 1367 block->first_instruction_index()); | 1473 block->first_instruction_index()); |
| 1368 LifetimePosition end = | 1474 LifetimePosition end = |
| 1369 LifetimePosition::FromInstructionIndex( | 1475 LifetimePosition::FromInstructionIndex( |
| 1370 code()->LastLoopInstructionIndex(block)).NextInstruction(); | 1476 code()->LastLoopInstructionIndex(block)).NextInstruction(); |
| 1371 while (!iterator.Done()) { | 1477 while (!iterator.Done()) { |
| 1372 int operand_index = iterator.Current(); | 1478 int operand_index = iterator.Current(); |
| 1373 LiveRange* range = LiveRangeFor(operand_index); | 1479 LiveRange* range = LiveRangeFor(operand_index); |
| 1374 range->EnsureInterval(start, end, zone()); | 1480 range->EnsureInterval(start, end, local_zone()); |
| 1375 iterator.Advance(); | 1481 iterator.Advance(); |
| 1376 } | 1482 } |
| 1377 | 1483 |
| 1378 // Insert all values into the live in sets of all blocks in the loop. | 1484 // Insert all values into the live in sets of all blocks in the loop. |
| 1379 for (int i = block->rpo_number().ToInt() + 1; | 1485 for (int i = block->rpo_number().ToInt() + 1; |
| 1380 i < block->loop_end().ToInt(); ++i) { | 1486 i < block->loop_end().ToInt(); ++i) { |
| 1381 live_in_sets_[i]->Union(*live); | 1487 live_in_sets_[i]->Union(*live); |
| 1382 } | 1488 } |
| 1383 } | 1489 } |
| 1384 | 1490 |
| (...skipping 275 matching lines...) | |
| 1660 | 1766 |
| 1661 RegisterKind RegisterAllocator::RequiredRegisterKind( | 1767 RegisterKind RegisterAllocator::RequiredRegisterKind( |
| 1662 int virtual_register) const { | 1768 int virtual_register) const { |
| 1663 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS | 1769 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS |
| 1664 : GENERAL_REGISTERS; | 1770 : GENERAL_REGISTERS; |
| 1665 } | 1771 } |
| 1666 | 1772 |
| 1667 | 1773 |
| 1668 void RegisterAllocator::AddToActive(LiveRange* range) { | 1774 void RegisterAllocator::AddToActive(LiveRange* range) { |
| 1669 TraceAlloc("Add live range %d to active\n", range->id()); | 1775 TraceAlloc("Add live range %d to active\n", range->id()); |
| 1670 active_live_ranges_.Add(range, zone()); | 1776 active_live_ranges_.Add(range, local_zone()); |
| 1671 } | 1777 } |
| 1672 | 1778 |
| 1673 | 1779 |
| 1674 void RegisterAllocator::AddToInactive(LiveRange* range) { | 1780 void RegisterAllocator::AddToInactive(LiveRange* range) { |
| 1675 TraceAlloc("Add live range %d to inactive\n", range->id()); | 1781 TraceAlloc("Add live range %d to inactive\n", range->id()); |
| 1676 inactive_live_ranges_.Add(range, zone()); | 1782 inactive_live_ranges_.Add(range, local_zone()); |
| 1677 } | 1783 } |
| 1678 | 1784 |
| 1679 | 1785 |
| 1680 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { | 1786 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { |
| 1681 if (range == NULL || range->IsEmpty()) return; | 1787 if (range == NULL || range->IsEmpty()) return; |
| 1682 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | 1788 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| 1683 DCHECK(allocation_finger_.Value() <= range->Start().Value()); | 1789 DCHECK(allocation_finger_.Value() <= range->Start().Value()); |
| 1684 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) { | 1790 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) { |
| 1685 LiveRange* cur_range = unhandled_live_ranges_.at(i); | 1791 LiveRange* cur_range = unhandled_live_ranges_.at(i); |
| 1686 if (range->ShouldBeAllocatedBefore(cur_range)) { | 1792 if (range->ShouldBeAllocatedBefore(cur_range)) { |
| 1687 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1); | 1793 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1); |
| 1688 unhandled_live_ranges_.InsertAt(i + 1, range, zone()); | 1794 unhandled_live_ranges_.InsertAt(i + 1, range, local_zone()); |
| 1689 DCHECK(UnhandledIsSorted()); | 1795 DCHECK(UnhandledIsSorted()); |
| 1690 return; | 1796 return; |
| 1691 } | 1797 } |
| 1692 } | 1798 } |
| 1693 TraceAlloc("Add live range %d to unhandled at start\n", range->id()); | 1799 TraceAlloc("Add live range %d to unhandled at start\n", range->id()); |
| 1694 unhandled_live_ranges_.InsertAt(0, range, zone()); | 1800 unhandled_live_ranges_.InsertAt(0, range, local_zone()); |
| 1695 DCHECK(UnhandledIsSorted()); | 1801 DCHECK(UnhandledIsSorted()); |
| 1696 } | 1802 } |
| 1697 | 1803 |
| 1698 | 1804 |
| 1699 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { | 1805 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { |
| 1700 if (range == NULL || range->IsEmpty()) return; | 1806 if (range == NULL || range->IsEmpty()) return; |
| 1701 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | 1807 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| 1702 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id()); | 1808 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id()); |
| 1703 unhandled_live_ranges_.Add(range, zone()); | 1809 unhandled_live_ranges_.Add(range, local_zone()); |
| 1704 } | 1810 } |
| 1705 | 1811 |
| 1706 | 1812 |
| 1707 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) { | 1813 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) { |
| 1708 DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) || | 1814 DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) || |
| 1709 !(*b)->ShouldBeAllocatedBefore(*a)); | 1815 !(*b)->ShouldBeAllocatedBefore(*a)); |
| 1710 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1; | 1816 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1; |
| 1711 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1; | 1817 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1; |
| 1712 return (*a)->id() - (*b)->id(); | 1818 return (*a)->id() - (*b)->id(); |
| 1713 } | 1819 } |
| (...skipping 21 matching lines...) | |
| 1735 | 1841 |
| 1736 void RegisterAllocator::FreeSpillSlot(LiveRange* range) { | 1842 void RegisterAllocator::FreeSpillSlot(LiveRange* range) { |
| 1737 // Check that we are the last range. | 1843 // Check that we are the last range. |
| 1738 if (range->next() != NULL) return; | 1844 if (range->next() != NULL) return; |
| 1739 | 1845 |
| 1740 if (!range->TopLevel()->HasAllocatedSpillOperand()) return; | 1846 if (!range->TopLevel()->HasAllocatedSpillOperand()) return; |
| 1741 | 1847 |
| 1742 InstructionOperand* spill_operand = range->TopLevel()->GetSpillOperand(); | 1848 InstructionOperand* spill_operand = range->TopLevel()->GetSpillOperand(); |
| 1743 if (spill_operand->IsConstant()) return; | 1849 if (spill_operand->IsConstant()) return; |
| 1744 if (spill_operand->index() >= 0) { | 1850 if (spill_operand->index() >= 0) { |
| 1745 reusable_slots_.Add(range, zone()); | 1851 reusable_slots_.Add(range, local_zone()); |
| 1746 } | 1852 } |
| 1747 } | 1853 } |
| 1748 | 1854 |
| 1749 | 1855 |
| 1750 InstructionOperand* RegisterAllocator::TryReuseSpillSlot(LiveRange* range) { | 1856 InstructionOperand* RegisterAllocator::TryReuseSpillSlot(LiveRange* range) { |
| 1751 if (reusable_slots_.is_empty()) return NULL; | 1857 if (reusable_slots_.is_empty()) return NULL; |
| 1752 if (reusable_slots_.first()->End().Value() > | 1858 if (reusable_slots_.first()->End().Value() > |
| 1753 range->TopLevel()->Start().Value()) { | 1859 range->TopLevel()->Start().Value()) { |
| 1754 return NULL; | 1860 return NULL; |
| 1755 } | 1861 } |
| 1756 InstructionOperand* result = | 1862 InstructionOperand* result = |
| 1757 reusable_slots_.first()->TopLevel()->GetSpillOperand(); | 1863 reusable_slots_.first()->TopLevel()->GetSpillOperand(); |
| 1758 reusable_slots_.Remove(0); | 1864 reusable_slots_.Remove(0); |
| 1759 return result; | 1865 return result; |
| 1760 } | 1866 } |
| 1761 | 1867 |
| 1762 | 1868 |
| 1763 void RegisterAllocator::ActiveToHandled(LiveRange* range) { | 1869 void RegisterAllocator::ActiveToHandled(LiveRange* range) { |
| 1764 DCHECK(active_live_ranges_.Contains(range)); | 1870 DCHECK(active_live_ranges_.Contains(range)); |
| 1765 active_live_ranges_.RemoveElement(range); | 1871 active_live_ranges_.RemoveElement(range); |
| 1766 TraceAlloc("Moving live range %d from active to handled\n", range->id()); | 1872 TraceAlloc("Moving live range %d from active to handled\n", range->id()); |
| 1767 FreeSpillSlot(range); | 1873 FreeSpillSlot(range); |
| 1768 } | 1874 } |
| 1769 | 1875 |
| 1770 | 1876 |
| 1771 void RegisterAllocator::ActiveToInactive(LiveRange* range) { | 1877 void RegisterAllocator::ActiveToInactive(LiveRange* range) { |
| 1772 DCHECK(active_live_ranges_.Contains(range)); | 1878 DCHECK(active_live_ranges_.Contains(range)); |
| 1773 active_live_ranges_.RemoveElement(range); | 1879 active_live_ranges_.RemoveElement(range); |
| 1774 inactive_live_ranges_.Add(range, zone()); | 1880 inactive_live_ranges_.Add(range, local_zone()); |
| 1775 TraceAlloc("Moving live range %d from active to inactive\n", range->id()); | 1881 TraceAlloc("Moving live range %d from active to inactive\n", range->id()); |
| 1776 } | 1882 } |
| 1777 | 1883 |
| 1778 | 1884 |
| 1779 void RegisterAllocator::InactiveToHandled(LiveRange* range) { | 1885 void RegisterAllocator::InactiveToHandled(LiveRange* range) { |
| 1780 DCHECK(inactive_live_ranges_.Contains(range)); | 1886 DCHECK(inactive_live_ranges_.Contains(range)); |
| 1781 inactive_live_ranges_.RemoveElement(range); | 1887 inactive_live_ranges_.RemoveElement(range); |
| 1782 TraceAlloc("Moving live range %d from inactive to handled\n", range->id()); | 1888 TraceAlloc("Moving live range %d from inactive to handled\n", range->id()); |
| 1783 FreeSpillSlot(range); | 1889 FreeSpillSlot(range); |
| 1784 } | 1890 } |
| 1785 | 1891 |
| 1786 | 1892 |
| 1787 void RegisterAllocator::InactiveToActive(LiveRange* range) { | 1893 void RegisterAllocator::InactiveToActive(LiveRange* range) { |
| 1788 DCHECK(inactive_live_ranges_.Contains(range)); | 1894 DCHECK(inactive_live_ranges_.Contains(range)); |
| 1789 inactive_live_ranges_.RemoveElement(range); | 1895 inactive_live_ranges_.RemoveElement(range); |
| 1790 active_live_ranges_.Add(range, zone()); | 1896 active_live_ranges_.Add(range, local_zone()); |
| 1791 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); | 1897 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); |
| 1792 } | 1898 } |
| 1793 | 1899 |
| 1794 | 1900 |
| 1795 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { | 1901 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { |
| 1796 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 1902 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 1797 | 1903 |
| 1798 for (int i = 0; i < num_registers_; i++) { | 1904 for (int i = 0; i < num_registers_; i++) { |
| 1799 free_until_pos[i] = LifetimePosition::MaxPosition(); | 1905 free_until_pos[i] = LifetimePosition::MaxPosition(); |
| 1800 } | 1906 } |
| (...skipping 255 matching lines...) | |
| 2056 if (pos.Value() <= range->Start().Value()) return range; | 2162 if (pos.Value() <= range->Start().Value()) return range; |
| 2057 | 2163 |
| 2058 // We can't properly connect live ranges if the split occurred at the end | 2164 // We can't properly connect live ranges if the split occurred at the end |
| 2059 // of control instruction. | 2165 // of control instruction. |
| 2060 DCHECK(pos.IsInstructionStart() || | 2166 DCHECK(pos.IsInstructionStart() || |
| 2061 !InstructionAt(pos.InstructionIndex())->IsControl()); | 2167 !InstructionAt(pos.InstructionIndex())->IsControl()); |
| 2062 | 2168 |
| 2063 int vreg = GetVirtualRegister(); | 2169 int vreg = GetVirtualRegister(); |
| 2064 if (!AllocationOk()) return NULL; | 2170 if (!AllocationOk()) return NULL; |
| 2065 LiveRange* result = LiveRangeFor(vreg); | 2171 LiveRange* result = LiveRangeFor(vreg); |
| 2066 range->SplitAt(pos, result, zone()); | 2172 range->SplitAt(pos, result, local_zone()); |
| 2067 return result; | 2173 return result; |
| 2068 } | 2174 } |
| 2069 | 2175 |
| 2070 | 2176 |
| 2071 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, | 2177 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, |
| 2072 LifetimePosition start, | 2178 LifetimePosition start, |
| 2073 LifetimePosition end) { | 2179 LifetimePosition end) { |
| 2074 DCHECK(!range->IsFixed()); | 2180 DCHECK(!range->IsFixed()); |
| 2075 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", | 2181 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", |
| 2076 range->id(), start.Value(), end.Value()); | 2182 range->id(), start.Value(), end.Value()); |
| (...skipping 87 matching lines...) | |
| 2164 TraceAlloc("Spilling live range %d\n", range->id()); | 2270 TraceAlloc("Spilling live range %d\n", range->id()); |
| 2165 LiveRange* first = range->TopLevel(); | 2271 LiveRange* first = range->TopLevel(); |
| 2166 | 2272 |
| 2167 if (!first->HasAllocatedSpillOperand()) { | 2273 if (!first->HasAllocatedSpillOperand()) { |
| 2168 InstructionOperand* op = TryReuseSpillSlot(range); | 2274 InstructionOperand* op = TryReuseSpillSlot(range); |
| 2169 if (op == NULL) { | 2275 if (op == NULL) { |
| 2170 // Allocate a new operand referring to the spill slot. | 2276 // Allocate a new operand referring to the spill slot. |
| 2171 RegisterKind kind = range->Kind(); | 2277 RegisterKind kind = range->Kind(); |
| 2172 int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS); | 2278 int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS); |
| 2173 if (kind == DOUBLE_REGISTERS) { | 2279 if (kind == DOUBLE_REGISTERS) { |
| 2174 op = DoubleStackSlotOperand::Create(index, zone()); | 2280 op = DoubleStackSlotOperand::Create(index, local_zone()); |
| 2175 } else { | 2281 } else { |
| 2176 DCHECK(kind == GENERAL_REGISTERS); | 2282 DCHECK(kind == GENERAL_REGISTERS); |
| 2177 op = StackSlotOperand::Create(index, zone()); | 2283 op = StackSlotOperand::Create(index, local_zone()); |
| 2178 } | 2284 } |
| 2179 } | 2285 } |
| 2180 first->SetSpillOperand(op); | 2286 first->SetSpillOperand(op); |
| 2181 } | 2287 } |
| 2182 range->MakeSpilled(code_zone()); | 2288 range->MakeSpilled(code_zone()); |
| 2183 } | 2289 } |
| 2184 | 2290 |
| 2185 | 2291 |
| 2186 int RegisterAllocator::RegisterCount() const { return num_registers_; } | 2292 int RegisterAllocator::RegisterCount() const { return num_registers_; } |
| 2187 | 2293 |
| (...skipping 18 matching lines...) | |
| 2206 } else { | 2312 } else { |
| 2207 DCHECK(range->Kind() == GENERAL_REGISTERS); | 2313 DCHECK(range->Kind() == GENERAL_REGISTERS); |
| 2208 assigned_registers_->Add(reg); | 2314 assigned_registers_->Add(reg); |
| 2209 } | 2315 } |
| 2210 range->set_assigned_register(reg, code_zone()); | 2316 range->set_assigned_register(reg, code_zone()); |
| 2211 } | 2317 } |
| 2212 | 2318 |
| 2213 } | 2319 } |
| 2214 } | 2320 } |
| 2215 } // namespace v8::internal::compiler | 2321 } // namespace v8::internal::compiler |
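
The main algorithmic change in this diff is in `ResolveControlFlow`: instead of re-walking the linked list of split child ranges for every (block, predecessor) pair, the new `LiveRangeFinder`/`LiveRangeBoundArray` helpers lazily flatten each live range's children into a sorted array of `[start, end)` bounds and binary-search it. The standalone sketch below illustrates just that lookup; the `Bound` struct and `FindCover` function are simplified stand-ins invented for this example rather than V8 types, and like the real code's DCHECKs it assumes the queried position is covered by one of the disjoint, sorted intervals.

```cpp
// Minimal sketch of the binary search used by LiveRangeBoundArray::Find,
// over simplified types. Not V8's API; names here are illustrative only.
#include <cassert>
#include <cstddef>
#include <vector>

struct Bound {
  int start;     // inclusive lifetime position
  int end;       // exclusive lifetime position
  int range_id;  // stands in for the LiveRange* the real code stores
};

// Returns the index of the bound covering |pos|. Precondition: |bounds| is
// sorted, the intervals are disjoint, and some interval contains |pos|
// (mirroring the DCHECKs in the original loop).
size_t FindCover(const std::vector<Bound>& bounds, int pos) {
  size_t left = 0;
  size_t right = bounds.size();
  while (true) {
    size_t mid = left + (right - left) / 2;
    const Bound& b = bounds[mid];
    if (b.start <= pos) {
      if (pos < b.end) return mid;  // found the covering interval
      assert(left < mid);
      left = mid;
    } else {
      right = mid;
    }
  }
}

int main() {
  // Disjoint, sorted intervals, e.g. the pieces of one split live range.
  std::vector<Bound> bounds = {{0, 10, 1}, {10, 14, 2}, {20, 30, 3}};
  assert(FindCover(bounds, 4) == 0u);
  assert(FindCover(bounds, 12) == 1u);
  assert(FindCover(bounds, 25) == 2u);
  return 0;
}
```

The apparent payoff, matching the reviewer's comment above, is one linear flattening per live range followed by logarithmic cover lookups when connecting predecessor and successor blocks, rather than a linear list walk per control-flow edge.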