Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(233)

Side by Side Diff: src/compiler/register-allocator.cc

Issue 664683002: [turbofan] decouple register allocation from schedule and graph (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/compiler/register-allocator.h ('k') | src/compiler/schedule.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler/register-allocator.h" 5 #include "src/compiler/register-allocator.h"
6 6
7 #include "src/compiler/generic-node-inl.h"
8 #include "src/compiler/linkage.h" 7 #include "src/compiler/linkage.h"
9 #include "src/hydrogen.h" 8 #include "src/hydrogen.h"
10 #include "src/string-stream.h" 9 #include "src/string-stream.h"
11 10
12 namespace v8 { 11 namespace v8 {
13 namespace internal { 12 namespace internal {
14 namespace compiler { 13 namespace compiler {
15 14
16 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { 15 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
17 return a.Value() < b.Value() ? a : b; 16 return a.Value() < b.Value() ? a : b;
(...skipping 499 matching lines...) Expand 10 before | Expand all | Expand 10 after
517 516
518 517
519 void RegisterAllocator::InitializeLivenessAnalysis() { 518 void RegisterAllocator::InitializeLivenessAnalysis() {
520 // Initialize the live_in sets for each block to NULL. 519 // Initialize the live_in sets for each block to NULL.
521 int block_count = code()->BasicBlockCount(); 520 int block_count = code()->BasicBlockCount();
522 live_in_sets_.Initialize(block_count, zone()); 521 live_in_sets_.Initialize(block_count, zone());
523 live_in_sets_.AddBlock(NULL, block_count, zone()); 522 live_in_sets_.AddBlock(NULL, block_count, zone());
524 } 523 }
525 524
526 525
527 BitVector* RegisterAllocator::ComputeLiveOut(BasicBlock* block) { 526 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) {
528 // Compute live out for the given block, except not including backward 527 // Compute live out for the given block, except not including backward
529 // successor edges. 528 // successor edges.
530 BitVector* live_out = 529 BitVector* live_out =
531 new (zone()) BitVector(code()->VirtualRegisterCount(), zone()); 530 new (zone()) BitVector(code()->VirtualRegisterCount(), zone());
532 531
533 // Process all successor blocks. 532 // Process all successor blocks.
534 for (BasicBlock::Successors::iterator i = block->successors_begin(); 533 for (auto succ : block->successors()) {
535 i != block->successors_end(); ++i) {
536 // Add values live on entry to the successor. Note the successor's 534 // Add values live on entry to the successor. Note the successor's
537 // live_in will not be computed yet for backwards edges. 535 // live_in will not be computed yet for backwards edges.
538 BasicBlock* successor = *i; 536 BitVector* live_in = live_in_sets_[succ.ToSize()];
539 BitVector* live_in = live_in_sets_[successor->rpo_number()];
540 if (live_in != NULL) live_out->Union(*live_in); 537 if (live_in != NULL) live_out->Union(*live_in);
541 538
542 // All phi input operands corresponding to this successor edge are live 539 // All phi input operands corresponding to this successor edge are live
543 // out from this block. 540 // out from this block.
544 size_t index = successor->PredecessorIndexOf(block); 541 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
542 size_t index = successor->PredecessorIndexOf(block->rpo_number());
545 DCHECK(index < successor->PredecessorCount()); 543 DCHECK(index < successor->PredecessorCount());
546 for (BasicBlock::const_iterator j = successor->begin(); 544 for (auto phi : successor->phis()) {
547 j != successor->end(); ++j) { 545 live_out->Add(phi->operands()[index]);
548 Node* phi = *j;
549 if (phi->opcode() != IrOpcode::kPhi) continue;
550 Node* input = phi->InputAt(static_cast<int>(index));
551 live_out->Add(code()->GetVirtualRegister(input));
552 } 546 }
553 } 547 }
554 return live_out; 548 return live_out;
555 } 549 }
556 550
557 551
558 void RegisterAllocator::AddInitialIntervals(BasicBlock* block, 552 void RegisterAllocator::AddInitialIntervals(const InstructionBlock* block,
559 BitVector* live_out) { 553 BitVector* live_out) {
560 // Add an interval that includes the entire block to the live range for 554 // Add an interval that includes the entire block to the live range for
561 // each live_out value. 555 // each live_out value.
562 LifetimePosition start = LifetimePosition::FromInstructionIndex( 556 LifetimePosition start =
563 code()->first_instruction_index(block)); 557 LifetimePosition::FromInstructionIndex(block->first_instruction_index());
564 LifetimePosition end = 558 LifetimePosition end = LifetimePosition::FromInstructionIndex(
565 LifetimePosition::FromInstructionIndex( 559 block->last_instruction_index()).NextInstruction();
566 code()->last_instruction_index(block)).NextInstruction();
567 BitVector::Iterator iterator(live_out); 560 BitVector::Iterator iterator(live_out);
568 while (!iterator.Done()) { 561 while (!iterator.Done()) {
569 int operand_index = iterator.Current(); 562 int operand_index = iterator.Current();
570 LiveRange* range = LiveRangeFor(operand_index); 563 LiveRange* range = LiveRangeFor(operand_index);
571 range->AddUseInterval(start, end, zone()); 564 range->AddUseInterval(start, end, zone());
572 iterator.Advance(); 565 iterator.Advance();
573 } 566 }
574 } 567 }
575 568
576 569
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after
644 } 637 }
645 LiveRange* result = live_ranges_[index]; 638 LiveRange* result = live_ranges_[index];
646 if (result == NULL) { 639 if (result == NULL) {
647 result = new (zone()) LiveRange(index, code_zone()); 640 result = new (zone()) LiveRange(index, code_zone());
648 live_ranges_[index] = result; 641 live_ranges_[index] = result;
649 } 642 }
650 return result; 643 return result;
651 } 644 }
652 645
653 646
654 GapInstruction* RegisterAllocator::GetLastGap(BasicBlock* block) { 647 GapInstruction* RegisterAllocator::GetLastGap(const InstructionBlock* block) {
655 int last_instruction = code()->last_instruction_index(block); 648 int last_instruction = block->last_instruction_index();
656 return code()->GapAt(last_instruction - 1); 649 return code()->GapAt(last_instruction - 1);
657 } 650 }
658 651
659 652
660 LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) { 653 LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) {
661 if (operand->IsUnallocated()) { 654 if (operand->IsUnallocated()) {
662 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register()); 655 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register());
663 } else if (operand->IsRegister()) { 656 } else if (operand->IsRegister()) {
664 return FixedLiveRangeFor(operand->index()); 657 return FixedLiveRangeFor(operand->index());
665 } else if (operand->IsDoubleRegister()) { 658 } else if (operand->IsDoubleRegister()) {
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after
722 move->AddMove(cur.source(), to, code_zone()); 715 move->AddMove(cur.source(), to, code_zone());
723 return; 716 return;
724 } 717 }
725 } 718 }
726 } 719 }
727 } 720 }
728 move->AddMove(from, to, code_zone()); 721 move->AddMove(from, to, code_zone());
729 } 722 }
730 723
731 724
732 void RegisterAllocator::MeetRegisterConstraints(BasicBlock* block) { 725 void RegisterAllocator::MeetRegisterConstraints(const InstructionBlock* block) {
733 int start = code()->first_instruction_index(block); 726 int start = block->first_instruction_index();
734 int end = code()->last_instruction_index(block); 727 int end = block->last_instruction_index();
735 DCHECK_NE(-1, start); 728 DCHECK_NE(-1, start);
736 for (int i = start; i <= end; ++i) { 729 for (int i = start; i <= end; ++i) {
737 if (code()->IsGapAt(i)) { 730 if (code()->IsGapAt(i)) {
738 Instruction* instr = NULL; 731 Instruction* instr = NULL;
739 Instruction* prev_instr = NULL; 732 Instruction* prev_instr = NULL;
740 if (i < end) instr = InstructionAt(i + 1); 733 if (i < end) instr = InstructionAt(i + 1);
741 if (i > start) prev_instr = InstructionAt(i - 1); 734 if (i > start) prev_instr = InstructionAt(i - 1);
742 MeetConstraintsBetween(prev_instr, instr, i); 735 MeetConstraintsBetween(prev_instr, instr, i);
743 if (!AllocationOk()) return; 736 if (!AllocationOk()) return;
744 } 737 }
745 } 738 }
746 739
747 // Meet register constraints for the instruction in the end. 740 // Meet register constraints for the instruction in the end.
748 if (!code()->IsGapAt(end)) { 741 if (!code()->IsGapAt(end)) {
749 MeetRegisterConstraintsForLastInstructionInBlock(block); 742 MeetRegisterConstraintsForLastInstructionInBlock(block);
750 } 743 }
751 } 744 }
752 745
753 746
754 void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock( 747 void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock(
755 BasicBlock* block) { 748 const InstructionBlock* block) {
756 int end = code()->last_instruction_index(block); 749 int end = block->last_instruction_index();
757 Instruction* last_instruction = InstructionAt(end); 750 Instruction* last_instruction = InstructionAt(end);
758 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { 751 for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
759 InstructionOperand* output_operand = last_instruction->OutputAt(i); 752 InstructionOperand* output_operand = last_instruction->OutputAt(i);
760 DCHECK(!output_operand->IsConstant()); 753 DCHECK(!output_operand->IsConstant());
761 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand); 754 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
762 int output_vreg = output->virtual_register(); 755 int output_vreg = output->virtual_register();
763 LiveRange* range = LiveRangeFor(output_vreg); 756 LiveRange* range = LiveRangeFor(output_vreg);
764 bool assigned = false; 757 bool assigned = false;
765 if (output->HasFixedPolicy()) { 758 if (output->HasFixedPolicy()) {
766 AllocateFixed(output, -1, false); 759 AllocateFixed(output, -1, false);
767 // This value is produced on the stack, we never need to spill it. 760 // This value is produced on the stack, we never need to spill it.
768 if (output->IsStackSlot()) { 761 if (output->IsStackSlot()) {
769 range->SetSpillOperand(output); 762 range->SetSpillOperand(output);
770 range->SetSpillStartIndex(end); 763 range->SetSpillStartIndex(end);
771 assigned = true; 764 assigned = true;
772 } 765 }
773 766
774 for (BasicBlock::Successors::iterator succ = block->successors_begin(); 767 for (auto succ : block->successors()) {
775 succ != block->successors_end(); ++succ) { 768 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
776 DCHECK((*succ)->PredecessorCount() == 1); 769 DCHECK(successor->PredecessorCount() == 1);
777 int gap_index = code()->first_instruction_index(*succ) + 1; 770 int gap_index = successor->first_instruction_index() + 1;
778 DCHECK(code()->IsGapAt(gap_index)); 771 DCHECK(code()->IsGapAt(gap_index));
779 772
780 // Create an unconstrained operand for the same virtual register 773 // Create an unconstrained operand for the same virtual register
781 // and insert a gap move from the fixed output to the operand. 774 // and insert a gap move from the fixed output to the operand.
782 UnallocatedOperand* output_copy = 775 UnallocatedOperand* output_copy =
783 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY); 776 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY);
784 output_copy->set_virtual_register(output_vreg); 777 output_copy->set_virtual_register(output_vreg);
785 778
786 code()->AddGapMove(gap_index, output, output_copy); 779 code()->AddGapMove(gap_index, output, output_copy);
787 } 780 }
788 } 781 }
789 782
790 if (!assigned) { 783 if (!assigned) {
791 for (BasicBlock::Successors::iterator succ = block->successors_begin(); 784 for (auto succ : block->successors()) {
792 succ != block->successors_end(); ++succ) { 785 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
793 DCHECK((*succ)->PredecessorCount() == 1); 786 DCHECK(successor->PredecessorCount() == 1);
794 int gap_index = code()->first_instruction_index(*succ) + 1; 787 int gap_index = successor->first_instruction_index() + 1;
795 range->SetSpillStartIndex(gap_index); 788 range->SetSpillStartIndex(gap_index);
796 789
797 // This move to spill operand is not a real use. Liveness analysis 790 // This move to spill operand is not a real use. Liveness analysis
798 // and splitting of live ranges do not account for it. 791 // and splitting of live ranges do not account for it.
799 // Thus it should be inserted to a lifetime position corresponding to 792 // Thus it should be inserted to a lifetime position corresponding to
800 // the instruction end. 793 // the instruction end.
801 GapInstruction* gap = code()->GapAt(gap_index); 794 GapInstruction* gap = code()->GapAt(gap_index);
802 ParallelMove* move = 795 ParallelMove* move =
803 gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone()); 796 gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone());
804 move->AddMove(output, range->GetSpillOperand(), code_zone()); 797 move->AddMove(output, range->GetSpillOperand(), code_zone());
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after
929 bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr, 922 bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr,
930 int index) { 923 int index) {
931 for (size_t i = 0; i < instr->OutputCount(); i++) { 924 for (size_t i = 0; i < instr->OutputCount(); i++) {
932 InstructionOperand* output = instr->OutputAt(i); 925 InstructionOperand* output = instr->OutputAt(i);
933 if (output->IsDoubleRegister() && output->index() == index) return true; 926 if (output->IsDoubleRegister() && output->index() == index) return true;
934 } 927 }
935 return false; 928 return false;
936 } 929 }
937 930
938 931
939 void RegisterAllocator::ProcessInstructions(BasicBlock* block, 932 void RegisterAllocator::ProcessInstructions(const InstructionBlock* block,
940 BitVector* live) { 933 BitVector* live) {
941 int block_start = code()->first_instruction_index(block); 934 int block_start = block->first_instruction_index();
942 935
943 LifetimePosition block_start_position = 936 LifetimePosition block_start_position =
944 LifetimePosition::FromInstructionIndex(block_start); 937 LifetimePosition::FromInstructionIndex(block_start);
945 938
946 for (int index = code()->last_instruction_index(block); index >= block_start; 939 for (int index = block->last_instruction_index(); index >= block_start;
947 index--) { 940 index--) {
948 LifetimePosition curr_position = 941 LifetimePosition curr_position =
949 LifetimePosition::FromInstructionIndex(index); 942 LifetimePosition::FromInstructionIndex(index);
950 943
951 Instruction* instr = InstructionAt(index); 944 Instruction* instr = InstructionAt(index);
952 DCHECK(instr != NULL); 945 DCHECK(instr != NULL);
953 if (instr->IsGapMoves()) { 946 if (instr->IsGapMoves()) {
954 // Process the moves of the gap instruction, making their sources live. 947 // Process the moves of the gap instruction, making their sources live.
955 GapInstruction* gap = code()->GapAt(index); 948 GapInstruction* gap = code()->GapAt(index);
956 949
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after
1052 } 1045 }
1053 } 1046 }
1054 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL); 1047 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
1055 Define(curr_position, temp, NULL); 1048 Define(curr_position, temp, NULL);
1056 } 1049 }
1057 } 1050 }
1058 } 1051 }
1059 } 1052 }
1060 1053
1061 1054
1062 void RegisterAllocator::ResolvePhis(BasicBlock* block) { 1055 void RegisterAllocator::ResolvePhis(const InstructionBlock* block) {
1063 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); ++i) { 1056 for (auto phi : block->phis()) {
1064 Node* phi = *i;
1065 if (phi->opcode() != IrOpcode::kPhi) continue;
1066
1067 UnallocatedOperand* phi_operand = 1057 UnallocatedOperand* phi_operand =
1068 new (code_zone()) UnallocatedOperand(UnallocatedOperand::NONE); 1058 new (code_zone()) UnallocatedOperand(UnallocatedOperand::NONE);
1069 int phi_vreg = code()->GetVirtualRegister(phi); 1059 int phi_vreg = phi->virtual_register();
1070 phi_operand->set_virtual_register(phi_vreg); 1060 phi_operand->set_virtual_register(phi_vreg);
1071 1061
1072 size_t j = 0; 1062 for (size_t i = 0; i < phi->operands().size(); ++i) {
1073 Node::Inputs inputs = phi->inputs();
1074 for (Node::Inputs::iterator iter(inputs.begin()); iter != inputs.end();
1075 ++iter, ++j) {
1076 Node* op = *iter;
1077 // TODO(mstarzinger): Use a ValueInputIterator instead.
1078 if (j >= block->PredecessorCount()) continue;
1079 UnallocatedOperand* operand = 1063 UnallocatedOperand* operand =
1080 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY); 1064 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY);
1081 operand->set_virtual_register(code()->GetVirtualRegister(op)); 1065 operand->set_virtual_register(phi->operands()[i]);
1082 BasicBlock* cur_block = block->PredecessorAt(j); 1066 InstructionBlock* cur_block =
1067 code()->InstructionBlockAt(block->predecessors()[i]);
1083 // The gap move must be added without any special processing as in 1068 // The gap move must be added without any special processing as in
1084 // the AddConstraintsGapMove. 1069 // the AddConstraintsGapMove.
1085 code()->AddGapMove(code()->last_instruction_index(cur_block) - 1, operand, 1070 code()->AddGapMove(cur_block->last_instruction_index() - 1, operand,
1086 phi_operand); 1071 phi_operand);
1087 1072
1088 Instruction* branch = 1073 Instruction* branch = InstructionAt(cur_block->last_instruction_index());
1089 InstructionAt(code()->last_instruction_index(cur_block));
1090 DCHECK(!branch->HasPointerMap()); 1074 DCHECK(!branch->HasPointerMap());
1091 USE(branch); 1075 USE(branch);
1092 } 1076 }
1093 1077
1094 LiveRange* live_range = LiveRangeFor(phi_vreg); 1078 LiveRange* live_range = LiveRangeFor(phi_vreg);
1095 BlockStartInstruction* block_start = 1079 BlockStartInstruction* block_start =
1096 code()->GetBlockStart(block->GetRpoNumber()); 1080 code()->GetBlockStart(block->rpo_number());
1097 block_start->GetOrCreateParallelMove(GapInstruction::START, code_zone()) 1081 block_start->GetOrCreateParallelMove(GapInstruction::START, code_zone())
1098 ->AddMove(phi_operand, live_range->GetSpillOperand(), code_zone()); 1082 ->AddMove(phi_operand, live_range->GetSpillOperand(), code_zone());
1099 live_range->SetSpillStartIndex(code()->first_instruction_index(block)); 1083 live_range->SetSpillStartIndex(block->first_instruction_index());
1100 1084
1101 // We use the phi-ness of some nodes in some later heuristics. 1085 // We use the phi-ness of some nodes in some later heuristics.
1102 live_range->set_is_phi(true); 1086 live_range->set_is_phi(true);
1103 if (!block->IsLoopHeader()) { 1087 if (!block->IsLoopHeader()) {
1104 live_range->set_is_non_loop_phi(true); 1088 live_range->set_is_non_loop_phi(true);
1105 } 1089 }
1106 } 1090 }
1107 } 1091 }
1108 1092
1109 1093
(...skipping 15 matching lines...) Expand all
1125 ResolveControlFlow(); 1109 ResolveControlFlow();
1126 code()->frame()->SetAllocatedRegisters(assigned_registers_); 1110 code()->frame()->SetAllocatedRegisters(assigned_registers_);
1127 code()->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_); 1111 code()->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1128 return true; 1112 return true;
1129 } 1113 }
1130 1114
1131 1115
1132 void RegisterAllocator::MeetRegisterConstraints() { 1116 void RegisterAllocator::MeetRegisterConstraints() {
1133 RegisterAllocatorPhase phase("L_Register constraints", this); 1117 RegisterAllocatorPhase phase("L_Register constraints", this);
1134 for (int i = 0; i < code()->BasicBlockCount(); ++i) { 1118 for (int i = 0; i < code()->BasicBlockCount(); ++i) {
1135 MeetRegisterConstraints(code()->BlockAt(i)); 1119 MeetRegisterConstraints(
1120 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(i)));
1136 if (!AllocationOk()) return; 1121 if (!AllocationOk()) return;
1137 } 1122 }
1138 } 1123 }
1139 1124
1140 1125
1141 void RegisterAllocator::ResolvePhis() { 1126 void RegisterAllocator::ResolvePhis() {
1142 RegisterAllocatorPhase phase("L_Resolve phis", this); 1127 RegisterAllocatorPhase phase("L_Resolve phis", this);
1143 1128
1144 // Process the blocks in reverse order. 1129 // Process the blocks in reverse order.
1145 for (int i = code()->BasicBlockCount() - 1; i >= 0; --i) { 1130 for (int i = code()->BasicBlockCount() - 1; i >= 0; --i) {
1146 ResolvePhis(code()->BlockAt(i)); 1131 ResolvePhis(code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(i)));
1147 } 1132 }
1148 } 1133 }
1149 1134
1150 1135
1151 void RegisterAllocator::ResolveControlFlow(LiveRange* range, BasicBlock* block, 1136 void RegisterAllocator::ResolveControlFlow(LiveRange* range,
1152 BasicBlock* pred) { 1137 const InstructionBlock* block,
1153 LifetimePosition pred_end = LifetimePosition::FromInstructionIndex( 1138 const InstructionBlock* pred) {
1154 code()->last_instruction_index(pred)); 1139 LifetimePosition pred_end =
1155 LifetimePosition cur_start = LifetimePosition::FromInstructionIndex( 1140 LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1156 code()->first_instruction_index(block)); 1141 LifetimePosition cur_start =
1142 LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1157 LiveRange* pred_cover = NULL; 1143 LiveRange* pred_cover = NULL;
1158 LiveRange* cur_cover = NULL; 1144 LiveRange* cur_cover = NULL;
1159 LiveRange* cur_range = range; 1145 LiveRange* cur_range = range;
1160 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { 1146 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1161 if (cur_range->CanCover(cur_start)) { 1147 if (cur_range->CanCover(cur_start)) {
1162 DCHECK(cur_cover == NULL); 1148 DCHECK(cur_cover == NULL);
1163 cur_cover = cur_range; 1149 cur_cover = cur_range;
1164 } 1150 }
1165 if (cur_range->CanCover(pred_end)) { 1151 if (cur_range->CanCover(pred_end)) {
1166 DCHECK(pred_cover == NULL); 1152 DCHECK(pred_cover == NULL);
1167 pred_cover = cur_range; 1153 pred_cover = cur_range;
1168 } 1154 }
1169 cur_range = cur_range->next(); 1155 cur_range = cur_range->next();
1170 } 1156 }
1171 1157
1172 if (cur_cover->IsSpilled()) return; 1158 if (cur_cover->IsSpilled()) return;
1173 DCHECK(pred_cover != NULL && cur_cover != NULL); 1159 DCHECK(pred_cover != NULL && cur_cover != NULL);
1174 if (pred_cover != cur_cover) { 1160 if (pred_cover != cur_cover) {
1175 InstructionOperand* pred_op = 1161 InstructionOperand* pred_op =
1176 pred_cover->CreateAssignedOperand(code_zone()); 1162 pred_cover->CreateAssignedOperand(code_zone());
1177 InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone()); 1163 InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone());
1178 if (!pred_op->Equals(cur_op)) { 1164 if (!pred_op->Equals(cur_op)) {
1179 GapInstruction* gap = NULL; 1165 GapInstruction* gap = NULL;
1180 if (block->PredecessorCount() == 1) { 1166 if (block->PredecessorCount() == 1) {
1181 gap = code()->GapAt(code()->first_instruction_index(block)); 1167 gap = code()->GapAt(block->first_instruction_index());
1182 } else { 1168 } else {
1183 DCHECK(pred->SuccessorCount() == 1); 1169 DCHECK(pred->SuccessorCount() == 1);
1184 gap = GetLastGap(pred); 1170 gap = GetLastGap(pred);
1185 1171
1186 Instruction* branch = 1172 Instruction* branch = InstructionAt(pred->last_instruction_index());
1187 InstructionAt(code()->last_instruction_index(pred));
1188 DCHECK(!branch->HasPointerMap()); 1173 DCHECK(!branch->HasPointerMap());
1189 USE(branch); 1174 USE(branch);
1190 } 1175 }
1191 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()) 1176 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone())
1192 ->AddMove(pred_op, cur_op, code_zone()); 1177 ->AddMove(pred_op, cur_op, code_zone());
1193 } 1178 }
1194 } 1179 }
1195 } 1180 }
1196 1181
1197 1182
1198 ParallelMove* RegisterAllocator::GetConnectingParallelMove( 1183 ParallelMove* RegisterAllocator::GetConnectingParallelMove(
1199 LifetimePosition pos) { 1184 LifetimePosition pos) {
1200 int index = pos.InstructionIndex(); 1185 int index = pos.InstructionIndex();
1201 if (code()->IsGapAt(index)) { 1186 if (code()->IsGapAt(index)) {
1202 GapInstruction* gap = code()->GapAt(index); 1187 GapInstruction* gap = code()->GapAt(index);
1203 return gap->GetOrCreateParallelMove( 1188 return gap->GetOrCreateParallelMove(
1204 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END, 1189 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END,
1205 code_zone()); 1190 code_zone());
1206 } 1191 }
1207 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); 1192 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1208 return code()->GapAt(gap_pos)->GetOrCreateParallelMove( 1193 return code()->GapAt(gap_pos)->GetOrCreateParallelMove(
1209 (gap_pos < index) ? GapInstruction::AFTER : GapInstruction::BEFORE, 1194 (gap_pos < index) ? GapInstruction::AFTER : GapInstruction::BEFORE,
1210 code_zone()); 1195 code_zone());
1211 } 1196 }
1212 1197
1213 1198
1214 BasicBlock* RegisterAllocator::GetBlock(LifetimePosition pos) { 1199 const InstructionBlock* RegisterAllocator::GetInstructionBlock(
1215 return code()->GetBasicBlock(pos.InstructionIndex()); 1200 LifetimePosition pos) {
1201 return code()->GetInstructionBlock(pos.InstructionIndex());
1216 } 1202 }
1217 1203
1218 1204
1219 void RegisterAllocator::ConnectRanges() { 1205 void RegisterAllocator::ConnectRanges() {
1220 RegisterAllocatorPhase phase("L_Connect ranges", this); 1206 RegisterAllocatorPhase phase("L_Connect ranges", this);
1221 for (int i = 0; i < live_ranges()->length(); ++i) { 1207 for (int i = 0; i < live_ranges()->length(); ++i) {
1222 LiveRange* first_range = live_ranges()->at(i); 1208 LiveRange* first_range = live_ranges()->at(i);
1223 if (first_range == NULL || first_range->parent() != NULL) continue; 1209 if (first_range == NULL || first_range->parent() != NULL) continue;
1224 1210
1225 LiveRange* second_range = first_range->next(); 1211 LiveRange* second_range = first_range->next();
1226 while (second_range != NULL) { 1212 while (second_range != NULL) {
1227 LifetimePosition pos = second_range->Start(); 1213 LifetimePosition pos = second_range->Start();
1228 1214
1229 if (!second_range->IsSpilled()) { 1215 if (!second_range->IsSpilled()) {
1230 // Add gap move if the two live ranges touch and there is no block 1216 // Add gap move if the two live ranges touch and there is no block
1231 // boundary. 1217 // boundary.
1232 if (first_range->End().Value() == pos.Value()) { 1218 if (first_range->End().Value() == pos.Value()) {
1233 bool should_insert = true; 1219 bool should_insert = true;
1234 if (IsBlockBoundary(pos)) { 1220 if (IsBlockBoundary(pos)) {
1235 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos)); 1221 should_insert =
1222 CanEagerlyResolveControlFlow(GetInstructionBlock(pos));
1236 } 1223 }
1237 if (should_insert) { 1224 if (should_insert) {
1238 ParallelMove* move = GetConnectingParallelMove(pos); 1225 ParallelMove* move = GetConnectingParallelMove(pos);
1239 InstructionOperand* prev_operand = 1226 InstructionOperand* prev_operand =
1240 first_range->CreateAssignedOperand(code_zone()); 1227 first_range->CreateAssignedOperand(code_zone());
1241 InstructionOperand* cur_operand = 1228 InstructionOperand* cur_operand =
1242 second_range->CreateAssignedOperand(code_zone()); 1229 second_range->CreateAssignedOperand(code_zone());
1243 move->AddMove(prev_operand, cur_operand, code_zone()); 1230 move->AddMove(prev_operand, cur_operand, code_zone());
1244 } 1231 }
1245 } 1232 }
1246 } 1233 }
1247 1234
1248 first_range = second_range; 1235 first_range = second_range;
1249 second_range = second_range->next(); 1236 second_range = second_range->next();
1250 } 1237 }
1251 } 1238 }
1252 } 1239 }
1253 1240
1254 1241
1255 bool RegisterAllocator::CanEagerlyResolveControlFlow(BasicBlock* block) const { 1242 bool RegisterAllocator::CanEagerlyResolveControlFlow(
1243 const InstructionBlock* block) const {
1256 if (block->PredecessorCount() != 1) return false; 1244 if (block->PredecessorCount() != 1) return false;
1257 return block->PredecessorAt(0)->rpo_number() == block->rpo_number() - 1; 1245 return block->predecessors()[0].IsNext(block->rpo_number());
1258 } 1246 }
1259 1247
1260 1248
1261 void RegisterAllocator::ResolveControlFlow() { 1249 void RegisterAllocator::ResolveControlFlow() {
1262 RegisterAllocatorPhase phase("L_Resolve control flow", this); 1250 RegisterAllocatorPhase phase("L_Resolve control flow", this);
1263 for (int block_id = 1; block_id < code()->BasicBlockCount(); ++block_id) { 1251 for (int block_id = 1; block_id < code()->BasicBlockCount(); ++block_id) {
1264 BasicBlock* block = code()->BlockAt(block_id); 1252 const InstructionBlock* block =
1253 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id));
1265 if (CanEagerlyResolveControlFlow(block)) continue; 1254 if (CanEagerlyResolveControlFlow(block)) continue;
1266 BitVector* live = live_in_sets_[block->rpo_number()]; 1255 BitVector* live = live_in_sets_[block->rpo_number().ToInt()];
1267 BitVector::Iterator iterator(live); 1256 BitVector::Iterator iterator(live);
1268 while (!iterator.Done()) { 1257 while (!iterator.Done()) {
1269 int operand_index = iterator.Current(); 1258 int operand_index = iterator.Current();
1270 for (BasicBlock::Predecessors::iterator i = block->predecessors_begin(); 1259 for (auto pred : block->predecessors()) {
1271 i != block->predecessors_end(); ++i) { 1260 const InstructionBlock* cur = code()->InstructionBlockAt(pred);
1272 BasicBlock* cur = *i;
1273 LiveRange* cur_range = LiveRangeFor(operand_index); 1261 LiveRange* cur_range = LiveRangeFor(operand_index);
1274 ResolveControlFlow(cur_range, block, cur); 1262 ResolveControlFlow(cur_range, block, cur);
1275 } 1263 }
1276 iterator.Advance(); 1264 iterator.Advance();
1277 } 1265 }
1278 } 1266 }
1279 } 1267 }
1280 1268
1281 1269
1282 void RegisterAllocator::BuildLiveRanges() { 1270 void RegisterAllocator::BuildLiveRanges() {
1283 RegisterAllocatorPhase phase("L_Build live ranges", this); 1271 RegisterAllocatorPhase phase("L_Build live ranges", this);
1284 InitializeLivenessAnalysis(); 1272 InitializeLivenessAnalysis();
1285 // Process the blocks in reverse order. 1273 // Process the blocks in reverse order.
1286 for (int block_id = code()->BasicBlockCount() - 1; block_id >= 0; 1274 for (int block_id = code()->BasicBlockCount() - 1; block_id >= 0;
1287 --block_id) { 1275 --block_id) {
1288 BasicBlock* block = code()->BlockAt(block_id); 1276 const InstructionBlock* block =
1277 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id));
1289 BitVector* live = ComputeLiveOut(block); 1278 BitVector* live = ComputeLiveOut(block);
1290 // Initially consider all live_out values live for the entire block. We 1279 // Initially consider all live_out values live for the entire block. We
1291 // will shorten these intervals if necessary. 1280 // will shorten these intervals if necessary.
1292 AddInitialIntervals(block, live); 1281 AddInitialIntervals(block, live);
1293 1282
1294 // Process the instructions in reverse order, generating and killing 1283 // Process the instructions in reverse order, generating and killing
1295 // live values. 1284 // live values.
1296 ProcessInstructions(block, live); 1285 ProcessInstructions(block, live);
1297 // All phi output operands are killed by this block. 1286 // All phi output operands are killed by this block.
1298 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); 1287 for (auto phi : block->phis()) {
1299 ++i) {
1300 Node* phi = *i;
1301 if (phi->opcode() != IrOpcode::kPhi) continue;
1302
1303 // The live range interval already ends at the first instruction of the 1288 // The live range interval already ends at the first instruction of the
1304 // block. 1289 // block.
1305 int phi_vreg = code()->GetVirtualRegister(phi); 1290 int phi_vreg = phi->virtual_register();
1306 live->Remove(phi_vreg); 1291 live->Remove(phi_vreg);
1307 1292
1308 InstructionOperand* hint = NULL; 1293 InstructionOperand* hint = NULL;
1309 InstructionOperand* phi_operand = NULL; 1294 InstructionOperand* phi_operand = NULL;
1310 GapInstruction* gap = GetLastGap(block->PredecessorAt(0)); 1295 GapInstruction* gap =
1296 GetLastGap(code()->InstructionBlockAt(block->predecessors()[0]));
1311 1297
 1312 // TODO(titzer): no need to create the parallel move if it doesn't exist. 1298 // TODO(titzer): no need to create the parallel move if it doesn't exist.
1313 ParallelMove* move = 1299 ParallelMove* move =
1314 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); 1300 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
1315 for (int j = 0; j < move->move_operands()->length(); ++j) { 1301 for (int j = 0; j < move->move_operands()->length(); ++j) {
1316 InstructionOperand* to = move->move_operands()->at(j).destination(); 1302 InstructionOperand* to = move->move_operands()->at(j).destination();
1317 if (to->IsUnallocated() && 1303 if (to->IsUnallocated() &&
1318 UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) { 1304 UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) {
1319 hint = move->move_operands()->at(j).source(); 1305 hint = move->move_operands()->at(j).source();
1320 phi_operand = to; 1306 phi_operand = to;
1321 break; 1307 break;
1322 } 1308 }
1323 } 1309 }
1324 DCHECK(hint != NULL); 1310 DCHECK(hint != NULL);
1325 1311
1326 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( 1312 LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1327 code()->first_instruction_index(block)); 1313 block->first_instruction_index());
1328 Define(block_start, phi_operand, hint); 1314 Define(block_start, phi_operand, hint);
1329 } 1315 }
1330 1316
1331 // Now live is live_in for this block except not including values live 1317 // Now live is live_in for this block except not including values live
1332 // out on backward successor edges. 1318 // out on backward successor edges.
1333 live_in_sets_[block_id] = live; 1319 live_in_sets_[block_id] = live;
1334 1320
1335 if (block->IsLoopHeader()) { 1321 if (block->IsLoopHeader()) {
1336 // Add a live range stretching from the first loop instruction to the last 1322 // Add a live range stretching from the first loop instruction to the last
1337 // for each value live on entry to the header. 1323 // for each value live on entry to the header.
1338 BitVector::Iterator iterator(live); 1324 BitVector::Iterator iterator(live);
1339 LifetimePosition start = LifetimePosition::FromInstructionIndex( 1325 LifetimePosition start = LifetimePosition::FromInstructionIndex(
1340 code()->first_instruction_index(block)); 1326 block->first_instruction_index());
1341 int end_index = 1327 int end_index = code()
1342 code()->last_instruction_index(code()->BlockAt(block->loop_end())); 1328 ->InstructionBlockAt(block->loop_end())
1329 ->last_instruction_index();
1343 LifetimePosition end = 1330 LifetimePosition end =
1344 LifetimePosition::FromInstructionIndex(end_index).NextInstruction(); 1331 LifetimePosition::FromInstructionIndex(end_index).NextInstruction();
1345 while (!iterator.Done()) { 1332 while (!iterator.Done()) {
1346 int operand_index = iterator.Current(); 1333 int operand_index = iterator.Current();
1347 LiveRange* range = LiveRangeFor(operand_index); 1334 LiveRange* range = LiveRangeFor(operand_index);
1348 range->EnsureInterval(start, end, zone()); 1335 range->EnsureInterval(start, end, zone());
1349 iterator.Advance(); 1336 iterator.Advance();
1350 } 1337 }
1351 1338
1352 // Insert all values into the live in sets of all blocks in the loop. 1339 // Insert all values into the live in sets of all blocks in the loop.
1353 for (int i = block->rpo_number() + 1; i < block->loop_end(); ++i) { 1340 for (int i = block->rpo_number().ToInt() + 1;
1341 i < block->loop_end().ToInt(); ++i) {
1354 live_in_sets_[i]->Union(*live); 1342 live_in_sets_[i]->Union(*live);
1355 } 1343 }
1356 } 1344 }
1357 1345
1358 #ifdef DEBUG 1346 #ifdef DEBUG
1359 if (block_id == 0) { 1347 if (block_id == 0) {
1360 BitVector::Iterator iterator(live); 1348 BitVector::Iterator iterator(live);
1361 bool found = false; 1349 bool found = false;
1362 while (!iterator.Done()) { 1350 while (!iterator.Done()) {
1363 found = true; 1351 found = true;
(...skipping 587 matching lines...) Expand 10 before | Expand all | Expand 10 after
1951 1939
1952 // This register was not free. Thus we need to find and spill 1940 // This register was not free. Thus we need to find and spill
1953 // parts of active and inactive live regions that use the same register 1941 // parts of active and inactive live regions that use the same register
1954 // at the same lifetime positions as current. 1942 // at the same lifetime positions as current.
1955 SplitAndSpillIntersecting(current); 1943 SplitAndSpillIntersecting(current);
1956 } 1944 }
1957 1945
1958 1946
1959 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( 1947 LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
1960 LiveRange* range, LifetimePosition pos) { 1948 LiveRange* range, LifetimePosition pos) {
1961 BasicBlock* block = GetBlock(pos.InstructionStart()); 1949 const InstructionBlock* block = GetInstructionBlock(pos.InstructionStart());
1962 BasicBlock* loop_header = 1950 const InstructionBlock* loop_header =
1963 block->IsLoopHeader() ? block : code()->GetContainingLoop(block); 1951 block->IsLoopHeader() ? block : code()->GetContainingLoop(block);
1964 1952
1965 if (loop_header == NULL) return pos; 1953 if (loop_header == NULL) return pos;
1966 1954
1967 UsePosition* prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); 1955 UsePosition* prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos);
1968 1956
1969 while (loop_header != NULL) { 1957 while (loop_header != NULL) {
1970 // We are going to spill live range inside the loop. 1958 // We are going to spill live range inside the loop.
1971 // If possible try to move spilling position backwards to loop header. 1959 // If possible try to move spilling position backwards to loop header.
1972 // This will reduce number of memory moves on the back edge. 1960 // This will reduce number of memory moves on the back edge.
1973 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex( 1961 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
1974 code()->first_instruction_index(loop_header)); 1962 loop_header->first_instruction_index());
1975 1963
1976 if (range->Covers(loop_start)) { 1964 if (range->Covers(loop_start)) {
1977 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) { 1965 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
1978 // No register beneficial use inside the loop before the pos. 1966 // No register beneficial use inside the loop before the pos.
1979 pos = loop_start; 1967 pos = loop_start;
1980 } 1968 }
1981 } 1969 }
1982 1970
1983 // Try hoisting out to an outer loop. 1971 // Try hoisting out to an outer loop.
1984 loop_header = code()->GetContainingLoop(loop_header); 1972 loop_header = code()->GetContainingLoop(loop_header);
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after
2079 2067
2080 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, 2068 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2081 LifetimePosition end) { 2069 LifetimePosition end) {
2082 int start_instr = start.InstructionIndex(); 2070 int start_instr = start.InstructionIndex();
2083 int end_instr = end.InstructionIndex(); 2071 int end_instr = end.InstructionIndex();
2084 DCHECK(start_instr <= end_instr); 2072 DCHECK(start_instr <= end_instr);
2085 2073
2086 // We have no choice 2074 // We have no choice
2087 if (start_instr == end_instr) return end; 2075 if (start_instr == end_instr) return end;
2088 2076
2089 BasicBlock* start_block = GetBlock(start); 2077 const InstructionBlock* start_block = GetInstructionBlock(start);
2090 BasicBlock* end_block = GetBlock(end); 2078 const InstructionBlock* end_block = GetInstructionBlock(end);
2091 2079
2092 if (end_block == start_block) { 2080 if (end_block == start_block) {
2093 // The interval is split in the same basic block. Split at the latest 2081 // The interval is split in the same basic block. Split at the latest
2094 // possible position. 2082 // possible position.
2095 return end; 2083 return end;
2096 } 2084 }
2097 2085
2098 BasicBlock* block = end_block; 2086 const InstructionBlock* block = end_block;
2099 // Find header of outermost loop. 2087 // Find header of outermost loop.
2100 // TODO(titzer): fix redundancy below. 2088 // TODO(titzer): fix redundancy below.
2101 while (code()->GetContainingLoop(block) != NULL && 2089 while (code()->GetContainingLoop(block) != NULL &&
2102 code()->GetContainingLoop(block)->rpo_number() > 2090 code()->GetContainingLoop(block)->rpo_number().ToInt() >
2103 start_block->rpo_number()) { 2091 start_block->rpo_number().ToInt()) {
2104 block = code()->GetContainingLoop(block); 2092 block = code()->GetContainingLoop(block);
2105 } 2093 }
2106 2094
2107 // We did not find any suitable outer loop. Split at the latest possible 2095 // We did not find any suitable outer loop. Split at the latest possible
2108 // position unless end_block is a loop header itself. 2096 // position unless end_block is a loop header itself.
2109 if (block == end_block && !end_block->IsLoopHeader()) return end; 2097 if (block == end_block && !end_block->IsLoopHeader()) return end;
2110 2098
2111 return LifetimePosition::FromInstructionIndex( 2099 return LifetimePosition::FromInstructionIndex(
2112 code()->first_instruction_index(block)); 2100 block->first_instruction_index());
2113 } 2101 }
2114 2102
2115 2103
2116 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { 2104 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2117 LiveRange* second_part = SplitRangeAt(range, pos); 2105 LiveRange* second_part = SplitRangeAt(range, pos);
2118 if (!AllocationOk()) return; 2106 if (!AllocationOk()) return;
2119 Spill(second_part); 2107 Spill(second_part);
2120 } 2108 }
2121 2109
2122 2110
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after
2225 allocator_zone_start_allocation_size_; 2213 allocator_zone_start_allocation_size_;
2226 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size); 2214 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size);
2227 } 2215 }
2228 #ifdef DEBUG 2216 #ifdef DEBUG
2229 if (allocator_ != NULL) allocator_->Verify(); 2217 if (allocator_ != NULL) allocator_->Verify();
2230 #endif 2218 #endif
2231 } 2219 }
2232 } 2220 }
2233 } 2221 }
2234 } // namespace v8::internal::compiler 2222 } // namespace v8::internal::compiler
OLDNEW
« no previous file with comments | « src/compiler/register-allocator.h ('k') | src/compiler/schedule.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698