| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/register-allocator.h" | 5 #include "src/compiler/register-allocator.h" |
| 6 | 6 |
| 7 #include "src/compiler/generic-node-inl.h" |
| 7 #include "src/compiler/linkage.h" | 8 #include "src/compiler/linkage.h" |
| 8 #include "src/hydrogen.h" | 9 #include "src/hydrogen.h" |
| 9 #include "src/string-stream.h" | 10 #include "src/string-stream.h" |
| 10 | 11 |
| 11 namespace v8 { | 12 namespace v8 { |
| 12 namespace internal { | 13 namespace internal { |
| 13 namespace compiler { | 14 namespace compiler { |
| 14 | 15 |
| 15 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { | 16 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { |
| 16 return a.Value() < b.Value() ? a : b; | 17 return a.Value() < b.Value() ? a : b; |
| (...skipping 506 matching lines...) | |
| 523 } | 524 } |
| 524 | 525 |
| 525 | 526 |
| 526 BitVector* RegisterAllocator::ComputeLiveOut(BasicBlock* block) { | 527 BitVector* RegisterAllocator::ComputeLiveOut(BasicBlock* block) { |
| 527 // Compute live out for the given block, except not including backward | 528 // Compute live out for the given block, except not including backward |
| 528 // successor edges. | 529 // successor edges. |
| 529 BitVector* live_out = | 530 BitVector* live_out = |
| 530 new (zone()) BitVector(code()->VirtualRegisterCount(), zone()); | 531 new (zone()) BitVector(code()->VirtualRegisterCount(), zone()); |
| 531 | 532 |
| 532 // Process all successor blocks. | 533 // Process all successor blocks. |
| 533 BasicBlock::Successors successors = block->successors(); | 534 for (BasicBlock::Successors::iterator i = block->successors_begin(); |
| 534 for (BasicBlock::Successors::iterator i = successors.begin(); | 535 i != block->successors_end(); ++i) { |
| 535 i != successors.end(); ++i) { | |
| 536 // Add values live on entry to the successor. Note the successor's | 536 // Add values live on entry to the successor. Note the successor's |
| 537 // live_in will not be computed yet for backwards edges. | 537 // live_in will not be computed yet for backwards edges. |
| 538 BasicBlock* successor = *i; | 538 BasicBlock* successor = *i; |
| 539 BitVector* live_in = live_in_sets_[successor->rpo_number_]; | 539 BitVector* live_in = live_in_sets_[successor->rpo_number()]; |
| 540 if (live_in != NULL) live_out->Union(*live_in); | 540 if (live_in != NULL) live_out->Union(*live_in); |
| 541 | 541 |
| 542 // All phi input operands corresponding to this successor edge are live | 542 // All phi input operands corresponding to this successor edge are live |
| 543 // out from this block. | 543 // out from this block. |
| 544 int index = successor->PredecessorIndexOf(block); | 544 size_t index = successor->PredecessorIndexOf(block); |
| 545 DCHECK(index >= 0); | 545 DCHECK(index < successor->PredecessorCount()); |
| 546 DCHECK(index < static_cast<int>(successor->PredecessorCount())); | |
| 547 for (BasicBlock::const_iterator j = successor->begin(); | 546 for (BasicBlock::const_iterator j = successor->begin(); |
| 548 j != successor->end(); ++j) { | 547 j != successor->end(); ++j) { |
| 549 Node* phi = *j; | 548 Node* phi = *j; |
| 550 if (phi->opcode() != IrOpcode::kPhi) continue; | 549 if (phi->opcode() != IrOpcode::kPhi) continue; |
| 551 Node* input = phi->InputAt(index); | 550 Node* input = phi->InputAt(static_cast<int>(index)); |
| 552 live_out->Add(input->id()); | 551 live_out->Add(input->id()); |
| 553 } | 552 } |
| 554 } | 553 } |
| 555 | 554 |
| 556 return live_out; | 555 return live_out; |
| 557 } | 556 } |
| 558 | 557 |
| 559 | 558 |
| 560 void RegisterAllocator::AddInitialIntervals(BasicBlock* block, | 559 void RegisterAllocator::AddInitialIntervals(BasicBlock* block, |
| 561 BitVector* live_out) { | 560 BitVector* live_out) { |
| (...skipping 203 matching lines...) | |
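The recurring change in the hunk above (and in later hunks) swaps the temporary BasicBlock::Successors collection for the block's own successors_begin()/successors_end() iterators, and wraps fields such as rpo_number_ in accessor methods. A minimal sketch of the new idiom, with a toy BasicBlock standing in for V8's real class (everything below is illustrative, not the actual V8 code):

    #include <cstdio>
    #include <vector>

    // Toy stand-in for compiler::BasicBlock; only the pieces the pattern needs.
    struct BasicBlock {
      std::vector<BasicBlock*> successors_;
      int rpo_number_ = 0;

      // New-style interface: iterators over the successor range and a
      // getter for the RPO number, instead of exposing the raw members.
      std::vector<BasicBlock*>::iterator successors_begin() { return successors_.begin(); }
      std::vector<BasicBlock*>::iterator successors_end() { return successors_.end(); }
      int rpo_number() const { return rpo_number_; }
    };

    void VisitSuccessors(BasicBlock* block) {
      // Iterate directly over the block's successor range, as the new side
      // of the diff does, rather than copying a Successors object first.
      for (auto i = block->successors_begin(); i != block->successors_end(); ++i) {
        BasicBlock* successor = *i;
        std::printf("successor rpo %d\n", successor->rpo_number());
      }
    }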
| 765 bool assigned = false; | 764 bool assigned = false; |
| 766 if (output->HasFixedPolicy()) { | 765 if (output->HasFixedPolicy()) { |
| 767 AllocateFixed(output, -1, false); | 766 AllocateFixed(output, -1, false); |
| 768 // This value is produced on the stack, we never need to spill it. | 767 // This value is produced on the stack, we never need to spill it. |
| 769 if (output->IsStackSlot()) { | 768 if (output->IsStackSlot()) { |
| 770 range->SetSpillOperand(output); | 769 range->SetSpillOperand(output); |
| 771 range->SetSpillStartIndex(end); | 770 range->SetSpillStartIndex(end); |
| 772 assigned = true; | 771 assigned = true; |
| 773 } | 772 } |
| 774 | 773 |
| 775 BasicBlock::Successors successors = block->successors(); | 774 for (BasicBlock::Successors::iterator succ = block->successors_begin(); |
| 776 for (BasicBlock::Successors::iterator succ = successors.begin(); | 775 succ != block->successors_end(); ++succ) { |
| 777 succ != successors.end(); ++succ) { | |
| 778 DCHECK((*succ)->PredecessorCount() == 1); | 776 DCHECK((*succ)->PredecessorCount() == 1); |
| 779 int gap_index = (*succ)->first_instruction_index() + 1; | 777 int gap_index = (*succ)->first_instruction_index() + 1; |
| 780 DCHECK(code()->IsGapAt(gap_index)); | 778 DCHECK(code()->IsGapAt(gap_index)); |
| 781 | 779 |
| 782 // Create an unconstrained operand for the same virtual register | 780 // Create an unconstrained operand for the same virtual register |
| 783 // and insert a gap move from the fixed output to the operand. | 781 // and insert a gap move from the fixed output to the operand. |
| 784 UnallocatedOperand* output_copy = | 782 UnallocatedOperand* output_copy = |
| 785 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY); | 783 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY); |
| 786 output_copy->set_virtual_register(output_vreg); | 784 output_copy->set_virtual_register(output_vreg); |
| 787 | 785 |
| 788 code()->AddGapMove(gap_index, output, output_copy); | 786 code()->AddGapMove(gap_index, output, output_copy); |
| 789 } | 787 } |
| 790 } | 788 } |
| 791 | 789 |
| 792 if (!assigned) { | 790 if (!assigned) { |
| 793 BasicBlock::Successors successors = block->successors(); | 791 for (BasicBlock::Successors::iterator succ = block->successors_begin(); |
| 794 for (BasicBlock::Successors::iterator succ = successors.begin(); | 792 succ != block->successors_end(); ++succ) { |
| 795 succ != successors.end(); ++succ) { | |
| 796 DCHECK((*succ)->PredecessorCount() == 1); | 793 DCHECK((*succ)->PredecessorCount() == 1); |
| 797 int gap_index = (*succ)->first_instruction_index() + 1; | 794 int gap_index = (*succ)->first_instruction_index() + 1; |
| 798 range->SetSpillStartIndex(gap_index); | 795 range->SetSpillStartIndex(gap_index); |
| 799 | 796 |
| 800 // This move to spill operand is not a real use. Liveness analysis | 797 // This move to spill operand is not a real use. Liveness analysis |
| 801 // and splitting of live ranges do not account for it. | 798 // and splitting of live ranges do not account for it. |
| 802 // Thus it should be inserted to a lifetime position corresponding to | 799 // Thus it should be inserted to a lifetime position corresponding to |
| 803 // the instruction end. | 800 // the instruction end. |
| 804 GapInstruction* gap = code()->GapAt(gap_index); | 801 GapInstruction* gap = code()->GapAt(gap_index); |
| 805 ParallelMove* move = | 802 ParallelMove* move = |
| (...skipping 258 matching lines...) | |
| 1064 | 1061 |
| 1065 void RegisterAllocator::ResolvePhis(BasicBlock* block) { | 1062 void RegisterAllocator::ResolvePhis(BasicBlock* block) { |
| 1066 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); ++i) { | 1063 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); ++i) { |
| 1067 Node* phi = *i; | 1064 Node* phi = *i; |
| 1068 if (phi->opcode() != IrOpcode::kPhi) continue; | 1065 if (phi->opcode() != IrOpcode::kPhi) continue; |
| 1069 | 1066 |
| 1070 UnallocatedOperand* phi_operand = | 1067 UnallocatedOperand* phi_operand = |
| 1071 new (code_zone()) UnallocatedOperand(UnallocatedOperand::NONE); | 1068 new (code_zone()) UnallocatedOperand(UnallocatedOperand::NONE); |
| 1072 phi_operand->set_virtual_register(phi->id()); | 1069 phi_operand->set_virtual_register(phi->id()); |
| 1073 | 1070 |
| 1074 int j = 0; | 1071 size_t j = 0; |
| 1075 Node::Inputs inputs = phi->inputs(); | 1072 Node::Inputs inputs = phi->inputs(); |
| 1076 for (Node::Inputs::iterator iter(inputs.begin()); iter != inputs.end(); | 1073 for (Node::Inputs::iterator iter(inputs.begin()); iter != inputs.end(); |
| 1077 ++iter, ++j) { | 1074 ++iter, ++j) { |
| 1078 Node* op = *iter; | 1075 Node* op = *iter; |
| 1079 // TODO(mstarzinger): Use a ValueInputIterator instead. | 1076 // TODO(mstarzinger): Use a ValueInputIterator instead. |
| 1080 if (j >= block->PredecessorCount()) continue; | 1077 if (j >= block->PredecessorCount()) continue; |
| 1081 UnallocatedOperand* operand = | 1078 UnallocatedOperand* operand = |
| 1082 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY); | 1079 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY); |
| 1083 operand->set_virtual_register(op->id()); | 1080 operand->set_virtual_register(op->id()); |
| 1084 BasicBlock* cur_block = block->PredecessorAt(j); | 1081 BasicBlock* cur_block = block->PredecessorAt(j); |
| (...skipping 161 matching lines...) | |
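ComputeLiveOut and ResolvePhis also switch predecessor and phi-input indices from int to size_t, narrowing only at the Node::InputAt(int) boundary. A small self-contained sketch of that convention, using a toy Node rather than V8's node class:

    #include <cstddef>
    #include <vector>

    // Toy stand-in for compiler::Node: inputs are addressed by int, as in V8.
    struct Node {
      std::vector<Node*> inputs_;
      Node* InputAt(int index) { return inputs_[static_cast<size_t>(index)]; }
    };

    Node* PhiInputForPredecessor(Node* phi, size_t predecessor_index) {
      // Indices derived from container sizes stay size_t; the narrowing
      // cast happens only at the InputAt(int) call site, mirroring the diff.
      return phi->InputAt(static_cast<int>(predecessor_index));
    }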
| 1246 | 1243 |
| 1247 first_range = second_range; | 1244 first_range = second_range; |
| 1248 second_range = second_range->next(); | 1245 second_range = second_range->next(); |
| 1249 } | 1246 } |
| 1250 } | 1247 } |
| 1251 } | 1248 } |
| 1252 | 1249 |
| 1253 | 1250 |
| 1254 bool RegisterAllocator::CanEagerlyResolveControlFlow(BasicBlock* block) const { | 1251 bool RegisterAllocator::CanEagerlyResolveControlFlow(BasicBlock* block) const { |
| 1255 if (block->PredecessorCount() != 1) return false; | 1252 if (block->PredecessorCount() != 1) return false; |
| 1256 return block->PredecessorAt(0)->rpo_number_ == block->rpo_number_ - 1; | 1253 return block->PredecessorAt(0)->rpo_number() == block->rpo_number() - 1; |
| 1257 } | 1254 } |
| 1258 | 1255 |
| 1259 | 1256 |
| 1260 void RegisterAllocator::ResolveControlFlow() { | 1257 void RegisterAllocator::ResolveControlFlow() { |
| 1261 RegisterAllocatorPhase phase("L_Resolve control flow", this); | 1258 RegisterAllocatorPhase phase("L_Resolve control flow", this); |
| 1262 for (int block_id = 1; block_id < code()->BasicBlockCount(); ++block_id) { | 1259 for (int block_id = 1; block_id < code()->BasicBlockCount(); ++block_id) { |
| 1263 BasicBlock* block = code()->BlockAt(block_id); | 1260 BasicBlock* block = code()->BlockAt(block_id); |
| 1264 if (CanEagerlyResolveControlFlow(block)) continue; | 1261 if (CanEagerlyResolveControlFlow(block)) continue; |
| 1265 BitVector* live = live_in_sets_[block->rpo_number_]; | 1262 BitVector* live = live_in_sets_[block->rpo_number()]; |
| 1266 BitVector::Iterator iterator(live); | 1263 BitVector::Iterator iterator(live); |
| 1267 while (!iterator.Done()) { | 1264 while (!iterator.Done()) { |
| 1268 int operand_index = iterator.Current(); | 1265 int operand_index = iterator.Current(); |
| 1269 BasicBlock::Predecessors predecessors = block->predecessors(); | 1266 for (BasicBlock::Predecessors::iterator i = block->predecessors_begin(); |
| 1270 for (BasicBlock::Predecessors::iterator i = predecessors.begin(); | 1267 i != block->predecessors_end(); ++i) { |
| 1271 i != predecessors.end(); ++i) { | |
| 1272 BasicBlock* cur = *i; | 1268 BasicBlock* cur = *i; |
| 1273 LiveRange* cur_range = LiveRangeFor(operand_index); | 1269 LiveRange* cur_range = LiveRangeFor(operand_index); |
| 1274 ResolveControlFlow(cur_range, block, cur); | 1270 ResolveControlFlow(cur_range, block, cur); |
| 1275 } | 1271 } |
| 1276 iterator.Advance(); | 1272 iterator.Advance(); |
| 1277 } | 1273 } |
| 1278 } | 1274 } |
| 1279 } | 1275 } |
| 1280 | 1276 |
| 1281 | 1277 |
| (...skipping 49 matching lines...) | |
| 1331 // out on backward successor edges. | 1327 // out on backward successor edges. |
| 1332 live_in_sets_[block_id] = live; | 1328 live_in_sets_[block_id] = live; |
| 1333 | 1329 |
| 1334 if (block->IsLoopHeader()) { | 1330 if (block->IsLoopHeader()) { |
| 1335 // Add a live range stretching from the first loop instruction to the last | 1331 // Add a live range stretching from the first loop instruction to the last |
| 1336 // for each value live on entry to the header. | 1332 // for each value live on entry to the header. |
| 1337 BitVector::Iterator iterator(live); | 1333 BitVector::Iterator iterator(live); |
| 1338 LifetimePosition start = LifetimePosition::FromInstructionIndex( | 1334 LifetimePosition start = LifetimePosition::FromInstructionIndex( |
| 1339 block->first_instruction_index()); | 1335 block->first_instruction_index()); |
| 1340 int end_index = | 1336 int end_index = |
| 1341 code()->BlockAt(block->loop_end_)->last_instruction_index(); | 1337 code()->BlockAt(block->loop_end())->last_instruction_index(); |
| 1342 LifetimePosition end = | 1338 LifetimePosition end = |
| 1343 LifetimePosition::FromInstructionIndex(end_index).NextInstruction(); | 1339 LifetimePosition::FromInstructionIndex(end_index).NextInstruction(); |
| 1344 while (!iterator.Done()) { | 1340 while (!iterator.Done()) { |
| 1345 int operand_index = iterator.Current(); | 1341 int operand_index = iterator.Current(); |
| 1346 LiveRange* range = LiveRangeFor(operand_index); | 1342 LiveRange* range = LiveRangeFor(operand_index); |
| 1347 range->EnsureInterval(start, end, zone()); | 1343 range->EnsureInterval(start, end, zone()); |
| 1348 iterator.Advance(); | 1344 iterator.Advance(); |
| 1349 } | 1345 } |
| 1350 | 1346 |
| 1351 // Insert all values into the live in sets of all blocks in the loop. | 1347 // Insert all values into the live in sets of all blocks in the loop. |
| 1352 for (int i = block->rpo_number_ + 1; i < block->loop_end_; ++i) { | 1348 for (int i = block->rpo_number() + 1; i < block->loop_end(); ++i) { |
| 1353 live_in_sets_[i]->Union(*live); | 1349 live_in_sets_[i]->Union(*live); |
| 1354 } | 1350 } |
| 1355 } | 1351 } |
| 1356 | 1352 |
| 1357 #ifdef DEBUG | 1353 #ifdef DEBUG |
| 1358 if (block_id == 0) { | 1354 if (block_id == 0) { |
| 1359 BitVector::Iterator iterator(live); | 1355 BitVector::Iterator iterator(live); |
| 1360 bool found = false; | 1356 bool found = false; |
| 1361 while (!iterator.Done()) { | 1357 while (!iterator.Done()) { |
| 1362 found = true; | 1358 found = true; |
| (...skipping 728 matching lines...) | |
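The loop-header hunk above extends every value live on entry to the header across the whole loop and folds the header's live-in set into each block of the loop body. A rough sketch of that shape, with toy containers standing in for V8's LiveRange and BitVector (positions are plain instruction indices here):

    #include <set>
    #include <vector>

    struct Interval { int start; int end; };

    struct ToyRange {
      std::vector<Interval> intervals;
      // Simplified: the real LiveRange merges/extends existing intervals.
      void EnsureInterval(int start, int end) { intervals.push_back({start, end}); }
    };

    // live_in[b] holds the virtual registers live on entry to block b, blocks
    // are numbered in reverse post-order, and the loop spans [header, loop_end).
    void ExtendLoopLiveness(std::vector<std::set<int>>& live_in,
                            std::vector<ToyRange>& ranges,
                            int header, int loop_end,
                            int first_instr, int last_instr) {
      for (int vreg : live_in[header]) {
        // Each value live at the header stays live for the whole loop body.
        ranges[vreg].EnsureInterval(first_instr, last_instr + 1);
      }
      for (int b = header + 1; b < loop_end; ++b) {
        // Every block inside the loop also sees those values as live-in.
        live_in[b].insert(live_in[header].begin(), live_in[header].end());
      }
    }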
| 2091 if (end_block == start_block) { | 2087 if (end_block == start_block) { |
| 2092 // The interval is split in the same basic block. Split at the latest | 2088 // The interval is split in the same basic block. Split at the latest |
| 2093 // possible position. | 2089 // possible position. |
| 2094 return end; | 2090 return end; |
| 2095 } | 2091 } |
| 2096 | 2092 |
| 2097 BasicBlock* block = end_block; | 2093 BasicBlock* block = end_block; |
| 2098 // Find header of outermost loop. | 2094 // Find header of outermost loop. |
| 2099 // TODO(titzer): fix redundancy below. | 2095 // TODO(titzer): fix redundancy below. |
| 2100 while (code()->GetContainingLoop(block) != NULL && | 2096 while (code()->GetContainingLoop(block) != NULL && |
| 2101 code()->GetContainingLoop(block)->rpo_number_ > | 2097 code()->GetContainingLoop(block)->rpo_number() > |
| 2102 start_block->rpo_number_) { | 2098 start_block->rpo_number()) { |
| 2103 block = code()->GetContainingLoop(block); | 2099 block = code()->GetContainingLoop(block); |
| 2104 } | 2100 } |
| 2105 | 2101 |
| 2106 // We did not find any suitable outer loop. Split at the latest possible | 2102 // We did not find any suitable outer loop. Split at the latest possible |
| 2107 // position unless end_block is a loop header itself. | 2103 // position unless end_block is a loop header itself. |
| 2108 if (block == end_block && !end_block->IsLoopHeader()) return end; | 2104 if (block == end_block && !end_block->IsLoopHeader()) return end; |
| 2109 | 2105 |
| 2110 return LifetimePosition::FromInstructionIndex( | 2106 return LifetimePosition::FromInstructionIndex( |
| 2111 block->first_instruction_index()); | 2107 block->first_instruction_index()); |
| 2112 } | 2108 } |
| (...skipping 111 matching lines...) | |
| 2224 allocator_zone_start_allocation_size_; | 2220 allocator_zone_start_allocation_size_; |
| 2225 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size); | 2221 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size); |
| 2226 } | 2222 } |
| 2227 #ifdef DEBUG | 2223 #ifdef DEBUG |
| 2228 if (allocator_ != NULL) allocator_->Verify(); | 2224 if (allocator_ != NULL) allocator_->Verify(); |
| 2229 #endif | 2225 #endif |
| 2230 } | 2226 } |
| 2231 } | 2227 } |
| 2232 } | 2228 } |
| 2233 } // namespace v8::internal::compiler | 2229 } // namespace v8::internal::compiler |