Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(339)

Side by Side Diff: src/compiler/register-allocator.cc

Issue 664683002: [turbofan] decouple register allocation from schedule and graph (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: rebase Created 6 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler/register-allocator.h" 5 #include "src/compiler/register-allocator.h"
6 6
7 #include "src/compiler/generic-node-inl.h"
8 #include "src/compiler/linkage.h" 7 #include "src/compiler/linkage.h"
9 #include "src/hydrogen.h" 8 #include "src/hydrogen.h"
10 #include "src/string-stream.h" 9 #include "src/string-stream.h"
11 10
12 namespace v8 { 11 namespace v8 {
13 namespace internal { 12 namespace internal {
14 namespace compiler { 13 namespace compiler {
15 14
16 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { 15 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
17 return a.Value() < b.Value() ? a : b; 16 return a.Value() < b.Value() ? a : b;
(...skipping 499 matching lines...) Expand 10 before | Expand all | Expand 10 after
517 516
518 517
519 void RegisterAllocator::InitializeLivenessAnalysis() { 518 void RegisterAllocator::InitializeLivenessAnalysis() {
520 // Initialize the live_in sets for each block to NULL. 519 // Initialize the live_in sets for each block to NULL.
521 int block_count = code()->BasicBlockCount(); 520 int block_count = code()->BasicBlockCount();
522 live_in_sets_.Initialize(block_count, zone()); 521 live_in_sets_.Initialize(block_count, zone());
523 live_in_sets_.AddBlock(NULL, block_count, zone()); 522 live_in_sets_.AddBlock(NULL, block_count, zone());
524 } 523 }
525 524
526 525
527 BitVector* RegisterAllocator::ComputeLiveOut(BasicBlock* block) { 526 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) {
528 // Compute live out for the given block, except not including backward 527 // Compute live out for the given block, except not including backward
529 // successor edges. 528 // successor edges.
530 BitVector* live_out = 529 BitVector* live_out =
531 new (zone()) BitVector(code()->VirtualRegisterCount(), zone()); 530 new (zone()) BitVector(code()->VirtualRegisterCount(), zone());
532 531
533 // Process all successor blocks. 532 // Process all successor blocks.
534 for (BasicBlock::Successors::iterator i = block->successors_begin(); 533 for (InstructionBlock::Successors::const_iterator i =
Benedikt Meurer 2014/10/19 18:13:03 s/InstructionBlock::Successors::const_iterator/auto/
534 block->successors_begin();
535 i != block->successors_end(); ++i) { 535 i != block->successors_end(); ++i) {
536 // Add values live on entry to the successor. Note the successor's 536 // Add values live on entry to the successor. Note the successor's
537 // live_in will not be computed yet for backwards edges. 537 // live_in will not be computed yet for backwards edges.
538 BasicBlock* successor = *i; 538 BitVector* live_in = live_in_sets_[(*i).ToSize()];
539 BitVector* live_in = live_in_sets_[successor->rpo_number()];
540 if (live_in != NULL) live_out->Union(*live_in); 539 if (live_in != NULL) live_out->Union(*live_in);
541 540
542 // All phi input operands corresponding to this successor edge are live 541 // All phi input operands corresponding to this successor edge are live
543 // out from this block. 542 // out from this block.
544 size_t index = successor->PredecessorIndexOf(block); 543 const InstructionBlock* successor = code()->InstructionBlockAt(*i);
544 size_t index = successor->PredecessorIndexOf(block->rpo_number());
545 DCHECK(index < successor->PredecessorCount()); 545 DCHECK(index < successor->PredecessorCount());
546 for (BasicBlock::const_iterator j = successor->begin(); 546 for (InstructionBlock::PhiInstructions::const_iterator j =
Benedikt Meurer 2014/10/19 18:13:03 s/InstructionBlock::PhiInstructions::const_iterator/auto/
547 j != successor->end(); ++j) { 547 successor->phis_begin();
548 Node* phi = *j; 548 j != successor->phis_end(); ++j) {
549 if (phi->opcode() != IrOpcode::kPhi) continue; 549 live_out->Add((*j)->operands()[index]);
550 Node* input = phi->InputAt(static_cast<int>(index));
551 live_out->Add(code()->GetVirtualRegister(input));
552 } 550 }
553 } 551 }
554 return live_out; 552 return live_out;
555 } 553 }
556 554
557 555
558 void RegisterAllocator::AddInitialIntervals(BasicBlock* block, 556 void RegisterAllocator::AddInitialIntervals(const InstructionBlock* block,
559 BitVector* live_out) { 557 BitVector* live_out) {
560 // Add an interval that includes the entire block to the live range for 558 // Add an interval that includes the entire block to the live range for
561 // each live_out value. 559 // each live_out value.
562 LifetimePosition start = LifetimePosition::FromInstructionIndex( 560 LifetimePosition start =
563 code()->first_instruction_index(block)); 561 LifetimePosition::FromInstructionIndex(block->first_instruction_index());
564 LifetimePosition end = 562 LifetimePosition end = LifetimePosition::FromInstructionIndex(
565 LifetimePosition::FromInstructionIndex( 563 block->last_instruction_index()).NextInstruction();
566 code()->last_instruction_index(block)).NextInstruction();
567 BitVector::Iterator iterator(live_out); 564 BitVector::Iterator iterator(live_out);
568 while (!iterator.Done()) { 565 while (!iterator.Done()) {
569 int operand_index = iterator.Current(); 566 int operand_index = iterator.Current();
570 LiveRange* range = LiveRangeFor(operand_index); 567 LiveRange* range = LiveRangeFor(operand_index);
571 range->AddUseInterval(start, end, zone()); 568 range->AddUseInterval(start, end, zone());
572 iterator.Advance(); 569 iterator.Advance();
573 } 570 }
574 } 571 }
575 572
576 573
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after
644 } 641 }
645 LiveRange* result = live_ranges_[index]; 642 LiveRange* result = live_ranges_[index];
646 if (result == NULL) { 643 if (result == NULL) {
647 result = new (zone()) LiveRange(index, code_zone()); 644 result = new (zone()) LiveRange(index, code_zone());
648 live_ranges_[index] = result; 645 live_ranges_[index] = result;
649 } 646 }
650 return result; 647 return result;
651 } 648 }
652 649
653 650
654 GapInstruction* RegisterAllocator::GetLastGap(BasicBlock* block) { 651 GapInstruction* RegisterAllocator::GetLastGap(const InstructionBlock* block) {
655 int last_instruction = code()->last_instruction_index(block); 652 int last_instruction = block->last_instruction_index();
656 return code()->GapAt(last_instruction - 1); 653 return code()->GapAt(last_instruction - 1);
657 } 654 }
658 655
659 656
660 LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) { 657 LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) {
661 if (operand->IsUnallocated()) { 658 if (operand->IsUnallocated()) {
662 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register()); 659 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register());
663 } else if (operand->IsRegister()) { 660 } else if (operand->IsRegister()) {
664 return FixedLiveRangeFor(operand->index()); 661 return FixedLiveRangeFor(operand->index());
665 } else if (operand->IsDoubleRegister()) { 662 } else if (operand->IsDoubleRegister()) {
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after
722 move->AddMove(cur.source(), to, code_zone()); 719 move->AddMove(cur.source(), to, code_zone());
723 return; 720 return;
724 } 721 }
725 } 722 }
726 } 723 }
727 } 724 }
728 move->AddMove(from, to, code_zone()); 725 move->AddMove(from, to, code_zone());
729 } 726 }
730 727
731 728
732 void RegisterAllocator::MeetRegisterConstraints(BasicBlock* block) { 729 void RegisterAllocator::MeetRegisterConstraints(const InstructionBlock* block) {
733 int start = code()->first_instruction_index(block); 730 int start = block->first_instruction_index();
734 int end = code()->last_instruction_index(block); 731 int end = block->last_instruction_index();
735 DCHECK_NE(-1, start); 732 DCHECK_NE(-1, start);
736 for (int i = start; i <= end; ++i) { 733 for (int i = start; i <= end; ++i) {
737 if (code()->IsGapAt(i)) { 734 if (code()->IsGapAt(i)) {
738 Instruction* instr = NULL; 735 Instruction* instr = NULL;
739 Instruction* prev_instr = NULL; 736 Instruction* prev_instr = NULL;
740 if (i < end) instr = InstructionAt(i + 1); 737 if (i < end) instr = InstructionAt(i + 1);
741 if (i > start) prev_instr = InstructionAt(i - 1); 738 if (i > start) prev_instr = InstructionAt(i - 1);
742 MeetConstraintsBetween(prev_instr, instr, i); 739 MeetConstraintsBetween(prev_instr, instr, i);
743 if (!AllocationOk()) return; 740 if (!AllocationOk()) return;
744 } 741 }
745 } 742 }
746 743
747 // Meet register constraints for the instruction in the end. 744 // Meet register constraints for the instruction in the end.
748 if (!code()->IsGapAt(end)) { 745 if (!code()->IsGapAt(end)) {
749 MeetRegisterConstraintsForLastInstructionInBlock(block); 746 MeetRegisterConstraintsForLastInstructionInBlock(block);
750 } 747 }
751 } 748 }
752 749
753 750
754 void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock( 751 void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock(
755 BasicBlock* block) { 752 const InstructionBlock* block) {
756 int end = code()->last_instruction_index(block); 753 int end = block->last_instruction_index();
757 Instruction* last_instruction = InstructionAt(end); 754 Instruction* last_instruction = InstructionAt(end);
758 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { 755 for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
759 InstructionOperand* output_operand = last_instruction->OutputAt(i); 756 InstructionOperand* output_operand = last_instruction->OutputAt(i);
760 DCHECK(!output_operand->IsConstant()); 757 DCHECK(!output_operand->IsConstant());
761 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand); 758 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
762 int output_vreg = output->virtual_register(); 759 int output_vreg = output->virtual_register();
763 LiveRange* range = LiveRangeFor(output_vreg); 760 LiveRange* range = LiveRangeFor(output_vreg);
764 bool assigned = false; 761 bool assigned = false;
765 if (output->HasFixedPolicy()) { 762 if (output->HasFixedPolicy()) {
766 AllocateFixed(output, -1, false); 763 AllocateFixed(output, -1, false);
767 // This value is produced on the stack, we never need to spill it. 764 // This value is produced on the stack, we never need to spill it.
768 if (output->IsStackSlot()) { 765 if (output->IsStackSlot()) {
769 range->SetSpillOperand(output); 766 range->SetSpillOperand(output);
770 range->SetSpillStartIndex(end); 767 range->SetSpillStartIndex(end);
771 assigned = true; 768 assigned = true;
772 } 769 }
773 770
774 for (BasicBlock::Successors::iterator succ = block->successors_begin(); 771 for (InstructionBlock::Successors::const_iterator succ =
Benedikt Meurer 2014/10/19 18:13:03 s/InstructionBlock::Successors::const_iterator/auto/
772 block->successors_begin();
775 succ != block->successors_end(); ++succ) { 773 succ != block->successors_end(); ++succ) {
776 DCHECK((*succ)->PredecessorCount() == 1); 774 const InstructionBlock* successor = code()->InstructionBlockAt((*succ));
777 int gap_index = code()->first_instruction_index(*succ) + 1; 775 DCHECK(successor->PredecessorCount() == 1);
776 int gap_index = successor->first_instruction_index() + 1;
778 DCHECK(code()->IsGapAt(gap_index)); 777 DCHECK(code()->IsGapAt(gap_index));
779 778
780 // Create an unconstrained operand for the same virtual register 779 // Create an unconstrained operand for the same virtual register
781 // and insert a gap move from the fixed output to the operand. 780 // and insert a gap move from the fixed output to the operand.
782 UnallocatedOperand* output_copy = 781 UnallocatedOperand* output_copy =
783 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY); 782 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY);
784 output_copy->set_virtual_register(output_vreg); 783 output_copy->set_virtual_register(output_vreg);
785 784
786 code()->AddGapMove(gap_index, output, output_copy); 785 code()->AddGapMove(gap_index, output, output_copy);
787 } 786 }
788 } 787 }
789 788
790 if (!assigned) { 789 if (!assigned) {
791 for (BasicBlock::Successors::iterator succ = block->successors_begin(); 790 for (InstructionBlock::Successors::const_iterator succ =
Benedikt Meurer 2014/10/19 18:13:03 s/InstructionBlock::Successors::const_iterator/auto/
791 block->successors_begin();
792 succ != block->successors_end(); ++succ) { 792 succ != block->successors_end(); ++succ) {
793 DCHECK((*succ)->PredecessorCount() == 1); 793 const InstructionBlock* successor = code()->InstructionBlockAt((*succ));
794 int gap_index = code()->first_instruction_index(*succ) + 1; 794 DCHECK(successor->PredecessorCount() == 1);
795 int gap_index = successor->first_instruction_index() + 1;
795 range->SetSpillStartIndex(gap_index); 796 range->SetSpillStartIndex(gap_index);
796 797
797 // This move to spill operand is not a real use. Liveness analysis 798 // This move to spill operand is not a real use. Liveness analysis
798 // and splitting of live ranges do not account for it. 799 // and splitting of live ranges do not account for it.
799 // Thus it should be inserted to a lifetime position corresponding to 800 // Thus it should be inserted to a lifetime position corresponding to
800 // the instruction end. 801 // the instruction end.
801 GapInstruction* gap = code()->GapAt(gap_index); 802 GapInstruction* gap = code()->GapAt(gap_index);
802 ParallelMove* move = 803 ParallelMove* move =
803 gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone()); 804 gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone());
804 move->AddMove(output, range->GetSpillOperand(), code_zone()); 805 move->AddMove(output, range->GetSpillOperand(), code_zone());
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after
929 bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr, 930 bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr,
930 int index) { 931 int index) {
931 for (size_t i = 0; i < instr->OutputCount(); i++) { 932 for (size_t i = 0; i < instr->OutputCount(); i++) {
932 InstructionOperand* output = instr->OutputAt(i); 933 InstructionOperand* output = instr->OutputAt(i);
933 if (output->IsDoubleRegister() && output->index() == index) return true; 934 if (output->IsDoubleRegister() && output->index() == index) return true;
934 } 935 }
935 return false; 936 return false;
936 } 937 }
937 938
938 939
939 void RegisterAllocator::ProcessInstructions(BasicBlock* block, 940 void RegisterAllocator::ProcessInstructions(const InstructionBlock* block,
940 BitVector* live) { 941 BitVector* live) {
941 int block_start = code()->first_instruction_index(block); 942 int block_start = block->first_instruction_index();
942 943
943 LifetimePosition block_start_position = 944 LifetimePosition block_start_position =
944 LifetimePosition::FromInstructionIndex(block_start); 945 LifetimePosition::FromInstructionIndex(block_start);
945 946
946 for (int index = code()->last_instruction_index(block); index >= block_start; 947 for (int index = block->last_instruction_index(); index >= block_start;
947 index--) { 948 index--) {
948 LifetimePosition curr_position = 949 LifetimePosition curr_position =
949 LifetimePosition::FromInstructionIndex(index); 950 LifetimePosition::FromInstructionIndex(index);
950 951
951 Instruction* instr = InstructionAt(index); 952 Instruction* instr = InstructionAt(index);
952 DCHECK(instr != NULL); 953 DCHECK(instr != NULL);
953 if (instr->IsGapMoves()) { 954 if (instr->IsGapMoves()) {
954 // Process the moves of the gap instruction, making their sources live. 955 // Process the moves of the gap instruction, making their sources live.
955 GapInstruction* gap = code()->GapAt(index); 956 GapInstruction* gap = code()->GapAt(index);
956 957
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after
1052 } 1053 }
1053 } 1054 }
1054 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL); 1055 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
1055 Define(curr_position, temp, NULL); 1056 Define(curr_position, temp, NULL);
1056 } 1057 }
1057 } 1058 }
1058 } 1059 }
1059 } 1060 }
1060 1061
1061 1062
1062 void RegisterAllocator::ResolvePhis(BasicBlock* block) { 1063 void RegisterAllocator::ResolvePhis(const InstructionBlock* block) {
1063 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); ++i) { 1064 for (InstructionBlock::PhiInstructions::const_iterator i =
Benedikt Meurer 2014/10/19 18:13:03 s/InstructionBlock::PhiInstructions::const_iterator/auto/
1064 Node* phi = *i; 1065 block->phis_begin();
1065 if (phi->opcode() != IrOpcode::kPhi) continue; 1066 i != block->phis_end(); ++i) {
1066
1067 UnallocatedOperand* phi_operand = 1067 UnallocatedOperand* phi_operand =
1068 new (code_zone()) UnallocatedOperand(UnallocatedOperand::NONE); 1068 new (code_zone()) UnallocatedOperand(UnallocatedOperand::NONE);
1069 int phi_vreg = code()->GetVirtualRegister(phi); 1069 PhiInstruction* phi = *i;
1070 int phi_vreg = phi->virtual_register();
1070 phi_operand->set_virtual_register(phi_vreg); 1071 phi_operand->set_virtual_register(phi_vreg);
1071 1072
1072 size_t j = 0; 1073 size_t j = 0;
1073 Node::Inputs inputs = phi->inputs(); 1074 for (IntVector::const_iterator iter = phi->operands().begin();
1074 for (Node::Inputs::iterator iter(inputs.begin()); iter != inputs.end(); 1075 iter != phi->operands().end(); ++iter, ++j) {
1075 ++iter, ++j) {
1076 Node* op = *iter;
1077 // TODO(mstarzinger): Use a ValueInputIterator instead.
1078 if (j >= block->PredecessorCount()) continue;
1079 UnallocatedOperand* operand = 1076 UnallocatedOperand* operand =
1080 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY); 1077 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY);
1081 operand->set_virtual_register(code()->GetVirtualRegister(op)); 1078 operand->set_virtual_register(*iter);
1082 BasicBlock* cur_block = block->PredecessorAt(j); 1079 InstructionBlock* cur_block =
1080 code()->InstructionBlockAt(block->PredecessorAt(j));
1083 // The gap move must be added without any special processing as in 1081 // The gap move must be added without any special processing as in
1084 // the AddConstraintsGapMove. 1082 // the AddConstraintsGapMove.
1085 code()->AddGapMove(code()->last_instruction_index(cur_block) - 1, operand, 1083 code()->AddGapMove(cur_block->last_instruction_index() - 1, operand,
1086 phi_operand); 1084 phi_operand);
1087 1085
1088 Instruction* branch = 1086 Instruction* branch = InstructionAt(cur_block->last_instruction_index());
1089 InstructionAt(code()->last_instruction_index(cur_block));
1090 DCHECK(!branch->HasPointerMap()); 1087 DCHECK(!branch->HasPointerMap());
1091 USE(branch); 1088 USE(branch);
1092 } 1089 }
1093 1090
1094 LiveRange* live_range = LiveRangeFor(phi_vreg); 1091 LiveRange* live_range = LiveRangeFor(phi_vreg);
1095 BlockStartInstruction* block_start = 1092 BlockStartInstruction* block_start =
1096 code()->GetBlockStart(block->GetRpoNumber()); 1093 code()->GetBlockStart(block->rpo_number());
1097 block_start->GetOrCreateParallelMove(GapInstruction::START, code_zone()) 1094 block_start->GetOrCreateParallelMove(GapInstruction::START, code_zone())
1098 ->AddMove(phi_operand, live_range->GetSpillOperand(), code_zone()); 1095 ->AddMove(phi_operand, live_range->GetSpillOperand(), code_zone());
1099 live_range->SetSpillStartIndex(code()->first_instruction_index(block)); 1096 live_range->SetSpillStartIndex(block->first_instruction_index());
1100 1097
1101 // We use the phi-ness of some nodes in some later heuristics. 1098 // We use the phi-ness of some nodes in some later heuristics.
1102 live_range->set_is_phi(true); 1099 live_range->set_is_phi(true);
1103 if (!block->IsLoopHeader()) { 1100 if (!block->IsLoopHeader()) {
1104 live_range->set_is_non_loop_phi(true); 1101 live_range->set_is_non_loop_phi(true);
1105 } 1102 }
1106 } 1103 }
1107 } 1104 }
1108 1105
1109 1106
(...skipping 15 matching lines...) Expand all
1125 ResolveControlFlow(); 1122 ResolveControlFlow();
1126 code()->frame()->SetAllocatedRegisters(assigned_registers_); 1123 code()->frame()->SetAllocatedRegisters(assigned_registers_);
1127 code()->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_); 1124 code()->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1128 return true; 1125 return true;
1129 } 1126 }
1130 1127
1131 1128
1132 void RegisterAllocator::MeetRegisterConstraints() { 1129 void RegisterAllocator::MeetRegisterConstraints() {
1133 RegisterAllocatorPhase phase("L_Register constraints", this); 1130 RegisterAllocatorPhase phase("L_Register constraints", this);
1134 for (int i = 0; i < code()->BasicBlockCount(); ++i) { 1131 for (int i = 0; i < code()->BasicBlockCount(); ++i) {
1135 MeetRegisterConstraints(code()->BlockAt(i)); 1132 MeetRegisterConstraints(
1133 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(i)));
1136 if (!AllocationOk()) return; 1134 if (!AllocationOk()) return;
1137 } 1135 }
1138 } 1136 }
1139 1137
1140 1138
1141 void RegisterAllocator::ResolvePhis() { 1139 void RegisterAllocator::ResolvePhis() {
1142 RegisterAllocatorPhase phase("L_Resolve phis", this); 1140 RegisterAllocatorPhase phase("L_Resolve phis", this);
1143 1141
1144 // Process the blocks in reverse order. 1142 // Process the blocks in reverse order.
1145 for (int i = code()->BasicBlockCount() - 1; i >= 0; --i) { 1143 for (int i = code()->BasicBlockCount() - 1; i >= 0; --i) {
1146 ResolvePhis(code()->BlockAt(i)); 1144 ResolvePhis(code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(i)));
1147 } 1145 }
1148 } 1146 }
1149 1147
1150 1148
1151 void RegisterAllocator::ResolveControlFlow(LiveRange* range, BasicBlock* block, 1149 void RegisterAllocator::ResolveControlFlow(LiveRange* range,
1152 BasicBlock* pred) { 1150 const InstructionBlock* block,
1153 LifetimePosition pred_end = LifetimePosition::FromInstructionIndex( 1151 const InstructionBlock* pred) {
1154 code()->last_instruction_index(pred)); 1152 LifetimePosition pred_end =
1155 LifetimePosition cur_start = LifetimePosition::FromInstructionIndex( 1153 LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1156 code()->first_instruction_index(block)); 1154 LifetimePosition cur_start =
1155 LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1157 LiveRange* pred_cover = NULL; 1156 LiveRange* pred_cover = NULL;
1158 LiveRange* cur_cover = NULL; 1157 LiveRange* cur_cover = NULL;
1159 LiveRange* cur_range = range; 1158 LiveRange* cur_range = range;
1160 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { 1159 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1161 if (cur_range->CanCover(cur_start)) { 1160 if (cur_range->CanCover(cur_start)) {
1162 DCHECK(cur_cover == NULL); 1161 DCHECK(cur_cover == NULL);
1163 cur_cover = cur_range; 1162 cur_cover = cur_range;
1164 } 1163 }
1165 if (cur_range->CanCover(pred_end)) { 1164 if (cur_range->CanCover(pred_end)) {
1166 DCHECK(pred_cover == NULL); 1165 DCHECK(pred_cover == NULL);
1167 pred_cover = cur_range; 1166 pred_cover = cur_range;
1168 } 1167 }
1169 cur_range = cur_range->next(); 1168 cur_range = cur_range->next();
1170 } 1169 }
1171 1170
1172 if (cur_cover->IsSpilled()) return; 1171 if (cur_cover->IsSpilled()) return;
1173 DCHECK(pred_cover != NULL && cur_cover != NULL); 1172 DCHECK(pred_cover != NULL && cur_cover != NULL);
1174 if (pred_cover != cur_cover) { 1173 if (pred_cover != cur_cover) {
1175 InstructionOperand* pred_op = 1174 InstructionOperand* pred_op =
1176 pred_cover->CreateAssignedOperand(code_zone()); 1175 pred_cover->CreateAssignedOperand(code_zone());
1177 InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone()); 1176 InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone());
1178 if (!pred_op->Equals(cur_op)) { 1177 if (!pred_op->Equals(cur_op)) {
1179 GapInstruction* gap = NULL; 1178 GapInstruction* gap = NULL;
1180 if (block->PredecessorCount() == 1) { 1179 if (block->PredecessorCount() == 1) {
1181 gap = code()->GapAt(code()->first_instruction_index(block)); 1180 gap = code()->GapAt(block->first_instruction_index());
1182 } else { 1181 } else {
1183 DCHECK(pred->SuccessorCount() == 1); 1182 DCHECK(pred->SuccessorCount() == 1);
1184 gap = GetLastGap(pred); 1183 gap = GetLastGap(pred);
1185 1184
1186 Instruction* branch = 1185 Instruction* branch = InstructionAt(pred->last_instruction_index());
1187 InstructionAt(code()->last_instruction_index(pred));
1188 DCHECK(!branch->HasPointerMap()); 1186 DCHECK(!branch->HasPointerMap());
1189 USE(branch); 1187 USE(branch);
1190 } 1188 }
1191 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()) 1189 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone())
1192 ->AddMove(pred_op, cur_op, code_zone()); 1190 ->AddMove(pred_op, cur_op, code_zone());
1193 } 1191 }
1194 } 1192 }
1195 } 1193 }
1196 1194
1197 1195
1198 ParallelMove* RegisterAllocator::GetConnectingParallelMove( 1196 ParallelMove* RegisterAllocator::GetConnectingParallelMove(
1199 LifetimePosition pos) { 1197 LifetimePosition pos) {
1200 int index = pos.InstructionIndex(); 1198 int index = pos.InstructionIndex();
1201 if (code()->IsGapAt(index)) { 1199 if (code()->IsGapAt(index)) {
1202 GapInstruction* gap = code()->GapAt(index); 1200 GapInstruction* gap = code()->GapAt(index);
1203 return gap->GetOrCreateParallelMove( 1201 return gap->GetOrCreateParallelMove(
1204 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END, 1202 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END,
1205 code_zone()); 1203 code_zone());
1206 } 1204 }
1207 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); 1205 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1208 return code()->GapAt(gap_pos)->GetOrCreateParallelMove( 1206 return code()->GapAt(gap_pos)->GetOrCreateParallelMove(
1209 (gap_pos < index) ? GapInstruction::AFTER : GapInstruction::BEFORE, 1207 (gap_pos < index) ? GapInstruction::AFTER : GapInstruction::BEFORE,
1210 code_zone()); 1208 code_zone());
1211 } 1209 }
1212 1210
1213 1211
1214 BasicBlock* RegisterAllocator::GetBlock(LifetimePosition pos) { 1212 const InstructionBlock* RegisterAllocator::GetInstructionBlock(
1215 return code()->GetBasicBlock(pos.InstructionIndex()); 1213 LifetimePosition pos) {
1214 return code()->GetInstructionBlock(pos.InstructionIndex());
1216 } 1215 }
1217 1216
1218 1217
1219 void RegisterAllocator::ConnectRanges() { 1218 void RegisterAllocator::ConnectRanges() {
1220 RegisterAllocatorPhase phase("L_Connect ranges", this); 1219 RegisterAllocatorPhase phase("L_Connect ranges", this);
1221 for (int i = 0; i < live_ranges()->length(); ++i) { 1220 for (int i = 0; i < live_ranges()->length(); ++i) {
1222 LiveRange* first_range = live_ranges()->at(i); 1221 LiveRange* first_range = live_ranges()->at(i);
1223 if (first_range == NULL || first_range->parent() != NULL) continue; 1222 if (first_range == NULL || first_range->parent() != NULL) continue;
1224 1223
1225 LiveRange* second_range = first_range->next(); 1224 LiveRange* second_range = first_range->next();
1226 while (second_range != NULL) { 1225 while (second_range != NULL) {
1227 LifetimePosition pos = second_range->Start(); 1226 LifetimePosition pos = second_range->Start();
1228 1227
1229 if (!second_range->IsSpilled()) { 1228 if (!second_range->IsSpilled()) {
1230 // Add gap move if the two live ranges touch and there is no block 1229 // Add gap move if the two live ranges touch and there is no block
1231 // boundary. 1230 // boundary.
1232 if (first_range->End().Value() == pos.Value()) { 1231 if (first_range->End().Value() == pos.Value()) {
1233 bool should_insert = true; 1232 bool should_insert = true;
1234 if (IsBlockBoundary(pos)) { 1233 if (IsBlockBoundary(pos)) {
1235 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos)); 1234 should_insert =
1235 CanEagerlyResolveControlFlow(GetInstructionBlock(pos));
1236 } 1236 }
1237 if (should_insert) { 1237 if (should_insert) {
1238 ParallelMove* move = GetConnectingParallelMove(pos); 1238 ParallelMove* move = GetConnectingParallelMove(pos);
1239 InstructionOperand* prev_operand = 1239 InstructionOperand* prev_operand =
1240 first_range->CreateAssignedOperand(code_zone()); 1240 first_range->CreateAssignedOperand(code_zone());
1241 InstructionOperand* cur_operand = 1241 InstructionOperand* cur_operand =
1242 second_range->CreateAssignedOperand(code_zone()); 1242 second_range->CreateAssignedOperand(code_zone());
1243 move->AddMove(prev_operand, cur_operand, code_zone()); 1243 move->AddMove(prev_operand, cur_operand, code_zone());
1244 } 1244 }
1245 } 1245 }
1246 } 1246 }
1247 1247
1248 first_range = second_range; 1248 first_range = second_range;
1249 second_range = second_range->next(); 1249 second_range = second_range->next();
1250 } 1250 }
1251 } 1251 }
1252 } 1252 }
1253 1253
1254 1254
1255 bool RegisterAllocator::CanEagerlyResolveControlFlow(BasicBlock* block) const { 1255 bool RegisterAllocator::CanEagerlyResolveControlFlow(
1256 const InstructionBlock* block) const {
1256 if (block->PredecessorCount() != 1) return false; 1257 if (block->PredecessorCount() != 1) return false;
1257 return block->PredecessorAt(0)->rpo_number() == block->rpo_number() - 1; 1258 return block->PredecessorAt(0).IsNext(block->rpo_number());
1258 } 1259 }
1259 1260
1260 1261
1261 void RegisterAllocator::ResolveControlFlow() { 1262 void RegisterAllocator::ResolveControlFlow() {
1262 RegisterAllocatorPhase phase("L_Resolve control flow", this); 1263 RegisterAllocatorPhase phase("L_Resolve control flow", this);
1263 for (int block_id = 1; block_id < code()->BasicBlockCount(); ++block_id) { 1264 for (int block_id = 1; block_id < code()->BasicBlockCount(); ++block_id) {
1264 BasicBlock* block = code()->BlockAt(block_id); 1265 const InstructionBlock* block =
1266 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id));
1265 if (CanEagerlyResolveControlFlow(block)) continue; 1267 if (CanEagerlyResolveControlFlow(block)) continue;
1266 BitVector* live = live_in_sets_[block->rpo_number()]; 1268 BitVector* live = live_in_sets_[block->rpo_number().ToInt()];
1267 BitVector::Iterator iterator(live); 1269 BitVector::Iterator iterator(live);
1268 while (!iterator.Done()) { 1270 while (!iterator.Done()) {
1269 int operand_index = iterator.Current(); 1271 int operand_index = iterator.Current();
1270 for (BasicBlock::Predecessors::iterator i = block->predecessors_begin(); 1272 for (InstructionBlock::Predecessors::const_iterator i =
1273 block->predecessors_begin();
1271 i != block->predecessors_end(); ++i) { 1274 i != block->predecessors_end(); ++i) {
1272 BasicBlock* cur = *i; 1275 const InstructionBlock* cur = code()->InstructionBlockAt(*i);
1273 LiveRange* cur_range = LiveRangeFor(operand_index); 1276 LiveRange* cur_range = LiveRangeFor(operand_index);
1274 ResolveControlFlow(cur_range, block, cur); 1277 ResolveControlFlow(cur_range, block, cur);
1275 } 1278 }
1276 iterator.Advance(); 1279 iterator.Advance();
1277 } 1280 }
1278 } 1281 }
1279 } 1282 }
1280 1283
1281 1284
1282 void RegisterAllocator::BuildLiveRanges() { 1285 void RegisterAllocator::BuildLiveRanges() {
1283 RegisterAllocatorPhase phase("L_Build live ranges", this); 1286 RegisterAllocatorPhase phase("L_Build live ranges", this);
1284 InitializeLivenessAnalysis(); 1287 InitializeLivenessAnalysis();
1285 // Process the blocks in reverse order. 1288 // Process the blocks in reverse order.
1286 for (int block_id = code()->BasicBlockCount() - 1; block_id >= 0; 1289 for (int block_id = code()->BasicBlockCount() - 1; block_id >= 0;
1287 --block_id) { 1290 --block_id) {
1288 BasicBlock* block = code()->BlockAt(block_id); 1291 const InstructionBlock* block =
1292 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id));
1289 BitVector* live = ComputeLiveOut(block); 1293 BitVector* live = ComputeLiveOut(block);
1290 // Initially consider all live_out values live for the entire block. We 1294 // Initially consider all live_out values live for the entire block. We
1291 // will shorten these intervals if necessary. 1295 // will shorten these intervals if necessary.
1292 AddInitialIntervals(block, live); 1296 AddInitialIntervals(block, live);
1293 1297
1294 // Process the instructions in reverse order, generating and killing 1298 // Process the instructions in reverse order, generating and killing
1295 // live values. 1299 // live values.
1296 ProcessInstructions(block, live); 1300 ProcessInstructions(block, live);
1297 // All phi output operands are killed by this block. 1301 // All phi output operands are killed by this block.
1298 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); 1302 for (InstructionBlock::PhiInstructions::const_iterator i =
1299 ++i) { 1303 block->phis_begin();
1300 Node* phi = *i; 1304 i != block->phis_end(); ++i) {
1301 if (phi->opcode() != IrOpcode::kPhi) continue; 1305 PhiInstruction* phi = *i;
1302
1303 // The live range interval already ends at the first instruction of the 1306 // The live range interval already ends at the first instruction of the
1304 // block. 1307 // block.
1305 int phi_vreg = code()->GetVirtualRegister(phi); 1308 int phi_vreg = phi->virtual_register();
1306 live->Remove(phi_vreg); 1309 live->Remove(phi_vreg);
1307 1310
1308 InstructionOperand* hint = NULL; 1311 InstructionOperand* hint = NULL;
1309 InstructionOperand* phi_operand = NULL; 1312 InstructionOperand* phi_operand = NULL;
1310 GapInstruction* gap = GetLastGap(block->PredecessorAt(0)); 1313 GapInstruction* gap =
1314 GetLastGap(code()->InstructionBlockAt(block->PredecessorAt(0)));
1311 1315
1312 // TODO(titzer): no need to create the parallel move if it doesn't exit. 1316 // TODO(titzer): no need to create the parallel move if it doesn't exit.
1313 ParallelMove* move = 1317 ParallelMove* move =
1314 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); 1318 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
1315 for (int j = 0; j < move->move_operands()->length(); ++j) { 1319 for (int j = 0; j < move->move_operands()->length(); ++j) {
1316 InstructionOperand* to = move->move_operands()->at(j).destination(); 1320 InstructionOperand* to = move->move_operands()->at(j).destination();
1317 if (to->IsUnallocated() && 1321 if (to->IsUnallocated() &&
1318 UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) { 1322 UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) {
1319 hint = move->move_operands()->at(j).source(); 1323 hint = move->move_operands()->at(j).source();
1320 phi_operand = to; 1324 phi_operand = to;
1321 break; 1325 break;
1322 } 1326 }
1323 } 1327 }
1324 DCHECK(hint != NULL); 1328 DCHECK(hint != NULL);
1325 1329
1326 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( 1330 LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1327 code()->first_instruction_index(block)); 1331 block->first_instruction_index());
1328 Define(block_start, phi_operand, hint); 1332 Define(block_start, phi_operand, hint);
1329 } 1333 }
1330 1334
1331 // Now live is live_in for this block except not including values live 1335 // Now live is live_in for this block except not including values live
1332 // out on backward successor edges. 1336 // out on backward successor edges.
1333 live_in_sets_[block_id] = live; 1337 live_in_sets_[block_id] = live;
1334 1338
1335 if (block->IsLoopHeader()) { 1339 if (block->IsLoopHeader()) {
1336 // Add a live range stretching from the first loop instruction to the last 1340 // Add a live range stretching from the first loop instruction to the last
1337 // for each value live on entry to the header. 1341 // for each value live on entry to the header.
1338 BitVector::Iterator iterator(live); 1342 BitVector::Iterator iterator(live);
1339 LifetimePosition start = LifetimePosition::FromInstructionIndex( 1343 LifetimePosition start = LifetimePosition::FromInstructionIndex(
1340 code()->first_instruction_index(block)); 1344 block->first_instruction_index());
1341 int end_index = 1345 int end_index = code()
1342 code()->last_instruction_index(code()->BlockAt(block->loop_end())); 1346 ->InstructionBlockAt(block->loop_end())
1347 ->last_instruction_index();
1343 LifetimePosition end = 1348 LifetimePosition end =
1344 LifetimePosition::FromInstructionIndex(end_index).NextInstruction(); 1349 LifetimePosition::FromInstructionIndex(end_index).NextInstruction();
1345 while (!iterator.Done()) { 1350 while (!iterator.Done()) {
1346 int operand_index = iterator.Current(); 1351 int operand_index = iterator.Current();
1347 LiveRange* range = LiveRangeFor(operand_index); 1352 LiveRange* range = LiveRangeFor(operand_index);
1348 range->EnsureInterval(start, end, zone()); 1353 range->EnsureInterval(start, end, zone());
1349 iterator.Advance(); 1354 iterator.Advance();
1350 } 1355 }
1351 1356
1352 // Insert all values into the live in sets of all blocks in the loop. 1357 // Insert all values into the live in sets of all blocks in the loop.
1353 for (int i = block->rpo_number() + 1; i < block->loop_end(); ++i) { 1358 for (int i = block->rpo_number().ToInt() + 1;
1359 i < block->loop_end().ToInt(); ++i) {
1354 live_in_sets_[i]->Union(*live); 1360 live_in_sets_[i]->Union(*live);
1355 } 1361 }
1356 } 1362 }
1357 1363
1358 #ifdef DEBUG 1364 #ifdef DEBUG
1359 if (block_id == 0) { 1365 if (block_id == 0) {
1360 BitVector::Iterator iterator(live); 1366 BitVector::Iterator iterator(live);
1361 bool found = false; 1367 bool found = false;
1362 while (!iterator.Done()) { 1368 while (!iterator.Done()) {
1363 found = true; 1369 found = true;
(...skipping 587 matching lines...) Expand 10 before | Expand all | Expand 10 after
1951 1957
1952 // This register was not free. Thus we need to find and spill 1958 // This register was not free. Thus we need to find and spill
1953 // parts of active and inactive live regions that use the same register 1959 // parts of active and inactive live regions that use the same register
1954 // at the same lifetime positions as current. 1960 // at the same lifetime positions as current.
1955 SplitAndSpillIntersecting(current); 1961 SplitAndSpillIntersecting(current);
1956 } 1962 }
1957 1963
1958 1964
1959 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( 1965 LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
1960 LiveRange* range, LifetimePosition pos) { 1966 LiveRange* range, LifetimePosition pos) {
1961 BasicBlock* block = GetBlock(pos.InstructionStart()); 1967 const InstructionBlock* block = GetInstructionBlock(pos.InstructionStart());
1962 BasicBlock* loop_header = 1968 const InstructionBlock* loop_header =
1963 block->IsLoopHeader() ? block : code()->GetContainingLoop(block); 1969 block->IsLoopHeader() ? block : code()->GetContainingLoop(block);
1964 1970
1965 if (loop_header == NULL) return pos; 1971 if (loop_header == NULL) return pos;
1966 1972
1967 UsePosition* prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); 1973 UsePosition* prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos);
1968 1974
1969 while (loop_header != NULL) { 1975 while (loop_header != NULL) {
1970 // We are going to spill live range inside the loop. 1976 // We are going to spill live range inside the loop.
1971 // If possible try to move spilling position backwards to loop header. 1977 // If possible try to move spilling position backwards to loop header.
1972 // This will reduce number of memory moves on the back edge. 1978 // This will reduce number of memory moves on the back edge.
1973 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex( 1979 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
1974 code()->first_instruction_index(loop_header)); 1980 loop_header->first_instruction_index());
1975 1981
1976 if (range->Covers(loop_start)) { 1982 if (range->Covers(loop_start)) {
1977 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) { 1983 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
1978 // No register beneficial use inside the loop before the pos. 1984 // No register beneficial use inside the loop before the pos.
1979 pos = loop_start; 1985 pos = loop_start;
1980 } 1986 }
1981 } 1987 }
1982 1988
1983 // Try hoisting out to an outer loop. 1989 // Try hoisting out to an outer loop.
1984 loop_header = code()->GetContainingLoop(loop_header); 1990 loop_header = code()->GetContainingLoop(loop_header);
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after
2079 2085
2080 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, 2086 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2081 LifetimePosition end) { 2087 LifetimePosition end) {
2082 int start_instr = start.InstructionIndex(); 2088 int start_instr = start.InstructionIndex();
2083 int end_instr = end.InstructionIndex(); 2089 int end_instr = end.InstructionIndex();
2084 DCHECK(start_instr <= end_instr); 2090 DCHECK(start_instr <= end_instr);
2085 2091
2086 // We have no choice 2092 // We have no choice
2087 if (start_instr == end_instr) return end; 2093 if (start_instr == end_instr) return end;
2088 2094
2089 BasicBlock* start_block = GetBlock(start); 2095 const InstructionBlock* start_block = GetInstructionBlock(start);
2090 BasicBlock* end_block = GetBlock(end); 2096 const InstructionBlock* end_block = GetInstructionBlock(end);
2091 2097
2092 if (end_block == start_block) { 2098 if (end_block == start_block) {
2093 // The interval is split in the same basic block. Split at the latest 2099 // The interval is split in the same basic block. Split at the latest
2094 // possible position. 2100 // possible position.
2095 return end; 2101 return end;
2096 } 2102 }
2097 2103
2098 BasicBlock* block = end_block; 2104 const InstructionBlock* block = end_block;
2099 // Find header of outermost loop. 2105 // Find header of outermost loop.
2100 // TODO(titzer): fix redundancy below. 2106 // TODO(titzer): fix redundancy below.
2101 while (code()->GetContainingLoop(block) != NULL && 2107 while (code()->GetContainingLoop(block) != NULL &&
2102 code()->GetContainingLoop(block)->rpo_number() > 2108 code()->GetContainingLoop(block)->rpo_number().ToInt() >
2103 start_block->rpo_number()) { 2109 start_block->rpo_number().ToInt()) {
2104 block = code()->GetContainingLoop(block); 2110 block = code()->GetContainingLoop(block);
2105 } 2111 }
2106 2112
2107 // We did not find any suitable outer loop. Split at the latest possible 2113 // We did not find any suitable outer loop. Split at the latest possible
2108 // position unless end_block is a loop header itself. 2114 // position unless end_block is a loop header itself.
2109 if (block == end_block && !end_block->IsLoopHeader()) return end; 2115 if (block == end_block && !end_block->IsLoopHeader()) return end;
2110 2116
2111 return LifetimePosition::FromInstructionIndex( 2117 return LifetimePosition::FromInstructionIndex(
2112 code()->first_instruction_index(block)); 2118 block->first_instruction_index());
2113 } 2119 }
2114 2120
2115 2121
2116 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { 2122 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2117 LiveRange* second_part = SplitRangeAt(range, pos); 2123 LiveRange* second_part = SplitRangeAt(range, pos);
2118 if (!AllocationOk()) return; 2124 if (!AllocationOk()) return;
2119 Spill(second_part); 2125 Spill(second_part);
2120 } 2126 }
2121 2127
2122 2128
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after
2225 allocator_zone_start_allocation_size_; 2231 allocator_zone_start_allocation_size_;
2226 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size); 2232 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size);
2227 } 2233 }
2228 #ifdef DEBUG 2234 #ifdef DEBUG
2229 if (allocator_ != NULL) allocator_->Verify(); 2235 if (allocator_ != NULL) allocator_->Verify();
2230 #endif 2236 #endif
2231 } 2237 }
2232 } 2238 }
2233 } 2239 }
2234 } // namespace v8::internal::compiler 2240 } // namespace v8::internal::compiler
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698