Chromium Code Reviews

Unified Diff: src/compiler/register-allocator.cc

Issue 798363007: [turbofan] use START and END gap positions for constraints (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 6 years ago
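
What the patch does, in brief: the old AddConstraintsGapMove helper is replaced by AddGapMove, which takes an explicit GapInstruction::InnerPosition. Gap moves that resolve fixed-output constraints are inserted into the START parallel move of a gap, while moves for fixed-input, "output same as input", phi, and control-flow-resolution constraints go into the END parallel move, and ProcessInstructions now walks both positions when building live ranges. The sketch below is only a toy illustration of that START/END split under hypothetical, simplified types (InnerPosition, Move, and Gap here are stand-ins, not the V8 classes):

    // Toy model of a gap with START and END parallel-move slots.
    // Hypothetical types for illustration only; not the V8 classes.
    #include <iostream>
    #include <map>
    #include <string>
    #include <utility>
    #include <vector>

    enum class InnerPosition { START, END };  // mirrors GapInstruction::START/END

    struct Move {
      std::string from;
      std::string to;
    };

    struct Gap {
      // One parallel-move list per inner position of the gap.
      std::map<InnerPosition, std::vector<Move>> slots;
      void AddMove(InnerPosition pos, std::string from, std::string to) {
        slots[pos].push_back({std::move(from), std::move(to)});
      }
    };

    int main() {
      Gap gap;
      // Fixed-output constraint: the copy out of the fixed location goes into
      // the START slot (cf. AddGapMove(gap_index, GapInstruction::START, ...)).
      gap.AddMove(InnerPosition::START, "fixed output", "v1 (unconstrained copy)");
      // Fixed-input constraint: the copy into the fixed location goes into
      // the END slot (cf. AddGapMove(gap_index, GapInstruction::END, ...)).
      gap.AddMove(InnerPosition::END, "v2 (unconstrained copy)", "fixed input");
      for (const auto& slot : gap.slots) {
        for (const auto& m : slot.second) {
          std::cout << (slot.first == InnerPosition::START ? "START: " : "END:   ")
                    << m.from << " -> " << m.to << "\n";
        }
      }
      return 0;
    }

As the diff shows, the two kinds of fix-ups end up in separate parallel moves of the same gap, rather than all being appended to the START move as before.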
@@ -1,10 +1,10 @@
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/compiler/linkage.h"
 #include "src/compiler/register-allocator.h"
 #include "src/string-stream.h"

 namespace v8 {
 namespace internal {
@@ -770,39 +770,26 @@
   auto range = LiveRangeFor(operand);
   if (range == nullptr) return;
   if (operand->IsUnallocated()) {
     UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
     range->AddUsePosition(position, unalloc_operand, hint, local_zone());
   }
   range->AddUseInterval(block_start, position, local_zone());
 }


-void RegisterAllocator::AddConstraintsGapMove(int index,
-                                              InstructionOperand* from,
-                                              InstructionOperand* to) {
+void RegisterAllocator::AddGapMove(int index,
+                                   GapInstruction::InnerPosition position,
+                                   InstructionOperand* from,
+                                   InstructionOperand* to) {
   auto gap = code()->GapAt(index);
-  auto move = gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
-  if (from->IsUnallocated()) {
-    const ZoneList<MoveOperands>* move_operands = move->move_operands();
-    for (int i = 0; i < move_operands->length(); ++i) {
-      auto cur = move_operands->at(i);
-      auto cur_to = cur.destination();
-      if (cur_to->IsUnallocated()) {
-        if (UnallocatedOperand::cast(cur_to)->virtual_register() ==
-            UnallocatedOperand::cast(from)->virtual_register()) {
-          move->AddMove(cur.source(), to, code_zone());
-          return;
-        }
-      }
-    }
-  }
+  auto move = gap->GetOrCreateParallelMove(position, code_zone());
   move->AddMove(from, to, code_zone());
 }


 static bool AreUseIntervalsIntersecting(UseInterval* interval1,
                                         UseInterval* interval2) {
   while (interval1 != nullptr && interval2 != nullptr) {
     if (interval1->start().Value() < interval2->start().Value()) {
       if (interval1->end().Value() > interval2->start().Value()) {
         return true;
@@ -1093,21 +1080,21 @@
         DCHECK(successor->PredecessorCount() == 1);
         int gap_index = successor->first_instruction_index() + 1;
         DCHECK(code()->IsGapAt(gap_index));

         // Create an unconstrained operand for the same virtual register
         // and insert a gap move from the fixed output to the operand.
         UnallocatedOperand* output_copy =
             new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY);
         output_copy->set_virtual_register(output_vreg);

-        code()->AddGapMove(gap_index, output, output_copy);
+        AddGapMove(gap_index, GapInstruction::START, output, output_copy);
       }
     }

     if (!assigned) {
       for (auto succ : block->successors()) {
         const InstructionBlock* successor = code()->InstructionBlockAt(succ);
         DCHECK(successor->PredecessorCount() == 1);
         int gap_index = successor->first_instruction_index() + 1;
         range->SpillAtDefinition(local_zone(), gap_index, output);
         range->SetSpillStartIndex(gap_index);
@@ -1146,62 +1133,63 @@
           bool is_tagged = HasTaggedValue(first_output->virtual_register());
           AllocateFixed(first_output, gap_index, is_tagged);

           // This value is produced on the stack, we never need to spill it.
           if (first_output->IsStackSlot()) {
             DCHECK(first_output->index() < 0);
             range->SetSpillOperand(first_output);
             range->SetSpillStartIndex(gap_index - 1);
             assigned = true;
           }
-          code()->AddGapMove(gap_index, first_output, output_copy);
+          AddGapMove(gap_index, GapInstruction::START, first_output,
+                     output_copy);
         }

         // Make sure we add a gap move for spilling (if we have not done
         // so already).
         if (!assigned) {
           range->SpillAtDefinition(local_zone(), gap_index, first_output);
           range->SetSpillStartIndex(gap_index);
         }
       }
     }
   }

   if (second != nullptr) {
     // Handle fixed input operands of second instruction.
     for (size_t i = 0; i < second->InputCount(); i++) {
       auto input = second->InputAt(i);
       if (input->IsImmediate()) continue;  // Ignore immediates.
       auto cur_input = UnallocatedOperand::cast(input);
       if (cur_input->HasFixedPolicy()) {
         auto input_copy = cur_input->CopyUnconstrained(code_zone());
         bool is_tagged = HasTaggedValue(cur_input->virtual_register());
         AllocateFixed(cur_input, gap_index + 1, is_tagged);
-        AddConstraintsGapMove(gap_index, input_copy, cur_input);
+        AddGapMove(gap_index, GapInstruction::END, input_copy, cur_input);
       }
     }

     // Handle "output same as input" for second instruction.
     for (size_t i = 0; i < second->OutputCount(); i++) {
       auto output = second->OutputAt(i);
       if (!output->IsUnallocated()) continue;
       auto second_output = UnallocatedOperand::cast(output);
       if (second_output->HasSameAsInputPolicy()) {
         DCHECK(i == 0);  // Only valid for first output.
         UnallocatedOperand* cur_input =
             UnallocatedOperand::cast(second->InputAt(0));
         int output_vreg = second_output->virtual_register();
         int input_vreg = cur_input->virtual_register();

         auto input_copy = cur_input->CopyUnconstrained(code_zone());
         cur_input->set_virtual_register(second_output->virtual_register());
-        AddConstraintsGapMove(gap_index, input_copy, cur_input);
+        AddGapMove(gap_index, GapInstruction::END, input_copy, cur_input);

         if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
           int index = gap_index + 1;
           Instruction* instr = InstructionAt(index);
           if (instr->HasPointerMap()) {
             instr->pointer_map()->RecordPointer(input_copy, code_zone());
           }
         } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
           // The input is assumed to immediately have a tagged representation,
           // before the pointer map can be used. I.e. the pointer map at the
@@ -1242,53 +1230,59 @@
       LifetimePosition::FromInstructionIndex(block_start);

   for (int index = block->last_instruction_index(); index >= block_start;
        index--) {
     auto curr_position = LifetimePosition::FromInstructionIndex(index);
     auto instr = InstructionAt(index);
     DCHECK(instr != nullptr);
     if (instr->IsGapMoves()) {
       // Process the moves of the gap instruction, making their sources live.
       auto gap = code()->GapAt(index);
-
-      // TODO(titzer): no need to create the parallel move if it doesn't exist.
-      auto move =
-          gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
-      const ZoneList<MoveOperands>* move_operands = move->move_operands();
-      for (int i = 0; i < move_operands->length(); ++i) {
-        auto cur = &move_operands->at(i);
-        auto from = cur->source();
-        auto to = cur->destination();
-        auto hint = to;
-        if (to->IsUnallocated()) {
-          int to_vreg = UnallocatedOperand::cast(to)->virtual_register();
-          auto to_range = LiveRangeFor(to_vreg);
-          if (to_range->is_phi()) {
-            DCHECK(!FLAG_turbo_delay_ssa_decon);
-            if (to_range->is_non_loop_phi()) {
-              hint = to_range->current_hint_operand();
-            }
-          } else {
-            if (live->Contains(to_vreg)) {
-              Define(curr_position, to, from);
-              live->Remove(to_vreg);
-            } else {
-              cur->Eliminate();
-              continue;
-            }
-          }
-        } else {
-          Define(curr_position, to, from);
-        }
-        Use(block_start_position, curr_position, from, hint);
-        if (from->IsUnallocated()) {
-          live->Add(UnallocatedOperand::cast(from)->virtual_register());
-        }
-      }
+      const GapInstruction::InnerPosition kPositions[] = {
+          GapInstruction::END, GapInstruction::START};
+      for (auto position : kPositions) {
+        auto move = gap->GetParallelMove(position);
+        if (move == nullptr) continue;
+        if (position == GapInstruction::END) {
+          curr_position = curr_position.InstructionEnd();
+        } else {
+          curr_position = curr_position.InstructionStart();
+        }
+        auto move_ops = move->move_operands();
+        for (auto cur = move_ops->begin(); cur != move_ops->end(); ++cur) {
+          auto from = cur->source();
+          auto to = cur->destination();
+          auto hint = to;
+          if (to->IsUnallocated()) {
+            int to_vreg = UnallocatedOperand::cast(to)->virtual_register();
+            auto to_range = LiveRangeFor(to_vreg);
+            if (to_range->is_phi()) {
+              DCHECK(!FLAG_turbo_delay_ssa_decon);
+              if (to_range->is_non_loop_phi()) {
+                hint = to_range->current_hint_operand();
+              }
+            } else {
+              if (live->Contains(to_vreg)) {
+                Define(curr_position, to, from);
+                live->Remove(to_vreg);
+              } else {
+                cur->Eliminate();
+                continue;
+              }
+            }
+          } else {
+            Define(curr_position, to, from);
+          }
+          Use(block_start_position, curr_position, from, hint);
+          if (from->IsUnallocated()) {
+            live->Add(UnallocatedOperand::cast(from)->virtual_register());
+          }
+        }
+      }
     } else {
       // Process output, inputs, and temps of this non-gap instruction.
       for (size_t i = 0; i < instr->OutputCount(); i++) {
         auto output = instr->OutputAt(i);
         if (output->IsUnallocated()) {
           int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
           live->Remove(out_vreg);
         } else if (output->IsConstant()) {
@@ -1362,24 +1356,22 @@
           std::make_pair(phi->virtual_register(), PhiMapValue(phi, block)));
       DCHECK(res.second);
       USE(res);
     }
     auto output = phi->output();
     int phi_vreg = phi->virtual_register();
     if (!FLAG_turbo_delay_ssa_decon) {
       for (size_t i = 0; i < phi->operands().size(); ++i) {
         InstructionBlock* cur_block =
             code()->InstructionBlockAt(block->predecessors()[i]);
-        // The gap move must be added without any special processing as in
-        // the AddConstraintsGapMove.
-        code()->AddGapMove(cur_block->last_instruction_index() - 1,
-                           phi->inputs()[i], output);
+        AddGapMove(cur_block->last_instruction_index() - 1, GapInstruction::END,
+                   phi->inputs()[i], output);
         DCHECK(!InstructionAt(cur_block->last_instruction_index())
                     ->HasPointerMap());
       }
     }
     auto live_range = LiveRangeFor(phi_vreg);
     int gap_index = block->first_instruction_index();
     live_range->SpillAtDefinition(local_zone(), gap_index, output);
     live_range->SetSpillStartIndex(gap_index);
     // We use the phi-ness of some nodes in some later heuristics.
     live_range->set_is_phi(true);
@@ -1641,62 +1633,62 @@
     }
   }
 }


 void RegisterAllocator::ResolveControlFlow(const InstructionBlock* block,
                                            InstructionOperand* cur_op,
                                            const InstructionBlock* pred,
                                            InstructionOperand* pred_op) {
   if (pred_op->Equals(cur_op)) return;
-  GapInstruction* gap = nullptr;
+  int gap_index;
+  GapInstruction::InnerPosition position;
   if (block->PredecessorCount() == 1) {
-    gap = code()->GapAt(block->first_instruction_index());
+    gap_index = block->first_instruction_index();
+    position = GapInstruction::START;
   } else {
     DCHECK(pred->SuccessorCount() == 1);
-    gap = GetLastGap(pred);
-    auto branch = InstructionAt(pred->last_instruction_index());
-    DCHECK(!branch->HasPointerMap());
-    USE(branch);
+    DCHECK(!InstructionAt(pred->last_instruction_index())->HasPointerMap());
+    gap_index = pred->last_instruction_index() - 1;
+    position = GapInstruction::END;
   }
-  gap->GetOrCreateParallelMove(GapInstruction::START, code_zone())
-      ->AddMove(pred_op, cur_op, code_zone());
+  AddGapMove(gap_index, position, pred_op, cur_op);
 }


 void RegisterAllocator::BuildLiveRanges() {
   // Process the blocks in reverse order.
   for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
        --block_id) {
     auto block =
         code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id));
     auto live = ComputeLiveOut(block);
     // Initially consider all live_out values live for the entire block. We
     // will shorten these intervals if necessary.
     AddInitialIntervals(block, live);

     // Process the instructions in reverse order, generating and killing
     // live values.
     ProcessInstructions(block, live);
     // All phi output operands are killed by this block.
     for (auto phi : block->phis()) {
       // The live range interval already ends at the first instruction of the
       // block.
       int phi_vreg = phi->virtual_register();
       live->Remove(phi_vreg);
       if (!FLAG_turbo_delay_ssa_decon) {
         InstructionOperand* hint = nullptr;
         InstructionOperand* phi_operand = nullptr;
         auto gap =
             GetLastGap(code()->InstructionBlockAt(block->predecessors()[0]));
         auto move =
-            gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
+            gap->GetOrCreateParallelMove(GapInstruction::END, code_zone());
         for (int j = 0; j < move->move_operands()->length(); ++j) {
           auto to = move->move_operands()->at(j).destination();
           if (to->IsUnallocated() &&
               UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) {
             hint = move->move_operands()->at(j).source();
             phi_operand = to;
             break;
           }
         }
         DCHECK(hint != nullptr);
@@ -2547,10 +2539,10 @@
   } else {
     DCHECK(range->Kind() == GENERAL_REGISTERS);
     assigned_registers_->Add(reg);
   }
   range->set_assigned_register(reg, code_zone());
 }

 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
