| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/flow_graph_allocator.h" | 5 #include "vm/flow_graph_allocator.h" |
| 6 | 6 |
| 7 #include "vm/bit_vector.h" | 7 #include "vm/bit_vector.h" |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 #include "vm/il_printer.h" | 9 #include "vm/il_printer.h" |
| 10 #include "vm/flow_graph.h" | 10 #include "vm/flow_graph.h" |
| (...skipping 51 matching lines...) |
| 62 } | 62 } |
| 63 | 63 |
| 64 | 64 |
| 65 static intptr_t ToInstructionEnd(intptr_t pos) { | 65 static intptr_t ToInstructionEnd(intptr_t pos) { |
| 66 return (pos | 1); | 66 return (pos | 1); |
| 67 } | 67 } |
| 68 | 68 |
| 69 | 69 |
| 70 FlowGraphAllocator::FlowGraphAllocator(const FlowGraph& flow_graph, | 70 FlowGraphAllocator::FlowGraphAllocator(const FlowGraph& flow_graph, |
| 71 bool intrinsic_mode) | 71 bool intrinsic_mode) |
| 72 : flow_graph_(flow_graph), | 72 : flow_graph_(flow_graph), |
| 73 reaching_defs_(flow_graph), | 73 reaching_defs_(flow_graph), |
| 74 value_representations_(flow_graph.max_virtual_register_number()), | 74 value_representations_(flow_graph.max_virtual_register_number()), |
| 75 block_order_(flow_graph.reverse_postorder()), | 75 block_order_(flow_graph.reverse_postorder()), |
| 76 postorder_(flow_graph.postorder()), | 76 postorder_(flow_graph.postorder()), |
| 77 liveness_(flow_graph), | 77 liveness_(flow_graph), |
| 78 vreg_count_(flow_graph.max_virtual_register_number()), | 78 vreg_count_(flow_graph.max_virtual_register_number()), |
| 79 live_ranges_(flow_graph.max_virtual_register_number()), | 79 live_ranges_(flow_graph.max_virtual_register_number()), |
| 80 cpu_regs_(), | 80 cpu_regs_(), |
| 81 fpu_regs_(), | 81 fpu_regs_(), |
| 82 blocked_cpu_registers_(), | 82 blocked_cpu_registers_(), |
| 83 blocked_fpu_registers_(), | 83 blocked_fpu_registers_(), |
| 84 number_of_registers_(0), | 84 number_of_registers_(0), |
| 85 registers_(), | 85 registers_(), |
| 86 blocked_registers_(), | 86 blocked_registers_(), |
| 87 cpu_spill_slot_count_(0), | 87 cpu_spill_slot_count_(0), |
| 88 intrinsic_mode_(intrinsic_mode) { | 88 intrinsic_mode_(intrinsic_mode) { |
| 89 for (intptr_t i = 0; i < vreg_count_; i++) { | 89 for (intptr_t i = 0; i < vreg_count_; i++) { |
| 90 live_ranges_.Add(NULL); | 90 live_ranges_.Add(NULL); |
| 91 } | 91 } |
| 92 for (intptr_t i = 0; i < vreg_count_; i++) { | 92 for (intptr_t i = 0; i < vreg_count_; i++) { |
| 93 value_representations_.Add(kNoRepresentation); | 93 value_representations_.Add(kNoRepresentation); |
| 94 } | 94 } |
| 95 | 95 |
| 96 // All registers are marked as "not blocked" (array initialized to false). | 96 // All registers are marked as "not blocked" (array initialized to false). |
| 97 // Mark the unavailable ones as "blocked" (true). | 97 // Mark the unavailable ones as "blocked" (true). |
| 98 for (intptr_t i = 0; i < kNumberOfCpuRegisters; i++) { | 98 for (intptr_t i = 0; i < kNumberOfCpuRegisters; i++) { |
| (...skipping 90 matching lines...) |
| 189 | 189 |
| 190 live_in->Add(input->definition()->ssa_temp_index()); | 190 live_in->Add(input->definition()->ssa_temp_index()); |
| 191 if (input->definition()->HasPairRepresentation()) { | 191 if (input->definition()->HasPairRepresentation()) { |
| 192 live_in->Add(ToSecondPairVreg(input->definition()->ssa_temp_index())); | 192 live_in->Add(ToSecondPairVreg(input->definition()->ssa_temp_index())); |
| 193 } | 193 } |
| 194 } | 194 } |
| 195 | 195 |
| 196 // Add non-argument uses from the deoptimization environment (pushed | 196 // Add non-argument uses from the deoptimization environment (pushed |
| 197 // arguments are not allocated by the register allocator). | 197 // arguments are not allocated by the register allocator). |
| 198 if (current->env() != NULL) { | 198 if (current->env() != NULL) { |
| 199 for (Environment::DeepIterator env_it(current->env()); | 199 for (Environment::DeepIterator env_it(current->env()); !env_it.Done(); |
| 200 !env_it.Done(); | |
| 201 env_it.Advance()) { | 200 env_it.Advance()) { |
| 202 Definition* defn = env_it.CurrentValue()->definition(); | 201 Definition* defn = env_it.CurrentValue()->definition(); |
| 203 if (defn->IsMaterializeObject()) { | 202 if (defn->IsMaterializeObject()) { |
| 204 // MaterializeObject instruction is not in the graph. | 203 // MaterializeObject instruction is not in the graph. |
| 205 // Treat its inputs as part of the environment. | 204 // Treat its inputs as part of the environment. |
| 206 DeepLiveness(defn->AsMaterializeObject(), live_in); | 205 DeepLiveness(defn->AsMaterializeObject(), live_in); |
| 207 } else if (!defn->IsPushArgument() && !defn->IsConstant()) { | 206 } else if (!defn->IsPushArgument() && !defn->IsConstant()) { |
| 208 live_in->Add(defn->ssa_temp_index()); | 207 live_in->Add(defn->ssa_temp_index()); |
| 209 if (defn->HasPairRepresentation()) { | 208 if (defn->HasPairRepresentation()) { |
| 210 live_in->Add(ToSecondPairVreg(defn->ssa_temp_index())); | 209 live_in->Add(ToSecondPairVreg(defn->ssa_temp_index())); |
| (...skipping 31 matching lines...) |
| 242 const intptr_t second_use = ToSecondPairVreg(use); | 241 const intptr_t second_use = ToSecondPairVreg(use); |
| 243 if (!kill_[pred->postorder_number()]->Contains(second_use)) { | 242 if (!kill_[pred->postorder_number()]->Contains(second_use)) { |
| 244 live_in_[pred->postorder_number()]->Add(second_use); | 243 live_in_[pred->postorder_number()]->Add(second_use); |
| 245 } | 244 } |
| 246 } | 245 } |
| 247 } | 246 } |
| 248 } | 247 } |
| 249 } else if (block->IsCatchBlockEntry()) { | 248 } else if (block->IsCatchBlockEntry()) { |
| 250 // Process initial definitions. | 249 // Process initial definitions. |
| 251 CatchBlockEntryInstr* catch_entry = block->AsCatchBlockEntry(); | 250 CatchBlockEntryInstr* catch_entry = block->AsCatchBlockEntry(); |
| 252 for (intptr_t i = 0; | 251 for (intptr_t i = 0; i < catch_entry->initial_definitions()->length(); |
| 253 i < catch_entry->initial_definitions()->length(); | |
| 254 i++) { | 252 i++) { |
| 255 Definition* def = (*catch_entry->initial_definitions())[i]; | 253 Definition* def = (*catch_entry->initial_definitions())[i]; |
| 256 const intptr_t vreg = def->ssa_temp_index(); | 254 const intptr_t vreg = def->ssa_temp_index(); |
| 257 kill_[catch_entry->postorder_number()]->Add(vreg); | 255 kill_[catch_entry->postorder_number()]->Add(vreg); |
| 258 live_in_[catch_entry->postorder_number()]->Remove(vreg); | 256 live_in_[catch_entry->postorder_number()]->Remove(vreg); |
| 259 } | 257 } |
| 260 } | 258 } |
| 261 } | 259 } |
| 262 | 260 |
| 263 // Process initial definitions, i.e., constants and incoming parameters. | 261 // Process initial definitions, i.e., constants and incoming parameters. |
| 264 for (intptr_t i = 0; i < graph_entry_->initial_definitions()->length(); i++) { | 262 for (intptr_t i = 0; i < graph_entry_->initial_definitions()->length(); i++) { |
| 265 Definition* def = (*graph_entry_->initial_definitions())[i]; | 263 Definition* def = (*graph_entry_->initial_definitions())[i]; |
| 266 const intptr_t vreg = def->ssa_temp_index(); | 264 const intptr_t vreg = def->ssa_temp_index(); |
| 267 kill_[graph_entry_->postorder_number()]->Add(vreg); | 265 kill_[graph_entry_->postorder_number()]->Add(vreg); |
| 268 live_in_[graph_entry_->postorder_number()]->Remove(vreg); | 266 live_in_[graph_entry_->postorder_number()]->Remove(vreg); |
| 269 } | 267 } |
| 270 } | 268 } |
| 271 | 269 |
| 272 | 270 |
| 273 UsePosition* LiveRange::AddUse(intptr_t pos, Location* location_slot) { | 271 UsePosition* LiveRange::AddUse(intptr_t pos, Location* location_slot) { |
| 274 ASSERT(location_slot != NULL); | 272 ASSERT(location_slot != NULL); |
| 275 ASSERT((first_use_interval_->start_ <= pos) && | 273 ASSERT((first_use_interval_->start_ <= pos) && |
| 276 (pos <= first_use_interval_->end_)); | 274 (pos <= first_use_interval_->end_)); |
| 277 if (uses_ != NULL) { | 275 if (uses_ != NULL) { |
| 278 if ((uses_->pos() == pos) && | 276 if ((uses_->pos() == pos) && (uses_->location_slot() == location_slot)) { |
| 279 (uses_->location_slot() == location_slot)) { | |
| 280 return uses_; | 277 return uses_; |
| 281 } else if (uses_->pos() < pos) { | 278 } else if (uses_->pos() < pos) { |
| 282 // If an instruction at position P is using the same value both as | 279 // If an instruction at position P is using the same value both as |
| 283 // a fixed register input and a non-fixed input (in this order) we will | 280 // a fixed register input and a non-fixed input (in this order) we will |
| 284 // add uses both at position P-1 and *then* P which will make | 281 // add uses both at position P-1 and *then* P which will make |
| 285 // uses_ unsorted unless we account for it here. | 282 // uses_ unsorted unless we account for it here. |
| 286 UsePosition* insert_after = uses_; | 283 UsePosition* insert_after = uses_; |
| 287 while ((insert_after->next() != NULL) && | 284 while ((insert_after->next() != NULL) && |
| 288 (insert_after->next()->pos() < pos)) { | 285 (insert_after->next()->pos() < pos)) { |
| 289 insert_after = insert_after->next(); | 286 insert_after = insert_after->next(); |
| (...skipping 146 matching lines...) |
| 436 } | 433 } |
| 437 | 434 |
| 438 | 435 |
| 439 // Block location from the start of the instruction to its end. | 436 // Block location from the start of the instruction to its end. |
| 440 void FlowGraphAllocator::BlockLocation(Location loc, | 437 void FlowGraphAllocator::BlockLocation(Location loc, |
| 441 intptr_t from, | 438 intptr_t from, |
| 442 intptr_t to) { | 439 intptr_t to) { |
| 443 if (loc.IsRegister()) { | 440 if (loc.IsRegister()) { |
| 444 BlockRegisterLocation(loc, from, to, blocked_cpu_registers_, cpu_regs_); | 441 BlockRegisterLocation(loc, from, to, blocked_cpu_registers_, cpu_regs_); |
| 445 #if defined(TARGET_ARCH_DBC) | 442 #if defined(TARGET_ARCH_DBC) |
| 446 last_used_register_ = Utils::Maximum(last_used_register_, | 443 last_used_register_ = |
| 447 loc.register_code()); | 444 Utils::Maximum(last_used_register_, loc.register_code()); |
| 448 #endif | 445 #endif |
| 449 } else if (loc.IsFpuRegister()) { | 446 } else if (loc.IsFpuRegister()) { |
| 450 BlockRegisterLocation(loc, from, to, blocked_fpu_registers_, fpu_regs_); | 447 BlockRegisterLocation(loc, from, to, blocked_fpu_registers_, fpu_regs_); |
| 451 } else { | 448 } else { |
| 452 UNREACHABLE(); | 449 UNREACHABLE(); |
| 453 } | 450 } |
| 454 } | 451 } |
| 455 | 452 |
| 456 | 453 |
| 457 void LiveRange::Print() { | 454 void LiveRange::Print() { |
| 458 if (first_use_interval() == NULL) { | 455 if (first_use_interval() == NULL) { |
| 459 return; | 456 return; |
| 460 } | 457 } |
| 461 | 458 |
| 462 THR_Print(" live range v%" Pd " [%" Pd ", %" Pd ") in ", vreg(), | 459 THR_Print(" live range v%" Pd " [%" Pd ", %" Pd ") in ", vreg(), Start(), |
| 463 Start(), | 460 End()); |
| 464 End()); | |
| 465 assigned_location().Print(); | 461 assigned_location().Print(); |
| 466 if (spill_slot_.HasStackIndex()) { | 462 if (spill_slot_.HasStackIndex()) { |
| 467 intptr_t stack_slot = spill_slot_.stack_index(); | 463 intptr_t stack_slot = spill_slot_.stack_index(); |
| 468 THR_Print(" allocated spill slot: %" Pd "", stack_slot); | 464 THR_Print(" allocated spill slot: %" Pd "", stack_slot); |
| 469 } | 465 } |
| 470 THR_Print("\n"); | 466 THR_Print("\n"); |
| 471 | 467 |
| 472 SafepointPosition* safepoint = first_safepoint(); | 468 SafepointPosition* safepoint = first_safepoint(); |
| 473 while (safepoint != NULL) { | 469 while (safepoint != NULL) { |
| 474 THR_Print(" Safepoint [%" Pd "]: ", safepoint->pos()); | 470 THR_Print(" Safepoint [%" Pd "]: ", safepoint->pos()); |
| 475 safepoint->locs()->stack_bitmap()->Print(); | 471 safepoint->locs()->stack_bitmap()->Print(); |
| 476 THR_Print("\n"); | 472 THR_Print("\n"); |
| 477 safepoint = safepoint->next(); | 473 safepoint = safepoint->next(); |
| 478 } | 474 } |
| 479 | 475 |
| 480 UsePosition* use_pos = uses_; | 476 UsePosition* use_pos = uses_; |
| 481 for (UseInterval* interval = first_use_interval_; | 477 for (UseInterval* interval = first_use_interval_; interval != NULL; |
| 482 interval != NULL; | |
| 483 interval = interval->next()) { | 478 interval = interval->next()) { |
| 484 THR_Print(" use interval [%" Pd ", %" Pd ")\n", | 479 THR_Print(" use interval [%" Pd ", %" Pd ")\n", interval->start(), |
| 485 interval->start(), | |
| 486 interval->end()); | 480 interval->end()); |
| 487 while ((use_pos != NULL) && (use_pos->pos() <= interval->end())) { | 481 while ((use_pos != NULL) && (use_pos->pos() <= interval->end())) { |
| 488 THR_Print(" use at %" Pd "", use_pos->pos()); | 482 THR_Print(" use at %" Pd "", use_pos->pos()); |
| 489 if (use_pos->location_slot() != NULL) { | 483 if (use_pos->location_slot() != NULL) { |
| 490 THR_Print(" as "); | 484 THR_Print(" as "); |
| 491 use_pos->location_slot()->Print(); | 485 use_pos->location_slot()->Print(); |
| 492 } | 486 } |
| 493 THR_Print("\n"); | 487 THR_Print("\n"); |
| 494 use_pos = use_pos->next(); | 488 use_pos = use_pos->next(); |
| 495 } | 489 } |
| (...skipping 57 matching lines...) |
| 553 BitVector* current_interference_set = NULL; | 547 BitVector* current_interference_set = NULL; |
| 554 Zone* zone = flow_graph_.zone(); | 548 Zone* zone = flow_graph_.zone(); |
| 555 for (intptr_t i = 0; i < (block_count - 1); i++) { | 549 for (intptr_t i = 0; i < (block_count - 1); i++) { |
| 556 BlockEntryInstr* block = postorder_[i]; | 550 BlockEntryInstr* block = postorder_[i]; |
| 557 | 551 |
| 558 BlockInfo* block_info = BlockInfoAt(block->start_pos()); | 552 BlockInfo* block_info = BlockInfoAt(block->start_pos()); |
| 559 | 553 |
| 560 // For every SSA value that is live out of this block, create an interval | 554 // For every SSA value that is live out of this block, create an interval |
| 561 // that covers the whole block. It will be shortened if we encounter a | 555 // that covers the whole block. It will be shortened if we encounter a |
| 562 // definition of this value in this block. | 556 // definition of this value in this block. |
| 563 for (BitVector::Iterator it(liveness_.GetLiveOutSetAt(i)); | 557 for (BitVector::Iterator it(liveness_.GetLiveOutSetAt(i)); !it.Done(); |
| 564 !it.Done(); | |
| 565 it.Advance()) { | 558 it.Advance()) { |
| 566 LiveRange* range = GetLiveRange(it.Current()); | 559 LiveRange* range = GetLiveRange(it.Current()); |
| 567 range->AddUseInterval(block->start_pos(), block->end_pos()); | 560 range->AddUseInterval(block->start_pos(), block->end_pos()); |
| 568 } | 561 } |
| 569 | 562 |
| 570 BlockInfo* loop_header = block_info->loop_header(); | 563 BlockInfo* loop_header = block_info->loop_header(); |
| 571 if ((loop_header != NULL) && (loop_header->last_block() == block)) { | 564 if ((loop_header != NULL) && (loop_header->last_block() == block)) { |
| 572 current_interference_set = new(zone) BitVector( | 565 current_interference_set = |
| 573 zone, flow_graph_.max_virtual_register_number()); | 566 new (zone) BitVector(zone, flow_graph_.max_virtual_register_number()); |
| 574 ASSERT(loop_header->backedge_interference() == NULL); | 567 ASSERT(loop_header->backedge_interference() == NULL); |
| 575 // All values flowing into the loop header are live at the back-edge and | 568 // All values flowing into the loop header are live at the back-edge and |
| 576 // can interfere with phi moves. | 569 // can interfere with phi moves. |
| 577 current_interference_set->AddAll( | 570 current_interference_set->AddAll( |
| 578 liveness_.GetLiveInSet(loop_header->entry())); | 571 liveness_.GetLiveInSet(loop_header->entry())); |
| 579 loop_header->set_backedge_interference( | 572 loop_header->set_backedge_interference(current_interference_set); |
| 580 current_interference_set); | |
| 581 } | 573 } |
| 582 | 574 |
| 583 // Connect outgoing phi-moves that were created in NumberInstructions | 575 // Connect outgoing phi-moves that were created in NumberInstructions |
| 584 // and find last instruction that contributes to liveness. | 576 // and find last instruction that contributes to liveness. |
| 585 Instruction* current = ConnectOutgoingPhiMoves(block, | 577 Instruction* current = |
| 586 current_interference_set); | 578 ConnectOutgoingPhiMoves(block, current_interference_set); |
| 587 | 579 |
| 588 // Now process all instructions in reverse order. | 580 // Now process all instructions in reverse order. |
| 589 while (current != block) { | 581 while (current != block) { |
| 590 // Skip parallel moves that we insert while processing instructions. | 582 // Skip parallel moves that we insert while processing instructions. |
| 591 if (!current->IsParallelMove()) { | 583 if (!current->IsParallelMove()) { |
| 592 ProcessOneInstruction(block, current, current_interference_set); | 584 ProcessOneInstruction(block, current, current_interference_set); |
| 593 } | 585 } |
| 594 current = current->previous(); | 586 current = current->previous(); |
| 595 } | 587 } |
| 596 | 588 |
| 597 | 589 |
| 598 // Check if any values live into the loop can be spilled for free. | 590 // Check if any values live into the loop can be spilled for free. |
| 599 if (block_info->is_loop_header()) { | 591 if (block_info->is_loop_header()) { |
| 600 current_interference_set = NULL; | 592 current_interference_set = NULL; |
| 601 for (BitVector::Iterator it(liveness_.GetLiveInSetAt(i)); | 593 for (BitVector::Iterator it(liveness_.GetLiveInSetAt(i)); !it.Done(); |
| 602 !it.Done(); | |
| 603 it.Advance()) { | 594 it.Advance()) { |
| 604 LiveRange* range = GetLiveRange(it.Current()); | 595 LiveRange* range = GetLiveRange(it.Current()); |
| 605 if (HasOnlyUnconstrainedUsesInLoop(range, block_info)) { | 596 if (HasOnlyUnconstrainedUsesInLoop(range, block_info)) { |
| 606 range->MarkHasOnlyUnconstrainedUsesInLoop(block_info->loop_id()); | 597 range->MarkHasOnlyUnconstrainedUsesInLoop(block_info->loop_id()); |
| 607 } | 598 } |
| 608 } | 599 } |
| 609 } | 600 } |
| 610 | 601 |
| 611 if (block->IsJoinEntry()) { | 602 if (block->IsJoinEntry()) { |
| 612 ConnectIncomingPhiMoves(block->AsJoinEntry()); | 603 ConnectIncomingPhiMoves(block->AsJoinEntry()); |
| 613 } else if (block->IsCatchBlockEntry()) { | 604 } else if (block->IsCatchBlockEntry()) { |
| 614 // Process initial definitions. | 605 // Process initial definitions. |
| 615 CatchBlockEntryInstr* catch_entry = block->AsCatchBlockEntry(); | 606 CatchBlockEntryInstr* catch_entry = block->AsCatchBlockEntry(); |
| 616 | 607 |
| 617 ProcessEnvironmentUses(catch_entry, catch_entry); // For lazy deopt | 608 ProcessEnvironmentUses(catch_entry, catch_entry); // For lazy deopt |
| 618 | 609 |
| 619 for (intptr_t i = 0; | 610 for (intptr_t i = 0; i < catch_entry->initial_definitions()->length(); |
| 620 i < catch_entry->initial_definitions()->length(); | |
| 621 i++) { | 611 i++) { |
| 622 Definition* defn = (*catch_entry->initial_definitions())[i]; | 612 Definition* defn = (*catch_entry->initial_definitions())[i]; |
| 623 LiveRange* range = GetLiveRange(defn->ssa_temp_index()); | 613 LiveRange* range = GetLiveRange(defn->ssa_temp_index()); |
| 624 range->DefineAt(catch_entry->start_pos()); // Defined at block entry. | 614 range->DefineAt(catch_entry->start_pos()); // Defined at block entry. |
| 625 ProcessInitialDefinition(defn, range, catch_entry); | 615 ProcessInitialDefinition(defn, range, catch_entry); |
| 626 } | 616 } |
| 627 // Block the two fixed registers used by CatchBlockEntryInstr from the | 617 // Block the two fixed registers used by CatchBlockEntryInstr from the |
| 628 // block start until the end of the instruction so that they are | 618 // block start until the end of the instruction so that they are |
| 629 // preserved. | 619 // preserved. |
| 630 intptr_t start = catch_entry->start_pos(); | 620 intptr_t start = catch_entry->start_pos(); |
| 631 #if !defined(TARGET_ARCH_DBC) | 621 #if !defined(TARGET_ARCH_DBC) |
| 632 const Register exception_reg = kExceptionObjectReg; | 622 const Register exception_reg = kExceptionObjectReg; |
| 633 const Register stacktrace_reg = kStackTraceObjectReg; | 623 const Register stacktrace_reg = kStackTraceObjectReg; |
| 634 #else | 624 #else |
| 635 const intptr_t exception_reg = | 625 const intptr_t exception_reg = |
| 636 LocalVarIndex(0, catch_entry->exception_var().index()); | 626 LocalVarIndex(0, catch_entry->exception_var().index()); |
| 637 const intptr_t stacktrace_reg = | 627 const intptr_t stacktrace_reg = |
| 638 LocalVarIndex(0, catch_entry->stacktrace_var().index()); | 628 LocalVarIndex(0, catch_entry->stacktrace_var().index()); |
| 639 #endif | 629 #endif |
| 640 BlockLocation(Location::RegisterLocation(exception_reg), | 630 BlockLocation(Location::RegisterLocation(exception_reg), start, |
| 641 start, | |
| 642 ToInstructionEnd(start)); | 631 ToInstructionEnd(start)); |
| 643 BlockLocation(Location::RegisterLocation(stacktrace_reg), | 632 BlockLocation(Location::RegisterLocation(stacktrace_reg), start, |
| 644 start, | |
| 645 ToInstructionEnd(start)); | 633 ToInstructionEnd(start)); |
| 646 } | 634 } |
| 647 } | 635 } |
| 648 | 636 |
| 649 // Process incoming parameters and constants. Do this after all other | 637 // Process incoming parameters and constants. Do this after all other |
| 650 // instructions so that safepoints for all calls have already been found. | 638 // instructions so that safepoints for all calls have already been found. |
| 651 GraphEntryInstr* graph_entry = flow_graph_.graph_entry(); | 639 GraphEntryInstr* graph_entry = flow_graph_.graph_entry(); |
| 652 for (intptr_t i = 0; i < graph_entry->initial_definitions()->length(); i++) { | 640 for (intptr_t i = 0; i < graph_entry->initial_definitions()->length(); i++) { |
| 653 Definition* defn = (*graph_entry->initial_definitions())[i]; | 641 Definition* defn = (*graph_entry->initial_definitions())[i]; |
| 654 ASSERT(!defn->HasPairRepresentation()); | 642 ASSERT(!defn->HasPairRepresentation()); |
| (...skipping 32 matching lines...) |
| 687 } else { | 675 } else { |
| 688 ConstantInstr* constant = defn->AsConstant(); | 676 ConstantInstr* constant = defn->AsConstant(); |
| 689 ASSERT(constant != NULL); | 677 ASSERT(constant != NULL); |
| 690 range->set_assigned_location(Location::Constant(constant)); | 678 range->set_assigned_location(Location::Constant(constant)); |
| 691 range->set_spill_slot(Location::Constant(constant)); | 679 range->set_spill_slot(Location::Constant(constant)); |
| 692 AssignSafepoints(defn, range); | 680 AssignSafepoints(defn, range); |
| 693 range->finger()->Initialize(range); | 681 range->finger()->Initialize(range); |
| 694 UsePosition* use = | 682 UsePosition* use = |
| 695 range->finger()->FirstRegisterBeneficialUse(block->start_pos()); | 683 range->finger()->FirstRegisterBeneficialUse(block->start_pos()); |
| 696 if (use != NULL) { | 684 if (use != NULL) { |
| 697 LiveRange* tail = | 685 LiveRange* tail = SplitBetween(range, block->start_pos(), use->pos()); |
| 698 SplitBetween(range, block->start_pos(), use->pos()); | |
| 699 // Parameters and constants are tagged, so allocated to CPU registers. | 686 // Parameters and constants are tagged, so allocated to CPU registers. |
| 700 ASSERT(constant->representation() == kTagged); | 687 ASSERT(constant->representation() == kTagged); |
| 701 CompleteRange(tail, Location::kRegister); | 688 CompleteRange(tail, Location::kRegister); |
| 702 } | 689 } |
| 703 ConvertAllUses(range); | 690 ConvertAllUses(range); |
| 704 } | 691 } |
| 705 return; | 692 return; |
| 706 } | 693 } |
| 707 #endif | 694 #endif |
| 708 | 695 |
| (...skipping 17 matching lines...) |
| 726 ASSERT(param->base_reg() == FPREG); | 713 ASSERT(param->base_reg() == FPREG); |
| 727 if (slot_index >= 0) { | 714 if (slot_index >= 0) { |
| 728 AssignSafepoints(defn, range); | 715 AssignSafepoints(defn, range); |
| 729 range->finger()->Initialize(range); | 716 range->finger()->Initialize(range); |
| 730 range->set_assigned_location(Location::RegisterLocation(slot_index)); | 717 range->set_assigned_location(Location::RegisterLocation(slot_index)); |
| 731 SplitInitialDefinitionAt(range, kNormalEntryPos); | 718 SplitInitialDefinitionAt(range, kNormalEntryPos); |
| 732 ConvertAllUses(range); | 719 ConvertAllUses(range); |
| 733 return; | 720 return; |
| 734 } | 721 } |
| 735 #endif // defined(TARGET_ARCH_DBC) | 722 #endif // defined(TARGET_ARCH_DBC) |
| 736 range->set_assigned_location(Location::StackSlot(slot_index, | 723 range->set_assigned_location( |
| 737 param->base_reg())); | 724 Location::StackSlot(slot_index, param->base_reg())); |
| 738 range->set_spill_slot(Location::StackSlot(slot_index, | 725 range->set_spill_slot(Location::StackSlot(slot_index, param->base_reg())); |
| 739 param->base_reg())); | |
| 740 | 726 |
| 741 } else if (defn->IsCurrentContext()) { | 727 } else if (defn->IsCurrentContext()) { |
| 742 #if !defined(TARGET_ARCH_DBC) | 728 #if !defined(TARGET_ARCH_DBC) |
| 743 const Register context_reg = CTX; | 729 const Register context_reg = CTX; |
| 744 #else | 730 #else |
| 745 const intptr_t context_reg = flow_graph_.num_copied_params(); | 731 const intptr_t context_reg = flow_graph_.num_copied_params(); |
| 746 #endif | 732 #endif |
| 747 | 733 |
| 748 AssignSafepoints(defn, range); | 734 AssignSafepoints(defn, range); |
| 749 range->finger()->Initialize(range); | 735 range->finger()->Initialize(range); |
| 750 range->set_assigned_location(Location::RegisterLocation(context_reg)); | 736 range->set_assigned_location(Location::RegisterLocation(context_reg)); |
| 751 if (range->End() > kNormalEntryPos) { | 737 if (range->End() > kNormalEntryPos) { |
| 752 LiveRange* tail = range->SplitAt(kNormalEntryPos); | 738 LiveRange* tail = range->SplitAt(kNormalEntryPos); |
| 753 CompleteRange(tail, Location::kRegister); | 739 CompleteRange(tail, Location::kRegister); |
| 754 } | 740 } |
| 755 ConvertAllUses(range); | 741 ConvertAllUses(range); |
| 756 return; | 742 return; |
| 757 } else { | 743 } else { |
| 758 ConstantInstr* constant = defn->AsConstant(); | 744 ConstantInstr* constant = defn->AsConstant(); |
| 759 ASSERT(constant != NULL); | 745 ASSERT(constant != NULL); |
| 760 range->set_assigned_location(Location::Constant(constant)); | 746 range->set_assigned_location(Location::Constant(constant)); |
| 761 range->set_spill_slot(Location::Constant(constant)); | 747 range->set_spill_slot(Location::Constant(constant)); |
| 762 } | 748 } |
| 763 AssignSafepoints(defn, range); | 749 AssignSafepoints(defn, range); |
| 764 range->finger()->Initialize(range); | 750 range->finger()->Initialize(range); |
| 765 UsePosition* use = | 751 UsePosition* use = |
| 766 range->finger()->FirstRegisterBeneficialUse(block->start_pos()); | 752 range->finger()->FirstRegisterBeneficialUse(block->start_pos()); |
| 767 if (use != NULL) { | 753 if (use != NULL) { |
| 768 LiveRange* tail = | 754 LiveRange* tail = SplitBetween(range, block->start_pos(), use->pos()); |
| 769 SplitBetween(range, block->start_pos(), use->pos()); | |
| 770 // Parameters and constants are tagged, so allocated to CPU registers. | 755 // Parameters and constants are tagged, so allocated to CPU registers. |
| 771 CompleteRange(tail, Location::kRegister); | 756 CompleteRange(tail, Location::kRegister); |
| 772 } | 757 } |
| 773 ConvertAllUses(range); | 758 ConvertAllUses(range); |
| 774 if (defn->IsParameter() && (range->spill_slot().stack_index() >= 0)) { | 759 if (defn->IsParameter() && (range->spill_slot().stack_index() >= 0)) { |
| 775 // Parameters above the frame pointer consume spill slots and are marked | 760 // Parameters above the frame pointer consume spill slots and are marked |
| 776 // in stack maps. | 761 // in stack maps. |
| 777 spill_slots_.Add(range_end); | 762 spill_slots_.Add(range_end); |
| 778 quad_spill_slots_.Add(false); | 763 quad_spill_slots_.Add(false); |
| 779 untagged_spill_slots_.Add(false); | 764 untagged_spill_slots_.Add(false); |
| (...skipping 55 matching lines...) |
| 835 // | 820 // |
| 836 // i i' | 821 // i i' |
| 837 // value --*--) | 822 // value --*--) |
| 838 // | 823 // |
| 839 // can be read as: use interval for value starts somewhere before instruction | 824 // can be read as: use interval for value starts somewhere before instruction |
| 840 // and extends until currently processed instruction, there is a use of value | 825 // and extends until currently processed instruction, there is a use of value |
| 841 // at the start of the instruction. | 826 // at the start of the instruction. |
| 842 // | 827 // |
| 843 | 828 |
| 844 Instruction* FlowGraphAllocator::ConnectOutgoingPhiMoves( | 829 Instruction* FlowGraphAllocator::ConnectOutgoingPhiMoves( |
| 845 BlockEntryInstr* block, BitVector* interfere_at_backedge) { | 830 BlockEntryInstr* block, |
| | 831 BitVector* interfere_at_backedge) { |
| 846 Instruction* last = block->last_instruction(); | 832 Instruction* last = block->last_instruction(); |
| 847 | 833 |
| 848 GotoInstr* goto_instr = last->AsGoto(); | 834 GotoInstr* goto_instr = last->AsGoto(); |
| 849 if (goto_instr == NULL) return last; | 835 if (goto_instr == NULL) return last; |
| 850 | 836 |
| 851 // If we have a parallel move here then the successor block must be a | 837 // If we have a parallel move here then the successor block must be a |
| 852 // join with phis. The phi inputs contribute uses to each predecessor | 838 // join with phis. The phi inputs contribute uses to each predecessor |
| 853 // block (and the phi outputs contribute definitions in the successor | 839 // block (and the phi outputs contribute definitions in the successor |
| 854 // block). | 840 // block). |
| 855 if (!goto_instr->HasParallelMove()) return goto_instr->previous(); | 841 if (!goto_instr->HasParallelMove()) return goto_instr->previous(); |
| (...skipping 26 matching lines...) |
| 882 // | 868 // |
| 883 // g g' | 869 // g g' |
| 884 // value --* | 870 // value --* |
| 885 // | 871 // |
| 886 intptr_t vreg = val->definition()->ssa_temp_index(); | 872 intptr_t vreg = val->definition()->ssa_temp_index(); |
| 887 LiveRange* range = GetLiveRange(vreg); | 873 LiveRange* range = GetLiveRange(vreg); |
| 888 if (interfere_at_backedge != NULL) interfere_at_backedge->Add(vreg); | 874 if (interfere_at_backedge != NULL) interfere_at_backedge->Add(vreg); |
| 889 | 875 |
| 890 range->AddUseInterval(block->start_pos(), pos); | 876 range->AddUseInterval(block->start_pos(), pos); |
| 891 range->AddHintedUse( | 877 range->AddHintedUse( |
| 892 pos, | 878 pos, move->src_slot(), |
| 893 move->src_slot(), | |
| 894 GetLiveRange(phi->ssa_temp_index())->assigned_location_slot()); | 879 GetLiveRange(phi->ssa_temp_index())->assigned_location_slot()); |
| 895 move->set_src(Location::PrefersRegister()); | 880 move->set_src(Location::PrefersRegister()); |
| 896 | 881 |
| 897 if (val->definition()->HasPairRepresentation()) { | 882 if (val->definition()->HasPairRepresentation()) { |
| 898 move = parallel_move->MoveOperandsAt(move_index++); | 883 move = parallel_move->MoveOperandsAt(move_index++); |
| 899 vreg = ToSecondPairVreg(vreg); | 884 vreg = ToSecondPairVreg(vreg); |
| 900 range = GetLiveRange(vreg); | 885 range = GetLiveRange(vreg); |
| 901 if (interfere_at_backedge != NULL) { | 886 if (interfere_at_backedge != NULL) { |
| 902 interfere_at_backedge->Add(vreg); | 887 interfere_at_backedge->Add(vreg); |
| 903 } | 888 } |
| 904 range->AddUseInterval(block->start_pos(), pos); | 889 range->AddUseInterval(block->start_pos(), pos); |
| 905 range->AddHintedUse( | 890 range->AddHintedUse(pos, move->src_slot(), |
| 906 pos, | 891 GetLiveRange(ToSecondPairVreg(phi->ssa_temp_index())) |
| 907 move->src_slot(), | 892 ->assigned_location_slot()); |
| 908 GetLiveRange(ToSecondPairVreg( | |
| 909 phi->ssa_temp_index()))->assigned_location_slot()); | |
| 910 move->set_src(Location::PrefersRegister()); | 893 move->set_src(Location::PrefersRegister()); |
| 911 } | 894 } |
| 912 } | 895 } |
| 913 | 896 |
| 914 // Begin backward iteration with the instruction before the parallel | 897 // Begin backward iteration with the instruction before the parallel |
| 915 // move. | 898 // move. |
| 916 return goto_instr->previous(); | 899 return goto_instr->previous(); |
| 917 } | 900 } |
| 918 | 901 |
| 919 | 902 |
| (...skipping 117 matching lines...) |
| 1037 PairLocation* location_pair = locations[i].AsPairLocation(); | 1020 PairLocation* location_pair = locations[i].AsPairLocation(); |
| 1038 { | 1021 { |
| 1039 // First live range. | 1022 // First live range. |
| 1040 LiveRange* range = GetLiveRange(def->ssa_temp_index()); | 1023 LiveRange* range = GetLiveRange(def->ssa_temp_index()); |
| 1041 range->AddUseInterval(block_start_pos, use_pos); | 1024 range->AddUseInterval(block_start_pos, use_pos); |
| 1042 range->AddUse(use_pos, location_pair->SlotAt(0)); | 1025 range->AddUse(use_pos, location_pair->SlotAt(0)); |
| 1043 } | 1026 } |
| 1044 { | 1027 { |
| 1045 // Second live range. | 1028 // Second live range. |
| 1046 LiveRange* range = | 1029 LiveRange* range = |
| 1047 GetLiveRange(ToSecondPairVreg(def->ssa_temp_index())); | 1030 GetLiveRange(ToSecondPairVreg(def->ssa_temp_index())); |
| 1048 range->AddUseInterval(block_start_pos, use_pos); | 1031 range->AddUseInterval(block_start_pos, use_pos); |
| 1049 range->AddUse(use_pos, location_pair->SlotAt(1)); | 1032 range->AddUse(use_pos, location_pair->SlotAt(1)); |
| 1050 } | 1033 } |
| 1051 } else { | 1034 } else { |
| 1052 LiveRange* range = GetLiveRange(def->ssa_temp_index()); | 1035 LiveRange* range = GetLiveRange(def->ssa_temp_index()); |
| 1053 range->AddUseInterval(block_start_pos, use_pos); | 1036 range->AddUseInterval(block_start_pos, use_pos); |
| 1054 range->AddUse(use_pos, &locations[i]); | 1037 range->AddUse(use_pos, &locations[i]); |
| 1055 } | 1038 } |
| 1056 } | 1039 } |
| 1057 | 1040 |
| (...skipping 38 matching lines...) |
| 1096 } | 1079 } |
| 1097 { | 1080 { |
| 1098 // Second live range. | 1081 // Second live range. |
| 1099 LiveRange* range = | 1082 LiveRange* range = |
| 1100 GetLiveRange(ToSecondPairVreg(def->ssa_temp_index())); | 1083 GetLiveRange(ToSecondPairVreg(def->ssa_temp_index())); |
| 1101 range->AddUseInterval(block_start_pos, use_pos); | 1084 range->AddUseInterval(block_start_pos, use_pos); |
| 1102 range->AddUse(use_pos, location_pair->SlotAt(1)); | 1085 range->AddUse(use_pos, location_pair->SlotAt(1)); |
| 1103 } | 1086 } |
| 1104 } else if (def->IsMaterializeObject()) { | 1087 } else if (def->IsMaterializeObject()) { |
| 1105 locations[i] = Location::NoLocation(); | 1088 locations[i] = Location::NoLocation(); |
| 1106 ProcessMaterializationUses( | 1089 ProcessMaterializationUses(block, block_start_pos, use_pos, |
| 1107 block, block_start_pos, use_pos, def->AsMaterializeObject()); | 1090 def->AsMaterializeObject()); |
| 1108 } else { | 1091 } else { |
| 1109 locations[i] = Location::Any(); | 1092 locations[i] = Location::Any(); |
| 1110 LiveRange* range = GetLiveRange(def->ssa_temp_index()); | 1093 LiveRange* range = GetLiveRange(def->ssa_temp_index()); |
| 1111 range->AddUseInterval(block_start_pos, use_pos); | 1094 range->AddUseInterval(block_start_pos, use_pos); |
| 1112 range->AddUse(use_pos, &locations[i]); | 1095 range->AddUse(use_pos, &locations[i]); |
| 1113 } | 1096 } |
| 1114 } | 1097 } |
| 1115 } | 1098 } |
| 1116 | 1099 |
| 1117 | 1100 |
| (...skipping 12 matching lines...) |
| 1130 // Input is expected in a fixed register. Expected shape of | 1113 // Input is expected in a fixed register. Expected shape of |
| 1131 // live ranges: | 1114 // live ranges: |
| 1132 // | 1115 // |
| 1133 // j' i i' | 1116 // j' i i' |
| 1134 // value --* | 1117 // value --* |
| 1135 // register [-----) | 1118 // register [-----) |
| 1136 // | 1119 // |
| 1137 if (live_registers != NULL) { | 1120 if (live_registers != NULL) { |
| 1138 live_registers->Add(*in_ref, range->representation()); | 1121 live_registers->Add(*in_ref, range->representation()); |
| 1139 } | 1122 } |
| 1140 MoveOperands* move = | 1123 MoveOperands* move = AddMoveAt(pos - 1, *in_ref, Location::Any()); |
| 1141 AddMoveAt(pos - 1, *in_ref, Location::Any()); | |
| 1142 BlockLocation(*in_ref, pos - 1, pos + 1); | 1124 BlockLocation(*in_ref, pos - 1, pos + 1); |
| 1143 range->AddUseInterval(block->start_pos(), pos - 1); | 1125 range->AddUseInterval(block->start_pos(), pos - 1); |
| 1144 range->AddHintedUse(pos - 1, move->src_slot(), in_ref); | 1126 range->AddHintedUse(pos - 1, move->src_slot(), in_ref); |
| 1145 } else if (in_ref->IsUnallocated()) { | 1127 } else if (in_ref->IsUnallocated()) { |
| 1146 if (in_ref->policy() == Location::kWritableRegister) { | 1128 if (in_ref->policy() == Location::kWritableRegister) { |
| 1147 // Writable unallocated input. Expected shape of | 1129 // Writable unallocated input. Expected shape of |
| 1148 // live ranges: | 1130 // live ranges: |
| 1149 // | 1131 // |
| 1150 // i i' | 1132 // i i' |
| 1151 // value --* | 1133 // value --* |
| 1152 // temp [--) | 1134 // temp [--) |
| 1153 MoveOperands* move = AddMoveAt(pos, | 1135 MoveOperands* move = AddMoveAt(pos, Location::RequiresRegister(), |
| 1154 Location::RequiresRegister(), | |
| 1155 Location::PrefersRegister()); | 1136 Location::PrefersRegister()); |
| 1156 | 1137 |
| 1157 // Add uses to the live range of the input. | 1138 // Add uses to the live range of the input. |
| 1158 range->AddUseInterval(block->start_pos(), pos); | 1139 range->AddUseInterval(block->start_pos(), pos); |
| 1159 range->AddUse(pos, move->src_slot()); | 1140 range->AddUse(pos, move->src_slot()); |
| 1160 | 1141 |
| 1161 // Create live range for the temporary. | 1142 // Create live range for the temporary. |
| 1162 LiveRange* temp = MakeLiveRangeForTemporary(); | 1143 LiveRange* temp = MakeLiveRangeForTemporary(); |
| 1163 temp->AddUseInterval(pos, pos + 1); | 1144 temp->AddUseInterval(pos, pos + 1); |
| 1164 temp->AddHintedUse(pos, in_ref, move->src_slot()); | 1145 temp->AddHintedUse(pos, in_ref, move->src_slot()); |
| (...skipping 24 matching lines...) |
| 1189 bool output_same_as_first_input, | 1170 bool output_same_as_first_input, |
| 1190 Location* in_ref, | 1171 Location* in_ref, |
| 1191 Definition* input, | 1172 Definition* input, |
| 1192 intptr_t input_vreg, | 1173 intptr_t input_vreg, |
| 1193 BitVector* interference_set) { | 1174 BitVector* interference_set) { |
| 1194 ASSERT(out != NULL); | 1175 ASSERT(out != NULL); |
| 1195 ASSERT(!out->IsPairLocation()); | 1176 ASSERT(!out->IsPairLocation()); |
| 1196 ASSERT(def != NULL); | 1177 ASSERT(def != NULL); |
| 1197 ASSERT(block != NULL); | 1178 ASSERT(block != NULL); |
| 1198 | 1179 |
| 1199 LiveRange* range = vreg >= 0 ? | 1180 LiveRange* range = |
| 1200 GetLiveRange(vreg) : MakeLiveRangeForTemporary(); | 1181 vreg >= 0 ? GetLiveRange(vreg) : MakeLiveRangeForTemporary(); |
| 1201 | 1182 |
| 1202 // Process output and finalize its liverange. | 1183 // Process output and finalize its liverange. |
| 1203 if (out->IsMachineRegister()) { | 1184 if (out->IsMachineRegister()) { |
| 1204 // Fixed output location. Expected shape of live range: | 1185 // Fixed output location. Expected shape of live range: |
| 1205 // | 1186 // |
| 1206 // i i' j j' | 1187 // i i' j j' |
| 1207 // register [--) | 1188 // register [--) |
| 1208 // output [------- | 1189 // output [------- |
| 1209 // | 1190 // |
| 1210 BlockLocation(*out, pos, pos + 1); | 1191 BlockLocation(*out, pos, pos + 1); |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1242 // start. Expected shape of live ranges: | 1223 // start. Expected shape of live ranges: |
| 1243 // | 1224 // |
| 1244 // i i' | 1225 // i i' |
| 1245 // input #0 --* | 1226 // input #0 --* |
| 1246 // output [---- | 1227 // output [---- |
| 1247 // | 1228 // |
| 1248 ASSERT(in_ref->Equals(Location::RequiresRegister()) || | 1229 ASSERT(in_ref->Equals(Location::RequiresRegister()) || |
| 1249 in_ref->Equals(Location::RequiresFpuRegister())); | 1230 in_ref->Equals(Location::RequiresFpuRegister())); |
| 1250 *out = *in_ref; | 1231 *out = *in_ref; |
| 1251 // Create move that will copy value between input and output. | 1232 // Create move that will copy value between input and output. |
| 1252 MoveOperands* move = AddMoveAt(pos, | 1233 MoveOperands* move = |
| 1253 Location::RequiresRegister(), | 1234 AddMoveAt(pos, Location::RequiresRegister(), Location::Any()); |
| 1254 Location::Any()); | |
| 1255 | 1235 |
| 1256 // Add uses to the live range of the input. | 1236 // Add uses to the live range of the input. |
| 1257 LiveRange* input_range = GetLiveRange(input_vreg); | 1237 LiveRange* input_range = GetLiveRange(input_vreg); |
| 1258 input_range->AddUseInterval(block->start_pos(), pos); | 1238 input_range->AddUseInterval(block->start_pos(), pos); |
| 1259 input_range->AddUse(pos, move->src_slot()); | 1239 input_range->AddUse(pos, move->src_slot()); |
| 1260 | 1240 |
| 1261 // Shorten output live range to the point of definition and add both input | 1241 // Shorten output live range to the point of definition and add both input |
| 1262 // and output uses slots to be filled by allocator. | 1242 // and output uses slots to be filled by allocator. |
| 1263 range->DefineAt(pos); | 1243 range->DefineAt(pos); |
| 1264 range->AddHintedUse(pos, out, move->src_slot()); | 1244 range->AddHintedUse(pos, out, move->src_slot()); |
| 1265 range->AddUse(pos, move->dest_slot()); | 1245 range->AddUse(pos, move->dest_slot()); |
| 1266 range->AddUse(pos, in_ref); | 1246 range->AddUse(pos, in_ref); |
| 1267 | 1247 |
| 1268 if ((interference_set != NULL) && | 1248 if ((interference_set != NULL) && (range->vreg() >= 0) && |
| 1269 (range->vreg() >= 0) && | |
| 1270 interference_set->Contains(range->vreg())) { | 1249 interference_set->Contains(range->vreg())) { |
| 1271 interference_set->Add(input->ssa_temp_index()); | 1250 interference_set->Add(input->ssa_temp_index()); |
| 1272 } | 1251 } |
| 1273 } else { | 1252 } else { |
| 1274 // Normal unallocated location that requires a register. Expected shape of | 1253 // Normal unallocated location that requires a register. Expected shape of |
| 1275 // live range: | 1254 // live range: |
| 1276 // | 1255 // |
| 1277 // i i' | 1256 // i i' |
| 1278 // output [------- | 1257 // output [------- |
| 1279 // | 1258 // |
| (...skipping 14 matching lines...) |
| 1294 // Create and update live ranges corresponding to instruction's inputs, | 1273 // Create and update live ranges corresponding to instruction's inputs, |
| 1295 // temporaries and output. | 1274 // temporaries and output. |
| 1296 void FlowGraphAllocator::ProcessOneInstruction(BlockEntryInstr* block, | 1275 void FlowGraphAllocator::ProcessOneInstruction(BlockEntryInstr* block, |
| 1297 Instruction* current, | 1276 Instruction* current, |
| 1298 BitVector* interference_set) { | 1277 BitVector* interference_set) { |
| 1299 LocationSummary* locs = current->locs(); | 1278 LocationSummary* locs = current->locs(); |
| 1300 | 1279 |
| 1301 Definition* def = current->AsDefinition(); | 1280 Definition* def = current->AsDefinition(); |
| 1302 if ((def != NULL) && (def->AsConstant() != NULL)) { | 1281 if ((def != NULL) && (def->AsConstant() != NULL)) { |
| 1303 ASSERT(!def->HasPairRepresentation()); | 1282 ASSERT(!def->HasPairRepresentation()); |
| 1304 LiveRange* range = (def->ssa_temp_index() != -1) ? | 1283 LiveRange* range = (def->ssa_temp_index() != -1) |
| 1305 GetLiveRange(def->ssa_temp_index()) : NULL; | 1284 ? GetLiveRange(def->ssa_temp_index()) |
| | 1285 : NULL; |
| 1306 | 1286 |
| 1307 // Drop definitions of constants that have no uses. | 1287 // Drop definitions of constants that have no uses. |
| 1308 if ((range == NULL) || (range->first_use() == NULL)) { | 1288 if ((range == NULL) || (range->first_use() == NULL)) { |
| 1309 locs->set_out(0, Location::NoLocation()); | 1289 locs->set_out(0, Location::NoLocation()); |
| 1310 return; | 1290 return; |
| 1311 } | 1291 } |
| 1312 | 1292 |
| 1313 // If this constant has only unconstrained uses convert them all | 1293 // If this constant has only unconstrained uses convert them all |
| 1314 // to use the constant directly and drop this definition. | 1294 // to use the constant directly and drop this definition. |
| 1315 // TODO(vegorov): improve allocation when we have enough registers to keep | 1295 // TODO(vegorov): improve allocation when we have enough registers to keep |
| (...skipping 45 matching lines...) |
| 1361 Location::RequiresRegister())); | 1341 Location::RequiresRegister())); |
| 1362 } | 1342 } |
| 1363 // Add uses from the deoptimization environment. | 1343 // Add uses from the deoptimization environment. |
| 1364 if (current->env() != NULL) ProcessEnvironmentUses(block, current); | 1344 if (current->env() != NULL) ProcessEnvironmentUses(block, current); |
| 1365 | 1345 |
| 1366 // Process inputs. | 1346 // Process inputs. |
| 1367 // Skip the first input if output is specified with kSameAsFirstInput policy, | 1347 // Skip the first input if output is specified with kSameAsFirstInput policy, |
| 1368 // they will be processed together at the very end. | 1348 // they will be processed together at the very end. |
| 1369 { | 1349 { |
| 1370 for (intptr_t j = output_same_as_first_input ? 1 : 0; | 1350 for (intptr_t j = output_same_as_first_input ? 1 : 0; |
| 1371 j < locs->input_count(); | 1351 j < locs->input_count(); j++) { |
| 1372 j++) { | |
| 1373 // Determine if we are dealing with a value pair, and if so, whether | 1352 // Determine if we are dealing with a value pair, and if so, whether |
| 1374 // the location is the first register or second register. | 1353 // the location is the first register or second register. |
| 1375 Value* input = current->InputAt(j); | 1354 Value* input = current->InputAt(j); |
| 1376 Location* in_ref = locs->in_slot(j); | 1355 Location* in_ref = locs->in_slot(j); |
| 1377 RegisterSet* live_registers = NULL; | 1356 RegisterSet* live_registers = NULL; |
| 1378 if (locs->HasCallOnSlowPath()) { | 1357 if (locs->HasCallOnSlowPath()) { |
| 1379 live_registers = locs->live_registers(); | 1358 live_registers = locs->live_registers(); |
| 1380 } | 1359 } |
| 1381 if (in_ref->IsPairLocation()) { | 1360 if (in_ref->IsPairLocation()) { |
| 1382 ASSERT(input->definition()->HasPairRepresentation()); | 1361 ASSERT(input->definition()->HasPairRepresentation()); |
| 1383 PairLocation* pair = in_ref->AsPairLocation(); | 1362 PairLocation* pair = in_ref->AsPairLocation(); |
| 1384 const intptr_t vreg = input->definition()->ssa_temp_index(); | 1363 const intptr_t vreg = input->definition()->ssa_temp_index(); |
| 1385 // Each element of the pair is assigned its own virtual register number | 1364 // Each element of the pair is assigned its own virtual register number |
| 1386 // and is allocated its own LiveRange. | 1365 // and is allocated its own LiveRange. |
| 1387 ProcessOneInput(block, pos, pair->SlotAt(0), | 1366 ProcessOneInput(block, pos, pair->SlotAt(0), input, vreg, |
| 1388 input, vreg, live_registers); | 1367 live_registers); |
| 1389 ProcessOneInput(block, pos, pair->SlotAt(1), input, | 1368 ProcessOneInput(block, pos, pair->SlotAt(1), input, |
| 1390 ToSecondPairVreg(vreg), live_registers); | 1369 ToSecondPairVreg(vreg), live_registers); |
| 1391 } else { | 1370 } else { |
| 1392 ProcessOneInput(block, pos, in_ref, input, | 1371 ProcessOneInput(block, pos, in_ref, input, |
| 1393 input->definition()->ssa_temp_index(), live_registers); | 1372 input->definition()->ssa_temp_index(), live_registers); |
| 1394 } | 1373 } |
| 1395 } | 1374 } |
| 1396 } | 1375 } |
| 1397 | 1376 |
| 1398 // Process temps. | 1377 // Process temps. |
| (...skipping 12 matching lines...) |
| 1411 } else if (temp.IsUnallocated()) { | 1390 } else if (temp.IsUnallocated()) { |
| 1412 LiveRange* range = MakeLiveRangeForTemporary(); | 1391 LiveRange* range = MakeLiveRangeForTemporary(); |
| 1413 range->AddUseInterval(pos, pos + 1); | 1392 range->AddUseInterval(pos, pos + 1); |
| 1414 range->AddUse(pos, locs->temp_slot(j)); | 1393 range->AddUse(pos, locs->temp_slot(j)); |
| 1415 CompleteRange(range, RegisterKindFromPolicy(temp)); | 1394 CompleteRange(range, RegisterKindFromPolicy(temp)); |
| 1416 } else { | 1395 } else { |
| 1417 UNREACHABLE(); | 1396 UNREACHABLE(); |
| 1418 } | 1397 } |
| 1419 } | 1398 } |
| 1420 | 1399 |
| 1421 // Block all allocatable registers for calls. | 1400 // Block all allocatable registers for calls. |
| 1422 // Note that on DBC registers are always essentially spilled so | 1401 // Note that on DBC registers are always essentially spilled so |
| 1423 // we don't need to block anything. | 1402 // we don't need to block anything. |
| 1424 #if !defined(TARGET_ARCH_DBC) | 1403 #if !defined(TARGET_ARCH_DBC) |
| 1425 if (locs->always_calls()) { | 1404 if (locs->always_calls()) { |
| 1426 // Expected shape of live range: | 1405 // Expected shape of live range: |
| 1427 // | 1406 // |
| 1428 // i i' | 1407 // i i' |
| 1429 // [--) | 1408 // [--) |
| 1430 // | 1409 // |
| 1431 // The stack bitmap describes the position i. | 1410 // The stack bitmap describes the position i. |
| 1432 for (intptr_t reg = 0; reg < kNumberOfCpuRegisters; reg++) { | 1411 for (intptr_t reg = 0; reg < kNumberOfCpuRegisters; reg++) { |
| 1433 BlockLocation(Location::RegisterLocation(static_cast<Register>(reg)), | 1412 BlockLocation(Location::RegisterLocation(static_cast<Register>(reg)), pos, |
| 1434 pos, | |
| 1435 pos + 1); | 1413 pos + 1); |
| 1436 } | 1414 } |
| 1437 | 1415 |
| 1438 for (intptr_t reg = 0; reg < kNumberOfFpuRegisters; reg++) { | 1416 for (intptr_t reg = 0; reg < kNumberOfFpuRegisters; reg++) { |
| 1439 BlockLocation( | 1417 BlockLocation( |
| 1440 Location::FpuRegisterLocation(static_cast<FpuRegister>(reg)), | 1418 Location::FpuRegisterLocation(static_cast<FpuRegister>(reg)), pos, |
| 1441 pos, | |
| 1442 pos + 1); | 1419 pos + 1); |
| 1443 } | 1420 } |
| 1444 | 1421 |
| 1445 | 1422 |
| 1446 #if defined(DEBUG) | 1423 #if defined(DEBUG) |
| 1447 // Verify that temps, inputs and output were specified as fixed | 1424 // Verify that temps, inputs and output were specified as fixed |
| 1448 // locations. Every register is blocked now so attempt to | 1425 // locations. Every register is blocked now so attempt to |
| 1449 // allocate will not succeed. | 1426 // allocate will not succeed. |
| 1450 for (intptr_t j = 0; j < locs->temp_count(); j++) { | 1427 for (intptr_t j = 0; j < locs->temp_count(); j++) { |
| 1451 ASSERT(!locs->temp(j).IsPairLocation()); | 1428 ASSERT(!locs->temp(j).IsPairLocation()); |
| (...skipping 40 matching lines...) |
| 1492 if (out->IsPairLocation()) { | 1469 if (out->IsPairLocation()) { |
| 1493 ASSERT(def->HasPairRepresentation()); | 1470 ASSERT(def->HasPairRepresentation()); |
| 1494 PairLocation* pair = out->AsPairLocation(); | 1471 PairLocation* pair = out->AsPairLocation(); |
| 1495 if (output_same_as_first_input) { | 1472 if (output_same_as_first_input) { |
| 1496 ASSERT(locs->in_slot(0)->IsPairLocation()); | 1473 ASSERT(locs->in_slot(0)->IsPairLocation()); |
| 1497 PairLocation* in_pair = locs->in_slot(0)->AsPairLocation(); | 1474 PairLocation* in_pair = locs->in_slot(0)->AsPairLocation(); |
| 1498 Definition* input = current->InputAt(0)->definition(); | 1475 Definition* input = current->InputAt(0)->definition(); |
| 1499 ASSERT(input->HasPairRepresentation()); | 1476 ASSERT(input->HasPairRepresentation()); |
| 1500 // Each element of the pair is assigned its own virtual register number | 1477 // Each element of the pair is assigned its own virtual register number |
| 1501 // and is allocated its own LiveRange. | 1478 // and is allocated its own LiveRange. |
| 1502 ProcessOneOutput(block, pos, // BlockEntry, seq. | 1479 ProcessOneOutput(block, pos, // BlockEntry, seq. |
| 1503 pair->SlotAt(0), def, // (output) Location, Definition. | 1480 pair->SlotAt(0), def, // (output) Location, Definition. |
| 1504 def->ssa_temp_index(), // (output) virtual register. | 1481 def->ssa_temp_index(), // (output) virtual register. |
| 1505 true, // output mapped to first input. | 1482 true, // output mapped to first input. |
| 1506 in_pair->SlotAt(0), input, // (input) Location, Def. | 1483 in_pair->SlotAt(0), input, // (input) Location, Def. |
| 1507 input->ssa_temp_index(), // (input) virtual register. | 1484 input->ssa_temp_index(), // (input) virtual register. |
| 1508 interference_set); | 1485 interference_set); |
| 1509 ProcessOneOutput(block, pos, | 1486 ProcessOneOutput( |
| 1510 pair->SlotAt(1), def, | 1487 block, pos, pair->SlotAt(1), def, |
| 1511 ToSecondPairVreg(def->ssa_temp_index()), | 1488 ToSecondPairVreg(def->ssa_temp_index()), true, in_pair->SlotAt(1), |
| 1512 true, | 1489 input, ToSecondPairVreg(input->ssa_temp_index()), interference_set); |
| 1513 in_pair->SlotAt(1), input, | |
| 1514 ToSecondPairVreg(input->ssa_temp_index()), | |
| 1515 interference_set); | |
| 1516 } else { | 1490 } else { |
| 1517 // Each element of the pair is assigned its own virtual register number | 1491 // Each element of the pair is assigned its own virtual register number |
| 1518 // and is allocated its own LiveRange. | 1492 // and is allocated its own LiveRange. |
| 1519 ProcessOneOutput(block, pos, | 1493 ProcessOneOutput(block, pos, pair->SlotAt(0), def, def->ssa_temp_index(), |
| 1520 pair->SlotAt(0), def, | 1494 false, // output is not mapped to first input. |
| 1521 def->ssa_temp_index(), | 1495 NULL, NULL, -1, // First input not needed. |
| 1522 false, // output is not mapped to first input. | |
| 1523 NULL, NULL, -1, // First input not needed. | |
| 1524 interference_set); | 1496 interference_set); |
| 1525 ProcessOneOutput(block, pos, | 1497 ProcessOneOutput(block, pos, pair->SlotAt(1), def, |
| 1526 pair->SlotAt(1), def, | 1498 ToSecondPairVreg(def->ssa_temp_index()), false, NULL, |
| 1527 ToSecondPairVreg(def->ssa_temp_index()), | 1499 NULL, -1, interference_set); |
| 1528 false, | |
| 1529 NULL, NULL, -1, | |
| 1530 interference_set); | |
| 1531 } | 1500 } |
| 1532 } else { | 1501 } else { |
| 1533 if (output_same_as_first_input) { | 1502 if (output_same_as_first_input) { |
| 1534 Location* in_ref = locs->in_slot(0); | 1503 Location* in_ref = locs->in_slot(0); |
| 1535 Definition* input = current->InputAt(0)->definition(); | 1504 Definition* input = current->InputAt(0)->definition(); |
| 1536 ASSERT(!in_ref->IsPairLocation()); | 1505 ASSERT(!in_ref->IsPairLocation()); |
| 1537 ProcessOneOutput(block, pos, // BlockEntry, Instruction, seq. | 1506 ProcessOneOutput(block, pos, // BlockEntry, Instruction, seq. |
| 1538 out, def, // (output) Location, Definition. | 1507 out, def, // (output) Location, Definition. |
| 1539 def->ssa_temp_index(), // (output) virtual register. | 1508 def->ssa_temp_index(), // (output) virtual register. |
| 1540 true, // output mapped to first input. | 1509 true, // output mapped to first input. |
| 1541 in_ref, input, // (input) Location, Def. | 1510 in_ref, input, // (input) Location, Def. |
| 1542 input->ssa_temp_index(), // (input) virtual register. | 1511 input->ssa_temp_index(), // (input) virtual register. |
| 1543 interference_set); | 1512 interference_set); |
| 1544 } else { | 1513 } else { |
| 1545 ProcessOneOutput(block, pos, | 1514 ProcessOneOutput(block, pos, out, def, def->ssa_temp_index(), |
| 1546 out, def, | 1515 false, // output is not mapped to first input. |
| 1547 def->ssa_temp_index(), | 1516 NULL, NULL, -1, // First input not needed. |
| 1548 false, // output is not mapped to first input. | |
| 1549 NULL, NULL, -1, // First input not needed. | |
| 1550 interference_set); | 1517 interference_set); |
| 1551 } | 1518 } |
| 1552 } | 1519 } |
| 1553 } | 1520 } |
| 1554 | 1521 |
| 1555 | 1522 |
| 1556 static ParallelMoveInstr* CreateParallelMoveBefore(Instruction* instr, | 1523 static ParallelMoveInstr* CreateParallelMoveBefore(Instruction* instr, |
| 1557 intptr_t pos) { | 1524 intptr_t pos) { |
| 1558 ASSERT(pos > 0); | 1525 ASSERT(pos > 0); |
| 1559 Instruction* prev = instr->previous(); | 1526 Instruction* prev = instr->previous(); |
| (...skipping 100 matching lines...) | |
| 1660 GotoInstr* goto_instr = block->last_instruction()->AsGoto(); | 1627 GotoInstr* goto_instr = block->last_instruction()->AsGoto(); |
| 1661 if (goto_instr != NULL) { | 1628 if (goto_instr != NULL) { |
| 1662 JoinEntryInstr* successor = goto_instr->successor(); | 1629 JoinEntryInstr* successor = goto_instr->successor(); |
| 1663 if (successor->postorder_number() > i) { | 1630 if (successor->postorder_number() > i) { |
| 1664 // This is a back-edge. | 1631 // This is a back-edge. |
| 1665 BlockInfo* successor_info = BlockInfoAt(successor->lifetime_position()); | 1632 BlockInfo* successor_info = BlockInfoAt(successor->lifetime_position()); |
| 1666 ASSERT(successor_info->entry() == successor); | 1633 ASSERT(successor_info->entry() == successor); |
| 1667 if (!successor_info->is_loop_header() && | 1634 if (!successor_info->is_loop_header() && |
| 1668 ((current_loop == NULL) || | 1635 ((current_loop == NULL) || |
| 1669 (current_loop->entry()->postorder_number() > | 1636 (current_loop->entry()->postorder_number() > |
| 1670 successor_info->entry()->postorder_number()))) { | 1637 successor_info->entry()->postorder_number()))) { |
| 1671 ASSERT(successor_info != current_loop); | 1638 ASSERT(successor_info != current_loop); |
| 1672 | 1639 |
| 1673 successor_info->mark_loop_header(); | 1640 successor_info->mark_loop_header(); |
| 1674 successor_info->set_loop_id(loop_id++); | 1641 successor_info->set_loop_id(loop_id++); |
| 1675 successor_info->set_last_block(block); | 1642 successor_info->set_last_block(block); |
| 1676 // For a loop header the loop information points to the outer loop. | 1643 // For a loop header the loop information points to the outer loop. |
| 1677 successor_info->set_loop(current_loop); | 1644 successor_info->set_loop(current_loop); |
| 1678 current_loop = successor_info; | 1645 current_loop = successor_info; |
| 1679 } | 1646 } |
| 1680 } | 1647 } |
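The hunk above relies on the usual postorder trick for loop detection: a goto whose join-entry successor has a higher postorder number than the current block is a back edge, and that successor gets marked as a loop header. A minimal standalone sketch of the test, using hypothetical simplified types rather than the VM's BlockEntryInstr/GotoInstr:

#include <cstddef>

// Sketch only: simplified stand-ins for the VM's block types.
struct SketchBlock {
  int postorder_number;
  SketchBlock* goto_successor;  // NULL if the block does not end in a goto.
  bool is_loop_header;
};

// A goto that jumps "backwards" in postorder closes a loop.
static bool ClosesLoop(const SketchBlock* block) {
  const SketchBlock* succ = block->goto_successor;
  return (succ != NULL) &&
         (succ->postorder_number > block->postorder_number);
}

// Usage: while walking blocks in postorder, mark the targets of back edges.
//   if (ClosesLoop(block)) block->goto_successor->is_loop_header = true;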
| (...skipping 30 matching lines...) | |
| 1711 void AllocationFinger::Initialize(LiveRange* range) { | 1678 void AllocationFinger::Initialize(LiveRange* range) { |
| 1712 first_pending_use_interval_ = range->first_use_interval(); | 1679 first_pending_use_interval_ = range->first_use_interval(); |
| 1713 first_register_use_ = range->first_use(); | 1680 first_register_use_ = range->first_use(); |
| 1714 first_register_beneficial_use_ = range->first_use(); | 1681 first_register_beneficial_use_ = range->first_use(); |
| 1715 first_hinted_use_ = range->first_use(); | 1682 first_hinted_use_ = range->first_use(); |
| 1716 } | 1683 } |
| 1717 | 1684 |
| 1718 | 1685 |
| 1719 bool AllocationFinger::Advance(const intptr_t start) { | 1686 bool AllocationFinger::Advance(const intptr_t start) { |
| 1720 UseInterval* a = first_pending_use_interval_; | 1687 UseInterval* a = first_pending_use_interval_; |
| 1721 while (a != NULL && a->end() <= start) a = a->next(); | 1688 while (a != NULL && a->end() <= start) |
| 1689 a = a->next(); |
| 1722 first_pending_use_interval_ = a; | 1690 first_pending_use_interval_ = a; |
| 1723 return (first_pending_use_interval_ == NULL); | 1691 return (first_pending_use_interval_ == NULL); |
| 1724 } | 1692 } |
| 1725 | 1693 |
| 1726 | 1694 |
| 1727 Location AllocationFinger::FirstHint() { | 1695 Location AllocationFinger::FirstHint() { |
| 1728 UsePosition* use = first_hinted_use_; | 1696 UsePosition* use = first_hinted_use_; |
| 1729 | 1697 |
| 1730 while (use != NULL) { | 1698 while (use != NULL) { |
| 1731 if (use->HasHint()) return use->hint(); | 1699 if (use->HasHint()) return use->hint(); |
| 1732 use = use->next(); | 1700 use = use->next(); |
| 1733 } | 1701 } |
| 1734 | 1702 |
| 1735 return Location::NoLocation(); | 1703 return Location::NoLocation(); |
| 1736 } | 1704 } |
| 1737 | 1705 |
| 1738 | 1706 |
| 1739 static UsePosition* FirstUseAfter(UsePosition* use, intptr_t after) { | 1707 static UsePosition* FirstUseAfter(UsePosition* use, intptr_t after) { |
| 1740 while ((use != NULL) && (use->pos() < after)) { | 1708 while ((use != NULL) && (use->pos() < after)) { |
| 1741 use = use->next(); | 1709 use = use->next(); |
| 1742 } | 1710 } |
| 1743 return use; | 1711 return use; |
| 1744 } | 1712 } |
| 1745 | 1713 |
| 1746 | 1714 |
| 1747 UsePosition* AllocationFinger::FirstRegisterUse(intptr_t after) { | 1715 UsePosition* AllocationFinger::FirstRegisterUse(intptr_t after) { |
| 1748 for (UsePosition* use = FirstUseAfter(first_register_use_, after); | 1716 for (UsePosition* use = FirstUseAfter(first_register_use_, after); |
| 1749 use != NULL; | 1717 use != NULL; use = use->next()) { |
| 1750 use = use->next()) { | |
| 1751 Location* loc = use->location_slot(); | 1718 Location* loc = use->location_slot(); |
| 1752 if (loc->IsUnallocated() && | 1719 if (loc->IsUnallocated() && |
| 1753 ((loc->policy() == Location::kRequiresRegister) || | 1720 ((loc->policy() == Location::kRequiresRegister) || |
| 1754 (loc->policy() == Location::kRequiresFpuRegister))) { | 1721 (loc->policy() == Location::kRequiresFpuRegister))) { |
| 1755 first_register_use_ = use; | 1722 first_register_use_ = use; |
| 1756 return use; | 1723 return use; |
| 1757 } | 1724 } |
| 1758 } | 1725 } |
| 1759 return NULL; | 1726 return NULL; |
| 1760 } | 1727 } |
| 1761 | 1728 |
| 1762 | 1729 |
| 1763 UsePosition* AllocationFinger::FirstRegisterBeneficialUse(intptr_t after) { | 1730 UsePosition* AllocationFinger::FirstRegisterBeneficialUse(intptr_t after) { |
| 1764 for (UsePosition* use = FirstUseAfter(first_register_beneficial_use_, after); | 1731 for (UsePosition* use = FirstUseAfter(first_register_beneficial_use_, after); |
| 1765 use != NULL; | 1732 use != NULL; use = use->next()) { |
| 1766 use = use->next()) { | |
| 1767 Location* loc = use->location_slot(); | 1733 Location* loc = use->location_slot(); |
| 1768 if (loc->IsUnallocated() && loc->IsRegisterBeneficial()) { | 1734 if (loc->IsUnallocated() && loc->IsRegisterBeneficial()) { |
| 1769 first_register_beneficial_use_ = use; | 1735 first_register_beneficial_use_ = use; |
| 1770 return use; | 1736 return use; |
| 1771 } | 1737 } |
| 1772 } | 1738 } |
| 1773 return NULL; | 1739 return NULL; |
| 1774 } | 1740 } |
| 1775 | 1741 |
| 1776 | 1742 |
| (...skipping 39 matching lines...) | |
| 1816 a = a->next(); | 1782 a = a->next(); |
| 1817 } else { | 1783 } else { |
| 1818 u = u->next(); | 1784 u = u->next(); |
| 1819 } | 1785 } |
| 1820 } | 1786 } |
| 1821 | 1787 |
| 1822 return kMaxPosition; | 1788 return kMaxPosition; |
| 1823 } | 1789 } |
| 1824 | 1790 |
| 1825 | 1791 |
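The truncated function above is the two-pointer walk that FirstIntersection performs over a pair of sorted use-interval lists. A self-contained sketch of the same idea, with a hypothetical Interval type instead of the VM's UseInterval (tie-breaking details may differ from the real implementation):

#include <cstddef>
#include <cstdint>

// Sketch only. Intervals are half-open [start, end); each list is sorted
// and non-overlapping.
struct Interval {
  intptr_t start;
  intptr_t end;
  Interval* next;
};

static const intptr_t kMaxPositionSketch = INTPTR_MAX;

static intptr_t FirstIntersectionSketch(Interval* a, Interval* u) {
  while ((a != NULL) && (u != NULL)) {
    // Overlap test for half-open intervals.
    if ((a->start < u->end) && (u->start < a->end)) {
      return (a->start > u->start) ? a->start : u->start;
    }
    // No overlap: the interval that ends first cannot intersect anything
    // later in the other list, so advance it.
    if (a->end <= u->end) {
      a = a->next;
    } else {
      u = u->next;
    }
  }
  return kMaxPositionSketch;  // The lists never meet.
}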
| 1826 template<typename PositionType> | 1792 template <typename PositionType> |
| 1827 PositionType* SplitListOfPositions(PositionType** head, | 1793 PositionType* SplitListOfPositions(PositionType** head, |
| 1828 intptr_t split_pos, | 1794 intptr_t split_pos, |
| 1829 bool split_at_start) { | 1795 bool split_at_start) { |
| 1830 PositionType* last_before_split = NULL; | 1796 PositionType* last_before_split = NULL; |
| 1831 PositionType* pos = *head; | 1797 PositionType* pos = *head; |
| 1832 if (split_at_start) { | 1798 if (split_at_start) { |
| 1833 while ((pos != NULL) && (pos->pos() < split_pos)) { | 1799 while ((pos != NULL) && (pos->pos() < split_pos)) { |
| 1834 last_before_split = pos; | 1800 last_before_split = pos; |
| 1835 pos = pos->next(); | 1801 pos = pos->next(); |
| 1836 } | 1802 } |
| (...skipping 32 matching lines...) | |
| 1869 UseInterval* last_before_split = NULL; | 1835 UseInterval* last_before_split = NULL; |
| 1870 while (interval->end() <= split_pos) { | 1836 while (interval->end() <= split_pos) { |
| 1871 last_before_split = interval; | 1837 last_before_split = interval; |
| 1872 interval = interval->next(); | 1838 interval = interval->next(); |
| 1873 } | 1839 } |
| 1874 | 1840 |
| 1875 const bool split_at_start = (interval->start() == split_pos); | 1841 const bool split_at_start = (interval->start() == split_pos); |
| 1876 | 1842 |
| 1877 UseInterval* first_after_split = interval; | 1843 UseInterval* first_after_split = interval; |
| 1878 if (!split_at_start && interval->Contains(split_pos)) { | 1844 if (!split_at_start && interval->Contains(split_pos)) { |
| 1879 first_after_split = new UseInterval(split_pos, | 1845 first_after_split = |
| 1880 interval->end(), | 1846 new UseInterval(split_pos, interval->end(), interval->next()); |
| 1881 interval->next()); | |
| 1882 interval->end_ = split_pos; | 1847 interval->end_ = split_pos; |
| 1883 interval->next_ = first_after_split; | 1848 interval->next_ = first_after_split; |
| 1884 last_before_split = interval; | 1849 last_before_split = interval; |
| 1885 } | 1850 } |
| 1886 | 1851 |
| 1887 ASSERT(last_before_split != NULL); | 1852 ASSERT(last_before_split != NULL); |
| 1888 ASSERT(last_before_split->next() == first_after_split); | 1853 ASSERT(last_before_split->next() == first_after_split); |
| 1889 ASSERT(last_before_split->end() <= split_pos); | 1854 ASSERT(last_before_split->end() <= split_pos); |
| 1890 ASSERT(split_pos <= first_after_split->start()); | 1855 ASSERT(split_pos <= first_after_split->start()); |
| 1891 | 1856 |
| 1892 UsePosition* first_use_after_split = | 1857 UsePosition* first_use_after_split = |
| 1893 SplitListOfPositions(&uses_, split_pos, split_at_start); | 1858 SplitListOfPositions(&uses_, split_pos, split_at_start); |
| 1894 | 1859 |
| 1895 SafepointPosition* first_safepoint_after_split = | 1860 SafepointPosition* first_safepoint_after_split = |
| 1896 SplitListOfPositions(&first_safepoint_, split_pos, split_at_start); | 1861 SplitListOfPositions(&first_safepoint_, split_pos, split_at_start); |
| 1897 | 1862 |
| 1898 UseInterval* last_use_interval = (last_before_split == last_use_interval_) ? | 1863 UseInterval* last_use_interval = (last_before_split == last_use_interval_) |
| 1899 first_after_split : last_use_interval_; | 1864 ? first_after_split |
| 1900 next_sibling_ = new LiveRange(vreg(), | 1865 : last_use_interval_; |
| 1901 representation(), | 1866 next_sibling_ = new LiveRange(vreg(), representation(), first_use_after_split, |
| 1902 first_use_after_split, | 1867 first_after_split, last_use_interval, |
| 1903 first_after_split, | 1868 first_safepoint_after_split, next_sibling_); |
| 1904 last_use_interval, | |
| 1905 first_safepoint_after_split, | |
| 1906 next_sibling_); | |
| 1907 | 1869 |
| 1908 TRACE_ALLOC(THR_Print(" split sibling [%" Pd ", %" Pd ")\n", | 1870 TRACE_ALLOC(THR_Print(" split sibling [%" Pd ", %" Pd ")\n", |
| 1909 next_sibling_->Start(), next_sibling_->End())); | 1871 next_sibling_->Start(), next_sibling_->End())); |
| 1910 | 1872 |
| 1911 last_use_interval_ = last_before_split; | 1873 last_use_interval_ = last_before_split; |
| 1912 last_use_interval_->next_ = NULL; | 1874 last_use_interval_->next_ = NULL; |
| 1913 | 1875 |
| 1914 if (first_use_after_split != NULL) { | 1876 if (first_use_after_split != NULL) { |
| 1915 finger_.UpdateAfterSplit(first_use_after_split->pos()); | 1877 finger_.UpdateAfterSplit(first_use_after_split->pos()); |
| 1916 } | 1878 } |
| 1917 | 1879 |
| 1918 return next_sibling_; | 1880 return next_sibling_; |
| 1919 } | 1881 } |
| 1920 | 1882 |
| 1921 | 1883 |
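SplitAt above does three things: cut the use-interval chain at split_pos (truncating the interval that straddles it), split the use-position and safepoint lists the same way, and hand everything after the cut to a freshly created sibling range. The interval cut itself, in a hypothetical simplified form:

// Sketch only: splitting one half-open interval [start, end) at split_pos,
// mirroring the "!split_at_start && interval->Contains(split_pos)" case.
struct IntervalPart {
  long start;
  long end;
};

static void CutIntervalAt(long start, long end, long split_pos,
                          IntervalPart* before, IntervalPart* after) {
  // Precondition: start < split_pos && split_pos < end.
  before->start = start;
  before->end = split_pos;   // The original range keeps [start, split_pos).
  after->start = split_pos;  // The new sibling starts with [split_pos, end).
  after->end = end;
}

// Example: cutting [8, 22) at 14 gives [8, 14) for the original range and
// [14, 22) for the sibling, matching the "split sibling" trace emitted above.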
| 1922 LiveRange* FlowGraphAllocator::SplitBetween(LiveRange* range, | 1884 LiveRange* FlowGraphAllocator::SplitBetween(LiveRange* range, |
| 1923 intptr_t from, | 1885 intptr_t from, |
| 1924 intptr_t to) { | 1886 intptr_t to) { |
| 1925 TRACE_ALLOC(THR_Print("split v%" Pd " [%" Pd ", %" Pd | 1887 TRACE_ALLOC(THR_Print("split v%" Pd " [%" Pd ", %" Pd ") between [%" Pd |
| 1926 ") between [%" Pd ", %" Pd ")\n", | 1888 ", %" Pd ")\n", |
| 1927 range->vreg(), range->Start(), range->End(), from, to)); | 1889 range->vreg(), range->Start(), range->End(), from, to)); |
| 1928 | 1890 |
| 1929 intptr_t split_pos = kIllegalPosition; | 1891 intptr_t split_pos = kIllegalPosition; |
| 1930 | 1892 |
| 1931 BlockInfo* split_block = BlockInfoAt(to); | 1893 BlockInfo* split_block = BlockInfoAt(to); |
| 1932 if (from < split_block->entry()->lifetime_position()) { | 1894 if (from < split_block->entry()->lifetime_position()) { |
| 1933 // Interval [from, to) spans multiple blocks. | 1895 // Interval [from, to) spans multiple blocks. |
| 1934 | 1896 |
| 1935 // If the last block is inside a loop, prefer splitting at the outermost loop's | 1897 // If the last block is inside a loop, prefer splitting at the outermost loop's |
| 1936 // header. | 1898 // header. |
| (...skipping 50 matching lines...) | |
| 1987 // be moved outside. | 1949 // be moved outside. |
| 1988 BlockInfo* block_info = BlockInfoAt(from); | 1950 BlockInfo* block_info = BlockInfoAt(from); |
| 1989 if (block_info->is_loop_header() || (block_info->loop() != NULL)) { | 1951 if (block_info->is_loop_header() || (block_info->loop() != NULL)) { |
| 1990 BlockInfo* loop_header = | 1952 BlockInfo* loop_header = |
| 1991 block_info->is_loop_header() ? block_info : block_info->loop(); | 1953 block_info->is_loop_header() ? block_info : block_info->loop(); |
| 1992 | 1954 |
| 1993 if ((range->Start() <= loop_header->entry()->start_pos()) && | 1955 if ((range->Start() <= loop_header->entry()->start_pos()) && |
| 1994 RangeHasOnlyUnconstrainedUsesInLoop(range, loop_header->loop_id())) { | 1956 RangeHasOnlyUnconstrainedUsesInLoop(range, loop_header->loop_id())) { |
| 1995 ASSERT(loop_header->entry()->start_pos() <= from); | 1957 ASSERT(loop_header->entry()->start_pos() <= from); |
| 1996 from = loop_header->entry()->start_pos(); | 1958 from = loop_header->entry()->start_pos(); |
| 1997 TRACE_ALLOC(THR_Print(" moved spill position to loop header %" Pd "\n", | 1959 TRACE_ALLOC( |
| 1998 from)); | 1960 THR_Print(" moved spill position to loop header %" Pd "\n", from)); |
| 1999 } | 1961 } |
| 2000 } | 1962 } |
| 2001 | 1963 |
| 2002 LiveRange* tail = range->SplitAt(from); | 1964 LiveRange* tail = range->SplitAt(from); |
| 2003 Spill(tail); | 1965 Spill(tail); |
| 2004 } | 1966 } |
| 2005 | 1967 |
| 2006 | 1968 |
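The preceding hunk moves the spill point out of a loop body: if the range already covers the loop header and nothing inside the loop demands a register, spilling at the header stores to the stack once instead of on every iteration. A simplified, hypothetical restatement of that decision (parameter names are illustrative, not the VM API):

// Sketch only: hoist the spill position to the loop header when it is safe.
static long AdjustSpillPosition(long from,               // requested spill pos
                                long range_start,        // start of live range
                                long loop_header_start,  // header lifetime pos
                                bool only_unconstrained_uses_in_loop) {
  if ((range_start <= loop_header_start) && only_unconstrained_uses_in_loop) {
    return loop_header_start;  // Store once, before entering the loop.
  }
  return from;                 // Otherwise spill where originally requested.
}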
| 2007 void FlowGraphAllocator::AllocateSpillSlotFor(LiveRange* range) { | 1969 void FlowGraphAllocator::AllocateSpillSlotFor(LiveRange* range) { |
| 2008 #if defined(TARGET_ARCH_DBC) | 1970 #if defined(TARGET_ARCH_DBC) |
| (...skipping 15 matching lines...) | |
| 2024 const intptr_t start = range->Start(); | 1986 const intptr_t start = range->Start(); |
| 2025 const intptr_t end = last_sibling->End(); | 1987 const intptr_t end = last_sibling->End(); |
| 2026 | 1988 |
| 2027 // During fpu register allocation spill slot indices are computed in terms of | 1989 // During fpu register allocation spill slot indices are computed in terms of |
| 2028 // double (64bit) stack slots. We treat a quad stack slot (128bit) as a | 1990 // double (64bit) stack slots. We treat a quad stack slot (128bit) as a |
| 2029 // consecutive pair of two double spill slots. | 1991 // consecutive pair of two double spill slots. |
| 2030 // Special care is taken to never allocate the same index to both | 1992 // Special care is taken to never allocate the same index to both |
| 2031 // double and quad spill slots as it complicates disambiguation during | 1993 // double and quad spill slots as it complicates disambiguation during |
| 2032 // parallel move resolution. | 1994 // parallel move resolution. |
| 2033 const bool need_quad = (register_kind_ == Location::kFpuRegister) && | 1995 const bool need_quad = (register_kind_ == Location::kFpuRegister) && |
| 2034 ((range->representation() == kUnboxedFloat32x4) || | 1996 ((range->representation() == kUnboxedFloat32x4) || |
| 2035 (range->representation() == kUnboxedInt32x4) || | 1997 (range->representation() == kUnboxedInt32x4) || |
| 2036 (range->representation() == kUnboxedFloat64x2)); | 1998 (range->representation() == kUnboxedFloat64x2)); |
| 2037 const bool need_untagged = (register_kind_ == Location::kRegister) && | 1999 const bool need_untagged = (register_kind_ == Location::kRegister) && |
| 2038 ((range->representation() == kUntagged)); | 2000 ((range->representation() == kUntagged)); |
| 2039 | 2001 |
| 2040 // Search for a free spill slot among those already allocated: the value in it should be | 2002 // Search for a free spill slot among those already allocated: the value in it should be |
| 2041 // dead and its type should match (e.g. it should not be a part of the quad if | 2003 // dead and its type should match (e.g. it should not be a part of the quad if |
| 2042 // we are allocating a normal double slot). | 2004 // we are allocating a normal double slot). |
| 2043 // For CPU registers we need to take reserved slots for try-catch into | 2005 // For CPU registers we need to take reserved slots for try-catch into |
| 2044 // account. | 2006 // account. |
| 2045 intptr_t idx = register_kind_ == Location::kRegister | 2007 intptr_t idx = register_kind_ == Location::kRegister |
| 2046 ? flow_graph_.graph_entry()->fixed_slot_count() | 2008 ? flow_graph_.graph_entry()->fixed_slot_count() |
| 2047 : 0; | 2009 : 0; |
| 2048 for (; idx < spill_slots_.length(); idx++) { | 2010 for (; idx < spill_slots_.length(); idx++) { |
| 2049 if ((need_quad == quad_spill_slots_[idx]) && | 2011 if ((need_quad == quad_spill_slots_[idx]) && |
| 2050 (need_untagged == untagged_spill_slots_[idx]) && | 2012 (need_untagged == untagged_spill_slots_[idx]) && |
| 2051 (spill_slots_[idx] <= start)) { | 2013 (spill_slots_[idx] <= start)) { |
| 2052 break; | 2014 break; |
| 2053 } | 2015 } |
| 2054 } | 2016 } |
| 2055 | 2017 |
| 2056 if (idx == spill_slots_.length()) { | 2018 if (idx == spill_slots_.length()) { |
| 2057 // No free spill slot found. Allocate a new one. | 2019 // No free spill slot found. Allocate a new one. |
| (...skipping 17 matching lines...) | |
| 2075 ASSERT(!quad_spill_slots_[idx]); | 2037 ASSERT(!quad_spill_slots_[idx]); |
| 2076 } | 2038 } |
| 2077 | 2039 |
| 2078 // Assign spill slot to the range. | 2040 // Assign spill slot to the range. |
| 2079 if (register_kind_ == Location::kRegister) { | 2041 if (register_kind_ == Location::kRegister) { |
| 2080 range->set_spill_slot(Location::StackSlot(idx)); | 2042 range->set_spill_slot(Location::StackSlot(idx)); |
| 2081 } else { | 2043 } else { |
| 2082 // We use the index of the slot with the lowest address as an index for the | 2044 // We use the index of the slot with the lowest address as an index for the |
| 2083 // FPU register spill slot. In terms of indexes this relation is inverted, | 2045 // FPU register spill slot. In terms of indexes this relation is inverted, |
| 2084 // so we have to take the highest index. | 2046 // so we have to take the highest index. |
| 2085 const intptr_t slot_idx = cpu_spill_slot_count_ + | 2047 const intptr_t slot_idx = cpu_spill_slot_count_ + idx * kDoubleSpillFactor + |
| 2086 idx * kDoubleSpillFactor + (kDoubleSpillFactor - 1); | 2048 (kDoubleSpillFactor - 1); |
| 2087 | 2049 |
| 2088 Location location; | 2050 Location location; |
| 2089 if ((range->representation() == kUnboxedFloat32x4) || | 2051 if ((range->representation() == kUnboxedFloat32x4) || |
| 2090 (range->representation() == kUnboxedInt32x4) || | 2052 (range->representation() == kUnboxedInt32x4) || |
| 2091 (range->representation() == kUnboxedFloat64x2)) { | 2053 (range->representation() == kUnboxedFloat64x2)) { |
| 2092 ASSERT(need_quad); | 2054 ASSERT(need_quad); |
| 2093 location = Location::QuadStackSlot(slot_idx); | 2055 location = Location::QuadStackSlot(slot_idx); |
| 2094 } else { | 2056 } else { |
| 2095 ASSERT((range->representation() == kUnboxedDouble)); | 2057 ASSERT((range->representation() == kUnboxedDouble)); |
| 2096 location = Location::DoubleStackSlot(slot_idx); | 2058 location = Location::DoubleStackSlot(slot_idx); |
| 2097 } | 2059 } |
| 2098 range->set_spill_slot(location); | 2060 range->set_spill_slot(location); |
| 2099 } | 2061 } |
| 2100 | 2062 |
| 2101 spilled_.Add(range); | 2063 spilled_.Add(range); |
| 2102 } | 2064 } |
| 2103 | 2065 |
| 2104 | 2066 |
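A worked example of the slot_idx arithmetic above, using illustrative values (cpu_spill_slot_count_ == 3 and kDoubleSpillFactor == 2; neither number is taken from this CL):

  idx == 0  ->  slot_idx == 3 + 0 * 2 + (2 - 1) == 4
  idx == 1  ->  slot_idx == 3 + 1 * 2 + (2 - 1) == 6

Consecutive double spill slots therefore map to every other word index, and the index recorded in the Location is the highest-numbered word of each group, which, on a downward-growing stack, is the word with the lowest address, matching the comment in the hunk above.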
| 2105 void FlowGraphAllocator::MarkAsObjectAtSafepoints(LiveRange* range) { | 2067 void FlowGraphAllocator::MarkAsObjectAtSafepoints(LiveRange* range) { |
| 2106 intptr_t stack_index = range->spill_slot().stack_index(); | 2068 intptr_t stack_index = range->spill_slot().stack_index(); |
| 2107 ASSERT(stack_index >= 0); | 2069 ASSERT(stack_index >= 0); |
| 2108 | 2070 |
| 2109 while (range != NULL) { | 2071 while (range != NULL) { |
| 2110 for (SafepointPosition* safepoint = range->first_safepoint(); | 2072 for (SafepointPosition* safepoint = range->first_safepoint(); |
| 2111 safepoint != NULL; | 2073 safepoint != NULL; safepoint = safepoint->next()) { |
| 2112 safepoint = safepoint->next()) { | |
| 2113 // Mark the stack slot as having an object. | 2074 // Mark the stack slot as having an object. |
| 2114 safepoint->locs()->SetStackBit(stack_index); | 2075 safepoint->locs()->SetStackBit(stack_index); |
| 2115 } | 2076 } |
| 2116 range = range->next_sibling(); | 2077 range = range->next_sibling(); |
| 2117 } | 2078 } |
| 2118 } | 2079 } |
| 2119 | 2080 |
| 2120 | 2081 |
| 2121 void FlowGraphAllocator::Spill(LiveRange* range) { | 2082 void FlowGraphAllocator::Spill(LiveRange* range) { |
| 2122 LiveRange* parent = GetLiveRange(range->vreg()); | 2083 LiveRange* parent = GetLiveRange(range->vreg()); |
| 2123 if (parent->spill_slot().IsInvalid()) { | 2084 if (parent->spill_slot().IsInvalid()) { |
| 2124 AllocateSpillSlotFor(parent); | 2085 AllocateSpillSlotFor(parent); |
| 2125 if (range->representation() == kTagged) { | 2086 if (range->representation() == kTagged) { |
| 2126 MarkAsObjectAtSafepoints(parent); | 2087 MarkAsObjectAtSafepoints(parent); |
| 2127 } | 2088 } |
| 2128 } | 2089 } |
| 2129 range->set_assigned_location(parent->spill_slot()); | 2090 range->set_assigned_location(parent->spill_slot()); |
| 2130 ConvertAllUses(range); | 2091 ConvertAllUses(range); |
| 2131 } | 2092 } |
| 2132 | 2093 |
| 2133 | 2094 |
| 2134 intptr_t FlowGraphAllocator::FirstIntersectionWithAllocated( | 2095 intptr_t FlowGraphAllocator::FirstIntersectionWithAllocated( |
| 2135 intptr_t reg, LiveRange* unallocated) { | 2096 intptr_t reg, |
| 2097 LiveRange* unallocated) { |
| 2136 intptr_t intersection = kMaxPosition; | 2098 intptr_t intersection = kMaxPosition; |
| 2137 for (intptr_t i = 0; i < registers_[reg]->length(); i++) { | 2099 for (intptr_t i = 0; i < registers_[reg]->length(); i++) { |
| 2138 LiveRange* allocated = (*registers_[reg])[i]; | 2100 LiveRange* allocated = (*registers_[reg])[i]; |
| 2139 if (allocated == NULL) continue; | 2101 if (allocated == NULL) continue; |
| 2140 | 2102 |
| 2141 UseInterval* allocated_head = | 2103 UseInterval* allocated_head = |
| 2142 allocated->finger()->first_pending_use_interval(); | 2104 allocated->finger()->first_pending_use_interval(); |
| 2143 if (allocated_head->start() >= intersection) continue; | 2105 if (allocated_head->start() >= intersection) continue; |
| 2144 | 2106 |
| 2145 const intptr_t pos = FirstIntersection( | 2107 const intptr_t pos = FirstIntersection( |
| 2146 unallocated->finger()->first_pending_use_interval(), | 2108 unallocated->finger()->first_pending_use_interval(), allocated_head); |
| 2147 allocated_head); | |
| 2148 if (pos < intersection) intersection = pos; | 2109 if (pos < intersection) intersection = pos; |
| 2149 } | 2110 } |
| 2150 return intersection; | 2111 return intersection; |
| 2151 } | 2112 } |
| 2152 | 2113 |
| 2153 | 2114 |
| 2154 void ReachingDefs::AddPhi(PhiInstr* phi) { | 2115 void ReachingDefs::AddPhi(PhiInstr* phi) { |
| 2155 if (phi->reaching_defs() == NULL) { | 2116 if (phi->reaching_defs() == NULL) { |
| 2156 Zone* zone = flow_graph_.zone(); | 2117 Zone* zone = flow_graph_.zone(); |
| 2157 phi->set_reaching_defs(new(zone) BitVector( | 2118 phi->set_reaching_defs( |
| 2158 zone, flow_graph_.max_virtual_register_number())); | 2119 new (zone) BitVector(zone, flow_graph_.max_virtual_register_number())); |
| 2159 | 2120 |
| 2160 // Compute the initial reaching defs set. | 2121 // Compute the initial reaching defs set. |
| 2161 bool depends_on_phi = false; | 2122 bool depends_on_phi = false; |
| 2162 for (intptr_t i = 0; i < phi->InputCount(); i++) { | 2123 for (intptr_t i = 0; i < phi->InputCount(); i++) { |
| 2163 Definition* input = phi->InputAt(i)->definition(); | 2124 Definition* input = phi->InputAt(i)->definition(); |
| 2164 if (input->IsPhi()) { | 2125 if (input->IsPhi()) { |
| 2165 depends_on_phi = true; | 2126 depends_on_phi = true; |
| 2166 } | 2127 } |
| 2167 phi->reaching_defs()->Add(input->ssa_temp_index()); | 2128 phi->reaching_defs()->Add(input->ssa_temp_index()); |
| 2168 if (phi->HasPairRepresentation()) { | 2129 if (phi->HasPairRepresentation()) { |
| (...skipping 54 matching lines...) | |
| 2223 | 2184 |
| 2224 bool FlowGraphAllocator::AllocateFreeRegister(LiveRange* unallocated) { | 2185 bool FlowGraphAllocator::AllocateFreeRegister(LiveRange* unallocated) { |
| 2225 intptr_t candidate = kNoRegister; | 2186 intptr_t candidate = kNoRegister; |
| 2226 intptr_t free_until = 0; | 2187 intptr_t free_until = 0; |
| 2227 | 2188 |
| 2228 // If hint is available try hint first. | 2189 // If hint is available try hint first. |
| 2229 // TODO(vegorov): ensure that phis are hinted on the back edge. | 2190 // TODO(vegorov): ensure that phis are hinted on the back edge. |
| 2230 Location hint = unallocated->finger()->FirstHint(); | 2191 Location hint = unallocated->finger()->FirstHint(); |
| 2231 if (hint.IsMachineRegister()) { | 2192 if (hint.IsMachineRegister()) { |
| 2232 if (!blocked_registers_[hint.register_code()]) { | 2193 if (!blocked_registers_[hint.register_code()]) { |
| 2233 free_until = FirstIntersectionWithAllocated(hint.register_code(), | 2194 free_until = |
| 2234 unallocated); | 2195 FirstIntersectionWithAllocated(hint.register_code(), unallocated); |
| 2235 candidate = hint.register_code(); | 2196 candidate = hint.register_code(); |
| 2236 } | 2197 } |
| 2237 | 2198 |
| 2238 TRACE_ALLOC(THR_Print("found hint %s for v%" Pd ": free until %" Pd "\n", | 2199 TRACE_ALLOC(THR_Print("found hint %s for v%" Pd ": free until %" Pd "\n", |
| 2239 hint.Name(), | 2200 hint.Name(), unallocated->vreg(), free_until)); |
| 2240 unallocated->vreg(), | |
| 2241 free_until)); | |
| 2242 } else { | 2201 } else { |
| 2243 for (intptr_t reg = 0; reg < NumberOfRegisters(); ++reg) { | 2202 for (intptr_t reg = 0; reg < NumberOfRegisters(); ++reg) { |
| 2244 if (!blocked_registers_[reg] && (registers_[reg]->length() == 0)) { | 2203 if (!blocked_registers_[reg] && (registers_[reg]->length() == 0)) { |
| 2245 candidate = reg; | 2204 candidate = reg; |
| 2246 free_until = kMaxPosition; | 2205 free_until = kMaxPosition; |
| 2247 break; | 2206 break; |
| 2248 } | 2207 } |
| 2249 } | 2208 } |
| 2250 } | 2209 } |
| 2251 | 2210 |
| (...skipping 12 matching lines...) | |
| 2264 } | 2223 } |
| 2265 | 2224 |
| 2266 // All registers are blocked by active ranges. | 2225 // All registers are blocked by active ranges. |
| 2267 if (free_until <= unallocated->Start()) return false; | 2226 if (free_until <= unallocated->Start()) return false; |
| 2268 | 2227 |
| 2269 // We have a very good candidate (either hinted to us or completely free). | 2228 // We have a very good candidate (either hinted to us or completely free). |
| 2270 // If we are in a loop try to reduce number of moves on the back edge by | 2229 // If we are in a loop try to reduce number of moves on the back edge by |
| 2271 // searching for a candidate that does not interfere with phis on the back | 2230 // searching for a candidate that does not interfere with phis on the back |
| 2272 // edge. | 2231 // edge. |
| 2273 BlockInfo* loop_header = BlockInfoAt(unallocated->Start())->loop_header(); | 2232 BlockInfo* loop_header = BlockInfoAt(unallocated->Start())->loop_header(); |
| 2274 if ((unallocated->vreg() >= 0) && | 2233 if ((unallocated->vreg() >= 0) && (loop_header != NULL) && |
| 2275 (loop_header != NULL) && | |
| 2276 (free_until >= loop_header->last_block()->end_pos()) && | 2234 (free_until >= loop_header->last_block()->end_pos()) && |
| 2277 loop_header->backedge_interference()->Contains(unallocated->vreg())) { | 2235 loop_header->backedge_interference()->Contains(unallocated->vreg())) { |
| 2278 GrowableArray<bool> used_on_backedge(number_of_registers_); | 2236 GrowableArray<bool> used_on_backedge(number_of_registers_); |
| 2279 for (intptr_t i = 0; i < number_of_registers_; i++) { | 2237 for (intptr_t i = 0; i < number_of_registers_; i++) { |
| 2280 used_on_backedge.Add(false); | 2238 used_on_backedge.Add(false); |
| 2281 } | 2239 } |
| 2282 | 2240 |
| 2283 for (PhiIterator it(loop_header->entry()->AsJoinEntry()); | 2241 for (PhiIterator it(loop_header->entry()->AsJoinEntry()); !it.Done(); |
| 2284 !it.Done(); | |
| 2285 it.Advance()) { | 2242 it.Advance()) { |
| 2286 PhiInstr* phi = it.Current(); | 2243 PhiInstr* phi = it.Current(); |
| 2287 ASSERT(phi->is_alive()); | 2244 ASSERT(phi->is_alive()); |
| 2288 const intptr_t phi_vreg = phi->ssa_temp_index(); | 2245 const intptr_t phi_vreg = phi->ssa_temp_index(); |
| 2289 LiveRange* range = GetLiveRange(phi_vreg); | 2246 LiveRange* range = GetLiveRange(phi_vreg); |
| 2290 if (range->assigned_location().kind() == register_kind_) { | 2247 if (range->assigned_location().kind() == register_kind_) { |
| 2291 const intptr_t reg = range->assigned_location().register_code(); | 2248 const intptr_t reg = range->assigned_location().register_code(); |
| 2292 if (!reaching_defs_.Get(phi)->Contains(unallocated->vreg())) { | 2249 if (!reaching_defs_.Get(phi)->Contains(unallocated->vreg())) { |
| 2293 used_on_backedge[reg] = true; | 2250 used_on_backedge[reg] = true; |
| 2294 } | 2251 } |
| 2295 } | 2252 } |
| 2296 if (phi->HasPairRepresentation()) { | 2253 if (phi->HasPairRepresentation()) { |
| 2297 const intptr_t second_phi_vreg = ToSecondPairVreg(phi_vreg); | 2254 const intptr_t second_phi_vreg = ToSecondPairVreg(phi_vreg); |
| 2298 LiveRange* second_range = GetLiveRange(second_phi_vreg); | 2255 LiveRange* second_range = GetLiveRange(second_phi_vreg); |
| 2299 if (second_range->assigned_location().kind() == register_kind_) { | 2256 if (second_range->assigned_location().kind() == register_kind_) { |
| 2300 const intptr_t reg = | 2257 const intptr_t reg = |
| 2301 second_range->assigned_location().register_code(); | 2258 second_range->assigned_location().register_code(); |
| 2302 if (!reaching_defs_.Get(phi)->Contains(unallocated->vreg())) { | 2259 if (!reaching_defs_.Get(phi)->Contains(unallocated->vreg())) { |
| 2303 used_on_backedge[reg] = true; | 2260 used_on_backedge[reg] = true; |
| 2304 } | 2261 } |
| 2305 } | 2262 } |
| 2306 } | 2263 } |
| 2307 } | 2264 } |
| 2308 | 2265 |
| 2309 if (used_on_backedge[candidate]) { | 2266 if (used_on_backedge[candidate]) { |
| 2310 TRACE_ALLOC(THR_Print( | 2267 TRACE_ALLOC(THR_Print("considering %s for v%" Pd |
| 2311 "considering %s for v%" Pd ": has interference on the back edge" | 2268 ": has interference on the back edge" |
| 2312 " {loop [%" Pd ", %" Pd ")}\n", | 2269 " {loop [%" Pd ", %" Pd ")}\n", |
| 2313 MakeRegisterLocation(candidate).Name(), | 2270 MakeRegisterLocation(candidate).Name(), |
| 2314 unallocated->vreg(), | 2271 unallocated->vreg(), |
| 2315 loop_header->entry()->start_pos(), | 2272 loop_header->entry()->start_pos(), |
| 2316 loop_header->last_block()->end_pos())); | 2273 loop_header->last_block()->end_pos())); |
| 2317 for (intptr_t reg = 0; reg < NumberOfRegisters(); ++reg) { | 2274 for (intptr_t reg = 0; reg < NumberOfRegisters(); ++reg) { |
| 2318 if (blocked_registers_[reg] || | 2275 if (blocked_registers_[reg] || (reg == candidate) || |
| 2319 (reg == candidate) || | |
| 2320 used_on_backedge[reg]) { | 2276 used_on_backedge[reg]) { |
| 2321 continue; | 2277 continue; |
| 2322 } | 2278 } |
| 2323 | 2279 |
| 2324 const intptr_t intersection = | 2280 const intptr_t intersection = |
| 2325 FirstIntersectionWithAllocated(reg, unallocated); | 2281 FirstIntersectionWithAllocated(reg, unallocated); |
| 2326 if (intersection >= free_until) { | 2282 if (intersection >= free_until) { |
| 2327 candidate = reg; | 2283 candidate = reg; |
| 2328 free_until = intersection; | 2284 free_until = intersection; |
| 2329 TRACE_ALLOC(THR_Print( | 2285 TRACE_ALLOC(THR_Print( |
| 2330 "found %s for v%" Pd " with no interference on the back edge\n", | 2286 "found %s for v%" Pd " with no interference on the back edge\n", |
| 2331 MakeRegisterLocation(candidate).Name(), | 2287 MakeRegisterLocation(candidate).Name(), candidate)); |
| 2332 candidate)); | |
| 2333 break; | 2288 break; |
| 2334 } | 2289 } |
| 2335 } | 2290 } |
| 2336 } | 2291 } |
| 2337 } | 2292 } |
| 2338 | 2293 |
| 2339 TRACE_ALLOC(THR_Print("assigning free register ")); | 2294 TRACE_ALLOC(THR_Print("assigning free register ")); |
| 2340 TRACE_ALLOC(MakeRegisterLocation(candidate).Print()); | 2295 TRACE_ALLOC(MakeRegisterLocation(candidate).Print()); |
| 2341 TRACE_ALLOC(THR_Print(" to v%" Pd "\n", unallocated->vreg())); | 2296 TRACE_ALLOC(THR_Print(" to v%" Pd "\n", unallocated->vreg())); |
| 2342 | 2297 |
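The back-edge heuristic above can be read in isolation: once a free candidate is known, trade it for another register that is equally free but not already claimed by the loop's phis on the back edge, so the value does not have to move on every iteration. A hedged standalone sketch, with hypothetical arrays standing in for the allocator's state:

// Sketch only. first_conflict[reg] plays the role of
// FirstIntersectionWithAllocated(reg, unallocated) in the code above.
static int AvoidBackedgeInterference(int candidate,
                                     long free_until,
                                     int num_regs,
                                     const bool* blocked,
                                     const bool* used_on_backedge,
                                     const long* first_conflict) {
  if (!used_on_backedge[candidate]) return candidate;  // Nothing to avoid.
  for (int reg = 0; reg < num_regs; reg++) {
    if (blocked[reg] || (reg == candidate) || used_on_backedge[reg]) continue;
    if (first_conflict[reg] >= free_until) {
      return reg;  // Equally free and quiet on the back edge.
    }
  }
  return candidate;  // No better choice; keep the original candidate.
}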
| (...skipping 83 matching lines...) | |
| 2426 intptr_t blocked_at = kMaxPosition; | 2381 intptr_t blocked_at = kMaxPosition; |
| 2427 | 2382 |
| 2428 for (int reg = 0; reg < NumberOfRegisters(); ++reg) { | 2383 for (int reg = 0; reg < NumberOfRegisters(); ++reg) { |
| 2429 if (blocked_registers_[reg]) continue; | 2384 if (blocked_registers_[reg]) continue; |
| 2430 if (UpdateFreeUntil(reg, unallocated, &free_until, &blocked_at)) { | 2385 if (UpdateFreeUntil(reg, unallocated, &free_until, &blocked_at)) { |
| 2431 candidate = reg; | 2386 candidate = reg; |
| 2432 } | 2387 } |
| 2433 } | 2388 } |
| 2434 | 2389 |
| 2435 const intptr_t register_use_pos = | 2390 const intptr_t register_use_pos = |
| 2436 (register_use != NULL) ? register_use->pos() | 2391 (register_use != NULL) ? register_use->pos() : unallocated->Start(); |
| 2437 : unallocated->Start(); | |
| 2438 if (free_until < register_use_pos) { | 2392 if (free_until < register_use_pos) { |
| 2439 // Can't acquire free register. Spill until we really need one. | 2393 // Can't acquire free register. Spill until we really need one. |
| 2440 ASSERT(unallocated->Start() < ToInstructionStart(register_use_pos)); | 2394 ASSERT(unallocated->Start() < ToInstructionStart(register_use_pos)); |
| 2441 SpillBetween(unallocated, unallocated->Start(), register_use->pos()); | 2395 SpillBetween(unallocated, unallocated->Start(), register_use->pos()); |
| 2442 return; | 2396 return; |
| 2443 } | 2397 } |
| 2444 | 2398 |
| 2445 ASSERT(candidate != kNoRegister); | 2399 ASSERT(candidate != kNoRegister); |
| 2446 | 2400 |
| 2447 TRACE_ALLOC(THR_Print("assigning blocked register ")); | 2401 TRACE_ALLOC(THR_Print("assigning blocked register ")); |
| 2448 TRACE_ALLOC(MakeRegisterLocation(candidate).Print()); | 2402 TRACE_ALLOC(MakeRegisterLocation(candidate).Print()); |
| 2449 TRACE_ALLOC(THR_Print(" to live range v%" Pd " until %" Pd "\n", | 2403 TRACE_ALLOC(THR_Print(" to live range v%" Pd " until %" Pd "\n", |
| 2450 unallocated->vreg(), blocked_at)); | 2404 unallocated->vreg(), blocked_at)); |
| 2451 | 2405 |
| 2452 if (blocked_at < unallocated->End()) { | 2406 if (blocked_at < unallocated->End()) { |
| 2453 // Register is blocked before the end of the live range. Split the range | 2407 // Register is blocked before the end of the live range. Split the range |
| 2454 // at latest at blocked_at position. | 2408 // at latest at blocked_at position. |
| 2455 LiveRange* tail = SplitBetween(unallocated, | 2409 LiveRange* tail = |
| 2456 unallocated->Start(), | 2410 SplitBetween(unallocated, unallocated->Start(), blocked_at + 1); |
| 2457 blocked_at + 1); | |
| 2458 AddToUnallocated(tail); | 2411 AddToUnallocated(tail); |
| 2459 } | 2412 } |
| 2460 | 2413 |
| 2461 AssignNonFreeRegister(unallocated, candidate); | 2414 AssignNonFreeRegister(unallocated, candidate); |
| 2462 } | 2415 } |
| 2463 | 2416 |
| 2464 | 2417 |
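When every register is occupied, the code above boils down to a three-way decision driven by free_until (how long the best candidate can be borrowed), blocked_at (where an unspillable range reclaims it), and the first use that truly needs a register. A simplified, hypothetical restatement:

// Sketch only: the outcome chosen by the blocked-register path above.
enum BlockedOutcome {
  kSpillUntilRegisterUse,  // Not worth evicting anyone yet.
  kSplitAtBlockedPos,      // Use the register for [start, blocked_at + 1).
  kAssignWholeRange        // The register is usable for the entire range.
};

static BlockedOutcome DecideForBlockedRegister(long free_until,
                                               long blocked_at,
                                               long first_register_use_pos,
                                               long range_end) {
  if (free_until < first_register_use_pos) return kSpillUntilRegisterUse;
  if (blocked_at < range_end) return kSplitAtBlockedPos;
  return kAssignWholeRange;
}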
| 2465 bool FlowGraphAllocator::UpdateFreeUntil(intptr_t reg, | 2418 bool FlowGraphAllocator::UpdateFreeUntil(intptr_t reg, |
| 2466 LiveRange* unallocated, | 2419 LiveRange* unallocated, |
| 2467 intptr_t* cur_free_until, | 2420 intptr_t* cur_free_until, |
| 2468 intptr_t* cur_blocked_at) { | 2421 intptr_t* cur_blocked_at) { |
| 2469 intptr_t free_until = kMaxPosition; | 2422 intptr_t free_until = kMaxPosition; |
| 2470 intptr_t blocked_at = kMaxPosition; | 2423 intptr_t blocked_at = kMaxPosition; |
| 2471 const intptr_t start = unallocated->Start(); | 2424 const intptr_t start = unallocated->Start(); |
| 2472 | 2425 |
| 2473 for (intptr_t i = 0; i < registers_[reg]->length(); i++) { | 2426 for (intptr_t i = 0; i < registers_[reg]->length(); i++) { |
| 2474 LiveRange* allocated = (*registers_[reg])[i]; | 2427 LiveRange* allocated = (*registers_[reg])[i]; |
| 2475 | 2428 |
| 2476 UseInterval* first_pending_use_interval = | 2429 UseInterval* first_pending_use_interval = |
| 2477 allocated->finger()->first_pending_use_interval(); | 2430 allocated->finger()->first_pending_use_interval(); |
| 2478 if (first_pending_use_interval->Contains(start)) { | 2431 if (first_pending_use_interval->Contains(start)) { |
| 2479 // This is an active interval. | 2432 // This is an active interval. |
| 2480 if (allocated->vreg() < 0) { | 2433 if (allocated->vreg() < 0) { |
| 2481 // This register is blocked by an interval that | 2434 // This register is blocked by an interval that |
| 2482 // can't be spilled. | 2435 // can't be spilled. |
| 2483 return false; | 2436 return false; |
| 2484 } | 2437 } |
| 2485 | 2438 |
| 2486 UsePosition* use = | 2439 UsePosition* use = allocated->finger()->FirstInterferingUse(start); |
| 2487 allocated->finger()->FirstInterferingUse(start); | |
| 2488 if ((use != NULL) && ((ToInstructionStart(use->pos()) - start) <= 1)) { | 2440 if ((use != NULL) && ((ToInstructionStart(use->pos()) - start) <= 1)) { |
| 2489 // This register is blocked by an interval that is used | 2441 // This register is blocked by an interval that is used |
| 2490 // as a register in the current instruction and can't | 2442 // as a register in the current instruction and can't |
| 2491 // be spilled. | 2443 // be spilled. |
| 2492 return false; | 2444 return false; |
| 2493 } | 2445 } |
| 2494 | 2446 |
| 2495 const intptr_t use_pos = (use != NULL) ? use->pos() | 2447 const intptr_t use_pos = (use != NULL) ? use->pos() : allocated->End(); |
| 2496 : allocated->End(); | |
| 2497 | 2448 |
| 2498 if (use_pos < free_until) free_until = use_pos; | 2449 if (use_pos < free_until) free_until = use_pos; |
| 2499 } else { | 2450 } else { |
| 2500 // This is an inactive interval. | 2451 // This is an inactive interval. |
| 2501 const intptr_t intersection = FirstIntersection( | 2452 const intptr_t intersection = FirstIntersection( |
| 2502 first_pending_use_interval, unallocated->first_use_interval()); | 2453 first_pending_use_interval, unallocated->first_use_interval()); |
| 2503 if (intersection != kMaxPosition) { | 2454 if (intersection != kMaxPosition) { |
| 2504 if (intersection < free_until) free_until = intersection; | 2455 if (intersection < free_until) free_until = intersection; |
| 2505 if (allocated->vreg() == kNoVirtualRegister) blocked_at = intersection; | 2456 if (allocated->vreg() == kNoVirtualRegister) blocked_at = intersection; |
| 2506 } | 2457 } |
| (...skipping 48 matching lines...) | |
| 2555 last_used_register_ = Utils::Maximum(last_used_register_, reg); | 2506 last_used_register_ = Utils::Maximum(last_used_register_, reg); |
| 2556 #endif | 2507 #endif |
| 2557 } | 2508 } |
| 2558 | 2509 |
| 2559 | 2510 |
| 2560 bool FlowGraphAllocator::EvictIntersection(LiveRange* allocated, | 2511 bool FlowGraphAllocator::EvictIntersection(LiveRange* allocated, |
| 2561 LiveRange* unallocated) { | 2512 LiveRange* unallocated) { |
| 2562 UseInterval* first_unallocated = | 2513 UseInterval* first_unallocated = |
| 2563 unallocated->finger()->first_pending_use_interval(); | 2514 unallocated->finger()->first_pending_use_interval(); |
| 2564 const intptr_t intersection = FirstIntersection( | 2515 const intptr_t intersection = FirstIntersection( |
| 2565 allocated->finger()->first_pending_use_interval(), | 2516 allocated->finger()->first_pending_use_interval(), first_unallocated); |
| 2566 first_unallocated); | |
| 2567 if (intersection == kMaxPosition) return false; | 2517 if (intersection == kMaxPosition) return false; |
| 2568 | 2518 |
| 2569 const intptr_t spill_position = first_unallocated->start(); | 2519 const intptr_t spill_position = first_unallocated->start(); |
| 2570 UsePosition* use = allocated->finger()->FirstInterferingUse(spill_position); | 2520 UsePosition* use = allocated->finger()->FirstInterferingUse(spill_position); |
| 2571 if (use == NULL) { | 2521 if (use == NULL) { |
| 2572 // No register uses after this point. | 2522 // No register uses after this point. |
| 2573 SpillAfter(allocated, spill_position); | 2523 SpillAfter(allocated, spill_position); |
| 2574 } else { | 2524 } else { |
| 2575 const intptr_t restore_position = | 2525 const intptr_t restore_position = |
| 2576 (spill_position < intersection) ? MinPosition(intersection, use->pos()) | 2526 (spill_position < intersection) ? MinPosition(intersection, use->pos()) |
| (...skipping 57 matching lines...) | |
| 2634 TRACE_ALLOC(THR_Print(":\n")); | 2584 TRACE_ALLOC(THR_Print(":\n")); |
| 2635 | 2585 |
| 2636 for (UsePosition* use = range->first_use(); use != NULL; use = use->next()) { | 2586 for (UsePosition* use = range->first_use(); use != NULL; use = use->next()) { |
| 2637 ConvertUseTo(use, loc); | 2587 ConvertUseTo(use, loc); |
| 2638 } | 2588 } |
| 2639 | 2589 |
| 2640 // Add live registers at all safepoints for instructions with slow-path | 2590 // Add live registers at all safepoints for instructions with slow-path |
| 2641 // code. | 2591 // code. |
| 2642 if (loc.IsMachineRegister()) { | 2592 if (loc.IsMachineRegister()) { |
| 2643 for (SafepointPosition* safepoint = range->first_safepoint(); | 2593 for (SafepointPosition* safepoint = range->first_safepoint(); |
| 2644 safepoint != NULL; | 2594 safepoint != NULL; safepoint = safepoint->next()) { |
| 2645 safepoint = safepoint->next()) { | |
| 2646 #if !defined(TARGET_ARCH_DBC) | 2595 #if !defined(TARGET_ARCH_DBC) |
| 2647 if (!safepoint->locs()->always_calls()) { | 2596 if (!safepoint->locs()->always_calls()) { |
| 2648 ASSERT(safepoint->locs()->can_call()); | 2597 ASSERT(safepoint->locs()->can_call()); |
| 2649 safepoint->locs()->live_registers()->Add(loc, range->representation()); | 2598 safepoint->locs()->live_registers()->Add(loc, range->representation()); |
| 2650 } | 2599 } |
| 2651 #else | 2600 #else |
| 2652 if (range->representation() == kTagged) { | 2601 if (range->representation() == kTagged) { |
| 2653 safepoint->locs()->SetStackBit(loc.reg()); | 2602 safepoint->locs()->SetStackBit(loc.reg()); |
| 2654 } | 2603 } |
| 2655 #endif // !defined(TARGET_ARCH_DBC) | 2604 #endif // !defined(TARGET_ARCH_DBC) |
| (...skipping 17 matching lines...) | |
| 2673 } | 2622 } |
| 2674 | 2623 |
| 2675 if (first_evicted != -1) RemoveEvicted(reg, first_evicted); | 2624 if (first_evicted != -1) RemoveEvicted(reg, first_evicted); |
| 2676 } | 2625 } |
| 2677 } | 2626 } |
| 2678 | 2627 |
| 2679 | 2628 |
| 2680 bool LiveRange::Contains(intptr_t pos) const { | 2629 bool LiveRange::Contains(intptr_t pos) const { |
| 2681 if (!CanCover(pos)) return false; | 2630 if (!CanCover(pos)) return false; |
| 2682 | 2631 |
| 2683 for (UseInterval* interval = first_use_interval_; | 2632 for (UseInterval* interval = first_use_interval_; interval != NULL; |
| 2684 interval != NULL; | |
| 2685 interval = interval->next()) { | 2633 interval = interval->next()) { |
| 2686 if (interval->Contains(pos)) { | 2634 if (interval->Contains(pos)) { |
| 2687 return true; | 2635 return true; |
| 2688 } | 2636 } |
| 2689 } | 2637 } |
| 2690 | 2638 |
| 2691 return false; | 2639 return false; |
| 2692 } | 2640 } |
| 2693 | 2641 |
| 2694 | 2642 |
| 2695 void FlowGraphAllocator::AssignSafepoints(Definition* defn, | 2643 void FlowGraphAllocator::AssignSafepoints(Definition* defn, LiveRange* range) { |
| 2696 LiveRange* range) { | |
| 2697 for (intptr_t i = safepoints_.length() - 1; i >= 0; i--) { | 2644 for (intptr_t i = safepoints_.length() - 1; i >= 0; i--) { |
| 2698 Instruction* safepoint_instr = safepoints_[i]; | 2645 Instruction* safepoint_instr = safepoints_[i]; |
| 2699 if (safepoint_instr == defn) { | 2646 if (safepoint_instr == defn) { |
| 2700 // The value is not live until after the definition is fully executed, | 2647 // The value is not live until after the definition is fully executed, |
| 2701 // so don't assign the safepoint inside the definition itself to | 2648 // so don't assign the safepoint inside the definition itself to |
| 2702 // the definition's live range. | 2649 // the definition's live range. |
| 2703 continue; | 2650 continue; |
| 2704 } | 2651 } |
| 2705 | 2652 |
| 2706 const intptr_t pos = safepoint_instr->lifetime_position(); | 2653 const intptr_t pos = safepoint_instr->lifetime_position(); |
| (...skipping 101 matching lines...) | |
| 2808 void FlowGraphAllocator::AllocateUnallocatedRanges() { | 2755 void FlowGraphAllocator::AllocateUnallocatedRanges() { |
| 2809 #if defined(DEBUG) | 2756 #if defined(DEBUG) |
| 2810 ASSERT(UnallocatedIsSorted()); | 2757 ASSERT(UnallocatedIsSorted()); |
| 2811 #endif | 2758 #endif |
| 2812 | 2759 |
| 2813 while (!unallocated_.is_empty()) { | 2760 while (!unallocated_.is_empty()) { |
| 2814 LiveRange* range = unallocated_.RemoveLast(); | 2761 LiveRange* range = unallocated_.RemoveLast(); |
| 2815 const intptr_t start = range->Start(); | 2762 const intptr_t start = range->Start(); |
| 2816 TRACE_ALLOC(THR_Print("Processing live range for v%" Pd " " | 2763 TRACE_ALLOC(THR_Print("Processing live range for v%" Pd " " |
| 2817 "starting at %" Pd "\n", | 2764 "starting at %" Pd "\n", |
| 2818 range->vreg(), | 2765 range->vreg(), start)); |
| 2819 start)); | |
| 2820 | 2766 |
| 2821 // TODO(vegorov): eagerly spill liveranges without register uses. | 2767 // TODO(vegorov): eagerly spill liveranges without register uses. |
| 2822 AdvanceActiveIntervals(start); | 2768 AdvanceActiveIntervals(start); |
| 2823 | 2769 |
| 2824 if (!AllocateFreeRegister(range)) { | 2770 if (!AllocateFreeRegister(range)) { |
| 2825 if (intrinsic_mode_) { | 2771 if (intrinsic_mode_) { |
| 2826 // No spilling when compiling intrinsics. | 2772 // No spilling when compiling intrinsics. |
| 2827 // TODO(fschneider): Handle spilling in intrinsics. For now, the | 2773 // TODO(fschneider): Handle spilling in intrinsics. For now, the |
| 2828 // IR has to be built so that there are enough free registers. | 2774 // IR has to be built so that there are enough free registers. |
| 2829 UNREACHABLE(); | 2775 UNREACHABLE(); |
| 2830 } | 2776 } |
| 2831 AllocateAnyRegister(range); | 2777 AllocateAnyRegister(range); |
| 2832 } | 2778 } |
| 2833 } | 2779 } |
| 2834 | 2780 |
| 2835 // All allocation decisions were done. | 2781 // All allocation decisions were done. |
| 2836 ASSERT(unallocated_.is_empty()); | 2782 ASSERT(unallocated_.is_empty()); |
| 2837 | 2783 |
| 2838 // Finish allocation. | 2784 // Finish allocation. |
| 2839 AdvanceActiveIntervals(kMaxPosition); | 2785 AdvanceActiveIntervals(kMaxPosition); |
| 2840 TRACE_ALLOC(THR_Print("Allocation completed\n")); | 2786 TRACE_ALLOC(THR_Print("Allocation completed\n")); |
| 2841 } | 2787 } |
| 2842 | 2788 |
| 2843 | 2789 |
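The worklist loop above is the core of the linear-scan pass: ranges sorted by start position are popped one by one, active and inactive intervals are advanced to the new start (omitted in the sketch below), and a free register is preferred over evicting or spilling. A minimal runnable restatement over plain structs, with callbacks standing in for AllocateFreeRegister / AllocateAnyRegister (hypothetical types, not the VM's):

#include <algorithm>
#include <vector>

// Sketch only.
struct RangeSketch {
  long start;
  long end;
};

static void LinearScanSketch(std::vector<RangeSketch*>* worklist,
                             bool (*try_free_register)(RangeSketch*),
                             void (*allocate_blocked)(RangeSketch*)) {
  // Sort by decreasing start so that back() is always the range that begins
  // earliest, matching unallocated_.RemoveLast() above.
  std::sort(worklist->begin(), worklist->end(),
            [](const RangeSketch* a, const RangeSketch* b) {
              return a->start > b->start;
            });
  while (!worklist->empty()) {
    RangeSketch* range = worklist->back();
    worklist->pop_back();
    if (!try_free_register(range)) {
      allocate_blocked(range);  // Evict, split or spill; the real code
                                // re-queues split tails in sorted order.
    }
  }
}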
| 2844 bool FlowGraphAllocator::TargetLocationIsSpillSlot(LiveRange* range, | 2790 bool FlowGraphAllocator::TargetLocationIsSpillSlot(LiveRange* range, |
| 2845 Location target) { | 2791 Location target) { |
| 2846 if (target.IsStackSlot() || | 2792 if (target.IsStackSlot() || target.IsDoubleStackSlot() || |
| 2847 target.IsDoubleStackSlot() || | |
| 2848 target.IsConstant()) { | 2793 target.IsConstant()) { |
| 2849 ASSERT(GetLiveRange(range->vreg())->spill_slot().Equals(target)); | 2794 ASSERT(GetLiveRange(range->vreg())->spill_slot().Equals(target)); |
| 2850 return true; | 2795 return true; |
| 2851 } | 2796 } |
| 2852 return false; | 2797 return false; |
| 2853 } | 2798 } |
| 2854 | 2799 |
| 2855 | 2800 |
| 2856 void FlowGraphAllocator::ConnectSplitSiblings(LiveRange* parent, | 2801 void FlowGraphAllocator::ConnectSplitSiblings(LiveRange* parent, |
| 2857 BlockEntryInstr* source_block, | 2802 BlockEntryInstr* source_block, |
| 2858 BlockEntryInstr* target_block) { | 2803 BlockEntryInstr* target_block) { |
| 2859 TRACE_ALLOC(THR_Print("Connect v%" Pd " on the edge B%" Pd " -> B%" Pd "\n", | 2804 TRACE_ALLOC(THR_Print("Connect v%" Pd " on the edge B%" Pd " -> B%" Pd "\n", |
| 2860 parent->vreg(), | 2805 parent->vreg(), source_block->block_id(), |
| 2861 source_block->block_id(), | |
| 2862 target_block->block_id())); | 2806 target_block->block_id())); |
| 2863 if (parent->next_sibling() == NULL) { | 2807 if (parent->next_sibling() == NULL) { |
| 2864 // Nothing to connect. The whole range was allocated to the same location. | 2808 // Nothing to connect. The whole range was allocated to the same location. |
| 2865 TRACE_ALLOC(THR_Print("range v%" Pd " has no siblings\n", parent->vreg())); | 2809 TRACE_ALLOC(THR_Print("range v%" Pd " has no siblings\n", parent->vreg())); |
| 2866 return; | 2810 return; |
| 2867 } | 2811 } |
| 2868 | 2812 |
| 2869 const intptr_t source_pos = source_block->end_pos() - 1; | 2813 const intptr_t source_pos = source_block->end_pos() - 1; |
| 2870 ASSERT(IsInstructionEndPosition(source_pos)); | 2814 ASSERT(IsInstructionEndPosition(source_pos)); |
| 2871 | 2815 |
| (...skipping 22 matching lines...) | |
| 2894 #if defined(DEBUG) | 2838 #if defined(DEBUG) |
| 2895 target_cover = range; | 2839 target_cover = range; |
| 2896 #endif | 2840 #endif |
| 2897 } | 2841 } |
| 2898 | 2842 |
| 2899 range = range->next_sibling(); | 2843 range = range->next_sibling(); |
| 2900 } | 2844 } |
| 2901 | 2845 |
| 2902 TRACE_ALLOC(THR_Print("connecting v%" Pd " between [%" Pd ", %" Pd ") {%s} " | 2846 TRACE_ALLOC(THR_Print("connecting v%" Pd " between [%" Pd ", %" Pd ") {%s} " |
| 2903 "to [%" Pd ", %" Pd ") {%s}\n", | 2847 "to [%" Pd ", %" Pd ") {%s}\n", |
| 2904 parent->vreg(), | 2848 parent->vreg(), source_cover->Start(), |
| 2905 source_cover->Start(), | 2849 source_cover->End(), source.Name(), |
| 2906 source_cover->End(), | 2850 target_cover->Start(), target_cover->End(), |
| 2907 source.Name(), | |
| 2908 target_cover->Start(), | |
| 2909 target_cover->End(), | |
| 2910 target.Name())); | 2851 target.Name())); |
| 2911 | 2852 |
| 2912 // Siblings were allocated to the same register. | 2853 // Siblings were allocated to the same register. |
| 2913 if (source.Equals(target)) return; | 2854 if (source.Equals(target)) return; |
| 2914 | 2855 |
| 2915 // Values are eagerly spilled. Spill slot already contains appropriate value. | 2856 // Values are eagerly spilled. Spill slot already contains appropriate value. |
| 2916 if (TargetLocationIsSpillSlot(parent, target)) { | 2857 if (TargetLocationIsSpillSlot(parent, target)) { |
| 2917 return; | 2858 return; |
| 2918 } | 2859 } |
| 2919 | 2860 |
| 2920 Instruction* last = source_block->last_instruction(); | 2861 Instruction* last = source_block->last_instruction(); |
| 2921 if ((last->SuccessorCount() == 1) && !source_block->IsGraphEntry()) { | 2862 if ((last->SuccessorCount() == 1) && !source_block->IsGraphEntry()) { |
| 2922 ASSERT(last->IsGoto()); | 2863 ASSERT(last->IsGoto()); |
| 2923 last->AsGoto()->GetParallelMove()->AddMove(target, source); | 2864 last->AsGoto()->GetParallelMove()->AddMove(target, source); |
| 2924 } else { | 2865 } else { |
| 2925 target_block->GetParallelMove()->AddMove(target, source); | 2866 target_block->GetParallelMove()->AddMove(target, source); |
| 2926 } | 2867 } |
| 2927 } | 2868 } |
| 2928 | 2869 |
| 2929 | 2870 |
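ConnectSplitSiblings above emits at most one move per value per edge, and the interesting part is when it skips the move and where it puts it. A hedged, simplified restatement of those two checks, with a hypothetical Location stand-in:

// Sketch only.
struct LocSketch {
  int kind;
  int index;
  bool Equals(const LocSketch& other) const {
    return (kind == other.kind) && (index == other.index);
  }
};

static bool NeedsResolutionMove(const LocSketch& source,
                                const LocSketch& target,
                                bool target_is_spill_slot) {
  if (source.Equals(target)) return false;  // Same register or same slot.
  if (target_is_spill_slot) return false;   // Value was already spilled at def.
  return true;
}

// When a move is needed it goes at the end of the predecessor if that block
// ends in an unconditional goto (single successor), and at the entry of the
// target block otherwise.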
| 2930 void FlowGraphAllocator::ResolveControlFlow() { | 2871 void FlowGraphAllocator::ResolveControlFlow() { |
| 2931 // Resolve linear control flow between touching split siblings | 2872 // Resolve linear control flow between touching split siblings |
| 2932 // inside basic blocks. | 2873 // inside basic blocks. |
| 2933 for (intptr_t vreg = 0; vreg < live_ranges_.length(); vreg++) { | 2874 for (intptr_t vreg = 0; vreg < live_ranges_.length(); vreg++) { |
| 2934 LiveRange* range = live_ranges_[vreg]; | 2875 LiveRange* range = live_ranges_[vreg]; |
| 2935 if (range == NULL) continue; | 2876 if (range == NULL) continue; |
| 2936 | 2877 |
| 2937 while (range->next_sibling() != NULL) { | 2878 while (range->next_sibling() != NULL) { |
| 2938 LiveRange* sibling = range->next_sibling(); | 2879 LiveRange* sibling = range->next_sibling(); |
| 2939 TRACE_ALLOC(THR_Print("connecting [%" Pd ", %" Pd ") [", | 2880 TRACE_ALLOC(THR_Print("connecting [%" Pd ", %" Pd ") [", range->Start(), |
| 2940 range->Start(), range->End())); | 2881 range->End())); |
| 2941 TRACE_ALLOC(range->assigned_location().Print()); | 2882 TRACE_ALLOC(range->assigned_location().Print()); |
| 2942 TRACE_ALLOC(THR_Print("] to [%" Pd ", %" Pd ") [", | 2883 TRACE_ALLOC(THR_Print("] to [%" Pd ", %" Pd ") [", sibling->Start(), |
| 2943 sibling->Start(), sibling->End())); | 2884 sibling->End())); |
| 2944 TRACE_ALLOC(sibling->assigned_location().Print()); | 2885 TRACE_ALLOC(sibling->assigned_location().Print()); |
| 2945 TRACE_ALLOC(THR_Print("]\n")); | 2886 TRACE_ALLOC(THR_Print("]\n")); |
| 2946 if ((range->End() == sibling->Start()) && | 2887 if ((range->End() == sibling->Start()) && |
| 2947 !TargetLocationIsSpillSlot(range, sibling->assigned_location()) && | 2888 !TargetLocationIsSpillSlot(range, sibling->assigned_location()) && |
| 2948 !range->assigned_location().Equals(sibling->assigned_location()) && | 2889 !range->assigned_location().Equals(sibling->assigned_location()) && |
| 2949 !IsBlockEntry(range->End())) { | 2890 !IsBlockEntry(range->End())) { |
| 2950 AddMoveAt(sibling->Start(), | 2891 AddMoveAt(sibling->Start(), sibling->assigned_location(), |
| 2951 sibling->assigned_location(), | |
| 2952 range->assigned_location()); | 2892 range->assigned_location()); |
| 2953 } | 2893 } |
| 2954 range = sibling; | 2894 range = sibling; |
| 2955 } | 2895 } |
| 2956 } | 2896 } |
| 2957 | 2897 |
| 2958 // Resolve non-linear control flow across branches. | 2898 // Resolve non-linear control flow across branches. |
| 2959 for (intptr_t i = 1; i < block_order_.length(); i++) { | 2899 for (intptr_t i = 1; i < block_order_.length(); i++) { |
| 2960 BlockEntryInstr* block = block_order_[i]; | 2900 BlockEntryInstr* block = block_order_[i]; |
| 2961 BitVector* live = liveness_.GetLiveInSet(block); | 2901 BitVector* live = liveness_.GetLiveInSet(block); |
| 2962 for (BitVector::Iterator it(live); !it.Done(); it.Advance()) { | 2902 for (BitVector::Iterator it(live); !it.Done(); it.Advance()) { |
| 2963 LiveRange* range = GetLiveRange(it.Current()); | 2903 LiveRange* range = GetLiveRange(it.Current()); |
| 2964 for (intptr_t j = 0; j < block->PredecessorCount(); j++) { | 2904 for (intptr_t j = 0; j < block->PredecessorCount(); j++) { |
| 2965 ConnectSplitSiblings(range, block->PredecessorAt(j), block); | 2905 ConnectSplitSiblings(range, block->PredecessorAt(j), block); |
| 2966 } | 2906 } |
| 2967 } | 2907 } |
| 2968 } | 2908 } |
| 2969 | 2909 |
| 2970 // Eagerly spill values. | 2910 // Eagerly spill values. |
| 2971 // TODO(vegorov): if value is spilled on the cold path (e.g. by the call) | 2911 // TODO(vegorov): if value is spilled on the cold path (e.g. by the call) |
| 2972 // this will cause spilling to occur on the fast path (at the definition). | 2912 // this will cause spilling to occur on the fast path (at the definition). |
| 2973 for (intptr_t i = 0; i < spilled_.length(); i++) { | 2913 for (intptr_t i = 0; i < spilled_.length(); i++) { |
| 2974 LiveRange* range = spilled_[i]; | 2914 LiveRange* range = spilled_[i]; |
| 2975 if (range->assigned_location().IsStackSlot() || | 2915 if (range->assigned_location().IsStackSlot() || |
| 2976 range->assigned_location().IsDoubleStackSlot() || | 2916 range->assigned_location().IsDoubleStackSlot() || |
| 2977 range->assigned_location().IsConstant()) { | 2917 range->assigned_location().IsConstant()) { |
| 2978 ASSERT(range->assigned_location().Equals(range->spill_slot())); | 2918 ASSERT(range->assigned_location().Equals(range->spill_slot())); |
| 2979 } else { | 2919 } else { |
| 2980 AddMoveAt(range->Start() + 1, | 2920 AddMoveAt(range->Start() + 1, range->spill_slot(), |
| 2981 range->spill_slot(), | |
| 2982 range->assigned_location()); | 2921 range->assigned_location()); |
| 2983 } | 2922 } |
| 2984 } | 2923 } |
| 2985 } | 2924 } |
| 2986 | 2925 |
| 2987 | 2926 |
| 2988 static Representation RepresentationForRange(Representation definition_rep) { | 2927 static Representation RepresentationForRange(Representation definition_rep) { |
| 2989 if (definition_rep == kUnboxedMint) { | 2928 if (definition_rep == kUnboxedMint) { |
| 2990 // kUnboxedMint is split into two ranges, each of which is kUntagged. | 2929 // kUnboxedMint is split into two ranges, each of which is kUntagged. |
| 2991 return kUntagged; | 2930 return kUntagged; |
| 2992 } else if (definition_rep == kUnboxedUint32) { | 2931 } else if (definition_rep == kUnboxedUint32) { |
| 2993 // kUnboxedUint32 is untagged. | 2932 // kUnboxedUint32 is untagged. |
| 2994 return kUntagged; | 2933 return kUntagged; |
| 2995 } | 2934 } |
| 2996 return definition_rep; | 2935 return definition_rep; |
| 2997 } | 2936 } |
| 2998 | 2937 |
| 2999 | 2938 |
| 3000 void FlowGraphAllocator::CollectRepresentations() { | 2939 void FlowGraphAllocator::CollectRepresentations() { |
| 3001 // Parameters. | 2940 // Parameters. |
| 3002 GraphEntryInstr* graph_entry = flow_graph_.graph_entry(); | 2941 GraphEntryInstr* graph_entry = flow_graph_.graph_entry(); |
| 3003 for (intptr_t i = 0; i < graph_entry->initial_definitions()->length(); ++i) { | 2942 for (intptr_t i = 0; i < graph_entry->initial_definitions()->length(); ++i) { |
| 3004 Definition* def = (*graph_entry->initial_definitions())[i]; | 2943 Definition* def = (*graph_entry->initial_definitions())[i]; |
| 3005 value_representations_[def->ssa_temp_index()] = | 2944 value_representations_[def->ssa_temp_index()] = |
| 3006 RepresentationForRange(def->representation()); | 2945 RepresentationForRange(def->representation()); |
| 3007 ASSERT(!def->HasPairRepresentation()); | 2946 ASSERT(!def->HasPairRepresentation()); |
| 3008 } | 2947 } |
| 3009 | 2948 |
| 3010 for (BlockIterator it = flow_graph_.reverse_postorder_iterator(); | 2949 for (BlockIterator it = flow_graph_.reverse_postorder_iterator(); !it.Done(); |
| 3011 !it.Done(); | |
| 3012 it.Advance()) { | 2950 it.Advance()) { |
| 3013 BlockEntryInstr* block = it.Current(); | 2951 BlockEntryInstr* block = it.Current(); |
| 3014 | 2952 |
| 3015 // Catch entry. | 2953 // Catch entry. |
| 3016 if (block->IsCatchBlockEntry()) { | 2954 if (block->IsCatchBlockEntry()) { |
| 3017 CatchBlockEntryInstr* catch_entry = block->AsCatchBlockEntry(); | 2955 CatchBlockEntryInstr* catch_entry = block->AsCatchBlockEntry(); |
| 3018 for (intptr_t i = 0; | 2956 for (intptr_t i = 0; i < catch_entry->initial_definitions()->length(); |
| 3019 i < catch_entry->initial_definitions()->length(); | |
| 3020 ++i) { | 2957 ++i) { |
| 3021 Definition* def = (*catch_entry->initial_definitions())[i]; | 2958 Definition* def = (*catch_entry->initial_definitions())[i]; |
| 3022 ASSERT(!def->HasPairRepresentation()); | 2959 ASSERT(!def->HasPairRepresentation()); |
| 3023 value_representations_[def->ssa_temp_index()] = | 2960 value_representations_[def->ssa_temp_index()] = |
| 3024 RepresentationForRange(def->representation()); | 2961 RepresentationForRange(def->representation()); |
| 3025 } | 2962 } |
| 3026 } | 2963 } |
| 3027 // Phis. | 2964 // Phis. |
| 3028 if (block->IsJoinEntry()) { | 2965 if (block->IsJoinEntry()) { |
| 3029 JoinEntryInstr* join = block->AsJoinEntry(); | 2966 JoinEntryInstr* join = block->AsJoinEntry(); |
| 3030 for (PhiIterator it(join); !it.Done(); it.Advance()) { | 2967 for (PhiIterator it(join); !it.Done(); it.Advance()) { |
| 3031 PhiInstr* phi = it.Current(); | 2968 PhiInstr* phi = it.Current(); |
| 3032 ASSERT(phi != NULL && phi->ssa_temp_index() >= 0); | 2969 ASSERT(phi != NULL && phi->ssa_temp_index() >= 0); |
| 3033 value_representations_[phi->ssa_temp_index()] = | 2970 value_representations_[phi->ssa_temp_index()] = |
| 3034 RepresentationForRange(phi->representation()); | 2971 RepresentationForRange(phi->representation()); |
| 3035 if (phi->HasPairRepresentation()) { | 2972 if (phi->HasPairRepresentation()) { |
| 3036 value_representations_[ToSecondPairVreg(phi->ssa_temp_index())] = | 2973 value_representations_[ToSecondPairVreg(phi->ssa_temp_index())] = |
| 3037 RepresentationForRange(phi->representation()); | 2974 RepresentationForRange(phi->representation()); |
| 3038 } | 2975 } |
| 3039 } | 2976 } |
| 3040 } | 2977 } |
| 3041 // Normal instructions. | 2978 // Normal instructions. |
| 3042 for (ForwardInstructionIterator instr_it(block); | 2979 for (ForwardInstructionIterator instr_it(block); !instr_it.Done(); |
| 3043 !instr_it.Done(); | |
| 3044 instr_it.Advance()) { | 2980 instr_it.Advance()) { |
| 3045 Definition* def = instr_it.Current()->AsDefinition(); | 2981 Definition* def = instr_it.Current()->AsDefinition(); |
| 3046 if ((def != NULL) && (def->ssa_temp_index() >= 0)) { | 2982 if ((def != NULL) && (def->ssa_temp_index() >= 0)) { |
| 3047 const intptr_t vreg = def->ssa_temp_index(); | 2983 const intptr_t vreg = def->ssa_temp_index(); |
| 3048 value_representations_[vreg] = | 2984 value_representations_[vreg] = |
| 3049 RepresentationForRange(def->representation()); | 2985 RepresentationForRange(def->representation()); |
| 3050 if (def->HasPairRepresentation()) { | 2986 if (def->HasPairRepresentation()) { |
| 3051 value_representations_[ToSecondPairVreg(vreg)] = | 2987 value_representations_[ToSecondPairVreg(vreg)] = |
| 3052 RepresentationForRange(def->representation()); | 2988 RepresentationForRange(def->representation()); |
| 3053 } | 2989 } |
| 3054 } | 2990 } |
| 3055 } | 2991 } |
| 3056 } | 2992 } |
| 3057 } | 2993 } |
| 3058 | 2994 |
| 3059 | 2995 |
| 3060 void FlowGraphAllocator::AllocateRegisters() { | 2996 void FlowGraphAllocator::AllocateRegisters() { |
| 3061 CollectRepresentations(); | 2997 CollectRepresentations(); |
| 3062 | 2998 |
| (...skipping 20 matching lines...) |
| 3083 function.ToFullyQualifiedCString()); | 3019 function.ToFullyQualifiedCString()); |
| 3084 if (FLAG_support_il_printer) { | 3020 if (FLAG_support_il_printer) { |
| 3085 #ifndef PRODUCT | 3021 #ifndef PRODUCT |
| 3086 FlowGraphPrinter printer(flow_graph_, true); | 3022 FlowGraphPrinter printer(flow_graph_, true); |
| 3087 printer.PrintBlocks(); | 3023 printer.PrintBlocks(); |
| 3088 #endif | 3024 #endif |
| 3089 } | 3025 } |
| 3090 THR_Print("----------------------------------------------\n"); | 3026 THR_Print("----------------------------------------------\n"); |
| 3091 } | 3027 } |
| 3092 | 3028 |
| 3093 PrepareForAllocation(Location::kRegister, | 3029 PrepareForAllocation(Location::kRegister, kNumberOfCpuRegisters, |
| 3094 kNumberOfCpuRegisters, | 3030 unallocated_cpu_, cpu_regs_, blocked_cpu_registers_); |
| 3095 unallocated_cpu_, | |
| 3096 cpu_regs_, | |
| 3097 blocked_cpu_registers_); | |
| 3098 AllocateUnallocatedRanges(); | 3031 AllocateUnallocatedRanges(); |
| 3099 #if defined(TARGET_ARCH_DBC) | 3032 #if defined(TARGET_ARCH_DBC) |
| 3100 const intptr_t last_used_cpu_register = last_used_register_; | 3033 const intptr_t last_used_cpu_register = last_used_register_; |
| 3101 last_used_register_ = -1; | 3034 last_used_register_ = -1; |
| 3102 #endif | 3035 #endif |
| 3103 | 3036 |
| 3104 cpu_spill_slot_count_ = spill_slots_.length(); | 3037 cpu_spill_slot_count_ = spill_slots_.length(); |
| 3105 spill_slots_.Clear(); | 3038 spill_slots_.Clear(); |
| 3106 quad_spill_slots_.Clear(); | 3039 quad_spill_slots_.Clear(); |
| 3107 untagged_spill_slots_.Clear(); | 3040 untagged_spill_slots_.Clear(); |
| 3108 | 3041 |
| 3109 PrepareForAllocation(Location::kFpuRegister, | 3042 PrepareForAllocation(Location::kFpuRegister, kNumberOfFpuRegisters, |
| 3110 kNumberOfFpuRegisters, | 3043 unallocated_xmm_, fpu_regs_, blocked_fpu_registers_); |
| 3111 unallocated_xmm_, | |
| 3112 fpu_regs_, | |
| 3113 blocked_fpu_registers_); | |
| 3114 #if defined(TARGET_ARCH_DBC) | 3044 #if defined(TARGET_ARCH_DBC) |
| 3115 // For DBC all registers should have been allocated in the first pass. | 3045 // For DBC all registers should have been allocated in the first pass. |
| 3116 ASSERT(unallocated_.is_empty()); | 3046 ASSERT(unallocated_.is_empty()); |
| 3117 #endif | 3047 #endif |
| 3118 | 3048 |
| 3119 AllocateUnallocatedRanges(); | 3049 AllocateUnallocatedRanges(); |
| 3120 #if defined(TARGET_ARCH_DBC) | 3050 #if defined(TARGET_ARCH_DBC) |
| 3121 const intptr_t last_used_fpu_register = last_used_register_; | 3051 const intptr_t last_used_fpu_register = last_used_register_; |
| 3122 ASSERT(last_used_fpu_register == -1); // Not supported right now. | 3052 ASSERT(last_used_fpu_register == -1); // Not supported right now. |
| 3123 #endif | 3053 #endif |
| 3124 | 3054 |
| 3125 ResolveControlFlow(); | 3055 ResolveControlFlow(); |
| 3126 | 3056 |
| 3127 GraphEntryInstr* entry = block_order_[0]->AsGraphEntry(); | 3057 GraphEntryInstr* entry = block_order_[0]->AsGraphEntry(); |
| 3128 ASSERT(entry != NULL); | 3058 ASSERT(entry != NULL); |
| 3129 intptr_t double_spill_slot_count = spill_slots_.length() * kDoubleSpillFactor; | 3059 intptr_t double_spill_slot_count = spill_slots_.length() * kDoubleSpillFactor; |
| 3130 entry->set_spill_slot_count(cpu_spill_slot_count_ + double_spill_slot_count); | 3060 entry->set_spill_slot_count(cpu_spill_slot_count_ + double_spill_slot_count); |
| 3131 | 3061 |
| 3132 #if defined(TARGET_ARCH_DBC) | 3062 #if defined(TARGET_ARCH_DBC) |
| 3133 // Spilling is unsupported on DBC. | 3063 // Spilling is unsupported on DBC. |
| 3134 if (entry->spill_slot_count() != 0) { | 3064 if (entry->spill_slot_count() != 0) { |
| 3135 UNREACHABLE(); | 3065 UNREACHABLE(); |
| 3136 } | 3066 } |
| 3137 | 3067 |
| 3138 // We store the number of used DBC registers in the spill slot count to | 3068 // We store the number of used DBC registers in the spill slot count to |
| 3139 // avoid introducing a separate field. It has roughly the same meaning: | 3069 // avoid introducing a separate field. It has roughly the same meaning: |
| 3140 // the number of used registers determines how big a frame to reserve for | 3070 // the number of used registers determines how big a frame to reserve for |
| 3141 // this function on the DBC stack. | 3071 // this function on the DBC stack. |
| 3142 entry->set_spill_slot_count(Utils::Maximum((last_used_cpu_register + 1) + | 3072 entry->set_spill_slot_count(Utils::Maximum( |
| 3143 (last_used_fpu_register + 1), | 3073 (last_used_cpu_register + 1) + (last_used_fpu_register + 1), |
| 3144 flow_graph_.num_copied_params())); | 3074 flow_graph_.num_copied_params())); |
| 3145 #endif | 3075 #endif |
| 3146 | 3076 |
| 3147 if (FLAG_print_ssa_liveranges) { | 3077 if (FLAG_print_ssa_liveranges) { |
| 3148 const Function& function = flow_graph_.function(); | 3078 const Function& function = flow_graph_.function(); |
| 3149 | 3079 |
| 3150 THR_Print("-- [after ssa allocator] ranges [%s] ---------\n", | 3080 THR_Print("-- [after ssa allocator] ranges [%s] ---------\n", |
| 3151 function.ToFullyQualifiedCString()); | 3081 function.ToFullyQualifiedCString()); |
| 3152 PrintLiveRanges(); | 3082 PrintLiveRanges(); |
| 3153 THR_Print("----------------------------------------------\n"); | 3083 THR_Print("----------------------------------------------\n"); |
| 3154 | 3084 |
| 3155 THR_Print("-- [after ssa allocator] ir [%s] -------------\n", | 3085 THR_Print("-- [after ssa allocator] ir [%s] -------------\n", |
| 3156 function.ToFullyQualifiedCString()); | 3086 function.ToFullyQualifiedCString()); |
| 3157 if (FLAG_support_il_printer) { | 3087 if (FLAG_support_il_printer) { |
| 3158 #ifndef PRODUCT | 3088 #ifndef PRODUCT |
| 3159 FlowGraphPrinter printer(flow_graph_, true); | 3089 FlowGraphPrinter printer(flow_graph_, true); |
| 3160 printer.PrintBlocks(); | 3090 printer.PrintBlocks(); |
| 3161 #endif | 3091 #endif |
| 3162 } | 3092 } |
| 3163 THR_Print("----------------------------------------------\n"); | 3093 THR_Print("----------------------------------------------\n"); |
| 3164 } | 3094 } |
| 3165 } | 3095 } |
| 3166 | 3096 |
| 3167 | 3097 |
| 3168 } // namespace dart | 3098 } // namespace dart |
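For readers skimming the diff: the first loop of ResolveControlFlow above connects the pieces a live range was split into during allocation. Below is a minimal, self-contained sketch of that idea; Loc, Piece, EmitMoveAt, IsBlockBoundary and ConnectSiblings are hypothetical names used only for illustration (the VM uses LiveRange, Location, AddMoveAt and IsBlockEntry), and the simplification drops the real pass's extra check that skips moves whose target is the range's own spill slot.

#include <cstdio>

// Illustrative stand-ins only; not the Dart VM's actual types or API.
struct Loc {
  int index = -1;  // e.g. a register number or a spill slot index
  bool Equals(const Loc& other) const { return index == other.index; }
};

struct Piece {
  int start = 0;                  // position where this piece begins
  int end = 0;                    // position where this piece ends
  Loc location;                   // location assigned by the allocator
  Piece* next_sibling = nullptr;  // next piece of the same split live range
};

// Stub: the real allocator records a ParallelMove at this position.
static void EmitMoveAt(int pos, const Loc& to, const Loc& from) {
  std::printf("move @%d: loc%d -> loc%d\n", pos, from.index, to.index);
}

// Stub: location changes across block boundaries are handled by the
// separate predecessor/successor resolution loop, not here.
static bool IsBlockBoundary(int pos) { (void)pos; return false; }

// Walk the chain of pieces a split live range was divided into; whenever
// two adjacent pieces meet inside a block but were assigned different
// locations, connect them with a move at the start of the later piece.
static void ConnectSiblings(Piece* piece) {
  while (piece->next_sibling != nullptr) {
    Piece* sibling = piece->next_sibling;
    if (piece->end == sibling->start &&
        !piece->location.Equals(sibling->location) &&
        !IsBlockBoundary(piece->end)) {
      EmitMoveAt(sibling->start, sibling->location, piece->location);
    }
    piece = sibling;
  }
}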