| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/flow_graph.h" | 5 #include "vm/flow_graph.h" |
| 6 | 6 |
| 7 #include "vm/bit_vector.h" | 7 #include "vm/bit_vector.h" |
| 8 #include "vm/flow_graph_builder.h" | 8 #include "vm/flow_graph_builder.h" |
| 9 #include "vm/intermediate_language.h" | 9 #include "vm/intermediate_language.h" |
| 10 #include "vm/growable_array.h" | 10 #include "vm/growable_array.h" |
| 11 #include "vm/object_store.h" |
| 11 #include "vm/report.h" | 12 #include "vm/report.h" |
| 12 | 13 |
| 13 namespace dart { | 14 namespace dart { |
| 14 | 15 |
| 15 DECLARE_FLAG(bool, reorder_basic_blocks); | 16 DECLARE_FLAG(bool, reorder_basic_blocks); |
| 16 DECLARE_FLAG(bool, trace_optimization); | 17 DECLARE_FLAG(bool, trace_optimization); |
| 17 DECLARE_FLAG(bool, verify_compiler); | 18 DECLARE_FLAG(bool, verify_compiler); |
| 18 | 19 |
| 19 | 20 |
| 20 FlowGraph::FlowGraph(const FlowGraphBuilder& builder, | 21 FlowGraph::FlowGraph(const FlowGraphBuilder& builder, |
| 21 GraphEntryInstr* graph_entry, | 22 GraphEntryInstr* graph_entry, |
| 22 intptr_t max_block_id) | 23 intptr_t max_block_id) |
| 23 : isolate_(Isolate::Current()), | 24 : isolate_(Isolate::Current()), |
| 24 parent_(), | 25 parent_(), |
| 25 current_ssa_temp_index_(0), | 26 current_ssa_temp_index_(0), |
| 26 max_block_id_(max_block_id), | 27 max_block_id_(max_block_id), |
| 27 builder_(builder), | 28 builder_(builder), |
| 28 parsed_function_(*builder.parsed_function()), | 29 parsed_function_(*builder.parsed_function()), |
| 29 num_copied_params_(builder.num_copied_params()), | 30 num_copied_params_(builder.num_copied_params()), |
| 30 num_non_copied_params_(builder.num_non_copied_params()), | 31 num_non_copied_params_(builder.num_non_copied_params()), |
| 31 num_stack_locals_(builder.num_stack_locals()), | 32 num_stack_locals_(builder.num_stack_locals()), |
| 32 graph_entry_(graph_entry), | 33 graph_entry_(graph_entry), |
| 33 preorder_(), | 34 preorder_(), |
| 34 postorder_(), | 35 postorder_(), |
| 35 reverse_postorder_(), | 36 reverse_postorder_(), |
| 36 optimized_block_order_(), | 37 optimized_block_order_(), |
| 37 constant_null_(NULL), | 38 constant_null_(NULL), |
| 38 constant_dead_(NULL), | 39 constant_dead_(NULL), |
| | 40 constant_empty_context_(NULL), |
| 39 block_effects_(NULL), | 41 block_effects_(NULL), |
| 40 licm_allowed_(true), | 42 licm_allowed_(true), |
| 41 loop_headers_(NULL), | 43 loop_headers_(NULL), |
| 42 loop_invariant_loads_(NULL), | 44 loop_invariant_loads_(NULL), |
| 43 guarded_fields_(builder.guarded_fields()), | 45 guarded_fields_(builder.guarded_fields()), |
| 44 deferred_prefixes_(builder.deferred_prefixes()), | 46 deferred_prefixes_(builder.deferred_prefixes()), |
| 45 captured_parameters_( | 47 captured_parameters_( |
| 46 new(isolate_) BitVector(isolate_, variable_count())) { | 48 new(isolate_) BitVector(isolate_, variable_count())) { |
| 47 DiscoverBlocks(); | 49 DiscoverBlocks(); |
| 48 } | 50 } |
| (...skipping 409 matching lines...) |
| 458 } | 460 } |
| 459 assigned_vars_.Add(kill); | 461 assigned_vars_.Add(kill); |
| 460 } | 462 } |
| 461 | 463 |
| 462 return assigned_vars_; | 464 return assigned_vars_; |
| 463 } | 465 } |
| 464 | 466 |
| 465 // Returns true if the value set by the given store reaches any load from the | 467 // Returns true if the value set by the given store reaches any load from the |
| 466 // same local variable. | 468 // same local variable. |
| 467 bool IsStoreAlive(BlockEntryInstr* block, StoreLocalInstr* store) { | 469 bool IsStoreAlive(BlockEntryInstr* block, StoreLocalInstr* store) { |
| | 470 if (store->local().Equals(*flow_graph_->CurrentContextVar())) { |
| | 471 return true; |
| | 472 } |
| | 473 |
| 468 if (store->is_dead()) { | 474 if (store->is_dead()) { |
| 469 return false; | 475 return false; |
| 470 } | 476 } |
| 471 | |
| 472 if (store->is_last()) { | 477 if (store->is_last()) { |
| 473 const intptr_t index = store->local().BitIndexIn(num_non_copied_params_); | 478 const intptr_t index = store->local().BitIndexIn(num_non_copied_params_); |
| 474 return GetLiveOutSet(block)->Contains(index); | 479 return GetLiveOutSet(block)->Contains(index); |
| 475 } | 480 } |
| 476 | 481 |
| 477 return true; | 482 return true; |
| 478 } | 483 } |
| 479 | 484 |
| 480 // Returns true if the given load is the last for the local and the value | 485 // Returns true if the given load is the last for the local and the value |
| 481 // of the local will not flow into another one. | 486 // of the local will not flow into another one. |
| 482 bool IsLastLoad(BlockEntryInstr* block, LoadLocalInstr* load) { | 487 bool IsLastLoad(BlockEntryInstr* block, LoadLocalInstr* load) { |
| | 488 if (load->local().Equals(*flow_graph_->CurrentContextVar())) { |
| | 489 return false; |
| | 490 } |
| 483 const intptr_t index = load->local().BitIndexIn(num_non_copied_params_); | 491 const intptr_t index = load->local().BitIndexIn(num_non_copied_params_); |
| 484 return load->is_last() && !GetLiveOutSet(block)->Contains(index); | 492 return load->is_last() && !GetLiveOutSet(block)->Contains(index); |
| 485 } | 493 } |
| 486 | 494 |
| 487 private: | 495 private: |
| 488 virtual void ComputeInitialSets(); | 496 virtual void ComputeInitialSets(); |
| 489 | 497 |
| 490 const FlowGraph* flow_graph_; | 498 const FlowGraph* flow_graph_; |
| 491 const intptr_t num_non_copied_params_; | 499 const intptr_t num_non_copied_params_; |
| 492 GrowableArray<BitVector*> assigned_vars_; | 500 GrowableArray<BitVector*> assigned_vars_; |
| (...skipping 65 matching lines...) |
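The IsStoreAlive/IsLastLoad helpers in the hunk above let the renaming pass drop redundant StoreLocal/LoadLocal instructions using per-block liveness; the new CurrentContextVar checks pin the context variable so it always survives for deoptimization. Stripped of the VM types (and of the context special case), the underlying liveness test is roughly the following sketch; the struct names here are illustrative stand-ins, not the VM's classes.

#include <vector>

// Illustrative stand-ins for the VM's BitVector and StoreLocal bookkeeping.
struct LiveOutSet {
  std::vector<bool> bits;                        // One bit per local variable.
  bool Contains(int i) const { return bits[i]; }
};

struct Store {
  int var_index;   // Bit index of the assigned local.
  bool is_last;    // Last store to this local within its block.
  bool is_dead;    // Already proven unread.
};

// A store is "alive" if its value may still reach a later load: either it is
// not the last store to the variable in the block, or the variable is in the
// block's live-out set (some successor still reads it).
bool IsStoreAlive(const LiveOutSet& live_out, const Store& store) {
  if (store.is_dead) return false;
  if (store.is_last) return live_out.Contains(store.var_index);
  return true;
}

In other words, a store that is the last write to its variable within a block is dead exactly when the variable is absent from that block's live-out set.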
| 558 intptr_t next_virtual_register_number, | 566 intptr_t next_virtual_register_number, |
| 559 ZoneGrowableArray<Definition*>* inlining_parameters) { | 567 ZoneGrowableArray<Definition*>* inlining_parameters) { |
| 560 ASSERT((next_virtual_register_number == 0) || (inlining_parameters != NULL)); | 568 ASSERT((next_virtual_register_number == 0) || (inlining_parameters != NULL)); |
| 561 current_ssa_temp_index_ = next_virtual_register_number; | 569 current_ssa_temp_index_ = next_virtual_register_number; |
| 562 GrowableArray<BitVector*> dominance_frontier; | 570 GrowableArray<BitVector*> dominance_frontier; |
| 563 ComputeDominators(&dominance_frontier); | 571 ComputeDominators(&dominance_frontier); |
| 564 | 572 |
| 565 VariableLivenessAnalysis variable_liveness(this); | 573 VariableLivenessAnalysis variable_liveness(this); |
| 566 variable_liveness.Analyze(); | 574 variable_liveness.Analyze(); |
| 567 | 575 |
| | 576 GrowableArray<PhiInstr*> live_phis; |
| | 577 |
| 568 InsertPhis(preorder_, | 578 InsertPhis(preorder_, |
| 569 variable_liveness.ComputeAssignedVars(), | 579 variable_liveness.ComputeAssignedVars(), |
| 570 dominance_frontier); | 580 dominance_frontier, |
| | 581 &live_phis); |
| 571 | 582 |
| 572 GrowableArray<PhiInstr*> live_phis; | |
| 573 | 583 |
| 574 // Rename uses to reference inserted phis where appropriate. | 584 // Rename uses to reference inserted phis where appropriate. |
| 575 // Collect phis that reach a non-environment use. | 585 // Collect phis that reach a non-environment use. |
| 576 Rename(&live_phis, &variable_liveness, inlining_parameters); | 586 Rename(&live_phis, &variable_liveness, inlining_parameters); |
| 577 | 587 |
| 578 // Propagate alive mark transitively from alive phis and then remove | 588 // Propagate alive mark transitively from alive phis and then remove |
| 579 // non-live ones. | 589 // non-live ones. |
| 580 RemoveDeadPhis(&live_phis); | 590 RemoveDeadPhis(&live_phis); |
| 581 } | 591 } |
| 582 | 592 |
| (...skipping 112 matching lines...) |
| 695 (*label)[current_index] = | 705 (*label)[current_index] = |
| 696 Utils::Minimum((*label)[current_index], (*label)[next_index]); | 706 Utils::Minimum((*label)[current_index], (*label)[next_index]); |
| 697 (*parent)[current_index] = (*parent)[next_index]; | 707 (*parent)[current_index] = (*parent)[next_index]; |
| 698 } | 708 } |
| 699 } | 709 } |
| 700 | 710 |
| 701 | 711 |
| 702 void FlowGraph::InsertPhis( | 712 void FlowGraph::InsertPhis( |
| 703 const GrowableArray<BlockEntryInstr*>& preorder, | 713 const GrowableArray<BlockEntryInstr*>& preorder, |
| 704 const GrowableArray<BitVector*>& assigned_vars, | 714 const GrowableArray<BitVector*>& assigned_vars, |
| 705 const GrowableArray<BitVector*>& dom_frontier) { | 715 const GrowableArray<BitVector*>& dom_frontier, |
| | 716 GrowableArray<PhiInstr*>* live_phis) { |
| 706 const intptr_t block_count = preorder.length(); | 717 const intptr_t block_count = preorder.length(); |
| 707 // Map preorder block number to the highest variable index that has a phi | 718 // Map preorder block number to the highest variable index that has a phi |
| 708 // in that block. Use it to avoid inserting multiple phis for the same | 719 // in that block. Use it to avoid inserting multiple phis for the same |
| 709 // variable. | 720 // variable. |
| 710 GrowableArray<intptr_t> has_already(block_count); | 721 GrowableArray<intptr_t> has_already(block_count); |
| 711 // Map preorder block number to the highest variable index for which the | 722 // Map preorder block number to the highest variable index for which the |
| 712 // block went on the worklist. Use it to avoid adding the same block to | 723 // block went on the worklist. Use it to avoid adding the same block to |
| 713 // the worklist more than once for the same variable. | 724 // the worklist more than once for the same variable. |
| 714 GrowableArray<intptr_t> work(block_count); | 725 GrowableArray<intptr_t> work(block_count); |
| 715 | 726 |
| 716 // Initialize has_already and work. | 727 // Initialize has_already and work. |
| 717 for (intptr_t block_index = 0; block_index < block_count; ++block_index) { | 728 for (intptr_t block_index = 0; block_index < block_count; ++block_index) { |
| 718 has_already.Add(-1); | 729 has_already.Add(-1); |
| 719 work.Add(-1); | 730 work.Add(-1); |
| 720 } | 731 } |
| 721 | 732 |
| 722 // Insert phis for each variable in turn. | 733 // Insert phis for each variable in turn. |
| 723 GrowableArray<BlockEntryInstr*> worklist; | 734 GrowableArray<BlockEntryInstr*> worklist; |
| 724 for (intptr_t var_index = 0; var_index < variable_count(); ++var_index) { | 735 for (intptr_t var_index = 0; var_index < variable_count(); ++var_index) { |
| | 736 const bool always_live = var_index == CurrentContextEnvIndex(); |
| 725 // Add to the worklist each block containing an assignment. | 737 // Add to the worklist each block containing an assignment. |
| 726 for (intptr_t block_index = 0; block_index < block_count; ++block_index) { | 738 for (intptr_t block_index = 0; block_index < block_count; ++block_index) { |
| 727 if (assigned_vars[block_index]->Contains(var_index)) { | 739 if (assigned_vars[block_index]->Contains(var_index)) { |
| 728 work[block_index] = var_index; | 740 work[block_index] = var_index; |
| 729 worklist.Add(preorder[block_index]); | 741 worklist.Add(preorder[block_index]); |
| 730 } | 742 } |
| 731 } | 743 } |
| 732 | 744 |
| 733 while (!worklist.is_empty()) { | 745 while (!worklist.is_empty()) { |
| 734 BlockEntryInstr* current = worklist.RemoveLast(); | 746 BlockEntryInstr* current = worklist.RemoveLast(); |
| 735 // Ensure a phi for each block in the dominance frontier of current. | 747 // Ensure a phi for each block in the dominance frontier of current. |
| 736 for (BitVector::Iterator it(dom_frontier[current->preorder_number()]); | 748 for (BitVector::Iterator it(dom_frontier[current->preorder_number()]); |
| 737 !it.Done(); | 749 !it.Done(); |
| 738 it.Advance()) { | 750 it.Advance()) { |
| 739 int index = it.Current(); | 751 int index = it.Current(); |
| 740 if (has_already[index] < var_index) { | 752 if (has_already[index] < var_index) { |
| 741 BlockEntryInstr* block = preorder[index]; | 753 BlockEntryInstr* block = preorder[index]; |
| 742 ASSERT(block->IsJoinEntry()); | 754 ASSERT(block->IsJoinEntry()); |
| 743 block->AsJoinEntry()->InsertPhi(var_index, variable_count()); | 755 PhiInstr* phi = block->AsJoinEntry()->InsertPhi(var_index, |
| | 756 variable_count()); |
| | 757 if (always_live) { |
| | 758 phi->mark_alive(); |
| | 759 live_phis->Add(phi); |
| | 760 } |
| 744 has_already[index] = var_index; | 761 has_already[index] = var_index; |
| 745 if (work[index] < var_index) { | 762 if (work[index] < var_index) { |
| 746 work[index] = var_index; | 763 work[index] = var_index; |
| 747 worklist.Add(block); | 764 worklist.Add(block); |
| 748 } | 765 } |
| 749 } | 766 } |
| 750 } | 767 } |
| 751 } | 768 } |
| 752 } | 769 } |
| 753 } | 770 } |
| 754 | 771 |
| 755 | 772 |
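InsertPhis above is the classic iterated dominance frontier placement (Cytron et al.): for each variable, seed a worklist with every block that assigns it, insert a phi at each join in the dominance frontier of a worklist block, and requeue that join because the new phi is itself an assignment; the new live_phis out-parameter additionally marks always-live (context) phis alive up front. The core per-variable loop, reduced to plain arrays with hypothetical names, looks roughly like this:

#include <vector>

// For one variable: given the blocks that assign it ('defs') and each block's
// dominance frontier ('df', indexed by block id), return the blocks that need
// a phi. Illustrative sketch only; the VM version shares its has_already/work
// arrays across all variables instead of reallocating them per variable.
std::vector<int> PlacePhisForVariable(const std::vector<int>& defs,
                                      const std::vector<std::vector<int>>& df,
                                      int block_count) {
  std::vector<bool> has_phi(block_count, false);
  std::vector<bool> on_worklist(block_count, false);
  std::vector<int> worklist;
  std::vector<int> phi_blocks;

  for (int b : defs) {                 // Seed with every assigning block.
    on_worklist[b] = true;
    worklist.push_back(b);
  }
  while (!worklist.empty()) {
    const int current = worklist.back();
    worklist.pop_back();
    for (int join : df[current]) {     // Each join in the dominance frontier
      if (!has_phi[join]) {            // needs a phi for this variable.
        has_phi[join] = true;
        phi_blocks.push_back(join);
        if (!on_worklist[join]) {      // The phi counts as a new assignment,
          on_worklist[join] = true;    // so the join may induce further phis.
          worklist.push_back(join);
        }
      }
    }
  }
  return phi_blocks;
}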
| 756 void FlowGraph::Rename(GrowableArray<PhiInstr*>* live_phis, | 773 void FlowGraph::Rename(GrowableArray<PhiInstr*>* live_phis, |
| 757 VariableLivenessAnalysis* variable_liveness, | 774 VariableLivenessAnalysis* variable_liveness, |
| 758 ZoneGrowableArray<Definition*>* inlining_parameters) { | 775 ZoneGrowableArray<Definition*>* inlining_parameters) { |
| 759 GraphEntryInstr* entry = graph_entry(); | 776 GraphEntryInstr* entry = graph_entry(); |
| 760 | 777 |
| 761 // Initial renaming environment. | 778 // Initial renaming environment. |
| 762 GrowableArray<Definition*> env(variable_count()); | 779 GrowableArray<Definition*> env(variable_count()); |
| 763 | 780 |
| 764 // Add global constants to the initial definitions. | 781 // Add global constants to the initial definitions. |
| 765 constant_null_ = GetConstant(Object::ZoneHandle()); | 782 constant_null_ = GetConstant(Object::ZoneHandle()); |
| 766 constant_dead_ = GetConstant(Symbols::OptimizedOut()); | 783 constant_dead_ = GetConstant(Symbols::OptimizedOut()); |
| | 784 constant_empty_context_ = GetConstant(Context::Handle( |
| | 785 isolate()->object_store()->empty_context())); |
| 767 | 786 |
| 768 // Add parameters to the initial definitions and renaming environment. | 787 // Add parameters to the initial definitions and renaming environment. |
| 769 if (inlining_parameters != NULL) { | 788 if (inlining_parameters != NULL) { |
| 770 // Use known parameters. | 789 // Use known parameters. |
| 771 ASSERT(parameter_count() == inlining_parameters->length()); | 790 ASSERT(parameter_count() == inlining_parameters->length()); |
| 772 for (intptr_t i = 0; i < parameter_count(); ++i) { | 791 for (intptr_t i = 0; i < parameter_count(); ++i) { |
| 773 Definition* defn = (*inlining_parameters)[i]; | 792 Definition* defn = (*inlining_parameters)[i]; |
| 774 AllocateSSAIndexes(defn); | 793 AllocateSSAIndexes(defn); |
| 775 AddToInitialDefinitions(defn); | 794 AddToInitialDefinitions(defn); |
| 776 env.Add(defn); | 795 env.Add(defn); |
| 777 } | 796 } |
| 778 } else { | 797 } else { |
| 779 // Create new parameters. For functions compiled for OSR, the locals | 798 // Create new parameters. For functions compiled for OSR, the locals |
| 780 // are unknown and so treated like parameters. | 799 // are unknown and so treated like parameters. |
| 781 intptr_t count = IsCompiledForOsr() ? variable_count() : parameter_count(); | 800 intptr_t count = IsCompiledForOsr() ? variable_count() : parameter_count(); |
| 782 for (intptr_t i = 0; i < count; ++i) { | 801 for (intptr_t i = 0; i < count; ++i) { |
| 783 ParameterInstr* param = new(isolate()) ParameterInstr(i, entry); | 802 ParameterInstr* param = new(isolate()) ParameterInstr(i, entry); |
| 784 param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. | 803 param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. |
| 785 AddToInitialDefinitions(param); | 804 AddToInitialDefinitions(param); |
| 786 env.Add(param); | 805 env.Add(param); |
| 787 } | 806 } |
| 788 } | 807 } |
| 789 | 808 |
| 790 // Initialize all locals with #null in the renaming environment. For OSR, | 809 // Initialize all locals in the renaming environment. For OSR, the locals |
| 791 // the locals have already been handled as parameters. | 810 // have already been handled as parameters. |
| 792 if (!IsCompiledForOsr()) { | 811 if (!IsCompiledForOsr()) { |
| 793 for (intptr_t i = parameter_count(); i < variable_count(); ++i) { | 812 for (intptr_t i = parameter_count(); i < variable_count(); ++i) { |
| 794 env.Add(constant_null()); | 813 if (i == CurrentContextEnvIndex()) { |
| | 814 if (parsed_function().function().IsClosureFunction()) { |
| | 815 CurrentContextInstr* context = new CurrentContextInstr(); |
| | 816 context->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. |
| | 817 AddToInitialDefinitions(context); |
| | 818 env.Add(context); |
| | 819 } else { |
| | 820 env.Add(constant_empty_context()); |
| | 821 } |
| | 822 } else { |
| | 823 env.Add(constant_null()); |
| | 824 } |
| 795 } | 825 } |
| 796 } | 826 } |
| 797 | 827 |
| 798 if (entry->SuccessorCount() > 1) { | 828 if (entry->SuccessorCount() > 1) { |
| 799 // Functions with try-catch have a fixed area of stack slots reserved | 829 // Functions with try-catch have a fixed area of stack slots reserved |
| 800 // so that all local variables are stored at a known location when | 830 // so that all local variables are stored at a known location when |
| 801 // on entry to the catch. | 831 // on entry to the catch. |
| 802 entry->set_fixed_slot_count(num_stack_locals() + num_copied_params()); | 832 entry->set_fixed_slot_count(num_stack_locals() + num_copied_params()); |
| 803 } | 833 } |
| 804 RenameRecursive(entry, &env, live_phis, variable_liveness); | 834 RenameRecursive(entry, &env, live_phis, variable_liveness); |
| 805 } | 835 } |
| 806 | 836 |
| 807 | 837 |
| 808 void FlowGraph::AttachEnvironment(Instruction* instr, | 838 void FlowGraph::AttachEnvironment(Instruction* instr, |
| 809 GrowableArray<Definition*>* env) { | 839 GrowableArray<Definition*>* env) { |
| 810 Environment* deopt_env = | 840 Environment* deopt_env = |
| 811 Environment::From(isolate(), | 841 Environment::From(isolate(), |
| 812 *env, | 842 *env, |
| 813 num_non_copied_params_, | 843 num_non_copied_params_, |
| 814 &parsed_function_); | 844 &parsed_function_); |
| 815 // TODO(fschneider): Add predicates CanEagerlyDeoptimize and | |
| 816 // CanLazilyDeoptimize to instructions to generally deal with instructions | |
| 817 // that have pushed arguments and input operands. | |
| 818 // Right now, closure calls are the only instructions that have both. They | |
| 819 // also don't have an eager deoptimziation point, so the environment attached | |
| 820 // here is only used for after the call. | |
| 821 if (instr->IsClosureCall()) { | 845 if (instr->IsClosureCall()) { |
| 822 deopt_env = deopt_env->DeepCopy(isolate(), | 846 deopt_env = deopt_env->DeepCopy(isolate(), |
| 823 deopt_env->Length() - instr->InputCount()); | 847 deopt_env->Length() - instr->InputCount()); |
| 824 } | 848 } |
| 825 instr->SetEnvironment(deopt_env); | 849 instr->SetEnvironment(deopt_env); |
| 826 for (Environment::DeepIterator it(deopt_env); !it.Done(); it.Advance()) { | 850 for (Environment::DeepIterator it(deopt_env); !it.Done(); it.Advance()) { |
| 827 Value* use = it.CurrentValue(); | 851 Value* use = it.CurrentValue(); |
| 828 use->definition()->AddEnvUse(use); | 852 use->definition()->AddEnvUse(use); |
| 829 } | 853 } |
| 830 if (instr->CanDeoptimize()) { | 854 if (instr->CanDeoptimize()) { |
| 831 instr->env()->set_deopt_id(instr->deopt_id()); | 855 instr->env()->set_deopt_id(instr->deopt_id()); |
| 832 } | 856 } |
| 833 } | 857 } |
| 834 | 858 |
| 835 | 859 |
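AttachEnvironment snapshots the current renaming environment as the instruction's deoptimization environment and registers an environment use on every contributing definition. Closure calls carry their arguments both as pushed arguments and as input operands, so their copy is truncated to describe only the state after the call. In isolation that truncation is just dropping the trailing input slots, roughly as below (hypothetical types, not the VM's Environment API):

#include <cstddef>
#include <vector>

struct Definition;  // Opaque SSA definition.

// A deoptimization environment: an ordered snapshot of SSA values, one per
// environment slot (parameters, locals, expression stack).
using DeoptEnv = std::vector<Definition*>;

// For an instruction whose trailing environment slots duplicate its own
// inputs (as with closure calls), keep only the prefix so the environment
// describes the frame state after the call returns.
DeoptEnv TruncateForCall(const DeoptEnv& env, std::size_t input_count) {
  // Assumes env.size() >= input_count.
  return DeoptEnv(env.begin(), env.begin() + (env.size() - input_count));
}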
| 836 void FlowGraph::RenameRecursive(BlockEntryInstr* block_entry, | 860 void FlowGraph::RenameRecursive(BlockEntryInstr* block_entry, |
| 837 GrowableArray<Definition*>* env, | 861 GrowableArray<Definition*>* env, |
| 838 GrowableArray<PhiInstr*>* live_phis, | 862 GrowableArray<PhiInstr*>* live_phis, |
| 839 VariableLivenessAnalysis* variable_liveness) { | 863 VariableLivenessAnalysis* variable_liveness) { |
| 840 // 1. Process phis first. | 864 // 1. Process phis first. |
| 841 if (block_entry->IsJoinEntry()) { | 865 if (block_entry->IsJoinEntry()) { |
| 842 JoinEntryInstr* join = block_entry->AsJoinEntry(); | 866 JoinEntryInstr* join = block_entry->AsJoinEntry(); |
| 843 if (join->phis() != NULL) { | 867 if (join->phis() != NULL) { |
| 844 for (intptr_t i = 0; i < join->phis()->length(); ++i) { | 868 for (intptr_t i = 0; i < join->phis()->length(); ++i) { |
| 845 PhiInstr* phi = (*join->phis())[i]; | 869 PhiInstr* phi = (*join->phis())[i]; |
| 846 if (phi != NULL) { | 870 if (phi != NULL) { |
| 847 (*env)[i] = phi; | 871 (*env)[i] = phi; |
| 848 phi->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. | 872 phi->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. |
| 849 if (block_entry->InsideTryBlock()) { | 873 if (block_entry->InsideTryBlock() && !phi->is_alive()) { |
| 850 // This is a safe approximation. Inside try{} all locals are | 874 // This is a safe approximation. Inside try{} all locals are |
| 851 // used at every call implicitly, so we mark all phis as live | 875 // used at every call implicitly, so we mark all phis as live |
| 852 // from the start. | 876 // from the start. |
| 853 // TODO(fschneider): Improve this approximation to eliminate | 877 // TODO(fschneider): Improve this approximation to eliminate |
| 854 // more redundant phis. | 878 // more redundant phis. |
| 855 phi->mark_alive(); | 879 phi->mark_alive(); |
| 856 live_phis->Add(phi); | 880 live_phis->Add(phi); |
| 857 } | 881 } |
| 858 } | 882 } |
| 859 } | 883 } |
| 860 } | 884 } |
| 861 } else if (block_entry->IsCatchBlockEntry()) { | 885 } else if (block_entry->IsCatchBlockEntry()) { |
| 862 // Add real definitions for all locals and parameters. | 886 // Add real definitions for all locals and parameters. |
| 863 for (intptr_t i = 0; i < env->length(); ++i) { | 887 for (intptr_t i = 0; i < env->length(); ++i) { |
| 864 ParameterInstr* param = new(isolate()) ParameterInstr(i, block_entry); | 888 ParameterInstr* param = new(isolate()) ParameterInstr(i, block_entry); |
| 865 param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. | 889 param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. |
| 866 (*env)[i] = param; | 890 (*env)[i] = param; |
| 867 block_entry->AsCatchBlockEntry()->initial_definitions()->Add(param); | 891 block_entry->AsCatchBlockEntry()->initial_definitions()->Add(param); |
| 868 } | 892 } |
| 869 } | 893 } |
| 870 | 894 |
| 871 // Prune non-live variables at block entry by replacing their environment | 895 // Prune non-live variables at block entry by replacing their environment |
| 872 // slots with null. | 896 // slots with null. |
| 873 BitVector* live_in = variable_liveness->GetLiveInSet(block_entry); | 897 BitVector* live_in = variable_liveness->GetLiveInSet(block_entry); |
| 874 for (intptr_t i = 0; i < variable_count(); i++) { | 898 for (intptr_t i = 0; i < variable_count(); i++) { |
| 875 if (!live_in->Contains(i)) { | 899 // TODO(fschneider): Make sure that live_in always contains the |
| | 900 // CurrentContext variable to avoid the special case here. |
| | 901 if (!live_in->Contains(i) && (i != CurrentContextEnvIndex())) { |
| 876 (*env)[i] = constant_dead(); | 902 (*env)[i] = constant_dead(); |
| 877 } | 903 } |
| 878 } | 904 } |
| 879 | 905 |
| 880 // Attach environment to the block entry. | 906 // Attach environment to the block entry. |
| 881 AttachEnvironment(block_entry, env); | 907 AttachEnvironment(block_entry, env); |
| 882 | 908 |
| 883 // 2. Process normal instructions. | 909 // 2. Process normal instructions. |
| 884 for (ForwardInstructionIterator it(block_entry); !it.Done(); it.Advance()) { | 910 for (ForwardInstructionIterator it(block_entry); !it.Done(); it.Advance()) { |
| 885 Instruction* current = it.Current(); | 911 Instruction* current = it.Current(); |
| (...skipping 422 matching lines...) |
| 1308 } | 1334 } |
| 1309 | 1335 |
| 1310 | 1336 |
| 1311 bool BlockEffects::IsSideEffectFreePath(BlockEntryInstr* from, | 1337 bool BlockEffects::IsSideEffectFreePath(BlockEntryInstr* from, |
| 1312 BlockEntryInstr* to) const { | 1338 BlockEntryInstr* to) const { |
| 1313 return available_at_[to->postorder_number()]->Contains( | 1339 return available_at_[to->postorder_number()]->Contains( |
| 1314 from->postorder_number()); | 1340 from->postorder_number()); |
| 1315 } | 1341 } |
| 1316 | 1342 |
| 1317 } // namespace dart | 1343 } // namespace dart |