OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/flow_graph.h" | 5 #include "vm/flow_graph.h" |
6 | 6 |
7 #include "vm/bit_vector.h" | 7 #include "vm/bit_vector.h" |
8 #include "vm/flow_graph_builder.h" | 8 #include "vm/flow_graph_builder.h" |
9 #include "vm/intermediate_language.h" | 9 #include "vm/intermediate_language.h" |
10 #include "vm/growable_array.h" | 10 #include "vm/growable_array.h" |
11 #include "vm/object_store.h" | 11 #include "vm/object_store.h" |
12 #include "vm/report.h" | 12 #include "vm/report.h" |
13 | 13 |
14 namespace dart { | 14 namespace dart { |
15 | 15 |
16 DEFINE_FLAG(bool, prune_dead_locals, true, "optimize dead locals away"); | 16 DEFINE_FLAG(bool, prune_dead_locals, true, "optimize dead locals away"); |
17 DECLARE_FLAG(bool, reorder_basic_blocks); | 17 DECLARE_FLAG(bool, reorder_basic_blocks); |
18 DECLARE_FLAG(bool, trace_optimization); | 18 DECLARE_FLAG(bool, trace_optimization); |
19 DECLARE_FLAG(bool, verify_compiler); | 19 DECLARE_FLAG(bool, verify_compiler); |
20 | 20 |
21 | 21 |
22 FlowGraph::FlowGraph(ParsedFunction* parsed_function, | 22 FlowGraph::FlowGraph(ParsedFunction* parsed_function, |
23 GraphEntryInstr* graph_entry, | 23 GraphEntryInstr* graph_entry, |
24 intptr_t max_block_id) | 24 intptr_t max_block_id) |
25 : isolate_(Isolate::Current()), | 25 : thread_(Thread::Current()), |
26 parent_(), | 26 parent_(), |
27 current_ssa_temp_index_(0), | 27 current_ssa_temp_index_(0), |
28 max_block_id_(max_block_id), | 28 max_block_id_(max_block_id), |
29 parsed_function_(parsed_function), | 29 parsed_function_(parsed_function), |
30 num_copied_params_(parsed_function->num_copied_params()), | 30 num_copied_params_(parsed_function->num_copied_params()), |
31 num_non_copied_params_(parsed_function->num_non_copied_params()), | 31 num_non_copied_params_(parsed_function->num_non_copied_params()), |
32 graph_entry_(graph_entry), | 32 graph_entry_(graph_entry), |
33 preorder_(), | 33 preorder_(), |
34 postorder_(), | 34 postorder_(), |
35 reverse_postorder_(), | 35 reverse_postorder_(), |
36 optimized_block_order_(), | 36 optimized_block_order_(), |
37 constant_null_(NULL), | 37 constant_null_(NULL), |
38 constant_dead_(NULL), | 38 constant_dead_(NULL), |
39 constant_empty_context_(NULL), | 39 constant_empty_context_(NULL), |
40 block_effects_(NULL), | 40 block_effects_(NULL), |
41 licm_allowed_(true), | 41 licm_allowed_(true), |
42 loop_headers_(NULL), | 42 loop_headers_(NULL), |
43 loop_invariant_loads_(NULL), | 43 loop_invariant_loads_(NULL), |
44 guarded_fields_(parsed_function->guarded_fields()), | 44 guarded_fields_(parsed_function->guarded_fields()), |
45 deferred_prefixes_(parsed_function->deferred_prefixes()), | 45 deferred_prefixes_(parsed_function->deferred_prefixes()), |
46 captured_parameters_( | 46 captured_parameters_(new(zone()) BitVector(zone(), variable_count())) { |
47 new(isolate_) BitVector(isolate_, variable_count())) { | |
48 DiscoverBlocks(); | 47 DiscoverBlocks(); |
49 } | 48 } |
50 | 49 |
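Note on the pattern changed in this constructor (and in every `new(isolate_)` -> `new(zone())` hunk below): compiler data structures move from isolate-keyed allocation to the current thread's zone, i.e. arena allocation released in bulk when the compilation zone is torn down. The sketch below is a minimal, self-contained illustration of that zone/placement-new idiom; the Zone class, operator new overload, and LoopInfo type here are simplified stand-ins, not the VM's actual classes.

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    // Toy arena ("zone"): objects are allocated into the zone and freed all at
    // once when the zone is destroyed; no individual deletes.
    class Zone {
     public:
      ~Zone() { for (void* p : chunks_) ::operator delete(p); }
      void* Allocate(size_t size) {
        void* p = ::operator new(size);
        chunks_.push_back(p);
        return p;
      }
     private:
      std::vector<void*> chunks_;
    };

    // Placement operator new mirroring the `new(zone()) T(zone(), ...)` idiom
    // introduced by this change.
    inline void* operator new(size_t size, Zone* zone) {
      return zone->Allocate(size);
    }
    inline void operator delete(void*, Zone*) noexcept {}  // used only if a ctor throws

    struct LoopInfo {  // illustrative payload, not a VM type
      LoopInfo(Zone* zone, int header) : header_block(header) { (void)zone; }
      int header_block;
    };

    int main() {
      Zone zone;
      LoopInfo* info = new (&zone) LoopInfo(&zone, /*header=*/3);
      std::printf("header=%d\n", info->header_block);
      return 0;  // everything allocated in `zone` is released here, in bulk
    }
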
51 | 50 |
52 void FlowGraph::AddToGuardedFields( | 51 void FlowGraph::AddToGuardedFields( |
53 ZoneGrowableArray<const Field*>* array, | 52 ZoneGrowableArray<const Field*>* array, |
54 const Field* field) { | 53 const Field* field) { |
55 if ((field->guarded_cid() == kDynamicCid) || | 54 if ((field->guarded_cid() == kDynamicCid) || |
56 (field->guarded_cid() == kIllegalCid)) { | 55 (field->guarded_cid() == kIllegalCid)) { |
57 return; | 56 return; |
(...skipping 33 matching lines...)
91 return ShouldReorderBlocks(parsed_function()->function(), is_optimized) | 90 return ShouldReorderBlocks(parsed_function()->function(), is_optimized) |
92 ? &optimized_block_order_ | 91 ? &optimized_block_order_ |
93 : &reverse_postorder_; | 92 : &reverse_postorder_; |
94 } | 93 } |
95 | 94 |
96 | 95 |
97 ConstantInstr* FlowGraph::GetConstant(const Object& object) { | 96 ConstantInstr* FlowGraph::GetConstant(const Object& object) { |
98 ConstantInstr* constant = constant_instr_pool_.Lookup(object); | 97 ConstantInstr* constant = constant_instr_pool_.Lookup(object); |
99 if (constant == NULL) { | 98 if (constant == NULL) { |
100 // Otherwise, allocate and add it to the pool. | 99 // Otherwise, allocate and add it to the pool. |
101 constant = new(isolate()) ConstantInstr( | 100 constant = new(zone()) ConstantInstr( |
102 Object::ZoneHandle(isolate(), object.raw())); | 101 Object::ZoneHandle(isolate(), object.raw())); |
103 constant->set_ssa_temp_index(alloc_ssa_temp_index()); | 102 constant->set_ssa_temp_index(alloc_ssa_temp_index()); |
104 | 103 |
105 AddToInitialDefinitions(constant); | 104 AddToInitialDefinitions(constant); |
106 constant_instr_pool_.Insert(constant); | 105 constant_instr_pool_.Insert(constant); |
107 } | 106 } |
108 return constant; | 107 return constant; |
109 } | 108 } |
110 | 109 |
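GetConstant above is a canonicalization cache: each distinct constant object gets exactly one ConstantInstr, allocated on the first miss, assigned an SSA temp index, and registered as an initial definition of the graph, so later lookups return the shared node. Below is a generic sketch of that lookup-or-create pattern; ConstantNode, ConstantPool, and the std::string key are simplifications for illustration, not the VM's pool.

    #include <cstdio>
    #include <string>
    #include <unordered_map>

    // Stand-in for ConstantInstr: one node per distinct constant value.
    struct ConstantNode {
      explicit ConstantNode(std::string v) : value(std::move(v)) {}
      std::string value;
      int ssa_temp_index = -1;
    };

    class ConstantPool {
     public:
      // Return the canonical node for `value`, creating it on the first miss.
      ConstantNode* GetConstant(const std::string& value) {
        auto it = pool_.find(value);
        if (it != pool_.end()) return it->second;
        ConstantNode* node = new ConstantNode(value);   // the VM zone-allocates here
        node->ssa_temp_index = next_ssa_temp_index_++;  // cf. alloc_ssa_temp_index()
        pool_.emplace(value, node);                     // cf. constant_instr_pool_.Insert()
        return node;
      }
     private:
      std::unordered_map<std::string, ConstantNode*> pool_;  // nodes leak; fine for a sketch
      int next_ssa_temp_index_ = 0;
    };

    int main() {
      ConstantPool pool;
      ConstantNode* a = pool.GetConstant("null");
      ConstantNode* b = pool.GetConstant("null");
      std::printf("shared: %d\n", a == b);  // prints 1: both lookups return the same node
      return 0;
    }
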
111 | 110 |
(...skipping 105 matching lines...)
217 | 216 |
218 // Block effects are using postorder numbering. Discard computed information. | 217 // Block effects are using postorder numbering. Discard computed information. |
219 block_effects_ = NULL; | 218 block_effects_ = NULL; |
220 loop_headers_ = NULL; | 219 loop_headers_ = NULL; |
221 loop_invariant_loads_ = NULL; | 220 loop_invariant_loads_ = NULL; |
222 } | 221 } |
223 | 222 |
224 | 223 |
225 void FlowGraph::MergeBlocks() { | 224 void FlowGraph::MergeBlocks() { |
226 bool changed = false; | 225 bool changed = false; |
227 BitVector* merged = new(isolate()) BitVector(isolate(), postorder().length()); | 226 BitVector* merged = new(zone()) BitVector(zone(), postorder().length()); |
228 for (BlockIterator block_it = reverse_postorder_iterator(); | 227 for (BlockIterator block_it = reverse_postorder_iterator(); |
229 !block_it.Done(); | 228 !block_it.Done(); |
230 block_it.Advance()) { | 229 block_it.Advance()) { |
231 BlockEntryInstr* block = block_it.Current(); | 230 BlockEntryInstr* block = block_it.Current(); |
232 if (block->IsGraphEntry()) continue; | 231 if (block->IsGraphEntry()) continue; |
233 if (merged->Contains(block->postorder_number())) continue; | 232 if (merged->Contains(block->postorder_number())) continue; |
234 | 233 |
235 Instruction* last = block->last_instruction(); | 234 Instruction* last = block->last_instruction(); |
236 BlockEntryInstr* successor = NULL; | 235 BlockEntryInstr* successor = NULL; |
237 while ((!last->IsIndirectGoto()) && | 236 while ((!last->IsIndirectGoto()) && |
(...skipping 127 matching lines...)
365 VerifyUseListsInInstruction(it.Current()); | 364 VerifyUseListsInInstruction(it.Current()); |
366 } | 365 } |
367 } | 366 } |
368 return true; // Return true so we can ASSERT validation. | 367 return true; // Return true so we can ASSERT validation. |
369 } | 368 } |
370 | 369 |
371 | 370 |
372 LivenessAnalysis::LivenessAnalysis( | 371 LivenessAnalysis::LivenessAnalysis( |
373 intptr_t variable_count, | 372 intptr_t variable_count, |
374 const GrowableArray<BlockEntryInstr*>& postorder) | 373 const GrowableArray<BlockEntryInstr*>& postorder) |
375 : isolate_(Isolate::Current()), | 374 : zone_(Thread::Current()->zone()), |
376 variable_count_(variable_count), | 375 variable_count_(variable_count), |
377 postorder_(postorder), | 376 postorder_(postorder), |
378 live_out_(postorder.length()), | 377 live_out_(postorder.length()), |
379 kill_(postorder.length()), | 378 kill_(postorder.length()), |
380 live_in_(postorder.length()) { | 379 live_in_(postorder.length()) { |
381 } | 380 } |
382 | 381 |
383 | 382 |
384 bool LivenessAnalysis::UpdateLiveOut(const BlockEntryInstr& block) { | 383 bool LivenessAnalysis::UpdateLiveOut(const BlockEntryInstr& block) { |
385 BitVector* live_out = live_out_[block.postorder_number()]; | 384 BitVector* live_out = live_out_[block.postorder_number()]; |
(...skipping 36 matching lines...)
422 changed = true; | 421 changed = true; |
423 } | 422 } |
424 } | 423 } |
425 } while (changed); | 424 } while (changed); |
426 } | 425 } |
427 | 426 |
428 | 427 |
429 void LivenessAnalysis::Analyze() { | 428 void LivenessAnalysis::Analyze() { |
430 const intptr_t block_count = postorder_.length(); | 429 const intptr_t block_count = postorder_.length(); |
431 for (intptr_t i = 0; i < block_count; i++) { | 430 for (intptr_t i = 0; i < block_count; i++) { |
432 live_out_.Add(new(isolate()) BitVector(isolate(), variable_count_)); | 431 live_out_.Add(new(zone()) BitVector(zone(), variable_count_)); |
433 kill_.Add(new(isolate()) BitVector(isolate(), variable_count_)); | 432 kill_.Add(new(zone()) BitVector(zone(), variable_count_)); |
434 live_in_.Add(new(isolate()) BitVector(isolate(), variable_count_)); | 433 live_in_.Add(new(zone()) BitVector(zone(), variable_count_)); |
435 } | 434 } |
436 | 435 |
437 ComputeInitialSets(); | 436 ComputeInitialSets(); |
438 ComputeLiveInAndLiveOutSets(); | 437 ComputeLiveInAndLiveOutSets(); |
439 } | 438 } |
440 | 439 |
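Analyze() allocates one live_out, kill, and live_in set per block and then runs the usual backward dataflow iteration to a fixed point (the iteration itself is in the elided ComputeLiveInAndLiveOutSets). The equations are live_out(B) = union of live_in(S) over successors S, and live_in(B) = use(B) | (live_out(B) & ~kill(B)). The sketch below is a generic, self-contained version over std::bitset with hand-written use/kill sets, not the VM's implementation.

    #include <bitset>
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    constexpr size_t kVars = 8;
    using Bits = std::bitset<kVars>;

    // Backward liveness, iterated to a fixed point:
    //   live_out(B) = union of live_in(S) over successors S of B
    //   live_in(B)  = use(B) | (live_out(B) & ~kill(B))
    void ComputeLiveness(const std::vector<std::vector<int>>& succ,
                         const std::vector<Bits>& use,
                         const std::vector<Bits>& kill,
                         std::vector<Bits>* live_in,
                         std::vector<Bits>* live_out) {
      const size_t n = succ.size();
      live_in->assign(n, Bits());
      live_out->assign(n, Bits());
      bool changed = true;
      while (changed) {
        changed = false;
        for (size_t b = 0; b < n; ++b) {
          Bits out;
          for (int s : succ[b]) out |= (*live_in)[s];
          Bits in = use[b] | (out & ~kill[b]);
          if (out != (*live_out)[b] || in != (*live_in)[b]) {
            (*live_out)[b] = out;
            (*live_in)[b] = in;
            changed = true;
          }
        }
      }
    }

    int main() {
      // Two blocks: B0 -> B1. B0 assigns v0; B1 uses v0 and v1.
      std::vector<std::vector<int>> succ = {{1}, {}};
      std::vector<Bits> use = {Bits(), Bits("011")};   // B1 uses v0, v1
      std::vector<Bits> kill = {Bits("001"), Bits()};  // B0 kills v0
      std::vector<Bits> live_in, live_out;
      ComputeLiveness(succ, use, kill, &live_in, &live_out);
      // Only v1 is live into B0, since B0 redefines v0 before B1 uses it.
      std::printf("live_in(B0)=%s\n", live_in[0].to_string().c_str());
      return 0;
    }
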
441 | 440 |
442 static void PrintBitVector(const char* tag, BitVector* v) { | 441 static void PrintBitVector(const char* tag, BitVector* v) { |
443 OS::Print("%s:", tag); | 442 OS::Print("%s:", tag); |
444 for (BitVector::Iterator it(v); !it.Done(); it.Advance()) { | 443 for (BitVector::Iterator it(v); !it.Done(); it.Advance()) { |
(...skipping 96 matching lines...)
541 | 540 |
542 const FlowGraph* flow_graph_; | 541 const FlowGraph* flow_graph_; |
543 const intptr_t num_non_copied_params_; | 542 const intptr_t num_non_copied_params_; |
544 GrowableArray<BitVector*> assigned_vars_; | 543 GrowableArray<BitVector*> assigned_vars_; |
545 }; | 544 }; |
546 | 545 |
547 | 546 |
548 void VariableLivenessAnalysis::ComputeInitialSets() { | 547 void VariableLivenessAnalysis::ComputeInitialSets() { |
549 const intptr_t block_count = postorder_.length(); | 548 const intptr_t block_count = postorder_.length(); |
550 | 549 |
551 BitVector* last_loads = new(isolate()) BitVector(isolate(), variable_count_); | 550 BitVector* last_loads = new(zone()) BitVector(zone(), variable_count_); |
552 for (intptr_t i = 0; i < block_count; i++) { | 551 for (intptr_t i = 0; i < block_count; i++) { |
553 BlockEntryInstr* block = postorder_[i]; | 552 BlockEntryInstr* block = postorder_[i]; |
554 | 553 |
555 BitVector* kill = kill_[i]; | 554 BitVector* kill = kill_[i]; |
556 BitVector* live_in = live_in_[i]; | 555 BitVector* live_in = live_in_[i]; |
557 last_loads->Clear(); | 556 last_loads->Clear(); |
558 | 557 |
559 // There is an implicit use (load-local) of every local variable at each | 558 // There is an implicit use (load-local) of every local variable at each |
560 // call inside a try{} block and every call has an implicit control-flow | 559 // call inside a try{} block and every call has an implicit control-flow |
561 // to the catch entry. As an approximation we mark all locals as live | 560 // to the catch entry. As an approximation we mark all locals as live |
(...skipping 105 matching lines...)
667 // Use a link-eval data structure with path compression. Implement path | 666 // Use a link-eval data structure with path compression. Implement path |
668 // compression in place by mutating the parent array. Each block has a | 667 // compression in place by mutating the parent array. Each block has a |
669 // label, which is the minimum block number on the compressed path. | 668 // label, which is the minimum block number on the compressed path. |
670 | 669 |
671 // Initialize idom, semi, and label used by SEMI-NCA. Initialize the | 670 // Initialize idom, semi, and label used by SEMI-NCA. Initialize the |
672 // dominance frontier output array. | 671 // dominance frontier output array. |
673 for (intptr_t i = 0; i < size; ++i) { | 672 for (intptr_t i = 0; i < size; ++i) { |
674 idom.Add(parent_[i]); | 673 idom.Add(parent_[i]); |
675 semi.Add(i); | 674 semi.Add(i); |
676 label.Add(i); | 675 label.Add(i); |
677 dominance_frontier->Add(new(isolate()) BitVector(isolate(), size)); | 676 dominance_frontier->Add(new(zone()) BitVector(zone(), size)); |
678 } | 677 } |
679 | 678 |
680 // Loop over the blocks in reverse preorder (not including the graph | 679 // Loop over the blocks in reverse preorder (not including the graph |
681 // entry). Clear the dominated blocks in the graph entry in case | 680 // entry). Clear the dominated blocks in the graph entry in case |
682 // ComputeDominators is used to recompute them. | 681 // ComputeDominators is used to recompute them. |
683 preorder_[0]->ClearDominatedBlocks(); | 682 preorder_[0]->ClearDominatedBlocks(); |
684 for (intptr_t block_index = size - 1; block_index >= 1; --block_index) { | 683 for (intptr_t block_index = size - 1; block_index >= 1; --block_index) { |
685 // Loop over the predecessors. | 684 // Loop over the predecessors. |
686 BlockEntryInstr* block = preorder_[block_index]; | 685 BlockEntryInstr* block = preorder_[block_index]; |
687 // Clear the immediately dominated blocks in case ComputeDominators is | 686 // Clear the immediately dominated blocks in case ComputeDominators is |
(...skipping 149 matching lines...)
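The comment at the top of this hunk describes the link-eval structure used by SEMI-NCA: the parent array is compressed in place, and label[i] keeps the minimum label seen on the compressed path above the start block. The VM's actual helper is in the elided lines; the sketch below is a generic, self-contained version of that path-compression step on plain vectors, shown only to make the comment concrete.

    #include <cstdio>
    #include <vector>

    // Path compression over a (parent, label) forest: after the call,
    // parent[current] points past the compressed path and label[current]
    // holds the minimum label encountered above `start` on that path.
    static void CompressPath(int start,
                             int current,
                             std::vector<int>* parent,
                             std::vector<int>* label) {
      int next = (*parent)[current];
      if (next > start) {
        CompressPath(start, next, parent, label);
        if ((*label)[next] < (*label)[current]) {
          (*label)[current] = (*label)[next];
        }
        (*parent)[current] = (*parent)[next];
      }
    }

    int main() {
      // Chain 0 <- 1 <- 2 <- 3 in preorder numbering; labels equal own index.
      std::vector<int> parent = {0, 0, 1, 2};
      std::vector<int> label = {0, 1, 2, 3};
      CompressPath(/*start=*/0, /*current=*/3, &parent, &label);
      std::printf("parent[3]=%d label[3]=%d\n", parent[3], label[3]);  // 0, 1
      return 0;
    }
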
837 Definition* defn = (*inlining_parameters)[i]; | 836 Definition* defn = (*inlining_parameters)[i]; |
838 AllocateSSAIndexes(defn); | 837 AllocateSSAIndexes(defn); |
839 AddToInitialDefinitions(defn); | 838 AddToInitialDefinitions(defn); |
840 env.Add(defn); | 839 env.Add(defn); |
841 } | 840 } |
842 } else { | 841 } else { |
843 // Create new parameters. For functions compiled for OSR, the locals | 842 // Create new parameters. For functions compiled for OSR, the locals |
844 // are unknown and so treated like parameters. | 843 // are unknown and so treated like parameters. |
845 intptr_t count = IsCompiledForOsr() ? variable_count() : parameter_count(); | 844 intptr_t count = IsCompiledForOsr() ? variable_count() : parameter_count(); |
846 for (intptr_t i = 0; i < count; ++i) { | 845 for (intptr_t i = 0; i < count; ++i) { |
847 ParameterInstr* param = new(isolate()) ParameterInstr(i, entry); | 846 ParameterInstr* param = new(zone()) ParameterInstr(i, entry); |
848 param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. | 847 param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. |
849 AddToInitialDefinitions(param); | 848 AddToInitialDefinitions(param); |
850 env.Add(param); | 849 env.Add(param); |
851 } | 850 } |
852 } | 851 } |
853 | 852 |
854 // Initialize all locals in the renaming environment. For OSR, the locals have | 853 // Initialize all locals in the renaming environment. For OSR, the locals have |
855 // already been handled as parameters. | 854 // already been handled as parameters. |
856 if (!IsCompiledForOsr()) { | 855 if (!IsCompiledForOsr()) { |
857 for (intptr_t i = parameter_count(); i < variable_count(); ++i) { | 856 for (intptr_t i = parameter_count(); i < variable_count(); ++i) { |
(...skipping 65 matching lines...)
923 // more redundant phis. | 922 // more redundant phis. |
924 phi->mark_alive(); | 923 phi->mark_alive(); |
925 live_phis->Add(phi); | 924 live_phis->Add(phi); |
926 } | 925 } |
927 } | 926 } |
928 } | 927 } |
929 } | 928 } |
930 } else if (block_entry->IsCatchBlockEntry()) { | 929 } else if (block_entry->IsCatchBlockEntry()) { |
931 // Add real definitions for all locals and parameters. | 930 // Add real definitions for all locals and parameters. |
932 for (intptr_t i = 0; i < env->length(); ++i) { | 931 for (intptr_t i = 0; i < env->length(); ++i) { |
933 ParameterInstr* param = new(isolate()) ParameterInstr(i, block_entry); | 932 ParameterInstr* param = new(zone()) ParameterInstr(i, block_entry); |
934 param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. | 933 param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp. |
935 (*env)[i] = param; | 934 (*env)[i] = param; |
936 block_entry->AsCatchBlockEntry()->initial_definitions()->Add(param); | 935 block_entry->AsCatchBlockEntry()->initial_definitions()->Add(param); |
937 } | 936 } |
938 } | 937 } |
939 | 938 |
940 // Prune non-live variables at block entry by replacing their environment | 939 // Prune non-live variables at block entry by replacing their environment |
941 // slots with null. | 940 // slots with null. |
942 BitVector* live_in = variable_liveness->GetLiveInSet(block_entry); | 941 BitVector* live_in = variable_liveness->GetLiveInSet(block_entry); |
943 for (intptr_t i = 0; i < variable_count(); i++) { | 942 for (intptr_t i = 0; i < variable_count(); i++) { |
(...skipping 158 matching lines...)
1102 block_entry->last_instruction()->SuccessorAt(0)->IsJoinEntry()) { | 1101 block_entry->last_instruction()->SuccessorAt(0)->IsJoinEntry()) { |
1103 JoinEntryInstr* successor = | 1102 JoinEntryInstr* successor = |
1104 block_entry->last_instruction()->SuccessorAt(0)->AsJoinEntry(); | 1103 block_entry->last_instruction()->SuccessorAt(0)->AsJoinEntry(); |
1105 intptr_t pred_index = successor->IndexOfPredecessor(block_entry); | 1104 intptr_t pred_index = successor->IndexOfPredecessor(block_entry); |
1106 ASSERT(pred_index >= 0); | 1105 ASSERT(pred_index >= 0); |
1107 if (successor->phis() != NULL) { | 1106 if (successor->phis() != NULL) { |
1108 for (intptr_t i = 0; i < successor->phis()->length(); ++i) { | 1107 for (intptr_t i = 0; i < successor->phis()->length(); ++i) { |
1109 PhiInstr* phi = (*successor->phis())[i]; | 1108 PhiInstr* phi = (*successor->phis())[i]; |
1110 if (phi != NULL) { | 1109 if (phi != NULL) { |
1111 // Rename input operand. | 1110 // Rename input operand. |
1112 Value* use = new(isolate()) Value((*env)[i]); | 1111 Value* use = new(zone()) Value((*env)[i]); |
1113 phi->SetInputAt(pred_index, use); | 1112 phi->SetInputAt(pred_index, use); |
1114 } | 1113 } |
1115 } | 1114 } |
1116 } | 1115 } |
1117 } | 1116 } |
1118 } | 1117 } |
1119 | 1118 |
1120 | 1119 |
1121 void FlowGraph::RemoveDeadPhis(GrowableArray<PhiInstr*>* live_phis) { | 1120 void FlowGraph::RemoveDeadPhis(GrowableArray<PhiInstr*>* live_phis) { |
1122 // Augment live_phis with those that have implicit real uses at | 1121 // Augment live_phis with those that have implicit real uses at |
(...skipping 64 matching lines...)
1187 } | 1186 } |
1188 } | 1187 } |
1189 } | 1188 } |
1190 | 1189 |
1191 | 1190 |
1192 // Find the natural loop for the back edge m->n and attach loop information | 1191 // Find the natural loop for the back edge m->n and attach loop information |
1193 // to block n (loop header). The algorithm is described in "Advanced Compiler | 1192 // to block n (loop header). The algorithm is described in "Advanced Compiler |
1194 // Design & Implementation" (Muchnick) p192. | 1193 // Design & Implementation" (Muchnick) p192. |
1195 BitVector* FlowGraph::FindLoop(BlockEntryInstr* m, BlockEntryInstr* n) const { | 1194 BitVector* FlowGraph::FindLoop(BlockEntryInstr* m, BlockEntryInstr* n) const { |
1196 GrowableArray<BlockEntryInstr*> stack; | 1195 GrowableArray<BlockEntryInstr*> stack; |
1197 BitVector* loop = new(isolate()) BitVector(isolate(), preorder_.length()); | 1196 BitVector* loop = new(zone()) BitVector(zone(), preorder_.length()); |
1198 | 1197 |
1199 loop->Add(n->preorder_number()); | 1198 loop->Add(n->preorder_number()); |
1200 if (n != m) { | 1199 if (n != m) { |
1201 loop->Add(m->preorder_number()); | 1200 loop->Add(m->preorder_number()); |
1202 stack.Add(m); | 1201 stack.Add(m); |
1203 } | 1202 } |
1204 | 1203 |
1205 while (!stack.is_empty()) { | 1204 while (!stack.is_empty()) { |
1206 BlockEntryInstr* p = stack.RemoveLast(); | 1205 BlockEntryInstr* p = stack.RemoveLast(); |
1207 for (intptr_t i = 0; i < p->PredecessorCount(); ++i) { | 1206 for (intptr_t i = 0; i < p->PredecessorCount(); ++i) { |
1208 BlockEntryInstr* q = p->PredecessorAt(i); | 1207 BlockEntryInstr* q = p->PredecessorAt(i); |
1209 if (!loop->Contains(q->preorder_number())) { | 1208 if (!loop->Contains(q->preorder_number())) { |
1210 loop->Add(q->preorder_number()); | 1209 loop->Add(q->preorder_number()); |
1211 stack.Add(q); | 1210 stack.Add(q); |
1212 } | 1211 } |
1213 } | 1212 } |
1214 } | 1213 } |
1215 return loop; | 1214 return loop; |
1216 } | 1215 } |
1217 | 1216 |
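FindLoop above collects the natural loop of a back edge m->n: the header n plus every block that can reach m along predecessor edges without passing through n (Muchnick, p. 192). The toy below runs the same backward walk on a hand-written predecessor list; the CFG and block numbers are made up for illustration only.

    #include <cstdio>
    #include <set>
    #include <vector>

    // Natural-loop membership for a back edge m -> n (n is the loop header),
    // given the CFG as a predecessor list.
    std::set<int> FindLoop(const std::vector<std::vector<int>>& preds,
                           int m, int n) {
      std::set<int> loop = {n};
      std::vector<int> stack;
      if (m != n) {
        loop.insert(m);
        stack.push_back(m);
      }
      while (!stack.empty()) {
        int p = stack.back();
        stack.pop_back();
        for (int q : preds[p]) {
          // Stop the backward walk at blocks already in the loop (including n).
          if (loop.insert(q).second) stack.push_back(q);
        }
      }
      return loop;
    }

    int main() {
      // CFG: B0 -> B1, B1 -> B2, B2 -> B1 (back edge), B1 -> B3.
      std::vector<std::vector<int>> preds = {{}, {0, 2}, {1}, {1}};
      std::set<int> loop = FindLoop(preds, /*m=*/2, /*n=*/1);
      for (int b : loop) std::printf("B%d ", b);  // prints B1 B2
      std::printf("\n");
      return 0;
    }
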
1218 | 1217 |
1219 ZoneGrowableArray<BlockEntryInstr*>* FlowGraph::ComputeLoops() const { | 1218 ZoneGrowableArray<BlockEntryInstr*>* FlowGraph::ComputeLoops() const { |
1220 ZoneGrowableArray<BlockEntryInstr*>* loop_headers = | 1219 ZoneGrowableArray<BlockEntryInstr*>* loop_headers = |
1221 new(isolate()) ZoneGrowableArray<BlockEntryInstr*>(); | 1220 new(zone()) ZoneGrowableArray<BlockEntryInstr*>(); |
1222 | 1221 |
1223 for (BlockIterator it = postorder_iterator(); | 1222 for (BlockIterator it = postorder_iterator(); |
1224 !it.Done(); | 1223 !it.Done(); |
1225 it.Advance()) { | 1224 it.Advance()) { |
1226 BlockEntryInstr* block = it.Current(); | 1225 BlockEntryInstr* block = it.Current(); |
1227 for (intptr_t i = 0; i < block->PredecessorCount(); ++i) { | 1226 for (intptr_t i = 0; i < block->PredecessorCount(); ++i) { |
1228 BlockEntryInstr* pred = block->PredecessorAt(i); | 1227 BlockEntryInstr* pred = block->PredecessorAt(i); |
1229 if (block->Dominates(pred)) { | 1228 if (block->Dominates(pred)) { |
1230 if (FLAG_trace_optimization) { | 1229 if (FLAG_trace_optimization) { |
1231 OS::Print("Back edge B%" Pd " -> B%" Pd "\n", pred->block_id(), | 1230 OS::Print("Back edge B%" Pd " -> B%" Pd "\n", pred->block_id(), |
(...skipping 40 matching lines...)
1272 !it.Done(); | 1271 !it.Done(); |
1273 it.Advance()) { | 1272 it.Advance()) { |
1274 ++size; | 1273 ++size; |
1275 } | 1274 } |
1276 } | 1275 } |
1277 return size; | 1276 return size; |
1278 } | 1277 } |
1279 | 1278 |
1280 | 1279 |
1281 void FlowGraph::ComputeBlockEffects() { | 1280 void FlowGraph::ComputeBlockEffects() { |
1282 block_effects_ = new(isolate()) BlockEffects(this); | 1281 block_effects_ = new(zone()) BlockEffects(this); |
1283 } | 1282 } |
1284 | 1283 |
1285 | 1284 |
1286 BlockEffects::BlockEffects(FlowGraph* flow_graph) | 1285 BlockEffects::BlockEffects(FlowGraph* flow_graph) |
1287 : available_at_(flow_graph->postorder().length()) { | 1286 : available_at_(flow_graph->postorder().length()) { |
1288 // We are tracking a single effect. | 1287 // We are tracking a single effect. |
1289 ASSERT(EffectSet::kLastEffect == 1); | 1288 ASSERT(EffectSet::kLastEffect == 1); |
1290 Isolate* isolate = flow_graph->isolate(); | 1289 Zone* zone = flow_graph->zone(); |
1291 const intptr_t block_count = flow_graph->postorder().length(); | 1290 const intptr_t block_count = flow_graph->postorder().length(); |
1292 | 1291 |
1293 // Set of blocks that contain side-effects. | 1292 // Set of blocks that contain side-effects. |
1294 BitVector* kill = new(isolate) BitVector(isolate, block_count); | 1293 BitVector* kill = new(zone) BitVector(zone, block_count); |
1295 | 1294 |
1296 // Per block available-after sets. Block A is available after the block B if | 1295 // Per block available-after sets. Block A is available after the block B if |
1297 // and only if A is either equal to B or A is available at B and B contains no | 1296 // and only if A is either equal to B or A is available at B and B contains no |
1298 // side-effects. Initially we consider all blocks available after all other | 1297 // side-effects. Initially we consider all blocks available after all other |
1299 // blocks. | 1298 // blocks. |
1300 GrowableArray<BitVector*> available_after(block_count); | 1299 GrowableArray<BitVector*> available_after(block_count); |
1301 | 1300 |
1302 // Discover all blocks with side-effects. | 1301 // Discover all blocks with side-effects. |
1303 for (BlockIterator it = flow_graph->postorder_iterator(); | 1302 for (BlockIterator it = flow_graph->postorder_iterator(); |
1304 !it.Done(); | 1303 !it.Done(); |
1305 it.Advance()) { | 1304 it.Advance()) { |
1306 available_at_.Add(NULL); | 1305 available_at_.Add(NULL); |
1307 available_after.Add(NULL); | 1306 available_after.Add(NULL); |
1308 | 1307 |
1309 BlockEntryInstr* block = it.Current(); | 1308 BlockEntryInstr* block = it.Current(); |
1310 for (ForwardInstructionIterator it(block); | 1309 for (ForwardInstructionIterator it(block); |
1311 !it.Done(); | 1310 !it.Done(); |
1312 it.Advance()) { | 1311 it.Advance()) { |
1313 if (!it.Current()->Effects().IsNone()) { | 1312 if (!it.Current()->Effects().IsNone()) { |
1314 kill->Add(block->postorder_number()); | 1313 kill->Add(block->postorder_number()); |
1315 break; | 1314 break; |
1316 } | 1315 } |
1317 } | 1316 } |
1318 } | 1317 } |
1319 | 1318 |
1320 BitVector* temp = new(isolate) BitVector(isolate, block_count); | 1319 BitVector* temp = new(zone) BitVector(zone, block_count); |
1321 | 1320 |
1322 // Recompute available-at based on predecessors' available-after until the fix | 1321 // Recompute available-at based on predecessors' available-after until the fix |
1323 // point is reached. | 1322 // point is reached. |
1324 bool changed; | 1323 bool changed; |
1325 do { | 1324 do { |
1326 changed = false; | 1325 changed = false; |
1327 | 1326 |
1328 for (BlockIterator it = flow_graph->reverse_postorder_iterator(); | 1327 for (BlockIterator it = flow_graph->reverse_postorder_iterator(); |
1329 !it.Done(); | 1328 !it.Done(); |
1330 it.Advance()) { | 1329 it.Advance()) { |
(...skipping 12 matching lines...)
1343 temp->Intersect(available_after[pred]); | 1342 temp->Intersect(available_after[pred]); |
1344 } | 1343 } |
1345 } | 1344 } |
1346 } | 1345 } |
1347 | 1346 |
1348 BitVector* current = available_at_[block_num]; | 1347 BitVector* current = available_at_[block_num]; |
1349 if ((current == NULL) || !current->Equals(*temp)) { | 1348 if ((current == NULL) || !current->Equals(*temp)) { |
1350 // Available-at changed: update it and recompute available-after. | 1349 // Available-at changed: update it and recompute available-after. |
1351 if (available_at_[block_num] == NULL) { | 1350 if (available_at_[block_num] == NULL) { |
1352 current = available_at_[block_num] = | 1351 current = available_at_[block_num] = |
1353 new(isolate) BitVector(isolate, block_count); | 1352 new(zone) BitVector(zone, block_count); |
1354 available_after[block_num] = | 1353 available_after[block_num] = |
1355 new(isolate) BitVector(isolate, block_count); | 1354 new(zone) BitVector(zone, block_count); |
1356 // Block is always available after itself. | 1355 // Block is always available after itself. |
1357 available_after[block_num]->Add(block_num); | 1356 available_after[block_num]->Add(block_num); |
1358 } | 1357 } |
1359 current->CopyFrom(temp); | 1358 current->CopyFrom(temp); |
1360 if (!kill->Contains(block_num)) { | 1359 if (!kill->Contains(block_num)) { |
1361 available_after[block_num]->CopyFrom(temp); | 1360 available_after[block_num]->CopyFrom(temp); |
1362 // Block is always available after itself. | 1361 // Block is always available after itself. |
1363 available_after[block_num]->Add(block_num); | 1362 available_after[block_num]->Add(block_num); |
1364 } | 1363 } |
1365 changed = true; | 1364 changed = true; |
(...skipping 17 matching lines...)
1383 } | 1382 } |
1384 | 1383 |
1385 | 1384 |
1386 bool BlockEffects::IsSideEffectFreePath(BlockEntryInstr* from, | 1385 bool BlockEffects::IsSideEffectFreePath(BlockEntryInstr* from, |
1387 BlockEntryInstr* to) const { | 1386 BlockEntryInstr* to) const { |
1388 return available_at_[to->postorder_number()]->Contains( | 1387 return available_at_[to->postorder_number()]->Contains( |
1389 from->postorder_number()); | 1388 from->postorder_number()); |
1390 } | 1389 } |
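IsSideEffectFreePath answers, from the precomputed available_at_ sets, whether every path from `from` to `to` is free of intervening side effects, which is what lets a value computed in `from` be reused when execution reaches `to`. The toy below hard-codes available_at sets for a three-block chain, matching the definition in the comments above (a block is available at B iff it is available after all of B's predecessors); the block layout and numbers are invented for illustration.

    #include <cstdio>
    #include <vector>

    int main() {
      // Chain B2 -> B1 -> B0 in postorder numbering (B2 is the entry, B0 the
      // exit), where B1 contains a side effect. available_at[to][from] says
      // whether `from` is still available at the entry of `to`.
      std::vector<std::vector<bool>> available_at = {
          /*B0*/ {false, true, false},   // B1 reaches B0 cleanly; B2 is blocked by B1's effect
          /*B1*/ {false, false, true},   // B2 reaches B1 cleanly
          /*B2*/ {false, false, false},  // entry block: nothing comes before it
      };
      auto IsSideEffectFreePath = [&](int from, int to) -> bool {
        return available_at[to][from];
      };
      std::printf("B2 ~> B0 effect free: %d\n", IsSideEffectFreePath(2, 0));  // 0
      std::printf("B1 ~> B0 effect free: %d\n", IsSideEffectFreePath(1, 0));  // 1
      return 0;
    }
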
1391 | 1390 |
1392 } // namespace dart | 1391 } // namespace dart |