Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1102)

Side by Side Diff: runtime/vm/flow_graph.cc

Issue 2974233002: VM: Re-format to use at most one newline between functions (Closed)
Patch Set: Rebase and merge Created 3 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « runtime/vm/flow_graph.h ('k') | runtime/vm/flow_graph_allocator.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/flow_graph.h" 5 #include "vm/flow_graph.h"
6 6
7 #include "vm/bit_vector.h" 7 #include "vm/bit_vector.h"
8 #include "vm/cha.h" 8 #include "vm/cha.h"
9 #include "vm/flow_graph_builder.h" 9 #include "vm/flow_graph_builder.h"
10 #include "vm/flow_graph_compiler.h" 10 #include "vm/flow_graph_compiler.h"
11 #include "vm/flow_graph_range_analysis.h" 11 #include "vm/flow_graph_range_analysis.h"
12 #include "vm/growable_array.h"
12 #include "vm/il_printer.h" 13 #include "vm/il_printer.h"
13 #include "vm/intermediate_language.h" 14 #include "vm/intermediate_language.h"
14 #include "vm/growable_array.h"
15 #include "vm/object_store.h" 15 #include "vm/object_store.h"
16 16
17 namespace dart { 17 namespace dart {
18 18
19 #if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_IA32) 19 #if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_IA32)
20 DEFINE_FLAG(bool, trace_smi_widening, false, "Trace Smi->Int32 widening pass."); 20 DEFINE_FLAG(bool, trace_smi_widening, false, "Trace Smi->Int32 widening pass.");
21 #endif 21 #endif
22 DEFINE_FLAG(bool, prune_dead_locals, true, "optimize dead locals away"); 22 DEFINE_FLAG(bool, prune_dead_locals, true, "optimize dead locals away");
23 DECLARE_FLAG(bool, verify_compiler); 23 DECLARE_FLAG(bool, verify_compiler);
24 24
25
26 FlowGraph::FlowGraph(const ParsedFunction& parsed_function, 25 FlowGraph::FlowGraph(const ParsedFunction& parsed_function,
27 GraphEntryInstr* graph_entry, 26 GraphEntryInstr* graph_entry,
28 intptr_t max_block_id) 27 intptr_t max_block_id)
29 : thread_(Thread::Current()), 28 : thread_(Thread::Current()),
30 parent_(), 29 parent_(),
31 current_ssa_temp_index_(0), 30 current_ssa_temp_index_(0),
32 max_block_id_(max_block_id), 31 max_block_id_(max_block_id),
33 parsed_function_(parsed_function), 32 parsed_function_(parsed_function),
34 num_copied_params_(parsed_function.num_copied_params()), 33 num_copied_params_(parsed_function.num_copied_params()),
35 num_non_copied_params_(parsed_function.num_non_copied_params()), 34 num_non_copied_params_(parsed_function.num_non_copied_params()),
36 graph_entry_(graph_entry), 35 graph_entry_(graph_entry),
37 preorder_(), 36 preorder_(),
38 postorder_(), 37 postorder_(),
39 reverse_postorder_(), 38 reverse_postorder_(),
40 optimized_block_order_(), 39 optimized_block_order_(),
41 constant_null_(NULL), 40 constant_null_(NULL),
42 constant_dead_(NULL), 41 constant_dead_(NULL),
43 constant_empty_context_(NULL), 42 constant_empty_context_(NULL),
44 block_effects_(NULL), 43 block_effects_(NULL),
45 licm_allowed_(true), 44 licm_allowed_(true),
46 loop_headers_(NULL), 45 loop_headers_(NULL),
47 loop_invariant_loads_(NULL), 46 loop_invariant_loads_(NULL),
48 deferred_prefixes_(parsed_function.deferred_prefixes()), 47 deferred_prefixes_(parsed_function.deferred_prefixes()),
49 await_token_positions_(NULL), 48 await_token_positions_(NULL),
50 captured_parameters_(new (zone()) BitVector(zone(), variable_count())), 49 captured_parameters_(new (zone()) BitVector(zone(), variable_count())),
51 inlining_id_(-1) { 50 inlining_id_(-1) {
52 DiscoverBlocks(); 51 DiscoverBlocks();
53 } 52 }
54 53
55
56 void FlowGraph::EnsureSSATempIndex(Definition* defn, Definition* replacement) { 54 void FlowGraph::EnsureSSATempIndex(Definition* defn, Definition* replacement) {
57 if ((replacement->ssa_temp_index() == -1) && (defn->ssa_temp_index() != -1)) { 55 if ((replacement->ssa_temp_index() == -1) && (defn->ssa_temp_index() != -1)) {
58 AllocateSSAIndexes(replacement); 56 AllocateSSAIndexes(replacement);
59 } 57 }
60 } 58 }
61 59
62
63 void FlowGraph::ReplaceCurrentInstruction(ForwardInstructionIterator* iterator, 60 void FlowGraph::ReplaceCurrentInstruction(ForwardInstructionIterator* iterator,
64 Instruction* current, 61 Instruction* current,
65 Instruction* replacement) { 62 Instruction* replacement) {
66 Definition* current_defn = current->AsDefinition(); 63 Definition* current_defn = current->AsDefinition();
67 if ((replacement != NULL) && (current_defn != NULL)) { 64 if ((replacement != NULL) && (current_defn != NULL)) {
68 Definition* replacement_defn = replacement->AsDefinition(); 65 Definition* replacement_defn = replacement->AsDefinition();
69 ASSERT(replacement_defn != NULL); 66 ASSERT(replacement_defn != NULL);
70 current_defn->ReplaceUsesWith(replacement_defn); 67 current_defn->ReplaceUsesWith(replacement_defn);
71 EnsureSSATempIndex(current_defn, replacement_defn); 68 EnsureSSATempIndex(current_defn, replacement_defn);
72 69
(...skipping 18 matching lines...) Expand all
91 if (replacement == NULL || i >= replacement->ArgumentCount() || 88 if (replacement == NULL || i >= replacement->ArgumentCount() ||
92 replacement->PushArgumentAt(i) != push) { 89 replacement->PushArgumentAt(i) != push) {
93 push->ReplaceUsesWith(push->value()->definition()); 90 push->ReplaceUsesWith(push->value()->definition());
94 push->RemoveFromGraph(); 91 push->RemoveFromGraph();
95 } 92 }
96 } 93 }
97 } 94 }
98 iterator->RemoveCurrentFromGraph(); 95 iterator->RemoveCurrentFromGraph();
99 } 96 }
100 97
101
102 void FlowGraph::AddToDeferredPrefixes( 98 void FlowGraph::AddToDeferredPrefixes(
103 ZoneGrowableArray<const LibraryPrefix*>* from) { 99 ZoneGrowableArray<const LibraryPrefix*>* from) {
104 ZoneGrowableArray<const LibraryPrefix*>* to = deferred_prefixes(); 100 ZoneGrowableArray<const LibraryPrefix*>* to = deferred_prefixes();
105 for (intptr_t i = 0; i < from->length(); i++) { 101 for (intptr_t i = 0; i < from->length(); i++) {
106 const LibraryPrefix* prefix = (*from)[i]; 102 const LibraryPrefix* prefix = (*from)[i];
107 for (intptr_t j = 0; j < to->length(); j++) { 103 for (intptr_t j = 0; j < to->length(); j++) {
108 if ((*to)[j]->raw() == prefix->raw()) { 104 if ((*to)[j]->raw() == prefix->raw()) {
109 return; 105 return;
110 } 106 }
111 } 107 }
112 to->Add(prefix); 108 to->Add(prefix);
113 } 109 }
114 } 110 }
115 111
116
117 bool FlowGraph::ShouldReorderBlocks(const Function& function, 112 bool FlowGraph::ShouldReorderBlocks(const Function& function,
118 bool is_optimized) { 113 bool is_optimized) {
119 return is_optimized && FLAG_reorder_basic_blocks && !function.is_intrinsic(); 114 return is_optimized && FLAG_reorder_basic_blocks && !function.is_intrinsic();
120 } 115 }
121 116
122
123 GrowableArray<BlockEntryInstr*>* FlowGraph::CodegenBlockOrder( 117 GrowableArray<BlockEntryInstr*>* FlowGraph::CodegenBlockOrder(
124 bool is_optimized) { 118 bool is_optimized) {
125 return ShouldReorderBlocks(function(), is_optimized) ? &optimized_block_order_ 119 return ShouldReorderBlocks(function(), is_optimized) ? &optimized_block_order_
126 : &reverse_postorder_; 120 : &reverse_postorder_;
127 } 121 }
128 122
129
130 ConstantInstr* FlowGraph::GetConstant(const Object& object) { 123 ConstantInstr* FlowGraph::GetConstant(const Object& object) {
131 ConstantInstr* constant = constant_instr_pool_.LookupValue(object); 124 ConstantInstr* constant = constant_instr_pool_.LookupValue(object);
132 if (constant == NULL) { 125 if (constant == NULL) {
133 // Otherwise, allocate and add it to the pool. 126 // Otherwise, allocate and add it to the pool.
134 constant = 127 constant =
135 new (zone()) ConstantInstr(Object::ZoneHandle(zone(), object.raw())); 128 new (zone()) ConstantInstr(Object::ZoneHandle(zone(), object.raw()));
136 constant->set_ssa_temp_index(alloc_ssa_temp_index()); 129 constant->set_ssa_temp_index(alloc_ssa_temp_index());
137 130
138 AddToInitialDefinitions(constant); 131 AddToInitialDefinitions(constant);
139 constant_instr_pool_.Insert(constant); 132 constant_instr_pool_.Insert(constant);
140 } 133 }
141 return constant; 134 return constant;
142 } 135 }
143 136
144
145 void FlowGraph::AddToInitialDefinitions(Definition* defn) { 137 void FlowGraph::AddToInitialDefinitions(Definition* defn) {
146 // TODO(zerny): Set previous to the graph entry so it is accessible by 138 // TODO(zerny): Set previous to the graph entry so it is accessible by
147 // GetBlock. Remove this once there is a direct pointer to the block. 139 // GetBlock. Remove this once there is a direct pointer to the block.
148 defn->set_previous(graph_entry_); 140 defn->set_previous(graph_entry_);
149 graph_entry_->initial_definitions()->Add(defn); 141 graph_entry_->initial_definitions()->Add(defn);
150 } 142 }
151 143
152
153 void FlowGraph::InsertBefore(Instruction* next, 144 void FlowGraph::InsertBefore(Instruction* next,
154 Instruction* instr, 145 Instruction* instr,
155 Environment* env, 146 Environment* env,
156 UseKind use_kind) { 147 UseKind use_kind) {
157 InsertAfter(next->previous(), instr, env, use_kind); 148 InsertAfter(next->previous(), instr, env, use_kind);
158 } 149 }
159 150
160
161 void FlowGraph::InsertAfter(Instruction* prev, 151 void FlowGraph::InsertAfter(Instruction* prev,
162 Instruction* instr, 152 Instruction* instr,
163 Environment* env, 153 Environment* env,
164 UseKind use_kind) { 154 UseKind use_kind) {
165 if (use_kind == kValue) { 155 if (use_kind == kValue) {
166 ASSERT(instr->IsDefinition()); 156 ASSERT(instr->IsDefinition());
167 AllocateSSAIndexes(instr->AsDefinition()); 157 AllocateSSAIndexes(instr->AsDefinition());
168 } 158 }
169 instr->InsertAfter(prev); 159 instr->InsertAfter(prev);
170 ASSERT(instr->env() == NULL); 160 ASSERT(instr->env() == NULL);
171 if (env != NULL) { 161 if (env != NULL) {
172 env->DeepCopyTo(zone(), instr); 162 env->DeepCopyTo(zone(), instr);
173 } 163 }
174 } 164 }
175 165
176
177 Instruction* FlowGraph::AppendTo(Instruction* prev, 166 Instruction* FlowGraph::AppendTo(Instruction* prev,
178 Instruction* instr, 167 Instruction* instr,
179 Environment* env, 168 Environment* env,
180 UseKind use_kind) { 169 UseKind use_kind) {
181 if (use_kind == kValue) { 170 if (use_kind == kValue) {
182 ASSERT(instr->IsDefinition()); 171 ASSERT(instr->IsDefinition());
183 AllocateSSAIndexes(instr->AsDefinition()); 172 AllocateSSAIndexes(instr->AsDefinition());
184 } 173 }
185 ASSERT(instr->env() == NULL); 174 ASSERT(instr->env() == NULL);
186 if (env != NULL) { 175 if (env != NULL) {
187 env->DeepCopyTo(zone(), instr); 176 env->DeepCopyTo(zone(), instr);
188 } 177 }
189 return prev->AppendInstruction(instr); 178 return prev->AppendInstruction(instr);
190 } 179 }
191 180
192
193 // A wrapper around block entries including an index of the next successor to 181 // A wrapper around block entries including an index of the next successor to
194 // be read. 182 // be read.
195 class BlockTraversalState { 183 class BlockTraversalState {
196 public: 184 public:
197 explicit BlockTraversalState(BlockEntryInstr* block) 185 explicit BlockTraversalState(BlockEntryInstr* block)
198 : block_(block), 186 : block_(block),
199 next_successor_ix_(block->last_instruction()->SuccessorCount() - 1) {} 187 next_successor_ix_(block->last_instruction()->SuccessorCount() - 1) {}
200 188
201 bool HasNextSuccessor() const { return next_successor_ix_ >= 0; } 189 bool HasNextSuccessor() const { return next_successor_ix_ >= 0; }
202 BlockEntryInstr* NextSuccessor() { 190 BlockEntryInstr* NextSuccessor() {
203 ASSERT(HasNextSuccessor()); 191 ASSERT(HasNextSuccessor());
204 return block_->last_instruction()->SuccessorAt(next_successor_ix_--); 192 return block_->last_instruction()->SuccessorAt(next_successor_ix_--);
205 } 193 }
206 194
207 BlockEntryInstr* block() const { return block_; } 195 BlockEntryInstr* block() const { return block_; }
208 196
209 private: 197 private:
210 BlockEntryInstr* block_; 198 BlockEntryInstr* block_;
211 intptr_t next_successor_ix_; 199 intptr_t next_successor_ix_;
212 200
213 DISALLOW_ALLOCATION(); 201 DISALLOW_ALLOCATION();
214 }; 202 };
215 203
216
217 void FlowGraph::DiscoverBlocks() { 204 void FlowGraph::DiscoverBlocks() {
218 StackZone zone(thread()); 205 StackZone zone(thread());
219 206
220 // Initialize state. 207 // Initialize state.
221 preorder_.Clear(); 208 preorder_.Clear();
222 postorder_.Clear(); 209 postorder_.Clear();
223 reverse_postorder_.Clear(); 210 reverse_postorder_.Clear();
224 parent_.Clear(); 211 parent_.Clear();
225 212
226 GrowableArray<BlockTraversalState> block_stack; 213 GrowableArray<BlockTraversalState> block_stack;
(...skipping 24 matching lines...) Expand all
251 for (intptr_t i = 0; i < block_count; ++i) { 238 for (intptr_t i = 0; i < block_count; ++i) {
252 reverse_postorder_.Add(postorder_[block_count - i - 1]); 239 reverse_postorder_.Add(postorder_[block_count - i - 1]);
253 } 240 }
254 241
255 // Block effects are using postorder numbering. Discard computed information. 242 // Block effects are using postorder numbering. Discard computed information.
256 block_effects_ = NULL; 243 block_effects_ = NULL;
257 loop_headers_ = NULL; 244 loop_headers_ = NULL;
258 loop_invariant_loads_ = NULL; 245 loop_invariant_loads_ = NULL;
259 } 246 }
260 247
261
262 void FlowGraph::MergeBlocks() { 248 void FlowGraph::MergeBlocks() {
263 bool changed = false; 249 bool changed = false;
264 BitVector* merged = new (zone()) BitVector(zone(), postorder().length()); 250 BitVector* merged = new (zone()) BitVector(zone(), postorder().length());
265 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done(); 251 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
266 block_it.Advance()) { 252 block_it.Advance()) {
267 BlockEntryInstr* block = block_it.Current(); 253 BlockEntryInstr* block = block_it.Current();
268 if (block->IsGraphEntry()) continue; 254 if (block->IsGraphEntry()) continue;
269 if (merged->Contains(block->postorder_number())) continue; 255 if (merged->Contains(block->postorder_number())) continue;
270 256
271 Instruction* last = block->last_instruction(); 257 Instruction* last = block->last_instruction();
(...skipping 21 matching lines...) Expand all
293 // The new block inherits the block id of the last successor to maintain 279 // The new block inherits the block id of the last successor to maintain
294 // the order of phi inputs at its successors consistent with block ids. 280 // the order of phi inputs at its successors consistent with block ids.
295 if (successor != NULL) { 281 if (successor != NULL) {
296 block->set_block_id(successor->block_id()); 282 block->set_block_id(successor->block_id());
297 } 283 }
298 } 284 }
299 // Recompute block order after changes were made. 285 // Recompute block order after changes were made.
300 if (changed) DiscoverBlocks(); 286 if (changed) DiscoverBlocks();
301 } 287 }
302 288
303
304 // Debugging code to verify the construction of use lists. 289 // Debugging code to verify the construction of use lists.
305 static intptr_t MembershipCount(Value* use, Value* list) { 290 static intptr_t MembershipCount(Value* use, Value* list) {
306 intptr_t count = 0; 291 intptr_t count = 0;
307 while (list != NULL) { 292 while (list != NULL) {
308 if (list == use) ++count; 293 if (list == use) ++count;
309 list = list->next_use(); 294 list = list->next_use();
310 } 295 }
311 return count; 296 return count;
312 } 297 }
313 298
314
315 static void VerifyUseListsInInstruction(Instruction* instr) { 299 static void VerifyUseListsInInstruction(Instruction* instr) {
316 ASSERT(instr != NULL); 300 ASSERT(instr != NULL);
317 ASSERT(!instr->IsJoinEntry()); 301 ASSERT(!instr->IsJoinEntry());
318 for (intptr_t i = 0; i < instr->InputCount(); ++i) { 302 for (intptr_t i = 0; i < instr->InputCount(); ++i) {
319 Value* use = instr->InputAt(i); 303 Value* use = instr->InputAt(i);
320 ASSERT(use->definition() != NULL); 304 ASSERT(use->definition() != NULL);
321 ASSERT((use->definition() != instr) || use->definition()->IsPhi() || 305 ASSERT((use->definition() != instr) || use->definition()->IsPhi() ||
322 use->definition()->IsMaterializeObject()); 306 use->definition()->IsMaterializeObject());
323 ASSERT(use->instruction() == instr); 307 ASSERT(use->instruction() == instr);
324 ASSERT(use->use_index() == i); 308 ASSERT(use->use_index() == i);
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
393 phi->set_is_receiver(PhiInstr::kNotReceiver); 377 phi->set_is_receiver(PhiInstr::kNotReceiver);
394 break; 378 break;
395 } 379 }
396 } 380 }
397 381
398 if (phi->is_receiver() == PhiInstr::kNotReceiver) { 382 if (phi->is_receiver() == PhiInstr::kNotReceiver) {
399 unmark->Add(phi); 383 unmark->Add(phi);
400 } 384 }
401 } 385 }
402 386
403
404 void FlowGraph::ComputeIsReceiver(PhiInstr* phi) const { 387 void FlowGraph::ComputeIsReceiver(PhiInstr* phi) const {
405 GrowableArray<PhiInstr*> unmark; 388 GrowableArray<PhiInstr*> unmark;
406 ComputeIsReceiverRecursive(phi, &unmark); 389 ComputeIsReceiverRecursive(phi, &unmark);
407 390
408 // Now drain unmark. 391 // Now drain unmark.
409 while (!unmark.is_empty()) { 392 while (!unmark.is_empty()) {
410 PhiInstr* phi = unmark.RemoveLast(); 393 PhiInstr* phi = unmark.RemoveLast();
411 for (Value::Iterator it(phi->input_use_list()); !it.Done(); it.Advance()) { 394 for (Value::Iterator it(phi->input_use_list()); !it.Done(); it.Advance()) {
412 PhiInstr* use = it.Current()->instruction()->AsPhi(); 395 PhiInstr* use = it.Current()->instruction()->AsPhi();
413 if ((use != NULL) && (use->is_receiver() == PhiInstr::kReceiver)) { 396 if ((use != NULL) && (use->is_receiver() == PhiInstr::kReceiver)) {
414 use->set_is_receiver(PhiInstr::kNotReceiver); 397 use->set_is_receiver(PhiInstr::kNotReceiver);
415 unmark.Add(use); 398 unmark.Add(use);
416 } 399 }
417 } 400 }
418 } 401 }
419 } 402 }
420 403
421
422 bool FlowGraph::IsReceiver(Definition* def) const { 404 bool FlowGraph::IsReceiver(Definition* def) const {
423 def = def->OriginalDefinition(); // Could be redefined. 405 def = def->OriginalDefinition(); // Could be redefined.
424 if (def->IsParameter()) return (def->AsParameter()->index() == 0); 406 if (def->IsParameter()) return (def->AsParameter()->index() == 0);
425 if (!def->IsPhi() || graph_entry()->catch_entries().is_empty()) return false; 407 if (!def->IsPhi() || graph_entry()->catch_entries().is_empty()) return false;
426 PhiInstr* phi = def->AsPhi(); 408 PhiInstr* phi = def->AsPhi();
427 if (phi->is_receiver() != PhiInstr::kUnknownReceiver) { 409 if (phi->is_receiver() != PhiInstr::kUnknownReceiver) {
428 return (phi->is_receiver() == PhiInstr::kReceiver); 410 return (phi->is_receiver() == PhiInstr::kReceiver);
429 } 411 }
430 // Not known if this phi is the receiver yet. Compute it now. 412 // Not known if this phi is the receiver yet. Compute it now.
431 ComputeIsReceiver(phi); 413 ComputeIsReceiver(phi);
432 return (phi->is_receiver() == PhiInstr::kReceiver); 414 return (phi->is_receiver() == PhiInstr::kReceiver);
433 } 415 }
434 416
435
436 // Use CHA to determine if the call needs a class check: if the callee's 417 // Use CHA to determine if the call needs a class check: if the callee's
437 // receiver is the same as the caller's receiver and there are no overridden 418 // receiver is the same as the caller's receiver and there are no overridden
438 // callee functions, then no class check is needed. 419 // callee functions, then no class check is needed.
439 bool FlowGraph::InstanceCallNeedsClassCheck(InstanceCallInstr* call, 420 bool FlowGraph::InstanceCallNeedsClassCheck(InstanceCallInstr* call,
440 RawFunction::Kind kind) const { 421 RawFunction::Kind kind) const {
441 if (!FLAG_use_cha_deopt && !isolate()->all_classes_finalized()) { 422 if (!FLAG_use_cha_deopt && !isolate()->all_classes_finalized()) {
442 // Even if class or function are private, lazy class finalization 423 // Even if class or function are private, lazy class finalization
443 // may later add overriding methods. 424 // may later add overriding methods.
444 return true; 425 return true;
445 } 426 }
(...skipping 14 matching lines...) Expand all
460 "no overrides of '%s' '%s'\n", 441 "no overrides of '%s' '%s'\n",
461 name.ToCString(), cls.ToCString()); 442 name.ToCString(), cls.ToCString());
462 } 443 }
463 thread()->cha()->AddToGuardedClasses(cls, subclass_count); 444 thread()->cha()->AddToGuardedClasses(cls, subclass_count);
464 return false; 445 return false;
465 } 446 }
466 } 447 }
467 return true; 448 return true;
468 } 449 }
469 450
470
471 Instruction* FlowGraph::CreateCheckClass(Definition* to_check, 451 Instruction* FlowGraph::CreateCheckClass(Definition* to_check,
472 const Cids& cids, 452 const Cids& cids,
473 intptr_t deopt_id, 453 intptr_t deopt_id,
474 TokenPosition token_pos) { 454 TokenPosition token_pos) {
475 if (cids.IsMonomorphic() && cids.MonomorphicReceiverCid() == kSmiCid) { 455 if (cids.IsMonomorphic() && cids.MonomorphicReceiverCid() == kSmiCid) {
476 return new (zone()) 456 return new (zone())
477 CheckSmiInstr(new (zone()) Value(to_check), deopt_id, token_pos); 457 CheckSmiInstr(new (zone()) Value(to_check), deopt_id, token_pos);
478 } 458 }
479 return new (zone()) 459 return new (zone())
480 CheckClassInstr(new (zone()) Value(to_check), deopt_id, cids, token_pos); 460 CheckClassInstr(new (zone()) Value(to_check), deopt_id, cids, token_pos);
481 } 461 }
482 462
483
484 bool FlowGraph::VerifyUseLists() { 463 bool FlowGraph::VerifyUseLists() {
485 // Verify the initial definitions. 464 // Verify the initial definitions.
486 for (intptr_t i = 0; i < graph_entry_->initial_definitions()->length(); ++i) { 465 for (intptr_t i = 0; i < graph_entry_->initial_definitions()->length(); ++i) {
487 VerifyUseListsInInstruction((*graph_entry_->initial_definitions())[i]); 466 VerifyUseListsInInstruction((*graph_entry_->initial_definitions())[i]);
488 } 467 }
489 468
490 // Verify phis in join entries and the instructions in each block. 469 // Verify phis in join entries and the instructions in each block.
491 for (intptr_t i = 0; i < preorder_.length(); ++i) { 470 for (intptr_t i = 0; i < preorder_.length(); ++i) {
492 BlockEntryInstr* entry = preorder_[i]; 471 BlockEntryInstr* entry = preorder_[i];
493 JoinEntryInstr* join = entry->AsJoinEntry(); 472 JoinEntryInstr* join = entry->AsJoinEntry();
494 if (join != NULL) { 473 if (join != NULL) {
495 for (PhiIterator it(join); !it.Done(); it.Advance()) { 474 for (PhiIterator it(join); !it.Done(); it.Advance()) {
496 PhiInstr* phi = it.Current(); 475 PhiInstr* phi = it.Current();
497 ASSERT(phi != NULL); 476 ASSERT(phi != NULL);
498 VerifyUseListsInInstruction(phi); 477 VerifyUseListsInInstruction(phi);
499 } 478 }
500 } 479 }
501 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) { 480 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
502 VerifyUseListsInInstruction(it.Current()); 481 VerifyUseListsInInstruction(it.Current());
503 } 482 }
504 } 483 }
505 484
506 return true; // Return true so we can ASSERT validation. 485 return true; // Return true so we can ASSERT validation.
507 } 486 }
508 487
509
510 // Verify that a redefinition dominates all uses of the redefined value. 488 // Verify that a redefinition dominates all uses of the redefined value.
511 bool FlowGraph::VerifyRedefinitions() { 489 bool FlowGraph::VerifyRedefinitions() {
512 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done(); 490 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
513 block_it.Advance()) { 491 block_it.Advance()) {
514 for (ForwardInstructionIterator instr_it(block_it.Current()); 492 for (ForwardInstructionIterator instr_it(block_it.Current());
515 !instr_it.Done(); instr_it.Advance()) { 493 !instr_it.Done(); instr_it.Advance()) {
516 RedefinitionInstr* redefinition = instr_it.Current()->AsRedefinition(); 494 RedefinitionInstr* redefinition = instr_it.Current()->AsRedefinition();
517 if (redefinition != NULL) { 495 if (redefinition != NULL) {
518 Definition* original = redefinition->value()->definition(); 496 Definition* original = redefinition->value()->definition();
519 for (Value::Iterator it(original->input_use_list()); !it.Done(); 497 for (Value::Iterator it(original->input_use_list()); !it.Done();
520 it.Advance()) { 498 it.Advance()) {
521 Value* original_use = it.Current(); 499 Value* original_use = it.Current();
522 if (original_use->instruction() == redefinition) { 500 if (original_use->instruction() == redefinition) {
523 continue; 501 continue;
524 } 502 }
525 if (original_use->instruction()->IsDominatedBy(redefinition)) { 503 if (original_use->instruction()->IsDominatedBy(redefinition)) {
526 FlowGraphPrinter::PrintGraph("VerifyRedefinitions", this); 504 FlowGraphPrinter::PrintGraph("VerifyRedefinitions", this);
527 THR_Print("%s\n", redefinition->ToCString()); 505 THR_Print("%s\n", redefinition->ToCString());
528 THR_Print("use=%s\n", original_use->instruction()->ToCString()); 506 THR_Print("use=%s\n", original_use->instruction()->ToCString());
529 return false; 507 return false;
530 } 508 }
531 } 509 }
532 } 510 }
533 } 511 }
534 } 512 }
535 return true; 513 return true;
536 } 514 }
537 515
538
539 LivenessAnalysis::LivenessAnalysis( 516 LivenessAnalysis::LivenessAnalysis(
540 intptr_t variable_count, 517 intptr_t variable_count,
541 const GrowableArray<BlockEntryInstr*>& postorder) 518 const GrowableArray<BlockEntryInstr*>& postorder)
542 : zone_(Thread::Current()->zone()), 519 : zone_(Thread::Current()->zone()),
543 variable_count_(variable_count), 520 variable_count_(variable_count),
544 postorder_(postorder), 521 postorder_(postorder),
545 live_out_(postorder.length()), 522 live_out_(postorder.length()),
546 kill_(postorder.length()), 523 kill_(postorder.length()),
547 live_in_(postorder.length()) {} 524 live_in_(postorder.length()) {}
548 525
549
550 bool LivenessAnalysis::UpdateLiveOut(const BlockEntryInstr& block) { 526 bool LivenessAnalysis::UpdateLiveOut(const BlockEntryInstr& block) {
551 BitVector* live_out = live_out_[block.postorder_number()]; 527 BitVector* live_out = live_out_[block.postorder_number()];
552 bool changed = false; 528 bool changed = false;
553 Instruction* last = block.last_instruction(); 529 Instruction* last = block.last_instruction();
554 ASSERT(last != NULL); 530 ASSERT(last != NULL);
555 for (intptr_t i = 0; i < last->SuccessorCount(); i++) { 531 for (intptr_t i = 0; i < last->SuccessorCount(); i++) {
556 BlockEntryInstr* succ = last->SuccessorAt(i); 532 BlockEntryInstr* succ = last->SuccessorAt(i);
557 ASSERT(succ != NULL); 533 ASSERT(succ != NULL);
558 if (live_out->AddAll(live_in_[succ->postorder_number()])) { 534 if (live_out->AddAll(live_in_[succ->postorder_number()])) {
559 changed = true; 535 changed = true;
560 } 536 }
561 } 537 }
562 return changed; 538 return changed;
563 } 539 }
564 540
565
566 bool LivenessAnalysis::UpdateLiveIn(const BlockEntryInstr& block) { 541 bool LivenessAnalysis::UpdateLiveIn(const BlockEntryInstr& block) {
567 BitVector* live_out = live_out_[block.postorder_number()]; 542 BitVector* live_out = live_out_[block.postorder_number()];
568 BitVector* kill = kill_[block.postorder_number()]; 543 BitVector* kill = kill_[block.postorder_number()];
569 BitVector* live_in = live_in_[block.postorder_number()]; 544 BitVector* live_in = live_in_[block.postorder_number()];
570 return live_in->KillAndAdd(kill, live_out); 545 return live_in->KillAndAdd(kill, live_out);
571 } 546 }
572 547
573
574 void LivenessAnalysis::ComputeLiveInAndLiveOutSets() { 548 void LivenessAnalysis::ComputeLiveInAndLiveOutSets() {
575 const intptr_t block_count = postorder_.length(); 549 const intptr_t block_count = postorder_.length();
576 bool changed; 550 bool changed;
577 do { 551 do {
578 changed = false; 552 changed = false;
579 553
580 for (intptr_t i = 0; i < block_count; i++) { 554 for (intptr_t i = 0; i < block_count; i++) {
581 const BlockEntryInstr& block = *postorder_[i]; 555 const BlockEntryInstr& block = *postorder_[i];
582 556
583 // Live-in set depends only on kill set which does not 557 // Live-in set depends only on kill set which does not
584 // change in this loop and live-out set. If live-out 558 // change in this loop and live-out set. If live-out
585 // set does not change there is no need to recompute 559 // set does not change there is no need to recompute
586 // live-in set. 560 // live-in set.
587 if (UpdateLiveOut(block) && UpdateLiveIn(block)) { 561 if (UpdateLiveOut(block) && UpdateLiveIn(block)) {
588 changed = true; 562 changed = true;
589 } 563 }
590 } 564 }
591 } while (changed); 565 } while (changed);
592 } 566 }
593 567
594
595 void LivenessAnalysis::Analyze() { 568 void LivenessAnalysis::Analyze() {
596 const intptr_t block_count = postorder_.length(); 569 const intptr_t block_count = postorder_.length();
597 for (intptr_t i = 0; i < block_count; i++) { 570 for (intptr_t i = 0; i < block_count; i++) {
598 live_out_.Add(new (zone()) BitVector(zone(), variable_count_)); 571 live_out_.Add(new (zone()) BitVector(zone(), variable_count_));
599 kill_.Add(new (zone()) BitVector(zone(), variable_count_)); 572 kill_.Add(new (zone()) BitVector(zone(), variable_count_));
600 live_in_.Add(new (zone()) BitVector(zone(), variable_count_)); 573 live_in_.Add(new (zone()) BitVector(zone(), variable_count_));
601 } 574 }
602 575
603 ComputeInitialSets(); 576 ComputeInitialSets();
604 ComputeLiveInAndLiveOutSets(); 577 ComputeLiveInAndLiveOutSets();
605 } 578 }
606 579
607
608 static void PrintBitVector(const char* tag, BitVector* v) { 580 static void PrintBitVector(const char* tag, BitVector* v) {
609 OS::Print("%s:", tag); 581 OS::Print("%s:", tag);
610 for (BitVector::Iterator it(v); !it.Done(); it.Advance()) { 582 for (BitVector::Iterator it(v); !it.Done(); it.Advance()) {
611 OS::Print(" %" Pd "", it.Current()); 583 OS::Print(" %" Pd "", it.Current());
612 } 584 }
613 OS::Print("\n"); 585 OS::Print("\n");
614 } 586 }
615 587
616
// Prints, for every block in postorder, its successors followed by the
// computed live-out, kill and live-in variable sets (as bit indices).
// Debugging aid only; has no effect on the analysis.
void LivenessAnalysis::Dump() {
  const intptr_t block_count = postorder_.length();
  for (intptr_t i = 0; i < block_count; i++) {
    BlockEntryInstr* block = postorder_[i];
    OS::Print("block @%" Pd " -> ", block->block_id());

    // Successors are reached through the block's last (control-flow)
    // instruction.
    Instruction* last = block->last_instruction();
    for (intptr_t j = 0; j < last->SuccessorCount(); j++) {
      BlockEntryInstr* succ = last->SuccessorAt(j);
      OS::Print(" @%" Pd "", succ->block_id());
    }
    OS::Print("\n");

    PrintBitVector("  live out", live_out_[i]);
    PrintBitVector("  kill", kill_[i]);
    PrintBitVector("  live in", live_in_[i]);
  }
}
635 606
636
637 // Computes liveness information for local variables. 607 // Computes liveness information for local variables.
638 class VariableLivenessAnalysis : public LivenessAnalysis { 608 class VariableLivenessAnalysis : public LivenessAnalysis {
639 public: 609 public:
640 explicit VariableLivenessAnalysis(FlowGraph* flow_graph) 610 explicit VariableLivenessAnalysis(FlowGraph* flow_graph)
641 : LivenessAnalysis(flow_graph->variable_count(), flow_graph->postorder()), 611 : LivenessAnalysis(flow_graph->variable_count(), flow_graph->postorder()),
642 flow_graph_(flow_graph), 612 flow_graph_(flow_graph),
643 num_non_copied_params_(flow_graph->num_non_copied_params()), 613 num_non_copied_params_(flow_graph->num_non_copied_params()),
644 assigned_vars_() {} 614 assigned_vars_() {}
645 615
646 // For every block (in preorder) compute and return set of variables that 616 // For every block (in preorder) compute and return set of variables that
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after
703 } 673 }
704 674
705 private: 675 private:
706 virtual void ComputeInitialSets(); 676 virtual void ComputeInitialSets();
707 677
708 const FlowGraph* flow_graph_; 678 const FlowGraph* flow_graph_;
709 const intptr_t num_non_copied_params_; 679 const intptr_t num_non_copied_params_;
710 GrowableArray<BitVector*> assigned_vars_; 680 GrowableArray<BitVector*> assigned_vars_;
711 }; 681 };
712 682
713
714 void VariableLivenessAnalysis::ComputeInitialSets() { 683 void VariableLivenessAnalysis::ComputeInitialSets() {
715 const intptr_t block_count = postorder_.length(); 684 const intptr_t block_count = postorder_.length();
716 685
717 BitVector* last_loads = new (zone()) BitVector(zone(), variable_count_); 686 BitVector* last_loads = new (zone()) BitVector(zone(), variable_count_);
718 for (intptr_t i = 0; i < block_count; i++) { 687 for (intptr_t i = 0; i < block_count; i++) {
719 BlockEntryInstr* block = postorder_[i]; 688 BlockEntryInstr* block = postorder_[i];
720 689
721 BitVector* kill = kill_[i]; 690 BitVector* kill = kill_[i];
722 BitVector* live_in = live_in_[i]; 691 BitVector* live_in = live_in_[i];
723 last_loads->Clear(); 692 last_loads->Clear();
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
764 } 733 }
765 kill->Add(index); 734 kill->Add(index);
766 } 735 }
767 live_in->Remove(index); 736 live_in->Remove(index);
768 continue; 737 continue;
769 } 738 }
770 } 739 }
771 } 740 }
772 } 741 }
773 742
774
// Converts the graph into SSA form.
//
// |next_virtual_register_number| is the first SSA temp index to assign; the
// ASSERT below shows it is non-zero only when |inlining_parameters| is
// provided (i.e. when this graph is being inlined into another).
// Pipeline: dominators/dominance frontier -> variable liveness -> phi
// insertion -> renaming -> dead-phi elimination.
void FlowGraph::ComputeSSA(
    intptr_t next_virtual_register_number,
    ZoneGrowableArray<Definition*>* inlining_parameters) {
  ASSERT((next_virtual_register_number == 0) || (inlining_parameters != NULL));
  current_ssa_temp_index_ = next_virtual_register_number;
  GrowableArray<BitVector*> dominance_frontier;
  ComputeDominators(&dominance_frontier);

  VariableLivenessAnalysis variable_liveness(this);
  variable_liveness.Analyze();

  GrowableArray<PhiInstr*> live_phis;

  InsertPhis(preorder_, variable_liveness.ComputeAssignedVars(),
             dominance_frontier, &live_phis);

  // Rename uses to reference inserted phis where appropriate.
  // Collect phis that reach a non-environment use.
  Rename(&live_phis, &variable_liveness, inlining_parameters);

  // Propagate alive mark transitively from alive phis and then remove
  // non-live ones.
  RemoveDeadPhis(&live_phis);
}
800 767
801
802 // Compute immediate dominators and the dominance frontier for each basic 768 // Compute immediate dominators and the dominance frontier for each basic
803 // block. As a side effect of the algorithm, sets the immediate dominator 769 // block. As a side effect of the algorithm, sets the immediate dominator
804 // of each basic block. 770 // of each basic block.
805 // 771 //
806 // dominance_frontier: an output parameter encoding the dominance frontier. 772 // dominance_frontier: an output parameter encoding the dominance frontier.
807 // The array maps the preorder block number of a block to the set of 773 // The array maps the preorder block number of a block to the set of
808 // (preorder block numbers of) blocks in the dominance frontier. 774 // (preorder block numbers of) blocks in the dominance frontier.
809 void FlowGraph::ComputeDominators( 775 void FlowGraph::ComputeDominators(
810 GrowableArray<BitVector*>* dominance_frontier) { 776 GrowableArray<BitVector*>* dominance_frontier) {
811 // Use the SEMI-NCA algorithm to compute dominators. This is a two-pass 777 // Use the SEMI-NCA algorithm to compute dominators. This is a two-pass
(...skipping 83 matching lines...) Expand 10 before | Expand all | Expand 10 after
895 for (intptr_t i = 0; i < count; ++i) { 861 for (intptr_t i = 0; i < count; ++i) {
896 BlockEntryInstr* runner = block->PredecessorAt(i); 862 BlockEntryInstr* runner = block->PredecessorAt(i);
897 while (runner != block->dominator()) { 863 while (runner != block->dominator()) {
898 (*dominance_frontier)[runner->preorder_number()]->Add(block_index); 864 (*dominance_frontier)[runner->preorder_number()]->Add(block_index);
899 runner = runner->dominator(); 865 runner = runner->dominator();
900 } 866 }
901 } 867 }
902 } 868 }
903 } 869 }
904 870
905
906 void FlowGraph::CompressPath(intptr_t start_index, 871 void FlowGraph::CompressPath(intptr_t start_index,
907 intptr_t current_index, 872 intptr_t current_index,
908 GrowableArray<intptr_t>* parent, 873 GrowableArray<intptr_t>* parent,
909 GrowableArray<intptr_t>* label) { 874 GrowableArray<intptr_t>* label) {
910 intptr_t next_index = (*parent)[current_index]; 875 intptr_t next_index = (*parent)[current_index];
911 if (next_index > start_index) { 876 if (next_index > start_index) {
912 CompressPath(start_index, next_index, parent, label); 877 CompressPath(start_index, next_index, parent, label);
913 (*label)[current_index] = 878 (*label)[current_index] =
914 Utils::Minimum((*label)[current_index], (*label)[next_index]); 879 Utils::Minimum((*label)[current_index], (*label)[next_index]);
915 (*parent)[current_index] = (*parent)[next_index]; 880 (*parent)[current_index] = (*parent)[next_index];
916 } 881 }
917 } 882 }
918 883
919
920 void FlowGraph::InsertPhis(const GrowableArray<BlockEntryInstr*>& preorder, 884 void FlowGraph::InsertPhis(const GrowableArray<BlockEntryInstr*>& preorder,
921 const GrowableArray<BitVector*>& assigned_vars, 885 const GrowableArray<BitVector*>& assigned_vars,
922 const GrowableArray<BitVector*>& dom_frontier, 886 const GrowableArray<BitVector*>& dom_frontier,
923 GrowableArray<PhiInstr*>* live_phis) { 887 GrowableArray<PhiInstr*>* live_phis) {
924 const intptr_t block_count = preorder.length(); 888 const intptr_t block_count = preorder.length();
925 // Map preorder block number to the highest variable index that has a phi 889 // Map preorder block number to the highest variable index that has a phi
926 // in that block. Use it to avoid inserting multiple phis for the same 890 // in that block. Use it to avoid inserting multiple phis for the same
927 // variable. 891 // variable.
928 GrowableArray<intptr_t> has_already(block_count); 892 GrowableArray<intptr_t> has_already(block_count);
929 // Map preorder block number to the highest variable index for which the 893 // Map preorder block number to the highest variable index for which the
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
969 if (work[index] < var_index) { 933 if (work[index] < var_index) {
970 work[index] = var_index; 934 work[index] = var_index;
971 worklist.Add(block); 935 worklist.Add(block);
972 } 936 }
973 } 937 }
974 } 938 }
975 } 939 }
976 } 940 }
977 } 941 }
978 942
979
980 void FlowGraph::Rename(GrowableArray<PhiInstr*>* live_phis, 943 void FlowGraph::Rename(GrowableArray<PhiInstr*>* live_phis,
981 VariableLivenessAnalysis* variable_liveness, 944 VariableLivenessAnalysis* variable_liveness,
982 ZoneGrowableArray<Definition*>* inlining_parameters) { 945 ZoneGrowableArray<Definition*>* inlining_parameters) {
983 GraphEntryInstr* entry = graph_entry(); 946 GraphEntryInstr* entry = graph_entry();
984 947
985 // Initial renaming environment. 948 // Initial renaming environment.
986 GrowableArray<Definition*> env(variable_count()); 949 GrowableArray<Definition*> env(variable_count());
987 950
988 // Add global constants to the initial definitions. 951 // Add global constants to the initial definitions.
989 constant_null_ = GetConstant(Object::ZoneHandle()); 952 constant_null_ = GetConstant(Object::ZoneHandle());
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after
1063 1026
1064 if (entry->SuccessorCount() > 1) { 1027 if (entry->SuccessorCount() > 1) {
1065 // Functions with try-catch have a fixed area of stack slots reserved 1028 // Functions with try-catch have a fixed area of stack slots reserved
1066 // so that all local variables are stored at a known location when 1029 // so that all local variables are stored at a known location when
1067 // on entry to the catch. 1030 // on entry to the catch.
1068 entry->set_fixed_slot_count(num_stack_locals() + num_copied_params()); 1031 entry->set_fixed_slot_count(num_stack_locals() + num_copied_params());
1069 } 1032 }
1070 RenameRecursive(entry, &env, live_phis, variable_liveness); 1033 RenameRecursive(entry, &env, live_phis, variable_liveness);
1071 } 1034 }
1072 1035
1073
// Builds a deoptimization environment from the current renaming state |env|
// and attaches it to |instr|, registering an environment use on every
// definition referenced by it.
void FlowGraph::AttachEnvironment(Instruction* instr,
                                  GrowableArray<Definition*>* env) {
  Environment* deopt_env =
      Environment::From(zone(), *env, num_non_copied_params_, parsed_function_);
  if (instr->IsClosureCall()) {
    // For closure calls, trim the instruction's own inputs off the end of
    // the environment copy.
    deopt_env =
        deopt_env->DeepCopy(zone(), deopt_env->Length() - instr->InputCount());
  }
  instr->SetEnvironment(deopt_env);
  // Record an env-use on each referenced definition so liveness/use lists
  // stay consistent.
  for (Environment::DeepIterator it(deopt_env); !it.Done(); it.Advance()) {
    Value* use = it.CurrentValue();
    use->definition()->AddEnvUse(use);
  }
  if (instr->ComputeCanDeoptimize()) {
    instr->env()->set_deopt_id(instr->deopt_id());
  }
}
1091 1053
1092
1093 void FlowGraph::RenameRecursive(BlockEntryInstr* block_entry, 1054 void FlowGraph::RenameRecursive(BlockEntryInstr* block_entry,
1094 GrowableArray<Definition*>* env, 1055 GrowableArray<Definition*>* env,
1095 GrowableArray<PhiInstr*>* live_phis, 1056 GrowableArray<PhiInstr*>* live_phis,
1096 VariableLivenessAnalysis* variable_liveness) { 1057 VariableLivenessAnalysis* variable_liveness) {
1097 // 1. Process phis first. 1058 // 1. Process phis first.
1098 if (block_entry->IsJoinEntry()) { 1059 if (block_entry->IsJoinEntry()) {
1099 JoinEntryInstr* join = block_entry->AsJoinEntry(); 1060 JoinEntryInstr* join = block_entry->AsJoinEntry();
1100 if (join->phis() != NULL) { 1061 if (join->phis() != NULL) {
1101 for (intptr_t i = 0; i < join->phis()->length(); ++i) { 1062 for (intptr_t i = 0; i < join->phis()->length(); ++i) {
1102 PhiInstr* phi = (*join->phis())[i]; 1063 PhiInstr* phi = (*join->phis())[i];
(...skipping 178 matching lines...) Expand 10 before | Expand all | Expand 10 after
1281 if (phi != NULL) { 1242 if (phi != NULL) {
1282 // Rename input operand. 1243 // Rename input operand.
1283 Value* use = new (zone()) Value((*env)[i]); 1244 Value* use = new (zone()) Value((*env)[i]);
1284 phi->SetInputAt(pred_index, use); 1245 phi->SetInputAt(pred_index, use);
1285 } 1246 }
1286 } 1247 }
1287 } 1248 }
1288 } 1249 }
1289 } 1250 }
1290 1251
1291
1292 void FlowGraph::RemoveDeadPhis(GrowableArray<PhiInstr*>* live_phis) { 1252 void FlowGraph::RemoveDeadPhis(GrowableArray<PhiInstr*>* live_phis) {
1293 // Augment live_phis with those that have implicit real used at 1253 // Augment live_phis with those that have implicit real used at
1294 // potentially throwing instructions if there is a try-catch in this graph. 1254 // potentially throwing instructions if there is a try-catch in this graph.
1295 if (graph_entry()->SuccessorCount() > 1) { 1255 if (graph_entry()->SuccessorCount() > 1) {
1296 for (BlockIterator it(postorder_iterator()); !it.Done(); it.Advance()) { 1256 for (BlockIterator it(postorder_iterator()); !it.Done(); it.Advance()) {
1297 JoinEntryInstr* join = it.Current()->AsJoinEntry(); 1257 JoinEntryInstr* join = it.Current()->AsJoinEntry();
1298 if (join == NULL) continue; 1258 if (join == NULL) continue;
1299 for (PhiIterator phi_it(join); !phi_it.Done(); phi_it.Advance()) { 1259 for (PhiIterator phi_it(join); !phi_it.Done(); phi_it.Advance()) {
1300 PhiInstr* phi = phi_it.Current(); 1260 PhiInstr* phi = phi_it.Current();
1301 if (phi == NULL || phi->is_alive() || (phi->input_use_list() != NULL) || 1261 if (phi == NULL || phi->is_alive() || (phi->input_use_list() != NULL) ||
(...skipping 25 matching lines...) Expand all
1327 } 1287 }
1328 } 1288 }
1329 } 1289 }
1330 1290
1331 for (BlockIterator it(postorder_iterator()); !it.Done(); it.Advance()) { 1291 for (BlockIterator it(postorder_iterator()); !it.Done(); it.Advance()) {
1332 JoinEntryInstr* join = it.Current()->AsJoinEntry(); 1292 JoinEntryInstr* join = it.Current()->AsJoinEntry();
1333 if (join != NULL) join->RemoveDeadPhis(constant_dead()); 1293 if (join != NULL) join->RemoveDeadPhis(constant_dead());
1334 } 1294 }
1335 } 1295 }
1336 1296
1337
// Inserts a Redefinition of |original| constrained to |compile_type| right
// after |prev| and renames dominated uses to it. Returns NULL (and inserts
// nothing) when an equivalent redefinition is already in place at that
// position.
RedefinitionInstr* FlowGraph::EnsureRedefinition(Instruction* prev,
                                                 Definition* original,
                                                 CompileType compile_type) {
  RedefinitionInstr* first = prev->next()->AsRedefinition();
  if (first != NULL && (first->constrained_type() != NULL)) {
    if ((first->value()->definition() == original) &&
        first->constrained_type()->IsEqualTo(&compile_type)) {
      // Already redefined. Do nothing.
      return NULL;
    }
  }
  RedefinitionInstr* redef = new RedefinitionInstr(new Value(original));
  redef->set_constrained_type(new CompileType(compile_type));
  InsertAfter(prev, redef, NULL, FlowGraph::kValue);
  RenameDominatedUses(original, redef, redef);
  return redef;
}
1355 1314
1356
void FlowGraph::RemoveRedefinitions() {
  // Remove redefinition instructions inserted to inhibit hoisting.
  for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
       block_it.Advance()) {
    // This pass can run over large graphs; allow safepoints between blocks.
    thread()->CheckForSafepoint();
    for (ForwardInstructionIterator instr_it(block_it.Current());
         !instr_it.Done(); instr_it.Advance()) {
      RedefinitionInstr* redefinition = instr_it.Current()->AsRedefinition();
      if (redefinition != NULL) {
        Definition* original;
        // Resolve through chains of redefinitions to the underlying
        // definition.
        // NOTE(review): this loop re-reads redefinition->value() each
        // iteration, so it terminates only if earlier redefinitions in the
        // chain have already been replaced by this pass — confirm the
        // traversal order guarantees that.
        do {
          original = redefinition->value()->definition();
        } while (original->IsRedefinition());
        redefinition->ReplaceUsesWith(original);
        instr_it.RemoveCurrentFromGraph();
      }
    }
  }
}
1376 1334
1377
1378 // Find the natural loop for the back edge m->n and attach loop information 1335 // Find the natural loop for the back edge m->n and attach loop information
1379 // to block n (loop header). The algorithm is described in "Advanced Compiler 1336 // to block n (loop header). The algorithm is described in "Advanced Compiler
1380 // Design & Implementation" (Muchnick) p192. 1337 // Design & Implementation" (Muchnick) p192.
1381 BitVector* FlowGraph::FindLoop(BlockEntryInstr* m, BlockEntryInstr* n) const { 1338 BitVector* FlowGraph::FindLoop(BlockEntryInstr* m, BlockEntryInstr* n) const {
1382 GrowableArray<BlockEntryInstr*> stack; 1339 GrowableArray<BlockEntryInstr*> stack;
1383 BitVector* loop = new (zone()) BitVector(zone(), preorder_.length()); 1340 BitVector* loop = new (zone()) BitVector(zone(), preorder_.length());
1384 1341
1385 loop->Add(n->preorder_number()); 1342 loop->Add(n->preorder_number());
1386 if (n != m) { 1343 if (n != m) {
1387 loop->Add(m->preorder_number()); 1344 loop->Add(m->preorder_number());
1388 stack.Add(m); 1345 stack.Add(m);
1389 } 1346 }
1390 1347
1391 while (!stack.is_empty()) { 1348 while (!stack.is_empty()) {
1392 BlockEntryInstr* p = stack.RemoveLast(); 1349 BlockEntryInstr* p = stack.RemoveLast();
1393 for (intptr_t i = 0; i < p->PredecessorCount(); ++i) { 1350 for (intptr_t i = 0; i < p->PredecessorCount(); ++i) {
1394 BlockEntryInstr* q = p->PredecessorAt(i); 1351 BlockEntryInstr* q = p->PredecessorAt(i);
1395 if (!loop->Contains(q->preorder_number())) { 1352 if (!loop->Contains(q->preorder_number())) {
1396 loop->Add(q->preorder_number()); 1353 loop->Add(q->preorder_number());
1397 stack.Add(q); 1354 stack.Add(q);
1398 } 1355 }
1399 } 1356 }
1400 } 1357 }
1401 return loop; 1358 return loop;
1402 } 1359 }
1403 1360
1404
1405 ZoneGrowableArray<BlockEntryInstr*>* FlowGraph::ComputeLoops() const { 1361 ZoneGrowableArray<BlockEntryInstr*>* FlowGraph::ComputeLoops() const {
1406 ZoneGrowableArray<BlockEntryInstr*>* loop_headers = 1362 ZoneGrowableArray<BlockEntryInstr*>* loop_headers =
1407 new (zone()) ZoneGrowableArray<BlockEntryInstr*>(); 1363 new (zone()) ZoneGrowableArray<BlockEntryInstr*>();
1408 1364
1409 for (BlockIterator it = postorder_iterator(); !it.Done(); it.Advance()) { 1365 for (BlockIterator it = postorder_iterator(); !it.Done(); it.Advance()) {
1410 BlockEntryInstr* block = it.Current(); 1366 BlockEntryInstr* block = it.Current();
1411 for (intptr_t i = 0; i < block->PredecessorCount(); ++i) { 1367 for (intptr_t i = 0; i < block->PredecessorCount(); ++i) {
1412 BlockEntryInstr* pred = block->PredecessorAt(i); 1368 BlockEntryInstr* pred = block->PredecessorAt(i);
1413 if (block->Dominates(pred)) { 1369 if (block->Dominates(pred)) {
1414 if (FLAG_trace_optimization) { 1370 if (FLAG_trace_optimization) {
(...skipping 24 matching lines...) Expand all
1439 OS::Print("Loop header B%" Pd "\n", header->block_id()); 1395 OS::Print("Loop header B%" Pd "\n", header->block_id());
1440 for (BitVector::Iterator it(header->loop_info()); !it.Done(); 1396 for (BitVector::Iterator it(header->loop_info()); !it.Done();
1441 it.Advance()) { 1397 it.Advance()) {
1442 OS::Print(" B%" Pd "\n", preorder_[it.Current()]->block_id()); 1398 OS::Print(" B%" Pd "\n", preorder_[it.Current()]->block_id());
1443 } 1399 }
1444 } 1400 }
1445 } 1401 }
1446 return loop_headers; 1402 return loop_headers;
1447 } 1403 }
1448 1404
1449
1450 intptr_t FlowGraph::InstructionCount() const { 1405 intptr_t FlowGraph::InstructionCount() const {
1451 intptr_t size = 0; 1406 intptr_t size = 0;
1452 // Iterate each block, skipping the graph entry. 1407 // Iterate each block, skipping the graph entry.
1453 for (intptr_t i = 1; i < preorder_.length(); ++i) { 1408 for (intptr_t i = 1; i < preorder_.length(); ++i) {
1454 for (ForwardInstructionIterator it(preorder_[i]); !it.Done(); 1409 for (ForwardInstructionIterator it(preorder_[i]); !it.Done();
1455 it.Advance()) { 1410 it.Advance()) {
1456 ++size; 1411 ++size;
1457 } 1412 }
1458 } 1413 }
1459 return size; 1414 return size;
1460 } 1415 }
1461 1416
1462
// Allocates a fresh BlockEffects summary for this graph in the zone,
// replacing any previously computed one.
void FlowGraph::ComputeBlockEffects() {
  block_effects_ = new (zone()) BlockEffects(this);
}
1466 1420
1467
1468 BlockEffects::BlockEffects(FlowGraph* flow_graph) 1421 BlockEffects::BlockEffects(FlowGraph* flow_graph)
1469 : available_at_(flow_graph->postorder().length()) { 1422 : available_at_(flow_graph->postorder().length()) {
1470 // We are tracking a single effect. 1423 // We are tracking a single effect.
1471 ASSERT(EffectSet::kLastEffect == 1); 1424 ASSERT(EffectSet::kLastEffect == 1);
1472 Zone* zone = flow_graph->zone(); 1425 Zone* zone = flow_graph->zone();
1473 const intptr_t block_count = flow_graph->postorder().length(); 1426 const intptr_t block_count = flow_graph->postorder().length();
1474 1427
1475 // Set of blocks that contain side-effects. 1428 // Set of blocks that contain side-effects.
1476 BitVector* kill = new (zone) BitVector(zone, block_count); 1429 BitVector* kill = new (zone) BitVector(zone, block_count);
1477 1430
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after
1538 available_after[block_num]->CopyFrom(temp); 1491 available_after[block_num]->CopyFrom(temp);
1539 // Block is always available after itself. 1492 // Block is always available after itself.
1540 available_after[block_num]->Add(block_num); 1493 available_after[block_num]->Add(block_num);
1541 } 1494 }
1542 changed = true; 1495 changed = true;
1543 } 1496 }
1544 } 1497 }
1545 } while (changed); 1498 } while (changed);
1546 } 1499 }
1547 1500
1548
1549 bool BlockEffects::IsAvailableAt(Instruction* instr, 1501 bool BlockEffects::IsAvailableAt(Instruction* instr,
1550 BlockEntryInstr* block) const { 1502 BlockEntryInstr* block) const {
1551 return (instr->Dependencies().IsNone()) || 1503 return (instr->Dependencies().IsNone()) ||
1552 IsSideEffectFreePath(instr->GetBlock(), block); 1504 IsSideEffectFreePath(instr->GetBlock(), block);
1553 } 1505 }
1554 1506
1555
1556 bool BlockEffects::CanBeMovedTo(Instruction* instr, 1507 bool BlockEffects::CanBeMovedTo(Instruction* instr,
1557 BlockEntryInstr* block) const { 1508 BlockEntryInstr* block) const {
1558 return (instr->Dependencies().IsNone()) || 1509 return (instr->Dependencies().IsNone()) ||
1559 IsSideEffectFreePath(block, instr->GetBlock()); 1510 IsSideEffectFreePath(block, instr->GetBlock());
1560 } 1511 }
1561 1512
1562
1563 bool BlockEffects::IsSideEffectFreePath(BlockEntryInstr* from, 1513 bool BlockEffects::IsSideEffectFreePath(BlockEntryInstr* from,
1564 BlockEntryInstr* to) const { 1514 BlockEntryInstr* to) const {
1565 return available_at_[to->postorder_number()]->Contains( 1515 return available_at_[to->postorder_number()]->Contains(
1566 from->postorder_number()); 1516 from->postorder_number());
1567 } 1517 }
1568 1518
1569
1570 // Quick access to the current zone. 1519 // Quick access to the current zone.
1571 #define Z (zone()) 1520 #define Z (zone())
1572 1521
1573
1574 void FlowGraph::ConvertUse(Value* use, Representation from_rep) { 1522 void FlowGraph::ConvertUse(Value* use, Representation from_rep) {
1575 const Representation to_rep = 1523 const Representation to_rep =
1576 use->instruction()->RequiredInputRepresentation(use->use_index()); 1524 use->instruction()->RequiredInputRepresentation(use->use_index());
1577 if (from_rep == to_rep || to_rep == kNoRepresentation) { 1525 if (from_rep == to_rep || to_rep == kNoRepresentation) {
1578 return; 1526 return;
1579 } 1527 }
1580 InsertConversion(from_rep, to_rep, use, /*is_environment_use=*/false); 1528 InsertConversion(from_rep, to_rep, use, /*is_environment_use=*/false);
1581 } 1529 }
1582 1530
1583
1584 static bool IsUnboxedInteger(Representation rep) { 1531 static bool IsUnboxedInteger(Representation rep) {
1585 return (rep == kUnboxedInt32) || (rep == kUnboxedUint32) || 1532 return (rep == kUnboxedInt32) || (rep == kUnboxedUint32) ||
1586 (rep == kUnboxedMint); 1533 (rep == kUnboxedMint);
1587 } 1534 }
1588 1535
1589
// Returns true when the target supports unboxed 128-bit SIMD values.
static bool ShouldInlineSimd() {
  return FlowGraphCompiler::SupportsUnboxedSimd128();
}
1593 1539
1594
// Returns true when the target supports unboxed doubles.
static bool CanUnboxDouble() {
  return FlowGraphCompiler::SupportsUnboxedDoubles();
}
1598 1543
1599
// Returns true when the target can convert an unboxed mint directly to a
// double.
static bool CanConvertUnboxedMintToDouble() {
  return FlowGraphCompiler::CanConvertUnboxedMintToDouble();
}
1603 1547
1604
1605 void FlowGraph::InsertConversion(Representation from, 1548 void FlowGraph::InsertConversion(Representation from,
1606 Representation to, 1549 Representation to,
1607 Value* use, 1550 Value* use,
1608 bool is_environment_use) { 1551 bool is_environment_use) {
1609 Instruction* insert_before; 1552 Instruction* insert_before;
1610 Instruction* deopt_target; 1553 Instruction* deopt_target;
1611 PhiInstr* phi = use->instruction()->AsPhi(); 1554 PhiInstr* phi = use->instruction()->AsPhi();
1612 if (phi != NULL) { 1555 if (phi != NULL) {
1613 ASSERT(phi->is_alive()); 1556 ASSERT(phi->is_alive());
1614 // For phis conversions have to be inserted in the predecessor. 1557 // For phis conversions have to be inserted in the predecessor.
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after
1666 use->BindTo(converted); 1609 use->BindTo(converted);
1667 } 1610 }
1668 1611
1669 if ((to == kUnboxedInt32) && (phi != NULL)) { 1612 if ((to == kUnboxedInt32) && (phi != NULL)) {
1670 // Int32 phis are unboxed optimistically. Ensure that unboxing 1613 // Int32 phis are unboxed optimistically. Ensure that unboxing
1671 // has deoptimization target attached from the goto instruction. 1614 // has deoptimization target attached from the goto instruction.
1672 CopyDeoptTarget(converted, insert_before); 1615 CopyDeoptTarget(converted, insert_before);
1673 } 1616 }
1674 } 1617 }
1675 1618
1676
1677 void FlowGraph::ConvertEnvironmentUse(Value* use, Representation from_rep) { 1619 void FlowGraph::ConvertEnvironmentUse(Value* use, Representation from_rep) {
1678 const Representation to_rep = kTagged; 1620 const Representation to_rep = kTagged;
1679 if (from_rep == to_rep) { 1621 if (from_rep == to_rep) {
1680 return; 1622 return;
1681 } 1623 }
1682 InsertConversion(from_rep, to_rep, use, /*is_environment_use=*/true); 1624 InsertConversion(from_rep, to_rep, use, /*is_environment_use=*/true);
1683 } 1625 }
1684 1626
1685
1686 void FlowGraph::InsertConversionsFor(Definition* def) { 1627 void FlowGraph::InsertConversionsFor(Definition* def) {
1687 const Representation from_rep = def->representation(); 1628 const Representation from_rep = def->representation();
1688 1629
1689 for (Value::Iterator it(def->input_use_list()); !it.Done(); it.Advance()) { 1630 for (Value::Iterator it(def->input_use_list()); !it.Done(); it.Advance()) {
1690 ConvertUse(it.Current(), from_rep); 1631 ConvertUse(it.Current(), from_rep);
1691 } 1632 }
1692 1633
1693 if (graph_entry()->SuccessorCount() > 1) { 1634 if (graph_entry()->SuccessorCount() > 1) {
1694 for (Value::Iterator it(def->env_use_list()); !it.Done(); it.Advance()) { 1635 for (Value::Iterator it(def->env_use_list()); !it.Done(); it.Advance()) {
1695 Value* use = it.Current(); 1636 Value* use = it.Current();
1696 if (use->instruction()->MayThrow() && 1637 if (use->instruction()->MayThrow() &&
1697 use->instruction()->GetBlock()->InsideTryBlock()) { 1638 use->instruction()->GetBlock()->InsideTryBlock()) {
1698 // Environment uses at calls inside try-blocks must be converted to 1639 // Environment uses at calls inside try-blocks must be converted to
1699 // tagged representation. 1640 // tagged representation.
1700 ConvertEnvironmentUse(it.Current(), from_rep); 1641 ConvertEnvironmentUse(it.Current(), from_rep);
1701 } 1642 }
1702 } 1643 }
1703 } 1644 }
1704 } 1645 }
1705 1646
1706
1707 static void UnboxPhi(PhiInstr* phi) { 1647 static void UnboxPhi(PhiInstr* phi) {
1708 Representation unboxed = phi->representation(); 1648 Representation unboxed = phi->representation();
1709 1649
1710 switch (phi->Type()->ToCid()) { 1650 switch (phi->Type()->ToCid()) {
1711 case kDoubleCid: 1651 case kDoubleCid:
1712 if (CanUnboxDouble()) { 1652 if (CanUnboxDouble()) {
1713 unboxed = kUnboxedDouble; 1653 unboxed = kUnboxedDouble;
1714 } 1654 }
1715 break; 1655 break;
1716 case kFloat32x4Cid: 1656 case kFloat32x4Cid:
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after
1777 unboxed = 1717 unboxed =
1778 RangeUtils::Fits(phi->range(), RangeBoundary::kRangeBoundaryInt32) 1718 RangeUtils::Fits(phi->range(), RangeBoundary::kRangeBoundaryInt32)
1779 ? kUnboxedInt32 1719 ? kUnboxedInt32
1780 : kUnboxedMint; 1720 : kUnboxedMint;
1781 } 1721 }
1782 } 1722 }
1783 1723
1784 phi->set_representation(unboxed); 1724 phi->set_representation(unboxed);
1785 } 1725 }
1786 1726
1787
1788 void FlowGraph::SelectRepresentations() { 1727 void FlowGraph::SelectRepresentations() {
1789 // Conservatively unbox all phis that were proven to be of Double, 1728 // Conservatively unbox all phis that were proven to be of Double,
1790 // Float32x4, or Int32x4 type. 1729 // Float32x4, or Int32x4 type.
1791 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done(); 1730 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
1792 block_it.Advance()) { 1731 block_it.Advance()) {
1793 JoinEntryInstr* join_entry = block_it.Current()->AsJoinEntry(); 1732 JoinEntryInstr* join_entry = block_it.Current()->AsJoinEntry();
1794 if (join_entry != NULL) { 1733 if (join_entry != NULL) {
1795 for (PhiIterator it(join_entry); !it.Done(); it.Advance()) { 1734 for (PhiIterator it(join_entry); !it.Done(); it.Advance()) {
1796 PhiInstr* phi = it.Current(); 1735 PhiInstr* phi = it.Current();
1797 UnboxPhi(phi); 1736 UnboxPhi(phi);
(...skipping 29 matching lines...) Expand all
1827 } 1766 }
1828 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) { 1767 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
1829 Definition* def = it.Current()->AsDefinition(); 1768 Definition* def = it.Current()->AsDefinition();
1830 if (def != NULL) { 1769 if (def != NULL) {
1831 InsertConversionsFor(def); 1770 InsertConversionsFor(def);
1832 } 1771 }
1833 } 1772 }
1834 } 1773 }
1835 } 1774 }
1836 1775
1837
1838 #if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_IA32) 1776 #if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_IA32)
1839 // Smi widening pass is only meaningful on platforms where Smi 1777 // Smi widening pass is only meaningful on platforms where Smi
1840 // is smaller than 32bit. For now only support it on ARM and ia32. 1778 // is smaller than 32bit. For now only support it on ARM and ia32.
1841 static bool CanBeWidened(BinarySmiOpInstr* smi_op) { 1779 static bool CanBeWidened(BinarySmiOpInstr* smi_op) {
1842 return BinaryInt32OpInstr::IsSupported(smi_op->op_kind(), smi_op->left(), 1780 return BinaryInt32OpInstr::IsSupported(smi_op->op_kind(), smi_op->left(),
1843 smi_op->right()); 1781 smi_op->right());
1844 } 1782 }
1845 1783
1846
1847 static bool BenefitsFromWidening(BinarySmiOpInstr* smi_op) { 1784 static bool BenefitsFromWidening(BinarySmiOpInstr* smi_op) {
1848 // TODO(vegorov): when shifts with non-constants shift count are supported 1785 // TODO(vegorov): when shifts with non-constants shift count are supported
1849 // add them here as we save untagging for the count. 1786 // add them here as we save untagging for the count.
1850 switch (smi_op->op_kind()) { 1787 switch (smi_op->op_kind()) {
1851 case Token::kMUL: 1788 case Token::kMUL:
1852 case Token::kSHR: 1789 case Token::kSHR:
1853 // For kMUL we save untagging of the argument for kSHR 1790 // For kMUL we save untagging of the argument for kSHR
1854 // we save tagging of the result. 1791 // we save tagging of the result.
1855 return true; 1792 return true;
1856 1793
1857 default: 1794 default:
1858 return false; 1795 return false;
1859 } 1796 }
1860 } 1797 }
1861 1798
1862
1863 void FlowGraph::WidenSmiToInt32() { 1799 void FlowGraph::WidenSmiToInt32() {
1864 GrowableArray<BinarySmiOpInstr*> candidates; 1800 GrowableArray<BinarySmiOpInstr*> candidates;
1865 1801
1866 // Step 1. Collect all instructions that potentially benefit from widening of 1802 // Step 1. Collect all instructions that potentially benefit from widening of
1867 // their operands (or their result) into int32 range. 1803 // their operands (or their result) into int32 range.
1868 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done(); 1804 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
1869 block_it.Advance()) { 1805 block_it.Advance()) {
1870 for (ForwardInstructionIterator instr_it(block_it.Current()); 1806 for (ForwardInstructionIterator instr_it(block_it.Current());
1871 !instr_it.Done(); instr_it.Advance()) { 1807 !instr_it.Done(); instr_it.Advance()) {
1872 BinarySmiOpInstr* smi_op = instr_it.Current()->AsBinarySmiOp(); 1808 BinarySmiOpInstr* smi_op = instr_it.Current()->AsBinarySmiOp();
(...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after
2042 } 1978 }
2043 #else 1979 #else
2044 void FlowGraph::WidenSmiToInt32() { 1980 void FlowGraph::WidenSmiToInt32() {
2045 // TODO(vegorov) ideally on 64-bit platforms we would like to narrow smi 1981 // TODO(vegorov) ideally on 64-bit platforms we would like to narrow smi
2046 // operations to 32-bit where it saves tagging and untagging and allows 1982 // operations to 32-bit where it saves tagging and untagging and allows
2047 // to use shorted (and faster) instructions. But we currently don't 1983 // to use shorted (and faster) instructions. But we currently don't
2048 // save enough range information in the ICData to drive this decision. 1984 // save enough range information in the ICData to drive this decision.
2049 } 1985 }
2050 #endif 1986 #endif
2051 1987
2052
2053 void FlowGraph::EliminateEnvironments() { 1988 void FlowGraph::EliminateEnvironments() {
2054 // After this pass we can no longer perform LICM and hoist instructions 1989 // After this pass we can no longer perform LICM and hoist instructions
2055 // that can deoptimize. 1990 // that can deoptimize.
2056 1991
2057 disallow_licm(); 1992 disallow_licm();
2058 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done(); 1993 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
2059 block_it.Advance()) { 1994 block_it.Advance()) {
2060 BlockEntryInstr* block = block_it.Current(); 1995 BlockEntryInstr* block = block_it.Current();
2061 if (!block->IsCatchBlockEntry()) { 1996 if (!block->IsCatchBlockEntry()) {
2062 block->RemoveEnvironment(); 1997 block->RemoveEnvironment();
2063 } 1998 }
2064 for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) { 1999 for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
2065 Instruction* current = it.Current(); 2000 Instruction* current = it.Current();
2066 if (!current->ComputeCanDeoptimize() && 2001 if (!current->ComputeCanDeoptimize() &&
2067 (!current->MayThrow() || !current->GetBlock()->InsideTryBlock())) { 2002 (!current->MayThrow() || !current->GetBlock()->InsideTryBlock())) {
2068 // Instructions that can throw need an environment for optimized 2003 // Instructions that can throw need an environment for optimized
2069 // try-catch. 2004 // try-catch.
2070 // TODO(srdjan): --source-lines needs deopt environments to get at 2005 // TODO(srdjan): --source-lines needs deopt environments to get at
2071 // the code for this instruction, however, leaving the environment 2006 // the code for this instruction, however, leaving the environment
2072 // changes code. 2007 // changes code.
2073 current->RemoveEnvironment(); 2008 current->RemoveEnvironment();
2074 } 2009 }
2075 } 2010 }
2076 } 2011 }
2077 } 2012 }
2078 2013
2079
2080 bool FlowGraph::Canonicalize() { 2014 bool FlowGraph::Canonicalize() {
2081 bool changed = false; 2015 bool changed = false;
2082 2016
2083 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done(); 2017 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
2084 block_it.Advance()) { 2018 block_it.Advance()) {
2085 for (ForwardInstructionIterator it(block_it.Current()); !it.Done(); 2019 for (ForwardInstructionIterator it(block_it.Current()); !it.Done();
2086 it.Advance()) { 2020 it.Advance()) {
2087 Instruction* current = it.Current(); 2021 Instruction* current = it.Current();
2088 if (current->HasUnmatchedInputRepresentations()) { 2022 if (current->HasUnmatchedInputRepresentations()) {
2089 // Can't canonicalize this instruction until all conversions for its 2023 // Can't canonicalize this instruction until all conversions for its
2090 // inputs are inserted. 2024 // inputs are inserted.
2091 continue; 2025 continue;
2092 } 2026 }
2093 2027
2094 Instruction* replacement = current->Canonicalize(this); 2028 Instruction* replacement = current->Canonicalize(this);
2095 2029
2096 if (replacement != current) { 2030 if (replacement != current) {
2097 // For non-definitions Canonicalize should return either NULL or 2031 // For non-definitions Canonicalize should return either NULL or
2098 // this. 2032 // this.
2099 ASSERT((replacement == NULL) || current->IsDefinition()); 2033 ASSERT((replacement == NULL) || current->IsDefinition());
2100 ReplaceCurrentInstruction(&it, current, replacement); 2034 ReplaceCurrentInstruction(&it, current, replacement);
2101 changed = true; 2035 changed = true;
2102 } 2036 }
2103 } 2037 }
2104 } 2038 }
2105 return changed; 2039 return changed;
2106 } 2040 }
2107 2041
2108
2109 // Optimize (a << b) & c pattern: if c is a positive Smi or zero, then the 2042 // Optimize (a << b) & c pattern: if c is a positive Smi or zero, then the
2110 // shift can be a truncating Smi shift-left and result is always Smi. 2043 // shift can be a truncating Smi shift-left and result is always Smi.
2111 // Merging occurs only per basic-block. 2044 // Merging occurs only per basic-block.
2112 void FlowGraph::TryOptimizePatterns() { 2045 void FlowGraph::TryOptimizePatterns() {
2113 if (!FLAG_truncating_left_shift) return; 2046 if (!FLAG_truncating_left_shift) return;
2114 GrowableArray<BinarySmiOpInstr*> div_mod_merge; 2047 GrowableArray<BinarySmiOpInstr*> div_mod_merge;
2115 GrowableArray<InvokeMathCFunctionInstr*> sin_cos_merge; 2048 GrowableArray<InvokeMathCFunctionInstr*> sin_cos_merge;
2116 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done(); 2049 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
2117 block_it.Advance()) { 2050 block_it.Advance()) {
2118 // Merging only per basic-block. 2051 // Merging only per basic-block.
(...skipping 27 matching lines...) Expand all
2146 if (math_unary->HasUses()) { 2079 if (math_unary->HasUses()) {
2147 sin_cos_merge.Add(math_unary); 2080 sin_cos_merge.Add(math_unary);
2148 } 2081 }
2149 } 2082 }
2150 } 2083 }
2151 } 2084 }
2152 TryMergeTruncDivMod(&div_mod_merge); 2085 TryMergeTruncDivMod(&div_mod_merge);
2153 } 2086 }
2154 } 2087 }
2155 2088
2156
2157 // Returns true if use is dominated by the given instruction. 2089 // Returns true if use is dominated by the given instruction.
2158 // Note: uses that occur at instruction itself are not dominated by it. 2090 // Note: uses that occur at instruction itself are not dominated by it.
2159 static bool IsDominatedUse(Instruction* dom, Value* use) { 2091 static bool IsDominatedUse(Instruction* dom, Value* use) {
2160 BlockEntryInstr* dom_block = dom->GetBlock(); 2092 BlockEntryInstr* dom_block = dom->GetBlock();
2161 2093
2162 Instruction* instr = use->instruction(); 2094 Instruction* instr = use->instruction();
2163 2095
2164 PhiInstr* phi = instr->AsPhi(); 2096 PhiInstr* phi = instr->AsPhi();
2165 if (phi != NULL) { 2097 if (phi != NULL) {
2166 return dom_block->Dominates(phi->block()->PredecessorAt(use->use_index())); 2098 return dom_block->Dominates(phi->block()->PredecessorAt(use->use_index()));
2167 } 2099 }
2168 2100
2169 BlockEntryInstr* use_block = instr->GetBlock(); 2101 BlockEntryInstr* use_block = instr->GetBlock();
2170 if (use_block == dom_block) { 2102 if (use_block == dom_block) {
2171 // Fast path for the case of block entry. 2103 // Fast path for the case of block entry.
2172 if (dom_block == dom) return true; 2104 if (dom_block == dom) return true;
2173 2105
2174 for (Instruction* curr = dom->next(); curr != NULL; curr = curr->next()) { 2106 for (Instruction* curr = dom->next(); curr != NULL; curr = curr->next()) {
2175 if (curr == instr) return true; 2107 if (curr == instr) return true;
2176 } 2108 }
2177 2109
2178 return false; 2110 return false;
2179 } 2111 }
2180 2112
2181 return dom_block->Dominates(use_block); 2113 return dom_block->Dominates(use_block);
2182 } 2114 }
2183 2115
2184
2185 void FlowGraph::RenameDominatedUses(Definition* def, 2116 void FlowGraph::RenameDominatedUses(Definition* def,
2186 Instruction* dom, 2117 Instruction* dom,
2187 Definition* other) { 2118 Definition* other) {
2188 for (Value::Iterator it(def->input_use_list()); !it.Done(); it.Advance()) { 2119 for (Value::Iterator it(def->input_use_list()); !it.Done(); it.Advance()) {
2189 Value* use = it.Current(); 2120 Value* use = it.Current();
2190 if (IsDominatedUse(dom, use)) { 2121 if (IsDominatedUse(dom, use)) {
2191 use->BindTo(other); 2122 use->BindTo(other);
2192 } 2123 }
2193 } 2124 }
2194 } 2125 }
2195 2126
2196
2197 void FlowGraph::RenameUsesDominatedByRedefinitions() { 2127 void FlowGraph::RenameUsesDominatedByRedefinitions() {
2198 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done(); 2128 for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
2199 block_it.Advance()) { 2129 block_it.Advance()) {
2200 for (ForwardInstructionIterator instr_it(block_it.Current()); 2130 for (ForwardInstructionIterator instr_it(block_it.Current());
2201 !instr_it.Done(); instr_it.Advance()) { 2131 !instr_it.Done(); instr_it.Advance()) {
2202 RedefinitionInstr* redefinition = instr_it.Current()->AsRedefinition(); 2132 RedefinitionInstr* redefinition = instr_it.Current()->AsRedefinition();
2203 if (redefinition != NULL) { 2133 if (redefinition != NULL) {
2204 Definition* original = redefinition->value()->definition(); 2134 Definition* original = redefinition->value()->definition();
2205 RenameDominatedUses(original, redefinition, redefinition); 2135 RenameDominatedUses(original, redefinition, redefinition);
2206 } 2136 }
2207 } 2137 }
2208 } 2138 }
2209 } 2139 }
2210 2140
2211
2212 static bool IsPositiveOrZeroSmiConst(Definition* d) { 2141 static bool IsPositiveOrZeroSmiConst(Definition* d) {
2213 ConstantInstr* const_instr = d->AsConstant(); 2142 ConstantInstr* const_instr = d->AsConstant();
2214 if ((const_instr != NULL) && (const_instr->value().IsSmi())) { 2143 if ((const_instr != NULL) && (const_instr->value().IsSmi())) {
2215 return Smi::Cast(const_instr->value()).Value() >= 0; 2144 return Smi::Cast(const_instr->value()).Value() >= 0;
2216 } 2145 }
2217 return false; 2146 return false;
2218 } 2147 }
2219 2148
2220
2221 static BinarySmiOpInstr* AsSmiShiftLeftInstruction(Definition* d) { 2149 static BinarySmiOpInstr* AsSmiShiftLeftInstruction(Definition* d) {
2222 BinarySmiOpInstr* instr = d->AsBinarySmiOp(); 2150 BinarySmiOpInstr* instr = d->AsBinarySmiOp();
2223 if ((instr != NULL) && (instr->op_kind() == Token::kSHL)) { 2151 if ((instr != NULL) && (instr->op_kind() == Token::kSHL)) {
2224 return instr; 2152 return instr;
2225 } 2153 }
2226 return NULL; 2154 return NULL;
2227 } 2155 }
2228 2156
2229
2230 void FlowGraph::OptimizeLeftShiftBitAndSmiOp( 2157 void FlowGraph::OptimizeLeftShiftBitAndSmiOp(
2231 ForwardInstructionIterator* current_iterator, 2158 ForwardInstructionIterator* current_iterator,
2232 Definition* bit_and_instr, 2159 Definition* bit_and_instr,
2233 Definition* left_instr, 2160 Definition* left_instr,
2234 Definition* right_instr) { 2161 Definition* right_instr) {
2235 ASSERT(bit_and_instr != NULL); 2162 ASSERT(bit_and_instr != NULL);
2236 ASSERT((left_instr != NULL) && (right_instr != NULL)); 2163 ASSERT((left_instr != NULL) && (right_instr != NULL));
2237 2164
2238 // Check for pattern, smi_shift_left must be single-use. 2165 // Check for pattern, smi_shift_left must be single-use.
2239 bool is_positive_or_zero = IsPositiveOrZeroSmiConst(left_instr); 2166 bool is_positive_or_zero = IsPositiveOrZeroSmiConst(left_instr);
(...skipping 16 matching lines...) Expand all
2256 ASSERT(bit_and_instr->IsBinarySmiOp() || bit_and_instr->IsBinaryMintOp()); 2183 ASSERT(bit_and_instr->IsBinarySmiOp() || bit_and_instr->IsBinaryMintOp());
2257 if (bit_and_instr->IsBinaryMintOp()) { 2184 if (bit_and_instr->IsBinaryMintOp()) {
2258 // Replace Mint op with Smi op. 2185 // Replace Mint op with Smi op.
2259 BinarySmiOpInstr* smi_op = new (Z) BinarySmiOpInstr( 2186 BinarySmiOpInstr* smi_op = new (Z) BinarySmiOpInstr(
2260 Token::kBIT_AND, new (Z) Value(left_instr), new (Z) Value(right_instr), 2187 Token::kBIT_AND, new (Z) Value(left_instr), new (Z) Value(right_instr),
2261 Thread::kNoDeoptId); // BIT_AND cannot deoptimize. 2188 Thread::kNoDeoptId); // BIT_AND cannot deoptimize.
2262 bit_and_instr->ReplaceWith(smi_op, current_iterator); 2189 bit_and_instr->ReplaceWith(smi_op, current_iterator);
2263 } 2190 }
2264 } 2191 }
2265 2192
2266
2267 // Dart: 2193 // Dart:
2268 // var x = d % 10; 2194 // var x = d % 10;
2269 // var y = d ~/ 10; 2195 // var y = d ~/ 10;
2270 // var z = x + y; 2196 // var z = x + y;
2271 // 2197 //
2272 // IL: 2198 // IL:
2273 // v4 <- %(v2, v3) 2199 // v4 <- %(v2, v3)
2274 // v5 <- ~/(v2, v3) 2200 // v5 <- ~/(v2, v3)
2275 // v6 <- +(v4, v5) 2201 // v6 <- +(v4, v5)
2276 // 2202 //
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
2327 other_binop->RemoveFromGraph(); 2253 other_binop->RemoveFromGraph();
2328 // Only one merge possible. Because canonicalization happens later, 2254 // Only one merge possible. Because canonicalization happens later,
2329 // more candidates are possible. 2255 // more candidates are possible.
2330 // TODO(srdjan): Allow merging of trunc-div/mod into truncDivMod. 2256 // TODO(srdjan): Allow merging of trunc-div/mod into truncDivMod.
2331 break; 2257 break;
2332 } 2258 }
2333 } 2259 }
2334 } 2260 }
2335 } 2261 }
2336 2262
2337
2338 void FlowGraph::AppendExtractNthOutputForMerged(Definition* instr, 2263 void FlowGraph::AppendExtractNthOutputForMerged(Definition* instr,
2339 intptr_t index, 2264 intptr_t index,
2340 Representation rep, 2265 Representation rep,
2341 intptr_t cid) { 2266 intptr_t cid) {
2342 ExtractNthOutputInstr* extract = 2267 ExtractNthOutputInstr* extract =
2343 new (Z) ExtractNthOutputInstr(new (Z) Value(instr), index, rep, cid); 2268 new (Z) ExtractNthOutputInstr(new (Z) Value(instr), index, rep, cid);
2344 instr->ReplaceUsesWith(extract); 2269 instr->ReplaceUsesWith(extract);
2345 InsertAfter(instr, extract, NULL, FlowGraph::kValue); 2270 InsertAfter(instr, extract, NULL, FlowGraph::kValue);
2346 } 2271 }
2347 2272
2348
2349 } // namespace dart 2273 } // namespace dart
OLDNEW
« no previous file with comments | « runtime/vm/flow_graph.h ('k') | runtime/vm/flow_graph_allocator.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698