// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/hydrogen.h"
#include "src/hydrogen-gvn.h"
#include "src/v8.h"

namespace v8 {
namespace internal {

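// A hash map from HInstruction* to the first GVN-equivalent instruction
// seen so far, used to detect and eliminate redundant computations.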
class HInstructionMap final : public ZoneObject {
 public:
  HInstructionMap(Zone* zone, SideEffectsTracker* side_effects_tracker)
      : array_size_(0),
        lists_size_(0),
        count_(0),
        array_(NULL),
        lists_(NULL),
        free_list_head_(kNil),
        side_effects_tracker_(side_effects_tracker) {
    ResizeLists(kInitialSize, zone);
    Resize(kInitialSize, zone);
  }

  void Kill(SideEffects side_effects);

  void Add(HInstruction* instr, Zone* zone) {
    present_depends_on_.Add(side_effects_tracker_->ComputeDependsOn(instr));
    Insert(instr, zone);
  }

  HInstruction* Lookup(HInstruction* instr) const;

  HInstructionMap* Copy(Zone* zone) const {
    return new(zone) HInstructionMap(zone, this);
  }

  bool IsEmpty() const { return count_ == 0; }

 private:
  // A linked list of HInstruction* values. Stored in arrays.
  struct HInstructionMapListElement {
    HInstruction* instr;
    int next;  // Index in the array of the next list element.
  };
  static const int kNil = -1;  // The end of a linked list.

  // Must be a power of 2.
  static const int kInitialSize = 16;

  HInstructionMap(Zone* zone, const HInstructionMap* other);

  void Resize(int new_size, Zone* zone);
  void ResizeLists(int new_size, Zone* zone);
  void Insert(HInstruction* instr, Zone* zone);
  uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }

  int array_size_;
  int lists_size_;
  int count_;  // The number of values stored in the HInstructionMap.
  SideEffects present_depends_on_;
  // Primary store - contains the first value with a given hash.
  // Colliding elements are stored in linked lists.
  HInstructionMapListElement* array_;
  // The linked lists containing hash collisions.
  HInstructionMapListElement* lists_;
  int free_list_head_;  // Unused elements in lists_ are on the free list.
  SideEffectsTracker* side_effects_tracker_;
};


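// For each tracked side effect, records the instruction that most recently
// produced it; GVN uses this to find side-effect dominators.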
class HSideEffectMap final BASE_EMBEDDED {
 public:
  HSideEffectMap();
  explicit HSideEffectMap(HSideEffectMap* other);
  HSideEffectMap& operator= (const HSideEffectMap& other);

  void Kill(SideEffects side_effects);

  void Store(SideEffects side_effects, HInstruction* instr);

  bool IsEmpty() const { return count_ == 0; }

  inline HInstruction* operator[](int i) const {
    DCHECK(0 <= i);
    DCHECK(i < kNumberOfTrackedSideEffects);
    return data_[i];
  }
  inline HInstruction* at(int i) const { return operator[](i); }

 private:
  int count_;
  HInstruction* data_[kNumberOfTrackedSideEffects];
};


void TraceGVN(const char* msg, ...) {
  va_list arguments;
  va_start(arguments, msg);
  base::OS::VPrint(msg, arguments);
  va_end(arguments);
}


// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments
// when --trace-gvn is off.
#define TRACE_GVN_1(msg, a1)                  \
  if (FLAG_trace_gvn) {                       \
    TraceGVN(msg, a1);                        \
  }

#define TRACE_GVN_2(msg, a1, a2)              \
  if (FLAG_trace_gvn) {                       \
    TraceGVN(msg, a1, a2);                    \
  }

#define TRACE_GVN_3(msg, a1, a2, a3)          \
  if (FLAG_trace_gvn) {                       \
    TraceGVN(msg, a1, a2, a3);                \
  }

#define TRACE_GVN_4(msg, a1, a2, a3, a4)      \
  if (FLAG_trace_gvn) {                       \
    TraceGVN(msg, a1, a2, a3, a4);            \
  }

#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5)  \
  if (FLAG_trace_gvn) {                       \
    TraceGVN(msg, a1, a2, a3, a4, a5);        \
  }


HInstructionMap::HInstructionMap(Zone* zone, const HInstructionMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_depends_on_(other->present_depends_on_),
      array_(zone->NewArray<HInstructionMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HInstructionMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_),
      side_effects_tracker_(other->side_effects_tracker_) {
  MemCopy(array_, other->array_,
          array_size_ * sizeof(HInstructionMapListElement));
  MemCopy(lists_, other->lists_,
          lists_size_ * sizeof(HInstructionMapListElement));
}


void HInstructionMap::Kill(SideEffects changes) {
  if (!present_depends_on_.ContainsAnyOf(changes)) return;
  present_depends_on_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    HInstruction* instr = array_[i].instr;
    if (instr != NULL) {
      // Clear list of collisions first, so we know if it becomes empty.
      int kept = kNil;  // List of kept elements.
      int next;
      for (int current = array_[i].next; current != kNil; current = next) {
        next = lists_[current].next;
        HInstruction* instr = lists_[current].instr;
        SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
        if (depends_on.ContainsAnyOf(changes)) {
          // Drop it.
          count_--;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
        } else {
          // Keep it.
          lists_[current].next = kept;
          kept = current;
          present_depends_on_.Add(depends_on);
        }
      }
      array_[i].next = kept;

      // Now possibly drop directly indexed element.
      instr = array_[i].instr;
      SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
      if (depends_on.ContainsAnyOf(changes)) {  // Drop it.
        count_--;
        int head = array_[i].next;
        if (head == kNil) {
          array_[i].instr = NULL;
        } else {
          array_[i].instr = lists_[head].instr;
          array_[i].next = lists_[head].next;
          lists_[head].next = free_list_head_;
          free_list_head_ = head;
        }
      } else {
        present_depends_on_.Add(depends_on);  // Keep it.
      }
    }
  }
}


HInstruction* HInstructionMap::Lookup(HInstruction* instr) const {
  uint32_t hash = static_cast<uint32_t>(instr->Hashcode());
  uint32_t pos = Bound(hash);
  if (array_[pos].instr != NULL) {
    if (array_[pos].instr->Equals(instr)) return array_[pos].instr;
    int next = array_[pos].next;
    while (next != kNil) {
      if (lists_[next].instr->Equals(instr)) return lists_[next].instr;
      next = lists_[next].next;
    }
  }
  return NULL;
}


void HInstructionMap::Resize(int new_size, Zone* zone) {
  DCHECK(new_size > count_);
  // Hashing the values into the new array has no more collisions than in the
  // old hash map, so we can use the existing lists_ array, if we are careful.

  // Make sure we have at least one free element.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1, zone);
  }

  HInstructionMapListElement* new_array =
      zone->NewArray<HInstructionMapListElement>(new_size);
  memset(new_array, 0, sizeof(HInstructionMapListElement) * new_size);

  HInstructionMapListElement* old_array = array_;
  int old_size = array_size_;

  int old_count = count_;
  count_ = 0;
  // Do not modify present_depends_on_. It is currently correct.
  array_size_ = new_size;
  array_ = new_array;

  if (old_array != NULL) {
    // Iterate over all the elements in lists, rehashing them.
    for (int i = 0; i < old_size; ++i) {
      if (old_array[i].instr != NULL) {
        int current = old_array[i].next;
        while (current != kNil) {
          Insert(lists_[current].instr, zone);
          int next = lists_[current].next;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
          current = next;
        }
        // Rehash the directly stored instruction.
        Insert(old_array[i].instr, zone);
      }
    }
  }
  USE(old_count);
  DCHECK(count_ == old_count);
}


void HInstructionMap::ResizeLists(int new_size, Zone* zone) {
  DCHECK(new_size > lists_size_);

  HInstructionMapListElement* new_lists =
      zone->NewArray<HInstructionMapListElement>(new_size);
  memset(new_lists, 0, sizeof(HInstructionMapListElement) * new_size);

  HInstructionMapListElement* old_lists = lists_;
  int old_size = lists_size_;

  lists_size_ = new_size;
  lists_ = new_lists;

  if (old_lists != NULL) {
    MemCopy(lists_, old_lists, old_size * sizeof(HInstructionMapListElement));
  }
  for (int i = old_size; i < lists_size_; ++i) {
    lists_[i].next = free_list_head_;
    free_list_head_ = i;
  }
}


void HInstructionMap::Insert(HInstruction* instr, Zone* zone) {
  DCHECK(instr != NULL);
  // Resizing when half of the hashtable is filled up.
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
  DCHECK(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(instr->Hashcode()));
  if (array_[pos].instr == NULL) {
    array_[pos].instr = instr;
    array_[pos].next = kNil;
  } else {
    if (free_list_head_ == kNil) {
      ResizeLists(lists_size_ << 1, zone);
    }
    int new_element_pos = free_list_head_;
    DCHECK(new_element_pos != kNil);
    free_list_head_ = lists_[free_list_head_].next;
    lists_[new_element_pos].instr = instr;
    lists_[new_element_pos].next = array_[pos].next;
    DCHECK(array_[pos].next == kNil || lists_[array_[pos].next].instr != NULL);
    array_[pos].next = new_element_pos;
  }
}


HSideEffectMap::HSideEffectMap() : count_(0) {
  memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
}


HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
  *this = *other;  // Calls operator=.
}


HSideEffectMap& HSideEffectMap::operator=(const HSideEffectMap& other) {
  if (this != &other) {
    MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
  }
  return *this;
}


void HSideEffectMap::Kill(SideEffects side_effects) {
  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
    if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
      if (data_[i] != NULL) count_--;
      data_[i] = NULL;
    }
  }
}


void HSideEffectMap::Store(SideEffects side_effects, HInstruction* instr) {
  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
    if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
      if (data_[i] == NULL) count_++;
      data_[i] = instr;
    }
  }
}


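// Computes the side effects an instruction produces ("changes"). The coarse
// kGlobalVars / kInobjectFields flags are refined to per-cell / per-field
// special effects when the affected cell or field can be determined.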
SideEffects SideEffectsTracker::ComputeChanges(HInstruction* instr) {
  int index;
  SideEffects result(instr->ChangesFlags());
  if (result.ContainsFlag(kGlobalVars)) {
    if (instr->IsStoreNamedField()) {
      HStoreNamedField* store = HStoreNamedField::cast(instr);
      HConstant* target = HConstant::cast(store->object());
      if (ComputeGlobalVar(Unique<PropertyCell>::cast(target->GetUnique()),
                           &index)) {
        result.RemoveFlag(kGlobalVars);
        result.AddSpecial(GlobalVar(index));
        return result;
      }
    }
    for (index = 0; index < kNumberOfGlobalVars; ++index) {
      result.AddSpecial(GlobalVar(index));
    }
  } else if (result.ContainsFlag(kInobjectFields)) {
    if (instr->IsStoreNamedField() &&
        ComputeInobjectField(HStoreNamedField::cast(instr)->access(), &index)) {
      result.RemoveFlag(kInobjectFields);
      result.AddSpecial(InobjectField(index));
    } else {
      for (index = 0; index < kNumberOfInobjectFields; ++index) {
        result.AddSpecial(InobjectField(index));
      }
    }
  }
  return result;
}


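// Computes the side effects an instruction depends on, refining the coarse
// flags to per-cell / per-field special effects as in ComputeChanges().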
SideEffects SideEffectsTracker::ComputeDependsOn(HInstruction* instr) {
  int index;
  SideEffects result(instr->DependsOnFlags());
  if (result.ContainsFlag(kGlobalVars)) {
    if (instr->IsLoadNamedField()) {
      HLoadNamedField* load = HLoadNamedField::cast(instr);
      HConstant* target = HConstant::cast(load->object());
      if (ComputeGlobalVar(Unique<PropertyCell>::cast(target->GetUnique()),
                           &index)) {
        result.RemoveFlag(kGlobalVars);
        result.AddSpecial(GlobalVar(index));
        return result;
      }
    }
    for (index = 0; index < kNumberOfGlobalVars; ++index) {
      result.AddSpecial(GlobalVar(index));
    }
  } else if (result.ContainsFlag(kInobjectFields)) {
    if (instr->IsLoadNamedField() &&
        ComputeInobjectField(HLoadNamedField::cast(instr)->access(), &index)) {
      result.RemoveFlag(kInobjectFields);
      result.AddSpecial(InobjectField(index));
    } else {
      for (index = 0; index < kNumberOfInobjectFields; ++index) {
        result.AddSpecial(InobjectField(index));
      }
    }
  }
  return result;
}


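// Pretty-prints a set of side effects, expanding tracked global-variable and
// inobject-field indices into readable names.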
std::ostream& operator<<(std::ostream& os, const TrackedEffects& te) {
  SideEffectsTracker* t = te.tracker;
  const char* separator = "";
  os << "[";
  for (int bit = 0; bit < kNumberOfFlags; ++bit) {
    GVNFlag flag = GVNFlagFromInt(bit);
    if (te.effects.ContainsFlag(flag)) {
      os << separator;
      separator = ", ";
      switch (flag) {
#define DECLARE_FLAG(Type) \
        case k##Type:      \
          os << #Type;     \
          break;
        GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
        GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
#undef DECLARE_FLAG
        default:
          break;
      }
    }
  }
  for (int index = 0; index < t->num_global_vars_; ++index) {
    if (te.effects.ContainsSpecial(t->GlobalVar(index))) {
      os << separator << "[" << *t->global_vars_[index].handle() << "]";
      separator = ", ";
    }
  }
  for (int index = 0; index < t->num_inobject_fields_; ++index) {
    if (te.effects.ContainsSpecial(t->InobjectField(index))) {
      os << separator << t->inobject_fields_[index];
      separator = ", ";
    }
  }
  os << "]";
  return os;
}


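// Maps the given property cell to a tracked global-variable index, allocating
// a new index if there is still room. Returns false when all
// kNumberOfGlobalVars slots are already taken by other cells.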
bool SideEffectsTracker::ComputeGlobalVar(Unique<PropertyCell> cell,
                                          int* index) {
  for (int i = 0; i < num_global_vars_; ++i) {
    if (cell == global_vars_[i]) {
      *index = i;
      return true;
    }
  }
  if (num_global_vars_ < kNumberOfGlobalVars) {
    if (FLAG_trace_gvn) {
      OFStream os(stdout);
      os << "Tracking global var [" << *cell.handle() << "] "
         << "(mapped to index " << num_global_vars_ << ")" << std::endl;
    }
    *index = num_global_vars_;
    global_vars_[num_global_vars_++] = cell;
    return true;
  }
  return false;
}


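// Same as ComputeGlobalVar(), but maps an inobject field access to one of the
// kNumberOfInobjectFields tracked indices.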
bool SideEffectsTracker::ComputeInobjectField(HObjectAccess access,
                                              int* index) {
  for (int i = 0; i < num_inobject_fields_; ++i) {
    if (access.Equals(inobject_fields_[i])) {
      *index = i;
      return true;
    }
  }
  if (num_inobject_fields_ < kNumberOfInobjectFields) {
    if (FLAG_trace_gvn) {
      OFStream os(stdout);
      os << "Tracking inobject field access " << access << " (mapped to index "
         << num_inobject_fields_ << ")" << std::endl;
    }
    *index = num_inobject_fields_;
    inobject_fields_[num_inobject_fields_++] = access;
    return true;
  }
  return false;
}


HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
    : HPhase("H_Global value numbering", graph),
      removed_side_effects_(false),
      block_side_effects_(graph->blocks()->length(), zone()),
      loop_side_effects_(graph->blocks()->length(), zone()),
      visited_on_paths_(graph->blocks()->length(), zone()) {
  DCHECK(!AllowHandleAllocation::IsAllowed());
  block_side_effects_.AddBlock(
      SideEffects(), graph->blocks()->length(), zone());
  loop_side_effects_.AddBlock(
      SideEffects(), graph->blocks()->length(), zone());
}


void HGlobalValueNumberingPhase::Run() {
  DCHECK(!removed_side_effects_);
  for (int i = FLAG_gvn_iterations; i > 0; --i) {
    // Compute the side effects.
    ComputeBlockSideEffects();

    // Perform loop invariant code motion if requested.
    if (FLAG_loop_invariant_code_motion) LoopInvariantCodeMotion();

    // Perform the actual value numbering.
    AnalyzeGraph();

    // Continue GVN if we removed any side effects.
    if (!removed_side_effects_) break;
    removed_side_effects_ = false;

    // Clear all side effects.
    DCHECK_EQ(block_side_effects_.length(), graph()->blocks()->length());
    DCHECK_EQ(loop_side_effects_.length(), graph()->blocks()->length());
    for (int i = 0; i < graph()->blocks()->length(); ++i) {
      block_side_effects_[i].RemoveAll();
      loop_side_effects_[i].RemoveAll();
    }
    visited_on_paths_.Clear();
  }
}


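// Accumulates the side effects produced by each reachable, non-deoptimizing
// block and folds the effects of blocks inside loops into their (transitive)
// loop headers.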
void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph()->blocks()->at(i);
    SideEffects side_effects;
    if (block->IsReachable() && !block->IsDeoptimizing()) {
      int id = block->block_id();
      for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
        HInstruction* instr = it.Current();
        side_effects.Add(side_effects_tracker_.ComputeChanges(instr));
      }
      block_side_effects_[id].Add(side_effects);

      // Loop headers are part of their loop.
      if (block->IsLoopHeader()) {
        loop_side_effects_[id].Add(side_effects);
      }

      // Propagate loop side effects upwards.
      if (block->HasParentLoopHeader()) {
        HBasicBlock* with_parent = block;
        if (block->IsLoopHeader()) side_effects = loop_side_effects_[id];
        do {
          HBasicBlock* parent_block = with_parent->parent_loop_header();
          loop_side_effects_[parent_block->block_id()].Add(side_effects);
          with_parent = parent_block;
        } while (with_parent->HasParentLoopHeader());
      }
    }
  }
}


void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
              graph()->use_optimistic_licm() ? "yes" : "no");
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    if (block->IsLoopHeader()) {
      SideEffects side_effects = loop_side_effects_[block->block_id()];
      if (FLAG_trace_gvn) {
        OFStream os(stdout);
        os << "Try loop invariant motion for " << *block << " changes "
           << Print(side_effects) << std::endl;
      }
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects);
      }
    }
  }
}


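// Tries to hoist GVN-able instructions of a loop block into the loop
// pre-header when they do not depend on anything the loop changes.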
void HGlobalValueNumberingPhase::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    SideEffects loop_kills) {
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  if (FLAG_trace_gvn) {
    OFStream os(stdout);
    os << "Loop invariant code motion for " << *block << " depends on "
       << Print(loop_kills) << std::endl;
  }
  HInstruction* instr = block->first();
  while (instr != NULL) {
    HInstruction* next = instr->next();
    if (instr->CheckFlag(HValue::kUseGVN)) {
      SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
      SideEffects depends_on = side_effects_tracker_.ComputeDependsOn(instr);
      if (FLAG_trace_gvn) {
        OFStream os(stdout);
        os << "Checking instruction i" << instr->id() << " ("
           << instr->Mnemonic() << ") changes " << Print(changes)
           << ", depends on " << Print(depends_on) << ". Loop changes "
           << Print(loop_kills) << std::endl;
      }
      bool can_hoist = !depends_on.ContainsAnyOf(loop_kills);
      if (can_hoist && !graph()->use_optimistic_licm()) {
        can_hoist = block->IsLoopSuccessorDominator();
      }

      if (can_hoist) {
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TRACE_GVN_2("Hoisting loop invariant instruction i%d to block B%d\n",
                      instr->id(), pre_header->block_id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
        }
      }
    }
    instr = next;
  }
}


bool HGlobalValueNumberingPhase::AllowCodeMotion() {
  return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count;
}


bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
                                            HBasicBlock* loop_header) {
  // If we've disabled code motion or we're in a block that unconditionally
  // deoptimizes, don't move any instructions.
  return AllowCodeMotion() && !instr->block()->IsDeoptimizing() &&
         instr->block()->IsReachable();
}


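// Collects the union of the side effects of all blocks on paths from
// |dominator| to |dominated| (exclusive), using visited_on_paths_ to make
// sure each block is only accounted for once.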
SideEffects
HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  SideEffects side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        !visited_on_paths_.Contains(block->block_id())) {
      visited_on_paths_.Add(block->block_id());
      side_effects.Add(block_side_effects_[block->block_id()]);
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}


// Each instance of this class is like a "stack frame" for the recursive
// traversal of the dominator tree done during GVN (the stack is handled
// as a doubly linked list).
// We reuse frames when possible, so the list length is limited by the depth
// of the dominator tree, but this forces us to initialize each frame by
// calling an explicit "Initialize" method instead of using a constructor.
class GvnBasicBlockState: public ZoneObject {
 public:
  static GvnBasicBlockState* CreateEntry(Zone* zone,
                                         HBasicBlock* entry_block,
                                         HInstructionMap* entry_map) {
    return new(zone)
        GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
  }

  HBasicBlock* block() { return block_; }
  HInstructionMap* map() { return map_; }
  HSideEffectMap* dominators() { return &dominators_; }

  GvnBasicBlockState* next_in_dominator_tree_traversal(
      Zone* zone,
      HBasicBlock** dominator) {
    // This assignment needs to happen before calling next_dominated() because
    // that call can reuse "this" if we are at the last dominated block.
    *dominator = block();
    GvnBasicBlockState* result = next_dominated(zone);
    if (result == NULL) {
      GvnBasicBlockState* dominator_state = pop();
      if (dominator_state != NULL) {
        // This branch is guaranteed not to return NULL because pop() never
        // returns a state where "is_done() == true".
        *dominator = dominator_state->block();
        result = dominator_state->next_dominated(zone);
      } else {
        // Unnecessary (we are returning NULL) but done for cleanness.
        *dominator = NULL;
      }
    }
    return result;
  }

 private:
  void Initialize(HBasicBlock* block,
                  HInstructionMap* map,
                  HSideEffectMap* dominators,
                  bool copy_map,
                  Zone* zone) {
    block_ = block;
    map_ = copy_map ? map->Copy(zone) : map;
    dominated_index_ = -1;
    length_ = block->dominated_blocks()->length();
    if (dominators != NULL) {
      dominators_ = *dominators;
    }
  }
  bool is_done() { return dominated_index_ >= length_; }

  GvnBasicBlockState(GvnBasicBlockState* previous,
                     HBasicBlock* block,
                     HInstructionMap* map,
                     HSideEffectMap* dominators,
                     Zone* zone)
      : previous_(previous), next_(NULL) {
    Initialize(block, map, dominators, true, zone);
  }

  GvnBasicBlockState* next_dominated(Zone* zone) {
    dominated_index_++;
    if (dominated_index_ == length_ - 1) {
      // No need to copy the map for the last child in the dominator tree.
      Initialize(block_->dominated_blocks()->at(dominated_index_),
                 map(),
                 dominators(),
                 false,
                 zone);
      return this;
    } else if (dominated_index_ < length_) {
      return push(zone, block_->dominated_blocks()->at(dominated_index_));
    } else {
      return NULL;
    }
  }

  GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) {
    if (next_ == NULL) {
      next_ =
          new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone);
    } else {
      next_->Initialize(block, map(), dominators(), true, zone);
    }
    return next_;
  }
  GvnBasicBlockState* pop() {
    GvnBasicBlockState* result = previous_;
    while (result != NULL && result->is_done()) {
      TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
                  block()->block_id(),
                  previous_->block()->block_id())
      result = result->previous_;
    }
    return result;
  }

  GvnBasicBlockState* previous_;
  GvnBasicBlockState* next_;
  HBasicBlock* block_;
  HInstructionMap* map_;
  HSideEffectMap dominators_;
  int dominated_index_;
  int length_;
};


// This is a recursive traversal of the dominator tree but it has been turned
// into a loop to avoid stack overflows.
// The logical "stack frames" of the recursion are kept in a list of
// GvnBasicBlockState instances.
void HGlobalValueNumberingPhase::AnalyzeGraph() {
  HBasicBlock* entry_block = graph()->entry_block();
  HInstructionMap* entry_map =
      new(zone()) HInstructionMap(zone(), &side_effects_tracker_);
  GvnBasicBlockState* current =
      GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);

  while (current != NULL) {
    HBasicBlock* block = current->block();
    HInstructionMap* map = current->map();
    HSideEffectMap* dominators = current->dominators();

    TRACE_GVN_2("Analyzing block B%d%s\n",
                block->block_id(),
                block->IsLoopHeader() ? " (loop header)" : "");

    // If this is a loop header kill everything killed by the loop.
    if (block->IsLoopHeader()) {
      map->Kill(loop_side_effects_[block->block_id()]);
      dominators->Kill(loop_side_effects_[block->block_id()]);
    }

    // Go through all instructions of the current block.
    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instr = it.Current();
      if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
        for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
          HValue* other = dominators->at(i);
          GVNFlag flag = GVNFlagFromInt(i);
          if (instr->DependsOnFlags().Contains(flag) && other != NULL) {
            TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
                        i,
                        instr->id(),
                        instr->Mnemonic(),
                        other->id(),
                        other->Mnemonic());
            if (instr->HandleSideEffectDominator(flag, other)) {
              removed_side_effects_ = true;
            }
          }
        }
      }
      // Instruction was unlinked during graph traversal.
      if (!instr->IsLinked()) continue;

      SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
      if (!changes.IsEmpty()) {
        // Clear all instructions in the map that are affected by side effects.
        // Store instruction as the dominating one for tracked side effects.
        map->Kill(changes);
        dominators->Store(changes, instr);
        if (FLAG_trace_gvn) {
          OFStream os(stdout);
          os << "Instruction i" << instr->id() << " changes " << Print(changes)
             << std::endl;
        }
      }
      if (instr->CheckFlag(HValue::kUseGVN) &&
          !instr->CheckFlag(HValue::kCantBeReplaced)) {
        DCHECK(!instr->HasObservableSideEffects());
        HInstruction* other = map->Lookup(instr);
        if (other != NULL) {
          DCHECK(instr->Equals(other) && other->Equals(instr));
          TRACE_GVN_4("Replacing instruction i%d (%s) with i%d (%s)\n",
                      instr->id(),
                      instr->Mnemonic(),
                      other->id(),
                      other->Mnemonic());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          instr->DeleteAndReplaceWith(other);
        } else {
          map->Add(instr, zone());
        }
      }
    }

    HBasicBlock* dominator_block;
    GvnBasicBlockState* next =
        current->next_in_dominator_tree_traversal(zone(),
                                                  &dominator_block);

    if (next != NULL) {
      HBasicBlock* dominated = next->block();
      HInstructionMap* successor_map = next->map();
      HSideEffectMap* successor_dominators = next->dominators();

      // Kill everything killed on any path between this block and the
      // dominated block. We don't have to traverse these paths if the
      // value map and the dominators list are already empty. If the range
      // of block ids (block_id, dominated_id) is empty, there are no such
      // paths.
      if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
          dominator_block->block_id() + 1 < dominated->block_id()) {
        visited_on_paths_.Clear();
        SideEffects side_effects_on_all_paths =
            CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
                                                      dominated);
        successor_map->Kill(side_effects_on_all_paths);
        successor_dominators->Kill(side_effects_on_all_paths);
      }
    }
    current = next;
  }
}

}  // namespace internal
}  // namespace v8