Chromium Code Reviews

Side by Side Diff: src/hydrogen-gvn.cc

Issue 16095004: Extract GlobalValueNumberer and helper classes from hydrogen.cc and move to hydrogen-gvn.cc. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 6 months ago
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "hydrogen.h"
29 #include "v8.h"
30
31 namespace v8 {
32 namespace internal {
33
34 class HValueMap: public ZoneObject {
35 public:
36 explicit HValueMap(Zone* zone)
37 : array_size_(0),
38 lists_size_(0),
39 count_(0),
40 present_flags_(0),
41 array_(NULL),
42 lists_(NULL),
43 free_list_head_(kNil) {
44 ResizeLists(kInitialSize, zone);
45 Resize(kInitialSize, zone);
46 }
47
48 void Kill(GVNFlagSet flags);
49
50 void Add(HValue* value, Zone* zone) {
51 present_flags_.Add(value->gvn_flags());
52 Insert(value, zone);
53 }
54
55 HValue* Lookup(HValue* value) const;
56
57 HValueMap* Copy(Zone* zone) const {
58 return new(zone) HValueMap(zone, this);
59 }
60
61 bool IsEmpty() const { return count_ == 0; }
62
63 private:
64 // A linked list of HValue* values. Stored in arrays.
65 struct HValueMapListElement {
66 HValue* value;
67 int next; // Index in the array of the next list element.
68 };
69 static const int kNil = -1; // The end of a linked list
70
71 // Must be a power of 2.
72 static const int kInitialSize = 16;
73
74 HValueMap(Zone* zone, const HValueMap* other);
75
76 void Resize(int new_size, Zone* zone);
77 void ResizeLists(int new_size, Zone* zone);
78 void Insert(HValue* value, Zone* zone);
79 uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }
80
81 int array_size_;
82 int lists_size_;
83 int count_; // The number of values stored in the HValueMap.
84 GVNFlagSet present_flags_; // All flags that are in any value in the
85 // HValueMap.
86 HValueMapListElement* array_; // Primary store - contains the first value
87 // with a given hash. Colliding elements are stored in linked lists.
88 HValueMapListElement* lists_; // The linked lists containing hash collisions.
89 int free_list_head_; // Unused elements in lists_ are on the free list.
90 };
91
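As an aside, the map above keeps its collision chains as index-linked nodes inside a second flat array (lists_), recycles dropped slots through free_list_head_, and Bound() relies on array_size_ staying a power of two so a bit mask can stand in for a modulo. Below is a minimal standalone sketch of the same storage scheme with illustrative names (TinyIntMap, Entry) and plain int keys instead of HValue*; it omits resizing and zone allocation.

#include <cassert>
#include <vector>

struct Entry {
  int value;
  int next;  // Index of the next element in the collision chain, or -1 (nil).
};

class TinyIntMap {
 public:
  explicit TinyIntMap(int size)  // `size` must be a power of two.
      : array_(size, Entry{0, -1}), used_(size, false), free_head_(-1) {}

  void Insert(int value) {
    // Power-of-two masking, like HValueMap::Bound().
    int pos = value & (static_cast<int>(array_.size()) - 1);
    if (!used_[pos]) {
      used_[pos] = true;                 // Primary slot was free.
      array_[pos] = Entry{value, -1};
    } else {
      int slot = AllocateListSlot();     // Collision: chain it in lists_.
      lists_[slot] = Entry{value, array_[pos].next};
      array_[pos].next = slot;
    }
  }

  bool Contains(int value) const {
    int pos = value & (static_cast<int>(array_.size()) - 1);
    if (!used_[pos]) return false;
    if (array_[pos].value == value) return true;
    for (int i = array_[pos].next; i != -1; i = lists_[i].next) {
      if (lists_[i].value == value) return true;
    }
    return false;
  }

 private:
  // In the real map, Kill() pushes dropped chain slots back onto the free
  // list; here the free list only feeds allocation.
  int AllocateListSlot() {
    if (free_head_ == -1) {
      lists_.push_back(Entry{0, -1});
      return static_cast<int>(lists_.size()) - 1;
    }
    int slot = free_head_;
    free_head_ = lists_[slot].next;
    return slot;
  }

  std::vector<Entry> array_;  // Primary store, like HValueMap::array_.
  std::vector<bool> used_;    // The real map uses value == NULL instead.
  std::vector<Entry> lists_;  // Collision chains, like HValueMap::lists_.
  int free_head_;             // Like free_list_head_.
};

int main() {
  TinyIntMap map(16);
  map.Insert(3);
  map.Insert(19);  // 19 & 15 == 3: collides with 3, lands in lists_.
  assert(map.Contains(3) && map.Contains(19) && !map.Contains(35));
  return 0;
}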
92
93 class HSideEffectMap BASE_EMBEDDED {
94 public:
95 HSideEffectMap();
96 explicit HSideEffectMap(HSideEffectMap* other);
97 HSideEffectMap& operator= (const HSideEffectMap& other);
98
99 void Kill(GVNFlagSet flags);
100
101 void Store(GVNFlagSet flags, HInstruction* instr);
102
103 bool IsEmpty() const { return count_ == 0; }
104
105 inline HInstruction* operator[](int i) const {
106 ASSERT(0 <= i);
107 ASSERT(i < kNumberOfTrackedSideEffects);
108 return data_[i];
109 }
110 inline HInstruction* at(int i) const { return operator[](i); }
111
112 private:
113 int count_;
114 HInstruction* data_[kNumberOfTrackedSideEffects];
115 };
116
117
118 void TraceGVN(const char* msg, ...) {
119 va_list arguments;
120 va_start(arguments, msg);
121 OS::VPrint(msg, arguments);
122 va_end(arguments);
123 }
124
125 // Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when
126 // --trace-gvn is off.
127 #define TRACE_GVN_1(msg, a1) \
128 if (FLAG_trace_gvn) { \
129 TraceGVN(msg, a1); \
130 }
131
132 #define TRACE_GVN_2(msg, a1, a2) \
133 if (FLAG_trace_gvn) { \
134 TraceGVN(msg, a1, a2); \
135 }
136
137 #define TRACE_GVN_3(msg, a1, a2, a3) \
138 if (FLAG_trace_gvn) { \
139 TraceGVN(msg, a1, a2, a3); \
140 }
141
142 #define TRACE_GVN_4(msg, a1, a2, a3, a4) \
143 if (FLAG_trace_gvn) { \
144 TraceGVN(msg, a1, a2, a3, a4); \
145 }
146
147 #define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
148 if (FLAG_trace_gvn) { \
149 TraceGVN(msg, a1, a2, a3, a4, a5); \
150 }
151
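These wrappers exist because a plain call always evaluates its arguments, and arguments such as *GetGVNFlagsString(flags) are expensive to build; inside the macro the argument expression sits behind the flag check and is skipped when tracing is off. A small self-contained sketch of the difference, where FLAG_trace, Trace, TRACE_1 and ExpensiveString are stand-ins rather than the patch's names:

#include <cassert>
#include <cstdarg>
#include <cstdio>

static bool FLAG_trace = false;  // Stands in for FLAG_trace_gvn.
static int evaluations = 0;      // Counts how often the argument was built.

const char* ExpensiveString() {  // Stands in for GetGVNFlagsString().
  ++evaluations;
  return "flags";
}

void Trace(const char* msg, ...) {
  va_list args;
  va_start(args, msg);
  std::vprintf(msg, args);
  va_end(args);
}

#define TRACE_1(msg, a1) \
  if (FLAG_trace) {      \
    Trace(msg, a1);      \
  }

int main() {
  Trace("plain call: %s\n", ExpensiveString());  // Argument always evaluated.
  TRACE_1("macro: %s\n", ExpensiveString())      // Skipped: the flag is off.
  assert(evaluations == 1);
  return 0;
}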
152
153 HValueMap::HValueMap(Zone* zone, const HValueMap* other)
154 : array_size_(other->array_size_),
155 lists_size_(other->lists_size_),
156 count_(other->count_),
157 present_flags_(other->present_flags_),
158 array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
159 lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
160 free_list_head_(other->free_list_head_) {
161 OS::MemCopy(
162 array_, other->array_, array_size_ * sizeof(HValueMapListElement));
163 OS::MemCopy(
164 lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
165 }
166
167
168 void HValueMap::Kill(GVNFlagSet flags) {
169 GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
170 if (!present_flags_.ContainsAnyOf(depends_flags)) return;
171 present_flags_.RemoveAll();
172 for (int i = 0; i < array_size_; ++i) {
173 HValue* value = array_[i].value;
174 if (value != NULL) {
175 // Clear list of collisions first, so we know if it becomes empty.
176 int kept = kNil; // List of kept elements.
177 int next;
178 for (int current = array_[i].next; current != kNil; current = next) {
179 next = lists_[current].next;
180 HValue* value = lists_[current].value;
181 if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
182 // Drop it.
183 count_--;
184 lists_[current].next = free_list_head_;
185 free_list_head_ = current;
186 } else {
187 // Keep it.
188 lists_[current].next = kept;
189 kept = current;
190 present_flags_.Add(value->gvn_flags());
191 }
192 }
193 array_[i].next = kept;
194
195 // Now possibly drop directly indexed element.
196 value = array_[i].value;
197 if (value->gvn_flags().ContainsAnyOf(depends_flags)) { // Drop it.
198 count_--;
199 int head = array_[i].next;
200 if (head == kNil) {
201 array_[i].value = NULL;
202 } else {
203 array_[i].value = lists_[head].value;
204 array_[i].next = lists_[head].next;
205 lists_[head].next = free_list_head_;
206 free_list_head_ = head;
207 }
208 } else {
209 present_flags_.Add(value->gvn_flags()); // Keep it.
210 }
211 }
212 }
213 }
214
215
216 HValue* HValueMap::Lookup(HValue* value) const {
217 uint32_t hash = static_cast<uint32_t>(value->Hashcode());
218 uint32_t pos = Bound(hash);
219 if (array_[pos].value != NULL) {
220 if (array_[pos].value->Equals(value)) return array_[pos].value;
221 int next = array_[pos].next;
222 while (next != kNil) {
223 if (lists_[next].value->Equals(value)) return lists_[next].value;
224 next = lists_[next].next;
225 }
226 }
227 return NULL;
228 }
229
230
231 void HValueMap::Resize(int new_size, Zone* zone) {
232 ASSERT(new_size > count_);
 233 // Hashing the values into the new array produces no more collisions than
 234 // the old hash map had, so we can reuse the existing lists_ array if careful.
235
236 // Make sure we have at least one free element.
237 if (free_list_head_ == kNil) {
238 ResizeLists(lists_size_ << 1, zone);
239 }
240
241 HValueMapListElement* new_array =
242 zone->NewArray<HValueMapListElement>(new_size);
243 memset(new_array, 0, sizeof(HValueMapListElement) * new_size);
244
245 HValueMapListElement* old_array = array_;
246 int old_size = array_size_;
247
248 int old_count = count_;
249 count_ = 0;
250 // Do not modify present_flags_. It is currently correct.
251 array_size_ = new_size;
252 array_ = new_array;
253
254 if (old_array != NULL) {
255 // Iterate over all the elements in lists, rehashing them.
256 for (int i = 0; i < old_size; ++i) {
257 if (old_array[i].value != NULL) {
258 int current = old_array[i].next;
259 while (current != kNil) {
260 Insert(lists_[current].value, zone);
261 int next = lists_[current].next;
262 lists_[current].next = free_list_head_;
263 free_list_head_ = current;
264 current = next;
265 }
266 // Rehash the directly stored value.
267 Insert(old_array[i].value, zone);
268 }
269 }
270 }
271 USE(old_count);
272 ASSERT(count_ == old_count);
273 }
274
275
276 void HValueMap::ResizeLists(int new_size, Zone* zone) {
277 ASSERT(new_size > lists_size_);
278
279 HValueMapListElement* new_lists =
280 zone->NewArray<HValueMapListElement>(new_size);
281 memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);
282
283 HValueMapListElement* old_lists = lists_;
284 int old_size = lists_size_;
285
286 lists_size_ = new_size;
287 lists_ = new_lists;
288
289 if (old_lists != NULL) {
290 OS::MemCopy(lists_, old_lists, old_size * sizeof(HValueMapListElement));
291 }
292 for (int i = old_size; i < lists_size_; ++i) {
293 lists_[i].next = free_list_head_;
294 free_list_head_ = i;
295 }
296 }
297
298
299 void HValueMap::Insert(HValue* value, Zone* zone) {
300 ASSERT(value != NULL);
 301 // Resize once the hash table is half full.
302 if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
303 ASSERT(count_ < array_size_);
304 count_++;
305 uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
306 if (array_[pos].value == NULL) {
307 array_[pos].value = value;
308 array_[pos].next = kNil;
309 } else {
310 if (free_list_head_ == kNil) {
311 ResizeLists(lists_size_ << 1, zone);
312 }
313 int new_element_pos = free_list_head_;
314 ASSERT(new_element_pos != kNil);
315 free_list_head_ = lists_[free_list_head_].next;
316 lists_[new_element_pos].value = value;
317 lists_[new_element_pos].next = array_[pos].next;
318 ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
319 array_[pos].next = new_element_pos;
320 }
321 }
322
323
324 HSideEffectMap::HSideEffectMap() : count_(0) {
325 memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
326 }
327
328
329 HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
330 *this = *other; // Calls operator=.
331 }
332
333
334 HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
335 if (this != &other) {
336 OS::MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
337 }
338 return *this;
339 }
340
341 void HSideEffectMap::Kill(GVNFlagSet flags) {
342 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
343 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
344 if (flags.Contains(changes_flag)) {
345 if (data_[i] != NULL) count_--;
346 data_[i] = NULL;
347 }
348 }
349 }
350
351
352 void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) {
353 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
354 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
355 if (flags.Contains(changes_flag)) {
356 if (data_[i] == NULL) count_++;
357 data_[i] = instr;
358 }
359 }
360 }
361
362
363 bool HGlobalValueNumberer::Analyze() {
364 removed_side_effects_ = false;
365 ComputeBlockSideEffects();
366 if (FLAG_loop_invariant_code_motion) {
367 LoopInvariantCodeMotion();
368 }
369 AnalyzeGraph();
370 return removed_side_effects_;
371 }
372
373
374 void HGlobalValueNumberer::ComputeBlockSideEffects() {
375 // The Analyze phase of GVN can be called multiple times. Clear loop side
376 // effects before computing them to erase the contents from previous Analyze
377 // passes.
378 for (int i = 0; i < loop_side_effects_.length(); ++i) {
379 loop_side_effects_[i].RemoveAll();
380 }
381 for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
382 // Compute side effects for the block.
383 HBasicBlock* block = graph_->blocks()->at(i);
384 HInstruction* instr = block->first();
385 int id = block->block_id();
386 GVNFlagSet side_effects;
387 while (instr != NULL) {
388 side_effects.Add(instr->ChangesFlags());
389 if (instr->IsSoftDeoptimize()) {
390 block_side_effects_[id].RemoveAll();
391 side_effects.RemoveAll();
392 break;
393 }
394 instr = instr->next();
395 }
396 block_side_effects_[id].Add(side_effects);
397
398 // Loop headers are part of their loop.
399 if (block->IsLoopHeader()) {
400 loop_side_effects_[id].Add(side_effects);
401 }
402
403 // Propagate loop side effects upwards.
404 if (block->HasParentLoopHeader()) {
405 int header_id = block->parent_loop_header()->block_id();
406 loop_side_effects_[header_id].Add(block->IsLoopHeader()
407 ? loop_side_effects_[id]
408 : side_effects);
409 }
410 }
411 }
412
413
414 SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) {
415 char underlying_buffer[kLastFlag * 128];
416 Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer));
417 #if DEBUG
418 int offset = 0;
419 const char* separator = "";
420 const char* comma = ", ";
421 buffer[0] = 0;
422 uint32_t set_depends_on = 0;
423 uint32_t set_changes = 0;
424 for (int bit = 0; bit < kLastFlag; ++bit) {
425 if ((flags.ToIntegral() & (1 << bit)) != 0) {
426 if (bit % 2 == 0) {
427 set_changes++;
428 } else {
429 set_depends_on++;
430 }
431 }
432 }
433 bool positive_changes = set_changes < (kLastFlag / 2);
434 bool positive_depends_on = set_depends_on < (kLastFlag / 2);
435 if (set_changes > 0) {
436 if (positive_changes) {
437 offset += OS::SNPrintF(buffer + offset, "changes [");
438 } else {
439 offset += OS::SNPrintF(buffer + offset, "changes all except [");
440 }
441 for (int bit = 0; bit < kLastFlag; ++bit) {
442 if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) {
443 switch (static_cast<GVNFlag>(bit)) {
444 #define DECLARE_FLAG(type) \
445 case kChanges##type: \
446 offset += OS::SNPrintF(buffer + offset, separator); \
447 offset += OS::SNPrintF(buffer + offset, #type); \
448 separator = comma; \
449 break;
450 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
451 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
452 #undef DECLARE_FLAG
453 default:
454 break;
455 }
456 }
457 }
458 offset += OS::SNPrintF(buffer + offset, "]");
459 }
460 if (set_depends_on > 0) {
461 separator = "";
462 if (set_changes > 0) {
463 offset += OS::SNPrintF(buffer + offset, ", ");
464 }
465 if (positive_depends_on) {
466 offset += OS::SNPrintF(buffer + offset, "depends on [");
467 } else {
468 offset += OS::SNPrintF(buffer + offset, "depends on all except [");
469 }
470 for (int bit = 0; bit < kLastFlag; ++bit) {
471 if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) {
472 switch (static_cast<GVNFlag>(bit)) {
473 #define DECLARE_FLAG(type) \
474 case kDependsOn##type: \
475 offset += OS::SNPrintF(buffer + offset, separator); \
476 offset += OS::SNPrintF(buffer + offset, #type); \
477 separator = comma; \
478 break;
479 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
480 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
481 #undef DECLARE_FLAG
482 default:
483 break;
484 }
485 }
486 }
487 offset += OS::SNPrintF(buffer + offset, "]");
488 }
489 #else
490 OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral());
491 #endif
492 size_t string_len = strlen(underlying_buffer) + 1;
493 ASSERT(string_len <= sizeof(underlying_buffer));
494 char* result = new char[strlen(underlying_buffer) + 1];
495 OS::MemCopy(result, underlying_buffer, string_len);
496 return SmartArrayPointer<char>(result);
497 }
498
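Kill() and ProcessLoopBlock() above turn a set of "changes" flags into the matching "depends on" set via HValue::ConvertChangesToDependsFlags(), and this printer decodes the same layout by treating even bits as "changes" and odd bits as "depends on". A tiny sketch of that pairing, assuming the conversion is a one-bit left shift (which the even/odd interleaving suggests, but is not shown in this patch); the enum values here are illustrative stand-ins for the generated GVN flag enum:

#include <cassert>
#include <cstdint>

enum : uint32_t {
  kChangesMaps           = 1u << 0,
  kDependsOnMaps         = 1u << 1,
  kChangesElementsKind   = 1u << 2,
  kDependsOnElementsKind = 1u << 3,
};

// Assumed shape of HValue::ConvertChangesToDependsFlags(): with "changes" on
// even bits and "depends on" on the following odd bits, shifting left by one
// maps each changes flag onto its depends-on partner.
uint32_t ConvertChangesToDepends(uint32_t changes) { return changes << 1; }

int main() {
  uint32_t loop_kills = kChangesMaps | kChangesElementsKind;
  assert(ConvertChangesToDepends(loop_kills) ==
         (kDependsOnMaps | kDependsOnElementsKind));
  return 0;
}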
499
500 void HGlobalValueNumberer::LoopInvariantCodeMotion() {
501 TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
502 graph_->use_optimistic_licm() ? "yes" : "no");
503 for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
504 HBasicBlock* block = graph_->blocks()->at(i);
505 if (block->IsLoopHeader()) {
506 GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
507 TRACE_GVN_2("Try loop invariant motion for block B%d %s\n",
508 block->block_id(),
509 *GetGVNFlagsString(side_effects));
510
511 GVNFlagSet accumulated_first_time_depends;
512 GVNFlagSet accumulated_first_time_changes;
513 HBasicBlock* last = block->loop_information()->GetLastBackEdge();
514 for (int j = block->block_id(); j <= last->block_id(); ++j) {
515 ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
516 &accumulated_first_time_depends,
517 &accumulated_first_time_changes);
518 }
519 }
520 }
521 }
522
523
524 void HGlobalValueNumberer::ProcessLoopBlock(
525 HBasicBlock* block,
526 HBasicBlock* loop_header,
527 GVNFlagSet loop_kills,
528 GVNFlagSet* first_time_depends,
529 GVNFlagSet* first_time_changes) {
530 HBasicBlock* pre_header = loop_header->predecessors()->at(0);
531 GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
532 TRACE_GVN_2("Loop invariant motion for B%d %s\n",
533 block->block_id(),
534 *GetGVNFlagsString(depends_flags));
535 HInstruction* instr = block->first();
536 while (instr != NULL) {
537 HInstruction* next = instr->next();
538 bool hoisted = false;
539 if (instr->CheckFlag(HValue::kUseGVN)) {
540 TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
541 instr->id(),
542 instr->Mnemonic(),
543 *GetGVNFlagsString(instr->gvn_flags()),
544 *GetGVNFlagsString(loop_kills));
545 bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
546 if (can_hoist && !graph()->use_optimistic_licm()) {
547 can_hoist = block->IsLoopSuccessorDominator();
548 }
549
550 if (can_hoist) {
551 bool inputs_loop_invariant = true;
552 for (int i = 0; i < instr->OperandCount(); ++i) {
553 if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
554 inputs_loop_invariant = false;
555 }
556 }
557
558 if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
559 TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
560 // Move the instruction out of the loop.
561 instr->Unlink();
562 instr->InsertBefore(pre_header->end());
563 if (instr->HasSideEffects()) removed_side_effects_ = true;
564 hoisted = true;
565 }
566 }
567 }
568 if (!hoisted) {
569 // If an instruction is not hoisted, we have to account for its side
570 // effects when hoisting later HTransitionElementsKind instructions.
571 GVNFlagSet previous_depends = *first_time_depends;
572 GVNFlagSet previous_changes = *first_time_changes;
573 first_time_depends->Add(instr->DependsOnFlags());
574 first_time_changes->Add(instr->ChangesFlags());
575 if (!(previous_depends == *first_time_depends)) {
576 TRACE_GVN_1("Updated first-time accumulated %s\n",
577 *GetGVNFlagsString(*first_time_depends));
578 }
579 if (!(previous_changes == *first_time_changes)) {
580 TRACE_GVN_1("Updated first-time accumulated %s\n",
581 *GetGVNFlagsString(*first_time_changes));
582 }
583 }
584 instr = next;
585 }
586 }
587
588
589 bool HGlobalValueNumberer::AllowCodeMotion() {
590 return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count;
591 }
592
593
594 bool HGlobalValueNumberer::ShouldMove(HInstruction* instr,
595 HBasicBlock* loop_header) {
596 // If we've disabled code motion or we're in a block that unconditionally
597 // deoptimizes, don't move any instructions.
598 return AllowCodeMotion() && !instr->block()->IsDeoptimizing();
599 }
600
601
602 GVNFlagSet HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
603 HBasicBlock* dominator, HBasicBlock* dominated) {
604 GVNFlagSet side_effects;
605 for (int i = 0; i < dominated->predecessors()->length(); ++i) {
606 HBasicBlock* block = dominated->predecessors()->at(i);
607 if (dominator->block_id() < block->block_id() &&
608 block->block_id() < dominated->block_id() &&
609 visited_on_paths_.Add(block->block_id())) {
610 side_effects.Add(block_side_effects_[block->block_id()]);
611 if (block->IsLoopHeader()) {
612 side_effects.Add(loop_side_effects_[block->block_id()]);
613 }
614 side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
615 dominator, block));
616 }
617 }
618 return side_effects;
619 }
620
621
 622 // Each instance of this class is like a "stack frame" for the recursive
 623 // traversal of the dominator tree done during GVN (the stack is handled
 624 // as a doubly linked list).
 625 // We reuse frames when possible, so the list length is limited by the depth
 626 // of the dominator tree, but this forces us to initialize each frame by
 627 // calling an explicit "Initialize" method instead of using a constructor.
628 class GvnBasicBlockState: public ZoneObject {
629 public:
630 static GvnBasicBlockState* CreateEntry(Zone* zone,
631 HBasicBlock* entry_block,
632 HValueMap* entry_map) {
633 return new(zone)
634 GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
635 }
636
637 HBasicBlock* block() { return block_; }
638 HValueMap* map() { return map_; }
639 HSideEffectMap* dominators() { return &dominators_; }
640
641 GvnBasicBlockState* next_in_dominator_tree_traversal(
642 Zone* zone,
643 HBasicBlock** dominator) {
644 // This assignment needs to happen before calling next_dominated() because
645 // that call can reuse "this" if we are at the last dominated block.
646 *dominator = block();
647 GvnBasicBlockState* result = next_dominated(zone);
648 if (result == NULL) {
649 GvnBasicBlockState* dominator_state = pop();
650 if (dominator_state != NULL) {
651 // This branch is guaranteed not to return NULL because pop() never
652 // returns a state where "is_done() == true".
653 *dominator = dominator_state->block();
654 result = dominator_state->next_dominated(zone);
655 } else {
 656 // Unnecessary (we are returning NULL), but done for clarity.
657 *dominator = NULL;
658 }
659 }
660 return result;
661 }
662
663 private:
664 void Initialize(HBasicBlock* block,
665 HValueMap* map,
666 HSideEffectMap* dominators,
667 bool copy_map,
668 Zone* zone) {
669 block_ = block;
670 map_ = copy_map ? map->Copy(zone) : map;
671 dominated_index_ = -1;
672 length_ = block->dominated_blocks()->length();
673 if (dominators != NULL) {
674 dominators_ = *dominators;
675 }
676 }
677 bool is_done() { return dominated_index_ >= length_; }
678
679 GvnBasicBlockState(GvnBasicBlockState* previous,
680 HBasicBlock* block,
681 HValueMap* map,
682 HSideEffectMap* dominators,
683 Zone* zone)
684 : previous_(previous), next_(NULL) {
685 Initialize(block, map, dominators, true, zone);
686 }
687
688 GvnBasicBlockState* next_dominated(Zone* zone) {
689 dominated_index_++;
690 if (dominated_index_ == length_ - 1) {
691 // No need to copy the map for the last child in the dominator tree.
692 Initialize(block_->dominated_blocks()->at(dominated_index_),
693 map(),
694 dominators(),
695 false,
696 zone);
697 return this;
698 } else if (dominated_index_ < length_) {
699 return push(zone,
700 block_->dominated_blocks()->at(dominated_index_),
701 dominators());
702 } else {
703 return NULL;
704 }
705 }
706
707 GvnBasicBlockState* push(Zone* zone,
708 HBasicBlock* block,
709 HSideEffectMap* dominators) {
710 if (next_ == NULL) {
711 next_ =
712 new(zone) GvnBasicBlockState(this, block, map(), dominators, zone);
713 } else {
714 next_->Initialize(block, map(), dominators, true, zone);
715 }
716 return next_;
717 }
718 GvnBasicBlockState* pop() {
719 GvnBasicBlockState* result = previous_;
720 while (result != NULL && result->is_done()) {
 721 TRACE_GVN_2("Backtracking from block B%d to block B%d\n",
722 block()->block_id(),
723 previous_->block()->block_id())
724 result = result->previous_;
725 }
726 return result;
727 }
728
729 GvnBasicBlockState* previous_;
730 GvnBasicBlockState* next_;
731 HBasicBlock* block_;
732 HValueMap* map_;
733 HSideEffectMap dominators_;
734 int dominated_index_;
735 int length_;
736 };
737
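GvnBasicBlockState turns the recursive walk of the dominator tree into an explicit, reusable chain of frames, so traversal depth is bounded by heap allocation rather than by the native call stack. The same idea in miniature, with an explicit stack of frames and illustrative types (Node, Frame, VisitDominatorTree are stand-ins, and frame reuse is omitted):

#include <cstddef>
#include <cstdio>
#include <vector>

struct Node {
  int id;
  std::vector<Node*> dominated;  // Children in the dominator tree.
};

struct Frame {
  Node* node;
  std::size_t next_child;  // Index of the next dominated block to visit.
};

void VisitDominatorTree(Node* root) {
  std::vector<Frame> stack;  // Plays the role of the GvnBasicBlockState list.
  std::printf("Analyzing B%d\n", root->id);
  stack.push_back(Frame{root, 0});
  while (!stack.empty()) {
    Frame& top = stack.back();
    if (top.next_child < top.node->dominated.size()) {
      Node* child = top.node->dominated[top.next_child++];
      std::printf("Analyzing B%d\n", child->id);
      stack.push_back(Frame{child, 0});  // "Push" a new frame.
    } else {
      stack.pop_back();  // Subtree done: backtrack to the dominator.
    }
  }
}

int main() {
  Node b3{3, {}}, b2{2, {}}, b1{1, {&b2, &b3}}, b0{0, {&b1}};
  VisitDominatorTree(&b0);  // Prints B0, B1, B2, B3.
  return 0;
}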
738 // This is a recursive traversal of the dominator tree but it has been turned
739 // into a loop to avoid stack overflows.
740 // The logical "stack frames" of the recursion are kept in a list of
741 // GvnBasicBlockState instances.
742 void HGlobalValueNumberer::AnalyzeGraph() {
743 HBasicBlock* entry_block = graph_->entry_block();
744 HValueMap* entry_map = new(zone()) HValueMap(zone());
745 GvnBasicBlockState* current =
746 GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);
747
748 while (current != NULL) {
749 HBasicBlock* block = current->block();
750 HValueMap* map = current->map();
751 HSideEffectMap* dominators = current->dominators();
752
753 TRACE_GVN_2("Analyzing block B%d%s\n",
754 block->block_id(),
755 block->IsLoopHeader() ? " (loop header)" : "");
756
757 // If this is a loop header kill everything killed by the loop.
758 if (block->IsLoopHeader()) {
759 map->Kill(loop_side_effects_[block->block_id()]);
760 }
761
762 // Go through all instructions of the current block.
763 HInstruction* instr = block->first();
764 while (instr != NULL) {
765 HInstruction* next = instr->next();
766 GVNFlagSet flags = instr->ChangesFlags();
767 if (!flags.IsEmpty()) {
768 // Clear all instructions in the map that are affected by side effects.
769 // Store instruction as the dominating one for tracked side effects.
770 map->Kill(flags);
771 dominators->Store(flags, instr);
772 TRACE_GVN_2("Instruction %d %s\n", instr->id(),
773 *GetGVNFlagsString(flags));
774 }
775 if (instr->CheckFlag(HValue::kUseGVN)) {
776 ASSERT(!instr->HasObservableSideEffects());
777 HValue* other = map->Lookup(instr);
778 if (other != NULL) {
779 ASSERT(instr->Equals(other) && other->Equals(instr));
780 TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
781 instr->id(),
782 instr->Mnemonic(),
783 other->id(),
784 other->Mnemonic());
785 if (instr->HasSideEffects()) removed_side_effects_ = true;
786 instr->DeleteAndReplaceWith(other);
787 } else {
788 map->Add(instr, zone());
789 }
790 }
791 if (instr->IsLinked() &&
792 instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
793 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
794 HValue* other = dominators->at(i);
795 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
796 GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
797 if (instr->DependsOnFlags().Contains(depends_on_flag) &&
798 (other != NULL)) {
799 TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
800 i,
801 instr->id(),
802 instr->Mnemonic(),
803 other->id(),
804 other->Mnemonic());
805 instr->SetSideEffectDominator(changes_flag, other);
806 }
807 }
808 }
809 instr = next;
810 }
811
812 HBasicBlock* dominator_block;
813 GvnBasicBlockState* next =
814 current->next_in_dominator_tree_traversal(zone(), &dominator_block);
815
816 if (next != NULL) {
817 HBasicBlock* dominated = next->block();
818 HValueMap* successor_map = next->map();
819 HSideEffectMap* successor_dominators = next->dominators();
820
821 // Kill everything killed on any path between this block and the
822 // dominated block. We don't have to traverse these paths if the
823 // value map and the dominators list is already empty. If the range
824 // of block ids (block_id, dominated_id) is empty there are no such
825 // paths.
826 if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
827 dominator_block->block_id() + 1 < dominated->block_id()) {
828 visited_on_paths_.Clear();
829 GVNFlagSet side_effects_on_all_paths =
830 CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
831 dominated);
832 successor_map->Kill(side_effects_on_all_paths);
833 successor_dominators->Kill(side_effects_on_all_paths);
834 }
835 }
836 current = next;
837 }
838 }
839
840 } } // namespace v8::internal