OLD | NEW |
(Empty) | |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are |
| 4 // met: |
| 5 // |
| 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. |
| 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 |
| 28 #include "hydrogen.h" |
| 29 #include "hydrogen-gvn.h" |
| 30 #include "v8.h" |
| 31 |
| 32 namespace v8 { |
| 33 namespace internal { |
| 34 |
// A hash map from HValue* to HValue*, keyed by HValue::Hashcode() and
// compared with HValue::Equals().  GVN uses it to find a previously computed
// equivalent instruction.  Collisions hang off each array slot as singly
// linked lists whose nodes live in the lists_ array; free nodes are chained
// through free_list_head_.  All storage is Zone-allocated, nothing is freed.
class HValueMap: public ZoneObject {
 public:
  explicit HValueMap(Zone* zone)
      : array_size_(0),
        lists_size_(0),
        count_(0),
        present_flags_(0),
        array_(NULL),
        lists_(NULL),
        free_list_head_(kNil) {
    ResizeLists(kInitialSize, zone);
    Resize(kInitialSize, zone);
  }

  // Removes every stored value whose flags intersect the depends-on
  // counterparts of |flags|, i.e. values invalidated by those side effects.
  void Kill(GVNFlagSet flags);

  // Inserts |value|, recording its flags so Kill() can quickly test whether
  // any stored value might be affected by a side effect.
  void Add(HValue* value, Zone* zone) {
    present_flags_.Add(value->gvn_flags());
    Insert(value, zone);
  }

  // Returns a stored value equal to |value|, or NULL if none exists.
  HValue* Lookup(HValue* value) const;

  // Deep-copies the map into |zone|; used when the dominator-tree traversal
  // forks its per-block state.
  HValueMap* Copy(Zone* zone) const {
    return new(zone) HValueMap(zone, this);
  }

  bool IsEmpty() const { return count_ == 0; }

 private:
  // A linked list of HValue* values. Stored in arrays.
  struct HValueMapListElement {
    HValue* value;
    int next;  // Index in the array of the next list element.
  };
  static const int kNil = -1;  // The end of a linked list

  // Must be a power of 2.
  static const int kInitialSize = 16;

  HValueMap(Zone* zone, const HValueMap* other);

  void Resize(int new_size, Zone* zone);
  void ResizeLists(int new_size, Zone* zone);
  void Insert(HValue* value, Zone* zone);
  // Maps a hash code to an array slot; relies on array_size_ being a
  // power of 2.
  uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }

  int array_size_;
  int lists_size_;
  int count_;  // The number of values stored in the HValueMap.
  GVNFlagSet present_flags_;  // All flags that are in any value in the
                              // HValueMap.
  HValueMapListElement* array_;  // Primary store - contains the first value
      // with a given hash.  Colliding elements are stored in linked lists.
  HValueMapListElement* lists_;  // The linked lists containing hash collisions.
  int free_list_head_;  // Unused elements in lists_ are on the free list.
};
| 92 |
| 93 |
// For each tracked side-effect kind, remembers the most recent instruction
// that produced it.  AnalyzeGraph() uses this to hand instructions flagged
// with kTrackSideEffectDominators a pointer to their dominating side-effect
// instruction.
class HSideEffectMap BASE_EMBEDDED {
 public:
  HSideEffectMap();
  explicit HSideEffectMap(HSideEffectMap* other);
  HSideEffectMap& operator= (const HSideEffectMap& other);

  // Clears the entry for every tracked side effect contained in |flags|.
  void Kill(GVNFlagSet flags);

  // Records |instr| as the latest instruction for every tracked side effect
  // in |flags|.
  void Store(GVNFlagSet flags, HInstruction* instr);

  bool IsEmpty() const { return count_ == 0; }

  inline HInstruction* operator[](int i) const {
    ASSERT(0 <= i);
    ASSERT(i < kNumberOfTrackedSideEffects);
    return data_[i];
  }
  inline HInstruction* at(int i) const { return operator[](i); }

 private:
  int count_;  // Number of non-NULL entries in data_.
  HInstruction* data_[kNumberOfTrackedSideEffects];
};
| 117 |
| 118 |
| 119 void TraceGVN(const char* msg, ...) { |
| 120 va_list arguments; |
| 121 va_start(arguments, msg); |
| 122 OS::VPrint(msg, arguments); |
| 123 va_end(arguments); |
| 124 } |
| 125 |
// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when
// --trace-gvn is off.
// NOTE(review): these expand to a bare if-statement (not the usual
// do { } while (false) form), so they are unsafe as the body of an unbraced
// if/else (dangling-else).  They cannot be converted to do/while without
// also fixing call sites that omit the trailing semicolon (see
// GvnBasicBlockState::pop()).
#define TRACE_GVN_1(msg, a1)                    \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1);                          \
  }

#define TRACE_GVN_2(msg, a1, a2)                \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2);                      \
  }

#define TRACE_GVN_3(msg, a1, a2, a3)            \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2, a3);                  \
  }

#define TRACE_GVN_4(msg, a1, a2, a3, a4)        \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2, a3, a4);              \
  }

#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5)    \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2, a3, a4, a5);          \
  }
| 152 |
| 153 |
// Deep-copy constructor used by Copy(): clones both the primary array and
// the collision-list array so the new map can be mutated independently of
// |other|.  The linked-list indices remain valid because the array sizes
// are identical.
HValueMap::HValueMap(Zone* zone, const HValueMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_flags_(other->present_flags_),
      array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_) {
  OS::MemCopy(
      array_, other->array_, array_size_ * sizeof(HValueMapListElement));
  OS::MemCopy(
      lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
}
| 167 |
| 168 |
// Removes every value whose flags intersect the depends-on counterparts of
// |flags| — i.e. every cached value that a side effect in |flags| may have
// invalidated.  present_flags_ is rebuilt from the surviving values, and
// dropped collision-list nodes are returned to the free list.
void HValueMap::Kill(GVNFlagSet flags) {
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
  // Fast path: nothing stored depends on any of these side effects.
  if (!present_flags_.ContainsAnyOf(depends_flags)) return;
  present_flags_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    HValue* value = array_[i].value;
    if (value != NULL) {
      // Clear list of collisions first, so we know if it becomes empty.
      int kept = kNil;  // List of kept elements.
      int next;
      for (int current = array_[i].next; current != kNil; current = next) {
        next = lists_[current].next;
        HValue* value = lists_[current].value;
        if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
          // Drop it: push the node onto the free list.
          count_--;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
        } else {
          // Keep it: prepend to the list of survivors (order may reverse,
          // which is fine for a hash bucket).
          lists_[current].next = kept;
          kept = current;
          present_flags_.Add(value->gvn_flags());
        }
      }
      array_[i].next = kept;

      // Now possibly drop directly indexed element.
      value = array_[i].value;
      if (value->gvn_flags().ContainsAnyOf(depends_flags)) {  // Drop it.
        count_--;
        int head = array_[i].next;
        if (head == kNil) {
          array_[i].value = NULL;
        } else {
          // Promote the first collision-list node into the array slot and
          // free that node.
          array_[i].value = lists_[head].value;
          array_[i].next = lists_[head].next;
          lists_[head].next = free_list_head_;
          free_list_head_ = head;
        }
      } else {
        present_flags_.Add(value->gvn_flags());  // Keep it.
      }
    }
  }
}
| 215 |
| 216 |
| 217 HValue* HValueMap::Lookup(HValue* value) const { |
| 218 uint32_t hash = static_cast<uint32_t>(value->Hashcode()); |
| 219 uint32_t pos = Bound(hash); |
| 220 if (array_[pos].value != NULL) { |
| 221 if (array_[pos].value->Equals(value)) return array_[pos].value; |
| 222 int next = array_[pos].next; |
| 223 while (next != kNil) { |
| 224 if (lists_[next].value->Equals(value)) return lists_[next].value; |
| 225 next = lists_[next].next; |
| 226 } |
| 227 } |
| 228 return NULL; |
| 229 } |
| 230 |
| 231 |
// Grows the primary array to |new_size| and rehashes all stored values into
// it via Insert().  The collision-list array is reused in place: each old
// node is re-inserted and then returned to the free list.  present_flags_
// is untouched because the set of stored values does not change.
void HValueMap::Resize(int new_size, Zone* zone) {
  ASSERT(new_size > count_);
  // Hashing the values into the new array has no more collisions than in the
  // old hash map, so we can use the existing lists_ array, if we are careful.

  // Make sure we have at least one free element.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1, zone);
  }

  HValueMapListElement* new_array =
      zone->NewArray<HValueMapListElement>(new_size);
  memset(new_array, 0, sizeof(HValueMapListElement) * new_size);

  HValueMapListElement* old_array = array_;
  int old_size = array_size_;

  // count_ is rebuilt by the Insert() calls below; keep the old value for
  // the consistency check at the end.
  int old_count = count_;
  count_ = 0;
  // Do not modify present_flags_.  It is currently correct.
  array_size_ = new_size;
  array_ = new_array;

  if (old_array != NULL) {
    // Iterate over all the elements in lists, rehashing them.
    for (int i = 0; i < old_size; ++i) {
      if (old_array[i].value != NULL) {
        int current = old_array[i].next;
        while (current != kNil) {
          // Re-insert the chained value, then return its node to the
          // free list before moving on.
          Insert(lists_[current].value, zone);
          int next = lists_[current].next;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
          current = next;
        }
        // Rehash the directly stored value.
        Insert(old_array[i].value, zone);
      }
    }
  }
  USE(old_count);
  ASSERT(count_ == old_count);
}
| 275 |
| 276 |
| 277 void HValueMap::ResizeLists(int new_size, Zone* zone) { |
| 278 ASSERT(new_size > lists_size_); |
| 279 |
| 280 HValueMapListElement* new_lists = |
| 281 zone->NewArray<HValueMapListElement>(new_size); |
| 282 memset(new_lists, 0, sizeof(HValueMapListElement) * new_size); |
| 283 |
| 284 HValueMapListElement* old_lists = lists_; |
| 285 int old_size = lists_size_; |
| 286 |
| 287 lists_size_ = new_size; |
| 288 lists_ = new_lists; |
| 289 |
| 290 if (old_lists != NULL) { |
| 291 OS::MemCopy(lists_, old_lists, old_size * sizeof(HValueMapListElement)); |
| 292 } |
| 293 for (int i = old_size; i < lists_size_; ++i) { |
| 294 lists_[i].next = free_list_head_; |
| 295 free_list_head_ = i; |
| 296 } |
| 297 } |
| 298 |
| 299 |
// Inserts |value| into its hash bucket.  Does not update present_flags_
// (Add() handles that); Resize() also calls this directly when rehashing.
void HValueMap::Insert(HValue* value, Zone* zone) {
  ASSERT(value != NULL);
  // Resizing when half of the hashtable is filled up.
  // This must happen before Bound() is called, since Bound() depends on
  // array_size_.
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
  ASSERT(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
  if (array_[pos].value == NULL) {
    array_[pos].value = value;
    array_[pos].next = kNil;
  } else {
    // Slot is occupied: take a node from the free list (growing lists_ if
    // it is exhausted) and prepend it to this slot's collision chain.
    if (free_list_head_ == kNil) {
      ResizeLists(lists_size_ << 1, zone);
    }
    int new_element_pos = free_list_head_;
    ASSERT(new_element_pos != kNil);
    free_list_head_ = lists_[free_list_head_].next;
    lists_[new_element_pos].value = value;
    lists_[new_element_pos].next = array_[pos].next;
    ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
    array_[pos].next = new_element_pos;
  }
}
| 323 |
| 324 |
| 325 HSideEffectMap::HSideEffectMap() : count_(0) { |
| 326 memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize); |
| 327 } |
| 328 |
| 329 |
// Copy constructor: takes over |other|'s count and delegates the data_
// copy to the assignment operator.
HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
  *this = *other;  // Calls operator=.
}
| 333 |
| 334 |
| 335 HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) { |
| 336 if (this != &other) { |
| 337 OS::MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize); |
| 338 } |
| 339 return *this; |
| 340 } |
| 341 |
| 342 void HSideEffectMap::Kill(GVNFlagSet flags) { |
| 343 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) { |
| 344 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i); |
| 345 if (flags.Contains(changes_flag)) { |
| 346 if (data_[i] != NULL) count_--; |
| 347 data_[i] = NULL; |
| 348 } |
| 349 } |
| 350 } |
| 351 |
| 352 |
| 353 void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) { |
| 354 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) { |
| 355 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i); |
| 356 if (flags.Contains(changes_flag)) { |
| 357 if (data_[i] == NULL) count_++; |
| 358 data_[i] = instr; |
| 359 } |
| 360 } |
| 361 } |
| 362 |
| 363 |
// Sets up per-block and per-loop side-effect tables sized to the graph.
// The DEBUG assert checks that GVN runs either on the concurrent optimizer
// thread or while heap allocation is disallowed.
HGlobalValueNumberer::HGlobalValueNumberer(HGraph* graph, CompilationInfo* info)
    : graph_(graph),
      info_(info),
      removed_side_effects_(false),
      block_side_effects_(graph->blocks()->length(), graph->zone()),
      loop_side_effects_(graph->blocks()->length(), graph->zone()),
      visited_on_paths_(graph->zone(), graph->blocks()->length()) {
#ifdef DEBUG
  ASSERT(info->isolate()->optimizing_compiler_thread()->IsOptimizerThread() ||
         !info->isolate()->heap()->IsAllocationAllowed());
#endif
  // Pre-fill both tables with one empty flag set per basic block.
  block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
                               graph_->zone());
  loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
                              graph_->zone());
}
| 380 |
// Runs one GVN pass: compute per-block side effects, optionally perform
// loop invariant code motion, then eliminate redundant instructions.
// Returns true if any instruction with side effects was removed, so the
// caller knows derived information may be stale.
bool HGlobalValueNumberer::Analyze() {
  removed_side_effects_ = false;
  ComputeBlockSideEffects();
  if (FLAG_loop_invariant_code_motion) {
    LoopInvariantCodeMotion();
  }
  AnalyzeGraph();
  return removed_side_effects_;
}
| 390 |
| 391 |
// Computes, for every basic block, the union of its instructions' "changes"
// flags, and accumulates those per loop into the loop header's
// loop_side_effects_ entry (propagating inner-loop effects to outer loops).
void HGlobalValueNumberer::ComputeBlockSideEffects() {
  // The Analyze phase of GVN can be called multiple times. Clear loop side
  // effects before computing them to erase the contents from previous Analyze
  // passes.
  for (int i = 0; i < loop_side_effects_.length(); ++i) {
    loop_side_effects_[i].RemoveAll();
  }
  // Visit blocks in decreasing id order so inner blocks are processed
  // before their side effects are folded into enclosing loop headers.
  for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph_->blocks()->at(i);
    HInstruction* instr = block->first();
    int id = block->block_id();
    GVNFlagSet side_effects;
    while (instr != NULL) {
      side_effects.Add(instr->ChangesFlags());
      if (instr->IsSoftDeoptimize()) {
        // Once a soft deoptimize is reached, this block contributes no side
        // effects: both the running set and any previously recorded effects
        // for the block are discarded.
        block_side_effects_[id].RemoveAll();
        side_effects.RemoveAll();
        break;
      }
      instr = instr->next();
    }
    block_side_effects_[id].Add(side_effects);

    // Loop headers are part of their loop.
    if (block->IsLoopHeader()) {
      loop_side_effects_[id].Add(side_effects);
    }

    // Propagate loop side effects upwards.
    if (block->HasParentLoopHeader()) {
      int header_id = block->parent_loop_header()->block_id();
      // A loop header forwards its whole (already accumulated) loop set;
      // an ordinary block forwards just its own effects.
      loop_side_effects_[header_id].Add(block->IsLoopHeader()
                                        ? loop_side_effects_[id]
                                        : side_effects);
    }
  }
}
| 430 |
| 431 |
| 432 SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) { |
| 433 char underlying_buffer[kLastFlag * 128]; |
| 434 Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer)); |
| 435 #if DEBUG |
| 436 int offset = 0; |
| 437 const char* separator = ""; |
| 438 const char* comma = ", "; |
| 439 buffer[0] = 0; |
| 440 uint32_t set_depends_on = 0; |
| 441 uint32_t set_changes = 0; |
| 442 for (int bit = 0; bit < kLastFlag; ++bit) { |
| 443 if ((flags.ToIntegral() & (1 << bit)) != 0) { |
| 444 if (bit % 2 == 0) { |
| 445 set_changes++; |
| 446 } else { |
| 447 set_depends_on++; |
| 448 } |
| 449 } |
| 450 } |
| 451 bool positive_changes = set_changes < (kLastFlag / 2); |
| 452 bool positive_depends_on = set_depends_on < (kLastFlag / 2); |
| 453 if (set_changes > 0) { |
| 454 if (positive_changes) { |
| 455 offset += OS::SNPrintF(buffer + offset, "changes ["); |
| 456 } else { |
| 457 offset += OS::SNPrintF(buffer + offset, "changes all except ["); |
| 458 } |
| 459 for (int bit = 0; bit < kLastFlag; ++bit) { |
| 460 if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) { |
| 461 switch (static_cast<GVNFlag>(bit)) { |
| 462 #define DECLARE_FLAG(type) \ |
| 463 case kChanges##type: \ |
| 464 offset += OS::SNPrintF(buffer + offset, separator); \ |
| 465 offset += OS::SNPrintF(buffer + offset, #type); \ |
| 466 separator = comma; \ |
| 467 break; |
| 468 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG) |
| 469 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG) |
| 470 #undef DECLARE_FLAG |
| 471 default: |
| 472 break; |
| 473 } |
| 474 } |
| 475 } |
| 476 offset += OS::SNPrintF(buffer + offset, "]"); |
| 477 } |
| 478 if (set_depends_on > 0) { |
| 479 separator = ""; |
| 480 if (set_changes > 0) { |
| 481 offset += OS::SNPrintF(buffer + offset, ", "); |
| 482 } |
| 483 if (positive_depends_on) { |
| 484 offset += OS::SNPrintF(buffer + offset, "depends on ["); |
| 485 } else { |
| 486 offset += OS::SNPrintF(buffer + offset, "depends on all except ["); |
| 487 } |
| 488 for (int bit = 0; bit < kLastFlag; ++bit) { |
| 489 if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) { |
| 490 switch (static_cast<GVNFlag>(bit)) { |
| 491 #define DECLARE_FLAG(type) \ |
| 492 case kDependsOn##type: \ |
| 493 offset += OS::SNPrintF(buffer + offset, separator); \ |
| 494 offset += OS::SNPrintF(buffer + offset, #type); \ |
| 495 separator = comma; \ |
| 496 break; |
| 497 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG) |
| 498 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG) |
| 499 #undef DECLARE_FLAG |
| 500 default: |
| 501 break; |
| 502 } |
| 503 } |
| 504 } |
| 505 offset += OS::SNPrintF(buffer + offset, "]"); |
| 506 } |
| 507 #else |
| 508 OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral()); |
| 509 #endif |
| 510 size_t string_len = strlen(underlying_buffer) + 1; |
| 511 ASSERT(string_len <= sizeof(underlying_buffer)); |
| 512 char* result = new char[strlen(underlying_buffer) + 1]; |
| 513 OS::MemCopy(result, underlying_buffer, string_len); |
| 514 return SmartArrayPointer<char>(result); |
| 515 } |
| 516 |
| 517 |
// Finds every loop header (scanning blocks in reverse order) and attempts
// to hoist invariant instructions out of each loop's body.
void HGlobalValueNumberer::LoopInvariantCodeMotion() {
  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
              graph_->use_optimistic_licm() ? "yes" : "no");
  for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph_->blocks()->at(i);
    if (block->IsLoopHeader()) {
      GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
      TRACE_GVN_2("Try loop invariant motion for block B%d %s\n",
                  block->block_id(),
                  *GetGVNFlagsString(side_effects));

      GVNFlagSet accumulated_first_time_depends;
      GVNFlagSet accumulated_first_time_changes;
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      // Relies on loop members occupying the contiguous block-id range
      // [header, last back edge].
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
                         &accumulated_first_time_depends,
                         &accumulated_first_time_changes);
      }
    }
  }
}
| 540 |
| 541 |
// Scans every instruction of |block| (a member of the loop headed by
// |loop_header|) and hoists it into the loop pre-header when:
// it is GVN-able, it does not depend on anything the loop changes
// (|loop_kills|), hoisting is allowed (ShouldMove), and all of its operands
// are defined before the pre-header.  |first_time_depends| and
// |first_time_changes| accumulate the flags of instructions that were NOT
// hoisted, as state for later hoisting decisions.
void HGlobalValueNumberer::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    GVNFlagSet loop_kills,
    GVNFlagSet* first_time_depends,
    GVNFlagSet* first_time_changes) {
  // The pre-header is the first predecessor of the loop header.
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
  TRACE_GVN_2("Loop invariant motion for B%d %s\n",
              block->block_id(),
              *GetGVNFlagsString(depends_flags));
  HInstruction* instr = block->first();
  while (instr != NULL) {
    // Grab the successor first: hoisting unlinks instr from this block.
    HInstruction* next = instr->next();
    bool hoisted = false;
    if (instr->CheckFlag(HValue::kUseGVN)) {
      TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
                  instr->id(),
                  instr->Mnemonic(),
                  *GetGVNFlagsString(instr->gvn_flags()),
                  *GetGVNFlagsString(loop_kills));
      bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
      // Without optimistic LICM, only hoist from blocks that dominate all
      // loop exits.
      if (can_hoist && !graph()->use_optimistic_licm()) {
        can_hoist = block->IsLoopSuccessorDominator();
      }

      if (can_hoist) {
        // Every operand must already be available at the pre-header.
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          hoisted = true;
        }
      }
    }
    if (!hoisted) {
      // If an instruction is not hoisted, we have to account for its side
      // effects when hoisting later HTransitionElementsKind instructions.
      GVNFlagSet previous_depends = *first_time_depends;
      GVNFlagSet previous_changes = *first_time_changes;
      first_time_depends->Add(instr->DependsOnFlags());
      first_time_changes->Add(instr->ChangesFlags());
      if (!(previous_depends == *first_time_depends)) {
        TRACE_GVN_1("Updated first-time accumulated %s\n",
                    *GetGVNFlagsString(*first_time_depends));
      }
      if (!(previous_changes == *first_time_changes)) {
        TRACE_GVN_1("Updated first-time accumulated %s\n",
                    *GetGVNFlagsString(*first_time_changes));
      }
    }
    instr = next;
  }
}
| 605 |
| 606 |
| 607 bool HGlobalValueNumberer::AllowCodeMotion() { |
| 608 return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count; |
| 609 } |
| 610 |
| 611 |
| 612 bool HGlobalValueNumberer::ShouldMove(HInstruction* instr, |
| 613 HBasicBlock* loop_header) { |
| 614 // If we've disabled code motion or we're in a block that unconditionally |
| 615 // deoptimizes, don't move any instructions. |
| 616 return AllowCodeMotion() && !instr->block()->IsDeoptimizing(); |
| 617 } |
| 618 |
| 619 |
// Accumulates the side effects of every block lying on some path from
// |dominator| to |dominated| (exclusive) by recursing backwards through
// predecessors.  The block-id comparisons restrict the walk to blocks
// strictly between the two, and visited_on_paths_ (cleared by the caller
// before the top-level call) ensures each block is counted once.
GVNFlagSet HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  GVNFlagSet side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        visited_on_paths_.Add(block->block_id())) {
      side_effects.Add(block_side_effects_[block->block_id()]);
      // A loop header on the path brings its whole loop's effects with it.
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}
| 638 |
| 639 |
// Each instance of this class is like a "stack frame" for the recursive
// traversal of the dominator tree done during GVN (the stack is handled
// as a double linked list).
// We reuse frames when possible so the list length is limited by the depth
// of the dominator tree but this forces us to initialize each frame calling
// an explicit "Initialize" method instead of a using constructor.
class GvnBasicBlockState: public ZoneObject {
 public:
  // Creates the root frame for a traversal starting at |entry_block|.
  static GvnBasicBlockState* CreateEntry(Zone* zone,
                                         HBasicBlock* entry_block,
                                         HValueMap* entry_map) {
    return new(zone)
        GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
  }

  HBasicBlock* block() { return block_; }
  HValueMap* map() { return map_; }
  HSideEffectMap* dominators() { return &dominators_; }

  // Advances to the next frame in the traversal, setting |*dominator| to
  // the block whose state the returned frame inherits.  Returns NULL when
  // the traversal is complete.
  GvnBasicBlockState* next_in_dominator_tree_traversal(
      Zone* zone,
      HBasicBlock** dominator) {
    // This assignment needs to happen before calling next_dominated() because
    // that call can reuse "this" if we are at the last dominated block.
    *dominator = block();
    GvnBasicBlockState* result = next_dominated(zone);
    if (result == NULL) {
      GvnBasicBlockState* dominator_state = pop();
      if (dominator_state != NULL) {
        // This branch is guaranteed not to return NULL because pop() never
        // returns a state where "is_done() == true".
        *dominator = dominator_state->block();
        result = dominator_state->next_dominated(zone);
      } else {
        // Unnecessary (we are returning NULL) but done for cleanness.
        *dominator = NULL;
      }
    }
    return result;
  }

 private:
  // (Re)initializes this frame for |block|.  The value map is deep-copied
  // unless |copy_map| is false (used when the parent's map can be handed
  // over to its last child).
  void Initialize(HBasicBlock* block,
                  HValueMap* map,
                  HSideEffectMap* dominators,
                  bool copy_map,
                  Zone* zone) {
    block_ = block;
    map_ = copy_map ? map->Copy(zone) : map;
    dominated_index_ = -1;
    length_ = block->dominated_blocks()->length();
    if (dominators != NULL) {
      dominators_ = *dominators;
    }
  }
  // True once every dominated block of this frame has been visited.
  bool is_done() { return dominated_index_ >= length_; }

  GvnBasicBlockState(GvnBasicBlockState* previous,
                     HBasicBlock* block,
                     HValueMap* map,
                     HSideEffectMap* dominators,
                     Zone* zone)
      : previous_(previous), next_(NULL) {
    Initialize(block, map, dominators, true, zone);
  }

  // Returns the frame for the next dominated block, reusing "this" for the
  // last one (so its map need not be copied), or NULL when exhausted.
  GvnBasicBlockState* next_dominated(Zone* zone) {
    dominated_index_++;
    if (dominated_index_ == length_ - 1) {
      // No need to copy the map for the last child in the dominator tree.
      Initialize(block_->dominated_blocks()->at(dominated_index_),
                 map(),
                 dominators(),
                 false,
                 zone);
      return this;
    } else if (dominated_index_ < length_) {
      return push(zone,
                  block_->dominated_blocks()->at(dominated_index_),
                  dominators());
    } else {
      return NULL;
    }
  }

  // Gets (allocating on first use, then reusing) the next frame down the
  // stack and initializes it for |block|.
  GvnBasicBlockState* push(Zone* zone,
                           HBasicBlock* block,
                           HSideEffectMap* dominators) {
    if (next_ == NULL) {
      next_ =
          new(zone) GvnBasicBlockState(this, block, map(), dominators, zone);
    } else {
      next_->Initialize(block, map(), dominators, true, zone);
    }
    return next_;
  }
  // Walks back up the stack to the nearest ancestor frame that still has
  // unvisited dominated blocks, or NULL when none remains.
  // NOTE(review): the trace message always prints previous_'s id even after
  // result has advanced past it, so the "to block" id can be misleading
  // when several frames are skipped -- trace-only, verify intent.
  GvnBasicBlockState* pop() {
    GvnBasicBlockState* result = previous_;
    while (result != NULL && result->is_done()) {
      TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
                  block()->block_id(),
                  previous_->block()->block_id())
      result = result->previous_;
    }
    return result;
  }

  GvnBasicBlockState* previous_;
  GvnBasicBlockState* next_;
  HBasicBlock* block_;
  HValueMap* map_;
  HSideEffectMap dominators_;
  int dominated_index_;
  int length_;
};
| 755 |
// This is a recursive traversal of the dominator tree but it has been turned
// into a loop to avoid stack overflows.
// The logical "stack frames" of the recursion are kept in a list of
// GvnBasicBlockState instances.
// For each block: kill values invalidated by side effects, record
// side-effect dominators, and replace each GVN-able instruction that has an
// equivalent already in the value map.
void HGlobalValueNumberer::AnalyzeGraph() {
  HBasicBlock* entry_block = graph_->entry_block();
  HValueMap* entry_map = new(zone()) HValueMap(zone());
  GvnBasicBlockState* current =
      GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);

  while (current != NULL) {
    HBasicBlock* block = current->block();
    HValueMap* map = current->map();
    HSideEffectMap* dominators = current->dominators();

    TRACE_GVN_2("Analyzing block B%d%s\n",
                block->block_id(),
                block->IsLoopHeader() ? " (loop header)" : "");

    // If this is a loop header kill everything killed by the loop.
    if (block->IsLoopHeader()) {
      map->Kill(loop_side_effects_[block->block_id()]);
    }

    // Go through all instructions of the current block.
    HInstruction* instr = block->first();
    while (instr != NULL) {
      // Grab the successor first: a replaced instruction is unlinked.
      HInstruction* next = instr->next();
      GVNFlagSet flags = instr->ChangesFlags();
      if (!flags.IsEmpty()) {
        // Clear all instructions in the map that are affected by side effects.
        // Store instruction as the dominating one for tracked side effects.
        map->Kill(flags);
        dominators->Store(flags, instr);
        TRACE_GVN_2("Instruction %d %s\n", instr->id(),
                    *GetGVNFlagsString(flags));
      }
      if (instr->CheckFlag(HValue::kUseGVN)) {
        ASSERT(!instr->HasObservableSideEffects());
        HValue* other = map->Lookup(instr);
        if (other != NULL) {
          // An equivalent value already exists: reuse it and delete the
          // redundant instruction.
          ASSERT(instr->Equals(other) && other->Equals(instr));
          TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
                      instr->id(),
                      instr->Mnemonic(),
                      other->id(),
                      other->Mnemonic());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          instr->DeleteAndReplaceWith(other);
        } else {
          map->Add(instr, zone());
        }
      }
      if (instr->IsLinked() &&
          instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
        // Tell the instruction which instruction currently dominates each
        // side effect it depends on.
        for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
          HValue* other = dominators->at(i);
          GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
          GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
          if (instr->DependsOnFlags().Contains(depends_on_flag) &&
              (other != NULL)) {
            TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
                        i,
                        instr->id(),
                        instr->Mnemonic(),
                        other->id(),
                        other->Mnemonic());
            instr->SetSideEffectDominator(changes_flag, other);
          }
        }
      }
      instr = next;
    }

    HBasicBlock* dominator_block;
    GvnBasicBlockState* next =
        current->next_in_dominator_tree_traversal(zone(), &dominator_block);

    if (next != NULL) {
      HBasicBlock* dominated = next->block();
      HValueMap* successor_map = next->map();
      HSideEffectMap* successor_dominators = next->dominators();

      // Kill everything killed on any path between this block and the
      // dominated block.  We don't have to traverse these paths if the
      // value map and the dominators list is already empty.  If the range
      // of block ids (block_id, dominated_id) is empty there are no such
      // paths.
      if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
          dominator_block->block_id() + 1 < dominated->block_id()) {
        visited_on_paths_.Clear();
        GVNFlagSet side_effects_on_all_paths =
            CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
                                                      dominated);
        successor_map->Kill(side_effects_on_all_paths);
        successor_dominators->Kill(side_effects_on_all_paths);
      }
    }
    current = next;
  }
}
| 857 |
| 858 } } // namespace v8::internal |
OLD | NEW |