| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 10 matching lines...) Expand all Loading... |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "hydrogen-alias-analysis.h" | 28 #include "hydrogen-alias-analysis.h" |
| 29 #include "hydrogen-load-elimination.h" | 29 #include "hydrogen-load-elimination.h" |
| 30 #include "hydrogen-instructions.h" | 30 #include "hydrogen-instructions.h" |
| 31 #include "hydrogen-flow-engine.h" |
| 31 | 32 |
| 32 namespace v8 { | 33 namespace v8 { |
| 33 namespace internal { | 34 namespace internal { |
| 34 | 35 |
| 36 #define GLOBAL true |
| 37 #define TRACE(x) if (FLAG_trace_load_elimination) PrintF x |
| 38 |
| 35 static const int kMaxTrackedFields = 16; | 39 static const int kMaxTrackedFields = 16; |
| 36 static const int kMaxTrackedObjects = 5; | 40 static const int kMaxTrackedObjects = 5; |
| 37 | 41 |
| 38 // An element in the field approximation list. | 42 // An element in the field approximation list. |
| 39 class HFieldApproximation : public ZoneObject { | 43 class HFieldApproximation : public ZoneObject { |
| 40 public: // Just a data blob. | 44 public: // Just a data blob. |
| 41 HValue* object_; | 45 HValue* object_; |
| 42 HLoadNamedField* last_load_; | 46 HLoadNamedField* last_load_; |
| 43 HValue* last_value_; | 47 HValue* last_value_; |
| 44 HFieldApproximation* next_; | 48 HFieldApproximation* next_; |
| 49 |
| 50 // Recursively copy the entire linked list of field approximations. |
| 51 HFieldApproximation* Copy(Zone* zone) { |
| 52 if (this == NULL) return NULL; |
| 53 HFieldApproximation* copy = new(zone) HFieldApproximation(); |
| 54 copy->object_ = this->object_; |
| 55 copy->last_load_ = this->last_load_; |
| 56 copy->last_value_ = this->last_value_; |
| 57 copy->next_ = this->next_->Copy(zone); |
| 58 return copy; |
| 59 } |
| 45 }; | 60 }; |
| 46 | 61 |
| 47 | 62 |
| 48 // The main datastructure used during load/store elimination. Each in-object | 63 // The main datastructure used during load/store elimination. Each in-object |
| 49 // field is tracked separately. For each field, store a list of known field | 64 // field is tracked separately. For each field, store a list of known field |
| 50 // values for known objects. | 65 // values for known objects. |
| 51 class HLoadEliminationTable BASE_EMBEDDED { | 66 class HLoadEliminationTable : public ZoneObject { |
| 52 public: | 67 public: |
| 53 HLoadEliminationTable(Zone* zone, HAliasAnalyzer* aliasing) | 68 HLoadEliminationTable(Zone* zone, HAliasAnalyzer* aliasing) |
| 54 : zone_(zone), fields_(kMaxTrackedFields, zone), aliasing_(aliasing) { } | 69 : zone_(zone), fields_(kMaxTrackedFields, zone), aliasing_(aliasing) { } |
| 55 | 70 |
| 71 // The main processing of instructions. |
| 72 HLoadEliminationTable* Process(HInstruction* instr, Zone* zone) { |
| 73 switch (instr->opcode()) { |
| 74 case HValue::kLoadNamedField: { |
| 75 HLoadNamedField* l = HLoadNamedField::cast(instr); |
| 76 TRACE((" process L%d field %d (o%d)\n", |
| 77 instr->id(), |
| 78 FieldOf(l->access()), |
| 79 l->object()->ActualValue()->id())); |
| 80 HValue* result = load(l); |
| 81 if (result != instr) { |
| 82 // The load can be replaced with a previous load or a value. |
| 83 TRACE((" replace L%d -> v%d\n", instr->id(), result->id())); |
| 84 instr->DeleteAndReplaceWith(result); |
| 85 } |
| 86 break; |
| 87 } |
| 88 case HValue::kStoreNamedField: { |
| 89 HStoreNamedField* s = HStoreNamedField::cast(instr); |
| 90 TRACE((" process S%d field %d (o%d) = v%d\n", |
| 91 instr->id(), |
| 92 FieldOf(s->access()), |
| 93 s->object()->ActualValue()->id(), |
| 94 s->value()->id())); |
| 95 HValue* result = store(s); |
| 96 if (result == NULL) { |
| 97 // The store is redundant. Remove it. |
| 98 TRACE((" remove S%d\n", instr->id())); |
| 99 instr->DeleteAndReplaceWith(NULL); |
| 100 } |
| 101 break; |
| 102 } |
| 103 default: { |
| 104 if (instr->CheckGVNFlag(kChangesInobjectFields)) { |
| 105 TRACE((" kill-all i%d\n", instr->id())); |
| 106 Kill(); |
| 107 break; |
| 108 } |
| 109 if (instr->CheckGVNFlag(kChangesMaps)) { |
| 110 TRACE((" kill-maps i%d\n", instr->id())); |
| 111 KillOffset(JSObject::kMapOffset); |
| 112 } |
| 113 if (instr->CheckGVNFlag(kChangesElementsKind)) { |
| 114 TRACE((" kill-elements-kind i%d\n", instr->id())); |
| 115 KillOffset(JSObject::kMapOffset); |
| 116 KillOffset(JSObject::kElementsOffset); |
| 117 } |
| 118 if (instr->CheckGVNFlag(kChangesElementsPointer)) { |
| 119 TRACE((" kill-elements i%d\n", instr->id())); |
| 120 KillOffset(JSObject::kElementsOffset); |
| 121 } |
| 122 if (instr->CheckGVNFlag(kChangesOsrEntries)) { |
| 123 TRACE((" kill-osr i%d\n", instr->id())); |
| 124 Kill(); |
| 125 } |
| 126 } |
| 127 // Improvements possible: |
| 128 // - learn from HCheckMaps for field 0 |
| 129 // - remove unobservable stores (write-after-write) |
| 130 // - track cells |
| 131 // - track globals |
| 132 // - track roots |
| 133 } |
| 134 return this; |
| 135 } |
| 136 |
| 137 // Support for global analysis with HFlowEngine: Copy state to successor block. |
| 138 HLoadEliminationTable* Copy(HBasicBlock* succ, Zone* zone) { |
| 139 HLoadEliminationTable* copy = |
| 140 new(zone) HLoadEliminationTable(zone, aliasing_); |
| 141 copy->EnsureFields(fields_.length()); |
| 142 for (int i = 0; i < fields_.length(); i++) { |
| 143 copy->fields_[i] = fields_[i]->Copy(zone); |
| 144 } |
| 145 if (FLAG_trace_load_elimination) { |
| 146 TRACE((" copy-to B%d\n", succ->block_id())); |
| 147 copy->Print(); |
| 148 } |
| 149 return copy; |
| 150 } |
| 151 |
| 152 // Support for global analysis with HFlowEngine: Merge this state with |
| 153 // the other incoming state. |
| 154 HLoadEliminationTable* Merge(HBasicBlock* succ, |
| 155 HLoadEliminationTable* that, Zone* zone) { |
| 156 if (that->fields_.length() < fields_.length()) { |
| 157 // Drop fields not in the other table. |
| 158 fields_.Rewind(that->fields_.length()); |
| 159 } |
| 160 for (int i = 0; i < fields_.length(); i++) { |
| 161 // Merge the field approximations for like fields. |
| 162 HFieldApproximation* approx = fields_[i]; |
| 163 HFieldApproximation* prev = NULL; |
| 164 while (approx != NULL) { |
| 165 // TODO(titzer): Merging is O(N * M); sort? |
| 166 HFieldApproximation* other = that->Find(approx->object_, i); |
| 167 if (other == NULL || !Equal(approx->last_value_, other->last_value_)) { |
| 168 // Kill an entry that doesn't agree with the other value. |
| 169 if (prev != NULL) { |
| 170 prev->next_ = approx->next_; |
| 171 } else { |
| 172 fields_[i] = approx->next_; |
| 173 } |
| 174 approx = approx->next_; |
| 175 continue; |
| 176 } |
| 177 prev = approx; |
| 178 approx = approx->next_; |
| 179 } |
| 180 } |
| 181 return this; |
| 182 } |
| 183 |
| 184 friend class HLoadEliminationEffects; // Calls Kill() and others. |
| 185 friend class HLoadEliminationPhase; |
| 186 |
| 187 private: |
| 56 // Process a load instruction, updating internal table state. If a previous | 188 // Process a load instruction, updating internal table state. If a previous |
| 57 // load or store for this object and field exists, return the new value with | 189 // load or store for this object and field exists, return the new value with |
| 58 // which the load should be replaced. Otherwise, return {instr}. | 190 // which the load should be replaced. Otherwise, return {instr}. |
| 59 HValue* load(HLoadNamedField* instr) { | 191 HValue* load(HLoadNamedField* instr) { |
| 60 int field = FieldOf(instr->access()); | 192 int field = FieldOf(instr->access()); |
| 61 if (field < 0) return instr; | 193 if (field < 0) return instr; |
| 62 | 194 |
| 63 HValue* object = instr->object()->ActualValue(); | 195 HValue* object = instr->object()->ActualValue(); |
| 64 HFieldApproximation* approx = FindOrCreate(object, field); | 196 HFieldApproximation* approx = FindOrCreate(object, field); |
| 65 | 197 |
| 66 if (approx->last_value_ == NULL) { | 198 if (approx->last_value_ == NULL) { |
| 67 // Load is not redundant. Fill out a new entry. | 199 // Load is not redundant. Fill out a new entry. |
| 68 approx->last_load_ = instr; | 200 approx->last_load_ = instr; |
| 69 approx->last_value_ = instr; | 201 approx->last_value_ = instr; |
| 70 return instr; | 202 return instr; |
| 71 } else { | 203 } else { |
| 72 // Eliminate the load. Reuse previously stored value or load instruction. | 204 // Eliminate the load. Reuse previously stored value or load instruction. |
| 73 return approx->last_value_; | 205 return approx->last_value_; |
| 74 } | 206 } |
| 75 } | 207 } |
| 76 | 208 |
| 77 // Process a store instruction, updating internal table state. If a previous | 209 // Process a store instruction, updating internal table state. If a previous |
| 78 // store to the same object and field makes this store redundant (e.g. because | 210 // store to the same object and field makes this store redundant (e.g. because |
| 79 // the stored values are the same), return NULL indicating that this store | 211 // the stored values are the same), return NULL indicating that this store |
| 80 // instruction is redundant. Otherwise, return {instr}. | 212 // instruction is redundant. Otherwise, return {instr}. |
| 81 HValue* store(HStoreNamedField* instr) { | 213 HValue* store(HStoreNamedField* instr) { |
| 82 int field = FieldOf(instr->access()); | 214 int field = FieldOf(instr->access()); |
| 83 if (field < 0) return instr; | 215 if (field < 0) return KillIfMisaligned(instr); |
| 84 | 216 |
| 85 HValue* object = instr->object()->ActualValue(); | 217 HValue* object = instr->object()->ActualValue(); |
| 86 HValue* value = instr->value(); | 218 HValue* value = instr->value(); |
| 87 | 219 |
| 88 // Kill non-equivalent may-alias entries. | 220 // Kill non-equivalent may-alias entries. |
| 89 KillFieldInternal(object, field, value); | 221 KillFieldInternal(object, field, value); |
| 90 if (instr->has_transition()) { | 222 if (instr->has_transition()) { |
| 91 // A transition store alters the map of the object. | 223 // A transition store alters the map of the object. |
| 92 // TODO(titzer): remember the new map (a constant) for the object. | 224 // TODO(titzer): remember the new map (a constant) for the object. |
| 93 KillFieldInternal(object, FieldOf(JSObject::kMapOffset), NULL); | 225 KillFieldInternal(object, FieldOf(JSObject::kMapOffset), NULL); |
| (...skipping 17 matching lines...) Expand all Loading... |
| 111 } | 243 } |
| 112 | 244 |
| 113 // Kill all entries matching the given offset. | 245 // Kill all entries matching the given offset. |
| 114 void KillOffset(int offset) { | 246 void KillOffset(int offset) { |
| 115 int field = FieldOf(offset); | 247 int field = FieldOf(offset); |
| 116 if (field >= 0 && field < fields_.length()) { | 248 if (field >= 0 && field < fields_.length()) { |
| 117 fields_[field] = NULL; | 249 fields_[field] = NULL; |
| 118 } | 250 } |
| 119 } | 251 } |
| 120 | 252 |
| 121 // Compute the field index for the given object access; -1 if not tracked. | 253 // Kill all entries aliasing the given store. |
| 122 int FieldOf(HObjectAccess access) { | 254 void KillStore(HStoreNamedField* s) { |
| 123 // Only track kMaxTrackedFields in-object fields. | 255 int field = FieldOf(s->access()); |
| 124 if (!access.IsInobject()) return -1; | 256 if (field >= 0) { |
| 125 return FieldOf(access.offset()); | 257 KillFieldInternal(s->object()->ActualValue(), field, s->value()); |
| 126 } | 258 } else { |
| 127 | 259 KillIfMisaligned(s); |
| 128 // Print this table to stdout. | |
| 129 void Print() { | |
| 130 for (int i = 0; i < fields_.length(); i++) { | |
| 131 PrintF(" field %d: ", i); | |
| 132 for (HFieldApproximation* a = fields_[i]; a != NULL; a = a->next_) { | |
| 133 PrintF("[o%d =", a->object_->id()); | |
| 134 if (a->last_load_ != NULL) PrintF(" L%d", a->last_load_->id()); | |
| 135 if (a->last_value_ != NULL) PrintF(" v%d", a->last_value_->id()); | |
| 136 PrintF("] "); | |
| 137 } | |
| 138 PrintF("\n"); | |
| 139 } | 260 } |
| 140 } | 261 } |
| 141 | 262 |
| 142 private: | 263 // Kill multiple entries in the case of a misaligned store. |
| 264 HValue* KillIfMisaligned(HStoreNamedField* instr) { |
| 265 HObjectAccess access = instr->access(); |
| 266 if (access.IsInobject()) { |
| 267 int offset = access.offset(); |
| 268 if ((offset % kPointerSize) != 0) { |
| 269 // Kill the field containing the first word of the access. |
| 270 HValue* object = instr->object()->ActualValue(); |
| 271 int field = offset / kPointerSize; |
| 272 KillFieldInternal(object, field, NULL); |
| 273 |
| 274 // Kill the next field in case of overlap. |
| 275 int size = kPointerSize; |
| 276 if (access.representation().IsByte()) size = 1; |
| 277 else if (access.representation().IsInteger32()) size = 4; |
| 278 int next_field = (offset + size - 1) / kPointerSize; |
| 279 if (next_field != field) KillFieldInternal(object, next_field, NULL); |
| 280 } |
| 281 } |
| 282 return instr; |
| 283 } |
| 284 |
| 285 // Find an entry for the given object and field pair. |
| 286 HFieldApproximation* Find(HValue* object, int field) { |
| 287 // Search for a field approximation for this object. |
| 288 HFieldApproximation* approx = fields_[field]; |
| 289 while (approx != NULL) { |
| 290 if (aliasing_->MustAlias(object, approx->object_)) return approx; |
| 291 approx = approx->next_; |
| 292 } |
| 293 return NULL; |
| 294 } |
| 295 |
| 143 // Find or create an entry for the given object and field pair. | 296 // Find or create an entry for the given object and field pair. |
| 144 HFieldApproximation* FindOrCreate(HValue* object, int field) { | 297 HFieldApproximation* FindOrCreate(HValue* object, int field) { |
| 145 EnsureFields(field + 1); | 298 EnsureFields(field + 1); |
| 146 | 299 |
| 147 // Search for a field approximation for this object. | 300 // Search for a field approximation for this object. |
| 148 HFieldApproximation* approx = fields_[field]; | 301 HFieldApproximation* approx = fields_[field]; |
| 149 int count = 0; | 302 int count = 0; |
| 150 while (approx != NULL) { | 303 while (approx != NULL) { |
| 151 if (aliasing_->MustAlias(object, approx->object_)) return approx; | 304 if (aliasing_->MustAlias(object, approx->object_)) return approx; |
| 152 count++; | 305 count++; |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 211 | 364 |
| 212 HFieldApproximation* prev = NULL; | 365 HFieldApproximation* prev = NULL; |
| 213 while (approx->next_ != NULL) { | 366 while (approx->next_ != NULL) { |
| 214 prev = approx; | 367 prev = approx; |
| 215 approx = approx->next_; | 368 approx = approx->next_; |
| 216 } | 369 } |
| 217 if (prev != NULL) prev->next_ = NULL; | 370 if (prev != NULL) prev->next_ = NULL; |
| 218 return approx; | 371 return approx; |
| 219 } | 372 } |
| 220 | 373 |
| 374 // Compute the field index for the given object access; -1 if not tracked. |
| 375 int FieldOf(HObjectAccess access) { |
| 376 return access.IsInobject() ? FieldOf(access.offset()) : -1; |
| 377 } |
| 378 |
| 379 // Compute the field index for the given in-object offset; -1 if not tracked. |
| 380 int FieldOf(int offset) { |
| 381 if (offset >= kMaxTrackedFields * kPointerSize) return -1; |
| 382 // TODO(titzer): track misaligned loads in a separate list? |
| 383 if ((offset % kPointerSize) != 0) return -1; // Ignore misaligned accesses. |
| 384 return offset / kPointerSize; |
| 385 } |
| 386 |
| 221 // Ensure internal storage for the given number of fields. | 387 // Ensure internal storage for the given number of fields. |
| 222 void EnsureFields(int num_fields) { | 388 void EnsureFields(int num_fields) { |
| 223 while (fields_.length() < num_fields) fields_.Add(NULL, zone_); | 389 if (fields_.length() < num_fields) { |
| 390 fields_.AddBlock(NULL, num_fields - fields_.length(), zone_); |
| 391 } |
| 224 } | 392 } |
| 225 | 393 |
| 226 // Compute the field index for the given in-object offset. | 394 // Print this table to stdout. |
| 227 int FieldOf(int offset) { | 395 void Print() { |
| 228 if (offset >= kMaxTrackedFields * kPointerSize) return -1; | 396 for (int i = 0; i < fields_.length(); i++) { |
| 229 ASSERT((offset % kPointerSize) == 0); // Assume aligned accesses. | 397 PrintF(" field %d: ", i); |
| 230 return offset / kPointerSize; | 398 for (HFieldApproximation* a = fields_[i]; a != NULL; a = a->next_) { |
| 399 PrintF("[o%d =", a->object_->id()); |
| 400 if (a->last_load_ != NULL) PrintF(" L%d", a->last_load_->id()); |
| 401 if (a->last_value_ != NULL) PrintF(" v%d", a->last_value_->id()); |
| 402 PrintF("] "); |
| 403 } |
| 404 PrintF("\n"); |
| 405 } |
| 231 } | 406 } |
| 232 | 407 |
| 233 Zone* zone_; | 408 Zone* zone_; |
| 234 ZoneList<HFieldApproximation*> fields_; | 409 ZoneList<HFieldApproximation*> fields_; |
| 235 HAliasAnalyzer* aliasing_; | 410 HAliasAnalyzer* aliasing_; |
| 236 }; | 411 }; |
| 237 | 412 |
| 238 | 413 |
| 239 void HLoadEliminationPhase::Run() { | 414 // Support for HFlowEngine: collect store effects within loops. |
| 240 for (int i = 0; i < graph()->blocks()->length(); i++) { | 415 class HLoadEliminationEffects : public ZoneObject { |
| 241 HBasicBlock* block = graph()->blocks()->at(i); | 416 public: |
| 242 EliminateLoads(block); | 417 explicit HLoadEliminationEffects(Zone* zone) |
| 418 : zone_(zone), |
| 419 maps_stored_(false), |
| 420 fields_stored_(false), |
| 421 elements_stored_(false), |
| 422 stores_(5, zone) { } |
| 423 |
| 424 inline bool Disabled() { |
| 425 return false; // Effects are _not_ disabled. |
| 243 } | 426 } |
| 244 } | 427 |
| 428 // Process a possibly side-effecting instruction. |
| 429 void Process(HInstruction* instr, Zone* zone) { |
| 430 switch (instr->opcode()) { |
| 431 case HValue::kStoreNamedField: { |
| 432 stores_.Add(HStoreNamedField::cast(instr), zone_); |
| 433 break; |
| 434 } |
| 435 case HValue::kOsrEntry: { |
| 436 // Kill everything. Loads must not be hoisted past the OSR entry. |
| 437 maps_stored_ = true; |
| 438 fields_stored_ = true; |
| 439 elements_stored_ = true; |
| 440 } |
| 441 default: { |
| 442 fields_stored_ |= instr->CheckGVNFlag(kChangesInobjectFields); |
| 443 maps_stored_ |= instr->CheckGVNFlag(kChangesMaps); |
| 444 maps_stored_ |= instr->CheckGVNFlag(kChangesElementsKind); |
| 445 elements_stored_ |= instr->CheckGVNFlag(kChangesElementsKind); |
| 446 elements_stored_ |= instr->CheckGVNFlag(kChangesElementsPointer); |
| 447 } |
| 448 } |
| 449 } |
| 450 |
| 451 // Apply these effects to the given load elimination table. |
| 452 void Apply(HLoadEliminationTable* table) { |
| 453 if (fields_stored_) { |
| 454 table->Kill(); |
| 455 return; |
| 456 } |
| 457 if (maps_stored_) { |
| 458 table->KillOffset(JSObject::kMapOffset); |
| 459 } |
| 460 if (elements_stored_) { |
| 461 table->KillOffset(JSObject::kElementsOffset); |
| 462 } |
| 463 |
| 464 // Kill non-agreeing fields for each store contained in these effects. |
| 465 for (int i = 0; i < stores_.length(); i++) { |
| 466 table->KillStore(stores_[i]); |
| 467 } |
| 468 } |
| 469 |
| 470 // Union these effects with the other effects. |
| 471 void Union(HLoadEliminationEffects* that, Zone* zone) { |
| 472 maps_stored_ |= that->maps_stored_; |
| 473 fields_stored_ |= that->fields_stored_; |
| 474 elements_stored_ |= that->elements_stored_; |
| 475 for (int i = 0; i < that->stores_.length(); i++) { |
| 476 stores_.Add(that->stores_[i], zone); |
| 477 } |
| 478 } |
| 479 |
| 480 private: |
| 481 Zone* zone_; |
| 482 bool maps_stored_ : 1; |
| 483 bool fields_stored_ : 1; |
| 484 bool elements_stored_ : 1; |
| 485 ZoneList<HStoreNamedField*> stores_; |
| 486 }; |
| 245 | 487 |
| 246 | 488 |
| 247 // For code de-uglification. | 489 // The main routine of the analysis phase. Use the HFlowEngine for either a |
| 248 #define TRACE(x) if (FLAG_trace_load_elimination) PrintF x | 490 // local or a global analysis. |
| 491 void HLoadEliminationPhase::Run() { |
| 492 HFlowEngine<HLoadEliminationTable, HLoadEliminationEffects> |
| 493 engine(graph(), zone()); |
| 494 HAliasAnalyzer aliasing; |
| 495 HLoadEliminationTable* table = |
| 496 new(zone()) HLoadEliminationTable(zone(), &aliasing); |
| 249 | 497 |
| 250 | 498 if (GLOBAL) { |
| 251 // Eliminate loads and stores local to a block. | 499 // Perform a global analysis. |
| 252 void HLoadEliminationPhase::EliminateLoads(HBasicBlock* block) { | 500 engine.AnalyzeDominatedBlocks(graph()->blocks()->at(0), table); |
| 253 HAliasAnalyzer aliasing; | 501 } else { |
| 254 HLoadEliminationTable table(zone(), &aliasing); | 502 // Perform only local analysis. |
| 255 | 503 for (int i = 0; i < graph()->blocks()->length(); i++) { |
| 256 TRACE(("-- load-elim B%d -------------------------------------------------\n", | 504 table->Kill(); |
| 257 block->block_id())); | 505 engine.AnalyzeOneBlock(graph()->blocks()->at(i), table); |
| 258 | |
| 259 for (HInstructionIterator it(block); !it.Done(); it.Advance()) { | |
| 260 bool changed = false; | |
| 261 HInstruction* instr = it.Current(); | |
| 262 | |
| 263 switch (instr->opcode()) { | |
| 264 case HValue::kLoadNamedField: { | |
| 265 HLoadNamedField* load = HLoadNamedField::cast(instr); | |
| 266 TRACE((" process L%d field %d (o%d)\n", | |
| 267 instr->id(), | |
| 268 table.FieldOf(load->access()), | |
| 269 load->object()->ActualValue()->id())); | |
| 270 HValue* result = table.load(load); | |
| 271 if (result != instr) { | |
| 272 // The load can be replaced with a previous load or a value. | |
| 273 TRACE((" replace L%d -> v%d\n", instr->id(), result->id())); | |
| 274 instr->DeleteAndReplaceWith(result); | |
| 275 } | |
| 276 changed = true; | |
| 277 break; | |
| 278 } | |
| 279 case HValue::kStoreNamedField: { | |
| 280 HStoreNamedField* store = HStoreNamedField::cast(instr); | |
| 281 TRACE((" process S%d field %d (o%d) = v%d\n", | |
| 282 instr->id(), | |
| 283 table.FieldOf(store->access()), | |
| 284 store->object()->ActualValue()->id(), | |
| 285 store->value()->id())); | |
| 286 HValue* result = table.store(store); | |
| 287 if (result == NULL) { | |
| 288 // The store is redundant. Remove it. | |
| 289 TRACE((" remove S%d\n", instr->id())); | |
| 290 instr->DeleteAndReplaceWith(NULL); | |
| 291 } | |
| 292 changed = true; | |
| 293 break; | |
| 294 } | |
| 295 default: { | |
| 296 if (instr->CheckGVNFlag(kChangesInobjectFields)) { | |
| 297 TRACE((" kill-all i%d\n", instr->id())); | |
| 298 table.Kill(); | |
| 299 continue; | |
| 300 } | |
| 301 if (instr->CheckGVNFlag(kChangesMaps)) { | |
| 302 TRACE((" kill-maps i%d\n", instr->id())); | |
| 303 table.KillOffset(JSObject::kMapOffset); | |
| 304 } | |
| 305 if (instr->CheckGVNFlag(kChangesElementsKind)) { | |
| 306 TRACE((" kill-elements-kind i%d\n", instr->id())); | |
| 307 table.KillOffset(JSObject::kMapOffset); | |
| 308 table.KillOffset(JSObject::kElementsOffset); | |
| 309 } | |
| 310 if (instr->CheckGVNFlag(kChangesElementsPointer)) { | |
| 311 TRACE((" kill-elements i%d\n", instr->id())); | |
| 312 table.KillOffset(JSObject::kElementsOffset); | |
| 313 } | |
| 314 } | |
| 315 // Improvements possible: | |
| 316 // - learn from HCheckMaps for field 0 | |
| 317 // - remove unobservable stores (write-after-write) | |
| 318 } | |
| 319 | |
| 320 if (changed && FLAG_trace_load_elimination) { | |
| 321 table.Print(); | |
| 322 } | 506 } |
| 323 } | 507 } |
| 324 } | 508 } |
| 325 | 509 |
| 326 | |
| 327 } } // namespace v8::internal | 510 } } // namespace v8::internal |
| OLD | NEW |