OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 10 matching lines...) |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "hydrogen-alias-analysis.h" | 28 #include "hydrogen-alias-analysis.h" |
29 #include "hydrogen-load-elimination.h" | 29 #include "hydrogen-load-elimination.h" |
30 #include "hydrogen-instructions.h" | 30 #include "hydrogen-instructions.h" |
| 31 #include "hydrogen-flow-engine.h" |
31 | 32 |
32 namespace v8 { | 33 namespace v8 { |
33 namespace internal { | 34 namespace internal { |
34 | 35 |
| 36 #define GLOBAL true |
| 37 #define TRACE(x) if (FLAG_trace_load_elimination) PrintF x |
| 38 |
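Editor's note: the TRACE macro relies on the caller wrapping the whole argument list in an extra pair of parentheses, so a printf-style call with any number of arguments can travel through a single macro parameter. A minimal stand-alone sketch of the pattern, using printf in place of V8's PrintF and a local bool in place of the real flag (both are stand-ins, not the actual V8 declarations):

#include <cstdio>

static bool FLAG_trace_load_elimination = true;  // stand-in for the real flag

#define TRACE(x) if (FLAG_trace_load_elimination) printf x

int main() {
  int id = 42;
  TRACE(("  process L%d\n", id));  // note the doubled parentheses at the call site
  return 0;
}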
35 static const int kMaxTrackedFields = 16; | 39 static const int kMaxTrackedFields = 16; |
36 static const int kMaxTrackedObjects = 5; | 40 static const int kMaxTrackedObjects = 5; |
37 | 41 |
38 // An element in the field approximation list. | 42 // An element in the field approximation list. |
39 class HFieldApproximation : public ZoneObject { | 43 class HFieldApproximation : public ZoneObject { |
40 public: // Just a data blob. | 44 public: // Just a data blob. |
41 HValue* object_; | 45 HValue* object_; |
42 HLoadNamedField* last_load_; | 46 HLoadNamedField* last_load_; |
43 HValue* last_value_; | 47 HValue* last_value_; |
44 HFieldApproximation* next_; | 48 HFieldApproximation* next_; |
| 49 |
| 50 // Recursively copy the entire linked list of field approximations. |
| 51 HFieldApproximation* Copy(Zone* zone) { |
| 52 if (this == NULL) return NULL; |
| 53 HFieldApproximation* copy = new(zone) HFieldApproximation(); |
| 54 copy->object_ = this->object_; |
| 55 copy->last_load_ = this->last_load_; |
| 56 copy->last_value_ = this->last_value_; |
| 57 copy->next_ = this->next_->Copy(zone); |
| 58 return copy; |
| 59 } |
45 }; | 60 }; |
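Editor's note: Copy() above recurses down the list and terminates by being invoked on a NULL receiver (the `if (this == NULL)` check), which is formally undefined behavior even though it works on the compilers V8 targets. For comparison, a null-safe iterative sketch under the same assumptions about the four data members; this is an illustration only, not a proposed replacement for the CL:

// Hypothetical iterative copy that never calls a member function through NULL.
static HFieldApproximation* CopyList(HFieldApproximation* head, Zone* zone) {
  HFieldApproximation* result = NULL;
  HFieldApproximation** tail = &result;
  for (HFieldApproximation* a = head; a != NULL; a = a->next_) {
    HFieldApproximation* copy = new(zone) HFieldApproximation();
    copy->object_ = a->object_;
    copy->last_load_ = a->last_load_;
    copy->last_value_ = a->last_value_;
    copy->next_ = NULL;
    *tail = copy;         // append, preserving list order
    tail = &copy->next_;
  }
  return result;
}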
46 | 61 |
47 | 62 |
48 // The main datastructure used during load/store elimination. Each in-object | 63 // The main datastructure used during load/store elimination. Each in-object |
49 // field is tracked separately. For each field, store a list of known field | 64 // field is tracked separately. For each field, store a list of known field |
50 // values for known objects. | 65 // values for known objects. |
51 class HLoadEliminationTable BASE_EMBEDDED { | 66 class HLoadEliminationTable : public ZoneObject { |
52 public: | 67 public: |
53 HLoadEliminationTable(Zone* zone, HAliasAnalyzer* aliasing) | 68 HLoadEliminationTable(Zone* zone, HAliasAnalyzer* aliasing) |
54 : zone_(zone), fields_(kMaxTrackedFields, zone), aliasing_(aliasing) { } | 69 : zone_(zone), fields_(kMaxTrackedFields, zone), aliasing_(aliasing) { } |
55 | 70 |
| 71 // The main processing of instructions. |
| 72 HLoadEliminationTable* Process(HInstruction* instr, Zone* zone) { |
| 73 switch (instr->opcode()) { |
| 74 case HValue::kLoadNamedField: { |
| 75 HLoadNamedField* l = HLoadNamedField::cast(instr); |
| 76 TRACE((" process L%d field %d (o%d)\n", |
| 77 instr->id(), |
| 78 FieldOf(l->access()), |
| 79 l->object()->ActualValue()->id())); |
| 80 HValue* result = load(l); |
| 81 if (result != instr) { |
| 82 // The load can be replaced with a previous load or a value. |
| 83 TRACE((" replace L%d -> v%d\n", instr->id(), result->id())); |
| 84 instr->DeleteAndReplaceWith(result); |
| 85 } |
| 86 break; |
| 87 } |
| 88 case HValue::kStoreNamedField: { |
| 89 HStoreNamedField* s = HStoreNamedField::cast(instr); |
| 90 TRACE((" process S%d field %d (o%d) = v%d\n", |
| 91 instr->id(), |
| 92 FieldOf(s->access()), |
| 93 s->object()->ActualValue()->id(), |
| 94 s->value()->id())); |
| 95 HValue* result = store(s); |
| 96 if (result == NULL) { |
| 97 // The store is redundant. Remove it. |
| 98 TRACE((" remove S%d\n", instr->id())); |
| 99 instr->DeleteAndReplaceWith(NULL); |
| 100 } |
| 101 break; |
| 102 } |
| 103 default: { |
| 104 if (instr->CheckGVNFlag(kChangesInobjectFields)) { |
| 105 TRACE((" kill-all i%d\n", instr->id())); |
| 106 Kill(); |
| 107 break; |
| 108 } |
| 109 if (instr->CheckGVNFlag(kChangesMaps)) { |
| 110 TRACE((" kill-maps i%d\n", instr->id())); |
| 111 KillOffset(JSObject::kMapOffset); |
| 112 } |
| 113 if (instr->CheckGVNFlag(kChangesElementsKind)) { |
| 114 TRACE((" kill-elements-kind i%d\n", instr->id())); |
| 115 KillOffset(JSObject::kMapOffset); |
| 116 KillOffset(JSObject::kElementsOffset); |
| 117 } |
| 118 if (instr->CheckGVNFlag(kChangesElementsPointer)) { |
| 119 TRACE((" kill-elements i%d\n", instr->id())); |
| 120 KillOffset(JSObject::kElementsOffset); |
| 121 } |
| 122 if (instr->CheckGVNFlag(kChangesOsrEntries)) { |
| 123 TRACE((" kill-osr i%d\n", instr->id())); |
| 124 Kill(); |
| 125 } |
| 126 } |
| 127 // Improvements possible: |
| 128 // - learn from HCheckMaps for field 0 |
| 129 // - remove unobservable stores (write-after-write) |
| 130 // - track cells |
| 131 // - track globals |
| 132 // - track roots |
| 133 } |
| 134 return this; |
| 135 } |
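Editor's note: Process() is the per-instruction transfer function. Loads are looked up in the table and replaced when a known value exists, stores update the table (and are deleted when redundant), and any other instruction with a relevant GVN "changes" flag conservatively kills matching entries. A rough sketch of how a driver threads the state through one block, assuming the HFlowEngine calls it in essentially this shape (the engine's exact interface is not part of this file):

// Hypothetical per-block driver loop.
HLoadEliminationTable* state = incoming_state;
for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
  state = state->Process(it.Current(), zone);  // may mutate and return the same table
}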
| 136 |
| 137 // Support for global analysis with HFlowEngine: Copy state to successor block. |

| 138 HLoadEliminationTable* Copy(HBasicBlock* succ, Zone* zone) { |
| 139 HLoadEliminationTable* copy = |
| 140 new(zone) HLoadEliminationTable(zone, aliasing_); |
| 141 copy->EnsureFields(fields_.length()); |
| 142 for (int i = 0; i < fields_.length(); i++) { |
| 143 copy->fields_[i] = fields_[i]->Copy(zone); |
| 144 } |
| 145 if (FLAG_trace_load_elimination) { |
| 146 TRACE((" copy-to B%d\n", succ->block_id())); |
| 147 copy->Print(); |
| 148 } |
| 149 return copy; |
| 150 } |
| 151 |
| 152 // Support for global analysis with HFlowEngine: Merge this state with |
| 153 // the other incoming state. |
| 154 HLoadEliminationTable* Merge(HBasicBlock* succ, |
| 155 HLoadEliminationTable* that, Zone* zone) { |
| 156 if (that->fields_.length() < fields_.length()) { |
| 157 // Drop fields not in the other table. |
| 158 fields_.Rewind(that->fields_.length()); |
| 159 } |
| 160 for (int i = 0; i < fields_.length(); i++) { |
| 161 // Merge the field approximations for like fields. |
| 162 HFieldApproximation* approx = fields_[i]; |
| 163 HFieldApproximation* prev = NULL; |
| 164 while (approx != NULL) { |
| 165 // TODO(titzer): Merging is O(N * M); sort? |
| 166 HFieldApproximation* other = that->Find(approx->object_, i); |
| 167 if (other == NULL || !Equal(approx->last_value_, other->last_value_)) { |
| 168 // Kill an entry that doesn't agree with the other value. |
| 169 if (prev != NULL) { |
| 170 prev->next_ = approx->next_; |
| 171 } else { |
| 172 fields_[i] = approx->next_; |
| 173 } |
| 174 approx = approx->next_; |
| 175 continue; |
| 176 } |
| 177 prev = approx; |
| 178 approx = approx->next_; |
| 179 } |
| 180 } |
| 181 return this; |
| 182 } |
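Editor's note: Merge() keeps an approximation only when the other incoming state has an entry for the same object in the same field with an equal value; everything else is dropped, so the merged table never claims more than both predecessors can guarantee. A stand-alone sketch of that join rule on a simplified model (a plain map instead of the zone-allocated per-field lists):

#include <map>
#include <utility>

// (object id, field index) -> value id
typedef std::map<std::pair<int, int>, int> SimpleState;

static SimpleState MergeStates(const SimpleState& a, const SimpleState& b) {
  SimpleState out;
  for (SimpleState::const_iterator it = a.begin(); it != a.end(); ++it) {
    SimpleState::const_iterator other = b.find(it->first);
    if (other != b.end() && other->second == it->second) {
      out.insert(*it);  // both sides agree; the entry survives the join
    }
  }
  return out;
}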
| 183 |
| 184 friend class HLoadEliminationEffects; // Calls Kill() and others. |
| 185 friend class HLoadEliminationPhase; |
| 186 |
| 187 private: |
56 // Process a load instruction, updating internal table state. If a previous | 188 // Process a load instruction, updating internal table state. If a previous |
57 // load or store for this object and field exists, return the new value with | 189 // load or store for this object and field exists, return the new value with |
58 // which the load should be replaced. Otherwise, return {instr}. | 190 // which the load should be replaced. Otherwise, return {instr}. |
59 HValue* load(HLoadNamedField* instr) { | 191 HValue* load(HLoadNamedField* instr) { |
60 int field = FieldOf(instr->access()); | 192 int field = FieldOf(instr->access()); |
61 if (field < 0) return instr; | 193 if (field < 0) return instr; |
62 | 194 |
63 HValue* object = instr->object()->ActualValue(); | 195 HValue* object = instr->object()->ActualValue(); |
64 HFieldApproximation* approx = FindOrCreate(object, field); | 196 HFieldApproximation* approx = FindOrCreate(object, field); |
65 | 197 |
(...skipping 45 matching lines...) |
111 } | 243 } |
112 | 244 |
113 // Kill all entries matching the given offset. | 245 // Kill all entries matching the given offset. |
114 void KillOffset(int offset) { | 246 void KillOffset(int offset) { |
115 int field = FieldOf(offset); | 247 int field = FieldOf(offset); |
116 if (field >= 0 && field < fields_.length()) { | 248 if (field >= 0 && field < fields_.length()) { |
117 fields_[field] = NULL; | 249 fields_[field] = NULL; |
118 } | 250 } |
119 } | 251 } |
120 | 252 |
121 // Compute the field index for the given object access; -1 if not tracked. | 253 // Find an entry for the given object and field pair. |
122 int FieldOf(HObjectAccess access) { | 254 HFieldApproximation* Find(HValue* object, int field) { |
123 // Only track kMaxTrackedFields in-object fields. | 255 // Search for a field approximation for this object. |
124 if (!access.IsInobject()) return -1; | 256 HFieldApproximation* approx = fields_[field]; |
125 return FieldOf(access.offset()); | 257 while (approx != NULL) { |
| 258 if (aliasing_->MustAlias(object, approx->object_)) return approx; |
| 259 approx = approx->next_; |
| 260 } |
| 261 return NULL; |
126 } | 262 } |
127 | 263 |
128 // Print this table to stdout. | |
129 void Print() { | |
130 for (int i = 0; i < fields_.length(); i++) { | |
131 PrintF(" field %d: ", i); | |
132 for (HFieldApproximation* a = fields_[i]; a != NULL; a = a->next_) { | |
133 PrintF("[o%d =", a->object_->id()); | |
134 if (a->last_load_ != NULL) PrintF(" L%d", a->last_load_->id()); | |
135 if (a->last_value_ != NULL) PrintF(" v%d", a->last_value_->id()); | |
136 PrintF("] "); | |
137 } | |
138 PrintF("\n"); | |
139 } | |
140 } | |
141 | |
142 private: | |
143 // Find or create an entry for the given object and field pair. | 264 // Find or create an entry for the given object and field pair. |
144 HFieldApproximation* FindOrCreate(HValue* object, int field) { | 265 HFieldApproximation* FindOrCreate(HValue* object, int field) { |
145 EnsureFields(field + 1); | 266 EnsureFields(field + 1); |
146 | 267 |
147 // Search for a field approximation for this object. | 268 // Search for a field approximation for this object. |
148 HFieldApproximation* approx = fields_[field]; | 269 HFieldApproximation* approx = fields_[field]; |
149 int count = 0; | 270 int count = 0; |
150 while (approx != NULL) { | 271 while (approx != NULL) { |
151 if (aliasing_->MustAlias(object, approx->object_)) return approx; | 272 if (aliasing_->MustAlias(object, approx->object_)) return approx; |
152 count++; | 273 count++; |
(...skipping 58 matching lines...) |
211 | 332 |
212 HFieldApproximation* prev = NULL; | 333 HFieldApproximation* prev = NULL; |
213 while (approx->next_ != NULL) { | 334 while (approx->next_ != NULL) { |
214 prev = approx; | 335 prev = approx; |
215 approx = approx->next_; | 336 approx = approx->next_; |
216 } | 337 } |
217 if (prev != NULL) prev->next_ = NULL; | 338 if (prev != NULL) prev->next_ = NULL; |
218 return approx; | 339 return approx; |
219 } | 340 } |
220 | 341 |
| 342 // Compute the field index for the given object access; -1 if not tracked. |
| 343 int FieldOf(HObjectAccess access) { |
| 344 return access.IsInobject() ? FieldOf(access.offset()) : -1; |
| 345 } |
| 346 |
| 347 // Compute the field index for the given in-object offset; -1 if not tracked. |
| 348 int FieldOf(int offset) { |
| 349 if (offset >= kMaxTrackedFields * kPointerSize) return -1; |
| 350 ASSERT((offset % kPointerSize) == 0); // Assume aligned accesses. |
| 351 return offset / kPointerSize; |
| 352 } |
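Editor's note: FieldOf() simply divides the in-object byte offset by the pointer size and refuses anything at or beyond kMaxTrackedFields slots. As a worked example, assuming a 64-bit build (kPointerSize == 8) and the usual JSObject layout:

//   FieldOf(JSObject::kMapOffset)  == FieldOf(0)   -> 0
//   FieldOf(8)                                     -> 1   (the properties slot on this layout)
//   FieldOf(16 * kPointerSize)     == FieldOf(128) -> -1  (at or past the 16 tracked fields)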
| 353 |
221 // Ensure internal storage for the given number of fields. | 354 // Ensure internal storage for the given number of fields. |
222 void EnsureFields(int num_fields) { | 355 void EnsureFields(int num_fields) { |
223 while (fields_.length() < num_fields) fields_.Add(NULL, zone_); | 356 if (fields_.length() < num_fields) { |
| 357 fields_.AddBlock(NULL, num_fields - fields_.length(), zone_); |
| 358 } |
224 } | 359 } |
225 | 360 |
226 // Compute the field index for the given in-object offset. | 361 // Print this table to stdout. |
227 int FieldOf(int offset) { | 362 void Print() { |
228 if (offset >= kMaxTrackedFields * kPointerSize) return -1; | 363 for (int i = 0; i < fields_.length(); i++) { |
229 ASSERT((offset % kPointerSize) == 0); // Assume aligned accesses. | 364 PrintF(" field %d: ", i); |
230 return offset / kPointerSize; | 365 for (HFieldApproximation* a = fields_[i]; a != NULL; a = a->next_) { |
| 366 PrintF("[o%d =", a->object_->id()); |
| 367 if (a->last_load_ != NULL) PrintF(" L%d", a->last_load_->id()); |
| 368 if (a->last_value_ != NULL) PrintF(" v%d", a->last_value_->id()); |
| 369 PrintF("] "); |
| 370 } |
| 371 PrintF("\n"); |
| 372 } |
231 } | 373 } |
232 | 374 |
233 Zone* zone_; | 375 Zone* zone_; |
234 ZoneList<HFieldApproximation*> fields_; | 376 ZoneList<HFieldApproximation*> fields_; |
235 HAliasAnalyzer* aliasing_; | 377 HAliasAnalyzer* aliasing_; |
236 }; | 378 }; |
237 | 379 |
238 | 380 |
239 void HLoadEliminationPhase::Run() { | 381 // Support for HFlowEngine: collect store effects within loops. |
240 for (int i = 0; i < graph()->blocks()->length(); i++) { | 382 class HLoadEliminationEffects : public ZoneObject { |
241 HBasicBlock* block = graph()->blocks()->at(i); | 383 public: |
242 EliminateLoads(block); | 384 explicit HLoadEliminationEffects(Zone* zone) |
| 385 : zone_(zone), |
| 386 maps_stored_(false), |
| 387 fields_stored_(false), |
| 388 elements_stored_(false), |
| 389 stores_(5, zone) { } |
| 390 |
| 391 inline bool Disabled() { |
| 392 return false; // Effects are _not_ disabled. |
243 } | 393 } |
244 } | 394 |
| 395 // Process a possibly side-effecting instruction. |
| 396 void Process(HInstruction* instr, Zone* zone) { |
| 397 switch (instr->opcode()) { |
| 398 case HValue::kStoreNamedField: { |
| 399 stores_.Add(HStoreNamedField::cast(instr), zone_); |
| 400 break; |
| 401 } |
| 402 case HValue::kOsrEntry: { |
| 403 // Kill everything. Loads must not be hoisted past the OSR entry. |
| 404 maps_stored_ = true; |
| 405 fields_stored_ = true; |
| 406 elements_stored_ = true; |
| 407 } |
| 408 default: { |
| 409 fields_stored_ |= instr->CheckGVNFlag(kChangesInobjectFields); |
| 410 maps_stored_ |= instr->CheckGVNFlag(kChangesMaps); |
| 411 maps_stored_ |= instr->CheckGVNFlag(kChangesElementsKind); |
| 412 elements_stored_ |= instr->CheckGVNFlag(kChangesElementsKind); |
| 413 elements_stored_ |= instr->CheckGVNFlag(kChangesElementsPointer); |
| 414 } |
| 415 } |
| 416 } |
| 417 |
| 418 // Apply these effects to the given load elimination table. |
| 419 void Apply(HLoadEliminationTable* table) { |
| 420 if (fields_stored_) { |
| 421 table->Kill(); |
| 422 return; |
| 423 } |
| 424 if (maps_stored_) { |
| 425 table->KillOffset(JSObject::kMapOffset); |
| 426 } |
| 427 if (elements_stored_) { |
| 428 table->KillOffset(JSObject::kElementsOffset); |
| 429 } |
| 430 |
| 431 // Kill non-agreeing fields for each store contained in these effects. |
| 432 for (int i = 0; i < stores_.length(); i++) { |
| 433 HStoreNamedField* s = stores_[i]; |
| 434 int field = table->FieldOf(s->access()); |
| 435 if (field >= 0) { |
| 436 table->KillFieldInternal(s->object()->ActualValue(), field, s->value()); |
| 437 } |
| 438 } |
| 439 } |
| 440 |
| 441 // Union these effects with the other effects. |
| 442 void Union(HLoadEliminationEffects* that, Zone* zone) { |
| 443 maps_stored_ |= that->maps_stored_; |
| 444 fields_stored_ |= that->fields_stored_; |
| 445 elements_stored_ |= that->elements_stored_; |
| 446 for (int i = 0; i < that->stores_.length(); i++) { |
| 447 stores_.Add(that->stores_[i], zone); |
| 448 } |
| 449 } |
| 450 |
| 451 private: |
| 452 Zone* zone_; |
| 453 bool maps_stored_ : 1; |
| 454 bool fields_stored_ : 1; |
| 455 bool elements_stored_ : 1; |
| 456 ZoneList<HStoreNamedField*> stores_; |
| 457 }; |
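Editor's note: HLoadEliminationEffects is the loop-summary half of the flow-engine pairing. Process() records which broad categories a loop body can clobber and collects its named-field stores; Apply() then replays that summary on a table so values are not trusted across a back edge that may overwrite them. A rough sketch of the expected calling sequence, assuming the HFlowEngine drives the summary roughly like this (its exact API is outside this CL):

// Hypothetical loop-header handling.
HLoadEliminationEffects* effects = new(zone) HLoadEliminationEffects(zone);
for (HInstructionIterator it(body_block); !it.Done(); it.Advance()) {
  effects->Process(it.Current(), zone);   // summarize possible stores in the body
}
effects->Apply(state_at_loop_entry);      // conservatively kill affected entries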
245 | 458 |
246 | 459 |
247 // For code de-uglification. | 460 // The main routine of the analysis phase. Use the HFlowEngine for either a |
248 #define TRACE(x) if (FLAG_trace_load_elimination) PrintF x | 461 // local or a global analysis. |
| 462 void HLoadEliminationPhase::Run() { |
| 463 HFlowEngine<HLoadEliminationTable, HLoadEliminationEffects> |
| 464 engine(graph(), zone()); |
| 465 HAliasAnalyzer aliasing; |
| 466 HLoadEliminationTable* table = |
| 467 new(zone()) HLoadEliminationTable(zone(), &aliasing); |
249 | 468 |
250 | 469 if (GLOBAL) { |
251 // Eliminate loads and stores local to a block. | 470 // Perform a global analysis. |
252 void HLoadEliminationPhase::EliminateLoads(HBasicBlock* block) { | 471 engine.AnalyzeDominatedBlocks(graph()->blocks()->at(0), table); |
253 HAliasAnalyzer aliasing; | 472 } else { |
254 HLoadEliminationTable table(zone(), &aliasing); | 473 // Perform only local analysis. |
255 | 474 for (int i = 0; i < graph()->blocks()->length(); i++) { |
256 TRACE(("-- load-elim B%d -------------------------------------------------\n", | 475 table->Kill(); |
257 block->block_id())); | 476 engine.AnalyzeOneBlock(graph()->blocks()->at(i), table); |
258 | |
259 for (HInstructionIterator it(block); !it.Done(); it.Advance()) { | |
260 bool changed = false; | |
261 HInstruction* instr = it.Current(); | |
262 | |
263 switch (instr->opcode()) { | |
264 case HValue::kLoadNamedField: { | |
265 HLoadNamedField* load = HLoadNamedField::cast(instr); | |
266 TRACE((" process L%d field %d (o%d)\n", | |
267 instr->id(), | |
268 table.FieldOf(load->access()), | |
269 load->object()->ActualValue()->id())); | |
270 HValue* result = table.load(load); | |
271 if (result != instr) { | |
272 // The load can be replaced with a previous load or a value. | |
273 TRACE((" replace L%d -> v%d\n", instr->id(), result->id())); | |
274 instr->DeleteAndReplaceWith(result); | |
275 } | |
276 changed = true; | |
277 break; | |
278 } | |
279 case HValue::kStoreNamedField: { | |
280 HStoreNamedField* store = HStoreNamedField::cast(instr); | |
281 TRACE((" process S%d field %d (o%d) = v%d\n", | |
282 instr->id(), | |
283 table.FieldOf(store->access()), | |
284 store->object()->ActualValue()->id(), | |
285 store->value()->id())); | |
286 HValue* result = table.store(store); | |
287 if (result == NULL) { | |
288 // The store is redundant. Remove it. | |
289 TRACE((" remove S%d\n", instr->id())); | |
290 instr->DeleteAndReplaceWith(NULL); | |
291 } | |
292 changed = true; | |
293 break; | |
294 } | |
295 default: { | |
296 if (instr->CheckGVNFlag(kChangesInobjectFields)) { | |
297 TRACE((" kill-all i%d\n", instr->id())); | |
298 table.Kill(); | |
299 continue; | |
300 } | |
301 if (instr->CheckGVNFlag(kChangesMaps)) { | |
302 TRACE((" kill-maps i%d\n", instr->id())); | |
303 table.KillOffset(JSObject::kMapOffset); | |
304 } | |
305 if (instr->CheckGVNFlag(kChangesElementsKind)) { | |
306 TRACE((" kill-elements-kind i%d\n", instr->id())); | |
307 table.KillOffset(JSObject::kMapOffset); | |
308 table.KillOffset(JSObject::kElementsOffset); | |
309 } | |
310 if (instr->CheckGVNFlag(kChangesElementsPointer)) { | |
311 TRACE((" kill-elements i%d\n", instr->id())); | |
312 table.KillOffset(JSObject::kElementsOffset); | |
313 } | |
314 } | |
315 // Improvements possible: | |
316 // - learn from HCheckMaps for field 0 | |
317 // - remove unobservable stores (write-after-write) | |
318 } | |
319 | |
320 if (changed && FLAG_trace_load_elimination) { | |
321 table.Print(); | |
322 } | 477 } |
323 } | 478 } |
324 } | 479 } |
325 | 480 |
326 | |
327 } } // namespace v8::internal | 481 } } // namespace v8::internal |