Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(630)

Side by Side Diff: runtime/vm/flow_graph_optimizer.cc

Issue 184523002: Allocation sinking for contexts. (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: added tests Created 6 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/flow_graph_optimizer.h" 5 #include "vm/flow_graph_optimizer.h"
6 6
7 #include "vm/bit_vector.h" 7 #include "vm/bit_vector.h"
8 #include "vm/cha.h" 8 #include "vm/cha.h"
9 #include "vm/cpu.h" 9 #include "vm/cpu.h"
10 #include "vm/dart_entry.h" 10 #include "vm/dart_entry.h"
(...skipping 5469 matching lines...) Expand 10 before | Expand all | Expand 10 after
5480 return (kind_ == other->kind_) && 5480 return (kind_ == other->kind_) &&
5481 (representation_ == other->representation_) && 5481 (representation_ == other->representation_) &&
5482 (instance_ == other->instance_) && 5482 (instance_ == other->instance_) &&
5483 SameField(other); 5483 SameField(other);
5484 } 5484 }
5485 5485
5486 // Create a zone allocated copy of this place and assign given id to it. 5486 // Create a zone allocated copy of this place and assign given id to it.
5487 static Place* Wrap(Isolate* isolate, const Place& place, intptr_t id); 5487 static Place* Wrap(Isolate* isolate, const Place& place, intptr_t id);
5488 5488
5489 static bool IsAllocation(Definition* defn) { 5489 static bool IsAllocation(Definition* defn) {
5490 // TODO(vegorov): add CreateContext to this list.
5491 return (defn != NULL) && 5490 return (defn != NULL) &&
5492 (defn->IsAllocateObject() || 5491 (defn->IsAllocateObject() ||
5493 defn->IsCreateArray() || 5492 defn->IsCreateArray() ||
5493 defn->IsAllocateUninitializedContext() ||
5494 (defn->IsStaticCall() && 5494 (defn->IsStaticCall() &&
5495 defn->AsStaticCall()->IsRecognizedFactory())); 5495 defn->AsStaticCall()->IsRecognizedFactory()));
5496 } 5496 }
5497 5497
5498 private: 5498 private:
5499 Place(Kind kind, Definition* instance, intptr_t selector) 5499 Place(Kind kind, Definition* instance, intptr_t selector)
5500 : kind_(kind), 5500 : kind_(kind),
5501 representation_(kNoRepresentation), 5501 representation_(kNoRepresentation),
5502 instance_(instance), 5502 instance_(instance),
5503 raw_selector_(selector), 5503 raw_selector_(selector),
(...skipping 410 matching lines...) Expand 10 before | Expand all | Expand 10 after
5914 // the same offset e.g. through recognized kind. 5914 // the same offset e.g. through recognized kind.
5915 return true; 5915 return true;
5916 } 5916 }
5917 5917
5918 return ((place->kind() == Place::kField) || 5918 return ((place->kind() == Place::kField) ||
5919 (place->kind() == Place::kVMField)) && 5919 (place->kind() == Place::kVMField)) &&
5920 !CanBeAliased(place->instance()); 5920 !CanBeAliased(place->instance());
5921 } 5921 }
5922 5922
5923 // Returns true if there are direct loads from the given place. 5923 // Returns true if there are direct loads from the given place.
5924 bool HasLoadsFromPlace(Definition* defn, const Place* place) { 5924 bool HasLoadsFromPlace(Definition* defn, const Place* place) {
Vyacheslav Egorov (Google) 2014/11/13 13:01:42 It would be good if we could assert somehow that *
Florian Schneider 2014/11/13 13:38:53 Done.
5925 ASSERT((place->kind() == Place::kField) || 5925 ASSERT((place->kind() == Place::kField) ||
5926 (place->kind() == Place::kVMField)); 5926 (place->kind() == Place::kVMField));
5927 ASSERT(place->instance() == defn);
5928 5927
5929 for (Value* use = defn->input_use_list(); 5928 for (Value* use = defn->input_use_list();
5930 use != NULL; 5929 use != NULL;
5931 use = use->next_use()) { 5930 use = use->next_use()) {
5931 Instruction* instr = use->instruction();
5932 if ((instr->IsRedefinition() ||
5933 instr->IsAssertAssignable()) &&
5934 HasLoadsFromPlace(instr->AsDefinition(), place)) {
5935 return true;
5936 }
5932 bool is_load = false, is_store; 5937 bool is_load = false, is_store;
5933 Place load_place(use->instruction(), &is_load, &is_store); 5938 Place load_place(instr, &is_load, &is_store);
5934 5939
5935 if (is_load && load_place.Equals(place)) { 5940 if (is_load && load_place.Equals(place)) {
5936 return true; 5941 return true;
5937 } 5942 }
5938 } 5943 }
5939 5944
5940 return false; 5945 return false;
5941 } 5946 }
5942 5947
5943 // Check if any use of the definition can create an alias. 5948 // Check if any use of the definition can create an alias.
5944 // Can add more objects into aliasing_worklist_. 5949 // Can add more objects into aliasing_worklist_.
5945 bool AnyUseCreatesAlias(Definition* defn) { 5950 bool AnyUseCreatesAlias(Definition* defn) {
5946 for (Value* use = defn->input_use_list(); 5951 for (Value* use = defn->input_use_list();
5947 use != NULL; 5952 use != NULL;
5948 use = use->next_use()) { 5953 use = use->next_use()) {
5949 Instruction* instr = use->instruction(); 5954 Instruction* instr = use->instruction();
5950 if (instr->IsPushArgument() || 5955 if (instr->IsPushArgument() ||
5951 (instr->IsStoreIndexed() 5956 (instr->IsStoreIndexed()
5952 && (use->use_index() == StoreIndexedInstr::kValuePos)) || 5957 && (use->use_index() == StoreIndexedInstr::kValuePos)) ||
5953 instr->IsStoreStaticField() || 5958 instr->IsStoreStaticField() ||
5954 instr->IsPhi() || 5959 instr->IsPhi()) {
5955 instr->IsAssertAssignable() || 5960 return true;
5956 instr->IsRedefinition()) { 5961 } else if ((instr->IsAssertAssignable() || instr->IsRedefinition()) &&
5962 AnyUseCreatesAlias(instr->AsDefinition())) {
5957 return true; 5963 return true;
5958 } else if ((instr->IsStoreInstanceField() 5964 } else if ((instr->IsStoreInstanceField()
5959 && (use->use_index() != StoreInstanceFieldInstr::kInstancePos))) { 5965 && (use->use_index() != StoreInstanceFieldInstr::kInstancePos))) {
5960 ASSERT(use->use_index() == StoreInstanceFieldInstr::kValuePos); 5966 ASSERT(use->use_index() == StoreInstanceFieldInstr::kValuePos);
5961 // If we store this value into an object that is not aliased itself 5967 // If we store this value into an object that is not aliased itself
5962 // and we never load again then the store does not create an alias. 5968 // and we never load again then the store does not create an alias.
5963 StoreInstanceFieldInstr* store = instr->AsStoreInstanceField(); 5969 StoreInstanceFieldInstr* store = instr->AsStoreInstanceField();
5964 Definition* instance = store->instance()->definition(); 5970 Definition* instance =
5965 if (instance->IsAllocateObject() && !instance->Identity().IsAliased()) { 5971 store->instance()->definition()->OriginalDefinition();
5972 if (Place::IsAllocation(instance) &&
5973 !instance->Identity().IsAliased()) {
5966 bool is_load, is_store; 5974 bool is_load, is_store;
5967 Place store_place(instr, &is_load, &is_store); 5975 Place store_place(instr, &is_load, &is_store);
5968 5976
5969 if (!HasLoadsFromPlace(instance, &store_place)) { 5977 if (!HasLoadsFromPlace(instance, &store_place)) {
5970 // No loads found that match this store. If it is yet unknown if 5978 // No loads found that match this store. If it is yet unknown if
5971 // the object is not aliased then optimistically assume this but 5979 // the object is not aliased then optimistically assume this but
5972 // add it to the worklist to check its uses transitively. 5980 // add it to the worklist to check its uses transitively.
5973 if (instance->Identity().IsUnknown()) { 5981 if (instance->Identity().IsUnknown()) {
5974 instance->SetIdentity(AliasIdentity::NotAliased()); 5982 instance->SetIdentity(AliasIdentity::NotAliased());
5975 aliasing_worklist_.Add(instance); 5983 aliasing_worklist_.Add(instance);
5976 } 5984 }
5977 continue; 5985 continue;
5978 } 5986 }
5979 } 5987 }
5980
5981 return true; 5988 return true;
5982 } 5989 }
5983 } 5990 }
5984 return false; 5991 return false;
5985 } 5992 }
5986 5993
5987 // Mark any value stored into the given object as potentially aliased. 5994 // Mark any value stored into the given object as potentially aliased.
5988 void MarkStoredValuesEscaping(Definition* defn) { 5995 void MarkStoredValuesEscaping(Definition* defn) {
5989 if (!defn->IsAllocateObject()) {
5990 return;
5991 }
5992
5993 // Find all stores into this object. 5996 // Find all stores into this object.
5994 for (Value* use = defn->input_use_list(); 5997 for (Value* use = defn->input_use_list();
5995 use != NULL; 5998 use != NULL;
5996 use = use->next_use()) { 5999 use = use->next_use()) {
6000 if (use->instruction()->IsRedefinition() ||
6001 use->instruction()->IsAssertAssignable()) {
6002 MarkStoredValuesEscaping(use->instruction()->AsDefinition());
6003 continue;
6004 }
5997 if ((use->use_index() == StoreInstanceFieldInstr::kInstancePos) && 6005 if ((use->use_index() == StoreInstanceFieldInstr::kInstancePos) &&
5998 use->instruction()->IsStoreInstanceField()) { 6006 use->instruction()->IsStoreInstanceField()) {
5999 StoreInstanceFieldInstr* store = 6007 StoreInstanceFieldInstr* store =
6000 use->instruction()->AsStoreInstanceField(); 6008 use->instruction()->AsStoreInstanceField();
6001 Definition* value = store->value()->definition(); 6009 Definition* value = store->value()->definition()->OriginalDefinition();
6002 if (value->Identity().IsNotAliased()) { 6010 if (value->Identity().IsNotAliased()) {
6003 value->SetIdentity(AliasIdentity::Aliased()); 6011 value->SetIdentity(AliasIdentity::Aliased());
6004 identity_rollback_.Add(value); 6012 identity_rollback_.Add(value);
6005 6013
6006 // Add to worklist to propagate the mark transitively. 6014 // Add to worklist to propagate the mark transitively.
6007 aliasing_worklist_.Add(value); 6015 aliasing_worklist_.Add(value);
6008 } 6016 }
6009 } 6017 }
6010 } 6018 }
6011 } 6019 }
(...skipping 3423 matching lines...) Expand 10 before | Expand all | Expand 10 after
9435 } 9443 }
9436 9444
9437 return false; 9445 return false;
9438 } 9446 }
9439 9447
9440 9448
9441 // Right now we are attempting to sink allocation only into 9449 // Right now we are attempting to sink allocation only into
9442 // deoptimization exit. So candidate should only be used in StoreInstanceField 9450 // deoptimization exit. So candidate should only be used in StoreInstanceField
9443 // instructions that write into fields of the allocated object. 9451 // instructions that write into fields of the allocated object.
9444 // We do not support materialization of the object that has type arguments. 9452 // We do not support materialization of the object that has type arguments.
9445 static bool IsAllocationSinkingCandidate(AllocateObjectInstr* alloc, 9453 static bool IsAllocationSinkingCandidate(Definition* alloc,
9446 SafeUseCheck check_type) { 9454 SafeUseCheck check_type) {
9447 for (Value* use = alloc->input_use_list(); 9455 for (Value* use = alloc->input_use_list();
9448 use != NULL; 9456 use != NULL;
9449 use = use->next_use()) { 9457 use = use->next_use()) {
9450 if (!IsSafeUse(use, check_type)) { 9458 if (!IsSafeUse(use, check_type)) {
9451 if (FLAG_trace_optimization) { 9459 if (FLAG_trace_optimization) {
9452 OS::Print("use of %s at %s is unsafe for allocation sinking\n", 9460 OS::Print("use of %s at %s is unsafe for allocation sinking\n",
9453 alloc->ToCString(), 9461 alloc->ToCString(),
9454 use->instruction()->ToCString()); 9462 use->instruction()->ToCString());
9455 } 9463 }
(...skipping 12 matching lines...) Expand all
9468 if (store != NULL) { 9476 if (store != NULL) {
9469 return store->instance()->definition(); 9477 return store->instance()->definition();
9470 } 9478 }
9471 9479
9472 return NULL; 9480 return NULL;
9473 } 9481 }
9474 9482
9475 9483
9476 // Remove the given allocation from the graph. It is not observable. 9484 // Remove the given allocation from the graph. It is not observable.
9477 // If deoptimization occurs the object will be materialized. 9485 // If deoptimization occurs the object will be materialized.
9478 void AllocationSinking::EliminateAllocation(AllocateObjectInstr* alloc) { 9486 void AllocationSinking::EliminateAllocation(Definition* alloc) {
9479 ASSERT(IsAllocationSinkingCandidate(alloc, kStrictCheck)); 9487 ASSERT(IsAllocationSinkingCandidate(alloc, kStrictCheck));
9480 9488
9481 if (FLAG_trace_optimization) { 9489 if (FLAG_trace_optimization) {
9482 OS::Print("removing allocation from the graph: v%" Pd "\n", 9490 OS::Print("removing allocation from the graph: v%" Pd "\n",
9483 alloc->ssa_temp_index()); 9491 alloc->ssa_temp_index());
9484 } 9492 }
9485 9493
9486 // As an allocation sinking candidate it is only used in stores to its own 9494 // As an allocation sinking candidate it is only used in stores to its own
9487 // fields. Remove these stores. 9495 // fields. Remove these stores.
9488 for (Value* use = alloc->input_use_list(); 9496 for (Value* use = alloc->input_use_list();
(...skipping 25 matching lines...) Expand all
9514 // Find allocation instructions that can be potentially eliminated and 9522 // Find allocation instructions that can be potentially eliminated and
9515 // rematerialized at deoptimization exits if needed. See IsSafeUse 9523 // rematerialized at deoptimization exits if needed. See IsSafeUse
9516 // for the description of algorithm used below. 9524 // for the description of algorithm used below.
9517 void AllocationSinking::CollectCandidates() { 9525 void AllocationSinking::CollectCandidates() {
9518 // Optimistically collect all potential candidates. 9526 // Optimistically collect all potential candidates.
9519 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator(); 9527 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
9520 !block_it.Done(); 9528 !block_it.Done();
9521 block_it.Advance()) { 9529 block_it.Advance()) {
9522 BlockEntryInstr* block = block_it.Current(); 9530 BlockEntryInstr* block = block_it.Current();
9523 for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) { 9531 for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
9524 AllocateObjectInstr* alloc = it.Current()->AsAllocateObject(); 9532 { AllocateObjectInstr* alloc = it.Current()->AsAllocateObject();
9525 if ((alloc != NULL) && 9533 if ((alloc != NULL) &&
9526 IsAllocationSinkingCandidate(alloc, kOptimisticCheck)) { 9534 IsAllocationSinkingCandidate(alloc, kOptimisticCheck)) {
9527 alloc->SetIdentity(AliasIdentity::AllocationSinkingCandidate()); 9535 alloc->SetIdentity(AliasIdentity::AllocationSinkingCandidate());
9528 candidates_.Add(alloc); 9536 candidates_.Add(alloc);
9537 }
9538 }
9539 { AllocateUninitializedContextInstr* alloc =
9540 it.Current()->AsAllocateUninitializedContext();
9541 if ((alloc != NULL) &&
9542 IsAllocationSinkingCandidate(alloc, kOptimisticCheck)) {
9543 alloc->SetIdentity(AliasIdentity::AllocationSinkingCandidate());
9544 candidates_.Add(alloc);
9545 }
9529 } 9546 }
9530 } 9547 }
9531 } 9548 }
9532 9549
9533 // Transitively unmark all candidates that are not strictly valid. 9550 // Transitively unmark all candidates that are not strictly valid.
9534 bool changed; 9551 bool changed;
9535 do { 9552 do {
9536 changed = false; 9553 changed = false;
9537 for (intptr_t i = 0; i < candidates_.length(); i++) { 9554 for (intptr_t i = 0; i < candidates_.length(); i++) {
9538 AllocateObjectInstr* alloc = candidates_[i]; 9555 Definition* alloc = candidates_[i];
9539 if (alloc->Identity().IsAllocationSinkingCandidate()) { 9556 if (alloc->Identity().IsAllocationSinkingCandidate()) {
9540 if (!IsAllocationSinkingCandidate(alloc, kStrictCheck)) { 9557 if (!IsAllocationSinkingCandidate(alloc, kStrictCheck)) {
9541 alloc->SetIdentity(AliasIdentity::Unknown()); 9558 alloc->SetIdentity(AliasIdentity::Unknown());
9542 changed = true; 9559 changed = true;
9543 } 9560 }
9544 } 9561 }
9545 } 9562 }
9546 } while (changed); 9563 } while (changed);
9547 9564
9548 // Shrink the list of candidates removing all unmarked ones. 9565 // Shrink the list of candidates removing all unmarked ones.
9549 intptr_t j = 0; 9566 intptr_t j = 0;
9550 for (intptr_t i = 0; i < candidates_.length(); i++) { 9567 for (intptr_t i = 0; i < candidates_.length(); i++) {
9551 AllocateObjectInstr* alloc = candidates_[i]; 9568 Definition* alloc = candidates_[i];
9552 if (alloc->Identity().IsAllocationSinkingCandidate()) { 9569 if (alloc->Identity().IsAllocationSinkingCandidate()) {
9553 if (FLAG_trace_optimization) { 9570 if (FLAG_trace_optimization) {
9554 OS::Print("discovered allocation sinking candidate: v%" Pd "\n", 9571 OS::Print("discovered allocation sinking candidate: v%" Pd "\n",
9555 alloc->ssa_temp_index()); 9572 alloc->ssa_temp_index());
9556 } 9573 }
9557 9574
9558 if (j != i) { 9575 if (j != i) {
9559 candidates_[j] = alloc; 9576 candidates_[j] = alloc;
9560 } 9577 }
9561 j++; 9578 j++;
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after
9623 // the load forwarding because they flow into phis that load forwarding 9640 // the load forwarding because they flow into phis that load forwarding
9624 // inserts. Discover such allocations and remove them from the list 9641 // inserts. Discover such allocations and remove them from the list
9625 // of allocation sinking candidates undoing all changes that we did 9642 // of allocation sinking candidates undoing all changes that we did
9626 // in preparation for sinking these allocations. 9643 // in preparation for sinking these allocations.
9627 void AllocationSinking::DiscoverFailedCandidates() { 9644 void AllocationSinking::DiscoverFailedCandidates() {
9628 // Transitively unmark all candidates that are not strictly valid. 9645 // Transitively unmark all candidates that are not strictly valid.
9629 bool changed; 9646 bool changed;
9630 do { 9647 do {
9631 changed = false; 9648 changed = false;
9632 for (intptr_t i = 0; i < candidates_.length(); i++) { 9649 for (intptr_t i = 0; i < candidates_.length(); i++) {
9633 AllocateObjectInstr* alloc = candidates_[i]; 9650 Definition* alloc = candidates_[i];
9634 if (alloc->Identity().IsAllocationSinkingCandidate()) { 9651 if (alloc->Identity().IsAllocationSinkingCandidate()) {
9635 if (!IsAllocationSinkingCandidate(alloc, kStrictCheck)) { 9652 if (!IsAllocationSinkingCandidate(alloc, kStrictCheck)) {
9636 alloc->SetIdentity(AliasIdentity::Unknown()); 9653 alloc->SetIdentity(AliasIdentity::Unknown());
9637 changed = true; 9654 changed = true;
9638 } 9655 }
9639 } 9656 }
9640 } 9657 }
9641 } while (changed); 9658 } while (changed);
9642 9659
9643 // Remove all failed candidates from the candidates list. 9660 // Remove all failed candidates from the candidates list.
9644 intptr_t j = 0; 9661 intptr_t j = 0;
9645 for (intptr_t i = 0; i < candidates_.length(); i++) { 9662 for (intptr_t i = 0; i < candidates_.length(); i++) {
9646 AllocateObjectInstr* alloc = candidates_[i]; 9663 Definition* alloc = candidates_[i];
9647 if (!alloc->Identity().IsAllocationSinkingCandidate()) { 9664 if (!alloc->Identity().IsAllocationSinkingCandidate()) {
9648 if (FLAG_trace_optimization) { 9665 if (FLAG_trace_optimization) {
9649 OS::Print("allocation v%" Pd " can't be eliminated\n", 9666 OS::Print("allocation v%" Pd " can't be eliminated\n",
9650 alloc->ssa_temp_index()); 9667 alloc->ssa_temp_index());
9651 } 9668 }
9652 9669
9653 #ifdef DEBUG 9670 #ifdef DEBUG
9654 for (Value* use = alloc->env_use_list(); 9671 for (Value* use = alloc->env_use_list();
9655 use != NULL; 9672 use != NULL;
9656 use = use->next_use()) { 9673 use = use->next_use()) {
(...skipping 163 matching lines...) Expand 10 before | Expand all | Expand 10 after
9820 } 9837 }
9821 9838
9822 return NULL; 9839 return NULL;
9823 } 9840 }
9824 9841
9825 9842
9826 // Insert MaterializeObject instruction for the given allocation before 9843 // Insert MaterializeObject instruction for the given allocation before
9827 // the given instruction that can deoptimize. 9844 // the given instruction that can deoptimize.
9828 void AllocationSinking::CreateMaterializationAt( 9845 void AllocationSinking::CreateMaterializationAt(
9829 Instruction* exit, 9846 Instruction* exit,
9830 AllocateObjectInstr* alloc, 9847 Definition* alloc,
9831 const Class& cls,
9832 const ZoneGrowableArray<const Object*>& slots) { 9848 const ZoneGrowableArray<const Object*>& slots) {
9833 ZoneGrowableArray<Value*>* values = 9849 ZoneGrowableArray<Value*>* values =
9834 new(I) ZoneGrowableArray<Value*>(slots.length()); 9850 new(I) ZoneGrowableArray<Value*>(slots.length());
9835 9851
9836 // All loads should be inserted before the first materialization so that 9852 // All loads should be inserted before the first materialization so that
9837 // IR follows the following pattern: loads, materializations, deoptimizing 9853 // IR follows the following pattern: loads, materializations, deoptimizing
9838 // instruction. 9854 // instruction.
9839 Instruction* load_point = FirstMaterializationAt(exit); 9855 Instruction* load_point = FirstMaterializationAt(exit);
9840 9856
9841 // Insert load instruction for every field. 9857 // Insert load instruction for every field.
9842 for (intptr_t i = 0; i < slots.length(); i++) { 9858 for (intptr_t i = 0; i < slots.length(); i++) {
9843 LoadFieldInstr* load = slots[i]->IsField() 9859 LoadFieldInstr* load = slots[i]->IsField()
9844 ? new(I) LoadFieldInstr( 9860 ? new(I) LoadFieldInstr(
9845 new(I) Value(alloc), 9861 new(I) Value(alloc),
9846 &Field::Cast(*slots[i]), 9862 &Field::Cast(*slots[i]),
9847 AbstractType::ZoneHandle(I), 9863 AbstractType::ZoneHandle(I),
9848 alloc->token_pos()) 9864 alloc->token_pos())
9849 : new(I) LoadFieldInstr( 9865 : new(I) LoadFieldInstr(
9850 new(I) Value(alloc), 9866 new(I) Value(alloc),
9851 Smi::Cast(*slots[i]).Value(), 9867 Smi::Cast(*slots[i]).Value(),
9852 AbstractType::ZoneHandle(I), 9868 AbstractType::ZoneHandle(I),
9853 alloc->token_pos()); 9869 alloc->token_pos());
9854 flow_graph_->InsertBefore( 9870 flow_graph_->InsertBefore(
9855 load_point, load, NULL, FlowGraph::kValue); 9871 load_point, load, NULL, FlowGraph::kValue);
9856 values->Add(new(I) Value(load)); 9872 values->Add(new(I) Value(load));
9857 } 9873 }
9858 9874
9859 MaterializeObjectInstr* mat = 9875 MaterializeObjectInstr* mat = NULL;
9860 new(I) MaterializeObjectInstr(alloc, cls, slots, values); 9876 if (alloc->IsAllocateObject()) {
9877 mat = new(I) MaterializeObjectInstr(
9878 alloc->AsAllocateObject(), slots, values);
9879 } else {
9880 ASSERT(alloc->IsAllocateUninitializedContext());
9881 mat = new(I) MaterializeObjectInstr(
9882 alloc->AsAllocateUninitializedContext(), slots, values);
9883 }
9884
9861 flow_graph_->InsertBefore(exit, mat, NULL, FlowGraph::kValue); 9885 flow_graph_->InsertBefore(exit, mat, NULL, FlowGraph::kValue);
9862 9886
9863 // Replace all mentions of this allocation with a newly inserted 9887 // Replace all mentions of this allocation with a newly inserted
9864 // MaterializeObject instruction. 9888 // MaterializeObject instruction.
9865 // We must preserve the identity: all mentions are replaced by the same 9889 // We must preserve the identity: all mentions are replaced by the same
9866 // materialization. 9890 // materialization.
9867 for (Environment::DeepIterator env_it(exit->env()); 9891 for (Environment::DeepIterator env_it(exit->env());
9868 !env_it.Done(); 9892 !env_it.Done();
9869 env_it.Advance()) { 9893 env_it.Advance()) {
9870 Value* use = env_it.CurrentValue(); 9894 Value* use = env_it.CurrentValue();
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after
9943 // We are not removing allocations from the worklist not to waste space on 9967 // We are not removing allocations from the worklist not to waste space on
9944 // the side maintaining BitVector of already processed allocations: worklist 9968 // the side maintaining BitVector of already processed allocations: worklist
 9945 // is expected to be very small thus linear search in it is just as efficient 9969 // as a bitvector.
9946 // as a bitvector. 9970 // as a bitvector.
9947 for (intptr_t i = 0; i < worklist_.length(); i++) { 9971 for (intptr_t i = 0; i < worklist_.length(); i++) {
9948 Collect(worklist_[i]); 9972 Collect(worklist_[i]);
9949 } 9973 }
9950 } 9974 }
9951 9975
9952 9976
9953 void AllocationSinking::InsertMaterializations(AllocateObjectInstr* alloc) { 9977 void AllocationSinking::InsertMaterializations(Definition* alloc) {
9954 // Collect all fields that are written for this instance. 9978 // Collect all fields that are written for this instance.
9955 ZoneGrowableArray<const Object*>* slots = 9979 ZoneGrowableArray<const Object*>* slots =
9956 new(I) ZoneGrowableArray<const Object*>(5); 9980 new(I) ZoneGrowableArray<const Object*>(5);
9957 9981
9958 for (Value* use = alloc->input_use_list(); 9982 for (Value* use = alloc->input_use_list();
9959 use != NULL; 9983 use != NULL;
9960 use = use->next_use()) { 9984 use = use->next_use()) {
9961 StoreInstanceFieldInstr* store = use->instruction()->AsStoreInstanceField(); 9985 StoreInstanceFieldInstr* store = use->instruction()->AsStoreInstanceField();
9962 if ((store != NULL) && (store->instance()->definition() == alloc)) { 9986 if ((store != NULL) && (store->instance()->definition() == alloc)) {
9963 if (!store->field().IsNull()) { 9987 if (!store->field().IsNull()) {
9964 AddSlot(slots, store->field()); 9988 AddSlot(slots, store->field());
9965 } else { 9989 } else {
9966 AddSlot(slots, Smi::ZoneHandle(I, Smi::New(store->offset_in_bytes()))); 9990 AddSlot(slots, Smi::ZoneHandle(I, Smi::New(store->offset_in_bytes())));
9967 } 9991 }
9968 } 9992 }
9969 } 9993 }
9970 9994
9971 if (alloc->ArgumentCount() > 0) { 9995 if (alloc->ArgumentCount() > 0) {
9972 ASSERT(alloc->ArgumentCount() == 1); 9996 AllocateObjectInstr* alloc_object = alloc->AsAllocateObject();
9973 intptr_t type_args_offset = alloc->cls().type_arguments_field_offset(); 9997 ASSERT(alloc_object->ArgumentCount() == 1);
9998 intptr_t type_args_offset =
9999 alloc_object->cls().type_arguments_field_offset();
9974 AddSlot(slots, Smi::ZoneHandle(I, Smi::New(type_args_offset))); 10000 AddSlot(slots, Smi::ZoneHandle(I, Smi::New(type_args_offset)));
9975 } 10001 }
9976 10002
9977 // Collect all instructions that mention this object in the environment. 10003 // Collect all instructions that mention this object in the environment.
9978 exits_collector_.CollectTransitively(alloc); 10004 exits_collector_.CollectTransitively(alloc);
9979 10005
9980 // Insert materializations at environment uses. 10006 // Insert materializations at environment uses.
9981 for (intptr_t i = 0; i < exits_collector_.exits().length(); i++) { 10007 for (intptr_t i = 0; i < exits_collector_.exits().length(); i++) {
9982 CreateMaterializationAt( 10008 CreateMaterializationAt(
9983 exits_collector_.exits()[i], alloc, alloc->cls(), *slots); 10009 exits_collector_.exits()[i], alloc, *slots);
9984 } 10010 }
9985 } 10011 }
9986 10012
9987 10013
9988 } // namespace dart 10014 } // namespace dart
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698