Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(184)

Side by Side Diff: runtime/vm/flow_graph_optimizer.cc

Issue 184523002: Allocation sinking for contexts. (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: added new test Created 6 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/flow_graph_optimizer.h" 5 #include "vm/flow_graph_optimizer.h"
6 6
7 #include "vm/bit_vector.h" 7 #include "vm/bit_vector.h"
8 #include "vm/cha.h" 8 #include "vm/cha.h"
9 #include "vm/cpu.h" 9 #include "vm/cpu.h"
10 #include "vm/dart_entry.h" 10 #include "vm/dart_entry.h"
(...skipping 5469 matching lines...) Expand 10 before | Expand all | Expand 10 after
5480 return (kind_ == other->kind_) && 5480 return (kind_ == other->kind_) &&
5481 (representation_ == other->representation_) && 5481 (representation_ == other->representation_) &&
5482 (instance_ == other->instance_) && 5482 (instance_ == other->instance_) &&
5483 SameField(other); 5483 SameField(other);
5484 } 5484 }
5485 5485
5486 // Create a zone allocated copy of this place and assign given id to it. 5486 // Create a zone allocated copy of this place and assign given id to it.
5487 static Place* Wrap(Isolate* isolate, const Place& place, intptr_t id); 5487 static Place* Wrap(Isolate* isolate, const Place& place, intptr_t id);
5488 5488
5489 static bool IsAllocation(Definition* defn) { 5489 static bool IsAllocation(Definition* defn) {
5490 // TODO(vegorov): add CreateContext to this list.
5491 return (defn != NULL) && 5490 return (defn != NULL) &&
5492 (defn->IsAllocateObject() || 5491 (defn->IsAllocateObject() ||
5493 defn->IsCreateArray() || 5492 defn->IsCreateArray() ||
5493 defn->IsAllocateUninitializedContext() ||
5494 (defn->IsStaticCall() && 5494 (defn->IsStaticCall() &&
5495 defn->AsStaticCall()->IsRecognizedFactory())); 5495 defn->AsStaticCall()->IsRecognizedFactory()));
5496 } 5496 }
5497 5497
5498 private: 5498 private:
5499 Place(Kind kind, Definition* instance, intptr_t selector) 5499 Place(Kind kind, Definition* instance, intptr_t selector)
5500 : kind_(kind), 5500 : kind_(kind),
5501 representation_(kNoRepresentation), 5501 representation_(kNoRepresentation),
5502 instance_(instance), 5502 instance_(instance),
5503 raw_selector_(selector), 5503 raw_selector_(selector),
(...skipping 413 matching lines...) Expand 10 before | Expand all | Expand 10 after
5917 5917
5918 return ((place->kind() == Place::kField) || 5918 return ((place->kind() == Place::kField) ||
5919 (place->kind() == Place::kVMField)) && 5919 (place->kind() == Place::kVMField)) &&
5920 !CanBeAliased(place->instance()); 5920 !CanBeAliased(place->instance());
5921 } 5921 }
5922 5922
5923 // Returns true if there are direct loads from the given place. 5923 // Returns true if there are direct loads from the given place.
5924 bool HasLoadsFromPlace(Definition* defn, const Place* place) { 5924 bool HasLoadsFromPlace(Definition* defn, const Place* place) {
5925 ASSERT((place->kind() == Place::kField) || 5925 ASSERT((place->kind() == Place::kField) ||
5926 (place->kind() == Place::kVMField)); 5926 (place->kind() == Place::kVMField));
5927 ASSERT(place->instance() == defn);
5928 5927
5929 for (Value* use = defn->input_use_list(); 5928 for (Value* use = defn->input_use_list();
5930 use != NULL; 5929 use != NULL;
5931 use = use->next_use()) { 5930 use = use->next_use()) {
5931 Instruction* instr = use->instruction();
5932 if ((instr->IsRedefinition() ||
5933 instr->IsAssertAssignable()) &&
5934 HasLoadsFromPlace(instr->AsDefinition(), place)) {
5935 return true;
5936 }
5932 bool is_load = false, is_store; 5937 bool is_load = false, is_store;
5933 Place load_place(use->instruction(), &is_load, &is_store); 5938 Place load_place(instr, &is_load, &is_store);
5934 5939
5935 if (is_load && load_place.Equals(place)) { 5940 if (is_load && load_place.Equals(place)) {
5936 return true; 5941 return true;
5937 } 5942 }
5938 } 5943 }
5939 5944
5940 return false; 5945 return false;
5941 } 5946 }
5942 5947
5943 // Check if any use of the definition can create an alias. 5948 // Check if any use of the definition can create an alias.
5944 // Can add more objects into aliasing_worklist_. 5949 // Can add more objects into aliasing_worklist_.
5945 bool AnyUseCreatesAlias(Definition* defn) { 5950 bool AnyUseCreatesAlias(Definition* defn) {
5946 for (Value* use = defn->input_use_list(); 5951 for (Value* use = defn->input_use_list();
5947 use != NULL; 5952 use != NULL;
5948 use = use->next_use()) { 5953 use = use->next_use()) {
5949 Instruction* instr = use->instruction(); 5954 Instruction* instr = use->instruction();
5950 if (instr->IsPushArgument() || 5955 if (instr->IsPushArgument() ||
5951 (instr->IsStoreIndexed() 5956 (instr->IsStoreIndexed()
5952 && (use->use_index() == StoreIndexedInstr::kValuePos)) || 5957 && (use->use_index() == StoreIndexedInstr::kValuePos)) ||
5953 instr->IsStoreStaticField() || 5958 instr->IsStoreStaticField() ||
5954 instr->IsPhi() || 5959 instr->IsPhi()) {
5955 instr->IsAssertAssignable() || 5960 return true;
5956 instr->IsRedefinition()) { 5961 } else if ((instr->IsAssertAssignable() || instr->IsRedefinition()) &&
5962 AnyUseCreatesAlias(instr->AsDefinition())) {
5957 return true; 5963 return true;
5958 } else if ((instr->IsStoreInstanceField() 5964 } else if ((instr->IsStoreInstanceField()
5959 && (use->use_index() != StoreInstanceFieldInstr::kInstancePos))) { 5965 && (use->use_index() != StoreInstanceFieldInstr::kInstancePos))) {
5960 ASSERT(use->use_index() == StoreInstanceFieldInstr::kValuePos); 5966 ASSERT(use->use_index() == StoreInstanceFieldInstr::kValuePos);
5961 // If we store this value into an object that is not aliased itself 5967 // If we store this value into an object that is not aliased itself
5962 // and we never load again then the store does not create an alias. 5968 // and we never load again then the store does not create an alias.
5963 StoreInstanceFieldInstr* store = instr->AsStoreInstanceField(); 5969 StoreInstanceFieldInstr* store = instr->AsStoreInstanceField();
5964 Definition* instance = store->instance()->definition(); 5970 Definition* instance =
5965 if (instance->IsAllocateObject() && !instance->Identity().IsAliased()) { 5971 store->instance()->definition()->OriginalDefinition();
5972 if (Place::IsAllocation(instance) &&
5973 !instance->Identity().IsAliased()) {
5966 bool is_load, is_store; 5974 bool is_load, is_store;
5967 Place store_place(instr, &is_load, &is_store); 5975 Place store_place(instr, &is_load, &is_store);
5968 5976
5969 if (!HasLoadsFromPlace(instance, &store_place)) { 5977 if (!HasLoadsFromPlace(instance, &store_place)) {
5970 // No loads found that match this store. If it is yet unknown if 5978 // No loads found that match this store. If it is yet unknown if
5971 // the object is not aliased then optimistically assume this but 5979 // the object is not aliased then optimistically assume this but
5972 // add it to the worklist to check its uses transitively. 5980 // add it to the worklist to check its uses transitively.
5973 if (instance->Identity().IsUnknown()) { 5981 if (instance->Identity().IsUnknown()) {
5974 instance->SetIdentity(AliasIdentity::NotAliased()); 5982 instance->SetIdentity(AliasIdentity::NotAliased());
5975 aliasing_worklist_.Add(instance); 5983 aliasing_worklist_.Add(instance);
5976 } 5984 }
5977 continue; 5985 continue;
5978 } 5986 }
5979 } 5987 }
5980
5981 return true; 5988 return true;
5982 } 5989 }
5983 } 5990 }
5984 return false; 5991 return false;
5985 } 5992 }
5986 5993
5987 // Mark any value stored into the given object as potentially aliased. 5994 // Mark any value stored into the given object as potentially aliased.
5988 void MarkStoredValuesEscaping(Definition* defn) { 5995 void MarkStoredValuesEscaping(Definition* defn) {
5989 if (!defn->IsAllocateObject()) {
5990 return;
5991 }
5992
5993 // Find all stores into this object. 5996 // Find all stores into this object.
5994 for (Value* use = defn->input_use_list(); 5997 for (Value* use = defn->input_use_list();
5995 use != NULL; 5998 use != NULL;
5996 use = use->next_use()) { 5999 use = use->next_use()) {
6000 if (use->instruction()->IsRedefinition() ||
6001 use->instruction()->IsAssertAssignable()) {
6002 MarkStoredValuesEscaping(use->instruction()->AsDefinition());
6003 continue;
6004 }
5997 if ((use->use_index() == StoreInstanceFieldInstr::kInstancePos) && 6005 if ((use->use_index() == StoreInstanceFieldInstr::kInstancePos) &&
5998 use->instruction()->IsStoreInstanceField()) { 6006 use->instruction()->IsStoreInstanceField()) {
5999 StoreInstanceFieldInstr* store = 6007 StoreInstanceFieldInstr* store =
6000 use->instruction()->AsStoreInstanceField(); 6008 use->instruction()->AsStoreInstanceField();
6001 Definition* value = store->value()->definition(); 6009 Definition* value = store->value()->definition()->OriginalDefinition();
6002 if (value->Identity().IsNotAliased()) { 6010 if (value->Identity().IsNotAliased()) {
6003 value->SetIdentity(AliasIdentity::Aliased()); 6011 value->SetIdentity(AliasIdentity::Aliased());
6004 identity_rollback_.Add(value); 6012 identity_rollback_.Add(value);
6005 6013
6006 // Add to worklist to propagate the mark transitively. 6014 // Add to worklist to propagate the mark transitively.
6007 aliasing_worklist_.Add(value); 6015 aliasing_worklist_.Add(value);
6008 } 6016 }
6009 } 6017 }
6010 } 6018 }
6011 } 6019 }
6012 6020
6013 // Determine if the given definition can't be aliased. 6021 // Determine if the given definition can't be aliased.
6014 void ComputeAliasing(Definition* alloc) { 6022 void ComputeAliasing(Definition* alloc) {
6023 ASSERT(Place::IsAllocation(alloc));
6015 ASSERT(alloc->Identity().IsUnknown()); 6024 ASSERT(alloc->Identity().IsUnknown());
6016 ASSERT(aliasing_worklist_.is_empty()); 6025 ASSERT(aliasing_worklist_.is_empty());
6017 6026
6018 alloc->SetIdentity(AliasIdentity::NotAliased()); 6027 alloc->SetIdentity(AliasIdentity::NotAliased());
6019 aliasing_worklist_.Add(alloc); 6028 aliasing_worklist_.Add(alloc);
6020 6029
6021 while (!aliasing_worklist_.is_empty()) { 6030 while (!aliasing_worklist_.is_empty()) {
6022 Definition* defn = aliasing_worklist_.RemoveLast(); 6031 Definition* defn = aliasing_worklist_.RemoveLast();
6023 6032 ASSERT(Place::IsAllocation(defn));
6024 // If the definition in the worklist was optimistically marked as 6033 // If the definition in the worklist was optimistically marked as
6025 // not-aliased check that optimistic assumption still holds: check if 6034 // not-aliased check that optimistic assumption still holds: check if
6026 // any of its uses can create an alias. 6035 // any of its uses can create an alias.
6027 if (!defn->Identity().IsAliased() && AnyUseCreatesAlias(defn)) { 6036 if (!defn->Identity().IsAliased() && AnyUseCreatesAlias(defn)) {
6028 defn->SetIdentity(AliasIdentity::Aliased()); 6037 defn->SetIdentity(AliasIdentity::Aliased());
6029 identity_rollback_.Add(defn); 6038 identity_rollback_.Add(defn);
6030 } 6039 }
6031 6040
6032 // If the allocation site is marked as aliased conservatively mark 6041 // If the allocation site is marked as aliased conservatively mark
6033 // any values stored into the object aliased too. 6042 // any values stored into the object aliased too.
(...skipping 3401 matching lines...) Expand 10 before | Expand all | Expand 10 after
9435 } 9444 }
9436 9445
9437 return false; 9446 return false;
9438 } 9447 }
9439 9448
9440 9449
9441 // Right now we are attempting to sink allocation only into 9450 // Right now we are attempting to sink allocation only into
9442 // deoptimization exit. So candidate should only be used in StoreInstanceField 9451 // deoptimization exit. So candidate should only be used in StoreInstanceField
9443 // instructions that write into fields of the allocated object. 9452 // instructions that write into fields of the allocated object.
9444 // We do not support materialization of the object that has type arguments. 9453 // We do not support materialization of the object that has type arguments.
9445 static bool IsAllocationSinkingCandidate(AllocateObjectInstr* alloc, 9454 static bool IsAllocationSinkingCandidate(Definition* alloc,
9446 SafeUseCheck check_type) { 9455 SafeUseCheck check_type) {
9447 for (Value* use = alloc->input_use_list(); 9456 for (Value* use = alloc->input_use_list();
9448 use != NULL; 9457 use != NULL;
9449 use = use->next_use()) { 9458 use = use->next_use()) {
9450 if (!IsSafeUse(use, check_type)) { 9459 if (!IsSafeUse(use, check_type)) {
9451 if (FLAG_trace_optimization) { 9460 if (FLAG_trace_optimization) {
9452 OS::Print("use of %s at %s is unsafe for allocation sinking\n", 9461 OS::Print("use of %s at %s is unsafe for allocation sinking\n",
9453 alloc->ToCString(), 9462 alloc->ToCString(),
9454 use->instruction()->ToCString()); 9463 use->instruction()->ToCString());
9455 } 9464 }
(...skipping 12 matching lines...) Expand all
9468 if (store != NULL) { 9477 if (store != NULL) {
9469 return store->instance()->definition(); 9478 return store->instance()->definition();
9470 } 9479 }
9471 9480
9472 return NULL; 9481 return NULL;
9473 } 9482 }
9474 9483
9475 9484
9476 // Remove the given allocation from the graph. It is not observable. 9485 // Remove the given allocation from the graph. It is not observable.
9477 // If deoptimization occurs the object will be materialized. 9486 // If deoptimization occurs the object will be materialized.
9478 void AllocationSinking::EliminateAllocation(AllocateObjectInstr* alloc) { 9487 void AllocationSinking::EliminateAllocation(Definition* alloc) {
9479 ASSERT(IsAllocationSinkingCandidate(alloc, kStrictCheck)); 9488 ASSERT(IsAllocationSinkingCandidate(alloc, kStrictCheck));
9480 9489
9481 if (FLAG_trace_optimization) { 9490 if (FLAG_trace_optimization) {
9482 OS::Print("removing allocation from the graph: v%" Pd "\n", 9491 OS::Print("removing allocation from the graph: v%" Pd "\n",
9483 alloc->ssa_temp_index()); 9492 alloc->ssa_temp_index());
9484 } 9493 }
9485 9494
9486 // As an allocation sinking candidate it is only used in stores to its own 9495 // As an allocation sinking candidate it is only used in stores to its own
9487 // fields. Remove these stores. 9496 // fields. Remove these stores.
9488 for (Value* use = alloc->input_use_list(); 9497 for (Value* use = alloc->input_use_list();
(...skipping 25 matching lines...) Expand all
9514 // Find allocation instructions that can be potentially eliminated and 9523 // Find allocation instructions that can be potentially eliminated and
9515 // rematerialized at deoptimization exits if needed. See IsSafeUse 9524 // rematerialized at deoptimization exits if needed. See IsSafeUse
9516 // for the description of algorithm used below. 9525 // for the description of algorithm used below.
9517 void AllocationSinking::CollectCandidates() { 9526 void AllocationSinking::CollectCandidates() {
9518 // Optimistically collect all potential candidates. 9527 // Optimistically collect all potential candidates.
9519 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator(); 9528 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
9520 !block_it.Done(); 9529 !block_it.Done();
9521 block_it.Advance()) { 9530 block_it.Advance()) {
9522 BlockEntryInstr* block = block_it.Current(); 9531 BlockEntryInstr* block = block_it.Current();
9523 for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) { 9532 for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
9524 AllocateObjectInstr* alloc = it.Current()->AsAllocateObject(); 9533 { AllocateObjectInstr* alloc = it.Current()->AsAllocateObject();
9525 if ((alloc != NULL) && 9534 if ((alloc != NULL) &&
9526 IsAllocationSinkingCandidate(alloc, kOptimisticCheck)) { 9535 IsAllocationSinkingCandidate(alloc, kOptimisticCheck)) {
9527 alloc->SetIdentity(AliasIdentity::AllocationSinkingCandidate()); 9536 alloc->SetIdentity(AliasIdentity::AllocationSinkingCandidate());
9528 candidates_.Add(alloc); 9537 candidates_.Add(alloc);
9538 }
9539 }
9540 { AllocateUninitializedContextInstr* alloc =
9541 it.Current()->AsAllocateUninitializedContext();
9542 if ((alloc != NULL) &&
9543 IsAllocationSinkingCandidate(alloc, kOptimisticCheck)) {
9544 alloc->SetIdentity(AliasIdentity::AllocationSinkingCandidate());
9545 candidates_.Add(alloc);
9546 }
9529 } 9547 }
9530 } 9548 }
9531 } 9549 }
9532 9550
9533 // Transitively unmark all candidates that are not strictly valid. 9551 // Transitively unmark all candidates that are not strictly valid.
9534 bool changed; 9552 bool changed;
9535 do { 9553 do {
9536 changed = false; 9554 changed = false;
9537 for (intptr_t i = 0; i < candidates_.length(); i++) { 9555 for (intptr_t i = 0; i < candidates_.length(); i++) {
9538 AllocateObjectInstr* alloc = candidates_[i]; 9556 Definition* alloc = candidates_[i];
9539 if (alloc->Identity().IsAllocationSinkingCandidate()) { 9557 if (alloc->Identity().IsAllocationSinkingCandidate()) {
9540 if (!IsAllocationSinkingCandidate(alloc, kStrictCheck)) { 9558 if (!IsAllocationSinkingCandidate(alloc, kStrictCheck)) {
9541 alloc->SetIdentity(AliasIdentity::Unknown()); 9559 alloc->SetIdentity(AliasIdentity::Unknown());
9542 changed = true; 9560 changed = true;
9543 } 9561 }
9544 } 9562 }
9545 } 9563 }
9546 } while (changed); 9564 } while (changed);
9547 9565
9548 // Shrink the list of candidates removing all unmarked ones. 9566 // Shrink the list of candidates removing all unmarked ones.
9549 intptr_t j = 0; 9567 intptr_t j = 0;
9550 for (intptr_t i = 0; i < candidates_.length(); i++) { 9568 for (intptr_t i = 0; i < candidates_.length(); i++) {
9551 AllocateObjectInstr* alloc = candidates_[i]; 9569 Definition* alloc = candidates_[i];
9552 if (alloc->Identity().IsAllocationSinkingCandidate()) { 9570 if (alloc->Identity().IsAllocationSinkingCandidate()) {
9553 if (FLAG_trace_optimization) { 9571 if (FLAG_trace_optimization) {
9554 OS::Print("discovered allocation sinking candidate: v%" Pd "\n", 9572 OS::Print("discovered allocation sinking candidate: v%" Pd "\n",
9555 alloc->ssa_temp_index()); 9573 alloc->ssa_temp_index());
9556 } 9574 }
9557 9575
9558 if (j != i) { 9576 if (j != i) {
9559 candidates_[j] = alloc; 9577 candidates_[j] = alloc;
9560 } 9578 }
9561 j++; 9579 j++;
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after
9623 // the load forwarding because they flow into phis that load forwarding 9641 // the load forwarding because they flow into phis that load forwarding
9624 // inserts. Discover such allocations and remove them from the list 9642 // inserts. Discover such allocations and remove them from the list
9625 // of allocation sinking candidates undoing all changes that we did 9643 // of allocation sinking candidates undoing all changes that we did
9626 // in preparation for sinking these allocations. 9644 // in preparation for sinking these allocations.
9627 void AllocationSinking::DiscoverFailedCandidates() { 9645 void AllocationSinking::DiscoverFailedCandidates() {
9628 // Transitively unmark all candidates that are not strictly valid. 9646 // Transitively unmark all candidates that are not strictly valid.
9629 bool changed; 9647 bool changed;
9630 do { 9648 do {
9631 changed = false; 9649 changed = false;
9632 for (intptr_t i = 0; i < candidates_.length(); i++) { 9650 for (intptr_t i = 0; i < candidates_.length(); i++) {
9633 AllocateObjectInstr* alloc = candidates_[i]; 9651 Definition* alloc = candidates_[i];
9634 if (alloc->Identity().IsAllocationSinkingCandidate()) { 9652 if (alloc->Identity().IsAllocationSinkingCandidate()) {
9635 if (!IsAllocationSinkingCandidate(alloc, kStrictCheck)) { 9653 if (!IsAllocationSinkingCandidate(alloc, kStrictCheck)) {
9636 alloc->SetIdentity(AliasIdentity::Unknown()); 9654 alloc->SetIdentity(AliasIdentity::Unknown());
9637 changed = true; 9655 changed = true;
9638 } 9656 }
9639 } 9657 }
9640 } 9658 }
9641 } while (changed); 9659 } while (changed);
9642 9660
9643 // Remove all failed candidates from the candidates list. 9661 // Remove all failed candidates from the candidates list.
9644 intptr_t j = 0; 9662 intptr_t j = 0;
9645 for (intptr_t i = 0; i < candidates_.length(); i++) { 9663 for (intptr_t i = 0; i < candidates_.length(); i++) {
9646 AllocateObjectInstr* alloc = candidates_[i]; 9664 Definition* alloc = candidates_[i];
9647 if (!alloc->Identity().IsAllocationSinkingCandidate()) { 9665 if (!alloc->Identity().IsAllocationSinkingCandidate()) {
9648 if (FLAG_trace_optimization) { 9666 if (FLAG_trace_optimization) {
9649 OS::Print("allocation v%" Pd " can't be eliminated\n", 9667 OS::Print("allocation v%" Pd " can't be eliminated\n",
9650 alloc->ssa_temp_index()); 9668 alloc->ssa_temp_index());
9651 } 9669 }
9652 9670
9653 #ifdef DEBUG 9671 #ifdef DEBUG
9654 for (Value* use = alloc->env_use_list(); 9672 for (Value* use = alloc->env_use_list();
9655 use != NULL; 9673 use != NULL;
9656 use = use->next_use()) { 9674 use = use->next_use()) {
(...skipping 163 matching lines...) Expand 10 before | Expand all | Expand 10 after
9820 } 9838 }
9821 9839
9822 return NULL; 9840 return NULL;
9823 } 9841 }
9824 9842
9825 9843
9826 // Insert MaterializeObject instruction for the given allocation before 9844 // Insert MaterializeObject instruction for the given allocation before
9827 // the given instruction that can deoptimize. 9845 // the given instruction that can deoptimize.
9828 void AllocationSinking::CreateMaterializationAt( 9846 void AllocationSinking::CreateMaterializationAt(
9829 Instruction* exit, 9847 Instruction* exit,
9830 AllocateObjectInstr* alloc, 9848 Definition* alloc,
9831 const Class& cls,
9832 const ZoneGrowableArray<const Object*>& slots) { 9849 const ZoneGrowableArray<const Object*>& slots) {
9833 ZoneGrowableArray<Value*>* values = 9850 ZoneGrowableArray<Value*>* values =
9834 new(I) ZoneGrowableArray<Value*>(slots.length()); 9851 new(I) ZoneGrowableArray<Value*>(slots.length());
9835 9852
9836 // All loads should be inserted before the first materialization so that 9853 // All loads should be inserted before the first materialization so that
9837 // IR follows the following pattern: loads, materializations, deoptimizing 9854 // IR follows the following pattern: loads, materializations, deoptimizing
9838 // instruction. 9855 // instruction.
9839 Instruction* load_point = FirstMaterializationAt(exit); 9856 Instruction* load_point = FirstMaterializationAt(exit);
9840 9857
9841 // Insert load instruction for every field. 9858 // Insert load instruction for every field.
9842 for (intptr_t i = 0; i < slots.length(); i++) { 9859 for (intptr_t i = 0; i < slots.length(); i++) {
9843 LoadFieldInstr* load = slots[i]->IsField() 9860 LoadFieldInstr* load = slots[i]->IsField()
9844 ? new(I) LoadFieldInstr( 9861 ? new(I) LoadFieldInstr(
9845 new(I) Value(alloc), 9862 new(I) Value(alloc),
9846 &Field::Cast(*slots[i]), 9863 &Field::Cast(*slots[i]),
9847 AbstractType::ZoneHandle(I), 9864 AbstractType::ZoneHandle(I),
9848 alloc->token_pos()) 9865 alloc->token_pos())
9849 : new(I) LoadFieldInstr( 9866 : new(I) LoadFieldInstr(
9850 new(I) Value(alloc), 9867 new(I) Value(alloc),
9851 Smi::Cast(*slots[i]).Value(), 9868 Smi::Cast(*slots[i]).Value(),
9852 AbstractType::ZoneHandle(I), 9869 AbstractType::ZoneHandle(I),
9853 alloc->token_pos()); 9870 alloc->token_pos());
9854 flow_graph_->InsertBefore( 9871 flow_graph_->InsertBefore(
9855 load_point, load, NULL, FlowGraph::kValue); 9872 load_point, load, NULL, FlowGraph::kValue);
9856 values->Add(new(I) Value(load)); 9873 values->Add(new(I) Value(load));
9857 } 9874 }
9858 9875
9859 MaterializeObjectInstr* mat = 9876 MaterializeObjectInstr* mat = NULL;
9860 new(I) MaterializeObjectInstr(alloc, cls, slots, values); 9877 if (alloc->IsAllocateObject()) {
9878 mat = new(I) MaterializeObjectInstr(
9879 alloc->AsAllocateObject(), slots, values);
9880 } else {
9881 ASSERT(alloc->IsAllocateUninitializedContext());
9882 mat = new(I) MaterializeObjectInstr(
9883 alloc->AsAllocateUninitializedContext(), slots, values);
9884 }
9885
9861 flow_graph_->InsertBefore(exit, mat, NULL, FlowGraph::kValue); 9886 flow_graph_->InsertBefore(exit, mat, NULL, FlowGraph::kValue);
9862 9887
9863 // Replace all mentions of this allocation with a newly inserted 9888 // Replace all mentions of this allocation with a newly inserted
9864 // MaterializeObject instruction. 9889 // MaterializeObject instruction.
9865 // We must preserve the identity: all mentions are replaced by the same 9890 // We must preserve the identity: all mentions are replaced by the same
9866 // materialization. 9891 // materialization.
9867 for (Environment::DeepIterator env_it(exit->env()); 9892 for (Environment::DeepIterator env_it(exit->env());
9868 !env_it.Done(); 9893 !env_it.Done();
9869 env_it.Advance()) { 9894 env_it.Advance()) {
9870 Value* use = env_it.CurrentValue(); 9895 Value* use = env_it.CurrentValue();
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after
9943 // We are not removing allocations from the worklist not to waste space on 9968 // We are not removing allocations from the worklist not to waste space on
9944 // the side maintaining BitVector of already processed allocations: worklist 9969 // is expected to be very small thus linear search in it is just as efficient
9945 // is expected to be very small thus linear search in it is just as efficient 9970 // as a bitvector.
9946 // as a bitvector. 9971 // as a bitvector.
9947 for (intptr_t i = 0; i < worklist_.length(); i++) { 9972 for (intptr_t i = 0; i < worklist_.length(); i++) {
9948 Collect(worklist_[i]); 9973 Collect(worklist_[i]);
9949 } 9974 }
9950 } 9975 }
9951 9976
9952 9977
9953 void AllocationSinking::InsertMaterializations(AllocateObjectInstr* alloc) { 9978 void AllocationSinking::InsertMaterializations(Definition* alloc) {
9954 // Collect all fields that are written for this instance. 9979 // Collect all fields that are written for this instance.
9955 ZoneGrowableArray<const Object*>* slots = 9980 ZoneGrowableArray<const Object*>* slots =
9956 new(I) ZoneGrowableArray<const Object*>(5); 9981 new(I) ZoneGrowableArray<const Object*>(5);
9957 9982
9958 for (Value* use = alloc->input_use_list(); 9983 for (Value* use = alloc->input_use_list();
9959 use != NULL; 9984 use != NULL;
9960 use = use->next_use()) { 9985 use = use->next_use()) {
9961 StoreInstanceFieldInstr* store = use->instruction()->AsStoreInstanceField(); 9986 StoreInstanceFieldInstr* store = use->instruction()->AsStoreInstanceField();
9962 if ((store != NULL) && (store->instance()->definition() == alloc)) { 9987 if ((store != NULL) && (store->instance()->definition() == alloc)) {
9963 if (!store->field().IsNull()) { 9988 if (!store->field().IsNull()) {
9964 AddSlot(slots, store->field()); 9989 AddSlot(slots, store->field());
9965 } else { 9990 } else {
9966 AddSlot(slots, Smi::ZoneHandle(I, Smi::New(store->offset_in_bytes()))); 9991 AddSlot(slots, Smi::ZoneHandle(I, Smi::New(store->offset_in_bytes())));
9967 } 9992 }
9968 } 9993 }
9969 } 9994 }
9970 9995
9971 if (alloc->ArgumentCount() > 0) { 9996 if (alloc->ArgumentCount() > 0) {
9972 ASSERT(alloc->ArgumentCount() == 1); 9997 AllocateObjectInstr* alloc_object = alloc->AsAllocateObject();
9973 intptr_t type_args_offset = alloc->cls().type_arguments_field_offset(); 9998 ASSERT(alloc_object->ArgumentCount() == 1);
9999 intptr_t type_args_offset =
10000 alloc_object->cls().type_arguments_field_offset();
9974 AddSlot(slots, Smi::ZoneHandle(I, Smi::New(type_args_offset))); 10001 AddSlot(slots, Smi::ZoneHandle(I, Smi::New(type_args_offset)));
9975 } 10002 }
9976 10003
9977 // Collect all instructions that mention this object in the environment. 10004 // Collect all instructions that mention this object in the environment.
9978 exits_collector_.CollectTransitively(alloc); 10005 exits_collector_.CollectTransitively(alloc);
9979 10006
9980 // Insert materializations at environment uses. 10007 // Insert materializations at environment uses.
9981 for (intptr_t i = 0; i < exits_collector_.exits().length(); i++) { 10008 for (intptr_t i = 0; i < exits_collector_.exits().length(); i++) {
9982 CreateMaterializationAt( 10009 CreateMaterializationAt(
9983 exits_collector_.exits()[i], alloc, alloc->cls(), *slots); 10010 exits_collector_.exits()[i], alloc, *slots);
9984 } 10011 }
9985 } 10012 }
9986 10013
9987 10014
9988 } // namespace dart 10015 } // namespace dart
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698