Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_optimizer.cc

Issue 184523002: Allocation sinking for contexts. (Closed)
Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: support Redefinitions in alias computation. Created 6 years, 1 month ago.
Index: runtime/vm/flow_graph_optimizer.cc
===================================================================
--- runtime/vm/flow_graph_optimizer.cc (revision 41455)
+++ runtime/vm/flow_graph_optimizer.cc (working copy)
@@ -5435,10 +5435,10 @@
static Place* Wrap(Isolate* isolate, const Place& place, intptr_t id);
static bool IsAllocation(Definition* defn) {
- // TODO(vegorov): add CreateContext to this list.
return (defn != NULL) &&
(defn->IsAllocateObject() ||
defn->IsCreateArray() ||
+ defn->IsAllocateUninitializedContext() ||
(defn->IsStaticCall() &&
defn->AsStaticCall()->IsRecognizedFactory()));
}
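Note on the hunk above: AllocateUninitializedContext now counts as an allocation the aliasing pass can reason about field by field. Roughly, such an allocation keeps a non-aliased identity unless one of its uses lets it escape; a condensed sketch of that idea (structure and helper names simplified, not the literal code in this pass):

    // Sketch: how a recognized allocation is classified by alias analysis.
    // AnyUseCreatesAlias stands for the escape check changed in the next hunk.
    void ClassifyAllocation(Definition* defn) {
      if (Place::IsAllocation(defn) && !AnyUseCreatesAlias(defn)) {
        defn->SetIdentity(AliasIdentity::NotAliased());  // precise tracking is safe
      } else {
        defn->SetIdentity(AliasIdentity::Aliased());     // treated conservatively
      }
    }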
@@ -5899,10 +5899,11 @@
(instr->IsStoreIndexed()
&& (use->use_index() == StoreIndexedInstr::kValuePos)) ||
instr->IsStoreStaticField() ||
- instr->IsPhi() ||
- instr->IsAssertAssignable() ||
- instr->IsRedefinition()) {
+ instr->IsPhi()) {
return true;
+ } else if (instr->IsAssertAssignable() ||
+ instr->IsRedefinition()) {
+ return AnyUseCreatesAlias(instr->AsDefinition());
} else if ((instr->IsStoreInstanceField()
&& (use->use_index() != StoreInstanceFieldInstr::kInstancePos))) {
ASSERT(use->use_index() == StoreInstanceFieldInstr::kValuePos);
@@ -5910,7 +5911,8 @@
// and we never load again then the store does not create an alias.
StoreInstanceFieldInstr* store = instr->AsStoreInstanceField();
Definition* instance = store->instance()->definition();
- if (instance->IsAllocateObject() && !instance->Identity().IsAliased()) {
+ if (Place::IsAllocation(instance) &&
+ !instance->Identity().IsAliased()) {
bool is_load, is_store;
Place store_place(instr, &is_load, &is_store);
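This hunk is the change named in the patch-set title: a use by AssertAssignable or Redefinition no longer marks the value as aliased outright; instead the check walks into the uses of the pass-through definition. A condensed sketch of the resulting shape, with the surrounding loop shown for context (free-standing form; UseCreatesAlias is a hypothetical stand-in for the remaining cases in this hunk):

    bool AnyUseCreatesAlias(Definition* defn) {
      for (Value* use = defn->input_use_list();
           use != NULL;
           use = use->next_use()) {
        Instruction* instr = use->instruction();
        if (instr->IsAssertAssignable() || instr->IsRedefinition()) {
          // Pass-through: aliased only if the redefined value itself escapes.
          if (AnyUseCreatesAlias(instr->AsDefinition())) return true;
        } else if (UseCreatesAlias(instr, use)) {
          return true;
        }
      }
      return false;
    }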
@@ -5934,7 +5936,7 @@
// Mark any value stored into the given object as potentially aliased.
void MarkStoredValuesEscaping(Definition* defn) {
- if (!defn->IsAllocateObject()) {
+ if (!Place::IsAllocation(defn)) {
return;
}
@@ -9387,7 +9389,7 @@
// deoptimization exit. So candidate should only be used in StoreInstanceField
// instructions that write into fields of the allocated object.
// We do not support materialization of the object that has type arguments.
-static bool IsAllocationSinkingCandidate(AllocateObjectInstr* alloc,
+static bool IsAllocationSinkingCandidate(Definition* alloc,
SafeUseCheck check_type) {
for (Value* use = alloc->input_use_list();
use != NULL;
@@ -9420,7 +9422,7 @@
// Remove the given allocation from the graph. It is not observable.
// If deoptimization occurs the object will be materialized.
-void AllocationSinking::EliminateAllocation(AllocateObjectInstr* alloc) {
+void AllocationSinking::EliminateAllocation(Definition* alloc) {
ASSERT(IsAllocationSinkingCandidate(alloc, kStrictCheck));
if (FLAG_trace_optimization) {
@@ -9466,12 +9468,21 @@
block_it.Advance()) {
BlockEntryInstr* block = block_it.Current();
for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
- AllocateObjectInstr* alloc = it.Current()->AsAllocateObject();
- if ((alloc != NULL) &&
- IsAllocationSinkingCandidate(alloc, kOptimisticCheck)) {
- alloc->SetIdentity(AliasIdentity::AllocationSinkingCandidate());
- candidates_.Add(alloc);
+ { AllocateObjectInstr* alloc = it.Current()->AsAllocateObject();
+ if ((alloc != NULL) &&
+ IsAllocationSinkingCandidate(alloc, kOptimisticCheck)) {
+ alloc->SetIdentity(AliasIdentity::AllocationSinkingCandidate());
+ candidates_.Add(alloc);
+ }
}
+ { AllocateUninitializedContextInstr* alloc =
+ it.Current()->AsAllocateUninitializedContext();
+ if ((alloc != NULL) &&
+ IsAllocationSinkingCandidate(alloc, kOptimisticCheck)) {
+ alloc->SetIdentity(AliasIdentity::AllocationSinkingCandidate());
+ candidates_.Add(alloc);
+ }
+ }
}
}
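The collection loop above now repeats the same candidate test once per allocation kind. A hypothetical helper (not in the patch) that makes the shared structure explicit; candidates_ is assumed to hold Definition* after this change:

    static void AddIfCandidate(Definition* alloc,
                               GrowableArray<Definition*>* candidates) {
      if ((alloc != NULL) &&
          IsAllocationSinkingCandidate(alloc, kOptimisticCheck)) {
        alloc->SetIdentity(AliasIdentity::AllocationSinkingCandidate());
        candidates->Add(alloc);
      }
    }
    // Usage inside the loop:
    //   AddIfCandidate(it.Current()->AsAllocateObject(), &candidates_);
    //   AddIfCandidate(it.Current()->AsAllocateUninitializedContext(),
    //                  &candidates_);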
@@ -9480,7 +9491,7 @@
do {
changed = false;
for (intptr_t i = 0; i < candidates_.length(); i++) {
- AllocateObjectInstr* alloc = candidates_[i];
+ Definition* alloc = candidates_[i];
if (alloc->Identity().IsAllocationSinkingCandidate()) {
if (!IsAllocationSinkingCandidate(alloc, kStrictCheck)) {
alloc->SetIdentity(AliasIdentity::Unknown());
@@ -9493,7 +9504,7 @@
// Shrink the list of candidates removing all unmarked ones.
intptr_t j = 0;
for (intptr_t i = 0; i < candidates_.length(); i++) {
- AllocateObjectInstr* alloc = candidates_[i];
+ Definition* alloc = candidates_[i];
if (alloc->Identity().IsAllocationSinkingCandidate()) {
if (FLAG_trace_optimization) {
OS::Print("discovered allocation sinking candidate: v%" Pd "\n",
@@ -9575,7 +9586,7 @@
do {
changed = false;
for (intptr_t i = 0; i < candidates_.length(); i++) {
- AllocateObjectInstr* alloc = candidates_[i];
+ Definition* alloc = candidates_[i];
if (alloc->Identity().IsAllocationSinkingCandidate()) {
if (!IsAllocationSinkingCandidate(alloc, kStrictCheck)) {
alloc->SetIdentity(AliasIdentity::Unknown());
@@ -9588,7 +9599,7 @@
// Remove all failed candidates from the candidates list.
intptr_t j = 0;
for (intptr_t i = 0; i < candidates_.length(); i++) {
- AllocateObjectInstr* alloc = candidates_[i];
+ Definition* alloc = candidates_[i];
if (!alloc->Identity().IsAllocationSinkingCandidate()) {
if (FLAG_trace_optimization) {
OS::Print("allocation v%" Pd " can't be eliminated\n",
@@ -9772,8 +9783,7 @@
// the given instruction that can deoptimize.
void AllocationSinking::CreateMaterializationAt(
Instruction* exit,
- AllocateObjectInstr* alloc,
- const Class& cls,
+ Definition* alloc,
const ZoneGrowableArray<const Object*>& slots) {
ZoneGrowableArray<Value*>* values =
new(I) ZoneGrowableArray<Value*>(slots.length());
@@ -9801,8 +9811,16 @@
values->Add(new(I) Value(load));
}
- MaterializeObjectInstr* mat =
- new(I) MaterializeObjectInstr(alloc, cls, slots, values);
+ MaterializeObjectInstr* mat = NULL;
+ if (alloc->IsAllocateObject()) {
+ mat = new(I) MaterializeObjectInstr(
+ alloc->AsAllocateObject(), slots, values);
+ } else {
+ ASSERT(alloc->IsAllocateUninitializedContext());
+ mat = new(I) MaterializeObjectInstr(
+ alloc->AsAllocateUninitializedContext(), slots, values);
+ }
+
flow_graph_->InsertBefore(exit, mat, NULL, FlowGraph::kValue);
// Replace all mentions of this allocation with a newly inserted
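The two call sites above imply a MaterializeObjectInstr constructor per allocation kind. Their declarations live in il.h and are not part of this diff; the shapes inferred from the calls would be roughly:

    // Assumed shapes, inferred from the call sites; not the literal il.h code.
    MaterializeObjectInstr(AllocateObjectInstr* allocation,
                           const ZoneGrowableArray<const Object*>& slots,
                           ZoneGrowableArray<Value*>* values);
    MaterializeObjectInstr(AllocateUninitializedContextInstr* allocation,
                           const ZoneGrowableArray<const Object*>& slots,
                           ZoneGrowableArray<Value*>* values);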
@@ -9895,7 +9913,7 @@
}
-void AllocationSinking::InsertMaterializations(AllocateObjectInstr* alloc) {
+void AllocationSinking::InsertMaterializations(Definition* alloc) {
// Collect all fields that are written for this instance.
ZoneGrowableArray<const Object*>* slots =
new(I) ZoneGrowableArray<const Object*>(5);
@@ -9914,8 +9932,10 @@
}
if (alloc->ArgumentCount() > 0) {
- ASSERT(alloc->ArgumentCount() == 1);
- intptr_t type_args_offset = alloc->cls().type_arguments_field_offset();
+ AllocateObjectInstr* alloc_object = alloc->AsAllocateObject();
+ ASSERT(alloc_object->ArgumentCount() == 1);
+ intptr_t type_args_offset =
+ alloc_object->cls().type_arguments_field_offset();
AddSlot(slots, Smi::ZoneHandle(I, Smi::New(type_args_offset)));
}
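The cast in this hunk relies on the invariant that only AllocateObject carries a pushed argument (its type arguments); a context allocation is argument-free, so this branch is never reached for one. One way to state that assumption explicitly (not in the patch):

    ASSERT(alloc->IsAllocateObject() || (alloc->ArgumentCount() == 0));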
@@ -9925,7 +9945,7 @@
// Insert materializations at environment uses.
for (intptr_t i = 0; i < exits_collector_.exits().length(); i++) {
CreateMaterializationAt(
- exits_collector_.exits()[i], alloc, alloc->cls(), *slots);
+ exits_collector_.exits()[i], alloc, *slots);
}
}
