Index: runtime/vm/flow_graph_compiler.cc
diff --git a/runtime/vm/flow_graph_compiler.cc b/runtime/vm/flow_graph_compiler.cc
index 7a1985d47a6086124c60912489e4e69ec6de0f3c..2a636074ae6f2c6ac65617c1c20a2867a2b5b9cc 100644
--- a/runtime/vm/flow_graph_compiler.cc
+++ b/runtime/vm/flow_graph_compiler.cc
@@ -225,8 +225,8 @@ FlowGraphCompiler::FlowGraphCompiler(
 void FlowGraphCompiler::InitCompiler() {
-  pc_descriptors_list_ = new DescriptorList(64);
-  exception_handlers_list_ = new ExceptionHandlerList();
+  pc_descriptors_list_ = new(zone()) DescriptorList(64);
+  exception_handlers_list_ = new(zone()) ExceptionHandlerList();
   block_info_.Clear();
   // Conservative detection of leaf routines used to remove the stack check
   // on function entry.
@@ -237,7 +237,7 @@ void FlowGraphCompiler::InitCompiler() {
   // indicating a non-leaf routine and calls without IC data indicating
   // possible reoptimization.
   for (int i = 0; i < block_order_.length(); ++i) {
-    block_info_.Add(new BlockInfo());
+    block_info_.Add(new(zone()) BlockInfo());
     if (is_optimizing() && !flow_graph().IsCompiledForOsr()) {
       BlockEntryInstr* entry = block_order_[i];
       for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
@@ -590,7 +590,7 @@ void FlowGraphCompiler::EmitTrySync(Instruction* instr, intptr_t try_index) {
   // Parameters first.
   intptr_t i = 0;
   const intptr_t num_non_copied_params = flow_graph().num_non_copied_params();
-  ParallelMoveInstr* move_instr = new ParallelMoveInstr();
+  ParallelMoveInstr* move_instr = new(zone()) ParallelMoveInstr();
   for (; i < num_non_copied_params; ++i) {
     // Don't sync captured parameters. They are not in the environment.
     if (flow_graph().captured_parameters()->Contains(i)) continue;
@@ -718,14 +718,14 @@ void FlowGraphCompiler::AddCurrentDescriptor(RawPcDescriptors::Kind kind,
 void FlowGraphCompiler::AddStaticCallTarget(const Function& func) {
   ASSERT(func.IsZoneHandle());
   static_calls_target_table_.Add(
-      StaticCallsStruct(assembler()->CodeSize(), &func, NULL));
+      new(zone()) StaticCallsStruct(assembler()->CodeSize(), &func, NULL));
 }
 void FlowGraphCompiler::AddStubCallTarget(const Code& code) {
   ASSERT(code.IsZoneHandle());
   static_calls_target_table_.Add(
-      StaticCallsStruct(assembler()->CodeSize(), NULL, &code));
+      new(zone()) StaticCallsStruct(assembler()->CodeSize(), NULL, &code));
 }
@@ -734,10 +734,10 @@ void FlowGraphCompiler::AddDeoptIndexAtCall(intptr_t deopt_id,
   ASSERT(is_optimizing());
   ASSERT(!intrinsic_mode());
   CompilerDeoptInfo* info =
-      new CompilerDeoptInfo(deopt_id,
-                            ICData::kDeoptAtCall,
-                            0,  // No flags.
-                            pending_deoptimization_env_);
+      new(zone()) CompilerDeoptInfo(deopt_id,
+                                    ICData::kDeoptAtCall,
+                                    0,  // No flags.
+                                    pending_deoptimization_env_);
   info->set_pc_offset(assembler()->CodeSize());
   deopt_infos_.Add(info);
 }
@@ -883,10 +883,10 @@ Label* FlowGraphCompiler::AddDeoptStub(intptr_t deopt_id,
   ASSERT(!Compiler::always_optimize());
   ASSERT(is_optimizing_);
   CompilerDeoptInfoWithStub* stub =
-      new CompilerDeoptInfoWithStub(deopt_id,
-                                    reason,
-                                    flags,
-                                    pending_deoptimization_env_);
+      new(zone()) CompilerDeoptInfoWithStub(deopt_id,
+                                            reason,
+                                            flags,
+                                            pending_deoptimization_env_);
   deopt_infos_.Add(stub);
   return stub->entry_label();
 }
@@ -995,15 +995,15 @@ void FlowGraphCompiler::FinalizeStaticCallTargetsTable(const Code& code) {
   Smi& smi_offset = Smi::Handle(zone());
   for (intptr_t i = 0; i < static_calls_target_table_.length(); i++) {
     const intptr_t target_ix = Code::kSCallTableEntryLength * i;
-    smi_offset = Smi::New(static_calls_target_table_[i].offset);
+    smi_offset = Smi::New(static_calls_target_table_[i]->offset);
     targets.SetAt(target_ix + Code::kSCallTableOffsetEntry, smi_offset);
-    if (static_calls_target_table_[i].function != NULL) {
+    if (static_calls_target_table_[i]->function != NULL) {
      targets.SetAt(target_ix + Code::kSCallTableFunctionEntry,
-                    *static_calls_target_table_[i].function);
+                    *static_calls_target_table_[i]->function);
     }
-    if (static_calls_target_table_[i].code != NULL) {
+    if (static_calls_target_table_[i]->code != NULL) {
      targets.SetAt(target_ix + Code::kSCallTableCodeEntry,
-                    *static_calls_target_table_[i].code);
+                    *static_calls_target_table_[i]->code);
     }
   }
   code.set_static_calls_target_table(targets);
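
Every hunk above replaces a plain heap `new` with the placement form `new(zone()) ...`, so the compiler's temporary bookkeeping objects (descriptor lists, block infos, deopt infos, static-call entries) are allocated in the compile-time zone and released in bulk when that zone is torn down; the `.` to `->` changes in the last hunk follow from static_calls_target_table_ now holding zone-allocated StaticCallsStruct pointers instead of values. The sketch below is not the Dart VM's actual Zone/ZoneAllocated code; Zone, ZoneObject and BlockInfoSketch are illustrative names only. It is a minimal, self-contained C++ example of the pattern that makes an expression like `new(zone()) BlockInfo()` well-formed: a base class overloads operator new to bump-allocate from an arena.

// Hypothetical arena; kept deliberately small for illustration. A real zone
// allocator grows in segments and offers more entry points.
#include <cstddef>
#include <cstdlib>

class Zone {
 public:
  explicit Zone(std::size_t capacity = 1 << 20)
      : buffer_(static_cast<unsigned char*>(std::malloc(capacity))),
        capacity_(capacity),
        used_(0) {
    if (buffer_ == nullptr) std::abort();
  }
  ~Zone() { std::free(buffer_); }  // every zone object is released here, at once

  void* Allocate(std::size_t size) {
    size = (size + 7) & ~static_cast<std::size_t>(7);  // 8-byte alignment
    if (used_ + size > capacity_) std::abort();        // sketch: no growth
    void* result = buffer_ + used_;
    used_ += size;
    return result;
  }

 private:
  unsigned char* buffer_;
  std::size_t capacity_;
  std::size_t used_;
};

// Analogue of a ZoneAllocated-style base class: instances are created with
// `new(zone) T(...)` and are never deleted individually.
class ZoneObject {
 public:
  void* operator new(std::size_t size, Zone* zone) { return zone->Allocate(size); }
  void operator delete(void*, Zone*) {}      // matching placement delete
  void* operator new(std::size_t) = delete;  // forbid plain heap allocation
  void operator delete(void*) = delete;      // forbid individual deletion
};

// Stand-in for one of the bookkeeping types touched by the patch.
struct BlockInfoSketch : public ZoneObject {
  bool is_marked = false;
};

int main() {
  Zone zone;
  // Same shape as the patch's `new(zone()) BlockInfo()`: the object's lifetime
  // is the zone's lifetime, so nothing frees it explicitly.
  BlockInfoSketch* info = new (&zone) BlockInfoSketch();
  info->is_marked = true;
  return info->is_marked ? 0 : 1;
}

The design point carried by the patch is the same as in the sketch: compiler data structures are short-lived and die together at the end of a compilation, so arena allocation avoids per-object frees and the leaks that motivated the switch away from plain `new`.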