Index: runtime/vm/flow_graph_compiler.cc
diff --git a/runtime/vm/flow_graph_compiler.cc b/runtime/vm/flow_graph_compiler.cc
index 405b37f7b9f98e1461f8a604f7414fea5d836288..33a252e384fa3558bdf833aea3b8de81811d6941 100644
--- a/runtime/vm/flow_graph_compiler.cc
+++ b/runtime/vm/flow_graph_compiler.cc
@@ -27,6 +27,7 @@
#include "vm/stub_code.h"
#include "vm/symbols.h"
#include "vm/timeline.h"
+#include "vm/code_statistics.h"
namespace dart {
@@ -207,7 +208,8 @@ FlowGraphCompiler::FlowGraphCompiler(
bool is_optimizing,
const GrowableArray<const Function*>& inline_id_to_function,
const GrowableArray<TokenPosition>& inline_id_to_token_pos,
- const GrowableArray<intptr_t>& caller_inline_id)
+ const GrowableArray<intptr_t>& caller_inline_id,
+ CodeStatistics* stats)
: thread_(Thread::Current()),
zone_(Thread::Current()->zone()),
assembler_(assembler),
@@ -244,7 +246,8 @@ FlowGraphCompiler::FlowGraphCompiler(
inlined_code_intervals_(Array::ZoneHandle(Object::empty_array().raw())),
inline_id_to_function_(inline_id_to_function),
inline_id_to_token_pos_(inline_id_to_token_pos),
- caller_inline_id_(caller_inline_id) {
+ caller_inline_id_(caller_inline_id),
+ stats_(stats) {
ASSERT(flow_graph->parsed_function().function().raw() ==
parsed_function.function().raw());
if (!is_optimizing) {
@@ -448,7 +451,9 @@ void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) {
} else if (instr->MayThrow() &&
(CurrentTryIndex() != CatchClauseNode::kInvalidTryIndex)) {
// Optimized try-block: Sync locals to fixed stack locations.
+ SpecialStatsBegin(CombinedCodeStatistics::kTagTrySyncSpilling);
EmitTrySync(instr, CurrentTryIndex());
+ SpecialStatsEnd(CombinedCodeStatistics::kTagTrySyncSpilling);
}
}
@@ -539,12 +544,17 @@ void FlowGraphCompiler::VisitBlocks() {
BeginCodeSourceRange();
ASSERT(pending_deoptimization_env_ == NULL);
pending_deoptimization_env_ = entry->env();
+ StatsBegin(entry);
entry->EmitNativeCode(this);
+ StatsEnd(entry);
pending_deoptimization_env_ = NULL;
EndCodeSourceRange(entry->token_pos());
// Compile all successors until an exit, branch, or a block entry.
for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
Instruction* instr = it.Current();
+
+ StatsBegin(instr);
+
// Compose intervals.
if (instr->has_inlining_id() && is_optimizing()) {
if (prev_inlining_id != instr->inlining_id()) {
@@ -589,6 +599,7 @@ void FlowGraphCompiler::VisitBlocks() {
FrameStateUpdateWith(instr);
}
#endif
+ StatsEnd(instr);
}
#if defined(DEBUG) && !defined(TARGET_ARCH_DBC)
@@ -1152,7 +1163,9 @@ bool FlowGraphCompiler::TryIntrinsify() {
// Reading from a mutable double box requires allocating a fresh double.
if (field.is_instance() &&
(FLAG_precompiled_mode || !IsPotentialUnboxedField(field))) {
+ SpecialStatsBegin(CombinedCodeStatistics::kTagIntrinsics);
GenerateInlinedGetter(field.Offset());
+ SpecialStatsEnd(CombinedCodeStatistics::kTagIntrinsics);
return !FLAG_use_field_guards;
}
return false;
@@ -1165,7 +1178,9 @@ bool FlowGraphCompiler::TryIntrinsify() {
if (field.is_instance() &&
(FLAG_precompiled_mode || field.guarded_cid() == kDynamicCid)) {
+ SpecialStatsBegin(CombinedCodeStatistics::kTagIntrinsics);
GenerateInlinedSetter(field.Offset());
+ SpecialStatsEnd(CombinedCodeStatistics::kTagIntrinsics);
return !FLAG_use_field_guards;
}
return false;
@@ -1174,7 +1189,9 @@ bool FlowGraphCompiler::TryIntrinsify() {
EnterIntrinsicMode();
+ SpecialStatsBegin(CombinedCodeStatistics::kTagIntrinsics);
bool complete = Intrinsifier::Intrinsify(parsed_function(), this);
+ SpecialStatsEnd(CombinedCodeStatistics::kTagIntrinsics);
ExitIntrinsicMode();