Index: runtime/vm/flow_graph_compiler_x64.cc
diff --git a/runtime/vm/flow_graph_compiler_x64.cc b/runtime/vm/flow_graph_compiler_x64.cc
index bc7139c6d7f7156dc187e3937fe417f7731056b6..db309c23d635ae11c07a63acc14e39e61c307a3c 100644
--- a/runtime/vm/flow_graph_compiler_x64.cc
+++ b/runtime/vm/flow_graph_compiler_x64.cc
@@ -984,9 +984,6 @@ void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) {
 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc
 // needs to be updated to match.
 void FlowGraphCompiler::EmitFrameEntry() {
-  const Function& function = parsed_function().function();
-  // Load pool pointer.
-
   if (flow_graph().IsCompiledForOsr()) {
     intptr_t extra_slots = StackSize()
         - flow_graph().num_stack_locals()
@@ -997,6 +994,7 @@ void FlowGraphCompiler::EmitFrameEntry() {
     const Register new_pp = R13;
     __ LoadPoolPointer(new_pp);
 
+    const Function& function = parsed_function().function();
     if (CanOptimizeFunction() &&
         function.IsOptimizable() &&
         (!is_optimizing() || may_reoptimize())) {
@@ -1027,6 +1025,13 @@ void FlowGraphCompiler::EmitFrameEntry() {
 void FlowGraphCompiler::CompileGraph() {
   InitCompiler();
 
+  const Function& function = parsed_function().function();
+
+#ifdef DART_PRECOMPILER
+  if (function.IsDynamicFunction()) {
+    __ MonomorphicCheckedEntry();
+  }
+#endif  // DART_PRECOMPILER
 
   if (TryIntrinsify()) {
     // Skip regular code generation.
@@ -1036,8 +1041,6 @@ void FlowGraphCompiler::CompileGraph() {
 
   EmitFrameEntry();
   ASSERT(assembler()->constant_pool_allowed());
-  const Function& function = parsed_function().function();
-
   const int num_fixed_params = function.num_fixed_parameters();
   const int num_copied_params = parsed_function().num_copied_params();
   const int num_locals = parsed_function().num_stack_locals();