Index: runtime/vm/flow_graph_compiler_arm64.cc
diff --git a/runtime/vm/flow_graph_compiler_arm64.cc b/runtime/vm/flow_graph_compiler_arm64.cc
index 53429c8bda2bc2b539b327a3997b8030ee407e35..56cd1348a73bf96c94c3c9d33239a63dde32ddd9 100644
--- a/runtime/vm/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/flow_graph_compiler_arm64.cc
@@ -28,6 +28,18 @@ DECLARE_FLAG(bool, enable_simd_inline);
 DECLARE_FLAG(bool, use_megamorphic_stub);
 
 
+void MegamorphicSlowPath::EmitNativeCode(FlowGraphCompiler* compiler) {
+  Assembler* assembler = compiler->assembler();
+#define __ assembler->
+  __ Bind(entry_label());
+  __ Comment("MegamorphicSlowPath");
+  compiler->EmitMegamorphicInstanceCall(ic_data_, argument_count_, deopt_id_,
+                                        token_pos_, locs_, try_index_);
+  __ b(exit_label());
+#undef __
+}
+
+
 FlowGraphCompiler::~FlowGraphCompiler() {
   // BlockInfos are zone-allocated, so their destructors are not called.
   // Verify the labels explicitly here.
@@ -172,8 +184,8 @@ void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
                                              intptr_t stub_ix) {
   // Calls do not need stubs, they share a deoptimization trampoline.
   ASSERT(reason() != ICData::kDeoptAtCall);
-  Assembler* assem = compiler->assembler();
-#define __ assem->
+  Assembler* assembler = compiler->assembler();
+#define __ assembler->
   __ Comment("%s", Name());
   __ Bind(entry_label());
   if (FLAG_trap_on_deoptimization) {
@@ -183,7 +195,7 @@ void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
   ASSERT(deopt_env() != NULL);
   __ Push(CODE_REG);
   __ BranchLink(*StubCode::Deoptimize_entry());
-  set_pc_offset(assem->CodeSize());
+  set_pc_offset(assembler->CodeSize());
 #undef __
 }
@@ -1248,7 +1260,8 @@ void FlowGraphCompiler::EmitMegamorphicInstanceCall(
     intptr_t argument_count,
     intptr_t deopt_id,
     intptr_t token_pos,
-    LocationSummary* locs) {
+    LocationSummary* locs,
+    intptr_t try_index) {
   const String& name = String::Handle(zone(), ic_data.target_name());
   const Array& arguments_descriptor =
       Array::ZoneHandle(zone(), ic_data.arguments_descriptor());
@@ -1266,13 +1279,26 @@
   }
   __ blr(R1);
-  AddCurrentDescriptor(RawPcDescriptors::kOther,
-                       Thread::kNoDeoptId, token_pos);
   RecordSafepoint(locs);
   const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
-  if (is_optimizing()) {
+  if (Compiler::always_optimize()) {
+    // Megamorphic calls may occur in slow path stubs.
+    // If the caller passed a valid try_index, use it.
+    if (try_index == CatchClauseNode::kInvalidTryIndex) {
+      try_index = CurrentTryIndex();
+    }
+    pc_descriptors_list()->AddDescriptor(RawPcDescriptors::kOther,
+                                         assembler()->CodeSize(),
+                                         Thread::kNoDeoptId,
+                                         token_pos,
+                                         try_index);
+  } else if (is_optimizing()) {
+    AddCurrentDescriptor(RawPcDescriptors::kOther,
+                         Thread::kNoDeoptId, token_pos);
     AddDeoptIndexAtCall(deopt_id_after, token_pos);
   } else {
+    AddCurrentDescriptor(RawPcDescriptors::kOther,
+                         Thread::kNoDeoptId, token_pos);
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
     AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
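
For reference, the MegamorphicSlowPath::EmitNativeCode added above relies on a class declaration that lives outside this file (in the shared runtime/vm/flow_graph_compiler.h). A minimal sketch of the shape that declaration needs, assuming only the member names used in the call above, is shown below; it is an illustrative reconstruction under those assumptions, not the actual header part of this patch.

// Sketch only: approximate declaration implied by the EmitNativeCode body
// above; the real one is in runtime/vm/flow_graph_compiler.h.
class MegamorphicSlowPath : public SlowPathCode {
 public:
  MegamorphicSlowPath(const ICData& ic_data,
                      intptr_t argument_count,
                      intptr_t deopt_id,
                      intptr_t token_pos,
                      LocationSummary* locs,
                      intptr_t try_index)
      : SlowPathCode(),
        ic_data_(ic_data),
        argument_count_(argument_count),
        deopt_id_(deopt_id),
        token_pos_(token_pos),
        locs_(locs),
        try_index_(try_index) {}
  virtual ~MegamorphicSlowPath() {}

 private:
  // Architecture-specific; the arm64 definition is the one added above.
  virtual void EmitNativeCode(FlowGraphCompiler* compiler);

  const ICData& ic_data_;
  intptr_t argument_count_;
  intptr_t deopt_id_;
  intptr_t token_pos_;
  LocationSummary* locs_;
  intptr_t try_index_;  // kInvalidTryIndex means "use the current try index".
};

A call site would construct the slow path with the enclosing instruction's try index (or CatchClauseNode::kInvalidTryIndex) and register it with the compiler, e.g. via FlowGraphCompiler::AddSlowPathCode, so that EmitNativeCode is emitted out of line after the main instruction stream.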