Index: runtime/vm/intermediate_language_arm64.cc
diff --git a/runtime/vm/intermediate_language_arm64.cc b/runtime/vm/intermediate_language_arm64.cc
index b5f235f79e8a7e3cca70bdf7e4acd2585782a2d0..4c4224d192b42ba76180d483eb173f7093a27648 100644
--- a/runtime/vm/intermediate_language_arm64.cc
+++ b/runtime/vm/intermediate_language_arm64.cc
@@ -229,7 +229,7 @@ void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   // deoptimization point in optimized code, after call.
   const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id());
   if (compiler->is_optimizing()) {
-    compiler->AddDeoptIndexAtCall(deopt_id_after, token_pos());
+    compiler->AddDeoptIndexAtCall(deopt_id_after);
   }
   // Add deoptimization continuation point after the call and before the
   // arguments are removed.
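
The first hunk is a mechanical call-site update: `AddDeoptIndexAtCall` no longer takes a `TokenPosition`, so a lazy-deopt entry is keyed by its deopt id alone. Below is a minimal, self-contained sketch of that pattern; `ToyCompiler` and its `deopt_table_` are hypothetical stand-ins for the VM's `FlowGraphCompiler`, whose real entries also capture the environment (register and stack-slot locations).

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for the VM's FlowGraphCompiler deopt table.
class ToyCompiler {
 public:
  // After this change the deopt id alone identifies the lazy-deopt
  // point; the TokenPosition parameter was dropped from the signature.
  void AddDeoptIndexAtCall(intptr_t deopt_id) {
    deopt_table_.push_back(deopt_id);
  }

  void Dump() const {
    for (intptr_t id : deopt_table_) {
      std::printf("lazy-deopt entry, deopt id %lld\n",
                  static_cast<long long>(id));
    }
  }

 private:
  std::vector<intptr_t> deopt_table_;
};

int main() {
  ToyCompiler compiler;
  compiler.AddDeoptIndexAtCall(43);  // e.g. Thread::ToDeoptAfter(deopt_id())
  compiler.Dump();
  return 0;
}
```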
@@ -2559,11 +2559,16 @@ void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
                                 compiler->assembler()->CodeSize(),
                                 catch_handler_types_,
                                 needs_stacktrace());
-
-  // Restore the pool pointer.
-  __ RestoreCodePointer();
-  __ LoadPoolPointer();
-
+  // On lazy deoptimization we patch the optimized code here to enter the
+  // deoptimization stub.
+  const intptr_t deopt_id = Thread::ToDeoptAfter(GetDeoptId());
+  if (compiler->is_optimizing()) {
+    compiler->AddDeoptIndexAtCall(deopt_id);
+  } else {
+    compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
+                                   deopt_id,
+                                   TokenPosition::kNoSource);
+  }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
   }
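
The second hunk turns catch-block entry into a deoptimization point: optimized code records a patchable lazy-deopt index there, while unoptimized code emits a kDeopt PC descriptor so the same position is still a valid deopt target. A self-contained sketch of that branch follows; `ToyCompiler`, `DescriptorKind`, and `EmitCatchEntryDeoptPoint` are illustrative stand-ins, not the VM's actual API.

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical stand-ins for the VM's descriptor kinds and compiler.
enum class DescriptorKind { kDeopt };

class ToyCompiler {
 public:
  explicit ToyCompiler(bool is_optimizing) : is_optimizing_(is_optimizing) {}
  bool is_optimizing() const { return is_optimizing_; }

  // Optimized code: record a patchable lazy-deopt entry at this PC.
  void AddDeoptIndexAtCall(intptr_t deopt_id) {
    std::printf("optimized: lazy-deopt entry at catch, id %lld\n",
                static_cast<long long>(deopt_id));
  }
  // Unoptimized code: just mark the PC with a kDeopt descriptor.
  void AddCurrentDescriptor(DescriptorKind, intptr_t deopt_id) {
    std::printf("unoptimized: kDeopt descriptor at catch, id %lld\n",
                static_cast<long long>(deopt_id));
  }

 private:
  bool is_optimizing_;
};

// Mirrors the control flow the patch adds at CatchBlockEntryInstr:
// the catch entry itself becomes a deoptimization target.
void EmitCatchEntryDeoptPoint(ToyCompiler* compiler, intptr_t deopt_id) {
  if (compiler->is_optimizing()) {
    compiler->AddDeoptIndexAtCall(deopt_id);
  } else {
    compiler->AddCurrentDescriptor(DescriptorKind::kDeopt, deopt_id);
  }
}

int main() {
  ToyCompiler optimized(true);
  ToyCompiler unoptimized(false);
  EmitCatchEntryDeoptPoint(&optimized, 7);
  EmitCatchEntryDeoptPoint(&unoptimized, 7);
  return 0;
}
```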