Index: runtime/vm/intermediate_language_mips.cc
diff --git a/runtime/vm/intermediate_language_mips.cc b/runtime/vm/intermediate_language_mips.cc
index f583fa1e13b2ab3c4a0bf2aa4a2b55abedded3e0..a47e57279298c9d32c60adcb2a2345bedb564b57 100644
--- a/runtime/vm/intermediate_language_mips.cc
+++ b/runtime/vm/intermediate_language_mips.cc
@@ -278,7 +278,7 @@ void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   // deoptimization point in optimized code, after call.
   const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id());
   if (compiler->is_optimizing()) {
-    compiler->AddDeoptIndexAtCall(deopt_id_after, token_pos());
+    compiler->AddDeoptIndexAtCall(deopt_id_after);
   }
   // Add deoptimization continuation point after the call and before the
   // arguments are removed.
@@ -2673,14 +2673,19 @@ void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
                                 compiler->assembler()->CodeSize(),
                                 catch_handler_types_,
                                 needs_stacktrace());
-  // Restore pool pointer.
-  __ RestoreCodePointer();
-  __ LoadPoolPointer();
-
+  // On lazy deoptimization we patch the optimized code here to enter the
+  // deoptimization stub.
+  const intptr_t deopt_id = Thread::ToDeoptAfter(GetDeoptId());
+  if (compiler->is_optimizing()) {
+    compiler->AddDeoptIndexAtCall(deopt_id);
+  } else {
+    compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
+                                   deopt_id,
+                                   TokenPosition::kNoSource);
+  }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
   }
-
   // Restore SP from FP as we are coming from a throw and the code for
   // popping arguments has not been run.
   const intptr_t fp_sp_dist =