Index: runtime/vm/stub_code_mips.cc
diff --git a/runtime/vm/stub_code_mips.cc b/runtime/vm/stub_code_mips.cc
index be1f46625813c45415b54337ed95728b23a08b7b..de6db702434fe4f22652ae9c35b658c28563908e 100644
--- a/runtime/vm/stub_code_mips.cc
+++ b/runtime/vm/stub_code_mips.cc
@@ -547,27 +547,29 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
   __ Ret();
 }
+// RA: return address + call-instruction-size
 // V0: result, must be preserved
 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) {
+  // Correct return address to point just after the call that is being
+  // deoptimized.
+  __ AddImmediate(RA, -CallPattern::kDeoptCallLengthInBytes);
   // Push zap value instead of CODE_REG for lazy deopt.
   __ LoadImmediate(TMP, 0xf1f1f1f1);
   __ Push(TMP);
-  // Return address for "call" to deopt stub.
-  __ LoadImmediate(RA, 0xe1e1e1e1);
-  __ lw(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset()));
   GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn);
 }
+// RA: return address + call-instruction-size
 // V0: exception, must be preserved
 // V1: stacktrace, must be preserved
 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) {
+  // Correct return address to point just after the call that is being
+  // deoptimized.
+  __ AddImmediate(RA, -CallPattern::kDeoptCallLengthInBytes);
   // Push zap value instead of CODE_REG for lazy deopt.
   __ LoadImmediate(TMP, 0xf1f1f1f1);
   __ Push(TMP);
-  // Return address for "call" to deopt stub.
-  __ LoadImmediate(RA, 0xe1e1e1e1);
-  __ lw(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset()));
   GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow);
 }