Index: runtime/vm/stub_code_arm64.cc
diff --git a/runtime/vm/stub_code_arm64.cc b/runtime/vm/stub_code_arm64.cc
index 1eebe2586cade37075a41b50899a951cb5f27633..fdcf3a70eb86eefbe0b055f5fdd83e3bc23e03e6 100644
--- a/runtime/vm/stub_code_arm64.cc
+++ b/runtime/vm/stub_code_arm64.cc
@@ -557,29 +557,27 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
 }
-// LR: return address + call-instruction-size
 // R0: result, must be preserved
 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) {
-  // Correct return address to point just after the call that is being
-  // deoptimized.
-  __ AddImmediate(LR, LR, -CallPattern::kDeoptCallLengthInBytes);
   // Push zap value instead of CODE_REG for lazy deopt.
   __ LoadImmediate(TMP, 0xf1f1f1f1);
   __ Push(TMP);
+  // Return address for "call" to deopt stub.
+  __ LoadImmediate(LR, 0xe1e1e1e1);
+  __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset()));
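+  // Together with the zap value pushed above, the frame now has the shape of
+  // a direct call into this stub; the distinctive constants make any stray
+  // use of the fake slots easy to spot in a crash dump.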
   GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn);
 }
-// LR: return address + call-instruction-size
 // R0: exception, must be preserved
 // R1: stacktrace, must be preserved
 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) {
-  // Correct return address to point just after the call that is being
-  // deoptimized.
-  __ AddImmediate(LR, LR, -CallPattern::kDeoptCallLengthInBytes);
   // Push zap value instead of CODE_REG for lazy deopt.
   __ LoadImmediate(TMP, 0xf1f1f1f1);
   __ Push(TMP);
+  // Return address for "call" to deopt stub.
+  __ LoadImmediate(LR, 0xe1e1e1e1);
+  __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset()));
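+  // As above: the fake return address and CODE_REG slot carry zap values.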
   GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow);
 }
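
Both stubs drop the old LR fix-up (the removed CallPattern::kDeoptCallLengthInBytes adjustment) and instead fabricate a frame whose CODE_REG slot and return-address slot hold zap values, loading the stub's own Code object from the Thread. For readers unfamiliar with zap values, a minimal standalone C++ sketch of the idea follows; the two constants are taken from the patch, while the frame struct and helper are hypothetical illustrations, not Dart VM code:

#include <cstdint>
#include <cstdio>

// Constants from the patch. They have no architectural meaning; their only
// job is to be unmistakable if a slot that should never be consumed is
// consumed anyway.
constexpr uint64_t kZapCodeReg = 0xf1f1f1f1;        // fake CODE_REG slot
constexpr uint64_t kZapReturnAddress = 0xe1e1e1e1;  // fake return address

// Hypothetical stand-in for the frame the stubs build: the slots exist so
// the frame has the shape of a real call, but their contents are poison.
struct FakeDeoptFrame {
  uint64_t code_reg_slot = kZapCodeReg;
  uint64_t return_address_slot = kZapReturnAddress;
};

// Hypothetical diagnostic: code inspecting a suspicious frame can recognize
// the poison instead of treating 0xe1e1e1e1 as a real branch target.
bool LooksLikeLazyDeoptFrame(const FakeDeoptFrame& f) {
  return f.return_address_slot == kZapReturnAddress &&
         f.code_reg_slot == kZapCodeReg;
}

int main() {
  FakeDeoptFrame frame;
  std::printf("zapped lazy-deopt frame: %s\n",
              LooksLikeLazyDeoptFrame(frame) ? "yes" : "no");
  return 0;
}

Nothing in the patch suggests the VM ever tests these values; the benefit is diagnostic, and by deleting the AddImmediate fix-up the stubs also no longer need to know the exact length of the deoptimized call instruction.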