Index: runtime/vm/stub_code_x64.cc
diff --git a/runtime/vm/stub_code_x64.cc b/runtime/vm/stub_code_x64.cc
index ca1ce140f0d3e36a7329812a030190298c368bc0..6ec5ba1e7f37ba76a88702b1107cadf6d3094c0a 100644
--- a/runtime/vm/stub_code_x64.cc
+++ b/runtime/vm/stub_code_x64.cc
@@ -497,25 +497,31 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
 }
 
+// TOS: return address + call-instruction-size (5 bytes).
 // RAX: result, must be preserved
 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) {
+  // Correct return address to point just after the call that is being
+  // deoptimized.
+  __ popq(RBX);
+  __ subq(RBX, Immediate(ShortCallPattern::pattern_length_in_bytes()));
   // Push zap value instead of CODE_REG for lazy deopt.
   __ pushq(Immediate(0xf1f1f1f1));
-  // Return address for "call" to deopt stub.
-  __ pushq(Immediate(0xe1e1e1e1));
-  __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset()));
+  __ pushq(RBX);
   GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn);
 }
 
+// TOS: return address + call-instruction-size (5 bytes).
 // RAX: exception, must be preserved
 // RDX: stacktrace, must be preserved
 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) {
+  // Correct return address to point just after the call that is being
+  // deoptimized.
+  __ popq(RBX);
+  __ subq(RBX, Immediate(ShortCallPattern::pattern_length_in_bytes()));
   // Push zap value instead of CODE_REG for lazy deopt.
   __ pushq(Immediate(0xf1f1f1f1));
-  // Return address for "call" to deopt stub.
-  __ pushq(Immediate(0xe1e1e1e1));
-  __ movq(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset()));
+  __ pushq(RBX);
   GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow);
 }