Index: runtime/vm/code_patcher_arm64.cc
diff --git a/runtime/vm/code_patcher_arm64.cc b/runtime/vm/code_patcher_arm64.cc
index 7b0afef646b834eb8ff43dd34959aae7b74fb7bf..3e10b5a35512076d78095afef2f5abf68c05c427 100644
--- a/runtime/vm/code_patcher_arm64.cc
+++ b/runtime/vm/code_patcher_arm64.cc
@@ -118,6 +118,44 @@ RawFunction* CodePatcher::GetUnoptimizedStaticCallAt(
 }
 
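+// Patches the native call whose return address is |return_address| within
+// |code|: the call is redirected to |trampoline|'s entry point and |target|
+// is recorded as the native function to invoke.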
+void CodePatcher::PatchNativeCallAt(uword return_address,
+                                    const Code& code,
+                                    NativeFunction target,
+                                    const Code& trampoline) {
+  ASSERT(code.ContainsInstructionAt(return_address));
+  NativeCallPattern call(return_address, code);
+  call.set_target(trampoline.EntryPoint());
+  call.set_native_function(target);
+}
+
+
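+// Returns the entry point currently targeted by the native call whose
+// return address is |return_address| within |code|, and stores the call's
+// native function in |target|.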
+uword CodePatcher::GetNativeCallAt(uword return_address,
+                                   const Code& code,
+                                   NativeFunction* target) {
+  ASSERT(code.ContainsInstructionAt(return_address));
+  NativeCallPattern call(return_address, code);
+  *target = call.native_function();
+  return call.target();
+}
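+
+// Hypothetical usage sketch (illustrative only; |desired| is an assumed
+// NativeFunction and |wrapper| an assumed wrapper Code object, neither part
+// of this patch): read back the current native function and re-patch the
+// call site only if it changed.
+//
+//   NativeFunction current = NULL;
+//   CodePatcher::GetNativeCallAt(return_address, code, &current);
+//   if (current != desired) {
+//     CodePatcher::PatchNativeCallAt(return_address, code, desired, wrapper);
+//   }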
+
+
 // This class pattern matches on a load from the object pool. Loading on
 // ARM64 is complicated because it can take more than one form. We
 // match backwards from the end of the sequence so we can reuse the code for