| Index: src/third_party/valgrind/valgrind.h
|
| ===================================================================
|
| --- src/third_party/valgrind/valgrind.h (revision 9666)
|
| +++ src/third_party/valgrind/valgrind.h (working copy)
|
| @@ -12,7 +12,7 @@
|
| This file is part of Valgrind, a dynamic binary instrumentation
|
| framework.
|
|
|
| - Copyright (C) 2000-2007 Julian Seward. All rights reserved.
|
| + Copyright (C) 2000-2010 Julian Seward. All rights reserved.
|
|
|
| Redistribution and use in source and binary forms, with or without
|
| modification, are permitted provided that the following conditions
|
| @@ -73,6 +73,25 @@
|
| #ifndef __VALGRIND_H
|
| #define __VALGRIND_H
|
|
|
| +
|
| +/* ------------------------------------------------------------------ */
|
| +/* VERSION NUMBER OF VALGRIND */
|
| +/* ------------------------------------------------------------------ */
|
| +
|
| +/* Specify Valgrind's version number, so that user code can
|
| + conditionally compile based on our version number. Note that these
|
| + were introduced at version 3.6 and so do not exist in version 3.5
|
| + or earlier. The recommended way to use them to check for "version
|
| + X.Y or later" is (eg)
|
| +
|
| +#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
|
| + && (__VALGRIND_MAJOR__ > 3 \
|
| + || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
|
| +*/
|
| +#define __VALGRIND_MAJOR__ 3
|
| +#define __VALGRIND_MINOR__ 6
|
| +
|
| +
|
| #include <stdarg.h>
|
| #include <stdint.h>
|
|
|
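For a concrete picture of how the new version guards get used, here is a short sketch of client code keying off the check recommended in the comment above. It is illustrative only and not part of the patch; the MY_HAVE_VALGRIND_3_6 name is invented for the example.

    #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
        && (__VALGRIND_MAJOR__ > 3 \
            || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
       /* 3.6-or-later header: the expression-style client request
          (VALGRIND_DO_CLIENT_REQUEST_EXPR, introduced later in this
          patch) is available. */
    #  define MY_HAVE_VALGRIND_3_6 1
    #else
       /* 3.5 or earlier: only the statement-style
          VALGRIND_DO_CLIENT_REQUEST exists. */
    #  define MY_HAVE_VALGRIND_3_6 0
    #endif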
| @@ -85,34 +104,44 @@
|
| identifying architectures, which are different to the ones we use
|
| within the rest of Valgrind. Note, __powerpc__ is active for both
|
| 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
|
| - latter (on Linux, that is). */
|
| + latter (on Linux, that is).
|
| +
|
| + Misc note: how to find out what's predefined in gcc by default:
|
| + gcc -Wp,-dM somefile.c
|
| +*/
|
| +#undef PLAT_x86_darwin
|
| +#undef PLAT_amd64_darwin
|
| +#undef PLAT_x86_win32
|
| #undef PLAT_x86_linux
|
| #undef PLAT_amd64_linux
|
| #undef PLAT_ppc32_linux
|
| #undef PLAT_ppc64_linux
|
| -#undef PLAT_ppc32_aix5
|
| -#undef PLAT_ppc64_aix5
|
| +#undef PLAT_arm_linux
|
| +#undef PLAT_s390x_linux
|
|
|
| -#if !defined(_AIX) && defined(__i386__)
|
| +
|
| +#if defined(__APPLE__) && defined(__i386__)
|
| +# define PLAT_x86_darwin 1
|
| +#elif defined(__APPLE__) && defined(__x86_64__)
|
| +# define PLAT_amd64_darwin 1
|
| +#elif defined(__MINGW32__) || defined(__CYGWIN32__) \
|
| + || (defined(_WIN32) && defined(_M_IX86))
|
| +# define PLAT_x86_win32 1
|
| +#elif defined(__linux__) && defined(__i386__)
|
| # define PLAT_x86_linux 1
|
| -#elif !defined(_AIX) && defined(__x86_64__)
|
| +#elif defined(__linux__) && defined(__x86_64__)
|
| # define PLAT_amd64_linux 1
|
| -#elif !defined(_AIX) && defined(__powerpc__) && !defined(__powerpc64__)
|
| +#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
|
| # define PLAT_ppc32_linux 1
|
| -#elif !defined(_AIX) && defined(__powerpc__) && defined(__powerpc64__)
|
| +#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
|
| # define PLAT_ppc64_linux 1
|
| -#elif defined(_AIX) && defined(__64BIT__)
|
| -# define PLAT_ppc64_aix5 1
|
| -#elif defined(_AIX) && !defined(__64BIT__)
|
| -# define PLAT_ppc32_aix5 1
|
| -#endif
|
| -
|
| -
|
| +#elif defined(__linux__) && defined(__arm__)
|
| +# define PLAT_arm_linux 1
|
| +#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
|
| +# define PLAT_s390x_linux 1
|
| +#else
|
| /* If we're not compiling for our target platform, don't generate
|
| any inline asms. */
|
| -#if !defined(PLAT_x86_linux) && !defined(PLAT_amd64_linux) \
|
| - && !defined(PLAT_ppc32_linux) && !defined(PLAT_ppc64_linux) \
|
| - && !defined(PLAT_ppc32_aix5) && !defined(PLAT_ppc64_aix5)
|
| # if !defined(NVALGRIND)
|
| # define NVALGRIND 1
|
| # endif
|
| @@ -124,17 +153,31 @@
|
| /* in here of use to end-users -- skip to the next section. */
|
| /* ------------------------------------------------------------------ */
|
|
|
| +/*
|
| + * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
|
| + * request. Accepts both pointers and integers as arguments.
|
| + *
|
| + * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
|
| + * client request and whose value equals the client request result. Accepts
|
| + * both pointers and integers as arguments.
|
| + */
|
| +
|
| +#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
|
| + _zzq_request, _zzq_arg1, _zzq_arg2, \
|
| + _zzq_arg3, _zzq_arg4, _zzq_arg5) \
|
| + { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
|
| + (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
|
| + (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); }
|
| +
|
| #if defined(NVALGRIND)
|
|
|
| /* Define NVALGRIND to completely remove the Valgrind magic sequence
|
| from the compiled code (analogous to NDEBUG's effects on
|
| assert()) */
|
| -#define VALGRIND_DO_CLIENT_REQUEST( \
|
| - _zzq_rlval, _zzq_default, _zzq_request, \
|
| +#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
|
| + _zzq_default, _zzq_request, \
|
| _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
|
| - { \
|
| - (_zzq_rlval) = (_zzq_default); \
|
| - }
|
| + (_zzq_default)
|
|
|
| #else /* ! NVALGRIND */
|
|
|
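To make the difference between the two request styles concrete, here is a hedged usage sketch (not part of the patch). It assumes the standard VG_USERREQ__RUNNING_ON_VALGRIND request code defined elsewhere in this header; argument order follows the macro definitions above.

    /* New expression form: the request's result can be used inline. */
    int on_valgrind = (int)
       VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default off Valgrind */,
                                       VG_USERREQ__RUNNING_ON_VALGRIND,
                                       0, 0, 0, 0, 0);

    /* Old statement form: still supported, and now defined in terms of
       the expression form above. */
    unsigned int res;
    VALGRIND_DO_CLIENT_REQUEST(res, 0, VG_USERREQ__RUNNING_ON_VALGRIND,
                               0, 0, 0, 0, 0);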
| @@ -173,9 +216,10 @@
|
| inline asm stuff to be useful.
|
| */
|
|
|
| -/* ------------------------- x86-linux ------------------------- */
|
| +/* ------------------------- x86-{linux,darwin} ---------------- */
|
|
|
| -#if defined(PLAT_x86_linux)
|
| +#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
|
| + || (defined(PLAT_x86_win32) && defined(__GNUC__))
|
|
|
| typedef
|
| struct {
|
| @@ -187,10 +231,11 @@
|
| "roll $3, %%edi ; roll $13, %%edi\n\t" \
|
| "roll $29, %%edi ; roll $19, %%edi\n\t"
|
|
|
| -#define VALGRIND_DO_CLIENT_REQUEST( \
|
| - _zzq_rlval, _zzq_default, _zzq_request, \
|
| +#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
|
| + _zzq_default, _zzq_request, \
|
| _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
|
| - { volatile unsigned int _zzq_args[6]; \
|
| + __extension__ \
|
| + ({volatile unsigned int _zzq_args[6]; \
|
| volatile unsigned int _zzq_result; \
|
| _zzq_args[0] = (unsigned int)(_zzq_request); \
|
| _zzq_args[1] = (unsigned int)(_zzq_arg1); \
|
| @@ -205,8 +250,8 @@
|
| : "a" (&_zzq_args[0]), "0" (_zzq_default) \
|
| : "cc", "memory" \
|
| ); \
|
| - _zzq_rlval = _zzq_result; \
|
| - }
|
| + _zzq_result; \
|
| + })
|
|
|
| #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
|
| { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
|
| @@ -225,14 +270,80 @@
|
| __SPECIAL_INSTRUCTION_PREAMBLE \
|
| /* call-noredir *%EAX */ \
|
| "xchgl %%edx,%%edx\n\t"
|
| -#endif /* PLAT_x86_linux */
|
| +#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
|
|
|
| -/* ------------------------ amd64-linux ------------------------ */
|
| +/* ------------------------- x86-Win32 ------------------------- */
|
|
|
| -#if defined(PLAT_amd64_linux)
|
| +#if defined(PLAT_x86_win32) && !defined(__GNUC__)
|
|
|
| typedef
|
| struct {
|
| + unsigned int nraddr; /* where's the code? */
|
| + }
|
| + OrigFn;
|
| +
|
| +#if defined(_MSC_VER)
|
| +
|
| +#define __SPECIAL_INSTRUCTION_PREAMBLE \
|
| + __asm rol edi, 3 __asm rol edi, 13 \
|
| + __asm rol edi, 29 __asm rol edi, 19
|
| +
|
| +#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
|
| + _zzq_default, _zzq_request, \
|
| + _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
|
| + valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
|
| + (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
|
| + (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
|
| + (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
|
| +
|
| +static __inline uintptr_t
|
| +valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
|
| + uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
|
| + uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
|
| + uintptr_t _zzq_arg5)
|
| +{
|
| + volatile uintptr_t _zzq_args[6];
|
| + volatile unsigned int _zzq_result;
|
| + _zzq_args[0] = (uintptr_t)(_zzq_request);
|
| + _zzq_args[1] = (uintptr_t)(_zzq_arg1);
|
| + _zzq_args[2] = (uintptr_t)(_zzq_arg2);
|
| + _zzq_args[3] = (uintptr_t)(_zzq_arg3);
|
| + _zzq_args[4] = (uintptr_t)(_zzq_arg4);
|
| + _zzq_args[5] = (uintptr_t)(_zzq_arg5);
|
| + __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
|
| + __SPECIAL_INSTRUCTION_PREAMBLE
|
| + /* %EDX = client_request ( %EAX ) */
|
| + __asm xchg ebx,ebx
|
| + __asm mov _zzq_result, edx
|
| + }
|
| + return _zzq_result;
|
| +}
|
| +
|
| +#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
|
| + { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
|
| + volatile unsigned int __addr; \
|
| + __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
|
| + /* %EAX = guest_NRADDR */ \
|
| + __asm xchg ecx,ecx \
|
| + __asm mov __addr, eax \
|
| + } \
|
| + _zzq_orig->nraddr = __addr; \
|
| + }
|
| +
|
| +#define VALGRIND_CALL_NOREDIR_EAX ERROR
|
| +
|
| +#else
|
| +#error Unsupported compiler.
|
| +#endif
|
| +
|
| +#endif /* PLAT_x86_win32 */
|
| +
|
| +/* ------------------------ amd64-{linux,darwin} --------------- */
|
| +
|
| +#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
|
| +
|
| +typedef
|
| + struct {
|
| uint64_t nraddr; /* where's the code? */
|
| }
|
| OrigFn;
|
| @@ -241,10 +352,11 @@
|
| "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
|
| "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
|
|
|
| -#define VALGRIND_DO_CLIENT_REQUEST( \
|
| - _zzq_rlval, _zzq_default, _zzq_request, \
|
| +#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
|
| + _zzq_default, _zzq_request, \
|
| _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
|
| - { volatile uint64_t _zzq_args[6]; \
|
| + __extension__ \
|
| + ({ volatile uint64_t _zzq_args[6]; \
|
| volatile uint64_t _zzq_result; \
|
| _zzq_args[0] = (uint64_t)(_zzq_request); \
|
| _zzq_args[1] = (uint64_t)(_zzq_arg1); \
|
| @@ -259,8 +371,8 @@
|
| : "a" (&_zzq_args[0]), "0" (_zzq_default) \
|
| : "cc", "memory" \
|
| ); \
|
| - _zzq_rlval = _zzq_result; \
|
| - }
|
| + _zzq_result; \
|
| + })
|
|
|
| #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
|
| { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
|
| @@ -279,7 +391,7 @@
|
| __SPECIAL_INSTRUCTION_PREAMBLE \
|
| /* call-noredir *%RAX */ \
|
| "xchgq %%rdx,%%rdx\n\t"
|
| -#endif /* PLAT_amd64_linux */
|
| +#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
|
|
|
| /* ------------------------ ppc32-linux ------------------------ */
|
|
|
| @@ -295,11 +407,12 @@
|
| "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
|
| "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
|
|
|
| -#define VALGRIND_DO_CLIENT_REQUEST( \
|
| - _zzq_rlval, _zzq_default, _zzq_request, \
|
| +#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
|
| + _zzq_default, _zzq_request, \
|
| _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
|
| \
|
| - { unsigned int _zzq_args[6]; \
|
| + __extension__ \
|
| + ({ unsigned int _zzq_args[6]; \
|
| unsigned int _zzq_result; \
|
| unsigned int* _zzq_ptr; \
|
| _zzq_args[0] = (unsigned int)(_zzq_request); \
|
| @@ -318,8 +431,8 @@
|
| : "=b" (_zzq_result) \
|
| : "b" (_zzq_default), "b" (_zzq_ptr) \
|
| : "cc", "memory", "r3", "r4"); \
|
| - _zzq_rlval = _zzq_result; \
|
| - }
|
| + _zzq_result; \
|
| + })
|
|
|
| #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
|
| { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
|
| @@ -356,11 +469,12 @@
|
| "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
|
| "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
|
|
|
| -#define VALGRIND_DO_CLIENT_REQUEST( \
|
| - _zzq_rlval, _zzq_default, _zzq_request, \
|
| +#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
|
| + _zzq_default, _zzq_request, \
|
| _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
|
| \
|
| - { uint64_t _zzq_args[6]; \
|
| + __extension__ \
|
| + ({ uint64_t _zzq_args[6]; \
|
| register uint64_t _zzq_result __asm__("r3"); \
|
| register uint64_t* _zzq_ptr __asm__("r4"); \
|
| _zzq_args[0] = (uint64_t)(_zzq_request); \
|
| @@ -376,8 +490,8 @@
|
| : "=r" (_zzq_result) \
|
| : "0" (_zzq_default), "r" (_zzq_ptr) \
|
| : "cc", "memory"); \
|
| - _zzq_rlval = _zzq_result; \
|
| - }
|
| + _zzq_result; \
|
| + })
|
|
|
| #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
|
| { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
|
| @@ -407,150 +521,136 @@
|
|
|
| #endif /* PLAT_ppc64_linux */
|
|
|
| -/* ------------------------ ppc32-aix5 ------------------------- */
|
| +/* ------------------------- arm-linux ------------------------- */
|
|
|
| -#if defined(PLAT_ppc32_aix5)
|
| +#if defined(PLAT_arm_linux)
|
|
|
| typedef
|
| struct {
|
| unsigned int nraddr; /* where's the code? */
|
| - unsigned int r2; /* what tocptr do we need? */
|
| }
|
| OrigFn;
|
|
|
| #define __SPECIAL_INSTRUCTION_PREAMBLE \
|
| - "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
|
| - "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
|
| + "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
|
| + "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
|
|
|
| -#define VALGRIND_DO_CLIENT_REQUEST( \
|
| - _zzq_rlval, _zzq_default, _zzq_request, \
|
| +#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
|
| + _zzq_default, _zzq_request, \
|
| _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
|
| \
|
| - { unsigned int _zzq_args[7]; \
|
| - register unsigned int _zzq_result; \
|
| - register unsigned int* _zzq_ptr; \
|
| + __extension__ \
|
| + ({volatile unsigned int _zzq_args[6]; \
|
| + volatile unsigned int _zzq_result; \
|
| _zzq_args[0] = (unsigned int)(_zzq_request); \
|
| _zzq_args[1] = (unsigned int)(_zzq_arg1); \
|
| _zzq_args[2] = (unsigned int)(_zzq_arg2); \
|
| _zzq_args[3] = (unsigned int)(_zzq_arg3); \
|
| _zzq_args[4] = (unsigned int)(_zzq_arg4); \
|
| _zzq_args[5] = (unsigned int)(_zzq_arg5); \
|
| - _zzq_args[6] = (unsigned int)(_zzq_default); \
|
| - _zzq_ptr = _zzq_args; \
|
| - __asm__ volatile("mr 4,%1\n\t" \
|
| - "lwz 3, 24(4)\n\t" \
|
| + __asm__ volatile("mov r3, %1\n\t" /*default*/ \
|
| + "mov r4, %2\n\t" /*ptr*/ \
|
| __SPECIAL_INSTRUCTION_PREAMBLE \
|
| - /* %R3 = client_request ( %R4 ) */ \
|
| - "or 1,1,1\n\t" \
|
| - "mr %0,3" \
|
| - : "=b" (_zzq_result) \
|
| - : "b" (_zzq_ptr) \
|
| - : "r3", "r4", "cc", "memory"); \
|
| - _zzq_rlval = _zzq_result; \
|
| - }
|
| + /* R3 = client_request ( R4 ) */ \
|
| + "orr r10, r10, r10\n\t" \
|
| + "mov %0, r3" /*result*/ \
|
| + : "=r" (_zzq_result) \
|
| + : "r" (_zzq_default), "r" (&_zzq_args[0]) \
|
| + : "cc","memory", "r3", "r4"); \
|
| + _zzq_result; \
|
| + })
|
|
|
| #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
|
| { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
|
| - register unsigned int __addr; \
|
| + unsigned int __addr; \
|
| __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
|
| - /* %R3 = guest_NRADDR */ \
|
| - "or 2,2,2\n\t" \
|
| - "mr %0,3" \
|
| - : "=b" (__addr) \
|
| + /* R3 = guest_NRADDR */ \
|
| + "orr r11, r11, r11\n\t" \
|
| + "mov %0, r3" \
|
| + : "=r" (__addr) \
|
| : \
|
| - : "r3", "cc", "memory" \
|
| + : "cc", "memory", "r3" \
|
| ); \
|
| _zzq_orig->nraddr = __addr; \
|
| - __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
|
| - /* %R3 = guest_NRADDR_GPR2 */ \
|
| - "or 4,4,4\n\t" \
|
| - "mr %0,3" \
|
| - : "=b" (__addr) \
|
| - : \
|
| - : "r3", "cc", "memory" \
|
| - ); \
|
| - _zzq_orig->r2 = __addr; \
|
| }
|
|
|
| -#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| +#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| __SPECIAL_INSTRUCTION_PREAMBLE \
|
| - /* branch-and-link-to-noredir *%R11 */ \
|
| - "or 3,3,3\n\t"
|
| + /* branch-and-link-to-noredir *%R4 */ \
|
| + "orr r12, r12, r12\n\t"
|
|
|
| -#endif /* PLAT_ppc32_aix5 */
|
| +#endif /* PLAT_arm_linux */
|
|
|
| -/* ------------------------ ppc64-aix5 ------------------------- */
|
| +/* ------------------------ s390x-linux ------------------------ */
|
|
|
| -#if defined(PLAT_ppc64_aix5)
|
| +#if defined(PLAT_s390x_linux)
|
|
|
| typedef
|
| - struct {
|
| - uint64_t nraddr; /* where's the code? */
|
| - uint64_t r2; /* what tocptr do we need? */
|
| - }
|
| - OrigFn;
|
| + struct {
|
| + uint64_t nraddr; /* where's the code? */
|
| + }
|
| + OrigFn;
|
|
|
| -#define __SPECIAL_INSTRUCTION_PREAMBLE \
|
| - "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
|
| - "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
|
| +/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
|
| + * code. This detection is implemented in platform specific toIR.c
|
| + * (e.g. VEX/priv/guest_s390_decoder.c).
|
| + */
|
| +#define __SPECIAL_INSTRUCTION_PREAMBLE \
|
| + "lr 15,15\n\t" \
|
| + "lr 1,1\n\t" \
|
| + "lr 2,2\n\t" \
|
| + "lr 3,3\n\t"
|
|
|
| -#define VALGRIND_DO_CLIENT_REQUEST( \
|
| - _zzq_rlval, _zzq_default, _zzq_request, \
|
| - _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
|
| - \
|
| - { uint64_t _zzq_args[7]; \
|
| - register uint64_t _zzq_result; \
|
| - register uint64_t* _zzq_ptr; \
|
| - _zzq_args[0] = (unsigned int long long)(_zzq_request); \
|
| - _zzq_args[1] = (unsigned int long long)(_zzq_arg1); \
|
| - _zzq_args[2] = (unsigned int long long)(_zzq_arg2); \
|
| - _zzq_args[3] = (unsigned int long long)(_zzq_arg3); \
|
| - _zzq_args[4] = (unsigned int long long)(_zzq_arg4); \
|
| - _zzq_args[5] = (unsigned int long long)(_zzq_arg5); \
|
| - _zzq_args[6] = (unsigned int long long)(_zzq_default); \
|
| - _zzq_ptr = _zzq_args; \
|
| - __asm__ volatile("mr 4,%1\n\t" \
|
| - "ld 3, 48(4)\n\t" \
|
| - __SPECIAL_INSTRUCTION_PREAMBLE \
|
| - /* %R3 = client_request ( %R4 ) */ \
|
| - "or 1,1,1\n\t" \
|
| - "mr %0,3" \
|
| - : "=b" (_zzq_result) \
|
| - : "b" (_zzq_ptr) \
|
| - : "r3", "r4", "cc", "memory"); \
|
| - _zzq_rlval = _zzq_result; \
|
| - }
|
| +#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
|
| +#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
|
| +#define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
|
|
|
| -#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
|
| - { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
|
| - register uint64_t __addr; \
|
| - __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
|
| - /* %R3 = guest_NRADDR */ \
|
| - "or 2,2,2\n\t" \
|
| - "mr %0,3" \
|
| - : "=b" (__addr) \
|
| - : \
|
| - : "r3", "cc", "memory" \
|
| - ); \
|
| - _zzq_orig->nraddr = __addr; \
|
| - __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
|
| - /* %R3 = guest_NRADDR_GPR2 */ \
|
| - "or 4,4,4\n\t" \
|
| - "mr %0,3" \
|
| - : "=b" (__addr) \
|
| - : \
|
| - : "r3", "cc", "memory" \
|
| - ); \
|
| - _zzq_orig->r2 = __addr; \
|
| - }
|
| +#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
|
| + _zzq_default, _zzq_request, \
|
| + _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
|
| + __extension__ \
|
| + ({volatile uint64_t _zzq_args[6]; \
|
| + volatile uint64_t _zzq_result; \
|
| + _zzq_args[0] = (uint64_t)(_zzq_request); \
|
| + _zzq_args[1] = (uint64_t)(_zzq_arg1); \
|
| + _zzq_args[2] = (uint64_t)(_zzq_arg2); \
|
| + _zzq_args[3] = (uint64_t)(_zzq_arg3); \
|
| + _zzq_args[4] = (uint64_t)(_zzq_arg4); \
|
| + _zzq_args[5] = (uint64_t)(_zzq_arg5); \
|
| + __asm__ volatile(/* r2 = args */ \
|
| + "lgr 2,%1\n\t" \
|
| + /* r3 = default */ \
|
| + "lgr 3,%2\n\t" \
|
| + __SPECIAL_INSTRUCTION_PREAMBLE \
|
| + __CLIENT_REQUEST_CODE \
|
| + /* results = r3 */ \
|
| + "lgr %0, 3\n\t" \
|
| + : "=d" (_zzq_result) \
|
| + : "a" (&_zzq_args[0]), "0" (_zzq_default) \
|
| + : "cc", "2", "3", "memory" \
|
| + ); \
|
| + _zzq_result; \
|
| + })
|
|
|
| -#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - __SPECIAL_INSTRUCTION_PREAMBLE \
|
| - /* branch-and-link-to-noredir *%R11 */ \
|
| - "or 3,3,3\n\t"
|
| +#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
|
| + { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
|
| + volatile uint64_t __addr; \
|
| + __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
|
| + __GET_NR_CONTEXT_CODE \
|
| + "lgr %0, 3\n\t" \
|
| + : "=a" (__addr) \
|
| + : \
|
| + : "cc", "3", "memory" \
|
| + ); \
|
| + _zzq_orig->nraddr = __addr; \
|
| + }
|
|
|
| -#endif /* PLAT_ppc64_aix5 */
|
| +#define VALGRIND_CALL_NOREDIR_R1 \
|
| + __SPECIAL_INSTRUCTION_PREAMBLE \
|
| + __CALL_NO_REDIR_CODE
|
|
|
| +#endif /* PLAT_s390x_linux */
|
| +
|
| /* Insert assembly code for other platforms here... */
|
|
|
| #endif /* NVALGRIND */
|
| @@ -582,11 +682,15 @@
|
| /* Use these to write the name of your wrapper. NOTE: duplicates
|
| VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. */
|
|
|
| +/* Use an extra level of macroisation so as to ensure the soname/fnname
|
| + args are fully macro-expanded before pasting them together. */
|
| +#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
|
| +
|
| #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
|
| - _vgwZU_##soname##_##fnname
|
| + VG_CONCAT4(_vgwZU_,soname,_,fnname)
|
|
|
| #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
|
| - _vgwZZ_##soname##_##fnname
|
| + VG_CONCAT4(_vgwZZ_,soname,_,fnname)
|
|
|
| /* Use this macro from within a wrapper function to collect the
|
| context (address and possibly other info) of the original function.
|
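As a reminder of how these name-construction macros are used in practice, here is an illustrative wrapper following the Valgrind manual's standard example (not part of the patch): "libcZdsoZa" is the Z-encoded soname "libc.so.*", and VALGRIND_GET_ORIG_FN and CALL_FN_W_W are defined later in this header.

    #include <stdlib.h>

    /* Wraps malloc() in any object whose soname matches libc.so.* */
    void* I_WRAP_SONAME_FNNAME_ZU(libcZdsoZa, malloc)(size_t n)
    {
       OrigFn fn;
       void*  result;
       VALGRIND_GET_ORIG_FN(fn);      /* capture the real malloc     */
       CALL_FN_W_W(result, fn, n);    /* call it without redirection */
       return result;
    }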
| @@ -613,10 +717,26 @@
|
| do { volatile unsigned long _junk; \
|
| CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
|
|
|
| -/* ------------------------- x86-linux ------------------------- */
|
| +#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
|
| + do { volatile unsigned long _junk; \
|
| + CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
|
|
|
| -#if defined(PLAT_x86_linux)
|
| +#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
|
| + do { volatile unsigned long _junk; \
|
| + CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
|
|
|
| +#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
|
| + do { volatile unsigned long _junk; \
|
| + CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
|
| +
|
| +#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
|
| + do { volatile unsigned long _junk; \
|
| + CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
|
| +
|
| +/* ------------------------- x86-{linux,darwin} ---------------- */
|
| +
|
| +#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
|
| +
|
| /* These regs are trashed by the hidden call. No need to mention eax
|
| as gcc can already see that, plus causes gcc to bomb. */
|
| #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
|
| @@ -648,10 +768,11 @@
|
| _argvec[0] = (unsigned long)_orig.nraddr; \
|
| _argvec[1] = (unsigned long)(arg1); \
|
| __asm__ volatile( \
|
| + "subl $12, %%esp\n\t" \
|
| "pushl 4(%%eax)\n\t" \
|
| "movl (%%eax), %%eax\n\t" /* target->%eax */ \
|
| VALGRIND_CALL_NOREDIR_EAX \
|
| - "addl $4, %%esp\n" \
|
| + "addl $16, %%esp\n" \
|
| : /*out*/ "=a" (_res) \
|
| : /*in*/ "a" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| @@ -668,11 +789,12 @@
|
| _argvec[1] = (unsigned long)(arg1); \
|
| _argvec[2] = (unsigned long)(arg2); \
|
| __asm__ volatile( \
|
| + "subl $8, %%esp\n\t" \
|
| "pushl 8(%%eax)\n\t" \
|
| "pushl 4(%%eax)\n\t" \
|
| "movl (%%eax), %%eax\n\t" /* target->%eax */ \
|
| VALGRIND_CALL_NOREDIR_EAX \
|
| - "addl $8, %%esp\n" \
|
| + "addl $16, %%esp\n" \
|
| : /*out*/ "=a" (_res) \
|
| : /*in*/ "a" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| @@ -690,12 +812,13 @@
|
| _argvec[2] = (unsigned long)(arg2); \
|
| _argvec[3] = (unsigned long)(arg3); \
|
| __asm__ volatile( \
|
| + "subl $4, %%esp\n\t" \
|
| "pushl 12(%%eax)\n\t" \
|
| "pushl 8(%%eax)\n\t" \
|
| "pushl 4(%%eax)\n\t" \
|
| "movl (%%eax), %%eax\n\t" /* target->%eax */ \
|
| VALGRIND_CALL_NOREDIR_EAX \
|
| - "addl $12, %%esp\n" \
|
| + "addl $16, %%esp\n" \
|
| : /*out*/ "=a" (_res) \
|
| : /*in*/ "a" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| @@ -740,6 +863,7 @@
|
| _argvec[4] = (unsigned long)(arg4); \
|
| _argvec[5] = (unsigned long)(arg5); \
|
| __asm__ volatile( \
|
| + "subl $12, %%esp\n\t" \
|
| "pushl 20(%%eax)\n\t" \
|
| "pushl 16(%%eax)\n\t" \
|
| "pushl 12(%%eax)\n\t" \
|
| @@ -747,7 +871,7 @@
|
| "pushl 4(%%eax)\n\t" \
|
| "movl (%%eax), %%eax\n\t" /* target->%eax */ \
|
| VALGRIND_CALL_NOREDIR_EAX \
|
| - "addl $20, %%esp\n" \
|
| + "addl $32, %%esp\n" \
|
| : /*out*/ "=a" (_res) \
|
| : /*in*/ "a" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| @@ -768,6 +892,7 @@
|
| _argvec[5] = (unsigned long)(arg5); \
|
| _argvec[6] = (unsigned long)(arg6); \
|
| __asm__ volatile( \
|
| + "subl $8, %%esp\n\t" \
|
| "pushl 24(%%eax)\n\t" \
|
| "pushl 20(%%eax)\n\t" \
|
| "pushl 16(%%eax)\n\t" \
|
| @@ -776,7 +901,7 @@
|
| "pushl 4(%%eax)\n\t" \
|
| "movl (%%eax), %%eax\n\t" /* target->%eax */ \
|
| VALGRIND_CALL_NOREDIR_EAX \
|
| - "addl $24, %%esp\n" \
|
| + "addl $32, %%esp\n" \
|
| : /*out*/ "=a" (_res) \
|
| : /*in*/ "a" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| @@ -799,6 +924,7 @@
|
| _argvec[6] = (unsigned long)(arg6); \
|
| _argvec[7] = (unsigned long)(arg7); \
|
| __asm__ volatile( \
|
| + "subl $4, %%esp\n\t" \
|
| "pushl 28(%%eax)\n\t" \
|
| "pushl 24(%%eax)\n\t" \
|
| "pushl 20(%%eax)\n\t" \
|
| @@ -808,7 +934,7 @@
|
| "pushl 4(%%eax)\n\t" \
|
| "movl (%%eax), %%eax\n\t" /* target->%eax */ \
|
| VALGRIND_CALL_NOREDIR_EAX \
|
| - "addl $28, %%esp\n" \
|
| + "addl $32, %%esp\n" \
|
| : /*out*/ "=a" (_res) \
|
| : /*in*/ "a" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| @@ -867,6 +993,7 @@
|
| _argvec[8] = (unsigned long)(arg8); \
|
| _argvec[9] = (unsigned long)(arg9); \
|
| __asm__ volatile( \
|
| + "subl $12, %%esp\n\t" \
|
| "pushl 36(%%eax)\n\t" \
|
| "pushl 32(%%eax)\n\t" \
|
| "pushl 28(%%eax)\n\t" \
|
| @@ -878,7 +1005,7 @@
|
| "pushl 4(%%eax)\n\t" \
|
| "movl (%%eax), %%eax\n\t" /* target->%eax */ \
|
| VALGRIND_CALL_NOREDIR_EAX \
|
| - "addl $36, %%esp\n" \
|
| + "addl $48, %%esp\n" \
|
| : /*out*/ "=a" (_res) \
|
| : /*in*/ "a" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| @@ -904,6 +1031,7 @@
|
| _argvec[9] = (unsigned long)(arg9); \
|
| _argvec[10] = (unsigned long)(arg10); \
|
| __asm__ volatile( \
|
| + "subl $8, %%esp\n\t" \
|
| "pushl 40(%%eax)\n\t" \
|
| "pushl 36(%%eax)\n\t" \
|
| "pushl 32(%%eax)\n\t" \
|
| @@ -916,7 +1044,7 @@
|
| "pushl 4(%%eax)\n\t" \
|
| "movl (%%eax), %%eax\n\t" /* target->%eax */ \
|
| VALGRIND_CALL_NOREDIR_EAX \
|
| - "addl $40, %%esp\n" \
|
| + "addl $48, %%esp\n" \
|
| : /*out*/ "=a" (_res) \
|
| : /*in*/ "a" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| @@ -944,6 +1072,7 @@
|
| _argvec[10] = (unsigned long)(arg10); \
|
| _argvec[11] = (unsigned long)(arg11); \
|
| __asm__ volatile( \
|
| + "subl $4, %%esp\n\t" \
|
| "pushl 44(%%eax)\n\t" \
|
| "pushl 40(%%eax)\n\t" \
|
| "pushl 36(%%eax)\n\t" \
|
| @@ -957,7 +1086,7 @@
|
| "pushl 4(%%eax)\n\t" \
|
| "movl (%%eax), %%eax\n\t" /* target->%eax */ \
|
| VALGRIND_CALL_NOREDIR_EAX \
|
| - "addl $44, %%esp\n" \
|
| + "addl $48, %%esp\n" \
|
| : /*out*/ "=a" (_res) \
|
| : /*in*/ "a" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| @@ -1008,11 +1137,11 @@
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#endif /* PLAT_x86_linux */
|
| +#endif /* PLAT_x86_linux || PLAT_x86_darwin */
|
|
|
| -/* ------------------------ amd64-linux ------------------------ */
|
| +/* ------------------------ amd64-{linux,darwin} --------------- */
|
|
|
| -#if defined(PLAT_amd64_linux)
|
| +#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
|
|
|
| /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
|
|
|
| @@ -1020,6 +1149,78 @@
|
| #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
|
| "rdi", "r8", "r9", "r10", "r11"
|
|
|
| +/* This is all pretty complex. It's so as to make stack unwinding
|
| + work reliably. See bug 243270. The basic problem is the sub and
|
| + add of 128 of %rsp in all of the following macros. If gcc believes
|
| + the CFA is in %rsp, then unwinding may fail, because what's at the
|
| + CFA is not what gcc "expected" when it constructs the CFIs for the
|
| + places where the macros are instantiated.
|
| +
|
| + But we can't just add a CFI annotation to increase the CFA offset
|
| + by 128, to match the sub of 128 from %rsp, because we don't know
|
| + whether gcc has chosen %rsp as the CFA at that point, or whether it
|
| + has chosen some other register (eg, %rbp). In the latter case,
|
| + adding a CFI annotation to change the CFA offset is simply wrong.
|
| +
|
| + So the solution is to get hold of the CFA using
|
| + __builtin_dwarf_cfa(), put it in a known register, and add a
|
| + CFI annotation to say what the register is. We choose %rbp for
|
| + this (perhaps perversely), because:
|
| +
|
| + (1) %rbp is already subject to unwinding. If a new register was
|
| + chosen then the unwinder would have to unwind it in all stack
|
| + traces, which is expensive, and
|
| +
|
| + (2) %rbp is already subject to precise exception updates in the
|
| + JIT. If a new register was chosen, we'd have to have precise
|
| + exceptions for it too, which reduces performance of the
|
| + generated code.
|
| +
|
| + However .. one extra complication. We can't just whack the result
|
| + of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
|
| + list of trashed registers at the end of the inline assembly
|
| + fragments; gcc won't allow %rbp to appear in that list. Hence
|
| + instead we need to stash %rbp in %r15 for the duration of the asm,
|
| + and say that %r15 is trashed instead. gcc seems happy to go with
|
| + that.
|
| +
|
| + Oh .. and this all needs to be conditionalised so that it is
|
| + unchanged from before this commit, when compiled with older gccs
|
| + that don't support __builtin_dwarf_cfa. Furthermore, since
|
| + this header file is freestanding, it has to be independent of
|
| + config.h, and so the following conditionalisation cannot depend on
|
| + configure time checks.
|
| +
|
| + Although it's not clear from
|
| + 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
|
| + this expression excludes Darwin.
|
| + .cfi directives in Darwin assembly appear to be completely
|
| + different and I haven't investigated how they work.
|
| +
|
| + For even more entertainment value, note we have to use the
|
| + completely undocumented __builtin_dwarf_cfa(), which appears to
|
| + really compute the CFA, whereas __builtin_frame_address(0) claims
|
| + to but actually doesn't. See
|
| + https://bugs.kde.org/show_bug.cgi?id=243270#c47
|
| +*/
|
| +#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
|
| +# define __FRAME_POINTER \
|
| + ,"r"(__builtin_dwarf_cfa())
|
| +# define VALGRIND_CFI_PROLOGUE \
|
| + "movq %%rbp, %%r15\n\t" \
|
| + "movq %2, %%rbp\n\t" \
|
| + ".cfi_remember_state\n\t" \
|
| + ".cfi_def_cfa rbp, 0\n\t"
|
| +# define VALGRIND_CFI_EPILOGUE \
|
| + "movq %%r15, %%rbp\n\t" \
|
| + ".cfi_restore_state\n\t"
|
| +#else
|
| +# define __FRAME_POINTER
|
| +# define VALGRIND_CFI_PROLOGUE
|
| +# define VALGRIND_CFI_EPILOGUE
|
| +#endif
|
| +
|
| +
|
| /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
|
| long) == 8. */
|
|
|
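The prologue/epilogue pair above is easier to read outside the macro backslash-continuation style. The following is a minimal standalone sketch of the same CFA trick (illustrative only, assuming a gcc that defines __GCC_HAVE_DWARF2_CFI_ASM), with the hidden call elided.

    static void cfi_redzone_demo(void)
    {
       __asm__ volatile(
          "movq %%rbp, %%r15\n\t"    /* stash %rbp; gcc refuses it as a clobber */
          "movq %0, %%rbp\n\t"       /* %rbp := the real CFA                    */
          ".cfi_remember_state\n\t"
          ".cfi_def_cfa rbp, 0\n\t"  /* tell the unwinder the CFA is now %rbp   */
          "subq $128, %%rsp\n\t"     /* step over the amd64 red zone            */
          /* ... the hidden call would happen here ...                          */
          "addq $128, %%rsp\n\t"
          "movq %%r15, %%rbp\n\t"    /* put %rbp back                           */
          ".cfi_restore_state\n\t"
          : /* no outputs */
          : "r" (__builtin_dwarf_cfa())
          : "r15", "memory", "cc");
    }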
| @@ -1039,7 +1240,7 @@
|
| redzone, for the duration of the hidden call, to make it safe.
|
|
|
| Probably the same problem afflicts the other redzone-style ABIs too
|
| - (ppc64-linux, ppc32-aix5, ppc64-aix5); but for those, the stack is
|
| + (ppc64-linux); but for those, the stack is
|
| self describing (none of this CFI nonsense) so at least messing
|
| with the stack pointer doesn't give a danger of non-unwindable
|
| stack. */
|
| @@ -1051,13 +1252,15 @@
|
| volatile unsigned long _res; \
|
| _argvec[0] = (unsigned long)_orig.nraddr; \
|
| __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| "subq $128,%%rsp\n\t" \
|
| "movq (%%rax), %%rax\n\t" /* target->%rax */ \
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1070,14 +1273,16 @@
|
| _argvec[0] = (unsigned long)_orig.nraddr; \
|
| _argvec[1] = (unsigned long)(arg1); \
|
| __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| "subq $128,%%rsp\n\t" \
|
| "movq 8(%%rax), %%rdi\n\t" \
|
| "movq (%%rax), %%rax\n\t" /* target->%rax */ \
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1091,15 +1296,17 @@
|
| _argvec[1] = (unsigned long)(arg1); \
|
| _argvec[2] = (unsigned long)(arg2); \
|
| __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| "subq $128,%%rsp\n\t" \
|
| "movq 16(%%rax), %%rsi\n\t" \
|
| "movq 8(%%rax), %%rdi\n\t" \
|
| "movq (%%rax), %%rax\n\t" /* target->%rax */ \
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1114,6 +1321,7 @@
|
| _argvec[2] = (unsigned long)(arg2); \
|
| _argvec[3] = (unsigned long)(arg3); \
|
| __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| "subq $128,%%rsp\n\t" \
|
| "movq 24(%%rax), %%rdx\n\t" \
|
| "movq 16(%%rax), %%rsi\n\t" \
|
| @@ -1121,9 +1329,10 @@
|
| "movq (%%rax), %%rax\n\t" /* target->%rax */ \
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1139,6 +1348,7 @@
|
| _argvec[3] = (unsigned long)(arg3); \
|
| _argvec[4] = (unsigned long)(arg4); \
|
| __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| "subq $128,%%rsp\n\t" \
|
| "movq 32(%%rax), %%rcx\n\t" \
|
| "movq 24(%%rax), %%rdx\n\t" \
|
| @@ -1147,9 +1357,10 @@
|
| "movq (%%rax), %%rax\n\t" /* target->%rax */ \
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1166,6 +1377,7 @@
|
| _argvec[4] = (unsigned long)(arg4); \
|
| _argvec[5] = (unsigned long)(arg5); \
|
| __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| "subq $128,%%rsp\n\t" \
|
| "movq 40(%%rax), %%r8\n\t" \
|
| "movq 32(%%rax), %%rcx\n\t" \
|
| @@ -1175,9 +1387,10 @@
|
| "movq (%%rax), %%rax\n\t" /* target->%rax */ \
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1195,6 +1408,7 @@
|
| _argvec[5] = (unsigned long)(arg5); \
|
| _argvec[6] = (unsigned long)(arg6); \
|
| __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| "subq $128,%%rsp\n\t" \
|
| "movq 48(%%rax), %%r9\n\t" \
|
| "movq 40(%%rax), %%r8\n\t" \
|
| @@ -1203,11 +1417,12 @@
|
| "movq 16(%%rax), %%rsi\n\t" \
|
| "movq 8(%%rax), %%rdi\n\t" \
|
| "movq (%%rax), %%rax\n\t" /* target->%rax */ \
|
| + VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $128,%%rsp\n\t" \
|
| - VALGRIND_CALL_NOREDIR_RAX \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1227,7 +1442,8 @@
|
| _argvec[6] = (unsigned long)(arg6); \
|
| _argvec[7] = (unsigned long)(arg7); \
|
| __asm__ volatile( \
|
| - "subq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "subq $136,%%rsp\n\t" \
|
| "pushq 56(%%rax)\n\t" \
|
| "movq 48(%%rax), %%r9\n\t" \
|
| "movq 40(%%rax), %%r8\n\t" \
|
| @@ -1238,10 +1454,11 @@
|
| "movq (%%rax), %%rax\n\t" /* target->%rax */ \
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $8, %%rsp\n" \
|
| - "addq $128,%%rsp\n\t" \
|
| + "addq $136,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1262,6 +1479,7 @@
|
| _argvec[7] = (unsigned long)(arg7); \
|
| _argvec[8] = (unsigned long)(arg8); \
|
| __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| "subq $128,%%rsp\n\t" \
|
| "pushq 64(%%rax)\n\t" \
|
| "pushq 56(%%rax)\n\t" \
|
| @@ -1275,9 +1493,10 @@
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $16, %%rsp\n" \
|
| "addq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1299,7 +1518,8 @@
|
| _argvec[8] = (unsigned long)(arg8); \
|
| _argvec[9] = (unsigned long)(arg9); \
|
| __asm__ volatile( \
|
| - "subq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "subq $136,%%rsp\n\t" \
|
| "pushq 72(%%rax)\n\t" \
|
| "pushq 64(%%rax)\n\t" \
|
| "pushq 56(%%rax)\n\t" \
|
| @@ -1312,10 +1532,11 @@
|
| "movq (%%rax), %%rax\n\t" /* target->%rax */ \
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $24, %%rsp\n" \
|
| - "addq $128,%%rsp\n\t" \
|
| + "addq $136,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1338,6 +1559,7 @@
|
| _argvec[9] = (unsigned long)(arg9); \
|
| _argvec[10] = (unsigned long)(arg10); \
|
| __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| "subq $128,%%rsp\n\t" \
|
| "pushq 80(%%rax)\n\t" \
|
| "pushq 72(%%rax)\n\t" \
|
| @@ -1353,9 +1575,10 @@
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $32, %%rsp\n" \
|
| "addq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1379,7 +1602,8 @@
|
| _argvec[10] = (unsigned long)(arg10); \
|
| _argvec[11] = (unsigned long)(arg11); \
|
| __asm__ volatile( \
|
| - "subq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "subq $136,%%rsp\n\t" \
|
| "pushq 88(%%rax)\n\t" \
|
| "pushq 80(%%rax)\n\t" \
|
| "pushq 72(%%rax)\n\t" \
|
| @@ -1394,10 +1618,11 @@
|
| "movq (%%rax), %%rax\n\t" /* target->%rax */ \
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $40, %%rsp\n" \
|
| - "addq $128,%%rsp\n\t" \
|
| + "addq $136,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -1422,6 +1647,7 @@
|
| _argvec[11] = (unsigned long)(arg11); \
|
| _argvec[12] = (unsigned long)(arg12); \
|
| __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| "subq $128,%%rsp\n\t" \
|
| "pushq 96(%%rax)\n\t" \
|
| "pushq 88(%%rax)\n\t" \
|
| @@ -1439,14 +1665,15 @@
|
| VALGRIND_CALL_NOREDIR_RAX \
|
| "addq $48, %%rsp\n" \
|
| "addq $128,%%rsp\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| : /*out*/ "=a" (_res) \
|
| - : /*in*/ "a" (&_argvec[0]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#endif /* PLAT_amd64_linux */
|
| +#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
|
|
|
| /* ------------------------ ppc32-linux ------------------------ */
|
|
|
| @@ -2439,54 +2666,28 @@
|
|
|
| #endif /* PLAT_ppc64_linux */
|
|
|
| -/* ------------------------ ppc32-aix5 ------------------------- */
|
| +/* ------------------------- arm-linux ------------------------- */
|
|
|
| -#if defined(PLAT_ppc32_aix5)
|
| +#if defined(PLAT_arm_linux)
|
|
|
| -/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
|
| -
|
| /* These regs are trashed by the hidden call. */
|
| -#define __CALLER_SAVED_REGS \
|
| - "lr", "ctr", "xer", \
|
| - "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
|
| - "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
|
| - "r11", "r12", "r13"
|
| +#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
|
|
|
| -/* Expand the stack frame, copying enough info that unwinding
|
| - still works. Trashes r3. */
|
| -
|
| -#define VG_EXPAND_FRAME_BY_trashes_r3(_n_fr) \
|
| - "addi 1,1,-" #_n_fr "\n\t" \
|
| - "lwz 3," #_n_fr "(1)\n\t" \
|
| - "stw 3,0(1)\n\t"
|
| -
|
| -#define VG_CONTRACT_FRAME_BY(_n_fr) \
|
| - "addi 1,1," #_n_fr "\n\t"
|
| -
|
| -/* These CALL_FN_ macros assume that on ppc32-aix5, sizeof(unsigned
|
| +/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
|
| long) == 4. */
|
|
|
| #define CALL_FN_W_v(lval, orig) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+0]; \
|
| + volatile unsigned long _argvec[1]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "mov %0, r0\n" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| @@ -2495,27 +2696,18 @@
|
| #define CALL_FN_W_W(lval, orig, arg1) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+1]; \
|
| + volatile unsigned long _argvec[2]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "mov %0, r0\n" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
| @@ -2523,28 +2715,19 @@
|
| #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+2]; \
|
| + volatile unsigned long _argvec[3]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "mov %0, r0\n" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| @@ -2553,30 +2736,21 @@
|
| #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+3]; \
|
| + volatile unsigned long _argvec[4]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| + _argvec[3] = (unsigned long)(arg3); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r2, [%1, #12] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "mov %0, r0\n" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| @@ -2585,32 +2759,23 @@
|
| #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+4]; \
|
| + volatile unsigned long _argvec[5]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| + _argvec[3] = (unsigned long)(arg3); \
|
| + _argvec[4] = (unsigned long)(arg4); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
|
| - "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r2, [%1, #12] \n\t" \
|
| + "ldr r3, [%1, #16] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "mov %0, r0" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| @@ -2619,34 +2784,27 @@
|
| #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+5]; \
|
| + volatile unsigned long _argvec[6]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| + _argvec[3] = (unsigned long)(arg3); \
|
| + _argvec[4] = (unsigned long)(arg4); \
|
| + _argvec[5] = (unsigned long)(arg5); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
|
| - "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
|
| - "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #20] \n\t" \
|
| + "push {r0} \n\t" \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r2, [%1, #12] \n\t" \
|
| + "ldr r3, [%1, #16] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "add sp, sp, #4 \n\t" \
|
| + "mov %0, r0" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| @@ -2655,36 +2813,29 @@
|
| #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+6]; \
|
| + volatile unsigned long _argvec[7]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| + _argvec[3] = (unsigned long)(arg3); \
|
| + _argvec[4] = (unsigned long)(arg4); \
|
| + _argvec[5] = (unsigned long)(arg5); \
|
| + _argvec[6] = (unsigned long)(arg6); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
|
| - "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
|
| - "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
|
| - "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #20] \n\t" \
|
| + "ldr r1, [%1, #24] \n\t" \
|
| + "push {r0, r1} \n\t" \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r2, [%1, #12] \n\t" \
|
| + "ldr r3, [%1, #16] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "add sp, sp, #8 \n\t" \
|
| + "mov %0, r0" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| @@ -2694,38 +2845,31 @@
|
| arg7) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+7]; \
|
| + volatile unsigned long _argvec[8]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| + _argvec[3] = (unsigned long)(arg3); \
|
| + _argvec[4] = (unsigned long)(arg4); \
|
| + _argvec[5] = (unsigned long)(arg5); \
|
| + _argvec[6] = (unsigned long)(arg6); \
|
| + _argvec[7] = (unsigned long)(arg7); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
|
| - "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
|
| - "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
|
| - "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
|
| - "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #20] \n\t" \
|
| + "ldr r1, [%1, #24] \n\t" \
|
| + "ldr r2, [%1, #28] \n\t" \
|
| + "push {r0, r1, r2} \n\t" \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r2, [%1, #12] \n\t" \
|
| + "ldr r3, [%1, #16] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "add sp, sp, #12 \n\t" \
|
| + "mov %0, r0" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| @@ -2735,40 +2879,33 @@
|
| arg7,arg8) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+8]; \
|
| + volatile unsigned long _argvec[9]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - _argvec[2+8] = (unsigned long)arg8; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| + _argvec[3] = (unsigned long)(arg3); \
|
| + _argvec[4] = (unsigned long)(arg4); \
|
| + _argvec[5] = (unsigned long)(arg5); \
|
| + _argvec[6] = (unsigned long)(arg6); \
|
| + _argvec[7] = (unsigned long)(arg7); \
|
| + _argvec[8] = (unsigned long)(arg8); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
|
| - "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
|
| - "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
|
| - "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
|
| - "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
|
| - "lwz 10, 32(11)\n\t" /* arg8->r10 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #20] \n\t" \
|
| + "ldr r1, [%1, #24] \n\t" \
|
| + "ldr r2, [%1, #28] \n\t" \
|
| + "ldr r3, [%1, #32] \n\t" \
|
| + "push {r0, r1, r2, r3} \n\t" \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r2, [%1, #12] \n\t" \
|
| + "ldr r3, [%1, #16] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "add sp, sp, #16 \n\t" \
|
| + "mov %0, r0" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| @@ -2778,47 +2915,35 @@
|
| arg7,arg8,arg9) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+9]; \
|
| + volatile unsigned long _argvec[10]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - _argvec[2+8] = (unsigned long)arg8; \
|
| - _argvec[2+9] = (unsigned long)arg9; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| + _argvec[3] = (unsigned long)(arg3); \
|
| + _argvec[4] = (unsigned long)(arg4); \
|
| + _argvec[5] = (unsigned long)(arg5); \
|
| + _argvec[6] = (unsigned long)(arg6); \
|
| + _argvec[7] = (unsigned long)(arg7); \
|
| + _argvec[8] = (unsigned long)(arg8); \
|
| + _argvec[9] = (unsigned long)(arg9); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(64) \
|
| - /* arg9 */ \
|
| - "lwz 3,36(11)\n\t" \
|
| - "stw 3,56(1)\n\t" \
|
| - /* args1-8 */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
|
| - "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
|
| - "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
|
| - "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
|
| - "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
|
| - "lwz 10, 32(11)\n\t" /* arg8->r10 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(64) \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #20] \n\t" \
|
| + "ldr r1, [%1, #24] \n\t" \
|
| + "ldr r2, [%1, #28] \n\t" \
|
| + "ldr r3, [%1, #32] \n\t" \
|
| + "ldr r4, [%1, #36] \n\t" \
|
| + "push {r0, r1, r2, r3, r4} \n\t" \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r2, [%1, #12] \n\t" \
|
| + "ldr r3, [%1, #16] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "add sp, sp, #20 \n\t" \
|
| + "mov %0, r0" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| @@ -2828,740 +2953,614 @@
|
| arg7,arg8,arg9,arg10) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+10]; \
|
| + volatile unsigned long _argvec[11]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - _argvec[2+8] = (unsigned long)arg8; \
|
| - _argvec[2+9] = (unsigned long)arg9; \
|
| - _argvec[2+10] = (unsigned long)arg10; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| + _argvec[3] = (unsigned long)(arg3); \
|
| + _argvec[4] = (unsigned long)(arg4); \
|
| + _argvec[5] = (unsigned long)(arg5); \
|
| + _argvec[6] = (unsigned long)(arg6); \
|
| + _argvec[7] = (unsigned long)(arg7); \
|
| + _argvec[8] = (unsigned long)(arg8); \
|
| + _argvec[9] = (unsigned long)(arg9); \
|
| + _argvec[10] = (unsigned long)(arg10); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(64) \
|
| - /* arg10 */ \
|
| - "lwz 3,40(11)\n\t" \
|
| - "stw 3,60(1)\n\t" \
|
| - /* arg9 */ \
|
| - "lwz 3,36(11)\n\t" \
|
| - "stw 3,56(1)\n\t" \
|
| - /* args1-8 */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
|
| - "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
|
| - "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
|
| - "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
|
| - "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
|
| - "lwz 10, 32(11)\n\t" /* arg8->r10 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(64) \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #40] \n\t" \
|
| + "push {r0} \n\t" \
|
| + "ldr r0, [%1, #20] \n\t" \
|
| + "ldr r1, [%1, #24] \n\t" \
|
| + "ldr r2, [%1, #28] \n\t" \
|
| + "ldr r3, [%1, #32] \n\t" \
|
| + "ldr r4, [%1, #36] \n\t" \
|
| + "push {r0, r1, r2, r3, r4} \n\t" \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r2, [%1, #12] \n\t" \
|
| + "ldr r3, [%1, #16] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "add sp, sp, #24 \n\t" \
|
| + "mov %0, r0" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
|
| - arg7,arg8,arg9,arg10,arg11) \
|
| +#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
|
| + arg6,arg7,arg8,arg9,arg10, \
|
| + arg11) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+11]; \
|
| + volatile unsigned long _argvec[12]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - _argvec[2+8] = (unsigned long)arg8; \
|
| - _argvec[2+9] = (unsigned long)arg9; \
|
| - _argvec[2+10] = (unsigned long)arg10; \
|
| - _argvec[2+11] = (unsigned long)arg11; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| + _argvec[3] = (unsigned long)(arg3); \
|
| + _argvec[4] = (unsigned long)(arg4); \
|
| + _argvec[5] = (unsigned long)(arg5); \
|
| + _argvec[6] = (unsigned long)(arg6); \
|
| + _argvec[7] = (unsigned long)(arg7); \
|
| + _argvec[8] = (unsigned long)(arg8); \
|
| + _argvec[9] = (unsigned long)(arg9); \
|
| + _argvec[10] = (unsigned long)(arg10); \
|
| + _argvec[11] = (unsigned long)(arg11); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(72) \
|
| - /* arg11 */ \
|
| - "lwz 3,44(11)\n\t" \
|
| - "stw 3,64(1)\n\t" \
|
| - /* arg10 */ \
|
| - "lwz 3,40(11)\n\t" \
|
| - "stw 3,60(1)\n\t" \
|
| - /* arg9 */ \
|
| - "lwz 3,36(11)\n\t" \
|
| - "stw 3,56(1)\n\t" \
|
| - /* args1-8 */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
|
| - "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
|
| - "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
|
| - "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
|
| - "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
|
| - "lwz 10, 32(11)\n\t" /* arg8->r10 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(72) \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #40] \n\t" \
|
| + "ldr r1, [%1, #44] \n\t" \
|
| + "push {r0, r1} \n\t" \
|
| + "ldr r0, [%1, #20] \n\t" \
|
| + "ldr r1, [%1, #24] \n\t" \
|
| + "ldr r2, [%1, #28] \n\t" \
|
| + "ldr r3, [%1, #32] \n\t" \
|
| + "ldr r4, [%1, #36] \n\t" \
|
| + "push {r0, r1, r2, r3, r4} \n\t" \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r2, [%1, #12] \n\t" \
|
| + "ldr r3, [%1, #16] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "add sp, sp, #28 \n\t" \
|
| + "mov %0, r0" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
|
| - arg7,arg8,arg9,arg10,arg11,arg12) \
|
| +#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
|
| + arg6,arg7,arg8,arg9,arg10, \
|
| + arg11,arg12) \
|
| do { \
|
| volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+12]; \
|
| + volatile unsigned long _argvec[13]; \
|
| volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - _argvec[2+8] = (unsigned long)arg8; \
|
| - _argvec[2+9] = (unsigned long)arg9; \
|
| - _argvec[2+10] = (unsigned long)arg10; \
|
| - _argvec[2+11] = (unsigned long)arg11; \
|
| - _argvec[2+12] = (unsigned long)arg12; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)(arg1); \
|
| + _argvec[2] = (unsigned long)(arg2); \
|
| + _argvec[3] = (unsigned long)(arg3); \
|
| + _argvec[4] = (unsigned long)(arg4); \
|
| + _argvec[5] = (unsigned long)(arg5); \
|
| + _argvec[6] = (unsigned long)(arg6); \
|
| + _argvec[7] = (unsigned long)(arg7); \
|
| + _argvec[8] = (unsigned long)(arg8); \
|
| + _argvec[9] = (unsigned long)(arg9); \
|
| + _argvec[10] = (unsigned long)(arg10); \
|
| + _argvec[11] = (unsigned long)(arg11); \
|
| + _argvec[12] = (unsigned long)(arg12); \
|
| __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "stw 2,-8(11)\n\t" /* save tocptr */ \
|
| - "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(72) \
|
| - /* arg12 */ \
|
| - "lwz 3,48(11)\n\t" \
|
| - "stw 3,68(1)\n\t" \
|
| - /* arg11 */ \
|
| - "lwz 3,44(11)\n\t" \
|
| - "stw 3,64(1)\n\t" \
|
| - /* arg10 */ \
|
| - "lwz 3,40(11)\n\t" \
|
| - "stw 3,60(1)\n\t" \
|
| - /* arg9 */ \
|
| - "lwz 3,36(11)\n\t" \
|
| - "stw 3,56(1)\n\t" \
|
| - /* args1-8 */ \
|
| - "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
|
| - "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
|
| - "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
|
| - "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
|
| - "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
|
| - "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
|
| - "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
|
| - "lwz 10, 32(11)\n\t" /* arg8->r10 */ \
|
| - "lwz 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "lwz 2,-8(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(72) \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| + "ldr r0, [%1, #40] \n\t" \
|
| + "ldr r1, [%1, #44] \n\t" \
|
| + "ldr r2, [%1, #48] \n\t" \
|
| + "push {r0, r1, r2} \n\t" \
|
| + "ldr r0, [%1, #20] \n\t" \
|
| + "ldr r1, [%1, #24] \n\t" \
|
| + "ldr r2, [%1, #28] \n\t" \
|
| + "ldr r3, [%1, #32] \n\t" \
|
| + "ldr r4, [%1, #36] \n\t" \
|
| + "push {r0, r1, r2, r3, r4} \n\t" \
|
| + "ldr r0, [%1, #4] \n\t" \
|
| + "ldr r1, [%1, #8] \n\t" \
|
| + "ldr r2, [%1, #12] \n\t" \
|
| + "ldr r3, [%1, #16] \n\t" \
|
| + "ldr r4, [%1] \n\t" /* target->r4 */ \
|
| + VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
|
| + "add sp, sp, #32 \n\t" \
|
| + "mov %0, r0" \
|
| : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| + : /*in*/ "0" (&_argvec[0]) \
|
| : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| ); \
|
| lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#endif /* PLAT_ppc32_aix5 */
|
| +#endif /* PLAT_arm_linux */
|
|
|
| -/* ------------------------ ppc64-aix5 ------------------------- */
|
| +/* ------------------------- s390x-linux ------------------------- */
|
|
|
| -#if defined(PLAT_ppc64_aix5)
|
| +#if defined(PLAT_s390x_linux)
|
|
|
| -/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
|
| +/* Similar workaround to the one used on amd64 (see above), but here we
|
| +   use r11 as the frame pointer and save the old r11 in r7.  r11 might
|
| +   be used for argvec, so we copy argvec into r1, since r1 is clobbered
|
| +   after the call anyway. */
|
| +#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
|
| +# define __FRAME_POINTER \
|
| + ,"d"(__builtin_dwarf_cfa())
|
| +# define VALGRIND_CFI_PROLOGUE \
|
| + ".cfi_remember_state\n\t" \
|
| + "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
|
| + "lgr 7,11\n\t" \
|
| + "lgr 11,%2\n\t" \
|
| + ".cfi_def_cfa r11, 0\n\t"
|
| +# define VALGRIND_CFI_EPILOGUE \
|
| + "lgr 11, 7\n\t" \
|
| + ".cfi_restore_state\n\t"
|
| +#else
|
| +# define __FRAME_POINTER
|
| +# define VALGRIND_CFI_PROLOGUE \
|
| + "lgr 1,%1\n\t"
|
| +# define VALGRIND_CFI_EPILOGUE
|
| +#endif
|
|
|
| -/* These regs are trashed by the hidden call. */
|
| -#define __CALLER_SAVED_REGS \
|
| - "lr", "ctr", "xer", \
|
| - "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
|
| - "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
|
| - "r11", "r12", "r13"
|
|
|
| -/* Expand the stack frame, copying enough info that unwinding
|
| - still works. Trashes r3. */
|
|
|
| -#define VG_EXPAND_FRAME_BY_trashes_r3(_n_fr) \
|
| - "addi 1,1,-" #_n_fr "\n\t" \
|
| - "ld 3," #_n_fr "(1)\n\t" \
|
| - "std 3,0(1)\n\t"
|
|
|
| -#define VG_CONTRACT_FRAME_BY(_n_fr) \
|
| - "addi 1,1," #_n_fr "\n\t"
|
| +/* These regs are trashed by the hidden call. Note that we overwrite
|
| + r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
|
| + function a proper return address. All others are ABI defined call
|
| + clobbers. */
|
| +#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
|
| + "f0","f1","f2","f3","f4","f5","f6","f7"
|
|
|
| -/* These CALL_FN_ macros assume that on ppc64-aix5, sizeof(unsigned
|
| - long) == 8. */
|
|
|
| -#define CALL_FN_W_v(lval, orig) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+0]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_v(lval, orig) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[1]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-160\n\t" \
|
| + "lg 1, 0(1)\n\t" /* target->r1 */ \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,160\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_W(lval, orig, arg1) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+1]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +/* The call ABI has the arguments in r2-r6 and on the stack. */
|
| +#define CALL_FN_W_W(lval, orig, arg1) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[2]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-160\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,160\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
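
For context, a minimal sketch of how client code typically drives these
CALL_FN_* macros through the function-wrapping support defined elsewhere in
this header (OrigFn, VALGRIND_GET_ORIG_FN, I_WRAP_SONAME_FNNAME_ZU). The
wrapped function `foo` and its one-int signature are hypothetical, and the
NONE tag is assumed to match objects without a soname.

    #include "valgrind.h"

    /* Hedged sketch: wrap a hypothetical `int foo(int)` from an object
       with no soname.  The wrapper fetches the address of the real foo
       and calls it through the platform's CALL_FN_W_W defined above. */
    int I_WRAP_SONAME_FNNAME_ZU(NONE, foo)(int x)
    {
       int    result;
       OrigFn fn;
       VALGRIND_GET_ORIG_FN(fn);     /* address of the uninstrumented foo */
       CALL_FN_W_W(result, fn, x);   /* one word-sized arg, word result   */
       return result;
    }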
|
|
|
| -#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+2]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[3]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-160\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,160\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+3]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 5, 24(11)\n\t" /* arg3->r5 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[4]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + _argvec[3] = (unsigned long)arg3; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-160\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 4,24(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,160\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+4]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 5, 24(11)\n\t" /* arg3->r5 */ \
|
| - "ld 6, 32(11)\n\t" /* arg4->r6 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[5]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + _argvec[3] = (unsigned long)arg3; \
|
| + _argvec[4] = (unsigned long)arg4; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-160\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 4,24(1)\n\t" \
|
| + "lg 5,32(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,160\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+5]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 5, 24(11)\n\t" /* arg3->r5 */ \
|
| - "ld 6, 32(11)\n\t" /* arg4->r6 */ \
|
| - "ld 7, 40(11)\n\t" /* arg5->r7 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[6]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + _argvec[3] = (unsigned long)arg3; \
|
| + _argvec[4] = (unsigned long)arg4; \
|
| + _argvec[5] = (unsigned long)arg5; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-160\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 4,24(1)\n\t" \
|
| + "lg 5,32(1)\n\t" \
|
| + "lg 6,40(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,160\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+6]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 5, 24(11)\n\t" /* arg3->r5 */ \
|
| - "ld 6, 32(11)\n\t" /* arg4->r6 */ \
|
| - "ld 7, 40(11)\n\t" /* arg5->r7 */ \
|
| - "ld 8, 48(11)\n\t" /* arg6->r8 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
|
| + arg6) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[7]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + _argvec[3] = (unsigned long)arg3; \
|
| + _argvec[4] = (unsigned long)arg4; \
|
| + _argvec[5] = (unsigned long)arg5; \
|
| + _argvec[6] = (unsigned long)arg6; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-168\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 4,24(1)\n\t" \
|
| + "lg 5,32(1)\n\t" \
|
| + "lg 6,40(1)\n\t" \
|
| + "mvc 160(8,15), 48(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,168\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
|
| - arg7) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+7]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 5, 24(11)\n\t" /* arg3->r5 */ \
|
| - "ld 6, 32(11)\n\t" /* arg4->r6 */ \
|
| - "ld 7, 40(11)\n\t" /* arg5->r7 */ \
|
| - "ld 8, 48(11)\n\t" /* arg6->r8 */ \
|
| - "ld 9, 56(11)\n\t" /* arg7->r9 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
|
| + arg6, arg7) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[8]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + _argvec[3] = (unsigned long)arg3; \
|
| + _argvec[4] = (unsigned long)arg4; \
|
| + _argvec[5] = (unsigned long)arg5; \
|
| + _argvec[6] = (unsigned long)arg6; \
|
| + _argvec[7] = (unsigned long)arg7; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-176\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 4,24(1)\n\t" \
|
| + "lg 5,32(1)\n\t" \
|
| + "lg 6,40(1)\n\t" \
|
| + "mvc 160(8,15), 48(1)\n\t" \
|
| + "mvc 168(8,15), 56(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,176\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
|
| - arg7,arg8) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+8]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - _argvec[2+8] = (unsigned long)arg8; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 5, 24(11)\n\t" /* arg3->r5 */ \
|
| - "ld 6, 32(11)\n\t" /* arg4->r6 */ \
|
| - "ld 7, 40(11)\n\t" /* arg5->r7 */ \
|
| - "ld 8, 48(11)\n\t" /* arg6->r8 */ \
|
| - "ld 9, 56(11)\n\t" /* arg7->r9 */ \
|
| - "ld 10, 64(11)\n\t" /* arg8->r10 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
|
| + arg6, arg7, arg8) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[9]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + _argvec[3] = (unsigned long)arg3; \
|
| + _argvec[4] = (unsigned long)arg4; \
|
| + _argvec[5] = (unsigned long)arg5; \
|
| + _argvec[6] = (unsigned long)arg6; \
|
| + _argvec[7] = (unsigned long)arg7; \
|
| + _argvec[8] = (unsigned long)arg8; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-184\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 4,24(1)\n\t" \
|
| + "lg 5,32(1)\n\t" \
|
| + "lg 6,40(1)\n\t" \
|
| + "mvc 160(8,15), 48(1)\n\t" \
|
| + "mvc 168(8,15), 56(1)\n\t" \
|
| + "mvc 176(8,15), 64(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,184\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
|
| - arg7,arg8,arg9) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+9]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - _argvec[2+8] = (unsigned long)arg8; \
|
| - _argvec[2+9] = (unsigned long)arg9; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(128) \
|
| - /* arg9 */ \
|
| - "ld 3,72(11)\n\t" \
|
| - "std 3,112(1)\n\t" \
|
| - /* args1-8 */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 5, 24(11)\n\t" /* arg3->r5 */ \
|
| - "ld 6, 32(11)\n\t" /* arg4->r6 */ \
|
| - "ld 7, 40(11)\n\t" /* arg5->r7 */ \
|
| - "ld 8, 48(11)\n\t" /* arg6->r8 */ \
|
| - "ld 9, 56(11)\n\t" /* arg7->r9 */ \
|
| - "ld 10, 64(11)\n\t" /* arg8->r10 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(128) \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
|
| + arg6, arg7, arg8, arg9) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[10]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + _argvec[3] = (unsigned long)arg3; \
|
| + _argvec[4] = (unsigned long)arg4; \
|
| + _argvec[5] = (unsigned long)arg5; \
|
| + _argvec[6] = (unsigned long)arg6; \
|
| + _argvec[7] = (unsigned long)arg7; \
|
| + _argvec[8] = (unsigned long)arg8; \
|
| + _argvec[9] = (unsigned long)arg9; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-192\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 4,24(1)\n\t" \
|
| + "lg 5,32(1)\n\t" \
|
| + "lg 6,40(1)\n\t" \
|
| + "mvc 160(8,15), 48(1)\n\t" \
|
| + "mvc 168(8,15), 56(1)\n\t" \
|
| + "mvc 176(8,15), 64(1)\n\t" \
|
| + "mvc 184(8,15), 72(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,192\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
|
| - arg7,arg8,arg9,arg10) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+10]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - _argvec[2+8] = (unsigned long)arg8; \
|
| - _argvec[2+9] = (unsigned long)arg9; \
|
| - _argvec[2+10] = (unsigned long)arg10; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(128) \
|
| - /* arg10 */ \
|
| - "ld 3,80(11)\n\t" \
|
| - "std 3,120(1)\n\t" \
|
| - /* arg9 */ \
|
| - "ld 3,72(11)\n\t" \
|
| - "std 3,112(1)\n\t" \
|
| - /* args1-8 */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 5, 24(11)\n\t" /* arg3->r5 */ \
|
| - "ld 6, 32(11)\n\t" /* arg4->r6 */ \
|
| - "ld 7, 40(11)\n\t" /* arg5->r7 */ \
|
| - "ld 8, 48(11)\n\t" /* arg6->r8 */ \
|
| - "ld 9, 56(11)\n\t" /* arg7->r9 */ \
|
| - "ld 10, 64(11)\n\t" /* arg8->r10 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(128) \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
|
| + arg6, arg7, arg8, arg9, arg10) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[11]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + _argvec[3] = (unsigned long)arg3; \
|
| + _argvec[4] = (unsigned long)arg4; \
|
| + _argvec[5] = (unsigned long)arg5; \
|
| + _argvec[6] = (unsigned long)arg6; \
|
| + _argvec[7] = (unsigned long)arg7; \
|
| + _argvec[8] = (unsigned long)arg8; \
|
| + _argvec[9] = (unsigned long)arg9; \
|
| + _argvec[10] = (unsigned long)arg10; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-200\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 4,24(1)\n\t" \
|
| + "lg 5,32(1)\n\t" \
|
| + "lg 6,40(1)\n\t" \
|
| + "mvc 160(8,15), 48(1)\n\t" \
|
| + "mvc 168(8,15), 56(1)\n\t" \
|
| + "mvc 176(8,15), 64(1)\n\t" \
|
| + "mvc 184(8,15), 72(1)\n\t" \
|
| + "mvc 192(8,15), 80(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,200\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
|
| - arg7,arg8,arg9,arg10,arg11) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+11]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - _argvec[2+8] = (unsigned long)arg8; \
|
| - _argvec[2+9] = (unsigned long)arg9; \
|
| - _argvec[2+10] = (unsigned long)arg10; \
|
| - _argvec[2+11] = (unsigned long)arg11; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(144) \
|
| - /* arg11 */ \
|
| - "ld 3,88(11)\n\t" \
|
| - "std 3,128(1)\n\t" \
|
| - /* arg10 */ \
|
| - "ld 3,80(11)\n\t" \
|
| - "std 3,120(1)\n\t" \
|
| - /* arg9 */ \
|
| - "ld 3,72(11)\n\t" \
|
| - "std 3,112(1)\n\t" \
|
| - /* args1-8 */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 5, 24(11)\n\t" /* arg3->r5 */ \
|
| - "ld 6, 32(11)\n\t" /* arg4->r6 */ \
|
| - "ld 7, 40(11)\n\t" /* arg5->r7 */ \
|
| - "ld 8, 48(11)\n\t" /* arg6->r8 */ \
|
| - "ld 9, 56(11)\n\t" /* arg7->r9 */ \
|
| - "ld 10, 64(11)\n\t" /* arg8->r10 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(144) \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
|
| + arg6, arg7, arg8, arg9, arg10, arg11) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[12]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + _argvec[3] = (unsigned long)arg3; \
|
| + _argvec[4] = (unsigned long)arg4; \
|
| + _argvec[5] = (unsigned long)arg5; \
|
| + _argvec[6] = (unsigned long)arg6; \
|
| + _argvec[7] = (unsigned long)arg7; \
|
| + _argvec[8] = (unsigned long)arg8; \
|
| + _argvec[9] = (unsigned long)arg9; \
|
| + _argvec[10] = (unsigned long)arg10; \
|
| + _argvec[11] = (unsigned long)arg11; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-208\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 4,24(1)\n\t" \
|
| + "lg 5,32(1)\n\t" \
|
| + "lg 6,40(1)\n\t" \
|
| + "mvc 160(8,15), 48(1)\n\t" \
|
| + "mvc 168(8,15), 56(1)\n\t" \
|
| + "mvc 176(8,15), 64(1)\n\t" \
|
| + "mvc 184(8,15), 72(1)\n\t" \
|
| + "mvc 192(8,15), 80(1)\n\t" \
|
| + "mvc 200(8,15), 88(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,208\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
|
| - arg7,arg8,arg9,arg10,arg11,arg12) \
|
| - do { \
|
| - volatile OrigFn _orig = (orig); \
|
| - volatile unsigned long _argvec[3+12]; \
|
| - volatile unsigned long _res; \
|
| - /* _argvec[0] holds current r2 across the call */ \
|
| - _argvec[1] = (unsigned long)_orig.r2; \
|
| - _argvec[2] = (unsigned long)_orig.nraddr; \
|
| - _argvec[2+1] = (unsigned long)arg1; \
|
| - _argvec[2+2] = (unsigned long)arg2; \
|
| - _argvec[2+3] = (unsigned long)arg3; \
|
| - _argvec[2+4] = (unsigned long)arg4; \
|
| - _argvec[2+5] = (unsigned long)arg5; \
|
| - _argvec[2+6] = (unsigned long)arg6; \
|
| - _argvec[2+7] = (unsigned long)arg7; \
|
| - _argvec[2+8] = (unsigned long)arg8; \
|
| - _argvec[2+9] = (unsigned long)arg9; \
|
| - _argvec[2+10] = (unsigned long)arg10; \
|
| - _argvec[2+11] = (unsigned long)arg11; \
|
| - _argvec[2+12] = (unsigned long)arg12; \
|
| - __asm__ volatile( \
|
| - "mr 11,%1\n\t" \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(512) \
|
| - "std 2,-16(11)\n\t" /* save tocptr */ \
|
| - "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
|
| - VG_EXPAND_FRAME_BY_trashes_r3(144) \
|
| - /* arg12 */ \
|
| - "ld 3,96(11)\n\t" \
|
| - "std 3,136(1)\n\t" \
|
| - /* arg11 */ \
|
| - "ld 3,88(11)\n\t" \
|
| - "std 3,128(1)\n\t" \
|
| - /* arg10 */ \
|
| - "ld 3,80(11)\n\t" \
|
| - "std 3,120(1)\n\t" \
|
| - /* arg9 */ \
|
| - "ld 3,72(11)\n\t" \
|
| - "std 3,112(1)\n\t" \
|
| - /* args1-8 */ \
|
| - "ld 3, 8(11)\n\t" /* arg1->r3 */ \
|
| - "ld 4, 16(11)\n\t" /* arg2->r4 */ \
|
| - "ld 5, 24(11)\n\t" /* arg3->r5 */ \
|
| - "ld 6, 32(11)\n\t" /* arg4->r6 */ \
|
| - "ld 7, 40(11)\n\t" /* arg5->r7 */ \
|
| - "ld 8, 48(11)\n\t" /* arg6->r8 */ \
|
| - "ld 9, 56(11)\n\t" /* arg7->r9 */ \
|
| - "ld 10, 64(11)\n\t" /* arg8->r10 */ \
|
| - "ld 11, 0(11)\n\t" /* target->r11 */ \
|
| - VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
|
| - "mr 11,%1\n\t" \
|
| - "mr %0,3\n\t" \
|
| - "ld 2,-16(11)\n\t" /* restore tocptr */ \
|
| - VG_CONTRACT_FRAME_BY(144) \
|
| - VG_CONTRACT_FRAME_BY(512) \
|
| - : /*out*/ "=r" (_res) \
|
| - : /*in*/ "r" (&_argvec[2]) \
|
| - : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
|
| - ); \
|
| - lval = (__typeof__(lval)) _res; \
|
| +#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
|
| + arg6, arg7, arg8, arg9, arg10, arg11, arg12) \
|
| + do { \
|
| + volatile OrigFn _orig = (orig); \
|
| + volatile unsigned long _argvec[13]; \
|
| + volatile unsigned long _res; \
|
| + _argvec[0] = (unsigned long)_orig.nraddr; \
|
| + _argvec[1] = (unsigned long)arg1; \
|
| + _argvec[2] = (unsigned long)arg2; \
|
| + _argvec[3] = (unsigned long)arg3; \
|
| + _argvec[4] = (unsigned long)arg4; \
|
| + _argvec[5] = (unsigned long)arg5; \
|
| + _argvec[6] = (unsigned long)arg6; \
|
| + _argvec[7] = (unsigned long)arg7; \
|
| + _argvec[8] = (unsigned long)arg8; \
|
| + _argvec[9] = (unsigned long)arg9; \
|
| + _argvec[10] = (unsigned long)arg10; \
|
| + _argvec[11] = (unsigned long)arg11; \
|
| + _argvec[12] = (unsigned long)arg12; \
|
| + __asm__ volatile( \
|
| + VALGRIND_CFI_PROLOGUE \
|
| + "aghi 15,-216\n\t" \
|
| + "lg 2, 8(1)\n\t" \
|
| + "lg 3,16(1)\n\t" \
|
| + "lg 4,24(1)\n\t" \
|
| + "lg 5,32(1)\n\t" \
|
| + "lg 6,40(1)\n\t" \
|
| + "mvc 160(8,15), 48(1)\n\t" \
|
| + "mvc 168(8,15), 56(1)\n\t" \
|
| + "mvc 176(8,15), 64(1)\n\t" \
|
| + "mvc 184(8,15), 72(1)\n\t" \
|
| + "mvc 192(8,15), 80(1)\n\t" \
|
| + "mvc 200(8,15), 88(1)\n\t" \
|
| + "mvc 208(8,15), 96(1)\n\t" \
|
| + "lg 1, 0(1)\n\t" \
|
| + VALGRIND_CALL_NOREDIR_R1 \
|
| + "lgr %0, 2\n\t" \
|
| + "aghi 15,216\n\t" \
|
| + VALGRIND_CFI_EPILOGUE \
|
| + : /*out*/ "=d" (_res) \
|
| + : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
|
| + : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
|
| + ); \
|
| + lval = (__typeof__(lval)) _res; \
|
| } while (0)
|
|
|
| -#endif /* PLAT_ppc64_aix5 */
|
|
|
| +#endif /* PLAT_s390x_linux */
|
|
|
| +
|
| /* ------------------------------------------------------------------ */
|
| /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
|
| /* */
|
| @@ -3605,9 +3604,14 @@
|
| errors. */
|
| VG_USERREQ__COUNT_ERRORS = 0x1201,
|
|
|
| + /* Allows a string (a gdb monitor command) to be passed to the tool.
|
| + Used for interaction with vgdb/gdb. */
|
| + VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
|
| +
|
| /* These are useful and can be interpreted by any tool that
|
| tracks malloc() et al, by using vg_replace_malloc.c. */
|
| VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
|
| + VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
|
| VG_USERREQ__FREELIKE_BLOCK = 0x1302,
|
| /* Memory pool support. */
|
| VG_USERREQ__CREATE_MEMPOOL = 0x1303,
|
| @@ -3620,30 +3624,43 @@
|
| VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
|
|
|
| /* Allow printfs to valgrind log. */
|
| + /* The first two pass the va_list argument by value, which
|
| + assumes it is the same size as or smaller than a UWord,
|
| + which generally isn't the case. Hence they are deprecated.
|
| + The second two pass the vargs by reference and so are
|
| + immune to this problem. */
|
| + /* both :: char* fmt, va_list vargs (DEPRECATED) */
|
| VG_USERREQ__PRINTF = 0x1401,
|
| VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
|
| + /* both :: char* fmt, va_list* vargs */
|
| + VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
|
| + VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
|
|
|
| /* Stack support. */
|
| VG_USERREQ__STACK_REGISTER = 0x1501,
|
| VG_USERREQ__STACK_DEREGISTER = 0x1502,
|
| - VG_USERREQ__STACK_CHANGE = 0x1503
|
| + VG_USERREQ__STACK_CHANGE = 0x1503,
|
| +
|
| + /* Wine support */
|
| + VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
|
| +
|
| + /* Querying of debug info. */
|
| + VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701
|
| } Vg_ClientRequest;
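
The new VG_USERREQ__GDB_MONITOR_COMMAND request above can be driven directly
with VALGRIND_DO_CLIENT_REQUEST_EXPR. A hedged sketch follows: the wrapper
name is invented here, and it assumes the command string is passed in the
first argument word and that the result is zero when the tool does not
recognise the command.

    /* Hypothetical convenience wrapper around the new request. */
    #define MY_GDB_MONITOR_COMMAND(zzcmd)                                  \
        VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default */,                   \
                                        VG_USERREQ__GDB_MONITOR_COMMAND,   \
                                        (zzcmd), 0, 0, 0, 0)

    /* e.g. under Memcheck: MY_GDB_MONITOR_COMMAND("leak_check summary"); */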
|
|
|
| #if !defined(__GNUC__)
|
| # define __extension__ /* */
|
| #endif
|
|
|
| +
|
| /* Returns the number of Valgrinds this code is running under. That
|
| is, 0 if running natively, 1 if running under Valgrind, 2 if
|
| running under Valgrind which is running under another Valgrind,
|
| etc. */
|
| -#define RUNNING_ON_VALGRIND __extension__ \
|
| - ({unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0 /* if not */, \
|
| - VG_USERREQ__RUNNING_ON_VALGRIND, \
|
| - 0, 0, 0, 0, 0); \
|
| - _qzz_res; \
|
| - })
|
| +#define RUNNING_ON_VALGRIND \
|
| + (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
|
| + VG_USERREQ__RUNNING_ON_VALGRIND, \
|
| + 0, 0, 0, 0, 0) \
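
A minimal usage sketch for RUNNING_ON_VALGRIND; the helper function named
below is hypothetical.

    #include "valgrind.h"

    void enable_internal_consistency_checks(void);   /* hypothetical helper */

    void maybe_enable_extra_checks(void)
    {
        /* RUNNING_ON_VALGRIND is 0 when running natively, so the extra
           (slow) checking is only switched on under the simulator. */
        if (RUNNING_ON_VALGRIND > 0)
            enable_internal_consistency_checks();
    }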
|
|
|
|
|
| /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
|
| @@ -3651,58 +3668,95 @@
|
| since it provides a way to make sure valgrind will retranslate the
|
| invalidated area. Returns no value. */
|
| #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__DISCARD_TRANSLATIONS, \
|
| - _qzz_addr, _qzz_len, 0, 0, 0); \
|
| - }
|
| + _qzz_addr, _qzz_len, 0, 0, 0)
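
A sketch of the JIT scenario the comment above describes; `code_buf`,
`new_insns` and `len` are hypothetical, and the direct cast to a function
pointer stands in for whatever dispatch the JIT actually uses.

    #include <string.h>
    #include "valgrind.h"

    static void repatch_and_run(unsigned char *code_buf,
                                const unsigned char *new_insns, size_t len)
    {
        memcpy(code_buf, new_insns, len);              /* rewrite the code    */
        VALGRIND_DISCARD_TRANSLATIONS(code_buf, len);  /* force retranslation */
        ((void (*)(void))code_buf)();                  /* run the new code    */
    }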
|
|
|
|
|
| /* These requests are for getting Valgrind itself to print something.
|
| - Possibly with a backtrace. This is a really ugly hack. */
|
| + Possibly with a backtrace. This is a really ugly hack. The return value
|
| + is the number of characters printed, excluding the "**<pid>** " part at the
|
| + start and the backtrace (if present). */
|
|
|
| -#if defined(NVALGRIND)
|
| -
|
| -# define VALGRIND_PRINTF(...)
|
| -# define VALGRIND_PRINTF_BACKTRACE(...)
|
| -
|
| -#else /* NVALGRIND */
|
| -
|
| +#if defined(__GNUC__) || defined(__INTEL_COMPILER)
|
| /* Modern GCC will optimize the static routine out if unused,
|
| and unused attribute will shut down warnings about it. */
|
| static int VALGRIND_PRINTF(const char *format, ...)
|
| __attribute__((format(__printf__, 1, 2), __unused__));
|
| +#endif
|
| static int
|
| +#if defined(_MSC_VER)
|
| +__inline
|
| +#endif
|
| VALGRIND_PRINTF(const char *format, ...)
|
| {
|
| +#if defined(NVALGRIND)
|
| + return 0;
|
| +#else /* NVALGRIND */
|
| +#if defined(_MSC_VER)
|
| + uintptr_t _qzz_res;
|
| +#else
|
| unsigned long _qzz_res;
|
| +#endif
|
| va_list vargs;
|
| va_start(vargs, format);
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, VG_USERREQ__PRINTF,
|
| - (unsigned long)format, (unsigned long)vargs,
|
| +#if defined(_MSC_VER)
|
| + _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
|
| + VG_USERREQ__PRINTF_VALIST_BY_REF,
|
| + (uintptr_t)format,
|
| + (uintptr_t)&vargs,
|
| 0, 0, 0);
|
| +#else
|
| + _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
|
| + VG_USERREQ__PRINTF_VALIST_BY_REF,
|
| + (unsigned long)format,
|
| + (unsigned long)&vargs,
|
| + 0, 0, 0);
|
| +#endif
|
| va_end(vargs);
|
| return (int)_qzz_res;
|
| +#endif /* NVALGRIND */
|
| }
|
|
|
| +#if defined(__GNUC__) || defined(__INTEL_COMPILER)
|
| static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
|
| __attribute__((format(__printf__, 1, 2), __unused__));
|
| +#endif
|
| static int
|
| +#if defined(_MSC_VER)
|
| +__inline
|
| +#endif
|
| VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
|
| {
|
| +#if defined(NVALGRIND)
|
| + return 0;
|
| +#else /* NVALGRIND */
|
| +#if defined(_MSC_VER)
|
| + uintptr_t _qzz_res;
|
| +#else
|
| unsigned long _qzz_res;
|
| +#endif
|
| va_list vargs;
|
| va_start(vargs, format);
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, VG_USERREQ__PRINTF_BACKTRACE,
|
| - (unsigned long)format, (unsigned long)vargs,
|
| +#if defined(_MSC_VER)
|
| + _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
|
| + VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
|
| + (uintptr_t)format,
|
| + (uintptr_t)&vargs,
|
| 0, 0, 0);
|
| +#else
|
| + _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
|
| + VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
|
| + (unsigned long)format,
|
| + (unsigned long)&vargs,
|
| + 0, 0, 0);
|
| +#endif
|
| va_end(vargs);
|
| return (int)_qzz_res;
|
| +#endif /* NVALGRIND */
|
| }
|
|
|
| -#endif /* NVALGRIND */
|
|
|
| -
|
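Usage sketch (editor's illustration, not part of the patch): the printf
requests now report the number of characters printed; report_progress() and
its nrecords argument are hypothetical.

    #include "valgrind.h"

    static void report_progress(int nrecords)
    {
        /* Printed into Valgrind's own log; returns 0 when running
           natively or when built with NVALGRIND. */
        int n = VALGRIND_PRINTF("checked %d records so far\n", nrecords);
        if (n > 0)
            VALGRIND_PRINTF_BACKTRACE("progress was reported here:\n");
    }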
| /* These requests allow control to move from the simulated CPU to the
|
| real CPU, calling an arbitrary function.
|
|
|
| @@ -3727,199 +3781,253 @@
|
| with a lot in the past.
|
| */
|
| #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
|
| - __extension__ \
|
| - ({unsigned long _qyy_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qyy_res, 0 /* default return */, \
|
| - VG_USERREQ__CLIENT_CALL0, \
|
| - _qyy_fn, \
|
| - 0, 0, 0, 0); \
|
| - _qyy_res; \
|
| - })
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
|
| + VG_USERREQ__CLIENT_CALL0, \
|
| + _qyy_fn, \
|
| + 0, 0, 0, 0)
|
|
|
| -#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
|
| - __extension__ \
|
| - ({unsigned long _qyy_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qyy_res, 0 /* default return */, \
|
| - VG_USERREQ__CLIENT_CALL1, \
|
| - _qyy_fn, \
|
| - _qyy_arg1, 0, 0, 0); \
|
| - _qyy_res; \
|
| - })
|
| +#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
|
| + VG_USERREQ__CLIENT_CALL1, \
|
| + _qyy_fn, \
|
| + _qyy_arg1, 0, 0, 0)
|
|
|
| -#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
|
| - __extension__ \
|
| - ({unsigned long _qyy_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qyy_res, 0 /* default return */, \
|
| - VG_USERREQ__CLIENT_CALL2, \
|
| - _qyy_fn, \
|
| - _qyy_arg1, _qyy_arg2, 0, 0); \
|
| - _qyy_res; \
|
| - })
|
| +#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
|
| + VG_USERREQ__CLIENT_CALL2, \
|
| + _qyy_fn, \
|
| + _qyy_arg1, _qyy_arg2, 0, 0)
|
|
|
| #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
|
| - __extension__ \
|
| - ({unsigned long _qyy_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qyy_res, 0 /* default return */, \
|
| - VG_USERREQ__CLIENT_CALL3, \
|
| - _qyy_fn, \
|
| - _qyy_arg1, _qyy_arg2, \
|
| - _qyy_arg3, 0); \
|
| - _qyy_res; \
|
| - })
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
|
| + VG_USERREQ__CLIENT_CALL3, \
|
| + _qyy_fn, \
|
| + _qyy_arg1, _qyy_arg2, \
|
| + _qyy_arg3, 0)
|
|
|
|
|
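Usage sketch (editor's illustration, not part of the patch): running a helper
on the real CPU; add_one() is hypothetical and sticks to word-sized (long)
arguments and return values.

    #include "valgrind.h"

    static long add_one(long x)
    {
        return x + 1;
    }

    static unsigned long run_on_real_cpu(void)
    {
        /* Calls add_one(41) outside the simulated CPU when under Valgrind;
           evaluates to the default value 0 when running natively. */
        return VALGRIND_NON_SIMD_CALL1(add_one, 41);
    }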
| /* Counts the number of errors that have been recorded by a tool. Nb:
|
| the tool must record the errors with VG_(maybe_record_error)() or
|
| VG_(unique_error)() for them to be counted. */
|
| #define VALGRIND_COUNT_ERRORS \
|
| - __extension__ \
|
| - ({unsigned int _qyy_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qyy_res, 0 /* default return */, \
|
| + (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
|
| + 0 /* default return */, \
|
| VG_USERREQ__COUNT_ERRORS, \
|
| - 0, 0, 0, 0, 0); \
|
| - _qyy_res; \
|
| - })
|
| + 0, 0, 0, 0, 0)
|
|
|
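Usage sketch (editor's illustration, not part of the patch): bracketing a test
with VALGRIND_COUNT_ERRORS; run_test_case() is hypothetical.

    #include "valgrind.h"

    void run_test_case(void);   /* hypothetical test body */

    static void check_one_case(void)
    {
        unsigned before = VALGRIND_COUNT_ERRORS;
        run_test_case();
        unsigned after = VALGRIND_COUNT_ERRORS;
        if (after > before)
            VALGRIND_PRINTF("this case added %u error(s)\n", after - before);
    }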
| -/* Mark a block of memory as having been allocated by a malloc()-like
|
| - function. `addr' is the start of the usable block (ie. after any
|
| - redzone) `rzB' is redzone size if the allocator can apply redzones;
|
| - use '0' if not. Adding redzones makes it more likely Valgrind will spot
|
| - block overruns. `is_zeroed' indicates if the memory is zeroed, as it is
|
| - for calloc(). Put it immediately after the point where a block is
|
| - allocated.
|
| +/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
|
| + when heap blocks are allocated in order to give accurate results. This
|
| + happens automatically for the standard allocator functions such as
|
| + malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
|
| + delete[], etc.
|
| +
|
| + But if your program uses a custom allocator, this doesn't automatically
|
| + happen, and Valgrind will not do as well. For example, if you allocate
|
| + superblocks with mmap() and then allocate chunks of the superblocks, all
|
| + Valgrind's observations will be at the mmap() level and it won't know that
|
| + the chunks should be considered separate entities. In Memcheck's case,
|
| + that means you probably won't get heap block overrun detection (because
|
| + there won't be redzones marked as unaddressable) and you definitely won't
|
| + get any leak detection.
|
| +
|
| + The following client requests allow a custom allocator to be annotated so
|
| + that it can be handled accurately by Valgrind.
|
| +
|
| + VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
|
| + by a malloc()-like function. For Memcheck (an illustrative case), this
|
| + does two things:
|
| +
|
| + - It records that the block has been allocated. This means any addresses
|
| + within the block mentioned in error messages will be
|
| + identified as belonging to the block. It also means that if the block
|
| + isn't freed it will be detected by the leak checker.
|
| +
|
| + - It marks the block as being addressable and undefined (if 'is_zeroed' is
|
| + not set), or addressable and defined (if 'is_zeroed' is set). This
|
| + controls how accesses to the block by the program are handled.
|
|
|
| - If you're using Memcheck: If you're allocating memory via superblocks,
|
| - and then handing out small chunks of each superblock, if you don't have
|
| - redzones on your small blocks, it's worth marking the superblock with
|
| - VALGRIND_MAKE_MEM_NOACCESS when it's created, so that block overruns are
|
| - detected. But if you can put redzones on, it's probably better to not do
|
| - this, so that messages for small overruns are described in terms of the
|
| - small block rather than the superblock (but if you have a big overrun
|
| - that skips over a redzone, you could miss an error this way). See
|
| - memcheck/tests/custom_alloc.c for an example.
|
| + 'addr' is the start of the usable block (ie. after any
|
| + redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
|
| + can apply redzones -- these are blocks of padding at the start and end of
|
| + each block. Adding redzones is recommended as it makes it much more likely
|
| + Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
|
| + zeroed (or filled with another predictable value), as is the case for
|
| + calloc().
|
| +
|
| + VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
|
| + heap block -- that will be used by the client program -- is allocated.
|
| + It's best to put it at the outermost level of the allocator if possible;
|
| + for example, if you have a function my_alloc() which calls
|
| + internal_alloc(), and the client request is put inside internal_alloc(),
|
| + stack traces relating to the heap block will contain entries for both
|
| + my_alloc() and internal_alloc(), which is probably not what you want.
|
|
|
| - WARNING: if your allocator uses malloc() or 'new' to allocate
|
| - superblocks, rather than mmap() or brk(), this will not work properly --
|
| - you'll likely get assertion failures during leak detection. This is
|
| - because Valgrind doesn't like seeing overlapping heap blocks. Sorry.
|
| + For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
|
| + custom blocks from within a heap block, B, that has been allocated with
|
| + malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
|
| + -- the custom blocks will take precedence.
|
|
|
| - Nb: block must be freed via a free()-like function specified
|
| - with VALGRIND_FREELIKE_BLOCK or mismatch errors will occur. */
|
| + VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
|
| + Memcheck, it does two things:
|
| +
|
| + - It records that the block has been deallocated. This assumes that the
|
| + block was annotated as having been allocated via
|
| + VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
|
| +
|
| + - It marks the block as being unaddressable.
|
| +
|
| + VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
|
| + heap block is deallocated.
|
| +
|
| + VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
|
| + Memcheck, it does four things:
|
| +
|
| + - It records that the size of a block has been changed. This assumes that
|
| + the block was annotated as having been allocated via
|
| + VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
|
| +
|
| + - If the block shrunk, it marks the freed memory as being unaddressable.
|
| +
|
| + - If the block grew, it marks the new area as undefined and defines a red
|
| + zone past the end of the new block.
|
| +
|
| + - The V-bits of the overlap between the old and the new block are preserved.
|
| +
|
| + VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
|
| + and before deallocation of the old block.
|
| +
|
| + In many cases, these three client requests will not be enough to get your
|
| + allocator working well with Memcheck. More specifically, if your allocator
|
| + writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
|
| + will be necessary to mark the memory as addressable just before the zeroing
|
| + occurs, otherwise you'll get a lot of invalid write errors. For example,
|
| + you'll need to do this if your allocator recycles freed blocks, but it
|
| + zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
|
| + Alternatively, if your allocator reuses freed blocks for allocator-internal
|
| + data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
|
| +
|
| + Really, what's happening is a blurring of the lines between the client
|
| + program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
|
| + memory should be considered unaddressable to the client program, but the
|
| + allocator knows more than the rest of the client program and so may be able
|
| + to safely access it. Extra client requests are necessary for Valgrind to
|
| + understand the distinction between the allocator and the rest of the
|
| + program.
|
| +
|
| + Ignored if addr == 0.
|
| +*/
|
| #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__MALLOCLIKE_BLOCK, \
|
| - addr, sizeB, rzB, is_zeroed, 0); \
|
| - }
|
| + addr, sizeB, rzB, is_zeroed, 0)
|
|
|
| -/* Mark a block of memory as having been freed by a free()-like function.
|
| - `rzB' is redzone size; it must match that given to
|
| - VALGRIND_MALLOCLIKE_BLOCK. Memory not freed will be detected by the leak
|
| - checker. Put it immediately after the point where the block is freed. */
|
| +/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
|
| + Ignored if addr == 0.
|
| +*/
|
| +#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| + VG_USERREQ__RESIZEINPLACE_BLOCK, \
|
| + addr, oldSizeB, newSizeB, rzB, 0)
|
| +
|
| +/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
|
| + Ignored if addr == 0.
|
| +*/
|
| #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__FREELIKE_BLOCK, \
|
| - addr, rzB, 0, 0, 0); \
|
| - }
|
| + addr, rzB, 0, 0, 0)
|
|
|
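Usage sketch (editor's illustration, not part of the patch): annotating a
custom superblock allocator as described above; carve_from_superblock() and
return_to_superblock() stand in for the real mmap()-backed logic.

    #include <stddef.h>
    #include "valgrind.h"

    #define RZ 16   /* redzone size reported to Valgrind */

    void *carve_from_superblock(size_t n);   /* hypothetical */
    void  return_to_superblock(void *p);     /* hypothetical */

    static void *my_alloc(size_t n)
    {
        char *raw = carve_from_superblock(n + 2 * RZ);
        void *usable = raw + RZ;
        /* The memory is not pre-zeroed, so is_zeroed == 0. */
        VALGRIND_MALLOCLIKE_BLOCK(usable, n, RZ, 0);
        return usable;
    }

    static void my_free(void *usable)
    {
        VALGRIND_FREELIKE_BLOCK(usable, RZ);
        return_to_superblock((char *)usable - RZ);
    }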
| /* Create a memory pool. */
|
| #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__CREATE_MEMPOOL, \
|
| - pool, rzB, is_zeroed, 0, 0); \
|
| - }
|
| + pool, rzB, is_zeroed, 0, 0)
|
|
|
| /* Destroy a memory pool. */
|
| #define VALGRIND_DESTROY_MEMPOOL(pool) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__DESTROY_MEMPOOL, \
|
| - pool, 0, 0, 0, 0); \
|
| - }
|
| + pool, 0, 0, 0, 0)
|
|
|
| /* Associate a piece of memory with a memory pool. */
|
| #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__MEMPOOL_ALLOC, \
|
| - pool, addr, size, 0, 0); \
|
| - }
|
| + pool, addr, size, 0, 0)
|
|
|
| /* Disassociate a piece of memory from a memory pool. */
|
| #define VALGRIND_MEMPOOL_FREE(pool, addr) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__MEMPOOL_FREE, \
|
| - pool, addr, 0, 0, 0); \
|
| - }
|
| + pool, addr, 0, 0, 0)
|
|
|
| /* Disassociate any pieces outside a particular range. */
|
| #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__MEMPOOL_TRIM, \
|
| - pool, addr, size, 0, 0); \
|
| - }
|
| + pool, addr, size, 0, 0)
|
|
|
| /* Move a memory pool from anchor address poolA to poolB. */
|
| #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__MOVE_MEMPOOL, \
|
| - poolA, poolB, 0, 0, 0); \
|
| - }
|
| + poolA, poolB, 0, 0, 0)
|
|
|
| /* Resize and/or move a piece associated with a memory pool. */
|
| #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__MEMPOOL_CHANGE, \
|
| - pool, addrA, addrB, size, 0); \
|
| - }
|
| + pool, addrA, addrB, size, 0)
|
|
|
| /* Return 1 if a mempool exists, else 0. */
|
| #define VALGRIND_MEMPOOL_EXISTS(pool) \
|
| - ({unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__MEMPOOL_EXISTS, \
|
| - pool, 0, 0, 0, 0); \
|
| - _qzz_res; \
|
| - })
|
| + pool, 0, 0, 0, 0)
|
|
|
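Usage sketch (editor's illustration, not part of the patch): a bump-pointer
arena annotated with the mempool requests; struct arena and arena_bump() are
hypothetical.

    #include <stddef.h>
    #include "valgrind.h"

    struct arena;
    void *arena_bump(struct arena *a, size_t n);   /* hypothetical */

    static void arena_init(struct arena *a)
    {
        /* The arena pointer itself serves as the pool handle. */
        VALGRIND_CREATE_MEMPOOL(a, /*rzB*/0, /*is_zeroed*/0);
    }

    static void *arena_alloc(struct arena *a, size_t n)
    {
        void *p = arena_bump(a, n);
        VALGRIND_MEMPOOL_ALLOC(a, p, n);   /* associate the piece with the pool */
        return p;
    }

    static void arena_destroy(struct arena *a)
    {
        VALGRIND_DESTROY_MEMPOOL(a);       /* all pieces go away with the pool */
    }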
| /* Mark a piece of memory as being a stack. Returns a stack id. */
|
| #define VALGRIND_STACK_REGISTER(start, end) \
|
| - ({unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__STACK_REGISTER, \
|
| - start, end, 0, 0, 0); \
|
| - _qzz_res; \
|
| - })
|
| + start, end, 0, 0, 0)
|
|
|
| /* Unmark the piece of memory associated with a stack id as being a
|
| stack. */
|
| #define VALGRIND_STACK_DEREGISTER(id) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__STACK_DEREGISTER, \
|
| - id, 0, 0, 0, 0); \
|
| - }
|
| + id, 0, 0, 0, 0)
|
|
|
| /* Change the start and end address of the stack id. */
|
| #define VALGRIND_STACK_CHANGE(id, start, end) \
|
| - {unsigned int _qzz_res; \
|
| - VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| VG_USERREQ__STACK_CHANGE, \
|
| - id, start, end, 0, 0); \
|
| - }
|
| + id, start, end, 0, 0)
|
|
|
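Usage sketch (editor's illustration, not part of the patch): registering a
coroutine stack so Valgrind follows the stack switch; make_coroutine_stack()
is hypothetical.

    #include <stdlib.h>
    #include "valgrind.h"

    #define CO_STACK_SIZE (64 * 1024)

    static unsigned make_coroutine_stack(char **out_base)
    {
        char *base = malloc(CO_STACK_SIZE);
        unsigned id = VALGRIND_STACK_REGISTER(base, base + CO_STACK_SIZE);
        *out_base = base;
        return id;   /* pair with VALGRIND_STACK_DEREGISTER(id) before free(base) */
    }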
| +/* Load PDB debug info for Wine PE image_map. */
|
| +#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
|
| + VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| + VG_USERREQ__LOAD_PDB_DEBUGINFO, \
|
| + fd, ptr, total_size, delta, 0)
|
|
|
| +/* Map a code address to a source file name and line number. buf64
|
| + must point to a 64-byte buffer in the caller's address space. The
|
| + result will be dumped in there and is guaranteed to be zero
|
| + terminated. If no info is found, the first byte is set to zero. */
|
| +#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
|
| + (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
|
| + VG_USERREQ__MAP_IP_TO_SRCLOC, \
|
| + addr, buf64, 0, 0, 0)
|
| +
|
| +
|
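Usage sketch (editor's illustration, not part of the patch): resolving an
instruction address with VALGRIND_MAP_IP_TO_SRCLOC; describe_ip() is
hypothetical.

    #include <stdio.h>
    #include "valgrind.h"

    static void describe_ip(void *ip)
    {
        char buf64[64] = "";   /* at least 64 bytes, per the comment above */
        VALGRIND_MAP_IP_TO_SRCLOC(ip, buf64);
        if (buf64[0] != '\0')
            printf("%p -> %s\n", ip, buf64);
        else
            printf("%p -> no source info\n", ip);
    }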
| +#undef PLAT_x86_darwin
|
| +#undef PLAT_amd64_darwin
|
| +#undef PLAT_x86_win32
|
| #undef PLAT_x86_linux
|
| #undef PLAT_amd64_linux
|
| #undef PLAT_ppc32_linux
|
| #undef PLAT_ppc64_linux
|
| -#undef PLAT_ppc32_aix5
|
| -#undef PLAT_ppc64_aix5
|
| +#undef PLAT_arm_linux
|
| +#undef PLAT_s390x_linux
|
|
|
| #endif /* __VALGRIND_H */
|
|
|