as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
+/* Macros to save and align the stack before making a function
+ call and restore it afterwards, as gcc may not keep the stack
+ pointer aligned if it doesn't realise calls are being made
+ to other functions. */
+
+#define VALGRIND_ALIGN_STACK \
+ "movl %%esp,%%edi\n\t" \
+ "andl $0xfffffff0,%%esp\n\t"
+#define VALGRIND_RESTORE_STACK \
+ "movl %%edi,%%esp\n\t"
+
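The x86 pair above is the template every other platform follows: park the incoming stack pointer in a register that survives the hidden call (%edi is callee-saved under the x86 calling convention), mask off the low bits to round down to the ABI's 16-byte boundary, and copy the saved value back once the call returns. A minimal standalone sketch of the same dance, assuming a 32-bit x86 GCC build; the function name is hypothetical:

   /* Illustrative only: capture the masked %esp and confirm it is
      16-aligned.  %edi carries the original value across the asm,
      exactly as the macros above use it, so it is clobbered. */
   static unsigned long demo_aligned_esp(void)
   {
      unsigned long sp_inside;
      __asm__ volatile(
         "movl %%esp,%%edi\n\t"        /* save original %esp        */
         "andl $0xfffffff0,%%esp\n\t"  /* round down to 16 bytes    */
         "movl %%esp,%0\n\t"           /* capture the aligned value */
         "movl %%edi,%%esp\n\t"        /* restore original %esp     */
         : /*out*/ "=r" (sp_inside) : /*in*/ : /*trash*/ "cc", "edi");
      return sp_inside;   /* always a multiple of 16 */
   }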
/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
long) == 4. */
volatile unsigned long _res; \
_argvec[0] = (unsigned long)_orig.nraddr; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[0] = (unsigned long)_orig.nraddr; \
_argvec[1] = (unsigned long)(arg1); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"subl $12, %%esp\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $16, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
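The subl/pushl pairs in these macros are sized so that the padding plus the pushed arguments always total a multiple of 16 bytes, keeping %esp aligned at the moment of the call: one argument gets 12 bytes of padding, two get 8, three get 4, four get none, and the cycle then repeats. A hypothetical sanity check of that rule (the function name is made up):

   /* pad(nargs) reproduces the constants used in these macros:
      12, 8, 4, 0, 12, 8, ... so pad + 4*nargs is always 16-aligned. */
   #include <assert.h>
   static void check_x86_arg_padding(void)
   {
      int nargs;
      for (nargs = 0; nargs <= 12; nargs++) {
         int pad = (16 - (4 * nargs) % 16) % 16;
         assert((pad + 4 * nargs) % 16 == 0);
      }
   }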
_argvec[1] = (unsigned long)(arg1); \
_argvec[2] = (unsigned long)(arg2); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"subl $8, %%esp\n\t" \
"pushl 8(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $16, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2] = (unsigned long)(arg2); \
_argvec[3] = (unsigned long)(arg3); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"subl $4, %%esp\n\t" \
"pushl 12(%%eax)\n\t" \
"pushl 8(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $16, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[3] = (unsigned long)(arg3); \
_argvec[4] = (unsigned long)(arg4); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"pushl 16(%%eax)\n\t" \
"pushl 12(%%eax)\n\t" \
"pushl 8(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $16, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[4] = (unsigned long)(arg4); \
_argvec[5] = (unsigned long)(arg5); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"subl $12, %%esp\n\t" \
"pushl 20(%%eax)\n\t" \
"pushl 16(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $32, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[5] = (unsigned long)(arg5); \
_argvec[6] = (unsigned long)(arg6); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"subl $8, %%esp\n\t" \
"pushl 24(%%eax)\n\t" \
"pushl 20(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $32, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[6] = (unsigned long)(arg6); \
_argvec[7] = (unsigned long)(arg7); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"subl $4, %%esp\n\t" \
"pushl 28(%%eax)\n\t" \
"pushl 24(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $32, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[7] = (unsigned long)(arg7); \
_argvec[8] = (unsigned long)(arg8); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"pushl 32(%%eax)\n\t" \
"pushl 28(%%eax)\n\t" \
"pushl 24(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $32, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[8] = (unsigned long)(arg8); \
_argvec[9] = (unsigned long)(arg9); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"subl $12, %%esp\n\t" \
"pushl 36(%%eax)\n\t" \
"pushl 32(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $48, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[9] = (unsigned long)(arg9); \
_argvec[10] = (unsigned long)(arg10); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"subl $8, %%esp\n\t" \
"pushl 40(%%eax)\n\t" \
"pushl 36(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $48, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[10] = (unsigned long)(arg10); \
_argvec[11] = (unsigned long)(arg11); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"subl $4, %%esp\n\t" \
"pushl 44(%%eax)\n\t" \
"pushl 40(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $48, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[11] = (unsigned long)(arg11); \
_argvec[12] = (unsigned long)(arg12); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"pushl 48(%%eax)\n\t" \
"pushl 44(%%eax)\n\t" \
"pushl 40(%%eax)\n\t" \
"pushl 4(%%eax)\n\t" \
"movl (%%eax), %%eax\n\t" /* target->%eax */ \
VALGRIND_CALL_NOREDIR_EAX \
- "addl $48, %%esp\n" \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=a" (_res) \
: /*in*/ "a" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
# define VALGRIND_CFI_EPILOGUE
#endif
+/* Macros to save and align the stack before making a function
+ call and restore it afterwards, as gcc may not keep the stack
+ pointer aligned if it doesn't realise calls are being made
+ to other functions. */
+
+#define VALGRIND_ALIGN_STACK \
+ "movq %%rsp,%%r14\n\t" \
+ "andq $0xfffffffffffffff0,%%rsp\n\t"
+#define VALGRIND_RESTORE_STACK \
+ "movq %%r14,%%rsp\n\t"
/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
long) == 8. */
with the stack pointer doesn't give a danger of non-unwindable
stack. */
-#define CALL_FN_W_v(lval, orig) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[1]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $128,%%rsp\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $128,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_v(lval, orig) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[1]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $128,%%rsp\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
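The "subq $128,%%rsp" that survives the rewrite serves a different purpose from alignment: it steps over the 128-byte red zone that the System V AMD64 ABI lets leaf code use below %rsp, so the hidden call cannot overwrite data the compiler may have parked there. Because 128 is itself a multiple of 16, it leaves the just-established alignment intact. A hypothetical picture of the stack at the moment of the call:

   /* Hypothetical picture of %rsp around the hidden call:
    *
    *    %r14              = original %rsp (saved copy)
    *    after andq        = %r14 rounded down to 16
    *    after subq $128   = call-time %rsp; red zone skipped
    *
    * 128 % 16 == 0, so the call still happens on a 16-byte boundary,
    * and VALGRIND_RESTORE_STACK makes a matching addq unnecessary.
    */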
-#define CALL_FN_W_W(lval, orig, arg1) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[2]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $128,%%rsp\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $128,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_W(lval, orig, arg1) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[2]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $128,%%rsp\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[3]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $128,%%rsp\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $128,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[3]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $128,%%rsp\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[4]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- _argvec[3] = (unsigned long)(arg3); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $128,%%rsp\n\t" \
- "movq 24(%%rax), %%rdx\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $128,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[4]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ _argvec[3] = (unsigned long)(arg3); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $128,%%rsp\n\t" \
+ "movq 24(%%rax), %%rdx\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[5]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- _argvec[3] = (unsigned long)(arg3); \
- _argvec[4] = (unsigned long)(arg4); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $128,%%rsp\n\t" \
- "movq 32(%%rax), %%rcx\n\t" \
- "movq 24(%%rax), %%rdx\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $128,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[5]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ _argvec[3] = (unsigned long)(arg3); \
+ _argvec[4] = (unsigned long)(arg4); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $128,%%rsp\n\t" \
+ "movq 32(%%rax), %%rcx\n\t" \
+ "movq 24(%%rax), %%rdx\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[6]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- _argvec[3] = (unsigned long)(arg3); \
- _argvec[4] = (unsigned long)(arg4); \
- _argvec[5] = (unsigned long)(arg5); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $128,%%rsp\n\t" \
- "movq 40(%%rax), %%r8\n\t" \
- "movq 32(%%rax), %%rcx\n\t" \
- "movq 24(%%rax), %%rdx\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $128,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[6]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ _argvec[3] = (unsigned long)(arg3); \
+ _argvec[4] = (unsigned long)(arg4); \
+ _argvec[5] = (unsigned long)(arg5); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $128,%%rsp\n\t" \
+ "movq 40(%%rax), %%r8\n\t" \
+ "movq 32(%%rax), %%rcx\n\t" \
+ "movq 24(%%rax), %%rdx\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[7]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- _argvec[3] = (unsigned long)(arg3); \
- _argvec[4] = (unsigned long)(arg4); \
- _argvec[5] = (unsigned long)(arg5); \
- _argvec[6] = (unsigned long)(arg6); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $128,%%rsp\n\t" \
- "movq 48(%%rax), %%r9\n\t" \
- "movq 40(%%rax), %%r8\n\t" \
- "movq 32(%%rax), %%rcx\n\t" \
- "movq 24(%%rax), %%rdx\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $128,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[7]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ _argvec[3] = (unsigned long)(arg3); \
+ _argvec[4] = (unsigned long)(arg4); \
+ _argvec[5] = (unsigned long)(arg5); \
+ _argvec[6] = (unsigned long)(arg6); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $128,%%rsp\n\t" \
+ "movq 48(%%rax), %%r9\n\t" \
+ "movq 40(%%rax), %%r8\n\t" \
+ "movq 32(%%rax), %%rcx\n\t" \
+ "movq 24(%%rax), %%rdx\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
- arg7) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[8]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- _argvec[3] = (unsigned long)(arg3); \
- _argvec[4] = (unsigned long)(arg4); \
- _argvec[5] = (unsigned long)(arg5); \
- _argvec[6] = (unsigned long)(arg6); \
- _argvec[7] = (unsigned long)(arg7); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $136,%%rsp\n\t" \
- "pushq 56(%%rax)\n\t" \
- "movq 48(%%rax), %%r9\n\t" \
- "movq 40(%%rax), %%r8\n\t" \
- "movq 32(%%rax), %%rcx\n\t" \
- "movq 24(%%rax), %%rdx\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $8, %%rsp\n" \
- "addq $136,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
+ arg7) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[8]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ _argvec[3] = (unsigned long)(arg3); \
+ _argvec[4] = (unsigned long)(arg4); \
+ _argvec[5] = (unsigned long)(arg5); \
+ _argvec[6] = (unsigned long)(arg6); \
+ _argvec[7] = (unsigned long)(arg7); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $136,%%rsp\n\t" \
+ "pushq 56(%%rax)\n\t" \
+ "movq 48(%%rax), %%r9\n\t" \
+ "movq 40(%%rax), %%r8\n\t" \
+ "movq 32(%%rax), %%rcx\n\t" \
+ "movq 24(%%rax), %%rdx\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
- arg7,arg8) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[9]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- _argvec[3] = (unsigned long)(arg3); \
- _argvec[4] = (unsigned long)(arg4); \
- _argvec[5] = (unsigned long)(arg5); \
- _argvec[6] = (unsigned long)(arg6); \
- _argvec[7] = (unsigned long)(arg7); \
- _argvec[8] = (unsigned long)(arg8); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $128,%%rsp\n\t" \
- "pushq 64(%%rax)\n\t" \
- "pushq 56(%%rax)\n\t" \
- "movq 48(%%rax), %%r9\n\t" \
- "movq 40(%%rax), %%r8\n\t" \
- "movq 32(%%rax), %%rcx\n\t" \
- "movq 24(%%rax), %%rdx\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $16, %%rsp\n" \
- "addq $128,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
+ arg7,arg8) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[9]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ _argvec[3] = (unsigned long)(arg3); \
+ _argvec[4] = (unsigned long)(arg4); \
+ _argvec[5] = (unsigned long)(arg5); \
+ _argvec[6] = (unsigned long)(arg6); \
+ _argvec[7] = (unsigned long)(arg7); \
+ _argvec[8] = (unsigned long)(arg8); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $128,%%rsp\n\t" \
+ "pushq 64(%%rax)\n\t" \
+ "pushq 56(%%rax)\n\t" \
+ "movq 48(%%rax), %%r9\n\t" \
+ "movq 40(%%rax), %%r8\n\t" \
+ "movq 32(%%rax), %%rcx\n\t" \
+ "movq 24(%%rax), %%rdx\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
- arg7,arg8,arg9) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[10]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- _argvec[3] = (unsigned long)(arg3); \
- _argvec[4] = (unsigned long)(arg4); \
- _argvec[5] = (unsigned long)(arg5); \
- _argvec[6] = (unsigned long)(arg6); \
- _argvec[7] = (unsigned long)(arg7); \
- _argvec[8] = (unsigned long)(arg8); \
- _argvec[9] = (unsigned long)(arg9); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $136,%%rsp\n\t" \
- "pushq 72(%%rax)\n\t" \
- "pushq 64(%%rax)\n\t" \
- "pushq 56(%%rax)\n\t" \
- "movq 48(%%rax), %%r9\n\t" \
- "movq 40(%%rax), %%r8\n\t" \
- "movq 32(%%rax), %%rcx\n\t" \
- "movq 24(%%rax), %%rdx\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $24, %%rsp\n" \
- "addq $136,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
+ arg7,arg8,arg9) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[10]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ _argvec[3] = (unsigned long)(arg3); \
+ _argvec[4] = (unsigned long)(arg4); \
+ _argvec[5] = (unsigned long)(arg5); \
+ _argvec[6] = (unsigned long)(arg6); \
+ _argvec[7] = (unsigned long)(arg7); \
+ _argvec[8] = (unsigned long)(arg8); \
+ _argvec[9] = (unsigned long)(arg9); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $136,%%rsp\n\t" \
+ "pushq 72(%%rax)\n\t" \
+ "pushq 64(%%rax)\n\t" \
+ "pushq 56(%%rax)\n\t" \
+ "movq 48(%%rax), %%r9\n\t" \
+ "movq 40(%%rax), %%r8\n\t" \
+ "movq 32(%%rax), %%rcx\n\t" \
+ "movq 24(%%rax), %%rdx\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
- arg7,arg8,arg9,arg10) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[11]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- _argvec[3] = (unsigned long)(arg3); \
- _argvec[4] = (unsigned long)(arg4); \
- _argvec[5] = (unsigned long)(arg5); \
- _argvec[6] = (unsigned long)(arg6); \
- _argvec[7] = (unsigned long)(arg7); \
- _argvec[8] = (unsigned long)(arg8); \
- _argvec[9] = (unsigned long)(arg9); \
- _argvec[10] = (unsigned long)(arg10); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $128,%%rsp\n\t" \
- "pushq 80(%%rax)\n\t" \
- "pushq 72(%%rax)\n\t" \
- "pushq 64(%%rax)\n\t" \
- "pushq 56(%%rax)\n\t" \
- "movq 48(%%rax), %%r9\n\t" \
- "movq 40(%%rax), %%r8\n\t" \
- "movq 32(%%rax), %%rcx\n\t" \
- "movq 24(%%rax), %%rdx\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $32, %%rsp\n" \
- "addq $128,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
+ arg7,arg8,arg9,arg10) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[11]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ _argvec[3] = (unsigned long)(arg3); \
+ _argvec[4] = (unsigned long)(arg4); \
+ _argvec[5] = (unsigned long)(arg5); \
+ _argvec[6] = (unsigned long)(arg6); \
+ _argvec[7] = (unsigned long)(arg7); \
+ _argvec[8] = (unsigned long)(arg8); \
+ _argvec[9] = (unsigned long)(arg9); \
+ _argvec[10] = (unsigned long)(arg10); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $128,%%rsp\n\t" \
+ "pushq 80(%%rax)\n\t" \
+ "pushq 72(%%rax)\n\t" \
+ "pushq 64(%%rax)\n\t" \
+ "pushq 56(%%rax)\n\t" \
+ "movq 48(%%rax), %%r9\n\t" \
+ "movq 40(%%rax), %%r8\n\t" \
+ "movq 32(%%rax), %%rcx\n\t" \
+ "movq 24(%%rax), %%rdx\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
- arg7,arg8,arg9,arg10,arg11) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[12]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- _argvec[3] = (unsigned long)(arg3); \
- _argvec[4] = (unsigned long)(arg4); \
- _argvec[5] = (unsigned long)(arg5); \
- _argvec[6] = (unsigned long)(arg6); \
- _argvec[7] = (unsigned long)(arg7); \
- _argvec[8] = (unsigned long)(arg8); \
- _argvec[9] = (unsigned long)(arg9); \
- _argvec[10] = (unsigned long)(arg10); \
- _argvec[11] = (unsigned long)(arg11); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $136,%%rsp\n\t" \
- "pushq 88(%%rax)\n\t" \
- "pushq 80(%%rax)\n\t" \
- "pushq 72(%%rax)\n\t" \
- "pushq 64(%%rax)\n\t" \
- "pushq 56(%%rax)\n\t" \
- "movq 48(%%rax), %%r9\n\t" \
- "movq 40(%%rax), %%r8\n\t" \
- "movq 32(%%rax), %%rcx\n\t" \
- "movq 24(%%rax), %%rdx\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $40, %%rsp\n" \
- "addq $136,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
+ arg7,arg8,arg9,arg10,arg11) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[12]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ _argvec[3] = (unsigned long)(arg3); \
+ _argvec[4] = (unsigned long)(arg4); \
+ _argvec[5] = (unsigned long)(arg5); \
+ _argvec[6] = (unsigned long)(arg6); \
+ _argvec[7] = (unsigned long)(arg7); \
+ _argvec[8] = (unsigned long)(arg8); \
+ _argvec[9] = (unsigned long)(arg9); \
+ _argvec[10] = (unsigned long)(arg10); \
+ _argvec[11] = (unsigned long)(arg11); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $136,%%rsp\n\t" \
+ "pushq 88(%%rax)\n\t" \
+ "pushq 80(%%rax)\n\t" \
+ "pushq 72(%%rax)\n\t" \
+ "pushq 64(%%rax)\n\t" \
+ "pushq 56(%%rax)\n\t" \
+ "movq 48(%%rax), %%r9\n\t" \
+ "movq 40(%%rax), %%r8\n\t" \
+ "movq 32(%%rax), %%rcx\n\t" \
+ "movq 24(%%rax), %%rdx\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
-#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
- arg7,arg8,arg9,arg10,arg11,arg12) \
- do { \
- volatile OrigFn _orig = (orig); \
- volatile unsigned long _argvec[13]; \
- volatile unsigned long _res; \
- _argvec[0] = (unsigned long)_orig.nraddr; \
- _argvec[1] = (unsigned long)(arg1); \
- _argvec[2] = (unsigned long)(arg2); \
- _argvec[3] = (unsigned long)(arg3); \
- _argvec[4] = (unsigned long)(arg4); \
- _argvec[5] = (unsigned long)(arg5); \
- _argvec[6] = (unsigned long)(arg6); \
- _argvec[7] = (unsigned long)(arg7); \
- _argvec[8] = (unsigned long)(arg8); \
- _argvec[9] = (unsigned long)(arg9); \
- _argvec[10] = (unsigned long)(arg10); \
- _argvec[11] = (unsigned long)(arg11); \
- _argvec[12] = (unsigned long)(arg12); \
- __asm__ volatile( \
- VALGRIND_CFI_PROLOGUE \
- "subq $128,%%rsp\n\t" \
- "pushq 96(%%rax)\n\t" \
- "pushq 88(%%rax)\n\t" \
- "pushq 80(%%rax)\n\t" \
- "pushq 72(%%rax)\n\t" \
- "pushq 64(%%rax)\n\t" \
- "pushq 56(%%rax)\n\t" \
- "movq 48(%%rax), %%r9\n\t" \
- "movq 40(%%rax), %%r8\n\t" \
- "movq 32(%%rax), %%rcx\n\t" \
- "movq 24(%%rax), %%rdx\n\t" \
- "movq 16(%%rax), %%rsi\n\t" \
- "movq 8(%%rax), %%rdi\n\t" \
- "movq (%%rax), %%rax\n\t" /* target->%rax */ \
- VALGRIND_CALL_NOREDIR_RAX \
- "addq $48, %%rsp\n" \
- "addq $128,%%rsp\n\t" \
- VALGRIND_CFI_EPILOGUE \
- : /*out*/ "=a" (_res) \
- : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
- ); \
- lval = (__typeof__(lval)) _res; \
+#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
+ arg7,arg8,arg9,arg10,arg11,arg12) \
+ do { \
+ volatile OrigFn _orig = (orig); \
+ volatile unsigned long _argvec[13]; \
+ volatile unsigned long _res; \
+ _argvec[0] = (unsigned long)_orig.nraddr; \
+ _argvec[1] = (unsigned long)(arg1); \
+ _argvec[2] = (unsigned long)(arg2); \
+ _argvec[3] = (unsigned long)(arg3); \
+ _argvec[4] = (unsigned long)(arg4); \
+ _argvec[5] = (unsigned long)(arg5); \
+ _argvec[6] = (unsigned long)(arg6); \
+ _argvec[7] = (unsigned long)(arg7); \
+ _argvec[8] = (unsigned long)(arg8); \
+ _argvec[9] = (unsigned long)(arg9); \
+ _argvec[10] = (unsigned long)(arg10); \
+ _argvec[11] = (unsigned long)(arg11); \
+ _argvec[12] = (unsigned long)(arg12); \
+ __asm__ volatile( \
+ VALGRIND_CFI_PROLOGUE \
+ VALGRIND_ALIGN_STACK \
+ "subq $128,%%rsp\n\t" \
+ "pushq 96(%%rax)\n\t" \
+ "pushq 88(%%rax)\n\t" \
+ "pushq 80(%%rax)\n\t" \
+ "pushq 72(%%rax)\n\t" \
+ "pushq 64(%%rax)\n\t" \
+ "pushq 56(%%rax)\n\t" \
+ "movq 48(%%rax), %%r9\n\t" \
+ "movq 40(%%rax), %%r8\n\t" \
+ "movq 32(%%rax), %%rcx\n\t" \
+ "movq 24(%%rax), %%rdx\n\t" \
+ "movq 16(%%rax), %%rsi\n\t" \
+ "movq 8(%%rax), %%rdi\n\t" \
+ "movq (%%rax), %%rax\n\t" /* target->%rax */ \
+ VALGRIND_CALL_NOREDIR_RAX \
+ VALGRIND_RESTORE_STACK \
+ VALGRIND_CFI_EPILOGUE \
+ : /*out*/ "=a" (_res) \
+ : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
+ ); \
+ lval = (__typeof__(lval)) _res; \
} while (0)
#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
"r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
"r11", "r12", "r13"
+/* Macros to save and align the stack before making a function
+ call and restore it afterwards, as gcc may not keep the stack
+ pointer aligned if it doesn't realise calls are being made
+ to other functions. */
+
+#define VALGRIND_ALIGN_STACK \
+ "mr 28,1\n\t" \
+ "rlwinm 1,1,0,0,27\n\t"
+#define VALGRIND_RESTORE_STACK \
+ "mr 1,28\n\t"
+
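On ppc32 the save slot is r28, callee-saved in the PowerPC ELF ABI, and the masking is done with a rotate-and-mask: "rlwinm 1,1,0,0,27" rotates r1 by zero and keeps IBM-numbered bits 0..27, clearing the bottom four bits, i.e. rounding the stack pointer down to 16 bytes. A hypothetical construction of that mask (IBM numbering counts bit 0 as the most significant):

   /* Build the rlwinm mask for the bit range (MB,ME) = (0,27); IBM
      bit i of a 32-bit word is value bit 31-i. */
   #include <stdint.h>
   static uint32_t ppc32_rlwinm_mask(int mb, int me)
   {
      uint32_t m = 0;
      int i;
      for (i = mb; i <= me; i++)
         m |= 1u << (31 - i);
      return m;
   }
   /* ppc32_rlwinm_mask(0, 27) == 0xfffffff0u == ~15u: clears the
      low 4 bits of r1, exactly what VALGRIND_ALIGN_STACK needs. */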
/* These CALL_FN_ macros assume that on ppc32-linux,
sizeof(unsigned long) == 4. */
volatile unsigned long _res; \
_argvec[0] = (unsigned long)_orig.nraddr; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[0] = (unsigned long)_orig.nraddr; \
_argvec[1] = (unsigned long)arg1; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"lwz 3,4(11)\n\t" /* arg1->r3 */ \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[1] = (unsigned long)arg1; \
_argvec[2] = (unsigned long)arg2; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"lwz 3,4(11)\n\t" /* arg1->r3 */ \
"lwz 4,8(11)\n\t" \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2] = (unsigned long)arg2; \
_argvec[3] = (unsigned long)arg3; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"lwz 3,4(11)\n\t" /* arg1->r3 */ \
"lwz 4,8(11)\n\t" \
"lwz 5,12(11)\n\t" \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[3] = (unsigned long)arg3; \
_argvec[4] = (unsigned long)arg4; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"lwz 3,4(11)\n\t" /* arg1->r3 */ \
"lwz 4,8(11)\n\t" \
"lwz 6,16(11)\n\t" /* arg4->r6 */ \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[4] = (unsigned long)arg4; \
_argvec[5] = (unsigned long)arg5; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"lwz 3,4(11)\n\t" /* arg1->r3 */ \
"lwz 4,8(11)\n\t" \
"lwz 7,20(11)\n\t" \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[5] = (unsigned long)arg5; \
_argvec[6] = (unsigned long)arg6; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"lwz 3,4(11)\n\t" /* arg1->r3 */ \
"lwz 4,8(11)\n\t" \
"lwz 8,24(11)\n\t" \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[6] = (unsigned long)arg6; \
_argvec[7] = (unsigned long)arg7; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"lwz 3,4(11)\n\t" /* arg1->r3 */ \
"lwz 4,8(11)\n\t" \
"lwz 9,28(11)\n\t" \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[7] = (unsigned long)arg7; \
_argvec[8] = (unsigned long)arg8; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"lwz 3,4(11)\n\t" /* arg1->r3 */ \
"lwz 4,8(11)\n\t" \
"lwz 10,32(11)\n\t" /* arg8->r10 */ \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[8] = (unsigned long)arg8; \
_argvec[9] = (unsigned long)arg9; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"addi 1,1,-16\n\t" \
/* arg9 */ \
"lwz 10,32(11)\n\t" /* arg8->r10 */ \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
- "addi 1,1,16\n\t" \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[9] = (unsigned long)arg9; \
_argvec[10] = (unsigned long)arg10; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"addi 1,1,-16\n\t" \
/* arg10 */ \
"lwz 10,32(11)\n\t" /* arg8->r10 */ \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
- "addi 1,1,16\n\t" \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[10] = (unsigned long)arg10; \
_argvec[11] = (unsigned long)arg11; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"addi 1,1,-32\n\t" \
/* arg11 */ \
"lwz 10,32(11)\n\t" /* arg8->r10 */ \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
- "addi 1,1,32\n\t" \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[11] = (unsigned long)arg11; \
_argvec[12] = (unsigned long)arg12; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"addi 1,1,-32\n\t" \
/* arg12 */ \
"lwz 10,32(11)\n\t" /* arg8->r10 */ \
"lwz 11,0(11)\n\t" /* target->r11 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
- "addi 1,1,32\n\t" \
+ VALGRIND_RESTORE_STACK \
"mr %0,3" \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
"r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
"r11", "r12", "r13"
+/* Macros to save and align the stack before making a function
+ call and restore it afterwards, as gcc may not keep the stack
+ pointer aligned if it doesn't realise calls are being made
+ to other functions. */
+
+#define VALGRIND_ALIGN_STACK \
+ "mr 28,1\n\t" \
+ "rldicr 1,1,0,59\n\t"
+#define VALGRIND_RESTORE_STACK \
+ "mr 1,28\n\t"
+
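The ppc64 variant is the same idea with a doubleword rotate: "rldicr 1,1,0,59" keeps IBM-numbered bits 0..59 of r1 and clears the low four, again a 16-byte round-down with r28 as the save slot. The analogous hypothetical mask construction:

   /* rldicr rD,rS,0,ME keeps IBM bits 0..ME; bit i of a 64-bit
      word is value bit 63-i. */
   #include <stdint.h>
   static uint64_t ppc64_rldicr_mask(int me)
   {
      uint64_t m = 0;
      int i;
      for (i = 0; i <= me; i++)
         m |= 1ULL << (63 - i);
      return m;
   }
   /* ppc64_rldicr_mask(59) == 0xfffffffffffffff0ULL == ~15ULL. */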
/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
long) == 8. */
_argvec[1] = (unsigned long)_orig.r2; \
_argvec[2] = (unsigned long)_orig.nraddr; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
- "ld 2,-16(11)" /* restore tocptr */ \
+ "ld 2,-16(11)\n\t" /* restore tocptr */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2] = (unsigned long)_orig.nraddr; \
_argvec[2+1] = (unsigned long)arg1; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
- "ld 2,-16(11)" /* restore tocptr */ \
+ "ld 2,-16(11)\n\t" /* restore tocptr */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+1] = (unsigned long)arg1; \
_argvec[2+2] = (unsigned long)arg2; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
- "ld 2,-16(11)" /* restore tocptr */ \
+ "ld 2,-16(11)\n\t" /* restore tocptr */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+2] = (unsigned long)arg2; \
_argvec[2+3] = (unsigned long)arg3; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
- "ld 2,-16(11)" /* restore tocptr */ \
+ "ld 2,-16(11)\n\t" /* restore tocptr */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+3] = (unsigned long)arg3; \
_argvec[2+4] = (unsigned long)arg4; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
- "ld 2,-16(11)" /* restore tocptr */ \
+ "ld 2,-16(11)\n\t" /* restore tocptr */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+4] = (unsigned long)arg4; \
_argvec[2+5] = (unsigned long)arg5; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
- "ld 2,-16(11)" /* restore tocptr */ \
+ "ld 2,-16(11)\n\t" /* restore tocptr */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+5] = (unsigned long)arg5; \
_argvec[2+6] = (unsigned long)arg6; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
- "ld 2,-16(11)" /* restore tocptr */ \
+ "ld 2,-16(11)\n\t" /* restore tocptr */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+6] = (unsigned long)arg6; \
_argvec[2+7] = (unsigned long)arg7; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
- "ld 2,-16(11)" /* restore tocptr */ \
+ "ld 2,-16(11)\n\t" /* restore tocptr */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+7] = (unsigned long)arg7; \
_argvec[2+8] = (unsigned long)arg8; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
- "ld 2,-16(11)" /* restore tocptr */ \
+ "ld 2,-16(11)\n\t" /* restore tocptr */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+8] = (unsigned long)arg8; \
_argvec[2+9] = (unsigned long)arg9; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
"ld 2,-16(11)\n\t" /* restore tocptr */ \
- "addi 1,1,128" /* restore frame */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+9] = (unsigned long)arg9; \
_argvec[2+10] = (unsigned long)arg10; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
"ld 2,-16(11)\n\t" /* restore tocptr */ \
- "addi 1,1,128" /* restore frame */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+10] = (unsigned long)arg10; \
_argvec[2+11] = (unsigned long)arg11; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
"ld 2,-16(11)\n\t" /* restore tocptr */ \
- "addi 1,1,144" /* restore frame */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2+11] = (unsigned long)arg11; \
_argvec[2+12] = (unsigned long)arg12; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"mr 11,%1\n\t" \
"std 2,-16(11)\n\t" /* save tocptr */ \
"ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
"mr 11,%1\n\t" \
"mr %0,3\n\t" \
"ld 2,-16(11)\n\t" /* restore tocptr */ \
- "addi 1,1,144" /* restore frame */ \
+ VALGRIND_RESTORE_STACK \
: /*out*/ "=r" (_res) \
: /*in*/ "r" (&_argvec[2]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
+/* Macros to save and align the stack before making a function
+ call and restore it afterwards, as gcc may not keep the stack
+ pointer aligned if it doesn't realise calls are being made
+ to other functions. */
+
+#define VALGRIND_ALIGN_STACK \
+ "mov r11, sp\n\t" \
+ "bic sp, sp, #7\n\t"
+#define VALGRIND_RESTORE_STACK \
+ "mov sp, r11\n\t"
+
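On ARM the AAPCS requires only 8-byte stack alignment at public interfaces, so the mask clears three bits rather than four: "bic sp, sp, #7" is sp & ~7. The saved copy lives in r11, which therefore joins the clobber lists below. A minimal hypothetical check of the round-down:

   /* Illustrative only: bic sp, sp, #7 rounds down to the nearest
      multiple of 8 without ever moving sp upwards. */
   #include <assert.h>
   #include <stdint.h>
   static void check_arm_bic(void)
   {
      uint32_t sp;
      for (sp = 0xbe000000u; sp < 0xbe000020u; sp++) {
         uint32_t aligned = sp & ~7u;
         assert(aligned % 8 == 0 && aligned <= sp && sp - aligned < 8);
      }
   }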
/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
long) == 4. */
volatile unsigned long _res; \
_argvec[0] = (unsigned long)_orig.nraddr; \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0\n" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[0] = (unsigned long)_orig.nraddr; \
_argvec[1] = (unsigned long)(arg1); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"ldr r0, [%1, #4] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0\n" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[1] = (unsigned long)(arg1); \
_argvec[2] = (unsigned long)(arg2); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"ldr r0, [%1, #4] \n\t" \
"ldr r1, [%1, #8] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0\n" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[2] = (unsigned long)(arg2); \
_argvec[3] = (unsigned long)(arg3); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"ldr r0, [%1, #4] \n\t" \
"ldr r1, [%1, #8] \n\t" \
"ldr r2, [%1, #12] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0\n" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[3] = (unsigned long)(arg3); \
_argvec[4] = (unsigned long)(arg4); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"ldr r0, [%1, #4] \n\t" \
"ldr r1, [%1, #8] \n\t" \
"ldr r2, [%1, #12] \n\t" \
"ldr r3, [%1, #16] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[4] = (unsigned long)(arg4); \
_argvec[5] = (unsigned long)(arg5); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
+ "sub sp, sp, #4 \n\t" \
"ldr r0, [%1, #20] \n\t" \
"push {r0} \n\t" \
"ldr r0, [%1, #4] \n\t" \
"ldr r3, [%1, #16] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
- "add sp, sp, #4 \n\t" \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
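Whenever an odd number of words has to go onto the stack, the rewrite now inserts a "sub sp, sp, #4" ahead of the pushes so the total stack adjustment stays a multiple of 8 and the alignment established by VALGRIND_ALIGN_STACK survives into the call; the old matching "add" disappears because VALGRIND_RESTORE_STACK snaps sp straight back from r11 regardless of how much was pushed. A hypothetical check of that padding rule:

   /* pad is 4 when the number of stacked words is odd, 0 otherwise,
      matching the cases here: 1 word pads 4, 2 pad 0, 3 pad 4, ... */
   #include <assert.h>
   static void check_arm_stack_padding(void)
   {
      int nwords;
      for (nwords = 0; nwords <= 8; nwords++) {
         int pad = (nwords & 1) ? 4 : 0;
         assert((pad + 4 * nwords) % 8 == 0);
      }
   }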
_argvec[5] = (unsigned long)(arg5); \
_argvec[6] = (unsigned long)(arg6); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"ldr r0, [%1, #20] \n\t" \
"ldr r1, [%1, #24] \n\t" \
"push {r0, r1} \n\t" \
"ldr r3, [%1, #16] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
- "add sp, sp, #8 \n\t" \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[6] = (unsigned long)(arg6); \
_argvec[7] = (unsigned long)(arg7); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
+ "sub sp, sp, #4 \n\t" \
"ldr r0, [%1, #20] \n\t" \
"ldr r1, [%1, #24] \n\t" \
"ldr r2, [%1, #28] \n\t" \
"ldr r3, [%1, #16] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
- "add sp, sp, #12 \n\t" \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[7] = (unsigned long)(arg7); \
_argvec[8] = (unsigned long)(arg8); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"ldr r0, [%1, #20] \n\t" \
"ldr r1, [%1, #24] \n\t" \
"ldr r2, [%1, #28] \n\t" \
"ldr r3, [%1, #16] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
- "add sp, sp, #16 \n\t" \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[8] = (unsigned long)(arg8); \
_argvec[9] = (unsigned long)(arg9); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
+ "sub sp, sp, #4 \n\t" \
"ldr r0, [%1, #20] \n\t" \
"ldr r1, [%1, #24] \n\t" \
"ldr r2, [%1, #28] \n\t" \
"ldr r3, [%1, #16] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
- "add sp, sp, #20 \n\t" \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[9] = (unsigned long)(arg9); \
_argvec[10] = (unsigned long)(arg10); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"ldr r0, [%1, #40] \n\t" \
"push {r0} \n\t" \
"ldr r0, [%1, #20] \n\t" \
"ldr r3, [%1, #16] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
- "add sp, sp, #24 \n\t" \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[10] = (unsigned long)(arg10); \
_argvec[11] = (unsigned long)(arg11); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
+ "sub sp, sp, #4 \n\t" \
"ldr r0, [%1, #40] \n\t" \
"ldr r1, [%1, #44] \n\t" \
"push {r0, r1} \n\t" \
"ldr r3, [%1, #16] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
- "add sp, sp, #28 \n\t" \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory",__CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)
_argvec[11] = (unsigned long)(arg11); \
_argvec[12] = (unsigned long)(arg12); \
__asm__ volatile( \
+ VALGRIND_ALIGN_STACK \
"ldr r0, [%1, #40] \n\t" \
"ldr r1, [%1, #44] \n\t" \
"ldr r2, [%1, #48] \n\t" \
"ldr r3, [%1, #16] \n\t" \
"ldr r4, [%1] \n\t" /* target->r4 */ \
VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
- "add sp, sp, #32 \n\t" \
+ VALGRIND_RESTORE_STACK \
"mov %0, r0" \
: /*out*/ "=r" (_res) \
: /*in*/ "0" (&_argvec[0]) \
- : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
+ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r11" \
); \
lval = (__typeof__(lval)) _res; \
} while (0)