From a07c2e157185dc577d6fa5fa154a44e4b3afe185 Mon Sep 17 00:00:00 2001
From: sewardj
Date: Fri, 9 Nov 2007 23:09:50 +0000
Subject: Merge r6806 from branches/THRCHECK: Fix longstanding error in the
 amd64-linux function-wrapping macros: protect the caller's red zone across
 the hidden call.  All rather nasty, as explained in the big comment.

git-svn-id: svn://svn.valgrind.org/valgrind/trunk@7120 a5019735-40e9-0310-863c-91ae7b9d1cf9
---
 include/valgrind.h | 47 +++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 47 insertions(+)

diff --git a/include/valgrind.h b/include/valgrind.h
index 20c5d0ead..f18cc97e3 100644
--- a/include/valgrind.h
+++ b/include/valgrind.h
@@ -1022,6 +1022,27 @@ typedef
 /* These CALL_FN_ macros assume that on amd64-linux,
    sizeof(unsigned long) == 8. */
 
+/* NB 9 Sept 07.  There is a nasty kludge here in all these CALL_FN_
+   macros.  In order not to trash the stack redzone, we need to drop
+   %rsp by 128 before the hidden call, and restore it afterwards.
+   The nastiness is that it is only by luck that the stack still
+   appears to be unwindable during the hidden call, since the
+   behaviour of any routine using this macro then does not match
+   what the CFI data says.  Sigh.
+
+   Why is this important?  Imagine that a wrapper has a
+   stack-allocated local and passes a pointer to it to the hidden
+   call.  Because gcc does not know about the hidden call, it may
+   allocate that local in the redzone.  Unfortunately the hidden
+   call may then trash it before the wrapper comes to use it.  So we
+   must step clear of the redzone for the duration of the hidden
+   call, to make it safe.
+
+   Probably the same problem afflicts the other redzone-style ABIs
+   too (ppc64-linux, ppc32-aix5, ppc64-aix5); but for those, the
+   stack is self-describing (none of this CFI nonsense), so at least
+   messing with the stack pointer does not risk leaving the stack
+   non-unwindable. */
+
 #define CALL_FN_W_v(lval, orig) \
    do { \
       volatile OrigFn        _orig = (orig); \
@@ -1029,8 +1050,10 @@ typedef
       volatile unsigned long _res; \
       _argvec[0] = (unsigned long)_orig.nraddr; \
       __asm__ volatile( \
+         "subq $128,%%rsp\n\t" \
          "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
          VALGRIND_CALL_NOREDIR_RAX \
+         "addq $128,%%rsp\n\t" \
          : /*out*/   "=a" (_res) \
          : /*in*/    "a" (&_argvec[0]) \
          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
@@ -1046,9 +1069,11 @@ typedef
       _argvec[0] = (unsigned long)_orig.nraddr; \
       _argvec[1] = (unsigned long)(arg1); \
       __asm__ volatile( \
+         "subq $128,%%rsp\n\t" \
          "movq 8(%%rax), %%rdi\n\t" \
          "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
          VALGRIND_CALL_NOREDIR_RAX \
+         "addq $128,%%rsp\n\t" \
          : /*out*/   "=a" (_res) \
          : /*in*/    "a" (&_argvec[0]) \
          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
@@ -1065,10 +1090,12 @@ typedef
       _argvec[1] = (unsigned long)(arg1); \
       _argvec[2] = (unsigned long)(arg2); \
       __asm__ volatile( \
+         "subq $128,%%rsp\n\t" \
          "movq 16(%%rax), %%rsi\n\t" \
          "movq 8(%%rax), %%rdi\n\t" \
          "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
          VALGRIND_CALL_NOREDIR_RAX \
+         "addq $128,%%rsp\n\t" \
          : /*out*/   "=a" (_res) \
          : /*in*/    "a" (&_argvec[0]) \
          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
@@ -1086,11 +1113,13 @@ typedef
       _argvec[2] = (unsigned long)(arg2); \
       _argvec[3] = (unsigned long)(arg3); \
       __asm__ volatile( \
+         "subq $128,%%rsp\n\t" \
          "movq 24(%%rax), %%rdx\n\t" \
          "movq 16(%%rax), %%rsi\n\t" \
          "movq 8(%%rax), %%rdi\n\t" \
          "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
          VALGRIND_CALL_NOREDIR_RAX \
+         "addq $128,%%rsp\n\t" \
          : /*out*/   "=a" (_res) \
          : /*in*/    "a" (&_argvec[0]) \
          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
@@ -1109,12 +1138,14 @@ typedef
       _argvec[3] = (unsigned long)(arg3); \
       _argvec[4] = (unsigned long)(arg4); \
       __asm__ volatile( \
+         "subq $128,%%rsp\n\t" \
          "movq 32(%%rax), %%rcx\n\t" \
          "movq 24(%%rax), %%rdx\n\t" \
          "movq 16(%%rax), %%rsi\n\t" \
          "movq 8(%%rax), %%rdi\n\t" \
          "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
          VALGRIND_CALL_NOREDIR_RAX \
+         "addq $128,%%rsp\n\t" \
          : /*out*/   "=a" (_res) \
          : /*in*/    "a" (&_argvec[0]) \
          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
@@ -1134,6 +1165,7 @@ typedef
       _argvec[4] = (unsigned long)(arg4); \
       _argvec[5] = (unsigned long)(arg5); \
       __asm__ volatile( \
+         "subq $128,%%rsp\n\t" \
          "movq 40(%%rax), %%r8\n\t" \
          "movq 32(%%rax), %%rcx\n\t" \
          "movq 24(%%rax), %%rdx\n\t" \
@@ -1141,6 +1173,7 @@ typedef
          "movq 8(%%rax), %%rdi\n\t" \
          "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
          VALGRIND_CALL_NOREDIR_RAX \
+         "addq $128,%%rsp\n\t" \
          : /*out*/   "=a" (_res) \
          : /*in*/    "a" (&_argvec[0]) \
          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
@@ -1161,6 +1194,7 @@ typedef
       _argvec[5] = (unsigned long)(arg5); \
       _argvec[6] = (unsigned long)(arg6); \
       __asm__ volatile( \
+         "subq $128,%%rsp\n\t" \
          "movq 48(%%rax), %%r9\n\t" \
          "movq 40(%%rax), %%r8\n\t" \
          "movq 32(%%rax), %%rcx\n\t" \
@@ -1168,6 +1202,7 @@ typedef
          "movq 16(%%rax), %%rsi\n\t" \
          "movq 8(%%rax), %%rdi\n\t" \
          "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
          VALGRIND_CALL_NOREDIR_RAX \
+         "addq $128,%%rsp\n\t" \
          : /*out*/   "=a" (_res) \
          : /*in*/    "a" (&_argvec[0]) \
          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
@@ -1191,6 +1226,7 @@ typedef
       _argvec[6] = (unsigned long)(arg6); \
       _argvec[7] = (unsigned long)(arg7); \
       __asm__ volatile( \
+         "subq $128,%%rsp\n\t" \
          "pushq 56(%%rax)\n\t" \
          "movq 48(%%rax), %%r9\n\t" \
          "movq 40(%%rax), %%r8\n\t" \
@@ -1201,6 +1237,7 @@ typedef
          "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
          VALGRIND_CALL_NOREDIR_RAX \
          "addq $8, %%rsp\n" \
+         "addq $128,%%rsp\n\t" \
          : /*out*/   "=a" (_res) \
"=a" (_res) \ : /*in*/ "a" (&_argvec[0]) \ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \ @@ -1224,6 +1261,7 @@ typedef _argvec[7] = (unsigned long)(arg7); \ _argvec[8] = (unsigned long)(arg8); \ __asm__ volatile( \ + "subq $128,%%rsp\n\t" \ "pushq 64(%%rax)\n\t" \ "pushq 56(%%rax)\n\t" \ "movq 48(%%rax), %%r9\n\t" \ @@ -1235,6 +1273,7 @@ typedef "movq (%%rax), %%rax\n\t" /* target->%rax */ \ VALGRIND_CALL_NOREDIR_RAX \ "addq $16, %%rsp\n" \ + "addq $128,%%rsp\n\t" \ : /*out*/ "=a" (_res) \ : /*in*/ "a" (&_argvec[0]) \ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \ @@ -1259,6 +1298,7 @@ typedef _argvec[8] = (unsigned long)(arg8); \ _argvec[9] = (unsigned long)(arg9); \ __asm__ volatile( \ + "subq $128,%%rsp\n\t" \ "pushq 72(%%rax)\n\t" \ "pushq 64(%%rax)\n\t" \ "pushq 56(%%rax)\n\t" \ @@ -1271,6 +1311,7 @@ typedef "movq (%%rax), %%rax\n\t" /* target->%rax */ \ VALGRIND_CALL_NOREDIR_RAX \ "addq $24, %%rsp\n" \ + "addq $128,%%rsp\n\t" \ : /*out*/ "=a" (_res) \ : /*in*/ "a" (&_argvec[0]) \ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \ @@ -1296,6 +1337,7 @@ typedef _argvec[9] = (unsigned long)(arg9); \ _argvec[10] = (unsigned long)(arg10); \ __asm__ volatile( \ + "subq $128,%%rsp\n\t" \ "pushq 80(%%rax)\n\t" \ "pushq 72(%%rax)\n\t" \ "pushq 64(%%rax)\n\t" \ @@ -1309,6 +1351,7 @@ typedef "movq (%%rax), %%rax\n\t" /* target->%rax */ \ VALGRIND_CALL_NOREDIR_RAX \ "addq $32, %%rsp\n" \ + "addq $128,%%rsp\n\t" \ : /*out*/ "=a" (_res) \ : /*in*/ "a" (&_argvec[0]) \ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \ @@ -1335,6 +1378,7 @@ typedef _argvec[10] = (unsigned long)(arg10); \ _argvec[11] = (unsigned long)(arg11); \ __asm__ volatile( \ + "subq $128,%%rsp\n\t" \ "pushq 88(%%rax)\n\t" \ "pushq 80(%%rax)\n\t" \ "pushq 72(%%rax)\n\t" \ @@ -1349,6 +1393,7 @@ typedef "movq (%%rax), %%rax\n\t" /* target->%rax */ \ VALGRIND_CALL_NOREDIR_RAX \ "addq $40, %%rsp\n" \ + "addq $128,%%rsp\n\t" \ : /*out*/ "=a" (_res) \ : /*in*/ "a" (&_argvec[0]) \ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \ @@ -1376,6 +1421,7 @@ typedef _argvec[11] = (unsigned long)(arg11); \ _argvec[12] = (unsigned long)(arg12); \ __asm__ volatile( \ + "subq $128,%%rsp\n\t" \ "pushq 96(%%rax)\n\t" \ "pushq 88(%%rax)\n\t" \ "pushq 80(%%rax)\n\t" \ @@ -1391,6 +1437,7 @@ typedef "movq (%%rax), %%rax\n\t" /* target->%rax */ \ VALGRIND_CALL_NOREDIR_RAX \ "addq $48, %%rsp\n" \ + "addq $128,%%rsp\n\t" \ : /*out*/ "=a" (_res) \ : /*in*/ "a" (&_argvec[0]) \ : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \ -- cgit v1.2.3