From 897deacd018856aa6be62bc03a913367ae6c2f63 Mon Sep 17 00:00:00 2001
From: Chris Wright
Date: Fri, 24 Jun 2005 16:49:14 -0700
Subject: [PATCH] Add "memory" clobbers to the x86 inline asm of strncmp and
 friends, forwarded by jgarzik.

---
 queue/memory-clobber-on-x86.patch | 122 ++++++++++++++++++++++++++++++
 1 file changed, 122 insertions(+)
 create mode 100644 queue/memory-clobber-on-x86.patch

diff --git a/queue/memory-clobber-on-x86.patch b/queue/memory-clobber-on-x86.patch
new file mode 100644
index 00000000000..855d0f42387
--- /dev/null
+++ b/queue/memory-clobber-on-x86.patch
@@ -0,0 +1,122 @@
+From SRS0=Ut8f=U6=vger.kernel.org=bk-commits-head-owner@bounce2.pobox.com Fri Jun 24 16:02:22 2005
+Date: Fri, 24 Jun 2005 16:01:24 -0700
+From: Linux Kernel Mailing List
+To: bk-commits-head@vger.kernel.org
+Subject: Add "memory" clobbers to the x86 inline asm of strncmp and friends
+
+From: Linus Torvalds
+
+Add "memory" clobbers to the x86 inline asm of strncmp and friends
+
+They don't actually clobber memory, but gcc doesn't even know they
+_read_ memory, so it can apparently re-order memory accesses around them.
+
+That obviously does the wrong thing if the memory access happens to
+change the memory that the compare function is accessing.
+
+Verified by Jens Axboe to fix a strange boot problem.
+
+Signed-off-by: Chris Wright
+---
+
+ include/asm-i386/string.h | 32 ++++++++++++++++++++++----------
+ 1 files changed, 22 insertions(+), 10 deletions(-)
+
+diff --git a/include/asm-i386/string.h b/include/asm-i386/string.h
+--- a/include/asm-i386/string.h
++++ b/include/asm-i386/string.h
+@@ -116,7 +116,8 @@ __asm__ __volatile__(
+ 	"orb $1,%%al\n"
+ 	"3:"
+ 	:"=a" (__res), "=&S" (d0), "=&D" (d1)
+-	:"1" (cs),"2" (ct));
++	:"1" (cs),"2" (ct)
++	:"memory");
+ return __res;
+ }
+ 
+@@ -138,8 +139,9 @@ __asm__ __volatile__(
+ 	"3:\tsbbl %%eax,%%eax\n\t"
+ 	"orb $1,%%al\n"
+ 	"4:"
+-	:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
+-	:"1" (cs),"2" (ct),"3" (count));
++	:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
++	:"1" (cs),"2" (ct),"3" (count)
++	:"memory");
+ return __res;
+ }
+ 
+@@ -158,7 +160,9 @@ __asm__ __volatile__(
+ 	"movl $1,%1\n"
+ 	"2:\tmovl %1,%0\n\t"
+ 	"decl %0"
+-	:"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
++	:"=a" (__res), "=&S" (d0)
++	:"1" (s),"0" (c)
++	:"memory");
+ return __res;
+ }
+ 
+@@ -175,7 +179,9 @@ __asm__ __volatile__(
+ 	"leal -1(%%esi),%0\n"
+ 	"2:\ttestb %%al,%%al\n\t"
+ 	"jne 1b"
+-	:"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
++	:"=g" (__res), "=&S" (d0), "=&a" (d1)
++	:"0" (0),"1" (s),"2" (c)
++	:"memory");
+ return __res;
+ }
+ 
+@@ -189,7 +195,9 @@ __asm__ __volatile__(
+ 	"scasb\n\t"
+ 	"notl %0\n\t"
+ 	"decl %0"
+-	:"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffffu));
++	:"=c" (__res), "=&D" (d0)
++	:"1" (s),"a" (0), "0" (0xffffffffu)
++	:"memory");
+ return __res;
+ }
+ 
+@@ -333,7 +341,9 @@ __asm__ __volatile__(
+ 	"je 1f\n\t"
+ 	"movl $1,%0\n"
+ 	"1:\tdecl %0"
+-	:"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
++	:"=D" (__res), "=&c" (d0)
++	:"a" (c),"0" (cs),"1" (count)
++	:"memory");
+ return __res;
+ }
+ 
+@@ -369,7 +379,7 @@ __asm__ __volatile__(
+ 	"je 2f\n\t"
+ 	"stosb\n"
+ 	"2:"
+-	: "=&c" (d0), "=&D" (d1)
++	:"=&c" (d0), "=&D" (d1)
+ 	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
+ 	:"memory");
+ return (s);
+@@ -392,7 +402,8 @@ __asm__ __volatile__(
+ 	"jne 1b\n"
+ 	"3:\tsubl %2,%0"
+ 	:"=a" (__res), "=&d" (d0)
+-	:"c" (s),"1" (count));
++	:"c" (s),"1" (count)
++	:"memory");
+ return __res;
+ }
+ /* end of additional stuff */
+@@ -473,7 +484,8 @@ static inline void * memscan(void * addr
+ 		"dec %%edi\n"
+ 		"1:"
+ 		: "=D" (addr), "=c" (size)
+-		: "0" (addr), "1" (size), "a" (c));
++		: "0" (addr), "1" (size), "a" (c)
++		: "memory");
+ 	return addr;
+ }
+ 
-- 
2.47.3
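A note for readers unfamiliar with gcc asm clobbers: the failure mode the
patch describes can be reproduced outside the kernel. The sketch below is
illustrative only, with hypothetical names (buf, byte_is_A_broken,
byte_is_A_fixed); it is not from the patch or from include/asm-i386/string.h.
Even a __volatile__ asm only pins the asm statement itself; gcc may still
move ordinary loads and stores across it unless a "memory" clobber (or an
explicit memory operand) tells it the asm touches memory.

/* clobber-sketch.c -- hypothetical, stand-alone illustration.
 * Build (any x86 gcc): gcc -O2 -m32 -S clobber-sketch.c
 */
static char buf[16];

static int byte_is_A_broken(const char *p)
{
	int res;

	/* BROKEN: "r" (p) passes only the pointer VALUE.  Nothing tells
	 * gcc that the asm dereferences p, so the store to buf[0] in
	 * broken_caller() may legally be sunk below this asm. */
	__asm__ __volatile__(
		"xorl %0,%0\n\t"
		"cmpb $65,(%1)\n\t"	/* does *p == 'A' ? */
		"sete %b0"
		: "=&q" (res)
		: "r" (p));
	return res;
}

static int byte_is_A_fixed(const char *p)
{
	int res;

	/* FIXED the same way as the patch: the "memory" clobber makes
	 * gcc assume the asm may read or write any memory, so no load
	 * or store can be reordered across it. */
	__asm__ __volatile__(
		"xorl %0,%0\n\t"
		"cmpb $65,(%1)\n\t"
		"sete %b0"
		: "=&q" (res)
		: "r" (p)
		: "memory");
	return res;
}

int broken_caller(void)
{
	buf[0] = 'A';			/* may be moved below the asm...  */
	return byte_is_A_broken(buf);	/* ...so this can read stale data */
}

int fixed_caller(void)
{
	buf[0] = 'A';			/* pinned by the "memory" clobber */
	return byte_is_A_fixed(buf);
}

A narrower alternative to the blanket "memory" clobber is a memory input
operand describing the bytes actually read (e.g. "m" (*p) for a single
byte), but for string functions whose access length is unknown at compile
time the blunt clobber is the simple, safe choice, which is presumably why
the patch uses it.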