x86: replace LOCK_PREFIX in futex.h
author     Thomas Gleixner <tglx@linutronix.de>
Sat, 23 Feb 2008 16:56:56 +0000 (11:56 -0500)
committer  Chris Wright <chrisw@sous-sol.org>
Mon, 24 Mar 2008 18:47:16 +0000 (11:47 -0700)
Commit: 9d55b9923a1b7ea8193b8875c57ec940dc2ff027

The exception fixup for the futex macros __futex_atomic_op1/2 and
futex_atomic_cmpxchg_inatomic() is missing an entry when the lock
prefix is replaced by a NOP via SMP alternatives.
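
For context: on this kernel LOCK_PREFIX does not just emit the lock byte, it
also records the byte's address in the .smp_locks section so the alternatives
code can rewrite it to a NOP when booting on a uniprocessor machine.  A
simplified sketch of the interaction (illustrative only, not the literal
2.6.24 source):

    /*
     * Roughly what LOCK_PREFIX expands to on an SMP build: emit the lock
     * byte and remember where it lives so it can be patched to a NOP (0x90)
     * on a UP boot.
     */
    #define LOCK_PREFIX				\
    	".section .smp_locks,\"a\"\n"		\
    	"  .align 4\n"				\
    	"  .long 661f\n"  /* address of the lock byte below */	\
    	".previous\n"				\
    	"661:\n\tlock; "

    /*
     * The futex macros tag the locked instruction for the exception table,
     * roughly:
     *
     *    "2:	" LOCK_PREFIX "cmpxchgl %3, %2\n"
     *    ...
     *    ".section __ex_table,\"a\"\n"
     *    "	.long 2b,4b\n"		fixup keyed to the address of label 2:
     *
     * SMP case: label 2: is the first byte of the single "lock cmpxchgl"
     * instruction, so a fault on the user address reports EIP == 2: and the
     * fixup at 4: is found.
     *
     * UP case:  the lock byte at 2: has been rewritten to a NOP, the
     * cmpxchgl is now a separate instruction starting at 2: + 1, the fault
     * reports EIP == 2: + 1, and no exception table entry matches -- the
     * fixup is never applied.
     */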

Chuck Ebbert tracked this down from the information provided in:
https://bugzilla.redhat.com/show_bug.cgi?id=429412

A possible solution would be to add another fixup after the
LOCK_PREFIX, so that both the LOCK and NOP cases have their own entries
in the exception table, but it's not really worth the trouble.
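
A rough illustration of that rejected approach (hypothetical, never merged):
give the byte after the lock prefix its own label and its own exception
table entry, so the fault address matches whichever instruction layout is
live:

    /*
     * Hypothetical sketch only -- one extra label and one extra __ex_table
     * entry for every locked instruction in the futex macros:
     *
     *    "2:	" LOCK_PREFIX "\n"
     *    "20:	cmpxchgl %3, %2\n"	starts here once the prefix is NOPped
     *    "	jnz	1b\n"
     *    ...
     *    ".section __ex_table,\"a\"\n"
     *    "	.long 2b,4b\n"		entry for the lock-prefixed (SMP) form
     *    "	.long 20b,4b\n"		extra entry for the NOP (UP) form
     *    ".previous"
     *
     * Doing that for every LOCK_PREFIX user in the futex code adds churn
     * for no real benefit, hence the simpler fix below.
     */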

Simply replace LOCK_PREFIX with a hard-coded "lock ;" prefix, which SMP
alternatives leave untouched.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
[cebbert@redhat.com: backport to 2.6.24]
Signed-off-by: Chris Wright <chrisw@sous-sol.org>
Signed-off-by: Greg Kroah-Hartman <gregkh@suse.de>
include/asm-x86/futex_32.h
include/asm-x86/futex_64.h

diff --git a/include/asm-x86/futex_32.h b/include/asm-x86/futex_32.h
index 438ef0ec7101c2f4b2fcf4fb711f99e81494712e..80964fd6944b6ce66cfc1c16c53ed883de7aa8b3 100644
--- a/include/asm-x86/futex_32.h
+++ b/include/asm-x86/futex_32.h
@@ -28,7 +28,7 @@
 "1:    movl    %2, %0\n\
        movl    %0, %3\n"                                       \
        insn "\n"                                               \
-"2:    " LOCK_PREFIX "cmpxchgl %3, %2\n\
+"2:    lock ; cmpxchgl %3, %2\n\
        jnz     1b\n\
 3:     .section .fixup,\"ax\"\n\
 4:     mov     %5, %1\n\
@@ -68,7 +68,7 @@ futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
 #endif
                switch (op) {
                case FUTEX_OP_ADD:
-                       __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret,
+                       __futex_atomic_op1("lock ; xaddl %0, %2", ret,
                                           oldval, uaddr, oparg);
                        break;
                case FUTEX_OP_OR:
@@ -111,7 +111,7 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
                return -EFAULT;
 
        __asm__ __volatile__(
-               "1:     " LOCK_PREFIX "cmpxchgl %3, %1          \n"
+               "1:     lock ; cmpxchgl %3, %1                  \n"
 
                "2:     .section .fixup, \"ax\"                 \n"
                "3:     mov     %2, %0                          \n"
diff --git a/include/asm-x86/futex_64.h b/include/asm-x86/futex_64.h
index 5cdfb08013c38889dc8a42a2dff4c4a3c1833ff8..423c051f4e91e2d72138de71be05781a703bff85 100644
--- a/include/asm-x86/futex_64.h
+++ b/include/asm-x86/futex_64.h
@@ -27,7 +27,7 @@
 "1:    movl    %2, %0\n\
        movl    %0, %3\n"                                       \
        insn "\n"                                               \
-"2:    " LOCK_PREFIX "cmpxchgl %3, %2\n\
+"2:    lock ; cmpxchgl %3, %2\n\
        jnz     1b\n\
 3:     .section .fixup,\"ax\"\n\
 4:     mov     %5, %1\n\
@@ -62,7 +62,7 @@ futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
                __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
-               __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
+               __futex_atomic_op1("lock ; xaddl %0, %2", ret, oldval,
                                   uaddr, oparg);
                break;
        case FUTEX_OP_OR:
@@ -101,7 +101,7 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
                return -EFAULT;
 
        __asm__ __volatile__(
-               "1:     " LOCK_PREFIX "cmpxchgl %3, %1          \n"
+               "1:     lock ; cmpxchgl %3, %1                  \n"
 
                "2:     .section .fixup, \"ax\"                 \n"
                "3:     mov     %2, %0                          \n"