x86/futex: Convert to scoped user access
author     Thomas Gleixner <tglx@linutronix.de>
           Mon, 27 Oct 2025 08:44:02 +0000 (09:44 +0100)
committer  Ingo Molnar <mingo@kernel.org>
           Tue, 4 Nov 2025 07:28:29 +0000 (08:28 +0100)
Replace the open-coded implementation with the scoped user access guards.

No functional change intended.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Link: https://patch.msgid.link/20251027083745.799714344@linutronix.de
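
For reference, a minimal sketch of the pattern this conversion relies on, inferred from how the diff below uses it: scoped_user_rw_access(uaddr, Efault) is assumed to establish a read/write user access window for uaddr, branch to the given fault label if the access cannot be set up, and end the access automatically when the scope is left. That removes the explicit user_access_begin()/masked_user_access_begin() selection and the duplicated user_access_end() on both the success and error paths. The helper names below (read_user_u32_open_coded, read_user_u32_scoped) are hypothetical and only illustrate the before/after shape; they are not part of the patch.

/*
 * Sketch only, assuming <linux/uaccess.h> semantics as described above.
 */

/* Before: open-coded user access window with manual cleanup */
static __always_inline int read_user_u32_open_coded(u32 *val, u32 __user *uaddr)
{
	if (can_do_masked_user_access())
		uaddr = masked_user_access_begin(uaddr);
	else if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;

	unsafe_get_user(*val, uaddr, Efault);
	user_access_end();
	return 0;
Efault:
	user_access_end();
	return -EFAULT;
}

/* After: the scoped guard handles begin/end and the fault-path cleanup */
static __always_inline int read_user_u32_scoped(u32 *val, u32 __user *uaddr)
{
	scoped_user_rw_access(uaddr, Efault) {
		unsafe_get_user(*val, uaddr, Efault);
	}
	return 0;
Efault:
	return -EFAULT;
}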
arch/x86/include/asm/futex.h

index 6e2458088800acf76993fa974c573347d17b1d5d..fe5d9a10d900a69e9af995adc03d42b5baf52cd4 100644
@@ -46,38 +46,31 @@ do {                                                                \
 } while(0)
 
 static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
-               u32 __user *uaddr)
+                                                      u32 __user *uaddr)
 {
-       if (can_do_masked_user_access())
-               uaddr = masked_user_access_begin(uaddr);
-       else if (!user_access_begin(uaddr, sizeof(u32)))
-               return -EFAULT;
-
-       switch (op) {
-       case FUTEX_OP_SET:
-               unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
-               break;
-       case FUTEX_OP_ADD:
-               unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
-                                  uaddr, oparg, Efault);
-               break;
-       case FUTEX_OP_OR:
-               unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
-               break;
-       case FUTEX_OP_ANDN:
-               unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
-               break;
-       case FUTEX_OP_XOR:
-               unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
-               break;
-       default:
-               user_access_end();
-               return -ENOSYS;
+       scoped_user_rw_access(uaddr, Efault) {
+               switch (op) {
+               case FUTEX_OP_SET:
+                       unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
+                       break;
+               case FUTEX_OP_ADD:
+                       unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval, uaddr, oparg, Efault);
+                       break;
+               case FUTEX_OP_OR:
+                       unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
+                       break;
+               case FUTEX_OP_ANDN:
+                       unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
+                       break;
+               case FUTEX_OP_XOR:
+                       unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
+                       break;
+               default:
+                       return -ENOSYS;
+               }
        }
-       user_access_end();
        return 0;
 Efault:
-       user_access_end();
        return -EFAULT;
 }
 
@@ -86,21 +79,19 @@ static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 {
        int ret = 0;
 
-       if (can_do_masked_user_access())
-               uaddr = masked_user_access_begin(uaddr);
-       else if (!user_access_begin(uaddr, sizeof(u32)))
-               return -EFAULT;
-       asm volatile("\n"
-               "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
-               "2:\n"
-               _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0) \
-               : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
-               : "r" (newval), "1" (oldval)
-               : "memory"
-       );
-       user_access_end();
-       *uval = oldval;
+       scoped_user_rw_access(uaddr, Efault) {
+               asm_inline volatile("\n"
+                                   "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
+                                   "2:\n"
+                                   _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0)
+                                   : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
+                                   : "r" (newval), "1" (oldval)
+                                   : "memory");
+               *uval = oldval;
+       }
        return ret;
+Efault:
+       return -EFAULT;
 }
 
 #endif