arm64: atomics: lse: Remove unused parameters from ATOMIC_FETCH_OP_AND macros
author: Seongsu Park <sgsu.park@samsung.com>
Wed, 26 Nov 2025 02:10:25 +0000 (11:10 +0900)
committer: Catalin Marinas <catalin.marinas@arm.com>
Thu, 27 Nov 2025 18:15:24 +0000 (18:15 +0000)
The ATOMIC_FETCH_OP_AND and ATOMIC64_FETCH_OP_AND macros accept 'mb' and
'cl' parameters but never use them in their implementation. These macros
simply delegate to the corresponding andnot functions, which handle the
actual atomic operations and memory barriers. Remove the unused parameters
and drop the corresponding arguments from the macro invocations.
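
For example, with this change ATOMIC_FETCH_OP_AND(_acquire) still expands
to the same wrapper it did before (a sketch of the expansion; the acquire
ordering comes entirely from the andnot helper it delegates to):

  /* Expansion of ATOMIC_FETCH_OP_AND(_acquire): invert the operand and
   * let the andnot variant supply the LSE instruction and ordering. */
  static __always_inline int
  __lse_atomic_fetch_and_acquire(int i, atomic_t *v)
  {
          return __lse_atomic_fetch_andnot_acquire(~i, v);
  }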

Signed-off-by: Seongsu Park <sgsu.park@samsung.com>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
arch/arm64/include/asm/atomic_lse.h

index 87f568a94e558716c9ced5459803947bffd81adf..afad1849c4cf5b1db1064c236d6c1d38525b00c1 100644
--- a/arch/arm64/include/asm/atomic_lse.h
+++ b/arch/arm64/include/asm/atomic_lse.h
@@ -103,17 +103,17 @@ static __always_inline void __lse_atomic_and(int i, atomic_t *v)
        return __lse_atomic_andnot(~i, v);
 }
 
-#define ATOMIC_FETCH_OP_AND(name, mb, cl...)                           \
+#define ATOMIC_FETCH_OP_AND(name)                                      \
 static __always_inline int                                             \
 __lse_atomic_fetch_and##name(int i, atomic_t *v)                       \
 {                                                                      \
        return __lse_atomic_fetch_andnot##name(~i, v);                  \
 }
 
-ATOMIC_FETCH_OP_AND(_relaxed,   )
-ATOMIC_FETCH_OP_AND(_acquire,  a, "memory")
-ATOMIC_FETCH_OP_AND(_release,  l, "memory")
-ATOMIC_FETCH_OP_AND(        , al, "memory")
+ATOMIC_FETCH_OP_AND(_relaxed)
+ATOMIC_FETCH_OP_AND(_acquire)
+ATOMIC_FETCH_OP_AND(_release)
+ATOMIC_FETCH_OP_AND(        )
 
 #undef ATOMIC_FETCH_OP_AND
 
@@ -210,17 +210,17 @@ static __always_inline void __lse_atomic64_and(s64 i, atomic64_t *v)
        return __lse_atomic64_andnot(~i, v);
 }
 
-#define ATOMIC64_FETCH_OP_AND(name, mb, cl...)                         \
+#define ATOMIC64_FETCH_OP_AND(name)                                    \
 static __always_inline long                                            \
 __lse_atomic64_fetch_and##name(s64 i, atomic64_t *v)                   \
 {                                                                      \
        return __lse_atomic64_fetch_andnot##name(~i, v);                \
 }
 
-ATOMIC64_FETCH_OP_AND(_relaxed,   )
-ATOMIC64_FETCH_OP_AND(_acquire,  a, "memory")
-ATOMIC64_FETCH_OP_AND(_release,  l, "memory")
-ATOMIC64_FETCH_OP_AND(        , al, "memory")
+ATOMIC64_FETCH_OP_AND(_relaxed)
+ATOMIC64_FETCH_OP_AND(_acquire)
+ATOMIC64_FETCH_OP_AND(_release)
+ATOMIC64_FETCH_OP_AND(        )
 
 #undef ATOMIC64_FETCH_OP_AND