git.ipfire.org Git - thirdparty/kernel/linux.git/commitdiff
x86/percpu: Introduce the percpu_binary_op() macro
authorUros Bizjak <ubizjak@gmail.com>
Tue, 30 Apr 2024 09:17:20 +0000 (11:17 +0200)
committerIngo Molnar <mingo@kernel.org>
Sat, 18 May 2024 09:18:40 +0000 (11:18 +0200)
Introduce the percpu_binary_op() macro, a copy of the percpu_to_op() macro.

Update percpu binary operators to use the new macro, since
percpu_to_op() will be re-purposed as a raw percpu write accessor
in a follow-up patch.

No functional change intended.

Signed-off-by: Uros Bizjak <ubizjak@gmail.com>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Cc: Andy Lutomirski <luto@kernel.org>
Cc: Josh Poimboeuf <jpoimboe@redhat.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Link: https://lore.kernel.org/r/20240430091833.196482-1-ubizjak@gmail.com
arch/x86/include/asm/percpu.h

index 3bedee1801e2a84e3b0277e9f0b907f5f24ee421..cc40d8d9c272c425c0f2e0dd912f4e938d8a1174 100644 (file)
@@ -163,6 +163,19 @@ do {                                                                       \
            : [var] "+m" (__my_cpu_var(_var)));                         \
 })
 
+#define percpu_binary_op(size, qual, op, _var, _val)                   \
+do {                                                                   \
+       __pcpu_type_##size pto_val__ = __pcpu_cast_##size(_val);        \
+       if (0) {                                                        \
+               typeof(_var) pto_tmp__;                                 \
+               pto_tmp__ = (_val);                                     \
+               (void)pto_tmp__;                                        \
+       }                                                               \
+       asm qual(__pcpu_op2_##size(op, "%[val]", __percpu_arg([var]))   \
+           : [var] "+m" (__my_cpu_var(_var))                           \
+           : [val] __pcpu_reg_imm_##size(pto_val__));                  \
+} while (0)
+
 /*
  * Generate a percpu add to memory instruction and optimize code
  * if one is added or subtracted.
@@ -182,7 +195,7 @@ do {                                                                        \
        else if (pao_ID__ == -1)                                        \
                percpu_unary_op(size, qual, "dec", var);                \
        else                                                            \
-               percpu_to_op(size, qual, "add", var, val);              \
+               percpu_binary_op(size, qual, "add", var, val);          \
 } while (0)
 
 #define percpu_from_op(size, qual, op, _var)                           \
@@ -492,12 +505,12 @@ do {                                                                      \
 #define raw_cpu_add_1(pcp, val)                percpu_add_op(1, , (pcp), val)
 #define raw_cpu_add_2(pcp, val)                percpu_add_op(2, , (pcp), val)
 #define raw_cpu_add_4(pcp, val)                percpu_add_op(4, , (pcp), val)
-#define raw_cpu_and_1(pcp, val)                percpu_to_op(1, , "and", (pcp), val)
-#define raw_cpu_and_2(pcp, val)                percpu_to_op(2, , "and", (pcp), val)
-#define raw_cpu_and_4(pcp, val)                percpu_to_op(4, , "and", (pcp), val)
-#define raw_cpu_or_1(pcp, val)         percpu_to_op(1, , "or", (pcp), val)
-#define raw_cpu_or_2(pcp, val)         percpu_to_op(2, , "or", (pcp), val)
-#define raw_cpu_or_4(pcp, val)         percpu_to_op(4, , "or", (pcp), val)
+#define raw_cpu_and_1(pcp, val)                percpu_binary_op(1, , "and", (pcp), val)
+#define raw_cpu_and_2(pcp, val)                percpu_binary_op(2, , "and", (pcp), val)
+#define raw_cpu_and_4(pcp, val)                percpu_binary_op(4, , "and", (pcp), val)
+#define raw_cpu_or_1(pcp, val)         percpu_binary_op(1, , "or", (pcp), val)
+#define raw_cpu_or_2(pcp, val)         percpu_binary_op(2, , "or", (pcp), val)
+#define raw_cpu_or_4(pcp, val)         percpu_binary_op(4, , "or", (pcp), val)
 #define raw_cpu_xchg_1(pcp, val)       raw_percpu_xchg_op(pcp, val)
 #define raw_cpu_xchg_2(pcp, val)       raw_percpu_xchg_op(pcp, val)
 #define raw_cpu_xchg_4(pcp, val)       raw_percpu_xchg_op(pcp, val)
@@ -505,12 +518,12 @@ do {                                                                      \
 #define this_cpu_add_1(pcp, val)       percpu_add_op(1, volatile, (pcp), val)
 #define this_cpu_add_2(pcp, val)       percpu_add_op(2, volatile, (pcp), val)
 #define this_cpu_add_4(pcp, val)       percpu_add_op(4, volatile, (pcp), val)
-#define this_cpu_and_1(pcp, val)       percpu_to_op(1, volatile, "and", (pcp), val)
-#define this_cpu_and_2(pcp, val)       percpu_to_op(2, volatile, "and", (pcp), val)
-#define this_cpu_and_4(pcp, val)       percpu_to_op(4, volatile, "and", (pcp), val)
-#define this_cpu_or_1(pcp, val)                percpu_to_op(1, volatile, "or", (pcp), val)
-#define this_cpu_or_2(pcp, val)                percpu_to_op(2, volatile, "or", (pcp), val)
-#define this_cpu_or_4(pcp, val)                percpu_to_op(4, volatile, "or", (pcp), val)
+#define this_cpu_and_1(pcp, val)       percpu_binary_op(1, volatile, "and", (pcp), val)
+#define this_cpu_and_2(pcp, val)       percpu_binary_op(2, volatile, "and", (pcp), val)
+#define this_cpu_and_4(pcp, val)       percpu_binary_op(4, volatile, "and", (pcp), val)
+#define this_cpu_or_1(pcp, val)                percpu_binary_op(1, volatile, "or", (pcp), val)
+#define this_cpu_or_2(pcp, val)                percpu_binary_op(2, volatile, "or", (pcp), val)
+#define this_cpu_or_4(pcp, val)                percpu_binary_op(4, volatile, "or", (pcp), val)
 #define this_cpu_xchg_1(pcp, nval)     this_percpu_xchg_op(pcp, nval)
 #define this_cpu_xchg_2(pcp, nval)     this_percpu_xchg_op(pcp, nval)
 #define this_cpu_xchg_4(pcp, nval)     this_percpu_xchg_op(pcp, nval)
@@ -543,16 +556,16 @@ do {                                                                      \
 #define this_cpu_read_stable_8(pcp)    percpu_stable_op(8, "mov", pcp)
 
 #define raw_cpu_add_8(pcp, val)                        percpu_add_op(8, , (pcp), val)
-#define raw_cpu_and_8(pcp, val)                        percpu_to_op(8, , "and", (pcp), val)
-#define raw_cpu_or_8(pcp, val)                 percpu_to_op(8, , "or", (pcp), val)
+#define raw_cpu_and_8(pcp, val)                        percpu_binary_op(8, , "and", (pcp), val)
+#define raw_cpu_or_8(pcp, val)                 percpu_binary_op(8, , "or", (pcp), val)
 #define raw_cpu_add_return_8(pcp, val)         percpu_add_return_op(8, , pcp, val)
 #define raw_cpu_xchg_8(pcp, nval)              raw_percpu_xchg_op(pcp, nval)
 #define raw_cpu_cmpxchg_8(pcp, oval, nval)     percpu_cmpxchg_op(8, , pcp, oval, nval)
 #define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval)        percpu_try_cmpxchg_op(8, , pcp, ovalp, nval)
 
 #define this_cpu_add_8(pcp, val)               percpu_add_op(8, volatile, (pcp), val)
-#define this_cpu_and_8(pcp, val)               percpu_to_op(8, volatile, "and", (pcp), val)
-#define this_cpu_or_8(pcp, val)                        percpu_to_op(8, volatile, "or", (pcp), val)
+#define this_cpu_and_8(pcp, val)               percpu_binary_op(8, volatile, "and", (pcp), val)
+#define this_cpu_or_8(pcp, val)                        percpu_binary_op(8, volatile, "or", (pcp), val)
 #define this_cpu_add_return_8(pcp, val)                percpu_add_return_op(8, volatile, pcp, val)
 #define this_cpu_xchg_8(pcp, nval)             this_percpu_xchg_op(pcp, nval)
 #define this_cpu_cmpxchg_8(pcp, oval, nval)    percpu_cmpxchg_op(8, volatile, pcp, oval, nval)