4.4-stable patches
author		Greg Kroah-Hartman <gregkh@linuxfoundation.org>	Tue, 1 Jun 2021 08:47:41 +0000 (10:47 +0200)
committer	Greg Kroah-Hartman <gregkh@linuxfoundation.org>	Tue, 1 Jun 2021 08:47:41 +0000 (10:47 +0200)
added patches:
x86-asm-add-instruction-suffixes-to-bitops.patch
x86-asm-change-the-gen_-_rmwcc-macros-to-not-quote-the-condition.patch

queue-4.4/series
queue-4.4/x86-asm-add-instruction-suffixes-to-bitops.patch [new file with mode: 0644]
queue-4.4/x86-asm-change-the-gen_-_rmwcc-macros-to-not-quote-the-condition.patch [new file with mode: 0644]

diff --git a/queue-4.4/series b/queue-4.4/series
index b1b5d37a581a5fc717ce3537d55cc17b86f5b080..c4f54246b05590a54c526158127d1bc87232228a 100644
--- a/queue-4.4/series
+++ b/queue-4.4/series
@@ -59,3 +59,5 @@ mips-ralink-export-rt_sysc_membase-for-rt2880_wdt.c.patch
 hugetlbfs-hugetlb_fault_mutex_hash-cleanup.patch
 bluetooth-eliminate-the-potential-race-condition-when-removing-the-hci-controller.patch
 usb-core-reduce-power-on-good-delay-time-of-root-hub.patch
+x86-asm-change-the-gen_-_rmwcc-macros-to-not-quote-the-condition.patch
+x86-asm-add-instruction-suffixes-to-bitops.patch
diff --git a/queue-4.4/x86-asm-add-instruction-suffixes-to-bitops.patch b/queue-4.4/x86-asm-add-instruction-suffixes-to-bitops.patch
new file mode 100644
index 0000000..231f050
--- /dev/null
+++ b/queue-4.4/x86-asm-add-instruction-suffixes-to-bitops.patch
@@ -0,0 +1,157 @@
+From 7ee66a8a71cf68a44ecfdf2f58e6847cf654ff88 Mon Sep 17 00:00:00 2001
+From: Jan Beulich <JBeulich@suse.com>
+Date: Mon, 26 Feb 2018 04:11:51 -0700
+Subject: x86/asm: Add instruction suffixes to bitops
+
+From: Jan Beulich <JBeulich@suse.com>
+
+commit 22636f8c9511245cb3c8412039f1dd95afb3aa59 upstream.
+
+Omitting suffixes from instructions in AT&T mode is bad practice when
+operand size cannot be determined by the assembler from register
+operands, and is likely going to be warned about by upstream gas in the
+future (mine does already). Add the missing suffixes here. Note that for
+64-bit this means some operations change from being 32-bit to 64-bit.
+
+Signed-off-by: Jan Beulich <jbeulich@suse.com>
+Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
+Link: https://lkml.kernel.org/r/5A93F98702000078001ABACC@prv-mh.provo.novell.com
+Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
+---
+ arch/x86/include/asm/bitops.h |   29 ++++++++++++++++-------------
+ arch/x86/include/asm/percpu.h |    2 +-
+ 2 files changed, 17 insertions(+), 14 deletions(-)
+
+--- a/arch/x86/include/asm/bitops.h
++++ b/arch/x86/include/asm/bitops.h
+@@ -77,7 +77,7 @@ set_bit(long nr, volatile unsigned long
+                       : "iq" ((u8)CONST_MASK(nr))
+                       : "memory");
+       } else {
+-              asm volatile(LOCK_PREFIX "bts %1,%0"
++              asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
+                       : BITOP_ADDR(addr) : "Ir" (nr) : "memory");
+       }
+ }
+@@ -93,7 +93,7 @@ set_bit(long nr, volatile unsigned long
+  */
+ static inline void __set_bit(long nr, volatile unsigned long *addr)
+ {
+-      asm volatile("bts %1,%0" : ADDR : "Ir" (nr) : "memory");
++      asm volatile(__ASM_SIZE(bts) " %1,%0" : ADDR : "Ir" (nr) : "memory");
+ }
+ /**
+@@ -114,7 +114,7 @@ clear_bit(long nr, volatile unsigned lon
+                       : CONST_MASK_ADDR(nr, addr)
+                       : "iq" ((u8)~CONST_MASK(nr)));
+       } else {
+-              asm volatile(LOCK_PREFIX "btr %1,%0"
++              asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
+                       : BITOP_ADDR(addr)
+                       : "Ir" (nr));
+       }
+@@ -136,7 +136,7 @@ static inline void clear_bit_unlock(long
+ static inline void __clear_bit(long nr, volatile unsigned long *addr)
+ {
+-      asm volatile("btr %1,%0" : ADDR : "Ir" (nr));
++      asm volatile(__ASM_SIZE(btr) " %1,%0" : ADDR : "Ir" (nr));
+ }
+ /*
+@@ -168,7 +168,7 @@ static inline void __clear_bit_unlock(lo
+  */
+ static inline void __change_bit(long nr, volatile unsigned long *addr)
+ {
+-      asm volatile("btc %1,%0" : ADDR : "Ir" (nr));
++      asm volatile(__ASM_SIZE(btc) " %1,%0" : ADDR : "Ir" (nr));
+ }
+ /**
+@@ -187,7 +187,7 @@ static inline void change_bit(long nr, v
+                       : CONST_MASK_ADDR(nr, addr)
+                       : "iq" ((u8)CONST_MASK(nr)));
+       } else {
+-              asm volatile(LOCK_PREFIX "btc %1,%0"
++              asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
+                       : BITOP_ADDR(addr)
+                       : "Ir" (nr));
+       }
+@@ -203,7 +203,8 @@ static inline void change_bit(long nr, v
+  */
+ static inline int test_and_set_bit(long nr, volatile unsigned long *addr)
+ {
+-      GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", c);
++      GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts),
++                       *addr, "Ir", nr, "%0", c);
+ }
+ /**
+@@ -232,7 +233,7 @@ static inline int __test_and_set_bit(lon
+ {
+       int oldbit;
+-      asm("bts %2,%1\n\t"
++      asm(__ASM_SIZE(bts) " %2,%1\n\t"
+           "sbb %0,%0"
+           : "=r" (oldbit), ADDR
+           : "Ir" (nr));
+@@ -249,7 +250,8 @@ static inline int __test_and_set_bit(lon
+  */
+ static inline int test_and_clear_bit(long nr, volatile unsigned long *addr)
+ {
+-      GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", c);
++      GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr),
++                       *addr, "Ir", nr, "%0", c);
+ }
+ /**
+@@ -272,7 +274,7 @@ static inline int __test_and_clear_bit(l
+ {
+       int oldbit;
+-      asm volatile("btr %2,%1\n\t"
++      asm volatile(__ASM_SIZE(btr) " %2,%1\n\t"
+                    "sbb %0,%0"
+                    : "=r" (oldbit), ADDR
+                    : "Ir" (nr));
+@@ -284,7 +286,7 @@ static inline int __test_and_change_bit(
+ {
+       int oldbit;
+-      asm volatile("btc %2,%1\n\t"
++      asm volatile(__ASM_SIZE(btc) " %2,%1\n\t"
+                    "sbb %0,%0"
+                    : "=r" (oldbit), ADDR
+                    : "Ir" (nr) : "memory");
+@@ -302,7 +304,8 @@ static inline int __test_and_change_bit(
+  */
+ static inline int test_and_change_bit(long nr, volatile unsigned long *addr)
+ {
+-      GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", c);
++      GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc),
++                       *addr, "Ir", nr, "%0", c);
+ }
+ static __always_inline int constant_test_bit(long nr, const volatile unsigned long *addr)
+@@ -315,7 +318,7 @@ static inline int variable_test_bit(long
+ {
+       int oldbit;
+-      asm volatile("bt %2,%1\n\t"
++      asm volatile(__ASM_SIZE(bt) " %2,%1\n\t"
+                    "sbb %0,%0"
+                    : "=r" (oldbit)
+                    : "m" (*(unsigned long *)addr), "Ir" (nr));
+--- a/arch/x86/include/asm/percpu.h
++++ b/arch/x86/include/asm/percpu.h
+@@ -534,7 +534,7 @@ static inline int x86_this_cpu_variable_
+ {
+       int oldbit;
+-      asm volatile("bt "__percpu_arg(2)",%1\n\t"
++      asm volatile("btl "__percpu_arg(2)",%1\n\t"
+                       "sbb %0,%0"
+                       : "=r" (oldbit)
+                       : "m" (*(unsigned long *)addr), "Ir" (nr));
diff --git a/queue-4.4/x86-asm-change-the-gen_-_rmwcc-macros-to-not-quote-the-condition.patch b/queue-4.4/x86-asm-change-the-gen_-_rmwcc-macros-to-not-quote-the-condition.patch
new file mode 100644
index 0000000..026d9aa
--- /dev/null
+++ b/queue-4.4/x86-asm-change-the-gen_-_rmwcc-macros-to-not-quote-the-condition.patch
@@ -0,0 +1,203 @@
+From 18fe58229d80c7f4f138a07e84ba608e1ebd232b Mon Sep 17 00:00:00 2001
+From: "H. Peter Anvin" <hpa@zytor.com>
+Date: Wed, 8 Jun 2016 12:38:39 -0700
+Subject: x86, asm: change the GEN_*_RMWcc() macros to not quote the condition
+
+From: H. Peter Anvin <hpa@zytor.com>
+
+commit 18fe58229d80c7f4f138a07e84ba608e1ebd232b upstream.
+
+Change the lexical definition of the GEN_*_RMWcc() macros to not take
+the condition code as a quoted string.  This will help support
+changing them to use the new __GCC_ASM_FLAG_OUTPUTS__ feature in a
+subsequent patch.
+
+Signed-off-by: H. Peter Anvin <hpa@zytor.com>
+Link: http://lkml.kernel.org/r/1465414726-197858-4-git-send-email-hpa@linux.intel.com
+Reviewed-by: Andy Lutomirski <luto@kernel.org>
+Reviewed-by: Borislav Petkov <bp@suse.de>
+Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
+Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
+---
+ arch/x86/include/asm/atomic.h      |    8 ++++----
+ arch/x86/include/asm/atomic64_64.h |    8 ++++----
+ arch/x86/include/asm/bitops.h      |    6 +++---
+ arch/x86/include/asm/local.h       |    8 ++++----
+ arch/x86/include/asm/preempt.h     |    2 +-
+ arch/x86/include/asm/rmwcc.h       |    4 ++--
+ 6 files changed, 18 insertions(+), 18 deletions(-)
+
+--- a/arch/x86/include/asm/atomic.h
++++ b/arch/x86/include/asm/atomic.h
+@@ -77,7 +77,7 @@ static __always_inline void atomic_sub(i
+  */
+ static __always_inline int atomic_sub_and_test(int i, atomic_t *v)
+ {
+-      GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
++      GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
+ }
+ /**
+@@ -114,7 +114,7 @@ static __always_inline void atomic_dec(a
+  */
+ static __always_inline int atomic_dec_and_test(atomic_t *v)
+ {
+-      GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
++      GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);
+ }
+ /**
+@@ -127,7 +127,7 @@ static __always_inline int atomic_dec_an
+  */
+ static __always_inline int atomic_inc_and_test(atomic_t *v)
+ {
+-      GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
++      GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);
+ }
+ /**
+@@ -141,7 +141,7 @@ static __always_inline int atomic_inc_an
+  */
+ static __always_inline int atomic_add_negative(int i, atomic_t *v)
+ {
+-      GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
++      GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);
+ }
+ /**
+--- a/arch/x86/include/asm/atomic64_64.h
++++ b/arch/x86/include/asm/atomic64_64.h
+@@ -72,7 +72,7 @@ static inline void atomic64_sub(long i,
+  */
+ static inline int atomic64_sub_and_test(long i, atomic64_t *v)
+ {
+-      GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", "e");
++      GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
+ }
+ /**
+@@ -111,7 +111,7 @@ static __always_inline void atomic64_dec
+  */
+ static inline int atomic64_dec_and_test(atomic64_t *v)
+ {
+-      GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", "e");
++      GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
+ }
+ /**
+@@ -124,7 +124,7 @@ static inline int atomic64_dec_and_test(
+  */
+ static inline int atomic64_inc_and_test(atomic64_t *v)
+ {
+-      GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", "e");
++      GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
+ }
+ /**
+@@ -138,7 +138,7 @@ static inline int atomic64_inc_and_test(
+  */
+ static inline int atomic64_add_negative(long i, atomic64_t *v)
+ {
+-      GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", "s");
++      GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
+ }
+ /**
+--- a/arch/x86/include/asm/bitops.h
++++ b/arch/x86/include/asm/bitops.h
+@@ -203,7 +203,7 @@ static inline void change_bit(long nr, v
+  */
+ static inline int test_and_set_bit(long nr, volatile unsigned long *addr)
+ {
+-      GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", "c");
++      GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", c);
+ }
+ /**
+@@ -249,7 +249,7 @@ static inline int __test_and_set_bit(lon
+  */
+ static inline int test_and_clear_bit(long nr, volatile unsigned long *addr)
+ {
+-      GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", "c");
++      GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", c);
+ }
+ /**
+@@ -302,7 +302,7 @@ static inline int __test_and_change_bit(
+  */
+ static inline int test_and_change_bit(long nr, volatile unsigned long *addr)
+ {
+-      GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", "c");
++      GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", c);
+ }
+ static __always_inline int constant_test_bit(long nr, const volatile unsigned long *addr)
+--- a/arch/x86/include/asm/local.h
++++ b/arch/x86/include/asm/local.h
+@@ -52,7 +52,7 @@ static inline void local_sub(long i, loc
+  */
+ static inline int local_sub_and_test(long i, local_t *l)
+ {
+-      GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, "er", i, "%0", "e");
++      GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, "er", i, "%0", e);
+ }
+ /**
+@@ -65,7 +65,7 @@ static inline int local_sub_and_test(lon
+  */
+ static inline int local_dec_and_test(local_t *l)
+ {
+-      GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, "%0", "e");
++      GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, "%0", e);
+ }
+ /**
+@@ -78,7 +78,7 @@ static inline int local_dec_and_test(loc
+  */
+ static inline int local_inc_and_test(local_t *l)
+ {
+-      GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, "%0", "e");
++      GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, "%0", e);
+ }
+ /**
+@@ -92,7 +92,7 @@ static inline int local_inc_and_test(loc
+  */
+ static inline int local_add_negative(long i, local_t *l)
+ {
+-      GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, "er", i, "%0", "s");
++      GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, "er", i, "%0", s);
+ }
+ /**
+--- a/arch/x86/include/asm/preempt.h
++++ b/arch/x86/include/asm/preempt.h
+@@ -81,7 +81,7 @@ static __always_inline void __preempt_co
+  */
+ static __always_inline bool __preempt_count_dec_and_test(void)
+ {
+-      GEN_UNARY_RMWcc("decl", __preempt_count, __percpu_arg(0), "e");
++      GEN_UNARY_RMWcc("decl", __preempt_count, __percpu_arg(0), e);
+ }
+ /*
+--- a/arch/x86/include/asm/rmwcc.h
++++ b/arch/x86/include/asm/rmwcc.h
+@@ -5,7 +5,7 @@
+ #define __GEN_RMWcc(fullop, var, cc, ...)                             \
+ do {                                                                  \
+-      asm_volatile_goto (fullop "; j" cc " %l[cc_label]"              \
++      asm_volatile_goto (fullop "; j" #cc " %l[cc_label]"             \
+                       : : "m" (var), ## __VA_ARGS__                   \
+                       : "memory" : cc_label);                         \
+       return 0;                                                       \
+@@ -24,7 +24,7 @@ cc_label:                                                            \
+ #define __GEN_RMWcc(fullop, var, cc, ...)                             \
+ do {                                                                  \
+       char c;                                                         \
+-      asm volatile (fullop "; set" cc " %1"                           \
++      asm volatile (fullop "; set" #cc " %1"                          \
+                       : "+m" (var), "=qm" (c)                         \
+                       : __VA_ARGS__ : "memory");                      \
+       return c != 0;                                                  \
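
To see what the unquoting buys, here is a compilable sketch of the post-patch macro shape — a simplified assumption, not the kernel's GEN_UNARY_RMWcc() verbatim. The condition now arrives as a bare token (e, not "e") and the macro stringifies it with #cc wherever a string is needed:

    #include <stdio.h>

    /*
     * Simplified stand-in for the post-patch GEN_UNARY_RMWcc(): uses the
     * GCC statement-expression extension, x86 only.  "decl %0; sete %1"
     * is produced by pasting fullop, "set", and the stringified token.
     */
    #define GEN_UNARY_RMWcc(fullop, var, cc)                        \
    ({                                                              \
            char c;                                                 \
            asm volatile(fullop " %0; set" #cc " %1"                \
                         : "+m" (var), "=qm" (c)                    \
                         : : "memory");                             \
            c != 0;                                                 \
    })

    int main(void)
    {
            int counter = 1;

            /* callers now write ..., e) instead of ..., "e") */
            if (GEN_UNARY_RMWcc("decl", counter, e))
                    printf("counter reached zero\n");
            return 0;
    }

A quoted "e" could only ever be concatenated into other strings; with a bare token, the subsequent __GCC_ASM_FLAG_OUTPUTS__ patch mentioned in the commit message can build both the "set"#cc fallback and a flag-output constraint ("=@cc" #cc, i.e. "=@cce") from the same macro argument.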