--- /dev/null
+From 22636f8c9511245cb3c8412039f1dd95afb3aa59 Mon Sep 17 00:00:00 2001
+From: Jan Beulich <JBeulich@suse.com>
+Date: Mon, 26 Feb 2018 04:11:51 -0700
+Subject: x86/asm: Add instruction suffixes to bitops
+
+From: Jan Beulich <JBeulich@suse.com>
+
+commit 22636f8c9511245cb3c8412039f1dd95afb3aa59 upstream.
+
+Omitting suffixes from instructions in AT&T mode is bad practice when
+operand size cannot be determined by the assembler from register
+operands, and is likely going to be warned about by upstream gas in the
+future (mine does already). Add the missing suffixes here. Note that for
+64-bit this means some operations change from being 32-bit to 64-bit.
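+
+For reference, __ASM_SIZE() lives in arch/x86/include/asm/asm.h and picks the
+operand-size suffix from the build's bitness. A simplified sketch of the
+relevant macros (not the verbatim header):
+
+  #define __ASM_FORM(x)          " " #x " "
+  #ifdef CONFIG_X86_32
+  # define __ASM_SEL(a, b)       __ASM_FORM(a)   /* 32-bit: take the 'l' form */
+  #else
+  # define __ASM_SEL(a, b)       __ASM_FORM(b)   /* 64-bit: take the 'q' form */
+  #endif
+  #define __ASM_SIZE(inst, ...)  __ASM_SEL(inst##l##__VA_ARGS__, \
+                                           inst##q##__VA_ARGS__)
+
+so that e.g. __ASM_SIZE(bts) " %1,%0" expands to "btsl %1,%0" on 32-bit and
+"btsq %1,%0" on 64-bit, making the operand size explicit for gas.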
+
+Signed-off-by: Jan Beulich <jbeulich@suse.com>
+Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
+Link: https://lkml.kernel.org/r/5A93F98702000078001ABACC@prv-mh.provo.novell.com
+Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
+
+---
+ arch/x86/include/asm/bitops.h | 29 ++++++++++++++++-------------
+ arch/x86/include/asm/percpu.h | 2 +-
+ 2 files changed, 17 insertions(+), 14 deletions(-)
+
+--- a/arch/x86/include/asm/bitops.h
++++ b/arch/x86/include/asm/bitops.h
+@@ -77,7 +77,7 @@ set_bit(long nr, volatile unsigned long
+ : "iq" ((u8)CONST_MASK(nr))
+ : "memory");
+ } else {
+- asm volatile(LOCK_PREFIX "bts %1,%0"
++ asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
+ : BITOP_ADDR(addr) : "Ir" (nr) : "memory");
+ }
+ }
+@@ -93,7 +93,7 @@ set_bit(long nr, volatile unsigned long
+ */
+ static __always_inline void __set_bit(long nr, volatile unsigned long *addr)
+ {
+- asm volatile("bts %1,%0" : ADDR : "Ir" (nr) : "memory");
++ asm volatile(__ASM_SIZE(bts) " %1,%0" : ADDR : "Ir" (nr) : "memory");
+ }
+
+ /**
+@@ -114,7 +114,7 @@ clear_bit(long nr, volatile unsigned lon
+ : CONST_MASK_ADDR(nr, addr)
+ : "iq" ((u8)~CONST_MASK(nr)));
+ } else {
+- asm volatile(LOCK_PREFIX "btr %1,%0"
++ asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
+ : BITOP_ADDR(addr)
+ : "Ir" (nr));
+ }
+@@ -136,7 +136,7 @@ static __always_inline void clear_bit_un
+
+ static __always_inline void __clear_bit(long nr, volatile unsigned long *addr)
+ {
+- asm volatile("btr %1,%0" : ADDR : "Ir" (nr));
++ asm volatile(__ASM_SIZE(btr) " %1,%0" : ADDR : "Ir" (nr));
+ }
+
+ /*
+@@ -168,7 +168,7 @@ static __always_inline void __clear_bit_
+ */
+ static __always_inline void __change_bit(long nr, volatile unsigned long *addr)
+ {
+- asm volatile("btc %1,%0" : ADDR : "Ir" (nr));
++ asm volatile(__ASM_SIZE(btc) " %1,%0" : ADDR : "Ir" (nr));
+ }
+
+ /**
+@@ -187,7 +187,7 @@ static __always_inline void change_bit(l
+ : CONST_MASK_ADDR(nr, addr)
+ : "iq" ((u8)CONST_MASK(nr)));
+ } else {
+- asm volatile(LOCK_PREFIX "btc %1,%0"
++ asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
+ : BITOP_ADDR(addr)
+ : "Ir" (nr));
+ }
+@@ -203,7 +203,8 @@ static __always_inline void change_bit(l
+ */
+ static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
+ {
+- GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", c);
++ GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts),
++ *addr, "Ir", nr, "%0", c);
+ }
+
+ /**
+@@ -232,7 +233,7 @@ static __always_inline bool __test_and_s
+ {
+ bool oldbit;
+
+- asm("bts %2,%1"
++ asm(__ASM_SIZE(bts) " %2,%1"
+ CC_SET(c)
+ : CC_OUT(c) (oldbit), ADDR
+ : "Ir" (nr));
+@@ -249,7 +250,8 @@ static __always_inline bool __test_and_s
+ */
+ static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
+ {
+- GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", c);
++ GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr),
++ *addr, "Ir", nr, "%0", c);
+ }
+
+ /**
+@@ -272,7 +274,7 @@ static __always_inline bool __test_and_c
+ {
+ bool oldbit;
+
+- asm volatile("btr %2,%1"
++ asm volatile(__ASM_SIZE(btr) " %2,%1"
+ CC_SET(c)
+ : CC_OUT(c) (oldbit), ADDR
+ : "Ir" (nr));
+@@ -284,7 +286,7 @@ static __always_inline bool __test_and_c
+ {
+ bool oldbit;
+
+- asm volatile("btc %2,%1"
++ asm volatile(__ASM_SIZE(btc) " %2,%1"
+ CC_SET(c)
+ : CC_OUT(c) (oldbit), ADDR
+ : "Ir" (nr) : "memory");
+@@ -302,7 +304,8 @@ static __always_inline bool __test_and_c
+ */
+ static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
+ {
+- GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", c);
++ GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc),
++ *addr, "Ir", nr, "%0", c);
+ }
+
+ static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
+@@ -315,7 +318,7 @@ static __always_inline bool variable_tes
+ {
+ bool oldbit;
+
+- asm volatile("bt %2,%1"
++ asm volatile(__ASM_SIZE(bt) " %2,%1"
+ CC_SET(c)
+ : CC_OUT(c) (oldbit)
+ : "m" (*(unsigned long *)addr), "Ir" (nr));
+--- a/arch/x86/include/asm/percpu.h
++++ b/arch/x86/include/asm/percpu.h
+@@ -536,7 +536,7 @@ static inline bool x86_this_cpu_variable
+ {
+ bool oldbit;
+
+- asm volatile("bt "__percpu_arg(2)",%1"
++ asm volatile("btl "__percpu_arg(2)",%1"
+ CC_SET(c)
+ : CC_OUT(c) (oldbit)
+ : "m" (*(unsigned long __percpu *)addr), "Ir" (nr));
--- /dev/null
+From 3c52b5c64326d9dcfee4e10611c53ec1b1b20675 Mon Sep 17 00:00:00 2001
+From: Uros Bizjak <ubizjak@gmail.com>
+Date: Wed, 6 Sep 2017 17:18:08 +0200
+Subject: x86/asm: Remove unnecessary \n\t in front of CC_SET() from asm templates
+
+From: Uros Bizjak <ubizjak@gmail.com>
+
+commit 3c52b5c64326d9dcfee4e10611c53ec1b1b20675 upstream.
+
+There is no need for \n\t in front of CC_SET(), as the macro already includes these two.
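+
+For reference, CC_SET()/CC_OUT() are defined in arch/x86/include/asm/asm.h;
+a simplified sketch (not the verbatim header):
+
+  #ifdef __GCC_ASM_FLAG_OUTPUTS__
+  /* the compiler can consume the flags directly as an asm() output */
+  # define CC_SET(c)  "\n\t/* output condition code " #c "*/\n"
+  # define CC_OUT(c)  "=@cc" #c
+  #else
+  /* fall back to an explicit set<cc> into a register/memory operand */
+  # define CC_SET(c)  "\n\tset" #c " %[_cc_" #c "]\n"
+  # define CC_OUT(c)  [_cc_ ## c] "=qm"
+  #endif
+
+Either way the expansion starts with "\n\t", so an explicit "\n\t" before
+CC_SET() in an asm template only adds a useless blank line to the generated
+assembly.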
+
+Signed-off-by: Uros Bizjak <ubizjak@gmail.com>
+Cc: Linus Torvalds <torvalds@linux-foundation.org>
+Cc: Peter Zijlstra <peterz@infradead.org>
+Cc: Thomas Gleixner <tglx@linutronix.de>
+Link: http://lkml.kernel.org/r/20170906151808.5634-1-ubizjak@gmail.com
+Signed-off-by: Ingo Molnar <mingo@kernel.org>
+Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
+
+---
+ arch/x86/include/asm/archrandom.h | 8 ++++----
+ arch/x86/include/asm/bitops.h | 8 ++++----
+ arch/x86/include/asm/percpu.h | 2 +-
+ 3 files changed, 9 insertions(+), 9 deletions(-)
+
+--- a/arch/x86/include/asm/archrandom.h
++++ b/arch/x86/include/asm/archrandom.h
+@@ -45,7 +45,7 @@ static inline bool rdrand_long(unsigned
+ bool ok;
+ unsigned int retry = RDRAND_RETRY_LOOPS;
+ do {
+- asm volatile(RDRAND_LONG "\n\t"
++ asm volatile(RDRAND_LONG
+ CC_SET(c)
+ : CC_OUT(c) (ok), "=a" (*v));
+ if (ok)
+@@ -59,7 +59,7 @@ static inline bool rdrand_int(unsigned i
+ bool ok;
+ unsigned int retry = RDRAND_RETRY_LOOPS;
+ do {
+- asm volatile(RDRAND_INT "\n\t"
++ asm volatile(RDRAND_INT
+ CC_SET(c)
+ : CC_OUT(c) (ok), "=a" (*v));
+ if (ok)
+@@ -71,7 +71,7 @@ static inline bool rdrand_int(unsigned i
+ static inline bool rdseed_long(unsigned long *v)
+ {
+ bool ok;
+- asm volatile(RDSEED_LONG "\n\t"
++ asm volatile(RDSEED_LONG
+ CC_SET(c)
+ : CC_OUT(c) (ok), "=a" (*v));
+ return ok;
+@@ -80,7 +80,7 @@ static inline bool rdseed_long(unsigned
+ static inline bool rdseed_int(unsigned int *v)
+ {
+ bool ok;
+- asm volatile(RDSEED_INT "\n\t"
++ asm volatile(RDSEED_INT
+ CC_SET(c)
+ : CC_OUT(c) (ok), "=a" (*v));
+ return ok;
+--- a/arch/x86/include/asm/bitops.h
++++ b/arch/x86/include/asm/bitops.h
+@@ -232,7 +232,7 @@ static __always_inline bool __test_and_s
+ {
+ bool oldbit;
+
+- asm("bts %2,%1\n\t"
++ asm("bts %2,%1"
+ CC_SET(c)
+ : CC_OUT(c) (oldbit), ADDR
+ : "Ir" (nr));
+@@ -272,7 +272,7 @@ static __always_inline bool __test_and_c
+ {
+ bool oldbit;
+
+- asm volatile("btr %2,%1\n\t"
++ asm volatile("btr %2,%1"
+ CC_SET(c)
+ : CC_OUT(c) (oldbit), ADDR
+ : "Ir" (nr));
+@@ -284,7 +284,7 @@ static __always_inline bool __test_and_c
+ {
+ bool oldbit;
+
+- asm volatile("btc %2,%1\n\t"
++ asm volatile("btc %2,%1"
+ CC_SET(c)
+ : CC_OUT(c) (oldbit), ADDR
+ : "Ir" (nr) : "memory");
+@@ -315,7 +315,7 @@ static __always_inline bool variable_tes
+ {
+ bool oldbit;
+
+- asm volatile("bt %2,%1\n\t"
++ asm volatile("bt %2,%1"
+ CC_SET(c)
+ : CC_OUT(c) (oldbit)
+ : "m" (*(unsigned long *)addr), "Ir" (nr));
+--- a/arch/x86/include/asm/percpu.h
++++ b/arch/x86/include/asm/percpu.h
+@@ -536,7 +536,7 @@ static inline bool x86_this_cpu_variable
+ {
+ bool oldbit;
+
+- asm volatile("bt "__percpu_arg(2)",%1\n\t"
++ asm volatile("bt "__percpu_arg(2)",%1"
+ CC_SET(c)
+ : CC_OUT(c) (oldbit)
+ : "m" (*(unsigned long __percpu *)addr), "Ir" (nr));