From: Borislav Petkov
Date: Wed, 12 May 2021 09:33:10 +0000 (+0200)
Subject: x86/asm: Simplify __smp_mb() definition
X-Git-Tag: v5.14-rc1~172^2~9
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=1bc67873d401e6c2e6e30be7fef21337db07a042;p=thirdparty%2Flinux.git

x86/asm: Simplify __smp_mb() definition

Drop the bitness ifdeffery in favor of using _ASM_SP, which is the
helper macro for the rSP register specification for 32 and 64 bit
depending on the build.

No functional changes.

Signed-off-by: Borislav Petkov
Signed-off-by: Ingo Molnar
Acked-by: Peter Zijlstra (Intel)
Link: https://lore.kernel.org/r/20210512093310.5635-1-bp@alien8.de
---

diff --git a/arch/x86/include/asm/barrier.h b/arch/x86/include/asm/barrier.h
index 4819d5e5a3353..3ba772a69cc8b 100644
--- a/arch/x86/include/asm/barrier.h
+++ b/arch/x86/include/asm/barrier.h
@@ -54,11 +54,8 @@ static inline unsigned long array_index_mask_nospec(unsigned long index,
 #define dma_rmb()	barrier()
 #define dma_wmb()	barrier()
 
-#ifdef CONFIG_X86_32
-#define __smp_mb()	asm volatile("lock; addl $0,-4(%%esp)" ::: "memory", "cc")
-#else
-#define __smp_mb()	asm volatile("lock; addl $0,-4(%%rsp)" ::: "memory", "cc")
-#endif
+#define __smp_mb()	asm volatile("lock; addl $0,-4(%%" _ASM_SP ")" ::: "memory", "cc")
+
 #define __smp_rmb()	dma_rmb()
 #define __smp_wmb()	barrier()
 #define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
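
[Editor's note: a simplified sketch of how _ASM_SP resolves, paraphrased
from arch/x86/include/asm/asm.h. The in-tree definitions are more general
(they also cover __ASSEMBLY__ and variadic forms); this is only meant to
show why the string concatenation in the new __smp_mb() works.]

	/* Simplified sketch -- not the verbatim kernel definitions. */
	#define __stringify_1(x)	#x
	#define __stringify(x)		__stringify_1(x)

	#ifndef __x86_64__
	# define __ASM_SEL_RAW(a,b)	__stringify(a)	/* 32-bit: pick "a" */
	#else
	# define __ASM_SEL_RAW(a,b)	__stringify(b)	/* 64-bit: pick "b" */
	#endif

	/* Paste the e/r prefix onto the register name, then stringify. */
	#define __ASM_REG(reg)		__ASM_SEL_RAW(e##reg, r##reg)
	#define _ASM_SP			__ASM_REG(sp)

So _ASM_SP becomes the string literal "esp" on 32-bit and "rsp" on
64-bit, and adjacent string literals are concatenated by the compiler,
e.g. on a 64-bit build:

	"lock; addl $0,-4(%%" _ASM_SP ")"  =>  "lock; addl $0,-4(%%rsp)"

which is exactly the string the removed #else branch spelled out by hand.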