Source: git.ipfire.org — thirdparty/kernel/stable-queue.git
Blob: 49ecd612c5f23e678c797dd330e8b1075ee387db
Path: queue-6.6/x86-bugs-fix-the-srso-mitigation-on-zen3-4.patch
1 From 5ec47ec6bd0952874bd60b3f8e7833eb2b9cdcbb Mon Sep 17 00:00:00 2001
2 From: "Borislav Petkov (AMD)" <bp@alien8.de>
3 Date: Thu, 28 Mar 2024 13:59:05 +0100
4 Subject: x86/bugs: Fix the SRSO mitigation on Zen3/4
5
6 From: "Borislav Petkov (AMD)" <bp@alien8.de>
7
8 Commit 4535e1a4174c4111d92c5a9a21e542d232e0fcaa upstream.
9
10 The original version of the mitigation would patch in the calls to the
11 untraining routines directly. That is, the alternative() in UNTRAIN_RET
12 will patch in the CALL to srso_alias_untrain_ret() directly.
13
14 However, even if commit e7c25c441e9e ("x86/cpu: Cleanup the untrain
15 mess") meant well in trying to clean up the situation, due to micro-
16 architectural reasons, the untraining routine srso_alias_untrain_ret()
17 must be the target of a CALL instruction and not of a JMP instruction as
18 it is done now.
19
20 Reshuffle the alternative macros to accomplish that.
21
22 Fixes: e7c25c441e9e ("x86/cpu: Cleanup the untrain mess")
23 Signed-off-by: Borislav Petkov (AMD) <bp@alien8.de>
24 Reviewed-by: Ingo Molnar <mingo@kernel.org>
25 Cc: stable@kernel.org
26 Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
27 Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
28 ---
29 arch/x86/include/asm/asm-prototypes.h | 1 +
30 arch/x86/include/asm/nospec-branch.h | 21 ++++++++++++++++-----
31 arch/x86/lib/retpoline.S | 10 +++++-----
32 3 files changed, 22 insertions(+), 10 deletions(-)
33
34 --- a/arch/x86/include/asm/asm-prototypes.h
35 +++ b/arch/x86/include/asm/asm-prototypes.h
36 @@ -13,6 +13,7 @@
37 #include <asm/preempt.h>
38 #include <asm/asm.h>
39 #include <asm/gsseg.h>
40 +#include <asm/nospec-branch.h>
41
42 #ifndef CONFIG_X86_CMPXCHG64
43 extern void cmpxchg8b_emu(void);
44 --- a/arch/x86/include/asm/nospec-branch.h
45 +++ b/arch/x86/include/asm/nospec-branch.h
46 @@ -271,11 +271,20 @@
47 .Lskip_rsb_\@:
48 .endm
49
50 +/*
51 + * The CALL to srso_alias_untrain_ret() must be patched in directly at
52 + * the spot where untraining must be done, ie., srso_alias_untrain_ret()
53 + * must be the target of a CALL instruction instead of indirectly
54 + * jumping to a wrapper which then calls it. Therefore, this macro is
55 + * called outside of __UNTRAIN_RET below, for the time being, before the
56 + * kernel can support nested alternatives with arbitrary nesting.
57 + */
58 +.macro CALL_UNTRAIN_RET
59 #if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_SRSO)
60 -#define CALL_UNTRAIN_RET "call entry_untrain_ret"
61 -#else
62 -#define CALL_UNTRAIN_RET ""
63 + ALTERNATIVE_2 "", "call entry_untrain_ret", X86_FEATURE_UNRET, \
64 + "call srso_alias_untrain_ret", X86_FEATURE_SRSO_ALIAS
65 #endif
66 +.endm
67
68 /*
69 * Mitigate RETBleed for AMD/Hygon Zen uarch. Requires KERNEL CR3 because the
70 @@ -291,8 +300,8 @@
71 .macro __UNTRAIN_RET ibpb_feature, call_depth_insns
72 #if defined(CONFIG_RETHUNK) || defined(CONFIG_CPU_IBPB_ENTRY)
73 VALIDATE_UNRET_END
74 - ALTERNATIVE_3 "", \
75 - CALL_UNTRAIN_RET, X86_FEATURE_UNRET, \
76 + CALL_UNTRAIN_RET
77 + ALTERNATIVE_2 "", \
78 "call entry_ibpb", \ibpb_feature, \
79 __stringify(\call_depth_insns), X86_FEATURE_CALL_DEPTH
80 #endif
81 @@ -351,6 +360,8 @@ extern void retbleed_return_thunk(void);
82 static inline void retbleed_return_thunk(void) {}
83 #endif
84
85 +extern void srso_alias_untrain_ret(void);
86 +
87 #ifdef CONFIG_CPU_SRSO
88 extern void srso_return_thunk(void);
89 extern void srso_alias_return_thunk(void);
90 --- a/arch/x86/lib/retpoline.S
91 +++ b/arch/x86/lib/retpoline.S
92 @@ -218,10 +218,12 @@ SYM_CODE_START(srso_return_thunk)
93 SYM_CODE_END(srso_return_thunk)
94
95 #define JMP_SRSO_UNTRAIN_RET "jmp srso_untrain_ret"
96 -#define JMP_SRSO_ALIAS_UNTRAIN_RET "jmp srso_alias_untrain_ret"
97 #else /* !CONFIG_CPU_SRSO */
98 #define JMP_SRSO_UNTRAIN_RET "ud2"
99 -#define JMP_SRSO_ALIAS_UNTRAIN_RET "ud2"
100 +/* Dummy for the alternative in CALL_UNTRAIN_RET. */
101 +SYM_CODE_START(srso_alias_untrain_ret)
102 + RET
103 +SYM_FUNC_END(srso_alias_untrain_ret)
104 #endif /* CONFIG_CPU_SRSO */
105
106 #ifdef CONFIG_CPU_UNRET_ENTRY
107 @@ -314,9 +316,7 @@ __EXPORT_THUNK(retbleed_untrain_ret)
108 #if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_SRSO)
109
110 SYM_FUNC_START(entry_untrain_ret)
111 - ALTERNATIVE_2 JMP_RETBLEED_UNTRAIN_RET, \
112 - JMP_SRSO_UNTRAIN_RET, X86_FEATURE_SRSO, \
113 - JMP_SRSO_ALIAS_UNTRAIN_RET, X86_FEATURE_SRSO_ALIAS
114 + ALTERNATIVE JMP_RETBLEED_UNTRAIN_RET, JMP_SRSO_UNTRAIN_RET, X86_FEATURE_SRSO
115 SYM_FUNC_END(entry_untrain_ret)
116 __EXPORT_THUNK(entry_untrain_ret)
117