2024-10-12 a5969cabbb4660eab42b6ef0412cbbd1200cf14d
kernel/arch/x86/include/asm/nospec-branch.h
@@ -112,7 +112,7 @@
  * eventually turn into it's own annotation.
  */
 .macro ANNOTATE_UNRET_END
-#ifdef CONFIG_DEBUG_ENTRY
+#if (defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_SRSO))
 	ANNOTATE_RETPOLINE_SAFE
 	nop
 #endif
@@ -156,9 +156,9 @@
 .endm
 
 #ifdef CONFIG_CPU_UNRET_ENTRY
-#define CALL_ZEN_UNTRAIN_RET	"call zen_untrain_ret"
+#define CALL_UNTRAIN_RET	"call entry_untrain_ret"
 #else
-#define CALL_ZEN_UNTRAIN_RET	""
+#define CALL_UNTRAIN_RET	""
 #endif
 
 /*
@@ -166,17 +166,18 @@
  * return thunk isn't mapped into the userspace tables (then again, AMD
  * typically has NO_MELTDOWN).
  *
- * While zen_untrain_ret() doesn't clobber anything but requires stack,
+ * While retbleed_untrain_ret() doesn't clobber anything but requires stack,
  * entry_ibpb() will clobber AX, CX, DX.
  *
  * As such, this must be placed after every *SWITCH_TO_KERNEL_CR3 at a point
  * where we have a stack but before any RET instruction.
  */
 .macro UNTRAIN_RET
-#if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_IBPB_ENTRY)
+#if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_IBPB_ENTRY) || \
+	defined(CONFIG_CPU_SRSO)
 	ANNOTATE_UNRET_END
 	ALTERNATIVE_2 "",						\
-		      CALL_ZEN_UNTRAIN_RET, X86_FEATURE_UNRET,		\
+		      CALL_UNTRAIN_RET, X86_FEATURE_UNRET,		\
 		      "call entry_ibpb", X86_FEATURE_ENTRY_IBPB
 #endif
 .endm
@@ -189,8 +190,21 @@
 	_ASM_PTR " 999b\n\t"					\
 	".popsection\n\t"
 
+#ifdef CONFIG_RETHUNK
 extern void __x86_return_thunk(void);
-extern void zen_untrain_ret(void);
+#else
+static inline void __x86_return_thunk(void) {}
+#endif
+
+extern void retbleed_return_thunk(void);
+extern void srso_return_thunk(void);
+extern void srso_alias_return_thunk(void);
+
+extern void retbleed_untrain_ret(void);
+extern void srso_untrain_ret(void);
+extern void srso_alias_untrain_ret(void);
+
+extern void entry_untrain_ret(void);
 extern void entry_ibpb(void);
 
 #ifdef CONFIG_RETPOLINE
@@ -300,11 +314,11 @@
 		: "memory");
 }
 
+extern u64 x86_pred_cmd;
+
 static inline void indirect_branch_prediction_barrier(void)
 {
-	u64 val = PRED_CMD_IBPB;
-
-	alternative_msr_write(MSR_IA32_PRED_CMD, val, X86_FEATURE_USE_IBPB);
+	alternative_msr_write(MSR_IA32_PRED_CMD, x86_pred_cmd, X86_FEATURE_USE_IBPB);
 }
 
 /* The Intel SPEC CTRL MSR base value cache */
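
Note on the fourth hunk: __x86_return_thunk is now declared only when CONFIG_RETHUNK is set, with an empty static inline stub otherwise, so code that references the symbol keeps compiling when return thunks are not built. A minimal sketch of such a caller, assuming a hypothetical helper name that is not part of this patch:

#include <linux/types.h>
#include <asm/nospec-branch.h>

/*
 * Hypothetical caller, not from this patch: with the static inline stub
 * in place, comparing an address against __x86_return_thunk builds with
 * both CONFIG_RETHUNK=y and CONFIG_RETHUNK=n; in the latter case the
 * comparison simply never matches a patched call site.
 */
static bool points_at_return_thunk(void *dest)
{
	return dest == (void *)&__x86_return_thunk;
}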
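Note on the last hunk: indirect_branch_prediction_barrier() no longer hard-codes PRED_CMD_IBPB; it reads the new x86_pred_cmd variable, so mitigation setup can change the command word once and every barrier call site picks it up. A minimal sketch of that pattern, assuming a hypothetical setup helper and the SBPB feature/command names used by the upstream SRSO series (neither is code from this patch):

#include <linux/init.h>
#include <asm/cpufeature.h>
#include <asm/msr-index.h>

/* Default barrier command: a full IBPB. */
u64 x86_pred_cmd __ro_after_init = PRED_CMD_IBPB;

static void __init pred_cmd_setup_sketch(void)	/* hypothetical helper */
{
	/*
	 * If a cheaper selective barrier is available, switch the command
	 * word once; indirect_branch_prediction_barrier() then writes
	 * whatever x86_pred_cmd holds into MSR_IA32_PRED_CMD.
	 */
	if (boot_cpu_has(X86_FEATURE_SBPB))	/* assumed feature flag */
		x86_pred_cmd = PRED_CMD_SBPB;	/* assumed command bit */
}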