```diff
..
  * eventually turn into its own annotation.
  */
 .macro ANNOTATE_UNRET_END
-#ifdef CONFIG_DEBUG_ENTRY
+#if (defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_SRSO))
 	ANNOTATE_RETPOLINE_SAFE
 	nop
 #endif
..
```
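The macro above (ab)uses ANNOTATE_RETPOLINE_SAFE on a nop to mark the end of an untrain region for objtool's unret validation. For reference, the C-side form of that annotation records the address of the annotated instruction in a discard section that objtool consumes; the sketch below is reconstructed from the same header (its `_ASM_PTR`/`.popsection` tail is visible as context in a later hunk), so treat the exact layout as indicative rather than authoritative:

```c
/*
 * Reconstructed sketch of the annotation this macro abuses: label the
 * instruction, then stash a pointer to it in .discard.retpoline_safe,
 * which objtool reads and discards at link time.
 */
#define ANNOTATE_RETPOLINE_SAFE					\
	"999:\n\t"						\
	".pushsection .discard.retpoline_safe\n\t"		\
	_ASM_PTR " 999b\n\t"					\
	".popsection\n\t"
```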
```diff
 .endm
 
 #ifdef CONFIG_CPU_UNRET_ENTRY
-#define CALL_ZEN_UNTRAIN_RET	"call zen_untrain_ret"
+#define CALL_UNTRAIN_RET	"call entry_untrain_ret"
 #else
-#define CALL_ZEN_UNTRAIN_RET	""
+#define CALL_UNTRAIN_RET	""
 #endif
 
 /*
..
  * return thunk isn't mapped into the userspace tables (then again, AMD
  * typically has NO_MELTDOWN).
  *
- * While zen_untrain_ret() doesn't clobber anything but requires stack,
+ * While retbleed_untrain_ret() doesn't clobber anything but requires stack,
  * entry_ibpb() will clobber AX, CX, DX.
  *
  * As such, this must be placed after every *SWITCH_TO_KERNEL_CR3 at a point
  * where we have a stack but before any RET instruction.
  */
 .macro UNTRAIN_RET
-#if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_IBPB_ENTRY)
+#if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_IBPB_ENTRY) || \
+	defined(CONFIG_CPU_SRSO)
 	ANNOTATE_UNRET_END
 	ALTERNATIVE_2 "",					\
-		      CALL_ZEN_UNTRAIN_RET, X86_FEATURE_UNRET,	\
+		      CALL_UNTRAIN_RET, X86_FEATURE_UNRET,	\
 		      "call entry_ibpb", X86_FEATURE_ENTRY_IBPB
 #endif
 .endm
..
```
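To make the ALTERNATIVE_2 selection above concrete, here is a hedged C-level sketch of how a patched UNTRAIN_RET site behaves at runtime. The wrapper name untrain_ret_site() is hypothetical; boot_cpu_has() and the two feature flags are real kernel names, and because ALTERNATIVE_2 applies its second replacement after the first, X86_FEATURE_ENTRY_IBPB wins if both flags are somehow set:

```c
#include <asm/cpufeature.h>
#include <asm/nospec-branch.h>

/* Hypothetical illustration only: the real site is patched inline. */
static inline void untrain_ret_site(void)
{
	if (boot_cpu_has(X86_FEATURE_ENTRY_IBPB))
		entry_ibpb();		/* clobbers AX, CX, DX */
	else if (boot_cpu_has(X86_FEATURE_UNRET))
		entry_untrain_ret();	/* needs a stack, clobbers nothing */
	/* otherwise the site stays as padding NOPs */
}
```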
```diff
 	_ASM_PTR " 999b\n\t"					\
 	".popsection\n\t"
 
+#ifdef CONFIG_RETHUNK
 extern void __x86_return_thunk(void);
-extern void zen_untrain_ret(void);
+#else
+static inline void __x86_return_thunk(void) {}
+#endif
+
+extern void retbleed_return_thunk(void);
+extern void srso_return_thunk(void);
+extern void srso_alias_return_thunk(void);
+
+extern void retbleed_untrain_ret(void);
+extern void srso_untrain_ret(void);
+extern void srso_alias_untrain_ret(void);
+
+extern void entry_untrain_ret(void);
 extern void entry_ibpb(void);
 
 #ifdef CONFIG_RETPOLINE
..
```
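The new declarations give each mitigation its own untraining sequence and return thunk, with entry_untrain_ret() as the single entry point the UNTRAIN_RET macro now calls. As a hedged sketch of the fan-out (upstream does this dispatch in assembly via ALTERNATIVE_2 jmp patching, not in C; the SRSO feature-flag names follow the same patch set, and later-patched alternatives take precedence):

```c
#include <asm/cpufeature.h>
#include <asm/nospec-branch.h>

/* Illustrative C rendering of the assembly-level dispatch. */
void entry_untrain_ret_sketch(void)
{
	if (cpu_feature_enabled(X86_FEATURE_SRSO_ALIAS))
		srso_alias_untrain_ret();	/* SRSO, address-alias variant */
	else if (cpu_feature_enabled(X86_FEATURE_SRSO))
		srso_untrain_ret();		/* SRSO untraining sequence */
	else
		retbleed_untrain_ret();		/* original retbleed untraining */
}
```

The three *_return_thunk symbols are the matching return-thunk bodies; the mitigation code points compiled-in returns at whichever one corresponds to the selected mitigation, while the CONFIG_RETHUNK=n stub keeps callers compiling when returns are not rewritten at all.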
```diff
 			  : "memory");
 }
 
+extern u64 x86_pred_cmd;
+
 static inline void indirect_branch_prediction_barrier(void)
 {
-	u64 val = PRED_CMD_IBPB;
-
-	alternative_msr_write(MSR_IA32_PRED_CMD, val, X86_FEATURE_USE_IBPB);
+	alternative_msr_write(MSR_IA32_PRED_CMD, x86_pred_cmd, X86_FEATURE_USE_IBPB);
 }
 
 /* The Intel SPEC CTRL MSR base value cache */
```
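Replacing the hardcoded PRED_CMD_IBPB with the x86_pred_cmd variable lets the SRSO mitigation swap in a different predictor command without touching every barrier call site. A minimal sketch of the idea, assuming the AMD-defined selective branch predictor barrier bit (PRED_CMD_SBPB) from the same patch series and a hypothetical decision helper:

```c
#include <linux/init.h>
#include <linux/cache.h>
#include <asm/msr-index.h>

/* Defaults to a full IBPB (bit 0 of MSR_IA32_PRED_CMD). */
u64 x86_pred_cmd __ro_after_init = PRED_CMD_IBPB;

/*
 * Hypothetical helper, illustration only: downgrade to the lighter
 * selective barrier (SBPB) when the SRSO mitigation decides a full
 * IBPB is not required.
 */
static void __init choose_pred_cmd(bool sbpb_is_sufficient)
{
	if (sbpb_is_sufficient)
		x86_pred_cmd = PRED_CMD_SBPB;
}
```

Every indirect_branch_prediction_barrier() call then writes whichever command is cached, through the same alternative_msr_write() path as before.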