[RFC][PATCH 17/17] x86/cpu: Use fancy alternatives to get rid of entry_untrain_ret()

From: Peter Zijlstra
Date: Wed Aug 09 2023 - 03:27:43 EST


Use the new nested alternatives to create what is effectively
ALTERNATIVE_5 and merge the dummy entry_untrain_ret stub into
UNTRAIN_RET properly.
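
Written out, the UNTRAIN_RET site then behaves like a hypothetical
ALTERNATIVE_5 (sketch only, no such macro actually exists; as with the
nesting, a later feature takes precedence over an earlier one):

	ALTERNATIVE_5 "",						\
		      "call zen_untrain_ret",        X86_FEATURE_UNRET,	\
		      "call srso_untrain_ret",       X86_FEATURE_SRSO,	\
		      "call srso_alias_untrain_ret", X86_FEATURE_SRSO_ALIAS, \
		      "call entry_ibpb",             X86_FEATURE_ENTRY_IBPB, \
		      __stringify(RESET_CALL_DEPTH), X86_FEATURE_CALL_DEPTH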

Signed-off-by: Peter Zijlstra (Intel) <peterz@xxxxxxxxxxxxx>
---
 arch/x86/include/asm/nospec-branch.h |   33 ++++++++++++++++++---------------
 arch/x86/lib/retpoline.S             |    7 -------
2 files changed, 18 insertions(+), 22 deletions(-)

--- a/arch/x86/include/asm/nospec-branch.h
+++ b/arch/x86/include/asm/nospec-branch.h
@@ -271,11 +271,15 @@
.endm

#ifdef CONFIG_CPU_UNRET_ENTRY
-#define CALL_UNTRAIN_RET "call entry_untrain_ret"
+#define ALT_UNRET(old) \
+ __ALTERNATIVE(__ALTERNATIVE(__ALTERNATIVE(old, call zen_untrain_ret, X86_FEATURE_UNRET), \
+ call srso_untrain_ret, X86_FEATURE_SRSO), \
+ call srso_alias_untrain_ret, X86_FEATURE_SRSO_ALIAS)
#else
-#define CALL_UNTRAIN_RET ""
+#define ALT_UNRET(old) old
#endif

+
/*
* Mitigate RETBleed for AMD/Hygon Zen uarch. Requires KERNEL CR3 because the
* return thunk isn't mapped into the userspace tables (then again, AMD
@@ -290,30 +294,30 @@
.macro UNTRAIN_RET
#if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CALL_DEPTH_TRACKING)
VALIDATE_UNRET_END
- ALTERNATIVE_3 "", \
- CALL_UNTRAIN_RET, X86_FEATURE_UNRET, \
- "call entry_ibpb", X86_FEATURE_ENTRY_IBPB, \
- __stringify(RESET_CALL_DEPTH), X86_FEATURE_CALL_DEPTH
+
+ __ALTERNATIVE(__ALTERNATIVE(ALT_UNRET(;),
+ call entry_ibpb, X86_FEATURE_ENTRY_IBPB),
+ RESET_CALL_DEPTH, X86_FEATURE_CALL_DEPTH)
#endif
.endm

.macro UNTRAIN_RET_VM
#if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CALL_DEPTH_TRACKING)
VALIDATE_UNRET_END
- ALTERNATIVE_3 "", \
- CALL_UNTRAIN_RET, X86_FEATURE_UNRET, \
- "call entry_ibpb", X86_FEATURE_IBPB_ON_VMEXIT, \
- __stringify(RESET_CALL_DEPTH), X86_FEATURE_CALL_DEPTH
+
+ __ALTERNATIVE(__ALTERNATIVE(ALT_UNRET(;),
+ call entry_ibpb, X86_FEATURE_IBPB_ON_VMEXIT),
+ RESET_CALL_DEPTH, X86_FEATURE_CALL_DEPTH)
#endif
.endm

.macro UNTRAIN_RET_FROM_CALL
#if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CALL_DEPTH_TRACKING)
VALIDATE_UNRET_END
- ALTERNATIVE_3 "", \
- CALL_UNTRAIN_RET, X86_FEATURE_UNRET, \
- "call entry_ibpb", X86_FEATURE_ENTRY_IBPB, \
- __stringify(RESET_CALL_DEPTH_FROM_CALL), X86_FEATURE_CALL_DEPTH
+
+ __ALTERNATIVE(__ALTERNATIVE(ALT_UNRET(;),
+ call entry_ibpb, X86_FEATURE_ENTRY_IBPB),
+ RESET_CALL_DEPTH_FROM_CALL, X86_FEATURE_CALL_DEPTH)
#endif
.endm

@@ -348,7 +352,6 @@ extern void zen_untrain_ret(void);
extern void srso_untrain_ret(void);
extern void srso_alias_untrain_ret(void);

-extern void entry_untrain_ret(void);
extern void entry_ibpb(void);

extern void (*x86_return_thunk)(void);
--- a/arch/x86/lib/retpoline.S
+++ b/arch/x86/lib/retpoline.S
@@ -263,13 +263,6 @@ SYM_CODE_END(srso_safe_ret)
SYM_FUNC_END(srso_untrain_ret)
__EXPORT_THUNK(srso_untrain_ret)

-SYM_FUNC_START(entry_untrain_ret)
- ALTERNATIVE_2 "jmp zen_untrain_ret", \
- "jmp srso_untrain_ret", X86_FEATURE_SRSO, \
- "jmp srso_alias_untrain_ret", X86_FEATURE_SRSO_ALIAS
-SYM_FUNC_END(entry_untrain_ret)
-__EXPORT_THUNK(entry_untrain_ret)
-
/*
* Both these do an unbalanced CALL to mess up the RSB, terminate with UD2
* to indicate noreturn.