```diff
--- a/arch/arm64/include/asm/lse.h
+++ b/arch/arm64/include/asm/lse.h
@@ -2,56 +2,47 @@
 #ifndef __ASM_LSE_H
 #define __ASM_LSE_H
 
-#if defined(CONFIG_AS_LSE) && defined(CONFIG_ARM64_LSE_ATOMICS)
+#include <asm/atomic_ll_sc.h>
+
+#if defined(CONFIG_ARM64_LSE_ATOMICS) && !defined(BUILD_FIPS140_KO)
 
 #define __LSE_PREAMBLE	".arch_extension lse\n"
 
 #include <linux/compiler_types.h>
 #include <linux/export.h>
+#include <linux/jump_label.h>
 #include <linux/stringify.h>
 #include <asm/alternative.h>
+#include <asm/atomic_lse.h>
 #include <asm/cpucaps.h>
 
-#ifdef __ASSEMBLER__
+extern struct static_key_false cpu_hwcap_keys[ARM64_NCAPS];
+extern struct static_key_false arm64_const_caps_ready;
 
-.arch_extension	lse
+static inline bool system_uses_lse_atomics(void)
+{
+	return (static_branch_likely(&arm64_const_caps_ready)) &&
+		static_branch_likely(&cpu_hwcap_keys[ARM64_HAS_LSE_ATOMICS]);
+}
 
-.macro alt_lse, llsc, lse
-	alternative_insn "\llsc", "\lse", ARM64_HAS_LSE_ATOMICS
-.endm
-
-#else	/* __ASSEMBLER__ */
-
-/* Move the ll/sc atomics out-of-line */
-#define __LL_SC_INLINE		notrace
-#define __LL_SC_PREFIX(x)	__ll_sc_##x
-#define __LL_SC_EXPORT(x)	EXPORT_SYMBOL(__LL_SC_PREFIX(x))
-
-/* Macro for constructing calls to out-of-line ll/sc atomics */
-#define __LL_SC_CALL(op)	"bl\t" __stringify(__LL_SC_PREFIX(op)) "\n"
-#define __LL_SC_CLOBBERS	"x16", "x17", "x30"
+#define __lse_ll_sc_body(op, ...)					\
+({									\
+	system_uses_lse_atomics() ?					\
+		__lse_##op(__VA_ARGS__) :				\
+		__ll_sc_##op(__VA_ARGS__);				\
+})
 
 /* In-line patching at runtime */
 #define ARM64_LSE_ATOMIC_INSN(llsc, lse)				\
 	ALTERNATIVE(llsc, __LSE_PREAMBLE lse, ARM64_HAS_LSE_ATOMICS)
 
-#endif	/* __ASSEMBLER__ */
-#else	/* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */
+#else	/* CONFIG_ARM64_LSE_ATOMICS */
 
-#ifdef __ASSEMBLER__
+static inline bool system_uses_lse_atomics(void) { return false; }
 
-.macro alt_lse, llsc, lse
-	\llsc
-.endm
-
-#else	/* __ASSEMBLER__ */
-
-#define __LL_SC_INLINE		static inline
-#define __LL_SC_PREFIX(x)	x
-#define __LL_SC_EXPORT(x)
+#define __lse_ll_sc_body(op, ...)	__ll_sc_##op(__VA_ARGS__)
 
 #define ARM64_LSE_ATOMIC_INSN(llsc, lse)	llsc
 
-#endif	/* __ASSEMBLER__ */
-#endif	/* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */
+#endif	/* CONFIG_ARM64_LSE_ATOMICS */
 #endif	/* __ASM_LSE_H */
```