2024-01-31 f9004dbfff8a3fbbd7e2a88c8a4327c7f2f8e5b2
kernel/arch/x86/crypto/chacha_glue.c
@@ -1,21 +1,18 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * x64 SIMD accelerated ChaCha and XChaCha stream ciphers,
  * including ChaCha20 (RFC7539)
  *
  * Copyright (C) 2015 Martin Willi
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
  */
 
 #include <crypto/algapi.h>
 #include <crypto/internal/chacha.h>
+#include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
-#include <asm/fpu/api.h>
+#include <linux/sizes.h>
 #include <asm/simd.h>
 
 asmlinkage void chacha_block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
@@ -81,8 +78,7 @@
 		}
 	}
 
-	if (IS_ENABLED(CONFIG_AS_AVX2) &&
-	    static_branch_likely(&chacha_use_avx2)) {
+	if (static_branch_likely(&chacha_use_avx2)) {
 		while (bytes >= CHACHA_BLOCK_SIZE * 8) {
 			chacha_8block_xor_avx2(state, dst, src, bytes, nrounds);
 			bytes -= CHACHA_BLOCK_SIZE * 8;
@@ -127,7 +123,7 @@
 
 void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
 {
-	if (!static_branch_likely(&chacha_use_simd) || !may_use_simd()) {
+	if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable()) {
 		hchacha_block_generic(state, stream, nrounds);
 	} else {
 		kernel_fpu_begin();
@@ -146,7 +142,7 @@
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
 		       int nrounds)
 {
-	if (!static_branch_likely(&chacha_use_simd) || !may_use_simd() ||
+	if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable() ||
 	    bytes <= CHACHA_BLOCK_SIZE)
 		return chacha_crypt_generic(state, dst, src, bytes, nrounds);
 
@@ -182,7 +178,7 @@
 			nbytes = round_down(nbytes, walk.stride);
 
 		if (!static_branch_likely(&chacha_use_simd) ||
-		    !may_use_simd()) {
+		    !crypto_simd_usable()) {
 			chacha_crypt_generic(state, walk.dst.virt.addr,
 					     walk.src.virt.addr, nbytes,
 					     ctx->nrounds);
@@ -217,7 +213,7 @@
 
 	chacha_init_generic(state, ctx->key, req->iv);
 
-	if (req->cryptlen > CHACHA_BLOCK_SIZE && irq_fpu_usable()) {
+	if (req->cryptlen > CHACHA_BLOCK_SIZE && crypto_simd_usable()) {
 		kernel_fpu_begin();
 		hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
 		kernel_fpu_end();
@@ -287,8 +283,7 @@
 
 	static_branch_enable(&chacha_use_simd);
 
-	if (IS_ENABLED(CONFIG_AS_AVX2) &&
-	    boot_cpu_has(X86_FEATURE_AVX) &&
+	if (boot_cpu_has(X86_FEATURE_AVX) &&
 	    boot_cpu_has(X86_FEATURE_AVX2) &&
 	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
 		static_branch_enable(&chacha_use_avx2);
@@ -298,13 +293,13 @@
 		    boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
 			static_branch_enable(&chacha_use_avx512vl);
 	}
-	return IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) ?
+	return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
 	       crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
 }
 
 static void __exit chacha_simd_mod_fini(void)
 {
-	if (IS_REACHABLE(CONFIG_CRYPTO_BLKCIPHER) && boot_cpu_has(X86_FEATURE_SSSE3))
+	if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) && boot_cpu_has(X86_FEATURE_SSSE3))
 		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
 }
 
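Note on the may_use_simd() -> crypto_simd_usable() conversion above: crypto_simd_usable() is the crypto-layer wrapper around may_use_simd() that the crypto self-tests can additionally force off, so that the generic (no-SIMD) fallback paths get exercised. The sketch below is paraphrased from memory of include/crypto/internal/simd.h and may differ in detail from the tree this diff targets; the config guard and the per-CPU variable name in particular should be checked there.

/*
 * Hedged sketch of crypto_simd_usable(), paraphrased from
 * include/crypto/internal/simd.h; verify against the target tree.
 * It behaves like may_use_simd(), except that the extra crypto
 * self-tests may temporarily disable SIMD on a CPU to test the
 * no-SIMD fallback code.
 */
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
DECLARE_PER_CPU(bool, crypto_simd_disabled_for_test);

static __must_check inline bool crypto_simd_usable(void)
{
	return may_use_simd() &&
	       !this_cpu_read(crypto_simd_disabled_for_test);
}
#else
static __must_check inline bool crypto_simd_usable(void)
{
	return may_use_simd();
}
#endif

On x86, may_use_simd() reduces to irq_fpu_usable(), which is why the hunk that replaces irq_fpu_usable() with crypto_simd_usable() does not change behavior outside the self-test case.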