From 23fa18eaa71266feff7ba8d83022d9e1cc83c65a Mon Sep 17 00:00:00 2001
From: hc <hc@nodka.com>
Date: Fri, 10 May 2024 07:42:03 +0000
Subject: [PATCH] disable pwm7

---
 kernel/arch/arm64/crypto/sha2-ce-glue.c | 94 ++++++++++++++++++++++++++++++-----------------
 1 files changed, 60 insertions(+), 34 deletions(-)

diff --git a/kernel/arch/arm64/crypto/sha2-ce-glue.c b/kernel/arch/arm64/crypto/sha2-ce-glue.c
index e7dfd98..1cfc4f6 100644
--- a/kernel/arch/arm64/crypto/sha2-ce-glue.c
+++ b/kernel/arch/arm64/crypto/sha2-ce-glue.c
@@ -1,17 +1,15 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
  *
  * Copyright (C) 2014 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
  */
 
 #include <asm/neon.h>
 #include <asm/simd.h>
 #include <asm/unaligned.h>
 #include <crypto/internal/hash.h>
+#include <crypto/internal/simd.h>
 #include <crypto/sha.h>
 #include <crypto/sha256_base.h>
 #include <linux/cpufeature.h>
@@ -29,16 +27,26 @@
 	u32 finalize;
 };
 
-asmlinkage void sha2_ce_transform(struct sha256_ce_state *sst, u8 const *src,
-				  int blocks);
-#ifdef CONFIG_CFI_CLANG
-static inline void __cfi_sha2_ce_transform(struct sha256_state *sst,
-					   u8 const *src, int blocks)
+extern const u32 sha256_ce_offsetof_count;
+extern const u32 sha256_ce_offsetof_finalize;
+
+asmlinkage int sha2_ce_transform(struct sha256_ce_state *sst, u8 const *src,
+				 int blocks);
+
+static void __sha2_ce_transform(struct sha256_state *sst, u8 const *src,
+				int blocks)
 {
-	sha2_ce_transform((struct sha256_ce_state *)sst, src, blocks);
+	while (blocks) {
+		int rem;
+
+		kernel_neon_begin();
+		rem = sha2_ce_transform(container_of(sst, struct sha256_ce_state,
+						     sst), src, blocks);
+		kernel_neon_end();
+		src += (blocks - rem) * SHA256_BLOCK_SIZE;
+		blocks = rem;
+	}
 }
-#define sha2_ce_transform __cfi_sha2_ce_transform
-#endif
 
 const u32 sha256_ce_offsetof_count = offsetof(struct sha256_ce_state,
 					      sst.count);
@@ -47,20 +55,23 @@
 
 asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);
 
+static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
+				      int blocks)
+{
+	sha256_block_data_order(sst->state, src, blocks);
+}
+
 static int sha256_ce_update(struct shash_desc *desc, const u8 *data,
 			    unsigned int len)
 {
 	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
 
-	if (!may_use_simd())
+	if (!crypto_simd_usable())
 		return sha256_base_do_update(desc, data, len,
-				(sha256_block_fn *)sha256_block_data_order);
+				__sha256_block_data_order);
 
 	sctx->finalize = 0;
-	kernel_neon_begin();
-	sha256_base_do_update(desc, data, len,
-			      (sha256_block_fn *)sha2_ce_transform);
-	kernel_neon_end();
+	sha256_base_do_update(desc, data, len, __sha2_ce_transform);
 
 	return 0;
 }
@@ -71,12 +82,11 @@
 	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
 	bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;
 
-	if (!may_use_simd()) {
+	if (!crypto_simd_usable()) {
 		if (len)
 			sha256_base_do_update(desc, data, len,
-				(sha256_block_fn *)sha256_block_data_order);
-		sha256_base_do_finalize(desc,
-				(sha256_block_fn *)sha256_block_data_order);
+				__sha256_block_data_order);
+		sha256_base_do_finalize(desc, __sha256_block_data_order);
 		return sha256_base_finish(desc, out);
 	}
 
@@ -86,13 +96,9 @@
 	 */
 	sctx->finalize = finalize;
 
-	kernel_neon_begin();
-	sha256_base_do_update(desc, data, len,
-			      (sha256_block_fn *)sha2_ce_transform);
+	sha256_base_do_update(desc, data, len, __sha2_ce_transform);
 	if (!finalize)
-		sha256_base_do_finalize(desc,
-					(sha256_block_fn *)sha2_ce_transform);
-	kernel_neon_end();
+		sha256_base_do_finalize(desc, __sha2_ce_transform);
 	return sha256_base_finish(desc, out);
 }
 
@@ -100,17 +106,31 @@
 {
 	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
 
-	if (!may_use_simd()) {
-		sha256_base_do_finalize(desc,
-				(sha256_block_fn *)sha256_block_data_order);
+	if (!crypto_simd_usable()) {
+		sha256_base_do_finalize(desc, __sha256_block_data_order);
 		return sha256_base_finish(desc, out);
 	}
 
 	sctx->finalize = 0;
-	kernel_neon_begin();
-	sha256_base_do_finalize(desc, (sha256_block_fn *)sha2_ce_transform);
-	kernel_neon_end();
+	sha256_base_do_finalize(desc, __sha2_ce_transform);
 	return sha256_base_finish(desc, out);
+}
+
+static int sha256_ce_export(struct shash_desc *desc, void *out)
+{
+	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
+
+	memcpy(out, &sctx->sst, sizeof(struct sha256_state));
+	return 0;
+}
+
+static int sha256_ce_import(struct shash_desc *desc, const void *in)
+{
+	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
+
+	memcpy(&sctx->sst, in, sizeof(struct sha256_state));
+	sctx->finalize = 0;
+	return 0;
 }
 
 static struct shash_alg algs[] = { {
@@ -118,7 +138,10 @@
 	.update			= sha256_ce_update,
 	.final			= sha256_ce_final,
 	.finup			= sha256_ce_finup,
+	.export			= sha256_ce_export,
+	.import			= sha256_ce_import,
 	.descsize		= sizeof(struct sha256_ce_state),
+	.statesize		= sizeof(struct sha256_state),
 	.digestsize		= SHA224_DIGEST_SIZE,
 	.base			= {
 		.cra_name		= "sha224",
@@ -132,7 +155,10 @@
 	.update			= sha256_ce_update,
 	.final			= sha256_ce_final,
 	.finup			= sha256_ce_finup,
+	.export			= sha256_ce_export,
+	.import			= sha256_ce_import,
 	.descsize		= sizeof(struct sha256_ce_state),
+	.statesize		= sizeof(struct sha256_state),
 	.digestsize		= SHA256_DIGEST_SIZE,
 	.base			= {
 		.cra_name		= "sha256",
--
Gitblit v1.6.2
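
The .export/.import hooks and .statesize added by the last four hunks let a caller snapshot a partial SHA-256 computation and resume it later through the generic shash API. The sketch below shows what such a round trip looks like from the caller's side; it is illustrative only and not part of the patch: the function name sha256_export_import_demo is made up, and it assumes a tree new enough to have crypto_simd_usable() (so shash_desc no longer carries a flags field).

#include <crypto/hash.h>
#include <crypto/sha.h>
#include <linux/err.h>
#include <linux/slab.h>

/*
 * Illustrative only: hash the first half of @buf, export the partial state,
 * import it again, then finish with the second half. The imported descriptor
 * must produce the same digest as hashing @buf in a single pass.
 * @digest must point to at least SHA256_DIGEST_SIZE bytes.
 */
static int sha256_export_import_demo(const u8 *buf, unsigned int len,
				     u8 *digest)
{
	struct crypto_shash *tfm;
	void *state;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* .statesize tells us how large the exported blob is */
	state = kmalloc(crypto_shash_statesize(tfm), GFP_KERNEL);
	if (!state) {
		crypto_free_shash(tfm);
		return -ENOMEM;
	}

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_init(desc);
		if (!err)
			err = crypto_shash_update(desc, buf, len / 2);
		if (!err)
			err = crypto_shash_export(desc, state);	/* -> sha256_ce_export() */
		if (!err)
			err = crypto_shash_import(desc, state);	/* -> sha256_ce_import() */
		if (!err)
			err = crypto_shash_update(desc, buf + len / 2,
						  len - len / 2);
		if (!err)
			err = crypto_shash_final(desc, digest);
	}

	kfree(state);
	crypto_free_shash(tfm);
	return err;
}

Note that sha256_ce_import() in the diff deliberately clears sctx->finalize: only the generic struct sha256_state is serialized, so the CE-specific finalize hint is not carried across the export/import boundary and is set again by sha256_ce_finup() when its conditions hold.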