From 9999e48639b3cecb08ffb37358bcba3b48161b29 Mon Sep 17 00:00:00 2001
From: hc <hc@nodka.com>
Date: Fri, 10 May 2024 08:50:17 +0000
Subject: [PATCH] add ax88772_rst

---
 kernel/arch/x86/crypto/cast5_avx_glue.c | 40 ++++++++++++++--------------------------
 1 files changed, 14 insertions(+), 26 deletions(-)

diff --git a/kernel/arch/x86/crypto/cast5_avx_glue.c b/kernel/arch/x86/crypto/cast5_avx_glue.c
index d4bf7fc..384ccb0 100644
--- a/kernel/arch/x86/crypto/cast5_avx_glue.c
+++ b/kernel/arch/x86/crypto/cast5_avx_glue.c
@@ -1,24 +1,9 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
- * Glue Code for the AVX assembler implemention of the Cast5 Cipher
+ * Glue Code for the AVX assembler implementation of the Cast5 Cipher
  *
  * Copyright (C) 2012 Johannes Goetzfried
  *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
- * USA
- *
  */
 
 #include <asm/crypto/glue_helper.h>
@@ -61,7 +46,7 @@
 
 static int ecb_crypt(struct skcipher_request *req, bool enc)
 {
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
 	struct skcipher_walk walk;
@@ -76,7 +61,7 @@
 		u8 *wsrc = walk.src.virt.addr;
 		u8 *wdst = walk.dst.virt.addr;
 
-		fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
+		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
 
 		/* Process multi-block batch */
 		if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {
@@ -105,9 +90,10 @@
 		} while (nbytes >= bsize);
 
 done:
-		cast5_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	cast5_fpu_end(fpu_enabled);
 	return err;
 }
 
@@ -211,7 +197,7 @@
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	struct skcipher_walk walk;
 	unsigned int nbytes;
 	int err;
@@ -219,11 +205,12 @@
 	err = skcipher_walk_virt(&walk, req, false);
 
 	while ((nbytes = walk.nbytes)) {
-		fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
+		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
 		nbytes = __cbc_decrypt(ctx, &walk);
-		cast5_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	cast5_fpu_end(fpu_enabled);
 	return err;
 }
 
@@ -290,7 +277,7 @@
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	struct skcipher_walk walk;
 	unsigned int nbytes;
 	int err;
@@ -298,12 +285,13 @@
 	err = skcipher_walk_virt(&walk, req, false);
 
 	while ((nbytes = walk.nbytes) >= CAST5_BLOCK_SIZE) {
-		fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
+		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
 		nbytes = __ctr_crypt(&walk, ctx);
-		cast5_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
 
+	cast5_fpu_end(fpu_enabled);
+
 	if (walk.nbytes) {
 		ctr_crypt_final(&walk, ctx);
 		err = skcipher_walk_done(&walk, 0);
-- 
Gitblit v1.6.2
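
Every hunk after the license cleanup applies the same transformation to ecb_crypt(), cbc_decrypt() and ctr_crypt(): fpu_enabled now starts out false, each skcipher walk step feeds the current state back into cast5_fpu_begin(), and the single cast5_fpu_end() moves out of the per-step loop, so the FPU section is opened at most once per request and closed once at the end. Below is a minimal sketch of cbc_decrypt() after the patch, reconstructed from the hunks; the function name and signature are not shown in this diff and are assumed, as is the usual glue-helper behaviour that cast5_fpu_begin() returns early when the section is already open.

static int cbc_decrypt(struct skcipher_request *req)	/* signature assumed */
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
	bool fpu_enabled = false;	/* no FPU section open yet */
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		/* may open the FPU section on the first pass and, assuming the
		 * usual cast5_fpu_begin() behaviour, keeps it open afterwards */
		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
		nbytes = __cbc_decrypt(ctx, &walk);
		err = skcipher_walk_done(&walk, nbytes);
	}

	/* one matching end for whatever cast5_fpu_begin() enabled */
	cast5_fpu_end(fpu_enabled);
	return err;
}

The practical effect is fewer begin/end round trips for the FPU when a request is split across several walk steps; the trade-off, if this reading is right, is that the FPU section can now stay open across the whole walk instead of one step at a time.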