From e636c8d336489bf3eed5878299e6cc045bbad077 Mon Sep 17 00:00:00 2001
From: hc <hc@nodka.com>
Date: Tue, 20 Feb 2024 01:17:29 +0000
Subject: [PATCH] debug lk

---
 kernel/include/crypto/internal/skcipher.h | 93 ++++++++++++++++++++++++++--------------------
 1 file changed, 53 insertions(+), 40 deletions(-)

diff --git a/kernel/include/crypto/internal/skcipher.h b/kernel/include/crypto/internal/skcipher.h
index e42f706..9dd6c0c 100644
--- a/kernel/include/crypto/internal/skcipher.h
+++ b/kernel/include/crypto/internal/skcipher.h
@@ -1,19 +1,15 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
 /*
  * Symmetric key ciphers.
  *
  * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
 */
 
 #ifndef _CRYPTO_INTERNAL_SKCIPHER_H
 #define _CRYPTO_INTERNAL_SKCIPHER_H
 
 #include <crypto/algapi.h>
+#include <crypto/internal/cipher.h>
 #include <crypto/skcipher.h>
 #include <linux/list.h>
 #include <linux/types.h>
@@ -70,8 +66,6 @@
 	unsigned int alignmask;
 };
 
-extern const struct crypto_type crypto_givcipher_type;
-
 static inline struct crypto_instance *skcipher_crypto_instance(
 	struct skcipher_instance *inst)
 {
@@ -95,14 +89,9 @@
 	req->base.complete(&req->base, err);
 }
 
-static inline void crypto_set_skcipher_spawn(
-	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
-{
-	crypto_set_spawn(&spawn->base, inst);
-}
-
-int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
-			 u32 type, u32 mask);
+int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
+			 struct crypto_instance *inst,
+			 const char *name, u32 type, u32 mask);
 
 static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
 {
@@ -147,23 +136,15 @@
 void skcipher_walk_atomise(struct skcipher_walk *walk);
 int skcipher_walk_async(struct skcipher_walk *walk,
 			struct skcipher_request *req);
-int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
-		       bool atomic);
 int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
 			       struct aead_request *req, bool atomic);
 int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
 			       struct aead_request *req, bool atomic);
 void skcipher_walk_complete(struct skcipher_walk *walk, int err);
 
-static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
-					       int err)
+static inline void skcipher_walk_abort(struct skcipher_walk *walk)
 {
-	req->base.complete(&req->base, err);
-}
-
-static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
-{
-	return req->base.flags;
+	skcipher_walk_done(walk, -ECANCELED);
 }
 
 static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
@@ -184,28 +165,60 @@
 static inline unsigned int crypto_skcipher_alg_min_keysize(
 	struct skcipher_alg *alg)
 {
-	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
-	    CRYPTO_ALG_TYPE_BLKCIPHER)
-		return alg->base.cra_blkcipher.min_keysize;
-
-	if (alg->base.cra_ablkcipher.encrypt)
-		return alg->base.cra_ablkcipher.min_keysize;
-
 	return alg->min_keysize;
 }
 
 static inline unsigned int crypto_skcipher_alg_max_keysize(
 	struct skcipher_alg *alg)
 {
-	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
-	    CRYPTO_ALG_TYPE_BLKCIPHER)
-		return alg->base.cra_blkcipher.max_keysize;
-
-	if (alg->base.cra_ablkcipher.encrypt)
-		return alg->base.cra_ablkcipher.max_keysize;
-
 	return alg->max_keysize;
 }
 
+static inline unsigned int crypto_skcipher_alg_walksize(
+	struct skcipher_alg *alg)
+{
+	return alg->walksize;
+}
+
+/**
+ * crypto_skcipher_walksize() - obtain walk size
+ * @tfm: cipher handle
+ *
+ * In some cases, algorithms can only perform optimally when operating on
+ * multiple blocks in parallel. This is reflected by the walksize, which
+ * must be a multiple of the chunksize (or equal if the concern does not
+ * apply)
+ *
+ * Return: walk size in bytes
+ */
+static inline unsigned int crypto_skcipher_walksize(
+	struct crypto_skcipher *tfm)
+{
+	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
+}
+
+/* Helpers for simple block cipher modes of operation */
+struct skcipher_ctx_simple {
+	struct crypto_cipher *cipher;	/* underlying block cipher */
+};
+
+static inline struct crypto_cipher *
+skcipher_cipher_simple(struct crypto_skcipher *tfm)
+{
+	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
+
+	return ctx->cipher;
+}
+
+struct skcipher_instance *skcipher_alloc_instance_simple(
+	struct crypto_template *tmpl, struct rtattr **tb);
+
+static inline struct crypto_alg *skcipher_ialg_simple(
+	struct skcipher_instance *inst)
+{
+	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);
+
+	return crypto_spawn_cipher_alg(spawn);
+}
 
 #endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */
-- 
Gitblit v1.6.2
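
Note: crypto_grab_skcipher() now takes the instance directly, folding in the old crypto_set_skcipher_spawn() step, so a spawn is tied to its instance at grab time. Below is a minimal sketch of how a template's ->create() callback calls the new signature; the name my_create, the zero type/mask arguments, and the trimmed error handling are illustrative assumptions, not part of the patch.

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/slab.h>

static int my_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_skcipher_spawn *spawn;
	struct skcipher_instance *inst;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = skcipher_instance_ctx(inst);

	/* The instance is passed at grab time; no separate set-spawn call. */
	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, 0);
	if (err) {
		kfree(inst);
		return err;
	}

	/* ... fill in inst->alg and call skcipher_register_instance() ... */
	return 0;
}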
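
Note: the new skcipher_walk_abort() gives drivers a one-call way to bail out of a walk they have already started; as the hunk above shows, it simply completes the walk through skcipher_walk_done() with -ECANCELED so mappings and intermediate state are unwound. A sketch assuming a virtual-address walk; my_encrypt and my_transform are hypothetical stand-ins for a driver's fast path.

#include <crypto/internal/skcipher.h>

static bool my_transform(u8 *dst, const u8 *src, unsigned int len); /* hypothetical */

static int my_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (!my_transform(walk.dst.virt.addr, walk.src.virt.addr,
				  walk.nbytes)) {
			/* Unwind the half-finished walk and give up. */
			skcipher_walk_abort(&walk);
			return -ECANCELED;
		}
		/* Zero bytes left unprocessed in this step. */
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}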
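
Note: walksize tells the walk code what span the driver prefers per step: a multiple of chunksize, larger when a SIMD loop wants several blocks in flight. A hypothetical skcipher_alg showing how cra_blocksize, chunksize, and walksize relate for a stream cipher whose inner loop handles four 64-byte blocks per pass; every name and callback here is an assumption for illustration.

#include <crypto/internal/skcipher.h>
#include <linux/module.h>

struct my_ctx { u8 key[32]; };	/* hypothetical per-tfm context */

static int my_setkey(struct crypto_skcipher *tfm, const u8 *key,
		     unsigned int keylen);		/* hypothetical */
static int my_crypt(struct skcipher_request *req);	/* hypothetical */

static struct skcipher_alg my_alg = {
	.base.cra_name		= "mystream",
	.base.cra_driver_name	= "mystream-simd",
	.base.cra_priority	= 300,
	.base.cra_blocksize	= 1,	/* stream cipher */
	.base.cra_ctxsize	= sizeof(struct my_ctx),
	.base.cra_module	= THIS_MODULE,
	.min_keysize		= 32,
	.max_keysize		= 32,
	.ivsize			= 16,
	.chunksize		= 64,		/* one keystream block */
	.walksize		= 4 * 64,	/* four blocks per SIMD pass */
	.setkey			= my_setkey,
	.encrypt		= my_crypt,
	.decrypt		= my_crypt,	/* stream cipher: same operation */
};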
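
Note: the simple-mode helpers keep trivial templates from reimplementing instance setup. skcipher_alloc_instance_simple() parses the template arguments, grabs the underlying block cipher, and fills in default fields; skcipher_cipher_simple() retrieves that cipher from the tfm context; skcipher_ialg_simple() reaches the underlying crypto_alg during instance creation. The sketch below follows the shape of upstream crypto/ecb.c; the my_ecb_* names are assumptions.

#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>

static int my_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		/* The walker hands us whole blocks except possibly at the tail. */
		unsigned int n = walk.nbytes - (walk.nbytes % bsize);
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int i;

		for (i = 0; i < n; i += bsize)
			crypto_cipher_encrypt_one(cipher, dst + i, src + i);

		err = skcipher_walk_done(&walk, walk.nbytes - n);
	}

	return err;
}

/* Mirror of the above using crypto_cipher_decrypt_one(). */
static int my_ecb_decrypt(struct skcipher_request *req);

static int my_ecb_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	int err;

	/* Parses tb, grabs the underlying cipher, fills in the defaults. */
	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.ivsize = 0;	/* an ECB-like mode carries no IV */
	inst->alg.encrypt = my_ecb_encrypt;
	inst->alg.decrypt = my_ecb_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);

	return err;
}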