From 102a0743326a03cd1a1202ceda21e175b7d3575c Mon Sep 17 00:00:00 2001
From: hc <hc@nodka.com>
Date: Tue, 20 Feb 2024 01:20:52 +0000
Subject: [PATCH] add new system file

---
 kernel/crypto/shash.c | 181 ++++++++++++++++++++++++++-------------------
 1 files changed, 104 insertions(+), 77 deletions(-)

diff --git a/kernel/crypto/shash.c b/kernel/crypto/shash.c
index 55e7a2f..0a0a50c 100644
--- a/kernel/crypto/shash.c
+++ b/kernel/crypto/shash.c
@@ -1,13 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Synchronous Cryptographic Hash operations.
  *
  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
  */
 
 #include <crypto/scatterwalk.h>
@@ -61,14 +56,13 @@
 	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 	memcpy(alignbuffer, key, keylen);
 	err = shash->setkey(tfm, alignbuffer, keylen);
-	kzfree(buffer);
+	kfree_sensitive(buffer);
 	return err;
 }
 
 static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
 {
-	if (crypto_shash_alg_has_setkey(alg) &&
-	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
+	if (crypto_shash_alg_needs_key(alg))
 		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
 }
 
@@ -94,13 +88,6 @@
 }
 EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 
-static inline unsigned int shash_align_buffer_size(unsigned len,
-						   unsigned long mask)
-{
-	typedef u8 __aligned_largest u8_aligned;
-	return len + (mask & ~(__alignof__(u8_aligned) - 1));
-}
-
 static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
 				  unsigned int len)
 {
@@ -109,10 +96,16 @@
 	unsigned long alignmask = crypto_shash_alignmask(tfm);
 	unsigned int unaligned_len = alignmask + 1 -
 				     ((unsigned long)data & alignmask);
-	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
-		__aligned_largest;
+	/*
+	 * We cannot count on __aligned() working for large values:
+	 * https://patchwork.kernel.org/patch/9507697/
+	 */
+	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
 	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
 	int err;
+
+	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
+		return -EINVAL;
 
 	if (unaligned_len > len)
 		unaligned_len = len;
@@ -145,10 +138,16 @@
 	unsigned long alignmask = crypto_shash_alignmask(tfm);
 	struct shash_alg *shash = crypto_shash_alg(tfm);
 	unsigned int ds = crypto_shash_digestsize(tfm);
-	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
-		__aligned_largest;
+	/*
+	 * We cannot count on __aligned() working for large values:
+	 * https://patchwork.kernel.org/patch/9507697/
+	 */
+	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
 	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
 	int err;
+
+	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
+		return -EINVAL;
 
 	err = shash->final(desc, buf);
 	if (err)
@@ -219,6 +218,22 @@
 }
 EXPORT_SYMBOL_GPL(crypto_shash_digest);
 
+int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
+			    unsigned int len, u8 *out)
+{
+	SHASH_DESC_ON_STACK(desc, tfm);
+	int err;
+
+	desc->tfm = tfm;
+
+	err = crypto_shash_digest(desc, data, len, out);
+
+	shash_desc_zero(desc);
+
+	return err;
+}
+EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
+
 static int shash_default_export(struct shash_desc *desc, void *out)
 {
 	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
@@ -245,7 +260,6 @@
 	struct shash_desc *desc = ahash_request_ctx(req);
 
 	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
 
 	return crypto_shash_init(desc);
 }
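The crypto_shash_tfm_digest() helper added above performs the whole init/update/final sequence on an on-stack descriptor and wipes it afterwards. A minimal caller sketch, assuming a kernel with this patch applied; example_sha256() and the "sha256" algorithm choice are illustrative only, not part of the patch:

#include <crypto/hash.h>
#include <linux/err.h>

/* Hypothetical caller: out must hold crypto_shash_digestsize(tfm) bytes. */
static int example_sha256(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* One call replaces SHASH_DESC_ON_STACK() plus init/update/final. */
	err = crypto_shash_tfm_digest(tfm, data, len, out);

	crypto_free_shash(tfm);
	return err;
}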
@@ -300,7 +314,6 @@
 	struct shash_desc *desc = ahash_request_ctx(req);
 
 	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
 
 	return shash_ahash_finup(req, desc);
 }
@@ -314,14 +327,13 @@
 
 	if (nbytes &&
 	    (sg = req->src, offset = sg->offset,
-	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
+	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
 		void *data;
 
 		data = kmap_atomic(sg_page(sg));
 		err = crypto_shash_digest(desc, data + offset, nbytes,
 					  req->result);
 		kunmap_atomic(data);
-		crypto_yield(desc->flags);
 	} else
 		err = crypto_shash_init(desc) ?:
 		      shash_ahash_finup(req, desc);
@@ -336,7 +348,6 @@
 	struct shash_desc *desc = ahash_request_ctx(req);
 
 	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
 
 	return shash_ahash_digest(req, desc);
 }
@@ -352,7 +363,6 @@
 	struct shash_desc *desc = ahash_request_ctx(req);
 
 	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
 
 	return crypto_shash_import(desc, in);
 }
@@ -395,26 +405,57 @@
 	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
 				    CRYPTO_TFM_NEED_KEY);
 
-	if (alg->export)
-		crt->export = shash_async_export;
-	if (alg->import)
-		crt->import = shash_async_import;
+	crt->export = shash_async_export;
+	crt->import = shash_async_import;
 
 	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
 
 	return 0;
 }
 
+static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_shash *hash = __crypto_shash_cast(tfm);
+	struct shash_alg *alg = crypto_shash_alg(hash);
+
+	alg->exit_tfm(hash);
+}
+
 static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_shash *hash = __crypto_shash_cast(tfm);
 	struct shash_alg *alg = crypto_shash_alg(hash);
+	int err;
 
 	hash->descsize = alg->descsize;
 
 	shash_set_needkey(hash, alg);
 
+	if (alg->exit_tfm)
+		tfm->exit = crypto_shash_exit_tfm;
+
+	if (!alg->init_tfm)
+		return 0;
+
+	err = alg->init_tfm(hash);
+	if (err)
+		return err;
+
+	/* ->init_tfm() may have increased the descsize. */
+	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
+		if (alg->exit_tfm)
+			alg->exit_tfm(hash);
+		return -EINVAL;
+	}
+
 	return 0;
+}
+
+static void crypto_shash_free_instance(struct crypto_instance *inst)
+{
+	struct shash_instance *shash = shash_instance(inst);
+
+	shash->free(shash);
 }
 
 #ifdef CONFIG_NET
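The hunk above gives shash algorithms optional ->init_tfm()/->exit_tfm() hooks, and re-checks descsize against HASH_MAX_DESCSIZE after ->init_tfm() runs, since the hook may grow it. A driver-side sketch of how the hooks slot into a struct shash_alg, assuming this patch is applied; all example_* identifiers are hypothetical:

/* Hypothetical driver fragment, not part of this patch. */
static int example_init_tfm(struct crypto_shash *tfm)
{
	/* Acquire per-tfm resources; an error here fails tfm creation. */
	return 0;
}

static void example_exit_tfm(struct crypto_shash *tfm)
{
	/* Release whatever example_init_tfm() acquired. */
}

static struct shash_alg example_alg = {
	.digestsize	= 32,
	.init_tfm	= example_init_tfm,	/* optional, may be NULL */
	.exit_tfm	= example_exit_tfm,	/* optional, may be NULL */
	/* .init/.update/.final, .descsize and .base omitted for brevity. */
};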
@@ -423,18 +464,14 @@
 	struct crypto_report_hash rhash;
 	struct shash_alg *salg = __crypto_shash_alg(alg);
 
-	strncpy(rhash.type, "shash", sizeof(rhash.type));
+	memset(&rhash, 0, sizeof(rhash));
+
+	strscpy(rhash.type, "shash", sizeof(rhash.type));
 
 	rhash.blocksize = alg->cra_blocksize;
 	rhash.digestsize = salg->digestsize;
 
-	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
-		    sizeof(struct crypto_report_hash), &rhash))
-		goto nla_put_failure;
-	return 0;
-
-nla_put_failure:
-	return -EMSGSIZE;
+	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
 }
 #else
 static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
@@ -457,6 +494,7 @@
 static const struct crypto_type crypto_shash_type = {
 	.extsize = crypto_alg_extsize,
 	.init_tfm = crypto_shash_init_tfm,
+	.free = crypto_shash_free_instance,
 #ifdef CONFIG_PROC_FS
 	.show = crypto_shash_show,
 #endif
@@ -466,6 +504,15 @@
 	.type = CRYPTO_ALG_TYPE_SHASH,
 	.tfmsize = offsetof(struct crypto_shash, base),
 };
+
+int crypto_grab_shash(struct crypto_shash_spawn *spawn,
+		      struct crypto_instance *inst,
+		      const char *name, u32 type, u32 mask)
+{
+	spawn->base.frontend = &crypto_shash_type;
+	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_grab_shash);
 
 struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
 					u32 mask)
@@ -478,9 +525,12 @@
 {
 	struct crypto_alg *base = &alg->base;
 
-	if (alg->digestsize > PAGE_SIZE / 8 ||
-	    alg->descsize > PAGE_SIZE / 8 ||
-	    alg->statesize > PAGE_SIZE / 8)
+	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
+	    alg->descsize > HASH_MAX_DESCSIZE ||
+	    alg->statesize > HASH_MAX_STATESIZE)
+		return -EINVAL;
+
+	if ((alg->export && !alg->import) || (alg->import && !alg->export))
 		return -EINVAL;
 
 	base->cra_type = &crypto_shash_type;
@@ -515,9 +565,9 @@
 }
 EXPORT_SYMBOL_GPL(crypto_register_shash);
 
-int crypto_unregister_shash(struct shash_alg *alg)
+void crypto_unregister_shash(struct shash_alg *alg)
 {
-	return crypto_unregister_alg(&alg->base);
+	crypto_unregister_alg(&alg->base);
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_shash);
 
@@ -541,19 +591,12 @@
 }
 EXPORT_SYMBOL_GPL(crypto_register_shashes);
 
-int crypto_unregister_shashes(struct shash_alg *algs, int count)
+void crypto_unregister_shashes(struct shash_alg *algs, int count)
 {
-	int i, ret;
+	int i;
 
-	for (i = count - 1; i >= 0; --i) {
-		ret = crypto_unregister_shash(&algs[i]);
-		if (ret)
-			pr_err("Failed to unregister %s %s: %d\n",
-			       algs[i].base.cra_driver_name,
-			       algs[i].base.cra_name, ret);
-	}
-
-	return 0;
+	for (i = count - 1; i >= 0; --i)
+		crypto_unregister_shash(&algs[i]);
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
 
@@ -561,6 +604,9 @@
 			      struct shash_instance *inst)
 {
 	int err;
+
+	if (WARN_ON(!inst->free))
+		return -EINVAL;
 
 	err = shash_prepare_alg(&inst->alg);
 	if (err)
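Taken together, the hunks above define the new template-facing contract: instances grab their inner shash with crypto_grab_shash(), must pair ->export with ->import, and must set inst->free before shash_register_instance() accepts them. A hypothetical ->create() fragment sketching that flow, using shash_free_singlespawn_instance() from the final hunk below; error handling is simplified and assumes crypto_drop_spawn() tolerates an un-grabbed spawn, as it does in kernels of this API generation:

/* Hypothetical template fragment, not part of this patch. */
static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_shash_spawn *spawn;
	struct shash_instance *inst;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_shash(spawn, shash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), 0, 0);
	if (err)
		goto out_free;

	/* ... fill in inst->alg based on the spawn's algorithm ... */

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err)
		goto out_free;
	return 0;

out_free:
	shash_free_singlespawn_instance(inst);
	return err;
}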
@@ -570,31 +616,12 @@
 }
 EXPORT_SYMBOL_GPL(shash_register_instance);
 
-void shash_free_instance(struct crypto_instance *inst)
+void shash_free_singlespawn_instance(struct shash_instance *inst)
 {
-	crypto_drop_spawn(crypto_instance_ctx(inst));
-	kfree(shash_instance(inst));
+	crypto_drop_spawn(shash_instance_ctx(inst));
+	kfree(inst);
 }
-EXPORT_SYMBOL_GPL(shash_free_instance);
-
-int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
-			    struct shash_alg *alg,
-			    struct crypto_instance *inst)
-{
-	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
-				  &crypto_shash_type);
-}
-EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
-
-struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
-{
-	struct crypto_alg *alg;
-
-	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
-	return IS_ERR(alg) ? ERR_CAST(alg) :
-	       container_of(alg, struct shash_alg, base);
-}
-EXPORT_SYMBOL_GPL(shash_attr_alg);
+EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Synchronous cryptographic hash type");
-- 
Gitblit v1.6.2
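Since crypto_unregister_shash() and crypto_unregister_shashes() now return void, module exit paths no longer have an unregistration error to propagate. A minimal module sketch under that assumption; my_algs and the example_* names are placeholders, and the array's contents are omitted:

#include <linux/module.h>
#include <crypto/internal/hash.h>

static struct shash_alg my_algs[2];	/* placeholder definitions */

static int __init example_mod_init(void)
{
	return crypto_register_shashes(my_algs, ARRAY_SIZE(my_algs));
}

static void __exit example_mod_exit(void)
{
	/* void return: unregistration can no longer report failure. */
	crypto_unregister_shashes(my_algs, ARRAY_SIZE(my_algs));
}

module_init(example_mod_init);
module_exit(example_mod_exit);
MODULE_LICENSE("GPL");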