2024-02-20 102a0743326a03cd1a1202ceda21e175b7d3575c
kernel/crypto/shash.c
@@ -1,13 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Synchronous Cryptographic Hash operations.
  *
  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
  */
 
 #include <crypto/scatterwalk.h>
@@ -61,14 +56,13 @@
 	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 	memcpy(alignbuffer, key, keylen);
 	err = shash->setkey(tfm, alignbuffer, keylen);
-	kzfree(buffer);
+	kfree_sensitive(buffer);
 	return err;
 }
 
 static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
 {
-	if (crypto_shash_alg_has_setkey(alg) &&
-	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
+	if (crypto_shash_alg_needs_key(alg))
 		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
 }
 
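Besides renaming kzfree() to the clearer kfree_sensitive(), this hunk folds the open-coded "has a setkey and the key is not optional" test into a helper. For reference, that helper is essentially the removed condition behind a name; a minimal sketch, assuming it sits in the internal hash header next to crypto_shash_alg_has_setkey():

static inline bool crypto_shash_alg_needs_key(struct shash_alg *alg)
{
	return crypto_shash_alg_has_setkey(alg) &&
	       !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY);
}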
@@ -94,13 +88,6 @@
 }
 EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 
-static inline unsigned int shash_align_buffer_size(unsigned len,
-						   unsigned long mask)
-{
-	typedef u8 __aligned_largest u8_aligned;
-	return len + (mask & ~(__alignof__(u8_aligned) - 1));
-}
-
 static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				   unsigned int len)
 {
@@ -109,10 +96,16 @@
 	unsigned long alignmask = crypto_shash_alignmask(tfm);
 	unsigned int unaligned_len = alignmask + 1 -
 				     ((unsigned long)data & alignmask);
-	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
-		__aligned_largest;
+	/*
+	 * We cannot count on __aligned() working for large values:
+	 * https://patchwork.kernel.org/patch/9507697/
+	 */
+	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
 	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
 	int err;
+
+	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
+		return -EINVAL;
 
 	if (unaligned_len > len)
 		unaligned_len = len;
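Why the fixed-size buffer needs the runtime guard: PTR_ALIGN() may skip up to alignmask leading bytes of ubuf, and unaligned_len is computed before it is clamped to len, so it can be as large as alignmask + 1. With alignmask capped at MAX_ALGAPI_ALIGNMASK, the worst case is MAX_ALGAPI_ALIGNMASK * 2 + 1 bytes, one more than ubuf holds; the new WARN_ON() turns that corner case into a loud -EINVAL instead of a silent stack overrun. A standalone sketch of the arithmetic (userspace; the value 63 for MAX_ALGAPI_ALIGNMASK is an assumption mirroring the kernel header):

#include <assert.h>

#define MAX_ALGAPI_ALIGNMASK 63	/* assumed kernel value */

int main(void)
{
	unsigned long alignmask = MAX_ALGAPI_ALIGNMASK;    /* worst case */
	unsigned long worst_align_skip = alignmask;        /* PTR_ALIGN() slack */
	unsigned long worst_unaligned_len = alignmask + 1; /* before clamping */

	/* One byte more than ubuf[MAX_ALGAPI_ALIGNMASK * 2] can hold. */
	assert(worst_align_skip + worst_unaligned_len ==
	       MAX_ALGAPI_ALIGNMASK * 2 + 1);
	return 0;
}

The same pattern recurs in shash_final_unaligned() below, where the tail of the buffer must instead hold a digest of up to HASH_MAX_DIGESTSIZE bytes.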
@@ -145,10 +138,16 @@
 	unsigned long alignmask = crypto_shash_alignmask(tfm);
 	struct shash_alg *shash = crypto_shash_alg(tfm);
 	unsigned int ds = crypto_shash_digestsize(tfm);
-	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
-		__aligned_largest;
+	/*
+	 * We cannot count on __aligned() working for large values:
+	 * https://patchwork.kernel.org/patch/9507697/
+	 */
+	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
 	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
 	int err;
+
+	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
+		return -EINVAL;
 
 	err = shash->final(desc, buf);
 	if (err)
@@ -219,6 +218,22 @@
 }
 EXPORT_SYMBOL_GPL(crypto_shash_digest);
 
+int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
+			    unsigned int len, u8 *out)
+{
+	SHASH_DESC_ON_STACK(desc, tfm);
+	int err;
+
+	desc->tfm = tfm;
+
+	err = crypto_shash_digest(desc, data, len, out);
+
+	shash_desc_zero(desc);
+
+	return err;
+}
+EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
+
 static int shash_default_export(struct shash_desc *desc, void *out)
 {
 	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
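The new crypto_shash_tfm_digest() centralizes a pattern every one-shot caller used to open-code: put a descriptor on the stack, digest, zeroize. A hypothetical caller, for illustration (the demo function is a placeholder; the digest buffer must hold the algorithm's digest size, 32 bytes for sha256):

#include <crypto/hash.h>
#include <linux/err.h>

static int demo_sha256(const u8 *data, unsigned int len, u8 *digest)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_tfm_digest(tfm, data, len, digest);

	crypto_free_shash(tfm);
	return err;
}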
@@ -245,7 +260,6 @@
 	struct shash_desc *desc = ahash_request_ctx(req);
 
 	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
 
 	return crypto_shash_init(desc);
 }
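This and the three identical hunks below drop the same line: the shash_desc::flags field is removed. Per the upstream rationale, the only flag it could carry was CRYPTO_TFM_REQ_MAY_SLEEP, and no shash algorithm ever sleeps, so the field was a no-op; the crypto_yield(desc->flags) call removed further down goes with it.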
@@ -300,7 +314,6 @@
 	struct shash_desc *desc = ahash_request_ctx(req);
 
 	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
 
 	return shash_ahash_finup(req, desc);
 }
@@ -314,14 +327,13 @@
 
 	if (nbytes &&
 	    (sg = req->src, offset = sg->offset,
-	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
+	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
 		void *data;
 
 		data = kmap_atomic(sg_page(sg));
 		err = crypto_shash_digest(desc, data + offset, nbytes,
 					  req->result);
 		kunmap_atomic(data);
-		crypto_yield(desc->flags);
 	} else
 		err = crypto_shash_init(desc) ?:
 		      shash_ahash_finup(req, desc);
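Besides losing the crypto_yield() call, this hunk carries a small functional fix: the single-page fast path now also accepts requests where nbytes is exactly equal to the mapped window, i.e. equal to min(sg->length, PAGE_SIZE - offset). Previously such a request fell through to the needlessly slower init-plus-finup path.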
@@ -336,7 +348,6 @@
 	struct shash_desc *desc = ahash_request_ctx(req);
 
 	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
 
 	return shash_ahash_digest(req, desc);
 }
@@ -352,7 +363,6 @@
 	struct shash_desc *desc = ahash_request_ctx(req);
 
 	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
 
 	return crypto_shash_import(desc, in);
 }
@@ -395,26 +405,57 @@
 	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
 				    CRYPTO_TFM_NEED_KEY);
 
-	if (alg->export)
-		crt->export = shash_async_export;
-	if (alg->import)
-		crt->import = shash_async_import;
+	crt->export = shash_async_export;
+	crt->import = shash_async_import;
 
 	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
 
 	return 0;
 }
 
+static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_shash *hash = __crypto_shash_cast(tfm);
+	struct shash_alg *alg = crypto_shash_alg(hash);
+
+	alg->exit_tfm(hash);
+}
+
 static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_shash *hash = __crypto_shash_cast(tfm);
 	struct shash_alg *alg = crypto_shash_alg(hash);
+	int err;
 
 	hash->descsize = alg->descsize;
 
 	shash_set_needkey(hash, alg);
 
+	if (alg->exit_tfm)
+		tfm->exit = crypto_shash_exit_tfm;
+
+	if (!alg->init_tfm)
+		return 0;
+
+	err = alg->init_tfm(hash);
+	if (err)
+		return err;
+
+	/* ->init_tfm() may have increased the descsize. */
+	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
+		if (alg->exit_tfm)
+			alg->exit_tfm(hash);
+		return -EINVAL;
+	}
+
 	return 0;
+}
+
+static void crypto_shash_free_instance(struct crypto_instance *inst)
+{
+	struct shash_instance *shash = shash_instance(inst);
+
+	shash->free(shash);
 }
 
 #ifdef CONFIG_NET
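shash transforms now support optional per-tfm constructor/destructor hooks. A sketch of a driver that could use them (all demo_* names are hypothetical); note that because ->init_tfm() may enlarge hash->descsize, crypto_shash_init_tfm() above re-checks it against HASH_MAX_DESCSIZE so SHASH_DESC_ON_STACK() users stay safe:

#include <crypto/internal/hash.h>
#include <linux/err.h>

struct demo_tfm_ctx {
	struct crypto_shash *fallback;
};

static int demo_init_tfm(struct crypto_shash *hash)
{
	struct demo_tfm_ctx *ctx = crypto_shash_ctx(hash);

	/* Acquire a per-tfm resource, e.g. a fallback transform. */
	ctx->fallback = crypto_alloc_shash("sha256-generic", 0, 0);
	return PTR_ERR_OR_ZERO(ctx->fallback);
}

static void demo_exit_tfm(struct crypto_shash *hash)
{
	struct demo_tfm_ctx *ctx = crypto_shash_ctx(hash);

	crypto_free_shash(ctx->fallback);
}

static struct shash_alg demo_alg = {
	.digestsize	= 32,
	.init_tfm	= demo_init_tfm,	/* may fail; may grow descsize */
	.exit_tfm	= demo_exit_tfm,	/* dispatched via tfm->exit */
	/* .init/.update/.final and .base fields omitted for brevity */
};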
@@ -423,18 +464,14 @@
 	struct crypto_report_hash rhash;
 	struct shash_alg *salg = __crypto_shash_alg(alg);
 
-	strncpy(rhash.type, "shash", sizeof(rhash.type));
+	memset(&rhash, 0, sizeof(rhash));
+
+	strscpy(rhash.type, "shash", sizeof(rhash.type));
 
 	rhash.blocksize = alg->cra_blocksize;
 	rhash.digestsize = salg->digestsize;
 
-	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
-		    sizeof(struct crypto_report_hash), &rhash))
-		goto nla_put_failure;
-	return 0;
-
-nla_put_failure:
-	return -EMSGSIZE;
+	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
 }
 #else
 static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
@@ -457,6 +494,7 @@
 static const struct crypto_type crypto_shash_type = {
 	.extsize = crypto_alg_extsize,
 	.init_tfm = crypto_shash_init_tfm,
+	.free = crypto_shash_free_instance,
 #ifdef CONFIG_PROC_FS
 	.show = crypto_shash_show,
 #endif
@@ -466,6 +504,15 @@
 	.type = CRYPTO_ALG_TYPE_SHASH,
 	.tfmsize = offsetof(struct crypto_shash, base),
 };
+
+int crypto_grab_shash(struct crypto_shash_spawn *spawn,
+		      struct crypto_instance *inst,
+		      const char *name, u32 type, u32 mask)
+{
+	spawn->base.frontend = &crypto_shash_type;
+	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_grab_shash);
 
 struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					 u32 mask)
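crypto_grab_shash() gives templates a typed way to look up and link an underlying shash in one step. A sketch of the intended call pattern in a template's ->create() (demo_* names are hypothetical; attribute-type checking and the algorithm fields are reduced to the bare minimum, and the type/mask arguments are simplified to 0):

static int demo_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_shash_spawn *spawn;
	struct shash_instance *inst;
	struct shash_alg *halg;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_shash(spawn, shash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), 0, 0);
	if (err)
		goto err_free_inst;
	halg = crypto_spawn_shash_alg(spawn);

	/* ... fill in inst->alg from halg ... */

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}

Setting inst->free before registering satisfies the new WARN_ON(!inst->free) check in shash_register_instance(), and shash_free_singlespawn_instance() (introduced in the final hunk of this diff) drops the single spawn and frees the instance on both the error and the unregister paths.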
@@ -478,9 +525,12 @@
 {
 	struct crypto_alg *base = &alg->base;
 
-	if (alg->digestsize > PAGE_SIZE / 8 ||
-	    alg->descsize > PAGE_SIZE / 8 ||
-	    alg->statesize > PAGE_SIZE / 8)
+	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
+	    alg->descsize > HASH_MAX_DESCSIZE ||
+	    alg->statesize > HASH_MAX_STATESIZE)
+		return -EINVAL;
+
+	if ((alg->export && !alg->import) || (alg->import && !alg->export))
 		return -EINVAL;
 
 	base->cra_type = &crypto_shash_type;
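Two registration-time tightenings: the old PAGE_SIZE / 8 sanity limits become the named HASH_MAX_DIGESTSIZE / HASH_MAX_DESCSIZE / HASH_MAX_STATESIZE bounds, and export()/import() must now come as a pair or not at all. The named bounds matter because callers size on-stack buffers from them; the stack-descriptor macro from include/crypto/hash.h is essentially the following (paraphrased; the exact alignment attribute may differ):

#define SHASH_DESC_ON_STACK(shash, ctx)					\
	char __##shash##_desc[sizeof(struct shash_desc) +		\
			      HASH_MAX_DESCSIZE] CRYPTO_MINALIGN_ATTR;	\
	struct shash_desc *shash = (struct shash_desc *)__##shash##_desc

Since shash_prepare_alg() rejects anything larger at registration and crypto_shash_init_tfm() re-validates descsize after ->init_tfm(), a SHASH_DESC_ON_STACK() descriptor can never end up too small.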
@@ -515,9 +565,9 @@
 }
 EXPORT_SYMBOL_GPL(crypto_register_shash);
 
-int crypto_unregister_shash(struct shash_alg *alg)
+void crypto_unregister_shash(struct shash_alg *alg)
 {
-	return crypto_unregister_alg(&alg->base);
+	crypto_unregister_alg(&alg->base);
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_shash);
 
@@ -541,19 +591,12 @@
 }
 EXPORT_SYMBOL_GPL(crypto_register_shashes);
 
-int crypto_unregister_shashes(struct shash_alg *algs, int count)
+void crypto_unregister_shashes(struct shash_alg *algs, int count)
 {
-	int i, ret;
+	int i;
 
-	for (i = count - 1; i >= 0; --i) {
-		ret = crypto_unregister_shash(&algs[i]);
-		if (ret)
-			pr_err("Failed to unregister %s %s: %d\n",
-			       algs[i].base.cra_driver_name,
-			       algs[i].base.cra_name, ret);
-	}
-
-	return 0;
+	for (i = count - 1; i >= 0; --i)
+		crypto_unregister_shash(&algs[i]);
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
 
@@ -561,6 +604,9 @@
 			      struct shash_instance *inst)
 {
 	int err;
+
+	if (WARN_ON(!inst->free))
+		return -EINVAL;
 
 	err = shash_prepare_alg(&inst->alg);
 	if (err)
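shash_register_instance() now refuses instances without a destructor wired up, matching the new generic .free hook in crypto_shash_type above: once the crypto core frees instances through crypto_shash_free_instance(), an instance lacking ->free would be unfreeable. The single-spawn helper introduced in the next hunk is the destructor most templates will use; see the ->create() sketch after the crypto_grab_shash() hunk.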
@@ -570,31 +616,12 @@
 }
 EXPORT_SYMBOL_GPL(shash_register_instance);
 
-void shash_free_instance(struct crypto_instance *inst)
+void shash_free_singlespawn_instance(struct shash_instance *inst)
 {
-	crypto_drop_spawn(crypto_instance_ctx(inst));
-	kfree(shash_instance(inst));
+	crypto_drop_spawn(shash_instance_ctx(inst));
+	kfree(inst);
 }
-EXPORT_SYMBOL_GPL(shash_free_instance);
-
-int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
-			    struct shash_alg *alg,
-			    struct crypto_instance *inst)
-{
-	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
-				  &crypto_shash_type);
-}
-EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
-
-struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
-{
-	struct crypto_alg *alg;
-
-	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
-	return IS_ERR(alg) ? ERR_CAST(alg) :
-	       container_of(alg, struct shash_alg, base);
-}
-EXPORT_SYMBOL_GPL(shash_attr_alg);
+EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Synchronous cryptographic hash type");
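Taken together, the last hunks replace the old spawn plumbing: crypto_grab_shash() subsumes both crypto_init_shash_spawn() and shash_attr_alg(), since grabbing by name performs the algorithm lookup and the spawn linkage in one call, and shash_free_singlespawn_instance() replaces shash_free_instance() now that instance teardown is dispatched through inst->free via the crypto_type .free hook rather than through a per-template free function.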