--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -1,13 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Synchronous Cryptographic Hash operations.
  *
  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
  */
 
 #include <crypto/scatterwalk.h>
@@ -61,14 +56,13 @@
         alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
         memcpy(alignbuffer, key, keylen);
         err = shash->setkey(tfm, alignbuffer, keylen);
-        kzfree(buffer);
+        kfree_sensitive(buffer);
         return err;
 }
 
 static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
 {
-        if (crypto_shash_alg_has_setkey(alg) &&
-            !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
+        if (crypto_shash_alg_needs_key(alg))
                 crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
 }
 
@@ -94,13 +88,6 @@
 }
 EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 
-static inline unsigned int shash_align_buffer_size(unsigned len,
-                                                   unsigned long mask)
-{
-        typedef u8 __aligned_largest u8_aligned;
-        return len + (mask & ~(__alignof__(u8_aligned) - 1));
-}
-
 static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                   unsigned int len)
 {
@@ -109,10 +96,16 @@
         unsigned long alignmask = crypto_shash_alignmask(tfm);
         unsigned int unaligned_len = alignmask + 1 -
                                      ((unsigned long)data & alignmask);
-        u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
-                __aligned_largest;
+        /*
+         * We cannot count on __aligned() working for large values:
+         * https://patchwork.kernel.org/patch/9507697/
+         */
+        u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
         u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
         int err;
+
+        if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
+                return -EINVAL;
 
         if (unaligned_len > len)
                 unaligned_len = len;
@@ -145,10 +138,16 @@
         unsigned long alignmask = crypto_shash_alignmask(tfm);
         struct shash_alg *shash = crypto_shash_alg(tfm);
         unsigned int ds = crypto_shash_digestsize(tfm);
-        u8 ubuf[shash_align_buffer_size(ds, alignmask)]
-                __aligned_largest;
+        /*
+         * We cannot count on __aligned() working for large values:
+         * https://patchwork.kernel.org/patch/9507697/
+         */
+        u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
         u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
         int err;
+
+        if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
+                return -EINVAL;
 
         err = shash->final(desc, buf);
         if (err)
@@ -219,6 +218,22 @@
 }
 EXPORT_SYMBOL_GPL(crypto_shash_digest);
 
+int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
+                            unsigned int len, u8 *out)
+{
+        SHASH_DESC_ON_STACK(desc, tfm);
+        int err;
+
+        desc->tfm = tfm;
+
+        err = crypto_shash_digest(desc, data, len, out);
+
+        shash_desc_zero(desc);
+
+        return err;
+}
+EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
+
 static int shash_default_export(struct shash_desc *desc, void *out)
 {
         memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
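The crypto_shash_tfm_digest() helper added above folds the usual SHASH_DESC_ON_STACK() boilerplate into a single call for one-shot hashing. A minimal sketch of a caller, assuming a "sha256" transform; the example_sha256() wrapper and its names are illustrative only:

```c
#include <crypto/hash.h>
#include <crypto/sha.h>	/* SHA256_DIGEST_SIZE; <crypto/sha2.h> on newer trees */

static int example_sha256(const u8 *data, unsigned int len,
			  u8 out[SHA256_DIGEST_SIZE])
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* one call instead of SHASH_DESC_ON_STACK() + crypto_shash_digest() */
	err = crypto_shash_tfm_digest(tfm, data, len, out);

	crypto_free_shash(tfm);
	return err;
}
```

Note that the helper zeroes the on-stack descriptor itself, so callers no longer need to remember shash_desc_zero().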
@@ -245,7 +260,6 @@
         struct shash_desc *desc = ahash_request_ctx(req);
 
         desc->tfm = *ctx;
-        desc->flags = req->base.flags;
 
         return crypto_shash_init(desc);
 }
@@ -300,7 +314,6 @@
         struct shash_desc *desc = ahash_request_ctx(req);
 
         desc->tfm = *ctx;
-        desc->flags = req->base.flags;
 
         return shash_ahash_finup(req, desc);
 }
@@ -314,14 +327,13 @@
 
         if (nbytes &&
             (sg = req->src, offset = sg->offset,
-             nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
+             nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
                 void *data;
 
                 data = kmap_atomic(sg_page(sg));
                 err = crypto_shash_digest(desc, data + offset, nbytes,
                                           req->result);
                 kunmap_atomic(data);
-                crypto_yield(desc->flags);
         } else
                 err = crypto_shash_init(desc) ?:
                       shash_ahash_finup(req, desc);
@@ -336,7 +348,6 @@
         struct shash_desc *desc = ahash_request_ctx(req);
 
         desc->tfm = *ctx;
-        desc->flags = req->base.flags;
 
         return shash_ahash_digest(req, desc);
 }
@@ -352,7 +363,6 @@
         struct shash_desc *desc = ahash_request_ctx(req);
 
         desc->tfm = *ctx;
-        desc->flags = req->base.flags;
 
         return crypto_shash_import(desc, in);
 }
@@ -395,26 +405,57 @@
         crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
                                     CRYPTO_TFM_NEED_KEY);
 
-        if (alg->export)
-                crt->export = shash_async_export;
-        if (alg->import)
-                crt->import = shash_async_import;
+        crt->export = shash_async_export;
+        crt->import = shash_async_import;
 
         crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
 
         return 0;
 }
 
+static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
+{
+        struct crypto_shash *hash = __crypto_shash_cast(tfm);
+        struct shash_alg *alg = crypto_shash_alg(hash);
+
+        alg->exit_tfm(hash);
+}
+
 static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
 {
         struct crypto_shash *hash = __crypto_shash_cast(tfm);
         struct shash_alg *alg = crypto_shash_alg(hash);
+        int err;
 
         hash->descsize = alg->descsize;
 
         shash_set_needkey(hash, alg);
 
+        if (alg->exit_tfm)
+                tfm->exit = crypto_shash_exit_tfm;
+
+        if (!alg->init_tfm)
+                return 0;
+
+        err = alg->init_tfm(hash);
+        if (err)
+                return err;
+
+        /* ->init_tfm() may have increased the descsize. */
+        if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
+                if (alg->exit_tfm)
+                        alg->exit_tfm(hash);
+                return -EINVAL;
+        }
+
         return 0;
+}
+
+static void crypto_shash_free_instance(struct crypto_instance *inst)
+{
+        struct shash_instance *shash = shash_instance(inst);
+
+        shash->free(shash);
 }
 
 #ifdef CONFIG_NET
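The new ->init_tfm()/->exit_tfm() hooks act as a per-transform constructor and destructor, and the WARN_ON_ONCE() above polices any descsize growth they cause. A hedged sketch of how an algorithm might use them; every example_* identifier is hypothetical and the remaining shash_alg fields are elided:

```c
#include <crypto/internal/hash.h>

struct example_tfm_ctx {
	struct crypto_shash *fallback;
};

static int example_init_tfm(struct crypto_shash *tfm)
{
	struct example_tfm_ctx *ctx = crypto_shash_ctx(tfm);

	/* acquire a per-tfm resource, e.g. a software fallback */
	ctx->fallback = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(ctx->fallback))
		return PTR_ERR(ctx->fallback);

	/* if this hook enlarges tfm->descsize, it must stay <= HASH_MAX_DESCSIZE */
	return 0;
}

static void example_exit_tfm(struct crypto_shash *tfm)
{
	struct example_tfm_ctx *ctx = crypto_shash_ctx(tfm);

	/* release whatever ->init_tfm() acquired */
	crypto_free_shash(ctx->fallback);
}

static struct shash_alg example_alg = {
	/*
	 * .digestsize, .init/.update/.final, .descsize and .base elided;
	 * .base.cra_ctxsize would be sizeof(struct example_tfm_ctx).
	 */
	.init_tfm	= example_init_tfm,
	.exit_tfm	= example_exit_tfm,
};
```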
@@ -423,18 +464,14 @@
         struct crypto_report_hash rhash;
         struct shash_alg *salg = __crypto_shash_alg(alg);
 
-        strncpy(rhash.type, "shash", sizeof(rhash.type));
+        memset(&rhash, 0, sizeof(rhash));
+
+        strscpy(rhash.type, "shash", sizeof(rhash.type));
 
         rhash.blocksize = alg->cra_blocksize;
         rhash.digestsize = salg->digestsize;
 
-        if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
-                    sizeof(struct crypto_report_hash), &rhash))
-                goto nla_put_failure;
-        return 0;
-
-nla_put_failure:
-        return -EMSGSIZE;
+        return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
 }
 #else
 static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
@@ -457,6 +494,7 @@
 static const struct crypto_type crypto_shash_type = {
         .extsize = crypto_alg_extsize,
         .init_tfm = crypto_shash_init_tfm,
+        .free = crypto_shash_free_instance,
 #ifdef CONFIG_PROC_FS
         .show = crypto_shash_show,
 #endif
@@ -466,6 +504,15 @@
         .type = CRYPTO_ALG_TYPE_SHASH,
         .tfmsize = offsetof(struct crypto_shash, base),
 };
+
+int crypto_grab_shash(struct crypto_shash_spawn *spawn,
+                      struct crypto_instance *inst,
+                      const char *name, u32 type, u32 mask)
+{
+        spawn->base.frontend = &crypto_shash_type;
+        return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_grab_shash);
 
 struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                         u32 mask)
@@ -478,9 +525,12 @@
 {
         struct crypto_alg *base = &alg->base;
 
-        if (alg->digestsize > PAGE_SIZE / 8 ||
-            alg->descsize > PAGE_SIZE / 8 ||
-            alg->statesize > PAGE_SIZE / 8)
+        if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
+            alg->descsize > HASH_MAX_DESCSIZE ||
+            alg->statesize > HASH_MAX_STATESIZE)
+                return -EINVAL;
+
+        if ((alg->export && !alg->import) || (alg->import && !alg->export))
                 return -EINVAL;
 
         base->cra_type = &crypto_shash_type;
@@ -515,9 +565,9 @@
 }
 EXPORT_SYMBOL_GPL(crypto_register_shash);
 
-int crypto_unregister_shash(struct shash_alg *alg)
+void crypto_unregister_shash(struct shash_alg *alg)
 {
-        return crypto_unregister_alg(&alg->base);
+        crypto_unregister_alg(&alg->base);
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_shash);
 
@@ -541,19 +591,12 @@
 }
 EXPORT_SYMBOL_GPL(crypto_register_shashes);
 
-int crypto_unregister_shashes(struct shash_alg *algs, int count)
+void crypto_unregister_shashes(struct shash_alg *algs, int count)
 {
-        int i, ret;
+        int i;
 
-        for (i = count - 1; i >= 0; --i) {
-                ret = crypto_unregister_shash(&algs[i]);
-                if (ret)
-                        pr_err("Failed to unregister %s %s: %d\n",
-                               algs[i].base.cra_driver_name,
-                               algs[i].base.cra_name, ret);
-        }
-
-        return 0;
+        for (i = count - 1; i >= 0; --i)
+                crypto_unregister_shash(&algs[i]);
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
 
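With crypto_unregister_shash() and crypto_unregister_shashes() returning void, callers simply drop the return-value check. A typical module exit path under that assumption (example_algs and the surrounding module are hypothetical):

```c
#include <crypto/internal/hash.h>
#include <linux/module.h>

static struct shash_alg example_algs[2];	/* populated before registration */

static void __exit example_mod_exit(void)
{
	/* nothing left to check or log on the unregister path */
	crypto_unregister_shashes(example_algs, ARRAY_SIZE(example_algs));
}
module_exit(example_mod_exit);
```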
@@ -561,6 +604,9 @@
                             struct shash_instance *inst)
 {
         int err;
+
+        if (WARN_ON(!inst->free))
+                return -EINVAL;
 
         err = shash_prepare_alg(&inst->alg);
         if (err)
@@ -570,31 +616,12 @@
 }
 EXPORT_SYMBOL_GPL(shash_register_instance);
 
-void shash_free_instance(struct crypto_instance *inst)
+void shash_free_singlespawn_instance(struct shash_instance *inst)
 {
-        crypto_drop_spawn(crypto_instance_ctx(inst));
-        kfree(shash_instance(inst));
+        crypto_drop_spawn(shash_instance_ctx(inst));
+        kfree(inst);
 }
-EXPORT_SYMBOL_GPL(shash_free_instance);
-
-int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
-                            struct shash_alg *alg,
-                            struct crypto_instance *inst)
-{
-        return crypto_init_spawn2(&spawn->base, &alg->base, inst,
-                                  &crypto_shash_type);
-}
-EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
-
-struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
-{
-        struct crypto_alg *alg;
-
-        alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
-        return IS_ERR(alg) ? ERR_CAST(alg) :
-               container_of(alg, struct shash_alg, base);
-}
-EXPORT_SYMBOL_GPL(shash_attr_alg);
+EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Synchronous cryptographic hash type");
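Taken together, crypto_grab_shash(), the WARN_ON(!inst->free) check in shash_register_instance(), and shash_free_singlespawn_instance() define the shape of a single-spawn shash template after this change. A rough, hypothetical skeleton of a template ->create() written against that shape; all example_* names are invented and the inst->alg setup is omitted:

```c
#include <crypto/internal/hash.h>
#include <linux/kernel.h>
#include <linux/module.h>

static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_shash_spawn *spawn;
	struct shash_instance *inst;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	/* grab the underlying shash named by the template parameter */
	err = crypto_grab_shash(spawn, shash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), 0, 0);
	if (err)
		goto err_free_inst;

	/* ... fill in inst->alg from crypto_spawn_shash_alg(spawn) ... */

	/* shash_register_instance() now refuses instances without ->free */
	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}
```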