...
+// SPDX-License-Identifier: GPL-2.0-only
 /* n2_core.c: Niagara2 Stream Processing Unit (SPU) crypto support.
  *
  * Copyright (C) 2010, 2011 David S. Miller <davem@davemloft.net>
...
 #include <crypto/md5.h>
 #include <crypto/sha.h>
 #include <crypto/aes.h>
-#include <crypto/des.h>
+#include <crypto/internal/des.h>
 #include <linux/mutex.h>
 #include <linux/delay.h>
 #include <linux/sched.h>

 #include <crypto/internal/hash.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/algapi.h>

---|
...
 struct n2_ahash_alg {
	struct list_head entry;
	const u8 *hash_zero;
-	const u32 *hash_init;
+	const u8 *hash_init;
	u8 hw_op_hashsz;
	u8 digest_size;
	u8 auth_type;
---|
...
	fallback_tfm = crypto_alloc_ahash(fallback_driver_name, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback_tfm)) {
-		pr_warning("Fallback driver '%s' could not be loaded!\n",
-			   fallback_driver_name);
+		pr_warn("Fallback driver '%s' could not be loaded!\n",
+			fallback_driver_name);
		err = PTR_ERR(fallback_tfm);
		goto out;
	}
...
	fallback_tfm = crypto_alloc_ahash(fallback_driver_name, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback_tfm)) {
-		pr_warning("Fallback driver '%s' could not be loaded!\n",
-			   fallback_driver_name);
+		pr_warn("Fallback driver '%s' could not be loaded!\n",
+			fallback_driver_name);
		err = PTR_ERR(fallback_tfm);
		goto out;
	}

	child_shash = crypto_alloc_shash(n2alg->child_alg, 0, 0);
	if (IS_ERR(child_shash)) {
-		pr_warning("Child shash '%s' could not be loaded!\n",
-			   n2alg->child_alg);
+		pr_warn("Child shash '%s' could not be loaded!\n",
+			n2alg->child_alg);
		err = PTR_ERR(child_shash);
		goto out_free_fallback;
	}
---|
...
	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_shash *child_shash = ctx->child_shash;
	struct crypto_ahash *fallback_tfm;
-	SHASH_DESC_ON_STACK(shash, child_shash);
	int err, bs, ds;

	fallback_tfm = ctx->base.fallback_tfm;
...
	if (err)
		return err;

-	shash->tfm = child_shash;
-	shash->flags = crypto_ahash_get_flags(tfm) &
-		CRYPTO_TFM_REQ_MAY_SLEEP;
-
	bs = crypto_shash_blocksize(child_shash);
	ds = crypto_shash_digestsize(child_shash);
	BUG_ON(ds > N2_HASH_KEY_MAX);
	if (keylen > bs) {
-		err = crypto_shash_digest(shash, key, keylen,
-					  ctx->hash_key);
+		err = crypto_shash_tfm_digest(child_shash, key, keylen,
+					      ctx->hash_key);
		if (err)
			return err;
		keylen = ds;
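The hunk above drops the on-stack shash descriptor (and its obsolete flags assignment) in favour of crypto_shash_tfm_digest(), which manages the descriptor internally for a one-shot digest. A minimal hedged sketch of the same pattern outside this driver, assuming a synchronous hash such as "sha256" is available; the hash_a_key() name is illustrative only:

#include <crypto/hash.h>
#include <linux/err.h>

/* Hedged sketch: hash a long HMAC key down to digest size with the
 * one-shot helper instead of SHASH_DESC_ON_STACK + crypto_shash_digest().
 */
static int hash_a_key(const u8 *key, unsigned int keylen, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* init + update + final in one call; no shash_desc to set up. */
	err = crypto_shash_tfm_digest(tfm, key, keylen, out);

	crypto_free_shash(tfm);
	return err;
}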
---|
...
		ctx->hash_key_len);
 }

-struct n2_cipher_context {
+struct n2_skcipher_context {
	int key_len;
	int enc_type;
	union {
		u8 aes[AES_MAX_KEY_SIZE];
		u8 des[DES_KEY_SIZE];
		u8 des3[3 * DES_KEY_SIZE];
-		u8 arc4[258]; /* S-box, X, Y */
	} key;
 };

...
 };

 struct n2_request_context {
-	struct ablkcipher_walk walk;
+	struct skcipher_walk walk;
	struct list_head chunk_list;
	struct n2_crypto_chunk chunk;
	u8 temp_iv[16];
...
  * is not a valid sequence.
  */

-struct n2_cipher_alg {
+struct n2_skcipher_alg {
	struct list_head entry;
	u8 enc_type;
-	struct crypto_alg alg;
+	struct skcipher_alg skcipher;
 };

-static inline struct n2_cipher_alg *n2_cipher_alg(struct crypto_tfm *tfm)
+static inline struct n2_skcipher_alg *n2_skcipher_alg(struct crypto_skcipher *tfm)
 {
-	struct crypto_alg *alg = tfm->__crt_alg;
+	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

-	return container_of(alg, struct n2_cipher_alg, alg);
+	return container_of(alg, struct n2_skcipher_alg, skcipher);
 }

-struct n2_cipher_request_context {
-	struct ablkcipher_walk walk;
+struct n2_skcipher_request_context {
+	struct skcipher_walk walk;
 };

-static int n2_aes_setkey(struct crypto_ablkcipher *cipher, const u8 *key,
+static int n2_aes_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			 unsigned int keylen)
 {
-	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
-	struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm);
-	struct n2_cipher_alg *n2alg = n2_cipher_alg(tfm);
+	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
+	struct n2_skcipher_context *ctx = crypto_tfm_ctx(tfm);
+	struct n2_skcipher_alg *n2alg = n2_skcipher_alg(skcipher);

	ctx->enc_type = (n2alg->enc_type & ENC_TYPE_CHAINING_MASK);

...
		ctx->enc_type |= ENC_TYPE_ALG_AES256;
		break;
	default:
-		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

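With the conversion to the skcipher API, setkey failures such as the bad-key-length case above are reported purely through the -EINVAL return value; there is no CRYPTO_TFM_RES_* flag left to set. Callers reach the algorithm through the usual asynchronous interface. A hedged, minimal caller-side sketch, assuming a "cbc(aes)" implementation is available (this driver registers its instances with a "-n2" driver-name suffix, per the registration code later in the patch); encrypt_one_block() is an illustrative name:

#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Hedged sketch: one CBC-AES encryption through the async skcipher API. */
static int encrypt_one_block(const u8 *key, unsigned int keylen,
			     u8 *iv, struct scatterlist *src,
			     struct scatterlist *dst, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, src, dst, len, iv);

	/* crypto_wait_req() turns the async completion into a blocking call. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}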
---|
...
	return 0;
 }

-static int n2_des_setkey(struct crypto_ablkcipher *cipher, const u8 *key,
+static int n2_des_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			 unsigned int keylen)
 {
-	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
-	struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm);
-	struct n2_cipher_alg *n2alg = n2_cipher_alg(tfm);
-	u32 tmp[DES_EXPKEY_WORDS];
+	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
+	struct n2_skcipher_context *ctx = crypto_tfm_ctx(tfm);
+	struct n2_skcipher_alg *n2alg = n2_skcipher_alg(skcipher);
	int err;

+	err = verify_skcipher_des_key(skcipher, key);
+	if (err)
+		return err;
+
	ctx->enc_type = n2alg->enc_type;
-
-	if (keylen != DES_KEY_SIZE) {
-		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
-		return -EINVAL;
-	}
-
-	err = des_ekey(tmp, key);
-	if (err == 0 && (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
-		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
-		return -EINVAL;
-	}

	ctx->key_len = keylen;
	memcpy(ctx->key.des, key, keylen);
	return 0;
 }

-static int n2_3des_setkey(struct crypto_ablkcipher *cipher, const u8 *key,
+static int n2_3des_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			  unsigned int keylen)
 {
-	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
-	struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm);
-	struct n2_cipher_alg *n2alg = n2_cipher_alg(tfm);
+	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
+	struct n2_skcipher_context *ctx = crypto_tfm_ctx(tfm);
+	struct n2_skcipher_alg *n2alg = n2_skcipher_alg(skcipher);
+	int err;
+
+	err = verify_skcipher_des3_key(skcipher, key);
+	if (err)
+		return err;

	ctx->enc_type = n2alg->enc_type;

-	if (keylen != (3 * DES_KEY_SIZE)) {
-		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
-		return -EINVAL;
-	}
	ctx->key_len = keylen;
	memcpy(ctx->key.des3, key, keylen);
	return 0;
 }

-static int n2_arc4_setkey(struct crypto_ablkcipher *cipher, const u8 *key,
-			  unsigned int keylen)
-{
-	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
-	struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm);
-	struct n2_cipher_alg *n2alg = n2_cipher_alg(tfm);
-	u8 *s = ctx->key.arc4;
-	u8 *x = s + 256;
-	u8 *y = x + 1;
-	int i, j, k;
-
-	ctx->enc_type = n2alg->enc_type;
-
-	j = k = 0;
-	*x = 0;
-	*y = 0;
-	for (i = 0; i < 256; i++)
-		s[i] = i;
-	for (i = 0; i < 256; i++) {
-		u8 a = s[i];
-		j = (j + key[k] + a) & 0xff;
-		s[i] = s[j];
-		s[j] = a;
-		if (++k >= keylen)
-			k = 0;
-	}
-
-	return 0;
-}
-
-static inline int cipher_descriptor_len(int nbytes, unsigned int block_size)
+static inline int skcipher_descriptor_len(int nbytes, unsigned int block_size)
 {
	int this_len = nbytes;

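verify_skcipher_des_key() and verify_skcipher_des3_key() centralize the length and weak-key checks that each driver used to open-code around des_ekey(); weak keys are now rejected only when the caller opts in. A hedged caller-side sketch of that opt-in, assuming a DES skcipher handle already exists (DES itself is long deprecated, so this is purely illustrative); set_des_key_strict() is a made-up helper name:

#include <crypto/skcipher.h>
#include <linux/err.h>

/* Hedged sketch: request weak-key rejection before setkey.  With
 * CRYPTO_TFM_REQ_FORBID_WEAK_KEYS set, a weak DES key (for example all
 * zero bytes) makes crypto_skcipher_setkey() fail with -EINVAL; without
 * the flag it is accepted. */
static int set_des_key_strict(struct crypto_skcipher *tfm,
			      const u8 *key, unsigned int keylen)
{
	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	return crypto_skcipher_setkey(tfm, key, keylen);
}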
---|
...
	return this_len > (1 << 16) ? (1 << 16) : this_len;
 }

-static int __n2_crypt_chunk(struct crypto_tfm *tfm, struct n2_crypto_chunk *cp,
+static int __n2_crypt_chunk(struct crypto_skcipher *skcipher,
+			    struct n2_crypto_chunk *cp,
			    struct spu_queue *qp, bool encrypt)
 {
-	struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm);
+	struct n2_skcipher_context *ctx = crypto_skcipher_ctx(skcipher);
	struct cwq_initial_entry *ent;
	bool in_place;
	int i;
...
	return (spu_queue_submit(qp, ent) != HV_EOK) ? -EINVAL : 0;
 }

-static int n2_compute_chunks(struct ablkcipher_request *req)
+static int n2_compute_chunks(struct skcipher_request *req)
 {
-	struct n2_request_context *rctx = ablkcipher_request_ctx(req);
-	struct ablkcipher_walk *walk = &rctx->walk;
+	struct n2_request_context *rctx = skcipher_request_ctx(req);
+	struct skcipher_walk *walk = &rctx->walk;
	struct n2_crypto_chunk *chunk;
	unsigned long dest_prev;
	unsigned int tot_len;
	bool prev_in_place;
	int err, nbytes;

-	ablkcipher_walk_init(walk, req->dst, req->src, req->nbytes);
-	err = ablkcipher_walk_phys(req, walk);
+	err = skcipher_walk_async(walk, req);
	if (err)
		return err;
...
		bool in_place;
		int this_len;

-		src_paddr = (page_to_phys(walk->src.page) +
-			     walk->src.offset);
-		dest_paddr = (page_to_phys(walk->dst.page) +
-			      walk->dst.offset);
+		src_paddr = (page_to_phys(walk->src.phys.page) +
+			     walk->src.phys.offset);
+		dest_paddr = (page_to_phys(walk->dst.phys.page) +
+			      walk->dst.phys.offset);
		in_place = (src_paddr == dest_paddr);
-		this_len = cipher_descriptor_len(nbytes, walk->blocksize);
+		this_len = skcipher_descriptor_len(nbytes, walk->blocksize);

		if (chunk->arr_len != 0) {
			if (in_place != prev_in_place ||
...
		prev_in_place = in_place;
		tot_len += this_len;

-		err = ablkcipher_walk_done(req, walk, nbytes - this_len);
+		err = skcipher_walk_done(walk, nbytes - this_len);
		if (err)
			break;
	}
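The old ablkcipher_walk_init()/ablkcipher_walk_phys() pair collapses into skcipher_walk_async(), which pulls source, destination and length straight from the request; the loop then consumes walk->nbytes and reports progress via skcipher_walk_done(). Software implementations follow the same shape with the virtual-address variant. A hedged sketch of that common pattern (the per-segment process() callback is a stand-in, not part of the API):

#include <crypto/internal/skcipher.h>

/* Hedged sketch of the generic skcipher_walk loop used by software
 * implementations; hardware drivers like n2 use skcipher_walk_async()
 * and physical pages instead, but the done/remaining accounting is the
 * same. */
static int walk_and_process(struct skcipher_request *req,
			    void (*process)(u8 *dst, const u8 *src,
					    unsigned int len))
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		unsigned int n = walk.nbytes;

		/* Handle everything this mapping offers in one go. */
		process(walk.dst.virt.addr, walk.src.virt.addr, n);

		/* Report how many bytes remain unprocessed (none here). */
		err = skcipher_walk_done(&walk, walk.nbytes - n);
	}
	return err;
}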
---|
...
	return err;
 }

-static void n2_chunk_complete(struct ablkcipher_request *req, void *final_iv)
+static void n2_chunk_complete(struct skcipher_request *req, void *final_iv)
 {
-	struct n2_request_context *rctx = ablkcipher_request_ctx(req);
+	struct n2_request_context *rctx = skcipher_request_ctx(req);
	struct n2_crypto_chunk *c, *tmp;

	if (final_iv)
		memcpy(rctx->walk.iv, final_iv, rctx->walk.blocksize);

-	ablkcipher_walk_complete(&rctx->walk);
	list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {
		list_del(&c->entry);
		if (unlikely(c != &rctx->chunk))
...

 }

-static int n2_do_ecb(struct ablkcipher_request *req, bool encrypt)
+static int n2_do_ecb(struct skcipher_request *req, bool encrypt)
 {
-	struct n2_request_context *rctx = ablkcipher_request_ctx(req);
-	struct crypto_tfm *tfm = req->base.tfm;
+	struct n2_request_context *rctx = skcipher_request_ctx(req);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	int err = n2_compute_chunks(req);
	struct n2_crypto_chunk *c, *tmp;
	unsigned long flags, hv_ret;
...
	return err;
 }

-static int n2_encrypt_ecb(struct ablkcipher_request *req)
+static int n2_encrypt_ecb(struct skcipher_request *req)
 {
	return n2_do_ecb(req, true);
 }

-static int n2_decrypt_ecb(struct ablkcipher_request *req)
+static int n2_decrypt_ecb(struct skcipher_request *req)
 {
	return n2_do_ecb(req, false);
 }

-static int n2_do_chaining(struct ablkcipher_request *req, bool encrypt)
+static int n2_do_chaining(struct skcipher_request *req, bool encrypt)
 {
-	struct n2_request_context *rctx = ablkcipher_request_ctx(req);
-	struct crypto_tfm *tfm = req->base.tfm;
+	struct n2_request_context *rctx = skcipher_request_ctx(req);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	unsigned long flags, hv_ret, iv_paddr;
	int err = n2_compute_chunks(req);
	struct n2_crypto_chunk *c, *tmp;
...
	return err;
 }

-static int n2_encrypt_chaining(struct ablkcipher_request *req)
+static int n2_encrypt_chaining(struct skcipher_request *req)
 {
	return n2_do_chaining(req, true);
 }

-static int n2_decrypt_chaining(struct ablkcipher_request *req)
+static int n2_decrypt_chaining(struct skcipher_request *req)
 {
	return n2_do_chaining(req, false);
 }

-struct n2_cipher_tmpl {
+struct n2_skcipher_tmpl {
	const char *name;
	const char *drv_name;
	u8 block_size;
	u8 enc_type;
-	struct ablkcipher_alg ablkcipher;
+	struct skcipher_alg skcipher;
 };

-static const struct n2_cipher_tmpl cipher_tmpls[] = {
-	/* ARC4: only ECB is supported (chaining bits ignored) */
-	{ .name = "ecb(arc4)",
-	  .drv_name = "ecb-arc4",
-	  .block_size = 1,
-	  .enc_type = (ENC_TYPE_ALG_RC4_STREAM |
-		       ENC_TYPE_CHAINING_ECB),
-	  .ablkcipher = {
-		  .min_keysize = 1,
-		  .max_keysize = 256,
-		  .setkey = n2_arc4_setkey,
-		  .encrypt = n2_encrypt_ecb,
-		  .decrypt = n2_decrypt_ecb,
-	  },
-	},
-
+static const struct n2_skcipher_tmpl skcipher_tmpls[] = {
	/* DES: ECB CBC and CFB are supported */
	{ .name = "ecb(des)",
	  .drv_name = "ecb-des",
	  .block_size = DES_BLOCK_SIZE,
	  .enc_type = (ENC_TYPE_ALG_DES |
		       ENC_TYPE_CHAINING_ECB),
-	  .ablkcipher = {
+	  .skcipher = {
		  .min_keysize = DES_KEY_SIZE,
		  .max_keysize = DES_KEY_SIZE,
		  .setkey = n2_des_setkey,
---|
...
	  .block_size = DES_BLOCK_SIZE,
	  .enc_type = (ENC_TYPE_ALG_DES |
		       ENC_TYPE_CHAINING_CBC),
-	  .ablkcipher = {
+	  .skcipher = {
		  .ivsize = DES_BLOCK_SIZE,
		  .min_keysize = DES_KEY_SIZE,
		  .max_keysize = DES_KEY_SIZE,
...
	  .block_size = DES_BLOCK_SIZE,
	  .enc_type = (ENC_TYPE_ALG_DES |
		       ENC_TYPE_CHAINING_CFB),
-	  .ablkcipher = {
+	  .skcipher = {
		  .min_keysize = DES_KEY_SIZE,
		  .max_keysize = DES_KEY_SIZE,
		  .setkey = n2_des_setkey,
...
	  .block_size = DES_BLOCK_SIZE,
	  .enc_type = (ENC_TYPE_ALG_3DES |
		       ENC_TYPE_CHAINING_ECB),
-	  .ablkcipher = {
+	  .skcipher = {
		  .min_keysize = 3 * DES_KEY_SIZE,
		  .max_keysize = 3 * DES_KEY_SIZE,
		  .setkey = n2_3des_setkey,
...
	  .block_size = DES_BLOCK_SIZE,
	  .enc_type = (ENC_TYPE_ALG_3DES |
		       ENC_TYPE_CHAINING_CBC),
-	  .ablkcipher = {
+	  .skcipher = {
		  .ivsize = DES_BLOCK_SIZE,
		  .min_keysize = 3 * DES_KEY_SIZE,
		  .max_keysize = 3 * DES_KEY_SIZE,
...
	  .block_size = DES_BLOCK_SIZE,
	  .enc_type = (ENC_TYPE_ALG_3DES |
		       ENC_TYPE_CHAINING_CFB),
-	  .ablkcipher = {
+	  .skcipher = {
		  .min_keysize = 3 * DES_KEY_SIZE,
		  .max_keysize = 3 * DES_KEY_SIZE,
		  .setkey = n2_3des_setkey,
...
	  .block_size = AES_BLOCK_SIZE,
	  .enc_type = (ENC_TYPE_ALG_AES128 |
		       ENC_TYPE_CHAINING_ECB),
-	  .ablkcipher = {
+	  .skcipher = {
		  .min_keysize = AES_MIN_KEY_SIZE,
		  .max_keysize = AES_MAX_KEY_SIZE,
		  .setkey = n2_aes_setkey,
...
	  .block_size = AES_BLOCK_SIZE,
	  .enc_type = (ENC_TYPE_ALG_AES128 |
		       ENC_TYPE_CHAINING_CBC),
-	  .ablkcipher = {
+	  .skcipher = {
		  .ivsize = AES_BLOCK_SIZE,
		  .min_keysize = AES_MIN_KEY_SIZE,
		  .max_keysize = AES_MAX_KEY_SIZE,
...
	  .block_size = AES_BLOCK_SIZE,
	  .enc_type = (ENC_TYPE_ALG_AES128 |
		       ENC_TYPE_CHAINING_COUNTER),
-	  .ablkcipher = {
+	  .skcipher = {
		  .ivsize = AES_BLOCK_SIZE,
		  .min_keysize = AES_MIN_KEY_SIZE,
		  .max_keysize = AES_MAX_KEY_SIZE,
---|
...
	},

 };
-#define NUM_CIPHER_TMPLS ARRAY_SIZE(cipher_tmpls)
+#define NUM_CIPHER_TMPLS ARRAY_SIZE(skcipher_tmpls)

-static LIST_HEAD(cipher_algs);
+static LIST_HEAD(skcipher_algs);

 struct n2_hash_tmpl {
	const char *name;
	const u8 *hash_zero;
-	const u32 *hash_init;
+	const u8 *hash_init;
	u8 hw_op_hashsz;
	u8 digest_size;
+	u8 statesize;
	u8 block_size;
	u8 auth_type;
	u8 hmac_type;
 };

-static const u32 md5_init[MD5_HASH_WORDS] = {
+static const __le32 n2_md5_init[MD5_HASH_WORDS] = {
	cpu_to_le32(MD5_H0),
	cpu_to_le32(MD5_H1),
	cpu_to_le32(MD5_H2),
	cpu_to_le32(MD5_H3),
 };
-static const u32 sha1_init[SHA1_DIGEST_SIZE / 4] = {
+static const u32 n2_sha1_init[SHA1_DIGEST_SIZE / 4] = {
	SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4,
 };
-static const u32 sha256_init[SHA256_DIGEST_SIZE / 4] = {
+static const u32 n2_sha256_init[SHA256_DIGEST_SIZE / 4] = {
	SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
	SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
 };
-static const u32 sha224_init[SHA256_DIGEST_SIZE / 4] = {
+static const u32 n2_sha224_init[SHA256_DIGEST_SIZE / 4] = {
	SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
	SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
 };
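Keeping hash_init as const u8 * reflects how the initial chaining values are consumed: as raw bytes loaded into the hardware descriptor. The MD5 table is explicitly little-endian (__le32 built with cpu_to_le32()), the SHA tables stay native u32 words, and each is cast to u8 * where the templates below reference it. A hedged sketch of the kind of byte copy this enables; the n2_hw_ctx_example structure and field name are purely illustrative, not part of this driver:

#include <linux/string.h>
#include <linux/types.h>

/* Illustrative only: a stand-in hardware context that receives the raw
 * initial digest state, whatever word order the algorithm uses. */
struct n2_hw_ctx_example {
	u8 hash_state[64];
};

static void load_hash_init(struct n2_hw_ctx_example *hw,
			   const u8 *hash_init, u8 hw_op_hashsz)
{
	/* No per-word swapping here; the init tables are already laid out
	 * in memory the way they will be handed to the hardware. */
	memcpy(hw->hash_state, hash_init, hw_op_hashsz);
}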
---|
...
 static const struct n2_hash_tmpl hash_tmpls[] = {
	{ .name = "md5",
	  .hash_zero = md5_zero_message_hash,
-	  .hash_init = md5_init,
+	  .hash_init = (u8 *)n2_md5_init,
	  .auth_type = AUTH_TYPE_MD5,
	  .hmac_type = AUTH_TYPE_HMAC_MD5,
	  .hw_op_hashsz = MD5_DIGEST_SIZE,
	  .digest_size = MD5_DIGEST_SIZE,
+	  .statesize = sizeof(struct md5_state),
	  .block_size = MD5_HMAC_BLOCK_SIZE },
	{ .name = "sha1",
	  .hash_zero = sha1_zero_message_hash,
-	  .hash_init = sha1_init,
+	  .hash_init = (u8 *)n2_sha1_init,
	  .auth_type = AUTH_TYPE_SHA1,
	  .hmac_type = AUTH_TYPE_HMAC_SHA1,
	  .hw_op_hashsz = SHA1_DIGEST_SIZE,
	  .digest_size = SHA1_DIGEST_SIZE,
+	  .statesize = sizeof(struct sha1_state),
	  .block_size = SHA1_BLOCK_SIZE },
	{ .name = "sha256",
	  .hash_zero = sha256_zero_message_hash,
-	  .hash_init = sha256_init,
+	  .hash_init = (u8 *)n2_sha256_init,
	  .auth_type = AUTH_TYPE_SHA256,
	  .hmac_type = AUTH_TYPE_HMAC_SHA256,
	  .hw_op_hashsz = SHA256_DIGEST_SIZE,
	  .digest_size = SHA256_DIGEST_SIZE,
+	  .statesize = sizeof(struct sha256_state),
	  .block_size = SHA256_BLOCK_SIZE },
	{ .name = "sha224",
	  .hash_zero = sha224_zero_message_hash,
-	  .hash_init = sha224_init,
+	  .hash_init = (u8 *)n2_sha224_init,
	  .auth_type = AUTH_TYPE_SHA256,
	  .hmac_type = AUTH_TYPE_RESERVED,
	  .hw_op_hashsz = SHA256_DIGEST_SIZE,
	  .digest_size = SHA224_DIGEST_SIZE,
+	  .statesize = sizeof(struct sha256_state),
	  .block_size = SHA224_BLOCK_SIZE },
 };
 #define NUM_HASH_TMPLS ARRAY_SIZE(hash_tmpls)
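Each template now carries a statesize matching the corresponding software state structure (md5_state, sha1_state, sha256_state); this is what halg->statesize is set to at registration further down. Without it crypto_ahash_statesize() would report zero and partial-hash export/import could not work. A hedged caller-side sketch of the export/import round trip that this sizing enables; checkpoint_hash() is an illustrative name:

#include <crypto/hash.h>
#include <linux/slab.h>

/* Hedged sketch: suspend and resume a partial ahash computation using a
 * buffer sized by crypto_ahash_statesize(). */
static int checkpoint_hash(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned int statesize = crypto_ahash_statesize(tfm);
	void *state;
	int err;

	state = kmalloc(statesize, GFP_KERNEL);
	if (!state)
		return -ENOMEM;

	err = crypto_ahash_export(req, state);		/* save partial state */
	if (!err)
		err = crypto_ahash_import(req, state);	/* ...and restore it */

	kfree(state);
	return err;
}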
---|
...

 static void __n2_unregister_algs(void)
 {
-	struct n2_cipher_alg *cipher, *cipher_tmp;
+	struct n2_skcipher_alg *skcipher, *skcipher_tmp;
	struct n2_ahash_alg *alg, *alg_tmp;
	struct n2_hmac_alg *hmac, *hmac_tmp;

-	list_for_each_entry_safe(cipher, cipher_tmp, &cipher_algs, entry) {
-		crypto_unregister_alg(&cipher->alg);
-		list_del(&cipher->entry);
-		kfree(cipher);
+	list_for_each_entry_safe(skcipher, skcipher_tmp, &skcipher_algs, entry) {
+		crypto_unregister_skcipher(&skcipher->skcipher);
+		list_del(&skcipher->entry);
+		kfree(skcipher);
	}
	list_for_each_entry_safe(hmac, hmac_tmp, &hmac_algs, derived.entry) {
		crypto_unregister_ahash(&hmac->derived.alg);
...
	}
 }

-static int n2_cipher_cra_init(struct crypto_tfm *tfm)
+static int n2_skcipher_init_tfm(struct crypto_skcipher *tfm)
 {
-	tfm->crt_ablkcipher.reqsize = sizeof(struct n2_request_context);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct n2_request_context));
	return 0;
 }

-static int __n2_register_one_cipher(const struct n2_cipher_tmpl *tmpl)
+static int __n2_register_one_skcipher(const struct n2_skcipher_tmpl *tmpl)
 {
-	struct n2_cipher_alg *p = kzalloc(sizeof(*p), GFP_KERNEL);
-	struct crypto_alg *alg;
+	struct n2_skcipher_alg *p = kzalloc(sizeof(*p), GFP_KERNEL);
+	struct skcipher_alg *alg;
	int err;

	if (!p)
		return -ENOMEM;

-	alg = &p->alg;
+	alg = &p->skcipher;
+	*alg = tmpl->skcipher;

-	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
-	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s-n2", tmpl->drv_name);
-	alg->cra_priority = N2_CRA_PRIORITY;
-	alg->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-			 CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC;
-	alg->cra_blocksize = tmpl->block_size;
+	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
+	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s-n2", tmpl->drv_name);
+	alg->base.cra_priority = N2_CRA_PRIORITY;
+	alg->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
+			      CRYPTO_ALG_ALLOCATES_MEMORY;
+	alg->base.cra_blocksize = tmpl->block_size;
	p->enc_type = tmpl->enc_type;
-	alg->cra_ctxsize = sizeof(struct n2_cipher_context);
-	alg->cra_type = &crypto_ablkcipher_type;
-	alg->cra_u.ablkcipher = tmpl->ablkcipher;
-	alg->cra_init = n2_cipher_cra_init;
-	alg->cra_module = THIS_MODULE;
+	alg->base.cra_ctxsize = sizeof(struct n2_skcipher_context);
+	alg->base.cra_module = THIS_MODULE;
+	alg->init = n2_skcipher_init_tfm;

-	list_add(&p->entry, &cipher_algs);
-	err = crypto_register_alg(alg);
+	list_add(&p->entry, &skcipher_algs);
+	err = crypto_register_skcipher(alg);
	if (err) {
-		pr_err("%s alg registration failed\n", alg->cra_name);
+		pr_err("%s alg registration failed\n", alg->base.cra_name);
		list_del(&p->entry);
		kfree(p);
	} else {
-		pr_info("%s alg registered\n", alg->cra_name);
+		pr_info("%s alg registered\n", alg->base.cra_name);
	}
	return err;
 }
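n2_skcipher_init_tfm() replaces the old cra_init hook, and the request size it registers is what backs skcipher_request_ctx(): the per-request n2_request_context used throughout the walk and chunking code above. A minimal hedged sketch of that reqsize/request-context pairing, using made-up example_* names rather than anything from this driver:

#include <crypto/internal/skcipher.h>

/* Illustrative per-request scratch state; the driver decides its layout. */
struct example_reqctx {
	unsigned int processed;
	u8 iv_copy[16];
};

static int example_init_tfm(struct crypto_skcipher *tfm)
{
	/* Reserve room so skcipher_request_ctx() hands back an
	 * example_reqctx for every request on this transform. */
	crypto_skcipher_set_reqsize(tfm, sizeof(struct example_reqctx));
	return 0;
}

static int example_encrypt(struct skcipher_request *req)
{
	struct example_reqctx *rctx = skcipher_request_ctx(req);

	rctx->processed = 0;	/* per-request scratch state lives here */
	/* ... queue the request to hardware ... */
	return -EINPROGRESS;	/* completion reported asynchronously */
}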
---|
...

	halg = &ahash->halg;
	halg->digestsize = tmpl->digest_size;
+	halg->statesize = tmpl->statesize;

	base = &halg->base;
	snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
...
		}
	}
	for (i = 0; i < NUM_CIPHER_TMPLS; i++) {
-		err = __n2_register_one_cipher(&cipher_tmpls[i]);
+		err = __n2_register_one_skcipher(&skcipher_tmpls[i]);
		if (err) {
			__n2_unregister_algs();
			goto out;