@@ -1,12 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * Intel IXP4xx NPE-C crypto driver
  *
  * Copyright (C) 2008 Christian Hohnstaedt <chohnstaedt@innominate.com>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of version 2 of the GNU General Public License
- * as published by the Free Software Foundation.
- *
  */
 
 #include <linux/platform_device.h>
@@ -21,17 +17,18 @@
 #include <linux/module.h>
 
 #include <crypto/ctr.h>
-#include <crypto/des.h>
+#include <crypto/internal/des.h>
 #include <crypto/aes.h>
 #include <crypto/hmac.h>
 #include <crypto/sha.h>
 #include <crypto/algapi.h>
 #include <crypto/internal/aead.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/authenc.h>
 #include <crypto/scatterwalk.h>
 
-#include <mach/npe.h>
-#include <mach/qmgr.h>
+#include <linux/soc/ixp4xx/npe.h>
+#include <linux/soc/ixp4xx/qmgr.h>
 
 #define MAX_KEYLEN 32
 
@@ -104,7 +101,7 @@
 	u16 pkt_len;
 	u16 buf_len;
 #endif
-	u32 phys_addr;
+	dma_addr_t phys_addr;
 	u32 __reserved[4];
 	struct buffer_desc *next;
 	enum dma_data_direction dir;
@@ -121,9 +118,9 @@
 	u8 mode;		/* NPE_OP_* operation mode */
 #endif
 	u8 iv[MAX_IVLEN];	/* IV for CBC mode or CTR IV for CTR mode */
-	u32 icv_rev_aes;	/* icv or rev aes */
-	u32 src_buf;
-	u32 dst_buf;
+	dma_addr_t icv_rev_aes;	/* icv or rev aes */
+	dma_addr_t src_buf;
+	dma_addr_t dst_buf;
 #ifdef __ARMEB__
 	u16 auth_offs;		/* Authentication start offset */
 	u16 auth_len;		/* Authentication data length */
@@ -141,7 +138,7 @@
 	/* Used by Host: 4*4 bytes*/
 	unsigned ctl_flags;
 	union {
-		struct ablkcipher_request *ablk_req;
+		struct skcipher_request *ablk_req;
 		struct aead_request *aead_req;
 		struct crypto_tfm *tfm;
 	} data;
@@ -152,6 +149,8 @@
 struct ablk_ctx {
 	struct buffer_desc *src;
 	struct buffer_desc *dst;
+	u8 iv[MAX_IVLEN];
+	bool encrypt;
 };
 
 struct aead_ctx {
@@ -190,7 +189,7 @@
 };
 
 struct ixp_alg {
-	struct crypto_alg crypto;
+	struct skcipher_alg crypto;
 	const struct ix_hash_algo *hash;
 	u32 cfg_enc;
 	u32 cfg_dec;
@@ -243,26 +242,26 @@
 
 static inline u32 cipher_cfg_enc(struct crypto_tfm *tfm)
 {
-	return container_of(tfm->__crt_alg, struct ixp_alg,crypto)->cfg_enc;
+	return container_of(tfm->__crt_alg, struct ixp_alg,crypto.base)->cfg_enc;
 }
 
 static inline u32 cipher_cfg_dec(struct crypto_tfm *tfm)
 {
-	return container_of(tfm->__crt_alg, struct ixp_alg,crypto)->cfg_dec;
+	return container_of(tfm->__crt_alg, struct ixp_alg,crypto.base)->cfg_dec;
 }
 
 static inline const struct ix_hash_algo *ix_hash(struct crypto_tfm *tfm)
 {
-	return container_of(tfm->__crt_alg, struct ixp_alg, crypto)->hash;
+	return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->hash;
 }
 
 static int setup_crypt_desc(void)
 {
 	struct device *dev = &pdev->dev;
 	BUILD_BUG_ON(sizeof(struct crypt_ctl) != 64);
-	crypt_virt = dma_zalloc_coherent(dev,
-					 NPE_QLEN * sizeof(struct crypt_ctl),
-					 &crypt_phys, GFP_ATOMIC);
+	crypt_virt = dma_alloc_coherent(dev,
+					NPE_QLEN * sizeof(struct crypt_ctl),
+					&crypt_phys, GFP_ATOMIC);
 	if (!crypt_virt)
 		return -ENOMEM;
 	return 0;
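The three container_of() changes in the hunk above follow from struct ixp_alg now embedding a struct skcipher_alg, which in turn embeds the generic struct crypto_alg as its .base member; tfm->__crt_alg still points at that innermost crypto_alg, so the member designator has to become crypto.base. A minimal user-space sketch of the same two-level container_of() pattern (the struct and field names here are illustrative stand-ins, not taken from the driver):

```c
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct base_alg  { const char *name; };                 /* stands in for struct crypto_alg */
struct outer_alg { struct base_alg base; int flags; };  /* stands in for struct skcipher_alg */
struct drv_alg   { struct outer_alg crypto; int cfg; }; /* stands in for struct ixp_alg */

int main(void)
{
	struct drv_alg alg = { .crypto.base.name = "demo", .cfg = 42 };
	struct base_alg *p = &alg.crypto.base;	/* what tfm->__crt_alg corresponds to */

	/* Walking back up needs the full member path, crypto.base: */
	struct drv_alg *d = container_of(p, struct drv_alg, crypto.base);

	printf("%s cfg=%d\n", d->crypto.base.name, d->cfg);
	return 0;
}
```

cipher_cfg_enc(), cipher_cfg_dec() and ix_hash() above all perform exactly this walk.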
@@ -324,7 +323,8 @@
 	}
 }
 
-static void free_buf_chain(struct device *dev, struct buffer_desc *buf,u32 phys)
+static void free_buf_chain(struct device *dev, struct buffer_desc *buf,
+			   dma_addr_t phys)
 {
 	while (buf) {
 		struct buffer_desc *buf1;
@@ -381,8 +381,22 @@
 		break;
 	}
 	case CTL_FLAG_PERFORM_ABLK: {
-		struct ablkcipher_request *req = crypt->data.ablk_req;
-		struct ablk_ctx *req_ctx = ablkcipher_request_ctx(req);
+		struct skcipher_request *req = crypt->data.ablk_req;
+		struct ablk_ctx *req_ctx = skcipher_request_ctx(req);
+		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+		unsigned int ivsize = crypto_skcipher_ivsize(tfm);
+		unsigned int offset;
+
+		if (ivsize > 0) {
+			offset = req->cryptlen - ivsize;
+			if (req_ctx->encrypt) {
+				scatterwalk_map_and_copy(req->iv, req->dst,
+							 offset, ivsize, 0);
+			} else {
+				memcpy(req->iv, req_ctx->iv, ivsize);
+				memzero_explicit(req_ctx->iv, ivsize);
+			}
+		}
 
 		if (req_ctx->dst) {
 			free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
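The block added above implements the skcipher completion contract for the IV: when a request finishes, req->iv is expected to hold the output IV that a chained follow-on request would use. For CBC that is the last ciphertext block, so the encrypt path can simply read it back from the destination, while the decrypt path has to rely on a copy stashed before the NPE ran (the matching hunk in ablk_perform() further down stores it in req_ctx->iv), because an in-place decryption overwrites that block. A condensed sketch of the two halves of that contract; the helper names are illustrative, not part of the driver:

```c
/* Before queueing a CBC decryption: remember the last ciphertext block,
 * since an in-place operation will overwrite it (cf. ablk_perform()). */
static void stash_output_iv(struct skcipher_request *req, u8 *stash,
			    unsigned int ivsize)
{
	scatterwalk_map_and_copy(stash, req->src,
				 req->cryptlen - ivsize, ivsize, 0);
}

/* On completion: publish the chaining IV through req->iv. */
static void publish_output_iv(struct skcipher_request *req, const u8 *stash,
			      bool encrypt, unsigned int ivsize)
{
	if (encrypt)	/* last ciphertext block now sits in the destination */
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->cryptlen - ivsize, ivsize, 0);
	else		/* use the block saved before decryption started */
		memcpy(req->iv, stash, ivsize);
}
```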
@@ -574,10 +588,10 @@
 	return ret;
 }
 
-static int init_tfm_ablk(struct crypto_tfm *tfm)
+static int init_tfm_ablk(struct crypto_skcipher *tfm)
 {
-	tfm->crt_ablkcipher.reqsize = sizeof(struct ablk_ctx);
-	return init_tfm(tfm);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct ablk_ctx));
+	return init_tfm(crypto_skcipher_tfm(tfm));
 }
 
 static int init_tfm_aead(struct crypto_aead *tfm)
@@ -593,6 +607,11 @@
 	free_sa_dir(&ctx->decrypt);
 }
 
+static void exit_tfm_ablk(struct crypto_skcipher *tfm)
+{
+	exit_tfm(crypto_skcipher_tfm(tfm));
+}
+
 static void exit_tfm_aead(struct crypto_aead *tfm)
 {
 	exit_tfm(crypto_aead_tfm(tfm));
@@ -606,7 +625,7 @@
 	struct buffer_desc *buf;
 	int i;
 	u8 *pad;
-	u32 pad_phys, buf_phys;
+	dma_addr_t pad_phys, buf_phys;
 
 	BUILD_BUG_ON(NPE_CTX_LEN < HMAC_PAD_BLOCKLEN);
 	pad = dma_pool_alloc(ctx_pool, GFP_KERNEL, &pad_phys);
@@ -737,7 +756,7 @@
 	u32 keylen_cfg = 0;
 	struct ix_sa_dir *dir;
 	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
-	u32 *flags = &tfm->crt_flags;
+	int err;
 
 	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
 	cinfo = dir->npe_ctx;
@@ -754,23 +773,13 @@
 		case 24: keylen_cfg = MOD_AES192; break;
 		case 32: keylen_cfg = MOD_AES256; break;
 		default:
-			*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 			return -EINVAL;
 		}
 		cipher_cfg |= keylen_cfg;
-	} else if (cipher_cfg & MOD_3DES) {
-		const u32 *K = (const u32 *)key;
-		if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
-			     !((K[2] ^ K[4]) | (K[3] ^ K[5]))))
-		{
-			*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED;
-			return -EINVAL;
-		}
 	} else {
-		u32 tmp[DES_EXPKEY_WORDS];
-		if (des_ekey(tmp, key) == 0) {
-			*flags |= CRYPTO_TFM_RES_WEAK_KEY;
-		}
+		err = crypto_des_verify_key(tfm, key);
+		if (err)
+			return err;
 	}
 	/* write cfg word to cryptinfo */
 	*(u32*)cinfo = cpu_to_be32(cipher_cfg);
@@ -799,7 +808,7 @@
 	for (; nbytes > 0; sg = sg_next(sg)) {
 		unsigned len = min(nbytes, sg->length);
 		struct buffer_desc *next_buf;
-		u32 next_buf_phys;
+		dma_addr_t next_buf_phys;
 		void *ptr;
 
 		nbytes -= len;
@@ -823,11 +832,10 @@
 	return buf;
 }
 
-static int ablk_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int ablk_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			unsigned int key_len)
 {
-	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-	u32 *flags = &tfm->base.crt_flags;
+	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int ret;
 
 	init_completion(&ctx->completion);
@@ -843,26 +851,23 @@
 	if (ret)
 		goto out;
 	ret = setup_cipher(&tfm->base, 1, key, key_len);
-	if (ret)
-		goto out;
-
-	if (*flags & CRYPTO_TFM_RES_WEAK_KEY) {
-		if (*flags & CRYPTO_TFM_REQ_WEAK_KEY) {
-			ret = -EINVAL;
-		} else {
-			*flags &= ~CRYPTO_TFM_RES_WEAK_KEY;
-		}
-	}
 out:
 	if (!atomic_dec_and_test(&ctx->configuring))
 		wait_for_completion(&ctx->completion);
 	return ret;
 }
 
-static int ablk_rfc3686_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int ablk_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
+			    unsigned int key_len)
+{
+	return verify_skcipher_des3_key(tfm, key) ?:
+	       ablk_setkey(tfm, key, key_len);
+}
+
+static int ablk_rfc3686_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			unsigned int key_len)
 {
-	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	/* the nonce is stored in bytes at end of key */
 	if (key_len < CTR_RFC3686_NONCE_SIZE)
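The CRYPTO_TFM_RES_WEAK_KEY / CRYPTO_TFM_REQ_WEAK_KEY juggling removed here (and in setup_cipher() above and aead_setup() below) is now folded into the return value of the helpers from <crypto/internal/des.h>: crypto_des_verify_key(), verify_skcipher_des3_key() and verify_aead_des3_key(). Roughly, weak DES keys only make setkey fail when the caller set CRYPTO_TFM_REQ_FORBID_WEAK_KEYS, and the 3DES variants also reject keys that degenerate into single DES (K1 == K2 or K2 == K3) when weak keys are forbidden or FIPS mode is enabled. A sketch that approximates the single-DES helper's semantics (not a verbatim copy of the kernel implementation):

```c
/* Approximation of crypto_des_verify_key(): des_expand_key() from
 * <crypto/des.h> reports a weak key as -ENOKEY, which only turns into a
 * hard setkey error if weak keys were explicitly forbidden. */
static int sketch_des_verify_key(struct crypto_tfm *tfm, const u8 *key)
{
	struct des_ctx tmp;
	int err;

	err = des_expand_key(&tmp, key, DES_KEY_SIZE);
	if (err == -ENOKEY) {
		if (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)
			err = -EINVAL;
		else
			err = 0;
	}

	memzero_explicit(&tmp, sizeof(tmp));
	return err;
}
```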
@@ -875,17 +880,18 @@
 	return ablk_setkey(tfm, key, key_len);
 }
 
-static int ablk_perform(struct ablkcipher_request *req, int encrypt)
+static int ablk_perform(struct skcipher_request *req, int encrypt)
 {
-	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
-	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-	unsigned ivsize = crypto_ablkcipher_ivsize(tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
+	unsigned ivsize = crypto_skcipher_ivsize(tfm);
 	struct ix_sa_dir *dir;
 	struct crypt_ctl *crypt;
-	unsigned int nbytes = req->nbytes;
+	unsigned int nbytes = req->cryptlen;
 	enum dma_data_direction src_direction = DMA_BIDIRECTIONAL;
-	struct ablk_ctx *req_ctx = ablkcipher_request_ctx(req);
+	struct ablk_ctx *req_ctx = skcipher_request_ctx(req);
 	struct buffer_desc src_hook;
 	struct device *dev = &pdev->dev;
+	unsigned int offset;
 	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
 				GFP_KERNEL : GFP_ATOMIC;
@@ -896,6 +902,7 @@
 		return -EAGAIN;
 
 	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
+	req_ctx->encrypt = encrypt;
 
 	crypt = get_crypt_desc();
 	if (!crypt)
@@ -909,8 +916,12 @@
 	crypt->crypt_offs = 0;
 	crypt->crypt_len = nbytes;
 
-	BUG_ON(ivsize && !req->info);
-	memcpy(crypt->iv, req->info, ivsize);
+	BUG_ON(ivsize && !req->iv);
+	memcpy(crypt->iv, req->iv, ivsize);
+	if (ivsize > 0 && !encrypt) {
+		offset = req->cryptlen - ivsize;
+		scatterwalk_map_and_copy(req_ctx->iv, req->src, offset, ivsize, 0);
+	}
 	if (req->src != req->dst) {
 		struct buffer_desc dst_hook;
 		crypt->mode |= NPE_OP_NOT_IN_PLACE;
@@ -948,22 +959,22 @@
 	return -ENOMEM;
 }
 
-static int ablk_encrypt(struct ablkcipher_request *req)
+static int ablk_encrypt(struct skcipher_request *req)
 {
 	return ablk_perform(req, 1);
 }
 
-static int ablk_decrypt(struct ablkcipher_request *req)
+static int ablk_decrypt(struct skcipher_request *req)
 {
 	return ablk_perform(req, 0);
 }
 
-static int ablk_rfc3686_crypt(struct ablkcipher_request *req)
+static int ablk_rfc3686_crypt(struct skcipher_request *req)
 {
-	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
-	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
 	u8 iv[CTR_RFC3686_BLOCK_SIZE];
-	u8 *info = req->info;
+	u8 *info = req->iv;
 	int ret;
 
 	/* set up counter block */
@@ -974,9 +985,9 @@
 	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
 		cpu_to_be32(1);
 
-	req->info = iv;
+	req->iv = iv;
 	ret = ablk_perform(req, 1);
-	req->info = info;
+	req->iv = info;
 	return ret;
 }
 
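For reference, the counter block assembled by ablk_rfc3686_crypt() above is the standard RFC 3686 layout: the 4-byte nonce that ablk_rfc3686_setkey() peeled off the end of the key, the 8-byte per-request IV, and a 32-bit big-endian block counter starting at 1 (CTR_RFC3686_NONCE_SIZE, CTR_RFC3686_IV_SIZE and CTR_RFC3686_BLOCK_SIZE are 4, 8 and 16 in <crypto/ctr.h>). A self-contained user-space sketch of that layout, with illustrative names:

```c
#include <stdint.h>
#include <string.h>
#include <arpa/inet.h>	/* htonl() for the big-endian counter */

/* Builds the 16-byte RFC 3686 counter block the same way the driver does:
 * nonce (4 bytes) + explicit IV (8 bytes) + 32-bit counter starting at 1. */
static void rfc3686_counter_block(uint8_t blk[16],
				  const uint8_t nonce[4], const uint8_t iv[8])
{
	uint32_t one = htonl(1);

	memcpy(blk, nonce, 4);		/* CTR_RFC3686_NONCE_SIZE */
	memcpy(blk + 4, iv, 8);		/* CTR_RFC3686_IV_SIZE */
	memcpy(blk + 12, &one, 4);	/* initial block counter, big endian */
}
```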
@@ -1097,7 +1108,6 @@
 static int aead_setup(struct crypto_aead *tfm, unsigned int authsize)
 {
 	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
-	u32 *flags = &tfm->base.crt_flags;
 	unsigned digest_len = crypto_aead_maxauthsize(tfm);
 	int ret;
 
@@ -1121,17 +1131,6 @@
 		goto out;
 	ret = setup_auth(&tfm->base, 1, authsize, ctx->authkey,
 			ctx->authkey_len, digest_len);
-	if (ret)
-		goto out;
-
-	if (*flags & CRYPTO_TFM_RES_WEAK_KEY) {
-		if (*flags & CRYPTO_TFM_REQ_WEAK_KEY) {
-			ret = -EINVAL;
-			goto out;
-		} else {
-			*flags &= ~CRYPTO_TFM_RES_WEAK_KEY;
-		}
-	}
 out:
 	if (!atomic_dec_and_test(&ctx->configuring))
 		wait_for_completion(&ctx->completion);
@@ -1170,9 +1169,39 @@
 	memzero_explicit(&keys, sizeof(keys));
 	return aead_setup(tfm, crypto_aead_authsize(tfm));
 badkey:
-	crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 	memzero_explicit(&keys, sizeof(keys));
 	return -EINVAL;
+}
+
+static int des3_aead_setkey(struct crypto_aead *tfm, const u8 *key,
+			    unsigned int keylen)
+{
+	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
+	struct crypto_authenc_keys keys;
+	int err;
+
+	err = crypto_authenc_extractkeys(&keys, key, keylen);
+	if (unlikely(err))
+		goto badkey;
+
+	err = -EINVAL;
+	if (keys.authkeylen > sizeof(ctx->authkey))
+		goto badkey;
+
+	err = verify_aead_des3_key(tfm, keys.enckey, keys.enckeylen);
+	if (err)
+		goto badkey;
+
+	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
+	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
+	ctx->authkey_len = keys.authkeylen;
+	ctx->enckey_len = keys.enckeylen;
+
+	memzero_explicit(&keys, sizeof(keys));
+	return aead_setup(tfm, crypto_aead_authsize(tfm));
+badkey:
+	memzero_explicit(&keys, sizeof(keys));
+	return err;
 }
 
 static int aead_encrypt(struct aead_request *req)
@@ -1188,110 +1217,91 @@
 static struct ixp_alg ixp4xx_algos[] = {
 {
 	.crypto = {
-		.cra_name = "cbc(des)",
-		.cra_blocksize = DES_BLOCK_SIZE,
-		.cra_u = { .ablkcipher = {
-			.min_keysize = DES_KEY_SIZE,
-			.max_keysize = DES_KEY_SIZE,
-			.ivsize = DES_BLOCK_SIZE,
-			.geniv = "eseqiv",
-			}
-		}
+		.base.cra_name = "cbc(des)",
+		.base.cra_blocksize = DES_BLOCK_SIZE,
+
+		.min_keysize = DES_KEY_SIZE,
+		.max_keysize = DES_KEY_SIZE,
+		.ivsize = DES_BLOCK_SIZE,
 	},
 	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
 	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
 
 }, {
 	.crypto = {
-		.cra_name = "ecb(des)",
-		.cra_blocksize = DES_BLOCK_SIZE,
-		.cra_u = { .ablkcipher = {
-			.min_keysize = DES_KEY_SIZE,
-			.max_keysize = DES_KEY_SIZE,
-			}
-		}
+		.base.cra_name = "ecb(des)",
+		.base.cra_blocksize = DES_BLOCK_SIZE,
+		.min_keysize = DES_KEY_SIZE,
+		.max_keysize = DES_KEY_SIZE,
 	},
 	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_ECB | KEYLEN_192,
 	.cfg_dec = CIPH_DECR | MOD_DES | MOD_ECB | KEYLEN_192,
 }, {
 	.crypto = {
-		.cra_name = "cbc(des3_ede)",
-		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
-		.cra_u = { .ablkcipher = {
-			.min_keysize = DES3_EDE_KEY_SIZE,
-			.max_keysize = DES3_EDE_KEY_SIZE,
-			.ivsize = DES3_EDE_BLOCK_SIZE,
-			.geniv = "eseqiv",
-			}
-		}
+		.base.cra_name = "cbc(des3_ede)",
+		.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
+
+		.min_keysize = DES3_EDE_KEY_SIZE,
+		.max_keysize = DES3_EDE_KEY_SIZE,
+		.ivsize = DES3_EDE_BLOCK_SIZE,
+		.setkey = ablk_des3_setkey,
 	},
 	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
 	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
 }, {
 	.crypto = {
-		.cra_name = "ecb(des3_ede)",
-		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
-		.cra_u = { .ablkcipher = {
-			.min_keysize = DES3_EDE_KEY_SIZE,
-			.max_keysize = DES3_EDE_KEY_SIZE,
-			}
-		}
+		.base.cra_name = "ecb(des3_ede)",
+		.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
+
+		.min_keysize = DES3_EDE_KEY_SIZE,
+		.max_keysize = DES3_EDE_KEY_SIZE,
+		.setkey = ablk_des3_setkey,
 	},
 	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_ECB | KEYLEN_192,
 	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_ECB | KEYLEN_192,
 }, {
 	.crypto = {
-		.cra_name = "cbc(aes)",
-		.cra_blocksize = AES_BLOCK_SIZE,
-		.cra_u = { .ablkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			.geniv = "eseqiv",
-			}
-		}
+		.base.cra_name = "cbc(aes)",
+		.base.cra_blocksize = AES_BLOCK_SIZE,
+
+		.min_keysize = AES_MIN_KEY_SIZE,
+		.max_keysize = AES_MAX_KEY_SIZE,
+		.ivsize = AES_BLOCK_SIZE,
 	},
 	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
 	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
 }, {
 	.crypto = {
-		.cra_name = "ecb(aes)",
-		.cra_blocksize = AES_BLOCK_SIZE,
-		.cra_u = { .ablkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			}
-		}
+		.base.cra_name = "ecb(aes)",
+		.base.cra_blocksize = AES_BLOCK_SIZE,
+
+		.min_keysize = AES_MIN_KEY_SIZE,
+		.max_keysize = AES_MAX_KEY_SIZE,
 	},
 	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_ECB,
 	.cfg_dec = CIPH_DECR | MOD_AES | MOD_ECB,
}, {
 	.crypto = {
-		.cra_name = "ctr(aes)",
-		.cra_blocksize = AES_BLOCK_SIZE,
-		.cra_u = { .ablkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			.geniv = "eseqiv",
-			}
-		}
+		.base.cra_name = "ctr(aes)",
+		.base.cra_blocksize = 1,
+
+		.min_keysize = AES_MIN_KEY_SIZE,
+		.max_keysize = AES_MAX_KEY_SIZE,
+		.ivsize = AES_BLOCK_SIZE,
 	},
 	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CTR,
 	.cfg_dec = CIPH_ENCR | MOD_AES | MOD_CTR,
 }, {
 	.crypto = {
-		.cra_name = "rfc3686(ctr(aes))",
-		.cra_blocksize = AES_BLOCK_SIZE,
-		.cra_u = { .ablkcipher = {
-			.min_keysize = AES_MIN_KEY_SIZE,
-			.max_keysize = AES_MAX_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			.geniv = "eseqiv",
-			.setkey = ablk_rfc3686_setkey,
-			.encrypt = ablk_rfc3686_crypt,
-			.decrypt = ablk_rfc3686_crypt }
-		}
+		.base.cra_name = "rfc3686(ctr(aes))",
+		.base.cra_blocksize = 1,
+
+		.min_keysize = AES_MIN_KEY_SIZE,
+		.max_keysize = AES_MAX_KEY_SIZE,
+		.ivsize = AES_BLOCK_SIZE,
+		.setkey = ablk_rfc3686_setkey,
+		.encrypt = ablk_rfc3686_crypt,
+		.decrypt = ablk_rfc3686_crypt,
 	},
 	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CTR,
 	.cfg_dec = CIPH_ENCR | MOD_AES | MOD_CTR,
@@ -1318,6 +1328,7 @@
 		},
 		.ivsize = DES3_EDE_BLOCK_SIZE,
 		.maxauthsize = MD5_DIGEST_SIZE,
+		.setkey = des3_aead_setkey,
 	},
 	.hash = &hash_alg_md5,
 	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
@@ -1342,6 +1353,7 @@
 		},
 		.ivsize = DES3_EDE_BLOCK_SIZE,
 		.maxauthsize = SHA1_DIGEST_SIZE,
+		.setkey = des3_aead_setkey,
 	},
 	.hash = &hash_alg_sha1,
 	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
@@ -1398,10 +1410,10 @@
 		return err;
 	}
 	for (i=0; i< num; i++) {
-		struct crypto_alg *cra = &ixp4xx_algos[i].crypto;
+		struct skcipher_alg *cra = &ixp4xx_algos[i].crypto;
 
-		if (snprintf(cra->cra_driver_name, CRYPTO_MAX_ALG_NAME,
-			"%s"IXP_POSTFIX, cra->cra_name) >=
+		if (snprintf(cra->base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+			"%s"IXP_POSTFIX, cra->base.cra_name) >=
 			CRYPTO_MAX_ALG_NAME)
 		{
 			continue;
@@ -1411,26 +1423,25 @@
 		}
 
 		/* block ciphers */
-		cra->cra_type = &crypto_ablkcipher_type;
-		cra->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-				 CRYPTO_ALG_KERN_DRIVER_ONLY |
-				 CRYPTO_ALG_ASYNC;
-		if (!cra->cra_ablkcipher.setkey)
-			cra->cra_ablkcipher.setkey = ablk_setkey;
-		if (!cra->cra_ablkcipher.encrypt)
-			cra->cra_ablkcipher.encrypt = ablk_encrypt;
-		if (!cra->cra_ablkcipher.decrypt)
-			cra->cra_ablkcipher.decrypt = ablk_decrypt;
-		cra->cra_init = init_tfm_ablk;
+		cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
+				      CRYPTO_ALG_ASYNC |
+				      CRYPTO_ALG_ALLOCATES_MEMORY;
+		if (!cra->setkey)
+			cra->setkey = ablk_setkey;
+		if (!cra->encrypt)
+			cra->encrypt = ablk_encrypt;
+		if (!cra->decrypt)
+			cra->decrypt = ablk_decrypt;
+		cra->init = init_tfm_ablk;
+		cra->exit = exit_tfm_ablk;
 
-		cra->cra_ctxsize = sizeof(struct ixp_ctx);
-		cra->cra_module = THIS_MODULE;
-		cra->cra_alignmask = 3;
-		cra->cra_priority = 300;
-		cra->cra_exit = exit_tfm;
-		if (crypto_register_alg(cra))
+		cra->base.cra_ctxsize = sizeof(struct ixp_ctx);
+		cra->base.cra_module = THIS_MODULE;
+		cra->base.cra_alignmask = 3;
+		cra->base.cra_priority = 300;
+		if (crypto_register_skcipher(cra))
 			printk(KERN_ERR "Failed to register '%s'\n",
-				cra->cra_name);
+			       cra->base.cra_name);
 		else
 			ixp4xx_algos[i].registered = 1;
 	}
@@ -1447,8 +1458,9 @@
 
 		/* authenc */
 		cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
-				      CRYPTO_ALG_ASYNC;
-		cra->setkey = aead_setkey;
+				      CRYPTO_ALG_ASYNC |
+				      CRYPTO_ALG_ALLOCATES_MEMORY;
+		cra->setkey = cra->setkey ?: aead_setkey;
 		cra->setauthsize = aead_setauthsize;
 		cra->encrypt = aead_encrypt;
 		cra->decrypt = aead_decrypt;
@@ -1481,7 +1493,7 @@
 
 	for (i=0; i< num; i++) {
 		if (ixp4xx_algos[i].registered)
-			crypto_unregister_alg(&ixp4xx_algos[i].crypto);
+			crypto_unregister_skcipher(&ixp4xx_algos[i].crypto);
 	}
 	release_ixp_crypto(&pdev->dev);
 	platform_device_unregister(pdev);
|---|