From f9004dbfff8a3fbbd7e2a88c8a4327c7f2f8e5b2 Mon Sep 17 00:00:00 2001
From: hc <hc@nodka.com>
Date: Wed, 31 Jan 2024 01:04:47 +0000
Subject: [PATCH] crypto: bcm - convert ablkcipher to skcipher, drop RC4

Convert the iProc SPU driver from the removed ablkcipher interface to
skcipher, switch DES/3DES key validation to the crypto library helpers,
drop RC4 support, replace the GPL boilerplate with an SPDX tag, mark
algorithms CRYPTO_ALG_ALLOCATES_MEMORY, and apply minor cleanups
(static annotations, dev_err(dev), removal of dead initializers).
---
 kernel/drivers/crypto/bcm/cipher.c | 673 +++++++++++++++-------------------------------
 1 file changed, 269 insertions(+), 404 deletions(-)

diff --git a/kernel/drivers/crypto/bcm/cipher.c b/kernel/drivers/crypto/bcm/cipher.c
index c63992f..1cb310a 100644
--- a/kernel/drivers/crypto/bcm/cipher.c
+++ b/kernel/drivers/crypto/bcm/cipher.c
@@ -1,17 +1,6 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * Copyright 2016 Broadcom
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License, version 2, as
- * published by the Free Software Foundation (the "GPL").
- *
- * This program is distributed in the hope that it will be useful, but
- * WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * General Public License version 2 (GPLv2) for more details.
- *
- * You should have received a copy of the GNU General Public License
- * version 2 (GPLv2) along with this source code.
  */

 #include <linux/err.h>
@@ -35,7 +24,7 @@
 #include <crypto/aead.h>
 #include <crypto/internal/aead.h>
 #include <crypto/aes.h>
-#include <crypto/des.h>
+#include <crypto/internal/des.h>
 #include <crypto/hmac.h>
 #include <crypto/sha.h>
 #include <crypto/md5.h>
@@ -96,7 +85,7 @@
  * 0x70 - ring 2
  * 0x78 - ring 3
  */
-char BCMHEADER[] = { 0x60, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x28 };
+static char BCMHEADER[] = { 0x60, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x28 };
 /*
  * Some SPU hw does not use BCM header on SPU messages. So BCM_HDR_LEN
  * is set dynamically after reading SPU type from device tree.
@@ -121,8 +110,8 @@
 }

 /**
- * spu_ablkcipher_rx_sg_create() - Build up the scatterlist of buffers used to
- * receive a SPU response message for an ablkcipher request. Includes buffers to
+ * spu_skcipher_rx_sg_create() - Build up the scatterlist of buffers used to
+ * receive a SPU response message for an skcipher request. Includes buffers to
  * catch SPU message headers and the response data.
  * @mssg: mailbox message containing the receive sg
  * @rctx: crypto request context
@@ -141,7 +130,7 @@
  * < 0 if an error
  */
 static int
-spu_ablkcipher_rx_sg_create(struct brcm_message *mssg,
+spu_skcipher_rx_sg_create(struct brcm_message *mssg,
 			    struct iproc_reqctx_s *rctx,
 			    u8 rx_frag_num,
 			    unsigned int chunksize, u32 stat_pad_len)
@@ -176,10 +165,6 @@
 		return -EFAULT;
 	}

-	if (ctx->cipher.alg == CIPHER_ALG_RC4)
-		/* Add buffer to catch 260-byte SUPDT field for RC4 */
-		sg_set_buf(sg++, rctx->msg_buf.c.supdt_tweak, SPU_SUPDT_LEN);
-
 	if (stat_pad_len)
 		sg_set_buf(sg++, rctx->msg_buf.rx_stat_pad, stat_pad_len);

@@ -190,8 +175,8 @@
 }

 /**
- * spu_ablkcipher_tx_sg_create() - Build up the scatterlist of buffers used to
- * send a SPU request message for an ablkcipher request. Includes SPU message
+ * spu_skcipher_tx_sg_create() - Build up the scatterlist of buffers used to
+ * send a SPU request message for an skcipher request. Includes SPU message
 * headers and the request data. 
* @mssg: mailbox message containing the transmit sg * @rctx: crypto request context @@ -209,7 +194,7 @@ * < 0 if an error */ static int -spu_ablkcipher_tx_sg_create(struct brcm_message *mssg, +spu_skcipher_tx_sg_create(struct brcm_message *mssg, struct iproc_reqctx_s *rctx, u8 tx_frag_num, unsigned int chunksize, u32 pad_len) { @@ -294,7 +279,7 @@ } /** - * handle_ablkcipher_req() - Submit as much of a block cipher request as fits in + * handle_skcipher_req() - Submit as much of a block cipher request as fits in * a single SPU request message, starting at the current position in the request * data. * @rctx: Crypto request context @@ -311,24 +296,23 @@ * asynchronously * Any other value indicates an error */ -static int handle_ablkcipher_req(struct iproc_reqctx_s *rctx) +static int handle_skcipher_req(struct iproc_reqctx_s *rctx) { struct spu_hw *spu = &iproc_priv.spu; struct crypto_async_request *areq = rctx->parent; - struct ablkcipher_request *req = - container_of(areq, struct ablkcipher_request, base); + struct skcipher_request *req = + container_of(areq, struct skcipher_request, base); struct iproc_ctx_s *ctx = rctx->ctx; struct spu_cipher_parms cipher_parms; - int err = 0; - unsigned int chunksize = 0; /* Num bytes of request to submit */ - int remaining = 0; /* Bytes of request still to process */ + int err; + unsigned int chunksize; /* Num bytes of request to submit */ + int remaining; /* Bytes of request still to process */ int chunk_start; /* Beginning of data for current SPU msg */ /* IV or ctr value to use in this SPU msg */ u8 local_iv_ctr[MAX_IV_SIZE]; u32 stat_pad_len; /* num bytes to align status field */ u32 pad_len; /* total length of all padding */ - bool update_key = false; struct brcm_message *mssg; /* mailbox message */ /* number of entries in src and dst sg in mailbox message. */ @@ -402,28 +386,6 @@ } } - if (ctx->cipher.alg == CIPHER_ALG_RC4) { - rx_frag_num++; - if (chunk_start) { - /* - * for non-first RC4 chunks, use SUPDT from previous - * response as key for this chunk. - */ - cipher_parms.key_buf = rctx->msg_buf.c.supdt_tweak; - update_key = true; - cipher_parms.type = CIPHER_TYPE_UPDT; - } else if (!rctx->is_encrypt) { - /* - * First RC4 chunk. For decrypt, key in pre-built msg - * header may have been changed if encrypt required - * multiple chunks. So revert the key to the - * ctx->enckey value. - */ - update_key = true; - cipher_parms.type = CIPHER_TYPE_INIT; - } - } - if (ctx->max_payload == SPU_MAX_PAYLOAD_INF) flow_log("max_payload infinite\n"); else @@ -436,14 +398,9 @@ memcpy(rctx->msg_buf.bcm_spu_req_hdr, ctx->bcm_spu_req_hdr, sizeof(rctx->msg_buf.bcm_spu_req_hdr)); - /* - * Pass SUPDT field as key. Key field in finish() call is only used - * when update_key has been set above for RC4. Will be ignored in - * all other cases. 
- */ spu->spu_cipher_req_finish(rctx->msg_buf.bcm_spu_req_hdr + BCM_HDR_LEN, ctx->spu_req_hdr_len, !(rctx->is_encrypt), - &cipher_parms, update_key, chunksize); + &cipher_parms, chunksize); atomic64_add(chunksize, &iproc_priv.bytes_out); @@ -479,7 +436,7 @@ spu->spu_xts_tweak_in_payload()) rx_frag_num++; /* extra sg to insert tweak */ - err = spu_ablkcipher_rx_sg_create(mssg, rctx, rx_frag_num, chunksize, + err = spu_skcipher_rx_sg_create(mssg, rctx, rx_frag_num, chunksize, stat_pad_len); if (err) return err; @@ -493,7 +450,7 @@ spu->spu_xts_tweak_in_payload()) tx_frag_num++; /* extra sg to insert tweak */ - err = spu_ablkcipher_tx_sg_create(mssg, rctx, tx_frag_num, chunksize, + err = spu_skcipher_tx_sg_create(mssg, rctx, tx_frag_num, chunksize, pad_len); if (err) return err; @@ -506,16 +463,16 @@ } /** - * handle_ablkcipher_resp() - Process a block cipher SPU response. Updates the + * handle_skcipher_resp() - Process a block cipher SPU response. Updates the * total received count for the request and updates global stats. * @rctx: Crypto request context */ -static void handle_ablkcipher_resp(struct iproc_reqctx_s *rctx) +static void handle_skcipher_resp(struct iproc_reqctx_s *rctx) { struct spu_hw *spu = &iproc_priv.spu; #ifdef DEBUG struct crypto_async_request *areq = rctx->parent; - struct ablkcipher_request *req = ablkcipher_request_cast(areq); + struct skcipher_request *req = skcipher_request_cast(areq); #endif struct iproc_ctx_s *ctx = rctx->ctx; u32 payload_len; @@ -538,9 +495,6 @@ __func__, rctx->total_received, payload_len); dump_sg(req->dst, rctx->total_received, payload_len); - if (ctx->cipher.alg == CIPHER_ALG_RC4) - packet_dump(" supdt ", rctx->msg_buf.c.supdt_tweak, - SPU_SUPDT_LEN); rctx->total_received += payload_len; if (rctx->total_received == rctx->total_todo) { @@ -709,7 +663,7 @@ /* number of bytes still to be hashed in this req */ unsigned int nbytes_to_hash = 0; - int err = 0; + int err; unsigned int chunksize = 0; /* length of hash carry + new data */ /* * length of new data, not from hash carry, to be submitted in @@ -1675,7 +1629,7 @@ struct spu_hw *spu = &iproc_priv.spu; struct brcm_message *mssg = msg; struct iproc_reqctx_s *rctx; - int err = 0; + int err; rctx = mssg->ctx; if (unlikely(!rctx)) { @@ -1696,8 +1650,8 @@ /* Process the SPU response message */ switch (rctx->ctx->alg->type) { - case CRYPTO_ALG_TYPE_ABLKCIPHER: - handle_ablkcipher_resp(rctx); + case CRYPTO_ALG_TYPE_SKCIPHER: + handle_skcipher_resp(rctx); break; case CRYPTO_ALG_TYPE_AHASH: handle_ahash_resp(rctx); @@ -1719,8 +1673,8 @@ spu_chunk_cleanup(rctx); switch (rctx->ctx->alg->type) { - case CRYPTO_ALG_TYPE_ABLKCIPHER: - err = handle_ablkcipher_req(rctx); + case CRYPTO_ALG_TYPE_SKCIPHER: + err = handle_skcipher_req(rctx); break; case CRYPTO_ALG_TYPE_AHASH: err = handle_ahash_req(rctx); @@ -1750,7 +1704,7 @@ /* ==================== Kernel Cryptographic API ==================== */ /** - * ablkcipher_enqueue() - Handle ablkcipher encrypt or decrypt request. + * skcipher_enqueue() - Handle skcipher encrypt or decrypt request. 
* @req: Crypto API request * @encrypt: true if encrypting; false if decrypting * @@ -1758,11 +1712,11 @@ * asynchronously * < 0 if an error */ -static int ablkcipher_enqueue(struct ablkcipher_request *req, bool encrypt) +static int skcipher_enqueue(struct skcipher_request *req, bool encrypt) { - struct iproc_reqctx_s *rctx = ablkcipher_request_ctx(req); + struct iproc_reqctx_s *rctx = skcipher_request_ctx(req); struct iproc_ctx_s *ctx = - crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req)); + crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)); int err; flow_log("%s() enc:%u\n", __func__, encrypt); @@ -1772,7 +1726,7 @@ rctx->parent = &req->base; rctx->is_encrypt = encrypt; rctx->bd_suppress = false; - rctx->total_todo = req->nbytes; + rctx->total_todo = req->cryptlen; rctx->src_sent = 0; rctx->total_sent = 0; rctx->total_received = 0; @@ -1793,15 +1747,15 @@ ctx->cipher.mode == CIPHER_MODE_GCM || ctx->cipher.mode == CIPHER_MODE_CCM) { rctx->iv_ctr_len = - crypto_ablkcipher_ivsize(crypto_ablkcipher_reqtfm(req)); - memcpy(rctx->msg_buf.iv_ctr, req->info, rctx->iv_ctr_len); + crypto_skcipher_ivsize(crypto_skcipher_reqtfm(req)); + memcpy(rctx->msg_buf.iv_ctr, req->iv, rctx->iv_ctr_len); } else { rctx->iv_ctr_len = 0; } /* Choose a SPU to process this request */ rctx->chan_idx = select_channel(); - err = handle_ablkcipher_req(rctx); + err = handle_skcipher_req(rctx); if (err != -EINPROGRESS) /* synchronous result */ spu_chunk_cleanup(rctx); @@ -1809,58 +1763,38 @@ return err; } -static int des_setkey(struct crypto_ablkcipher *cipher, const u8 *key, +static int des_setkey(struct crypto_skcipher *cipher, const u8 *key, unsigned int keylen) { - struct iproc_ctx_s *ctx = crypto_ablkcipher_ctx(cipher); - u32 tmp[DES_EXPKEY_WORDS]; + struct iproc_ctx_s *ctx = crypto_skcipher_ctx(cipher); + int err; - if (keylen == DES_KEY_SIZE) { - if (des_ekey(tmp, key) == 0) { - if (crypto_ablkcipher_get_flags(cipher) & - CRYPTO_TFM_REQ_WEAK_KEY) { - u32 flags = CRYPTO_TFM_RES_WEAK_KEY; + err = verify_skcipher_des_key(cipher, key); + if (err) + return err; - crypto_ablkcipher_set_flags(cipher, flags); - return -EINVAL; - } - } - - ctx->cipher_type = CIPHER_TYPE_DES; - } else { - crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN); - return -EINVAL; - } + ctx->cipher_type = CIPHER_TYPE_DES; return 0; } -static int threedes_setkey(struct crypto_ablkcipher *cipher, const u8 *key, +static int threedes_setkey(struct crypto_skcipher *cipher, const u8 *key, unsigned int keylen) { - struct iproc_ctx_s *ctx = crypto_ablkcipher_ctx(cipher); + struct iproc_ctx_s *ctx = crypto_skcipher_ctx(cipher); + int err; - if (keylen == (DES_KEY_SIZE * 3)) { - const u32 *K = (const u32 *)key; - u32 flags = CRYPTO_TFM_RES_BAD_KEY_SCHED; + err = verify_skcipher_des3_key(cipher, key); + if (err) + return err; - if (!((K[0] ^ K[2]) | (K[1] ^ K[3])) || - !((K[2] ^ K[4]) | (K[3] ^ K[5]))) { - crypto_ablkcipher_set_flags(cipher, flags); - return -EINVAL; - } - - ctx->cipher_type = CIPHER_TYPE_3DES; - } else { - crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN); - return -EINVAL; - } + ctx->cipher_type = CIPHER_TYPE_3DES; return 0; } -static int aes_setkey(struct crypto_ablkcipher *cipher, const u8 *key, +static int aes_setkey(struct crypto_skcipher *cipher, const u8 *key, unsigned int keylen) { - struct iproc_ctx_s *ctx = crypto_ablkcipher_ctx(cipher); + struct iproc_ctx_s *ctx = crypto_skcipher_ctx(cipher); if (ctx->cipher.mode == CIPHER_MODE_XTS) /* XTS includes two keys of equal length */ @@ -1877,7 +1811,6 @@ 
ctx->cipher_type = CIPHER_TYPE_AES256; break; default: - crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN); return -EINVAL; } WARN_ON((ctx->max_payload != SPU_MAX_PAYLOAD_INF) && @@ -1885,36 +1818,16 @@ return 0; } -static int rc4_setkey(struct crypto_ablkcipher *cipher, const u8 *key, - unsigned int keylen) -{ - struct iproc_ctx_s *ctx = crypto_ablkcipher_ctx(cipher); - int i; - - ctx->enckeylen = ARC4_MAX_KEY_SIZE + ARC4_STATE_SIZE; - - ctx->enckey[0] = 0x00; /* 0x00 */ - ctx->enckey[1] = 0x00; /* i */ - ctx->enckey[2] = 0x00; /* 0x00 */ - ctx->enckey[3] = 0x00; /* j */ - for (i = 0; i < ARC4_MAX_KEY_SIZE; i++) - ctx->enckey[i + ARC4_STATE_SIZE] = key[i % keylen]; - - ctx->cipher_type = CIPHER_TYPE_INIT; - - return 0; -} - -static int ablkcipher_setkey(struct crypto_ablkcipher *cipher, const u8 *key, +static int skcipher_setkey(struct crypto_skcipher *cipher, const u8 *key, unsigned int keylen) { struct spu_hw *spu = &iproc_priv.spu; - struct iproc_ctx_s *ctx = crypto_ablkcipher_ctx(cipher); + struct iproc_ctx_s *ctx = crypto_skcipher_ctx(cipher); struct spu_cipher_parms cipher_parms; u32 alloc_len = 0; int err; - flow_log("ablkcipher_setkey() keylen: %d\n", keylen); + flow_log("skcipher_setkey() keylen: %d\n", keylen); flow_dump(" key: ", key, keylen); switch (ctx->cipher.alg) { @@ -1927,9 +1840,6 @@ case CIPHER_ALG_AES: err = aes_setkey(cipher, key, keylen); break; - case CIPHER_ALG_RC4: - err = rc4_setkey(cipher, key, keylen); - break; default: pr_err("%s() Error: unknown cipher alg\n", __func__); err = -EINVAL; @@ -1937,11 +1847,9 @@ if (err) return err; - /* RC4 already populated ctx->enkey */ - if (ctx->cipher.alg != CIPHER_ALG_RC4) { - memcpy(ctx->enckey, key, keylen); - ctx->enckeylen = keylen; - } + memcpy(ctx->enckey, key, keylen); + ctx->enckeylen = keylen; + /* SPU needs XTS keys in the reverse order the crypto API presents */ if ((ctx->cipher.alg == CIPHER_ALG_AES) && (ctx->cipher.mode == CIPHER_MODE_XTS)) { @@ -1957,7 +1865,7 @@ alloc_len = BCM_HDR_LEN + SPU2_HEADER_ALLOC_LEN; memset(ctx->bcm_spu_req_hdr, 0, alloc_len); cipher_parms.iv_buf = NULL; - cipher_parms.iv_len = crypto_ablkcipher_ivsize(cipher); + cipher_parms.iv_len = crypto_skcipher_ivsize(cipher); flow_log("%s: iv_len %u\n", __func__, cipher_parms.iv_len); cipher_parms.alg = ctx->cipher.alg; @@ -1981,17 +1889,17 @@ return 0; } -static int ablkcipher_encrypt(struct ablkcipher_request *req) +static int skcipher_encrypt(struct skcipher_request *req) { - flow_log("ablkcipher_encrypt() nbytes:%u\n", req->nbytes); + flow_log("skcipher_encrypt() nbytes:%u\n", req->cryptlen); - return ablkcipher_enqueue(req, true); + return skcipher_enqueue(req, true); } -static int ablkcipher_decrypt(struct ablkcipher_request *req) +static int skcipher_decrypt(struct skcipher_request *req) { - flow_log("ablkcipher_decrypt() nbytes:%u\n", req->nbytes); - return ablkcipher_enqueue(req, false); + flow_log("skcipher_decrypt() nbytes:%u\n", req->cryptlen); + return skcipher_enqueue(req, false); } static int ahash_enqueue(struct ahash_request *req) @@ -1999,7 +1907,7 @@ struct iproc_reqctx_s *rctx = ahash_request_ctx(req); struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); struct iproc_ctx_s *ctx = crypto_ahash_ctx(tfm); - int err = 0; + int err; const char *alg_name; flow_log("ahash_enqueue() nbytes:%u\n", req->nbytes); @@ -2093,7 +2001,7 @@ * Return: true if incremental hashing is not supported * false otherwise */ -bool spu_no_incr_hash(struct iproc_ctx_s *ctx) +static bool spu_no_incr_hash(struct iproc_ctx_s *ctx) { 
struct spu_hw *spu = &iproc_priv.spu; @@ -2139,7 +2047,6 @@ goto err_hash; } ctx->shash->tfm = hash; - ctx->shash->flags = 0; /* Set the key using data we already have from setkey */ if (ctx->authkeylen > 0) { @@ -2332,7 +2239,7 @@ static int ahash_digest(struct ahash_request *req) { - int err = 0; + int err; flow_log("ahash_digest() nbytes:%u\n", req->nbytes); @@ -2640,6 +2547,19 @@ return 1; } + /* + * RFC4106 and RFC4543 cannot handle the case where AAD is other than + * 16 or 20 bytes long. So use fallback in this case. + */ + if (ctx->cipher.mode == CIPHER_MODE_GCM && + ctx->cipher.alg == CIPHER_ALG_AES && + rctx->iv_ctr_len == GCM_RFC4106_IV_SIZE && + req->assoclen != 16 && req->assoclen != 20) { + flow_log("RFC4106/RFC4543 needs fallback for assoclen" + " other than 16 or 20 bytes\n"); + return 1; + } + payload_len = req->cryptlen; if (spu->spu_type == SPU_TYPE_SPUM) payload_len += req->assoclen; @@ -2866,40 +2786,16 @@ switch (ctx->alg->cipher_info.alg) { case CIPHER_ALG_DES: - if (ctx->enckeylen == DES_KEY_SIZE) { - u32 tmp[DES_EXPKEY_WORDS]; - u32 flags = CRYPTO_TFM_RES_WEAK_KEY; + if (verify_aead_des_key(cipher, keys.enckey, keys.enckeylen)) + return -EINVAL; - if (des_ekey(tmp, keys.enckey) == 0) { - if (crypto_aead_get_flags(cipher) & - CRYPTO_TFM_REQ_WEAK_KEY) { - crypto_aead_set_flags(cipher, flags); - return -EINVAL; - } - } - - ctx->cipher_type = CIPHER_TYPE_DES; - } else { - goto badkey; - } + ctx->cipher_type = CIPHER_TYPE_DES; break; case CIPHER_ALG_3DES: - if (ctx->enckeylen == (DES_KEY_SIZE * 3)) { - const u32 *K = (const u32 *)keys.enckey; - u32 flags = CRYPTO_TFM_RES_BAD_KEY_SCHED; - - if (!((K[0] ^ K[2]) | (K[1] ^ K[3])) || - !((K[2] ^ K[4]) | (K[3] ^ K[5]))) { - crypto_aead_set_flags(cipher, flags); - return -EINVAL; - } - - ctx->cipher_type = CIPHER_TYPE_3DES; - } else { - crypto_aead_set_flags(cipher, - CRYPTO_TFM_RES_BAD_KEY_LEN); + if (verify_aead_des3_key(cipher, keys.enckey, keys.enckeylen)) return -EINVAL; - } + + ctx->cipher_type = CIPHER_TYPE_3DES; break; case CIPHER_ALG_AES: switch (ctx->enckeylen) { @@ -2915,9 +2811,6 @@ default: goto badkey; } - break; - case CIPHER_ALG_RC4: - ctx->cipher_type = CIPHER_TYPE_INIT; break; default: pr_err("%s() Error: Unknown cipher alg\n", __func__); @@ -2937,13 +2830,8 @@ ctx->fallback_cipher->base.crt_flags |= tfm->crt_flags & CRYPTO_TFM_REQ_MASK; ret = crypto_aead_setkey(ctx->fallback_cipher, key, keylen); - if (ret) { + if (ret) flow_log(" fallback setkey() returned:%d\n", ret); - tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK; - tfm->crt_flags |= - (ctx->fallback_cipher->base.crt_flags & - CRYPTO_TFM_RES_MASK); - } } ctx->spu_resp_hdr_len = spu->spu_response_hdr_len(ctx->authkeylen, @@ -2959,7 +2847,6 @@ ctx->authkeylen = 0; ctx->digestsize = 0; - crypto_aead_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN); return -EINVAL; } @@ -3011,13 +2898,8 @@ tfm->crt_flags & CRYPTO_TFM_REQ_MASK; ret = crypto_aead_setkey(ctx->fallback_cipher, key, keylen + ctx->salt_len); - if (ret) { + if (ret) flow_log(" fallback setkey() returned:%d\n", ret); - tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK; - tfm->crt_flags |= - (ctx->fallback_cipher->base.crt_flags & - CRYPTO_TFM_RES_MASK); - } } ctx->spu_resp_hdr_len = spu->spu_response_hdr_len(ctx->authkeylen, @@ -3036,7 +2918,6 @@ ctx->authkeylen = 0; ctx->digestsize = 0; - crypto_aead_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN); return -EINVAL; } @@ -3302,7 +3183,9 @@ .cra_name = "authenc(hmac(md5),cbc(aes))", .cra_driver_name = "authenc-hmac-md5-cbc-aes-iproc", .cra_blocksize = AES_BLOCK_SIZE, - 
.cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = AES_BLOCK_SIZE, @@ -3325,7 +3208,9 @@ .cra_name = "authenc(hmac(sha1),cbc(aes))", .cra_driver_name = "authenc-hmac-sha1-cbc-aes-iproc", .cra_blocksize = AES_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = AES_BLOCK_SIZE, @@ -3348,7 +3233,9 @@ .cra_name = "authenc(hmac(sha256),cbc(aes))", .cra_driver_name = "authenc-hmac-sha256-cbc-aes-iproc", .cra_blocksize = AES_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = AES_BLOCK_SIZE, @@ -3371,7 +3258,9 @@ .cra_name = "authenc(hmac(md5),cbc(des))", .cra_driver_name = "authenc-hmac-md5-cbc-des-iproc", .cra_blocksize = DES_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES_BLOCK_SIZE, @@ -3394,7 +3283,9 @@ .cra_name = "authenc(hmac(sha1),cbc(des))", .cra_driver_name = "authenc-hmac-sha1-cbc-des-iproc", .cra_blocksize = DES_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES_BLOCK_SIZE, @@ -3417,7 +3308,9 @@ .cra_name = "authenc(hmac(sha224),cbc(des))", .cra_driver_name = "authenc-hmac-sha224-cbc-des-iproc", .cra_blocksize = DES_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES_BLOCK_SIZE, @@ -3440,7 +3333,9 @@ .cra_name = "authenc(hmac(sha256),cbc(des))", .cra_driver_name = "authenc-hmac-sha256-cbc-des-iproc", .cra_blocksize = DES_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES_BLOCK_SIZE, @@ -3463,7 +3358,9 @@ .cra_name = "authenc(hmac(sha384),cbc(des))", .cra_driver_name = "authenc-hmac-sha384-cbc-des-iproc", .cra_blocksize = DES_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES_BLOCK_SIZE, @@ -3486,7 +3383,9 @@ .cra_name = "authenc(hmac(sha512),cbc(des))", .cra_driver_name = "authenc-hmac-sha512-cbc-des-iproc", .cra_blocksize = DES_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES_BLOCK_SIZE, @@ -3509,7 +3408,9 @@ .cra_name = "authenc(hmac(md5),cbc(des3_ede))", .cra_driver_name = "authenc-hmac-md5-cbc-des3-iproc", .cra_blocksize = DES3_EDE_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES3_EDE_BLOCK_SIZE, @@ -3532,7 +3433,9 @@ 
.cra_name = "authenc(hmac(sha1),cbc(des3_ede))", .cra_driver_name = "authenc-hmac-sha1-cbc-des3-iproc", .cra_blocksize = DES3_EDE_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES3_EDE_BLOCK_SIZE, @@ -3555,7 +3458,9 @@ .cra_name = "authenc(hmac(sha224),cbc(des3_ede))", .cra_driver_name = "authenc-hmac-sha224-cbc-des3-iproc", .cra_blocksize = DES3_EDE_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES3_EDE_BLOCK_SIZE, @@ -3578,7 +3483,9 @@ .cra_name = "authenc(hmac(sha256),cbc(des3_ede))", .cra_driver_name = "authenc-hmac-sha256-cbc-des3-iproc", .cra_blocksize = DES3_EDE_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES3_EDE_BLOCK_SIZE, @@ -3601,7 +3508,9 @@ .cra_name = "authenc(hmac(sha384),cbc(des3_ede))", .cra_driver_name = "authenc-hmac-sha384-cbc-des3-iproc", .cra_blocksize = DES3_EDE_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES3_EDE_BLOCK_SIZE, @@ -3624,7 +3533,9 @@ .cra_name = "authenc(hmac(sha512),cbc(des3_ede))", .cra_driver_name = "authenc-hmac-sha512-cbc-des3-iproc", .cra_blocksize = DES3_EDE_BLOCK_SIZE, - .cra_flags = CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC + .cra_flags = CRYPTO_ALG_NEED_FALLBACK | + CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY }, .setkey = aead_authenc_setkey, .ivsize = DES3_EDE_BLOCK_SIZE, @@ -3641,39 +3552,16 @@ .auth_first = 0, }, -/* ABLKCIPHER algorithms. */ +/* SKCIPHER algorithms. 
*/ { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "ecb(arc4)", - .cra_driver_name = "ecb-arc4-iproc", - .cra_blocksize = ARC4_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = ARC4_MIN_KEY_SIZE, - .max_keysize = ARC4_MAX_KEY_SIZE, - .ivsize = 0, - } - }, - .cipher_info = { - .alg = CIPHER_ALG_RC4, - .mode = CIPHER_MODE_NONE, - }, - .auth_info = { - .alg = HASH_ALG_NONE, - .mode = HASH_MODE_NONE, - }, - }, - { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "ofb(des)", - .cra_driver_name = "ofb-des-iproc", - .cra_blocksize = DES_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = DES_KEY_SIZE, - .max_keysize = DES_KEY_SIZE, - .ivsize = DES_BLOCK_SIZE, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "ofb(des)", + .base.cra_driver_name = "ofb-des-iproc", + .base.cra_blocksize = DES_BLOCK_SIZE, + .min_keysize = DES_KEY_SIZE, + .max_keysize = DES_KEY_SIZE, + .ivsize = DES_BLOCK_SIZE, }, .cipher_info = { .alg = CIPHER_ALG_DES, @@ -3685,16 +3573,14 @@ }, }, { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "cbc(des)", - .cra_driver_name = "cbc-des-iproc", - .cra_blocksize = DES_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = DES_KEY_SIZE, - .max_keysize = DES_KEY_SIZE, - .ivsize = DES_BLOCK_SIZE, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "cbc(des)", + .base.cra_driver_name = "cbc-des-iproc", + .base.cra_blocksize = DES_BLOCK_SIZE, + .min_keysize = DES_KEY_SIZE, + .max_keysize = DES_KEY_SIZE, + .ivsize = DES_BLOCK_SIZE, }, .cipher_info = { .alg = CIPHER_ALG_DES, @@ -3706,16 +3592,14 @@ }, }, { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "ecb(des)", - .cra_driver_name = "ecb-des-iproc", - .cra_blocksize = DES_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = DES_KEY_SIZE, - .max_keysize = DES_KEY_SIZE, - .ivsize = 0, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "ecb(des)", + .base.cra_driver_name = "ecb-des-iproc", + .base.cra_blocksize = DES_BLOCK_SIZE, + .min_keysize = DES_KEY_SIZE, + .max_keysize = DES_KEY_SIZE, + .ivsize = 0, }, .cipher_info = { .alg = CIPHER_ALG_DES, @@ -3727,16 +3611,14 @@ }, }, { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "ofb(des3_ede)", - .cra_driver_name = "ofb-des3-iproc", - .cra_blocksize = DES3_EDE_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = DES3_EDE_KEY_SIZE, - .max_keysize = DES3_EDE_KEY_SIZE, - .ivsize = DES3_EDE_BLOCK_SIZE, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "ofb(des3_ede)", + .base.cra_driver_name = "ofb-des3-iproc", + .base.cra_blocksize = DES3_EDE_BLOCK_SIZE, + .min_keysize = DES3_EDE_KEY_SIZE, + .max_keysize = DES3_EDE_KEY_SIZE, + .ivsize = DES3_EDE_BLOCK_SIZE, }, .cipher_info = { .alg = CIPHER_ALG_3DES, @@ -3748,16 +3630,14 @@ }, }, { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "cbc(des3_ede)", - .cra_driver_name = "cbc-des3-iproc", - .cra_blocksize = DES3_EDE_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = DES3_EDE_KEY_SIZE, - .max_keysize = DES3_EDE_KEY_SIZE, - .ivsize = DES3_EDE_BLOCK_SIZE, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "cbc(des3_ede)", + .base.cra_driver_name = "cbc-des3-iproc", + .base.cra_blocksize = DES3_EDE_BLOCK_SIZE, + .min_keysize = DES3_EDE_KEY_SIZE, + .max_keysize = DES3_EDE_KEY_SIZE, + .ivsize = DES3_EDE_BLOCK_SIZE, }, .cipher_info = { .alg = CIPHER_ALG_3DES, @@ -3769,16 +3649,14 @@ }, }, { - 
.type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "ecb(des3_ede)", - .cra_driver_name = "ecb-des3-iproc", - .cra_blocksize = DES3_EDE_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = DES3_EDE_KEY_SIZE, - .max_keysize = DES3_EDE_KEY_SIZE, - .ivsize = 0, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "ecb(des3_ede)", + .base.cra_driver_name = "ecb-des3-iproc", + .base.cra_blocksize = DES3_EDE_BLOCK_SIZE, + .min_keysize = DES3_EDE_KEY_SIZE, + .max_keysize = DES3_EDE_KEY_SIZE, + .ivsize = 0, }, .cipher_info = { .alg = CIPHER_ALG_3DES, @@ -3790,16 +3668,14 @@ }, }, { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "ofb(aes)", - .cra_driver_name = "ofb-aes-iproc", - .cra_blocksize = AES_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = AES_MIN_KEY_SIZE, - .max_keysize = AES_MAX_KEY_SIZE, - .ivsize = AES_BLOCK_SIZE, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "ofb(aes)", + .base.cra_driver_name = "ofb-aes-iproc", + .base.cra_blocksize = AES_BLOCK_SIZE, + .min_keysize = AES_MIN_KEY_SIZE, + .max_keysize = AES_MAX_KEY_SIZE, + .ivsize = AES_BLOCK_SIZE, }, .cipher_info = { .alg = CIPHER_ALG_AES, @@ -3811,16 +3687,14 @@ }, }, { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "cbc(aes)", - .cra_driver_name = "cbc-aes-iproc", - .cra_blocksize = AES_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = AES_MIN_KEY_SIZE, - .max_keysize = AES_MAX_KEY_SIZE, - .ivsize = AES_BLOCK_SIZE, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "cbc(aes)", + .base.cra_driver_name = "cbc-aes-iproc", + .base.cra_blocksize = AES_BLOCK_SIZE, + .min_keysize = AES_MIN_KEY_SIZE, + .max_keysize = AES_MAX_KEY_SIZE, + .ivsize = AES_BLOCK_SIZE, }, .cipher_info = { .alg = CIPHER_ALG_AES, @@ -3832,16 +3706,14 @@ }, }, { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "ecb(aes)", - .cra_driver_name = "ecb-aes-iproc", - .cra_blocksize = AES_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = AES_MIN_KEY_SIZE, - .max_keysize = AES_MAX_KEY_SIZE, - .ivsize = 0, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "ecb(aes)", + .base.cra_driver_name = "ecb-aes-iproc", + .base.cra_blocksize = AES_BLOCK_SIZE, + .min_keysize = AES_MIN_KEY_SIZE, + .max_keysize = AES_MAX_KEY_SIZE, + .ivsize = 0, }, .cipher_info = { .alg = CIPHER_ALG_AES, @@ -3853,17 +3725,14 @@ }, }, { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "ctr(aes)", - .cra_driver_name = "ctr-aes-iproc", - .cra_blocksize = AES_BLOCK_SIZE, - .cra_ablkcipher = { - /* .geniv = "chainiv", */ - .min_keysize = AES_MIN_KEY_SIZE, - .max_keysize = AES_MAX_KEY_SIZE, - .ivsize = AES_BLOCK_SIZE, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "ctr(aes)", + .base.cra_driver_name = "ctr-aes-iproc", + .base.cra_blocksize = AES_BLOCK_SIZE, + .min_keysize = AES_MIN_KEY_SIZE, + .max_keysize = AES_MAX_KEY_SIZE, + .ivsize = AES_BLOCK_SIZE, }, .cipher_info = { .alg = CIPHER_ALG_AES, @@ -3875,16 +3744,14 @@ }, }, { - .type = CRYPTO_ALG_TYPE_ABLKCIPHER, - .alg.crypto = { - .cra_name = "xts(aes)", - .cra_driver_name = "xts-aes-iproc", - .cra_blocksize = AES_BLOCK_SIZE, - .cra_ablkcipher = { - .min_keysize = 2 * AES_MIN_KEY_SIZE, - .max_keysize = 2 * AES_MAX_KEY_SIZE, - .ivsize = AES_BLOCK_SIZE, - } + .type = CRYPTO_ALG_TYPE_SKCIPHER, + .alg.skcipher = { + .base.cra_name = "xts(aes)", + .base.cra_driver_name = "xts-aes-iproc", + .base.cra_blocksize = 
AES_BLOCK_SIZE, + .min_keysize = 2 * AES_MIN_KEY_SIZE, + .max_keysize = 2 * AES_MAX_KEY_SIZE, + .ivsize = AES_BLOCK_SIZE, }, .cipher_info = { .alg = CIPHER_ALG_AES, @@ -3905,7 +3772,8 @@ .cra_name = "md5", .cra_driver_name = "md5-iproc", .cra_blocksize = MD5_BLOCK_WORDS * 4, - .cra_flags = CRYPTO_ALG_ASYNC, + .cra_flags = CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY, } }, .cipher_info = { @@ -4339,16 +4207,17 @@ return 0; } -static int ablkcipher_cra_init(struct crypto_tfm *tfm) +static int skcipher_init_tfm(struct crypto_skcipher *skcipher) { - struct crypto_alg *alg = tfm->__crt_alg; + struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); + struct skcipher_alg *alg = crypto_skcipher_alg(skcipher); struct iproc_alg_s *cipher_alg; flow_log("%s()\n", __func__); - tfm->crt_ablkcipher.reqsize = sizeof(struct iproc_reqctx_s); + crypto_skcipher_set_reqsize(skcipher, sizeof(struct iproc_reqctx_s)); - cipher_alg = container_of(alg, struct iproc_alg_s, alg.crypto); + cipher_alg = container_of(alg, struct iproc_alg_s, alg.skcipher); return generic_cra_init(tfm, cipher_alg); } @@ -4418,6 +4287,11 @@ static void generic_cra_exit(struct crypto_tfm *tfm) { atomic_dec(&iproc_priv.session_count); +} + +static void skcipher_exit_tfm(struct crypto_skcipher *tfm) +{ + generic_cra_exit(crypto_skcipher_tfm(tfm)); } static void aead_cra_exit(struct crypto_aead *aead) @@ -4524,7 +4398,7 @@ for (i = 0; i < iproc_priv.spu.num_chan; i++) { iproc_priv.mbox[i] = mbox_request_channel(mcl, i); if (IS_ERR(iproc_priv.mbox[i])) { - err = (int)PTR_ERR(iproc_priv.mbox[i]); + err = PTR_ERR(iproc_priv.mbox[i]); dev_err(dev, "Mbox channel %d request failed with err %d", i, err); @@ -4581,38 +4455,30 @@ atomic_set(&iproc_priv.bad_icv, 0); } -static int spu_register_ablkcipher(struct iproc_alg_s *driver_alg) +static int spu_register_skcipher(struct iproc_alg_s *driver_alg) { - struct spu_hw *spu = &iproc_priv.spu; - struct crypto_alg *crypto = &driver_alg->alg.crypto; + struct skcipher_alg *crypto = &driver_alg->alg.skcipher; int err; - /* SPU2 does not support RC4 */ - if ((driver_alg->cipher_info.alg == CIPHER_ALG_RC4) && - (spu->spu_type == SPU_TYPE_SPU2)) - return 0; + crypto->base.cra_module = THIS_MODULE; + crypto->base.cra_priority = cipher_pri; + crypto->base.cra_alignmask = 0; + crypto->base.cra_ctxsize = sizeof(struct iproc_ctx_s); + crypto->base.cra_flags = CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY | + CRYPTO_ALG_KERN_DRIVER_ONLY; - crypto->cra_module = THIS_MODULE; - crypto->cra_priority = cipher_pri; - crypto->cra_alignmask = 0; - crypto->cra_ctxsize = sizeof(struct iproc_ctx_s); - INIT_LIST_HEAD(&crypto->cra_list); + crypto->init = skcipher_init_tfm; + crypto->exit = skcipher_exit_tfm; + crypto->setkey = skcipher_setkey; + crypto->encrypt = skcipher_encrypt; + crypto->decrypt = skcipher_decrypt; - crypto->cra_init = ablkcipher_cra_init; - crypto->cra_exit = generic_cra_exit; - crypto->cra_type = &crypto_ablkcipher_type; - crypto->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC | - CRYPTO_ALG_KERN_DRIVER_ONLY; - - crypto->cra_ablkcipher.setkey = ablkcipher_setkey; - crypto->cra_ablkcipher.encrypt = ablkcipher_encrypt; - crypto->cra_ablkcipher.decrypt = ablkcipher_decrypt; - - err = crypto_register_alg(crypto); + err = crypto_register_skcipher(crypto); /* Mark alg as having been registered, if successful */ if (err == 0) driver_alg->registered = true; - pr_debug(" registered ablkcipher %s\n", crypto->cra_driver_name); + pr_debug(" registered skcipher %s\n", crypto->base.cra_driver_name); 
return err; } @@ -4639,7 +4505,8 @@ hash->halg.base.cra_ctxsize = sizeof(struct iproc_ctx_s); hash->halg.base.cra_init = ahash_cra_init; hash->halg.base.cra_exit = generic_cra_exit; - hash->halg.base.cra_flags = CRYPTO_ALG_ASYNC; + hash->halg.base.cra_flags = CRYPTO_ALG_ASYNC | + CRYPTO_ALG_ALLOCATES_MEMORY; hash->halg.statesize = sizeof(struct spu_hash_export_s); if (driver_alg->auth_info.mode != HASH_MODE_HMAC) { @@ -4682,9 +4549,8 @@ aead->base.cra_priority = aead_pri; aead->base.cra_alignmask = 0; aead->base.cra_ctxsize = sizeof(struct iproc_ctx_s); - INIT_LIST_HEAD(&aead->base.cra_list); - aead->base.cra_flags |= CRYPTO_ALG_ASYNC; + aead->base.cra_flags |= CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY; /* setkey set in alg initialization */ aead->setauthsize = aead_setauthsize; aead->encrypt = aead_encrypt; @@ -4708,8 +4574,8 @@ for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { switch (driver_algs[i].type) { - case CRYPTO_ALG_TYPE_ABLKCIPHER: - err = spu_register_ablkcipher(&driver_algs[i]); + case CRYPTO_ALG_TYPE_SKCIPHER: + err = spu_register_skcipher(&driver_algs[i]); break; case CRYPTO_ALG_TYPE_AHASH: err = spu_register_ahash(&driver_algs[i]); @@ -4739,8 +4605,8 @@ if (!driver_algs[j].registered) continue; switch (driver_algs[j].type) { - case CRYPTO_ALG_TYPE_ABLKCIPHER: - crypto_unregister_alg(&driver_algs[j].alg.crypto); + case CRYPTO_ALG_TYPE_SKCIPHER: + crypto_unregister_skcipher(&driver_algs[j].alg.skcipher); driver_algs[j].registered = false; break; case CRYPTO_ALG_TYPE_AHASH: @@ -4810,21 +4676,20 @@ matched_spu_type = of_device_get_match_data(dev); if (!matched_spu_type) { - dev_err(&pdev->dev, "Failed to match device\n"); + dev_err(dev, "Failed to match device\n"); return -ENODEV; } spu->spu_type = matched_spu_type->type; spu->spu_subtype = matched_spu_type->subtype; - i = 0; for (i = 0; (i < MAX_SPUS) && ((spu_ctrl_regs = platform_get_resource(pdev, IORESOURCE_MEM, i)) != NULL); i++) { spu->reg_vbase[i] = devm_ioremap_resource(dev, spu_ctrl_regs); if (IS_ERR(spu->reg_vbase[i])) { err = PTR_ERR(spu->reg_vbase[i]); - dev_err(&pdev->dev, "Failed to map registers: %d\n", + dev_err(dev, "Failed to map registers: %d\n", err); spu->reg_vbase[i] = NULL; return err; @@ -4836,11 +4701,11 @@ return 0; } -int bcm_spu_probe(struct platform_device *pdev) +static int bcm_spu_probe(struct platform_device *pdev) { struct device *dev = &pdev->dev; struct spu_hw *spu = &iproc_priv.spu; - int err = 0; + int err; iproc_priv.pdev = pdev; platform_set_drvdata(iproc_priv.pdev, @@ -4850,7 +4715,7 @@ if (err < 0) goto failure; - err = spu_mb_init(&pdev->dev); + err = spu_mb_init(dev); if (err < 0) goto failure; @@ -4859,7 +4724,7 @@ else if (spu->spu_type == SPU_TYPE_SPU2) iproc_priv.bcm_hdr_len = 0; - spu_functions_register(&pdev->dev, spu->spu_type, spu->spu_subtype); + spu_functions_register(dev, spu->spu_type, spu->spu_subtype); spu_counters_init(); @@ -4880,7 +4745,7 @@ return err; } -int bcm_spu_remove(struct platform_device *pdev) +static int bcm_spu_remove(struct platform_device *pdev) { int i; struct device *dev = &pdev->dev; @@ -4896,10 +4761,10 @@ continue; switch (driver_algs[i].type) { - case CRYPTO_ALG_TYPE_ABLKCIPHER: - crypto_unregister_alg(&driver_algs[i].alg.crypto); + case CRYPTO_ALG_TYPE_SKCIPHER: + crypto_unregister_skcipher(&driver_algs[i].alg.skcipher); dev_dbg(dev, " unregistered cipher %s\n", - driver_algs[i].alg.crypto.cra_driver_name); + driver_algs[i].alg.skcipher.base.cra_driver_name); driver_algs[i].registered = false; break; case CRYPTO_ALG_TYPE_AHASH: -- Gitblit 
v1.6.2
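
The heart of this change is the move from the kernel's removed ablkcipher
interface to skcipher: requests now carry req->cryptlen and req->iv instead
of the old nbytes/info fields, and the driver registers struct skcipher_alg
entries. As a hedged illustration (not part of the patch), here is a minimal
sketch of a kernel-side caller exercising one of the converted algorithms;
the example_ name, key length handling, and buffer contents are placeholders:

```c
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Hypothetical caller; not part of the driver. Encrypts buf in place. */
static int example_cbc_aes_encrypt(u8 *buf, unsigned int len,
				   const u8 *key, unsigned int keylen,
				   u8 *iv)
{
	DECLARE_CRYPTO_WAIT(wait);
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	int err;

	/* Picks the highest-priority "cbc(aes)" provider, e.g. this driver */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_tfm;
	}

	sg_init_one(&sg, buf, len);
	/* cryptlen/iv replace the old ablkcipher nbytes/info fields */
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
	skcipher_request_set_callback(req,
				      CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);

	/* Async drivers return -EINPROGRESS; crypto_wait_req() blocks */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
```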
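The open-coded DES checks that the patch deletes are replaced by the library
helpers verify_skcipher_des_key()/verify_skcipher_des3_key() (and the
verify_aead_des*_key() variants in the AEAD path) from
<crypto/internal/des.h>. The removed XOR test on the 3DES key words,
!((K[0] ^ K[2]) | (K[1] ^ K[3])) || !((K[2] ^ K[4]) | (K[3] ^ K[5])),
was rejecting degenerate keys. A sketch of the same predicate, for reference
only (the helper's actual implementation also applies FIPS-mode policy):

```c
#include <crypto/des.h>
#include <linux/string.h>
#include <linux/types.h>

/*
 * Reject 3DES keys where K1 == K2 or K2 == K3: either collapses
 * EDE (encrypt-decrypt-encrypt) into single DES.
 */
static bool des3_key_is_degenerate(const u8 key[DES3_EDE_KEY_SIZE])
{
	return !memcmp(key, key + DES_KEY_SIZE, DES_KEY_SIZE) ||  /* K1 == K2 */
	       !memcmp(key + DES_KEY_SIZE, key + 2 * DES_KEY_SIZE,
		       DES_KEY_SIZE);                             /* K2 == K3 */
}
```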
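skcipher_setkey() keeps the comment that the SPU wants XTS keys in the
reverse order from how the crypto API presents them (key1 || key2). A small
illustrative sketch of that reordering, assuming two equal-length halves;
'enckey' stands in for the driver's ctx->enckey and the function name is
hypothetical:

```c
#include <linux/string.h>
#include <linux/types.h>

/* Copy an XTS key key1 || key2 into enckey as key2 || key1. */
static void example_xts_reorder_key(u8 *enckey, const u8 *key,
				    unsigned int keylen)
{
	unsigned int half = keylen / 2;	/* XTS keys are two equal halves */

	memcpy(enckey, key + half, half);	/* second half first */
	memcpy(enckey + half, key, half);	/* then the first half */
}
```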