-// SPDX-License-Identifier: GPL-2.0
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * AMD Cryptographic Coprocessor (CCP) AES crypto API support
  *
  * Copyright (C) 2013-2019 Advanced Micro Devices, Inc.
  *
  * Author: Tom Lendacky <thomas.lendacky@amd.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
  */

 #include <linux/module.h>
..

 static int ccp_aes_complete(struct crypto_async_request *async_req, int ret)
 {
-        struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
+        struct skcipher_request *req = skcipher_request_cast(async_req);
         struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
-        struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
+        struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);

         if (ret)
                 return ret;

         if (ctx->u.aes.mode != CCP_AES_MODE_ECB)
-                memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);
+                memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);

         return 0;
 }

-static int ccp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int ccp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int key_len)
 {
-        struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm));
-        struct ccp_crypto_ablkcipher_alg *alg =
-                ccp_crypto_ablkcipher_alg(crypto_ablkcipher_tfm(tfm));
+        struct ccp_crypto_skcipher_alg *alg = ccp_crypto_skcipher_alg(tfm);
+        struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);

         switch (key_len) {
         case AES_KEYSIZE_128:
..
                 ctx->u.aes.type = CCP_AES_TYPE_256;
                 break;
         default:
-                crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                 return -EINVAL;
         }
         ctx->u.aes.mode = alg->mode;
..
         return 0;
 }

-static int ccp_aes_crypt(struct ablkcipher_request *req, bool encrypt)
+static int ccp_aes_crypt(struct skcipher_request *req, bool encrypt)
 {
-        struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
-        struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
         struct scatterlist *iv_sg = NULL;
         unsigned int iv_len = 0;
         int ret;
..

         if (((ctx->u.aes.mode == CCP_AES_MODE_ECB) ||
              (ctx->u.aes.mode == CCP_AES_MODE_CBC)) &&
-            (req->nbytes & (AES_BLOCK_SIZE - 1)))
+            (req->cryptlen & (AES_BLOCK_SIZE - 1)))
                 return -EINVAL;

         if (ctx->u.aes.mode != CCP_AES_MODE_ECB) {
-                if (!req->info)
+                if (!req->iv)
                         return -EINVAL;

-                memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);
+                memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);
                 iv_sg = &rctx->iv_sg;
                 iv_len = AES_BLOCK_SIZE;
                 sg_init_one(iv_sg, rctx->iv, iv_len);
..
         rctx->cmd.u.aes.iv = iv_sg;
         rctx->cmd.u.aes.iv_len = iv_len;
         rctx->cmd.u.aes.src = req->src;
-        rctx->cmd.u.aes.src_len = req->nbytes;
+        rctx->cmd.u.aes.src_len = req->cryptlen;
         rctx->cmd.u.aes.dst = req->dst;

         ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
..
         return ret;
 }

-static int ccp_aes_encrypt(struct ablkcipher_request *req)
+static int ccp_aes_encrypt(struct skcipher_request *req)
 {
         return ccp_aes_crypt(req, true);
 }

-static int ccp_aes_decrypt(struct ablkcipher_request *req)
+static int ccp_aes_decrypt(struct skcipher_request *req)
 {
         return ccp_aes_crypt(req, false);
 }

-static int ccp_aes_cra_init(struct crypto_tfm *tfm)
+static int ccp_aes_init_tfm(struct crypto_skcipher *tfm)
 {
-        struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);

         ctx->complete = ccp_aes_complete;
         ctx->u.aes.key_len = 0;

-        tfm->crt_ablkcipher.reqsize = sizeof(struct ccp_aes_req_ctx);
+        crypto_skcipher_set_reqsize(tfm, sizeof(struct ccp_aes_req_ctx));

         return 0;
-}
-
-static void ccp_aes_cra_exit(struct crypto_tfm *tfm)
-{
 }

 static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req,
                                     int ret)
 {
-        struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
-        struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
+        struct skcipher_request *req = skcipher_request_cast(async_req);
+        struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);

         /* Restore the original pointer */
-        req->info = rctx->rfc3686_info;
+        req->iv = rctx->rfc3686_info;

         return ccp_aes_complete(async_req, ret);
 }

-static int ccp_aes_rfc3686_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int ccp_aes_rfc3686_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                   unsigned int key_len)
 {
-        struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm));
+        struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);

         if (key_len < CTR_RFC3686_NONCE_SIZE)
                 return -EINVAL;
..
         return ccp_aes_setkey(tfm, key, key_len);
 }

-static int ccp_aes_rfc3686_crypt(struct ablkcipher_request *req, bool encrypt)
+static int ccp_aes_rfc3686_crypt(struct skcipher_request *req, bool encrypt)
 {
-        struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
-        struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
         u8 *iv;

         /* Initialize the CTR block */
..
         memcpy(iv, ctx->u.aes.nonce, CTR_RFC3686_NONCE_SIZE);

         iv += CTR_RFC3686_NONCE_SIZE;
-        memcpy(iv, req->info, CTR_RFC3686_IV_SIZE);
+        memcpy(iv, req->iv, CTR_RFC3686_IV_SIZE);

         iv += CTR_RFC3686_IV_SIZE;
         *(__be32 *)iv = cpu_to_be32(1);

         /* Point to the new IV */
-        rctx->rfc3686_info = req->info;
-        req->info = rctx->rfc3686_iv;
+        rctx->rfc3686_info = req->iv;
+        req->iv = rctx->rfc3686_iv;

         return ccp_aes_crypt(req, encrypt);
 }

-static int ccp_aes_rfc3686_encrypt(struct ablkcipher_request *req)
+static int ccp_aes_rfc3686_encrypt(struct skcipher_request *req)
 {
         return ccp_aes_rfc3686_crypt(req, true);
 }

-static int ccp_aes_rfc3686_decrypt(struct ablkcipher_request *req)
+static int ccp_aes_rfc3686_decrypt(struct skcipher_request *req)
 {
         return ccp_aes_rfc3686_crypt(req, false);
 }

-static int ccp_aes_rfc3686_cra_init(struct crypto_tfm *tfm)
+static int ccp_aes_rfc3686_init_tfm(struct crypto_skcipher *tfm)
 {
-        struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);

         ctx->complete = ccp_aes_rfc3686_complete;
         ctx->u.aes.key_len = 0;

-        tfm->crt_ablkcipher.reqsize = sizeof(struct ccp_aes_req_ctx);
+        crypto_skcipher_set_reqsize(tfm, sizeof(struct ccp_aes_req_ctx));

         return 0;
 }

-static void ccp_aes_rfc3686_cra_exit(struct crypto_tfm *tfm)
-{
-}
+static const struct skcipher_alg ccp_aes_defaults = {
+        .setkey = ccp_aes_setkey,
+        .encrypt = ccp_aes_encrypt,
+        .decrypt = ccp_aes_decrypt,
+        .min_keysize = AES_MIN_KEY_SIZE,
+        .max_keysize = AES_MAX_KEY_SIZE,
+        .init = ccp_aes_init_tfm,

-static struct crypto_alg ccp_aes_defaults = {
-        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-                     CRYPTO_ALG_ASYNC |
-                     CRYPTO_ALG_KERN_DRIVER_ONLY |
-                     CRYPTO_ALG_NEED_FALLBACK,
-        .cra_blocksize = AES_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct ccp_ctx),
-        .cra_priority = CCP_CRA_PRIORITY,
-        .cra_type = &crypto_ablkcipher_type,
-        .cra_init = ccp_aes_cra_init,
-        .cra_exit = ccp_aes_cra_exit,
-        .cra_module = THIS_MODULE,
-        .cra_ablkcipher = {
-                .setkey = ccp_aes_setkey,
-                .encrypt = ccp_aes_encrypt,
-                .decrypt = ccp_aes_decrypt,
-                .min_keysize = AES_MIN_KEY_SIZE,
-                .max_keysize = AES_MAX_KEY_SIZE,
-        },
+        .base.cra_flags = CRYPTO_ALG_ASYNC |
+                          CRYPTO_ALG_ALLOCATES_MEMORY |
+                          CRYPTO_ALG_KERN_DRIVER_ONLY |
+                          CRYPTO_ALG_NEED_FALLBACK,
+        .base.cra_blocksize = AES_BLOCK_SIZE,
+        .base.cra_ctxsize = sizeof(struct ccp_ctx),
+        .base.cra_priority = CCP_CRA_PRIORITY,
+        .base.cra_module = THIS_MODULE,
 };

-static struct crypto_alg ccp_aes_rfc3686_defaults = {
-        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-                     CRYPTO_ALG_ASYNC |
-                     CRYPTO_ALG_KERN_DRIVER_ONLY |
-                     CRYPTO_ALG_NEED_FALLBACK,
-        .cra_blocksize = CTR_RFC3686_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct ccp_ctx),
-        .cra_priority = CCP_CRA_PRIORITY,
-        .cra_type = &crypto_ablkcipher_type,
-        .cra_init = ccp_aes_rfc3686_cra_init,
-        .cra_exit = ccp_aes_rfc3686_cra_exit,
-        .cra_module = THIS_MODULE,
-        .cra_ablkcipher = {
-                .setkey = ccp_aes_rfc3686_setkey,
-                .encrypt = ccp_aes_rfc3686_encrypt,
-                .decrypt = ccp_aes_rfc3686_decrypt,
-                .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
-                .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
-        },
+static const struct skcipher_alg ccp_aes_rfc3686_defaults = {
+        .setkey = ccp_aes_rfc3686_setkey,
+        .encrypt = ccp_aes_rfc3686_encrypt,
+        .decrypt = ccp_aes_rfc3686_decrypt,
+        .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
+        .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
+        .init = ccp_aes_rfc3686_init_tfm,
+
+        .base.cra_flags = CRYPTO_ALG_ASYNC |
+                          CRYPTO_ALG_ALLOCATES_MEMORY |
+                          CRYPTO_ALG_KERN_DRIVER_ONLY |
+                          CRYPTO_ALG_NEED_FALLBACK,
+        .base.cra_blocksize = CTR_RFC3686_BLOCK_SIZE,
+        .base.cra_ctxsize = sizeof(struct ccp_ctx),
+        .base.cra_priority = CCP_CRA_PRIORITY,
+        .base.cra_module = THIS_MODULE,
 };

 struct ccp_aes_def {
..
         const char *driver_name;
         unsigned int blocksize;
         unsigned int ivsize;
-        struct crypto_alg *alg_defaults;
+        const struct skcipher_alg *alg_defaults;
 };

 static struct ccp_aes_def aes_algs[] = {
..
 static int ccp_register_aes_alg(struct list_head *head,
                                 const struct ccp_aes_def *def)
 {
-        struct ccp_crypto_ablkcipher_alg *ccp_alg;
-        struct crypto_alg *alg;
+        struct ccp_crypto_skcipher_alg *ccp_alg;
+        struct skcipher_alg *alg;
         int ret;

         ccp_alg = kzalloc(sizeof(*ccp_alg), GFP_KERNEL);
..
         /* Copy the defaults and override as necessary */
         alg = &ccp_alg->alg;
         *alg = *def->alg_defaults;
-        snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
-        snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
+        snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
+        snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
                  def->driver_name);
-        alg->cra_blocksize = def->blocksize;
-        alg->cra_ablkcipher.ivsize = def->ivsize;
+        alg->base.cra_blocksize = def->blocksize;
+        alg->ivsize = def->ivsize;

-        ret = crypto_register_alg(alg);
+        ret = crypto_register_skcipher(alg);
         if (ret) {
-                pr_err("%s ablkcipher algorithm registration error (%d)\n",
-                       alg->cra_name, ret);
+                pr_err("%s skcipher algorithm registration error (%d)\n",
+                       alg->base.cra_name, ret);
                 kfree(ccp_alg);
                 return ret;
         }
|---|