2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/drivers/crypto/ccp/ccp-crypto-aes.c
@@ -1,14 +1,10 @@
-// SPDX-License-Identifier: GPL-2.0
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * AMD Cryptographic Coprocessor (CCP) AES crypto API support
  *
  * Copyright (C) 2013-2019 Advanced Micro Devices, Inc.
  *
  * Author: Tom Lendacky <thomas.lendacky@amd.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
  */
 
 #include <linux/module.h>
@@ -25,25 +21,24 @@
 
 static int ccp_aes_complete(struct crypto_async_request *async_req, int ret)
 {
-	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
+	struct skcipher_request *req = skcipher_request_cast(async_req);
 	struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
-	struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
+	struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
 
 	if (ret)
 		return ret;
 
 	if (ctx->u.aes.mode != CCP_AES_MODE_ECB)
-		memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);
+		memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);
 
 	return 0;
 }
 
-static int ccp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int ccp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			  unsigned int key_len)
 {
-	struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm));
-	struct ccp_crypto_ablkcipher_alg *alg =
-		ccp_crypto_ablkcipher_alg(crypto_ablkcipher_tfm(tfm));
+	struct ccp_crypto_skcipher_alg *alg = ccp_crypto_skcipher_alg(tfm);
+	struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	switch (key_len) {
 	case AES_KEYSIZE_128:
@@ -56,7 +51,6 @@
 		ctx->u.aes.type = CCP_AES_TYPE_256;
 		break;
 	default:
-		crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 		return -EINVAL;
 	}
 	ctx->u.aes.mode = alg->mode;
@@ -68,10 +62,11 @@
 	return 0;
 }
 
-static int ccp_aes_crypt(struct ablkcipher_request *req, bool encrypt)
+static int ccp_aes_crypt(struct skcipher_request *req, bool encrypt)
 {
-	struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
-	struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
 	struct scatterlist *iv_sg = NULL;
 	unsigned int iv_len = 0;
 	int ret;
@@ -81,14 +76,14 @@
 
 	if (((ctx->u.aes.mode == CCP_AES_MODE_ECB) ||
 	     (ctx->u.aes.mode == CCP_AES_MODE_CBC)) &&
-	    (req->nbytes & (AES_BLOCK_SIZE - 1)))
+	    (req->cryptlen & (AES_BLOCK_SIZE - 1)))
 		return -EINVAL;
 
 	if (ctx->u.aes.mode != CCP_AES_MODE_ECB) {
-		if (!req->info)
+		if (!req->iv)
 			return -EINVAL;
 
-		memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);
+		memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);
 		iv_sg = &rctx->iv_sg;
 		iv_len = AES_BLOCK_SIZE;
 		sg_init_one(iv_sg, rctx->iv, iv_len);
@@ -106,7 +101,7 @@
 	rctx->cmd.u.aes.iv = iv_sg;
 	rctx->cmd.u.aes.iv_len = iv_len;
 	rctx->cmd.u.aes.src = req->src;
-	rctx->cmd.u.aes.src_len = req->nbytes;
+	rctx->cmd.u.aes.src_len = req->cryptlen;
 	rctx->cmd.u.aes.dst = req->dst;
 
 	ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
@@ -114,48 +109,44 @@
 	return ret;
 }
 
-static int ccp_aes_encrypt(struct ablkcipher_request *req)
+static int ccp_aes_encrypt(struct skcipher_request *req)
 {
 	return ccp_aes_crypt(req, true);
 }
 
-static int ccp_aes_decrypt(struct ablkcipher_request *req)
+static int ccp_aes_decrypt(struct skcipher_request *req)
 {
 	return ccp_aes_crypt(req, false);
 }
 
-static int ccp_aes_cra_init(struct crypto_tfm *tfm)
+static int ccp_aes_init_tfm(struct crypto_skcipher *tfm)
 {
-	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	ctx->complete = ccp_aes_complete;
 	ctx->u.aes.key_len = 0;
 
-	tfm->crt_ablkcipher.reqsize = sizeof(struct ccp_aes_req_ctx);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct ccp_aes_req_ctx));
 
 	return 0;
-}
-
-static void ccp_aes_cra_exit(struct crypto_tfm *tfm)
-{
 }
 
 static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req,
 				    int ret)
 {
-	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
-	struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
+	struct skcipher_request *req = skcipher_request_cast(async_req);
+	struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
 
 	/* Restore the original pointer */
-	req->info = rctx->rfc3686_info;
+	req->iv = rctx->rfc3686_info;
 
 	return ccp_aes_complete(async_req, ret);
 }
 
-static int ccp_aes_rfc3686_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int ccp_aes_rfc3686_setkey(struct crypto_skcipher *tfm, const u8 *key,
 				  unsigned int key_len)
 {
-	struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm));
+	struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	if (key_len < CTR_RFC3686_NONCE_SIZE)
 		return -EINVAL;
@@ -166,10 +157,11 @@
 	return ccp_aes_setkey(tfm, key, key_len);
 }
 
-static int ccp_aes_rfc3686_crypt(struct ablkcipher_request *req, bool encrypt)
+static int ccp_aes_rfc3686_crypt(struct skcipher_request *req, bool encrypt)
 {
-	struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
-	struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
 	u8 *iv;
 
 	/* Initialize the CTR block */
@@ -177,84 +169,74 @@
 	memcpy(iv, ctx->u.aes.nonce, CTR_RFC3686_NONCE_SIZE);
 
 	iv += CTR_RFC3686_NONCE_SIZE;
-	memcpy(iv, req->info, CTR_RFC3686_IV_SIZE);
+	memcpy(iv, req->iv, CTR_RFC3686_IV_SIZE);
 
 	iv += CTR_RFC3686_IV_SIZE;
 	*(__be32 *)iv = cpu_to_be32(1);
 
 	/* Point to the new IV */
-	rctx->rfc3686_info = req->info;
-	req->info = rctx->rfc3686_iv;
+	rctx->rfc3686_info = req->iv;
+	req->iv = rctx->rfc3686_iv;
 
 	return ccp_aes_crypt(req, encrypt);
 }
 
-static int ccp_aes_rfc3686_encrypt(struct ablkcipher_request *req)
+static int ccp_aes_rfc3686_encrypt(struct skcipher_request *req)
 {
 	return ccp_aes_rfc3686_crypt(req, true);
 }
 
-static int ccp_aes_rfc3686_decrypt(struct ablkcipher_request *req)
+static int ccp_aes_rfc3686_decrypt(struct skcipher_request *req)
 {
 	return ccp_aes_rfc3686_crypt(req, false);
 }
 
-static int ccp_aes_rfc3686_cra_init(struct crypto_tfm *tfm)
+static int ccp_aes_rfc3686_init_tfm(struct crypto_skcipher *tfm)
 {
-	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	ctx->complete = ccp_aes_rfc3686_complete;
 	ctx->u.aes.key_len = 0;
 
-	tfm->crt_ablkcipher.reqsize = sizeof(struct ccp_aes_req_ctx);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct ccp_aes_req_ctx));
 
 	return 0;
 }
 
-static void ccp_aes_rfc3686_cra_exit(struct crypto_tfm *tfm)
-{
-}
+static const struct skcipher_alg ccp_aes_defaults = {
+	.setkey			= ccp_aes_setkey,
+	.encrypt		= ccp_aes_encrypt,
+	.decrypt		= ccp_aes_decrypt,
+	.min_keysize		= AES_MIN_KEY_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.init			= ccp_aes_init_tfm,
 
-static struct crypto_alg ccp_aes_defaults = {
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
-				  CRYPTO_ALG_ASYNC |
-				  CRYPTO_ALG_KERN_DRIVER_ONLY |
-				  CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct ccp_ctx),
-	.cra_priority		= CCP_CRA_PRIORITY,
-	.cra_type		= &crypto_ablkcipher_type,
-	.cra_init		= ccp_aes_cra_init,
-	.cra_exit		= ccp_aes_cra_exit,
-	.cra_module		= THIS_MODULE,
-	.cra_ablkcipher		= {
-		.setkey		= ccp_aes_setkey,
-		.encrypt	= ccp_aes_encrypt,
-		.decrypt	= ccp_aes_decrypt,
-		.min_keysize	= AES_MIN_KEY_SIZE,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-	},
+	.base.cra_flags		= CRYPTO_ALG_ASYNC |
+				  CRYPTO_ALG_ALLOCATES_MEMORY |
+				  CRYPTO_ALG_KERN_DRIVER_ONLY |
+				  CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct ccp_ctx),
+	.base.cra_priority	= CCP_CRA_PRIORITY,
+	.base.cra_module	= THIS_MODULE,
 };
 
-static struct crypto_alg ccp_aes_rfc3686_defaults = {
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
-				  CRYPTO_ALG_ASYNC |
-				  CRYPTO_ALG_KERN_DRIVER_ONLY |
-				  CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= CTR_RFC3686_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct ccp_ctx),
-	.cra_priority		= CCP_CRA_PRIORITY,
-	.cra_type		= &crypto_ablkcipher_type,
-	.cra_init		= ccp_aes_rfc3686_cra_init,
-	.cra_exit		= ccp_aes_rfc3686_cra_exit,
-	.cra_module		= THIS_MODULE,
-	.cra_ablkcipher		= {
-		.setkey		= ccp_aes_rfc3686_setkey,
-		.encrypt	= ccp_aes_rfc3686_encrypt,
-		.decrypt	= ccp_aes_rfc3686_decrypt,
-		.min_keysize	= AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
-		.max_keysize	= AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
-	},
+static const struct skcipher_alg ccp_aes_rfc3686_defaults = {
+	.setkey			= ccp_aes_rfc3686_setkey,
+	.encrypt		= ccp_aes_rfc3686_encrypt,
+	.decrypt		= ccp_aes_rfc3686_decrypt,
+	.min_keysize		= AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
+	.init			= ccp_aes_rfc3686_init_tfm,
+
+	.base.cra_flags		= CRYPTO_ALG_ASYNC |
+				  CRYPTO_ALG_ALLOCATES_MEMORY |
+				  CRYPTO_ALG_KERN_DRIVER_ONLY |
+				  CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= CTR_RFC3686_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct ccp_ctx),
+	.base.cra_priority	= CCP_CRA_PRIORITY,
+	.base.cra_module	= THIS_MODULE,
 };
 
 struct ccp_aes_def {
@@ -264,7 +246,7 @@
 	const char *driver_name;
 	unsigned int blocksize;
 	unsigned int ivsize;
-	struct crypto_alg *alg_defaults;
+	const struct skcipher_alg *alg_defaults;
 };
 
 static struct ccp_aes_def aes_algs[] = {
@@ -327,8 +309,8 @@
 static int ccp_register_aes_alg(struct list_head *head,
 				const struct ccp_aes_def *def)
 {
-	struct ccp_crypto_ablkcipher_alg *ccp_alg;
-	struct crypto_alg *alg;
+	struct ccp_crypto_skcipher_alg *ccp_alg;
+	struct skcipher_alg *alg;
 	int ret;
 
 	ccp_alg = kzalloc(sizeof(*ccp_alg), GFP_KERNEL);
@@ -342,16 +324,16 @@
 	/* Copy the defaults and override as necessary */
 	alg = &ccp_alg->alg;
 	*alg = *def->alg_defaults;
-	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
-	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
+	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
+	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
 		 def->driver_name);
-	alg->cra_blocksize = def->blocksize;
-	alg->cra_ablkcipher.ivsize = def->ivsize;
+	alg->base.cra_blocksize = def->blocksize;
+	alg->ivsize = def->ivsize;
 
-	ret = crypto_register_alg(alg);
+	ret = crypto_register_skcipher(alg);
 	if (ret) {
-		pr_err("%s ablkcipher algorithm registration error (%d)\n",
-		       alg->cra_name, ret);
+		pr_err("%s skcipher algorithm registration error (%d)\n",
+		       alg->base.cra_name, ret);
 		kfree(ccp_alg);
 		return ret;
 	}
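
Note: the hunks above track the kernel-wide move from the legacy ablkcipher interface to the skcipher API: the request fields req->info and req->nbytes become req->iv and req->cryptlen, the cra_init/cra_exit hooks become the init callback on struct skcipher_alg, and registration goes through crypto_register_skcipher(). For reference, here is a minimal, illustrative sketch (not part of the patch) of how a kernel-side caller drives the skcipher interface this driver now implements. The helper name and parameters are hypothetical; "cbc(aes)" resolves to the CCP backend only when its cra_priority wins arbitration.

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical helper: one-shot CBC-AES encryption over scatterlists. */
static int example_cbc_aes_encrypt(const u8 *key, unsigned int key_len,
				   u8 *iv, struct scatterlist *src,
				   struct scatterlist *dst,
				   unsigned int cryptlen)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_skcipher_setkey(tfm, key, key_len);
	if (ret)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	/* cryptlen and iv are the skcipher names for the old nbytes/info */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, src, dst, cryptlen, iv);

	/* The CCP backend is async; crypto_wait_req() makes the call block */
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return ret;
}

For the "rfc3686(ctr(aes))" variant registered by this file, the setkey blob is the AES key with the 4-byte nonce appended (hence the "+ CTR_RFC3686_NONCE_SIZE" in min_keysize/max_keysize, which ccp_aes_rfc3686_setkey() strips off), the per-request IV is 8 bytes, and ccp_aes_rfc3686_crypt() assembles each counter block as nonce || IV || big-endian counter starting at 1, per RFC 3686.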