2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/arch/s390/crypto/aes_s390.c
@@ -21,6 +21,7 @@
 #include <crypto/algapi.h>
 #include <crypto/ghash.h>
 #include <crypto/internal/aead.h>
+#include <crypto/internal/cipher.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/scatterwalk.h>
 #include <linux/err.h>
@@ -44,7 +45,7 @@
 	int key_len;
 	unsigned long fc;
 	union {
-		struct crypto_skcipher *blk;
+		struct crypto_skcipher *skcipher;
 		struct crypto_cipher *cip;
 	} fallback;
 };
@@ -72,19 +73,12 @@
 			       unsigned int key_len)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-	int ret;
 
 	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
 	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
 			CRYPTO_TFM_REQ_MASK);
 
-	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
-	if (ret) {
-		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
-				   CRYPTO_TFM_RES_MASK);
-	}
-	return ret;
+	return crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
 }
 
 static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
@@ -108,7 +102,7 @@
 	return 0;
 }
 
-static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
@@ -119,7 +113,7 @@
 	cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE);
 }
 
-static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
@@ -137,7 +131,7 @@
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
 	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
-						 CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+						 CRYPTO_ALG_NEED_FALLBACK);
 
 	if (IS_ERR(sctx->fallback.cip)) {
 		pr_err("Allocating AES fallback algorithm %s failed\n",
@@ -172,71 +166,42 @@
 			.cia_min_keysize	= AES_MIN_KEY_SIZE,
 			.cia_max_keysize	= AES_MAX_KEY_SIZE,
 			.cia_setkey		= aes_set_key,
-			.cia_encrypt		= aes_encrypt,
-			.cia_decrypt		= aes_decrypt,
+			.cia_encrypt		= crypto_aes_encrypt,
+			.cia_decrypt		= crypto_aes_decrypt,
 		}
 	}
 };
 
-static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
-			       unsigned int len)
+static int setkey_fallback_skcipher(struct crypto_skcipher *tfm, const u8 *key,
+				    unsigned int len)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-	unsigned int ret;
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 
-	crypto_skcipher_clear_flags(sctx->fallback.blk, CRYPTO_TFM_REQ_MASK);
-	crypto_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
-						       CRYPTO_TFM_REQ_MASK);
-
-	ret = crypto_skcipher_setkey(sctx->fallback.blk, key, len);
-
-	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-	tfm->crt_flags |= crypto_skcipher_get_flags(sctx->fallback.blk) &
-			  CRYPTO_TFM_RES_MASK;
-
-	return ret;
+	crypto_skcipher_clear_flags(sctx->fallback.skcipher,
+				    CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_set_flags(sctx->fallback.skcipher,
+				  crypto_skcipher_get_flags(tfm) &
+				  CRYPTO_TFM_REQ_MASK);
+	return crypto_skcipher_setkey(sctx->fallback.skcipher, key, len);
 }
 
-static int fallback_blk_dec(struct blkcipher_desc *desc,
-			    struct scatterlist *dst, struct scatterlist *src,
-			    unsigned int nbytes)
+static int fallback_skcipher_crypt(struct s390_aes_ctx *sctx,
+				   struct skcipher_request *req,
+				   unsigned long modifier)
 {
-	unsigned int ret;
-	struct crypto_blkcipher *tfm = desc->tfm;
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
-	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
+	struct skcipher_request *subreq = skcipher_request_ctx(req);
 
-	skcipher_request_set_tfm(req, sctx->fallback.blk);
-	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-
-	ret = crypto_skcipher_decrypt(req);
-
-	skcipher_request_zero(req);
-	return ret;
+	*subreq = *req;
+	skcipher_request_set_tfm(subreq, sctx->fallback.skcipher);
+	return (modifier & CPACF_DECRYPT) ?
+		crypto_skcipher_decrypt(subreq) :
+		crypto_skcipher_encrypt(subreq);
 }
 
-static int fallback_blk_enc(struct blkcipher_desc *desc,
-			    struct scatterlist *dst, struct scatterlist *src,
-			    unsigned int nbytes)
-{
-	unsigned int ret;
-	struct crypto_blkcipher *tfm = desc->tfm;
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
-	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
-
-	skcipher_request_set_tfm(req, sctx->fallback.blk);
-	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-
-	ret = crypto_skcipher_encrypt(req);
-	return ret;
-}
-
-static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int ecb_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 	unsigned long fc;
 
 	/* Pick the correct function code based on the key length */
@@ -247,112 +212,92 @@
 	/* Check if the function code is available */
 	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
 	if (!sctx->fc)
-		return setkey_fallback_blk(tfm, in_key, key_len);
+		return setkey_fallback_skcipher(tfm, in_key, key_len);
 
 	sctx->key_len = key_len;
 	memcpy(sctx->key, in_key, key_len);
 	return 0;
 }
 
-static int ecb_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-			 struct blkcipher_walk *walk)
+static int ecb_aes_crypt(struct skcipher_request *req, unsigned long modifier)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	unsigned int nbytes, n;
 	int ret;
 
-	ret = blkcipher_walk_virt(desc, walk);
-	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+	if (unlikely(!sctx->fc))
+		return fallback_skcipher_crypt(sctx, req, modifier);
+
+	ret = skcipher_walk_virt(&walk, req, false);
+	while ((nbytes = walk.nbytes) != 0) {
 		/* only use complete blocks */
 		n = nbytes & ~(AES_BLOCK_SIZE - 1);
 		cpacf_km(sctx->fc | modifier, sctx->key,
-			 walk->dst.virt.addr, walk->src.virt.addr, n);
-		ret = blkcipher_walk_done(desc, walk, nbytes - n);
+			 walk.dst.virt.addr, walk.src.virt.addr, n);
+		ret = skcipher_walk_done(&walk, nbytes - n);
 	}
-
 	return ret;
 }
 
-static int ecb_aes_encrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int ecb_aes_encrypt(struct skcipher_request *req)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_enc(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ecb_aes_crypt(desc, 0, &walk);
+	return ecb_aes_crypt(req, 0);
 }
 
-static int ecb_aes_decrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int ecb_aes_decrypt(struct skcipher_request *req)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_dec(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ecb_aes_crypt(desc, CPACF_DECRYPT, &walk);
+	return ecb_aes_crypt(req, CPACF_DECRYPT);
 }
 
-static int fallback_init_blk(struct crypto_tfm *tfm)
+static int fallback_init_skcipher(struct crypto_skcipher *tfm)
 {
-	const char *name = tfm->__crt_alg->cra_name;
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	const char *name = crypto_tfm_alg_name(&tfm->base);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 
-	sctx->fallback.blk = crypto_alloc_skcipher(name, 0,
-						   CRYPTO_ALG_ASYNC |
-						   CRYPTO_ALG_NEED_FALLBACK);
+	sctx->fallback.skcipher = crypto_alloc_skcipher(name, 0,
+				CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);
 
-	if (IS_ERR(sctx->fallback.blk)) {
+	if (IS_ERR(sctx->fallback.skcipher)) {
 		pr_err("Allocating AES fallback algorithm %s failed\n",
 		       name);
-		return PTR_ERR(sctx->fallback.blk);
+		return PTR_ERR(sctx->fallback.skcipher);
 	}
 
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
+				    crypto_skcipher_reqsize(sctx->fallback.skcipher));
 	return 0;
 }
 
-static void fallback_exit_blk(struct crypto_tfm *tfm)
+static void fallback_exit_skcipher(struct crypto_skcipher *tfm)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 
-	crypto_free_skcipher(sctx->fallback.blk);
+	crypto_free_skcipher(sctx->fallback.skcipher);
 }
 
-static struct crypto_alg ecb_aes_alg = {
-	.cra_name		= "ecb(aes)",
-	.cra_driver_name	= "ecb-aes-s390",
-	.cra_priority		= 401,	/* combo: aes + ecb + 1 */
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= fallback_init_blk,
-	.cra_exit		= fallback_exit_blk,
-	.cra_u			= {
-		.blkcipher = {
-			.min_keysize		= AES_MIN_KEY_SIZE,
-			.max_keysize		= AES_MAX_KEY_SIZE,
-			.setkey			= ecb_aes_set_key,
-			.encrypt		= ecb_aes_encrypt,
-			.decrypt		= ecb_aes_decrypt,
-		}
-	}
+static struct skcipher_alg ecb_aes_alg = {
+	.base.cra_name		= "ecb(aes)",
+	.base.cra_driver_name	= "ecb-aes-s390",
+	.base.cra_priority	= 401,	/* combo: aes + ecb + 1 */
+	.base.cra_flags		= CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct s390_aes_ctx),
+	.base.cra_module	= THIS_MODULE,
+	.init			= fallback_init_skcipher,
+	.exit			= fallback_exit_skcipher,
+	.min_keysize		= AES_MIN_KEY_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.setkey			= ecb_aes_set_key,
+	.encrypt		= ecb_aes_encrypt,
+	.decrypt		= ecb_aes_decrypt,
 };
 
-static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int cbc_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 	unsigned long fc;
 
 	/* Pick the correct function code based on the key length */
@@ -363,17 +308,18 @@
 	/* Check if the function code is available */
 	sctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;
 	if (!sctx->fc)
-		return setkey_fallback_blk(tfm, in_key, key_len);
+		return setkey_fallback_skcipher(tfm, in_key, key_len);
 
 	sctx->key_len = key_len;
 	memcpy(sctx->key, in_key, key_len);
 	return 0;
 }
 
-static int cbc_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-			 struct blkcipher_walk *walk)
+static int cbc_aes_crypt(struct skcipher_request *req, unsigned long modifier)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	unsigned int nbytes, n;
 	int ret;
 	struct {
@@ -381,145 +327,80 @@
 		u8 key[AES_MAX_KEY_SIZE];
 	} param;
 
-	ret = blkcipher_walk_virt(desc, walk);
-	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
+	if (unlikely(!sctx->fc))
+		return fallback_skcipher_crypt(sctx, req, modifier);
+
+	ret = skcipher_walk_virt(&walk, req, false);
+	if (ret)
+		return ret;
+	memcpy(param.iv, walk.iv, AES_BLOCK_SIZE);
 	memcpy(param.key, sctx->key, sctx->key_len);
-	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+	while ((nbytes = walk.nbytes) != 0) {
 		/* only use complete blocks */
 		n = nbytes & ~(AES_BLOCK_SIZE - 1);
 		cpacf_kmc(sctx->fc | modifier, &param,
-			  walk->dst.virt.addr, walk->src.virt.addr, n);
-		ret = blkcipher_walk_done(desc, walk, nbytes - n);
+			  walk.dst.virt.addr, walk.src.virt.addr, n);
+		memcpy(walk.iv, param.iv, AES_BLOCK_SIZE);
+		ret = skcipher_walk_done(&walk, nbytes - n);
 	}
-	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
+	memzero_explicit(&param, sizeof(param));
 	return ret;
 }
 
-static int cbc_aes_encrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int cbc_aes_encrypt(struct skcipher_request *req)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_enc(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return cbc_aes_crypt(desc, 0, &walk);
+	return cbc_aes_crypt(req, 0);
 }
 
-static int cbc_aes_decrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int cbc_aes_decrypt(struct skcipher_request *req)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_dec(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return cbc_aes_crypt(desc, CPACF_DECRYPT, &walk);
+	return cbc_aes_crypt(req, CPACF_DECRYPT);
 }
 
-static struct crypto_alg cbc_aes_alg = {
-	.cra_name		= "cbc(aes)",
-	.cra_driver_name	= "cbc-aes-s390",
-	.cra_priority		= 402,	/* ecb-aes-s390 + 1 */
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= fallback_init_blk,
-	.cra_exit		= fallback_exit_blk,
-	.cra_u			= {
-		.blkcipher = {
-			.min_keysize		= AES_MIN_KEY_SIZE,
-			.max_keysize		= AES_MAX_KEY_SIZE,
-			.ivsize			= AES_BLOCK_SIZE,
-			.setkey			= cbc_aes_set_key,
-			.encrypt		= cbc_aes_encrypt,
-			.decrypt		= cbc_aes_decrypt,
-		}
-	}
+static struct skcipher_alg cbc_aes_alg = {
+	.base.cra_name		= "cbc(aes)",
+	.base.cra_driver_name	= "cbc-aes-s390",
+	.base.cra_priority	= 402,	/* ecb-aes-s390 + 1 */
+	.base.cra_flags		= CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct s390_aes_ctx),
+	.base.cra_module	= THIS_MODULE,
+	.init			= fallback_init_skcipher,
+	.exit			= fallback_exit_skcipher,
+	.min_keysize		= AES_MIN_KEY_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.ivsize			= AES_BLOCK_SIZE,
+	.setkey			= cbc_aes_set_key,
+	.encrypt		= cbc_aes_encrypt,
+	.decrypt		= cbc_aes_decrypt,
 };
 
-static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
+static int xts_fallback_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			       unsigned int len)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
-	unsigned int ret;
+	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
 
 	crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
-	crypto_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
-						     CRYPTO_TFM_REQ_MASK);
-
-	ret = crypto_skcipher_setkey(xts_ctx->fallback, key, len);
-
-	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-	tfm->crt_flags |= crypto_skcipher_get_flags(xts_ctx->fallback) &
-			  CRYPTO_TFM_RES_MASK;
-
-	return ret;
+	crypto_skcipher_set_flags(xts_ctx->fallback,
+				  crypto_skcipher_get_flags(tfm) &
+				  CRYPTO_TFM_REQ_MASK);
+	return crypto_skcipher_setkey(xts_ctx->fallback, key, len);
 }
 
-static int xts_fallback_decrypt(struct blkcipher_desc *desc,
-				struct scatterlist *dst, struct scatterlist *src,
-				unsigned int nbytes)
-{
-	struct crypto_blkcipher *tfm = desc->tfm;
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
-	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
-	unsigned int ret;
-
-	skcipher_request_set_tfm(req, xts_ctx->fallback);
-	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-
-	ret = crypto_skcipher_decrypt(req);
-
-	skcipher_request_zero(req);
-	return ret;
-}
-
-static int xts_fallback_encrypt(struct blkcipher_desc *desc,
-				struct scatterlist *dst, struct scatterlist *src,
-				unsigned int nbytes)
-{
-	struct crypto_blkcipher *tfm = desc->tfm;
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
-	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
-	unsigned int ret;
-
-	skcipher_request_set_tfm(req, xts_ctx->fallback);
-	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-
-	ret = crypto_skcipher_encrypt(req);
-
-	skcipher_request_zero(req);
-	return ret;
-}
-
-static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int xts_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
 	unsigned long fc;
 	int err;
 
-	err = xts_check_key(tfm, in_key, key_len);
+	err = xts_fallback_setkey(tfm, in_key, key_len);
 	if (err)
 		return err;
 
 	/* In fips mode only 128 bit or 256 bit keys are valid */
-	if (fips_enabled && key_len != 32 && key_len != 64) {
-		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+	if (fips_enabled && key_len != 32 && key_len != 64)
 		return -EINVAL;
-	}
 
 	/* Pick the correct function code based on the key length */
 	fc = (key_len == 32) ? CPACF_KM_XTS_128 :
@@ -528,7 +409,7 @@
 	/* Check if the function code is available */
 	xts_ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
 	if (!xts_ctx->fc)
-		return xts_fallback_setkey(tfm, in_key, key_len);
+		return 0;
 
 	/* Split the XTS key into the two subkeys */
 	key_len = key_len / 2;
@@ -538,10 +419,11 @@
 	return 0;
 }
 
-static int xts_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-			 struct blkcipher_walk *walk)
+static int xts_aes_crypt(struct skcipher_request *req, unsigned long modifier)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	unsigned int offset, nbytes, n;
 	int ret;
 	struct {
@@ -556,114 +438,102 @@
 		u8 init[16];
 	} xts_param;
 
-	ret = blkcipher_walk_virt(desc, walk);
+	if (req->cryptlen < AES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (unlikely(!xts_ctx->fc || (req->cryptlen % AES_BLOCK_SIZE) != 0)) {
+		struct skcipher_request *subreq = skcipher_request_ctx(req);
+
+		*subreq = *req;
+		skcipher_request_set_tfm(subreq, xts_ctx->fallback);
+		return (modifier & CPACF_DECRYPT) ?
+			crypto_skcipher_decrypt(subreq) :
+			crypto_skcipher_encrypt(subreq);
+	}
+
+	ret = skcipher_walk_virt(&walk, req, false);
+	if (ret)
+		return ret;
 	offset = xts_ctx->key_len & 0x10;
 	memset(pcc_param.block, 0, sizeof(pcc_param.block));
 	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
 	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
-	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
+	memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak));
 	memcpy(pcc_param.key + offset, xts_ctx->pcc_key, xts_ctx->key_len);
 	cpacf_pcc(xts_ctx->fc, pcc_param.key + offset);
 
 	memcpy(xts_param.key + offset, xts_ctx->key, xts_ctx->key_len);
 	memcpy(xts_param.init, pcc_param.xts, 16);
 
-	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+	while ((nbytes = walk.nbytes) != 0) {
 		/* only use complete blocks */
 		n = nbytes & ~(AES_BLOCK_SIZE - 1);
 		cpacf_km(xts_ctx->fc | modifier, xts_param.key + offset,
-			 walk->dst.virt.addr, walk->src.virt.addr, n);
-		ret = blkcipher_walk_done(desc, walk, nbytes - n);
+			 walk.dst.virt.addr, walk.src.virt.addr, n);
+		ret = skcipher_walk_done(&walk, nbytes - n);
 	}
+	memzero_explicit(&pcc_param, sizeof(pcc_param));
+	memzero_explicit(&xts_param, sizeof(xts_param));
 	return ret;
 }
 
-static int xts_aes_encrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int xts_aes_encrypt(struct skcipher_request *req)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (!nbytes)
-		return -EINVAL;
-
-	if (unlikely(!xts_ctx->fc))
-		return xts_fallback_encrypt(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return xts_aes_crypt(desc, 0, &walk);
+	return xts_aes_crypt(req, 0);
 }
 
-static int xts_aes_decrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int xts_aes_decrypt(struct skcipher_request *req)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (!nbytes)
-		return -EINVAL;
-
-	if (unlikely(!xts_ctx->fc))
-		return xts_fallback_decrypt(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return xts_aes_crypt(desc, CPACF_DECRYPT, &walk);
+	return xts_aes_crypt(req, CPACF_DECRYPT);
 }
 
-static int xts_fallback_init(struct crypto_tfm *tfm)
+static int xts_fallback_init(struct crypto_skcipher *tfm)
 {
-	const char *name = tfm->__crt_alg->cra_name;
-	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+	const char *name = crypto_tfm_alg_name(&tfm->base);
+	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
 
 	xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
-						  CRYPTO_ALG_ASYNC |
-						  CRYPTO_ALG_NEED_FALLBACK);
+						  CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);
 
 	if (IS_ERR(xts_ctx->fallback)) {
 		pr_err("Allocating XTS fallback algorithm %s failed\n",
 		       name);
 		return PTR_ERR(xts_ctx->fallback);
 	}
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
+				    crypto_skcipher_reqsize(xts_ctx->fallback));
 	return 0;
 }
 
-static void xts_fallback_exit(struct crypto_tfm *tfm)
+static void xts_fallback_exit(struct crypto_skcipher *tfm)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
 
 	crypto_free_skcipher(xts_ctx->fallback);
 }
 
-static struct crypto_alg xts_aes_alg = {
-	.cra_name		= "xts(aes)",
-	.cra_driver_name	= "xts-aes-s390",
-	.cra_priority		= 402,	/* ecb-aes-s390 + 1 */
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct s390_xts_ctx),
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= xts_fallback_init,
-	.cra_exit		= xts_fallback_exit,
-	.cra_u			= {
-		.blkcipher = {
-			.min_keysize		= 2 * AES_MIN_KEY_SIZE,
-			.max_keysize		= 2 * AES_MAX_KEY_SIZE,
-			.ivsize			= AES_BLOCK_SIZE,
-			.setkey			= xts_aes_set_key,
-			.encrypt		= xts_aes_encrypt,
-			.decrypt		= xts_aes_decrypt,
-		}
-	}
+static struct skcipher_alg xts_aes_alg = {
+	.base.cra_name		= "xts(aes)",
+	.base.cra_driver_name	= "xts-aes-s390",
+	.base.cra_priority	= 402,	/* ecb-aes-s390 + 1 */
+	.base.cra_flags		= CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct s390_xts_ctx),
+	.base.cra_module	= THIS_MODULE,
+	.init			= xts_fallback_init,
+	.exit			= xts_fallback_exit,
+	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
+	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
+	.setkey			= xts_aes_set_key,
+	.encrypt		= xts_aes_encrypt,
+	.decrypt		= xts_aes_decrypt,
 };
 
-static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int ctr_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 	unsigned long fc;
 
 	/* Pick the correct function code based on the key length */
@@ -674,7 +544,7 @@
 	/* Check if the function code is available */
 	sctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;
 	if (!sctx->fc)
-		return setkey_fallback_blk(tfm, in_key, key_len);
+		return setkey_fallback_skcipher(tfm, in_key, key_len);
 
 	sctx->key_len = key_len;
 	memcpy(sctx->key, in_key, key_len);
@@ -696,30 +566,34 @@
 	return n;
 }
 
-static int ctr_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-			 struct blkcipher_walk *walk)
+static int ctr_aes_crypt(struct skcipher_request *req)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 	u8 buf[AES_BLOCK_SIZE], *ctrptr;
+	struct skcipher_walk walk;
 	unsigned int n, nbytes;
 	int ret, locked;
 
+	if (unlikely(!sctx->fc))
+		return fallback_skcipher_crypt(sctx, req, 0);
+
 	locked = mutex_trylock(&ctrblk_lock);
 
-	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
-	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+	ret = skcipher_walk_virt(&walk, req, false);
+	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
 		n = AES_BLOCK_SIZE;
+
 		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
-			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
-		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
-		cpacf_kmctr(sctx->fc | modifier, sctx->key,
-			    walk->dst.virt.addr, walk->src.virt.addr,
-			    n, ctrptr);
+			n = __ctrblk_init(ctrblk, walk.iv, nbytes);
+		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;
+		cpacf_kmctr(sctx->fc, sctx->key, walk.dst.virt.addr,
+			    walk.src.virt.addr, n, ctrptr);
 		if (ctrptr == ctrblk)
-			memcpy(walk->iv, ctrptr + n - AES_BLOCK_SIZE,
+			memcpy(walk.iv, ctrptr + n - AES_BLOCK_SIZE,
 			       AES_BLOCK_SIZE);
-		crypto_inc(walk->iv, AES_BLOCK_SIZE);
-		ret = blkcipher_walk_done(desc, walk, nbytes - n);
+		crypto_inc(walk.iv, AES_BLOCK_SIZE);
+		ret = skcipher_walk_done(&walk, nbytes - n);
 	}
 	if (locked)
 		mutex_unlock(&ctrblk_lock);
@@ -727,67 +601,33 @@
 	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
 	 */
 	if (nbytes) {
-		cpacf_kmctr(sctx->fc | modifier, sctx->key,
-			    buf, walk->src.virt.addr,
-			    AES_BLOCK_SIZE, walk->iv);
-		memcpy(walk->dst.virt.addr, buf, nbytes);
-		crypto_inc(walk->iv, AES_BLOCK_SIZE);
-		ret = blkcipher_walk_done(desc, walk, 0);
+		cpacf_kmctr(sctx->fc, sctx->key, buf, walk.src.virt.addr,
+			    AES_BLOCK_SIZE, walk.iv);
+		memcpy(walk.dst.virt.addr, buf, nbytes);
+		crypto_inc(walk.iv, AES_BLOCK_SIZE);
+		ret = skcipher_walk_done(&walk, 0);
 	}
 
 	return ret;
 }
 
-static int ctr_aes_encrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
-{
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_enc(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ctr_aes_crypt(desc, 0, &walk);
-}
-
-static int ctr_aes_decrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
-{
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_dec(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ctr_aes_crypt(desc, CPACF_DECRYPT, &walk);
-}
-
-static struct crypto_alg ctr_aes_alg = {
-	.cra_name		= "ctr(aes)",
-	.cra_driver_name	= "ctr-aes-s390",
-	.cra_priority		= 402,	/* ecb-aes-s390 + 1 */
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= 1,
-	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= fallback_init_blk,
-	.cra_exit		= fallback_exit_blk,
-	.cra_u			= {
-		.blkcipher = {
-			.min_keysize		= AES_MIN_KEY_SIZE,
-			.max_keysize		= AES_MAX_KEY_SIZE,
-			.ivsize			= AES_BLOCK_SIZE,
-			.setkey			= ctr_aes_set_key,
-			.encrypt		= ctr_aes_encrypt,
-			.decrypt		= ctr_aes_decrypt,
-		}
-	}
+static struct skcipher_alg ctr_aes_alg = {
+	.base.cra_name		= "ctr(aes)",
+	.base.cra_driver_name	= "ctr-aes-s390",
+	.base.cra_priority	= 402,	/* ecb-aes-s390 + 1 */
+	.base.cra_flags		= CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= 1,
+	.base.cra_ctxsize	= sizeof(struct s390_aes_ctx),
+	.base.cra_module	= THIS_MODULE,
+	.init			= fallback_init_skcipher,
+	.exit			= fallback_exit_skcipher,
+	.min_keysize		= AES_MIN_KEY_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.ivsize			= AES_BLOCK_SIZE,
+	.setkey			= ctr_aes_set_key,
+	.encrypt		= ctr_aes_crypt,
+	.decrypt		= ctr_aes_crypt,
+	.chunksize		= AES_BLOCK_SIZE,
 };
 
 static int gcm_aes_setkey(struct crypto_aead *tfm, const u8 *key,
@@ -861,7 +701,7 @@
 				    unsigned int nbytes)
 {
 	gw->walk_bytes_remain -= nbytes;
-	scatterwalk_unmap(&gw->walk);
+	scatterwalk_unmap(gw->walk_ptr);
 	scatterwalk_advance(&gw->walk, nbytes);
 	scatterwalk_done(&gw->walk, 0, gw->walk_bytes_remain);
 	gw->walk_ptr = NULL;
@@ -936,7 +776,7 @@
 		goto out;
 	}
 
-	scatterwalk_unmap(&gw->walk);
+	scatterwalk_unmap(gw->walk_ptr);
 	gw->walk_ptr = NULL;
 
 	gw->ptr = gw->buf;
@@ -1116,24 +956,27 @@
 	},
 };
 
-static struct crypto_alg *aes_s390_algs_ptr[5];
-static int aes_s390_algs_num;
+static struct crypto_alg *aes_s390_alg;
+static struct skcipher_alg *aes_s390_skcipher_algs[4];
+static int aes_s390_skciphers_num;
 static struct aead_alg *aes_s390_aead_alg;
 
-static int aes_s390_register_alg(struct crypto_alg *alg)
+static int aes_s390_register_skcipher(struct skcipher_alg *alg)
 {
 	int ret;
 
-	ret = crypto_register_alg(alg);
+	ret = crypto_register_skcipher(alg);
 	if (!ret)
-		aes_s390_algs_ptr[aes_s390_algs_num++] = alg;
+		aes_s390_skcipher_algs[aes_s390_skciphers_num++] = alg;
 	return ret;
 }
 
 static void aes_s390_fini(void)
 {
-	while (aes_s390_algs_num--)
-		crypto_unregister_alg(aes_s390_algs_ptr[aes_s390_algs_num]);
+	if (aes_s390_alg)
+		crypto_unregister_alg(aes_s390_alg);
+	while (aes_s390_skciphers_num--)
+		crypto_unregister_skcipher(aes_s390_skcipher_algs[aes_s390_skciphers_num]);
 	if (ctrblk)
 		free_page((unsigned long) ctrblk);
 
@@ -1154,10 +997,11 @@
 	if (cpacf_test_func(&km_functions, CPACF_KM_AES_128) ||
 	    cpacf_test_func(&km_functions, CPACF_KM_AES_192) ||
 	    cpacf_test_func(&km_functions, CPACF_KM_AES_256)) {
-		ret = aes_s390_register_alg(&aes_alg);
+		ret = crypto_register_alg(&aes_alg);
 		if (ret)
 			goto out_err;
-		ret = aes_s390_register_alg(&ecb_aes_alg);
+		aes_s390_alg = &aes_alg;
+		ret = aes_s390_register_skcipher(&ecb_aes_alg);
 		if (ret)
 			goto out_err;
 	}
@@ -1165,14 +1009,14 @@
 	if (cpacf_test_func(&kmc_functions, CPACF_KMC_AES_128) ||
 	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_192) ||
 	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_256)) {
-		ret = aes_s390_register_alg(&cbc_aes_alg);
+		ret = aes_s390_register_skcipher(&cbc_aes_alg);
 		if (ret)
 			goto out_err;
 	}
 
 	if (cpacf_test_func(&km_functions, CPACF_KM_XTS_128) ||
 	    cpacf_test_func(&km_functions, CPACF_KM_XTS_256)) {
-		ret = aes_s390_register_alg(&xts_aes_alg);
+		ret = aes_s390_register_skcipher(&xts_aes_alg);
 		if (ret)
 			goto out_err;
 	}
@@ -1185,7 +1029,7 @@
 			ret = -ENOMEM;
 			goto out_err;
 		}
-		ret = aes_s390_register_alg(&ctr_aes_alg);
+		ret = aes_s390_register_skcipher(&ctr_aes_alg);
 		if (ret)
 			goto out_err;
 	}
@@ -1212,3 +1056,4 @@
 
 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
 MODULE_LICENSE("GPL");
+MODULE_IMPORT_NS(CRYPTO_INTERNAL);
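
For context, not part of the patch above: after this conversion the ECB, CBC, XTS and CTR modes register as skcipher algorithms, so in-kernel callers reach "ecb-aes-s390", "cbc-aes-s390", etc. through the generic skcipher API rather than the removed blkcipher interface. The sketch below shows how such a caller might encrypt a buffer with "cbc(aes)" using the synchronous skcipher helpers; the function name, buffer handling and error paths are illustrative assumptions, not code from this file.

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

/*
 * Illustrative only: CBC-encrypt a linear (kmalloc'd) buffer in place.
 * len must be a multiple of AES_BLOCK_SIZE; iv is AES_BLOCK_SIZE bytes.
 * The allocator resolves "cbc(aes)" to cbc-aes-s390 when CPACF supports
 * the key length, otherwise to a software implementation.
 */
static int example_cbc_encrypt(u8 *buf, unsigned int len,
			       const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_sync_skcipher *tfm;
	struct scatterlist sg;
	int ret;

	tfm = crypto_alloc_sync_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_sync_skcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out;

	{
		/* request lives on the stack; tied to tfm by the macro */
		SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);

		sg_init_one(&sg, buf, len);
		skcipher_request_set_callback(req, 0, NULL, NULL);
		skcipher_request_set_crypt(req, &sg, &sg, len, iv);
		ret = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	}
out:
	crypto_free_sync_skcipher(tfm);
	return ret;
}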