2024-05-10 37f49e37ab4cb5d0bc4c60eb5c6d4dd57db767bb
kernel/drivers/crypto/padlock-aes.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * Cryptographic API.
  *
@@ -9,6 +10,7 @@
 
 #include <crypto/algapi.h>
 #include <crypto/aes.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/padlock.h>
 #include <linux/module.h>
 #include <linux/init.h>
@@ -16,6 +18,7 @@
 #include <linux/errno.h>
 #include <linux/interrupt.h>
 #include <linux/kernel.h>
+#include <linux/mm.h>
 #include <linux/percpu.h>
 #include <linux/smp.h>
 #include <linux/slab.h>
@@ -96,9 +99,9 @@
         return aes_ctx_common(crypto_tfm_ctx(tfm));
 }
 
-static inline struct aes_ctx *blk_aes_ctx(struct crypto_blkcipher *tfm)
+static inline struct aes_ctx *skcipher_aes_ctx(struct crypto_skcipher *tfm)
 {
-        return aes_ctx_common(crypto_blkcipher_ctx(tfm));
+        return aes_ctx_common(crypto_skcipher_ctx(tfm));
 }
 
 static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
@@ -106,14 +109,11 @@
 {
         struct aes_ctx *ctx = aes_ctx(tfm);
         const __le32 *key = (const __le32 *)in_key;
-        u32 *flags = &tfm->crt_flags;
         struct crypto_aes_ctx gen_aes;
         int cpu;
 
-        if (key_len % 8) {
-                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+        if (key_len % 8)
                 return -EINVAL;
-        }
 
         /*
          * If the hardware is capable of generating the extended key
@@ -144,10 +144,8 @@
         ctx->cword.encrypt.keygen = 1;
         ctx->cword.decrypt.keygen = 1;
 
-        if (crypto_aes_expand_key(&gen_aes, in_key, key_len)) {
-                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+        if (aes_expandkey(&gen_aes, in_key, key_len))
                 return -EINVAL;
-        }
 
         memcpy(ctx->E, gen_aes.key_enc, AES_MAX_KEYLENGTH);
         memcpy(ctx->D, gen_aes.key_dec, AES_MAX_KEYLENGTH);
@@ -159,6 +157,12 @@
                         per_cpu(paes_last_cword, cpu) = NULL;
 
         return 0;
+}
+
+static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
+                                unsigned int key_len)
+{
+        return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
 }
 
 /* ====== Encryption/decryption routines ====== */
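
For context (not part of the diff): the aes_set_key_skcipher() wrapper added in
this hunk exists because struct skcipher_alg expects its setkey hook to take a
struct crypto_skcipher, while the legacy "aes" cipher kept by this driver still
passes a bare struct crypto_tfm. A minimal sketch of the hook prototype the
skcipher API declares in <crypto/skcipher.h>:

    int (*setkey)(struct crypto_skcipher *tfm, const u8 *key,
                  unsigned int keylen);

crypto_skcipher_tfm() unwraps the skcipher handle back to the underlying
crypto_tfm, so both entry points can share the one key-expansion routine.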
@@ -299,7 +303,7 @@
         return iv;
 }
 
-static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void padlock_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
         struct aes_ctx *ctx = aes_ctx(tfm);
 
@@ -308,7 +312,7 @@
         padlock_store_cword(&ctx->cword.encrypt);
 }
 
-static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void padlock_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
         struct aes_ctx *ctx = aes_ctx(tfm);
 
@@ -331,31 +335,30 @@
                         .cia_min_keysize = AES_MIN_KEY_SIZE,
                         .cia_max_keysize = AES_MAX_KEY_SIZE,
                         .cia_setkey = aes_set_key,
-                        .cia_encrypt = aes_encrypt,
-                        .cia_decrypt = aes_decrypt,
+                        .cia_encrypt = padlock_aes_encrypt,
+                        .cia_decrypt = padlock_aes_decrypt,
                 }
         }
 };
 
-static int ecb_aes_encrypt(struct blkcipher_desc *desc,
-                           struct scatterlist *dst, struct scatterlist *src,
-                           unsigned int nbytes)
+static int ecb_aes_encrypt(struct skcipher_request *req)
 {
-        struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
-        struct blkcipher_walk walk;
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct aes_ctx *ctx = skcipher_aes_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         int err;
 
         padlock_reset_key(&ctx->cword.encrypt);
 
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        err = blkcipher_walk_virt(desc, &walk);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        while ((nbytes = walk.nbytes)) {
+        while ((nbytes = walk.nbytes) != 0) {
                 padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,
                                    ctx->E, &ctx->cword.encrypt,
                                    nbytes / AES_BLOCK_SIZE);
                 nbytes &= AES_BLOCK_SIZE - 1;
-                err = blkcipher_walk_done(desc, &walk, nbytes);
+                err = skcipher_walk_done(&walk, nbytes);
         }
 
         padlock_store_cword(&ctx->cword.encrypt);
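
For context: each converted helper follows the standard skcipher_walk pattern.
A condensed sketch of that loop shape, with my_hw_crypt() as a hypothetical
stand-in for the PadLock primitives (padlock_xcrypt_ecb()/padlock_xcrypt_cbc()):

    static int my_skcipher_crypt(struct skcipher_request *req)
    {
            struct skcipher_walk walk;
            unsigned int nbytes;
            int err;

            /* atomic == false: the walk may sleep between steps */
            err = skcipher_walk_virt(&walk, req, false);

            while ((nbytes = walk.nbytes) != 0) {
                    /* process the whole blocks this mapping exposes */
                    my_hw_crypt(walk.src.virt.addr, walk.dst.virt.addr,
                                nbytes / AES_BLOCK_SIZE);
                    /* hand any partial tail back to the walker */
                    err = skcipher_walk_done(&walk,
                                             nbytes & (AES_BLOCK_SIZE - 1));
            }
            return err;
    }

Unlike the removed blkcipher code, the walk is initialized directly from the
request, which is why the explicit blkcipher_walk_init() call disappears.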
@@ -363,25 +366,24 @@
         return err;
 }
 
-static int ecb_aes_decrypt(struct blkcipher_desc *desc,
-                           struct scatterlist *dst, struct scatterlist *src,
-                           unsigned int nbytes)
+static int ecb_aes_decrypt(struct skcipher_request *req)
 {
-        struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
-        struct blkcipher_walk walk;
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct aes_ctx *ctx = skcipher_aes_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         int err;
 
         padlock_reset_key(&ctx->cword.decrypt);
 
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        err = blkcipher_walk_virt(desc, &walk);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        while ((nbytes = walk.nbytes)) {
+        while ((nbytes = walk.nbytes) != 0) {
                 padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,
                                    ctx->D, &ctx->cword.decrypt,
                                    nbytes / AES_BLOCK_SIZE);
                 nbytes &= AES_BLOCK_SIZE - 1;
-                err = blkcipher_walk_done(desc, &walk, nbytes);
+                err = skcipher_walk_done(&walk, nbytes);
         }
 
         padlock_store_cword(&ctx->cword.encrypt);
@@ -389,48 +391,41 @@
         return err;
 }
 
-static struct crypto_alg ecb_aes_alg = {
-        .cra_name = "ecb(aes)",
-        .cra_driver_name = "ecb-aes-padlock",
-        .cra_priority = PADLOCK_COMPOSITE_PRIORITY,
-        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
-        .cra_blocksize = AES_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct aes_ctx),
-        .cra_alignmask = PADLOCK_ALIGNMENT - 1,
-        .cra_type = &crypto_blkcipher_type,
-        .cra_module = THIS_MODULE,
-        .cra_u = {
-                .blkcipher = {
-                        .min_keysize = AES_MIN_KEY_SIZE,
-                        .max_keysize = AES_MAX_KEY_SIZE,
-                        .setkey = aes_set_key,
-                        .encrypt = ecb_aes_encrypt,
-                        .decrypt = ecb_aes_decrypt,
-                }
-        }
+static struct skcipher_alg ecb_aes_alg = {
+        .base.cra_name = "ecb(aes)",
+        .base.cra_driver_name = "ecb-aes-padlock",
+        .base.cra_priority = PADLOCK_COMPOSITE_PRIORITY,
+        .base.cra_blocksize = AES_BLOCK_SIZE,
+        .base.cra_ctxsize = sizeof(struct aes_ctx),
+        .base.cra_alignmask = PADLOCK_ALIGNMENT - 1,
+        .base.cra_module = THIS_MODULE,
+        .min_keysize = AES_MIN_KEY_SIZE,
+        .max_keysize = AES_MAX_KEY_SIZE,
+        .setkey = aes_set_key_skcipher,
+        .encrypt = ecb_aes_encrypt,
+        .decrypt = ecb_aes_decrypt,
 };
 
-static int cbc_aes_encrypt(struct blkcipher_desc *desc,
-                           struct scatterlist *dst, struct scatterlist *src,
-                           unsigned int nbytes)
+static int cbc_aes_encrypt(struct skcipher_request *req)
 {
-        struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
-        struct blkcipher_walk walk;
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct aes_ctx *ctx = skcipher_aes_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         int err;
 
         padlock_reset_key(&ctx->cword.encrypt);
 
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        err = blkcipher_walk_virt(desc, &walk);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        while ((nbytes = walk.nbytes)) {
+        while ((nbytes = walk.nbytes) != 0) {
                 u8 *iv = padlock_xcrypt_cbc(walk.src.virt.addr,
                                             walk.dst.virt.addr, ctx->E,
                                             walk.iv, &ctx->cword.encrypt,
                                             nbytes / AES_BLOCK_SIZE);
                 memcpy(walk.iv, iv, AES_BLOCK_SIZE);
                 nbytes &= AES_BLOCK_SIZE - 1;
-                err = blkcipher_walk_done(desc, &walk, nbytes);
+                err = skcipher_walk_done(&walk, nbytes);
         }
 
         padlock_store_cword(&ctx->cword.decrypt);
@@ -438,25 +433,24 @@
         return err;
 }
 
-static int cbc_aes_decrypt(struct blkcipher_desc *desc,
-                           struct scatterlist *dst, struct scatterlist *src,
-                           unsigned int nbytes)
+static int cbc_aes_decrypt(struct skcipher_request *req)
 {
-        struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
-        struct blkcipher_walk walk;
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct aes_ctx *ctx = skcipher_aes_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         int err;
 
         padlock_reset_key(&ctx->cword.encrypt);
 
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        err = blkcipher_walk_virt(desc, &walk);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        while ((nbytes = walk.nbytes)) {
+        while ((nbytes = walk.nbytes) != 0) {
                 padlock_xcrypt_cbc(walk.src.virt.addr, walk.dst.virt.addr,
                                    ctx->D, walk.iv, &ctx->cword.decrypt,
                                    nbytes / AES_BLOCK_SIZE);
                 nbytes &= AES_BLOCK_SIZE - 1;
-                err = blkcipher_walk_done(desc, &walk, nbytes);
+                err = skcipher_walk_done(&walk, nbytes);
         }
 
         padlock_store_cword(&ctx->cword.encrypt);
@@ -464,30 +458,24 @@
         return err;
 }
 
-static struct crypto_alg cbc_aes_alg = {
-        .cra_name = "cbc(aes)",
-        .cra_driver_name = "cbc-aes-padlock",
-        .cra_priority = PADLOCK_COMPOSITE_PRIORITY,
-        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
-        .cra_blocksize = AES_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct aes_ctx),
-        .cra_alignmask = PADLOCK_ALIGNMENT - 1,
-        .cra_type = &crypto_blkcipher_type,
-        .cra_module = THIS_MODULE,
-        .cra_u = {
-                .blkcipher = {
-                        .min_keysize = AES_MIN_KEY_SIZE,
-                        .max_keysize = AES_MAX_KEY_SIZE,
-                        .ivsize = AES_BLOCK_SIZE,
-                        .setkey = aes_set_key,
-                        .encrypt = cbc_aes_encrypt,
-                        .decrypt = cbc_aes_decrypt,
-                }
-        }
+static struct skcipher_alg cbc_aes_alg = {
+        .base.cra_name = "cbc(aes)",
+        .base.cra_driver_name = "cbc-aes-padlock",
+        .base.cra_priority = PADLOCK_COMPOSITE_PRIORITY,
+        .base.cra_blocksize = AES_BLOCK_SIZE,
+        .base.cra_ctxsize = sizeof(struct aes_ctx),
+        .base.cra_alignmask = PADLOCK_ALIGNMENT - 1,
+        .base.cra_module = THIS_MODULE,
+        .min_keysize = AES_MIN_KEY_SIZE,
+        .max_keysize = AES_MAX_KEY_SIZE,
+        .ivsize = AES_BLOCK_SIZE,
+        .setkey = aes_set_key_skcipher,
+        .encrypt = cbc_aes_encrypt,
+        .decrypt = cbc_aes_decrypt,
 };
 
 static const struct x86_cpu_id padlock_cpu_id[] = {
-        X86_FEATURE_MATCH(X86_FEATURE_XCRYPT),
+        X86_MATCH_FEATURE(X86_FEATURE_XCRYPT, NULL),
         {}
 };
 MODULE_DEVICE_TABLE(x86cpu, padlock_cpu_id);
@@ -505,13 +493,13 @@
                 return -ENODEV;
         }
 
-        if ((ret = crypto_register_alg(&aes_alg)))
+        if ((ret = crypto_register_alg(&aes_alg)) != 0)
                 goto aes_err;
 
-        if ((ret = crypto_register_alg(&ecb_aes_alg)))
+        if ((ret = crypto_register_skcipher(&ecb_aes_alg)) != 0)
                 goto ecb_aes_err;
 
-        if ((ret = crypto_register_alg(&cbc_aes_alg)))
+        if ((ret = crypto_register_skcipher(&cbc_aes_alg)) != 0)
                 goto cbc_aes_err;
 
         printk(KERN_NOTICE PFX "Using VIA PadLock ACE for AES algorithm.\n");
@@ -526,7 +514,7 @@
         return ret;
 
 cbc_aes_err:
-        crypto_unregister_alg(&ecb_aes_alg);
+        crypto_unregister_skcipher(&ecb_aes_alg);
 ecb_aes_err:
         crypto_unregister_alg(&aes_alg);
 aes_err:
@@ -536,8 +524,8 @@
 
 static void __exit padlock_fini(void)
 {
-        crypto_unregister_alg(&cbc_aes_alg);
-        crypto_unregister_alg(&ecb_aes_alg);
+        crypto_unregister_skcipher(&cbc_aes_alg);
+        crypto_unregister_skcipher(&ecb_aes_alg);
         crypto_unregister_alg(&aes_alg);
 }
 
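
For context: with both modes now registered through crypto_register_skcipher(),
in-kernel users reach this driver via the generic skcipher API rather than the
removed blkcipher one. A hedged sketch of a synchronous one-shot caller (error
handling elided; key, buf and buflen are illustrative placeholders, with buflen
assumed to be a multiple of AES_BLOCK_SIZE):

    #include <crypto/aes.h>
    #include <crypto/skcipher.h>
    #include <linux/crypto.h>
    #include <linux/scatterlist.h>

    struct crypto_skcipher *tfm;
    struct skcipher_request *req;
    struct scatterlist sg;
    DECLARE_CRYPTO_WAIT(wait);
    u8 iv[AES_BLOCK_SIZE];

    /* may resolve to cbc-aes-padlock when the ACE is present and preferred */
    tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
    crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);

    req = skcipher_request_alloc(tfm, GFP_KERNEL);
    sg_init_one(&sg, buf, buflen);          /* in-place: src == dst */
    skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                                  crypto_req_done, &wait);
    skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
    crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

    skcipher_request_free(req);
    crypto_free_skcipher(tfm);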