...
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * Cryptographic API.
  *
...
 
 #include <crypto/algapi.h>
 #include <crypto/aes.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/padlock.h>
 #include <linux/module.h>
 #include <linux/init.h>
...
 #include <linux/errno.h>
 #include <linux/interrupt.h>
 #include <linux/kernel.h>
+#include <linux/mm.h>
 #include <linux/percpu.h>
 #include <linux/smp.h>
 #include <linux/slab.h>
...
         return aes_ctx_common(crypto_tfm_ctx(tfm));
 }
 
-static inline struct aes_ctx *blk_aes_ctx(struct crypto_blkcipher *tfm)
+static inline struct aes_ctx *skcipher_aes_ctx(struct crypto_skcipher *tfm)
 {
-        return aes_ctx_common(crypto_blkcipher_ctx(tfm));
+        return aes_ctx_common(crypto_skcipher_ctx(tfm));
 }
 
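Both accessors funnel through aes_ctx_common(), which rounds the context pointer up to PADLOCK_ALIGNMENT because the PadLock xcrypt instructions require 16-byte-aligned operands. A minimal sketch of that alignment idiom (the helper name here is hypothetical; the driver's real helper also takes the tfm's natural context alignment into account):

    /* Hypothetical sketch: round a context pointer up to the 16-byte
     * boundary the PadLock xcrypt instructions require. */
    static inline struct aes_ctx *padlock_align_ctx(void *ctx)
    {
            return (struct aes_ctx *)ALIGN((unsigned long)ctx,
                                           PADLOCK_ALIGNMENT);
    }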
 static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
...
 {
         struct aes_ctx *ctx = aes_ctx(tfm);
         const __le32 *key = (const __le32 *)in_key;
-        u32 *flags = &tfm->crt_flags;
         struct crypto_aes_ctx gen_aes;
         int cpu;
 
-        if (key_len % 8) {
-                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+        if (key_len % 8)
                 return -EINVAL;
-        }
 
         /*
          * If the hardware is capable of generating the extended key
...
         ctx->cword.encrypt.keygen = 1;
         ctx->cword.decrypt.keygen = 1;
 
-        if (crypto_aes_expand_key(&gen_aes, in_key, key_len)) {
-                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+        if (aes_expandkey(&gen_aes, in_key, key_len))
                 return -EINVAL;
-        }
 
         memcpy(ctx->E, gen_aes.key_enc, AES_MAX_KEYLENGTH);
         memcpy(ctx->D, gen_aes.key_dec, AES_MAX_KEYLENGTH);
...
         per_cpu(paes_last_cword, cpu) = NULL;
 
         return 0;
+}
+
+static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
+                                unsigned int key_len)
+{
+        return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
 }
 
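The legacy API reported bad keys by setting CRYPTO_TFM_RES_BAD_KEY_LEN in tfm->crt_flags on top of returning -EINVAL; the modern API uses the return value alone, so the flag writes go away. crypto_aes_expand_key() is likewise replaced by the AES library's aes_expandkey(), which has the same contract (nonzero on an invalid key length). aes_set_key_skcipher() is only a thin wrapper so that one key-scheduling routine can back both the bare "aes" cipher and the two skcipher modes. A caller-side sketch (function name hypothetical, synchronous tfm assumed):

    /* With the skcipher API a bad key is reported purely through the
     * return value; there is no tfm result flag left to check. */
    static int example_setkey(struct crypto_skcipher *tfm)
    {
            static const u8 key[AES_KEYSIZE_128];

            return crypto_skcipher_setkey(tfm, key, sizeof(key));
    }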
 /* ====== Encryption/decryption routines ====== */
...
         return iv;
 }
 
-static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void padlock_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
         struct aes_ctx *ctx = aes_ctx(tfm);
 
...
         padlock_store_cword(&ctx->cword.encrypt);
 }
 
-static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void padlock_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
         struct aes_ctx *ctx = aes_ctx(tfm);
 
...
                         .cia_min_keysize = AES_MIN_KEY_SIZE,
                         .cia_max_keysize = AES_MAX_KEY_SIZE,
                         .cia_setkey = aes_set_key,
-                        .cia_encrypt = aes_encrypt,
-                        .cia_decrypt = aes_decrypt,
+                        .cia_encrypt = padlock_aes_encrypt,
+                        .cia_decrypt = padlock_aes_decrypt,
                 }
         }
 };
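The rename to padlock_aes_encrypt()/padlock_aes_decrypt() avoids a symbol clash: <crypto/aes.h> declares AES library routines of the same names. For reference, the colliding library declarations (as found in mainline around the time of this conversion):

    void aes_encrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);
    void aes_decrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);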
 
-static int ecb_aes_encrypt(struct blkcipher_desc *desc,
-                           struct scatterlist *dst, struct scatterlist *src,
-                           unsigned int nbytes)
+static int ecb_aes_encrypt(struct skcipher_request *req)
 {
-        struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
-        struct blkcipher_walk walk;
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct aes_ctx *ctx = skcipher_aes_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         int err;
 
         padlock_reset_key(&ctx->cword.encrypt);
 
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        err = blkcipher_walk_virt(desc, &walk);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        while ((nbytes = walk.nbytes)) {
+        while ((nbytes = walk.nbytes) != 0) {
                 padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,
                                    ctx->E, &ctx->cword.encrypt,
                                    nbytes / AES_BLOCK_SIZE);
                 nbytes &= AES_BLOCK_SIZE - 1;
-                err = blkcipher_walk_done(desc, &walk, nbytes);
+                err = skcipher_walk_done(&walk, nbytes);
         }
 
         padlock_store_cword(&ctx->cword.encrypt);
...
         return err;
 }
 
-static int ecb_aes_decrypt(struct blkcipher_desc *desc,
-                           struct scatterlist *dst, struct scatterlist *src,
-                           unsigned int nbytes)
+static int ecb_aes_decrypt(struct skcipher_request *req)
 {
-        struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
-        struct blkcipher_walk walk;
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct aes_ctx *ctx = skcipher_aes_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         int err;
 
         padlock_reset_key(&ctx->cword.decrypt);
 
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        err = blkcipher_walk_virt(desc, &walk);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        while ((nbytes = walk.nbytes)) {
+        while ((nbytes = walk.nbytes) != 0) {
                 padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,
                                    ctx->D, &ctx->cword.decrypt,
                                    nbytes / AES_BLOCK_SIZE);
                 nbytes &= AES_BLOCK_SIZE - 1;
-                err = blkcipher_walk_done(desc, &walk, nbytes);
+                err = skcipher_walk_done(&walk, nbytes);
         }
 
         padlock_store_cword(&ctx->cword.encrypt);
...
         return err;
 }
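In the skcipher API the source/destination scatterlists, the length, and the IV travel inside the skcipher_request, so the old (desc, dst, src, nbytes) parameter list collapses into req. skcipher_walk_virt() takes the request directly; its final bool says whether the walk must behave atomically, and passing false lets it honor the request's sleeping flags. A hypothetical caller-side sketch of what a request carries (synchronous tfm assumed; an async one could also return -EINPROGRESS):

    static int example_encrypt(struct crypto_skcipher *tfm,
                               struct scatterlist *src,
                               struct scatterlist *dst,
                               unsigned int nbytes)
    {
            struct skcipher_request *req;
            int err;

            req = skcipher_request_alloc(tfm, GFP_KERNEL);
            if (!req)
                    return -ENOMEM;

            skcipher_request_set_callback(req, 0, NULL, NULL);
            /* NULL IV: ECB takes none; CBC would pass a 16-byte IV here. */
            skcipher_request_set_crypt(req, src, dst, nbytes, NULL);
            err = crypto_skcipher_encrypt(req);
            skcipher_request_free(req);
            return err;
    }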
 
-static struct crypto_alg ecb_aes_alg = {
-        .cra_name = "ecb(aes)",
-        .cra_driver_name = "ecb-aes-padlock",
-        .cra_priority = PADLOCK_COMPOSITE_PRIORITY,
-        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
-        .cra_blocksize = AES_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct aes_ctx),
-        .cra_alignmask = PADLOCK_ALIGNMENT - 1,
-        .cra_type = &crypto_blkcipher_type,
-        .cra_module = THIS_MODULE,
-        .cra_u = {
-                .blkcipher = {
-                        .min_keysize = AES_MIN_KEY_SIZE,
-                        .max_keysize = AES_MAX_KEY_SIZE,
-                        .setkey = aes_set_key,
-                        .encrypt = ecb_aes_encrypt,
-                        .decrypt = ecb_aes_decrypt,
-                }
-        }
+static struct skcipher_alg ecb_aes_alg = {
+        .base.cra_name = "ecb(aes)",
+        .base.cra_driver_name = "ecb-aes-padlock",
+        .base.cra_priority = PADLOCK_COMPOSITE_PRIORITY,
+        .base.cra_blocksize = AES_BLOCK_SIZE,
+        .base.cra_ctxsize = sizeof(struct aes_ctx),
+        .base.cra_alignmask = PADLOCK_ALIGNMENT - 1,
+        .base.cra_module = THIS_MODULE,
+        .min_keysize = AES_MIN_KEY_SIZE,
+        .max_keysize = AES_MAX_KEY_SIZE,
+        .setkey = aes_set_key_skcipher,
+        .encrypt = ecb_aes_encrypt,
+        .decrypt = ecb_aes_decrypt,
 };
 
-static int cbc_aes_encrypt(struct blkcipher_desc *desc,
-                           struct scatterlist *dst, struct scatterlist *src,
-                           unsigned int nbytes)
+static int cbc_aes_encrypt(struct skcipher_request *req)
 {
-        struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
-        struct blkcipher_walk walk;
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct aes_ctx *ctx = skcipher_aes_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         int err;
 
         padlock_reset_key(&ctx->cword.encrypt);
 
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        err = blkcipher_walk_virt(desc, &walk);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        while ((nbytes = walk.nbytes)) {
+        while ((nbytes = walk.nbytes) != 0) {
                 u8 *iv = padlock_xcrypt_cbc(walk.src.virt.addr,
                                             walk.dst.virt.addr, ctx->E,
                                             walk.iv, &ctx->cword.encrypt,
                                             nbytes / AES_BLOCK_SIZE);
                 memcpy(walk.iv, iv, AES_BLOCK_SIZE);
                 nbytes &= AES_BLOCK_SIZE - 1;
-                err = blkcipher_walk_done(desc, &walk, nbytes);
+                err = skcipher_walk_done(&walk, nbytes);
         }
 
         padlock_store_cword(&ctx->cword.decrypt);
...
         return err;
 }
 
-static int cbc_aes_decrypt(struct blkcipher_desc *desc,
-                           struct scatterlist *dst, struct scatterlist *src,
-                           unsigned int nbytes)
+static int cbc_aes_decrypt(struct skcipher_request *req)
 {
-        struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
-        struct blkcipher_walk walk;
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct aes_ctx *ctx = skcipher_aes_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         int err;
 
         padlock_reset_key(&ctx->cword.encrypt);
 
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        err = blkcipher_walk_virt(desc, &walk);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        while ((nbytes = walk.nbytes)) {
+        while ((nbytes = walk.nbytes) != 0) {
                 padlock_xcrypt_cbc(walk.src.virt.addr, walk.dst.virt.addr,
                                    ctx->D, walk.iv, &ctx->cword.decrypt,
                                    nbytes / AES_BLOCK_SIZE);
                 nbytes &= AES_BLOCK_SIZE - 1;
-                err = blkcipher_walk_done(desc, &walk, nbytes);
+                err = skcipher_walk_done(&walk, nbytes);
         }
 
         padlock_store_cword(&ctx->cword.encrypt);
...
         return err;
 }
 
-static struct crypto_alg cbc_aes_alg = {
-        .cra_name = "cbc(aes)",
-        .cra_driver_name = "cbc-aes-padlock",
-        .cra_priority = PADLOCK_COMPOSITE_PRIORITY,
-        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
-        .cra_blocksize = AES_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct aes_ctx),
-        .cra_alignmask = PADLOCK_ALIGNMENT - 1,
-        .cra_type = &crypto_blkcipher_type,
-        .cra_module = THIS_MODULE,
-        .cra_u = {
-                .blkcipher = {
-                        .min_keysize = AES_MIN_KEY_SIZE,
-                        .max_keysize = AES_MAX_KEY_SIZE,
-                        .ivsize = AES_BLOCK_SIZE,
-                        .setkey = aes_set_key,
-                        .encrypt = cbc_aes_encrypt,
-                        .decrypt = cbc_aes_decrypt,
-                }
-        }
+static struct skcipher_alg cbc_aes_alg = {
+        .base.cra_name = "cbc(aes)",
+        .base.cra_driver_name = "cbc-aes-padlock",
+        .base.cra_priority = PADLOCK_COMPOSITE_PRIORITY,
+        .base.cra_blocksize = AES_BLOCK_SIZE,
+        .base.cra_ctxsize = sizeof(struct aes_ctx),
+        .base.cra_alignmask = PADLOCK_ALIGNMENT - 1,
+        .base.cra_module = THIS_MODULE,
+        .min_keysize = AES_MIN_KEY_SIZE,
+        .max_keysize = AES_MAX_KEY_SIZE,
+        .ivsize = AES_BLOCK_SIZE,
+        .setkey = aes_set_key_skcipher,
+        .encrypt = cbc_aes_encrypt,
+        .decrypt = cbc_aes_decrypt,
 };
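struct skcipher_alg promotes min_keysize, max_keysize, ivsize, and the handlers to first-class fields alongside a .base crypto_alg, and both CRYPTO_ALG_TYPE_BLKCIPHER and .cra_type disappear because crypto_register_skcipher() implies the type. Users are unaffected: they still allocate by algorithm name, and PADLOCK_COMPOSITE_PRIORITY wins the selection on PadLock hardware. A hypothetical sketch:

    static int example_probe(void)
    {
            struct crypto_skcipher *tfm;

            /* Priority-based selection picks "cbc-aes-padlock" when present. */
            tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
            if (IS_ERR(tfm))
                    return PTR_ERR(tfm);
            pr_info("using %s\n",
                    crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)));
            crypto_free_skcipher(tfm);
            return 0;
    }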
 
 static const struct x86_cpu_id padlock_cpu_id[] = {
-        X86_FEATURE_MATCH(X86_FEATURE_XCRYPT),
+        X86_MATCH_FEATURE(X86_FEATURE_XCRYPT, NULL),
         {}
 };
 MODULE_DEVICE_TABLE(x86cpu, padlock_cpu_id);
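An unrelated but required touch-up: the x86 CPU-match macros were reworked, and X86_MATCH_FEATURE() takes the driver_data pointer (NULL here) as its second argument. The `return -ENODEV;` visible in the next hunk's context is presumably the failure path of a gate along these lines:

    if (!x86_match_cpu(padlock_cpu_id))
            return -ENODEV;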
...
                 return -ENODEV;
         }
 
-        if ((ret = crypto_register_alg(&aes_alg)))
+        if ((ret = crypto_register_alg(&aes_alg)) != 0)
                 goto aes_err;
 
-        if ((ret = crypto_register_alg(&ecb_aes_alg)))
+        if ((ret = crypto_register_skcipher(&ecb_aes_alg)) != 0)
                 goto ecb_aes_err;
 
-        if ((ret = crypto_register_alg(&cbc_aes_alg)))
+        if ((ret = crypto_register_skcipher(&cbc_aes_alg)) != 0)
                 goto cbc_aes_err;
 
         printk(KERN_NOTICE PFX "Using VIA PadLock ACE for AES algorithm.\n");
...
         return ret;
 
 cbc_aes_err:
-        crypto_unregister_alg(&ecb_aes_alg);
+        crypto_unregister_skcipher(&ecb_aes_alg);
 ecb_aes_err:
         crypto_unregister_alg(&aes_alg);
 aes_err:
...
 
 static void __exit padlock_fini(void)
 {
-        crypto_unregister_alg(&cbc_aes_alg);
-        crypto_unregister_alg(&ecb_aes_alg);
+        crypto_unregister_skcipher(&cbc_aes_alg);
+        crypto_unregister_skcipher(&ecb_aes_alg);
         crypto_unregister_alg(&aes_alg);
 }
 
---|