@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * Cryptographic API.
  *
@@ -6,10 +7,6 @@
  * Copyright (c) 2014 Steffen Trumtrar <s.trumtrar@pengutronix.de>
  * Copyright (c) 2013 Vista Silicon S.L.
  * Author: Javier Martin <javier.martin@vista-silicon.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as published
- * by the Free Software Foundation.
  *
  * Based on omap-aes.c and tegra-aes.c
  */
@@ -21,17 +18,17 @@
 #include <crypto/sha.h>
 
 #include <linux/clk.h>
-#include <linux/crypto.h>
+#include <linux/dma-mapping.h>
 #include <linux/interrupt.h>
 #include <linux/io.h>
 #include <linux/irq.h>
 #include <linux/kernel.h>
 #include <linux/kthread.h>
 #include <linux/module.h>
-#include <linux/mutex.h>
 #include <linux/of.h>
 #include <linux/of_device.h>
 #include <linux/platform_device.h>
+#include <linux/spinlock.h>
 
 #define SHA_BUFFER_LEN		PAGE_SIZE
 #define SAHARA_MAX_SHA_BLOCK_SIZE	SHA256_BLOCK_SIZE
@@ -154,6 +151,7 @@
 
 struct sahara_aes_reqctx {
 	unsigned long	mode;
+	struct skcipher_request fallback_req;	// keep at the end
 };
 
 /*
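The new fallback_req member and its "keep at the end" comment are load-bearing: a request for this tfm is allocated with room for the driver's context, sized by crypto_skcipher_set_reqsize() in the init_tfm hunk further down, and the fallback transform's own request context must sit directly behind the embedded request. A hedged layout sketch (illustration only, not driver code):

    /*
     * With reqsize = sizeof(struct sahara_aes_reqctx)
     *              + crypto_skcipher_reqsize(ctx->fallback),
     * a request allocation looks roughly like:
     *
     *   [ skcipher_request          ]  <- the driver's request
     *   [ struct sahara_aes_reqctx  ]  <- skcipher_request_ctx(req)
     *   [   .mode                   ]
     *   [   .fallback_req           ]  <- must stay last: the fallback's
     *   [ fallback's request ctx    ]     own reqctx is carved out right
     *                                     after it in the same buffer
     */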
@@ -197,7 +195,7 @@
 	void __iomem		*regs_base;
 	struct clk		*clk_ipg;
 	struct clk		*clk_ahb;
-	struct mutex		queue_mutex;
+	spinlock_t		queue_spinlock;
 	struct task_struct	*kthread;
 	struct completion	dma_completion;
 
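Replacing queue_mutex with a spinlock is more than a rename. The queue is fed from the ->encrypt()/->decrypt() entry points, which the crypto API may invoke from atomic context (softirq being the typical case), where taking a sleeping mutex is a bug; the _bh lock variants used below additionally keep bottom halves from re-entering the queue on the same CPU. A minimal sketch of the enqueue side, assuming only that callers may run in softirq context (enqueue_for_kthread is a hypothetical helper; the driver open-codes this pattern in sahara_aes_crypt() and sahara_sha_enqueue()):

    static int enqueue_for_kthread(struct sahara_dev *dev,
                                   struct crypto_async_request *base)
    {
            int err;

            spin_lock_bh(&dev->queue_spinlock);     /* safe in atomic context */
            err = crypto_enqueue_request(&dev->queue, base);
            spin_unlock_bh(&dev->queue_spinlock);

            wake_up_process(dev->kthread);          /* worker dequeues later */
            return err;
    }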
@@ -354,7 +352,7 @@
 {
 	u8 state;
 
-	if (!IS_ENABLED(DEBUG))
+	if (!__is_defined(DEBUG))
 		return;
 
 	state = SAHARA_STATUS_GET_STATE(status);
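The IS_ENABLED() change in these three dump helpers is a correctness-of-usage fix rather than a behavior change. IS_ENABLED() is documented for CONFIG_* Kconfig symbols and also probes the option##_MODULE variant, which makes no sense for a plain DEBUG macro; __is_defined() is the dedicated test for a single macro. Both expect the "defined to 1" form that a -DDEBUG compiler flag produces; a rough sketch:

    #define DEBUG 1              /* what "ccflags-y += -DDEBUG" yields */

    IS_ENABLED(DEBUG)            /* happens to work, but is meant for
                                    CONFIG_* and also checks DEBUG_MODULE */
    __is_defined(DEBUG)          /* the intended test for a bare macro */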
@@ -406,7 +404,7 @@
 {
 	int i;
 
-	if (!IS_ENABLED(DEBUG))
+	if (!__is_defined(DEBUG))
 		return;
 
 	for (i = 0; i < SAHARA_MAX_HW_DESC; i++) {
@@ -427,7 +425,7 @@
 {
 	int i;
 
-	if (!IS_ENABLED(DEBUG))
+	if (!__is_defined(DEBUG))
 		return;
 
 	for (i = 0; i < SAHARA_MAX_HW_LINK; i++) {
@@ -550,7 +548,7 @@
 	return -EINVAL;
 }
 
-static int sahara_aes_process(struct ablkcipher_request *req)
+static int sahara_aes_process(struct skcipher_request *req)
 {
 	struct sahara_dev *dev = dev_ptr;
 	struct sahara_ctx *ctx;
@@ -561,20 +559,20 @@
 	/* Request is ready to be dispatched by the device */
 	dev_dbg(dev->device,
 		"dispatch request (nbytes=%d, src=%p, dst=%p)\n",
-		req->nbytes, req->src, req->dst);
+		req->cryptlen, req->src, req->dst);
 
 	/* assign new request to device */
-	dev->total = req->nbytes;
+	dev->total = req->cryptlen;
 	dev->in_sg = req->src;
 	dev->out_sg = req->dst;
 
-	rctx = ablkcipher_request_ctx(req);
-	ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));
+	rctx = skcipher_request_ctx(req);
+	ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	rctx->mode &= FLAGS_MODE_MASK;
 	dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;
 
-	if ((dev->flags & FLAGS_CBC) && req->info)
-		memcpy(dev->iv_base, req->info, AES_KEYSIZE_128);
+	if ((dev->flags & FLAGS_CBC) && req->iv)
+		memcpy(dev->iv_base, req->iv, AES_KEYSIZE_128);
 
 	/* assign new context to device */
 	dev->ctx = ctx;
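The bulk of this hunk is the mechanical part of the ablkcipher-to-skcipher conversion: the request fields are renamed along with the types. For reference, the mapping applied throughout the patch (nothing here is driver-specific):

    /*
     * old ablkcipher API               new skcipher API
     * -------------------------------  -------------------------------
     * struct ablkcipher_request        struct skcipher_request
     * req->nbytes                      req->cryptlen
     * req->info                        req->iv
     * ablkcipher_request_ctx(req)      skcipher_request_ctx(req)
     * crypto_ablkcipher_reqtfm(req)    crypto_skcipher_reqtfm(req)
     * crypto_ablkcipher_ctx(tfm)       crypto_skcipher_ctx(tfm)
     */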
@@ -600,11 +598,10 @@
 	return 0;
 }
 
-static int sahara_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int sahara_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			     unsigned int keylen)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-	int ret;
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	ctx->keylen = keylen;
 
@@ -624,25 +621,19 @@
 	crypto_skcipher_clear_flags(ctx->fallback, CRYPTO_TFM_REQ_MASK);
 	crypto_skcipher_set_flags(ctx->fallback, tfm->base.crt_flags &
 						 CRYPTO_TFM_REQ_MASK);
-
-	ret = crypto_skcipher_setkey(ctx->fallback, key, keylen);
-
-	tfm->base.crt_flags &= ~CRYPTO_TFM_RES_MASK;
-	tfm->base.crt_flags |= crypto_skcipher_get_flags(ctx->fallback) &
-			       CRYPTO_TFM_RES_MASK;
-	return ret;
+	return crypto_skcipher_setkey(ctx->fallback, key, keylen);
 }
 
-static int sahara_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
+static int sahara_aes_crypt(struct skcipher_request *req, unsigned long mode)
 {
-	struct sahara_aes_reqctx *rctx = ablkcipher_request_ctx(req);
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
 	struct sahara_dev *dev = dev_ptr;
 	int err = 0;
 
 	dev_dbg(dev->device, "nbytes: %d, enc: %d, cbc: %d\n",
-		req->nbytes, !!(mode & FLAGS_ENCRYPT), !!(mode & FLAGS_CBC));
+		req->cryptlen, !!(mode & FLAGS_ENCRYPT), !!(mode & FLAGS_CBC));
 
-	if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) {
+	if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE)) {
 		dev_err(dev->device,
 			"request size is not exact amount of AES blocks\n");
 		return -EINVAL;
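The deleted CRYPTO_TFM_RES_MASK shuffling in setkey reflects an API cleanup rather than a driver decision: the crypto core stopped using the CRYPTO_TFM_RES_* result flags to report setkey failures, leaving the return value as the only error channel, so forwarding a key to the fallback collapses to a single crypto_skcipher_setkey() call.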
@@ -650,124 +641,116 @@
 
 	rctx->mode = mode;
 
-	mutex_lock(&dev->queue_mutex);
-	err = ablkcipher_enqueue_request(&dev->queue, req);
-	mutex_unlock(&dev->queue_mutex);
+	spin_lock_bh(&dev->queue_spinlock);
+	err = crypto_enqueue_request(&dev->queue, &req->base);
+	spin_unlock_bh(&dev->queue_spinlock);
 
 	wake_up_process(dev->kthread);
 
 	return err;
 }
 
-static int sahara_aes_ecb_encrypt(struct ablkcipher_request *req)
+static int sahara_aes_ecb_encrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
-	int err;
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
-
-		skcipher_request_set_tfm(subreq, ctx->fallback);
-		skcipher_request_set_callback(subreq, req->base.flags,
-					      NULL, NULL);
-		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
-		err = crypto_skcipher_encrypt(subreq);
-		skcipher_request_zero(subreq);
-		return err;
+		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&rctx->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
+					   req->dst, req->cryptlen, req->iv);
+		return crypto_skcipher_encrypt(&rctx->fallback_req);
 	}
 
 	return sahara_aes_crypt(req, FLAGS_ENCRYPT);
 }
 
-static int sahara_aes_ecb_decrypt(struct ablkcipher_request *req)
+static int sahara_aes_ecb_decrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
-	int err;
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
-
-		skcipher_request_set_tfm(subreq, ctx->fallback);
-		skcipher_request_set_callback(subreq, req->base.flags,
-					      NULL, NULL);
-		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
-		err = crypto_skcipher_decrypt(subreq);
-		skcipher_request_zero(subreq);
-		return err;
+		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&rctx->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
+					   req->dst, req->cryptlen, req->iv);
+		return crypto_skcipher_decrypt(&rctx->fallback_req);
 	}
 
 	return sahara_aes_crypt(req, 0);
 }
 
-static int sahara_aes_cbc_encrypt(struct ablkcipher_request *req)
+static int sahara_aes_cbc_encrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
-	int err;
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
-
-		skcipher_request_set_tfm(subreq, ctx->fallback);
-		skcipher_request_set_callback(subreq, req->base.flags,
-					      NULL, NULL);
-		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
-		err = crypto_skcipher_encrypt(subreq);
-		skcipher_request_zero(subreq);
-		return err;
+		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&rctx->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
+					   req->dst, req->cryptlen, req->iv);
+		return crypto_skcipher_encrypt(&rctx->fallback_req);
 	}
 
 	return sahara_aes_crypt(req, FLAGS_ENCRYPT | FLAGS_CBC);
 }
 
-static int sahara_aes_cbc_decrypt(struct ablkcipher_request *req)
+static int sahara_aes_cbc_decrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
-	int err;
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
-
-		skcipher_request_set_tfm(subreq, ctx->fallback);
-		skcipher_request_set_callback(subreq, req->base.flags,
-					      NULL, NULL);
-		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
-		err = crypto_skcipher_decrypt(subreq);
-		skcipher_request_zero(subreq);
-		return err;
+		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&rctx->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
+					   req->dst, req->cryptlen, req->iv);
+		return crypto_skcipher_decrypt(&rctx->fallback_req);
 	}
 
 	return sahara_aes_crypt(req, FLAGS_CBC);
 }
 
-static int sahara_aes_cra_init(struct crypto_tfm *tfm)
+static int sahara_aes_init_tfm(struct crypto_skcipher *tfm)
 {
-	const char *name = crypto_tfm_alg_name(tfm);
-	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
+	const char *name = crypto_tfm_alg_name(&tfm->base);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	ctx->fallback = crypto_alloc_skcipher(name, 0,
-					      CRYPTO_ALG_ASYNC |
 					      CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(ctx->fallback)) {
 		pr_err("Error allocating fallback algo %s\n", name);
 		return PTR_ERR(ctx->fallback);
 	}
 
-	tfm->crt_ablkcipher.reqsize = sizeof(struct sahara_aes_reqctx);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct sahara_aes_reqctx) +
+					 crypto_skcipher_reqsize(ctx->fallback));
 
 	return 0;
 }
 
-static void sahara_aes_cra_exit(struct crypto_tfm *tfm)
+static void sahara_aes_exit_tfm(struct crypto_skcipher *tfm)
 {
-	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	crypto_free_skcipher(ctx->fallback);
 }
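Two coupled changes in the fallback paths above deserve a note. First, the on-stack SKCIPHER_REQUEST_ON_STACK subrequest is gone in favor of the fallback_req embedded in the request context, which is why sahara_aes_init_tfm() now adds crypto_skcipher_reqsize(ctx->fallback) on top of the driver's own reqctx size. Second, the allocation mask no longer insists on a synchronous fallback (CRYPTO_ALG_ASYNC was dropped from the mask), so an asynchronous implementation may be selected; that is only safe because the original request's completion callback and data are forwarded via skcipher_request_set_callback() instead of being passed as NULL. A hedged sketch of what the forwarding buys a caller (my_done and my_ctx are hypothetical names):

    /* the caller's completion fires exactly once, whether the request was
     * served by the hardware path or by a possibly-async fallback */
    static void my_done(struct crypto_async_request *base, int err)
    {
            pr_debug("sahara request finished: %d\n", err);
    }

    skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                  my_done, my_ctx);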
@@ -1059,10 +1042,10 @@
 	do {
 		__set_current_state(TASK_INTERRUPTIBLE);
 
-		mutex_lock(&dev->queue_mutex);
+		spin_lock_bh(&dev->queue_spinlock);
 		backlog = crypto_get_backlog(&dev->queue);
 		async_req = crypto_dequeue_request(&dev->queue);
-		mutex_unlock(&dev->queue_mutex);
+		spin_unlock_bh(&dev->queue_spinlock);
 
 		if (backlog)
 			backlog->complete(backlog, -EINPROGRESS);
@@ -1075,8 +1058,8 @@
 
 			ret = sahara_sha_process(req);
 		} else {
-			struct ablkcipher_request *req =
-				ablkcipher_request_cast(async_req);
+			struct skcipher_request *req =
+				skcipher_request_cast(async_req);
 
 			ret = sahara_aes_process(req);
 		}
@@ -1108,9 +1091,9 @@
 		rctx->first = 1;
 	}
 
-	mutex_lock(&dev->queue_mutex);
+	spin_lock_bh(&dev->queue_spinlock);
 	ret = crypto_enqueue_request(&dev->queue, &req->base);
-	mutex_unlock(&dev->queue_mutex);
+	spin_unlock_bh(&dev->queue_spinlock);
 
 	wake_up_process(dev->kthread);
 
@@ -1193,48 +1176,42 @@
 	return 0;
 }
 
-static struct crypto_alg aes_algs[] = {
+static struct skcipher_alg aes_algs[] = {
 {
-	.cra_name		= "ecb(aes)",
-	.cra_driver_name	= "sahara-ecb-aes",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
-				  CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct sahara_ctx),
-	.cra_alignmask		= 0x0,
-	.cra_type		= &crypto_ablkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= sahara_aes_cra_init,
-	.cra_exit		= sahara_aes_cra_exit,
-	.cra_u.ablkcipher = {
-		.min_keysize	= AES_MIN_KEY_SIZE ,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-		.setkey		= sahara_aes_setkey,
-		.encrypt	= sahara_aes_ecb_encrypt,
-		.decrypt	= sahara_aes_ecb_decrypt,
-	}
+	.base.cra_name		= "ecb(aes)",
+	.base.cra_driver_name	= "sahara-ecb-aes",
+	.base.cra_priority	= 300,
+	.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct sahara_ctx),
+	.base.cra_alignmask	= 0x0,
+	.base.cra_module	= THIS_MODULE,
+
+	.init			= sahara_aes_init_tfm,
+	.exit			= sahara_aes_exit_tfm,
+	.min_keysize		= AES_MIN_KEY_SIZE ,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.setkey			= sahara_aes_setkey,
+	.encrypt		= sahara_aes_ecb_encrypt,
+	.decrypt		= sahara_aes_ecb_decrypt,
 }, {
-	.cra_name		= "cbc(aes)",
-	.cra_driver_name	= "sahara-cbc-aes",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
-				  CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct sahara_ctx),
-	.cra_alignmask		= 0x0,
-	.cra_type		= &crypto_ablkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= sahara_aes_cra_init,
-	.cra_exit		= sahara_aes_cra_exit,
-	.cra_u.ablkcipher = {
-		.min_keysize	= AES_MIN_KEY_SIZE ,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-		.ivsize		= AES_BLOCK_SIZE,
-		.setkey		= sahara_aes_setkey,
-		.encrypt	= sahara_aes_cbc_encrypt,
-		.decrypt	= sahara_aes_cbc_decrypt,
-	}
+	.base.cra_name		= "cbc(aes)",
+	.base.cra_driver_name	= "sahara-cbc-aes",
+	.base.cra_priority	= 300,
+	.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct sahara_ctx),
+	.base.cra_alignmask	= 0x0,
+	.base.cra_module	= THIS_MODULE,
+
+	.init			= sahara_aes_init_tfm,
+	.exit			= sahara_aes_exit_tfm,
+	.min_keysize		= AES_MIN_KEY_SIZE ,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.ivsize			= AES_BLOCK_SIZE,
+	.setkey			= sahara_aes_setkey,
+	.encrypt		= sahara_aes_cbc_encrypt,
+	.decrypt		= sahara_aes_cbc_decrypt,
 }
 };
 
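struct skcipher_alg hoists what previously lived in the cra_u.ablkcipher union into first-class fields and keeps the generic properties in the embedded .base; CRYPTO_ALG_TYPE_ABLKCIPHER and the explicit .cra_type pointer disappear because the algorithm type is implied by registering through crypto_register_skcipher(). The driver keeps per-algorithm registration loops, matching how its SHA algorithms are handled; for reference, the crypto API also offers bulk helpers that would collapse them (a sketch of the alternative, not what this patch does):

    err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
    /* ... and on the teardown side ... */
    crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));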
@@ -1322,8 +1299,7 @@
 	unsigned int i, j, k, l;
 
 	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
-		INIT_LIST_HEAD(&aes_algs[i].cra_list);
-		err = crypto_register_alg(&aes_algs[i]);
+		err = crypto_register_skcipher(&aes_algs[i]);
 		if (err)
 			goto err_aes_algs;
 	}
@@ -1353,7 +1329,7 @@
 
 err_aes_algs:
 	for (j = 0; j < i; j++)
-		crypto_unregister_alg(&aes_algs[j]);
+		crypto_unregister_skcipher(&aes_algs[j]);
 
 	return err;
 }
@@ -1363,7 +1339,7 @@
 	unsigned int i;
 
 	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
-		crypto_unregister_alg(&aes_algs[i]);
+		crypto_unregister_skcipher(&aes_algs[i]);
 
 	for (i = 0; i < ARRAY_SIZE(sha_v3_algs); i++)
 		crypto_unregister_ahash(&sha_v3_algs[i]);
@@ -1389,7 +1365,6 @@
 static int sahara_probe(struct platform_device *pdev)
 {
 	struct sahara_dev *dev;
-	struct resource *res;
 	u32 version;
 	int irq;
 	int err;
@@ -1403,17 +1378,14 @@
 	platform_set_drvdata(pdev, dev);
 
 	/* Get the base address */
-	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
-	dev->regs_base = devm_ioremap_resource(&pdev->dev, res);
+	dev->regs_base = devm_platform_ioremap_resource(pdev, 0);
 	if (IS_ERR(dev->regs_base))
 		return PTR_ERR(dev->regs_base);
 
 	/* Get the IRQ */
 	irq = platform_get_irq(pdev, 0);
-	if (irq < 0) {
-		dev_err(&pdev->dev, "failed to get irq resource\n");
+	if (irq < 0)
 		return irq;
-	}
 
 	err = devm_request_irq(&pdev->dev, irq, sahara_irq_handler,
 			       0, dev_name(&pdev->dev), dev);
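Both probe() simplifications substitute helpers with identical semantics. devm_platform_ioremap_resource() folds the get-resource/ioremap pair into one call, and the dropped dev_err() was redundant because platform_get_irq() already prints an error when no IRQ can be found. A sketch of the first equivalence:

    /* what devm_platform_ioremap_resource(pdev, 0) replaces: */
    struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
    void __iomem *base = devm_ioremap_resource(&pdev->dev, res);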
@@ -1482,7 +1454,7 @@
 
 	crypto_init_queue(&dev->queue, SAHARA_QUEUE_LENGTH);
 
-	mutex_init(&dev->queue_mutex);
+	spin_lock_init(&dev->queue_spinlock);
 
 	dev_ptr = dev;
 