2024-05-10 37f49e37ab4cb5d0bc4c60eb5c6d4dd57db767bb
--- a/kernel/drivers/crypto/sahara.c
+++ b/kernel/drivers/crypto/sahara.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * Cryptographic API.
  *
@@ -6,10 +7,6 @@
  * Copyright (c) 2014 Steffen Trumtrar <s.trumtrar@pengutronix.de>
  * Copyright (c) 2013 Vista Silicon S.L.
  * Author: Javier Martin <javier.martin@vista-silicon.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as published
- * by the Free Software Foundation.
  *
  * Based on omap-aes.c and tegra-aes.c
  */
@@ -21,17 +18,17 @@
 #include <crypto/sha.h>
 
 #include <linux/clk.h>
-#include <linux/crypto.h>
+#include <linux/dma-mapping.h>
 #include <linux/interrupt.h>
 #include <linux/io.h>
 #include <linux/irq.h>
 #include <linux/kernel.h>
 #include <linux/kthread.h>
 #include <linux/module.h>
-#include <linux/mutex.h>
 #include <linux/of.h>
 #include <linux/of_device.h>
 #include <linux/platform_device.h>
+#include <linux/spinlock.h>
 
 #define SHA_BUFFER_LEN		PAGE_SIZE
 #define SAHARA_MAX_SHA_BLOCK_SIZE	SHA256_BLOCK_SIZE
@@ -154,6 +151,7 @@
 
 struct sahara_aes_reqctx {
 	unsigned long mode;
+	struct skcipher_request fallback_req;	// keep at the end
 };
 
 /*
@@ -197,7 +195,7 @@
 	void __iomem *regs_base;
 	struct clk *clk_ipg;
 	struct clk *clk_ahb;
-	struct mutex queue_mutex;
+	spinlock_t queue_spinlock;
 	struct task_struct *kthread;
 	struct completion dma_completion;
 
@@ -354,7 +352,7 @@
 {
 	u8 state;
 
-	if (!IS_ENABLED(DEBUG))
+	if (!__is_defined(DEBUG))
 		return;
 
 	state = SAHARA_STATUS_GET_STATE(status);
@@ -406,7 +404,7 @@
 {
 	int i;
 
-	if (!IS_ENABLED(DEBUG))
+	if (!__is_defined(DEBUG))
 		return;
 
 	for (i = 0; i < SAHARA_MAX_HW_DESC; i++) {
@@ -427,7 +425,7 @@
 {
 	int i;
 
-	if (!IS_ENABLED(DEBUG))
+	if (!__is_defined(DEBUG))
 		return;
 
 	for (i = 0; i < SAHARA_MAX_HW_LINK; i++) {
@@ -550,7 +548,7 @@
 	return -EINVAL;
 }
 
-static int sahara_aes_process(struct ablkcipher_request *req)
+static int sahara_aes_process(struct skcipher_request *req)
 {
 	struct sahara_dev *dev = dev_ptr;
 	struct sahara_ctx *ctx;
@@ -561,20 +559,20 @@
 	/* Request is ready to be dispatched by the device */
 	dev_dbg(dev->device,
 		"dispatch request (nbytes=%d, src=%p, dst=%p)\n",
-		req->nbytes, req->src, req->dst);
+		req->cryptlen, req->src, req->dst);
 
 	/* assign new request to device */
-	dev->total = req->nbytes;
+	dev->total = req->cryptlen;
 	dev->in_sg = req->src;
 	dev->out_sg = req->dst;
 
-	rctx = ablkcipher_request_ctx(req);
-	ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));
+	rctx = skcipher_request_ctx(req);
+	ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	rctx->mode &= FLAGS_MODE_MASK;
 	dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;
 
-	if ((dev->flags & FLAGS_CBC) && req->info)
-		memcpy(dev->iv_base, req->info, AES_KEYSIZE_128);
+	if ((dev->flags & FLAGS_CBC) && req->iv)
+		memcpy(dev->iv_base, req->iv, AES_KEYSIZE_128);
 
 	/* assign new context to device */
 	dev->ctx = ctx;
@@ -600,11 +598,10 @@
 	return 0;
 }
 
-static int sahara_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int sahara_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			     unsigned int keylen)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-	int ret;
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	ctx->keylen = keylen;
 
@@ -624,25 +621,19 @@
 	crypto_skcipher_clear_flags(ctx->fallback, CRYPTO_TFM_REQ_MASK);
 	crypto_skcipher_set_flags(ctx->fallback, tfm->base.crt_flags &
 				  CRYPTO_TFM_REQ_MASK);
-
-	ret = crypto_skcipher_setkey(ctx->fallback, key, keylen);
-
-	tfm->base.crt_flags &= ~CRYPTO_TFM_RES_MASK;
-	tfm->base.crt_flags |= crypto_skcipher_get_flags(ctx->fallback) &
-			       CRYPTO_TFM_RES_MASK;
-	return ret;
+	return crypto_skcipher_setkey(ctx->fallback, key, keylen);
 }
 
-static int sahara_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
+static int sahara_aes_crypt(struct skcipher_request *req, unsigned long mode)
 {
-	struct sahara_aes_reqctx *rctx = ablkcipher_request_ctx(req);
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
 	struct sahara_dev *dev = dev_ptr;
 	int err = 0;
 
 	dev_dbg(dev->device, "nbytes: %d, enc: %d, cbc: %d\n",
-		req->nbytes, !!(mode & FLAGS_ENCRYPT), !!(mode & FLAGS_CBC));
+		req->cryptlen, !!(mode & FLAGS_ENCRYPT), !!(mode & FLAGS_CBC));
 
-	if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) {
+	if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE)) {
 		dev_err(dev->device,
 			"request size is not exact amount of AES blocks\n");
 		return -EINVAL;
@@ -650,124 +641,116 @@
 
 	rctx->mode = mode;
 
-	mutex_lock(&dev->queue_mutex);
-	err = ablkcipher_enqueue_request(&dev->queue, req);
-	mutex_unlock(&dev->queue_mutex);
+	spin_lock_bh(&dev->queue_spinlock);
+	err = crypto_enqueue_request(&dev->queue, &req->base);
+	spin_unlock_bh(&dev->queue_spinlock);
 
 	wake_up_process(dev->kthread);
 
 	return err;
 }
 
-static int sahara_aes_ecb_encrypt(struct ablkcipher_request *req)
+static int sahara_aes_ecb_encrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
-	int err;
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
-
-		skcipher_request_set_tfm(subreq, ctx->fallback);
-		skcipher_request_set_callback(subreq, req->base.flags,
-					      NULL, NULL);
-		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
-		err = crypto_skcipher_encrypt(subreq);
-		skcipher_request_zero(subreq);
-		return err;
+		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&rctx->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);
+		return crypto_skcipher_encrypt(&rctx->fallback_req);
 	}
 
 	return sahara_aes_crypt(req, FLAGS_ENCRYPT);
 }
 
-static int sahara_aes_ecb_decrypt(struct ablkcipher_request *req)
+static int sahara_aes_ecb_decrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
-	int err;
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
-
-		skcipher_request_set_tfm(subreq, ctx->fallback);
-		skcipher_request_set_callback(subreq, req->base.flags,
-					      NULL, NULL);
-		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
-		err = crypto_skcipher_decrypt(subreq);
-		skcipher_request_zero(subreq);
-		return err;
+		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&rctx->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
+					   req->dst, req->cryptlen, req->iv);
+		return crypto_skcipher_decrypt(&rctx->fallback_req);
 	}
 
 	return sahara_aes_crypt(req, 0);
 }
 
-static int sahara_aes_cbc_encrypt(struct ablkcipher_request *req)
+static int sahara_aes_cbc_encrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
-	int err;
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
-
-		skcipher_request_set_tfm(subreq, ctx->fallback);
-		skcipher_request_set_callback(subreq, req->base.flags,
-					      NULL, NULL);
-		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
-		err = crypto_skcipher_encrypt(subreq);
-		skcipher_request_zero(subreq);
-		return err;
+		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&rctx->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
+					   req->dst, req->cryptlen, req->iv);
+		return crypto_skcipher_encrypt(&rctx->fallback_req);
 	}
 
 	return sahara_aes_crypt(req, FLAGS_ENCRYPT | FLAGS_CBC);
 }
 
-static int sahara_aes_cbc_decrypt(struct ablkcipher_request *req)
+static int sahara_aes_cbc_decrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
-	int err;
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
-		SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
-
-		skcipher_request_set_tfm(subreq, ctx->fallback);
-		skcipher_request_set_callback(subreq, req->base.flags,
-					      NULL, NULL);
-		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
-		err = crypto_skcipher_decrypt(subreq);
-		skcipher_request_zero(subreq);
-		return err;
+		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&rctx->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
+					   req->dst, req->cryptlen, req->iv);
+		return crypto_skcipher_decrypt(&rctx->fallback_req);
 	}
 
 	return sahara_aes_crypt(req, FLAGS_CBC);
 }
 
-static int sahara_aes_cra_init(struct crypto_tfm *tfm)
+static int sahara_aes_init_tfm(struct crypto_skcipher *tfm)
 {
-	const char *name = crypto_tfm_alg_name(tfm);
-	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
+	const char *name = crypto_tfm_alg_name(&tfm->base);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	ctx->fallback = crypto_alloc_skcipher(name, 0,
-					      CRYPTO_ALG_ASYNC |
 					      CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(ctx->fallback)) {
 		pr_err("Error allocating fallback algo %s\n", name);
 		return PTR_ERR(ctx->fallback);
 	}
 
-	tfm->crt_ablkcipher.reqsize = sizeof(struct sahara_aes_reqctx);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct sahara_aes_reqctx) +
+					 crypto_skcipher_reqsize(ctx->fallback));
 
 	return 0;
 }
 
-static void sahara_aes_cra_exit(struct crypto_tfm *tfm)
+static void sahara_aes_exit_tfm(struct crypto_skcipher *tfm)
 {
-	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	crypto_free_skcipher(ctx->fallback);
 }
@@ -1059,10 +1042,10 @@
 	do {
 		__set_current_state(TASK_INTERRUPTIBLE);
 
-		mutex_lock(&dev->queue_mutex);
+		spin_lock_bh(&dev->queue_spinlock);
 		backlog = crypto_get_backlog(&dev->queue);
 		async_req = crypto_dequeue_request(&dev->queue);
-		mutex_unlock(&dev->queue_mutex);
+		spin_unlock_bh(&dev->queue_spinlock);
 
 		if (backlog)
 			backlog->complete(backlog, -EINPROGRESS);
@@ -1075,8 +1058,8 @@
 
 			ret = sahara_sha_process(req);
 		} else {
-			struct ablkcipher_request *req =
-				ablkcipher_request_cast(async_req);
+			struct skcipher_request *req =
+				skcipher_request_cast(async_req);
 
 			ret = sahara_aes_process(req);
 		}
@@ -1108,9 +1091,9 @@
 		rctx->first = 1;
 	}
 
-	mutex_lock(&dev->queue_mutex);
+	spin_lock_bh(&dev->queue_spinlock);
 	ret = crypto_enqueue_request(&dev->queue, &req->base);
-	mutex_unlock(&dev->queue_mutex);
+	spin_unlock_bh(&dev->queue_spinlock);
 
 	wake_up_process(dev->kthread);
 
@@ -1193,48 +1176,42 @@
 	return 0;
 }
 
-static struct crypto_alg aes_algs[] = {
+static struct skcipher_alg aes_algs[] = {
 {
-	.cra_name = "ecb(aes)",
-	.cra_driver_name = "sahara-ecb-aes",
-	.cra_priority = 300,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-		     CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct sahara_ctx),
-	.cra_alignmask = 0x0,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = sahara_aes_cra_init,
-	.cra_exit = sahara_aes_cra_exit,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE ,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.setkey = sahara_aes_setkey,
-		.encrypt = sahara_aes_ecb_encrypt,
-		.decrypt = sahara_aes_ecb_decrypt,
-	}
+	.base.cra_name = "ecb(aes)",
+	.base.cra_driver_name = "sahara-ecb-aes",
+	.base.cra_priority = 300,
+	.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize = AES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct sahara_ctx),
+	.base.cra_alignmask = 0x0,
+	.base.cra_module = THIS_MODULE,
+
+	.init = sahara_aes_init_tfm,
+	.exit = sahara_aes_exit_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE ,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = sahara_aes_setkey,
+	.encrypt = sahara_aes_ecb_encrypt,
+	.decrypt = sahara_aes_ecb_decrypt,
 }, {
-	.cra_name = "cbc(aes)",
-	.cra_driver_name = "sahara-cbc-aes",
-	.cra_priority = 300,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-		     CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct sahara_ctx),
-	.cra_alignmask = 0x0,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = sahara_aes_cra_init,
-	.cra_exit = sahara_aes_cra_exit,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE ,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = sahara_aes_setkey,
-		.encrypt = sahara_aes_cbc_encrypt,
-		.decrypt = sahara_aes_cbc_decrypt,
-	}
+	.base.cra_name = "cbc(aes)",
+	.base.cra_driver_name = "sahara-cbc-aes",
+	.base.cra_priority = 300,
+	.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize = AES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct sahara_ctx),
+	.base.cra_alignmask = 0x0,
+	.base.cra_module = THIS_MODULE,
+
+	.init = sahara_aes_init_tfm,
+	.exit = sahara_aes_exit_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE ,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.ivsize = AES_BLOCK_SIZE,
+	.setkey = sahara_aes_setkey,
+	.encrypt = sahara_aes_cbc_encrypt,
+	.decrypt = sahara_aes_cbc_decrypt,
 }
 };
 
@@ -1322,8 +1299,7 @@
 	unsigned int i, j, k, l;
 
 	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
-		INIT_LIST_HEAD(&aes_algs[i].cra_list);
-		err = crypto_register_alg(&aes_algs[i]);
+		err = crypto_register_skcipher(&aes_algs[i]);
 		if (err)
 			goto err_aes_algs;
 	}
@@ -1353,7 +1329,7 @@
 
 err_aes_algs:
 	for (j = 0; j < i; j++)
-		crypto_unregister_alg(&aes_algs[j]);
+		crypto_unregister_skcipher(&aes_algs[j]);
 
 	return err;
 }
@@ -1363,7 +1339,7 @@
 	unsigned int i;
 
 	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
-		crypto_unregister_alg(&aes_algs[i]);
+		crypto_unregister_skcipher(&aes_algs[i]);
 
 	for (i = 0; i < ARRAY_SIZE(sha_v3_algs); i++)
 		crypto_unregister_ahash(&sha_v3_algs[i]);
@@ -1389,7 +1365,6 @@
 static int sahara_probe(struct platform_device *pdev)
 {
 	struct sahara_dev *dev;
-	struct resource *res;
 	u32 version;
 	int irq;
 	int err;
@@ -1403,17 +1378,14 @@
 	platform_set_drvdata(pdev, dev);
 
 	/* Get the base address */
-	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
-	dev->regs_base = devm_ioremap_resource(&pdev->dev, res);
+	dev->regs_base = devm_platform_ioremap_resource(pdev, 0);
 	if (IS_ERR(dev->regs_base))
 		return PTR_ERR(dev->regs_base);
 
 	/* Get the IRQ */
 	irq = platform_get_irq(pdev, 0);
-	if (irq < 0) {
-		dev_err(&pdev->dev, "failed to get irq resource\n");
+	if (irq < 0)
 		return irq;
-	}
 
 	err = devm_request_irq(&pdev->dev, irq, sahara_irq_handler,
 			       0, dev_name(&pdev->dev), dev);
@@ -1482,7 +1454,7 @@
 
 	crypto_init_queue(&dev->queue, SAHARA_QUEUE_LENGTH);
 
-	mutex_init(&dev->queue_mutex);
+	spin_lock_init(&dev->queue_spinlock);
 
 	dev_ptr = dev;
 
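
For reference only, and not part of the patch above: a minimal sketch of how a kernel-side caller could exercise the "cbc(aes)" skcipher that the converted driver now registers through the crypto_skcipher API. The function name sahara_cbc_aes_demo and the zeroed key, IV, and buffer are placeholder assumptions chosen for illustration; error handling is abbreviated.

/*
 * Illustrative sketch only -- not part of the patch. Demonstrates the
 * standard skcipher request flow against an asynchronous driver such
 * as sahara: allocate the tfm, set a key, submit a request, and wait
 * for completion via crypto_wait_req().
 */
#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int sahara_cbc_aes_demo(void)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	u8 key[AES_KEYSIZE_128] = { 0 };	/* placeholder key */
	u8 iv[AES_BLOCK_SIZE] = { 0 };		/* placeholder IV */
	u8 *buf = NULL;
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	buf = kzalloc(4 * AES_BLOCK_SIZE, GFP_KERNEL);	/* DMA-able data buffer */
	if (!req || !buf) {
		err = -ENOMEM;
		goto out_free;
	}

	/* Whole-block payload, as the driver rejects partial AES blocks. */
	sg_init_one(&sg, buf, 4 * AES_BLOCK_SIZE);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
					   CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, 4 * AES_BLOCK_SIZE, iv);

	/* The driver completes asynchronously from its kthread; wait here. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

out_free:
	kfree(buf);
	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}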