2024-05-10 37f49e37ab4cb5d0bc4c60eb5c6d4dd57db767bb
kernel/drivers/crypto/stm32/stm32-cryp.c
@@ -1,7 +1,7 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * Copyright (C) STMicroelectronics SA 2017
  * Author: Fabien Dessenne <fabien.dessenne@st.com>
- * License terms: GNU General Public License (GPL), version 2
  */
 
 #include <linux/clk.h>
@@ -15,10 +15,11 @@
 #include <linux/reset.h>
 
 #include <crypto/aes.h>
-#include <crypto/des.h>
+#include <crypto/internal/des.h>
 #include <crypto/engine.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/internal/aead.h>
+#include <crypto/internal/skcipher.h>
 
 #define DRIVER_NAME "stm32-cryp"
 
@@ -36,7 +37,6 @@
 /* Mode mask = bits [15..0] */
 #define FLG_MODE_MASK GENMASK(15, 0)
 /* Bit [31..16] status */
-#define FLG_CCM_PADDED_WA BIT(16)
 
 /* Registers */
 #define CRYP_CR 0x00000000
@@ -104,8 +104,6 @@
 /* Misc */
 #define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32))
 #define GCM_CTR_INIT 2
-#define _walked_in (cryp->in_walk.offset - cryp->in_sg->offset)
-#define _walked_out (cryp->out_walk.offset - cryp->out_sg->offset)
 #define CRYP_AUTOSUSPEND_DELAY 50
 
 struct stm32_cryp_caps {
@@ -117,7 +115,7 @@
 	struct crypto_engine_ctx enginectx;
 	struct stm32_cryp *cryp;
 	int keylen;
-	u32 key[AES_KEYSIZE_256 / sizeof(u32)];
+	__be32 key[AES_KEYSIZE_256 / sizeof(u32)];
 	unsigned long flags;
 };
 
@@ -137,33 +135,22 @@
 
 	struct crypto_engine *engine;
 
-	struct mutex lock; /* protects req / areq */
-	struct ablkcipher_request *req;
+	struct skcipher_request *req;
 	struct aead_request *areq;
 
 	size_t authsize;
 	size_t hw_blocksize;
 
-	size_t total_in;
-	size_t total_in_save;
-	size_t total_out;
-	size_t total_out_save;
+	size_t payload_in;
+	size_t header_in;
+	size_t payload_out;
 
-	struct scatterlist *in_sg;
 	struct scatterlist *out_sg;
-	struct scatterlist *out_sg_save;
-
-	struct scatterlist in_sgl;
-	struct scatterlist out_sgl;
-	bool sgs_copied;
-
-	int in_sg_len;
-	int out_sg_len;
 
 	struct scatter_walk in_walk;
 	struct scatter_walk out_walk;
 
-	u32 last_ctr[4];
+	__be32 last_ctr[4];
 	u32 gcm_ctr;
 };
 
@@ -262,6 +249,7 @@
 }
 
 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
+static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);
 
 static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
 {
@@ -283,114 +271,34 @@
 	return cryp;
 }
 
-static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
-				    size_t align)
-{
-	int len = 0;
-
-	if (!total)
-		return 0;
-
-	if (!IS_ALIGNED(total, align))
-		return -EINVAL;
-
-	while (sg) {
-		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
-			return -EINVAL;
-
-		if (!IS_ALIGNED(sg->length, align))
-			return -EINVAL;
-
-		len += sg->length;
-		sg = sg_next(sg);
-	}
-
-	if (len != total)
-		return -EINVAL;
-
-	return 0;
-}
-
-static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
-{
-	int ret;
-
-	ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
-				       cryp->hw_blocksize);
-	if (ret)
-		return ret;
-
-	ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
-				       cryp->hw_blocksize);
-
-	return ret;
-}
-
-static void sg_copy_buf(void *buf, struct scatterlist *sg,
-			unsigned int start, unsigned int nbytes, int out)
-{
-	struct scatter_walk walk;
-
-	if (!nbytes)
-		return;
-
-	scatterwalk_start(&walk, sg);
-	scatterwalk_advance(&walk, start);
-	scatterwalk_copychunks(buf, &walk, nbytes, out);
-	scatterwalk_done(&walk, out, 0);
-}
-
-static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
-{
-	void *buf_in, *buf_out;
-	int pages, total_in, total_out;
-
-	if (!stm32_cryp_check_io_aligned(cryp)) {
-		cryp->sgs_copied = 0;
-		return 0;
-	}
-
-	total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
-	pages = total_in ? get_order(total_in) : 1;
-	buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);
-
-	total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
-	pages = total_out ? get_order(total_out) : 1;
-	buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);
-
-	if (!buf_in || !buf_out) {
-		dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
-		cryp->sgs_copied = 0;
-		return -EFAULT;
-	}
-
-	sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);
-
-	sg_init_one(&cryp->in_sgl, buf_in, total_in);
-	cryp->in_sg = &cryp->in_sgl;
-	cryp->in_sg_len = 1;
-
-	sg_init_one(&cryp->out_sgl, buf_out, total_out);
-	cryp->out_sg_save = cryp->out_sg;
-	cryp->out_sg = &cryp->out_sgl;
-	cryp->out_sg_len = 1;
-
-	cryp->sgs_copied = 1;
-
-	return 0;
-}
-
-static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
+static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
 {
 	if (!iv)
 		return;
 
-	stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
-	stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));
+	stm32_cryp_write(cryp, CRYP_IV0LR, be32_to_cpu(*iv++));
+	stm32_cryp_write(cryp, CRYP_IV0RR, be32_to_cpu(*iv++));
 
 	if (is_aes(cryp)) {
-		stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
-		stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
+		stm32_cryp_write(cryp, CRYP_IV1LR, be32_to_cpu(*iv++));
+		stm32_cryp_write(cryp, CRYP_IV1RR, be32_to_cpu(*iv++));
+	}
+}
+
+static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
+{
+	struct skcipher_request *req = cryp->req;
+	__be32 *tmp = (void *)req->iv;
+
+	if (!tmp)
+		return;
+
+	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
+	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
+
+	if (is_aes(cryp)) {
+		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
+		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
 	}
 }
 
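[Editorial aside, not part of the patch: swapping cpu_to_be32() on a u32 for be32_to_cpu() on a __be32 does not change the generated byte swap — on little-endian hosts both helpers swap, on big-endian both are the identity — it gives the IV its correct big-endian type so sparse can check the conversions. A self-contained user-space sketch of that equivalence, using htonl()/ntohl() as stand-ins for the kernel helpers:]

#include <arpa/inet.h>
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t v = 0x12345678u;

	/* Same transform in both directions, whatever the host order. */
	assert(htonl(v) == ntohl(v));
	printf("host %08x <-> wire %08x\n", v, (uint32_t)htonl(v));
	return 0;
}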
@@ -400,13 +308,13 @@
 	unsigned int i;
 	int r_id;
 
 	if (is_des(c)) {
-		stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
-		stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
+		stm32_cryp_write(c, CRYP_K1LR, be32_to_cpu(c->ctx->key[0]));
+		stm32_cryp_write(c, CRYP_K1RR, be32_to_cpu(c->ctx->key[1]));
 	} else {
 		r_id = CRYP_K3RR;
 		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
 			stm32_cryp_write(c, r_id,
-					 cpu_to_be32(c->ctx->key[i - 1]));
+					 be32_to_cpu(c->ctx->key[i - 1]));
 	}
 }
 
@@ -452,7 +360,7 @@
 static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
 {
 	int ret;
-	u32 iv[4];
+	__be32 iv[4];
 
 	/* Phase 1 : init */
 	memcpy(iv, cryp->areq->iv, 12);
464372
465373 /* Wait for end of processing */
466374 ret = stm32_cryp_wait_enable(cryp);
467
- if (ret)
375
+ if (ret) {
468376 dev_err(cryp->dev, "Timeout (gcm init)\n");
377
+ return ret;
378
+ }
469379
470
- return ret;
380
+ /* Prepare next phase */
381
+ if (cryp->areq->assoclen) {
382
+ cfg |= CR_PH_HEADER;
383
+ stm32_cryp_write(cryp, CRYP_CR, cfg);
384
+ } else if (stm32_cryp_get_input_text_len(cryp)) {
385
+ cfg |= CR_PH_PAYLOAD;
386
+ stm32_cryp_write(cryp, CRYP_CR, cfg);
387
+ }
388
+
389
+ return 0;
390
+}
391
+
392
+static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
393
+{
394
+ u32 cfg;
395
+ int err;
396
+
397
+ /* Check if whole header written */
398
+ if (!cryp->header_in) {
399
+ /* Wait for completion */
400
+ err = stm32_cryp_wait_busy(cryp);
401
+ if (err) {
402
+ dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
403
+ stm32_cryp_write(cryp, CRYP_IMSCR, 0);
404
+ stm32_cryp_finish_req(cryp, err);
405
+ return;
406
+ }
407
+
408
+ if (stm32_cryp_get_input_text_len(cryp)) {
409
+ /* Phase 3 : payload */
410
+ cfg = stm32_cryp_read(cryp, CRYP_CR);
411
+ cfg &= ~CR_CRYPEN;
412
+ stm32_cryp_write(cryp, CRYP_CR, cfg);
413
+
414
+ cfg &= ~CR_PH_MASK;
415
+ cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
416
+ stm32_cryp_write(cryp, CRYP_CR, cfg);
417
+ } else {
418
+ /*
419
+ * Phase 4 : tag.
420
+ * Nothing to read, nothing to write, caller have to
421
+ * end request
422
+ */
423
+ }
424
+ }
425
+}
426
+
427
+static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
428
+{
429
+ unsigned int i;
430
+ size_t written;
431
+ size_t len;
432
+ u32 alen = cryp->areq->assoclen;
433
+ u32 block[AES_BLOCK_32] = {0};
434
+ u8 *b8 = (u8 *)block;
435
+
436
+ if (alen <= 65280) {
437
+ /* Write first u32 of B1 */
438
+ b8[0] = (alen >> 8) & 0xFF;
439
+ b8[1] = alen & 0xFF;
440
+ len = 2;
441
+ } else {
442
+ /* Build the two first u32 of B1 */
443
+ b8[0] = 0xFF;
444
+ b8[1] = 0xFE;
445
+ b8[2] = (alen & 0xFF000000) >> 24;
446
+ b8[3] = (alen & 0x00FF0000) >> 16;
447
+ b8[4] = (alen & 0x0000FF00) >> 8;
448
+ b8[5] = alen & 0x000000FF;
449
+ len = 6;
450
+ }
451
+
452
+ written = min_t(size_t, AES_BLOCK_SIZE - len, alen);
453
+
454
+ scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
455
+ for (i = 0; i < AES_BLOCK_32; i++)
456
+ stm32_cryp_write(cryp, CRYP_DIN, block[i]);
457
+
458
+ cryp->header_in -= written;
459
+
460
+ stm32_crypt_gcmccm_end_header(cryp);
471461 }
472462
473463 static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
474464 {
475465 int ret;
476
- u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
466
+ u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
467
+ u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
468
+ __be32 *bd;
477469 u32 *d;
478470 unsigned int i, textlen;
479471
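[Editorial aside, not part of the patch: stm32_cryp_write_ccm_first_header() above encodes the associated-data length at the front of CCM block B1 the way RFC 3610 defines it — two big-endian bytes for small lengths, otherwise the 0xFF 0xFE marker followed by four big-endian bytes — before filling the rest of the block with the first AAD bytes via scatterwalk_copychunks(). A stand-alone sketch of just that encoding (ccm_encode_alen is a hypothetical name):]

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

/* Encode l(a) at the start of B1; returns the bytes consumed. */
static size_t ccm_encode_alen(uint8_t *b1, uint32_t alen)
{
	if (alen < 65280) {		/* alen < 2^16 - 2^8 */
		b1[0] = (alen >> 8) & 0xFF;
		b1[1] = alen & 0xFF;
		return 2;
	}
	b1[0] = 0xFF;			/* marker for a 32-bit length */
	b1[1] = 0xFE;
	b1[2] = (alen >> 24) & 0xFF;
	b1[3] = (alen >> 16) & 0xFF;
	b1[4] = (alen >> 8) & 0xFF;
	b1[5] = alen & 0xFF;
	return 6;
}

int main(void)
{
	uint8_t b1[6] = {0};

	assert(ccm_encode_alen(b1, 20) == 2 && b1[0] == 0 && b1[1] == 20);
	assert(ccm_encode_alen(b1, 0x12345678) == 6 && b1[0] == 0xFF);
	return 0;
}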
@@ -481,7 +473,7 @@
 	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
 	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
 	iv[AES_BLOCK_SIZE - 1] = 1;
-	stm32_cryp_hw_write_iv(cryp, (u32 *)iv);
+	stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);
 
 	/* Build B0 */
 	memcpy(b0, iv, AES_BLOCK_SIZE);
@@ -501,19 +493,36 @@
 
 	/* Write B0 */
 	d = (u32 *)b0;
+	bd = (__be32 *)b0;
 
 	for (i = 0; i < AES_BLOCK_32; i++) {
+		u32 xd = d[i];
+
 		if (!cryp->caps->padding_wa)
-			*d = cpu_to_be32(*d);
-		stm32_cryp_write(cryp, CRYP_DIN, *d++);
+			xd = be32_to_cpu(bd[i]);
+		stm32_cryp_write(cryp, CRYP_DIN, xd);
 	}
 
 	/* Wait for end of processing */
 	ret = stm32_cryp_wait_enable(cryp);
-	if (ret)
+	if (ret) {
 		dev_err(cryp->dev, "Timeout (ccm init)\n");
+		return ret;
+	}
 
-	return ret;
+	/* Prepare next phase */
+	if (cryp->areq->assoclen) {
+		cfg |= CR_PH_HEADER | CR_CRYPEN;
+		stm32_cryp_write(cryp, CRYP_CR, cfg);
+
+		/* Write first (special) block (may move to next phase [payload]) */
+		stm32_cryp_write_ccm_first_header(cryp);
+	} else if (stm32_cryp_get_input_text_len(cryp)) {
+		cfg |= CR_PH_PAYLOAD;
+		stm32_cryp_write(cryp, CRYP_CR, cfg);
+	}
+
+	return 0;
 }
 
 static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
@@ -584,23 +593,13 @@
 		if (ret)
 			return ret;
 
-		/* Phase 2 : header (authenticated data) */
-		if (cryp->areq->assoclen) {
-			cfg |= CR_PH_HEADER;
-		} else if (stm32_cryp_get_input_text_len(cryp)) {
-			cfg |= CR_PH_PAYLOAD;
-			stm32_cryp_write(cryp, CRYP_CR, cfg);
-		} else {
-			cfg |= CR_PH_INIT;
-		}
-
 		break;
 
 	case CR_DES_CBC:
 	case CR_TDES_CBC:
 	case CR_AES_CBC:
 	case CR_AES_CTR:
-		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info);
+		stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
 		break;
 
 	default:
@@ -612,8 +611,6 @@
 
 	stm32_cryp_write(cryp, CRYP_CR, cfg);
 
-	cryp->flags &= ~FLG_CCM_PADDED_WA;
-
 	return 0;
 }
 
@@ -623,40 +620,17 @@
 	/* Phase 4 : output tag */
 	err = stm32_cryp_read_auth_tag(cryp);
 
-	if (cryp->sgs_copied) {
-		void *buf_in, *buf_out;
-		int pages, len;
-
-		buf_in = sg_virt(&cryp->in_sgl);
-		buf_out = sg_virt(&cryp->out_sgl);
-
-		sg_copy_buf(buf_out, cryp->out_sg_save, 0,
-			    cryp->total_out_save, 1);
-
-		len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
-		pages = len ? get_order(len) : 1;
-		free_pages((unsigned long)buf_in, pages);
-
-		len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
-		pages = len ? get_order(len) : 1;
-		free_pages((unsigned long)buf_out, pages);
-	}
+	if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
+		stm32_cryp_get_iv(cryp);
 
 	pm_runtime_mark_last_busy(cryp->dev);
 	pm_runtime_put_autosuspend(cryp->dev);
 
-	if (is_gcm(cryp) || is_ccm(cryp)) {
+	if (is_gcm(cryp) || is_ccm(cryp))
 		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
-		cryp->areq = NULL;
-	} else {
-		crypto_finalize_ablkcipher_request(cryp->engine, cryp->req,
+	else
+		crypto_finalize_skcipher_request(cryp->engine, cryp->req,
						 err);
-		cryp->req = NULL;
-	}
-
-	memset(cryp->ctx->key, 0, cryp->ctx->keylen);
-
-	mutex_unlock(&cryp->lock);
 }
 
 static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
@@ -671,11 +645,11 @@
 static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
 					 void *areq);
 
-static int stm32_cryp_cra_init(struct crypto_tfm *tfm)
+static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
 {
-	struct stm32_cryp_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-	tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));
 
 	ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
 	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
@@ -700,11 +674,11 @@
 	return 0;
 }
 
-static int stm32_cryp_crypt(struct ablkcipher_request *req, unsigned long mode)
+static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
 {
-	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
-			crypto_ablkcipher_reqtfm(req));
-	struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
+	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
+			crypto_skcipher_reqtfm(req));
+	struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
 	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
 
 	if (!cryp)
@@ -712,7 +686,7 @@
 
 	rctx->mode = mode;
 
-	return crypto_transfer_ablkcipher_request_to_engine(cryp->engine, req);
+	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
 }
 
 static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
@@ -729,10 +703,10 @@
 	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
 }
 
-static int stm32_cryp_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			     unsigned int keylen)
 {
-	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	memcpy(ctx->key, key, keylen);
 	ctx->keylen = keylen;
@@ -740,7 +714,7 @@
 	return 0;
 }
 
-static int stm32_cryp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
 				 unsigned int keylen)
 {
 	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
@@ -750,22 +724,18 @@
 		return stm32_cryp_setkey(tfm, key, keylen);
 }
 
-static int stm32_cryp_des_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
 				 unsigned int keylen)
 {
-	if (keylen != DES_KEY_SIZE)
-		return -EINVAL;
-	else
-		return stm32_cryp_setkey(tfm, key, keylen);
+	return verify_skcipher_des_key(tfm, key) ?:
+	       stm32_cryp_setkey(tfm, key, keylen);
 }
 
-static int stm32_cryp_tdes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
 				  unsigned int keylen)
 {
-	if (keylen != (3 * DES_KEY_SIZE))
-		return -EINVAL;
-	else
-		return stm32_cryp_setkey(tfm, key, keylen);
+	return verify_skcipher_des3_key(tfm, key) ?:
+	       stm32_cryp_setkey(tfm, key, keylen);
 }
 
 static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
@@ -786,7 +756,20 @@
 static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
 					  unsigned int authsize)
 {
-	return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
+	switch (authsize) {
+	case 4:
+	case 8:
+	case 12:
+	case 13:
+	case 14:
+	case 15:
+	case 16:
+		break;
+	default:
+		return -EINVAL;
+	}
+
+	return 0;
 }
 
 static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
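[Editorial aside, not part of the patch: the setauthsize callback is what accepts or rejects the tag length an API user requests before any request is queued. A hedged user-space sketch of how that length reaches the driver through AF_ALG — assuming "gcm(aes)" resolves to this driver on the target system:]

#include <linux/if_alg.h>
#include <string.h>
#include <sys/socket.h>
#include <unistd.h>

#ifndef SOL_ALG
#define SOL_ALG 279
#endif

int main(void)
{
	struct sockaddr_alg sa = {
		.salg_family = AF_ALG,
		.salg_type   = "aead",
		.salg_name   = "gcm(aes)",
	};
	static const unsigned char key[16];
	int tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);

	if (tfmfd < 0)
		return 1;
	if (bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa)) < 0)
		return 1;
	if (setsockopt(tfmfd, SOL_ALG, ALG_SET_KEY, key, sizeof(key)) < 0)
		return 1;
	/* Lands in ->setauthsize(): 12 is now accepted, 5 would be -EINVAL. */
	if (setsockopt(tfmfd, SOL_ALG, ALG_SET_AEAD_AUTHSIZE, NULL, 12) < 0)
		return 1;
	close(tfmfd);
	return 0;
}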
@@ -808,33 +791,63 @@
 	return 0;
 }
 
-static int stm32_cryp_aes_ecb_encrypt(struct ablkcipher_request *req)
+static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % AES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
 }
 
-static int stm32_cryp_aes_ecb_decrypt(struct ablkcipher_request *req)
+static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % AES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
 }
 
-static int stm32_cryp_aes_cbc_encrypt(struct ablkcipher_request *req)
+static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % AES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
 }
 
-static int stm32_cryp_aes_cbc_decrypt(struct ablkcipher_request *req)
+static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % AES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
 }
 
-static int stm32_cryp_aes_ctr_encrypt(struct ablkcipher_request *req)
+static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
 }
 
-static int stm32_cryp_aes_ctr_decrypt(struct ablkcipher_request *req)
+static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
 }
@@ -848,68 +861,138 @@
 	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
 }
 
+static inline int crypto_ccm_check_iv(const u8 *iv)
+{
+	/* 2 <= L <= 8, so 1 <= L' <= 7. */
+	if (iv[0] < 1 || iv[0] > 7)
+		return -EINVAL;
+
+	return 0;
+}
+
 static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
 {
+	int err;
+
+	err = crypto_ccm_check_iv(req->iv);
+	if (err)
+		return err;
+
 	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
 }
 
 static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
 {
+	int err;
+
+	err = crypto_ccm_check_iv(req->iv);
+	if (err)
+		return err;
+
 	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
 }
 
-static int stm32_cryp_des_ecb_encrypt(struct ablkcipher_request *req)
+static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % DES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
 }
 
-static int stm32_cryp_des_ecb_decrypt(struct ablkcipher_request *req)
+static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % DES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
 }
 
-static int stm32_cryp_des_cbc_encrypt(struct ablkcipher_request *req)
+static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % DES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
 }
 
-static int stm32_cryp_des_cbc_decrypt(struct ablkcipher_request *req)
+static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % DES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
 }
 
-static int stm32_cryp_tdes_ecb_encrypt(struct ablkcipher_request *req)
+static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % DES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
 }
 
-static int stm32_cryp_tdes_ecb_decrypt(struct ablkcipher_request *req)
+static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % DES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
 }
 
-static int stm32_cryp_tdes_cbc_encrypt(struct ablkcipher_request *req)
+static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % DES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
 }
 
-static int stm32_cryp_tdes_cbc_decrypt(struct ablkcipher_request *req)
+static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
 {
+	if (req->cryptlen % DES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (req->cryptlen == 0)
+		return 0;
+
 	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
 }
 
-static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
+static int stm32_cryp_prepare_req(struct skcipher_request *req,
 				  struct aead_request *areq)
 {
 	struct stm32_cryp_ctx *ctx;
 	struct stm32_cryp *cryp;
 	struct stm32_cryp_reqctx *rctx;
+	struct scatterlist *in_sg;
 	int ret;
 
 	if (!req && !areq)
 		return -EINVAL;
 
-	ctx = req ? crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req)) :
+	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
 		    crypto_aead_ctx(crypto_aead_reqtfm(areq));
 
 	cryp = ctx->cryp;
@@ -917,9 +1000,7 @@
 	if (!cryp)
 		return -ENODEV;
 
-	mutex_lock(&cryp->lock);
-
-	rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
+	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
 	rctx->mode &= FLG_MODE_MASK;
 
 	ctx->cryp = cryp;
@@ -930,88 +1011,65 @@
 
 	if (req) {
 		cryp->req = req;
-		cryp->total_in = req->nbytes;
-		cryp->total_out = cryp->total_in;
+		cryp->areq = NULL;
+		cryp->header_in = 0;
+		cryp->payload_in = req->cryptlen;
+		cryp->payload_out = req->cryptlen;
+		cryp->authsize = 0;
 	} else {
 		/*
 		 * Length of input and output data:
 		 * Encryption case:
-		 *  INPUT  =   AssocData  ||   PlainText
+		 *  INPUT  =   AssocData   ||   PlainText
 		 *          <- assoclen ->  <- cryptlen ->
-		 *          <------- total_in ----------->
 		 *
-		 *  OUTPUT =   AssocData  ||  CipherText  ||   AuthTag
-		 *          <- assoclen ->  <- cryptlen ->  <- authsize ->
-		 *          <---------------- total_out ----------------->
+		 *  OUTPUT =   AssocData   ||  CipherText   ||   AuthTag
+		 *          <- assoclen ->  <-- cryptlen -->  <- authsize ->
 		 *
 		 * Decryption case:
-		 *  INPUT  =   AssocData  ||  CipherText  ||  AuthTag
-		 *          <- assoclen ->  <--------- cryptlen --------->
-		 *                                          <- authsize ->
-		 *          <---------------- total_in ------------------>
+		 *  INPUT  =   AssocData   ||  CipherTex  ||  AuthTag
+		 *          <- assoclen --->  <---------- cryptlen ---------->
 		 *
-		 *  OUTPUT =   AssocData  ||   PlainText
-		 *          <- assoclen ->  <- crypten - authsize ->
-		 *          <---------- total_out ----------------->
+		 *  OUTPUT =   AssocData   ||   PlainText
+		 *          <- assoclen ->  <- cryptlen - authsize ->
 		 */
 		cryp->areq = areq;
+		cryp->req = NULL;
 		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
-		cryp->total_in = areq->assoclen + areq->cryptlen;
-		if (is_encrypt(cryp))
-			/* Append auth tag to output */
-			cryp->total_out = cryp->total_in + cryp->authsize;
-		else
-			/* No auth tag in output */
-			cryp->total_out = cryp->total_in - cryp->authsize;
+		if (is_encrypt(cryp)) {
+			cryp->payload_in = areq->cryptlen;
+			cryp->header_in = areq->assoclen;
+			cryp->payload_out = areq->cryptlen;
+		} else {
+			cryp->payload_in = areq->cryptlen - cryp->authsize;
+			cryp->header_in = areq->assoclen;
+			cryp->payload_out = cryp->payload_in;
+		}
 	}
 
-	cryp->total_in_save = cryp->total_in;
-	cryp->total_out_save = cryp->total_out;
+	in_sg = req ? req->src : areq->src;
+	scatterwalk_start(&cryp->in_walk, in_sg);
 
-	cryp->in_sg = req ? req->src : areq->src;
 	cryp->out_sg = req ? req->dst : areq->dst;
-	cryp->out_sg_save = cryp->out_sg;
-
-	cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
-	if (cryp->in_sg_len < 0) {
-		dev_err(cryp->dev, "Cannot get in_sg_len\n");
-		ret = cryp->in_sg_len;
-		goto out;
-	}
-
-	cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
-	if (cryp->out_sg_len < 0) {
-		dev_err(cryp->dev, "Cannot get out_sg_len\n");
-		ret = cryp->out_sg_len;
-		goto out;
-	}
-
-	ret = stm32_cryp_copy_sgs(cryp);
-	if (ret)
-		goto out;
-
-	scatterwalk_start(&cryp->in_walk, cryp->in_sg);
 	scatterwalk_start(&cryp->out_walk, cryp->out_sg);
 
 	if (is_gcm(cryp) || is_ccm(cryp)) {
 		/* In output, jump after assoc data */
-		scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
-		cryp->total_out -= cryp->areq->assoclen;
+		scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
 	}
 
-	ret = stm32_cryp_hw_init(cryp);
-out:
-	if (ret)
-		mutex_unlock(&cryp->lock);
+	if (is_ctr(cryp))
+		memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));
 
+	ret = stm32_cryp_hw_init(cryp);
 	return ret;
 }
 
 static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
 					 void *areq)
 {
-	struct ablkcipher_request *req = container_of(areq,
-						      struct ablkcipher_request,
+	struct skcipher_request *req = container_of(areq,
+						    struct skcipher_request,
 						      base);
 
 	return stm32_cryp_prepare_req(req, NULL);
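[Editorial aside, not part of the patch: the payload/header bookkeeping above follows from the request layout in the comment — on decrypt, req->cryptlen covers the ciphertext plus the trailing tag, so the tag bytes are not payload. A compact restatement with sample values:]

#include <assert.h>
#include <stdbool.h>
#include <stddef.h>

struct lens { size_t header_in, payload_in, payload_out; };

static struct lens aead_lens(size_t assoclen, size_t cryptlen,
			     size_t authsize, bool encrypt)
{
	struct lens l = { .header_in = assoclen };

	/* Decrypt input ends with the auth tag; it is read separately. */
	l.payload_in = encrypt ? cryptlen : cryptlen - authsize;
	l.payload_out = l.payload_in;
	return l;
}

int main(void)
{
	/* Encrypt: INPUT = AAD || PT; OUTPUT = AAD || CT || tag. */
	struct lens e = aead_lens(20, 64, 16, true);
	assert(e.payload_in == 64 && e.payload_out == 64);

	/* Decrypt: INPUT = AAD || CT || tag; OUTPUT = AAD || PT. */
	struct lens d = aead_lens(20, 64 + 16, 16, false);
	assert(d.payload_in == 64 && d.payload_out == 64);
	return 0;
}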
@@ -1019,11 +1077,11 @@
 
 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
 {
-	struct ablkcipher_request *req = container_of(areq,
-						      struct ablkcipher_request,
+	struct skcipher_request *req = container_of(areq,
+						    struct skcipher_request,
 						      base);
-	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
-			crypto_ablkcipher_reqtfm(req));
+	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
+			crypto_skcipher_reqtfm(req));
 	struct stm32_cryp *cryp = ctx->cryp;
 
 	if (!cryp)
@@ -1050,8 +1108,7 @@
 	if (!cryp)
 		return -ENODEV;
 
-	if (unlikely(!cryp->areq->assoclen &&
-		     !stm32_cryp_get_input_text_len(cryp))) {
+	if (unlikely(!cryp->payload_in && !cryp->header_in)) {
 		/* No input data to process: get tag and finish */
 		stm32_cryp_finish_req(cryp, 0);
 		return 0;
@@ -1060,43 +1117,10 @@
 	return stm32_cryp_cpu_start(cryp);
 }
 
-static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
-				unsigned int n)
-{
-	scatterwalk_advance(&cryp->out_walk, n);
-
-	if (unlikely(cryp->out_sg->length == _walked_out)) {
-		cryp->out_sg = sg_next(cryp->out_sg);
-		if (cryp->out_sg) {
-			scatterwalk_start(&cryp->out_walk, cryp->out_sg);
-			return (sg_virt(cryp->out_sg) + _walked_out);
-		}
-	}
-
-	return (u32 *)((u8 *)dst + n);
-}
-
-static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
-			       unsigned int n)
-{
-	scatterwalk_advance(&cryp->in_walk, n);
-
-	if (unlikely(cryp->in_sg->length == _walked_in)) {
-		cryp->in_sg = sg_next(cryp->in_sg);
-		if (cryp->in_sg) {
-			scatterwalk_start(&cryp->in_walk, cryp->in_sg);
-			return (sg_virt(cryp->in_sg) + _walked_in);
-		}
-	}
-
-	return (u32 *)((u8 *)src + n);
-}
-
 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
 {
-	u32 cfg, size_bit, *dst, d32;
-	u8 *d8;
-	unsigned int i, j;
+	u32 cfg, size_bit;
+	unsigned int i;
 	int ret = 0;
 
 	/* Update Config */
@@ -1113,31 +1137,34 @@
 		/* GCM: write aad and payload size (in bits) */
 		size_bit = cryp->areq->assoclen * 8;
 		if (cryp->caps->swap_final)
-			size_bit = cpu_to_be32(size_bit);
+			size_bit = (__force u32)cpu_to_be32(size_bit);
 
 		stm32_cryp_write(cryp, CRYP_DIN, 0);
 		stm32_cryp_write(cryp, CRYP_DIN, size_bit);
 
 		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
-				cryp->areq->cryptlen - AES_BLOCK_SIZE;
+				cryp->areq->cryptlen - cryp->authsize;
 		size_bit *= 8;
 		if (cryp->caps->swap_final)
-			size_bit = cpu_to_be32(size_bit);
+			size_bit = (__force u32)cpu_to_be32(size_bit);
 
 		stm32_cryp_write(cryp, CRYP_DIN, 0);
 		stm32_cryp_write(cryp, CRYP_DIN, size_bit);
 	} else {
 		/* CCM: write CTR0 */
-		u8 iv[AES_BLOCK_SIZE];
-		u32 *iv32 = (u32 *)iv;
+		u32 iv32[AES_BLOCK_32];
+		u8 *iv = (u8 *)iv32;
+		__be32 *biv = (__be32 *)iv32;
 
 		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
 		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
 
 		for (i = 0; i < AES_BLOCK_32; i++) {
+			u32 xiv = iv32[i];
+
 			if (!cryp->caps->padding_wa)
-				*iv32 = cpu_to_be32(*iv32);
-			stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
+				xiv = be32_to_cpu(biv[i]);
+			stm32_cryp_write(cryp, CRYP_DIN, xiv);
 		}
 	}
 
@@ -1149,39 +1176,18 @@
 	}
 
 	if (is_encrypt(cryp)) {
+		u32 out_tag[AES_BLOCK_32];
+
 		/* Get and write tag */
-		dst = sg_virt(cryp->out_sg) + _walked_out;
+		for (i = 0; i < AES_BLOCK_32; i++)
+			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);
 
-		for (i = 0; i < AES_BLOCK_32; i++) {
-			if (cryp->total_out >= sizeof(u32)) {
-				/* Read a full u32 */
-				*dst = stm32_cryp_read(cryp, CRYP_DOUT);
-
-				dst = stm32_cryp_next_out(cryp, dst,
-							  sizeof(u32));
-				cryp->total_out -= sizeof(u32);
-			} else if (!cryp->total_out) {
-				/* Empty fifo out (data from input padding) */
-				stm32_cryp_read(cryp, CRYP_DOUT);
-			} else {
-				/* Read less than an u32 */
-				d32 = stm32_cryp_read(cryp, CRYP_DOUT);
-				d8 = (u8 *)&d32;
-
-				for (j = 0; j < cryp->total_out; j++) {
-					*((u8 *)dst) = *(d8++);
-					dst = stm32_cryp_next_out(cryp, dst, 1);
-				}
-				cryp->total_out = 0;
-			}
-		}
+		scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
 	} else {
 		/* Get and check tag */
 		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];
 
-		scatterwalk_map_and_copy(in_tag, cryp->in_sg,
-					 cryp->total_in_save - cryp->authsize,
-					 cryp->authsize, 0);
+		scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);
 
 		for (i = 0; i < AES_BLOCK_32; i++)
 			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);
@@ -1201,115 +1207,59 @@
 {
 	u32 cr;
 
-	if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
-		cryp->last_ctr[3] = 0;
-		cryp->last_ctr[2]++;
-		if (!cryp->last_ctr[2]) {
-			cryp->last_ctr[1]++;
-			if (!cryp->last_ctr[1])
-				cryp->last_ctr[0]++;
-		}
+	if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
+		/*
+		 * In this case, we need to increment manually the ctr counter,
+		 * as HW doesn't handle the U32 carry.
+		 */
+		crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));
 
 		cr = stm32_cryp_read(cryp, CRYP_CR);
 		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);
 
-		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);
+		stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);
 
 		stm32_cryp_write(cryp, CRYP_CR, cr);
 	}
 
-	cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
-	cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
-	cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
-	cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
+	/* The IV registers are BE */
+	cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
+	cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
+	cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
+	cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
 }
 
-static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
+static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
 {
-	unsigned int i, j;
-	u32 d32, *dst;
-	u8 *d8;
-	size_t tag_size;
+	unsigned int i;
+	u32 block[AES_BLOCK_32];
 
-	/* Do no read tag now (if any) */
-	if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
-		tag_size = cryp->authsize;
-	else
-		tag_size = 0;
+	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
+		block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
 
-	dst = sg_virt(cryp->out_sg) + _walked_out;
-
-	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
-		if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
-			/* Read a full u32 */
-			*dst = stm32_cryp_read(cryp, CRYP_DOUT);
-
-			dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
-			cryp->total_out -= sizeof(u32);
-		} else if (cryp->total_out == tag_size) {
-			/* Empty fifo out (data from input padding) */
-			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
-		} else {
-			/* Read less than an u32 */
-			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
-			d8 = (u8 *)&d32;
-
-			for (j = 0; j < cryp->total_out - tag_size; j++) {
-				*((u8 *)dst) = *(d8++);
-				dst = stm32_cryp_next_out(cryp, dst, 1);
-			}
-			cryp->total_out = tag_size;
-		}
-	}
-
-	return !(cryp->total_out - tag_size) || !cryp->total_in;
+	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
+							     cryp->payload_out), 1);
+	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
+				   cryp->payload_out);
 }
 
 static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
 {
-	unsigned int i, j;
-	u32 *src;
-	u8 d8[4];
-	size_t tag_size;
+	unsigned int i;
+	u32 block[AES_BLOCK_32] = {0};
 
-	/* Do no write tag (if any) */
-	if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
-		tag_size = cryp->authsize;
-	else
-		tag_size = 0;
+	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
+							    cryp->payload_in), 0);
+	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
+		stm32_cryp_write(cryp, CRYP_DIN, block[i]);
 
-	src = sg_virt(cryp->in_sg) + _walked_in;
-
-	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
-		if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
-			/* Write a full u32 */
-			stm32_cryp_write(cryp, CRYP_DIN, *src);
-
-			src = stm32_cryp_next_in(cryp, src, sizeof(u32));
-			cryp->total_in -= sizeof(u32);
-		} else if (cryp->total_in == tag_size) {
-			/* Write padding data */
-			stm32_cryp_write(cryp, CRYP_DIN, 0);
-		} else {
-			/* Write less than an u32 */
-			memset(d8, 0, sizeof(u32));
-			for (j = 0; j < cryp->total_in - tag_size; j++) {
-				d8[j] = *((u8 *)src);
-				src = stm32_cryp_next_in(cryp, src, 1);
-			}
-
-			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
-			cryp->total_in = tag_size;
-		}
-	}
+	cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
 }
 
 static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
 {
 	int err;
-	u32 cfg, tmp[AES_BLOCK_32];
-	size_t total_in_ori = cryp->total_in;
-	struct scatterlist *out_sg_ori = cryp->out_sg;
+	u32 cfg, block[AES_BLOCK_32] = {0};
 	unsigned int i;
 
 	/* 'Special workaround' procedure described in the datasheet */
13341284
13351285 /* b) pad and write the last block */
13361286 stm32_cryp_irq_write_block(cryp);
1337
- cryp->total_in = total_in_ori;
1287
+ /* wait end of process */
13381288 err = stm32_cryp_wait_output(cryp);
13391289 if (err) {
1340
- dev_err(cryp->dev, "Timeout (write gcm header)\n");
1290
+ dev_err(cryp->dev, "Timeout (write gcm last data)\n");
13411291 return stm32_cryp_finish_req(cryp, err);
13421292 }
13431293
13441294 /* c) get and store encrypted data */
1345
- stm32_cryp_irq_read_data(cryp);
1346
- scatterwalk_map_and_copy(tmp, out_sg_ori,
1347
- cryp->total_in_save - total_in_ori,
1348
- total_in_ori, 0);
1295
+ /*
1296
+ * Same code as stm32_cryp_irq_read_data(), but we want to store
1297
+ * block value
1298
+ */
1299
+ for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
1300
+ block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
1301
+
1302
+ scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1303
+ cryp->payload_out), 1);
1304
+ cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
1305
+ cryp->payload_out);
13491306
13501307 /* d) change mode back to AES GCM */
13511308 cfg &= ~CR_ALGO_MASK;
@@ -1358,19 +1315,13 @@
 	stm32_cryp_write(cryp, CRYP_CR, cfg);
 
 	/* f) write padded data */
-	for (i = 0; i < AES_BLOCK_32; i++) {
-		if (cryp->total_in)
-			stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
-		else
-			stm32_cryp_write(cryp, CRYP_DIN, 0);
-
-		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
-	}
+	for (i = 0; i < AES_BLOCK_32; i++)
+		stm32_cryp_write(cryp, CRYP_DIN, block[i]);
 
 	/* g) Empty fifo out */
 	err = stm32_cryp_wait_output(cryp);
 	if (err) {
-		dev_err(cryp->dev, "Timeout (write gcm header)\n");
+		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
 		return stm32_cryp_finish_req(cryp, err);
 	}
 
@@ -1383,16 +1334,14 @@
 
 static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
 {
-	u32 cfg, payload_bytes;
+	u32 cfg;
 
 	/* disable ip, set NPBLB and reneable ip */
 	cfg = stm32_cryp_read(cryp, CRYP_CR);
 	cfg &= ~CR_CRYPEN;
 	stm32_cryp_write(cryp, CRYP_CR, cfg);
 
-	payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
-					   cryp->total_in;
-	cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT;
+	cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
 	cfg |= CR_CRYPEN;
 	stm32_cryp_write(cryp, CRYP_CR, cfg);
 }
@@ -1401,13 +1350,11 @@
 {
 	int err = 0;
 	u32 cfg, iv1tmp;
-	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
-	size_t last_total_out, total_in_ori = cryp->total_in;
-	struct scatterlist *out_sg_ori = cryp->out_sg;
+	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
+	u32 block[AES_BLOCK_32] = {0};
 	unsigned int i;
 
 	/* 'Special workaround' procedure described in the datasheet */
-	cryp->flags |= FLG_CCM_PADDED_WA;
 
 	/* a) disable ip */
 	stm32_cryp_write(cryp, CRYP_IMSCR, 0);
@@ -1437,7 +1384,7 @@
 
 	/* b) pad and write the last block */
 	stm32_cryp_irq_write_block(cryp);
-	cryp->total_in = total_in_ori;
+	/* wait end of process */
 	err = stm32_cryp_wait_output(cryp);
 	if (err) {
 		dev_err(cryp->dev, "Timeout (wite ccm padded data)\n");
@@ -1445,13 +1392,16 @@
 	}
 
 	/* c) get and store decrypted data */
-	last_total_out = cryp->total_out;
-	stm32_cryp_irq_read_data(cryp);
+	/*
+	 * Same code as stm32_cryp_irq_read_data(), but we want to store
+	 * block value
+	 */
+	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
+		block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
 
-	memset(tmp, 0, sizeof(tmp));
-	scatterwalk_map_and_copy(tmp, out_sg_ori,
-				 cryp->total_out_save - last_total_out,
-				 last_total_out, 0);
+	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
+							     cryp->payload_out), 1);
+	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);
 
 	/* d) Load again CRYP_CSGCMCCMxR */
 	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
@@ -1468,10 +1418,10 @@
 	stm32_cryp_write(cryp, CRYP_CR, cfg);
 
 	/* g) XOR and write padded data */
-	for (i = 0; i < ARRAY_SIZE(tmp); i++) {
-		tmp[i] ^= cstmp1[i];
-		tmp[i] ^= cstmp2[i];
-		stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
+	for (i = 0; i < ARRAY_SIZE(block); i++) {
+		block[i] ^= cstmp1[i];
+		block[i] ^= cstmp2[i];
+		stm32_cryp_write(cryp, CRYP_DIN, block[i]);
 	}
 
 	/* h) wait for completion */
@@ -1485,30 +1435,34 @@
 
 static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
 {
-	if (unlikely(!cryp->total_in)) {
+	if (unlikely(!cryp->payload_in)) {
 		dev_warn(cryp->dev, "No more data to process\n");
 		return;
 	}
 
-	if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
+	if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
 		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
 		     is_encrypt(cryp))) {
 		/* Padding for AES GCM encryption */
-		if (cryp->caps->padding_wa)
+		if (cryp->caps->padding_wa) {
 			/* Special case 1 */
-			return stm32_cryp_irq_write_gcm_padded_data(cryp);
+			stm32_cryp_irq_write_gcm_padded_data(cryp);
+			return;
+		}
 
 		/* Setting padding bytes (NBBLB) */
 		stm32_cryp_irq_set_npblb(cryp);
 	}
 
-	if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
+	if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
 		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
 		     is_decrypt(cryp))) {
 		/* Padding for AES CCM decryption */
-		if (cryp->caps->padding_wa)
+		if (cryp->caps->padding_wa) {
 			/* Special case 2 */
-			return stm32_cryp_irq_write_ccm_padded_data(cryp);
+			stm32_cryp_irq_write_ccm_padded_data(cryp);
+			return;
+		}
 
 		/* Setting padding bytes (NBBLB) */
 		stm32_cryp_irq_set_npblb(cryp);
@@ -1520,191 +1474,59 @@
 	stm32_cryp_irq_write_block(cryp);
 }
 
-static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
+static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
 {
-	int err;
-	unsigned int i, j;
-	u32 cfg, *src;
+	unsigned int i;
+	u32 block[AES_BLOCK_32] = {0};
+	size_t written;
 
-	src = sg_virt(cryp->in_sg) + _walked_in;
+	written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);
 
-	for (i = 0; i < AES_BLOCK_32; i++) {
-		stm32_cryp_write(cryp, CRYP_DIN, *src);
+	scatterwalk_copychunks(block, &cryp->in_walk, written, 0);
+	for (i = 0; i < AES_BLOCK_32; i++)
+		stm32_cryp_write(cryp, CRYP_DIN, block[i]);
 
-		src = stm32_cryp_next_in(cryp, src, sizeof(u32));
-		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
+	cryp->header_in -= written;
 
-		/* Check if whole header written */
-		if ((cryp->total_in_save - cryp->total_in) ==
-				cryp->areq->assoclen) {
-			/* Write padding if needed */
-			for (j = i + 1; j < AES_BLOCK_32; j++)
-				stm32_cryp_write(cryp, CRYP_DIN, 0);
-
-			/* Wait for completion */
-			err = stm32_cryp_wait_busy(cryp);
-			if (err) {
-				dev_err(cryp->dev, "Timeout (gcm header)\n");
-				return stm32_cryp_finish_req(cryp, err);
-			}
-
-			if (stm32_cryp_get_input_text_len(cryp)) {
-				/* Phase 3 : payload */
-				cfg = stm32_cryp_read(cryp, CRYP_CR);
-				cfg &= ~CR_CRYPEN;
-				stm32_cryp_write(cryp, CRYP_CR, cfg);
-
-				cfg &= ~CR_PH_MASK;
-				cfg |= CR_PH_PAYLOAD;
-				cfg |= CR_CRYPEN;
-				stm32_cryp_write(cryp, CRYP_CR, cfg);
-			} else {
-				/* Phase 4 : tag */
-				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
-				stm32_cryp_finish_req(cryp, 0);
-			}
-
-			break;
-		}
-
-		if (!cryp->total_in)
-			break;
-	}
-}
-
-static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp)
-{
-	int err;
-	unsigned int i = 0, j, k;
-	u32 alen, cfg, *src;
-	u8 d8[4];
-
-	src = sg_virt(cryp->in_sg) + _walked_in;
-	alen = cryp->areq->assoclen;
-
-	if (!_walked_in) {
-		if (cryp->areq->assoclen <= 65280) {
-			/* Write first u32 of B1 */
-			d8[0] = (alen >> 8) & 0xFF;
-			d8[1] = alen & 0xFF;
-			d8[2] = *((u8 *)src);
-			src = stm32_cryp_next_in(cryp, src, 1);
-			d8[3] = *((u8 *)src);
-			src = stm32_cryp_next_in(cryp, src, 1);
-
-			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
-			i++;
-
-			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
-		} else {
-			/* Build the two first u32 of B1 */
-			d8[0] = 0xFF;
-			d8[1] = 0xFE;
-			d8[2] = alen & 0xFF000000;
-			d8[3] = alen & 0x00FF0000;
-
-			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
-			i++;
-
-			d8[0] = alen & 0x0000FF00;
-			d8[1] = alen & 0x000000FF;
-			d8[2] = *((u8 *)src);
-			src = stm32_cryp_next_in(cryp, src, 1);
-			d8[3] = *((u8 *)src);
-			src = stm32_cryp_next_in(cryp, src, 1);
-
-			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
-			i++;
-
-			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
-		}
-	}
-
-	/* Write next u32 */
-	for (; i < AES_BLOCK_32; i++) {
-		/* Build an u32 */
-		memset(d8, 0, sizeof(u32));
-		for (k = 0; k < sizeof(u32); k++) {
-			d8[k] = *((u8 *)src);
-			src = stm32_cryp_next_in(cryp, src, 1);
-
-			cryp->total_in -= min_t(size_t, 1, cryp->total_in);
-			if ((cryp->total_in_save - cryp->total_in) == alen)
-				break;
-		}
-
-		stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
-
-		if ((cryp->total_in_save - cryp->total_in) == alen) {
-			/* Write padding if needed */
-			for (j = i + 1; j < AES_BLOCK_32; j++)
-				stm32_cryp_write(cryp, CRYP_DIN, 0);
-
-			/* Wait for completion */
-			err = stm32_cryp_wait_busy(cryp);
-			if (err) {
-				dev_err(cryp->dev, "Timeout (ccm header)\n");
-				return stm32_cryp_finish_req(cryp, err);
-			}
-
-			if (stm32_cryp_get_input_text_len(cryp)) {
-				/* Phase 3 : payload */
-				cfg = stm32_cryp_read(cryp, CRYP_CR);
-				cfg &= ~CR_CRYPEN;
-				stm32_cryp_write(cryp, CRYP_CR, cfg);
-
-				cfg &= ~CR_PH_MASK;
-				cfg |= CR_PH_PAYLOAD;
-				cfg |= CR_CRYPEN;
-				stm32_cryp_write(cryp, CRYP_CR, cfg);
-			} else {
-				/* Phase 4 : tag */
-				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
-				stm32_cryp_finish_req(cryp, 0);
-			}
-
-			break;
-		}
-	}
+	stm32_crypt_gcmccm_end_header(cryp);
 }
 
 static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
 {
 	struct stm32_cryp *cryp = arg;
 	u32 ph;
+	u32 it_mask = stm32_cryp_read(cryp, CRYP_IMSCR);
 
 	if (cryp->irq_status & MISR_OUT)
 		/* Output FIFO IRQ: read data */
-		if (unlikely(stm32_cryp_irq_read_data(cryp))) {
-			/* All bytes processed, finish */
-			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
-			stm32_cryp_finish_req(cryp, 0);
-			return IRQ_HANDLED;
-		}
+		stm32_cryp_irq_read_data(cryp);
 
 	if (cryp->irq_status & MISR_IN) {
-		if (is_gcm(cryp)) {
+		if (is_gcm(cryp) || is_ccm(cryp)) {
 			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
 			if (unlikely(ph == CR_PH_HEADER))
 				/* Write Header */
-				stm32_cryp_irq_write_gcm_header(cryp);
+				stm32_cryp_irq_write_gcmccm_header(cryp);
 			else
 				/* Input FIFO IRQ: write data */
 				stm32_cryp_irq_write_data(cryp);
-			cryp->gcm_ctr++;
-		} else if (is_ccm(cryp)) {
-			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
-			if (unlikely(ph == CR_PH_HEADER))
-				/* Write Header */
-				stm32_cryp_irq_write_ccm_header(cryp);
-			else
-				/* Input FIFO IRQ: write data */
-				stm32_cryp_irq_write_data(cryp);
+			if (is_gcm(cryp))
+				cryp->gcm_ctr++;
 		} else {
 			/* Input FIFO IRQ: write data */
 			stm32_cryp_irq_write_data(cryp);
 		}
 	}
+
+	/* Mask useless interrupts */
+	if (!cryp->payload_in && !cryp->header_in)
+		it_mask &= ~IMSCR_IN;
+	if (!cryp->payload_out)
+		it_mask &= ~IMSCR_OUT;
+	stm32_cryp_write(cryp, CRYP_IMSCR, it_mask);
+
+	if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out)
+		stm32_cryp_finish_req(cryp, 0);
 
 	return IRQ_HANDLED;
 }
@@ -1718,150 +1540,129 @@
 	return IRQ_WAKE_THREAD;
 }
 
-static struct crypto_alg crypto_algs[] = {
+static struct skcipher_alg crypto_algs[] = {
 {
-	.cra_name = "ecb(aes)",
-	.cra_driver_name = "stm32-ecb-aes",
-	.cra_priority = 200,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-		     CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = stm32_cryp_cra_init,
-	.cra_ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.setkey = stm32_cryp_aes_setkey,
-		.encrypt = stm32_cryp_aes_ecb_encrypt,
-		.decrypt = stm32_cryp_aes_ecb_decrypt,
-	}
+	.base.cra_name = "ecb(aes)",
+	.base.cra_driver_name = "stm32-ecb-aes",
+	.base.cra_priority = 200,
+	.base.cra_flags = CRYPTO_ALG_ASYNC,
+	.base.cra_blocksize = AES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
+	.base.cra_alignmask = 0,
+	.base.cra_module = THIS_MODULE,
+
+	.init = stm32_cryp_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = stm32_cryp_aes_setkey,
+	.encrypt = stm32_cryp_aes_ecb_encrypt,
+	.decrypt = stm32_cryp_aes_ecb_decrypt,
 },
 {
-	.cra_name = "cbc(aes)",
-	.cra_driver_name = "stm32-cbc-aes",
-	.cra_priority = 200,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-		     CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = stm32_cryp_cra_init,
-	.cra_ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = stm32_cryp_aes_setkey,
-		.encrypt = stm32_cryp_aes_cbc_encrypt,
-		.decrypt = stm32_cryp_aes_cbc_decrypt,
-	}
+	.base.cra_name = "cbc(aes)",
+	.base.cra_driver_name = "stm32-cbc-aes",
+	.base.cra_priority = 200,
+	.base.cra_flags = CRYPTO_ALG_ASYNC,
+	.base.cra_blocksize = AES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
+	.base.cra_alignmask = 0,
+	.base.cra_module = THIS_MODULE,
+
+	.init = stm32_cryp_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.ivsize = AES_BLOCK_SIZE,
+	.setkey = stm32_cryp_aes_setkey,
+	.encrypt = stm32_cryp_aes_cbc_encrypt,
+	.decrypt = stm32_cryp_aes_cbc_decrypt,
 },
 {
-	.cra_name = "ctr(aes)",
-	.cra_driver_name = "stm32-ctr-aes",
-	.cra_priority = 200,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-		     CRYPTO_ALG_ASYNC,
-	.cra_blocksize = 1,
-	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = stm32_cryp_cra_init,
-	.cra_ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = stm32_cryp_aes_setkey,
-		.encrypt = stm32_cryp_aes_ctr_encrypt,
-		.decrypt = stm32_cryp_aes_ctr_decrypt,
-	}
+	.base.cra_name = "ctr(aes)",
+	.base.cra_driver_name = "stm32-ctr-aes",
+	.base.cra_priority = 200,
+	.base.cra_flags = CRYPTO_ALG_ASYNC,
+	.base.cra_blocksize = 1,
+	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
+	.base.cra_alignmask = 0,
+	.base.cra_module = THIS_MODULE,
+
+	.init = stm32_cryp_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.ivsize = AES_BLOCK_SIZE,
+	.setkey = stm32_cryp_aes_setkey,
+	.encrypt = stm32_cryp_aes_ctr_encrypt,
+	.decrypt = stm32_cryp_aes_ctr_decrypt,
 },
 {
-	.cra_name = "ecb(des)",
-	.cra_driver_name = "stm32-ecb-des",
-	.cra_priority = 200,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-		     CRYPTO_ALG_ASYNC,
-	.cra_blocksize = DES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = stm32_cryp_cra_init,
-	.cra_ablkcipher = {
-		.min_keysize = DES_BLOCK_SIZE,
-		.max_keysize = DES_BLOCK_SIZE,
-		.setkey = stm32_cryp_des_setkey,
-		.encrypt = stm32_cryp_des_ecb_encrypt,
-		.decrypt = stm32_cryp_des_ecb_decrypt,
-	}
+	.base.cra_name = "ecb(des)",
+	.base.cra_driver_name = "stm32-ecb-des",
+	.base.cra_priority = 200,
+	.base.cra_flags = CRYPTO_ALG_ASYNC,
+	.base.cra_blocksize = DES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
+	.base.cra_alignmask = 0,
+	.base.cra_module = THIS_MODULE,
+
+	.init = stm32_cryp_init_tfm,
+	.min_keysize = DES_BLOCK_SIZE,
+	.max_keysize = DES_BLOCK_SIZE,
+	.setkey = stm32_cryp_des_setkey,
+	.encrypt = stm32_cryp_des_ecb_encrypt,
+	.decrypt = stm32_cryp_des_ecb_decrypt,
 },
 {
-	.cra_name = "cbc(des)",
-	.cra_driver_name = "stm32-cbc-des",
-	.cra_priority = 200,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-		     CRYPTO_ALG_ASYNC,
-	.cra_blocksize = DES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = stm32_cryp_cra_init,
-	.cra_ablkcipher = {
-		.min_keysize = DES_BLOCK_SIZE,
-		.max_keysize = DES_BLOCK_SIZE,
-		.ivsize = DES_BLOCK_SIZE,
-		.setkey = stm32_cryp_des_setkey,
-		.encrypt = stm32_cryp_des_cbc_encrypt,
-		.decrypt = stm32_cryp_des_cbc_decrypt,
-	}
+	.base.cra_name = "cbc(des)",
+	.base.cra_driver_name = "stm32-cbc-des",
+	.base.cra_priority = 200,
+	.base.cra_flags = CRYPTO_ALG_ASYNC,
+	.base.cra_blocksize = DES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
+	.base.cra_alignmask = 0,
+	.base.cra_module = THIS_MODULE,
+
+	.init = stm32_cryp_init_tfm,
+	.min_keysize = DES_BLOCK_SIZE,
+	.max_keysize = DES_BLOCK_SIZE,
+	.ivsize = DES_BLOCK_SIZE,
+	.setkey = stm32_cryp_des_setkey,
+	.encrypt = stm32_cryp_des_cbc_encrypt,
+	.decrypt = stm32_cryp_des_cbc_decrypt,
 },
 {
-	.cra_name = "ecb(des3_ede)",
-	.cra_driver_name = "stm32-ecb-des3",
-	.cra_priority = 200,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-		     CRYPTO_ALG_ASYNC,
-	.cra_blocksize = DES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = stm32_cryp_cra_init,
-	.cra_ablkcipher = {
-		.min_keysize = 3 * DES_BLOCK_SIZE,
-		.max_keysize = 3 * DES_BLOCK_SIZE,
-		.setkey = stm32_cryp_tdes_setkey,
-		.encrypt = stm32_cryp_tdes_ecb_encrypt,
-		.decrypt = stm32_cryp_tdes_ecb_decrypt,
-	}
+	.base.cra_name = "ecb(des3_ede)",
+	.base.cra_driver_name = "stm32-ecb-des3",
+	.base.cra_priority = 200,
+	.base.cra_flags = CRYPTO_ALG_ASYNC,
+	.base.cra_blocksize = DES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
+	.base.cra_alignmask = 0,
+	.base.cra_module = THIS_MODULE,
+
+	.init = stm32_cryp_init_tfm,
+	.min_keysize = 3 * DES_BLOCK_SIZE,
+	.max_keysize = 3 * DES_BLOCK_SIZE,
+	.setkey = stm32_cryp_tdes_setkey,
+	.encrypt = stm32_cryp_tdes_ecb_encrypt,
+	.decrypt = stm32_cryp_tdes_ecb_decrypt,
 },
 {
-	.cra_name = "cbc(des3_ede)",
-	.cra_driver_name = "stm32-cbc-des3",
-	.cra_priority = 200,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-		     CRYPTO_ALG_ASYNC,
1851
- .cra_blocksize = DES_BLOCK_SIZE,
1852
- .cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1853
- .cra_alignmask = 0xf,
1854
- .cra_type = &crypto_ablkcipher_type,
1855
- .cra_module = THIS_MODULE,
1856
- .cra_init = stm32_cryp_cra_init,
1857
- .cra_ablkcipher = {
1858
- .min_keysize = 3 * DES_BLOCK_SIZE,
1859
- .max_keysize = 3 * DES_BLOCK_SIZE,
1860
- .ivsize = DES_BLOCK_SIZE,
1861
- .setkey = stm32_cryp_tdes_setkey,
1862
- .encrypt = stm32_cryp_tdes_cbc_encrypt,
1863
- .decrypt = stm32_cryp_tdes_cbc_decrypt,
1864
- }
1650
+ .base.cra_name = "cbc(des3_ede)",
1651
+ .base.cra_driver_name = "stm32-cbc-des3",
1652
+ .base.cra_priority = 200,
1653
+ .base.cra_flags = CRYPTO_ALG_ASYNC,
1654
+ .base.cra_blocksize = DES_BLOCK_SIZE,
1655
+ .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
1656
+ .base.cra_alignmask = 0,
1657
+ .base.cra_module = THIS_MODULE,
1658
+
1659
+ .init = stm32_cryp_init_tfm,
1660
+ .min_keysize = 3 * DES_BLOCK_SIZE,
1661
+ .max_keysize = 3 * DES_BLOCK_SIZE,
1662
+ .ivsize = DES_BLOCK_SIZE,
1663
+ .setkey = stm32_cryp_tdes_setkey,
1664
+ .encrypt = stm32_cryp_tdes_cbc_encrypt,
1665
+ .decrypt = stm32_cryp_tdes_cbc_decrypt,
18651666 },
18661667 };
18671668
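With the array converted to struct skcipher_alg entries, consumers reach these transforms through the skcipher API that replaced ablkcipher. Below is a minimal sketch of driving the "cbc(aes)" entry above; the helper name is hypothetical and error handling is abbreviated, but every call shown is the standard skcipher interface.

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

/* Sketch: one in-place CBC-AES encryption through the skcipher API. */
static int example_cbc_aes(const u8 *key, unsigned int keylen,
			   u8 *buf, unsigned int len, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_skcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out;
	}

	sg_init_one(&sg, buf, len);	/* len must be a multiple of AES_BLOCK_SIZE */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* These algs are CRYPTO_ALG_ASYNC; block until the engine completes. */
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
out:
	skcipher_request_free(req);
	crypto_free_skcipher(tfm);
	return ret;
}

Asking for "cbc(aes)" lets the crypto core pick the highest-priority implementation; requesting the driver name "stm32-cbc-aes" instead would pin the request to this hardware.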
@@ -1882,7 +1683,7 @@
 		.cra_flags		= CRYPTO_ALG_ASYNC,
 		.cra_blocksize		= 1,
 		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
-		.cra_alignmask		= 0xf,
+		.cra_alignmask		= 0,
 		.cra_module		= THIS_MODULE,
 	},
 },
@@ -1902,7 +1703,7 @@
 		.cra_flags		= CRYPTO_ALG_ASYNC,
 		.cra_blocksize		= 1,
 		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
-		.cra_alignmask		= 0xf,
+		.cra_alignmask		= 0,
 		.cra_module		= THIS_MODULE,
 	},
 },
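The same alignmask relaxation is applied to both AEAD entries. Declaring .cra_alignmask = 0 tells the crypto core this driver accepts buffers at any address, so the core no longer copies misaligned requests into aligned bounce buffers; the driver's scatter walks absorb the misalignment instead. Schematically (hypothetical helper, only to illustrate what the mask means):

/*
 * Illustration only: how an alignmask gates a buffer. With mask 0xf the
 * address had to be 16-byte aligned; with mask 0 every address passes.
 */
static inline bool addr_satisfies_alignmask(const void *p, unsigned long mask)
{
	return ((unsigned long)p & mask) == 0;
}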
@@ -1929,7 +1730,6 @@
 {
 	struct device *dev = &pdev->dev;
 	struct stm32_cryp *cryp;
-	struct resource *res;
 	struct reset_control *rst;
 	int irq, ret;
 
@@ -1943,18 +1743,13 @@
 
 	cryp->dev = dev;
 
-	mutex_init(&cryp->lock);
-
-	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
-	cryp->regs = devm_ioremap_resource(dev, res);
+	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
 	if (IS_ERR(cryp->regs))
 		return PTR_ERR(cryp->regs);
 
 	irq = platform_get_irq(pdev, 0);
-	if (irq < 0) {
-		dev_err(dev, "Cannot get IRQ resource\n");
+	if (irq < 0)
 		return irq;
-	}
 
 	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
 					stm32_cryp_irq_thread, IRQF_ONESHOT,
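Two cleanups in probe: devm_platform_ioremap_resource() folds the platform_get_resource()/devm_ioremap_resource() pair into one managed call (which is why the local struct resource *res is dropped above), and the dev_err() on platform_get_irq() failure goes away because the core helper already logs when no IRQ is found. A simplified sketch of what the ioremap helper does, not the verbatim kernel code:

/* Sketch: devm_platform_ioremap_resource() is roughly this two-step. */
static void __iomem *ioremap_sketch(struct platform_device *pdev,
				    unsigned int index)
{
	struct resource *res;

	res = platform_get_resource(pdev, IORESOURCE_MEM, index);
	return devm_ioremap_resource(&pdev->dev, res);
}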
@@ -2010,7 +1805,7 @@
 		goto err_engine2;
 	}
 
-	ret = crypto_register_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
+	ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
 	if (ret) {
 		dev_err(dev, "Could not register algs\n");
 		goto err_algs;
20271822 return 0;
20281823
20291824 err_aead_algs:
2030
- crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
1825
+ crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
20311826 err_algs:
20321827 err_engine2:
20331828 crypto_engine_exit(cryp->engine);
....@@ -2052,12 +1847,12 @@
20521847 if (!cryp)
20531848 return -ENODEV;
20541849
2055
- ret = pm_runtime_get_sync(cryp->dev);
1850
+ ret = pm_runtime_resume_and_get(cryp->dev);
20561851 if (ret < 0)
20571852 return ret;
20581853
20591854 crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
2060
- crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
1855
+ crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
20611856
20621857 crypto_engine_exit(cryp->engine);
20631858
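pm_runtime_get_sync() increments the device usage count even when the resume fails, so an early return on error, as this function previously did, leaks a reference unless the caller remembers a pm_runtime_put_noidle(). pm_runtime_resume_and_get() drops the count itself on failure; its semantics are roughly:

/* Sketch of pm_runtime_resume_and_get() semantics (see linux/pm_runtime.h). */
static inline int resume_and_get_sketch(struct device *dev)
{
	int ret;

	ret = pm_runtime_get_sync(dev);		/* count bumped even on error */
	if (ret < 0) {
		pm_runtime_put_noidle(dev);	/* undo the count on failure */
		return ret;
	}

	return 0;
}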