[...]
+// SPDX-License-Identifier: GPL-2.0
 /*
  * Cryptographic API.
  *
[...]
  *
  * Copyright (c) 2012 Eukréa Electromatique - ATMEL
  * Author: Nicolas Royer <nicolas@eukrea.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as published
- * by the Free Software Foundation.
  *
  * Some ideas are from omap-aes.c driver.
  */
[...]
 #include <linux/platform_device.h>

 #include <linux/device.h>
+#include <linux/dmaengine.h>
 #include <linux/init.h>
 #include <linux/errno.h>
 #include <linux/interrupt.h>
[...]
 #include <crypto/gcm.h>
 #include <crypto/xts.h>
 #include <crypto/internal/aead.h>
-#include <linux/platform_data/crypto-atmel.h>
-#include <dt-bindings/dma/at91.h>
+#include <crypto/internal/skcipher.h>
 #include "atmel-aes-regs.h"
 #include "atmel-authenc.h"

[...]
 struct atmel_aes_ctr_ctx {
 	struct atmel_aes_base_ctx base;

-	u32 iv[AES_BLOCK_SIZE / sizeof(u32)];
+	__be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
 	size_t offset;
 	struct scatterlist src[2];
 	struct scatterlist dst[2];
+	u32 blocks;
 };

 struct atmel_aes_gcm_ctx {
[...]
 	struct scatterlist src[2];
 	struct scatterlist dst[2];

-	u32 j0[AES_BLOCK_SIZE / sizeof(u32)];
+	__be32 j0[AES_BLOCK_SIZE / sizeof(u32)];
 	u32 tag[AES_BLOCK_SIZE / sizeof(u32)];
-	u32 ghash[AES_BLOCK_SIZE / sizeof(u32)];
+	__be32 ghash[AES_BLOCK_SIZE / sizeof(u32)];
 	size_t textlen;

-	const u32 *ghash_in;
-	u32 *ghash_out;
+	const __be32 *ghash_in;
+	__be32 *ghash_out;
 	atmel_aes_fn_t ghash_resume;
 };

[...]

 struct atmel_aes_reqctx {
 	unsigned long mode;
-	u32 lastc[AES_BLOCK_SIZE / sizeof(u32)];
+	u8 lastc[AES_BLOCK_SIZE];
 };

 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
[...]
 }

 static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
-					u32 *value)
+					void *value)
 {
 	atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
 }

 static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset,
-					 const u32 *value)
+					 const void *value)
 {
 	atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
 }
[...]

 static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
 {
-	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
-	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
-	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
-	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
+	struct skcipher_request *req = skcipher_request_cast(dd->areq);
+	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
+	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

-	if (req->nbytes < ivsize)
+	if (req->cryptlen < ivsize)
 		return;

 	if (rctx->mode & AES_FLAGS_ENCRYPT) {
-		scatterwalk_map_and_copy(req->info, req->dst,
-					 req->nbytes - ivsize, ivsize, 0);
+		scatterwalk_map_and_copy(req->iv, req->dst,
+					 req->cryptlen - ivsize, ivsize, 0);
 	} else {
 		if (req->src == req->dst)
-			memcpy(req->info, rctx->lastc, ivsize);
+			memcpy(req->iv, rctx->lastc, ivsize);
 		else
-			scatterwalk_map_and_copy(req->info, req->src,
-						 req->nbytes - ivsize,
+			scatterwalk_map_and_copy(req->iv, req->src,
+						 req->cryptlen - ivsize,
 						 ivsize, 0);
 	}
 }

+static inline struct atmel_aes_ctr_ctx *
+atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
+{
+	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
+}
+
+static void atmel_aes_ctr_update_req_iv(struct atmel_aes_dev *dd)
+{
+	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
+	struct skcipher_request *req = skcipher_request_cast(dd->areq);
+	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
+	int i;
+
+	/*
+	 * The CTR transfer works in fragments of data of maximum 1 MByte
+	 * because of the 16 bit CTR counter embedded in the IP. When reaching
+	 * here, ctx->blocks contains the number of blocks of the last fragment
+	 * processed, there is no need to explicit cast it to u16.
+	 */
+	for (i = 0; i < ctx->blocks; i++)
+		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
+
+	memcpy(req->iv, ctx->iv, ivsize);
+}
+
 static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
 {
+	struct skcipher_request *req = skcipher_request_cast(dd->areq);
+	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
+
 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
 	if (dd->ctx->is_aead)
 		atmel_aes_authenc_complete(dd, err);
[...]
 	clk_disable(dd->iclk);
 	dd->flags &= ~AES_FLAGS_BUSY;

-	if (!dd->ctx->is_aead)
-		atmel_aes_set_iv_as_last_ciphertext_block(dd);
+	if (!err && !dd->ctx->is_aead &&
+	    (rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB) {
+		if ((rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_CTR)
+			atmel_aes_set_iv_as_last_ciphertext_block(dd);
+		else
+			atmel_aes_ctr_update_req_iv(dd);
+	}

 	if (dd->is_async)
 		dd->areq->complete(dd->areq, err);
[...]
 }

 static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma,
-				     const u32 *iv, const u32 *key, int keylen)
+				     const __be32 *iv, const u32 *key, int keylen)
 {
 	u32 valmr = 0;

[...]
 }

 static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
-					const u32 *iv)
+					const __be32 *iv)

 {
 	atmel_aes_write_ctrl_key(dd, use_dma, iv,
[...]
 	int err;

 	memset(&config, 0, sizeof(config));
-	config.direction = dir;
 	config.src_addr_width = addr_width;
 	config.dst_addr_width = addr_width;
 	config.src_maxburst = maxburst;
[...]
 	dma_async_issue_pending(dma->chan);

 	return 0;
-}
-
-static void atmel_aes_dma_transfer_stop(struct atmel_aes_dev *dd,
-					enum dma_transfer_direction dir)
-{
-	struct atmel_aes_dma *dma;
-
-	switch (dir) {
-	case DMA_MEM_TO_DEV:
-		dma = &dd->src;
-		break;
-
-	case DMA_DEV_TO_MEM:
-		dma = &dd->dst;
-		break;
-
-	default:
-		return;
-	}
-
-	dmaengine_terminate_all(dma->chan);
 }

 static int atmel_aes_dma_start(struct atmel_aes_dev *dd,
[...]
 	return -EINPROGRESS;

 output_transfer_stop:
-	atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM);
+	dmaengine_terminate_sync(dd->dst.chan);
 unmap:
 	atmel_aes_unmap(dd);
 exit:
 	return atmel_aes_complete(dd, err);
 }

-static void atmel_aes_dma_stop(struct atmel_aes_dev *dd)
-{
-	atmel_aes_dma_transfer_stop(dd, DMA_MEM_TO_DEV);
-	atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM);
-	atmel_aes_unmap(dd);
-}
-
 static void atmel_aes_dma_callback(void *data)
 {
 	struct atmel_aes_dev *dd = data;

-	atmel_aes_dma_stop(dd);
+	atmel_aes_unmap(dd);
 	dd->is_async = true;
 	(void)dd->resume(dd);
 }
[...]

 static int atmel_aes_start(struct atmel_aes_dev *dd)
 {
-	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
-	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
-	bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD ||
+	struct skcipher_request *req = skcipher_request_cast(dd->areq);
+	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
+	bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD ||
 			dd->ctx->block_size != AES_BLOCK_SIZE);
 	int err;

[...]
 	if (err)
 		return atmel_aes_complete(dd, err);

-	atmel_aes_write_ctrl(dd, use_dma, req->info);
+	atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv);
 	if (use_dma)
-		return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes,
+		return atmel_aes_dma_start(dd, req->src, req->dst,
+					   req->cryptlen,
 					   atmel_aes_transfer_complete);

-	return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes,
+	return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
 				   atmel_aes_transfer_complete);
-}
-
-static inline struct atmel_aes_ctr_ctx *
-atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
-{
-	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
 }

 static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
 {
 	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
-	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
+	struct skcipher_request *req = skcipher_request_cast(dd->areq);
 	struct scatterlist *src, *dst;
 	size_t datalen;
 	u32 ctr;
-	u16 blocks, start, end;
+	u16 start, end;
 	bool use_dma, fragmented = false;

 	/* Check for transfer completion. */
 	ctx->offset += dd->total;
-	if (ctx->offset >= req->nbytes)
+	if (ctx->offset >= req->cryptlen)
 		return atmel_aes_transfer_complete(dd);

 	/* Compute data length. */
-	datalen = req->nbytes - ctx->offset;
-	blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
+	datalen = req->cryptlen - ctx->offset;
+	ctx->blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
 	ctr = be32_to_cpu(ctx->iv[3]);

 	/* Check 16bit counter overflow. */
 	start = ctr & 0xffff;
-	end = start + blocks - 1;
+	end = start + ctx->blocks - 1;

-	if (blocks >> 16 || end < start) {
+	if (ctx->blocks >> 16 || end < start) {
 		ctr |= 0xffff;
 		datalen = AES_BLOCK_SIZE * (0x10000 - start);
 		fragmented = true;
[...]
 static int atmel_aes_ctr_start(struct atmel_aes_dev *dd)
 {
 	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
-	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
-	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
+	struct skcipher_request *req = skcipher_request_cast(dd->areq);
+	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
 	int err;

 	atmel_aes_set_mode(dd, rctx);
[...]
 	if (err)
 		return atmel_aes_complete(dd, err);

-	memcpy(ctx->iv, req->info, AES_BLOCK_SIZE);
+	memcpy(ctx->iv, req->iv, AES_BLOCK_SIZE);
 	ctx->offset = 0;
 	dd->total = 0;
 	return atmel_aes_ctr_transfer(dd);
 }

-static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
+static int atmel_aes_crypt(struct skcipher_request *req, unsigned long mode)
 {
-	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
-	struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
+	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+	struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(skcipher);
 	struct atmel_aes_reqctx *rctx;
 	struct atmel_aes_dev *dd;

[...]
 	if (!dd)
 		return -ENODEV;

-	rctx = ablkcipher_request_ctx(req);
+	rctx = skcipher_request_ctx(req);
 	rctx->mode = mode;

-	if (!(mode & AES_FLAGS_ENCRYPT) && (req->src == req->dst)) {
-		unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
+	if ((mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB &&
+	    !(mode & AES_FLAGS_ENCRYPT) && req->src == req->dst) {
+		unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

-		if (req->nbytes >= ivsize)
+		if (req->cryptlen >= ivsize)
 			scatterwalk_map_and_copy(rctx->lastc, req->src,
-						 req->nbytes - ivsize,
+						 req->cryptlen - ivsize,
 						 ivsize, 0);
 	}

 	return atmel_aes_handle_queue(dd, &req->base);
 }

-static int atmel_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int atmel_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			   unsigned int keylen)
 {
-	struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+	struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(tfm);

 	if (keylen != AES_KEYSIZE_128 &&
 	    keylen != AES_KEYSIZE_192 &&
-	    keylen != AES_KEYSIZE_256) {
-		crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+	    keylen != AES_KEYSIZE_256)
 		return -EINVAL;
-	}

 	memcpy(ctx->key, key, keylen);
 	ctx->keylen = keylen;
[...]
 	return 0;
 }

-static int atmel_aes_ecb_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_ecb_encrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
 }

-static int atmel_aes_ecb_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_ecb_decrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_ECB);
 }

-static int atmel_aes_cbc_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cbc_encrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
 }

-static int atmel_aes_cbc_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cbc_decrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CBC);
 }

-static int atmel_aes_ofb_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_ofb_encrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_OFB | AES_FLAGS_ENCRYPT);
 }

-static int atmel_aes_ofb_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_ofb_decrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_OFB);
 }

-static int atmel_aes_cfb_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb_encrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CFB128 | AES_FLAGS_ENCRYPT);
 }

-static int atmel_aes_cfb_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb_decrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CFB128);
 }

-static int atmel_aes_cfb64_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb64_encrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CFB64 | AES_FLAGS_ENCRYPT);
 }

-static int atmel_aes_cfb64_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb64_decrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CFB64);
 }

-static int atmel_aes_cfb32_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb32_encrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CFB32 | AES_FLAGS_ENCRYPT);
 }

-static int atmel_aes_cfb32_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb32_decrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CFB32);
 }

-static int atmel_aes_cfb16_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb16_encrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CFB16 | AES_FLAGS_ENCRYPT);
 }

-static int atmel_aes_cfb16_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb16_decrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CFB16);
 }

-static int atmel_aes_cfb8_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb8_encrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CFB8 | AES_FLAGS_ENCRYPT);
 }

-static int atmel_aes_cfb8_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb8_decrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CFB8);
 }

-static int atmel_aes_ctr_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_ctr_encrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CTR | AES_FLAGS_ENCRYPT);
 }

-static int atmel_aes_ctr_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_ctr_decrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_CTR);
 }

-static int atmel_aes_cra_init(struct crypto_tfm *tfm)
+static int atmel_aes_init_tfm(struct crypto_skcipher *tfm)
 {
-	struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

-	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
 	ctx->base.start = atmel_aes_start;

 	return 0;
 }

-static int atmel_aes_ctr_cra_init(struct crypto_tfm *tfm)
+static int atmel_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
 {
-	struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

-	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
 	ctx->base.start = atmel_aes_ctr_start;

 	return 0;
 }

-static struct crypto_alg aes_algs[] = {
+static struct skcipher_alg aes_algs[] = {
 {
-	.cra_name = "ecb(aes)",
-	.cra_driver_name = "atmel-ecb-aes",
-	.cra_priority = ATMEL_AES_PRIORITY,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct atmel_aes_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = atmel_aes_cra_init,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.setkey = atmel_aes_setkey,
-		.encrypt = atmel_aes_ecb_encrypt,
-		.decrypt = atmel_aes_ecb_decrypt,
-	}
+	.base.cra_name = "ecb(aes)",
+	.base.cra_driver_name = "atmel-ecb-aes",
+	.base.cra_blocksize = AES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
+
+	.init = atmel_aes_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = atmel_aes_setkey,
+	.encrypt = atmel_aes_ecb_encrypt,
+	.decrypt = atmel_aes_ecb_decrypt,
 },
 {
-	.cra_name = "cbc(aes)",
-	.cra_driver_name = "atmel-cbc-aes",
-	.cra_priority = ATMEL_AES_PRIORITY,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct atmel_aes_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = atmel_aes_cra_init,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = atmel_aes_setkey,
-		.encrypt = atmel_aes_cbc_encrypt,
-		.decrypt = atmel_aes_cbc_decrypt,
-	}
+	.base.cra_name = "cbc(aes)",
+	.base.cra_driver_name = "atmel-cbc-aes",
+	.base.cra_blocksize = AES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
+
+	.init = atmel_aes_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = atmel_aes_setkey,
+	.encrypt = atmel_aes_cbc_encrypt,
+	.decrypt = atmel_aes_cbc_decrypt,
+	.ivsize = AES_BLOCK_SIZE,
 },
 {
-	.cra_name = "ofb(aes)",
-	.cra_driver_name = "atmel-ofb-aes",
-	.cra_priority = ATMEL_AES_PRIORITY,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct atmel_aes_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = atmel_aes_cra_init,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = atmel_aes_setkey,
-		.encrypt = atmel_aes_ofb_encrypt,
-		.decrypt = atmel_aes_ofb_decrypt,
-	}
+	.base.cra_name = "ofb(aes)",
+	.base.cra_driver_name = "atmel-ofb-aes",
+	.base.cra_blocksize = AES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
+
+	.init = atmel_aes_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = atmel_aes_setkey,
+	.encrypt = atmel_aes_ofb_encrypt,
+	.decrypt = atmel_aes_ofb_decrypt,
+	.ivsize = AES_BLOCK_SIZE,
 },
 {
-	.cra_name = "cfb(aes)",
-	.cra_driver_name = "atmel-cfb-aes",
-	.cra_priority = ATMEL_AES_PRIORITY,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct atmel_aes_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = atmel_aes_cra_init,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = atmel_aes_setkey,
-		.encrypt = atmel_aes_cfb_encrypt,
-		.decrypt = atmel_aes_cfb_decrypt,
-	}
+	.base.cra_name = "cfb(aes)",
+	.base.cra_driver_name = "atmel-cfb-aes",
+	.base.cra_blocksize = AES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
+
+	.init = atmel_aes_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = atmel_aes_setkey,
+	.encrypt = atmel_aes_cfb_encrypt,
+	.decrypt = atmel_aes_cfb_decrypt,
+	.ivsize = AES_BLOCK_SIZE,
 },
 {
-	.cra_name = "cfb32(aes)",
-	.cra_driver_name = "atmel-cfb32-aes",
-	.cra_priority = ATMEL_AES_PRIORITY,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = CFB32_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct atmel_aes_ctx),
-	.cra_alignmask = 0x3,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = atmel_aes_cra_init,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = atmel_aes_setkey,
-		.encrypt = atmel_aes_cfb32_encrypt,
-		.decrypt = atmel_aes_cfb32_decrypt,
-	}
+	.base.cra_name = "cfb32(aes)",
+	.base.cra_driver_name = "atmel-cfb32-aes",
+	.base.cra_blocksize = CFB32_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
+
+	.init = atmel_aes_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = atmel_aes_setkey,
+	.encrypt = atmel_aes_cfb32_encrypt,
+	.decrypt = atmel_aes_cfb32_decrypt,
+	.ivsize = AES_BLOCK_SIZE,
 },
 {
-	.cra_name = "cfb16(aes)",
-	.cra_driver_name = "atmel-cfb16-aes",
-	.cra_priority = ATMEL_AES_PRIORITY,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = CFB16_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct atmel_aes_ctx),
-	.cra_alignmask = 0x1,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = atmel_aes_cra_init,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = atmel_aes_setkey,
-		.encrypt = atmel_aes_cfb16_encrypt,
-		.decrypt = atmel_aes_cfb16_decrypt,
-	}
+	.base.cra_name = "cfb16(aes)",
+	.base.cra_driver_name = "atmel-cfb16-aes",
+	.base.cra_blocksize = CFB16_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
+
+	.init = atmel_aes_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = atmel_aes_setkey,
+	.encrypt = atmel_aes_cfb16_encrypt,
+	.decrypt = atmel_aes_cfb16_decrypt,
+	.ivsize = AES_BLOCK_SIZE,
 },
 {
-	.cra_name = "cfb8(aes)",
-	.cra_driver_name = "atmel-cfb8-aes",
-	.cra_priority = ATMEL_AES_PRIORITY,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = CFB8_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct atmel_aes_ctx),
-	.cra_alignmask = 0x0,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = atmel_aes_cra_init,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = atmel_aes_setkey,
-		.encrypt = atmel_aes_cfb8_encrypt,
-		.decrypt = atmel_aes_cfb8_decrypt,
-	}
+	.base.cra_name = "cfb8(aes)",
+	.base.cra_driver_name = "atmel-cfb8-aes",
+	.base.cra_blocksize = CFB8_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
+
+	.init = atmel_aes_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = atmel_aes_setkey,
+	.encrypt = atmel_aes_cfb8_encrypt,
+	.decrypt = atmel_aes_cfb8_decrypt,
+	.ivsize = AES_BLOCK_SIZE,
 },
 {
-	.cra_name = "ctr(aes)",
-	.cra_driver_name = "atmel-ctr-aes",
-	.cra_priority = ATMEL_AES_PRIORITY,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = 1,
-	.cra_ctxsize = sizeof(struct atmel_aes_ctr_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = atmel_aes_ctr_cra_init,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = atmel_aes_setkey,
-		.encrypt = atmel_aes_ctr_encrypt,
-		.decrypt = atmel_aes_ctr_decrypt,
-	}
+	.base.cra_name = "ctr(aes)",
+	.base.cra_driver_name = "atmel-ctr-aes",
+	.base.cra_blocksize = 1,
+	.base.cra_ctxsize = sizeof(struct atmel_aes_ctr_ctx),
+
+	.init = atmel_aes_ctr_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = atmel_aes_setkey,
+	.encrypt = atmel_aes_ctr_encrypt,
+	.decrypt = atmel_aes_ctr_decrypt,
+	.ivsize = AES_BLOCK_SIZE,
 },
 };

-static struct crypto_alg aes_cfb64_alg = {
-	.cra_name = "cfb64(aes)",
-	.cra_driver_name = "atmel-cfb64-aes",
-	.cra_priority = ATMEL_AES_PRIORITY,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = CFB64_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct atmel_aes_ctx),
-	.cra_alignmask = 0x7,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = atmel_aes_cra_init,
-	.cra_u.ablkcipher = {
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = atmel_aes_setkey,
-		.encrypt = atmel_aes_cfb64_encrypt,
-		.decrypt = atmel_aes_cfb64_decrypt,
-	}
+static struct skcipher_alg aes_cfb64_alg = {
+	.base.cra_name = "cfb64(aes)",
+	.base.cra_driver_name = "atmel-cfb64-aes",
+	.base.cra_blocksize = CFB64_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
+
+	.init = atmel_aes_init_tfm,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.setkey = atmel_aes_setkey,
+	.encrypt = atmel_aes_cfb64_encrypt,
+	.decrypt = atmel_aes_cfb64_decrypt,
+	.ivsize = AES_BLOCK_SIZE,
 };


[...]

 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
 			       const u32 *data, size_t datalen,
-			       const u32 *ghash_in, u32 *ghash_out,
+			       const __be32 *ghash_in, __be32 *ghash_out,
 			       atmel_aes_fn_t resume);
 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);
[...]

 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
 			       const u32 *data, size_t datalen,
-			       const u32 *ghash_in, u32 *ghash_out,
+			       const __be32 *ghash_in, __be32 *ghash_out,
 			       atmel_aes_fn_t resume)
 {
 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
[...]

 	memcpy(data, iv, ivsize);
 	memset(data + ivsize, 0, padlen + sizeof(u64));
-	((u64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);
+	((__be64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);

 	return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen,
 				   NULL, ctx->j0, atmel_aes_gcm_process);
[...]
 {
 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
 	struct aead_request *req = aead_request_cast(dd->areq);
-	u32 j0_lsw, *j0 = ctx->j0;
+	__be32 j0_lsw, *j0 = ctx->j0;
 	size_t padlen;

 	/* Write incr32(J0) into IV. */
 	j0_lsw = j0[3];
-	j0[3] = cpu_to_be32(be32_to_cpu(j0[3]) + 1);
+	be32_add_cpu(&j0[3], 1);
 	atmel_aes_write_block(dd, AES_IVR(0), j0);
 	j0[3] = j0_lsw;

[...]
 {
 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
 	struct aead_request *req = aead_request_cast(dd->areq);
-	u64 *data = dd->buf;
+	__be64 *data = dd->buf;

 	if (likely(dd->flags & AES_FLAGS_GTAGEN)) {
 		if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) {
[...]

 	if (keylen != AES_KEYSIZE_256 &&
 	    keylen != AES_KEYSIZE_192 &&
-	    keylen != AES_KEYSIZE_128) {
-		crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+	    keylen != AES_KEYSIZE_128)
 		return -EINVAL;
-	}

 	memcpy(ctx->key, key, keylen);
 	ctx->keylen = keylen;
[...]
 static int atmel_aes_gcm_setauthsize(struct crypto_aead *tfm,
 				     unsigned int authsize)
 {
-	/* Same as crypto_gcm_authsize() from crypto/gcm.c */
-	switch (authsize) {
-	case 4:
-	case 8:
-	case 12:
-	case 13:
-	case 14:
-	case 15:
-	case 16:
-		break;
-	default:
-		return -EINVAL;
-	}
-
-	return 0;
+	return crypto_gcm_check_authsize(authsize);
 }

 static int atmel_aes_gcm_encrypt(struct aead_request *req)
[...]
 	.base = {
 		.cra_name = "gcm(aes)",
 		.cra_driver_name = "atmel-gcm-aes",
-		.cra_priority = ATMEL_AES_PRIORITY,
-		.cra_flags = CRYPTO_ALG_ASYNC,
 		.cra_blocksize = 1,
 		.cra_ctxsize = sizeof(struct atmel_aes_gcm_ctx),
-		.cra_alignmask = 0xf,
-		.cra_module = THIS_MODULE,
 	},
 };

[...]
 static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
 {
 	struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx);
-	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
-	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
+	struct skcipher_request *req = skcipher_request_cast(dd->areq);
+	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
 	unsigned long flags;
 	int err;

[...]
 	if (err)
 		return atmel_aes_complete(dd, err);

-	/* Compute the tweak value from req->info with ecb(aes). */
+	/* Compute the tweak value from req->iv with ecb(aes). */
 	flags = dd->flags;
 	dd->flags &= ~AES_FLAGS_MODE_MASK;
 	dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
[...]
 				 ctx->key2, ctx->base.keylen);
 	dd->flags = flags;

-	atmel_aes_write_block(dd, AES_IDATAR(0), req->info);
+	atmel_aes_write_block(dd, AES_IDATAR(0), req->iv);
 	return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data);
 }

 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
 {
-	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
-	bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD);
+	struct skcipher_request *req = skcipher_request_cast(dd->areq);
+	bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD);
 	u32 tweak[AES_BLOCK_SIZE / sizeof(u32)];
-	static const u32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
+	static const __le32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
 	u8 *tweak_bytes = (u8 *)tweak;
 	int i;

[...]
 	atmel_aes_write_block(dd, AES_TWR(0), tweak);
 	atmel_aes_write_block(dd, AES_ALPHAR(0), one);
 	if (use_dma)
-		return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes,
+		return atmel_aes_dma_start(dd, req->src, req->dst,
+					   req->cryptlen,
 					   atmel_aes_transfer_complete);

-	return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes,
+	return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
 				   atmel_aes_transfer_complete);
 }

-static int atmel_aes_xts_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int atmel_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
 				unsigned int keylen)
 {
-	struct atmel_aes_xts_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int err;

-	err = xts_check_key(crypto_ablkcipher_tfm(tfm), key, keylen);
+	err = xts_check_key(crypto_skcipher_tfm(tfm), key, keylen);
 	if (err)
 		return err;

[...]
 	return 0;
 }

-static int atmel_aes_xts_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_xts_encrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_XTS | AES_FLAGS_ENCRYPT);
 }

-static int atmel_aes_xts_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_xts_decrypt(struct skcipher_request *req)
 {
 	return atmel_aes_crypt(req, AES_FLAGS_XTS);
 }

-static int atmel_aes_xts_cra_init(struct crypto_tfm *tfm)
+static int atmel_aes_xts_init_tfm(struct crypto_skcipher *tfm)
 {
-	struct atmel_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

-	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
 	ctx->base.start = atmel_aes_xts_start;

 	return 0;
 }

-static struct crypto_alg aes_xts_alg = {
-	.cra_name = "xts(aes)",
-	.cra_driver_name = "atmel-xts-aes",
-	.cra_priority = ATMEL_AES_PRIORITY,
-	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct atmel_aes_xts_ctx),
-	.cra_alignmask = 0xf,
-	.cra_type = &crypto_ablkcipher_type,
-	.cra_module = THIS_MODULE,
-	.cra_init = atmel_aes_xts_cra_init,
-	.cra_u.ablkcipher = {
-		.min_keysize = 2 * AES_MIN_KEY_SIZE,
-		.max_keysize = 2 * AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.setkey = atmel_aes_xts_setkey,
-		.encrypt = atmel_aes_xts_encrypt,
-		.decrypt = atmel_aes_xts_decrypt,
-	}
+static struct skcipher_alg aes_xts_alg = {
+	.base.cra_name = "xts(aes)",
+	.base.cra_driver_name = "atmel-xts-aes",
+	.base.cra_blocksize = AES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct atmel_aes_xts_ctx),
+
+	.min_keysize = 2 * AES_MIN_KEY_SIZE,
+	.max_keysize = 2 * AES_MAX_KEY_SIZE,
+	.ivsize = AES_BLOCK_SIZE,
+	.setkey = atmel_aes_xts_setkey,
+	.encrypt = atmel_aes_xts_encrypt,
+	.decrypt = atmel_aes_xts_decrypt,
+	.init = atmel_aes_xts_init_tfm,
 };

 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
[...]
 	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
 	bool enc = atmel_aes_is_encrypt(dd);
 	struct scatterlist *src, *dst;
-	u32 iv[AES_BLOCK_SIZE / sizeof(u32)];
+	__be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
 	u32 emr;

 	if (is_async)
[...]
 {
 	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
 	struct crypto_authenc_keys keys;
-	u32 flags;
 	int err;

 	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
[...]
 		goto badkey;

 	/* Save auth key. */
-	flags = crypto_aead_get_flags(tfm);
 	err = atmel_sha_authenc_setkey(ctx->auth,
 				       keys.authkey, keys.authkeylen,
-				       &flags);
-	crypto_aead_set_flags(tfm, flags & CRYPTO_TFM_RES_MASK);
+				       crypto_aead_get_flags(tfm));
 	if (err) {
 		memzero_explicit(&keys, sizeof(keys));
 		return err;
[...]
 	return 0;

 badkey:
-	crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 	memzero_explicit(&keys, sizeof(keys));
 	return -EINVAL;
 }
[...]
 	.base = {
 		.cra_name = "authenc(hmac(sha1),cbc(aes))",
 		.cra_driver_name = "atmel-authenc-hmac-sha1-cbc-aes",
-		.cra_priority = ATMEL_AES_PRIORITY,
-		.cra_flags = CRYPTO_ALG_ASYNC,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
-		.cra_alignmask = 0xf,
-		.cra_module = THIS_MODULE,
 	},
 },
 {
[...]
 	.base = {
 		.cra_name = "authenc(hmac(sha224),cbc(aes))",
 		.cra_driver_name = "atmel-authenc-hmac-sha224-cbc-aes",
-		.cra_priority = ATMEL_AES_PRIORITY,
-		.cra_flags = CRYPTO_ALG_ASYNC,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
-		.cra_alignmask = 0xf,
-		.cra_module = THIS_MODULE,
 	},
 },
 {
[...]
 	.base = {
 		.cra_name = "authenc(hmac(sha256),cbc(aes))",
 		.cra_driver_name = "atmel-authenc-hmac-sha256-cbc-aes",
-		.cra_priority = ATMEL_AES_PRIORITY,
-		.cra_flags = CRYPTO_ALG_ASYNC,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
-		.cra_alignmask = 0xf,
-		.cra_module = THIS_MODULE,
 	},
 },
 {
[...]
 	.base = {
 		.cra_name = "authenc(hmac(sha384),cbc(aes))",
 		.cra_driver_name = "atmel-authenc-hmac-sha384-cbc-aes",
-		.cra_priority = ATMEL_AES_PRIORITY,
-		.cra_flags = CRYPTO_ALG_ASYNC,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
-		.cra_alignmask = 0xf,
-		.cra_module = THIS_MODULE,
 	},
 },
 {
[...]
 	.base = {
 		.cra_name = "authenc(hmac(sha512),cbc(aes))",
 		.cra_driver_name = "atmel-authenc-hmac-sha512-cbc-aes",
-		.cra_priority = ATMEL_AES_PRIORITY,
-		.cra_flags = CRYPTO_ALG_ASYNC,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
-		.cra_alignmask = 0xf,
-		.cra_module = THIS_MODULE,
 	},
 },
 };
.. | .. |
---|
2374 | 2268 | free_page((unsigned long)dd->buf); |
---|
2375 | 2269 | } |
---|
2376 | 2270 | |
---|
2377 | | -static bool atmel_aes_filter(struct dma_chan *chan, void *slave) |
---|
| 2271 | +static int atmel_aes_dma_init(struct atmel_aes_dev *dd) |
---|
2378 | 2272 | { |
---|
2379 | | - struct at_dma_slave *sl = slave; |
---|
2380 | | - |
---|
2381 | | - if (sl && sl->dma_dev == chan->device->dev) { |
---|
2382 | | - chan->private = sl; |
---|
2383 | | - return true; |
---|
2384 | | - } else { |
---|
2385 | | - return false; |
---|
2386 | | - } |
---|
2387 | | -} |
---|
2388 | | - |
---|
2389 | | -static int atmel_aes_dma_init(struct atmel_aes_dev *dd, |
---|
2390 | | - struct crypto_platform_data *pdata) |
---|
2391 | | -{ |
---|
2392 | | - struct at_dma_slave *slave; |
---|
2393 | | - dma_cap_mask_t mask; |
---|
2394 | | - |
---|
2395 | | - dma_cap_zero(mask); |
---|
2396 | | - dma_cap_set(DMA_SLAVE, mask); |
---|
| 2273 | + int ret; |
---|
2397 | 2274 | |
---|
2398 | 2275 | /* Try to grab 2 DMA channels */ |
---|
2399 | | - slave = &pdata->dma_slave->rxdata; |
---|
2400 | | - dd->src.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter, |
---|
2401 | | - slave, dd->dev, "tx"); |
---|
2402 | | - if (!dd->src.chan) |
---|
| 2276 | + dd->src.chan = dma_request_chan(dd->dev, "tx"); |
---|
| 2277 | + if (IS_ERR(dd->src.chan)) { |
---|
| 2278 | + ret = PTR_ERR(dd->src.chan); |
---|
2403 | 2279 | goto err_dma_in; |
---|
| 2280 | + } |
---|
2404 | 2281 | |
---|
2405 | | - slave = &pdata->dma_slave->txdata; |
---|
2406 | | - dd->dst.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter, |
---|
2407 | | - slave, dd->dev, "rx"); |
---|
2408 | | - if (!dd->dst.chan) |
---|
| 2282 | + dd->dst.chan = dma_request_chan(dd->dev, "rx"); |
---|
| 2283 | + if (IS_ERR(dd->dst.chan)) { |
---|
| 2284 | + ret = PTR_ERR(dd->dst.chan); |
---|
2409 | 2285 | goto err_dma_out; |
---|
| 2286 | + } |
---|
2410 | 2287 | |
---|
2411 | 2288 | return 0; |
---|
2412 | 2289 | |
---|
2413 | 2290 | err_dma_out: |
---|
2414 | 2291 | dma_release_channel(dd->src.chan); |
---|
2415 | 2292 | err_dma_in: |
---|
2416 | | - dev_warn(dd->dev, "no DMA channel available\n"); |
---|
2417 | | - return -ENODEV; |
---|
| 2293 | + dev_err(dd->dev, "no DMA channel available\n"); |
---|
| 2294 | + return ret; |
---|
2418 | 2295 | } |
---|
2419 | 2296 | |
---|
2420 | 2297 | static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd) |
---|
.. | .. |
---|
2467 | 2344 | #endif |
---|
2468 | 2345 | |
---|
2469 | 2346 | if (dd->caps.has_xts) |
---|
2470 | | - crypto_unregister_alg(&aes_xts_alg); |
---|
| 2347 | + crypto_unregister_skcipher(&aes_xts_alg); |
---|
2471 | 2348 | |
---|
2472 | 2349 | if (dd->caps.has_gcm) |
---|
2473 | 2350 | crypto_unregister_aead(&aes_gcm_alg); |
---|
2474 | 2351 | |
---|
2475 | 2352 | if (dd->caps.has_cfb64) |
---|
2476 | | - crypto_unregister_alg(&aes_cfb64_alg); |
---|
| 2353 | + crypto_unregister_skcipher(&aes_cfb64_alg); |
---|
2477 | 2354 | |
---|
2478 | 2355 | for (i = 0; i < ARRAY_SIZE(aes_algs); i++) |
---|
2479 | | - crypto_unregister_alg(&aes_algs[i]); |
---|
| 2356 | + crypto_unregister_skcipher(&aes_algs[i]); |
---|
| 2357 | +} |
---|
| 2358 | + |
---|
| 2359 | +static void atmel_aes_crypto_alg_init(struct crypto_alg *alg) |
---|
| 2360 | +{ |
---|
| 2361 | + alg->cra_flags = CRYPTO_ALG_ASYNC; |
---|
| 2362 | + alg->cra_alignmask = 0xf; |
---|
| 2363 | + alg->cra_priority = ATMEL_AES_PRIORITY; |
---|
| 2364 | + alg->cra_module = THIS_MODULE; |
---|
2480 | 2365 | } |
---|
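
atmel_aes_crypto_alg_init() is the reason the static algorithm definitions earlier in this patch lose their cra_priority, cra_flags, cra_alignmask and cra_module initializers: those four fields are identical for every algorithm the driver exposes, so they are now applied once, at registration time. A sketch of how a trimmed definition pairs with the helper; the algorithm below is a placeholder for illustration only, and the per-mode handlers and key-size fields a real entry needs are elided:

```c
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/types.h>

/* Placeholder context type, for this sketch only. */
struct example_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	unsigned int keylen;
};

/* Only per-algorithm data remains in the static definition. */
static struct skcipher_alg example_cbc_aes_alg = {
	.base = {
		.cra_name	 = "cbc(aes)",
		.cra_driver_name = "example-cbc-aes",	/* placeholder */
		.cra_blocksize	 = AES_BLOCK_SIZE,
		.cra_ctxsize	 = sizeof(struct example_ctx),
	},
	/* .min_keysize/.max_keysize/.ivsize and the setkey/encrypt/
	 * decrypt handlers are omitted from this sketch. */
};

static int example_register(void)
{
	/* Flags, priority, alignmask and module owner set in one place. */
	atmel_aes_crypto_alg_init(&example_cbc_aes_alg.base);

	return crypto_register_skcipher(&example_cbc_aes_alg);
}
```

The registration loop in the next hunk follows exactly this pattern for the driver's real skcipher, AEAD and XTS entries.
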
2481 | 2366 | |
---|
2482 | 2367 | static int atmel_aes_register_algs(struct atmel_aes_dev *dd) |
---|
.. | .. |
---|
2484 | 2369 | int err, i, j; |
---|
2485 | 2370 | |
---|
2486 | 2371 | for (i = 0; i < ARRAY_SIZE(aes_algs); i++) { |
---|
2487 | | - err = crypto_register_alg(&aes_algs[i]); |
---|
| 2372 | + atmel_aes_crypto_alg_init(&aes_algs[i].base); |
---|
| 2373 | + |
---|
| 2374 | + err = crypto_register_skcipher(&aes_algs[i]); |
---|
2488 | 2375 | if (err) |
---|
2489 | 2376 | goto err_aes_algs; |
---|
2490 | 2377 | } |
---|
2491 | 2378 | |
---|
2492 | 2379 | if (dd->caps.has_cfb64) { |
---|
2493 | | - err = crypto_register_alg(&aes_cfb64_alg); |
---|
| 2380 | + atmel_aes_crypto_alg_init(&aes_cfb64_alg.base); |
---|
| 2381 | + |
---|
| 2382 | + err = crypto_register_skcipher(&aes_cfb64_alg); |
---|
2494 | 2383 | if (err) |
---|
2495 | 2384 | goto err_aes_cfb64_alg; |
---|
2496 | 2385 | } |
---|
2497 | 2386 | |
---|
2498 | 2387 | if (dd->caps.has_gcm) { |
---|
| 2388 | + atmel_aes_crypto_alg_init(&aes_gcm_alg.base); |
---|
| 2389 | + |
---|
2499 | 2390 | err = crypto_register_aead(&aes_gcm_alg); |
---|
2500 | 2391 | if (err) |
---|
2501 | 2392 | goto err_aes_gcm_alg; |
---|
2502 | 2393 | } |
---|
2503 | 2394 | |
---|
2504 | 2395 | if (dd->caps.has_xts) { |
---|
2505 | | - err = crypto_register_alg(&aes_xts_alg); |
---|
| 2396 | + atmel_aes_crypto_alg_init(&aes_xts_alg.base); |
---|
| 2397 | + |
---|
| 2398 | + err = crypto_register_skcipher(&aes_xts_alg); |
---|
2506 | 2399 | if (err) |
---|
2507 | 2400 | goto err_aes_xts_alg; |
---|
2508 | 2401 | } |
---|
.. | .. |
---|
2510 | 2403 | #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC) |
---|
2511 | 2404 | if (dd->caps.has_authenc) { |
---|
2512 | 2405 | for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++) { |
---|
| 2406 | + atmel_aes_crypto_alg_init(&aes_authenc_algs[i].base); |
---|
| 2407 | + |
---|
2513 | 2408 | err = crypto_register_aead(&aes_authenc_algs[i]); |
---|
2514 | 2409 | if (err) |
---|
2515 | 2410 | goto err_aes_authenc_alg; |
---|
.. | .. |
---|
2524 | 2419 | err_aes_authenc_alg: |
---|
2525 | 2420 | for (j = 0; j < i; j++) |
---|
2526 | 2421 | crypto_unregister_aead(&aes_authenc_algs[j]); |
---|
2527 | | - crypto_unregister_alg(&aes_xts_alg); |
---|
| 2422 | + crypto_unregister_skcipher(&aes_xts_alg); |
---|
2528 | 2423 | #endif |
---|
2529 | 2424 | err_aes_xts_alg: |
---|
2530 | 2425 | crypto_unregister_aead(&aes_gcm_alg); |
---|
2531 | 2426 | err_aes_gcm_alg: |
---|
2532 | | - crypto_unregister_alg(&aes_cfb64_alg); |
---|
| 2427 | + crypto_unregister_skcipher(&aes_cfb64_alg); |
---|
2533 | 2428 | err_aes_cfb64_alg: |
---|
2534 | 2429 | i = ARRAY_SIZE(aes_algs); |
---|
2535 | 2430 | err_aes_algs: |
---|
2536 | 2431 | for (j = 0; j < i; j++) |
---|
2537 | | - crypto_unregister_alg(&aes_algs[j]); |
---|
| 2432 | + crypto_unregister_skcipher(&aes_algs[j]); |
---|
2538 | 2433 | |
---|
2539 | 2434 | return err; |
---|
2540 | 2435 | } |
---|
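
With the conversion from crypto_register_alg() to crypto_register_skcipher(), the block-cipher modes are now exposed through the skcipher API, while GCM and the authenc combinations stay on the AEAD interface. Consumers are unaffected: they keep allocating a transform by algorithm name, and the crypto core picks the highest-priority implementation, which is what cra_priority = ATMEL_AES_PRIORITY is for. A self-contained sketch of such a consumer, assuming a kmalloc'd buffer whose length is a multiple of AES_BLOCK_SIZE ("cbc(aes)" is used as an illustrative mode name):

```c
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Encrypt one linear buffer in place, waiting for the async driver. */
static int example_cbc_encrypt(u8 *buf, unsigned int len,
			       const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* Sleeps until the hardware completion callback fires. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
```
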
.. | .. |
---|
2584 | 2479 | { /* sentinel */ } |
---|
2585 | 2480 | }; |
---|
2586 | 2481 | MODULE_DEVICE_TABLE(of, atmel_aes_dt_ids); |
---|
2587 | | - |
---|
2588 | | -static struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev) |
---|
2589 | | -{ |
---|
2590 | | - struct device_node *np = pdev->dev.of_node; |
---|
2591 | | - struct crypto_platform_data *pdata; |
---|
2592 | | - |
---|
2593 | | - if (!np) { |
---|
2594 | | - dev_err(&pdev->dev, "device node not found\n"); |
---|
2595 | | - return ERR_PTR(-EINVAL); |
---|
2596 | | - } |
---|
2597 | | - |
---|
2598 | | - pdata = devm_kzalloc(&pdev->dev, sizeof(*pdata), GFP_KERNEL); |
---|
2599 | | - if (!pdata) |
---|
2600 | | - return ERR_PTR(-ENOMEM); |
---|
2601 | | - |
---|
2602 | | - pdata->dma_slave = devm_kzalloc(&pdev->dev, |
---|
2603 | | - sizeof(*(pdata->dma_slave)), |
---|
2604 | | - GFP_KERNEL); |
---|
2605 | | - if (!pdata->dma_slave) { |
---|
2606 | | - devm_kfree(&pdev->dev, pdata); |
---|
2607 | | - return ERR_PTR(-ENOMEM); |
---|
2608 | | - } |
---|
2609 | | - |
---|
2610 | | - return pdata; |
---|
2611 | | -} |
---|
2612 | | -#else |
---|
2613 | | -static inline struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev) |
---|
2614 | | -{ |
---|
2615 | | - return ERR_PTR(-EINVAL); |
---|
2616 | | -} |
---|
2617 | 2482 | #endif |
---|
2618 | 2483 | |
---|
2619 | 2484 | static int atmel_aes_probe(struct platform_device *pdev) |
---|
2620 | 2485 | { |
---|
2621 | 2486 | struct atmel_aes_dev *aes_dd; |
---|
2622 | | - struct crypto_platform_data *pdata; |
---|
2623 | 2487 | struct device *dev = &pdev->dev; |
---|
2624 | 2488 | struct resource *aes_res; |
---|
2625 | 2489 | int err; |
---|
2626 | 2490 | |
---|
2627 | | - pdata = pdev->dev.platform_data; |
---|
2628 | | - if (!pdata) { |
---|
2629 | | - pdata = atmel_aes_of_init(pdev); |
---|
2630 | | - if (IS_ERR(pdata)) { |
---|
2631 | | - err = PTR_ERR(pdata); |
---|
2632 | | - goto aes_dd_err; |
---|
2633 | | - } |
---|
2634 | | - } |
---|
2635 | | - |
---|
2636 | | - if (!pdata->dma_slave) { |
---|
2637 | | - err = -ENXIO; |
---|
2638 | | - goto aes_dd_err; |
---|
2639 | | - } |
---|
2640 | | - |
---|
2641 | 2491 | aes_dd = devm_kzalloc(&pdev->dev, sizeof(*aes_dd), GFP_KERNEL); |
---|
2642 | | - if (aes_dd == NULL) { |
---|
2643 | | - err = -ENOMEM; |
---|
2644 | | - goto aes_dd_err; |
---|
2645 | | - } |
---|
| 2492 | + if (!aes_dd) |
---|
| 2493 | + return -ENOMEM; |
---|
2646 | 2494 | |
---|
2647 | 2495 | aes_dd->dev = dev; |
---|
2648 | 2496 | |
---|
.. | .. |
---|
2663 | 2511 | if (!aes_res) { |
---|
2664 | 2512 | dev_err(dev, "no MEM resource info\n"); |
---|
2665 | 2513 | err = -ENODEV; |
---|
2666 | | - goto res_err; |
---|
| 2514 | + goto err_tasklet_kill; |
---|
2667 | 2515 | } |
---|
2668 | 2516 | aes_dd->phys_base = aes_res->start; |
---|
2669 | 2517 | |
---|
2670 | 2518 | /* Get the IRQ */ |
---|
2671 | 2519 | aes_dd->irq = platform_get_irq(pdev, 0); |
---|
2672 | 2520 | if (aes_dd->irq < 0) { |
---|
2673 | | - dev_err(dev, "no IRQ resource info\n"); |
---|
2674 | 2521 | err = aes_dd->irq; |
---|
2675 | | - goto res_err; |
---|
| 2522 | + goto err_tasklet_kill; |
---|
2676 | 2523 | } |
---|
2677 | 2524 | |
---|
2678 | 2525 | err = devm_request_irq(&pdev->dev, aes_dd->irq, atmel_aes_irq, |
---|
2679 | 2526 | IRQF_SHARED, "atmel-aes", aes_dd); |
---|
2680 | 2527 | if (err) { |
---|
2681 | 2528 | dev_err(dev, "unable to request aes irq.\n"); |
---|
2682 | | - goto res_err; |
---|
| 2529 | + goto err_tasklet_kill; |
---|
2683 | 2530 | } |
---|
2684 | 2531 | |
---|
2685 | 2532 | /* Initializing the clock */ |
---|
.. | .. |
---|
2687 | 2534 | if (IS_ERR(aes_dd->iclk)) { |
---|
2688 | 2535 | dev_err(dev, "clock initialization failed.\n"); |
---|
2689 | 2536 | err = PTR_ERR(aes_dd->iclk); |
---|
2690 | | - goto res_err; |
---|
| 2537 | + goto err_tasklet_kill; |
---|
2691 | 2538 | } |
---|
2692 | 2539 | |
---|
2693 | 2540 | aes_dd->io_base = devm_ioremap_resource(&pdev->dev, aes_res); |
---|
2694 | 2541 | if (IS_ERR(aes_dd->io_base)) { |
---|
2695 | 2542 | dev_err(dev, "can't ioremap\n"); |
---|
2696 | 2543 | err = PTR_ERR(aes_dd->io_base); |
---|
2697 | | - goto res_err; |
---|
| 2544 | + goto err_tasklet_kill; |
---|
2698 | 2545 | } |
---|
2699 | 2546 | |
---|
2700 | 2547 | err = clk_prepare(aes_dd->iclk); |
---|
2701 | 2548 | if (err) |
---|
2702 | | - goto res_err; |
---|
| 2549 | + goto err_tasklet_kill; |
---|
2703 | 2550 | |
---|
2704 | 2551 | err = atmel_aes_hw_version_init(aes_dd); |
---|
2705 | 2552 | if (err) |
---|
2706 | | - goto iclk_unprepare; |
---|
| 2553 | + goto err_iclk_unprepare; |
---|
2707 | 2554 | |
---|
2708 | 2555 | atmel_aes_get_cap(aes_dd); |
---|
2709 | 2556 | |
---|
2710 | 2557 | #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC) |
---|
2711 | 2558 | if (aes_dd->caps.has_authenc && !atmel_sha_authenc_is_ready()) { |
---|
2712 | 2559 | err = -EPROBE_DEFER; |
---|
2713 | | - goto iclk_unprepare; |
---|
| 2560 | + goto err_iclk_unprepare; |
---|
2714 | 2561 | } |
---|
2715 | 2562 | #endif |
---|
2716 | 2563 | |
---|
2717 | 2564 | err = atmel_aes_buff_init(aes_dd); |
---|
2718 | 2565 | if (err) |
---|
2719 | | - goto err_aes_buff; |
---|
| 2566 | + goto err_iclk_unprepare; |
---|
2720 | 2567 | |
---|
2721 | | - err = atmel_aes_dma_init(aes_dd, pdata); |
---|
| 2568 | + err = atmel_aes_dma_init(aes_dd); |
---|
2722 | 2569 | if (err) |
---|
2723 | | - goto err_aes_dma; |
---|
| 2570 | + goto err_buff_cleanup; |
---|
2724 | 2571 | |
---|
2725 | 2572 | spin_lock(&atmel_aes.lock); |
---|
2726 | 2573 | list_add_tail(&aes_dd->list, &atmel_aes.dev_list); |
---|
.. | .. |
---|
2741 | 2588 | list_del(&aes_dd->list); |
---|
2742 | 2589 | spin_unlock(&atmel_aes.lock); |
---|
2743 | 2590 | atmel_aes_dma_cleanup(aes_dd); |
---|
2744 | | -err_aes_dma: |
---|
| 2591 | +err_buff_cleanup: |
---|
2745 | 2592 | atmel_aes_buff_cleanup(aes_dd); |
---|
2746 | | -err_aes_buff: |
---|
2747 | | -iclk_unprepare: |
---|
| 2593 | +err_iclk_unprepare: |
---|
2748 | 2594 | clk_unprepare(aes_dd->iclk); |
---|
2749 | | -res_err: |
---|
| 2595 | +err_tasklet_kill: |
---|
2750 | 2596 | tasklet_kill(&aes_dd->done_task); |
---|
2751 | 2597 | tasklet_kill(&aes_dd->queue_task); |
---|
2752 | | -aes_dd_err: |
---|
2753 | | - if (err != -EPROBE_DEFER) |
---|
2754 | | - dev_err(dev, "initialization failed.\n"); |
---|
2755 | 2598 | |
---|
2756 | 2599 | return err; |
---|
2757 | 2600 | } |
---|
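
The probe() rework above removes the crypto_platform_data/atmel_aes_of_init() plumbing (the DMA channels are now found by name, so no platform data is needed), returns directly when devm_kzalloc() fails, and renames the error labels after the cleanup they perform, so each one unwinds only what was acquired before the failing step. A condensed, self-contained restatement of that acquire/unwind ordering, with placeholder names (example_*) and most resource lookups elided:

```c
#include <linux/clk.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

/* Placeholder device state, for this sketch only. */
struct example_dd {
	struct clk *iclk;
	struct tasklet_struct done_task, queue_task;
};

static void example_task(unsigned long data) { }
static int example_buff_init(struct example_dd *dd) { return 0; }
static void example_buff_cleanup(struct example_dd *dd) { }
static int example_dma_init(struct example_dd *dd) { return 0; }

static int example_probe(struct platform_device *pdev)
{
	struct example_dd *dd;
	int err;

	/* devm_kzalloc() failure needs no unwind: return directly. */
	dd = devm_kzalloc(&pdev->dev, sizeof(*dd), GFP_KERNEL);
	if (!dd)
		return -ENOMEM;

	tasklet_init(&dd->done_task, example_task, (unsigned long)dd);
	tasklet_init(&dd->queue_task, example_task, (unsigned long)dd);

	/* ... MEM/IRQ/clock/ioremap lookups via devm_* helpers elided ... */

	err = clk_prepare(dd->iclk);
	if (err)
		goto err_tasklet_kill;

	err = example_buff_init(dd);
	if (err)
		goto err_iclk_unprepare;

	err = example_dma_init(dd);	/* may fail with -EPROBE_DEFER */
	if (err)
		goto err_buff_cleanup;

	return 0;

err_buff_cleanup:
	example_buff_cleanup(dd);
err_iclk_unprepare:
	clk_unprepare(dd->iclk);
err_tasklet_kill:
	tasklet_kill(&dd->done_task);
	tasklet_kill(&dd->queue_task);
	return err;
}
```

Note that the catch-all "initialization failed" print at the end of the old error path is gone; most failing steps already report a more specific error of their own.
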