2024-01-04 1543e317f1da31b75942316931e8f491a8920811
kernel/drivers/crypto/ux500/cryp/cryp_core.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /**
  * Copyright (C) ST-Ericsson SA 2010
  * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
@@ -6,18 +7,19 @@
  * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
  * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
  * Author: Andreas Westin <andreas.westin@stericsson.com> for ST-Ericsson.
- * License terms: GNU General Public License (GPL) version 2
  */

 #include <linux/clk.h>
 #include <linux/completion.h>
-#include <linux/crypto.h>
+#include <linux/device.h>
+#include <linux/dma-mapping.h>
 #include <linux/dmaengine.h>
 #include <linux/err.h>
 #include <linux/errno.h>
 #include <linux/interrupt.h>
 #include <linux/io.h>
 #include <linux/irqreturn.h>
+#include <linux/kernel.h>
 #include <linux/klist.h>
 #include <linux/module.h>
 #include <linux/mod_devicetable.h>
@@ -27,9 +29,9 @@
 #include <linux/platform_data/dma-ste-dma40.h>

 #include <crypto/aes.h>
-#include <crypto/algapi.h>
 #include <crypto/ctr.h>
-#include <crypto/des.h>
+#include <crypto/internal/des.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/scatterwalk.h>

 #include <linux/platform_data/crypto-ux500.h>
@@ -89,17 +91,6 @@
 };

 static struct cryp_driver_data driver_data;
-
-/**
- * uint8p_to_uint32_be - 4*uint8 to uint32 big endian
- * @in: Data to convert.
- */
-static inline u32 uint8p_to_uint32_be(u8 *in)
-{
-	u32 *data = (u32 *)in;
-
-	return cpu_to_be32p(data);
-}

 /**
  * swap_bits_in_byte - mirror the bits in a byte
@@ -283,6 +274,7 @@
 	int i;
 	int status = 0;
 	int num_of_regs = ctx->blocksize / 8;
+	__be32 *civ = (__be32 *)ctx->iv;
 	u32 iv[AES_BLOCK_SIZE / 4];

 	dev_dbg(device_data->dev, "[%s]", __func__);
@@ -299,7 +291,7 @@
 	}

 	for (i = 0; i < ctx->blocksize / 4; i++)
-		iv[i] = uint8p_to_uint32_be(ctx->iv + i*4);
+		iv[i] = be32_to_cpup(civ + i);

 	for (i = 0; i < num_of_regs; i++) {
 		status = cfg_iv(device_data, iv[i*2], iv[i*2+1],
@@ -338,23 +330,24 @@
 	int i;
 	int num_of_regs = ctx->keylen / 8;
 	u32 swapped_key[CRYP_MAX_KEY_SIZE / 4];
+	__be32 *ckey = (__be32 *)ctx->key;
 	int cryp_error = 0;

 	dev_dbg(ctx->device->dev, "[%s]", __func__);

 	if (mode_is_aes(ctx->config.algomode)) {
-		swap_words_in_key_and_bits_in_byte((u8 *)ctx->key,
+		swap_words_in_key_and_bits_in_byte((u8 *)ckey,
 						   (u8 *)swapped_key,
 						   ctx->keylen);
 	} else {
 		for (i = 0; i < ctx->keylen / 4; i++)
-			swapped_key[i] = uint8p_to_uint32_be(ctx->key + i*4);
+			swapped_key[i] = be32_to_cpup(ckey + i);
 	}

 	for (i = 0; i < num_of_regs; i++) {
 		cryp_error = set_key(ctx->device,
-				     *(((u32 *)swapped_key)+i*2),
-				     *(((u32 *)swapped_key)+i*2+1),
+				     swapped_key[i * 2],
+				     swapped_key[i * 2 + 1],
 				     (enum cryp_key_reg_index) i);

 		if (cryp_error != 0) {
@@ -528,9 +521,9 @@

 	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

-	if (unlikely(!IS_ALIGNED((u32)sg, 4))) {
+	if (unlikely(!IS_ALIGNED((unsigned long)sg, 4))) {
 		dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
-			"aligned! Addr: 0x%08x", __func__, (u32)sg);
+			"aligned! Addr: 0x%08lx", __func__, (unsigned long)sg);
 		return -EFAULT;
 	}

@@ -595,6 +588,12 @@
 	}

 	cookie = dmaengine_submit(desc);
+	if (dma_submit_error(cookie)) {
+		dev_dbg(ctx->device->dev, "[%s]: DMA submission failed\n",
+			__func__);
+		return cookie;
+	}
+
 	dma_async_issue_pending(channel);

 	return 0;
@@ -757,9 +756,9 @@

 	ctx->outlen = ctx->datalen;

-	if (unlikely(!IS_ALIGNED((u32)indata, 4))) {
+	if (unlikely(!IS_ALIGNED((unsigned long)indata, 4))) {
 		pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: "
-			 "0x%08x", __func__, (u32)indata);
+			 "0x%08lx", __func__, (unsigned long)indata);
 		return -EINVAL;
 	}

@@ -822,10 +821,10 @@
 	return nents;
 }

-static int ablk_dma_crypt(struct ablkcipher_request *areq)
+static int ablk_dma_crypt(struct skcipher_request *areq)
 {
-	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
-	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
+	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
+	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
 	struct cryp_device_data *device_data;

 	int bytes_written = 0;
@@ -834,8 +833,8 @@

 	pr_debug(DEV_DBG_NAME " [%s]", __func__);

-	ctx->datalen = areq->nbytes;
-	ctx->outlen = areq->nbytes;
+	ctx->datalen = areq->cryptlen;
+	ctx->outlen = areq->cryptlen;

 	ret = cryp_get_device_data(ctx, &device_data);
 	if (ret)
@@ -879,11 +878,11 @@
 	return 0;
 }

-static int ablk_crypt(struct ablkcipher_request *areq)
+static int ablk_crypt(struct skcipher_request *areq)
 {
-	struct ablkcipher_walk walk;
-	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
-	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
+	struct skcipher_walk walk;
+	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
+	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
 	struct cryp_device_data *device_data;
 	unsigned long src_paddr;
 	unsigned long dst_paddr;
@@ -896,21 +895,20 @@
 	if (ret)
 		goto out;

-	ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
-	ret = ablkcipher_walk_phys(areq, &walk);
+	ret = skcipher_walk_async(&walk, areq);

 	if (ret) {
-		pr_err(DEV_DBG_NAME "[%s]: ablkcipher_walk_phys() failed!",
+		pr_err(DEV_DBG_NAME "[%s]: skcipher_walk_async() failed!",
 			__func__);
 		goto out;
 	}

 	while ((nbytes = walk.nbytes) > 0) {
 		ctx->iv = walk.iv;
-		src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);
+		src_paddr = (page_to_phys(walk.src.phys.page) + walk.src.phys.offset);
 		ctx->indata = phys_to_virt(src_paddr);

-		dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);
+		dst_paddr = (page_to_phys(walk.dst.phys.page) + walk.dst.phys.offset);
 		ctx->outdata = phys_to_virt(dst_paddr);

 		ctx->datalen = nbytes - (nbytes % ctx->blocksize);
@@ -920,11 +918,10 @@
 			goto out;

 		nbytes -= ctx->datalen;
-		ret = ablkcipher_walk_done(areq, &walk, nbytes);
+		ret = skcipher_walk_done(&walk, nbytes);
 		if (ret)
 			goto out;
 	}
-	ablkcipher_walk_complete(&walk);

 out:
 	/* Release the device */
@@ -942,11 +939,10 @@
 	return ret;
 }

-static int aes_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
+static int aes_skcipher_setkey(struct crypto_skcipher *cipher,
 				 const u8 *key, unsigned int keylen)
 {
-	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
-	u32 *flags = &cipher->base.crt_flags;
+	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

 	pr_debug(DEV_DBG_NAME " [%s]", __func__);

@@ -965,7 +961,6 @@

 	default:
 		pr_err(DEV_DBG_NAME "[%s]: Unknown keylen!", __func__);
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 		return -EINVAL;
 	}

@@ -977,29 +972,17 @@
 	return 0;
 }

-static int des_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
+static int des_skcipher_setkey(struct crypto_skcipher *cipher,
 				 const u8 *key, unsigned int keylen)
 {
-	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
-	u32 *flags = &cipher->base.crt_flags;
-	u32 tmp[DES_EXPKEY_WORDS];
-	int ret;
+	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
+	int err;

 	pr_debug(DEV_DBG_NAME " [%s]", __func__);
-	if (keylen != DES_KEY_SIZE) {
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
-			 __func__);
-		return -EINVAL;
-	}

-	ret = des_ekey(tmp, key);
-	if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
-		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
-		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_REQ_WEAK_KEY",
-			 __func__);
-		return -EINVAL;
-	}
+	err = verify_skcipher_des_key(cipher, key);
+	if (err)
+		return err;

 	memcpy(ctx->key, key, keylen);
 	ctx->keylen = keylen;
@@ -1008,41 +991,17 @@
 	return 0;
 }

-static int des3_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
+static int des3_skcipher_setkey(struct crypto_skcipher *cipher,
 				  const u8 *key, unsigned int keylen)
 {
-	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
-	u32 *flags = &cipher->base.crt_flags;
-	const u32 *K = (const u32 *)key;
-	u32 tmp[DES3_EDE_EXPKEY_WORDS];
-	int i, ret;
+	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
+	int err;

 	pr_debug(DEV_DBG_NAME " [%s]", __func__);
-	if (keylen != DES3_EDE_KEY_SIZE) {
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
-			 __func__);
-		return -EINVAL;
-	}

-	/* Checking key interdependency for weak key detection. */
-	if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
-		     !((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
-	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
-		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
-		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_REQ_WEAK_KEY",
-			 __func__);
-		return -EINVAL;
-	}
-	for (i = 0; i < 3; i++) {
-		ret = des_ekey(tmp, key + i*DES_KEY_SIZE);
-		if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
-			*flags |= CRYPTO_TFM_RES_WEAK_KEY;
-			pr_debug(DEV_DBG_NAME " [%s]: "
-				 "CRYPTO_TFM_REQ_WEAK_KEY", __func__);
-			return -EINVAL;
-		}
-	}
+	err = verify_skcipher_des3_key(cipher, key);
+	if (err)
+		return err;

 	memcpy(ctx->key, key, keylen);
 	ctx->keylen = keylen;
@@ -1051,10 +1010,10 @@
 	return 0;
 }

-static int cryp_blk_encrypt(struct ablkcipher_request *areq)
+static int cryp_blk_encrypt(struct skcipher_request *areq)
 {
-	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
-	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
+	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
+	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

 	pr_debug(DEV_DBG_NAME " [%s]", __func__);

@@ -1069,10 +1028,10 @@
 	return ablk_crypt(areq);
 }

-static int cryp_blk_decrypt(struct ablkcipher_request *areq)
+static int cryp_blk_decrypt(struct skcipher_request *areq)
 {
-	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
-	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
+	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
+	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

 	pr_debug(DEV_DBG_NAME " [%s]", __func__);

@@ -1088,19 +1047,19 @@

 struct cryp_algo_template {
 	enum cryp_algo_mode algomode;
-	struct crypto_alg crypto;
+	struct skcipher_alg skcipher;
 };

-static int cryp_cra_init(struct crypto_tfm *tfm)
+static int cryp_init_tfm(struct crypto_skcipher *tfm)
 {
-	struct cryp_ctx *ctx = crypto_tfm_ctx(tfm);
-	struct crypto_alg *alg = tfm->__crt_alg;
+	struct cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
 	struct cryp_algo_template *cryp_alg = container_of(alg,
 			struct cryp_algo_template,
-			crypto);
+			skcipher);

 	ctx->config.algomode = cryp_alg->algomode;
-	ctx->blocksize = crypto_tfm_alg_blocksize(tfm);
+	ctx->blocksize = crypto_skcipher_blocksize(tfm);

 	return 0;
 }
@@ -1108,256 +1067,147 @@
 static struct cryp_algo_template cryp_algs[] = {
 	{
 		.algomode = CRYP_ALGO_AES_ECB,
-		.crypto = {
-			.cra_name = "aes",
-			.cra_driver_name = "aes-ux500",
-			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-					CRYPTO_ALG_ASYNC,
-			.cra_blocksize = AES_BLOCK_SIZE,
-			.cra_ctxsize = sizeof(struct cryp_ctx),
-			.cra_alignmask = 3,
-			.cra_type = &crypto_ablkcipher_type,
-			.cra_init = cryp_cra_init,
-			.cra_module = THIS_MODULE,
-			.cra_u = {
-				.ablkcipher = {
-					.min_keysize = AES_MIN_KEY_SIZE,
-					.max_keysize = AES_MAX_KEY_SIZE,
-					.setkey = aes_ablkcipher_setkey,
-					.encrypt = cryp_blk_encrypt,
-					.decrypt = cryp_blk_decrypt
-				}
-			}
-		}
-	},
-	{
-		.algomode = CRYP_ALGO_AES_ECB,
-		.crypto = {
-			.cra_name = "ecb(aes)",
-			.cra_driver_name = "ecb-aes-ux500",
-			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-					CRYPTO_ALG_ASYNC,
-			.cra_blocksize = AES_BLOCK_SIZE,
-			.cra_ctxsize = sizeof(struct cryp_ctx),
-			.cra_alignmask = 3,
-			.cra_type = &crypto_ablkcipher_type,
-			.cra_init = cryp_cra_init,
-			.cra_module = THIS_MODULE,
-			.cra_u = {
-				.ablkcipher = {
-					.min_keysize = AES_MIN_KEY_SIZE,
-					.max_keysize = AES_MAX_KEY_SIZE,
-					.setkey = aes_ablkcipher_setkey,
-					.encrypt = cryp_blk_encrypt,
-					.decrypt = cryp_blk_decrypt,
-				}
-			}
+		.skcipher = {
+			.base.cra_name = "ecb(aes)",
+			.base.cra_driver_name = "ecb-aes-ux500",
+			.base.cra_priority = 300,
+			.base.cra_flags = CRYPTO_ALG_ASYNC,
+			.base.cra_blocksize = AES_BLOCK_SIZE,
+			.base.cra_ctxsize = sizeof(struct cryp_ctx),
+			.base.cra_alignmask = 3,
+			.base.cra_module = THIS_MODULE,
+
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.setkey = aes_skcipher_setkey,
+			.encrypt = cryp_blk_encrypt,
+			.decrypt = cryp_blk_decrypt,
+			.init = cryp_init_tfm,
 		}
 	},
 	{
 		.algomode = CRYP_ALGO_AES_CBC,
-		.crypto = {
-			.cra_name = "cbc(aes)",
-			.cra_driver_name = "cbc-aes-ux500",
-			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-					CRYPTO_ALG_ASYNC,
-			.cra_blocksize = AES_BLOCK_SIZE,
-			.cra_ctxsize = sizeof(struct cryp_ctx),
-			.cra_alignmask = 3,
-			.cra_type = &crypto_ablkcipher_type,
-			.cra_init = cryp_cra_init,
-			.cra_module = THIS_MODULE,
-			.cra_u = {
-				.ablkcipher = {
-					.min_keysize = AES_MIN_KEY_SIZE,
-					.max_keysize = AES_MAX_KEY_SIZE,
-					.setkey = aes_ablkcipher_setkey,
-					.encrypt = cryp_blk_encrypt,
-					.decrypt = cryp_blk_decrypt,
-					.ivsize = AES_BLOCK_SIZE,
-				}
-			}
+		.skcipher = {
+			.base.cra_name = "cbc(aes)",
+			.base.cra_driver_name = "cbc-aes-ux500",
+			.base.cra_priority = 300,
+			.base.cra_flags = CRYPTO_ALG_ASYNC,
+			.base.cra_blocksize = AES_BLOCK_SIZE,
+			.base.cra_ctxsize = sizeof(struct cryp_ctx),
+			.base.cra_alignmask = 3,
+			.base.cra_module = THIS_MODULE,
+
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.setkey = aes_skcipher_setkey,
+			.encrypt = cryp_blk_encrypt,
+			.decrypt = cryp_blk_decrypt,
+			.init = cryp_init_tfm,
+			.ivsize = AES_BLOCK_SIZE,
 		}
 	},
 	{
 		.algomode = CRYP_ALGO_AES_CTR,
-		.crypto = {
-			.cra_name = "ctr(aes)",
-			.cra_driver_name = "ctr-aes-ux500",
-			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-					CRYPTO_ALG_ASYNC,
-			.cra_blocksize = AES_BLOCK_SIZE,
-			.cra_ctxsize = sizeof(struct cryp_ctx),
-			.cra_alignmask = 3,
-			.cra_type = &crypto_ablkcipher_type,
-			.cra_init = cryp_cra_init,
-			.cra_module = THIS_MODULE,
-			.cra_u = {
-				.ablkcipher = {
-					.min_keysize = AES_MIN_KEY_SIZE,
-					.max_keysize = AES_MAX_KEY_SIZE,
-					.setkey = aes_ablkcipher_setkey,
-					.encrypt = cryp_blk_encrypt,
-					.decrypt = cryp_blk_decrypt,
-					.ivsize = AES_BLOCK_SIZE,
-				}
-			}
-		}
-	},
-	{
-		.algomode = CRYP_ALGO_DES_ECB,
-		.crypto = {
-			.cra_name = "des",
-			.cra_driver_name = "des-ux500",
-			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-					CRYPTO_ALG_ASYNC,
-			.cra_blocksize = DES_BLOCK_SIZE,
-			.cra_ctxsize = sizeof(struct cryp_ctx),
-			.cra_alignmask = 3,
-			.cra_type = &crypto_ablkcipher_type,
-			.cra_init = cryp_cra_init,
-			.cra_module = THIS_MODULE,
-			.cra_u = {
-				.ablkcipher = {
-					.min_keysize = DES_KEY_SIZE,
-					.max_keysize = DES_KEY_SIZE,
-					.setkey = des_ablkcipher_setkey,
-					.encrypt = cryp_blk_encrypt,
-					.decrypt = cryp_blk_decrypt
-				}
-			}
-		}
+		.skcipher = {
+			.base.cra_name = "ctr(aes)",
+			.base.cra_driver_name = "ctr-aes-ux500",
+			.base.cra_priority = 300,
+			.base.cra_flags = CRYPTO_ALG_ASYNC,
+			.base.cra_blocksize = 1,
+			.base.cra_ctxsize = sizeof(struct cryp_ctx),
+			.base.cra_alignmask = 3,
+			.base.cra_module = THIS_MODULE,

-	},
-	{
-		.algomode = CRYP_ALGO_TDES_ECB,
-		.crypto = {
-			.cra_name = "des3_ede",
-			.cra_driver_name = "des3_ede-ux500",
-			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-					CRYPTO_ALG_ASYNC,
-			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
-			.cra_ctxsize = sizeof(struct cryp_ctx),
-			.cra_alignmask = 3,
-			.cra_type = &crypto_ablkcipher_type,
-			.cra_init = cryp_cra_init,
-			.cra_module = THIS_MODULE,
-			.cra_u = {
-				.ablkcipher = {
-					.min_keysize = DES3_EDE_KEY_SIZE,
-					.max_keysize = DES3_EDE_KEY_SIZE,
-					.setkey = des_ablkcipher_setkey,
-					.encrypt = cryp_blk_encrypt,
-					.decrypt = cryp_blk_decrypt
-				}
-			}
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.setkey = aes_skcipher_setkey,
+			.encrypt = cryp_blk_encrypt,
+			.decrypt = cryp_blk_decrypt,
+			.init = cryp_init_tfm,
+			.ivsize = AES_BLOCK_SIZE,
+			.chunksize = AES_BLOCK_SIZE,
 		}
 	},
 	{
 		.algomode = CRYP_ALGO_DES_ECB,
-		.crypto = {
-			.cra_name = "ecb(des)",
-			.cra_driver_name = "ecb-des-ux500",
-			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-					CRYPTO_ALG_ASYNC,
-			.cra_blocksize = DES_BLOCK_SIZE,
-			.cra_ctxsize = sizeof(struct cryp_ctx),
-			.cra_alignmask = 3,
-			.cra_type = &crypto_ablkcipher_type,
-			.cra_init = cryp_cra_init,
-			.cra_module = THIS_MODULE,
-			.cra_u = {
-				.ablkcipher = {
-					.min_keysize = DES_KEY_SIZE,
-					.max_keysize = DES_KEY_SIZE,
-					.setkey = des_ablkcipher_setkey,
-					.encrypt = cryp_blk_encrypt,
-					.decrypt = cryp_blk_decrypt,
-				}
-			}
+		.skcipher = {
+			.base.cra_name = "ecb(des)",
+			.base.cra_driver_name = "ecb-des-ux500",
+			.base.cra_priority = 300,
+			.base.cra_flags = CRYPTO_ALG_ASYNC,
+			.base.cra_blocksize = DES_BLOCK_SIZE,
+			.base.cra_ctxsize = sizeof(struct cryp_ctx),
+			.base.cra_alignmask = 3,
+			.base.cra_module = THIS_MODULE,
+
+			.min_keysize = DES_KEY_SIZE,
+			.max_keysize = DES_KEY_SIZE,
+			.setkey = des_skcipher_setkey,
+			.encrypt = cryp_blk_encrypt,
+			.decrypt = cryp_blk_decrypt,
+			.init = cryp_init_tfm,
 		}
 	},
 	{
 		.algomode = CRYP_ALGO_TDES_ECB,
-		.crypto = {
-			.cra_name = "ecb(des3_ede)",
-			.cra_driver_name = "ecb-des3_ede-ux500",
-			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-					CRYPTO_ALG_ASYNC,
-			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
-			.cra_ctxsize = sizeof(struct cryp_ctx),
-			.cra_alignmask = 3,
-			.cra_type = &crypto_ablkcipher_type,
-			.cra_init = cryp_cra_init,
-			.cra_module = THIS_MODULE,
-			.cra_u = {
-				.ablkcipher = {
-					.min_keysize = DES3_EDE_KEY_SIZE,
-					.max_keysize = DES3_EDE_KEY_SIZE,
-					.setkey = des3_ablkcipher_setkey,
-					.encrypt = cryp_blk_encrypt,
-					.decrypt = cryp_blk_decrypt,
-				}
-			}
+		.skcipher = {
+			.base.cra_name = "ecb(des3_ede)",
+			.base.cra_driver_name = "ecb-des3_ede-ux500",
+			.base.cra_priority = 300,
+			.base.cra_flags = CRYPTO_ALG_ASYNC,
+			.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
+			.base.cra_ctxsize = sizeof(struct cryp_ctx),
+			.base.cra_alignmask = 3,
+			.base.cra_module = THIS_MODULE,
+
+			.min_keysize = DES3_EDE_KEY_SIZE,
+			.max_keysize = DES3_EDE_KEY_SIZE,
+			.setkey = des3_skcipher_setkey,
+			.encrypt = cryp_blk_encrypt,
+			.decrypt = cryp_blk_decrypt,
+			.init = cryp_init_tfm,
 		}
 	},
 	{
 		.algomode = CRYP_ALGO_DES_CBC,
-		.crypto = {
-			.cra_name = "cbc(des)",
-			.cra_driver_name = "cbc-des-ux500",
-			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-					CRYPTO_ALG_ASYNC,
-			.cra_blocksize = DES_BLOCK_SIZE,
-			.cra_ctxsize = sizeof(struct cryp_ctx),
-			.cra_alignmask = 3,
-			.cra_type = &crypto_ablkcipher_type,
-			.cra_init = cryp_cra_init,
-			.cra_module = THIS_MODULE,
-			.cra_u = {
-				.ablkcipher = {
-					.min_keysize = DES_KEY_SIZE,
-					.max_keysize = DES_KEY_SIZE,
-					.setkey = des_ablkcipher_setkey,
-					.encrypt = cryp_blk_encrypt,
-					.decrypt = cryp_blk_decrypt,
-				}
-			}
+		.skcipher = {
+			.base.cra_name = "cbc(des)",
+			.base.cra_driver_name = "cbc-des-ux500",
+			.base.cra_priority = 300,
+			.base.cra_flags = CRYPTO_ALG_ASYNC,
+			.base.cra_blocksize = DES_BLOCK_SIZE,
+			.base.cra_ctxsize = sizeof(struct cryp_ctx),
+			.base.cra_alignmask = 3,
+			.base.cra_module = THIS_MODULE,
+
+			.min_keysize = DES_KEY_SIZE,
+			.max_keysize = DES_KEY_SIZE,
+			.setkey = des_skcipher_setkey,
+			.encrypt = cryp_blk_encrypt,
+			.decrypt = cryp_blk_decrypt,
+			.ivsize = DES_BLOCK_SIZE,
+			.init = cryp_init_tfm,
 		}
 	},
 	{
 		.algomode = CRYP_ALGO_TDES_CBC,
-		.crypto = {
-			.cra_name = "cbc(des3_ede)",
-			.cra_driver_name = "cbc-des3_ede-ux500",
-			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
-					CRYPTO_ALG_ASYNC,
-			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
-			.cra_ctxsize = sizeof(struct cryp_ctx),
-			.cra_alignmask = 3,
-			.cra_type = &crypto_ablkcipher_type,
-			.cra_init = cryp_cra_init,
-			.cra_module = THIS_MODULE,
-			.cra_u = {
-				.ablkcipher = {
-					.min_keysize = DES3_EDE_KEY_SIZE,
-					.max_keysize = DES3_EDE_KEY_SIZE,
-					.setkey = des3_ablkcipher_setkey,
-					.encrypt = cryp_blk_encrypt,
-					.decrypt = cryp_blk_decrypt,
-					.ivsize = DES3_EDE_BLOCK_SIZE,
-				}
-			}
+		.skcipher = {
+			.base.cra_name = "cbc(des3_ede)",
+			.base.cra_driver_name = "cbc-des3_ede-ux500",
+			.base.cra_priority = 300,
+			.base.cra_flags = CRYPTO_ALG_ASYNC,
+			.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
+			.base.cra_ctxsize = sizeof(struct cryp_ctx),
+			.base.cra_alignmask = 3,
+			.base.cra_module = THIS_MODULE,
+
+			.min_keysize = DES3_EDE_KEY_SIZE,
+			.max_keysize = DES3_EDE_KEY_SIZE,
+			.setkey = des3_skcipher_setkey,
+			.encrypt = cryp_blk_encrypt,
+			.decrypt = cryp_blk_decrypt,
+			.ivsize = DES3_EDE_BLOCK_SIZE,
+			.init = cryp_init_tfm,
 		}
 	}
 };
@@ -1374,18 +1224,18 @@
 	pr_debug("[%s]", __func__);

 	for (i = 0; i < ARRAY_SIZE(cryp_algs); i++) {
-		ret = crypto_register_alg(&cryp_algs[i].crypto);
+		ret = crypto_register_skcipher(&cryp_algs[i].skcipher);
 		if (ret) {
 			count = i;
 			pr_err("[%s] alg registration failed",
-				cryp_algs[i].crypto.cra_driver_name);
+				cryp_algs[i].skcipher.base.cra_driver_name);
 			goto unreg;
 		}
 	}
 	return 0;
 unreg:
 	for (i = 0; i < count; i++)
-		crypto_unregister_alg(&cryp_algs[i].crypto);
+		crypto_unregister_skcipher(&cryp_algs[i].skcipher);
 	return ret;
 }

@@ -1399,7 +1249,7 @@
 	pr_debug(DEV_DBG_NAME " [%s]", __func__);

 	for (i = 0; i < ARRAY_SIZE(cryp_algs); i++)
-		crypto_unregister_alg(&cryp_algs[i].crypto);
+		crypto_unregister_skcipher(&cryp_algs[i].skcipher);
 }

 static int ux500_cryp_probe(struct platform_device *pdev)