.. | .. |
---|
| 1 | +// SPDX-License-Identifier: GPL-2.0-or-later |
---|
1 | 2 | /** |
---|
2 | 3 | * AMCC SoC PPC4xx Crypto Driver |
---|
3 | 4 | * |
---|
4 | 5 | * Copyright (c) 2008 Applied Micro Circuits Corporation. |
---|
5 | 6 | * All rights reserved. James Hsiao <jhsiao@amcc.com> |
---|
6 | | - * |
---|
7 | | - * This program is free software; you can redistribute it and/or modify |
---|
8 | | - * it under the terms of the GNU General Public License as published by |
---|
9 | | - * the Free Software Foundation; either version 2 of the License, or |
---|
10 | | - * (at your option) any later version. |
---|
11 | | - * |
---|
12 | | - * This program is distributed in the hope that it will be useful, |
---|
13 | | - * but WITHOUT ANY WARRANTY; without even the implied warranty of |
---|
14 | | - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
---|
15 | | - * GNU General Public License for more details. |
---|
16 | 7 | * |
---|
17 | 8 | * This file implements AMCC crypto offload Linux device driver for use with |
---|
18 | 9 | * Linux CryptoAPI. |
---|
.. | .. |
---|
40 | 31 | #include <crypto/ctr.h> |
---|
41 | 32 | #include <crypto/gcm.h> |
---|
42 | 33 | #include <crypto/sha.h> |
---|
| 34 | +#include <crypto/rng.h> |
---|
43 | 35 | #include <crypto/scatterwalk.h> |
---|
44 | 36 | #include <crypto/skcipher.h> |
---|
45 | 37 | #include <crypto/internal/aead.h> |
---|
| 38 | +#include <crypto/internal/rng.h> |
---|
46 | 39 | #include <crypto/internal/skcipher.h> |
---|
47 | 40 | #include "crypto4xx_reg_def.h" |
---|
48 | 41 | #include "crypto4xx_core.h" |
---|
.. | .. |
---|
176 | 169 | int i; |
---|
177 | 170 | dev->pdr = dma_alloc_coherent(dev->core_dev->device, |
---|
178 | 171 | sizeof(struct ce_pd) * PPC4XX_NUM_PD, |
---|
179 | | - &dev->pdr_pa, GFP_ATOMIC); |
---|
| 172 | + &dev->pdr_pa, GFP_KERNEL); |
---|
180 | 173 | if (!dev->pdr) |
---|
181 | 174 | return -ENOMEM; |
---|
182 | 175 | |
---|
.. | .. |
---|
189 | 182 | dev->pdr_pa); |
---|
190 | 183 | return -ENOMEM; |
---|
191 | 184 | } |
---|
192 | | - memset(dev->pdr, 0, sizeof(struct ce_pd) * PPC4XX_NUM_PD); |
---|
193 | 185 | dev->shadow_sa_pool = dma_alloc_coherent(dev->core_dev->device, |
---|
194 | 186 | sizeof(union shadow_sa_buf) * PPC4XX_NUM_PD, |
---|
195 | 187 | &dev->shadow_sa_pool_pa, |
---|
196 | | - GFP_ATOMIC); |
---|
| 188 | + GFP_KERNEL); |
---|
197 | 189 | if (!dev->shadow_sa_pool) |
---|
198 | 190 | return -ENOMEM; |
---|
199 | 191 | |
---|
200 | 192 | dev->shadow_sr_pool = dma_alloc_coherent(dev->core_dev->device, |
---|
201 | 193 | sizeof(struct sa_state_record) * PPC4XX_NUM_PD, |
---|
202 | | - &dev->shadow_sr_pool_pa, GFP_ATOMIC); |
---|
| 194 | + &dev->shadow_sr_pool_pa, GFP_KERNEL); |
---|
203 | 195 | if (!dev->shadow_sr_pool) |
---|
204 | 196 | return -ENOMEM; |
---|
205 | 197 | for (i = 0; i < PPC4XX_NUM_PD; i++) { |
---|
.. | .. |
---|
283 | 275 | */ |
---|
284 | 276 | static u32 crypto4xx_build_gdr(struct crypto4xx_device *dev) |
---|
285 | 277 | { |
---|
286 | | - dev->gdr = dma_zalloc_coherent(dev->core_dev->device, |
---|
287 | | - sizeof(struct ce_gd) * PPC4XX_NUM_GD, |
---|
288 | | - &dev->gdr_pa, GFP_ATOMIC); |
---|
| 278 | + dev->gdr = dma_alloc_coherent(dev->core_dev->device, |
---|
| 279 | + sizeof(struct ce_gd) * PPC4XX_NUM_GD, |
---|
| 280 | + &dev->gdr_pa, GFP_KERNEL); |
---|
289 | 281 | if (!dev->gdr) |
---|
290 | 282 | return -ENOMEM; |
---|
291 | 283 | |
---|
.. | .. |
---|
294 | 286 | |
---|
295 | 287 | static inline void crypto4xx_destroy_gdr(struct crypto4xx_device *dev) |
---|
296 | 288 | { |
---|
297 | | - dma_free_coherent(dev->core_dev->device, |
---|
| 289 | + if (dev->gdr) |
---|
| 290 | + dma_free_coherent(dev->core_dev->device, |
---|
298 | 291 | sizeof(struct ce_gd) * PPC4XX_NUM_GD, |
---|
299 | 292 | dev->gdr, dev->gdr_pa); |
---|
300 | 293 | } |
---|
.. | .. |
---|
362 | 355 | { |
---|
363 | 356 | int i; |
---|
364 | 357 | |
---|
365 | | - /* alloc memory for scatter descriptor ring */ |
---|
366 | | - dev->sdr = dma_alloc_coherent(dev->core_dev->device, |
---|
367 | | - sizeof(struct ce_sd) * PPC4XX_NUM_SD, |
---|
368 | | - &dev->sdr_pa, GFP_ATOMIC); |
---|
369 | | - if (!dev->sdr) |
---|
370 | | - return -ENOMEM; |
---|
371 | | - |
---|
372 | 358 | dev->scatter_buffer_va = |
---|
373 | 359 | dma_alloc_coherent(dev->core_dev->device, |
---|
374 | 360 | PPC4XX_SD_BUFFER_SIZE * PPC4XX_NUM_SD, |
---|
375 | | - &dev->scatter_buffer_pa, GFP_ATOMIC); |
---|
| 361 | + &dev->scatter_buffer_pa, GFP_KERNEL); |
---|
376 | 362 | if (!dev->scatter_buffer_va) |
---|
| 363 | + return -ENOMEM; |
---|
| 364 | + |
---|
| 365 | + /* alloc memory for scatter descriptor ring */ |
---|
| 366 | + dev->sdr = dma_alloc_coherent(dev->core_dev->device, |
---|
| 367 | + sizeof(struct ce_sd) * PPC4XX_NUM_SD, |
---|
| 368 | + &dev->sdr_pa, GFP_KERNEL); |
---|
| 369 | + if (!dev->sdr) |
---|
377 | 370 | return -ENOMEM; |
---|
378 | 371 | |
---|
379 | 372 | for (i = 0; i < PPC4XX_NUM_SD; i++) { |
---|
.. | .. |
---|
529 | 522 | { |
---|
530 | 523 | struct skcipher_request *req; |
---|
531 | 524 | struct scatterlist *dst; |
---|
532 | | - dma_addr_t addr; |
---|
533 | 525 | |
---|
534 | 526 | req = skcipher_request_cast(pd_uinfo->async_req); |
---|
535 | 527 | |
---|
536 | | - if (pd_uinfo->using_sd) { |
---|
| 528 | + if (pd_uinfo->sa_va->sa_command_0.bf.scatter) { |
---|
537 | 529 | crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo, |
---|
538 | 530 | req->cryptlen, req->dst); |
---|
539 | 531 | } else { |
---|
540 | 532 | dst = pd_uinfo->dest_va; |
---|
541 | | - addr = dma_map_page(dev->core_dev->device, sg_page(dst), |
---|
542 | | - dst->offset, dst->length, DMA_FROM_DEVICE); |
---|
| 533 | + dma_unmap_page(dev->core_dev->device, pd->dest, dst->length, |
---|
| 534 | + DMA_FROM_DEVICE); |
---|
543 | 535 | } |
---|
544 | 536 | |
---|
545 | 537 | if (pd_uinfo->sa_va->sa_command_0.bf.save_iv == SA_SAVE_IV) { |
---|
.. | .. |
---|
564 | 556 | struct ahash_request *ahash_req; |
---|
565 | 557 | |
---|
566 | 558 | ahash_req = ahash_request_cast(pd_uinfo->async_req); |
---|
567 | | - ctx = crypto_tfm_ctx(ahash_req->base.tfm); |
---|
| 559 | + ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(ahash_req)); |
---|
568 | 560 | |
---|
569 | | - crypto4xx_copy_digest_to_dst(ahash_req->result, pd_uinfo, |
---|
570 | | - crypto_tfm_ctx(ahash_req->base.tfm)); |
---|
| 561 | + crypto4xx_copy_digest_to_dst(ahash_req->result, pd_uinfo, ctx); |
---|
571 | 562 | crypto4xx_ret_sg_desc(dev, pd_uinfo); |
---|
572 | 563 | |
---|
573 | 564 | if (pd_uinfo->state & PD_ENTRY_BUSY) |
---|
.. | .. |
---|
587 | 578 | u32 icv[AES_BLOCK_SIZE]; |
---|
588 | 579 | int err = 0; |
---|
589 | 580 | |
---|
590 | | - if (pd_uinfo->using_sd) { |
---|
| 581 | + if (pd_uinfo->sa_va->sa_command_0.bf.scatter) { |
---|
591 | 582 | crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo, |
---|
592 | 583 | pd->pd_ctl_len.bf.pkt_len, |
---|
593 | 584 | dst); |
---|
594 | 585 | } else { |
---|
595 | | - __dma_sync_page(sg_page(dst), dst->offset, dst->length, |
---|
| 586 | + dma_unmap_page(dev->core_dev->device, pd->dest, dst->length, |
---|
596 | 587 | DMA_FROM_DEVICE); |
---|
597 | 588 | } |
---|
598 | 589 | |
---|
.. | .. |
---|
881 | 872 | * we know application give us dst a whole piece of memory |
---|
882 | 873 | * no need to use scatter ring. |
---|
883 | 874 | */ |
---|
884 | | - pd_uinfo->using_sd = 0; |
---|
885 | 875 | pd_uinfo->first_sd = 0xffffffff; |
---|
886 | 876 | sa->sa_command_0.bf.scatter = 0; |
---|
887 | 877 | pd->dest = (u32)dma_map_page(dev->core_dev->device, |
---|
.. | .. |
---|
895 | 885 | u32 sd_idx = fst_sd; |
---|
896 | 886 | nbytes = datalen; |
---|
897 | 887 | sa->sa_command_0.bf.scatter = 1; |
---|
898 | | - pd_uinfo->using_sd = 1; |
---|
899 | 888 | pd_uinfo->first_sd = fst_sd; |
---|
900 | 889 | sd = crypto4xx_get_sdp(dev, &sd_dma, sd_idx); |
---|
901 | 890 | pd->dest = sd_dma; |
---|
.. | .. |
---|
959 | 948 | |
---|
960 | 949 | if (alg->base.cra_flags & CRYPTO_ALG_NEED_FALLBACK) { |
---|
961 | 950 | ctx->sw_cipher.cipher = |
---|
962 | | - crypto_alloc_skcipher(alg->base.cra_name, 0, |
---|
963 | | - CRYPTO_ALG_NEED_FALLBACK | |
---|
964 | | - CRYPTO_ALG_ASYNC); |
---|
| 951 | + crypto_alloc_sync_skcipher(alg->base.cra_name, 0, |
---|
| 952 | + CRYPTO_ALG_NEED_FALLBACK); |
---|
965 | 953 | if (IS_ERR(ctx->sw_cipher.cipher)) |
---|
966 | 954 | return PTR_ERR(ctx->sw_cipher.cipher); |
---|
967 | | - |
---|
968 | | - crypto_skcipher_set_reqsize(sk, |
---|
969 | | - sizeof(struct skcipher_request) + 32 + |
---|
970 | | - crypto_skcipher_reqsize(ctx->sw_cipher.cipher)); |
---|
971 | 955 | } |
---|
972 | 956 | |
---|
973 | 957 | amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher); |
---|
.. | .. |
---|
986 | 970 | |
---|
987 | 971 | crypto4xx_common_exit(ctx); |
---|
988 | 972 | if (ctx->sw_cipher.cipher) |
---|
989 | | - crypto_free_skcipher(ctx->sw_cipher.cipher); |
---|
| 973 | + crypto_free_sync_skcipher(ctx->sw_cipher.cipher); |
---|
990 | 974 | } |
---|
991 | 975 | |
---|
992 | 976 | static int crypto4xx_aead_init(struct crypto_aead *tfm) |
---|
.. | .. |
---|
1042 | 1026 | rc = crypto_register_ahash(&alg->alg.u.hash); |
---|
1043 | 1027 | break; |
---|
1044 | 1028 | |
---|
| 1029 | + case CRYPTO_ALG_TYPE_RNG: |
---|
| 1030 | + rc = crypto_register_rng(&alg->alg.u.rng); |
---|
| 1031 | + break; |
---|
| 1032 | + |
---|
1045 | 1033 | default: |
---|
1046 | 1034 | rc = crypto_register_skcipher(&alg->alg.u.cipher); |
---|
1047 | 1035 | break; |
---|
.. | .. |
---|
1069 | 1057 | |
---|
1070 | 1058 | case CRYPTO_ALG_TYPE_AEAD: |
---|
1071 | 1059 | crypto_unregister_aead(&alg->alg.u.aead); |
---|
| 1060 | + break; |
---|
| 1061 | + |
---|
| 1062 | + case CRYPTO_ALG_TYPE_RNG: |
---|
| 1063 | + crypto_unregister_rng(&alg->alg.u.rng); |
---|
1072 | 1064 | break; |
---|
1073 | 1065 | |
---|
1074 | 1066 | default: |
---|
.. | .. |
---|
1129 | 1121 | PPC4XX_TMO_ERR_INT); |
---|
1130 | 1122 | } |
---|
1131 | 1123 | |
---|
/*
 * ppc4xx_prng_data_read - pull pseudo-random bytes out of the PRNG engine
 * @dev:  crypto4xx device whose MMIO window (ce_base) holds the PRNG regs
 * @data: destination buffer
 * @max:  number of bytes requested
 *
 * Each hardware round yields 8 bytes (two 32-bit result registers).
 * Returns the number of bytes accounted in @curr, or -ETIMEDOUT if the
 * engine never leaves the busy state.
 *
 * NOTE(review): in the final partial round, @curr is not advanced for the
 * tail bytes copied, so the return value can be smaller than the number of
 * bytes actually written — confirm callers only distinguish < 0 vs >= 0.
 */
static int ppc4xx_prng_data_read(struct crypto4xx_device *dev,
				 u8 *data, unsigned int max)
{
	unsigned int i, curr = 0;
	u32 val[2];

	do {
		/* trigger PRN generation */
		writel(PPC4XX_PRNG_CTRL_AUTO_EN,
		       dev->ce_base + CRYPTO4XX_PRNG_CTRL);

		/* poll the status register until the result is ready */
		for (i = 0; i < 1024; i++) {
			/* usually 19 iterations are enough */
			if ((readl(dev->ce_base + CRYPTO4XX_PRNG_STAT) &
			     CRYPTO4XX_PRNG_STAT_BUSY))
				continue;

			/* latch both 32-bit result words (big-endian reads) */
			val[0] = readl_be(dev->ce_base + CRYPTO4XX_PRNG_RES_0);
			val[1] = readl_be(dev->ce_base + CRYPTO4XX_PRNG_RES_1);
			break;
		}
		/* loop exhausted without the busy bit clearing: give up */
		if (i == 1024)
			return -ETIMEDOUT;

		if ((max - curr) >= 8) {
			memcpy(data, &val, 8);
			data += 8;
			curr += 8;
		} else {
			/* copy only remaining bytes */
			memcpy(data, &val, max - curr);
			break;
		}
	} while (curr < max);

	return curr;
}
---|
| 1161 | + |
---|
/*
 * crypto4xx_prng_generate - crypto_rng .generate callback
 * @tfm:  rng transform being driven
 * @src:  additional input — ignored, the hardware PRNG takes none
 * @slen: length of @src — ignored
 * @dstn: destination buffer for the random bytes
 * @dlen: number of bytes requested
 *
 * Looks up the owning crypto4xx device from the registered alg descriptor
 * and reads from the PRNG under the device-wide rng_lock, which serialises
 * access to the shared PRNG registers.
 *
 * NOTE(review): on success this propagates the positive byte count from
 * ppc4xx_prng_data_read() rather than 0 — verify this matches what
 * crypto_rng_generate() callers expect from a .generate implementation.
 */
static int crypto4xx_prng_generate(struct crypto_rng *tfm,
				   const u8 *src, unsigned int slen,
				   u8 *dstn, unsigned int dlen)
{
	struct rng_alg *alg = crypto_rng_alg(tfm);
	struct crypto4xx_alg *amcc_alg;
	struct crypto4xx_device *dev;
	int ret;

	/* recover the driver-private alg wrapper that carries the device */
	amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.rng);
	dev = amcc_alg->dev;

	mutex_lock(&dev->core_dev->rng_lock);
	ret = ppc4xx_prng_data_read(dev, dstn, dlen);
	mutex_unlock(&dev->core_dev->rng_lock);
	return ret;
}
---|
| 1179 | + |
---|
| 1180 | + |
---|
/*
 * crypto4xx_prng_seed - crypto_rng .seed callback
 *
 * The hardware PRNG exposes no seeding interface (the alg is registered
 * with seedsize = 0), so this is a no-op that reports success.
 */
static int crypto4xx_prng_seed(struct crypto_rng *tfm, const u8 *seed,
			       unsigned int slen)
{
	return 0;
}
---|
| 1186 | + |
---|
1132 | 1187 | /** |
---|
1133 | 1188 | * Supported Crypto Algorithms |
---|
1134 | 1189 | */ |
---|
.. | .. |
---|
1161 | 1216 | .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY, |
---|
1162 | 1217 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
1163 | 1218 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1164 | | - .cra_blocksize = AES_BLOCK_SIZE, |
---|
| 1219 | + .cra_blocksize = 1, |
---|
1165 | 1220 | .cra_ctxsize = sizeof(struct crypto4xx_ctx), |
---|
1166 | 1221 | .cra_module = THIS_MODULE, |
---|
1167 | 1222 | }, |
---|
.. | .. |
---|
1222 | 1277 | .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY, |
---|
1223 | 1278 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
1224 | 1279 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1225 | | - .cra_blocksize = 1, |
---|
| 1280 | + .cra_blocksize = AES_BLOCK_SIZE, |
---|
1226 | 1281 | .cra_ctxsize = sizeof(struct crypto4xx_ctx), |
---|
1227 | 1282 | .cra_module = THIS_MODULE, |
---|
1228 | 1283 | }, |
---|
.. | .. |
---|
1298 | 1353 | .cra_module = THIS_MODULE, |
---|
1299 | 1354 | }, |
---|
1300 | 1355 | } }, |
---|
| 1356 | + { .type = CRYPTO_ALG_TYPE_RNG, .u.rng = { |
---|
| 1357 | + .base = { |
---|
| 1358 | + .cra_name = "stdrng", |
---|
| 1359 | + .cra_driver_name = "crypto4xx_rng", |
---|
| 1360 | + .cra_priority = 300, |
---|
| 1361 | + .cra_ctxsize = 0, |
---|
| 1362 | + .cra_module = THIS_MODULE, |
---|
| 1363 | + }, |
---|
| 1364 | + .generate = crypto4xx_prng_generate, |
---|
| 1365 | + .seed = crypto4xx_prng_seed, |
---|
| 1366 | + .seedsize = 0, |
---|
| 1367 | + } }, |
---|
1301 | 1368 | }; |
---|
1302 | 1369 | |
---|
1303 | 1370 | /** |
---|
.. | .. |
---|
1367 | 1434 | core_dev->dev->core_dev = core_dev; |
---|
1368 | 1435 | core_dev->dev->is_revb = is_revb; |
---|
1369 | 1436 | core_dev->device = dev; |
---|
| 1437 | + mutex_init(&core_dev->rng_lock); |
---|
1370 | 1438 | spin_lock_init(&core_dev->lock); |
---|
1371 | 1439 | INIT_LIST_HEAD(&core_dev->dev->alg_list); |
---|
1372 | 1440 | ratelimit_default_init(&core_dev->dev->aead_ratelimit); |
---|
| 1441 | + rc = crypto4xx_build_sdr(core_dev->dev); |
---|
| 1442 | + if (rc) |
---|
| 1443 | + goto err_build_sdr; |
---|
1373 | 1444 | rc = crypto4xx_build_pdr(core_dev->dev); |
---|
1374 | 1445 | if (rc) |
---|
1375 | | - goto err_build_pdr; |
---|
| 1446 | + goto err_build_sdr; |
---|
1376 | 1447 | |
---|
1377 | 1448 | rc = crypto4xx_build_gdr(core_dev->dev); |
---|
1378 | | - if (rc) |
---|
1379 | | - goto err_build_pdr; |
---|
1380 | | - |
---|
1381 | | - rc = crypto4xx_build_sdr(core_dev->dev); |
---|
1382 | 1449 | if (rc) |
---|
1383 | 1450 | goto err_build_sdr; |
---|
1384 | 1451 | |
---|
.. | .. |
---|
1424 | 1491 | err_build_sdr: |
---|
1425 | 1492 | crypto4xx_destroy_sdr(core_dev->dev); |
---|
1426 | 1493 | crypto4xx_destroy_gdr(core_dev->dev); |
---|
1427 | | -err_build_pdr: |
---|
1428 | 1494 | crypto4xx_destroy_pdr(core_dev->dev); |
---|
1429 | 1495 | kfree(core_dev->dev); |
---|
1430 | 1496 | err_alloc_dev: |
---|
.. | .. |
---|
1446 | 1512 | tasklet_kill(&core_dev->tasklet); |
---|
1447 | 1513 | /* Un-register with Linux CryptoAPI */ |
---|
1448 | 1514 | crypto4xx_unregister_alg(core_dev->dev); |
---|
| 1515 | + mutex_destroy(&core_dev->rng_lock); |
---|
1449 | 1516 | /* Free all allocated memory */ |
---|
1450 | 1517 | crypto4xx_stop_all(core_dev); |
---|
1451 | 1518 | |
---|