2024-05-10 37f49e37ab4cb5d0bc4c60eb5c6d4dd57db767bb
kernel/drivers/crypto/ccree/cc_cipher.c
@@ -1,12 +1,13 @@
 // SPDX-License-Identifier: GPL-2.0
-/* Copyright (C) 2012-2018 ARM Limited or its affiliates. */
+/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */
 
 #include <linux/kernel.h>
 #include <linux/module.h>
 #include <crypto/algapi.h>
 #include <crypto/internal/skcipher.h>
-#include <crypto/des.h>
+#include <crypto/internal/des.h>
 #include <crypto/xts.h>
+#include <crypto/sm4.h>
 #include <crypto/scatterwalk.h>
 
 #include "cc_driver.h"
@@ -15,13 +16,9 @@
 #include "cc_cipher.h"
 #include "cc_request_mgr.h"
 
-#define MAX_ABLKCIPHER_SEQ_LEN 6
+#define MAX_SKCIPHER_SEQ_LEN 6
 
 #define template_skcipher template_u.skcipher
-
-struct cc_cipher_handle {
-	struct list_head alg_list;
-};
 
 struct cc_user_key_info {
 	u8 *key;
@@ -33,26 +30,42 @@
 	enum cc_hw_crypto_key key2_slot;
 };
 
+struct cc_cpp_key_info {
+	u8 slot;
+	enum cc_cpp_alg alg;
+};
+
+enum cc_key_type {
+	CC_UNPROTECTED_KEY,		/* User key */
+	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
+	CC_POLICY_PROTECTED_KEY,	/* CPP key */
+	CC_INVALID_PROTECTED_KEY	/* Invalid key */
+};
+
 struct cc_cipher_ctx {
 	struct cc_drvdata *drvdata;
 	int keylen;
-	int key_round_number;
 	int cipher_mode;
 	int flow_mode;
 	unsigned int flags;
-	bool hw_key;
+	enum cc_key_type key_type;
 	struct cc_user_key_info user;
-	struct cc_hw_key_info hw;
+	union {
+		struct cc_hw_key_info hw;
+		struct cc_cpp_key_info cpp;
+	};
 	struct crypto_shash *shash_tfm;
+	struct crypto_skcipher *fallback_tfm;
+	bool fallback_on;
 };
 
 static void cc_cipher_complete(struct device *dev, void *cc_req, int err);
 
-static inline bool cc_is_hw_key(struct crypto_tfm *tfm)
+static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
 {
 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 
-	return ctx_p->hw_key;
+	return ctx_p->key_type;
 }
 
 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
@@ -62,9 +75,7 @@
 	switch (size) {
 	case CC_AES_128_BIT_KEY_SIZE:
 	case CC_AES_192_BIT_KEY_SIZE:
-		if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
-		    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
-		    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
+		if (ctx_p->cipher_mode != DRV_CIPHER_XTS)
 			return 0;
 		break;
 	case CC_AES_256_BIT_KEY_SIZE:
@@ -72,8 +83,7 @@
 	case (CC_AES_192_BIT_KEY_SIZE * 2):
 	case (CC_AES_256_BIT_KEY_SIZE * 2):
 		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
-		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
-		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
+		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV)
 			return 0;
 		break;
 	default:
@@ -84,6 +94,9 @@
 		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
 			return 0;
 		break;
+	case S_DIN_to_SM4:
+		if (size == SM4_KEY_SIZE)
+			return 0;
 	default:
 		break;
 	}
@@ -97,10 +110,6 @@
 	case S_DIN_to_AES:
 		switch (ctx_p->cipher_mode) {
 		case DRV_CIPHER_XTS:
-			if (size >= AES_BLOCK_SIZE &&
-			    IS_ALIGNED(size, AES_BLOCK_SIZE))
-				return 0;
-			break;
 		case DRV_CIPHER_CBC_CTS:
 			if (size >= AES_BLOCK_SIZE)
 				return 0;
@@ -111,7 +120,6 @@
 		case DRV_CIPHER_ECB:
 		case DRV_CIPHER_CBC:
 		case DRV_CIPHER_ESSIV:
-		case DRV_CIPHER_BITLOCKER:
 			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
 				return 0;
 			break;
@@ -123,6 +131,17 @@
 		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
 			return 0;
 		break;
+	case S_DIN_to_SM4:
+		switch (ctx_p->cipher_mode) {
+		case DRV_CIPHER_CTR:
+			return 0;
+		case DRV_CIPHER_ECB:
+		case DRV_CIPHER_CBC:
+			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
+				return 0;
+		default:
+			break;
+		}
 	default:
 		break;
 	}
@@ -137,36 +156,55 @@
 						 skcipher_alg.base);
 	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
 	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
+	unsigned int fallback_req_size = 0;
 
 	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
 		crypto_tfm_alg_name(tfm));
-
-	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
-				    sizeof(struct cipher_req_ctx));
 
 	ctx_p->cipher_mode = cc_alg->cipher_mode;
 	ctx_p->flow_mode = cc_alg->flow_mode;
 	ctx_p->drvdata = cc_alg->drvdata;
 
 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
+		const char *name = crypto_tfm_alg_name(tfm);
+
 		/* Alloc hash tfm for essiv */
-		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
+		ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0);
 		if (IS_ERR(ctx_p->shash_tfm)) {
 			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
 			return PTR_ERR(ctx_p->shash_tfm);
 		}
+		max_key_buf_size <<= 1;
+
+		/* Alloc fallback tfm for essiv when key size != 256 bit */
+		ctx_p->fallback_tfm =
+			crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);
+
+		if (IS_ERR(ctx_p->fallback_tfm)) {
+			/* Note we're still allowing registration with no fallback since it's
+			 * better to have most modes supported than none at all.
+			 */
+			dev_warn(dev, "Error allocating fallback algo %s. Some modes may be available.\n",
+				 name);
+			ctx_p->fallback_tfm = NULL;
+		} else {
+			fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm);
+		}
 	}
 
+	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
+				    sizeof(struct cipher_req_ctx) + fallback_req_size);
+
 	/* Allocate key buffer, cache line aligned */
-	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
+	ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL);
 	if (!ctx_p->user.key)
-		goto free_shash;
+		goto free_fallback;
 
 	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
 		ctx_p->user.key);
 
 	/* Map key buffer */
-	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
+	ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
 						  max_key_buf_size,
 						  DMA_TO_DEVICE);
 	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
@@ -181,7 +219,8 @@
 
 free_key:
	kfree(ctx_p->user.key);
-free_shash:
+free_fallback:
+	crypto_free_skcipher(ctx_p->fallback_tfm);
 	crypto_free_shash(ctx_p->shash_tfm);
 
 	return -ENOMEM;
@@ -204,6 +243,8 @@
 		/* Free hash tfm for essiv */
 		crypto_free_shash(ctx_p->shash_tfm);
 		ctx_p->shash_tfm = NULL;
+		crypto_free_skcipher(ctx_p->fallback_tfm);
+		ctx_p->fallback_tfm = NULL;
 	}
 
 	/* Unmap key buffer */
@@ -213,8 +254,8 @@
 			 &ctx_p->user.key_dma_addr);
 
 	/* Free key buffer in context */
-	kzfree(ctx_p->user.key);
 	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
+	kfree_sensitive(ctx_p->user.key);
 }
 
 struct tdes_keys {
@@ -223,7 +264,7 @@
 	u8 key3[DES_KEY_SIZE];
 };
 
-static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num)
+static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
 {
 	switch (slot_num) {
 	case 0:
@@ -238,6 +279,22 @@
 	return END_OF_KEYS;
 }
 
+static u8 cc_slot_to_cpp_key(u8 slot_num)
+{
+	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
+}
+
+static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
+{
+	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
+		return CC_HW_PROTECTED_KEY;
+	else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
+		 slot_num <= CC_LAST_CPP_KEY_SLOT)
+		return CC_POLICY_PROTECTED_KEY;
+	else
+		return CC_INVALID_PROTECTED_KEY;
+}
+
 static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
 			     unsigned int keylen)
 {
@@ -248,19 +305,13 @@
 
 	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
 		ctx_p, crypto_tfm_alg_name(tfm), keylen);
-	dump_byte_array("key", (u8 *)key, keylen);
+	dump_byte_array("key", key, keylen);
 
 	/* STAT_PHASE_0: Init and sanity checks */
 
-	/* This check the size of the hardware key token */
+	/* This checks the size of the protected key token */
 	if (keylen != sizeof(hki)) {
-		dev_err(dev, "Unsupported HW key size %d.\n", keylen);
-		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
-		return -EINVAL;
-	}
-
-	if (ctx_p->flow_mode != S_DIN_to_AES) {
-		dev_err(dev, "HW key not supported for non-AES flows\n");
+		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
 		return -EINVAL;
 	}
 
@@ -272,36 +323,74 @@
 	keylen = hki.keylen;
 
 	if (validate_keys_sizes(ctx_p, keylen)) {
-		dev_err(dev, "Unsupported key size %d.\n", keylen);
-		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
 		return -EINVAL;
-	}
-
-	ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
-	if (ctx_p->hw.key1_slot == END_OF_KEYS) {
-		dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1);
-		return -EINVAL;
-	}
-
-	if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
-	    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
-	    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
-		if (hki.hw_key1 == hki.hw_key2) {
-			dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
-				hki.hw_key1, hki.hw_key2);
-			return -EINVAL;
-		}
-		ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
-		if (ctx_p->hw.key2_slot == END_OF_KEYS) {
-			dev_err(dev, "Unsupported hw key2 number (%d)\n",
-				hki.hw_key2);
-			return -EINVAL;
-		}
 	}
 
 	ctx_p->keylen = keylen;
-	ctx_p->hw_key = true;
-	dev_dbg(dev, "cc_is_hw_key ret 0");
+	ctx_p->fallback_on = false;
+
+	switch (cc_slot_to_key_type(hki.hw_key1)) {
+	case CC_HW_PROTECTED_KEY:
+		if (ctx_p->flow_mode == S_DIN_to_SM4) {
+			dev_err(dev, "Only AES HW protected keys are supported\n");
+			return -EINVAL;
+		}
+
+		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
+		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
+			dev_err(dev, "Unsupported hw key1 number (%d)\n",
+				hki.hw_key1);
+			return -EINVAL;
+		}
+
+		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
+		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
+			if (hki.hw_key1 == hki.hw_key2) {
+				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
+					hki.hw_key1, hki.hw_key2);
+				return -EINVAL;
+			}
+
+			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
+			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
+				dev_err(dev, "Unsupported hw key2 number (%d)\n",
+					hki.hw_key2);
+				return -EINVAL;
+			}
+		}
+
+		ctx_p->key_type = CC_HW_PROTECTED_KEY;
+		dev_dbg(dev, "HW protected key %d/%d set.\n",
+			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
+		break;
+
+	case CC_POLICY_PROTECTED_KEY:
+		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
+			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
+			return -EINVAL;
+		}
+
+		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
+		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
+			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
+			return -EINVAL;
+		}
+
+		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
+		if (ctx_p->flow_mode == S_DIN_to_AES)
+			ctx_p->cpp.alg = CC_CPP_AES;
+		else /* Must be SM4 due to sethkey registration */
+			ctx_p->cpp.alg = CC_CPP_SM4;
+		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
+		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
+			ctx_p->cpp.alg, ctx_p->cpp.slot);
+		break;
+
+	default:
+		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
+		return -EINVAL;
+	}
 
 	return 0;
 }
@@ -319,17 +408,39 @@
 
 	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
 		ctx_p, crypto_tfm_alg_name(tfm), keylen);
-	dump_byte_array("key", (u8 *)key, keylen);
+	dump_byte_array("key", key, keylen);
 
 	/* STAT_PHASE_0: Init and sanity checks */
 
 	if (validate_keys_sizes(ctx_p, keylen)) {
-		dev_err(dev, "Unsupported key size %d.\n", keylen);
-		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		dev_dbg(dev, "Invalid key size %d.\n", keylen);
 		return -EINVAL;
 	}
 
-	ctx_p->hw_key = false;
+	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
+
+		/* We only support 256 bit ESSIV-CBC-AES keys */
+		if (keylen != AES_KEYSIZE_256) {
+			unsigned int flags = crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_MASK;
+
+			if (likely(ctx_p->fallback_tfm)) {
+				ctx_p->fallback_on = true;
+				crypto_skcipher_clear_flags(ctx_p->fallback_tfm,
+							    CRYPTO_TFM_REQ_MASK);
+				crypto_skcipher_clear_flags(ctx_p->fallback_tfm, flags);
+				return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen);
+			}
+
+			dev_dbg(dev, "Unsupported key size %d and no fallback.\n", keylen);
+			return -EINVAL;
+		}
+
+		/* Internal ESSIV key buffer is double sized */
+		max_key_buf_size <<= 1;
+	}
+
+	ctx_p->fallback_on = false;
+	ctx_p->key_type = CC_UNPROTECTED_KEY;
 
 	/*
 	 * Verify DES weak keys
@@ -337,15 +448,9 @@
 	 * HW does the expansion on its own.
 	 */
 	if (ctx_p->flow_mode == S_DIN_to_DES) {
-		u32 tmp[DES3_EDE_EXPKEY_WORDS];
-		if (keylen == DES3_EDE_KEY_SIZE &&
-		    __des3_ede_setkey(tmp, &tfm->crt_flags, key,
-				      DES3_EDE_KEY_SIZE)) {
-			dev_dbg(dev, "weak 3DES key");
-			return -EINVAL;
-		} else if (!des_ekey(tmp, key) &&
-			   (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
-			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
+		if ((keylen == DES3_EDE_KEY_SIZE &&
+		     verify_skcipher_des3_key(sktfm, key)) ||
+		    verify_skcipher_des_key(sktfm, key)) {
 			dev_dbg(dev, "weak DES key");
 			return -EINVAL;
 		}
@@ -362,24 +467,20 @@
 				max_key_buf_size, DMA_TO_DEVICE);
 
 	memcpy(ctx_p->user.key, key, keylen);
-	if (keylen == 24)
-		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
 
 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
 		/* sha256 for key2 - use sw implementation */
-		int key_len = keylen >> 1;
 		int err;
 
-		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
-
-		desc->tfm = ctx_p->shash_tfm;
-
-		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
-					  ctx_p->user.key + key_len);
+		err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
+					      ctx_p->user.key, keylen,
+					      ctx_p->user.key + keylen);
 		if (err) {
 			dev_err(dev, "Failed to hash ESSIV key.\n");
 			return err;
 		}
+
+		keylen <<= 1;
 	}
 	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
 				   max_key_buf_size, DMA_TO_DEVICE);
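Taken together, the ESSIV hunks above double the key buffer and fill its second half with SHA-256 of the user key; the hardware then encrypts the IV (the sector number) under that derived key, which is the standard ESSIV construction. The sketch below reproduces the derivation in user space with OpenSSL, purely for illustration: the helper name, the little-endian sector layout (borrowed from dm-crypt), and the use of OpenSSL are assumptions, not part of this driver.

/* essiv_iv.c - illustrative ESSIV IV derivation, not driver code.
 * Build: cc essiv_iv.c -lcrypto
 */
#include <openssl/evp.h>
#include <openssl/sha.h>
#include <stdint.h>
#include <string.h>

/* IV = AES-256-ECB(SHA-256(key), sector), one AES block per sector */
static int essiv_iv(const uint8_t *key, size_t keylen,
		    uint64_t sector, uint8_t iv[16])
{
	uint8_t key2[SHA256_DIGEST_LENGTH];
	uint8_t blk[16] = { 0 };
	EVP_CIPHER_CTX *ctx;
	int outl, ok = 0;

	/* key2 = SHA-256(key1), what the driver computes via ctx_p->shash_tfm */
	SHA256(key, keylen, key2);

	/* sector number, little-endian (assumed), padded to one AES block */
	memcpy(blk, &sector, sizeof(sector));

	ctx = EVP_CIPHER_CTX_new();
	if (!ctx)
		return 0;
	if (EVP_EncryptInit_ex(ctx, EVP_aes_256_ecb(), NULL, key2, NULL) &&
	    EVP_CIPHER_CTX_set_padding(ctx, 0) &&
	    EVP_EncryptUpdate(ctx, iv, &outl, blk, sizeof(blk)))
		ok = (outl == 16);
	EVP_CIPHER_CTX_free(ctx);
	return ok;
}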
@@ -389,7 +490,77 @@
 	return 0;
 }
 
-static void cc_setup_cipher_desc(struct crypto_tfm *tfm,
+static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
+{
+	switch (ctx_p->flow_mode) {
+	case S_DIN_to_AES:
+		return S_AES_to_DOUT;
+	case S_DIN_to_DES:
+		return S_DES_to_DOUT;
+	case S_DIN_to_SM4:
+		return S_SM4_to_DOUT;
+	default:
+		return ctx_p->flow_mode;
+	}
+}
+
+static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
+				 struct cipher_req_ctx *req_ctx,
+				 unsigned int ivsize, struct cc_hw_desc desc[],
+				 unsigned int *seq_size)
+{
+	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
+	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
+	int cipher_mode = ctx_p->cipher_mode;
+	int flow_mode = cc_out_setup_mode(ctx_p);
+	int direction = req_ctx->gen_ctx.op_type;
+	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
+
+	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
+		return;
+
+	switch (cipher_mode) {
+	case DRV_CIPHER_ECB:
+		break;
+	case DRV_CIPHER_CBC:
+	case DRV_CIPHER_CBC_CTS:
+	case DRV_CIPHER_CTR:
+	case DRV_CIPHER_OFB:
+		/* Read next IV */
+		hw_desc_init(&desc[*seq_size]);
+		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
+		set_cipher_config0(&desc[*seq_size], direction);
+		set_flow_mode(&desc[*seq_size], flow_mode);
+		set_cipher_mode(&desc[*seq_size], cipher_mode);
+		if (cipher_mode == DRV_CIPHER_CTR ||
+		    cipher_mode == DRV_CIPHER_OFB) {
+			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
+		} else {
+			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
+		}
+		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
+		(*seq_size)++;
+		break;
+	case DRV_CIPHER_XTS:
+	case DRV_CIPHER_ESSIV:
+		/* IV */
+		hw_desc_init(&desc[*seq_size]);
+		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
+		set_cipher_mode(&desc[*seq_size], cipher_mode);
+		set_cipher_config0(&desc[*seq_size], direction);
+		set_flow_mode(&desc[*seq_size], flow_mode);
+		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
+			      NS_BIT, 1);
+		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
+		(*seq_size)++;
+		break;
+	default:
+		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
+	}
+}
+
+
+static void cc_setup_state_desc(struct crypto_tfm *tfm,
 				struct cipher_req_ctx *req_ctx,
 				unsigned int ivsize, unsigned int nbytes,
 				struct cc_hw_desc desc[],
@@ -400,24 +571,16 @@
 	int cipher_mode = ctx_p->cipher_mode;
 	int flow_mode = ctx_p->flow_mode;
 	int direction = req_ctx->gen_ctx.op_type;
-	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
-	unsigned int key_len = ctx_p->keylen;
 	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
-	unsigned int du_size = nbytes;
-
-	struct cc_crypto_alg *cc_alg =
-		container_of(tfm->__crt_alg, struct cc_crypto_alg,
-			     skcipher_alg.base);
-
-	if (cc_alg->data_unit)
-		du_size = cc_alg->data_unit;
 
 	switch (cipher_mode) {
+	case DRV_CIPHER_ECB:
+		break;
 	case DRV_CIPHER_CBC:
 	case DRV_CIPHER_CBC_CTS:
 	case DRV_CIPHER_CTR:
 	case DRV_CIPHER_OFB:
-		/* Load cipher state */
+		/* Load IV */
 		hw_desc_init(&desc[*seq_size]);
 		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
 			     NS_BIT);
@@ -431,76 +594,70 @@
 			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
 		}
 		(*seq_size)++;
-		/*FALLTHROUGH*/
-	case DRV_CIPHER_ECB:
-		/* Load key */
-		hw_desc_init(&desc[*seq_size]);
-		set_cipher_mode(&desc[*seq_size], cipher_mode);
-		set_cipher_config0(&desc[*seq_size], direction);
-		if (flow_mode == S_DIN_to_AES) {
-			if (cc_is_hw_key(tfm)) {
-				set_hw_crypto_key(&desc[*seq_size],
-						  ctx_p->hw.key1_slot);
-			} else {
-				set_din_type(&desc[*seq_size], DMA_DLLI,
-					     key_dma_addr, ((key_len == 24) ?
-							    AES_MAX_KEY_SIZE :
-							    key_len), NS_BIT);
-			}
-			set_key_size_aes(&desc[*seq_size], key_len);
-		} else {
-			/*des*/
-			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
-				     key_len, NS_BIT);
-			set_key_size_des(&desc[*seq_size], key_len);
-		}
-		set_flow_mode(&desc[*seq_size], flow_mode);
-		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
-		(*seq_size)++;
 		break;
 	case DRV_CIPHER_XTS:
 	case DRV_CIPHER_ESSIV:
-	case DRV_CIPHER_BITLOCKER:
-		/* Load AES key */
-		hw_desc_init(&desc[*seq_size]);
-		set_cipher_mode(&desc[*seq_size], cipher_mode);
-		set_cipher_config0(&desc[*seq_size], direction);
-		if (cc_is_hw_key(tfm)) {
-			set_hw_crypto_key(&desc[*seq_size],
-					  ctx_p->hw.key1_slot);
-		} else {
-			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
-				     (key_len / 2), NS_BIT);
-		}
-		set_key_size_aes(&desc[*seq_size], (key_len / 2));
-		set_flow_mode(&desc[*seq_size], flow_mode);
-		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
-		(*seq_size)++;
+		break;
+	default:
+		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
+	}
+}
+
+
+static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
+				    struct cipher_req_ctx *req_ctx,
+				    unsigned int ivsize, unsigned int nbytes,
+				    struct cc_hw_desc desc[],
+				    unsigned int *seq_size)
+{
+	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
+	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
+	int cipher_mode = ctx_p->cipher_mode;
+	int flow_mode = ctx_p->flow_mode;
+	int direction = req_ctx->gen_ctx.op_type;
+	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
+	unsigned int key_len = (ctx_p->keylen / 2);
+	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
+	unsigned int key_offset = key_len;
+
+	switch (cipher_mode) {
+	case DRV_CIPHER_ECB:
+		break;
+	case DRV_CIPHER_CBC:
+	case DRV_CIPHER_CBC_CTS:
+	case DRV_CIPHER_CTR:
+	case DRV_CIPHER_OFB:
+		break;
+	case DRV_CIPHER_XTS:
+	case DRV_CIPHER_ESSIV:
+
+		if (cipher_mode == DRV_CIPHER_ESSIV)
+			key_len = SHA256_DIGEST_SIZE;
 
 		/* load XEX key */
 		hw_desc_init(&desc[*seq_size]);
 		set_cipher_mode(&desc[*seq_size], cipher_mode);
 		set_cipher_config0(&desc[*seq_size], direction);
-		if (cc_is_hw_key(tfm)) {
+		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
 			set_hw_crypto_key(&desc[*seq_size],
 					  ctx_p->hw.key2_slot);
 		} else {
 			set_din_type(&desc[*seq_size], DMA_DLLI,
-				     (key_dma_addr + (key_len / 2)),
-				     (key_len / 2), NS_BIT);
+				     (key_dma_addr + key_offset),
+				     key_len, NS_BIT);
 		}
-		set_xex_data_unit_size(&desc[*seq_size], du_size);
+		set_xex_data_unit_size(&desc[*seq_size], nbytes);
 		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
-		set_key_size_aes(&desc[*seq_size], (key_len / 2));
+		set_key_size_aes(&desc[*seq_size], key_len);
 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
 		(*seq_size)++;
 
-		/* Set state */
+		/* Load IV */
 		hw_desc_init(&desc[*seq_size]);
 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
 		set_cipher_mode(&desc[*seq_size], cipher_mode);
 		set_cipher_config0(&desc[*seq_size], direction);
-		set_key_size_aes(&desc[*seq_size], (key_len / 2));
+		set_key_size_aes(&desc[*seq_size], key_len);
 		set_flow_mode(&desc[*seq_size], flow_mode);
 		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
 			     CC_AES_BLOCK_SIZE, NS_BIT);
@@ -511,28 +668,141 @@
 	}
 }
 
-static void cc_setup_cipher_data(struct crypto_tfm *tfm,
-				 struct cipher_req_ctx *req_ctx,
-				 struct scatterlist *dst,
-				 struct scatterlist *src, unsigned int nbytes,
-				 void *areq, struct cc_hw_desc desc[],
-				 unsigned int *seq_size)
+static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
+{
+	switch (ctx_p->flow_mode) {
+	case S_DIN_to_AES:
+		return DIN_AES_DOUT;
+	case S_DIN_to_DES:
+		return DIN_DES_DOUT;
+	case S_DIN_to_SM4:
+		return DIN_SM4_DOUT;
+	default:
+		return ctx_p->flow_mode;
+	}
+}
+
+static void cc_setup_key_desc(struct crypto_tfm *tfm,
+			      struct cipher_req_ctx *req_ctx,
+			      unsigned int nbytes, struct cc_hw_desc desc[],
+			      unsigned int *seq_size)
 {
 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
-	unsigned int flow_mode = ctx_p->flow_mode;
+	int cipher_mode = ctx_p->cipher_mode;
+	int flow_mode = ctx_p->flow_mode;
+	int direction = req_ctx->gen_ctx.op_type;
+	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
+	unsigned int key_len = ctx_p->keylen;
+	unsigned int din_size;
 
-	switch (ctx_p->flow_mode) {
-	case S_DIN_to_AES:
-		flow_mode = DIN_AES_DOUT;
+	switch (cipher_mode) {
+	case DRV_CIPHER_CBC:
+	case DRV_CIPHER_CBC_CTS:
+	case DRV_CIPHER_CTR:
+	case DRV_CIPHER_OFB:
+	case DRV_CIPHER_ECB:
+		/* Load key */
+		hw_desc_init(&desc[*seq_size]);
+		set_cipher_mode(&desc[*seq_size], cipher_mode);
+		set_cipher_config0(&desc[*seq_size], direction);
+
+		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
+			/* We use the AES key size coding for all CPP algs */
+			set_key_size_aes(&desc[*seq_size], key_len);
+			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
+			flow_mode = cc_out_flow_mode(ctx_p);
+		} else {
+			if (flow_mode == S_DIN_to_AES) {
+				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
+					set_hw_crypto_key(&desc[*seq_size],
+							  ctx_p->hw.key1_slot);
+				} else {
+					/* CC_POLICY_UNPROTECTED_KEY
+					 * Invalid keys are filtered out in
+					 * sethkey()
+					 */
+					din_size = (key_len == 24) ?
+						AES_MAX_KEY_SIZE : key_len;
+
+					set_din_type(&desc[*seq_size], DMA_DLLI,
+						     key_dma_addr, din_size,
+						     NS_BIT);
+				}
+				set_key_size_aes(&desc[*seq_size], key_len);
+			} else {
+				/*des*/
+				set_din_type(&desc[*seq_size], DMA_DLLI,
+					     key_dma_addr, key_len, NS_BIT);
+				set_key_size_des(&desc[*seq_size], key_len);
+			}
+			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
+		}
+		set_flow_mode(&desc[*seq_size], flow_mode);
+		(*seq_size)++;
 		break;
-	case S_DIN_to_DES:
-		flow_mode = DIN_DES_DOUT;
+	case DRV_CIPHER_XTS:
+	case DRV_CIPHER_ESSIV:
+		/* Load AES key */
+		hw_desc_init(&desc[*seq_size]);
+		set_cipher_mode(&desc[*seq_size], cipher_mode);
+		set_cipher_config0(&desc[*seq_size], direction);
+		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
+			set_hw_crypto_key(&desc[*seq_size],
+					  ctx_p->hw.key1_slot);
+		} else {
+			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
+				     (key_len / 2), NS_BIT);
+		}
+		set_key_size_aes(&desc[*seq_size], (key_len / 2));
+		set_flow_mode(&desc[*seq_size], flow_mode);
+		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
+		(*seq_size)++;
 		break;
 	default:
-		dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode);
-		return;
+		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
 	}
+}
+
+static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
+			       struct cipher_req_ctx *req_ctx,
+			       struct scatterlist *dst, struct scatterlist *src,
+			       unsigned int nbytes, void *areq,
+			       struct cc_hw_desc desc[], unsigned int *seq_size)
+{
+	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
+	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
+
+	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
+		/* bypass */
+		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
+			&req_ctx->mlli_params.mlli_dma_addr,
+			req_ctx->mlli_params.mlli_len,
+			ctx_p->drvdata->mlli_sram_addr);
+		hw_desc_init(&desc[*seq_size]);
+		set_din_type(&desc[*seq_size], DMA_DLLI,
+			     req_ctx->mlli_params.mlli_dma_addr,
+			     req_ctx->mlli_params.mlli_len, NS_BIT);
+		set_dout_sram(&desc[*seq_size],
+			      ctx_p->drvdata->mlli_sram_addr,
+			      req_ctx->mlli_params.mlli_len);
+		set_flow_mode(&desc[*seq_size], BYPASS);
+		(*seq_size)++;
+	}
+}
+
+static void cc_setup_flow_desc(struct crypto_tfm *tfm,
+			       struct cipher_req_ctx *req_ctx,
+			       struct scatterlist *dst, struct scatterlist *src,
+			       unsigned int nbytes, struct cc_hw_desc desc[],
+			       unsigned int *seq_size)
+{
+	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
+	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
+	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
+	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
+			  ctx_p->cipher_mode == DRV_CIPHER_ECB);
+
 	/* Process */
 	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
 		dev_dbg(dev, " data params addr %pad length 0x%X\n",
@@ -543,89 +813,42 @@
 		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
 			     nbytes, NS_BIT);
 		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
-			      nbytes, NS_BIT, (!areq ? 0 : 1));
-		if (areq)
+			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
+		if (last_desc)
 			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
 
 		set_flow_mode(&desc[*seq_size], flow_mode);
 		(*seq_size)++;
 	} else {
-		/* bypass */
-		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
-			&req_ctx->mlli_params.mlli_dma_addr,
-			req_ctx->mlli_params.mlli_len,
-			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
-		hw_desc_init(&desc[*seq_size]);
-		set_din_type(&desc[*seq_size], DMA_DLLI,
-			     req_ctx->mlli_params.mlli_dma_addr,
-			     req_ctx->mlli_params.mlli_len, NS_BIT);
-		set_dout_sram(&desc[*seq_size],
-			      ctx_p->drvdata->mlli_sram_addr,
-			      req_ctx->mlli_params.mlli_len);
-		set_flow_mode(&desc[*seq_size], BYPASS);
-		(*seq_size)++;
-
 		hw_desc_init(&desc[*seq_size]);
 		set_din_type(&desc[*seq_size], DMA_MLLI,
 			     ctx_p->drvdata->mlli_sram_addr,
 			     req_ctx->in_mlli_nents, NS_BIT);
 		if (req_ctx->out_nents == 0) {
 			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
-				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
-				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
+				ctx_p->drvdata->mlli_sram_addr,
+				ctx_p->drvdata->mlli_sram_addr);
 			set_dout_mlli(&desc[*seq_size],
 				      ctx_p->drvdata->mlli_sram_addr,
 				      req_ctx->in_mlli_nents, NS_BIT,
-				      (!areq ? 0 : 1));
+				      (!last_desc ? 0 : 1));
 		} else {
 			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
-				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
-				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
+				ctx_p->drvdata->mlli_sram_addr,
+				ctx_p->drvdata->mlli_sram_addr +
 				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
 			set_dout_mlli(&desc[*seq_size],
 				      (ctx_p->drvdata->mlli_sram_addr +
 				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
 				      req_ctx->out_mlli_nents, NS_BIT,
-				      (!areq ? 0 : 1));
+				      (!last_desc ? 0 : 1));
 		}
-		if (areq)
+		if (last_desc)
 			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
 
 		set_flow_mode(&desc[*seq_size], flow_mode);
 		(*seq_size)++;
-	}
-}
-
-/*
- * Update a CTR-AES 128 bit counter
- */
-static void cc_update_ctr(u8 *ctr, unsigned int increment)
-{
-	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
-	    IS_ALIGNED((unsigned long)ctr, 8)) {
-
-		__be64 *high_be = (__be64 *)ctr;
-		__be64 *low_be = high_be + 1;
-		u64 orig_low = __be64_to_cpu(*low_be);
-		u64 new_low = orig_low + (u64)increment;
-
-		*low_be = __cpu_to_be64(new_low);
-
-		if (new_low < orig_low)
-			*high_be = __cpu_to_be64(__be64_to_cpu(*high_be) + 1);
-	} else {
-		u8 *pos = (ctr + AES_BLOCK_SIZE);
-		u8 val;
-		unsigned int size;
-
-		for (; increment; increment--)
-			for (size = AES_BLOCK_SIZE; size; size--) {
-				val = *--pos + 1;
-				*pos = val;
-				if (val)
-					break;
-			}
 	}
 }
 
@@ -636,43 +859,14 @@
 	struct scatterlist *src = req->src;
 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
 	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
-	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
-	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
-	unsigned int len;
 
-	cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
-
-	switch (ctx_p->cipher_mode) {
-	case DRV_CIPHER_CBC:
-		/*
-		 * The crypto API expects us to set the req->iv to the last
-		 * ciphertext block. For encrypt, simply copy from the result.
-		 * For decrypt, we must copy from a saved buffer since this
-		 * could be an in-place decryption operation and the src is
-		 * lost by this point.
-		 */
-		if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) {
-			memcpy(req->iv, req_ctx->backup_info, ivsize);
-			kzfree(req_ctx->backup_info);
-		} else if (!err) {
-			len = req->cryptlen - ivsize;
-			scatterwalk_map_and_copy(req->iv, req->dst, len,
-						 ivsize, 0);
-		}
-		break;
-
-	case DRV_CIPHER_CTR:
-		/* Compute the counter of the last block */
-		len = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE;
-		cc_update_ctr((u8 *)req->iv, len);
-		break;
-
-	default:
-		break;
+	if (err != -EINPROGRESS) {
+		/* Not a BACKLOG notification */
+		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
+		memcpy(req->iv, req_ctx->iv, ivsize);
+		kfree_sensitive(req_ctx->iv);
 	}
-
-	kzfree(req_ctx->iv);
 
 	skcipher_request_complete(req, err);
 }
@@ -690,7 +884,7 @@
 	void *iv = req->iv;
 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
-	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
+	struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
 	struct cc_crypto_req cc_req = {};
 	int rc;
 	unsigned int seq_len = 0;
@@ -702,10 +896,8 @@
 
 	/* STAT_PHASE_0: Init and sanity checks */
 
-	/* TODO: check data length according to mode */
 	if (validate_data_size(ctx_p, nbytes)) {
-		dev_err(dev, "Unsupported data size %d.\n", nbytes);
-		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
+		dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
 		rc = -EINVAL;
 		goto exit_process;
 	}
@@ -713,6 +905,17 @@
 		/* No data to process is valid */
 		rc = 0;
 		goto exit_process;
+	}
+
+	if (ctx_p->fallback_on) {
+		struct skcipher_request *subreq = skcipher_request_ctx(req);
+
+		*subreq = *req;
+		skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm);
+		if (direction == DRV_CRYPTO_DIRECTION_ENCRYPT)
+			return crypto_skcipher_encrypt(subreq);
+		else
+			return crypto_skcipher_decrypt(subreq);
 	}
 
 	/* The IV we are handed may be allocated from the stack so
@@ -725,8 +928,15 @@
 	}
 
 	/* Setup request structure */
-	cc_req.user_cb = (void *)cc_cipher_complete;
-	cc_req.user_arg = (void *)req;
+	cc_req.user_cb = cc_cipher_complete;
+	cc_req.user_arg = req;
+
+	/* Setup CPP operation details */
+	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
+		cc_req.cpp.is_cpp = true;
+		cc_req.cpp.alg = ctx_p->cpp.alg;
+		cc_req.cpp.slot = ctx_p->cpp.slot;
+	}
 
 	/* Setup request context */
 	req_ctx->gen_ctx.op_type = direction;
@@ -742,11 +952,18 @@
 
 	/* STAT_PHASE_2: Create sequence */
 
-	/* Setup processing */
-	cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
+	/* Setup state (IV) */
+	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
+	/* Setup MLLI line, if needed */
+	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
+	/* Setup key */
+	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
+	/* Setup state (IV and XEX key) */
+	cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
 	/* Data processing */
-	cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc,
-			     &seq_len);
+	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
+	/* Read next IV */
+	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);
 
 	/* STAT_PHASE_3: Lock HW and push sequence */
 
@@ -761,8 +978,7 @@
 
 exit_process:
 	if (rc != -EINPROGRESS && rc != -EBUSY) {
-		kzfree(req_ctx->backup_info);
-		kzfree(req_ctx->iv);
+		kfree_sensitive(req_ctx->iv);
 	}
 
 	return rc;
@@ -779,30 +995,9 @@
 
 static int cc_cipher_decrypt(struct skcipher_request *req)
 {
-	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
-	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
-	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
-	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
-	gfp_t flags = cc_gfp_flags(&req->base);
-	unsigned int len;
 
 	memset(req_ctx, 0, sizeof(*req_ctx));
-
-	if ((ctx_p->cipher_mode == DRV_CIPHER_CBC) &&
-	    (req->cryptlen >= ivsize)) {
-
-		/* Allocate and save the last IV sized bytes of the source,
-		 * which will be lost in case of in-place decryption.
-		 */
-		req_ctx->backup_info = kzalloc(ivsize, flags);
-		if (!req_ctx->backup_info)
-			return -ENOMEM;
-
-		len = req->cryptlen - ivsize;
-		scatterwalk_map_and_copy(req_ctx->backup_info, req->src, len,
-					 ivsize, 0);
-	}
 
 	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
 }
@@ -812,7 +1007,7 @@
 	{
 		.name = "xts(paes)",
 		.driver_name = "xts-paes-ccree",
-		.blocksize = AES_BLOCK_SIZE,
+		.blocksize = 1,
 		.template_skcipher = {
 			.setkey = cc_cipher_sethkey,
 			.encrypt = cc_cipher_encrypt,
@@ -824,43 +1019,11 @@
 		.cipher_mode = DRV_CIPHER_XTS,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
+		.sec_func = true,
 	},
 	{
-		.name = "xts512(paes)",
-		.driver_name = "xts-paes-du512-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_sethkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = CC_HW_KEY_SIZE,
-			.max_keysize = CC_HW_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_XTS,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 512,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "xts4096(paes)",
-		.driver_name = "xts-paes-du4096-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_sethkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = CC_HW_KEY_SIZE,
-			.max_keysize = CC_HW_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_XTS,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 4096,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "essiv(paes)",
+		.name = "essiv(cbc(paes),sha256)",
 		.driver_name = "essiv-paes-ccree",
 		.blocksize = AES_BLOCK_SIZE,
 		.template_skcipher = {
@@ -874,90 +1037,8 @@
 		.cipher_mode = DRV_CIPHER_ESSIV,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "essiv512(paes)",
-		.driver_name = "essiv-paes-du512-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_sethkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = CC_HW_KEY_SIZE,
-			.max_keysize = CC_HW_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_ESSIV,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 512,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "essiv4096(paes)",
-		.driver_name = "essiv-paes-du4096-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_sethkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = CC_HW_KEY_SIZE,
-			.max_keysize = CC_HW_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_ESSIV,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 4096,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "bitlocker(paes)",
-		.driver_name = "bitlocker-paes-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_sethkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = CC_HW_KEY_SIZE,
-			.max_keysize = CC_HW_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_BITLOCKER,
-		.flow_mode = S_DIN_to_AES,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "bitlocker512(paes)",
-		.driver_name = "bitlocker-paes-du512-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_sethkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = CC_HW_KEY_SIZE,
-			.max_keysize = CC_HW_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_BITLOCKER,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 512,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "bitlocker4096(paes)",
-		.driver_name = "bitlocker-paes-du4096-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_sethkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = CC_HW_KEY_SIZE,
-			.max_keysize = CC_HW_KEY_SIZE,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_BITLOCKER,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 4096,
-		.min_hw_rev = CC_HW_REV_712,
+		.std_body = CC_STD_NIST,
+		.sec_func = true,
 	},
 	{
 		.name = "ecb(paes)",
@@ -974,6 +1055,8 @@
 		.cipher_mode = DRV_CIPHER_ECB,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_712,
+		.std_body = CC_STD_NIST,
+		.sec_func = true,
 	},
 	{
 		.name = "cbc(paes)",
@@ -990,6 +1073,8 @@
 		.cipher_mode = DRV_CIPHER_CBC,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_712,
+		.std_body = CC_STD_NIST,
+		.sec_func = true,
 	},
 	{
 		.name = "ofb(paes)",
@@ -1006,6 +1091,8 @@
 		.cipher_mode = DRV_CIPHER_OFB,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_712,
+		.std_body = CC_STD_NIST,
+		.sec_func = true,
 	},
 	{
 		.name = "cts(cbc(paes))",
@@ -1022,6 +1109,8 @@
 		.cipher_mode = DRV_CIPHER_CBC_CTS,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_712,
+		.std_body = CC_STD_NIST,
+		.sec_func = true,
 	},
 	{
 		.name = "ctr(paes)",
@@ -1038,11 +1127,17 @@
 		.cipher_mode = DRV_CIPHER_CTR,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_712,
+		.std_body = CC_STD_NIST,
+		.sec_func = true,
 	},
 	{
+		/* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
+		 * for the reason why this differs from the generic
+		 * implementation.
+		 */
 		.name = "xts(aes)",
 		.driver_name = "xts-aes-ccree",
-		.blocksize = AES_BLOCK_SIZE,
+		.blocksize = 1,
 		.template_skcipher = {
 			.setkey = cc_cipher_setkey,
 			.encrypt = cc_cipher_encrypt,
@@ -1054,140 +1149,24 @@
 		.cipher_mode = DRV_CIPHER_XTS,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
 	},
 	{
-		.name = "xts512(aes)",
-		.driver_name = "xts-aes-du512-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_setkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = AES_MIN_KEY_SIZE * 2,
-			.max_keysize = AES_MAX_KEY_SIZE * 2,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_XTS,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 512,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "xts4096(aes)",
-		.driver_name = "xts-aes-du4096-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_setkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = AES_MIN_KEY_SIZE * 2,
-			.max_keysize = AES_MAX_KEY_SIZE * 2,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_XTS,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 4096,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "essiv(aes)",
+		.name = "essiv(cbc(aes),sha256)",
 		.driver_name = "essiv-aes-ccree",
 		.blocksize = AES_BLOCK_SIZE,
 		.template_skcipher = {
 			.setkey = cc_cipher_setkey,
 			.encrypt = cc_cipher_encrypt,
 			.decrypt = cc_cipher_decrypt,
-			.min_keysize = AES_MIN_KEY_SIZE * 2,
-			.max_keysize = AES_MAX_KEY_SIZE * 2,
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
 			.ivsize = AES_BLOCK_SIZE,
 			},
 		.cipher_mode = DRV_CIPHER_ESSIV,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "essiv512(aes)",
-		.driver_name = "essiv-aes-du512-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_setkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = AES_MIN_KEY_SIZE * 2,
-			.max_keysize = AES_MAX_KEY_SIZE * 2,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_ESSIV,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 512,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "essiv4096(aes)",
-		.driver_name = "essiv-aes-du4096-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_setkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = AES_MIN_KEY_SIZE * 2,
-			.max_keysize = AES_MAX_KEY_SIZE * 2,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_ESSIV,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 4096,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "bitlocker(aes)",
-		.driver_name = "bitlocker-aes-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_setkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = AES_MIN_KEY_SIZE * 2,
-			.max_keysize = AES_MAX_KEY_SIZE * 2,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_BITLOCKER,
-		.flow_mode = S_DIN_to_AES,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "bitlocker512(aes)",
-		.driver_name = "bitlocker-aes-du512-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_setkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = AES_MIN_KEY_SIZE * 2,
-			.max_keysize = AES_MAX_KEY_SIZE * 2,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_BITLOCKER,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 512,
-		.min_hw_rev = CC_HW_REV_712,
-	},
-	{
-		.name = "bitlocker4096(aes)",
-		.driver_name = "bitlocker-aes-du4096-ccree",
-		.blocksize = AES_BLOCK_SIZE,
-		.template_skcipher = {
-			.setkey = cc_cipher_setkey,
-			.encrypt = cc_cipher_encrypt,
-			.decrypt = cc_cipher_decrypt,
-			.min_keysize = AES_MIN_KEY_SIZE * 2,
-			.max_keysize = AES_MAX_KEY_SIZE * 2,
-			.ivsize = AES_BLOCK_SIZE,
-			},
-		.cipher_mode = DRV_CIPHER_BITLOCKER,
-		.flow_mode = S_DIN_to_AES,
-		.data_unit = 4096,
-		.min_hw_rev = CC_HW_REV_712,
+		.std_body = CC_STD_NIST,
 	},
 	{
 		.name = "ecb(aes)",
@@ -1204,6 +1183,7 @@
 		.cipher_mode = DRV_CIPHER_ECB,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
 	},
 	{
 		.name = "cbc(aes)",
@@ -1220,11 +1200,12 @@
 		.cipher_mode = DRV_CIPHER_CBC,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
 	},
 	{
 		.name = "ofb(aes)",
 		.driver_name = "ofb-aes-ccree",
-		.blocksize = AES_BLOCK_SIZE,
+		.blocksize = 1,
 		.template_skcipher = {
 			.setkey = cc_cipher_setkey,
 			.encrypt = cc_cipher_encrypt,
@@ -1236,6 +1217,7 @@
 		.cipher_mode = DRV_CIPHER_OFB,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
 	},
 	{
 		.name = "cts(cbc(aes))",
@@ -1252,6 +1234,7 @@
 		.cipher_mode = DRV_CIPHER_CBC_CTS,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
 	},
 	{
 		.name = "ctr(aes)",
@@ -1268,6 +1251,7 @@
 		.cipher_mode = DRV_CIPHER_CTR,
 		.flow_mode = S_DIN_to_AES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
 	},
 	{
 		.name = "cbc(des3_ede)",
@@ -1284,6 +1268,7 @@
 		.cipher_mode = DRV_CIPHER_CBC,
 		.flow_mode = S_DIN_to_DES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
 	},
 	{
 		.name = "ecb(des3_ede)",
@@ -1300,6 +1285,7 @@
 		.cipher_mode = DRV_CIPHER_ECB,
 		.flow_mode = S_DIN_to_DES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
 	},
 	{
 		.name = "cbc(des)",
@@ -1316,6 +1302,7 @@
 		.cipher_mode = DRV_CIPHER_CBC,
 		.flow_mode = S_DIN_to_DES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
 	},
 	{
 		.name = "ecb(des)",
@@ -1332,6 +1319,94 @@
 		.cipher_mode = DRV_CIPHER_ECB,
 		.flow_mode = S_DIN_to_DES,
 		.min_hw_rev = CC_HW_REV_630,
+		.std_body = CC_STD_NIST,
+	},
+	{
+		.name = "cbc(sm4)",
+		.driver_name = "cbc-sm4-ccree",
+		.blocksize = SM4_BLOCK_SIZE,
+		.template_skcipher = {
+			.setkey = cc_cipher_setkey,
+			.encrypt = cc_cipher_encrypt,
+			.decrypt = cc_cipher_decrypt,
+			.min_keysize = SM4_KEY_SIZE,
+			.max_keysize = SM4_KEY_SIZE,
+			.ivsize = SM4_BLOCK_SIZE,
+			},
+		.cipher_mode = DRV_CIPHER_CBC,
+		.flow_mode = S_DIN_to_SM4,
+		.min_hw_rev = CC_HW_REV_713,
+		.std_body = CC_STD_OSCCA,
+	},
+	{
+		.name = "ecb(sm4)",
+		.driver_name = "ecb-sm4-ccree",
+		.blocksize = SM4_BLOCK_SIZE,
+		.template_skcipher = {
+			.setkey = cc_cipher_setkey,
+			.encrypt = cc_cipher_encrypt,
+			.decrypt = cc_cipher_decrypt,
+			.min_keysize = SM4_KEY_SIZE,
+			.max_keysize = SM4_KEY_SIZE,
+			.ivsize = 0,
+			},
+		.cipher_mode = DRV_CIPHER_ECB,
+		.flow_mode = S_DIN_to_SM4,
+		.min_hw_rev = CC_HW_REV_713,
+		.std_body = CC_STD_OSCCA,
+	},
+	{
+		.name = "ctr(sm4)",
+		.driver_name = "ctr-sm4-ccree",
+		.blocksize = 1,
+		.template_skcipher = {
+			.setkey = cc_cipher_setkey,
+			.encrypt = cc_cipher_encrypt,
+			.decrypt = cc_cipher_decrypt,
+			.min_keysize = SM4_KEY_SIZE,
+			.max_keysize = SM4_KEY_SIZE,
+			.ivsize = SM4_BLOCK_SIZE,
+			},
+		.cipher_mode = DRV_CIPHER_CTR,
+		.flow_mode = S_DIN_to_SM4,
+		.min_hw_rev = CC_HW_REV_713,
+		.std_body = CC_STD_OSCCA,
+	},
+	{
+		.name = "cbc(psm4)",
+		.driver_name = "cbc-psm4-ccree",
+		.blocksize = SM4_BLOCK_SIZE,
+		.template_skcipher = {
+			.setkey = cc_cipher_sethkey,
+			.encrypt = cc_cipher_encrypt,
+			.decrypt = cc_cipher_decrypt,
+			.min_keysize = CC_HW_KEY_SIZE,
+			.max_keysize = CC_HW_KEY_SIZE,
+			.ivsize = SM4_BLOCK_SIZE,
+			},
+		.cipher_mode = DRV_CIPHER_CBC,
+		.flow_mode = S_DIN_to_SM4,
+		.min_hw_rev = CC_HW_REV_713,
+		.std_body = CC_STD_OSCCA,
+		.sec_func = true,
+	},
+	{
+		.name = "ctr(psm4)",
+		.driver_name = "ctr-psm4-ccree",
+		.blocksize = SM4_BLOCK_SIZE,
+		.template_skcipher = {
+			.setkey = cc_cipher_sethkey,
+			.encrypt = cc_cipher_encrypt,
+			.decrypt = cc_cipher_decrypt,
+			.min_keysize = CC_HW_KEY_SIZE,
+			.max_keysize = CC_HW_KEY_SIZE,
+			.ivsize = SM4_BLOCK_SIZE,
+			},
+		.cipher_mode = DRV_CIPHER_CTR,
+		.flow_mode = S_DIN_to_SM4,
+		.min_hw_rev = CC_HW_REV_713,
+		.std_body = CC_STD_OSCCA,
+		.sec_func = true,
 	},
 };
 
@@ -1341,7 +1416,7 @@
 	struct cc_crypto_alg *t_alg;
 	struct skcipher_alg *alg;
 
-	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
+	t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
 	if (!t_alg)
 		return ERR_PTR(-ENOMEM);
 
@@ -1364,7 +1439,6 @@
 
 	t_alg->cipher_mode = tmpl->cipher_mode;
 	t_alg->flow_mode = tmpl->flow_mode;
-	t_alg->data_unit = tmpl->data_unit;
 
 	return t_alg;
 }
@@ -1372,42 +1446,31 @@
 int cc_cipher_free(struct cc_drvdata *drvdata)
 {
 	struct cc_crypto_alg *t_alg, *n;
-	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;
 
-	if (cipher_handle) {
-		/* Remove registered algs */
-		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
-					 entry) {
-			crypto_unregister_skcipher(&t_alg->skcipher_alg);
-			list_del(&t_alg->entry);
-			kfree(t_alg);
-		}
-		kfree(cipher_handle);
-		drvdata->cipher_handle = NULL;
+	/* Remove registered algs */
+	list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
+		crypto_unregister_skcipher(&t_alg->skcipher_alg);
+		list_del(&t_alg->entry);
 	}
 	return 0;
 }
 
 int cc_cipher_alloc(struct cc_drvdata *drvdata)
 {
-	struct cc_cipher_handle *cipher_handle;
 	struct cc_crypto_alg *t_alg;
 	struct device *dev = drvdata_to_dev(drvdata);
 	int rc = -ENOMEM;
 	int alg;
 
-	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
-	if (!cipher_handle)
-		return -ENOMEM;
-
-	INIT_LIST_HEAD(&cipher_handle->alg_list);
-	drvdata->cipher_handle = cipher_handle;
+	INIT_LIST_HEAD(&drvdata->alg_list);
 
 	/* Linux crypto */
 	dev_dbg(dev, "Number of algorithms = %zu\n",
 		ARRAY_SIZE(skcipher_algs));
 	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
-		if (skcipher_algs[alg].min_hw_rev > drvdata->hw_rev)
+		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
+		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
+		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
 			continue;
 
 		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
@@ -1428,14 +1491,12 @@
 		if (rc) {
 			dev_err(dev, "%s alg registration failed\n",
 				t_alg->skcipher_alg.base.cra_driver_name);
-			kfree(t_alg);
 			goto fail0;
-		} else {
-			list_add_tail(&t_alg->entry,
-				      &cipher_handle->alg_list);
-			dev_dbg(dev, "Registered %s\n",
-				t_alg->skcipher_alg.base.cra_driver_name);
 		}
+
+		list_add_tail(&t_alg->entry, &drvdata->alg_list);
+		dev_dbg(dev, "Registered %s\n",
+			t_alg->skcipher_alg.base.cra_driver_name);
 	}
 	return 0;
 
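For orientation, the modes this table registers remain reachable from user space through the regular AF_ALG socket interface; the sketch below drives "cbc(aes)" (served by cbc-aes-ccree when that implementation wins algorithm priority). It is illustrative only: the file name, key, and test data are invented, error checking is omitted, and nothing in it is part of the patch.

/* afalg_cbc.c - hypothetical AF_ALG smoke test, not part of this patch.
 * Build: cc afalg_cbc.c
 */
#include <linux/if_alg.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <sys/socket.h>
#include <sys/uio.h>
#include <unistd.h>

#ifndef SOL_ALG
#define SOL_ALG 279
#endif

int main(void)
{
	struct sockaddr_alg sa = {
		.salg_family = AF_ALG,
		.salg_type   = "skcipher",
		.salg_name   = "cbc(aes)",
	};
	uint8_t key[16] = { 0 }, iv[16] = { 0 }, buf[16] = "ccree selftest!";
	char cbuf[CMSG_SPACE(sizeof(uint32_t)) +
		  CMSG_SPACE(offsetof(struct af_alg_iv, iv) + sizeof(iv))] = { 0 };
	struct iovec io = { .iov_base = buf, .iov_len = sizeof(buf) };
	struct msghdr msg = { .msg_iov = &io, .msg_iovlen = 1,
			      .msg_control = cbuf, .msg_controllen = sizeof(cbuf) };
	struct cmsghdr *cmsg;
	struct af_alg_iv *alg_iv;
	int tfm, req;

	/* bind a transform socket to the algorithm and set the key */
	tfm = socket(AF_ALG, SOCK_SEQPACKET, 0);
	bind(tfm, (struct sockaddr *)&sa, sizeof(sa));
	setsockopt(tfm, SOL_ALG, ALG_SET_KEY, key, sizeof(key));
	req = accept(tfm, NULL, 0);

	/* first cmsg: operation (encrypt) */
	cmsg = CMSG_FIRSTHDR(&msg);
	cmsg->cmsg_level = SOL_ALG;
	cmsg->cmsg_type = ALG_SET_OP;
	cmsg->cmsg_len = CMSG_LEN(sizeof(uint32_t));
	*(uint32_t *)CMSG_DATA(cmsg) = ALG_OP_ENCRYPT;

	/* second cmsg: the IV */
	cmsg = CMSG_NXTHDR(&msg, cmsg);
	cmsg->cmsg_level = SOL_ALG;
	cmsg->cmsg_type = ALG_SET_IV;
	cmsg->cmsg_len = CMSG_LEN(offsetof(struct af_alg_iv, iv) + sizeof(iv));
	alg_iv = (struct af_alg_iv *)CMSG_DATA(cmsg);
	alg_iv->ivlen = sizeof(iv);
	memcpy(alg_iv->iv, iv, sizeof(iv));

	sendmsg(req, &msg, 0);		/* one 16-byte block in */
	read(req, buf, sizeof(buf));	/* ciphertext out */

	close(req);
	close(tfm);
	return 0;
}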