2024-05-10 37f49e37ab4cb5d0bc4c60eb5c6d4dd57db767bb
kernel/drivers/crypto/inside-secure/safexcel_hash.c
@@ -5,9 +5,14 @@
  * Antoine Tenart <antoine.tenart@free-electrons.com>
  */
 
+#include <crypto/aes.h>
 #include <crypto/hmac.h>
 #include <crypto/md5.h>
 #include <crypto/sha.h>
+#include <crypto/sha3.h>
+#include <crypto/skcipher.h>
+#include <crypto/sm3.h>
+#include <crypto/internal/cipher.h>
 #include <linux/device.h>
 #include <linux/dma-mapping.h>
 #include <linux/dmapool.h>
@@ -16,12 +21,18 @@
 
 struct safexcel_ahash_ctx {
     struct safexcel_context base;
-    struct safexcel_crypto_priv *priv;
 
     u32 alg;
+    u8 key_sz;
+    bool cbcmac;
+    bool do_fallback;
+    bool fb_init_done;
+    bool fb_do_setkey;
 
-    u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
-    u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
+    struct crypto_cipher *kaes;
+    struct crypto_ahash *fback;
+    struct crypto_shash *shpre;
+    struct shash_desc *shdesc;
 };
 
 struct safexcel_ahash_req {
@@ -29,125 +40,190 @@
     bool finish;
     bool hmac;
     bool needs_inv;
+    bool hmac_zlen;
+    bool len_is_le;
+    bool not_first;
+    bool xcbcmac;
 
     int nents;
     dma_addr_t result_dma;
 
     u32 digest;
 
-    u8 state_sz;    /* expected sate size, only set once */
-    u32 state[SHA512_DIGEST_SIZE / sizeof(u32)] __aligned(sizeof(u32));
+    u8 state_sz;    /* expected state size, only set once */
+    u8 block_sz;    /* block size, only set once */
+    u8 digest_sz;   /* output digest size, only set once */
+    __le32 state[SHA3_512_BLOCK_SIZE /
+                 sizeof(__le32)] __aligned(sizeof(__le32));
 
-    u64 len[2];
-    u64 processed[2];
+    u64 len;
+    u64 processed;
 
-    u8 cache[SHA512_BLOCK_SIZE] __aligned(sizeof(u32));
+    u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
     dma_addr_t cache_dma;
     unsigned int cache_sz;
 
-    u8 cache_next[SHA512_BLOCK_SIZE] __aligned(sizeof(u32));
+    u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
 };
 
 static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
 {
-    u64 len, processed;
-
-    len = (0xffffffff * req->len[1]) + req->len[0];
-    processed = (0xffffffff * req->processed[1]) + req->processed[0];
-
-    return len - processed;
+    return req->len - req->processed;
 }
 
 static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
-                                u32 input_length, u32 result_length)
+                                u32 input_length, u32 result_length,
+                                bool cbcmac)
 {
     struct safexcel_token *token =
         (struct safexcel_token *)cdesc->control_data.token;
 
     token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
     token[0].packet_length = input_length;
-    token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
     token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
 
-    token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
-    token[1].packet_length = result_length;
-    token[1].stat = EIP197_TOKEN_STAT_LAST_HASH |
+    input_length &= 15;
+    if (unlikely(cbcmac && input_length)) {
+        token[0].stat = 0;
+        token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
+        token[1].packet_length = 16 - input_length;
+        token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
+        token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
+    } else {
+        token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
+        eip197_noop_token(&token[1]);
+    }
+
+    token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
+    token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
                     EIP197_TOKEN_STAT_LAST_PACKET;
-    token[1].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
+    token[2].packet_length = result_length;
+    token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                             EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
+
+    eip197_noop_token(&token[3]);
 }
 
 static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
                                      struct safexcel_ahash_req *req,
-                                     struct safexcel_command_desc *cdesc,
-                                     unsigned int digestsize)
+                                     struct safexcel_command_desc *cdesc)
 {
-    struct safexcel_crypto_priv *priv = ctx->priv;
-    int i;
+    struct safexcel_crypto_priv *priv = ctx->base.priv;
+    u64 count = 0;
 
-    cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_OUT;
-    cdesc->control_data.control0 |= ctx->alg;
-    cdesc->control_data.control0 |= req->digest;
+    cdesc->control_data.control0 = ctx->alg;
+    cdesc->control_data.control1 = 0;
 
-    if (req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) {
-        if (req->processed[0] || req->processed[1]) {
-            if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
-                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(5);
-            else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
-                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(6);
-            else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224 ||
-                     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
-                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(9);
-            else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384 ||
-                     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
-                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(17);
+    /*
+     * Copy the input digest if needed, and setup the context
+     * fields. Do this now as we need it to setup the first command
+     * descriptor.
+     */
+    if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
+        if (req->xcbcmac)
+            memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
+        else
+            memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
 
-            cdesc->control_data.control1 |= CONTEXT_CONTROL_DIGEST_CNT;
-        } else {
-            cdesc->control_data.control0 |= CONTEXT_CONTROL_RESTART_HASH;
-        }
+        if (!req->finish && req->xcbcmac)
+            cdesc->control_data.control0 |=
+                CONTEXT_CONTROL_DIGEST_XCM |
+                CONTEXT_CONTROL_TYPE_HASH_OUT |
+                CONTEXT_CONTROL_NO_FINISH_HASH |
+                CONTEXT_CONTROL_SIZE(req->state_sz /
+                                     sizeof(u32));
+        else
+            cdesc->control_data.control0 |=
+                CONTEXT_CONTROL_DIGEST_XCM |
+                CONTEXT_CONTROL_TYPE_HASH_OUT |
+                CONTEXT_CONTROL_SIZE(req->state_sz /
+                                     sizeof(u32));
+        return;
+    } else if (!req->processed) {
+        /* First - and possibly only - block of basic hash only */
+        if (req->finish)
+            cdesc->control_data.control0 |= req->digest |
+                CONTEXT_CONTROL_TYPE_HASH_OUT |
+                CONTEXT_CONTROL_RESTART_HASH |
+                /* ensure its not 0! */
+                CONTEXT_CONTROL_SIZE(1);
+        else
+            cdesc->control_data.control0 |= req->digest |
+                CONTEXT_CONTROL_TYPE_HASH_OUT |
+                CONTEXT_CONTROL_RESTART_HASH |
+                CONTEXT_CONTROL_NO_FINISH_HASH |
+                /* ensure its not 0! */
+                CONTEXT_CONTROL_SIZE(1);
+        return;
+    }
 
-        if (!req->finish)
-            cdesc->control_data.control0 |= CONTEXT_CONTROL_NO_FINISH_HASH;
+    /* Hash continuation or HMAC, setup (inner) digest from state */
+    memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
 
-    /*
-     * Copy the input digest if needed, and setup the context
-     * fields. Do this now as we need it to setup the first command
-     * descriptor.
-     */
-    if (req->processed[0] || req->processed[1]) {
-        for (i = 0; i < digestsize / sizeof(u32); i++)
-            ctx->base.ctxr->data[i] = cpu_to_le32(req->state[i]);
+    if (req->finish) {
+        /* Compute digest count for hash/HMAC finish operations */
+        if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
+            req->hmac_zlen || (req->processed != req->block_sz)) {
+            count = req->processed / EIP197_COUNTER_BLOCK_SIZE;
 
-        if (req->finish) {
-            u64 count = req->processed[0] / EIP197_COUNTER_BLOCK_SIZE;
-            count += ((0xffffffff / EIP197_COUNTER_BLOCK_SIZE) *
-                      req->processed[1]);
-
-            /* This is a haredware limitation, as the
-             * counter must fit into an u32. This represents
-             * a farily big amount of input data, so we
-             * shouldn't see this.
-             */
-            if (unlikely(count & 0xffff0000)) {
-                dev_warn(priv->dev,
-                         "Input data is too big\n");
-                return;
-            }
-
-            ctx->base.ctxr->data[i] = cpu_to_le32(count);
+            /* This is a hardware limitation, as the
+             * counter must fit into an u32. This represents
+             * a fairly big amount of input data, so we
+             * shouldn't see this.
+             */
+            if (unlikely(count & 0xffffffff00000000ULL)) {
+                dev_warn(priv->dev,
+                         "Input data is too big\n");
+                return;
            }
        }
-    } else if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC) {
-        cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(2 * req->state_sz / sizeof(u32));
 
-        memcpy(ctx->base.ctxr->data, ctx->ipad, req->state_sz);
-        memcpy(ctx->base.ctxr->data + req->state_sz / sizeof(u32),
-               ctx->opad, req->state_sz);
+        if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
+            /* Special case: zero length HMAC */
+            req->hmac_zlen ||
+            /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
+            (req->processed != req->block_sz)) {
+            /* Basic hash continue operation, need digest + cnt */
+            cdesc->control_data.control0 |=
+                CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
+                CONTEXT_CONTROL_TYPE_HASH_OUT |
+                CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+            /* For zero-len HMAC, don't finalize, already padded! */
+            if (req->hmac_zlen)
+                cdesc->control_data.control0 |=
+                    CONTEXT_CONTROL_NO_FINISH_HASH;
+            cdesc->control_data.control1 |=
+                CONTEXT_CONTROL_DIGEST_CNT;
+            ctx->base.ctxr->data[req->state_sz >> 2] =
+                cpu_to_le32(count);
+            req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+
+            /* Clear zero-length HMAC flag for next operation! */
+            req->hmac_zlen = false;
+        } else { /* HMAC */
+            /* Need outer digest for HMAC finalization */
+            memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
+                   &ctx->base.opad, req->state_sz);
+
+            /* Single pass HMAC - no digest count */
+            cdesc->control_data.control0 |=
+                CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
+                CONTEXT_CONTROL_TYPE_HASH_OUT |
+                CONTEXT_CONTROL_DIGEST_HMAC;
+        }
+    } else { /* Hash continuation, do not finish yet */
+        cdesc->control_data.control0 |=
+            CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
+            CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
+            CONTEXT_CONTROL_TYPE_HASH_OUT |
+            CONTEXT_CONTROL_NO_FINISH_HASH;
    }
 }
 
-static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
+static int safexcel_ahash_enqueue(struct ahash_request *areq);
+
+static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
+                                      int ring,
                                       struct crypto_async_request *async,
                                       bool *should_complete, int *ret)
 {
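
Reviewer note on the hunk above: with len/processed now plain u64 values, the finish-time digest count becomes a single division. A minimal stand-alone sketch of that arithmetic (illustrative code, not from the driver; it only assumes the driver's EIP197_COUNTER_BLOCK_SIZE of 64 bytes for the MD5/SHA family):

/* Hedged sketch: how the finish-time digest count is derived.
 * The engine wants the number of 64-byte compression blocks
 * consumed so far, and the context field storing it is a u32.
 */
#include <stdint.h>

#define EIP197_COUNTER_BLOCK_SIZE 64 /* matches the driver's constant */

static int digest_count(uint64_t processed_bytes, uint32_t *out)
{
    uint64_t count = processed_bytes / EIP197_COUNTER_BLOCK_SIZE;

    /* Same check as the patch: the count must fit a u32 */
    if (count & 0xffffffff00000000ULL)
        return -1; /* "Input data is too big" */

    *out = (uint32_t)count;
    return 0;
}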
@@ -155,6 +231,7 @@
     struct ahash_request *areq = ahash_request_cast(async);
     struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
     struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
+    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
     u64 cache_len;
 
     *ret = 0;
@@ -176,7 +253,7 @@
     }
 
     if (sreq->result_dma) {
-        dma_unmap_single(priv->dev, sreq->result_dma, sreq->state_sz,
+        dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
                          DMA_FROM_DEVICE);
         sreq->result_dma = 0;
     }
@@ -185,11 +262,41 @@
         dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
                          DMA_TO_DEVICE);
         sreq->cache_dma = 0;
+        sreq->cache_sz = 0;
     }
 
-    if (sreq->finish)
-        memcpy(areq->result, sreq->state,
-               crypto_ahash_digestsize(ahash));
+    if (sreq->finish) {
+        if (sreq->hmac &&
+            (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
+            /* Faking HMAC using hash - need to do outer hash */
+            memcpy(sreq->cache, sreq->state,
+                   crypto_ahash_digestsize(ahash));
+
+            memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);
+
+            sreq->len = sreq->block_sz +
+                        crypto_ahash_digestsize(ahash);
+            sreq->processed = sreq->block_sz;
+            sreq->hmac = 0;
+
+            if (priv->flags & EIP197_TRC_CACHE)
+                ctx->base.needs_inv = true;
+            areq->nbytes = 0;
+            safexcel_ahash_enqueue(areq);
+
+            *should_complete = false; /* Not done yet */
+            return 1;
+        }
+
+        if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
+                     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
+            /* Undo final XOR with 0xffffffff ...*/
+            *(__le32 *)areq->result = ~sreq->state[0];
+        } else {
+            memcpy(areq->result, sreq->state,
+                   crypto_ahash_digestsize(ahash));
+        }
+    }
 
     cache_len = safexcel_queued_len(sreq);
     if (cache_len)
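
The "faking HMAC using hash" path above leans on the standard two-pass HMAC structure, HMAC(k, m) = H((k ^ opad) || H((k ^ ipad) || m)), resumed from precomputed states. A hedged, self-contained toy illustration of that state-resume trick (FNV-1a stands in for the engine's SHA cores; all names here are illustrative):

/* Toy streaming "hash" with exportable state, used only to show the
 * structure: setkey saves the state after one block of k^ipad and
 * k^opad; the data path resumes from those states, and the outer
 * pass absorbs the inner digest - exactly what re-enqueueing the
 * request with state := opad, processed := one block achieves.
 */
#include <stdint.h>
#include <stdio.h>

#define BLK 8 /* toy block size */

static uint64_t h_update(uint64_t state, const uint8_t *p, size_t n)
{
    while (n--)
        state = (state ^ *p++) * 0x100000001b3ULL; /* FNV-1a step */
    return state;
}

int main(void)
{
    uint8_t key[BLK] = "secretk", ipad[BLK], opad[BLK], msg[] = "data";
    uint64_t istate, ostate, inner, mac;
    int i;

    for (i = 0; i < BLK; i++) {
        ipad[i] = key[i] ^ 0x36;
        opad[i] = key[i] ^ 0x5c;
    }
    /* setkey time: absorb one block of k^ipad / k^opad, save states */
    istate = h_update(0xcbf29ce484222325ULL, ipad, BLK);
    ostate = h_update(0xcbf29ce484222325ULL, opad, BLK);

    /* data path: inner hash resumes from istate */
    inner = h_update(istate, msg, sizeof(msg) - 1);
    /* "outer hash" pass: resume from ostate, absorb inner digest */
    mac = h_update(ostate, (uint8_t *)&inner, sizeof(inner));

    printf("toy hmac: %016llx\n", (unsigned long long)mac);
    return 0;
}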
@@ -204,49 +311,89 @@
                                int *commands, int *results)
 {
     struct ahash_request *areq = ahash_request_cast(async);
-    struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
-    struct safexcel_crypto_priv *priv = ctx->priv;
+    struct safexcel_crypto_priv *priv = ctx->base.priv;
     struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
     struct safexcel_result_desc *rdesc;
     struct scatterlist *sg;
-    int i, extra, n_cdesc = 0, ret = 0;
-    u64 queued, len, cache_len;
+    struct safexcel_token *dmmy;
+    int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
+    u64 queued, len;
 
-    queued = len = safexcel_queued_len(req);
-    if (queued <= crypto_ahash_blocksize(ahash))
+    queued = safexcel_queued_len(req);
+    if (queued <= HASH_CACHE_SIZE)
         cache_len = queued;
     else
         cache_len = queued - areq->nbytes;
 
-    if (!req->last_req) {
+    if (!req->finish && !req->last_req) {
         /* If this is not the last request and the queued data does not
-         * fit into full blocks, cache it for the next send() call.
+         * fit into full cache blocks, cache it for the next send call.
          */
-        extra = queued & (crypto_ahash_blocksize(ahash) - 1);
+        extra = queued & (HASH_CACHE_SIZE - 1);
+
+        /* If this is not the last request and the queued data
+         * is a multiple of a block, cache the last one for now.
+         */
         if (!extra)
-            /* If this is not the last request and the queued data
-             * is a multiple of a block, cache the last one for now.
-             */
-            extra = crypto_ahash_blocksize(ahash);
+            extra = HASH_CACHE_SIZE;
 
-        if (extra) {
-            sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
-                               req->cache_next, extra,
-                               areq->nbytes - extra);
+        sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                            req->cache_next, extra,
                            areq->nbytes - extra);
 
-            queued -= extra;
-            len -= extra;
+        queued -= extra;
 
-            if (!queued) {
-                *commands = 0;
-                *results = 0;
-                return 0;
-            }
+        if (!queued) {
+            *commands = 0;
+            *results = 0;
+            return 0;
        }
+
+        extra = 0;
     }
 
+    if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
+        if (unlikely(cache_len < AES_BLOCK_SIZE)) {
+            /*
+             * Cache contains less than 1 full block, complete.
+             */
+            extra = AES_BLOCK_SIZE - cache_len;
+            if (queued > cache_len) {
+                /* More data follows: borrow bytes */
+                u64 tmp = queued - cache_len;
+
+                skip = min_t(u64, tmp, extra);
+                sg_pcopy_to_buffer(areq->src,
+                                   sg_nents(areq->src),
+                                   req->cache + cache_len,
+                                   skip, 0);
+            }
+            extra -= skip;
+            memset(req->cache + cache_len + skip, 0, extra);
+            if (!ctx->cbcmac && extra) {
+                // 10- padding for XCBCMAC & CMAC
+                req->cache[cache_len + skip] = 0x80;
+                // HW will use K2 iso K3 - compensate!
+                for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
+                    u32 *cache = (void *)req->cache;
+                    u32 *ipad = ctx->base.ipad.word;
+                    u32 x;
+
+                    x = ipad[i] ^ ipad[i + 4];
+                    cache[i] ^= swab32(x);
+                }
+            }
+            cache_len = AES_BLOCK_SIZE;
+            queued = queued + extra;
+        }
+
+        /* XCBC continue: XOR previous result into 1st word */
+        crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
+    }
+
+    len = queued;
     /* Add a command descriptor for the cached data, if any */
     if (cache_len) {
         req->cache_dma = dma_map_single(priv->dev, req->cache,
@@ -257,8 +404,9 @@
         req->cache_sz = cache_len;
         first_cdesc = safexcel_add_cdesc(priv, ring, 1,
                                          (cache_len == len),
-                                         req->cache_dma, cache_len, len,
-                                         ctx->base.ctxr_dma);
+                                         req->cache_dma, cache_len,
+                                         len, ctx->base.ctxr_dma,
+                                         &dmmy);
         if (IS_ERR(first_cdesc)) {
             ret = PTR_ERR(first_cdesc);
             goto unmap_cache;
@@ -272,7 +420,8 @@
 
     /* Now handle the current ahash request buffer(s) */
     req->nents = dma_map_sg(priv->dev, areq->src,
-                            sg_nents_for_len(areq->src, areq->nbytes),
+                            sg_nents_for_len(areq->src,
                                             areq->nbytes),
                             DMA_TO_DEVICE);
     if (!req->nents) {
         ret = -ENOMEM;
@@ -282,35 +431,44 @@
     for_each_sg(areq->src, sg, req->nents, i) {
         int sglen = sg_dma_len(sg);
 
+        if (unlikely(sglen <= skip)) {
+            skip -= sglen;
+            continue;
+        }
+
         /* Do not overflow the request */
-        if (queued < sglen)
+        if ((queued + skip) <= sglen)
             sglen = queued;
+        else
+            sglen -= skip;
 
         cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
-                                   !(queued - sglen), sg_dma_address(sg),
-                                   sglen, len, ctx->base.ctxr_dma);
+                                   !(queued - sglen),
+                                   sg_dma_address(sg) + skip, sglen,
+                                   len, ctx->base.ctxr_dma, &dmmy);
         if (IS_ERR(cdesc)) {
             ret = PTR_ERR(cdesc);
             goto unmap_sg;
         }
-        n_cdesc++;
 
-        if (n_cdesc == 1)
+        if (!n_cdesc)
             first_cdesc = cdesc;
+        n_cdesc++;
 
         queued -= sglen;
         if (!queued)
             break;
+        skip = 0;
     }
 
 send_command:
     /* Setup the context options */
-    safexcel_context_control(ctx, req, first_cdesc, req->state_sz);
+    safexcel_context_control(ctx, req, first_cdesc);
 
     /* Add the token */
-    safexcel_hash_token(first_cdesc, len, req->state_sz);
+    safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
 
-    req->result_dma = dma_map_single(priv->dev, req->state, req->state_sz,
+    req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
                                      DMA_FROM_DEVICE);
     if (dma_mapping_error(priv->dev, req->result_dma)) {
         ret = -EINVAL;
@@ -319,7 +477,7 @@
 
     /* Add a result descriptor */
     rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
-                               req->state_sz);
+                               req->digest_sz);
     if (IS_ERR(rdesc)) {
         ret = PTR_ERR(rdesc);
         goto unmap_result;
@@ -327,19 +485,20 @@
 
     safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
 
-    req->processed[0] += len;
-    if (req->processed[0] < len)
-        req->processed[1]++;
+    req->processed += len - extra;
 
     *commands = n_cdesc;
     *results = 1;
     return 0;
 
 unmap_result:
-    dma_unmap_single(priv->dev, req->result_dma, req->state_sz,
+    dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
                      DMA_FROM_DEVICE);
 unmap_sg:
-    dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
+    if (req->nents) {
+        dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
+        req->nents = 0;
+    }
 cdesc_rollback:
     for (i = 0; i < n_cdesc; i++)
         safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
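
The XCBC/CMAC branch in the hunk above pads a partial final block with 10* padding before compensating for the engine's key choice. A minimal sketch of just the padding step (illustrative stand-alone code; the K2-vs-K3 XOR compensation is engine-specific and not reproduced):

/* 10* padding for XCBC-MAC / CMAC when the final block is partial:
 * append a single 0x80 byte (the "1" bit), then zero-fill the rest
 * of the AES block. Hedged sketch, not the driver's code.
 */
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

static void cmac_pad(uint8_t block[AES_BLOCK_SIZE], size_t used)
{
    if (used < AES_BLOCK_SIZE) {
        block[used] = 0x80;                    /* the "1" bit */
        memset(block + used + 1, 0,
               AES_BLOCK_SIZE - used - 1);     /* then zeroes */
    }
}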
@@ -347,31 +506,11 @@
     if (req->cache_dma) {
         dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
                          DMA_TO_DEVICE);
+        req->cache_dma = 0;
         req->cache_sz = 0;
     }
 
     return ret;
-}
-
-static inline bool safexcel_ahash_needs_inv_get(struct ahash_request *areq)
-{
-    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
-    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
-    unsigned int state_w_sz = req->state_sz / sizeof(u32);
-    u64 processed;
-    int i;
-
-    processed = req->processed[0] / EIP197_COUNTER_BLOCK_SIZE;
-    processed += (0xffffffff / EIP197_COUNTER_BLOCK_SIZE) * req->processed[1];
-
-    for (i = 0; i < state_w_sz; i++)
-        if (ctx->base.ctxr->data[i] != cpu_to_le32(req->state[i]))
-            return true;
-
-    if (ctx->base.ctxr->data[state_w_sz] != cpu_to_le32(processed))
-        return true;
-
-    return false;
 }
 
 static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
@@ -453,7 +592,7 @@
     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
     int ret;
 
-    ret = safexcel_invalidate_cache(async, ctx->priv,
+    ret = safexcel_invalidate_cache(async, ctx->base.priv,
                                     ctx->base.ctxr_dma, ring);
     if (unlikely(ret))
         return ret;
@@ -482,7 +621,7 @@
 static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
 {
     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
-    struct safexcel_crypto_priv *priv = ctx->priv;
+    struct safexcel_crypto_priv *priv = ctx->base.priv;
     EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
     struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
     struct safexcel_inv_result result = {};
@@ -524,28 +663,22 @@
 static int safexcel_ahash_cache(struct ahash_request *areq)
 {
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
-    struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
-    u64 queued, cache_len;
+    u64 cache_len;
 
-    /* queued: everything accepted by the driver which will be handled by
-     * the next send() calls.
-     * tot sz handled by update() - tot sz handled by send()
-     */
-    queued = safexcel_queued_len(req);
     /* cache_len: everything accepted by the driver but not sent yet,
      * tot sz handled by update() - last req sz - tot sz handled by send()
      */
-    cache_len = queued - areq->nbytes;
+    cache_len = safexcel_queued_len(req);
 
     /*
      * In case there isn't enough bytes to proceed (less than a
     * block size), cache the data until we have enough.
     */
-    if (cache_len + areq->nbytes <= crypto_ahash_blocksize(ahash)) {
+    if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
         sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                            req->cache + cache_len,
                            areq->nbytes, 0);
-        return areq->nbytes;
+        return 0;
     }
 
     /* We couldn't cache all the data */
@@ -556,21 +689,28 @@
 {
     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
-    struct safexcel_crypto_priv *priv = ctx->priv;
+    struct safexcel_crypto_priv *priv = ctx->base.priv;
     int ret, ring;
 
     req->needs_inv = false;
 
     if (ctx->base.ctxr) {
         if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
-            (req->processed[0] || req->processed[1]) &&
-            req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)
-            /* We're still setting needs_inv here, even though it is
+            /* invalidate for *any* non-XCBC continuation */
+            ((req->not_first && !req->xcbcmac) ||
+             /* invalidate if (i)digest changed */
+             memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
+             /* invalidate for HMAC finish with odigest changed */
+             (req->finish && req->hmac &&
+              memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
+                     &ctx->base.opad, req->state_sz))))
+            /*
+             * We're still setting needs_inv here, even though it is
             * cleared right away, because the needs_inv flag can be
             * set in other functions and we want to keep the same
             * logic.
             */
-            ctx->base.needs_inv = safexcel_ahash_needs_inv_get(areq);
+            ctx->base.needs_inv = true;
 
         if (ctx->base.needs_inv) {
             ctx->base.needs_inv = false;
@@ -584,6 +724,7 @@
         if (!ctx->base.ctxr)
             return -ENOMEM;
     }
+    req->not_first = true;
 
     ring = ctx->base.ring;
 
@@ -600,30 +741,23 @@
 static int safexcel_ahash_update(struct ahash_request *areq)
 {
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
-    struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
+    int ret;
 
     /* If the request is 0 length, do nothing */
     if (!areq->nbytes)
         return 0;
 
-    req->len[0] += areq->nbytes;
-    if (req->len[0] < areq->nbytes)
-        req->len[1]++;
+    /* Add request to the cache if it fits */
+    ret = safexcel_ahash_cache(areq);
 
-    safexcel_ahash_cache(areq);
+    /* Update total request length */
+    req->len += areq->nbytes;
 
-    /*
-     * We're not doing partial updates when performing an hmac request.
-     * Everything will be handled by the final() call.
+    /* If not all data could fit into the cache, go process the excess.
+     * Also go process immediately for an HMAC IV precompute, which
+     * will never be finished at all, but needs to be processed anyway.
     */
-    if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC)
-        return 0;
-
-    if (req->hmac)
-        return safexcel_ahash_enqueue(areq);
-
-    if (!req->last_req &&
-        safexcel_queued_len(req) > crypto_ahash_blocksize(ahash))
+    if ((ret && !req->finish) || req->last_req)
         return safexcel_ahash_enqueue(areq);
 
     return 0;
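
The reworked update() above only kicks the engine when the one-block cache overflows (or for requests that must complete). A hedged sketch of that decision, with the overflow signal standing in for the non-zero return of safexcel_ahash_cache() (illustrative names, stand-alone code):

#include <stdbool.h>
#include <stddef.h>

#define HASH_CACHE_SIZE 64 /* driver constant; one SHA-family block */

/* Returns true when the excess must be sent to the engine now;
 * finish/finup paths enqueue through their own code, so they do
 * not need the early flush.
 */
static bool must_flush(size_t cached, size_t incoming, bool finish)
{
    bool overflow = (cached + incoming > HASH_CACHE_SIZE);

    return overflow && !finish;
}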
@@ -634,11 +768,14 @@
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 
-    req->last_req = true;
     req->finish = true;
 
-    /* If we have an overall 0 length request */
-    if (!req->len[0] && !req->len[1] && !areq->nbytes) {
+    if (unlikely(!req->len && !areq->nbytes)) {
+        /*
+         * If we have an overall 0 length *hash* request:
+         * The HW cannot do 0 length hash, so we provide the correct
+         * result directly here.
+         */
         if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
             memcpy(areq->result, md5_zero_message_hash,
                    MD5_DIGEST_SIZE);
@@ -657,8 +794,74 @@
         else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
             memcpy(areq->result, sha512_zero_message_hash,
                    SHA512_DIGEST_SIZE);
+        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
+            memcpy(areq->result,
+                   EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
+        }
 
         return 0;
+    } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
+                        ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
+                        req->len == sizeof(u32) && !areq->nbytes)) {
+        /* Zero length CRC32 */
+        memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
+        return 0;
+    } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
+                        !areq->nbytes)) {
+        /* Zero length CBC MAC */
+        memset(areq->result, 0, AES_BLOCK_SIZE);
+        return 0;
+    } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
+                        !areq->nbytes)) {
+        /* Zero length (X)CBC/CMAC */
+        int i;
+
+        for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
+            u32 *result = (void *)areq->result;
+
+            /* K3 */
+            result[i] = swab32(ctx->base.ipad.word[i + 4]);
+        }
+        areq->result[0] ^= 0x80; // 10- padding
+        crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
+        return 0;
+    } else if (unlikely(req->hmac &&
+                        (req->len == req->block_sz) &&
+                        !areq->nbytes)) {
+        /*
+         * If we have an overall 0 length *HMAC* request:
+         * For HMAC, we need to finalize the inner digest
+         * and then perform the outer hash.
+         */
+
+        /* generate pad block in the cache */
+        /* start with a hash block of all zeroes */
+        memset(req->cache, 0, req->block_sz);
+        /* set the first byte to 0x80 to 'append a 1 bit' */
+        req->cache[0] = 0x80;
+        /* add the length in bits in the last 2 bytes */
+        if (req->len_is_le) {
+            /* Little endian length word (e.g. MD5) */
+            req->cache[req->block_sz-8] = (req->block_sz << 3) &
+                                          255;
+            req->cache[req->block_sz-7] = (req->block_sz >> 5);
+        } else {
+            /* Big endian length word (e.g. any SHA) */
+            req->cache[req->block_sz-2] = (req->block_sz >> 5);
+            req->cache[req->block_sz-1] = (req->block_sz << 3) &
+                                          255;
+        }
+
+        req->len += req->block_sz; /* plus 1 hash block */
+
+        /* Set special zero-length HMAC flag */
+        req->hmac_zlen = true;
+
+        /* Finalize HMAC */
+        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+    } else if (req->hmac) {
+        /* Finalize HMAC */
+        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
     }
 
     return safexcel_ahash_enqueue(areq);
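
The zero-length HMAC branch above hand-builds the Merkle-Damgard pad block for an input of exactly one absorbed block (the key ^ ipad block from setkey time). An equivalent stand-alone sketch (illustrative; a 64-byte block fits MD5/SHA-1/SHA-256, with MD5 using the little-endian length word):

#include <stdint.h>
#include <string.h>
#include <stdbool.h>

static void build_pad_block(uint8_t *blk, unsigned int block_sz,
                            bool len_is_le)
{
    unsigned int bits = block_sz << 3;  /* message length in bits */

    memset(blk, 0, block_sz);
    blk[0] = 0x80;                      /* append a single 1 bit */
    if (len_is_le) {                    /* MD5 style */
        blk[block_sz - 8] = bits & 255;
        blk[block_sz - 7] = bits >> 8;
    } else {                            /* SHA style */
        blk[block_sz - 2] = bits >> 8;
        blk[block_sz - 1] = bits & 255;
    }
}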
@@ -668,7 +871,6 @@
 {
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 
-    req->last_req = true;
     req->finish = true;
 
     safexcel_ahash_update(areq);
@@ -677,26 +879,22 @@
 
 static int safexcel_ahash_export(struct ahash_request *areq, void *out)
 {
-    struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
     struct safexcel_ahash_export_state *export = out;
 
-    export->len[0] = req->len[0];
-    export->len[1] = req->len[1];
-    export->processed[0] = req->processed[0];
-    export->processed[1] = req->processed[1];
+    export->len = req->len;
+    export->processed = req->processed;
 
     export->digest = req->digest;
 
     memcpy(export->state, req->state, req->state_sz);
-    memcpy(export->cache, req->cache, crypto_ahash_blocksize(ahash));
+    memcpy(export->cache, req->cache, HASH_CACHE_SIZE);
 
     return 0;
 }
 
 static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
 {
-    struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
     const struct safexcel_ahash_export_state *export = in;
     int ret;
@@ -705,14 +903,12 @@
     if (ret)
         return ret;
 
-    req->len[0] = export->len[0];
-    req->len[1] = export->len[1];
-    req->processed[0] = export->processed[0];
-    req->processed[1] = export->processed[1];
+    req->len = export->len;
+    req->processed = export->processed;
 
     req->digest = export->digest;
 
-    memcpy(req->cache, export->cache, crypto_ahash_blocksize(ahash));
+    memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
     memcpy(req->state, export->state, req->state_sz);
 
     return 0;
@@ -725,9 +921,10 @@
         container_of(__crypto_ahash_alg(tfm->__crt_alg),
                      struct safexcel_alg_template, alg.ahash);
 
-    ctx->priv = tmpl->priv;
+    ctx->base.priv = tmpl->priv;
     ctx->base.send = safexcel_ahash_send;
     ctx->base.handle_result = safexcel_handle_result;
+    ctx->fb_do_setkey = false;
 
     crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                              sizeof(struct safexcel_ahash_req));
@@ -741,15 +938,11 @@
 
     memset(req, 0, sizeof(*req));
 
-    req->state[0] = SHA1_H0;
-    req->state[1] = SHA1_H1;
-    req->state[2] = SHA1_H2;
-    req->state[3] = SHA1_H3;
-    req->state[4] = SHA1_H4;
-
     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
     req->state_sz = SHA1_DIGEST_SIZE;
+    req->digest_sz = SHA1_DIGEST_SIZE;
+    req->block_sz = SHA1_BLOCK_SIZE;
 
     return 0;
 }
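
The explicit SHA1_H0..SHA1_H4 preloads can go because first blocks now set CONTEXT_CONTROL_RESTART_HASH and the engine supplies the IV itself. For reference, the standard FIPS 180-4 SHA-1 initial hash value that the removed lines used to load:

#include <stdint.h>

/* SHA-1 IV per FIPS 180-4; identical to the kernel's SHA1_H0..H4 */
static const uint32_t sha1_iv[5] = {
    0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0,
};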
@@ -767,7 +960,7 @@
 static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
 {
     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
-    struct safexcel_crypto_priv *priv = ctx->priv;
+    struct safexcel_crypto_priv *priv = ctx->base.priv;
     int ret;
 
     /* context not allocated, skip invalidation */
@@ -786,7 +979,7 @@
 
 struct safexcel_alg_template safexcel_alg_sha1 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_SHA1,
     .alg.ahash = {
         .init = safexcel_sha1_init,
         .update = safexcel_ahash_update,
@@ -801,8 +994,9 @@
         .base = {
             .cra_name = "sha1",
             .cra_driver_name = "safexcel-sha1",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = SHA1_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -816,10 +1010,24 @@
 
 static int safexcel_hmac_sha1_init(struct ahash_request *areq)
 {
+    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 
-    safexcel_sha1_init(areq);
-    req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+    memset(req, 0, sizeof(*req));
+
+    /* Start from ipad precompute */
+    memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
+    /* Already processed the key^ipad part now! */
+    req->len = SHA1_BLOCK_SIZE;
+    req->processed = SHA1_BLOCK_SIZE;
+
+    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
+    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+    req->state_sz = SHA1_DIGEST_SIZE;
+    req->digest_sz = SHA1_DIGEST_SIZE;
+    req->block_sz = SHA1_BLOCK_SIZE;
+    req->hmac = true;
+
     return 0;
 }
 
@@ -878,8 +1086,7 @@
     }
 
     /* Avoid leaking */
-    memzero_explicit(keydup, keylen);
-    kfree(keydup);
+    kfree_sensitive(keydup);
 
     if (ret)
         return ret;
@@ -931,8 +1138,9 @@
     return crypto_ahash_export(areq, state);
 }
 
-int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
-                         void *istate, void *ostate)
+static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
                                  unsigned int keylen,
                                  void *istate, void *ostate)
 {
     struct ahash_request *areq;
     struct crypto_ahash *tfm;
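
__safexcel_hmac_setkey() derives the ipad/opad partial states by hashing one block of key ^ 0x36 and key ^ 0x5c through a software fallback. A hedged sketch of just the key-block construction those states are computed from (stand-alone illustration; keys longer than a block are first hashed down to digest size, which is not shown):

#include <stdint.h>
#include <string.h>

#define HMAC_BLOCK 64 /* SHA-1 / SHA-256 block size */

/* Build the two padded key blocks whose one-block partial digests
 * become ctx->base.ipad / ctx->base.opad.
 */
static void hmac_key_blocks(const uint8_t *key, size_t keylen,
                            uint8_t ipad[HMAC_BLOCK],
                            uint8_t opad[HMAC_BLOCK])
{
    size_t i;

    memset(ipad, 0x36, HMAC_BLOCK);
    memset(opad, 0x5c, HMAC_BLOCK);
    for (i = 0; i < keylen && i < HMAC_BLOCK; i++) {
        ipad[i] ^= key[i];
        opad[i] ^= key[i];
    }
}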
@@ -981,33 +1189,36 @@
     return ret;
 }
 
+int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
+                         unsigned int keylen, const char *alg,
+                         unsigned int state_sz)
+{
+    struct safexcel_crypto_priv *priv = base->priv;
+    struct safexcel_ahash_export_state istate, ostate;
+    int ret;
+
+    ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
+    if (ret)
+        return ret;
+
+    if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
+        (memcmp(&base->ipad, istate.state, state_sz) ||
+         memcmp(&base->opad, ostate.state, state_sz)))
+        base->needs_inv = true;
+
+    memcpy(&base->ipad, &istate.state, state_sz);
+    memcpy(&base->opad, &ostate.state, state_sz);
+
+    return 0;
+}
+
 static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
                                     unsigned int keylen, const char *alg,
                                     unsigned int state_sz)
 {
-    struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
-    struct safexcel_crypto_priv *priv = ctx->priv;
-    struct safexcel_ahash_export_state istate, ostate;
-    int ret, i;
+    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
 
-    ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
-    if (ret)
-        return ret;
-
-    if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr) {
-        for (i = 0; i < state_sz / sizeof(u32); i++) {
-            if (ctx->ipad[i] != le32_to_cpu(istate.state[i]) ||
-                ctx->opad[i] != le32_to_cpu(ostate.state[i])) {
-                ctx->base.needs_inv = true;
-                break;
-            }
-        }
-    }
-
-    memcpy(ctx->ipad, &istate.state, state_sz);
-    memcpy(ctx->opad, &ostate.state, state_sz);
-
-    return 0;
+    return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
 }
 
 static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
@@ -1019,7 +1230,7 @@
 
 struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_SHA1,
     .alg.ahash = {
         .init = safexcel_hmac_sha1_init,
         .update = safexcel_ahash_update,
@@ -1035,8 +1246,9 @@
         .base = {
             .cra_name = "hmac(sha1)",
             .cra_driver_name = "safexcel-hmac-sha1",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = SHA1_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1055,18 +1267,11 @@
 
     memset(req, 0, sizeof(*req));
 
-    req->state[0] = SHA256_H0;
-    req->state[1] = SHA256_H1;
-    req->state[2] = SHA256_H2;
-    req->state[3] = SHA256_H3;
-    req->state[4] = SHA256_H4;
-    req->state[5] = SHA256_H5;
-    req->state[6] = SHA256_H6;
-    req->state[7] = SHA256_H7;
-
     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
     req->state_sz = SHA256_DIGEST_SIZE;
+    req->digest_sz = SHA256_DIGEST_SIZE;
+    req->block_sz = SHA256_BLOCK_SIZE;
 
     return 0;
 }
@@ -1083,7 +1288,7 @@
 
 struct safexcel_alg_template safexcel_alg_sha256 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_SHA2_256,
     .alg.ahash = {
         .init = safexcel_sha256_init,
         .update = safexcel_ahash_update,
@@ -1098,8 +1303,9 @@
         .base = {
             .cra_name = "sha256",
             .cra_driver_name = "safexcel-sha256",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = SHA256_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1118,18 +1324,11 @@
 
     memset(req, 0, sizeof(*req));
 
-    req->state[0] = SHA224_H0;
-    req->state[1] = SHA224_H1;
-    req->state[2] = SHA224_H2;
-    req->state[3] = SHA224_H3;
-    req->state[4] = SHA224_H4;
-    req->state[5] = SHA224_H5;
-    req->state[6] = SHA224_H6;
-    req->state[7] = SHA224_H7;
-
     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
     req->state_sz = SHA256_DIGEST_SIZE;
+    req->digest_sz = SHA256_DIGEST_SIZE;
+    req->block_sz = SHA256_BLOCK_SIZE;
 
     return 0;
 }
@@ -1146,7 +1345,7 @@
 
 struct safexcel_alg_template safexcel_alg_sha224 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_SHA2_256,
     .alg.ahash = {
         .init = safexcel_sha224_init,
         .update = safexcel_ahash_update,
@@ -1161,8 +1360,9 @@
         .base = {
             .cra_name = "sha224",
             .cra_driver_name = "safexcel-sha224",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = SHA224_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1183,10 +1383,24 @@
 
 static int safexcel_hmac_sha224_init(struct ahash_request *areq)
 {
+    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 
-    safexcel_sha224_init(areq);
-    req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+    memset(req, 0, sizeof(*req));
+
+    /* Start from ipad precompute */
+    memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
+    /* Already processed the key^ipad part now! */
+    req->len = SHA256_BLOCK_SIZE;
+    req->processed = SHA256_BLOCK_SIZE;
+
+    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
+    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+    req->state_sz = SHA256_DIGEST_SIZE;
+    req->digest_sz = SHA256_DIGEST_SIZE;
+    req->block_sz = SHA256_BLOCK_SIZE;
+    req->hmac = true;
+
     return 0;
 }
 
@@ -1202,7 +1416,7 @@
 
 struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_SHA2_256,
     .alg.ahash = {
         .init = safexcel_hmac_sha224_init,
         .update = safexcel_ahash_update,
@@ -1218,8 +1432,9 @@
         .base = {
             .cra_name = "hmac(sha224)",
             .cra_driver_name = "safexcel-hmac-sha224",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = SHA224_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1240,10 +1455,24 @@
 
 static int safexcel_hmac_sha256_init(struct ahash_request *areq)
 {
+    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 
-    safexcel_sha256_init(areq);
-    req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+    memset(req, 0, sizeof(*req));
+
+    /* Start from ipad precompute */
+    memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
+    /* Already processed the key^ipad part now! */
+    req->len = SHA256_BLOCK_SIZE;
+    req->processed = SHA256_BLOCK_SIZE;
+
+    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
+    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+    req->state_sz = SHA256_DIGEST_SIZE;
+    req->digest_sz = SHA256_DIGEST_SIZE;
+    req->block_sz = SHA256_BLOCK_SIZE;
+    req->hmac = true;
+
     return 0;
 }
 
@@ -1259,7 +1488,7 @@
 
 struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_SHA2_256,
     .alg.ahash = {
         .init = safexcel_hmac_sha256_init,
         .update = safexcel_ahash_update,
@@ -1275,8 +1504,9 @@
         .base = {
             .cra_name = "hmac(sha256)",
             .cra_driver_name = "safexcel-hmac-sha256",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = SHA256_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1295,26 +1525,11 @@
 
     memset(req, 0, sizeof(*req));
 
-    req->state[0] = lower_32_bits(SHA512_H0);
-    req->state[1] = upper_32_bits(SHA512_H0);
-    req->state[2] = lower_32_bits(SHA512_H1);
-    req->state[3] = upper_32_bits(SHA512_H1);
-    req->state[4] = lower_32_bits(SHA512_H2);
-    req->state[5] = upper_32_bits(SHA512_H2);
-    req->state[6] = lower_32_bits(SHA512_H3);
-    req->state[7] = upper_32_bits(SHA512_H3);
-    req->state[8] = lower_32_bits(SHA512_H4);
-    req->state[9] = upper_32_bits(SHA512_H4);
-    req->state[10] = lower_32_bits(SHA512_H5);
-    req->state[11] = upper_32_bits(SHA512_H5);
-    req->state[12] = lower_32_bits(SHA512_H6);
-    req->state[13] = upper_32_bits(SHA512_H6);
-    req->state[14] = lower_32_bits(SHA512_H7);
-    req->state[15] = upper_32_bits(SHA512_H7);
-
     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
     req->state_sz = SHA512_DIGEST_SIZE;
+    req->digest_sz = SHA512_DIGEST_SIZE;
+    req->block_sz = SHA512_BLOCK_SIZE;
 
     return 0;
 }
@@ -1331,7 +1546,7 @@
 
 struct safexcel_alg_template safexcel_alg_sha512 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_SHA2_512,
     .alg.ahash = {
         .init = safexcel_sha512_init,
         .update = safexcel_ahash_update,
@@ -1346,8 +1561,9 @@
         .base = {
             .cra_name = "sha512",
             .cra_driver_name = "safexcel-sha512",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = SHA512_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1366,26 +1582,11 @@
 
     memset(req, 0, sizeof(*req));
 
-    req->state[0] = lower_32_bits(SHA384_H0);
-    req->state[1] = upper_32_bits(SHA384_H0);
-    req->state[2] = lower_32_bits(SHA384_H1);
-    req->state[3] = upper_32_bits(SHA384_H1);
-    req->state[4] = lower_32_bits(SHA384_H2);
-    req->state[5] = upper_32_bits(SHA384_H2);
-    req->state[6] = lower_32_bits(SHA384_H3);
-    req->state[7] = upper_32_bits(SHA384_H3);
-    req->state[8] = lower_32_bits(SHA384_H4);
-    req->state[9] = upper_32_bits(SHA384_H4);
-    req->state[10] = lower_32_bits(SHA384_H5);
-    req->state[11] = upper_32_bits(SHA384_H5);
-    req->state[12] = lower_32_bits(SHA384_H6);
-    req->state[13] = upper_32_bits(SHA384_H6);
-    req->state[14] = lower_32_bits(SHA384_H7);
-    req->state[15] = upper_32_bits(SHA384_H7);
-
     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
     req->state_sz = SHA512_DIGEST_SIZE;
+    req->digest_sz = SHA512_DIGEST_SIZE;
+    req->block_sz = SHA512_BLOCK_SIZE;
 
     return 0;
 }
@@ -1402,7 +1603,7 @@
 
 struct safexcel_alg_template safexcel_alg_sha384 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_SHA2_512,
     .alg.ahash = {
         .init = safexcel_sha384_init,
         .update = safexcel_ahash_update,
@@ -1417,8 +1618,9 @@
         .base = {
             .cra_name = "sha384",
             .cra_driver_name = "safexcel-sha384",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = SHA384_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1439,10 +1641,24 @@
 
 static int safexcel_hmac_sha512_init(struct ahash_request *areq)
 {
+    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 
-    safexcel_sha512_init(areq);
-    req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+    memset(req, 0, sizeof(*req));
+
+    /* Start from ipad precompute */
+    memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
+    /* Already processed the key^ipad part now! */
+    req->len = SHA512_BLOCK_SIZE;
+    req->processed = SHA512_BLOCK_SIZE;
+
+    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
+    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+    req->state_sz = SHA512_DIGEST_SIZE;
+    req->digest_sz = SHA512_DIGEST_SIZE;
+    req->block_sz = SHA512_BLOCK_SIZE;
+    req->hmac = true;
+
     return 0;
 }
 
@@ -1458,7 +1674,7 @@
 
 struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_SHA2_512,
     .alg.ahash = {
         .init = safexcel_hmac_sha512_init,
         .update = safexcel_ahash_update,
@@ -1474,8 +1690,9 @@
         .base = {
             .cra_name = "hmac(sha512)",
             .cra_driver_name = "safexcel-hmac-sha512",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = SHA512_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1496,10 +1713,24 @@
 
 static int safexcel_hmac_sha384_init(struct ahash_request *areq)
 {
+    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 
-    safexcel_sha384_init(areq);
-    req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+    memset(req, 0, sizeof(*req));
+
+    /* Start from ipad precompute */
+    memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
+    /* Already processed the key^ipad part now! */
+    req->len = SHA512_BLOCK_SIZE;
+    req->processed = SHA512_BLOCK_SIZE;
+
+    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
+    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+    req->state_sz = SHA512_DIGEST_SIZE;
+    req->digest_sz = SHA512_DIGEST_SIZE;
+    req->block_sz = SHA512_BLOCK_SIZE;
+    req->hmac = true;
+
     return 0;
 }
 
@@ -1515,7 +1746,7 @@
 
 struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_SHA2_512,
     .alg.ahash = {
         .init = safexcel_hmac_sha384_init,
         .update = safexcel_ahash_update,
@@ -1531,8 +1762,9 @@
         .base = {
             .cra_name = "hmac(sha384)",
             .cra_driver_name = "safexcel-hmac-sha384",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = SHA384_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1551,14 +1783,11 @@
 
     memset(req, 0, sizeof(*req));
 
-    req->state[0] = MD5_H0;
-    req->state[1] = MD5_H1;
-    req->state[2] = MD5_H2;
-    req->state[3] = MD5_H3;
-
     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
     req->state_sz = MD5_DIGEST_SIZE;
+    req->digest_sz = MD5_DIGEST_SIZE;
+    req->block_sz = MD5_HMAC_BLOCK_SIZE;
 
     return 0;
 }
@@ -1575,7 +1804,7 @@
 
 struct safexcel_alg_template safexcel_alg_md5 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_MD5,
     .alg.ahash = {
         .init = safexcel_md5_init,
         .update = safexcel_ahash_update,
@@ -1590,8 +1819,9 @@
         .base = {
             .cra_name = "md5",
             .cra_driver_name = "safexcel-md5",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1605,10 +1835,25 @@
 
 static int safexcel_hmac_md5_init(struct ahash_request *areq)
 {
+    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 
-    safexcel_md5_init(areq);
-    req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+    memset(req, 0, sizeof(*req));
+
+    /* Start from ipad precompute */
+    memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
+    /* Already processed the key^ipad part now! */
+    req->len = MD5_HMAC_BLOCK_SIZE;
+    req->processed = MD5_HMAC_BLOCK_SIZE;
+
+    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
+    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
+    req->state_sz = MD5_DIGEST_SIZE;
+    req->digest_sz = MD5_DIGEST_SIZE;
+    req->block_sz = MD5_HMAC_BLOCK_SIZE;
+    req->len_is_le = true; /* MD5 is little endian! ... */
+    req->hmac = true;
+
     return 0;
 }
 
@@ -1631,7 +1876,7 @@
 
 struct safexcel_alg_template safexcel_alg_hmac_md5 = {
     .type = SAFEXCEL_ALG_TYPE_AHASH,
-    .engines = EIP97IES | EIP197B | EIP197D,
+    .algo_mask = SAFEXCEL_ALG_MD5,
     .alg.ahash = {
         .init = safexcel_hmac_md5_init,
         .update = safexcel_ahash_update,
@@ -1647,8 +1892,9 @@
         .base = {
             .cra_name = "hmac(md5)",
             .cra_driver_name = "safexcel-hmac-md5",
-            .cra_priority = 300,
+            .cra_priority = SAFEXCEL_CRA_PRIORITY,
             .cra_flags = CRYPTO_ALG_ASYNC |
+                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
@@ -1659,3 +1905,1237 @@
         },
     },
 };
+
+static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
+{
+    struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
+    int ret = safexcel_ahash_cra_init(tfm);
+
+    /* Default 'key' is all zeroes */
+    memset(&ctx->base.ipad, 0, sizeof(u32));
+    return ret;
+}
+
+static int safexcel_crc32_init(struct ahash_request *areq)
+{
+    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+    memset(req, 0, sizeof(*req));
+
+    /* Start from loaded key */
+    req->state[0] = cpu_to_le32(~ctx->base.ipad.word[0]);
+    /* Set processed to non-zero to enable invalidation detection */
+    req->len = sizeof(u32);
+    req->processed = sizeof(u32);
+
+    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
+    req->digest = CONTEXT_CONTROL_DIGEST_XCM;
+    req->state_sz = sizeof(u32);
+    req->digest_sz = sizeof(u32);
+    req->block_sz = sizeof(u32);
+
+    return 0;
+}
+
+static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
+                                 unsigned int keylen)
+{
+    struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
+
+    if (keylen != sizeof(u32))
+        return -EINVAL;
+
+    memcpy(&ctx->base.ipad, key, sizeof(u32));
+    return 0;
+}
+
+static int safexcel_crc32_digest(struct ahash_request *areq)
+{
+    return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
+}
+
+struct safexcel_alg_template safexcel_alg_crc32 = {
+    .type = SAFEXCEL_ALG_TYPE_AHASH,
+    .algo_mask = 0,
+    .alg.ahash = {
+        .init = safexcel_crc32_init,
+        .update = safexcel_ahash_update,
+        .final = safexcel_ahash_final,
+        .finup = safexcel_ahash_finup,
+        .digest = safexcel_crc32_digest,
+        .setkey = safexcel_crc32_setkey,
+        .export = safexcel_ahash_export,
+        .import = safexcel_ahash_import,
+        .halg = {
+            .digestsize = sizeof(u32),
+            .statesize = sizeof(struct safexcel_ahash_export_state),
+            .base = {
+                .cra_name = "crc32",
+                .cra_driver_name = "safexcel-crc32",
+                .cra_priority = SAFEXCEL_CRA_PRIORITY,
+                .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
+                             CRYPTO_ALG_ASYNC |
+                             CRYPTO_ALG_ALLOCATES_MEMORY |
+                             CRYPTO_ALG_KERN_DRIVER_ONLY,
+                .cra_blocksize = 1,
+                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+                .cra_init = safexcel_crc32_cra_init,
+                .cra_exit = safexcel_ahash_cra_exit,
+                .cra_module = THIS_MODULE,
+            },
+        },
+    },
+};
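
On the CRC32 mapping above: the engine state holds the bit-inverted running CRC, so init loads ~key and the completion path re-inverts it (the "undo final XOR with 0xffffffff" hunk earlier). A stand-alone software reference using the same convention (standard reflected CRC32, illustrative code):

#include <stdint.h>
#include <stddef.h>

static uint32_t crc32_le(uint32_t seed, const uint8_t *p, size_t n)
{
    uint32_t crc = ~seed;   /* init: invert, like ~ipad.word[0] */
    int i;

    while (n--) {
        crc ^= *p++;
        for (i = 0; i < 8; i++)
            crc = (crc >> 1) ^ (0xedb88320 & -(crc & 1));
    }
    return ~crc;            /* finish: undo the final inversion */
}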
1990
+
1991
+static int safexcel_cbcmac_init(struct ahash_request *areq)
1992
+{
1993
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1994
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1995
+
1996
+ memset(req, 0, sizeof(*req));
1997
+
1998
+ /* Start from loaded keys */
1999
+ memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
2000
+ /* Set processed to non-zero to enable invalidation detection */
2001
+ req->len = AES_BLOCK_SIZE;
2002
+ req->processed = AES_BLOCK_SIZE;
2003
+
2004
+ req->digest = CONTEXT_CONTROL_DIGEST_XCM;
2005
+ req->state_sz = ctx->key_sz;
2006
+ req->digest_sz = AES_BLOCK_SIZE;
2007
+ req->block_sz = AES_BLOCK_SIZE;
2008
+ req->xcbcmac = true;
2009
+
2010
+ return 0;
2011
+}
2012
+
2013
+static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2014
+ unsigned int len)
2015
+{
2016
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2017
+ struct crypto_aes_ctx aes;
2018
+ int ret, i;
2019
+
2020
+ ret = aes_expandkey(&aes, key, len);
2021
+ if (ret)
2022
+ return ret;
2023
+
2024
+ memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
2025
+ for (i = 0; i < len / sizeof(u32); i++)
2026
+ ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);
2027
+
2028
+ if (len == AES_KEYSIZE_192) {
2029
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2030
+ ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2031
+ } else if (len == AES_KEYSIZE_256) {
2032
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2033
+ ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2034
+ } else {
2035
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2036
+ ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2037
+ }
2038
+ ctx->cbcmac = true;
2039
+
2040
+ memzero_explicit(&aes, sizeof(aes));
2041
+ return 0;
2042
+}
2043
+
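/*
 * Layout note (sketch): plain CBC-MAC is run on the engine's XCBC data
 * path with the two derived-subkey slots (the first 2 AES blocks of
 * ipad) left all-zero, and the raw AES key (the leading words of the
 * expanded schedule) stored big-endian in the slot XCBC would use for
 * K1, starting at word offset 8.
 */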
2044
+static int safexcel_cbcmac_digest(struct ahash_request *areq)
2045
+{
2046
+ return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2047
+}
2048
+
2049
+struct safexcel_alg_template safexcel_alg_cbcmac = {
2050
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2051
+ .algo_mask = 0,
2052
+ .alg.ahash = {
2053
+ .init = safexcel_cbcmac_init,
2054
+ .update = safexcel_ahash_update,
2055
+ .final = safexcel_ahash_final,
2056
+ .finup = safexcel_ahash_finup,
2057
+ .digest = safexcel_cbcmac_digest,
2058
+ .setkey = safexcel_cbcmac_setkey,
2059
+ .export = safexcel_ahash_export,
2060
+ .import = safexcel_ahash_import,
2061
+ .halg = {
2062
+ .digestsize = AES_BLOCK_SIZE,
2063
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2064
+ .base = {
2065
+ .cra_name = "cbcmac(aes)",
2066
+ .cra_driver_name = "safexcel-cbcmac-aes",
2067
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2068
+ .cra_flags = CRYPTO_ALG_ASYNC |
2069
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2070
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2071
+ .cra_blocksize = 1,
2072
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2073
+ .cra_init = safexcel_ahash_cra_init,
2074
+ .cra_exit = safexcel_ahash_cra_exit,
2075
+ .cra_module = THIS_MODULE,
2076
+ },
2077
+ },
2078
+ },
2079
+};
2080
+
2081
+static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2082
+ unsigned int len)
2083
+{
2084
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2085
+ struct crypto_aes_ctx aes;
2086
+ u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2087
+ int ret, i;
2088
+
2089
+ ret = aes_expandkey(&aes, key, len);
2090
+ if (ret)
2091
+ return ret;
2092
+
2093
+ /* precompute the XCBC key material */
2094
+ crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2095
+ crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2096
+ CRYPTO_TFM_REQ_MASK);
2097
+ ret = crypto_cipher_setkey(ctx->kaes, key, len);
2098
+ if (ret)
2099
+ return ret;
2100
+
2101
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2102
+ "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2103
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2104
+ "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2105
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2106
+ "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2107
+ for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2108
+ ctx->base.ipad.word[i] = swab32(key_tmp[i]);
2109
+
2110
+ crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2111
+ crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2112
+ CRYPTO_TFM_REQ_MASK);
2113
+ ret = crypto_cipher_setkey(ctx->kaes,
2114
+ (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2115
+ AES_MIN_KEY_SIZE);
2116
+ if (ret)
2117
+ return ret;
2118
+
2119
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2120
+ ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2121
+ ctx->cbcmac = false;
2122
+
2123
+ memzero_explicit(&aes, sizeof(aes));
2124
+ return 0;
2125
+}
2126
+
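/*
 * Context layout written by the setkey above (RFC 3566 naming, sketch):
 *   ipad = [K2 = E(K, 0x02..02)][K3 = E(K, 0x03..03)][K1 = E(K, 0x01..01)]
 * with every 32-bit word byte-swapped for the engine; ctx->kaes is then
 * re-keyed with K1 for later use by the driver.
 */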
2127
+static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2128
+{
2129
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2130
+
2131
+ safexcel_ahash_cra_init(tfm);
2132
+ ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2133
+ return PTR_ERR_OR_ZERO(ctx->kaes);
2134
+}
2135
+
2136
+static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2137
+{
2138
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2139
+
2140
+ crypto_free_cipher(ctx->kaes);
2141
+ safexcel_ahash_cra_exit(tfm);
2142
+}
2143
+
2144
+struct safexcel_alg_template safexcel_alg_xcbcmac = {
2145
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2146
+ .algo_mask = 0,
2147
+ .alg.ahash = {
2148
+ .init = safexcel_cbcmac_init,
2149
+ .update = safexcel_ahash_update,
2150
+ .final = safexcel_ahash_final,
2151
+ .finup = safexcel_ahash_finup,
2152
+ .digest = safexcel_cbcmac_digest,
2153
+ .setkey = safexcel_xcbcmac_setkey,
2154
+ .export = safexcel_ahash_export,
2155
+ .import = safexcel_ahash_import,
2156
+ .halg = {
2157
+ .digestsize = AES_BLOCK_SIZE,
2158
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2159
+ .base = {
2160
+ .cra_name = "xcbc(aes)",
2161
+ .cra_driver_name = "safexcel-xcbc-aes",
2162
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2163
+ .cra_flags = CRYPTO_ALG_ASYNC |
2164
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2165
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2166
+ .cra_blocksize = AES_BLOCK_SIZE,
2167
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2168
+ .cra_init = safexcel_xcbcmac_cra_init,
2169
+ .cra_exit = safexcel_xcbcmac_cra_exit,
2170
+ .cra_module = THIS_MODULE,
2171
+ },
2172
+ },
2173
+ },
2174
+};
2175
+
2176
+static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2177
+ unsigned int len)
2178
+{
2179
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2180
+ struct crypto_aes_ctx aes;
2181
+ __be64 consts[4];
2182
+ u64 _const[2];
2183
+ u8 msb_mask, gfmask;
2184
+ int ret, i;
2185
+
2186
+ ret = aes_expandkey(&aes, key, len);
2187
+ if (ret)
2188
+ return ret;
2189
+
2190
+ for (i = 0; i < len / sizeof(u32); i++)
2191
+ ctx->base.ipad.word[i + 8] = swab32(aes.key_enc[i]);
2192
+
2193
+ /* precompute the CMAC key material */
2194
+ crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2195
+ crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2196
+ CRYPTO_TFM_REQ_MASK);
2197
+ ret = crypto_cipher_setkey(ctx->kaes, key, len);
2198
+ if (ret)
2199
+ return ret;
2200
+
2201
+ /* code below borrowed from crypto/cmac.c */
2202
+ /* encrypt the zero block */
2203
+ memset(consts, 0, AES_BLOCK_SIZE);
2204
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2205
+
2206
+ gfmask = 0x87;
2207
+ _const[0] = be64_to_cpu(consts[1]);
2208
+ _const[1] = be64_to_cpu(consts[0]);
2209
+
2210
+ /* gf(2^128) multiply zero-ciphertext with u and u^2 */
2211
+ for (i = 0; i < 4; i += 2) {
2212
+ msb_mask = ((s64)_const[1] >> 63) & gfmask;
2213
+ _const[1] = (_const[1] << 1) | (_const[0] >> 63);
2214
+ _const[0] = (_const[0] << 1) ^ msb_mask;
2215
+
2216
+ consts[i + 0] = cpu_to_be64(_const[1]);
2217
+ consts[i + 1] = cpu_to_be64(_const[0]);
2218
+ }
2219
+ /* end of code borrowed from crypto/cmac.c */
2220
+
2221
+ for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2222
+ ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);
2223
+
2224
+ if (len == AES_KEYSIZE_192) {
2225
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2226
+ ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2227
+ } else if (len == AES_KEYSIZE_256) {
2228
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2229
+ ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2230
+ } else {
2231
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2232
+ ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2233
+ }
2234
+ ctx->cbcmac = false;
2235
+
2236
+ memzero_explicit(&aes, sizeof(aes));
2237
+ return 0;
2238
+}
2239
+
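/*
 * Equivalent of one pass of the doubling loop above (illustrative
 * helper, not part of the driver): a GF(2^128) multiply-by-x using the
 * CMAC reduction constant 0x87. Applied once to L = E(K, 0^128) it
 * yields K1 (stored in consts[0..1]); applied again, K2 (consts[2..3]).
 */
static inline void gf128_mul_x(u64 *hi, u64 *lo)
{
 u8 msb_mask = ((s64)*hi >> 63) & 0x87; /* reduce iff msb(hi) is set */

 *hi = (*hi << 1) | (*lo >> 63);
 *lo = (*lo << 1) ^ msb_mask;
}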
2240
+struct safexcel_alg_template safexcel_alg_cmac = {
2241
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2242
+ .algo_mask = 0,
2243
+ .alg.ahash = {
2244
+ .init = safexcel_cbcmac_init,
2245
+ .update = safexcel_ahash_update,
2246
+ .final = safexcel_ahash_final,
2247
+ .finup = safexcel_ahash_finup,
2248
+ .digest = safexcel_cbcmac_digest,
2249
+ .setkey = safexcel_cmac_setkey,
2250
+ .export = safexcel_ahash_export,
2251
+ .import = safexcel_ahash_import,
2252
+ .halg = {
2253
+ .digestsize = AES_BLOCK_SIZE,
2254
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2255
+ .base = {
2256
+ .cra_name = "cmac(aes)",
2257
+ .cra_driver_name = "safexcel-cmac-aes",
2258
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2259
+ .cra_flags = CRYPTO_ALG_ASYNC |
2260
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2261
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2262
+ .cra_blocksize = AES_BLOCK_SIZE,
2263
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2264
+ .cra_init = safexcel_xcbcmac_cra_init,
2265
+ .cra_exit = safexcel_xcbcmac_cra_exit,
2266
+ .cra_module = THIS_MODULE,
2267
+ },
2268
+ },
2269
+ },
2270
+};
2271
+
2272
+static int safexcel_sm3_init(struct ahash_request *areq)
2273
+{
2274
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2275
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2276
+
2277
+ memset(req, 0, sizeof(*req));
2278
+
2279
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2280
+ req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2281
+ req->state_sz = SM3_DIGEST_SIZE;
2282
+ req->digest_sz = SM3_DIGEST_SIZE;
2283
+ req->block_sz = SM3_BLOCK_SIZE;
2284
+
2285
+ return 0;
2286
+}
2287
+
2288
+static int safexcel_sm3_digest(struct ahash_request *areq)
2289
+{
2290
+ int ret = safexcel_sm3_init(areq);
2291
+
2292
+ if (ret)
2293
+ return ret;
2294
+
2295
+ return safexcel_ahash_finup(areq);
2296
+}
2297
+
2298
+struct safexcel_alg_template safexcel_alg_sm3 = {
2299
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2300
+ .algo_mask = SAFEXCEL_ALG_SM3,
2301
+ .alg.ahash = {
2302
+ .init = safexcel_sm3_init,
2303
+ .update = safexcel_ahash_update,
2304
+ .final = safexcel_ahash_final,
2305
+ .finup = safexcel_ahash_finup,
2306
+ .digest = safexcel_sm3_digest,
2307
+ .export = safexcel_ahash_export,
2308
+ .import = safexcel_ahash_import,
2309
+ .halg = {
2310
+ .digestsize = SM3_DIGEST_SIZE,
2311
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2312
+ .base = {
2313
+ .cra_name = "sm3",
2314
+ .cra_driver_name = "safexcel-sm3",
2315
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2316
+ .cra_flags = CRYPTO_ALG_ASYNC |
2317
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2318
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2319
+ .cra_blocksize = SM3_BLOCK_SIZE,
2320
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2321
+ .cra_init = safexcel_ahash_cra_init,
2322
+ .cra_exit = safexcel_ahash_cra_exit,
2323
+ .cra_module = THIS_MODULE,
2324
+ },
2325
+ },
2326
+ },
2327
+};
2328
+
2329
+static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2330
+ unsigned int keylen)
2331
+{
2332
+ return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2333
+ SM3_DIGEST_SIZE);
2334
+}
2335
+
2336
+static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2337
+{
2338
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2339
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2340
+
2341
+ memset(req, 0, sizeof(*req));
2342
+
2343
+ /* Start from ipad precompute */
2344
+ memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
2345
+ /* Already processed the key^ipad part now! */
2346
+ req->len = SM3_BLOCK_SIZE;
2347
+ req->processed = SM3_BLOCK_SIZE;
2348
+
2349
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2350
+ req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2351
+ req->state_sz = SM3_DIGEST_SIZE;
2352
+ req->digest_sz = SM3_DIGEST_SIZE;
2353
+ req->block_sz = SM3_BLOCK_SIZE;
2354
+ req->hmac = true;
2355
+
2356
+ return 0;
2357
+}
2358
+
2359
+static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2360
+{
2361
+ int ret = safexcel_hmac_sm3_init(areq);
2362
+
2363
+ if (ret)
2364
+ return ret;
2365
+
2366
+ return safexcel_ahash_finup(areq);
2367
+}
2368
+
2369
+struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2370
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2371
+ .algo_mask = SAFEXCEL_ALG_SM3,
2372
+ .alg.ahash = {
2373
+ .init = safexcel_hmac_sm3_init,
2374
+ .update = safexcel_ahash_update,
2375
+ .final = safexcel_ahash_final,
2376
+ .finup = safexcel_ahash_finup,
2377
+ .digest = safexcel_hmac_sm3_digest,
2378
+ .setkey = safexcel_hmac_sm3_setkey,
2379
+ .export = safexcel_ahash_export,
2380
+ .import = safexcel_ahash_import,
2381
+ .halg = {
2382
+ .digestsize = SM3_DIGEST_SIZE,
2383
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2384
+ .base = {
2385
+ .cra_name = "hmac(sm3)",
2386
+ .cra_driver_name = "safexcel-hmac-sm3",
2387
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2388
+ .cra_flags = CRYPTO_ALG_ASYNC |
2389
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2390
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2391
+ .cra_blocksize = SM3_BLOCK_SIZE,
2392
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2393
+ .cra_init = safexcel_ahash_cra_init,
2394
+ .cra_exit = safexcel_ahash_cra_exit,
2395
+ .cra_module = THIS_MODULE,
2396
+ },
2397
+ },
2398
+ },
2399
+};
2400
+
2401
+static int safexcel_sha3_224_init(struct ahash_request *areq)
2402
+{
2403
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2404
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2405
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2406
+
2407
+ memset(req, 0, sizeof(*req));
2408
+
2409
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2410
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2411
+ req->state_sz = SHA3_224_DIGEST_SIZE;
2412
+ req->digest_sz = SHA3_224_DIGEST_SIZE;
2413
+ req->block_sz = SHA3_224_BLOCK_SIZE;
2414
+ ctx->do_fallback = false;
2415
+ ctx->fb_init_done = false;
2416
+ return 0;
2417
+}
2418
+
2419
+static int safexcel_sha3_fbcheck(struct ahash_request *req)
2420
+{
2421
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2422
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2423
+ struct ahash_request *subreq = ahash_request_ctx(req);
2424
+ int ret = 0;
2425
+
2426
+ if (ctx->do_fallback) {
2427
+ ahash_request_set_tfm(subreq, ctx->fback);
2428
+ ahash_request_set_callback(subreq, req->base.flags,
2429
+ req->base.complete, req->base.data);
2430
+ ahash_request_set_crypt(subreq, req->src, req->result,
2431
+ req->nbytes);
2432
+ if (!ctx->fb_init_done) {
2433
+ if (ctx->fb_do_setkey) {
2434
+ /* Set fallback cipher HMAC key */
2435
+ u8 key[SHA3_224_BLOCK_SIZE];
2436
+
2437
+ memcpy(key, &ctx->base.ipad,
2438
+ crypto_ahash_blocksize(ctx->fback) / 2);
2439
+ memcpy(key +
2440
+ crypto_ahash_blocksize(ctx->fback) / 2,
2441
+ &ctx->base.opad,
2442
+ crypto_ahash_blocksize(ctx->fback) / 2);
2443
+ ret = crypto_ahash_setkey(ctx->fback, key,
2444
+ crypto_ahash_blocksize(ctx->fback));
2445
+ memzero_explicit(key,
2446
+ crypto_ahash_blocksize(ctx->fback));
2447
+ ctx->fb_do_setkey = false;
2448
+ }
2449
+ ret = ret ?: crypto_ahash_init(subreq);
2450
+ ctx->fb_init_done = true;
2451
+ }
2452
+ }
2453
+ return ret;
2454
+}
2455
+
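/*
 * Note on the key handling above (sketch): the HMAC key is kept split
 * across ipad and opad, one half-blocksize each, so the fallback tfm is
 * re-keyed with the two halves concatenated back into a single
 * blocksize-sized key before its first use after a setkey.
 */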
2456
+static int safexcel_sha3_update(struct ahash_request *req)
2457
+{
2458
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2459
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2460
+ struct ahash_request *subreq = ahash_request_ctx(req);
2461
+
2462
+ ctx->do_fallback = true;
2463
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2464
+}
2465
+
2466
+static int safexcel_sha3_final(struct ahash_request *req)
2467
+{
2468
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2469
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2470
+ struct ahash_request *subreq = ahash_request_ctx(req);
2471
+
2472
+ ctx->do_fallback = true;
2473
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2474
+}
2475
+
2476
+static int safexcel_sha3_finup(struct ahash_request *req)
2477
+{
2478
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2479
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2480
+ struct ahash_request *subreq = ahash_request_ctx(req);
2481
+
2482
+ ctx->do_fallback |= !req->nbytes;
2483
+ if (ctx->do_fallback)
2484
+ /* An update or export/import happened, or zero-length input: cannot use the HW */
2485
+ return safexcel_sha3_fbcheck(req) ?:
2486
+ crypto_ahash_finup(subreq);
2487
+ else
2488
+ return safexcel_ahash_finup(req);
2489
+}
2490
+
2491
+static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2492
+{
2493
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2494
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2495
+ struct ahash_request *subreq = ahash_request_ctx(req);
2496
+
2497
+ ctx->do_fallback = true;
2498
+ ctx->fb_init_done = false;
2499
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2500
+}
2501
+
2502
+static int safexcel_sha3_224_digest(struct ahash_request *req)
2503
+{
2504
+ if (req->nbytes)
2505
+ return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2506
+
2507
+ /* HW cannot do zero length hash, use fallback instead */
2508
+ return safexcel_sha3_digest_fallback(req);
2509
+}
2510
+
2511
+static int safexcel_sha3_export(struct ahash_request *req, void *out)
2512
+{
2513
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2514
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2515
+ struct ahash_request *subreq = ahash_request_ctx(req);
2516
+
2517
+ ctx->do_fallback = true;
2518
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2519
+}
2520
+
2521
+static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2522
+{
2523
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2524
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2525
+ struct ahash_request *subreq = ahash_request_ctx(req);
2526
+
2527
+ ctx->do_fallback = true;
2528
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2530
+}
2531
+
2532
+static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2533
+{
2534
+ struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2535
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2536
+
2537
+ safexcel_ahash_cra_init(tfm);
2538
+
2539
+ /* Allocate fallback implementation */
2540
+ ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2541
+ CRYPTO_ALG_ASYNC |
2542
+ CRYPTO_ALG_NEED_FALLBACK);
2543
+ if (IS_ERR(ctx->fback))
2544
+ return PTR_ERR(ctx->fback);
2545
+
2546
+ /* Update statesize from fallback algorithm! */
2547
+ crypto_hash_alg_common(ahash)->statesize =
2548
+ crypto_ahash_statesize(ctx->fback);
2549
+ crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
2550
+ sizeof(struct ahash_request) +
2551
+ crypto_ahash_reqsize(ctx->fback)));
2552
+ return 0;
2553
+}
2554
+
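/*
 * Note (sketch): the request context is sized as the maximum of the
 * driver's own state and a complete fallback request (header plus the
 * fallback's reqsize), since the same area backs both the HW path and
 * the fallback path.
 */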
2555
+static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2556
+{
2557
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2558
+
2559
+ crypto_free_ahash(ctx->fback);
2560
+ safexcel_ahash_cra_exit(tfm);
2561
+}
2562
+
2563
+struct safexcel_alg_template safexcel_alg_sha3_224 = {
2564
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2565
+ .algo_mask = SAFEXCEL_ALG_SHA3,
2566
+ .alg.ahash = {
2567
+ .init = safexcel_sha3_224_init,
2568
+ .update = safexcel_sha3_update,
2569
+ .final = safexcel_sha3_final,
2570
+ .finup = safexcel_sha3_finup,
2571
+ .digest = safexcel_sha3_224_digest,
2572
+ .export = safexcel_sha3_export,
2573
+ .import = safexcel_sha3_import,
2574
+ .halg = {
2575
+ .digestsize = SHA3_224_DIGEST_SIZE,
2576
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2577
+ .base = {
2578
+ .cra_name = "sha3-224",
2579
+ .cra_driver_name = "safexcel-sha3-224",
2580
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2581
+ .cra_flags = CRYPTO_ALG_ASYNC |
2582
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
2583
+ CRYPTO_ALG_NEED_FALLBACK,
2584
+ .cra_blocksize = SHA3_224_BLOCK_SIZE,
2585
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2586
+ .cra_init = safexcel_sha3_cra_init,
2587
+ .cra_exit = safexcel_sha3_cra_exit,
2588
+ .cra_module = THIS_MODULE,
2589
+ },
2590
+ },
2591
+ },
2592
+};
2593
+
2594
+static int safexcel_sha3_256_init(struct ahash_request *areq)
2595
+{
2596
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2597
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2598
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2599
+
2600
+ memset(req, 0, sizeof(*req));
2601
+
2602
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2603
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2604
+ req->state_sz = SHA3_256_DIGEST_SIZE;
2605
+ req->digest_sz = SHA3_256_DIGEST_SIZE;
2606
+ req->block_sz = SHA3_256_BLOCK_SIZE;
2607
+ ctx->do_fallback = false;
2608
+ ctx->fb_init_done = false;
2609
+ return 0;
2610
+}
2611
+
2612
+static int safexcel_sha3_256_digest(struct ahash_request *req)
2613
+{
2614
+ if (req->nbytes)
2615
+ return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2616
+
2617
+ /* HW cannot do zero length hash, use fallback instead */
2618
+ return safexcel_sha3_digest_fallback(req);
2619
+}
2620
+
2621
+struct safexcel_alg_template safexcel_alg_sha3_256 = {
2622
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2623
+ .algo_mask = SAFEXCEL_ALG_SHA3,
2624
+ .alg.ahash = {
2625
+ .init = safexcel_sha3_256_init,
2626
+ .update = safexcel_sha3_update,
2627
+ .final = safexcel_sha3_final,
2628
+ .finup = safexcel_sha3_finup,
2629
+ .digest = safexcel_sha3_256_digest,
2630
+ .export = safexcel_sha3_export,
2631
+ .import = safexcel_sha3_import,
2632
+ .halg = {
2633
+ .digestsize = SHA3_256_DIGEST_SIZE,
2634
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2635
+ .base = {
2636
+ .cra_name = "sha3-256",
2637
+ .cra_driver_name = "safexcel-sha3-256",
2638
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2639
+ .cra_flags = CRYPTO_ALG_ASYNC |
2640
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
2641
+ CRYPTO_ALG_NEED_FALLBACK,
2642
+ .cra_blocksize = SHA3_256_BLOCK_SIZE,
2643
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2644
+ .cra_init = safexcel_sha3_cra_init,
2645
+ .cra_exit = safexcel_sha3_cra_exit,
2646
+ .cra_module = THIS_MODULE,
2647
+ },
2648
+ },
2649
+ },
2650
+};
2651
+
2652
+static int safexcel_sha3_384_init(struct ahash_request *areq)
2653
+{
2654
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2655
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2656
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2657
+
2658
+ memset(req, 0, sizeof(*req));
2659
+
2660
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2661
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2662
+ req->state_sz = SHA3_384_DIGEST_SIZE;
2663
+ req->digest_sz = SHA3_384_DIGEST_SIZE;
2664
+ req->block_sz = SHA3_384_BLOCK_SIZE;
2665
+ ctx->do_fallback = false;
2666
+ ctx->fb_init_done = false;
2667
+ return 0;
2668
+}
2669
+
2670
+static int safexcel_sha3_384_digest(struct ahash_request *req)
2671
+{
2672
+ if (req->nbytes)
2673
+ return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2674
+
2675
+ /* HW cannot do zero length hash, use fallback instead */
2676
+ return safexcel_sha3_digest_fallback(req);
2677
+}
2678
+
2679
+struct safexcel_alg_template safexcel_alg_sha3_384 = {
2680
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2681
+ .algo_mask = SAFEXCEL_ALG_SHA3,
2682
+ .alg.ahash = {
2683
+ .init = safexcel_sha3_384_init,
2684
+ .update = safexcel_sha3_update,
2685
+ .final = safexcel_sha3_final,
2686
+ .finup = safexcel_sha3_finup,
2687
+ .digest = safexcel_sha3_384_digest,
2688
+ .export = safexcel_sha3_export,
2689
+ .import = safexcel_sha3_import,
2690
+ .halg = {
2691
+ .digestsize = SHA3_384_DIGEST_SIZE,
2692
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2693
+ .base = {
2694
+ .cra_name = "sha3-384",
2695
+ .cra_driver_name = "safexcel-sha3-384",
2696
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2697
+ .cra_flags = CRYPTO_ALG_ASYNC |
2698
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
2699
+ CRYPTO_ALG_NEED_FALLBACK,
2700
+ .cra_blocksize = SHA3_384_BLOCK_SIZE,
2701
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2702
+ .cra_init = safexcel_sha3_cra_init,
2703
+ .cra_exit = safexcel_sha3_cra_exit,
2704
+ .cra_module = THIS_MODULE,
2705
+ },
2706
+ },
2707
+ },
2708
+};
2709
+
2710
+static int safexcel_sha3_512_init(struct ahash_request *areq)
2711
+{
2712
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2713
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2714
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2715
+
2716
+ memset(req, 0, sizeof(*req));
2717
+
2718
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2719
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2720
+ req->state_sz = SHA3_512_DIGEST_SIZE;
2721
+ req->digest_sz = SHA3_512_DIGEST_SIZE;
2722
+ req->block_sz = SHA3_512_BLOCK_SIZE;
2723
+ ctx->do_fallback = false;
2724
+ ctx->fb_init_done = false;
2725
+ return 0;
2726
+}
2727
+
2728
+static int safexcel_sha3_512_digest(struct ahash_request *req)
2729
+{
2730
+ if (req->nbytes)
2731
+ return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2732
+
2733
+ /* HW cannot do zero length hash, use fallback instead */
2734
+ return safexcel_sha3_digest_fallback(req);
2735
+}
2736
+
2737
+struct safexcel_alg_template safexcel_alg_sha3_512 = {
2738
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2739
+ .algo_mask = SAFEXCEL_ALG_SHA3,
2740
+ .alg.ahash = {
2741
+ .init = safexcel_sha3_512_init,
2742
+ .update = safexcel_sha3_update,
2743
+ .final = safexcel_sha3_final,
2744
+ .finup = safexcel_sha3_finup,
2745
+ .digest = safexcel_sha3_512_digest,
2746
+ .export = safexcel_sha3_export,
2747
+ .import = safexcel_sha3_import,
2748
+ .halg = {
2749
+ .digestsize = SHA3_512_DIGEST_SIZE,
2750
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2751
+ .base = {
2752
+ .cra_name = "sha3-512",
2753
+ .cra_driver_name = "safexcel-sha3-512",
2754
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2755
+ .cra_flags = CRYPTO_ALG_ASYNC |
2756
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
2757
+ CRYPTO_ALG_NEED_FALLBACK,
2758
+ .cra_blocksize = SHA3_512_BLOCK_SIZE,
2759
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2760
+ .cra_init = safexcel_sha3_cra_init,
2761
+ .cra_exit = safexcel_sha3_cra_exit,
2762
+ .cra_module = THIS_MODULE,
2763
+ },
2764
+ },
2765
+ },
2766
+};
2767
+
2768
+static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2769
+{
2770
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2771
+ int ret;
2772
+
2773
+ ret = safexcel_sha3_cra_init(tfm);
2774
+ if (ret)
2775
+ return ret;
2776
+
2777
+ /* Allocate precalc basic digest implementation */
2778
+ ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2779
+ if (IS_ERR(ctx->shpre))
2780
+ return PTR_ERR(ctx->shpre);
2781
+
2782
+ ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2783
+ crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2784
+ if (!ctx->shdesc) {
2785
+ crypto_free_shash(ctx->shpre);
2786
+ return -ENOMEM;
2787
+ }
2788
+ ctx->shdesc->tfm = ctx->shpre;
2789
+ return 0;
2790
+}
2791
+
2792
+static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
2793
+{
2794
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2795
+
2796
+ crypto_free_ahash(ctx->fback);
2797
+ crypto_free_shash(ctx->shpre);
2798
+ kfree(ctx->shdesc);
2799
+ safexcel_ahash_cra_exit(tfm);
2800
+}
2801
+
2802
+static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
2803
+ unsigned int keylen)
2804
+{
2805
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2806
+ int ret = 0;
2807
+
2808
+ if (keylen > crypto_ahash_blocksize(tfm)) {
2809
+ /*
2810
+ * If the key is larger than the blocksize, then hash it
2811
+ * first using our fallback cipher
2812
+ */
2813
+ ret = crypto_shash_digest(ctx->shdesc, key, keylen,
2814
+ ctx->base.ipad.byte);
2815
+ keylen = crypto_shash_digestsize(ctx->shpre);
2816
+
2817
+ /*
2818
+ * If the digest is larger than half the blocksize, we need to
2819
+ * move the rest to opad due to the way our HMAC infra works.
2820
+ */
2821
+ if (keylen > crypto_ahash_blocksize(tfm) / 2)
2822
+ /* Buffers overlap, need to use memmove instead of memcpy! */
2823
+ memmove(&ctx->base.opad,
2824
+ ctx->base.ipad.byte +
2825
+ crypto_ahash_blocksize(tfm) / 2,
2826
+ keylen - crypto_ahash_blocksize(tfm) / 2);
2827
+ } else {
2828
+ /*
2829
+ * Copy the key to our ipad & opad buffers
2830
+ * Note that ipad and opad each contain one half of the key,
2831
+ * to match the existing HMAC driver infrastructure.
2832
+ */
2833
+ if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2834
+ memcpy(&ctx->base.ipad, key, keylen);
2835
+ } else {
2836
+ memcpy(&ctx->base.ipad, key,
2837
+ crypto_ahash_blocksize(tfm) / 2);
2838
+ memcpy(&ctx->base.opad,
2839
+ key + crypto_ahash_blocksize(tfm) / 2,
2840
+ keylen - crypto_ahash_blocksize(tfm) / 2);
2841
+ }
2842
+ }
2843
+
2844
+ /* Pad key with zeroes */
2845
+ if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2846
+ memset(ctx->base.ipad.byte + keylen, 0,
2847
+ crypto_ahash_blocksize(tfm) / 2 - keylen);
2848
+ memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
2849
+ } else {
2850
+ memset(ctx->base.opad.byte + keylen -
2851
+ crypto_ahash_blocksize(tfm) / 2, 0,
2852
+ crypto_ahash_blocksize(tfm) - keylen);
2853
+ }
2854
+
2855
+ /* If doing fallback, still need to set the new key! */
2856
+ ctx->fb_do_setkey = true;
2857
+ return ret;
2858
+}
2859
+
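/*
 * Resulting key layout (sketch): ipad holds the first blocksize/2 bytes
 * of the key and opad the remainder, both zero-padded; keys longer than
 * the blocksize are first shortened to digestsize via the shpre tfm and
 * then split the same way.
 */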
2860
+static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
2861
+{
2862
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2863
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2864
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2865
+
2866
+ memset(req, 0, sizeof(*req));
2867
+
2868
+ /* Copy (half of) the key */
2869
+ memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
2870
+ /* Start of HMAC should have len == processed == blocksize */
2871
+ req->len = SHA3_224_BLOCK_SIZE;
2872
+ req->processed = SHA3_224_BLOCK_SIZE;
2873
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2874
+ req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2875
+ req->state_sz = SHA3_224_BLOCK_SIZE / 2;
2876
+ req->digest_sz = SHA3_224_DIGEST_SIZE;
2877
+ req->block_sz = SHA3_224_BLOCK_SIZE;
2878
+ req->hmac = true;
2879
+ ctx->do_fallback = false;
2880
+ ctx->fb_init_done = false;
2881
+ return 0;
2882
+}
2883
+
2884
+static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2885
+{
2886
+ if (req->nbytes)
2887
+ return safexcel_hmac_sha3_224_init(req) ?:
2888
+ safexcel_ahash_finup(req);
2889
+
2890
+ /* HW cannot do zero length HMAC, use fallback instead */
2891
+ return safexcel_sha3_digest_fallback(req);
2892
+}
2893
+
2894
+static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
2895
+{
2896
+ return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
2897
+}
2898
+
2899
+struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
2900
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2901
+ .algo_mask = SAFEXCEL_ALG_SHA3,
2902
+ .alg.ahash = {
2903
+ .init = safexcel_hmac_sha3_224_init,
2904
+ .update = safexcel_sha3_update,
2905
+ .final = safexcel_sha3_final,
2906
+ .finup = safexcel_sha3_finup,
2907
+ .digest = safexcel_hmac_sha3_224_digest,
2908
+ .setkey = safexcel_hmac_sha3_setkey,
2909
+ .export = safexcel_sha3_export,
2910
+ .import = safexcel_sha3_import,
2911
+ .halg = {
2912
+ .digestsize = SHA3_224_DIGEST_SIZE,
2913
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2914
+ .base = {
2915
+ .cra_name = "hmac(sha3-224)",
2916
+ .cra_driver_name = "safexcel-hmac-sha3-224",
2917
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2918
+ .cra_flags = CRYPTO_ALG_ASYNC |
2919
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
2920
+ CRYPTO_ALG_NEED_FALLBACK,
2921
+ .cra_blocksize = SHA3_224_BLOCK_SIZE,
2922
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2923
+ .cra_init = safexcel_hmac_sha3_224_cra_init,
2924
+ .cra_exit = safexcel_hmac_sha3_cra_exit,
2925
+ .cra_module = THIS_MODULE,
2926
+ },
2927
+ },
2928
+ },
2929
+};
2930
+
2931
+static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2932
+{
2933
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2934
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2935
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2936
+
2937
+ memset(req, 0, sizeof(*req));
2938
+
2939
+ /* Copy (half of) the key */
2940
+ memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
2941
+ /* Start of HMAC should have len == processed == blocksize */
2942
+ req->len = SHA3_256_BLOCK_SIZE;
2943
+ req->processed = SHA3_256_BLOCK_SIZE;
2944
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2945
+ req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2946
+ req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2947
+ req->digest_sz = SHA3_256_DIGEST_SIZE;
2948
+ req->block_sz = SHA3_256_BLOCK_SIZE;
2949
+ req->hmac = true;
2950
+ ctx->do_fallback = false;
2951
+ ctx->fb_init_done = false;
2952
+ return 0;
2953
+}
2954
+
2955
+static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2956
+{
2957
+ if (req->nbytes)
2958
+ return safexcel_hmac_sha3_256_init(req) ?:
2959
+ safexcel_ahash_finup(req);
2960
+
2961
+ /* HW cannot do zero length HMAC, use fallback instead */
2962
+ return safexcel_sha3_digest_fallback(req);
2963
+}
2964
+
2965
+static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
2966
+{
2967
+ return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
2968
+}
2969
+
2970
+struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
2971
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
2972
+ .algo_mask = SAFEXCEL_ALG_SHA3,
2973
+ .alg.ahash = {
2974
+ .init = safexcel_hmac_sha3_256_init,
2975
+ .update = safexcel_sha3_update,
2976
+ .final = safexcel_sha3_final,
2977
+ .finup = safexcel_sha3_finup,
2978
+ .digest = safexcel_hmac_sha3_256_digest,
2979
+ .setkey = safexcel_hmac_sha3_setkey,
2980
+ .export = safexcel_sha3_export,
2981
+ .import = safexcel_sha3_import,
2982
+ .halg = {
2983
+ .digestsize = SHA3_256_DIGEST_SIZE,
2984
+ .statesize = sizeof(struct safexcel_ahash_export_state),
2985
+ .base = {
2986
+ .cra_name = "hmac(sha3-256)",
2987
+ .cra_driver_name = "safexcel-hmac-sha3-256",
2988
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2989
+ .cra_flags = CRYPTO_ALG_ASYNC |
2990
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
2991
+ CRYPTO_ALG_NEED_FALLBACK,
2992
+ .cra_blocksize = SHA3_256_BLOCK_SIZE,
2993
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2994
+ .cra_init = safexcel_hmac_sha3_256_cra_init,
2995
+ .cra_exit = safexcel_hmac_sha3_cra_exit,
2996
+ .cra_module = THIS_MODULE,
2997
+ },
2998
+ },
2999
+ },
3000
+};
3001
+
3002
+static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
3003
+{
3004
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3005
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3006
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3007
+
3008
+ memset(req, 0, sizeof(*req));
3009
+
3010
+ /* Copy (half of) the key */
3011
+ memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
3012
+ /* Start of HMAC should have len == processed == blocksize */
3013
+ req->len = SHA3_384_BLOCK_SIZE;
3014
+ req->processed = SHA3_384_BLOCK_SIZE;
3015
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
3016
+ req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3017
+ req->state_sz = SHA3_384_BLOCK_SIZE / 2;
3018
+ req->digest_sz = SHA3_384_DIGEST_SIZE;
3019
+ req->block_sz = SHA3_384_BLOCK_SIZE;
3020
+ req->hmac = true;
3021
+ ctx->do_fallback = false;
3022
+ ctx->fb_init_done = false;
3023
+ return 0;
3024
+}
3025
+
3026
+static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
3027
+{
3028
+ if (req->nbytes)
3029
+ return safexcel_hmac_sha3_384_init(req) ?:
3030
+ safexcel_ahash_finup(req);
3031
+
3032
+ /* HW cannot do zero length HMAC, use fallback instead */
3033
+ return safexcel_sha3_digest_fallback(req);
3034
+}
3035
+
3036
+static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
3037
+{
3038
+ return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
3039
+}
3040
+
3041
+struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
3042
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
3043
+ .algo_mask = SAFEXCEL_ALG_SHA3,
3044
+ .alg.ahash = {
3045
+ .init = safexcel_hmac_sha3_384_init,
3046
+ .update = safexcel_sha3_update,
3047
+ .final = safexcel_sha3_final,
3048
+ .finup = safexcel_sha3_finup,
3049
+ .digest = safexcel_hmac_sha3_384_digest,
3050
+ .setkey = safexcel_hmac_sha3_setkey,
3051
+ .export = safexcel_sha3_export,
3052
+ .import = safexcel_sha3_import,
3053
+ .halg = {
3054
+ .digestsize = SHA3_384_DIGEST_SIZE,
3055
+ .statesize = sizeof(struct safexcel_ahash_export_state),
3056
+ .base = {
3057
+ .cra_name = "hmac(sha3-384)",
3058
+ .cra_driver_name = "safexcel-hmac-sha3-384",
3059
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3060
+ .cra_flags = CRYPTO_ALG_ASYNC |
3061
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
3062
+ CRYPTO_ALG_NEED_FALLBACK,
3063
+ .cra_blocksize = SHA3_384_BLOCK_SIZE,
3064
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3065
+ .cra_init = safexcel_hmac_sha3_384_cra_init,
3066
+ .cra_exit = safexcel_hmac_sha3_cra_exit,
3067
+ .cra_module = THIS_MODULE,
3068
+ },
3069
+ },
3070
+ },
3071
+};
3072
+
3073
+static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
3074
+{
3075
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3076
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3077
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3078
+
3079
+ memset(req, 0, sizeof(*req));
3080
+
3081
+ /* Copy (half of) the key */
3082
+ memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
3083
+ /* Start of HMAC should have len == processed == blocksize */
3084
+ req->len = SHA3_512_BLOCK_SIZE;
3085
+ req->processed = SHA3_512_BLOCK_SIZE;
3086
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
3087
+ req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3088
+ req->state_sz = SHA3_512_BLOCK_SIZE / 2;
3089
+ req->digest_sz = SHA3_512_DIGEST_SIZE;
3090
+ req->block_sz = SHA3_512_BLOCK_SIZE;
3091
+ req->hmac = true;
3092
+ ctx->do_fallback = false;
3093
+ ctx->fb_init_done = false;
3094
+ return 0;
3095
+}
3096
+
3097
+static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
3098
+{
3099
+ if (req->nbytes)
3100
+ return safexcel_hmac_sha3_512_init(req) ?:
3101
+ safexcel_ahash_finup(req);
3102
+
3103
+ /* HW cannot do zero length HMAC, use fallback instead */
3104
+ return safexcel_sha3_digest_fallback(req);
3105
+}
3106
+
3107
+static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
3108
+{
3109
+ return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
3110
+}
3111
+struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
3112
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
3113
+ .algo_mask = SAFEXCEL_ALG_SHA3,
3114
+ .alg.ahash = {
3115
+ .init = safexcel_hmac_sha3_512_init,
3116
+ .update = safexcel_sha3_update,
3117
+ .final = safexcel_sha3_final,
3118
+ .finup = safexcel_sha3_finup,
3119
+ .digest = safexcel_hmac_sha3_512_digest,
3120
+ .setkey = safexcel_hmac_sha3_setkey,
3121
+ .export = safexcel_sha3_export,
3122
+ .import = safexcel_sha3_import,
3123
+ .halg = {
3124
+ .digestsize = SHA3_512_DIGEST_SIZE,
3125
+ .statesize = sizeof(struct safexcel_ahash_export_state),
3126
+ .base = {
3127
+ .cra_name = "hmac(sha3-512)",
3128
+ .cra_driver_name = "safexcel-hmac-sha3-512",
3129
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3130
+ .cra_flags = CRYPTO_ALG_ASYNC |
3131
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
3132
+ CRYPTO_ALG_NEED_FALLBACK,
3133
+ .cra_blocksize = SHA3_512_BLOCK_SIZE,
3134
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3135
+ .cra_init = safexcel_hmac_sha3_512_cra_init,
3136
+ .cra_exit = safexcel_hmac_sha3_cra_exit,
3137
+ .cra_module = THIS_MODULE,
3138
+ },
3139
+ },
3140
+ },
3141
+};