hc
2024-12-19 9370bb92b2d16684ee45cf24e879c93c509162da
kernel/drivers/crypto/inside-secure/safexcel_cipher.c
....@@ -5,15 +5,23 @@
55 * Antoine Tenart <antoine.tenart@free-electrons.com>
66 */
77
8
+#include <asm/unaligned.h>
89 #include <linux/device.h>
910 #include <linux/dma-mapping.h>
1011 #include <linux/dmapool.h>
11
-
1212 #include <crypto/aead.h>
1313 #include <crypto/aes.h>
1414 #include <crypto/authenc.h>
15
-#include <crypto/des.h>
15
+#include <crypto/chacha.h>
16
+#include <crypto/ctr.h>
17
+#include <crypto/internal/des.h>
18
+#include <crypto/gcm.h>
19
+#include <crypto/ghash.h>
20
+#include <crypto/poly1305.h>
1621 #include <crypto/sha.h>
22
+#include <crypto/sm3.h>
23
+#include <crypto/sm4.h>
24
+#include <crypto/xts.h>
1725 #include <crypto/skcipher.h>
1826 #include <crypto/internal/aead.h>
1927 #include <crypto/internal/skcipher.h>
....@@ -29,6 +37,8 @@
2937 SAFEXCEL_DES,
3038 SAFEXCEL_3DES,
3139 SAFEXCEL_AES,
40
+ SAFEXCEL_CHACHA20,
41
+ SAFEXCEL_SM4,
3242 };
3343
3444 struct safexcel_cipher_ctx {
....@@ -37,16 +47,23 @@
3747
3848 u32 mode;
3949 enum safexcel_cipher_alg alg;
40
- bool aead;
50
+ u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
51
+ u8 xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
52
+ u8 aadskip;
53
+ u8 blocksz;
54
+ u32 ivmask;
55
+ u32 ctrinit;
4156
42
- __le32 key[8];
43
- unsigned int key_len;
57
+ __le32 key[16];
58
+ u32 nonce;
59
+ unsigned int key_len, xts;
4460
4561 /* All the below is AEAD specific */
4662 u32 hash_alg;
4763 u32 state_sz;
48
- u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
49
- u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
64
+
65
+ struct crypto_cipher *hkaes;
66
+ struct crypto_aead *fback;
5067 };
5168
5269 struct safexcel_cipher_req {
....@@ -54,101 +71,301 @@
5471 /* Number of result descriptors associated to the request */
5572 unsigned int rdescs;
5673 bool needs_inv;
74
+ int nr_src, nr_dst;
5775 };
76
+
77
+static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
78
+ struct safexcel_command_desc *cdesc)
79
+{
80
+ if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
81
+ cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
82
+ /* 32 bit nonce */
83
+ cdesc->control_data.token[0] = ctx->nonce;
84
+ /* 64 bit IV part */
85
+ memcpy(&cdesc->control_data.token[1], iv, 8);
86
+ /* 32 bit counter, start at 0 or 1 (big endian!) */
87
+ cdesc->control_data.token[3] =
88
+ (__force u32)cpu_to_be32(ctx->ctrinit);
89
+ return 4;
90
+ }
91
+ if (ctx->alg == SAFEXCEL_CHACHA20) {
92
+ cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
93
+ /* 96 bit nonce part */
94
+ memcpy(&cdesc->control_data.token[0], &iv[4], 12);
95
+ /* 32 bit counter */
96
+ cdesc->control_data.token[3] = *(u32 *)iv;
97
+ return 4;
98
+ }
99
+
100
+ cdesc->control_data.options |= ctx->ivmask;
101
+ memcpy(cdesc->control_data.token, iv, ctx->blocksz);
102
+ return ctx->blocksz / sizeof(u32);
103
+}
58104
59105 static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
60106 struct safexcel_command_desc *cdesc,
107
+ struct safexcel_token *atoken,
61108 u32 length)
62109 {
63110 struct safexcel_token *token;
64
- unsigned offset = 0;
111
+ int ivlen;
65112
66
- if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
67
- switch (ctx->alg) {
68
- case SAFEXCEL_DES:
69
- offset = DES_BLOCK_SIZE / sizeof(u32);
70
- memcpy(cdesc->control_data.token, iv, DES_BLOCK_SIZE);
71
- cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
72
- break;
73
- case SAFEXCEL_3DES:
74
- offset = DES3_EDE_BLOCK_SIZE / sizeof(u32);
75
- memcpy(cdesc->control_data.token, iv, DES3_EDE_BLOCK_SIZE);
76
- cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
77
- break;
78
-
79
- case SAFEXCEL_AES:
80
- offset = AES_BLOCK_SIZE / sizeof(u32);
81
- memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
82
- cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
83
- break;
84
- }
113
+ ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
114
+ if (ivlen == 4) {
115
+ /* No space in cdesc, instruction moves to atoken */
116
+ cdesc->additional_cdata_size = 1;
117
+ token = atoken;
118
+ } else {
119
+ /* Everything fits in cdesc */
120
+ token = (struct safexcel_token *)(cdesc->control_data.token + 2);
121
+ /* Need to pad with NOP */
122
+ eip197_noop_token(&token[1]);
85123 }
86124
87
- token = (struct safexcel_token *)(cdesc->control_data.token + offset);
125
+ token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
126
+ token->packet_length = length;
127
+ token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
128
+ EIP197_TOKEN_STAT_LAST_HASH;
129
+ token->instructions = EIP197_TOKEN_INS_LAST |
130
+ EIP197_TOKEN_INS_TYPE_CRYPTO |
131
+ EIP197_TOKEN_INS_TYPE_OUTPUT;
132
+}
88133
89
- token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
90
- token[0].packet_length = length;
91
- token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
92
- EIP197_TOKEN_STAT_LAST_HASH;
93
- token[0].instructions = EIP197_TOKEN_INS_LAST |
94
- EIP197_TOKEN_INS_TYPE_CRYTO |
95
- EIP197_TOKEN_INS_TYPE_OUTPUT;
134
+static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
135
+ struct safexcel_command_desc *cdesc)
136
+{
137
+ if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
138
+ ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
139
+ /* 32 bit nonce */
140
+ cdesc->control_data.token[0] = ctx->nonce;
141
+ /* 64 bit IV part */
142
+ memcpy(&cdesc->control_data.token[1], iv, 8);
143
+ /* 32 bit counter, start at 0 or 1 (big endian!) */
144
+ cdesc->control_data.token[3] =
145
+ (__force u32)cpu_to_be32(ctx->ctrinit);
146
+ return;
147
+ }
148
+ if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
149
+ /* 96 bit IV part */
150
+ memcpy(&cdesc->control_data.token[0], iv, 12);
151
+ /* 32 bit counter, start at 0 or 1 (big endian!) */
152
+ cdesc->control_data.token[3] =
153
+ (__force u32)cpu_to_be32(ctx->ctrinit);
154
+ return;
155
+ }
156
+ /* CBC */
157
+ memcpy(cdesc->control_data.token, iv, ctx->blocksz);
96158 }
97159
98160 static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
99161 struct safexcel_command_desc *cdesc,
162
+ struct safexcel_token *atoken,
100163 enum safexcel_cipher_direction direction,
101164 u32 cryptlen, u32 assoclen, u32 digestsize)
102165 {
103
- struct safexcel_token *token;
104
- unsigned offset = 0;
166
+ struct safexcel_token *aadref;
167
+ int atoksize = 2; /* Start with minimum size */
168
+ int assocadj = assoclen - ctx->aadskip, aadalign;
105169
106
- if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
107
- offset = AES_BLOCK_SIZE / sizeof(u32);
108
- memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
109
-
110
- cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
111
- }
112
-
113
- token = (struct safexcel_token *)(cdesc->control_data.token + offset);
170
+ /* Always 4 dwords of embedded IV for AEAD modes */
171
+ cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
114172
115173 if (direction == SAFEXCEL_DECRYPT)
116174 cryptlen -= digestsize;
117175
118
- token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
119
- token[0].packet_length = assoclen;
120
- token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH |
121
- EIP197_TOKEN_INS_TYPE_OUTPUT;
176
+ if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
177
+ /* Construct IV block B0 for the CBC-MAC */
178
+ u8 *final_iv = (u8 *)cdesc->control_data.token;
179
+ u8 *cbcmaciv = (u8 *)&atoken[1];
180
+ __le32 *aadlen = (__le32 *)&atoken[5];
122181
123
- token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
124
- token[1].packet_length = cryptlen;
125
- token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
126
- token[1].instructions = EIP197_TOKEN_INS_LAST |
127
- EIP197_TOKEN_INS_TYPE_CRYTO |
128
- EIP197_TOKEN_INS_TYPE_HASH |
129
- EIP197_TOKEN_INS_TYPE_OUTPUT;
182
+ if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
183
+ /* Length + nonce */
184
+ cdesc->control_data.token[0] = ctx->nonce;
185
+ /* Fixup flags byte */
186
+ *(__le32 *)cbcmaciv =
187
+ cpu_to_le32(ctx->nonce |
188
+ ((assocadj > 0) << 6) |
189
+ ((digestsize - 2) << 2));
190
+ /* 64 bit IV part */
191
+ memcpy(&cdesc->control_data.token[1], iv, 8);
192
+ memcpy(cbcmaciv + 4, iv, 8);
193
+ /* Start counter at 0 */
194
+ cdesc->control_data.token[3] = 0;
195
+ /* Message length */
196
+ *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
197
+ } else {
198
+ /* Variable length IV part */
199
+ memcpy(final_iv, iv, 15 - iv[0]);
200
+ memcpy(cbcmaciv, iv, 15 - iv[0]);
201
+ /* Start variable length counter at 0 */
202
+ memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
203
+ memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
204
+ /* fixup flags byte */
205
+ cbcmaciv[0] |= ((assocadj > 0) << 6) |
206
+ ((digestsize - 2) << 2);
207
+ /* insert lower 2 bytes of message length */
208
+ cbcmaciv[14] = cryptlen >> 8;
209
+ cbcmaciv[15] = cryptlen & 255;
210
+ }
211
+
212
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
213
+ atoken->packet_length = AES_BLOCK_SIZE +
214
+ ((assocadj > 0) << 1);
215
+ atoken->stat = 0;
216
+ atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
217
+ EIP197_TOKEN_INS_TYPE_HASH;
218
+
219
+ if (likely(assocadj)) {
220
+ *aadlen = cpu_to_le32((assocadj >> 8) |
221
+ (assocadj & 255) << 8);
222
+ atoken += 6;
223
+ atoksize += 7;
224
+ } else {
225
+ atoken += 5;
226
+ atoksize += 6;
227
+ }
228
+
229
+ /* Process AAD data */
230
+ aadref = atoken;
231
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
232
+ atoken->packet_length = assocadj;
233
+ atoken->stat = 0;
234
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
235
+ atoken++;
236
+
237
+ /* For CCM only, align AAD data towards hash engine */
238
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
239
+ aadalign = (assocadj + 2) & 15;
240
+ atoken->packet_length = assocadj && aadalign ?
241
+ 16 - aadalign :
242
+ 0;
243
+ if (likely(cryptlen)) {
244
+ atoken->stat = 0;
245
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
246
+ } else {
247
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
248
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
249
+ EIP197_TOKEN_INS_TYPE_HASH;
250
+ }
251
+ } else {
252
+ safexcel_aead_iv(ctx, iv, cdesc);
253
+
254
+ /* Process AAD data */
255
+ aadref = atoken;
256
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
257
+ atoken->packet_length = assocadj;
258
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
259
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
260
+ EIP197_TOKEN_INS_TYPE_HASH;
261
+ }
262
+ atoken++;
263
+
264
+ if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
265
+ /* For ESP mode (and not GMAC), skip over the IV */
266
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
267
+ atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
268
+ atoken->stat = 0;
269
+ atoken->instructions = 0;
270
+ atoken++;
271
+ atoksize++;
272
+ } else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
273
+ direction == SAFEXCEL_DECRYPT)) {
274
+ /* Poly-chacha decryption needs a dummy NOP here ... */
275
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
276
+ atoken->packet_length = 16; /* According to Op Manual */
277
+ atoken->stat = 0;
278
+ atoken->instructions = 0;
279
+ atoken++;
280
+ atoksize++;
281
+ }
282
+
283
+ if (ctx->xcm) {
284
+ /* For GCM and CCM, obtain enc(Y0) */
285
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
286
+ atoken->packet_length = 0;
287
+ atoken->stat = 0;
288
+ atoken->instructions = AES_BLOCK_SIZE;
289
+ atoken++;
290
+
291
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
292
+ atoken->packet_length = AES_BLOCK_SIZE;
293
+ atoken->stat = 0;
294
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
295
+ EIP197_TOKEN_INS_TYPE_CRYPTO;
296
+ atoken++;
297
+ atoksize += 2;
298
+ }
299
+
300
+ if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
301
+ /* Fixup stat field for AAD direction instruction */
302
+ aadref->stat = 0;
303
+
304
+ /* Process crypto data */
305
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
306
+ atoken->packet_length = cryptlen;
307
+
308
+ if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
309
+ /* Fixup instruction field for AAD dir instruction */
310
+ aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
311
+
312
+ /* Do not send to crypt engine in case of GMAC */
313
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
314
+ EIP197_TOKEN_INS_TYPE_HASH |
315
+ EIP197_TOKEN_INS_TYPE_OUTPUT;
316
+ } else {
317
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
318
+ EIP197_TOKEN_INS_TYPE_CRYPTO |
319
+ EIP197_TOKEN_INS_TYPE_HASH |
320
+ EIP197_TOKEN_INS_TYPE_OUTPUT;
321
+ }
322
+
323
+ cryptlen &= 15;
324
+ if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
325
+ atoken->stat = 0;
326
+ /* For CCM only, pad crypto data to the hash engine */
327
+ atoken++;
328
+ atoksize++;
329
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
330
+ atoken->packet_length = 16 - cryptlen;
331
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
332
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
333
+ } else {
334
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
335
+ }
336
+ atoken++;
337
+ atoksize++;
338
+ }
130339
131340 if (direction == SAFEXCEL_ENCRYPT) {
132
- token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
133
- token[2].packet_length = digestsize;
134
- token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
135
- EIP197_TOKEN_STAT_LAST_PACKET;
136
- token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
137
- EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
341
+ /* Append ICV */
342
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
343
+ atoken->packet_length = digestsize;
344
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
345
+ EIP197_TOKEN_STAT_LAST_PACKET;
346
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
347
+ EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
138348 } else {
139
- token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
140
- token[2].packet_length = digestsize;
141
- token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
142
- EIP197_TOKEN_STAT_LAST_PACKET;
143
- token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
349
+ /* Extract ICV */
350
+ atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
351
+ atoken->packet_length = digestsize;
352
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
353
+ EIP197_TOKEN_STAT_LAST_PACKET;
354
+ atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
355
+ atoken++;
356
+ atoksize++;
144357
145
- token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
146
- token[3].packet_length = digestsize |
147
- EIP197_TOKEN_HASH_RESULT_VERIFY;
148
- token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
149
- EIP197_TOKEN_STAT_LAST_PACKET;
150
- token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
358
+ /* Verify ICV */
359
+ atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
360
+ atoken->packet_length = digestsize |
361
+ EIP197_TOKEN_HASH_RESULT_VERIFY;
362
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
363
+ EIP197_TOKEN_STAT_LAST_PACKET;
364
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
151365 }
366
+
367
+ /* Fixup length of the token in the command descriptor */
368
+ cdesc->additional_cdata_size = atoksize;
152369 }
153370
154371 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
....@@ -156,19 +373,17 @@
156373 {
157374 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
158375 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
159
- struct safexcel_crypto_priv *priv = ctx->priv;
376
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
160377 struct crypto_aes_ctx aes;
161378 int ret, i;
162379
163
- ret = crypto_aes_expand_key(&aes, key, len);
164
- if (ret) {
165
- crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
380
+ ret = aes_expandkey(&aes, key, len);
381
+ if (ret)
166382 return ret;
167
- }
168383
169384 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
170385 for (i = 0; i < len / sizeof(u32); i++) {
171
- if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
386
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
172387 ctx->base.needs_inv = true;
173388 break;
174389 }
....@@ -184,80 +399,107 @@
184399 return 0;
185400 }
186401
187
-static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
188
- unsigned int len)
402
+static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
403
+ unsigned int len)
189404 {
190405 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
191406 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
192
- struct safexcel_ahash_export_state istate, ostate;
193
- struct safexcel_crypto_priv *priv = ctx->priv;
407
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
194408 struct crypto_authenc_keys keys;
409
+ struct crypto_aes_ctx aes;
410
+ int err = -EINVAL, i;
411
+ const char *alg;
195412
196
- if (crypto_authenc_extractkeys(&keys, key, len) != 0)
413
+ if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
197414 goto badkey;
198415
199
- if (keys.enckeylen > sizeof(ctx->key))
200
- goto badkey;
416
+ if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
417
+ /* Must have at least space for the nonce here */
418
+ if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
419
+ goto badkey;
420
+ /* last 4 bytes of key are the nonce! */
421
+ ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
422
+ CTR_RFC3686_NONCE_SIZE);
423
+ /* exclude the nonce here */
424
+ keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
425
+ }
201426
202427 /* Encryption key */
203
- if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
204
- memcmp(ctx->key, keys.enckey, keys.enckeylen))
205
- ctx->base.needs_inv = true;
428
+ switch (ctx->alg) {
429
+ case SAFEXCEL_DES:
430
+ err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
431
+ if (unlikely(err))
432
+ goto badkey;
433
+ break;
434
+ case SAFEXCEL_3DES:
435
+ err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
436
+ if (unlikely(err))
437
+ goto badkey;
438
+ break;
439
+ case SAFEXCEL_AES:
440
+ err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
441
+ if (unlikely(err))
442
+ goto badkey;
443
+ break;
444
+ case SAFEXCEL_SM4:
445
+ if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
446
+ goto badkey;
447
+ break;
448
+ default:
449
+ dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
450
+ goto badkey;
451
+ }
452
+
453
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
454
+ for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
455
+ if (le32_to_cpu(ctx->key[i]) !=
456
+ ((u32 *)keys.enckey)[i]) {
457
+ ctx->base.needs_inv = true;
458
+ break;
459
+ }
460
+ }
461
+ }
206462
207463 /* Auth key */
208464 switch (ctx->hash_alg) {
209465 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
210
- if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
211
- keys.authkeylen, &istate, &ostate))
212
- goto badkey;
466
+ alg = "safexcel-sha1";
213467 break;
214468 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
215
- if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
216
- keys.authkeylen, &istate, &ostate))
217
- goto badkey;
469
+ alg = "safexcel-sha224";
218470 break;
219471 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
220
- if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
221
- keys.authkeylen, &istate, &ostate))
222
- goto badkey;
472
+ alg = "safexcel-sha256";
223473 break;
224474 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
225
- if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
226
- keys.authkeylen, &istate, &ostate))
227
- goto badkey;
475
+ alg = "safexcel-sha384";
228476 break;
229477 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
230
- if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
231
- keys.authkeylen, &istate, &ostate))
232
- goto badkey;
478
+ alg = "safexcel-sha512";
479
+ break;
480
+ case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
481
+ alg = "safexcel-sm3";
233482 break;
234483 default:
235484 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
236485 goto badkey;
237486 }
238487
239
- crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
240
- CRYPTO_TFM_RES_MASK);
241
-
242
- if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
243
- (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
244
- memcmp(ctx->opad, ostate.state, ctx->state_sz)))
245
- ctx->base.needs_inv = true;
488
+ if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
489
+ alg, ctx->state_sz))
490
+ goto badkey;
246491
247492 /* Now copy the keys into the context */
248
- memcpy(ctx->key, keys.enckey, keys.enckeylen);
493
+ for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
494
+ ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
249495 ctx->key_len = keys.enckeylen;
250
-
251
- memcpy(ctx->ipad, &istate.state, ctx->state_sz);
252
- memcpy(ctx->opad, &ostate.state, ctx->state_sz);
253496
254497 memzero_explicit(&keys, sizeof(keys));
255498 return 0;
256499
257500 badkey:
258
- crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
259501 memzero_explicit(&keys, sizeof(keys));
260
- return -EINVAL;
502
+ return err;
261503 }
262504
263505 static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
....@@ -265,59 +507,99 @@
265507 struct safexcel_cipher_req *sreq,
266508 struct safexcel_command_desc *cdesc)
267509 {
268
- struct safexcel_crypto_priv *priv = ctx->priv;
269
- int ctrl_size;
510
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
511
+ int ctrl_size = ctx->key_len / sizeof(u32);
512
+
513
+ cdesc->control_data.control1 = ctx->mode;
270514
271515 if (ctx->aead) {
272
- if (sreq->direction == SAFEXCEL_ENCRYPT)
273
- cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
274
- else
275
- cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
276
- } else {
277
- cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;
516
+ /* Take in account the ipad+opad digests */
517
+ if (ctx->xcm) {
518
+ ctrl_size += ctx->state_sz / sizeof(u32);
519
+ cdesc->control_data.control0 =
520
+ CONTEXT_CONTROL_KEY_EN |
521
+ CONTEXT_CONTROL_DIGEST_XCM |
522
+ ctx->hash_alg |
523
+ CONTEXT_CONTROL_SIZE(ctrl_size);
524
+ } else if (ctx->alg == SAFEXCEL_CHACHA20) {
525
+ /* Chacha20-Poly1305 */
526
+ cdesc->control_data.control0 =
527
+ CONTEXT_CONTROL_KEY_EN |
528
+ CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
529
+ (sreq->direction == SAFEXCEL_ENCRYPT ?
530
+ CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
531
+ CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
532
+ ctx->hash_alg |
533
+ CONTEXT_CONTROL_SIZE(ctrl_size);
534
+ return 0;
535
+ } else {
536
+ ctrl_size += ctx->state_sz / sizeof(u32) * 2;
537
+ cdesc->control_data.control0 =
538
+ CONTEXT_CONTROL_KEY_EN |
539
+ CONTEXT_CONTROL_DIGEST_HMAC |
540
+ ctx->hash_alg |
541
+ CONTEXT_CONTROL_SIZE(ctrl_size);
542
+ }
278543
279
- /* The decryption control type is a combination of the
280
- * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
281
- * types.
282
- */
283
- if (sreq->direction == SAFEXCEL_DECRYPT)
284
- cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
544
+ if (sreq->direction == SAFEXCEL_ENCRYPT &&
545
+ (ctx->xcm == EIP197_XCM_MODE_CCM ||
546
+ ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
547
+ cdesc->control_data.control0 |=
548
+ CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
549
+ else if (sreq->direction == SAFEXCEL_ENCRYPT)
550
+ cdesc->control_data.control0 |=
551
+ CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
552
+ else if (ctx->xcm == EIP197_XCM_MODE_CCM)
553
+ cdesc->control_data.control0 |=
554
+ CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
555
+ else
556
+ cdesc->control_data.control0 |=
557
+ CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
558
+ } else {
559
+ if (sreq->direction == SAFEXCEL_ENCRYPT)
560
+ cdesc->control_data.control0 =
561
+ CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
562
+ CONTEXT_CONTROL_KEY_EN |
563
+ CONTEXT_CONTROL_SIZE(ctrl_size);
564
+ else
565
+ cdesc->control_data.control0 =
566
+ CONTEXT_CONTROL_TYPE_CRYPTO_IN |
567
+ CONTEXT_CONTROL_KEY_EN |
568
+ CONTEXT_CONTROL_SIZE(ctrl_size);
285569 }
286570
287
- cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
288
- cdesc->control_data.control1 |= ctx->mode;
289
-
290
- if (ctx->aead)
291
- cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
292
- ctx->hash_alg;
293
-
294571 if (ctx->alg == SAFEXCEL_DES) {
295
- cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
572
+ cdesc->control_data.control0 |=
573
+ CONTEXT_CONTROL_CRYPTO_ALG_DES;
296574 } else if (ctx->alg == SAFEXCEL_3DES) {
297
- cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
575
+ cdesc->control_data.control0 |=
576
+ CONTEXT_CONTROL_CRYPTO_ALG_3DES;
298577 } else if (ctx->alg == SAFEXCEL_AES) {
299
- switch (ctx->key_len) {
578
+ switch (ctx->key_len >> ctx->xts) {
300579 case AES_KEYSIZE_128:
301
- cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
580
+ cdesc->control_data.control0 |=
581
+ CONTEXT_CONTROL_CRYPTO_ALG_AES128;
302582 break;
303583 case AES_KEYSIZE_192:
304
- cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
584
+ cdesc->control_data.control0 |=
585
+ CONTEXT_CONTROL_CRYPTO_ALG_AES192;
305586 break;
306587 case AES_KEYSIZE_256:
307
- cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
588
+ cdesc->control_data.control0 |=
589
+ CONTEXT_CONTROL_CRYPTO_ALG_AES256;
308590 break;
309591 default:
310592 dev_err(priv->dev, "aes keysize not supported: %u\n",
311
- ctx->key_len);
593
+ ctx->key_len >> ctx->xts);
312594 return -EINVAL;
313595 }
596
+ } else if (ctx->alg == SAFEXCEL_CHACHA20) {
597
+ cdesc->control_data.control0 |=
598
+ CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
599
+ } else if (ctx->alg == SAFEXCEL_SM4) {
600
+ cdesc->control_data.control0 |=
601
+ CONTEXT_CONTROL_CRYPTO_ALG_SM4;
314602 }
315
-
316
- ctrl_size = ctx->key_len / sizeof(u32);
317
- if (ctx->aead)
318
- /* Take in account the ipad+opad digests */
319
- ctrl_size += ctx->state_sz / sizeof(u32) * 2;
320
- cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);
321603
322604 return 0;
323605 }
....@@ -330,6 +612,9 @@
330612 struct safexcel_cipher_req *sreq,
331613 bool *should_complete, int *ret)
332614 {
615
+ struct skcipher_request *areq = skcipher_request_cast(async);
616
+ struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
617
+ struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
333618 struct safexcel_result_desc *rdesc;
334619 int ndesc = 0;
335620
....@@ -356,16 +641,22 @@
356641 safexcel_complete(priv, ring);
357642
358643 if (src == dst) {
359
- dma_unmap_sg(priv->dev, src,
360
- sg_nents_for_len(src, cryptlen),
361
- DMA_BIDIRECTIONAL);
644
+ dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
362645 } else {
363
- dma_unmap_sg(priv->dev, src,
364
- sg_nents_for_len(src, cryptlen),
365
- DMA_TO_DEVICE);
366
- dma_unmap_sg(priv->dev, dst,
367
- sg_nents_for_len(dst, cryptlen),
368
- DMA_FROM_DEVICE);
646
+ dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
647
+ dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
648
+ }
649
+
650
+ /*
651
+ * Update IV in req from last crypto output word for CBC modes
652
+ */
653
+ if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
654
+ (sreq->direction == SAFEXCEL_ENCRYPT)) {
655
+ /* For encrypt take the last output word */
656
+ sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
657
+ crypto_skcipher_ivsize(skcipher),
658
+ (cryptlen -
659
+ crypto_skcipher_ivsize(skcipher)));
369660 }
370661
371662 *should_complete = true;
....@@ -380,98 +671,203 @@
380671 unsigned int digestsize, u8 *iv, int *commands,
381672 int *results)
382673 {
674
+ struct skcipher_request *areq = skcipher_request_cast(base);
675
+ struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
383676 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
384
- struct safexcel_crypto_priv *priv = ctx->priv;
677
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
385678 struct safexcel_command_desc *cdesc;
679
+ struct safexcel_command_desc *first_cdesc = NULL;
386680 struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
387681 struct scatterlist *sg;
388
- unsigned int totlen = cryptlen + assoclen;
389
- int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
390
- int i, ret = 0;
682
+ unsigned int totlen;
683
+ unsigned int totlen_src = cryptlen + assoclen;
684
+ unsigned int totlen_dst = totlen_src;
685
+ struct safexcel_token *atoken;
686
+ int n_cdesc = 0, n_rdesc = 0;
687
+ int queued, i, ret = 0;
688
+ bool first = true;
689
+
690
+ sreq->nr_src = sg_nents_for_len(src, totlen_src);
691
+
692
+ if (ctx->aead) {
693
+ /*
694
+ * AEAD has auth tag appended to output for encrypt and
695
+ * removed from the output for decrypt!
696
+ */
697
+ if (sreq->direction == SAFEXCEL_DECRYPT)
698
+ totlen_dst -= digestsize;
699
+ else
700
+ totlen_dst += digestsize;
701
+
702
+ memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
703
+ &ctx->base.ipad, ctx->state_sz);
704
+ if (!ctx->xcm)
705
+ memcpy(ctx->base.ctxr->data + (ctx->key_len +
706
+ ctx->state_sz) / sizeof(u32), &ctx->base.opad,
707
+ ctx->state_sz);
708
+ } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
709
+ (sreq->direction == SAFEXCEL_DECRYPT)) {
710
+ /*
711
+ * Save IV from last crypto input word for CBC modes in decrypt
712
+ * direction. Need to do this first in case of inplace operation
713
+ * as it will be overwritten.
714
+ */
715
+ sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
716
+ crypto_skcipher_ivsize(skcipher),
717
+ (totlen_src -
718
+ crypto_skcipher_ivsize(skcipher)));
719
+ }
720
+
721
+ sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
722
+
723
+ /*
724
+ * Remember actual input length, source buffer length may be
725
+ * updated in case of inline operation below.
726
+ */
727
+ totlen = totlen_src;
728
+ queued = totlen_src;
391729
392730 if (src == dst) {
393
- nr_src = dma_map_sg(priv->dev, src,
394
- sg_nents_for_len(src, totlen),
395
- DMA_BIDIRECTIONAL);
396
- nr_dst = nr_src;
397
- if (!nr_src)
731
+ sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
732
+ sreq->nr_dst = sreq->nr_src;
733
+ if (unlikely((totlen_src || totlen_dst) &&
734
+ (sreq->nr_src <= 0))) {
735
+ dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
736
+ max(totlen_src, totlen_dst));
398737 return -EINVAL;
738
+ }
739
+ dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
399740 } else {
400
- nr_src = dma_map_sg(priv->dev, src,
401
- sg_nents_for_len(src, totlen),
402
- DMA_TO_DEVICE);
403
- if (!nr_src)
741
+ if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
742
+ dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
743
+ totlen_src);
404744 return -EINVAL;
745
+ }
746
+ dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
405747
406
- nr_dst = dma_map_sg(priv->dev, dst,
407
- sg_nents_for_len(dst, totlen),
408
- DMA_FROM_DEVICE);
409
- if (!nr_dst) {
410
- dma_unmap_sg(priv->dev, src,
411
- sg_nents_for_len(src, totlen),
748
+ if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
749
+ dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
750
+ totlen_dst);
751
+ dma_unmap_sg(priv->dev, src, sreq->nr_src,
412752 DMA_TO_DEVICE);
413753 return -EINVAL;
414754 }
755
+ dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
415756 }
416757
417758 memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
418759
419
- if (ctx->aead) {
420
- memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
421
- ctx->ipad, ctx->state_sz);
422
- memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) / sizeof(u32),
423
- ctx->opad, ctx->state_sz);
760
+ if (!totlen) {
761
+ /*
762
+ * The EIP97 cannot deal with zero length input packets!
763
+ * So stuff a dummy command descriptor indicating a 1 byte
764
+ * (dummy) input packet, using the context record as source.
765
+ */
766
+ first_cdesc = safexcel_add_cdesc(priv, ring,
767
+ 1, 1, ctx->base.ctxr_dma,
768
+ 1, 1, ctx->base.ctxr_dma,
769
+ &atoken);
770
+ if (IS_ERR(first_cdesc)) {
771
+ /* No space left in the command descriptor ring */
772
+ ret = PTR_ERR(first_cdesc);
773
+ goto cdesc_rollback;
774
+ }
775
+ n_cdesc = 1;
776
+ goto skip_cdesc;
424777 }
425778
426779 /* command descriptors */
427
- for_each_sg(src, sg, nr_src, i) {
780
+ for_each_sg(src, sg, sreq->nr_src, i) {
428781 int len = sg_dma_len(sg);
429782
430783 /* Do not overflow the request */
431
- if (queued - len < 0)
784
+ if (queued < len)
432785 len = queued;
433786
434
- cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, !(queued - len),
787
+ cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
788
+ !(queued - len),
435789 sg_dma_address(sg), len, totlen,
436
- ctx->base.ctxr_dma);
790
+ ctx->base.ctxr_dma, &atoken);
437791 if (IS_ERR(cdesc)) {
438792 /* No space left in the command descriptor ring */
439793 ret = PTR_ERR(cdesc);
440794 goto cdesc_rollback;
441795 }
796
+
797
+ if (!n_cdesc)
798
+ first_cdesc = cdesc;
799
+
442800 n_cdesc++;
443
-
444
- if (n_cdesc == 1) {
445
- safexcel_context_control(ctx, base, sreq, cdesc);
446
- if (ctx->aead)
447
- safexcel_aead_token(ctx, iv, cdesc,
448
- sreq->direction, cryptlen,
449
- assoclen, digestsize);
450
- else
451
- safexcel_skcipher_token(ctx, iv, cdesc,
452
- cryptlen);
453
- }
454
-
455801 queued -= len;
456802 if (!queued)
457803 break;
458804 }
805
+skip_cdesc:
806
+ /* Add context control words and token to first command descriptor */
807
+ safexcel_context_control(ctx, base, sreq, first_cdesc);
808
+ if (ctx->aead)
809
+ safexcel_aead_token(ctx, iv, first_cdesc, atoken,
810
+ sreq->direction, cryptlen,
811
+ assoclen, digestsize);
812
+ else
813
+ safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
814
+ cryptlen);
459815
460816 /* result descriptors */
461
- for_each_sg(dst, sg, nr_dst, i) {
462
- bool first = !i, last = (i == nr_dst - 1);
817
+ for_each_sg(dst, sg, sreq->nr_dst, i) {
818
+ bool last = (i == sreq->nr_dst - 1);
463819 u32 len = sg_dma_len(sg);
464820
465
- rdesc = safexcel_add_rdesc(priv, ring, first, last,
466
- sg_dma_address(sg), len);
821
+ /* only allow the part of the buffer we know we need */
822
+ if (len > totlen_dst)
823
+ len = totlen_dst;
824
+ if (unlikely(!len))
825
+ break;
826
+ totlen_dst -= len;
827
+
828
+ /* skip over AAD space in buffer - not written */
829
+ if (assoclen) {
830
+ if (assoclen >= len) {
831
+ assoclen -= len;
832
+ continue;
833
+ }
834
+ rdesc = safexcel_add_rdesc(priv, ring, first, last,
835
+ sg_dma_address(sg) +
836
+ assoclen,
837
+ len - assoclen);
838
+ assoclen = 0;
839
+ } else {
840
+ rdesc = safexcel_add_rdesc(priv, ring, first, last,
841
+ sg_dma_address(sg),
842
+ len);
843
+ }
467844 if (IS_ERR(rdesc)) {
468845 /* No space left in the result descriptor ring */
469846 ret = PTR_ERR(rdesc);
470847 goto rdesc_rollback;
471848 }
472
- if (first)
849
+ if (first) {
473850 first_rdesc = rdesc;
851
+ first = false;
852
+ }
474853 n_rdesc++;
854
+ }
855
+
856
+ if (unlikely(first)) {
857
+ /*
858
+ * Special case: AEAD decrypt with only AAD data.
859
+ * In this case there is NO output data from the engine,
860
+ * but the engine still needs a result descriptor!
861
+ * Create a dummy one just for catching the result token.
862
+ */
863
+ rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
864
+ if (IS_ERR(rdesc)) {
865
+ /* No space left in the result descriptor ring */
866
+ ret = PTR_ERR(rdesc);
867
+ goto rdesc_rollback;
868
+ }
869
+ first_rdesc = rdesc;
870
+ n_rdesc = 1;
475871 }
476872
477873 safexcel_rdr_req_set(priv, ring, first_rdesc, base);
....@@ -488,16 +884,10 @@
488884 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
489885
490886 if (src == dst) {
491
- dma_unmap_sg(priv->dev, src,
492
- sg_nents_for_len(src, totlen),
493
- DMA_BIDIRECTIONAL);
887
+ dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
494888 } else {
495
- dma_unmap_sg(priv->dev, src,
496
- sg_nents_for_len(src, totlen),
497
- DMA_TO_DEVICE);
498
- dma_unmap_sg(priv->dev, dst,
499
- sg_nents_for_len(dst, totlen),
500
- DMA_FROM_DEVICE);
889
+ dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
890
+ dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
501891 }
502892
503893 return ret;
....@@ -612,7 +1002,7 @@
6121002 int ring, int *commands, int *results)
6131003 {
6141004 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
615
- struct safexcel_crypto_priv *priv = ctx->priv;
1005
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
6161006 int ret;
6171007
6181008 ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
....@@ -631,17 +1021,27 @@
6311021 struct skcipher_request *req = skcipher_request_cast(async);
6321022 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
6331023 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
634
- struct safexcel_crypto_priv *priv = ctx->priv;
1024
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
6351025 int ret;
6361026
6371027 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
6381028
639
- if (sreq->needs_inv)
1029
+ if (sreq->needs_inv) {
6401030 ret = safexcel_cipher_send_inv(async, ring, commands, results);
641
- else
1031
+ } else {
1032
+ struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1033
+ u8 input_iv[AES_BLOCK_SIZE];
1034
+
1035
+ /*
1036
+ * Save input IV in case of CBC decrypt mode
1037
+ * Will be overwritten with output IV prior to use!
1038
+ */
1039
+ memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1040
+
6421041 ret = safexcel_send_req(async, ring, sreq, req->src,
643
- req->dst, req->cryptlen, 0, 0, req->iv,
1042
+ req->dst, req->cryptlen, 0, 0, input_iv,
6441043 commands, results);
1044
+ }
6451045
6461046 sreq->rdescs = *results;
6471047 return ret;
....@@ -654,7 +1054,7 @@
6541054 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
6551055 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
6561056 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
657
- struct safexcel_crypto_priv *priv = ctx->priv;
1057
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
6581058 int ret;
6591059
6601060 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
....@@ -676,7 +1076,7 @@
6761076 struct safexcel_inv_result *result)
6771077 {
6781078 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
679
- struct safexcel_crypto_priv *priv = ctx->priv;
1079
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
6801080 int ring = ctx->base.ring;
6811081
6821082 init_completion(&result->completion);
....@@ -736,17 +1136,14 @@
7361136
7371137 static int safexcel_queue_req(struct crypto_async_request *base,
7381138 struct safexcel_cipher_req *sreq,
739
- enum safexcel_cipher_direction dir, u32 mode,
740
- enum safexcel_cipher_alg alg)
1139
+ enum safexcel_cipher_direction dir)
7411140 {
7421141 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
743
- struct safexcel_crypto_priv *priv = ctx->priv;
1142
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
7441143 int ret, ring;
7451144
7461145 sreq->needs_inv = false;
7471146 sreq->direction = dir;
748
- ctx->alg = alg;
749
- ctx->mode = mode;
7501147
7511148 if (ctx->base.ctxr) {
7521149 if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
....@@ -774,18 +1171,16 @@
7741171 return ret;
7751172 }
7761173
777
-static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
1174
+static int safexcel_encrypt(struct skcipher_request *req)
7781175 {
7791176 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
780
- SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
781
- SAFEXCEL_AES);
1177
+ SAFEXCEL_ENCRYPT);
7821178 }
7831179
784
-static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
1180
+static int safexcel_decrypt(struct skcipher_request *req)
7851181 {
7861182 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
787
- SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
788
- SAFEXCEL_AES);
1183
+ SAFEXCEL_DECRYPT);
7891184 }
7901185
7911186 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
....@@ -798,10 +1193,12 @@
7981193 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
7991194 sizeof(struct safexcel_cipher_req));
8001195
801
- ctx->priv = tmpl->priv;
1196
+ ctx->base.priv = tmpl->priv;
8021197
8031198 ctx->base.send = safexcel_skcipher_send;
8041199 ctx->base.handle_result = safexcel_skcipher_handle_result;
1200
+ ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1201
+ ctx->ctrinit = 1;
8051202 return 0;
8061203 }
8071204
....@@ -822,7 +1219,7 @@
8221219 static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
8231220 {
8241221 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
825
- struct safexcel_crypto_priv *priv = ctx->priv;
1222
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
8261223 int ret;
8271224
8281225 if (safexcel_cipher_cra_exit(tfm))
....@@ -842,7 +1239,7 @@
8421239 static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
8431240 {
8441241 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
845
- struct safexcel_crypto_priv *priv = ctx->priv;
1242
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
8461243 int ret;
8471244
8481245 if (safexcel_cipher_cra_exit(tfm))
....@@ -859,106 +1256,246 @@
8591256 }
8601257 }
8611258
1259
+static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1260
+{
1261
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1262
+
1263
+ safexcel_skcipher_cra_init(tfm);
1264
+ ctx->alg = SAFEXCEL_AES;
1265
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1266
+ ctx->blocksz = 0;
1267
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1268
+ return 0;
1269
+}
1270
+
8621271 struct safexcel_alg_template safexcel_alg_ecb_aes = {
8631272 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
864
- .engines = EIP97IES | EIP197B | EIP197D,
1273
+ .algo_mask = SAFEXCEL_ALG_AES,
8651274 .alg.skcipher = {
8661275 .setkey = safexcel_skcipher_aes_setkey,
867
- .encrypt = safexcel_ecb_aes_encrypt,
868
- .decrypt = safexcel_ecb_aes_decrypt,
1276
+ .encrypt = safexcel_encrypt,
1277
+ .decrypt = safexcel_decrypt,
8691278 .min_keysize = AES_MIN_KEY_SIZE,
8701279 .max_keysize = AES_MAX_KEY_SIZE,
8711280 .base = {
8721281 .cra_name = "ecb(aes)",
8731282 .cra_driver_name = "safexcel-ecb-aes",
874
- .cra_priority = 300,
1283
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
8751284 .cra_flags = CRYPTO_ALG_ASYNC |
1285
+ CRYPTO_ALG_ALLOCATES_MEMORY |
8761286 CRYPTO_ALG_KERN_DRIVER_ONLY,
8771287 .cra_blocksize = AES_BLOCK_SIZE,
8781288 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
8791289 .cra_alignmask = 0,
880
- .cra_init = safexcel_skcipher_cra_init,
1290
+ .cra_init = safexcel_skcipher_aes_ecb_cra_init,
8811291 .cra_exit = safexcel_skcipher_cra_exit,
8821292 .cra_module = THIS_MODULE,
8831293 },
8841294 },
8851295 };
8861296
887
-static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
1297
+static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
8881298 {
889
- return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
890
- SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
891
- SAFEXCEL_AES);
892
-}
1299
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
8931300
894
-static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
895
-{
896
- return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
897
- SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
898
- SAFEXCEL_AES);
1301
+ safexcel_skcipher_cra_init(tfm);
1302
+ ctx->alg = SAFEXCEL_AES;
1303
+ ctx->blocksz = AES_BLOCK_SIZE;
1304
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1305
+ return 0;
8991306 }
9001307
9011308 struct safexcel_alg_template safexcel_alg_cbc_aes = {
9021309 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
903
- .engines = EIP97IES | EIP197B | EIP197D,
1310
+ .algo_mask = SAFEXCEL_ALG_AES,
9041311 .alg.skcipher = {
9051312 .setkey = safexcel_skcipher_aes_setkey,
906
- .encrypt = safexcel_cbc_aes_encrypt,
907
- .decrypt = safexcel_cbc_aes_decrypt,
1313
+ .encrypt = safexcel_encrypt,
1314
+ .decrypt = safexcel_decrypt,
9081315 .min_keysize = AES_MIN_KEY_SIZE,
9091316 .max_keysize = AES_MAX_KEY_SIZE,
9101317 .ivsize = AES_BLOCK_SIZE,
9111318 .base = {
9121319 .cra_name = "cbc(aes)",
9131320 .cra_driver_name = "safexcel-cbc-aes",
914
- .cra_priority = 300,
1321
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
9151322 .cra_flags = CRYPTO_ALG_ASYNC |
1323
+ CRYPTO_ALG_ALLOCATES_MEMORY |
9161324 CRYPTO_ALG_KERN_DRIVER_ONLY,
9171325 .cra_blocksize = AES_BLOCK_SIZE,
9181326 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
9191327 .cra_alignmask = 0,
920
- .cra_init = safexcel_skcipher_cra_init,
1328
+ .cra_init = safexcel_skcipher_aes_cbc_cra_init,
9211329 .cra_exit = safexcel_skcipher_cra_exit,
9221330 .cra_module = THIS_MODULE,
9231331 },
9241332 },
9251333 };
9261334
927
-static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
1335
+static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
9281336 {
929
- return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
930
- SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
931
- SAFEXCEL_DES);
1337
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1338
+
1339
+ safexcel_skcipher_cra_init(tfm);
1340
+ ctx->alg = SAFEXCEL_AES;
1341
+ ctx->blocksz = AES_BLOCK_SIZE;
1342
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1343
+ return 0;
9321344 }
9331345
934
-static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
1346
+struct safexcel_alg_template safexcel_alg_cfb_aes = {
1347
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1348
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1349
+ .alg.skcipher = {
1350
+ .setkey = safexcel_skcipher_aes_setkey,
1351
+ .encrypt = safexcel_encrypt,
1352
+ .decrypt = safexcel_decrypt,
1353
+ .min_keysize = AES_MIN_KEY_SIZE,
1354
+ .max_keysize = AES_MAX_KEY_SIZE,
1355
+ .ivsize = AES_BLOCK_SIZE,
1356
+ .base = {
1357
+ .cra_name = "cfb(aes)",
1358
+ .cra_driver_name = "safexcel-cfb-aes",
1359
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
1360
+ .cra_flags = CRYPTO_ALG_ASYNC |
1361
+ CRYPTO_ALG_ALLOCATES_MEMORY |
1362
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
1363
+ .cra_blocksize = 1,
1364
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1365
+ .cra_alignmask = 0,
1366
+ .cra_init = safexcel_skcipher_aes_cfb_cra_init,
1367
+ .cra_exit = safexcel_skcipher_cra_exit,
1368
+ .cra_module = THIS_MODULE,
1369
+ },
1370
+ },
1371
+};
1372
+
1373
+static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
9351374 {
936
- return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
937
- SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
938
- SAFEXCEL_DES);
1375
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1376
+
1377
+ safexcel_skcipher_cra_init(tfm);
1378
+ ctx->alg = SAFEXCEL_AES;
1379
+ ctx->blocksz = AES_BLOCK_SIZE;
1380
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1381
+ return 0;
9391382 }
1383
+
1384
+struct safexcel_alg_template safexcel_alg_ofb_aes = {
1385
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1386
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1387
+ .alg.skcipher = {
1388
+ .setkey = safexcel_skcipher_aes_setkey,
1389
+ .encrypt = safexcel_encrypt,
1390
+ .decrypt = safexcel_decrypt,
1391
+ .min_keysize = AES_MIN_KEY_SIZE,
1392
+ .max_keysize = AES_MAX_KEY_SIZE,
1393
+ .ivsize = AES_BLOCK_SIZE,
1394
+ .base = {
1395
+ .cra_name = "ofb(aes)",
1396
+ .cra_driver_name = "safexcel-ofb-aes",
1397
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
1398
+ .cra_flags = CRYPTO_ALG_ASYNC |
1399
+ CRYPTO_ALG_ALLOCATES_MEMORY |
1400
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
1401
+ .cra_blocksize = 1,
1402
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1403
+ .cra_alignmask = 0,
1404
+ .cra_init = safexcel_skcipher_aes_ofb_cra_init,
1405
+ .cra_exit = safexcel_skcipher_cra_exit,
1406
+ .cra_module = THIS_MODULE,
1407
+ },
1408
+ },
1409
+};
1410
+
1411
+static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1412
+ const u8 *key, unsigned int len)
1413
+{
1414
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1415
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1416
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
1417
+ struct crypto_aes_ctx aes;
1418
+ int ret, i;
1419
+ unsigned int keylen;
1420
+
1421
+ /* last 4 bytes of key are the nonce! */
1422
+ ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1423
+ /* exclude the nonce here */
1424
+ keylen = len - CTR_RFC3686_NONCE_SIZE;
1425
+ ret = aes_expandkey(&aes, key, keylen);
1426
+ if (ret)
1427
+ return ret;
1428
+
1429
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1430
+ for (i = 0; i < keylen / sizeof(u32); i++) {
1431
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1432
+ ctx->base.needs_inv = true;
1433
+ break;
1434
+ }
1435
+ }
1436
+ }
1437
+
1438
+ for (i = 0; i < keylen / sizeof(u32); i++)
1439
+ ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1440
+
1441
+ ctx->key_len = keylen;
1442
+
1443
+ memzero_explicit(&aes, sizeof(aes));
1444
+ return 0;
1445
+}
1446
+
1447
+static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1448
+{
1449
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1450
+
1451
+ safexcel_skcipher_cra_init(tfm);
1452
+ ctx->alg = SAFEXCEL_AES;
1453
+ ctx->blocksz = AES_BLOCK_SIZE;
1454
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1455
+ return 0;
1456
+}
1457
+
1458
+struct safexcel_alg_template safexcel_alg_ctr_aes = {
1459
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1460
+ .algo_mask = SAFEXCEL_ALG_AES,
1461
+ .alg.skcipher = {
1462
+ .setkey = safexcel_skcipher_aesctr_setkey,
1463
+ .encrypt = safexcel_encrypt,
1464
+ .decrypt = safexcel_decrypt,
1465
+ /* Add nonce size */
1466
+ .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1467
+ .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1468
+ .ivsize = CTR_RFC3686_IV_SIZE,
1469
+ .base = {
1470
+ .cra_name = "rfc3686(ctr(aes))",
1471
+ .cra_driver_name = "safexcel-ctr-aes",
1472
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
1473
+ .cra_flags = CRYPTO_ALG_ASYNC |
1474
+ CRYPTO_ALG_ALLOCATES_MEMORY |
1475
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
1476
+ .cra_blocksize = 1,
1477
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1478
+ .cra_alignmask = 0,
1479
+ .cra_init = safexcel_skcipher_aes_ctr_cra_init,
1480
+ .cra_exit = safexcel_skcipher_cra_exit,
1481
+ .cra_module = THIS_MODULE,
1482
+ },
1483
+ },
1484
+};
9401485
9411486 static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
9421487 unsigned int len)
9431488 {
944
- struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
945
- struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
946
- u32 tmp[DES_EXPKEY_WORDS];
1489
+ struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1490
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
9471491 int ret;
9481492
949
- if (len != DES_KEY_SIZE) {
950
- crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
951
- return -EINVAL;
952
- }
953
-
954
- ret = des_ekey(tmp, key);
955
- if (!ret && (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
956
- tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
957
- return -EINVAL;
958
- }
1493
+ ret = verify_skcipher_des_key(ctfm, key);
1494
+ if (ret)
1495
+ return ret;
9591496
9601497 /* if context exits and key changed, need to invalidate it */
961
- if (ctx->base.ctxr_dma)
1498
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
9621499 if (memcmp(ctx->key, key, len))
9631500 ctx->base.needs_inv = true;
9641501
....@@ -968,170 +1505,176 @@
9681505 return 0;
9691506 }
9701507
1508
+static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1509
+{
1510
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1511
+
1512
+ safexcel_skcipher_cra_init(tfm);
1513
+ ctx->alg = SAFEXCEL_DES;
1514
+ ctx->blocksz = DES_BLOCK_SIZE;
1515
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1516
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1517
+ return 0;
1518
+}
1519
+
9711520 struct safexcel_alg_template safexcel_alg_cbc_des = {
9721521 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
973
- .engines = EIP97IES | EIP197B | EIP197D,
1522
+ .algo_mask = SAFEXCEL_ALG_DES,
9741523 .alg.skcipher = {
9751524 .setkey = safexcel_des_setkey,
976
- .encrypt = safexcel_cbc_des_encrypt,
977
- .decrypt = safexcel_cbc_des_decrypt,
1525
+ .encrypt = safexcel_encrypt,
1526
+ .decrypt = safexcel_decrypt,
9781527 .min_keysize = DES_KEY_SIZE,
9791528 .max_keysize = DES_KEY_SIZE,
9801529 .ivsize = DES_BLOCK_SIZE,
9811530 .base = {
9821531 .cra_name = "cbc(des)",
9831532 .cra_driver_name = "safexcel-cbc-des",
984
- .cra_priority = 300,
985
- .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
1533
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
1534
+ .cra_flags = CRYPTO_ALG_ASYNC |
1535
+ CRYPTO_ALG_ALLOCATES_MEMORY |
9861536 CRYPTO_ALG_KERN_DRIVER_ONLY,
9871537 .cra_blocksize = DES_BLOCK_SIZE,
9881538 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
9891539 .cra_alignmask = 0,
990
- .cra_init = safexcel_skcipher_cra_init,
1540
+ .cra_init = safexcel_skcipher_des_cbc_cra_init,
9911541 .cra_exit = safexcel_skcipher_cra_exit,
9921542 .cra_module = THIS_MODULE,
9931543 },
9941544 },
9951545 };
9961546
997
-static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
1547
+static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
9981548 {
999
- return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1000
- SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
1001
- SAFEXCEL_DES);
1002
-}
1549
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
10031550
1004
-static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
1005
-{
1006
- return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1007
- SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
1008
- SAFEXCEL_DES);
1551
+ safexcel_skcipher_cra_init(tfm);
1552
+ ctx->alg = SAFEXCEL_DES;
1553
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1554
+ ctx->blocksz = 0;
1555
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1556
+ return 0;
10091557 }
10101558
10111559 struct safexcel_alg_template safexcel_alg_ecb_des = {
10121560 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1013
- .engines = EIP97IES | EIP197B | EIP197D,
1561
+ .algo_mask = SAFEXCEL_ALG_DES,
10141562 .alg.skcipher = {
10151563 .setkey = safexcel_des_setkey,
1016
- .encrypt = safexcel_ecb_des_encrypt,
1017
- .decrypt = safexcel_ecb_des_decrypt,
1564
+ .encrypt = safexcel_encrypt,
1565
+ .decrypt = safexcel_decrypt,
10181566 .min_keysize = DES_KEY_SIZE,
10191567 .max_keysize = DES_KEY_SIZE,
1020
- .ivsize = DES_BLOCK_SIZE,
10211568 .base = {
10221569 .cra_name = "ecb(des)",
10231570 .cra_driver_name = "safexcel-ecb-des",
1024
- .cra_priority = 300,
1025
- .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
1571
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
1572
+ .cra_flags = CRYPTO_ALG_ASYNC |
1573
+ CRYPTO_ALG_ALLOCATES_MEMORY |
10261574 CRYPTO_ALG_KERN_DRIVER_ONLY,
10271575 .cra_blocksize = DES_BLOCK_SIZE,
10281576 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
10291577 .cra_alignmask = 0,
1030
- .cra_init = safexcel_skcipher_cra_init,
1578
+ .cra_init = safexcel_skcipher_des_ecb_cra_init,
10311579 .cra_exit = safexcel_skcipher_cra_exit,
10321580 .cra_module = THIS_MODULE,
10331581 },
10341582 },
10351583 };
10361584
1037
-static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
1038
-{
1039
- return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1040
- SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
1041
- SAFEXCEL_3DES);
1042
-}
1043
-
1044
-static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
1045
-{
1046
- return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1047
- SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
1048
- SAFEXCEL_3DES);
1049
-}
1050
-
10511585 static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
10521586 const u8 *key, unsigned int len)
10531587 {
1054
- struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1055
- struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1588
+ struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1589
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
1590
+ int err;
10561591
1057
- if (len != DES3_EDE_KEY_SIZE) {
1058
- crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1059
- return -EINVAL;
1060
- }
1592
+ err = verify_skcipher_des3_key(ctfm, key);
1593
+ if (err)
1594
+ return err;
10611595
10621596 /* if context exits and key changed, need to invalidate it */
1063
- if (ctx->base.ctxr_dma) {
1597
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
10641598 if (memcmp(ctx->key, key, len))
10651599 ctx->base.needs_inv = true;
1066
- }
10671600
10681601 memcpy(ctx->key, key, len);
1069
-
10701602 ctx->key_len = len;
10711603
10721604 return 0;
10731605 }
10741606
1607
+static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1608
+{
1609
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1610
+
1611
+ safexcel_skcipher_cra_init(tfm);
1612
+ ctx->alg = SAFEXCEL_3DES;
1613
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1614
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1615
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1616
+ return 0;
1617
+}
1618
+
10751619 struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
10761620 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1077
- .engines = EIP97IES | EIP197B | EIP197D,
1621
+ .algo_mask = SAFEXCEL_ALG_DES,
10781622 .alg.skcipher = {
10791623 .setkey = safexcel_des3_ede_setkey,
1080
- .encrypt = safexcel_cbc_des3_ede_encrypt,
1081
- .decrypt = safexcel_cbc_des3_ede_decrypt,
1624
+ .encrypt = safexcel_encrypt,
1625
+ .decrypt = safexcel_decrypt,
10821626 .min_keysize = DES3_EDE_KEY_SIZE,
10831627 .max_keysize = DES3_EDE_KEY_SIZE,
10841628 .ivsize = DES3_EDE_BLOCK_SIZE,
10851629 .base = {
10861630 .cra_name = "cbc(des3_ede)",
10871631 .cra_driver_name = "safexcel-cbc-des3_ede",
1088
- .cra_priority = 300,
1089
- .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
1632
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
1633
+ .cra_flags = CRYPTO_ALG_ASYNC |
1634
+ CRYPTO_ALG_ALLOCATES_MEMORY |
10901635 CRYPTO_ALG_KERN_DRIVER_ONLY,
10911636 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
10921637 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
10931638 .cra_alignmask = 0,
1094
- .cra_init = safexcel_skcipher_cra_init,
1639
+ .cra_init = safexcel_skcipher_des3_cbc_cra_init,
10951640 .cra_exit = safexcel_skcipher_cra_exit,
10961641 .cra_module = THIS_MODULE,
10971642 },
10981643 },
10991644 };
11001645
1101
-static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
1646
+static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
11021647 {
1103
- return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1104
- SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
1105
- SAFEXCEL_3DES);
1106
-}
1648
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
11071649
1108
-static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
1109
-{
1110
- return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1111
- SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
1112
- SAFEXCEL_3DES);
1650
+ safexcel_skcipher_cra_init(tfm);
1651
+ ctx->alg = SAFEXCEL_3DES;
1652
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1653
+ ctx->blocksz = 0;
1654
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1655
+ return 0;
11131656 }
11141657
11151658 struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
11161659 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1117
- .engines = EIP97IES | EIP197B | EIP197D,
1660
+ .algo_mask = SAFEXCEL_ALG_DES,
11181661 .alg.skcipher = {
11191662 .setkey = safexcel_des3_ede_setkey,
1120
- .encrypt = safexcel_ecb_des3_ede_encrypt,
1121
- .decrypt = safexcel_ecb_des3_ede_decrypt,
1663
+ .encrypt = safexcel_encrypt,
1664
+ .decrypt = safexcel_decrypt,
11221665 .min_keysize = DES3_EDE_KEY_SIZE,
11231666 .max_keysize = DES3_EDE_KEY_SIZE,
1124
- .ivsize = DES3_EDE_BLOCK_SIZE,
11251667 .base = {
11261668 .cra_name = "ecb(des3_ede)",
11271669 .cra_driver_name = "safexcel-ecb-des3_ede",
1128
- .cra_priority = 300,
1129
- .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
1670
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
1671
+ .cra_flags = CRYPTO_ALG_ASYNC |
1672
+ CRYPTO_ALG_ALLOCATES_MEMORY |
11301673 CRYPTO_ALG_KERN_DRIVER_ONLY,
11311674 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
11321675 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
11331676 .cra_alignmask = 0,
1134
- .cra_init = safexcel_skcipher_cra_init,
1677
+ .cra_init = safexcel_skcipher_des3_ecb_cra_init,
11351678 .cra_exit = safexcel_skcipher_cra_exit,
11361679 .cra_module = THIS_MODULE,
11371680 },
....@@ -1142,16 +1685,14 @@
11421685 {
11431686 struct safexcel_cipher_req *creq = aead_request_ctx(req);
11441687
1145
- return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
1146
- CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
1688
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
11471689 }
11481690
11491691 static int safexcel_aead_decrypt(struct aead_request *req)
11501692 {
11511693 struct safexcel_cipher_req *creq = aead_request_ctx(req);
11521694
1153
- return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
1154
- CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
1695
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
11551696 }
11561697
11571698 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
....@@ -1164,8 +1705,13 @@
11641705 crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
11651706 sizeof(struct safexcel_cipher_req));
11661707
1167
- ctx->priv = tmpl->priv;
1708
+ ctx->base.priv = tmpl->priv;
11681709
1710
+ ctx->alg = SAFEXCEL_AES; /* default */
1711
+ ctx->blocksz = AES_BLOCK_SIZE;
1712
+ ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1713
+ ctx->ctrinit = 1;
1714
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
11691715 ctx->aead = true;
11701716 ctx->base.send = safexcel_aead_send;
11711717 ctx->base.handle_result = safexcel_aead_handle_result;
....@@ -1184,9 +1730,9 @@
11841730
11851731 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
11861732 .type = SAFEXCEL_ALG_TYPE_AEAD,
1187
- .engines = EIP97IES | EIP197B | EIP197D,
1733
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
11881734 .alg.aead = {
1189
- .setkey = safexcel_aead_aes_setkey,
1735
+ .setkey = safexcel_aead_setkey,
11901736 .encrypt = safexcel_aead_encrypt,
11911737 .decrypt = safexcel_aead_decrypt,
11921738 .ivsize = AES_BLOCK_SIZE,
....@@ -1194,8 +1740,9 @@
11941740 .base = {
11951741 .cra_name = "authenc(hmac(sha1),cbc(aes))",
11961742 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1197
- .cra_priority = 300,
1743
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
11981744 .cra_flags = CRYPTO_ALG_ASYNC |
1745
+ CRYPTO_ALG_ALLOCATES_MEMORY |
11991746 CRYPTO_ALG_KERN_DRIVER_ONLY,
12001747 .cra_blocksize = AES_BLOCK_SIZE,
12011748 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
....@@ -1219,9 +1766,9 @@
12191766
12201767 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
12211768 .type = SAFEXCEL_ALG_TYPE_AEAD,
1222
- .engines = EIP97IES | EIP197B | EIP197D,
1769
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
12231770 .alg.aead = {
1224
- .setkey = safexcel_aead_aes_setkey,
1771
+ .setkey = safexcel_aead_setkey,
12251772 .encrypt = safexcel_aead_encrypt,
12261773 .decrypt = safexcel_aead_decrypt,
12271774 .ivsize = AES_BLOCK_SIZE,
....@@ -1229,8 +1776,9 @@
12291776 .base = {
12301777 .cra_name = "authenc(hmac(sha256),cbc(aes))",
12311778 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1232
- .cra_priority = 300,
1779
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
12331780 .cra_flags = CRYPTO_ALG_ASYNC |
1781
+ CRYPTO_ALG_ALLOCATES_MEMORY |
12341782 CRYPTO_ALG_KERN_DRIVER_ONLY,
12351783 .cra_blocksize = AES_BLOCK_SIZE,
12361784 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
....@@ -1254,9 +1802,9 @@
12541802
12551803 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
12561804 .type = SAFEXCEL_ALG_TYPE_AEAD,
1257
- .engines = EIP97IES | EIP197B | EIP197D,
1805
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
12581806 .alg.aead = {
1259
- .setkey = safexcel_aead_aes_setkey,
1807
+ .setkey = safexcel_aead_setkey,
12601808 .encrypt = safexcel_aead_encrypt,
12611809 .decrypt = safexcel_aead_decrypt,
12621810 .ivsize = AES_BLOCK_SIZE,
....@@ -1264,8 +1812,9 @@
12641812 .base = {
12651813 .cra_name = "authenc(hmac(sha224),cbc(aes))",
12661814 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1267
- .cra_priority = 300,
1815
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
12681816 .cra_flags = CRYPTO_ALG_ASYNC |
1817
+ CRYPTO_ALG_ALLOCATES_MEMORY |
12691818 CRYPTO_ALG_KERN_DRIVER_ONLY,
12701819 .cra_blocksize = AES_BLOCK_SIZE,
12711820 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
....@@ -1289,9 +1838,9 @@
12891838
12901839 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
12911840 .type = SAFEXCEL_ALG_TYPE_AEAD,
1292
- .engines = EIP97IES | EIP197B | EIP197D,
1841
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
12931842 .alg.aead = {
1294
- .setkey = safexcel_aead_aes_setkey,
1843
+ .setkey = safexcel_aead_setkey,
12951844 .encrypt = safexcel_aead_encrypt,
12961845 .decrypt = safexcel_aead_decrypt,
12971846 .ivsize = AES_BLOCK_SIZE,
....@@ -1299,8 +1848,9 @@
12991848 .base = {
13001849 .cra_name = "authenc(hmac(sha512),cbc(aes))",
13011850 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1302
- .cra_priority = 300,
1851
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
13031852 .cra_flags = CRYPTO_ALG_ASYNC |
1853
+ CRYPTO_ALG_ALLOCATES_MEMORY |
13041854 CRYPTO_ALG_KERN_DRIVER_ONLY,
13051855 .cra_blocksize = AES_BLOCK_SIZE,
13061856 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
....@@ -1324,9 +1874,9 @@
13241874
13251875 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
13261876 .type = SAFEXCEL_ALG_TYPE_AEAD,
1327
- .engines = EIP97IES | EIP197B | EIP197D,
1877
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
13281878 .alg.aead = {
1329
- .setkey = safexcel_aead_aes_setkey,
1879
+ .setkey = safexcel_aead_setkey,
13301880 .encrypt = safexcel_aead_encrypt,
13311881 .decrypt = safexcel_aead_decrypt,
13321882 .ivsize = AES_BLOCK_SIZE,
....@@ -1334,8 +1884,9 @@
13341884 .base = {
13351885 .cra_name = "authenc(hmac(sha384),cbc(aes))",
13361886 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1337
- .cra_priority = 300,
1887
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
13381888 .cra_flags = CRYPTO_ALG_ASYNC |
1889
+ CRYPTO_ALG_ALLOCATES_MEMORY |
13391890 CRYPTO_ALG_KERN_DRIVER_ONLY,
13401891 .cra_blocksize = AES_BLOCK_SIZE,
13411892 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
....@@ -1346,3 +1897,1858 @@
13461897 },
13471898 },
13481899 };
1900
+
1901
+static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1902
+{
1903
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1904
+
1905
+ safexcel_aead_sha1_cra_init(tfm);
1906
+ ctx->alg = SAFEXCEL_3DES; /* override default */
1907
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1908
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1909
+ return 0;
1910
+}
1911
+
1912
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1913
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
1914
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1915
+ .alg.aead = {
1916
+ .setkey = safexcel_aead_setkey,
1917
+ .encrypt = safexcel_aead_encrypt,
1918
+ .decrypt = safexcel_aead_decrypt,
1919
+ .ivsize = DES3_EDE_BLOCK_SIZE,
1920
+ .maxauthsize = SHA1_DIGEST_SIZE,
1921
+ .base = {
1922
+ .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1923
+ .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1924
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
1925
+ .cra_flags = CRYPTO_ALG_ASYNC |
1926
+ CRYPTO_ALG_ALLOCATES_MEMORY |
1927
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
1928
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1929
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1930
+ .cra_alignmask = 0,
1931
+ .cra_init = safexcel_aead_sha1_des3_cra_init,
1932
+ .cra_exit = safexcel_aead_cra_exit,
1933
+ .cra_module = THIS_MODULE,
1934
+ },
1935
+ },
1936
+};
1937
+
1938
+static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1939
+{
1940
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1941
+
1942
+ safexcel_aead_sha256_cra_init(tfm);
1943
+ ctx->alg = SAFEXCEL_3DES; /* override default */
1944
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1945
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1946
+ return 0;
1947
+}
1948
+
1949
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1950
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
1951
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1952
+ .alg.aead = {
1953
+ .setkey = safexcel_aead_setkey,
1954
+ .encrypt = safexcel_aead_encrypt,
1955
+ .decrypt = safexcel_aead_decrypt,
1956
+ .ivsize = DES3_EDE_BLOCK_SIZE,
1957
+ .maxauthsize = SHA256_DIGEST_SIZE,
1958
+ .base = {
1959
+ .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1960
+ .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1961
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
1962
+ .cra_flags = CRYPTO_ALG_ASYNC |
1963
+ CRYPTO_ALG_ALLOCATES_MEMORY |
1964
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
1965
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1966
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1967
+ .cra_alignmask = 0,
1968
+ .cra_init = safexcel_aead_sha256_des3_cra_init,
1969
+ .cra_exit = safexcel_aead_cra_exit,
1970
+ .cra_module = THIS_MODULE,
1971
+ },
1972
+ },
1973
+};
1974
+
1975
+static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1976
+{
1977
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1978
+
1979
+ safexcel_aead_sha224_cra_init(tfm);
1980
+ ctx->alg = SAFEXCEL_3DES; /* override default */
1981
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1982
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1983
+ return 0;
1984
+}
1985
+
1986
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
1987
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
1988
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1989
+ .alg.aead = {
1990
+ .setkey = safexcel_aead_setkey,
1991
+ .encrypt = safexcel_aead_encrypt,
1992
+ .decrypt = safexcel_aead_decrypt,
1993
+ .ivsize = DES3_EDE_BLOCK_SIZE,
1994
+ .maxauthsize = SHA224_DIGEST_SIZE,
1995
+ .base = {
1996
+ .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
1997
+ .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
1998
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
1999
+ .cra_flags = CRYPTO_ALG_ASYNC |
2000
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2001
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2002
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2003
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2004
+ .cra_alignmask = 0,
2005
+ .cra_init = safexcel_aead_sha224_des3_cra_init,
2006
+ .cra_exit = safexcel_aead_cra_exit,
2007
+ .cra_module = THIS_MODULE,
2008
+ },
2009
+ },
2010
+};
2011
+
2012
+static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2013
+{
2014
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2015
+
2016
+ safexcel_aead_sha512_cra_init(tfm);
2017
+ ctx->alg = SAFEXCEL_3DES; /* override default */
2018
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2019
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2020
+ return 0;
2021
+}
2022
+
2023
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
2024
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2025
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2026
+ .alg.aead = {
2027
+ .setkey = safexcel_aead_setkey,
2028
+ .encrypt = safexcel_aead_encrypt,
2029
+ .decrypt = safexcel_aead_decrypt,
2030
+ .ivsize = DES3_EDE_BLOCK_SIZE,
2031
+ .maxauthsize = SHA512_DIGEST_SIZE,
2032
+ .base = {
2033
+ .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
2034
+ .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
2035
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2036
+ .cra_flags = CRYPTO_ALG_ASYNC |
2037
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2038
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2039
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2040
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2041
+ .cra_alignmask = 0,
2042
+ .cra_init = safexcel_aead_sha512_des3_cra_init,
2043
+ .cra_exit = safexcel_aead_cra_exit,
2044
+ .cra_module = THIS_MODULE,
2045
+ },
2046
+ },
2047
+};
2048
+
2049
+static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2050
+{
2051
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2052
+
2053
+ safexcel_aead_sha384_cra_init(tfm);
2054
+ ctx->alg = SAFEXCEL_3DES; /* override default */
2055
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2056
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2057
+ return 0;
2058
+}
2059
+
2060
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
2061
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2062
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2063
+ .alg.aead = {
2064
+ .setkey = safexcel_aead_setkey,
2065
+ .encrypt = safexcel_aead_encrypt,
2066
+ .decrypt = safexcel_aead_decrypt,
2067
+ .ivsize = DES3_EDE_BLOCK_SIZE,
2068
+ .maxauthsize = SHA384_DIGEST_SIZE,
2069
+ .base = {
2070
+ .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
2071
+ .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2072
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2073
+ .cra_flags = CRYPTO_ALG_ASYNC |
2074
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2075
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2076
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2077
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2078
+ .cra_alignmask = 0,
2079
+ .cra_init = safexcel_aead_sha384_des3_cra_init,
2080
+ .cra_exit = safexcel_aead_cra_exit,
2081
+ .cra_module = THIS_MODULE,
2082
+ },
2083
+ },
2084
+};
2085
+
2086
+static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2087
+{
2088
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2089
+
2090
+ safexcel_aead_sha1_cra_init(tfm);
2091
+ ctx->alg = SAFEXCEL_DES; /* override default */
2092
+ ctx->blocksz = DES_BLOCK_SIZE;
2093
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2094
+ return 0;
2095
+}
2096
+
2097
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2098
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2099
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2100
+ .alg.aead = {
2101
+ .setkey = safexcel_aead_setkey,
2102
+ .encrypt = safexcel_aead_encrypt,
2103
+ .decrypt = safexcel_aead_decrypt,
2104
+ .ivsize = DES_BLOCK_SIZE,
2105
+ .maxauthsize = SHA1_DIGEST_SIZE,
2106
+ .base = {
2107
+ .cra_name = "authenc(hmac(sha1),cbc(des))",
2108
+ .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2109
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2110
+ .cra_flags = CRYPTO_ALG_ASYNC |
2111
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2112
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2113
+ .cra_blocksize = DES_BLOCK_SIZE,
2114
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2115
+ .cra_alignmask = 0,
2116
+ .cra_init = safexcel_aead_sha1_des_cra_init,
2117
+ .cra_exit = safexcel_aead_cra_exit,
2118
+ .cra_module = THIS_MODULE,
2119
+ },
2120
+ },
2121
+};
2122
+
2123
+static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2124
+{
2125
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2126
+
2127
+ safexcel_aead_sha256_cra_init(tfm);
2128
+ ctx->alg = SAFEXCEL_DES; /* override default */
2129
+ ctx->blocksz = DES_BLOCK_SIZE;
2130
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2131
+ return 0;
2132
+}
2133
+
2134
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2135
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2136
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2137
+ .alg.aead = {
2138
+ .setkey = safexcel_aead_setkey,
2139
+ .encrypt = safexcel_aead_encrypt,
2140
+ .decrypt = safexcel_aead_decrypt,
2141
+ .ivsize = DES_BLOCK_SIZE,
2142
+ .maxauthsize = SHA256_DIGEST_SIZE,
2143
+ .base = {
2144
+ .cra_name = "authenc(hmac(sha256),cbc(des))",
2145
+ .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2146
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2147
+ .cra_flags = CRYPTO_ALG_ASYNC |
2148
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2149
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2150
+ .cra_blocksize = DES_BLOCK_SIZE,
2151
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2152
+ .cra_alignmask = 0,
2153
+ .cra_init = safexcel_aead_sha256_des_cra_init,
2154
+ .cra_exit = safexcel_aead_cra_exit,
2155
+ .cra_module = THIS_MODULE,
2156
+ },
2157
+ },
2158
+};
2159
+
2160
+static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2161
+{
2162
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2163
+
2164
+ safexcel_aead_sha224_cra_init(tfm);
2165
+ ctx->alg = SAFEXCEL_DES; /* override default */
2166
+ ctx->blocksz = DES_BLOCK_SIZE;
2167
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2168
+ return 0;
2169
+}
2170
+
2171
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2172
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2173
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2174
+ .alg.aead = {
2175
+ .setkey = safexcel_aead_setkey,
2176
+ .encrypt = safexcel_aead_encrypt,
2177
+ .decrypt = safexcel_aead_decrypt,
2178
+ .ivsize = DES_BLOCK_SIZE,
2179
+ .maxauthsize = SHA224_DIGEST_SIZE,
2180
+ .base = {
2181
+ .cra_name = "authenc(hmac(sha224),cbc(des))",
2182
+ .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2183
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2184
+ .cra_flags = CRYPTO_ALG_ASYNC |
2185
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2186
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2187
+ .cra_blocksize = DES_BLOCK_SIZE,
2188
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2189
+ .cra_alignmask = 0,
2190
+ .cra_init = safexcel_aead_sha224_des_cra_init,
2191
+ .cra_exit = safexcel_aead_cra_exit,
2192
+ .cra_module = THIS_MODULE,
2193
+ },
2194
+ },
2195
+};
2196
+
2197
+static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2198
+{
2199
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2200
+
2201
+ safexcel_aead_sha512_cra_init(tfm);
2202
+ ctx->alg = SAFEXCEL_DES; /* override default */
2203
+ ctx->blocksz = DES_BLOCK_SIZE;
2204
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2205
+ return 0;
2206
+}
2207
+
2208
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2209
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2210
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2211
+ .alg.aead = {
2212
+ .setkey = safexcel_aead_setkey,
2213
+ .encrypt = safexcel_aead_encrypt,
2214
+ .decrypt = safexcel_aead_decrypt,
2215
+ .ivsize = DES_BLOCK_SIZE,
2216
+ .maxauthsize = SHA512_DIGEST_SIZE,
2217
+ .base = {
2218
+ .cra_name = "authenc(hmac(sha512),cbc(des))",
2219
+ .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2220
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2221
+ .cra_flags = CRYPTO_ALG_ASYNC |
2222
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2223
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2224
+ .cra_blocksize = DES_BLOCK_SIZE,
2225
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2226
+ .cra_alignmask = 0,
2227
+ .cra_init = safexcel_aead_sha512_des_cra_init,
2228
+ .cra_exit = safexcel_aead_cra_exit,
2229
+ .cra_module = THIS_MODULE,
2230
+ },
2231
+ },
2232
+};
2233
+
2234
+static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2235
+{
2236
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2237
+
2238
+ safexcel_aead_sha384_cra_init(tfm);
2239
+ ctx->alg = SAFEXCEL_DES; /* override default */
2240
+ ctx->blocksz = DES_BLOCK_SIZE;
2241
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2242
+ return 0;
2243
+}
2244
+
2245
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2246
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2247
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2248
+ .alg.aead = {
2249
+ .setkey = safexcel_aead_setkey,
2250
+ .encrypt = safexcel_aead_encrypt,
2251
+ .decrypt = safexcel_aead_decrypt,
2252
+ .ivsize = DES_BLOCK_SIZE,
2253
+ .maxauthsize = SHA384_DIGEST_SIZE,
2254
+ .base = {
2255
+ .cra_name = "authenc(hmac(sha384),cbc(des))",
2256
+ .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2257
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2258
+ .cra_flags = CRYPTO_ALG_ASYNC |
2259
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2260
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2261
+ .cra_blocksize = DES_BLOCK_SIZE,
2262
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2263
+ .cra_alignmask = 0,
2264
+ .cra_init = safexcel_aead_sha384_des_cra_init,
2265
+ .cra_exit = safexcel_aead_cra_exit,
2266
+ .cra_module = THIS_MODULE,
2267
+ },
2268
+ },
2269
+};
2270
+
2271
+static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2272
+{
2273
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2274
+
2275
+ safexcel_aead_sha1_cra_init(tfm);
2276
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2277
+ return 0;
2278
+}
2279
+
2280
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2281
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2282
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2283
+ .alg.aead = {
2284
+ .setkey = safexcel_aead_setkey,
2285
+ .encrypt = safexcel_aead_encrypt,
2286
+ .decrypt = safexcel_aead_decrypt,
2287
+ .ivsize = CTR_RFC3686_IV_SIZE,
2288
+ .maxauthsize = SHA1_DIGEST_SIZE,
2289
+ .base = {
2290
+ .cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2291
+ .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2292
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2293
+ .cra_flags = CRYPTO_ALG_ASYNC |
2294
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2295
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2296
+ .cra_blocksize = 1,
2297
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2298
+ .cra_alignmask = 0,
2299
+ .cra_init = safexcel_aead_sha1_ctr_cra_init,
2300
+ .cra_exit = safexcel_aead_cra_exit,
2301
+ .cra_module = THIS_MODULE,
2302
+ },
2303
+ },
2304
+};
2305
+
2306
+static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2307
+{
2308
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2309
+
2310
+ safexcel_aead_sha256_cra_init(tfm);
2311
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2312
+ return 0;
2313
+}
2314
+
2315
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2316
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2317
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2318
+ .alg.aead = {
2319
+ .setkey = safexcel_aead_setkey,
2320
+ .encrypt = safexcel_aead_encrypt,
2321
+ .decrypt = safexcel_aead_decrypt,
2322
+ .ivsize = CTR_RFC3686_IV_SIZE,
2323
+ .maxauthsize = SHA256_DIGEST_SIZE,
2324
+ .base = {
2325
+ .cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2326
+ .cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2327
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2328
+ .cra_flags = CRYPTO_ALG_ASYNC |
2329
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2330
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2331
+ .cra_blocksize = 1,
2332
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2333
+ .cra_alignmask = 0,
2334
+ .cra_init = safexcel_aead_sha256_ctr_cra_init,
2335
+ .cra_exit = safexcel_aead_cra_exit,
2336
+ .cra_module = THIS_MODULE,
2337
+ },
2338
+ },
2339
+};
2340
+
2341
+static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2342
+{
2343
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2344
+
2345
+ safexcel_aead_sha224_cra_init(tfm);
2346
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2347
+ return 0;
2348
+}
2349
+
2350
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2351
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2352
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2353
+ .alg.aead = {
2354
+ .setkey = safexcel_aead_setkey,
2355
+ .encrypt = safexcel_aead_encrypt,
2356
+ .decrypt = safexcel_aead_decrypt,
2357
+ .ivsize = CTR_RFC3686_IV_SIZE,
2358
+ .maxauthsize = SHA224_DIGEST_SIZE,
2359
+ .base = {
2360
+ .cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2361
+ .cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2362
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2363
+ .cra_flags = CRYPTO_ALG_ASYNC |
2364
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2365
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2366
+ .cra_blocksize = 1,
2367
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2368
+ .cra_alignmask = 0,
2369
+ .cra_init = safexcel_aead_sha224_ctr_cra_init,
2370
+ .cra_exit = safexcel_aead_cra_exit,
2371
+ .cra_module = THIS_MODULE,
2372
+ },
2373
+ },
2374
+};
2375
+
2376
+static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2377
+{
2378
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2379
+
2380
+ safexcel_aead_sha512_cra_init(tfm);
2381
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2382
+ return 0;
2383
+}
2384
+
2385
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2386
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2387
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2388
+ .alg.aead = {
2389
+ .setkey = safexcel_aead_setkey,
2390
+ .encrypt = safexcel_aead_encrypt,
2391
+ .decrypt = safexcel_aead_decrypt,
2392
+ .ivsize = CTR_RFC3686_IV_SIZE,
2393
+ .maxauthsize = SHA512_DIGEST_SIZE,
2394
+ .base = {
2395
+ .cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2396
+ .cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2397
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2398
+ .cra_flags = CRYPTO_ALG_ASYNC |
2399
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2400
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2401
+ .cra_blocksize = 1,
2402
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2403
+ .cra_alignmask = 0,
2404
+ .cra_init = safexcel_aead_sha512_ctr_cra_init,
2405
+ .cra_exit = safexcel_aead_cra_exit,
2406
+ .cra_module = THIS_MODULE,
2407
+ },
2408
+ },
2409
+};
2410
+
2411
+static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2412
+{
2413
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2414
+
2415
+ safexcel_aead_sha384_cra_init(tfm);
2416
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2417
+ return 0;
2418
+}
2419
+
2420
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2421
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2422
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2423
+ .alg.aead = {
2424
+ .setkey = safexcel_aead_setkey,
2425
+ .encrypt = safexcel_aead_encrypt,
2426
+ .decrypt = safexcel_aead_decrypt,
2427
+ .ivsize = CTR_RFC3686_IV_SIZE,
2428
+ .maxauthsize = SHA384_DIGEST_SIZE,
2429
+ .base = {
2430
+ .cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2431
+ .cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2432
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2433
+ .cra_flags = CRYPTO_ALG_ASYNC |
2434
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2435
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2436
+ .cra_blocksize = 1,
2437
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2438
+ .cra_alignmask = 0,
2439
+ .cra_init = safexcel_aead_sha384_ctr_cra_init,
2440
+ .cra_exit = safexcel_aead_cra_exit,
2441
+ .cra_module = THIS_MODULE,
2442
+ },
2443
+ },
2444
+};
2445
+
2446
+static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2447
+ const u8 *key, unsigned int len)
2448
+{
2449
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2450
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2451
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
2452
+ struct crypto_aes_ctx aes;
2453
+ int ret, i;
2454
+ unsigned int keylen;
2455
+
2456
+ /* Check for illegal XTS keys */
2457
+ ret = xts_verify_key(ctfm, key, len);
2458
+ if (ret)
2459
+ return ret;
2460
+
2461
+ /* Only half of the key data is cipher key */
2462
+ keylen = (len >> 1);
2463
+ ret = aes_expandkey(&aes, key, keylen);
2464
+ if (ret)
2465
+ return ret;
2466
+
2467
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2468
+ for (i = 0; i < keylen / sizeof(u32); i++) {
2469
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2470
+ ctx->base.needs_inv = true;
2471
+ break;
2472
+ }
2473
+ }
2474
+ }
2475
+
2476
+ for (i = 0; i < keylen / sizeof(u32); i++)
2477
+ ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2478
+
2479
+ /* The other half is the tweak key */
2480
+ ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2481
+ if (ret)
2482
+ return ret;
2483
+
2484
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2485
+ for (i = 0; i < keylen / sizeof(u32); i++) {
2486
+ if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2487
+ aes.key_enc[i]) {
2488
+ ctx->base.needs_inv = true;
2489
+ break;
2490
+ }
2491
+ }
2492
+ }
2493
+
2494
+ for (i = 0; i < keylen / sizeof(u32); i++)
2495
+ ctx->key[i + keylen / sizeof(u32)] =
2496
+ cpu_to_le32(aes.key_enc[i]);
2497
+
2498
+ ctx->key_len = keylen << 1;
2499
+
2500
+ memzero_explicit(&aes, sizeof(aes));
2501
+ return 0;
2502
+}
2503
+
2504
+static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2505
+{
2506
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2507
+
2508
+ safexcel_skcipher_cra_init(tfm);
2509
+ ctx->alg = SAFEXCEL_AES;
2510
+ ctx->blocksz = AES_BLOCK_SIZE;
2511
+ ctx->xts = 1;
2512
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2513
+ return 0;
2514
+}
2515
+
2516
+static int safexcel_encrypt_xts(struct skcipher_request *req)
2517
+{
2518
+ if (req->cryptlen < XTS_BLOCK_SIZE)
2519
+ return -EINVAL;
2520
+ return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2521
+ SAFEXCEL_ENCRYPT);
2522
+}
2523
+
2524
+static int safexcel_decrypt_xts(struct skcipher_request *req)
2525
+{
2526
+ if (req->cryptlen < XTS_BLOCK_SIZE)
2527
+ return -EINVAL;
2528
+ return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2529
+ SAFEXCEL_DECRYPT);
2530
+}
2531
+
2532
+struct safexcel_alg_template safexcel_alg_xts_aes = {
2533
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2534
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2535
+ .alg.skcipher = {
2536
+ .setkey = safexcel_skcipher_aesxts_setkey,
2537
+ .encrypt = safexcel_encrypt_xts,
2538
+ .decrypt = safexcel_decrypt_xts,
2539
+ /* XTS actually uses 2 AES keys glued together */
2540
+ .min_keysize = AES_MIN_KEY_SIZE * 2,
2541
+ .max_keysize = AES_MAX_KEY_SIZE * 2,
2542
+ .ivsize = XTS_BLOCK_SIZE,
2543
+ .base = {
2544
+ .cra_name = "xts(aes)",
2545
+ .cra_driver_name = "safexcel-xts-aes",
2546
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2547
+ .cra_flags = CRYPTO_ALG_ASYNC |
2548
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2549
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2550
+ .cra_blocksize = XTS_BLOCK_SIZE,
2551
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2552
+ .cra_alignmask = 0,
2553
+ .cra_init = safexcel_skcipher_aes_xts_cra_init,
2554
+ .cra_exit = safexcel_skcipher_cra_exit,
2555
+ .cra_module = THIS_MODULE,
2556
+ },
2557
+ },
2558
+};
2559
+
2560
+static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2561
+ unsigned int len)
2562
+{
2563
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2564
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2565
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
2566
+ struct crypto_aes_ctx aes;
2567
+ u32 hashkey[AES_BLOCK_SIZE >> 2];
2568
+ int ret, i;
2569
+
2570
+ ret = aes_expandkey(&aes, key, len);
2571
+ if (ret) {
2572
+ memzero_explicit(&aes, sizeof(aes));
2573
+ return ret;
2574
+ }
2575
+
2576
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2577
+ for (i = 0; i < len / sizeof(u32); i++) {
2578
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2579
+ ctx->base.needs_inv = true;
2580
+ break;
2581
+ }
2582
+ }
2583
+ }
2584
+
2585
+ for (i = 0; i < len / sizeof(u32); i++)
2586
+ ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2587
+
2588
+ ctx->key_len = len;
2589
+
2590
+ /* Compute hash key by encrypting zeroes with cipher key */
2591
+ crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2592
+ crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2593
+ CRYPTO_TFM_REQ_MASK);
2594
+ ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2595
+ if (ret)
2596
+ return ret;
2597
+
2598
+ memset(hashkey, 0, AES_BLOCK_SIZE);
2599
+ crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2600
+
2601
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2602
+ for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2603
+ if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2604
+ ctx->base.needs_inv = true;
2605
+ break;
2606
+ }
2607
+ }
2608
+ }
2609
+
2610
+ for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2611
+ ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2612
+
2613
+ memzero_explicit(hashkey, AES_BLOCK_SIZE);
2614
+ memzero_explicit(&aes, sizeof(aes));
2615
+ return 0;
2616
+}
2617
+
2618
+static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2619
+{
2620
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2621
+
2622
+ safexcel_aead_cra_init(tfm);
2623
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2624
+ ctx->state_sz = GHASH_BLOCK_SIZE;
2625
+ ctx->xcm = EIP197_XCM_MODE_GCM;
2626
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2627
+
2628
+ ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2629
+ return PTR_ERR_OR_ZERO(ctx->hkaes);
2630
+}
2631
+
2632
+static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2633
+{
2634
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2635
+
2636
+ crypto_free_cipher(ctx->hkaes);
2637
+ safexcel_aead_cra_exit(tfm);
2638
+}
2639
+
2640
+static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2641
+ unsigned int authsize)
2642
+{
2643
+ return crypto_gcm_check_authsize(authsize);
2644
+}
2645
+
2646
+struct safexcel_alg_template safexcel_alg_gcm = {
2647
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2648
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2649
+ .alg.aead = {
2650
+ .setkey = safexcel_aead_gcm_setkey,
2651
+ .setauthsize = safexcel_aead_gcm_setauthsize,
2652
+ .encrypt = safexcel_aead_encrypt,
2653
+ .decrypt = safexcel_aead_decrypt,
2654
+ .ivsize = GCM_AES_IV_SIZE,
2655
+ .maxauthsize = GHASH_DIGEST_SIZE,
2656
+ .base = {
2657
+ .cra_name = "gcm(aes)",
2658
+ .cra_driver_name = "safexcel-gcm-aes",
2659
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2660
+ .cra_flags = CRYPTO_ALG_ASYNC |
2661
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2662
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2663
+ .cra_blocksize = 1,
2664
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2665
+ .cra_alignmask = 0,
2666
+ .cra_init = safexcel_aead_gcm_cra_init,
2667
+ .cra_exit = safexcel_aead_gcm_cra_exit,
2668
+ .cra_module = THIS_MODULE,
2669
+ },
2670
+ },
2671
+};
2672
+
2673
+static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2674
+ unsigned int len)
2675
+{
2676
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2677
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2678
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
2679
+ struct crypto_aes_ctx aes;
2680
+ int ret, i;
2681
+
2682
+ ret = aes_expandkey(&aes, key, len);
2683
+ if (ret) {
2684
+ memzero_explicit(&aes, sizeof(aes));
2685
+ return ret;
2686
+ }
2687
+
2688
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2689
+ for (i = 0; i < len / sizeof(u32); i++) {
2690
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2691
+ ctx->base.needs_inv = true;
2692
+ break;
2693
+ }
2694
+ }
2695
+ }
2696
+
2697
+ for (i = 0; i < len / sizeof(u32); i++) {
2698
+ ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2699
+ ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2700
+ cpu_to_be32(aes.key_enc[i]);
2701
+ }
2702
+
2703
+ ctx->key_len = len;
2704
+ ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2705
+
2706
+ if (len == AES_KEYSIZE_192)
2707
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2708
+ else if (len == AES_KEYSIZE_256)
2709
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2710
+ else
2711
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2712
+
2713
+ memzero_explicit(&aes, sizeof(aes));
2714
+ return 0;
2715
+}
2716
+
2717
+static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2718
+{
2719
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2720
+
2721
+ safexcel_aead_cra_init(tfm);
2722
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2723
+ ctx->state_sz = 3 * AES_BLOCK_SIZE;
2724
+ ctx->xcm = EIP197_XCM_MODE_CCM;
2725
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2726
+ ctx->ctrinit = 0;
2727
+ return 0;
2728
+}
2729
+
2730
+static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2731
+ unsigned int authsize)
2732
+{
2733
+ /* Borrowed from crypto/ccm.c */
2734
+ switch (authsize) {
2735
+ case 4:
2736
+ case 6:
2737
+ case 8:
2738
+ case 10:
2739
+ case 12:
2740
+ case 14:
2741
+ case 16:
2742
+ break;
2743
+ default:
2744
+ return -EINVAL;
2745
+ }
2746
+
2747
+ return 0;
2748
+}
2749
+
2750
+static int safexcel_ccm_encrypt(struct aead_request *req)
2751
+{
2752
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
2753
+
2754
+ if (req->iv[0] < 1 || req->iv[0] > 7)
2755
+ return -EINVAL;
2756
+
2757
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2758
+}
2759
+
2760
+static int safexcel_ccm_decrypt(struct aead_request *req)
2761
+{
2762
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
2763
+
2764
+ if (req->iv[0] < 1 || req->iv[0] > 7)
2765
+ return -EINVAL;
2766
+
2767
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2768
+}
2769
+
2770
+struct safexcel_alg_template safexcel_alg_ccm = {
2771
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2772
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2773
+ .alg.aead = {
2774
+ .setkey = safexcel_aead_ccm_setkey,
2775
+ .setauthsize = safexcel_aead_ccm_setauthsize,
2776
+ .encrypt = safexcel_ccm_encrypt,
2777
+ .decrypt = safexcel_ccm_decrypt,
2778
+ .ivsize = AES_BLOCK_SIZE,
2779
+ .maxauthsize = AES_BLOCK_SIZE,
2780
+ .base = {
2781
+ .cra_name = "ccm(aes)",
2782
+ .cra_driver_name = "safexcel-ccm-aes",
2783
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2784
+ .cra_flags = CRYPTO_ALG_ASYNC |
2785
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2786
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2787
+ .cra_blocksize = 1,
2788
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2789
+ .cra_alignmask = 0,
2790
+ .cra_init = safexcel_aead_ccm_cra_init,
2791
+ .cra_exit = safexcel_aead_cra_exit,
2792
+ .cra_module = THIS_MODULE,
2793
+ },
2794
+ },
2795
+};
2796
+
2797
+static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2798
+ const u8 *key)
2799
+{
2800
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
2801
+
2802
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2803
+ if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2804
+ ctx->base.needs_inv = true;
2805
+
2806
+ memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2807
+ ctx->key_len = CHACHA_KEY_SIZE;
2808
+}
2809
+
2810
+static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2811
+ const u8 *key, unsigned int len)
2812
+{
2813
+ struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2814
+
2815
+ if (len != CHACHA_KEY_SIZE)
2816
+ return -EINVAL;
2817
+
2818
+ safexcel_chacha20_setkey(ctx, key);
2819
+
2820
+ return 0;
2821
+}
2822
+
2823
+static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2824
+{
2825
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2826
+
2827
+ safexcel_skcipher_cra_init(tfm);
2828
+ ctx->alg = SAFEXCEL_CHACHA20;
2829
+ ctx->ctrinit = 0;
2830
+ ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2831
+ return 0;
2832
+}
2833
+
2834
+struct safexcel_alg_template safexcel_alg_chacha20 = {
2835
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2836
+ .algo_mask = SAFEXCEL_ALG_CHACHA20,
2837
+ .alg.skcipher = {
2838
+ .setkey = safexcel_skcipher_chacha20_setkey,
2839
+ .encrypt = safexcel_encrypt,
2840
+ .decrypt = safexcel_decrypt,
2841
+ .min_keysize = CHACHA_KEY_SIZE,
2842
+ .max_keysize = CHACHA_KEY_SIZE,
2843
+ .ivsize = CHACHA_IV_SIZE,
2844
+ .base = {
2845
+ .cra_name = "chacha20",
2846
+ .cra_driver_name = "safexcel-chacha20",
2847
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
2848
+ .cra_flags = CRYPTO_ALG_ASYNC |
2849
+ CRYPTO_ALG_ALLOCATES_MEMORY |
2850
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
2851
+ .cra_blocksize = 1,
2852
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2853
+ .cra_alignmask = 0,
2854
+ .cra_init = safexcel_skcipher_chacha20_cra_init,
2855
+ .cra_exit = safexcel_skcipher_cra_exit,
2856
+ .cra_module = THIS_MODULE,
2857
+ },
2858
+ },
2859
+};
2860
+
2861
+static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2862
+ const u8 *key, unsigned int len)
2863
+{
2864
+ struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2865
+
2866
+ if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
2867
+ len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2868
+ /* ESP variant has nonce appended to key */
2869
+ len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2870
+ ctx->nonce = *(u32 *)(key + len);
2871
+ }
2872
+ if (len != CHACHA_KEY_SIZE)
2873
+ return -EINVAL;
2874
+
2875
+ safexcel_chacha20_setkey(ctx, key);
2876
+
2877
+ return 0;
2878
+}
2879
+
2880
+static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2881
+ unsigned int authsize)
2882
+{
2883
+ if (authsize != POLY1305_DIGEST_SIZE)
2884
+ return -EINVAL;
2885
+ return 0;
2886
+}
2887
+
2888
+static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2889
+ enum safexcel_cipher_direction dir)
2890
+{
2891
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
2892
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
2893
+ struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2894
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2895
+ struct aead_request *subreq = aead_request_ctx(req);
2896
+ u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2897
+ int ret = 0;
2898
+
2899
+ /*
2900
+ * Instead of wasting time detecting umpteen silly corner cases,
2901
+ * just dump all "small" requests to the fallback implementation.
2902
+ * HW would not be faster on such small requests anyway.
2903
+ */
2904
+ if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2905
+ req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2906
+ req->cryptlen > POLY1305_DIGEST_SIZE)) {
2907
+ return safexcel_queue_req(&req->base, creq, dir);
2908
+ }
2909
+
2910
+ /* HW cannot do full (AAD+payload) zero length, use fallback */
2911
+ memcpy(key, ctx->key, CHACHA_KEY_SIZE);
2912
+ if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2913
+ /* ESP variant has nonce appended to the key */
2914
+ key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2915
+ ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2916
+ CHACHA_KEY_SIZE +
2917
+ EIP197_AEAD_IPSEC_NONCE_SIZE);
2918
+ } else {
2919
+ ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2920
+ CHACHA_KEY_SIZE);
2921
+ }
2922
+ if (ret) {
2923
+ crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2924
+ crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2925
+ CRYPTO_TFM_REQ_MASK);
2926
+ return ret;
2927
+ }
2928
+
2929
+ aead_request_set_tfm(subreq, ctx->fback);
2930
+ aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2931
+ req->base.data);
2932
+ aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2933
+ req->iv);
2934
+ aead_request_set_ad(subreq, req->assoclen);
2935
+
2936
+ return (dir == SAFEXCEL_ENCRYPT) ?
2937
+ crypto_aead_encrypt(subreq) :
2938
+ crypto_aead_decrypt(subreq);
2939
+}
2940
+
2941
+static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2942
+{
2943
+ return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2944
+}
2945
+
2946
+static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2947
+{
2948
+ return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2949
+}
2950
+
2951
+static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2952
+{
2953
+ struct crypto_aead *aead = __crypto_aead_cast(tfm);
2954
+ struct aead_alg *alg = crypto_aead_alg(aead);
2955
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2956
+
2957
+ safexcel_aead_cra_init(tfm);
2958
+
2959
+ /* Allocate fallback implementation */
2960
+ ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2961
+ CRYPTO_ALG_ASYNC |
2962
+ CRYPTO_ALG_NEED_FALLBACK);
2963
+ if (IS_ERR(ctx->fback))
2964
+ return PTR_ERR(ctx->fback);
2965
+
2966
+ crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2967
+ sizeof(struct aead_request) +
2968
+ crypto_aead_reqsize(ctx->fback)));
2969
+
2970
+ return 0;
2971
+}
2972
+
2973
+static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2974
+{
2975
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2976
+
2977
+ safexcel_aead_fallback_cra_init(tfm);
2978
+ ctx->alg = SAFEXCEL_CHACHA20;
2979
+ ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2980
+ CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2981
+ ctx->ctrinit = 0;
2982
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2983
+ ctx->state_sz = 0; /* Precomputed by HW */
2984
+ return 0;
2985
+}
2986
+
2987
+static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2988
+{
2989
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2990
+
2991
+ crypto_free_aead(ctx->fback);
2992
+ safexcel_aead_cra_exit(tfm);
2993
+}
2994
+
2995
+struct safexcel_alg_template safexcel_alg_chachapoly = {
2996
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
2997
+ .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2998
+ .alg.aead = {
2999
+ .setkey = safexcel_aead_chachapoly_setkey,
3000
+ .setauthsize = safexcel_aead_chachapoly_setauthsize,
3001
+ .encrypt = safexcel_aead_chachapoly_encrypt,
3002
+ .decrypt = safexcel_aead_chachapoly_decrypt,
3003
+ .ivsize = CHACHAPOLY_IV_SIZE,
3004
+ .maxauthsize = POLY1305_DIGEST_SIZE,
3005
+ .base = {
3006
+ .cra_name = "rfc7539(chacha20,poly1305)",
3007
+ .cra_driver_name = "safexcel-chacha20-poly1305",
3008
+ /* +1 to put it above HW chacha + SW poly */
3009
+ .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3010
+ .cra_flags = CRYPTO_ALG_ASYNC |
3011
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3012
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
3013
+ CRYPTO_ALG_NEED_FALLBACK,
3014
+ .cra_blocksize = 1,
3015
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3016
+ .cra_alignmask = 0,
3017
+ .cra_init = safexcel_aead_chachapoly_cra_init,
3018
+ .cra_exit = safexcel_aead_fallback_cra_exit,
3019
+ .cra_module = THIS_MODULE,
3020
+ },
3021
+ },
3022
+};
3023
+
3024
+static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3025
+{
3026
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3027
+ int ret;
3028
+
3029
+ ret = safexcel_aead_chachapoly_cra_init(tfm);
3030
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
3031
+ ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3032
+ return ret;
3033
+}
3034
+
3035
+struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
3036
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
3037
+ .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
3038
+ .alg.aead = {
3039
+ .setkey = safexcel_aead_chachapoly_setkey,
3040
+ .setauthsize = safexcel_aead_chachapoly_setauthsize,
3041
+ .encrypt = safexcel_aead_chachapoly_encrypt,
3042
+ .decrypt = safexcel_aead_chachapoly_decrypt,
3043
+ .ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
3044
+ .maxauthsize = POLY1305_DIGEST_SIZE,
3045
+ .base = {
3046
+ .cra_name = "rfc7539esp(chacha20,poly1305)",
3047
+ .cra_driver_name = "safexcel-chacha20-poly1305-esp",
3048
+ /* +1 to put it above HW chacha + SW poly */
3049
+ .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3050
+ .cra_flags = CRYPTO_ALG_ASYNC |
3051
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3052
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
3053
+ CRYPTO_ALG_NEED_FALLBACK,
3054
+ .cra_blocksize = 1,
3055
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3056
+ .cra_alignmask = 0,
3057
+ .cra_init = safexcel_aead_chachapolyesp_cra_init,
3058
+ .cra_exit = safexcel_aead_fallback_cra_exit,
3059
+ .cra_module = THIS_MODULE,
3060
+ },
3061
+ },
3062
+};
3063
+
3064
+static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3065
+ const u8 *key, unsigned int len)
3066
+{
3067
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3068
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3069
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
3070
+
3071
+ if (len != SM4_KEY_SIZE)
3072
+ return -EINVAL;
3073
+
3074
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3075
+ if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3076
+ ctx->base.needs_inv = true;
3077
+
3078
+ memcpy(ctx->key, key, SM4_KEY_SIZE);
3079
+ ctx->key_len = SM4_KEY_SIZE;
3080
+
3081
+ return 0;
3082
+}
3083
+
3084
+static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3085
+{
3086
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3087
+ if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3088
+ return -EINVAL;
3089
+ else
3090
+ return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3091
+ SAFEXCEL_ENCRYPT);
3092
+}
3093
+
3094
+static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3095
+{
3096
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3097
+ if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3098
+ return -EINVAL;
3099
+ else
3100
+ return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3101
+ SAFEXCEL_DECRYPT);
3102
+}
3103
+
3104
+static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3105
+{
3106
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3107
+
3108
+ safexcel_skcipher_cra_init(tfm);
3109
+ ctx->alg = SAFEXCEL_SM4;
3110
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3111
+ ctx->blocksz = 0;
3112
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3113
+ return 0;
3114
+}
3115
+
3116
+struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3117
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3118
+ .algo_mask = SAFEXCEL_ALG_SM4,
3119
+ .alg.skcipher = {
3120
+ .setkey = safexcel_skcipher_sm4_setkey,
3121
+ .encrypt = safexcel_sm4_blk_encrypt,
3122
+ .decrypt = safexcel_sm4_blk_decrypt,
3123
+ .min_keysize = SM4_KEY_SIZE,
3124
+ .max_keysize = SM4_KEY_SIZE,
3125
+ .base = {
3126
+ .cra_name = "ecb(sm4)",
3127
+ .cra_driver_name = "safexcel-ecb-sm4",
3128
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3129
+ .cra_flags = CRYPTO_ALG_ASYNC |
3130
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3131
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3132
+ .cra_blocksize = SM4_BLOCK_SIZE,
3133
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3134
+ .cra_alignmask = 0,
3135
+ .cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3136
+ .cra_exit = safexcel_skcipher_cra_exit,
3137
+ .cra_module = THIS_MODULE,
3138
+ },
3139
+ },
3140
+};
3141
+
3142
+static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3143
+{
3144
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3145
+
3146
+ safexcel_skcipher_cra_init(tfm);
3147
+ ctx->alg = SAFEXCEL_SM4;
3148
+ ctx->blocksz = SM4_BLOCK_SIZE;
3149
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3150
+ return 0;
3151
+}
3152
+
3153
+struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3154
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3155
+ .algo_mask = SAFEXCEL_ALG_SM4,
3156
+ .alg.skcipher = {
3157
+ .setkey = safexcel_skcipher_sm4_setkey,
3158
+ .encrypt = safexcel_sm4_blk_encrypt,
3159
+ .decrypt = safexcel_sm4_blk_decrypt,
3160
+ .min_keysize = SM4_KEY_SIZE,
3161
+ .max_keysize = SM4_KEY_SIZE,
3162
+ .ivsize = SM4_BLOCK_SIZE,
3163
+ .base = {
3164
+ .cra_name = "cbc(sm4)",
3165
+ .cra_driver_name = "safexcel-cbc-sm4",
3166
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3167
+ .cra_flags = CRYPTO_ALG_ASYNC |
3168
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3169
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3170
+ .cra_blocksize = SM4_BLOCK_SIZE,
3171
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3172
+ .cra_alignmask = 0,
3173
+ .cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3174
+ .cra_exit = safexcel_skcipher_cra_exit,
3175
+ .cra_module = THIS_MODULE,
3176
+ },
3177
+ },
3178
+};
3179
+
3180
+static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3181
+{
3182
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3183
+
3184
+ safexcel_skcipher_cra_init(tfm);
3185
+ ctx->alg = SAFEXCEL_SM4;
3186
+ ctx->blocksz = SM4_BLOCK_SIZE;
3187
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3188
+ return 0;
3189
+}
3190
+
3191
+struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
3192
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3193
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3194
+ .alg.skcipher = {
3195
+ .setkey = safexcel_skcipher_sm4_setkey,
3196
+ .encrypt = safexcel_encrypt,
3197
+ .decrypt = safexcel_decrypt,
3198
+ .min_keysize = SM4_KEY_SIZE,
3199
+ .max_keysize = SM4_KEY_SIZE,
3200
+ .ivsize = SM4_BLOCK_SIZE,
3201
+ .base = {
3202
+ .cra_name = "ofb(sm4)",
3203
+ .cra_driver_name = "safexcel-ofb-sm4",
3204
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3205
+ .cra_flags = CRYPTO_ALG_ASYNC |
3206
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3207
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3208
+ .cra_blocksize = 1,
3209
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3210
+ .cra_alignmask = 0,
3211
+ .cra_init = safexcel_skcipher_sm4_ofb_cra_init,
3212
+ .cra_exit = safexcel_skcipher_cra_exit,
3213
+ .cra_module = THIS_MODULE,
3214
+ },
3215
+ },
3216
+};
3217
+
3218
+static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3219
+{
3220
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3221
+
3222
+ safexcel_skcipher_cra_init(tfm);
3223
+ ctx->alg = SAFEXCEL_SM4;
3224
+ ctx->blocksz = SM4_BLOCK_SIZE;
3225
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3226
+ return 0;
3227
+}
3228
+
3229
+struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
3230
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3231
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3232
+ .alg.skcipher = {
3233
+ .setkey = safexcel_skcipher_sm4_setkey,
3234
+ .encrypt = safexcel_encrypt,
3235
+ .decrypt = safexcel_decrypt,
3236
+ .min_keysize = SM4_KEY_SIZE,
3237
+ .max_keysize = SM4_KEY_SIZE,
3238
+ .ivsize = SM4_BLOCK_SIZE,
3239
+ .base = {
3240
+ .cra_name = "cfb(sm4)",
3241
+ .cra_driver_name = "safexcel-cfb-sm4",
3242
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3243
+ .cra_flags = CRYPTO_ALG_ASYNC |
3244
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3245
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3246
+ .cra_blocksize = 1,
3247
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3248
+ .cra_alignmask = 0,
3249
+ .cra_init = safexcel_skcipher_sm4_cfb_cra_init,
3250
+ .cra_exit = safexcel_skcipher_cra_exit,
3251
+ .cra_module = THIS_MODULE,
3252
+ },
3253
+ },
3254
+};
3255
+
3256
+static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3257
+ const u8 *key, unsigned int len)
3258
+{
3259
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3260
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3261
+
3262
+ /* last 4 bytes of key are the nonce! */
3263
+ ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3264
+ /* exclude the nonce here */
3265
+ len -= CTR_RFC3686_NONCE_SIZE;
3266
+
3267
+ return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3268
+}
3269
+
3270
+static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3271
+{
3272
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3273
+
3274
+ safexcel_skcipher_cra_init(tfm);
3275
+ ctx->alg = SAFEXCEL_SM4;
3276
+ ctx->blocksz = SM4_BLOCK_SIZE;
3277
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3278
+ return 0;
3279
+}
3280
+
3281
+struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
3282
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3283
+ .algo_mask = SAFEXCEL_ALG_SM4,
3284
+ .alg.skcipher = {
3285
+ .setkey = safexcel_skcipher_sm4ctr_setkey,
3286
+ .encrypt = safexcel_encrypt,
3287
+ .decrypt = safexcel_decrypt,
3288
+ /* Add nonce size */
3289
+ .min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3290
+ .max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3291
+ .ivsize = CTR_RFC3686_IV_SIZE,
3292
+ .base = {
3293
+ .cra_name = "rfc3686(ctr(sm4))",
3294
+ .cra_driver_name = "safexcel-ctr-sm4",
3295
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3296
+ .cra_flags = CRYPTO_ALG_ASYNC |
3297
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3298
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3299
+ .cra_blocksize = 1,
3300
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3301
+ .cra_alignmask = 0,
3302
+ .cra_init = safexcel_skcipher_sm4_ctr_cra_init,
3303
+ .cra_exit = safexcel_skcipher_cra_exit,
3304
+ .cra_module = THIS_MODULE,
3305
+ },
3306
+ },
3307
+};
3308
+
3309
+static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3310
+{
3311
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3312
+ if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3313
+ return -EINVAL;
3314
+
3315
+ return safexcel_queue_req(&req->base, aead_request_ctx(req),
3316
+ SAFEXCEL_ENCRYPT);
3317
+}
3318
+
3319
+static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3320
+{
3321
+ struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3322
+
3323
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3324
+ if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3325
+ return -EINVAL;
3326
+
3327
+ return safexcel_queue_req(&req->base, aead_request_ctx(req),
3328
+ SAFEXCEL_DECRYPT);
3329
+}
3330
+
3331
+static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3332
+{
3333
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3334
+
3335
+ safexcel_aead_cra_init(tfm);
3336
+ ctx->alg = SAFEXCEL_SM4;
3337
+ ctx->blocksz = SM4_BLOCK_SIZE;
3338
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3339
+ ctx->state_sz = SHA1_DIGEST_SIZE;
3340
+ return 0;
3341
+}
3342
+
3343
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
3344
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
3345
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3346
+ .alg.aead = {
3347
+ .setkey = safexcel_aead_setkey,
3348
+ .encrypt = safexcel_aead_sm4_blk_encrypt,
3349
+ .decrypt = safexcel_aead_sm4_blk_decrypt,
3350
+ .ivsize = SM4_BLOCK_SIZE,
3351
+ .maxauthsize = SHA1_DIGEST_SIZE,
3352
+ .base = {
3353
+ .cra_name = "authenc(hmac(sha1),cbc(sm4))",
3354
+ .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
3355
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3356
+ .cra_flags = CRYPTO_ALG_ASYNC |
3357
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3358
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3359
+ .cra_blocksize = SM4_BLOCK_SIZE,
3360
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3361
+ .cra_alignmask = 0,
3362
+ .cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
3363
+ .cra_exit = safexcel_aead_cra_exit,
3364
+ .cra_module = THIS_MODULE,
3365
+ },
3366
+ },
3367
+};
3368
+
3369
+static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3370
+ const u8 *key, unsigned int len)
3371
+{
3372
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3373
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3374
+
3375
+ /* Keep fallback cipher synchronized */
3376
+ return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3377
+ safexcel_aead_setkey(ctfm, key, len);
3378
+}
3379
+
3380
+static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3381
+ unsigned int authsize)
3382
+{
3383
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3384
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3385
+
3386
+ /* Keep fallback cipher synchronized */
3387
+ return crypto_aead_setauthsize(ctx->fback, authsize);
3388
+}
3389
+
3390
+static int safexcel_aead_fallback_crypt(struct aead_request *req,
3391
+ enum safexcel_cipher_direction dir)
3392
+{
3393
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
3394
+ struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3395
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3396
+ struct aead_request *subreq = aead_request_ctx(req);
3397
+
3398
+ aead_request_set_tfm(subreq, ctx->fback);
3399
+ aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3400
+ req->base.data);
3401
+ aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3402
+ req->iv);
3403
+ aead_request_set_ad(subreq, req->assoclen);
3404
+
3405
+ return (dir == SAFEXCEL_ENCRYPT) ?
3406
+ crypto_aead_encrypt(subreq) :
3407
+ crypto_aead_decrypt(subreq);
3408
+}
3409
+
3410
+static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3411
+{
3412
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
3413
+
3414
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3415
+ if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3416
+ return -EINVAL;
3417
+ else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3418
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3419
+
3420
+ /* HW cannot do full (AAD+payload) zero length, use fallback */
3421
+ return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3422
+}
3423
+
3424
+static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3425
+{
3426
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
3427
+ struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3428
+
3429
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3430
+ if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3431
+ return -EINVAL;
3432
+ else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3433
+ /* If input length > 0 only */
3434
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3435
+
3436
+ /* HW cannot do full (AAD+payload) zero length, use fallback */
3437
+ return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3438
+}
3439
+
3440
+static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3441
+{
3442
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3443
+
3444
+ safexcel_aead_fallback_cra_init(tfm);
3445
+ ctx->alg = SAFEXCEL_SM4;
3446
+ ctx->blocksz = SM4_BLOCK_SIZE;
3447
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3448
+ ctx->state_sz = SM3_DIGEST_SIZE;
3449
+ return 0;
3450
+}
3451
+
3452
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
3453
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
3454
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3455
+ .alg.aead = {
3456
+ .setkey = safexcel_aead_fallback_setkey,
3457
+ .setauthsize = safexcel_aead_fallback_setauthsize,
3458
+ .encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
3459
+ .decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
3460
+ .ivsize = SM4_BLOCK_SIZE,
3461
+ .maxauthsize = SM3_DIGEST_SIZE,
3462
+ .base = {
3463
+ .cra_name = "authenc(hmac(sm3),cbc(sm4))",
3464
+ .cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
3465
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3466
+ .cra_flags = CRYPTO_ALG_ASYNC |
3467
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3468
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
3469
+ CRYPTO_ALG_NEED_FALLBACK,
3470
+ .cra_blocksize = SM4_BLOCK_SIZE,
3471
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3472
+ .cra_alignmask = 0,
3473
+ .cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
3474
+ .cra_exit = safexcel_aead_fallback_cra_exit,
3475
+ .cra_module = THIS_MODULE,
3476
+ },
3477
+ },
3478
+};
3479
+
3480
+static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3481
+{
3482
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3483
+
3484
+ safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3485
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3486
+ return 0;
3487
+}
3488
+
3489
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
3490
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
3491
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3492
+ .alg.aead = {
3493
+ .setkey = safexcel_aead_setkey,
3494
+ .encrypt = safexcel_aead_encrypt,
3495
+ .decrypt = safexcel_aead_decrypt,
3496
+ .ivsize = CTR_RFC3686_IV_SIZE,
3497
+ .maxauthsize = SHA1_DIGEST_SIZE,
3498
+ .base = {
3499
+ .cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
3500
+ .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
3501
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3502
+ .cra_flags = CRYPTO_ALG_ASYNC |
3503
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3504
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3505
+ .cra_blocksize = 1,
3506
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3507
+ .cra_alignmask = 0,
3508
+ .cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
3509
+ .cra_exit = safexcel_aead_cra_exit,
3510
+ .cra_module = THIS_MODULE,
3511
+ },
3512
+ },
3513
+};
3514
+
3515
+static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3516
+{
3517
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3518
+
3519
+ safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3520
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3521
+ return 0;
3522
+}
3523
+
3524
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
3525
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
3526
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3527
+ .alg.aead = {
3528
+ .setkey = safexcel_aead_setkey,
3529
+ .encrypt = safexcel_aead_encrypt,
3530
+ .decrypt = safexcel_aead_decrypt,
3531
+ .ivsize = CTR_RFC3686_IV_SIZE,
3532
+ .maxauthsize = SM3_DIGEST_SIZE,
3533
+ .base = {
3534
+ .cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
3535
+ .cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
3536
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3537
+ .cra_flags = CRYPTO_ALG_ASYNC |
3538
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3539
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3540
+ .cra_blocksize = 1,
3541
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3542
+ .cra_alignmask = 0,
3543
+ .cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
3544
+ .cra_exit = safexcel_aead_cra_exit,
3545
+ .cra_module = THIS_MODULE,
3546
+ },
3547
+ },
3548
+};
3549
+
3550
+static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3551
+ unsigned int len)
3552
+{
3553
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3554
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3555
+
3556
+ /* last 4 bytes of key are the nonce! */
3557
+ ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3558
+
3559
+ len -= CTR_RFC3686_NONCE_SIZE;
3560
+ return safexcel_aead_gcm_setkey(ctfm, key, len);
3561
+}
3562
+
3563
/* RFC4106 only allows 8/12/16 byte tags; delegate the check to crypto core. */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
3568
+
3569
+static int safexcel_rfc4106_encrypt(struct aead_request *req)
3570
+{
3571
+ return crypto_ipsec_check_assoclen(req->assoclen) ?:
3572
+ safexcel_aead_encrypt(req);
3573
+}
3574
+
3575
+static int safexcel_rfc4106_decrypt(struct aead_request *req)
3576
+{
3577
+ return crypto_ipsec_check_assoclen(req->assoclen) ?:
3578
+ safexcel_aead_decrypt(req);
3579
+}
3580
+
3581
+static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3582
+{
3583
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3584
+ int ret;
3585
+
3586
+ ret = safexcel_aead_gcm_cra_init(tfm);
3587
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
3588
+ ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3589
+ return ret;
3590
+}
3591
+
3592
+struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3593
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
3594
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3595
+ .alg.aead = {
3596
+ .setkey = safexcel_rfc4106_gcm_setkey,
3597
+ .setauthsize = safexcel_rfc4106_gcm_setauthsize,
3598
+ .encrypt = safexcel_rfc4106_encrypt,
3599
+ .decrypt = safexcel_rfc4106_decrypt,
3600
+ .ivsize = GCM_RFC4106_IV_SIZE,
3601
+ .maxauthsize = GHASH_DIGEST_SIZE,
3602
+ .base = {
3603
+ .cra_name = "rfc4106(gcm(aes))",
3604
+ .cra_driver_name = "safexcel-rfc4106-gcm-aes",
3605
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3606
+ .cra_flags = CRYPTO_ALG_ASYNC |
3607
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3608
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3609
+ .cra_blocksize = 1,
3610
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3611
+ .cra_alignmask = 0,
3612
+ .cra_init = safexcel_rfc4106_gcm_cra_init,
3613
+ .cra_exit = safexcel_aead_gcm_cra_exit,
3614
+ },
3615
+ },
3616
+};
3617
+
3618
+static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3619
+ unsigned int authsize)
3620
+{
3621
+ if (authsize != GHASH_DIGEST_SIZE)
3622
+ return -EINVAL;
3623
+
3624
+ return 0;
3625
+}
3626
+
3627
+static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3628
+{
3629
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3630
+ int ret;
3631
+
3632
+ ret = safexcel_aead_gcm_cra_init(tfm);
3633
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3634
+ return ret;
3635
+}
3636
+
3637
+struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3638
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
3639
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3640
+ .alg.aead = {
3641
+ .setkey = safexcel_rfc4106_gcm_setkey,
3642
+ .setauthsize = safexcel_rfc4543_gcm_setauthsize,
3643
+ .encrypt = safexcel_rfc4106_encrypt,
3644
+ .decrypt = safexcel_rfc4106_decrypt,
3645
+ .ivsize = GCM_RFC4543_IV_SIZE,
3646
+ .maxauthsize = GHASH_DIGEST_SIZE,
3647
+ .base = {
3648
+ .cra_name = "rfc4543(gcm(aes))",
3649
+ .cra_driver_name = "safexcel-rfc4543-gcm-aes",
3650
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3651
+ .cra_flags = CRYPTO_ALG_ASYNC |
3652
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3653
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3654
+ .cra_blocksize = 1,
3655
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3656
+ .cra_alignmask = 0,
3657
+ .cra_init = safexcel_rfc4543_gcm_cra_init,
3658
+ .cra_exit = safexcel_aead_gcm_cra_exit,
3659
+ },
3660
+ },
3661
+};
3662
+
3663
+static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
3664
+ unsigned int len)
3665
+{
3666
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3667
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3668
+
3669
+ /* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
3670
+ *(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
3671
+ /* last 3 bytes of key are the nonce! */
3672
+ memcpy((u8 *)&ctx->nonce + 1, key + len -
3673
+ EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
3674
+ EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
3675
+
3676
+ len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
3677
+ return safexcel_aead_ccm_setkey(ctfm, key, len);
3678
+}
3679
+
3680
+static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3681
+ unsigned int authsize)
3682
+{
3683
+ /* Borrowed from crypto/ccm.c */
3684
+ switch (authsize) {
3685
+ case 8:
3686
+ case 12:
3687
+ case 16:
3688
+ break;
3689
+ default:
3690
+ return -EINVAL;
3691
+ }
3692
+
3693
+ return 0;
3694
+}
3695
+
3696
+static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3697
+{
3698
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
3699
+
3700
+ /* Borrowed from crypto/ccm.c */
3701
+ if (req->assoclen != 16 && req->assoclen != 20)
3702
+ return -EINVAL;
3703
+
3704
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3705
+}
3706
+
3707
+static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3708
+{
3709
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
3710
+
3711
+ /* Borrowed from crypto/ccm.c */
3712
+ if (req->assoclen != 16 && req->assoclen != 20)
3713
+ return -EINVAL;
3714
+
3715
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3716
+}
3717
+
3718
+static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3719
+{
3720
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3721
+ int ret;
3722
+
3723
+ ret = safexcel_aead_ccm_cra_init(tfm);
3724
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
3725
+ ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3726
+ return ret;
3727
+}
3728
+
3729
+struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
3730
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
3731
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
3732
+ .alg.aead = {
3733
+ .setkey = safexcel_rfc4309_ccm_setkey,
3734
+ .setauthsize = safexcel_rfc4309_ccm_setauthsize,
3735
+ .encrypt = safexcel_rfc4309_ccm_encrypt,
3736
+ .decrypt = safexcel_rfc4309_ccm_decrypt,
3737
+ .ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
3738
+ .maxauthsize = AES_BLOCK_SIZE,
3739
+ .base = {
3740
+ .cra_name = "rfc4309(ccm(aes))",
3741
+ .cra_driver_name = "safexcel-rfc4309-ccm-aes",
3742
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
3743
+ .cra_flags = CRYPTO_ALG_ASYNC |
3744
+ CRYPTO_ALG_ALLOCATES_MEMORY |
3745
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
3746
+ .cra_blocksize = 1,
3747
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3748
+ .cra_alignmask = 0,
3749
+ .cra_init = safexcel_rfc4309_ccm_cra_init,
3750
+ .cra_exit = safexcel_aead_cra_exit,
3751
+ .cra_module = THIS_MODULE,
3752
+ },
3753
+ },
3754
+};