2024-05-10 748e4f3d702def1a4bff191e0cf93b6a05340f01
kernel/arch/arm64/crypto/aes-glue.c
@@ -1,36 +1,38 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
  *
  * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
  */

 #include <asm/neon.h>
 #include <asm/hwcap.h>
 #include <asm/simd.h>
 #include <crypto/aes.h>
+#include <crypto/ctr.h>
+#include <crypto/sha.h>
 #include <crypto/internal/hash.h>
 #include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
 #include <linux/module.h>
 #include <linux/cpufeature.h>
 #include <crypto/xts.h>

 #include "aes-ce-setkey.h"
-#include "aes-ctr-fallback.h"

 #ifdef USE_V8_CRYPTO_EXTENSIONS
 #define MODE "ce"
 #define PRIO 300
-#define aes_setkey ce_aes_setkey
 #define aes_expandkey ce_aes_expandkey
 #define aes_ecb_encrypt ce_aes_ecb_encrypt
 #define aes_ecb_decrypt ce_aes_ecb_decrypt
 #define aes_cbc_encrypt ce_aes_cbc_encrypt
 #define aes_cbc_decrypt ce_aes_cbc_decrypt
+#define aes_cbc_cts_encrypt ce_aes_cbc_cts_encrypt
+#define aes_cbc_cts_decrypt ce_aes_cbc_cts_decrypt
+#define aes_essiv_cbc_encrypt ce_aes_essiv_cbc_encrypt
+#define aes_essiv_cbc_decrypt ce_aes_essiv_cbc_decrypt
 #define aes_ctr_encrypt ce_aes_ctr_encrypt
 #define aes_xts_encrypt ce_aes_xts_encrypt
 #define aes_xts_decrypt ce_aes_xts_decrypt
@@ -39,57 +41,81 @@
 #else
 #define MODE "neon"
 #define PRIO 200
-#define aes_setkey crypto_aes_set_key
-#define aes_expandkey crypto_aes_expand_key
 #define aes_ecb_encrypt neon_aes_ecb_encrypt
 #define aes_ecb_decrypt neon_aes_ecb_decrypt
 #define aes_cbc_encrypt neon_aes_cbc_encrypt
 #define aes_cbc_decrypt neon_aes_cbc_decrypt
+#define aes_cbc_cts_encrypt neon_aes_cbc_cts_encrypt
+#define aes_cbc_cts_decrypt neon_aes_cbc_cts_decrypt
+#define aes_essiv_cbc_encrypt neon_aes_essiv_cbc_encrypt
+#define aes_essiv_cbc_decrypt neon_aes_essiv_cbc_decrypt
 #define aes_ctr_encrypt neon_aes_ctr_encrypt
 #define aes_xts_encrypt neon_aes_xts_encrypt
 #define aes_xts_decrypt neon_aes_xts_decrypt
 #define aes_mac_update neon_aes_mac_update
 MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
+#endif
+#if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)
 MODULE_ALIAS_CRYPTO("ecb(aes)");
 MODULE_ALIAS_CRYPTO("cbc(aes)");
 MODULE_ALIAS_CRYPTO("ctr(aes)");
 MODULE_ALIAS_CRYPTO("xts(aes)");
+#endif
+MODULE_ALIAS_CRYPTO("cts(cbc(aes))");
+MODULE_ALIAS_CRYPTO("essiv(cbc(aes),sha256)");
 MODULE_ALIAS_CRYPTO("cmac(aes)");
 MODULE_ALIAS_CRYPTO("xcbc(aes)");
 MODULE_ALIAS_CRYPTO("cbcmac(aes)");
-#endif

 MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
 MODULE_LICENSE("GPL v2");

 /* defined in aes-modes.S */
-asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
+asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
 				int rounds, int blocks);
-asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
+asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
 				int rounds, int blocks);

-asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
+asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
 				int rounds, int blocks, u8 iv[]);
-asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
+asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
 				int rounds, int blocks, u8 iv[]);

-asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
+asmlinkage void aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
 				int rounds, int bytes, u8 const iv[]);
+asmlinkage void aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
 				int rounds, int bytes, u8 const iv[]);
+
+asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
 				int rounds, int blocks, u8 ctr[]);

-asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
-				int rounds, int blocks, u8 const rk2[], u8 iv[],
+asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
+				int rounds, int bytes, u32 const rk2[], u8 iv[],
 				int first);
-asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
-				int rounds, int blocks, u8 const rk2[], u8 iv[],
+asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
 				int rounds, int bytes, u32 const rk2[], u8 iv[],
 				int first);

-asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
-			       int blocks, u8 dg[], int enc_before,
-			       int enc_after);
+asmlinkage void aes_essiv_cbc_encrypt(u8 out[], u8 const in[], u32 const rk1[],
+				      int rounds, int blocks, u8 iv[],
+				      u32 const rk2[]);
+asmlinkage void aes_essiv_cbc_decrypt(u8 out[], u8 const in[], u32 const rk1[],
+				      int rounds, int blocks, u8 iv[],
+				      u32 const rk2[]);
+
+asmlinkage int aes_mac_update(u8 const in[], u32 const rk[], int rounds,
+			      int blocks, u8 dg[], int enc_before,
+			      int enc_after);

 struct crypto_aes_xts_ctx {
 	struct crypto_aes_ctx key1;
 	struct crypto_aes_ctx __aligned(8) key2;
+};
+
+struct crypto_aes_essiv_cbc_ctx {
+	struct crypto_aes_ctx key1;
+	struct crypto_aes_ctx __aligned(8) key2;
+	struct crypto_shash *hash;
 };

 struct mac_tfm_ctx {
@@ -105,11 +131,13 @@
 static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 			       unsigned int key_len)
 {
-	return aes_setkey(crypto_skcipher_tfm(tfm), in_key, key_len);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	return aes_expandkey(ctx, in_key, key_len);
 }

-static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
-		       unsigned int key_len)
+static int __maybe_unused xts_set_key(struct crypto_skcipher *tfm,
+				      const u8 *in_key, unsigned int key_len)
 {
 	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int ret;
@@ -122,14 +150,27 @@
 	if (!ret)
 		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
 				    key_len / 2);
-	if (!ret)
-		return 0;
-
-	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
-	return -EINVAL;
+	return ret;
 }

-static int ecb_encrypt(struct skcipher_request *req)
+static int __maybe_unused essiv_cbc_set_key(struct crypto_skcipher *tfm,
+					    const u8 *in_key,
+					    unsigned int key_len)
+{
+	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
+	u8 digest[SHA256_DIGEST_SIZE];
+	int ret;
+
+	ret = aes_expandkey(&ctx->key1, in_key, key_len);
+	if (ret)
+		return ret;
+
+	crypto_shash_tfm_digest(ctx->hash, in_key, key_len, digest);
+
+	return aes_expandkey(&ctx->key2, digest, sizeof(digest));
+}
+
+static int __maybe_unused ecb_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
@@ -142,14 +183,14 @@
 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
 		kernel_neon_begin();
 		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key_enc, rounds, blocks);
+				ctx->key_enc, rounds, blocks);
 		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	return err;
 }

-static int ecb_decrypt(struct skcipher_request *req)
+static int __maybe_unused ecb_decrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
@@ -162,54 +203,246 @@
 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
 		kernel_neon_begin();
 		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key_dec, rounds, blocks);
+				ctx->key_dec, rounds, blocks);
 		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	return err;
 }

-static int cbc_encrypt(struct skcipher_request *req)
+static int cbc_encrypt_walk(struct skcipher_request *req,
+			    struct skcipher_walk *walk)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	int err = 0, rounds = 6 + ctx->key_length / 4;
+	unsigned int blocks;
+
+	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
+		kernel_neon_begin();
+		aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
+				ctx->key_enc, rounds, blocks, walk->iv);
+		kernel_neon_end();
+		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
+	}
+	return err;
+}
+
+static int __maybe_unused cbc_encrypt(struct skcipher_request *req)
+{
+	struct skcipher_walk walk;
+	int err;
+
+	err = skcipher_walk_virt(&walk, req, false);
+	if (err)
+		return err;
+	return cbc_encrypt_walk(req, &walk);
+}
+
+static int cbc_decrypt_walk(struct skcipher_request *req,
+			    struct skcipher_walk *walk)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	int err = 0, rounds = 6 + ctx->key_length / 4;
+	unsigned int blocks;
+
+	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
+		kernel_neon_begin();
+		aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
+				ctx->key_dec, rounds, blocks, walk->iv);
+		kernel_neon_end();
+		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
+	}
+	return err;
+}
+
+static int __maybe_unused cbc_decrypt(struct skcipher_request *req)
+{
+	struct skcipher_walk walk;
+	int err;
+
+	err = skcipher_walk_virt(&walk, req, false);
+	if (err)
+		return err;
+	return cbc_decrypt_walk(req, &walk);
+}
+
+static int cts_cbc_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int err, rounds = 6 + ctx->key_length / 4;
+	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
+	struct scatterlist *src = req->src, *dst = req->dst;
+	struct scatterlist sg_src[2], sg_dst[2];
+	struct skcipher_request subreq;
+	struct skcipher_walk walk;
+
+	skcipher_request_set_tfm(&subreq, tfm);
+	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
+				      NULL, NULL);
+
+	if (req->cryptlen <= AES_BLOCK_SIZE) {
+		if (req->cryptlen < AES_BLOCK_SIZE)
+			return -EINVAL;
+		cbc_blocks = 1;
+	}
+
+	if (cbc_blocks > 0) {
+		skcipher_request_set_crypt(&subreq, req->src, req->dst,
+					   cbc_blocks * AES_BLOCK_SIZE,
+					   req->iv);
+
+		err = skcipher_walk_virt(&walk, &subreq, false) ?:
+		      cbc_encrypt_walk(&subreq, &walk);
+		if (err)
+			return err;
+
+		if (req->cryptlen == AES_BLOCK_SIZE)
+			return 0;
+
+		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(sg_dst, req->dst,
+					       subreq.cryptlen);
+	}
+
+	/* handle ciphertext stealing */
+	skcipher_request_set_crypt(&subreq, src, dst,
+				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
+				   req->iv);
+
+	err = skcipher_walk_virt(&walk, &subreq, false);
+	if (err)
+		return err;
+
+	kernel_neon_begin();
+	aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
+			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
+	kernel_neon_end();
+
+	return skcipher_walk_done(&walk, 0);
+}
+
+static int cts_cbc_decrypt(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	int err, rounds = 6 + ctx->key_length / 4;
+	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
+	struct scatterlist *src = req->src, *dst = req->dst;
+	struct scatterlist sg_src[2], sg_dst[2];
+	struct skcipher_request subreq;
+	struct skcipher_walk walk;
+
+	skcipher_request_set_tfm(&subreq, tfm);
+	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
+				      NULL, NULL);
+
+	if (req->cryptlen <= AES_BLOCK_SIZE) {
+		if (req->cryptlen < AES_BLOCK_SIZE)
+			return -EINVAL;
+		cbc_blocks = 1;
+	}
+
+	if (cbc_blocks > 0) {
+		skcipher_request_set_crypt(&subreq, req->src, req->dst,
+					   cbc_blocks * AES_BLOCK_SIZE,
+					   req->iv);
+
+		err = skcipher_walk_virt(&walk, &subreq, false) ?:
+		      cbc_decrypt_walk(&subreq, &walk);
+		if (err)
+			return err;
+
+		if (req->cryptlen == AES_BLOCK_SIZE)
+			return 0;
+
+		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(sg_dst, req->dst,
+					       subreq.cryptlen);
+	}
+
+	/* handle ciphertext stealing */
+	skcipher_request_set_crypt(&subreq, src, dst,
+				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
+				   req->iv);
+
+	err = skcipher_walk_virt(&walk, &subreq, false);
+	if (err)
+		return err;
+
+	kernel_neon_begin();
+	aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
+			    ctx->key_dec, rounds, walk.nbytes, walk.iv);
+	kernel_neon_end();
+
+	return skcipher_walk_done(&walk, 0);
+}
+
+static int __maybe_unused essiv_cbc_init_tfm(struct crypto_skcipher *tfm)
+{
+	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	ctx->hash = crypto_alloc_shash("sha256", 0, 0);
+
+	return PTR_ERR_OR_ZERO(ctx->hash);
+}
+
+static void __maybe_unused essiv_cbc_exit_tfm(struct crypto_skcipher *tfm)
+{
+	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	crypto_free_shash(ctx->hash);
+}
+
+static int __maybe_unused essiv_cbc_encrypt(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
+	int err, rounds = 6 + ctx->key1.key_length / 4;
 	struct skcipher_walk walk;
 	unsigned int blocks;

 	err = skcipher_walk_virt(&walk, req, false);

-	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
+	blocks = walk.nbytes / AES_BLOCK_SIZE;
+	if (blocks) {
 		kernel_neon_begin();
-		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key_enc, rounds, blocks, walk.iv);
+		aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
+				      ctx->key1.key_enc, rounds, blocks,
+				      req->iv, ctx->key2.key_enc);
 		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
-	return err;
+	return err ?: cbc_encrypt_walk(req, &walk);
 }

-static int cbc_decrypt(struct skcipher_request *req)
+static int __maybe_unused essiv_cbc_decrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
-	int err, rounds = 6 + ctx->key_length / 4;
+	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
+	int err, rounds = 6 + ctx->key1.key_length / 4;
 	struct skcipher_walk walk;
 	unsigned int blocks;

 	err = skcipher_walk_virt(&walk, req, false);

-	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
+	blocks = walk.nbytes / AES_BLOCK_SIZE;
+	if (blocks) {
 		kernel_neon_begin();
-		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key_dec, rounds, blocks, walk.iv);
+		aes_essiv_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
+				      ctx->key1.key_dec, rounds, blocks,
+				      req->iv, ctx->key2.key_enc);
 		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
-	return err;
+	return err ?: cbc_decrypt_walk(req, &walk);
 }

-static int ctr_encrypt(struct skcipher_request *req)
+static int __maybe_unused ctr_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
@@ -222,7 +455,7 @@
 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
 		kernel_neon_begin();
 		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key_enc, rounds, blocks, walk.iv);
+				ctx->key_enc, rounds, blocks, walk.iv);
 		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
@@ -238,7 +471,7 @@
 		blocks = -1;

 		kernel_neon_begin();
-		aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, rounds,
+		aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds,
 				blocks, walk.iv);
 		kernel_neon_end();
 		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
@@ -248,67 +481,157 @@
 	return err;
 }

-static int ctr_encrypt_sync(struct skcipher_request *req)
-{
-	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
-
-	if (!may_use_simd())
-		return aes_ctr_encrypt_fallback(ctx, req);
-
-	return ctr_encrypt(req);
-}
-
-static int xts_encrypt(struct skcipher_request *req)
+static int __maybe_unused xts_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int err, first, rounds = 6 + ctx->key1.key_length / 4;
+	int tail = req->cryptlen % AES_BLOCK_SIZE;
+	struct scatterlist sg_src[2], sg_dst[2];
+	struct skcipher_request subreq;
+	struct scatterlist *src, *dst;
 	struct skcipher_walk walk;
-	unsigned int blocks;
+
+	if (req->cryptlen < AES_BLOCK_SIZE)
+		return -EINVAL;

 	err = skcipher_walk_virt(&walk, req, false);

-	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
+	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
+		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
+					      AES_BLOCK_SIZE) - 2;
+
+		skcipher_walk_abort(&walk);
+
+		skcipher_request_set_tfm(&subreq, tfm);
+		skcipher_request_set_callback(&subreq,
+					      skcipher_request_flags(req),
+					      NULL, NULL);
+		skcipher_request_set_crypt(&subreq, req->src, req->dst,
+					   xts_blocks * AES_BLOCK_SIZE,
+					   req->iv);
+		req = &subreq;
+		err = skcipher_walk_virt(&walk, req, false);
+	} else {
+		tail = 0;
+	}
+
+	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
+		int nbytes = walk.nbytes;
+
+		if (walk.nbytes < walk.total)
+			nbytes &= ~(AES_BLOCK_SIZE - 1);
+
 		kernel_neon_begin();
 		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key1.key_enc, rounds, blocks,
-				(u8 *)ctx->key2.key_enc, walk.iv, first);
+				ctx->key1.key_enc, rounds, nbytes,
+				ctx->key2.key_enc, walk.iv, first);
 		kernel_neon_end();
-		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
+		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}

-	return err;
+	if (err || likely(!tail))
+		return err;
+
+	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
+	if (req->dst != req->src)
+		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
+
+	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
+				   req->iv);
+
+	err = skcipher_walk_virt(&walk, &subreq, false);
+	if (err)
+		return err;
+
+	kernel_neon_begin();
+	aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
+			ctx->key1.key_enc, rounds, walk.nbytes,
+			ctx->key2.key_enc, walk.iv, first);
+	kernel_neon_end();
+
+	return skcipher_walk_done(&walk, 0);
 }

-static int xts_decrypt(struct skcipher_request *req)
+static int __maybe_unused xts_decrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int err, first, rounds = 6 + ctx->key1.key_length / 4;
+	int tail = req->cryptlen % AES_BLOCK_SIZE;
+	struct scatterlist sg_src[2], sg_dst[2];
+	struct skcipher_request subreq;
+	struct scatterlist *src, *dst;
 	struct skcipher_walk walk;
-	unsigned int blocks;
+
+	if (req->cryptlen < AES_BLOCK_SIZE)
+		return -EINVAL;

 	err = skcipher_walk_virt(&walk, req, false);

-	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
-		kernel_neon_begin();
-		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key1.key_dec, rounds, blocks,
-				(u8 *)ctx->key2.key_enc, walk.iv, first);
-		kernel_neon_end();
-		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
+	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
+		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
+					      AES_BLOCK_SIZE) - 2;
+
+		skcipher_walk_abort(&walk);
+
+		skcipher_request_set_tfm(&subreq, tfm);
+		skcipher_request_set_callback(&subreq,
+					      skcipher_request_flags(req),
+					      NULL, NULL);
+		skcipher_request_set_crypt(&subreq, req->src, req->dst,
+					   xts_blocks * AES_BLOCK_SIZE,
+					   req->iv);
+		req = &subreq;
+		err = skcipher_walk_virt(&walk, req, false);
+	} else {
+		tail = 0;
 	}

-	return err;
+	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
+		int nbytes = walk.nbytes;
+
+		if (walk.nbytes < walk.total)
+			nbytes &= ~(AES_BLOCK_SIZE - 1);
+
+		kernel_neon_begin();
+		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
+				ctx->key1.key_dec, rounds, nbytes,
+				ctx->key2.key_enc, walk.iv, first);
+		kernel_neon_end();
+		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
+	}
+
+	if (err || likely(!tail))
+		return err;
+
+	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
+	if (req->dst != req->src)
+		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
+
+	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
+				   req->iv);
+
+	err = skcipher_walk_virt(&walk, &subreq, false);
+	if (err)
+		return err;
+
+
+	kernel_neon_begin();
+	aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
+			ctx->key1.key_dec, rounds, walk.nbytes,
+			ctx->key2.key_enc, walk.iv, first);
+	kernel_neon_end();
+
+	return skcipher_walk_done(&walk, 0);
 }

 static struct skcipher_alg aes_algs[] = { {
+#if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)
 	.base = {
-		.cra_name = "__ecb(aes)",
-		.cra_driver_name = "__ecb-aes-" MODE,
+		.cra_name = "ecb(aes)",
+		.cra_driver_name = "ecb-aes-" MODE,
 		.cra_priority = PRIO,
-		.cra_flags = CRYPTO_ALG_INTERNAL,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct crypto_aes_ctx),
 		.cra_module = THIS_MODULE,
@@ -320,10 +643,9 @@
 	.decrypt = ecb_decrypt,
 }, {
 	.base = {
-		.cra_name = "__cbc(aes)",
-		.cra_driver_name = "__cbc-aes-" MODE,
+		.cra_name = "cbc(aes)",
+		.cra_driver_name = "cbc-aes-" MODE,
 		.cra_priority = PRIO,
-		.cra_flags = CRYPTO_ALG_INTERNAL,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct crypto_aes_ctx),
 		.cra_module = THIS_MODULE,
@@ -336,10 +658,9 @@
 	.decrypt = cbc_decrypt,
 }, {
 	.base = {
-		.cra_name = "__ctr(aes)",
-		.cra_driver_name = "__ctr-aes-" MODE,
+		.cra_name = "ctr(aes)",
+		.cra_driver_name = "ctr-aes-" MODE,
 		.cra_priority = PRIO,
-		.cra_flags = CRYPTO_ALG_INTERNAL,
 		.cra_blocksize = 1,
 		.cra_ctxsize = sizeof(struct crypto_aes_ctx),
 		.cra_module = THIS_MODULE,
@@ -353,26 +674,9 @@
 	.decrypt = ctr_encrypt,
 }, {
 	.base = {
-		.cra_name = "ctr(aes)",
-		.cra_driver_name = "ctr-aes-" MODE,
-		.cra_priority = PRIO - 1,
-		.cra_blocksize = 1,
-		.cra_ctxsize = sizeof(struct crypto_aes_ctx),
-		.cra_module = THIS_MODULE,
-	},
-	.min_keysize = AES_MIN_KEY_SIZE,
-	.max_keysize = AES_MAX_KEY_SIZE,
-	.ivsize = AES_BLOCK_SIZE,
-	.chunksize = AES_BLOCK_SIZE,
-	.setkey = skcipher_aes_setkey,
-	.encrypt = ctr_encrypt_sync,
-	.decrypt = ctr_encrypt_sync,
-}, {
-	.base = {
-		.cra_name = "__xts(aes)",
-		.cra_driver_name = "__xts-aes-" MODE,
+		.cra_name = "xts(aes)",
+		.cra_driver_name = "xts-aes-" MODE,
 		.cra_priority = PRIO,
-		.cra_flags = CRYPTO_ALG_INTERNAL,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct crypto_aes_xts_ctx),
 		.cra_module = THIS_MODULE,
@@ -380,22 +684,52 @@
 	.min_keysize = 2 * AES_MIN_KEY_SIZE,
 	.max_keysize = 2 * AES_MAX_KEY_SIZE,
 	.ivsize = AES_BLOCK_SIZE,
+	.walksize = 2 * AES_BLOCK_SIZE,
 	.setkey = xts_set_key,
 	.encrypt = xts_encrypt,
 	.decrypt = xts_decrypt,
+}, {
+#endif
+	.base = {
+		.cra_name = "cts(cbc(aes))",
+		.cra_driver_name = "cts-cbc-aes-" MODE,
+		.cra_priority = PRIO,
+		.cra_blocksize = AES_BLOCK_SIZE,
+		.cra_ctxsize = sizeof(struct crypto_aes_ctx),
+		.cra_module = THIS_MODULE,
+	},
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.ivsize = AES_BLOCK_SIZE,
+	.walksize = 2 * AES_BLOCK_SIZE,
+	.setkey = skcipher_aes_setkey,
+	.encrypt = cts_cbc_encrypt,
+	.decrypt = cts_cbc_decrypt,
+}, {
+	.base = {
+		.cra_name = "essiv(cbc(aes),sha256)",
+		.cra_driver_name = "essiv-cbc-aes-sha256-" MODE,
+		.cra_priority = PRIO + 1,
+		.cra_blocksize = AES_BLOCK_SIZE,
+		.cra_ctxsize = sizeof(struct crypto_aes_essiv_cbc_ctx),
+		.cra_module = THIS_MODULE,
+	},
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.ivsize = AES_BLOCK_SIZE,
+	.setkey = essiv_cbc_set_key,
+	.encrypt = essiv_cbc_encrypt,
+	.decrypt = essiv_cbc_decrypt,
+	.init = essiv_cbc_init_tfm,
+	.exit = essiv_cbc_exit_tfm,
 } };

 static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
 			 unsigned int key_len)
 {
 	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
-	int err;

-	err = aes_expandkey(&ctx->key, in_key, key_len);
-	if (err)
-		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
-
-	return err;
+	return aes_expandkey(&ctx->key, in_key, key_len);
 }

 static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
@@ -412,7 +746,6 @@
 {
 	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
 	be128 *consts = (be128 *)ctx->consts;
-	u8 *rk = (u8 *)ctx->key.key_enc;
 	int rounds = 6 + key_len / 4;
 	int err;

@@ -422,7 +755,8 @@

 	/* encrypt the zero vector */
 	kernel_neon_begin();
-	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, rk, rounds, 1);
+	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc,
+			rounds, 1);
 	kernel_neon_end();

 	cmac_gf128_mul_by_x(consts, consts);
@@ -441,7 +775,6 @@
 	};

 	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
-	u8 *rk = (u8 *)ctx->key.key_enc;
 	int rounds = 6 + key_len / 4;
 	u8 key[AES_BLOCK_SIZE];
 	int err;
@@ -451,8 +784,8 @@
 		return err;

 	kernel_neon_begin();
-	aes_ecb_encrypt(key, ks[0], rk, rounds, 1);
-	aes_ecb_encrypt(ctx->consts, ks[1], rk, rounds, 2);
+	aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1);
+	aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2);
 	kernel_neon_end();

 	return cbcmac_setkey(tfm, key, sizeof(key));
@@ -473,22 +806,28 @@
 {
 	int rounds = 6 + ctx->key_length / 4;

-	if (may_use_simd()) {
-		kernel_neon_begin();
-		aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
-			       enc_after);
-		kernel_neon_end();
+	if (crypto_simd_usable()) {
+		int rem;
+
+		do {
+			kernel_neon_begin();
+			rem = aes_mac_update(in, ctx->key_enc, rounds, blocks,
+					     dg, enc_before, enc_after);
+			kernel_neon_end();
+			in += (blocks - rem) * AES_BLOCK_SIZE;
+			blocks = rem;
+			enc_before = 0;
+		} while (blocks);
 	} else {
 		if (enc_before)
-			__aes_arm64_encrypt(ctx->key_enc, dg, dg, rounds);
+			aes_encrypt(ctx, dg, dg);

 		while (blocks--) {
 			crypto_xor(dg, in, AES_BLOCK_SIZE);
 			in += AES_BLOCK_SIZE;

 			if (blocks || enc_after)
-				__aes_arm64_encrypt(ctx->key_enc, dg, dg,
-						    rounds);
+				aes_encrypt(ctx, dg, dg);
 		}
 	}
 }
@@ -538,7 +877,7 @@
 	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
 	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

-	mac_do_update(&tctx->key, NULL, 0, ctx->dg, 1, 0);
+	mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0);

 	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

@@ -609,28 +948,15 @@
 	.descsize = sizeof(struct mac_desc_ctx),
 } };

-static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
-
 static void aes_exit(void)
 {
-	int i;
-
-	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
-		if (aes_simd_algs[i])
-			simd_skcipher_free(aes_simd_algs[i]);
-
 	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
 	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
 }

 static int __init aes_init(void)
 {
-	struct simd_skcipher_alg *simd;
-	const char *basename;
-	const char *algname;
-	const char *drvname;
 	int err;
-	int i;

 	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
 	if (err)
@@ -640,26 +966,8 @@
 	if (err)
 		goto unregister_ciphers;

-	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
-		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
-			continue;
-
-		algname = aes_algs[i].base.cra_name + 2;
-		drvname = aes_algs[i].base.cra_driver_name + 2;
-		basename = aes_algs[i].base.cra_driver_name;
-		simd = simd_skcipher_create_compat(algname, drvname, basename);
-		err = PTR_ERR(simd);
-		if (IS_ERR(simd))
-			goto unregister_simds;
-
-		aes_simd_algs[i] = simd;
-	}
-
 	return 0;

-unregister_simds:
-	aes_exit();
-	return err;
 unregister_ciphers:
 	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
 	return err;
@@ -671,5 +979,7 @@
 module_init(aes_init);
 EXPORT_SYMBOL(neon_aes_ecb_encrypt);
 EXPORT_SYMBOL(neon_aes_cbc_encrypt);
+EXPORT_SYMBOL(neon_aes_xts_encrypt);
+EXPORT_SYMBOL(neon_aes_xts_decrypt);
 #endif
 module_exit(aes_exit);
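The hunks above register the new "cts(cbc(aes))" and "essiv(cbc(aes),sha256)" skciphers with the generic crypto API, so other kernel code can reach them by name. Below is a minimal, untested sketch of what a caller might look like; the helper name example_essiv_encrypt, the 512-byte sector size, the key handling and the error paths are illustrative assumptions and are not part of this commit — only standard <crypto/skcipher.h> calls are used.

#include <asm/unaligned.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/*
 * Sketch: encrypt one 512-byte sector in place with essiv(cbc(aes),sha256).
 * On arm64 this should resolve to the "essiv-cbc-aes-sha256-ce" (or -neon)
 * driver registered above. buf must not live on the stack, since it is
 * mapped through a scatterlist.
 */
static int example_essiv_encrypt(const u8 *key, unsigned int keylen,
				 u8 *buf, u64 sector)
{
	struct crypto_sync_skcipher *tfm;
	struct scatterlist sg;
	u8 iv[16] = {};
	int err;

	tfm = crypto_alloc_sync_skcipher("essiv(cbc(aes),sha256)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_sync_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out;

	/* The ESSIV wrapper derives the per-sector IV from this value. */
	put_unaligned_le64(sector, iv);
	sg_init_one(&sg, buf, 512);

	{
		SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);

		skcipher_request_set_sync_tfm(req, tfm);
		skcipher_request_set_callback(req, 0, NULL, NULL);
		skcipher_request_set_crypt(req, &sg, &sg, 512, iv);
		err = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	}
out:
	crypto_free_sync_skcipher(tfm);
	return err;
}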