2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/arch/x86/crypto/aesni-intel_glue.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Support for Intel AES-NI instructions. This file contains glue
  * code, the real AES implementation is in intel-aes_asm.S.
@@ -12,11 +13,6 @@
  * Tadeusz Struk (tadeusz.struk@intel.com)
  * Aidan O'Mahony (aidan.o.mahony@intel.com)
  * Copyright (c) 2010, Intel Corporation.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
  */
 
 #include <linux/hardirq.h>
@@ -25,14 +21,12 @@
 #include <linux/err.h>
 #include <crypto/algapi.h>
 #include <crypto/aes.h>
-#include <crypto/cryptd.h>
 #include <crypto/ctr.h>
 #include <crypto/b128ops.h>
 #include <crypto/gcm.h>
 #include <crypto/xts.h>
 #include <asm/cpu_device_id.h>
-#include <asm/fpu/api.h>
-#include <asm/crypto/aes.h>
+#include <asm/simd.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/internal/aead.h>
 #include <crypto/internal/simd.h>
@@ -84,15 +78,13 @@
 	u8 current_counter[GCM_BLOCK_LEN];
 	u64 partial_block_len;
 	u64 unused;
-	u8 hash_keys[GCM_BLOCK_LEN * 8];
+	u8 hash_keys[GCM_BLOCK_LEN * 16];
 };
 
 asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
-asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
-			  const u8 *in);
-asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
-			  const u8 *in);
+asmlinkage void aesni_enc(const void *ctx, u8 *out, const u8 *in);
+asmlinkage void aesni_dec(const void *ctx, u8 *out, const u8 *in);
 asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
 asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
@@ -102,11 +94,14 @@
 asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
 
-int crypto_fpu_init(void);
-void crypto_fpu_exit(void);
-
 #define AVX_GEN2_OPTSIZE 640
 #define AVX_GEN4_OPTSIZE 4096
+
+asmlinkage void aesni_xts_encrypt(const struct crypto_aes_ctx *ctx, u8 *out,
				  const u8 *in, unsigned int len, u8 *iv);
+
+asmlinkage void aesni_xts_decrypt(const struct crypto_aes_ctx *ctx, u8 *out,
				  const u8 *in, unsigned int len, u8 *iv);
 
 #ifdef CONFIG_X86_64
 
@@ -114,9 +109,6 @@
			      const u8 *in, unsigned int len, u8 *iv);
 asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
-
-asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
-				 const u8 *in, bool enc, u8 *iv);
 
 /* asmlinkage void aesni_gcm_enc()
  * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
@@ -178,7 +170,24 @@
			struct gcm_context_data *gdata,
			u8 *auth_tag, unsigned long auth_tag_len);
 
-#ifdef CONFIG_AS_AVX
+static const struct aesni_gcm_tfm_s {
+	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
+		     u8 *hash_subkey, const u8 *aad, unsigned long aad_len);
+	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
+			   const u8 *in, unsigned long plaintext_len);
+	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
+			   const u8 *in, unsigned long ciphertext_len);
+	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
+			 u8 *auth_tag, unsigned long auth_tag_len);
+} *aesni_gcm_tfm;
+
+static const struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
+	.init = &aesni_gcm_init,
+	.enc_update = &aesni_gcm_enc_update,
+	.dec_update = &aesni_gcm_dec_update,
+	.finalize = &aesni_gcm_finalize,
+};
+
 asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
 asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
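The hunk above replaces per-call AVX dispatch with a table of function pointers (aesni_gcm_tfm_s) that is selected once and then called through unconditionally. The following is a minimal, self-contained user-space sketch of that ops-table pattern, not the kernel's code; all names here (gcm_ops, enc_update_sse, cpu_has_avx2, ...) are illustrative stand-ins:

#include <stdio.h>

struct gcm_ops {
	const char *name;
	void (*enc_update)(const unsigned char *in, unsigned char *out,
			   unsigned long len);
};

/* Dummy bodies standing in for the SSE and AVX2 bulk update paths. */
static void enc_update_sse(const unsigned char *in, unsigned char *out,
			   unsigned long len)
{
	while (len--)
		*out++ = *in++;
}

static void enc_update_avx2(const unsigned char *in, unsigned char *out,
			    unsigned long len)
{
	while (len--)
		*out++ = *in++;
}

static const struct gcm_ops ops_sse  = { "sse",  enc_update_sse };
static const struct gcm_ops ops_avx2 = { "avx2", enc_update_avx2 };
static const struct gcm_ops *gcm_ops;	/* chosen once, like aesni_gcm_tfm */

int main(void)
{
	int cpu_has_avx2 = 0;			/* pretend CPU feature probe */
	unsigned char in[8] = "example", out[8];

	gcm_ops = cpu_has_avx2 ? &ops_avx2 : &ops_sse;
	gcm_ops->enc_update(in, out, sizeof(in));
	printf("used %s path\n", gcm_ops->name);
	return 0;
}

Callers never branch on the CPU feature again; they only dereference the table, which is what the per-request code later in this patch does via gcm_tfm->init()/enc_update()/finalize().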
@@ -186,135 +195,88 @@
 asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
 /*
- * asmlinkage void aesni_gcm_precomp_avx_gen2()
+ * asmlinkage void aesni_gcm_init_avx_gen2()
  * gcm_data *my_ctx_data, context data
  * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
  */
-asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);
+asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
+					struct gcm_context_data *gdata,
+					u8 *iv,
+					u8 *hash_subkey,
+					const u8 *aad,
+					unsigned long aad_len);
 
-asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
+asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
+					struct gcm_context_data *gdata, u8 *out,
+					const u8 *in, unsigned long plaintext_len);
+asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
+					struct gcm_context_data *gdata, u8 *out,
+					const u8 *in,
+					unsigned long ciphertext_len);
+asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
+					struct gcm_context_data *gdata,
+					u8 *auth_tag, unsigned long auth_tag_len);
+
+asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
+					struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
 
-asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
+asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
+					struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
 
-static void aesni_gcm_enc_avx(void *ctx,
-			struct gcm_context_data *data, u8 *out,
-			const u8 *in, unsigned long plaintext_len, u8 *iv,
-			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
-			u8 *auth_tag, unsigned long auth_tag_len)
-{
-	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
-	if ((plaintext_len < AVX_GEN2_OPTSIZE) || (aes_ctx-> key_length != AES_KEYSIZE_128)){
-		aesni_gcm_enc(ctx, data, out, in,
-			plaintext_len, iv, hash_subkey, aad,
-			aad_len, auth_tag, auth_tag_len);
-	} else {
-		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
-		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
-					aad_len, auth_tag, auth_tag_len);
-	}
-}
+static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
+	.init = &aesni_gcm_init_avx_gen2,
+	.enc_update = &aesni_gcm_enc_update_avx_gen2,
+	.dec_update = &aesni_gcm_dec_update_avx_gen2,
+	.finalize = &aesni_gcm_finalize_avx_gen2,
+};
 
-static void aesni_gcm_dec_avx(void *ctx,
-			struct gcm_context_data *data, u8 *out,
-			const u8 *in, unsigned long ciphertext_len, u8 *iv,
-			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
-			u8 *auth_tag, unsigned long auth_tag_len)
-{
-	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
-	if ((ciphertext_len < AVX_GEN2_OPTSIZE) || (aes_ctx-> key_length != AES_KEYSIZE_128)) {
-		aesni_gcm_dec(ctx, data, out, in,
-			ciphertext_len, iv, hash_subkey, aad,
-			aad_len, auth_tag, auth_tag_len);
-	} else {
-		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
-		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
-					aad_len, auth_tag, auth_tag_len);
-	}
-}
-#endif
-
-#ifdef CONFIG_AS_AVX2
 /*
- * asmlinkage void aesni_gcm_precomp_avx_gen4()
+ * asmlinkage void aesni_gcm_init_avx_gen4()
  * gcm_data *my_ctx_data, context data
  * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
  */
-asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);
+asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
+					struct gcm_context_data *gdata,
+					u8 *iv,
+					u8 *hash_subkey,
+					const u8 *aad,
+					unsigned long aad_len);
 
-asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
+asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
+					struct gcm_context_data *gdata, u8 *out,
+					const u8 *in, unsigned long plaintext_len);
+asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
+					struct gcm_context_data *gdata, u8 *out,
+					const u8 *in,
+					unsigned long ciphertext_len);
+asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
+					struct gcm_context_data *gdata,
+					u8 *auth_tag, unsigned long auth_tag_len);
+
+asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
+					struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
 
-asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
+asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
+					struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
 
-static void aesni_gcm_enc_avx2(void *ctx,
-			struct gcm_context_data *data, u8 *out,
-			const u8 *in, unsigned long plaintext_len, u8 *iv,
-			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
-			u8 *auth_tag, unsigned long auth_tag_len)
-{
-	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
-	if ((plaintext_len < AVX_GEN2_OPTSIZE) || (aes_ctx-> key_length != AES_KEYSIZE_128)) {
-		aesni_gcm_enc(ctx, data, out, in,
-			plaintext_len, iv, hash_subkey, aad,
-			aad_len, auth_tag, auth_tag_len);
-	} else if (plaintext_len < AVX_GEN4_OPTSIZE) {
-		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
-		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
-					aad_len, auth_tag, auth_tag_len);
-	} else {
-		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
-		aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
-					aad_len, auth_tag, auth_tag_len);
-	}
-}
-
-static void aesni_gcm_dec_avx2(void *ctx,
-			struct gcm_context_data *data, u8 *out,
-			const u8 *in, unsigned long ciphertext_len, u8 *iv,
-			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
-			u8 *auth_tag, unsigned long auth_tag_len)
-{
-	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
-	if ((ciphertext_len < AVX_GEN2_OPTSIZE) || (aes_ctx-> key_length != AES_KEYSIZE_128)) {
-		aesni_gcm_dec(ctx, data, out, in,
-			ciphertext_len, iv, hash_subkey,
-			aad, aad_len, auth_tag, auth_tag_len);
-	} else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
-		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
-		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
-					aad_len, auth_tag, auth_tag_len);
-	} else {
-		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
-		aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
-					aad_len, auth_tag, auth_tag_len);
-	}
-}
-#endif
-
-static void (*aesni_gcm_enc_tfm)(void *ctx,
-				 struct gcm_context_data *data, u8 *out,
-				 const u8 *in, unsigned long plaintext_len,
-				 u8 *iv, u8 *hash_subkey, const u8 *aad,
-				 unsigned long aad_len, u8 *auth_tag,
-				 unsigned long auth_tag_len);
-
-static void (*aesni_gcm_dec_tfm)(void *ctx,
-				 struct gcm_context_data *data, u8 *out,
-				 const u8 *in, unsigned long ciphertext_len,
-				 u8 *iv, u8 *hash_subkey, const u8 *aad,
-				 unsigned long aad_len, u8 *auth_tag,
-				 unsigned long auth_tag_len);
+static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
+	.init = &aesni_gcm_init_avx_gen4,
+	.enc_update = &aesni_gcm_enc_update_avx_gen4,
+	.dec_update = &aesni_gcm_dec_update_avx_gen4,
+	.finalize = &aesni_gcm_finalize_avx_gen4,
+};
 
 static inline struct
 aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
@@ -351,17 +313,14 @@
			const u8 *in_key, unsigned int key_len)
 {
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
-	u32 *flags = &tfm->crt_flags;
	int err;
 
	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
-	    key_len != AES_KEYSIZE_256) {
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+	    key_len != AES_KEYSIZE_256)
		return -EINVAL;
-	}
 
-	if (!irq_fpu_usable())
-		err = crypto_aes_expand_key(ctx, in_key, key_len);
+	if (!crypto_simd_usable())
+		err = aes_expandkey(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
@@ -377,44 +336,30 @@
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
 }
 
-static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+static void aesni_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
 
-	if (!irq_fpu_usable())
-		crypto_aes_encrypt_x86(ctx, dst, src);
-	else {
+	if (!crypto_simd_usable()) {
+		aes_encrypt(ctx, dst, src);
+	} else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
 }
 
-static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+static void aesni_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
 
-	if (!irq_fpu_usable())
-		crypto_aes_decrypt_x86(ctx, dst, src);
-	else {
+	if (!crypto_simd_usable()) {
+		aes_decrypt(ctx, dst, src);
+	} else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
-}
-
-static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
-	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
-
-	aesni_enc(ctx, dst, src);
-}
-
-static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
-	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
-
-	aesni_dec(ctx, dst, src);
 }
 
 static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
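The hunk above switches the single-block helpers from irq_fpu_usable() to crypto_simd_usable(), falling back to the generic AES library when SIMD registers cannot be touched. A minimal user-space sketch of that shape follows; simd_usable(), encrypt_generic() and encrypt_simd() are stand-ins, not real AES and not the kernel API:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

static int simd_usable(void)
{
	return 0;	/* pretend we are in a context where FPU use is not allowed */
}

static void encrypt_generic(uint8_t dst[16], const uint8_t src[16])
{
	memcpy(dst, src, 16);	/* stand-in for the portable C path (aes_encrypt) */
}

static void encrypt_simd(uint8_t dst[16], const uint8_t src[16])
{
	memcpy(dst, src, 16);	/* stand-in for the AES-NI path (aesni_enc) */
}

static void encrypt_block(uint8_t dst[16], const uint8_t src[16])
{
	if (!simd_usable())
		encrypt_generic(dst, src);
	else
		encrypt_simd(dst, src);	/* kernel code brackets this with kernel_fpu_begin()/end() */
}

int main(void)
{
	uint8_t in[16] = { 0 }, out[16];

	encrypt_block(out, in);
	printf("first output byte: %u\n", out[0]);
	return 0;
}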
@@ -528,7 +473,6 @@
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
 }
 
-#ifdef CONFIG_AS_AVX
 static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv)
 {
@@ -545,7 +489,6 @@
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
 }
-#endif
 
 static int ctr_crypt(struct skcipher_request *req)
 {
@@ -599,29 +542,24 @@
 }
 
 
-static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
+static void aesni_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
 {
-	aesni_enc(ctx, out, in);
+	glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_enc);
 }
 
-static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+static void aesni_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
 {
-	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
+	glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_dec);
 }
 
-static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+static void aesni_xts_enc32(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
 {
-	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
+	aesni_xts_encrypt(ctx, dst, src, 32 * AES_BLOCK_SIZE, (u8 *)iv);
 }
 
-static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+static void aesni_xts_dec32(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
 {
-	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
-}
-
-static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
-{
-	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
+	aesni_xts_decrypt(ctx, dst, src, 32 * AES_BLOCK_SIZE, (u8 *)iv);
 }
 
 static const struct common_glue_ctx aesni_enc_xts = {
@@ -629,11 +567,11 @@
	.fpu_blocks_limit = 1,
 
	.funcs = { {
-		.num_blocks = 8,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
+		.num_blocks = 32,
+		.fn_u = { .xts = aesni_xts_enc32 }
	}, {
		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
+		.fn_u = { .xts = aesni_xts_enc }
	} }
 };
 
@@ -642,11 +580,11 @@
	.fpu_blocks_limit = 1,
 
	.funcs = { {
-		.num_blocks = 8,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
+		.num_blocks = 32,
+		.fn_u = { .xts = aesni_xts_dec32 }
	}, {
		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
+		.fn_u = { .xts = aesni_xts_dec }
	} }
 };
 
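The funcs[] tables above list handlers by descending batch size (32 blocks, then 1 block) and the glue layer picks the widest batch that still fits the remaining data. The following is a self-contained sketch of that widest-batch-first idea only; it is illustrative and is not the glue_helper implementation (batch_fn, do32, do1 and crypt_blocks are made up):

#include <stdio.h>

struct batch_fn {
	unsigned int num_blocks;
	void (*fn)(unsigned int nblocks);
};

static void do32(unsigned int n) { printf("32-block call (%u)\n", n); }
static void do1(unsigned int n)  { printf("1-block call (%u)\n", n); }

/* Ordered largest batch first, mirroring the .num_blocks = 32 / 1 entries. */
static const struct batch_fn funcs[] = { { 32, do32 }, { 1, do1 } };

static void crypt_blocks(unsigned int nblocks)
{
	unsigned int i;

	while (nblocks) {
		/* pick the largest batch size that still fits */
		for (i = 0; funcs[i].num_blocks > nblocks; i++)
			;
		funcs[i].fn(funcs[i].num_blocks);
		nblocks -= funcs[i].num_blocks;
	}
}

int main(void)
{
	crypt_blocks(70);	/* 32 + 32 + six single-block calls */
	return 0;
}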
@@ -655,10 +593,10 @@
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-	return glue_xts_req_128bit(&aesni_enc_xts, req,
-				   XTS_TWEAK_CAST(aesni_xts_tweak),
+	return glue_xts_req_128bit(&aesni_enc_xts, req, aesni_enc,
				   aes_ctx(ctx->raw_tweak_ctx),
-				   aes_ctx(ctx->raw_crypt_ctx));
+				   aes_ctx(ctx->raw_crypt_ctx),
+				   false);
 }
 
 static int xts_decrypt(struct skcipher_request *req)
@@ -666,58 +604,30 @@
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-	return glue_xts_req_128bit(&aesni_dec_xts, req,
-				   XTS_TWEAK_CAST(aesni_xts_tweak),
+	return glue_xts_req_128bit(&aesni_dec_xts, req, aesni_enc,
				   aes_ctx(ctx->raw_tweak_ctx),
-				   aes_ctx(ctx->raw_crypt_ctx));
-}
-
-static int rfc4106_init(struct crypto_aead *aead)
-{
-	struct cryptd_aead *cryptd_tfm;
-	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
-
-	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
-				       CRYPTO_ALG_INTERNAL,
-				       CRYPTO_ALG_INTERNAL);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-
-	*ctx = cryptd_tfm;
-	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
-	return 0;
-}
-
-static void rfc4106_exit(struct crypto_aead *aead)
-{
-	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
-
-	cryptd_free_aead(*ctx);
+				   aes_ctx(ctx->raw_crypt_ctx),
+				   true);
 }
 
 static int
 rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
 {
-	struct crypto_cipher *tfm;
+	struct crypto_aes_ctx ctx;
	int ret;
 
-	tfm = crypto_alloc_cipher("aes", 0, 0);
-	if (IS_ERR(tfm))
-		return PTR_ERR(tfm);
-
-	ret = crypto_cipher_setkey(tfm, key, key_len);
+	ret = aes_expandkey(&ctx, key, key_len);
	if (ret)
-		goto out_free_cipher;
+		return ret;
 
	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);
 
-	crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);
+	aes_encrypt(&ctx, hash_subkey, hash_subkey);
 
-out_free_cipher:
-	crypto_free_cipher(tfm);
-	return ret;
+	memzero_explicit(&ctx, sizeof(ctx));
+	return 0;
 }
 
 static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
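The reworked rfc4106_set_hash_subkey() above derives the GHASH subkey by expanding the AES key on the stack and encrypting an all-zero block (H = E_K(0^128)), instead of allocating a crypto_cipher. For illustration only, the same derivation in user space using OpenSSL's legacy AES API (build with -lcrypto); this is a sketch, not the kernel code:

#include <openssl/aes.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
	unsigned char key[16] = { 0 };	/* example 128-bit key */
	unsigned char hash_subkey[16];
	AES_KEY enc_key;

	if (AES_set_encrypt_key(key, 128, &enc_key) != 0)
		return 1;

	/* Cipher all zeros to create the hash sub key, as the comment above says. */
	memset(hash_subkey, 0, sizeof(hash_subkey));
	AES_encrypt(hash_subkey, hash_subkey, &enc_key);	/* H = E_K(0^128) */

	for (int i = 0; i < 16; i++)
		printf("%02x", hash_subkey[i]);
	printf("\n");
	return 0;
}

The memzero_explicit() call in the new kernel code wipes the expanded key from the stack before returning, which the old crypto_cipher-based version delegated to crypto_free_cipher().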
@@ -725,10 +635,9 @@
 {
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);
 
-	if (key_len < 4) {
-		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
+	if (key_len < 4)
		return -EINVAL;
-	}
+
	/*Account for 4 byte nonce at the end.*/
	key_len -= 4;
 
@@ -739,15 +648,8 @@
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
 }
 
-static int gcmaes_wrapper_set_key(struct crypto_aead *parent, const u8 *key,
-				  unsigned int key_len)
-{
-	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
-	struct cryptd_aead *cryptd_tfm = *ctx;
-
-	return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
-}
-
+/* This is the Integrity Check Value (aka the authentication tag) length and can
+ * be 8, 12 or 16 bytes long. */
 static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
 {
@@ -761,17 +663,6 @@
	}
 
	return 0;
-}
-
-/* This is the Integrity Check Value (aka the authentication tag length and can
- * be 8, 12 or 16 bytes long. */
-static int gcmaes_wrapper_set_authsize(struct crypto_aead *parent,
-				       unsigned int authsize)
-{
-	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
-	struct cryptd_aead *cryptd_tfm = *ctx;
-
-	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
 }
 
 static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
@@ -799,7 +690,9 @@
 {
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
-	struct gcm_context_data data AESNI_ALIGN_ATTR;
+	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
+	u8 databuf[sizeof(struct gcm_context_data) + (AESNI_ALIGN - 8)] __aligned(8);
+	struct gcm_context_data *data = PTR_ALIGN((void *)databuf, AESNI_ALIGN);
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
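The databuf/data change above (and the ivbuf changes later in this patch) use the same idiom: over-allocate a plain byte buffer by (AESNI_ALIGN - 8) and round the pointer up with PTR_ALIGN(), rather than relying on an __aligned() attribute on an on-stack variable, which the stack only guarantees to its natural 8-byte alignment. A self-contained sketch of that idiom, with align_up() as a stand-in for PTR_ALIGN():

#include <stdint.h>
#include <stdio.h>

#define ALIGN 16	/* stand-in for AESNI_ALIGN */

static void *align_up(void *p, uintptr_t a)
{
	/* round p up to the next multiple of a (a must be a power of two) */
	return (void *)(((uintptr_t)p + a - 1) & ~(a - 1));
}

int main(void)
{
	uint8_t ivbuf[16 + (ALIGN - 8)];	/* padded raw buffer on the stack */
	uint8_t *iv = align_up(ivbuf, ALIGN);	/* aligned view, 16 usable bytes */

	printf("aligned to %d? %s\n", ALIGN,
	       ((uintptr_t)iv % ALIGN) == 0 ? "yes" : "no");
	return 0;
}

Because the raw buffer is assumed to start on at least an 8-byte boundary, padding by (ALIGN - 8) is enough to guarantee the aligned pointer still has the full 16 bytes available.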
@@ -815,6 +708,11 @@
 
	if (!enc)
		left -= auth_tag_len;
+
+	if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
+		gcm_tfm = &aesni_gcm_tfm_avx_gen2;
+	if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
+		gcm_tfm = &aesni_gcm_tfm_sse;
 
	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length &&
@@ -843,8 +741,7 @@
	}
 
	kernel_fpu_begin();
-	aesni_gcm_init(aes_ctx, &data, iv,
-		       hash_subkey, assoc, assoclen);
+	gcm_tfm->init(aes_ctx, data, iv, hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
@@ -854,10 +751,10 @@
			len = min(srclen, dstlen);
			if (len) {
				if (enc)
-					aesni_gcm_enc_update(aes_ctx, &data,
+					gcm_tfm->enc_update(aes_ctx, data,
							     dst, src, len);
				else
-					aesni_gcm_dec_update(aes_ctx, &data,
+					gcm_tfm->dec_update(aes_ctx, data,
							     dst, src, len);
			}
			left -= len;
@@ -875,10 +772,10 @@
			len = scatterwalk_clamp(&src_sg_walk, left);
			if (len) {
				if (enc)
-					aesni_gcm_enc_update(aes_ctx, &data,
+					gcm_tfm->enc_update(aes_ctx, data,
							     src, src, len);
				else
-					aesni_gcm_dec_update(aes_ctx, &data,
+					gcm_tfm->dec_update(aes_ctx, data,
							     src, src, len);
			}
			left -= len;
@@ -887,7 +784,7 @@
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
-	aesni_gcm_finalize(aes_ctx, &data, authTag, auth_tag_len);
+	gcm_tfm->finalize(aes_ctx, data, authTag, auth_tag_len);
	kernel_fpu_end();
 
	if (!assocmem)
@@ -920,147 +817,15 @@
 static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
 {
-	u8 one_entry_in_sg = 0;
-	u8 *src, *dst, *assoc;
-	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
-	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
-	struct scatter_walk src_sg_walk;
-	struct scatter_walk dst_sg_walk = {};
-	struct gcm_context_data data AESNI_ALIGN_ATTR;
-
-	if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
-			aesni_gcm_enc_tfm == aesni_gcm_enc ||
-			req->cryptlen < AVX_GEN2_OPTSIZE) {
-		return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
-					  aes_ctx);
-	}
-	if (sg_is_last(req->src) &&
-	    (!PageHighMem(sg_page(req->src)) ||
-	    req->src->offset + req->src->length <= PAGE_SIZE) &&
-	    sg_is_last(req->dst) &&
-	    (!PageHighMem(sg_page(req->dst)) ||
-	    req->dst->offset + req->dst->length <= PAGE_SIZE)) {
-		one_entry_in_sg = 1;
-		scatterwalk_start(&src_sg_walk, req->src);
-		assoc = scatterwalk_map(&src_sg_walk);
-		src = assoc + req->assoclen;
-		dst = src;
-		if (unlikely(req->src != req->dst)) {
-			scatterwalk_start(&dst_sg_walk, req->dst);
-			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
-		}
-	} else {
-		/* Allocate memory for src, dst, assoc */
-		assoc = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
-			GFP_ATOMIC);
-		if (unlikely(!assoc))
-			return -ENOMEM;
-		scatterwalk_map_and_copy(assoc, req->src, 0,
-					 req->assoclen + req->cryptlen, 0);
-		src = assoc + req->assoclen;
-		dst = src;
-	}
-
-	kernel_fpu_begin();
-	aesni_gcm_enc_tfm(aes_ctx, &data, dst, src, req->cryptlen, iv,
-			  hash_subkey, assoc, assoclen,
-			  dst + req->cryptlen, auth_tag_len);
-	kernel_fpu_end();
-
-	/* The authTag (aka the Integrity Check Value) needs to be written
-	 * back to the packet. */
-	if (one_entry_in_sg) {
-		if (unlikely(req->src != req->dst)) {
-			scatterwalk_unmap(dst - req->assoclen);
-			scatterwalk_advance(&dst_sg_walk, req->dst->length);
-			scatterwalk_done(&dst_sg_walk, 1, 0);
-		}
-		scatterwalk_unmap(assoc);
-		scatterwalk_advance(&src_sg_walk, req->src->length);
-		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
-	} else {
-		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
-					 req->cryptlen + auth_tag_len, 1);
-		kfree(assoc);
-	}
-	return 0;
+	return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
				  aes_ctx);
 }
 
 static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
 {
-	u8 one_entry_in_sg = 0;
-	u8 *src, *dst, *assoc;
-	unsigned long tempCipherLen = 0;
-	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
-	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
-	u8 authTag[16];
-	struct scatter_walk src_sg_walk;
-	struct scatter_walk dst_sg_walk = {};
-	struct gcm_context_data data AESNI_ALIGN_ATTR;
-	int retval = 0;
-
-	if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
-			aesni_gcm_enc_tfm == aesni_gcm_enc ||
-			req->cryptlen < AVX_GEN2_OPTSIZE) {
-		return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
-					  aes_ctx);
-	}
-	tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);
-
-	if (sg_is_last(req->src) &&
-	    (!PageHighMem(sg_page(req->src)) ||
-	    req->src->offset + req->src->length <= PAGE_SIZE) &&
-	    sg_is_last(req->dst) && req->dst->length &&
-	    (!PageHighMem(sg_page(req->dst)) ||
-	    req->dst->offset + req->dst->length <= PAGE_SIZE)) {
-		one_entry_in_sg = 1;
-		scatterwalk_start(&src_sg_walk, req->src);
-		assoc = scatterwalk_map(&src_sg_walk);
-		src = assoc + req->assoclen;
-		dst = src;
-		if (unlikely(req->src != req->dst)) {
-			scatterwalk_start(&dst_sg_walk, req->dst);
-			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
-		}
-	} else {
-		/* Allocate memory for src, dst, assoc */
-		assoc = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
-		if (!assoc)
-			return -ENOMEM;
-		scatterwalk_map_and_copy(assoc, req->src, 0,
-					 req->assoclen + req->cryptlen, 0);
-		src = assoc + req->assoclen;
-		dst = src;
-	}
-
-
-	kernel_fpu_begin();
-	aesni_gcm_dec_tfm(aes_ctx, &data, dst, src, tempCipherLen, iv,
-			  hash_subkey, assoc, assoclen,
-			  authTag, auth_tag_len);
-	kernel_fpu_end();
-
-	/* Compare generated tag with passed in tag. */
-	retval = crypto_memneq(src + tempCipherLen, authTag, auth_tag_len) ?
-		-EBADMSG : 0;
-
-	if (one_entry_in_sg) {
-		if (unlikely(req->src != req->dst)) {
-			scatterwalk_unmap(dst - req->assoclen);
-			scatterwalk_advance(&dst_sg_walk, req->dst->length);
-			scatterwalk_done(&dst_sg_walk, 1, 0);
-		}
-		scatterwalk_unmap(assoc);
-		scatterwalk_advance(&src_sg_walk, req->src->length);
-		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
-	} else {
-		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
-					 tempCipherLen, 1);
-		kfree(assoc);
-	}
-	return retval;
-
+	return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
				  aes_ctx);
 }
 
 static int helper_rfc4106_encrypt(struct aead_request *req)
@@ -1068,7 +833,8 @@
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
-	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
+	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
+	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
	unsigned int i;
	__be32 counter = cpu_to_be32(1);
 
@@ -1095,7 +861,8 @@
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
-	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
+	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
+	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
	unsigned int i;
 
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
@@ -1115,41 +882,9 @@
	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
 }
-
-static int gcmaes_wrapper_encrypt(struct aead_request *req)
-{
-	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
-	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
-	struct cryptd_aead *cryptd_tfm = *ctx;
-
-	tfm = &cryptd_tfm->base;
-	if (irq_fpu_usable() && (!in_atomic() ||
-				 !cryptd_aead_queued(cryptd_tfm)))
-		tfm = cryptd_aead_child(cryptd_tfm);
-
-	aead_request_set_tfm(req, tfm);
-
-	return crypto_aead_encrypt(req);
-}
-
-static int gcmaes_wrapper_decrypt(struct aead_request *req)
-{
-	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
-	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
-	struct cryptd_aead *cryptd_tfm = *ctx;
-
-	tfm = &cryptd_tfm->base;
-	if (irq_fpu_usable() && (!in_atomic() ||
-				 !cryptd_aead_queued(cryptd_tfm)))
-		tfm = cryptd_aead_child(cryptd_tfm);
-
-	aead_request_set_tfm(req, tfm);
-
-	return crypto_aead_decrypt(req);
-}
 #endif
 
-static struct crypto_alg aesni_algs[] = { {
+static struct crypto_alg aesni_cipher_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
@@ -1162,28 +897,11 @@
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
-			.cia_encrypt		= aes_encrypt,
-			.cia_decrypt		= aes_decrypt
+			.cia_encrypt		= aesni_encrypt,
+			.cia_decrypt		= aesni_decrypt
		}
	}
-}, {
-	.cra_name		= "__aes",
-	.cra_driver_name	= "__aes-aesni",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
-	.cra_module		= THIS_MODULE,
-	.cra_u	= {
-		.cipher	= {
-			.cia_min_keysize	= AES_MIN_KEY_SIZE,
-			.cia_max_keysize	= AES_MAX_KEY_SIZE,
-			.cia_setkey		= aes_set_key,
-			.cia_encrypt		= __aes_encrypt,
-			.cia_decrypt		= __aes_decrypt
-		}
-	}
-} };
+};
 
 static struct skcipher_alg aesni_skciphers[] = {
	{
@@ -1258,22 +976,6 @@
 static
 struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];
 
-static struct {
-	const char *algname;
-	const char *drvname;
-	const char *basename;
-	struct simd_skcipher_alg *simd;
-} aesni_simd_skciphers2[] = {
-#if (defined(MODULE) && IS_ENABLED(CONFIG_CRYPTO_PCBC)) || \
-    IS_BUILTIN(CONFIG_CRYPTO_PCBC)
-	{
-		.algname	= "pcbc(aes)",
-		.drvname	= "pcbc-aes-aesni",
-		.basename	= "fpu(pcbc(__aes-aesni))",
-	},
-#endif
-};
-
 #ifdef CONFIG_X86_64
 static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
@@ -1290,7 +992,8 @@
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
-	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
+	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
+	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
	__be32 counter = cpu_to_be32(1);
 
	memcpy(iv, req->iv, 12);
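The context lines above show how the initial GCM counter block is built from a 96-bit IV: the IV occupies bytes 0..11 and the trailing 32-bit counter starts at 1 in big-endian order. A minimal self-contained sketch of that layout (gcm_build_j0 is an illustrative name, not a kernel helper):

#include <stdint.h>
#include <string.h>
#include <stdio.h>

static void gcm_build_j0(uint8_t j0[16], const uint8_t iv[12])
{
	memcpy(j0, iv, 12);	/* 96-bit IV in bytes 0..11 */
	j0[12] = 0;		/* 32-bit counter = 1, big-endian */
	j0[13] = 0;
	j0[14] = 0;
	j0[15] = 1;
}

int main(void)
{
	uint8_t iv[12] = { 0 }, j0[16];

	gcm_build_j0(j0, iv);
	printf("last byte of J0: %u\n", j0[15]);
	return 0;
}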
@@ -1306,7 +1009,8 @@
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
-	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
+	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
+	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
 
	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;
@@ -1315,31 +1019,7 @@
			      aes_ctx);
 }
 
-static int generic_gcmaes_init(struct crypto_aead *aead)
-{
-	struct cryptd_aead *cryptd_tfm;
-	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
-
-	cryptd_tfm = cryptd_alloc_aead("__driver-generic-gcm-aes-aesni",
-				       CRYPTO_ALG_INTERNAL,
-				       CRYPTO_ALG_INTERNAL);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-
-	*ctx = cryptd_tfm;
-	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
-
-	return 0;
-}
-
-static void generic_gcmaes_exit(struct crypto_aead *aead)
-{
-	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
-
-	cryptd_free_aead(*ctx);
-}
-
-static struct aead_alg aesni_aead_algs[] = { {
+static struct aead_alg aesni_aeads[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
@@ -1347,30 +1027,13 @@
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
-		.cra_name		= "__gcm-aes-aesni",
-		.cra_driver_name	= "__driver-gcm-aes-aesni",
+		.cra_name		= "__rfc4106(gcm(aes))",
+		.cra_driver_name	= "__rfc4106-gcm-aesni",
+		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
-		.cra_module		= THIS_MODULE,
-	},
-}, {
-	.init			= rfc4106_init,
-	.exit			= rfc4106_exit,
-	.setkey			= gcmaes_wrapper_set_key,
-	.setauthsize		= gcmaes_wrapper_set_authsize,
-	.encrypt		= gcmaes_wrapper_encrypt,
-	.decrypt		= gcmaes_wrapper_decrypt,
-	.ivsize			= GCM_RFC4106_IV_SIZE,
-	.maxauthsize		= 16,
-	.base = {
-		.cra_name		= "rfc4106(gcm(aes))",
-		.cra_driver_name	= "rfc4106-gcm-aesni",
-		.cra_priority		= 400,
-		.cra_flags		= CRYPTO_ALG_ASYNC,
-		.cra_blocksize		= 1,
-		.cra_ctxsize		= sizeof(struct cryptd_aead *),
		.cra_module		= THIS_MODULE,
	},
 }, {
@@ -1381,165 +1044,86 @@
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
-		.cra_name		= "__generic-gcm-aes-aesni",
-		.cra_driver_name	= "__driver-generic-gcm-aes-aesni",
-		.cra_priority		= 0,
+		.cra_name		= "__gcm(aes)",
+		.cra_driver_name	= "__generic-gcm-aesni",
+		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
-}, {
-	.init			= generic_gcmaes_init,
-	.exit			= generic_gcmaes_exit,
-	.setkey			= gcmaes_wrapper_set_key,
-	.setauthsize		= gcmaes_wrapper_set_authsize,
-	.encrypt		= gcmaes_wrapper_encrypt,
-	.decrypt		= gcmaes_wrapper_decrypt,
-	.ivsize			= GCM_AES_IV_SIZE,
-	.maxauthsize		= 16,
-	.base = {
-		.cra_name		= "gcm(aes)",
-		.cra_driver_name	= "generic-gcm-aesni",
-		.cra_priority		= 400,
-		.cra_flags		= CRYPTO_ALG_ASYNC,
-		.cra_blocksize		= 1,
-		.cra_ctxsize		= sizeof(struct cryptd_aead *),
-		.cra_module		= THIS_MODULE,
-	},
 } };
 #else
-static struct aead_alg aesni_aead_algs[0];
+static struct aead_alg aesni_aeads[0];
 #endif
 
+static struct simd_aead_alg *aesni_simd_aeads[ARRAY_SIZE(aesni_aeads)];
 
 static const struct x86_cpu_id aesni_cpu_id[] = {
-	X86_FEATURE_MATCH(X86_FEATURE_AES),
+	X86_MATCH_FEATURE(X86_FEATURE_AES, NULL),
	{}
 };
 MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
 
-static void aesni_free_simds(void)
-{
-	int i;
-
-	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
-		    aesni_simd_skciphers[i]; i++)
-		simd_skcipher_free(aesni_simd_skciphers[i]);
-
-	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++)
-		if (aesni_simd_skciphers2[i].simd)
-			simd_skcipher_free(aesni_simd_skciphers2[i].simd);
-}
-
 static int __init aesni_init(void)
 {
-	struct simd_skcipher_alg *simd;
-	const char *basename;
-	const char *algname;
-	const char *drvname;
	int err;
-	int i;
 
	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
 #ifdef CONFIG_X86_64
-#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
-		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
-		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
+		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
	} else
-#endif
-#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
-		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
-		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
-	} else
-#endif
-	{
+		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
+	} else {
		pr_info("SSE version of gcm_enc/dec engaged.\n");
-		aesni_gcm_enc_tfm = aesni_gcm_enc;
-		aesni_gcm_dec_tfm = aesni_gcm_dec;
+		aesni_gcm_tfm = &aesni_gcm_tfm_sse;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
-#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
 #endif
-#endif
 
-	err = crypto_fpu_init();
+	err = crypto_register_alg(&aesni_cipher_alg);
	if (err)
		return err;
 
-	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
+	err = simd_register_skciphers_compat(aesni_skciphers,
					     ARRAY_SIZE(aesni_skciphers),
					     aesni_simd_skciphers);
	if (err)
-		goto fpu_exit;
+		goto unregister_cipher;
 
-	err = crypto_register_skciphers(aesni_skciphers,
-					ARRAY_SIZE(aesni_skciphers));
-	if (err)
-		goto unregister_algs;
-
-	err = crypto_register_aeads(aesni_aead_algs,
-				    ARRAY_SIZE(aesni_aead_algs));
+	err = simd_register_aeads_compat(aesni_aeads, ARRAY_SIZE(aesni_aeads),
					 aesni_simd_aeads);
	if (err)
		goto unregister_skciphers;
 
-	for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
-		algname = aesni_skciphers[i].base.cra_name + 2;
-		drvname = aesni_skciphers[i].base.cra_driver_name + 2;
-		basename = aesni_skciphers[i].base.cra_driver_name;
-		simd = simd_skcipher_create_compat(algname, drvname, basename);
-		err = PTR_ERR(simd);
-		if (IS_ERR(simd))
-			goto unregister_simds;
-
-		aesni_simd_skciphers[i] = simd;
-	}
-
-	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++) {
-		algname = aesni_simd_skciphers2[i].algname;
-		drvname = aesni_simd_skciphers2[i].drvname;
-		basename = aesni_simd_skciphers2[i].basename;
-		simd = simd_skcipher_create_compat(algname, drvname, basename);
-		err = PTR_ERR(simd);
-		if (IS_ERR(simd))
-			continue;
-
-		aesni_simd_skciphers2[i].simd = simd;
-	}
-
	return 0;
 
-unregister_simds:
-	aesni_free_simds();
-	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
 unregister_skciphers:
-	crypto_unregister_skciphers(aesni_skciphers,
-				    ARRAY_SIZE(aesni_skciphers));
-unregister_algs:
-	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
-fpu_exit:
-	crypto_fpu_exit();
+	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
+unregister_cipher:
+	crypto_unregister_alg(&aesni_cipher_alg);
	return err;
 }
 
 static void __exit aesni_exit(void)
 {
-	aesni_free_simds();
-	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
-	crypto_unregister_skciphers(aesni_skciphers,
-				    ARRAY_SIZE(aesni_skciphers));
-	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
-
-	crypto_fpu_exit();
+	simd_unregister_aeads(aesni_aeads, ARRAY_SIZE(aesni_aeads),
			      aesni_simd_aeads);
+	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
+	crypto_unregister_alg(&aesni_cipher_alg);
 }
 
 late_initcall(aesni_init);
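The reworked aesni_init()/aesni_exit() above follow the usual register-in-order, unwind-in-reverse pattern: each failed registration jumps to a label that tears down only what was already registered, and the exit path undoes everything in the opposite order. A minimal user-space sketch of that control-flow shape only (all function names here are illustrative, not the crypto API):

#include <stdio.h>

static int register_cipher(void)       { return 0; }
static void unregister_cipher(void)    { puts("cipher unregistered"); }
static int register_skciphers(void)    { return 0; }
static void unregister_skciphers(void) { puts("skciphers unregistered"); }
static int register_aeads(void)        { return -1; }	/* simulate a failure */

static int init_all(void)
{
	int err;

	err = register_cipher();
	if (err)
		return err;

	err = register_skciphers();
	if (err)
		goto err_cipher;

	err = register_aeads();
	if (err)
		goto err_skciphers;

	return 0;

err_skciphers:
	unregister_skciphers();	/* undo only what succeeded so far */
err_cipher:
	unregister_cipher();
	return err;
}

int main(void)
{
	printf("init_all() = %d\n", init_all());
	return 0;
}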