.. | .. |
| 1 | +// SPDX-License-Identifier: GPL-2.0-or-later
1 | 2 | /*
2 | 3 |  * Support for Intel AES-NI instructions. This file contains glue
3 | 4 |  * code, the real AES implementation is in intel-aes_asm.S.
.. | .. |
12 | 13 |  * Tadeusz Struk (tadeusz.struk@intel.com)
13 | 14 |  * Aidan O'Mahony (aidan.o.mahony@intel.com)
14 | 15 |  * Copyright (c) 2010, Intel Corporation.
15 | | - *
16 | | - * This program is free software; you can redistribute it and/or modify
17 | | - * it under the terms of the GNU General Public License as published by
18 | | - * the Free Software Foundation; either version 2 of the License, or
19 | | - * (at your option) any later version.
20 | 16 |  */
21 | 17 |
22 | 18 | #include <linux/hardirq.h>
.. | .. |
25 | 21 | #include <linux/err.h>
26 | 22 | #include <crypto/algapi.h>
27 | 23 | #include <crypto/aes.h>
28 | | -#include <crypto/cryptd.h>
29 | 24 | #include <crypto/ctr.h>
30 | 25 | #include <crypto/b128ops.h>
31 | 26 | #include <crypto/gcm.h>
32 | 27 | #include <crypto/xts.h>
33 | 28 | #include <asm/cpu_device_id.h>
34 | | -#include <asm/fpu/api.h>
35 | | -#include <asm/crypto/aes.h>
| 29 | +#include <asm/simd.h>
36 | 30 | #include <crypto/scatterwalk.h>
37 | 31 | #include <crypto/internal/aead.h>
38 | 32 | #include <crypto/internal/simd.h>
.. | .. |
84 | 78 | 	u8 current_counter[GCM_BLOCK_LEN];
85 | 79 | 	u64 partial_block_len;
86 | 80 | 	u64 unused;
87 | | -	u8 hash_keys[GCM_BLOCK_LEN * 8];
| 81 | +	u8 hash_keys[GCM_BLOCK_LEN * 16];
88 | 82 | };
89 | 83 |
90 | 84 | asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
91 | 85 | 			     unsigned int key_len);
92 | | -asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
93 | | -			  const u8 *in);
94 | | -asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
95 | | -			  const u8 *in);
| 86 | +asmlinkage void aesni_enc(const void *ctx, u8 *out, const u8 *in);
| 87 | +asmlinkage void aesni_dec(const void *ctx, u8 *out, const u8 *in);
96 | 88 | asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
97 | 89 | 			      const u8 *in, unsigned int len);
98 | 90 | asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
.. | .. |
102 | 94 | asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
103 | 95 | 			      const u8 *in, unsigned int len, u8 *iv);
104 | 96 |
105 | | -int crypto_fpu_init(void);
106 | | -void crypto_fpu_exit(void);
107 | | -
108 | 97 | #define AVX_GEN2_OPTSIZE 640
109 | 98 | #define AVX_GEN4_OPTSIZE 4096
| 99 | +
| 100 | +asmlinkage void aesni_xts_encrypt(const struct crypto_aes_ctx *ctx, u8 *out,
| 101 | +				  const u8 *in, unsigned int len, u8 *iv);
| 102 | +
| 103 | +asmlinkage void aesni_xts_decrypt(const struct crypto_aes_ctx *ctx, u8 *out,
| 104 | +				  const u8 *in, unsigned int len, u8 *iv);
110 | 105 |
111 | 106 | #ifdef CONFIG_X86_64
112 | 107 |
.. | .. |
114 | 109 | 			      const u8 *in, unsigned int len, u8 *iv);
115 | 110 | asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
116 | 111 | 			      const u8 *in, unsigned int len, u8 *iv);
117 | | -
118 | | -asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
119 | | -				 const u8 *in, bool enc, u8 *iv);
120 | 112 |
121 | 113 | /* asmlinkage void aesni_gcm_enc()
122 | 114 |  * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
.. | .. |
178 | 170 | 			struct gcm_context_data *gdata,
179 | 171 | 			u8 *auth_tag, unsigned long auth_tag_len);
180 | 172 |
181 | | -#ifdef CONFIG_AS_AVX
| 173 | +static const struct aesni_gcm_tfm_s {
| 174 | +	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
| 175 | +		     u8 *hash_subkey, const u8 *aad, unsigned long aad_len);
| 176 | +	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
| 177 | +			   const u8 *in, unsigned long plaintext_len);
| 178 | +	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
| 179 | +			   const u8 *in, unsigned long ciphertext_len);
| 180 | +	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
| 181 | +			 u8 *auth_tag, unsigned long auth_tag_len);
| 182 | +} *aesni_gcm_tfm;
| 183 | +
| 184 | +static const struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
| 185 | +	.init = &aesni_gcm_init,
| 186 | +	.enc_update = &aesni_gcm_enc_update,
| 187 | +	.dec_update = &aesni_gcm_dec_update,
| 188 | +	.finalize = &aesni_gcm_finalize,
| 189 | +};
| 190 | +
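The ops table above collapses the old per-ISA entry points into four phases, init, enc_update, dec_update and finalize, behind one pointer that is picked once at module init. A minimal sketch of how a caller is meant to drive it, mirroring what gcmaes_crypt_by_sg() does later in this patch (the toy_gcm_one_pass() name and the flat-buffer framing are illustrative, not part of the patch):

static void toy_gcm_one_pass(void *aes_ctx, struct gcm_context_data *data,
			     u8 *iv, u8 *hash_subkey,
			     const u8 *aad, unsigned long aad_len,
			     u8 *dst, const u8 *src, unsigned long len,
			     u8 *tag, unsigned long tag_len)
{
	const struct aesni_gcm_tfm_s *gcm = aesni_gcm_tfm; /* set in aesni_init() */

	kernel_fpu_begin();
	gcm->init(aes_ctx, data, iv, hash_subkey, aad, aad_len);
	/* the *_update() hook may be called repeatedly to stream long requests */
	gcm->enc_update(aes_ctx, data, dst, src, len);
	gcm->finalize(aes_ctx, data, tag, tag_len);
	kernel_fpu_end();
}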
182 | 191 | asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
183 | 192 | 		void *keys, u8 *out, unsigned int num_bytes);
184 | 193 | asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
.. | .. |
186 | 195 | asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
187 | 196 | 		void *keys, u8 *out, unsigned int num_bytes);
188 | 197 | /*
189 | | - * asmlinkage void aesni_gcm_precomp_avx_gen2()
| 198 | + * asmlinkage void aesni_gcm_init_avx_gen2()
190 | 199 |  * gcm_data *my_ctx_data, context data
191 | 200 |  * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
192 | 201 |  */
193 | | -asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);
| 202 | +asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
| 203 | +					struct gcm_context_data *gdata,
| 204 | +					u8 *iv,
| 205 | +					u8 *hash_subkey,
| 206 | +					const u8 *aad,
| 207 | +					unsigned long aad_len);
194 | 208 |
195 | | -asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
| 209 | +asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
| 210 | +			struct gcm_context_data *gdata, u8 *out,
| 211 | +			const u8 *in, unsigned long plaintext_len);
| 212 | +asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
| 213 | +			struct gcm_context_data *gdata, u8 *out,
| 214 | +			const u8 *in,
| 215 | +			unsigned long ciphertext_len);
| 216 | +asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
| 217 | +			struct gcm_context_data *gdata,
| 218 | +			u8 *auth_tag, unsigned long auth_tag_len);
| 219 | +
| 220 | +asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
| 221 | +			struct gcm_context_data *gdata, u8 *out,
196 | 222 | 			const u8 *in, unsigned long plaintext_len, u8 *iv,
197 | 223 | 			const u8 *aad, unsigned long aad_len,
198 | 224 | 			u8 *auth_tag, unsigned long auth_tag_len);
199 | 225 |
200 | | -asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
| 226 | +asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
| 227 | +			struct gcm_context_data *gdata, u8 *out,
201 | 228 | 			const u8 *in, unsigned long ciphertext_len, u8 *iv,
202 | 229 | 			const u8 *aad, unsigned long aad_len,
203 | 230 | 			u8 *auth_tag, unsigned long auth_tag_len);
204 | 231 |
205 | | -static void aesni_gcm_enc_avx(void *ctx,
206 | | -			struct gcm_context_data *data, u8 *out,
207 | | -			const u8 *in, unsigned long plaintext_len, u8 *iv,
208 | | -			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
209 | | -			u8 *auth_tag, unsigned long auth_tag_len)
210 | | -{
211 | | -	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
212 | | -	if ((plaintext_len < AVX_GEN2_OPTSIZE) || (aes_ctx-> key_length != AES_KEYSIZE_128)){
213 | | -		aesni_gcm_enc(ctx, data, out, in,
214 | | -			plaintext_len, iv, hash_subkey, aad,
215 | | -			aad_len, auth_tag, auth_tag_len);
216 | | -	} else {
217 | | -		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
218 | | -		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
219 | | -					aad_len, auth_tag, auth_tag_len);
220 | | -	}
221 | | -}
| 232 | +static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
| 233 | +	.init = &aesni_gcm_init_avx_gen2,
| 234 | +	.enc_update = &aesni_gcm_enc_update_avx_gen2,
| 235 | +	.dec_update = &aesni_gcm_dec_update_avx_gen2,
| 236 | +	.finalize = &aesni_gcm_finalize_avx_gen2,
| 237 | +};
222 | 238 |
223 | | -static void aesni_gcm_dec_avx(void *ctx,
224 | | -			struct gcm_context_data *data, u8 *out,
225 | | -			const u8 *in, unsigned long ciphertext_len, u8 *iv,
226 | | -			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
227 | | -			u8 *auth_tag, unsigned long auth_tag_len)
228 | | -{
229 | | -	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
230 | | -	if ((ciphertext_len < AVX_GEN2_OPTSIZE) || (aes_ctx-> key_length != AES_KEYSIZE_128)) {
231 | | -		aesni_gcm_dec(ctx, data, out, in,
232 | | -			ciphertext_len, iv, hash_subkey, aad,
233 | | -			aad_len, auth_tag, auth_tag_len);
234 | | -	} else {
235 | | -		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
236 | | -		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
237 | | -					aad_len, auth_tag, auth_tag_len);
238 | | -	}
239 | | -}
240 | | -#endif
241 | | -
242 | | -#ifdef CONFIG_AS_AVX2
243 | 239 | /*
244 | | - * asmlinkage void aesni_gcm_precomp_avx_gen4()
| 240 | + * asmlinkage void aesni_gcm_init_avx_gen4()
245 | 241 |  * gcm_data *my_ctx_data, context data
246 | 242 |  * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
247 | 243 |  */
248 | | -asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);
| 244 | +asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
| 245 | +					struct gcm_context_data *gdata,
| 246 | +					u8 *iv,
| 247 | +					u8 *hash_subkey,
| 248 | +					const u8 *aad,
| 249 | +					unsigned long aad_len);
249 | 250 |
250 | | -asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
| 251 | +asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
| 252 | +			struct gcm_context_data *gdata, u8 *out,
| 253 | +			const u8 *in, unsigned long plaintext_len);
| 254 | +asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
| 255 | +			struct gcm_context_data *gdata, u8 *out,
| 256 | +			const u8 *in,
| 257 | +			unsigned long ciphertext_len);
| 258 | +asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
| 259 | +			struct gcm_context_data *gdata,
| 260 | +			u8 *auth_tag, unsigned long auth_tag_len);
| 261 | +
| 262 | +asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
| 263 | +			struct gcm_context_data *gdata, u8 *out,
251 | 264 | 			const u8 *in, unsigned long plaintext_len, u8 *iv,
252 | 265 | 			const u8 *aad, unsigned long aad_len,
253 | 266 | 			u8 *auth_tag, unsigned long auth_tag_len);
254 | 267 |
255 | | -asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
| 268 | +asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
| 269 | +			struct gcm_context_data *gdata, u8 *out,
256 | 270 | 			const u8 *in, unsigned long ciphertext_len, u8 *iv,
257 | 271 | 			const u8 *aad, unsigned long aad_len,
258 | 272 | 			u8 *auth_tag, unsigned long auth_tag_len);
259 | 273 |
260 | | -static void aesni_gcm_enc_avx2(void *ctx,
261 | | -			struct gcm_context_data *data, u8 *out,
262 | | -			const u8 *in, unsigned long plaintext_len, u8 *iv,
263 | | -			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
264 | | -			u8 *auth_tag, unsigned long auth_tag_len)
265 | | -{
266 | | -	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
267 | | -	if ((plaintext_len < AVX_GEN2_OPTSIZE) || (aes_ctx-> key_length != AES_KEYSIZE_128)) {
268 | | -		aesni_gcm_enc(ctx, data, out, in,
269 | | -			plaintext_len, iv, hash_subkey, aad,
270 | | -			aad_len, auth_tag, auth_tag_len);
271 | | -	} else if (plaintext_len < AVX_GEN4_OPTSIZE) {
272 | | -		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
273 | | -		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
274 | | -					aad_len, auth_tag, auth_tag_len);
275 | | -	} else {
276 | | -		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
277 | | -		aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
278 | | -					aad_len, auth_tag, auth_tag_len);
279 | | -	}
280 | | -}
281 | | -
282 | | -static void aesni_gcm_dec_avx2(void *ctx,
283 | | -			struct gcm_context_data *data, u8 *out,
284 | | -			const u8 *in, unsigned long ciphertext_len, u8 *iv,
285 | | -			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
286 | | -			u8 *auth_tag, unsigned long auth_tag_len)
287 | | -{
288 | | -	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
289 | | -	if ((ciphertext_len < AVX_GEN2_OPTSIZE) || (aes_ctx-> key_length != AES_KEYSIZE_128)) {
290 | | -		aesni_gcm_dec(ctx, data, out, in,
291 | | -			ciphertext_len, iv, hash_subkey,
292 | | -			aad, aad_len, auth_tag, auth_tag_len);
293 | | -	} else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
294 | | -		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
295 | | -		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
296 | | -				       aad_len, auth_tag, auth_tag_len);
297 | | -	} else {
298 | | -		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
299 | | -		aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
300 | | -				       aad_len, auth_tag, auth_tag_len);
301 | | -	}
302 | | -}
303 | | -#endif
304 | | -
305 | | -static void (*aesni_gcm_enc_tfm)(void *ctx,
306 | | -				 struct gcm_context_data *data, u8 *out,
307 | | -				 const u8 *in, unsigned long plaintext_len,
308 | | -				 u8 *iv, u8 *hash_subkey, const u8 *aad,
309 | | -				 unsigned long aad_len, u8 *auth_tag,
310 | | -				 unsigned long auth_tag_len);
311 | | -
312 | | -static void (*aesni_gcm_dec_tfm)(void *ctx,
313 | | -				 struct gcm_context_data *data, u8 *out,
314 | | -				 const u8 *in, unsigned long ciphertext_len,
315 | | -				 u8 *iv, u8 *hash_subkey, const u8 *aad,
316 | | -				 unsigned long aad_len, u8 *auth_tag,
317 | | -				 unsigned long auth_tag_len);
| 274 | +static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
| 275 | +	.init = &aesni_gcm_init_avx_gen4,
| 276 | +	.enc_update = &aesni_gcm_enc_update_avx_gen4,
| 277 | +	.dec_update = &aesni_gcm_dec_update_avx_gen4,
| 278 | +	.finalize = &aesni_gcm_finalize_avx_gen4,
| 279 | +};
318 | 280 |
319 | 281 | static inline struct
320 | 282 | aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
.. | .. |
351 | 313 | 			      const u8 *in_key, unsigned int key_len)
352 | 314 | {
353 | 315 | 	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
354 | | -	u32 *flags = &tfm->crt_flags;
355 | 316 | 	int err;
356 | 317 |
357 | 318 | 	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
358 | | -	    key_len != AES_KEYSIZE_256) {
359 | | -		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
| 319 | +	    key_len != AES_KEYSIZE_256)
360 | 320 | 		return -EINVAL;
361 | | -	}
362 | 321 |
363 | | -	if (!irq_fpu_usable())
364 | | -		err = crypto_aes_expand_key(ctx, in_key, key_len);
| 322 | +	if (!crypto_simd_usable())
| 323 | +		err = aes_expandkey(ctx, in_key, key_len);
365 | 324 | 	else {
366 | 325 | 		kernel_fpu_begin();
367 | 326 | 		err = aesni_set_key(ctx, in_key, key_len);
.. | .. |
377 | 336 | 	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
378 | 337 | }
379 | 338 |
380 | | -static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
| 339 | +static void aesni_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
381 | 340 | {
382 | 341 | 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
383 | 342 |
384 | | -	if (!irq_fpu_usable())
385 | | -		crypto_aes_encrypt_x86(ctx, dst, src);
386 | | -	else {
| 343 | +	if (!crypto_simd_usable()) {
| 344 | +		aes_encrypt(ctx, dst, src);
| 345 | +	} else {
387 | 346 | 		kernel_fpu_begin();
388 | 347 | 		aesni_enc(ctx, dst, src);
389 | 348 | 		kernel_fpu_end();
390 | 349 | 	}
391 | 350 | }
392 | 351 |
393 | | -static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
| 352 | +static void aesni_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
394 | 353 | {
395 | 354 | 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
396 | 355 |
397 | | -	if (!irq_fpu_usable())
398 | | -		crypto_aes_decrypt_x86(ctx, dst, src);
399 | | -	else {
| 356 | +	if (!crypto_simd_usable()) {
| 357 | +		aes_decrypt(ctx, dst, src);
| 358 | +	} else {
400 | 359 | 		kernel_fpu_begin();
401 | 360 | 		aesni_dec(ctx, dst, src);
402 | 361 | 		kernel_fpu_end();
403 | 362 | 	}
404 | | -}
405 | | -
406 | | -static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
407 | | -{
408 | | -	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
409 | | -
410 | | -	aesni_enc(ctx, dst, src);
411 | | -}
412 | | -
413 | | -static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
414 | | -{
415 | | -	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
416 | | -
417 | | -	aesni_dec(ctx, dst, src);
418 | 363 | }
419 | 364 |
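The setkey/encrypt/decrypt hunks above all converge on one guard: AES-NI clobbers SIMD register state, so it may only run between kernel_fpu_begin()/kernel_fpu_end(), and only when crypto_simd_usable() says the current context allows claiming the FPU; otherwise the generic AES library (aes_expandkey()/aes_encrypt()/aes_decrypt() from <crypto/aes.h>) is used. The pattern in isolation, a minimal sketch with a hypothetical toy_encrypt() name:

static void toy_encrypt(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
{
	if (!crypto_simd_usable()) {
		aes_encrypt(ctx, dst, src);	/* portable C fallback, no FPU */
	} else {
		kernel_fpu_begin();		/* claim FPU/SIMD register state */
		aesni_enc(ctx, dst, src);	/* AES-NI assembler routine */
		kernel_fpu_end();
	}
}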
420 | 365 | static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
.. | .. |
528 | 473 | 		crypto_inc(ctrblk, AES_BLOCK_SIZE);
529 | 474 | }
530 | 475 |
531 | | -#ifdef CONFIG_AS_AVX
532 | 476 | static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
533 | 477 | 			      const u8 *in, unsigned int len, u8 *iv)
534 | 478 | {
.. | .. |
545 | 489 | 	else
546 | 490 | 		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
547 | 491 | }
548 | | -#endif
549 | 492 |
550 | 493 | static int ctr_crypt(struct skcipher_request *req)
551 | 494 | {
.. | .. |
599 | 542 | }
600 | 543 |
601 | 544 |
602 | | -static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
| 545 | +static void aesni_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
603 | 546 | {
604 | | -	aesni_enc(ctx, out, in);
| 547 | +	glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_enc);
605 | 548 | }
606 | 549 |
607 | | -static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
| 550 | +static void aesni_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
608 | 551 | {
609 | | -	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
| 552 | +	glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_dec);
610 | 553 | }
611 | 554 |
612 | | -static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
| 555 | +static void aesni_xts_enc32(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
613 | 556 | {
614 | | -	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
| 557 | +	aesni_xts_encrypt(ctx, dst, src, 32 * AES_BLOCK_SIZE, (u8 *)iv);
615 | 558 | }
616 | 559 |
617 | | -static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
| 560 | +static void aesni_xts_dec32(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
618 | 561 | {
619 | | -	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
620 | | -}
621 | | -
622 | | -static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
623 | | -{
624 | | -	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
| 562 | +	aesni_xts_decrypt(ctx, dst, src, 32 * AES_BLOCK_SIZE, (u8 *)iv);
625 | 563 | }
626 | 564 |
627 | 565 | static const struct common_glue_ctx aesni_enc_xts = {
.. | .. |
629 | 567 | 	.fpu_blocks_limit = 1,
630 | 568 |
631 | 569 | 	.funcs = { {
632 | | -		.num_blocks = 8,
633 | | -		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
| 570 | +		.num_blocks = 32,
| 571 | +		.fn_u = { .xts = aesni_xts_enc32 }
634 | 572 | 	}, {
635 | 573 | 		.num_blocks = 1,
636 | | -		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
| 574 | +		.fn_u = { .xts = aesni_xts_enc }
637 | 575 | 	} }
638 | 576 | };
639 | 577 |
.. | .. |
642 | 580 | 	.fpu_blocks_limit = 1,
643 | 581 |
644 | 582 | 	.funcs = { {
645 | | -		.num_blocks = 8,
646 | | -		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
| 583 | +		.num_blocks = 32,
| 584 | +		.fn_u = { .xts = aesni_xts_dec32 }
647 | 585 | 	}, {
648 | 586 | 		.num_blocks = 1,
649 | | -		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
| 587 | +		.fn_u = { .xts = aesni_xts_dec }
650 | 588 | 	} }
651 | 589 | };
652 | 590 |
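These tables are consumed largest-entry-first: the glue layer repeatedly uses the biggest num_blocks that still fits in the remaining data, so bulk input goes through the new 32-block assembler loop and the tail finishes one block at a time. A simplified sketch of that walk, not the real glue_xts_req_128bit() loop (num_blocks and fn_u come from the tables above; num_funcs, the function shape and the whole-blocks assumption are illustrative):

static void toy_glue_walk(const struct common_glue_ctx *gctx, void *ctx,
			  u8 *dst, const u8 *src, unsigned int nbytes,
			  le128 *iv)
{
	unsigned int i, func_bytes;

	while (nbytes) {
		/* pick the largest handler whose block count still fits */
		for (i = 0; i < gctx->num_funcs - 1; i++) {
			func_bytes = gctx->funcs[i].num_blocks * AES_BLOCK_SIZE;
			if (nbytes >= func_bytes)
				break;
		}
		func_bytes = gctx->funcs[i].num_blocks * AES_BLOCK_SIZE;
		gctx->funcs[i].fn_u.xts(ctx, dst, src, iv);
		src += func_bytes;
		dst += func_bytes;
		nbytes -= func_bytes;
	}
}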
.. | .. |
655 | 593 | 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
656 | 594 | 	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
657 | 595 |
658 | | -	return glue_xts_req_128bit(&aesni_enc_xts, req,
659 | | -				   XTS_TWEAK_CAST(aesni_xts_tweak),
| 596 | +	return glue_xts_req_128bit(&aesni_enc_xts, req, aesni_enc,
660 | 597 | 				   aes_ctx(ctx->raw_tweak_ctx),
661 | | -				   aes_ctx(ctx->raw_crypt_ctx));
| 598 | +				   aes_ctx(ctx->raw_crypt_ctx),
| 599 | +				   false);
662 | 600 | }
663 | 601 |
664 | 602 | static int xts_decrypt(struct skcipher_request *req)
.. | .. |
666 | 604 | 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
667 | 605 | 	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
668 | 606 |
669 | | -	return glue_xts_req_128bit(&aesni_dec_xts, req,
670 | | -				   XTS_TWEAK_CAST(aesni_xts_tweak),
| 607 | +	return glue_xts_req_128bit(&aesni_dec_xts, req, aesni_enc,
671 | 608 | 				   aes_ctx(ctx->raw_tweak_ctx),
672 | | -				   aes_ctx(ctx->raw_crypt_ctx));
673 | | -}
674 | | -
675 | | -static int rfc4106_init(struct crypto_aead *aead)
676 | | -{
677 | | -	struct cryptd_aead *cryptd_tfm;
678 | | -	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
679 | | -
680 | | -	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
681 | | -				       CRYPTO_ALG_INTERNAL,
682 | | -				       CRYPTO_ALG_INTERNAL);
683 | | -	if (IS_ERR(cryptd_tfm))
684 | | -		return PTR_ERR(cryptd_tfm);
685 | | -
686 | | -	*ctx = cryptd_tfm;
687 | | -	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
688 | | -	return 0;
689 | | -}
690 | | -
691 | | -static void rfc4106_exit(struct crypto_aead *aead)
692 | | -{
693 | | -	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
694 | | -
695 | | -	cryptd_free_aead(*ctx);
| 609 | +				   aes_ctx(ctx->raw_crypt_ctx),
| 610 | +				   true);
696 | 611 | }
697 | 612 |
698 | 613 | static int
699 | 614 | rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
700 | 615 | {
701 | | -	struct crypto_cipher *tfm;
| 616 | +	struct crypto_aes_ctx ctx;
702 | 617 | 	int ret;
703 | 618 |
704 | | -	tfm = crypto_alloc_cipher("aes", 0, 0);
705 | | -	if (IS_ERR(tfm))
706 | | -		return PTR_ERR(tfm);
707 | | -
708 | | -	ret = crypto_cipher_setkey(tfm, key, key_len);
| 619 | +	ret = aes_expandkey(&ctx, key, key_len);
709 | 620 | 	if (ret)
710 | | -		goto out_free_cipher;
| 621 | +		return ret;
711 | 622 |
712 | 623 | 	/* Clear the data in the hash sub key container to zero.*/
713 | 624 | 	/* We want to cipher all zeros to create the hash sub key. */
714 | 625 | 	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);
715 | 626 |
716 | | -	crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);
| 627 | +	aes_encrypt(&ctx, hash_subkey, hash_subkey);
717 | 628 |
718 | | -out_free_cipher:
719 | | -	crypto_free_cipher(tfm);
720 | | -	return ret;
| 629 | +	memzero_explicit(&ctx, sizeof(ctx));
| 630 | +	return 0;
721 | 631 | }
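The rewrite derives the GHASH hash subkey H = AES_K(0^128) with the AES library directly instead of allocating a throwaway crypto_cipher, and memzero_explicit() guarantees the expanded key schedule really gets wiped (a plain memset on an object about to go out of scope may be optimized away). The derivation in isolation, using the same library calls as the hunk above:

	struct crypto_aes_ctx ctx;
	u8 h[AES_BLOCK_SIZE] = {};		/* the all-zero block */

	if (!aes_expandkey(&ctx, key, key_len))
		aes_encrypt(&ctx, h, h);	/* h now holds the hash subkey H */
	memzero_explicit(&ctx, sizeof(ctx));	/* scrub the key schedule */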
722 | 632 |
723 | 633 | static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
.. | .. |
725 | 635 | {
726 | 636 | 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);
727 | 637 |
728 | | -	if (key_len < 4) {
729 | | -		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
| 638 | +	if (key_len < 4)
730 | 639 | 		return -EINVAL;
731 | | -	}
| 640 | +
732 | 641 | 	/*Account for 4 byte nonce at the end.*/
733 | 642 | 	key_len -= 4;
734 | 643 |
.. | .. |
739 | 648 | 	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
740 | 649 | }
741 | 650 |
742 | | -static int gcmaes_wrapper_set_key(struct crypto_aead *parent, const u8 *key,
743 | | -				  unsigned int key_len)
744 | | -{
745 | | -	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
746 | | -	struct cryptd_aead *cryptd_tfm = *ctx;
747 | | -
748 | | -	return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
749 | | -}
750 | | -
| 651 | +/* This is the Integrity Check Value (aka the authentication tag) length and can
| 652 | + * be 8, 12 or 16 bytes long. */
751 | 653 | static int common_rfc4106_set_authsize(struct crypto_aead *aead,
752 | 654 | 				       unsigned int authsize)
753 | 655 | {
.. | .. |
761 | 663 | 	}
762 | 664 |
763 | 665 | 	return 0;
764 | | -}
765 | | -
766 | | -/* This is the Integrity Check Value (aka the authentication tag length and can
767 | | - * be 8, 12 or 16 bytes long. */
768 | | -static int gcmaes_wrapper_set_authsize(struct crypto_aead *parent,
769 | | -				       unsigned int authsize)
770 | | -{
771 | | -	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
772 | | -	struct cryptd_aead *cryptd_tfm = *ctx;
773 | | -
774 | | -	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
775 | 666 | }
776 | 667 |
777 | 668 | static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
.. | .. |
799 | 690 | {
800 | 691 | 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
801 | 692 | 	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
802 | | -	struct gcm_context_data data AESNI_ALIGN_ATTR;
| 693 | +	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
| 694 | +	u8 databuf[sizeof(struct gcm_context_data) + (AESNI_ALIGN - 8)] __aligned(8);
| 695 | +	struct gcm_context_data *data = PTR_ALIGN((void *)databuf, AESNI_ALIGN);
803 | 696 | 	struct scatter_walk dst_sg_walk = {};
804 | 697 | 	unsigned long left = req->cryptlen;
805 | 698 | 	unsigned long len, srclen, dstlen;
.. | .. |
815 | 708 |
816 | 709 | 	if (!enc)
817 | 710 | 		left -= auth_tag_len;
| 711 | +
| 712 | +	if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
| 713 | +		gcm_tfm = &aesni_gcm_tfm_avx_gen2;
| 714 | +	if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
| 715 | +		gcm_tfm = &aesni_gcm_tfm_sse;
818 | 716 |
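The two comparisons only ever downgrade the boot-time choice: for short requests the AVX variants' setup cost outweighs their throughput. Worked through with the OPTSIZE constants defined earlier in this file (640 and 4096), assuming aesni_gcm_tfm was set to the gen4 implementation at boot:

/*
 *   left = 100   ->  gen4 -> gen2 -> sse   (below both thresholds)
 *   left = 1000  ->  gen4 -> gen2          (>= 640 but < 4096)
 *   left = 8192  ->  gen4                  (both tests false, unchanged)
 */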
819 | 717 | 	/* Linearize assoc, if not already linear */
820 | 718 | 	if (req->src->length >= assoclen && req->src->length &&
.. | .. |
843 | 741 | 	}
844 | 742 |
845 | 743 | 	kernel_fpu_begin();
846 | | -	aesni_gcm_init(aes_ctx, &data, iv,
847 | | -		       hash_subkey, assoc, assoclen);
| 744 | +	gcm_tfm->init(aes_ctx, data, iv, hash_subkey, assoc, assoclen);
848 | 745 | 	if (req->src != req->dst) {
849 | 746 | 		while (left) {
850 | 747 | 			src = scatterwalk_map(&src_sg_walk);
.. | .. |
854 | 751 | 			len = min(srclen, dstlen);
855 | 752 | 			if (len) {
856 | 753 | 				if (enc)
857 | | -					aesni_gcm_enc_update(aes_ctx, &data,
| 754 | +					gcm_tfm->enc_update(aes_ctx, data,
858 | 755 | 							    dst, src, len);
859 | 756 | 				else
860 | | -					aesni_gcm_dec_update(aes_ctx, &data,
| 757 | +					gcm_tfm->dec_update(aes_ctx, data,
861 | 758 | 							    dst, src, len);
862 | 759 | 			}
863 | 760 | 			left -= len;
.. | .. |
875 | 772 | 			len = scatterwalk_clamp(&src_sg_walk, left);
876 | 773 | 			if (len) {
877 | 774 | 				if (enc)
878 | | -					aesni_gcm_enc_update(aes_ctx, &data,
| 775 | +					gcm_tfm->enc_update(aes_ctx, data,
879 | 776 | 							    src, src, len);
880 | 777 | 				else
881 | | -					aesni_gcm_dec_update(aes_ctx, &data,
| 778 | +					gcm_tfm->dec_update(aes_ctx, data,
882 | 779 | 							    src, src, len);
883 | 780 | 			}
884 | 781 | 			left -= len;
.. | .. |
887 | 784 | 			scatterwalk_done(&src_sg_walk, 1, left);
888 | 785 | 		}
889 | 786 | 	}
890 | | -	aesni_gcm_finalize(aes_ctx, &data, authTag, auth_tag_len);
| 787 | +	gcm_tfm->finalize(aes_ctx, data, authTag, auth_tag_len);
891 | 788 | 	kernel_fpu_end();
892 | 789 |
893 | 790 | 	if (!assocmem)
.. | .. |
920 | 817 | static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
921 | 818 | 			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
922 | 819 | {
923 | | -	u8 one_entry_in_sg = 0;
924 | | -	u8 *src, *dst, *assoc;
925 | | -	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
926 | | -	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
927 | | -	struct scatter_walk src_sg_walk;
928 | | -	struct scatter_walk dst_sg_walk = {};
929 | | -	struct gcm_context_data data AESNI_ALIGN_ATTR;
930 | | -
931 | | -	if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
932 | | -	    aesni_gcm_enc_tfm == aesni_gcm_enc ||
933 | | -	    req->cryptlen < AVX_GEN2_OPTSIZE) {
934 | | -		return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
935 | | -					  aes_ctx);
936 | | -	}
937 | | -	if (sg_is_last(req->src) &&
938 | | -	    (!PageHighMem(sg_page(req->src)) ||
939 | | -	    req->src->offset + req->src->length <= PAGE_SIZE) &&
940 | | -	    sg_is_last(req->dst) &&
941 | | -	    (!PageHighMem(sg_page(req->dst)) ||
942 | | -	    req->dst->offset + req->dst->length <= PAGE_SIZE)) {
943 | | -		one_entry_in_sg = 1;
944 | | -		scatterwalk_start(&src_sg_walk, req->src);
945 | | -		assoc = scatterwalk_map(&src_sg_walk);
946 | | -		src = assoc + req->assoclen;
947 | | -		dst = src;
948 | | -		if (unlikely(req->src != req->dst)) {
949 | | -			scatterwalk_start(&dst_sg_walk, req->dst);
950 | | -			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
951 | | -		}
952 | | -	} else {
953 | | -		/* Allocate memory for src, dst, assoc */
954 | | -		assoc = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
955 | | -			GFP_ATOMIC);
956 | | -		if (unlikely(!assoc))
957 | | -			return -ENOMEM;
958 | | -		scatterwalk_map_and_copy(assoc, req->src, 0,
959 | | -					 req->assoclen + req->cryptlen, 0);
960 | | -		src = assoc + req->assoclen;
961 | | -		dst = src;
962 | | -	}
963 | | -
964 | | -	kernel_fpu_begin();
965 | | -	aesni_gcm_enc_tfm(aes_ctx, &data, dst, src, req->cryptlen, iv,
966 | | -			  hash_subkey, assoc, assoclen,
967 | | -			  dst + req->cryptlen, auth_tag_len);
968 | | -	kernel_fpu_end();
969 | | -
970 | | -	/* The authTag (aka the Integrity Check Value) needs to be written
971 | | -	 * back to the packet. */
972 | | -	if (one_entry_in_sg) {
973 | | -		if (unlikely(req->src != req->dst)) {
974 | | -			scatterwalk_unmap(dst - req->assoclen);
975 | | -			scatterwalk_advance(&dst_sg_walk, req->dst->length);
976 | | -			scatterwalk_done(&dst_sg_walk, 1, 0);
977 | | -		}
978 | | -		scatterwalk_unmap(assoc);
979 | | -		scatterwalk_advance(&src_sg_walk, req->src->length);
980 | | -		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
981 | | -	} else {
982 | | -		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
983 | | -					 req->cryptlen + auth_tag_len, 1);
984 | | -		kfree(assoc);
985 | | -	}
986 | | -	return 0;
| 820 | +	return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
| 821 | +				  aes_ctx);
987 | 822 | }
988 | 823 |
989 | 824 | static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
990 | 825 | 			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
991 | 826 | {
992 | | -	u8 one_entry_in_sg = 0;
993 | | -	u8 *src, *dst, *assoc;
994 | | -	unsigned long tempCipherLen = 0;
995 | | -	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
996 | | -	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
997 | | -	u8 authTag[16];
998 | | -	struct scatter_walk src_sg_walk;
999 | | -	struct scatter_walk dst_sg_walk = {};
1000 | | -	struct gcm_context_data data AESNI_ALIGN_ATTR;
1001 | | -	int retval = 0;
1002 | | -
1003 | | -	if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
1004 | | -	    aesni_gcm_enc_tfm == aesni_gcm_enc ||
1005 | | -	    req->cryptlen < AVX_GEN2_OPTSIZE) {
1006 | | -		return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
1007 | | -					  aes_ctx);
1008 | | -	}
1009 | | -	tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);
1010 | | -
1011 | | -	if (sg_is_last(req->src) &&
1012 | | -	    (!PageHighMem(sg_page(req->src)) ||
1013 | | -	    req->src->offset + req->src->length <= PAGE_SIZE) &&
1014 | | -	    sg_is_last(req->dst) && req->dst->length &&
1015 | | -	    (!PageHighMem(sg_page(req->dst)) ||
1016 | | -	    req->dst->offset + req->dst->length <= PAGE_SIZE)) {
1017 | | -		one_entry_in_sg = 1;
1018 | | -		scatterwalk_start(&src_sg_walk, req->src);
1019 | | -		assoc = scatterwalk_map(&src_sg_walk);
1020 | | -		src = assoc + req->assoclen;
1021 | | -		dst = src;
1022 | | -		if (unlikely(req->src != req->dst)) {
1023 | | -			scatterwalk_start(&dst_sg_walk, req->dst);
1024 | | -			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
1025 | | -		}
1026 | | -	} else {
1027 | | -		/* Allocate memory for src, dst, assoc */
1028 | | -		assoc = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
1029 | | -		if (!assoc)
1030 | | -			return -ENOMEM;
1031 | | -		scatterwalk_map_and_copy(assoc, req->src, 0,
1032 | | -					 req->assoclen + req->cryptlen, 0);
1033 | | -		src = assoc + req->assoclen;
1034 | | -		dst = src;
1035 | | -	}
1036 | | -
1037 | | -
1038 | | -	kernel_fpu_begin();
1039 | | -	aesni_gcm_dec_tfm(aes_ctx, &data, dst, src, tempCipherLen, iv,
1040 | | -			  hash_subkey, assoc, assoclen,
1041 | | -			  authTag, auth_tag_len);
1042 | | -	kernel_fpu_end();
1043 | | -
1044 | | -	/* Compare generated tag with passed in tag. */
1045 | | -	retval = crypto_memneq(src + tempCipherLen, authTag, auth_tag_len) ?
1046 | | -		-EBADMSG : 0;
1047 | | -
1048 | | -	if (one_entry_in_sg) {
1049 | | -		if (unlikely(req->src != req->dst)) {
1050 | | -			scatterwalk_unmap(dst - req->assoclen);
1051 | | -			scatterwalk_advance(&dst_sg_walk, req->dst->length);
1052 | | -			scatterwalk_done(&dst_sg_walk, 1, 0);
1053 | | -		}
1054 | | -		scatterwalk_unmap(assoc);
1055 | | -		scatterwalk_advance(&src_sg_walk, req->src->length);
1056 | | -		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
1057 | | -	} else {
1058 | | -		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
1059 | | -					 tempCipherLen, 1);
1060 | | -		kfree(assoc);
1061 | | -	}
1062 | | -	return retval;
1063 | | -
| 827 | +	return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
| 828 | +				  aes_ctx);
1064 | 829 | }
1065 | 830 |
1066 | 831 | static int helper_rfc4106_encrypt(struct aead_request *req)
.. | .. |
1068 | 833 | 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1069 | 834 | 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
1070 | 835 | 	void *aes_ctx = &(ctx->aes_key_expanded);
1071 | | -	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
| 836 | +	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
| 837 | +	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
1072 | 838 | 	unsigned int i;
1073 | 839 | 	__be32 counter = cpu_to_be32(1);
1074 | 840 |
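This is the same stack-alignment idiom as the databuf change in gcmaes_crypt_by_sg() above, and it repeats for every IV user below: __aligned(AESNI_ALIGN) on a local cannot be trusted because the x86-64 kernel stack itself only guarantees 8-byte alignment, so the buffer is over-allocated by (AESNI_ALIGN - 8) bytes and an aligned pointer is carved out of it. In isolation:

	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
	/*
	 * Wherever the 8-byte-aligned ivbuf lands, some offset within its
	 * first (AESNI_ALIGN - 8) bytes is 16-byte aligned, and at least
	 * 16 usable bytes remain after it, so iv is always a valid block.
	 */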
.. | .. |
1095 | 861 | 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1096 | 862 | 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
1097 | 863 | 	void *aes_ctx = &(ctx->aes_key_expanded);
1098 | | -	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
| 864 | +	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
| 865 | +	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
1099 | 866 | 	unsigned int i;
1100 | 867 |
1101 | 868 | 	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
.. | .. |
1115 | 882 | 	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
1116 | 883 | 			      aes_ctx);
1117 | 884 | }
1118 | | -
1119 | | -static int gcmaes_wrapper_encrypt(struct aead_request *req)
1120 | | -{
1121 | | -	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1122 | | -	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
1123 | | -	struct cryptd_aead *cryptd_tfm = *ctx;
1124 | | -
1125 | | -	tfm = &cryptd_tfm->base;
1126 | | -	if (irq_fpu_usable() && (!in_atomic() ||
1127 | | -				 !cryptd_aead_queued(cryptd_tfm)))
1128 | | -		tfm = cryptd_aead_child(cryptd_tfm);
1129 | | -
1130 | | -	aead_request_set_tfm(req, tfm);
1131 | | -
1132 | | -	return crypto_aead_encrypt(req);
1133 | | -}
1134 | | -
1135 | | -static int gcmaes_wrapper_decrypt(struct aead_request *req)
1136 | | -{
1137 | | -	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1138 | | -	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
1139 | | -	struct cryptd_aead *cryptd_tfm = *ctx;
1140 | | -
1141 | | -	tfm = &cryptd_tfm->base;
1142 | | -	if (irq_fpu_usable() && (!in_atomic() ||
1143 | | -				 !cryptd_aead_queued(cryptd_tfm)))
1144 | | -		tfm = cryptd_aead_child(cryptd_tfm);
1145 | | -
1146 | | -	aead_request_set_tfm(req, tfm);
1147 | | -
1148 | | -	return crypto_aead_decrypt(req);
1149 | | -}
1150 | 885 | #endif
1151 | 886 |
1152 | | -static struct crypto_alg aesni_algs[] = { {
| 887 | +static struct crypto_alg aesni_cipher_alg = {
1153 | 888 | 	.cra_name = "aes",
1154 | 889 | 	.cra_driver_name = "aes-aesni",
1155 | 890 | 	.cra_priority = 300,
.. | .. |
1162 | 897 | 			.cia_min_keysize = AES_MIN_KEY_SIZE,
1163 | 898 | 			.cia_max_keysize = AES_MAX_KEY_SIZE,
1164 | 899 | 			.cia_setkey = aes_set_key,
1165 | | -			.cia_encrypt = aes_encrypt,
1166 | | -			.cia_decrypt = aes_decrypt
| 900 | +			.cia_encrypt = aesni_encrypt,
| 901 | +			.cia_decrypt = aesni_decrypt
1167 | 902 | 		}
1168 | 903 | 	}
1169 | | -}, {
1170 | | -	.cra_name = "__aes",
1171 | | -	.cra_driver_name = "__aes-aesni",
1172 | | -	.cra_priority = 300,
1173 | | -	.cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
1174 | | -	.cra_blocksize = AES_BLOCK_SIZE,
1175 | | -	.cra_ctxsize = CRYPTO_AES_CTX_SIZE,
1176 | | -	.cra_module = THIS_MODULE,
1177 | | -	.cra_u = {
1178 | | -		.cipher = {
1179 | | -			.cia_min_keysize = AES_MIN_KEY_SIZE,
1180 | | -			.cia_max_keysize = AES_MAX_KEY_SIZE,
1181 | | -			.cia_setkey = aes_set_key,
1182 | | -			.cia_encrypt = __aes_encrypt,
1183 | | -			.cia_decrypt = __aes_decrypt
1184 | | -		}
1185 | | -	}
1186 | | -} };
| 904 | +};
1187 | 905 |
1188 | 906 | static struct skcipher_alg aesni_skciphers[] = {
1189 | 907 | 	{
.. | .. |
1258 | 976 | static
1259 | 977 | struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];
1260 | 978 |
1261 | | -static struct {
1262 | | -	const char *algname;
1263 | | -	const char *drvname;
1264 | | -	const char *basename;
1265 | | -	struct simd_skcipher_alg *simd;
1266 | | -} aesni_simd_skciphers2[] = {
1267 | | -#if (defined(MODULE) && IS_ENABLED(CONFIG_CRYPTO_PCBC)) || \
1268 | | -    IS_BUILTIN(CONFIG_CRYPTO_PCBC)
1269 | | -	{
1270 | | -		.algname = "pcbc(aes)",
1271 | | -		.drvname = "pcbc-aes-aesni",
1272 | | -		.basename = "fpu(pcbc(__aes-aesni))",
1273 | | -	},
1274 | | -#endif
1275 | | -};
1276 | | -
1277 | 979 | #ifdef CONFIG_X86_64
1278 | 980 | static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
1279 | 981 | 				  unsigned int key_len)
.. | .. |
1290 | 992 | 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1291 | 993 | 	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
1292 | 994 | 	void *aes_ctx = &(ctx->aes_key_expanded);
1293 | | -	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
| 995 | +	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
| 996 | +	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
1294 | 997 | 	__be32 counter = cpu_to_be32(1);
1295 | 998 |
1296 | 999 | 	memcpy(iv, req->iv, 12);
.. | .. |
1306 | 1009 | 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1307 | 1010 | 	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
1308 | 1011 | 	void *aes_ctx = &(ctx->aes_key_expanded);
1309 | | -	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
| 1012 | +	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
| 1013 | +	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
1310 | 1014 |
1311 | 1015 | 	memcpy(iv, req->iv, 12);
1312 | 1016 | 	*((__be32 *)(iv+12)) = counter;
.. | .. |
1315 | 1019 | 			      aes_ctx);
1316 | 1020 | }
1317 | 1021 |
1318 | | -static int generic_gcmaes_init(struct crypto_aead *aead)
1319 | | -{
1320 | | -	struct cryptd_aead *cryptd_tfm;
1321 | | -	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
1322 | | -
1323 | | -	cryptd_tfm = cryptd_alloc_aead("__driver-generic-gcm-aes-aesni",
1324 | | -				       CRYPTO_ALG_INTERNAL,
1325 | | -				       CRYPTO_ALG_INTERNAL);
1326 | | -	if (IS_ERR(cryptd_tfm))
1327 | | -		return PTR_ERR(cryptd_tfm);
1328 | | -
1329 | | -	*ctx = cryptd_tfm;
1330 | | -	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
1331 | | -
1332 | | -	return 0;
1333 | | -}
1334 | | -
1335 | | -static void generic_gcmaes_exit(struct crypto_aead *aead)
1336 | | -{
1337 | | -	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
1338 | | -
1339 | | -	cryptd_free_aead(*ctx);
1340 | | -}
1341 | | -
1342 | | -static struct aead_alg aesni_aead_algs[] = { {
| 1022 | +static struct aead_alg aesni_aeads[] = { {
1343 | 1023 | 	.setkey = common_rfc4106_set_key,
1344 | 1024 | 	.setauthsize = common_rfc4106_set_authsize,
1345 | 1025 | 	.encrypt = helper_rfc4106_encrypt,
.. | .. |
1347 | 1027 | 	.ivsize = GCM_RFC4106_IV_SIZE,
1348 | 1028 | 	.maxauthsize = 16,
1349 | 1029 | 	.base = {
1350 | | -		.cra_name = "__gcm-aes-aesni",
1351 | | -		.cra_driver_name = "__driver-gcm-aes-aesni",
| 1030 | +		.cra_name = "__rfc4106(gcm(aes))",
| 1031 | +		.cra_driver_name = "__rfc4106-gcm-aesni",
| 1032 | +		.cra_priority = 400,
1352 | 1033 | 		.cra_flags = CRYPTO_ALG_INTERNAL,
1353 | 1034 | 		.cra_blocksize = 1,
1354 | 1035 | 		.cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx),
1355 | 1036 | 		.cra_alignmask = AESNI_ALIGN - 1,
1356 | | -		.cra_module = THIS_MODULE,
1357 | | -	},
1358 | | -}, {
1359 | | -	.init = rfc4106_init,
1360 | | -	.exit = rfc4106_exit,
1361 | | -	.setkey = gcmaes_wrapper_set_key,
1362 | | -	.setauthsize = gcmaes_wrapper_set_authsize,
1363 | | -	.encrypt = gcmaes_wrapper_encrypt,
1364 | | -	.decrypt = gcmaes_wrapper_decrypt,
1365 | | -	.ivsize = GCM_RFC4106_IV_SIZE,
1366 | | -	.maxauthsize = 16,
1367 | | -	.base = {
1368 | | -		.cra_name = "rfc4106(gcm(aes))",
1369 | | -		.cra_driver_name = "rfc4106-gcm-aesni",
1370 | | -		.cra_priority = 400,
1371 | | -		.cra_flags = CRYPTO_ALG_ASYNC,
1372 | | -		.cra_blocksize = 1,
1373 | | -		.cra_ctxsize = sizeof(struct cryptd_aead *),
1374 | 1037 | 		.cra_module = THIS_MODULE,
1375 | 1038 | 	},
1376 | 1039 | }, {
.. | .. |
1381 | 1044 | 	.ivsize = GCM_AES_IV_SIZE,
1382 | 1045 | 	.maxauthsize = 16,
1383 | 1046 | 	.base = {
1384 | | -		.cra_name = "__generic-gcm-aes-aesni",
1385 | | -		.cra_driver_name = "__driver-generic-gcm-aes-aesni",
1386 | | -		.cra_priority = 0,
| 1047 | +		.cra_name = "__gcm(aes)",
| 1048 | +		.cra_driver_name = "__generic-gcm-aesni",
| 1049 | +		.cra_priority = 400,
1387 | 1050 | 		.cra_flags = CRYPTO_ALG_INTERNAL,
1388 | 1051 | 		.cra_blocksize = 1,
1389 | 1052 | 		.cra_ctxsize = sizeof(struct generic_gcmaes_ctx),
1390 | 1053 | 		.cra_alignmask = AESNI_ALIGN - 1,
1391 | 1054 | 		.cra_module = THIS_MODULE,
1392 | 1055 | 	},
1393 | | -}, {
1394 | | -	.init = generic_gcmaes_init,
1395 | | -	.exit = generic_gcmaes_exit,
1396 | | -	.setkey = gcmaes_wrapper_set_key,
1397 | | -	.setauthsize = gcmaes_wrapper_set_authsize,
1398 | | -	.encrypt = gcmaes_wrapper_encrypt,
1399 | | -	.decrypt = gcmaes_wrapper_decrypt,
1400 | | -	.ivsize = GCM_AES_IV_SIZE,
1401 | | -	.maxauthsize = 16,
1402 | | -	.base = {
1403 | | -		.cra_name = "gcm(aes)",
1404 | | -		.cra_driver_name = "generic-gcm-aesni",
1405 | | -		.cra_priority = 400,
1406 | | -		.cra_flags = CRYPTO_ALG_ASYNC,
1407 | | -		.cra_blocksize = 1,
1408 | | -		.cra_ctxsize = sizeof(struct cryptd_aead *),
1409 | | -		.cra_module = THIS_MODULE,
1410 | | -	},
1411 | 1056 | } };
1412 | 1057 | #else
1413 | | -static struct aead_alg aesni_aead_algs[0];
| 1058 | +static struct aead_alg aesni_aeads[0];
1414 | 1059 | #endif
1415 | 1060 |
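The surviving aead entries keep CRYPTO_ALG_INTERNAL and take a "__" name prefix: by convention such algorithms assume the FPU is already usable and cannot be instantiated directly by users. The user-visible names are now supplied by the simd wrappers registered in aesni_init() below, which pair up with the internal algorithms roughly like this (a sketch of the convention, not output from this patch):

/*
 *   internal:  cra_name "__gcm(aes)",  cra_driver_name "__generic-gcm-aesni"
 *   wrapper:   cra_name "gcm(aes)",    cra_driver_name "generic-gcm-aesni",
 *              async, defers to cryptd whenever the FPU is not usable
 */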
| 1061 | +static struct simd_aead_alg *aesni_simd_aeads[ARRAY_SIZE(aesni_aeads)];
1416 | 1062 |
1417 | 1063 | static const struct x86_cpu_id aesni_cpu_id[] = {
1418 | | -	X86_FEATURE_MATCH(X86_FEATURE_AES),
| 1064 | +	X86_MATCH_FEATURE(X86_FEATURE_AES, NULL),
1419 | 1065 | 	{}
1420 | 1066 | };
1421 | 1067 | MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
1422 | 1068 |
1423 | | -static void aesni_free_simds(void)
1424 | | -{
1425 | | -	int i;
1426 | | -
1427 | | -	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
1428 | | -		    aesni_simd_skciphers[i]; i++)
1429 | | -		simd_skcipher_free(aesni_simd_skciphers[i]);
1430 | | -
1431 | | -	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++)
1432 | | -		if (aesni_simd_skciphers2[i].simd)
1433 | | -			simd_skcipher_free(aesni_simd_skciphers2[i].simd);
1434 | | -}
1435 | | -
1436 | 1069 | static int __init aesni_init(void)
1437 | 1070 | {
1438 | | -	struct simd_skcipher_alg *simd;
1439 | | -	const char *basename;
1440 | | -	const char *algname;
1441 | | -	const char *drvname;
1442 | 1071 | 	int err;
1443 | | -	int i;
1444 | 1072 |
1445 | 1073 | 	if (!x86_match_cpu(aesni_cpu_id))
1446 | 1074 | 		return -ENODEV;
1447 | 1075 | #ifdef CONFIG_X86_64
1448 | | -#ifdef CONFIG_AS_AVX2
1449 | 1076 | 	if (boot_cpu_has(X86_FEATURE_AVX2)) {
1450 | 1077 | 		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
1451 | | -		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
1452 | | -		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
| 1078 | +		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
1453 | 1079 | 	} else
1454 | | -#endif
1455 | | -#ifdef CONFIG_AS_AVX
1456 | 1080 | 	if (boot_cpu_has(X86_FEATURE_AVX)) {
1457 | 1081 | 		pr_info("AVX version of gcm_enc/dec engaged.\n");
1458 | | -		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
1459 | | -		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
1460 | | -	} else
1461 | | -#endif
1462 | | -	{
| 1082 | +		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
| 1083 | +	} else {
1463 | 1084 | 		pr_info("SSE version of gcm_enc/dec engaged.\n");
1464 | | -		aesni_gcm_enc_tfm = aesni_gcm_enc;
1465 | | -		aesni_gcm_dec_tfm = aesni_gcm_dec;
| 1085 | +		aesni_gcm_tfm = &aesni_gcm_tfm_sse;
1466 | 1086 | 	}
1467 | 1087 | 	aesni_ctr_enc_tfm = aesni_ctr_enc;
1468 | | -#ifdef CONFIG_AS_AVX
1469 | 1088 | 	if (boot_cpu_has(X86_FEATURE_AVX)) {
1470 | 1089 | 		/* optimize performance of ctr mode encryption transform */
1471 | 1090 | 		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
1472 | 1091 | 		pr_info("AES CTR mode by8 optimization enabled\n");
1473 | 1092 | 	}
1474 | 1093 | #endif
1475 | | -#endif
1476 | 1094 |
1477 | | -	err = crypto_fpu_init();
| 1095 | +	err = crypto_register_alg(&aesni_cipher_alg);
1478 | 1096 | 	if (err)
1479 | 1097 | 		return err;
1480 | 1098 |
1481 | | -	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
| 1099 | +	err = simd_register_skciphers_compat(aesni_skciphers,
| 1100 | +					     ARRAY_SIZE(aesni_skciphers),
| 1101 | +					     aesni_simd_skciphers);
1482 | 1102 | 	if (err)
1483 | | -		goto fpu_exit;
| 1103 | +		goto unregister_cipher;
1484 | 1104 |
1485 | | -	err = crypto_register_skciphers(aesni_skciphers,
1486 | | -					ARRAY_SIZE(aesni_skciphers));
1487 | | -	if (err)
1488 | | -		goto unregister_algs;
1489 | | -
1490 | | -	err = crypto_register_aeads(aesni_aead_algs,
1491 | | -				    ARRAY_SIZE(aesni_aead_algs));
| 1105 | +	err = simd_register_aeads_compat(aesni_aeads, ARRAY_SIZE(aesni_aeads),
| 1106 | +					 aesni_simd_aeads);
1492 | 1107 | 	if (err)
1493 | 1108 | 		goto unregister_skciphers;
1494 | 1109 |
1495 | | -	for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
1496 | | -		algname = aesni_skciphers[i].base.cra_name + 2;
1497 | | -		drvname = aesni_skciphers[i].base.cra_driver_name + 2;
1498 | | -		basename = aesni_skciphers[i].base.cra_driver_name;
1499 | | -		simd = simd_skcipher_create_compat(algname, drvname, basename);
1500 | | -		err = PTR_ERR(simd);
1501 | | -		if (IS_ERR(simd))
1502 | | -			goto unregister_simds;
1503 | | -
1504 | | -		aesni_simd_skciphers[i] = simd;
1505 | | -	}
1506 | | -
1507 | | -	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++) {
1508 | | -		algname = aesni_simd_skciphers2[i].algname;
1509 | | -		drvname = aesni_simd_skciphers2[i].drvname;
1510 | | -		basename = aesni_simd_skciphers2[i].basename;
1511 | | -		simd = simd_skcipher_create_compat(algname, drvname, basename);
1512 | | -		err = PTR_ERR(simd);
1513 | | -		if (IS_ERR(simd))
1514 | | -			continue;
1515 | | -
1516 | | -		aesni_simd_skciphers2[i].simd = simd;
1517 | | -	}
1518 | | -
1519 | 1110 | 	return 0;
1520 | 1111 |
1521 | | -unregister_simds:
1522 | | -	aesni_free_simds();
1523 | | -	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
1524 | 1112 | unregister_skciphers:
1525 | | -	crypto_unregister_skciphers(aesni_skciphers,
1526 | | -				    ARRAY_SIZE(aesni_skciphers));
1527 | | -unregister_algs:
1528 | | -	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
1529 | | -fpu_exit:
1530 | | -	crypto_fpu_exit();
| 1113 | +	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
| 1114 | +				  aesni_simd_skciphers);
| 1115 | +unregister_cipher:
| 1116 | +	crypto_unregister_alg(&aesni_cipher_alg);
1531 | 1117 | 	return err;
1532 | 1118 | }
1533 | 1119 |
1534 | 1120 | static void __exit aesni_exit(void)
1535 | 1121 | {
1536 | | -	aesni_free_simds();
1537 | | -	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
1538 | | -	crypto_unregister_skciphers(aesni_skciphers,
1539 | | -				    ARRAY_SIZE(aesni_skciphers));
1540 | | -	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
1541 | | -
1542 | | -	crypto_fpu_exit();
| 1122 | +	simd_unregister_aeads(aesni_aeads, ARRAY_SIZE(aesni_aeads),
| 1123 | +			      aesni_simd_aeads);
| 1124 | +	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
| 1125 | +				  aesni_simd_skciphers);
| 1126 | +	crypto_unregister_alg(&aesni_cipher_alg);
1543 | 1127 | }
1544 | 1128 |
1545 | 1129 | late_initcall(aesni_init);
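simd_register_skciphers_compat() and simd_register_aeads_compat() replace both the plain crypto_register_*() calls and the hand-rolled simd_skcipher_create_compat() loop that this patch deletes: one call registers the internal algorithms and creates their simd wrappers, and the matching simd_unregister_*() tears both down. The driver boilerplate this leaves behind, sketched with hypothetical my_* names:

static struct simd_skcipher_alg *my_simd_algs[ARRAY_SIZE(my_skciphers)];

static int __init my_init(void)
{
	/* registers the internal algs and their "__"-less simd wrappers */
	return simd_register_skciphers_compat(my_skciphers,
					      ARRAY_SIZE(my_skciphers),
					      my_simd_algs);
}

static void __exit my_exit(void)
{
	simd_unregister_skciphers(my_skciphers, ARRAY_SIZE(my_skciphers),
				  my_simd_algs);
}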
---|