| .. | .. |
| 1 | +// SPDX-License-Identifier: GPL-2.0-only |
| 1 | 2 | /* |
| 2 | 3 | * aes-ce-glue.c - wrapper code for ARMv8 AES |
| 3 | 4 | * |
| 4 | 5 | * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org> |
| 5 | | - * |
| 6 | | - * This program is free software; you can redistribute it and/or modify |
| 7 | | - * it under the terms of the GNU General Public License version 2 as |
| 8 | | - * published by the Free Software Foundation. |
| 9 | 6 | */ |
| 10 | 7 | |
| 11 | 8 | #include <asm/hwcap.h> |
| 12 | 9 | #include <asm/neon.h> |
| 13 | | -#include <asm/hwcap.h> |
| 10 | +#include <asm/simd.h> |
| 11 | +#include <asm/unaligned.h> |
| 14 | 12 | #include <crypto/aes.h> |
| 13 | +#include <crypto/ctr.h> |
| 15 | 14 | #include <crypto/internal/simd.h> |
| 16 | 15 | #include <crypto/internal/skcipher.h> |
| 16 | +#include <crypto/scatterwalk.h> |
| 17 | 17 | #include <linux/cpufeature.h> |
| 18 | 18 | #include <linux/module.h> |
| 19 | 19 | #include <crypto/xts.h> |
| .. | .. |
| 26 | 26 | asmlinkage u32 ce_aes_sub(u32 input); |
| 27 | 27 | asmlinkage void ce_aes_invert(void *dst, void *src); |
| 28 | 28 | |
| 29 | | -asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[], |
| 29 | +asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[], |
| 30 | 30 | int rounds, int blocks); |
| 31 | | -asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[], |
| 31 | +asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[], |
| 32 | 32 | int rounds, int blocks); |
| 33 | 33 | |
| 34 | | -asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[], |
| 34 | +asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[], |
| 35 | 35 | int rounds, int blocks, u8 iv[]); |
| 36 | | -asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[], |
| 36 | +asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[], |
| 37 | 37 | int rounds, int blocks, u8 iv[]); |
| 38 | +asmlinkage void ce_aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[], |
| 39 | + int rounds, int bytes, u8 const iv[]); |
| 40 | +asmlinkage void ce_aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[], |
| 41 | + int rounds, int bytes, u8 const iv[]); |
| 38 | 42 | |
| 39 | | -asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[], |
| 43 | +asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[], |
| 40 | 44 | int rounds, int blocks, u8 ctr[]); |
| 41 | 45 | |
| 42 | | -asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[], |
| 43 | | - int rounds, int blocks, u8 iv[], |
| 44 | | - u8 const rk2[], int first); |
| 45 | | -asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[], |
| 46 | | - int rounds, int blocks, u8 iv[], |
| 47 | | - u8 const rk2[], int first); |
| 46 | +asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[], |
| 47 | + int rounds, int bytes, u8 iv[], |
| 48 | + u32 const rk2[], int first); |
| 49 | +asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[], |
| 50 | + int rounds, int bytes, u8 iv[], |
| 51 | + u32 const rk2[], int first); |
| 48 | 52 | |
| 49 | 53 | struct aes_block { |
| 50 | 54 | u8 b[AES_BLOCK_SIZE]; |
| .. | .. |
| 81 | 85 | key_len != AES_KEYSIZE_256) |
| 82 | 86 | return -EINVAL; |
| 83 | 87 | |
| 84 | | - memcpy(ctx->key_enc, in_key, key_len); |
| 85 | 88 | ctx->key_length = key_len; |
| 89 | + for (i = 0; i < kwords; i++) |
| 90 | + ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32)); |
| 86 | 91 | |
| 87 | 92 | kernel_neon_begin(); |
| 88 | 93 | for (i = 0; i < sizeof(rcon); i++) { |
| 89 | 94 | u32 *rki = ctx->key_enc + (i * kwords); |
| 90 | 95 | u32 *rko = rki + kwords; |
| 91 | 96 | |
| 92 | | -#ifndef CONFIG_CPU_BIG_ENDIAN |
| 93 | 97 | rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8); |
| 94 | 98 | rko[0] = rko[0] ^ rki[0] ^ rcon[i]; |
| 95 | | -#else |
| 96 | | - rko[0] = rol32(ce_aes_sub(rki[kwords - 1]), 8); |
| 97 | | - rko[0] = rko[0] ^ rki[0] ^ (rcon[i] << 24); |
| 98 | | -#endif |
| 99 | 99 | rko[1] = rko[0] ^ rki[1]; |
| 100 | 100 | rko[2] = rko[1] ^ rki[2]; |
| 101 | 101 | rko[3] = rko[2] ^ rki[3]; |
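
This hunk can drop the CONFIG_CPU_BIG_ENDIAN special case because the setkey path above now loads the key explicitly as little-endian 32-bit words, so the schedule is computed the same way on either endianness. A minimal userspace sketch of what get_unaligned_le32() contributes here (load_le32() is a hypothetical stand-in for the kernel helper, not the real thing):

```c
#include <stdint.h>
#include <stdio.h>

/* Assemble a u32 from four bytes in little-endian order, independent of
 * host endianness -- the effect of get_unaligned_le32() in the hunk above. */
static uint32_t load_le32(const uint8_t *p)
{
        return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
               ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
}

int main(void)
{
        const uint8_t in_key[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
        uint32_t key_enc[2];

        /* mirrors the new setkey loop: one le32 word per 4 key bytes */
        for (int i = 0; i < 2; i++)
                key_enc[i] = load_le32(in_key + i * sizeof(uint32_t));

        printf("0x%08x 0x%08x\n", key_enc[0], key_enc[1]); /* 0x04030201 0x08070605 */
        return 0;
}
```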
| .. | .. |
| 138 | 138 | unsigned int key_len) |
| 139 | 139 | { |
| 140 | 140 | struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm); |
| 141 | | - int ret; |
| 142 | 141 | |
| 143 | | - ret = ce_aes_expandkey(ctx, in_key, key_len); |
| 144 | | - if (!ret) |
| 145 | | - return 0; |
| 146 | | - |
| 147 | | - crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); |
| 148 | | - return -EINVAL; |
| 142 | + return ce_aes_expandkey(ctx, in_key, key_len); |
| 149 | 143 | } |
| 150 | 144 | |
| 151 | 145 | struct crypto_aes_xts_ctx { |
| .. | .. |
| 167 | 161 | if (!ret) |
| 168 | 162 | ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2], |
| 169 | 163 | key_len / 2); |
| 170 | | - if (!ret) |
| 171 | | - return 0; |
| 172 | | - |
| 173 | | - crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); |
| 174 | | - return -EINVAL; |
| 164 | + return ret; |
| 175 | 165 | } |
| 176 | 166 | |
| 177 | 167 | static int ecb_encrypt(struct skcipher_request *req) |
| .. | .. |
| 182 | 172 | unsigned int blocks; |
| 183 | 173 | int err; |
| 184 | 174 | |
| 185 | | - err = skcipher_walk_virt(&walk, req, true); |
| 175 | + err = skcipher_walk_virt(&walk, req, false); |
| 186 | 176 | |
| 187 | | - kernel_neon_begin(); |
| 188 | 177 | while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { |
| 178 | + kernel_neon_begin(); |
| 189 | 179 | ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 190 | | - (u8 *)ctx->key_enc, num_rounds(ctx), blocks); |
| 180 | + ctx->key_enc, num_rounds(ctx), blocks); |
| 181 | + kernel_neon_end(); |
| 191 | 182 | err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); |
| 192 | 183 | } |
| 193 | | - kernel_neon_end(); |
| 194 | 184 | return err; |
| 195 | 185 | } |
| 196 | 186 | |
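
The change pattern in this hunk repeats through the rest of the file: skcipher_walk_virt() is now called with atomic=false, and kernel_neon_begin()/kernel_neon_end() move inside the loop, so the non-preemptible NEON region only covers one walk chunk at a time and the walk may sleep between chunks. A runnable sketch of that control flow, with the NEON calls and the cipher stubbed out (the real calls are kernel-only; the chunk sizes are made up):

```c
#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* stand-ins for kernel_neon_begin()/kernel_neon_end() */
static void neon_begin(void) { puts("NEON on (preemption disabled)"); }
static void neon_end(void)   { puts("NEON off (preemption enabled)"); }

int main(void)
{
        /* hypothetical walk.nbytes values handed out by the walk */
        unsigned int chunk[] = { 64, 48, 20 };

        for (int i = 0; i < 3; i++) {
                unsigned int blocks = chunk[i] / AES_BLOCK_SIZE;

                neon_begin();
                printf("  process %u full blocks\n", blocks); /* ce_aes_ecb_encrypt() here */
                neon_end();
                /* skcipher_walk_done() here; may sleep before the next chunk */
        }
        return 0;
}
```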
| .. | .. |
| 202 | 192 | unsigned int blocks; |
| 203 | 193 | int err; |
| 204 | 194 | |
| 205 | | - err = skcipher_walk_virt(&walk, req, true); |
| 195 | + err = skcipher_walk_virt(&walk, req, false); |
| 206 | 196 | |
| 207 | | - kernel_neon_begin(); |
| 208 | 197 | while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { |
| 198 | + kernel_neon_begin(); |
| 209 | 199 | ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 210 | | - (u8 *)ctx->key_dec, num_rounds(ctx), blocks); |
| 200 | + ctx->key_dec, num_rounds(ctx), blocks); |
| 201 | + kernel_neon_end(); |
| 211 | 202 | err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); |
| 212 | 203 | } |
| 213 | | - kernel_neon_end(); |
| 204 | + return err; |
| 205 | +} |
| 206 | + |
| 207 | +static int cbc_encrypt_walk(struct skcipher_request *req, |
| 208 | + struct skcipher_walk *walk) |
| 209 | +{ |
| 210 | + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
| 211 | + struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm); |
| 212 | + unsigned int blocks; |
| 213 | + int err = 0; |
| 214 | + |
| 215 | + while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { |
| 216 | + kernel_neon_begin(); |
| 217 | + ce_aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr, |
| 218 | + ctx->key_enc, num_rounds(ctx), blocks, |
| 219 | + walk->iv); |
| 220 | + kernel_neon_end(); |
| 221 | + err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); |
| 222 | + } |
| 214 | 223 | return err; |
| 215 | 224 | } |
| 216 | 225 | |
| 217 | 226 | static int cbc_encrypt(struct skcipher_request *req) |
| 218 | 227 | { |
| 219 | | - struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
| 220 | | - struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm); |
| 221 | 228 | struct skcipher_walk walk; |
| 222 | | - unsigned int blocks; |
| 223 | 229 | int err; |
| 224 | 230 | |
| 225 | | - err = skcipher_walk_virt(&walk, req, true); |
| 231 | + err = skcipher_walk_virt(&walk, req, false); |
| 232 | + if (err) |
| 233 | + return err; |
| 234 | + return cbc_encrypt_walk(req, &walk); |
| 235 | +} |
| 226 | 236 | |
| 227 | | - kernel_neon_begin(); |
| 228 | | - while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { |
| 229 | | - ce_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 230 | | - (u8 *)ctx->key_enc, num_rounds(ctx), blocks, |
| 231 | | - walk.iv); |
| 232 | | - err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); |
| 237 | +static int cbc_decrypt_walk(struct skcipher_request *req, |
| 238 | + struct skcipher_walk *walk) |
| 239 | +{ |
| 240 | + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
| 241 | + struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm); |
| 242 | + unsigned int blocks; |
| 243 | + int err = 0; |
| 244 | + |
| 245 | + while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { |
| 246 | + kernel_neon_begin(); |
| 247 | + ce_aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr, |
| 248 | + ctx->key_dec, num_rounds(ctx), blocks, |
| 249 | + walk->iv); |
| 250 | + kernel_neon_end(); |
| 251 | + err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); |
| 233 | 252 | } |
| 234 | | - kernel_neon_end(); |
| 235 | 253 | return err; |
| 236 | 254 | } |
| 237 | 255 | |
| 238 | 256 | static int cbc_decrypt(struct skcipher_request *req) |
| 239 | 257 | { |
| 240 | | - struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
| 241 | | - struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm); |
| 242 | 258 | struct skcipher_walk walk; |
| 243 | | - unsigned int blocks; |
| 244 | 259 | int err; |
| 245 | 260 | |
| 246 | | - err = skcipher_walk_virt(&walk, req, true); |
| 261 | + err = skcipher_walk_virt(&walk, req, false); |
| 262 | + if (err) |
| 263 | + return err; |
| 264 | + return cbc_decrypt_walk(req, &walk); |
| 265 | +} |
| 266 | + |
| 267 | +static int cts_cbc_encrypt(struct skcipher_request *req) |
| 268 | +{ |
| 269 | + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
| 270 | + struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm); |
| 271 | + int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; |
| 272 | + struct scatterlist *src = req->src, *dst = req->dst; |
| 273 | + struct scatterlist sg_src[2], sg_dst[2]; |
| 274 | + struct skcipher_request subreq; |
| 275 | + struct skcipher_walk walk; |
| 276 | + int err; |
| 277 | + |
| 278 | + skcipher_request_set_tfm(&subreq, tfm); |
| 279 | + skcipher_request_set_callback(&subreq, skcipher_request_flags(req), |
| 280 | + NULL, NULL); |
| 281 | + |
| 282 | + if (req->cryptlen <= AES_BLOCK_SIZE) { |
| 283 | + if (req->cryptlen < AES_BLOCK_SIZE) |
| 284 | + return -EINVAL; |
| 285 | + cbc_blocks = 1; |
| 286 | + } |
| 287 | + |
| 288 | + if (cbc_blocks > 0) { |
| 289 | + skcipher_request_set_crypt(&subreq, req->src, req->dst, |
| 290 | + cbc_blocks * AES_BLOCK_SIZE, |
| 291 | + req->iv); |
| 292 | + |
| 293 | + err = skcipher_walk_virt(&walk, &subreq, false) ?: |
| 294 | + cbc_encrypt_walk(&subreq, &walk); |
| 295 | + if (err) |
| 296 | + return err; |
| 297 | + |
| 298 | + if (req->cryptlen == AES_BLOCK_SIZE) |
| 299 | + return 0; |
| 300 | + |
| 301 | + dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); |
| 302 | + if (req->dst != req->src) |
| 303 | + dst = scatterwalk_ffwd(sg_dst, req->dst, |
| 304 | + subreq.cryptlen); |
| 305 | + } |
| 306 | + |
| 307 | + /* handle ciphertext stealing */ |
| 308 | + skcipher_request_set_crypt(&subreq, src, dst, |
| 309 | + req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, |
| 310 | + req->iv); |
| 311 | + |
| 312 | + err = skcipher_walk_virt(&walk, &subreq, false); |
| 313 | + if (err) |
| 314 | + return err; |
| 247 | 315 | |
| 248 | 316 | kernel_neon_begin(); |
| 249 | | - while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { |
| 250 | | - ce_aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 251 | | - (u8 *)ctx->key_dec, num_rounds(ctx), blocks, |
| 252 | | - walk.iv); |
| 253 | | - err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); |
| 254 | | - } |
| 317 | + ce_aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 318 | + ctx->key_enc, num_rounds(ctx), walk.nbytes, |
| 319 | + walk.iv); |
| 255 | 320 | kernel_neon_end(); |
| 256 | | - return err; |
| 321 | + |
| 322 | + return skcipher_walk_done(&walk, 0); |
| 323 | +} |
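
cts_cbc_encrypt() splits the request in two: cbc_blocks full blocks go through the plain CBC path (reusing cbc_encrypt_walk(), chained with the GNU `?:` shorthand), and the remainder, the last full block plus the short tail, goes to the ce_aes_cbc_cts_encrypt() helper; cts_cbc_decrypt() below mirrors this. A worked example of the split arithmetic, runnable in userspace with illustrative lengths only:

```c
#include <stdio.h>

#define AES_BLOCK_SIZE 16
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
        unsigned int lens[] = { 16, 32, 50 }; /* example cryptlen values */

        for (int i = 0; i < 3; i++) {
                unsigned int cryptlen = lens[i];
                int cbc_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;

                if (cryptlen <= AES_BLOCK_SIZE)
                        cbc_blocks = 1; /* lone block: plain CBC, nothing to steal */

                printf("cryptlen %2u: CBC %2d bytes, CTS %2u bytes\n", cryptlen,
                       cbc_blocks > 0 ? cbc_blocks * AES_BLOCK_SIZE : 0,
                       cbc_blocks > 0 ? cryptlen - cbc_blocks * AES_BLOCK_SIZE
                                      : cryptlen);
        }
        return 0; /* 16 -> 16/0, 32 -> 0/32, 50 -> 32/18 */
}
```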
| 324 | + |
| 325 | +static int cts_cbc_decrypt(struct skcipher_request *req) |
| 326 | +{ |
| 327 | + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
| 328 | + struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm); |
| 329 | + int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; |
| 330 | + struct scatterlist *src = req->src, *dst = req->dst; |
| 331 | + struct scatterlist sg_src[2], sg_dst[2]; |
| 332 | + struct skcipher_request subreq; |
| 333 | + struct skcipher_walk walk; |
| 334 | + int err; |
| 335 | + |
| 336 | + skcipher_request_set_tfm(&subreq, tfm); |
| 337 | + skcipher_request_set_callback(&subreq, skcipher_request_flags(req), |
| 338 | + NULL, NULL); |
| 339 | + |
| 340 | + if (req->cryptlen <= AES_BLOCK_SIZE) { |
| 341 | + if (req->cryptlen < AES_BLOCK_SIZE) |
| 342 | + return -EINVAL; |
| 343 | + cbc_blocks = 1; |
| 344 | + } |
| 345 | + |
| 346 | + if (cbc_blocks > 0) { |
| 347 | + skcipher_request_set_crypt(&subreq, req->src, req->dst, |
| 348 | + cbc_blocks * AES_BLOCK_SIZE, |
| 349 | + req->iv); |
| 350 | + |
| 351 | + err = skcipher_walk_virt(&walk, &subreq, false) ?: |
| 352 | + cbc_decrypt_walk(&subreq, &walk); |
| 353 | + if (err) |
| 354 | + return err; |
| 355 | + |
| 356 | + if (req->cryptlen == AES_BLOCK_SIZE) |
| 357 | + return 0; |
| 358 | + |
| 359 | + dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); |
| 360 | + if (req->dst != req->src) |
| 361 | + dst = scatterwalk_ffwd(sg_dst, req->dst, |
| 362 | + subreq.cryptlen); |
| 363 | + } |
| 364 | + |
| 365 | + /* handle ciphertext stealing */ |
| 366 | + skcipher_request_set_crypt(&subreq, src, dst, |
| 367 | + req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, |
| 368 | + req->iv); |
| 369 | + |
| 370 | + err = skcipher_walk_virt(&walk, &subreq, false); |
| 371 | + if (err) |
| 372 | + return err; |
| 373 | + |
| 374 | + kernel_neon_begin(); |
| 375 | + ce_aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 376 | + ctx->key_dec, num_rounds(ctx), walk.nbytes, |
| 377 | + walk.iv); |
| 378 | + kernel_neon_end(); |
| 379 | + |
| 380 | + return skcipher_walk_done(&walk, 0); |
| 257 | 381 | } |
| 258 | 382 | |
| 259 | 383 | static int ctr_encrypt(struct skcipher_request *req) |
| .. | .. |
| 263 | 387 | struct skcipher_walk walk; |
| 264 | 388 | int err, blocks; |
| 265 | 389 | |
| 266 | | - err = skcipher_walk_virt(&walk, req, true); |
| 390 | + err = skcipher_walk_virt(&walk, req, false); |
| 267 | 391 | |
| 268 | | - kernel_neon_begin(); |
| 269 | 392 | while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { |
| 393 | + kernel_neon_begin(); |
| 270 | 394 | ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 271 | | - (u8 *)ctx->key_enc, num_rounds(ctx), blocks, |
| 395 | + ctx->key_enc, num_rounds(ctx), blocks, |
| 272 | 396 | walk.iv); |
| 397 | + kernel_neon_end(); |
| 273 | 398 | err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); |
| 274 | 399 | } |
| 275 | 400 | if (walk.nbytes) { |
| .. | .. |
| 283 | 408 | */ |
| 284 | 409 | blocks = -1; |
| 285 | 410 | |
| 286 | | - ce_aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, |
| 287 | | - num_rounds(ctx), blocks, walk.iv); |
| 411 | + kernel_neon_begin(); |
| 412 | + ce_aes_ctr_encrypt(tail, NULL, ctx->key_enc, num_rounds(ctx), |
| 413 | + blocks, walk.iv); |
| 414 | + kernel_neon_end(); |
| 288 | 415 | crypto_xor_cpy(tdst, tsrc, tail, nbytes); |
| 289 | 416 | err = skcipher_walk_done(&walk, 0); |
| 290 | 417 | } |
| 291 | | - kernel_neon_end(); |
| 292 | | - |
| 293 | 418 | return err; |
| 419 | +} |
| 420 | + |
| 421 | +static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst) |
| 422 | +{ |
| 423 | + struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm); |
| 424 | + unsigned long flags; |
| 425 | + |
| 426 | + /* |
| 427 | + * Temporarily disable interrupts to avoid races where |
| 428 | + * cachelines are evicted when the CPU is interrupted |
| 429 | + * to do something else. |
| 430 | + */ |
| 431 | + local_irq_save(flags); |
| 432 | + aes_encrypt(ctx, dst, src); |
| 433 | + local_irq_restore(flags); |
| 434 | +} |
| 435 | + |
| 436 | +static int ctr_encrypt_sync(struct skcipher_request *req) |
| 437 | +{ |
| 438 | + if (!crypto_simd_usable()) |
| 439 | + return crypto_ctr_encrypt_walk(req, ctr_encrypt_one); |
| 440 | + |
| 441 | + return ctr_encrypt(req); |
| 294 | 442 | } |
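
ctr_encrypt_sync() keeps the "ctr(aes)" algorithm usable when crypto_simd_usable() returns false (e.g. in interrupt context): crypto_ctr_encrypt_walk() then builds CTR mode around the scalar ctr_encrypt_one() one block at a time. A self-contained sketch of the CTR construction itself, with a toy permutation standing in for AES (NOT a real cipher, for illustration only):

```c
#include <stdint.h>
#include <stdio.h>

#define BLK 16

/* stand-in for aes_encrypt(): any fixed block permutation works here */
static void toy_block_enc(uint8_t out[BLK], const uint8_t in[BLK])
{
        for (int i = 0; i < BLK; i++)
                out[i] = in[BLK - 1 - i] ^ 0x5a;
}

static void ctr_inc(uint8_t ctr[BLK])
{
        for (int i = BLK - 1; i >= 0; i--)      /* big-endian increment */
                if (++ctr[i])
                        break;
}

int main(void)
{
        uint8_t ctr[BLK] = { 0 }, ks[BLK];
        uint8_t msg[20] = "counter mode demo";

        for (size_t off = 0; off < sizeof(msg); off += BLK) {
                size_t n = sizeof(msg) - off < BLK ? sizeof(msg) - off : BLK;

                toy_block_enc(ks, ctr);         /* encrypt the counter */
                for (size_t i = 0; i < n; i++)  /* XOR keystream into data */
                        msg[off + i] ^= ks[i];
                ctr_inc(ctr);                   /* next counter block */
        }
        printf("first ct bytes: %02x %02x\n", msg[0], msg[1]);
        return 0;
}
```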
| 295 | 443 | |
| 296 | 444 | static int xts_encrypt(struct skcipher_request *req) |
| .. | .. |
| 298 | 446 | struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
| 299 | 447 | struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm); |
| 300 | 448 | int err, first, rounds = num_rounds(&ctx->key1); |
| 449 | + int tail = req->cryptlen % AES_BLOCK_SIZE; |
| 450 | + struct scatterlist sg_src[2], sg_dst[2]; |
| 451 | + struct skcipher_request subreq; |
| 452 | + struct scatterlist *src, *dst; |
| 301 | 453 | struct skcipher_walk walk; |
| 302 | | - unsigned int blocks; |
| 303 | 454 | |
| 304 | | - err = skcipher_walk_virt(&walk, req, true); |
| 455 | + if (req->cryptlen < AES_BLOCK_SIZE) |
| 456 | + return -EINVAL; |
| 457 | + |
| 458 | + err = skcipher_walk_virt(&walk, req, false); |
| 459 | + |
| 460 | + if (unlikely(tail > 0 && walk.nbytes < walk.total)) { |
| 461 | + int xts_blocks = DIV_ROUND_UP(req->cryptlen, |
| 462 | + AES_BLOCK_SIZE) - 2; |
| 463 | + |
| 464 | + skcipher_walk_abort(&walk); |
| 465 | + |
| 466 | + skcipher_request_set_tfm(&subreq, tfm); |
| 467 | + skcipher_request_set_callback(&subreq, |
| 468 | + skcipher_request_flags(req), |
| 469 | + NULL, NULL); |
| 470 | + skcipher_request_set_crypt(&subreq, req->src, req->dst, |
| 471 | + xts_blocks * AES_BLOCK_SIZE, |
| 472 | + req->iv); |
| 473 | + req = &subreq; |
| 474 | + err = skcipher_walk_virt(&walk, req, false); |
| 475 | + } else { |
| 476 | + tail = 0; |
| 477 | + } |
| 478 | + |
| 479 | + for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) { |
| 480 | + int nbytes = walk.nbytes; |
| 481 | + |
| 482 | + if (walk.nbytes < walk.total) |
| 483 | + nbytes &= ~(AES_BLOCK_SIZE - 1); |
| 484 | + |
| 485 | + kernel_neon_begin(); |
| 486 | + ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 487 | + ctx->key1.key_enc, rounds, nbytes, walk.iv, |
| 488 | + ctx->key2.key_enc, first); |
| 489 | + kernel_neon_end(); |
| 490 | + err = skcipher_walk_done(&walk, walk.nbytes - nbytes); |
| 491 | + } |
| 492 | + |
| 493 | + if (err || likely(!tail)) |
| 494 | + return err; |
| 495 | + |
| 496 | + dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); |
| 497 | + if (req->dst != req->src) |
| 498 | + dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); |
| 499 | + |
| 500 | + skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, |
| 501 | + req->iv); |
| 502 | + |
| 503 | + err = skcipher_walk_virt(&walk, req, false); |
| 504 | + if (err) |
| 505 | + return err; |
| 305 | 506 | |
| 306 | 507 | kernel_neon_begin(); |
| 307 | | - for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { |
| 308 | | - ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 309 | | - (u8 *)ctx->key1.key_enc, rounds, blocks, |
| 310 | | - walk.iv, (u8 *)ctx->key2.key_enc, first); |
| 311 | | - err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); |
| 312 | | - } |
| 508 | + ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 509 | + ctx->key1.key_enc, rounds, walk.nbytes, walk.iv, |
| 510 | + ctx->key2.key_enc, first); |
| 313 | 511 | kernel_neon_end(); |
| 314 | 512 | |
| 315 | | - return err; |
| 513 | + return skcipher_walk_done(&walk, 0); |
| 316 | 514 | } |
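
When the request length is block aligned, xts_encrypt() behaves much as before, just counting bytes instead of blocks. When there is a tail and the data does not fit in a single walk chunk, the main pass is confined to xts_blocks full blocks via the subrequest, and a final walk processes the last full block together with the tail (AES_BLOCK_SIZE + tail bytes) so the asm can do ciphertext stealing; .walksize = 2 * AES_BLOCK_SIZE in the alg definition below keeps those two pieces in one chunk. A worked example of that split, with illustrative lengths only:

```c
#include <stdio.h>

#define AES_BLOCK_SIZE 16
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
        unsigned int lens[] = { 32, 50 }; /* example cryptlen values */

        for (int i = 0; i < 2; i++) {
                unsigned int cryptlen = lens[i];
                int tail = cryptlen % AES_BLOCK_SIZE;

                if (tail) {
                        int xts_blocks = DIV_ROUND_UP(cryptlen,
                                                      AES_BLOCK_SIZE) - 2;

                        printf("cryptlen %2u: main %2d bytes, final %2d bytes\n",
                               cryptlen, xts_blocks * AES_BLOCK_SIZE,
                               AES_BLOCK_SIZE + tail);
                } else {
                        printf("cryptlen %2u: all %2u bytes in the main loop\n",
                               cryptlen, cryptlen);
                }
        }
        return 0; /* 32 -> all 32, 50 -> 32 main + 18 final */
}
```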
| 317 | 515 | |
| 318 | 516 | static int xts_decrypt(struct skcipher_request *req) |
| .. | .. |
| 320 | 518 | struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
| 321 | 519 | struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm); |
| 322 | 520 | int err, first, rounds = num_rounds(&ctx->key1); |
| 521 | + int tail = req->cryptlen % AES_BLOCK_SIZE; |
| 522 | + struct scatterlist sg_src[2], sg_dst[2]; |
| 523 | + struct skcipher_request subreq; |
| 524 | + struct scatterlist *src, *dst; |
| 323 | 525 | struct skcipher_walk walk; |
| 324 | | - unsigned int blocks; |
| 325 | 526 | |
| 326 | | - err = skcipher_walk_virt(&walk, req, true); |
| 527 | + if (req->cryptlen < AES_BLOCK_SIZE) |
| 528 | + return -EINVAL; |
| 529 | + |
| 530 | + err = skcipher_walk_virt(&walk, req, false); |
| 531 | + |
| 532 | + if (unlikely(tail > 0 && walk.nbytes < walk.total)) { |
| 533 | + int xts_blocks = DIV_ROUND_UP(req->cryptlen, |
| 534 | + AES_BLOCK_SIZE) - 2; |
| 535 | + |
| 536 | + skcipher_walk_abort(&walk); |
| 537 | + |
| 538 | + skcipher_request_set_tfm(&subreq, tfm); |
| 539 | + skcipher_request_set_callback(&subreq, |
| 540 | + skcipher_request_flags(req), |
| 541 | + NULL, NULL); |
| 542 | + skcipher_request_set_crypt(&subreq, req->src, req->dst, |
| 543 | + xts_blocks * AES_BLOCK_SIZE, |
| 544 | + req->iv); |
| 545 | + req = &subreq; |
| 546 | + err = skcipher_walk_virt(&walk, req, false); |
| 547 | + } else { |
| 548 | + tail = 0; |
| 549 | + } |
| 550 | + |
| 551 | + for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) { |
| 552 | + int nbytes = walk.nbytes; |
| 553 | + |
| 554 | + if (walk.nbytes < walk.total) |
| 555 | + nbytes &= ~(AES_BLOCK_SIZE - 1); |
| 556 | + |
| 557 | + kernel_neon_begin(); |
| 558 | + ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 559 | + ctx->key1.key_dec, rounds, nbytes, walk.iv, |
| 560 | + ctx->key2.key_enc, first); |
| 561 | + kernel_neon_end(); |
| 562 | + err = skcipher_walk_done(&walk, walk.nbytes - nbytes); |
| 563 | + } |
| 564 | + |
| 565 | + if (err || likely(!tail)) |
| 566 | + return err; |
| 567 | + |
| 568 | + dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); |
| 569 | + if (req->dst != req->src) |
| 570 | + dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); |
| 571 | + |
| 572 | + skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, |
| 573 | + req->iv); |
| 574 | + |
| 575 | + err = skcipher_walk_virt(&walk, req, false); |
| 576 | + if (err) |
| 577 | + return err; |
| 327 | 578 | |
| 328 | 579 | kernel_neon_begin(); |
| 329 | | - for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { |
| 330 | | - ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 331 | | - (u8 *)ctx->key1.key_dec, rounds, blocks, |
| 332 | | - walk.iv, (u8 *)ctx->key2.key_enc, first); |
| 333 | | - err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); |
| 334 | | - } |
| 580 | + ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, |
| 581 | + ctx->key1.key_dec, rounds, walk.nbytes, walk.iv, |
| 582 | + ctx->key2.key_enc, first); |
| 335 | 583 | kernel_neon_end(); |
| 336 | 584 | |
| 337 | | - return err; |
| 585 | + return skcipher_walk_done(&walk, 0); |
| 338 | 586 | } |
| 339 | 587 | |
| 340 | 588 | static struct skcipher_alg aes_algs[] = { { |
| 341 | | - .base = { |
| 342 | | - .cra_name = "__ecb(aes)", |
| 343 | | - .cra_driver_name = "__ecb-aes-ce", |
| 344 | | - .cra_priority = 300, |
| 345 | | - .cra_flags = CRYPTO_ALG_INTERNAL, |
| 346 | | - .cra_blocksize = AES_BLOCK_SIZE, |
| 347 | | - .cra_ctxsize = sizeof(struct crypto_aes_ctx), |
| 348 | | - .cra_module = THIS_MODULE, |
| 349 | | - }, |
| 350 | | - .min_keysize = AES_MIN_KEY_SIZE, |
| 351 | | - .max_keysize = AES_MAX_KEY_SIZE, |
| 352 | | - .setkey = ce_aes_setkey, |
| 353 | | - .encrypt = ecb_encrypt, |
| 354 | | - .decrypt = ecb_decrypt, |
| 589 | + .base.cra_name = "__ecb(aes)", |
| 590 | + .base.cra_driver_name = "__ecb-aes-ce", |
| 591 | + .base.cra_priority = 300, |
| 592 | + .base.cra_flags = CRYPTO_ALG_INTERNAL, |
| 593 | + .base.cra_blocksize = AES_BLOCK_SIZE, |
| 594 | + .base.cra_ctxsize = sizeof(struct crypto_aes_ctx), |
| 595 | + .base.cra_module = THIS_MODULE, |
| 596 | + |
| 597 | + .min_keysize = AES_MIN_KEY_SIZE, |
| 598 | + .max_keysize = AES_MAX_KEY_SIZE, |
| 599 | + .setkey = ce_aes_setkey, |
| 600 | + .encrypt = ecb_encrypt, |
| 601 | + .decrypt = ecb_decrypt, |
| 355 | 602 | }, { |
| 356 | | - .base = { |
| 357 | | - .cra_name = "__cbc(aes)", |
| 358 | | - .cra_driver_name = "__cbc-aes-ce", |
| 359 | | - .cra_priority = 300, |
| 360 | | - .cra_flags = CRYPTO_ALG_INTERNAL, |
| 361 | | - .cra_blocksize = AES_BLOCK_SIZE, |
| 362 | | - .cra_ctxsize = sizeof(struct crypto_aes_ctx), |
| 363 | | - .cra_module = THIS_MODULE, |
| 364 | | - }, |
| 365 | | - .min_keysize = AES_MIN_KEY_SIZE, |
| 366 | | - .max_keysize = AES_MAX_KEY_SIZE, |
| 367 | | - .ivsize = AES_BLOCK_SIZE, |
| 368 | | - .setkey = ce_aes_setkey, |
| 369 | | - .encrypt = cbc_encrypt, |
| 370 | | - .decrypt = cbc_decrypt, |
| 603 | + .base.cra_name = "__cbc(aes)", |
| 604 | + .base.cra_driver_name = "__cbc-aes-ce", |
| 605 | + .base.cra_priority = 300, |
| 606 | + .base.cra_flags = CRYPTO_ALG_INTERNAL, |
| 607 | + .base.cra_blocksize = AES_BLOCK_SIZE, |
| 608 | + .base.cra_ctxsize = sizeof(struct crypto_aes_ctx), |
| 609 | + .base.cra_module = THIS_MODULE, |
| 610 | + |
| 611 | + .min_keysize = AES_MIN_KEY_SIZE, |
| 612 | + .max_keysize = AES_MAX_KEY_SIZE, |
| 613 | + .ivsize = AES_BLOCK_SIZE, |
| 614 | + .setkey = ce_aes_setkey, |
| 615 | + .encrypt = cbc_encrypt, |
| 616 | + .decrypt = cbc_decrypt, |
| 371 | 617 | }, { |
| 372 | | - .base = { |
| 373 | | - .cra_name = "__ctr(aes)", |
| 374 | | - .cra_driver_name = "__ctr-aes-ce", |
| 375 | | - .cra_priority = 300, |
| 376 | | - .cra_flags = CRYPTO_ALG_INTERNAL, |
| 377 | | - .cra_blocksize = 1, |
| 378 | | - .cra_ctxsize = sizeof(struct crypto_aes_ctx), |
| 379 | | - .cra_module = THIS_MODULE, |
| 380 | | - }, |
| 381 | | - .min_keysize = AES_MIN_KEY_SIZE, |
| 382 | | - .max_keysize = AES_MAX_KEY_SIZE, |
| 383 | | - .ivsize = AES_BLOCK_SIZE, |
| 384 | | - .chunksize = AES_BLOCK_SIZE, |
| 385 | | - .setkey = ce_aes_setkey, |
| 386 | | - .encrypt = ctr_encrypt, |
| 387 | | - .decrypt = ctr_encrypt, |
| 618 | + .base.cra_name = "__cts(cbc(aes))", |
| 619 | + .base.cra_driver_name = "__cts-cbc-aes-ce", |
| 620 | + .base.cra_priority = 300, |
| 621 | + .base.cra_flags = CRYPTO_ALG_INTERNAL, |
| 622 | + .base.cra_blocksize = AES_BLOCK_SIZE, |
| 623 | + .base.cra_ctxsize = sizeof(struct crypto_aes_ctx), |
| 624 | + .base.cra_module = THIS_MODULE, |
| 625 | + |
| 626 | + .min_keysize = AES_MIN_KEY_SIZE, |
| 627 | + .max_keysize = AES_MAX_KEY_SIZE, |
| 628 | + .ivsize = AES_BLOCK_SIZE, |
| 629 | + .walksize = 2 * AES_BLOCK_SIZE, |
| 630 | + .setkey = ce_aes_setkey, |
| 631 | + .encrypt = cts_cbc_encrypt, |
| 632 | + .decrypt = cts_cbc_decrypt, |
| 388 | 633 | }, { |
| 389 | | - .base = { |
| 390 | | - .cra_name = "__xts(aes)", |
| 391 | | - .cra_driver_name = "__xts-aes-ce", |
| 392 | | - .cra_priority = 300, |
| 393 | | - .cra_flags = CRYPTO_ALG_INTERNAL, |
| 394 | | - .cra_blocksize = AES_BLOCK_SIZE, |
| 395 | | - .cra_ctxsize = sizeof(struct crypto_aes_xts_ctx), |
| 396 | | - .cra_module = THIS_MODULE, |
| 397 | | - }, |
| 398 | | - .min_keysize = 2 * AES_MIN_KEY_SIZE, |
| 399 | | - .max_keysize = 2 * AES_MAX_KEY_SIZE, |
| 400 | | - .ivsize = AES_BLOCK_SIZE, |
| 401 | | - .setkey = xts_set_key, |
| 402 | | - .encrypt = xts_encrypt, |
| 403 | | - .decrypt = xts_decrypt, |
| 634 | + .base.cra_name = "__ctr(aes)", |
| 635 | + .base.cra_driver_name = "__ctr-aes-ce", |
| 636 | + .base.cra_priority = 300, |
| 637 | + .base.cra_flags = CRYPTO_ALG_INTERNAL, |
| 638 | + .base.cra_blocksize = 1, |
| 639 | + .base.cra_ctxsize = sizeof(struct crypto_aes_ctx), |
| 640 | + .base.cra_module = THIS_MODULE, |
| 641 | + |
| 642 | + .min_keysize = AES_MIN_KEY_SIZE, |
| 643 | + .max_keysize = AES_MAX_KEY_SIZE, |
| 644 | + .ivsize = AES_BLOCK_SIZE, |
| 645 | + .chunksize = AES_BLOCK_SIZE, |
| 646 | + .setkey = ce_aes_setkey, |
| 647 | + .encrypt = ctr_encrypt, |
| 648 | + .decrypt = ctr_encrypt, |
| 649 | +}, { |
| 650 | + .base.cra_name = "ctr(aes)", |
| 651 | + .base.cra_driver_name = "ctr-aes-ce-sync", |
| 652 | + .base.cra_priority = 300 - 1, |
| 653 | + .base.cra_blocksize = 1, |
| 654 | + .base.cra_ctxsize = sizeof(struct crypto_aes_ctx), |
| 655 | + .base.cra_module = THIS_MODULE, |
| 656 | + |
| 657 | + .min_keysize = AES_MIN_KEY_SIZE, |
| 658 | + .max_keysize = AES_MAX_KEY_SIZE, |
| 659 | + .ivsize = AES_BLOCK_SIZE, |
| 660 | + .chunksize = AES_BLOCK_SIZE, |
| 661 | + .setkey = ce_aes_setkey, |
| 662 | + .encrypt = ctr_encrypt_sync, |
| 663 | + .decrypt = ctr_encrypt_sync, |
| 664 | +}, { |
| 665 | + .base.cra_name = "__xts(aes)", |
| 666 | + .base.cra_driver_name = "__xts-aes-ce", |
| 667 | + .base.cra_priority = 300, |
| 668 | + .base.cra_flags = CRYPTO_ALG_INTERNAL, |
| 669 | + .base.cra_blocksize = AES_BLOCK_SIZE, |
| 670 | + .base.cra_ctxsize = sizeof(struct crypto_aes_xts_ctx), |
| 671 | + .base.cra_module = THIS_MODULE, |
| 672 | + |
| 673 | + .min_keysize = 2 * AES_MIN_KEY_SIZE, |
| 674 | + .max_keysize = 2 * AES_MAX_KEY_SIZE, |
| 675 | + .ivsize = AES_BLOCK_SIZE, |
| 676 | + .walksize = 2 * AES_BLOCK_SIZE, |
| 677 | + .setkey = xts_set_key, |
| 678 | + .encrypt = xts_encrypt, |
| 679 | + .decrypt = xts_decrypt, |
| 404 | 680 | } }; |
| 405 | 681 | |
| 406 | 682 | static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)]; |
| .. | .. |
| 429 | 705 | return err; |
| 430 | 706 | |
| 431 | 707 | for (i = 0; i < ARRAY_SIZE(aes_algs); i++) { |
| 708 | + if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL)) |
| 709 | + continue; |
| 710 | + |
| 432 | 711 | algname = aes_algs[i].base.cra_name + 2; |
| 433 | 712 | drvname = aes_algs[i].base.cra_driver_name + 2; |
| 434 | 713 | basename = aes_algs[i].base.cra_driver_name; |
|---|