+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * Bit sliced AES using NEON instructions
  *
  * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
  */
 
 #include <asm/neon.h>
+#include <asm/simd.h>
 #include <crypto/aes.h>
-#include <crypto/cbc.h>
+#include <crypto/ctr.h>
+#include <crypto/internal/cipher.h>
 #include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
 #include <crypto/xts.h>
 #include <linux/module.h>
 
@@ ... @@
 MODULE_LICENSE("GPL v2");
 
 MODULE_ALIAS_CRYPTO("ecb(aes)");
-MODULE_ALIAS_CRYPTO("cbc(aes)");
+MODULE_ALIAS_CRYPTO("cbc(aes)-all");
 MODULE_ALIAS_CRYPTO("ctr(aes)");
 MODULE_ALIAS_CRYPTO("xts(aes)");
+
+MODULE_IMPORT_NS(CRYPTO_INTERNAL);
 
 asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);
 
@@ ... @@
                                   int rounds, int blocks, u8 ctr[], u8 final[]);
 
 asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
-                                  int rounds, int blocks, u8 iv[]);
+                                  int rounds, int blocks, u8 iv[], int);
 asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
-                                  int rounds, int blocks, u8 iv[]);
+                                  int rounds, int blocks, u8 iv[], int);
 
 struct aesbs_ctx {
         int     rounds;
@@ ... @@
 
 struct aesbs_cbc_ctx {
         struct aesbs_ctx        key;
-        struct crypto_cipher    *enc_tfm;
+        struct crypto_skcipher  *enc_tfm;
 };
 
 struct aesbs_xts_ctx {
         struct aesbs_ctx        key;
+        struct crypto_cipher    *cts_tfm;
         struct crypto_cipher    *tweak_tfm;
+};
+
+struct aesbs_ctr_ctx {
+        struct aesbs_ctx        key;            /* must be first member */
+        struct crypto_aes_ctx   fallback;
 };
 
 static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
@@ ... @@
         struct crypto_aes_ctx rk;
         int err;
 
-        err = crypto_aes_expand_key(&rk, in_key, key_len);
+        err = aes_expandkey(&rk, in_key, key_len);
         if (err)
                 return err;
 
@@ ... @@
         struct skcipher_walk walk;
         int err;
 
-        err = skcipher_walk_virt(&walk, req, true);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        kernel_neon_begin();
         while (walk.nbytes >= AES_BLOCK_SIZE) {
                 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
 
@@ ... @@
                         blocks = round_down(blocks,
                                             walk.stride / AES_BLOCK_SIZE);
 
+                kernel_neon_begin();
                 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
                    ctx->rounds, blocks);
+                kernel_neon_end();
                 err = skcipher_walk_done(&walk,
                                          walk.nbytes - blocks * AES_BLOCK_SIZE);
         }
-        kernel_neon_end();
 
         return err;
 }
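Note the pattern that the rest of the patch repeats for every mode: the walk is now started with atomic == false, and kernel_neon_begin()/kernel_neon_end() bracket only the block processing inside the loop rather than the whole walk, so the skcipher walk is free to sleep between chunks (NEON use disables preemption on ARM, so nothing may sleep while it is held). A minimal sketch of that structure, with do_neon_blocks() as a hypothetical stand-in for one of the aesbs_*_encrypt() assembly routines:

```c
/*
 * Sketch only: per-chunk kernel-mode NEON inside a non-atomic skcipher walk.
 * do_neon_blocks() is a placeholder name, not a function from this driver.
 */
static int walk_and_crypt(struct skcipher_request *req,
			  void (*do_neon_blocks)(u8 *dst, const u8 *src,
						 unsigned int blocks))
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);	/* may sleep between chunks */

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		kernel_neon_begin();		/* NEON held only while crunching */
		do_neon_blocks(walk.dst.virt.addr, walk.src.virt.addr, blocks);
		kernel_neon_end();

		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}
	return err;
}
```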
|---|
@@ ... @@
         struct crypto_aes_ctx rk;
         int err;
 
-        err = crypto_aes_expand_key(&rk, in_key, key_len);
+        err = aes_expandkey(&rk, in_key, key_len);
         if (err)
                 return err;
 
@@ ... @@
         kernel_neon_begin();
         aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
         kernel_neon_end();
+        memzero_explicit(&rk, sizeof(rk));
 
-        return crypto_cipher_setkey(ctx->enc_tfm, in_key, key_len);
-}
-
-static void cbc_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
-{
-        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
-
-        crypto_cipher_encrypt_one(ctx->enc_tfm, dst, src);
+        return crypto_skcipher_setkey(ctx->enc_tfm, in_key, key_len);
 }
 
 static int cbc_encrypt(struct skcipher_request *req)
 {
-        return crypto_cbc_encrypt_walk(req, cbc_encrypt_one);
+        struct skcipher_request *subreq = skcipher_request_ctx(req);
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+        skcipher_request_set_tfm(subreq, ctx->enc_tfm);
+        skcipher_request_set_callback(subreq,
+                                      skcipher_request_flags(req),
+                                      NULL, NULL);
+        skcipher_request_set_crypt(subreq, req->src, req->dst,
+                                   req->cryptlen, req->iv);
+
+        return crypto_skcipher_encrypt(subreq);
 }
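CBC encryption is the one mode the bit-sliced NEON code cannot help with: every output block feeds into the next, so there is nothing for the eight-blocks-per-pass kernel to batch, and cbc_encrypt() above simply hands the whole request to whichever other "cbc(aes)" implementation the crypto core provides, through a sub-request placed in the request context (sized by cbc_init() below). Decryption has no such dependency and stays on the NEON path. The chaining, written out as the standard CBC definitions rather than code from this patch:

```c
/*
 * Standard CBC chaining; P[i]/C[i] are plaintext/ciphertext blocks,
 * E()/D() are single-block AES with key K, and C[-1] is the IV.
 *
 *   encrypt: C[i] = E(K, P[i] ^ C[i-1])  - serial: each block must wait for
 *                                           the previous ciphertext block
 *   decrypt: P[i] = D(K, C[i]) ^ C[i-1]  - every D(K, C[i]) is independent,
 *                                           so aesbs_cbc_decrypt() can run
 *                                           eight blocks per NEON pass
 */
```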
|---|
 
 static int cbc_decrypt(struct skcipher_request *req)
@@ ... @@
         struct skcipher_walk walk;
         int err;
 
-        err = skcipher_walk_virt(&walk, req, true);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        kernel_neon_begin();
         while (walk.nbytes >= AES_BLOCK_SIZE) {
                 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
 
@@ ... @@
                         blocks = round_down(blocks,
                                             walk.stride / AES_BLOCK_SIZE);
 
+                kernel_neon_begin();
                 aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   ctx->key.rk, ctx->key.rounds, blocks,
                                   walk.iv);
+                kernel_neon_end();
                 err = skcipher_walk_done(&walk,
                                          walk.nbytes - blocks * AES_BLOCK_SIZE);
         }
-        kernel_neon_end();
 
         return err;
 }
 
-static int cbc_init(struct crypto_tfm *tfm)
+static int cbc_init(struct crypto_skcipher *tfm)
 {
-        struct aesbs_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
+        unsigned int reqsize;
 
-        ctx->enc_tfm = crypto_alloc_cipher("aes", 0, 0);
+        ctx->enc_tfm = crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC |
+                                             CRYPTO_ALG_NEED_FALLBACK);
+        if (IS_ERR(ctx->enc_tfm))
+                return PTR_ERR(ctx->enc_tfm);
 
-        return PTR_ERR_OR_ZERO(ctx->enc_tfm);
+        reqsize = sizeof(struct skcipher_request);
+        reqsize += crypto_skcipher_reqsize(ctx->enc_tfm);
+        crypto_skcipher_set_reqsize(tfm, reqsize);
+
+        return 0;
 }
 
-static void cbc_exit(struct crypto_tfm *tfm)
+static void cbc_exit(struct crypto_skcipher *tfm)
 {
-        struct aesbs_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-        crypto_free_cipher(ctx->enc_tfm);
+        crypto_free_skcipher(ctx->enc_tfm);
+}
+
+static int aesbs_ctr_setkey_sync(struct crypto_skcipher *tfm, const u8 *in_key,
+                                 unsigned int key_len)
+{
+        struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+        int err;
+
+        err = aes_expandkey(&ctx->fallback, in_key, key_len);
+        if (err)
+                return err;
+
+        ctx->key.rounds = 6 + key_len / 4;
+
+        kernel_neon_begin();
+        aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
+        kernel_neon_end();
+
+        return 0;
 }
 
 static int ctr_encrypt(struct skcipher_request *req)
@@ ... @@
         u8 buf[AES_BLOCK_SIZE];
         int err;
 
-        err = skcipher_walk_virt(&walk, req, true);
+        err = skcipher_walk_virt(&walk, req, false);
 
-        kernel_neon_begin();
         while (walk.nbytes > 0) {
                 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
                 u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;
@@ ... @@
                         final = NULL;
                 }
 
+                kernel_neon_begin();
                 aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   ctx->rk, ctx->rounds, blocks, walk.iv, final);
+                kernel_neon_end();
 
                 if (final) {
                         u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
@@ ... @@
                 err = skcipher_walk_done(&walk,
                                          walk.nbytes - blocks * AES_BLOCK_SIZE);
         }
-        kernel_neon_end();
 
         return err;
+}
+
+static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
+{
+        struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+        unsigned long flags;
+
+        /*
+         * Temporarily disable interrupts to avoid races where
+         * cachelines are evicted when the CPU is interrupted
+         * to do something else.
+         */
+        local_irq_save(flags);
+        aes_encrypt(&ctx->fallback, dst, src);
+        local_irq_restore(flags);
+}
+
+static int ctr_encrypt_sync(struct skcipher_request *req)
+{
+        if (!crypto_simd_usable())
+                return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);
+
+        return ctr_encrypt(req);
 }
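ctr_encrypt_sync() is the entry point for the new synchronous "ctr(aes)" algorithm registered further down: when crypto_simd_usable() says NEON must not be touched (for instance in hard IRQ context), it falls back to the generic crypto_ctr_encrypt_walk() helper, passing ctr_encrypt_one() as the single-block primitive built on the scalar fallback key. CTR only ever needs that one primitive; each block is handled roughly like the sketch below (an illustration of the mode, not the helper's actual code):

```c
/*
 * Illustrative CTR step: dst = src XOR E(K, counter), then the counter
 * block is incremented big-endian. crypto_xor_cpy() and crypto_inc() come
 * from <crypto/algapi.h>; aes_encrypt() is the scalar AES library call
 * already used by ctr_encrypt_one() above.
 */
static void ctr_xor_one_block(const struct crypto_aes_ctx *key,
			      u8 ctrblk[AES_BLOCK_SIZE],
			      const u8 *src, u8 *dst)
{
	u8 keystream[AES_BLOCK_SIZE];

	aes_encrypt(key, keystream, ctrblk);		/* keystream = E(K, ctr) */
	crypto_xor_cpy(dst, src, keystream, AES_BLOCK_SIZE);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);		/* next counter value   */
}
```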
|---|
 
 static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
@@ ... @@
                 return err;
 
         key_len /= 2;
+        err = crypto_cipher_setkey(ctx->cts_tfm, in_key, key_len);
+        if (err)
+                return err;
         err = crypto_cipher_setkey(ctx->tweak_tfm, in_key + key_len, key_len);
         if (err)
                 return err;
@@ ... @@
         return aesbs_setkey(tfm, in_key, key_len);
 }
 
-static int xts_init(struct crypto_tfm *tfm)
+static int xts_init(struct crypto_skcipher *tfm)
 {
-        struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+        ctx->cts_tfm = crypto_alloc_cipher("aes", 0, 0);
+        if (IS_ERR(ctx->cts_tfm))
+                return PTR_ERR(ctx->cts_tfm);
 
         ctx->tweak_tfm = crypto_alloc_cipher("aes", 0, 0);
+        if (IS_ERR(ctx->tweak_tfm))
+                crypto_free_cipher(ctx->cts_tfm);
 
         return PTR_ERR_OR_ZERO(ctx->tweak_tfm);
 }
 
-static void xts_exit(struct crypto_tfm *tfm)
+static void xts_exit(struct crypto_skcipher *tfm)
 {
-        struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
         crypto_free_cipher(ctx->tweak_tfm);
+        crypto_free_cipher(ctx->cts_tfm);
 }
 
-static int __xts_crypt(struct skcipher_request *req,
+static int __xts_crypt(struct skcipher_request *req, bool encrypt,
                        void (*fn)(u8 out[], u8 const in[], u8 const rk[],
-                                  int rounds, int blocks, u8 iv[]))
+                                  int rounds, int blocks, u8 iv[], int))
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
+        int tail = req->cryptlen % AES_BLOCK_SIZE;
+        struct skcipher_request subreq;
+        u8 buf[2 * AES_BLOCK_SIZE];
         struct skcipher_walk walk;
         int err;
+
+        if (req->cryptlen < AES_BLOCK_SIZE)
+                return -EINVAL;
+
+        if (unlikely(tail)) {
+                skcipher_request_set_tfm(&subreq, tfm);
+                skcipher_request_set_callback(&subreq,
+                                              skcipher_request_flags(req),
+                                              NULL, NULL);
+                skcipher_request_set_crypt(&subreq, req->src, req->dst,
+                                           req->cryptlen - tail, req->iv);
+                req = &subreq;
+        }
 
         err = skcipher_walk_virt(&walk, req, true);
         if (err)
@@ ... @@
 
         crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv);
 
-        kernel_neon_begin();
         while (walk.nbytes >= AES_BLOCK_SIZE) {
                 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
+                int reorder_last_tweak = !encrypt && tail > 0;
 
-                if (walk.nbytes < walk.total)
+                if (walk.nbytes < walk.total) {
                         blocks = round_down(blocks,
                                             walk.stride / AES_BLOCK_SIZE);
+                        reorder_last_tweak = 0;
+                }
 
+                kernel_neon_begin();
                 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk,
-                   ctx->key.rounds, blocks, walk.iv);
+                   ctx->key.rounds, blocks, walk.iv, reorder_last_tweak);
+                kernel_neon_end();
                 err = skcipher_walk_done(&walk,
                                          walk.nbytes - blocks * AES_BLOCK_SIZE);
         }
-        kernel_neon_end();
 
-        return err;
+        if (err || likely(!tail))
+                return err;
+
+        /* handle ciphertext stealing */
+        scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE,
+                                 AES_BLOCK_SIZE, 0);
+        memcpy(buf + AES_BLOCK_SIZE, buf, tail);
+        scatterwalk_map_and_copy(buf, req->src, req->cryptlen, tail, 0);
+
+        crypto_xor(buf, req->iv, AES_BLOCK_SIZE);
+
+        if (encrypt)
+                crypto_cipher_encrypt_one(ctx->cts_tfm, buf, buf);
+        else
+                crypto_cipher_decrypt_one(ctx->cts_tfm, buf, buf);
+
+        crypto_xor(buf, req->iv, AES_BLOCK_SIZE);
+
+        scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE,
+                                 AES_BLOCK_SIZE + tail, 1);
+        return 0;
 }
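The tail handling added at the end of __xts_crypt() implements ciphertext stealing, so xts(aes) now accepts any length of at least one block rather than only multiples of the block size. The bulk of the request, rounded down to whole blocks via the on-stack subreq, still goes through the NEON code; the last full block plus the `tail` leftover bytes are then reprocessed with the single-block cts_tfm, xored with the final tweak before and after the block cipher call. The extra int argument threaded through to the assembly (reorder_last_tweak) makes the decrypt path consume the last two tweaks in swapped order, which the stolen final block requires. A small worked size split, for a hypothetical 40-byte request:

```c
/* Hypothetical example: req->cryptlen == 40, AES_BLOCK_SIZE == 16 */
int tail = 40 % AES_BLOCK_SIZE;		/* 8 leftover bytes                 */
int bulk = 40 - tail;			/* 32 bytes (2 blocks) -> NEON walk */
/*
 * The CTS pass then pulls the last full output block (bytes 16..31) and the
 * remaining 8 input bytes into a stack buffer, runs one more single-block
 * AES via ctx->cts_tfm, and writes back AES_BLOCK_SIZE + tail = 24 bytes at
 * offset 16, so the destination ends up exactly 40 bytes long.
 */
```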
|---|
 
 static int xts_encrypt(struct skcipher_request *req)
 {
-        return __xts_crypt(req, aesbs_xts_encrypt);
+        return __xts_crypt(req, true, aesbs_xts_encrypt);
 }
 
 static int xts_decrypt(struct skcipher_request *req)
 {
-        return __xts_crypt(req, aesbs_xts_decrypt);
+        return __xts_crypt(req, false, aesbs_xts_decrypt);
 }
 
 static struct skcipher_alg aes_algs[] = { {
@@ ... @@
         .base.cra_blocksize     = AES_BLOCK_SIZE,
         .base.cra_ctxsize       = sizeof(struct aesbs_cbc_ctx),
         .base.cra_module        = THIS_MODULE,
-        .base.cra_flags         = CRYPTO_ALG_INTERNAL,
-        .base.cra_init          = cbc_init,
-        .base.cra_exit          = cbc_exit,
+        .base.cra_flags         = CRYPTO_ALG_INTERNAL |
+                                  CRYPTO_ALG_NEED_FALLBACK,
 
         .min_keysize            = AES_MIN_KEY_SIZE,
         .max_keysize            = AES_MAX_KEY_SIZE,
@@ ... @@
         .setkey                 = aesbs_cbc_setkey,
         .encrypt                = cbc_encrypt,
         .decrypt                = cbc_decrypt,
+        .init                   = cbc_init,
+        .exit                   = cbc_exit,
 }, {
         .base.cra_name          = "__ctr(aes)",
         .base.cra_driver_name   = "__ctr-aes-neonbs",
@@ ... @@
         .encrypt                = ctr_encrypt,
         .decrypt                = ctr_encrypt,
 }, {
+        .base.cra_name          = "ctr(aes)",
+        .base.cra_driver_name   = "ctr-aes-neonbs-sync",
+        .base.cra_priority      = 250 - 1,
+        .base.cra_blocksize     = 1,
+        .base.cra_ctxsize       = sizeof(struct aesbs_ctr_ctx),
+        .base.cra_module        = THIS_MODULE,
+
+        .min_keysize            = AES_MIN_KEY_SIZE,
+        .max_keysize            = AES_MAX_KEY_SIZE,
+        .chunksize              = AES_BLOCK_SIZE,
+        .walksize               = 8 * AES_BLOCK_SIZE,
+        .ivsize                 = AES_BLOCK_SIZE,
+        .setkey                 = aesbs_ctr_setkey_sync,
+        .encrypt                = ctr_encrypt_sync,
+        .decrypt                = ctr_encrypt_sync,
+}, {
         .base.cra_name          = "__xts(aes)",
         .base.cra_driver_name   = "__xts-aes-neonbs",
         .base.cra_priority      = 250,
@@ ... @@
         .base.cra_ctxsize       = sizeof(struct aesbs_xts_ctx),
         .base.cra_module        = THIS_MODULE,
         .base.cra_flags         = CRYPTO_ALG_INTERNAL,
-        .base.cra_init          = xts_init,
-        .base.cra_exit          = xts_exit,
 
         .min_keysize            = 2 * AES_MIN_KEY_SIZE,
         .max_keysize            = 2 * AES_MAX_KEY_SIZE,
@@ ... @@
         .setkey                 = aesbs_xts_setkey,
         .encrypt                = xts_encrypt,
         .decrypt                = xts_decrypt,
+        .init                   = xts_init,
+        .exit                   = xts_exit,
 } };
 
 static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
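For completeness, callers never use these internal algorithms directly; they ask the crypto core for the plain name and get whichever driver wins on priority (possibly the NEON ones registered above). A minimal sketch of a one-shot CBC encryption over a contiguous buffer, with a hypothetical helper name and error paths kept short:

```c
#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

/* Sketch only: encrypt 'len' bytes (a multiple of AES_BLOCK_SIZE) in place. */
static int encrypt_buffer_cbc(u8 *data, unsigned int len,
			      const u8 *key, unsigned int keylen,
			      u8 iv[AES_BLOCK_SIZE])
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, data, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
				      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* wait for completion even if an async driver picks up the request */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
```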
|---|