```diff
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Shared glue code for 128bit block ciphers
  *
@@ -7,28 +8,13 @@
  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  * CTR part based on code (crypto/ctr.c) by:
  * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
- * USA
- *
  */
 
 #include <linux/module.h>
 #include <crypto/b128ops.h>
 #include <crypto/gf128mul.h>
 #include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
 #include <crypto/xts.h>
 #include <asm/crypto/glue_helper.h>
 
@@ -38,7 +24,7 @@
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	unsigned int nbytes;
 	int err;
 
@@ -51,7 +37,7 @@
 		unsigned int i;
 
 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, false, nbytes);
+					     &walk, fpu_enabled, nbytes);
 		for (i = 0; i < gctx->num_funcs; i++) {
 			func_bytes = bsize * gctx->funcs[i].num_blocks;
 
@@ -69,9 +55,10 @@
 			if (nbytes < bsize)
 				break;
 		}
-		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	glue_fpu_end(fpu_enabled);
 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_ecb_req_128bit);
```
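Across the ECB, CBC, and CTR paths, the hunks in this patch all make the same change: `fpu_enabled` now starts as `false`, each pass of the walk loop feeds the current value back into `glue_fpu_begin()` so the FPU is enabled at most once, and the single matching `glue_fpu_end()` moves out of the loop so one FPU region spans the whole walk instead of being torn down and rebuilt per iteration. A minimal sketch of that enable-once pattern, with `fpu_begin()`/`fpu_end()` as hypothetical stand-ins for the kernel's `kernel_fpu_begin()`/`kernel_fpu_end()` and the limit check reduced to a simple byte threshold:

```c
#include <stdbool.h>

/* Hypothetical stand-ins for kernel_fpu_begin()/kernel_fpu_end(). */
void fpu_begin(void);
void fpu_end(void);

/*
 * Turn the FPU on at most once: a no-op if it is already on, or if
 * the chunk is below the size where SIMD beats the save/restore cost.
 */
static bool glue_fpu_begin_once(unsigned int nbytes, unsigned int limit,
				bool fpu_enabled)
{
	if (fpu_enabled)
		return true;
	if (nbytes < limit)
		return false;
	fpu_begin();
	return true;
}

static void walk_all_chunks(const unsigned int *chunk_bytes, int nchunks,
			    unsigned int limit)
{
	bool fpu_enabled = false;
	int i;

	for (i = 0; i < nchunks; i++) {
		fpu_enabled = glue_fpu_begin_once(chunk_bytes[i], limit,
						  fpu_enabled);
		/* ... process chunk i, with or without SIMD ... */
	}

	/* one teardown after the whole walk, as in the patch */
	if (fpu_enabled)
		fpu_end();
}
```

The real `glue_fpu_begin()` also treats a negative `fpu_blocks_limit` as "never use the FPU"; the sketch keeps only the enable-once logic that the patch relies on.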
```diff
@@ -114,7 +101,7 @@
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	unsigned int nbytes;
 	int err;
 
@@ -128,7 +115,7 @@
 		u128 last_iv;
 
 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, false, nbytes);
+					     &walk, fpu_enabled, nbytes);
 		/* Start of the last block. */
 		src += nbytes / bsize - 1;
 		dst += nbytes / bsize - 1;
@@ -147,7 +134,8 @@
 			src -= num_blocks - 1;
 			dst -= num_blocks - 1;
 
-			gctx->funcs[i].fn_u.cbc(ctx, dst, src);
+			gctx->funcs[i].fn_u.cbc(ctx, (u8 *)dst,
+						(const u8 *)src);
 
 			nbytes -= func_bytes;
 			if (nbytes < bsize)
@@ -160,10 +148,10 @@
 done:
 		u128_xor(dst, dst, (u128 *)walk.iv);
 		*(u128 *)walk.iv = last_iv;
-		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
 
+	glue_fpu_end(fpu_enabled);
 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_cbc_decrypt_req_128bit);
```
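The CBC-decrypt hunks make the same two changes (initialized `fpu_enabled`, one `glue_fpu_end()` after the loop) and also regularize `fn_u.cbc` to byte pointers. The surrounding context explains the backwards walk: CBC decryption computes P_i = D(C_i) xor C_(i-1), so decrypting in place from the front would overwrite ciphertext the next block still needs, while walking from the last block to the first only ever reads ciphertext that is still intact; `last_iv` preserves the final ciphertext block as the chaining value for the next request. A one-block-at-a-time sketch of that ordering, with `decrypt_block()` as a hypothetical single-block cipher:

```c
#include <string.h>

typedef unsigned char u8;
enum { BSIZE = 16 };

/* Hypothetical one-block cipher: dst = D(key, src). */
void decrypt_block(const void *ctx, u8 *dst, const u8 *src);

/*
 * In-place CBC decryption, last block first, mirroring the structure
 * of glue_cbc_decrypt_req_128bit().
 */
static void cbc_decrypt_inplace(const void *ctx, u8 *buf,
				unsigned int nblocks, u8 iv[BSIZE])
{
	u8 last_iv[BSIZE];
	unsigned int i = nblocks - 1, j;

	/* remember the last ciphertext block: it is the next IV */
	memcpy(last_iv, buf + i * BSIZE, BSIZE);

	/* walk backwards so the ciphertext we XOR with is still intact */
	for (; i > 0; i--) {
		decrypt_block(ctx, buf + i * BSIZE, buf + i * BSIZE);
		for (j = 0; j < BSIZE; j++)
			buf[i * BSIZE + j] ^= buf[(i - 1) * BSIZE + j];
	}

	/* the first block chains to the caller's IV */
	decrypt_block(ctx, buf, buf);
	for (j = 0; j < BSIZE; j++)
		buf[j] ^= iv[j];

	memcpy(iv, last_iv, BSIZE);
}
```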
```diff
@@ -174,7 +162,7 @@
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	unsigned int nbytes;
 	int err;
 
@@ -188,7 +176,7 @@
 		le128 ctrblk;
 
 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, false, nbytes);
+					     &walk, fpu_enabled, nbytes);
 
 		be128_to_le128(&ctrblk, (be128 *)walk.iv);
 
@@ -201,7 +189,9 @@
 
 			/* Process multi-block batch */
 			do {
-				gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk);
+				gctx->funcs[i].fn_u.ctr(ctx, (u8 *)dst,
+							(const u8 *)src,
+							&ctrblk);
 				src += num_blocks;
 				dst += num_blocks;
 				nbytes -= func_bytes;
@@ -212,9 +202,10 @@
 		}
 
 		le128_to_be128((be128 *)walk.iv, &ctrblk);
-		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	glue_fpu_end(fpu_enabled);
 
 	if (nbytes) {
 		le128 ctrblk;
@@ -222,7 +213,8 @@
 
 		be128_to_le128(&ctrblk, (be128 *)walk.iv);
 		memcpy(&tmp, walk.src.virt.addr, nbytes);
-		gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, &tmp, &tmp,
+		gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, (u8 *)&tmp,
+							  (const u8 *)&tmp,
 							  &ctrblk);
 		memcpy(walk.dst.virt.addr, &tmp, nbytes);
 		le128_to_be128((be128 *)walk.iv, &ctrblk);
```
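The CTR hunks repeat the FPU and prototype changes, and the trailing `if (nbytes)` block handles a final partial block: because CTR is a stream mode, the leftover bytes are bounced through a full-size stack block (`tmp`) so the block function never reads or writes memory past the caller's `nbytes`. The same bounce-buffer pattern in isolation, with `ctr_crypt_block()` as a hypothetical stand-in for the last (one-block) entry in `gctx->funcs`:

```c
#include <string.h>

typedef unsigned char u8;
enum { BSIZE = 16 };

/* Hypothetical one-block CTR primitive: dst = src ^ E(key, ctr), and
 * the counter block is advanced as a side effect. */
void ctr_crypt_block(const void *ctx, u8 *dst, const u8 *src, u8 ctr[16]);

/* Final-partial-block handling, as in the glue code's if (nbytes) tail. */
static void ctr_final(const void *ctx, u8 *dst, const u8 *src,
		      unsigned int nbytes, u8 ctr[16])
{
	u8 tmp[BSIZE] = { 0 };

	memcpy(tmp, src, nbytes);	/* nbytes < BSIZE */
	ctr_crypt_block(ctx, tmp, tmp, ctr);
	memcpy(dst, tmp, nbytes);	/* discard the padded remainder */
}
```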
```diff
@@ -252,7 +244,8 @@
 
 		if (nbytes >= func_bytes) {
 			do {
-				gctx->funcs[i].fn_u.xts(ctx, dst, src,
+				gctx->funcs[i].fn_u.xts(ctx, (u8 *)dst,
+							(const u8 *)src,
 							walk->iv);
 
 				src += num_blocks;
```
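The `(u8 *)`/`(const u8 *)` casts here and in the CBC/CTR hunks all serve one interface change: the per-mode function pointers behind `fn_u` now take plain byte pointers rather than a mixture of `u128 *` and `le128 *`, so every cipher shares one set of prototypes and the glue code casts once at the call site. A sketch of the assumed shape of that union; the real definitions live in `glue_helper.h`, and the names below merely follow the calls visible in this diff:

```c
typedef unsigned char u8;
typedef struct { unsigned long long a, b; } u128;	/* 128-bit block */
typedef u128 le128;					/* little-endian view */

/* One prototype per mode, all on byte pointers (assumed shapes). */
typedef void (*common_glue_func_t)(const void *ctx, u8 *dst, const u8 *src);
typedef void (*common_glue_cbc_func_t)(const void *ctx, u8 *dst,
				       const u8 *src);
typedef void (*common_glue_ctr_func_t)(const void *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
typedef void (*common_glue_xts_func_t)(const void *ctx, u8 *dst,
				       const u8 *src, le128 *iv);

struct common_glue_func_entry {
	unsigned int num_blocks;	/* blocks processed per call */
	union {
		common_glue_func_t ecb;
		common_glue_cbc_func_t cbc;
		common_glue_ctr_func_t ctr;
		common_glue_xts_func_t xts;
	} fn_u;
};
```

With every member taking `const void *ctx, u8 *dst, const u8 *src` up front, a cast at the call site is all that is needed to drive any entry from the same `u128 *` walking pointers.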
```diff
@@ -272,45 +265,102 @@
 int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
 			struct skcipher_request *req,
 			common_glue_func_t tweak_fn, void *tweak_ctx,
-			void *crypt_ctx)
+			void *crypt_ctx, bool decrypt)
 {
+	const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
 	const unsigned int bsize = 128 / 8;
+	struct skcipher_request subreq;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
-	unsigned int nbytes;
+	bool fpu_enabled = false;
+	unsigned int nbytes, tail;
 	int err;
+
+	if (req->cryptlen < XTS_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (unlikely(cts)) {
+		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+
+		tail = req->cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE;
+
+		skcipher_request_set_tfm(&subreq, tfm);
+		skcipher_request_set_callback(&subreq,
+					      crypto_skcipher_get_flags(tfm),
+					      NULL, NULL);
+		skcipher_request_set_crypt(&subreq, req->src, req->dst,
+					   req->cryptlen - tail, req->iv);
+		req = &subreq;
+	}
 
 	err = skcipher_walk_virt(&walk, req, false);
 	nbytes = walk.nbytes;
-	if (!nbytes)
+	if (err)
 		return err;
 
 	/* set minimum length to bsize, for tweak_fn */
 	fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-				     &walk, false,
+				     &walk, fpu_enabled,
 				     nbytes < bsize ? bsize : nbytes);
 
 	/* calculate first value of T */
 	tweak_fn(tweak_ctx, walk.iv, walk.iv);
 
 	while (nbytes) {
-		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, fpu_enabled,
-					     nbytes < bsize ? bsize : nbytes);
 		nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);
 
-		glue_fpu_end(fpu_enabled);
-		fpu_enabled = false;
-
 		err = skcipher_walk_done(&walk, nbytes);
 		nbytes = walk.nbytes;
 	}
+
+	if (unlikely(cts)) {
+		u8 *next_tweak, *final_tweak = req->iv;
+		struct scatterlist *src, *dst;
+		struct scatterlist s[2], d[2];
+		le128 b[2];
+
+		dst = src = scatterwalk_ffwd(s, req->src, req->cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(d, req->dst, req->cryptlen);
+
+		if (decrypt) {
+			next_tweak = memcpy(b, req->iv, XTS_BLOCK_SIZE);
+			gf128mul_x_ble(b, b);
+		} else {
+			next_tweak = req->iv;
+		}
+
+		skcipher_request_set_crypt(&subreq, src, dst, XTS_BLOCK_SIZE,
+					   next_tweak);
+
+		err = skcipher_walk_virt(&walk, req, false) ?:
+		      skcipher_walk_done(&walk,
+				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
+		if (err)
+			goto out;
+
+		scatterwalk_map_and_copy(b, dst, 0, XTS_BLOCK_SIZE, 0);
+		memcpy(b + 1, b, tail - XTS_BLOCK_SIZE);
+		scatterwalk_map_and_copy(b, src, XTS_BLOCK_SIZE,
+					 tail - XTS_BLOCK_SIZE, 0);
+		scatterwalk_map_and_copy(b, dst, 0, tail, 1);
+
+		skcipher_request_set_crypt(&subreq, dst, dst, XTS_BLOCK_SIZE,
+					   final_tweak);
+
+		err = skcipher_walk_virt(&walk, req, false) ?:
+		      skcipher_walk_done(&walk,
+				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
+	}
+
+out:
+	glue_fpu_end(fpu_enabled);
 
 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_xts_req_128bit);
 
-void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv,
-			       common_glue_func_t fn)
+void glue_xts_crypt_128bit_one(const void *ctx, u8 *dst, const u8 *src,
+			       le128 *iv, common_glue_func_t fn)
 {
 	le128 ivblk = *iv;
 
```
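The bulk of the change teaches `glue_xts_req_128bit()` ciphertext stealing, so XTS can handle messages that are not a multiple of the 16-byte block. When `cryptlen` has a remainder, a `subreq` first runs the normal path over everything except the last `tail` bytes (one full block plus the remainder). The stolen-block dance then happens in two single-block steps: encrypt the penultimate plaintext block, let the short final chunk keep the head of that ciphertext as Cm, graft the trailing plaintext bytes in its place, and run the stitched block through once more with the final tweak. On decryption the two tweaks are consumed in the opposite order, which is why the code pre-advances a copy of `req->iv` with `gf128mul_x_ble()` when `decrypt` is set. A sketch of the encrypt-side byte shuffle on a flat buffer instead of scatterlists, with `xts_one_block()` as a hypothetical single-block XTS call:

```c
#include <string.h>

typedef unsigned char u8;
enum { XTS_BLOCK_SIZE = 16 };

/* Hypothetical one-block XTS primitive: dst = XTS(key, tweak, src). */
void xts_one_block(const void *ctx, u8 *dst, const u8 *src,
		   const u8 tweak[16]);

/*
 * Encrypt-side ciphertext stealing: buf holds the last 16 + rem bytes
 * of the message (0 < rem < 16), still plaintext.  Mirrors the
 * scatterwalk-based logic in glue_xts_req_128bit().
 */
static void xts_cts_encrypt_tail(const void *ctx, u8 *buf, unsigned int rem,
				 const u8 next_tweak[16],
				 const u8 final_tweak[16])
{
	u8 b[2 * XTS_BLOCK_SIZE];

	/* CC = full ciphertext of the penultimate plaintext block */
	xts_one_block(ctx, b, buf, next_tweak);

	/* steal: Cm is the head of CC ... */
	memcpy(b + XTS_BLOCK_SIZE, b, rem);
	/* ... and the trailing plaintext bytes take its place */
	memcpy(b, buf + XTS_BLOCK_SIZE, rem);

	/* re-encrypt the stitched block with the final tweak */
	xts_one_block(ctx, b, b, final_tweak);

	memcpy(buf, b, XTS_BLOCK_SIZE);				/* C(m-1) */
	memcpy(buf + XTS_BLOCK_SIZE, b + XTS_BLOCK_SIZE, rem);	/* Cm */
}
```

The new `req->cryptlen < XTS_BLOCK_SIZE` check is the other half of the contract: stealing needs at least one full block to steal from, so shorter requests are rejected with -EINVAL.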
```diff
@@ -318,13 +368,13 @@
 	gf128mul_x_ble(iv, &ivblk);
 
 	/* CC <- T xor C */
-	u128_xor(dst, src, (u128 *)&ivblk);
+	u128_xor((u128 *)dst, (const u128 *)src, (u128 *)&ivblk);
 
 	/* PP <- D(Key2,CC) */
-	fn(ctx, (u8 *)dst, (u8 *)dst);
+	fn(ctx, dst, dst);
 
 	/* P <- T xor PP */
-	u128_xor(dst, dst, (u128 *)&ivblk);
+	u128_xor((u128 *)dst, (u128 *)dst, (u128 *)&ivblk);
 }
 EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit_one);
 
```
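`glue_xts_crypt_128bit_one()` is the textbook single-block XTS step, now on byte pointers: the tweak is advanced by a doubling in GF(2^128) while the saved copy whitens the block before and after the raw cipher call. The CC/PP comments name the decrypt direction, but the same sequence with the encrypt primitive yields XTS encryption. The arithmetic in a self-contained sketch (the kernel's `le128` differs in field layout and endianness handling; `cipher_block()` stands in for `fn`):

```c
#include <stdint.h>

/* A 128-bit block as two native 64-bit halves; lo holds bits 0..63. */
struct blk128 { uint64_t lo, hi; };

/* r = p XOR q */
static void blk_xor(struct blk128 *r, const struct blk128 *p,
		    const struct blk128 *q)
{
	r->lo = p->lo ^ q->lo;
	r->hi = p->hi ^ q->hi;
}

/* Double the tweak in GF(2^128) modulo x^128 + x^7 + x^2 + x + 1:
 * shift left by one bit, folding 0x87 into the low byte when a bit
 * falls off the top (what gf128mul_x_ble() does). */
static void tweak_double(struct blk128 *t)
{
	uint64_t carry = t->hi >> 63;

	t->hi = (t->hi << 1) | (t->lo >> 63);
	t->lo = (t->lo << 1) ^ (carry ? 0x87 : 0);
}

/* Hypothetical single-block cipher call, standing in for fn(). */
void cipher_block(const void *ctx, uint8_t *dst, const uint8_t *src);

/* One XTS block, mirroring glue_xts_crypt_128bit_one():
 * CC = T ^ C;  PP = cipher(CC);  P = T ^ PP;  then T = 2*T. */
static void xts_one(const void *ctx, struct blk128 *dst,
		    const struct blk128 *src, struct blk128 *tweak)
{
	struct blk128 t = *tweak;

	tweak_double(tweak);		/* advance T for the next block */
	blk_xor(dst, src, &t);		/* CC <- T xor C */
	cipher_block(ctx, (uint8_t *)dst, (const uint8_t *)dst);
	blk_xor(dst, dst, &t);		/* P <- T xor PP */
}
```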