--- a/arch/x86/crypto/glue_helper.c
+++ b/arch/x86/crypto/glue_helper.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Shared glue code for 128bit block ciphers
  *
@@ -7,28 +8,13 @@
  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  * CTR part based on code (crypto/ctr.c) by:
  * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
- * USA
- *
 */

 #include <linux/module.h>
 #include <crypto/b128ops.h>
 #include <crypto/gf128mul.h>
 #include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
 #include <crypto/xts.h>
 #include <asm/crypto/glue_helper.h>

@@ -38,7 +24,7 @@ int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled = false;
+	bool fpu_enabled;
 	unsigned int nbytes;
 	int err;

@@ -51,7 +37,7 @@ int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
 		unsigned int i;

 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, fpu_enabled, nbytes);
+					     &walk, false, nbytes);
 		for (i = 0; i < gctx->num_funcs; i++) {
 			func_bytes = bsize * gctx->funcs[i].num_blocks;

@@ -69,10 +55,9 @@ int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
 			if (nbytes < bsize)
 				break;
 		}
+		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
-
-	glue_fpu_end(fpu_enabled);
 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_ecb_req_128bit);
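
A note on the fpu_enabled changes in the hunks above (the CBC and CTR hunks below follow the same pattern): the FPU section is now opened and closed inside each walk iteration instead of spanning the whole request. glue_fpu_begin() may enter a kernel-FPU section, which disables preemption, while skcipher_walk_done() may sleep when the walk was started with atomic == false, so the section is ended before that call. A minimal sketch of the resulting shape, with a hypothetical process_blocks() standing in for the cipher-specific fn_u routines:

	while ((nbytes = walk.nbytes)) {
		/* enter the FPU section only for this chunk of data */
		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     &walk, false, nbytes);
		nbytes = process_blocks(ctx, &walk, nbytes); /* hypothetical */
		/* leave it before skcipher_walk_done(), which may sleep */
		glue_fpu_end(fpu_enabled);
		err = skcipher_walk_done(&walk, nbytes);
	}
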
@@ -115,7 +100,7 @@ int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled = false;
+	bool fpu_enabled;
 	unsigned int nbytes;
 	int err;

@@ -129,7 +114,7 @@ int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
 		u128 last_iv;

 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, fpu_enabled, nbytes);
+					     &walk, false, nbytes);
 		/* Start of the last block. */
 		src += nbytes / bsize - 1;
 		dst += nbytes / bsize - 1;
@@ -148,7 +133,8 @@ int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
 				src -= num_blocks - 1;
 				dst -= num_blocks - 1;

-				gctx->funcs[i].fn_u.cbc(ctx, dst, src);
+				gctx->funcs[i].fn_u.cbc(ctx, (u8 *)dst,
+							(const u8 *)src);

 				nbytes -= func_bytes;
 				if (nbytes < bsize)
@@ -161,10 +147,10 @@ int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
 done:
 		u128_xor(dst, dst, (u128 *)walk.iv);
 		*(u128 *)walk.iv = last_iv;
+		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}

-	glue_fpu_end(fpu_enabled);
 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_cbc_decrypt_req_128bit);
@@ -175,7 +161,7 @@ int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled = false;
+	bool fpu_enabled;
 	unsigned int nbytes;
 	int err;

@@ -189,7 +175,7 @@ int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
 		le128 ctrblk;

 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, fpu_enabled, nbytes);
+					     &walk, false, nbytes);

 		be128_to_le128(&ctrblk, (be128 *)walk.iv);

@@ -202,7 +188,9 @@ int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,

 			/* Process multi-block batch */
 			do {
-				gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk);
+				gctx->funcs[i].fn_u.ctr(ctx, (u8 *)dst,
+							(const u8 *)src,
+							&ctrblk);
 				src += num_blocks;
 				dst += num_blocks;
 				nbytes -= func_bytes;
@@ -213,10 +201,9 @@ int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
 		}

 		le128_to_be128((be128 *)walk.iv, &ctrblk);
+		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
-
-	glue_fpu_end(fpu_enabled);

 	if (nbytes) {
 		le128 ctrblk;
@@ -224,7 +211,8 @@ int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,

 		be128_to_le128(&ctrblk, (be128 *)walk.iv);
 		memcpy(&tmp, walk.src.virt.addr, nbytes);
-		gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, &tmp, &tmp,
+		gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, (u8 *)&tmp,
+							  (const u8 *)&tmp,
 							  &ctrblk);
 		memcpy(walk.dst.virt.addr, &tmp, nbytes);
 		le128_to_be128((be128 *)walk.iv, &ctrblk);
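
The if (nbytes) branch above handles a trailing sub-block in CTR mode: the leftover bytes are copied into a full-size stack block, the block is keystream-encrypted in place, and only the leftover bytes are copied back out. A self-contained model of that copy-crypt-copy pattern (the fixed keystream byte is a placeholder, not how the kernel derives it):

#include <stdint.h>
#include <string.h>

/* placeholder keystream application for one 16-byte block */
static void xor_keystream(uint8_t block[16])
{
	for (int i = 0; i < 16; i++)
		block[i] ^= 0xaa;
}

/* widen a partial final block so the block function can run */
static void ctr_tail(uint8_t *dst, const uint8_t *src, unsigned int nbytes)
{
	uint8_t tmp[16] = { 0 };

	memcpy(tmp, src, nbytes);	/* leftover bytes into a full block */
	xor_keystream(tmp);		/* process the whole block */
	memcpy(dst, tmp, nbytes);	/* keep only nbytes of output */
}
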
@@ -254,7 +242,8 @@ static unsigned int __glue_xts_req_128bit(const struct common_glue_ctx *gctx,

 		if (nbytes >= func_bytes) {
 			do {
-				gctx->funcs[i].fn_u.xts(ctx, dst, src,
+				gctx->funcs[i].fn_u.xts(ctx, (u8 *)dst,
+							(const u8 *)src,
 							walk->iv);

 				src += num_blocks;
@@ -274,17 +263,36 @@ static unsigned int __glue_xts_req_128bit(const struct common_glue_ctx *gctx,
 int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
 			struct skcipher_request *req,
 			common_glue_func_t tweak_fn, void *tweak_ctx,
-			void *crypt_ctx)
+			void *crypt_ctx, bool decrypt)
 {
+	const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
 	const unsigned int bsize = 128 / 8;
+	struct skcipher_request subreq;
 	struct skcipher_walk walk;
 	bool fpu_enabled = false;
-	unsigned int nbytes;
+	unsigned int nbytes, tail;
 	int err;
+
+	if (req->cryptlen < XTS_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (unlikely(cts)) {
+		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+
+		tail = req->cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE;
+
+		skcipher_request_set_tfm(&subreq, tfm);
+		skcipher_request_set_callback(&subreq,
+					      crypto_skcipher_get_flags(tfm),
+					      NULL, NULL);
+		skcipher_request_set_crypt(&subreq, req->src, req->dst,
+					   req->cryptlen - tail, req->iv);
+		req = &subreq;
+	}

 	err = skcipher_walk_virt(&walk, req, false);
 	nbytes = walk.nbytes;
-	if (!nbytes)
+	if (err)
 		return err;

 	/* set minimum length to bsize, for tweak_fn */
@@ -296,15 +304,62 @@ int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
 	tweak_fn(tweak_ctx, walk.iv, walk.iv);

 	while (nbytes) {
+		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
+					     &walk, fpu_enabled,
+					     nbytes < bsize ? bsize : nbytes);
 		nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);
+
+		glue_fpu_end(fpu_enabled);
+		fpu_enabled = false;

 		err = skcipher_walk_done(&walk, nbytes);
 		nbytes = walk.nbytes;
 	}

+	if (unlikely(cts)) {
+		u8 *next_tweak, *final_tweak = req->iv;
+		struct scatterlist *src, *dst;
+		struct scatterlist s[2], d[2];
+		le128 b[2];
+
+		dst = src = scatterwalk_ffwd(s, req->src, req->cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(d, req->dst, req->cryptlen);
+
+		if (decrypt) {
+			next_tweak = memcpy(b, req->iv, XTS_BLOCK_SIZE);
+			gf128mul_x_ble(b, b);
+		} else {
+			next_tweak = req->iv;
+		}
+
+		skcipher_request_set_crypt(&subreq, src, dst, XTS_BLOCK_SIZE,
+					   next_tweak);
+
+		err = skcipher_walk_virt(&walk, req, false) ?:
+		      skcipher_walk_done(&walk,
+				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
+		if (err)
+			goto out;
+
+		scatterwalk_map_and_copy(b, dst, 0, XTS_BLOCK_SIZE, 0);
+		memcpy(b + 1, b, tail - XTS_BLOCK_SIZE);
+		scatterwalk_map_and_copy(b, src, XTS_BLOCK_SIZE,
+					 tail - XTS_BLOCK_SIZE, 0);
+		scatterwalk_map_and_copy(b, dst, 0, tail, 1);
+
+		skcipher_request_set_crypt(&subreq, dst, dst, XTS_BLOCK_SIZE,
+					   final_tweak);
+
+		err = skcipher_walk_virt(&walk, req, false) ?:
+		      skcipher_walk_done(&walk,
+				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
+	}
+
+out:
 	glue_fpu_end(fpu_enabled);

 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_xts_req_128bit);

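
The new cts path above adds ciphertext stealing, so glue_xts_req_128bit() now accepts request lengths that are not a multiple of the block size; the new decrypt flag is needed because decryption consumes the last two tweaks in swapped order, hence the gf128mul_x_ble() advanced copy for next_tweak while final_tweak keeps the unadvanced iv. Everything except the last full block plus the partial tail is processed as ordinary XTS through subreq; the final tail bytes are then handled by stealing the trailing bytes of the last full block's output to pad the short block. A worked example of the tail arithmetic, assuming XTS_BLOCK_SIZE == 16 as defined in <crypto/xts.h>:

#include <stdio.h>

int main(void)
{
	unsigned int cryptlen = 100;		/* not a multiple of 16 */
	unsigned int tail = cryptlen % 16 + 16;	/* 20: full block + 4 bytes */
	unsigned int bulk = cryptlen - tail;	/* 80: processed via subreq */

	printf("bulk=%u tail=%u\n", bulk, tail);
	return 0;
}
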
@@ -311,5 +366,5 @@
-void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv,
-			       common_glue_func_t fn)
+void glue_xts_crypt_128bit_one(const void *ctx, u8 *dst, const u8 *src,
+			       le128 *iv, common_glue_func_t fn)
 {
 	le128 ivblk = *iv;

@@ -317,13 +372,13 @@ void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv,
 	gf128mul_x_ble(iv, &ivblk);

 	/* CC <- T xor C */
-	u128_xor(dst, src, (u128 *)&ivblk);
+	u128_xor((u128 *)dst, (const u128 *)src, (u128 *)&ivblk);

 	/* PP <- D(Key2,CC) */
-	fn(ctx, (u8 *)dst, (u8 *)dst);
+	fn(ctx, dst, dst);

 	/* P <- T xor PP */
-	u128_xor(dst, dst, (u128 *)&ivblk);
+	u128_xor((u128 *)dst, (u128 *)dst, (u128 *)&ivblk);
 }
 EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit_one);

---|