forked from ~ljy/RK356X_SDK_RELEASE

hc
2023-12-09 95099d4622f8cb224d94e314c7a8e0df60b13f87
kernel/arch/x86/crypto/glue_helper.c
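No commit message is given; judging from the hunks below, this commit makes three kinds of changes to glue_helper.c: it replaces the verbose GPL notice with an SPDX identifier, it adjusts the glue function call sites to regularized prototypes that pass raw u8 pointers instead of u128 pointers, and it teaches glue_xts_req_128bit() ciphertext stealing (plus a new "decrypt" flag) so XTS can process requests whose length is not a multiple of XTS_BLOCK_SIZE.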
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Shared glue code for 128bit block ciphers
  *
@@ -7,28 +8,13 @@
  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  * CTR part based on code (crypto/ctr.c) by:
  *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
- * USA
- *
  */
 
 #include <linux/module.h>
 #include <crypto/b128ops.h>
 #include <crypto/gf128mul.h>
 #include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
 #include <crypto/xts.h>
 #include <asm/crypto/glue_helper.h>
 
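The new <crypto/scatterwalk.h> include is needed by the ciphertext-stealing path added further down, which calls scatterwalk_ffwd() and scatterwalk_map_and_copy().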
@@ -147,7 +133,8 @@
                                 src -= num_blocks - 1;
                                 dst -= num_blocks - 1;
 
-                                gctx->funcs[i].fn_u.cbc(ctx, dst, src);
+                                gctx->funcs[i].fn_u.cbc(ctx, (u8 *)dst,
+                                                        (const u8 *)src);
 
                                 nbytes -= func_bytes;
                                 if (nbytes < bsize)
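This hunk and the next three only add casts at the call sites: the fn_u.cbc/fn_u.ctr/fn_u.xts function pointers now take byte pointers rather than u128 pointers. The matching typedef change lives in asm/crypto/glue_helper.h, which is not part of this diff, so the sketch below is an assumption about its shape, written as standalone C with local stand-in types:

#include <stdint.h>

/* local stand-ins for the kernel's u8/u128; illustration only */
typedef uint8_t u8;
typedef struct { uint64_t a, b; } u128;

/* before: mode-specific 128-bit block pointers */
typedef void (*cbc_func_old_t)(void *ctx, u128 *dst, const u128 *src);

/* after (assumed): const context plus raw byte pointers, one shape
 * shared by all glue functions -- which is why the call sites in this
 * diff now cast their u128 block pointers to u8 * */
typedef void (*cbc_func_t)(const void *ctx, u8 *dst, const u8 *src);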
@@ -201,7 +188,9 @@
 
                         /* Process multi-block batch */
                         do {
-                                gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk);
+                                gctx->funcs[i].fn_u.ctr(ctx, (u8 *)dst,
+                                                        (const u8 *)src,
+                                                        &ctrblk);
                                 src += num_blocks;
                                 dst += num_blocks;
                                 nbytes -= func_bytes;
@@ -222,7 +211,8 @@
 
                 be128_to_le128(&ctrblk, (be128 *)walk.iv);
                 memcpy(&tmp, walk.src.virt.addr, nbytes);
-                gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, &tmp, &tmp,
+                gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, (u8 *)&tmp,
+                                                          (const u8 *)&tmp,
                                                           &ctrblk);
                 memcpy(walk.dst.virt.addr, &tmp, nbytes);
                 le128_to_be128((be128 *)walk.iv, &ctrblk);
@@ -252,7 +242,8 @@
 
                 if (nbytes >= func_bytes) {
                         do {
-                                gctx->funcs[i].fn_u.xts(ctx, dst, src,
+                                gctx->funcs[i].fn_u.xts(ctx, (u8 *)dst,
+                                                        (const u8 *)src,
                                                         walk->iv);
 
                                 src += num_blocks;
@@ -272,22 +263,41 @@
 int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
                         struct skcipher_request *req,
                         common_glue_func_t tweak_fn, void *tweak_ctx,
-                        void *crypt_ctx)
+                        void *crypt_ctx, bool decrypt)
 {
+        const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
         const unsigned int bsize = 128 / 8;
+        struct skcipher_request subreq;
         struct skcipher_walk walk;
-        bool fpu_enabled;
-        unsigned int nbytes;
+        bool fpu_enabled = false;
+        unsigned int nbytes, tail;
         int err;
+
+        if (req->cryptlen < XTS_BLOCK_SIZE)
+                return -EINVAL;
+
+        if (unlikely(cts)) {
+                struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+
+                tail = req->cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE;
+
+                skcipher_request_set_tfm(&subreq, tfm);
+                skcipher_request_set_callback(&subreq,
+                                              crypto_skcipher_get_flags(tfm),
+                                              NULL, NULL);
+                skcipher_request_set_crypt(&subreq, req->src, req->dst,
+                                           req->cryptlen - tail, req->iv);
+                req = &subreq;
+        }
 
         err = skcipher_walk_virt(&walk, req, false);
         nbytes = walk.nbytes;
-        if (!nbytes)
+        if (err)
                 return err;
 
         /* set minimum length to bsize, for tweak_fn */
         fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-                                     &walk, false,
+                                     &walk, fpu_enabled,
                                      nbytes < bsize ? bsize : nbytes);
 
         /* calculate first value of T */
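The CTS setup splits an unaligned request in two: tail is the short remainder plus one full block, the subrequest walks only the aligned head (cryptlen - tail bytes), and the last two blocks are handled by the stealing code after the main loop. The early return now keys off err rather than !nbytes, since with CTS the aligned head can legitimately be empty. The new decrypt flag is only consumed later, for tweak ordering. A standalone sketch of the tail arithmetic, with an assumed 36-byte request:

#include <assert.h>

#define XTS_BLOCK_SIZE 16 /* one 128-bit block */

int main(void)
{
        unsigned int cryptlen = 36;                   /* example request */
        unsigned int tail = cryptlen % XTS_BLOCK_SIZE /* 4 short bytes */
                            + XTS_BLOCK_SIZE;         /* + 1 block = 20 */
        unsigned int head = cryptlen - tail;          /* 16: walked normally */

        /* the subrequest only ever sees whole blocks */
        assert(head % XTS_BLOCK_SIZE == 0);
        return 0;
}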
@@ -301,16 +311,60 @@
 
                 glue_fpu_end(fpu_enabled);
                 fpu_enabled = false;
+
                 err = skcipher_walk_done(&walk, nbytes);
                 nbytes = walk.nbytes;
         }
+
+        if (unlikely(cts)) {
+                u8 *next_tweak, *final_tweak = req->iv;
+                struct scatterlist *src, *dst;
+                struct scatterlist s[2], d[2];
+                le128 b[2];
+
+                dst = src = scatterwalk_ffwd(s, req->src, req->cryptlen);
+                if (req->dst != req->src)
+                        dst = scatterwalk_ffwd(d, req->dst, req->cryptlen);
+
+                if (decrypt) {
+                        next_tweak = memcpy(b, req->iv, XTS_BLOCK_SIZE);
+                        gf128mul_x_ble(b, b);
+                } else {
+                        next_tweak = req->iv;
+                }
+
+                skcipher_request_set_crypt(&subreq, src, dst, XTS_BLOCK_SIZE,
+                                           next_tweak);
+
+                err = skcipher_walk_virt(&walk, req, false) ?:
+                      skcipher_walk_done(&walk,
+                                __glue_xts_req_128bit(gctx, crypt_ctx, &walk));
+                if (err)
+                        goto out;
+
+                scatterwalk_map_and_copy(b, dst, 0, XTS_BLOCK_SIZE, 0);
+                memcpy(b + 1, b, tail - XTS_BLOCK_SIZE);
+                scatterwalk_map_and_copy(b, src, XTS_BLOCK_SIZE,
+                                         tail - XTS_BLOCK_SIZE, 0);
+                scatterwalk_map_and_copy(b, dst, 0, tail, 1);
+
+                skcipher_request_set_crypt(&subreq, dst, dst, XTS_BLOCK_SIZE,
+                                           final_tweak);
+
+                err = skcipher_walk_virt(&walk, req, false) ?:
+                      skcipher_walk_done(&walk,
+                                __glue_xts_req_128bit(gctx, crypt_ctx, &walk));
+        }
+
+out:
+        glue_fpu_end(fpu_enabled);
 
         return err;
 }
 EXPORT_SYMBOL_GPL(glue_xts_req_128bit);
 
-void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv,
-                               common_glue_func_t fn)
+void glue_xts_crypt_128bit_one(const void *ctx, u8 *dst, const u8 *src,
+                               le128 *iv, common_glue_func_t fn)
 {
         le128 ivblk = *iv;
 
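The added block does the actual stealing on the last 16 + r bytes (0 < r < 16): the last full block is processed with next_tweak, the first r bytes of its output become the short final ciphertext, the r trailing input bytes are grafted onto the rest of that output, and the resulting block is processed once more with final_tweak. On decryption the tweaks are consumed out of order, which is why next_tweak is a gf128mul_x_ble()-advanced copy there. The three scatterwalk_map_and_copy() calls reduce to this shuffle on flat buffers (a simplified sketch with hypothetical names, not the kernel API):

#include <string.h>

#define BS 16 /* XTS block size */

/*
 * b is a 2*BS scratch buffer, cc the output of the last full-block
 * pass, p_tail the r trailing input bytes, 0 < r < BS.
 */
void cts_steal(unsigned char *b, const unsigned char *cc,
               const unsigned char *p_tail, unsigned int r)
{
        memcpy(b, cc, BS);     /* b[0..BS-1] = CC                      */
        memcpy(b + BS, b, r);  /* b[BS..]    = CC[0..r-1]: stolen end  */
        memcpy(b, p_tail, r);  /* b[0..r-1]  = tail bytes -> PP block  */
        /* caller writes all BS + r bytes back to dst and runs one
         * more single-block pass over b[0..BS-1] with the final tweak */
}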
@@ -318,13 +372,13 @@
         gf128mul_x_ble(iv, &ivblk);
 
         /* CC <- T xor C */
-        u128_xor(dst, src, (u128 *)&ivblk);
+        u128_xor((u128 *)dst, (const u128 *)src, (u128 *)&ivblk);
 
         /* PP <- D(Key2,CC) */
-        fn(ctx, (u8 *)dst, (u8 *)dst);
+        fn(ctx, dst, dst);
 
         /* P <- T xor PP */
-        u128_xor(dst, dst, (u128 *)&ivblk);
+        u128_xor((u128 *)dst, (u128 *)dst, (u128 *)&ivblk);
 }
 EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit_one);
 
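For reference, this last helper is the per-block XTS primitive the regularized prototypes converge on: it computes dst = fn(src xor T) xor T, with fn either the encrypt or decrypt primitive (the CC/PP comments show the decrypt reading), and advances the caller's tweak via gf128mul_x_ble(), i.e. T_{i+1} = T_i * x in GF(2^128) under the little-endian block convention.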