forked from ~ljy/RK356X_SDK_RELEASE

hc
2024-05-10 9999e48639b3cecb08ffb37358bcba3b48161b29
kernel/arch/x86/crypto/glue_helper.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Shared glue code for 128bit block ciphers
  *
@@ -7,28 +8,13 @@
  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  * CTR part based on code (crypto/ctr.c) by:
  *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
- * USA
- *
  */

 #include <linux/module.h>
 #include <crypto/b128ops.h>
 #include <crypto/gf128mul.h>
 #include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
 #include <crypto/xts.h>
 #include <asm/crypto/glue_helper.h>

@@ -38,7 +24,7 @@
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	unsigned int nbytes;
 	int err;

@@ -51,7 +37,7 @@
 		unsigned int i;

 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, false, nbytes);
+					     &walk, fpu_enabled, nbytes);
 		for (i = 0; i < gctx->num_funcs; i++) {
 			func_bytes = bsize * gctx->funcs[i].num_blocks;

@@ -69,9 +55,10 @@
 			if (nbytes < bsize)
 				break;
 		}
-		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	glue_fpu_end(fpu_enabled);
 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_ecb_req_128bit);
@@ -114,7 +101,7 @@
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	unsigned int nbytes;
 	int err;

@@ -128,7 +115,7 @@
 		u128 last_iv;

 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, false, nbytes);
+					     &walk, fpu_enabled, nbytes);
 		/* Start of the last block. */
 		src += nbytes / bsize - 1;
 		dst += nbytes / bsize - 1;
@@ -147,7 +134,8 @@
 				src -= num_blocks - 1;
 				dst -= num_blocks - 1;

-				gctx->funcs[i].fn_u.cbc(ctx, dst, src);
+				gctx->funcs[i].fn_u.cbc(ctx, (u8 *)dst,
+							(const u8 *)src);

 				nbytes -= func_bytes;
 				if (nbytes < bsize)
@@ -160,10 +148,10 @@
 done:
 		u128_xor(dst, dst, (u128 *)walk.iv);
 		*(u128 *)walk.iv = last_iv;
-		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}

+	glue_fpu_end(fpu_enabled);
 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_cbc_decrypt_req_128bit);
@@ -174,7 +162,7 @@
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	unsigned int nbytes;
 	int err;

@@ -188,7 +176,7 @@
 		le128 ctrblk;

 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, false, nbytes);
+					     &walk, fpu_enabled, nbytes);

 		be128_to_le128(&ctrblk, (be128 *)walk.iv);

@@ -201,7 +189,9 @@

 			/* Process multi-block batch */
 			do {
-				gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk);
+				gctx->funcs[i].fn_u.ctr(ctx, (u8 *)dst,
+							(const u8 *)src,
+							&ctrblk);
 				src += num_blocks;
 				dst += num_blocks;
 				nbytes -= func_bytes;
@@ -212,9 +202,10 @@
 		}

 		le128_to_be128((be128 *)walk.iv, &ctrblk);
-		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	glue_fpu_end(fpu_enabled);

 	if (nbytes) {
 		le128 ctrblk;
@@ -222,7 +213,8 @@

 		be128_to_le128(&ctrblk, (be128 *)walk.iv);
 		memcpy(&tmp, walk.src.virt.addr, nbytes);
-		gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, &tmp, &tmp,
+		gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, (u8 *)&tmp,
+							  (const u8 *)&tmp,
 							  &ctrblk);
 		memcpy(walk.dst.virt.addr, &tmp, nbytes);
 		le128_to_be128((be128 *)walk.iv, &ctrblk);
@@ -252,7 +244,8 @@

 		if (nbytes >= func_bytes) {
 			do {
-				gctx->funcs[i].fn_u.xts(ctx, dst, src,
+				gctx->funcs[i].fn_u.xts(ctx, (u8 *)dst,
+							(const u8 *)src,
 							walk->iv);

 				src += num_blocks;
@@ -272,45 +265,102 @@
 int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
 			struct skcipher_request *req,
 			common_glue_func_t tweak_fn, void *tweak_ctx,
-			void *crypt_ctx)
+			void *crypt_ctx, bool decrypt)
 {
+	const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
 	const unsigned int bsize = 128 / 8;
+	struct skcipher_request subreq;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
-	unsigned int nbytes;
+	bool fpu_enabled = false;
+	unsigned int nbytes, tail;
 	int err;
+
+	if (req->cryptlen < XTS_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (unlikely(cts)) {
+		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+
+		tail = req->cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE;
+
+		skcipher_request_set_tfm(&subreq, tfm);
+		skcipher_request_set_callback(&subreq,
+					      crypto_skcipher_get_flags(tfm),
+					      NULL, NULL);
+		skcipher_request_set_crypt(&subreq, req->src, req->dst,
+					   req->cryptlen - tail, req->iv);
+		req = &subreq;
+	}

 	err = skcipher_walk_virt(&walk, req, false);
 	nbytes = walk.nbytes;
-	if (!nbytes)
+	if (err)
 		return err;

 	/* set minimum length to bsize, for tweak_fn */
 	fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-				     &walk, false,
+				     &walk, fpu_enabled,
 				     nbytes < bsize ? bsize : nbytes);

 	/* calculate first value of T */
 	tweak_fn(tweak_ctx, walk.iv, walk.iv);

 	while (nbytes) {
-		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, fpu_enabled,
-					     nbytes < bsize ? bsize : nbytes);
 		nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);

-		glue_fpu_end(fpu_enabled);
-		fpu_enabled = false;
 		err = skcipher_walk_done(&walk, nbytes);
 		nbytes = walk.nbytes;
 	}
+
+	if (unlikely(cts)) {
+		u8 *next_tweak, *final_tweak = req->iv;
+		struct scatterlist *src, *dst;
+		struct scatterlist s[2], d[2];
+		le128 b[2];
+
+		dst = src = scatterwalk_ffwd(s, req->src, req->cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(d, req->dst, req->cryptlen);
+
+		if (decrypt) {
+			next_tweak = memcpy(b, req->iv, XTS_BLOCK_SIZE);
+			gf128mul_x_ble(b, b);
+		} else {
+			next_tweak = req->iv;
+		}
+
+		skcipher_request_set_crypt(&subreq, src, dst, XTS_BLOCK_SIZE,
+					   next_tweak);
+
+		err = skcipher_walk_virt(&walk, req, false) ?:
+		      skcipher_walk_done(&walk,
+				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
+		if (err)
+			goto out;
+
+		scatterwalk_map_and_copy(b, dst, 0, XTS_BLOCK_SIZE, 0);
+		memcpy(b + 1, b, tail - XTS_BLOCK_SIZE);
+		scatterwalk_map_and_copy(b, src, XTS_BLOCK_SIZE,
+					 tail - XTS_BLOCK_SIZE, 0);
+		scatterwalk_map_and_copy(b, dst, 0, tail, 1);
+
+		skcipher_request_set_crypt(&subreq, dst, dst, XTS_BLOCK_SIZE,
+					   final_tweak);
+
+		err = skcipher_walk_virt(&walk, req, false) ?:
+		      skcipher_walk_done(&walk,
				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
+	}
+
+out:
+	glue_fpu_end(fpu_enabled);

 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_xts_req_128bit);

-void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv,
-			       common_glue_func_t fn)
+void glue_xts_crypt_128bit_one(const void *ctx, u8 *dst, const u8 *src,
+			       le128 *iv, common_glue_func_t fn)
 {
 	le128 ivblk = *iv;

@@ -318,13 +368,13 @@
 	gf128mul_x_ble(iv, &ivblk);

 	/* CC <- T xor C */
-	u128_xor(dst, src, (u128 *)&ivblk);
+	u128_xor((u128 *)dst, (const u128 *)src, (u128 *)&ivblk);

 	/* PP <- D(Key2,CC) */
-	fn(ctx, (u8 *)dst, (u8 *)dst);
+	fn(ctx, dst, dst);

 	/* P <- T xor PP */
-	u128_xor(dst, dst, (u128 *)&ivblk);
+	u128_xor((u128 *)dst, (u128 *)dst, (u128 *)&ivblk);
 }
 EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit_one);

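
Note: the diff above adds a decrypt flag to glue_xts_req_128bit() so the new ciphertext-stealing path can pick the correct tweak order for each direction. As a minimal sketch (not code from this tree), a per-cipher glue driver would be expected to pass false when encrypting and true when decrypting; the example_* names and the example_xts_ctx type below are hypothetical placeholders, and the tweak function stays the cipher's single-block encrypt routine in both directions.

/* Hypothetical caller sketch for the updated glue_xts_req_128bit() API. */
static int example_xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct example_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* decrypt = false: CTS tail uses the tweaks in natural order */
	return glue_xts_req_128bit(&example_enc_xts, req,
				   example_encrypt_one, &ctx->tweak_ctx,
				   &ctx->crypt_ctx, false);
}

static int example_xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct example_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* decrypt = true: the helper swaps the last two tweaks for CTS */
	return glue_xts_req_128bit(&example_dec_xts, req,
				   example_encrypt_one, &ctx->tweak_ctx,
				   &ctx->crypt_ctx, true);
}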