forked from ~ljy/RK356X_SDK_RELEASE

hc
2024-05-10 9999e48639b3cecb08ffb37358bcba3b48161b29
kernel/arch/x86/crypto/cast5_avx_glue.c
@@ -1,24 +1,9 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
- * Glue Code for the AVX assembler implemention of the Cast5 Cipher
+ * Glue Code for the AVX assembler implementation of the Cast5 Cipher
  *
  * Copyright (C) 2012 Johannes Goetzfried
  *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
- * USA
- *
 */
 
 #include <asm/crypto/glue_helper.h>
@@ -61,7 +46,7 @@
 
 static int ecb_crypt(struct skcipher_request *req, bool enc)
 {
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
 	struct skcipher_walk walk;
@@ -76,7 +61,7 @@
 		u8 *wsrc = walk.src.virt.addr;
 		u8 *wdst = walk.dst.virt.addr;
 
-		fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
+		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
 
 		/* Process multi-block batch */
 		if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {
@@ -105,9 +90,10 @@
 		} while (nbytes >= bsize);
 
 done:
-		cast5_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	cast5_fpu_end(fpu_enabled);
 	return err;
 }
 
@@ -211,7 +197,7 @@
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	struct skcipher_walk walk;
 	unsigned int nbytes;
 	int err;
@@ -219,11 +205,12 @@
 	err = skcipher_walk_virt(&walk, req, false);
 
 	while ((nbytes = walk.nbytes)) {
-		fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
+		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
 		nbytes = __cbc_decrypt(ctx, &walk);
-		cast5_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	cast5_fpu_end(fpu_enabled);
 	return err;
 }
 
@@ -290,7 +277,7 @@
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	struct skcipher_walk walk;
 	unsigned int nbytes;
 	int err;
@@ -298,12 +285,13 @@
 	err = skcipher_walk_virt(&walk, req, false);
 
 	while ((nbytes = walk.nbytes) >= CAST5_BLOCK_SIZE) {
-		fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
+		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
 		nbytes = __ctr_crypt(&walk, ctx);
-		cast5_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
 
+	cast5_fpu_end(fpu_enabled);
+
 	if (walk.nbytes) {
 		ctr_crypt_final(&walk, ctx);
 		err = skcipher_walk_done(&walk, 0);
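
Aside from the SPDX header conversion in the first hunk, every hunk applies the same refactor: fpu_enabled now starts as false, each walk step feeds the current state back into cast5_fpu_begin() so the FPU section is entered lazily on the first sufficiently large chunk and then kept open, and cast5_fpu_end() runs once after the loop instead of once per step. Below is a minimal userspace C sketch of that pattern, not code from this tree: fpu_begin(), fpu_end(), PARALLEL_THRESHOLD, and the chunk sizes are hypothetical stand-ins for the kernel's cast5_fpu_begin()/cast5_fpu_end() helpers and their bsize * CAST5_PARALLEL_BLOCKS check.

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-in: enter the FPU section at most once, and only when
 * a chunk is large enough to benefit from the parallel (AVX) path. */
#define PARALLEL_THRESHOLD 4	/* blocks; placeholder for the real limit */

static bool fpu_begin(bool fpu_enabled, unsigned int nblocks)
{
	if (fpu_enabled || nblocks < PARALLEL_THRESHOLD)
		return fpu_enabled;
	puts("kernel_fpu_begin()");	/* opened on the first big chunk */
	return true;
}

static void fpu_end(bool fpu_enabled)
{
	if (fpu_enabled)
		puts("kernel_fpu_end()");	/* single teardown */
}

int main(void)
{
	unsigned int chunks[] = { 2, 8, 3 };	/* walk steps, in blocks */
	bool fpu_enabled = false;		/* mirrors the patched initializer */

	for (unsigned int i = 0; i < sizeof(chunks) / sizeof(chunks[0]); i++) {
		/* Patched pattern: pass the current state back in, so the
		 * section stays open across steps instead of being torn
		 * down and re-entered on every iteration. */
		fpu_enabled = fpu_begin(fpu_enabled, chunks[i]);
		printf("process %u blocks (fpu=%d)\n", chunks[i], fpu_enabled);
	}
	fpu_end(fpu_enabled);	/* once, after the whole walk */
	return 0;
}

The trade-off being made: the per-step begin/end on the old side of the diff pays the kernel_fpu_begin()/kernel_fpu_end() save/restore cost on every walk step, while the hoisted variant amortizes that cost across the whole request at the price of holding the FPU section open for the entire walk.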