hc
2023-12-11 6778948f9de86c3cfaf36725a7c87dcff9ba247f
kernel/arch/x86/crypto/glue_helper.c
....@@ -24,7 +24,7 @@
2424 void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
2525 const unsigned int bsize = 128 / 8;
2626 struct skcipher_walk walk;
27 - bool fpu_enabled;
27 + bool fpu_enabled = false;
2828 unsigned int nbytes;
2929 int err;
3030
....@@ -37,7 +37,7 @@
3737 unsigned int i;
3838
3939 fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
40 - &walk, false, nbytes);
40 + &walk, fpu_enabled, nbytes);
4141 for (i = 0; i < gctx->num_funcs; i++) {
4242 func_bytes = bsize * gctx->funcs[i].num_blocks;
4343
....@@ -55,9 +55,10 @@
5555 if (nbytes < bsize)
5656 break;
5757 }
58 - glue_fpu_end(fpu_enabled);
5958 err = skcipher_walk_done(&walk, nbytes);
6059 }
60 +
61 + glue_fpu_end(fpu_enabled);
6162 return err;
6263 }
6364 EXPORT_SYMBOL_GPL(glue_ecb_req_128bit);
....@@ -100,7 +101,7 @@
100101 void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
101102 const unsigned int bsize = 128 / 8;
102103 struct skcipher_walk walk;
103 - bool fpu_enabled;
104 + bool fpu_enabled = false;
104105 unsigned int nbytes;
105106 int err;
106107
....@@ -114,7 +115,7 @@
114115 u128 last_iv;
115116
116117 fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
117 - &walk, false, nbytes);
118 + &walk, fpu_enabled, nbytes);
118119 /* Start of the last block. */
119120 src += nbytes / bsize - 1;
120121 dst += nbytes / bsize - 1;
....@@ -147,10 +148,10 @@
147148 done:
148149 u128_xor(dst, dst, (u128 *)walk.iv);
149150 *(u128 *)walk.iv = last_iv;
150 - glue_fpu_end(fpu_enabled);
151151 err = skcipher_walk_done(&walk, nbytes);
152152 }
153153
154 + glue_fpu_end(fpu_enabled);
154155 return err;
155156 }
156157 EXPORT_SYMBOL_GPL(glue_cbc_decrypt_req_128bit);
....@@ -161,7 +162,7 @@
161162 void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
162163 const unsigned int bsize = 128 / 8;
163164 struct skcipher_walk walk;
164 - bool fpu_enabled;
165 + bool fpu_enabled = false;
165166 unsigned int nbytes;
166167 int err;
167168
....@@ -175,7 +176,7 @@
175176 le128 ctrblk;
176177
177178 fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
178 - &walk, false, nbytes);
179 + &walk, fpu_enabled, nbytes);
179180
180181 be128_to_le128(&ctrblk, (be128 *)walk.iv);
181182
....@@ -201,9 +202,10 @@
201202 }
202203
203204 le128_to_be128((be128 *)walk.iv, &ctrblk);
204 - glue_fpu_end(fpu_enabled);
205205 err = skcipher_walk_done(&walk, nbytes);
206206 }
207 +
208 + glue_fpu_end(fpu_enabled);
207209
208210 if (nbytes) {
209211 le128 ctrblk;
....@@ -304,13 +306,7 @@
304306 tweak_fn(tweak_ctx, walk.iv, walk.iv);
305307
306308 while (nbytes) {
307 - fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
308 - &walk, fpu_enabled,
309 - nbytes < bsize ? bsize : nbytes);
310309 nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);
311 -
312 - glue_fpu_end(fpu_enabled);
313 - fpu_enabled = false;
314310
315311 err = skcipher_walk_done(&walk, nbytes);
316312 nbytes = walk.nbytes;