@@ -46,7 +46,7 @@
 
 static int ecb_crypt(struct skcipher_request *req, bool enc)
 {
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
 	struct skcipher_walk walk;
@@ -61,7 +61,7 @@
 		u8 *wsrc = walk.src.virt.addr;
 		u8 *wdst = walk.dst.virt.addr;
 
-		fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
+		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
 
 		/* Process multi-block batch */
 		if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {
@@ -90,9 +90,10 @@
 		} while (nbytes >= bsize);
 
 done:
-		cast5_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	cast5_fpu_end(fpu_enabled);
 	return err;
 }
 
@@ -196,7 +197,7 @@
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	struct skcipher_walk walk;
 	unsigned int nbytes;
 	int err;
@@ -204,11 +205,12 @@
 	err = skcipher_walk_virt(&walk, req, false);
 
 	while ((nbytes = walk.nbytes)) {
-		fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
+		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
 		nbytes = __cbc_decrypt(ctx, &walk);
-		cast5_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	cast5_fpu_end(fpu_enabled);
 	return err;
 }
 
@@ -275,7 +277,7 @@
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	struct skcipher_walk walk;
 	unsigned int nbytes;
 	int err;
@@ -283,12 +285,13 @@
 	err = skcipher_walk_virt(&walk, req, false);
 
 	while ((nbytes = walk.nbytes) >= CAST5_BLOCK_SIZE) {
-		fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
+		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
 		nbytes = __ctr_crypt(&walk, ctx);
-		cast5_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
 
+	cast5_fpu_end(fpu_enabled);
+
 	if (walk.nbytes) {
 		ctr_crypt_final(&walk, ctx);
 		err = skcipher_walk_done(&walk, 0);
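All three hunks make the same change: fpu_enabled now starts out false, cast5_fpu_begin() is passed the current state rather than a hard-coded false so it enables the FPU at most once across the walk, and the single cast5_fpu_end() moves out of the walk loop, so SIMD state is saved and restored once per request instead of once per walk step. Below is a minimal userspace sketch of that pattern; fpu_begin(), fpu_end(), FPU_THRESHOLD, and the chunk sizes are hypothetical stand-ins for the kernel helpers, not the kernel API.

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical threshold: only worth enabling SIMD for large chunks. */
#define FPU_THRESHOLD	64

/* Stand-in for cast5_fpu_begin(): enable the FPU at most once. */
static bool fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	if (fpu_enabled || nbytes < FPU_THRESHOLD)
		return fpu_enabled;
	puts("kernel_fpu_begin()");	/* in the kernel this also costs a state save */
	return true;
}

/* Stand-in for cast5_fpu_end(): disable the FPU only if it was enabled. */
static void fpu_end(bool fpu_enabled)
{
	if (fpu_enabled)
		puts("kernel_fpu_end()");
}

int main(void)
{
	unsigned int chunks[] = { 128, 96, 16 };	/* hypothetical walk steps */
	bool fpu_enabled = false;			/* as in the patched functions */
	unsigned int i;

	for (i = 0; i < sizeof(chunks) / sizeof(chunks[0]); i++) {
		fpu_enabled = fpu_begin(fpu_enabled, chunks[i]);
		printf("process %u bytes\n", chunks[i]);
	}

	fpu_end(fpu_enabled);	/* one end after the loop, as in the patch */
	return 0;
}

Running the sketch prints kernel_fpu_begin() once, processes all three chunks, then prints kernel_fpu_end() once; with the pre-patch per-iteration begin/end, the begin/end pair would appear around every chunk large enough to cross the threshold.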