@@ -24,7 +24,7 @@
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	unsigned int nbytes;
 	int err;
 
@@ -37,7 +37,7 @@
 		unsigned int i;
 
 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, false, nbytes);
+					     &walk, fpu_enabled, nbytes);
 		for (i = 0; i < gctx->num_funcs; i++) {
 			func_bytes = bsize * gctx->funcs[i].num_blocks;
 
@@ -55,9 +55,10 @@
 			if (nbytes < bsize)
 				break;
 		}
-		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	glue_fpu_end(fpu_enabled);
 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_ecb_req_128bit);
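The recurring change above turns the per-chunk FPU handling into a lazy acquire: `glue_fpu_begin()` is fed the current `fpu_enabled` state, so it opens an FPU region at most once across the walk, and the single `glue_fpu_end()` after the loop closes it. A minimal user-space model of that pattern (the stub names and the 128-byte threshold are illustrative assumptions, not kernel code):

```c
#include <stdbool.h>
#include <stdio.h>

/* Stand-ins for kernel_fpu_begin()/kernel_fpu_end(); purely illustrative. */
static void fpu_region_enter(void) { puts("enter FPU region"); }
static void fpu_region_leave(void) { puts("leave FPU region"); }

/*
 * Lazy acquire, modelled on the glue_fpu_begin() call sites above:
 * keep an already-open region open, skip chunks too small to be
 * worth it, otherwise open the region once and report the new
 * state back to the caller.
 */
static bool fpu_begin(bool fpu_enabled, unsigned int nbytes,
		      unsigned int threshold)
{
	if (fpu_enabled)
		return true;
	if (nbytes < threshold)
		return false;
	fpu_region_enter();
	return true;
}

/* Matching release: a no-op unless a region is actually open. */
static void fpu_end(bool fpu_enabled)
{
	if (fpu_enabled)
		fpu_region_leave();
}

int main(void)
{
	unsigned int chunks[] = { 4096, 4096, 64 };
	bool fpu_enabled = false;	/* must start false: the loop may not run */
	unsigned int i;

	for (i = 0; i < 3; i++)		/* stands in for the skcipher walk loop */
		fpu_enabled = fpu_begin(fpu_enabled, chunks[i], 128);

	fpu_end(fpu_enabled);		/* single release after the whole walk */
	return 0;
}
```

Initializing `fpu_enabled = false` is what keeps the final release safe when the walk loop never executes.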
@@ -100,7 +101,7 @@
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	unsigned int nbytes;
 	int err;
 
@@ -114,7+115,7 @@
 		u128 last_iv;
 
 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, false, nbytes);
+					     &walk, fpu_enabled, nbytes);
 		/* Start of the last block. */
 		src += nbytes / bsize - 1;
 		dst += nbytes / bsize - 1;
@@ -147,10 +148,10 @@
 done:
 		u128_xor(dst, dst, (u128 *)walk.iv);
 		*(u128 *)walk.iv = last_iv;
-		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
 
+	glue_fpu_end(fpu_enabled);
 	return err;
 }
 EXPORT_SYMBOL_GPL(glue_cbc_decrypt_req_128bit);
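Judging by the call sites in this diff, the helper pair behaves roughly as sketched below; this is a reconstruction for reading convenience (kernel context assumed), not necessarily the verbatim glue_helper header:

```c
/*
 * Reconstructed from the call sites in this diff. A negative
 * fpu_blocks_limit means the cipher never takes the FPU path;
 * otherwise the region is opened only for chunks of at least
 * fpu_blocks_limit blocks, and kept open once it exists.
 */
static inline bool glue_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
				  struct skcipher_walk *walk,
				  bool fpu_enabled, unsigned int nbytes)
{
	if (fpu_blocks_limit < 0)
		return false;

	if (fpu_enabled)
		return true;

	if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
		return false;

	kernel_fpu_begin();
	return true;
}

static inline void glue_fpu_end(bool fpu_enabled)
{
	if (fpu_enabled)
		kernel_fpu_end();
}
```

With `glue_fpu_end()` moved after the loop, the flag is read even when `skcipher_walk_virt()` yields no data, which is why the `= false` initializer in each hunk above is load-bearing.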
@@ -161,7 +162,7 @@
 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	const unsigned int bsize = 128 / 8;
 	struct skcipher_walk walk;
-	bool fpu_enabled;
+	bool fpu_enabled = false;
 	unsigned int nbytes;
 	int err;
 
@@ -175,7 +176,7 @@
 		le128 ctrblk;
 
 		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, false, nbytes);
+					     &walk, fpu_enabled, nbytes);
 
 		be128_to_le128(&ctrblk, (be128 *)walk.iv);
 
@@ -201,9 +202,10 @@
 		}
 
 		le128_to_be128((be128 *)walk.iv, &ctrblk);
-		glue_fpu_end(fpu_enabled);
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+
+	glue_fpu_end(fpu_enabled);
 
 	if (nbytes) {
 		le128 ctrblk;
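Note that in the CTR path the `if (nbytes)` tail for a final partial block now runs after `glue_fpu_end()`, i.e. outside the FPU region; that is fine because it only encrypts one more counter block and XORs the leftover bytes. A toy sketch of that tail shape (the single-block "cipher" here is a stand-in, not the kernel's):

```c
#include <stdint.h>
#include <stdio.h>

/* Toy single-block "cipher": XOR with a constant, standing in for AES etc. */
static void toy_encrypt_block(uint8_t out[16], const uint8_t in[16])
{
	for (int i = 0; i < 16; i++)
		out[i] = in[i] ^ 0xAA;
}

/*
 * Shape of the CTR tail above: encrypt one more counter block,
 * then XOR only the remaining nbytes (< 16) into the output.
 */
static void ctr_tail(uint8_t *dst, const uint8_t *src, unsigned int nbytes,
		     const uint8_t ctrblk[16])
{
	uint8_t keystream[16];

	toy_encrypt_block(keystream, ctrblk);
	for (unsigned int i = 0; i < nbytes; i++)
		dst[i] = src[i] ^ keystream[i];
}

int main(void)
{
	uint8_t ctr[16] = { 0 }, src[5] = "tail", dst[5];

	ctr_tail(dst, src, 5, ctr);
	printf("%02x %02x\n", dst[0], dst[4]);
	return 0;
}
```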
@@ -304,13 +306,7 @@
 	tweak_fn(tweak_ctx, walk.iv, walk.iv);
 
 	while (nbytes) {
-		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
-					     &walk, fpu_enabled,
-					     nbytes < bsize ? bsize : nbytes);
 		nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);
 
-		glue_fpu_end(fpu_enabled);
-		fpu_enabled = false;
-
 		err = skcipher_walk_done(&walk, nbytes);
 		nbytes = walk.nbytes;