@@ -61,7 +61,7 @@
 
 static int ecb_crypt(struct skcipher_request *req, bool enc)
 {
-        bool fpu_enabled = false;
+        bool fpu_enabled;
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
         struct skcipher_walk walk;
@@ -76,7 +76,7 @@
                 u8 *wsrc = walk.src.virt.addr;
                 u8 *wdst = walk.dst.virt.addr;
 
-                fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
+                fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
 
                 /* Process multi-block batch */
                 if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {
@@ -105,10 +105,9 @@
                 } while (nbytes >= bsize);
 
 done:
+                cast5_fpu_end(fpu_enabled);
                 err = skcipher_walk_done(&walk, nbytes);
         }
-
-        cast5_fpu_end(fpu_enabled);
         return err;
 }
 
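Pulled out of diff context, the new shape of the ECB path looks like the sketch below. This is a simplified paraphrase rather than the file's literal contents: the function name is hypothetical, the CAST5 SIMD body is elided, and only the control flow around the FPU section is kept. The point of the change is that each walk iteration now opens and closes its own FPU section, so `cast5_fpu_end()` always runs before `skcipher_walk_done()`, which may sleep and therefore must not be called with preemption disabled.

```c
/* Simplified paraphrase of the patched ecb_crypt() control flow;
 * ecb_crypt_shape() is a hypothetical name for illustration. */
static int ecb_crypt_shape(struct skcipher_request *req, bool enc)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        bool fpu_enabled;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                /* Open a fresh FPU section for this chunk only; no
                 * state is carried over from earlier iterations. */
                fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);

                /* ... CAST5 SIMD processing of the chunk (elided) ... */

                /* Close the FPU section before skcipher_walk_done(),
                 * which may sleep. */
                cast5_fpu_end(fpu_enabled);
                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}
```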
@@ -212,7 +211,7 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
-        bool fpu_enabled = false;
+        bool fpu_enabled;
         struct skcipher_walk walk;
         unsigned int nbytes;
         int err;
@@ -220,12 +219,11 @@
         err = skcipher_walk_virt(&walk, req, false);
 
         while ((nbytes = walk.nbytes)) {
-                fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
+                fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
                 nbytes = __cbc_decrypt(ctx, &walk);
+                cast5_fpu_end(fpu_enabled);
                 err = skcipher_walk_done(&walk, nbytes);
         }
-
-        cast5_fpu_end(fpu_enabled);
         return err;
 }
 
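The CBC decrypt hunk applies the same transformation as the ECB one. The helpers themselves sit outside the visible hunks, so as a mental model only (an assumption about their behavior, not the file's actual code), `cast5_fpu_begin()` can be thought of as entering an FPU section via `kernel_fpu_begin()` only when enough data is queued to amortize the cost, and `cast5_fpu_end()` as the matching conditional `kernel_fpu_end()`:

```c
#include <asm/fpu/api.h>        /* kernel_fpu_begin(), kernel_fpu_end() */

/* Assumed model of the helpers, for illustration only; the walk
 * argument is unused in this simplified version. */
static inline bool cast5_fpu_begin(bool fpu_enabled,
                                   struct skcipher_walk *walk,
                                   unsigned int nbytes)
{
        if (fpu_enabled)
                return true;    /* already inside an FPU section */
        if (nbytes < CAST5_BLOCK_SIZE * CAST5_PARALLEL_BLOCKS)
                return false;   /* too little data; use the scalar path */
        kernel_fpu_begin();
        return true;
}

static inline void cast5_fpu_end(bool fpu_enabled)
{
        if (fpu_enabled)
                kernel_fpu_end();
}
```

Under this model, passing a literal `false` into every `cast5_fpu_begin()` call is what makes the per-iteration scoping sound: an iteration can never appear to inherit an FPU section that the previous iteration already closed.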
@@ -292,7 +290,7 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
-        bool fpu_enabled = false;
+        bool fpu_enabled;
         struct skcipher_walk walk;
         unsigned int nbytes;
         int err;
@@ -300,12 +298,11 @@
         err = skcipher_walk_virt(&walk, req, false);
 
         while ((nbytes = walk.nbytes) >= CAST5_BLOCK_SIZE) {
-                fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
+                fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
                 nbytes = __ctr_crypt(&walk, ctx);
+                cast5_fpu_end(fpu_enabled);
                 err = skcipher_walk_done(&walk, nbytes);
         }
-
-        cast5_fpu_end(fpu_enabled);
 
         if (walk.nbytes) {
                 ctr_crypt_final(&walk, ctx);
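The CTR path follows the same pattern, with one extra detail visible at the end of the hunk: anything shorter than `CAST5_BLOCK_SIZE` falls out of the main loop and is finished by `ctr_crypt_final()` with no FPU section at all. A simplified paraphrase as before (the function name is hypothetical, and the final `skcipher_walk_done()` call after the tail is an assumption, since the hunk ends before it):

```c
/* Simplified paraphrase of the patched ctr_crypt() control flow;
 * ctr_crypt_shape() is a hypothetical name for illustration. */
static int ctr_crypt_shape(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        bool fpu_enabled;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        /* Full blocks: SIMD work inside a per-chunk FPU section. */
        while ((nbytes = walk.nbytes) >= CAST5_BLOCK_SIZE) {
                fpu_enabled = cast5_fpu_begin(false, &walk, nbytes);
                nbytes = __ctr_crypt(&walk, ctx);
                cast5_fpu_end(fpu_enabled);     /* close before sleeping */
                err = skcipher_walk_done(&walk, nbytes);
        }

        /* Partial tail block: handled scalar, no FPU section needed. */
        if (walk.nbytes) {
                ctr_crypt_final(&walk, ctx);
                err = skcipher_walk_done(&walk, 0);     /* assumed */
        }
        return err;
}
```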