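The hunks below appear to come from the x86 AES-NI skcipher glue in the Linux kernel (most likely `arch/x86/crypto/aesni-intel_glue.c`), touching the ECB, CBC and CTR handlers in turn. The change is the same in each function: the `kernel_fpu_begin()`/`kernel_fpu_end()` pair moves from around the whole `skcipher_walk` loop to inside it, so the FPU is claimed only while the AES-NI routine is processing one chunk and is released again before `skcipher_walk_done()` does its bookkeeping. Because `kernel_fpu_begin()` also disables preemption, this bounds the non-preemptible region to a single walk chunk rather than the entire request.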
```diff
@@ -434,14 +434,14 @@
 
 	err = skcipher_walk_virt(&walk, req, true);
 
-	kernel_fpu_begin();
 	while ((nbytes = walk.nbytes)) {
+		kernel_fpu_begin();
 		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 			      nbytes & AES_BLOCK_MASK);
+		kernel_fpu_end();
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = skcipher_walk_done(&walk, nbytes);
 	}
-	kernel_fpu_end();
 
 	return err;
 }
```
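For orientation, here is roughly what `ecb_encrypt()` looks like once this hunk is applied. The declarations above the walk are assumed context (the usual shape of this function in `aesni-intel_glue.c`, including the `aes_ctx()` helper); only the loop body comes from the hunk itself.

```c
/* Sketch of the patched ecb_encrypt(); the declarations are assumed
 * context, not part of the hunk. */
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	while ((nbytes = walk.nbytes)) {
		kernel_fpu_begin();	/* claim the FPU for the SIMD work only */
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		kernel_fpu_end();	/* release it before the walk bookkeeping */
		nbytes &= AES_BLOCK_SIZE - 1;	/* leftover partial block, if any */
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
```

The next three hunks give `ecb_decrypt()`, `cbc_encrypt()` and `cbc_decrypt()` the identical treatment; only the underlying assembly routine changes (plus the `walk.iv` argument in the CBC cases).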
```diff
@@ -456,14 +456,14 @@
 
 	err = skcipher_walk_virt(&walk, req, true);
 
-	kernel_fpu_begin();
 	while ((nbytes = walk.nbytes)) {
+		kernel_fpu_begin();
 		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 			      nbytes & AES_BLOCK_MASK);
+		kernel_fpu_end();
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = skcipher_walk_done(&walk, nbytes);
 	}
-	kernel_fpu_end();
 
 	return err;
 }
```
```diff
@@ -478,14 +478,14 @@
 
 	err = skcipher_walk_virt(&walk, req, true);
 
-	kernel_fpu_begin();
 	while ((nbytes = walk.nbytes)) {
+		kernel_fpu_begin();
 		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 			      nbytes & AES_BLOCK_MASK, walk.iv);
+		kernel_fpu_end();
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = skcipher_walk_done(&walk, nbytes);
 	}
-	kernel_fpu_end();
 
 	return err;
 }
```
```diff
@@ -500,14 +500,14 @@
 
 	err = skcipher_walk_virt(&walk, req, true);
 
-	kernel_fpu_begin();
 	while ((nbytes = walk.nbytes)) {
+		kernel_fpu_begin();
 		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 			      nbytes & AES_BLOCK_MASK, walk.iv);
+		kernel_fpu_end();
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = skcipher_walk_done(&walk, nbytes);
 	}
-	kernel_fpu_end();
 
 	return err;
 }
```
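One point worth spelling out for the two CBC hunks: ending the FPU section between chunks cannot lose chaining state, since the running IV lives in ordinary memory behind `walk.iv` and is passed to the assembly routine on every call; `kernel_fpu_begin()`/`kernel_fpu_end()` only manage the task's FPU register state.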
```diff
@@ -557,18 +557,20 @@
 
 	err = skcipher_walk_virt(&walk, req, true);
 
-	kernel_fpu_begin();
 	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
+		kernel_fpu_begin();
 		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 				  nbytes & AES_BLOCK_MASK, walk.iv);
+		kernel_fpu_end();
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = skcipher_walk_done(&walk, nbytes);
 	}
 	if (walk.nbytes) {
+		kernel_fpu_begin();
 		ctr_crypt_final(ctx, &walk);
+		kernel_fpu_end();
 		err = skcipher_walk_done(&walk, 0);
 	}
-	kernel_fpu_end();
 
 	return err;
 }
```
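The CTR hunk is the one place where the move is more than mechanical. The trailing partial block handled by `ctr_crypt_final()` after the main loop used to sit inside the single outer FPU section, so it now needs a `kernel_fpu_begin()`/`kernel_fpu_end()` pair of its own (presumably because `ctr_crypt_final()` also encrypts the last counter block with AES-NI instructions). A sketch of the resulting function, with the declarations again assumed rather than shown in the hunk:

```c
/* Sketch of the patched ctr_crypt(); declarations are assumed context. */
static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	/* Full blocks: one FPU section per walk chunk. */
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		kernel_fpu_begin();
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		kernel_fpu_end();
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	/* The trailing partial block now gets its own FPU section. */
	if (walk.nbytes) {
		kernel_fpu_begin();
		ctr_crypt_final(ctx, &walk);
		kernel_fpu_end();
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}
```

With this shape every iteration holds the FPU for exactly one call into the assembly, and `skcipher_walk_done()` always runs with the FPU released, matching the four functions above.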