2023-12-11 6778948f9de86c3cfaf36725a7c87dcff9ba247f
kernel/arch/x86/crypto/aesni-intel_glue.c
@@ -379,14 +379,14 @@

 	err = skcipher_walk_virt(&walk, req, true);

+	kernel_fpu_begin();
 	while ((nbytes = walk.nbytes)) {
-		kernel_fpu_begin();
 		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 			      nbytes & AES_BLOCK_MASK);
-		kernel_fpu_end();
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+	kernel_fpu_end();

 	return err;
 }
@@ -401,14 +401,14 @@

 	err = skcipher_walk_virt(&walk, req, true);

+	kernel_fpu_begin();
 	while ((nbytes = walk.nbytes)) {
-		kernel_fpu_begin();
 		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 			      nbytes & AES_BLOCK_MASK);
-		kernel_fpu_end();
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+	kernel_fpu_end();

 	return err;
 }
@@ -423,14 +423,14 @@

 	err = skcipher_walk_virt(&walk, req, true);

+	kernel_fpu_begin();
 	while ((nbytes = walk.nbytes)) {
-		kernel_fpu_begin();
 		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 			      nbytes & AES_BLOCK_MASK, walk.iv);
-		kernel_fpu_end();
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+	kernel_fpu_end();

 	return err;
 }
@@ -445,14 +445,14 @@

 	err = skcipher_walk_virt(&walk, req, true);

+	kernel_fpu_begin();
 	while ((nbytes = walk.nbytes)) {
-		kernel_fpu_begin();
 		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 			      nbytes & AES_BLOCK_MASK, walk.iv);
-		kernel_fpu_end();
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = skcipher_walk_done(&walk, nbytes);
 	}
+	kernel_fpu_end();

 	return err;
 }
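
The four hunks above make the same change in ecb_encrypt(), ecb_decrypt(), cbc_encrypt(), and cbc_decrypt(): kernel_fpu_begin() is hoisted out of the walk loop and kernel_fpu_end() is deferred until after it, so the FPU/SIMD context is taken once per request instead of once per walk chunk. As a minimal sketch of the resulting shape, here is how ecb_encrypt() would read after the patch; the aes_ctx() helper and the asmlinkage aesni_ecb_enc() prototype are assumed to be defined elsewhere in aesni-intel_glue.c, so this illustrates the control flow rather than being a drop-in replacement.

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();		/* one FPU section for the whole request */
	while ((nbytes = walk.nbytes)) {
		/* encrypt the whole-block portion mapped by this walk step */
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();		/* released only after the last chunk */

	return err;
}

Note that skcipher_walk_virt() is already called with atomic set to true, which fits this layout: kernel_fpu_begin() disables preemption, so the walk must not sleep while skcipher_walk_done() runs inside the FPU section.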
@@ -500,20 +500,18 @@

 	err = skcipher_walk_virt(&walk, req, true);

+	kernel_fpu_begin();
 	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
-		kernel_fpu_begin();
 		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 				  nbytes & AES_BLOCK_MASK, walk.iv);
-		kernel_fpu_end();
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = skcipher_walk_done(&walk, nbytes);
 	}
 	if (walk.nbytes) {
-		kernel_fpu_begin();
 		ctr_crypt_final(ctx, &walk);
-		kernel_fpu_end();
 		err = skcipher_walk_done(&walk, 0);
 	}
+	kernel_fpu_end();

 	return err;
 }
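
The ctr_crypt() hunk follows the same pattern with one extra detail: the loop consumes only whole AES blocks, and a trailing partial block is finished by ctr_crypt_final() before the single kernel_fpu_end(). A sketch of the resulting flow, again assuming the surrounding helpers (aes_ctx(), aesni_ctr_enc_tfm(), ctr_crypt_final()) are declared elsewhere in this file:

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	/* full blocks first: each walk step handles a whole number of blocks */
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	/* any final partial block is handled inside the same FPU section */
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}

Both before and after the change, every aesni_* call still runs between kernel_fpu_begin() and kernel_fpu_end(); what the patch changes is only how long that section stays open.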