2024-05-10 748e4f3d702def1a4bff191e0cf93b6a05340f01
kernel/arch/sparc/crypto/aes_glue.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /* Glue code for AES encryption optimized for sparc64 crypto opcodes.
  *
  * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
@@ -23,6 +24,7 @@
 #include <linux/types.h>
 #include <crypto/algapi.h>
 #include <crypto/aes.h>
+#include <crypto/internal/skcipher.h>
 
 #include <asm/fpumacro.h>
 #include <asm/pstate.h>
@@ -167,7 +169,6 @@
 		       unsigned int key_len)
 {
 	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-	u32 *flags = &tfm->crt_flags;
 
 	switch (key_len) {
 	case AES_KEYSIZE_128:
@@ -186,7 +187,6 @@
 		break;
 
 	default:
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 		return -EINVAL;
 	}
 
@@ -196,145 +196,128 @@
 	return 0;
 }
 
-static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
+				unsigned int key_len)
+{
+	return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
+}
+
+static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
 }
 
-static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
 }
 
-#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))
-
-static int ecb_encrypt(struct blkcipher_desc *desc,
-		       struct scatterlist *dst, struct scatterlist *src,
-		       unsigned int nbytes)
+static int ecb_encrypt(struct skcipher_request *req)
 {
-	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
+	unsigned int nbytes;
 	int err;
 
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = skcipher_walk_virt(&walk, req, true);
+	if (err)
+		return err;
 
 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
-	while ((nbytes = walk.nbytes)) {
-		unsigned int block_len = nbytes & AES_BLOCK_MASK;
-
-		if (likely(block_len)) {
-			ctx->ops->ecb_encrypt(&ctx->key[0],
-					      (const u64 *)walk.src.virt.addr,
-					      (u64 *) walk.dst.virt.addr,
-					      block_len);
-		}
-		nbytes &= AES_BLOCK_SIZE - 1;
-		err = blkcipher_walk_done(desc, &walk, nbytes);
+	while ((nbytes = walk.nbytes) != 0) {
+		ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr,
+				      walk.dst.virt.addr,
+				      round_down(nbytes, AES_BLOCK_SIZE));
+		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
 	}
 	fprs_write(0);
 	return err;
 }
 
-static int ecb_decrypt(struct blkcipher_desc *desc,
-		       struct scatterlist *dst, struct scatterlist *src,
-		       unsigned int nbytes)
+static int ecb_decrypt(struct skcipher_request *req)
 {
-	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-	u64 *key_end;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	const u64 *key_end;
+	struct skcipher_walk walk;
+	unsigned int nbytes;
 	int err;
 
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = skcipher_walk_virt(&walk, req, true);
+	if (err)
+		return err;
 
 	ctx->ops->load_decrypt_keys(&ctx->key[0]);
 	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
-	while ((nbytes = walk.nbytes)) {
-		unsigned int block_len = nbytes & AES_BLOCK_MASK;
-
-		if (likely(block_len)) {
-			ctx->ops->ecb_decrypt(key_end,
-					      (const u64 *) walk.src.virt.addr,
-					      (u64 *) walk.dst.virt.addr, block_len);
-		}
-		nbytes &= AES_BLOCK_SIZE - 1;
-		err = blkcipher_walk_done(desc, &walk, nbytes);
+	while ((nbytes = walk.nbytes) != 0) {
+		ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr,
+				      walk.dst.virt.addr,
+				      round_down(nbytes, AES_BLOCK_SIZE));
+		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
 	}
 	fprs_write(0);
 
 	return err;
 }
 
-static int cbc_encrypt(struct blkcipher_desc *desc,
-		       struct scatterlist *dst, struct scatterlist *src,
-		       unsigned int nbytes)
+static int cbc_encrypt(struct skcipher_request *req)
 {
-	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
+	unsigned int nbytes;
 	int err;
 
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = skcipher_walk_virt(&walk, req, true);
+	if (err)
+		return err;
 
 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
-	while ((nbytes = walk.nbytes)) {
-		unsigned int block_len = nbytes & AES_BLOCK_MASK;
-
-		if (likely(block_len)) {
-			ctx->ops->cbc_encrypt(&ctx->key[0],
-					      (const u64 *)walk.src.virt.addr,
-					      (u64 *) walk.dst.virt.addr,
-					      block_len, (u64 *) walk.iv);
-		}
-		nbytes &= AES_BLOCK_SIZE - 1;
-		err = blkcipher_walk_done(desc, &walk, nbytes);
+	while ((nbytes = walk.nbytes) != 0) {
+		ctx->ops->cbc_encrypt(&ctx->key[0], walk.src.virt.addr,
+				      walk.dst.virt.addr,
+				      round_down(nbytes, AES_BLOCK_SIZE),
+				      walk.iv);
+		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
 	}
 	fprs_write(0);
 	return err;
 }
 
-static int cbc_decrypt(struct blkcipher_desc *desc,
-		       struct scatterlist *dst, struct scatterlist *src,
-		       unsigned int nbytes)
+static int cbc_decrypt(struct skcipher_request *req)
 {
-	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-	u64 *key_end;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	const u64 *key_end;
+	struct skcipher_walk walk;
+	unsigned int nbytes;
 	int err;
 
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = skcipher_walk_virt(&walk, req, true);
+	if (err)
+		return err;
 
 	ctx->ops->load_decrypt_keys(&ctx->key[0]);
 	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
-	while ((nbytes = walk.nbytes)) {
-		unsigned int block_len = nbytes & AES_BLOCK_MASK;
-
-		if (likely(block_len)) {
-			ctx->ops->cbc_decrypt(key_end,
-					      (const u64 *) walk.src.virt.addr,
-					      (u64 *) walk.dst.virt.addr,
-					      block_len, (u64 *) walk.iv);
-		}
-		nbytes &= AES_BLOCK_SIZE - 1;
-		err = blkcipher_walk_done(desc, &walk, nbytes);
+	while ((nbytes = walk.nbytes) != 0) {
+		ctx->ops->cbc_decrypt(key_end, walk.src.virt.addr,
+				      walk.dst.virt.addr,
+				      round_down(nbytes, AES_BLOCK_SIZE),
+				      walk.iv);
+		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
 	}
 	fprs_write(0);
 
 	return err;
 }
 
-static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
-			    struct blkcipher_walk *walk)
+static void ctr_crypt_final(const struct crypto_sparc64_aes_ctx *ctx,
+			    struct skcipher_walk *walk)
 {
 	u8 *ctrblk = walk->iv;
 	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
@@ -348,40 +331,35 @@
 	crypto_inc(ctrblk, AES_BLOCK_SIZE);
 }
 
-static int ctr_crypt(struct blkcipher_desc *desc,
-		     struct scatterlist *dst, struct scatterlist *src,
-		     unsigned int nbytes)
+static int ctr_crypt(struct skcipher_request *req)
 {
-	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
+	unsigned int nbytes;
 	int err;
 
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = skcipher_walk_virt(&walk, req, true);
+	if (err)
+		return err;
 
 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
 	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
-		unsigned int block_len = nbytes & AES_BLOCK_MASK;
-
-		if (likely(block_len)) {
-			ctx->ops->ctr_crypt(&ctx->key[0],
-					    (const u64 *)walk.src.virt.addr,
-					    (u64 *) walk.dst.virt.addr,
-					    block_len, (u64 *) walk.iv);
-		}
-		nbytes &= AES_BLOCK_SIZE - 1;
-		err = blkcipher_walk_done(desc, &walk, nbytes);
+		ctx->ops->ctr_crypt(&ctx->key[0], walk.src.virt.addr,
+				    walk.dst.virt.addr,
+				    round_down(nbytes, AES_BLOCK_SIZE),
+				    walk.iv);
+		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
 	}
 	if (walk.nbytes) {
 		ctr_crypt_final(ctx, &walk);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = skcipher_walk_done(&walk, 0);
 	}
 	fprs_write(0);
 	return err;
 }
 
-static struct crypto_alg algs[] = { {
+static struct crypto_alg cipher_alg = {
 	.cra_name		= "aes",
 	.cra_driver_name	= "aes-sparc64",
 	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
@@ -395,70 +373,57 @@
 			.cia_min_keysize	= AES_MIN_KEY_SIZE,
 			.cia_max_keysize	= AES_MAX_KEY_SIZE,
 			.cia_setkey		= aes_set_key,
-			.cia_encrypt		= aes_encrypt,
-			.cia_decrypt		= aes_decrypt
+			.cia_encrypt		= crypto_aes_encrypt,
+			.cia_decrypt		= crypto_aes_decrypt
 		}
 	}
-}, {
-	.cra_name		= "ecb(aes)",
-	.cra_driver_name	= "ecb-aes-sparc64",
-	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_u = {
-		.blkcipher = {
-			.min_keysize	= AES_MIN_KEY_SIZE,
-			.max_keysize	= AES_MAX_KEY_SIZE,
-			.setkey		= aes_set_key,
-			.encrypt	= ecb_encrypt,
-			.decrypt	= ecb_decrypt,
-		},
-	},
-}, {
-	.cra_name		= "cbc(aes)",
-	.cra_driver_name	= "cbc-aes-sparc64",
-	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_u = {
-		.blkcipher = {
-			.min_keysize	= AES_MIN_KEY_SIZE,
-			.max_keysize	= AES_MAX_KEY_SIZE,
-			.ivsize		= AES_BLOCK_SIZE,
-			.setkey		= aes_set_key,
-			.encrypt	= cbc_encrypt,
-			.decrypt	= cbc_decrypt,
-		},
-	},
-}, {
-	.cra_name		= "ctr(aes)",
-	.cra_driver_name	= "ctr-aes-sparc64",
-	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize		= 1,
-	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_u = {
-		.blkcipher = {
-			.min_keysize	= AES_MIN_KEY_SIZE,
-			.max_keysize	= AES_MAX_KEY_SIZE,
-			.ivsize		= AES_BLOCK_SIZE,
-			.setkey		= aes_set_key,
-			.encrypt	= ctr_crypt,
-			.decrypt	= ctr_crypt,
-		},
-	},
-} };
+};
+
+static struct skcipher_alg skcipher_algs[] = {
+	{
+		.base.cra_name		= "ecb(aes)",
+		.base.cra_driver_name	= "ecb-aes-sparc64",
+		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
+		.base.cra_blocksize	= AES_BLOCK_SIZE,
+		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
+		.base.cra_alignmask	= 7,
+		.base.cra_module	= THIS_MODULE,
+		.min_keysize		= AES_MIN_KEY_SIZE,
+		.max_keysize		= AES_MAX_KEY_SIZE,
+		.setkey			= aes_set_key_skcipher,
+		.encrypt		= ecb_encrypt,
+		.decrypt		= ecb_decrypt,
+	}, {
+		.base.cra_name		= "cbc(aes)",
+		.base.cra_driver_name	= "cbc-aes-sparc64",
+		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
+		.base.cra_blocksize	= AES_BLOCK_SIZE,
+		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
+		.base.cra_alignmask	= 7,
+		.base.cra_module	= THIS_MODULE,
+		.min_keysize		= AES_MIN_KEY_SIZE,
+		.max_keysize		= AES_MAX_KEY_SIZE,
+		.ivsize			= AES_BLOCK_SIZE,
+		.setkey			= aes_set_key_skcipher,
+		.encrypt		= cbc_encrypt,
+		.decrypt		= cbc_decrypt,
+	}, {
+		.base.cra_name		= "ctr(aes)",
+		.base.cra_driver_name	= "ctr-aes-sparc64",
+		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
+		.base.cra_blocksize	= 1,
+		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
+		.base.cra_alignmask	= 7,
+		.base.cra_module	= THIS_MODULE,
+		.min_keysize		= AES_MIN_KEY_SIZE,
+		.max_keysize		= AES_MAX_KEY_SIZE,
+		.ivsize			= AES_BLOCK_SIZE,
+		.setkey			= aes_set_key_skcipher,
+		.encrypt		= ctr_crypt,
+		.decrypt		= ctr_crypt,
+		.chunksize		= AES_BLOCK_SIZE,
+	}
+};
 
 static bool __init sparc64_has_aes_opcode(void)
 {
@@ -476,22 +441,27 @@
 
 static int __init aes_sparc64_mod_init(void)
 {
-	int i;
+	int err;
 
-	for (i = 0; i < ARRAY_SIZE(algs); i++)
-		INIT_LIST_HEAD(&algs[i].cra_list);
-
-	if (sparc64_has_aes_opcode()) {
-		pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
-		return crypto_register_algs(algs, ARRAY_SIZE(algs));
+	if (!sparc64_has_aes_opcode()) {
+		pr_info("sparc64 aes opcodes not available.\n");
+		return -ENODEV;
 	}
-	pr_info("sparc64 aes opcodes not available.\n");
-	return -ENODEV;
+	pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
+	err = crypto_register_alg(&cipher_alg);
+	if (err)
+		return err;
+	err = crypto_register_skciphers(skcipher_algs,
+					ARRAY_SIZE(skcipher_algs));
+	if (err)
+		crypto_unregister_alg(&cipher_alg);
+	return err;
 }
 
 static void __exit aes_sparc64_mod_fini(void)
 {
-	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
+	crypto_unregister_alg(&cipher_alg);
+	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
 }
 
 module_init(aes_sparc64_mod_init);
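
For reference, a minimal sketch (not part of the patch) of how a caller might reach the converted modes through the kernel's skcipher API once this driver is registered. It assumes a synchronous one-shot request issued from process context on a linear, kmalloc'd buffer; the function name and buffer handling are illustrative only.

/* Hypothetical usage sketch: one-shot CBC-AES encryption via the skcipher
 * API.  On sparc64 with the crypto opcodes, "cbc(aes)" resolves to the
 * cbc-aes-sparc64 implementation registered above.
 */
#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

static int cbc_aes_encrypt_once(u8 *buf, unsigned int len,
				const u8 *key, unsigned int keylen,
				u8 iv[AES_BLOCK_SIZE])
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* Pick the highest-priority "cbc(aes)" provider. */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* buf must be a linear kernel buffer (e.g. kmalloc'd), and len a
	 * multiple of AES_BLOCK_SIZE for CBC. */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
				      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* Wait for completion even if an async provider is selected. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}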