2024-12-19 9370bb92b2d16684ee45cf24e879c93c509162da
kernel/crypto/algapi.c
@@ -1,13 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Cryptographic API for algorithms (i.e., low-level API).
  *
  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
  */
 
 #include <crypto/algapi.h>
@@ -26,23 +21,6 @@
 
 static LIST_HEAD(crypto_template_list);
 
-static inline int crypto_set_driver_name(struct crypto_alg *alg)
-{
-	static const char suffix[] = "-generic";
-	char *driver_name = alg->cra_driver_name;
-	int len;
-
-	if (*driver_name)
-		return 0;
-
-	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
-	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
-		return -ENAMETOOLONG;
-
-	memcpy(driver_name + len, suffix, sizeof(suffix));
-	return 0;
-}
-
 static inline void crypto_check_module_sig(struct module *mod)
 {
 	if (fips_enabled && mod && !module_sig_ok(mod))
@@ -54,12 +32,20 @@
 {
 	crypto_check_module_sig(alg->cra_module);
 
+	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
+		return -EINVAL;
+
 	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
 		return -EINVAL;
 
-	if (alg->cra_blocksize > PAGE_SIZE / 8)
+	/* General maximums for all algs. */
+	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
 		return -EINVAL;
 
+	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
+		return -EINVAL;
+
+	/* Lower maximums for specific alg types. */
 	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			      CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
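
A note on the new checks above: with crypto_set_driver_name() removed, crypto_check_alg() no longer fills in a default "-generic" driver name, so a registering module now has to provide both cra_name and cra_driver_name itself or registration fails with -EINVAL. A minimal, hedged sketch of what that looks like on the driver side (the "demo" names and the priority are illustrative, and the type-specific fields are elided):

#include <linux/crypto.h>
#include <linux/module.h>

static struct crypto_alg demo_alg = {
	.cra_name		= "demo",		/* generic algorithm name */
	.cra_driver_name	= "demo-generic",	/* now mandatory; no auto "-generic" suffix */
	.cra_priority		= 100,
	.cra_module		= THIS_MODULE,
	/* type-specific fields (flags, blocksize, ctxsize, ops) omitted */
};

static int __init demo_mod_init(void)
{
	/* crypto_check_alg() rejects this with -EINVAL if either name is empty */
	return crypto_register_alg(&demo_alg);
}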
@@ -74,16 +60,11 @@
 
 	refcount_set(&alg->cra_refcnt, 1);
 
-	return crypto_set_driver_name(alg);
+	return 0;
 }
 
 static void crypto_free_instance(struct crypto_instance *inst)
 {
-	if (!inst->alg.cra_type->free) {
-		inst->tmpl->free(inst);
-		return;
-	}
-
 	inst->alg.cra_type->free(inst);
 }
 
@@ -96,6 +77,15 @@
 	crypto_tmpl_put(tmpl);
 }
 
+/*
+ * This function adds a spawn to the list secondary_spawns which
+ * will be used at the end of crypto_remove_spawns to unregister
+ * instances, unless the spawn happens to be one that is depended
+ * on by the new algorithm (nalg in crypto_remove_spawns).
+ *
+ * This function is also responsible for resurrecting any algorithms
+ * in the dependency chain of nalg by unsetting n->dead.
+ */
 static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
@@ -107,15 +97,17 @@
 	if (!spawn)
 		return NULL;
 
-	n = list_next_entry(spawn, list);
-
-	if (spawn->alg && &n->list != stack && !n->alg)
-		n->alg = (n->list.next == stack) ? alg :
-			 &list_next_entry(n, list)->inst->alg;
-
+	n = list_prev_entry(spawn, list);
 	list_move(&spawn->list, secondary_spawns);
 
-	return &n->list == stack ? top : &n->inst->alg.cra_users;
+	if (list_is_last(&n->list, stack))
+		return top;
+
+	n = list_next_entry(n, list);
+	if (!spawn->dead)
+		n->dead = false;
+
+	return &n->inst->alg.cra_users;
 }
 
 static void crypto_remove_instance(struct crypto_instance *inst,
@@ -127,8 +119,6 @@
 		return;
 
 	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
-	if (hlist_unhashed(&inst->list))
-		return;
 
 	if (!tmpl || !crypto_tmpl_get(tmpl))
 		return;
@@ -140,6 +130,12 @@
 	BUG_ON(!list_empty(&inst->alg.cra_users));
 }
 
+/*
+ * Given an algorithm alg, remove all algorithms that depend on it
+ * through spawns. If nalg is not null, then exempt any algorithms
+ * that is depended on by nalg. This is useful when nalg itself
+ * depends on alg.
+ */
 void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
 {
@@ -158,6 +154,11 @@
 		list_move(&spawn->list, &top);
 	}
 
+	/*
+	 * Perform a depth-first walk starting from alg through
+	 * the cra_users tree. The list stack records the path
+	 * from alg to the current spawn.
+	 */
 	spawns = &top;
 	do {
 		while (!list_empty(spawns)) {
@@ -167,17 +168,26 @@
						 list);
			inst = spawn->inst;
 
-			BUG_ON(&inst->alg == alg);
-
 			list_move(&spawn->list, &stack);
+			spawn->dead = !spawn->registered || &inst->alg != nalg;
+
+			if (!spawn->registered)
+				break;
+
+			BUG_ON(&inst->alg == alg);
 
 			if (&inst->alg == nalg)
 				break;
 
-			spawn->alg = NULL;
 			spawns = &inst->alg.cra_users;
 
 			/*
+			 * Even if spawn->registered is true, the
+			 * instance itself may still be unregistered.
+			 * This is because it may have failed during
+			 * registration. Therefore we still need to
+			 * make the following test.
+			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered. An unregistered instance will have
@@ -192,10 +202,15 @@
 	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));
 
+	/*
+	 * Remove all instances that are marked as dead. Also
+	 * complete the resurrection of the others by moving them
+	 * back to the cra_users list.
+	 */
 	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
-		if (spawn->alg)
+		if (!spawn->dead)
 			list_move(&spawn->list, &spawn->alg->cra_users);
-		else
+		else if (spawn->registered)
 			crypto_remove_instance(spawn->inst, list);
 	}
 }
@@ -253,6 +268,8 @@
 	list_add(&alg->cra_list, &crypto_alg_list);
 	list_add(&larval->alg.cra_list, &crypto_alg_list);
 
+	crypto_stats_init(alg);
+
 out:
 	return larval;
 
@@ -269,6 +286,7 @@
 	struct crypto_alg *alg;
 	struct crypto_alg *q;
 	LIST_HEAD(list);
+	bool best;
 
 	down_write(&crypto_alg_sem);
 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
@@ -291,6 +309,21 @@
 		goto complete;
 
 	alg->cra_flags |= CRYPTO_ALG_TESTED;
+
+	/* Only satisfy larval waiters if we are the best. */
+	best = true;
+	list_for_each_entry(q, &crypto_alg_list, cra_list) {
+		if (crypto_is_moribund(q) || !crypto_is_larval(q))
+			continue;
+
+		if (strcmp(alg->cra_name, q->cra_name))
+			continue;
+
+		if (q->cra_priority > alg->cra_priority) {
+			best = false;
+			break;
+		}
+	}
 
 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 		if (q == alg)
@@ -315,10 +348,12 @@
 				continue;
 			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
 				continue;
-			if (!crypto_mod_get(alg))
-				continue;
 
-			larval->adult = alg;
+			if (best && crypto_mod_get(alg))
+				larval->adult = alg;
+			else
+				larval->adult = ERR_PTR(-EAGAIN);
+
 			continue;
 		}
 
@@ -367,6 +402,8 @@
 
 	err = wait_for_completion_killable(&larval->completion);
 	WARN_ON(err);
+	if (!err)
+		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);
 
 out:
 	crypto_larval_kill(&larval->alg);
@@ -407,7 +444,7 @@
 	return 0;
 }
 
-int crypto_unregister_alg(struct crypto_alg *alg)
+void crypto_unregister_alg(struct crypto_alg *alg)
 {
 	int ret;
 	LIST_HEAD(list);
@@ -416,15 +453,16 @@
 	ret = crypto_remove_alg(alg, &list);
 	up_write(&crypto_alg_sem);
 
-	if (ret)
-		return ret;
+	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
+		return;
 
-	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
+	if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
+		return;
+
 	if (alg->cra_destroy)
 		alg->cra_destroy(alg);
 
 	crypto_remove_final(&list);
-	return 0;
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_alg);
 
@@ -448,18 +486,12 @@
 }
 EXPORT_SYMBOL_GPL(crypto_register_algs);
 
-int crypto_unregister_algs(struct crypto_alg *algs, int count)
+void crypto_unregister_algs(struct crypto_alg *algs, int count)
 {
-	int i, ret;
+	int i;
 
-	for (i = 0; i < count; i++) {
-		ret = crypto_unregister_alg(&algs[i]);
-		if (ret)
-			pr_err("Failed to unregister %s %s: %d\n",
-			       algs[i].cra_driver_name, algs[i].cra_name, ret);
-	}
-
-	return 0;
+	for (i = 0; i < count; i++)
+		crypto_unregister_alg(&algs[i]);
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_algs);
 
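Correspondingly, crypto_unregister_alg() and crypto_unregister_algs() no longer return an error; problems are reported with a WARN instead, so callers simply drop their error handling. A hypothetical module exit path (demo_algs[] is assumed to be an array registered earlier with crypto_register_algs()):

static void __exit demo_mod_exit(void)
{
	/* nothing to check any more; failures only trigger a WARN */
	crypto_unregister_algs(demo_algs, ARRAY_SIZE(demo_algs));
}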
@@ -484,6 +516,24 @@
 	return err;
 }
 EXPORT_SYMBOL_GPL(crypto_register_template);
+
+int crypto_register_templates(struct crypto_template *tmpls, int count)
+{
+	int i, err;
+
+	for (i = 0; i < count; i++) {
+		err = crypto_register_template(&tmpls[i]);
+		if (err)
+			goto out;
+	}
+	return 0;
+
+out:
+	for (--i; i >= 0; --i)
+		crypto_unregister_template(&tmpls[i]);
+	return err;
+}
+EXPORT_SYMBOL_GPL(crypto_register_templates);
 
 void crypto_unregister_template(struct crypto_template *tmpl)
 {
@@ -513,6 +563,15 @@
 	crypto_remove_final(&users);
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_template);
+
+void crypto_unregister_templates(struct crypto_template *tmpls, int count)
+{
+	int i;
+
+	for (i = count - 1; i >= 0; --i)
+		crypto_unregister_template(&tmpls[i]);
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_templates);
 
 static struct crypto_template *__crypto_lookup_template(const char *name)
 {
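
The two bulk helpers added above let a module that provides several templates manage them as a single array, with crypto_register_templates() unwinding any already-registered entries if a later one fails. A hedged sketch (the template names and the ->create() callbacks are hypothetical):

static int demo_wrap_create(struct crypto_template *tmpl, struct rtattr **tb);	/* hypothetical */
static int demo_mix_create(struct crypto_template *tmpl, struct rtattr **tb);	/* hypothetical */

static struct crypto_template demo_tmpls[] = {
	{
		.name	= "demo_wrap",
		.create	= demo_wrap_create,
		.module	= THIS_MODULE,
	}, {
		.name	= "demo_mix",
		.create	= demo_mix_create,
		.module	= THIS_MODULE,
	},
};

static int __init demo_tmpl_init(void)
{
	return crypto_register_templates(demo_tmpls, ARRAY_SIZE(demo_tmpls));
}

static void __exit demo_tmpl_exit(void)
{
	/* unregisters in reverse order, mirroring the rollback path above */
	crypto_unregister_templates(demo_tmpls, ARRAY_SIZE(demo_tmpls));
}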
@@ -544,6 +603,7 @@
 			     struct crypto_instance *inst)
 {
 	struct crypto_larval *larval;
+	struct crypto_spawn *spawn;
 	int err;
 
 	err = crypto_check_alg(&inst->alg);
@@ -554,6 +614,22 @@
 	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
 
 	down_write(&crypto_alg_sem);
+
+	larval = ERR_PTR(-EAGAIN);
+	for (spawn = inst->spawns; spawn;) {
+		struct crypto_spawn *next;
+
+		if (spawn->dead)
+			goto unlock;
+
+		next = spawn->next;
+		spawn->inst = inst;
+		spawn->registered = true;
+
+		crypto_mod_put(spawn->alg);
+
+		spawn = next;
+	}
 
 	larval = __crypto_register_alg(&inst->alg);
 	if (IS_ERR(larval))
@@ -577,7 +653,7 @@
 }
 EXPORT_SYMBOL_GPL(crypto_register_instance);
 
-int crypto_unregister_instance(struct crypto_instance *inst)
+void crypto_unregister_instance(struct crypto_instance *inst)
 {
 	LIST_HEAD(list);
 
@@ -589,86 +665,82 @@
 	up_write(&crypto_alg_sem);
 
 	crypto_remove_final(&list);
-
-	return 0;
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_instance);
 
-int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
-		      struct crypto_instance *inst, u32 mask)
-{
-	int err = -EAGAIN;
-
-	spawn->inst = inst;
-	spawn->mask = mask;
-
-	down_write(&crypto_alg_sem);
-	if (!crypto_is_moribund(alg)) {
-		list_add(&spawn->list, &alg->cra_users);
-		spawn->alg = alg;
-		err = 0;
-	}
-	up_write(&crypto_alg_sem);
-
-	return err;
-}
-EXPORT_SYMBOL_GPL(crypto_init_spawn);
-
-int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
-		       struct crypto_instance *inst,
-		       const struct crypto_type *frontend)
-{
-	int err = -EINVAL;
-
-	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
-		goto out;
-
-	spawn->frontend = frontend;
-	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);
-
-out:
-	return err;
-}
-EXPORT_SYMBOL_GPL(crypto_init_spawn2);
-
-int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
-		      u32 type, u32 mask)
+int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
+		      const char *name, u32 type, u32 mask)
 {
 	struct crypto_alg *alg;
-	int err;
+	int err = -EAGAIN;
+
+	if (WARN_ON_ONCE(inst == NULL))
+		return -EINVAL;
+
+	/* Allow the result of crypto_attr_alg_name() to be passed directly */
+	if (IS_ERR(name))
+		return PTR_ERR(name);
 
 	alg = crypto_find_alg(name, spawn->frontend, type, mask);
 	if (IS_ERR(alg))
 		return PTR_ERR(alg);
 
-	err = crypto_init_spawn(spawn, alg, spawn->inst, mask);
-	crypto_mod_put(alg);
+	down_write(&crypto_alg_sem);
+	if (!crypto_is_moribund(alg)) {
+		list_add(&spawn->list, &alg->cra_users);
+		spawn->alg = alg;
+		spawn->mask = mask;
+		spawn->next = inst->spawns;
+		inst->spawns = spawn;
+		inst->alg.cra_flags |=
+			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
+		err = 0;
+	}
+	up_write(&crypto_alg_sem);
+	if (err)
+		crypto_mod_put(alg);
 	return err;
 }
 EXPORT_SYMBOL_GPL(crypto_grab_spawn);
 
 void crypto_drop_spawn(struct crypto_spawn *spawn)
 {
+	if (!spawn->alg) /* not yet initialized? */
+		return;
+
 	down_write(&crypto_alg_sem);
-	if (spawn->alg)
+	if (!spawn->dead)
 		list_del(&spawn->list);
 	up_write(&crypto_alg_sem);
+
+	if (!spawn->registered)
+		crypto_mod_put(spawn->alg);
 }
 EXPORT_SYMBOL_GPL(crypto_drop_spawn);
 
 static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
 {
-	struct crypto_alg *alg;
+	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
+	struct crypto_alg *target;
+	bool shoot = false;
 
 	down_read(&crypto_alg_sem);
-	alg = spawn->alg;
-	if (alg && !crypto_mod_get(alg)) {
-		alg->cra_flags |= CRYPTO_ALG_DYING;
-		alg = NULL;
+	if (!spawn->dead) {
+		alg = spawn->alg;
+		if (!crypto_mod_get(alg)) {
+			target = crypto_alg_get(alg);
+			shoot = true;
+			alg = ERR_PTR(-EAGAIN);
+		}
 	}
 	up_read(&crypto_alg_sem);
 
-	return alg ?: ERR_PTR(-EAGAIN);
+	if (shoot) {
+		crypto_shoot_alg(target);
+		crypto_alg_put(target);
+	}
+
+	return alg;
 }
 
 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
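
Two usage notes on the reworked spawn API above, followed by a hedged sketch. crypto_grab_spawn() now takes the owning instance, links the spawn into inst->spawns, and accepts an ERR_PTR-encoded name, so the return value of crypto_attr_alg_name() can be passed straight through; crypto_drop_spawn() in turn tolerates a spawn that was never successfully grabbed. The helper below is hypothetical and the type/mask values are only illustrative:

/* Grab the algorithm named by the first template parameter. */
static int demo_grab_inner(struct crypto_instance *inst,
			   struct crypto_spawn *spawn,
			   struct rtattr **tb, u32 mask)
{
	/* crypto_attr_alg_name() may return an ERR_PTR; that is fine now */
	return crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
				 CRYPTO_ALG_TYPE_CIPHER,
				 CRYPTO_ALG_TYPE_MASK | mask);
}

static void demo_free_inner(struct crypto_spawn *spawn)
{
	/* safe even if demo_grab_inner() failed or was never called */
	crypto_drop_spawn(spawn);
}

If the instance is torn down before crypto_register_instance() succeeds, dropping the spawn also releases the module reference, because spawn->registered is still false at that point.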
@@ -748,7 +820,23 @@
 }
 EXPORT_SYMBOL_GPL(crypto_get_attr_type);
 
-int crypto_check_attr_type(struct rtattr **tb, u32 type)
+/**
+ * crypto_check_attr_type() - check algorithm type and compute inherited mask
+ * @tb: the template parameters
+ * @type: the algorithm type the template would be instantiated as
+ * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
+ *	      to restrict the flags of any inner algorithms
+ *
+ * Validate that the algorithm type the user requested is compatible with the
+ * one the template would actually be instantiated as. E.g., if the user is
+ * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
+ * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
+ *
+ * Also compute the mask to use to restrict the flags of any inner algorithms.
+ *
+ * Return: 0 on success; -errno on failure
+ */
+int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
 {
 	struct crypto_attr_type *algt;
 
@@ -759,6 +847,7 @@
 	if ((algt->type ^ type) & algt->mask)
 		return -EINVAL;
 
+	*mask_ret = crypto_algt_inherited_mask(algt);
 	return 0;
 }
 EXPORT_SYMBOL_GPL(crypto_check_attr_type);
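
Per the kerneldoc above, a template's ->create() callback is expected to call crypto_check_attr_type() first and then feed the returned mask to crypto_grab_*() for its inner algorithm. A hedged sketch of that prologue, reusing the hypothetical demo_grab_inner() helper shown earlier (this is not the exact ->create() prototype, and instance allocation is omitted):

static int demo_init_instance(struct crypto_instance *inst, struct rtattr **tb)
{
	u32 mask;
	int err;

	/* e.g. reject a crypto_alloc_shash() request for a cipher template */
	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_CIPHER, &mask);
	if (err)
		return err;

	/* the inherited mask restricts the flags of the inner algorithm */
	return demo_grab_inner(inst, crypto_instance_ctx(inst), tb, mask);
}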
@@ -780,20 +869,6 @@
 	return alga->name;
 }
 EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
-
-struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
-				    const struct crypto_type *frontend,
-				    u32 type, u32 mask)
-{
-	const char *name;
-
-	name = crypto_attr_alg_name(rta);
-	if (IS_ERR(name))
-		return ERR_CAST(name);
-
-	return crypto_find_alg(name, frontend, type, mask);
-}
-EXPORT_SYMBOL_GPL(crypto_attr_alg2);
 
 int crypto_attr_u32(struct rtattr *rta, u32 *num)
 {
@@ -828,61 +903,6 @@
 }
 EXPORT_SYMBOL_GPL(crypto_inst_setname);
 
-void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
-			     unsigned int head)
-{
-	struct crypto_instance *inst;
-	char *p;
-	int err;
-
-	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
-		    GFP_KERNEL);
-	if (!p)
-		return ERR_PTR(-ENOMEM);
-
-	inst = (void *)(p + head);
-
-	err = crypto_inst_setname(inst, name, alg);
-	if (err)
-		goto err_free_inst;
-
-	return p;
-
-err_free_inst:
-	kfree(p);
-	return ERR_PTR(err);
-}
-EXPORT_SYMBOL_GPL(crypto_alloc_instance2);
-
-struct crypto_instance *crypto_alloc_instance(const char *name,
-					      struct crypto_alg *alg)
-{
-	struct crypto_instance *inst;
-	struct crypto_spawn *spawn;
-	int err;
-
-	inst = crypto_alloc_instance2(name, alg, 0);
-	if (IS_ERR(inst))
-		goto out;
-
-	spawn = crypto_instance_ctx(inst);
-	err = crypto_init_spawn(spawn, alg, inst,
-				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
-
-	if (err)
-		goto err_free_inst;
-
-	return inst;
-
-err_free_inst:
-	kfree(inst);
-	inst = ERR_PTR(err);
-
-out:
-	return inst;
-}
-EXPORT_SYMBOL_GPL(crypto_alloc_instance);
-
 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
 {
 	INIT_LIST_HEAD(&queue->list);
@@ -915,6 +935,14 @@
 }
 EXPORT_SYMBOL_GPL(crypto_enqueue_request);
 
+void crypto_enqueue_request_head(struct crypto_queue *queue,
+				 struct crypto_async_request *request)
+{
+	queue->qlen++;
+	list_add(&request->list, &queue->list);
+}
+EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
+
 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
 {
 	struct list_head *request;
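
crypto_enqueue_request_head() complements crypto_dequeue_request() for the case where a request has been pulled off the queue but cannot be processed yet and should be retried before anything else. A hedged sketch of that pattern in a hypothetical driver (demo_hw_submit() and the lock are assumptions, not part of this API):

static int demo_hw_submit(struct crypto_async_request *req);	/* hypothetical */

static void demo_pump(struct crypto_queue *queue, spinlock_t *lock)
{
	struct crypto_async_request *req;

	spin_lock_bh(lock);
	req = crypto_dequeue_request(queue);
	spin_unlock_bh(lock);
	if (!req)
		return;

	if (demo_hw_submit(req) == -EBUSY) {
		/* hardware busy: put the request back at the head so it is
		 * retried first and keeps its position in the queue */
		spin_lock_bh(lock);
		crypto_enqueue_request_head(queue, req);
		spin_unlock_bh(lock);
	}
}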
@@ -933,19 +961,6 @@
 	return list_entry(request, struct crypto_async_request, list);
 }
 EXPORT_SYMBOL_GPL(crypto_dequeue_request);
-
-int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
-{
-	struct crypto_async_request *req;
-
-	list_for_each_entry(req, &queue->list, list) {
-		if (req->tfm == tfm)
-			return 1;
-	}
-
-	return 0;
-}
-EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);
 
 static inline void crypto_inc_byte(u8 *a, unsigned int size)
 {
@@ -1053,6 +1068,219 @@
 }
 EXPORT_SYMBOL_GPL(crypto_type_has_alg);
 
+#ifdef CONFIG_CRYPTO_STATS
+void crypto_stats_init(struct crypto_alg *alg)
+{
+	memset(&alg->stats, 0, sizeof(alg->stats));
+}
+EXPORT_SYMBOL_GPL(crypto_stats_init);
+
+void crypto_stats_get(struct crypto_alg *alg)
+{
+	crypto_alg_get(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_get);
+
+void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
+			       int ret)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+		atomic64_inc(&alg->stats.aead.err_cnt);
+	} else {
+		atomic64_inc(&alg->stats.aead.encrypt_cnt);
+		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
+	}
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
+
+void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
+			       int ret)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+		atomic64_inc(&alg->stats.aead.err_cnt);
+	} else {
+		atomic64_inc(&alg->stats.aead.decrypt_cnt);
+		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
+	}
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
+
+void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
+				   struct crypto_alg *alg)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+		atomic64_inc(&alg->stats.akcipher.err_cnt);
+	} else {
+		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
+		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
+	}
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
+
+void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
+				   struct crypto_alg *alg)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+		atomic64_inc(&alg->stats.akcipher.err_cnt);
+	} else {
+		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
+		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
+	}
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
+
+void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
+		atomic64_inc(&alg->stats.akcipher.err_cnt);
+	else
+		atomic64_inc(&alg->stats.akcipher.sign_cnt);
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
+
+void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
+		atomic64_inc(&alg->stats.akcipher.err_cnt);
+	else
+		atomic64_inc(&alg->stats.akcipher.verify_cnt);
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
+
+void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+		atomic64_inc(&alg->stats.compress.err_cnt);
+	} else {
+		atomic64_inc(&alg->stats.compress.compress_cnt);
+		atomic64_add(slen, &alg->stats.compress.compress_tlen);
+	}
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_compress);
+
+void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+		atomic64_inc(&alg->stats.compress.err_cnt);
+	} else {
+		atomic64_inc(&alg->stats.compress.decompress_cnt);
+		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
+	}
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_decompress);
+
+void crypto_stats_ahash_update(unsigned int nbytes, int ret,
+			       struct crypto_alg *alg)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
+		atomic64_inc(&alg->stats.hash.err_cnt);
+	else
+		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
+
+void crypto_stats_ahash_final(unsigned int nbytes, int ret,
+			      struct crypto_alg *alg)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+		atomic64_inc(&alg->stats.hash.err_cnt);
+	} else {
+		atomic64_inc(&alg->stats.hash.hash_cnt);
+		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
+	}
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
+
+void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
+{
+	if (ret)
+		atomic64_inc(&alg->stats.kpp.err_cnt);
+	else
+		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
+
+void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
+{
+	if (ret)
+		atomic64_inc(&alg->stats.kpp.err_cnt);
+	else
+		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
+
+void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
+{
+	if (ret)
+		atomic64_inc(&alg->stats.kpp.err_cnt);
+	else
+		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
+
+void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
+		atomic64_inc(&alg->stats.rng.err_cnt);
+	else
+		atomic64_inc(&alg->stats.rng.seed_cnt);
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
+
+void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
+			       int ret)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+		atomic64_inc(&alg->stats.rng.err_cnt);
+	} else {
+		atomic64_inc(&alg->stats.rng.generate_cnt);
+		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
+	}
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
+
+void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
+				   struct crypto_alg *alg)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+		atomic64_inc(&alg->stats.cipher.err_cnt);
+	} else {
+		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
+		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
+	}
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
+
+void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
+				   struct crypto_alg *alg)
+{
+	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+		atomic64_inc(&alg->stats.cipher.err_cnt);
+	} else {
+		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
+		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
+	}
+	crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
+#endif
+
 static int __init crypto_algapi_init(void)
 {
 	crypto_init_proc();
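
The crypto_stats_* helpers above follow one pattern: crypto_stats_get() takes an extra reference on the algorithm before the operation, and the per-operation helper records the outcome (error counter on failure, count plus total length otherwise) and drops that reference via crypto_alg_put(). A hedged, generic illustration of the pairing (the wrapper and its callback are hypothetical, not how any particular front end is written):

static int demo_encrypt_counted(struct crypto_alg *alg, unsigned int cryptlen,
				int (*do_encrypt)(void *priv), void *priv)
{
	int ret;

	crypto_stats_get(alg);			/* reference held for the stats path */
	ret = do_encrypt(priv);			/* the actual operation */
	crypto_stats_skcipher_encrypt(cryptlen, ret, alg);	/* counts and puts */
	return ret;
}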
@@ -1069,3 +1297,4 @@
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Cryptographic algorithms API");
+MODULE_SOFTDEP("pre: cryptomgr");