2024-05-10  commit 23fa18eaa71266feff7ba8d83022d9e1cc83c65a

--- a/kernel/include/crypto/algapi.h
+++ b/kernel/include/crypto/algapi.h
@@ -1,13 +1,8 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
 /*
  * Cryptographic API for algorithms (i.e., low-level API).
  *
  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
  */
 #ifndef _CRYPTO_ALGAPI_H
 #define _CRYPTO_ALGAPI_H
@@ -15,13 +10,14 @@
 #include <linux/crypto.h>
 #include <linux/list.h>
 #include <linux/kernel.h>
-#include <linux/skbuff.h>
 
 /*
  * Maximum values for blocksize and alignmask, used to allocate
  * static buffers that are big enough for any combination of
- * ciphers and architectures.
+ * algs and architectures. Ciphers have a lower maximum size.
  */
+#define MAX_ALGAPI_BLOCKSIZE	160
+#define MAX_ALGAPI_ALIGNMASK	63
 #define MAX_CIPHER_BLOCKSIZE	16
 #define MAX_CIPHER_ALIGNMASK	15
 
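Note: the new ALGAPI-wide bounds let code that handles arbitrary algorithms size scratch storage at compile time, where the older CIPHER bounds only cover single-block ciphers. A minimal sketch of the sizing idiom this enables (the function and buffer names are illustrative, not part of this patch):

	/* Worst-case scratch block: oversize by the worst alignmask, then
	 * align to the transform's actual requirement at run time. */
	static int example_process_block(struct crypto_tfm *tfm, const u8 *in)
	{
		u8 buf[MAX_ALGAPI_BLOCKSIZE + MAX_ALGAPI_ALIGNMASK];
		u8 *blk = PTR_ALIGN(buf, crypto_tfm_alg_alignmask(tfm) + 1);

		memcpy(blk, in, crypto_tfm_alg_blocksize(tfm));
		/* ... operate on blk in place ... */
		return 0;
	}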
@@ -30,6 +26,7 @@
 struct module;
 struct rtattr;
 struct seq_file;
+struct sk_buff;
 
 struct crypto_type {
 	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
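Note: replacing the <linux/skbuff.h> include with a forward declaration trims header dependencies, and it is safe because this header only ever refers to struct sk_buff through pointers, whose size the compiler never needs. For example, the crypto_type report hook (not visible in this hunk) has the shape:

	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);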
@@ -50,7 +47,13 @@
 	struct crypto_alg alg;
 
 	struct crypto_template *tmpl;
-	struct hlist_node list;
+
+	union {
+		/* Node in list of instances after registration. */
+		struct hlist_node list;
+		/* List of attached spawns before registration. */
+		struct crypto_spawn *spawns;
+	};
 
 	void *__ctx[] CRYPTO_MINALIGN_ATTR;
 };
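Note: the union is safe because the two members are never live at the same time; an instance collects spawns only before registration and sits on its template's instance list only after. A sketch of the two phases, assuming the registration logic in crypto/algapi.c, which this header-only diff does not show:

	/* Phase 1, presumed inside crypto_grab_spawn(): thread the spawn
	 * onto the not-yet-registered instance via the crypto_spawn union
	 * further below. */
	spawn->next = inst->spawns;
	inst->spawns = spawn;

	/* Phase 2, presumed inside crypto_register_instance(): the same
	 * storage is reused to link the instance into its template. */
	hlist_add_head(&inst->list, &tmpl->instances);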
@@ -60,8 +63,6 @@
 	struct hlist_head instances;
 	struct module *module;
 
-	struct crypto_instance *(*alloc)(struct rtattr **tb);
-	void (*free)(struct crypto_instance *inst);
 	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);
 
 	char name[CRYPTO_MAX_ALG_NAME];
@@ -70,9 +71,16 @@
 struct crypto_spawn {
 	struct list_head list;
 	struct crypto_alg *alg;
-	struct crypto_instance *inst;
+	union {
+		/* Back pointer to instance after registration. */
+		struct crypto_instance *inst;
+		/* Spawn list pointer prior to registration. */
+		struct crypto_spawn *next;
+	};
 	const struct crypto_type *frontend;
 	u32 mask;
+	bool dead;
+	bool registered;
 };
 
 struct crypto_queue {
@@ -88,111 +96,38 @@
 	unsigned int offset;
 };
 
-struct blkcipher_walk {
-	union {
-		struct {
-			struct page *page;
-			unsigned long offset;
-		} phys;
-
-		struct {
-			u8 *page;
-			u8 *addr;
-		} virt;
-	} src, dst;
-
-	struct scatter_walk in;
-	unsigned int nbytes;
-
-	struct scatter_walk out;
-	unsigned int total;
-
-	void *page;
-	u8 *buffer;
-	u8 *iv;
-	unsigned int ivsize;
-
-	int flags;
-	unsigned int walk_blocksize;
-	unsigned int cipher_blocksize;
-	unsigned int alignmask;
-};
-
-struct ablkcipher_walk {
-	struct {
-		struct page *page;
-		unsigned int offset;
-	} src, dst;
-
-	struct scatter_walk in;
-	unsigned int nbytes;
-	struct scatter_walk out;
-	unsigned int total;
-	struct list_head buffers;
-	u8 *iv_buffer;
-	u8 *iv;
-	int flags;
-	unsigned int blocksize;
-};
-
-extern const struct crypto_type crypto_ablkcipher_type;
-extern const struct crypto_type crypto_blkcipher_type;
-
 void crypto_mod_put(struct crypto_alg *alg);
 
 int crypto_register_template(struct crypto_template *tmpl);
+int crypto_register_templates(struct crypto_template *tmpls, int count);
 void crypto_unregister_template(struct crypto_template *tmpl);
+void crypto_unregister_templates(struct crypto_template *tmpls, int count);
 struct crypto_template *crypto_lookup_template(const char *name);
 
 int crypto_register_instance(struct crypto_template *tmpl,
 			     struct crypto_instance *inst);
-int crypto_unregister_instance(struct crypto_instance *inst);
+void crypto_unregister_instance(struct crypto_instance *inst);
 
-int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
-		      struct crypto_instance *inst, u32 mask);
-int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
-		       struct crypto_instance *inst,
-		       const struct crypto_type *frontend);
-int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
-		      u32 type, u32 mask);
-
+int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
 void crypto_drop_spawn(struct crypto_spawn *spawn);
 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
 				    u32 mask);
 void *crypto_spawn_tfm2(struct crypto_spawn *spawn);
 
-static inline void crypto_set_spawn(struct crypto_spawn *spawn,
-				    struct crypto_instance *inst)
-{
-	spawn->inst = inst;
-}
-
 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
-int crypto_check_attr_type(struct rtattr **tb, u32 type);
+int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
 const char *crypto_attr_alg_name(struct rtattr *rta);
-struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
-				    const struct crypto_type *frontend,
-				    u32 type, u32 mask);
-
-static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
-						 u32 type, u32 mask)
-{
-	return crypto_attr_alg2(rta, NULL, type, mask);
-}
-
 int crypto_attr_u32(struct rtattr *rta, u32 *num);
 int crypto_inst_setname(struct crypto_instance *inst, const char *name,
 			struct crypto_alg *alg);
-void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
-			     unsigned int head);
-struct crypto_instance *crypto_alloc_instance(const char *name,
-					      struct crypto_alg *alg);
 
 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
 int crypto_enqueue_request(struct crypto_queue *queue,
 			   struct crypto_async_request *request);
+void crypto_enqueue_request_head(struct crypto_queue *queue,
+				 struct crypto_async_request *request);
 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
-int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
 static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
 {
 	return queue->qlen;
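Note: taken together, this hunk retires the crypto_init_spawn()/crypto_set_spawn() two-step: crypto_grab_spawn() now learns its owning instance up front, crypto_unregister_instance() can no longer fail, crypto_check_attr_type() additionally reports the mask to grab with, templates can be registered and unregistered in bulk, and crypto_enqueue_request_head() allows requeueing at the head of a queue. A condensed sketch of a single-spawn template ->create under the new signatures; the example_* names are invented and real users also fill in the remaining cra_* fields:

	static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
	{
		struct crypto_instance *inst;
		struct crypto_spawn *spawn;
		u32 mask;
		int err;

		/* New third parameter returns the mask to grab with. */
		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_CIPHER, &mask);
		if (err)
			return err;

		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
		if (!inst)
			return -ENOMEM;
		spawn = crypto_instance_ctx(inst);

		/* The owning instance is now passed at grab time, so the
		 * old init/set two-step is gone. */
		err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
					0, mask);
		if (err)
			goto err_free;

		err = crypto_inst_setname(inst, tmpl->name, spawn->alg);
		if (err)
			goto err_drop;

		/* ... initialize inst->alg (flags, sizes, ops) ... */

		err = crypto_register_instance(tmpl, inst);
		if (err)
			goto err_drop;
		return 0;

	err_drop:
		crypto_drop_spawn(spawn);
	err_free:
		kfree(inst);
		return err;
	}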
@@ -237,26 +172,6 @@
 	}
 }
 
-int blkcipher_walk_done(struct blkcipher_desc *desc,
-			struct blkcipher_walk *walk, int err);
-int blkcipher_walk_virt(struct blkcipher_desc *desc,
-			struct blkcipher_walk *walk);
-int blkcipher_walk_phys(struct blkcipher_desc *desc,
-			struct blkcipher_walk *walk);
-int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
-			      struct blkcipher_walk *walk,
-			      unsigned int blocksize);
-int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
-				   struct blkcipher_walk *walk,
-				   struct crypto_aead *tfm,
-				   unsigned int blocksize);
-
-int ablkcipher_walk_done(struct ablkcipher_request *req,
-			 struct ablkcipher_walk *walk, int err);
-int ablkcipher_walk_phys(struct ablkcipher_request *req,
-			 struct ablkcipher_walk *walk);
-void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);
-
 static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
 {
 	return PTR_ALIGN(crypto_tfm_ctx(tfm),
@@ -274,82 +189,6 @@
 	return inst->__ctx;
 }
 
-static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
-	struct crypto_ablkcipher *tfm)
-{
-	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
-}
-
-static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
-{
-	return crypto_tfm_ctx(&tfm->base);
-}
-
-static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
-{
-	return crypto_tfm_ctx_aligned(&tfm->base);
-}
-
-static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
-	struct crypto_spawn *spawn)
-{
-	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-	u32 mask = CRYPTO_ALG_TYPE_MASK;
-
-	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
-}
-
-static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
-{
-	return crypto_tfm_ctx(&tfm->base);
-}
-
-static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
-{
-	return crypto_tfm_ctx_aligned(&tfm->base);
-}
-
-static inline struct crypto_cipher *crypto_spawn_cipher(
-	struct crypto_spawn *spawn)
-{
-	u32 type = CRYPTO_ALG_TYPE_CIPHER;
-	u32 mask = CRYPTO_ALG_TYPE_MASK;
-
-	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
-}
-
-static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
-{
-	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
-}
-
-static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
-				       struct scatterlist *dst,
-				       struct scatterlist *src,
-				       unsigned int nbytes)
-{
-	walk->in.sg = src;
-	walk->out.sg = dst;
-	walk->total = nbytes;
-}
-
-static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
-					struct scatterlist *dst,
-					struct scatterlist *src,
-					unsigned int nbytes)
-{
-	walk->in.sg = src;
-	walk->out.sg = dst;
-	walk->total = nbytes;
-	INIT_LIST_HEAD(&walk->buffers);
-}
-
-static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
-{
-	if (unlikely(!list_empty(&walk->buffers)))
-		__ablkcipher_walk_complete(walk);
-}
-
 static inline struct crypto_async_request *crypto_get_backlog(
 	struct crypto_queue *queue)
 {
@@ -357,47 +196,29 @@
 		container_of(queue->backlog, struct crypto_async_request, list);
 }
 
-static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
-					     struct ablkcipher_request *request)
+static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
 {
-	return crypto_enqueue_request(queue, &request->base);
-}
-
-static inline struct ablkcipher_request *ablkcipher_dequeue_request(
-	struct crypto_queue *queue)
-{
-	return ablkcipher_request_cast(crypto_dequeue_request(queue));
-}
-
-static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
-{
-	return req->__ctx;
-}
-
-static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
-					  struct crypto_ablkcipher *tfm)
-{
-	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
-}
-
-static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
-						     u32 type, u32 mask)
-{
-	return crypto_attr_alg(tb[1], type, mask);
-}
-
-static inline int crypto_requires_off(u32 type, u32 mask, u32 off)
-{
-	return (type ^ off) & mask & off;
+	return (algt->type ^ off) & algt->mask & off;
 }
 
 /*
- * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
- * Otherwise returns zero.
+ * When an algorithm uses another algorithm (e.g., if it's an instance of a
+ * template), these are the flags that should always be set on the "outer"
+ * algorithm if any "inner" algorithm has them set.
 */
-static inline int crypto_requires_sync(u32 type, u32 mask)
+#define CRYPTO_ALG_INHERITED_FLAGS	\
+	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
+	 CRYPTO_ALG_ALLOCATES_MEMORY)
+
+/*
+ * Given the type and mask that specify the flags restrictions on a template
+ * instance being created, return the mask that should be passed to
+ * crypto_grab_*() (along with type=0) to honor any request the user made to
+ * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
+ */
+static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
 {
-	return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
+	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
 }
 
 noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);
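Note: the inherited-flags helpers formalize how a template instance inherits CRYPTO_ALG_ASYNC and friends from the algorithm it wraps; crypto_check_attr_type()'s new mask_ret is expected to be exactly this inherited mask. The intended flow, sketched under the same assumptions as the ->create example above:

	struct crypto_attr_type *algt = crypto_get_attr_type(tb);
	u32 mask;

	if (IS_ERR(algt))
		return PTR_ERR(algt);

	/* Compute the lookup mask that honors a user request to keep any
	 * inherited flag clear (e.g. a sync-only implementation)... */
	mask = crypto_algt_inherited_mask(algt);
	err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
				0, mask);
	if (err)
		return err;

	/* ...and propagate the flags the inner algorithm actually set. */
	inst->alg.cra_flags |= spawn->alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS;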
@@ -417,12 +238,14 @@
 	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
 }
 
-static inline void crypto_yield(u32 flags)
-{
-#if !defined(CONFIG_PREEMPT) || defined(CONFIG_PREEMPT_VOLUNTARY)
-	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
-		cond_resched();
-#endif
-}
+int crypto_register_notifier(struct notifier_block *nb);
+int crypto_unregister_notifier(struct notifier_block *nb);
+
+/* Crypto notification events. */
+enum {
+	CRYPTO_MSG_ALG_REQUEST,
+	CRYPTO_MSG_ALG_REGISTER,
+	CRYPTO_MSG_ALG_LOADED,
+};
 
 #endif /* _CRYPTO_ALGAPI_H */
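Note: the final hunk drops crypto_yield() from this header and exposes the notification API instead; consumers use it through a standard notifier_block. A minimal sketch, with the example_* names invented for illustration:

	#include <linux/notifier.h>

	static int example_notify(struct notifier_block *nb, unsigned long msg,
				  void *data)
	{
		if (msg == CRYPTO_MSG_ALG_LOADED)
			pr_debug("an algorithm finished loading\n");
		return NOTIFY_OK;
	}

	static struct notifier_block example_nb = {
		.notifier_call = example_notify,
	};

	static int __init example_init(void)
	{
		return crypto_register_notifier(&example_nb);
	}

	static void __exit example_exit(void)
	{
		crypto_unregister_notifier(&example_nb);
	}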