@@ ... @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
 /*
  * Cryptographic API for algorithms (i.e., low-level API).
  *
  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
  */
 #ifndef _CRYPTO_ALGAPI_H
 #define _CRYPTO_ALGAPI_H
@@ ... @@
 #include <linux/crypto.h>
 #include <linux/list.h>
 #include <linux/kernel.h>
-#include <linux/skbuff.h>
 
 /*
  * Maximum values for blocksize and alignmask, used to allocate
  * static buffers that are big enough for any combination of
- * ciphers and architectures.
+ * algs and architectures. Ciphers have a lower maximum size.
  */
+#define MAX_ALGAPI_BLOCKSIZE		160
+#define MAX_ALGAPI_ALIGNMASK		63
 #define MAX_CIPHER_BLOCKSIZE		16
 #define MAX_CIPHER_ALIGNMASK		15
 
@@ ... @@
 struct module;
 struct rtattr;
 struct seq_file;
+struct sk_buff;
 
 struct crypto_type {
 	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
@@ ... @@
 	struct crypto_alg alg;
 
 	struct crypto_template *tmpl;
-	struct hlist_node list;
+
+	union {
+		/* Node in list of instances after registration. */
+		struct hlist_node list;
+		/* List of attached spawns before registration. */
+		struct crypto_spawn *spawns;
+	};
 
 	void *__ctx[] CRYPTO_MINALIGN_ATTR;
 };
@@ ... @@
 	struct hlist_head instances;
 	struct module *module;
 
-	struct crypto_instance *(*alloc)(struct rtattr **tb);
-	void (*free)(struct crypto_instance *inst);
 	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);
 
 	char name[CRYPTO_MAX_ALG_NAME];
@@ ... @@
 struct crypto_spawn {
 	struct list_head list;
 	struct crypto_alg *alg;
-	struct crypto_instance *inst;
+	union {
+		/* Back pointer to instance after registration. */
+		struct crypto_instance *inst;
+		/* Spawn list pointer prior to registration. */
+		struct crypto_spawn *next;
+	};
 	const struct crypto_type *frontend;
 	u32 mask;
+	bool dead;
+	bool registered;
 };
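The two unions above mirror each other across the registration boundary: while a template's ->create() is still assembling an instance, each grabbed spawn is threaded onto a singly linked list headed at inst->spawns via spawn->next; once the instance is registered, the same storage holds the hlist node and the back pointer instead. A minimal sketch of the pre-registration linkage, assuming kernel context (sketch_link_spawn is illustrative, not a kernel function):

```c
/* Illustrative sketch only: threading a freshly grabbed spawn onto an
 * instance that has not been registered yet, using the unions above. */
static void sketch_link_spawn(struct crypto_instance *inst,
			      struct crypto_spawn *spawn)
{
	spawn->next = inst->spawns;	/* 'next' is only valid pre-registration */
	inst->spawns = spawn;		/* 'spawns', not 'list', before register */
	spawn->registered = false;	/* flipped when the instance registers */
}
```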
 
 struct crypto_queue {
@@ ... @@
 	unsigned int offset;
 };
 
-struct blkcipher_walk {
-	union {
-		struct {
-			struct page *page;
-			unsigned long offset;
-		} phys;
-
-		struct {
-			u8 *page;
-			u8 *addr;
-		} virt;
-	} src, dst;
-
-	struct scatter_walk in;
-	unsigned int nbytes;
-
-	struct scatter_walk out;
-	unsigned int total;
-
-	void *page;
-	u8 *buffer;
-	u8 *iv;
-	unsigned int ivsize;
-
-	int flags;
-	unsigned int walk_blocksize;
-	unsigned int cipher_blocksize;
-	unsigned int alignmask;
-};
-
-struct ablkcipher_walk {
-	struct {
-		struct page *page;
-		unsigned int offset;
-	} src, dst;
-
-	struct scatter_walk in;
-	unsigned int nbytes;
-	struct scatter_walk out;
-	unsigned int total;
-	struct list_head buffers;
-	u8 *iv_buffer;
-	u8 *iv;
-	int flags;
-	unsigned int blocksize;
-};
-
-extern const struct crypto_type crypto_ablkcipher_type;
-extern const struct crypto_type crypto_blkcipher_type;
-
 void crypto_mod_put(struct crypto_alg *alg);
 
 int crypto_register_template(struct crypto_template *tmpl);
+int crypto_register_templates(struct crypto_template *tmpls, int count);
 void crypto_unregister_template(struct crypto_template *tmpl);
+void crypto_unregister_templates(struct crypto_template *tmpls, int count);
 struct crypto_template *crypto_lookup_template(const char *name);
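The new plural helpers let a module register a whole array of templates in one call, with already-registered entries rolled back on failure. A hypothetical caller might look like the sketch below; the `my_tmpls` array and the `foo_create`/`bar_create` callbacks are illustrative names, not part of the patch:

```c
static struct crypto_template my_tmpls[] = {
	{
		.name	= "foo",
		.create	= foo_create,		/* hypothetical ->create() */
		.module	= THIS_MODULE,
	}, {
		.name	= "bar",
		.create	= bar_create,		/* hypothetical ->create() */
		.module	= THIS_MODULE,
	},
};

static int __init my_module_init(void)
{
	/* Registers every entry; on error the helper unwinds the ones
	 * it already registered, so no partial-failure handling here. */
	return crypto_register_templates(my_tmpls, ARRAY_SIZE(my_tmpls));
}

static void __exit my_module_exit(void)
{
	crypto_unregister_templates(my_tmpls, ARRAY_SIZE(my_tmpls));
}
```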
 
 int crypto_register_instance(struct crypto_template *tmpl,
 			     struct crypto_instance *inst);
-int crypto_unregister_instance(struct crypto_instance *inst);
+void crypto_unregister_instance(struct crypto_instance *inst);
 
-int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
-		      struct crypto_instance *inst, u32 mask);
-int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
-		       struct crypto_instance *inst,
-		       const struct crypto_type *frontend);
-int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
-		      u32 type, u32 mask);
-
+int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
+		      const char *name, u32 type, u32 mask);
 void crypto_drop_spawn(struct crypto_spawn *spawn);
 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
 				    u32 mask);
 void *crypto_spawn_tfm2(struct crypto_spawn *spawn);
 
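With the instance pointer passed to crypto_grab_spawn() up front, the old crypto_init_spawn()/crypto_set_spawn() two-step disappears. A sketch of how a template ->create() might use the reworked calls; `foo_create` is illustrative, and error unwinding is condensed:

```c
static int foo_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_spawn *spawn;
	u32 mask;
	int err;

	/* The new third argument returns the mask to grab with. */
	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_CIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = (void *)&inst[1];

	/* The spawn is tied to 'inst' here; crypto_set_spawn() is gone. */
	err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
				CRYPTO_ALG_TYPE_CIPHER, mask);
	if (err) {
		kfree(inst);
		return err;
	}

	/* ... fill in inst->alg, then crypto_register_instance() ... */
	return 0;
}
```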
-static inline void crypto_set_spawn(struct crypto_spawn *spawn,
-				    struct crypto_instance *inst)
-{
-	spawn->inst = inst;
-}
-
 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
-int crypto_check_attr_type(struct rtattr **tb, u32 type);
+int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
 const char *crypto_attr_alg_name(struct rtattr *rta);
-struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
-				    const struct crypto_type *frontend,
-				    u32 type, u32 mask);
-
-static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
-						 u32 type, u32 mask)
-{
-	return crypto_attr_alg2(rta, NULL, type, mask);
-}
-
 int crypto_attr_u32(struct rtattr *rta, u32 *num);
 int crypto_inst_setname(struct crypto_instance *inst, const char *name,
 			struct crypto_alg *alg);
-void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
-			     unsigned int head);
-struct crypto_instance *crypto_alloc_instance(const char *name,
-					      struct crypto_alg *alg);
 
 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
 int crypto_enqueue_request(struct crypto_queue *queue,
 			   struct crypto_async_request *request);
+void crypto_enqueue_request_head(struct crypto_queue *queue,
+				 struct crypto_async_request *request);
 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
-int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
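crypto_enqueue_request_head() exists for requeueing: when a request has already been dequeued but the driver cannot make progress, putting it back at the head preserves the original submission order (crypto_engine uses it for this kind of requeue). A rough sketch, with the hardware helpers being hypothetical and the queue locking elided:

```c
static void my_pump_one(struct crypto_queue *queue)
{
	struct crypto_async_request *req;

	req = crypto_dequeue_request(queue);
	if (!req)
		return;

	if (!my_hw_ready()) {			/* hypothetical readiness check */
		/* Back at the head, not the tail: keeps FIFO order. */
		crypto_enqueue_request_head(queue, req);
		return;
	}

	my_hw_submit(req);			/* hypothetical submit path */
}
```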
 static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
 {
 	return queue->qlen;
@@ ... @@
 	}
 }
 
-int blkcipher_walk_done(struct blkcipher_desc *desc,
-			struct blkcipher_walk *walk, int err);
-int blkcipher_walk_virt(struct blkcipher_desc *desc,
-			struct blkcipher_walk *walk);
-int blkcipher_walk_phys(struct blkcipher_desc *desc,
-			struct blkcipher_walk *walk);
-int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
-			      struct blkcipher_walk *walk,
-			      unsigned int blocksize);
-int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
-				   struct blkcipher_walk *walk,
-				   struct crypto_aead *tfm,
-				   unsigned int blocksize);
-
-int ablkcipher_walk_done(struct ablkcipher_request *req,
-			 struct ablkcipher_walk *walk, int err);
-int ablkcipher_walk_phys(struct ablkcipher_request *req,
-			 struct ablkcipher_walk *walk);
-void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);
-
 static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
 {
 	return PTR_ALIGN(crypto_tfm_ctx(tfm),
@@ ... @@
 	return inst->__ctx;
 }
 
-static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
-	struct crypto_ablkcipher *tfm)
-{
-	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
-}
-
-static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
-{
-	return crypto_tfm_ctx(&tfm->base);
-}
-
-static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
-{
-	return crypto_tfm_ctx_aligned(&tfm->base);
-}
-
-static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
-	struct crypto_spawn *spawn)
-{
-	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-	u32 mask = CRYPTO_ALG_TYPE_MASK;
-
-	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
-}
-
-static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
-{
-	return crypto_tfm_ctx(&tfm->base);
-}
-
-static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
-{
-	return crypto_tfm_ctx_aligned(&tfm->base);
-}
-
-static inline struct crypto_cipher *crypto_spawn_cipher(
-	struct crypto_spawn *spawn)
-{
-	u32 type = CRYPTO_ALG_TYPE_CIPHER;
-	u32 mask = CRYPTO_ALG_TYPE_MASK;
-
-	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
-}
-
-static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
-{
-	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
-}
-
-static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
-				       struct scatterlist *dst,
-				       struct scatterlist *src,
-				       unsigned int nbytes)
-{
-	walk->in.sg = src;
-	walk->out.sg = dst;
-	walk->total = nbytes;
-}
-
-static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
-					struct scatterlist *dst,
-					struct scatterlist *src,
-					unsigned int nbytes)
-{
-	walk->in.sg = src;
-	walk->out.sg = dst;
-	walk->total = nbytes;
-	INIT_LIST_HEAD(&walk->buffers);
-}
-
-static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
-{
-	if (unlikely(!list_empty(&walk->buffers)))
-		__ablkcipher_walk_complete(walk);
-}
-
 static inline struct crypto_async_request *crypto_get_backlog(
 	struct crypto_queue *queue)
 {
@@ ... @@
 		container_of(queue->backlog, struct crypto_async_request, list);
 }
 
-static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
-					     struct ablkcipher_request *request)
+static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
 {
-	return crypto_enqueue_request(queue, &request->base);
-}
-
-static inline struct ablkcipher_request *ablkcipher_dequeue_request(
-	struct crypto_queue *queue)
-{
-	return ablkcipher_request_cast(crypto_dequeue_request(queue));
-}
-
-static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
-{
-	return req->__ctx;
-}
-
-static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
-					  struct crypto_ablkcipher *tfm)
-{
-	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
-}
-
-static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
-						     u32 type, u32 mask)
-{
-	return crypto_attr_alg(tb[1], type, mask);
-}
-
-static inline int crypto_requires_off(u32 type, u32 mask, u32 off)
-{
-	return (type ^ off) & mask & off;
+	return (algt->type ^ off) & algt->mask & off;
 }
 
 /*
- * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
- * Otherwise returns zero.
+ * When an algorithm uses another algorithm (e.g., if it's an instance of a
+ * template), these are the flags that should always be set on the "outer"
+ * algorithm if any "inner" algorithm has them set.
  */
-static inline int crypto_requires_sync(u32 type, u32 mask)
+#define CRYPTO_ALG_INHERITED_FLAGS	\
+	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
+	 CRYPTO_ALG_ALLOCATES_MEMORY)
+
+/*
+ * Given the type and mask that specify the flags restrictions on a template
+ * instance being created, return the mask that should be passed to
+ * crypto_grab_*() (along with type=0) to honor any request the user made to
+ * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
+ */
+static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
 {
-	return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
+	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
 }
 
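Taken together, these helpers give templates one pattern for honoring user flag requests: derive the mask once, pass it to crypto_grab_*() with type 0, then copy any inherited flags from the inner algorithm up to the instance. A condensed sketch under the same assumptions as the earlier examples (sketch_inherit and its parameters are illustrative, not kernel code):

```c
/* Condensed sketch: derive the grab mask, then propagate inherited
 * flags from the inner 'alg' to the outer instance. */
static int sketch_inherit(struct rtattr **tb, struct crypto_instance *inst,
			  struct crypto_alg *alg)
{
	struct crypto_attr_type *algt;
	u32 mask;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	/* Mask to hand to crypto_grab_*() together with type = 0. */
	mask = crypto_algt_inherited_mask(algt);
	(void)mask;	/* used by the elided crypto_grab_*() call */

	/* Inner flags that are set must be set on the outer algorithm too. */
	inst->alg.cra_flags = alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS;
	return 0;
}
```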
 noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);
@@ ... @@
 	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
 }
 
-static inline void crypto_yield(u32 flags)
-{
-#if !defined(CONFIG_PREEMPT) || defined(CONFIG_PREEMPT_VOLUNTARY)
-	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
-		cond_resched();
-#endif
-}
+int crypto_register_notifier(struct notifier_block *nb);
+int crypto_unregister_notifier(struct notifier_block *nb);
+
+/* Crypto notification events. */
+enum {
+	CRYPTO_MSG_ALG_REQUEST,
+	CRYPTO_MSG_ALG_REGISTER,
+	CRYPTO_MSG_ALG_LOADED,
+};
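With the notifier hooks and message codes now exposed here, a subsystem can watch algorithm lifecycle events; broadly, CRYPTO_MSG_ALG_LOADED indicates a newly registered algorithm has become usable. A sketch of a listener, assuming <linux/notifier.h> is available; the callback and block names are illustrative:

```c
static int my_crypto_notify(struct notifier_block *nb, unsigned long msg,
			    void *data)
{
	switch (msg) {
	case CRYPTO_MSG_ALG_LOADED:
		/* A registered algorithm is now available for use. */
		pr_debug("crypto: algorithm loaded\n");
		break;
	}
	return NOTIFY_DONE;
}

static struct notifier_block my_crypto_nb = {
	.notifier_call	= my_crypto_notify,
};

/* Pair these in module init/exit:
 *	crypto_register_notifier(&my_crypto_nb);
 *	crypto_unregister_notifier(&my_crypto_nb);
 */
```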
 
 #endif /* _CRYPTO_ALGAPI_H */
---|