+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Software async crypto daemon.
  *
@@ ... @@
  * Gabriele Paoloni <gabriele.paoloni@intel.com>
  * Aidan O'Mahony (aidan.o.mahony@intel.com)
  * Copyright (c) 2010, Intel Corporation.
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
  */
 
 #include <crypto/internal/hash.h>
 #include <crypto/internal/aead.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/cryptd.h>
-#include <crypto/crypto_wq.h>
-#include <linux/atomic.h>
+#include <linux/refcount.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ ... @@
 #include <linux/scatterlist.h>
 #include <linux/sched.h>
 #include <linux/slab.h>
+#include <linux/workqueue.h>
 
 static unsigned int cryptd_max_cpu_qlen = 1000;
 module_param(cryptd_max_cpu_qlen, uint, 0);
 MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");
 
+static struct workqueue_struct *cryptd_wq;
+
 struct cryptd_cpu_queue {
        struct crypto_queue queue;
        struct work_struct work;
-       spinlock_t qlock;
 };
 
 struct cryptd_queue {
+       /*
+        * Protected by disabling BH to allow enqueueing from softinterrupt and
+        * dequeuing from kworker (cryptd_queue_worker()).
+        */
        struct cryptd_cpu_queue __percpu *cpu_queue;
 };
 
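The per-queue spinlock can be dropped because each CPU now owns its queue outright: requests are enqueued on the local CPU with bottom halves disabled, and the only consumer is the work item cryptd queues on that same CPU. A minimal sketch of the pattern, assuming per-CPU data shared between softirq and worker context; the names are illustrative, not from the patch:

    /* Sketch only: example_queue/example_enqueue are hypothetical names. */
    static DEFINE_PER_CPU(struct crypto_queue, example_queue);

    static int example_enqueue(struct crypto_async_request *req)
    {
            int err;

            local_bh_disable();     /* blocks softirq producers and, on non-RT,
                                     * disables preemption, pinning us here */
            err = crypto_enqueue_request(this_cpu_ptr(&example_queue), req);
            local_bh_enable();
            return err;
    }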
@@ ... @@
        struct cryptd_queue *queue;
 };
 
-struct cryptd_blkcipher_ctx {
-       atomic_t refcnt;
-       struct crypto_blkcipher *child;
-};
-
-struct cryptd_blkcipher_request_ctx {
-       crypto_completion_t complete;
-};
-
 struct cryptd_skcipher_ctx {
-       atomic_t refcnt;
+       refcount_t refcnt;
        struct crypto_skcipher *child;
 };
 
 struct cryptd_skcipher_request_ctx {
        crypto_completion_t complete;
+       struct skcipher_request req;
 };
 
 struct cryptd_hash_ctx {
-       atomic_t refcnt;
+       refcount_t refcnt;
        struct crypto_shash *child;
 };
 
@@ ... @@
 };
 
 struct cryptd_aead_ctx {
-       atomic_t refcnt;
+       refcount_t refcnt;
        struct crypto_aead *child;
 };
 
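Every atomic_t reference count above becomes a refcount_t, which saturates and warns on overflow or underflow instead of silently wrapping. A condensed sketch of the mapping used throughout the rest of the patch, with ctx/tfm standing in for the surrounding code's variables:

    /* was: atomic_set(&ctx->refcnt, 1); */
    refcount_set(&ctx->refcnt, 1);

    /* was: atomic_inc(&ctx->refcnt); */
    refcount_inc(&ctx->refcnt);

    /* was: if (atomic_dec_and_test(&ctx->refcnt)) ... */
    if (refcount_dec_and_test(&ctx->refcnt))
            crypto_free_skcipher(tfm);      /* last reference gone */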
@@ ... @@
                cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
                crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
                INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
-               spin_lock_init(&cpu_queue->qlock);
        }
        pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
        return 0;
@@ ... @@
 static int cryptd_enqueue_request(struct cryptd_queue *queue,
                                   struct crypto_async_request *request)
 {
-       int cpu, err;
+       int err;
        struct cryptd_cpu_queue *cpu_queue;
-       atomic_t *refcnt;
+       refcount_t *refcnt;
 
-       cpu_queue = raw_cpu_ptr(queue->cpu_queue);
-       spin_lock_bh(&cpu_queue->qlock);
-       cpu = smp_processor_id();
-
+       local_bh_disable();
+       cpu_queue = this_cpu_ptr(queue->cpu_queue);
        err = crypto_enqueue_request(&cpu_queue->queue, request);
 
        refcnt = crypto_tfm_ctx(request->tfm);
 
        if (err == -ENOSPC)
-               goto out_put_cpu;
+               goto out;
 
-       queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
+       queue_work_on(smp_processor_id(), cryptd_wq, &cpu_queue->work);
 
-       if (!atomic_read(refcnt))
-               goto out_put_cpu;
+       if (!refcount_read(refcnt))
+               goto out;
 
-       atomic_inc(refcnt);
+       refcount_inc(refcnt);
 
-out_put_cpu:
-       spin_unlock_bh(&cpu_queue->qlock);
+out:
+       local_bh_enable();
 
        return err;
 }
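Note the refcnt protocol here: a context created through normal template instantiation is zeroed, so refcount_read() == 0 means no lifetime management was requested and the increment is skipped; cryptd_alloc_skcipher() and friends (later in the patch) set the count to 1, after which each in-flight request pins the tfm. A condensed sketch of the matching put on the completion side, mirroring cryptd_skcipher_complete() below (names as in the surrounding code):

    /* Sketch, condensed from the completion handlers in this file. */
    int refcnt = refcount_read(&ctx->refcnt);

    local_bh_disable();
    rctx->complete(&req->base, err);        /* run the caller's callback */
    local_bh_enable();

    /* drop the reference taken at enqueue time, unless none was taken */
    if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
            crypto_free_skcipher(tfm);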
@@ ... @@
        /*
         * Only handle one request at a time to avoid hogging crypto workqueue.
         */
-       spin_lock_bh(&cpu_queue->qlock);
+       local_bh_disable();
        backlog = crypto_get_backlog(&cpu_queue->queue);
        req = crypto_dequeue_request(&cpu_queue->queue);
-       spin_unlock_bh(&cpu_queue->qlock);
+       local_bh_enable();
 
        if (!req)
                return;
@@ ... @@
        req->complete(req, 0);
 
        if (cpu_queue->queue.qlen)
-               queue_work(kcrypto_wq, &cpu_queue->work);
+               queue_work(cryptd_wq, &cpu_queue->work);
 }
 
 static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
@@ ... @@
        return ictx->queue;
 }
 
-static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
-                                         u32 *mask)
+static void cryptd_type_and_mask(struct crypto_attr_type *algt,
+                                 u32 *type, u32 *mask)
 {
-       struct crypto_attr_type *algt;
+       /*
+        * cryptd is allowed to wrap internal algorithms, but in that case the
+        * resulting cryptd instance will be marked as internal as well.
+        */
+       *type = algt->type & CRYPTO_ALG_INTERNAL;
+       *mask = algt->mask & CRYPTO_ALG_INTERNAL;
 
-       algt = crypto_get_attr_type(tb);
-       if (IS_ERR(algt))
-               return;
+       /* No point in cryptd wrapping an algorithm that's already async. */
+       *mask |= CRYPTO_ALG_ASYNC;
 
-       *type |= algt->type & CRYPTO_ALG_INTERNAL;
-       *mask |= algt->mask & CRYPTO_ALG_INTERNAL;
-}
-
-static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
-                                  const u8 *key, unsigned int keylen)
-{
-       struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
-       struct crypto_blkcipher *child = ctx->child;
-       int err;
-
-       crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
-       crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
-                                         CRYPTO_TFM_REQ_MASK);
-       err = crypto_blkcipher_setkey(child, key, keylen);
-       crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
-                                           CRYPTO_TFM_RES_MASK);
-       return err;
-}
-
-static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
-                                  struct crypto_blkcipher *child,
-                                  int err,
-                                  int (*crypt)(struct blkcipher_desc *desc,
-                                               struct scatterlist *dst,
-                                               struct scatterlist *src,
-                                               unsigned int len))
-{
-       struct cryptd_blkcipher_request_ctx *rctx;
-       struct cryptd_blkcipher_ctx *ctx;
-       struct crypto_ablkcipher *tfm;
-       struct blkcipher_desc desc;
-       int refcnt;
-
-       rctx = ablkcipher_request_ctx(req);
-
-       if (unlikely(err == -EINPROGRESS))
-               goto out;
-
-       desc.tfm = child;
-       desc.info = req->info;
-       desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
-
-       err = crypt(&desc, req->dst, req->src, req->nbytes);
-
-       req->base.complete = rctx->complete;
-
-out:
-       tfm = crypto_ablkcipher_reqtfm(req);
-       ctx = crypto_ablkcipher_ctx(tfm);
-       refcnt = atomic_read(&ctx->refcnt);
-
-       local_bh_disable();
-       rctx->complete(&req->base, err);
-       local_bh_enable();
-
-       if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
-               crypto_free_ablkcipher(tfm);
-}
-
-static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
-{
-       struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
-       struct crypto_blkcipher *child = ctx->child;
-
-       cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
-                              crypto_blkcipher_crt(child)->encrypt);
-}
-
-static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
-{
-       struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
-       struct crypto_blkcipher *child = ctx->child;
-
-       cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
-                              crypto_blkcipher_crt(child)->decrypt);
-}
-
-static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
-                                   crypto_completion_t compl)
-{
-       struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
-       struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
-       struct cryptd_queue *queue;
-
-       queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
-       rctx->complete = req->base.complete;
-       req->base.complete = compl;
-
-       return cryptd_enqueue_request(queue, &req->base);
-}
-
-static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
-{
-       return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
-}
-
-static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
-{
-       return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
-}
-
-static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
-{
-       struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
-       struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
-       struct crypto_spawn *spawn = &ictx->spawn;
-       struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
-       struct crypto_blkcipher *cipher;
-
-       cipher = crypto_spawn_blkcipher(spawn);
-       if (IS_ERR(cipher))
-               return PTR_ERR(cipher);
-
-       ctx->child = cipher;
-       tfm->crt_ablkcipher.reqsize =
-               sizeof(struct cryptd_blkcipher_request_ctx);
-       return 0;
-}
-
-static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
-{
-       struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
-       crypto_free_blkcipher(ctx->child);
+       *mask |= crypto_algt_inherited_mask(algt);
 }
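cryptd_type_and_mask() now receives the already-parsed attribute type instead of re-reading tb, and additionally folds in the inherited flags. A sketch of how the create paths below consume it, condensed from cryptd_create() later in the patch:

    /* Condensed fragment from the caller's side (not a full function). */
    struct crypto_attr_type *algt = crypto_get_attr_type(tb);
    u32 type, mask;

    if (IS_ERR(algt))
            return PTR_ERR(algt);

    cryptd_type_and_mask(algt, &type, &mask);
    /* type/mask now preserve CRYPTO_ALG_INTERNAL as requested, refuse
     * algorithms that are already CRYPTO_ALG_ASYNC, and propagate the
     * inherited flags (e.g. CRYPTO_ALG_ALLOCATES_MEMORY). */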
 
 static int cryptd_init_instance(struct crypto_instance *inst,
@@ ... @@
        return 0;
 }
 
-static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
-                                  unsigned int tail)
-{
-       char *p;
-       struct crypto_instance *inst;
-       int err;
-
-       p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
-       if (!p)
-               return ERR_PTR(-ENOMEM);
-
-       inst = (void *)(p + head);
-
-       err = cryptd_init_instance(inst, alg);
-       if (err)
-               goto out_free_inst;
-
-out:
-       return p;
-
-out_free_inst:
-       kfree(p);
-       p = ERR_PTR(err);
-       goto out;
-}
-
-static int cryptd_create_blkcipher(struct crypto_template *tmpl,
-                                  struct rtattr **tb,
-                                  struct cryptd_queue *queue)
-{
-       struct cryptd_instance_ctx *ctx;
-       struct crypto_instance *inst;
-       struct crypto_alg *alg;
-       u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-       u32 mask = CRYPTO_ALG_TYPE_MASK;
-       int err;
-
-       cryptd_check_internal(tb, &type, &mask);
-
-       alg = crypto_get_attr_alg(tb, type, mask);
-       if (IS_ERR(alg))
-               return PTR_ERR(alg);
-
-       inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
-       err = PTR_ERR(inst);
-       if (IS_ERR(inst))
-               goto out_put_alg;
-
-       ctx = crypto_instance_ctx(inst);
-       ctx->queue = queue;
-
-       err = crypto_init_spawn(&ctx->spawn, alg, inst,
-                               CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
-       if (err)
-               goto out_free_inst;
-
-       type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
-       if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
-               type |= CRYPTO_ALG_INTERNAL;
-       inst->alg.cra_flags = type;
-       inst->alg.cra_type = &crypto_ablkcipher_type;
-
-       inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
-       inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
-       inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
-
-       inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;
-
-       inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);
-
-       inst->alg.cra_init = cryptd_blkcipher_init_tfm;
-       inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;
-
-       inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
-       inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
-       inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;
-
-       err = crypto_register_instance(tmpl, inst);
-       if (err) {
-               crypto_drop_spawn(&ctx->spawn);
-out_free_inst:
-               kfree(inst);
-       }
-
-out_put_alg:
-       crypto_mod_put(alg);
-       return err;
-}
-
 static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
                                   const u8 *key, unsigned int keylen)
 {
        struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
        struct crypto_skcipher *child = ctx->child;
-       int err;
 
        crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
-       crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
-                                        CRYPTO_TFM_REQ_MASK);
-       err = crypto_skcipher_setkey(child, key, keylen);
-       crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
-                                         CRYPTO_TFM_RES_MASK);
-       return err;
+       crypto_skcipher_set_flags(child,
+                                 crypto_skcipher_get_flags(parent) &
+                                 CRYPTO_TFM_REQ_MASK);
+       return crypto_skcipher_setkey(child, key, keylen);
 }
 
 static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
@@ ... @@
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
-       int refcnt = atomic_read(&ctx->refcnt);
+       int refcnt = refcount_read(&ctx->refcnt);
 
        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();
 
-       if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+       if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
                crypto_free_skcipher(tfm);
 }
 
@@ ... @@
        struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+       struct skcipher_request *subreq = &rctx->req;
        struct crypto_skcipher *child = ctx->child;
-       SKCIPHER_REQUEST_ON_STACK(subreq, child);
 
        if (unlikely(err == -EINPROGRESS))
                goto out;
@@ ... @@
        struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+       struct skcipher_request *subreq = &rctx->req;
        struct crypto_skcipher *child = ctx->child;
-       SKCIPHER_REQUEST_ON_STACK(subreq, child);
 
        if (unlikely(err == -EINPROGRESS))
                goto out;
@@ ... @@
 
        ctx->child = cipher;
        crypto_skcipher_set_reqsize(
-               tfm, sizeof(struct cryptd_skcipher_request_ctx));
+               tfm, sizeof(struct cryptd_skcipher_request_ctx) +
+                    crypto_skcipher_reqsize(cipher));
        return 0;
 }
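With the child skcipher_request embedded in the request context, replacing the on-stack SKCIPHER_REQUEST_ON_STACK helper and its fixed size assumptions, the parent's request size must also cover the child's own per-request context, hence the sum above. The implied per-request layout, as a sketch:

    /*
     * Layout sketch: the child request header is the last member of the
     * request context, and crypto_skcipher_reqsize(cipher) extra bytes
     * follow it for the child's own per-request context.
     */
    struct cryptd_skcipher_request_ctx {
            crypto_completion_t complete;
            struct skcipher_request req;    /* child request, keep last */
    };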
 
@@ ... @@
 static int cryptd_create_skcipher(struct crypto_template *tmpl,
                                   struct rtattr **tb,
+                                  struct crypto_attr_type *algt,
                                   struct cryptd_queue *queue)
 {
        struct skcipherd_instance_ctx *ctx;
        struct skcipher_instance *inst;
        struct skcipher_alg *alg;
-       const char *name;
        u32 type;
        u32 mask;
        int err;
 
-       type = 0;
-       mask = CRYPTO_ALG_ASYNC;
-
-       cryptd_check_internal(tb, &type, &mask);
-
-       name = crypto_attr_alg_name(tb[1]);
-       if (IS_ERR(name))
-               return PTR_ERR(name);
+       cryptd_type_and_mask(algt, &type, &mask);
 
        inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
        if (!inst)
@@ ... @@
        ctx = skcipher_instance_ctx(inst);
        ctx->queue = queue;
 
-       crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
-       err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
+       err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
+                                  crypto_attr_alg_name(tb[1]), type, mask);
        if (err)
-               goto out_free_inst;
+               goto err_free_inst;
 
        alg = crypto_spawn_skcipher_alg(&ctx->spawn);
        err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
        if (err)
-               goto out_drop_skcipher;
+               goto err_free_inst;
 
-       inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
-                                  (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
-
+       inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
+               (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
        inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
        inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
        inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
@@ ... @@
 
        err = skcipher_register_instance(tmpl, inst);
        if (err) {
-out_drop_skcipher:
-               crypto_drop_skcipher(&ctx->spawn);
-out_free_inst:
-               kfree(inst);
+err_free_inst:
+               cryptd_skcipher_free(inst);
        }
        return err;
 }
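Instances of this template are still reached by name through the regular crypto API. A minimal usage sketch; error handling is trimmed and the algorithm name is just an example:

    static int example_alloc(void)
    {
            struct crypto_skcipher *tfm;

            /* wraps a synchronous cbc(aes) implementation; the result is an
             * async tfm whose requests run on the cryptd workqueue */
            tfm = crypto_alloc_skcipher("cryptd(cbc(aes))", 0, 0);
            if (IS_ERR(tfm))
                    return PTR_ERR(tfm);

            crypto_free_skcipher(tfm);
            return 0;
    }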
@@ ... @@
 {
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
        struct crypto_shash *child = ctx->child;
-       int err;
 
        crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
                                      CRYPTO_TFM_REQ_MASK);
-       err = crypto_shash_setkey(child, key, keylen);
-       crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
-                              CRYPTO_TFM_RES_MASK);
-       return err;
+       return crypto_shash_setkey(child, key, keylen);
 }
 
 static int cryptd_hash_enqueue(struct ahash_request *req,
@@ ... @@
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
-       int refcnt = atomic_read(&ctx->refcnt);
+       int refcnt = refcount_read(&ctx->refcnt);
 
        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();
 
-       if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+       if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
                crypto_free_ahash(tfm);
 }
 
@@ ... @@
                goto out;
 
        desc->tfm = child;
-       desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 
        err = crypto_shash_init(desc);
 
@@ ... @@
                goto out;
 
        desc->tfm = child;
-       desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 
        err = shash_ahash_digest(req, desc);
 
@@ ... @@
        struct shash_desc *desc = cryptd_shash_desc(req);
 
        desc->tfm = ctx->child;
-       desc->flags = req->base.flags;
 
        return crypto_shash_import(desc, in);
 }
 
+static void cryptd_hash_free(struct ahash_instance *inst)
+{
+       struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);
+
+       crypto_drop_shash(&ctx->spawn);
+       kfree(inst);
+}
+
 static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
+                             struct crypto_attr_type *algt,
                              struct cryptd_queue *queue)
 {
        struct hashd_instance_ctx *ctx;
        struct ahash_instance *inst;
-       struct shash_alg *salg;
-       struct crypto_alg *alg;
-       u32 type = 0;
-       u32 mask = 0;
+       struct shash_alg *alg;
+       u32 type;
+       u32 mask;
        int err;
 
-       cryptd_check_internal(tb, &type, &mask);
+       cryptd_type_and_mask(algt, &type, &mask);
 
-       salg = shash_attr_alg(tb[1], type, mask);
-       if (IS_ERR(salg))
-               return PTR_ERR(salg);
-
-       alg = &salg->base;
-       inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
-                                    sizeof(*ctx));
-       err = PTR_ERR(inst);
-       if (IS_ERR(inst))
-               goto out_put_alg;
+       inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
+       if (!inst)
+               return -ENOMEM;
 
        ctx = ahash_instance_ctx(inst);
        ctx->queue = queue;
 
-       err = crypto_init_shash_spawn(&ctx->spawn, salg,
-                                     ahash_crypto_instance(inst));
+       err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
+                               crypto_attr_alg_name(tb[1]), type, mask);
        if (err)
-               goto out_free_inst;
+               goto err_free_inst;
+       alg = crypto_spawn_shash_alg(&ctx->spawn);
 
-       inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
-                                       (alg->cra_flags & (CRYPTO_ALG_INTERNAL |
-                                                          CRYPTO_ALG_OPTIONAL_KEY));
+       err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
+       if (err)
+               goto err_free_inst;
 
-       inst->alg.halg.digestsize = salg->digestsize;
-       inst->alg.halg.statesize = salg->statesize;
+       inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC |
+               (alg->base.cra_flags & (CRYPTO_ALG_INTERNAL|
+                                       CRYPTO_ALG_OPTIONAL_KEY));
+       inst->alg.halg.digestsize = alg->digestsize;
+       inst->alg.halg.statesize = alg->statesize;
        inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);
 
        inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
@@ ... @@
        inst->alg.finup = cryptd_hash_finup_enqueue;
        inst->alg.export = cryptd_hash_export;
        inst->alg.import = cryptd_hash_import;
-       if (crypto_shash_alg_has_setkey(salg))
+       if (crypto_shash_alg_has_setkey(alg))
                inst->alg.setkey = cryptd_hash_setkey;
        inst->alg.digest = cryptd_hash_digest_enqueue;
 
+       inst->free = cryptd_hash_free;
+
        err = ahash_register_instance(tmpl, inst);
        if (err) {
-               crypto_drop_shash(&ctx->spawn);
-out_free_inst:
-               kfree(inst);
+err_free_inst:
+               cryptd_hash_free(inst);
        }
-
-out_put_alg:
-       crypto_mod_put(alg);
        return err;
 }
 
@@ ... @@
 
 out:
        ctx = crypto_aead_ctx(tfm);
-       refcnt = atomic_read(&ctx->refcnt);
+       refcnt = refcount_read(&ctx->refcnt);
 
        local_bh_disable();
        compl(&req->base, err);
        local_bh_enable();
 
-       if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+       if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
                crypto_free_aead(tfm);
 }
 
@@ ... @@
        crypto_free_aead(ctx->child);
 }
 
+static void cryptd_aead_free(struct aead_instance *inst)
+{
+       struct aead_instance_ctx *ctx = aead_instance_ctx(inst);
+
+       crypto_drop_aead(&ctx->aead_spawn);
+       kfree(inst);
+}
+
 static int cryptd_create_aead(struct crypto_template *tmpl,
                               struct rtattr **tb,
+                              struct crypto_attr_type *algt,
                               struct cryptd_queue *queue)
 {
        struct aead_instance_ctx *ctx;
        struct aead_instance *inst;
        struct aead_alg *alg;
-       const char *name;
-       u32 type = 0;
-       u32 mask = CRYPTO_ALG_ASYNC;
+       u32 type;
+       u32 mask;
        int err;
 
-       cryptd_check_internal(tb, &type, &mask);
-
-       name = crypto_attr_alg_name(tb[1]);
-       if (IS_ERR(name))
-               return PTR_ERR(name);
+       cryptd_type_and_mask(algt, &type, &mask);
 
        inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
        if (!inst)
@@ ... @@
        ctx = aead_instance_ctx(inst);
        ctx->queue = queue;
 
-       crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
-       err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
+       err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
+                              crypto_attr_alg_name(tb[1]), type, mask);
        if (err)
-               goto out_free_inst;
+               goto err_free_inst;
 
        alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
        err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
        if (err)
-               goto out_drop_aead;
+               goto err_free_inst;
 
-       inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
-                                  (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
+       inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
+               (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
        inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);
 
        inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
@@ ... @@
        inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
        inst->alg.decrypt = cryptd_aead_decrypt_enqueue;
 
+       inst->free = cryptd_aead_free;
+
        err = aead_register_instance(tmpl, inst);
        if (err) {
-out_drop_aead:
-               crypto_drop_aead(&ctx->aead_spawn);
-out_free_inst:
-               kfree(inst);
+err_free_inst:
+               cryptd_aead_free(inst);
        }
        return err;
 }
@@ ... @@
                return PTR_ERR(algt);
 
        switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
-       case CRYPTO_ALG_TYPE_BLKCIPHER:
-               if ((algt->type & CRYPTO_ALG_TYPE_MASK) ==
-                   CRYPTO_ALG_TYPE_BLKCIPHER)
-                       return cryptd_create_blkcipher(tmpl, tb, &queue);
-
-               return cryptd_create_skcipher(tmpl, tb, &queue);
-       case CRYPTO_ALG_TYPE_DIGEST:
-               return cryptd_create_hash(tmpl, tb, &queue);
+       case CRYPTO_ALG_TYPE_SKCIPHER:
+               return cryptd_create_skcipher(tmpl, tb, algt, &queue);
+       case CRYPTO_ALG_TYPE_HASH:
+               return cryptd_create_hash(tmpl, tb, algt, &queue);
        case CRYPTO_ALG_TYPE_AEAD:
-               return cryptd_create_aead(tmpl, tb, &queue);
+               return cryptd_create_aead(tmpl, tb, algt, &queue);
        }
 
        return -EINVAL;
 }
 
-static void cryptd_free(struct crypto_instance *inst)
-{
-       struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
-       struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
-       struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);
-
-       switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
-       case CRYPTO_ALG_TYPE_AHASH:
-               crypto_drop_shash(&hctx->spawn);
-               kfree(ahash_instance(inst));
-               return;
-       case CRYPTO_ALG_TYPE_AEAD:
-               crypto_drop_aead(&aead_ctx->aead_spawn);
-               kfree(aead_instance(inst));
-               return;
-       default:
-               crypto_drop_spawn(&ctx->spawn);
-               kfree(inst);
-       }
-}
-
 static struct crypto_template cryptd_tmpl = {
        .name = "cryptd",
        .create = cryptd_create,
-       .free = cryptd_free,
        .module = THIS_MODULE,
 };
-
-struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
-                                                 u32 type, u32 mask)
-{
-       char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
-       struct cryptd_blkcipher_ctx *ctx;
-       struct crypto_tfm *tfm;
-
-       if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
-                    "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
-               return ERR_PTR(-EINVAL);
-       type = crypto_skcipher_type(type);
-       mask &= ~CRYPTO_ALG_TYPE_MASK;
-       mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
-       tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
-       if (IS_ERR(tfm))
-               return ERR_CAST(tfm);
-       if (tfm->__crt_alg->cra_module != THIS_MODULE) {
-               crypto_free_tfm(tfm);
-               return ERR_PTR(-EINVAL);
-       }
-
-       ctx = crypto_tfm_ctx(tfm);
-       atomic_set(&ctx->refcnt, 1);
-
-       return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
-}
-EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
-
-struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
-{
-       struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
-       return ctx->child;
-}
-EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);
-
-bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
-{
-       struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
-
-       return atomic_read(&ctx->refcnt) - 1;
-}
-EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);
-
-void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
-{
-       struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
-
-       if (atomic_dec_and_test(&ctx->refcnt))
-               crypto_free_ablkcipher(&tfm->base);
-}
-EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
 
 struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
                                               u32 type, u32 mask)
@@ ... @@
        }
 
        ctx = crypto_skcipher_ctx(tfm);
-       atomic_set(&ctx->refcnt, 1);
+       refcount_set(&ctx->refcnt, 1);
 
        return container_of(tfm, struct cryptd_skcipher, base);
 }
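With the ablkcipher helpers gone, callers move to the skcipher equivalents that survive below. A hedged usage sketch; "__cbc-aes-aesni" merely stands in for whatever CRYPTO_ALG_INTERNAL implementation a driver actually wraps:

    static int example_cryptd_user(void)
    {
            struct cryptd_skcipher *ctfm;

            /* the algorithm name here is illustrative only */
            ctfm = cryptd_alloc_skcipher("__cbc-aes-aesni",
                                         CRYPTO_ALG_INTERNAL,
                                         CRYPTO_ALG_INTERNAL);
            if (IS_ERR(ctfm))
                    return PTR_ERR(ctfm);

            /* drops the reference set up by cryptd_alloc_skcipher() */
            cryptd_free_skcipher(ctfm);
            return 0;
    }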
@@ ... @@
 {
        struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
 
-       return atomic_read(&ctx->refcnt) - 1;
+       return refcount_read(&ctx->refcnt) - 1;
 }
 EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);
 
@@ ... @@
 {
        struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
 
-       if (atomic_dec_and_test(&ctx->refcnt))
+       if (refcount_dec_and_test(&ctx->refcnt))
                crypto_free_skcipher(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
@@ ... @@
        }
 
        ctx = crypto_ahash_ctx(tfm);
-       atomic_set(&ctx->refcnt, 1);
+       refcount_set(&ctx->refcnt, 1);
 
        return __cryptd_ahash_cast(tfm);
 }
@@ ... @@
 {
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
 
-       return atomic_read(&ctx->refcnt) - 1;
+       return refcount_read(&ctx->refcnt) - 1;
 }
 EXPORT_SYMBOL_GPL(cryptd_ahash_queued);
 
@@ ... @@
 {
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
 
-       if (atomic_dec_and_test(&ctx->refcnt))
+       if (refcount_dec_and_test(&ctx->refcnt))
                crypto_free_ahash(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_ahash);
@@ ... @@
        }
 
        ctx = crypto_aead_ctx(tfm);
-       atomic_set(&ctx->refcnt, 1);
+       refcount_set(&ctx->refcnt, 1);
 
        return __cryptd_aead_cast(tfm);
 }
@@ ... @@
 {
        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
 
-       return atomic_read(&ctx->refcnt) - 1;
+       return refcount_read(&ctx->refcnt) - 1;
 }
 EXPORT_SYMBOL_GPL(cryptd_aead_queued);
 
@@ ... @@
 {
        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
 
-       if (atomic_dec_and_test(&ctx->refcnt))
+       if (refcount_dec_and_test(&ctx->refcnt))
                crypto_free_aead(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_aead);
@@ ... @@
 {
        int err;
 
+       cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
+                                   1);
+       if (!cryptd_wq)
+               return -ENOMEM;
+
        err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
        if (err)
-               return err;
+               goto err_destroy_wq;
 
        err = crypto_register_template(&cryptd_tmpl);
        if (err)
-               cryptd_fini_queue(&queue);
+               goto err_fini_queue;
 
+       return 0;
+
+err_fini_queue:
+       cryptd_fini_queue(&queue);
+err_destroy_wq:
+       destroy_workqueue(cryptd_wq);
        return err;
 }
 
 static void __exit cryptd_exit(void)
 {
+       destroy_workqueue(cryptd_wq);
        cryptd_fini_queue(&queue);
        crypto_unregister_template(&cryptd_tmpl);
 }
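cryptd previously shared kcrypto_wq from crypto/crypto_wq.h; it now owns a dedicated workqueue, and cryptd_exit() destroys that workqueue, flushing any pending work, before freeing the per-CPU queues. A recap of the allocation above; the comments are editorial reading of the flags, not text from the patch:

    cryptd_wq = alloc_workqueue("cryptd",
                                WQ_MEM_RECLAIM |    /* stays usable on memory-reclaim paths */
                                WQ_CPU_INTENSIVE,   /* long-running work doesn't stall the
                                                     * per-CPU worker pool's concurrency */
                                1);                 /* max_active: one work item in flight
                                                     * per CPU, matching the worker's
                                                     * one-request-at-a-time dequeue */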
---|