// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

static struct workqueue_struct *cryptd_wq;

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	/*
	 * Protected by disabling BH to allow enqueueing from softirq and
	 * dequeuing from kworker (cryptd_queue_worker()).
	 */
	struct cryptd_cpu_queue __percpu *cpu_queue;
};
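
/*
 * Design note (editorial): each CPU gets its own request queue and work
 * item, so a request is queued and later processed on the CPU that
 * submitted it; together with the BH rule above this keeps the queues
 * free of explicit locks.
 */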

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}
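
/*
 * Enqueue a request on the calling CPU's queue. By convention every
 * cryptd context structure starts with a refcount_t, so the tfm context
 * can be treated as a bare refcount here: once the first user has made
 * it non-zero, one reference is taken per queued request so the
 * transform cannot be freed while requests are still in flight.
 */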
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	local_bh_disable();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out;

	queue_work_on(smp_processor_id(), cryptd_wq, &cpu_queue->work);

	if (!refcount_read(refcnt))
		goto out;

	refcount_inc(refcnt);

out:
	local_bh_enable();

	return err;
}

/*
 * Called in workqueue context, do one real cryption work (via
 * req->complete) and reschedule itself if there is more work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 */
	local_bh_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}

static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;
	int err;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_sync_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent,
				  crypto_sync_skcipher_get_flags(child) &
				  CRYPTO_TFM_RES_MASK);
	return err;
}
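
/*
 * Run the saved completion callback with BH disabled (matching the
 * queue's locking rule) and drop the per-request tfm reference once the
 * request is truly finished, i.e. err is anything but -EINPROGRESS.
 */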
static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}
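
/*
 * Defer a request to the cryptd workqueue: stash the caller's completion
 * callback in the request context and substitute the worker function
 * (cryptd_skcipher_encrypt/decrypt) as the "completion", which the queue
 * worker then invokes in process context.
 */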
static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));

	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}
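
/*
 * Build a "cryptd(...)" instance around an skcipher algorithm looked up
 * by name. The wrapper advertises itself as asynchronous and inherits
 * the geometry (IV size, key sizes, chunk size) of the algorithm it
 * wraps.
 */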
static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	u32 type;
	u32 mask;
	int err;

	type = 0;
	mask = CRYPTO_ALG_ASYNC;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_skcipher;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_drop_skcipher:
		crypto_drop_skcipher(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}
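
/*
 * Build a "cryptd(...)" ahash instance around a synchronous shash. The
 * wrapper presents the async ahash interface while the underlying
 * digest still runs synchronously, just deferred to the workqueue.
 */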
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
		(alg->cra_flags & (CRYPTO_ALG_INTERNAL |
				   CRYPTO_ALG_OPTIONAL_KEY));

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(salg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}
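
/*
 * Common worker for AEAD encryption and decryption: retarget the
 * request at the child transform, run the supplied crypt callback
 * synchronously, then fire the saved completion (with BH disabled) and
 * drop the tfm reference once the request is finished.
 */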
static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}
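
/*
 * The request context must be large enough for both cryptd's own
 * bookkeeping and whatever the child AEAD needs, hence the max() when
 * sizing it below.
 */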
static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};
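
/*
 * Allocate a cryptd wrapper around @alg_name. A minimal usage sketch
 * (editorial; the algorithm name below is only an example, and error
 * handling is elided):
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("__aes-cbc-example",
 *				     CRYPTO_ALG_INTERNAL,
 *				     CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...
 *	cryptd_free_skcipher(ctfm);
 */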
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);
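
/*
 * A refcount above the initial 1 means requests are still queued or in
 * flight for this transform, so "queued" is simply refcnt - 1 != 0.
 */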
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
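
/*
 * Module init: bring up the workqueue first, then the per-CPU queues,
 * then register the template; the error paths unwind the same steps in
 * reverse order.
 */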
static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);

err_destroy_wq:
	destroy_workqueue(cryptd_wq);

	return err;
}

static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");