/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

/* Maximum number of requests that may sit in each per-CPU queue. */
#define CRYPTD_MAX_CPU_QLEN 100

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;

	/* Queue on the local CPU and kick that CPU's worker. */
	cpu = get_cpu();
	cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
	err = crypto_enqueue_request(&cpu_queue->queue, request);
	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
	put_cpu();

	return err;
}

/* Called in workqueue context; does one unit of real crypto work (via
 * req->complete) and reschedules itself if there is more work to do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/* Only handle one request at a time to avoid hogging the crypto
	 * workqueue.  preempt_disable/enable is used to prevent being
	 * preempted by cryptd_enqueue_request(). */
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}

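/*
 * Request flow, for reference: cryptd_enqueue_request() queues a request on
 * the local CPU's crypto_queue and schedules that CPU's work item on
 * kcrypto_wq; cryptd_queue_worker() then dequeues one request per invocation
 * and invokes the substituted req->complete handler, which performs the real
 * synchronous operation and finally calls the caller's original completion.
 */
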
static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct blkcipher_desc desc;

	rctx = ablkcipher_request_ctx(req);

	/* -EINPROGRESS means this is the backlog notification; pass it on. */
	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}

static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t complete)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = complete;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}

static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
						     unsigned int tail)
{
	struct crypto_instance *inst;
	int err;

	inst = kzalloc(sizeof(*inst) + tail, GFP_KERNEL);
	if (!inst) {
		inst = ERR_PTR(-ENOMEM);
		goto out;
	}

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_inst;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

out:
	return inst;

out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_instance *cryptd_alloc_blkcipher(
	struct rtattr **tb, struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = cryptd_alloc_instance(alg, sizeof(*ctx));
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

out_put_alg:
	crypto_mod_put(alg);
	return inst;

out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out_put_alg;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	tfm->crt_ahash.reqsize = sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash);
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t complete)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = complete;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static struct crypto_instance *cryptd_alloc_hash(
	struct rtattr **tb, struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	int err;

	salg = shash_attr_alg(tb[1], 0, 0);
	if (IS_ERR(salg))
		return ERR_CAST(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, sizeof(*ctx));
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg, inst);
	if (err)
		goto out_free_inst;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC;
	inst->alg.cra_type = &crypto_ahash_type;

	inst->alg.cra_ahash.digestsize = salg->digestsize;
	inst->alg.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.cra_init = cryptd_hash_init_tfm;
	inst->alg.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.cra_ahash.init = cryptd_hash_init_enqueue;
	inst->alg.cra_ahash.update = cryptd_hash_update_enqueue;
	inst->alg.cra_ahash.final = cryptd_hash_final_enqueue;
	inst->alg.cra_ahash.setkey = cryptd_hash_setkey;
	inst->alg.cra_ahash.digest = cryptd_hash_digest_enqueue;

out_put_alg:
	crypto_mod_put(alg);
	return inst;

out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out_put_alg;
}

/* The single global cryptd queue, with one crypto_queue per CPU. */
static struct cryptd_queue queue;

static struct crypto_instance *cryptd_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_alloc_blkcipher(tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_alloc_hash(tb, &queue);
	}

	return ERR_PTR(-EINVAL);
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ctx->spawn);
	kfree(inst);
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.alloc = cryptd_alloc,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

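/*
 * Instances of this template are created on demand by name.  As an
 * illustrative sketch (the "sha1" child algorithm and the error handling
 * are assumptions, not part of this file), a user could obtain an async
 * hash backed by cryptd like so:
 *
 *	struct crypto_ahash *tfm;
 *
 *	tfm = crypto_alloc_ahash("cryptd(sha1)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 */
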
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);

struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);

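/*
 * Example usage of the three exports above (an illustrative sketch, not part
 * of this file's code): a caller wanting an asynchronous wrapper around an
 * existing synchronous cipher could do roughly the following.  The algorithm
 * name "cbc(aes)" and the key/keylen variables are assumptions for
 * illustration only.
 *
 *	struct cryptd_ablkcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	crypto_ablkcipher_setkey(&ctfm->base, key, keylen);
 *
 *	... submit ablkcipher requests against &ctfm->base; the synchronous
 *	child transform remains reachable via cryptd_ablkcipher_child(ctfm) ...
 *
 *	cryptd_free_ablkcipher(ctfm);
 */
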
static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

module_init(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");