// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

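/*
 * Informal overview: cryptd wraps a synchronous algorithm and performs the
 * actual work from a per-CPU work queue, presenting an asynchronous
 * interface.  Instances are created through the "cryptd" template defined
 * below, so a caller can, for example, allocate "cryptd(sha256)" via
 * cryptd_alloc_ahash() (the algorithm name here is illustrative).
 */
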
254eff77 34struct cryptd_cpu_queue {
124b53d0 35 struct crypto_queue queue;
254eff77
HY
36 struct work_struct work;
37};
38
39struct cryptd_queue {
a29d8b8e 40 struct cryptd_cpu_queue __percpu *cpu_queue;
124b53d0
HX
41};
42
43struct cryptd_instance_ctx {
44 struct crypto_spawn spawn;
254eff77 45 struct cryptd_queue *queue;
124b53d0
HX
46};
47
4e0958d1
HX
48struct skcipherd_instance_ctx {
49 struct crypto_skcipher_spawn spawn;
50 struct cryptd_queue *queue;
51};
52
46309d89
HX
53struct hashd_instance_ctx {
54 struct crypto_shash_spawn spawn;
55 struct cryptd_queue *queue;
56};
57
298c926c
AH
58struct aead_instance_ctx {
59 struct crypto_aead_spawn aead_spawn;
60 struct cryptd_queue *queue;
61};
62
4e0958d1
HX
63struct cryptd_skcipher_ctx {
64 atomic_t refcnt;
36b3875a 65 struct crypto_sync_skcipher *child;
4e0958d1
HX
66};
67
68struct cryptd_skcipher_request_ctx {
69 crypto_completion_t complete;
70};
71
b8a28251 72struct cryptd_hash_ctx {
81760ea6 73 atomic_t refcnt;
46309d89 74 struct crypto_shash *child;
b8a28251
LH
75};
76
77struct cryptd_hash_request_ctx {
78 crypto_completion_t complete;
46309d89 79 struct shash_desc desc;
b8a28251 80};
124b53d0 81
298c926c 82struct cryptd_aead_ctx {
81760ea6 83 atomic_t refcnt;
298c926c
AH
84 struct crypto_aead *child;
85};
86
87struct cryptd_aead_request_ctx {
88 crypto_completion_t complete;
89};
90
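/*
 * Note on layout: cryptd_skcipher_ctx, cryptd_hash_ctx and cryptd_aead_ctx
 * all place an atomic_t refcnt first.  cryptd_enqueue_request() relies on
 * this when it casts the opaque tfm context to a bare refcount, so any new
 * per-type context must keep refcnt as its first member.
 */
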
static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	atomic_t *refcnt;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out_put_cpu;

	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}

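/*
 * Informal note: crypto_enqueue_request() returns -EINPROGRESS when the
 * request was queued, -EBUSY when it was placed on the backlog (requests
 * marked CRYPTO_TFM_REQ_MAY_BACKLOG), and -ENOSPC when the queue is full;
 * only the last case skips scheduling the worker above.
 */
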
/*
 * Called in workqueue context: do one unit of real crypto work (via
 * req->complete) and reschedule itself if there is more work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging the crypto
	 * workqueue.  preempt_disable/enable prevents us from being
	 * preempted by cryptd_enqueue_request(); local_bh_disable/enable
	 * prevents cryptd_enqueue_request() from being entered from a
	 * software interrupt while we manipulate the queue.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}

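/*
 * Completion convention (informal): the worker invokes the type-specific
 * callbacks below via req->complete() with err == 0 to perform the actual
 * operation, while backlogged requests first receive a -EINPROGRESS
 * notification.  That is why each callback short-circuits on
 * err == -EINPROGRESS and only forwards the notification.
 */
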
static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

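/*
 * Fill in the common crypto_alg fields for a cryptd instance.  The +50
 * priority bump below makes the asynchronous "cryptd(...)" instance win
 * over the underlying synchronous algorithm in priority-based lookups.
 */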
static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}

static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;
	int err;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_sync_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent,
				  crypto_sync_skcipher_get_flags(child) &
				  CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	u32 type;
	u32 mask;
	int err;

	type = 0;
	mask = CRYPTO_ALG_ASYNC;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_skcipher;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_drop_skcipher:
		crypto_drop_skcipher(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}

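/*
 * Note: the shash_desc used by all of the hash callbacks above lives in
 * the per-request context (rctx->desc), so every in-flight ahash request
 * carries its own hash state; export/import simply forward that state to
 * and from the child shash.
 */
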
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
		(alg->cra_flags & (CRYPTO_ALG_INTERNAL |
				   CRYPTO_ALG_OPTIONAL_KEY));

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final = cryptd_hash_final_enqueue;
	inst->alg.finup = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(salg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

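/*
 * Note: unlike the skcipher/hash paths, the AEAD path reuses the original
 * request with the child tfm swapped in; cryptd_aead_init_tfm() below sizes
 * the request context as the max() of both so the child can safely use it.
 */
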
static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

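/*
 * Usage sketch for the skcipher helpers above (illustrative only; the
 * algorithm name and error handling are this example's assumptions):
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	// submit skcipher requests against &ctfm->base;
 *	// cryptd_skcipher_queued() reports whether any are still pending
 *	cryptd_free_skcipher(ctfm);
 */
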
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

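/*
 * Usage sketch for the ahash helpers above (illustrative; "sha256" and the
 * surrounding error handling are this example's assumptions):
 *
 *	struct cryptd_ahash *ctfm;
 *
 *	ctfm = cryptd_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	// drive &ctfm->base with ahash requests; cryptd_ahash_child()
 *	// exposes the underlying shash for synchronous fallbacks
 *	cryptd_free_ahash(ctfm);
 */
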
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;

	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

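/*
 * Usage sketch for the AEAD helpers above (illustrative; "gcm(aes)" is
 * this example's assumption):
 *
 *	struct cryptd_aead *ctfm = cryptd_alloc_aead("gcm(aes)", 0, 0);
 *
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	// issue aead requests against &ctfm->base
 *	cryptd_free_aead(ctfm);
 */
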
static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");