// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

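/*
 * Registration-time ("test") larvals are created with cra_driver_name
 * copied from the algorithm under test, while lookup larvals carry only
 * cra_name, so a non-empty driver name identifies a test larval.
 */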
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

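/*
 * Matching sketch (illustrative, not part of the original file):
 * ((q->cra_flags ^ type) & mask) == 0 holds exactly when q's flags
 * agree with @type on every bit set in @mask. For example, with
 * type = CRYPTO_ALG_TYPE_SHASH and mask = CRYPTO_ALG_TYPE_MASK, only
 * shash algorithms survive the filter; the loop then prefers an exact
 * cra_driver_name match (e.g. "sha256-generic") over the
 * highest-priority cra_name match (e.g. "sha256").
 */
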
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

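/*
 * A larval is a temporary stand-in registered under an algorithm's name
 * while the real ("adult") algorithm is still being loaded, constructed
 * or tested. Concurrent lookups find the larval and sleep on its
 * completion instead of triggering duplicate module loads.
 */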
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

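/*
 * Unless the caller explicitly asked about CRYPTO_ALG_TESTED, look only
 * for algorithms that have passed their self-tests. If that finds
 * nothing, a second, untested lookup distinguishes an algorithm still
 * under test (a larval, worth waiting for) from one that failed its
 * tests (reported as -ELIBBAD).
 */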
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

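/*
 * Hand the event to the notifier chain (normally the cryptomgr module),
 * e.g. to instantiate a template such as "hmac(sha256)". If no listener
 * was registered (NOTIFY_DONE), load cryptomgr and retry once.
 */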
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

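/*
 * Illustrative sketch (not part of the original file): per the comment
 * in crypto_alg_mod_lookup(), a caller that can accept either an
 * internal or a regular implementation would pass
 *
 *	alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_INTERNAL,
 *				    mask & ~CRYPTO_ALG_INTERNAL);
 *
 * while a caller that sets neither bit gets CRYPTO_ALG_INTERNAL added
 * to its mask above and so only ever sees regular algorithms.
 */
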
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

static void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

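/*
 * Hedged usage sketch (illustrative, not part of the original file):
 * allocating a bare cipher transform with this legacy interface and
 * releasing it again. The algorithm name "aes" and the helper name are
 * examples only; new code should prefer the type-specific allocators
 * such as crypto_alloc_skcipher().
 */
static inline int crypto_api_example_alloc_base(void)
{
	struct crypto_tfm *tfm;

	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
				CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... use the transform, then drop it and its algorithm ref ... */
	crypto_destroy_tfm(tfm, tfm);
	return 0;
}
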
void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

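/*
 * Hedged usage sketch (illustrative, not part of the original file):
 * crypto_alloc_tfm() is meant to be called through a type-specific
 * frontend rather than directly. The shash frontend, for instance,
 * is roughly
 *
 *	struct crypto_shash *crypto_alloc_shash(const char *alg_name,
 *						u32 type, u32 mask)
 *	{
 *		return crypto_alloc_tfm(alg_name, &crypto_shash_type,
 *					type, mask);
 *	}
 */
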
/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

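/*
 * Hedged usage sketch (illustrative, not part of the original file):
 * probing for an algorithm without keeping a reference to it. The
 * algorithm name "sha256" is an example.
 *
 *	if (!crypto_has_alg("sha256", 0, 0))
 *		return -ENOENT;
 */
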
void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

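/*
 * Hedged usage sketch (illustrative, not part of the original file):
 * crypto_req_done() is the completion callback used to turn an
 * asynchronous request into a synchronous call via crypto_wait_req().
 * The skcipher request below is an assumption and needs
 * <crypto/skcipher.h>:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */
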
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
MODULE_SOFTDEP("pre: cryptomgr");