// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

static const struct crypto_type crypto_shash_type;

static inline struct crypto_istat_hash *shash_get_stat(struct shash_alg *alg)
{
	return hash_get_stat(&alg->halg);
}

static inline int crypto_shash_errstat(struct shash_alg *alg, int err)
{
	return crypto_hash_errstat(&alg->halg, err);
}

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	err = shash->setkey(tfm, key, keylen);
	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
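
/*
 * Illustrative sketch (not part of this file): allocating and keying a
 * tfm from a caller's perspective.  "hmac(sha256)" and the helper name
 * are assumptions for the example.  Algorithms without a real ->setkey
 * return -ENOSYS here via shash_no_setkey() above.
 */
static int __maybe_unused example_shash_setkey(const u8 *key,
					       unsigned int keylen,
					       struct crypto_shash **tfm_ret)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, keylen);
	if (err) {
		crypto_free_shash(tfm);
		return err;
	}

	*tfm_ret = tfm;
	return 0;
}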

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_add(len, &shash_get_stat(shash)->hash_tlen);

	err = shash->update(desc, data, len);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_inc(&shash_get_stat(shash)->hash_cnt);

	err = shash->final(desc, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
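
/*
 * Illustrative sketch (not part of this file): the incremental
 * init/update/final pattern with an on-stack descriptor.  The helper
 * name and the two input buffers are hypothetical; @tfm is assumed to
 * come from crypto_alloc_shash().
 */
static int __maybe_unused example_shash_incremental(struct crypto_shash *tfm,
						    const u8 *a, unsigned int alen,
						    const u8 *b, unsigned int blen,
						    u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, a, alen) ?:
	      crypto_shash_update(desc, b, blen) ?:
	      crypto_shash_final(desc, out);

	shash_desc_zero(desc);
	return err;
}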

static int shash_default_finup(struct shash_desc *desc, const u8 *data,
			       unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->update(desc, data, len) ?:
	       shash->final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	err = shash->finup(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_default_digest(struct shash_desc *desc, const u8 *data,
				unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->init(desc) ?:
	       shash->finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		err = -ENOKEY;
	else
		err = shash->digest(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
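
/*
 * Illustrative sketch (not part of this file): one-shot hashing using the
 * crypto_shash_tfm_digest() convenience wrapper just above.  "sha256" and
 * the helper name are assumptions for the example.
 */
static int __maybe_unused example_shash_oneshot(const u8 *data,
						unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_tfm_digest(tfm, data, len, out);
	crypto_free_shash(tfm);
	return err;
}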

int crypto_shash_export(struct shash_desc *desc, void *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);

	if (shash->export)
		return shash->export(desc, out);

	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(tfm));
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_export);

int crypto_shash_import(struct shash_desc *desc, const void *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (shash->import)
		return shash->import(desc, in);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);
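
/*
 * Illustrative sketch (not part of this file): checkpointing a partial
 * hash with export/import so it can resume in a second descriptor.  The
 * helper name is hypothetical, and the sketch assumes the algorithm's
 * statesize fits in HASH_MAX_STATESIZE.
 */
static int __maybe_unused example_shash_resume(struct crypto_shash *tfm,
					       const u8 *data, unsigned int len,
					       u8 *out)
{
	u8 state[HASH_MAX_STATESIZE];
	SHASH_DESC_ON_STACK(d1, tfm);
	SHASH_DESC_ON_STACK(d2, tfm);
	int err;

	d1->tfm = tfm;
	d2->tfm = tfm;

	err = crypto_shash_init(d1) ?:
	      crypto_shash_update(d1, data, len) ?:
	      crypto_shash_export(d1, state) ?:	/* save the running state */
	      crypto_shash_import(d2, state) ?:	/* resume it elsewhere */
	      crypto_shash_final(d2, out);

	shash_desc_zero(d1);
	shash_desc_zero(d2);
	return err;
}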

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	/* Fast path: the whole request lies within a single page. */
	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_local_page(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_local(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

struct crypto_ahash *crypto_clone_shash_ops_async(struct crypto_ahash *nhash,
						  struct crypto_ahash *hash)
{
	struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
	struct crypto_shash **ctx = crypto_ahash_ctx(hash);
	struct crypto_shash *shash;

	shash = crypto_clone_shash(*ctx);
	if (IS_ERR(shash)) {
		crypto_free_ahash(nhash);
		return ERR_CAST(shash);
	}

	*nctx = shash;

	return nhash;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

static int __maybe_unused crypto_shash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static int __maybe_unused crypto_shash_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	return crypto_hash_report_stat(skb, alg, "shash");
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_shash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_shash_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);
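
/*
 * Illustrative sketch (not part of this file): probing for an algorithm
 * before depending on it.  "sha256" and the helper name are assumptions
 * for the example.
 */
static bool __maybe_unused example_have_sha256(void)
{
	return crypto_has_shash("sha256", 0, 0);
}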

struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
{
	struct crypto_tfm *tfm = crypto_shash_tfm(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *nhash;
	int err;

	if (!crypto_shash_alg_has_setkey(alg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
		return ERR_PTR(-ENOSYS);

	nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
	if (IS_ERR(nhash))
		return nhash;

	nhash->descsize = hash->descsize;

	if (alg->clone_tfm) {
		err = alg->clone_tfm(nhash, hash);
		if (err) {
			crypto_free_shash(nhash);
			return ERR_PTR(err);
		}
	}

	return nhash;
}
EXPORT_SYMBOL_GPL(crypto_clone_shash);
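
/*
 * Illustrative sketch (not part of this file): duplicating a keyed tfm.
 * The clone inherits the key without the caller ever holding the raw key
 * bytes.  The helper name and its use here are hypothetical.
 */
static int __maybe_unused example_clone_and_digest(struct crypto_shash *keyed,
						   const u8 *data,
						   unsigned int len, u8 *out)
{
	struct crypto_shash *copy = crypto_clone_shash(keyed);
	int err;

	if (IS_ERR(copy))
		return PTR_ERR(copy);

	err = crypto_shash_tfm_digest(copy, data, len, out);
	crypto_free_shash(copy);
	return err;
}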

int hash_prepare_alg(struct hash_alg_common *alg)
{
	struct crypto_istat_hash *istat = hash_get_stat(alg);
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
		return -EINVAL;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		memset(istat, 0, sizeof(*istat));

	return 0;
}

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->descsize > HASH_MAX_DESCSIZE)
		return -EINVAL;

	/* alignmask is not useful for shash, so it is not supported. */
	if (base->cra_alignmask)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_shash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	/*
	 * Handle missing optional functions.  For each one we can either
	 * install a default here, or we can leave the pointer as NULL and check
	 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
	 * when the default behavior is desired.  For ->finup and ->digest we
	 * install defaults, since for optimal performance algorithms should
	 * implement these anyway.  On the other hand, for ->import and
	 * ->export the common case and best performance comes from the simple
	 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
	 * leave them NULL and provide the memcpy with no indirect call.
	 */
	if (!alg->finup)
		alg->finup = shash_default_finup;
	if (!alg->digest)
		alg->digest = shash_default_digest;
	if (!alg->export)
		alg->halg.statesize = alg->descsize;
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}
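
/*
 * Illustrative sketch (not part of this file): the minimum a driver must
 * provide before calling crypto_register_shash().  Everything named
 * "example_sum*" is a hypothetical toy algorithm (a byte sum), not a real
 * hash; per the comment in shash_prepare_alg() above, ->finup, ->digest,
 * ->export, ->import and ->setkey may all be left NULL.
 */
struct example_sum_ctx {
	u32 sum;
};

static int example_sum_init(struct shash_desc *desc)
{
	struct example_sum_ctx *ctx = shash_desc_ctx(desc);

	ctx->sum = 0;
	return 0;
}

static int example_sum_update(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct example_sum_ctx *ctx = shash_desc_ctx(desc);

	while (len--)
		ctx->sum += *data++;
	return 0;
}

static int example_sum_final(struct shash_desc *desc, u8 *out)
{
	struct example_sum_ctx *ctx = shash_desc_ctx(desc);

	/* Endianness is ignored in this sketch. */
	memcpy(out, &ctx->sum, sizeof(ctx->sum));
	return 0;
}

static struct shash_alg __maybe_unused example_sum_alg = {
	.init		= example_sum_init,
	.update		= example_sum_update,
	.final		= example_sum_final,
	.descsize	= sizeof(struct example_sum_ctx),
	.digestsize	= sizeof(u32),
	.base		= {
		.cra_name	 = "example-sum",
		.cra_driver_name = "example-sum-generic",
		.cra_priority	 = 100,
		.cra_blocksize	 = 1,
		.cra_module	 = THIS_MODULE,
	},
};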

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);
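
/*
 * Illustrative sketch (not part of this file): batch registration, with
 * rollback on partial failure handled by crypto_register_shashes()
 * itself.  "example_algs" is a hypothetical array; a real module would
 * populate it and call these from module_init()/module_exit().
 */
static struct shash_alg example_algs[1];

static int __maybe_unused example_mod_init(void)
{
	return crypto_register_shashes(example_algs, ARRAY_SIZE(example_algs));
}

static void __maybe_unused example_mod_exit(void)
{
	crypto_unregister_shashes(example_algs, ARRAY_SIZE(example_algs));
}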

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");