1 // SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */
8 #include <crypto/scatterwalk.h>
9 #include <linux/cryptouser.h>
10 #include <linux/err.h>
11 #include <linux/kernel.h>
12 #include <linux/module.h>
13 #include <linux/slab.h>
14 #include <linux/seq_file.h>
15 #include <linux/string.h>
16 #include <net/netlink.h>
20 #define MAX_SHASH_ALIGNMASK 63
22 static const struct crypto_type crypto_shash_type
;
24 static inline struct crypto_istat_hash
*shash_get_stat(struct shash_alg
*alg
)
26 return hash_get_stat(&alg
->halg
);
29 static inline int crypto_shash_errstat(struct shash_alg
*alg
, int err
)
31 return crypto_hash_errstat(&alg
->halg
, err
);
34 int shash_no_setkey(struct crypto_shash
*tfm
, const u8
*key
,
39 EXPORT_SYMBOL_GPL(shash_no_setkey
);
41 static int shash_setkey_unaligned(struct crypto_shash
*tfm
, const u8
*key
,
44 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
45 unsigned long alignmask
= crypto_shash_alignmask(tfm
);
47 u8
*buffer
, *alignbuffer
;
50 absize
= keylen
+ (alignmask
& ~(crypto_tfm_ctx_alignment() - 1));
51 buffer
= kmalloc(absize
, GFP_ATOMIC
);
55 alignbuffer
= (u8
*)ALIGN((unsigned long)buffer
, alignmask
+ 1);
56 memcpy(alignbuffer
, key
, keylen
);
57 err
= shash
->setkey(tfm
, alignbuffer
, keylen
);
58 kfree_sensitive(buffer
);
62 static void shash_set_needkey(struct crypto_shash
*tfm
, struct shash_alg
*alg
)
64 if (crypto_shash_alg_needs_key(alg
))
65 crypto_shash_set_flags(tfm
, CRYPTO_TFM_NEED_KEY
);
68 int crypto_shash_setkey(struct crypto_shash
*tfm
, const u8
*key
,
71 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
72 unsigned long alignmask
= crypto_shash_alignmask(tfm
);
75 if ((unsigned long)key
& alignmask
)
76 err
= shash_setkey_unaligned(tfm
, key
, keylen
);
78 err
= shash
->setkey(tfm
, key
, keylen
);
81 shash_set_needkey(tfm
, shash
);
85 crypto_shash_clear_flags(tfm
, CRYPTO_TFM_NEED_KEY
);
88 EXPORT_SYMBOL_GPL(crypto_shash_setkey
);
90 static int shash_update_unaligned(struct shash_desc
*desc
, const u8
*data
,
93 struct crypto_shash
*tfm
= desc
->tfm
;
94 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
95 unsigned long alignmask
= crypto_shash_alignmask(tfm
);
96 unsigned int unaligned_len
= alignmask
+ 1 -
97 ((unsigned long)data
& alignmask
);
99 * We cannot count on __aligned() working for large values:
100 * https://patchwork.kernel.org/patch/9507697/
102 u8 ubuf
[MAX_SHASH_ALIGNMASK
* 2];
103 u8
*buf
= PTR_ALIGN(&ubuf
[0], alignmask
+ 1);
106 if (WARN_ON(buf
+ unaligned_len
> ubuf
+ sizeof(ubuf
)))
109 if (unaligned_len
> len
)
112 memcpy(buf
, data
, unaligned_len
);
113 err
= shash
->update(desc
, buf
, unaligned_len
);
114 memset(buf
, 0, unaligned_len
);
117 shash
->update(desc
, data
+ unaligned_len
, len
- unaligned_len
);
120 int crypto_shash_update(struct shash_desc
*desc
, const u8
*data
,
123 struct crypto_shash
*tfm
= desc
->tfm
;
124 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
125 unsigned long alignmask
= crypto_shash_alignmask(tfm
);
128 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
129 atomic64_add(len
, &shash_get_stat(shash
)->hash_tlen
);
131 if ((unsigned long)data
& alignmask
)
132 err
= shash_update_unaligned(desc
, data
, len
);
134 err
= shash
->update(desc
, data
, len
);
136 return crypto_shash_errstat(shash
, err
);
138 EXPORT_SYMBOL_GPL(crypto_shash_update
);
140 static int shash_final_unaligned(struct shash_desc
*desc
, u8
*out
)
142 struct crypto_shash
*tfm
= desc
->tfm
;
143 unsigned long alignmask
= crypto_shash_alignmask(tfm
);
144 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
145 unsigned int ds
= crypto_shash_digestsize(tfm
);
147 * We cannot count on __aligned() working for large values:
148 * https://patchwork.kernel.org/patch/9507697/
150 u8 ubuf
[MAX_SHASH_ALIGNMASK
+ HASH_MAX_DIGESTSIZE
];
151 u8
*buf
= PTR_ALIGN(&ubuf
[0], alignmask
+ 1);
154 if (WARN_ON(buf
+ ds
> ubuf
+ sizeof(ubuf
)))
157 err
= shash
->final(desc
, buf
);
161 memcpy(out
, buf
, ds
);
168 int crypto_shash_final(struct shash_desc
*desc
, u8
*out
)
170 struct crypto_shash
*tfm
= desc
->tfm
;
171 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
172 unsigned long alignmask
= crypto_shash_alignmask(tfm
);
175 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
176 atomic64_inc(&shash_get_stat(shash
)->hash_cnt
);
178 if ((unsigned long)out
& alignmask
)
179 err
= shash_final_unaligned(desc
, out
);
181 err
= shash
->final(desc
, out
);
183 return crypto_shash_errstat(shash
, err
);
185 EXPORT_SYMBOL_GPL(crypto_shash_final
);
187 static int shash_finup_unaligned(struct shash_desc
*desc
, const u8
*data
,
188 unsigned int len
, u8
*out
)
190 return shash_update_unaligned(desc
, data
, len
) ?:
191 shash_final_unaligned(desc
, out
);
194 int crypto_shash_finup(struct shash_desc
*desc
, const u8
*data
,
195 unsigned int len
, u8
*out
)
197 struct crypto_shash
*tfm
= desc
->tfm
;
198 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
199 unsigned long alignmask
= crypto_shash_alignmask(tfm
);
202 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
203 struct crypto_istat_hash
*istat
= shash_get_stat(shash
);
205 atomic64_inc(&istat
->hash_cnt
);
206 atomic64_add(len
, &istat
->hash_tlen
);
209 if (((unsigned long)data
| (unsigned long)out
) & alignmask
)
210 err
= shash_finup_unaligned(desc
, data
, len
, out
);
212 err
= shash
->finup(desc
, data
, len
, out
);
215 return crypto_shash_errstat(shash
, err
);
217 EXPORT_SYMBOL_GPL(crypto_shash_finup
);
219 static int shash_digest_unaligned(struct shash_desc
*desc
, const u8
*data
,
220 unsigned int len
, u8
*out
)
222 return crypto_shash_init(desc
) ?:
223 shash_update_unaligned(desc
, data
, len
) ?:
224 shash_final_unaligned(desc
, out
);
227 int crypto_shash_digest(struct shash_desc
*desc
, const u8
*data
,
228 unsigned int len
, u8
*out
)
230 struct crypto_shash
*tfm
= desc
->tfm
;
231 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
232 unsigned long alignmask
= crypto_shash_alignmask(tfm
);
235 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
236 struct crypto_istat_hash
*istat
= shash_get_stat(shash
);
238 atomic64_inc(&istat
->hash_cnt
);
239 atomic64_add(len
, &istat
->hash_tlen
);
242 if (crypto_shash_get_flags(tfm
) & CRYPTO_TFM_NEED_KEY
)
244 else if (((unsigned long)data
| (unsigned long)out
) & alignmask
)
245 err
= shash_digest_unaligned(desc
, data
, len
, out
);
247 err
= shash
->digest(desc
, data
, len
, out
);
249 return crypto_shash_errstat(shash
, err
);
251 EXPORT_SYMBOL_GPL(crypto_shash_digest
);
253 int crypto_shash_tfm_digest(struct crypto_shash
*tfm
, const u8
*data
,
254 unsigned int len
, u8
*out
)
256 SHASH_DESC_ON_STACK(desc
, tfm
);
261 err
= crypto_shash_digest(desc
, data
, len
, out
);
263 shash_desc_zero(desc
);
267 EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest
);
269 static int shash_default_export(struct shash_desc
*desc
, void *out
)
271 memcpy(out
, shash_desc_ctx(desc
), crypto_shash_descsize(desc
->tfm
));
275 static int shash_default_import(struct shash_desc
*desc
, const void *in
)
277 memcpy(shash_desc_ctx(desc
), in
, crypto_shash_descsize(desc
->tfm
));
281 static int shash_async_setkey(struct crypto_ahash
*tfm
, const u8
*key
,
284 struct crypto_shash
**ctx
= crypto_ahash_ctx(tfm
);
286 return crypto_shash_setkey(*ctx
, key
, keylen
);
289 static int shash_async_init(struct ahash_request
*req
)
291 struct crypto_shash
**ctx
= crypto_ahash_ctx(crypto_ahash_reqtfm(req
));
292 struct shash_desc
*desc
= ahash_request_ctx(req
);
296 return crypto_shash_init(desc
);
299 int shash_ahash_update(struct ahash_request
*req
, struct shash_desc
*desc
)
301 struct crypto_hash_walk walk
;
304 for (nbytes
= crypto_hash_walk_first(req
, &walk
); nbytes
> 0;
305 nbytes
= crypto_hash_walk_done(&walk
, nbytes
))
306 nbytes
= crypto_shash_update(desc
, walk
.data
, nbytes
);
310 EXPORT_SYMBOL_GPL(shash_ahash_update
);
/* ahash wrapper around shash_ahash_update() using the request context. */
static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}
317 static int shash_async_final(struct ahash_request
*req
)
319 return crypto_shash_final(ahash_request_ctx(req
), req
->result
);
322 int shash_ahash_finup(struct ahash_request
*req
, struct shash_desc
*desc
)
324 struct crypto_hash_walk walk
;
327 nbytes
= crypto_hash_walk_first(req
, &walk
);
329 return crypto_shash_final(desc
, req
->result
);
332 nbytes
= crypto_hash_walk_last(&walk
) ?
333 crypto_shash_finup(desc
, walk
.data
, nbytes
,
335 crypto_shash_update(desc
, walk
.data
, nbytes
);
336 nbytes
= crypto_hash_walk_done(&walk
, nbytes
);
337 } while (nbytes
> 0);
341 EXPORT_SYMBOL_GPL(shash_ahash_finup
);
343 static int shash_async_finup(struct ahash_request
*req
)
345 struct crypto_shash
**ctx
= crypto_ahash_ctx(crypto_ahash_reqtfm(req
));
346 struct shash_desc
*desc
= ahash_request_ctx(req
);
350 return shash_ahash_finup(req
, desc
);
353 int shash_ahash_digest(struct ahash_request
*req
, struct shash_desc
*desc
)
355 unsigned int nbytes
= req
->nbytes
;
356 struct scatterlist
*sg
;
361 (sg
= req
->src
, offset
= sg
->offset
,
362 nbytes
<= min(sg
->length
, ((unsigned int)(PAGE_SIZE
)) - offset
))) {
365 data
= kmap_local_page(sg_page(sg
));
366 err
= crypto_shash_digest(desc
, data
+ offset
, nbytes
,
370 err
= crypto_shash_init(desc
) ?:
371 shash_ahash_finup(req
, desc
);
375 EXPORT_SYMBOL_GPL(shash_ahash_digest
);
377 static int shash_async_digest(struct ahash_request
*req
)
379 struct crypto_shash
**ctx
= crypto_ahash_ctx(crypto_ahash_reqtfm(req
));
380 struct shash_desc
*desc
= ahash_request_ctx(req
);
384 return shash_ahash_digest(req
, desc
);
/* ahash wrapper: export the shash state held in the request context. */
static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}
392 static int shash_async_import(struct ahash_request
*req
, const void *in
)
394 struct crypto_shash
**ctx
= crypto_ahash_ctx(crypto_ahash_reqtfm(req
));
395 struct shash_desc
*desc
= ahash_request_ctx(req
);
399 return crypto_shash_import(desc
, in
);
/* Tear down the async wrapper: release the underlying shash tfm. */
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}
409 int crypto_init_shash_ops_async(struct crypto_tfm
*tfm
)
411 struct crypto_alg
*calg
= tfm
->__crt_alg
;
412 struct shash_alg
*alg
= __crypto_shash_alg(calg
);
413 struct crypto_ahash
*crt
= __crypto_ahash_cast(tfm
);
414 struct crypto_shash
**ctx
= crypto_tfm_ctx(tfm
);
415 struct crypto_shash
*shash
;
417 if (!crypto_mod_get(calg
))
420 shash
= crypto_create_tfm(calg
, &crypto_shash_type
);
422 crypto_mod_put(calg
);
423 return PTR_ERR(shash
);
427 tfm
->exit
= crypto_exit_shash_ops_async
;
429 crt
->init
= shash_async_init
;
430 crt
->update
= shash_async_update
;
431 crt
->final
= shash_async_final
;
432 crt
->finup
= shash_async_finup
;
433 crt
->digest
= shash_async_digest
;
434 if (crypto_shash_alg_has_setkey(alg
))
435 crt
->setkey
= shash_async_setkey
;
437 crypto_ahash_set_flags(crt
, crypto_shash_get_flags(shash
) &
438 CRYPTO_TFM_NEED_KEY
);
440 crt
->export
= shash_async_export
;
441 crt
->import
= shash_async_import
;
443 crt
->reqsize
= sizeof(struct shash_desc
) + crypto_shash_descsize(shash
);
/*
 * Clone the shash backing an async wrapper into a pre-allocated ahash
 * @nhash.  On clone failure @nhash is freed and the error is propagated.
 */
struct crypto_ahash *crypto_clone_shash_ops_async(struct crypto_ahash *nhash,
						  struct crypto_ahash *hash)
{
	struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
	struct crypto_shash **ctx = crypto_ahash_ctx(hash);
	struct crypto_shash *shash;

	shash = crypto_clone_shash(*ctx);
	if (IS_ERR(shash)) {
		crypto_free_ahash(nhash);
		return ERR_CAST(shash);
	}

	*nctx = shash;

	return nhash;
}
466 static void crypto_shash_exit_tfm(struct crypto_tfm
*tfm
)
468 struct crypto_shash
*hash
= __crypto_shash_cast(tfm
);
469 struct shash_alg
*alg
= crypto_shash_alg(hash
);
474 static int crypto_shash_init_tfm(struct crypto_tfm
*tfm
)
476 struct crypto_shash
*hash
= __crypto_shash_cast(tfm
);
477 struct shash_alg
*alg
= crypto_shash_alg(hash
);
480 hash
->descsize
= alg
->descsize
;
482 shash_set_needkey(hash
, alg
);
485 tfm
->exit
= crypto_shash_exit_tfm
;
490 err
= alg
->init_tfm(hash
);
494 /* ->init_tfm() may have increased the descsize. */
495 if (WARN_ON_ONCE(hash
->descsize
> HASH_MAX_DESCSIZE
)) {
504 static void crypto_shash_free_instance(struct crypto_instance
*inst
)
506 struct shash_instance
*shash
= shash_instance(inst
);
511 static int __maybe_unused
crypto_shash_report(
512 struct sk_buff
*skb
, struct crypto_alg
*alg
)
514 struct crypto_report_hash rhash
;
515 struct shash_alg
*salg
= __crypto_shash_alg(alg
);
517 memset(&rhash
, 0, sizeof(rhash
));
519 strscpy(rhash
.type
, "shash", sizeof(rhash
.type
));
521 rhash
.blocksize
= alg
->cra_blocksize
;
522 rhash
.digestsize
= salg
->digestsize
;
524 return nla_put(skb
, CRYPTOCFGA_REPORT_HASH
, sizeof(rhash
), &rhash
);
527 static void crypto_shash_show(struct seq_file
*m
, struct crypto_alg
*alg
)
529 static void crypto_shash_show(struct seq_file
*m
, struct crypto_alg
*alg
)
531 struct shash_alg
*salg
= __crypto_shash_alg(alg
);
533 seq_printf(m
, "type : shash\n");
534 seq_printf(m
, "blocksize : %u\n", alg
->cra_blocksize
);
535 seq_printf(m
, "digestsize : %u\n", salg
->digestsize
);
538 static int __maybe_unused
crypto_shash_report_stat(
539 struct sk_buff
*skb
, struct crypto_alg
*alg
)
541 return crypto_hash_report_stat(skb
, alg
, "shash");
544 static const struct crypto_type crypto_shash_type
= {
545 .extsize
= crypto_alg_extsize
,
546 .init_tfm
= crypto_shash_init_tfm
,
547 .free
= crypto_shash_free_instance
,
548 #ifdef CONFIG_PROC_FS
549 .show
= crypto_shash_show
,
551 #if IS_ENABLED(CONFIG_CRYPTO_USER)
552 .report
= crypto_shash_report
,
554 #ifdef CONFIG_CRYPTO_STATS
555 .report_stat
= crypto_shash_report_stat
,
557 .maskclear
= ~CRYPTO_ALG_TYPE_MASK
,
558 .maskset
= CRYPTO_ALG_TYPE_MASK
,
559 .type
= CRYPTO_ALG_TYPE_SHASH
,
560 .tfmsize
= offsetof(struct crypto_shash
, base
),
563 int crypto_grab_shash(struct crypto_shash_spawn
*spawn
,
564 struct crypto_instance
*inst
,
565 const char *name
, u32 type
, u32 mask
)
567 spawn
->base
.frontend
= &crypto_shash_type
;
568 return crypto_grab_spawn(&spawn
->base
, inst
, name
, type
, mask
);
570 EXPORT_SYMBOL_GPL(crypto_grab_shash
);
572 struct crypto_shash
*crypto_alloc_shash(const char *alg_name
, u32 type
,
575 return crypto_alloc_tfm(alg_name
, &crypto_shash_type
, type
, mask
);
577 EXPORT_SYMBOL_GPL(crypto_alloc_shash
);
579 int crypto_has_shash(const char *alg_name
, u32 type
, u32 mask
)
581 return crypto_type_has_alg(alg_name
, &crypto_shash_type
, type
, mask
);
583 EXPORT_SYMBOL_GPL(crypto_has_shash
);
585 struct crypto_shash
*crypto_clone_shash(struct crypto_shash
*hash
)
587 struct crypto_tfm
*tfm
= crypto_shash_tfm(hash
);
588 struct shash_alg
*alg
= crypto_shash_alg(hash
);
589 struct crypto_shash
*nhash
;
592 if (!crypto_shash_alg_has_setkey(alg
)) {
593 tfm
= crypto_tfm_get(tfm
);
595 return ERR_CAST(tfm
);
600 if (!alg
->clone_tfm
&& (alg
->init_tfm
|| alg
->base
.cra_init
))
601 return ERR_PTR(-ENOSYS
);
603 nhash
= crypto_clone_tfm(&crypto_shash_type
, tfm
);
607 nhash
->descsize
= hash
->descsize
;
609 if (alg
->clone_tfm
) {
610 err
= alg
->clone_tfm(nhash
, hash
);
612 crypto_free_shash(nhash
);
619 EXPORT_SYMBOL_GPL(crypto_clone_shash
);
621 int hash_prepare_alg(struct hash_alg_common
*alg
)
623 struct crypto_istat_hash
*istat
= hash_get_stat(alg
);
624 struct crypto_alg
*base
= &alg
->base
;
626 if (alg
->digestsize
> HASH_MAX_DIGESTSIZE
)
629 base
->cra_flags
&= ~CRYPTO_ALG_TYPE_MASK
;
631 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
632 memset(istat
, 0, sizeof(*istat
));
637 static int shash_prepare_alg(struct shash_alg
*alg
)
639 struct crypto_alg
*base
= &alg
->halg
.base
;
642 if (alg
->descsize
> HASH_MAX_DESCSIZE
)
645 if (base
->cra_alignmask
> MAX_SHASH_ALIGNMASK
)
648 if ((alg
->export
&& !alg
->import
) || (alg
->import
&& !alg
->export
))
651 err
= hash_prepare_alg(&alg
->halg
);
655 base
->cra_type
= &crypto_shash_type
;
656 base
->cra_flags
|= CRYPTO_ALG_TYPE_SHASH
;
659 alg
->finup
= shash_finup_unaligned
;
661 alg
->digest
= shash_digest_unaligned
;
663 alg
->export
= shash_default_export
;
664 alg
->import
= shash_default_import
;
665 alg
->halg
.statesize
= alg
->descsize
;
668 alg
->setkey
= shash_no_setkey
;
673 int crypto_register_shash(struct shash_alg
*alg
)
675 struct crypto_alg
*base
= &alg
->base
;
678 err
= shash_prepare_alg(alg
);
682 return crypto_register_alg(base
);
684 EXPORT_SYMBOL_GPL(crypto_register_shash
);
686 void crypto_unregister_shash(struct shash_alg
*alg
)
688 crypto_unregister_alg(&alg
->base
);
690 EXPORT_SYMBOL_GPL(crypto_unregister_shash
);
692 int crypto_register_shashes(struct shash_alg
*algs
, int count
)
696 for (i
= 0; i
< count
; i
++) {
697 ret
= crypto_register_shash(&algs
[i
]);
705 for (--i
; i
>= 0; --i
)
706 crypto_unregister_shash(&algs
[i
]);
710 EXPORT_SYMBOL_GPL(crypto_register_shashes
);
712 void crypto_unregister_shashes(struct shash_alg
*algs
, int count
)
716 for (i
= count
- 1; i
>= 0; --i
)
717 crypto_unregister_shash(&algs
[i
]);
719 EXPORT_SYMBOL_GPL(crypto_unregister_shashes
);
721 int shash_register_instance(struct crypto_template
*tmpl
,
722 struct shash_instance
*inst
)
726 if (WARN_ON(!inst
->free
))
729 err
= shash_prepare_alg(&inst
->alg
);
733 return crypto_register_instance(tmpl
, shash_crypto_instance(inst
));
735 EXPORT_SYMBOL_GPL(shash_register_instance
);
/* Generic ->free for instances holding exactly one spawn. */
void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
744 MODULE_LICENSE("GPL");
745 MODULE_DESCRIPTION("Synchronous cryptographic hash type");