// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the implementation of the ahash (asynchronous hash) API.  It differs
 * from shash (synchronous hash) in that ahash supports asynchronous operations,
 * and it hashes data from scatterlists instead of virtually addressed buffers.
 *
 * The ahash API provides access to both ahash and shash algorithms.  The shash
 * API only provides access to shash algorithms.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */

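/*
 * Example (illustrative sketch, not part of the original file): a typical
 * caller computes a digest over a scatterlist roughly as follows.  The
 * algorithm name "sha256" and the data/datalen names are hypothetical;
 * error handling is abbreviated.
 *
 *      struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *      struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *      struct scatterlist sg;
 *      u8 digest[SHA256_DIGEST_SIZE];
 *      DECLARE_CRYPTO_WAIT(wait);
 *
 *      sg_init_one(&sg, data, datalen);
 *      ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *                                 crypto_req_done, &wait);
 *      ahash_request_set_crypt(req, &sg, digest, datalen);
 *      crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *      ahash_request_free(req);
 *      crypto_free_ahash(tfm);
 */
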
#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

static inline struct crypto_istat_hash *ahash_get_stat(struct ahash_alg *alg)
{
        return hash_get_stat(&alg->halg);
}

static inline int crypto_ahash_errstat(struct ahash_alg *alg, int err)
{
        if (!IS_ENABLED(CONFIG_CRYPTO_STATS))
                return err;

        if (err && err != -EINPROGRESS && err != -EBUSY)
                atomic64_inc(&ahash_get_stat(alg)->err_cnt);

        return err;
}

/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
        return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}

static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
                                                    struct crypto_ahash *tfm)
{
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = ahash_to_shash(tfm);
        return desc;
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        nbytes = crypto_hash_walk_first(req, &walk);
        if (!nbytes)
                return crypto_shash_final(desc, req->result);

        do {
                nbytes = crypto_hash_walk_last(&walk) ?
                         crypto_shash_finup(desc, walk.data, nbytes,
                                            req->result) :
                         crypto_shash_update(desc, walk.data, nbytes);
                nbytes = crypto_hash_walk_done(&walk, nbytes);
        } while (nbytes > 0);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
        unsigned int nbytes = req->nbytes;
        struct scatterlist *sg;
        unsigned int offset;
        int err;

        /* Fast path: the whole input lies within one page. */
        if (nbytes &&
            (sg = req->src, offset = sg->offset,
             nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
                void *data;

                data = kmap_local_page(sg_page(sg));
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                kunmap_local(data);
        } else
                err = crypto_shash_init(desc) ?:
                      shash_ahash_finup(req, desc);

        return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}

static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        crt->using_shash = true;
        *ctx = shash;
        tfm->exit = crypto_exit_ahash_using_shash;

        crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
                                    CRYPTO_TFM_NEED_KEY);
        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}

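/*
 * Sketch of the resulting layout when an ahash tfm wraps an shash algorithm
 * (inferred from the code above, not a formal ABI): the ahash tfm context
 * holds a single pointer to the underlying shash tfm, and the request
 * context is sized for a shash_desc plus its state, so ahash_request_ctx()
 * can double as a shash_desc (see prepare_shash_desc()):
 *
 *      crypto_ahash_ctx(tfm)   -> struct crypto_shash *
 *      ahash_request_ctx(req)  -> struct shash_desc + descsize bytes
 */
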
static int hash_walk_next(struct crypto_hash_walk *walk)
{
        unsigned int offset = walk->offset;
        unsigned int nbytes = min(walk->entrylen,
                                  ((unsigned int)(PAGE_SIZE)) - offset);

        walk->data = kmap_local_page(walk->pg);
        walk->data += offset;
        walk->entrylen -= nbytes;
        return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
        struct scatterlist *sg;

        sg = walk->sg;
        walk->offset = sg->offset;
        walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
        walk->offset = offset_in_page(walk->offset);
        walk->entrylen = sg->length;

        if (walk->entrylen > walk->total)
                walk->entrylen = walk->total;
        walk->total -= walk->entrylen;

        return hash_walk_next(walk);
}

int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
        walk->data -= walk->offset;

        kunmap_local(walk->data);
        crypto_yield(walk->flags);

        if (err)
                return err;

        if (walk->entrylen) {
                walk->offset = 0;
                walk->pg++;
                return hash_walk_next(walk);
        }

        if (!walk->total)
                return 0;

        walk->sg = sg_next(walk->sg);

        return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

int crypto_hash_walk_first(struct ahash_request *req,
                           struct crypto_hash_walk *walk)
{
        walk->total = req->nbytes;

        if (!walk->total) {
                walk->entrylen = 0;
                return 0;
        }

        walk->sg = req->src;
        walk->flags = req->base.flags;

        return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);

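/*
 * Example (illustrative sketch): how an ahash implementation might consume
 * its scatterlist input with the walk helpers above; shash_ahash_update()
 * earlier in this file follows the same pattern.  process() stands in for a
 * hypothetical block-processing function.
 *
 *      struct crypto_hash_walk walk;
 *      int nbytes;
 *
 *      for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *           nbytes = crypto_hash_walk_done(&walk, nbytes))
 *              nbytes = process(ctx, walk.data, nbytes);
 */
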
static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
                          unsigned int keylen)
{
        return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
{
        if (alg->setkey != ahash_nosetkey &&
            !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
                crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
                        unsigned int keylen)
{
        if (likely(tfm->using_shash)) {
                struct crypto_shash *shash = ahash_to_shash(tfm);
                int err;

                err = crypto_shash_setkey(shash, key, keylen);
                if (unlikely(err)) {
                        crypto_ahash_set_flags(tfm,
                                               crypto_shash_get_flags(shash) &
                                               CRYPTO_TFM_NEED_KEY);
                        return err;
                }
        } else {
                struct ahash_alg *alg = crypto_ahash_alg(tfm);
                int err;

                err = alg->setkey(tfm, key, keylen);
                if (unlikely(err)) {
                        ahash_set_needkey(tfm, alg);
                        return err;
                }
        }
        crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
        return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

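/*
 * Example (illustrative sketch): keyed algorithms such as "hmac(sha256)"
 * must be keyed before the first hash operation; until then the tfm carries
 * CRYPTO_TFM_NEED_KEY and operations fail with -ENOKEY.  The key/keylen
 * names below are hypothetical.
 *
 *      struct crypto_ahash *tfm = crypto_alloc_ahash("hmac(sha256)", 0, 0);
 *
 *      err = crypto_ahash_setkey(tfm, key, keylen);
 *      if (err)
 *              goto out_free;  (the tfm remains marked NEED_KEY)
 */
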
int crypto_ahash_init(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

        if (likely(tfm->using_shash))
                return crypto_shash_init(prepare_shash_desc(req, tfm));
        if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
                return -ENOKEY;
        return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);

static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt,
                          bool has_state)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        unsigned int ds = crypto_ahash_digestsize(tfm);
        struct ahash_request *subreq;
        unsigned int subreq_size;
        unsigned int reqsize;
        u8 *result;
        gfp_t gfp;
        u32 flags;

        subreq_size = sizeof(*subreq);
        reqsize = crypto_ahash_reqsize(tfm);
        reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
        subreq_size += reqsize;
        subreq_size += ds;

        flags = ahash_request_flags(req);
        gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
        subreq = kmalloc(subreq_size, gfp);
        if (!subreq)
                return -ENOMEM;

        ahash_request_set_tfm(subreq, tfm);
        ahash_request_set_callback(subreq, flags, cplt, req);

        result = (u8 *)(subreq + 1) + reqsize;

        ahash_request_set_crypt(subreq, req->src, result, req->nbytes);

        if (has_state) {
                void *state;

                state = kmalloc(crypto_ahash_statesize(tfm), gfp);
                if (!state) {
                        kfree(subreq);
                        return -ENOMEM;
                }

                crypto_ahash_export(req, state);
                crypto_ahash_import(subreq, state);
                kfree_sensitive(state);
        }

        req->priv = subreq;

        return 0;
}

static void ahash_restore_req(struct ahash_request *req, int err)
{
        struct ahash_request *subreq = req->priv;

        if (!err)
                memcpy(req->result, subreq->result,
                       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

        req->priv = NULL;

        kfree_sensitive(subreq);
}

int crypto_ahash_update(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ahash_alg *alg;

        if (likely(tfm->using_shash))
                return shash_ahash_update(req, ahash_request_ctx(req));

        alg = crypto_ahash_alg(tfm);
        if (IS_ENABLED(CONFIG_CRYPTO_STATS))
                atomic64_add(req->nbytes, &ahash_get_stat(alg)->hash_tlen);
        return crypto_ahash_errstat(alg, alg->update(req));
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);

int crypto_ahash_final(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ahash_alg *alg;

        if (likely(tfm->using_shash))
                return crypto_shash_final(ahash_request_ctx(req), req->result);

        alg = crypto_ahash_alg(tfm);
        if (IS_ENABLED(CONFIG_CRYPTO_STATS))
                atomic64_inc(&ahash_get_stat(alg)->hash_cnt);
        return crypto_ahash_errstat(alg, alg->final(req));
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

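/*
 * Example (illustrative sketch): incremental hashing drives init, update and
 * final as separate, possibly asynchronous steps.  Each call may return
 * -EINPROGRESS or -EBUSY; crypto_wait_req() folds that into a synchronous
 * wait.  The sg/digest/datalen names are hypothetical.
 *
 *      DECLARE_CRYPTO_WAIT(wait);
 *
 *      ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *                                 crypto_req_done, &wait);
 *      ahash_request_set_crypt(req, sg, digest, datalen);
 *      err = crypto_wait_req(crypto_ahash_init(req), &wait);
 *      err = err ?: crypto_wait_req(crypto_ahash_update(req), &wait);
 *      err = err ?: crypto_wait_req(crypto_ahash_final(req), &wait);
 */
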
int crypto_ahash_finup(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ahash_alg *alg;

        if (likely(tfm->using_shash))
                return shash_ahash_finup(req, ahash_request_ctx(req));

        alg = crypto_ahash_alg(tfm);
        if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
                struct crypto_istat_hash *istat = ahash_get_stat(alg);

                atomic64_inc(&istat->hash_cnt);
                atomic64_add(req->nbytes, &istat->hash_tlen);
        }
        return crypto_ahash_errstat(alg, alg->finup(req));
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ahash_alg *alg;
        int err;

        if (likely(tfm->using_shash))
                return shash_ahash_digest(req, prepare_shash_desc(req, tfm));

        alg = crypto_ahash_alg(tfm);
        if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
                struct crypto_istat_hash *istat = ahash_get_stat(alg);

                atomic64_inc(&istat->hash_cnt);
                atomic64_add(req->nbytes, &istat->hash_tlen);
        }

        if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
                err = -ENOKEY;
        else
                err = alg->digest(req);

        return crypto_ahash_errstat(alg, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

static void ahash_def_finup_done2(void *data, int err)
{
        struct ahash_request *areq = data;

        if (err == -EINPROGRESS)
                return;

        ahash_restore_req(areq, err);

        ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
        struct ahash_request *subreq = req->priv;

        if (err)
                goto out;

        subreq->base.complete = ahash_def_finup_done2;

        err = crypto_ahash_alg(crypto_ahash_reqtfm(req))->final(subreq);
        if (err == -EINPROGRESS || err == -EBUSY)
                return err;

out:
        ahash_restore_req(req, err);
        return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
        struct ahash_request *areq = data;
        struct ahash_request *subreq;

        if (err == -EINPROGRESS)
                goto out;

        subreq = areq->priv;
        subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

        err = ahash_def_finup_finish1(areq, err);
        if (err == -EINPROGRESS || err == -EBUSY)
                return;

out:
        ahash_request_complete(areq, err);
}

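/*
 * Default finup implementation, installed by ahash_prepare_alg() for
 * algorithms that provide only ->update() and ->final().  A private
 * subrequest is allocated by ahash_save_req(), the running state is copied
 * into it via export/import, and the update and final steps are chained
 * through the completion callbacks above so the combined operation still
 * behaves asynchronously.
 */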
static int ahash_def_finup(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        int err;

        err = ahash_save_req(req, ahash_def_finup_done1, true);
        if (err)
                return err;

        err = crypto_ahash_alg(tfm)->update(req->priv);
        if (err == -EINPROGRESS || err == -EBUSY)
                return err;

        return ahash_def_finup_finish1(req, err);
}

int crypto_ahash_export(struct ahash_request *req, void *out)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

        if (likely(tfm->using_shash))
                return crypto_shash_export(ahash_request_ctx(req), out);
        return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);

int crypto_ahash_import(struct ahash_request *req, const void *in)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

        if (likely(tfm->using_shash))
                return crypto_shash_import(prepare_shash_desc(req, tfm), in);
        if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
                return -ENOKEY;
        return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);

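/*
 * Example (illustrative sketch): export/import lets a caller checkpoint a
 * partially-computed hash and resume it later, possibly on a different
 * request.  The state buffer is sized by crypto_ahash_statesize(); req and
 * req2 are hypothetical requests on the same algorithm.
 *
 *      void *state = kmalloc(crypto_ahash_statesize(tfm), GFP_KERNEL);
 *
 *      err = crypto_ahash_export(req, state);
 *      ...
 *      err = crypto_ahash_import(req2, state);
 *      kfree_sensitive(state);
 */
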
static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
        struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
        struct ahash_alg *alg = crypto_ahash_alg(hash);

        alg->exit_tfm(hash);
}

static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
        struct ahash_alg *alg = crypto_ahash_alg(hash);

        crypto_ahash_set_statesize(hash, alg->halg.statesize);

        if (tfm->__crt_alg->cra_type == &crypto_shash_type)
                return crypto_init_ahash_using_shash(tfm);

        ahash_set_needkey(hash, alg);

        if (alg->exit_tfm)
                tfm->exit = crypto_ahash_exit_tfm;

        return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
        if (alg->cra_type == &crypto_shash_type)
                return sizeof(struct crypto_shash *);

        return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
        struct ahash_instance *ahash = ahash_instance(inst);

        ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
        struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_hash rhash;

        memset(&rhash, 0, sizeof(rhash));

        strscpy(rhash.type, "ahash", sizeof(rhash.type));

        rhash.blocksize = alg->cra_blocksize;
        rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

        return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
        __maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
        seq_printf(m, "type         : ahash\n");
        seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
                                             "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n",
                   __crypto_hash_alg_common(alg)->digestsize);
}

static int __maybe_unused crypto_ahash_report_stat(
        struct sk_buff *skb, struct crypto_alg *alg)
{
        return crypto_hash_report_stat(skb, alg, "ahash");
}

static const struct crypto_type crypto_ahash_type = {
        .extsize = crypto_ahash_extsize,
        .init_tfm = crypto_ahash_init_tfm,
        .free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
        .show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
        .report = crypto_ahash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
        .report_stat = crypto_ahash_report_stat,
#endif
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
        .type = CRYPTO_ALG_TYPE_AHASH,
        .tfmsize = offsetof(struct crypto_ahash, base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
                      struct crypto_instance *inst,
                      const char *name, u32 type, u32 mask)
{
        spawn->base.frontend = &crypto_ahash_type;
        return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
        return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

static bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
        struct crypto_alg *alg = &halg->base;

        if (alg->cra_type == &crypto_shash_type)
                return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

        return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}

struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
        struct hash_alg_common *halg = crypto_hash_alg_common(hash);
        struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
        struct crypto_ahash *nhash;
        struct ahash_alg *alg;
        int err;

        if (!crypto_hash_alg_has_setkey(halg)) {
                tfm = crypto_tfm_get(tfm);
                if (IS_ERR(tfm))
                        return ERR_CAST(tfm);

                return hash;
        }

        nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

        if (IS_ERR(nhash))
                return nhash;

        nhash->reqsize = hash->reqsize;
        nhash->statesize = hash->statesize;

        if (likely(hash->using_shash)) {
                struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
                struct crypto_shash *shash;

                shash = crypto_clone_shash(ahash_to_shash(hash));
                if (IS_ERR(shash)) {
                        err = PTR_ERR(shash);
                        goto out_free_nhash;
                }
                nhash->using_shash = true;
                *nctx = shash;
                return nhash;
        }

        err = -ENOSYS;
        alg = crypto_ahash_alg(hash);
        if (!alg->clone_tfm)
                goto out_free_nhash;

        err = alg->clone_tfm(nhash, hash);
        if (err)
                goto out_free_nhash;

        return nhash;

out_free_nhash:
        crypto_free_ahash(nhash);
        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);

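/*
 * Example (illustrative sketch): crypto_clone_ahash() duplicates an existing
 * tfm, reusing its key via the clone callbacks, for callers that cannot
 * simply allocate and re-key a fresh tfm.  For unkeyed algorithms it just
 * takes a reference on the original tfm, as the code above shows.
 *
 *      struct crypto_ahash *nhash = crypto_clone_ahash(hash);
 *
 *      if (IS_ERR(nhash))
 *              return PTR_ERR(nhash);
 */
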
static int ahash_prepare_alg(struct ahash_alg *alg)
{
        struct crypto_alg *base = &alg->halg.base;
        int err;

        if (alg->halg.statesize == 0)
                return -EINVAL;

        err = hash_prepare_alg(&alg->halg);
        if (err)
                return err;

        base->cra_type = &crypto_ahash_type;
        base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

        if (!alg->finup)
                alg->finup = ahash_def_finup;
        if (!alg->setkey)
                alg->setkey = ahash_nosetkey;

        return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
        struct crypto_alg *base = &alg->halg.base;
        int err;

        err = ahash_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

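/*
 * Example (illustrative sketch): a driver registers an ahash algorithm by
 * filling in an ahash_alg and calling crypto_register_ahash().  All "my_*"
 * names are hypothetical placeholders; ->finup and ->setkey are optional
 * since ahash_prepare_alg() installs defaults, and statesize must be
 * nonzero.
 *
 *      static struct ahash_alg my_sha256_alg = {
 *              .init   = my_sha256_init,
 *              .update = my_sha256_update,
 *              .final  = my_sha256_final,
 *              .digest = my_sha256_digest,
 *              .export = my_sha256_export,
 *              .import = my_sha256_import,
 *              .halg = {
 *                      .digestsize = SHA256_DIGEST_SIZE,
 *                      .statesize  = sizeof(struct my_sha256_state),
 *                      .base = {
 *                              .cra_name        = "sha256",
 *                              .cra_driver_name = "sha256-mydev",
 *                              .cra_blocksize   = SHA256_BLOCK_SIZE,
 *                              .cra_module      = THIS_MODULE,
 *                      },
 *              },
 *      };
 *
 *      err = crypto_register_ahash(&my_sha256_alg);
 */
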
void crypto_unregister_ahash(struct ahash_alg *alg)
{
        crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_ahash(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_ahash(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
        int i;

        for (i = count - 1; i >= 0; --i)
                crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
                            struct ahash_instance *inst)
{
        int err;

        if (WARN_ON(!inst->free))
                return -EINVAL;

        err = ahash_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");