1 // SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */
#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"
19 static const struct crypto_type crypto_shash_type
;
21 static inline struct crypto_istat_hash
*shash_get_stat(struct shash_alg
*alg
)
23 return hash_get_stat(&alg
->halg
);
26 static inline int crypto_shash_errstat(struct shash_alg
*alg
, int err
)
28 return crypto_hash_errstat(&alg
->halg
, err
);
31 int shash_no_setkey(struct crypto_shash
*tfm
, const u8
*key
,
36 EXPORT_SYMBOL_GPL(shash_no_setkey
);
38 static void shash_set_needkey(struct crypto_shash
*tfm
, struct shash_alg
*alg
)
40 if (crypto_shash_alg_needs_key(alg
))
41 crypto_shash_set_flags(tfm
, CRYPTO_TFM_NEED_KEY
);
44 int crypto_shash_setkey(struct crypto_shash
*tfm
, const u8
*key
,
47 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
50 err
= shash
->setkey(tfm
, key
, keylen
);
52 shash_set_needkey(tfm
, shash
);
56 crypto_shash_clear_flags(tfm
, CRYPTO_TFM_NEED_KEY
);
59 EXPORT_SYMBOL_GPL(crypto_shash_setkey
);
61 int crypto_shash_update(struct shash_desc
*desc
, const u8
*data
,
64 struct shash_alg
*shash
= crypto_shash_alg(desc
->tfm
);
67 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
68 atomic64_add(len
, &shash_get_stat(shash
)->hash_tlen
);
70 err
= shash
->update(desc
, data
, len
);
72 return crypto_shash_errstat(shash
, err
);
74 EXPORT_SYMBOL_GPL(crypto_shash_update
);
76 int crypto_shash_final(struct shash_desc
*desc
, u8
*out
)
78 struct shash_alg
*shash
= crypto_shash_alg(desc
->tfm
);
81 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
82 atomic64_inc(&shash_get_stat(shash
)->hash_cnt
);
84 err
= shash
->final(desc
, out
);
86 return crypto_shash_errstat(shash
, err
);
88 EXPORT_SYMBOL_GPL(crypto_shash_final
);
90 static int shash_default_finup(struct shash_desc
*desc
, const u8
*data
,
91 unsigned int len
, u8
*out
)
93 struct shash_alg
*shash
= crypto_shash_alg(desc
->tfm
);
95 return shash
->update(desc
, data
, len
) ?:
96 shash
->final(desc
, out
);
99 int crypto_shash_finup(struct shash_desc
*desc
, const u8
*data
,
100 unsigned int len
, u8
*out
)
102 struct crypto_shash
*tfm
= desc
->tfm
;
103 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
106 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
107 struct crypto_istat_hash
*istat
= shash_get_stat(shash
);
109 atomic64_inc(&istat
->hash_cnt
);
110 atomic64_add(len
, &istat
->hash_tlen
);
113 err
= shash
->finup(desc
, data
, len
, out
);
115 return crypto_shash_errstat(shash
, err
);
117 EXPORT_SYMBOL_GPL(crypto_shash_finup
);
119 static int shash_default_digest(struct shash_desc
*desc
, const u8
*data
,
120 unsigned int len
, u8
*out
)
122 struct shash_alg
*shash
= crypto_shash_alg(desc
->tfm
);
124 return shash
->init(desc
) ?:
125 shash
->finup(desc
, data
, len
, out
);
128 int crypto_shash_digest(struct shash_desc
*desc
, const u8
*data
,
129 unsigned int len
, u8
*out
)
131 struct crypto_shash
*tfm
= desc
->tfm
;
132 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
135 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
136 struct crypto_istat_hash
*istat
= shash_get_stat(shash
);
138 atomic64_inc(&istat
->hash_cnt
);
139 atomic64_add(len
, &istat
->hash_tlen
);
142 if (crypto_shash_get_flags(tfm
) & CRYPTO_TFM_NEED_KEY
)
145 err
= shash
->digest(desc
, data
, len
, out
);
147 return crypto_shash_errstat(shash
, err
);
149 EXPORT_SYMBOL_GPL(crypto_shash_digest
);
151 int crypto_shash_tfm_digest(struct crypto_shash
*tfm
, const u8
*data
,
152 unsigned int len
, u8
*out
)
154 SHASH_DESC_ON_STACK(desc
, tfm
);
159 err
= crypto_shash_digest(desc
, data
, len
, out
);
161 shash_desc_zero(desc
);
165 EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest
);
167 int crypto_shash_export(struct shash_desc
*desc
, void *out
)
169 struct crypto_shash
*tfm
= desc
->tfm
;
170 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
173 return shash
->export(desc
, out
);
175 memcpy(out
, shash_desc_ctx(desc
), crypto_shash_descsize(tfm
));
178 EXPORT_SYMBOL_GPL(crypto_shash_export
);
180 int crypto_shash_import(struct shash_desc
*desc
, const void *in
)
182 struct crypto_shash
*tfm
= desc
->tfm
;
183 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
185 if (crypto_shash_get_flags(tfm
) & CRYPTO_TFM_NEED_KEY
)
189 return shash
->import(desc
, in
);
191 memcpy(shash_desc_ctx(desc
), in
, crypto_shash_descsize(tfm
));
194 EXPORT_SYMBOL_GPL(crypto_shash_import
);
196 static int shash_async_setkey(struct crypto_ahash
*tfm
, const u8
*key
,
199 struct crypto_shash
**ctx
= crypto_ahash_ctx(tfm
);
201 return crypto_shash_setkey(*ctx
, key
, keylen
);
204 static int shash_async_init(struct ahash_request
*req
)
206 struct crypto_shash
**ctx
= crypto_ahash_ctx(crypto_ahash_reqtfm(req
));
207 struct shash_desc
*desc
= ahash_request_ctx(req
);
211 return crypto_shash_init(desc
);
214 int shash_ahash_update(struct ahash_request
*req
, struct shash_desc
*desc
)
216 struct crypto_hash_walk walk
;
219 for (nbytes
= crypto_hash_walk_first(req
, &walk
); nbytes
> 0;
220 nbytes
= crypto_hash_walk_done(&walk
, nbytes
))
221 nbytes
= crypto_shash_update(desc
, walk
.data
, nbytes
);
225 EXPORT_SYMBOL_GPL(shash_ahash_update
);
/* ahash ->update wrapper around shash_ahash_update(). */
static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}
232 static int shash_async_final(struct ahash_request
*req
)
234 return crypto_shash_final(ahash_request_ctx(req
), req
->result
);
237 int shash_ahash_finup(struct ahash_request
*req
, struct shash_desc
*desc
)
239 struct crypto_hash_walk walk
;
242 nbytes
= crypto_hash_walk_first(req
, &walk
);
244 return crypto_shash_final(desc
, req
->result
);
247 nbytes
= crypto_hash_walk_last(&walk
) ?
248 crypto_shash_finup(desc
, walk
.data
, nbytes
,
250 crypto_shash_update(desc
, walk
.data
, nbytes
);
251 nbytes
= crypto_hash_walk_done(&walk
, nbytes
);
252 } while (nbytes
> 0);
256 EXPORT_SYMBOL_GPL(shash_ahash_finup
);
258 static int shash_async_finup(struct ahash_request
*req
)
260 struct crypto_shash
**ctx
= crypto_ahash_ctx(crypto_ahash_reqtfm(req
));
261 struct shash_desc
*desc
= ahash_request_ctx(req
);
265 return shash_ahash_finup(req
, desc
);
268 int shash_ahash_digest(struct ahash_request
*req
, struct shash_desc
*desc
)
270 unsigned int nbytes
= req
->nbytes
;
271 struct scatterlist
*sg
;
276 (sg
= req
->src
, offset
= sg
->offset
,
277 nbytes
<= min(sg
->length
, ((unsigned int)(PAGE_SIZE
)) - offset
))) {
280 data
= kmap_local_page(sg_page(sg
));
281 err
= crypto_shash_digest(desc
, data
+ offset
, nbytes
,
285 err
= crypto_shash_init(desc
) ?:
286 shash_ahash_finup(req
, desc
);
290 EXPORT_SYMBOL_GPL(shash_ahash_digest
);
292 static int shash_async_digest(struct ahash_request
*req
)
294 struct crypto_shash
**ctx
= crypto_ahash_ctx(crypto_ahash_reqtfm(req
));
295 struct shash_desc
*desc
= ahash_request_ctx(req
);
299 return shash_ahash_digest(req
, desc
);
/* ahash ->export wrapper around crypto_shash_export(). */
static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}
307 static int shash_async_import(struct ahash_request
*req
, const void *in
)
309 struct crypto_shash
**ctx
= crypto_ahash_ctx(crypto_ahash_reqtfm(req
));
310 struct shash_desc
*desc
= ahash_request_ctx(req
);
314 return crypto_shash_import(desc
, in
);
/* Tear-down for the async wrapper: free the underlying shash tfm. */
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}
324 int crypto_init_shash_ops_async(struct crypto_tfm
*tfm
)
326 struct crypto_alg
*calg
= tfm
->__crt_alg
;
327 struct shash_alg
*alg
= __crypto_shash_alg(calg
);
328 struct crypto_ahash
*crt
= __crypto_ahash_cast(tfm
);
329 struct crypto_shash
**ctx
= crypto_tfm_ctx(tfm
);
330 struct crypto_shash
*shash
;
332 if (!crypto_mod_get(calg
))
335 shash
= crypto_create_tfm(calg
, &crypto_shash_type
);
337 crypto_mod_put(calg
);
338 return PTR_ERR(shash
);
342 tfm
->exit
= crypto_exit_shash_ops_async
;
344 crt
->init
= shash_async_init
;
345 crt
->update
= shash_async_update
;
346 crt
->final
= shash_async_final
;
347 crt
->finup
= shash_async_finup
;
348 crt
->digest
= shash_async_digest
;
349 if (crypto_shash_alg_has_setkey(alg
))
350 crt
->setkey
= shash_async_setkey
;
352 crypto_ahash_set_flags(crt
, crypto_shash_get_flags(shash
) &
353 CRYPTO_TFM_NEED_KEY
);
355 crt
->export
= shash_async_export
;
356 crt
->import
= shash_async_import
;
358 crt
->reqsize
= sizeof(struct shash_desc
) + crypto_shash_descsize(shash
);
/*
 * Clone the shash backing an async wrapper.  On failure the new ahash
 * tfm is freed and the error from crypto_clone_shash() is propagated.
 */
struct crypto_ahash *crypto_clone_shash_ops_async(struct crypto_ahash *nhash,
						  struct crypto_ahash *hash)
{
	struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
	struct crypto_shash **ctx = crypto_ahash_ctx(hash);
	struct crypto_shash *shash;

	shash = crypto_clone_shash(*ctx);
	if (IS_ERR(shash)) {
		crypto_free_ahash(nhash);
		return ERR_CAST(shash);
	}

	*nctx = shash;

	return nhash;
}
381 static void crypto_shash_exit_tfm(struct crypto_tfm
*tfm
)
383 struct crypto_shash
*hash
= __crypto_shash_cast(tfm
);
384 struct shash_alg
*alg
= crypto_shash_alg(hash
);
389 static int crypto_shash_init_tfm(struct crypto_tfm
*tfm
)
391 struct crypto_shash
*hash
= __crypto_shash_cast(tfm
);
392 struct shash_alg
*alg
= crypto_shash_alg(hash
);
395 hash
->descsize
= alg
->descsize
;
397 shash_set_needkey(hash
, alg
);
400 tfm
->exit
= crypto_shash_exit_tfm
;
405 err
= alg
->init_tfm(hash
);
409 /* ->init_tfm() may have increased the descsize. */
410 if (WARN_ON_ONCE(hash
->descsize
> HASH_MAX_DESCSIZE
)) {
419 static void crypto_shash_free_instance(struct crypto_instance
*inst
)
421 struct shash_instance
*shash
= shash_instance(inst
);
426 static int __maybe_unused
crypto_shash_report(
427 struct sk_buff
*skb
, struct crypto_alg
*alg
)
429 struct crypto_report_hash rhash
;
430 struct shash_alg
*salg
= __crypto_shash_alg(alg
);
432 memset(&rhash
, 0, sizeof(rhash
));
434 strscpy(rhash
.type
, "shash", sizeof(rhash
.type
));
436 rhash
.blocksize
= alg
->cra_blocksize
;
437 rhash
.digestsize
= salg
->digestsize
;
439 return nla_put(skb
, CRYPTOCFGA_REPORT_HASH
, sizeof(rhash
), &rhash
);
442 static void crypto_shash_show(struct seq_file
*m
, struct crypto_alg
*alg
)
444 static void crypto_shash_show(struct seq_file
*m
, struct crypto_alg
*alg
)
446 struct shash_alg
*salg
= __crypto_shash_alg(alg
);
448 seq_printf(m
, "type : shash\n");
449 seq_printf(m
, "blocksize : %u\n", alg
->cra_blocksize
);
450 seq_printf(m
, "digestsize : %u\n", salg
->digestsize
);
453 static int __maybe_unused
crypto_shash_report_stat(
454 struct sk_buff
*skb
, struct crypto_alg
*alg
)
456 return crypto_hash_report_stat(skb
, alg
, "shash");
459 static const struct crypto_type crypto_shash_type
= {
460 .extsize
= crypto_alg_extsize
,
461 .init_tfm
= crypto_shash_init_tfm
,
462 .free
= crypto_shash_free_instance
,
463 #ifdef CONFIG_PROC_FS
464 .show
= crypto_shash_show
,
466 #if IS_ENABLED(CONFIG_CRYPTO_USER)
467 .report
= crypto_shash_report
,
469 #ifdef CONFIG_CRYPTO_STATS
470 .report_stat
= crypto_shash_report_stat
,
472 .maskclear
= ~CRYPTO_ALG_TYPE_MASK
,
473 .maskset
= CRYPTO_ALG_TYPE_MASK
,
474 .type
= CRYPTO_ALG_TYPE_SHASH
,
475 .tfmsize
= offsetof(struct crypto_shash
, base
),
478 int crypto_grab_shash(struct crypto_shash_spawn
*spawn
,
479 struct crypto_instance
*inst
,
480 const char *name
, u32 type
, u32 mask
)
482 spawn
->base
.frontend
= &crypto_shash_type
;
483 return crypto_grab_spawn(&spawn
->base
, inst
, name
, type
, mask
);
485 EXPORT_SYMBOL_GPL(crypto_grab_shash
);
487 struct crypto_shash
*crypto_alloc_shash(const char *alg_name
, u32 type
,
490 return crypto_alloc_tfm(alg_name
, &crypto_shash_type
, type
, mask
);
492 EXPORT_SYMBOL_GPL(crypto_alloc_shash
);
494 int crypto_has_shash(const char *alg_name
, u32 type
, u32 mask
)
496 return crypto_type_has_alg(alg_name
, &crypto_shash_type
, type
, mask
);
498 EXPORT_SYMBOL_GPL(crypto_has_shash
);
500 struct crypto_shash
*crypto_clone_shash(struct crypto_shash
*hash
)
502 struct crypto_tfm
*tfm
= crypto_shash_tfm(hash
);
503 struct shash_alg
*alg
= crypto_shash_alg(hash
);
504 struct crypto_shash
*nhash
;
507 if (!crypto_shash_alg_has_setkey(alg
)) {
508 tfm
= crypto_tfm_get(tfm
);
510 return ERR_CAST(tfm
);
515 if (!alg
->clone_tfm
&& (alg
->init_tfm
|| alg
->base
.cra_init
))
516 return ERR_PTR(-ENOSYS
);
518 nhash
= crypto_clone_tfm(&crypto_shash_type
, tfm
);
522 nhash
->descsize
= hash
->descsize
;
524 if (alg
->clone_tfm
) {
525 err
= alg
->clone_tfm(nhash
, hash
);
527 crypto_free_shash(nhash
);
534 EXPORT_SYMBOL_GPL(crypto_clone_shash
);
536 int hash_prepare_alg(struct hash_alg_common
*alg
)
538 struct crypto_istat_hash
*istat
= hash_get_stat(alg
);
539 struct crypto_alg
*base
= &alg
->base
;
541 if (alg
->digestsize
> HASH_MAX_DIGESTSIZE
)
544 base
->cra_flags
&= ~CRYPTO_ALG_TYPE_MASK
;
546 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
547 memset(istat
, 0, sizeof(*istat
));
552 static int shash_prepare_alg(struct shash_alg
*alg
)
554 struct crypto_alg
*base
= &alg
->halg
.base
;
557 if (alg
->descsize
> HASH_MAX_DESCSIZE
)
560 /* alignmask is not useful for shash, so it is not supported. */
561 if (base
->cra_alignmask
)
564 if ((alg
->export
&& !alg
->import
) || (alg
->import
&& !alg
->export
))
567 err
= hash_prepare_alg(&alg
->halg
);
571 base
->cra_type
= &crypto_shash_type
;
572 base
->cra_flags
|= CRYPTO_ALG_TYPE_SHASH
;
575 * Handle missing optional functions. For each one we can either
576 * install a default here, or we can leave the pointer as NULL and check
577 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
578 * when the default behavior is desired. For ->finup and ->digest we
579 * install defaults, since for optimal performance algorithms should
580 * implement these anyway. On the other hand, for ->import and
581 * ->export the common case and best performance comes from the simple
582 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
583 * leave them NULL and provide the memcpy with no indirect call.
586 alg
->finup
= shash_default_finup
;
588 alg
->digest
= shash_default_digest
;
590 alg
->halg
.statesize
= alg
->descsize
;
592 alg
->setkey
= shash_no_setkey
;
597 int crypto_register_shash(struct shash_alg
*alg
)
599 struct crypto_alg
*base
= &alg
->base
;
602 err
= shash_prepare_alg(alg
);
606 return crypto_register_alg(base
);
608 EXPORT_SYMBOL_GPL(crypto_register_shash
);
610 void crypto_unregister_shash(struct shash_alg
*alg
)
612 crypto_unregister_alg(&alg
->base
);
614 EXPORT_SYMBOL_GPL(crypto_unregister_shash
);
616 int crypto_register_shashes(struct shash_alg
*algs
, int count
)
620 for (i
= 0; i
< count
; i
++) {
621 ret
= crypto_register_shash(&algs
[i
]);
629 for (--i
; i
>= 0; --i
)
630 crypto_unregister_shash(&algs
[i
]);
634 EXPORT_SYMBOL_GPL(crypto_register_shashes
);
636 void crypto_unregister_shashes(struct shash_alg
*algs
, int count
)
640 for (i
= count
- 1; i
>= 0; --i
)
641 crypto_unregister_shash(&algs
[i
]);
643 EXPORT_SYMBOL_GPL(crypto_unregister_shashes
);
645 int shash_register_instance(struct crypto_template
*tmpl
,
646 struct shash_instance
*inst
)
650 if (WARN_ON(!inst
->free
))
653 err
= shash_prepare_alg(&inst
->alg
);
657 return crypto_register_instance(tmpl
, shash_crypto_instance(inst
));
659 EXPORT_SYMBOL_GPL(shash_register_instance
);
/* Generic ->free for instances whose context is a single spawn. */
void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
668 MODULE_LICENSE("GPL");
669 MODULE_DESCRIPTION("Synchronous cryptographic hash type");