// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "hash.h"
19 static inline struct crypto_istat_hash
*shash_get_stat(struct shash_alg
*alg
)
21 return hash_get_stat(&alg
->halg
);
24 static inline int crypto_shash_errstat(struct shash_alg
*alg
, int err
)
26 if (!IS_ENABLED(CONFIG_CRYPTO_STATS
))
29 if (err
&& err
!= -EINPROGRESS
&& err
!= -EBUSY
)
30 atomic64_inc(&shash_get_stat(alg
)->err_cnt
);
35 int shash_no_setkey(struct crypto_shash
*tfm
, const u8
*key
,
40 EXPORT_SYMBOL_GPL(shash_no_setkey
);
42 static void shash_set_needkey(struct crypto_shash
*tfm
, struct shash_alg
*alg
)
44 if (crypto_shash_alg_needs_key(alg
))
45 crypto_shash_set_flags(tfm
, CRYPTO_TFM_NEED_KEY
);
48 int crypto_shash_setkey(struct crypto_shash
*tfm
, const u8
*key
,
51 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
54 err
= shash
->setkey(tfm
, key
, keylen
);
56 shash_set_needkey(tfm
, shash
);
60 crypto_shash_clear_flags(tfm
, CRYPTO_TFM_NEED_KEY
);
63 EXPORT_SYMBOL_GPL(crypto_shash_setkey
);
65 int crypto_shash_update(struct shash_desc
*desc
, const u8
*data
,
68 struct shash_alg
*shash
= crypto_shash_alg(desc
->tfm
);
71 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
72 atomic64_add(len
, &shash_get_stat(shash
)->hash_tlen
);
74 err
= shash
->update(desc
, data
, len
);
76 return crypto_shash_errstat(shash
, err
);
78 EXPORT_SYMBOL_GPL(crypto_shash_update
);
80 int crypto_shash_final(struct shash_desc
*desc
, u8
*out
)
82 struct shash_alg
*shash
= crypto_shash_alg(desc
->tfm
);
85 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
86 atomic64_inc(&shash_get_stat(shash
)->hash_cnt
);
88 err
= shash
->final(desc
, out
);
90 return crypto_shash_errstat(shash
, err
);
92 EXPORT_SYMBOL_GPL(crypto_shash_final
);
94 static int shash_default_finup(struct shash_desc
*desc
, const u8
*data
,
95 unsigned int len
, u8
*out
)
97 struct shash_alg
*shash
= crypto_shash_alg(desc
->tfm
);
99 return shash
->update(desc
, data
, len
) ?:
100 shash
->final(desc
, out
);
103 int crypto_shash_finup(struct shash_desc
*desc
, const u8
*data
,
104 unsigned int len
, u8
*out
)
106 struct crypto_shash
*tfm
= desc
->tfm
;
107 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
110 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
111 struct crypto_istat_hash
*istat
= shash_get_stat(shash
);
113 atomic64_inc(&istat
->hash_cnt
);
114 atomic64_add(len
, &istat
->hash_tlen
);
117 err
= shash
->finup(desc
, data
, len
, out
);
119 return crypto_shash_errstat(shash
, err
);
121 EXPORT_SYMBOL_GPL(crypto_shash_finup
);
123 static int shash_default_digest(struct shash_desc
*desc
, const u8
*data
,
124 unsigned int len
, u8
*out
)
126 struct shash_alg
*shash
= crypto_shash_alg(desc
->tfm
);
128 return shash
->init(desc
) ?:
129 shash
->finup(desc
, data
, len
, out
);
132 int crypto_shash_digest(struct shash_desc
*desc
, const u8
*data
,
133 unsigned int len
, u8
*out
)
135 struct crypto_shash
*tfm
= desc
->tfm
;
136 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
139 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
140 struct crypto_istat_hash
*istat
= shash_get_stat(shash
);
142 atomic64_inc(&istat
->hash_cnt
);
143 atomic64_add(len
, &istat
->hash_tlen
);
146 if (crypto_shash_get_flags(tfm
) & CRYPTO_TFM_NEED_KEY
)
149 err
= shash
->digest(desc
, data
, len
, out
);
151 return crypto_shash_errstat(shash
, err
);
153 EXPORT_SYMBOL_GPL(crypto_shash_digest
);
155 int crypto_shash_tfm_digest(struct crypto_shash
*tfm
, const u8
*data
,
156 unsigned int len
, u8
*out
)
158 SHASH_DESC_ON_STACK(desc
, tfm
);
163 err
= crypto_shash_digest(desc
, data
, len
, out
);
165 shash_desc_zero(desc
);
169 EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest
);
171 int crypto_shash_export(struct shash_desc
*desc
, void *out
)
173 struct crypto_shash
*tfm
= desc
->tfm
;
174 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
177 return shash
->export(desc
, out
);
179 memcpy(out
, shash_desc_ctx(desc
), crypto_shash_descsize(tfm
));
182 EXPORT_SYMBOL_GPL(crypto_shash_export
);
184 int crypto_shash_import(struct shash_desc
*desc
, const void *in
)
186 struct crypto_shash
*tfm
= desc
->tfm
;
187 struct shash_alg
*shash
= crypto_shash_alg(tfm
);
189 if (crypto_shash_get_flags(tfm
) & CRYPTO_TFM_NEED_KEY
)
193 return shash
->import(desc
, in
);
195 memcpy(shash_desc_ctx(desc
), in
, crypto_shash_descsize(tfm
));
198 EXPORT_SYMBOL_GPL(crypto_shash_import
);
200 static void crypto_shash_exit_tfm(struct crypto_tfm
*tfm
)
202 struct crypto_shash
*hash
= __crypto_shash_cast(tfm
);
203 struct shash_alg
*alg
= crypto_shash_alg(hash
);
208 static int crypto_shash_init_tfm(struct crypto_tfm
*tfm
)
210 struct crypto_shash
*hash
= __crypto_shash_cast(tfm
);
211 struct shash_alg
*alg
= crypto_shash_alg(hash
);
214 hash
->descsize
= alg
->descsize
;
216 shash_set_needkey(hash
, alg
);
219 tfm
->exit
= crypto_shash_exit_tfm
;
224 err
= alg
->init_tfm(hash
);
228 /* ->init_tfm() may have increased the descsize. */
229 if (WARN_ON_ONCE(hash
->descsize
> HASH_MAX_DESCSIZE
)) {
238 static void crypto_shash_free_instance(struct crypto_instance
*inst
)
240 struct shash_instance
*shash
= shash_instance(inst
);
245 static int __maybe_unused
crypto_shash_report(
246 struct sk_buff
*skb
, struct crypto_alg
*alg
)
248 struct crypto_report_hash rhash
;
249 struct shash_alg
*salg
= __crypto_shash_alg(alg
);
251 memset(&rhash
, 0, sizeof(rhash
));
253 strscpy(rhash
.type
, "shash", sizeof(rhash
.type
));
255 rhash
.blocksize
= alg
->cra_blocksize
;
256 rhash
.digestsize
= salg
->digestsize
;
258 return nla_put(skb
, CRYPTOCFGA_REPORT_HASH
, sizeof(rhash
), &rhash
);
261 static void crypto_shash_show(struct seq_file
*m
, struct crypto_alg
*alg
)
263 static void crypto_shash_show(struct seq_file
*m
, struct crypto_alg
*alg
)
265 struct shash_alg
*salg
= __crypto_shash_alg(alg
);
267 seq_printf(m
, "type : shash\n");
268 seq_printf(m
, "blocksize : %u\n", alg
->cra_blocksize
);
269 seq_printf(m
, "digestsize : %u\n", salg
->digestsize
);
272 static int __maybe_unused
crypto_shash_report_stat(
273 struct sk_buff
*skb
, struct crypto_alg
*alg
)
275 return crypto_hash_report_stat(skb
, alg
, "shash");
278 const struct crypto_type crypto_shash_type
= {
279 .extsize
= crypto_alg_extsize
,
280 .init_tfm
= crypto_shash_init_tfm
,
281 .free
= crypto_shash_free_instance
,
282 #ifdef CONFIG_PROC_FS
283 .show
= crypto_shash_show
,
285 #if IS_ENABLED(CONFIG_CRYPTO_USER)
286 .report
= crypto_shash_report
,
288 #ifdef CONFIG_CRYPTO_STATS
289 .report_stat
= crypto_shash_report_stat
,
291 .maskclear
= ~CRYPTO_ALG_TYPE_MASK
,
292 .maskset
= CRYPTO_ALG_TYPE_MASK
,
293 .type
= CRYPTO_ALG_TYPE_SHASH
,
294 .tfmsize
= offsetof(struct crypto_shash
, base
),
297 int crypto_grab_shash(struct crypto_shash_spawn
*spawn
,
298 struct crypto_instance
*inst
,
299 const char *name
, u32 type
, u32 mask
)
301 spawn
->base
.frontend
= &crypto_shash_type
;
302 return crypto_grab_spawn(&spawn
->base
, inst
, name
, type
, mask
);
304 EXPORT_SYMBOL_GPL(crypto_grab_shash
);
306 struct crypto_shash
*crypto_alloc_shash(const char *alg_name
, u32 type
,
309 return crypto_alloc_tfm(alg_name
, &crypto_shash_type
, type
, mask
);
311 EXPORT_SYMBOL_GPL(crypto_alloc_shash
);
313 int crypto_has_shash(const char *alg_name
, u32 type
, u32 mask
)
315 return crypto_type_has_alg(alg_name
, &crypto_shash_type
, type
, mask
);
317 EXPORT_SYMBOL_GPL(crypto_has_shash
);
319 struct crypto_shash
*crypto_clone_shash(struct crypto_shash
*hash
)
321 struct crypto_tfm
*tfm
= crypto_shash_tfm(hash
);
322 struct shash_alg
*alg
= crypto_shash_alg(hash
);
323 struct crypto_shash
*nhash
;
326 if (!crypto_shash_alg_has_setkey(alg
)) {
327 tfm
= crypto_tfm_get(tfm
);
329 return ERR_CAST(tfm
);
334 if (!alg
->clone_tfm
&& (alg
->init_tfm
|| alg
->base
.cra_init
))
335 return ERR_PTR(-ENOSYS
);
337 nhash
= crypto_clone_tfm(&crypto_shash_type
, tfm
);
341 nhash
->descsize
= hash
->descsize
;
343 if (alg
->clone_tfm
) {
344 err
= alg
->clone_tfm(nhash
, hash
);
346 crypto_free_shash(nhash
);
353 EXPORT_SYMBOL_GPL(crypto_clone_shash
);
355 int hash_prepare_alg(struct hash_alg_common
*alg
)
357 struct crypto_istat_hash
*istat
= hash_get_stat(alg
);
358 struct crypto_alg
*base
= &alg
->base
;
360 if (alg
->digestsize
> HASH_MAX_DIGESTSIZE
)
363 /* alignmask is not useful for hashes, so it is not supported. */
364 if (base
->cra_alignmask
)
367 base
->cra_flags
&= ~CRYPTO_ALG_TYPE_MASK
;
369 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
370 memset(istat
, 0, sizeof(*istat
));
375 static int shash_prepare_alg(struct shash_alg
*alg
)
377 struct crypto_alg
*base
= &alg
->halg
.base
;
380 if (alg
->descsize
> HASH_MAX_DESCSIZE
)
383 if ((alg
->export
&& !alg
->import
) || (alg
->import
&& !alg
->export
))
386 err
= hash_prepare_alg(&alg
->halg
);
390 base
->cra_type
= &crypto_shash_type
;
391 base
->cra_flags
|= CRYPTO_ALG_TYPE_SHASH
;
394 * Handle missing optional functions. For each one we can either
395 * install a default here, or we can leave the pointer as NULL and check
396 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
397 * when the default behavior is desired. For ->finup and ->digest we
398 * install defaults, since for optimal performance algorithms should
399 * implement these anyway. On the other hand, for ->import and
400 * ->export the common case and best performance comes from the simple
401 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
402 * leave them NULL and provide the memcpy with no indirect call.
405 alg
->finup
= shash_default_finup
;
407 alg
->digest
= shash_default_digest
;
409 alg
->halg
.statesize
= alg
->descsize
;
411 alg
->setkey
= shash_no_setkey
;
416 int crypto_register_shash(struct shash_alg
*alg
)
418 struct crypto_alg
*base
= &alg
->base
;
421 err
= shash_prepare_alg(alg
);
425 return crypto_register_alg(base
);
427 EXPORT_SYMBOL_GPL(crypto_register_shash
);
429 void crypto_unregister_shash(struct shash_alg
*alg
)
431 crypto_unregister_alg(&alg
->base
);
433 EXPORT_SYMBOL_GPL(crypto_unregister_shash
);
435 int crypto_register_shashes(struct shash_alg
*algs
, int count
)
439 for (i
= 0; i
< count
; i
++) {
440 ret
= crypto_register_shash(&algs
[i
]);
448 for (--i
; i
>= 0; --i
)
449 crypto_unregister_shash(&algs
[i
]);
453 EXPORT_SYMBOL_GPL(crypto_register_shashes
);
455 void crypto_unregister_shashes(struct shash_alg
*algs
, int count
)
459 for (i
= count
- 1; i
>= 0; --i
)
460 crypto_unregister_shash(&algs
[i
]);
462 EXPORT_SYMBOL_GPL(crypto_unregister_shashes
);
464 int shash_register_instance(struct crypto_template
*tmpl
,
465 struct shash_instance
*inst
)
469 if (WARN_ON(!inst
->free
))
472 err
= shash_prepare_alg(&inst
->alg
);
476 return crypto_register_instance(tmpl
, shash_crypto_instance(inst
));
478 EXPORT_SYMBOL_GPL(shash_register_instance
);
/* Free helper for instances with a single spawn in their context. */
void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
487 MODULE_LICENSE("GPL");
488 MODULE_DESCRIPTION("Synchronous cryptographic hash type");