// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the asynchronous version of hash.c with notification of
 * completion via a callback.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */
11 #include <crypto/scatterwalk.h>
12 #include <linux/cryptouser.h>
13 #include <linux/err.h>
14 #include <linux/kernel.h>
15 #include <linux/module.h>
16 #include <linux/sched.h>
17 #include <linux/slab.h>
18 #include <linux/seq_file.h>
19 #include <linux/string.h>
20 #include <net/netlink.h>
24 #define CRYPTO_ALG_TYPE_AHASH_MASK 0x0000000e
26 static const struct crypto_type crypto_ahash_type
;
28 static int hash_walk_next(struct crypto_hash_walk
*walk
)
30 unsigned int offset
= walk
->offset
;
31 unsigned int nbytes
= min(walk
->entrylen
,
32 ((unsigned int)(PAGE_SIZE
)) - offset
);
34 walk
->data
= kmap_local_page(walk
->pg
);
36 walk
->entrylen
-= nbytes
;
40 static int hash_walk_new_entry(struct crypto_hash_walk
*walk
)
42 struct scatterlist
*sg
;
45 walk
->offset
= sg
->offset
;
46 walk
->pg
= sg_page(walk
->sg
) + (walk
->offset
>> PAGE_SHIFT
);
47 walk
->offset
= offset_in_page(walk
->offset
);
48 walk
->entrylen
= sg
->length
;
50 if (walk
->entrylen
> walk
->total
)
51 walk
->entrylen
= walk
->total
;
52 walk
->total
-= walk
->entrylen
;
54 return hash_walk_next(walk
);
57 int crypto_hash_walk_done(struct crypto_hash_walk
*walk
, int err
)
59 walk
->data
-= walk
->offset
;
61 kunmap_local(walk
->data
);
62 crypto_yield(walk
->flags
);
70 return hash_walk_next(walk
);
76 walk
->sg
= sg_next(walk
->sg
);
78 return hash_walk_new_entry(walk
);
80 EXPORT_SYMBOL_GPL(crypto_hash_walk_done
);
82 int crypto_hash_walk_first(struct ahash_request
*req
,
83 struct crypto_hash_walk
*walk
)
85 walk
->total
= req
->nbytes
;
93 walk
->flags
= req
->base
.flags
;
95 return hash_walk_new_entry(walk
);
97 EXPORT_SYMBOL_GPL(crypto_hash_walk_first
);
99 static int ahash_nosetkey(struct crypto_ahash
*tfm
, const u8
*key
,
105 static void ahash_set_needkey(struct crypto_ahash
*tfm
)
107 const struct hash_alg_common
*alg
= crypto_hash_alg_common(tfm
);
109 if (tfm
->setkey
!= ahash_nosetkey
&&
110 !(alg
->base
.cra_flags
& CRYPTO_ALG_OPTIONAL_KEY
))
111 crypto_ahash_set_flags(tfm
, CRYPTO_TFM_NEED_KEY
);
114 int crypto_ahash_setkey(struct crypto_ahash
*tfm
, const u8
*key
,
117 int err
= tfm
->setkey(tfm
, key
, keylen
);
120 ahash_set_needkey(tfm
);
124 crypto_ahash_clear_flags(tfm
, CRYPTO_TFM_NEED_KEY
);
127 EXPORT_SYMBOL_GPL(crypto_ahash_setkey
);
129 static int ahash_save_req(struct ahash_request
*req
, crypto_completion_t cplt
,
132 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
133 unsigned int ds
= crypto_ahash_digestsize(tfm
);
134 struct ahash_request
*subreq
;
135 unsigned int subreq_size
;
136 unsigned int reqsize
;
141 subreq_size
= sizeof(*subreq
);
142 reqsize
= crypto_ahash_reqsize(tfm
);
143 reqsize
= ALIGN(reqsize
, crypto_tfm_ctx_alignment());
144 subreq_size
+= reqsize
;
147 flags
= ahash_request_flags(req
);
148 gfp
= (flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ? GFP_KERNEL
: GFP_ATOMIC
;
149 subreq
= kmalloc(subreq_size
, gfp
);
153 ahash_request_set_tfm(subreq
, tfm
);
154 ahash_request_set_callback(subreq
, flags
, cplt
, req
);
156 result
= (u8
*)(subreq
+ 1) + reqsize
;
158 ahash_request_set_crypt(subreq
, req
->src
, result
, req
->nbytes
);
163 state
= kmalloc(crypto_ahash_statesize(tfm
), gfp
);
169 crypto_ahash_export(req
, state
);
170 crypto_ahash_import(subreq
, state
);
171 kfree_sensitive(state
);
179 static void ahash_restore_req(struct ahash_request
*req
, int err
)
181 struct ahash_request
*subreq
= req
->priv
;
184 memcpy(req
->result
, subreq
->result
,
185 crypto_ahash_digestsize(crypto_ahash_reqtfm(req
)));
189 kfree_sensitive(subreq
);
192 int crypto_ahash_final(struct ahash_request
*req
)
194 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
195 struct hash_alg_common
*alg
= crypto_hash_alg_common(tfm
);
197 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
198 atomic64_inc(&hash_get_stat(alg
)->hash_cnt
);
200 return crypto_hash_errstat(alg
, tfm
->final(req
));
202 EXPORT_SYMBOL_GPL(crypto_ahash_final
);
204 int crypto_ahash_finup(struct ahash_request
*req
)
206 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
207 struct hash_alg_common
*alg
= crypto_hash_alg_common(tfm
);
209 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
210 struct crypto_istat_hash
*istat
= hash_get_stat(alg
);
212 atomic64_inc(&istat
->hash_cnt
);
213 atomic64_add(req
->nbytes
, &istat
->hash_tlen
);
216 return crypto_hash_errstat(alg
, tfm
->finup(req
));
218 EXPORT_SYMBOL_GPL(crypto_ahash_finup
);
220 int crypto_ahash_digest(struct ahash_request
*req
)
222 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
223 struct hash_alg_common
*alg
= crypto_hash_alg_common(tfm
);
226 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
227 struct crypto_istat_hash
*istat
= hash_get_stat(alg
);
229 atomic64_inc(&istat
->hash_cnt
);
230 atomic64_add(req
->nbytes
, &istat
->hash_tlen
);
233 if (crypto_ahash_get_flags(tfm
) & CRYPTO_TFM_NEED_KEY
)
236 err
= tfm
->digest(req
);
238 return crypto_hash_errstat(alg
, err
);
240 EXPORT_SYMBOL_GPL(crypto_ahash_digest
);
242 static void ahash_def_finup_done2(void *data
, int err
)
244 struct ahash_request
*areq
= data
;
246 if (err
== -EINPROGRESS
)
249 ahash_restore_req(areq
, err
);
251 ahash_request_complete(areq
, err
);
254 static int ahash_def_finup_finish1(struct ahash_request
*req
, int err
)
256 struct ahash_request
*subreq
= req
->priv
;
261 subreq
->base
.complete
= ahash_def_finup_done2
;
263 err
= crypto_ahash_reqtfm(req
)->final(subreq
);
264 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
268 ahash_restore_req(req
, err
);
272 static void ahash_def_finup_done1(void *data
, int err
)
274 struct ahash_request
*areq
= data
;
275 struct ahash_request
*subreq
;
277 if (err
== -EINPROGRESS
)
281 subreq
->base
.flags
&= CRYPTO_TFM_REQ_MAY_BACKLOG
;
283 err
= ahash_def_finup_finish1(areq
, err
);
284 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
288 ahash_request_complete(areq
, err
);
291 static int ahash_def_finup(struct ahash_request
*req
)
293 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
296 err
= ahash_save_req(req
, ahash_def_finup_done1
, true);
300 err
= tfm
->update(req
->priv
);
301 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
304 return ahash_def_finup_finish1(req
, err
);
307 static void crypto_ahash_exit_tfm(struct crypto_tfm
*tfm
)
309 struct crypto_ahash
*hash
= __crypto_ahash_cast(tfm
);
310 struct ahash_alg
*alg
= crypto_ahash_alg(hash
);
315 static int crypto_ahash_init_tfm(struct crypto_tfm
*tfm
)
317 struct crypto_ahash
*hash
= __crypto_ahash_cast(tfm
);
318 struct ahash_alg
*alg
= crypto_ahash_alg(hash
);
320 hash
->setkey
= ahash_nosetkey
;
322 crypto_ahash_set_statesize(hash
, alg
->halg
.statesize
);
324 if (tfm
->__crt_alg
->cra_type
!= &crypto_ahash_type
)
325 return crypto_init_shash_ops_async(tfm
);
327 hash
->init
= alg
->init
;
328 hash
->update
= alg
->update
;
329 hash
->final
= alg
->final
;
330 hash
->finup
= alg
->finup
?: ahash_def_finup
;
331 hash
->digest
= alg
->digest
;
332 hash
->export
= alg
->export
;
333 hash
->import
= alg
->import
;
336 hash
->setkey
= alg
->setkey
;
337 ahash_set_needkey(hash
);
341 tfm
->exit
= crypto_ahash_exit_tfm
;
343 return alg
->init_tfm
? alg
->init_tfm(hash
) : 0;
346 static unsigned int crypto_ahash_extsize(struct crypto_alg
*alg
)
348 if (alg
->cra_type
!= &crypto_ahash_type
)
349 return sizeof(struct crypto_shash
*);
351 return crypto_alg_extsize(alg
);
354 static void crypto_ahash_free_instance(struct crypto_instance
*inst
)
356 struct ahash_instance
*ahash
= ahash_instance(inst
);
361 static int __maybe_unused
crypto_ahash_report(
362 struct sk_buff
*skb
, struct crypto_alg
*alg
)
364 struct crypto_report_hash rhash
;
366 memset(&rhash
, 0, sizeof(rhash
));
368 strscpy(rhash
.type
, "ahash", sizeof(rhash
.type
));
370 rhash
.blocksize
= alg
->cra_blocksize
;
371 rhash
.digestsize
= __crypto_hash_alg_common(alg
)->digestsize
;
373 return nla_put(skb
, CRYPTOCFGA_REPORT_HASH
, sizeof(rhash
), &rhash
);
376 static void crypto_ahash_show(struct seq_file
*m
, struct crypto_alg
*alg
)
378 static void crypto_ahash_show(struct seq_file
*m
, struct crypto_alg
*alg
)
380 seq_printf(m
, "type : ahash\n");
381 seq_printf(m
, "async : %s\n", alg
->cra_flags
& CRYPTO_ALG_ASYNC
?
383 seq_printf(m
, "blocksize : %u\n", alg
->cra_blocksize
);
384 seq_printf(m
, "digestsize : %u\n",
385 __crypto_hash_alg_common(alg
)->digestsize
);
388 static int __maybe_unused
crypto_ahash_report_stat(
389 struct sk_buff
*skb
, struct crypto_alg
*alg
)
391 return crypto_hash_report_stat(skb
, alg
, "ahash");
394 static const struct crypto_type crypto_ahash_type
= {
395 .extsize
= crypto_ahash_extsize
,
396 .init_tfm
= crypto_ahash_init_tfm
,
397 .free
= crypto_ahash_free_instance
,
398 #ifdef CONFIG_PROC_FS
399 .show
= crypto_ahash_show
,
401 #if IS_ENABLED(CONFIG_CRYPTO_USER)
402 .report
= crypto_ahash_report
,
404 #ifdef CONFIG_CRYPTO_STATS
405 .report_stat
= crypto_ahash_report_stat
,
407 .maskclear
= ~CRYPTO_ALG_TYPE_MASK
,
408 .maskset
= CRYPTO_ALG_TYPE_AHASH_MASK
,
409 .type
= CRYPTO_ALG_TYPE_AHASH
,
410 .tfmsize
= offsetof(struct crypto_ahash
, base
),
413 int crypto_grab_ahash(struct crypto_ahash_spawn
*spawn
,
414 struct crypto_instance
*inst
,
415 const char *name
, u32 type
, u32 mask
)
417 spawn
->base
.frontend
= &crypto_ahash_type
;
418 return crypto_grab_spawn(&spawn
->base
, inst
, name
, type
, mask
);
420 EXPORT_SYMBOL_GPL(crypto_grab_ahash
);
422 struct crypto_ahash
*crypto_alloc_ahash(const char *alg_name
, u32 type
,
425 return crypto_alloc_tfm(alg_name
, &crypto_ahash_type
, type
, mask
);
427 EXPORT_SYMBOL_GPL(crypto_alloc_ahash
);
429 int crypto_has_ahash(const char *alg_name
, u32 type
, u32 mask
)
431 return crypto_type_has_alg(alg_name
, &crypto_ahash_type
, type
, mask
);
433 EXPORT_SYMBOL_GPL(crypto_has_ahash
);
435 struct crypto_ahash
*crypto_clone_ahash(struct crypto_ahash
*hash
)
437 struct hash_alg_common
*halg
= crypto_hash_alg_common(hash
);
438 struct crypto_tfm
*tfm
= crypto_ahash_tfm(hash
);
439 struct crypto_ahash
*nhash
;
440 struct ahash_alg
*alg
;
443 if (!crypto_hash_alg_has_setkey(halg
)) {
444 tfm
= crypto_tfm_get(tfm
);
446 return ERR_CAST(tfm
);
451 nhash
= crypto_clone_tfm(&crypto_ahash_type
, tfm
);
456 nhash
->init
= hash
->init
;
457 nhash
->update
= hash
->update
;
458 nhash
->final
= hash
->final
;
459 nhash
->finup
= hash
->finup
;
460 nhash
->digest
= hash
->digest
;
461 nhash
->export
= hash
->export
;
462 nhash
->import
= hash
->import
;
463 nhash
->setkey
= hash
->setkey
;
464 nhash
->reqsize
= hash
->reqsize
;
465 nhash
->statesize
= hash
->statesize
;
467 if (tfm
->__crt_alg
->cra_type
!= &crypto_ahash_type
)
468 return crypto_clone_shash_ops_async(nhash
, hash
);
471 alg
= crypto_ahash_alg(hash
);
475 err
= alg
->clone_tfm(nhash
, hash
);
482 crypto_free_ahash(nhash
);
485 EXPORT_SYMBOL_GPL(crypto_clone_ahash
);
487 static int ahash_prepare_alg(struct ahash_alg
*alg
)
489 struct crypto_alg
*base
= &alg
->halg
.base
;
492 if (alg
->halg
.statesize
== 0)
495 err
= hash_prepare_alg(&alg
->halg
);
499 base
->cra_type
= &crypto_ahash_type
;
500 base
->cra_flags
|= CRYPTO_ALG_TYPE_AHASH
;
505 int crypto_register_ahash(struct ahash_alg
*alg
)
507 struct crypto_alg
*base
= &alg
->halg
.base
;
510 err
= ahash_prepare_alg(alg
);
514 return crypto_register_alg(base
);
516 EXPORT_SYMBOL_GPL(crypto_register_ahash
);
518 void crypto_unregister_ahash(struct ahash_alg
*alg
)
520 crypto_unregister_alg(&alg
->halg
.base
);
522 EXPORT_SYMBOL_GPL(crypto_unregister_ahash
);
524 int crypto_register_ahashes(struct ahash_alg
*algs
, int count
)
528 for (i
= 0; i
< count
; i
++) {
529 ret
= crypto_register_ahash(&algs
[i
]);
537 for (--i
; i
>= 0; --i
)
538 crypto_unregister_ahash(&algs
[i
]);
542 EXPORT_SYMBOL_GPL(crypto_register_ahashes
);
544 void crypto_unregister_ahashes(struct ahash_alg
*algs
, int count
)
548 for (i
= count
- 1; i
>= 0; --i
)
549 crypto_unregister_ahash(&algs
[i
]);
551 EXPORT_SYMBOL_GPL(crypto_unregister_ahashes
);
553 int ahash_register_instance(struct crypto_template
*tmpl
,
554 struct ahash_instance
*inst
)
558 if (WARN_ON(!inst
->free
))
561 err
= ahash_prepare_alg(&inst
->alg
);
565 return crypto_register_instance(tmpl
, ahash_crypto_instance(inst
));
567 EXPORT_SYMBOL_GPL(ahash_register_instance
);
569 bool crypto_hash_alg_has_setkey(struct hash_alg_common
*halg
)
571 struct crypto_alg
*alg
= &halg
->base
;
573 if (alg
->cra_type
!= &crypto_ahash_type
)
574 return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg
));
576 return __crypto_ahash_alg(alg
)->setkey
!= NULL
;
578 EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey
);
580 MODULE_LICENSE("GPL");
581 MODULE_DESCRIPTION("Asynchronous cryptographic hash type");