/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

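/*
 * Default ->setkey() for algorithms that do not implement one: any
 * attempt to key such a tfm fails with -ENOSYS.
 */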
int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

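/*
 * Slow path for setkey(): the caller's key buffer does not satisfy the
 * algorithm's alignment mask, so copy the key into a freshly allocated,
 * suitably aligned buffer before handing it to ->setkey().  The copy is
 * zeroed on free so key material does not linger in the heap.
 */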
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

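/*
 * Mark the tfm as unusable until a key is set, but only for algorithms
 * that actually require a key, i.e. that have a setkey and do not carry
 * the CRYPTO_ALG_OPTIONAL_KEY flag.
 */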
static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_has_setkey(alg) &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

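/*
 * Set the key for a shash tfm, bouncing through an aligned copy when the
 * caller's buffer violates the alignment mask.  On failure the NEED_KEY
 * flag is re-asserted so a half-keyed tfm cannot be used.
 */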
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

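/*
 * Slow path for update(): process just enough leading bytes through an
 * aligned on-stack bounce buffer to realign the data pointer, then feed
 * the remainder to ->update() directly.
 */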
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

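/*
 * Slow path for final(): write the digest into an aligned on-stack
 * buffer first, then copy it out to the caller's misaligned destination.
 * The bounce buffer is cleared afterwards.
 */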
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

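/*
 * ahash update() built on top of a shash: walk the request's
 * scatterlist and pass each mapped chunk to crypto_shash_update().
 */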
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

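/*
 * ahash finup() built on top of a shash: update over every chunk of the
 * scatterlist, folding the last chunk into a finup() call so the final
 * block is hashed in the same pass.
 */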
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

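/*
 * ahash digest() built on top of a shash.  If the data is contiguous
 * within a single page, it is mapped and digested in one call;
 * otherwise fall back to init() followed by the scatterlist walk in
 * shash_ahash_finup().
 */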
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

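/*
 * Instantiate the asynchronous (ahash) interface on top of a shash
 * algorithm: allocate the underlying shash tfm, store it in the ahash
 * context, and wire the ahash entry points to the shash_async_*
 * wrappers above.
 */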
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

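/*
 * Per-tfm initialisation: cache the descriptor size and require a key
 * up front when the algorithm demands one.
 */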
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

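/*
 * Validate and fill in an shash algorithm before registration: reject
 * oversized digest/descriptor/state sizes, require export/import to
 * come as a pair, and install default implementations for any optional
 * hooks the algorithm left NULL.
 */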
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

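/*
 * Register a template-created shash instance after running the same
 * preparation as crypto_register_shash().
 */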
int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");