/*
 * Source: arch/arm/crypto/sha256_glue.c (thirdparty/linux.git mirror)
 */
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Glue code for the SHA256 Secure Hash Algorithm assembly implementation
4 * using optimized ARM assembler and NEON instructions.
5 *
6 * Copyright © 2015 Google Inc.
7 *
8 * This file is based on sha256_ssse3_glue.c:
9 * Copyright (C) 2013 Intel Corporation
10 * Author: Tim Chen <tim.c.chen@linux.intel.com>
11 */
12
13 #include <crypto/internal/hash.h>
14 #include <linux/crypto.h>
15 #include <linux/init.h>
16 #include <linux/module.h>
17 #include <linux/mm.h>
18 #include <linux/types.h>
19 #include <linux/string.h>
20 #include <crypto/sha.h>
21 #include <crypto/sha256_base.h>
22 #include <asm/simd.h>
23 #include <asm/neon.h>
24
25 #include "sha256_glue.h"
26
27 asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
28 unsigned int num_blks);
29
30 int crypto_sha256_arm_update(struct shash_desc *desc, const u8 *data,
31 unsigned int len)
32 {
33 /* make sure casting to sha256_block_fn() is safe */
34 BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);
35
36 return sha256_base_do_update(desc, data, len,
37 (sha256_block_fn *)sha256_block_data_order);
38 }
39 EXPORT_SYMBOL(crypto_sha256_arm_update);
40
41 static int crypto_sha256_arm_final(struct shash_desc *desc, u8 *out)
42 {
43 sha256_base_do_finalize(desc,
44 (sha256_block_fn *)sha256_block_data_order);
45 return sha256_base_finish(desc, out);
46 }
47
48 int crypto_sha256_arm_finup(struct shash_desc *desc, const u8 *data,
49 unsigned int len, u8 *out)
50 {
51 sha256_base_do_update(desc, data, len,
52 (sha256_block_fn *)sha256_block_data_order);
53 return crypto_sha256_arm_final(desc, out);
54 }
55 EXPORT_SYMBOL(crypto_sha256_arm_finup);
56
/*
 * Scalar ARM assembly implementations of SHA-256 and SHA-224.
 * cra_priority 150 ranks above the generic C implementation (100) so these
 * are preferred when registered; the NEON variants (registered separately
 * in sha256_mod_init()) can outrank them.
 */
static struct shash_alg algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	crypto_sha256_arm_update,
	.final		=	crypto_sha256_arm_final,
	.finup		=	crypto_sha256_arm_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-asm",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	/* SHA-224 shares the SHA-256 compression function; only init and
	 * digest size differ. */
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	crypto_sha256_arm_update,
	.final		=	crypto_sha256_arm_final,
	.finup		=	crypto_sha256_arm_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-asm",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
86
87 static int __init sha256_mod_init(void)
88 {
89 int res = crypto_register_shashes(algs, ARRAY_SIZE(algs));
90
91 if (res < 0)
92 return res;
93
94 if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon()) {
95 res = crypto_register_shashes(sha256_neon_algs,
96 ARRAY_SIZE(sha256_neon_algs));
97
98 if (res < 0)
99 crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
100 }
101
102 return res;
103 }
104
105 static void __exit sha256_mod_fini(void)
106 {
107 crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
108
109 if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon())
110 crypto_unregister_shashes(sha256_neon_algs,
111 ARRAY_SIZE(sha256_neon_algs));
112 }
113
114 module_init(sha256_mod_init);
115 module_exit(sha256_mod_fini);
116
117 MODULE_LICENSE("GPL");
118 MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm (ARM), including NEON");
119
120 MODULE_ALIAS_CRYPTO("sha256");