arch/x86/crypto/blake2s-glue.c

// SPDX-License-Identifier: GPL-2.0 OR MIT
/*
 * Copyright (C) 2015-2019 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 */

#include <crypto/internal/blake2s.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/hash.h>

#include <linux/types.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>

#include <asm/cpufeature.h>
#include <asm/fpu/api.h>
#include <asm/processor.h>
#include <asm/simd.h>

asmlinkage void blake2s_compress_ssse3(struct blake2s_state *state,
				       const u8 *block, const size_t nblocks,
				       const u32 inc);
asmlinkage void blake2s_compress_avx512(struct blake2s_state *state,
					const u8 *block, const size_t nblocks,
					const u32 inc);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(blake2s_use_ssse3);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(blake2s_use_avx512);

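/*
 * Arch-optimized compression: fall back to the generic C implementation
 * when the SSSE3 path is unavailable or the FPU may not be used in the
 * current context, otherwise dispatch to the AVX-512 or SSSE3 assembly.
 */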
void blake2s_compress_arch(struct blake2s_state *state,
			   const u8 *block, size_t nblocks,
			   const u32 inc)
{
	/* SIMD disables preemption, so relax after processing each page. */
	BUILD_BUG_ON(SZ_4K / BLAKE2S_BLOCK_SIZE < 8);

	if (!static_branch_likely(&blake2s_use_ssse3) || !crypto_simd_usable()) {
		blake2s_compress_generic(state, block, nblocks, inc);
		return;
	}

	do {
		const size_t blocks = min_t(size_t, nblocks,
					    SZ_4K / BLAKE2S_BLOCK_SIZE);

		kernel_fpu_begin();
		if (IS_ENABLED(CONFIG_AS_AVX512) &&
		    static_branch_likely(&blake2s_use_avx512))
			blake2s_compress_avx512(state, block, blocks, inc);
		else
			blake2s_compress_ssse3(state, block, blocks, inc);
		kernel_fpu_end();

		nblocks -= blocks;
		block += blocks * BLAKE2S_BLOCK_SIZE;
	} while (nblocks);
}
EXPORT_SYMBOL(blake2s_compress_arch);

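/* BLAKE2s keys may be anywhere from 1 to BLAKE2S_KEY_SIZE (32) bytes long. */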
static int crypto_blake2s_setkey(struct crypto_shash *tfm, const u8 *key,
				 unsigned int keylen)
{
	struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(tfm);

	if (keylen == 0 || keylen > BLAKE2S_KEY_SIZE)
		return -EINVAL;

	memcpy(tctx->key, key, keylen);
	tctx->keylen = keylen;

	return 0;
}

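/* Start a keyed hash if a key was set on the tfm, an unkeyed one otherwise. */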
static int crypto_blake2s_init(struct shash_desc *desc)
{
	struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct blake2s_state *state = shash_desc_ctx(desc);
	const int outlen = crypto_shash_digestsize(desc->tfm);

	if (tctx->keylen)
		blake2s_init_key(state, outlen, tctx->key, tctx->keylen);
	else
		blake2s_init(state, outlen);

	return 0;
}

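/*
 * Buffering rule: the last block of data fed in so far is always kept
 * back in state->buf rather than compressed, so that crypto_blake2s_final()
 * can flag it as the final block before compressing it.
 */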
static int crypto_blake2s_update(struct shash_desc *desc, const u8 *in,
				 unsigned int inlen)
{
	struct blake2s_state *state = shash_desc_ctx(desc);
	const size_t fill = BLAKE2S_BLOCK_SIZE - state->buflen;

	if (unlikely(!inlen))
		return 0;
	if (inlen > fill) {
		memcpy(state->buf + state->buflen, in, fill);
		blake2s_compress_arch(state, state->buf, 1, BLAKE2S_BLOCK_SIZE);
		state->buflen = 0;
		in += fill;
		inlen -= fill;
	}
	if (inlen > BLAKE2S_BLOCK_SIZE) {
		const size_t nblocks = DIV_ROUND_UP(inlen, BLAKE2S_BLOCK_SIZE);
		/* Hash one less (full) block than strictly possible */
		blake2s_compress_arch(state, in, nblocks - 1, BLAKE2S_BLOCK_SIZE);
		in += BLAKE2S_BLOCK_SIZE * (nblocks - 1);
		inlen -= BLAKE2S_BLOCK_SIZE * (nblocks - 1);
	}
	memcpy(state->buf + state->buflen, in, inlen);
	state->buflen += inlen;

	return 0;
}

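/*
 * Pad the buffered final block with zeroes, compress it with the
 * last-block flag set, copy out the digest in little-endian order, and
 * wipe the hash state from the descriptor.
 */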
static int crypto_blake2s_final(struct shash_desc *desc, u8 *out)
{
	struct blake2s_state *state = shash_desc_ctx(desc);

	blake2s_set_lastblock(state);
	memset(state->buf + state->buflen, 0,
	       BLAKE2S_BLOCK_SIZE - state->buflen); /* Padding */
	blake2s_compress_arch(state, state->buf, 1, state->buflen);
	cpu_to_le32_array(state->h, ARRAY_SIZE(state->h));
	memcpy(out, state->h, state->outlen);
	memzero_explicit(state, sizeof(*state));

	return 0;
}

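/*
 * One shash instance per standard BLAKE2s digest size. cra_priority 200
 * ranks these drivers above lower-priority implementations of the same
 * algorithm names (such as the generic C one), and CRYPTO_ALG_OPTIONAL_KEY
 * allows them to be used both keyed and unkeyed.
 */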
static struct shash_alg blake2s_algs[] = {{
	.base.cra_name		= "blake2s-128",
	.base.cra_driver_name	= "blake2s-128-x86",
	.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
	.base.cra_ctxsize	= sizeof(struct blake2s_tfm_ctx),
	.base.cra_priority	= 200,
	.base.cra_blocksize	= BLAKE2S_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= BLAKE2S_128_HASH_SIZE,
	.setkey			= crypto_blake2s_setkey,
	.init			= crypto_blake2s_init,
	.update			= crypto_blake2s_update,
	.final			= crypto_blake2s_final,
	.descsize		= sizeof(struct blake2s_state),
}, {
	.base.cra_name		= "blake2s-160",
	.base.cra_driver_name	= "blake2s-160-x86",
	.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
	.base.cra_ctxsize	= sizeof(struct blake2s_tfm_ctx),
	.base.cra_priority	= 200,
	.base.cra_blocksize	= BLAKE2S_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= BLAKE2S_160_HASH_SIZE,
	.setkey			= crypto_blake2s_setkey,
	.init			= crypto_blake2s_init,
	.update			= crypto_blake2s_update,
	.final			= crypto_blake2s_final,
	.descsize		= sizeof(struct blake2s_state),
}, {
	.base.cra_name		= "blake2s-224",
	.base.cra_driver_name	= "blake2s-224-x86",
	.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
	.base.cra_ctxsize	= sizeof(struct blake2s_tfm_ctx),
	.base.cra_priority	= 200,
	.base.cra_blocksize	= BLAKE2S_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= BLAKE2S_224_HASH_SIZE,
	.setkey			= crypto_blake2s_setkey,
	.init			= crypto_blake2s_init,
	.update			= crypto_blake2s_update,
	.final			= crypto_blake2s_final,
	.descsize		= sizeof(struct blake2s_state),
}, {
	.base.cra_name		= "blake2s-256",
	.base.cra_driver_name	= "blake2s-256-x86",
	.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
	.base.cra_ctxsize	= sizeof(struct blake2s_tfm_ctx),
	.base.cra_priority	= 200,
	.base.cra_blocksize	= BLAKE2S_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= BLAKE2S_256_HASH_SIZE,
	.setkey			= crypto_blake2s_setkey,
	.init			= crypto_blake2s_init,
	.update			= crypto_blake2s_update,
	.final			= crypto_blake2s_final,
	.descsize		= sizeof(struct blake2s_state),
}};

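/*
 * SSSE3 is the baseline requirement. The AVX-512 path additionally needs
 * AVX, AVX2, AVX512F and AVX512VL, plus OS/xsave support for the
 * SSE, YMM and AVX-512 register state (the cpu_has_xfeatures() check).
 */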
static int __init blake2s_mod_init(void)
{
	if (!boot_cpu_has(X86_FEATURE_SSSE3))
		return 0;

	static_branch_enable(&blake2s_use_ssse3);

	if (IS_ENABLED(CONFIG_AS_AVX512) &&
	    boot_cpu_has(X86_FEATURE_AVX) &&
	    boot_cpu_has(X86_FEATURE_AVX2) &&
	    boot_cpu_has(X86_FEATURE_AVX512F) &&
	    boot_cpu_has(X86_FEATURE_AVX512VL) &&
	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM |
			      XFEATURE_MASK_AVX512, NULL))
		static_branch_enable(&blake2s_use_avx512);

	return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
		crypto_register_shashes(blake2s_algs,
					ARRAY_SIZE(blake2s_algs)) : 0;
}

static void __exit blake2s_mod_exit(void)
{
	if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
}

module_init(blake2s_mod_init);
module_exit(blake2s_mod_exit);

MODULE_ALIAS_CRYPTO("blake2s-128");
MODULE_ALIAS_CRYPTO("blake2s-128-x86");
MODULE_ALIAS_CRYPTO("blake2s-160");
MODULE_ALIAS_CRYPTO("blake2s-160-x86");
MODULE_ALIAS_CRYPTO("blake2s-224");
MODULE_ALIAS_CRYPTO("blake2s-224-x86");
MODULE_ALIAS_CRYPTO("blake2s-256");
MODULE_ALIAS_CRYPTO("blake2s-256-x86");
MODULE_LICENSE("GPL v2");