/* fat-s390x.c (GNU Nettle) — gitweb page-header text removed. */
1 /* fat-s390x.c
2
3 Copyright (C) 2020 Mamone Tarsha
4
5 This file is part of GNU Nettle.
6
7 GNU Nettle is free software: you can redistribute it and/or
8 modify it under the terms of either:
9
10 * the GNU Lesser General Public License as published by the Free
11 Software Foundation; either version 3 of the License, or (at your
12 option) any later version.
13
14 or
15
16 * the GNU General Public License as published by the Free
17 Software Foundation; either version 2 of the License, or (at your
18 option) any later version.
19
20 or both in parallel, as here.
21
22 GNU Nettle is distributed in the hope that it will be useful,
23 but WITHOUT ANY WARRANTY; without even the implied warranty of
24 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
25 General Public License for more details.
26
27 You should have received copies of the GNU General Public License and
28 the GNU Lesser General Public License along with this program. If
29 not, see http://www.gnu.org/licenses/.
30 */
31
32 #define _GNU_SOURCE
33
34 #if HAVE_CONFIG_H
35 # include "config.h"
36 #endif
37
38 #include <assert.h>
39 #include <stdio.h>
40 #include <stdlib.h>
41 #include <string.h>
42
43 #if defined(__GLIBC__) && defined(__GLIBC_PREREQ)
44 # if __GLIBC_PREREQ(2, 16)
45 # define USE_GETAUXVAL 1
46 # include <sys/auxv.h>
47 # endif
48 #endif
49
50 #include "nettle-types.h"
51
52 #include "memxor.h"
53 #include "aes-internal.h"
54 #include "chacha-internal.h"
55 #include "ghash-internal.h"
56 #include "fat-setup.h"
57
/* Max number of doublewords returned by STFLE */
#define FACILITY_DOUBLEWORDS_MAX 3
/* Doubleword index holding a given facility/function bit. */
#define FACILITY_INDEX(bit) ((bit) / 64)
/* STFLE and cipher query store doublewords as bit-reversed.
   reverse facility bit or function code in doubleword */
#define FACILITY_BIT(bit) (1ULL << (63 - (bit) % 64))

/* Define from arch/s390/include/asm/elf.h in Linux kernel */
#ifndef HWCAP_S390_STFLE
#define HWCAP_S390_STFLE 4
#endif

/* Facility bits (bit numbers in the STFLE facility list) */
#define FAC_VF 129     /* vector facility */
#define FAC_MSA 17     /* message-security assist */
#define FAC_MSA_X4 77  /* message-security-assist extension 4 */

/* Function codes (bit numbers in the KM / KIMD query status blocks) */
#define AES_128_CODE 18
#define AES_192_CODE 19
#define AES_256_CODE 20
#define SHA_1_CODE 1
#define SHA_256_CODE 2
#define SHA_512_CODE 3
#define GHASH_CODE 65
83
/* CPU capability flags filled in by get_s390x_features().  Each field
   is 1 when the corresponding facility or function code is available
   (or explicitly forced via the ENV_OVERRIDE environment variable),
   and 0 otherwise.  */
struct s390x_features
{
  int have_vector_facility;  /* z/Arch vector facility (FAC_VF) */
  int have_km_aes128;        /* KM supports AES-128 ECB */
  int have_km_aes192;        /* KM supports AES-192 ECB */
  int have_km_aes256;        /* KM supports AES-256 ECB */
  int have_kimd_sha_1;       /* KIMD supports SHA-1 */
  int have_kimd_sha_256;     /* KIMD supports SHA-256 */
  int have_kimd_sha_512;     /* KIMD supports SHA-512 */
  int have_kimd_ghash;       /* KIMD supports GHASH */
};
95
/* Assembly helpers (implemented elsewhere in the s390x port):
   _nettle_stfle stores up to FACILITY_SIZE doublewords of the STFLE
   facility list into FACILITY; the *_status functions store the KM /
   KIMD query status block into STATUS.  */
void _nettle_stfle(uint64_t *facility, uint64_t facility_size);
void _nettle_km_status(uint64_t *status);
void _nettle_kimd_status(uint64_t *status);
99
/* True when the length-SLEN string S equals the LLEN-byte literal
   LITERAL.  Every argument is parenthesized in the expansion so that
   expression arguments (e.g. "a + b" for llen) parse correctly.  */
#define MATCH(s, slen, literal, llen) \
  ((slen) == (llen) && memcmp ((s), (literal), (llen)) == 0)
102
103 static void
104 get_s390x_features (struct s390x_features *features)
105 {
106 features->have_vector_facility = 0;
107 features->have_km_aes128 = 0;
108 features->have_km_aes192 = 0;
109 features->have_km_aes256 = 0;
110 features->have_kimd_sha_1 = 0;
111 features->have_kimd_sha_256 = 0;
112 features->have_kimd_sha_512 = 0;
113 features->have_kimd_ghash = 0;
114
115 const char *s = secure_getenv (ENV_OVERRIDE);
116 if (s)
117 for (;;)
118 {
119 const char *sep = strchr (s, ',');
120 size_t length = sep ? (size_t) (sep - s) : strlen(s);
121
122 if (MATCH (s, length, "vf", 2))
123 features->have_vector_facility = 1;
124 else if (MATCH (s, length, "msa", 3))
125 features->have_kimd_sha_1 = 1;
126 else if (MATCH (s, length, "msa_x1", 6))
127 {
128 features->have_km_aes128 = 1;
129 features->have_kimd_sha_256 = 1;
130 }
131 else if (MATCH (s, length, "msa_x2", 6))
132 {
133 features->have_km_aes192 = 1;
134 features->have_km_aes256 = 1;
135 features->have_kimd_sha_512 = 1;
136 }
137 else if (MATCH (s, length, "msa_x4", 6))
138 features->have_kimd_ghash = 1;
139 if (!sep)
140 break;
141 s = sep + 1;
142 }
143 else
144 {
145 #if USE_GETAUXVAL
146 unsigned long hwcap = getauxval(AT_HWCAP);
147 if (hwcap & HWCAP_S390_STFLE)
148 {
149 uint64_t facilities[FACILITY_DOUBLEWORDS_MAX] = {0};
150 _nettle_stfle(facilities, FACILITY_DOUBLEWORDS_MAX);
151
152 if (facilities[FACILITY_INDEX(FAC_VF)] & FACILITY_BIT(FAC_VF))
153 features->have_vector_facility = 1;
154
155 if (facilities[FACILITY_INDEX(FAC_MSA)] & FACILITY_BIT(FAC_MSA))
156 {
157 uint64_t query_status[2] = {0};
158 _nettle_km_status(query_status);
159 if (query_status[FACILITY_INDEX(AES_128_CODE)] & FACILITY_BIT(AES_128_CODE))
160 features->have_km_aes128 = 1;
161 if (query_status[FACILITY_INDEX(AES_192_CODE)] & FACILITY_BIT(AES_192_CODE))
162 features->have_km_aes192 = 1;
163 if (query_status[FACILITY_INDEX(AES_256_CODE)] & FACILITY_BIT(AES_256_CODE))
164 features->have_km_aes256 = 1;
165
166 memset(query_status, 0, sizeof(query_status));
167 _nettle_kimd_status(query_status);
168 if (query_status[FACILITY_INDEX(SHA_1_CODE)] & FACILITY_BIT(SHA_1_CODE))
169 features->have_kimd_sha_1 = 1;
170 if (query_status[FACILITY_INDEX(SHA_256_CODE)] & FACILITY_BIT(SHA_256_CODE))
171 features->have_kimd_sha_256 = 1;
172 if (query_status[FACILITY_INDEX(SHA_512_CODE)] & FACILITY_BIT(SHA_512_CODE))
173 features->have_kimd_sha_512 = 1;
174 }
175
176 if (facilities[FACILITY_INDEX(FAC_MSA_X4)] & FACILITY_BIT(FAC_MSA_X4))
177 {
178 uint64_t query_status[2] = {0};
179 _nettle_kimd_status(query_status);
180 if (query_status[FACILITY_INDEX(GHASH_CODE)] & FACILITY_BIT(GHASH_CODE))
181 features->have_kimd_ghash = 1;
182 }
183 }
184 #endif
185 }
186 }
187
188 /* MEMXOR3 */
189 DECLARE_FAT_FUNC(nettle_memxor3, memxor3_func)
190 DECLARE_FAT_FUNC_VAR(memxor3, memxor3_func, c)
191 DECLARE_FAT_FUNC_VAR(memxor3, memxor3_func, s390x)
192
193 /* AES128 */
194 DECLARE_FAT_FUNC(nettle_aes128_set_encrypt_key, aes128_set_key_func)
195 DECLARE_FAT_FUNC_VAR(aes128_set_encrypt_key, aes128_set_key_func, c)
196 DECLARE_FAT_FUNC_VAR(aes128_set_encrypt_key, aes128_set_key_func, s390x)
197 DECLARE_FAT_FUNC(nettle_aes128_set_decrypt_key, aes128_set_key_func)
198 DECLARE_FAT_FUNC_VAR(aes128_set_decrypt_key, aes128_set_key_func, c)
199 DECLARE_FAT_FUNC_VAR(aes128_set_decrypt_key, aes128_set_key_func, s390x)
200 DECLARE_FAT_FUNC(nettle_aes128_invert_key, aes128_invert_key_func)
201 DECLARE_FAT_FUNC_VAR(aes128_invert_key, aes128_invert_key_func, c)
202 DECLARE_FAT_FUNC_VAR(aes128_invert_key, aes128_invert_key_func, s390x)
203 DECLARE_FAT_FUNC(nettle_aes128_encrypt, aes128_crypt_func)
204 DECLARE_FAT_FUNC_VAR(aes128_encrypt, aes128_crypt_func, c)
205 DECLARE_FAT_FUNC_VAR(aes128_encrypt, aes128_crypt_func, s390x)
206 DECLARE_FAT_FUNC(nettle_aes128_decrypt, aes128_crypt_func)
207 DECLARE_FAT_FUNC_VAR(aes128_decrypt, aes128_crypt_func, c)
208 DECLARE_FAT_FUNC_VAR(aes128_decrypt, aes128_crypt_func, s390x)
209
210 /* AES192 */
211 DECLARE_FAT_FUNC(nettle_aes192_set_encrypt_key, aes192_set_key_func)
212 DECLARE_FAT_FUNC_VAR(aes192_set_encrypt_key, aes192_set_key_func, c)
213 DECLARE_FAT_FUNC_VAR(aes192_set_encrypt_key, aes192_set_key_func, s390x)
214 DECLARE_FAT_FUNC(nettle_aes192_set_decrypt_key, aes192_set_key_func)
215 DECLARE_FAT_FUNC_VAR(aes192_set_decrypt_key, aes192_set_key_func, c)
216 DECLARE_FAT_FUNC_VAR(aes192_set_decrypt_key, aes192_set_key_func, s390x)
217 DECLARE_FAT_FUNC(nettle_aes192_invert_key, aes192_invert_key_func)
218 DECLARE_FAT_FUNC_VAR(aes192_invert_key, aes192_invert_key_func, c)
219 DECLARE_FAT_FUNC_VAR(aes192_invert_key, aes192_invert_key_func, s390x)
220 DECLARE_FAT_FUNC(nettle_aes192_encrypt, aes192_crypt_func)
221 DECLARE_FAT_FUNC_VAR(aes192_encrypt, aes192_crypt_func, c)
222 DECLARE_FAT_FUNC_VAR(aes192_encrypt, aes192_crypt_func, s390x)
223 DECLARE_FAT_FUNC(nettle_aes192_decrypt, aes192_crypt_func)
224 DECLARE_FAT_FUNC_VAR(aes192_decrypt, aes192_crypt_func, c)
225 DECLARE_FAT_FUNC_VAR(aes192_decrypt, aes192_crypt_func, s390x)
226
227 /* AES256 */
228 DECLARE_FAT_FUNC(nettle_aes256_set_encrypt_key, aes256_set_key_func)
229 DECLARE_FAT_FUNC_VAR(aes256_set_encrypt_key, aes256_set_key_func, c)
230 DECLARE_FAT_FUNC_VAR(aes256_set_encrypt_key, aes256_set_key_func, s390x)
231 DECLARE_FAT_FUNC(nettle_aes256_set_decrypt_key, aes256_set_key_func)
232 DECLARE_FAT_FUNC_VAR(aes256_set_decrypt_key, aes256_set_key_func, c)
233 DECLARE_FAT_FUNC_VAR(aes256_set_decrypt_key, aes256_set_key_func, s390x)
234 DECLARE_FAT_FUNC(nettle_aes256_invert_key, aes256_invert_key_func)
235 DECLARE_FAT_FUNC_VAR(aes256_invert_key, aes256_invert_key_func, c)
236 DECLARE_FAT_FUNC_VAR(aes256_invert_key, aes256_invert_key_func, s390x)
237 DECLARE_FAT_FUNC(nettle_aes256_encrypt, aes256_crypt_func)
238 DECLARE_FAT_FUNC_VAR(aes256_encrypt, aes256_crypt_func, c)
239 DECLARE_FAT_FUNC_VAR(aes256_encrypt, aes256_crypt_func, s390x)
240 DECLARE_FAT_FUNC(nettle_aes256_decrypt, aes256_crypt_func)
241 DECLARE_FAT_FUNC_VAR(aes256_decrypt, aes256_crypt_func, c)
242 DECLARE_FAT_FUNC_VAR(aes256_decrypt, aes256_crypt_func, s390x)
243
244 /* GHASH */
245 DECLARE_FAT_FUNC(_nettle_ghash_set_key, ghash_set_key_func)
246 DECLARE_FAT_FUNC_VAR(ghash_set_key, ghash_set_key_func, c)
247 DECLARE_FAT_FUNC_VAR(ghash_set_key, ghash_set_key_func, s390x)
248
249 DECLARE_FAT_FUNC(_nettle_ghash_update, ghash_update_func)
250 DECLARE_FAT_FUNC_VAR(ghash_update, ghash_update_func, c)
251 DECLARE_FAT_FUNC_VAR(ghash_update, ghash_update_func, s390x)
252
253 DECLARE_FAT_FUNC(nettle_sha1_compress, sha1_compress_func)
254 DECLARE_FAT_FUNC_VAR(sha1_compress, sha1_compress_func, c)
255 DECLARE_FAT_FUNC_VAR(sha1_compress, sha1_compress_func, s390x)
256
257 DECLARE_FAT_FUNC(_nettle_sha256_compress_n, sha256_compress_n_func)
258 DECLARE_FAT_FUNC_VAR(sha256_compress_n, sha256_compress_n_func, c)
259 DECLARE_FAT_FUNC_VAR(sha256_compress_n, sha256_compress_n_func, s390x)
260
261 DECLARE_FAT_FUNC(_nettle_sha512_compress, sha512_compress_func)
262 DECLARE_FAT_FUNC_VAR(sha512_compress, sha512_compress_func, c)
263 DECLARE_FAT_FUNC_VAR(sha512_compress, sha512_compress_func, s390x)
264
265 DECLARE_FAT_FUNC(nettle_sha3_permute, sha3_permute_func)
266 DECLARE_FAT_FUNC_VAR(sha3_permute, sha3_permute_func, c)
267 DECLARE_FAT_FUNC_VAR(sha3_permute, sha3_permute_func, s390x)
268
269 DECLARE_FAT_FUNC(_nettle_chacha_core, chacha_core_func)
270 DECLARE_FAT_FUNC_VAR(chacha_core, chacha_core_func, c);
271 DECLARE_FAT_FUNC_VAR(chacha_core, chacha_core_func, s390x);
272
273 DECLARE_FAT_FUNC(nettle_chacha_crypt, chacha_crypt_func)
274 DECLARE_FAT_FUNC_VAR(chacha_crypt, chacha_crypt_func, 1core)
275 DECLARE_FAT_FUNC_VAR(chacha_crypt, chacha_crypt_func, 4core)
276
277 DECLARE_FAT_FUNC(nettle_chacha_crypt32, chacha_crypt_func)
278 DECLARE_FAT_FUNC_VAR(chacha_crypt32, chacha_crypt_func, 1core)
279 DECLARE_FAT_FUNC_VAR(chacha_crypt32, chacha_crypt_func, 4core)
280
281 static void CONSTRUCTOR
282 fat_init (void)
283 {
284 struct s390x_features features;
285 int verbose;
286
287 get_s390x_features (&features);
288 verbose = getenv (ENV_VERBOSE) != NULL;
289
290 /* MEMXOR3 */
291 if (features.have_vector_facility)
292 {
293 if (verbose)
294 fprintf (stderr, "libnettle: enabling vector facility code.\n");
295 nettle_memxor3_vec = _nettle_memxor3_s390x;
296 nettle_sha3_permute_vec = _nettle_sha3_permute_s390x;
297 _nettle_chacha_core_vec = _nettle_chacha_core_s390x;
298 nettle_chacha_crypt_vec = _nettle_chacha_crypt_4core;
299 nettle_chacha_crypt32_vec = _nettle_chacha_crypt32_4core;
300 }
301 else
302 {
303 nettle_memxor3_vec = _nettle_memxor3_c;
304 nettle_sha3_permute_vec = _nettle_sha3_permute_c;
305 _nettle_chacha_core_vec = _nettle_chacha_core_c;
306 nettle_chacha_crypt_vec = _nettle_chacha_crypt_1core;
307 nettle_chacha_crypt32_vec = _nettle_chacha_crypt32_1core;
308 }
309
310 /* AES128 */
311 if (features.have_km_aes128)
312 {
313 if (verbose)
314 fprintf (stderr, "libnettle: enabling hardware accelerated AES128 EBC mode.\n");
315 nettle_aes128_set_encrypt_key_vec = _nettle_aes128_set_encrypt_key_s390x;
316 nettle_aes128_set_decrypt_key_vec = _nettle_aes128_set_decrypt_key_s390x;
317 nettle_aes128_invert_key_vec = _nettle_aes128_invert_key_s390x;
318 nettle_aes128_encrypt_vec = _nettle_aes128_encrypt_s390x;
319 nettle_aes128_decrypt_vec = _nettle_aes128_decrypt_s390x;
320 }
321 else
322 {
323 nettle_aes128_set_encrypt_key_vec = _nettle_aes128_set_encrypt_key_c;
324 nettle_aes128_set_decrypt_key_vec = _nettle_aes128_set_decrypt_key_c;
325 nettle_aes128_invert_key_vec = _nettle_aes128_invert_key_c;
326 nettle_aes128_encrypt_vec = _nettle_aes128_encrypt_c;
327 nettle_aes128_decrypt_vec = _nettle_aes128_decrypt_c;
328 }
329
330 /* AES192 */
331 if (features.have_km_aes192)
332 {
333 if (verbose)
334 fprintf (stderr, "libnettle: enabling hardware accelerated AES192 EBC mode.\n");
335 nettle_aes192_set_encrypt_key_vec = _nettle_aes192_set_encrypt_key_s390x;
336 nettle_aes192_set_decrypt_key_vec = _nettle_aes192_set_decrypt_key_s390x;
337 nettle_aes192_invert_key_vec = _nettle_aes192_invert_key_s390x;
338 nettle_aes192_encrypt_vec = _nettle_aes192_encrypt_s390x;
339 nettle_aes192_decrypt_vec = _nettle_aes192_decrypt_s390x;
340 }
341 else
342 {
343 nettle_aes192_set_encrypt_key_vec = _nettle_aes192_set_encrypt_key_c;
344 nettle_aes192_set_decrypt_key_vec = _nettle_aes192_set_decrypt_key_c;
345 nettle_aes192_invert_key_vec = _nettle_aes192_invert_key_c;
346 nettle_aes192_encrypt_vec = _nettle_aes192_encrypt_c;
347 nettle_aes192_decrypt_vec = _nettle_aes192_decrypt_c;
348 }
349
350 /* AES256 */
351 if (features.have_km_aes256)
352 {
353 if (verbose)
354 fprintf (stderr, "libnettle: enabling hardware accelerated AES256 EBC mode.\n");
355 nettle_aes256_set_encrypt_key_vec = _nettle_aes256_set_encrypt_key_s390x;
356 nettle_aes256_set_decrypt_key_vec = _nettle_aes256_set_decrypt_key_s390x;
357 nettle_aes256_invert_key_vec = _nettle_aes256_invert_key_s390x;
358 nettle_aes256_encrypt_vec = _nettle_aes256_encrypt_s390x;
359 nettle_aes256_decrypt_vec = _nettle_aes256_decrypt_s390x;
360 }
361 else
362 {
363 nettle_aes256_set_encrypt_key_vec = _nettle_aes256_set_encrypt_key_c;
364 nettle_aes256_set_decrypt_key_vec = _nettle_aes256_set_decrypt_key_c;
365 nettle_aes256_invert_key_vec = _nettle_aes256_invert_key_c;
366 nettle_aes256_encrypt_vec = _nettle_aes256_encrypt_c;
367 nettle_aes256_decrypt_vec = _nettle_aes256_decrypt_c;
368 }
369
370 /* GHASH */
371 if (features.have_kimd_ghash)
372 {
373 if (verbose)
374 fprintf (stderr, "libnettle: enabling hardware accelerated GHASH.\n");
375 _nettle_ghash_set_key_vec = _nettle_ghash_set_key_s390x;
376 _nettle_ghash_update_vec = _nettle_ghash_update_s390x;
377 }
378 else
379 {
380 _nettle_ghash_set_key_vec = _nettle_ghash_set_key_c;
381 _nettle_ghash_update_vec = _nettle_ghash_update_c;
382 }
383
384 /* SHA1 */
385 if (features.have_kimd_sha_1)
386 {
387 if (verbose)
388 fprintf (stderr, "libnettle: enabling hardware accelerated SHA1 compress code.\n");
389 nettle_sha1_compress_vec = _nettle_sha1_compress_s390x;
390 }
391 else
392 {
393 nettle_sha1_compress_vec = _nettle_sha1_compress_c;
394 }
395
396 /* SHA256 */
397 if (features.have_kimd_sha_256)
398 {
399 if (verbose)
400 fprintf (stderr, "libnettle: enabling hardware accelerated SHA256 compress code.\n");
401 _nettle_sha256_compress_n_vec = _nettle_sha256_compress_n_s390x;
402 }
403 else
404 {
405 _nettle_sha256_compress_n_vec = _nettle_sha256_compress_n_c;
406 }
407
408 /* SHA512 */
409 if (features.have_kimd_sha_512)
410 {
411 if (verbose)
412 fprintf (stderr, "libnettle: enabling hardware accelerated SHA512 compress code.\n");
413 _nettle_sha512_compress_vec = _nettle_sha512_compress_s390x;
414 }
415 else
416 {
417 _nettle_sha512_compress_vec = _nettle_sha512_compress_c;
418 }
419 }
420
/* Fat-function definitions: each DEFINE_FAT_FUNC emits the public
   entry point (return type, parameter list, forwarded argument list)
   that dispatches through the corresponding _vec pointer selected in
   fat_init() above.  Signatures must match the public nettle API
   exactly.  */

/* MEMXOR3 */
DEFINE_FAT_FUNC(nettle_memxor3, void *,
		(void *dst_in, const void *a_in, const void *b_in, size_t n),
		(dst_in, a_in, b_in, n))

/* AES128 */
DEFINE_FAT_FUNC(nettle_aes128_set_encrypt_key, void,
		(struct aes128_ctx *ctx, const uint8_t *key),
		(ctx, key))
DEFINE_FAT_FUNC(nettle_aes128_set_decrypt_key, void,
		(struct aes128_ctx *ctx, const uint8_t *key),
		(ctx, key))
DEFINE_FAT_FUNC(nettle_aes128_invert_key, void,
		(struct aes128_ctx *dst, const struct aes128_ctx *src),
		(dst, src))
DEFINE_FAT_FUNC(nettle_aes128_encrypt, void,
		(const struct aes128_ctx *ctx, size_t length,
		 uint8_t *dst,const uint8_t *src),
		(ctx, length, dst, src))
DEFINE_FAT_FUNC(nettle_aes128_decrypt, void,
		(const struct aes128_ctx *ctx, size_t length,
		 uint8_t *dst,const uint8_t *src),
		(ctx, length, dst, src))

/* AES192 */
DEFINE_FAT_FUNC(nettle_aes192_set_encrypt_key, void,
		(struct aes192_ctx *ctx, const uint8_t *key),
		(ctx, key))
DEFINE_FAT_FUNC(nettle_aes192_set_decrypt_key, void,
		(struct aes192_ctx *ctx, const uint8_t *key),
		(ctx, key))
DEFINE_FAT_FUNC(nettle_aes192_invert_key, void,
		(struct aes192_ctx *dst, const struct aes192_ctx *src),
		(dst, src))
DEFINE_FAT_FUNC(nettle_aes192_encrypt, void,
		(const struct aes192_ctx *ctx, size_t length,
		 uint8_t *dst,const uint8_t *src),
		(ctx, length, dst, src))
DEFINE_FAT_FUNC(nettle_aes192_decrypt, void,
		(const struct aes192_ctx *ctx, size_t length,
		 uint8_t *dst,const uint8_t *src),
		(ctx, length, dst, src))

/* AES256 */
DEFINE_FAT_FUNC(nettle_aes256_set_encrypt_key, void,
		(struct aes256_ctx *ctx, const uint8_t *key),
		(ctx, key))
DEFINE_FAT_FUNC(nettle_aes256_set_decrypt_key, void,
		(struct aes256_ctx *ctx, const uint8_t *key),
		(ctx, key))
DEFINE_FAT_FUNC(nettle_aes256_invert_key, void,
		(struct aes256_ctx *dst, const struct aes256_ctx *src),
		(dst, src))
DEFINE_FAT_FUNC(nettle_aes256_encrypt, void,
		(const struct aes256_ctx *ctx, size_t length,
		 uint8_t *dst,const uint8_t *src),
		(ctx, length, dst, src))
DEFINE_FAT_FUNC(nettle_aes256_decrypt, void,
		(const struct aes256_ctx *ctx, size_t length,
		 uint8_t *dst,const uint8_t *src),
		(ctx, length, dst, src))

/* GHASH */
DEFINE_FAT_FUNC(_nettle_ghash_set_key, void,
		(struct gcm_key *ctx, const union nettle_block16 *key),
		(ctx, key))
DEFINE_FAT_FUNC(_nettle_ghash_update, const uint8_t *,
		(const struct gcm_key *ctx, union nettle_block16 *state,
		 size_t blocks, const uint8_t *data),
		(ctx, state, blocks, data))

/* SHA1 */
DEFINE_FAT_FUNC(nettle_sha1_compress, void,
		(uint32_t *state, const uint8_t *input),
		(state, input))

/* SHA256 */
DEFINE_FAT_FUNC(_nettle_sha256_compress_n, const uint8_t *,
		(uint32_t *state, const uint32_t *k,
		 size_t blocks, const uint8_t *input),
		(state, k, blocks, input))

/* SHA512 */
DEFINE_FAT_FUNC(_nettle_sha512_compress, void,
		(uint64_t *state, const uint8_t *input, const uint64_t *k),
		(state, input, k))

/* SHA3 */
DEFINE_FAT_FUNC(nettle_sha3_permute, void,
		(struct sha3_state *state), (state))

/* ChaCha */
DEFINE_FAT_FUNC(_nettle_chacha_core, void,
		(uint32_t *dst, const uint32_t *src, unsigned rounds),
		(dst, src, rounds))

DEFINE_FAT_FUNC(nettle_chacha_crypt, void,
		(struct chacha_ctx *ctx,
		 size_t length,
		 uint8_t *dst,
		 const uint8_t *src),
		(ctx, length, dst, src))

DEFINE_FAT_FUNC(nettle_chacha_crypt32, void,
		(struct chacha_ctx *ctx,
		 size_t length,
		 uint8_t *dst,
		 const uint8_t *src),
		(ctx, length, dst, src))