/*
 * The AEGIS-256 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include "aegis.h"

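/*
 * AEGIS-256 parameters: a 256-bit key, a 256-bit nonce, a state of six
 * 128-bit AES blocks, and an authentication tag of 8 to 16 bytes.
 */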
#define AEGIS256_NONCE_SIZE 32
#define AEGIS256_STATE_BLOCKS 6
#define AEGIS256_KEY_SIZE 32
#define AEGIS256_MIN_AUTH_SIZE 8
#define AEGIS256_MAX_AUTH_SIZE 16

struct aegis_state {
        union aegis_block blocks[AEGIS256_STATE_BLOCKS];
};

struct aegis_ctx {
        union aegis_block key[AEGIS256_KEY_SIZE / AEGIS_BLOCK_SIZE];
};

struct aegis256_ops {
        int (*skcipher_walk_init)(struct skcipher_walk *walk,
                                  struct aead_request *req, bool atomic);

        void (*crypt_chunk)(struct aegis_state *state, u8 *dst,
                            const u8 *src, unsigned int size);
};

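/*
 * One round of the AEGIS state update: each block becomes the AES round
 * of its predecessor, keyed with the block's old value, with the last
 * block wrapping around to update the first. The _a/_u variants below
 * additionally absorb one message block, from an aligned block or an
 * unaligned buffer respectively.
 */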
static void crypto_aegis256_update(struct aegis_state *state)
{
        union aegis_block tmp;
        unsigned int i;

        tmp = state->blocks[AEGIS256_STATE_BLOCKS - 1];
        for (i = AEGIS256_STATE_BLOCKS - 1; i > 0; i--)
                crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
                                    &state->blocks[i]);
        crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

static void crypto_aegis256_update_a(struct aegis_state *state,
                                     const union aegis_block *msg)
{
        crypto_aegis256_update(state);
        crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis256_update_u(struct aegis_state *state, const void *msg)
{
        crypto_aegis256_update(state);
        crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}

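/*
 * Key/nonce setup: load the six state blocks from the key, the key XOR
 * nonce, and the constants from aegis.h, then run four rounds of four
 * absorbing updates before any data is processed.
 */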
static void crypto_aegis256_init(struct aegis_state *state,
                                 const union aegis_block *key,
                                 const u8 *iv)
{
        union aegis_block key_iv[2];
        unsigned int i;

        key_iv[0] = key[0];
        key_iv[1] = key[1];
        crypto_xor(key_iv[0].bytes, iv + 0 * AEGIS_BLOCK_SIZE,
                   AEGIS_BLOCK_SIZE);
        crypto_xor(key_iv[1].bytes, iv + 1 * AEGIS_BLOCK_SIZE,
                   AEGIS_BLOCK_SIZE);

        state->blocks[0] = key_iv[0];
        state->blocks[1] = key_iv[1];
        state->blocks[2] = crypto_aegis_const[1];
        state->blocks[3] = crypto_aegis_const[0];
        state->blocks[4] = key[0];
        state->blocks[5] = key[1];

        crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[0]);
        crypto_aegis_block_xor(&state->blocks[5], &crypto_aegis_const[1]);

        for (i = 0; i < 4; i++) {
                crypto_aegis256_update_a(state, &key[0]);
                crypto_aegis256_update_a(state, &key[1]);
                crypto_aegis256_update_a(state, &key_iv[0]);
                crypto_aegis256_update_a(state, &key_iv[1]);
        }
}

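/*
 * Absorb full blocks of associated data, taking the aligned fast path
 * when the source buffer allows it. Any trailing partial block is left
 * for the caller to pad and absorb.
 */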
static void crypto_aegis256_ad(struct aegis_state *state,
                               const u8 *src, unsigned int size)
{
        if (AEGIS_ALIGNED(src)) {
                const union aegis_block *src_blk =
                                (const union aegis_block *)src;

                while (size >= AEGIS_BLOCK_SIZE) {
                        crypto_aegis256_update_a(state, src_blk);

                        size -= AEGIS_BLOCK_SIZE;
                        src_blk++;
                }
        } else {
                while (size >= AEGIS_BLOCK_SIZE) {
                        crypto_aegis256_update_u(state, src);

                        size -= AEGIS_BLOCK_SIZE;
                        src += AEGIS_BLOCK_SIZE;
                }
        }
}

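/*
 * Encrypt one contiguous chunk: for each plaintext block, derive the
 * keystream block S1 ^ S4 ^ S5 ^ (S2 & S3), XOR it with the plaintext,
 * and absorb the plaintext into the state. A trailing partial block is
 * zero-padded before being absorbed.
 */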
static void crypto_aegis256_encrypt_chunk(struct aegis_state *state, u8 *dst,
                                          const u8 *src, unsigned int size)
{
        union aegis_block tmp;

        if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
                while (size >= AEGIS_BLOCK_SIZE) {
                        union aegis_block *dst_blk =
                                        (union aegis_block *)dst;
                        const union aegis_block *src_blk =
                                        (const union aegis_block *)src;

                        tmp = state->blocks[2];
                        crypto_aegis_block_and(&tmp, &state->blocks[3]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[5]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[4]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[1]);
                        crypto_aegis_block_xor(&tmp, src_blk);

                        crypto_aegis256_update_a(state, src_blk);

                        *dst_blk = tmp;

                        size -= AEGIS_BLOCK_SIZE;
                        src += AEGIS_BLOCK_SIZE;
                        dst += AEGIS_BLOCK_SIZE;
                }
        } else {
                while (size >= AEGIS_BLOCK_SIZE) {
                        tmp = state->blocks[2];
                        crypto_aegis_block_and(&tmp, &state->blocks[3]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[5]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[4]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[1]);
                        crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

                        crypto_aegis256_update_u(state, src);

                        memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

                        size -= AEGIS_BLOCK_SIZE;
                        src += AEGIS_BLOCK_SIZE;
                        dst += AEGIS_BLOCK_SIZE;
                }
        }

        if (size > 0) {
                union aegis_block msg = {};
                memcpy(msg.bytes, src, size);

                tmp = state->blocks[2];
                crypto_aegis_block_and(&tmp, &state->blocks[3]);
                crypto_aegis_block_xor(&tmp, &state->blocks[5]);
                crypto_aegis_block_xor(&tmp, &state->blocks[4]);
                crypto_aegis_block_xor(&tmp, &state->blocks[1]);

                crypto_aegis256_update_a(state, &msg);

                crypto_aegis_block_xor(&msg, &tmp);

                memcpy(dst, msg.bytes, size);
        }
}

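/*
 * Decrypt one contiguous chunk: the keystream is derived exactly as for
 * encryption, but the recovered plaintext (not the ciphertext) is
 * absorbed into the state. For a trailing partial block the plaintext
 * tail is re-zeroed before absorption so that the state matches the one
 * produced during encryption.
 */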
static void crypto_aegis256_decrypt_chunk(struct aegis_state *state, u8 *dst,
                                          const u8 *src, unsigned int size)
{
        union aegis_block tmp;

        if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
                while (size >= AEGIS_BLOCK_SIZE) {
                        union aegis_block *dst_blk =
                                        (union aegis_block *)dst;
                        const union aegis_block *src_blk =
                                        (const union aegis_block *)src;

                        tmp = state->blocks[2];
                        crypto_aegis_block_and(&tmp, &state->blocks[3]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[5]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[4]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[1]);
                        crypto_aegis_block_xor(&tmp, src_blk);

                        crypto_aegis256_update_a(state, &tmp);

                        *dst_blk = tmp;

                        size -= AEGIS_BLOCK_SIZE;
                        src += AEGIS_BLOCK_SIZE;
                        dst += AEGIS_BLOCK_SIZE;
                }
        } else {
                while (size >= AEGIS_BLOCK_SIZE) {
                        tmp = state->blocks[2];
                        crypto_aegis_block_and(&tmp, &state->blocks[3]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[5]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[4]);
                        crypto_aegis_block_xor(&tmp, &state->blocks[1]);
                        crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

                        crypto_aegis256_update_a(state, &tmp);

                        memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

                        size -= AEGIS_BLOCK_SIZE;
                        src += AEGIS_BLOCK_SIZE;
                        dst += AEGIS_BLOCK_SIZE;
                }
        }

        if (size > 0) {
                union aegis_block msg = {};
                memcpy(msg.bytes, src, size);

                tmp = state->blocks[2];
                crypto_aegis_block_and(&tmp, &state->blocks[3]);
                crypto_aegis_block_xor(&tmp, &state->blocks[5]);
                crypto_aegis_block_xor(&tmp, &state->blocks[4]);
                crypto_aegis_block_xor(&tmp, &state->blocks[1]);
                crypto_aegis_block_xor(&msg, &tmp);

                memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

                crypto_aegis256_update_a(state, &msg);

                memcpy(dst, msg.bytes, size);
        }
}

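/*
 * Walk the associated-data scatterlist, buffering bytes in 'buf' across
 * element boundaries so that the state only ever absorbs whole blocks;
 * a final partial block is zero-padded.
 */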
static void crypto_aegis256_process_ad(struct aegis_state *state,
                                       struct scatterlist *sg_src,
                                       unsigned int assoclen)
{
        struct scatter_walk walk;
        union aegis_block buf;
        unsigned int pos = 0;

        scatterwalk_start(&walk, sg_src);
        while (assoclen != 0) {
                unsigned int size = scatterwalk_clamp(&walk, assoclen);
                unsigned int left = size;
                void *mapped = scatterwalk_map(&walk);
                const u8 *src = (const u8 *)mapped;

                if (pos + size >= AEGIS_BLOCK_SIZE) {
                        if (pos > 0) {
                                unsigned int fill = AEGIS_BLOCK_SIZE - pos;
                                memcpy(buf.bytes + pos, src, fill);
                                crypto_aegis256_update_a(state, &buf);
                                pos = 0;
                                left -= fill;
                                src += fill;
                        }

                        crypto_aegis256_ad(state, src, left);
                        src += left & ~(AEGIS_BLOCK_SIZE - 1);
                        left &= AEGIS_BLOCK_SIZE - 1;
                }

                memcpy(buf.bytes + pos, src, left);

                pos += left;
                assoclen -= size;
                scatterwalk_unmap(mapped);
                scatterwalk_advance(&walk, size);
                scatterwalk_done(&walk, 0, assoclen);
        }

        if (pos > 0) {
                memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
                crypto_aegis256_update_a(state, &buf);
        }
}

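/*
 * Drive the skcipher walk over the request payload and hand each mapped
 * span to the chunk routine, rounding down to the walk stride for every
 * step but the last.
 */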
static void crypto_aegis256_process_crypt(struct aegis_state *state,
                                          struct aead_request *req,
                                          const struct aegis256_ops *ops)
{
        struct skcipher_walk walk;

        ops->skcipher_walk_init(&walk, req, false);

        while (walk.nbytes) {
                unsigned int nbytes = walk.nbytes;

                if (nbytes < walk.total)
                        nbytes = round_down(nbytes, walk.stride);

                ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
                                 nbytes);

                skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }
}

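/*
 * Finalization: XOR the encoded bit lengths of the associated data and
 * the message into a copy of state block 3, absorb that block seven
 * times, then XOR all six state blocks into the tag.
 */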
static void crypto_aegis256_final(struct aegis_state *state,
                                  union aegis_block *tag_xor,
                                  u64 assoclen, u64 cryptlen)
{
        u64 assocbits = assoclen * 8;
        u64 cryptbits = cryptlen * 8;

        union aegis_block tmp;
        unsigned int i;

        tmp.words64[0] = cpu_to_le64(assocbits);
        tmp.words64[1] = cpu_to_le64(cryptbits);

        crypto_aegis_block_xor(&tmp, &state->blocks[3]);

        for (i = 0; i < 7; i++)
                crypto_aegis256_update_a(state, &tmp);

        for (i = 0; i < AEGIS256_STATE_BLOCKS; i++)
                crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

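/* Split the 256-bit key into the two 128-bit blocks of the context. */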
static int crypto_aegis256_setkey(struct crypto_aead *aead, const u8 *key,
                                  unsigned int keylen)
{
        struct aegis_ctx *ctx = crypto_aead_ctx(aead);

        if (keylen != AEGIS256_KEY_SIZE) {
                crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        memcpy(ctx->key[0].bytes, key, AEGIS_BLOCK_SIZE);
        memcpy(ctx->key[1].bytes, key + AEGIS_BLOCK_SIZE,
               AEGIS_BLOCK_SIZE);
        return 0;
}

static int crypto_aegis256_setauthsize(struct crypto_aead *tfm,
                                       unsigned int authsize)
{
        if (authsize > AEGIS256_MAX_AUTH_SIZE)
                return -EINVAL;
        if (authsize < AEGIS256_MIN_AUTH_SIZE)
                return -EINVAL;
        return 0;
}

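/*
 * Common encrypt/decrypt path: initialize the state from the key and
 * nonce, absorb the associated data, transform the payload, and fold
 * the final state into the caller-supplied tag block.
 */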
static void crypto_aegis256_crypt(struct aead_request *req,
                                  union aegis_block *tag_xor,
                                  unsigned int cryptlen,
                                  const struct aegis256_ops *ops)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
        struct aegis_state state;

        crypto_aegis256_init(&state, ctx->key, req->iv);
        crypto_aegis256_process_ad(&state, req->src, req->assoclen);
        crypto_aegis256_process_crypt(&state, req, ops);
        crypto_aegis256_final(&state, tag_xor, req->assoclen, cryptlen);
}

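/*
 * Encryption computes the tag into a zeroed block and appends it to the
 * destination right after the ciphertext.
 */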
static int crypto_aegis256_encrypt(struct aead_request *req)
{
        static const struct aegis256_ops ops = {
                .skcipher_walk_init = skcipher_walk_aead_encrypt,
                .crypt_chunk = crypto_aegis256_encrypt_chunk,
        };

        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        union aegis_block tag = {};
        unsigned int authsize = crypto_aead_authsize(tfm);
        unsigned int cryptlen = req->cryptlen;

        crypto_aegis256_crypt(req, &tag, cryptlen, &ops);

        scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
                                 authsize, 1);
        return 0;
}

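/*
 * Decryption reads the expected tag from the end of the source; the
 * computed tag is XORed into it by crypto_aegis256_crypt(), so the
 * result must be all-zeroes, checked in constant time with
 * crypto_memneq(), for the message to be accepted.
 */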
static int crypto_aegis256_decrypt(struct aead_request *req)
{
        static const struct aegis256_ops ops = {
                .skcipher_walk_init = skcipher_walk_aead_decrypt,
                .crypt_chunk = crypto_aegis256_decrypt_chunk,
        };
        static const u8 zeros[AEGIS256_MAX_AUTH_SIZE] = {};

        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        union aegis_block tag;
        unsigned int authsize = crypto_aead_authsize(tfm);
        unsigned int cryptlen = req->cryptlen - authsize;

        scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
                                 authsize, 0);

        crypto_aegis256_crypt(req, &tag, cryptlen, &ops);

        return crypto_memneq(tag.bytes, zeros, authsize) ? -EBADMSG : 0;
}

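/* The generic implementation keeps no per-transform state. */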
static int crypto_aegis256_init_tfm(struct crypto_aead *tfm)
{
        return 0;
}

static void crypto_aegis256_exit_tfm(struct crypto_aead *tfm)
{
}

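/*
 * AEGIS-256 is registered as a stream-mode AEAD: cra_blocksize is 1 and
 * the 32-byte nonce travels in the IV field of the request.
 */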
static struct aead_alg crypto_aegis256_alg = {
        .setkey = crypto_aegis256_setkey,
        .setauthsize = crypto_aegis256_setauthsize,
        .encrypt = crypto_aegis256_encrypt,
        .decrypt = crypto_aegis256_decrypt,
        .init = crypto_aegis256_init_tfm,
        .exit = crypto_aegis256_exit_tfm,

        .ivsize = AEGIS256_NONCE_SIZE,
        .maxauthsize = AEGIS256_MAX_AUTH_SIZE,
        .chunksize = AEGIS_BLOCK_SIZE,

        .base = {
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct aegis_ctx),
                .cra_alignmask = 0,

                .cra_priority = 100,

                .cra_name = "aegis256",
                .cra_driver_name = "aegis256-generic",

                .cra_module = THIS_MODULE,
        }
};

static int __init crypto_aegis256_module_init(void)
{
        return crypto_register_aead(&crypto_aegis256_alg);
}

static void __exit crypto_aegis256_module_exit(void)
{
        crypto_unregister_aead(&crypto_aegis256_alg);
}

module_init(crypto_aegis256_module_init);
module_exit(crypto_aegis256_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-256 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis256");
MODULE_ALIAS_CRYPTO("aegis256-generic");