// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128L Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include "aegis.h"

#define AEGIS128L_CHUNK_BLOCKS 2
#define AEGIS128L_CHUNK_SIZE (AEGIS128L_CHUNK_BLOCKS * AEGIS_BLOCK_SIZE)
#define AEGIS128L_NONCE_SIZE 16
#define AEGIS128L_STATE_BLOCKS 8
#define AEGIS128L_KEY_SIZE 16
#define AEGIS128L_MIN_AUTH_SIZE 8
#define AEGIS128L_MAX_AUTH_SIZE 16

union aegis_chunk {
	union aegis_block blocks[AEGIS128L_CHUNK_BLOCKS];
	u8 bytes[AEGIS128L_CHUNK_SIZE];
};

struct aegis_state {
	union aegis_block blocks[AEGIS128L_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};

struct aegis128l_ops {
	int (*skcipher_walk_init)(struct skcipher_walk *walk,
				  struct aead_request *req, bool atomic);

	void (*crypt_chunk)(struct aegis_state *state, u8 *dst,
			    const u8 *src, unsigned int size);
};

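/*
 * Advance the state by one round: S'[i] = AESRound(S[i - 1], S[i]),
 * with the index taken mod AEGIS128L_STATE_BLOCKS, so block 0 takes
 * the saved old value of block 7 as its input.
 */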
static void crypto_aegis128l_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128L_STATE_BLOCKS - 1];
	for (i = AEGIS128L_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

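/*
 * Absorb one 32-byte message chunk: after a state update, the first
 * message block is XORed into state block 0 and the second into state
 * block 4. The _a variant expects an AEGIS_ALIGNED chunk; the _u
 * variant handles unaligned input via crypto_xor().
 */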
static void crypto_aegis128l_update_a(struct aegis_state *state,
				      const union aegis_chunk *msg)
{
	crypto_aegis128l_update(state);
	crypto_aegis_block_xor(&state->blocks[0], &msg->blocks[0]);
	crypto_aegis_block_xor(&state->blocks[4], &msg->blocks[1]);
}

static void crypto_aegis128l_update_u(struct aegis_state *state,
				      const void *msg)
{
	crypto_aegis128l_update(state);
	crypto_xor(state->blocks[0].bytes, msg + 0 * AEGIS_BLOCK_SIZE,
		   AEGIS_BLOCK_SIZE);
	crypto_xor(state->blocks[4].bytes, msg + 1 * AEGIS_BLOCK_SIZE,
		   AEGIS_BLOCK_SIZE);
}

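/*
 * Initialize the state from the key and the 16-byte nonce: the state
 * blocks are seeded with key ^ IV, the two AEGIS constants, and
 * key ^ constant, then mixed by ten update rounds that each absorb
 * the (IV, key) chunk.
 */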
static void crypto_aegis128l_init(struct aegis_state *state,
				  const union aegis_block *key,
				  const u8 *iv)
{
	union aegis_block key_iv;
	union aegis_chunk chunk;
	unsigned int i;

	memcpy(chunk.blocks[0].bytes, iv, AEGIS_BLOCK_SIZE);
	chunk.blocks[1] = *key;

	key_iv = *key;
	crypto_aegis_block_xor(&key_iv, &chunk.blocks[0]);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = crypto_aegis_const[1];
	state->blocks[4] = key_iv;
	state->blocks[5] = *key;
	state->blocks[6] = *key;
	state->blocks[7] = *key;

	crypto_aegis_block_xor(&state->blocks[5], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[6], &crypto_aegis_const[1]);
	crypto_aegis_block_xor(&state->blocks[7], &crypto_aegis_const[0]);

	for (i = 0; i < 10; i++)
		crypto_aegis128l_update_a(state, &chunk);
}

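/*
 * Absorb as many whole 32-byte chunks of associated data as possible,
 * taking the aligned fast path when the source allows it. Any trailing
 * partial chunk is left for the caller to buffer.
 */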
static void crypto_aegis128l_ad(struct aegis_state *state,
				const u8 *src, unsigned int size)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_chunk *src_chunk =
				(const union aegis_chunk *)src;

		while (size >= AEGIS128L_CHUNK_SIZE) {
			crypto_aegis128l_update_a(state, src_chunk);

			size -= AEGIS128L_CHUNK_SIZE;
			src_chunk += 1;
		}
	} else {
		while (size >= AEGIS128L_CHUNK_SIZE) {
			crypto_aegis128l_update_u(state, src);

			size -= AEGIS128L_CHUNK_SIZE;
			src += AEGIS128L_CHUNK_SIZE;
		}
	}
}

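/*
 * Encrypt whole 32-byte chunks. The keystream for the first block of a
 * chunk is S1 ^ S6 ^ (S2 & S3) and for the second block
 * S2 ^ S5 ^ (S6 & S7); the state then absorbs the plaintext chunk. A
 * trailing partial chunk is zero-padded before being absorbed, and
 * only 'size' bytes of ciphertext are written out.
 */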
static void crypto_aegis128l_encrypt_chunk(struct aegis_state *state, u8 *dst,
					   const u8 *src, unsigned int size)
{
	union aegis_chunk tmp;
	union aegis_block *tmp0 = &tmp.blocks[0];
	union aegis_block *tmp1 = &tmp.blocks[1];

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS128L_CHUNK_SIZE) {
			union aegis_chunk *dst_blk =
					(union aegis_chunk *)dst;
			const union aegis_chunk *src_blk =
					(const union aegis_chunk *)src;

			*tmp0 = state->blocks[2];
			crypto_aegis_block_and(tmp0, &state->blocks[3]);
			crypto_aegis_block_xor(tmp0, &state->blocks[6]);
			crypto_aegis_block_xor(tmp0, &state->blocks[1]);
			crypto_aegis_block_xor(tmp0, &src_blk->blocks[0]);

			*tmp1 = state->blocks[6];
			crypto_aegis_block_and(tmp1, &state->blocks[7]);
			crypto_aegis_block_xor(tmp1, &state->blocks[5]);
			crypto_aegis_block_xor(tmp1, &state->blocks[2]);
			crypto_aegis_block_xor(tmp1, &src_blk->blocks[1]);

			crypto_aegis128l_update_a(state, src_blk);

			*dst_blk = tmp;

			size -= AEGIS128L_CHUNK_SIZE;
			src += AEGIS128L_CHUNK_SIZE;
			dst += AEGIS128L_CHUNK_SIZE;
		}
	} else {
		while (size >= AEGIS128L_CHUNK_SIZE) {
			*tmp0 = state->blocks[2];
			crypto_aegis_block_and(tmp0, &state->blocks[3]);
			crypto_aegis_block_xor(tmp0, &state->blocks[6]);
			crypto_aegis_block_xor(tmp0, &state->blocks[1]);
			crypto_xor(tmp0->bytes, src + 0 * AEGIS_BLOCK_SIZE,
				   AEGIS_BLOCK_SIZE);

			*tmp1 = state->blocks[6];
			crypto_aegis_block_and(tmp1, &state->blocks[7]);
			crypto_aegis_block_xor(tmp1, &state->blocks[5]);
			crypto_aegis_block_xor(tmp1, &state->blocks[2]);
			crypto_xor(tmp1->bytes, src + 1 * AEGIS_BLOCK_SIZE,
				   AEGIS_BLOCK_SIZE);

			crypto_aegis128l_update_u(state, src);

			memcpy(dst, tmp.bytes, AEGIS128L_CHUNK_SIZE);

			size -= AEGIS128L_CHUNK_SIZE;
			src += AEGIS128L_CHUNK_SIZE;
			dst += AEGIS128L_CHUNK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_chunk msg = {};
		memcpy(msg.bytes, src, size);

		*tmp0 = state->blocks[2];
		crypto_aegis_block_and(tmp0, &state->blocks[3]);
		crypto_aegis_block_xor(tmp0, &state->blocks[6]);
		crypto_aegis_block_xor(tmp0, &state->blocks[1]);

		*tmp1 = state->blocks[6];
		crypto_aegis_block_and(tmp1, &state->blocks[7]);
		crypto_aegis_block_xor(tmp1, &state->blocks[5]);
		crypto_aegis_block_xor(tmp1, &state->blocks[2]);

		crypto_aegis128l_update_a(state, &msg);

		crypto_aegis_block_xor(&msg.blocks[0], tmp0);
		crypto_aegis_block_xor(&msg.blocks[1], tmp1);

		memcpy(dst, msg.bytes, size);
	}
}

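/*
 * Decrypt whole 32-byte chunks using the same keystream as encryption,
 * but absorbing the recovered plaintext into the state. For a trailing
 * partial chunk, the bytes beyond 'size' are zeroed again before the
 * state update, so that decryption absorbs exactly what encryption did.
 */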
static void crypto_aegis128l_decrypt_chunk(struct aegis_state *state, u8 *dst,
					   const u8 *src, unsigned int size)
{
	union aegis_chunk tmp;
	union aegis_block *tmp0 = &tmp.blocks[0];
	union aegis_block *tmp1 = &tmp.blocks[1];

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS128L_CHUNK_SIZE) {
			union aegis_chunk *dst_blk =
					(union aegis_chunk *)dst;
			const union aegis_chunk *src_blk =
					(const union aegis_chunk *)src;

			*tmp0 = state->blocks[2];
			crypto_aegis_block_and(tmp0, &state->blocks[3]);
			crypto_aegis_block_xor(tmp0, &state->blocks[6]);
			crypto_aegis_block_xor(tmp0, &state->blocks[1]);
			crypto_aegis_block_xor(tmp0, &src_blk->blocks[0]);

			*tmp1 = state->blocks[6];
			crypto_aegis_block_and(tmp1, &state->blocks[7]);
			crypto_aegis_block_xor(tmp1, &state->blocks[5]);
			crypto_aegis_block_xor(tmp1, &state->blocks[2]);
			crypto_aegis_block_xor(tmp1, &src_blk->blocks[1]);

			crypto_aegis128l_update_a(state, &tmp);

			*dst_blk = tmp;

			size -= AEGIS128L_CHUNK_SIZE;
			src += AEGIS128L_CHUNK_SIZE;
			dst += AEGIS128L_CHUNK_SIZE;
		}
	} else {
		while (size >= AEGIS128L_CHUNK_SIZE) {
			*tmp0 = state->blocks[2];
			crypto_aegis_block_and(tmp0, &state->blocks[3]);
			crypto_aegis_block_xor(tmp0, &state->blocks[6]);
			crypto_aegis_block_xor(tmp0, &state->blocks[1]);
			crypto_xor(tmp0->bytes, src + 0 * AEGIS_BLOCK_SIZE,
				   AEGIS_BLOCK_SIZE);

			*tmp1 = state->blocks[6];
			crypto_aegis_block_and(tmp1, &state->blocks[7]);
			crypto_aegis_block_xor(tmp1, &state->blocks[5]);
			crypto_aegis_block_xor(tmp1, &state->blocks[2]);
			crypto_xor(tmp1->bytes, src + 1 * AEGIS_BLOCK_SIZE,
				   AEGIS_BLOCK_SIZE);

			crypto_aegis128l_update_a(state, &tmp);

			memcpy(dst, tmp.bytes, AEGIS128L_CHUNK_SIZE);

			size -= AEGIS128L_CHUNK_SIZE;
			src += AEGIS128L_CHUNK_SIZE;
			dst += AEGIS128L_CHUNK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_chunk msg = {};
		memcpy(msg.bytes, src, size);

		*tmp0 = state->blocks[2];
		crypto_aegis_block_and(tmp0, &state->blocks[3]);
		crypto_aegis_block_xor(tmp0, &state->blocks[6]);
		crypto_aegis_block_xor(tmp0, &state->blocks[1]);
		crypto_aegis_block_xor(&msg.blocks[0], tmp0);

		*tmp1 = state->blocks[6];
		crypto_aegis_block_and(tmp1, &state->blocks[7]);
		crypto_aegis_block_xor(tmp1, &state->blocks[5]);
		crypto_aegis_block_xor(tmp1, &state->blocks[2]);
		crypto_aegis_block_xor(&msg.blocks[1], tmp1);

		memset(msg.bytes + size, 0, AEGIS128L_CHUNK_SIZE - size);

		crypto_aegis128l_update_a(state, &msg);

		memcpy(dst, msg.bytes, size);
	}
}

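/*
 * Walk the associated-data scatterlist. Data is buffered across segment
 * boundaries so that the state only ever absorbs whole 32-byte chunks;
 * a final partial chunk is zero-padded.
 */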
static void crypto_aegis128l_process_ad(struct aegis_state *state,
					struct scatterlist *sg_src,
					unsigned int assoclen)
{
	struct scatter_walk walk;
	union aegis_chunk buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS128L_CHUNK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS128L_CHUNK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128l_update_a(state, &buf);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128l_ad(state, src, left);
			src += left & ~(AEGIS128L_CHUNK_SIZE - 1);
			left &= AEGIS128L_CHUNK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS128L_CHUNK_SIZE - pos);
		crypto_aegis128l_update_a(state, &buf);
	}
}

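/*
 * Drive the skcipher walk over the payload. Every step except the last
 * is rounded down to a whole number of chunks; the tail is handled
 * inside ops->crypt_chunk.
 */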
static void crypto_aegis128l_process_crypt(struct aegis_state *state,
					   struct aead_request *req,
					   const struct aegis128l_ops *ops)
{
	struct skcipher_walk walk;

	ops->skcipher_walk_init(&walk, req, false);

	while (walk.nbytes) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
				 nbytes);

		skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}
}

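/*
 * Compute the authentication tag: the bit lengths of the associated
 * data and the message, as two little-endian 64-bit values XORed with
 * state block 2 and duplicated into both halves of a chunk, are
 * absorbed over seven update rounds; state blocks 0-6 are then XORed
 * into the tag.
 */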
static void crypto_aegis128l_final(struct aegis_state *state,
				   union aegis_block *tag_xor,
				   u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_chunk tmp;
	unsigned int i;

	tmp.blocks[0].words64[0] = cpu_to_le64(assocbits);
	tmp.blocks[0].words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp.blocks[0], &state->blocks[2]);

	tmp.blocks[1] = tmp.blocks[0];
	for (i = 0; i < 7; i++)
		crypto_aegis128l_update_a(state, &tmp);

	for (i = 0; i < 7; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

static int crypto_aegis128l_setkey(struct crypto_aead *aead, const u8 *key,
				   unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128L_KEY_SIZE) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key.bytes, key, AEGIS128L_KEY_SIZE);
	return 0;
}

static int crypto_aegis128l_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	if (authsize > AEGIS128L_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128L_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

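/*
 * Common request path: initialize the state, absorb the associated
 * data, process the payload with the given ops, and compute the tag.
 */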
static void crypto_aegis128l_crypt(struct aead_request *req,
				   union aegis_block *tag_xor,
				   unsigned int cryptlen,
				   const struct aegis128l_ops *ops)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct aegis_state state;

	crypto_aegis128l_init(&state, &ctx->key, req->iv);
	crypto_aegis128l_process_ad(&state, req->src, req->assoclen);
	crypto_aegis128l_process_crypt(&state, req, ops);
	crypto_aegis128l_final(&state, tag_xor, req->assoclen, cryptlen);
}

static int crypto_aegis128l_encrypt(struct aead_request *req)
{
	static const struct aegis128l_ops ops = {
		.skcipher_walk_init = skcipher_walk_aead_encrypt,
		.crypt_chunk = crypto_aegis128l_encrypt_chunk,
	};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen;

	crypto_aegis128l_crypt(req, &tag, cryptlen, &ops);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

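/*
 * Decryption XORs the computed tag into the tag read from the request,
 * so a successful verification leaves all zeros; crypto_memneq() does
 * the comparison in constant time.
 */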
static int crypto_aegis128l_decrypt(struct aead_request *req)
{
	static const struct aegis128l_ops ops = {
		.skcipher_walk_init = skcipher_walk_aead_decrypt,
		.crypt_chunk = crypto_aegis128l_decrypt_chunk,
	};
	static const u8 zeros[AEGIS128L_MAX_AUTH_SIZE] = {};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	crypto_aegis128l_crypt(req, &tag, cryptlen, &ops);

	return crypto_memneq(tag.bytes, zeros, authsize) ? -EBADMSG : 0;
}

static int crypto_aegis128l_init_tfm(struct crypto_aead *tfm)
{
	return 0;
}

static void crypto_aegis128l_exit_tfm(struct crypto_aead *tfm)
{
}

static struct aead_alg crypto_aegis128l_alg = {
	.setkey = crypto_aegis128l_setkey,
	.setauthsize = crypto_aegis128l_setauthsize,
	.encrypt = crypto_aegis128l_encrypt,
	.decrypt = crypto_aegis128l_decrypt,
	.init = crypto_aegis128l_init_tfm,
	.exit = crypto_aegis128l_exit_tfm,

	.ivsize = AEGIS128L_NONCE_SIZE,
	.maxauthsize = AEGIS128L_MAX_AUTH_SIZE,
	.chunksize = AEGIS128L_CHUNK_SIZE,

	.base = {
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct aegis_ctx),
		.cra_alignmask = 0,

		.cra_priority = 100,

		.cra_name = "aegis128l",
		.cra_driver_name = "aegis128l-generic",

		.cra_module = THIS_MODULE,
	}
};

static int __init crypto_aegis128l_module_init(void)
{
	return crypto_register_aead(&crypto_aegis128l_alg);
}

static void __exit crypto_aegis128l_module_exit(void)
{
	crypto_unregister_aead(&crypto_aegis128l_alg);
}

module_init(crypto_aegis128l_module_init);
module_exit(crypto_aegis128l_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128L AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128l");
MODULE_ALIAS_CRYPTO("aegis128l-generic");