// SPDX-License-Identifier: GPL-2.0-only
/*
 * Bit sliced AES using NEON instructions
 *
 * Copyright (C) 2016 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <linux/module.h>

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");

asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);
asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);

asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);
asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

/* borrowed from aes-neon-blk.ko */
asmlinkage void neon_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int blocks);
asmlinkage void neon_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int blocks, u8 iv[]);
asmlinkage void neon_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int bytes, u8 ctr[]);
asmlinkage void neon_aes_xts_encrypt(u8 out[], u8 const in[],
				     u32 const rk1[], int rounds, int bytes,
				     u32 const rk2[], u8 iv[], int first);
asmlinkage void neon_aes_xts_decrypt(u8 out[], u8 const in[],
				     u32 const rk1[], int rounds, int bytes,
				     u32 const rk2[], u8 iv[], int first);
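
/*
 * The rk[] array below holds the round keys after aesbs_convert_key() has
 * rewritten them into the bit-sliced layout used by the NEON core, which
 * operates on eight AES blocks in parallel. The CBC/CTR and XTS contexts
 * also keep conventional round keys around for the non-bitsliced fallback
 * paths implemented by the neon_aes_* helpers above.
 */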
struct aesbs_ctx {
	u8	rk[13 * (8 * AES_BLOCK_SIZE) + 32];
	int	rounds;
} __aligned(AES_BLOCK_SIZE);

struct aesbs_cbc_ctr_ctx {
	struct aesbs_ctx	key;
	u32			enc[AES_MAX_KEYLENGTH_U32];
};

struct aesbs_xts_ctx {
	struct aesbs_ctx	key;
	u32			twkey[AES_MAX_KEYLENGTH_U32];
	struct crypto_aes_ctx	cts;
};

static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			unsigned int key_len)
{
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = aes_expandkey(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->rounds = 6 + key_len / 4;

	kernel_neon_begin();
	aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
	kernel_neon_end();

	return 0;
}
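
/*
 * The bit-sliced core is only efficient on full eight-block groups, so
 * while the walk still has more data to come, the block count is rounded
 * down to a multiple of the walk stride; only the final step may process
 * a smaller number of full blocks.
 */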
static int __ecb_crypt(struct skcipher_request *req,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
		   ctx->rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_encrypt);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_decrypt);
}

static int aesbs_cbc_ctr_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
				unsigned int key_len)
{
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = aes_expandkey(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->key.rounds = 6 + key_len / 4;

	memcpy(ctx->enc, rk.key_enc, sizeof(ctx->enc));

	kernel_neon_begin();
	aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
	kernel_neon_end();
	memzero_explicit(&rk, sizeof(rk));

	return 0;
}
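
/*
 * CBC encryption is inherently sequential (each block depends on the
 * previous ciphertext block), so bit-slicing eight blocks in parallel
 * buys nothing here and the plain NEON implementation is used instead.
 * CBC decryption, by contrast, can be parallelised and goes through the
 * bit-sliced aesbs_cbc_decrypt() core.
 */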
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* fall back to the non-bitsliced NEON implementation */
		kernel_neon_begin();
		neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				     ctx->enc, ctx->key.rounds, blocks,
				     walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->key.rk, ctx->key.rounds, blocks,
				  walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}
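
/*
 * CTR: full groups of eight blocks are handled by the bit-sliced core;
 * any remainder (including a final partial block, which is staged in a
 * stack buffer) is finished by the plain NEON helper using the regular
 * round keys kept in ctx->enc.
 */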
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
		int nbytes = walk.nbytes % (8 * AES_BLOCK_SIZE);
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

		kernel_neon_begin();
		if (blocks >= 8) {
			aesbs_ctr_encrypt(dst, src, ctx->key.rk, ctx->key.rounds,
					  blocks, walk.iv);
			dst += blocks * AES_BLOCK_SIZE;
			src += blocks * AES_BLOCK_SIZE;
		}
		if (nbytes && walk.nbytes == walk.total) {
			u8 buf[AES_BLOCK_SIZE];
			u8 *d = dst;

			if (unlikely(nbytes < AES_BLOCK_SIZE))
				src = dst = memcpy(buf + sizeof(buf) - nbytes,
						   src, nbytes);

			neon_aes_ctr_encrypt(dst, src, ctx->enc, ctx->key.rounds,
					     nbytes, walk.iv);

			if (unlikely(nbytes < AES_BLOCK_SIZE))
				memcpy(d, dst, nbytes);

			nbytes = 0;
		}
		kernel_neon_end();
		err = skcipher_walk_done(&walk, nbytes);
	}
	return err;
}

static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = xts_verify_key(tfm, in_key, key_len);
	if (err)
		return err;

	key_len /= 2;
	err = aes_expandkey(&ctx->cts, in_key, key_len);
	if (err)
		return err;

	err = aes_expandkey(&rk, in_key + key_len, key_len);
	if (err)
		return err;

	memcpy(ctx->twkey, rk.key_enc, sizeof(ctx->twkey));

	return aesbs_setkey(tfm, in_key, key_len);
}
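
/*
 * XTS with ciphertext stealing: when the length is not a multiple of the
 * block size, the request is shrunk so that the last full block and the
 * partial tail are processed together in a separate final step, via the
 * non-bitsliced NEON helpers, which implement the CTS block swap.
 */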
static int __xts_crypt(struct skcipher_request *req, bool encrypt,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;
	int nbytes, err;
	int first = 1;
	u8 *out, *in;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	/* ensure that the cts tail is covered by a single step */
	if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
	} else {
		tail = 0;
	}

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
		out = walk.dst.virt.addr;
		in = walk.src.virt.addr;
		nbytes = walk.nbytes;

		kernel_neon_begin();
		if (blocks >= 8) {
			if (first == 1)
				neon_aes_ecb_encrypt(walk.iv, walk.iv,
						     ctx->twkey,
						     ctx->key.rounds, 1);
			first = 2;

			fn(out, in, ctx->key.rk, ctx->key.rounds, blocks,
			   walk.iv);

			out += blocks * AES_BLOCK_SIZE;
			in += blocks * AES_BLOCK_SIZE;
			nbytes -= blocks * AES_BLOCK_SIZE;
		}
		if (walk.nbytes == walk.total && nbytes > 0) {
			if (encrypt)
				neon_aes_xts_encrypt(out, in, ctx->cts.key_enc,
						     ctx->key.rounds, nbytes,
						     ctx->twkey, walk.iv, first);
			else
				neon_aes_xts_decrypt(out, in, ctx->cts.key_dec,
						     ctx->key.rounds, nbytes,
						     ctx->twkey, walk.iv, first);
			nbytes = first = 0;
		}
		kernel_neon_end();
		err = skcipher_walk_done(&walk, nbytes);
	}

	if (err || likely(!tail))
		return err;

	/* handle ciphertext stealing */
	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	out = walk.dst.virt.addr;
	in = walk.src.virt.addr;
	nbytes = walk.nbytes;

	kernel_neon_begin();
	if (encrypt)
		neon_aes_xts_encrypt(out, in, ctx->cts.key_enc, ctx->key.rounds,
				     nbytes, ctx->twkey, walk.iv, first);
	else
		neon_aes_xts_decrypt(out, in, ctx->cts.key_dec, ctx->key.rounds,
				     nbytes, ctx->twkey, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int xts_encrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, true, aesbs_xts_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, false, aesbs_xts_decrypt);
}
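
/*
 * Priority 250 keeps these algorithms below the ARMv8 Crypto Extensions
 * based drivers, so they are only selected on cores that lack the
 * dedicated AES instructions.
 */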
static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "ecb-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.setkey			= aesbs_setkey,
	.encrypt		= ecb_encrypt,
	.decrypt		= ecb_decrypt,
}, {
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "cbc-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_cbc_ctr_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_cbc_ctr_setkey,
	.encrypt		= cbc_encrypt,
	.decrypt		= cbc_decrypt,
}, {
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "ctr-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aesbs_cbc_ctr_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_cbc_ctr_setkey,
	.encrypt		= ctr_encrypt,
	.decrypt		= ctr_encrypt,
}, {
	.base.cra_name		= "xts(aes)",
	.base.cra_driver_name	= "xts-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_xts_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_xts_setkey,
	.encrypt		= xts_encrypt,
	.decrypt		= xts_decrypt,
} };

static void aes_exit(void)
{
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	if (!cpu_have_named_feature(ASIMD))
		return -ENODEV;

	return crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);