// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Linear symmetric key cipher operations.
 *
 * Generic encrypt/decrypt wrapper for ciphers.
 *
 * Copyright (c) 2023 Herbert Xu <herbert@gondor.apana.org.au>
 */
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/kernel.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "skcipher.h"
20 static inline struct crypto_lskcipher
*__crypto_lskcipher_cast(
21 struct crypto_tfm
*tfm
)
23 return container_of(tfm
, struct crypto_lskcipher
, base
);
26 static inline struct lskcipher_alg
*__crypto_lskcipher_alg(
27 struct crypto_alg
*alg
)
29 return container_of(alg
, struct lskcipher_alg
, co
.base
);
32 static inline struct crypto_istat_cipher
*lskcipher_get_stat(
33 struct lskcipher_alg
*alg
)
35 return skcipher_get_stat_common(&alg
->co
);
38 static inline int crypto_lskcipher_errstat(struct lskcipher_alg
*alg
, int err
)
40 struct crypto_istat_cipher
*istat
= lskcipher_get_stat(alg
);
42 if (!IS_ENABLED(CONFIG_CRYPTO_STATS
))
46 atomic64_inc(&istat
->err_cnt
);
51 static int lskcipher_setkey_unaligned(struct crypto_lskcipher
*tfm
,
52 const u8
*key
, unsigned int keylen
)
54 unsigned long alignmask
= crypto_lskcipher_alignmask(tfm
);
55 struct lskcipher_alg
*cipher
= crypto_lskcipher_alg(tfm
);
56 u8
*buffer
, *alignbuffer
;
60 absize
= keylen
+ alignmask
;
61 buffer
= kmalloc(absize
, GFP_ATOMIC
);
65 alignbuffer
= (u8
*)ALIGN((unsigned long)buffer
, alignmask
+ 1);
66 memcpy(alignbuffer
, key
, keylen
);
67 ret
= cipher
->setkey(tfm
, alignbuffer
, keylen
);
68 kfree_sensitive(buffer
);
72 int crypto_lskcipher_setkey(struct crypto_lskcipher
*tfm
, const u8
*key
,
75 unsigned long alignmask
= crypto_lskcipher_alignmask(tfm
);
76 struct lskcipher_alg
*cipher
= crypto_lskcipher_alg(tfm
);
78 if (keylen
< cipher
->co
.min_keysize
|| keylen
> cipher
->co
.max_keysize
)
81 if ((unsigned long)key
& alignmask
)
82 return lskcipher_setkey_unaligned(tfm
, key
, keylen
);
84 return cipher
->setkey(tfm
, key
, keylen
);
86 EXPORT_SYMBOL_GPL(crypto_lskcipher_setkey
);
88 static int crypto_lskcipher_crypt_unaligned(
89 struct crypto_lskcipher
*tfm
, const u8
*src
, u8
*dst
, unsigned len
,
90 u8
*iv
, int (*crypt
)(struct crypto_lskcipher
*tfm
, const u8
*src
,
91 u8
*dst
, unsigned len
, u8
*iv
, bool final
))
93 unsigned ivsize
= crypto_lskcipher_ivsize(tfm
);
94 unsigned bs
= crypto_lskcipher_blocksize(tfm
);
95 unsigned cs
= crypto_lskcipher_chunksize(tfm
);
100 BUILD_BUG_ON(MAX_CIPHER_BLOCKSIZE
> PAGE_SIZE
||
101 MAX_CIPHER_ALIGNMASK
>= PAGE_SIZE
);
103 tiv
= kmalloc(PAGE_SIZE
, GFP_ATOMIC
);
107 memcpy(tiv
, iv
, ivsize
);
109 p
= kmalloc(PAGE_SIZE
, GFP_ATOMIC
);
115 unsigned chunk
= min((unsigned)PAGE_SIZE
, len
);
121 memcpy(p
, src
, chunk
);
122 err
= crypt(tfm
, p
, p
, chunk
, tiv
, true);
126 memcpy(dst
, p
, chunk
);
132 err
= len
? -EINVAL
: 0;
135 memcpy(iv
, tiv
, ivsize
);
137 kfree_sensitive(tiv
);
141 static int crypto_lskcipher_crypt(struct crypto_lskcipher
*tfm
, const u8
*src
,
142 u8
*dst
, unsigned len
, u8
*iv
,
143 int (*crypt
)(struct crypto_lskcipher
*tfm
,
144 const u8
*src
, u8
*dst
,
145 unsigned len
, u8
*iv
,
148 unsigned long alignmask
= crypto_lskcipher_alignmask(tfm
);
149 struct lskcipher_alg
*alg
= crypto_lskcipher_alg(tfm
);
152 if (((unsigned long)src
| (unsigned long)dst
| (unsigned long)iv
) &
154 ret
= crypto_lskcipher_crypt_unaligned(tfm
, src
, dst
, len
, iv
,
159 ret
= crypt(tfm
, src
, dst
, len
, iv
, true);
162 return crypto_lskcipher_errstat(alg
, ret
);
165 int crypto_lskcipher_encrypt(struct crypto_lskcipher
*tfm
, const u8
*src
,
166 u8
*dst
, unsigned len
, u8
*iv
)
168 struct lskcipher_alg
*alg
= crypto_lskcipher_alg(tfm
);
170 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
171 struct crypto_istat_cipher
*istat
= lskcipher_get_stat(alg
);
173 atomic64_inc(&istat
->encrypt_cnt
);
174 atomic64_add(len
, &istat
->encrypt_tlen
);
177 return crypto_lskcipher_crypt(tfm
, src
, dst
, len
, iv
, alg
->encrypt
);
179 EXPORT_SYMBOL_GPL(crypto_lskcipher_encrypt
);
181 int crypto_lskcipher_decrypt(struct crypto_lskcipher
*tfm
, const u8
*src
,
182 u8
*dst
, unsigned len
, u8
*iv
)
184 struct lskcipher_alg
*alg
= crypto_lskcipher_alg(tfm
);
186 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
187 struct crypto_istat_cipher
*istat
= lskcipher_get_stat(alg
);
189 atomic64_inc(&istat
->decrypt_cnt
);
190 atomic64_add(len
, &istat
->decrypt_tlen
);
193 return crypto_lskcipher_crypt(tfm
, src
, dst
, len
, iv
, alg
->decrypt
);
195 EXPORT_SYMBOL_GPL(crypto_lskcipher_decrypt
);
197 static int crypto_lskcipher_crypt_sg(struct skcipher_request
*req
,
198 int (*crypt
)(struct crypto_lskcipher
*tfm
,
199 const u8
*src
, u8
*dst
,
200 unsigned len
, u8
*iv
,
203 struct crypto_skcipher
*skcipher
= crypto_skcipher_reqtfm(req
);
204 struct crypto_lskcipher
**ctx
= crypto_skcipher_ctx(skcipher
);
205 struct crypto_lskcipher
*tfm
= *ctx
;
206 struct skcipher_walk walk
;
209 err
= skcipher_walk_virt(&walk
, req
, false);
211 while (walk
.nbytes
) {
212 err
= crypt(tfm
, walk
.src
.virt
.addr
, walk
.dst
.virt
.addr
,
213 walk
.nbytes
, walk
.iv
, walk
.nbytes
== walk
.total
);
214 err
= skcipher_walk_done(&walk
, err
);
220 int crypto_lskcipher_encrypt_sg(struct skcipher_request
*req
)
222 struct crypto_skcipher
*skcipher
= crypto_skcipher_reqtfm(req
);
223 struct crypto_lskcipher
**ctx
= crypto_skcipher_ctx(skcipher
);
224 struct lskcipher_alg
*alg
= crypto_lskcipher_alg(*ctx
);
226 return crypto_lskcipher_crypt_sg(req
, alg
->encrypt
);
229 int crypto_lskcipher_decrypt_sg(struct skcipher_request
*req
)
231 struct crypto_skcipher
*skcipher
= crypto_skcipher_reqtfm(req
);
232 struct crypto_lskcipher
**ctx
= crypto_skcipher_ctx(skcipher
);
233 struct lskcipher_alg
*alg
= crypto_lskcipher_alg(*ctx
);
235 return crypto_lskcipher_crypt_sg(req
, alg
->decrypt
);
238 static void crypto_lskcipher_exit_tfm(struct crypto_tfm
*tfm
)
240 struct crypto_lskcipher
*skcipher
= __crypto_lskcipher_cast(tfm
);
241 struct lskcipher_alg
*alg
= crypto_lskcipher_alg(skcipher
);
246 static int crypto_lskcipher_init_tfm(struct crypto_tfm
*tfm
)
248 struct crypto_lskcipher
*skcipher
= __crypto_lskcipher_cast(tfm
);
249 struct lskcipher_alg
*alg
= crypto_lskcipher_alg(skcipher
);
252 skcipher
->base
.exit
= crypto_lskcipher_exit_tfm
;
255 return alg
->init(skcipher
);
260 static void crypto_lskcipher_free_instance(struct crypto_instance
*inst
)
262 struct lskcipher_instance
*skcipher
=
263 container_of(inst
, struct lskcipher_instance
, s
.base
);
265 skcipher
->free(skcipher
);
268 static void __maybe_unused
crypto_lskcipher_show(
269 struct seq_file
*m
, struct crypto_alg
*alg
)
271 struct lskcipher_alg
*skcipher
= __crypto_lskcipher_alg(alg
);
273 seq_printf(m
, "type : lskcipher\n");
274 seq_printf(m
, "blocksize : %u\n", alg
->cra_blocksize
);
275 seq_printf(m
, "min keysize : %u\n", skcipher
->co
.min_keysize
);
276 seq_printf(m
, "max keysize : %u\n", skcipher
->co
.max_keysize
);
277 seq_printf(m
, "ivsize : %u\n", skcipher
->co
.ivsize
);
278 seq_printf(m
, "chunksize : %u\n", skcipher
->co
.chunksize
);
281 static int __maybe_unused
crypto_lskcipher_report(
282 struct sk_buff
*skb
, struct crypto_alg
*alg
)
284 struct lskcipher_alg
*skcipher
= __crypto_lskcipher_alg(alg
);
285 struct crypto_report_blkcipher rblkcipher
;
287 memset(&rblkcipher
, 0, sizeof(rblkcipher
));
289 strscpy(rblkcipher
.type
, "lskcipher", sizeof(rblkcipher
.type
));
290 strscpy(rblkcipher
.geniv
, "<none>", sizeof(rblkcipher
.geniv
));
292 rblkcipher
.blocksize
= alg
->cra_blocksize
;
293 rblkcipher
.min_keysize
= skcipher
->co
.min_keysize
;
294 rblkcipher
.max_keysize
= skcipher
->co
.max_keysize
;
295 rblkcipher
.ivsize
= skcipher
->co
.ivsize
;
297 return nla_put(skb
, CRYPTOCFGA_REPORT_BLKCIPHER
,
298 sizeof(rblkcipher
), &rblkcipher
);
301 static int __maybe_unused
crypto_lskcipher_report_stat(
302 struct sk_buff
*skb
, struct crypto_alg
*alg
)
304 struct lskcipher_alg
*skcipher
= __crypto_lskcipher_alg(alg
);
305 struct crypto_istat_cipher
*istat
;
306 struct crypto_stat_cipher rcipher
;
308 istat
= lskcipher_get_stat(skcipher
);
310 memset(&rcipher
, 0, sizeof(rcipher
));
312 strscpy(rcipher
.type
, "cipher", sizeof(rcipher
.type
));
314 rcipher
.stat_encrypt_cnt
= atomic64_read(&istat
->encrypt_cnt
);
315 rcipher
.stat_encrypt_tlen
= atomic64_read(&istat
->encrypt_tlen
);
316 rcipher
.stat_decrypt_cnt
= atomic64_read(&istat
->decrypt_cnt
);
317 rcipher
.stat_decrypt_tlen
= atomic64_read(&istat
->decrypt_tlen
);
318 rcipher
.stat_err_cnt
= atomic64_read(&istat
->err_cnt
);
320 return nla_put(skb
, CRYPTOCFGA_STAT_CIPHER
, sizeof(rcipher
), &rcipher
);
323 static const struct crypto_type crypto_lskcipher_type
= {
324 .extsize
= crypto_alg_extsize
,
325 .init_tfm
= crypto_lskcipher_init_tfm
,
326 .free
= crypto_lskcipher_free_instance
,
327 #ifdef CONFIG_PROC_FS
328 .show
= crypto_lskcipher_show
,
330 #if IS_ENABLED(CONFIG_CRYPTO_USER)
331 .report
= crypto_lskcipher_report
,
333 #ifdef CONFIG_CRYPTO_STATS
334 .report_stat
= crypto_lskcipher_report_stat
,
336 .maskclear
= ~CRYPTO_ALG_TYPE_MASK
,
337 .maskset
= CRYPTO_ALG_TYPE_MASK
,
338 .type
= CRYPTO_ALG_TYPE_LSKCIPHER
,
339 .tfmsize
= offsetof(struct crypto_lskcipher
, base
),
/* Destructor for skcipher tfms backed by an inner lskcipher. */
static void crypto_lskcipher_exit_tfm_sg(struct crypto_tfm *tfm)
{
	struct crypto_lskcipher **ctx = crypto_tfm_ctx(tfm);

	crypto_free_lskcipher(*ctx);
}
349 int crypto_init_lskcipher_ops_sg(struct crypto_tfm
*tfm
)
351 struct crypto_lskcipher
**ctx
= crypto_tfm_ctx(tfm
);
352 struct crypto_alg
*calg
= tfm
->__crt_alg
;
353 struct crypto_lskcipher
*skcipher
;
355 if (!crypto_mod_get(calg
))
358 skcipher
= crypto_create_tfm(calg
, &crypto_lskcipher_type
);
359 if (IS_ERR(skcipher
)) {
360 crypto_mod_put(calg
);
361 return PTR_ERR(skcipher
);
365 tfm
->exit
= crypto_lskcipher_exit_tfm_sg
;
370 int crypto_grab_lskcipher(struct crypto_lskcipher_spawn
*spawn
,
371 struct crypto_instance
*inst
,
372 const char *name
, u32 type
, u32 mask
)
374 spawn
->base
.frontend
= &crypto_lskcipher_type
;
375 return crypto_grab_spawn(&spawn
->base
, inst
, name
, type
, mask
);
377 EXPORT_SYMBOL_GPL(crypto_grab_lskcipher
);
379 struct crypto_lskcipher
*crypto_alloc_lskcipher(const char *alg_name
,
382 return crypto_alloc_tfm(alg_name
, &crypto_lskcipher_type
, type
, mask
);
384 EXPORT_SYMBOL_GPL(crypto_alloc_lskcipher
);
386 static int lskcipher_prepare_alg(struct lskcipher_alg
*alg
)
388 struct crypto_alg
*base
= &alg
->co
.base
;
391 err
= skcipher_prepare_alg_common(&alg
->co
);
395 if (alg
->co
.chunksize
& (alg
->co
.chunksize
- 1))
398 base
->cra_type
= &crypto_lskcipher_type
;
399 base
->cra_flags
|= CRYPTO_ALG_TYPE_LSKCIPHER
;
404 int crypto_register_lskcipher(struct lskcipher_alg
*alg
)
406 struct crypto_alg
*base
= &alg
->co
.base
;
409 err
= lskcipher_prepare_alg(alg
);
413 return crypto_register_alg(base
);
415 EXPORT_SYMBOL_GPL(crypto_register_lskcipher
);
417 void crypto_unregister_lskcipher(struct lskcipher_alg
*alg
)
419 crypto_unregister_alg(&alg
->co
.base
);
421 EXPORT_SYMBOL_GPL(crypto_unregister_lskcipher
);
423 int crypto_register_lskciphers(struct lskcipher_alg
*algs
, int count
)
427 for (i
= 0; i
< count
; i
++) {
428 ret
= crypto_register_lskcipher(&algs
[i
]);
436 for (--i
; i
>= 0; --i
)
437 crypto_unregister_lskcipher(&algs
[i
]);
441 EXPORT_SYMBOL_GPL(crypto_register_lskciphers
);
443 void crypto_unregister_lskciphers(struct lskcipher_alg
*algs
, int count
)
447 for (i
= count
- 1; i
>= 0; --i
)
448 crypto_unregister_lskcipher(&algs
[i
]);
450 EXPORT_SYMBOL_GPL(crypto_unregister_lskciphers
);
452 int lskcipher_register_instance(struct crypto_template
*tmpl
,
453 struct lskcipher_instance
*inst
)
457 if (WARN_ON(!inst
->free
))
460 err
= lskcipher_prepare_alg(&inst
->alg
);
464 return crypto_register_instance(tmpl
, lskcipher_crypto_instance(inst
));
466 EXPORT_SYMBOL_GPL(lskcipher_register_instance
);
468 static int lskcipher_setkey_simple(struct crypto_lskcipher
*tfm
, const u8
*key
,
471 struct crypto_lskcipher
*cipher
= lskcipher_cipher_simple(tfm
);
473 crypto_lskcipher_clear_flags(cipher
, CRYPTO_TFM_REQ_MASK
);
474 crypto_lskcipher_set_flags(cipher
, crypto_lskcipher_get_flags(tfm
) &
475 CRYPTO_TFM_REQ_MASK
);
476 return crypto_lskcipher_setkey(cipher
, key
, keylen
);
/*
 * Default init for simple template instances: instantiate the spawned
 * inner cipher and stash it in the tfm context.
 */
static int lskcipher_init_tfm_simple(struct crypto_lskcipher *tfm)
{
	struct lskcipher_instance *inst = lskcipher_alg_instance(tfm);
	struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);
	struct crypto_lskcipher_spawn *spawn;
	struct crypto_lskcipher *cipher;

	spawn = lskcipher_instance_ctx(inst);
	cipher = crypto_spawn_lskcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	*ctx = cipher;
	return 0;
}
/* Default exit for simple template instances: drop the inner cipher. */
static void lskcipher_exit_tfm_simple(struct crypto_lskcipher *tfm)
{
	struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);

	crypto_free_lskcipher(*ctx);
}
/* Default free for simple template instances: release spawn, then inst. */
static void lskcipher_free_instance_simple(struct lskcipher_instance *inst)
{
	crypto_drop_lskcipher(lskcipher_instance_ctx(inst));
	kfree(inst);
}
509 * lskcipher_alloc_instance_simple - allocate instance of simple block cipher
511 * Allocate an lskcipher_instance for a simple block cipher mode of operation,
512 * e.g. cbc or ecb. The instance context will have just a single crypto_spawn,
513 * that for the underlying cipher. The {min,max}_keysize, ivsize, blocksize,
514 * alignmask, and priority are set from the underlying cipher but can be
515 * overridden if needed. The tfm context defaults to
516 * struct crypto_lskcipher *, and default ->setkey(), ->init(), and
517 * ->exit() methods are installed.
519 * @tmpl: the template being instantiated
520 * @tb: the template parameters
522 * Return: a pointer to the new instance, or an ERR_PTR(). The caller still
523 * needs to register the instance.
525 struct lskcipher_instance
*lskcipher_alloc_instance_simple(
526 struct crypto_template
*tmpl
, struct rtattr
**tb
)
529 struct lskcipher_instance
*inst
;
530 struct crypto_lskcipher_spawn
*spawn
;
531 char ecb_name
[CRYPTO_MAX_ALG_NAME
];
532 struct lskcipher_alg
*cipher_alg
;
533 const char *cipher_name
;
536 err
= crypto_check_attr_type(tb
, CRYPTO_ALG_TYPE_LSKCIPHER
, &mask
);
540 cipher_name
= crypto_attr_alg_name(tb
[1]);
541 if (IS_ERR(cipher_name
))
542 return ERR_CAST(cipher_name
);
544 inst
= kzalloc(sizeof(*inst
) + sizeof(*spawn
), GFP_KERNEL
);
546 return ERR_PTR(-ENOMEM
);
548 spawn
= lskcipher_instance_ctx(inst
);
549 err
= crypto_grab_lskcipher(spawn
,
550 lskcipher_crypto_instance(inst
),
551 cipher_name
, 0, mask
);
554 if (err
== -ENOENT
&& !!memcmp(tmpl
->name
, "ecb", 4)) {
556 if (snprintf(ecb_name
, CRYPTO_MAX_ALG_NAME
, "ecb(%s)",
557 cipher_name
) >= CRYPTO_MAX_ALG_NAME
)
560 err
= crypto_grab_lskcipher(spawn
,
561 lskcipher_crypto_instance(inst
),
568 cipher_alg
= crypto_lskcipher_spawn_alg(spawn
);
570 err
= crypto_inst_setname(lskcipher_crypto_instance(inst
), tmpl
->name
,
571 &cipher_alg
->co
.base
);
579 len
= strscpy(ecb_name
, &cipher_alg
->co
.base
.cra_name
[4],
584 if (ecb_name
[len
- 1] != ')')
587 ecb_name
[len
- 1] = 0;
590 if (snprintf(inst
->alg
.co
.base
.cra_name
, CRYPTO_MAX_ALG_NAME
,
591 "%s(%s)", tmpl
->name
, ecb_name
) >=
595 if (strcmp(ecb_name
, cipher_name
) &&
596 snprintf(inst
->alg
.co
.base
.cra_driver_name
,
598 "%s(%s)", tmpl
->name
, cipher_name
) >=
602 /* Don't allow nesting. */
604 if ((cipher_alg
->co
.base
.cra_flags
& CRYPTO_ALG_INSTANCE
))
609 if (cipher_alg
->co
.ivsize
)
612 inst
->free
= lskcipher_free_instance_simple
;
614 /* Default algorithm properties, can be overridden */
615 inst
->alg
.co
.base
.cra_blocksize
= cipher_alg
->co
.base
.cra_blocksize
;
616 inst
->alg
.co
.base
.cra_alignmask
= cipher_alg
->co
.base
.cra_alignmask
;
617 inst
->alg
.co
.base
.cra_priority
= cipher_alg
->co
.base
.cra_priority
;
618 inst
->alg
.co
.min_keysize
= cipher_alg
->co
.min_keysize
;
619 inst
->alg
.co
.max_keysize
= cipher_alg
->co
.max_keysize
;
620 inst
->alg
.co
.ivsize
= cipher_alg
->co
.base
.cra_blocksize
;
622 /* Use struct crypto_lskcipher * by default, can be overridden */
623 inst
->alg
.co
.base
.cra_ctxsize
= sizeof(struct crypto_lskcipher
*);
624 inst
->alg
.setkey
= lskcipher_setkey_simple
;
625 inst
->alg
.init
= lskcipher_init_tfm_simple
;
626 inst
->alg
.exit
= lskcipher_exit_tfm_simple
;
631 lskcipher_free_instance_simple(inst
);
634 EXPORT_SYMBOL_GPL(lskcipher_alloc_instance_simple
);