// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * This file provides API support for AEAD algorithms.
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */
10 #include <crypto/internal/aead.h>
11 #include <linux/cryptouser.h>
12 #include <linux/errno.h>
13 #include <linux/init.h>
14 #include <linux/kernel.h>
15 #include <linux/module.h>
16 #include <linux/slab.h>
17 #include <linux/seq_file.h>
18 #include <linux/string.h>
19 #include <net/netlink.h>
23 static inline struct crypto_istat_aead
*aead_get_stat(struct aead_alg
*alg
)
25 #ifdef CONFIG_CRYPTO_STATS
32 static int setkey_unaligned(struct crypto_aead
*tfm
, const u8
*key
,
35 unsigned long alignmask
= crypto_aead_alignmask(tfm
);
37 u8
*buffer
, *alignbuffer
;
40 absize
= keylen
+ alignmask
;
41 buffer
= kmalloc(absize
, GFP_ATOMIC
);
45 alignbuffer
= (u8
*)ALIGN((unsigned long)buffer
, alignmask
+ 1);
46 memcpy(alignbuffer
, key
, keylen
);
47 ret
= crypto_aead_alg(tfm
)->setkey(tfm
, alignbuffer
, keylen
);
48 memset(alignbuffer
, 0, keylen
);
53 int crypto_aead_setkey(struct crypto_aead
*tfm
,
54 const u8
*key
, unsigned int keylen
)
56 unsigned long alignmask
= crypto_aead_alignmask(tfm
);
59 if ((unsigned long)key
& alignmask
)
60 err
= setkey_unaligned(tfm
, key
, keylen
);
62 err
= crypto_aead_alg(tfm
)->setkey(tfm
, key
, keylen
);
65 crypto_aead_set_flags(tfm
, CRYPTO_TFM_NEED_KEY
);
69 crypto_aead_clear_flags(tfm
, CRYPTO_TFM_NEED_KEY
);
72 EXPORT_SYMBOL_GPL(crypto_aead_setkey
);
74 int crypto_aead_setauthsize(struct crypto_aead
*tfm
, unsigned int authsize
)
78 if ((!authsize
&& crypto_aead_maxauthsize(tfm
)) ||
79 authsize
> crypto_aead_maxauthsize(tfm
))
82 if (crypto_aead_alg(tfm
)->setauthsize
) {
83 err
= crypto_aead_alg(tfm
)->setauthsize(tfm
, authsize
);
88 tfm
->authsize
= authsize
;
91 EXPORT_SYMBOL_GPL(crypto_aead_setauthsize
);
93 static inline int crypto_aead_errstat(struct crypto_istat_aead
*istat
, int err
)
95 if (!IS_ENABLED(CONFIG_CRYPTO_STATS
))
98 if (err
&& err
!= -EINPROGRESS
&& err
!= -EBUSY
)
99 atomic64_inc(&istat
->err_cnt
);
104 int crypto_aead_encrypt(struct aead_request
*req
)
106 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
107 struct aead_alg
*alg
= crypto_aead_alg(aead
);
108 struct crypto_istat_aead
*istat
;
111 istat
= aead_get_stat(alg
);
113 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
114 atomic64_inc(&istat
->encrypt_cnt
);
115 atomic64_add(req
->cryptlen
, &istat
->encrypt_tlen
);
118 if (crypto_aead_get_flags(aead
) & CRYPTO_TFM_NEED_KEY
)
121 ret
= alg
->encrypt(req
);
123 return crypto_aead_errstat(istat
, ret
);
125 EXPORT_SYMBOL_GPL(crypto_aead_encrypt
);
127 int crypto_aead_decrypt(struct aead_request
*req
)
129 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
130 struct aead_alg
*alg
= crypto_aead_alg(aead
);
131 struct crypto_istat_aead
*istat
;
134 istat
= aead_get_stat(alg
);
136 if (IS_ENABLED(CONFIG_CRYPTO_STATS
)) {
137 atomic64_inc(&istat
->encrypt_cnt
);
138 atomic64_add(req
->cryptlen
, &istat
->encrypt_tlen
);
141 if (crypto_aead_get_flags(aead
) & CRYPTO_TFM_NEED_KEY
)
143 else if (req
->cryptlen
< crypto_aead_authsize(aead
))
146 ret
= alg
->decrypt(req
);
148 return crypto_aead_errstat(istat
, ret
);
150 EXPORT_SYMBOL_GPL(crypto_aead_decrypt
);
152 static void crypto_aead_exit_tfm(struct crypto_tfm
*tfm
)
154 struct crypto_aead
*aead
= __crypto_aead_cast(tfm
);
155 struct aead_alg
*alg
= crypto_aead_alg(aead
);
160 static int crypto_aead_init_tfm(struct crypto_tfm
*tfm
)
162 struct crypto_aead
*aead
= __crypto_aead_cast(tfm
);
163 struct aead_alg
*alg
= crypto_aead_alg(aead
);
165 crypto_aead_set_flags(aead
, CRYPTO_TFM_NEED_KEY
);
167 aead
->authsize
= alg
->maxauthsize
;
170 aead
->base
.exit
= crypto_aead_exit_tfm
;
173 return alg
->init(aead
);
178 static int __maybe_unused
crypto_aead_report(
179 struct sk_buff
*skb
, struct crypto_alg
*alg
)
181 struct crypto_report_aead raead
;
182 struct aead_alg
*aead
= container_of(alg
, struct aead_alg
, base
);
184 memset(&raead
, 0, sizeof(raead
));
186 strscpy(raead
.type
, "aead", sizeof(raead
.type
));
187 strscpy(raead
.geniv
, "<none>", sizeof(raead
.geniv
));
189 raead
.blocksize
= alg
->cra_blocksize
;
190 raead
.maxauthsize
= aead
->maxauthsize
;
191 raead
.ivsize
= aead
->ivsize
;
193 return nla_put(skb
, CRYPTOCFGA_REPORT_AEAD
, sizeof(raead
), &raead
);
196 static void crypto_aead_show(struct seq_file
*m
, struct crypto_alg
*alg
)
198 static void crypto_aead_show(struct seq_file
*m
, struct crypto_alg
*alg
)
200 struct aead_alg
*aead
= container_of(alg
, struct aead_alg
, base
);
202 seq_printf(m
, "type : aead\n");
203 seq_printf(m
, "async : %s\n", alg
->cra_flags
& CRYPTO_ALG_ASYNC
?
205 seq_printf(m
, "blocksize : %u\n", alg
->cra_blocksize
);
206 seq_printf(m
, "ivsize : %u\n", aead
->ivsize
);
207 seq_printf(m
, "maxauthsize : %u\n", aead
->maxauthsize
);
208 seq_printf(m
, "geniv : <none>\n");
211 static void crypto_aead_free_instance(struct crypto_instance
*inst
)
213 struct aead_instance
*aead
= aead_instance(inst
);
218 static int __maybe_unused
crypto_aead_report_stat(
219 struct sk_buff
*skb
, struct crypto_alg
*alg
)
221 struct aead_alg
*aead
= container_of(alg
, struct aead_alg
, base
);
222 struct crypto_istat_aead
*istat
= aead_get_stat(aead
);
223 struct crypto_stat_aead raead
;
225 memset(&raead
, 0, sizeof(raead
));
227 strscpy(raead
.type
, "aead", sizeof(raead
.type
));
229 raead
.stat_encrypt_cnt
= atomic64_read(&istat
->encrypt_cnt
);
230 raead
.stat_encrypt_tlen
= atomic64_read(&istat
->encrypt_tlen
);
231 raead
.stat_decrypt_cnt
= atomic64_read(&istat
->decrypt_cnt
);
232 raead
.stat_decrypt_tlen
= atomic64_read(&istat
->decrypt_tlen
);
233 raead
.stat_err_cnt
= atomic64_read(&istat
->err_cnt
);
235 return nla_put(skb
, CRYPTOCFGA_STAT_AEAD
, sizeof(raead
), &raead
);
238 static const struct crypto_type crypto_aead_type
= {
239 .extsize
= crypto_alg_extsize
,
240 .init_tfm
= crypto_aead_init_tfm
,
241 .free
= crypto_aead_free_instance
,
242 #ifdef CONFIG_PROC_FS
243 .show
= crypto_aead_show
,
245 #if IS_ENABLED(CONFIG_CRYPTO_USER)
246 .report
= crypto_aead_report
,
248 #ifdef CONFIG_CRYPTO_STATS
249 .report_stat
= crypto_aead_report_stat
,
251 .maskclear
= ~CRYPTO_ALG_TYPE_MASK
,
252 .maskset
= CRYPTO_ALG_TYPE_MASK
,
253 .type
= CRYPTO_ALG_TYPE_AEAD
,
254 .tfmsize
= offsetof(struct crypto_aead
, base
),
257 int crypto_grab_aead(struct crypto_aead_spawn
*spawn
,
258 struct crypto_instance
*inst
,
259 const char *name
, u32 type
, u32 mask
)
261 spawn
->base
.frontend
= &crypto_aead_type
;
262 return crypto_grab_spawn(&spawn
->base
, inst
, name
, type
, mask
);
264 EXPORT_SYMBOL_GPL(crypto_grab_aead
);
266 struct crypto_aead
*crypto_alloc_aead(const char *alg_name
, u32 type
, u32 mask
)
268 return crypto_alloc_tfm(alg_name
, &crypto_aead_type
, type
, mask
);
270 EXPORT_SYMBOL_GPL(crypto_alloc_aead
);
272 static int aead_prepare_alg(struct aead_alg
*alg
)
274 struct crypto_istat_aead
*istat
= aead_get_stat(alg
);
275 struct crypto_alg
*base
= &alg
->base
;
277 if (max3(alg
->maxauthsize
, alg
->ivsize
, alg
->chunksize
) >
282 alg
->chunksize
= base
->cra_blocksize
;
284 base
->cra_type
= &crypto_aead_type
;
285 base
->cra_flags
&= ~CRYPTO_ALG_TYPE_MASK
;
286 base
->cra_flags
|= CRYPTO_ALG_TYPE_AEAD
;
288 if (IS_ENABLED(CONFIG_CRYPTO_STATS
))
289 memset(istat
, 0, sizeof(*istat
));
294 int crypto_register_aead(struct aead_alg
*alg
)
296 struct crypto_alg
*base
= &alg
->base
;
299 err
= aead_prepare_alg(alg
);
303 return crypto_register_alg(base
);
305 EXPORT_SYMBOL_GPL(crypto_register_aead
);
307 void crypto_unregister_aead(struct aead_alg
*alg
)
309 crypto_unregister_alg(&alg
->base
);
311 EXPORT_SYMBOL_GPL(crypto_unregister_aead
);
313 int crypto_register_aeads(struct aead_alg
*algs
, int count
)
317 for (i
= 0; i
< count
; i
++) {
318 ret
= crypto_register_aead(&algs
[i
]);
326 for (--i
; i
>= 0; --i
)
327 crypto_unregister_aead(&algs
[i
]);
331 EXPORT_SYMBOL_GPL(crypto_register_aeads
);
333 void crypto_unregister_aeads(struct aead_alg
*algs
, int count
)
337 for (i
= count
- 1; i
>= 0; --i
)
338 crypto_unregister_aead(&algs
[i
]);
340 EXPORT_SYMBOL_GPL(crypto_unregister_aeads
);
342 int aead_register_instance(struct crypto_template
*tmpl
,
343 struct aead_instance
*inst
)
347 if (WARN_ON(!inst
->free
))
350 err
= aead_prepare_alg(&inst
->alg
);
354 return crypto_register_instance(tmpl
, aead_crypto_instance(inst
));
356 EXPORT_SYMBOL_GPL(aead_register_instance
);
358 MODULE_LICENSE("GPL");
359 MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");