// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct crypto_rfc3686_ctx {
        struct crypto_skcipher *child;
        u8 nonce[CTR_RFC3686_NONCE_SIZE];
};
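
/*
 * Per-request state for the rfc3686 template: the full 16-byte counter
 * block (nonce || IV || block counter) followed by the child transform's
 * sub-request. CRYPTO_MINALIGN_ATTR keeps the embedded sub-request
 * suitably aligned.
 */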
struct crypto_rfc3686_req_ctx {
        u8 iv[CTR_RFC3686_BLOCK_SIZE];
        struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};
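
/*
 * Handle the final, possibly partial block: encrypt one counter block
 * into an aligned stack buffer and XOR only the remaining nbytes of
 * data against that keystream.
 */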
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
                                   struct crypto_cipher *tfm)
{
        unsigned int bsize = crypto_cipher_blocksize(tfm);
        unsigned long alignmask = crypto_cipher_alignmask(tfm);
        u8 *ctrblk = walk->iv;
        u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
        u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
        crypto_xor_cpy(dst, keystream, src, nbytes);

        crypto_inc(ctrblk, bsize);
}
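
/*
 * Walk segment where source and destination differ: the keystream can
 * be generated directly into dst and then XORed with src, one full
 * block at a time. Returns the number of leftover bytes (< bsize).
 */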
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
                                    struct crypto_cipher *tfm)
{
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                   crypto_cipher_alg(tfm)->cia_encrypt;
        unsigned int bsize = crypto_cipher_blocksize(tfm);
        u8 *ctrblk = walk->iv;
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        do {
                /* create keystream */
                fn(crypto_cipher_tfm(tfm), dst, ctrblk);
                crypto_xor(dst, src, bsize);

                /* increment counter in counterblock */
                crypto_inc(ctrblk, bsize);

                src += bsize;
                dst += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;
}
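
/*
 * In-place walk segment (src == dst): the keystream must be generated
 * in an aligned temporary buffer, since writing it straight into dst
 * would clobber the data that still has to be XORed.
 */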
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
                                    struct crypto_cipher *tfm)
{
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                   crypto_cipher_alg(tfm)->cia_encrypt;
        unsigned int bsize = crypto_cipher_blocksize(tfm);
        unsigned long alignmask = crypto_cipher_alignmask(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *ctrblk = walk->iv;
        u8 *src = walk->src.virt.addr;
        u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
        u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

        do {
                /* create keystream */
                fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
                crypto_xor(src, keystream, bsize);

                /* increment counter in counterblock */
                crypto_inc(ctrblk, bsize);

                src += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;
}
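
/*
 * Top-level CTR handler. Encryption and decryption are the same
 * operation in CTR mode (XOR with E_K(counter)), so this serves as
 * both the .encrypt and .decrypt callback.
 */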
static int crypto_ctr_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
        const unsigned int bsize = crypto_cipher_blocksize(cipher);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= bsize) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
                        nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
                else
                        nbytes = crypto_ctr_crypt_segment(&walk, cipher);

                err = skcipher_walk_done(&walk, nbytes);
        }

        if (walk.nbytes) {
                crypto_ctr_crypt_final(&walk, cipher);
                err = skcipher_walk_done(&walk, 0);
        }

        return err;
}
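
/*
 * Instantiate "ctr(cipher)". The underlying block size must be a
 * nonzero multiple of 4 bytes, since crypto_inc() steps the counter
 * block in 32-bit words.
 */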
static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct skcipher_instance *inst;
        struct crypto_alg *alg;
        int err;

        inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
        if (IS_ERR(inst))
                return PTR_ERR(inst);

        /* Block size must be >= 4 bytes. */
        err = -EINVAL;
        if (alg->cra_blocksize < 4)
                goto out_free_inst;

        /* If this is false we'd fail the alignment of crypto_inc. */
        if (alg->cra_blocksize % 4)
                goto out_free_inst;

        /* CTR mode is a stream cipher. */
        inst->alg.base.cra_blocksize = 1;

        /*
         * To simplify the implementation, configure the skcipher walk to only
         * give a partial block at the very end, never earlier.
         */
        inst->alg.chunksize = alg->cra_blocksize;

        inst->alg.encrypt = crypto_ctr_crypt;
        inst->alg.decrypt = crypto_ctr_crypt;

        err = skcipher_register_instance(tmpl, inst);
        if (err)
                goto out_free_inst;
        goto out_put_alg;

out_free_inst:
        inst->free(inst);
out_put_alg:
        crypto_mod_put(alg);
        return err;
}
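
/*
 * RFC 3686 keys carry the 4-byte nonce appended to the underlying
 * cipher key, so the nonce is split off into the context here and only
 * the remaining bytes are handed to the child transform's setkey.
 */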
static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
                                 const u8 *key, unsigned int keylen)
{
        struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
        struct crypto_skcipher *child = ctx->child;
        int err;

        /* the nonce is stored in bytes at end of key */
        if (keylen < CTR_RFC3686_NONCE_SIZE)
                return -EINVAL;

        memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
               CTR_RFC3686_NONCE_SIZE);

        keylen -= CTR_RFC3686_NONCE_SIZE;

        crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
                                         CRYPTO_TFM_REQ_MASK);
        err = crypto_skcipher_setkey(child, key, keylen);
        crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
                                          CRYPTO_TFM_RES_MASK);

        return err;
}
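
/*
 * Assemble the 16-byte RFC 3686 counter block: the 4-byte nonce saved
 * at setkey time, the caller's 8-byte IV, and a 4-byte big-endian block
 * counter starting at 1, then run the request through the child CTR
 * transform.
 */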
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *child = ctx->child;
        unsigned long align = crypto_skcipher_alignmask(tfm);
        struct crypto_rfc3686_req_ctx *rctx =
                (void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
        struct skcipher_request *subreq = &rctx->subreq;
        u8 *iv = rctx->iv;

        /* set up counter block */
        memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
        memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

        /* initialize counter portion of counter block */
        *(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
                cpu_to_be32(1);

        skcipher_request_set_tfm(subreq, child);
        skcipher_request_set_callback(subreq, req->base.flags,
                                      req->base.complete, req->base.data);
        skcipher_request_set_crypt(subreq, req->src, req->dst,
                                   req->cryptlen, iv);

        return crypto_skcipher_encrypt(subreq);
}
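
/*
 * Allocate the child transform and size the request context to fit an
 * aligned crypto_rfc3686_req_ctx plus the child's own request; "align"
 * is reduced to just the padding needed beyond the alignment that the
 * request context already guarantees.
 */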
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
        struct skcipher_instance *inst = skcipher_alg_instance(tfm);
        struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
        struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *cipher;
        unsigned long align;
        unsigned int reqsize;

        cipher = crypto_spawn_skcipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;

        align = crypto_skcipher_alignmask(tfm);
        align &= ~(crypto_tfm_ctx_alignment() - 1);
        reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
                  crypto_skcipher_reqsize(cipher);
        crypto_skcipher_set_reqsize(tfm, reqsize);

        return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
        struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

        crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
        struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

        crypto_drop_skcipher(spawn);
        kfree(inst);
}
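
/*
 * Instantiate "rfc3686(...)". The inner algorithm must itself be a
 * stream cipher (cra_blocksize == 1) taking the full 16-byte counter
 * block as its IV, i.e. a CTR mode transform.
 */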
static int crypto_rfc3686_create(struct crypto_template *tmpl,
                                 struct rtattr **tb)
{
        struct crypto_attr_type *algt;
        struct skcipher_instance *inst;
        struct skcipher_alg *alg;
        struct crypto_skcipher_spawn *spawn;
        const char *cipher_name;
        u32 mask;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
                return -EINVAL;

        cipher_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(cipher_name))
                return PTR_ERR(cipher_name);

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        mask = crypto_requires_sync(algt->type, algt->mask) |
                crypto_requires_off(algt->type, algt->mask,
                                    CRYPTO_ALG_NEED_FALLBACK);

        spawn = skcipher_instance_ctx(inst);

        crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
        err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
        if (err)
                goto err_free_inst;

        alg = crypto_spawn_skcipher_alg(spawn);

        /* We only support 16-byte blocks. */
        err = -EINVAL;
        if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
                goto err_drop_spawn;

        /* Not a stream cipher? */
        if (alg->base.cra_blocksize != 1)
                goto err_drop_spawn;

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
                     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
                goto err_drop_spawn;
        if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "rfc3686(%s)", alg->base.cra_driver_name) >=
            CRYPTO_MAX_ALG_NAME)
                goto err_drop_spawn;

        inst->alg.base.cra_priority = alg->base.cra_priority;
        inst->alg.base.cra_blocksize = 1;
        inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

        inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

        inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
        inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
        inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
                                CTR_RFC3686_NONCE_SIZE;
        inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
                                CTR_RFC3686_NONCE_SIZE;

        inst->alg.setkey = crypto_rfc3686_setkey;
        inst->alg.encrypt = crypto_rfc3686_crypt;
        inst->alg.decrypt = crypto_rfc3686_crypt;

        inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

        inst->alg.init = crypto_rfc3686_init_tfm;
        inst->alg.exit = crypto_rfc3686_exit_tfm;

        inst->free = crypto_rfc3686_free;

        err = skcipher_register_instance(tmpl, inst);
        if (err)
                goto err_drop_spawn;

out:
        return err;

err_drop_spawn:
        crypto_drop_skcipher(spawn);
err_free_inst:
        kfree(inst);
        goto out;
}
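
/*
 * The two templates provided by this module: "ctr" builds plain counter
 * mode around a block cipher, and "rfc3686" wraps an existing CTR
 * transform, e.g. "rfc3686(ctr(aes))".
 */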
static struct crypto_template crypto_ctr_tmpls[] = {
        {
                .name = "ctr",
                .create = crypto_ctr_create,
                .module = THIS_MODULE,
        }, {
                .name = "rfc3686",
                .create = crypto_rfc3686_create,
                .module = THIS_MODULE,
        },
};

static int __init crypto_ctr_module_init(void)
{
        return crypto_register_templates(crypto_ctr_tmpls,
                                         ARRAY_SIZE(crypto_ctr_tmpls));
}

static void __exit crypto_ctr_module_exit(void)
{
        crypto_unregister_templates(crypto_ctr_tmpls,
                                    ARRAY_SIZE(crypto_ctr_tmpls));
}

subsys_initcall(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");