// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for assembler optimized version of 3DES
 *
 * Copyright © 2014 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/algapi.h>
#include <crypto/des.h>
#include <crypto/internal/skcipher.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>

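/*
 * Per-transform context: separate expanded key schedules for the encryption
 * and decryption directions, in the layout expected by the assembler
 * routines.
 */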
struct des3_ede_x86_ctx {
        u32 enc_expkey[DES3_EDE_EXPKEY_WORDS];
        u32 dec_expkey[DES3_EDE_EXPKEY_WORDS];
};

/* regular block cipher functions */
asmlinkage void des3_ede_x86_64_crypt_blk(const u32 *expkey, u8 *dst,
                                          const u8 *src);

/* 3-way parallel cipher functions */
asmlinkage void des3_ede_x86_64_crypt_blk_3way(const u32 *expkey, u8 *dst,
                                               const u8 *src);

static inline void des3_ede_enc_blk(struct des3_ede_x86_ctx *ctx, u8 *dst,
                                    const u8 *src)
{
        u32 *enc_ctx = ctx->enc_expkey;

        des3_ede_x86_64_crypt_blk(enc_ctx, dst, src);
}

static inline void des3_ede_dec_blk(struct des3_ede_x86_ctx *ctx, u8 *dst,
                                    const u8 *src)
{
        u32 *dec_ctx = ctx->dec_expkey;

        des3_ede_x86_64_crypt_blk(dec_ctx, dst, src);
}

static inline void des3_ede_enc_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst,
                                         const u8 *src)
{
        u32 *enc_ctx = ctx->enc_expkey;

        des3_ede_x86_64_crypt_blk_3way(enc_ctx, dst, src);
}

static inline void des3_ede_dec_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst,
                                         const u8 *src)
{
        u32 *dec_ctx = ctx->dec_expkey;

        des3_ede_x86_64_crypt_blk_3way(dec_ctx, dst, src);
}

static void des3_ede_x86_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        des3_ede_enc_blk(crypto_tfm_ctx(tfm), dst, src);
}

static void des3_ede_x86_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        des3_ede_dec_blk(crypto_tfm_ctx(tfm), dst, src);
}

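/*
 * ECB helper shared by encryption and decryption: walk the request and
 * process three blocks at a time with the 3-way assembler routine, then
 * fall back to the single-block routine for any remaining blocks.
 */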
static int ecb_crypt(struct skcipher_request *req, const u32 *expkey)
{
        const unsigned int bsize = DES3_EDE_BLOCK_SIZE;
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                u8 *wsrc = walk.src.virt.addr;
                u8 *wdst = walk.dst.virt.addr;

                /* Process three block batch */
                if (nbytes >= bsize * 3) {
                        do {
                                des3_ede_x86_64_crypt_blk_3way(expkey, wdst,
                                                               wsrc);

                                wsrc += bsize * 3;
                                wdst += bsize * 3;
                                nbytes -= bsize * 3;
                        } while (nbytes >= bsize * 3);

                        if (nbytes < bsize)
                                goto done;
                }

                /* Handle leftovers */
                do {
                        des3_ede_x86_64_crypt_blk(expkey, wdst, wsrc);

                        wsrc += bsize;
                        wdst += bsize;
                        nbytes -= bsize;
                } while (nbytes >= bsize);

done:
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);

        return ecb_crypt(req, ctx->enc_expkey);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);

        return ecb_crypt(req, ctx->dec_expkey);
}

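/*
 * CBC encryption is inherently serial: each plaintext block is XORed with
 * the previous ciphertext block before it is encrypted, so only the
 * single-block routine can be used.  The 8-byte blocks are handled as u64
 * loads and stores.
 */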
static unsigned int __cbc_encrypt(struct des3_ede_x86_ctx *ctx,
                                  struct skcipher_walk *walk)
{
        unsigned int bsize = DES3_EDE_BLOCK_SIZE;
        unsigned int nbytes = walk->nbytes;
        u64 *src = (u64 *)walk->src.virt.addr;
        u64 *dst = (u64 *)walk->dst.virt.addr;
        u64 *iv = (u64 *)walk->iv;

        do {
                *dst = *src ^ *iv;
                des3_ede_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
                iv = dst;

                src += 1;
                dst += 1;
                nbytes -= bsize;
        } while (nbytes >= bsize);

        *(u64 *)walk->iv = *iv;
        return nbytes;
}

static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                nbytes = __cbc_encrypt(ctx, &walk);
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

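/*
 * CBC decryption can be parallelised: the chunk is processed from the last
 * block towards the first so that three-block batches can be decrypted with
 * the 3-way routine and then XORed with the preceding ciphertext blocks,
 * which are still intact in the source buffer.
 */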
static unsigned int __cbc_decrypt(struct des3_ede_x86_ctx *ctx,
                                  struct skcipher_walk *walk)
{
        unsigned int bsize = DES3_EDE_BLOCK_SIZE;
        unsigned int nbytes = walk->nbytes;
        u64 *src = (u64 *)walk->src.virt.addr;
        u64 *dst = (u64 *)walk->dst.virt.addr;
        u64 ivs[3 - 1];
        u64 last_iv;

        /* Start of the last block. */
        src += nbytes / bsize - 1;
        dst += nbytes / bsize - 1;

        last_iv = *src;

        /* Process three block batch */
        if (nbytes >= bsize * 3) {
                do {
                        nbytes -= bsize * 3 - bsize;
                        src -= 3 - 1;
                        dst -= 3 - 1;

                        ivs[0] = src[0];
                        ivs[1] = src[1];

                        des3_ede_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);

                        dst[1] ^= ivs[0];
                        dst[2] ^= ivs[1];

                        nbytes -= bsize;
                        if (nbytes < bsize)
                                goto done;

                        *dst ^= *(src - 1);
                        src -= 1;
                        dst -= 1;
                } while (nbytes >= bsize * 3);
        }

        /* Handle leftovers */
        for (;;) {
                des3_ede_dec_blk(ctx, (u8 *)dst, (u8 *)src);

                nbytes -= bsize;
                if (nbytes < bsize)
                        break;

                *dst ^= *(src - 1);
                src -= 1;
                dst -= 1;
        }

done:
        *dst ^= *(u64 *)walk->iv;
        *(u64 *)walk->iv = last_iv;

        return nbytes;
}

static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                nbytes = __cbc_decrypt(ctx, &walk);
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

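/*
 * Encrypt the current counter block once and XOR the resulting keystream
 * over the final partial block, then advance the counter.
 */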
static void ctr_crypt_final(struct des3_ede_x86_ctx *ctx,
                            struct skcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[DES3_EDE_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        des3_ede_enc_blk(ctx, keystream, ctrblk);
        crypto_xor_cpy(dst, keystream, src, nbytes);

        crypto_inc(ctrblk, DES3_EDE_BLOCK_SIZE);
}

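/*
 * CTR keystream blocks are independent of the data, so three big-endian
 * counter blocks are generated and encrypted together with the 3-way
 * routine, then XORed into the source.  Only full blocks are handled here;
 * a trailing partial block goes through ctr_crypt_final().
 */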
static unsigned int __ctr_crypt(struct des3_ede_x86_ctx *ctx,
                                struct skcipher_walk *walk)
{
        unsigned int bsize = DES3_EDE_BLOCK_SIZE;
        unsigned int nbytes = walk->nbytes;
        __be64 *src = (__be64 *)walk->src.virt.addr;
        __be64 *dst = (__be64 *)walk->dst.virt.addr;
        u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
        __be64 ctrblocks[3];

        /* Process three block batch */
        if (nbytes >= bsize * 3) {
                do {
                        /* create ctrblks for parallel encrypt */
                        ctrblocks[0] = cpu_to_be64(ctrblk++);
                        ctrblocks[1] = cpu_to_be64(ctrblk++);
                        ctrblocks[2] = cpu_to_be64(ctrblk++);

                        des3_ede_enc_blk_3way(ctx, (u8 *)ctrblocks,
                                              (u8 *)ctrblocks);

                        dst[0] = src[0] ^ ctrblocks[0];
                        dst[1] = src[1] ^ ctrblocks[1];
                        dst[2] = src[2] ^ ctrblocks[2];

                        src += 3;
                        dst += 3;
                } while ((nbytes -= bsize * 3) >= bsize * 3);

                if (nbytes < bsize)
                        goto done;
        }

        /* Handle leftovers */
        do {
                ctrblocks[0] = cpu_to_be64(ctrblk++);

                des3_ede_enc_blk(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks);

                dst[0] = src[0] ^ ctrblocks[0];

                src += 1;
                dst += 1;
        } while ((nbytes -= bsize) >= bsize);

done:
        *(__be64 *)walk->iv = cpu_to_be64(ctrblk);
        return nbytes;
}

static int ctr_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) >= DES3_EDE_BLOCK_SIZE) {
                nbytes = __ctr_crypt(ctx, &walk);
                err = skcipher_walk_done(&walk, nbytes);
        }

        if (nbytes) {
                ctr_crypt_final(ctx, &walk);
                err = skcipher_walk_done(&walk, 0);
        }

        return err;
}

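/*
 * Expand the key with the generic DES3-EDE helper, then rotate the odd
 * subkey words into the form the assembler routines expect and build the
 * decryption schedule as the encryption schedule in reverse order.
 */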
static int des3_ede_x86_setkey(struct crypto_tfm *tfm, const u8 *key,
                               unsigned int keylen)
{
        struct des3_ede_x86_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 i, j, tmp;
        int err;

        /* Generate encryption context using generic implementation. */
        err = __des3_ede_setkey(ctx->enc_expkey, &tfm->crt_flags, key, keylen);
        if (err < 0)
                return err;

        /* Fix encryption context for this implementation and form decryption
         * context. */
        j = DES3_EDE_EXPKEY_WORDS - 2;
        for (i = 0; i < DES3_EDE_EXPKEY_WORDS; i += 2, j -= 2) {
                tmp = ror32(ctx->enc_expkey[i + 1], 4);
                ctx->enc_expkey[i + 1] = tmp;

                ctx->dec_expkey[j + 0] = ctx->enc_expkey[i + 0];
                ctx->dec_expkey[j + 1] = tmp;
        }

        return 0;
}

static int des3_ede_x86_setkey_skcipher(struct crypto_skcipher *tfm,
                                        const u8 *key,
                                        unsigned int keylen)
{
        return des3_ede_x86_setkey(&tfm->base, key, keylen);
}

static struct crypto_alg des3_ede_cipher = {
        .cra_name               = "des3_ede",
        .cra_driver_name        = "des3_ede-asm",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = DES3_EDE_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct des3_ede_x86_ctx),
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize        = DES3_EDE_KEY_SIZE,
                        .cia_max_keysize        = DES3_EDE_KEY_SIZE,
                        .cia_setkey             = des3_ede_x86_setkey,
                        .cia_encrypt            = des3_ede_x86_encrypt,
                        .cia_decrypt            = des3_ede_x86_decrypt,
                }
        }
};

static struct skcipher_alg des3_ede_skciphers[] = {
        {
                .base.cra_name          = "ecb(des3_ede)",
                .base.cra_driver_name   = "ecb-des3_ede-asm",
                .base.cra_priority      = 300,
                .base.cra_blocksize     = DES3_EDE_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct des3_ede_x86_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = DES3_EDE_KEY_SIZE,
                .max_keysize            = DES3_EDE_KEY_SIZE,
                .setkey                 = des3_ede_x86_setkey_skcipher,
                .encrypt                = ecb_encrypt,
                .decrypt                = ecb_decrypt,
        }, {
                .base.cra_name          = "cbc(des3_ede)",
                .base.cra_driver_name   = "cbc-des3_ede-asm",
                .base.cra_priority      = 300,
                .base.cra_blocksize     = DES3_EDE_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct des3_ede_x86_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = DES3_EDE_KEY_SIZE,
                .max_keysize            = DES3_EDE_KEY_SIZE,
                .ivsize                 = DES3_EDE_BLOCK_SIZE,
                .setkey                 = des3_ede_x86_setkey_skcipher,
                .encrypt                = cbc_encrypt,
                .decrypt                = cbc_decrypt,
        }, {
                .base.cra_name          = "ctr(des3_ede)",
                .base.cra_driver_name   = "ctr-des3_ede-asm",
                .base.cra_priority      = 300,
                .base.cra_blocksize     = 1,
                .base.cra_ctxsize       = sizeof(struct des3_ede_x86_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = DES3_EDE_KEY_SIZE,
                .max_keysize            = DES3_EDE_KEY_SIZE,
                .ivsize                 = DES3_EDE_BLOCK_SIZE,
                .chunksize              = DES3_EDE_BLOCK_SIZE,
                .setkey                 = des3_ede_x86_setkey_skcipher,
                .encrypt                = ctr_crypt,
                .decrypt                = ctr_crypt,
        }
};

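/*
 * These routines are not called directly; kernel users reach them through
 * the crypto API by algorithm name.  As a rough sketch (not part of this
 * driver): crypto_alloc_skcipher("cbc(des3_ede)", 0, 0) selects the
 * highest-priority registered "cbc(des3_ede)" implementation (this driver
 * registers at priority 300), after which crypto_skcipher_setkey() ends up
 * in des3_ede_x86_setkey_skcipher() and crypto_skcipher_encrypt() in
 * cbc_encrypt().
 */
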
static bool is_blacklisted_cpu(void)
{
        if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
                return false;

        if (boot_cpu_data.x86 == 0x0f) {
                /*
                 * On Pentium 4, des3_ede-x86_64 is slower than the generic C
                 * implementation because of its use of 64-bit rotates (which
                 * are really slow on P4). Therefore blacklist P4s.
                 */
                return true;
        }

        return false;
}

static int force;
module_param(force, int, 0);
MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");

static int __init des3_ede_x86_init(void)
{
        int err;

        if (!force && is_blacklisted_cpu()) {
                pr_info("des3_ede-x86_64: performance on this CPU would be suboptimal: disabling des3_ede-x86_64.\n");
                return -ENODEV;
        }

        err = crypto_register_alg(&des3_ede_cipher);
        if (err)
                return err;

        err = crypto_register_skciphers(des3_ede_skciphers,
                                        ARRAY_SIZE(des3_ede_skciphers));
        if (err)
                crypto_unregister_alg(&des3_ede_cipher);

        return err;
}

static void __exit des3_ede_x86_fini(void)
{
        crypto_unregister_alg(&des3_ede_cipher);
        crypto_unregister_skciphers(des3_ede_skciphers,
                                    ARRAY_SIZE(des3_ede_skciphers));
}

module_init(des3_ede_x86_init);
module_exit(des3_ede_x86_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Triple DES EDE Cipher Algorithm, asm optimized");
MODULE_ALIAS_CRYPTO("des3_ede");
MODULE_ALIAS_CRYPTO("des3_ede-asm");
MODULE_AUTHOR("Jussi Kivilinna <jussi.kivilinna@iki.fi>");