/* crypto/testmgr.c */
1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 *
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
22
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35 #include <crypto/kpp.h>
36 #include <crypto/acompress.h>
37
38 #include "internal.h"
39
/* "notests" module parameter: when true, skip all crypto self-tests. */
static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");
43
44 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
45
/*
 * a perfect nop: self-tests are compiled out
 * (CONFIG_CRYPTO_MANAGER_DISABLE_TESTS), so report success for every
 * algorithm without doing any work.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}
51
52 #else
53
54 #include "testmgr.h"
55
56 /*
57 * Need slab memory for testing (size in number of pages).
58 */
59 #define XBUFSIZE 8
60
61 /*
62 * Indexes into the xbuf to simulate cross-page access.
63 */
64 #define IDX1 32
65 #define IDX2 32400
66 #define IDX3 1511
67 #define IDX4 8193
68 #define IDX5 22222
69 #define IDX6 17101
70 #define IDX7 27333
71 #define IDX8 3000
72
73 /*
74 * Used by test_cipher()
75 */
76 #define ENCRYPT 1
77 #define DECRYPT 0
78
/*
 * Per-algorithm-type collections of test vectors.  Each suite pairs a
 * vector array with its element count.
 */
struct aead_test_suite {
	const struct aead_testvec *vecs;
	unsigned int count;
};

struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};

/* Compression is tested in both directions, so it carries two sets. */
struct comp_test_suite {
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};

struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};

/*
 * One table entry per tested algorithm: the algorithm name, the test
 * routine to run for it, and the matching vector suite.
 */
struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};

/* Offsets into the xbuf pages used to build cross-page scatterlists. */
static const unsigned int IDX[8] = {
	IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
141
142 static void hexdump(unsigned char *buf, unsigned int len)
143 {
144 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
145 16, 1,
146 buf, len, false);
147 }
148
149 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
150 {
151 int i;
152
153 for (i = 0; i < XBUFSIZE; i++) {
154 buf[i] = (void *)__get_free_page(GFP_KERNEL);
155 if (!buf[i])
156 goto err_free_buf;
157 }
158
159 return 0;
160
161 err_free_buf:
162 while (i-- > 0)
163 free_page((unsigned long)buf[i]);
164
165 return -ENOMEM;
166 }
167
168 static void testmgr_free_buf(char *buf[XBUFSIZE])
169 {
170 int i;
171
172 for (i = 0; i < XBUFSIZE; i++)
173 free_page((unsigned long)buf[i]);
174 }
175
/*
 * Verify that the first @size bytes of @result all still hold the
 * sentinel byte @c.  Used to prove that export()/import() did not
 * scribble on req->result.  Returns 0 if untouched, -EINVAL otherwise.
 */
static int ahash_guard_result(char *result, char c, int size)
{
	int i = 0;

	while (i < size) {
		if (result[i] != c)
			return -EINVAL;
		i++;
	}

	return 0;
}
187
/*
 * Exercise the export()/import() path of an ahash in the middle of a
 * chunked hash: export the current state, free and reallocate the
 * request, import the state back and feed it chunk @k of the template
 * (starting at plaintext offset @temp).
 *
 * Guard bytes are appended to the exported-state buffer and
 * req->result is pre-filled with 1s so that both a state-buffer
 * overrun by export() and any stray write to req->result by
 * export()/import() are detected.
 *
 * On success *preq is replaced with the newly allocated request and 0
 * is returned; on failure a negative error code is returned and the
 * request is freed.
 */
static int ahash_partial_update(struct ahash_request **preq,
	struct crypto_ahash *tfm, const struct hash_testvec *template,
	void *hash_buff, int k, int temp, struct scatterlist *sg,
	const char *algo, char *result, struct crypto_wait *wait)
{
	char *state;
	struct ahash_request *req;
	int statesize, ret = -EINVAL;
	static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 };
	int digestsize = crypto_ahash_digestsize(tfm);

	req = *preq;
	statesize = crypto_ahash_statesize(
		crypto_ahash_reqtfm(req));
	state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
	if (!state) {
		pr_err("alg: hash: Failed to alloc state for %s\n", algo);
		goto out_nostate;
	}
	/* guard bytes after the state catch export() overruns */
	memcpy(state + statesize, guard, sizeof(guard));
	/* sentinel fill: export()/import() must not touch req->result */
	memset(result, 1, digestsize);
	ret = crypto_ahash_export(req, state);
	WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
	if (ret) {
		pr_err("alg: hash: Failed to export() for %s\n", algo);
		goto out;
	}
	ret = ahash_guard_result(result, 1, digestsize);
	if (ret) {
		pr_err("alg: hash: Failed, export used req->result for %s\n",
		       algo);
		goto out;
	}
	/* reallocate the request to prove the exported state is
	 * self-contained */
	ahash_request_free(req);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req,
		CRYPTO_TFM_REQ_MAY_BACKLOG,
		crypto_req_done, wait);

	memcpy(hash_buff, template->plaintext + temp,
	       template->tap[k]);
	sg_init_one(&sg[0], hash_buff, template->tap[k]);
	ahash_request_set_crypt(req, sg, result, template->tap[k]);
	ret = crypto_ahash_import(req, state);
	if (ret) {
		pr_err("alg: hash: Failed to import() for %s\n", algo);
		goto out;
	}
	ret = ahash_guard_result(result, 1, digestsize);
	if (ret) {
		pr_err("alg: hash: Failed, import used req->result for %s\n",
		       algo);
		goto out;
	}
	ret = crypto_wait_req(crypto_ahash_update(req), wait);
	if (ret)
		goto out;
	*preq = req;
	ret = 0;
	goto out_noreq;
out:
	ahash_request_free(req);
out_noreq:
	kfree(state);
out_nostate:
	return ret;
}
259
/* Which finalisation path __test_hash() should exercise. */
enum hash_test {
	HASH_TEST_DIGEST,	/* one-shot ->digest() */
	HASH_TEST_FINAL,	/* ->init() + ->update() + ->final() */
	HASH_TEST_FINUP		/* ->init() + ->finup() */
};
265
/*
 * Run the @tcount vectors in @template against @tfm using the
 * finalisation style selected by @test_type.  Input data is copied
 * into page-backed buffers at @align_offset to exercise
 * (mis)alignment handling.
 *
 * Three passes are made:
 *   1. linear vectors (template[i].np == 0), single scatterlist entry;
 *   2. chunked vectors scattered across pages via the IDX offsets;
 *   3. partial-update vectors using export()/import() between chunks.
 * Passes 2 and 3 run only for HASH_TEST_DIGEST with align_offset == 0.
 *
 * Returns 0 on success or a negative error code.
 */
static int __test_hash(struct crypto_ahash *tfm,
		       const struct hash_testvec *template, unsigned int tcount,
		       enum hash_test test_type, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	size_t digest_size = crypto_ahash_digestsize(tfm);
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char *result;
	char *key;
	struct ahash_request *req;
	struct crypto_wait wait;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	result = kmalloc(digest_size, GFP_KERNEL);
	if (!result)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_nobuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	crypto_init_wait(&wait);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	/* pass 1: linear (non-chunked) vectors */
	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
			goto out;

		j++;
		memset(result, 0, digest_size);

		hash_buff = xbuf[0];
		hash_buff += align_offset;

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		switch (test_type) {
		case HASH_TEST_DIGEST:
			ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
			if (ret) {
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			break;

		case HASH_TEST_FINAL:
			/* sentinel fill: init/update must not write
			 * req->result before final() */
			memset(result, 1, digest_size);
			ret = crypto_wait_req(crypto_ahash_init(req), &wait);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = ahash_guard_result(result, 1, digest_size);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: used req->result\n", j, algo);
				goto out;
			}
			ret = crypto_wait_req(crypto_ahash_update(req), &wait);
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = ahash_guard_result(result, 1, digest_size);
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: used req->result\n", j, algo);
				goto out;
			}
			ret = crypto_wait_req(crypto_ahash_final(req), &wait);
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			break;

		case HASH_TEST_FINUP:
			/* same sentinel check, but finup() combines
			 * update+final */
			memset(result, 1, digest_size);
			ret = crypto_wait_req(crypto_ahash_init(req), &wait);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = ahash_guard_result(result, 1, digest_size);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: used req->result\n", j, algo);
				goto out;
			}
			ret = crypto_wait_req(crypto_ahash_finup(req), &wait);
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			break;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* chunked and partial-update passes only apply to
	 * HASH_TEST_DIGEST (== 0) */
	if (test_type)
		goto out;

	/* pass 2: chunked vectors, scattered across pages */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;
		memset(result, 0, digest_size);

		temp = 0;
		sg_init_table(sg, template[i].np);
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].plaintext + temp,
					  template[i].tap[k]),
				   template[i].tap[k]);
			temp += template[i].tap[k];
		}

		if (template[i].ksize) {
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			crypto_ahash_clear_flags(tfm, ~0);
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);

			if (ret) {
				printk(KERN_ERR "alg: hash: setkey "
				       "failed on chunking test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
		if (ret) {
			pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Chunking test %d "
			       "failed for %s\n", j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* pass 3: partial update exercise (export/import between chunks) */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (template[i].np < 2)
			continue;

		j++;
		memset(result, 0, digest_size);

		ret = -EINVAL;
		hash_buff = xbuf[0];
		memcpy(hash_buff, template[i].plaintext,
		       template[i].tap[0]);
		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out;
			}
		}

		/* hash the first chunk directly ... */
		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
		ret = crypto_wait_req(crypto_ahash_init(req), &wait);
		if (ret) {
			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}
		ret = crypto_wait_req(crypto_ahash_update(req), &wait);
		if (ret) {
			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}

		/* ... then every further chunk via export/import; the
		 * helper may replace req on success */
		temp = template[i].tap[0];
		for (k = 1; k < template[i].np; k++) {
			ret = ahash_partial_update(&req, tfm, &template[i],
				hash_buff, k, temp, &sg[0], algo, result,
				&wait);
			if (ret) {
				pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				/* helper already freed req on failure */
				goto out_noreq;
			}
			temp += template[i].tap[k];
		}
		ret = crypto_wait_req(crypto_ahash_final(req), &wait);
		if (ret) {
			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}
		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			pr_err("alg: hash: Partial Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	kfree(key);
	kfree(result);
	return ret;
}
573
574 static int test_hash(struct crypto_ahash *tfm,
575 const struct hash_testvec *template,
576 unsigned int tcount, enum hash_test test_type)
577 {
578 unsigned int alignmask;
579 int ret;
580
581 ret = __test_hash(tfm, template, tcount, test_type, 0);
582 if (ret)
583 return ret;
584
585 /* test unaligned buffers, check with one byte offset */
586 ret = __test_hash(tfm, template, tcount, test_type, 1);
587 if (ret)
588 return ret;
589
590 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
591 if (alignmask) {
592 /* Check if alignment mask for tfm is correctly set. */
593 ret = __test_hash(tfm, template, tcount, test_type,
594 alignmask + 1);
595 if (ret)
596 return ret;
597 }
598
599 return 0;
600 }
601
/*
 * Run the @tcount AEAD vectors in @template against @tfm in direction
 * @enc (ENCRYPT/DECRYPT).  When @diff_dst is true, output goes to a
 * separate buffer (xoutbuf) instead of in place; @align_offset shifts
 * the data buffers to exercise (mis)alignment handling.
 *
 * Two passes are made:
 *   1. linear vectors (template[i].np == 0) with a small contiguous
 *      scatterlist: [assoc][data];
 *   2. chunked vectors (only when align_offset == 0) with both assoc
 *      data and payload scattered across pages via the IDX offsets,
 *      including guard-byte checks for writes past each chunk.
 *
 * Vectors marked novrfy are expected to fail decryption with
 * -EBADMSG.  Returns 0 on success or a negative error code.
 */
static int __test_aead(struct crypto_aead *tfm, int enc,
		       const struct aead_testvec *template, unsigned int tcount,
		       const bool diff_dst, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e, *d;
	struct crypto_wait wait;
	unsigned int authsize, iv_len;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_noxbuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
	sg = kmalloc(array3_size(sizeof(*sg), 8, (diff_dst ? 4 : 2)),
		     GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[16];

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	iv_len = crypto_aead_ivsize(tfm);

	/* pass 1: linear (non-chunked) vectors */
	for (i = 0, j = 0; i < tcount; i++) {
		const char *input, *expected_output;
		unsigned int inlen, outlen;
		char *inbuf, *outbuf, *assocbuf;

		if (template[i].np)
			continue;
		if (enc) {
			if (template[i].novrfy)
				continue;
			input = template[i].ptext;
			inlen = template[i].plen;
			expected_output = template[i].ctext;
			outlen = template[i].clen;
		} else {
			input = template[i].ctext;
			inlen = template[i].clen;
			expected_output = template[i].ptext;
			outlen = template[i].plen;
		}

		j++;

		/* some templates have no input data but they will
		 * touch input
		 */
		inbuf = xbuf[0] + align_offset;
		assocbuf = axbuf[0];

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].clen > PAGE_SIZE ||
			    template[i].alen > PAGE_SIZE))
			goto out;

		memcpy(inbuf, input, inlen);
		memcpy(assocbuf, template[i].assoc, template[i].alen);
		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, iv_len);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm,
					      CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen,
			       MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		/* setkey must fail exactly when the vector says so */
		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* the authentication tag length is clen - plen */
		authsize = template[i].clen - template[i].plen;
		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		/* sg layout: optional assoc entry followed by the data
		 * buffer sized for ciphertext (so the tag fits) */
		k = !!template[i].alen;
		sg_init_table(sg, k + 1);
		sg_set_buf(&sg[0], assocbuf, template[i].alen);
		sg_set_buf(&sg[k], inbuf, template[i].clen);
		outbuf = inbuf;

		if (diff_dst) {
			sg_init_table(sgout, k + 1);
			sg_set_buf(&sgout[0], assocbuf, template[i].alen);

			outbuf = xoutbuf[0] + align_offset;
			sg_set_buf(&sgout[k], outbuf, template[i].clen);
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, inlen,
				       iv);

		aead_request_set_ad(req, template[i].alen);

		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
				      : crypto_aead_decrypt(req), &wait);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		if (memcmp(outbuf, expected_output, outlen)) {
			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
			       d, j, e, algo);
			hexdump(outbuf, outlen);
			ret = -EINVAL;
			goto out;
		}
	}

	/* pass 2: chunked vectors, assoc and payload scattered across
	 * pages */
	for (i = 0, j = 0; i < tcount; i++) {
		const char *input, *expected_output;
		unsigned int inlen, outlen;

		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		if (enc) {
			if (template[i].novrfy)
				continue;
			input = template[i].ptext;
			inlen = template[i].plen;
			expected_output = template[i].ctext;
			outlen = template[i].clen;
		} else {
			input = template[i].ctext;
			inlen = template[i].clen;
			expected_output = template[i].ptext;
			outlen = template[i].plen;
		}

		j++;

		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, MAX_IVLEN);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm,
					      CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen, MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = template[i].clen - template[i].plen;

		ret = -EINVAL;
		sg_init_table(sg, template[i].anp + template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].anp + template[i].np);

		ret = -EINVAL;
		/* scatter the assoc data into anp chunks (atap sizes) */
		for (k = 0, temp = 0; k < template[i].anp; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].atap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].assoc + temp,
					  template[i].atap[k]),
				   template[i].atap[k]);
			if (diff_dst)
				sg_set_buf(&sgout[k],
					   axbuf[IDX[k] >> PAGE_SHIFT] +
					   offset_in_page(IDX[k]),
					   template[i].atap[k]);
			temp += template[i].atap[k];
		}

		/* scatter the payload into np chunks (tap sizes); the
		 * last decrypt chunk also carries the auth tag */
		for (k = 0, temp = 0; k < template[i].np; k++) {
			n = template[i].tap[k];
			if (k == template[i].np - 1 && !enc)
				n += authsize;

			if (WARN_ON(offset_in_page(IDX[k]) + n > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
			memcpy(q, input + temp, n);
			sg_set_buf(&sg[template[i].anp + k], q, n);

			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memset(q, 0, n);

				sg_set_buf(&sgout[template[i].anp + k], q, n);
			}

			/* zero a guard byte after the chunk so writes
			 * past it can be detected later */
			if (k == template[i].np - 1 && enc)
				n += authsize;
			if (offset_in_page(q) + n < PAGE_SIZE)
				q[n] = 0;

			temp += n;
		}

		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		if (enc) {
			/* grow the last chunk so the generated tag fits */
			if (WARN_ON(sg[template[i].anp + k - 1].offset +
				    sg[template[i].anp + k - 1].length +
				    authsize > PAGE_SIZE)) {
				ret = -EINVAL;
				goto out;
			}

			if (diff_dst)
				sgout[template[i].anp + k - 1].length +=
					authsize;
			sg[template[i].anp + k - 1].length += authsize;
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       inlen, iv);

		aead_request_set_ad(req, template[i].alen);

		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
				      : crypto_aead_decrypt(req), &wait);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		/* verify each output chunk and the guard bytes after it */
		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			n = template[i].tap[k];
			if (k == template[i].np - 1 && enc)
				n += authsize;

			if (memcmp(q, expected_output + temp, n)) {
				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, n);
				goto out;
			}

			q += n;
			if (k == template[i].np - 1 && !enc) {
				/* in-place decrypt: the stripped tag
				 * should still be intact after the
				 * plaintext */
				if (!diff_dst && memcmp(q, input + temp + n,
							authsize))
					n = authsize;
				else
					n = 0;
			} else {
				/* count non-zero bytes written past the
				 * chunk */
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
			}
			if (n) {
				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}

			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	kfree(sg);
out_nosg:
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(key);
	kfree(iv);
	return ret;
}
1007
1008 static int test_aead(struct crypto_aead *tfm, int enc,
1009 const struct aead_testvec *template, unsigned int tcount)
1010 {
1011 unsigned int alignmask;
1012 int ret;
1013
1014 /* test 'dst == src' case */
1015 ret = __test_aead(tfm, enc, template, tcount, false, 0);
1016 if (ret)
1017 return ret;
1018
1019 /* test 'dst != src' case */
1020 ret = __test_aead(tfm, enc, template, tcount, true, 0);
1021 if (ret)
1022 return ret;
1023
1024 /* test unaligned buffers, check with one byte offset */
1025 ret = __test_aead(tfm, enc, template, tcount, true, 1);
1026 if (ret)
1027 return ret;
1028
1029 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1030 if (alignmask) {
1031 /* Check if alignment mask for tfm is correctly set. */
1032 ret = __test_aead(tfm, enc, template, tcount, true,
1033 alignmask + 1);
1034 if (ret)
1035 return ret;
1036 }
1037
1038 return 0;
1039 }
1040
/*
 * Run the @tcount single-block cipher vectors in @template against
 * @tfm in direction @enc.  Each vector's data is copied into a page
 * buffer and transformed in place one cipher block at a time; only
 * linear vectors (template[i].np == 0) are used.
 *
 * Vectors flagged fail must be rejected by setkey; wk sets
 * CRYPTO_TFM_REQ_FORBID_WEAK_KEYS first.  Returns 0 on success or a
 * negative error code.
 */
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       const struct cipher_testvec *template,
		       unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	const char *input, *result;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		input = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, input, template[i].len);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		/* setkey must fail exactly when the vector says so */
		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (template[i].fail == !ret) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* transform in place, one cipher block at a time */
		for (k = 0; k < template[i].len;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, result, template[i].len)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
1122
1123 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1124 const struct cipher_testvec *template,
1125 unsigned int tcount,
1126 const bool diff_dst, const int align_offset)
1127 {
1128 const char *algo =
1129 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1130 unsigned int i, j, k, n, temp;
1131 char *q;
1132 struct skcipher_request *req;
1133 struct scatterlist sg[8];
1134 struct scatterlist sgout[8];
1135 const char *e, *d;
1136 struct crypto_wait wait;
1137 const char *input, *result;
1138 void *data;
1139 char iv[MAX_IVLEN];
1140 char *xbuf[XBUFSIZE];
1141 char *xoutbuf[XBUFSIZE];
1142 int ret = -ENOMEM;
1143 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1144
1145 if (testmgr_alloc_buf(xbuf))
1146 goto out_nobuf;
1147
1148 if (diff_dst && testmgr_alloc_buf(xoutbuf))
1149 goto out_nooutbuf;
1150
1151 if (diff_dst)
1152 d = "-ddst";
1153 else
1154 d = "";
1155
1156 if (enc == ENCRYPT)
1157 e = "encryption";
1158 else
1159 e = "decryption";
1160
1161 crypto_init_wait(&wait);
1162
1163 req = skcipher_request_alloc(tfm, GFP_KERNEL);
1164 if (!req) {
1165 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1166 d, algo);
1167 goto out;
1168 }
1169
1170 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1171 crypto_req_done, &wait);
1172
1173 j = 0;
1174 for (i = 0; i < tcount; i++) {
1175 if (template[i].np && !template[i].also_non_np)
1176 continue;
1177
1178 if (fips_enabled && template[i].fips_skip)
1179 continue;
1180
1181 if (template[i].iv && !(template[i].generates_iv && enc))
1182 memcpy(iv, template[i].iv, ivsize);
1183 else
1184 memset(iv, 0, MAX_IVLEN);
1185
1186 input = enc ? template[i].ptext : template[i].ctext;
1187 result = enc ? template[i].ctext : template[i].ptext;
1188 j++;
1189 ret = -EINVAL;
1190 if (WARN_ON(align_offset + template[i].len > PAGE_SIZE))
1191 goto out;
1192
1193 data = xbuf[0];
1194 data += align_offset;
1195 memcpy(data, input, template[i].len);
1196
1197 crypto_skcipher_clear_flags(tfm, ~0);
1198 if (template[i].wk)
1199 crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
1200
1201 ret = crypto_skcipher_setkey(tfm, template[i].key,
1202 template[i].klen);
1203 if (template[i].fail == !ret) {
1204 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1205 d, j, algo, crypto_skcipher_get_flags(tfm));
1206 goto out;
1207 } else if (ret)
1208 continue;
1209
1210 sg_init_one(&sg[0], data, template[i].len);
1211 if (diff_dst) {
1212 data = xoutbuf[0];
1213 data += align_offset;
1214 sg_init_one(&sgout[0], data, template[i].len);
1215 }
1216
1217 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1218 template[i].len, iv);
1219 ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
1220 crypto_skcipher_decrypt(req), &wait);
1221
1222 if (ret) {
1223 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1224 d, e, j, algo, -ret);
1225 goto out;
1226 }
1227
1228 q = data;
1229 if (memcmp(q, result, template[i].len)) {
1230 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1231 d, j, e, algo);
1232 hexdump(q, template[i].len);
1233 ret = -EINVAL;
1234 goto out;
1235 }
1236
1237 if (template[i].generates_iv && enc &&
1238 memcmp(iv, template[i].iv, crypto_skcipher_ivsize(tfm))) {
1239 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1240 d, j, e, algo);
1241 hexdump(iv, crypto_skcipher_ivsize(tfm));
1242 ret = -EINVAL;
1243 goto out;
1244 }
1245 }
1246
1247 j = 0;
1248 for (i = 0; i < tcount; i++) {
1249 /* alignment tests are only done with continuous buffers */
1250 if (align_offset != 0)
1251 break;
1252
1253 if (!template[i].np)
1254 continue;
1255
1256 if (fips_enabled && template[i].fips_skip)
1257 continue;
1258
1259 if (template[i].iv && !(template[i].generates_iv && enc))
1260 memcpy(iv, template[i].iv, ivsize);
1261 else
1262 memset(iv, 0, MAX_IVLEN);
1263
1264 input = enc ? template[i].ptext : template[i].ctext;
1265 result = enc ? template[i].ctext : template[i].ptext;
1266 j++;
1267 crypto_skcipher_clear_flags(tfm, ~0);
1268 if (template[i].wk)
1269 crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
1270
1271 ret = crypto_skcipher_setkey(tfm, template[i].key,
1272 template[i].klen);
1273 if (template[i].fail == !ret) {
1274 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1275 d, j, algo, crypto_skcipher_get_flags(tfm));
1276 goto out;
1277 } else if (ret)
1278 continue;
1279
1280 temp = 0;
1281 ret = -EINVAL;
1282 sg_init_table(sg, template[i].np);
1283 if (diff_dst)
1284 sg_init_table(sgout, template[i].np);
1285 for (k = 0; k < template[i].np; k++) {
1286 if (WARN_ON(offset_in_page(IDX[k]) +
1287 template[i].tap[k] > PAGE_SIZE))
1288 goto out;
1289
1290 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1291
1292 memcpy(q, input + temp, template[i].tap[k]);
1293
1294 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1295 q[template[i].tap[k]] = 0;
1296
1297 sg_set_buf(&sg[k], q, template[i].tap[k]);
1298 if (diff_dst) {
1299 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1300 offset_in_page(IDX[k]);
1301
1302 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1303
1304 memset(q, 0, template[i].tap[k]);
1305 if (offset_in_page(q) +
1306 template[i].tap[k] < PAGE_SIZE)
1307 q[template[i].tap[k]] = 0;
1308 }
1309
1310 temp += template[i].tap[k];
1311 }
1312
1313 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1314 template[i].len, iv);
1315
1316 ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
1317 crypto_skcipher_decrypt(req), &wait);
1318
1319 if (ret) {
1320 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1321 d, e, j, algo, -ret);
1322 goto out;
1323 }
1324
1325 temp = 0;
1326 ret = -EINVAL;
1327 for (k = 0; k < template[i].np; k++) {
1328 if (diff_dst)
1329 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1330 offset_in_page(IDX[k]);
1331 else
1332 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1333 offset_in_page(IDX[k]);
1334
1335 if (memcmp(q, result + temp, template[i].tap[k])) {
1336 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1337 d, j, e, k, algo);
1338 hexdump(q, template[i].tap[k]);
1339 goto out;
1340 }
1341
1342 q += template[i].tap[k];
1343 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1344 ;
1345 if (n) {
1346 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1347 d, j, e, k, algo, n);
1348 hexdump(q, n);
1349 goto out;
1350 }
1351 temp += template[i].tap[k];
1352 }
1353 }
1354
1355 ret = 0;
1356
1357 out:
1358 skcipher_request_free(req);
1359 if (diff_dst)
1360 testmgr_free_buf(xoutbuf);
1361 out_nooutbuf:
1362 testmgr_free_buf(xbuf);
1363 out_nobuf:
1364 return ret;
1365 }
1366
1367 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1368 const struct cipher_testvec *template,
1369 unsigned int tcount)
1370 {
1371 unsigned int alignmask;
1372 int ret;
1373
1374 /* test 'dst == src' case */
1375 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1376 if (ret)
1377 return ret;
1378
1379 /* test 'dst != src' case */
1380 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1381 if (ret)
1382 return ret;
1383
1384 /* test unaligned buffers, check with one byte offset */
1385 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1386 if (ret)
1387 return ret;
1388
1389 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1390 if (alignmask) {
1391 /* Check if alignment mask for tfm is correctly set. */
1392 ret = __test_skcipher(tfm, enc, template, tcount, true,
1393 alignmask + 1);
1394 if (ret)
1395 return ret;
1396 }
1397
1398 return 0;
1399 }
1400
/*
 * Known-answer test for a synchronous (crypto_comp) compression tfm.
 *
 * Each compression vector is compressed and then round-tripped through
 * decompression; the round trip must reproduce the original input
 * exactly.  Each decompression vector's output is compared against the
 * expected plaintext byte-for-byte.
 *
 * @tfm:       transform under test
 * @ctemplate: compression vectors
 * @dtemplate: decompression vectors
 * @ctcount:   number of compression vectors
 * @dtcount:   number of decompression vectors
 *
 * Returns 0 on success or a negative errno on the first failure.
 */
static int test_comp(struct crypto_comp *tfm,
		     const struct comp_testvec *ctemplate,
		     const struct comp_testvec *dtemplate,
		     int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	char *output, *decomp_output;
	unsigned int i;
	int ret;

	/* Scratch buffers; COMP_BUF_SIZE bounds every output below. */
	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_output) {
		kfree(output);
		return -ENOMEM;
	}

	for (i = 0; i < ctcount; i++) {
		int ilen;
		/* dlen is in/out: capacity in, bytes produced out */
		unsigned int dlen = COMP_BUF_SIZE;

		memset(output, 0, COMP_BUF_SIZE);
		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		/* Round-trip: decompress what we just compressed. */
		ilen = dlen;
		dlen = COMP_BUF_SIZE;
		ret = crypto_comp_decompress(tfm, output,
					     ilen, decomp_output, &dlen);
		if (ret) {
			pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
			       i + 1, algo, -ret);
			goto out;
		}

		/* The round trip must restore the original length... */
		if (dlen != ctemplate[i].inlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		/* ...and the original bytes. */
		if (memcmp(decomp_output, ctemplate[i].input,
			   ctemplate[i].inlen)) {
			pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
			       i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, decomp_output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	kfree(decomp_output);
	kfree(output);
	return ret;
}
1506
1507 static int test_acomp(struct crypto_acomp *tfm,
1508 const struct comp_testvec *ctemplate,
1509 const struct comp_testvec *dtemplate,
1510 int ctcount, int dtcount)
1511 {
1512 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
1513 unsigned int i;
1514 char *output, *decomp_out;
1515 int ret;
1516 struct scatterlist src, dst;
1517 struct acomp_req *req;
1518 struct crypto_wait wait;
1519
1520 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1521 if (!output)
1522 return -ENOMEM;
1523
1524 decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1525 if (!decomp_out) {
1526 kfree(output);
1527 return -ENOMEM;
1528 }
1529
1530 for (i = 0; i < ctcount; i++) {
1531 unsigned int dlen = COMP_BUF_SIZE;
1532 int ilen = ctemplate[i].inlen;
1533 void *input_vec;
1534
1535 input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
1536 if (!input_vec) {
1537 ret = -ENOMEM;
1538 goto out;
1539 }
1540
1541 memset(output, 0, dlen);
1542 crypto_init_wait(&wait);
1543 sg_init_one(&src, input_vec, ilen);
1544 sg_init_one(&dst, output, dlen);
1545
1546 req = acomp_request_alloc(tfm);
1547 if (!req) {
1548 pr_err("alg: acomp: request alloc failed for %s\n",
1549 algo);
1550 kfree(input_vec);
1551 ret = -ENOMEM;
1552 goto out;
1553 }
1554
1555 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1556 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1557 crypto_req_done, &wait);
1558
1559 ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
1560 if (ret) {
1561 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1562 i + 1, algo, -ret);
1563 kfree(input_vec);
1564 acomp_request_free(req);
1565 goto out;
1566 }
1567
1568 ilen = req->dlen;
1569 dlen = COMP_BUF_SIZE;
1570 sg_init_one(&src, output, ilen);
1571 sg_init_one(&dst, decomp_out, dlen);
1572 crypto_init_wait(&wait);
1573 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1574
1575 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
1576 if (ret) {
1577 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1578 i + 1, algo, -ret);
1579 kfree(input_vec);
1580 acomp_request_free(req);
1581 goto out;
1582 }
1583
1584 if (req->dlen != ctemplate[i].inlen) {
1585 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
1586 i + 1, algo, req->dlen);
1587 ret = -EINVAL;
1588 kfree(input_vec);
1589 acomp_request_free(req);
1590 goto out;
1591 }
1592
1593 if (memcmp(input_vec, decomp_out, req->dlen)) {
1594 pr_err("alg: acomp: Compression test %d failed for %s\n",
1595 i + 1, algo);
1596 hexdump(output, req->dlen);
1597 ret = -EINVAL;
1598 kfree(input_vec);
1599 acomp_request_free(req);
1600 goto out;
1601 }
1602
1603 kfree(input_vec);
1604 acomp_request_free(req);
1605 }
1606
1607 for (i = 0; i < dtcount; i++) {
1608 unsigned int dlen = COMP_BUF_SIZE;
1609 int ilen = dtemplate[i].inlen;
1610 void *input_vec;
1611
1612 input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
1613 if (!input_vec) {
1614 ret = -ENOMEM;
1615 goto out;
1616 }
1617
1618 memset(output, 0, dlen);
1619 crypto_init_wait(&wait);
1620 sg_init_one(&src, input_vec, ilen);
1621 sg_init_one(&dst, output, dlen);
1622
1623 req = acomp_request_alloc(tfm);
1624 if (!req) {
1625 pr_err("alg: acomp: request alloc failed for %s\n",
1626 algo);
1627 kfree(input_vec);
1628 ret = -ENOMEM;
1629 goto out;
1630 }
1631
1632 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1633 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1634 crypto_req_done, &wait);
1635
1636 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
1637 if (ret) {
1638 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1639 i + 1, algo, -ret);
1640 kfree(input_vec);
1641 acomp_request_free(req);
1642 goto out;
1643 }
1644
1645 if (req->dlen != dtemplate[i].outlen) {
1646 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
1647 i + 1, algo, req->dlen);
1648 ret = -EINVAL;
1649 kfree(input_vec);
1650 acomp_request_free(req);
1651 goto out;
1652 }
1653
1654 if (memcmp(output, dtemplate[i].output, req->dlen)) {
1655 pr_err("alg: acomp: Decompression test %d failed for %s\n",
1656 i + 1, algo);
1657 hexdump(output, req->dlen);
1658 ret = -EINVAL;
1659 kfree(input_vec);
1660 acomp_request_free(req);
1661 goto out;
1662 }
1663
1664 kfree(input_vec);
1665 acomp_request_free(req);
1666 }
1667
1668 ret = 0;
1669
1670 out:
1671 kfree(decomp_out);
1672 kfree(output);
1673 return ret;
1674 }
1675
1676 static int test_cprng(struct crypto_rng *tfm,
1677 const struct cprng_testvec *template,
1678 unsigned int tcount)
1679 {
1680 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1681 int err = 0, i, j, seedsize;
1682 u8 *seed;
1683 char result[32];
1684
1685 seedsize = crypto_rng_seedsize(tfm);
1686
1687 seed = kmalloc(seedsize, GFP_KERNEL);
1688 if (!seed) {
1689 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1690 "for %s\n", algo);
1691 return -ENOMEM;
1692 }
1693
1694 for (i = 0; i < tcount; i++) {
1695 memset(result, 0, 32);
1696
1697 memcpy(seed, template[i].v, template[i].vlen);
1698 memcpy(seed + template[i].vlen, template[i].key,
1699 template[i].klen);
1700 memcpy(seed + template[i].vlen + template[i].klen,
1701 template[i].dt, template[i].dtlen);
1702
1703 err = crypto_rng_reset(tfm, seed, seedsize);
1704 if (err) {
1705 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1706 "for %s\n", algo);
1707 goto out;
1708 }
1709
1710 for (j = 0; j < template[i].loops; j++) {
1711 err = crypto_rng_get_bytes(tfm, result,
1712 template[i].rlen);
1713 if (err < 0) {
1714 printk(KERN_ERR "alg: cprng: Failed to obtain "
1715 "the correct amount of random data for "
1716 "%s (requested %d)\n", algo,
1717 template[i].rlen);
1718 goto out;
1719 }
1720 }
1721
1722 err = memcmp(result, template[i].result,
1723 template[i].rlen);
1724 if (err) {
1725 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1726 i, algo);
1727 hexdump(result, template[i].rlen);
1728 err = -EINVAL;
1729 goto out;
1730 }
1731 }
1732
1733 out:
1734 kfree(seed);
1735 return err;
1736 }
1737
1738 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1739 u32 type, u32 mask)
1740 {
1741 const struct aead_test_suite *suite = &desc->suite.aead;
1742 struct crypto_aead *tfm;
1743 int err;
1744
1745 tfm = crypto_alloc_aead(driver, type, mask);
1746 if (IS_ERR(tfm)) {
1747 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1748 "%ld\n", driver, PTR_ERR(tfm));
1749 return PTR_ERR(tfm);
1750 }
1751
1752 err = test_aead(tfm, ENCRYPT, suite->vecs, suite->count);
1753 if (!err)
1754 err = test_aead(tfm, DECRYPT, suite->vecs, suite->count);
1755
1756 crypto_free_aead(tfm);
1757 return err;
1758 }
1759
1760 static int alg_test_cipher(const struct alg_test_desc *desc,
1761 const char *driver, u32 type, u32 mask)
1762 {
1763 const struct cipher_test_suite *suite = &desc->suite.cipher;
1764 struct crypto_cipher *tfm;
1765 int err;
1766
1767 tfm = crypto_alloc_cipher(driver, type, mask);
1768 if (IS_ERR(tfm)) {
1769 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1770 "%s: %ld\n", driver, PTR_ERR(tfm));
1771 return PTR_ERR(tfm);
1772 }
1773
1774 err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
1775 if (!err)
1776 err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);
1777
1778 crypto_free_cipher(tfm);
1779 return err;
1780 }
1781
1782 static int alg_test_skcipher(const struct alg_test_desc *desc,
1783 const char *driver, u32 type, u32 mask)
1784 {
1785 const struct cipher_test_suite *suite = &desc->suite.cipher;
1786 struct crypto_skcipher *tfm;
1787 int err;
1788
1789 tfm = crypto_alloc_skcipher(driver, type, mask);
1790 if (IS_ERR(tfm)) {
1791 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1792 "%s: %ld\n", driver, PTR_ERR(tfm));
1793 return PTR_ERR(tfm);
1794 }
1795
1796 err = test_skcipher(tfm, ENCRYPT, suite->vecs, suite->count);
1797 if (!err)
1798 err = test_skcipher(tfm, DECRYPT, suite->vecs, suite->count);
1799
1800 crypto_free_skcipher(tfm);
1801 return err;
1802 }
1803
1804 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1805 u32 type, u32 mask)
1806 {
1807 struct crypto_comp *comp;
1808 struct crypto_acomp *acomp;
1809 int err;
1810 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
1811
1812 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
1813 acomp = crypto_alloc_acomp(driver, type, mask);
1814 if (IS_ERR(acomp)) {
1815 pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1816 driver, PTR_ERR(acomp));
1817 return PTR_ERR(acomp);
1818 }
1819 err = test_acomp(acomp, desc->suite.comp.comp.vecs,
1820 desc->suite.comp.decomp.vecs,
1821 desc->suite.comp.comp.count,
1822 desc->suite.comp.decomp.count);
1823 crypto_free_acomp(acomp);
1824 } else {
1825 comp = crypto_alloc_comp(driver, type, mask);
1826 if (IS_ERR(comp)) {
1827 pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1828 driver, PTR_ERR(comp));
1829 return PTR_ERR(comp);
1830 }
1831
1832 err = test_comp(comp, desc->suite.comp.comp.vecs,
1833 desc->suite.comp.decomp.vecs,
1834 desc->suite.comp.comp.count,
1835 desc->suite.comp.decomp.count);
1836
1837 crypto_free_comp(comp);
1838 }
1839 return err;
1840 }
1841
1842 static int __alg_test_hash(const struct hash_testvec *template,
1843 unsigned int tcount, const char *driver,
1844 u32 type, u32 mask)
1845 {
1846 struct crypto_ahash *tfm;
1847 int err;
1848
1849 tfm = crypto_alloc_ahash(driver, type, mask);
1850 if (IS_ERR(tfm)) {
1851 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1852 "%ld\n", driver, PTR_ERR(tfm));
1853 return PTR_ERR(tfm);
1854 }
1855
1856 err = test_hash(tfm, template, tcount, HASH_TEST_DIGEST);
1857 if (!err)
1858 err = test_hash(tfm, template, tcount, HASH_TEST_FINAL);
1859 if (!err)
1860 err = test_hash(tfm, template, tcount, HASH_TEST_FINUP);
1861 crypto_free_ahash(tfm);
1862 return err;
1863 }
1864
1865 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1866 u32 type, u32 mask)
1867 {
1868 const struct hash_testvec *template = desc->suite.hash.vecs;
1869 unsigned int tcount = desc->suite.hash.count;
1870 unsigned int nr_unkeyed, nr_keyed;
1871 int err;
1872
1873 /*
1874 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
1875 * first, before setting a key on the tfm. To make this easier, we
1876 * require that the unkeyed test vectors (if any) are listed first.
1877 */
1878
1879 for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
1880 if (template[nr_unkeyed].ksize)
1881 break;
1882 }
1883 for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
1884 if (!template[nr_unkeyed + nr_keyed].ksize) {
1885 pr_err("alg: hash: test vectors for %s out of order, "
1886 "unkeyed ones must come first\n", desc->alg);
1887 return -EINVAL;
1888 }
1889 }
1890
1891 err = 0;
1892 if (nr_unkeyed) {
1893 err = __alg_test_hash(template, nr_unkeyed, driver, type, mask);
1894 template += nr_unkeyed;
1895 }
1896
1897 if (!err && nr_keyed)
1898 err = __alg_test_hash(template, nr_keyed, driver, type, mask);
1899
1900 return err;
1901 }
1902
/*
 * Test crc32c: run the generic hash suite first, then perform an extra
 * shash-specific check that the finalization step is correct.
 *
 * The extra check seeds the shash partial state with an arbitrary
 * 32-bit value and calls only crypto_shash_final(); a conforming crc32c
 * implementation must emit the bitwise NOT of the internal state as a
 * little-endian word, so the expected output is cpu_to_le32(~seed).
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	__le32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	/* do { } while (0) gives the on-stack desc a scope to die in. */
	do {
		SHASH_DESC_ON_STACK(shash, tfm);
		/* crc32c keeps its running CRC as a raw u32 in the ctx */
		u32 *ctx = (u32 *)shash_desc_ctx(shash);

		shash->tfm = tfm;
		shash->flags = 0;

		/* arbitrary seed value for the partial state */
		*ctx = 420553207;
		err = crypto_shash_final(shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		if (val != cpu_to_le32(~420553207)) {
			pr_err("alg: crc32c: Test failed for %s: %u\n",
			       driver, le32_to_cpu(val));
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}
1949
1950 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1951 u32 type, u32 mask)
1952 {
1953 struct crypto_rng *rng;
1954 int err;
1955
1956 rng = crypto_alloc_rng(driver, type, mask);
1957 if (IS_ERR(rng)) {
1958 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1959 "%ld\n", driver, PTR_ERR(rng));
1960 return PTR_ERR(rng);
1961 }
1962
1963 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1964
1965 crypto_free_rng(rng);
1966
1967 return err;
1968 }
1969
1970
/*
 * Run a single CAVS known-answer vector against a DRBG instance.
 *
 * The DRBG is instantiated with the vector's test entropy and
 * personalization string, then asked for two blocks of output (each
 * with its own additional-input string); the second block is compared
 * against the expected output.  @pr selects the prediction-resistance
 * call variant, which supplies fresh test entropy before each generate.
 *
 * Returns 0 on match, a negative errno on operational failure, or the
 * nonzero memcmp() result on an output mismatch.
 */
static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
			  const char *driver, u32 type, u32 mask)
{
	int ret = -EAGAIN;
	struct crypto_rng *drng;
	struct drbg_test_data test_data;
	struct drbg_string addtl, pers, testentropy;
	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	drng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(drng)) {
		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
		       "%s\n", driver);
		/*
		 * NOTE(review): failure is reported as -ENOMEM rather than
		 * PTR_ERR(drng) — confirm whether the real error code
		 * should be propagated.
		 */
		kzfree(buf);
		return -ENOMEM;
	}

	/* Instantiate with known entropy so the output is deterministic. */
	test_data.testentropy = &testentropy;
	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	/* First generate call: output discarded, state advanced. */
	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		/* prediction resistance: reseed from test entropy first */
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/* Second generate call: this output is checked below. */
	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	kzfree(buf);
	return ret;
}
2037
2038
2039 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
2040 u32 type, u32 mask)
2041 {
2042 int err = 0;
2043 int pr = 0;
2044 int i = 0;
2045 const struct drbg_testvec *template = desc->suite.drbg.vecs;
2046 unsigned int tcount = desc->suite.drbg.count;
2047
2048 if (0 == memcmp(driver, "drbg_pr_", 8))
2049 pr = 1;
2050
2051 for (i = 0; i < tcount; i++) {
2052 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
2053 if (err) {
2054 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
2055 i, driver);
2056 err = -EINVAL;
2057 break;
2058 }
2059 }
2060 return err;
2061
2062 }
2063
/*
 * Run one key-agreement (KPP) test vector.
 *
 * Plays party A: generates A's public key, computes the shared secret
 * from B's public key, and verifies the result.  When the vector sets
 * 'genkey' (driver generates its own keypair, e.g. ECDH with random
 * private key), there is no fixed expected output, so the test instead
 * switches to B's secret, recomputes the shared secret from A's public
 * key, and checks that both parties agree.
 *
 * Returns 0 on success or a negative errno on failure.
 */
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
		       const char *alg)
{
	struct kpp_request *req;
	void *input_buf = NULL;
	void *output_buf = NULL;
	void *a_public = NULL;	/* saved copy of A's public key (genkey) */
	void *a_ss = NULL;	/* saved copy of A's shared secret (genkey) */
	void *shared_secret = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max;
	int err = -ENOMEM;
	struct scatterlist src, dst;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return err;

	crypto_init_wait(&wait);

	/* Load party A's private key material. */
	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
	if (err < 0)
		goto free_req;

	out_len_max = crypto_kpp_maxsize(tfm);
	output_buf = kzalloc(out_len_max, GFP_KERNEL);
	if (!output_buf) {
		err = -ENOMEM;
		goto free_req;
	}

	/* Use appropriate parameter as base */
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* Compute party A's public key */
	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
		       alg, err);
		goto free_output;
	}

	if (vec->genkey) {
		/* Save party A's public key */
		a_public = kzalloc(out_len_max, GFP_KERNEL);
		if (!a_public) {
			err = -ENOMEM;
			goto free_output;
		}
		memcpy(a_public, sg_virt(req->dst), out_len_max);
	} else {
		/* Verify calculated public key */
		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
			   vec->expected_a_public_size)) {
			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
			       alg);
			err = -EINVAL;
			goto free_output;
		}
	}

	/* Calculate shared secret key by using counter part (b) public key. */
	input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
	if (!input_buf) {
		err = -ENOMEM;
		goto free_output;
	}

	memcpy(input_buf, vec->b_public, vec->b_public_size);
	sg_init_one(&src, input_buf, vec->b_public_size);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_input(req, &src, vec->b_public_size);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
		       alg, err);
		goto free_all;
	}

	if (vec->genkey) {
		/* Save the shared secret obtained by party A */
		a_ss = kzalloc(vec->expected_ss_size, GFP_KERNEL);
		if (!a_ss) {
			err = -ENOMEM;
			goto free_all;
		}
		memcpy(a_ss, sg_virt(req->dst), vec->expected_ss_size);

		/*
		 * Calculate party B's shared secret by using party A's
		 * public key.
		 */
		err = crypto_kpp_set_secret(tfm, vec->b_secret,
					    vec->b_secret_size);
		if (err < 0)
			goto free_all;

		sg_init_one(&src, a_public, vec->expected_a_public_size);
		sg_init_one(&dst, output_buf, out_len_max);
		kpp_request_set_input(req, &src, vec->expected_a_public_size);
		kpp_request_set_output(req, &dst, out_len_max);
		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					 crypto_req_done, &wait);
		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
				      &wait);
		if (err) {
			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
			       alg, err);
			goto free_all;
		}

		/* B's result (in req->dst) must equal A's saved result. */
		shared_secret = a_ss;
	} else {
		shared_secret = (void *)vec->expected_ss;
	}

	/*
	 * verify shared secret from which the user will derive
	 * secret key by executing whatever hash it has chosen
	 */
	if (memcmp(shared_secret, sg_virt(req->dst),
		   vec->expected_ss_size)) {
		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
		       alg);
		err = -EINVAL;
	}

free_all:
	kfree(a_ss);
	kfree(input_buf);
free_output:
	kfree(a_public);
	kfree(output_buf);
free_req:
	kpp_request_free(req);
	return err;
}
2208
2209 static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2210 const struct kpp_testvec *vecs, unsigned int tcount)
2211 {
2212 int ret, i;
2213
2214 for (i = 0; i < tcount; i++) {
2215 ret = do_test_kpp(tfm, vecs++, alg);
2216 if (ret) {
2217 pr_err("alg: %s: test failed on vector %d, err=%d\n",
2218 alg, i + 1, ret);
2219 return ret;
2220 }
2221 }
2222 return 0;
2223 }
2224
2225 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2226 u32 type, u32 mask)
2227 {
2228 struct crypto_kpp *tfm;
2229 int err = 0;
2230
2231 tfm = crypto_alloc_kpp(driver, type, mask);
2232 if (IS_ERR(tfm)) {
2233 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2234 driver, PTR_ERR(tfm));
2235 return PTR_ERR(tfm);
2236 }
2237 if (desc->suite.kpp.vecs)
2238 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2239 desc->suite.kpp.count);
2240
2241 crypto_free_kpp(tfm);
2242 return err;
2243 }
2244
/*
 * Run a single akcipher (e.g. RSA) test vector.
 *
 * Phase 1 needs no private key: encrypt (or, for siggen_sigver vectors,
 * verify a signature) and compare against the expected output.  Phase 2
 * needs the private key: decrypt (or sign) and check the recovered
 * message, skipped for public-key-only vectors.
 *
 * Returns 0 on success or a negative errno on failure.
 */
static int test_akcipher_one(struct crypto_akcipher *tfm,
			     const struct akcipher_testvec *vecs)
{
	char *xbuf[XBUFSIZE];
	struct akcipher_request *req;
	void *outbuf_enc = NULL;
	void *outbuf_dec = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max, out_len = 0;
	int err = -ENOMEM;
	struct scatterlist src, dst, src_tab[2];
	const char *m, *c;		/* phase-1 input / expected output */
	unsigned int m_size, c_size;
	const char *op;			/* operation name for error messages */

	if (testmgr_alloc_buf(xbuf))
		return err;

	req = akcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		goto free_xbuf;

	crypto_init_wait(&wait);

	if (vecs->public_key_vec)
		err = crypto_akcipher_set_pub_key(tfm, vecs->key,
						  vecs->key_len);
	else
		err = crypto_akcipher_set_priv_key(tfm, vecs->key,
						   vecs->key_len);
	if (err)
		goto free_req;

	err = -ENOMEM;
	out_len_max = crypto_akcipher_maxsize(tfm);

	/*
	 * First run test which do not require a private key, such as
	 * encrypt or verify.
	 */
	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_enc)
		goto free_req;

	if (!vecs->siggen_sigver_test) {
		m = vecs->m;
		m_size = vecs->m_size;
		c = vecs->c;
		c_size = vecs->c_size;
		op = "encrypt";
	} else {
		/* Swap args so we could keep plaintext (digest)
		 * in vecs->m, and cooked signature in vecs->c.
		 */
		m = vecs->c; /* signature */
		m_size = vecs->c_size;
		c = vecs->m; /* digest */
		c_size = vecs->m_size;
		op = "verify";
	}

	if (WARN_ON(m_size > PAGE_SIZE))
		goto free_all;
	memcpy(xbuf[0], m, m_size);

	/*
	 * Split the input across two sg entries (8 bytes + remainder)
	 * to exercise the driver's scatterlist handling.
	 */
	sg_init_table(src_tab, 2);
	sg_set_buf(&src_tab[0], xbuf[0], 8);
	sg_set_buf(&src_tab[1], xbuf[0] + 8, m_size - 8);
	sg_init_one(&dst, outbuf_enc, out_len_max);
	akcipher_request_set_crypt(req, src_tab, &dst, m_size,
				   out_len_max);
	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature verification */
			      crypto_akcipher_verify(req) :
			      /* Run asymmetric encrypt */
			      crypto_akcipher_encrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
		goto free_all;
	}
	if (req->dst_len != c_size) {
		pr_err("alg: akcipher: %s test failed. Invalid output len\n",
		       op);
		err = -EINVAL;
		goto free_all;
	}
	/* verify that encrypted message is equal to expected */
	if (memcmp(c, outbuf_enc, c_size)) {
		pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
		hexdump(outbuf_enc, c_size);
		err = -EINVAL;
		goto free_all;
	}

	/*
	 * Don't invoke (decrypt or sign) test which require a private key
	 * for vectors with only a public key.
	 */
	if (vecs->public_key_vec) {
		err = 0;
		goto free_all;
	}
	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_dec) {
		err = -ENOMEM;
		goto free_all;
	}

	op = vecs->siggen_sigver_test ? "sign" : "decrypt";
	if (WARN_ON(c_size > PAGE_SIZE))
		goto free_all;
	memcpy(xbuf[0], c, c_size);

	sg_init_one(&src, xbuf[0], c_size);
	sg_init_one(&dst, outbuf_dec, out_len_max);
	crypto_init_wait(&wait);
	akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature generation */
			      crypto_akcipher_sign(req) :
			      /* Run asymmetric decrypt */
			      crypto_akcipher_decrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
		goto free_all;
	}
	out_len = req->dst_len;
	if (out_len < m_size) {
		pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
		       op, out_len);
		err = -EINVAL;
		goto free_all;
	}
	/*
	 * verify that decrypted message is equal to the original msg:
	 * any leading slack bytes must be zero, and the trailing m_size
	 * bytes must match the original message.
	 */
	if (memchr_inv(outbuf_dec, 0, out_len - m_size) ||
	    memcmp(m, outbuf_dec + out_len - m_size, m_size)) {
		pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
		hexdump(outbuf_dec, out_len);
		err = -EINVAL;
	}
free_all:
	kfree(outbuf_dec);
	kfree(outbuf_enc);
free_req:
	akcipher_request_free(req);
free_xbuf:
	testmgr_free_buf(xbuf);
	return err;
}
2398
2399 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2400 const struct akcipher_testvec *vecs,
2401 unsigned int tcount)
2402 {
2403 const char *algo =
2404 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2405 int ret, i;
2406
2407 for (i = 0; i < tcount; i++) {
2408 ret = test_akcipher_one(tfm, vecs++);
2409 if (!ret)
2410 continue;
2411
2412 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2413 i + 1, algo, ret);
2414 return ret;
2415 }
2416 return 0;
2417 }
2418
2419 static int alg_test_akcipher(const struct alg_test_desc *desc,
2420 const char *driver, u32 type, u32 mask)
2421 {
2422 struct crypto_akcipher *tfm;
2423 int err = 0;
2424
2425 tfm = crypto_alloc_akcipher(driver, type, mask);
2426 if (IS_ERR(tfm)) {
2427 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2428 driver, PTR_ERR(tfm));
2429 return PTR_ERR(tfm);
2430 }
2431 if (desc->suite.akcipher.vecs)
2432 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2433 desc->suite.akcipher.count);
2434
2435 crypto_free_akcipher(tfm);
2436 return err;
2437 }
2438
/*
 * No-op test that always reports success.  Used as the .test callback for
 * alg_test_descs[] entries that have no vectors of their own, e.g.
 * templates already covered by another entry or "paes" variants whose
 * keys live in hardware secure memory and so cannot be tested here.
 */
static int alg_test_null(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	return 0;
}
2444
/* Expand a test-vector array into a { .vecs, .count } suite initializer. */
#define __VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }

/*
 * Master table of self-tests, one entry per algorithm name.
 * Please keep this list sorted by algorithm name: alg_find_test() does a
 * binary search over it, and alg_test_descs_check_order() warns at boot
 * if the ordering is violated or an entry is duplicated.
 */
static const struct alg_test_desc alg_test_descs[] = {
	{
		.alg = "adiantum(xchacha12,aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(adiantum_xchacha12_aes_tv_template)
		},
	}, {
		.alg = "adiantum(xchacha20,aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(adiantum_xchacha20_aes_tv_template)
		},
	}, {
		.alg = "aegis128",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(aegis128_tv_template)
		}
	}, {
		.alg = "aegis128l",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(aegis128l_tv_template)
		}
	}, {
		.alg = "aegis256",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(aegis256_tv_template)
		}
	}, {
		.alg = "ansi_cprng",
		.test = alg_test_cprng,
		.suite = {
			.cprng = __VECS(ansi_cprng_aes_tv_template)
		}
	}, {
		.alg = "authenc(hmac(md5),ecb(cipher_null))",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(hmac_md5_ecb_cipher_null_tv_template)
		}
	}, {
		.alg = "authenc(hmac(sha1),cbc(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(hmac_sha1_aes_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha1),cbc(des))",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(hmac_sha1_des_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha1),cbc(des3_ede))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(hmac_sha1_des3_ede_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha1),ctr(aes))",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(hmac_sha1_ecb_cipher_null_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "authenc(hmac(sha224),cbc(des))",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(hmac_sha224_des_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha224),cbc(des3_ede))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha256),cbc(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(hmac_sha256_aes_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha256),cbc(des))",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(hmac_sha256_des_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha256),cbc(des3_ede))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha256),ctr(aes))",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "authenc(hmac(sha384),cbc(des))",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(hmac_sha384_des_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha384),cbc(des3_ede))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(hmac_sha384_des3_ede_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha384),ctr(aes))",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "authenc(hmac(sha512),cbc(aes))",
		.fips_allowed = 1,
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(hmac_sha512_aes_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha512),cbc(des))",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(hmac_sha512_des_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha512),cbc(des3_ede))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(hmac_sha512_des3_ede_cbc_tv_temp)
		}
	}, {
		.alg = "authenc(hmac(sha512),ctr(aes))",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "cbc(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(aes_cbc_tv_template)
		},
	}, {
		.alg = "cbc(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(anubis_cbc_tv_template)
		},
	}, {
		.alg = "cbc(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(bf_cbc_tv_template)
		},
	}, {
		.alg = "cbc(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(camellia_cbc_tv_template)
		},
	}, {
		.alg = "cbc(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast5_cbc_tv_template)
		},
	}, {
		.alg = "cbc(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast6_cbc_tv_template)
		},
	}, {
		.alg = "cbc(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(des_cbc_tv_template)
		},
	}, {
		.alg = "cbc(des3_ede)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(des3_ede_cbc_tv_template)
		},
	}, {
		/* Same as cbc(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 */
		.alg = "cbc(paes)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "cbc(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(serpent_cbc_tv_template)
		},
	}, {
		.alg = "cbc(sm4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(sm4_cbc_tv_template)
		}
	}, {
		.alg = "cbc(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tf_cbc_tv_template)
		},
	}, {
		.alg = "cbcmac(aes)",
		.fips_allowed = 1,
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(aes_cbcmac_tv_template)
		}
	}, {
		.alg = "ccm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(aes_ccm_tv_template)
		}
	}, {
		.alg = "cfb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(aes_cfb_tv_template)
		},
	}, {
		.alg = "chacha20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(chacha20_tv_template)
		},
	}, {
		.alg = "cmac(aes)",
		.fips_allowed = 1,
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(aes_cmac128_tv_template)
		}
	}, {
		.alg = "cmac(des3_ede)",
		.fips_allowed = 1,
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(des3_ede_cmac64_tv_template)
		}
	}, {
		.alg = "compress_null",
		.test = alg_test_null,
	}, {
		.alg = "crc32",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(crc32_tv_template)
		}
	}, {
		.alg = "crc32c",
		.test = alg_test_crc32c,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(crc32c_tv_template)
		}
	}, {
		.alg = "crct10dif",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(crct10dif_tv_template)
		}
	}, {
		.alg = "ctr(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(aes_ctr_tv_template)
		}
	}, {
		.alg = "ctr(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(bf_ctr_tv_template)
		}
	}, {
		.alg = "ctr(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(camellia_ctr_tv_template)
		}
	}, {
		.alg = "ctr(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast5_ctr_tv_template)
		}
	}, {
		.alg = "ctr(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast6_ctr_tv_template)
		}
	}, {
		.alg = "ctr(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(des_ctr_tv_template)
		}
	}, {
		.alg = "ctr(des3_ede)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(des3_ede_ctr_tv_template)
		}
	}, {
		/* Same as ctr(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 */
		.alg = "ctr(paes)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "ctr(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(serpent_ctr_tv_template)
		}
	}, {
		.alg = "ctr(sm4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(sm4_ctr_tv_template)
		}
	}, {
		.alg = "ctr(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tf_ctr_tv_template)
		}
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(cts_mode_tv_template)
		}
	}, {
		.alg = "deflate",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = __VECS(deflate_comp_tv_template),
				.decomp = __VECS(deflate_decomp_tv_template)
			}
		}
	}, {
		.alg = "dh",
		.test = alg_test_kpp,
		.fips_allowed = 1,
		.suite = {
			.kpp = __VECS(dh_tv_template)
		}
	}, {
		.alg = "digest_null",
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_ctr_aes128",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
		}
	}, {
		.alg = "drbg_nopr_ctr_aes192",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
		}
	}, {
		.alg = "drbg_nopr_ctr_aes256",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
		}
	}, {
		/*
		 * There is no need to specifically test the DRBG with every
		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
		 */
		.alg = "drbg_nopr_hmac_sha1",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_hmac_sha256",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
		}
	}, {
		/* covered by drbg_nopr_hmac_sha256 test */
		.alg = "drbg_nopr_hmac_sha384",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_hmac_sha512",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "drbg_nopr_sha1",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_sha256",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = __VECS(drbg_nopr_sha256_tv_template)
		}
	}, {
		/* covered by drbg_nopr_sha256 test */
		.alg = "drbg_nopr_sha384",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_sha512",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_ctr_aes128",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
		}
	}, {
		/* covered by drbg_pr_ctr_aes128 test */
		.alg = "drbg_pr_ctr_aes192",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_ctr_aes256",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_hmac_sha1",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_hmac_sha256",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
		}
	}, {
		/* covered by drbg_pr_hmac_sha256 test */
		.alg = "drbg_pr_hmac_sha384",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_hmac_sha512",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "drbg_pr_sha1",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_sha256",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = __VECS(drbg_pr_sha256_tv_template)
		}
	}, {
		/* covered by drbg_pr_sha256 test */
		.alg = "drbg_pr_sha384",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_sha512",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(aes_tv_template)
		}
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(anubis_tv_template)
		}
	}, {
		.alg = "ecb(arc4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(arc4_tv_template)
		}
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(bf_tv_template)
		}
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(camellia_tv_template)
		}
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast5_tv_template)
		}
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast6_tv_template)
		}
	}, {
		.alg = "ecb(cipher_null)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(des_tv_template)
		}
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(des3_ede_tv_template)
		}
	}, {
		.alg = "ecb(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.vecs = fcrypt_pcbc_tv_template,
				.count = 1
			}
		}
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(khazad_tv_template)
		}
	}, {
		/* Same as ecb(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 */
		.alg = "ecb(paes)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(seed_tv_template)
		}
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(serpent_tv_template)
		}
	}, {
		.alg = "ecb(sm4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(sm4_tv_template)
		}
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tea_tv_template)
		}
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tnepres_tv_template)
		}
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tf_tv_template)
		}
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(xeta_tv_template)
		}
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(xtea_tv_template)
		}
	}, {
		.alg = "ecdh",
		.test = alg_test_kpp,
		.fips_allowed = 1,
		.suite = {
			.kpp = __VECS(ecdh_tv_template)
		}
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(aes_gcm_tv_template)
		}
	}, {
		.alg = "ghash",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(ghash_tv_template)
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_md5_tv_template)
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_rmd128_tv_template)
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_rmd160_tv_template)
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(hmac_sha1_tv_template)
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(hmac_sha224_tv_template)
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(hmac_sha256_tv_template)
		}
	}, {
		.alg = "hmac(sha3-224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(hmac_sha3_224_tv_template)
		}
	}, {
		.alg = "hmac(sha3-256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(hmac_sha3_256_tv_template)
		}
	}, {
		.alg = "hmac(sha3-384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(hmac_sha3_384_tv_template)
		}
	}, {
		.alg = "hmac(sha3-512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(hmac_sha3_512_tv_template)
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(hmac_sha384_tv_template)
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(hmac_sha512_tv_template)
		}
	}, {
		.alg = "hmac(streebog256)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_streebog256_tv_template)
		}
	}, {
		.alg = "hmac(streebog512)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_streebog512_tv_template)
		}
	}, {
		.alg = "jitterentropy_rng",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "kw(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(aes_kw_tv_template)
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(aes_lrw_tv_template)
		}
	}, {
		.alg = "lrw(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(camellia_lrw_tv_template)
		}
	}, {
		.alg = "lrw(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast6_lrw_tv_template)
		}
	}, {
		.alg = "lrw(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(serpent_lrw_tv_template)
		}
	}, {
		.alg = "lrw(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tf_lrw_tv_template)
		}
	}, {
		.alg = "lz4",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = __VECS(lz4_comp_tv_template),
				.decomp = __VECS(lz4_decomp_tv_template)
			}
		}
	}, {
		.alg = "lz4hc",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = __VECS(lz4hc_comp_tv_template),
				.decomp = __VECS(lz4hc_decomp_tv_template)
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = __VECS(lzo_comp_tv_template),
				.decomp = __VECS(lzo_decomp_tv_template)
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(md4_tv_template)
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(md5_tv_template)
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(michael_mic_tv_template)
		}
	}, {
		.alg = "morus1280",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(morus1280_tv_template)
		}
	}, {
		.alg = "morus640",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(morus640_tv_template)
		}
	}, {
		.alg = "nhpoly1305",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(nhpoly1305_tv_template)
		}
	}, {
		.alg = "ofb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(aes_ofb_tv_template)
		}
	}, {
		/* Same as ofb(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 */
		.alg = "ofb(paes)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(fcrypt_pcbc_tv_template)
		}
	}, {
		.alg = "pkcs1pad(rsa,sha224)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "pkcs1pad(rsa,sha256)",
		.test = alg_test_akcipher,
		.fips_allowed = 1,
		.suite = {
			.akcipher = __VECS(pkcs1pad_rsa_tv_template)
		}
	}, {
		.alg = "pkcs1pad(rsa,sha384)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "pkcs1pad(rsa,sha512)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "poly1305",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(poly1305_tv_template)
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(aes_ctr_rfc3686_tv_template)
		}
	}, {
		.alg = "rfc4106(gcm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(aes_gcm_rfc4106_tv_template)
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = __VECS(aes_ccm_rfc4309_tv_template)
		}
	}, {
		.alg = "rfc4543(gcm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(aes_gcm_rfc4543_tv_template)
		}
	}, {
		.alg = "rfc7539(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(rfc7539_tv_template)
		}
	}, {
		.alg = "rfc7539esp(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(rfc7539esp_tv_template)
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(rmd128_tv_template)
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(rmd160_tv_template)
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(rmd256_tv_template)
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(rmd320_tv_template)
		}
	}, {
		.alg = "rsa",
		.test = alg_test_akcipher,
		.fips_allowed = 1,
		.suite = {
			.akcipher = __VECS(rsa_tv_template)
		}
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(salsa20_stream_tv_template)
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(sha1_tv_template)
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(sha224_tv_template)
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(sha256_tv_template)
		}
	}, {
		.alg = "sha3-224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(sha3_224_tv_template)
		}
	}, {
		.alg = "sha3-256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(sha3_256_tv_template)
		}
	}, {
		.alg = "sha3-384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(sha3_384_tv_template)
		}
	}, {
		.alg = "sha3-512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(sha3_512_tv_template)
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(sha384_tv_template)
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = __VECS(sha512_tv_template)
		}
	}, {
		.alg = "sm3",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sm3_tv_template)
		}
	}, {
		.alg = "streebog256",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(streebog256_tv_template)
		}
	}, {
		.alg = "streebog512",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(streebog512_tv_template)
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(tgr128_tv_template)
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(tgr160_tv_template)
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(tgr192_tv_template)
		}
	}, {
		.alg = "vmac64(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(vmac64_aes_tv_template)
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(wp256_tv_template)
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(wp384_tv_template)
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(wp512_tv_template)
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(aes_xcbc128_tv_template)
		}
	}, {
		.alg = "xchacha12",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(xchacha12_tv_template)
		},
	}, {
		.alg = "xchacha20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(xchacha20_tv_template)
		},
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = __VECS(aes_xts_tv_template)
		}
	}, {
		.alg = "xts(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(camellia_xts_tv_template)
		}
	}, {
		.alg = "xts(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast6_xts_tv_template)
		}
	}, {
		/* Same as xts(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 */
		.alg = "xts(paes)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "xts(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(serpent_xts_tv_template)
		}
	}, {
		.alg = "xts(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tf_xts_tv_template)
		}
	}, {
		.alg = "xts4096(paes)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "xts512(paes)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "zlib-deflate",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = __VECS(zlib_deflate_comp_tv_template),
				.decomp = __VECS(zlib_deflate_decomp_tv_template)
			}
		}
	}, {
		.alg = "zstd",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = __VECS(zstd_comp_tv_template),
				.decomp = __VECS(zstd_decomp_tv_template)
			}
		}
	}
};
3652
3653 static bool alg_test_descs_checked;
3654
3655 static void alg_test_descs_check_order(void)
3656 {
3657 int i;
3658
3659 /* only check once */
3660 if (alg_test_descs_checked)
3661 return;
3662
3663 alg_test_descs_checked = true;
3664
3665 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3666 int diff = strcmp(alg_test_descs[i - 1].alg,
3667 alg_test_descs[i].alg);
3668
3669 if (WARN_ON(diff > 0)) {
3670 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3671 alg_test_descs[i - 1].alg,
3672 alg_test_descs[i].alg);
3673 }
3674
3675 if (WARN_ON(diff == 0)) {
3676 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3677 alg_test_descs[i].alg);
3678 }
3679 }
3680 }
3681
3682 static int alg_find_test(const char *alg)
3683 {
3684 int start = 0;
3685 int end = ARRAY_SIZE(alg_test_descs);
3686
3687 while (start < end) {
3688 int i = (start + end) / 2;
3689 int diff = strcmp(alg_test_descs[i].alg, alg);
3690
3691 if (diff > 0) {
3692 end = i;
3693 continue;
3694 }
3695
3696 if (diff < 0) {
3697 start = i + 1;
3698 continue;
3699 }
3700
3701 return i;
3702 }
3703
3704 return -1;
3705 }
3706
/*
 * alg_test() - run the registered self-tests for an algorithm instance.
 * @driver: name of the specific implementation, e.g. "aes-generic"
 * @alg: algorithm name, e.g. "aes"
 * @type: crypto algorithm type flags
 * @mask: crypto algorithm type mask
 *
 * Looks up test descriptors for both @alg and @driver and runs whichever
 * exist.  Returns 0 when the tests pass or none apply, -ENAMETOOLONG if a
 * constructed template name overflows, -EINVAL for a non-approved
 * algorithm in FIPS mode, or a nonzero error from a failing test.  In
 * FIPS mode a test failure panics the kernel.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	/* Tests can be disabled via the "notests" module parameter,
	 * except in FIPS mode where they are mandatory.
	 */
	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	alg_test_descs_check_order();

	/* Bare ciphers have no descriptor of their own; they are tested
	 * through their "ecb(...)" template entry.
	 */
	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* Descriptors may exist under the algorithm name, the driver
	 * name, or both; run every applicable one.
	 */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
3770
3771 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3772
3773 EXPORT_SYMBOL_GPL(alg_test);