2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35 #include <crypto/kpp.h>
36 #include <crypto/acompress.h>
41 module_param(notests
, bool, 0644);
42 MODULE_PARM_DESC(notests
, "disable crypto self-tests");
44 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
47 int alg_test(const char *driver
, const char *alg
, u32 type
, u32 mask
)
57 * Need slab memory for testing (size in number of pages).
62 * Indexes into the xbuf to simulate cross-page access.
74 * Used by test_cipher()
/* AEAD test vectors and their count. */
struct aead_test_suite {
	const struct aead_testvec *vecs;
	unsigned int count;
};
/* Block-cipher / skcipher test vectors and their count. */
struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};
/* Separate vector lists for the compression and decompression directions. */
struct comp_test_suite {
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};
/* Hash (digest) test vectors and their count. */
struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};
/* Deterministic RNG (CPRNG) test vectors and their count. */
struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};
/* DRBG test vectors and their count. */
struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};
/* Asymmetric-cipher (akcipher) test vectors and their count. */
struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};
/* Key-agreement (KPP) test vectors and their count. */
struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};
121 struct alg_test_desc
{
123 int (*test
)(const struct alg_test_desc
*desc
, const char *driver
,
125 int fips_allowed
; /* set if alg is allowed in fips mode */
128 struct aead_test_suite aead
;
129 struct cipher_test_suite cipher
;
130 struct comp_test_suite comp
;
131 struct hash_test_suite hash
;
132 struct cprng_test_suite cprng
;
133 struct drbg_test_suite drbg
;
134 struct akcipher_test_suite akcipher
;
135 struct kpp_test_suite kpp
;
139 static const unsigned int IDX
[8] = {
140 IDX1
, IDX2
, IDX3
, IDX4
, IDX5
, IDX6
, IDX7
, IDX8
};
142 static void hexdump(unsigned char *buf
, unsigned int len
)
144 print_hex_dump(KERN_CONT
, "", DUMP_PREFIX_OFFSET
,
149 static int testmgr_alloc_buf(char *buf
[XBUFSIZE
])
153 for (i
= 0; i
< XBUFSIZE
; i
++) {
154 buf
[i
] = (void *)__get_free_page(GFP_KERNEL
);
163 free_page((unsigned long)buf
[i
]);
168 static void testmgr_free_buf(char *buf
[XBUFSIZE
])
172 for (i
= 0; i
< XBUFSIZE
; i
++)
173 free_page((unsigned long)buf
[i
]);
/*
 * Verify that @result still holds @size bytes of the guard value @c,
 * i.e. that a hash operation has not written into the result buffer
 * prematurely.  Returns 0 if intact, -EINVAL on the first mismatch.
 */
static int ahash_guard_result(char *result, char c, int size)
{
	int i;

	for (i = 0; i < size; i++) {
		if (result[i] != c)
			return -EINVAL;
	}

	return 0;
}
188 static int ahash_partial_update(struct ahash_request
**preq
,
189 struct crypto_ahash
*tfm
, const struct hash_testvec
*template,
190 void *hash_buff
, int k
, int temp
, struct scatterlist
*sg
,
191 const char *algo
, char *result
, struct crypto_wait
*wait
)
194 struct ahash_request
*req
;
195 int statesize
, ret
= -EINVAL
;
196 static const unsigned char guard
[] = { 0x00, 0xba, 0xad, 0x00 };
197 int digestsize
= crypto_ahash_digestsize(tfm
);
200 statesize
= crypto_ahash_statesize(
201 crypto_ahash_reqtfm(req
));
202 state
= kmalloc(statesize
+ sizeof(guard
), GFP_KERNEL
);
204 pr_err("alg: hash: Failed to alloc state for %s\n", algo
);
207 memcpy(state
+ statesize
, guard
, sizeof(guard
));
208 memset(result
, 1, digestsize
);
209 ret
= crypto_ahash_export(req
, state
);
210 WARN_ON(memcmp(state
+ statesize
, guard
, sizeof(guard
)));
212 pr_err("alg: hash: Failed to export() for %s\n", algo
);
215 ret
= ahash_guard_result(result
, 1, digestsize
);
217 pr_err("alg: hash: Failed, export used req->result for %s\n",
221 ahash_request_free(req
);
222 req
= ahash_request_alloc(tfm
, GFP_KERNEL
);
224 pr_err("alg: hash: Failed to alloc request for %s\n", algo
);
227 ahash_request_set_callback(req
,
228 CRYPTO_TFM_REQ_MAY_BACKLOG
,
229 crypto_req_done
, wait
);
231 memcpy(hash_buff
, template->plaintext
+ temp
,
233 sg_init_one(&sg
[0], hash_buff
, template->tap
[k
]);
234 ahash_request_set_crypt(req
, sg
, result
, template->tap
[k
]);
235 ret
= crypto_ahash_import(req
, state
);
237 pr_err("alg: hash: Failed to import() for %s\n", algo
);
240 ret
= ahash_guard_result(result
, 1, digestsize
);
242 pr_err("alg: hash: Failed, import used req->result for %s\n",
246 ret
= crypto_wait_req(crypto_ahash_update(req
), wait
);
253 ahash_request_free(req
);
266 static int __test_hash(struct crypto_ahash
*tfm
,
267 const struct hash_testvec
*template, unsigned int tcount
,
268 enum hash_test test_type
, const int align_offset
)
270 const char *algo
= crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm
));
271 size_t digest_size
= crypto_ahash_digestsize(tfm
);
272 unsigned int i
, j
, k
, temp
;
273 struct scatterlist sg
[8];
276 struct ahash_request
*req
;
277 struct crypto_wait wait
;
279 char *xbuf
[XBUFSIZE
];
282 result
= kmalloc(digest_size
, GFP_KERNEL
);
285 key
= kmalloc(MAX_KEYLEN
, GFP_KERNEL
);
288 if (testmgr_alloc_buf(xbuf
))
291 crypto_init_wait(&wait
);
293 req
= ahash_request_alloc(tfm
, GFP_KERNEL
);
295 printk(KERN_ERR
"alg: hash: Failed to allocate request for "
299 ahash_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
300 crypto_req_done
, &wait
);
303 for (i
= 0; i
< tcount
; i
++) {
308 if (WARN_ON(align_offset
+ template[i
].psize
> PAGE_SIZE
))
312 memset(result
, 0, digest_size
);
315 hash_buff
+= align_offset
;
317 memcpy(hash_buff
, template[i
].plaintext
, template[i
].psize
);
318 sg_init_one(&sg
[0], hash_buff
, template[i
].psize
);
320 if (template[i
].ksize
) {
321 crypto_ahash_clear_flags(tfm
, ~0);
322 if (template[i
].ksize
> MAX_KEYLEN
) {
323 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
324 j
, algo
, template[i
].ksize
, MAX_KEYLEN
);
328 memcpy(key
, template[i
].key
, template[i
].ksize
);
329 ret
= crypto_ahash_setkey(tfm
, key
, template[i
].ksize
);
331 printk(KERN_ERR
"alg: hash: setkey failed on "
332 "test %d for %s: ret=%d\n", j
, algo
,
338 ahash_request_set_crypt(req
, sg
, result
, template[i
].psize
);
340 case HASH_TEST_DIGEST
:
341 ret
= crypto_wait_req(crypto_ahash_digest(req
), &wait
);
343 pr_err("alg: hash: digest failed on test %d "
344 "for %s: ret=%d\n", j
, algo
, -ret
);
349 case HASH_TEST_FINAL
:
350 memset(result
, 1, digest_size
);
351 ret
= crypto_wait_req(crypto_ahash_init(req
), &wait
);
353 pr_err("alg: hash: init failed on test %d "
354 "for %s: ret=%d\n", j
, algo
, -ret
);
357 ret
= ahash_guard_result(result
, 1, digest_size
);
359 pr_err("alg: hash: init failed on test %d "
360 "for %s: used req->result\n", j
, algo
);
363 ret
= crypto_wait_req(crypto_ahash_update(req
), &wait
);
365 pr_err("alg: hash: update failed on test %d "
366 "for %s: ret=%d\n", j
, algo
, -ret
);
369 ret
= ahash_guard_result(result
, 1, digest_size
);
371 pr_err("alg: hash: update failed on test %d "
372 "for %s: used req->result\n", j
, algo
);
375 ret
= crypto_wait_req(crypto_ahash_final(req
), &wait
);
377 pr_err("alg: hash: final failed on test %d "
378 "for %s: ret=%d\n", j
, algo
, -ret
);
383 case HASH_TEST_FINUP
:
384 memset(result
, 1, digest_size
);
385 ret
= crypto_wait_req(crypto_ahash_init(req
), &wait
);
387 pr_err("alg: hash: init failed on test %d "
388 "for %s: ret=%d\n", j
, algo
, -ret
);
391 ret
= ahash_guard_result(result
, 1, digest_size
);
393 pr_err("alg: hash: init failed on test %d "
394 "for %s: used req->result\n", j
, algo
);
397 ret
= crypto_wait_req(crypto_ahash_finup(req
), &wait
);
399 pr_err("alg: hash: final failed on test %d "
400 "for %s: ret=%d\n", j
, algo
, -ret
);
406 if (memcmp(result
, template[i
].digest
,
407 crypto_ahash_digestsize(tfm
))) {
408 printk(KERN_ERR
"alg: hash: Test %d failed for %s\n",
410 hexdump(result
, crypto_ahash_digestsize(tfm
));
420 for (i
= 0; i
< tcount
; i
++) {
421 /* alignment tests are only done with continuous buffers */
422 if (align_offset
!= 0)
429 memset(result
, 0, digest_size
);
432 sg_init_table(sg
, template[i
].np
);
434 for (k
= 0; k
< template[i
].np
; k
++) {
435 if (WARN_ON(offset_in_page(IDX
[k
]) +
436 template[i
].tap
[k
] > PAGE_SIZE
))
439 memcpy(xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
440 offset_in_page(IDX
[k
]),
441 template[i
].plaintext
+ temp
,
444 temp
+= template[i
].tap
[k
];
447 if (template[i
].ksize
) {
448 if (template[i
].ksize
> MAX_KEYLEN
) {
449 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
450 j
, algo
, template[i
].ksize
, MAX_KEYLEN
);
454 crypto_ahash_clear_flags(tfm
, ~0);
455 memcpy(key
, template[i
].key
, template[i
].ksize
);
456 ret
= crypto_ahash_setkey(tfm
, key
, template[i
].ksize
);
459 printk(KERN_ERR
"alg: hash: setkey "
460 "failed on chunking test %d "
461 "for %s: ret=%d\n", j
, algo
, -ret
);
466 ahash_request_set_crypt(req
, sg
, result
, template[i
].psize
);
467 ret
= crypto_wait_req(crypto_ahash_digest(req
), &wait
);
469 pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
474 if (memcmp(result
, template[i
].digest
,
475 crypto_ahash_digestsize(tfm
))) {
476 printk(KERN_ERR
"alg: hash: Chunking test %d "
477 "failed for %s\n", j
, algo
);
478 hexdump(result
, crypto_ahash_digestsize(tfm
));
484 /* partial update exercise */
486 for (i
= 0; i
< tcount
; i
++) {
487 /* alignment tests are only done with continuous buffers */
488 if (align_offset
!= 0)
491 if (template[i
].np
< 2)
495 memset(result
, 0, digest_size
);
499 memcpy(hash_buff
, template[i
].plaintext
,
501 sg_init_one(&sg
[0], hash_buff
, template[i
].tap
[0]);
503 if (template[i
].ksize
) {
504 crypto_ahash_clear_flags(tfm
, ~0);
505 if (template[i
].ksize
> MAX_KEYLEN
) {
506 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
507 j
, algo
, template[i
].ksize
, MAX_KEYLEN
);
511 memcpy(key
, template[i
].key
, template[i
].ksize
);
512 ret
= crypto_ahash_setkey(tfm
, key
, template[i
].ksize
);
514 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
520 ahash_request_set_crypt(req
, sg
, result
, template[i
].tap
[0]);
521 ret
= crypto_wait_req(crypto_ahash_init(req
), &wait
);
523 pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
527 ret
= crypto_wait_req(crypto_ahash_update(req
), &wait
);
529 pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
534 temp
= template[i
].tap
[0];
535 for (k
= 1; k
< template[i
].np
; k
++) {
536 ret
= ahash_partial_update(&req
, tfm
, &template[i
],
537 hash_buff
, k
, temp
, &sg
[0], algo
, result
,
540 pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
544 temp
+= template[i
].tap
[k
];
546 ret
= crypto_wait_req(crypto_ahash_final(req
), &wait
);
548 pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
552 if (memcmp(result
, template[i
].digest
,
553 crypto_ahash_digestsize(tfm
))) {
554 pr_err("alg: hash: Partial Test %d failed for %s\n",
556 hexdump(result
, crypto_ahash_digestsize(tfm
));
565 ahash_request_free(req
);
567 testmgr_free_buf(xbuf
);
574 static int test_hash(struct crypto_ahash
*tfm
,
575 const struct hash_testvec
*template,
576 unsigned int tcount
, enum hash_test test_type
)
578 unsigned int alignmask
;
581 ret
= __test_hash(tfm
, template, tcount
, test_type
, 0);
585 /* test unaligned buffers, check with one byte offset */
586 ret
= __test_hash(tfm
, template, tcount
, test_type
, 1);
590 alignmask
= crypto_tfm_alg_alignmask(&tfm
->base
);
592 /* Check if alignment mask for tfm is correctly set. */
593 ret
= __test_hash(tfm
, template, tcount
, test_type
,
602 static int __test_aead(struct crypto_aead
*tfm
, int enc
,
603 const struct aead_testvec
*template, unsigned int tcount
,
604 const bool diff_dst
, const int align_offset
)
606 const char *algo
= crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm
));
607 unsigned int i
, j
, k
, n
, temp
;
611 struct aead_request
*req
;
612 struct scatterlist
*sg
;
613 struct scatterlist
*sgout
;
615 struct crypto_wait wait
;
616 unsigned int authsize
, iv_len
;
618 char *xbuf
[XBUFSIZE
];
619 char *xoutbuf
[XBUFSIZE
];
620 char *axbuf
[XBUFSIZE
];
622 iv
= kzalloc(MAX_IVLEN
, GFP_KERNEL
);
625 key
= kmalloc(MAX_KEYLEN
, GFP_KERNEL
);
628 if (testmgr_alloc_buf(xbuf
))
630 if (testmgr_alloc_buf(axbuf
))
632 if (diff_dst
&& testmgr_alloc_buf(xoutbuf
))
635 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
636 sg
= kmalloc(array3_size(sizeof(*sg
), 8, (diff_dst
? 4 : 2)),
652 crypto_init_wait(&wait
);
654 req
= aead_request_alloc(tfm
, GFP_KERNEL
);
656 pr_err("alg: aead%s: Failed to allocate request for %s\n",
661 aead_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
662 crypto_req_done
, &wait
);
664 iv_len
= crypto_aead_ivsize(tfm
);
666 for (i
= 0, j
= 0; i
< tcount
; i
++) {
667 const char *input
, *expected_output
;
668 unsigned int inlen
, outlen
;
669 char *inbuf
, *outbuf
, *assocbuf
;
674 if (template[i
].novrfy
)
676 input
= template[i
].ptext
;
677 inlen
= template[i
].plen
;
678 expected_output
= template[i
].ctext
;
679 outlen
= template[i
].clen
;
681 input
= template[i
].ctext
;
682 inlen
= template[i
].clen
;
683 expected_output
= template[i
].ptext
;
684 outlen
= template[i
].plen
;
689 /* some templates have no input data but they will
692 inbuf
= xbuf
[0] + align_offset
;
696 if (WARN_ON(align_offset
+ template[i
].clen
> PAGE_SIZE
||
697 template[i
].alen
> PAGE_SIZE
))
700 memcpy(inbuf
, input
, inlen
);
701 memcpy(assocbuf
, template[i
].assoc
, template[i
].alen
);
703 memcpy(iv
, template[i
].iv
, iv_len
);
705 memset(iv
, 0, iv_len
);
707 crypto_aead_clear_flags(tfm
, ~0);
709 crypto_aead_set_flags(tfm
,
710 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS
);
712 if (template[i
].klen
> MAX_KEYLEN
) {
713 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
714 d
, j
, algo
, template[i
].klen
,
719 memcpy(key
, template[i
].key
, template[i
].klen
);
721 ret
= crypto_aead_setkey(tfm
, key
, template[i
].klen
);
722 if (template[i
].fail
== !ret
) {
723 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
724 d
, j
, algo
, crypto_aead_get_flags(tfm
));
729 authsize
= template[i
].clen
- template[i
].plen
;
730 ret
= crypto_aead_setauthsize(tfm
, authsize
);
732 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
733 d
, authsize
, j
, algo
);
737 k
= !!template[i
].alen
;
738 sg_init_table(sg
, k
+ 1);
739 sg_set_buf(&sg
[0], assocbuf
, template[i
].alen
);
740 sg_set_buf(&sg
[k
], inbuf
, template[i
].clen
);
744 sg_init_table(sgout
, k
+ 1);
745 sg_set_buf(&sgout
[0], assocbuf
, template[i
].alen
);
747 outbuf
= xoutbuf
[0] + align_offset
;
748 sg_set_buf(&sgout
[k
], outbuf
, template[i
].clen
);
751 aead_request_set_crypt(req
, sg
, (diff_dst
) ? sgout
: sg
, inlen
,
754 aead_request_set_ad(req
, template[i
].alen
);
756 ret
= crypto_wait_req(enc
? crypto_aead_encrypt(req
)
757 : crypto_aead_decrypt(req
), &wait
);
761 if (template[i
].novrfy
) {
762 /* verification was supposed to fail */
763 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
765 /* so really, we got a bad message */
771 if (template[i
].novrfy
)
772 /* verification failure was expected */
776 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
777 d
, e
, j
, algo
, -ret
);
781 if (memcmp(outbuf
, expected_output
, outlen
)) {
782 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
784 hexdump(outbuf
, outlen
);
790 for (i
= 0, j
= 0; i
< tcount
; i
++) {
791 const char *input
, *expected_output
;
792 unsigned int inlen
, outlen
;
794 /* alignment tests are only done with continuous buffers */
795 if (align_offset
!= 0)
802 if (template[i
].novrfy
)
804 input
= template[i
].ptext
;
805 inlen
= template[i
].plen
;
806 expected_output
= template[i
].ctext
;
807 outlen
= template[i
].clen
;
809 input
= template[i
].ctext
;
810 inlen
= template[i
].clen
;
811 expected_output
= template[i
].ptext
;
812 outlen
= template[i
].plen
;
818 memcpy(iv
, template[i
].iv
, iv_len
);
820 memset(iv
, 0, MAX_IVLEN
);
822 crypto_aead_clear_flags(tfm
, ~0);
824 crypto_aead_set_flags(tfm
,
825 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS
);
826 if (template[i
].klen
> MAX_KEYLEN
) {
827 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
828 d
, j
, algo
, template[i
].klen
, MAX_KEYLEN
);
832 memcpy(key
, template[i
].key
, template[i
].klen
);
834 ret
= crypto_aead_setkey(tfm
, key
, template[i
].klen
);
835 if (template[i
].fail
== !ret
) {
836 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
837 d
, j
, algo
, crypto_aead_get_flags(tfm
));
842 authsize
= template[i
].clen
- template[i
].plen
;
845 sg_init_table(sg
, template[i
].anp
+ template[i
].np
);
847 sg_init_table(sgout
, template[i
].anp
+ template[i
].np
);
850 for (k
= 0, temp
= 0; k
< template[i
].anp
; k
++) {
851 if (WARN_ON(offset_in_page(IDX
[k
]) +
852 template[i
].atap
[k
] > PAGE_SIZE
))
855 memcpy(axbuf
[IDX
[k
] >> PAGE_SHIFT
] +
856 offset_in_page(IDX
[k
]),
857 template[i
].assoc
+ temp
,
858 template[i
].atap
[k
]),
859 template[i
].atap
[k
]);
861 sg_set_buf(&sgout
[k
],
862 axbuf
[IDX
[k
] >> PAGE_SHIFT
] +
863 offset_in_page(IDX
[k
]),
864 template[i
].atap
[k
]);
865 temp
+= template[i
].atap
[k
];
868 for (k
= 0, temp
= 0; k
< template[i
].np
; k
++) {
869 n
= template[i
].tap
[k
];
870 if (k
== template[i
].np
- 1 && !enc
)
873 if (WARN_ON(offset_in_page(IDX
[k
]) + n
> PAGE_SIZE
))
876 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] + offset_in_page(IDX
[k
]);
877 memcpy(q
, input
+ temp
, n
);
878 sg_set_buf(&sg
[template[i
].anp
+ k
], q
, n
);
881 q
= xoutbuf
[IDX
[k
] >> PAGE_SHIFT
] +
882 offset_in_page(IDX
[k
]);
886 sg_set_buf(&sgout
[template[i
].anp
+ k
], q
, n
);
889 if (k
== template[i
].np
- 1 && enc
)
891 if (offset_in_page(q
) + n
< PAGE_SIZE
)
897 ret
= crypto_aead_setauthsize(tfm
, authsize
);
899 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
900 d
, authsize
, j
, algo
);
905 if (WARN_ON(sg
[template[i
].anp
+ k
- 1].offset
+
906 sg
[template[i
].anp
+ k
- 1].length
+
907 authsize
> PAGE_SIZE
)) {
913 sgout
[template[i
].anp
+ k
- 1].length
+=
915 sg
[template[i
].anp
+ k
- 1].length
+= authsize
;
918 aead_request_set_crypt(req
, sg
, (diff_dst
) ? sgout
: sg
,
921 aead_request_set_ad(req
, template[i
].alen
);
923 ret
= crypto_wait_req(enc
? crypto_aead_encrypt(req
)
924 : crypto_aead_decrypt(req
), &wait
);
928 if (template[i
].novrfy
) {
929 /* verification was supposed to fail */
930 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
932 /* so really, we got a bad message */
938 if (template[i
].novrfy
)
939 /* verification failure was expected */
943 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
944 d
, e
, j
, algo
, -ret
);
949 for (k
= 0, temp
= 0; k
< template[i
].np
; k
++) {
951 q
= xoutbuf
[IDX
[k
] >> PAGE_SHIFT
] +
952 offset_in_page(IDX
[k
]);
954 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
955 offset_in_page(IDX
[k
]);
957 n
= template[i
].tap
[k
];
958 if (k
== template[i
].np
- 1 && enc
)
961 if (memcmp(q
, expected_output
+ temp
, n
)) {
962 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
969 if (k
== template[i
].np
- 1 && !enc
) {
970 if (!diff_dst
&& memcmp(q
, input
+ temp
+ n
,
976 for (n
= 0; offset_in_page(q
+ n
) && q
[n
]; n
++)
980 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
981 d
, j
, e
, k
, algo
, n
);
986 temp
+= template[i
].tap
[k
];
993 aead_request_free(req
);
997 testmgr_free_buf(xoutbuf
);
999 testmgr_free_buf(axbuf
);
1001 testmgr_free_buf(xbuf
);
1008 static int test_aead(struct crypto_aead
*tfm
, int enc
,
1009 const struct aead_testvec
*template, unsigned int tcount
)
1011 unsigned int alignmask
;
1014 /* test 'dst == src' case */
1015 ret
= __test_aead(tfm
, enc
, template, tcount
, false, 0);
1019 /* test 'dst != src' case */
1020 ret
= __test_aead(tfm
, enc
, template, tcount
, true, 0);
1024 /* test unaligned buffers, check with one byte offset */
1025 ret
= __test_aead(tfm
, enc
, template, tcount
, true, 1);
1029 alignmask
= crypto_tfm_alg_alignmask(&tfm
->base
);
1031 /* Check if alignment mask for tfm is correctly set. */
1032 ret
= __test_aead(tfm
, enc
, template, tcount
, true,
1041 static int test_cipher(struct crypto_cipher
*tfm
, int enc
,
1042 const struct cipher_testvec
*template,
1043 unsigned int tcount
)
1045 const char *algo
= crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm
));
1046 unsigned int i
, j
, k
;
1049 const char *input
, *result
;
1051 char *xbuf
[XBUFSIZE
];
1054 if (testmgr_alloc_buf(xbuf
))
1063 for (i
= 0; i
< tcount
; i
++) {
1067 if (fips_enabled
&& template[i
].fips_skip
)
1070 input
= enc
? template[i
].ptext
: template[i
].ctext
;
1071 result
= enc
? template[i
].ctext
: template[i
].ptext
;
1075 if (WARN_ON(template[i
].len
> PAGE_SIZE
))
1079 memcpy(data
, input
, template[i
].len
);
1081 crypto_cipher_clear_flags(tfm
, ~0);
1083 crypto_cipher_set_flags(tfm
, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS
);
1085 ret
= crypto_cipher_setkey(tfm
, template[i
].key
,
1087 if (template[i
].fail
== !ret
) {
1088 printk(KERN_ERR
"alg: cipher: setkey failed "
1089 "on test %d for %s: flags=%x\n", j
,
1090 algo
, crypto_cipher_get_flags(tfm
));
1095 for (k
= 0; k
< template[i
].len
;
1096 k
+= crypto_cipher_blocksize(tfm
)) {
1098 crypto_cipher_encrypt_one(tfm
, data
+ k
,
1101 crypto_cipher_decrypt_one(tfm
, data
+ k
,
1106 if (memcmp(q
, result
, template[i
].len
)) {
1107 printk(KERN_ERR
"alg: cipher: Test %d failed "
1108 "on %s for %s\n", j
, e
, algo
);
1109 hexdump(q
, template[i
].len
);
1118 testmgr_free_buf(xbuf
);
1123 static int __test_skcipher(struct crypto_skcipher
*tfm
, int enc
,
1124 const struct cipher_testvec
*template,
1125 unsigned int tcount
,
1126 const bool diff_dst
, const int align_offset
)
1129 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm
));
1130 unsigned int i
, j
, k
, n
, temp
;
1132 struct skcipher_request
*req
;
1133 struct scatterlist sg
[8];
1134 struct scatterlist sgout
[8];
1136 struct crypto_wait wait
;
1137 const char *input
, *result
;
1140 char *xbuf
[XBUFSIZE
];
1141 char *xoutbuf
[XBUFSIZE
];
1143 unsigned int ivsize
= crypto_skcipher_ivsize(tfm
);
1145 if (testmgr_alloc_buf(xbuf
))
1148 if (diff_dst
&& testmgr_alloc_buf(xoutbuf
))
1161 crypto_init_wait(&wait
);
1163 req
= skcipher_request_alloc(tfm
, GFP_KERNEL
);
1165 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1170 skcipher_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
1171 crypto_req_done
, &wait
);
1174 for (i
= 0; i
< tcount
; i
++) {
1175 if (template[i
].np
&& !template[i
].also_non_np
)
1178 if (fips_enabled
&& template[i
].fips_skip
)
1181 if (template[i
].iv
&& !(template[i
].generates_iv
&& enc
))
1182 memcpy(iv
, template[i
].iv
, ivsize
);
1184 memset(iv
, 0, MAX_IVLEN
);
1186 input
= enc
? template[i
].ptext
: template[i
].ctext
;
1187 result
= enc
? template[i
].ctext
: template[i
].ptext
;
1190 if (WARN_ON(align_offset
+ template[i
].len
> PAGE_SIZE
))
1194 data
+= align_offset
;
1195 memcpy(data
, input
, template[i
].len
);
1197 crypto_skcipher_clear_flags(tfm
, ~0);
1199 crypto_skcipher_set_flags(tfm
, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS
);
1201 ret
= crypto_skcipher_setkey(tfm
, template[i
].key
,
1203 if (template[i
].fail
== !ret
) {
1204 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1205 d
, j
, algo
, crypto_skcipher_get_flags(tfm
));
1210 sg_init_one(&sg
[0], data
, template[i
].len
);
1213 data
+= align_offset
;
1214 sg_init_one(&sgout
[0], data
, template[i
].len
);
1217 skcipher_request_set_crypt(req
, sg
, (diff_dst
) ? sgout
: sg
,
1218 template[i
].len
, iv
);
1219 ret
= crypto_wait_req(enc
? crypto_skcipher_encrypt(req
) :
1220 crypto_skcipher_decrypt(req
), &wait
);
1223 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1224 d
, e
, j
, algo
, -ret
);
1229 if (memcmp(q
, result
, template[i
].len
)) {
1230 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1232 hexdump(q
, template[i
].len
);
1237 if (template[i
].generates_iv
&& enc
&&
1238 memcmp(iv
, template[i
].iv
, crypto_skcipher_ivsize(tfm
))) {
1239 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1241 hexdump(iv
, crypto_skcipher_ivsize(tfm
));
1248 for (i
= 0; i
< tcount
; i
++) {
1249 /* alignment tests are only done with continuous buffers */
1250 if (align_offset
!= 0)
1253 if (!template[i
].np
)
1256 if (fips_enabled
&& template[i
].fips_skip
)
1259 if (template[i
].iv
&& !(template[i
].generates_iv
&& enc
))
1260 memcpy(iv
, template[i
].iv
, ivsize
);
1262 memset(iv
, 0, MAX_IVLEN
);
1264 input
= enc
? template[i
].ptext
: template[i
].ctext
;
1265 result
= enc
? template[i
].ctext
: template[i
].ptext
;
1267 crypto_skcipher_clear_flags(tfm
, ~0);
1269 crypto_skcipher_set_flags(tfm
, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS
);
1271 ret
= crypto_skcipher_setkey(tfm
, template[i
].key
,
1273 if (template[i
].fail
== !ret
) {
1274 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1275 d
, j
, algo
, crypto_skcipher_get_flags(tfm
));
1282 sg_init_table(sg
, template[i
].np
);
1284 sg_init_table(sgout
, template[i
].np
);
1285 for (k
= 0; k
< template[i
].np
; k
++) {
1286 if (WARN_ON(offset_in_page(IDX
[k
]) +
1287 template[i
].tap
[k
] > PAGE_SIZE
))
1290 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] + offset_in_page(IDX
[k
]);
1292 memcpy(q
, input
+ temp
, template[i
].tap
[k
]);
1294 if (offset_in_page(q
) + template[i
].tap
[k
] < PAGE_SIZE
)
1295 q
[template[i
].tap
[k
]] = 0;
1297 sg_set_buf(&sg
[k
], q
, template[i
].tap
[k
]);
1299 q
= xoutbuf
[IDX
[k
] >> PAGE_SHIFT
] +
1300 offset_in_page(IDX
[k
]);
1302 sg_set_buf(&sgout
[k
], q
, template[i
].tap
[k
]);
1304 memset(q
, 0, template[i
].tap
[k
]);
1305 if (offset_in_page(q
) +
1306 template[i
].tap
[k
] < PAGE_SIZE
)
1307 q
[template[i
].tap
[k
]] = 0;
1310 temp
+= template[i
].tap
[k
];
1313 skcipher_request_set_crypt(req
, sg
, (diff_dst
) ? sgout
: sg
,
1314 template[i
].len
, iv
);
1316 ret
= crypto_wait_req(enc
? crypto_skcipher_encrypt(req
) :
1317 crypto_skcipher_decrypt(req
), &wait
);
1320 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1321 d
, e
, j
, algo
, -ret
);
1327 for (k
= 0; k
< template[i
].np
; k
++) {
1329 q
= xoutbuf
[IDX
[k
] >> PAGE_SHIFT
] +
1330 offset_in_page(IDX
[k
]);
1332 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
1333 offset_in_page(IDX
[k
]);
1335 if (memcmp(q
, result
+ temp
, template[i
].tap
[k
])) {
1336 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1338 hexdump(q
, template[i
].tap
[k
]);
1342 q
+= template[i
].tap
[k
];
1343 for (n
= 0; offset_in_page(q
+ n
) && q
[n
]; n
++)
1346 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1347 d
, j
, e
, k
, algo
, n
);
1351 temp
+= template[i
].tap
[k
];
1358 skcipher_request_free(req
);
1360 testmgr_free_buf(xoutbuf
);
1362 testmgr_free_buf(xbuf
);
1367 static int test_skcipher(struct crypto_skcipher
*tfm
, int enc
,
1368 const struct cipher_testvec
*template,
1369 unsigned int tcount
)
1371 unsigned int alignmask
;
1374 /* test 'dst == src' case */
1375 ret
= __test_skcipher(tfm
, enc
, template, tcount
, false, 0);
1379 /* test 'dst != src' case */
1380 ret
= __test_skcipher(tfm
, enc
, template, tcount
, true, 0);
1384 /* test unaligned buffers, check with one byte offset */
1385 ret
= __test_skcipher(tfm
, enc
, template, tcount
, true, 1);
1389 alignmask
= crypto_tfm_alg_alignmask(&tfm
->base
);
1391 /* Check if alignment mask for tfm is correctly set. */
1392 ret
= __test_skcipher(tfm
, enc
, template, tcount
, true,
1401 static int test_comp(struct crypto_comp
*tfm
,
1402 const struct comp_testvec
*ctemplate
,
1403 const struct comp_testvec
*dtemplate
,
1404 int ctcount
, int dtcount
)
1406 const char *algo
= crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm
));
1407 char *output
, *decomp_output
;
1411 output
= kmalloc(COMP_BUF_SIZE
, GFP_KERNEL
);
1415 decomp_output
= kmalloc(COMP_BUF_SIZE
, GFP_KERNEL
);
1416 if (!decomp_output
) {
1421 for (i
= 0; i
< ctcount
; i
++) {
1423 unsigned int dlen
= COMP_BUF_SIZE
;
1425 memset(output
, 0, COMP_BUF_SIZE
);
1426 memset(decomp_output
, 0, COMP_BUF_SIZE
);
1428 ilen
= ctemplate
[i
].inlen
;
1429 ret
= crypto_comp_compress(tfm
, ctemplate
[i
].input
,
1430 ilen
, output
, &dlen
);
1432 printk(KERN_ERR
"alg: comp: compression failed "
1433 "on test %d for %s: ret=%d\n", i
+ 1, algo
,
1439 dlen
= COMP_BUF_SIZE
;
1440 ret
= crypto_comp_decompress(tfm
, output
,
1441 ilen
, decomp_output
, &dlen
);
1443 pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
1448 if (dlen
!= ctemplate
[i
].inlen
) {
1449 printk(KERN_ERR
"alg: comp: Compression test %d "
1450 "failed for %s: output len = %d\n", i
+ 1, algo
,
1456 if (memcmp(decomp_output
, ctemplate
[i
].input
,
1457 ctemplate
[i
].inlen
)) {
1458 pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
1460 hexdump(decomp_output
, dlen
);
1466 for (i
= 0; i
< dtcount
; i
++) {
1468 unsigned int dlen
= COMP_BUF_SIZE
;
1470 memset(decomp_output
, 0, COMP_BUF_SIZE
);
1472 ilen
= dtemplate
[i
].inlen
;
1473 ret
= crypto_comp_decompress(tfm
, dtemplate
[i
].input
,
1474 ilen
, decomp_output
, &dlen
);
1476 printk(KERN_ERR
"alg: comp: decompression failed "
1477 "on test %d for %s: ret=%d\n", i
+ 1, algo
,
1482 if (dlen
!= dtemplate
[i
].outlen
) {
1483 printk(KERN_ERR
"alg: comp: Decompression test %d "
1484 "failed for %s: output len = %d\n", i
+ 1, algo
,
1490 if (memcmp(decomp_output
, dtemplate
[i
].output
, dlen
)) {
1491 printk(KERN_ERR
"alg: comp: Decompression test %d "
1492 "failed for %s\n", i
+ 1, algo
);
1493 hexdump(decomp_output
, dlen
);
1502 kfree(decomp_output
);
1507 static int test_acomp(struct crypto_acomp
*tfm
,
1508 const struct comp_testvec
*ctemplate
,
1509 const struct comp_testvec
*dtemplate
,
1510 int ctcount
, int dtcount
)
1512 const char *algo
= crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm
));
1514 char *output
, *decomp_out
;
1516 struct scatterlist src
, dst
;
1517 struct acomp_req
*req
;
1518 struct crypto_wait wait
;
1520 output
= kmalloc(COMP_BUF_SIZE
, GFP_KERNEL
);
1524 decomp_out
= kmalloc(COMP_BUF_SIZE
, GFP_KERNEL
);
1530 for (i
= 0; i
< ctcount
; i
++) {
1531 unsigned int dlen
= COMP_BUF_SIZE
;
1532 int ilen
= ctemplate
[i
].inlen
;
1535 input_vec
= kmemdup(ctemplate
[i
].input
, ilen
, GFP_KERNEL
);
1541 memset(output
, 0, dlen
);
1542 crypto_init_wait(&wait
);
1543 sg_init_one(&src
, input_vec
, ilen
);
1544 sg_init_one(&dst
, output
, dlen
);
1546 req
= acomp_request_alloc(tfm
);
1548 pr_err("alg: acomp: request alloc failed for %s\n",
1555 acomp_request_set_params(req
, &src
, &dst
, ilen
, dlen
);
1556 acomp_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
1557 crypto_req_done
, &wait
);
1559 ret
= crypto_wait_req(crypto_acomp_compress(req
), &wait
);
1561 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1564 acomp_request_free(req
);
1569 dlen
= COMP_BUF_SIZE
;
1570 sg_init_one(&src
, output
, ilen
);
1571 sg_init_one(&dst
, decomp_out
, dlen
);
1572 crypto_init_wait(&wait
);
1573 acomp_request_set_params(req
, &src
, &dst
, ilen
, dlen
);
1575 ret
= crypto_wait_req(crypto_acomp_decompress(req
), &wait
);
1577 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1580 acomp_request_free(req
);
1584 if (req
->dlen
!= ctemplate
[i
].inlen
) {
1585 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
1586 i
+ 1, algo
, req
->dlen
);
1589 acomp_request_free(req
);
1593 if (memcmp(input_vec
, decomp_out
, req
->dlen
)) {
1594 pr_err("alg: acomp: Compression test %d failed for %s\n",
1596 hexdump(output
, req
->dlen
);
1599 acomp_request_free(req
);
1604 acomp_request_free(req
);
1607 for (i
= 0; i
< dtcount
; i
++) {
1608 unsigned int dlen
= COMP_BUF_SIZE
;
1609 int ilen
= dtemplate
[i
].inlen
;
1612 input_vec
= kmemdup(dtemplate
[i
].input
, ilen
, GFP_KERNEL
);
1618 memset(output
, 0, dlen
);
1619 crypto_init_wait(&wait
);
1620 sg_init_one(&src
, input_vec
, ilen
);
1621 sg_init_one(&dst
, output
, dlen
);
1623 req
= acomp_request_alloc(tfm
);
1625 pr_err("alg: acomp: request alloc failed for %s\n",
1632 acomp_request_set_params(req
, &src
, &dst
, ilen
, dlen
);
1633 acomp_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
1634 crypto_req_done
, &wait
);
1636 ret
= crypto_wait_req(crypto_acomp_decompress(req
), &wait
);
1638 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1641 acomp_request_free(req
);
1645 if (req
->dlen
!= dtemplate
[i
].outlen
) {
1646 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
1647 i
+ 1, algo
, req
->dlen
);
1650 acomp_request_free(req
);
1654 if (memcmp(output
, dtemplate
[i
].output
, req
->dlen
)) {
1655 pr_err("alg: acomp: Decompression test %d failed for %s\n",
1657 hexdump(output
, req
->dlen
);
1660 acomp_request_free(req
);
1665 acomp_request_free(req
);
1676 static int test_cprng(struct crypto_rng
*tfm
,
1677 const struct cprng_testvec
*template,
1678 unsigned int tcount
)
1680 const char *algo
= crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm
));
1681 int err
= 0, i
, j
, seedsize
;
1685 seedsize
= crypto_rng_seedsize(tfm
);
1687 seed
= kmalloc(seedsize
, GFP_KERNEL
);
1689 printk(KERN_ERR
"alg: cprng: Failed to allocate seed space "
1694 for (i
= 0; i
< tcount
; i
++) {
1695 memset(result
, 0, 32);
1697 memcpy(seed
, template[i
].v
, template[i
].vlen
);
1698 memcpy(seed
+ template[i
].vlen
, template[i
].key
,
1700 memcpy(seed
+ template[i
].vlen
+ template[i
].klen
,
1701 template[i
].dt
, template[i
].dtlen
);
1703 err
= crypto_rng_reset(tfm
, seed
, seedsize
);
1705 printk(KERN_ERR
"alg: cprng: Failed to reset rng "
1710 for (j
= 0; j
< template[i
].loops
; j
++) {
1711 err
= crypto_rng_get_bytes(tfm
, result
,
1714 printk(KERN_ERR
"alg: cprng: Failed to obtain "
1715 "the correct amount of random data for "
1716 "%s (requested %d)\n", algo
,
1722 err
= memcmp(result
, template[i
].result
,
1725 printk(KERN_ERR
"alg: cprng: Test %d failed for %s\n",
1727 hexdump(result
, template[i
].rlen
);
1738 static int alg_test_aead(const struct alg_test_desc
*desc
, const char *driver
,
1741 const struct aead_test_suite
*suite
= &desc
->suite
.aead
;
1742 struct crypto_aead
*tfm
;
1745 tfm
= crypto_alloc_aead(driver
, type
, mask
);
1747 printk(KERN_ERR
"alg: aead: Failed to load transform for %s: "
1748 "%ld\n", driver
, PTR_ERR(tfm
));
1749 return PTR_ERR(tfm
);
1752 err
= test_aead(tfm
, ENCRYPT
, suite
->vecs
, suite
->count
);
1754 err
= test_aead(tfm
, DECRYPT
, suite
->vecs
, suite
->count
);
1756 crypto_free_aead(tfm
);
1760 static int alg_test_cipher(const struct alg_test_desc
*desc
,
1761 const char *driver
, u32 type
, u32 mask
)
1763 const struct cipher_test_suite
*suite
= &desc
->suite
.cipher
;
1764 struct crypto_cipher
*tfm
;
1767 tfm
= crypto_alloc_cipher(driver
, type
, mask
);
1769 printk(KERN_ERR
"alg: cipher: Failed to load transform for "
1770 "%s: %ld\n", driver
, PTR_ERR(tfm
));
1771 return PTR_ERR(tfm
);
1774 err
= test_cipher(tfm
, ENCRYPT
, suite
->vecs
, suite
->count
);
1776 err
= test_cipher(tfm
, DECRYPT
, suite
->vecs
, suite
->count
);
1778 crypto_free_cipher(tfm
);
1782 static int alg_test_skcipher(const struct alg_test_desc
*desc
,
1783 const char *driver
, u32 type
, u32 mask
)
1785 const struct cipher_test_suite
*suite
= &desc
->suite
.cipher
;
1786 struct crypto_skcipher
*tfm
;
1789 tfm
= crypto_alloc_skcipher(driver
, type
, mask
);
1791 printk(KERN_ERR
"alg: skcipher: Failed to load transform for "
1792 "%s: %ld\n", driver
, PTR_ERR(tfm
));
1793 return PTR_ERR(tfm
);
1796 err
= test_skcipher(tfm
, ENCRYPT
, suite
->vecs
, suite
->count
);
1798 err
= test_skcipher(tfm
, DECRYPT
, suite
->vecs
, suite
->count
);
1800 crypto_free_skcipher(tfm
);
1804 static int alg_test_comp(const struct alg_test_desc
*desc
, const char *driver
,
1807 struct crypto_comp
*comp
;
1808 struct crypto_acomp
*acomp
;
1810 u32 algo_type
= type
& CRYPTO_ALG_TYPE_ACOMPRESS_MASK
;
1812 if (algo_type
== CRYPTO_ALG_TYPE_ACOMPRESS
) {
1813 acomp
= crypto_alloc_acomp(driver
, type
, mask
);
1814 if (IS_ERR(acomp
)) {
1815 pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1816 driver
, PTR_ERR(acomp
));
1817 return PTR_ERR(acomp
);
1819 err
= test_acomp(acomp
, desc
->suite
.comp
.comp
.vecs
,
1820 desc
->suite
.comp
.decomp
.vecs
,
1821 desc
->suite
.comp
.comp
.count
,
1822 desc
->suite
.comp
.decomp
.count
);
1823 crypto_free_acomp(acomp
);
1825 comp
= crypto_alloc_comp(driver
, type
, mask
);
1827 pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1828 driver
, PTR_ERR(comp
));
1829 return PTR_ERR(comp
);
1832 err
= test_comp(comp
, desc
->suite
.comp
.comp
.vecs
,
1833 desc
->suite
.comp
.decomp
.vecs
,
1834 desc
->suite
.comp
.comp
.count
,
1835 desc
->suite
.comp
.decomp
.count
);
1837 crypto_free_comp(comp
);
1842 static int __alg_test_hash(const struct hash_testvec
*template,
1843 unsigned int tcount
, const char *driver
,
1846 struct crypto_ahash
*tfm
;
1849 tfm
= crypto_alloc_ahash(driver
, type
, mask
);
1851 printk(KERN_ERR
"alg: hash: Failed to load transform for %s: "
1852 "%ld\n", driver
, PTR_ERR(tfm
));
1853 return PTR_ERR(tfm
);
1856 err
= test_hash(tfm
, template, tcount
, HASH_TEST_DIGEST
);
1858 err
= test_hash(tfm
, template, tcount
, HASH_TEST_FINAL
);
1860 err
= test_hash(tfm
, template, tcount
, HASH_TEST_FINUP
);
1861 crypto_free_ahash(tfm
);
1865 static int alg_test_hash(const struct alg_test_desc
*desc
, const char *driver
,
1868 const struct hash_testvec
*template = desc
->suite
.hash
.vecs
;
1869 unsigned int tcount
= desc
->suite
.hash
.count
;
1870 unsigned int nr_unkeyed
, nr_keyed
;
1874 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
1875 * first, before setting a key on the tfm. To make this easier, we
1876 * require that the unkeyed test vectors (if any) are listed first.
1879 for (nr_unkeyed
= 0; nr_unkeyed
< tcount
; nr_unkeyed
++) {
1880 if (template[nr_unkeyed
].ksize
)
1883 for (nr_keyed
= 0; nr_unkeyed
+ nr_keyed
< tcount
; nr_keyed
++) {
1884 if (!template[nr_unkeyed
+ nr_keyed
].ksize
) {
1885 pr_err("alg: hash: test vectors for %s out of order, "
1886 "unkeyed ones must come first\n", desc
->alg
);
1893 err
= __alg_test_hash(template, nr_unkeyed
, driver
, type
, mask
);
1894 template += nr_unkeyed
;
1897 if (!err
&& nr_keyed
)
1898 err
= __alg_test_hash(template, nr_keyed
, driver
, type
, mask
);
1903 static int alg_test_crc32c(const struct alg_test_desc
*desc
,
1904 const char *driver
, u32 type
, u32 mask
)
1906 struct crypto_shash
*tfm
;
1910 err
= alg_test_hash(desc
, driver
, type
, mask
);
1914 tfm
= crypto_alloc_shash(driver
, type
, mask
);
1916 printk(KERN_ERR
"alg: crc32c: Failed to load transform for %s: "
1917 "%ld\n", driver
, PTR_ERR(tfm
));
1923 SHASH_DESC_ON_STACK(shash
, tfm
);
1924 u32
*ctx
= (u32
*)shash_desc_ctx(shash
);
1930 err
= crypto_shash_final(shash
, (u8
*)&val
);
1932 printk(KERN_ERR
"alg: crc32c: Operation failed for "
1933 "%s: %d\n", driver
, err
);
1937 if (val
!= cpu_to_le32(~420553207)) {
1938 pr_err("alg: crc32c: Test failed for %s: %u\n",
1939 driver
, le32_to_cpu(val
));
1944 crypto_free_shash(tfm
);
1950 static int alg_test_cprng(const struct alg_test_desc
*desc
, const char *driver
,
1953 struct crypto_rng
*rng
;
1956 rng
= crypto_alloc_rng(driver
, type
, mask
);
1958 printk(KERN_ERR
"alg: cprng: Failed to load transform for %s: "
1959 "%ld\n", driver
, PTR_ERR(rng
));
1960 return PTR_ERR(rng
);
1963 err
= test_cprng(rng
, desc
->suite
.cprng
.vecs
, desc
->suite
.cprng
.count
);
1965 crypto_free_rng(rng
);
1971 static int drbg_cavs_test(const struct drbg_testvec
*test
, int pr
,
1972 const char *driver
, u32 type
, u32 mask
)
1975 struct crypto_rng
*drng
;
1976 struct drbg_test_data test_data
;
1977 struct drbg_string addtl
, pers
, testentropy
;
1978 unsigned char *buf
= kzalloc(test
->expectedlen
, GFP_KERNEL
);
1983 drng
= crypto_alloc_rng(driver
, type
, mask
);
1985 printk(KERN_ERR
"alg: drbg: could not allocate DRNG handle for "
1991 test_data
.testentropy
= &testentropy
;
1992 drbg_string_fill(&testentropy
, test
->entropy
, test
->entropylen
);
1993 drbg_string_fill(&pers
, test
->pers
, test
->perslen
);
1994 ret
= crypto_drbg_reset_test(drng
, &pers
, &test_data
);
1996 printk(KERN_ERR
"alg: drbg: Failed to reset rng\n");
2000 drbg_string_fill(&addtl
, test
->addtla
, test
->addtllen
);
2002 drbg_string_fill(&testentropy
, test
->entpra
, test
->entprlen
);
2003 ret
= crypto_drbg_get_bytes_addtl_test(drng
,
2004 buf
, test
->expectedlen
, &addtl
, &test_data
);
2006 ret
= crypto_drbg_get_bytes_addtl(drng
,
2007 buf
, test
->expectedlen
, &addtl
);
2010 printk(KERN_ERR
"alg: drbg: could not obtain random data for "
2011 "driver %s\n", driver
);
2015 drbg_string_fill(&addtl
, test
->addtlb
, test
->addtllen
);
2017 drbg_string_fill(&testentropy
, test
->entprb
, test
->entprlen
);
2018 ret
= crypto_drbg_get_bytes_addtl_test(drng
,
2019 buf
, test
->expectedlen
, &addtl
, &test_data
);
2021 ret
= crypto_drbg_get_bytes_addtl(drng
,
2022 buf
, test
->expectedlen
, &addtl
);
2025 printk(KERN_ERR
"alg: drbg: could not obtain random data for "
2026 "driver %s\n", driver
);
2030 ret
= memcmp(test
->expected
, buf
, test
->expectedlen
);
2033 crypto_free_rng(drng
);
2039 static int alg_test_drbg(const struct alg_test_desc
*desc
, const char *driver
,
2045 const struct drbg_testvec
*template = desc
->suite
.drbg
.vecs
;
2046 unsigned int tcount
= desc
->suite
.drbg
.count
;
2048 if (0 == memcmp(driver
, "drbg_pr_", 8))
2051 for (i
= 0; i
< tcount
; i
++) {
2052 err
= drbg_cavs_test(&template[i
], pr
, driver
, type
, mask
);
2054 printk(KERN_ERR
"alg: drbg: Test %d failed for %s\n",
2064 static int do_test_kpp(struct crypto_kpp
*tfm
, const struct kpp_testvec
*vec
,
2067 struct kpp_request
*req
;
2068 void *input_buf
= NULL
;
2069 void *output_buf
= NULL
;
2070 void *a_public
= NULL
;
2072 void *shared_secret
= NULL
;
2073 struct crypto_wait wait
;
2074 unsigned int out_len_max
;
2076 struct scatterlist src
, dst
;
2078 req
= kpp_request_alloc(tfm
, GFP_KERNEL
);
2082 crypto_init_wait(&wait
);
2084 err
= crypto_kpp_set_secret(tfm
, vec
->secret
, vec
->secret_size
);
2088 out_len_max
= crypto_kpp_maxsize(tfm
);
2089 output_buf
= kzalloc(out_len_max
, GFP_KERNEL
);
2095 /* Use appropriate parameter as base */
2096 kpp_request_set_input(req
, NULL
, 0);
2097 sg_init_one(&dst
, output_buf
, out_len_max
);
2098 kpp_request_set_output(req
, &dst
, out_len_max
);
2099 kpp_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
2100 crypto_req_done
, &wait
);
2102 /* Compute party A's public key */
2103 err
= crypto_wait_req(crypto_kpp_generate_public_key(req
), &wait
);
2105 pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
2111 /* Save party A's public key */
2112 a_public
= kzalloc(out_len_max
, GFP_KERNEL
);
2117 memcpy(a_public
, sg_virt(req
->dst
), out_len_max
);
2119 /* Verify calculated public key */
2120 if (memcmp(vec
->expected_a_public
, sg_virt(req
->dst
),
2121 vec
->expected_a_public_size
)) {
2122 pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
2129 /* Calculate shared secret key by using counter part (b) public key. */
2130 input_buf
= kzalloc(vec
->b_public_size
, GFP_KERNEL
);
2136 memcpy(input_buf
, vec
->b_public
, vec
->b_public_size
);
2137 sg_init_one(&src
, input_buf
, vec
->b_public_size
);
2138 sg_init_one(&dst
, output_buf
, out_len_max
);
2139 kpp_request_set_input(req
, &src
, vec
->b_public_size
);
2140 kpp_request_set_output(req
, &dst
, out_len_max
);
2141 kpp_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
2142 crypto_req_done
, &wait
);
2143 err
= crypto_wait_req(crypto_kpp_compute_shared_secret(req
), &wait
);
2145 pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
2151 /* Save the shared secret obtained by party A */
2152 a_ss
= kzalloc(vec
->expected_ss_size
, GFP_KERNEL
);
2157 memcpy(a_ss
, sg_virt(req
->dst
), vec
->expected_ss_size
);
2160 * Calculate party B's shared secret by using party A's
2163 err
= crypto_kpp_set_secret(tfm
, vec
->b_secret
,
2164 vec
->b_secret_size
);
2168 sg_init_one(&src
, a_public
, vec
->expected_a_public_size
);
2169 sg_init_one(&dst
, output_buf
, out_len_max
);
2170 kpp_request_set_input(req
, &src
, vec
->expected_a_public_size
);
2171 kpp_request_set_output(req
, &dst
, out_len_max
);
2172 kpp_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
2173 crypto_req_done
, &wait
);
2174 err
= crypto_wait_req(crypto_kpp_compute_shared_secret(req
),
2177 pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
2182 shared_secret
= a_ss
;
2184 shared_secret
= (void *)vec
->expected_ss
;
2188 * verify shared secret from which the user will derive
2189 * secret key by executing whatever hash it has chosen
2191 if (memcmp(shared_secret
, sg_virt(req
->dst
),
2192 vec
->expected_ss_size
)) {
2193 pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
2205 kpp_request_free(req
);
2209 static int test_kpp(struct crypto_kpp
*tfm
, const char *alg
,
2210 const struct kpp_testvec
*vecs
, unsigned int tcount
)
2214 for (i
= 0; i
< tcount
; i
++) {
2215 ret
= do_test_kpp(tfm
, vecs
++, alg
);
2217 pr_err("alg: %s: test failed on vector %d, err=%d\n",
2225 static int alg_test_kpp(const struct alg_test_desc
*desc
, const char *driver
,
2228 struct crypto_kpp
*tfm
;
2231 tfm
= crypto_alloc_kpp(driver
, type
, mask
);
2233 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2234 driver
, PTR_ERR(tfm
));
2235 return PTR_ERR(tfm
);
2237 if (desc
->suite
.kpp
.vecs
)
2238 err
= test_kpp(tfm
, desc
->alg
, desc
->suite
.kpp
.vecs
,
2239 desc
->suite
.kpp
.count
);
2241 crypto_free_kpp(tfm
);
2245 static int test_akcipher_one(struct crypto_akcipher
*tfm
,
2246 const struct akcipher_testvec
*vecs
)
2248 char *xbuf
[XBUFSIZE
];
2249 struct akcipher_request
*req
;
2250 void *outbuf_enc
= NULL
;
2251 void *outbuf_dec
= NULL
;
2252 struct crypto_wait wait
;
2253 unsigned int out_len_max
, out_len
= 0;
2255 struct scatterlist src
, dst
, src_tab
[2];
2257 unsigned int m_size
, c_size
;
2260 if (testmgr_alloc_buf(xbuf
))
2263 req
= akcipher_request_alloc(tfm
, GFP_KERNEL
);
2267 crypto_init_wait(&wait
);
2269 if (vecs
->public_key_vec
)
2270 err
= crypto_akcipher_set_pub_key(tfm
, vecs
->key
,
2273 err
= crypto_akcipher_set_priv_key(tfm
, vecs
->key
,
2279 out_len_max
= crypto_akcipher_maxsize(tfm
);
2282 * First run test which do not require a private key, such as
2283 * encrypt or verify.
2285 outbuf_enc
= kzalloc(out_len_max
, GFP_KERNEL
);
2289 if (!vecs
->siggen_sigver_test
) {
2291 m_size
= vecs
->m_size
;
2293 c_size
= vecs
->c_size
;
2296 /* Swap args so we could keep plaintext (digest)
2297 * in vecs->m, and cooked signature in vecs->c.
2299 m
= vecs
->c
; /* signature */
2300 m_size
= vecs
->c_size
;
2301 c
= vecs
->m
; /* digest */
2302 c_size
= vecs
->m_size
;
2306 if (WARN_ON(m_size
> PAGE_SIZE
))
2308 memcpy(xbuf
[0], m
, m_size
);
2310 sg_init_table(src_tab
, 2);
2311 sg_set_buf(&src_tab
[0], xbuf
[0], 8);
2312 sg_set_buf(&src_tab
[1], xbuf
[0] + 8, m_size
- 8);
2313 sg_init_one(&dst
, outbuf_enc
, out_len_max
);
2314 akcipher_request_set_crypt(req
, src_tab
, &dst
, m_size
,
2316 akcipher_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
2317 crypto_req_done
, &wait
);
2319 err
= crypto_wait_req(vecs
->siggen_sigver_test
?
2320 /* Run asymmetric signature verification */
2321 crypto_akcipher_verify(req
) :
2322 /* Run asymmetric encrypt */
2323 crypto_akcipher_encrypt(req
), &wait
);
2325 pr_err("alg: akcipher: %s test failed. err %d\n", op
, err
);
2328 if (req
->dst_len
!= c_size
) {
2329 pr_err("alg: akcipher: %s test failed. Invalid output len\n",
2334 /* verify that encrypted message is equal to expected */
2335 if (memcmp(c
, outbuf_enc
, c_size
)) {
2336 pr_err("alg: akcipher: %s test failed. Invalid output\n", op
);
2337 hexdump(outbuf_enc
, c_size
);
2343 * Don't invoke (decrypt or sign) test which require a private key
2344 * for vectors with only a public key.
2346 if (vecs
->public_key_vec
) {
2350 outbuf_dec
= kzalloc(out_len_max
, GFP_KERNEL
);
2356 op
= vecs
->siggen_sigver_test
? "sign" : "decrypt";
2357 if (WARN_ON(c_size
> PAGE_SIZE
))
2359 memcpy(xbuf
[0], c
, c_size
);
2361 sg_init_one(&src
, xbuf
[0], c_size
);
2362 sg_init_one(&dst
, outbuf_dec
, out_len_max
);
2363 crypto_init_wait(&wait
);
2364 akcipher_request_set_crypt(req
, &src
, &dst
, c_size
, out_len_max
);
2366 err
= crypto_wait_req(vecs
->siggen_sigver_test
?
2367 /* Run asymmetric signature generation */
2368 crypto_akcipher_sign(req
) :
2369 /* Run asymmetric decrypt */
2370 crypto_akcipher_decrypt(req
), &wait
);
2372 pr_err("alg: akcipher: %s test failed. err %d\n", op
, err
);
2375 out_len
= req
->dst_len
;
2376 if (out_len
< m_size
) {
2377 pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
2382 /* verify that decrypted message is equal to the original msg */
2383 if (memchr_inv(outbuf_dec
, 0, out_len
- m_size
) ||
2384 memcmp(m
, outbuf_dec
+ out_len
- m_size
, m_size
)) {
2385 pr_err("alg: akcipher: %s test failed. Invalid output\n", op
);
2386 hexdump(outbuf_dec
, out_len
);
2393 akcipher_request_free(req
);
2395 testmgr_free_buf(xbuf
);
2399 static int test_akcipher(struct crypto_akcipher
*tfm
, const char *alg
,
2400 const struct akcipher_testvec
*vecs
,
2401 unsigned int tcount
)
2404 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm
));
2407 for (i
= 0; i
< tcount
; i
++) {
2408 ret
= test_akcipher_one(tfm
, vecs
++);
2412 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2419 static int alg_test_akcipher(const struct alg_test_desc
*desc
,
2420 const char *driver
, u32 type
, u32 mask
)
2422 struct crypto_akcipher
*tfm
;
2425 tfm
= crypto_alloc_akcipher(driver
, type
, mask
);
2427 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2428 driver
, PTR_ERR(tfm
));
2429 return PTR_ERR(tfm
);
2431 if (desc
->suite
.akcipher
.vecs
)
2432 err
= test_akcipher(tfm
, desc
->alg
, desc
->suite
.akcipher
.vecs
,
2433 desc
->suite
.akcipher
.count
);
2435 crypto_free_akcipher(tfm
);
2439 static int alg_test_null(const struct alg_test_desc
*desc
,
2440 const char *driver
, u32 type
, u32 mask
)
2445 #define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) }
2447 /* Please keep this list sorted by algorithm name. */
2448 static const struct alg_test_desc alg_test_descs
[] = {
2450 .alg
= "adiantum(xchacha12,aes)",
2451 .test
= alg_test_skcipher
,
2453 .cipher
= __VECS(adiantum_xchacha12_aes_tv_template
)
2456 .alg
= "adiantum(xchacha20,aes)",
2457 .test
= alg_test_skcipher
,
2459 .cipher
= __VECS(adiantum_xchacha20_aes_tv_template
)
2463 .test
= alg_test_aead
,
2465 .aead
= __VECS(aegis128_tv_template
)
2469 .test
= alg_test_aead
,
2471 .aead
= __VECS(aegis128l_tv_template
)
2475 .test
= alg_test_aead
,
2477 .aead
= __VECS(aegis256_tv_template
)
2480 .alg
= "ansi_cprng",
2481 .test
= alg_test_cprng
,
2483 .cprng
= __VECS(ansi_cprng_aes_tv_template
)
2486 .alg
= "authenc(hmac(md5),ecb(cipher_null))",
2487 .test
= alg_test_aead
,
2489 .aead
= __VECS(hmac_md5_ecb_cipher_null_tv_template
)
2492 .alg
= "authenc(hmac(sha1),cbc(aes))",
2493 .test
= alg_test_aead
,
2496 .aead
= __VECS(hmac_sha1_aes_cbc_tv_temp
)
2499 .alg
= "authenc(hmac(sha1),cbc(des))",
2500 .test
= alg_test_aead
,
2502 .aead
= __VECS(hmac_sha1_des_cbc_tv_temp
)
2505 .alg
= "authenc(hmac(sha1),cbc(des3_ede))",
2506 .test
= alg_test_aead
,
2509 .aead
= __VECS(hmac_sha1_des3_ede_cbc_tv_temp
)
2512 .alg
= "authenc(hmac(sha1),ctr(aes))",
2513 .test
= alg_test_null
,
2516 .alg
= "authenc(hmac(sha1),ecb(cipher_null))",
2517 .test
= alg_test_aead
,
2519 .aead
= __VECS(hmac_sha1_ecb_cipher_null_tv_temp
)
2522 .alg
= "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2523 .test
= alg_test_null
,
2526 .alg
= "authenc(hmac(sha224),cbc(des))",
2527 .test
= alg_test_aead
,
2529 .aead
= __VECS(hmac_sha224_des_cbc_tv_temp
)
2532 .alg
= "authenc(hmac(sha224),cbc(des3_ede))",
2533 .test
= alg_test_aead
,
2536 .aead
= __VECS(hmac_sha224_des3_ede_cbc_tv_temp
)
2539 .alg
= "authenc(hmac(sha256),cbc(aes))",
2540 .test
= alg_test_aead
,
2543 .aead
= __VECS(hmac_sha256_aes_cbc_tv_temp
)
2546 .alg
= "authenc(hmac(sha256),cbc(des))",
2547 .test
= alg_test_aead
,
2549 .aead
= __VECS(hmac_sha256_des_cbc_tv_temp
)
2552 .alg
= "authenc(hmac(sha256),cbc(des3_ede))",
2553 .test
= alg_test_aead
,
2556 .aead
= __VECS(hmac_sha256_des3_ede_cbc_tv_temp
)
2559 .alg
= "authenc(hmac(sha256),ctr(aes))",
2560 .test
= alg_test_null
,
2563 .alg
= "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2564 .test
= alg_test_null
,
2567 .alg
= "authenc(hmac(sha384),cbc(des))",
2568 .test
= alg_test_aead
,
2570 .aead
= __VECS(hmac_sha384_des_cbc_tv_temp
)
2573 .alg
= "authenc(hmac(sha384),cbc(des3_ede))",
2574 .test
= alg_test_aead
,
2577 .aead
= __VECS(hmac_sha384_des3_ede_cbc_tv_temp
)
2580 .alg
= "authenc(hmac(sha384),ctr(aes))",
2581 .test
= alg_test_null
,
2584 .alg
= "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2585 .test
= alg_test_null
,
2588 .alg
= "authenc(hmac(sha512),cbc(aes))",
2590 .test
= alg_test_aead
,
2592 .aead
= __VECS(hmac_sha512_aes_cbc_tv_temp
)
2595 .alg
= "authenc(hmac(sha512),cbc(des))",
2596 .test
= alg_test_aead
,
2598 .aead
= __VECS(hmac_sha512_des_cbc_tv_temp
)
2601 .alg
= "authenc(hmac(sha512),cbc(des3_ede))",
2602 .test
= alg_test_aead
,
2605 .aead
= __VECS(hmac_sha512_des3_ede_cbc_tv_temp
)
2608 .alg
= "authenc(hmac(sha512),ctr(aes))",
2609 .test
= alg_test_null
,
2612 .alg
= "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2613 .test
= alg_test_null
,
2617 .test
= alg_test_skcipher
,
2620 .cipher
= __VECS(aes_cbc_tv_template
)
2623 .alg
= "cbc(anubis)",
2624 .test
= alg_test_skcipher
,
2626 .cipher
= __VECS(anubis_cbc_tv_template
)
2629 .alg
= "cbc(blowfish)",
2630 .test
= alg_test_skcipher
,
2632 .cipher
= __VECS(bf_cbc_tv_template
)
2635 .alg
= "cbc(camellia)",
2636 .test
= alg_test_skcipher
,
2638 .cipher
= __VECS(camellia_cbc_tv_template
)
2641 .alg
= "cbc(cast5)",
2642 .test
= alg_test_skcipher
,
2644 .cipher
= __VECS(cast5_cbc_tv_template
)
2647 .alg
= "cbc(cast6)",
2648 .test
= alg_test_skcipher
,
2650 .cipher
= __VECS(cast6_cbc_tv_template
)
2654 .test
= alg_test_skcipher
,
2656 .cipher
= __VECS(des_cbc_tv_template
)
2659 .alg
= "cbc(des3_ede)",
2660 .test
= alg_test_skcipher
,
2663 .cipher
= __VECS(des3_ede_cbc_tv_template
)
2666 /* Same as cbc(aes) except the key is stored in
2667 * hardware secure memory which we reference by index
2670 .test
= alg_test_null
,
2673 .alg
= "cbc(serpent)",
2674 .test
= alg_test_skcipher
,
2676 .cipher
= __VECS(serpent_cbc_tv_template
)
2680 .test
= alg_test_skcipher
,
2682 .cipher
= __VECS(sm4_cbc_tv_template
)
2685 .alg
= "cbc(twofish)",
2686 .test
= alg_test_skcipher
,
2688 .cipher
= __VECS(tf_cbc_tv_template
)
2691 .alg
= "cbcmac(aes)",
2693 .test
= alg_test_hash
,
2695 .hash
= __VECS(aes_cbcmac_tv_template
)
2699 .test
= alg_test_aead
,
2702 .aead
= __VECS(aes_ccm_tv_template
)
2706 .test
= alg_test_skcipher
,
2709 .cipher
= __VECS(aes_cfb_tv_template
)
2713 .test
= alg_test_skcipher
,
2715 .cipher
= __VECS(chacha20_tv_template
)
2720 .test
= alg_test_hash
,
2722 .hash
= __VECS(aes_cmac128_tv_template
)
2725 .alg
= "cmac(des3_ede)",
2727 .test
= alg_test_hash
,
2729 .hash
= __VECS(des3_ede_cmac64_tv_template
)
2732 .alg
= "compress_null",
2733 .test
= alg_test_null
,
2736 .test
= alg_test_hash
,
2738 .hash
= __VECS(crc32_tv_template
)
2742 .test
= alg_test_crc32c
,
2745 .hash
= __VECS(crc32c_tv_template
)
2749 .test
= alg_test_hash
,
2752 .hash
= __VECS(crct10dif_tv_template
)
2756 .test
= alg_test_skcipher
,
2759 .cipher
= __VECS(aes_ctr_tv_template
)
2762 .alg
= "ctr(blowfish)",
2763 .test
= alg_test_skcipher
,
2765 .cipher
= __VECS(bf_ctr_tv_template
)
2768 .alg
= "ctr(camellia)",
2769 .test
= alg_test_skcipher
,
2771 .cipher
= __VECS(camellia_ctr_tv_template
)
2774 .alg
= "ctr(cast5)",
2775 .test
= alg_test_skcipher
,
2777 .cipher
= __VECS(cast5_ctr_tv_template
)
2780 .alg
= "ctr(cast6)",
2781 .test
= alg_test_skcipher
,
2783 .cipher
= __VECS(cast6_ctr_tv_template
)
2787 .test
= alg_test_skcipher
,
2789 .cipher
= __VECS(des_ctr_tv_template
)
2792 .alg
= "ctr(des3_ede)",
2793 .test
= alg_test_skcipher
,
2796 .cipher
= __VECS(des3_ede_ctr_tv_template
)
2799 /* Same as ctr(aes) except the key is stored in
2800 * hardware secure memory which we reference by index
2803 .test
= alg_test_null
,
2806 .alg
= "ctr(serpent)",
2807 .test
= alg_test_skcipher
,
2809 .cipher
= __VECS(serpent_ctr_tv_template
)
2813 .test
= alg_test_skcipher
,
2815 .cipher
= __VECS(sm4_ctr_tv_template
)
2818 .alg
= "ctr(twofish)",
2819 .test
= alg_test_skcipher
,
2821 .cipher
= __VECS(tf_ctr_tv_template
)
2824 .alg
= "cts(cbc(aes))",
2825 .test
= alg_test_skcipher
,
2828 .cipher
= __VECS(cts_mode_tv_template
)
2832 .test
= alg_test_comp
,
2836 .comp
= __VECS(deflate_comp_tv_template
),
2837 .decomp
= __VECS(deflate_decomp_tv_template
)
2842 .test
= alg_test_kpp
,
2845 .kpp
= __VECS(dh_tv_template
)
2848 .alg
= "digest_null",
2849 .test
= alg_test_null
,
2851 .alg
= "drbg_nopr_ctr_aes128",
2852 .test
= alg_test_drbg
,
2855 .drbg
= __VECS(drbg_nopr_ctr_aes128_tv_template
)
2858 .alg
= "drbg_nopr_ctr_aes192",
2859 .test
= alg_test_drbg
,
2862 .drbg
= __VECS(drbg_nopr_ctr_aes192_tv_template
)
2865 .alg
= "drbg_nopr_ctr_aes256",
2866 .test
= alg_test_drbg
,
2869 .drbg
= __VECS(drbg_nopr_ctr_aes256_tv_template
)
2873 * There is no need to specifically test the DRBG with every
2874 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2876 .alg
= "drbg_nopr_hmac_sha1",
2878 .test
= alg_test_null
,
2880 .alg
= "drbg_nopr_hmac_sha256",
2881 .test
= alg_test_drbg
,
2884 .drbg
= __VECS(drbg_nopr_hmac_sha256_tv_template
)
2887 /* covered by drbg_nopr_hmac_sha256 test */
2888 .alg
= "drbg_nopr_hmac_sha384",
2890 .test
= alg_test_null
,
2892 .alg
= "drbg_nopr_hmac_sha512",
2893 .test
= alg_test_null
,
2896 .alg
= "drbg_nopr_sha1",
2898 .test
= alg_test_null
,
2900 .alg
= "drbg_nopr_sha256",
2901 .test
= alg_test_drbg
,
2904 .drbg
= __VECS(drbg_nopr_sha256_tv_template
)
2907 /* covered by drbg_nopr_sha256 test */
2908 .alg
= "drbg_nopr_sha384",
2910 .test
= alg_test_null
,
2912 .alg
= "drbg_nopr_sha512",
2914 .test
= alg_test_null
,
2916 .alg
= "drbg_pr_ctr_aes128",
2917 .test
= alg_test_drbg
,
2920 .drbg
= __VECS(drbg_pr_ctr_aes128_tv_template
)
2923 /* covered by drbg_pr_ctr_aes128 test */
2924 .alg
= "drbg_pr_ctr_aes192",
2926 .test
= alg_test_null
,
2928 .alg
= "drbg_pr_ctr_aes256",
2930 .test
= alg_test_null
,
2932 .alg
= "drbg_pr_hmac_sha1",
2934 .test
= alg_test_null
,
2936 .alg
= "drbg_pr_hmac_sha256",
2937 .test
= alg_test_drbg
,
2940 .drbg
= __VECS(drbg_pr_hmac_sha256_tv_template
)
2943 /* covered by drbg_pr_hmac_sha256 test */
2944 .alg
= "drbg_pr_hmac_sha384",
2946 .test
= alg_test_null
,
2948 .alg
= "drbg_pr_hmac_sha512",
2949 .test
= alg_test_null
,
2952 .alg
= "drbg_pr_sha1",
2954 .test
= alg_test_null
,
2956 .alg
= "drbg_pr_sha256",
2957 .test
= alg_test_drbg
,
2960 .drbg
= __VECS(drbg_pr_sha256_tv_template
)
2963 /* covered by drbg_pr_sha256 test */
2964 .alg
= "drbg_pr_sha384",
2966 .test
= alg_test_null
,
2968 .alg
= "drbg_pr_sha512",
2970 .test
= alg_test_null
,
2973 .test
= alg_test_skcipher
,
2976 .cipher
= __VECS(aes_tv_template
)
2979 .alg
= "ecb(anubis)",
2980 .test
= alg_test_skcipher
,
2982 .cipher
= __VECS(anubis_tv_template
)
2986 .test
= alg_test_skcipher
,
2988 .cipher
= __VECS(arc4_tv_template
)
2991 .alg
= "ecb(blowfish)",
2992 .test
= alg_test_skcipher
,
2994 .cipher
= __VECS(bf_tv_template
)
2997 .alg
= "ecb(camellia)",
2998 .test
= alg_test_skcipher
,
3000 .cipher
= __VECS(camellia_tv_template
)
3003 .alg
= "ecb(cast5)",
3004 .test
= alg_test_skcipher
,
3006 .cipher
= __VECS(cast5_tv_template
)
3009 .alg
= "ecb(cast6)",
3010 .test
= alg_test_skcipher
,
3012 .cipher
= __VECS(cast6_tv_template
)
3015 .alg
= "ecb(cipher_null)",
3016 .test
= alg_test_null
,
3020 .test
= alg_test_skcipher
,
3022 .cipher
= __VECS(des_tv_template
)
3025 .alg
= "ecb(des3_ede)",
3026 .test
= alg_test_skcipher
,
3029 .cipher
= __VECS(des3_ede_tv_template
)
3032 .alg
= "ecb(fcrypt)",
3033 .test
= alg_test_skcipher
,
3036 .vecs
= fcrypt_pcbc_tv_template
,
3041 .alg
= "ecb(khazad)",
3042 .test
= alg_test_skcipher
,
3044 .cipher
= __VECS(khazad_tv_template
)
3047 /* Same as ecb(aes) except the key is stored in
3048 * hardware secure memory which we reference by index
3051 .test
= alg_test_null
,
3055 .test
= alg_test_skcipher
,
3057 .cipher
= __VECS(seed_tv_template
)
3060 .alg
= "ecb(serpent)",
3061 .test
= alg_test_skcipher
,
3063 .cipher
= __VECS(serpent_tv_template
)
3067 .test
= alg_test_skcipher
,
3069 .cipher
= __VECS(sm4_tv_template
)
3073 .test
= alg_test_skcipher
,
3075 .cipher
= __VECS(tea_tv_template
)
3078 .alg
= "ecb(tnepres)",
3079 .test
= alg_test_skcipher
,
3081 .cipher
= __VECS(tnepres_tv_template
)
3084 .alg
= "ecb(twofish)",
3085 .test
= alg_test_skcipher
,
3087 .cipher
= __VECS(tf_tv_template
)
3091 .test
= alg_test_skcipher
,
3093 .cipher
= __VECS(xeta_tv_template
)
3097 .test
= alg_test_skcipher
,
3099 .cipher
= __VECS(xtea_tv_template
)
3103 .test
= alg_test_kpp
,
3106 .kpp
= __VECS(ecdh_tv_template
)
3110 .test
= alg_test_aead
,
3113 .aead
= __VECS(aes_gcm_tv_template
)
3117 .test
= alg_test_hash
,
3120 .hash
= __VECS(ghash_tv_template
)
3124 .test
= alg_test_hash
,
3126 .hash
= __VECS(hmac_md5_tv_template
)
3129 .alg
= "hmac(rmd128)",
3130 .test
= alg_test_hash
,
3132 .hash
= __VECS(hmac_rmd128_tv_template
)
3135 .alg
= "hmac(rmd160)",
3136 .test
= alg_test_hash
,
3138 .hash
= __VECS(hmac_rmd160_tv_template
)
3141 .alg
= "hmac(sha1)",
3142 .test
= alg_test_hash
,
3145 .hash
= __VECS(hmac_sha1_tv_template
)
3148 .alg
= "hmac(sha224)",
3149 .test
= alg_test_hash
,
3152 .hash
= __VECS(hmac_sha224_tv_template
)
3155 .alg
= "hmac(sha256)",
3156 .test
= alg_test_hash
,
3159 .hash
= __VECS(hmac_sha256_tv_template
)
3162 .alg
= "hmac(sha3-224)",
3163 .test
= alg_test_hash
,
3166 .hash
= __VECS(hmac_sha3_224_tv_template
)
3169 .alg
= "hmac(sha3-256)",
3170 .test
= alg_test_hash
,
3173 .hash
= __VECS(hmac_sha3_256_tv_template
)
3176 .alg
= "hmac(sha3-384)",
3177 .test
= alg_test_hash
,
3180 .hash
= __VECS(hmac_sha3_384_tv_template
)
3183 .alg
= "hmac(sha3-512)",
3184 .test
= alg_test_hash
,
3187 .hash
= __VECS(hmac_sha3_512_tv_template
)
3190 .alg
= "hmac(sha384)",
3191 .test
= alg_test_hash
,
3194 .hash
= __VECS(hmac_sha384_tv_template
)
3197 .alg
= "hmac(sha512)",
3198 .test
= alg_test_hash
,
3201 .hash
= __VECS(hmac_sha512_tv_template
)
3204 .alg
= "hmac(streebog256)",
3205 .test
= alg_test_hash
,
3207 .hash
= __VECS(hmac_streebog256_tv_template
)
3210 .alg
= "hmac(streebog512)",
3211 .test
= alg_test_hash
,
3213 .hash
= __VECS(hmac_streebog512_tv_template
)
3216 .alg
= "jitterentropy_rng",
3218 .test
= alg_test_null
,
3221 .test
= alg_test_skcipher
,
3224 .cipher
= __VECS(aes_kw_tv_template
)
3228 .test
= alg_test_skcipher
,
3230 .cipher
= __VECS(aes_lrw_tv_template
)
3233 .alg
= "lrw(camellia)",
3234 .test
= alg_test_skcipher
,
3236 .cipher
= __VECS(camellia_lrw_tv_template
)
3239 .alg
= "lrw(cast6)",
3240 .test
= alg_test_skcipher
,
3242 .cipher
= __VECS(cast6_lrw_tv_template
)
3245 .alg
= "lrw(serpent)",
3246 .test
= alg_test_skcipher
,
3248 .cipher
= __VECS(serpent_lrw_tv_template
)
3251 .alg
= "lrw(twofish)",
3252 .test
= alg_test_skcipher
,
3254 .cipher
= __VECS(tf_lrw_tv_template
)
3258 .test
= alg_test_comp
,
3262 .comp
= __VECS(lz4_comp_tv_template
),
3263 .decomp
= __VECS(lz4_decomp_tv_template
)
3268 .test
= alg_test_comp
,
3272 .comp
= __VECS(lz4hc_comp_tv_template
),
3273 .decomp
= __VECS(lz4hc_decomp_tv_template
)
3278 .test
= alg_test_comp
,
3282 .comp
= __VECS(lzo_comp_tv_template
),
3283 .decomp
= __VECS(lzo_decomp_tv_template
)
3288 .test
= alg_test_hash
,
3290 .hash
= __VECS(md4_tv_template
)
3294 .test
= alg_test_hash
,
3296 .hash
= __VECS(md5_tv_template
)
3299 .alg
= "michael_mic",
3300 .test
= alg_test_hash
,
3302 .hash
= __VECS(michael_mic_tv_template
)
3306 .test
= alg_test_aead
,
3308 .aead
= __VECS(morus1280_tv_template
)
3312 .test
= alg_test_aead
,
3314 .aead
= __VECS(morus640_tv_template
)
3317 .alg
= "nhpoly1305",
3318 .test
= alg_test_hash
,
3320 .hash
= __VECS(nhpoly1305_tv_template
)
3324 .test
= alg_test_skcipher
,
3327 .cipher
= __VECS(aes_ofb_tv_template
)
3330 /* Same as ofb(aes) except the key is stored in
3331 * hardware secure memory which we reference by index
3334 .test
= alg_test_null
,
3337 .alg
= "pcbc(fcrypt)",
3338 .test
= alg_test_skcipher
,
3340 .cipher
= __VECS(fcrypt_pcbc_tv_template
)
3343 .alg
= "pkcs1pad(rsa,sha224)",
3344 .test
= alg_test_null
,
3347 .alg
= "pkcs1pad(rsa,sha256)",
3348 .test
= alg_test_akcipher
,
3351 .akcipher
= __VECS(pkcs1pad_rsa_tv_template
)
3354 .alg
= "pkcs1pad(rsa,sha384)",
3355 .test
= alg_test_null
,
3358 .alg
= "pkcs1pad(rsa,sha512)",
3359 .test
= alg_test_null
,
3363 .test
= alg_test_hash
,
3365 .hash
= __VECS(poly1305_tv_template
)
3368 .alg
= "rfc3686(ctr(aes))",
3369 .test
= alg_test_skcipher
,
3372 .cipher
= __VECS(aes_ctr_rfc3686_tv_template
)
3375 .alg
= "rfc4106(gcm(aes))",
3376 .test
= alg_test_aead
,
3379 .aead
= __VECS(aes_gcm_rfc4106_tv_template
)
3382 .alg
= "rfc4309(ccm(aes))",
3383 .test
= alg_test_aead
,
3386 .aead
= __VECS(aes_ccm_rfc4309_tv_template
)
3389 .alg
= "rfc4543(gcm(aes))",
3390 .test
= alg_test_aead
,
3392 .aead
= __VECS(aes_gcm_rfc4543_tv_template
)
3395 .alg
= "rfc7539(chacha20,poly1305)",
3396 .test
= alg_test_aead
,
3398 .aead
= __VECS(rfc7539_tv_template
)
3401 .alg
= "rfc7539esp(chacha20,poly1305)",
3402 .test
= alg_test_aead
,
3404 .aead
= __VECS(rfc7539esp_tv_template
)
3408 .test
= alg_test_hash
,
3410 .hash
= __VECS(rmd128_tv_template
)
3414 .test
= alg_test_hash
,
3416 .hash
= __VECS(rmd160_tv_template
)
3420 .test
= alg_test_hash
,
3422 .hash
= __VECS(rmd256_tv_template
)
3426 .test
= alg_test_hash
,
3428 .hash
= __VECS(rmd320_tv_template
)
3432 .test
= alg_test_akcipher
,
3435 .akcipher
= __VECS(rsa_tv_template
)
3439 .test
= alg_test_skcipher
,
3441 .cipher
= __VECS(salsa20_stream_tv_template
)
3445 .test
= alg_test_hash
,
3448 .hash
= __VECS(sha1_tv_template
)
3452 .test
= alg_test_hash
,
3455 .hash
= __VECS(sha224_tv_template
)
3459 .test
= alg_test_hash
,
3462 .hash
= __VECS(sha256_tv_template
)
3466 .test
= alg_test_hash
,
3469 .hash
= __VECS(sha3_224_tv_template
)
3473 .test
= alg_test_hash
,
3476 .hash
= __VECS(sha3_256_tv_template
)
3480 .test
= alg_test_hash
,
3483 .hash
= __VECS(sha3_384_tv_template
)
3487 .test
= alg_test_hash
,
3490 .hash
= __VECS(sha3_512_tv_template
)
3494 .test
= alg_test_hash
,
3497 .hash
= __VECS(sha384_tv_template
)
3501 .test
= alg_test_hash
,
3504 .hash
= __VECS(sha512_tv_template
)
3508 .test
= alg_test_hash
,
3510 .hash
= __VECS(sm3_tv_template
)
3513 .alg
= "streebog256",
3514 .test
= alg_test_hash
,
3516 .hash
= __VECS(streebog256_tv_template
)
3519 .alg
= "streebog512",
3520 .test
= alg_test_hash
,
3522 .hash
= __VECS(streebog512_tv_template
)
3526 .test
= alg_test_hash
,
3528 .hash
= __VECS(tgr128_tv_template
)
3532 .test
= alg_test_hash
,
3534 .hash
= __VECS(tgr160_tv_template
)
3538 .test
= alg_test_hash
,
3540 .hash
= __VECS(tgr192_tv_template
)
3543 .alg
= "vmac64(aes)",
3544 .test
= alg_test_hash
,
3546 .hash
= __VECS(vmac64_aes_tv_template
)
3550 .test
= alg_test_hash
,
3552 .hash
= __VECS(wp256_tv_template
)
3556 .test
= alg_test_hash
,
3558 .hash
= __VECS(wp384_tv_template
)
3562 .test
= alg_test_hash
,
3564 .hash
= __VECS(wp512_tv_template
)
3568 .test
= alg_test_hash
,
3570 .hash
= __VECS(aes_xcbc128_tv_template
)
3574 .test
= alg_test_skcipher
,
3576 .cipher
= __VECS(xchacha12_tv_template
)
3580 .test
= alg_test_skcipher
,
3582 .cipher
= __VECS(xchacha20_tv_template
)
3586 .test
= alg_test_skcipher
,
3589 .cipher
= __VECS(aes_xts_tv_template
)
3592 .alg
= "xts(camellia)",
3593 .test
= alg_test_skcipher
,
3595 .cipher
= __VECS(camellia_xts_tv_template
)
3598 .alg
= "xts(cast6)",
3599 .test
= alg_test_skcipher
,
3601 .cipher
= __VECS(cast6_xts_tv_template
)
3604 /* Same as xts(aes) except the key is stored in
3605 * hardware secure memory which we reference by index
3608 .test
= alg_test_null
,
3611 .alg
= "xts(serpent)",
3612 .test
= alg_test_skcipher
,
3614 .cipher
= __VECS(serpent_xts_tv_template
)
3617 .alg
= "xts(twofish)",
3618 .test
= alg_test_skcipher
,
3620 .cipher
= __VECS(tf_xts_tv_template
)
3623 .alg
= "xts4096(paes)",
3624 .test
= alg_test_null
,
3627 .alg
= "xts512(paes)",
3628 .test
= alg_test_null
,
3631 .alg
= "zlib-deflate",
3632 .test
= alg_test_comp
,
3636 .comp
= __VECS(zlib_deflate_comp_tv_template
),
3637 .decomp
= __VECS(zlib_deflate_decomp_tv_template
)
3642 .test
= alg_test_comp
,
3646 .comp
= __VECS(zstd_comp_tv_template
),
3647 .decomp
= __VECS(zstd_decomp_tv_template
)
3653 static bool alg_test_descs_checked
;
3655 static void alg_test_descs_check_order(void)
3659 /* only check once */
3660 if (alg_test_descs_checked
)
3663 alg_test_descs_checked
= true;
3665 for (i
= 1; i
< ARRAY_SIZE(alg_test_descs
); i
++) {
3666 int diff
= strcmp(alg_test_descs
[i
- 1].alg
,
3667 alg_test_descs
[i
].alg
);
3669 if (WARN_ON(diff
> 0)) {
3670 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3671 alg_test_descs
[i
- 1].alg
,
3672 alg_test_descs
[i
].alg
);
3675 if (WARN_ON(diff
== 0)) {
3676 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3677 alg_test_descs
[i
].alg
);
3682 static int alg_find_test(const char *alg
)
3685 int end
= ARRAY_SIZE(alg_test_descs
);
3687 while (start
< end
) {
3688 int i
= (start
+ end
) / 2;
3689 int diff
= strcmp(alg_test_descs
[i
].alg
, alg
);
3707 int alg_test(const char *driver
, const char *alg
, u32 type
, u32 mask
)
3713 if (!fips_enabled
&& notests
) {
3714 printk_once(KERN_INFO
"alg: self-tests disabled\n");
3718 alg_test_descs_check_order();
3720 if ((type
& CRYPTO_ALG_TYPE_MASK
) == CRYPTO_ALG_TYPE_CIPHER
) {
3721 char nalg
[CRYPTO_MAX_ALG_NAME
];
3723 if (snprintf(nalg
, sizeof(nalg
), "ecb(%s)", alg
) >=
3725 return -ENAMETOOLONG
;
3727 i
= alg_find_test(nalg
);
3731 if (fips_enabled
&& !alg_test_descs
[i
].fips_allowed
)
3734 rc
= alg_test_cipher(alg_test_descs
+ i
, driver
, type
, mask
);
3738 i
= alg_find_test(alg
);
3739 j
= alg_find_test(driver
);
3743 if (fips_enabled
&& ((i
>= 0 && !alg_test_descs
[i
].fips_allowed
) ||
3744 (j
>= 0 && !alg_test_descs
[j
].fips_allowed
)))
3749 rc
|= alg_test_descs
[i
].test(alg_test_descs
+ i
, driver
,
3751 if (j
>= 0 && j
!= i
)
3752 rc
|= alg_test_descs
[j
].test(alg_test_descs
+ j
, driver
,
3756 if (fips_enabled
&& rc
)
3757 panic("%s: %s alg self test failed in fips mode!\n", driver
, alg
);
3759 if (fips_enabled
&& !rc
)
3760 pr_info("alg: self-tests for %s (%s) passed\n", driver
, alg
);
3765 printk(KERN_INFO
"alg: No test for %s (%s)\n", alg
, driver
);
3771 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3773 EXPORT_SYMBOL_GPL(alg_test
);