crypto/testmgr.c (android_kernel_samsung_universal8895, merge of 4.4.68 into android-4.4)
1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 *
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
22
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35
36 #include "internal.h"
37
38 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
39
40 /* a perfect nop */
41 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
42 {
43 return 0;
44 }
45
46 #else
47
48 #include "testmgr.h"
49
50 /*
51 * Need slab memory for testing (size in number of pages).
52 */
53 #define XBUFSIZE 8
54
55 /*
56 * Indexes into the xbuf to simulate cross-page access.
57 */
58 #define IDX1 32
59 #define IDX2 32400
60 #define IDX3 1
61 #define IDX4 8193
62 #define IDX5 22222
63 #define IDX6 17101
64 #define IDX7 27333
65 #define IDX8 3000
66
67 /*
68 * Used by test_cipher()
69 */
70 #define ENCRYPT 1
71 #define DECRYPT 0
72
73 struct tcrypt_result {
74 struct completion completion;
75 int err;
76 };
77
78 struct aead_test_suite {
79 struct {
80 struct aead_testvec *vecs;
81 unsigned int count;
82 } enc, dec;
83 };
84
85 struct cipher_test_suite {
86 struct {
87 struct cipher_testvec *vecs;
88 unsigned int count;
89 } enc, dec;
90 };
91
92 struct comp_test_suite {
93 struct {
94 struct comp_testvec *vecs;
95 unsigned int count;
96 } comp, decomp;
97 };
98
99 struct pcomp_test_suite {
100 struct {
101 struct pcomp_testvec *vecs;
102 unsigned int count;
103 } comp, decomp;
104 };
105
106 struct hash_test_suite {
107 struct hash_testvec *vecs;
108 unsigned int count;
109 };
110
111 struct cprng_test_suite {
112 struct cprng_testvec *vecs;
113 unsigned int count;
114 };
115
116 struct drbg_test_suite {
117 struct drbg_testvec *vecs;
118 unsigned int count;
119 };
120
121 struct akcipher_test_suite {
122 struct akcipher_testvec *vecs;
123 unsigned int count;
124 };
125
126 struct alg_test_desc {
127 const char *alg;
128 int (*test)(const struct alg_test_desc *desc, const char *driver,
129 u32 type, u32 mask);
130 int fips_allowed; /* set if alg is allowed in fips mode */
131
132 union {
133 struct aead_test_suite aead;
134 struct cipher_test_suite cipher;
135 struct comp_test_suite comp;
136 struct pcomp_test_suite pcomp;
137 struct hash_test_suite hash;
138 struct cprng_test_suite cprng;
139 struct drbg_test_suite drbg;
140 struct akcipher_test_suite akcipher;
141 } suite;
142 };
143
144 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
145
146 static void hexdump(unsigned char *buf, unsigned int len)
147 {
148 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
149 16, 1,
150 buf, len, false);
151 }
152
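/*
 * Completion callback for asynchronous crypto requests: ignore the
 * -EINPROGRESS notification, otherwise record the final status and wake
 * up the thread sleeping in wait_for_completion().
 */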
153 static void tcrypt_complete(struct crypto_async_request *req, int err)
154 {
155 struct tcrypt_result *res = req->data;
156
157 if (err == -EINPROGRESS)
158 return;
159
160 res->err = err;
161 complete(&res->completion);
162 }
163
164 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
165 {
166 int i;
167
168 for (i = 0; i < XBUFSIZE; i++) {
169 buf[i] = (void *)__get_free_page(GFP_KERNEL);
170 if (!buf[i])
171 goto err_free_buf;
172 }
173
174 return 0;
175
176 err_free_buf:
177 while (i-- > 0)
178 free_page((unsigned long)buf[i]);
179
180 return -ENOMEM;
181 }
182
183 static void testmgr_free_buf(char *buf[XBUFSIZE])
184 {
185 int i;
186
187 for (i = 0; i < XBUFSIZE; i++)
188 free_page((unsigned long)buf[i]);
189 }
190
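/*
 * Wait for an asynchronously queued operation to finish.  If the initial
 * call returned -EINPROGRESS or -EBUSY, sleep until the completion fires
 * and return the status recorded by tcrypt_complete().
 */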
191 static int wait_async_op(struct tcrypt_result *tr, int ret)
192 {
193 if (ret == -EINPROGRESS || ret == -EBUSY) {
194 wait_for_completion(&tr->completion);
195 reinit_completion(&tr->completion);
196 ret = tr->err;
197 }
198 return ret;
199 }
200
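/*
 * Run the hash test vectors in @template against @tfm.  Vectors without a
 * scatter-gather layout are hashed from a linear buffer at @align_offset,
 * either with a single ->digest() call or with the init/update/final
 * sequence depending on @use_digest.  Vectors that define a tap[] layout
 * are then replayed through a scattered sg list, but only for the default
 * alignment (align_offset == 0).
 */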
201 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
202 unsigned int tcount, bool use_digest,
203 const int align_offset)
204 {
205 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
206 unsigned int i, j, k, temp;
207 struct scatterlist sg[8];
208 char *result;
209 char *key;
210 struct ahash_request *req;
211 struct tcrypt_result tresult;
212 void *hash_buff;
213 char *xbuf[XBUFSIZE];
214 int ret = -ENOMEM;
215
216 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
217 if (!result)
218 return ret;
219 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
220 if (!key)
221 goto out_nobuf;
222 if (testmgr_alloc_buf(xbuf))
223 goto out_nobuf;
224
225 init_completion(&tresult.completion);
226
227 req = ahash_request_alloc(tfm, GFP_KERNEL);
228 if (!req) {
229 printk(KERN_ERR "alg: hash: Failed to allocate request for "
230 "%s\n", algo);
231 goto out_noreq;
232 }
233 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
234 tcrypt_complete, &tresult);
235
236 j = 0;
237 for (i = 0; i < tcount; i++) {
238 if (template[i].np)
239 continue;
240
241 ret = -EINVAL;
242 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
243 goto out;
244
245 j++;
246 memset(result, 0, MAX_DIGEST_SIZE);
247
248 hash_buff = xbuf[0];
249 hash_buff += align_offset;
250
251 memcpy(hash_buff, template[i].plaintext, template[i].psize);
252 sg_init_one(&sg[0], hash_buff, template[i].psize);
253
254 if (template[i].ksize) {
255 crypto_ahash_clear_flags(tfm, ~0);
256 if (template[i].ksize > MAX_KEYLEN) {
257 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
258 j, algo, template[i].ksize, MAX_KEYLEN);
259 ret = -EINVAL;
260 goto out;
261 }
262 memcpy(key, template[i].key, template[i].ksize);
263 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
264 if (ret) {
265 printk(KERN_ERR "alg: hash: setkey failed on "
266 "test %d for %s: ret=%d\n", j, algo,
267 -ret);
268 goto out;
269 }
270 }
271
272 ahash_request_set_crypt(req, sg, result, template[i].psize);
273 if (use_digest) {
274 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
275 if (ret) {
276 pr_err("alg: hash: digest failed on test %d "
277 "for %s: ret=%d\n", j, algo, -ret);
278 goto out;
279 }
280 } else {
281 ret = wait_async_op(&tresult, crypto_ahash_init(req));
282 if (ret) {
283 pr_err("alt: hash: init failed on test %d "
284 "for %s: ret=%d\n", j, algo, -ret);
285 goto out;
286 }
287 ret = wait_async_op(&tresult, crypto_ahash_update(req));
288 if (ret) {
289 pr_err("alt: hash: update failed on test %d "
290 "for %s: ret=%d\n", j, algo, -ret);
291 goto out;
292 }
293 ret = wait_async_op(&tresult, crypto_ahash_final(req));
294 if (ret) {
295 pr_err("alt: hash: final failed on test %d "
296 "for %s: ret=%d\n", j, algo, -ret);
297 goto out;
298 }
299 }
300
301 if (memcmp(result, template[i].digest,
302 crypto_ahash_digestsize(tfm))) {
303 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
304 j, algo);
305 hexdump(result, crypto_ahash_digestsize(tfm));
306 ret = -EINVAL;
307 goto out;
308 }
309 }
310
311 j = 0;
312 for (i = 0; i < tcount; i++) {
313 /* alignment tests are only done with contiguous buffers */
314 if (align_offset != 0)
315 break;
316
317 if (!template[i].np)
318 continue;
319
320 j++;
321 memset(result, 0, MAX_DIGEST_SIZE);
322
323 temp = 0;
324 sg_init_table(sg, template[i].np);
325 ret = -EINVAL;
326 for (k = 0; k < template[i].np; k++) {
327 if (WARN_ON(offset_in_page(IDX[k]) +
328 template[i].tap[k] > PAGE_SIZE))
329 goto out;
330 sg_set_buf(&sg[k],
331 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
332 offset_in_page(IDX[k]),
333 template[i].plaintext + temp,
334 template[i].tap[k]),
335 template[i].tap[k]);
336 temp += template[i].tap[k];
337 }
338
339 if (template[i].ksize) {
340 if (template[i].ksize > MAX_KEYLEN) {
341 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
342 j, algo, template[i].ksize, MAX_KEYLEN);
343 ret = -EINVAL;
344 goto out;
345 }
346 crypto_ahash_clear_flags(tfm, ~0);
347 memcpy(key, template[i].key, template[i].ksize);
348 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
349
350 if (ret) {
351 printk(KERN_ERR "alg: hash: setkey "
352 "failed on chunking test %d "
353 "for %s: ret=%d\n", j, algo, -ret);
354 goto out;
355 }
356 }
357
358 ahash_request_set_crypt(req, sg, result, template[i].psize);
359 ret = crypto_ahash_digest(req);
360 switch (ret) {
361 case 0:
362 break;
363 case -EINPROGRESS:
364 case -EBUSY:
365 wait_for_completion(&tresult.completion);
366 reinit_completion(&tresult.completion);
367 ret = tresult.err;
368 if (!ret)
369 break;
370 /* fall through */
371 default:
372 printk(KERN_ERR "alg: hash: digest failed "
373 "on chunking test %d for %s: "
374 "ret=%d\n", j, algo, -ret);
375 goto out;
376 }
377
378 if (memcmp(result, template[i].digest,
379 crypto_ahash_digestsize(tfm))) {
380 printk(KERN_ERR "alg: hash: Chunking test %d "
381 "failed for %s\n", j, algo);
382 hexdump(result, crypto_ahash_digestsize(tfm));
383 ret = -EINVAL;
384 goto out;
385 }
386 }
387
388 ret = 0;
389
390 out:
391 ahash_request_free(req);
392 out_noreq:
393 testmgr_free_buf(xbuf);
394 out_nobuf:
395 kfree(key);
396 kfree(result);
397 return ret;
398 }
399
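/*
 * Exercise __test_hash() with the default alignment, with a one byte
 * offset and, if the algorithm declares an alignmask, with an offset of
 * alignmask + 1, to verify that misaligned buffers are handled.
 */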
400 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
401 unsigned int tcount, bool use_digest)
402 {
403 unsigned int alignmask;
404 int ret;
405
406 ret = __test_hash(tfm, template, tcount, use_digest, 0);
407 if (ret)
408 return ret;
409
410 /* test unaligned buffers, check with one byte offset */
411 ret = __test_hash(tfm, template, tcount, use_digest, 1);
412 if (ret)
413 return ret;
414
415 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
416 if (alignmask) {
417 /* Check if alignment mask for tfm is correctly set. */
418 ret = __test_hash(tfm, template, tcount, use_digest,
419 alignmask + 1);
420 if (ret)
421 return ret;
422 }
423
424 return 0;
425 }
426
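/*
 * Run the AEAD test vectors in @template against @tfm in the direction
 * given by @enc.  The first pass uses linear source buffers (and a
 * separate destination when @diff_dst is set); the second pass replays
 * vectors that define tap[]/atap[] layouts through scattered sg lists,
 * checking both the result and that the bytes beyond it stay untouched.
 */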
427 static int __test_aead(struct crypto_aead *tfm, int enc,
428 struct aead_testvec *template, unsigned int tcount,
429 const bool diff_dst, const int align_offset)
430 {
431 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
432 unsigned int i, j, k, n, temp;
433 int ret = -ENOMEM;
434 char *q;
435 char *key;
436 struct aead_request *req;
437 struct scatterlist *sg;
438 struct scatterlist *sgout;
439 const char *e, *d;
440 struct tcrypt_result result;
441 unsigned int authsize, iv_len;
442 void *input;
443 void *output;
444 void *assoc;
445 char *iv;
446 char *xbuf[XBUFSIZE];
447 char *xoutbuf[XBUFSIZE];
448 char *axbuf[XBUFSIZE];
449
450 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
451 if (!iv)
452 return ret;
453 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
454 if (!key)
455 goto out_noxbuf;
456 if (testmgr_alloc_buf(xbuf))
457 goto out_noxbuf;
458 if (testmgr_alloc_buf(axbuf))
459 goto out_noaxbuf;
460 if (diff_dst && testmgr_alloc_buf(xoutbuf))
461 goto out_nooutbuf;
462
463 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
464 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
465 if (!sg)
466 goto out_nosg;
467 sgout = &sg[16];
468
469 if (diff_dst)
470 d = "-ddst";
471 else
472 d = "";
473
474 if (enc == ENCRYPT)
475 e = "encryption";
476 else
477 e = "decryption";
478
479 init_completion(&result.completion);
480
481 req = aead_request_alloc(tfm, GFP_KERNEL);
482 if (!req) {
483 pr_err("alg: aead%s: Failed to allocate request for %s\n",
484 d, algo);
485 goto out;
486 }
487
488 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
489 tcrypt_complete, &result);
490
491 iv_len = crypto_aead_ivsize(tfm);
492
493 for (i = 0, j = 0; i < tcount; i++) {
494 if (template[i].np)
495 continue;
496
497 j++;
498
499 /* some templates have no input data but they will
500 * touch input
501 */
502 input = xbuf[0];
503 input += align_offset;
504 assoc = axbuf[0];
505
506 ret = -EINVAL;
507 if (WARN_ON(align_offset + template[i].ilen >
508 PAGE_SIZE || template[i].alen > PAGE_SIZE))
509 goto out;
510
511 memcpy(input, template[i].input, template[i].ilen);
512 memcpy(assoc, template[i].assoc, template[i].alen);
513 if (template[i].iv)
514 memcpy(iv, template[i].iv, iv_len);
515 else
516 memset(iv, 0, iv_len);
517
518 crypto_aead_clear_flags(tfm, ~0);
519 if (template[i].wk)
520 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
521
522 if (template[i].klen > MAX_KEYLEN) {
523 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
524 d, j, algo, template[i].klen,
525 MAX_KEYLEN);
526 ret = -EINVAL;
527 goto out;
528 }
529 memcpy(key, template[i].key, template[i].klen);
530
531 ret = crypto_aead_setkey(tfm, key, template[i].klen);
532 if (!ret == template[i].fail) {
533 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
534 d, j, algo, crypto_aead_get_flags(tfm));
535 goto out;
536 } else if (ret)
537 continue;
538
539 authsize = abs(template[i].rlen - template[i].ilen);
540 ret = crypto_aead_setauthsize(tfm, authsize);
541 if (ret) {
542 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
543 d, authsize, j, algo);
544 goto out;
545 }
546
547 k = !!template[i].alen;
548 sg_init_table(sg, k + 1);
549 sg_set_buf(&sg[0], assoc, template[i].alen);
550 sg_set_buf(&sg[k], input,
551 template[i].ilen + (enc ? authsize : 0));
552 output = input;
553
554 if (diff_dst) {
555 sg_init_table(sgout, k + 1);
556 sg_set_buf(&sgout[0], assoc, template[i].alen);
557
558 output = xoutbuf[0];
559 output += align_offset;
560 sg_set_buf(&sgout[k], output,
561 template[i].rlen + (enc ? 0 : authsize));
562 }
563
564 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
565 template[i].ilen, iv);
566
567 aead_request_set_ad(req, template[i].alen);
568
569 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
570
571 switch (ret) {
572 case 0:
573 if (template[i].novrfy) {
574 /* verification was supposed to fail */
575 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
576 d, e, j, algo);
577 /* so really, we got a bad message */
578 ret = -EBADMSG;
579 goto out;
580 }
581 break;
582 case -EINPROGRESS:
583 case -EBUSY:
584 wait_for_completion(&result.completion);
585 reinit_completion(&result.completion);
586 ret = result.err;
587 if (!ret)
588 break;
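/* fall through */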
589 case -EBADMSG:
590 if (template[i].novrfy)
591 /* verification failure was expected */
592 continue;
593 /* fall through */
594 default:
595 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
596 d, e, j, algo, -ret);
597 goto out;
598 }
599
600 q = output;
601 if (memcmp(q, template[i].result, template[i].rlen)) {
602 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
603 d, j, e, algo);
604 hexdump(q, template[i].rlen);
605 ret = -EINVAL;
606 goto out;
607 }
608 }
609
610 for (i = 0, j = 0; i < tcount; i++) {
611 /* alignment tests are only done with contiguous buffers */
612 if (align_offset != 0)
613 break;
614
615 if (!template[i].np)
616 continue;
617
618 j++;
619
620 if (template[i].iv)
621 memcpy(iv, template[i].iv, iv_len);
622 else
623 memset(iv, 0, MAX_IVLEN);
624
625 crypto_aead_clear_flags(tfm, ~0);
626 if (template[i].wk)
627 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
628 if (template[i].klen > MAX_KEYLEN) {
629 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
630 d, j, algo, template[i].klen, MAX_KEYLEN);
631 ret = -EINVAL;
632 goto out;
633 }
634 memcpy(key, template[i].key, template[i].klen);
635
636 ret = crypto_aead_setkey(tfm, key, template[i].klen);
637 if (!ret == template[i].fail) {
638 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
639 d, j, algo, crypto_aead_get_flags(tfm));
640 goto out;
641 } else if (ret)
642 continue;
643
644 authsize = abs(template[i].rlen - template[i].ilen);
645
646 ret = -EINVAL;
647 sg_init_table(sg, template[i].anp + template[i].np);
648 if (diff_dst)
649 sg_init_table(sgout, template[i].anp + template[i].np);
650
651 ret = -EINVAL;
652 for (k = 0, temp = 0; k < template[i].anp; k++) {
653 if (WARN_ON(offset_in_page(IDX[k]) +
654 template[i].atap[k] > PAGE_SIZE))
655 goto out;
656 sg_set_buf(&sg[k],
657 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
658 offset_in_page(IDX[k]),
659 template[i].assoc + temp,
660 template[i].atap[k]),
661 template[i].atap[k]);
662 if (diff_dst)
663 sg_set_buf(&sgout[k],
664 axbuf[IDX[k] >> PAGE_SHIFT] +
665 offset_in_page(IDX[k]),
666 template[i].atap[k]);
667 temp += template[i].atap[k];
668 }
669
670 for (k = 0, temp = 0; k < template[i].np; k++) {
671 if (WARN_ON(offset_in_page(IDX[k]) +
672 template[i].tap[k] > PAGE_SIZE))
673 goto out;
674
675 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
676 memcpy(q, template[i].input + temp, template[i].tap[k]);
677 sg_set_buf(&sg[template[i].anp + k],
678 q, template[i].tap[k]);
679
680 if (diff_dst) {
681 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
682 offset_in_page(IDX[k]);
683
684 memset(q, 0, template[i].tap[k]);
685
686 sg_set_buf(&sgout[template[i].anp + k],
687 q, template[i].tap[k]);
688 }
689
690 n = template[i].tap[k];
691 if (k == template[i].np - 1 && enc)
692 n += authsize;
693 if (offset_in_page(q) + n < PAGE_SIZE)
694 q[n] = 0;
695
696 temp += template[i].tap[k];
697 }
698
699 ret = crypto_aead_setauthsize(tfm, authsize);
700 if (ret) {
701 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
702 d, authsize, j, algo);
703 goto out;
704 }
705
706 if (enc) {
707 if (WARN_ON(sg[template[i].anp + k - 1].offset +
708 sg[template[i].anp + k - 1].length +
709 authsize > PAGE_SIZE)) {
710 ret = -EINVAL;
711 goto out;
712 }
713
714 if (diff_dst)
715 sgout[template[i].anp + k - 1].length +=
716 authsize;
717 sg[template[i].anp + k - 1].length += authsize;
718 }
719
720 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
721 template[i].ilen,
722 iv);
723
724 aead_request_set_ad(req, template[i].alen);
725
726 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
727
728 switch (ret) {
729 case 0:
730 if (template[i].novrfy) {
731 /* verification was supposed to fail */
732 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
733 d, e, j, algo);
734 /* so really, we got a bad message */
735 ret = -EBADMSG;
736 goto out;
737 }
738 break;
739 case -EINPROGRESS:
740 case -EBUSY:
741 wait_for_completion(&result.completion);
742 reinit_completion(&result.completion);
743 ret = result.err;
744 if (!ret)
745 break;
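/* fall through */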
746 case -EBADMSG:
747 if (template[i].novrfy)
748 /* verification failure was expected */
749 continue;
750 /* fall through */
751 default:
752 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
753 d, e, j, algo, -ret);
754 goto out;
755 }
756
757 ret = -EINVAL;
758 for (k = 0, temp = 0; k < template[i].np; k++) {
759 if (diff_dst)
760 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
761 offset_in_page(IDX[k]);
762 else
763 q = xbuf[IDX[k] >> PAGE_SHIFT] +
764 offset_in_page(IDX[k]);
765
766 n = template[i].tap[k];
767 if (k == template[i].np - 1)
768 n += enc ? authsize : -authsize;
769
770 if (memcmp(q, template[i].result + temp, n)) {
771 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
772 d, j, e, k, algo);
773 hexdump(q, n);
774 goto out;
775 }
776
777 q += n;
778 if (k == template[i].np - 1 && !enc) {
779 if (!diff_dst &&
780 memcmp(q, template[i].input +
781 temp + n, authsize))
782 n = authsize;
783 else
784 n = 0;
785 } else {
786 for (n = 0; offset_in_page(q + n) && q[n]; n++)
787 ;
788 }
789 if (n) {
790 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
791 d, j, e, k, algo, n);
792 hexdump(q, n);
793 goto out;
794 }
795
796 temp += template[i].tap[k];
797 }
798 }
799
800 ret = 0;
801
802 out:
803 aead_request_free(req);
804 kfree(sg);
805 out_nosg:
806 if (diff_dst)
807 testmgr_free_buf(xoutbuf);
808 out_nooutbuf:
809 testmgr_free_buf(axbuf);
810 out_noaxbuf:
811 testmgr_free_buf(xbuf);
812 out_noxbuf:
813 kfree(key);
814 kfree(iv);
815 return ret;
816 }
817
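/*
 * Run __test_aead() over the interesting buffer layouts: in-place
 * (dst == src), separate destination, a one byte misalignment and, when
 * the algorithm has an alignmask, an offset of alignmask + 1.
 */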
818 static int test_aead(struct crypto_aead *tfm, int enc,
819 struct aead_testvec *template, unsigned int tcount)
820 {
821 unsigned int alignmask;
822 int ret;
823
824 /* test 'dst == src' case */
825 ret = __test_aead(tfm, enc, template, tcount, false, 0);
826 if (ret)
827 return ret;
828
829 /* test 'dst != src' case */
830 ret = __test_aead(tfm, enc, template, tcount, true, 0);
831 if (ret)
832 return ret;
833
834 /* test unaligned buffers, check with one byte offset */
835 ret = __test_aead(tfm, enc, template, tcount, true, 1);
836 if (ret)
837 return ret;
838
839 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
840 if (alignmask) {
841 /* Check if alignment mask for tfm is correctly set. */
842 ret = __test_aead(tfm, enc, template, tcount, true,
843 alignmask + 1);
844 if (ret)
845 return ret;
846 }
847
848 return 0;
849 }
850
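/*
 * Test the single-block cipher interface: each vector is encrypted or
 * decrypted in place, one block at a time, with
 * crypto_cipher_encrypt_one()/crypto_cipher_decrypt_one(), and the result
 * is compared against the expected output.
 */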
851 static int test_cipher(struct crypto_cipher *tfm, int enc,
852 struct cipher_testvec *template, unsigned int tcount)
853 {
854 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
855 unsigned int i, j, k;
856 char *q;
857 const char *e;
858 void *data;
859 char *xbuf[XBUFSIZE];
860 int ret = -ENOMEM;
861
862 if (testmgr_alloc_buf(xbuf))
863 goto out_nobuf;
864
865 if (enc == ENCRYPT)
866 e = "encryption";
867 else
868 e = "decryption";
869
870 j = 0;
871 for (i = 0; i < tcount; i++) {
872 if (template[i].np)
873 continue;
874
875 j++;
876
877 ret = -EINVAL;
878 if (WARN_ON(template[i].ilen > PAGE_SIZE))
879 goto out;
880
881 data = xbuf[0];
882 memcpy(data, template[i].input, template[i].ilen);
883
884 crypto_cipher_clear_flags(tfm, ~0);
885 if (template[i].wk)
886 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
887
888 ret = crypto_cipher_setkey(tfm, template[i].key,
889 template[i].klen);
890 if (!ret == template[i].fail) {
891 printk(KERN_ERR "alg: cipher: setkey failed "
892 "on test %d for %s: flags=%x\n", j,
893 algo, crypto_cipher_get_flags(tfm));
894 goto out;
895 } else if (ret)
896 continue;
897
898 for (k = 0; k < template[i].ilen;
899 k += crypto_cipher_blocksize(tfm)) {
900 if (enc)
901 crypto_cipher_encrypt_one(tfm, data + k,
902 data + k);
903 else
904 crypto_cipher_decrypt_one(tfm, data + k,
905 data + k);
906 }
907
908 q = data;
909 if (memcmp(q, template[i].result, template[i].rlen)) {
910 printk(KERN_ERR "alg: cipher: Test %d failed "
911 "on %s for %s\n", j, e, algo);
912 hexdump(q, template[i].rlen);
913 ret = -EINVAL;
914 goto out;
915 }
916 }
917
918 ret = 0;
919
920 out:
921 testmgr_free_buf(xbuf);
922 out_nobuf:
923 return ret;
924 }
925
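/*
 * Run the skcipher test vectors in @template against @tfm.  Linear
 * buffers at @align_offset are tested first (optionally with a separate
 * destination when @diff_dst is set), including a check of the output IV
 * where the vector provides one; vectors with a tap[] layout are then
 * replayed through scattered sg lists at the default alignment only.
 */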
926 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
927 struct cipher_testvec *template, unsigned int tcount,
928 const bool diff_dst, const int align_offset)
929 {
930 const char *algo =
931 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
932 unsigned int i, j, k, n, temp;
933 char *q;
934 struct skcipher_request *req;
935 struct scatterlist sg[8];
936 struct scatterlist sgout[8];
937 const char *e, *d;
938 struct tcrypt_result result;
939 void *data;
940 char iv[MAX_IVLEN];
941 char *xbuf[XBUFSIZE];
942 char *xoutbuf[XBUFSIZE];
943 int ret = -ENOMEM;
944 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
945
946 if (testmgr_alloc_buf(xbuf))
947 goto out_nobuf;
948
949 if (diff_dst && testmgr_alloc_buf(xoutbuf))
950 goto out_nooutbuf;
951
952 if (diff_dst)
953 d = "-ddst";
954 else
955 d = "";
956
957 if (enc == ENCRYPT)
958 e = "encryption";
959 else
960 e = "decryption";
961
962 init_completion(&result.completion);
963
964 req = skcipher_request_alloc(tfm, GFP_KERNEL);
965 if (!req) {
966 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
967 d, algo);
968 goto out;
969 }
970
971 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
972 tcrypt_complete, &result);
973
974 j = 0;
975 for (i = 0; i < tcount; i++) {
976 if (template[i].np && !template[i].also_non_np)
977 continue;
978
979 if (template[i].iv)
980 memcpy(iv, template[i].iv, ivsize);
981 else
982 memset(iv, 0, MAX_IVLEN);
983
984 j++;
985 ret = -EINVAL;
986 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
987 goto out;
988
989 data = xbuf[0];
990 data += align_offset;
991 memcpy(data, template[i].input, template[i].ilen);
992
993 crypto_skcipher_clear_flags(tfm, ~0);
994 if (template[i].wk)
995 crypto_skcipher_set_flags(tfm,
996 CRYPTO_TFM_REQ_WEAK_KEY);
997
998 ret = crypto_skcipher_setkey(tfm, template[i].key,
999 template[i].klen);
1000 if (!ret == template[i].fail) {
1001 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1002 d, j, algo, crypto_skcipher_get_flags(tfm));
1003 goto out;
1004 } else if (ret)
1005 continue;
1006
1007 sg_init_one(&sg[0], data, template[i].ilen);
1008 if (diff_dst) {
1009 data = xoutbuf[0];
1010 data += align_offset;
1011 sg_init_one(&sgout[0], data, template[i].ilen);
1012 }
1013
1014 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1015 template[i].ilen, iv);
1016 ret = enc ? crypto_skcipher_encrypt(req) :
1017 crypto_skcipher_decrypt(req);
1018
1019 switch (ret) {
1020 case 0:
1021 break;
1022 case -EINPROGRESS:
1023 case -EBUSY:
1024 wait_for_completion(&result.completion);
1025 reinit_completion(&result.completion);
1026 ret = result.err;
1027 if (!ret)
1028 break;
1029 /* fall through */
1030 default:
1031 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1032 d, e, j, algo, -ret);
1033 goto out;
1034 }
1035
1036 q = data;
1037 if (memcmp(q, template[i].result, template[i].rlen)) {
1038 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1039 d, j, e, algo);
1040 hexdump(q, template[i].rlen);
1041 ret = -EINVAL;
1042 goto out;
1043 }
1044
1045 if (template[i].iv_out &&
1046 memcmp(iv, template[i].iv_out,
1047 crypto_skcipher_ivsize(tfm))) {
1048 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1049 d, j, e, algo);
1050 hexdump(iv, crypto_skcipher_ivsize(tfm));
1051 ret = -EINVAL;
1052 goto out;
1053 }
1054 }
1055
1056 j = 0;
1057 for (i = 0; i < tcount; i++) {
1058 /* alignment tests are only done with contiguous buffers */
1059 if (align_offset != 0)
1060 break;
1061
1062 if (!template[i].np)
1063 continue;
1064
1065 if (template[i].iv)
1066 memcpy(iv, template[i].iv, ivsize);
1067 else
1068 memset(iv, 0, MAX_IVLEN);
1069
1070 j++;
1071 crypto_skcipher_clear_flags(tfm, ~0);
1072 if (template[i].wk)
1073 crypto_skcipher_set_flags(tfm,
1074 CRYPTO_TFM_REQ_WEAK_KEY);
1075
1076 ret = crypto_skcipher_setkey(tfm, template[i].key,
1077 template[i].klen);
1078 if (!ret == template[i].fail) {
1079 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1080 d, j, algo, crypto_skcipher_get_flags(tfm));
1081 goto out;
1082 } else if (ret)
1083 continue;
1084
1085 temp = 0;
1086 ret = -EINVAL;
1087 sg_init_table(sg, template[i].np);
1088 if (diff_dst)
1089 sg_init_table(sgout, template[i].np);
1090 for (k = 0; k < template[i].np; k++) {
1091 if (WARN_ON(offset_in_page(IDX[k]) +
1092 template[i].tap[k] > PAGE_SIZE))
1093 goto out;
1094
1095 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1096
1097 memcpy(q, template[i].input + temp, template[i].tap[k]);
1098
1099 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1100 q[template[i].tap[k]] = 0;
1101
1102 sg_set_buf(&sg[k], q, template[i].tap[k]);
1103 if (diff_dst) {
1104 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1105 offset_in_page(IDX[k]);
1106
1107 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1108
1109 memset(q, 0, template[i].tap[k]);
1110 if (offset_in_page(q) +
1111 template[i].tap[k] < PAGE_SIZE)
1112 q[template[i].tap[k]] = 0;
1113 }
1114
1115 temp += template[i].tap[k];
1116 }
1117
1118 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1119 template[i].ilen, iv);
1120
1121 ret = enc ? crypto_skcipher_encrypt(req) :
1122 crypto_skcipher_decrypt(req);
1123
1124 switch (ret) {
1125 case 0:
1126 break;
1127 case -EINPROGRESS:
1128 case -EBUSY:
1129 wait_for_completion(&result.completion);
1130 reinit_completion(&result.completion);
1131 ret = result.err;
1132 if (!ret)
1133 break;
1134 /* fall through */
1135 default:
1136 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1137 d, e, j, algo, -ret);
1138 goto out;
1139 }
1140
1141 temp = 0;
1142 ret = -EINVAL;
1143 for (k = 0; k < template[i].np; k++) {
1144 if (diff_dst)
1145 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1146 offset_in_page(IDX[k]);
1147 else
1148 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1149 offset_in_page(IDX[k]);
1150
1151 if (memcmp(q, template[i].result + temp,
1152 template[i].tap[k])) {
1153 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1154 d, j, e, k, algo);
1155 hexdump(q, template[i].tap[k]);
1156 goto out;
1157 }
1158
1159 q += template[i].tap[k];
1160 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1161 ;
1162 if (n) {
1163 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1164 d, j, e, k, algo, n);
1165 hexdump(q, n);
1166 goto out;
1167 }
1168 temp += template[i].tap[k];
1169 }
1170 }
1171
1172 ret = 0;
1173
1174 out:
1175 skcipher_request_free(req);
1176 if (diff_dst)
1177 testmgr_free_buf(xoutbuf);
1178 out_nooutbuf:
1179 testmgr_free_buf(xbuf);
1180 out_nobuf:
1181 return ret;
1182 }
1183
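/*
 * Run __test_skcipher() for the in-place and separate-destination cases,
 * then repeat with a one byte offset and, if an alignmask is declared,
 * with an offset of alignmask + 1.
 */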
1184 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1185 struct cipher_testvec *template, unsigned int tcount)
1186 {
1187 unsigned int alignmask;
1188 int ret;
1189
1190 /* test 'dst == src' case */
1191 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1192 if (ret)
1193 return ret;
1194
1195 /* test 'dst != src' case */
1196 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1197 if (ret)
1198 return ret;
1199
1200 /* test unaligned buffers, check with one byte offset */
1201 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1202 if (ret)
1203 return ret;
1204
1205 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1206 if (alignmask) {
1207 /* Check if alignment mask for tfm is correctly set. */
1208 ret = __test_skcipher(tfm, enc, template, tcount, true,
1209 alignmask + 1);
1210 if (ret)
1211 return ret;
1212 }
1213
1214 return 0;
1215 }
1216
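/*
 * Feed each compression and decompression vector through the synchronous
 * crypto_comp API and verify both the output length and the output data.
 */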
1217 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1218 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1219 {
1220 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1221 unsigned int i;
1222 char result[COMP_BUF_SIZE];
1223 int ret;
1224
1225 for (i = 0; i < ctcount; i++) {
1226 int ilen;
1227 unsigned int dlen = COMP_BUF_SIZE;
1228
1229 memset(result, 0, sizeof (result));
1230
1231 ilen = ctemplate[i].inlen;
1232 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1233 ilen, result, &dlen);
1234 if (ret) {
1235 printk(KERN_ERR "alg: comp: compression failed "
1236 "on test %d for %s: ret=%d\n", i + 1, algo,
1237 -ret);
1238 goto out;
1239 }
1240
1241 if (dlen != ctemplate[i].outlen) {
1242 printk(KERN_ERR "alg: comp: Compression test %d "
1243 "failed for %s: output len = %d\n", i + 1, algo,
1244 dlen);
1245 ret = -EINVAL;
1246 goto out;
1247 }
1248
1249 if (memcmp(result, ctemplate[i].output, dlen)) {
1250 printk(KERN_ERR "alg: comp: Compression test %d "
1251 "failed for %s\n", i + 1, algo);
1252 hexdump(result, dlen);
1253 ret = -EINVAL;
1254 goto out;
1255 }
1256 }
1257
1258 for (i = 0; i < dtcount; i++) {
1259 int ilen;
1260 unsigned int dlen = COMP_BUF_SIZE;
1261
1262 memset(result, 0, sizeof (result));
1263
1264 ilen = dtemplate[i].inlen;
1265 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1266 ilen, result, &dlen);
1267 if (ret) {
1268 printk(KERN_ERR "alg: comp: decompression failed "
1269 "on test %d for %s: ret=%d\n", i + 1, algo,
1270 -ret);
1271 goto out;
1272 }
1273
1274 if (dlen != dtemplate[i].outlen) {
1275 printk(KERN_ERR "alg: comp: Decompression test %d "
1276 "failed for %s: output len = %d\n", i + 1, algo,
1277 dlen);
1278 ret = -EINVAL;
1279 goto out;
1280 }
1281
1282 if (memcmp(result, dtemplate[i].output, dlen)) {
1283 printk(KERN_ERR "alg: comp: Decompression test %d "
1284 "failed for %s\n", i + 1, algo);
1285 hexdump(result, dlen);
1286 ret = -EINVAL;
1287 goto out;
1288 }
1289 }
1290
1291 ret = 0;
1292
1293 out:
1294 return ret;
1295 }
1296
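/*
 * Test the partial (streaming) compression API: the input and the output
 * space are supplied in two halves across successive
 * crypto_{compress,decompress}_update() calls, followed by a final call,
 * so that the algorithm's streaming behaviour is exercised as well.
 */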
1297 static int test_pcomp(struct crypto_pcomp *tfm,
1298 struct pcomp_testvec *ctemplate,
1299 struct pcomp_testvec *dtemplate, int ctcount,
1300 int dtcount)
1301 {
1302 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1303 unsigned int i;
1304 char result[COMP_BUF_SIZE];
1305 int res;
1306
1307 for (i = 0; i < ctcount; i++) {
1308 struct comp_request req;
1309 unsigned int produced = 0;
1310
1311 res = crypto_compress_setup(tfm, ctemplate[i].params,
1312 ctemplate[i].paramsize);
1313 if (res) {
1314 pr_err("alg: pcomp: compression setup failed on test "
1315 "%d for %s: error=%d\n", i + 1, algo, res);
1316 return res;
1317 }
1318
1319 res = crypto_compress_init(tfm);
1320 if (res) {
1321 pr_err("alg: pcomp: compression init failed on test "
1322 "%d for %s: error=%d\n", i + 1, algo, res);
1323 return res;
1324 }
1325
1326 memset(result, 0, sizeof(result));
1327
1328 req.next_in = ctemplate[i].input;
1329 req.avail_in = ctemplate[i].inlen / 2;
1330 req.next_out = result;
1331 req.avail_out = ctemplate[i].outlen / 2;
1332
1333 res = crypto_compress_update(tfm, &req);
1334 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1335 pr_err("alg: pcomp: compression update failed on test "
1336 "%d for %s: error=%d\n", i + 1, algo, res);
1337 return res;
1338 }
1339 if (res > 0)
1340 produced += res;
1341
1342 /* Add remaining input data */
1343 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1344
1345 res = crypto_compress_update(tfm, &req);
1346 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1347 pr_err("alg: pcomp: compression update failed on test "
1348 "%d for %s: error=%d\n", i + 1, algo, res);
1349 return res;
1350 }
1351 if (res > 0)
1352 produced += res;
1353
1354 /* Provide remaining output space */
1355 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1356
1357 res = crypto_compress_final(tfm, &req);
1358 if (res < 0) {
1359 pr_err("alg: pcomp: compression final failed on test "
1360 "%d for %s: error=%d\n", i + 1, algo, res);
1361 return res;
1362 }
1363 produced += res;
1364
1365 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1366 pr_err("alg: comp: Compression test %d failed for %s: "
1367 "output len = %d (expected %d)\n", i + 1, algo,
1368 COMP_BUF_SIZE - req.avail_out,
1369 ctemplate[i].outlen);
1370 return -EINVAL;
1371 }
1372
1373 if (produced != ctemplate[i].outlen) {
1374 pr_err("alg: comp: Compression test %d failed for %s: "
1375 "returned len = %u (expected %d)\n", i + 1,
1376 algo, produced, ctemplate[i].outlen);
1377 return -EINVAL;
1378 }
1379
1380 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1381 pr_err("alg: pcomp: Compression test %d failed for "
1382 "%s\n", i + 1, algo);
1383 hexdump(result, ctemplate[i].outlen);
1384 return -EINVAL;
1385 }
1386 }
1387
1388 for (i = 0; i < dtcount; i++) {
1389 struct comp_request req;
1390 unsigned int produced = 0;
1391
1392 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1393 dtemplate[i].paramsize);
1394 if (res) {
1395 pr_err("alg: pcomp: decompression setup failed on "
1396 "test %d for %s: error=%d\n", i + 1, algo, res);
1397 return res;
1398 }
1399
1400 res = crypto_decompress_init(tfm);
1401 if (res) {
1402 pr_err("alg: pcomp: decompression init failed on test "
1403 "%d for %s: error=%d\n", i + 1, algo, res);
1404 return res;
1405 }
1406
1407 memset(result, 0, sizeof(result));
1408
1409 req.next_in = dtemplate[i].input;
1410 req.avail_in = dtemplate[i].inlen / 2;
1411 req.next_out = result;
1412 req.avail_out = dtemplate[i].outlen / 2;
1413
1414 res = crypto_decompress_update(tfm, &req);
1415 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1416 pr_err("alg: pcomp: decompression update failed on "
1417 "test %d for %s: error=%d\n", i + 1, algo, res);
1418 return res;
1419 }
1420 if (res > 0)
1421 produced += res;
1422
1423 /* Add remaining input data */
1424 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1425
1426 res = crypto_decompress_update(tfm, &req);
1427 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1428 pr_err("alg: pcomp: decompression update failed on "
1429 "test %d for %s: error=%d\n", i + 1, algo, res);
1430 return res;
1431 }
1432 if (res > 0)
1433 produced += res;
1434
1435 /* Provide remaining output space */
1436 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1437
1438 res = crypto_decompress_final(tfm, &req);
1439 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1440 pr_err("alg: pcomp: decompression final failed on "
1441 "test %d for %s: error=%d\n", i + 1, algo, res);
1442 return res;
1443 }
1444 if (res > 0)
1445 produced += res;
1446
1447 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1448 pr_err("alg: comp: Decompression test %d failed for "
1449 "%s: output len = %d (expected %d)\n", i + 1,
1450 algo, COMP_BUF_SIZE - req.avail_out,
1451 dtemplate[i].outlen);
1452 return -EINVAL;
1453 }
1454
1455 if (produced != dtemplate[i].outlen) {
1456 pr_err("alg: comp: Decompression test %d failed for "
1457 "%s: returned len = %u (expected %d)\n", i + 1,
1458 algo, produced, dtemplate[i].outlen);
1459 return -EINVAL;
1460 }
1461
1462 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1463 pr_err("alg: pcomp: Decompression test %d failed for "
1464 "%s\n", i + 1, algo);
1465 hexdump(result, dtemplate[i].outlen);
1466 return -EINVAL;
1467 }
1468 }
1469
1470 return 0;
1471 }
1472
1473
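/*
 * Known-answer test for a deterministic RNG: seed it with V || key || DT
 * from the vector, pull template[i].loops blocks of output and compare
 * the last block against the expected result.
 */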
1474 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1475 unsigned int tcount)
1476 {
1477 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1478 int err = 0, i, j, seedsize;
1479 u8 *seed;
1480 char result[32];
1481
1482 seedsize = crypto_rng_seedsize(tfm);
1483
1484 seed = kmalloc(seedsize, GFP_KERNEL);
1485 if (!seed) {
1486 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1487 "for %s\n", algo);
1488 return -ENOMEM;
1489 }
1490
1491 for (i = 0; i < tcount; i++) {
1492 memset(result, 0, 32);
1493
1494 memcpy(seed, template[i].v, template[i].vlen);
1495 memcpy(seed + template[i].vlen, template[i].key,
1496 template[i].klen);
1497 memcpy(seed + template[i].vlen + template[i].klen,
1498 template[i].dt, template[i].dtlen);
1499
1500 err = crypto_rng_reset(tfm, seed, seedsize);
1501 if (err) {
1502 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1503 "for %s\n", algo);
1504 goto out;
1505 }
1506
1507 for (j = 0; j < template[i].loops; j++) {
1508 err = crypto_rng_get_bytes(tfm, result,
1509 template[i].rlen);
1510 if (err < 0) {
1511 printk(KERN_ERR "alg: cprng: Failed to obtain "
1512 "the correct amount of random data for "
1513 "%s (requested %d)\n", algo,
1514 template[i].rlen);
1515 goto out;
1516 }
1517 }
1518
1519 err = memcmp(result, template[i].result,
1520 template[i].rlen);
1521 if (err) {
1522 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1523 i, algo);
1524 hexdump(result, template[i].rlen);
1525 err = -EINVAL;
1526 goto out;
1527 }
1528 }
1529
1530 out:
1531 kfree(seed);
1532 return err;
1533 }
1534
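/*
 * The alg_test_*() handlers below allocate the transform named by
 * @driver and run the test vector suites attached to @desc against it,
 * freeing the transform afterwards.
 */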
1535 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1536 u32 type, u32 mask)
1537 {
1538 struct crypto_aead *tfm;
1539 int err = 0;
1540
1541 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1542 if (IS_ERR(tfm)) {
1543 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1544 "%ld\n", driver, PTR_ERR(tfm));
1545 return PTR_ERR(tfm);
1546 }
1547
1548 if (desc->suite.aead.enc.vecs) {
1549 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1550 desc->suite.aead.enc.count);
1551 if (err)
1552 goto out;
1553 }
1554
1555 if (!err && desc->suite.aead.dec.vecs)
1556 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1557 desc->suite.aead.dec.count);
1558
1559 out:
1560 crypto_free_aead(tfm);
1561 return err;
1562 }
1563
1564 static int alg_test_cipher(const struct alg_test_desc *desc,
1565 const char *driver, u32 type, u32 mask)
1566 {
1567 struct crypto_cipher *tfm;
1568 int err = 0;
1569
1570 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1571 if (IS_ERR(tfm)) {
1572 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1573 "%s: %ld\n", driver, PTR_ERR(tfm));
1574 return PTR_ERR(tfm);
1575 }
1576
1577 if (desc->suite.cipher.enc.vecs) {
1578 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1579 desc->suite.cipher.enc.count);
1580 if (err)
1581 goto out;
1582 }
1583
1584 if (desc->suite.cipher.dec.vecs)
1585 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1586 desc->suite.cipher.dec.count);
1587
1588 out:
1589 crypto_free_cipher(tfm);
1590 return err;
1591 }
1592
1593 static int alg_test_skcipher(const struct alg_test_desc *desc,
1594 const char *driver, u32 type, u32 mask)
1595 {
1596 struct crypto_skcipher *tfm;
1597 int err = 0;
1598
1599 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1600 if (IS_ERR(tfm)) {
1601 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1602 "%s: %ld\n", driver, PTR_ERR(tfm));
1603 return PTR_ERR(tfm);
1604 }
1605
1606 if (desc->suite.cipher.enc.vecs) {
1607 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1608 desc->suite.cipher.enc.count);
1609 if (err)
1610 goto out;
1611 }
1612
1613 if (desc->suite.cipher.dec.vecs)
1614 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1615 desc->suite.cipher.dec.count);
1616
1617 out:
1618 crypto_free_skcipher(tfm);
1619 return err;
1620 }
1621
1622 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1623 u32 type, u32 mask)
1624 {
1625 struct crypto_comp *tfm;
1626 int err;
1627
1628 tfm = crypto_alloc_comp(driver, type, mask);
1629 if (IS_ERR(tfm)) {
1630 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1631 "%ld\n", driver, PTR_ERR(tfm));
1632 return PTR_ERR(tfm);
1633 }
1634
1635 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1636 desc->suite.comp.decomp.vecs,
1637 desc->suite.comp.comp.count,
1638 desc->suite.comp.decomp.count);
1639
1640 crypto_free_comp(tfm);
1641 return err;
1642 }
1643
1644 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1645 u32 type, u32 mask)
1646 {
1647 struct crypto_pcomp *tfm;
1648 int err;
1649
1650 tfm = crypto_alloc_pcomp(driver, type, mask);
1651 if (IS_ERR(tfm)) {
1652 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1653 driver, PTR_ERR(tfm));
1654 return PTR_ERR(tfm);
1655 }
1656
1657 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1658 desc->suite.pcomp.decomp.vecs,
1659 desc->suite.pcomp.comp.count,
1660 desc->suite.pcomp.decomp.count);
1661
1662 crypto_free_pcomp(tfm);
1663 return err;
1664 }
1665
1666 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1667 u32 type, u32 mask)
1668 {
1669 struct crypto_ahash *tfm;
1670 int err;
1671
1672 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1673 if (IS_ERR(tfm)) {
1674 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1675 "%ld\n", driver, PTR_ERR(tfm));
1676 return PTR_ERR(tfm);
1677 }
1678
1679 err = test_hash(tfm, desc->suite.hash.vecs,
1680 desc->suite.hash.count, true);
1681 if (!err)
1682 err = test_hash(tfm, desc->suite.hash.vecs,
1683 desc->suite.hash.count, false);
1684
1685 crypto_free_ahash(tfm);
1686 return err;
1687 }
1688
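/*
 * In addition to the generic hash tests, sanity-check the shash
 * interface: seed the descriptor context with a known CRC state and
 * verify that crypto_shash_final() returns its bitwise complement, which
 * is what the crc32c final step must produce.
 */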
1689 static int alg_test_crc32c(const struct alg_test_desc *desc,
1690 const char *driver, u32 type, u32 mask)
1691 {
1692 struct crypto_shash *tfm;
1693 u32 val;
1694 int err;
1695
1696 err = alg_test_hash(desc, driver, type, mask);
1697 if (err)
1698 goto out;
1699
1700 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1701 if (IS_ERR(tfm)) {
1702 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1703 "%ld\n", driver, PTR_ERR(tfm));
1704 err = PTR_ERR(tfm);
1705 goto out;
1706 }
1707
1708 do {
1709 SHASH_DESC_ON_STACK(shash, tfm);
1710 u32 *ctx = (u32 *)shash_desc_ctx(shash);
1711
1712 shash->tfm = tfm;
1713 shash->flags = 0;
1714
1715 *ctx = le32_to_cpu(420553207);
1716 err = crypto_shash_final(shash, (u8 *)&val);
1717 if (err) {
1718 printk(KERN_ERR "alg: crc32c: Operation failed for "
1719 "%s: %d\n", driver, err);
1720 break;
1721 }
1722
1723 if (val != ~420553207) {
1724 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1725 "%d\n", driver, val);
1726 err = -EINVAL;
1727 }
1728 } while (0);
1729
1730 crypto_free_shash(tfm);
1731
1732 out:
1733 return err;
1734 }
1735
1736 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1737 u32 type, u32 mask)
1738 {
1739 struct crypto_rng *rng;
1740 int err;
1741
1742 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1743 if (IS_ERR(rng)) {
1744 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1745 "%ld\n", driver, PTR_ERR(rng));
1746 return PTR_ERR(rng);
1747 }
1748
1749 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1750
1751 crypto_free_rng(rng);
1752
1753 return err;
1754 }
1755
1756
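/*
 * Single CAVS-style DRBG test: instantiate the DRBG with the test entropy
 * and personalization string, generate two blocks of output (re-supplying
 * prediction-resistance entropy before each request when @pr is set) and
 * compare the second block against the expected value.
 */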
1757 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1758 const char *driver, u32 type, u32 mask)
1759 {
1760 int ret = -EAGAIN;
1761 struct crypto_rng *drng;
1762 struct drbg_test_data test_data;
1763 struct drbg_string addtl, pers, testentropy;
1764 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1765
1766 if (!buf)
1767 return -ENOMEM;
1768
1769 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1770 if (IS_ERR(drng)) {
1771 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1772 "%s\n", driver);
1773 kzfree(buf);
1774 return -ENOMEM;
1775 }
1776
1777 test_data.testentropy = &testentropy;
1778 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1779 drbg_string_fill(&pers, test->pers, test->perslen);
1780 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1781 if (ret) {
1782 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1783 goto outbuf;
1784 }
1785
1786 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1787 if (pr) {
1788 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1789 ret = crypto_drbg_get_bytes_addtl_test(drng,
1790 buf, test->expectedlen, &addtl, &test_data);
1791 } else {
1792 ret = crypto_drbg_get_bytes_addtl(drng,
1793 buf, test->expectedlen, &addtl);
1794 }
1795 if (ret < 0) {
1796 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1797 "driver %s\n", driver);
1798 goto outbuf;
1799 }
1800
1801 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1802 if (pr) {
1803 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1804 ret = crypto_drbg_get_bytes_addtl_test(drng,
1805 buf, test->expectedlen, &addtl, &test_data);
1806 } else {
1807 ret = crypto_drbg_get_bytes_addtl(drng,
1808 buf, test->expectedlen, &addtl);
1809 }
1810 if (ret < 0) {
1811 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1812 "driver %s\n", driver);
1813 goto outbuf;
1814 }
1815
1816 ret = memcmp(test->expected, buf, test->expectedlen);
1817
1818 outbuf:
1819 crypto_free_rng(drng);
1820 kzfree(buf);
1821 return ret;
1822 }
1823
1824
1825 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1826 u32 type, u32 mask)
1827 {
1828 int err = 0;
1829 int pr = 0;
1830 int i = 0;
1831 struct drbg_testvec *template = desc->suite.drbg.vecs;
1832 unsigned int tcount = desc->suite.drbg.count;
1833
1834 if (memcmp(driver, "drbg_pr_", 8) == 0)
1835 pr = 1;
1836
1837 for (i = 0; i < tcount; i++) {
1838 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1839 if (err) {
1840 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1841 i, driver);
1842 err = -EINVAL;
1843 break;
1844 }
1845 }
1846 return err;
1847
1848 }
1849
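/*
 * Run one RSA vector: load the public or private key, encrypt the message
 * (split across two sg entries) and compare it with the expected
 * ciphertext, then, for private-key vectors, decrypt the ciphertext and
 * check that the original message comes back.
 */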
1850 static int do_test_rsa(struct crypto_akcipher *tfm,
1851 struct akcipher_testvec *vecs)
1852 {
1853 char *xbuf[XBUFSIZE];
1854 struct akcipher_request *req;
1855 void *outbuf_enc = NULL;
1856 void *outbuf_dec = NULL;
1857 struct tcrypt_result result;
1858 unsigned int out_len_max, out_len = 0;
1859 int err = -ENOMEM;
1860 struct scatterlist src, dst, src_tab[2];
1861
1862 if (testmgr_alloc_buf(xbuf))
1863 return err;
1864
1865 req = akcipher_request_alloc(tfm, GFP_KERNEL);
1866 if (!req)
1867 goto free_xbuf;
1868
1869 init_completion(&result.completion);
1870
1871 if (vecs->public_key_vec)
1872 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
1873 vecs->key_len);
1874 else
1875 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
1876 vecs->key_len);
1877 if (err)
1878 goto free_req;
1879
1880 out_len_max = crypto_akcipher_maxsize(tfm);
1881 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
1882 if (!outbuf_enc)
1883 goto free_req;
1884
1885 if (WARN_ON(vecs->m_size > PAGE_SIZE))
1886 goto free_all;
1887
1888 memcpy(xbuf[0], vecs->m, vecs->m_size);
1889
1890 sg_init_table(src_tab, 2);
1891 sg_set_buf(&src_tab[0], xbuf[0], 8);
1892 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
1893 sg_init_one(&dst, outbuf_enc, out_len_max);
1894 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
1895 out_len_max);
1896 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1897 tcrypt_complete, &result);
1898
1899 /* Run RSA encrypt - c = m^e mod n;*/
1900 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1901 if (err) {
1902 pr_err("alg: rsa: encrypt test failed. err %d\n", err);
1903 goto free_all;
1904 }
1905 if (req->dst_len != vecs->c_size) {
1906 pr_err("alg: rsa: encrypt test failed. Invalid output len\n");
1907 err = -EINVAL;
1908 goto free_all;
1909 }
1910 /* verify that encrypted message is equal to expected */
1911 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
1912 pr_err("alg: rsa: encrypt test failed. Invalid output\n");
1913 err = -EINVAL;
1914 goto free_all;
1915 }
1916 /* Don't invoke decrypt for vectors with public key */
1917 if (vecs->public_key_vec) {
1918 err = 0;
1919 goto free_all;
1920 }
1921 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1922 if (!outbuf_dec) {
1923 err = -ENOMEM;
1924 goto free_all;
1925 }
1926
1927 if (WARN_ON(vecs->c_size > PAGE_SIZE))
1928 goto free_all;
1929
1930 memcpy(xbuf[0], vecs->c, vecs->c_size);
1931
1932 sg_init_one(&src, xbuf[0], vecs->c_size);
1933 sg_init_one(&dst, outbuf_dec, out_len_max);
1934 init_completion(&result.completion);
1935 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
1936
1937 /* Run RSA decrypt - m = c^d mod n;*/
1938 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
1939 if (err) {
1940 pr_err("alg: rsa: decrypt test failed. err %d\n", err);
1941 goto free_all;
1942 }
1943 out_len = req->dst_len;
1944 if (out_len != vecs->m_size) {
1945 pr_err("alg: rsa: decrypt test failed. Invalid output len\n");
1946 err = -EINVAL;
1947 goto free_all;
1948 }
1949 /* verify that decrypted message is equal to the original msg */
1950 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) {
1951 pr_err("alg: rsa: decrypt test failed. Invalid output\n");
1952 err = -EINVAL;
1953 }
1954 free_all:
1955 kfree(outbuf_dec);
1956 kfree(outbuf_enc);
1957 free_req:
1958 akcipher_request_free(req);
1959 free_xbuf:
1960 testmgr_free_buf(xbuf);
1961 return err;
1962 }
1963
1964 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs,
1965 unsigned int tcount)
1966 {
1967 int ret, i;
1968
1969 for (i = 0; i < tcount; i++) {
1970 ret = do_test_rsa(tfm, vecs++);
1971 if (ret) {
1972 pr_err("alg: rsa: test failed on vector %d, err=%d\n",
1973 i + 1, ret);
1974 return ret;
1975 }
1976 }
1977 return 0;
1978 }
1979
1980 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
1981 struct akcipher_testvec *vecs, unsigned int tcount)
1982 {
1983 if (strncmp(alg, "rsa", 3) == 0)
1984 return test_rsa(tfm, vecs, tcount);
1985
1986 return 0;
1987 }
1988
1989 static int alg_test_akcipher(const struct alg_test_desc *desc,
1990 const char *driver, u32 type, u32 mask)
1991 {
1992 struct crypto_akcipher *tfm;
1993 int err = 0;
1994
1995 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1996 if (IS_ERR(tfm)) {
1997 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
1998 driver, PTR_ERR(tfm));
1999 return PTR_ERR(tfm);
2000 }
2001 if (desc->suite.akcipher.vecs)
2002 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2003 desc->suite.akcipher.count);
2004
2005 crypto_free_akcipher(tfm);
2006 return err;
2007 }
2008
2009 static int alg_test_null(const struct alg_test_desc *desc,
2010 const char *driver, u32 type, u32 mask)
2011 {
2012 return 0;
2013 }
2014
2015 /* Please keep this list sorted by algorithm name. */
2016 static const struct alg_test_desc alg_test_descs[] = {
2017 {
2018 .alg = "__cbc-cast5-avx",
2019 .test = alg_test_null,
2020 }, {
2021 .alg = "__cbc-cast6-avx",
2022 .test = alg_test_null,
2023 }, {
2024 .alg = "__cbc-serpent-avx",
2025 .test = alg_test_null,
2026 }, {
2027 .alg = "__cbc-serpent-avx2",
2028 .test = alg_test_null,
2029 }, {
2030 .alg = "__cbc-serpent-sse2",
2031 .test = alg_test_null,
2032 }, {
2033 .alg = "__cbc-twofish-avx",
2034 .test = alg_test_null,
2035 }, {
2036 .alg = "__driver-cbc-aes-aesni",
2037 .test = alg_test_null,
2038 .fips_allowed = 1,
2039 }, {
2040 .alg = "__driver-cbc-camellia-aesni",
2041 .test = alg_test_null,
2042 }, {
2043 .alg = "__driver-cbc-camellia-aesni-avx2",
2044 .test = alg_test_null,
2045 }, {
2046 .alg = "__driver-cbc-cast5-avx",
2047 .test = alg_test_null,
2048 }, {
2049 .alg = "__driver-cbc-cast6-avx",
2050 .test = alg_test_null,
2051 }, {
2052 .alg = "__driver-cbc-serpent-avx",
2053 .test = alg_test_null,
2054 }, {
2055 .alg = "__driver-cbc-serpent-avx2",
2056 .test = alg_test_null,
2057 }, {
2058 .alg = "__driver-cbc-serpent-sse2",
2059 .test = alg_test_null,
2060 }, {
2061 .alg = "__driver-cbc-twofish-avx",
2062 .test = alg_test_null,
2063 }, {
2064 .alg = "__driver-ecb-aes-aesni",
2065 .test = alg_test_null,
2066 .fips_allowed = 1,
2067 }, {
2068 .alg = "__driver-ecb-camellia-aesni",
2069 .test = alg_test_null,
2070 }, {
2071 .alg = "__driver-ecb-camellia-aesni-avx2",
2072 .test = alg_test_null,
2073 }, {
2074 .alg = "__driver-ecb-cast5-avx",
2075 .test = alg_test_null,
2076 }, {
2077 .alg = "__driver-ecb-cast6-avx",
2078 .test = alg_test_null,
2079 }, {
2080 .alg = "__driver-ecb-serpent-avx",
2081 .test = alg_test_null,
2082 }, {
2083 .alg = "__driver-ecb-serpent-avx2",
2084 .test = alg_test_null,
2085 }, {
2086 .alg = "__driver-ecb-serpent-sse2",
2087 .test = alg_test_null,
2088 }, {
2089 .alg = "__driver-ecb-twofish-avx",
2090 .test = alg_test_null,
2091 }, {
2092 .alg = "__driver-gcm-aes-aesni",
2093 .test = alg_test_null,
2094 .fips_allowed = 1,
2095 }, {
2096 .alg = "__ghash-pclmulqdqni",
2097 .test = alg_test_null,
2098 .fips_allowed = 1,
2099 }, {
2100 .alg = "ansi_cprng",
2101 .test = alg_test_cprng,
2102 .fips_allowed = 1,
2103 .suite = {
2104 .cprng = {
2105 .vecs = ansi_cprng_aes_tv_template,
2106 .count = ANSI_CPRNG_AES_TEST_VECTORS
2107 }
2108 }
2109 }, {
2110 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2111 .test = alg_test_aead,
2112 .suite = {
2113 .aead = {
2114 .enc = {
2115 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2116 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2117 },
2118 .dec = {
2119 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2120 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2121 }
2122 }
2123 }
2124 }, {
2125 .alg = "authenc(hmac(sha1),cbc(aes))",
2126 .test = alg_test_aead,
2127 .suite = {
2128 .aead = {
2129 .enc = {
2130 .vecs =
2131 hmac_sha1_aes_cbc_enc_tv_temp,
2132 .count =
2133 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2134 }
2135 }
2136 }
2137 }, {
2138 .alg = "authenc(hmac(sha1),cbc(des))",
2139 .test = alg_test_aead,
2140 .suite = {
2141 .aead = {
2142 .enc = {
2143 .vecs =
2144 hmac_sha1_des_cbc_enc_tv_temp,
2145 .count =
2146 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2147 }
2148 }
2149 }
2150 }, {
2151 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2152 .test = alg_test_aead,
2153 .suite = {
2154 .aead = {
2155 .enc = {
2156 .vecs =
2157 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2158 .count =
2159 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2160 }
2161 }
2162 }
2163 }, {
2164 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2165 .test = alg_test_aead,
2166 .suite = {
2167 .aead = {
2168 .enc = {
2169 .vecs =
2170 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2171 .count =
2172 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2173 },
2174 .dec = {
2175 .vecs =
2176 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2177 .count =
2178 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2179 }
2180 }
2181 }
2182 }, {
2183 .alg = "authenc(hmac(sha224),cbc(des))",
2184 .test = alg_test_aead,
2185 .suite = {
2186 .aead = {
2187 .enc = {
2188 .vecs =
2189 hmac_sha224_des_cbc_enc_tv_temp,
2190 .count =
2191 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2192 }
2193 }
2194 }
2195 }, {
2196 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2197 .test = alg_test_aead,
2198 .suite = {
2199 .aead = {
2200 .enc = {
2201 .vecs =
2202 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2203 .count =
2204 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2205 }
2206 }
2207 }
2208 }, {
2209 .alg = "authenc(hmac(sha256),cbc(aes))",
2210 .test = alg_test_aead,
2211 .suite = {
2212 .aead = {
2213 .enc = {
2214 .vecs =
2215 hmac_sha256_aes_cbc_enc_tv_temp,
2216 .count =
2217 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2218 }
2219 }
2220 }
2221 }, {
2222 .alg = "authenc(hmac(sha256),cbc(des))",
2223 .test = alg_test_aead,
2224 .suite = {
2225 .aead = {
2226 .enc = {
2227 .vecs =
2228 hmac_sha256_des_cbc_enc_tv_temp,
2229 .count =
2230 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2231 }
2232 }
2233 }
2234 }, {
2235 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2236 .test = alg_test_aead,
2237 .suite = {
2238 .aead = {
2239 .enc = {
2240 .vecs =
2241 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2242 .count =
2243 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2244 }
2245 }
2246 }
2247 }, {
2248 .alg = "authenc(hmac(sha384),cbc(des))",
2249 .test = alg_test_aead,
2250 .suite = {
2251 .aead = {
2252 .enc = {
2253 .vecs =
2254 hmac_sha384_des_cbc_enc_tv_temp,
2255 .count =
2256 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2257 }
2258 }
2259 }
2260 }, {
2261 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2262 .test = alg_test_aead,
2263 .suite = {
2264 .aead = {
2265 .enc = {
2266 .vecs =
2267 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2268 .count =
2269 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2270 }
2271 }
2272 }
2273 }, {
2274 .alg = "authenc(hmac(sha512),cbc(aes))",
2275 .test = alg_test_aead,
2276 .suite = {
2277 .aead = {
2278 .enc = {
2279 .vecs =
2280 hmac_sha512_aes_cbc_enc_tv_temp,
2281 .count =
2282 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2283 }
2284 }
2285 }
2286 }, {
2287 .alg = "authenc(hmac(sha512),cbc(des))",
2288 .test = alg_test_aead,
2289 .suite = {
2290 .aead = {
2291 .enc = {
2292 .vecs =
2293 hmac_sha512_des_cbc_enc_tv_temp,
2294 .count =
2295 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2296 }
2297 }
2298 }
2299 }, {
2300 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2301 .test = alg_test_aead,
2302 .suite = {
2303 .aead = {
2304 .enc = {
2305 .vecs =
2306 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2307 .count =
2308 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2309 }
2310 }
2311 }
2312 }, {
2313 .alg = "cbc(aes)",
2314 .test = alg_test_skcipher,
2315 .fips_allowed = 1,
2316 .suite = {
2317 .cipher = {
2318 .enc = {
2319 .vecs = aes_cbc_enc_tv_template,
2320 .count = AES_CBC_ENC_TEST_VECTORS
2321 },
2322 .dec = {
2323 .vecs = aes_cbc_dec_tv_template,
2324 .count = AES_CBC_DEC_TEST_VECTORS
2325 }
2326 }
2327 }
2328 }, {
2329 .alg = "cbc(anubis)",
2330 .test = alg_test_skcipher,
2331 .suite = {
2332 .cipher = {
2333 .enc = {
2334 .vecs = anubis_cbc_enc_tv_template,
2335 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2336 },
2337 .dec = {
2338 .vecs = anubis_cbc_dec_tv_template,
2339 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2340 }
2341 }
2342 }
2343 }, {
2344 .alg = "cbc(blowfish)",
2345 .test = alg_test_skcipher,
2346 .suite = {
2347 .cipher = {
2348 .enc = {
2349 .vecs = bf_cbc_enc_tv_template,
2350 .count = BF_CBC_ENC_TEST_VECTORS
2351 },
2352 .dec = {
2353 .vecs = bf_cbc_dec_tv_template,
2354 .count = BF_CBC_DEC_TEST_VECTORS
2355 }
2356 }
2357 }
2358 }, {
2359 .alg = "cbc(camellia)",
2360 .test = alg_test_skcipher,
2361 .suite = {
2362 .cipher = {
2363 .enc = {
2364 .vecs = camellia_cbc_enc_tv_template,
2365 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2366 },
2367 .dec = {
2368 .vecs = camellia_cbc_dec_tv_template,
2369 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2370 }
2371 }
2372 }
2373 }, {
2374 .alg = "cbc(cast5)",
2375 .test = alg_test_skcipher,
2376 .suite = {
2377 .cipher = {
2378 .enc = {
2379 .vecs = cast5_cbc_enc_tv_template,
2380 .count = CAST5_CBC_ENC_TEST_VECTORS
2381 },
2382 .dec = {
2383 .vecs = cast5_cbc_dec_tv_template,
2384 .count = CAST5_CBC_DEC_TEST_VECTORS
2385 }
2386 }
2387 }
2388 }, {
2389 .alg = "cbc(cast6)",
2390 .test = alg_test_skcipher,
2391 .suite = {
2392 .cipher = {
2393 .enc = {
2394 .vecs = cast6_cbc_enc_tv_template,
2395 .count = CAST6_CBC_ENC_TEST_VECTORS
2396 },
2397 .dec = {
2398 .vecs = cast6_cbc_dec_tv_template,
2399 .count = CAST6_CBC_DEC_TEST_VECTORS
2400 }
2401 }
2402 }
2403 }, {
2404 .alg = "cbc(des)",
2405 .test = alg_test_skcipher,
2406 .suite = {
2407 .cipher = {
2408 .enc = {
2409 .vecs = des_cbc_enc_tv_template,
2410 .count = DES_CBC_ENC_TEST_VECTORS
2411 },
2412 .dec = {
2413 .vecs = des_cbc_dec_tv_template,
2414 .count = DES_CBC_DEC_TEST_VECTORS
2415 }
2416 }
2417 }
2418 }, {
2419 .alg = "cbc(des3_ede)",
2420 .test = alg_test_skcipher,
2421 .fips_allowed = 1,
2422 .suite = {
2423 .cipher = {
2424 .enc = {
2425 .vecs = des3_ede_cbc_enc_tv_template,
2426 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2427 },
2428 .dec = {
2429 .vecs = des3_ede_cbc_dec_tv_template,
2430 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2431 }
2432 }
2433 }
2434 }, {
2435 .alg = "cbc(serpent)",
2436 .test = alg_test_skcipher,
2437 .suite = {
2438 .cipher = {
2439 .enc = {
2440 .vecs = serpent_cbc_enc_tv_template,
2441 .count = SERPENT_CBC_ENC_TEST_VECTORS
2442 },
2443 .dec = {
2444 .vecs = serpent_cbc_dec_tv_template,
2445 .count = SERPENT_CBC_DEC_TEST_VECTORS
2446 }
2447 }
2448 }
2449 }, {
2450 .alg = "cbc(twofish)",
2451 .test = alg_test_skcipher,
2452 .suite = {
2453 .cipher = {
2454 .enc = {
2455 .vecs = tf_cbc_enc_tv_template,
2456 .count = TF_CBC_ENC_TEST_VECTORS
2457 },
2458 .dec = {
2459 .vecs = tf_cbc_dec_tv_template,
2460 .count = TF_CBC_DEC_TEST_VECTORS
2461 }
2462 }
2463 }
2464 }, {
2465 .alg = "ccm(aes)",
2466 .test = alg_test_aead,
2467 .fips_allowed = 1,
2468 .suite = {
2469 .aead = {
2470 .enc = {
2471 .vecs = aes_ccm_enc_tv_template,
2472 .count = AES_CCM_ENC_TEST_VECTORS
2473 },
2474 .dec = {
2475 .vecs = aes_ccm_dec_tv_template,
2476 .count = AES_CCM_DEC_TEST_VECTORS
2477 }
2478 }
2479 }
2480 }, {
2481 .alg = "chacha20",
2482 .test = alg_test_skcipher,
2483 .suite = {
2484 .cipher = {
2485 .enc = {
2486 .vecs = chacha20_enc_tv_template,
2487 .count = CHACHA20_ENC_TEST_VECTORS
2488 },
2489 .dec = {
2490 					.vecs = chacha20_enc_tv_template, /* stream cipher: same vectors as enc */
2491 .count = CHACHA20_ENC_TEST_VECTORS
2492 },
2493 }
2494 }
2495 }, {
2496 .alg = "cmac(aes)",
2497 .fips_allowed = 1,
2498 .test = alg_test_hash,
2499 .suite = {
2500 .hash = {
2501 .vecs = aes_cmac128_tv_template,
2502 .count = CMAC_AES_TEST_VECTORS
2503 }
2504 }
2505 }, {
2506 .alg = "cmac(des3_ede)",
2507 .fips_allowed = 1,
2508 .test = alg_test_hash,
2509 .suite = {
2510 .hash = {
2511 .vecs = des3_ede_cmac64_tv_template,
2512 .count = CMAC_DES3_EDE_TEST_VECTORS
2513 }
2514 }
2515 }, {
2516 .alg = "compress_null",
2517 .test = alg_test_null,
2518 }, {
2519 .alg = "crc32",
2520 .test = alg_test_hash,
2521 .suite = {
2522 .hash = {
2523 .vecs = crc32_tv_template,
2524 .count = CRC32_TEST_VECTORS
2525 }
2526 }
2527 }, {
2528 .alg = "crc32c",
2529 .test = alg_test_crc32c,
2530 .fips_allowed = 1,
2531 .suite = {
2532 .hash = {
2533 .vecs = crc32c_tv_template,
2534 .count = CRC32C_TEST_VECTORS
2535 }
2536 }
2537 }, {
2538 .alg = "crct10dif",
2539 .test = alg_test_hash,
2540 .fips_allowed = 1,
2541 .suite = {
2542 .hash = {
2543 .vecs = crct10dif_tv_template,
2544 .count = CRCT10DIF_TEST_VECTORS
2545 }
2546 }
2547 }, {
2548 .alg = "cryptd(__driver-cbc-aes-aesni)",
2549 .test = alg_test_null,
2550 .fips_allowed = 1,
2551 }, {
2552 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2553 .test = alg_test_null,
2554 }, {
2555 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2556 .test = alg_test_null,
2557 }, {
2558 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2559 .test = alg_test_null,
2560 }, {
2561 .alg = "cryptd(__driver-ecb-aes-aesni)",
2562 .test = alg_test_null,
2563 .fips_allowed = 1,
2564 }, {
2565 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2566 .test = alg_test_null,
2567 }, {
2568 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2569 .test = alg_test_null,
2570 }, {
2571 .alg = "cryptd(__driver-ecb-cast5-avx)",
2572 .test = alg_test_null,
2573 }, {
2574 .alg = "cryptd(__driver-ecb-cast6-avx)",
2575 .test = alg_test_null,
2576 }, {
2577 .alg = "cryptd(__driver-ecb-serpent-avx)",
2578 .test = alg_test_null,
2579 }, {
2580 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2581 .test = alg_test_null,
2582 }, {
2583 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2584 .test = alg_test_null,
2585 }, {
2586 .alg = "cryptd(__driver-ecb-twofish-avx)",
2587 .test = alg_test_null,
2588 }, {
2589 .alg = "cryptd(__driver-gcm-aes-aesni)",
2590 .test = alg_test_null,
2591 .fips_allowed = 1,
2592 }, {
2593 .alg = "cryptd(__ghash-pclmulqdqni)",
2594 .test = alg_test_null,
2595 .fips_allowed = 1,
2596 }, {
2597 .alg = "ctr(aes)",
2598 .test = alg_test_skcipher,
2599 .fips_allowed = 1,
2600 .suite = {
2601 .cipher = {
2602 .enc = {
2603 .vecs = aes_ctr_enc_tv_template,
2604 .count = AES_CTR_ENC_TEST_VECTORS
2605 },
2606 .dec = {
2607 .vecs = aes_ctr_dec_tv_template,
2608 .count = AES_CTR_DEC_TEST_VECTORS
2609 }
2610 }
2611 }
2612 }, {
2613 .alg = "ctr(blowfish)",
2614 .test = alg_test_skcipher,
2615 .suite = {
2616 .cipher = {
2617 .enc = {
2618 .vecs = bf_ctr_enc_tv_template,
2619 .count = BF_CTR_ENC_TEST_VECTORS
2620 },
2621 .dec = {
2622 .vecs = bf_ctr_dec_tv_template,
2623 .count = BF_CTR_DEC_TEST_VECTORS
2624 }
2625 }
2626 }
2627 }, {
2628 .alg = "ctr(camellia)",
2629 .test = alg_test_skcipher,
2630 .suite = {
2631 .cipher = {
2632 .enc = {
2633 .vecs = camellia_ctr_enc_tv_template,
2634 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2635 },
2636 .dec = {
2637 .vecs = camellia_ctr_dec_tv_template,
2638 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2639 }
2640 }
2641 }
2642 }, {
2643 .alg = "ctr(cast5)",
2644 .test = alg_test_skcipher,
2645 .suite = {
2646 .cipher = {
2647 .enc = {
2648 .vecs = cast5_ctr_enc_tv_template,
2649 .count = CAST5_CTR_ENC_TEST_VECTORS
2650 },
2651 .dec = {
2652 .vecs = cast5_ctr_dec_tv_template,
2653 .count = CAST5_CTR_DEC_TEST_VECTORS
2654 }
2655 }
2656 }
2657 }, {
2658 .alg = "ctr(cast6)",
2659 .test = alg_test_skcipher,
2660 .suite = {
2661 .cipher = {
2662 .enc = {
2663 .vecs = cast6_ctr_enc_tv_template,
2664 .count = CAST6_CTR_ENC_TEST_VECTORS
2665 },
2666 .dec = {
2667 .vecs = cast6_ctr_dec_tv_template,
2668 .count = CAST6_CTR_DEC_TEST_VECTORS
2669 }
2670 }
2671 }
2672 }, {
2673 .alg = "ctr(des)",
2674 .test = alg_test_skcipher,
2675 .suite = {
2676 .cipher = {
2677 .enc = {
2678 .vecs = des_ctr_enc_tv_template,
2679 .count = DES_CTR_ENC_TEST_VECTORS
2680 },
2681 .dec = {
2682 .vecs = des_ctr_dec_tv_template,
2683 .count = DES_CTR_DEC_TEST_VECTORS
2684 }
2685 }
2686 }
2687 }, {
2688 .alg = "ctr(des3_ede)",
2689 .test = alg_test_skcipher,
2690 .suite = {
2691 .cipher = {
2692 .enc = {
2693 .vecs = des3_ede_ctr_enc_tv_template,
2694 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2695 },
2696 .dec = {
2697 .vecs = des3_ede_ctr_dec_tv_template,
2698 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2699 }
2700 }
2701 }
2702 }, {
2703 .alg = "ctr(serpent)",
2704 .test = alg_test_skcipher,
2705 .suite = {
2706 .cipher = {
2707 .enc = {
2708 .vecs = serpent_ctr_enc_tv_template,
2709 .count = SERPENT_CTR_ENC_TEST_VECTORS
2710 },
2711 .dec = {
2712 .vecs = serpent_ctr_dec_tv_template,
2713 .count = SERPENT_CTR_DEC_TEST_VECTORS
2714 }
2715 }
2716 }
2717 }, {
2718 .alg = "ctr(twofish)",
2719 .test = alg_test_skcipher,
2720 .suite = {
2721 .cipher = {
2722 .enc = {
2723 .vecs = tf_ctr_enc_tv_template,
2724 .count = TF_CTR_ENC_TEST_VECTORS
2725 },
2726 .dec = {
2727 .vecs = tf_ctr_dec_tv_template,
2728 .count = TF_CTR_DEC_TEST_VECTORS
2729 }
2730 }
2731 }
2732 }, {
2733 .alg = "cts(cbc(aes))",
2734 .test = alg_test_skcipher,
2735 .suite = {
2736 .cipher = {
2737 .enc = {
2738 .vecs = cts_mode_enc_tv_template,
2739 .count = CTS_MODE_ENC_TEST_VECTORS
2740 },
2741 .dec = {
2742 .vecs = cts_mode_dec_tv_template,
2743 .count = CTS_MODE_DEC_TEST_VECTORS
2744 }
2745 }
2746 }
2747 }, {
2748 .alg = "deflate",
2749 .test = alg_test_comp,
2750 .fips_allowed = 1,
2751 .suite = {
2752 .comp = {
2753 .comp = {
2754 .vecs = deflate_comp_tv_template,
2755 .count = DEFLATE_COMP_TEST_VECTORS
2756 },
2757 .decomp = {
2758 .vecs = deflate_decomp_tv_template,
2759 .count = DEFLATE_DECOMP_TEST_VECTORS
2760 }
2761 }
2762 }
2763 }, {
2764 .alg = "digest_null",
2765 .test = alg_test_null,
2766 }, {
2767 .alg = "drbg_nopr_ctr_aes128",
2768 .test = alg_test_drbg,
2769 .fips_allowed = 1,
2770 .suite = {
2771 .drbg = {
2772 .vecs = drbg_nopr_ctr_aes128_tv_template,
2773 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2774 }
2775 }
2776 }, {
2777 .alg = "drbg_nopr_ctr_aes192",
2778 .test = alg_test_drbg,
2779 .fips_allowed = 1,
2780 .suite = {
2781 .drbg = {
2782 .vecs = drbg_nopr_ctr_aes192_tv_template,
2783 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2784 }
2785 }
2786 }, {
2787 .alg = "drbg_nopr_ctr_aes256",
2788 .test = alg_test_drbg,
2789 .fips_allowed = 1,
2790 .suite = {
2791 .drbg = {
2792 .vecs = drbg_nopr_ctr_aes256_tv_template,
2793 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2794 }
2795 }
2796 }, {
2797 /*
2798 * There is no need to specifically test the DRBG with every
2799 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2800 */
2801 .alg = "drbg_nopr_hmac_sha1",
2802 .fips_allowed = 1,
2803 .test = alg_test_null,
2804 }, {
2805 .alg = "drbg_nopr_hmac_sha256",
2806 .test = alg_test_drbg,
2807 .fips_allowed = 1,
2808 .suite = {
2809 .drbg = {
2810 .vecs = drbg_nopr_hmac_sha256_tv_template,
2811 .count =
2812 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2813 }
2814 }
2815 }, {
2816 /* covered by drbg_nopr_hmac_sha256 test */
2817 .alg = "drbg_nopr_hmac_sha384",
2818 .fips_allowed = 1,
2819 .test = alg_test_null,
2820 }, {
2821 .alg = "drbg_nopr_hmac_sha512",
2822 .test = alg_test_null,
2823 .fips_allowed = 1,
2824 }, {
2825 .alg = "drbg_nopr_sha1",
2826 .fips_allowed = 1,
2827 .test = alg_test_null,
2828 }, {
2829 .alg = "drbg_nopr_sha256",
2830 .test = alg_test_drbg,
2831 .fips_allowed = 1,
2832 .suite = {
2833 .drbg = {
2834 .vecs = drbg_nopr_sha256_tv_template,
2835 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2836 }
2837 }
2838 }, {
2839 /* covered by drbg_nopr_sha256 test */
2840 .alg = "drbg_nopr_sha384",
2841 .fips_allowed = 1,
2842 .test = alg_test_null,
2843 }, {
2844 .alg = "drbg_nopr_sha512",
2845 .fips_allowed = 1,
2846 .test = alg_test_null,
2847 }, {
2848 .alg = "drbg_pr_ctr_aes128",
2849 .test = alg_test_drbg,
2850 .fips_allowed = 1,
2851 .suite = {
2852 .drbg = {
2853 .vecs = drbg_pr_ctr_aes128_tv_template,
2854 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2855 }
2856 }
2857 }, {
2858 /* covered by drbg_pr_ctr_aes128 test */
2859 .alg = "drbg_pr_ctr_aes192",
2860 .fips_allowed = 1,
2861 .test = alg_test_null,
2862 }, {
2863 .alg = "drbg_pr_ctr_aes256",
2864 .fips_allowed = 1,
2865 .test = alg_test_null,
2866 }, {
2867 .alg = "drbg_pr_hmac_sha1",
2868 .fips_allowed = 1,
2869 .test = alg_test_null,
2870 }, {
2871 .alg = "drbg_pr_hmac_sha256",
2872 .test = alg_test_drbg,
2873 .fips_allowed = 1,
2874 .suite = {
2875 .drbg = {
2876 .vecs = drbg_pr_hmac_sha256_tv_template,
2877 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2878 }
2879 }
2880 }, {
2881 /* covered by drbg_pr_hmac_sha256 test */
2882 .alg = "drbg_pr_hmac_sha384",
2883 .fips_allowed = 1,
2884 .test = alg_test_null,
2885 }, {
2886 .alg = "drbg_pr_hmac_sha512",
2887 .test = alg_test_null,
2888 .fips_allowed = 1,
2889 }, {
2890 .alg = "drbg_pr_sha1",
2891 .fips_allowed = 1,
2892 .test = alg_test_null,
2893 }, {
2894 .alg = "drbg_pr_sha256",
2895 .test = alg_test_drbg,
2896 .fips_allowed = 1,
2897 .suite = {
2898 .drbg = {
2899 .vecs = drbg_pr_sha256_tv_template,
2900 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2901 }
2902 }
2903 }, {
2904 /* covered by drbg_pr_sha256 test */
2905 .alg = "drbg_pr_sha384",
2906 .fips_allowed = 1,
2907 .test = alg_test_null,
2908 }, {
2909 .alg = "drbg_pr_sha512",
2910 .fips_allowed = 1,
2911 .test = alg_test_null,
2912 }, {
2913 .alg = "ecb(__aes-aesni)",
2914 .test = alg_test_null,
2915 .fips_allowed = 1,
2916 }, {
2917 .alg = "ecb(aes)",
2918 .test = alg_test_skcipher,
2919 .fips_allowed = 1,
2920 .suite = {
2921 .cipher = {
2922 .enc = {
2923 .vecs = aes_enc_tv_template,
2924 .count = AES_ENC_TEST_VECTORS
2925 },
2926 .dec = {
2927 .vecs = aes_dec_tv_template,
2928 .count = AES_DEC_TEST_VECTORS
2929 }
2930 }
2931 }
2932 }, {
2933 .alg = "ecb(anubis)",
2934 .test = alg_test_skcipher,
2935 .suite = {
2936 .cipher = {
2937 .enc = {
2938 .vecs = anubis_enc_tv_template,
2939 .count = ANUBIS_ENC_TEST_VECTORS
2940 },
2941 .dec = {
2942 .vecs = anubis_dec_tv_template,
2943 .count = ANUBIS_DEC_TEST_VECTORS
2944 }
2945 }
2946 }
2947 }, {
2948 .alg = "ecb(arc4)",
2949 .test = alg_test_skcipher,
2950 .suite = {
2951 .cipher = {
2952 .enc = {
2953 .vecs = arc4_enc_tv_template,
2954 .count = ARC4_ENC_TEST_VECTORS
2955 },
2956 .dec = {
2957 .vecs = arc4_dec_tv_template,
2958 .count = ARC4_DEC_TEST_VECTORS
2959 }
2960 }
2961 }
2962 }, {
2963 .alg = "ecb(blowfish)",
2964 .test = alg_test_skcipher,
2965 .suite = {
2966 .cipher = {
2967 .enc = {
2968 .vecs = bf_enc_tv_template,
2969 .count = BF_ENC_TEST_VECTORS
2970 },
2971 .dec = {
2972 .vecs = bf_dec_tv_template,
2973 .count = BF_DEC_TEST_VECTORS
2974 }
2975 }
2976 }
2977 }, {
2978 .alg = "ecb(camellia)",
2979 .test = alg_test_skcipher,
2980 .suite = {
2981 .cipher = {
2982 .enc = {
2983 .vecs = camellia_enc_tv_template,
2984 .count = CAMELLIA_ENC_TEST_VECTORS
2985 },
2986 .dec = {
2987 .vecs = camellia_dec_tv_template,
2988 .count = CAMELLIA_DEC_TEST_VECTORS
2989 }
2990 }
2991 }
2992 }, {
2993 .alg = "ecb(cast5)",
2994 .test = alg_test_skcipher,
2995 .suite = {
2996 .cipher = {
2997 .enc = {
2998 .vecs = cast5_enc_tv_template,
2999 .count = CAST5_ENC_TEST_VECTORS
3000 },
3001 .dec = {
3002 .vecs = cast5_dec_tv_template,
3003 .count = CAST5_DEC_TEST_VECTORS
3004 }
3005 }
3006 }
3007 }, {
3008 .alg = "ecb(cast6)",
3009 .test = alg_test_skcipher,
3010 .suite = {
3011 .cipher = {
3012 .enc = {
3013 .vecs = cast6_enc_tv_template,
3014 .count = CAST6_ENC_TEST_VECTORS
3015 },
3016 .dec = {
3017 .vecs = cast6_dec_tv_template,
3018 .count = CAST6_DEC_TEST_VECTORS
3019 }
3020 }
3021 }
3022 }, {
3023 .alg = "ecb(cipher_null)",
3024 .test = alg_test_null,
3025 }, {
3026 .alg = "ecb(des)",
3027 .test = alg_test_skcipher,
3028 .suite = {
3029 .cipher = {
3030 .enc = {
3031 .vecs = des_enc_tv_template,
3032 .count = DES_ENC_TEST_VECTORS
3033 },
3034 .dec = {
3035 .vecs = des_dec_tv_template,
3036 .count = DES_DEC_TEST_VECTORS
3037 }
3038 }
3039 }
3040 }, {
3041 .alg = "ecb(des3_ede)",
3042 .test = alg_test_skcipher,
3043 .fips_allowed = 1,
3044 .suite = {
3045 .cipher = {
3046 .enc = {
3047 .vecs = des3_ede_enc_tv_template,
3048 .count = DES3_EDE_ENC_TEST_VECTORS
3049 },
3050 .dec = {
3051 .vecs = des3_ede_dec_tv_template,
3052 .count = DES3_EDE_DEC_TEST_VECTORS
3053 }
3054 }
3055 }
3056 }, {
3057 .alg = "ecb(fcrypt)",
3058 .test = alg_test_skcipher,
3059 .suite = {
3060 .cipher = {
3061 .enc = {
3062 .vecs = fcrypt_pcbc_enc_tv_template,
3063 .count = 1
3064 },
3065 .dec = {
3066 .vecs = fcrypt_pcbc_dec_tv_template,
3067 .count = 1
3068 }
3069 }
3070 }
3071 }, {
3072 .alg = "ecb(khazad)",
3073 .test = alg_test_skcipher,
3074 .suite = {
3075 .cipher = {
3076 .enc = {
3077 .vecs = khazad_enc_tv_template,
3078 .count = KHAZAD_ENC_TEST_VECTORS
3079 },
3080 .dec = {
3081 .vecs = khazad_dec_tv_template,
3082 .count = KHAZAD_DEC_TEST_VECTORS
3083 }
3084 }
3085 }
3086 }, {
3087 .alg = "ecb(seed)",
3088 .test = alg_test_skcipher,
3089 .suite = {
3090 .cipher = {
3091 .enc = {
3092 .vecs = seed_enc_tv_template,
3093 .count = SEED_ENC_TEST_VECTORS
3094 },
3095 .dec = {
3096 .vecs = seed_dec_tv_template,
3097 .count = SEED_DEC_TEST_VECTORS
3098 }
3099 }
3100 }
3101 }, {
3102 .alg = "ecb(serpent)",
3103 .test = alg_test_skcipher,
3104 .suite = {
3105 .cipher = {
3106 .enc = {
3107 .vecs = serpent_enc_tv_template,
3108 .count = SERPENT_ENC_TEST_VECTORS
3109 },
3110 .dec = {
3111 .vecs = serpent_dec_tv_template,
3112 .count = SERPENT_DEC_TEST_VECTORS
3113 }
3114 }
3115 }
3116 }, {
3117 .alg = "ecb(tea)",
3118 .test = alg_test_skcipher,
3119 .suite = {
3120 .cipher = {
3121 .enc = {
3122 .vecs = tea_enc_tv_template,
3123 .count = TEA_ENC_TEST_VECTORS
3124 },
3125 .dec = {
3126 .vecs = tea_dec_tv_template,
3127 .count = TEA_DEC_TEST_VECTORS
3128 }
3129 }
3130 }
3131 }, {
3132 .alg = "ecb(tnepres)",
3133 .test = alg_test_skcipher,
3134 .suite = {
3135 .cipher = {
3136 .enc = {
3137 .vecs = tnepres_enc_tv_template,
3138 .count = TNEPRES_ENC_TEST_VECTORS
3139 },
3140 .dec = {
3141 .vecs = tnepres_dec_tv_template,
3142 .count = TNEPRES_DEC_TEST_VECTORS
3143 }
3144 }
3145 }
3146 }, {
3147 .alg = "ecb(twofish)",
3148 .test = alg_test_skcipher,
3149 .suite = {
3150 .cipher = {
3151 .enc = {
3152 .vecs = tf_enc_tv_template,
3153 .count = TF_ENC_TEST_VECTORS
3154 },
3155 .dec = {
3156 .vecs = tf_dec_tv_template,
3157 .count = TF_DEC_TEST_VECTORS
3158 }
3159 }
3160 }
3161 }, {
3162 .alg = "ecb(xeta)",
3163 .test = alg_test_skcipher,
3164 .suite = {
3165 .cipher = {
3166 .enc = {
3167 .vecs = xeta_enc_tv_template,
3168 .count = XETA_ENC_TEST_VECTORS
3169 },
3170 .dec = {
3171 .vecs = xeta_dec_tv_template,
3172 .count = XETA_DEC_TEST_VECTORS
3173 }
3174 }
3175 }
3176 }, {
3177 .alg = "ecb(xtea)",
3178 .test = alg_test_skcipher,
3179 .suite = {
3180 .cipher = {
3181 .enc = {
3182 .vecs = xtea_enc_tv_template,
3183 .count = XTEA_ENC_TEST_VECTORS
3184 },
3185 .dec = {
3186 .vecs = xtea_dec_tv_template,
3187 .count = XTEA_DEC_TEST_VECTORS
3188 }
3189 }
3190 }
3191 }, {
3192 .alg = "gcm(aes)",
3193 .test = alg_test_aead,
3194 .fips_allowed = 1,
3195 .suite = {
3196 .aead = {
3197 .enc = {
3198 .vecs = aes_gcm_enc_tv_template,
3199 .count = AES_GCM_ENC_TEST_VECTORS
3200 },
3201 .dec = {
3202 .vecs = aes_gcm_dec_tv_template,
3203 .count = AES_GCM_DEC_TEST_VECTORS
3204 }
3205 }
3206 }
3207 }, {
3208 .alg = "ghash",
3209 .test = alg_test_hash,
3210 .fips_allowed = 1,
3211 .suite = {
3212 .hash = {
3213 .vecs = ghash_tv_template,
3214 .count = GHASH_TEST_VECTORS
3215 }
3216 }
3217 }, {
3218 .alg = "heh(aes)",
3219 .test = alg_test_skcipher,
3220 .suite = {
3221 .cipher = {
3222 .enc = {
3223 .vecs = aes_heh_enc_tv_template,
3224 .count = AES_HEH_ENC_TEST_VECTORS
3225 },
3226 .dec = {
3227 .vecs = aes_heh_dec_tv_template,
3228 .count = AES_HEH_DEC_TEST_VECTORS
3229 }
3230 }
3231 }
3232 }, {
3233 .alg = "hmac(crc32)",
3234 .test = alg_test_hash,
3235 .suite = {
3236 .hash = {
3237 .vecs = bfin_crc_tv_template,
3238 .count = BFIN_CRC_TEST_VECTORS
3239 }
3240 }
3241 }, {
3242 .alg = "hmac(md5)",
3243 .test = alg_test_hash,
3244 .suite = {
3245 .hash = {
3246 .vecs = hmac_md5_tv_template,
3247 .count = HMAC_MD5_TEST_VECTORS
3248 }
3249 }
3250 }, {
3251 .alg = "hmac(rmd128)",
3252 .test = alg_test_hash,
3253 .suite = {
3254 .hash = {
3255 .vecs = hmac_rmd128_tv_template,
3256 .count = HMAC_RMD128_TEST_VECTORS
3257 }
3258 }
3259 }, {
3260 .alg = "hmac(rmd160)",
3261 .test = alg_test_hash,
3262 .suite = {
3263 .hash = {
3264 .vecs = hmac_rmd160_tv_template,
3265 .count = HMAC_RMD160_TEST_VECTORS
3266 }
3267 }
3268 }, {
3269 .alg = "hmac(sha1)",
3270 .test = alg_test_hash,
3271 .fips_allowed = 1,
3272 .suite = {
3273 .hash = {
3274 .vecs = hmac_sha1_tv_template,
3275 .count = HMAC_SHA1_TEST_VECTORS
3276 }
3277 }
3278 }, {
3279 .alg = "hmac(sha224)",
3280 .test = alg_test_hash,
3281 .fips_allowed = 1,
3282 .suite = {
3283 .hash = {
3284 .vecs = hmac_sha224_tv_template,
3285 .count = HMAC_SHA224_TEST_VECTORS
3286 }
3287 }
3288 }, {
3289 .alg = "hmac(sha256)",
3290 .test = alg_test_hash,
3291 .fips_allowed = 1,
3292 .suite = {
3293 .hash = {
3294 .vecs = hmac_sha256_tv_template,
3295 .count = HMAC_SHA256_TEST_VECTORS
3296 }
3297 }
3298 }, {
3299 .alg = "hmac(sha384)",
3300 .test = alg_test_hash,
3301 .fips_allowed = 1,
3302 .suite = {
3303 .hash = {
3304 .vecs = hmac_sha384_tv_template,
3305 .count = HMAC_SHA384_TEST_VECTORS
3306 }
3307 }
3308 }, {
3309 .alg = "hmac(sha512)",
3310 .test = alg_test_hash,
3311 .fips_allowed = 1,
3312 .suite = {
3313 .hash = {
3314 .vecs = hmac_sha512_tv_template,
3315 .count = HMAC_SHA512_TEST_VECTORS
3316 }
3317 }
3318 }, {
3319 .alg = "jitterentropy_rng",
3320 .fips_allowed = 1,
3321 .test = alg_test_null,
3322 }, {
3323 .alg = "kw(aes)",
3324 .test = alg_test_skcipher,
3325 .fips_allowed = 1,
3326 .suite = {
3327 .cipher = {
3328 .enc = {
3329 .vecs = aes_kw_enc_tv_template,
3330 .count = ARRAY_SIZE(aes_kw_enc_tv_template)
3331 },
3332 .dec = {
3333 .vecs = aes_kw_dec_tv_template,
3334 .count = ARRAY_SIZE(aes_kw_dec_tv_template)
3335 }
3336 }
3337 }
3338 }, {
3339 .alg = "lrw(aes)",
3340 .test = alg_test_skcipher,
3341 .suite = {
3342 .cipher = {
3343 .enc = {
3344 .vecs = aes_lrw_enc_tv_template,
3345 .count = AES_LRW_ENC_TEST_VECTORS
3346 },
3347 .dec = {
3348 .vecs = aes_lrw_dec_tv_template,
3349 .count = AES_LRW_DEC_TEST_VECTORS
3350 }
3351 }
3352 }
3353 }, {
3354 .alg = "lrw(camellia)",
3355 .test = alg_test_skcipher,
3356 .suite = {
3357 .cipher = {
3358 .enc = {
3359 .vecs = camellia_lrw_enc_tv_template,
3360 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3361 },
3362 .dec = {
3363 .vecs = camellia_lrw_dec_tv_template,
3364 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3365 }
3366 }
3367 }
3368 }, {
3369 .alg = "lrw(cast6)",
3370 .test = alg_test_skcipher,
3371 .suite = {
3372 .cipher = {
3373 .enc = {
3374 .vecs = cast6_lrw_enc_tv_template,
3375 .count = CAST6_LRW_ENC_TEST_VECTORS
3376 },
3377 .dec = {
3378 .vecs = cast6_lrw_dec_tv_template,
3379 .count = CAST6_LRW_DEC_TEST_VECTORS
3380 }
3381 }
3382 }
3383 }, {
3384 .alg = "lrw(serpent)",
3385 .test = alg_test_skcipher,
3386 .suite = {
3387 .cipher = {
3388 .enc = {
3389 .vecs = serpent_lrw_enc_tv_template,
3390 .count = SERPENT_LRW_ENC_TEST_VECTORS
3391 },
3392 .dec = {
3393 .vecs = serpent_lrw_dec_tv_template,
3394 .count = SERPENT_LRW_DEC_TEST_VECTORS
3395 }
3396 }
3397 }
3398 }, {
3399 .alg = "lrw(twofish)",
3400 .test = alg_test_skcipher,
3401 .suite = {
3402 .cipher = {
3403 .enc = {
3404 .vecs = tf_lrw_enc_tv_template,
3405 .count = TF_LRW_ENC_TEST_VECTORS
3406 },
3407 .dec = {
3408 .vecs = tf_lrw_dec_tv_template,
3409 .count = TF_LRW_DEC_TEST_VECTORS
3410 }
3411 }
3412 }
3413 }, {
3414 .alg = "lz4",
3415 .test = alg_test_comp,
3416 .fips_allowed = 1,
3417 .suite = {
3418 .comp = {
3419 .comp = {
3420 .vecs = lz4_comp_tv_template,
3421 .count = LZ4_COMP_TEST_VECTORS
3422 },
3423 .decomp = {
3424 .vecs = lz4_decomp_tv_template,
3425 .count = LZ4_DECOMP_TEST_VECTORS
3426 }
3427 }
3428 }
3429 }, {
3430 .alg = "lz4hc",
3431 .test = alg_test_comp,
3432 .fips_allowed = 1,
3433 .suite = {
3434 .comp = {
3435 .comp = {
3436 .vecs = lz4hc_comp_tv_template,
3437 .count = LZ4HC_COMP_TEST_VECTORS
3438 },
3439 .decomp = {
3440 .vecs = lz4hc_decomp_tv_template,
3441 .count = LZ4HC_DECOMP_TEST_VECTORS
3442 }
3443 }
3444 }
3445 }, {
3446 .alg = "lzo",
3447 .test = alg_test_comp,
3448 .fips_allowed = 1,
3449 .suite = {
3450 .comp = {
3451 .comp = {
3452 .vecs = lzo_comp_tv_template,
3453 .count = LZO_COMP_TEST_VECTORS
3454 },
3455 .decomp = {
3456 .vecs = lzo_decomp_tv_template,
3457 .count = LZO_DECOMP_TEST_VECTORS
3458 }
3459 }
3460 }
3461 }, {
3462 .alg = "md4",
3463 .test = alg_test_hash,
3464 .suite = {
3465 .hash = {
3466 .vecs = md4_tv_template,
3467 .count = MD4_TEST_VECTORS
3468 }
3469 }
3470 }, {
3471 .alg = "md5",
3472 .test = alg_test_hash,
3473 .suite = {
3474 .hash = {
3475 .vecs = md5_tv_template,
3476 .count = MD5_TEST_VECTORS
3477 }
3478 }
3479 }, {
3480 .alg = "michael_mic",
3481 .test = alg_test_hash,
3482 .suite = {
3483 .hash = {
3484 .vecs = michael_mic_tv_template,
3485 .count = MICHAEL_MIC_TEST_VECTORS
3486 }
3487 }
3488 }, {
3489 .alg = "ofb(aes)",
3490 .test = alg_test_skcipher,
3491 .fips_allowed = 1,
3492 .suite = {
3493 .cipher = {
3494 .enc = {
3495 .vecs = aes_ofb_enc_tv_template,
3496 .count = AES_OFB_ENC_TEST_VECTORS
3497 },
3498 .dec = {
3499 .vecs = aes_ofb_dec_tv_template,
3500 .count = AES_OFB_DEC_TEST_VECTORS
3501 }
3502 }
3503 }
3504 }, {
3505 .alg = "pcbc(fcrypt)",
3506 .test = alg_test_skcipher,
3507 .suite = {
3508 .cipher = {
3509 .enc = {
3510 .vecs = fcrypt_pcbc_enc_tv_template,
3511 .count = FCRYPT_ENC_TEST_VECTORS
3512 },
3513 .dec = {
3514 .vecs = fcrypt_pcbc_dec_tv_template,
3515 .count = FCRYPT_DEC_TEST_VECTORS
3516 }
3517 }
3518 }
3519 }, {
3520 .alg = "poly1305",
3521 .test = alg_test_hash,
3522 .suite = {
3523 .hash = {
3524 .vecs = poly1305_tv_template,
3525 .count = POLY1305_TEST_VECTORS
3526 }
3527 }
3528 }, {
3529 .alg = "rfc3686(ctr(aes))",
3530 .test = alg_test_skcipher,
3531 .fips_allowed = 1,
3532 .suite = {
3533 .cipher = {
3534 .enc = {
3535 .vecs = aes_ctr_rfc3686_enc_tv_template,
3536 .count = AES_CTR_3686_ENC_TEST_VECTORS
3537 },
3538 .dec = {
3539 .vecs = aes_ctr_rfc3686_dec_tv_template,
3540 .count = AES_CTR_3686_DEC_TEST_VECTORS
3541 }
3542 }
3543 }
3544 }, {
3545 .alg = "rfc4106(gcm(aes))",
3546 .test = alg_test_aead,
3547 .fips_allowed = 1,
3548 .suite = {
3549 .aead = {
3550 .enc = {
3551 .vecs = aes_gcm_rfc4106_enc_tv_template,
3552 .count = AES_GCM_4106_ENC_TEST_VECTORS
3553 },
3554 .dec = {
3555 .vecs = aes_gcm_rfc4106_dec_tv_template,
3556 .count = AES_GCM_4106_DEC_TEST_VECTORS
3557 }
3558 }
3559 }
3560 }, {
3561 .alg = "rfc4309(ccm(aes))",
3562 .test = alg_test_aead,
3563 .fips_allowed = 1,
3564 .suite = {
3565 .aead = {
3566 .enc = {
3567 .vecs = aes_ccm_rfc4309_enc_tv_template,
3568 .count = AES_CCM_4309_ENC_TEST_VECTORS
3569 },
3570 .dec = {
3571 .vecs = aes_ccm_rfc4309_dec_tv_template,
3572 .count = AES_CCM_4309_DEC_TEST_VECTORS
3573 }
3574 }
3575 }
3576 }, {
3577 .alg = "rfc4543(gcm(aes))",
3578 .test = alg_test_aead,
3579 .suite = {
3580 .aead = {
3581 .enc = {
3582 .vecs = aes_gcm_rfc4543_enc_tv_template,
3583 .count = AES_GCM_4543_ENC_TEST_VECTORS
3584 },
3585 .dec = {
3586 .vecs = aes_gcm_rfc4543_dec_tv_template,
3587 .count = AES_GCM_4543_DEC_TEST_VECTORS
3588 },
3589 }
3590 }
3591 }, {
3592 .alg = "rfc7539(chacha20,poly1305)",
3593 .test = alg_test_aead,
3594 .suite = {
3595 .aead = {
3596 .enc = {
3597 .vecs = rfc7539_enc_tv_template,
3598 .count = RFC7539_ENC_TEST_VECTORS
3599 },
3600 .dec = {
3601 .vecs = rfc7539_dec_tv_template,
3602 .count = RFC7539_DEC_TEST_VECTORS
3603 },
3604 }
3605 }
3606 }, {
3607 .alg = "rfc7539esp(chacha20,poly1305)",
3608 .test = alg_test_aead,
3609 .suite = {
3610 .aead = {
3611 .enc = {
3612 .vecs = rfc7539esp_enc_tv_template,
3613 .count = RFC7539ESP_ENC_TEST_VECTORS
3614 },
3615 .dec = {
3616 .vecs = rfc7539esp_dec_tv_template,
3617 .count = RFC7539ESP_DEC_TEST_VECTORS
3618 },
3619 }
3620 }
3621 }, {
3622 .alg = "rmd128",
3623 .test = alg_test_hash,
3624 .suite = {
3625 .hash = {
3626 .vecs = rmd128_tv_template,
3627 .count = RMD128_TEST_VECTORS
3628 }
3629 }
3630 }, {
3631 .alg = "rmd160",
3632 .test = alg_test_hash,
3633 .suite = {
3634 .hash = {
3635 .vecs = rmd160_tv_template,
3636 .count = RMD160_TEST_VECTORS
3637 }
3638 }
3639 }, {
3640 .alg = "rmd256",
3641 .test = alg_test_hash,
3642 .suite = {
3643 .hash = {
3644 .vecs = rmd256_tv_template,
3645 .count = RMD256_TEST_VECTORS
3646 }
3647 }
3648 }, {
3649 .alg = "rmd320",
3650 .test = alg_test_hash,
3651 .suite = {
3652 .hash = {
3653 .vecs = rmd320_tv_template,
3654 .count = RMD320_TEST_VECTORS
3655 }
3656 }
3657 }, {
3658 .alg = "rsa",
3659 .test = alg_test_akcipher,
3660 .fips_allowed = 1,
3661 .suite = {
3662 .akcipher = {
3663 .vecs = rsa_tv_template,
3664 .count = RSA_TEST_VECTORS
3665 }
3666 }
3667 }, {
3668 .alg = "salsa20",
3669 .test = alg_test_skcipher,
3670 .suite = {
3671 .cipher = {
3672 .enc = {
3673 .vecs = salsa20_stream_enc_tv_template,
3674 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3675 }
3676 }
3677 }
3678 }, {
3679 .alg = "sha1",
3680 .test = alg_test_hash,
3681 .fips_allowed = 1,
3682 .suite = {
3683 .hash = {
3684 .vecs = sha1_tv_template,
3685 .count = SHA1_TEST_VECTORS
3686 }
3687 }
3688 }, {
3689 .alg = "sha224",
3690 .test = alg_test_hash,
3691 .fips_allowed = 1,
3692 .suite = {
3693 .hash = {
3694 .vecs = sha224_tv_template,
3695 .count = SHA224_TEST_VECTORS
3696 }
3697 }
3698 }, {
3699 .alg = "sha256",
3700 .test = alg_test_hash,
3701 .fips_allowed = 1,
3702 .suite = {
3703 .hash = {
3704 .vecs = sha256_tv_template,
3705 .count = SHA256_TEST_VECTORS
3706 }
3707 }
3708 }, {
3709 .alg = "sha384",
3710 .test = alg_test_hash,
3711 .fips_allowed = 1,
3712 .suite = {
3713 .hash = {
3714 .vecs = sha384_tv_template,
3715 .count = SHA384_TEST_VECTORS
3716 }
3717 }
3718 }, {
3719 .alg = "sha512",
3720 .test = alg_test_hash,
3721 .fips_allowed = 1,
3722 .suite = {
3723 .hash = {
3724 .vecs = sha512_tv_template,
3725 .count = SHA512_TEST_VECTORS
3726 }
3727 }
3728 }, {
3729 .alg = "tgr128",
3730 .test = alg_test_hash,
3731 .suite = {
3732 .hash = {
3733 .vecs = tgr128_tv_template,
3734 .count = TGR128_TEST_VECTORS
3735 }
3736 }
3737 }, {
3738 .alg = "tgr160",
3739 .test = alg_test_hash,
3740 .suite = {
3741 .hash = {
3742 .vecs = tgr160_tv_template,
3743 .count = TGR160_TEST_VECTORS
3744 }
3745 }
3746 }, {
3747 .alg = "tgr192",
3748 .test = alg_test_hash,
3749 .suite = {
3750 .hash = {
3751 .vecs = tgr192_tv_template,
3752 .count = TGR192_TEST_VECTORS
3753 }
3754 }
3755 }, {
3756 .alg = "vmac(aes)",
3757 .test = alg_test_hash,
3758 .suite = {
3759 .hash = {
3760 .vecs = aes_vmac128_tv_template,
3761 .count = VMAC_AES_TEST_VECTORS
3762 }
3763 }
3764 }, {
3765 .alg = "wp256",
3766 .test = alg_test_hash,
3767 .suite = {
3768 .hash = {
3769 .vecs = wp256_tv_template,
3770 .count = WP256_TEST_VECTORS
3771 }
3772 }
3773 }, {
3774 .alg = "wp384",
3775 .test = alg_test_hash,
3776 .suite = {
3777 .hash = {
3778 .vecs = wp384_tv_template,
3779 .count = WP384_TEST_VECTORS
3780 }
3781 }
3782 }, {
3783 .alg = "wp512",
3784 .test = alg_test_hash,
3785 .suite = {
3786 .hash = {
3787 .vecs = wp512_tv_template,
3788 .count = WP512_TEST_VECTORS
3789 }
3790 }
3791 }, {
3792 .alg = "xcbc(aes)",
3793 .test = alg_test_hash,
3794 .suite = {
3795 .hash = {
3796 .vecs = aes_xcbc128_tv_template,
3797 .count = XCBC_AES_TEST_VECTORS
3798 }
3799 }
3800 }, {
3801 .alg = "xts(aes)",
3802 .test = alg_test_skcipher,
3803 .fips_allowed = 1,
3804 .suite = {
3805 .cipher = {
3806 .enc = {
3807 .vecs = aes_xts_enc_tv_template,
3808 .count = AES_XTS_ENC_TEST_VECTORS
3809 },
3810 .dec = {
3811 .vecs = aes_xts_dec_tv_template,
3812 .count = AES_XTS_DEC_TEST_VECTORS
3813 }
3814 }
3815 }
3816 }, {
3817 .alg = "xts(camellia)",
3818 .test = alg_test_skcipher,
3819 .suite = {
3820 .cipher = {
3821 .enc = {
3822 .vecs = camellia_xts_enc_tv_template,
3823 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3824 },
3825 .dec = {
3826 .vecs = camellia_xts_dec_tv_template,
3827 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3828 }
3829 }
3830 }
3831 }, {
3832 .alg = "xts(cast6)",
3833 .test = alg_test_skcipher,
3834 .suite = {
3835 .cipher = {
3836 .enc = {
3837 .vecs = cast6_xts_enc_tv_template,
3838 .count = CAST6_XTS_ENC_TEST_VECTORS
3839 },
3840 .dec = {
3841 .vecs = cast6_xts_dec_tv_template,
3842 .count = CAST6_XTS_DEC_TEST_VECTORS
3843 }
3844 }
3845 }
3846 }, {
3847 .alg = "xts(serpent)",
3848 .test = alg_test_skcipher,
3849 .suite = {
3850 .cipher = {
3851 .enc = {
3852 .vecs = serpent_xts_enc_tv_template,
3853 .count = SERPENT_XTS_ENC_TEST_VECTORS
3854 },
3855 .dec = {
3856 .vecs = serpent_xts_dec_tv_template,
3857 .count = SERPENT_XTS_DEC_TEST_VECTORS
3858 }
3859 }
3860 }
3861 }, {
3862 .alg = "xts(twofish)",
3863 .test = alg_test_skcipher,
3864 .suite = {
3865 .cipher = {
3866 .enc = {
3867 .vecs = tf_xts_enc_tv_template,
3868 .count = TF_XTS_ENC_TEST_VECTORS
3869 },
3870 .dec = {
3871 .vecs = tf_xts_dec_tv_template,
3872 .count = TF_XTS_DEC_TEST_VECTORS
3873 }
3874 }
3875 }
3876 }, {
3877 .alg = "zlib",
3878 .test = alg_test_pcomp,
3879 .fips_allowed = 1,
3880 .suite = {
3881 .pcomp = {
3882 .comp = {
3883 .vecs = zlib_comp_tv_template,
3884 .count = ZLIB_COMP_TEST_VECTORS
3885 },
3886 .decomp = {
3887 .vecs = zlib_decomp_tv_template,
3888 .count = ZLIB_DECOMP_TEST_VECTORS
3889 }
3890 }
3891 }
3892 }
3893 };
3894
3895 static bool alg_test_descs_checked;
3896
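/*
 * Warn (once) if the table above is not strictly sorted by algorithm name.
 * alg_find_test() below relies on this ordering for its binary search.
 */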
3897 static void alg_test_descs_check_order(void)
3898 {
3899 int i;
3900
3901 /* only check once */
3902 if (alg_test_descs_checked)
3903 return;
3904
3905 alg_test_descs_checked = true;
3906
3907 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3908 int diff = strcmp(alg_test_descs[i - 1].alg,
3909 alg_test_descs[i].alg);
3910
3911 if (WARN_ON(diff > 0)) {
3912 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3913 alg_test_descs[i - 1].alg,
3914 alg_test_descs[i].alg);
3915 }
3916
3917 if (WARN_ON(diff == 0)) {
3918 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3919 alg_test_descs[i].alg);
3920 }
3921 }
3922 }
3923
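/*
 * Binary search over alg_test_descs[]; returns the index of the matching
 * entry, or -1 if no test description exists for the given algorithm name.
 */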
3924 static int alg_find_test(const char *alg)
3925 {
3926 int start = 0;
3927 int end = ARRAY_SIZE(alg_test_descs);
3928
3929 while (start < end) {
3930 int i = (start + end) / 2;
3931 int diff = strcmp(alg_test_descs[i].alg, alg);
3932
3933 if (diff > 0) {
3934 end = i;
3935 continue;
3936 }
3937
3938 if (diff < 0) {
3939 start = i + 1;
3940 continue;
3941 }
3942
3943 return i;
3944 }
3945
3946 return -1;
3947 }
3948
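/*
 * Test manager entry point.  Bare (single-block) ciphers are looked up as
 * "ecb(<alg>)" and run through alg_test_cipher().  Everything else is looked
 * up both by generic algorithm name and by driver name, and every matching
 * entry's test routine is run.  In FIPS mode a failed self-test panics the
 * kernel, and an algorithm without a fips_allowed entry is rejected (-EINVAL).
 */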
3949 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3950 {
3951 int i;
3952 int j;
3953 int rc;
3954
3955 alg_test_descs_check_order();
3956
3957 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3958 char nalg[CRYPTO_MAX_ALG_NAME];
3959
3960 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3961 sizeof(nalg))
3962 return -ENAMETOOLONG;
3963
3964 i = alg_find_test(nalg);
3965 if (i < 0)
3966 goto notest;
3967
3968 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3969 goto non_fips_alg;
3970
3971 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3972 goto test_done;
3973 }
3974
3975 i = alg_find_test(alg);
3976 j = alg_find_test(driver);
3977 if (i < 0 && j < 0)
3978 goto notest;
3979
3980 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3981 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3982 goto non_fips_alg;
3983
3984 	rc = 0;	/* accumulate (OR) results from whichever entries matched */
3985 if (i >= 0)
3986 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3987 type, mask);
3988 if (j >= 0 && j != i)
3989 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3990 type, mask);
3991
3992 test_done:
3993 if (fips_enabled && rc)
3994 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3995
3996 if (fips_enabled && !rc)
3997 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3998
3999 return rc;
4000
4001 notest:
4002 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
4003 return 0;
4004 non_fips_alg:
4005 return -EINVAL;
4006 }
4007
4008 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
4009
4010 EXPORT_SYMBOL_GPL(alg_test);