// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE		64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static u32 num_mb = 8;
static unsigned int klen;
static char *tvmem[TVMEMSIZE];

static const char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "lzo-rle", "cts", "sha3-224", "sha3-256", "sha3-384",
	"sha3-512", "streebog256", "streebog512",
	NULL
};

static u32 block_sizes[] = { 16, 64, 256, 1024, 1472, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

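/*
 * Build the scatterlist used by the AEAD tests: entry 0 carries the
 * associated data, the remaining entries map the payload a page at a
 * time (clamped to XBUFSIZE pages, with a trailing partial page when
 * buflen is not page aligned).
 */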
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen, const void *assoc,
			 unsigned int aad_size)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);

	sg_set_buf(&sg[0], assoc, aad_size);

	if (rem)
		np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	if (rem)
		sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_aead_data {
	struct scatterlist sg[XBUFSIZE];
	struct scatterlist sgout[XBUFSIZE];
	struct aead_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
};

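/*
 * Fire off num_mb concurrent AEAD requests and then wait for them all;
 * the error code of any failed request is reported and the per-request
 * results are left in rc[].
 */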
static int do_mult_aead_op(struct test_mb_aead_data *data, int enc,
			   u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_aead_encrypt(data[i].req);
		else
			rc[i] = crypto_aead_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_aead_jiffies(struct test_mb_aead_data *data, int enc,
				int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%llu bytes)\n",
		bcount * num_mb, secs, (u64)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_aead_cycles(struct test_mb_aead_data *data, int enc,
			       int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

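/*
 * Multibuffer AEAD speed test: num_mb requests are kept in flight for
 * every (key size, block size) combination, measured either over a
 * fixed number of seconds or in CPU cycles per operation.
 */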
static void test_mb_aead_speed(const char *algo, int enc, int secs,
			       struct aead_speed_template *template,
			       unsigned int tcount, u8 authsize,
			       unsigned int aad_size, u8 *keysize, u32 num_mb)
{
	struct test_mb_aead_data *data;
	struct crypto_aead *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	void *assoc;
	u32 *b_size;
	char *iv;
	int ret;


	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		return;
	}

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		goto out_free_iv;

	tfm = crypto_alloc_aead(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
			algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	ret = crypto_aead_setauthsize(tfm, authsize);

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].axbuf)) {
			while (i--)
				testmgr_free_buf(data[i].axbuf);
			goto out_free_xbuf;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xoutbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xoutbuf);
			goto out_free_axbuf;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = aead_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				aead_request_free(data[i].req);
			goto out_free_xoutbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		crypto_init_wait(&data[i].wait);
		aead_request_set_callback(data[i].req,
					  CRYPTO_TFM_REQ_MAY_BACKLOG,
					  crypto_req_done, &data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_aead, tfm), e);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			if (*b_size + authsize > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       authsize + *b_size,
				       XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_aead_clear_flags(tfm, ~0);

			ret = crypto_aead_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_aead_data *cur = &data[j];

				assoc = cur->axbuf[0];
				memset(assoc, 0xff, aad_size);

				sg_init_aead(cur->sg, cur->xbuf,
					     *b_size + (enc ? 0 : authsize),
					     assoc, aad_size);

				sg_init_aead(cur->sgout, cur->xoutbuf,
					     *b_size + (enc ? authsize : 0),
					     assoc, aad_size);

				aead_request_set_ad(cur->req, aad_size);

				if (!enc) {

					aead_request_set_crypt(cur->req,
							       cur->sgout,
							       cur->sg,
							       *b_size, iv);
					ret = crypto_aead_encrypt(cur->req);
					ret = do_one_aead_op(cur->req, ret);

					if (ret) {
						pr_err("calculating auth failed (%d)\n",
						       ret);
						break;
					}
				}

				aead_request_set_crypt(cur->req, cur->sg,
						       cur->sgout, *b_size +
						       (enc ? 0 : authsize),
						       iv);

			}

			if (secs) {
				ret = test_mb_aead_jiffies(data, enc, *b_size,
							   secs, num_mb);
				cond_resched();
			} else {
				ret = test_mb_aead_cycles(data, enc, *b_size,
							  num_mb);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		aead_request_free(data[i].req);
out_free_xoutbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xoutbuf);
out_free_axbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].axbuf);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_aead(tfm);
out_free_data:
	kfree(data);
out_free_iv:
	kfree(iv);
}

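/* Single-request AEAD timing helpers (wall clock and cycle counts). */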
static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%llu bytes)\n",
		bcount, secs, (u64)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

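/*
 * Single-request AEAD speed test over the aead_sizes/keysize matrix.
 * For decryption a valid auth tag is produced first by running the
 * encrypt path once with the input and output buffers swapped.
 */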
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;
	struct crypto_wait wait;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	crypto_init_wait(&wait);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
			get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
					i, *keysize * 8, *b_size);


			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
						crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(sg, xbuf, *b_size + (enc ? 0 : authsize),
				     assoc, aad_size);

			sg_init_aead(sgout, xoutbuf,
				     *b_size + (enc ? authsize : 0), assoc,
				     aad_size);

			aead_request_set_ad(req, aad_size);

			if (!enc) {

				/*
				 * For decryption we need a proper auth so
				 * we do the encryption path once with buffers
				 * reversed (input <-> output) to calculate it
				 */
				aead_request_set_crypt(req, sgout, sg,
						       *b_size, iv);
				ret = do_one_aead_op(req,
						     crypto_aead_encrypt(req));

				if (ret) {
					pr_err("calculating auth failed (%d)\n",
					       ret);
					break;
				}
			}

			aead_request_set_crypt(req, sg, sgout,
					       *b_size + (enc ? 0 : authsize),
					       iv);

			if (secs) {
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
				cond_resched();
			} else {
				ret = test_aead_cycles(req, enc, *b_size);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
}

static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

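/*
 * Multibuffer hash test state: one request, wait context and set of
 * data pages per in-flight request.
 */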
struct test_mb_ahash_data {
	struct scatterlist sg[XBUFSIZE];
	char result[64];
	struct ahash_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static inline int do_mult_ahash_op(struct test_mb_ahash_data *data, u32 num_mb,
				   int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++)
		rc[i] = crypto_ahash_digest(data[i].req);

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_ahash_jiffies(struct test_mb_ahash_data *data, int blen,
				 int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%llu bytes)\n",
		bcount * num_mb, secs, (u64)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_ahash_cycles(struct test_mb_ahash_data *data, int blen,
				u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_ahash_op(data, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

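/*
 * Multibuffer hash speed test: keeps num_mb digest requests in flight
 * for every digest-only entry (blen == plen) of the hash_speed table.
 */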
static void test_mb_ahash_speed(const char *algo, unsigned int secs,
				struct hash_speed *speed, u32 num_mb)
{
	struct test_mb_ahash_data *data;
	struct crypto_ahash *tfm;
	unsigned int i, j, k;
	int ret;

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
			algo, PTR_ERR(tfm));
		goto free_data;
	}

	for (i = 0; i < num_mb; ++i) {
		if (testmgr_alloc_buf(data[i].xbuf))
			goto out;

		crypto_init_wait(&data[i].wait);

		data[i].req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: hash: Failed to allocate request for %s\n",
			       algo);
			goto out;
		}

		ahash_request_set_callback(data[i].req, 0, crypto_req_done,
					   &data[i].wait);

		sg_init_table(data[i].sg, XBUFSIZE);
		for (j = 0; j < XBUFSIZE; j++) {
			sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
			memset(data[i].xbuf[j], 0xff, PAGE_SIZE);
		}
	}

	pr_info("\ntesting speed of multibuffer %s (%s)\n", algo,
		get_driver_name(crypto_ahash, tfm));

	for (i = 0; speed[i].blen != 0; i++) {
		/* For some reason this only tests digests. */
		if (speed[i].blen != speed[i].plen)
			continue;

		if (speed[i].blen > XBUFSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, XBUFSIZE * PAGE_SIZE);
			goto out;
		}

		if (klen)
			crypto_ahash_setkey(tfm, tvmem[0], klen);

		for (k = 0; k < num_mb; k++)
			ahash_request_set_crypt(data[k].req, data[k].sg,
						data[k].result, speed[i].blen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		if (secs) {
			ret = test_mb_ahash_jiffies(data, speed[i].blen, secs,
						    num_mb);
			cond_resched();
		} else {
			ret = test_mb_ahash_cycles(data, speed[i].blen, num_mb);
		}


		if (ret) {
			pr_err("At least one hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	for (k = 0; k < num_mb; ++k)
		ahash_request_free(data[k].req);

	for (k = 0; k < num_mb; ++k)
		testmgr_free_buf(data[k].xbuf);

	crypto_free_ahash(tfm);

free_data:
	kfree(data);
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

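/*
 * Common body of the hash speed tests; the mask argument is passed to
 * crypto_alloc_ahash() so that test_hash_speed() can restrict the run
 * to synchronous implementations while test_ahash_speed() accepts any.
 */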
static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_wait wait;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
			get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	crypto_init_wait(&wait);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		if (klen)
			crypto_ahash_setkey(tfm, tvmem[0], klen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs) {
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
			cond_resched();
		} else {
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);
		}

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

struct test_mb_skcipher_data {
	struct scatterlist sg[XBUFSIZE];
	struct skcipher_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static int do_mult_acipher_op(struct test_mb_skcipher_data *data, int enc,
			      u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_skcipher_encrypt(data[i].req);
		else
			rc[i] = crypto_skcipher_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_acipher_jiffies(struct test_mb_skcipher_data *data, int enc,
				   int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%llu bytes)\n",
		bcount * num_mb, secs, (u64)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_acipher_cycles(struct test_mb_skcipher_data *data, int enc,
				  int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

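/*
 * Multibuffer skcipher speed test, analogous to test_mb_aead_speed():
 * num_mb in-place requests per (key size, block size) combination.
 */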
static void test_mb_skcipher_speed(const char *algo, int enc, int secs,
				   struct cipher_speed_template *template,
				   unsigned int tcount, u8 *keysize, u32 num_mb)
{
	struct test_mb_skcipher_data *data;
	struct crypto_skcipher *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	u32 *b_size;
	char iv[128];
	int ret;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_skcipher(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				skcipher_request_free(data[i].req);
			goto out_free_xbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		skcipher_request_set_callback(data[i].req,
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
					      crypto_req_done, &data[i].wait);
		crypto_init_wait(&data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	i = 0;
	do {
		b_size = block_sizes;
		do {
			if (*b_size > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       *b_size, XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_skcipher_data *cur = &data[j];
				unsigned int k = *b_size;
				unsigned int pages = DIV_ROUND_UP(k, PAGE_SIZE);
				unsigned int p = 0;

				sg_init_table(cur->sg, pages);

				while (k > PAGE_SIZE) {
					sg_set_buf(cur->sg + p, cur->xbuf[p],
						   PAGE_SIZE);
					memset(cur->xbuf[p], 0xff, PAGE_SIZE);
					p++;
					k -= PAGE_SIZE;
				}

				sg_set_buf(cur->sg + p, cur->xbuf[p], k);
				memset(cur->xbuf[p], 0xff, k);

				skcipher_request_set_crypt(cur->req, cur->sg,
							   cur->sg, *b_size,
							   iv);
			}

			if (secs) {
				ret = test_mb_acipher_jiffies(data, enc,
							      *b_size, secs,
							      num_mb);
				cond_resched();
			} else {
				ret = test_mb_acipher_cycles(data, enc,
							     *b_size, num_mb);
			}

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		skcipher_request_free(data[i].req);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_skcipher(tfm);
out_free_data:
	kfree(data);
}

static inline int do_one_acipher_op(struct skcipher_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

static int test_acipher_jiffies(struct skcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%llu bytes)\n",
		bcount, secs, (u64)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct skcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

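/*
 * Single-request skcipher speed test; the async flag decides whether
 * asynchronous implementations may be used (test_acipher_speed) or
 * only synchronous ones (test_cipher_speed).
 */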
static void test_skcipher_speed(const char *algo, int enc, unsigned int secs,
				struct cipher_speed_template *template,
				unsigned int tcount, u8 *keysize, bool async)
{
	unsigned int ret, i, j, k, iv_len;
	struct crypto_wait wait;
	const char *key;
	char iv[128];
	struct skcipher_request *req;
	struct crypto_skcipher *tfm;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	tfm = crypto_alloc_skcipher(algo, 0, async ? 0 : CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	pr_info("\ntesting speed of %s %s (%s) %s\n", async ? "async" : "sync",
		algo, get_driver_name(crypto_skcipher, tfm), e);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	i = 0;
	do {
		b_size = block_sizes;

		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out_free_req;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out_free_req;
			}

			k = *keysize + *b_size;
			sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));

			if (k > PAGE_SIZE) {
				sg_set_buf(sg, tvmem[0] + *keysize,
					   PAGE_SIZE - *keysize);
				k -= PAGE_SIZE;
				j = 1;
				while (k > PAGE_SIZE) {
					sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
					memset(tvmem[j], 0xff, PAGE_SIZE);
					j++;
					k -= PAGE_SIZE;
				}
				sg_set_buf(sg + j, tvmem[j], k);
				memset(tvmem[j], 0xff, k);
			} else {
				sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			skcipher_request_set_crypt(req, sg, sg, *b_size, iv);

			if (secs) {
				ret = test_acipher_jiffies(req, enc,
							   *b_size, secs);
				cond_resched();
			} else {
				ret = test_acipher_cycles(req, enc,
							  *b_size);
			}

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out_free_req:
	skcipher_request_free(req);
out:
	crypto_free_skcipher(tfm);
}

static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
			       struct cipher_speed_template *template,
			       unsigned int tcount, u8 *keysize)
{
	return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
				   true);
}

static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
			      struct cipher_speed_template *template,
			      unsigned int tcount, u8 *keysize)
{
	return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
				   false);
}

static void test_available(void)
{
	const char **name = check;

	while (*name) {
		printk("alg %s ", *name);
		printk(crypto_has_alg(*name, 0, 0) ?
		       "found\n" : "not found\n");
		name++;
	}
}

static inline int tcrypt_test(const char *alg)
{
	int ret;

	pr_debug("testing %s\n", alg);

	ret = alg_test(alg, alg, 0, 0);
	/* non-fips algs return -EINVAL in fips mode */
	if (fips_enabled && ret == -EINVAL)
		ret = 0;
	return ret;
}

Kees Cook4e234ee2018-04-26 19:57:28 -07001652static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
Herbert Xu01b32322008-07-31 15:41:55 +08001653{
1654 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001655 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001656
1657 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001658 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001659 if (alg) {
1660 if (!crypto_has_alg(alg, type,
1661 mask ?: CRYPTO_ALG_TYPE_MASK))
1662 ret = -ENOENT;
1663 break;
1664 }
1665
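		/* No specific alg: run every numbered self-test in turn. */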
Herbert Xu01b32322008-07-31 15:41:55 +08001666 for (i = 1; i < 200; i++)
Kees Cook4e234ee2018-04-26 19:57:28 -07001667 ret += do_test(NULL, 0, 0, i, num_mb);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001668 break;
1669
1670 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001671 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001672 break;
1673
1674 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001675 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001676 break;
1677
1678 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001679 ret += tcrypt_test("ecb(des)");
1680 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001681 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001682 break;
1683
1684 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001685 ret += tcrypt_test("ecb(des3_ede)");
1686 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001687 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001688 break;
1689
1690 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001691 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001692 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001693
Linus Torvalds1da177e2005-04-16 15:20:36 -07001694 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001695 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001696 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001697
Linus Torvalds1da177e2005-04-16 15:20:36 -07001698 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001699 ret += tcrypt_test("ecb(blowfish)");
1700 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001701 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001702 break;
1703
1704 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001705 ret += tcrypt_test("ecb(twofish)");
1706 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001707 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001708 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001709 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001710 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001711
Linus Torvalds1da177e2005-04-16 15:20:36 -07001712 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001713 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001714 ret += tcrypt_test("cbc(serpent)");
1715 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001716 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001717 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001718 break;
1719
1720 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001721 ret += tcrypt_test("ecb(aes)");
1722 ret += tcrypt_test("cbc(aes)");
1723 ret += tcrypt_test("lrw(aes)");
1724 ret += tcrypt_test("xts(aes)");
1725 ret += tcrypt_test("ctr(aes)");
1726 ret += tcrypt_test("rfc3686(ctr(aes))");
Gilad Ben-Yossefdfb89ab2018-09-20 14:18:40 +01001727 ret += tcrypt_test("ofb(aes)");
Dmitry Eremin-Solenikov7da66672018-10-20 02:01:53 +03001728 ret += tcrypt_test("cfb(aes)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001729 break;
1730
1731 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001732 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001733 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001734
Linus Torvalds1da177e2005-04-16 15:20:36 -07001735 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001736 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001737 break;
1738
1739 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001740 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001741 break;
1742
1743 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001744 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001745 ret += tcrypt_test("cbc(cast5)");
1746 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001747 break;
1748
1749 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001750 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001751 ret += tcrypt_test("cbc(cast6)");
1752 ret += tcrypt_test("ctr(cast6)");
1753 ret += tcrypt_test("lrw(cast6)");
1754 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001755 break;
1756
1757 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001758 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001759 break;
1760
1761 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001762 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001763 break;
1764
1765 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001766 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001767 break;
1768
1769 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001770 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001771 break;
1772
1773 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001774 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001775 break;
1776
1777 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001778 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001779 break;
1780
1781 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001782 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001783 break;
1784
1785 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001786 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001787 break;
1788
1789 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001790 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001791 break;
1792
1793 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001794 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001795 break;
1796
1797 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001798 ret += tcrypt_test("ecb(anubis)");
1799 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001800 break;
1801
1802 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001803 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001804 break;
1805
1806 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001807 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001808 break;
1809
1810 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001811 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001812 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001813
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001814 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001815 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001816 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001817
David Howells90831632006-12-16 12:13:14 +11001818 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001819 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001820 break;
1821
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001822 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001823 ret += tcrypt_test("ecb(camellia)");
1824 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001825 ret += tcrypt_test("ctr(camellia)");
1826 ret += tcrypt_test("lrw(camellia)");
1827 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001828 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001829
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001830 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001831 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001832 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001833
Tan Swee Heng2407d602007-11-23 19:45:00 +08001834 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001835 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001836 break;
1837
Herbert Xu8df213d2007-12-02 14:55:47 +11001838 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001839 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001840 break;
1841
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001842 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001843 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001844 break;
1845
Joy Latten93cc74e2007-12-12 20:24:22 +08001846 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001847 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001848 break;
1849
Kevin Coffman76cb9522008-03-24 21:26:16 +08001850 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001851 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001852 break;
1853
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001854 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001855 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001856 break;
1857
1858 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001859 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001860 break;
1861
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001862 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001863 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001864 break;
1865
1866 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001867 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001868 break;
1869
1870 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001871 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001872 break;
1873
Jarod Wilson5d667322009-05-04 19:23:40 +08001874 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001875 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001876 break;
1877
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001878 case 46:
1879 ret += tcrypt_test("ghash");
1880 break;
1881
Herbert Xu684115212013-09-07 12:56:26 +10001882 case 47:
1883 ret += tcrypt_test("crct10dif");
1884 break;
1885
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301886 case 48:
1887 ret += tcrypt_test("sha3-224");
1888 break;
1889
1890 case 49:
1891 ret += tcrypt_test("sha3-256");
1892 break;
1893
1894 case 50:
1895 ret += tcrypt_test("sha3-384");
1896 break;
1897
1898 case 51:
1899 ret += tcrypt_test("sha3-512");
1900 break;
1901
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001902 case 52:
1903 ret += tcrypt_test("sm3");
1904 break;
1905
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03001906 case 53:
1907 ret += tcrypt_test("streebog256");
1908 break;
1909
1910 case 54:
1911 ret += tcrypt_test("streebog512");
1912 break;
1913
Linus Torvalds1da177e2005-04-16 15:20:36 -07001914 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001915 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001916 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001917
Linus Torvalds1da177e2005-04-16 15:20:36 -07001918 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001919 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001920 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001921
Linus Torvalds1da177e2005-04-16 15:20:36 -07001922 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001923 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001924 break;
1925
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001926 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001927 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001928 break;
1929
1930 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001931 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001932 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001933
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001934 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001935 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001936 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001937
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001938 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001939 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001940 break;
1941
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001942 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001943 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001944 break;
1945
1946 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001947 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001948 break;
1949
Shane Wangf1939f72009-09-02 20:05:22 +10001950 case 109:
Eric Biggers0917b872018-06-18 10:22:40 -07001951 ret += tcrypt_test("vmac64(aes)");
Shane Wangf1939f72009-09-02 20:05:22 +10001952 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001953
raveendra padasalagi98eca722016-07-01 11:16:54 +05301954 case 111:
1955 ret += tcrypt_test("hmac(sha3-224)");
1956 break;
1957
1958 case 112:
1959 ret += tcrypt_test("hmac(sha3-256)");
1960 break;
1961
1962 case 113:
1963 ret += tcrypt_test("hmac(sha3-384)");
1964 break;
1965
1966 case 114:
1967 ret += tcrypt_test("hmac(sha3-512)");
1968 break;
1969
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03001970 case 115:
1971 ret += tcrypt_test("hmac(streebog256)");
1972 break;
1973
1974 case 116:
1975 ret += tcrypt_test("hmac(streebog512)");
1976 break;
1977
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001978 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001979 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001980 break;
1981
Adrian Hoban69435b92010-11-04 15:02:04 -04001982 case 151:
1983 ret += tcrypt_test("rfc4106(gcm(aes))");
1984 break;
1985
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001986 case 152:
1987 ret += tcrypt_test("rfc4543(gcm(aes))");
1988 break;
1989
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001990 case 153:
1991 ret += tcrypt_test("cmac(aes)");
1992 break;
1993
1994 case 154:
1995 ret += tcrypt_test("cmac(des3_ede)");
1996 break;
1997
Horia Geantabbf9c892013-11-28 15:11:16 +02001998 case 155:
1999 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
2000 break;
2001
Horia Geantabca4feb2014-03-14 17:46:51 +02002002 case 156:
2003 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
2004 break;
2005
2006 case 157:
2007 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
2008 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05302009 case 181:
2010 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
2011 break;
2012 case 182:
2013 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
2014 break;
2015 case 183:
2016 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
2017 break;
2018 case 184:
2019 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
2020 break;
2021 case 185:
2022 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
2023 break;
2024 case 186:
2025 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
2026 break;
2027 case 187:
2028 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
2029 break;
2030 case 188:
2031 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
2032 break;
2033 case 189:
2034 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
2035 break;
2036 case 190:
2037 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
2038 break;
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002039 case 191:
2040 ret += tcrypt_test("ecb(sm4)");
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002041 ret += tcrypt_test("cbc(sm4)");
2042 ret += tcrypt_test("ctr(sm4)");
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002043 break;
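	/*
	 * Cases 200-221 below benchmark cipher and AEAD throughput rather
	 * than testing correctness.
	 */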
Harald Welteebfd9bc2005-06-22 13:27:23 -07002044 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10002045 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002046 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002047 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002048 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002049 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002050 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002051 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002052 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11002053 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002054 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11002055 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002056 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08002057 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002058 speed_template_32_64);
Rik Snelf19f5112007-09-19 20:23:13 +08002059 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002060 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002061 test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2062 speed_template_16_24_32);
2063 test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2064 speed_template_16_24_32);
Jan Glauber9996e342011-04-26 16:34:01 +10002065 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2066 speed_template_16_24_32);
2067 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2068 speed_template_16_24_32);
Dmitry Eremin-Solenikov7da66672018-10-20 02:01:53 +03002069 test_cipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2070 speed_template_16_24_32);
2071 test_cipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2072 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002073 break;
2074
2075 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10002076 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002077 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002078 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002079 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002080 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002081 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002082 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002083 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002084 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002085 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002086 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002087 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03002088 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
2089 des3_speed_template, DES3_SPEED_VECTORS,
2090 speed_template_24);
2091 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
2092 des3_speed_template, DES3_SPEED_VECTORS,
2093 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002094 break;
2095
2096 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10002097 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002098 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002099 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002100 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002101 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002102 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002103 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002104 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03002105 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2106 speed_template_16_24_32);
2107 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2108 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03002109 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2110 speed_template_32_40_48);
2111 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2112 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03002113 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2114 speed_template_32_48_64);
2115 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2116 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002117 break;
2118
2119 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10002120 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002121 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002122 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002123 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002124 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002125 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002126 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002127 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03002128 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2129 speed_template_8_32);
2130 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2131 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002132 break;
2133
2134 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10002135 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002136 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002137 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002138 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002139 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002140 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002141 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002142 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002143 break;
2144
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002145 case 205:
2146 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002147 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002148 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002149 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002150 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002151 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002152 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002153 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02002154 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2155 speed_template_16_24_32);
2156 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2157 speed_template_16_24_32);
2158 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2159 speed_template_32_40_48);
2160 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2161 speed_template_32_40_48);
2162 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2163 speed_template_32_48_64);
2164 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2165 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002166 break;
2167
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002168 case 206:
2169 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002170 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002171 break;
2172
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002173 case 207:
2174 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2175 speed_template_16_32);
2176 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2177 speed_template_16_32);
2178 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2179 speed_template_16_32);
2180 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2181 speed_template_16_32);
2182 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2183 speed_template_16_32);
2184 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2185 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002186 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2187 speed_template_32_48);
2188 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2189 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002190 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2191 speed_template_32_64);
2192 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2193 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002194 break;
2195
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002196 case 208:
2197 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2198 speed_template_8);
2199 break;
2200
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002201 case 209:
2202 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2203 speed_template_8_16);
2204 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2205 speed_template_8_16);
2206 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2207 speed_template_8_16);
2208 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2209 speed_template_8_16);
2210 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2211 speed_template_8_16);
2212 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2213 speed_template_8_16);
2214 break;
2215
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002216 case 210:
2217 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2218 speed_template_16_32);
2219 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2220 speed_template_16_32);
2221 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2222 speed_template_16_32);
2223 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2224 speed_template_16_32);
2225 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2226 speed_template_16_32);
2227 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2228 speed_template_16_32);
2229 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2230 speed_template_32_48);
2231 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2232 speed_template_32_48);
2233 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2234 speed_template_32_64);
2235 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2236 speed_template_32_64);
2237 break;
2238
Tim Chen53f52d72013-12-11 14:28:47 -08002239 case 211:
2240 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002241 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05302242 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01002243 NULL, 0, 16, 8, speed_template_16_24_32);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002244 test_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec,
2245 NULL, 0, 16, 16, aead_speed_template_20);
2246 test_aead_speed("gcm(aes)", DECRYPT, sec,
2247 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08002248 break;
2249
Herbert Xu4e4aab62015-06-17 14:04:21 +08002250 case 212:
2251 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002252 NULL, 0, 16, 16, aead_speed_template_19);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002253 test_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec,
2254 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08002255 break;
2256
Martin Willi2dce0632015-07-16 19:13:59 +02002257 case 213:
2258 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
2259 NULL, 0, 16, 8, aead_speed_template_36);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002260 test_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT, sec,
2261 NULL, 0, 16, 8, aead_speed_template_36);
Martin Willi2dce0632015-07-16 19:13:59 +02002262 break;
2263
2264 case 214:
2265 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
2266 speed_template_32);
2267 break;
2268
Gilad Ben-Yossef427988d2017-12-17 08:29:05 +00002269 case 215:
2270 test_mb_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec, NULL,
2271 0, 16, 16, aead_speed_template_20, num_mb);
2272 test_mb_aead_speed("gcm(aes)", ENCRYPT, sec, NULL, 0, 16, 8,
2273 speed_template_16_24_32, num_mb);
2274 test_mb_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec, NULL,
2275 0, 16, 16, aead_speed_template_20, num_mb);
2276 test_mb_aead_speed("gcm(aes)", DECRYPT, sec, NULL, 0, 16, 8,
2277 speed_template_16_24_32, num_mb);
2278 break;
2279
2280 case 216:
2281 test_mb_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec, NULL, 0,
2282 16, 16, aead_speed_template_19, num_mb);
2283 test_mb_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec, NULL, 0,
2284 16, 16, aead_speed_template_19, num_mb);
2285 break;
2286
2287 case 217:
2288 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT,
2289 sec, NULL, 0, 16, 8, aead_speed_template_36,
2290 num_mb);
2291 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT,
2292 sec, NULL, 0, 16, 8, aead_speed_template_36,
2293 num_mb);
2294 break;
2295
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002296 case 218:
2297 test_cipher_speed("ecb(sm4)", ENCRYPT, sec, NULL, 0,
2298 speed_template_16);
2299 test_cipher_speed("ecb(sm4)", DECRYPT, sec, NULL, 0,
2300 speed_template_16);
2301 test_cipher_speed("cbc(sm4)", ENCRYPT, sec, NULL, 0,
2302 speed_template_16);
2303 test_cipher_speed("cbc(sm4)", DECRYPT, sec, NULL, 0,
2304 speed_template_16);
2305 test_cipher_speed("ctr(sm4)", ENCRYPT, sec, NULL, 0,
2306 speed_template_16);
2307 test_cipher_speed("ctr(sm4)", DECRYPT, sec, NULL, 0,
2308 speed_template_16);
2309 break;
Eric Biggers059c2a42018-11-16 17:26:31 -08002310
2311 case 219:
2312 test_cipher_speed("adiantum(xchacha12,aes)", ENCRYPT, sec, NULL,
2313 0, speed_template_32);
2314 test_cipher_speed("adiantum(xchacha12,aes)", DECRYPT, sec, NULL,
2315 0, speed_template_32);
2316 test_cipher_speed("adiantum(xchacha20,aes)", ENCRYPT, sec, NULL,
2317 0, speed_template_32);
2318 test_cipher_speed("adiantum(xchacha20,aes)", DECRYPT, sec, NULL,
2319 0, speed_template_32);
2320 break;
2321
Ard Biesheuvelf975abb2019-08-19 17:17:34 +03002322 case 220:
2323 test_acipher_speed("essiv(cbc(aes),sha256)",
2324 ENCRYPT, sec, NULL, 0,
2325 speed_template_16_24_32);
2326 test_acipher_speed("essiv(cbc(aes),sha256)",
2327 DECRYPT, sec, NULL, 0,
2328 speed_template_16_24_32);
2329 break;
2330
Ard Biesheuvel97bcb162019-07-03 10:55:12 +02002331 case 221:
2332 test_aead_speed("aegis128", ENCRYPT, sec,
2333 NULL, 0, 16, 8, speed_template_16);
2334 test_aead_speed("aegis128", DECRYPT, sec,
2335 NULL, 0, 16, 8, speed_template_16);
2336 break;
2337
Michal Ludvige8057922006-05-30 22:04:19 +10002338 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08002339 if (alg) {
2340 test_hash_speed(alg, sec, generic_hash_speed_template);
2341 break;
2342 }
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002343 fallthrough;
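	/*
	 * The 3xx hash cases fall through so that mode 300 benchmarks every
	 * hash; a specific mode in 301..398 stops after its own test via the
	 * "mode > 300 && mode < 400" check.
	 */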
Michal Ludvige8057922006-05-30 22:04:19 +10002344 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10002345 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002346 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002347 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002348 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10002349 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002350 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002351 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002352 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10002353 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002354 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002355 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002356 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10002357 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002358 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002359 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002360 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10002361 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002362 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002363 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002364 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10002365 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002366 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002367 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002368 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10002369 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002370 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002371 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002372 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10002373 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002374 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002375 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002376 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10002377 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002378 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002379 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002380 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10002381 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002382 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002383 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002384 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10002385 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002386 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002387 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002388 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10002389 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002390 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002391 fallthrough;
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08002392 case 313:
2393 test_hash_speed("sha224", sec, generic_hash_speed_template);
2394 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002395 fallthrough;
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002396 case 314:
2397 test_hash_speed("rmd128", sec, generic_hash_speed_template);
2398 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002399 fallthrough;
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002400 case 315:
2401 test_hash_speed("rmd160", sec, generic_hash_speed_template);
2402 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002403 fallthrough;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002404 case 316:
2405 test_hash_speed("rmd256", sec, generic_hash_speed_template);
2406 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002407 fallthrough;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002408 case 317:
2409 test_hash_speed("rmd320", sec, generic_hash_speed_template);
2410 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002411 fallthrough;
Huang Ying18bcc912010-03-10 18:30:32 +08002412 case 318:
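		/* ghash needs a key; klen selects a 16-byte one for the speed test. */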
Herbert Xuba974ad2020-08-05 15:57:08 +10002413 klen = 16;
2414 test_hash_speed("ghash", sec, generic_hash_speed_template);
Huang Ying18bcc912010-03-10 18:30:32 +08002415 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002416 fallthrough;
Tim Chene3899e42012-09-27 15:44:24 -07002417 case 319:
2418 test_hash_speed("crc32c", sec, generic_hash_speed_template);
2419 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002420 fallthrough;
Herbert Xu684115212013-09-07 12:56:26 +10002421 case 320:
2422 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
2423 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002424 fallthrough;
Martin Willi2dce0632015-07-16 19:13:59 +02002425 case 321:
2426 test_hash_speed("poly1305", sec, poly1305_speed_template);
2427 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002428 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302429 case 322:
2430 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
2431 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002432 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302433 case 323:
2434 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
2435 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002436 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302437 case 324:
2438 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
2439 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002440 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302441 case 325:
2442 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
2443 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002444 fallthrough;
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002445 case 326:
2446 test_hash_speed("sm3", sec, generic_hash_speed_template);
2447 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002448 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002449 case 327:
2450 test_hash_speed("streebog256", sec,
2451 generic_hash_speed_template);
2452 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002453 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002454 case 328:
2455 test_hash_speed("streebog512", sec,
2456 generic_hash_speed_template);
2457 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002458 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002459 case 399:
2460 break;
2461
David S. Millerbeb63da2010-05-19 14:11:21 +10002462 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08002463 if (alg) {
2464 test_ahash_speed(alg, sec, generic_hash_speed_template);
2465 break;
2466 }
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002467 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002468 case 401:
2469 test_ahash_speed("md4", sec, generic_hash_speed_template);
2470 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002471 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002472 case 402:
2473 test_ahash_speed("md5", sec, generic_hash_speed_template);
2474 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002475 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002476 case 403:
2477 test_ahash_speed("sha1", sec, generic_hash_speed_template);
2478 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002479 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002480 case 404:
2481 test_ahash_speed("sha256", sec, generic_hash_speed_template);
2482 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002483 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002484 case 405:
2485 test_ahash_speed("sha384", sec, generic_hash_speed_template);
2486 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002487 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002488 case 406:
2489 test_ahash_speed("sha512", sec, generic_hash_speed_template);
2490 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002491 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002492 case 407:
2493 test_ahash_speed("wp256", sec, generic_hash_speed_template);
2494 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002495 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002496 case 408:
2497 test_ahash_speed("wp384", sec, generic_hash_speed_template);
2498 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002499 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002500 case 409:
2501 test_ahash_speed("wp512", sec, generic_hash_speed_template);
2502 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002503 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002504 case 410:
2505 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
2506 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002507 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002508 case 411:
2509 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
2510 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002511 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002512 case 412:
2513 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
2514 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002515 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002516 case 413:
2517 test_ahash_speed("sha224", sec, generic_hash_speed_template);
2518 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002519 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002520 case 414:
2521 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
2522 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002523 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002524 case 415:
2525 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
2526 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002527 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002528 case 416:
2529 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
2530 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002531 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002532 case 417:
2533 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
2534 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002535 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302536 case 418:
2537 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
2538 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002539 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302540 case 419:
2541 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
2542 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002543 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302544 case 420:
2545 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
2546 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002547 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302548 case 421:
2549 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
2550 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002551 fallthrough;
Megha Dey087bcd22016-06-23 18:40:47 -07002552 case 422:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002553 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template,
2554 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002555 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002556 fallthrough;
Megha Dey087bcd22016-06-23 18:40:47 -07002557 case 423:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002558 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template,
2559 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002560 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002561 fallthrough;
Megha Dey14009c42016-06-27 10:20:09 -07002562 case 424:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002563 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template,
2564 num_mb);
Megha Dey14009c42016-06-27 10:20:09 -07002565 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002566 fallthrough;
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002567 case 425:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002568 test_mb_ahash_speed("sm3", sec, generic_hash_speed_template,
2569 num_mb);
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002570 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002571 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002572 case 426:
2573 test_mb_ahash_speed("streebog256", sec,
2574 generic_hash_speed_template, num_mb);
2575 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002576 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002577 case 427:
2578 test_mb_ahash_speed("streebog512", sec,
2579 generic_hash_speed_template, num_mb);
2580 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002581 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002582 case 499:
2583 break;
2584
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002585 case 500:
2586 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2587 speed_template_16_24_32);
2588 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2589 speed_template_16_24_32);
2590 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2591 speed_template_16_24_32);
2592 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2593 speed_template_16_24_32);
2594 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2595 speed_template_32_40_48);
2596 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2597 speed_template_32_40_48);
2598 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002599 speed_template_32_64);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002600 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002601 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002602 test_acipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2603 speed_template_16_24_32);
2604 test_acipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2605 speed_template_16_24_32);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002606 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2607 speed_template_16_24_32);
2608 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2609 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02002610 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2611 speed_template_16_24_32);
2612 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2613 speed_template_16_24_32);
2614 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2615 speed_template_16_24_32);
2616 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2617 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02002618 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
2619 speed_template_20_28_36);
2620 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
2621 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002622 break;
2623
2624 case 501:
2625 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2626 des3_speed_template, DES3_SPEED_VECTORS,
2627 speed_template_24);
2628 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
2629 des3_speed_template, DES3_SPEED_VECTORS,
2630 speed_template_24);
2631 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2632 des3_speed_template, DES3_SPEED_VECTORS,
2633 speed_template_24);
2634 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
2635 des3_speed_template, DES3_SPEED_VECTORS,
2636 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02002637 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2638 des3_speed_template, DES3_SPEED_VECTORS,
2639 speed_template_24);
2640 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
2641 des3_speed_template, DES3_SPEED_VECTORS,
2642 speed_template_24);
2643 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2644 des3_speed_template, DES3_SPEED_VECTORS,
2645 speed_template_24);
2646 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2647 des3_speed_template, DES3_SPEED_VECTORS,
2648 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002649 break;
2650
2651 case 502:
2652 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2653 speed_template_8);
2654 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2655 speed_template_8);
2656 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2657 speed_template_8);
2658 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2659 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002660 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2661 speed_template_8);
2662 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2663 speed_template_8);
2664 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2665 speed_template_8);
2666 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2667 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002668 break;
2669
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002670 case 503:
2671 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2672 speed_template_16_32);
2673 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2674 speed_template_16_32);
2675 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2676 speed_template_16_32);
2677 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2678 speed_template_16_32);
2679 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2680 speed_template_16_32);
2681 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2682 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002683 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2684 speed_template_32_48);
2685 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2686 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002687 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2688 speed_template_32_64);
2689 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2690 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002691 break;
2692
Johannes Goetzfried107778b52012-05-28 15:54:24 +02002693 case 504:
2694 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2695 speed_template_16_24_32);
2696 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2697 speed_template_16_24_32);
2698 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2699 speed_template_16_24_32);
2700 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2701 speed_template_16_24_32);
2702 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2703 speed_template_16_24_32);
2704 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2705 speed_template_16_24_32);
2706 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2707 speed_template_32_40_48);
2708 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2709 speed_template_32_40_48);
2710 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2711 speed_template_32_48_64);
2712 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2713 speed_template_32_48_64);
2714 break;
2715
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002716 case 505:
2717 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2718 speed_template_8);
2719 break;
2720
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002721 case 506:
2722 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2723 speed_template_8_16);
2724 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2725 speed_template_8_16);
2726 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2727 speed_template_8_16);
2728 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2729 speed_template_8_16);
2730 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2731 speed_template_8_16);
2732 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2733 speed_template_8_16);
2734 break;
2735
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002736 case 507:
2737 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2738 speed_template_16_32);
2739 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2740 speed_template_16_32);
2741 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2742 speed_template_16_32);
2743 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2744 speed_template_16_32);
2745 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2746 speed_template_16_32);
2747 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2748 speed_template_16_32);
2749 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2750 speed_template_32_48);
2751 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2752 speed_template_32_48);
2753 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2754 speed_template_32_64);
2755 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2756 speed_template_32_64);
2757 break;
2758
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002759 case 508:
2760 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2761 speed_template_16_32);
2762 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2763 speed_template_16_32);
2764 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2765 speed_template_16_32);
2766 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2767 speed_template_16_32);
2768 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2769 speed_template_16_32);
2770 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2771 speed_template_16_32);
2772 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2773 speed_template_32_48);
2774 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2775 speed_template_32_48);
2776 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2777 speed_template_32_64);
2778 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2779 speed_template_32_64);
2780 break;
2781
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002782 case 509:
2783 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2784 speed_template_8_32);
2785 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2786 speed_template_8_32);
2787 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2788 speed_template_8_32);
2789 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2790 speed_template_8_32);
2791 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2792 speed_template_8_32);
2793 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2794 speed_template_8_32);
2795 break;
2796
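	/* Modes 600..609: multibuffer skcipher speed tests, num_mb concurrent requests each. */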
Gilad Ben-Yossefe161c592017-12-17 08:29:04 +00002797 case 600:
2798 test_mb_skcipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2799 speed_template_16_24_32, num_mb);
2800 test_mb_skcipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2801 speed_template_16_24_32, num_mb);
2802 test_mb_skcipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2803 speed_template_16_24_32, num_mb);
2804 test_mb_skcipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2805 speed_template_16_24_32, num_mb);
2806 test_mb_skcipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2807 speed_template_32_40_48, num_mb);
2808 test_mb_skcipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2809 speed_template_32_40_48, num_mb);
2810 test_mb_skcipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
2811 speed_template_32_64, num_mb);
2812 test_mb_skcipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
2813 speed_template_32_64, num_mb);
2814 test_mb_skcipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2815 speed_template_16_24_32, num_mb);
2816 test_mb_skcipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2817 speed_template_16_24_32, num_mb);
2818 test_mb_skcipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2819 speed_template_16_24_32, num_mb);
2820 test_mb_skcipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2821 speed_template_16_24_32, num_mb);
2822 test_mb_skcipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2823 speed_template_16_24_32, num_mb);
2824 test_mb_skcipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2825 speed_template_16_24_32, num_mb);
2826 test_mb_skcipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2827 speed_template_16_24_32, num_mb);
2828 test_mb_skcipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2829 speed_template_16_24_32, num_mb);
2830 test_mb_skcipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL,
2831 0, speed_template_20_28_36, num_mb);
2832 test_mb_skcipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL,
2833 0, speed_template_20_28_36, num_mb);
2834 break;
2835
2836 case 601:
2837 test_mb_skcipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2838 des3_speed_template, DES3_SPEED_VECTORS,
2839 speed_template_24, num_mb);
2840 test_mb_skcipher_speed("ecb(des3_ede)", DECRYPT, sec,
2841 des3_speed_template, DES3_SPEED_VECTORS,
2842 speed_template_24, num_mb);
2843 test_mb_skcipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2844 des3_speed_template, DES3_SPEED_VECTORS,
2845 speed_template_24, num_mb);
2846 test_mb_skcipher_speed("cbc(des3_ede)", DECRYPT, sec,
2847 des3_speed_template, DES3_SPEED_VECTORS,
2848 speed_template_24, num_mb);
2849 test_mb_skcipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2850 des3_speed_template, DES3_SPEED_VECTORS,
2851 speed_template_24, num_mb);
2852 test_mb_skcipher_speed("cfb(des3_ede)", DECRYPT, sec,
2853 des3_speed_template, DES3_SPEED_VECTORS,
2854 speed_template_24, num_mb);
2855 test_mb_skcipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2856 des3_speed_template, DES3_SPEED_VECTORS,
2857 speed_template_24, num_mb);
2858 test_mb_skcipher_speed("ofb(des3_ede)", DECRYPT, sec,
2859 des3_speed_template, DES3_SPEED_VECTORS,
2860 speed_template_24, num_mb);
2861 break;
2862
2863 case 602:
2864 test_mb_skcipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2865 speed_template_8, num_mb);
2866 test_mb_skcipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2867 speed_template_8, num_mb);
2868 test_mb_skcipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2869 speed_template_8, num_mb);
2870 test_mb_skcipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2871 speed_template_8, num_mb);
2872 test_mb_skcipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2873 speed_template_8, num_mb);
2874 test_mb_skcipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2875 speed_template_8, num_mb);
2876 test_mb_skcipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2877 speed_template_8, num_mb);
2878 test_mb_skcipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2879 speed_template_8, num_mb);
2880 break;
2881
2882 case 603:
2883 test_mb_skcipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2884 speed_template_16_32, num_mb);
2885 test_mb_skcipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2886 speed_template_16_32, num_mb);
2887 test_mb_skcipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2888 speed_template_16_32, num_mb);
2889 test_mb_skcipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2890 speed_template_16_32, num_mb);
2891 test_mb_skcipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2892 speed_template_16_32, num_mb);
2893 test_mb_skcipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2894 speed_template_16_32, num_mb);
2895 test_mb_skcipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2896 speed_template_32_48, num_mb);
2897 test_mb_skcipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2898 speed_template_32_48, num_mb);
2899 test_mb_skcipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2900 speed_template_32_64, num_mb);
2901 test_mb_skcipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2902 speed_template_32_64, num_mb);
2903 break;
2904
2905 case 604:
2906 test_mb_skcipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2907 speed_template_16_24_32, num_mb);
2908 test_mb_skcipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2909 speed_template_16_24_32, num_mb);
2910 test_mb_skcipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2911 speed_template_16_24_32, num_mb);
2912 test_mb_skcipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2913 speed_template_16_24_32, num_mb);
2914 test_mb_skcipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2915 speed_template_16_24_32, num_mb);
2916 test_mb_skcipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2917 speed_template_16_24_32, num_mb);
2918 test_mb_skcipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2919 speed_template_32_40_48, num_mb);
2920 test_mb_skcipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2921 speed_template_32_40_48, num_mb);
2922 test_mb_skcipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2923 speed_template_32_48_64, num_mb);
2924 test_mb_skcipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2925 speed_template_32_48_64, num_mb);
2926 break;
2927
2928 case 605:
2929 test_mb_skcipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2930 speed_template_8, num_mb);
2931 break;
2932
2933 case 606:
2934 test_mb_skcipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2935 speed_template_8_16, num_mb);
2936 test_mb_skcipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2937 speed_template_8_16, num_mb);
2938 test_mb_skcipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2939 speed_template_8_16, num_mb);
2940 test_mb_skcipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2941 speed_template_8_16, num_mb);
2942 test_mb_skcipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2943 speed_template_8_16, num_mb);
2944 test_mb_skcipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2945 speed_template_8_16, num_mb);
2946 break;
2947
2948 case 607:
2949 test_mb_skcipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2950 speed_template_16_32, num_mb);
2951 test_mb_skcipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2952 speed_template_16_32, num_mb);
2953 test_mb_skcipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2954 speed_template_16_32, num_mb);
2955 test_mb_skcipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2956 speed_template_16_32, num_mb);
2957 test_mb_skcipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2958 speed_template_16_32, num_mb);
2959 test_mb_skcipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2960 speed_template_16_32, num_mb);
2961 test_mb_skcipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2962 speed_template_32_48, num_mb);
2963 test_mb_skcipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2964 speed_template_32_48, num_mb);
2965 test_mb_skcipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2966 speed_template_32_64, num_mb);
2967 test_mb_skcipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2968 speed_template_32_64, num_mb);
2969 break;
2970
2971 case 608:
2972 test_mb_skcipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2973 speed_template_16_32, num_mb);
2974 test_mb_skcipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2975 speed_template_16_32, num_mb);
2976 test_mb_skcipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2977 speed_template_16_32, num_mb);
2978 test_mb_skcipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2979 speed_template_16_32, num_mb);
2980 test_mb_skcipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2981 speed_template_16_32, num_mb);
2982 test_mb_skcipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2983 speed_template_16_32, num_mb);
2984 test_mb_skcipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2985 speed_template_32_48, num_mb);
2986 test_mb_skcipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2987 speed_template_32_48, num_mb);
2988 test_mb_skcipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2989 speed_template_32_64, num_mb);
2990 test_mb_skcipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2991 speed_template_32_64, num_mb);
2992 break;
2993
2994 case 609:
2995 test_mb_skcipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2996 speed_template_8_32, num_mb);
2997 test_mb_skcipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2998 speed_template_8_32, num_mb);
2999 test_mb_skcipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
3000 speed_template_8_32, num_mb);
3001 test_mb_skcipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
3002 speed_template_8_32, num_mb);
3003 test_mb_skcipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
3004 speed_template_8_32, num_mb);
3005 test_mb_skcipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
3006 speed_template_8_32, num_mb);
3007 break;
3008
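	/* Mode 1000: report which of the known algorithms are available. */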
Linus Torvalds1da177e2005-04-16 15:20:36 -07003009 case 1000:
3010 test_available();
3011 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003012 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10003013
3014 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003015}
3016
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003017static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07003018{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003019 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08003020 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003021
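	/* Allocate TVMEMSIZE pages of scratch memory for the test buffers. */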
Herbert Xuf139cfa2008-07-31 12:23:53 +08003022 for (i = 0; i < TVMEMSIZE; i++) {
3023 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
3024 if (!tvmem[i])
3025 goto err_free_tv;
3026 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003027
Kees Cook4e234ee2018-04-26 19:57:28 -07003028 err = do_test(alg, type, mask, mode, num_mb);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08003029
Jarod Wilson4e033a62009-05-27 15:10:21 +10003030 if (err) {
3031 pr_err("one or more tests failed!\n");
3032 goto err_free_tv;
Rabin Vincent76512f22017-01-18 14:54:05 +01003033 } else {
3034 pr_debug("all tests passed\n");
Jarod Wilson4e033a62009-05-27 15:10:21 +10003035 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003036
Jarod Wilson4e033a62009-05-27 15:10:21 +10003037 /* We intentionally return -EAGAIN to prevent the module from staying
3038 * loaded, unless we're running in fips mode. It does all its work from
3039 * init() and doesn't offer any runtime functionality, but in
3040 * the fips case, checking for a successful load is helpful.
Michal Ludvig14fdf472006-05-30 14:49:38 +10003041 * => we don't need it in memory, do we?
3042 * -- mludvig
3043 */
Jarod Wilson4e033a62009-05-27 15:10:21 +10003044 if (!fips_enabled)
3045 err = -EAGAIN;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003046
Herbert Xuf139cfa2008-07-31 12:23:53 +08003047err_free_tv:
3048 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
3049 free_page((unsigned long)tvmem[i]);
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003050
3051 return err;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003052}
3053
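/*
 * Illustration (not from the original source): because tcrypt_mod_init()
 * returns -EAGAIN outside fips mode, loading the module is expected to
 * "fail" from userspace even when every selected test ran, e.g. modprobe
 * will typically report something like:
 *
 *   # modprobe tcrypt mode=1000
 *   modprobe: ERROR: could not insert 'tcrypt': Resource temporarily unavailable
 *
 * The actual test output goes to the kernel log and can be read with dmesg.
 */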
3054/*
3055 * If an init function is provided, an exit function must also be provided
3056 * to allow module unload.
3057 */
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003058static void __exit tcrypt_mod_fini(void) { }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003059
Eric Biggersc4741b22019-04-11 21:57:42 -07003060subsys_initcall(tcrypt_mod_init);
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003061module_exit(tcrypt_mod_fini);
Linus Torvalds1da177e2005-04-16 15:20:36 -07003062
Steffen Klasserta873a5f2009-06-19 19:46:53 +08003063module_param(alg, charp, 0);
3064module_param(type, uint, 0);
Herbert Xu7be380f2009-07-14 16:06:54 +08003065module_param(mask, uint, 0);
Linus Torvalds1da177e2005-04-16 15:20:36 -07003066module_param(mode, int, 0);
Harald Welteebfd9bc2005-06-22 13:27:23 -07003067module_param(sec, uint, 0);
Herbert Xu6a179442005-06-22 13:29:03 -07003068MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
3069 "(defaults to zero which uses CPU cycles instead)");
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00003070module_param(num_mb, uint, 0000);
3071MODULE_PARM_DESC(num_mb, "Number of concurrent requests to be used in mb speed tests (defaults to 8)");
Herbert Xuba974ad2020-08-05 15:57:08 +10003072module_param(klen, uint, 0);
3073MODULE_PARM_DESC(klen, "Key length (defaults to 0)");
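
/*
 * Usage sketch (an illustration, not an exhaustive reference): mode selects
 * a case in do_test() above, while sec and num_mb size the speed tests. For
 * example, the multibuffer skcipher cases 600..609 can be exercised with
 * something like:
 *
 *   modprobe tcrypt mode=600 sec=1 num_mb=8
 *
 * which runs the case 600 AES skcipher measurements for about one second
 * each with eight concurrent requests; with sec left at 0 the tests count
 * CPU cycles instead.
 */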
Linus Torvalds1da177e2005-04-16 15:20:36 -07003074
3075MODULE_LICENSE("GPL");
3076MODULE_DESCRIPTION("Quick & dirty crypto testing module");
3077MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");