// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE		64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static u32 num_mb = 8;
static unsigned int klen;
static char *tvmem[TVMEMSIZE];

static const char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "lzo-rle", "cts", "sha3-224", "sha3-256", "sha3-384",
	"sha3-512", "streebog256", "streebog512",
	NULL
};

static const int block_sizes[] = { 16, 64, 256, 1024, 1420, 4096, 0 };
static const int aead_sizes[] = { 16, 64, 256, 512, 1024, 1420, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

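/*
 * Build an AEAD scatterlist: entry 0 holds the associated data, the
 * following entries map up to XBUFSIZE pages of payload, and the last
 * used entry is trimmed to the remainder of buflen.
 */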
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen, const void *assoc,
			 unsigned int aad_size)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);

	sg_set_buf(&sg[0], assoc, aad_size);

	if (rem)
		np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	if (rem)
		sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_aead_data {
	struct scatterlist sg[XBUFSIZE];
	struct scatterlist sgout[XBUFSIZE];
	struct aead_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
};

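/*
 * Submit num_mb AEAD requests back to back, then wait for all of them.
 * Any failing request is reported and its error code returned.
 */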
static int do_mult_aead_op(struct test_mb_aead_data *data, int enc,
			   u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_aead_encrypt(data[i].req);
		else
			rc[i] = crypto_aead_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_aead_jiffies(struct test_mb_aead_data *data, int enc,
				int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_aead_cycles(struct test_mb_aead_data *data, int enc,
			       int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

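/*
 * Multibuffer AEAD speed test: for every key size and AEAD buffer size,
 * num_mb identical requests are kept in flight and timed either over a
 * number of seconds (secs != 0) or in CPU cycles.
 */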
static void test_mb_aead_speed(const char *algo, int enc, int secs,
			       struct aead_speed_template *template,
			       unsigned int tcount, u8 authsize,
			       unsigned int aad_size, u8 *keysize, u32 num_mb)
{
	struct test_mb_aead_data *data;
	struct crypto_aead *tfm;
	unsigned int i, j, iv_len;
	const int *b_size;
	const char *key;
	const char *e;
	void *assoc;
	char *iv;
	int ret;


	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		return;
	}

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		goto out_free_iv;

	tfm = crypto_alloc_aead(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
			algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	ret = crypto_aead_setauthsize(tfm, authsize);

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].axbuf)) {
			while (i--)
				testmgr_free_buf(data[i].axbuf);
			goto out_free_xbuf;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xoutbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xoutbuf);
			goto out_free_axbuf;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = aead_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				aead_request_free(data[i].req);
			goto out_free_xoutbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		crypto_init_wait(&data[i].wait);
		aead_request_set_callback(data[i].req,
					  CRYPTO_TFM_REQ_MAY_BACKLOG,
					  crypto_req_done, &data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_aead, tfm), e);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			int bs = round_up(*b_size, crypto_aead_blocksize(tfm));

			if (bs + authsize > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       authsize + bs,
				       XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, bs);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_aead_clear_flags(tfm, ~0);

			ret = crypto_aead_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_aead_data *cur = &data[j];

				assoc = cur->axbuf[0];
				memset(assoc, 0xff, aad_size);

				sg_init_aead(cur->sg, cur->xbuf,
					     bs + (enc ? 0 : authsize),
					     assoc, aad_size);

				sg_init_aead(cur->sgout, cur->xoutbuf,
					     bs + (enc ? authsize : 0),
					     assoc, aad_size);

				aead_request_set_ad(cur->req, aad_size);

				if (!enc) {

					aead_request_set_crypt(cur->req,
							       cur->sgout,
							       cur->sg,
							       bs, iv);
					ret = crypto_aead_encrypt(cur->req);
					ret = do_one_aead_op(cur->req, ret);

					if (ret) {
						pr_err("calculating auth failed (%d)\n",
						       ret);
						break;
					}
				}

				aead_request_set_crypt(cur->req, cur->sg,
						       cur->sgout, bs +
						       (enc ? 0 : authsize),
						       iv);

			}

			if (secs) {
				ret = test_mb_aead_jiffies(data, enc, bs,
							   secs, num_mb);
				cond_resched();
			} else {
				ret = test_mb_aead_cycles(data, enc, bs,
							  num_mb);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		aead_request_free(data[i].req);
out_free_xoutbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xoutbuf);
out_free_axbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].axbuf);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_aead(tfm);
out_free_data:
	kfree(data);
out_free_iv:
	kfree(iv);
}

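/*
 * Run a single AEAD request repeatedly for secs seconds and report the
 * operation and byte counts.
 */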
static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

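/*
 * Single-request AEAD speed test. For decryption a valid authentication
 * tag is produced first by running the encrypt path once with the input
 * and output buffers swapped.
 */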
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	const int *b_size;
	unsigned int iv_len;
	struct crypto_wait wait;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	crypto_init_wait(&wait);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
			get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			u32 bs = round_up(*b_size, crypto_aead_blocksize(tfm));

			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + bs) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + bs,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
					i, *keysize * 8, bs);


			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
						crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(sg, xbuf, bs + (enc ? 0 : authsize),
				     assoc, aad_size);

			sg_init_aead(sgout, xoutbuf,
				     bs + (enc ? authsize : 0), assoc,
				     aad_size);

			aead_request_set_ad(req, aad_size);

			if (!enc) {

				/*
				 * For decryption we need a proper auth so
				 * we do the encryption path once with buffers
				 * reversed (input <-> output) to calculate it
				 */
				aead_request_set_crypt(req, sgout, sg,
						       bs, iv);
				ret = do_one_aead_op(req,
						     crypto_aead_encrypt(req));

				if (ret) {
					pr_err("calculating auth failed (%d)\n",
					       ret);
					break;
				}
			}

			aead_request_set_crypt(req, sg, sgout,
					       bs + (enc ? 0 : authsize),
					       iv);

			if (secs) {
				ret = test_aead_jiffies(req, enc, bs,
							secs);
				cond_resched();
			} else {
				ret = test_aead_cycles(req, enc, bs);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
}

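/* Map the TVMEMSIZE test pages into a scatterlist and fill them with 0xff. */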
static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_ahash_data {
	struct scatterlist sg[XBUFSIZE];
	char result[64];
	struct ahash_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static inline int do_mult_ahash_op(struct test_mb_ahash_data *data, u32 num_mb,
				   int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++)
		rc[i] = crypto_ahash_digest(data[i].req);

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_ahash_jiffies(struct test_mb_ahash_data *data, int blen,
				 int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_ahash_cycles(struct test_mb_ahash_data *data, int blen,
				u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_ahash_op(data, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

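/*
 * Multibuffer hash speed test: num_mb digest requests are kept in flight
 * for each template entry. Only entries with blen == plen (one-shot
 * digests) are exercised.
 */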
static void test_mb_ahash_speed(const char *algo, unsigned int secs,
				struct hash_speed *speed, u32 num_mb)
{
	struct test_mb_ahash_data *data;
	struct crypto_ahash *tfm;
	unsigned int i, j, k;
	int ret;

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
			algo, PTR_ERR(tfm));
		goto free_data;
	}

	for (i = 0; i < num_mb; ++i) {
		if (testmgr_alloc_buf(data[i].xbuf))
			goto out;

		crypto_init_wait(&data[i].wait);

		data[i].req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: hash: Failed to allocate request for %s\n",
			       algo);
			goto out;
		}

		ahash_request_set_callback(data[i].req, 0, crypto_req_done,
					   &data[i].wait);

		sg_init_table(data[i].sg, XBUFSIZE);
		for (j = 0; j < XBUFSIZE; j++) {
			sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
			memset(data[i].xbuf[j], 0xff, PAGE_SIZE);
		}
	}

	pr_info("\ntesting speed of multibuffer %s (%s)\n", algo,
		get_driver_name(crypto_ahash, tfm));

	for (i = 0; speed[i].blen != 0; i++) {
		/* For some reason this only tests digests. */
		if (speed[i].blen != speed[i].plen)
			continue;

		if (speed[i].blen > XBUFSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, XBUFSIZE * PAGE_SIZE);
			goto out;
		}

		if (klen)
			crypto_ahash_setkey(tfm, tvmem[0], klen);

		for (k = 0; k < num_mb; k++)
			ahash_request_set_crypt(data[k].req, data[k].sg,
						data[k].result, speed[i].blen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		if (secs) {
			ret = test_mb_ahash_jiffies(data, speed[i].blen, secs,
						    num_mb);
			cond_resched();
		} else {
			ret = test_mb_ahash_cycles(data, speed[i].blen, num_mb);
		}


		if (ret) {
			pr_err("At least one hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	for (k = 0; k < num_mb; ++k)
		ahash_request_free(data[k].req);

	for (k = 0; k < num_mb; ++k)
		testmgr_free_buf(data[k].xbuf);

	crypto_free_ahash(tfm);

free_data:
	kfree(data);
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

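/*
 * Time the init/update/final path for blen bytes processed in plen-sized
 * chunks; falls back to the one-shot digest variant when plen == blen.
 */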
static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

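/*
 * Shared helper behind test_ahash_speed() and test_hash_speed(): "mask"
 * restricts the allocation (CRYPTO_ALG_ASYNC selects synchronous
 * implementations only), and each template entry is timed in jiffies or
 * cycles.
 */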
static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_wait wait;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
			get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	crypto_init_wait(&wait);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		if (klen)
			crypto_ahash_setkey(tfm, tvmem[0], klen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs) {
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
			cond_resched();
		} else {
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);
		}

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

struct test_mb_skcipher_data {
	struct scatterlist sg[XBUFSIZE];
	struct skcipher_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static int do_mult_acipher_op(struct test_mb_skcipher_data *data, int enc,
			      u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_skcipher_encrypt(data[i].req);
		else
			rc[i] = crypto_skcipher_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_acipher_jiffies(struct test_mb_skcipher_data *data, int enc,
				   int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_acipher_cycles(struct test_mb_skcipher_data *data, int enc,
				  int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

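/*
 * Multibuffer skcipher speed test, mirroring test_mb_aead_speed(): one key
 * is set on the tfm, then num_mb requests over block_sizes[] buffers are
 * kept in flight and timed in jiffies or cycles.
 */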
static void test_mb_skcipher_speed(const char *algo, int enc, int secs,
				   struct cipher_speed_template *template,
				   unsigned int tcount, u8 *keysize, u32 num_mb)
{
	struct test_mb_skcipher_data *data;
	struct crypto_skcipher *tfm;
	unsigned int i, j, iv_len;
	const int *b_size;
	const char *key;
	const char *e;
	char iv[128];
	int ret;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_skcipher(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}


	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}


	for (i = 0; i < num_mb; ++i) {
		data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				skcipher_request_free(data[i].req);
			goto out_free_xbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		skcipher_request_set_callback(data[i].req,
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
					      crypto_req_done, &data[i].wait);
		crypto_init_wait(&data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	i = 0;
	do {
		b_size = block_sizes;
		do {
			u32 bs = round_up(*b_size, crypto_skcipher_blocksize(tfm));

			if (bs > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       *b_size, XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, bs);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_skcipher_data *cur = &data[j];
				unsigned int k = bs;
				unsigned int pages = DIV_ROUND_UP(k, PAGE_SIZE);
				unsigned int p = 0;

				sg_init_table(cur->sg, pages);

				while (k > PAGE_SIZE) {
					sg_set_buf(cur->sg + p, cur->xbuf[p],
						   PAGE_SIZE);
					memset(cur->xbuf[p], 0xff, PAGE_SIZE);
					p++;
					k -= PAGE_SIZE;
				}

				sg_set_buf(cur->sg + p, cur->xbuf[p], k);
				memset(cur->xbuf[p], 0xff, k);

				skcipher_request_set_crypt(cur->req, cur->sg,
							   cur->sg, *b_size,
							   iv);
			}

			if (secs) {
				ret = test_mb_acipher_jiffies(data, enc,
							      bs, secs,
							      num_mb);
				cond_resched();
			} else {
				ret = test_mb_acipher_cycles(data, enc,
							     bs, num_mb);
			}

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		skcipher_request_free(data[i].req);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_skcipher(tfm);
out_free_data:
	kfree(data);
}

static inline int do_one_acipher_op(struct skcipher_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

static int test_acipher_jiffies(struct skcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct skcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

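/*
 * Single-request skcipher speed test. The key and plaintext share tvmem[],
 * so each buffer size is limited to TVMEMSIZE pages minus the key length.
 */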
Herbert Xu7166e582016-06-29 18:03:50 +08001496static void test_skcipher_speed(const char *algo, int enc, unsigned int secs,
1497 struct cipher_speed_template *template,
1498 unsigned int tcount, u8 *keysize, bool async)
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001499{
Nicolas Royerde1975332012-07-01 19:19:47 +02001500 unsigned int ret, i, j, k, iv_len;
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001501 struct crypto_wait wait;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001502 const char *key;
1503 char iv[128];
Herbert Xu7166e582016-06-29 18:03:50 +08001504 struct skcipher_request *req;
1505 struct crypto_skcipher *tfm;
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001506 const int *b_size;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001507 const char *e;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001508
1509 if (enc == ENCRYPT)
1510 e = "encryption";
1511 else
1512 e = "decryption";
1513
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001514 crypto_init_wait(&wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001515
Herbert Xu7166e582016-06-29 18:03:50 +08001516 tfm = crypto_alloc_skcipher(algo, 0, async ? 0 : CRYPTO_ALG_ASYNC);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001517
1518 if (IS_ERR(tfm)) {
1519 pr_err("failed to load transform for %s: %ld\n", algo,
1520 PTR_ERR(tfm));
1521 return;
1522 }
1523
Horia Geantă8e3b7fd2020-02-05 12:19:58 +02001524 pr_info("\ntesting speed of %s %s (%s) %s\n", async ? "async" : "sync",
1525 algo, get_driver_name(crypto_skcipher, tfm), e);
Luca Clementi263a8df2014-06-25 22:57:42 -07001526
Herbert Xu7166e582016-06-29 18:03:50 +08001527 req = skcipher_request_alloc(tfm, GFP_KERNEL);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001528 if (!req) {
1529 pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
1530 algo);
1531 goto out;
1532 }
1533
Herbert Xu7166e582016-06-29 18:03:50 +08001534 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001535 crypto_req_done, &wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001536
1537 i = 0;
1538 do {
1539 b_size = block_sizes;
1540
1541 do {
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001542 u32 bs = round_up(*b_size, crypto_skcipher_blocksize(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001543 struct scatterlist sg[TVMEMSIZE];
1544
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001545 if ((*keysize + bs) > TVMEMSIZE * PAGE_SIZE) {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001546 pr_err("template (%u) too big for "
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001547 "tvmem (%lu)\n", *keysize + bs,
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001548 TVMEMSIZE * PAGE_SIZE);
1549 goto out_free_req;
1550 }
1551
1552 pr_info("test %u (%d bit key, %d byte blocks): ", i,
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001553 *keysize * 8, bs);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001554
1555 memset(tvmem[0], 0xff, PAGE_SIZE);
1556
1557 /* set key, plain text and IV */
1558 key = tvmem[0];
1559 for (j = 0; j < tcount; j++) {
1560 if (template[j].klen == *keysize) {
1561 key = template[j].key;
1562 break;
1563 }
1564 }
1565
Herbert Xu7166e582016-06-29 18:03:50 +08001566 crypto_skcipher_clear_flags(tfm, ~0);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001567
Herbert Xu7166e582016-06-29 18:03:50 +08001568 ret = crypto_skcipher_setkey(tfm, key, *keysize);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001569 if (ret) {
1570 pr_err("setkey() failed flags=%x\n",
Herbert Xu7166e582016-06-29 18:03:50 +08001571 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001572 goto out_free_req;
1573 }
1574
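			/*
			 * Lay out key + payload across the tvmem[] pages:
			 * the key sits at the start of tvmem[0], the bs-byte
			 * payload follows and spills into further pages when
			 * it does not fit in the first one.
			 */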
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001575 k = *keysize + bs;
Horia Geantă007ee8d2015-03-09 16:14:58 +02001576 sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));
1577
Nicolas Royerde1975332012-07-01 19:19:47 +02001578 if (k > PAGE_SIZE) {
1579 sg_set_buf(sg, tvmem[0] + *keysize,
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001580 PAGE_SIZE - *keysize);
Nicolas Royerde1975332012-07-01 19:19:47 +02001581 k -= PAGE_SIZE;
1582 j = 1;
1583 while (k > PAGE_SIZE) {
1584 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
1585 memset(tvmem[j], 0xff, PAGE_SIZE);
1586 j++;
1587 k -= PAGE_SIZE;
1588 }
1589 sg_set_buf(sg + j, tvmem[j], k);
1590 memset(tvmem[j], 0xff, k);
1591 } else {
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001592 sg_set_buf(sg, tvmem[0] + *keysize, bs);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001593 }
1594
Herbert Xu7166e582016-06-29 18:03:50 +08001595 iv_len = crypto_skcipher_ivsize(tfm);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001596 if (iv_len)
1597 memset(&iv, 0xff, iv_len);
1598
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001599 skcipher_request_set_crypt(req, sg, sg, bs, iv);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001600
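			/*
			 * Time the operation: wall-clock mode when secs is
			 * non-zero, otherwise a cycle-count measurement.
			 */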
Horia Geantă2af63292018-07-23 17:18:48 +03001601 if (secs) {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001602 ret = test_acipher_jiffies(req, enc,
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001603 bs, secs);
Horia Geantă2af63292018-07-23 17:18:48 +03001604 cond_resched();
1605 } else {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001606 ret = test_acipher_cycles(req, enc,
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001607 bs);
Horia Geantă2af63292018-07-23 17:18:48 +03001608 }
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001609
1610 if (ret) {
1611 pr_err("%s() failed flags=%x\n", e,
Herbert Xu7166e582016-06-29 18:03:50 +08001612 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001613 break;
1614 }
1615 b_size++;
1616 i++;
1617 } while (*b_size);
1618 keysize++;
1619 } while (*keysize);
1620
1621out_free_req:
Herbert Xu7166e582016-06-29 18:03:50 +08001622 skcipher_request_free(req);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001623out:
Herbert Xu7166e582016-06-29 18:03:50 +08001624 crypto_free_skcipher(tfm);
1625}
1626
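/*
 * Thin wrappers: test_acipher_speed() permits asynchronous implementations,
 * test_cipher_speed() restricts the allocation to synchronous ones.
 */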
1627static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
1628 struct cipher_speed_template *template,
1629 unsigned int tcount, u8 *keysize)
1630{
1631 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
1632 true);
1633}
1634
1635static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
1636 struct cipher_speed_template *template,
1637 unsigned int tcount, u8 *keysize)
1638{
1639 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
1640 false);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001641}
1642
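/* Report whether each algorithm named in the check[] table is registered. */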
Herbert Xuef2736f2005-06-22 13:26:03 -07001643static void test_available(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07001644{
Corentin Labbe07d8f182019-11-08 15:42:13 +00001645 const char **name = check;
Herbert Xuef2736f2005-06-22 13:26:03 -07001646
Linus Torvalds1da177e2005-04-16 15:20:36 -07001647 while (*name) {
1648 printk("alg %s ", *name);
Herbert Xu6158efc2007-04-04 17:41:07 +10001649 printk(crypto_has_alg(*name, 0, 0) ?
Herbert Xue4d5b792006-08-26 18:12:40 +10001650 "found\n" : "not found\n");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001651 name++;
Herbert Xuef2736f2005-06-22 13:26:03 -07001652 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07001653}
1654
Herbert Xu01b32322008-07-31 15:41:55 +08001655static inline int tcrypt_test(const char *alg)
1656{
Jarod Wilson4e033a62009-05-27 15:10:21 +10001657 int ret;
1658
Rabin Vincent76512f22017-01-18 14:54:05 +01001659 pr_debug("testing %s\n", alg);
1660
Jarod Wilson4e033a62009-05-27 15:10:21 +10001661 ret = alg_test(alg, alg, 0, 0);
1662 /* non-fips algs return -EINVAL in fips mode */
1663 if (fips_enabled && ret == -EINVAL)
1664 ret = 0;
1665 return ret;
Herbert Xu01b32322008-07-31 15:41:55 +08001666}
1667
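/*
 * Dispatch on the "mode" module parameter.  Mode 0 runs every case from
 * 1 to 199; the low-numbered cases run correctness tests via tcrypt_test(),
 * while cases from 200 upwards are speed/benchmark modes.  The module is
 * typically exercised by loading it with the desired parameters, e.g.
 * "modprobe tcrypt mode=200 sec=1" (illustrative invocation; the parameter
 * names match this module's parameters).
 */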
Kees Cook4e234ee2018-04-26 19:57:28 -07001668static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
Herbert Xu01b32322008-07-31 15:41:55 +08001669{
1670 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001671 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001672
1673 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001674 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001675 if (alg) {
1676 if (!crypto_has_alg(alg, type,
1677 mask ?: CRYPTO_ALG_TYPE_MASK))
1678 ret = -ENOENT;
1679 break;
1680 }
1681
Herbert Xu01b32322008-07-31 15:41:55 +08001682 for (i = 1; i < 200; i++)
Kees Cook4e234ee2018-04-26 19:57:28 -07001683 ret += do_test(NULL, 0, 0, i, num_mb);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001684 break;
1685
1686 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001687 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001688 break;
1689
1690 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001691 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001692 break;
1693
1694 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001695 ret += tcrypt_test("ecb(des)");
1696 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001697 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001698 break;
1699
1700 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001701 ret += tcrypt_test("ecb(des3_ede)");
1702 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001703 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001704 break;
1705
1706 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001707 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001708 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001709
Linus Torvalds1da177e2005-04-16 15:20:36 -07001710 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001711 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001712 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001713
Linus Torvalds1da177e2005-04-16 15:20:36 -07001714 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001715 ret += tcrypt_test("ecb(blowfish)");
1716 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001717 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001718 break;
1719
1720 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001721 ret += tcrypt_test("ecb(twofish)");
1722 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001723 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001724 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001725 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001726 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001727
Linus Torvalds1da177e2005-04-16 15:20:36 -07001728 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001729 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001730 ret += tcrypt_test("cbc(serpent)");
1731 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001732 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001733 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001734 break;
1735
1736 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001737 ret += tcrypt_test("ecb(aes)");
1738 ret += tcrypt_test("cbc(aes)");
1739 ret += tcrypt_test("lrw(aes)");
1740 ret += tcrypt_test("xts(aes)");
1741 ret += tcrypt_test("ctr(aes)");
1742 ret += tcrypt_test("rfc3686(ctr(aes))");
Gilad Ben-Yossefdfb89ab2018-09-20 14:18:40 +01001743 ret += tcrypt_test("ofb(aes)");
Dmitry Eremin-Solenikov7da66672018-10-20 02:01:53 +03001744 ret += tcrypt_test("cfb(aes)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001745 break;
1746
1747 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001748 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001749 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001750
Linus Torvalds1da177e2005-04-16 15:20:36 -07001751 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001752 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001753 break;
1754
1755 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001756 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001757 break;
1758
1759 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001760 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001761 ret += tcrypt_test("cbc(cast5)");
1762 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001763 break;
1764
1765 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001766 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001767 ret += tcrypt_test("cbc(cast6)");
1768 ret += tcrypt_test("ctr(cast6)");
1769 ret += tcrypt_test("lrw(cast6)");
1770 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001771 break;
1772
1773 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001774 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001775 break;
1776
1777 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001778 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001779 break;
1780
1781 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001782 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001783 break;
1784
1785 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001786 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001787 break;
1788
1789 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001790 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001791 break;
1792
1793 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001794 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001795 break;
1796
1797 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001798 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001799 break;
1800
1801 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001802 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001803 break;
1804
1805 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001806 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001807 break;
1808
1809 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001810 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001811 break;
1812
1813 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001814 ret += tcrypt_test("ecb(anubis)");
1815 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001816 break;
1817
1818 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001819 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001820 break;
1821
1822 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001823 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001824 break;
1825
1826 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001827 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001828 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001829
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001830 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001831 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001832 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001833
David Howells90831632006-12-16 12:13:14 +11001834 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001835 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001836 break;
1837
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001838 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001839 ret += tcrypt_test("ecb(camellia)");
1840 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001841 ret += tcrypt_test("ctr(camellia)");
1842 ret += tcrypt_test("lrw(camellia)");
1843 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001844 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001845
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001846 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001847 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001848 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001849
Tan Swee Heng2407d602007-11-23 19:45:00 +08001850 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001851 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001852 break;
1853
Herbert Xu8df213d2007-12-02 14:55:47 +11001854 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001855 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001856 break;
1857
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001858 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001859 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001860 break;
1861
Joy Latten93cc74e2007-12-12 20:24:22 +08001862 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001863 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001864 break;
1865
Kevin Coffman76cb9522008-03-24 21:26:16 +08001866 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001867 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001868 break;
1869
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001870 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001871 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001872 break;
1873
1874 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001875 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001876 break;
1877
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001878 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001879 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001880 break;
1881
1882 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001883 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001884 break;
1885
1886 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001887 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001888 break;
1889
Jarod Wilson5d667322009-05-04 19:23:40 +08001890 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001891 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001892 break;
1893
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001894 case 46:
1895 ret += tcrypt_test("ghash");
1896 break;
1897
Herbert Xu684115212013-09-07 12:56:26 +10001898 case 47:
1899 ret += tcrypt_test("crct10dif");
1900 break;
1901
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301902 case 48:
1903 ret += tcrypt_test("sha3-224");
1904 break;
1905
1906 case 49:
1907 ret += tcrypt_test("sha3-256");
1908 break;
1909
1910 case 50:
1911 ret += tcrypt_test("sha3-384");
1912 break;
1913
1914 case 51:
1915 ret += tcrypt_test("sha3-512");
1916 break;
1917
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001918 case 52:
1919 ret += tcrypt_test("sm3");
1920 break;
1921
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03001922 case 53:
1923 ret += tcrypt_test("streebog256");
1924 break;
1925
1926 case 54:
1927 ret += tcrypt_test("streebog512");
1928 break;
1929
Linus Torvalds1da177e2005-04-16 15:20:36 -07001930 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001931 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001932 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001933
Linus Torvalds1da177e2005-04-16 15:20:36 -07001934 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001935 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001936 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001937
Linus Torvalds1da177e2005-04-16 15:20:36 -07001938 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001939 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001940 break;
1941
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001942 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001943 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001944 break;
1945
1946 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001947 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001948 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001949
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001950 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001951 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001952 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001953
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001954 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001955 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001956 break;
1957
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001958 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001959 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001960 break;
1961
1962 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001963 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001964 break;
1965
Shane Wangf1939f72009-09-02 20:05:22 +10001966 case 109:
Eric Biggers0917b872018-06-18 10:22:40 -07001967 ret += tcrypt_test("vmac64(aes)");
Shane Wangf1939f72009-09-02 20:05:22 +10001968 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001969
raveendra padasalagi98eca722016-07-01 11:16:54 +05301970 case 111:
1971 ret += tcrypt_test("hmac(sha3-224)");
1972 break;
1973
1974 case 112:
1975 ret += tcrypt_test("hmac(sha3-256)");
1976 break;
1977
1978 case 113:
1979 ret += tcrypt_test("hmac(sha3-384)");
1980 break;
1981
1982 case 114:
1983 ret += tcrypt_test("hmac(sha3-512)");
1984 break;
1985
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03001986 case 115:
1987 ret += tcrypt_test("hmac(streebog256)");
1988 break;
1989
1990 case 116:
1991 ret += tcrypt_test("hmac(streebog512)");
1992 break;
1993
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001994 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001995 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001996 break;
1997
Adrian Hoban69435b92010-11-04 15:02:04 -04001998 case 151:
1999 ret += tcrypt_test("rfc4106(gcm(aes))");
2000 break;
2001
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03002002 case 152:
2003 ret += tcrypt_test("rfc4543(gcm(aes))");
2004 break;
2005
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03002006 case 153:
2007 ret += tcrypt_test("cmac(aes)");
2008 break;
2009
2010 case 154:
2011 ret += tcrypt_test("cmac(des3_ede)");
2012 break;
2013
Horia Geantabbf9c892013-11-28 15:11:16 +02002014 case 155:
2015 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
2016 break;
2017
Horia Geantabca4feb2014-03-14 17:46:51 +02002018 case 156:
2019 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
2020 break;
2021
2022 case 157:
2023 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
2024 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05302025 case 181:
2026 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
2027 break;
2028 case 182:
2029 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
2030 break;
2031 case 183:
2032 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
2033 break;
2034 case 184:
2035 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
2036 break;
2037 case 185:
2038 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
2039 break;
2040 case 186:
2041 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
2042 break;
2043 case 187:
2044 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
2045 break;
2046 case 188:
2047 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
2048 break;
2049 case 189:
2050 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
2051 break;
2052 case 190:
2053 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
2054 break;
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002055 case 191:
2056 ret += tcrypt_test("ecb(sm4)");
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002057 ret += tcrypt_test("cbc(sm4)");
2058 ret += tcrypt_test("ctr(sm4)");
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002059 break;
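	/*
	 * Modes 200 and up are throughput benchmarks rather than
	 * correctness tests (cipher, AEAD and hash speed templates).
	 */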
Harald Welteebfd9bc2005-06-22 13:27:23 -07002060 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10002061 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002062 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002063 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002064 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002065 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002066 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002067 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002068 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11002069 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002070 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11002071 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002072 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08002073 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002074 speed_template_32_64);
Rik Snelf19f5112007-09-19 20:23:13 +08002075 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002076 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002077 test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2078 speed_template_16_24_32);
2079 test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2080 speed_template_16_24_32);
Jan Glauber9996e342011-04-26 16:34:01 +10002081 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2082 speed_template_16_24_32);
2083 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2084 speed_template_16_24_32);
Dmitry Eremin-Solenikov7da66672018-10-20 02:01:53 +03002085 test_cipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2086 speed_template_16_24_32);
2087 test_cipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2088 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002089 break;
2090
2091 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10002092 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002093 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002094 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002095 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002096 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002097 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002098 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002099 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002100 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002101 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002102 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002103 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03002104 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
2105 des3_speed_template, DES3_SPEED_VECTORS,
2106 speed_template_24);
2107 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
2108 des3_speed_template, DES3_SPEED_VECTORS,
2109 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002110 break;
2111
2112 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10002113 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002114 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002115 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002116 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002117 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002118 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002119 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002120 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03002121 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2122 speed_template_16_24_32);
2123 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2124 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03002125 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2126 speed_template_32_40_48);
2127 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2128 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03002129 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2130 speed_template_32_48_64);
2131 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2132 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002133 break;
2134
2135 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10002136 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002137 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002138 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002139 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002140 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002141 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002142 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002143 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03002144 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2145 speed_template_8_32);
2146 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2147 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002148 break;
2149
2150 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10002151 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002152 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002153 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002154 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002155 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002156 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002157 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002158 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002159 break;
2160
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002161 case 205:
2162 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002163 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002164 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002165 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002166 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002167 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002168 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002169 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02002170 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2171 speed_template_16_24_32);
2172 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2173 speed_template_16_24_32);
2174 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2175 speed_template_32_40_48);
2176 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2177 speed_template_32_40_48);
2178 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2179 speed_template_32_48_64);
2180 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2181 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002182 break;
2183
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002184 case 206:
2185 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002186 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002187 break;
2188
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002189 case 207:
2190 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2191 speed_template_16_32);
2192 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2193 speed_template_16_32);
2194 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2195 speed_template_16_32);
2196 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2197 speed_template_16_32);
2198 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2199 speed_template_16_32);
2200 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2201 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002202 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2203 speed_template_32_48);
2204 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2205 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002206 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2207 speed_template_32_64);
2208 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2209 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002210 break;
2211
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002212 case 208:
2213 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2214 speed_template_8);
2215 break;
2216
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002217 case 209:
2218 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2219 speed_template_8_16);
2220 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2221 speed_template_8_16);
2222 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2223 speed_template_8_16);
2224 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2225 speed_template_8_16);
2226 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2227 speed_template_8_16);
2228 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2229 speed_template_8_16);
2230 break;
2231
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002232 case 210:
2233 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2234 speed_template_16_32);
2235 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2236 speed_template_16_32);
2237 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2238 speed_template_16_32);
2239 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2240 speed_template_16_32);
2241 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2242 speed_template_16_32);
2243 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2244 speed_template_16_32);
2245 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2246 speed_template_32_48);
2247 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2248 speed_template_32_48);
2249 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2250 speed_template_32_64);
2251 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2252 speed_template_32_64);
2253 break;
2254
Tim Chen53f52d72013-12-11 14:28:47 -08002255 case 211:
2256 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002257 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05302258 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01002259 NULL, 0, 16, 8, speed_template_16_24_32);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002260 test_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec,
2261 NULL, 0, 16, 16, aead_speed_template_20);
2262 test_aead_speed("gcm(aes)", DECRYPT, sec,
2263 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08002264 break;
2265
Herbert Xu4e4aab62015-06-17 14:04:21 +08002266 case 212:
2267 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002268 NULL, 0, 16, 16, aead_speed_template_19);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002269 test_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec,
2270 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08002271 break;
2272
Martin Willi2dce0632015-07-16 19:13:59 +02002273 case 213:
2274 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
2275 NULL, 0, 16, 8, aead_speed_template_36);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002276 test_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT, sec,
2277 NULL, 0, 16, 8, aead_speed_template_36);
Martin Willi2dce0632015-07-16 19:13:59 +02002278 break;
2279
2280 case 214:
2281 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
2282 speed_template_32);
2283 break;
2284
Gilad Ben-Yossef427988d2017-12-17 08:29:05 +00002285 case 215:
2286 test_mb_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec, NULL,
2287 0, 16, 16, aead_speed_template_20, num_mb);
2288 test_mb_aead_speed("gcm(aes)", ENCRYPT, sec, NULL, 0, 16, 8,
2289 speed_template_16_24_32, num_mb);
2290 test_mb_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec, NULL,
2291 0, 16, 16, aead_speed_template_20, num_mb);
2292 test_mb_aead_speed("gcm(aes)", DECRYPT, sec, NULL, 0, 16, 8,
2293 speed_template_16_24_32, num_mb);
2294 break;
2295
2296 case 216:
2297 test_mb_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec, NULL, 0,
2298 16, 16, aead_speed_template_19, num_mb);
2299 test_mb_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec, NULL, 0,
2300 16, 16, aead_speed_template_19, num_mb);
2301 break;
2302
2303 case 217:
2304 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT,
2305 sec, NULL, 0, 16, 8, aead_speed_template_36,
2306 num_mb);
2307 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT,
2308 sec, NULL, 0, 16, 8, aead_speed_template_36,
2309 num_mb);
2310 break;
2311
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002312 case 218:
2313 test_cipher_speed("ecb(sm4)", ENCRYPT, sec, NULL, 0,
2314 speed_template_16);
2315 test_cipher_speed("ecb(sm4)", DECRYPT, sec, NULL, 0,
2316 speed_template_16);
2317 test_cipher_speed("cbc(sm4)", ENCRYPT, sec, NULL, 0,
2318 speed_template_16);
2319 test_cipher_speed("cbc(sm4)", DECRYPT, sec, NULL, 0,
2320 speed_template_16);
2321 test_cipher_speed("ctr(sm4)", ENCRYPT, sec, NULL, 0,
2322 speed_template_16);
2323 test_cipher_speed("ctr(sm4)", DECRYPT, sec, NULL, 0,
2324 speed_template_16);
2325 break;
Eric Biggers059c2a42018-11-16 17:26:31 -08002326
2327 case 219:
2328 test_cipher_speed("adiantum(xchacha12,aes)", ENCRYPT, sec, NULL,
2329 0, speed_template_32);
2330 test_cipher_speed("adiantum(xchacha12,aes)", DECRYPT, sec, NULL,
2331 0, speed_template_32);
2332 test_cipher_speed("adiantum(xchacha20,aes)", ENCRYPT, sec, NULL,
2333 0, speed_template_32);
2334 test_cipher_speed("adiantum(xchacha20,aes)", DECRYPT, sec, NULL,
2335 0, speed_template_32);
2336 break;
2337
Ard Biesheuvelf975abb2019-08-19 17:17:34 +03002338 case 220:
2339 test_acipher_speed("essiv(cbc(aes),sha256)",
2340 ENCRYPT, sec, NULL, 0,
2341 speed_template_16_24_32);
2342 test_acipher_speed("essiv(cbc(aes),sha256)",
2343 DECRYPT, sec, NULL, 0,
2344 speed_template_16_24_32);
2345 break;
2346
Ard Biesheuvel97bcb162019-07-03 10:55:12 +02002347 case 221:
2348 test_aead_speed("aegis128", ENCRYPT, sec,
2349 NULL, 0, 16, 8, speed_template_16);
2350 test_aead_speed("aegis128", DECRYPT, sec,
2351 NULL, 0, 16, 8, speed_template_16);
2352 break;
2353
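	/*
	 * Modes 300-399: hash speed tests.  Mode 300 with alg= set times
	 * only that algorithm; mode 300 without alg= falls through and
	 * times every digest in the list, while a specific mode in
	 * 301-398 times just its own digest.
	 */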
Michal Ludvige8057922006-05-30 22:04:19 +10002354 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08002355 if (alg) {
2356 test_hash_speed(alg, sec, generic_hash_speed_template);
2357 break;
2358 }
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002359 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002360 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10002361 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002362 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002363 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002364 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10002365 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002366 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002367 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002368 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10002369 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002370 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002371 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002372 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10002373 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002374 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002375 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002376 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10002377 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002378 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002379 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002380 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10002381 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002382 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002383 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002384 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10002385 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002386 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002387 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002388 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10002389 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002390 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002391 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002392 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10002393 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002394 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002395 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002396 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10002397 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002398 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002399 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002400 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10002401 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002402 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002403 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002404 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10002405 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002406 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002407 fallthrough;
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08002408 case 313:
2409 test_hash_speed("sha224", sec, generic_hash_speed_template);
2410 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002411 fallthrough;
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002412 case 314:
2413 test_hash_speed("rmd128", sec, generic_hash_speed_template);
2414 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002415 fallthrough;
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002416 case 315:
2417 test_hash_speed("rmd160", sec, generic_hash_speed_template);
2418 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002419 fallthrough;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002420 case 316:
2421 test_hash_speed("rmd256", sec, generic_hash_speed_template);
2422 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002423 fallthrough;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002424 case 317:
2425 test_hash_speed("rmd320", sec, generic_hash_speed_template);
2426 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002427 fallthrough;
Huang Ying18bcc912010-03-10 18:30:32 +08002428 case 318:
Herbert Xuba974ad2020-08-05 15:57:08 +10002429 klen = 16;
2430 test_hash_speed("ghash", sec, generic_hash_speed_template);
Huang Ying18bcc912010-03-10 18:30:32 +08002431 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002432 fallthrough;
Tim Chene3899e42012-09-27 15:44:24 -07002433 case 319:
2434 test_hash_speed("crc32c", sec, generic_hash_speed_template);
2435 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002436 fallthrough;
Herbert Xu684115212013-09-07 12:56:26 +10002437 case 320:
2438 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
2439 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002440 fallthrough;
Martin Willi2dce0632015-07-16 19:13:59 +02002441 case 321:
2442 test_hash_speed("poly1305", sec, poly1305_speed_template);
2443 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002444 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302445 case 322:
2446 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
2447 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002448 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302449 case 323:
2450 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
2451 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002452 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302453 case 324:
2454 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
2455 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002456 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302457 case 325:
2458 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
2459 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002460 fallthrough;
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002461 case 326:
2462 test_hash_speed("sm3", sec, generic_hash_speed_template);
2463 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002464 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002465 case 327:
2466 test_hash_speed("streebog256", sec,
2467 generic_hash_speed_template);
2468 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002469 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002470 case 328:
2471 test_hash_speed("streebog512", sec,
2472 generic_hash_speed_template);
2473 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002474 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002475 case 399:
2476 break;
2477
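	/*
	 * Modes 400-499: ahash (asynchronous hash) speed tests, with the
	 * same fall-through pattern as the 300 range.
	 */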
David S. Millerbeb63da2010-05-19 14:11:21 +10002478 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08002479 if (alg) {
2480 test_ahash_speed(alg, sec, generic_hash_speed_template);
2481 break;
2482 }
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002483 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002484 case 401:
2485 test_ahash_speed("md4", sec, generic_hash_speed_template);
2486 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002487 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002488 case 402:
2489 test_ahash_speed("md5", sec, generic_hash_speed_template);
2490 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002491 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002492 case 403:
2493 test_ahash_speed("sha1", sec, generic_hash_speed_template);
2494 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002495 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002496 case 404:
2497 test_ahash_speed("sha256", sec, generic_hash_speed_template);
2498 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002499 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002500 case 405:
2501 test_ahash_speed("sha384", sec, generic_hash_speed_template);
2502 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002503 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002504 case 406:
2505 test_ahash_speed("sha512", sec, generic_hash_speed_template);
2506 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002507 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002508 case 407:
2509 test_ahash_speed("wp256", sec, generic_hash_speed_template);
2510 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002511 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002512 case 408:
2513 test_ahash_speed("wp384", sec, generic_hash_speed_template);
2514 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002515 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002516 case 409:
2517 test_ahash_speed("wp512", sec, generic_hash_speed_template);
2518 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002519 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002520 case 410:
2521 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
2522 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002523 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002524 case 411:
2525 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
2526 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002527 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002528 case 412:
2529 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
2530 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002531 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002532 case 413:
2533 test_ahash_speed("sha224", sec, generic_hash_speed_template);
2534 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002535 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002536 case 414:
2537 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
2538 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002539 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002540 case 415:
2541 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
2542 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002543 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002544 case 416:
2545 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
2546 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002547 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002548 case 417:
2549 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
2550 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002551 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302552 case 418:
2553 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
2554 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002555 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302556 case 419:
2557 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
2558 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002559 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302560 case 420:
2561 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
2562 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002563 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302564 case 421:
2565 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
2566 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002567 fallthrough;
Megha Dey087bcd22016-06-23 18:40:47 -07002568 case 422:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002569 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template,
2570 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002571 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002572 fallthrough;
Megha Dey087bcd22016-06-23 18:40:47 -07002573 case 423:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002574 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template,
2575 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002576 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002577 fallthrough;
Megha Dey14009c42016-06-27 10:20:09 -07002578 case 424:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002579 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template,
2580 num_mb);
Megha Dey14009c42016-06-27 10:20:09 -07002581 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002582 fallthrough;
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002583 case 425:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002584 test_mb_ahash_speed("sm3", sec, generic_hash_speed_template,
2585 num_mb);
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002586 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002587 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002588 case 426:
2589 test_mb_ahash_speed("streebog256", sec,
2590 generic_hash_speed_template, num_mb);
2591 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002592 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002593 case 427:
2594 test_mb_ahash_speed("streebog512", sec,
2595 generic_hash_speed_template, num_mb);
2596 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002597 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002598 case 499:
2599 break;
2600
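	/*
	 * Modes 500 and up: skcipher speed tests that permit asynchronous
	 * implementations (test_acipher_speed()).
	 */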
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002601 case 500:
2602 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2603 speed_template_16_24_32);
2604 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2605 speed_template_16_24_32);
2606 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2607 speed_template_16_24_32);
2608 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2609 speed_template_16_24_32);
2610 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2611 speed_template_32_40_48);
2612 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2613 speed_template_32_40_48);
2614 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002615 speed_template_32_64);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002616 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002617 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002618 test_acipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2619 speed_template_16_24_32);
2620 test_acipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2621 speed_template_16_24_32);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002622 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2623 speed_template_16_24_32);
2624 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2625 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02002626 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2627 speed_template_16_24_32);
2628 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2629 speed_template_16_24_32);
2630 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2631 speed_template_16_24_32);
2632 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2633 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02002634 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
2635 speed_template_20_28_36);
2636 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
2637 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002638 break;
2639
2640 case 501:
2641 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2642 des3_speed_template, DES3_SPEED_VECTORS,
2643 speed_template_24);
2644 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
2645 des3_speed_template, DES3_SPEED_VECTORS,
2646 speed_template_24);
2647 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2648 des3_speed_template, DES3_SPEED_VECTORS,
2649 speed_template_24);
2650 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
2651 des3_speed_template, DES3_SPEED_VECTORS,
2652 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02002653 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2654 des3_speed_template, DES3_SPEED_VECTORS,
2655 speed_template_24);
2656 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
2657 des3_speed_template, DES3_SPEED_VECTORS,
2658 speed_template_24);
2659 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2660 des3_speed_template, DES3_SPEED_VECTORS,
2661 speed_template_24);
2662 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2663 des3_speed_template, DES3_SPEED_VECTORS,
2664 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002665 break;
2666
2667 case 502:
2668 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2669 speed_template_8);
2670 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2671 speed_template_8);
2672 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2673 speed_template_8);
2674 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2675 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002676 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2677 speed_template_8);
2678 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2679 speed_template_8);
2680 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2681 speed_template_8);
2682 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2683 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002684 break;
2685
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002686 case 503:
2687 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2688 speed_template_16_32);
2689 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2690 speed_template_16_32);
2691 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2692 speed_template_16_32);
2693 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2694 speed_template_16_32);
2695 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2696 speed_template_16_32);
2697 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2698 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002699 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2700 speed_template_32_48);
2701 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2702 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002703 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2704 speed_template_32_64);
2705 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2706 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002707 break;
2708
Johannes Goetzfried107778b52012-05-28 15:54:24 +02002709 case 504:
2710 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2711 speed_template_16_24_32);
2712 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2713 speed_template_16_24_32);
2714 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2715 speed_template_16_24_32);
2716 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2717 speed_template_16_24_32);
2718 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2719 speed_template_16_24_32);
2720 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2721 speed_template_16_24_32);
2722 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2723 speed_template_32_40_48);
2724 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2725 speed_template_32_40_48);
2726 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2727 speed_template_32_48_64);
2728 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2729 speed_template_32_48_64);
2730 break;
2731
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002732 case 505:
2733 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2734 speed_template_8);
2735 break;
2736
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002737 case 506:
2738 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2739 speed_template_8_16);
2740 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2741 speed_template_8_16);
2742 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2743 speed_template_8_16);
2744 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2745 speed_template_8_16);
2746 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2747 speed_template_8_16);
2748 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2749 speed_template_8_16);
2750 break;
2751
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002752 case 507:
2753 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2754 speed_template_16_32);
2755 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2756 speed_template_16_32);
2757 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2758 speed_template_16_32);
2759 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2760 speed_template_16_32);
2761 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2762 speed_template_16_32);
2763 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2764 speed_template_16_32);
2765 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2766 speed_template_32_48);
2767 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2768 speed_template_32_48);
2769 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2770 speed_template_32_64);
2771 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2772 speed_template_32_64);
2773 break;
2774
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002775 case 508:
2776 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2777 speed_template_16_32);
2778 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2779 speed_template_16_32);
2780 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2781 speed_template_16_32);
2782 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2783 speed_template_16_32);
2784 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2785 speed_template_16_32);
2786 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2787 speed_template_16_32);
2788 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2789 speed_template_32_48);
2790 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2791 speed_template_32_48);
2792 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2793 speed_template_32_64);
2794 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2795 speed_template_32_64);
2796 break;
2797
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002798 case 509:
2799 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2800 speed_template_8_32);
2801 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2802 speed_template_8_32);
2803 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2804 speed_template_8_32);
2805 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2806 speed_template_8_32);
2807 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2808 speed_template_8_32);
2809 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2810 speed_template_8_32);
2811 break;
2812
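	/*
	 * Cases 600-609 mirror the 50x test_acipher_speed() runs above, but
	 * use test_mb_skcipher_speed() to submit num_mb requests in parallel
	 * (num_mb is a module parameter, default 8).
	 */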
Gilad Ben-Yossefe161c592017-12-17 08:29:04 +00002813 case 600:
2814 test_mb_skcipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2815 speed_template_16_24_32, num_mb);
2816 test_mb_skcipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2817 speed_template_16_24_32, num_mb);
2818 test_mb_skcipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2819 speed_template_16_24_32, num_mb);
2820 test_mb_skcipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2821 speed_template_16_24_32, num_mb);
2822 test_mb_skcipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2823 speed_template_32_40_48, num_mb);
2824 test_mb_skcipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2825 speed_template_32_40_48, num_mb);
2826 test_mb_skcipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
2827 speed_template_32_64, num_mb);
2828 test_mb_skcipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
2829 speed_template_32_64, num_mb);
2830 test_mb_skcipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2831 speed_template_16_24_32, num_mb);
2832 test_mb_skcipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2833 speed_template_16_24_32, num_mb);
2834 test_mb_skcipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2835 speed_template_16_24_32, num_mb);
2836 test_mb_skcipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2837 speed_template_16_24_32, num_mb);
2838 test_mb_skcipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2839 speed_template_16_24_32, num_mb);
2840 test_mb_skcipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2841 speed_template_16_24_32, num_mb);
2842 test_mb_skcipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2843 speed_template_16_24_32, num_mb);
2844 test_mb_skcipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2845 speed_template_16_24_32, num_mb);
2846 test_mb_skcipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL,
2847 0, speed_template_20_28_36, num_mb);
2848 test_mb_skcipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL,
2849 0, speed_template_20_28_36, num_mb);
2850 break;
2851
2852 case 601:
2853 test_mb_skcipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2854 des3_speed_template, DES3_SPEED_VECTORS,
2855 speed_template_24, num_mb);
2856 test_mb_skcipher_speed("ecb(des3_ede)", DECRYPT, sec,
2857 des3_speed_template, DES3_SPEED_VECTORS,
2858 speed_template_24, num_mb);
2859 test_mb_skcipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2860 des3_speed_template, DES3_SPEED_VECTORS,
2861 speed_template_24, num_mb);
2862 test_mb_skcipher_speed("cbc(des3_ede)", DECRYPT, sec,
2863 des3_speed_template, DES3_SPEED_VECTORS,
2864 speed_template_24, num_mb);
2865 test_mb_skcipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2866 des3_speed_template, DES3_SPEED_VECTORS,
2867 speed_template_24, num_mb);
2868 test_mb_skcipher_speed("cfb(des3_ede)", DECRYPT, sec,
2869 des3_speed_template, DES3_SPEED_VECTORS,
2870 speed_template_24, num_mb);
2871 test_mb_skcipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2872 des3_speed_template, DES3_SPEED_VECTORS,
2873 speed_template_24, num_mb);
2874 test_mb_skcipher_speed("ofb(des3_ede)", DECRYPT, sec,
2875 des3_speed_template, DES3_SPEED_VECTORS,
2876 speed_template_24, num_mb);
2877 break;
2878
2879 case 602:
2880 test_mb_skcipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2881 speed_template_8, num_mb);
2882 test_mb_skcipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2883 speed_template_8, num_mb);
2884 test_mb_skcipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2885 speed_template_8, num_mb);
2886 test_mb_skcipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2887 speed_template_8, num_mb);
2888 test_mb_skcipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2889 speed_template_8, num_mb);
2890 test_mb_skcipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2891 speed_template_8, num_mb);
2892 test_mb_skcipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2893 speed_template_8, num_mb);
2894 test_mb_skcipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2895 speed_template_8, num_mb);
2896 break;
2897
2898 case 603:
2899 test_mb_skcipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2900 speed_template_16_32, num_mb);
2901 test_mb_skcipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2902 speed_template_16_32, num_mb);
2903 test_mb_skcipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2904 speed_template_16_32, num_mb);
2905 test_mb_skcipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2906 speed_template_16_32, num_mb);
2907 test_mb_skcipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2908 speed_template_16_32, num_mb);
2909 test_mb_skcipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2910 speed_template_16_32, num_mb);
2911 test_mb_skcipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2912 speed_template_32_48, num_mb);
2913 test_mb_skcipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2914 speed_template_32_48, num_mb);
2915 test_mb_skcipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2916 speed_template_32_64, num_mb);
2917 test_mb_skcipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2918 speed_template_32_64, num_mb);
2919 break;
2920
2921 case 604:
2922 test_mb_skcipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2923 speed_template_16_24_32, num_mb);
2924 test_mb_skcipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2925 speed_template_16_24_32, num_mb);
2926 test_mb_skcipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2927 speed_template_16_24_32, num_mb);
2928 test_mb_skcipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2929 speed_template_16_24_32, num_mb);
2930 test_mb_skcipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2931 speed_template_16_24_32, num_mb);
2932 test_mb_skcipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2933 speed_template_16_24_32, num_mb);
2934 test_mb_skcipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2935 speed_template_32_40_48, num_mb);
2936 test_mb_skcipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2937 speed_template_32_40_48, num_mb);
2938 test_mb_skcipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2939 speed_template_32_48_64, num_mb);
2940 test_mb_skcipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2941 speed_template_32_48_64, num_mb);
2942 break;
2943
2944 case 605:
2945 test_mb_skcipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2946 speed_template_8, num_mb);
2947 break;
2948
2949 case 606:
2950 test_mb_skcipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2951 speed_template_8_16, num_mb);
2952 test_mb_skcipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2953 speed_template_8_16, num_mb);
2954 test_mb_skcipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2955 speed_template_8_16, num_mb);
2956 test_mb_skcipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2957 speed_template_8_16, num_mb);
2958 test_mb_skcipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2959 speed_template_8_16, num_mb);
2960 test_mb_skcipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2961 speed_template_8_16, num_mb);
2962 break;
2963
2964 case 607:
2965 test_mb_skcipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2966 speed_template_16_32, num_mb);
2967 test_mb_skcipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2968 speed_template_16_32, num_mb);
2969 test_mb_skcipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2970 speed_template_16_32, num_mb);
2971 test_mb_skcipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2972 speed_template_16_32, num_mb);
2973 test_mb_skcipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2974 speed_template_16_32, num_mb);
2975 test_mb_skcipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2976 speed_template_16_32, num_mb);
2977 test_mb_skcipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2978 speed_template_32_48, num_mb);
2979 test_mb_skcipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2980 speed_template_32_48, num_mb);
2981 test_mb_skcipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2982 speed_template_32_64, num_mb);
2983 test_mb_skcipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2984 speed_template_32_64, num_mb);
2985 break;
2986
2987 case 608:
2988 test_mb_skcipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2989 speed_template_16_32, num_mb);
2990 test_mb_skcipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2991 speed_template_16_32, num_mb);
2992 test_mb_skcipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2993 speed_template_16_32, num_mb);
2994 test_mb_skcipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2995 speed_template_16_32, num_mb);
2996 test_mb_skcipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2997 speed_template_16_32, num_mb);
2998 test_mb_skcipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2999 speed_template_16_32, num_mb);
3000 test_mb_skcipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
3001 speed_template_32_48, num_mb);
3002 test_mb_skcipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
3003 speed_template_32_48, num_mb);
3004 test_mb_skcipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
3005 speed_template_32_64, num_mb);
3006 test_mb_skcipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
3007 speed_template_32_64, num_mb);
3008 break;
3009
3010 case 609:
3011 test_mb_skcipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
3012 speed_template_8_32, num_mb);
3013 test_mb_skcipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
3014 speed_template_8_32, num_mb);
3015 test_mb_skcipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
3016 speed_template_8_32, num_mb);
3017 test_mb_skcipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
3018 speed_template_8_32, num_mb);
3019 test_mb_skcipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
3020 speed_template_8_32, num_mb);
3021 test_mb_skcipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
3022 speed_template_8_32, num_mb);
3023 break;
3024
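	/* 1000: only probe algorithm availability via test_available(); no speed runs. */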
Linus Torvalds1da177e2005-04-16 15:20:36 -07003025 case 1000:
3026 test_available();
3027 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003028 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10003029
3030 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003031}
3032
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003033static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07003034{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003035 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08003036 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003037
Herbert Xuf139cfa2008-07-31 12:23:53 +08003038 for (i = 0; i < TVMEMSIZE; i++) {
3039 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
3040 if (!tvmem[i])
3041 goto err_free_tv;
3042 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003043
Kees Cook4e234ee2018-04-26 19:57:28 -07003044 err = do_test(alg, type, mask, mode, num_mb);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08003045
Jarod Wilson4e033a62009-05-27 15:10:21 +10003046 if (err) {
3047		pr_err("one or more tests failed!\n");
3048 goto err_free_tv;
Rabin Vincent76512f22017-01-18 14:54:05 +01003049 } else {
3050 pr_debug("all tests passed\n");
Jarod Wilson4e033a62009-05-27 15:10:21 +10003051 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003052
Jarod Wilson4e033a62009-05-27 15:10:21 +10003053	/* We intentionally return -EAGAIN to prevent keeping the module loaded,
3054 * unless we're running in fips mode. It does all its work from
3055 * init() and doesn't offer any runtime functionality, but in
3056 * the fips case, checking for a successful load is helpful.
Michal Ludvig14fdf472006-05-30 14:49:38 +10003057	 * => we don't need to keep it in memory, do we?
3058 * -- mludvig
3059 */
Jarod Wilson4e033a62009-05-27 15:10:21 +10003060 if (!fips_enabled)
3061 err = -EAGAIN;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003062
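	/* Reached on success and failure alike: release the tvmem scratch pages. */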
Herbert Xuf139cfa2008-07-31 12:23:53 +08003063err_free_tv:
3064 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
3065 free_page((unsigned long)tvmem[i]);
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003066
3067 return err;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003068}
3069
3070/*
3071 * If an init function is provided, an exit function must also be provided
3072 * to allow module unload.
3073 */
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003074static void __exit tcrypt_mod_fini(void) { }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003075
Ard Biesheuvel08a7e332020-11-20 12:04:31 +01003076late_initcall(tcrypt_mod_init);
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003077module_exit(tcrypt_mod_fini);
Linus Torvalds1da177e2005-04-16 15:20:36 -07003078
Steffen Klasserta873a5f2009-06-19 19:46:53 +08003079module_param(alg, charp, 0);
3080module_param(type, uint, 0);
Herbert Xu7be380f2009-07-14 16:06:54 +08003081module_param(mask, uint, 0);
Linus Torvalds1da177e2005-04-16 15:20:36 -07003082module_param(mode, int, 0);
Harald Welteebfd9bc2005-06-22 13:27:23 -07003083module_param(sec, uint, 0);
Herbert Xu6a179442005-06-22 13:29:03 -07003084MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
3085 "(defaults to zero which uses CPU cycles instead)");
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00003086module_param(num_mb, uint, 0000);
3087MODULE_PARM_DESC(num_mb, "Number of concurrent requests to be used in mb speed tests (defaults to 8)");
Herbert Xuba974ad2020-08-05 15:57:08 +10003088module_param(klen, uint, 0);
3089MODULE_PARM_DESC(klen, "Key length (defaults to 0)");
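/*
 * Illustrative usage (an assumed invocation, not part of the module): the
 * tests are selected with the "mode" parameter; e.g. mode=600 runs the
 * multibuffer AES skcipher speed tests for "sec" seconds with "num_mb"
 * parallel requests:
 *
 *   modprobe tcrypt mode=600 sec=1 num_mb=8
 *
 * Since tcrypt_mod_init() deliberately returns -EAGAIN outside fips mode,
 * the module does not stay loaded; the results appear in the kernel log.
 */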
Linus Torvalds1da177e2005-04-16 15:20:36 -07003090
3091MODULE_LICENSE("GPL");
3092MODULE_DESCRIPTION("Quick & dirty crypto testing module");
3093MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");