Lines matching refs: i
80 int i; in testmgr_alloc_buf() local
82 for (i = 0; i < XBUFSIZE; i++) { in testmgr_alloc_buf()
83 buf[i] = (void *)__get_free_page(GFP_KERNEL); in testmgr_alloc_buf()
84 if (!buf[i]) in testmgr_alloc_buf()
91 while (i-- > 0) in testmgr_alloc_buf()
92 free_page((unsigned long)buf[i]); in testmgr_alloc_buf()
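The fragments above (lines 80-92) are the per-test page allocator. A minimal sketch of how they fit together, reconstructed from the listed lines; the error label name and the -ENOMEM return are assumptions not visible in the listing:

    /* Allocate XBUFSIZE one-page buffers, unwinding on partial failure. */
    static int testmgr_alloc_buf(char *buf[XBUFSIZE])
    {
        int i;

        for (i = 0; i < XBUFSIZE; i++) {
            buf[i] = (void *)__get_free_page(GFP_KERNEL);
            if (!buf[i])
                goto err_free_buf;              /* label name assumed */
        }

        return 0;

    err_free_buf:
        /* i indexes the slot that failed; free everything before it. */
        while (i-- > 0)
            free_page((unsigned long)buf[i]);

        return -ENOMEM;                         /* return value assumed */
    }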
99 int i; in testmgr_free_buf() local
101 for (i = 0; i < XBUFSIZE; i++) in testmgr_free_buf()
102 free_page((unsigned long)buf[i]); in testmgr_free_buf()
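The matching teardown walks the same array unconditionally; a sketch assuming the same char *buf[XBUFSIZE] parameter as above:

    static void testmgr_free_buf(char *buf[XBUFSIZE])
    {
        int i;

        for (i = 0; i < XBUFSIZE; i++)
            free_page((unsigned long)buf[i]);
    }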
152 int i, err = 0; in do_mult_aead_op() local
155 for (i = 0; i < num_mb; i++) { in do_mult_aead_op()
157 rc[i] = crypto_aead_encrypt(data[i].req); in do_mult_aead_op()
159 rc[i] = crypto_aead_decrypt(data[i].req); in do_mult_aead_op()
163 for (i = 0; i < num_mb; i++) { in do_mult_aead_op()
164 rc[i] = crypto_wait_req(rc[i], &data[i].wait); in do_mult_aead_op()
166 if (rc[i]) { in do_mult_aead_op()
167 pr_info("concurrent request %d error %d\n", i, rc[i]); in do_mult_aead_op()
168 err = rc[i]; in do_mult_aead_op()
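These lines show the multi-buffer dispatch idiom: submit all num_mb requests back to back, then collect each result with crypto_wait_req() so asynchronous -EINPROGRESS/-EBUSY returns are resolved, remembering the last failure. A sketch under the assumption of a struct test_mb_aead_data that pairs each request with a crypto_wait, plus an enc flag selecting encrypt vs. decrypt (both names are assumptions):

    static int do_mult_aead_op(struct test_mb_aead_data *data, u32 num_mb,
                               int *rc, bool enc)
    {
        int i, err = 0;

        /* Fire off all requests without waiting in between. */
        for (i = 0; i < num_mb; i++) {
            if (enc)
                rc[i] = crypto_aead_encrypt(data[i].req);
            else
                rc[i] = crypto_aead_decrypt(data[i].req);
        }

        /* Now wait for every request and record the last failure. */
        for (i = 0; i < num_mb; i++) {
            rc[i] = crypto_wait_req(rc[i], &data[i].wait);
            if (rc[i]) {
                pr_info("concurrent request %d error %d\n", i, rc[i]);
                err = rc[i];
            }
        }

        return err;
    }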
207 int i; in test_mb_aead_cycles() local
215 for (i = 0; i < 4; i++) { in test_mb_aead_cycles()
222 for (i = 0; i < 8; i++) { in test_mb_aead_cycles()
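test_mb_aead_cycles() shows tcrypt's usual cycle-measurement shape: four untimed warm-up rounds followed by eight timed rounds. A sketch reusing the do_mult_aead_op() signature assumed above; the out label belongs to the enclosing function and the reporting format is illustrative only:

    cycles_t cycles = 0;
    int ret, i;

    /* Warm-up: prime caches and key schedules, results discarded. */
    for (i = 0; i < 4; i++) {
        ret = do_mult_aead_op(data, num_mb, rc, enc);
        if (ret)
            goto out;
    }

    /* Timed rounds: accumulate the cycle delta over 8 runs. */
    for (i = 0; i < 8; i++) {
        cycles_t start = get_cycles(), end;

        ret = do_mult_aead_op(data, num_mb, rc, enc);
        end = get_cycles();
        if (ret)
            goto out;
        cycles += end - start;
    }

    pr_cont("1 operation in %lu cycles\n",
            (unsigned long)cycles / (8 * num_mb));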
250 unsigned int i, j, iv_len; in test_mb_aead_speed() local
291 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
292 if (testmgr_alloc_buf(data[i].xbuf)) { in test_mb_aead_speed()
293 while (i--) in test_mb_aead_speed()
294 testmgr_free_buf(data[i].xbuf); in test_mb_aead_speed()
298 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
299 if (testmgr_alloc_buf(data[i].axbuf)) { in test_mb_aead_speed()
300 while (i--) in test_mb_aead_speed()
301 testmgr_free_buf(data[i].axbuf); in test_mb_aead_speed()
305 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
306 if (testmgr_alloc_buf(data[i].xoutbuf)) { in test_mb_aead_speed()
307 while (i--) in test_mb_aead_speed()
308 testmgr_free_buf(data[i].xoutbuf); in test_mb_aead_speed()
312 for (i = 0; i < num_mb; ++i) { in test_mb_aead_speed()
313 data[i].req = aead_request_alloc(tfm, GFP_KERNEL); in test_mb_aead_speed()
314 if (!data[i].req) { in test_mb_aead_speed()
317 while (i--) in test_mb_aead_speed()
318 aead_request_free(data[i].req); in test_mb_aead_speed()
323 for (i = 0; i < num_mb; ++i) { in test_mb_aead_speed()
324 crypto_init_wait(&data[i].wait); in test_mb_aead_speed()
325 aead_request_set_callback(data[i].req, in test_mb_aead_speed()
327 crypto_req_done, &data[i].wait); in test_mb_aead_speed()
333 i = 0; in test_mb_aead_speed()
346 pr_info("test %u (%d bit key, %d byte blocks): ", i, in test_mb_aead_speed()
428 i++; in test_mb_aead_speed()
434 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
435 aead_request_free(data[i].req); in test_mb_aead_speed()
437 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
438 testmgr_free_buf(data[i].xoutbuf); in test_mb_aead_speed()
440 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
441 testmgr_free_buf(data[i].axbuf); in test_mb_aead_speed()
443 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
444 testmgr_free_buf(data[i].xbuf); in test_mb_aead_speed()
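The long run of lines from test_mb_aead_speed() (291-444) is dominated by one idiom: allocate a per-channel resource for each of the num_mb contexts and, on failure, roll back only the i entries already set up with while (i--), then free everything in the same order at the end. A sketch of the setup half; the goto labels, the CRYPTO_TFM_REQ_MAY_BACKLOG callback flag, and the error message wording are assumptions not visible in the listing:

    /* Per-channel buffers: unwind exactly what was allocated so far. */
    for (i = 0; i < num_mb; ++i)
        if (testmgr_alloc_buf(data[i].xbuf)) {
            while (i--)
                testmgr_free_buf(data[i].xbuf);
            goto out_free_data;                 /* label assumed */
        }

    /* One request per channel, completed through a crypto_wait. */
    for (i = 0; i < num_mb; ++i) {
        data[i].req = aead_request_alloc(tfm, GFP_KERNEL);
        if (!data[i].req) {
            pr_err("alg: aead: Failed to allocate request\n");
            while (i--)
                aead_request_free(data[i].req);
            goto out_free_xoutbuf;              /* label assumed */
        }
    }

    for (i = 0; i < num_mb; ++i) {
        crypto_init_wait(&data[i].wait);
        aead_request_set_callback(data[i].req,
                                  CRYPTO_TFM_REQ_MAY_BACKLOG,
                                  crypto_req_done, &data[i].wait);
    }

The same pattern repeats for axbuf and xoutbuf (lines 298-308), and the free path at lines 434-444 simply mirrors it in full, since by then every entry is known to be valid.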
480 int i; in test_aead_cycles() local
483 for (i = 0; i < 4; i++) { in test_aead_cycles()
494 for (i = 0; i < 8; i++) { in test_aead_cycles()
523 unsigned int i, j; in test_aead_speed() local
594 i = 0; in test_aead_speed()
631 i, *keysize * 8, bs); in test_aead_speed()
680 i++; in test_aead_speed()
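In test_aead_speed() the listed lines show i used purely as a running test counter: zeroed before the key/block-size loops, printed in each per-test banner (line 631), and incremented once per combination. A minimal sketch of that shape, assuming hypothetical keysize and block_sizes template arrays terminated by zero entries and leaving the actual measurement abstract:

    i = 0;
    do {
        b_size = block_sizes;                   /* hypothetical table */
        do {
            u32 bs = *b_size;

            pr_info("test %u (%d bit key, %d byte blocks): ",
                    i, *keysize * 8, bs);

            /* ... set up scatterlists, then run the jiffies- or
             * cycles-based measurement for this combination ... */

            b_size++;
            i++;
        } while (*b_size);
        keysize++;
    } while (*keysize);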
703 int i; in test_hash_sg_init() local
706 for (i = 0; i < TVMEMSIZE; i++) { in test_hash_sg_init()
707 sg_set_buf(sg + i, tvmem[i], PAGE_SIZE); in test_hash_sg_init()
708 memset(tvmem[i], 0xff, PAGE_SIZE); in test_hash_sg_init()
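test_hash_sg_init() maps the module's tvmem[] pages (allocated in tcrypt_mod_init(), listed at the bottom) into a scatterlist and fills them with a fixed 0xff pattern so hash timings run over deterministic data. A sketch; the sg_init_table() call is an assumption, since it does not reference i and therefore would not appear in this listing:

    static void test_hash_sg_init(struct scatterlist *sg)
    {
        int i;

        sg_init_table(sg, TVMEMSIZE);           /* assumed */
        for (i = 0; i < TVMEMSIZE; i++) {
            sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
            memset(tvmem[i], 0xff, PAGE_SIZE);
        }
    }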
775 int ret, i; in test_ahash_cycles_digest() local
778 for (i = 0; i < 4; i++) { in test_ahash_cycles_digest()
785 for (i = 0; i < 8; i++) { in test_ahash_cycles_digest()
813 int i, pcount, ret; in test_ahash_cycles() local
819 for (i = 0; i < 4; i++) { in test_ahash_cycles()
834 for (i = 0; i < 8; i++) { in test_ahash_cycles()
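test_ahash_cycles_digest() and test_ahash_cycles() follow the same 4 warm-up / 8 timed split seen earlier; the difference is that the former times crypto_ahash_digest() as a single shot, while the latter times a full init/update/final sequence, feeding plen bytes per update until blen bytes are hashed. A sketch of one timed round of the chunked path: do_one_ahash_op() is assumed to be a small wrapper resolving async returns via crypto_wait_req(), and start, end, cycles, pcount, blen, plen, and the out label come from the enclosing function; the warm-up rounds repeat the same calls untimed:

    start = get_cycles();

    ret = do_one_ahash_op(req, crypto_ahash_init(req));     /* wrapper assumed */
    if (ret)
        goto out;
    for (pcount = 0; pcount < blen; pcount += plen) {
        ret = do_one_ahash_op(req, crypto_ahash_update(req));
        if (ret)
            goto out;
    }
    ret = do_one_ahash_op(req, crypto_ahash_final(req));
    if (ret)
        goto out;

    end = get_cycles();
    cycles += end - start;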
874 int i, ret; in test_ahash_speed_common() local
907 for (i = 0; speed[i].blen != 0; i++) { in test_ahash_speed_common()
908 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) { in test_ahash_speed_common()
910 speed[i].blen, TVMEMSIZE * PAGE_SIZE); in test_ahash_speed_common()
919 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen); in test_ahash_speed_common()
921 ahash_request_set_crypt(req, sg, output, speed[i].plen); in test_ahash_speed_common()
924 ret = test_ahash_jiffies(req, speed[i].blen, in test_ahash_speed_common()
925 speed[i].plen, output, secs); in test_ahash_speed_common()
928 ret = test_ahash_cycles(req, speed[i].blen, in test_ahash_speed_common()
929 speed[i].plen, output); in test_ahash_speed_common()
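test_ahash_speed_common() drives everything from a speed[] template terminated by a zero blen: each entry is sanity-checked against the tvmem capacity, announced, bound to the request with ahash_request_set_crypt(), and then measured either in jiffies or in cycles (both helper calls appear in the listing). A sketch of that loop; the secs check, the error handling, and the exact message wording are assumptions:

    for (i = 0; speed[i].blen != 0; i++) {
        if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
            pr_err("template (%u) too big for tvmem (%lu)\n",
                   speed[i].blen, TVMEMSIZE * PAGE_SIZE);
            break;
        }

        pr_info("test%3u "
                "(%5u byte blocks,%5u bytes per update,%4u updates): ",
                i, speed[i].blen, speed[i].plen,
                speed[i].blen / speed[i].plen);

        ahash_request_set_crypt(req, sg, output, speed[i].plen);

        if (secs)
            ret = test_ahash_jiffies(req, speed[i].blen,
                                     speed[i].plen, output, secs);
        else
            ret = test_ahash_cycles(req, speed[i].blen,
                                    speed[i].plen, output);

        if (ret) {
            pr_err("hashing failed ret=%d\n", ret);
            break;
        }
    }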
969 int i, err = 0; in do_mult_acipher_op() local
972 for (i = 0; i < num_mb; i++) { in do_mult_acipher_op()
974 rc[i] = crypto_skcipher_encrypt(data[i].req); in do_mult_acipher_op()
976 rc[i] = crypto_skcipher_decrypt(data[i].req); in do_mult_acipher_op()
980 for (i = 0; i < num_mb; i++) { in do_mult_acipher_op()
981 rc[i] = crypto_wait_req(rc[i], &data[i].wait); in do_mult_acipher_op()
983 if (rc[i]) { in do_mult_acipher_op()
984 pr_info("concurrent request %d error %d\n", i, rc[i]); in do_mult_acipher_op()
985 err = rc[i]; in do_mult_acipher_op()
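do_mult_acipher_op() is the skcipher twin of do_mult_aead_op() above: the same submit-all-then-wait shape, only with crypto_skcipher_encrypt()/crypto_skcipher_decrypt(). A condensed sketch with the same assumed enc flag and data/rc parameters:

    for (i = 0; i < num_mb; i++)
        rc[i] = enc ? crypto_skcipher_encrypt(data[i].req)
                    : crypto_skcipher_decrypt(data[i].req);

    for (i = 0; i < num_mb; i++) {
        rc[i] = crypto_wait_req(rc[i], &data[i].wait);
        if (rc[i]) {
            pr_info("concurrent request %d error %d\n", i, rc[i]);
            err = rc[i];
        }
    }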
1024 int i; in test_mb_acipher_cycles() local
1032 for (i = 0; i < 4; i++) { in test_mb_acipher_cycles()
1039 for (i = 0; i < 8; i++) { in test_mb_acipher_cycles()
1066 unsigned int i, j, iv_len; in test_mb_skcipher_speed() local
1089 for (i = 0; i < num_mb; ++i) in test_mb_skcipher_speed()
1090 if (testmgr_alloc_buf(data[i].xbuf)) { in test_mb_skcipher_speed()
1091 while (i--) in test_mb_skcipher_speed()
1092 testmgr_free_buf(data[i].xbuf); in test_mb_skcipher_speed()
1096 for (i = 0; i < num_mb; ++i) { in test_mb_skcipher_speed()
1097 data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL); in test_mb_skcipher_speed()
1098 if (!data[i].req) { in test_mb_skcipher_speed()
1101 while (i--) in test_mb_skcipher_speed()
1102 skcipher_request_free(data[i].req); in test_mb_skcipher_speed()
1107 for (i = 0; i < num_mb; ++i) { in test_mb_skcipher_speed()
1108 skcipher_request_set_callback(data[i].req, in test_mb_skcipher_speed()
1110 crypto_req_done, &data[i].wait); in test_mb_skcipher_speed()
1111 crypto_init_wait(&data[i].wait); in test_mb_skcipher_speed()
1117 i = 0; in test_mb_skcipher_speed()
1129 pr_info("test %u (%d bit key, %d byte blocks): ", i, in test_mb_skcipher_speed()
1197 i++; in test_mb_skcipher_speed()
1203 for (i = 0; i < num_mb; ++i) in test_mb_skcipher_speed()
1204 skcipher_request_free(data[i].req); in test_mb_skcipher_speed()
1206 for (i = 0; i < num_mb; ++i) in test_mb_skcipher_speed()
1207 testmgr_free_buf(data[i].xbuf); in test_mb_skcipher_speed()
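test_mb_skcipher_speed() repeats the AEAD setup pattern: allocate per-channel xbuf pages and requests with the while (i--) rollback, then install crypto_req_done as the completion callback. One small difference visible in the listing: here the callback is set (lines 1108-1110) before crypto_init_wait() runs (line 1111), whereas the AEAD path initializes the wait first; the order is harmless because nothing can complete until a request is actually submitted. A sketch of the request setup; the goto label, callback flag, and error message are assumptions:

    for (i = 0; i < num_mb; ++i) {
        data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!data[i].req) {
            pr_err("skcipher: Failed to allocate request\n");
            while (i--)
                skcipher_request_free(data[i].req);
            goto out_free_xbuf;                 /* label assumed */
        }
    }

    for (i = 0; i < num_mb; ++i) {
        skcipher_request_set_callback(data[i].req,
                                      CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      crypto_req_done, &data[i].wait);
        crypto_init_wait(&data[i].wait);
    }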
1251 int i; in test_acipher_cycles() local
1254 for (i = 0; i < 4; i++) { in test_acipher_cycles()
1267 for (i = 0; i < 8; i++) { in test_acipher_cycles()
1297 unsigned int ret, i, j, k, iv_len; in test_skcipher_speed() local
1333 i = 0; in test_skcipher_speed()
1348 pr_info("test %u (%d bit key, %d byte blocks): ", i, in test_skcipher_speed()
1412 i++; in test_skcipher_speed()
1454 int i; in do_test() local
1466 for (i = 1; i < 200; i++) in do_test()
1467 ret = min(ret, do_test(NULL, 0, 0, i, num_mb)); in do_test()
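In do_test(), mode 0 fans out to every individual test: it calls itself for each test number from 1 to 199 and keeps the smallest (most negative, i.e. worst) return code via min(). A sketch of just that branch; the surrounding switch on the mode argument is assumed:

    case 0:
        /* Run every numbered test; remember the worst result. */
        for (i = 1; i < 200; i++)
            ret = min(ret, do_test(NULL, 0, 0, i, num_mb));
        break;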
2815 int i; in tcrypt_mod_init() local
2817 for (i = 0; i < TVMEMSIZE; i++) { in tcrypt_mod_init()
2818 tvmem[i] = (void *)__get_free_page(GFP_KERNEL); in tcrypt_mod_init()
2819 if (!tvmem[i]) in tcrypt_mod_init()
2843 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++) in tcrypt_mod_init()
2844 free_page((unsigned long)tvmem[i]); in tcrypt_mod_init()
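Finally, tcrypt_mod_init() allocates the TVMEMSIZE test pages that the scatterlist helpers above reuse, and its cleanup loop frees only the pages that were actually obtained, thanks to the tvmem[i] guard in the loop condition. A sketch; the error label, the -ENOMEM default, and the do_test() call with the module parameters are assumptions:

    static int __init tcrypt_mod_init(void)
    {
        int err = -ENOMEM;
        int i;

        for (i = 0; i < TVMEMSIZE; i++) {
            tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
            if (!tvmem[i])
                goto err_free_tv;               /* label assumed */
        }

        err = do_test(alg, type, mask, mode, num_mb);   /* parameters assumed */

        /* ... */

    err_free_tv:
        /* Free only the pages that were successfully allocated. */
        for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
            free_page((unsigned long)tvmem[i]);

        return err;
    }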