/linux-6.3-rc2/drivers/s390/cio/

airq.c
    135  iv = kzalloc(sizeof(*iv), GFP_KERNEL);  in airq_iv_create()
    136  if (!iv)  in airq_iv_create()
    163  iv->end = 0;  in airq_iv_create()
    184  return iv;  in airq_iv_create()
    190  if (iv->flags & AIRQ_IV_CACHELINE && iv->vector)  in airq_iv_create()
    194  kfree(iv);  in airq_iv_create()
    212  cio_dma_free(iv->vector, iv_size(iv->bits));  in airq_iv_release()
    214  kfree(iv);  in airq_iv_release()
    234  bit = find_first_bit_inv(iv->avail, iv->bits);  in airq_iv_alloc()
    277  while (iv->end > 0 && !test_bit_inv(iv->end - 1, iv->avail))  in airq_iv_free()
    [all …]

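The airq_iv hits sketch a small bitmap allocator: airq_iv_alloc() scans iv->avail for a free bit and claims it, and iv->end tracks one past the highest bit handed out. A userspace toy with the same shape, under hypothetical names (the s390 code additionally handles MSB-first bit order, locking and the interrupt vector itself, so this is only the general idea, not the kernel API):

```c
#include <stdio.h>

#define NBITS 64

struct toy_iv {
	unsigned long long avail;	/* set bit = position is free */
	unsigned int end;		/* one past the highest bit in use */
};

/* Claim the first free bit, or return -1 if the map is full. */
static int toy_iv_alloc(struct toy_iv *iv)
{
	for (unsigned int bit = 0; bit < NBITS; bit++) {
		if (iv->avail & (1ULL << bit)) {
			iv->avail &= ~(1ULL << bit);
			if (bit + 1 > iv->end)
				iv->end = bit + 1;
			return (int)bit;
		}
	}
	return -1;
}

/* Release a bit and let "end" shrink past trailing free positions. */
static void toy_iv_free(struct toy_iv *iv, unsigned int bit)
{
	iv->avail |= 1ULL << bit;
	while (iv->end > 0 && (iv->avail & (1ULL << (iv->end - 1))))
		iv->end--;
}

int main(void)
{
	struct toy_iv iv = { .avail = ~0ULL, .end = 0 };
	int a = toy_iv_alloc(&iv), b = toy_iv_alloc(&iv);

	printf("got bits %d and %d, end=%u\n", a, b, iv.end);
	toy_iv_free(&iv, (unsigned int)b);
	printf("freed %d, end=%u\n", b, iv.end);
	return 0;
}
```
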
/linux-6.3-rc2/crypto/

testmgr.h
    68  const char *iv;  member
    109  const char *iv;  member
    12112  .iv = zeroed_string,
    12144  .iv = zeroed_string,
    17258  .iv = "",
    17277  .iv = "",
    17570  .iv = "",
    17591  .iv = "",
    21406  .iv = zeroed_string,
    22021  .iv = zeroed_string,
    [all …]

cfb.c
    51  u8 *iv = walk->iv;  in crypto_cfb_final() local
    65  u8 *iv = walk->iv;  in crypto_cfb_encrypt_segment() local
    70  iv = dst;  in crypto_cfb_encrypt_segment()
    76  memcpy(walk->iv, iv, bsize);  in crypto_cfb_encrypt_segment()
    87  u8 *iv = walk->iv;  in crypto_cfb_encrypt_inplace() local
    93  iv = src;  in crypto_cfb_encrypt_inplace()
    98  memcpy(walk->iv, iv, bsize);  in crypto_cfb_encrypt_inplace()
    135  u8 *iv = walk->iv;  in crypto_cfb_decrypt_segment() local
    140  iv = src;  in crypto_cfb_decrypt_segment()
    146  memcpy(walk->iv, iv, bsize);  in crypto_cfb_decrypt_segment()
    [all …]

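The cfb.c hits show the CFB chaining rule: the keystream for each block is the block cipher applied to the previous ciphertext block (the IV for the first one), and walk->iv is updated with the last ciphertext block so the chain survives across walk chunks. A self-contained sketch of that flow; the "block cipher" here is a keyed-XOR stand-in, not a real cipher, and all names are made up:

```c
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define BSIZE 16

/* Stand-in for crypto_cipher_encrypt_one(): a keyed XOR, NOT a real cipher. */
static void toy_encrypt_block(uint8_t *out, const uint8_t *in, const uint8_t *key)
{
	for (int i = 0; i < BSIZE; i++)
		out[i] = in[i] ^ key[i];
}

/* CFB: keystream = E(previous ciphertext block); the first block uses the IV. */
static void cfb_crypt(uint8_t *out, const uint8_t *in, size_t nblocks,
		      const uint8_t *key, const uint8_t *iv, int decrypt)
{
	uint8_t chain[BSIZE], ks[BSIZE];

	memcpy(chain, iv, BSIZE);
	for (size_t b = 0; b < nblocks; b++) {
		toy_encrypt_block(ks, chain, key);
		for (int i = 0; i < BSIZE; i++)
			out[b * BSIZE + i] = in[b * BSIZE + i] ^ ks[i];
		/* next chaining value is the ciphertext block ("iv = dst" / "iv = src") */
		memcpy(chain, decrypt ? &in[b * BSIZE] : &out[b * BSIZE], BSIZE);
	}
}

int main(void)
{
	uint8_t key[BSIZE] = "0123456789abcde", iv[BSIZE] = "IVIVIVIVIVIVIVI";
	uint8_t pt[32] = "two sixteen-byte blocks of text", ct[32], rt[32];

	cfb_crypt(ct, pt, 2, key, iv, 0);
	cfb_crypt(rt, ct, 2, key, iv, 1);
	printf("CFB round trip %s\n", memcmp(pt, rt, 32) == 0 ? "ok" : "FAILED");
	return 0;
}
```
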
pcbc.c
    28  u8 * const iv = walk->iv;  in crypto_pcbc_encrypt_segment() local
    31  crypto_xor(iv, src, bsize);  in crypto_pcbc_encrypt_segment()
    32  crypto_cipher_encrypt_one(tfm, dst, iv);  in crypto_pcbc_encrypt_segment()
    33  crypto_xor_cpy(iv, dst, src, bsize);  in crypto_pcbc_encrypt_segment()
    49  u8 * const iv = walk->iv;  in crypto_pcbc_encrypt_inplace() local
    54  crypto_xor(iv, src, bsize);  in crypto_pcbc_encrypt_inplace()
    95  u8 * const iv = walk->iv;  in crypto_pcbc_decrypt_segment() local
    99  crypto_xor(dst, iv, bsize);  in crypto_pcbc_decrypt_segment()
    100  crypto_xor_cpy(iv, dst, src, bsize);  in crypto_pcbc_decrypt_segment()
    116  u8 * const iv = walk->iv;  in crypto_pcbc_decrypt_inplace() local
    [all …]

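pcbc.c's hits spell out the PCBC recurrence: on encryption the running IV is XORed with the plaintext, encrypted to give the ciphertext, and then replaced by plaintext XOR ciphertext; decryption mirrors it. A hedged sketch with a toy self-inverse "cipher" (keyed XOR) so it compiles and round-trips on its own:

```c
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define BSIZE 16

/* Toy block "cipher": a keyed XOR is its own inverse, so one routine
 * stands in for both encrypt_one() and decrypt_one().  Not secure. */
static void toy_block(uint8_t *out, const uint8_t *in, const uint8_t *key)
{
	for (int i = 0; i < BSIZE; i++)
		out[i] = in[i] ^ key[i];
}

/* Encrypt: iv ^= P; C = E(iv); iv = C ^ P  -- the hits at lines 31-33. */
static void pcbc_encrypt(uint8_t *ct, const uint8_t *pt, size_t nblocks,
			 const uint8_t *key, const uint8_t *iv0)
{
	uint8_t iv[BSIZE];

	memcpy(iv, iv0, BSIZE);
	for (size_t b = 0; b < nblocks; b++, pt += BSIZE, ct += BSIZE) {
		for (int i = 0; i < BSIZE; i++)
			iv[i] ^= pt[i];
		toy_block(ct, iv, key);
		for (int i = 0; i < BSIZE; i++)
			iv[i] = ct[i] ^ pt[i];
	}
}

/* Decrypt: P = D(C) ^ iv; iv = P ^ C. */
static void pcbc_decrypt(uint8_t *pt, const uint8_t *ct, size_t nblocks,
			 const uint8_t *key, const uint8_t *iv0)
{
	uint8_t iv[BSIZE];

	memcpy(iv, iv0, BSIZE);
	for (size_t b = 0; b < nblocks; b++, pt += BSIZE, ct += BSIZE) {
		toy_block(pt, ct, key);
		for (int i = 0; i < BSIZE; i++)
			pt[i] ^= iv[i];
		for (int i = 0; i < BSIZE; i++)
			iv[i] = pt[i] ^ ct[i];
	}
}

int main(void)
{
	uint8_t key[BSIZE] = "0123456789abcde", iv[BSIZE] = "IVIVIVIVIVIVIVI";
	uint8_t pt[32] = "two sixteen-byte blocks of text", ct[32], rt[32];

	pcbc_encrypt(ct, pt, 2, key, iv);
	pcbc_decrypt(rt, ct, 2, key, iv);
	printf("PCBC round trip %s\n", memcmp(pt, rt, 32) == 0 ? "ok" : "FAILED");
	return 0;
}
```
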
cbc.c
    27  u8 *iv = walk->iv;  in crypto_cbc_encrypt_segment() local
    34  crypto_xor(iv, src, bsize);  in crypto_cbc_encrypt_segment()
    35  fn(tfm, dst, iv);  in crypto_cbc_encrypt_segment()
    36  memcpy(iv, dst, bsize);  in crypto_cbc_encrypt_segment()
    54  u8 *iv = walk->iv;  in crypto_cbc_encrypt_inplace() local
    61  crypto_xor(src, iv, bsize);  in crypto_cbc_encrypt_inplace()
    63  iv = src;  in crypto_cbc_encrypt_inplace()
    68  memcpy(walk->iv, iv, bsize);  in crypto_cbc_encrypt_inplace()
    102  u8 *iv = walk->iv;  in crypto_cbc_decrypt_segment() local
    111  iv = src;  in crypto_cbc_decrypt_segment()
    [all …]

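cbc.c shows the classic CBC update in three lines: XOR the running IV into the plaintext, run the block cipher, then copy the ciphertext back into the IV (the in-place variant just points iv at the last ciphertext block and memcpys it into walk->iv at the end). A standalone sketch of the same recurrence, again with a keyed-XOR stand-in for the real cipher:

```c
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define BSIZE 16

/* Keyed-XOR stand-in for the real block cipher (self-inverse, not secure). */
static void toy_block(uint8_t *out, const uint8_t *in, const uint8_t *key)
{
	for (int i = 0; i < BSIZE; i++)
		out[i] = in[i] ^ key[i];
}

/* CBC encrypt: iv ^= P; C = E(iv); iv = C  -- mirrors the hits at 34-36. */
static void cbc_encrypt(uint8_t *ct, const uint8_t *pt, size_t nblocks,
			const uint8_t *key, uint8_t *iv)
{
	for (size_t b = 0; b < nblocks; b++, pt += BSIZE, ct += BSIZE) {
		for (int i = 0; i < BSIZE; i++)
			iv[i] ^= pt[i];		/* crypto_xor(iv, src, bsize) */
		toy_block(ct, iv, key);		/* fn(tfm, dst, iv)           */
		memcpy(iv, ct, BSIZE);		/* memcpy(iv, dst, bsize)     */
	}
}

/* CBC decrypt: P = D(C) ^ iv; iv = C. */
static void cbc_decrypt(uint8_t *pt, const uint8_t *ct, size_t nblocks,
			const uint8_t *key, uint8_t *iv)
{
	for (size_t b = 0; b < nblocks; b++, pt += BSIZE, ct += BSIZE) {
		toy_block(pt, ct, key);
		for (int i = 0; i < BSIZE; i++)
			pt[i] ^= iv[i];
		memcpy(iv, ct, BSIZE);
	}
}

int main(void)
{
	uint8_t key[BSIZE] = "0123456789abcde";
	uint8_t iv_e[BSIZE] = "IVIVIVIVIVIVIVI", iv_d[BSIZE] = "IVIVIVIVIVIVIVI";
	uint8_t pt[32] = "two sixteen-byte blocks of text", ct[32], rt[32];

	cbc_encrypt(ct, pt, 2, key, iv_e);
	cbc_decrypt(rt, ct, 2, key, iv_d);
	printf("CBC round trip %s\n", memcmp(pt, rt, 32) == 0 ? "ok" : "FAILED");
	return 0;
}
```
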
ofb.c
    31  u8 * const iv = walk.iv;  in crypto_ofb_crypt() local
    35  crypto_cipher_encrypt_one(cipher, iv, iv);  in crypto_ofb_crypt()
    36  crypto_xor_cpy(dst, src, iv, bsize);  in crypto_ofb_crypt()
    45  crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);  in crypto_ofb_crypt()
    46  crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, walk.iv,  in crypto_ofb_crypt()

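ofb.c is the shortest of the four modes above: the IV buffer itself is repeatedly encrypted in place to produce the keystream, which is XORed into the data, so the same routine both encrypts and decrypts and a trailing partial block (the lines 45-46 hit) needs no padding. A minimal standalone sketch, with a keyed-XOR stand-in for the cipher:

```c
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define BSIZE 16

/* Stand-in for crypto_cipher_encrypt_one(cipher, iv, iv): keyed XOR in place. */
static void toy_block_inplace(uint8_t *buf, const uint8_t *key)
{
	for (int i = 0; i < BSIZE; i++)
		buf[i] ^= key[i];
}

/* OFB: iv = E(iv); dst = src ^ iv.  Works for any length, no padding needed. */
static void ofb_crypt(uint8_t *dst, const uint8_t *src, size_t len,
		      const uint8_t *key, const uint8_t *iv0)
{
	uint8_t iv[BSIZE];

	memcpy(iv, iv0, BSIZE);
	for (size_t off = 0; off < len; off += BSIZE) {
		size_t n = len - off < BSIZE ? len - off : BSIZE;

		toy_block_inplace(iv, key);
		for (size_t i = 0; i < n; i++)
			dst[off + i] = src[off + i] ^ iv[i];
	}
}

int main(void)
{
	uint8_t key[BSIZE] = "0123456789abcde", iv[BSIZE] = "IVIVIVIVIVIVIVI";
	uint8_t pt[22] = "a 21-byte odd message", ct[22], rt[22];

	ofb_crypt(ct, pt, sizeof(pt), key, iv);
	ofb_crypt(rt, ct, sizeof(rt), key, iv);
	printf("OFB round trip %s\n", memcmp(pt, rt, sizeof(pt)) == 0 ? "ok" : "FAILED");
	return 0;
}
```
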
/linux-6.3-rc2/arch/s390/include/asm/

airq.h
    54  void airq_iv_release(struct airq_iv *iv);
    62  return airq_iv_alloc(iv, 1);  in airq_iv_alloc_bit()
    67  airq_iv_free(iv, bit, 1);  in airq_iv_free_bit()
    70  static inline unsigned long airq_iv_end(struct airq_iv *iv)  in airq_iv_end() argument
    72  return iv->end;  in airq_iv_end()
    78  bit_spin_lock(bit ^ be_to_le, iv->bitlock);  in airq_iv_lock()
    84  bit_spin_unlock(bit ^ be_to_le, iv->bitlock);  in airq_iv_unlock()
    90  iv->data[bit] = data;  in airq_iv_set_data()
    96  return iv->data[bit];  in airq_iv_get_data()
    102  iv->ptr[bit] = ptr;  in airq_iv_set_ptr()
    [all …]

/linux-6.3-rc2/arch/x86/crypto/

sm4_aesni_avx_glue.c
    27  const u8 *src, u8 *iv);
    29  const u8 *src, u8 *iv);
    104  const u8 *iv = walk.iv;  in sm4_cbc_encrypt() local
    111  iv = dst;  in sm4_cbc_encrypt()
    116  if (iv != walk.iv)  in sm4_cbc_encrypt()
    117  memcpy(walk.iv, iv, SM4_BLOCK_SIZE);  in sm4_cbc_encrypt()
    171  memcpy(walk.iv, iv, SM4_BLOCK_SIZE);  in sm4_avx_cbc_decrypt()
    203  const u8 *iv = walk.iv;  in sm4_cfb_encrypt() local
    210  iv = dst;  in sm4_cfb_encrypt()
    215  if (iv != walk.iv)  in sm4_cfb_encrypt()
    [all …]

aesni-intel_glue.c
    118  u8 *iv,
    159  u8 *iv,
    182  u8 *iv,
    404  req->iv);  in cts_cbc_encrypt()
    422  req->iv);  in cts_cbc_encrypt()
    478  req->iv);  in cts_cbc_decrypt()
    847  *(iv+4+i) = req->iv[i];  in helper_rfc4106_encrypt()
    875  *(iv+4+i) = req->iv[i];  in helper_rfc4106_decrypt()
    945  aesni_enc(aes_ctx(ctx->raw_tweak_ctx), walk.iv, walk.iv);  in xts_crypt()
    1167  memcpy(iv, req->iv, 12);  in generic_gcmaes_encrypt()
    [all …]

/linux-6.3-rc2/arch/arm64/crypto/

sm4-neon-glue.c
    93  const u8 *iv = walk.iv;  in sm4_cbc_encrypt() local
    100  iv = dst;  in sm4_cbc_encrypt()
    105  if (iv != walk.iv)  in sm4_cbc_encrypt()
    106  memcpy(walk.iv, iv, SM4_BLOCK_SIZE);  in sm4_cbc_encrypt()
    134  walk.iv, nblocks);  in sm4_cbc_decrypt()
    157  const u8 *iv = walk.iv;  in sm4_cfb_encrypt() local
    164  iv = dst;  in sm4_cfb_encrypt()
    169  if (iv != walk.iv)  in sm4_cfb_encrypt()
    170  memcpy(walk.iv, iv, SM4_BLOCK_SIZE);  in sm4_cfb_encrypt()
    205  walk.iv, nblocks);  in sm4_cfb_decrypt()
    [all …]

ghash-ce-glue.c
    374  dg, iv, ctx->aes_key.key_enc, nrounds,  in gcm_encrypt()
    444  dg, iv, ctx->aes_key.key_enc,  in gcm_decrypt()
    466  u8 iv[AES_BLOCK_SIZE];  in gcm_aes_encrypt() local
    468  memcpy(iv, req->iv, GCM_AES_IV_SIZE);  in gcm_aes_encrypt()
    469  return gcm_encrypt(req, iv, req->assoclen);  in gcm_aes_encrypt()
    474  u8 iv[AES_BLOCK_SIZE];  in gcm_aes_decrypt() local
    476  memcpy(iv, req->iv, GCM_AES_IV_SIZE);  in gcm_aes_decrypt()
    504  u8 iv[AES_BLOCK_SIZE];  in rfc4106_encrypt() local
    507  memcpy(iv + RFC4106_NONCE_SIZE, req->iv, GCM_RFC4106_IV_SIZE);  in rfc4106_encrypt()
    517  u8 iv[AES_BLOCK_SIZE];  in rfc4106_decrypt() local
    [all …]

aes-neonbs-glue.c
    34  int rounds, int blocks, u8 iv[]);
    37  int rounds, int blocks, u8 iv[]);
    40  int rounds, int blocks, u8 iv[]);
    172  walk.iv);  in cbc_encrypt()
    198  walk.iv);  in cbc_decrypt()
    225  blocks, walk.iv);  in ctr_encrypt()
    231  nbytes, walk.iv);  in ctr_encrypt()
    294  req->iv);  in __xts_crypt()
    313  neon_aes_ecb_encrypt(walk.iv, walk.iv,  in __xts_crypt()
    319  walk.iv);  in __xts_crypt()
    [all …]

/linux-6.3-rc2/drivers/crypto/nx/

nx-aes-ccm.c
    126  if (1 > iv[0] || iv[0] > 7)  in crypto_ccm_check_iv()
    138  memcpy(b0, iv, 16);  in generate_b0()
    169  memset(iv + 15 - iv[0], 0, iv[0] + 1);  in generate_pat()
    327  u8 *iv,  in ccm_nx_decrypt() argument
    405  u8 *iv,  in ccm_nx_encrypt() argument
    481  u8 *iv = rctx->iv;  in ccm4309_aes_nx_encrypt() local
    483  iv[0] = 3;  in ccm4309_aes_nx_encrypt()
    485  memcpy(iv + 4, req->iv, 8);  in ccm4309_aes_nx_encrypt()
    505  u8 *iv = rctx->iv;  in ccm4309_aes_nx_decrypt() local
    507  iv[0] = 3;  in ccm4309_aes_nx_decrypt()
    [all …]

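The ccm4309 hits build the 16-byte counter-block template for RFC 4309 CCM: byte 0 is 3 (a 4-byte counter field, leaving an 11-byte nonce) and the 8-byte per-request IV lands at bytes 4..11. The 3-byte salt at bytes 1..3 is not visible in the hits above; its placement below follows RFC 4309 and is an assumption, as is the helper name:

```c
#include <stdio.h>
#include <string.h>
#include <stdint.h>

/* Assemble counter block 0 for RFC 4309 (CCM with an implicit 3-byte salt). */
static void rfc4309_ctr0(uint8_t ctr[16], const uint8_t salt[3],
			 const uint8_t req_iv[8])
{
	ctr[0] = 3;				/* iv[0] = 3: q - 1, 4-byte counter field */
	memcpy(ctr + 1, salt, 3);		/* salt placement: assumed, per the RFC   */
	memcpy(ctr + 4, req_iv, 8);		/* memcpy(iv + 4, req->iv, 8)             */
	memset(ctr + 15 - ctr[0], 0, ctr[0] + 1); /* zero the counter, cf. generate_pat() */
}

int main(void)
{
	uint8_t salt[3] = { 0xaa, 0xbb, 0xcc };
	uint8_t req_iv[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
	uint8_t ctr[16];

	rfc4309_ctr0(ctr, salt, req_iv);
	for (int i = 0; i < 16; i++)
		printf("%02x%c", ctr[i], i == 15 ? '\n' : ' ');
	return 0;
}
```
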
nx-aes-ctr.c
    72  static int ctr_aes_nx_crypt(struct skcipher_request *req, u8 *iv)  in ctr_aes_nx_crypt() argument
    86  rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src,  in ctr_aes_nx_crypt()
    88  csbcpb->cpb.aes_ctr.iv);  in ctr_aes_nx_crypt()
    102  memcpy(iv, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);  in ctr_aes_nx_crypt()
    119  u8 iv[16];  in ctr3686_aes_nx_crypt() local
    121  memcpy(iv, nx_ctx->priv.ctr.nonce, CTR_RFC3686_NONCE_SIZE);  in ctr3686_aes_nx_crypt()
    122  memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);  in ctr3686_aes_nx_crypt()
    123  iv[12] = iv[13] = iv[14] = 0;  in ctr3686_aes_nx_crypt()
    124  iv[15] = 1;  in ctr3686_aes_nx_crypt()
    126  return ctr_aes_nx_crypt(req, iv);  in ctr3686_aes_nx_crypt()

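ctr3686_aes_nx_crypt() shows the whole RFC 3686 counter-block recipe: a 4-byte nonce from the key material, then the 8-byte per-request IV, then a 32-bit big-endian block counter starting at 1. The same assembly as a tiny standalone function (the helper name is made up):

```c
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define CTR_RFC3686_NONCE_SIZE	4
#define CTR_RFC3686_IV_SIZE	8

static void rfc3686_ctrblk(uint8_t ctr[16], const uint8_t *nonce,
			   const uint8_t *req_iv)
{
	memcpy(ctr, nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(ctr + CTR_RFC3686_NONCE_SIZE, req_iv, CTR_RFC3686_IV_SIZE);
	ctr[12] = ctr[13] = ctr[14] = 0;	/* iv[12] = iv[13] = iv[14] = 0    */
	ctr[15] = 1;				/* iv[15] = 1: counter starts at 1 */
}

int main(void)
{
	uint8_t nonce[4] = { 0xde, 0xad, 0xbe, 0xef };
	uint8_t req_iv[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
	uint8_t ctr[16];

	rfc3686_ctrblk(ctr, nonce, req_iv);
	for (int i = 0; i < 16; i++)
		printf("%02x%c", ctr[i], i == 15 ? '\n' : ' ');
	return 0;
}
```
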
nx-aes-gcm.c
    326  rc = gcm_empty(req, rctx->iv, enc);  in gcm_aes_nx_crypt()
    328  rc = gmac(req, rctx->iv, assoclen);  in gcm_aes_nx_crypt()
    419  char *iv = rctx->iv;  in gcm_aes_nx_encrypt() local
    421  memcpy(iv, req->iv, GCM_AES_IV_SIZE);  in gcm_aes_nx_encrypt()
    429  char *iv = rctx->iv;  in gcm_aes_nx_decrypt() local
    431  memcpy(iv, req->iv, GCM_AES_IV_SIZE);  in gcm_aes_nx_decrypt()
    441  char *iv = rctx->iv;  in gcm4106_aes_nx_encrypt() local
    444  memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);  in gcm4106_aes_nx_encrypt()
    445  memcpy(iv + NX_GCM4106_NONCE_LEN, req->iv, 8);  in gcm4106_aes_nx_encrypt()
    458  char *iv = rctx->iv;  in gcm4106_aes_nx_decrypt() local
    [all …]

/linux-6.3-rc2/drivers/staging/wlan-ng/

p80211wep.c
    91  u8 *iv, u8 *icv)  in wep_decrypt() argument
    102  key[0] = iv[0];  in wep_decrypt()
    103  key[1] = iv[1];  in wep_decrypt()
    104  key[2] = iv[2];  in wep_decrypt()
    105  keyidx = WEP_KEY(iv[3]);  in wep_decrypt()
    179  get_random_bytes(iv, 3);  in wep_encrypt()
    180  while ((iv[1] == 0xff) && (iv[0] >= 3) && (iv[0] < keylen))  in wep_encrypt()
    181  get_random_bytes(iv, 3);  in wep_encrypt()
    185  key[0] = iv[0];  in wep_encrypt()
    186  key[1] = iv[1];  in wep_encrypt()
    [all …]

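The wep_decrypt()/wep_encrypt() hits show how WEP forms its per-packet RC4 seed: the 3 transmitted IV bytes become the first 3 key bytes, the shared key follows, and the key index is recovered from the fourth IV byte (packed into its top two bits, as the mac80211 entry further below shows). A userspace sketch of that framing; the helper name and example key are made up:

```c
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define WEP_IV_LEN 3

/* RC4 seed = 3-byte per-packet IV || shared WEP key. */
static size_t wep_build_rc4_seed(uint8_t *seed, const uint8_t *iv,
				 const uint8_t *wep_key, size_t keylen)
{
	memcpy(seed, iv, WEP_IV_LEN);		/* key[0..2] = iv[0..2] */
	memcpy(seed + WEP_IV_LEN, wep_key, keylen);
	return WEP_IV_LEN + keylen;
}

int main(void)
{
	uint8_t wep_key[5] = { 'a', 'b', 'c', 'd', 'e' };	/* WEP-40 key */
	uint8_t iv[4] = { 0x12, 0x34, 0x56, 0x00 };
	uint8_t seed[WEP_IV_LEN + sizeof(wep_key)];
	unsigned int keyidx = 2;
	size_t seedlen;

	iv[3] = (uint8_t)(keyidx << 6);		/* key index in bits 6..7 */
	seedlen = wep_build_rc4_seed(seed, iv, wep_key, sizeof(wep_key));
	printf("seed: %zu bytes, key-index byte 0x%02x\n", seedlen, iv[3]);
	return 0;
}
```
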
/linux-6.3-rc2/lib/crypto/

chacha20poly1305.c
    39  u8 iv[CHACHA_IV_SIZE];  in xchacha_init() local
    41  memset(iv, 0, 8);  in xchacha_init()
    53  memzero_explicit(iv, sizeof(iv));  in xchacha_init()
    97  __le64 iv[2];  in chacha20poly1305_encrypt() local
    101  iv[0] = 0;  in chacha20poly1305_encrypt()
    107  memzero_explicit(iv, sizeof(iv));  in chacha20poly1305_encrypt()
    175  __le64 iv[2];  in chacha20poly1305_decrypt() local
    180  iv[0] = 0;  in chacha20poly1305_decrypt()
    188  memzero_explicit(iv, sizeof(iv));  in chacha20poly1305_decrypt()
    226  __le64 iv[2];  in chacha20poly1305_crypt_sg_inplace() member
    [all …]

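chacha20poly1305_encrypt() turns its 64-bit nonce into the 16-byte "iv" that chacha_init() consumes: the first 8 bytes are zero (the "iv[0] = 0" and "memset(iv, 0, 8)" hits) and the last 8 bytes carry the nonce, little endian. A sketch of just that packing; what the four words mean inside the ChaCha state is spelled out under include/crypto/chacha.h further down:

```c
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define CHACHA_IV_SIZE 16

/* Pack a 64-bit nonce the way the library appears to: 8 zero bytes, then
 * the nonce as a little-endian __le64. */
static void pack_chacha_iv(uint8_t iv[CHACHA_IV_SIZE], uint64_t nonce)
{
	memset(iv, 0, 8);			/* iv[0] = 0 */
	for (int i = 0; i < 8; i++)
		iv[8 + i] = (uint8_t)(nonce >> (8 * i));
}

int main(void)
{
	uint8_t iv[CHACHA_IV_SIZE];

	pack_chacha_iv(iv, 0x0123456789abcdefULL);
	for (int i = 0; i < CHACHA_IV_SIZE; i++)
		printf("%02x%c", iv[i], i == 15 ? '\n' : ' ');
	return 0;
}
```
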
aesgcm.c
    137  const u8 iv[GCM_AES_IV_SIZE], u8 *authtag)  in aesgcm_encrypt()
    141  memcpy(ctr, iv, GCM_AES_IV_SIZE);  in aesgcm_encrypt()
    173  memcpy(ctr, iv, GCM_AES_IV_SIZE);  in aesgcm_decrypt()
    550  u8 iv[GCM_AES_IV_SIZE];  member
    572  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    582  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    607  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    629  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    641  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    656  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    [all …]

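lib/crypto/aesgcm.c copies the 12-byte GCM IV straight into the counter block. With GCM's standard 96-bit IV the rest of the block is just a 32-bit big-endian counter: value 1 (J0) is reserved for the authentication tag and bulk data starts at 2 (per NIST SP 800-38D; only the memcpy itself is visible in the hits). A sketch of the layout and the increment; the example IV's first 8 bytes match the test-vector lines above, the tail is the standard NIST example value and an assumption here:

```c
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define GCM_AES_IV_SIZE 12

/* Counter block = 12-byte IV || 32-bit big-endian counter, starting at 1. */
static void gcm_ctr_init(uint8_t ctr[16], const uint8_t *iv)
{
	memcpy(ctr, iv, GCM_AES_IV_SIZE);	/* memcpy(ctr, iv, GCM_AES_IV_SIZE) */
	ctr[12] = ctr[13] = ctr[14] = 0;
	ctr[15] = 1;				/* J0, used for the tag mask */
}

/* Big-endian increment of the low 32 bits only. */
static void gcm_ctr_inc(uint8_t ctr[16])
{
	for (int i = 15; i >= 12; i--)
		if (++ctr[i] != 0)
			break;
}

int main(void)
{
	const uint8_t iv[GCM_AES_IV_SIZE] = {
		0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
		0xde, 0xca, 0xf8, 0x88,
	};
	uint8_t ctr[16];

	gcm_ctr_init(ctr, iv);
	gcm_ctr_inc(ctr);			/* counter block for the first data block */
	for (int i = 0; i < 16; i++)
		printf("%02x%c", ctr[i], i == 15 ? '\n' : ' ');
	return 0;
}
```
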
/linux-6.3-rc2/include/crypto/

chacha.h
    65  void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv);
    66  static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)  in chacha_init_generic() argument
    77  state[12] = get_unaligned_le32(iv + 0);  in chacha_init_generic()
    78  state[13] = get_unaligned_le32(iv + 4);  in chacha_init_generic()
    79  state[14] = get_unaligned_le32(iv + 8);  in chacha_init_generic()
    80  state[15] = get_unaligned_le32(iv + 12);  in chacha_init_generic()
    83  static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)  in chacha_init() argument
    86  chacha_init_arch(state, key, iv);  in chacha_init()
    88  chacha_init_generic(state, key, iv);  in chacha_init()

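chacha_init_generic() fills the last four state words from the 16-byte iv; the rest of the 4x4-word state is the standard ChaCha layout of RFC 8439: four "expand 32-byte k" constants and eight little-endian key words, with the block counter as the first iv word and the nonce in the remaining three. A standalone restatement of that layout (the constant and key words are not in the hits above):

```c
#include <stdio.h>
#include <stdint.h>

/* Little-endian load, standing in for get_unaligned_le32(). */
static uint32_t le32(const uint8_t *p)
{
	return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
	       (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
}

static void toy_chacha_init(uint32_t state[16], const uint8_t key[32],
			    const uint8_t iv[16])
{
	/* "expa" "nd 3" "2-by" "te k" */
	static const uint32_t c[4] = { 0x61707865, 0x3320646e,
				       0x79622d32, 0x6b206574 };
	int i;

	for (i = 0; i < 4; i++)
		state[i] = c[i];
	for (i = 0; i < 8; i++)
		state[4 + i] = le32(key + 4 * i);
	for (i = 0; i < 4; i++)			/* state[12..15]: counter + nonce */
		state[12 + i] = le32(iv + 4 * i);
}

int main(void)
{
	uint8_t key[32] = { 0 }, iv[16] = { 0 };
	uint32_t state[16];

	iv[0] = 1;				/* start at block counter 1 */
	toy_chacha_init(state, key, iv);
	printf("state[0]=0x%08x state[12]=%u\n",
	       (unsigned int)state[0], (unsigned int)state[12]);
	return 0;
}
```
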
/linux-6.3-rc2/drivers/crypto/ccp/

ccp-crypto-aes.c
    33  memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_complete()
    83  if (!req->iv)  in ccp_aes_crypt()
    86  memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);  in ccp_aes_crypt()
    101  rctx->cmd.u.aes.iv = iv_sg;  in ccp_aes_crypt()
    139  req->iv = rctx->rfc3686_info;  in ccp_aes_rfc3686_complete()
    163  u8 *iv;  in ccp_aes_rfc3686_crypt() local
    166  iv = rctx->rfc3686_iv;  in ccp_aes_rfc3686_crypt()
    169  iv += CTR_RFC3686_NONCE_SIZE;  in ccp_aes_rfc3686_crypt()
    170  memcpy(iv, req->iv, CTR_RFC3686_IV_SIZE);  in ccp_aes_rfc3686_crypt()
    172  iv += CTR_RFC3686_IV_SIZE;  in ccp_aes_rfc3686_crypt()
    [all …]

/linux-6.3-rc2/net/mac80211/

wep.c
    38  if ((iv & 0xff00) == 0xff00) {  in ieee80211_wep_weak_iv()
    39  u8 B = (iv >> 16) & 0xff;  in ieee80211_wep_weak_iv()
    48  int keylen, int keyidx, u8 *iv)  in ieee80211_wep_get_iv() argument
    54  if (!iv)  in ieee80211_wep_get_iv()
    57  *iv++ = (local->wep_iv >> 16) & 0xff;  in ieee80211_wep_get_iv()
    58  *iv++ = (local->wep_iv >> 8) & 0xff;  in ieee80211_wep_get_iv()
    59  *iv++ = local->wep_iv & 0xff;  in ieee80211_wep_get_iv()
    60  *iv++ = keyidx << 6;  in ieee80211_wep_get_iv()
    135  u8 *iv;  in ieee80211_wep_encrypt() local
    143  if (!iv)  in ieee80211_wep_encrypt()
    [all …]

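The two halves visible here are the IV serialization (three bytes of the 24-bit counter, then the key index in bits 6..7 of the fourth byte) and the start of the FMS weak-IV test: middle byte 0xff, with B taken from the top byte. The exact bound on B below (3 <= B < 3 + keylen) follows the usual FMS description and is an assumption; only the 0xff00 test and the extraction of B appear in the hits:

```c
#include <stdio.h>
#include <stdint.h>
#include <stdbool.h>

/* Serialize a 24-bit WEP IV plus key index, as ieee80211_wep_get_iv() does. */
static void wep_put_iv(uint8_t out[4], uint32_t iv, unsigned int keyidx)
{
	out[0] = (iv >> 16) & 0xff;
	out[1] = (iv >> 8) & 0xff;
	out[2] = iv & 0xff;
	out[3] = (uint8_t)(keyidx << 6);
}

/* FMS-style weak IVs look like (B, 0xff, x); the bound on B is assumed. */
static bool wep_weak_iv(uint32_t iv, int keylen)
{
	if ((iv & 0xff00) == 0xff00) {
		uint8_t B = (iv >> 16) & 0xff;

		if (B >= 3 && B < 3 + keylen)
			return true;
	}
	return false;
}

int main(void)
{
	uint8_t hdr[4];

	wep_put_iv(hdr, 0x04ff21, 1);
	printf("iv bytes %02x %02x %02x, keyidx byte %02x, weak=%d\n",
	       hdr[0], hdr[1], hdr[2], hdr[3], wep_weak_iv(0x04ff21, 5));
	return 0;
}
```
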
/linux-6.3-rc2/tools/testing/crypto/chacha20-s390/

test-cipher.c
    54  u8 iv[16], key[32];  in test_lib_chacha() local
    58  memset(iv, 'I', sizeof(iv));  in test_lib_chacha()
    65  16, 1, iv, 16, 1);  in test_lib_chacha()
    69  chacha_init_arch(chacha_state, (u32*)key, iv);  in test_lib_chacha()
    84  chacha_init_arch(chacha_state, (u32 *)key, iv);  in test_lib_chacha()
    131  u8 iv[16], key[32];  in test_skcipher() local
    153  memset(iv, 'I', sizeof(iv));  in test_skcipher()
    166  16, 1, iv, 16, 1);  in test_skcipher()
    176  data_size, iv);  in test_skcipher()
    195  memset(iv, 'I', sizeof(iv));  in test_skcipher()
    [all …]

/linux-6.3-rc2/drivers/crypto/amcc/

crypto4xx_alg.c
    75  __le32 iv[AES_IV_SIZE];  in crypto4xx_crypt() local
    81  crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);  in crypto4xx_crypt()
    345  if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))  in crypto4xx_aead_need_fallback()
    360  req->iv);  in crypto4xx_aead_fallback()
    446  __le32 iv[16];  in crypto4xx_crypt_aes_ccm() local
    460  if (req->iv[0] == 1) {  in crypto4xx_crypt_aes_ccm()
    466  crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));  in crypto4xx_crypt_aes_ccm()
    469  len, iv, sizeof(iv),  in crypto4xx_crypt_aes_ccm()
    592  __le32 iv[4];  in crypto4xx_crypt_aes_gcm() local
    601  crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);  in crypto4xx_crypt_aes_gcm()
    [all …]

/linux-6.3-rc2/include/uapi/linux/

tls.h
    127  unsigned char iv[TLS_CIPHER_AES_GCM_128_IV_SIZE];  member
    135  unsigned char iv[TLS_CIPHER_AES_GCM_256_IV_SIZE];  member
    143  unsigned char iv[TLS_CIPHER_AES_CCM_128_IV_SIZE];  member
    151  unsigned char iv[TLS_CIPHER_CHACHA20_POLY1305_IV_SIZE];  member
    159  unsigned char iv[TLS_CIPHER_SM4_GCM_IV_SIZE];  member
    167  unsigned char iv[TLS_CIPHER_SM4_CCM_IV_SIZE];  member
    175  unsigned char iv[TLS_CIPHER_ARIA_GCM_128_IV_SIZE];  member
    183  unsigned char iv[TLS_CIPHER_ARIA_GCM_256_IV_SIZE];  member

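These iv[] members live in the per-cipher structs that user space hands to the kernel TLS ULP through setsockopt() after completing the handshake itself. A sketch of filling in the AES-GCM-128 variant for the transmit path; the secret material is placeholder bytes, the fallback defines cover older userspace headers, and error handling is trimmed:

```c
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <linux/tls.h>

#ifndef TCP_ULP
#define TCP_ULP 31
#endif
#ifndef SOL_TLS
#define SOL_TLS 282
#endif

/* Switch an established, handshake-complete TCP socket to kernel TLS TX. */
static int enable_ktls_tx(int sock)
{
	struct tls12_crypto_info_aes_gcm_128 ci;

	memset(&ci, 0, sizeof(ci));
	ci.info.version = TLS_1_2_VERSION;
	ci.info.cipher_type = TLS_CIPHER_AES_GCM_128;
	/* Placeholder secrets; real values come out of the TLS handshake. */
	memcpy(ci.key, "0123456789abcdef", TLS_CIPHER_AES_GCM_128_KEY_SIZE);
	memcpy(ci.iv, "\x00\x00\x00\x00\x00\x00\x00\x01",
	       TLS_CIPHER_AES_GCM_128_IV_SIZE);
	memcpy(ci.salt, "\x01\x02\x03\x04", TLS_CIPHER_AES_GCM_128_SALT_SIZE);
	memcpy(ci.rec_seq, "\x00\x00\x00\x00\x00\x00\x00\x00",
	       TLS_CIPHER_AES_GCM_128_REC_SEQ_SIZE);

	if (setsockopt(sock, IPPROTO_TCP, TCP_ULP, "tls", sizeof("tls")) < 0)
		return -1;
	return setsockopt(sock, SOL_TLS, TLS_TX, &ci, sizeof(ci));
}

int main(void)
{
	printf("tls12_crypto_info_aes_gcm_128 is %zu bytes\n",
	       sizeof(struct tls12_crypto_info_aes_gcm_128));
	(void)enable_ktls_tx;			/* needs a connected socket to use */
	return 0;
}
```
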
/linux-6.3-rc2/net/qrtr/

ns.c
    113  struct kvec iv;  in service_announce_new() local
    118  iv.iov_base = &pkt;  in service_announce_new()
    139  struct kvec iv;  in service_announce_del() local
    145  iv.iov_base = &pkt;  in service_announce_del()
    170  struct kvec iv;  in lookup_notify() local
    173  iv.iov_base = &pkt;  in lookup_notify()
    313  struct kvec iv;  in say_hello() local
    316  iv.iov_base = &pkt;  in say_hello()
    354  struct kvec iv;  in ctrl_cmd_bye() local
    436  struct kvec iv;  in ctrl_cmd_del_client() local
    [all …]

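Unlike every other entry here, this iv is not an initialization vector: it is a struct kvec, the kernel-side sibling of user space's struct iovec, pointed at an on-stack control packet before the qrtr name service sends it. A userspace analogue of the same scatter/gather idea using writev(); illustrative only, the kernel code itself goes through kvec and kernel_sendmsg():

```c
#include <stdio.h>
#include <string.h>
#include <sys/uio.h>
#include <unistd.h>

/* A toy control packet: fixed header plus a name, sent in one call without
 * first copying the pieces into a single contiguous buffer. */
struct toy_pkt {
	unsigned int cmd;
	char name[16];
};

int main(void)
{
	struct toy_pkt pkt = { .cmd = 4, .name = "hello" };
	char trailer = '\n';
	struct iovec iov[2] = {
		{ .iov_base = &pkt,     .iov_len = sizeof(pkt) },	/* cf. iv.iov_base = &pkt */
		{ .iov_base = &trailer, .iov_len = 1 },
	};

	/* Writes the raw packet bytes followed by a newline to stdout. */
	if (writev(STDOUT_FILENO, iov, 2) < 0)
		perror("writev");
	return 0;
}
```
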