/linux-6.3-rc2/drivers/crypto/nx/

nx-aes-xcbc.c
     81  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len,  in nx_xcbc_empty()
     93  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);  in nx_xcbc_empty()
    108  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len,  in nx_xcbc_empty()
    121  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);  in nx_xcbc_empty()
    195  in_sg = nx_ctx->in_sg;  in nx_xcbc_update()
    230  in_sg = nx_build_sg_list(nx_ctx->in_sg,  in nx_xcbc_update()
    241  in_sg = nx_build_sg_list(in_sg,  in nx_xcbc_update()
    251  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) *  in nx_xcbc_update()
    280  in_sg = nx_ctx->in_sg;  in nx_xcbc_update()
    324  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)sctx->buffer,  in nx_xcbc_final()
    [all …]

nx-sha256.c
    109  struct nx_sg *in_sg = nx_ctx->in_sg;  in nx_sha256_update() local
    113  in_sg = nx_build_sg_list(in_sg,  in nx_sha256_update()
    122  used_sgs = in_sg - nx_ctx->in_sg;  in nx_sha256_update()
    137  in_sg = nx_build_sg_list(in_sg, (u8 *) data,  in nx_sha256_update()
    140  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);  in nx_sha256_update()
    186  struct nx_sg *in_sg, *out_sg;  in nx_sha256_final() local
    215  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf,  in nx_sha256_final()
    231  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);  in nx_sha256_final()

nx-sha512.c
    109  struct nx_sg *in_sg = nx_ctx->in_sg;  in nx_sha512_update() local
    113  in_sg = nx_build_sg_list(in_sg,  in nx_sha512_update()
    121  used_sgs = in_sg - nx_ctx->in_sg;  in nx_sha512_update()
    136  in_sg = nx_build_sg_list(in_sg, (u8 *) data,  in nx_sha512_update()
    139  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);  in nx_sha512_update()
    189  struct nx_sg *in_sg, *out_sg;  in nx_sha512_final() local
    224  in_sg = nx_build_sg_list(nx_ctx->in_sg, sctx->buf, &len,  in nx_sha512_final()
    236  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);  in nx_sha512_final()

nx-aes-gcm.c
    107  struct nx_sg *nx_sg = nx_ctx->in_sg;  in nx_gca()
    137  nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len,  in nx_gca()
    145  nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg)  in nx_gca()
    204  nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len,  in gmac()
    212  nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg)  in gmac()
    249  struct nx_sg *in_sg, *out_sg;  in gcm_empty() local
    269  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) iv,  in gcm_empty()
    282  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);  in gcm_empty()

nx.c
    269  struct nx_sg *nx_insg = nx_ctx->in_sg;  in nx_build_sg_lists()
    294  nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes);  in nx_build_sg_lists()
    314  nx_ctx->op.in = __pa(nx_ctx->in_sg);  in nx_ctx_init()
    322  nx_ctx->op_aead.in = __pa(nx_ctx->in_sg);  in nx_ctx_init()
    677  nx_ctx->in_sg = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE);  in nx_crypto_ctx_init()
    678  nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE);  in nx_crypto_ctx_init()
    753  nx_ctx->in_sg = NULL;  in nx_crypto_ctx_exit()

nx-aes-ccm.c
    161  struct nx_sg *nx_insg = nx_ctx->in_sg;  in generate_pat()
    249  nx_ctx->op.inlen = (nx_ctx->in_sg - nx_insg) *  in generate_pat()
    282  nx_insg = nx_walk_and_build(nx_ctx->in_sg,  in generate_pat()
    296  nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_insg) *  in generate_pat()

nx.h
    126  struct nx_sg *in_sg; /* aligned pointer into kmem to an sg list */  member
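
Every NX entry above leans on one idiom: nx_build_sg_list() (and nx_walk_and_build()) return a cursor just past the last struct nx_sg descriptor they wrote, so the byte count handed to the coprocessor falls out of pointer arithmetic between the cursor and the base of the descriptor array. Below is a minimal user-space model of that idiom; the types and the builder are simplified stand-ins, not the kernel's real nx_sg or nx_build_sg_list().

#include <stdio.h>
#include <stdint.h>

/* Simplified stand-in for the kernel's struct nx_sg descriptor. */
struct nx_sg {
	uint64_t addr;
	uint32_t len;
};

/*
 * Model of the builder idiom: fill descriptors starting at sg and
 * return a cursor one past the last entry written.
 */
static struct nx_sg *build_sg_list(struct nx_sg *sg, const uint8_t *buf,
				   unsigned int len, unsigned int max_entries,
				   unsigned int max_entry_len)
{
	while (len && max_entries--) {
		unsigned int chunk = len < max_entry_len ? len : max_entry_len;

		sg->addr = (uint64_t)(uintptr_t)buf;
		sg->len = chunk;
		buf += chunk;
		len -= chunk;
		sg++;
	}
	return sg;	/* cursor past the last used descriptor */
}

int main(void)
{
	struct nx_sg sg_head[16];
	uint8_t data[10000];
	struct nx_sg *end = build_sg_list(sg_head, data, sizeof(data), 16, 4096);

	/* The op.inlen idiom: descriptor count from pointer difference. */
	size_t inlen = (end - sg_head) * sizeof(struct nx_sg);

	printf("%zu descriptors, inlen = %zu bytes\n",
	       (size_t)(end - sg_head), inlen);
	return 0;
}

Keeping base and cursor separate is what lets nx_xcbc_update() chain two build calls (lines 230 and 241 above) and still derive a single op.inlen at line 251.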
/linux-6.3-rc2/lib/

sg_split.c
     80  struct scatterlist *in_sg, *out_sg;  in sg_split_phys() local
     84  in_sg = split->in_sg0;  in sg_split_phys()
     87  *out_sg = *in_sg;  in sg_split_phys()
     96  in_sg = sg_next(in_sg);  in sg_split_phys()
    106  struct scatterlist *in_sg, *out_sg;  in sg_split_mapped() local
    110  in_sg = split->in_sg0;  in sg_split_mapped()
    113  sg_dma_address(out_sg) = sg_dma_address(in_sg);  in sg_split_mapped()
    114  sg_dma_len(out_sg) = sg_dma_len(in_sg);  in sg_split_mapped()
    119  in_sg = sg_next(in_sg);  in sg_split_mapped()
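
Both sg_split helpers walk the source list with sg_next() while copying entries out: sg_split_phys() duplicates whole entries, sg_split_mapped() only the DMA address/length pair. A rough user-space model of the mapped variant follows, using a linked-list stand-in for struct scatterlist; the real code also trims the first and last entries to the split boundaries.

#include <stdio.h>

/* Simplified stand-in for a chained struct scatterlist. */
struct sg {
	unsigned long dma_address;
	unsigned int dma_len;
	struct sg *next;
};

/* Model of sg_split_mapped(): copy nents DMA entries into out[]. */
static void split_mapped(struct sg *in_sg, struct sg *out, int nents)
{
	for (int i = 0; i < nents; i++) {
		out[i].dma_address = in_sg->dma_address;
		out[i].dma_len = in_sg->dma_len;
		in_sg = in_sg->next;	/* sg_next() in the real code */
	}
}

int main(void)
{
	struct sg b = { 0x2000, 512, NULL };
	struct sg a = { 0x1000, 4096, &b };
	struct sg out[2];

	split_mapped(&a, out, 2);
	printf("out[1]: addr=0x%lx len=%u\n", out[1].dma_address, out[1].dma_len);
	return 0;
}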
/linux-6.3-rc2/drivers/crypto/

omap-des.c
    152  struct scatterlist *in_sg;  member
    387  scatterwalk_start(&dd->in_walk, dd->in_sg);  in omap_des_crypt_dma()
    543  dd->in_sg = req->src;  in omap_des_prepare_req()
    845  BUG_ON(!dd->in_sg);  in omap_des_irq()
    847  BUG_ON(_calc_walked(in) > dd->in_sg->length);  in omap_des_irq()
    849  src = sg_virt(dd->in_sg) + _calc_walked(in);  in omap_des_irq()
    855  if (dd->in_sg->length == _calc_walked(in)) {  in omap_des_irq()
    856  dd->in_sg = sg_next(dd->in_sg);  in omap_des_irq()
    857  if (dd->in_sg) {  in omap_des_irq()
    859  dd->in_sg);  in omap_des_irq()
    [all …]

omap-aes.c
    268  struct scatterlist *in_sg,  in omap_aes_crypt_dma() argument
    277  scatterwalk_start(&dd->in_walk, dd->in_sg);  in omap_aes_crypt_dma()
    447  dd->in_sg = req->src;  in omap_aes_prepare_req()
    872  BUG_ON(!dd->in_sg);  in omap_aes_irq()
    874  BUG_ON(_calc_walked(in) > dd->in_sg->length);  in omap_aes_irq()
    876  src = sg_virt(dd->in_sg) + _calc_walked(in);  in omap_aes_irq()
    882  if (dd->in_sg->length == _calc_walked(in)) {  in omap_aes_irq()
    883  dd->in_sg = sg_next(dd->in_sg);  in omap_aes_irq()
    884  if (dd->in_sg) {  in omap_aes_irq()
    886  dd->in_sg);  in omap_aes_irq()
    [all …]
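
omap-des and omap-aes share the same PIO interrupt walk: copy from the current entry at the walker's offset, and once _calc_walked(in) reaches that entry's length, hop to sg_next() and restart the walk there. Here is a minimal user-space model of just the advance step, with hypothetical names; the real handlers also feed the hardware FIFO and re-arm interrupts.

#include <stdio.h>

/* Simplified chained scatterlist entry. */
struct sg {
	unsigned int length;
	struct sg *next;
};

/*
 * Model of the IRQ walk: consume n bytes from the current entry; when
 * the walked count covers the whole entry, step to the next one and
 * reset the count (scatterwalk_start() on the new entry).
 */
static void consume(struct sg **cur, unsigned int *walked, unsigned int n)
{
	*walked += n;
	if (*cur && *walked == (*cur)->length) {
		*cur = (*cur)->next;
		*walked = 0;
	}
}

int main(void)
{
	struct sg b = { .length = 16, .next = NULL };
	struct sg a = { .length = 16, .next = &b };
	struct sg *cur = &a;
	unsigned int walked = 0;

	for (int i = 0; i < 8 && cur; i++)
		consume(&cur, &walked, 4);	/* 4 bytes per FIFO burst */
	printf("done: %s\n", cur ? "entries left" : "list fully walked");
	return 0;
}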
sahara.c
    185  struct scatterlist *in_sg;  member
    223  struct scatterlist *in_sg;  member
    503  sg = dev->in_sg;  in sahara_hw_descriptor_create()
    567  dev->in_sg = req->src;  in sahara_aes_process()
    790  dev->in_sg = rctx->in_sg;  in sahara_sha_hw_links_create()
    803  sg = dev->in_sg;  in sahara_sha_hw_links_create()
    961  rctx->in_sg = rctx->in_sg_chain;  in sahara_sha_prepare_request()
    967  rctx->in_sg = req->src;  in sahara_sha_prepare_request()
    969  rctx->in_sg = rctx->in_sg_chain;  in sahara_sha_prepare_request()
    975  rctx->in_sg = req->src;  in sahara_sha_prepare_request()
    [all …]

atmel-tdes.c
    115  struct scatterlist *in_sg;  member
    305  dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);  in atmel_tdes_crypt_pdc_stop()
    497  IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size);  in atmel_tdes_crypt_start()
    502  if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg))  in atmel_tdes_crypt_start()
    521  dma_unmap_sg(dd->dev, dd->in_sg, 1,  in atmel_tdes_crypt_start()
    526  addr_in = sg_dma_address(dd->in_sg);  in atmel_tdes_crypt_start()
    550  dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);  in atmel_tdes_crypt_start()
    630  dd->in_sg = req->src;  in atmel_tdes_handle_queue()
    661  dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);  in atmel_tdes_crypt_dma_stop()
   1063  dd->in_sg = sg_next(dd->in_sg);  in atmel_tdes_done_task()
    [all …]
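
atmel_tdes_crypt_start() shows the DMA bookkeeping around in_sg: map one entry per direction, check that the mapped lengths can be paired, and unmap with the same DMA_TO_DEVICE direction on every exit path. A simplified user-space model of that pairing check follows, with stand-in map/unmap helpers rather than the real dma_map_sg() API.

#include <stdbool.h>
#include <stdio.h>

/* Stand-in for one mapped scatterlist entry. */
struct map { unsigned int dma_len; bool mapped; };

static bool map_one(struct map *m)   { m->mapped = true; return true; }
static void unmap_one(struct map *m) { m->mapped = false; }

/*
 * Model of the fast path: the DMA engine pairs one input entry with
 * one output entry, so a length mismatch means unmapping both and
 * falling back to CPU (PIO) mode.
 */
static bool try_dma(struct map *in, struct map *out)
{
	if (!map_one(in))
		return false;
	if (!map_one(out)) {
		unmap_one(in);
		return false;
	}
	if (in->dma_len != out->dma_len) {	/* sg_dma_len() mismatch */
		unmap_one(out);
		unmap_one(in);
		return false;	/* caller falls back to PIO */
	}
	return true;
}

int main(void)
{
	struct map in = { .dma_len = 4096 }, out = { .dma_len = 512 };

	printf("DMA usable: %s\n", try_dma(&in, &out) ? "yes" : "no");
	return 0;
}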
omap-aes-gcm.c
     33  dd->in_sg = NULL;  in omap_aes_gcm_finish_req()
     56  dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);  in omap_aes_gcm_done_task()
    145  dd->in_sg = dd->in_sgl;  in omap_aes_gcm_copy_buffers()
    168  dd->in_sg_len = sg_nents_for_len(dd->in_sg, alen + clen);  in omap_aes_gcm_copy_buffers()

omap-aes.h
    183  struct scatterlist *in_sg;  member
/linux-6.3-rc2/drivers/crypto/gemini/

sl3516-ce-cipher.c
     26  struct scatterlist *in_sg;  in sl3516_ce_need_fallback() local
     83  in_sg = areq->src;  in sl3516_ce_need_fallback()
     85  while (in_sg && out_sg) {  in sl3516_ce_need_fallback()
     86  if (in_sg->length != out_sg->length) {  in sl3516_ce_need_fallback()
     90  in_sg = sg_next(in_sg);  in sl3516_ce_need_fallback()
     93  if (in_sg || out_sg)  in sl3516_ce_need_fallback()
/linux-6.3-rc2/drivers/crypto/allwinner/sun8i-ss/

sun8i-ss-cipher.c
     28  struct scatterlist *in_sg = areq->src;  in sun8i_ss_need_fallback() local
     76  in_sg = areq->src;  in sun8i_ss_need_fallback()
     78  while (in_sg && out_sg) {  in sun8i_ss_need_fallback()
     79  if (in_sg->length != out_sg->length)  in sun8i_ss_need_fallback()
     81  in_sg = sg_next(in_sg);  in sun8i_ss_need_fallback()
     84  if (in_sg || out_sg)  in sun8i_ss_need_fallback()
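
sl3516_ce_need_fallback() and sun8i_ss_need_fallback() run the same test: walk src and dst in lockstep and bail to the software fallback cipher when entry lengths differ or one list ends before the other. A user-space model of that walk, with a simplified stand-in for struct scatterlist:

#include <stdbool.h>
#include <stdio.h>

/* Simplified stand-in for a chained struct scatterlist. */
struct sg {
	unsigned int length;
	struct sg *next;
};

/*
 * Model of the shared test: both engines need source and destination
 * lists whose entries pair up with equal lengths.
 */
static bool need_fallback(struct sg *in_sg, struct sg *out_sg)
{
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length)
			return true;
		in_sg = in_sg->next;	/* sg_next() in the real code */
		out_sg = out_sg->next;
	}
	return in_sg || out_sg;	/* leftover entries on one side */
}

int main(void)
{
	struct sg src = { .length = 64, .next = NULL };
	struct sg dst = { .length = 32, .next = NULL };

	printf("fallback: %s\n", need_fallback(&src, &dst) ? "yes" : "no");
	return 0;
}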
/linux-6.3-rc2/drivers/crypto/allwinner/sun4i-ss/

sun4i-ss-hash.c
    198  struct scatterlist *in_sg = areq->src;  in sun4i_hash() local
    263  while (in_sg && i == 1) {  in sun4i_hash()
    264  if (in_sg->length % 4)  in sun4i_hash()
    266  in_sg = sg_next(in_sg);  in sun4i_hash()

sun4i-ss-cipher.c
    182  struct scatterlist *in_sg = areq->src;  in sun4i_ss_cipher_poll() local
    225  while (in_sg && no_chunk == 1) {  in sun4i_ss_cipher_poll()
    226  if ((in_sg->length | in_sg->offset) & 3u)  in sun4i_ss_cipher_poll()
    228  in_sg = sg_next(in_sg);  in sun4i_ss_cipher_poll()
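
The sun4i-ss check at line 226 folds two alignment tests into one expression: ORing length and offset and masking the low two bits rejects any entry that is not 4-byte aligned in both. A tiny standalone illustration:

#include <stdbool.h>
#include <stdio.h>

/* One mask test covers both values: any set low bit survives the OR. */
static bool word_aligned(unsigned int length, unsigned int offset)
{
	return ((length | offset) & 3u) == 0;
}

int main(void)
{
	printf("%d %d %d\n",
	       word_aligned(64, 0),	/* 1: both multiples of 4 */
	       word_aligned(64, 2),	/* 0: misaligned offset */
	       word_aligned(30, 0));	/* 0: length not a multiple of 4 */
	return 0;
}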
/linux-6.3-rc2/crypto/

rsa-pkcs1pad.c
    102  struct scatterlist in_sg[2], out_sg[1];  member
    256  pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf,  in pkcs1pad_encrypt()
    264  akcipher_request_set_crypt(&req_ctx->child_req, req_ctx->in_sg,  in pkcs1pad_encrypt()
    413  pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf,  in pkcs1pad_sign()
    421  akcipher_request_set_crypt(&req_ctx->child_req, req_ctx->in_sg,  in pkcs1pad_sign()
/linux-6.3-rc2/crypto/asymmetric_keys/

public_key.c
    238  struct scatterlist in_sg, out_sg;  in software_key_eds_op() local
    277  sg_init_one(&in_sg, in, params->in_len);  in software_key_eds_op()
    279  akcipher_request_set_crypt(req, &in_sg, &out_sg, params->in_len,  in software_key_eds_op()
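
software_key_eds_op() shows the simplest shape of the akcipher scatterlist contract: with input and output each in one contiguous kernel buffer, one sg_init_one() per direction is enough before akcipher_request_set_crypt(). A kernel-style sketch of just that setup; the wrapper name is hypothetical, and allocation, completion handling and error paths are omitted.

#include <crypto/akcipher.h>
#include <linux/scatterlist.h>

/*
 * Sketch: point one single-entry list at each contiguous buffer and
 * attach both to the request. Assumes req and the buffers are set up
 * by the caller.
 */
static void set_up_akcipher_io(struct akcipher_request *req,
			       struct scatterlist *in_sg,
			       struct scatterlist *out_sg,
			       void *in, unsigned int in_len,
			       void *out, unsigned int out_len)
{
	sg_init_one(in_sg, in, in_len);
	sg_init_one(out_sg, out, out_len);
	akcipher_request_set_crypt(req, in_sg, out_sg, in_len, out_len);
}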
/linux-6.3-rc2/arch/um/drivers/

virt-pci.c
     77  struct scatterlist out_sg, extra_sg, in_sg;  in um_pci_send_cmd() local
     80  [1] = extra ? &extra_sg : &in_sg,  in um_pci_send_cmd()
     81  [2] = extra ? &in_sg : NULL,  in um_pci_send_cmd()
    132  sg_init_one(&in_sg, out, out_size);  in um_pci_send_cmd()
/linux-6.3-rc2/net/vmw_vsock/

virtio_transport.c
    104  int ret, in_sg = 0, out_sg = 0;  in virtio_transport_send_pkt_work() local
    122  ret = virtqueue_add_sgs(vq, sgs, out_sg, in_sg, skb, GFP_KERNEL);  in virtio_transport_send_pkt_work()
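
Note the different convention here: in virtio code such as virtio_transport_send_pkt_work() (and um_pci_send_cmd() above), in_sg and out_sg are counts into an sgs[] array, with all device-readable ("out") entries listed before device-writable ("in") ones. A kernel-style sketch of that convention; the helper is hypothetical, and a real caller must also handle a full ring (-ENOSPC) and kick the queue.

#include <linux/scatterlist.h>
#include <linux/virtio.h>

/*
 * Sketch: queue a header plus optional payload, both read by the
 * device, so both land in the "out" portion of sgs[] and in_sg
 * stays zero. The token is returned by virtqueue_get_buf() later.
 */
static int queue_packet(struct virtqueue *vq, void *token,
			struct scatterlist *hdr_sg,
			struct scatterlist *payload_sg, bool has_payload)
{
	struct scatterlist *sgs[2];
	int out_sg = 0, in_sg = 0;

	sgs[out_sg++] = hdr_sg;		/* header is read by the device */
	if (has_payload)
		sgs[out_sg++] = payload_sg;

	return virtqueue_add_sgs(vq, sgs, out_sg, in_sg, token, GFP_KERNEL);
}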
/linux-6.3-rc2/drivers/crypto/aspeed/

aspeed-hace-crypto.c
    174  struct scatterlist *in_sg;  in aspeed_sk_start() local
    179  in_sg = req->src;  in aspeed_sk_start()
    181  nbytes = sg_copy_to_buffer(in_sg, rctx->src_nents,  in aspeed_sk_start()
/linux-6.3-rc2/drivers/crypto/stm32/

stm32-cryp.c
   1172  struct scatterlist *in_sg;  in stm32_cryp_prepare_req() local
   1233  in_sg = req ? req->src : areq->src;  in stm32_cryp_prepare_req()
   1234  scatterwalk_start(&cryp->in_walk, in_sg);  in stm32_cryp_prepare_req()