/linux/drivers/misc/genwqe/card_utils.c
    304  sgl->fpage_offs, sgl->fpage_size, sgl->lpage_size);   in genwqe_alloc_sync_sgl()
    317  sgl->sgl = __genwqe_alloc_consistent(cd, sgl->sgl_size,   in genwqe_alloc_sync_sgl()
    319  if (sgl->sgl == NULL) {   in genwqe_alloc_sync_sgl()
    347  sgl->lpage_size, sgl->lpage_size)) {   in genwqe_alloc_sync_sgl()
    365  __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl,   in genwqe_alloc_sync_sgl()
    367  sgl->sgl = NULL;   in genwqe_alloc_sync_sgl()
    386  s = &sgl->sgl[0]; /* first set of 8 entries */   in genwqe_setup_sgl()
    483  sgl->fpage + sgl->fpage_offs, sgl->fpage_size);   in genwqe_free_sync_sgl()
    498  offset = sgl->user_size - sgl->lpage_size;   in genwqe_free_sync_sgl()
    513  __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl,   in genwqe_free_sync_sgl()
    [all …]
|
/linux/crypto/af_alg.c
     548  if (sgl->sgt.sgl) {   in af_alg_free_sg()
     551  unpin_user_page(sg_page(&sgl->sgt.sgl[i]));   in af_alg_free_sg()
     552  if (sgl->sgt.sgl != sgl->sgl)   in af_alg_free_sg()
     553  kvfree(sgl->sgt.sgl);   in af_alg_free_sg()
     554  sgl->sgt.sgl = NULL;   in af_alg_free_sg()
    1002  sg = sgl->sg + sgl->cur - 1;   in af_alg_sendmsg()
    1055  for (; sgl->cur < sgtable.nents; sgl->cur++)   in af_alg_sendmsg()
    1211  areq->first_rsgl.sgl.sgt.sgl = areq->first_rsgl.sgl.sgl;   in af_alg_alloc_areq()
    1263  rsgl->sgl.sgt.sgl = rsgl->sgl.sgl;   in af_alg_get_rsgl()
    1268  sg_init_table(rsgl->sgl.sgt.sgl, ALG_MAX_PAGES);   in af_alg_get_rsgl()
    [all …]
|
/linux/crypto/algif_hash.c
     20  struct af_alg_sgl sgl;   [member]
    106  ctx->sgl.sgt.sgl = ctx->sgl.sgl;   in hash_sendmsg()
    107  ctx->sgl.sgt.nents = 0;   in hash_sendmsg()
    108  ctx->sgl.sgt.orig_nents = 0;   in hash_sendmsg()
    115  sg_init_table(ctx->sgl.sgl, npages);   in hash_sendmsg()
    117  ctx->sgl.need_unpin = iov_iter_extract_will_pin(&msg->msg_iter);   in hash_sendmsg()
    120  &ctx->sgl.sgt, npages, 0);   in hash_sendmsg()
    124  sg_mark_end(ctx->sgl.sgt.sgl + ctx->sgl.sgt.nents - 1);   in hash_sendmsg()
    132  ahash_request_set_crypt(&ctx->req, ctx->sgl.sgt.sgl,   in hash_sendmsg()
    160  af_alg_free_sg(&ctx->sgl);   in hash_sendmsg()
    [all …]
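
The two crypto entries above are the kernel side of the AF_ALG socket interface: hash_sendmsg() pins the user pages behind a sendmsg()/send() into ctx->sgl before handing them to the hash. For context, a minimal hypothetical userspace sketch that exercises that path (error handling trimmed):

```c
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/if_alg.h>

int main(void)
{
	struct sockaddr_alg sa = {
		.salg_family = AF_ALG,
		.salg_type   = "hash",
		.salg_name   = "sha256",
	};
	unsigned char digest[32];	/* SHA-256 digest length */
	const char msg[] = "hello";
	int tfm, op, i;

	tfm = socket(AF_ALG, SOCK_SEQPACKET, 0);
	if (tfm < 0 || bind(tfm, (struct sockaddr *)&sa, sizeof(sa)) < 0)
		return 1;
	op = accept(tfm, NULL, 0);	/* one request socket per operation */
	if (op < 0)
		return 1;
	if (send(op, msg, strlen(msg), 0) < 0)	/* pinned into the kernel sgl */
		return 1;
	if (read(op, digest, sizeof(digest)) != sizeof(digest))
		return 1;
	for (i = 0; i < (int)sizeof(digest); i++)
		printf("%02x", digest[i]);
	putchar('\n');
	close(op);
	close(tfm);
	return 0;
}
```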
|
/linux/drivers/gpu/drm/xe/xe_res_cursor.h
     46  struct scatterlist *sgl;   [member]
     72  cur->sgl = NULL;   in xe_res_first()
    130  struct scatterlist *sgl = cur->sgl;   in __xe_res_sg_next() [local]
    133  while (start >= sg_dma_len(sgl)) {   in __xe_res_sg_next()
    134  start -= sg_dma_len(sgl);   in __xe_res_sg_next()
    135  sgl = sg_next(sgl);   in __xe_res_sg_next()
    136  XE_WARN_ON(!sgl);   in __xe_res_sg_next()
    141  cur->sgl = sgl;   in __xe_res_sg_next()
    163  cur->sgl = sg->sgl;   in xe_res_first_sg()
    194  if (cur->sgl) {   in xe_res_next()
    [all …]
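
__xe_res_sg_next() above implements a common pattern: advancing a cursor into a DMA-mapped list by a byte count. A generic, hedged sketch of the same walk (demo_advance() is a hypothetical name, not part of the xe driver):

```c
#include <linux/scatterlist.h>

/* Advance a (sg, offset) cursor by 'bytes' across DMA-mapped entries. */
static struct scatterlist *demo_advance(struct scatterlist *sg,
					u64 *offset, u64 bytes)
{
	u64 pos = *offset + bytes;

	/* skip whole segments until 'pos' falls inside one */
	while (sg && pos >= sg_dma_len(sg)) {
		pos -= sg_dma_len(sg);
		sg = sg_next(sg);	/* NULL once the end marker is hit */
	}
	*offset = pos;
	return sg;
}
```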
|
/linux/mm/page_reporting.c
    108  struct scatterlist *sg = sgl;   in page_reporting_drain()
    137  sg_init_table(sgl, nents);   in page_reporting_drain()
    208  sg_set_page(&sgl[*offset], page, page_len, 0);   in page_reporting_cycle()
    286  sgl, &offset);   in page_reporting_process_zone()
    295  sgl = &sgl[offset];   in page_reporting_process_zone()
    296  err = prdev->report(prdev, sgl, leftover);   in page_reporting_process_zone()
    313  struct scatterlist *sgl;   in page_reporting_process() [local]
    325  sgl = kmalloc_array(PAGE_REPORTING_CAPACITY, sizeof(*sgl), GFP_KERNEL);   in page_reporting_process()
    326  if (!sgl)   in page_reporting_process()
    329  sg_init_table(sgl, PAGE_REPORTING_CAPACITY);   in page_reporting_process()
    [all …]
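
page_reporting_process() above allocates an sgl of PAGE_REPORTING_CAPACITY entries, fills it with batches of free pages, and passes it to the provider's report() hook. A sketch of a minimal provider, assuming only the documented page_reporting_register()/unregister() API (the demo_* names are hypothetical):

```c
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/page_reporting.h>
#include <linux/scatterlist.h>

/* The core fills 'sgl' (see page_reporting_cycle() above) and calls this. */
static int demo_report(struct page_reporting_dev_info *prdev,
		       struct scatterlist *sgl, unsigned int nents)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, nents, i)
		pr_info("free range: pfn %lx, %u bytes\n",
			page_to_pfn(sg_page(sg)), sg->length);

	return 0;	/* 0 lets the core mark the pages as reported */
}

static struct page_reporting_dev_info demo_prdev = {
	.report = demo_report,
};

static int __init demo_init(void)
{
	return page_reporting_register(&demo_prdev);
}

static void __exit demo_exit(void)
{
	page_reporting_unregister(&demo_prdev);
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");
```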
|
/linux/drivers/crypto/hisilicon/sgl.c
     37  struct hisi_acc_hw_sgl *sgl;   [member]
     95  if (!block[i].sgl) {   in hisi_acc_create_sgl_pool()
    107  if (!block[i].sgl) {   in hisi_acc_create_sgl_pool()
    175  hw_sge->buf = sg_dma_address(sgl);   in sg_map_to_hw_sg()
    176  hw_sge->len = cpu_to_le32(sg_dma_len(sgl));   in sg_map_to_hw_sg()
    177  hw_sge->page_ctrl = sg_virt(sgl);   in sg_map_to_hw_sg()
    219  struct scatterlist *sgl,   in hisi_acc_sg_buf_map_to_hw_sgl() [argument]
    233  sg_n = sg_nents(sgl);   in hisi_acc_sg_buf_map_to_hw_sgl()
    256  for_each_sg(sgl, sg, sg_n_mapped, i) {   in hisi_acc_sg_buf_map_to_hw_sgl()
    285  if (!dev || !sgl || !hw_sgl)   in hisi_acc_sg_buf_unmap()
    [all …]
|
/linux/lib/scatterlist.c
    129  memset(sgl, 0, sizeof(*sgl) * nents);   in sg_init_table()
    130  sg_init_marker(sgl, nents);   in sg_init_table()
    206  sgl = table->sgl;   in __sg_free_table()
    231  sgl = next;   in __sg_free_table()
    235  table->sgl = NULL;   in __sg_free_table()
    347  table->sgl = sg;   in __sg_alloc_table()
    634  if (!sgl)   in sgl_alloc_order()
    638  sg = sgl;   in sgl_alloc_order()
    654  return sgl;   in sgl_alloc_order()
    699  kfree(sgl);   in sgl_free_n_order()
    [all …]
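
These hits are the core helpers themselves: sg_alloc_table() lands in __sg_alloc_table(), sg_free_table() in __sg_free_table(). A hedged sketch of the usual caller-side pairing (demo_sg_table() is a hypothetical function, not kernel API):

```c
#include <linux/gfp.h>
#include <linux/scatterlist.h>

/* Allocate a table, populate it with pages, then tear everything down. */
static int demo_sg_table(void)
{
	struct sg_table table;
	struct scatterlist *sg;
	int i, ret;

	ret = sg_alloc_table(&table, 4, GFP_KERNEL);	/* sets the end marker */
	if (ret)
		return ret;

	for_each_sg(table.sgl, sg, table.orig_nents, i) {
		struct page *page = alloc_page(GFP_KERNEL);

		if (!page)
			break;		/* unpopulated entries keep a NULL page */
		sg_set_page(sg, page, PAGE_SIZE, 0);
	}

	for_each_sg(table.sgl, sg, table.orig_nents, i)
		if (sg_page(sg))
			__free_page(sg_page(sg));

	sg_free_table(&table);	/* frees the (possibly chained) entry arrays */
	return 0;
}
```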
|
/linux/lib/kfifo.c
    318  sg_set_buf(sgl, buf, len);   in setup_sgl_buf()
    321  sg_dma_address(sgl) = dma + data_offset;   in setup_sgl_buf()
    322  sg_dma_len(sgl) = len;   in setup_sgl_buf()
    344  n = setup_sgl_buf(fifo, sgl, off, nents, len_to_end, dma);   in setup_sgl()
    351  struct scatterlist *sgl, int nents, unsigned int len,   in __kfifo_dma_in_prepare() [argument]
    360  return setup_sgl(fifo, sgl, nents, len, fifo->in, dma);   in __kfifo_dma_in_prepare()
    365  struct scatterlist *sgl, int nents, unsigned int len,   in __kfifo_dma_out_prepare() [argument]
    374  return setup_sgl(fifo, sgl, nents, len, fifo->out, dma);   in __kfifo_dma_out_prepare()
    558  struct scatterlist *sgl, int nents, unsigned int len, size_t recsize,   in __kfifo_dma_in_prepare_r() [argument]
    568  return setup_sgl(fifo, sgl, nents, len, fifo->in + recsize, dma);   in __kfifo_dma_in_prepare_r()
    [all …]
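
The __kfifo_dma_* helpers above back the kfifo_dma_in_prepare()/kfifo_dma_out_prepare() macros, which hand out up to two sg entries because the ring may wrap. A hedged sketch of the intended call sequence, loosely following samples/kfifo/dma-example.c (start_dma() is a placeholder, not a real API):

```c
#include <linux/kfifo.h>
#include <linux/scatterlist.h>

static DEFINE_KFIFO(demo_fifo, unsigned char, 4096);

/* Reserve fifo space for a DMA transfer into the ring. */
static int demo_fifo_dma_in(unsigned int len)
{
	struct scatterlist sgl[2];	/* the ring may wrap: up to two runs */
	unsigned int nents;

	sg_init_table(sgl, ARRAY_SIZE(sgl));
	nents = kfifo_dma_in_prepare(&demo_fifo, sgl, ARRAY_SIZE(sgl), len);
	if (!nents)
		return -EAGAIN;		/* no free space to prepare */

	/* start_dma(sgl, nents); -- hypothetical DMA submission */

	/* commit only what the DMA actually wrote (at most 'len') */
	kfifo_dma_in_finish(&demo_fifo, len);
	return 0;
}
```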
|
/linux/include/linux/scatterlist.h
     40  struct scatterlist *sgl; /* the list */   [member]
    202  for_each_sg((sgt)->sgl, sg, (sgt)->orig_nents, i)
    210  for_each_sg((sgt)->sgl, sg, (sgt)->nents, i)
    213  struct scatterlist *sgl)   in __sg_chain() [argument]
    225  chain_sg->page_link = ((unsigned long) sgl | SG_CHAIN) & ~SG_END;   in __sg_chain()
    239  struct scatterlist *sgl)   in sg_chain() [argument]
    241  __sg_chain(&prv[prv_nents - 1], sgl);   in sg_chain()
    413  static inline void sg_init_marker(struct scatterlist *sgl,   in sg_init_marker() [argument]
    416  sg_mark_end(&sgl[nents - 1]);   in sg_init_marker()
    488  void sgl_free_order(struct scatterlist *sgl, int order);
    [all …]
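
sg_chain() above splices two scatterlist arrays by turning the last entry of the first array into an SG_CHAIN link. A small sketch, assuming an architecture without CONFIG_ARCH_NO_SG_CHAIN (demo_chain() is hypothetical):

```c
#include <linux/scatterlist.h>

/* Splice two fixed arrays into one logical list. */
static void demo_chain(void)
{
	struct scatterlist a[4], b[4];

	sg_init_table(a, ARRAY_SIZE(a));
	sg_init_table(b, ARRAY_SIZE(b));

	/* a[3] becomes an SG_CHAIN link to b[0]; it no longer carries data,
	 * and __sg_chain() clears its SG_END bit, so the list now ends at
	 * the marker sg_init_table() left in b[3]. */
	sg_chain(a, ARRAY_SIZE(a), b);

	/* sg_next() now walks a[0..2] and then b[0..3] transparently */
}
```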
|
/linux/drivers/gpu/drm/nouveau/nvkm/core/firmware.c
    121  case NVKM_FIRMWARE_IMG_DMA: return &fw->mem.sgl;   in nvkm_firmware_mem_sgl()
    122  case NVKM_FIRMWARE_IMG_SGT: return fw->mem.sgt.sgl;   in nvkm_firmware_mem_sgl()
    139  .sgl = nvkm_firmware_mem_sgl(memory),   in nvkm_firmware_mem_map()
    142  if (!map.sgl)   in nvkm_firmware_mem_map()
    153  return sgl ? sg_dma_len(sgl) : 0;   in nvkm_firmware_mem_size()
    251  sg_init_one(&fw->mem.sgl, fw->img, len);   in nvkm_firmware_ctor()
    252  sg_dma_address(&fw->mem.sgl) = fw->phys;   in nvkm_firmware_ctor()
    253  sg_dma_len(&fw->mem.sgl) = len;   in nvkm_firmware_ctor()
    268  struct scatterlist *sgl;   in nvkm_firmware_ctor() [local]
    272  for_each_sgtable_sg(&fw->mem.sgt, sgl, i) {   in nvkm_firmware_ctor()
    [all …]
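
nvkm_firmware_ctor() above shows the single-segment case: sg_init_one() wraps one contiguous buffer and sets the end marker in the same call. A minimal sketch (demo_one() is hypothetical):

```c
#include <linux/scatterlist.h>

/* Wrap one contiguous, lowmem kernel buffer in a one-entry list. */
static void demo_one(void *buf, unsigned int len)
{
	struct scatterlist sg;

	sg_init_one(&sg, buf, len);	/* also marks &sg as the end */
	/* &sg can now go to dma_map_sg(), an ahash request, etc. */
}
```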
|
/linux/drivers/target/iscsi/cxgbit/cxgbit_ddp.c
    133  struct scatterlist *sg = ttinfo->sgl;   in cxgbit_ddp_set_map()
    176  struct scatterlist *sgl = ttinfo->sgl;   in cxgbit_ddp_reserve() [local]
    178  unsigned int sg_offset = sgl->offset;   in cxgbit_ddp_reserve()
    188  if (cxgbit_ddp_sgl_check(sgl, sgcnt) < 0)   in cxgbit_ddp_reserve()
    191  ttinfo->nr_pages = (xferlen + sgl->offset +   in cxgbit_ddp_reserve()
    203  sgl->offset = 0;   in cxgbit_ddp_reserve()
    205  sgl->offset = sg_offset;   in cxgbit_ddp_reserve()
    245  ttinfo->sgl = cmd->se_cmd.t_data_sg;   in cxgbit_get_r2t_ttt()
    253  ttinfo->sgl = NULL;   in cxgbit_get_r2t_ttt()
    284  if (unlikely(ttinfo->sgl)) {   in cxgbit_unmap_cmd()
    [all …]
|
/linux/drivers/crypto/intel/qat/qat_common/qat_bl.c
     51  struct scatterlist *sgl,   in __qat_bl_sgl_to_bufl() [argument]
     62  int n = sg_nents(sgl);   in __qat_bl_sgl_to_bufl()
     97  for_each_sg(sgl, sg, n, i) {   in __qat_bl_sgl_to_bufl()
    127  if (sgl != sglout) {   in __qat_bl_sgl_to_bufl()
    217  n = sg_nents(sgl);   in __qat_bl_sgl_to_bufl()
    269  struct scatterlist *sgl,   in qat_bl_sgl_map() [argument]
    279  n = sg_nents(sgl);   in qat_bl_sgl_map()
    333  if (sgl)   in qat_bl_sgl_free_unmap()
    334  sgl_free(sgl);   in qat_bl_sgl_free_unmap()
    356  *sgl = dst;   in qat_bl_sgl_alloc_map()
    [all …]
|
/linux/tools/virtio/linux/scatterlist.h
     92  struct scatterlist *sgl)   in sg_chain() [argument]
    104  prv[prv_nents - 1].page_link = ((unsigned long) sgl | 0x01) & ~0x02;   in sg_chain()
    150  static inline void sg_init_table(struct scatterlist *sgl, unsigned int nents)   in sg_init_table() [argument]
    152  memset(sgl, 0, sizeof(*sgl) * nents);   in sg_init_table()
    153  sg_mark_end(&sgl[nents - 1]);   in sg_init_table()
|
/linux/kernel/dma/direct.h
     21  int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, int nents,
     28  void dma_direct_sync_sg_for_device(struct device *dev, struct scatterlist *sgl,
     32  struct scatterlist *sgl, int nents, enum dma_data_direction dir)   in dma_direct_sync_sg_for_device() [argument]
     40  void dma_direct_unmap_sg(struct device *dev, struct scatterlist *sgl,
     43  struct scatterlist *sgl, int nents, enum dma_data_direction dir);
     46  struct scatterlist *sgl, int nents, enum dma_data_direction dir,   in dma_direct_unmap_sg() [argument]
     51  struct scatterlist *sgl, int nents, enum dma_data_direction dir)   in dma_direct_sync_sg_for_cpu() [argument]
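
dma_direct_map_sg() and friends sit behind the generic DMA API. A hedged sketch of the driver-side sequence built on top of them; mapping may merge entries, so the returned count drives iteration while the original nents is used for sync/unmap (demo_dma() is hypothetical):

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int demo_dma(struct device *dev, struct sg_table *sgt)
{
	struct scatterlist *sg;
	int i, ents;

	ents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
	if (!ents)
		return -ENOMEM;	/* dma_map_sg() returns 0 on failure */

	/* iterate the *mapped* count: the IOMMU may have merged entries */
	for_each_sg(sgt->sgl, sg, ents, i) {
		dma_addr_t addr = sg_dma_address(sg);
		unsigned int len = sg_dma_len(sg);

		dev_dbg(dev, "seg %d: %pad, %u bytes\n", i, &addr, len);
	}

	/* sync/unmap take the original nents, not the mapped count */
	dma_sync_sg_for_cpu(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
	dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
	return 0;
}
```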
|
/linux/drivers/spi/spi-bcm2835.c
    497  if (bs->tx_buf && !sg_is_last(&tfr->tx_sg.sgl[0]))   in bcm2835_spi_transfer_prologue()
    509  !(sg_dma_len(&tfr->tx_sg.sgl[0]) & ~3);   in bcm2835_spi_transfer_prologue()
    531  sg_dma_address(&tfr->rx_sg.sgl[0]),   in bcm2835_spi_transfer_prologue()
    560  sg_dma_len(&tfr->tx_sg.sgl[0]) = 0;   in bcm2835_spi_transfer_prologue()
    561  sg_dma_address(&tfr->tx_sg.sgl[1]) += 4;   in bcm2835_spi_transfer_prologue()
    562  sg_dma_len(&tfr->tx_sg.sgl[1]) -= 4;   in bcm2835_spi_transfer_prologue()
    594  sg_dma_address(&tfr->tx_sg.sgl[1]) -= 4;   in bcm2835_spi_undo_prologue()
    595  sg_dma_len(&tfr->tx_sg.sgl[1]) += 4;   in bcm2835_spi_undo_prologue()
    678  struct scatterlist *sgl;   in bcm2835_spi_prepare_sg() [local]
    690  sgl = tfr->tx_sg.sgl;   in bcm2835_spi_prepare_sg()
    [all …]
|
/linux/drivers/scsi/lpfc/lpfc_scsi.c
     106  sgl->word2 = le32_to_cpu(sgl->word2);   in lpfc_sli4_set_rsp_sgl_last()
     108  sgl->word2 = cpu_to_le32(sgl->word2);   in lpfc_sli4_set_rsp_sgl_last()
    2033  sgl->word2 = cpu_to_le32(sgl->word2);   in lpfc_bg_setup_sgl()
    2044  sgl->word2 = cpu_to_le32(sgl->word2);   in lpfc_bg_setup_sgl()
    2250  sgl->word2 = cpu_to_le32(sgl->word2);   in lpfc_bg_setup_sgl_prot()
    3079  sgl->word2 = le32_to_cpu(sgl->word2);   in lpfc_scsi_prep_dma_buf_s4()
    3081  sgl->word2 = cpu_to_le32(sgl->word2);   in lpfc_scsi_prep_dma_buf_s4()
    3210  sgl->word2 = le32_to_cpu(sgl->word2);   in lpfc_scsi_prep_dma_buf_s4()
    3212  sgl->word2 = cpu_to_le32(sgl->word2);   in lpfc_scsi_prep_dma_buf_s4()
    3324  sgl->word2 = le32_to_cpu(sgl->word2);   in lpfc_bg_scsi_prep_dma_buf_s4()
    [all …]
|
/linux/drivers/scsi/lpfc/lpfc_nvme.c
     852  sgl->addr_hi = 0;   in lpfc_nvme_adj_fcp_sgls()
     853  sgl->addr_lo = 0;   in lpfc_nvme_adj_fcp_sgls()
     906  sgl++;   in lpfc_nvme_adj_fcp_sgls()
     911  sgl->word2 = le32_to_cpu(sgl->word2);   in lpfc_nvme_adj_fcp_sgls()
     916  sgl->word2 = cpu_to_le32(sgl->word2);   in lpfc_nvme_adj_fcp_sgls()
    1366  sgl += 2;   in lpfc_nvme_prep_io_dma()
    1401  sgl->word2 = 0;   in lpfc_nvme_prep_io_dma()
    1448  sgl->word2 = cpu_to_le32(sgl->word2);   in lpfc_nvme_prep_io_dma()
    1454  sgl++;   in lpfc_nvme_prep_io_dma()
    1458  sgl->word2 = cpu_to_le32(sgl->word2);   in lpfc_nvme_prep_io_dma()
    [all …]
|
/linux/fs/bcachefs/checksum.c
    130  darray_init(&sgl);   in do_encrypt()
    140  if (darray_push(&sgl, sg)) {   in do_encrypt()
    148  sgl.nr = 0;   in do_encrypt()
    149  BUG_ON(darray_push(&sgl, sg));   in do_encrypt()
    157  sg_mark_end(&darray_last(sgl));   in do_encrypt()
    160  darray_exit(&sgl);   in do_encrypt()
    343  darray_init(&sgl);   in __bch2_encrypt_bio()
    352  if (darray_push(&sgl, sg)) {   in __bch2_encrypt_bio()
    360  sgl.nr = 0;   in __bch2_encrypt_bio()
    368  sg_mark_end(&darray_last(sgl));   in __bch2_encrypt_bio()
    [all …]
|
/linux/drivers/vfio/pci/pds/cmds.c
    206  struct pds_lm_sg_elem *sgl, *sge;   in pds_vfio_dma_map_lm_file() [local]
    225  sgl = kzalloc(sgl_size, GFP_KERNEL);   in pds_vfio_dma_map_lm_file()
    226  if (!sgl) {   in pds_vfio_dma_map_lm_file()
    232  sge = sgl;   in pds_vfio_dma_map_lm_file()
    240  sgl_addr = dma_map_single(dev, sgl, sgl_size, DMA_TO_DEVICE);   in pds_vfio_dma_map_lm_file()
    246  lm_file->sgl = sgl;   in pds_vfio_dma_map_lm_file()
    252  kfree(sgl);   in pds_vfio_dma_map_lm_file()
    267  if (lm_file->sgl) {   in pds_vfio_dma_unmap_lm_file()
    269  lm_file->num_sge * sizeof(*lm_file->sgl),   in pds_vfio_dma_unmap_lm_file()
    271  kfree(lm_file->sgl);   in pds_vfio_dma_unmap_lm_file()
    [all …]
|
/linux/drivers/media/platform/m2m-deinterlace.c
    236  ctx->xt->sgl[0].size = s_width;   in deinterlace_issue_dma()
    237  ctx->xt->sgl[0].icg = s_width;   in deinterlace_issue_dma()
    243  ctx->xt->sgl[0].size = s_width;   in deinterlace_issue_dma()
    244  ctx->xt->sgl[0].icg = s_width;   in deinterlace_issue_dma()
    251  ctx->xt->sgl[0].icg = s_width / 2;   in deinterlace_issue_dma()
    258  ctx->xt->sgl[0].icg = s_width / 2;   in deinterlace_issue_dma()
    265  ctx->xt->sgl[0].icg = s_width / 2;   in deinterlace_issue_dma()
    272  ctx->xt->sgl[0].icg = s_width / 2;   in deinterlace_issue_dma()
    278  ctx->xt->sgl[0].size = s_width;   in deinterlace_issue_dma()
    279  ctx->xt->sgl[0].icg = s_width;   in deinterlace_issue_dma()
    [all …]
|
/linux/drivers/iio/buffer/industrialio-buffer-dmaengine.c
     68  struct scatterlist *sgl;   in iio_dmaengine_buffer_submit_block() [local]
     85  sgl = block->sg_table->sgl;   in iio_dmaengine_buffer_submit_block()
     86  nents = sg_nents_for_len(sgl, block->bytes_used);   in iio_dmaengine_buffer_submit_block()
     97  vecs[i].addr = sg_dma_address(sgl);   in iio_dmaengine_buffer_submit_block()
     98  vecs[i].len = min(sg_dma_len(sgl), len_total);   in iio_dmaengine_buffer_submit_block()
    101  sgl = sg_next(sgl);   in iio_dmaengine_buffer_submit_block()
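
iio_dmaengine_buffer_submit_block() above trims the walk with sg_nents_for_len() so only the bytes actually used are visited. A hedged sketch of that pattern (demo_collect() is hypothetical):

```c
#include <linux/scatterlist.h>

/* Visit only the entries covering 'bytes_used', clamping the last one. */
static int demo_collect(struct scatterlist *sgl, unsigned int bytes_used)
{
	unsigned int remaining = bytes_used;
	struct scatterlist *sg = sgl;
	int i, nents;

	nents = sg_nents_for_len(sgl, bytes_used);
	if (nents < 0)
		return nents;	/* -EINVAL: list is shorter than bytes_used */

	for (i = 0; i < nents; i++, sg = sg_next(sg)) {
		unsigned int len = min(sg_dma_len(sg), remaining);

		remaining -= len;
		pr_info("vec %d: %u bytes\n", i, len);
	}
	return nents;
}
```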
|
/linux/drivers/xen/swiotlb-xen.c
    326  xen_swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems,   in xen_swiotlb_unmap_sg() [argument]
    334  for_each_sg(sgl, sg, nelems, i)   in xen_swiotlb_unmap_sg()
    341  xen_swiotlb_map_sg(struct device *dev, struct scatterlist *sgl, int nelems,   in xen_swiotlb_map_sg() [argument]
    349  for_each_sg(sgl, sg, nelems, i) {   in xen_swiotlb_map_sg()
    359  xen_swiotlb_unmap_sg(dev, sgl, i, dir, attrs | DMA_ATTR_SKIP_CPU_SYNC);   in xen_swiotlb_map_sg()
    360  sg_dma_len(sgl) = 0;   in xen_swiotlb_map_sg()
    365  xen_swiotlb_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl,   in xen_swiotlb_sync_sg_for_cpu() [argument]
    371  for_each_sg(sgl, sg, nelems, i) {   in xen_swiotlb_sync_sg_for_cpu()
    378  xen_swiotlb_sync_sg_for_device(struct device *dev, struct scatterlist *sgl,   in xen_swiotlb_sync_sg_for_device() [argument]
    384  for_each_sg(sgl, sg, nelems, i) {   in xen_swiotlb_sync_sg_for_device()
|
/linux/drivers/dma/loongson1-apb-dma.c
    217  struct scatterlist *sgl, unsigned int sg_len,   in ls1x_dma_prep_lli() [argument]
    244  for_each_sg(sgl, sg, sg_len, i) {   in ls1x_dma_prep_lli()
    323  struct scatterlist *sgl;   in ls1x_dma_prep_dma_cyclic() [local]
    338  sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_NOWAIT);   in ls1x_dma_prep_dma_cyclic()
    339  if (!sgl)   in ls1x_dma_prep_dma_cyclic()
    342  sg_init_table(sgl, sg_len);   in ls1x_dma_prep_dma_cyclic()
    344  sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(buf_addr)),   in ls1x_dma_prep_dma_cyclic()
    346  sg_dma_address(&sgl[i]) = buf_addr;   in ls1x_dma_prep_dma_cyclic()
    347  sg_dma_len(&sgl[i]) = period_len;   in ls1x_dma_prep_dma_cyclic()
    351  ret = ls1x_dma_prep_lli(dchan, desc, sgl, sg_len, dir, true);   in ls1x_dma_prep_dma_cyclic()
    [all …]
|
/linux/drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
     29  ddp->sgl = NULL;   in ixgbe_fcoe_clear_ddp()
    110  if (ddp->sgl)   in ixgbe_fcoe_ddp_put()
    134  struct scatterlist *sgl, unsigned int sgc,   in ixgbe_fcoe_ddp_setup() [argument]
    153  if (!netdev || !sgl)   in ixgbe_fcoe_ddp_setup()
    169  if (ddp->sgl) {   in ixgbe_fcoe_ddp_setup()
    171  xid, ddp->sgl, ddp->sgc);   in ixgbe_fcoe_ddp_setup()
    202  ddp->sgl = sgl;   in ixgbe_fcoe_ddp_setup()
    206  for_each_sg(sgl, sg, dmacount, i) {   in ixgbe_fcoe_ddp_setup()
    366  struct scatterlist *sgl, unsigned int sgc)   in ixgbe_fcoe_ddp_target() [argument]
    442  dma_unmap_sg(&adapter->pdev->dev, ddp->sgl,   in ixgbe_fcoe_ddp()
    [all …]
|
/linux/include/trace/events/dma.h
    179  TP_PROTO(struct device *dev, struct scatterlist *sgl, int nents,
    181  TP_ARGS(dev, sgl, nents, ents, dir, attrs),
    197  for_each_sg(sgl, sg, nents, i)
    199  for_each_sg(sgl, sg, ents, i) {
    225  TP_PROTO(struct device *dev, struct scatterlist *sgl, int nents,
    227  TP_ARGS(dev, sgl, nents, dir, attrs),
    241  for_each_sg(sgl, sg, nents, i)
    293  TP_PROTO(struct device *dev, struct scatterlist *sgl, int nents,
    295  TP_ARGS(dev, sgl, nents, dir),
    309  for_each_sg(sgl, sg, nents, i) {
|