/drivers/misc/genwqe/card_utils.c
     304  sgl->fpage_offs, sgl->fpage_size, sgl->lpage_size);    in genwqe_alloc_sync_sgl()
     317  sgl->sgl = __genwqe_alloc_consistent(cd, sgl->sgl_size,    in genwqe_alloc_sync_sgl()
     319  if (sgl->sgl == NULL) {    in genwqe_alloc_sync_sgl()
     347  sgl->lpage_size, sgl->lpage_size)) {    in genwqe_alloc_sync_sgl()
     365  __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl,    in genwqe_alloc_sync_sgl()
     367  sgl->sgl = NULL;    in genwqe_alloc_sync_sgl()
     386  s = &sgl->sgl[0]; /* first set of 8 entries */    in genwqe_setup_sgl()
     483  sgl->fpage + sgl->fpage_offs, sgl->fpage_size);    in genwqe_free_sync_sgl()
     498  offset = sgl->user_size - sgl->lpage_size;    in genwqe_free_sync_sgl()
     513  __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl,    in genwqe_free_sync_sgl()
     [all …]
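
The genwqe hits trace one lifecycle: allocate a DMA-coherent buffer to hold the hardware SGL, fill it in, and release it again on the error and teardown paths. A minimal sketch of that shape, assuming only the generic DMA API (the helper names are illustrative; __genwqe_alloc_consistent() itself is a driver-internal wrapper):

    #include <linux/dma-mapping.h>

    /* Illustrative only: back a hardware SGL with coherent DMA memory, the
     * shape genwqe's __genwqe_alloc_consistent()/__genwqe_free_consistent()
     * wrappers provide. */
    static void *hw_sgl_alloc(struct device *dev, size_t sgl_size,
                              dma_addr_t *dma_handle)
    {
            /* Coherent memory: CPU writes become visible to the device
             * without explicit sync operations. */
            return dma_alloc_coherent(dev, sgl_size, dma_handle, GFP_KERNEL);
    }

    static void hw_sgl_free(struct device *dev, size_t sgl_size,
                            void *sgl, dma_addr_t dma_handle)
    {
            dma_free_coherent(dev, sgl_size, sgl, dma_handle);
    }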
|
/drivers/crypto/hisilicon/sgl.c
      37  struct hisi_acc_hw_sgl *sgl;    member
      95  if (!block[i].sgl) {    in hisi_acc_create_sgl_pool()
     107  if (!block[i].sgl) {    in hisi_acc_create_sgl_pool()
     175  hw_sge->buf = sg_dma_address(sgl);    in sg_map_to_hw_sg()
     176  hw_sge->len = cpu_to_le32(sg_dma_len(sgl));    in sg_map_to_hw_sg()
     177  hw_sge->page_ctrl = sg_virt(sgl);    in sg_map_to_hw_sg()
     233  sg_n = sg_nents(sgl);    in hisi_acc_sg_buf_map_to_hw_sgl()
     235  sg_n_mapped = dma_map_sg(dev, sgl, sg_n, dir);    in hisi_acc_sg_buf_map_to_hw_sgl()
     256  for_each_sg(sgl, sg, sg_n_mapped, i) {    in hisi_acc_sg_buf_map_to_hw_sgl()
     286  if (!dev || !sgl || !hw_sgl)    in hisi_acc_sg_buf_unmap()
     [all …]
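
The hisilicon hits show the standard streaming-DMA flow for feeding a scatterlist to hardware: sg_nents() to count entries, dma_map_sg() to map them, then for_each_sg() over the mapped count to mirror each segment into a device-visible SGE. A sketch under assumed types (struct hw_sge and its layout are hypothetical, loosely modeled on the snippets above):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Hypothetical device SGE, not the driver's real layout. */
    struct hw_sge {
            __le64 buf;
            __le32 len;
    };

    /* Sketch: map a scatterlist for DMA, then mirror each segment the
     * IOMMU actually produced into device-visible SGEs. */
    static int map_sgl_to_hw(struct device *dev, struct scatterlist *sgl,
                             struct hw_sge *sges, enum dma_data_direction dir)
    {
            struct scatterlist *sg;
            int i, mapped;

            mapped = dma_map_sg(dev, sgl, sg_nents(sgl), dir);
            if (!mapped)
                    return -ENOMEM;

            /* Iterate only the segments that were actually mapped. */
            for_each_sg(sgl, sg, mapped, i) {
                    sges[i].buf = cpu_to_le64(sg_dma_address(sg));
                    sges[i].len = cpu_to_le32(sg_dma_len(sg));
            }
            return mapped;
    }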
|
/drivers/gpu/drm/xe/xe_res_cursor.h
      57  struct scatterlist *sgl;    member
      94  cur->sgl = NULL;    in xe_res_first()
     153  struct scatterlist *sgl = cur->sgl;    in __xe_res_sg_next() local
     157  start -= sg_dma_len(sgl);    in __xe_res_sg_next()
     158  sgl = sg_next(sgl);    in __xe_res_sg_next()
     159  XE_WARN_ON(!sgl);    in __xe_res_sg_next()
     164  cur->sgl = sgl;    in __xe_res_sg_next()
     217  cur->sgl = sg->sgl;    in xe_res_first_sg()
     248  cur->sgl = NULL;    in xe_res_first_dma()
     284  if (cur->sgl) {    in xe_res_next()
     [all …]
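
The xe_res_cursor hits all serve one idiom: advance a byte offset across DMA segments with sg_dma_len()/sg_next() until it lands inside an entry. A standalone sketch of that walk (the function name is made up; xe wraps the same logic in its cursor structure):

    #include <linux/scatterlist.h>

    /* Illustrative cursor advance: consume `start` bytes by skipping whole
     * DMA segments until the remaining offset falls inside one. */
    static struct scatterlist *sg_skip_bytes(struct scatterlist *sgl, u64 *start)
    {
            while (sgl && *start >= sg_dma_len(sgl)) {
                    *start -= sg_dma_len(sgl);
                    sgl = sg_next(sgl);     /* NULL past the final entry */
            }
            return sgl;
    }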
|
/drivers/gpu/drm/nouveau/nvkm/core/firmware.c
     121  case NVKM_FIRMWARE_IMG_DMA: return &fw->mem.sgl;    in nvkm_firmware_mem_sgl()
     122  case NVKM_FIRMWARE_IMG_SGT: return fw->mem.sgt.sgl;    in nvkm_firmware_mem_sgl()
     139  .sgl = nvkm_firmware_mem_sgl(memory),    in nvkm_firmware_mem_map()
     142  if (!map.sgl)    in nvkm_firmware_mem_map()
     153  return sgl ? sg_dma_len(sgl) : 0;    in nvkm_firmware_mem_size()
     251  sg_init_one(&fw->mem.sgl, fw->img, len);    in nvkm_firmware_ctor()
     252  sg_dma_address(&fw->mem.sgl) = fw->phys;    in nvkm_firmware_ctor()
     253  sg_dma_len(&fw->mem.sgl) = len;    in nvkm_firmware_ctor()
     268  struct scatterlist *sgl;    in nvkm_firmware_ctor() local
     272  for_each_sgtable_sg(&fw->mem.sgt, sgl, i) {    in nvkm_firmware_ctor()
     [all …]
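
The nouveau ctor hits show a less common trick: sg_init_one() over a buffer that is already DMA mapped, then assigning sg_dma_address()/sg_dma_len() by hand because the mapping was established elsewhere. A sketch (the helper name is hypothetical):

    #include <linux/scatterlist.h>

    /* Sketch: wrap one pre-mapped buffer in a single-entry scatterlist,
     * the way the nouveau DMA-image path does. */
    static void sg_wrap_premapped(struct scatterlist *sg, void *cpu_addr,
                                  dma_addr_t dma_addr, size_t len)
    {
            sg_init_one(sg, cpu_addr, len);
            /* sg_dma_address()/sg_dma_len() expand to struct fields and may
             * be assigned directly when the mapping came from elsewhere. */
            sg_dma_address(sg) = dma_addr;
            sg_dma_len(sg) = len;
    }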
|
/drivers/target/iscsi/cxgbit/cxgbit_ddp.c
     133  struct scatterlist *sg = ttinfo->sgl;    in cxgbit_ddp_set_map()
     176  struct scatterlist *sgl = ttinfo->sgl;    in cxgbit_ddp_reserve() local
     178  unsigned int sg_offset = sgl->offset;    in cxgbit_ddp_reserve()
     188  if (cxgbit_ddp_sgl_check(sgl, sgcnt) < 0)    in cxgbit_ddp_reserve()
     191  ttinfo->nr_pages = (xferlen + sgl->offset +    in cxgbit_ddp_reserve()
     203  sgl->offset = 0;    in cxgbit_ddp_reserve()
     205  sgl->offset = sg_offset;    in cxgbit_ddp_reserve()
     245  ttinfo->sgl = cmd->se_cmd.t_data_sg;    in cxgbit_get_r2t_ttt()
     253  ttinfo->sgl = NULL;    in cxgbit_get_r2t_ttt()
     284  if (unlikely(ttinfo->sgl)) {    in cxgbit_unmap_cmd()
     [all …]
|
/drivers/scsi/lpfc/lpfc_scsi.c
     106  sgl->word2 = le32_to_cpu(sgl->word2);    in lpfc_sli4_set_rsp_sgl_last()
     108  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_sli4_set_rsp_sgl_last()
    2038  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_bg_setup_sgl()
    2049  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_bg_setup_sgl()
    2255  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_bg_setup_sgl_prot()
    3084  sgl->word2 = le32_to_cpu(sgl->word2);    in lpfc_scsi_prep_dma_buf_s4()
    3086  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_scsi_prep_dma_buf_s4()
    3215  sgl->word2 = le32_to_cpu(sgl->word2);    in lpfc_scsi_prep_dma_buf_s4()
    3217  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_scsi_prep_dma_buf_s4()
    3329  sgl->word2 = le32_to_cpu(sgl->word2);    in lpfc_bg_scsi_prep_dma_buf_s4()
    [all …]
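
Nearly every lpfc hit here (and in the two files below) is the same endianness round trip: SGE words live in little-endian wire format, so updating a bit means le32_to_cpu(), modify, cpu_to_le32(). A sketch with a hypothetical SGE type and flag position; lpfc's real sli4_sge manipulates word2 through its own bitfield macros:

    #include <linux/bits.h>
    #include <linux/types.h>

    struct demo_sge {
            __le32 word2;   /* little-endian on the wire; carries flag bits */
    };

    #define DEMO_SGE_LAST  BIT(31)  /* hypothetical "last SGE" bit position */

    /* Sketch of the le32 round trip the lpfc snippets repeat. */
    static void demo_sge_set_last(struct demo_sge *sge)
    {
            u32 w = le32_to_cpu(sge->word2);  /* wire -> CPU order */

            w |= DEMO_SGE_LAST;               /* flip the flag */
            sge->word2 = cpu_to_le32(w);      /* CPU -> wire order */
    }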
|
/drivers/scsi/lpfc/lpfc_nvme.c
     852  sgl->addr_hi = 0;    in lpfc_nvme_adj_fcp_sgls()
     853  sgl->addr_lo = 0;    in lpfc_nvme_adj_fcp_sgls()
     906  sgl++;    in lpfc_nvme_adj_fcp_sgls()
     911  sgl->word2 = le32_to_cpu(sgl->word2);    in lpfc_nvme_adj_fcp_sgls()
     916  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_nvme_adj_fcp_sgls()
    1366  sgl += 2;    in lpfc_nvme_prep_io_dma()
    1401  sgl->word2 = 0;    in lpfc_nvme_prep_io_dma()
    1448  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_nvme_prep_io_dma()
    1454  sgl++;    in lpfc_nvme_prep_io_dma()
    1458  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_nvme_prep_io_dma()
    [all …]
|
/drivers/scsi/lpfc/lpfc_nvmet.c
    2841  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_nvmet_prep_fcp_wqe()
    2843  sgl++;    in lpfc_nvmet_prep_fcp_wqe()
    2848  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_nvmet_prep_fcp_wqe()
    2850  sgl++;    in lpfc_nvmet_prep_fcp_wqe()
    2927  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_nvmet_prep_fcp_wqe()
    2929  sgl++;    in lpfc_nvmet_prep_fcp_wqe()
    2934  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_nvmet_prep_fcp_wqe()
    2936  sgl++;    in lpfc_nvmet_prep_fcp_wqe()
    3019  sgl->word2 = cpu_to_le32(sgl->word2);    in lpfc_nvmet_prep_fcp_wqe()
    3021  sgl++;    in lpfc_nvmet_prep_fcp_wqe()
    [all …]
|
/drivers/spi/spi-bcm2835.c
     497  if (bs->tx_buf && !sg_is_last(&tfr->tx_sg.sgl[0]))    in bcm2835_spi_transfer_prologue()
     509  !(sg_dma_len(&tfr->tx_sg.sgl[0]) & ~3);    in bcm2835_spi_transfer_prologue()
     531  sg_dma_address(&tfr->rx_sg.sgl[0]),    in bcm2835_spi_transfer_prologue()
     560  sg_dma_len(&tfr->tx_sg.sgl[0]) = 0;    in bcm2835_spi_transfer_prologue()
     561  sg_dma_address(&tfr->tx_sg.sgl[1]) += 4;    in bcm2835_spi_transfer_prologue()
     562  sg_dma_len(&tfr->tx_sg.sgl[1]) -= 4;    in bcm2835_spi_transfer_prologue()
     594  sg_dma_address(&tfr->tx_sg.sgl[1]) -= 4;    in bcm2835_spi_undo_prologue()
     595  sg_dma_len(&tfr->tx_sg.sgl[1]) += 4;    in bcm2835_spi_undo_prologue()
     678  struct scatterlist *sgl;    in bcm2835_spi_prepare_sg() local
     690  sgl = tfr->tx_sg.sgl;    in bcm2835_spi_prepare_sg()
     [all …]
|
/drivers/media/platform/m2m-deinterlace.c
     236  ctx->xt->sgl[0].size = s_width;    in deinterlace_issue_dma()
     237  ctx->xt->sgl[0].icg = s_width;    in deinterlace_issue_dma()
     243  ctx->xt->sgl[0].size = s_width;    in deinterlace_issue_dma()
     244  ctx->xt->sgl[0].icg = s_width;    in deinterlace_issue_dma()
     251  ctx->xt->sgl[0].icg = s_width / 2;    in deinterlace_issue_dma()
     258  ctx->xt->sgl[0].icg = s_width / 2;    in deinterlace_issue_dma()
     265  ctx->xt->sgl[0].icg = s_width / 2;    in deinterlace_issue_dma()
     272  ctx->xt->sgl[0].icg = s_width / 2;    in deinterlace_issue_dma()
     278  ctx->xt->sgl[0].size = s_width;    in deinterlace_issue_dma()
     279  ctx->xt->sgl[0].icg = s_width;    in deinterlace_issue_dma()
     [all …]
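
Here `sgl` is not a scatterlist but the chunk array of a dmaengine interleaved template: sgl[0].size is the bytes copied per chunk and sgl[0].icg the inter-chunk gap, so size == icg copies every other line. A sketch of filling such a template (field usage per include/linux/dmaengine.h; the helper and its parameters are illustrative):

    #include <linux/dmaengine.h>

    /* Sketch: describe "copy one line, skip one line" with an interleaved
     * template, the idiom behind the deinterlace snippets. */
    static void fill_deinterlace_template(struct dma_interleaved_template *xt,
                                          size_t line_width, size_t lines)
    {
            xt->dir = DMA_MEM_TO_MEM;
            xt->src_inc = true;
            xt->dst_inc = true;
            xt->numf = lines / 2;           /* one frame per copied line */
            xt->frame_size = 1;             /* a single chunk per frame */
            xt->sgl[0].size = line_width;   /* copy one full line ... */
            xt->sgl[0].icg = line_width;    /* ... then skip the next one */
    }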
|
/drivers/vfio/pci/pds/cmds.c
     206  struct pds_lm_sg_elem *sgl, *sge;    in pds_vfio_dma_map_lm_file() local
     225  sgl = kzalloc(sgl_size, GFP_KERNEL);    in pds_vfio_dma_map_lm_file()
     226  if (!sgl) {    in pds_vfio_dma_map_lm_file()
     232  sge = sgl;    in pds_vfio_dma_map_lm_file()
     240  sgl_addr = dma_map_single(dev, sgl, sgl_size, DMA_TO_DEVICE);    in pds_vfio_dma_map_lm_file()
     246  lm_file->sgl = sgl;    in pds_vfio_dma_map_lm_file()
     252  kfree(sgl);    in pds_vfio_dma_map_lm_file()
     267  if (lm_file->sgl) {    in pds_vfio_dma_unmap_lm_file()
     269  lm_file->num_sge * sizeof(*lm_file->sgl),    in pds_vfio_dma_unmap_lm_file()
     271  kfree(lm_file->sgl);    in pds_vfio_dma_unmap_lm_file()
     [all …]
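
The pds hits use the other common arrangement: the SGL itself is ordinary kernel memory (kzalloc) that is streaming-mapped with dma_map_single() so the device can fetch it, with kfree() on the mapping-failure path. A sketch with a stand-in element type (struct sg_elem is hypothetical, in place of struct pds_lm_sg_elem):

    #include <linux/dma-mapping.h>
    #include <linux/slab.h>

    /* Hypothetical element type standing in for struct pds_lm_sg_elem. */
    struct sg_elem {
            __le64 addr;
            __le32 len;
    };

    /* Sketch: allocate the SGL as plain kernel memory and streaming-map it
     * for the device; unwind with kfree() if the mapping fails. */
    static struct sg_elem *sgl_alloc_mapped(struct device *dev, int num_sge,
                                            dma_addr_t *sgl_addr)
    {
            size_t sgl_size = num_sge * sizeof(struct sg_elem);
            struct sg_elem *sgl = kzalloc(sgl_size, GFP_KERNEL);

            if (!sgl)
                    return NULL;

            *sgl_addr = dma_map_single(dev, sgl, sgl_size, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, *sgl_addr)) {
                    kfree(sgl);
                    return NULL;
            }
            return sgl;
    }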
|
/drivers/vfio/pci/pds/dirty.c
     121  kfree(region->sgl);    in __pds_vfio_dirty_free_sgl()
     124  region->sgl = NULL;    in __pds_vfio_dirty_free_sgl()
     138  if (region->sgl)    in pds_vfio_dirty_free_sgl()
     149  struct pds_lm_sg_elem *sgl;    in pds_vfio_dirty_alloc_sgl() local
     157  sgl = kzalloc(sgl_size, GFP_KERNEL);    in pds_vfio_dirty_alloc_sgl()
     158  if (!sgl)    in pds_vfio_dirty_alloc_sgl()
     161  sgl_addr = dma_map_single(pdsc_dev, sgl, sgl_size, DMA_BIDIRECTIONAL);    in pds_vfio_dirty_alloc_sgl()
     163  kfree(sgl);    in pds_vfio_dirty_alloc_sgl()
     167  region->sgl = sgl;    in pds_vfio_dirty_alloc_sgl()
     424  struct pds_lm_sg_elem *sg_elem = &region->sgl[i];    in pds_vfio_dirty_seq_ack()
|
/drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
      29  ddp->sgl = NULL;    in ixgbe_fcoe_clear_ddp()
     110  if (ddp->sgl)    in ixgbe_fcoe_ddp_put()
     134  struct scatterlist *sgl, unsigned int sgc,    in ixgbe_fcoe_ddp_setup() argument
     153  if (!netdev || !sgl)    in ixgbe_fcoe_ddp_setup()
     169  if (ddp->sgl) {    in ixgbe_fcoe_ddp_setup()
     171  xid, ddp->sgl, ddp->sgc);    in ixgbe_fcoe_ddp_setup()
     202  ddp->sgl = sgl;    in ixgbe_fcoe_ddp_setup()
     206  for_each_sg(sgl, sg, dmacount, i) {    in ixgbe_fcoe_ddp_setup()
     366  struct scatterlist *sgl, unsigned int sgc)    in ixgbe_fcoe_ddp_target() argument
     442  dma_unmap_sg(&adapter->pdev->dev, ddp->sgl,    in ixgbe_fcoe_ddp()
     [all …]
|
/drivers/mtd/nand/qpic_common.c
     154  struct scatterlist *sgl;    in qcom_prepare_bam_async_desc() local
     166  sgl = &bam_txn->cmd_sgl[bam_txn->cmd_sgl_start];    in qcom_prepare_bam_async_desc()
     172  sgl = &bam_txn->data_sgl[bam_txn->tx_sgl_start];    in qcom_prepare_bam_async_desc()
     178  sgl = &bam_txn->data_sgl[bam_txn->rx_sgl_start];    in qcom_prepare_bam_async_desc()
     185  sg_mark_end(sgl + sgl_cnt - 1);    in qcom_prepare_bam_async_desc()
     194  desc->bam_sgl = sgl;    in qcom_prepare_bam_async_desc()
     201  dma_unmap_sg(nandc->dev, sgl, sgl_cnt, desc->dir);    in qcom_prepare_bam_async_desc()
     366  struct scatterlist *sgl;    in qcom_prep_adm_dma_desc() local
     373  sgl = &desc->adm_sgl;    in qcom_prep_adm_dma_desc()
     375  sg_init_one(sgl, vaddr, size);    in qcom_prep_adm_dma_desc()
     [all …]
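
qcom_prepare_bam_async_desc() carves its slice out of preallocated scatterlist arrays and terminates it with sg_mark_end(), so iteration stops after sgl_cnt entries. A sketch of just that slicing step (names illustrative):

    #include <linux/scatterlist.h>

    /* Sketch: hand the DMA engine a slice of a preallocated scatterlist
     * array, terminated so iteration stops after sgl_cnt entries. */
    static struct scatterlist *sg_slice(struct scatterlist *base,
                                        unsigned int start,
                                        unsigned int sgl_cnt)
    {
            struct scatterlist *sgl = &base[start];

            sg_mark_end(sgl + sgl_cnt - 1);
            return sgl;
    }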
|
/drivers/dma/loongson1-apb-dma.c
     217  struct scatterlist *sgl, unsigned int sg_len,    in ls1x_dma_prep_lli() argument
     244  for_each_sg(sgl, sg, sg_len, i) {    in ls1x_dma_prep_lli()
     323  struct scatterlist *sgl;    in ls1x_dma_prep_dma_cyclic() local
     338  sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_NOWAIT);    in ls1x_dma_prep_dma_cyclic()
     339  if (!sgl)    in ls1x_dma_prep_dma_cyclic()
     342  sg_init_table(sgl, sg_len);    in ls1x_dma_prep_dma_cyclic()
     344  sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(buf_addr)),    in ls1x_dma_prep_dma_cyclic()
     346  sg_dma_address(&sgl[i]) = buf_addr;    in ls1x_dma_prep_dma_cyclic()
     347  sg_dma_len(&sgl[i]) = period_len;    in ls1x_dma_prep_dma_cyclic()
     351  ret = ls1x_dma_prep_lli(dchan, desc, sgl, sg_len, dir, true);    in ls1x_dma_prep_dma_cyclic()
     [all …]
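
ls1x_dma_prep_dma_cyclic() shows a common dmaengine trick, also visible in shdma-base.c further down: synthesize a throwaway scatterlist over an already-mapped ring buffer, one entry per period, and reuse the slave-sg preparation path. A sketch (the real driver additionally calls sg_set_page(); omitted here because only the DMA address/length fields are consumed downstream):

    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Sketch: fabricate a scatterlist over an already-mapped ring buffer,
     * one entry per period, so cyclic DMA can reuse the slave-sg path. */
    static struct scatterlist *build_cyclic_sgl(dma_addr_t buf_addr,
                                                size_t buf_len,
                                                size_t period_len)
    {
            unsigned int sg_len = buf_len / period_len;
            struct scatterlist *sgl;
            unsigned int i;

            sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_NOWAIT);
            if (!sgl)
                    return NULL;

            sg_init_table(sgl, sg_len);
            for (i = 0; i < sg_len; i++) {
                    /* buf_addr is already a DMA address: set the DMA
                     * fields directly. */
                    sg_dma_address(&sgl[i]) = buf_addr;
                    sg_dma_len(&sgl[i]) = period_len;
                    buf_addr += period_len;
            }
            return sgl;
    }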
|
/drivers/dma/milbeaut-hdmac.c
      61  struct scatterlist *sgl;    member
     122  sg = &md->sgl[md->sg_cur];    in milbeaut_chan_start()
     256  milbeaut_hdmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,    in milbeaut_hdmac_prep_slave_sg() argument
     272  md->sgl = kcalloc(sg_len, sizeof(*sgl), GFP_NOWAIT);    in milbeaut_hdmac_prep_slave_sg()
     273  if (!md->sgl) {    in milbeaut_hdmac_prep_slave_sg()
     279  md->sgl[i] = sgl[i];    in milbeaut_hdmac_prep_slave_sg()
     350  sg = &md->sgl[md->sg_cur];    in milbeaut_hdmac_tx_status()
     372  txstate->residue += sg_dma_len(&md->sgl[i]);    in milbeaut_hdmac_tx_status()
     398  kfree(md->sgl);    in milbeaut_hdmac_desc_free()
|
/drivers/xen/swiotlb-xen.c
     331  xen_swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems,    in xen_swiotlb_unmap_sg() argument
     339  for_each_sg(sgl, sg, nelems, i)    in xen_swiotlb_unmap_sg()
     346  xen_swiotlb_map_sg(struct device *dev, struct scatterlist *sgl, int nelems,    in xen_swiotlb_map_sg() argument
     354  for_each_sg(sgl, sg, nelems, i) {    in xen_swiotlb_map_sg()
     364  xen_swiotlb_unmap_sg(dev, sgl, i, dir, attrs | DMA_ATTR_SKIP_CPU_SYNC);    in xen_swiotlb_map_sg()
     365  sg_dma_len(sgl) = 0;    in xen_swiotlb_map_sg()
     370  xen_swiotlb_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl,    in xen_swiotlb_sync_sg_for_cpu() argument
     376  for_each_sg(sgl, sg, nelems, i) {    in xen_swiotlb_sync_sg_for_cpu()
     383  xen_swiotlb_sync_sg_for_device(struct device *dev, struct scatterlist *sgl,    in xen_swiotlb_sync_sg_for_device() argument
     389  for_each_sg(sgl, sg, nelems, i) {    in xen_swiotlb_sync_sg_for_device()
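
xen_swiotlb_map_sg() maps segment by segment and, on failure, unwinds the i segments already mapped, passing DMA_ATTR_SKIP_CPU_SYNC because the device never saw the buffers, then zeroes sg_dma_len(sgl) to mark the whole list dead. A sketch of that unwind idiom (the helper name and error convention are illustrative):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Sketch of the map-with-unwind idiom in xen_swiotlb_map_sg(). */
    static int map_sg_with_unwind(struct device *dev, struct scatterlist *sgl,
                                  int nelems, enum dma_data_direction dir,
                                  unsigned long attrs)
    {
            struct scatterlist *sg;
            int i;

            for_each_sg(sgl, sg, nelems, i) {
                    sg->dma_address = dma_map_page_attrs(dev, sg_page(sg),
                                                         sg->offset,
                                                         sg->length,
                                                         dir, attrs);
                    if (dma_mapping_error(dev, sg->dma_address))
                            goto out_unwind;
                    sg_dma_len(sg) = sg->length;
            }
            return nelems;

    out_unwind:
            /* The device never touched these buffers: skip the CPU sync. */
            dma_unmap_sg_attrs(dev, sgl, i, dir,
                               attrs | DMA_ATTR_SKIP_CPU_SYNC);
            sg_dma_len(sgl) = 0;    /* flag the whole list as unmapped */
            return -EIO;
    }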
|
/drivers/crypto/intel/qat/qat_common/qat_bl.c
      51  struct scatterlist *sgl,    in __qat_bl_sgl_to_bufl() argument
      62  int n = sg_nents(sgl);    in __qat_bl_sgl_to_bufl()
      90  bufl_dma_dir = sgl != sglout ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL;    in __qat_bl_sgl_to_bufl()
      97  for_each_sg(sgl, sg, n, i) {    in __qat_bl_sgl_to_bufl()
     127  if (sgl != sglout) {    in __qat_bl_sgl_to_bufl()
     217  n = sg_nents(sgl);    in __qat_bl_sgl_to_bufl()
     232  struct scatterlist *sgl,    in qat_bl_sgl_to_bufl() argument
     250  return __qat_bl_sgl_to_bufl(accel_dev, sgl, sglout, buf,    in qat_bl_sgl_to_bufl()
|
/drivers/iio/buffer/industrialio-buffer-dmaengine.c
      68  struct scatterlist *sgl;    in iio_dmaengine_buffer_submit_block() local
      85  sgl = block->sg_table->sgl;    in iio_dmaengine_buffer_submit_block()
      86  nents = sg_nents_for_len(sgl, block->bytes_used);    in iio_dmaengine_buffer_submit_block()
      97  vecs[i].addr = sg_dma_address(sgl);    in iio_dmaengine_buffer_submit_block()
      98  vecs[i].len = min(sg_dma_len(sgl), len_total);    in iio_dmaengine_buffer_submit_block()
     101  sgl = sg_next(sgl);    in iio_dmaengine_buffer_submit_block()
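
iio_dmaengine_buffer_submit_block() only walks the entries that actually cover bytes_used: sg_nents_for_len() bounds the loop and sg_next() advances. A standalone sketch of that bounded walk (the helper name is made up; what each (addr, len) pair feeds is elided to a comment):

    #include <linux/minmax.h>
    #include <linux/scatterlist.h>

    /* Sketch: visit only the segments that cover bytes_used. */
    static int walk_used_segments(struct scatterlist *sgl, size_t bytes_used)
    {
            int nents = sg_nents_for_len(sgl, bytes_used);
            size_t len_total = bytes_used;
            int i;

            if (nents < 0)
                    return nents;   /* the list is shorter than bytes_used */

            for (i = 0; i < nents; i++) {
                    dma_addr_t addr = sg_dma_address(sgl);
                    size_t len = min_t(size_t, sg_dma_len(sgl), len_total);

                    /* ... submit (addr, len) to the DMA engine here ... */
                    (void)addr;
                    len_total -= len;
                    sgl = sg_next(sgl);
            }
            return nents;
    }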
|
/drivers/scsi/esas2r/esas2r_io.c
     224  struct esas2r_mem_desc *sgl;    in esas2r_build_sg_list_sge() local
     231  sgl = esas2r_alloc_sgl(a);    in esas2r_build_sg_list_sge()
     233  if (unlikely(sgl == NULL))    in esas2r_build_sg_list_sge()
     253  (struct atto_vda_sge *)((u8 *)sgl->virt_addr    in esas2r_build_sg_list_sge()
     260  cpu_to_le64(sgl->phys_addr);    in esas2r_build_sg_list_sge()
     302  list_add(&sgl->next_desc, &rq->sg_table_head);    in esas2r_build_sg_list_sge()
     376  struct esas2r_mem_desc *sgl;    in esas2r_build_prd_iblk() local
     449  sgl = esas2r_alloc_sgl(a);    in esas2r_build_prd_iblk()
     451  if (unlikely(sgl == NULL))    in esas2r_build_prd_iblk()
     458  list_add(&sgl->next_desc, &rq->sg_table_head);    in esas2r_build_prd_iblk()
     [all …]
|
/drivers/gpu/drm/i915/i915_scatterlist.h
      31  } __sgt_iter(struct scatterlist *sgl, bool dma) {    in __sgt_iter() argument
      32  struct sgt_iter s = { .sgp = sgl };    in __sgt_iter()
      90  for ((__iter) = __sgt_iter((__sgt)->sgl, true); \
     112  for ((__iter) = __sgt_iter((__sgt)->sgl, false); \
     225  rsgt->table.sgl = NULL;    in __i915_refct_sgt_init()
|
/drivers/tty/serial/8250/8250_dma.c
      93  struct scatterlist sgl[2];    in serial8250_tx_dma() local
     115  sg_init_table(sgl, ARRAY_SIZE(sgl));    in serial8250_tx_dma()
     117  ret = kfifo_dma_out_prepare_mapped(&tport->xmit_fifo, sgl, ARRAY_SIZE(sgl),    in serial8250_tx_dma()
     122  for_each_sg(sgl, sg, ret, i)    in serial8250_tx_dma()
     125  desc = dmaengine_prep_slave_sg(dma->txchan, sgl, ret,    in serial8250_tx_dma()
|
/drivers/scsi/bnx2fc/bnx2fc_hwi.c
    1496  struct fcoe_ext_mul_sges_ctx *sgl;    in bnx2fc_init_seq_cleanup_task() local
    1555  sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl;    in bnx2fc_init_seq_cleanup_task()
    1558  sgl->mul_sgl.sgl_size = bd_count;    in bnx2fc_init_seq_cleanup_task()
    1560  sgl->mul_sgl.cur_sge_idx = i;    in bnx2fc_init_seq_cleanup_task()
    1612  struct fcoe_ext_mul_sges_ctx *sgl;    in bnx2fc_init_mp_task() local
    1693  sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl;    in bnx2fc_init_mp_task()
    1696  sgl->mul_sgl.cur_sge_addr.hi =    in bnx2fc_init_mp_task()
    1698  sgl->mul_sgl.sgl_size = 1;    in bnx2fc_init_mp_task()
    1815  sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl;    in bnx2fc_init_task()
    1846  sgl->mul_sgl.cur_sge_addr.hi =    in bnx2fc_init_task()
    [all …]
|
/drivers/dma/sh/shdma-base.c
     571  for_each_sg(sgl, sg, sg_len, i)    in shdma_prep_sg()
     588  for_each_sg(sgl, sg, sg_len, i) {    in shdma_prep_sg()
     685  return shdma_prep_sg(schan, sgl, sg_len, &slave_addr,    in shdma_prep_slave_sg()
     703  struct scatterlist *sgl;    in shdma_prep_dma_cyclic() local
     731  sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_KERNEL);    in shdma_prep_dma_cyclic()
     732  if (!sgl)    in shdma_prep_dma_cyclic()
     735  sg_init_table(sgl, sg_len);    in shdma_prep_dma_cyclic()
     742  sg_dma_address(&sgl[i]) = src;    in shdma_prep_dma_cyclic()
     743  sg_dma_len(&sgl[i]) = period_len;    in shdma_prep_dma_cyclic()
     746  desc = shdma_prep_sg(schan, sgl, sg_len, &slave_addr,    in shdma_prep_dma_cyclic()
     [all …]
|
/drivers/scsi/elx/efct/efct_scsi.c
     202  struct efct_scsi_sgl *sgl, u32 sgl_count,    in efct_scsi_build_sgls() argument
     218  rc = efct_hw_io_add_sge(hw, hio, sgl[i].addr, sgl[i].len);    in efct_scsi_build_sgls()
     303  io->sgl, io->sgl_count, io->hio_type);    in efct_scsi_io_dispatch_hw_io()
     672  struct efct_scsi_sgl *sgl_ptr = &io->sgl[sgl_count - 1];    in efct_scsi_xfer_data()
     715  return efct_scsi_xfer_data(io, flags, sgl, sgl_count,    in efct_scsi_send_rd_data()
     824  io->sgl[0].addr = io->rspbuf.phys;    in efct_scsi_send_resp()
     825  io->sgl[0].dif_addr = 0;    in efct_scsi_send_resp()
     826  io->sgl[0].len = io->wire_len;    in efct_scsi_send_resp()
    1021  io->sgl[0].addr = io->rspbuf.phys;    in efct_scsi_send_tmf_resp()
    1022  io->sgl[0].dif_addr = 0;    in efct_scsi_send_tmf_resp()
    [all …]
|