| /drivers/media/platform/ |
| A D | m2m-deinterlace.c |
    238  ctx->xt->src_start = p_in;   in deinterlace_issue_dma()
    239  ctx->xt->dst_start = p_out;  in deinterlace_issue_dma()
    280  ctx->xt->src_start = p_in;   in deinterlace_issue_dma()
    301  ctx->xt->src_start = p_in;   in deinterlace_issue_dma()
    302  ctx->xt->dst_start = p_out;  in deinterlace_issue_dma()
    316  ctx->xt->src_start = p_in;   in deinterlace_issue_dma()
    322  ctx->xt->frame_size = 1;     in deinterlace_issue_dma()
    324  ctx->xt->src_sgl = false;    in deinterlace_issue_dma()
    325  ctx->xt->dst_sgl = true;     in deinterlace_issue_dma()
    858  if (!ctx->xt) {              in deinterlace_open()
    [all …]
|
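The m2m-deinterlace hits are the client-side half of the interleaved API: the driver allocates one dma_interleaved_template in deinterlace_open() and refills it for every job before calling dmaengine_prep_interleaved_dma(). A minimal sketch of that pattern follows; the function and parameter names are illustrative, not the driver's own, and the template is assumed to have been allocated with room for a single data_chunk.

```c
#include <linux/dmaengine.h>
#include <linux/errno.h>

/* Copy field_lines lines of bytesperline bytes each, leaving a
 * one-line gap on the destination after every line (dst_sgl = true,
 * as in the hits above), then submit the transfer.
 */
static int demo_issue_field_copy(struct dma_chan *chan,
				 struct dma_interleaved_template *xt,
				 dma_addr_t src, dma_addr_t dst,
				 size_t bytesperline, size_t field_lines)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	xt->src_start = src;
	xt->dst_start = dst;
	xt->dir = DMA_MEM_TO_MEM;
	xt->numf = field_lines;		/* lines ("frames") to copy   */
	xt->frame_size = 1;		/* one chunk per line         */
	xt->sgl[0].size = bytesperline;	/* bytes copied per line      */
	xt->sgl[0].icg = bytesperline;	/* gap left after each line   */
	xt->src_sgl = false;		/* source lines are packed    */
	xt->dst_sgl = true;		/* the gap applies at the dst */
	xt->src_inc = true;
	xt->dst_inc = true;

	desc = dmaengine_prep_interleaved_dma(chan, xt, DMA_PREP_INTERRUPT);
	if (!desc)
		return -EINVAL;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);
	return 0;
}
```

m2m-deinterlace reuses the same template object for every job, which is why it is allocated once in deinterlace_open() and only checked for NULL there.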
| /drivers/media/platform/ti/omap/ |
| A D | omap_vout_vrfb.c |
    143  if (!vout->vrfb_dma_tx.xt) {  in omap_vout_setup_vrfb_bufs()
    187  kfree(vout->vrfb_dma_tx.xt);  in omap_vout_release_vrfb()
    236  struct dma_interleaved_template *xt = vout->vrfb_dma_tx.xt;  in omap_vout_prepare_vrfb() local
    259  xt->numf = vout->pix.height;  in omap_vout_prepare_vrfb()
    260  xt->frame_size = 1;           in omap_vout_prepare_vrfb()
    262  xt->sgl[0].icg = dst_icg;     in omap_vout_prepare_vrfb()
    264  xt->dir = DMA_MEM_TO_MEM;     in omap_vout_prepare_vrfb()
    265  xt->src_sgl = false;          in omap_vout_prepare_vrfb()
    266  xt->src_inc = true;           in omap_vout_prepare_vrfb()
    267  xt->dst_sgl = true;           in omap_vout_prepare_vrfb()
    [all …]
|
| A D | omap_voutdef.h |
    87   struct dma_interleaved_template *xt;  member
|
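The VRFB path above is mostly about sgl[0].icg: the inter-chunk gap is what lets a packed source land on a strided destination. A rough sketch of that arithmetic with hypothetical names (the real dst_icg computed in omap_vout_prepare_vrfb() also folds in VRFB's own line geometry):

```c
#include <linux/dmaengine.h>

/* Copy `height` packed lines of `line_bytes` bytes into a destination
 * whose lines are `dst_stride` bytes apart (assumes dst_stride >=
 * line_bytes).  Illustrative only.
 */
static void demo_fill_strided_copy(struct dma_interleaved_template *xt,
				   size_t line_bytes, size_t height,
				   size_t dst_stride)
{
	xt->dir = DMA_MEM_TO_MEM;
	xt->numf = height;			  /* one "frame" per line     */
	xt->frame_size = 1;			  /* a single chunk per frame */
	xt->sgl[0].size = line_bytes;		  /* bytes copied per line    */
	xt->sgl[0].icg = dst_stride - line_bytes; /* skipped after each line  */
	xt->src_sgl = false;			  /* source stays packed      */
	xt->src_inc = true;
	xt->dst_sgl = true;			  /* gap applies at the dst   */
	xt->dst_inc = true;
}
```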
| /drivers/dma/ |
| A D | dma-axi-dmac.c |
    750  if (xt->frame_size != 1)  in axi_dmac_prep_interleaved()
    753  if (xt->dir != chan->direction)  in axi_dmac_prep_interleaved()
    757  if (!xt->src_inc || !axi_dmac_check_addr(chan, xt->src_start))  in axi_dmac_prep_interleaved()
    762  if (!xt->dst_inc || !axi_dmac_check_addr(chan, xt->dst_start))  in axi_dmac_prep_interleaved()
    766  dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]);  in axi_dmac_prep_interleaved()
    767  src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]);  in axi_dmac_prep_interleaved()
    771  xt->numf == 0)  in axi_dmac_prep_interleaved()
    779  if (chan->max_length / xt->sgl[0].size < xt->numf)  in axi_dmac_prep_interleaved()
    781  if (!axi_dmac_check_len(chan, xt->sgl[0].size * xt->numf))  in axi_dmac_prep_interleaved()
    801  desc->sg[0].hw->y_len = xt->numf - 1;  in axi_dmac_prep_interleaved()
    [all …]
|
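On the provider side, dma-axi-dmac shows the usual shape of a device_prep_interleaved_dma() callback: reject templates the hardware cannot express, resolve the per-side gaps with dmaengine_get_src_icg()/dmaengine_get_dst_icg(), then build a descriptor. A skeleton of those checks, under the assumption of a simple single-chunk 2D engine; the demo_ names are placeholders, not axi-dmac functions:

```c
#include <linux/dmaengine.h>

static struct dma_async_tx_descriptor *
demo_prep_interleaved(struct dma_chan *chan,
		      struct dma_interleaved_template *xt,
		      unsigned long flags)
{
	size_t src_icg, dst_icg;

	if (xt->frame_size != 1)		/* one chunk per frame only */
		return NULL;
	if (!xt->numf || !xt->sgl[0].size)	/* nothing to transfer      */
		return NULL;

	/* The helpers resolve which gap applies on each side; they return
	 * 0 when that side is packed or does not increment.
	 */
	src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]);
	dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]);
	if (src_icg && dst_icg)		/* this demo engine strides one side only */
		return NULL;

	/* A real implementation also checks xt->dir and the start addresses
	 * against the channel's capabilities, bounds xt->sgl[0].size * xt->numf
	 * against its maximum length, and then allocates, fills and returns a
	 * hardware descriptor instead of NULL.
	 */
	return NULL;
}
```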
| A D | imx-dma.c |
    946  (unsigned long long) xt->dst_start,  in imxdma_prep_dma_interleaved()
    947  str_true_false(xt->src_sgl), str_true_false(xt->dst_sgl),  in imxdma_prep_dma_interleaved()
    948  xt->numf, xt->frame_size);  in imxdma_prep_dma_interleaved()
    954  if (xt->frame_size != 1 || xt->numf <= 0 || xt->dir != DMA_MEM_TO_MEM)  in imxdma_prep_dma_interleaved()
    960  desc->src = xt->src_start;  in imxdma_prep_dma_interleaved()
    961  desc->dest = xt->dst_start;  in imxdma_prep_dma_interleaved()
    962  desc->x = xt->sgl[0].size;  in imxdma_prep_dma_interleaved()
    963  desc->y = xt->numf;  in imxdma_prep_dma_interleaved()
    964  desc->w = xt->sgl[0].icg + desc->x;  in imxdma_prep_dma_interleaved()
    969  if (xt->src_sgl)  in imxdma_prep_dma_interleaved()
    [all …]
|
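imx-dma turns the template into the 2D geometry its hardware understands: x is the chunk size, y the number of rows, and w the row pitch (size + icg). A small illustrative helper that mirrors the desc->x/y/w assignments above; the struct and function names are made up:

```c
#include <linux/dmaengine.h>

struct demo_2d_geom {
	size_t x;	/* bytes actually copied per row   */
	size_t y;	/* number of rows                  */
	size_t w;	/* pitch: distance from row to row */
};

static void demo_xt_to_2d(const struct dma_interleaved_template *xt,
			  struct demo_2d_geom *g)
{
	g->x = xt->sgl[0].size;
	g->y = xt->numf;
	g->w = xt->sgl[0].icg + xt->sgl[0].size;
	/* Total bytes moved is x * y; the strided side spans roughly
	 * w * y bytes of address space.
	 */
}
```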
| A D | at_xdmac.c |
    1022  if (xt->src_inc) {  in at_xdmac_interleaved_queue_desc()
    1023  if (xt->src_sgl)  in at_xdmac_interleaved_queue_desc()
    1029  if (xt->dst_inc) {  in at_xdmac_interleaved_queue_desc()
    1030  if (xt->dst_sgl)  in at_xdmac_interleaved_queue_desc()
    1083  if (!xt || !xt->numf || (xt->dir != DMA_MEM_TO_MEM))  in at_xdmac_prep_interleaved()
    1090  if ((xt->numf > 1) && (xt->frame_size > 1))  in at_xdmac_prep_interleaved()
    1094  __func__, &xt->src_start, &xt->dst_start, xt->numf,  in at_xdmac_prep_interleaved()
    1100  if (xt->numf > 1) {  in at_xdmac_prep_interleaved()
    1104  xt, xt->sgl);  in at_xdmac_prep_interleaved()
    1150  if (xt->src_sgl)  in at_xdmac_prep_interleaved()
    [all …]
|
| A D | at_hdmac.c |
    884  if (unlikely(!xt || xt->numf != 1 || !xt->frame_size))  in atc_prep_dma_interleaved()
    887  first = xt->sgl;  in atc_prep_dma_interleaved()
    891  __func__, &xt->src_start, &xt->dst_start, xt->numf,  in atc_prep_dma_interleaved()
    892  xt->frame_size, flags);  in atc_prep_dma_interleaved()
    900  for (i = 0; i < xt->frame_size; i++) {  in atc_prep_dma_interleaved()
    903  if ((chunk->size != xt->sgl->size) ||  in atc_prep_dma_interleaved()
    904  (dmaengine_get_dst_icg(xt, chunk) != dmaengine_get_dst_icg(xt, first)) ||  in atc_prep_dma_interleaved()
    905  (dmaengine_get_src_icg(xt, chunk) != dmaengine_get_src_icg(xt, first))) {  in atc_prep_dma_interleaved()
    915  dwidth = atc_get_xfer_width(xt->src_start, xt->dst_start, len);  in atc_prep_dma_interleaved()
    946  lli->saddr = xt->src_start;  in atc_prep_dma_interleaved()
    [all …]
|
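at_hdmac accepts frames with more than one chunk (numf == 1, frame_size >= 1), but only when every chunk matches the first one in size and gaps, since the controller is programmed with a single chunk geometry. A sketch of that uniformity check, modeled on the loop above, with an illustrative name:

```c
#include <linux/dmaengine.h>

static bool demo_chunks_are_uniform(struct dma_interleaved_template *xt)
{
	struct data_chunk *first = xt->sgl;
	size_t i;

	for (i = 0; i < xt->frame_size; i++) {
		struct data_chunk *chunk = &xt->sgl[i];

		if (chunk->size != first->size ||
		    dmaengine_get_dst_icg(xt, chunk) !=
		    dmaengine_get_dst_icg(xt, first) ||
		    dmaengine_get_src_icg(xt, chunk) !=
		    dmaengine_get_src_icg(xt, first))
			return false;
	}

	return true;
}
```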
| /drivers/dma/xilinx/ |
| A D | xdma.c |
    745  for (i = 0; i < xt->frame_size; ++i)  in xdma_prep_interleaved_dma()
    751  sw_desc->dir = xt->dir;  in xdma_prep_interleaved_dma()
    754  sw_desc->frames_left = xt->numf;  in xdma_prep_interleaved_dma()
    755  sw_desc->periods = xt->numf;  in xdma_prep_interleaved_dma()
    758  src_addr = xt->src_start;  in xdma_prep_interleaved_dma()
    759  dst_addr = xt->dst_start;  in xdma_prep_interleaved_dma()
    762  src_addr += dmaengine_get_src_icg(xt, &xt->sgl[i]) + (xt->src_inc ?  in xdma_prep_interleaved_dma()
    763  xt->sgl[i].size : 0);  in xdma_prep_interleaved_dma()
    764  dst_addr += dmaengine_get_dst_icg(xt, &xt->sgl[i]) + (xt->dst_inc ?  in xdma_prep_interleaved_dma()
    765  xt->sgl[i].size : 0);  in xdma_prep_interleaved_dma()
    [all …]
|
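xdma walks sgl[] chunk by chunk, advancing the source and destination addresses after each one. The increment is the resolved gap plus the chunk size when that side increments, exactly as lines 762-765 above show; a standalone sketch of that step, with an illustrative name:

```c
#include <linux/dmaengine.h>

static void demo_advance_addresses(struct dma_interleaved_template *xt, int i,
				   dma_addr_t *src, dma_addr_t *dst)
{
	*src += dmaengine_get_src_icg(xt, &xt->sgl[i]) +
		(xt->src_inc ? xt->sgl[i].size : 0);
	*dst += dmaengine_get_dst_icg(xt, &xt->sgl[i]) +
		(xt->dst_inc ? xt->sgl[i].size : 0);
}
```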
| A D | xilinx_dpdma.c |
    764   struct dma_interleaved_template *xt)  in xilinx_dpdma_chan_prep_interleaved_dma() argument
    769   size_t hsize = xt->sgl[0].size;  in xilinx_dpdma_chan_prep_interleaved_dma()
    770   size_t stride = hsize + xt->sgl[0].icg;  in xilinx_dpdma_chan_prep_interleaved_dma()
    772   if (!IS_ALIGNED(xt->src_start, XILINX_DPDMA_ALIGN_BYTES)) {  in xilinx_dpdma_chan_prep_interleaved_dma()
    790   &xt->src_start, 1);  in xilinx_dpdma_chan_prep_interleaved_dma()
    794   hw_desc->xfer_size = hsize * xt->numf;  in xilinx_dpdma_chan_prep_interleaved_dma()
    1290  struct dma_interleaved_template *xt,  in xilinx_dpdma_prep_interleaved_dma() argument
    1296  if (xt->dir != DMA_MEM_TO_DEV)  in xilinx_dpdma_prep_interleaved_dma()
    1299  if (!xt->numf || !xt->sgl[0].size)  in xilinx_dpdma_prep_interleaved_dma()
    1305  desc = xilinx_dpdma_chan_prep_interleaved_dma(chan, xt);  in xilinx_dpdma_prep_interleaved_dma()
|
| A D | xilinx_dma.c |
    2044  struct dma_interleaved_template *xt,  in xilinx_vdma_dma_prep_interleaved() argument
    2052  if (!is_slave_direction(xt->dir))  in xilinx_vdma_dma_prep_interleaved()
    2055  if (!xt->numf || !xt->sgl[0].size)  in xilinx_vdma_dma_prep_interleaved()
    2058  if (xt->numf & ~XILINX_DMA_VSIZE_MASK ||  in xilinx_vdma_dma_prep_interleaved()
    2062  if (xt->frame_size != 1)  in xilinx_vdma_dma_prep_interleaved()
    2081  hw->vsize = xt->numf;  in xilinx_vdma_dma_prep_interleaved()
    2082  hw->hsize = xt->sgl[0].size;  in xilinx_vdma_dma_prep_interleaved()
    2083  hw->stride = (xt->sgl[0].icg + xt->sgl[0].size) <<  in xilinx_vdma_dma_prep_interleaved()
    2088  if (xt->dir != DMA_MEM_TO_DEV) {  in xilinx_vdma_dma_prep_interleaved()
    2093  hw->buf_addr = xt->dst_start;  in xilinx_vdma_dma_prep_interleaved()
    [all …]
|
| /drivers/media/platform/xilinx/ |
| A D | xilinx-dma.c |
    338  dma->xt.dir = DMA_DEV_TO_MEM;  in xvip_dma_buffer_queue()
    339  dma->xt.src_sgl = false;  in xvip_dma_buffer_queue()
    340  dma->xt.dst_sgl = true;  in xvip_dma_buffer_queue()
    341  dma->xt.dst_start = addr;  in xvip_dma_buffer_queue()
    344  dma->xt.dir = DMA_MEM_TO_DEV;  in xvip_dma_buffer_queue()
    345  dma->xt.src_sgl = true;  in xvip_dma_buffer_queue()
    346  dma->xt.dst_sgl = false;  in xvip_dma_buffer_queue()
    347  dma->xt.src_start = addr;  in xvip_dma_buffer_queue()
    350  dma->xt.frame_size = 1;  in xvip_dma_buffer_queue()
    353  dma->xt.numf = dma->format.height;  in xvip_dma_buffer_queue()
    [all …]
|
| A D | xilinx-dma.h |
    99   struct dma_interleaved_template xt;  member
|
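The V4L2 side in xilinx-dma.c is another client: the template lives inside the channel structure (the xt member declared in xilinx-dma.h) and is refilled in the buffer-queue path, with the transfer direction deciding which side carries the gap. DMA_DEV_TO_MEM scatters into memory (dst_sgl), DMA_MEM_TO_DEV gathers from it (src_sgl). A hedged sketch of the capture-direction fill with illustrative names:

```c
#include <linux/dmaengine.h>

/* Fill a capture (device-to-memory) template: the gap is applied on the
 * memory side so each video line lands on the buffer's stride.  Assumes
 * stride >= width_bytes; for output the src and dst roles simply swap.
 */
static void demo_fill_capture_template(struct dma_interleaved_template *xt,
				       dma_addr_t buf, u32 width_bytes,
				       u32 height, u32 stride)
{
	xt->dir = DMA_DEV_TO_MEM;
	xt->src_sgl = false;
	xt->dst_sgl = true;
	xt->dst_start = buf;
	xt->frame_size = 1;
	xt->numf = height;
	xt->sgl[0].size = width_bytes;
	xt->sgl[0].icg = stride - width_bytes;
}
```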
| /drivers/dma/ti/ |
| A D | omap-dma.c |
    1298  if (is_slave_direction(xt->dir))  in omap_dma_prep_dma_interleaved()
    1301  if (xt->frame_size != 1 || xt->numf == 0)  in omap_dma_prep_dma_interleaved()
    1308  data_type = __ffs((xt->src_start | xt->dst_start | xt->sgl[0].size));  in omap_dma_prep_dma_interleaved()
    1315  d->dev_addr = xt->src_start;  in omap_dma_prep_dma_interleaved()
    1317  sg->en = xt->sgl[0].size / BIT(data_type);  in omap_dma_prep_dma_interleaved()
    1318  sg->fn = xt->numf;  in omap_dma_prep_dma_interleaved()
    1319  sg->addr = xt->dst_start;  in omap_dma_prep_dma_interleaved()
    1322  src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]);  in omap_dma_prep_dma_interleaved()
    1323  dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]);  in omap_dma_prep_dma_interleaved()
    1328  } else if (xt->src_inc) {  in omap_dma_prep_dma_interleaved()
    [all …]
|
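A detail worth noting in omap-dma is how the element width is chosen: __ffs() of the OR of both addresses and the line size yields the largest power-of-two unit that keeps everything aligned, capped at 32-bit elements. A standalone sketch of that trick; the function name is made up, and the cap follows OMAP's 0/1/2 = 8/16/32-bit encoding:

```c
#include <linux/bitops.h>
#include <linux/types.h>

/* Callers must guarantee a nonzero (src | dst | size), since __ffs(0)
 * is undefined.
 */
static unsigned int demo_pick_data_type(dma_addr_t src, dma_addr_t dst,
					size_t size)
{
	unsigned int data_type = __ffs(src | dst | size);

	if (data_type > 2)	/* 2 == 32-bit elements, the widest supported */
		data_type = 2;

	return data_type;
}
```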
| A D | edma.c |
    1237  if (xt->frame_size != 1 || xt->numf == 0)  in edma_prep_dma_interleaved()
    1240  if (xt->sgl[0].size > SZ_64K || xt->numf > SZ_64K)  in edma_prep_dma_interleaved()
    1243  src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]);  in edma_prep_dma_interleaved()
    1246  } else if (xt->src_inc) {  in edma_prep_dma_interleaved()
    1247  src_bidx = xt->sgl[0].size;  in edma_prep_dma_interleaved()
    1254  dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]);  in edma_prep_dma_interleaved()
    1257  } else if (xt->dst_inc) {  in edma_prep_dma_interleaved()
    1258  dst_bidx = xt->sgl[0].size;  in edma_prep_dma_interleaved()
    1278  param->src = xt->src_start;  in edma_prep_dma_interleaved()
    1279  param->dst = xt->dst_start;  in edma_prep_dma_interleaved()
    [all …]
|
| /drivers/gpu/drm/xlnx/ |
| A D | zynqmp_disp.c |
    102   struct dma_interleaved_template xt;  member
    1127  dma->xt.numf = height;  in zynqmp_disp_layer_update()
    1130  dma->xt.src_start = dma_addr;  in zynqmp_disp_layer_update()
    1131  dma->xt.frame_size = 1;  in zynqmp_disp_layer_update()
    1132  dma->xt.dir = DMA_MEM_TO_DEV;  in zynqmp_disp_layer_update()
    1133  dma->xt.src_sgl = true;  in zynqmp_disp_layer_update()
    1134  dma->xt.dst_sgl = false;  in zynqmp_disp_layer_update()
    1136  desc = dmaengine_prep_interleaved_dma(dma->chan, &dma->xt,  in zynqmp_disp_layer_update()
|
| /drivers/iommu/amd/ |
| A D | init.c |
    2311  union intcapxt xt;  in intcapxt_unmask_irq() local
    2313  xt.capxt = 0ULL;  in intcapxt_unmask_irq()
    2314  xt.dest_mode_logical = apic->dest_mode_logical;  in intcapxt_unmask_irq()
    2315  xt.vector = cfg->vector;  in intcapxt_unmask_irq()
    2316  xt.destid_0_23 = cfg->dest_apicid & GENMASK(23, 0);  in intcapxt_unmask_irq()
    2317  xt.destid_24_31 = cfg->dest_apicid >> 24;  in intcapxt_unmask_irq()
    2319  writeq(xt.capxt, iommu->mmio_base + irqd->hwirq);  in intcapxt_unmask_irq()
|
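The final entry is unrelated to DMA templates: here xt is a local union intcapxt that the AMD IOMMU driver uses to compose a 64-bit interrupt-routing register image field by field and program it with a single writeq(). The general pattern looks like the sketch below; the field names and widths in this union are hypothetical, and the authoritative layout is the union intcapxt definition in drivers/iommu/amd/init.c.

```c
#include <linux/bits.h>
#include <linux/io.h>
#include <linux/types.h>

/* Hypothetical register image; the real bit positions differ. */
union demo_capxt {
	u64 capxt;
	struct {
		u64 dest_mode_logical : 1;
		u64 destid_0_23       : 24;
		u64 vector            : 8;
		u64 destid_24_31      : 8;
		u64 reserved          : 23;
	};
};

static void demo_program_capxt(void __iomem *reg, u8 vector,
			       u32 dest_apicid, bool logical)
{
	union demo_capxt xt;

	xt.capxt = 0ULL;		/* clear reserved bits first */
	xt.dest_mode_logical = logical;
	xt.vector = vector;
	xt.destid_0_23 = dest_apicid & GENMASK(23, 0);
	xt.destid_24_31 = dest_apicid >> 24;

	writeq(xt.capxt, reg);		/* one 64-bit MMIO write */
}
```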