Lines Matching refs:chan
447 void (*start_transfer)(struct xilinx_dma_chan *chan);
448 int (*stop_transfer)(struct xilinx_dma_chan *chan);
501 struct xilinx_dma_chan *chan[XILINX_MCDMA_MAX_CHANS_PER_DEVICE]; member
517 #define to_xilinx_chan(chan) \ argument
518 container_of(chan, struct xilinx_dma_chan, common)
521 #define xilinx_dma_poll_timeout(chan, reg, val, cond, delay_us, timeout_us) \ argument
522 readl_poll_timeout_atomic(chan->xdev->regs + chan->ctrl_offset + reg, \
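The expansion of xilinx_dma_poll_timeout() is cut off above. Judging from the visible first line, a call like the ones in xilinx_dma_stop_transfer() and xilinx_dma_reset() further down presumably expands as in this sketch; readl_poll_timeout_atomic() is the stock helper from <linux/iopoll.h>, and the condition plus the 0/1000 delay/timeout values here are illustrative, not the driver's:

    u32 val;
    int err;

    /* xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
     *                         val & XILINX_DMA_DMASR_HALTED, 0, 1000)
     * becomes, after macro expansion: */
    err = readl_poll_timeout_atomic(chan->xdev->regs + chan->ctrl_offset +
                                    XILINX_DMA_REG_DMASR,
                                    val, val & XILINX_DMA_DMASR_HALTED,
                                    0, 1000);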
526 static inline u32 dma_read(struct xilinx_dma_chan *chan, u32 reg) in dma_read() argument
528 return ioread32(chan->xdev->regs + reg); in dma_read()
531 static inline void dma_write(struct xilinx_dma_chan *chan, u32 reg, u32 value) in dma_write() argument
533 iowrite32(value, chan->xdev->regs + reg); in dma_write()
536 static inline void vdma_desc_write(struct xilinx_dma_chan *chan, u32 reg, in vdma_desc_write() argument
539 dma_write(chan, chan->desc_offset + reg, value); in vdma_desc_write()
542 static inline u32 dma_ctrl_read(struct xilinx_dma_chan *chan, u32 reg) in dma_ctrl_read() argument
544 return dma_read(chan, chan->ctrl_offset + reg); in dma_ctrl_read()
547 static inline void dma_ctrl_write(struct xilinx_dma_chan *chan, u32 reg, in dma_ctrl_write() argument
550 dma_write(chan, chan->ctrl_offset + reg, value); in dma_ctrl_write()
553 static inline void dma_ctrl_clr(struct xilinx_dma_chan *chan, u32 reg, in dma_ctrl_clr() argument
556 dma_ctrl_write(chan, reg, dma_ctrl_read(chan, reg) & ~clr); in dma_ctrl_clr()
559 static inline void dma_ctrl_set(struct xilinx_dma_chan *chan, u32 reg, in dma_ctrl_set() argument
562 dma_ctrl_write(chan, reg, dma_ctrl_read(chan, reg) | set); in dma_ctrl_set()
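dma_read()/dma_write() and the dma_ctrl_*() wrappers form a small accessor stack: raw ioread32()/iowrite32() at the bottom, the per-channel control window (chan->ctrl_offset) in the middle, and read-modify-write bit helpers on top. A minimal sketch of what one dma_ctrl_set() call works out to after inlining, using only the definitions shown above:

    /* dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP)
     * is equivalent to this read-modify-write: */
    u32 cr = ioread32(chan->xdev->regs + chan->ctrl_offset +
                      XILINX_DMA_REG_DMACR);
    iowrite32(cr | XILINX_DMA_DMACR_RUNSTOP,
              chan->xdev->regs + chan->ctrl_offset + XILINX_DMA_REG_DMACR);

Keeping the offset arithmetic in exactly one layer is what lets the same helpers serve MM2S and S2MM channels, whose ctrl_offset values differ.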
576 static inline void vdma_desc_write_64(struct xilinx_dma_chan *chan, u32 reg, in vdma_desc_write_64() argument
580 writel(value_lsb, chan->xdev->regs + chan->desc_offset + reg); in vdma_desc_write_64()
583 writel(value_msb, chan->xdev->regs + chan->desc_offset + reg + 4); in vdma_desc_write_64()
586 static inline void dma_writeq(struct xilinx_dma_chan *chan, u32 reg, u64 value) in dma_writeq() argument
588 lo_hi_writeq(value, chan->xdev->regs + chan->ctrl_offset + reg); in dma_writeq()
591 static inline void xilinx_write(struct xilinx_dma_chan *chan, u32 reg, in xilinx_write() argument
594 if (chan->ext_addr) in xilinx_write()
595 dma_writeq(chan, reg, addr); in xilinx_write()
597 dma_ctrl_write(chan, reg, addr); in xilinx_write()
600 static inline void xilinx_axidma_buf(struct xilinx_dma_chan *chan, in xilinx_axidma_buf() argument
605 if (chan->ext_addr) { in xilinx_axidma_buf()
614 static inline void xilinx_aximcdma_buf(struct xilinx_dma_chan *chan, in xilinx_aximcdma_buf() argument
618 if (chan->ext_addr) { in xilinx_aximcdma_buf()
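The bodies of xilinx_axidma_buf() and xilinx_aximcdma_buf() are elided above apart from the chan->ext_addr test. By analogy with vdma_desc_write_64(), the wide-address case presumably splits the buffer address into LSB/MSB descriptor fields; in this sketch the parameters (hw, buf_addr, sg_used, period_len) and the hw->buf_addr/buf_addr_msb field names are assumed from context:

    if (chan->ext_addr) {
        hw->buf_addr = lower_32_bits(buf_addr + sg_used + period_len);
        hw->buf_addr_msb = upper_32_bits(buf_addr + sg_used + period_len);
    } else {
        hw->buf_addr = buf_addr + sg_used + period_len;
    }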
637 xilinx_vdma_alloc_tx_segment(struct xilinx_dma_chan *chan) in xilinx_vdma_alloc_tx_segment() argument
642 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys); in xilinx_vdma_alloc_tx_segment()
658 xilinx_cdma_alloc_tx_segment(struct xilinx_dma_chan *chan) in xilinx_cdma_alloc_tx_segment() argument
663 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys); in xilinx_cdma_alloc_tx_segment()
679 xilinx_axidma_alloc_tx_segment(struct xilinx_dma_chan *chan) in xilinx_axidma_alloc_tx_segment() argument
684 spin_lock_irqsave(&chan->lock, flags); in xilinx_axidma_alloc_tx_segment()
685 if (!list_empty(&chan->free_seg_list)) { in xilinx_axidma_alloc_tx_segment()
686 segment = list_first_entry(&chan->free_seg_list, in xilinx_axidma_alloc_tx_segment()
691 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_axidma_alloc_tx_segment()
694 dev_dbg(chan->dev, "Could not find free tx segment\n"); in xilinx_axidma_alloc_tx_segment()
706 xilinx_aximcdma_alloc_tx_segment(struct xilinx_dma_chan *chan) in xilinx_aximcdma_alloc_tx_segment() argument
711 spin_lock_irqsave(&chan->lock, flags); in xilinx_aximcdma_alloc_tx_segment()
712 if (!list_empty(&chan->free_seg_list)) { in xilinx_aximcdma_alloc_tx_segment()
713 segment = list_first_entry(&chan->free_seg_list, in xilinx_aximcdma_alloc_tx_segment()
718 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_aximcdma_alloc_tx_segment()
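Unlike the pool-backed VDMA/CDMA allocators above, the AXI DMA and AXI MCDMA allocators take segments from a preallocated free_seg_list, so allocation is a list pop under the channel spinlock. Condensing the listed lines into one sketch (the list_del() is assumed, since the popped entry must leave the free list):

    struct xilinx_axidma_tx_segment *segment = NULL;
    unsigned long flags;

    spin_lock_irqsave(&chan->lock, flags);
    if (!list_empty(&chan->free_seg_list)) {
        segment = list_first_entry(&chan->free_seg_list,
                                   struct xilinx_axidma_tx_segment, node);
        list_del(&segment->node);
    }
    spin_unlock_irqrestore(&chan->lock, flags);

    if (!segment)
        dev_dbg(chan->dev, "Could not find free tx segment\n");
    return segment;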
750 static void xilinx_dma_free_tx_segment(struct xilinx_dma_chan *chan, in xilinx_dma_free_tx_segment() argument
755 list_add_tail(&segment->node, &chan->free_seg_list); in xilinx_dma_free_tx_segment()
763 static void xilinx_mcdma_free_tx_segment(struct xilinx_dma_chan *chan, in xilinx_mcdma_free_tx_segment() argument
769 list_add_tail(&segment->node, &chan->free_seg_list); in xilinx_mcdma_free_tx_segment()
777 static void xilinx_cdma_free_tx_segment(struct xilinx_dma_chan *chan, in xilinx_cdma_free_tx_segment() argument
780 dma_pool_free(chan->desc_pool, segment, segment->phys); in xilinx_cdma_free_tx_segment()
788 static void xilinx_vdma_free_tx_segment(struct xilinx_dma_chan *chan, in xilinx_vdma_free_tx_segment() argument
791 dma_pool_free(chan->desc_pool, segment, segment->phys); in xilinx_vdma_free_tx_segment()
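The CDMA and VDMA segment helpers use a dma_pool instead. A sketch of the pool lifecycle these calls imply, with creation and teardown pulled in from xilinx_dma_alloc_chan_resources() and xilinx_dma_free_chan_resources() further down; the size and alignment arguments are assumptions:

    /* once per channel, at alloc_chan_resources() time */
    chan->desc_pool = dma_pool_create("xilinx_cdma_desc_pool", chan->dev,
                                      sizeof(struct xilinx_cdma_tx_segment),
                                      __alignof__(struct xilinx_cdma_tx_segment),
                                      0);

    /* per segment */
    segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
    segment->phys = phys;
    /* ... segment in use ... */
    dma_pool_free(chan->desc_pool, segment, segment->phys);

    /* teardown, in free_chan_resources() */
    dma_pool_destroy(chan->desc_pool);
    chan->desc_pool = NULL;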
801 xilinx_dma_alloc_tx_descriptor(struct xilinx_dma_chan *chan) in xilinx_dma_alloc_tx_descriptor() argument
820 xilinx_dma_free_tx_descriptor(struct xilinx_dma_chan *chan, in xilinx_dma_free_tx_descriptor() argument
831 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_free_tx_descriptor()
834 xilinx_vdma_free_tx_segment(chan, segment); in xilinx_dma_free_tx_descriptor()
836 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_free_tx_descriptor()
840 xilinx_cdma_free_tx_segment(chan, cdma_segment); in xilinx_dma_free_tx_descriptor()
842 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_free_tx_descriptor()
846 xilinx_dma_free_tx_segment(chan, axidma_segment); in xilinx_dma_free_tx_descriptor()
852 xilinx_mcdma_free_tx_segment(chan, aximcdma_segment); in xilinx_dma_free_tx_descriptor()
866 static void xilinx_dma_free_desc_list(struct xilinx_dma_chan *chan, in xilinx_dma_free_desc_list() argument
873 xilinx_dma_free_tx_descriptor(chan, desc); in xilinx_dma_free_desc_list()
881 static void xilinx_dma_free_descriptors(struct xilinx_dma_chan *chan) in xilinx_dma_free_descriptors() argument
885 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_free_descriptors()
887 xilinx_dma_free_desc_list(chan, &chan->pending_list); in xilinx_dma_free_descriptors()
888 xilinx_dma_free_desc_list(chan, &chan->done_list); in xilinx_dma_free_descriptors()
889 xilinx_dma_free_desc_list(chan, &chan->active_list); in xilinx_dma_free_descriptors()
891 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_free_descriptors()
900 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_free_chan_resources() local
903 dev_dbg(chan->dev, "Free all channel resources.\n"); in xilinx_dma_free_chan_resources()
905 xilinx_dma_free_descriptors(chan); in xilinx_dma_free_chan_resources()
907 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_free_chan_resources()
908 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_free_chan_resources()
909 INIT_LIST_HEAD(&chan->free_seg_list); in xilinx_dma_free_chan_resources()
910 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_free_chan_resources()
913 dma_free_coherent(chan->dev, sizeof(*chan->seg_v) * in xilinx_dma_free_chan_resources()
914 XILINX_DMA_NUM_DESCS, chan->seg_v, in xilinx_dma_free_chan_resources()
915 chan->seg_p); in xilinx_dma_free_chan_resources()
918 dma_free_coherent(chan->dev, sizeof(*chan->cyclic_seg_v), in xilinx_dma_free_chan_resources()
919 chan->cyclic_seg_v, chan->cyclic_seg_p); in xilinx_dma_free_chan_resources()
922 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_free_chan_resources()
923 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_free_chan_resources()
924 INIT_LIST_HEAD(&chan->free_seg_list); in xilinx_dma_free_chan_resources()
925 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_free_chan_resources()
928 dma_free_coherent(chan->dev, sizeof(*chan->seg_mv) * in xilinx_dma_free_chan_resources()
929 XILINX_DMA_NUM_DESCS, chan->seg_mv, in xilinx_dma_free_chan_resources()
930 chan->seg_p); in xilinx_dma_free_chan_resources()
933 if (chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA && in xilinx_dma_free_chan_resources()
934 chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIMCDMA) { in xilinx_dma_free_chan_resources()
935 dma_pool_destroy(chan->desc_pool); in xilinx_dma_free_chan_resources()
936 chan->desc_pool = NULL; in xilinx_dma_free_chan_resources()
948 static u32 xilinx_dma_get_residue(struct xilinx_dma_chan *chan, in xilinx_dma_get_residue() argument
961 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_get_residue()
967 chan->xdev->max_buffer_len; in xilinx_dma_get_residue()
968 } else if (chan->xdev->dma_config->dmatype == in xilinx_dma_get_residue()
975 chan->xdev->max_buffer_len; in xilinx_dma_get_residue()
984 chan->xdev->max_buffer_len; in xilinx_dma_get_residue()
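The arithmetic in xilinx_dma_get_residue() is elided, but every branch masks with chan->xdev->max_buffer_len, which points to the usual control-minus-status computation: hw->control carries the programmed byte count, hw->status the bytes the engine actually completed. A hedged sketch of the inner loop for the AXI DMA case:

    struct xilinx_axidma_tx_segment *seg;
    u32 residue = 0;

    list_for_each_entry(seg, &desc->segments, node)
        residue += (seg->hw.control - seg->hw.status) &
                   chan->xdev->max_buffer_len;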
997 static void xilinx_dma_chan_handle_cyclic(struct xilinx_dma_chan *chan, in xilinx_dma_chan_handle_cyclic() argument
1005 spin_unlock_irqrestore(&chan->lock, *flags); in xilinx_dma_chan_handle_cyclic()
1007 spin_lock_irqsave(&chan->lock, *flags); in xilinx_dma_chan_handle_cyclic()
1015 static void xilinx_dma_chan_desc_cleanup(struct xilinx_dma_chan *chan) in xilinx_dma_chan_desc_cleanup() argument
1020 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1022 list_for_each_entry_safe(desc, next, &chan->done_list, node) { in xilinx_dma_chan_desc_cleanup()
1026 xilinx_dma_chan_handle_cyclic(chan, desc, &flags); in xilinx_dma_chan_desc_cleanup()
1034 if (chan->direction == DMA_DEV_TO_MEM) in xilinx_dma_chan_desc_cleanup()
1045 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1047 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1051 xilinx_dma_free_tx_descriptor(chan, desc); in xilinx_dma_chan_desc_cleanup()
1057 if (chan->terminating) in xilinx_dma_chan_desc_cleanup()
1061 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
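xilinx_dma_chan_desc_cleanup() observes the standard dmaengine rule that client completion callbacks must not run with the channel lock held; that is what the unlock/relock pair in the middle of the loop is for. A condensed sketch of the pattern (dmaengine_desc_get_callback_invoke() and dma_run_dependencies() are stock dmaengine helpers; their exact placement here is assumed):

    spin_lock_irqsave(&chan->lock, flags);
    list_for_each_entry_safe(desc, next, &chan->done_list, node) {
        list_del(&desc->node);

        /* never call into the client with chan->lock held */
        spin_unlock_irqrestore(&chan->lock, flags);
        dmaengine_desc_get_callback_invoke(&desc->async_tx, NULL);
        spin_lock_irqsave(&chan->lock, flags);

        dma_run_dependencies(&desc->async_tx);
        xilinx_dma_free_tx_descriptor(chan, desc);
    }
    spin_unlock_irqrestore(&chan->lock, flags);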
1070 struct xilinx_dma_chan *chan = from_tasklet(chan, t, tasklet); in xilinx_dma_do_tasklet() local
1072 xilinx_dma_chan_desc_cleanup(chan); in xilinx_dma_do_tasklet()
1083 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_alloc_chan_resources() local
1087 if (chan->desc_pool) in xilinx_dma_alloc_chan_resources()
1094 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_alloc_chan_resources()
1096 chan->seg_v = dma_alloc_coherent(chan->dev, in xilinx_dma_alloc_chan_resources()
1097 sizeof(*chan->seg_v) * XILINX_DMA_NUM_DESCS, in xilinx_dma_alloc_chan_resources()
1098 &chan->seg_p, GFP_KERNEL); in xilinx_dma_alloc_chan_resources()
1099 if (!chan->seg_v) { in xilinx_dma_alloc_chan_resources()
1100 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1102 chan->id); in xilinx_dma_alloc_chan_resources()
1111 chan->cyclic_seg_v = dma_alloc_coherent(chan->dev, in xilinx_dma_alloc_chan_resources()
1112 sizeof(*chan->cyclic_seg_v), in xilinx_dma_alloc_chan_resources()
1113 &chan->cyclic_seg_p, in xilinx_dma_alloc_chan_resources()
1115 if (!chan->cyclic_seg_v) { in xilinx_dma_alloc_chan_resources()
1116 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1118 dma_free_coherent(chan->dev, sizeof(*chan->seg_v) * in xilinx_dma_alloc_chan_resources()
1119 XILINX_DMA_NUM_DESCS, chan->seg_v, in xilinx_dma_alloc_chan_resources()
1120 chan->seg_p); in xilinx_dma_alloc_chan_resources()
1123 chan->cyclic_seg_v->phys = chan->cyclic_seg_p; in xilinx_dma_alloc_chan_resources()
1126 chan->seg_v[i].hw.next_desc = in xilinx_dma_alloc_chan_resources()
1127 lower_32_bits(chan->seg_p + sizeof(*chan->seg_v) * in xilinx_dma_alloc_chan_resources()
1129 chan->seg_v[i].hw.next_desc_msb = in xilinx_dma_alloc_chan_resources()
1130 upper_32_bits(chan->seg_p + sizeof(*chan->seg_v) * in xilinx_dma_alloc_chan_resources()
1132 chan->seg_v[i].phys = chan->seg_p + in xilinx_dma_alloc_chan_resources()
1133 sizeof(*chan->seg_v) * i; in xilinx_dma_alloc_chan_resources()
1134 list_add_tail(&chan->seg_v[i].node, in xilinx_dma_alloc_chan_resources()
1135 &chan->free_seg_list); in xilinx_dma_alloc_chan_resources()
1137 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_alloc_chan_resources()
1139 chan->seg_mv = dma_alloc_coherent(chan->dev, in xilinx_dma_alloc_chan_resources()
1140 sizeof(*chan->seg_mv) * in xilinx_dma_alloc_chan_resources()
1142 &chan->seg_p, GFP_KERNEL); in xilinx_dma_alloc_chan_resources()
1143 if (!chan->seg_mv) { in xilinx_dma_alloc_chan_resources()
1144 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1146 chan->id); in xilinx_dma_alloc_chan_resources()
1150 chan->seg_mv[i].hw.next_desc = in xilinx_dma_alloc_chan_resources()
1151 lower_32_bits(chan->seg_p + sizeof(*chan->seg_mv) * in xilinx_dma_alloc_chan_resources()
1153 chan->seg_mv[i].hw.next_desc_msb = in xilinx_dma_alloc_chan_resources()
1154 upper_32_bits(chan->seg_p + sizeof(*chan->seg_mv) * in xilinx_dma_alloc_chan_resources()
1156 chan->seg_mv[i].phys = chan->seg_p + in xilinx_dma_alloc_chan_resources()
1157 sizeof(*chan->seg_mv) * i; in xilinx_dma_alloc_chan_resources()
1158 list_add_tail(&chan->seg_mv[i].node, in xilinx_dma_alloc_chan_resources()
1159 &chan->free_seg_list); in xilinx_dma_alloc_chan_resources()
1161 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_alloc_chan_resources()
1162 chan->desc_pool = dma_pool_create("xilinx_cdma_desc_pool", in xilinx_dma_alloc_chan_resources()
1163 chan->dev, in xilinx_dma_alloc_chan_resources()
1168 chan->desc_pool = dma_pool_create("xilinx_vdma_desc_pool", in xilinx_dma_alloc_chan_resources()
1169 chan->dev, in xilinx_dma_alloc_chan_resources()
1175 if (!chan->desc_pool && in xilinx_dma_alloc_chan_resources()
1176 ((chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA) && in xilinx_dma_alloc_chan_resources()
1177 chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIMCDMA)) { in xilinx_dma_alloc_chan_resources()
1178 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1180 chan->id); in xilinx_dma_alloc_chan_resources()
1186 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_alloc_chan_resources()
1190 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, in xilinx_dma_alloc_chan_resources()
1194 if ((chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) && chan->has_sg) in xilinx_dma_alloc_chan_resources()
1195 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, in xilinx_dma_alloc_chan_resources()
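The per-descriptor loops in xilinx_dma_alloc_chan_resources() pre-link the coherent block of segments into a hardware-chainable ring: each BD's next_desc/next_desc_msb hold the physical address of the following BD. Filling in the elided index arithmetic for the AXI DMA case (the (i + 1) % XILINX_DMA_NUM_DESCS wrap is an assumption):

    for (i = 0; i < XILINX_DMA_NUM_DESCS; i++) {
        dma_addr_t next = chan->seg_p + sizeof(*chan->seg_v) *
                          ((i + 1) % XILINX_DMA_NUM_DESCS);

        chan->seg_v[i].hw.next_desc = lower_32_bits(next);
        chan->seg_v[i].hw.next_desc_msb = upper_32_bits(next);
        chan->seg_v[i].phys = chan->seg_p + sizeof(*chan->seg_v) * i;
        list_add_tail(&chan->seg_v[i].node, &chan->free_seg_list);
    }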
1209 static int xilinx_dma_calc_copysize(struct xilinx_dma_chan *chan, in xilinx_dma_calc_copysize() argument
1215 chan->xdev->max_buffer_len); in xilinx_dma_calc_copysize()
1218 chan->xdev->common.copy_align) { in xilinx_dma_calc_copysize()
1224 (1 << chan->xdev->common.copy_align)); in xilinx_dma_calc_copysize()
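xilinx_dma_calc_copysize() clamps a chunk twice: first to the IP's max_buffer_len, then, when more data follows and copy_align is set, down to an alignment granule so that the next chunk starts aligned. A sketch of the likely complete body:

    static int xilinx_dma_calc_copysize(struct xilinx_dma_chan *chan,
                                        int size, int done)
    {
        size_t copy = min_t(size_t, size - done,
                            chan->xdev->max_buffer_len);

        if ((copy + done < size) && chan->xdev->common.copy_align) {
            /* keep the *next* chunk aligned */
            copy = rounddown(copy, 1 << chan->xdev->common.copy_align);
        }
        return copy;
    }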
1241 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_tx_status() local
1251 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_tx_status()
1252 if (!list_empty(&chan->active_list)) { in xilinx_dma_tx_status()
1253 desc = list_last_entry(&chan->active_list, in xilinx_dma_tx_status()
1259 if (chan->has_sg && chan->xdev->dma_config->dmatype != XDMA_TYPE_VDMA) in xilinx_dma_tx_status()
1260 residue = xilinx_dma_get_residue(chan, desc); in xilinx_dma_tx_status()
1262 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_tx_status()
1275 static int xilinx_dma_stop_transfer(struct xilinx_dma_chan *chan) in xilinx_dma_stop_transfer() argument
1279 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP); in xilinx_dma_stop_transfer()
1282 return xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val, in xilinx_dma_stop_transfer()
1293 static int xilinx_cdma_stop_transfer(struct xilinx_dma_chan *chan) in xilinx_cdma_stop_transfer() argument
1297 return xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val, in xilinx_cdma_stop_transfer()
1306 static void xilinx_dma_start(struct xilinx_dma_chan *chan) in xilinx_dma_start() argument
1311 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP); in xilinx_dma_start()
1314 err = xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val, in xilinx_dma_start()
1319 dev_err(chan->dev, "Cannot start channel %p: %x\n", in xilinx_dma_start()
1320 chan, dma_ctrl_read(chan, XILINX_DMA_REG_DMASR)); in xilinx_dma_start()
1322 chan->err = true; in xilinx_dma_start()
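xilinx_dma_start() is the run half of the run/stop handshake: set DMACR.RUNSTOP, then poll DMASR until the engine leaves the halted state, and flag chan->err on timeout. The poll condition is elided above; it is presumably the negation of the HALTED test that xilinx_dma_stop_transfer() waits for (XILINX_DMA_DMASR_HALTED and XILINX_DMA_LOOP_COUNT are assumed names):

    dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP);

    /* wait for DMASR to stop reporting HALTED (condition assumed) */
    err = xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
                                  !(val & XILINX_DMA_DMASR_HALTED),
                                  0, XILINX_DMA_LOOP_COUNT);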
1330 static void xilinx_vdma_start_transfer(struct xilinx_dma_chan *chan) in xilinx_vdma_start_transfer() argument
1332 struct xilinx_vdma_config *config = &chan->config; in xilinx_vdma_start_transfer()
1339 if (chan->err) in xilinx_vdma_start_transfer()
1342 if (!chan->idle) in xilinx_vdma_start_transfer()
1345 if (list_empty(&chan->pending_list)) in xilinx_vdma_start_transfer()
1348 desc = list_first_entry(&chan->pending_list, in xilinx_vdma_start_transfer()
1352 if (chan->has_vflip) { in xilinx_vdma_start_transfer()
1353 reg = dma_read(chan, XILINX_VDMA_REG_ENABLE_VERTICAL_FLIP); in xilinx_vdma_start_transfer()
1356 dma_write(chan, XILINX_VDMA_REG_ENABLE_VERTICAL_FLIP, in xilinx_vdma_start_transfer()
1360 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR); in xilinx_vdma_start_transfer()
1373 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg); in xilinx_vdma_start_transfer()
1375 j = chan->desc_submitcount; in xilinx_vdma_start_transfer()
1376 reg = dma_read(chan, XILINX_DMA_REG_PARK_PTR); in xilinx_vdma_start_transfer()
1377 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_vdma_start_transfer()
1384 dma_write(chan, XILINX_DMA_REG_PARK_PTR, reg); in xilinx_vdma_start_transfer()
1387 xilinx_dma_start(chan); in xilinx_vdma_start_transfer()
1389 if (chan->err) in xilinx_vdma_start_transfer()
1393 if (chan->desc_submitcount < chan->num_frms) in xilinx_vdma_start_transfer()
1394 i = chan->desc_submitcount; in xilinx_vdma_start_transfer()
1397 if (chan->ext_addr) in xilinx_vdma_start_transfer()
1398 vdma_desc_write_64(chan, in xilinx_vdma_start_transfer()
1403 vdma_desc_write(chan, in xilinx_vdma_start_transfer()
1414 vdma_desc_write(chan, XILINX_DMA_REG_HSIZE, last->hw.hsize); in xilinx_vdma_start_transfer()
1415 vdma_desc_write(chan, XILINX_DMA_REG_FRMDLY_STRIDE, in xilinx_vdma_start_transfer()
1417 vdma_desc_write(chan, XILINX_DMA_REG_VSIZE, last->hw.vsize); in xilinx_vdma_start_transfer()
1419 chan->desc_submitcount++; in xilinx_vdma_start_transfer()
1420 chan->desc_pendingcount--; in xilinx_vdma_start_transfer()
1421 list_move_tail(&desc->node, &chan->active_list); in xilinx_vdma_start_transfer()
1422 if (chan->desc_submitcount == chan->num_frms) in xilinx_vdma_start_transfer()
1423 chan->desc_submitcount = 0; in xilinx_vdma_start_transfer()
1425 chan->idle = false; in xilinx_vdma_start_transfer()
1432 static void xilinx_cdma_start_transfer(struct xilinx_dma_chan *chan) in xilinx_cdma_start_transfer() argument
1436 u32 ctrl_reg = dma_read(chan, XILINX_DMA_REG_DMACR); in xilinx_cdma_start_transfer()
1438 if (chan->err) in xilinx_cdma_start_transfer()
1441 if (!chan->idle) in xilinx_cdma_start_transfer()
1444 if (list_empty(&chan->pending_list)) in xilinx_cdma_start_transfer()
1447 head_desc = list_first_entry(&chan->pending_list, in xilinx_cdma_start_transfer()
1449 tail_desc = list_last_entry(&chan->pending_list, in xilinx_cdma_start_transfer()
1454 if (chan->desc_pendingcount <= XILINX_DMA_COALESCE_MAX) { in xilinx_cdma_start_transfer()
1456 ctrl_reg |= chan->desc_pendingcount << in xilinx_cdma_start_transfer()
1458 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, ctrl_reg); in xilinx_cdma_start_transfer()
1461 if (chan->has_sg) { in xilinx_cdma_start_transfer()
1462 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR, in xilinx_cdma_start_transfer()
1465 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, in xilinx_cdma_start_transfer()
1468 xilinx_write(chan, XILINX_DMA_REG_CURDESC, in xilinx_cdma_start_transfer()
1472 xilinx_write(chan, XILINX_DMA_REG_TAILDESC, in xilinx_cdma_start_transfer()
1485 xilinx_write(chan, XILINX_CDMA_REG_SRCADDR, in xilinx_cdma_start_transfer()
1487 xilinx_write(chan, XILINX_CDMA_REG_DSTADDR, in xilinx_cdma_start_transfer()
1491 dma_ctrl_write(chan, XILINX_DMA_REG_BTT, in xilinx_cdma_start_transfer()
1492 hw->control & chan->xdev->max_buffer_len); in xilinx_cdma_start_transfer()
1495 list_splice_tail_init(&chan->pending_list, &chan->active_list); in xilinx_cdma_start_transfer()
1496 chan->desc_pendingcount = 0; in xilinx_cdma_start_transfer()
1497 chan->idle = false; in xilinx_cdma_start_transfer()
1504 static void xilinx_dma_start_transfer(struct xilinx_dma_chan *chan) in xilinx_dma_start_transfer() argument
1510 if (chan->err) in xilinx_dma_start_transfer()
1513 if (list_empty(&chan->pending_list)) in xilinx_dma_start_transfer()
1516 if (!chan->idle) in xilinx_dma_start_transfer()
1519 head_desc = list_first_entry(&chan->pending_list, in xilinx_dma_start_transfer()
1521 tail_desc = list_last_entry(&chan->pending_list, in xilinx_dma_start_transfer()
1526 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR); in xilinx_dma_start_transfer()
1528 if (chan->desc_pendingcount <= XILINX_DMA_COALESCE_MAX) { in xilinx_dma_start_transfer()
1530 reg |= chan->desc_pendingcount << in xilinx_dma_start_transfer()
1532 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg); in xilinx_dma_start_transfer()
1535 if (chan->has_sg) in xilinx_dma_start_transfer()
1536 xilinx_write(chan, XILINX_DMA_REG_CURDESC, in xilinx_dma_start_transfer()
1539 xilinx_dma_start(chan); in xilinx_dma_start_transfer()
1541 if (chan->err) in xilinx_dma_start_transfer()
1545 if (chan->has_sg) { in xilinx_dma_start_transfer()
1546 if (chan->cyclic) in xilinx_dma_start_transfer()
1547 xilinx_write(chan, XILINX_DMA_REG_TAILDESC, in xilinx_dma_start_transfer()
1548 chan->cyclic_seg_v->phys); in xilinx_dma_start_transfer()
1550 xilinx_write(chan, XILINX_DMA_REG_TAILDESC, in xilinx_dma_start_transfer()
1561 xilinx_write(chan, XILINX_DMA_REG_SRCDSTADDR, in xilinx_dma_start_transfer()
1565 dma_ctrl_write(chan, XILINX_DMA_REG_BTT, in xilinx_dma_start_transfer()
1566 hw->control & chan->xdev->max_buffer_len); in xilinx_dma_start_transfer()
1569 list_splice_tail_init(&chan->pending_list, &chan->active_list); in xilinx_dma_start_transfer()
1570 chan->desc_pendingcount = 0; in xilinx_dma_start_transfer()
1571 chan->idle = false; in xilinx_dma_start_transfer()
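In scatter-gather mode the ordering of the writes above matters: CURDESC must be programmed while the engine is still halted, the channel is then started, and the TAILDESC write is what actually triggers descriptor fetch. Condensed from xilinx_dma_start_transfer() (head_desc->async_tx.phys and tail_segment->phys follow the driver's naming):

    if (chan->has_sg)
        xilinx_write(chan, XILINX_DMA_REG_CURDESC,
                     head_desc->async_tx.phys);   /* while halted */

    xilinx_dma_start(chan);                       /* RUNSTOP + poll */
    if (chan->err)
        return;

    if (chan->has_sg)                             /* fetch trigger */
        xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
                     tail_segment->phys);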
1578 static void xilinx_mcdma_start_transfer(struct xilinx_dma_chan *chan) in xilinx_mcdma_start_transfer() argument
1589 if (chan->err) in xilinx_mcdma_start_transfer()
1592 if (!chan->idle) in xilinx_mcdma_start_transfer()
1595 if (list_empty(&chan->pending_list)) in xilinx_mcdma_start_transfer()
1598 head_desc = list_first_entry(&chan->pending_list, in xilinx_mcdma_start_transfer()
1600 tail_desc = list_last_entry(&chan->pending_list, in xilinx_mcdma_start_transfer()
1605 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest)); in xilinx_mcdma_start_transfer()
1607 if (chan->desc_pendingcount <= XILINX_MCDMA_COALESCE_MAX) { in xilinx_mcdma_start_transfer()
1609 reg |= chan->desc_pendingcount << in xilinx_mcdma_start_transfer()
1614 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), reg); in xilinx_mcdma_start_transfer()
1617 xilinx_write(chan, XILINX_MCDMA_CHAN_CDESC_OFFSET(chan->tdest), in xilinx_mcdma_start_transfer()
1621 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHEN_OFFSET); in xilinx_mcdma_start_transfer()
1622 reg |= BIT(chan->tdest); in xilinx_mcdma_start_transfer()
1623 dma_ctrl_write(chan, XILINX_MCDMA_CHEN_OFFSET, reg); in xilinx_mcdma_start_transfer()
1626 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest)); in xilinx_mcdma_start_transfer()
1628 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), reg); in xilinx_mcdma_start_transfer()
1630 xilinx_dma_start(chan); in xilinx_mcdma_start_transfer()
1632 if (chan->err) in xilinx_mcdma_start_transfer()
1636 xilinx_write(chan, XILINX_MCDMA_CHAN_TDESC_OFFSET(chan->tdest), in xilinx_mcdma_start_transfer()
1639 list_splice_tail_init(&chan->pending_list, &chan->active_list); in xilinx_mcdma_start_transfer()
1640 chan->desc_pendingcount = 0; in xilinx_mcdma_start_transfer()
1641 chan->idle = false; in xilinx_mcdma_start_transfer()
1650 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_issue_pending() local
1653 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_issue_pending()
1654 chan->start_transfer(chan); in xilinx_dma_issue_pending()
1655 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_issue_pending()
1675 static void xilinx_dma_complete_descriptor(struct xilinx_dma_chan *chan) in xilinx_dma_complete_descriptor() argument
1680 if (list_empty(&chan->active_list)) in xilinx_dma_complete_descriptor()
1683 list_for_each_entry_safe(desc, next, &chan->active_list, node) { in xilinx_dma_complete_descriptor()
1684 if (chan->has_sg && chan->xdev->dma_config->dmatype != in xilinx_dma_complete_descriptor()
1686 desc->residue = xilinx_dma_get_residue(chan, desc); in xilinx_dma_complete_descriptor()
1689 desc->err = chan->err; in xilinx_dma_complete_descriptor()
1694 list_add_tail(&desc->node, &chan->done_list); in xilinx_dma_complete_descriptor()
1704 static int xilinx_dma_reset(struct xilinx_dma_chan *chan) in xilinx_dma_reset() argument
1709 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RESET); in xilinx_dma_reset()
1712 err = xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMACR, tmp, in xilinx_dma_reset()
1717 dev_err(chan->dev, "reset timeout, cr %x, sr %x\n", in xilinx_dma_reset()
1718 dma_ctrl_read(chan, XILINX_DMA_REG_DMACR), in xilinx_dma_reset()
1719 dma_ctrl_read(chan, XILINX_DMA_REG_DMASR)); in xilinx_dma_reset()
1723 chan->err = false; in xilinx_dma_reset()
1724 chan->idle = true; in xilinx_dma_reset()
1725 chan->desc_pendingcount = 0; in xilinx_dma_reset()
1726 chan->desc_submitcount = 0; in xilinx_dma_reset()
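xilinx_dma_reset() polls in the opposite direction: DMACR.RESET is self-clearing, so the code waits for hardware to drop the bit before reinitializing the software bookkeeping (err/idle/pending/submit counters, per the lines above). The elided poll is presumably:

    dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RESET);

    /* RESET is self-clearing; wait for it to read back as 0 (assumed) */
    err = xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMACR, tmp,
                                  !(tmp & XILINX_DMA_DMACR_RESET),
                                  0, XILINX_DMA_LOOP_COUNT);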
1737 static int xilinx_dma_chan_reset(struct xilinx_dma_chan *chan) in xilinx_dma_chan_reset() argument
1742 err = xilinx_dma_reset(chan); in xilinx_dma_chan_reset()
1747 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, in xilinx_dma_chan_reset()
1762 struct xilinx_dma_chan *chan = data; in xilinx_mcdma_irq_handler() local
1765 if (chan->direction == DMA_DEV_TO_MEM) in xilinx_mcdma_irq_handler()
1771 chan_sermask = dma_ctrl_read(chan, ser_offset); in xilinx_mcdma_irq_handler()
1777 if (chan->direction == DMA_DEV_TO_MEM) in xilinx_mcdma_irq_handler()
1778 chan_offset = chan->xdev->dma_config->max_channels / 2; in xilinx_mcdma_irq_handler()
1781 chan = chan->xdev->chan[chan_offset]; in xilinx_mcdma_irq_handler()
1783 status = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_SR_OFFSET(chan->tdest)); in xilinx_mcdma_irq_handler()
1787 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_SR_OFFSET(chan->tdest), in xilinx_mcdma_irq_handler()
1791 dev_err(chan->dev, "Channel %p has errors %x cdr %x tdr %x\n", in xilinx_mcdma_irq_handler()
1792 chan, in xilinx_mcdma_irq_handler()
1793 dma_ctrl_read(chan, XILINX_MCDMA_CH_ERR_OFFSET), in xilinx_mcdma_irq_handler()
1794 dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CDESC_OFFSET in xilinx_mcdma_irq_handler()
1795 (chan->tdest)), in xilinx_mcdma_irq_handler()
1796 dma_ctrl_read(chan, XILINX_MCDMA_CHAN_TDESC_OFFSET in xilinx_mcdma_irq_handler()
1797 (chan->tdest))); in xilinx_mcdma_irq_handler()
1798 chan->err = true; in xilinx_mcdma_irq_handler()
1806 dev_dbg(chan->dev, "Inter-packet latency too long\n"); in xilinx_mcdma_irq_handler()
1810 spin_lock(&chan->lock); in xilinx_mcdma_irq_handler()
1811 xilinx_dma_complete_descriptor(chan); in xilinx_mcdma_irq_handler()
1812 chan->idle = true; in xilinx_mcdma_irq_handler()
1813 chan->start_transfer(chan); in xilinx_mcdma_irq_handler()
1814 spin_unlock(&chan->lock); in xilinx_mcdma_irq_handler()
1817 tasklet_schedule(&chan->tasklet); in xilinx_mcdma_irq_handler()
1830 struct xilinx_dma_chan *chan = data; in xilinx_dma_irq_handler() local
1834 status = dma_ctrl_read(chan, XILINX_DMA_REG_DMASR); in xilinx_dma_irq_handler()
1838 dma_ctrl_write(chan, XILINX_DMA_REG_DMASR, in xilinx_dma_irq_handler()
1851 dma_ctrl_write(chan, XILINX_DMA_REG_DMASR, in xilinx_dma_irq_handler()
1854 if (!chan->flush_on_fsync || in xilinx_dma_irq_handler()
1856 dev_err(chan->dev, in xilinx_dma_irq_handler()
1858 chan, errors, in xilinx_dma_irq_handler()
1859 dma_ctrl_read(chan, XILINX_DMA_REG_CURDESC), in xilinx_dma_irq_handler()
1860 dma_ctrl_read(chan, XILINX_DMA_REG_TAILDESC)); in xilinx_dma_irq_handler()
1861 chan->err = true; in xilinx_dma_irq_handler()
1870 dev_dbg(chan->dev, "Inter-packet latency too long\n"); in xilinx_dma_irq_handler()
1874 spin_lock(&chan->lock); in xilinx_dma_irq_handler()
1875 xilinx_dma_complete_descriptor(chan); in xilinx_dma_irq_handler()
1876 chan->idle = true; in xilinx_dma_irq_handler()
1877 chan->start_transfer(chan); in xilinx_dma_irq_handler()
1878 spin_unlock(&chan->lock); in xilinx_dma_irq_handler()
1881 tasklet_schedule(&chan->tasklet); in xilinx_dma_irq_handler()
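Both interrupt handlers share a top/bottom-half split: read DMASR, acknowledge the asserted bits (the status register is write-one-to-clear), do the minimal completion bookkeeping under the spinlock, then defer callback invocation to the tasklet. A condensed sketch of the completion path (the IRQ-mask and frame-count bit names are assumed):

    u32 status = dma_ctrl_read(chan, XILINX_DMA_REG_DMASR);

    dma_ctrl_write(chan, XILINX_DMA_REG_DMASR,
                   status & XILINX_DMA_DMAXR_ALL_IRQ_MASK);   /* W1C ack */

    if (status & XILINX_DMA_DMASR_FRM_CNT_IRQ) {
        spin_lock(&chan->lock);
        xilinx_dma_complete_descriptor(chan);   /* active -> done */
        chan->idle = true;
        chan->start_transfer(chan);             /* queue next batch */
        spin_unlock(&chan->lock);
    }

    tasklet_schedule(&chan->tasklet);           /* callbacks run later */
    return IRQ_HANDLED;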
1890 static void append_desc_queue(struct xilinx_dma_chan *chan, in append_desc_queue() argument
1899 if (list_empty(&chan->pending_list)) in append_desc_queue()
1906 tail_desc = list_last_entry(&chan->pending_list, in append_desc_queue()
1908 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in append_desc_queue()
1913 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in append_desc_queue()
1918 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in append_desc_queue()
1936 list_add_tail(&desc->node, &chan->pending_list); in append_desc_queue()
1937 chan->desc_pendingcount++; in append_desc_queue()
1939 if (chan->has_sg && (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) in append_desc_queue()
1940 && unlikely(chan->desc_pendingcount > chan->num_frms)) { in append_desc_queue()
1941 dev_dbg(chan->dev, "desc pendingcount is too high\n"); in append_desc_queue()
1942 chan->desc_pendingcount = chan->num_frms; in append_desc_queue()
1955 struct xilinx_dma_chan *chan = to_xilinx_chan(tx->chan); in xilinx_dma_tx_submit() local
1960 if (chan->cyclic) { in xilinx_dma_tx_submit()
1961 xilinx_dma_free_tx_descriptor(chan, desc); in xilinx_dma_tx_submit()
1965 if (chan->err) { in xilinx_dma_tx_submit()
1970 err = xilinx_dma_chan_reset(chan); in xilinx_dma_tx_submit()
1975 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_tx_submit()
1980 append_desc_queue(chan, desc); in xilinx_dma_tx_submit()
1983 chan->cyclic = true; in xilinx_dma_tx_submit()
1985 chan->terminating = false; in xilinx_dma_tx_submit()
1987 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_tx_submit()
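xilinx_dma_tx_submit() follows the stock dmaengine submit contract: under the channel lock, assign a cookie and append the descriptor to the software pending list; nothing touches hardware until issue_pending. Condensed sketch (dma_cookie_assign() is the stock helper from drivers/dma/dmaengine.h):

    spin_lock_irqsave(&chan->lock, flags);
    cookie = dma_cookie_assign(tx);      /* monotonically increasing id */
    append_desc_queue(chan, desc);       /* software queue only */
    if (desc->cyclic)
        chan->cyclic = true;
    chan->terminating = false;
    spin_unlock_irqrestore(&chan->lock, flags);
    return cookie;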
2006 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_vdma_dma_prep_interleaved() local
2021 desc = xilinx_dma_alloc_tx_descriptor(chan); in xilinx_vdma_dma_prep_interleaved()
2025 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_vdma_dma_prep_interleaved()
2030 segment = xilinx_vdma_alloc_tx_segment(chan); in xilinx_vdma_dma_prep_interleaved()
2040 hw->stride |= chan->config.frm_dly << in xilinx_vdma_dma_prep_interleaved()
2044 if (chan->ext_addr) { in xilinx_vdma_dma_prep_interleaved()
2051 if (chan->ext_addr) { in xilinx_vdma_dma_prep_interleaved()
2070 xilinx_dma_free_tx_descriptor(chan, desc); in xilinx_vdma_dma_prep_interleaved()
2088 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_cdma_prep_memcpy() local
2093 if (!len || len > chan->xdev->max_buffer_len) in xilinx_cdma_prep_memcpy()
2096 desc = xilinx_dma_alloc_tx_descriptor(chan); in xilinx_cdma_prep_memcpy()
2100 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_cdma_prep_memcpy()
2104 segment = xilinx_cdma_alloc_tx_segment(chan); in xilinx_cdma_prep_memcpy()
2112 if (chan->ext_addr) { in xilinx_cdma_prep_memcpy()
2126 xilinx_dma_free_tx_descriptor(chan, desc); in xilinx_cdma_prep_memcpy()
2146 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_prep_slave_sg() local
2159 desc = xilinx_dma_alloc_tx_descriptor(chan); in xilinx_dma_prep_slave_sg()
2163 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_dma_prep_slave_sg()
2175 segment = xilinx_axidma_alloc_tx_segment(chan); in xilinx_dma_prep_slave_sg()
2183 copy = xilinx_dma_calc_copysize(chan, sg_dma_len(sg), in xilinx_dma_prep_slave_sg()
2188 xilinx_axidma_buf(chan, hw, sg_dma_address(sg), in xilinx_dma_prep_slave_sg()
2193 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_dma_prep_slave_sg()
2214 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_dma_prep_slave_sg()
2225 xilinx_dma_free_tx_descriptor(chan, desc); in xilinx_dma_prep_slave_sg()
2245 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_prep_dma_cyclic() local
2265 desc = xilinx_dma_alloc_tx_descriptor(chan); in xilinx_dma_prep_dma_cyclic()
2269 chan->direction = direction; in xilinx_dma_prep_dma_cyclic()
2270 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_dma_prep_dma_cyclic()
2280 segment = xilinx_axidma_alloc_tx_segment(chan); in xilinx_dma_prep_dma_cyclic()
2288 copy = xilinx_dma_calc_copysize(chan, period_len, in xilinx_dma_prep_dma_cyclic()
2291 xilinx_axidma_buf(chan, hw, buf_addr, sg_used, in xilinx_dma_prep_dma_cyclic()
2314 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR); in xilinx_dma_prep_dma_cyclic()
2316 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg); in xilinx_dma_prep_dma_cyclic()
2332 xilinx_dma_free_tx_descriptor(chan, desc); in xilinx_dma_prep_dma_cyclic()
2353 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_mcdma_prep_slave_sg() local
2366 desc = xilinx_dma_alloc_tx_descriptor(chan); in xilinx_mcdma_prep_slave_sg()
2370 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_mcdma_prep_slave_sg()
2382 segment = xilinx_aximcdma_alloc_tx_segment(chan); in xilinx_mcdma_prep_slave_sg()
2391 chan->xdev->max_buffer_len); in xilinx_mcdma_prep_slave_sg()
2395 xilinx_aximcdma_buf(chan, hw, sg_dma_address(sg), in xilinx_mcdma_prep_slave_sg()
2399 if (chan->direction == DMA_MEM_TO_DEV && app_w) { in xilinx_mcdma_prep_slave_sg()
2418 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_mcdma_prep_slave_sg()
2429 xilinx_dma_free_tx_descriptor(chan, desc); in xilinx_mcdma_prep_slave_sg()
2442 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_terminate_all() local
2446 if (!chan->cyclic) { in xilinx_dma_terminate_all()
2447 err = chan->stop_transfer(chan); in xilinx_dma_terminate_all()
2449 dev_err(chan->dev, "Cannot stop channel %p: %x\n", in xilinx_dma_terminate_all()
2450 chan, dma_ctrl_read(chan, in xilinx_dma_terminate_all()
2452 chan->err = true; in xilinx_dma_terminate_all()
2456 xilinx_dma_chan_reset(chan); in xilinx_dma_terminate_all()
2458 chan->terminating = true; in xilinx_dma_terminate_all()
2459 xilinx_dma_free_descriptors(chan); in xilinx_dma_terminate_all()
2460 chan->idle = true; in xilinx_dma_terminate_all()
2462 if (chan->cyclic) { in xilinx_dma_terminate_all()
2463 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR); in xilinx_dma_terminate_all()
2465 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg); in xilinx_dma_terminate_all()
2466 chan->cyclic = false; in xilinx_dma_terminate_all()
2469 if ((chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) && chan->has_sg) in xilinx_dma_terminate_all()
2470 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR, in xilinx_dma_terminate_all()
2478 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_synchronize() local
2480 tasklet_kill(&chan->tasklet); in xilinx_dma_synchronize()
2499 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_vdma_channel_set_config() local
2503 return xilinx_dma_chan_reset(chan); in xilinx_vdma_channel_set_config()
2505 dmacr = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR); in xilinx_vdma_channel_set_config()
2507 chan->config.frm_dly = cfg->frm_dly; in xilinx_vdma_channel_set_config()
2508 chan->config.park = cfg->park; in xilinx_vdma_channel_set_config()
2511 chan->config.gen_lock = cfg->gen_lock; in xilinx_vdma_channel_set_config()
2512 chan->config.master = cfg->master; in xilinx_vdma_channel_set_config()
2515 if (cfg->gen_lock && chan->genlock) { in xilinx_vdma_channel_set_config()
2521 chan->config.frm_cnt_en = cfg->frm_cnt_en; in xilinx_vdma_channel_set_config()
2522 chan->config.vflip_en = cfg->vflip_en; in xilinx_vdma_channel_set_config()
2525 chan->config.park_frm = cfg->park_frm; in xilinx_vdma_channel_set_config()
2527 chan->config.park_frm = -1; in xilinx_vdma_channel_set_config()
2529 chan->config.coalesc = cfg->coalesc; in xilinx_vdma_channel_set_config()
2530 chan->config.delay = cfg->delay; in xilinx_vdma_channel_set_config()
2535 chan->config.coalesc = cfg->coalesc; in xilinx_vdma_channel_set_config()
2541 chan->config.delay = cfg->delay; in xilinx_vdma_channel_set_config()
2548 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, dmacr); in xilinx_vdma_channel_set_config()
2562 static void xilinx_dma_chan_remove(struct xilinx_dma_chan *chan) in xilinx_dma_chan_remove() argument
2565 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR, in xilinx_dma_chan_remove()
2568 if (chan->irq > 0) in xilinx_dma_chan_remove()
2569 free_irq(chan->irq, chan); in xilinx_dma_chan_remove()
2571 tasklet_kill(&chan->tasklet); in xilinx_dma_chan_remove()
2573 list_del(&chan->common.device_node); in xilinx_dma_chan_remove()
2767 struct xilinx_dma_chan *chan; in xilinx_dma_chan_probe() local
2773 chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL); in xilinx_dma_chan_probe()
2774 if (!chan) in xilinx_dma_chan_probe()
2777 chan->dev = xdev->dev; in xilinx_dma_chan_probe()
2778 chan->xdev = xdev; in xilinx_dma_chan_probe()
2779 chan->desc_pendingcount = 0x0; in xilinx_dma_chan_probe()
2780 chan->ext_addr = xdev->ext_addr; in xilinx_dma_chan_probe()
2786 chan->idle = true; in xilinx_dma_chan_probe()
2788 spin_lock_init(&chan->lock); in xilinx_dma_chan_probe()
2789 INIT_LIST_HEAD(&chan->pending_list); in xilinx_dma_chan_probe()
2790 INIT_LIST_HEAD(&chan->done_list); in xilinx_dma_chan_probe()
2791 INIT_LIST_HEAD(&chan->active_list); in xilinx_dma_chan_probe()
2792 INIT_LIST_HEAD(&chan->free_seg_list); in xilinx_dma_chan_probe()
2797 chan->genlock = of_property_read_bool(node, "xlnx,genlock-mode"); in xilinx_dma_chan_probe()
2816 chan->direction = DMA_MEM_TO_DEV; in xilinx_dma_chan_probe()
2817 chan->id = xdev->mm2s_chan_id++; in xilinx_dma_chan_probe()
2818 chan->tdest = chan->id; in xilinx_dma_chan_probe()
2820 chan->ctrl_offset = XILINX_DMA_MM2S_CTRL_OFFSET; in xilinx_dma_chan_probe()
2822 chan->desc_offset = XILINX_VDMA_MM2S_DESC_OFFSET; in xilinx_dma_chan_probe()
2823 chan->config.park = 1; in xilinx_dma_chan_probe()
2827 chan->flush_on_fsync = true; in xilinx_dma_chan_probe()
2833 chan->direction = DMA_DEV_TO_MEM; in xilinx_dma_chan_probe()
2834 chan->id = xdev->s2mm_chan_id++; in xilinx_dma_chan_probe()
2835 chan->tdest = chan->id - xdev->dma_config->max_channels / 2; in xilinx_dma_chan_probe()
2836 chan->has_vflip = of_property_read_bool(node, in xilinx_dma_chan_probe()
2838 if (chan->has_vflip) { in xilinx_dma_chan_probe()
2839 chan->config.vflip_en = dma_read(chan, in xilinx_dma_chan_probe()
2845 chan->ctrl_offset = XILINX_MCDMA_S2MM_CTRL_OFFSET; in xilinx_dma_chan_probe()
2847 chan->ctrl_offset = XILINX_DMA_S2MM_CTRL_OFFSET; in xilinx_dma_chan_probe()
2850 chan->desc_offset = XILINX_VDMA_S2MM_DESC_OFFSET; in xilinx_dma_chan_probe()
2851 chan->config.park = 1; in xilinx_dma_chan_probe()
2855 chan->flush_on_fsync = true; in xilinx_dma_chan_probe()
2863 chan->irq = irq_of_parse_and_map(node, chan->tdest); in xilinx_dma_chan_probe()
2864 err = request_irq(chan->irq, xdev->dma_config->irq_handler, in xilinx_dma_chan_probe()
2865 IRQF_SHARED, "xilinx-dma-controller", chan); in xilinx_dma_chan_probe()
2867 dev_err(xdev->dev, "unable to request IRQ %d\n", chan->irq); in xilinx_dma_chan_probe()
2872 chan->start_transfer = xilinx_dma_start_transfer; in xilinx_dma_chan_probe()
2873 chan->stop_transfer = xilinx_dma_stop_transfer; in xilinx_dma_chan_probe()
2875 chan->start_transfer = xilinx_mcdma_start_transfer; in xilinx_dma_chan_probe()
2876 chan->stop_transfer = xilinx_dma_stop_transfer; in xilinx_dma_chan_probe()
2878 chan->start_transfer = xilinx_cdma_start_transfer; in xilinx_dma_chan_probe()
2879 chan->stop_transfer = xilinx_cdma_stop_transfer; in xilinx_dma_chan_probe()
2881 chan->start_transfer = xilinx_vdma_start_transfer; in xilinx_dma_chan_probe()
2882 chan->stop_transfer = xilinx_dma_stop_transfer; in xilinx_dma_chan_probe()
2888 dma_ctrl_read(chan, XILINX_DMA_REG_DMASR) & in xilinx_dma_chan_probe()
2890 chan->has_sg = true; in xilinx_dma_chan_probe()
2891 dev_dbg(chan->dev, "ch %d: SG %s\n", chan->id, in xilinx_dma_chan_probe()
2892 chan->has_sg ? "enabled" : "disabled"); in xilinx_dma_chan_probe()
2896 tasklet_setup(&chan->tasklet, xilinx_dma_do_tasklet); in xilinx_dma_chan_probe()
2902 chan->common.device = &xdev->common; in xilinx_dma_chan_probe()
2904 list_add_tail(&chan->common.device_node, &xdev->common.channels); in xilinx_dma_chan_probe()
2905 xdev->chan[chan->id] = chan; in xilinx_dma_chan_probe()
2908 err = xilinx_dma_chan_reset(chan); in xilinx_dma_chan_probe()
2956 if (chan_id >= xdev->dma_config->max_channels || !xdev->chan[chan_id]) in of_dma_xilinx_xlate()
2959 return dma_get_slave_channel(&xdev->chan[chan_id]->common); in of_dma_xilinx_xlate()
3140 if (xdev->chan[i]) in xilinx_dma_probe()
3141 xdev->chan[i]->num_frms = num_frames; in xilinx_dma_probe()
3174 if (xdev->chan[i]) in xilinx_dma_probe()
3175 xilinx_dma_chan_remove(xdev->chan[i]); in xilinx_dma_probe()
3196 if (xdev->chan[i]) in xilinx_dma_remove()
3197 xilinx_dma_chan_remove(xdev->chan[i]); in xilinx_dma_remove()