Lines matching refs: fsl_chan
45 static void fsl_edma_enable_request(struct fsl_edma_chan *fsl_chan) in fsl_edma_enable_request() argument
47 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_enable_request()
48 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
50 if (fsl_chan->edma->drvdata->version == v1) { in fsl_edma_enable_request()
51 edma_writeb(fsl_chan->edma, EDMA_SEEI_SEEI(ch), regs->seei); in fsl_edma_enable_request()
52 edma_writeb(fsl_chan->edma, ch, regs->serq); in fsl_edma_enable_request()
62 void fsl_edma_disable_request(struct fsl_edma_chan *fsl_chan) in fsl_edma_disable_request() argument
64 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_disable_request()
65 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
67 if (fsl_chan->edma->drvdata->version == v1) { in fsl_edma_disable_request()
68 edma_writeb(fsl_chan->edma, ch, regs->cerq); in fsl_edma_disable_request()
69 edma_writeb(fsl_chan->edma, EDMA_CEEI_CEEI(ch), regs->ceei); in fsl_edma_disable_request()
80 static void mux_configure8(struct fsl_edma_chan *fsl_chan, void __iomem *addr, in mux_configure8() argument
93 static void mux_configure32(struct fsl_edma_chan *fsl_chan, void __iomem *addr, in mux_configure32() argument
106 void fsl_edma_chan_mux(struct fsl_edma_chan *fsl_chan, in fsl_edma_chan_mux() argument
109 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
113 u32 dmamux_nr = fsl_chan->edma->drvdata->dmamuxs; in fsl_edma_chan_mux()
115 chans_per_mux = fsl_chan->edma->n_chans / dmamux_nr; in fsl_edma_chan_mux()
116 ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux; in fsl_edma_chan_mux()
118 if (fsl_chan->edma->drvdata->mux_swap) in fsl_edma_chan_mux()
121 muxaddr = fsl_chan->edma->muxbase[ch / chans_per_mux]; in fsl_edma_chan_mux()
124 if (fsl_chan->edma->drvdata->version == v3) in fsl_edma_chan_mux()
125 mux_configure32(fsl_chan, muxaddr, ch_off, slot, enable); in fsl_edma_chan_mux()
127 mux_configure8(fsl_chan, muxaddr, ch_off, slot, enable); in fsl_edma_chan_mux()
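The mux lines above only show the fsl_chan dereferences, so the bank/offset arithmetic in fsl_edma_chan_mux() is easy to miss: the channel id is split into a DMAMUX bank (ch / chans_per_mux) and a register slot inside that bank (ch % chans_per_mux). Below is a minimal, standalone C sketch of that index math; the channel count and mux count are assumed values for illustration, not read from any SoC, and the mux_swap adjustment is left out.

    #include <stdio.h>

    /* Toy model of the channel-to-DMAMUX mapping hinted at above:
     * chans_per_mux = n_chans / dmamux_nr, bank = ch / chans_per_mux,
     * slot inside the bank = ch % chans_per_mux.
     */
    int main(void)
    {
        unsigned int n_chans = 32;   /* assumed eDMA channel count */
        unsigned int dmamux_nr = 2;  /* assumed number of DMAMUX blocks */
        unsigned int chans_per_mux = n_chans / dmamux_nr;

        for (unsigned int ch = 0; ch < n_chans; ch += 7) {
            unsigned int bank = ch / chans_per_mux;   /* index into muxbase[] */
            unsigned int ch_off = ch % chans_per_mux; /* CHCFG slot in that bank */

            printf("chan %2u -> DMAMUX%u, CHCFG offset %u\n", ch, bank, ch_off);
        }
        return 0;
    }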
162 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_terminate_all() local
166 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
167 fsl_edma_disable_request(fsl_chan); in fsl_edma_terminate_all()
168 fsl_chan->edesc = NULL; in fsl_edma_terminate_all()
169 fsl_chan->idle = true; in fsl_edma_terminate_all()
170 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
171 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
172 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
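Read together, the terminate_all() references above form the usual virt-dma teardown sequence: stop the hardware request under the vchan lock, drop the active descriptor, collect everything queued, then free outside the lock. A filled-in sketch of that flow follows; the function signature, the local declarations, LIST_HEAD(head), and the return value do not reference fsl_chan and so are reconstructed here rather than quoted from the file.

    /* Sketch reassembled from the fsl_chan references above; lines that do
     * not mention fsl_chan are assumed from the typical shape of this callback.
     */
    static int fsl_edma_terminate_all(struct dma_chan *chan)
    {
        struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan);
        unsigned long flags;
        LIST_HEAD(head);

        spin_lock_irqsave(&fsl_chan->vchan.lock, flags);
        fsl_edma_disable_request(fsl_chan);     /* stop the hardware request */
        fsl_chan->edesc = NULL;                 /* no descriptor is active now */
        fsl_chan->idle = true;
        vchan_get_all_descriptors(&fsl_chan->vchan, &head);
        spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
        vchan_dma_desc_free_list(&fsl_chan->vchan, &head); /* free outside the lock */

        return 0;
    }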
179 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_pause() local
182 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
183 if (fsl_chan->edesc) { in fsl_edma_pause()
184 fsl_edma_disable_request(fsl_chan); in fsl_edma_pause()
185 fsl_chan->status = DMA_PAUSED; in fsl_edma_pause()
186 fsl_chan->idle = true; in fsl_edma_pause()
188 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
195 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_resume() local
198 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_resume()
199 if (fsl_chan->edesc) { in fsl_edma_resume()
200 fsl_edma_enable_request(fsl_chan); in fsl_edma_resume()
201 fsl_chan->status = DMA_IN_PROGRESS; in fsl_edma_resume()
202 fsl_chan->idle = false; in fsl_edma_resume()
204 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_resume()
209 static void fsl_edma_unprep_slave_dma(struct fsl_edma_chan *fsl_chan) in fsl_edma_unprep_slave_dma() argument
211 if (fsl_chan->dma_dir != DMA_NONE) in fsl_edma_unprep_slave_dma()
212 dma_unmap_resource(fsl_chan->vchan.chan.device->dev, in fsl_edma_unprep_slave_dma()
213 fsl_chan->dma_dev_addr, in fsl_edma_unprep_slave_dma()
214 fsl_chan->dma_dev_size, in fsl_edma_unprep_slave_dma()
215 fsl_chan->dma_dir, 0); in fsl_edma_unprep_slave_dma()
216 fsl_chan->dma_dir = DMA_NONE; in fsl_edma_unprep_slave_dma()
219 static bool fsl_edma_prep_slave_dma(struct fsl_edma_chan *fsl_chan, in fsl_edma_prep_slave_dma() argument
222 struct device *dev = fsl_chan->vchan.chan.device->dev; in fsl_edma_prep_slave_dma()
230 addr = fsl_chan->cfg.dst_addr; in fsl_edma_prep_slave_dma()
231 size = fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_slave_dma()
235 addr = fsl_chan->cfg.src_addr; in fsl_edma_prep_slave_dma()
236 size = fsl_chan->cfg.src_maxburst; in fsl_edma_prep_slave_dma()
244 if (fsl_chan->dma_dir == dma_dir) in fsl_edma_prep_slave_dma()
247 fsl_edma_unprep_slave_dma(fsl_chan); in fsl_edma_prep_slave_dma()
249 fsl_chan->dma_dev_addr = dma_map_resource(dev, addr, size, dma_dir, 0); in fsl_edma_prep_slave_dma()
250 if (dma_mapping_error(dev, fsl_chan->dma_dev_addr)) in fsl_edma_prep_slave_dma()
252 fsl_chan->dma_dev_size = size; in fsl_edma_prep_slave_dma()
253 fsl_chan->dma_dir = dma_dir; in fsl_edma_prep_slave_dma()
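fsl_edma_prep_slave_dma() is the only place in this listing that maps the peripheral FIFO address, and it caches the result in fsl_chan so repeated preps in the same direction skip the dma_map_resource() call. The sketch below restates that decision flow; the direction branches, the early returns, and in particular which dma_data_direction is paired with each transfer direction are filled in from context and should be checked against the file rather than taken as a quote of it.

    /* Approximate flow of fsl_edma_prep_slave_dma(), rebuilt around the
     * fsl_chan references above; exact branch structure and the
     * dma_data_direction pairing are assumptions.
     */
    static bool fsl_edma_prep_slave_dma(struct fsl_edma_chan *fsl_chan,
                                        enum dma_transfer_direction dir)
    {
        struct device *dev = fsl_chan->vchan.chan.device->dev;
        enum dma_data_direction dma_dir;
        phys_addr_t addr;
        u32 size;

        if (dir == DMA_MEM_TO_DEV) {            /* memory -> peripheral FIFO */
            dma_dir = DMA_FROM_DEVICE;
            addr = fsl_chan->cfg.dst_addr;
            size = fsl_chan->cfg.dst_maxburst;
        } else if (dir == DMA_DEV_TO_MEM) {     /* peripheral FIFO -> memory */
            dma_dir = DMA_TO_DEVICE;
            addr = fsl_chan->cfg.src_addr;
            size = fsl_chan->cfg.src_maxburst;
        } else {
            return false;                       /* only slave directions handled */
        }

        /* Already mapped for this direction: reuse the cached mapping. */
        if (fsl_chan->dma_dir == dma_dir)
            return true;

        fsl_edma_unprep_slave_dma(fsl_chan);

        fsl_chan->dma_dev_addr = dma_map_resource(dev, addr, size, dma_dir, 0);
        if (dma_mapping_error(dev, fsl_chan->dma_dev_addr))
            return false;
        fsl_chan->dma_dev_size = size;
        fsl_chan->dma_dir = dma_dir;

        return true;
    }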
261 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_slave_config() local
263 memcpy(&fsl_chan->cfg, cfg, sizeof(*cfg)); in fsl_edma_slave_config()
264 fsl_edma_unprep_slave_dma(fsl_chan); in fsl_edma_slave_config()
270 static size_t fsl_edma_desc_residue(struct fsl_edma_chan *fsl_chan, in fsl_edma_desc_residue() argument
273 struct fsl_edma_desc *edesc = fsl_chan->edesc; in fsl_edma_desc_residue()
274 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_desc_residue()
275 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_desc_residue()
282 for (len = i = 0; i < fsl_chan->edesc->n_tcds; i++) in fsl_edma_desc_residue()
290 cur_addr = edma_readl(fsl_chan->edma, &regs->tcd[ch].saddr); in fsl_edma_desc_residue()
292 cur_addr = edma_readl(fsl_chan->edma, &regs->tcd[ch].daddr); in fsl_edma_desc_residue()
295 for (i = 0; i < fsl_chan->edesc->n_tcds; i++) { in fsl_edma_desc_residue()
316 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_tx_status() local
326 return fsl_chan->status; in fsl_edma_tx_status()
328 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_tx_status()
329 vdesc = vchan_find_desc(&fsl_chan->vchan, cookie); in fsl_edma_tx_status()
330 if (fsl_chan->edesc && cookie == fsl_chan->edesc->vdesc.tx.cookie) in fsl_edma_tx_status()
332 fsl_edma_desc_residue(fsl_chan, vdesc, true); in fsl_edma_tx_status()
335 fsl_edma_desc_residue(fsl_chan, vdesc, false); in fsl_edma_tx_status()
339 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_tx_status()
341 return fsl_chan->status; in fsl_edma_tx_status()
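The residue lines above only hint at the bookkeeping: fsl_edma_desc_residue() first sums the bytes carried by every TCD in the descriptor, then, if the transfer is in flight, reads the current source or destination address from the hardware TCD and subtracts the fully completed TCDs plus the progress inside the one the engine is working on. The standalone sketch below reruns that arithmetic on made-up TCD values; the three-entry table, addresses, and sizes are invented for illustration only.

    #include <stdio.h>
    #include <stdint.h>

    /* Toy model of the residue math: each TCD moves nbytes * biter bytes
     * starting at 'addr'; 'cur' is the address the engine has reached.
     */
    struct toy_tcd {
        uint32_t addr;    /* saddr or daddr, depending on direction */
        uint32_t nbytes;  /* bytes per minor loop */
        uint16_t biter;   /* minor loop count */
    };

    static uint32_t toy_residue(const struct toy_tcd *tcd, int n_tcds, uint32_t cur)
    {
        uint32_t len = 0, size;
        int i;

        for (i = 0; i < n_tcds; i++)            /* total bytes in the descriptor */
            len += tcd[i].nbytes * tcd[i].biter;

        for (i = 0; i < n_tcds; i++) {
            size = tcd[i].nbytes * tcd[i].biter;
            len -= size;                        /* assume this TCD finished ... */
            if (cur >= tcd[i].addr && cur < tcd[i].addr + size) {
                /* ... unless the engine is still inside it */
                len += tcd[i].addr + size - cur;
                break;
            }
        }
        return len;
    }

    int main(void)
    {
        struct toy_tcd tcds[] = {
            { 0x1000, 32, 4 },  /* 128 bytes */
            { 0x1080, 32, 4 },  /* 128 bytes */
            { 0x1100, 32, 4 },  /* 128 bytes */
        };

        /* Engine is 64 bytes into the second TCD: 64 remain there + 128 after. */
        printf("residue = %u bytes\n", (unsigned)toy_residue(tcds, 3, 0x10C0));
        return 0;
    }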
345 static void fsl_edma_set_tcd_regs(struct fsl_edma_chan *fsl_chan, in fsl_edma_set_tcd_regs() argument
348 struct fsl_edma_engine *edma = fsl_chan->edma; in fsl_edma_set_tcd_regs()
349 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_set_tcd_regs()
350 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_set_tcd_regs()
377 if (fsl_chan->is_sw) { in fsl_edma_set_tcd_regs()
428 static struct fsl_edma_desc *fsl_edma_alloc_desc(struct fsl_edma_chan *fsl_chan, in fsl_edma_alloc_desc() argument
438 fsl_desc->echan = fsl_chan; in fsl_edma_alloc_desc()
441 fsl_desc->tcd[i].vtcd = dma_pool_alloc(fsl_chan->tcd_pool, in fsl_edma_alloc_desc()
450 dma_pool_free(fsl_chan->tcd_pool, fsl_desc->tcd[i].vtcd, in fsl_edma_alloc_desc()
461 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_prep_dma_cyclic() local
471 if (!fsl_edma_prep_slave_dma(fsl_chan, direction)) in fsl_edma_prep_dma_cyclic()
475 fsl_desc = fsl_edma_alloc_desc(fsl_chan, sg_len); in fsl_edma_prep_dma_cyclic()
483 fsl_chan->attr = in fsl_edma_prep_dma_cyclic()
484 fsl_edma_get_tcd_attr(fsl_chan->cfg.dst_addr_width); in fsl_edma_prep_dma_cyclic()
485 nbytes = fsl_chan->cfg.dst_addr_width * in fsl_edma_prep_dma_cyclic()
486 fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_dma_cyclic()
488 fsl_chan->attr = in fsl_edma_prep_dma_cyclic()
489 fsl_edma_get_tcd_attr(fsl_chan->cfg.src_addr_width); in fsl_edma_prep_dma_cyclic()
490 nbytes = fsl_chan->cfg.src_addr_width * in fsl_edma_prep_dma_cyclic()
491 fsl_chan->cfg.src_maxburst; in fsl_edma_prep_dma_cyclic()
505 dst_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_dma_cyclic()
506 soff = fsl_chan->cfg.dst_addr_width; in fsl_edma_prep_dma_cyclic()
509 src_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_dma_cyclic()
512 doff = fsl_chan->cfg.src_addr_width; in fsl_edma_prep_dma_cyclic()
516 fsl_chan->attr, soff, nbytes, 0, iter, in fsl_edma_prep_dma_cyclic()
521 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_dma_cyclic()
530 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_prep_slave_sg() local
540 if (!fsl_edma_prep_slave_dma(fsl_chan, direction)) in fsl_edma_prep_slave_sg()
543 fsl_desc = fsl_edma_alloc_desc(fsl_chan, sg_len); in fsl_edma_prep_slave_sg()
550 fsl_chan->attr = in fsl_edma_prep_slave_sg()
551 fsl_edma_get_tcd_attr(fsl_chan->cfg.dst_addr_width); in fsl_edma_prep_slave_sg()
552 nbytes = fsl_chan->cfg.dst_addr_width * in fsl_edma_prep_slave_sg()
553 fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_slave_sg()
555 fsl_chan->attr = in fsl_edma_prep_slave_sg()
556 fsl_edma_get_tcd_attr(fsl_chan->cfg.src_addr_width); in fsl_edma_prep_slave_sg()
557 nbytes = fsl_chan->cfg.src_addr_width * in fsl_edma_prep_slave_sg()
558 fsl_chan->cfg.src_maxburst; in fsl_edma_prep_slave_sg()
564 dst_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_slave_sg()
565 soff = fsl_chan->cfg.dst_addr_width; in fsl_edma_prep_slave_sg()
568 src_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_slave_sg()
571 doff = fsl_chan->cfg.src_addr_width; in fsl_edma_prep_slave_sg()
578 dst_addr, fsl_chan->attr, soff, in fsl_edma_prep_slave_sg()
584 dst_addr, fsl_chan->attr, soff, in fsl_edma_prep_slave_sg()
590 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_slave_sg()
598 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_prep_memcpy() local
601 fsl_desc = fsl_edma_alloc_desc(fsl_chan, 1); in fsl_edma_prep_memcpy()
606 fsl_chan->is_sw = true; in fsl_edma_prep_memcpy()
613 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_memcpy()
617 void fsl_edma_xfer_desc(struct fsl_edma_chan *fsl_chan) in fsl_edma_xfer_desc() argument
621 lockdep_assert_held(&fsl_chan->vchan.lock); in fsl_edma_xfer_desc()
623 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_edma_xfer_desc()
626 fsl_chan->edesc = to_fsl_edma_desc(vdesc); in fsl_edma_xfer_desc()
627 fsl_edma_set_tcd_regs(fsl_chan, fsl_chan->edesc->tcd[0].vtcd); in fsl_edma_xfer_desc()
628 fsl_edma_enable_request(fsl_chan); in fsl_edma_xfer_desc()
629 fsl_chan->status = DMA_IN_PROGRESS; in fsl_edma_xfer_desc()
630 fsl_chan->idle = false; in fsl_edma_xfer_desc()
636 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_issue_pending() local
639 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
641 if (unlikely(fsl_chan->pm_state != RUNNING)) { in fsl_edma_issue_pending()
642 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
647 if (vchan_issue_pending(&fsl_chan->vchan) && !fsl_chan->edesc) in fsl_edma_issue_pending()
648 fsl_edma_xfer_desc(fsl_chan); in fsl_edma_issue_pending()
650 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
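Taken together, fsl_edma_issue_pending() and fsl_edma_xfer_desc() implement the usual virt-dma handoff: issue_pending only kicks the hardware when no descriptor is currently active, and xfer_desc programs the first TCD and flips the channel state. The sketch below stitches the listed lines back into that shape; the empty-queue check in xfer_desc and the local declarations do not reference fsl_chan and are reconstructed from context.

    /* Reassembled from the fsl_chan references above; non-fsl_chan lines
     * (the vdesc NULL check, the locals, the signatures) are assumed.
     */
    void fsl_edma_xfer_desc(struct fsl_edma_chan *fsl_chan)
    {
        struct virt_dma_desc *vdesc;

        lockdep_assert_held(&fsl_chan->vchan.lock);

        vdesc = vchan_next_desc(&fsl_chan->vchan);
        if (!vdesc)
            return;                              /* nothing queued */
        fsl_chan->edesc = to_fsl_edma_desc(vdesc);
        fsl_edma_set_tcd_regs(fsl_chan, fsl_chan->edesc->tcd[0].vtcd);
        fsl_edma_enable_request(fsl_chan);
        fsl_chan->status = DMA_IN_PROGRESS;
        fsl_chan->idle = false;
    }

    static void fsl_edma_issue_pending(struct dma_chan *chan)
    {
        struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan);
        unsigned long flags;

        spin_lock_irqsave(&fsl_chan->vchan.lock, flags);

        if (unlikely(fsl_chan->pm_state != RUNNING)) {
            spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
            return;                              /* channel is suspended */
        }

        /* Only start the engine if it is not already working a descriptor. */
        if (vchan_issue_pending(&fsl_chan->vchan) && !fsl_chan->edesc)
            fsl_edma_xfer_desc(fsl_chan);

        spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
    }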
656 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_alloc_chan_resources() local
658 fsl_chan->tcd_pool = dma_pool_create("tcd_pool", chan->device->dev, in fsl_edma_alloc_chan_resources()
667 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_free_chan_resources() local
668 struct fsl_edma_engine *edma = fsl_chan->edma; in fsl_edma_free_chan_resources()
672 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_free_chan_resources()
673 fsl_edma_disable_request(fsl_chan); in fsl_edma_free_chan_resources()
675 fsl_edma_chan_mux(fsl_chan, 0, false); in fsl_edma_free_chan_resources()
676 fsl_chan->edesc = NULL; in fsl_edma_free_chan_resources()
677 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_free_chan_resources()
678 fsl_edma_unprep_slave_dma(fsl_chan); in fsl_edma_free_chan_resources()
679 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_free_chan_resources()
681 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_free_chan_resources()
682 dma_pool_destroy(fsl_chan->tcd_pool); in fsl_edma_free_chan_resources()
683 fsl_chan->tcd_pool = NULL; in fsl_edma_free_chan_resources()
684 fsl_chan->is_sw = false; in fsl_edma_free_chan_resources()
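For context, the callbacks listed above are reached through the standard dmaengine client API rather than called directly. The hedged consumer-side sketch below shows which listed callback each generic call lands in; the "rx" channel name, the FIFO address, the buffer handling, and the error paths are placeholders, not taken from any real user of this driver.

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/err.h>

    /* Hypothetical slave-DMA consumer; it only illustrates which fsl-edma
     * callback each dmaengine call ends up in. Cleanup is trimmed for brevity.
     */
    static int example_start_rx(struct device *dev, dma_addr_t buf, size_t len,
                                dma_addr_t fifo_addr)
    {
        struct dma_chan *chan;
        struct dma_slave_config cfg = {
            .src_addr = fifo_addr,              /* peripheral FIFO (assumed) */
            .src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
            .src_maxburst = 4,
        };
        struct dma_async_tx_descriptor *desc;

        chan = dma_request_chan(dev, "rx");     /* -> fsl_edma_alloc_chan_resources() */
        if (IS_ERR(chan))
            return PTR_ERR(chan);

        dmaengine_slave_config(chan, &cfg);     /* -> fsl_edma_slave_config() */

        /* dmaengine_prep_slave_single() lands in fsl_edma_prep_slave_sg() */
        desc = dmaengine_prep_slave_single(chan, buf, len, DMA_DEV_TO_MEM,
                                           DMA_PREP_INTERRUPT);
        if (!desc)
            goto out;

        dmaengine_submit(desc);
        dma_async_issue_pending(chan);          /* -> fsl_edma_issue_pending() */
        return 0;

    out:
        dma_release_channel(chan);              /* -> fsl_edma_free_chan_resources() */
        return -EIO;
    }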