| /drivers/dma/ |
| A D | ste_dma40_ll.c |
        171  lli->reg_ptr = data;  in d40_phy_fill_lli()
        172  lli->reg_cfg = reg_cfg;  in d40_phy_fill_lli()
        255  lli++;  in d40_phy_buf_to_lli()
        260  return lli;  in d40_phy_buf_to_lli()
        298  lli = d40_phy_buf_to_lli(lli, dst, len, l_phys, lli_phys,  in d40_phy_sg_to_lli()
        301  if (lli == NULL)  in d40_phy_sg_to_lli()
        370  lli->lcsp13 = reg_cfg;  in d40_log_fill_lli()
        405  d40_log_fill_lli(lli,  in d40_log_buf_to_lli()
        412  lli++;  in d40_log_buf_to_lli()
        415  return lli;  in d40_log_buf_to_lli()
        [all …]
|
| A D | loongson1-apb-dma.c |
        193  struct ls1x_dma_lli *lli, *_lli;  in ls1x_dma_free_desc() local
        196  list_del(&lli->node);  in ls1x_dma_free_desc()
        197  dma_pool_free(chan->lli_pool, lli, lli->phys);  in ls1x_dma_free_desc()
        256  if (!lli) {  in ls1x_dma_prep_lli()
        262  lli->phys = phys;  in ls1x_dma_prep_lli()
        273  prev = lli;  in ls1x_dma_prep_lli()
        276  first = lli;  in ls1x_dma_prep_lli()
        289  lli, sizeof(*lli), false);  in ls1x_dma_prep_lli()
        440  struct ls1x_dma_lli *lli;  in ls1x_dma_tx_status() local
        454  &lli->phys);  in ls1x_dma_tx_status()
        [all …]
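The Loongson-1 hits above show the usual allocate-and-link pattern for scatter-gather controllers: each hardware linked-list item (LLI) comes from a per-channel dma_pool, is remembered on a software list so it can be freed later, and is chained behind its predecessor by writing its bus address into the previous item. A minimal sketch of that step, assuming a hypothetical `struct xx_lli` layout (the hardware fields and `next_phys` member are illustrative, not this driver's real format):

```c
#include <linux/dmapool.h>
#include <linux/gfp.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/string.h>

/* Illustrative LLI: a few hardware words plus software bookkeeping. */
struct xx_lli {
	u32 saddr;		/* hypothetical hardware fields */
	u32 daddr;
	u32 length;
	u32 next_phys;		/* bus address of the next item, 0 ends the chain */
	dma_addr_t phys;	/* software only: where this item itself lives */
	struct list_head node;	/* software only: so it can be freed later */
};

static struct xx_lli *xx_alloc_and_link_lli(struct dma_pool *pool,
					    struct xx_lli *prev,
					    struct list_head *desc_list)
{
	struct xx_lli *lli;
	dma_addr_t phys;

	lli = dma_pool_alloc(pool, GFP_NOWAIT, &phys);
	if (!lli)
		return NULL;

	memset(lli, 0, sizeof(*lli));
	lli->phys = phys;

	/* chain behind the predecessor in hardware... */
	if (prev)
		prev->next_phys = lower_32_bits(phys);
	/* ...and remember it in software so the free path can return it. */
	list_add_tail(&lli->node, desc_list);

	return lli;
}
```

The bus address of the first item in the resulting chain is what eventually gets programmed into the channel's link register to start the transfer.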
|
| A D | owl-dma.c |
        345  list_del(&lli->node);  in owl_dma_free_lli()
        346  dma_pool_free(od->lli_pool, lli, lli->phys);  in owl_dma_free_lli()
        351  struct owl_dma_lli *lli;  in owl_dma_alloc_lli() local
        355  if (!lli)  in owl_dma_alloc_lli()
        359  lli->phys = phys;  in owl_dma_alloc_lli()
        361  return lli;  in owl_dma_alloc_lli()
        541  struct owl_dma_lli *lli;  in owl_dma_start_next_txd() local
        890  if (!lli) {  in owl_dma_prep_memcpy()
        949  if (!lli) {  in owl_dma_prep_slave_sg()
        1005  if (!lli) {  in owl_prep_dma_cyclic()
        [all …]
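owl-dma tears the chain down the mirror way: walk the software list and hand every item back to the pool. A hedged sketch of that cleanup, again with a placeholder `struct xx_lli`; the `_safe` iterator matters because each node is deleted while the list is being walked:

```c
#include <linux/dmapool.h>
#include <linux/list.h>

/* Placeholder bookkeeping, mirroring the previous sketch. */
struct xx_lli {
	dma_addr_t phys;
	struct list_head node;
};

static void xx_free_lli_list(struct dma_pool *pool, struct list_head *desc_list)
{
	struct xx_lli *lli, *tmp;

	/* unlink each node before returning its memory to the pool */
	list_for_each_entry_safe(lli, tmp, desc_list, node) {
		list_del(&lli->node);
		dma_pool_free(pool, lli, lli->phys);
	}
}
```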
|
| A D | at_hdmac.c |
        216  struct at_lli *lli;  member
        417  &lli->saddr, &lli->daddr,  in atc_dump_lli()
        418  lli->ctrla, lli->ctrlb, &lli->dscr);  in atc_dump_lli()
        723  if (desc->sg[i].lli && desc->sg[i].lli->dscr == dscr)  in atc_get_llis_residue()
        944  lli = atdma_sg->lli;  in atc_prep_dma_interleaved()
        1023  lli = atdma_sg->lli;  in atc_prep_dma_memcpy()
        1073  lli = atdma_sg->lli;  in atdma_create_memset_lli()
        1075  lli->saddr = psrc;  in atdma_create_memset_lli()
        1308  lli = atdma_sg->lli;  in atc_prep_slave_sg()
        1357  lli = atdma_sg->lli;  in atc_prep_slave_sg()
        [all …]
|
| A D | idma64.c |
        222  dma_pool_free(idma64c->pool, hw->lli, hw->llp);  in idma64_desc_free()
        241  struct idma64_lli *lli = hw->lli;  in idma64_hw_desc_fill() local
        263  lli->sar = sar;  in idma64_hw_desc_fill()
        264  lli->dar = dar;  in idma64_hw_desc_fill()
        266  lli->ctlhi = ctlhi;  in idma64_hw_desc_fill()
        267  lli->ctllo = ctllo |  in idma64_hw_desc_fill()
        273  lli->llp = llp;  in idma64_hw_desc_fill()
        282  struct idma64_lli *lli = hw->lli;  in idma64_desc_fill() local
        294  lli->ctllo |= IDMA64C_CTLL_INT_EN;  in idma64_desc_fill()
        297  lli->ctllo &= ~(IDMA64C_CTLL_LLP_S_EN | IDMA64C_CTLL_LLP_D_EN);  in idma64_desc_fill()
        [all …]
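The idma64 hits sketch how a chain is filled and then terminated: every item points at its successor via `llp`, and only the last item gets its chaining bits cleared and an interrupt enabled. A rough, hypothetical equivalent (field and flag names are invented for illustration and do not match `struct idma64_lli`):

```c
#include <linux/bits.h>
#include <linux/types.h>

/* Hypothetical hardware item; real controllers add size/ctl encodings. */
struct xx_hw_lli {
	u64 sar;	/* source address */
	u64 dar;	/* destination address */
	u32 ctl;	/* control word: chaining, interrupt enable, ... */
	u64 llp;	/* bus address of the next item, 0 = none */
};

#define XX_CTL_LLP_EN	BIT(0)	/* follow llp to the next item */
#define XX_CTL_INT_EN	BIT(1)	/* raise an interrupt when this item completes */

/* Fill @count items (count >= 1) describing a contiguous copy split
 * into @block-sized pieces; @lli_phys is the bus address of lli[0]. */
static void xx_fill_chain(struct xx_hw_lli *lli, unsigned int count,
			  dma_addr_t lli_phys, u64 sar, u64 dar, u32 block)
{
	unsigned int i;

	for (i = 0; i < count; i++) {
		lli[i].sar = sar + (u64)i * block;
		lli[i].dar = dar + (u64)i * block;
		lli[i].ctl = XX_CTL_LLP_EN;
		lli[i].llp = lli_phys + (i + 1) * sizeof(*lli);
	}

	/* last item: stop chaining, signal completion */
	lli[count - 1].ctl = XX_CTL_INT_EN;
	lli[count - 1].llp = 0;
}
```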
|
| A D | sun6i-dma.c |
        353  struct sun6i_dma_lli *lli;  in sun6i_get_chan_size() local
        363  for (lli = txd->v_lli; lli; lli = lli->v_lli_next) {  in sun6i_get_chan_size()
        364  if (lli->p_lli_next == pos) {  in sun6i_get_chan_size()
        365  for (lli = lli->v_lli_next; lli; lli = lli->v_lli_next)  in sun6i_get_chan_size()
        366  bytes += lli->len;  in sun6i_get_chan_size()
        959  struct sun6i_dma_lli *lli;  in sun6i_dma_tx_status() local
        976  for (lli = txd->v_lli; lli != NULL; lli = lli->v_lli_next)  in sun6i_dma_tx_status()
        977  bytes += lli->len;  in sun6i_dma_tx_status()
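The sun6i residue path illustrates a common trick: read back the hardware's current LLI pointer, locate the virtual item whose `p_lli_next` matches it, and sum the lengths of everything that has not been loaded yet. A simplified sketch under those assumptions (names are illustrative):

```c
#include <linux/types.h>

struct xx_vlli {
	u32 len;			/* bytes covered by this item */
	dma_addr_t p_lli_next;		/* bus address of the next hw item */
	struct xx_vlli *v_lli_next;	/* its virtual (CPU-side) counterpart */
};

/*
 * Bytes still outstanding once the engine has advanced to @hw_cur:
 * the sum of every item that comes after the one currently loaded.
 */
static size_t xx_bytes_left(struct xx_vlli *head, dma_addr_t hw_cur)
{
	struct xx_vlli *lli;
	size_t bytes = 0;

	for (lli = head; lli; lli = lli->v_lli_next) {
		if (lli->p_lli_next == hw_cur) {
			for (lli = lli->v_lli_next; lli; lli = lli->v_lli_next)
				bytes += lli->len;
			break;
		}
	}
	return bytes;
}
```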
|
| A D | amba-pl08x.c |
        392  struct pl08x_phy_chan *phychan, const u32 *lli, u32 ccfg)  in pl08x_write_lli() argument
        398  phychan->id, lli[PL080_LLI_SRC], lli[PL080_LLI_DST],  in pl08x_write_lli()
        399  lli[PL080_LLI_LLI], lli[PL080_LLI_CCTL],  in pl08x_write_lli()
        400  lli[PL080S_LLI_CCTL2], ccfg);  in pl08x_write_lli()
        405  phychan->id, lli[PL080_LLI_SRC], lli[PL080_LLI_DST],  in pl08x_write_lli()
        406  lli[PL080_LLI_LLI], lli[PL080_LLI_CCTL], ccfg);  in pl08x_write_lli()
        408  writel_relaxed(lli[PL080_LLI_SRC], phychan->reg_src);  in pl08x_write_lli()
        409  writel_relaxed(lli[PL080_LLI_DST], phychan->reg_dst);  in pl08x_write_lli()
        410  writel_relaxed(lli[PL080_LLI_LLI], phychan->reg_lli);  in pl08x_write_lli()
        419  u32 llictl = lli[PL080_LLI_CCTL];  in pl08x_write_lli()
        [all …]
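`pl08x_write_lli()` shows the other way of kicking off a chain: rather than handing the controller a descriptor pointer first, it copies the words of the first LLI straight into the channel's SRC/DST/LLI/CCTL registers and lets the hardware fetch the rest of the chain on its own. A hedged sketch of that kick-off (the register offsets and four-word layout are assumptions, not the PL080 programming model verbatim):

```c
#include <linux/io.h>
#include <linux/types.h>

/* Assumed word order of one in-memory LLI, mirroring the channel regs. */
enum { XX_LLI_SRC, XX_LLI_DST, XX_LLI_NEXT, XX_LLI_CCTL, XX_LLI_WORDS };

/* Copy the first LLI into the channel registers; once the channel is
 * enabled, the controller follows the chain by itself. */
static void xx_program_first_lli(void __iomem *chan_base,
				 const u32 lli[XX_LLI_WORDS])
{
	/* plain register loads, so relaxed accessors are sufficient here */
	writel_relaxed(lli[XX_LLI_SRC],  chan_base + 0x00);
	writel_relaxed(lli[XX_LLI_DST],  chan_base + 0x04);
	writel_relaxed(lli[XX_LLI_NEXT], chan_base + 0x08);
	writel_relaxed(lli[XX_LLI_CCTL], chan_base + 0x0c);
}
```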
|
| A D | k3dma.c |
        61   u32 lli;  member
        163  writel_relaxed(hw->lli, phy->base + CX_LLI);  in k3_dma_set_desc()
        412  if (!ds->desc_hw[index].lli)  in k3_dma_tx_status()
        450  ds->desc_hw[num].lli = ds->desc_hw_lli + (num + 1) *  in k3_dma_fill_desc()
        453  ds->desc_hw[num].lli |= CX_LLI_CHAIN_EN;  in k3_dma_fill_desc()
        526  ds->desc_hw[num-1].lli = 0; /* end of link */  in k3_dma_prep_memcpy()
        581  ds->desc_hw[num-1].lli = 0; /* end of link */  in k3_dma_prep_slave_sg()
        647  ds->desc_hw[num - 1].lli |= ds->desc_hw_lli;  in k3_dma_prep_dma_cyclic()
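k3dma keeps all LLIs of a descriptor in one contiguous block, so "link to the next item" is plain pointer arithmetic on the block's bus address with a chain-enable flag OR'ed in; the last item gets 0 to end the chain, or the first item's address for cyclic transfers. A small sketch of that arithmetic with hypothetical structure and flag names:

```c
#include <linux/bits.h>
#include <linux/kernel.h>
#include <linux/types.h>

#define XX_LLI_CHAIN_EN	BIT(0)	/* low bit of the link word doubles as a flag */

/* Hypothetical fixed-size hardware descriptor, many of them per block. */
struct xx_hw_desc {
	u32 lli;	/* bus address of the next item | chain flag, 0 = end */
	u32 count;
	u32 saddr;
	u32 daddr;
};

/* Link @num (>= 1) descriptors that live contiguously at @block_phys. */
static void xx_link_llis(struct xx_hw_desc *desc, unsigned int num,
			 dma_addr_t block_phys, bool cyclic)
{
	u32 base = lower_32_bits(block_phys);
	unsigned int i;

	for (i = 0; i < num - 1; i++)
		desc[i].lli = (base + (u32)((i + 1) * sizeof(*desc))) |
			      XX_LLI_CHAIN_EN;

	/* cyclic transfers wrap back to the first item; others terminate */
	desc[num - 1].lli = cyclic ? (base | XX_LLI_CHAIN_EN) : 0;
}
```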
|
| A D | idma64.h | 107 struct idma64_lli *lli; member
|
| A D | ste_dma40_ll.h | 444 struct d40_phy_lli *lli,
|
| A D | ste_dma40.c |
        854  struct d40_log_lli_bidir *lli = &desc->lli_log;  in d40_log_lli_to_lcxa() local
        900  &lli->dst[lli_current],  in d40_log_lli_to_lcxa()
        901  &lli->src[lli_current],  in d40_log_lli_to_lcxa()
        928  &lli->dst[lli_current],  in d40_log_lli_to_lcxa()
        929  &lli->src[lli_current],  in d40_log_lli_to_lcxa()
        938  &lli->dst[lli_current],  in d40_log_lli_to_lcxa()
        939  &lli->src[lli_current],  in d40_log_lli_to_lcxa()
|
| /drivers/dma/dw-edma/ |
| A D | dw-hdma-v0-core.c |
        161  struct dw_hdma_v0_lli *lli = chunk->ll_region.vaddr.mem + ofs;  in dw_hdma_v0_write_ll_data() local
        163  lli->control = control;  in dw_hdma_v0_write_ll_data()
        164  lli->transfer_size = size;  in dw_hdma_v0_write_ll_data()
        165  lli->sar.reg = sar;  in dw_hdma_v0_write_ll_data()
        166  lli->dar.reg = dar;  in dw_hdma_v0_write_ll_data()
        168  struct dw_hdma_v0_lli __iomem *lli = chunk->ll_region.vaddr.io + ofs;  in dw_hdma_v0_write_ll_data() local
        170  writel(control, &lli->control);  in dw_hdma_v0_write_ll_data()
        171  writel(size, &lli->transfer_size);  in dw_hdma_v0_write_ll_data()
        172  writeq(sar, &lli->sar.reg);  in dw_hdma_v0_write_ll_data()
        173  writeq(dar, &lli->dar.reg);  in dw_hdma_v0_write_ll_data()
|
| A D | dw-edma-v0-core.c |
        285  struct dw_edma_v0_lli *lli = chunk->ll_region.vaddr.mem + ofs;  in dw_edma_v0_write_ll_data() local
        287  lli->control = control;  in dw_edma_v0_write_ll_data()
        288  lli->transfer_size = size;  in dw_edma_v0_write_ll_data()
        289  lli->sar.reg = sar;  in dw_edma_v0_write_ll_data()
        290  lli->dar.reg = dar;  in dw_edma_v0_write_ll_data()
        292  struct dw_edma_v0_lli __iomem *lli = chunk->ll_region.vaddr.io + ofs;  in dw_edma_v0_write_ll_data() local
        294  writel(control, &lli->control);  in dw_edma_v0_write_ll_data()
        295  writel(size, &lli->transfer_size);  in dw_edma_v0_write_ll_data()
        296  writeq(sar, &lli->sar.reg);  in dw_edma_v0_write_ll_data()
        297  writeq(dar, &lli->dar.reg);  in dw_edma_v0_write_ll_data()
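Both the eDMA and HDMA cores above must cope with the linked list living either in ordinary system memory or behind a PCI BAR: the first case is plain structure stores, the second needs `__iomem` accessors. A hedged sketch of that split (the item layout is illustrative; `writeq()` assumes a 64-bit build or one of the io-64-nonatomic helpers):

```c
#include <linux/io.h>
#include <linux/types.h>

/* Illustrative link-list item layout. */
struct xx_ll_item {
	u32 control;
	u32 transfer_size;
	u64 sar;
	u64 dar;
};

static void xx_write_ll_item(void *mem, void __iomem *io, bool is_iomem,
			     u32 control, u32 size, u64 sar, u64 dar)
{
	if (!is_iomem) {
		/* list sits in coherent system memory: plain stores */
		struct xx_ll_item *lli = mem;

		lli->control = control;
		lli->transfer_size = size;
		lli->sar = sar;
		lli->dar = dar;
	} else {
		/* list sits in device memory (e.g. a BAR): use accessors */
		struct xx_ll_item __iomem *lli = io;

		writel(control, &lli->control);
		writel(size, &lli->transfer_size);
		writeq(sar, &lli->sar);
		writeq(dar, &lli->dar);
	}
}
```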
|
| /drivers/dma/dw-axi-dmac/ |
| A D | dw-axi-dmac-platform.c |
        313  struct axi_dma_lli *lli;  in axi_desc_get() local
        317  if (unlikely(!lli)) {  in axi_desc_get()
        326  return lli;  in axi_desc_get()
        391  desc->lli->llp = cpu_to_le64(adr);  in write_desc_llp()
        700  if (unlikely(!hw_desc->lli))  in dw_axi_dma_set_hw_desc()
        968  if (unlikely(!hw_desc->lli))  in dma_chan_prep_dma_memcpy()
        1036  if (!desc->lli) {  in axi_chan_dump_lli()
        1043  le64_to_cpu(desc->lli->sar),  in axi_chan_dump_lli()
        1044  le64_to_cpu(desc->lli->dar),  in axi_chan_dump_lli()
        1045  le64_to_cpu(desc->lli->llp),  in axi_chan_dump_lli()
        [all …]
|
| A D | dw-axi-dmac.h | 94 struct axi_dma_lli *lli; member
|
| /drivers/dma/dw/ |
| A D | regs.h |
        386  struct dw_lli lli;  member
        388  #define lli_set(d, reg, v) ((d)->lli.reg |= cpu_to_le32(v))
        389  #define lli_clear(d, reg, v) ((d)->lli.reg &= ~cpu_to_le32(v))
        390  #define lli_read(d, reg) le32_to_cpu((d)->lli.reg)
        391  #define lli_write(d, reg, v) ((d)->lli.reg = cpu_to_le32(v))
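The dw driver keeps its in-memory LLI fields little-endian and funnels every access through these one-line macros, so the byte swap cannot be forgotten at individual call sites; `core.c` below then ends a chain simply by writing 0 into the last item's `llp`. A sketch of the same accessor idea with hypothetical names:

```c
#include <linux/kernel.h>
#include <linux/types.h>

/* The hardware reads these fields as little-endian regardless of the CPU. */
struct xx_lli {
	__le32 sar;
	__le32 dar;
	__le32 llp;	/* next item, 0 = end of chain */
	__le32 ctl;
};

struct xx_desc {
	struct xx_lli lli;
};

/* One place for every access, so the conversion is never lost. */
#define xx_lli_read(d, reg)	le32_to_cpu((d)->lli.reg)
#define xx_lli_write(d, reg, v)	((d)->lli.reg = cpu_to_le32(v))
#define xx_lli_set(d, reg, v)	((d)->lli.reg |= cpu_to_le32(v))
#define xx_lli_clear(d, reg, v)	((d)->lli.reg &= ~cpu_to_le32(v))

static void xx_terminate_chain(struct xx_desc *last)
{
	/* mirrors "prev->lli.llp = 0" in core.c: no further items */
	xx_lli_write(last, llp, 0);
}
```

Declaring the fields as `__le32` also lets sparse flag any access that bypasses the accessors.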
|
| A D | core.c |
        602  prev->lli.llp = 0;  in dwc_prep_dma_memcpy()
        750  prev->lli.llp = 0;  in dwc_prep_slave_sg()
|
| /drivers/dma/stm32/ |
| A D | stm32-dma3.c |
        274  struct stm32_dma3_lli lli[] __counted_by(lli_size);
        376  hwdesc = swdesc->lli[i].hwdesc;  in stm32_dma3_chan_dump_hwdesc()
        419  &swdesc->lli[i].hwdesc_addr);  in stm32_dma3_chan_desc_alloc()
        420  if (!swdesc->lli[i].hwdesc)  in stm32_dma3_chan_desc_alloc()
        437  dma_pool_free(chan->lli_pool, swdesc->lli[i].hwdesc, swdesc->lli[i].hwdesc_addr);  in stm32_dma3_chan_desc_alloc()
        449  dma_pool_free(chan->lli_pool, swdesc->lli[i].hwdesc, swdesc->lli[i].hwdesc_addr);  in stm32_dma3_chan_desc_free()
        502  hwdesc = swdesc->lli[curr].hwdesc;  in stm32_dma3_chan_prep_hwdesc()
        511  next_lli = swdesc->lli[0].hwdesc_addr;  in stm32_dma3_chan_prep_hwdesc()
        515  next_lli = swdesc->lli[next].hwdesc_addr;  in stm32_dma3_chan_prep_hwdesc()
        780  hwdesc = chan->swdesc->lli[0].hwdesc;  in stm32_dma3_chan_start()
        [all …]
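stm32-dma3 sizes its software descriptor with a `__counted_by()` flexible array of LLI slots and draws each hardware descriptor from a per-channel dma_pool, unwinding on failure. A hedged sketch of that allocation shape (types and field names are placeholders, not the driver's):

```c
#include <linux/dmapool.h>
#include <linux/gfp.h>
#include <linux/overflow.h>
#include <linux/slab.h>

struct xx_lli {
	void *hwdesc;			/* CPU view of the hardware item */
	dma_addr_t hwdesc_addr;		/* bus address the engine follows */
};

struct xx_swdesc {
	unsigned int lli_size;
	struct xx_lli lli[] __counted_by(lli_size);
};

static struct xx_swdesc *xx_desc_alloc(struct dma_pool *pool, unsigned int count)
{
	struct xx_swdesc *swdesc;
	unsigned int i;

	/* one allocation for the header plus @count LLI slots */
	swdesc = kzalloc(struct_size(swdesc, lli, count), GFP_NOWAIT);
	if (!swdesc)
		return NULL;
	swdesc->lli_size = count;

	for (i = 0; i < count; i++) {
		swdesc->lli[i].hwdesc = dma_pool_zalloc(pool, GFP_NOWAIT,
							&swdesc->lli[i].hwdesc_addr);
		if (!swdesc->lli[i].hwdesc)
			goto err_free;
	}
	return swdesc;

err_free:
	/* unwind whatever was already taken from the pool */
	while (i--)
		dma_pool_free(pool, swdesc->lli[i].hwdesc,
			      swdesc->lli[i].hwdesc_addr);
	kfree(swdesc);
	return NULL;
}
```

For cyclic transfers the link of the last slot simply points back at `lli[0].hwdesc_addr`, which is what the `next_lli = swdesc->lli[0].hwdesc_addr` hit above reflects.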
|
| /drivers/net/ethernet/chelsio/cxgb4/ |
| A D | cxgb4_uld.c |
        511  struct cxgb4_lld_info *lli)  in uld_queue_init() argument
        517  lli->rxq_ids = rxq_info->rspq_id;  in uld_queue_init()
        518  lli->nrxq = rxq_info->nrxq;  in uld_queue_init()
        519  lli->ciq_ids = rxq_info->rspq_id + rxq_info->nrxq;  in uld_queue_init()
        520  lli->nciq = rxq_info->nciq;  in uld_queue_init()
        521  lli->ntxq = txq_info->ntxq;  in uld_queue_init()
        646  struct cxgb4_lld_info lli;  in uld_attach() local
        649  uld_init(adap, &lli);  in uld_attach()
        650  uld_queue_init(adap, uld, &lli);  in uld_attach()
        652  handle = adap->uld[uld].add(&lli);  in uld_attach()
|