/drivers/dma/
of-dma.c
     37  if (ofdma->of_node == dma_spec->np)  in of_dma_find_controller()
     41  dma_spec->np);  in of_dma_find_controller()
    238  dma_spec))  in of_dma_match_channel()
    254  struct of_phandle_args dma_spec;  in of_dma_request_slave_channel()  (local)
    285  &dma_spec))  in of_dma_request_slave_channel()
    289  ofdma = of_dma_find_controller(&dma_spec);  in of_dma_request_slave_channel()
    300  of_node_put(dma_spec.np);  in of_dma_request_slave_channel()
    324  int count = dma_spec->args_count;  in of_dma_simple_xlate()
    334  &dma_spec->args[0], dma_spec->np);  in of_dma_simple_xlate()
    357  if (!dev || dma_spec->args_count != 1)  in of_dma_xlate_by_chan_id()
    [all …]
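
The of-dma.c hits above are the core of the pattern every file below plugs into: a provider registers a translation callback with of_dma_controller_register(), and the core hands that callback a struct of_phandle_args ("dma_spec") built from one entry of a consumer's "dmas" property. Below is a minimal provider-side sketch; of_dma_controller_register(), dma_get_slave_channel() and the of_dma_data pointer are the real dmaengine/of-dma interfaces, while everything named foo_* is hypothetical.

```c
#include <linux/dmaengine.h>
#include <linux/of.h>
#include <linux/of_dma.h>

struct foo_dma {
	struct dma_device ddev;
	unsigned int nr_channels;
	struct dma_chan *chans[8];
};

static struct dma_chan *foo_dma_xlate(struct of_phandle_args *dma_spec,
				      struct of_dma *ofdma)
{
	/* of_dma_controller_register() stored this pointer as of_dma_data. */
	struct foo_dma *fdev = ofdma->of_dma_data;
	unsigned int chan_id;

	/* The cells of one "dmas" entry arrive in dma_spec->args[]. */
	if (dma_spec->args_count != 1)
		return NULL;

	chan_id = dma_spec->args[0];
	if (chan_id >= fdev->nr_channels)
		return NULL;

	/* Hand the channel out if it is still unused. */
	return dma_get_slave_channel(fdev->chans[chan_id]);
}

static int foo_dma_register_of(struct device_node *np, struct foo_dma *fdev)
{
	/* dma_spec->np is later matched against np by of_dma_find_controller(). */
	return of_dma_controller_register(np, foo_dma_xlate, fdev);
}
```

The driver-specific xlate functions further down (st_fdma_of_xlate(), jz4780_of_dma_xlate(), at_dma_xlate() and friends) are variations on this shape with more specifier cells.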
|
acpi-dma.c
    292  struct acpi_dma_spec *dma_spec)  in acpi_dma_update_dma_spec()  (argument)
    295  dma_spec->dev = adma->dev;  in acpi_dma_update_dma_spec()
    303  dma_spec->slave_id > adma->end_request_line)  in acpi_dma_update_dma_spec()
    310  dma_spec->slave_id -= adma->base_request_line;  in acpi_dma_update_dma_spec()
    316  struct acpi_dma_spec dma_spec;  (struct member)
    334  pdata->dma_spec.chan_id = dma->channels;  in acpi_dma_parse_fixed_dma()
    355  struct acpi_dma_spec *dma_spec = &pdata.dma_spec;  in acpi_dma_request_slave_chan_by_index()  (local)
    367  dma_spec->chan_id = -1;  in acpi_dma_request_slave_chan_by_index()
    368  dma_spec->slave_id = -1;  in acpi_dma_request_slave_chan_by_index()
    377  if (dma_spec->slave_id < 0 || dma_spec->chan_id < 0)  in acpi_dma_request_slave_chan_by_index()
    [all …]
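
acpi-dma.c is the ACPI counterpart: the spec carries the controller device, a slave (request line) id and a channel id, and acpi_dma_update_dma_spec() (lines 292..310) rebases slave_id onto the controller that owns that request line. A hedged sketch of how a controller driver typically plugs into it, loosely following the dw/acpi.c entry further down; bar_* names are hypothetical, while acpi_dma_simple_xlate(), struct acpi_dma_filter_info and devm_acpi_dma_controller_register() are the interfaces declared in <linux/acpi_dma.h>.

```c
#include <linux/acpi_dma.h>
#include <linux/container_of.h>
#include <linux/dmaengine.h>

struct bar_chan {
	struct dma_chan chan;
	unsigned int request_line;
};

static bool bar_dma_acpi_filter(struct dma_chan *chan, void *param)
{
	/* acpi_dma_simple_xlate() forwards the translated spec as param. */
	struct acpi_dma_spec *dma_spec = param;
	struct bar_chan *bchan = container_of(chan, struct bar_chan, chan);

	/*
	 * slave_id has already been rebased onto this controller's request
	 * lines by acpi_dma_update_dma_spec() above.
	 */
	return bchan->request_line == dma_spec->slave_id;
}

static int bar_dma_register_acpi(struct device *dev)
{
	static struct acpi_dma_filter_info filter_info;

	filter_info.filter_fn = bar_dma_acpi_filter;
	dma_cap_zero(filter_info.dma_cap);
	dma_cap_set(DMA_SLAVE, filter_info.dma_cap);

	return devm_acpi_dma_controller_register(dev, acpi_dma_simple_xlate,
						 &filter_info);
}
```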
|
lpc18xx-dmamux.c
     61  if (dma_spec->args_count != 3) {  in lpc18xx_dmamux_reserve()
     66  mux = dma_spec->args[0];  in lpc18xx_dmamux_reserve()
     69  dma_spec->args[0]);  in lpc18xx_dmamux_reserve()
     73  if (dma_spec->args[1] > LPC18XX_DMAMUX_MAX_VAL) {  in lpc18xx_dmamux_reserve()
     75  dma_spec->args[1]);  in lpc18xx_dmamux_reserve()
     80  dma_spec->np = of_parse_phandle(ofdma->of_node, "dma-masters", 0);  in lpc18xx_dmamux_reserve()
     81  if (!dma_spec->np) {  in lpc18xx_dmamux_reserve()
     91  of_node_put(dma_spec->np);  in lpc18xx_dmamux_reserve()
     96  dmamux->muxes[mux].value = dma_spec->args[1];  in lpc18xx_dmamux_reserve()
    103  dma_spec->args[1] = dma_spec->args[2];  in lpc18xx_dmamux_reserve()
    [all …]
|
lpc32xx-dmamux.c
    100  if (dma_spec->args_count != 3) {  in lpc32xx_dmamux_reserve()
    106  if (lpc32xx_muxes[i].signal == dma_spec->args[0]) {  in lpc32xx_dmamux_reserve()
    113  dma_spec->args[0]);  in lpc32xx_dmamux_reserve()
    117  if (dma_spec->args[2] > 1) {  in lpc32xx_dmamux_reserve()
    119  dma_spec->args[1]);  in lpc32xx_dmamux_reserve()
    124  dma_spec->np = of_parse_phandle(ofdma->of_node, "dma-masters", 0);  in lpc32xx_dmamux_reserve()
    125  if (!dma_spec->np) {  in lpc32xx_dmamux_reserve()
    135  of_node_put(dma_spec->np);  in lpc32xx_dmamux_reserve()
    140  mux->muxval = dma_spec->args[2] ? BIT(mux->bit) : 0;  in lpc32xx_dmamux_reserve()
    145  dma_spec->args[2] = 0;  in lpc32xx_dmamux_reserve()
    [all …]
|
cv1800b-dmamux.c
     96  static void *cv1800_dmamux_route_allocate(struct of_phandle_args *dma_spec,  in cv1800_dmamux_route_allocate()  (argument)
    107  if (dma_spec->args_count != DMAMUX_NCELLS) {  in cv1800_dmamux_route_allocate()
    112  devid = dma_spec->args[0];  in cv1800_dmamux_route_allocate()
    113  cpuid = dma_spec->args[1];  in cv1800_dmamux_route_allocate()
    114  dma_spec->args_count = 1;  in cv1800_dmamux_route_allocate()
    126  dma_spec->np = of_parse_phandle(ofdma->of_node, "dma-masters", 0);  in cv1800_dmamux_route_allocate()
    127  if (!dma_spec->np) {  in cv1800_dmamux_route_allocate()
    169  dma_spec->args[0] = chid;  in cv1800_dmamux_route_allocate()
    178  of_node_put(dma_spec->np);  in cv1800_dmamux_route_allocate()
|
fsl-edma-main.c
    260  if (dma_spec->args_count != 2)  in fsl_edma_xlate()
    269  if (fsl_edma_srcid_in_use(fsl_edma, dma_spec->args[1]))  in fsl_edma_xlate()
    277  fsl_chan->srcid = dma_spec->args[1];  in fsl_edma_xlate()
    303  if (dma_spec->args_count != 3)  in fsl_edma3_xlate()
    316  if (fsl_edma_srcid_in_use(fsl_edma, dma_spec->args[0]))  in fsl_edma3_xlate()
    320  fsl_chan->priority = dma_spec->args[1];  in fsl_edma3_xlate()
    321  fsl_chan->is_rxchan = dma_spec->args[2] & FSL_EDMA_RX;  in fsl_edma3_xlate()
    325  if ((dma_spec->args[2] & FSL_EDMA_EVEN_CH) && (i & 0x1))  in fsl_edma3_xlate()
    328  if ((dma_spec->args[2] & FSL_EDMA_ODD_CH) && !(i & 0x1))  in fsl_edma3_xlate()
    331  if (!b_chmux && i == dma_spec->args[0]) {  in fsl_edma3_xlate()
    [all …]
|
st_fdma.c
    167  static struct dma_chan *st_fdma_of_xlate(struct of_phandle_args *dma_spec,  in st_fdma_of_xlate()  (argument)
    175  if (dma_spec->args_count < 1)  in st_fdma_of_xlate()
    178  if (fdev->dma_device.dev->of_node != dma_spec->np)  in st_fdma_of_xlate()
    193  fchan->cfg.of_node = dma_spec->np;  in st_fdma_of_xlate()
    194  fchan->cfg.req_line = dma_spec->args[0];  in st_fdma_of_xlate()
    198  if (dma_spec->args_count > 1)  in st_fdma_of_xlate()
    199  fchan->cfg.req_ctrl = dma_spec->args[1]  in st_fdma_of_xlate()
    202  if (dma_spec->args_count > 2)  in st_fdma_of_xlate()
    203  fchan->cfg.type = dma_spec->args[2];  in st_fdma_of_xlate()
|
dma-jz4780.c
    805  static struct dma_chan *jz4780_of_dma_xlate(struct of_phandle_args *dma_spec,  in jz4780_of_dma_xlate()  (argument)
    812  if (dma_spec->args_count == 2) {  in jz4780_of_dma_xlate()
    813  data.transfer_type_tx = dma_spec->args[0];  in jz4780_of_dma_xlate()
    814  data.transfer_type_rx = dma_spec->args[0];  in jz4780_of_dma_xlate()
    815  data.channel = dma_spec->args[1];  in jz4780_of_dma_xlate()
    816  } else if (dma_spec->args_count == 3) {  in jz4780_of_dma_xlate()
    817  data.transfer_type_tx = dma_spec->args[0];  in jz4780_of_dma_xlate()
    818  data.transfer_type_rx = dma_spec->args[1];  in jz4780_of_dma_xlate()
    819  data.channel = dma_spec->args[2];  in jz4780_of_dma_xlate()
|
ep93xx_dma.c
   1445  static struct dma_chan *ep93xx_m2p_dma_of_xlate(struct of_phandle_args *dma_spec,  in ep93xx_m2p_dma_of_xlate()  (argument)
   1451  u8 port = dma_spec->args[0];  in ep93xx_m2p_dma_of_xlate()
   1452  u8 direction = dma_spec->args[1];  in ep93xx_m2p_dma_of_xlate()
   1476  static struct dma_chan *ep93xx_m2m_dma_of_xlate(struct of_phandle_args *dma_spec,  in ep93xx_m2m_dma_of_xlate()  (argument)
   1482  u8 port = dma_spec->args[0];  in ep93xx_m2m_dma_of_xlate()
   1483  u8 direction = dma_spec->args[1];  in ep93xx_m2m_dma_of_xlate()
|
img-mdc-dma.c
    807  static struct dma_chan *mdc_of_xlate(struct of_phandle_args *dma_spec,  in mdc_of_xlate()  (argument)
    813  if (dma_spec->args_count != 3)  in mdc_of_xlate()
    819  if (!(dma_spec->args[1] & BIT(mchan->chan_nr)))  in mdc_of_xlate()
    822  mchan->periph = dma_spec->args[0];  in mdc_of_xlate()
    823  mchan->thread = dma_spec->args[2];  in mdc_of_xlate()
|
milbeaut-hdmac.c
    403  milbeaut_hdmac_xlate(struct of_phandle_args *dma_spec, struct of_dma *of_dma)  in milbeaut_hdmac_xlate()  (argument)
    410  if (dma_spec->args_count != 1)  in milbeaut_hdmac_xlate()
    419  mc->slave_id = dma_spec->args[0];  in milbeaut_hdmac_xlate()
|
mmp_tdma.c
    609  static struct dma_chan *mmp_tdma_xlate(struct of_phandle_args *dma_spec,  in mmp_tdma_xlate()  (argument)
    616  if (dma_spec->args_count != 1)  in mmp_tdma_xlate()
    619  param.chan_id = dma_spec->args[0];  in mmp_tdma_xlate()
|
uniphier-xdmac.c
    466  static struct dma_chan *of_dma_uniphier_xlate(struct of_phandle_args *dma_spec,  in of_dma_uniphier_xlate()  (argument)
    470  int chan_id = dma_spec->args[0];  in of_dma_uniphier_xlate()
    476  xdev->channels[chan_id].req_factor = dma_spec->args[1];  in of_dma_uniphier_xlate()
|
at_hdmac.c
   1796  static struct dma_chan *at_dma_xlate(struct of_phandle_args *dma_spec,  in at_dma_xlate()  (argument)
   1806  if (dma_spec->args_count != 2)  in at_dma_xlate()
   1809  dmac_pdev = of_find_device_by_node(dma_spec->np);  in at_dma_xlate()
   1827  per_id = dma_spec->args[1] & AT91_DMA_CFG_PER_ID_MASK;  in at_dma_xlate()
   1834  switch (dma_spec->args[1] & AT91_DMA_CFG_FIFOCFG_MASK) {  in at_dma_xlate()
   1857  atchan->per_if = dma_spec->args[0] & 0xff;  in at_dma_xlate()
   1858  atchan->mem_if = (dma_spec->args[0] >> 16) & 0xff;  in at_dma_xlate()
   1863  static struct dma_chan *at_dma_xlate(struct of_phandle_args *dma_spec,  in at_dma_xlate()  (argument)
|
/drivers/dma/ti/
dma-crossbar.c
     84  if (dma_spec->args_count != 3)  in ti_am335x_xbar_route_allocate()
     89  dma_spec->args[2]);  in ti_am335x_xbar_route_allocate()
     95  dma_spec->args[0]);  in ti_am335x_xbar_route_allocate()
    101  if (!dma_spec->np) {  in ti_am335x_xbar_route_allocate()
    108  of_node_put(dma_spec->np);  in ti_am335x_xbar_route_allocate()
    115  dma_spec->args[2] = 0;  in ti_am335x_xbar_route_allocate()
    116  dma_spec->args_count = 2;  in ti_am335x_xbar_route_allocate()
    248  dma_spec->args[0]);  in ti_dra7_xbar_route_allocate()
    255  if (!dma_spec->np) {  in ti_dra7_xbar_route_allocate()
    263  of_node_put(dma_spec->np);  in ti_dra7_xbar_route_allocate()
    [all …]
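
The crossbar above and the *-dmamux.c files in the first section are DMA routers rather than controllers: their route_allocate() callback claims a mux or crossbar line, rewrites dma_spec->args (and args_count) into the form the master controller expects, and repoints dma_spec->np at the node referenced by "dma-masters". A compressed sketch of that pattern follows, with hypothetical qux_* names and a placeholder mapping in place of real register programming; of_dma_router_register() and struct dma_router are the genuine of-dma.c interfaces.

```c
#include <linux/err.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/slab.h>

#define QUX_NUM_OUTPUTS	16	/* hypothetical crossbar size */

static void *qux_dmamux_route_allocate(struct of_phandle_args *dma_spec,
				       struct of_dma *ofdma)
{
	unsigned int event, *route;

	if (dma_spec->args_count != 2)
		return ERR_PTR(-EINVAL);

	event = dma_spec->args[0];
	route = kzalloc(sizeof(*route), GFP_KERNEL);
	if (!route)
		return ERR_PTR(-ENOMEM);

	/* Placeholder mapping; real drivers program the mux register here. */
	*route = event % QUX_NUM_OUTPUTS;

	/* Rewrite the spec so it now addresses the master DMA controller. */
	dma_spec->args[0] = *route;
	dma_spec->args_count = 1;
	dma_spec->np = of_parse_phandle(ofdma->of_node, "dma-masters", 0);
	if (!dma_spec->np) {
		kfree(route);
		return ERR_PTR(-ENODEV);
	}

	return route;	/* handed back to route_free() when the channel is released */
}

static void qux_dmamux_route_free(struct device *dev, void *route_data)
{
	kfree(route_data);
}

static struct dma_router qux_dmamux_router = {
	.route_free = qux_dmamux_route_free,
};

static int qux_dmamux_register(struct device_node *np, struct device *dev)
{
	qux_dmamux_router.dev = dev;
	return of_dma_router_register(np, qux_dmamux_route_allocate,
				      &qux_dmamux_router);
}
```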
|
k3-psil.c
     78  struct of_phandle_args dma_spec;  in psil_set_new_ep_config()  (local)
     90  index, &dma_spec))  in psil_set_new_ep_config()
     93  thread_id = dma_spec.args[0];  in psil_set_new_ep_config()
     99  of_node_put(dma_spec.np);  in psil_set_new_ep_config()
    105  of_node_put(dma_spec.np);  in psil_set_new_ep_config()
|
k3-udma-glue.c
    148  struct of_phandle_args dma_spec;  in of_k3_udma_glue_parse_chn()  (local)
    161  &dma_spec))  in of_k3_udma_glue_parse_chn()
    164  ret = of_k3_udma_glue_parse(dma_spec.np, common);  in of_k3_udma_glue_parse_chn()
    168  thread_id = dma_spec.args[0];  in of_k3_udma_glue_parse_chn()
    169  if (dma_spec.args_count == 2) {  in of_k3_udma_glue_parse_chn()
    170  if (dma_spec.args[1] > 2 && !xudma_is_pktdma(common->udmax)) {  in of_k3_udma_glue_parse_chn()
    172  dma_spec.args[1]);  in of_k3_udma_glue_parse_chn()
    176  if (dma_spec.args[1] > 15 && xudma_is_pktdma(common->udmax)) {  in of_k3_udma_glue_parse_chn()
    178  dma_spec.args[1]);  in of_k3_udma_glue_parse_chn()
    183  common->atype_asel = dma_spec.args[1];  in of_k3_udma_glue_parse_chn()
    [all …]
|
/drivers/dma/dw/
of.c
     16  static struct dma_chan *dw_dma_of_xlate(struct of_phandle_args *dma_spec,  in dw_dma_of_xlate()  (argument)
     25  if (dma_spec->args_count < 3 || dma_spec->args_count > 4)  in dw_dma_of_xlate()
     28  slave.src_id = dma_spec->args[0];  in dw_dma_of_xlate()
     29  slave.dst_id = dma_spec->args[0];  in dw_dma_of_xlate()
     30  slave.m_master = dma_spec->args[1];  in dw_dma_of_xlate()
     31  slave.p_master = dma_spec->args[2];  in dw_dma_of_xlate()
     32  if (dma_spec->args_count >= 4)  in dw_dma_of_xlate()
     33  slave.channels = dma_spec->args[3];  in dw_dma_of_xlate()
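
dw/of.c shows another variant: the three or four specifier cells are decoded into a controller-specific slave descriptor, and a filter function then selects a channel owned by the right controller. The sketch below mirrors that shape with hypothetical corge_* names and a deliberately simplified slave structure; it is not the dw_dma_slave definition or the actual dw_dma_of_xlate() body.

```c
#include <linux/dmaengine.h>
#include <linux/of_dma.h>

struct corge_dma {
	struct dma_device dma;
};

struct corge_dma_slave {
	struct device *dma_dev;	/* controller that must own the channel */
	u8 src_id;
	u8 dst_id;
	u8 m_master;
	u8 p_master;
};

static bool corge_dma_filter(struct dma_chan *chan, void *param)
{
	struct corge_dma_slave *slave = param;

	/* Only accept channels provided by the controller named in the spec. */
	if (chan->device->dev != slave->dma_dev)
		return false;

	/* A real driver would also store the ids in its channel state here. */
	return true;
}

static struct dma_chan *corge_dma_of_xlate(struct of_phandle_args *dma_spec,
					   struct of_dma *ofdma)
{
	struct corge_dma *cdev = ofdma->of_dma_data;
	struct corge_dma_slave slave = { .dma_dev = cdev->dma.dev };
	dma_cap_mask_t cap;

	if (dma_spec->args_count < 3 || dma_spec->args_count > 4)
		return NULL;

	/* Cells: request id, memory-side master, peripheral-side master. */
	slave.src_id = dma_spec->args[0];
	slave.dst_id = dma_spec->args[0];
	slave.m_master = dma_spec->args[1];
	slave.p_master = dma_spec->args[2];

	dma_cap_zero(cap);
	dma_cap_set(DMA_SLAVE, cap);

	/* Searches every registered controller; the filter narrows the match. */
	return dma_request_channel(cap, corge_dma_filter, &slave);
}
```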
|
rzn1-dmamux.c
     41  static void *rzn1_dmamux_route_allocate(struct of_phandle_args *dma_spec,  in rzn1_dmamux_route_allocate()  (argument)
     51  if (dma_spec->args_count != RNZ1_DMAMUX_NCELLS)  in rzn1_dmamux_route_allocate()
     58  chan = dma_spec->args[0];  in rzn1_dmamux_route_allocate()
     59  map->req_idx = dma_spec->args[4];  in rzn1_dmamux_route_allocate()
     60  val = dma_spec->args[5];  in rzn1_dmamux_route_allocate()
     61  dma_spec->args_count -= 2;  in rzn1_dmamux_route_allocate()
     77  dma_spec->np = of_parse_phandle(ofdma->of_node, "dma-masters", dmac_idx);  in rzn1_dmamux_route_allocate()
     78  if (!dma_spec->np) {  in rzn1_dmamux_route_allocate()
|
acpi.c
     13  struct acpi_dma_spec *dma_spec = param;  in dw_dma_acpi_filter()  (local)
     15  .dma_dev = dma_spec->dev,  in dw_dma_acpi_filter()
     16  .src_id = dma_spec->slave_id,  in dw_dma_acpi_filter()
     17  .dst_id = dma_spec->slave_id,  in dw_dma_acpi_filter()
|
/drivers/dma/stm32/
stm32-dmamux.c
     96  if (dma_spec->args_count != 3) {  in stm32_dmamux_route_allocate()
    101  if (dma_spec->args[0] > dmamux->dmamux_requests) {  in stm32_dmamux_route_allocate()
    103  dma_spec->args[0]);  in stm32_dmamux_route_allocate()
    133  dma_spec->np = of_parse_phandle(ofdma->of_node, "dma-masters", i - 1);  in stm32_dmamux_route_allocate()
    134  if (!dma_spec->np) {  in stm32_dmamux_route_allocate()
    149  mux->request = dma_spec->args[0];  in stm32_dmamux_route_allocate()
    152  dma_spec->args[3] = dma_spec->args[2] | mux->chan_id << 16;  in stm32_dmamux_route_allocate()
    153  dma_spec->args[2] = dma_spec->args[1];  in stm32_dmamux_route_allocate()
    154  dma_spec->args[1] = 0;  in stm32_dmamux_route_allocate()
    155  dma_spec->args[0] = mux->chan_id - min;  in stm32_dmamux_route_allocate()
    [all …]
|
/drivers/mmc/host/
mmci_qcom_dml.c
    101  struct of_phandle_args dma_spec;  in of_get_dml_pipe_index()  (local)
    109  &dma_spec))  in of_get_dml_pipe_index()
    112  if (dma_spec.args_count)  in of_get_dml_pipe_index()
    113  return dma_spec.args[0];  in of_get_dml_pipe_index()
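
of_get_dml_pipe_index() is a plain consumer of the binding: it parses one entry of its own "dmas" property by hand and uses the first specifier cell directly, the same move made by k3-psil.c and k3-udma-glue.c above. A small sketch of that consumer-side parse, assuming a "dma-names" lookup for the index; the property names are the standard binding, the helper itself is illustrative.

```c
#include <linux/errno.h>
#include <linux/of.h>

static int example_get_first_dma_cell(struct device_node *np, const char *name)
{
	struct of_phandle_args dma_spec;
	int index, ret;

	index = of_property_match_string(np, "dma-names", name);
	if (index < 0)
		return index;

	ret = of_parse_phandle_with_args(np, "dmas", "#dma-cells", index,
					 &dma_spec);
	if (ret)
		return ret;

	/* args[0] carries the first specifier cell (e.g. a pipe/request id). */
	ret = dma_spec.args_count ? dma_spec.args[0] : -EINVAL;

	of_node_put(dma_spec.np);	/* drop the reference taken by the parse */
	return ret;
}
```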
|
/drivers/dma/sh/
usb-dmac.c
    639  struct of_phandle_args *dma_spec = arg;  in usb_dmac_chan_filter()  (local)
    642  if (uchan->index != dma_spec->args[0])  in usb_dmac_chan_filter()
    648  static struct dma_chan *usb_dmac_of_xlate(struct of_phandle_args *dma_spec,  in usb_dmac_of_xlate()  (argument)
    654  if (dma_spec->args_count != 1)  in usb_dmac_of_xlate()
    661  chan = __dma_request_channel(&mask, usb_dmac_chan_filter, dma_spec,  in usb_dmac_of_xlate()
|
rz-dmac.c
    758  struct of_phandle_args *dma_spec = arg;  in rz_dmac_chan_filter()  (local)
    761  channel->mid_rid = dma_spec->args[0] & MID_RID_MASK;  in rz_dmac_chan_filter()
    762  ch_cfg = (dma_spec->args[0] & CHCFG_MASK) >> 10;  in rz_dmac_chan_filter()
    769  static struct dma_chan *rz_dmac_of_xlate(struct of_phandle_args *dma_spec,  in rz_dmac_of_xlate()  (argument)
    774  if (dma_spec->args_count != 1)  in rz_dmac_of_xlate()
    781  return __dma_request_channel(&mask, rz_dmac_chan_filter, dma_spec,  in rz_dmac_of_xlate()
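
usb-dmac.c and rz-dmac.c use a close cousin of the dw/of.c shape: the xlate callback does not walk its own channel list but passes dma_spec straight to a dma_filter_fn through __dma_request_channel(), using the trailing of_node argument to confine the search to this controller's channels. A sketch of that shape with hypothetical grault_* names.

```c
#include <linux/container_of.h>
#include <linux/dmaengine.h>
#include <linux/of_dma.h>

struct grault_chan {
	struct dma_chan chan;
	unsigned int index;
};

static bool grault_chan_filter(struct dma_chan *chan, void *arg)
{
	struct of_phandle_args *dma_spec = arg;
	struct grault_chan *gchan = container_of(chan, struct grault_chan, chan);

	/* Match the channel index requested in the single specifier cell. */
	return gchan->index == dma_spec->args[0];
}

static struct dma_chan *grault_of_xlate(struct of_phandle_args *dma_spec,
					struct of_dma *ofdma)
{
	dma_cap_mask_t mask;

	if (dma_spec->args_count != 1)
		return NULL;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* The trailing of_node confines the search to this controller. */
	return __dma_request_channel(&mask, grault_chan_filter, dma_spec,
				     ofdma->of_node);
}
```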
|
/drivers/dma/qcom/
qcom_adm.c
    718  static struct dma_chan *adm_dma_xlate(struct of_phandle_args *dma_spec,  in adm_dma_xlate()  (argument)
    725  if (!dev || dma_spec->args_count > 2)  in adm_dma_xlate()
    729  if (chan->chan_id == dma_spec->args[0]) {  in adm_dma_xlate()
    738  if (dma_spec->args_count == 2)  in adm_dma_xlate()
    739  achan->crci = dma_spec->args[1];  in adm_dma_xlate()
|