
Searched refs:dma_ctx (Results 1 – 16 of 16) sorted by relevance

/drivers/dma/
dma_silabs_ldma.c
42 struct dma_context dma_ctx; member
303 for (chnum = 0; chnum < data->dma_ctx.dma_channels; chnum++) { in dma_silabs_irq_handler()
348 if (channel >= data->dma_ctx.dma_channels) { in dma_silabs_configure()
441 atomic_set_bit(data->dma_ctx.atomic, channel); in dma_silabs_configure()
451 if (channel > data->dma_ctx.dma_channels) { in dma_silabs_start()
467 if (channel > data->dma_ctx.dma_channels) { in dma_silabs_stop()
485 if (channel > data->dma_ctx.dma_channels) { in dma_silabs_get_status()
489 if (!atomic_test_bit(data->dma_ctx.atomic, channel)) { in dma_silabs_get_status()
551 if (channel > data->dma_ctx.dma_channels) { in silabs_ldma_append_block()
555 if (!atomic_test_bit(data->dma_ctx.atomic, channel)) { in silabs_ldma_append_block()
[all …]
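
The entries above illustrate the common pattern in these drivers: the per-instance data embeds a struct dma_context (Zephyr's generic DMA context carrying magic, dma_channels and an atomic channel-busy bitmap), and each API call validates the channel number and its busy bit against it. A minimal sketch of that pattern, using hypothetical names (struct my_dma_data, my_dma_start) rather than any of the drivers listed here:

#include <zephyr/kernel.h>
#include <zephyr/drivers/dma.h>
#include <errno.h>

/* dma_ctx is conventionally the first member so the generic channel-request
 * helpers can cast dev->data to struct dma_context. */
struct my_dma_data {
	struct dma_context dma_ctx;
	ATOMIC_DEFINE(channels_atomic, 8); /* backing storage; dma_ctx.atomic points here after init */
};

static int my_dma_start(const struct device *dev, uint32_t channel)
{
	struct my_dma_data *data = dev->data;

	/* reject out-of-range channel numbers, as the configure/start/stop/status paths above do */
	if (channel >= (uint32_t)data->dma_ctx.dma_channels) {
		return -EINVAL;
	}

	/* only act on channels marked busy during configure (atomic_set_bit above) */
	if (!atomic_test_bit(data->dma_ctx.atomic, channel)) {
		return -EINVAL;
	}

	/* ... program the hardware for this channel ... */
	return 0;
}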
dma_silabs_siwx91x.c
54 struct dma_context dma_ctx; member
376 if (channel >= data->dma_ctx.dma_channels) { in siwx91x_dma_configure()
411 atomic_set_bit(data->dma_ctx.atomic, channel); in siwx91x_dma_configure()
430 if (channel >= data->dma_ctx.dma_channels) { in siwx91x_dma_reload()
481 if (channel >= data->dma_ctx.dma_channels) { in siwx91x_dma_start()
505 if (channel >= data->dma_ctx.dma_channels) { in siwx91x_dma_stop()
525 if (channel >= data->dma_ctx.dma_channels) { in siwx91x_dma_get_status()
529 if (!atomic_test_bit(data->dma_ctx.atomic, channel)) { in siwx91x_dma_get_status()
615 if (!channel || channel > data->dma_ctx.dma_channels) { in siwx91x_dma_isr()
681 .dma_ctx.magic = DMA_MAGIC, \
[all …]
dma_dw_axi.c
231 struct dma_context dma_ctx; member
303 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_isr()
457 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_config()
517 if (cfg->channel_priority < dw_dev_data->dma_ctx.dma_channels) { in dma_dw_axi_config()
646 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_start()
708 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_stop()
759 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_resume()
790 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_suspend()
842 for (i = 0; i < dw_dev_data->dma_ctx.dma_channels; i++) { in dma_dw_axi_init()
883 .dma_ctx = { \
dma_smartbond.c
156 struct dma_context dma_ctx; member
961 for (int i = 0; i < data->dma_ctx.dma_channels; i++) { in dma_smartbond_is_sleep_allowed()
962 if (atomic_test_bit(data->dma_ctx.atomic, i)) { in dma_smartbond_is_sleep_allowed()
1018 data->dma_ctx.magic = DMA_MAGIC; in dma_smartbond_init()
1019 data->dma_ctx.dma_channels = DMA_CHANNELS_COUNT; in dma_smartbond_init()
1020 data->dma_ctx.atomic = data->channels_atomic; in dma_smartbond_init()
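
Several drivers in this listing (dma_smartbond, dma_nxp_sdma, dma_emul, dma_mcux_edma, dma_intel_adsp_gpdma) fill the context in at device init time. A minimal sketch of that pattern, where MY_DMA_CHANNELS and the surrounding struct names are placeholders:

static int my_dma_init(const struct device *dev)
{
	struct my_dma_data *data = dev->data;

	data->dma_ctx.magic = DMA_MAGIC;               /* marks the context as valid for dma_request_channel() */
	data->dma_ctx.dma_channels = MY_DMA_CHANNELS;  /* e.g. DMA_CHANNELS_COUNT or an SoC feature macro */
	data->dma_ctx.atomic = data->channels_atomic;  /* point the context at the per-channel busy bitmap */

	return 0;
}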
dma_intel_adsp_hda.c
450 struct dma_context *dma_ctx; in intel_adsp_hda_dma_isr() local
474 dma_ctx = (struct dma_context *)host_dev[i]->data; in intel_adsp_hda_dma_isr()
476 enabled_chs = atomic_get(dma_ctx->atomic); in intel_adsp_hda_dma_isr()
477 for (j = 0; enabled_chs && j < dma_ctx->dma_channels; j++) { in intel_adsp_hda_dma_isr()
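
The HDA entry also shows the ISR side: the context is recovered by casting the device's data pointer, and its atomic word is read once so channels that were never requested can be skipped. A rough sketch of that loop, with illustrative names:

static void my_dma_isr(const struct device *dev)
{
	/* dma_context sits at the start of the driver data, so the cast is valid */
	struct dma_context *dma_ctx = (struct dma_context *)dev->data;
	atomic_val_t enabled_chs = atomic_get(dma_ctx->atomic);

	for (uint32_t j = 0; enabled_chs && j < (uint32_t)dma_ctx->dma_channels; j++) {
		if (!(enabled_chs & BIT(j))) {
			continue;
		}
		enabled_chs &= ~BIT(j);
		/* ... check channel j's hardware status and invoke its callback ... */
	}
}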
dma_nxp_sdma.c
47 struct dma_context dma_ctx; member
472 data->dma_ctx.magic = DMA_MAGIC; in dma_nxp_sdma_init()
473 data->dma_ctx.dma_channels = FSL_FEATURE_SDMA_MODULE_CHANNEL; in dma_nxp_sdma_init()
474 data->dma_ctx.atomic = data->channels_atomic; in dma_nxp_sdma_init()
dma_andes_atcdmacx00.c
154 struct dma_context dma_ctx; member
573 data->dma_ctx.atomic = &data->channel_flags; in dma_atcdmacx00_init()
614 .dma_ctx.magic = DMA_MAGIC, \
615 .dma_ctx.dma_channels = DT_INST_PROP(n, dma_channels), \
616 .dma_ctx.atomic = ATOMIC_INIT(0), \
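
The Andes entry (like the siwx91x one above) sets most of the context up in the static, devicetree-driven instance initializer, leaving only the atomic pointer for run time (line 573). A sketch of that style, assuming a hypothetical MY_DMA_INIT macro and that DT_DRV_COMPAT is defined for the driver:

#define MY_DMA_INIT(n)                                                        \
	static struct my_dma_data my_dma_data_##n = {                        \
		.dma_ctx.magic = DMA_MAGIC,                                   \
		.dma_ctx.dma_channels = DT_INST_PROP(n, dma_channels),        \
		.dma_ctx.atomic = ATOMIC_INIT(0), /* re-pointed at real storage in init, cf. line 573 */ \
	};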
dma_emul.c
62 struct dma_context dma_ctx; member
539 data->dma_ctx.magic = DMA_MAGIC; in dma_emul_init()
540 data->dma_ctx.dma_channels = config->num_channels; in dma_emul_init()
541 data->dma_ctx.atomic = data->channels_atomic; in dma_emul_init()
dma_stm32_bdma.h
35 struct dma_context dma_ctx; member
dma_intel_adsp_gpdma.c
471 dev_data->dma_ctx.magic = DMA_MAGIC; in intel_adsp_gpdma_init()
472 dev_data->dma_ctx.dma_channels = DW_MAX_CHAN; in intel_adsp_gpdma_init()
473 dev_data->dma_ctx.atomic = dev_data->channels_atomic; in intel_adsp_gpdma_init()
dma_stm32.h
34 struct dma_context dma_ctx; member
dma_stm32.c
661 ((struct dma_stm32_data *)dev->data)->dma_ctx.magic = 0; in dma_stm32_init()
662 ((struct dma_stm32_data *)dev->data)->dma_ctx.dma_channels = 0; in dma_stm32_init()
663 ((struct dma_stm32_data *)dev->data)->dma_ctx.atomic = 0; in dma_stm32_init()
dma_stm32_bdma.c
810 ((struct bdma_stm32_data *)dev->data)->dma_ctx.magic = 0; in bdma_stm32_init()
811 ((struct bdma_stm32_data *)dev->data)->dma_ctx.dma_channels = 0; in bdma_stm32_init()
812 ((struct bdma_stm32_data *)dev->data)->dma_ctx.atomic = 0; in bdma_stm32_init()
dma_stm32u5.c
712 ((struct dma_stm32_data *)dev->data)->dma_ctx.magic = 0; in dma_stm32_init()
713 ((struct dma_stm32_data *)dev->data)->dma_ctx.dma_channels = 0; in dma_stm32_init()
714 ((struct dma_stm32_data *)dev->data)->dma_ctx.atomic = 0; in dma_stm32_init()
dma_mcux_edma.c
95 struct dma_context dma_ctx; member
934 data->dma_ctx.magic = DMA_MAGIC; in dma_mcux_edma_init()
935 data->dma_ctx.dma_channels = config->dma_channels; in dma_mcux_edma_init()
936 data->dma_ctx.atomic = data->channels_atomic; in dma_mcux_edma_init()
dma_dw_common.h
247 struct dma_context dma_ctx; member

Completed in 49 milliseconds