Home
last modified time | relevance | path

Searched refs:sdma (Results 1 – 25 of 51) sorted by relevance

123

/drivers/dma/
A D imx-sdma.c 702 struct sdma_engine *sdma = sdmac->sdma; in sdma_config_ownership() local
804 struct sdma_engine *sdma = sdmac->sdma; in sdma_event_enable() local
824 struct sdma_engine *sdma = sdmac->sdma; in sdma_event_disable() local
843 struct sdma_engine *sdma = sdmac->sdma; in sdma_start_desc() local
988 struct sdma_engine *sdma = sdmac->sdma; in sdma_get_pc() local
1112 struct sdma_engine *sdma = sdmac->sdma; in sdma_load_context() local
1178 struct sdma_engine *sdma = sdmac->sdma; in sdma_disable_channel() local
1239 struct sdma_engine *sdma = sdmac->sdma; in sdma_set_watermarklevel_for_p2p() local
1371 struct sdma_engine *sdma = sdmac->sdma; in sdma_set_channel_priority() local
1518 struct sdma_engine *sdma = sdmac->sdma; in sdma_free_chan_resources() local
[all …]
A D TODO 8 - imx-sdma
/drivers/net/ethernet/marvell/prestera/
A D prestera_rxtx.c 107 struct prestera_sdma sdma; member
130 return sdma->map_addr + pa; in prestera_sdma_map()
500 tx_ring = &sdma->tx_ring; in prestera_sdma_tx_recycle_work_fn()
616 struct prestera_sdma *sdma = &sw->rxtx->sdma; in prestera_sdma_switch_init() local
631 sdma->sw = sw; in prestera_sdma_switch_init()
636 if (!sdma->desc_pool) in prestera_sdma_switch_init()
653 sdma); in prestera_sdma_switch_init()
658 if (!sdma->napi_dev) { in prestera_sdma_switch_init()
664 netif_napi_add(sdma->napi_dev, &sdma->rx_napi, prestera_sdma_rx_poll); in prestera_sdma_switch_init()
684 struct prestera_sdma *sdma = &sw->rxtx->sdma; in prestera_sdma_switch_fini() local
[all …]
/drivers/gpu/drm/amd/amdgpu/
A D amdgpu_sdma.c 48 return &adev->sdma.instance[i]; in amdgpu_sdma_get_instance_from_ring()
319 if (!adev->sdma.ras) in amdgpu_sdma_ras_sw_init()
322 ras = adev->sdma.ras; in amdgpu_sdma_ras_sw_init()
363 if (adev->sdma.has_page_queue) in amdgpu_debugfs_sdma_sched_mask_set()
378 if (adev->sdma.has_page_queue) in amdgpu_debugfs_sdma_sched_mask_set()
410 if (adev->sdma.has_page_queue) in amdgpu_debugfs_sdma_sched_mask_get()
417 if (adev->sdma.has_page_queue) in amdgpu_debugfs_sdma_sched_mask_get()
481 if (adev->sdma.num_instances) { in amdgpu_sdma_sysfs_reset_mask_init()
496 if (adev->sdma.num_instances) in amdgpu_sdma_sysfs_reset_mask_fini()
523 if (!adev->sdma.has_page_queue || i >= adev->sdma.num_instances) in amdgpu_sdma_is_shared_inv_eng()
[all …]
A D sdma_v4_4_2.c 357 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_4_2_ring_insert_nop()
500 sdma[i] = &adev->sdma.instance[i].ring; in sdma_v4_4_2_inst_gfx_stop()
1272 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_4_2_ring_pad_ib()
2057 if (!adev->sdma.ip_dump) in sdma_v4_4_2_print_ip_state()
2230 adev->sdma.trap_irq.num_types = adev->sdma.num_instances; in sdma_v4_4_2_set_irq_funcs()
2231 adev->sdma.ecc_irq.num_types = adev->sdma.num_instances; in sdma_v4_4_2_set_irq_funcs()
2232 adev->sdma.vm_hole_irq.num_types = adev->sdma.num_instances; in sdma_v4_4_2_set_irq_funcs()
2233 adev->sdma.doorbell_invalid_irq.num_types = adev->sdma.num_instances; in sdma_v4_4_2_set_irq_funcs()
2234 adev->sdma.pool_timeout_irq.num_types = adev->sdma.num_instances; in sdma_v4_4_2_set_irq_funcs()
2235 adev->sdma.srbm_write_irq.num_types = adev->sdma.num_instances; in sdma_v4_4_2_set_irq_funcs()
[all …]
A D sdma_v6_0.c 247 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v6_0_ring_insert_nop()
1150 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v6_0_ring_pad_ib()
1380 if ((adev->sdma.instance[0].fw_version >= 24) && !adev->sdma.disable_uq) in sdma_v6_0_sw_init()
1384 if ((adev->sdma.instance[0].fw_version >= 18) && !adev->sdma.disable_uq) in sdma_v6_0_sw_init()
1388 if ((adev->sdma.instance[0].fw_version >= 21) && !adev->sdma.disable_uq) in sdma_v6_0_sw_init()
1392 if ((adev->sdma.instance[0].fw_version >= 25) && !adev->sdma.disable_uq) in sdma_v6_0_sw_init()
1396 if ((adev->sdma.instance[0].fw_version >= 14) && !adev->sdma.disable_uq) in sdma_v6_0_sw_init()
1400 if ((adev->sdma.instance[0].fw_version >= 17) && !adev->sdma.disable_uq) in sdma_v6_0_sw_init()
1404 if ((adev->sdma.instance[0].fw_version >= 15) && !adev->sdma.disable_uq) in sdma_v6_0_sw_init()
1408 if ((adev->sdma.instance[0].fw_version >= 10) && !adev->sdma.disable_uq) in sdma_v6_0_sw_init()
[all …]
A D sdma_v4_0.c 790 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_0_ring_insert_nop()
1676 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_0_ring_pad_ib()
1925 adev->sdma.ip_dump = ptr; in sdma_v4_0_sw_init()
2360 if (!adev->sdma.ip_dump) in sdma_v4_0_print_ip_state()
2381 if (!adev->sdma.ip_dump) in sdma_v4_0_dump_ip_state()
2524 adev->sdma.trap_irq.num_types = adev->sdma.num_instances; in sdma_v4_0_set_irq_funcs()
2525 adev->sdma.ecc_irq.num_types = adev->sdma.num_instances; in sdma_v4_0_set_irq_funcs()
2530 adev->sdma.vm_hole_irq.num_types = adev->sdma.num_instances; in sdma_v4_0_set_irq_funcs()
2531 adev->sdma.doorbell_invalid_irq.num_types = adev->sdma.num_instances; in sdma_v4_0_set_irq_funcs()
2532 adev->sdma.pool_timeout_irq.num_types = adev->sdma.num_instances; in sdma_v4_0_set_irq_funcs()
[all …]
A D sdma_v3_0.c 324 info->fw = adev->sdma.instance[i].fw; in sdma_v3_0_init_microcode()
406 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_insert_nop()
648 ring = &adev->sdma.instance[i].ring; in sdma_v3_0_gfx_resume()
1015 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_pad_ib()
1092 adev->sdma.num_instances = 1; in sdma_v3_0_early_init()
1119 &adev->sdma.trap_irq); in sdma_v3_0_sw_init()
1248 adev->sdma.srbm_soft_reset = 0; in sdma_v3_0_check_soft_reset()
1258 if (!adev->sdma.srbm_soft_reset) in sdma_v3_0_pre_soft_reset()
1277 if (!adev->sdma.srbm_soft_reset) in sdma_v3_0_post_soft_reset()
1297 if (!adev->sdma.srbm_soft_reset) in sdma_v3_0_soft_reset()
[all …]
A D cik_sdma.c 205 if (sdma && sdma->burst_nop && (i == 0)) in cik_sdma_ring_insert_nop()
434 ring = &adev->sdma.instance[i].ring; in cik_sdma_gfx_resume()
497 ring = &adev->sdma.instance[i].ring; in cik_sdma_gfx_resume()
539 if (!adev->sdma.instance[i].fw) in cik_sdma_load_microcode()
806 if (sdma && sdma->burst_nop && (i == 0)) in cik_sdma_ring_pad_ib()
949 &adev->sdma.trap_irq); in cik_sdma_sw_init()
955 &adev->sdma.illegal_inst_irq); in cik_sdma_sw_init()
966 ring = &adev->sdma.instance[i].ring; in cik_sdma_sw_init()
970 &adev->sdma.trap_irq, in cik_sdma_sw_init()
1254 adev->sdma.instance[i].ring.me = i; in cik_sdma_set_ring_funcs()
[all …]
A D sdma_v7_0.c 251 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v7_0_ring_insert_nop()
1171 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v7_0_ring_pad_ib()
1266 adev->sdma.disable_uq = true; in sdma_v7_0_early_init()
1331 &adev->sdma.trap_irq, in sdma_v7_0_sw_init()
1338 adev->sdma.supported_reset = in sdma_v7_0_sw_init()
1349 adev->sdma.ip_dump = ptr; in sdma_v7_0_sw_init()
1356 if ((adev->sdma.instance[0].fw_version >= 7966358) && !adev->sdma.disable_uq) in sdma_v7_0_sw_init()
1380 kfree(adev->sdma.ip_dump); in sdma_v7_0_sw_fini()
1624 if (!adev->sdma.ip_dump) in sdma_v7_0_print_ip_state()
1645 if (!adev->sdma.ip_dump) in sdma_v7_0_dump_ip_state()
[all …]
A D sdma_v2_4.c 165 info->fw = adev->sdma.instance[i].fw; in sdma_v2_4_init_microcode()
230 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v2_4_ring_insert_nop()
409 ring = &adev->sdma.instance[i].ring; in sdma_v2_4_gfx_resume()
472 ring = &adev->sdma.instance[i].ring; in sdma_v2_4_gfx_resume()
742 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v2_4_ring_pad_ib()
839 &adev->sdma.trap_irq); in sdma_v2_4_sw_init()
845 &adev->sdma.illegal_inst_irq); in sdma_v2_4_sw_init()
851 &adev->sdma.illegal_inst_irq); in sdma_v2_4_sw_init()
856 ring = &adev->sdma.instance[i].ring; in sdma_v2_4_sw_init()
1148 adev->sdma.instance[i].ring.me = i; in sdma_v2_4_set_ring_funcs()
[all …]
A D sdma_v5_2.c 261 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_2_ring_insert_nop()
548 ring = &adev->sdma.instance[i].ring; in sdma_v5_2_gfx_resume_instance()
741 if (!adev->sdma.instance[i].fw) in sdma_v5_2_load_microcode()
1145 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_2_ring_pad_ib()
1315 &adev->sdma.trap_irq); in sdma_v5_2_sw_init()
1343 adev->sdma.supported_reset = in sdma_v5_2_sw_init()
1366 adev->sdma.ip_dump = ptr; in sdma_v5_2_sw_init()
1388 kfree(adev->sdma.ip_dump); in sdma_v5_2_sw_fini()
1895 if (!adev->sdma.ip_dump) in sdma_v5_2_print_ip_state()
1916 if (!adev->sdma.ip_dump) in sdma_v5_2_dump_ip_state()
[all …]
A D sdma_v5_0.c 413 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_0_ring_insert_nop()
894 if (!adev->sdma.instance[i].fw) in sdma_v5_0_load_microcode()
1245 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_0_ring_pad_ib()
1390 &adev->sdma.trap_irq); in sdma_v5_0_sw_init()
1397 &adev->sdma.trap_irq); in sdma_v5_0_sw_init()
1425 adev->sdma.supported_reset = in sdma_v5_0_sw_init()
1442 adev->sdma.ip_dump = ptr; in sdma_v5_0_sw_init()
1464 kfree(adev->sdma.ip_dump); in sdma_v5_0_sw_fini()
1887 if (!adev->sdma.ip_dump) in sdma_v5_0_print_ip_state()
1908 if (!adev->sdma.ip_dump) in sdma_v5_0_dump_ip_state()
[all …]
A D si_dma.c 134 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_stop()
149 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_start()
150 ring = &adev->sdma.instance[i].ring; in si_dma_start()
480 adev->sdma.num_instances = SDMA_MAX_INSTANCE; in si_dma_early_init()
498 &adev->sdma.trap_irq); in si_dma_sw_init()
504 &adev->sdma.trap_irq); in si_dma_sw_init()
509 ring = &adev->sdma.instance[i].ring; in si_dma_sw_init()
514 &adev->sdma.trap_irq, in si_dma_sw_init()
530 for (i = 0; i < adev->sdma.num_instances; i++) in si_dma_sw_fini()
752 for (i = 0; i < adev->sdma.num_instances; i++) in si_dma_set_ring_funcs()
[all …]
A D aqua_vanjaram.c 50 for (i = 0; i < adev->sdma.num_instances; i++) in aqua_vanjaram_doorbell_index_init()
175 num_sdma = adev->sdma.num_instances; in __aqua_vanjaram_get_xcp_ip_info()
285 max_res[AMDGPU_XCP_RES_DMA] = adev->sdma.num_instances; in aqua_vanjaram_get_xcp_res_info()
537 u32 mask, avail_inst, inst_mask = adev->sdma.sdma_mask; in aqua_vanjaram_init_soc_config()
541 adev->sdma.num_inst_per_aid = 4; in aqua_vanjaram_init_soc_config()
542 adev->sdma.num_instances = NUM_SDMA(adev->sdma.sdma_mask); in aqua_vanjaram_init_soc_config()
545 inst_mask >>= adev->sdma.num_inst_per_aid; in aqua_vanjaram_init_soc_config()
547 for (mask = (1 << adev->sdma.num_inst_per_aid) - 1; inst_mask; in aqua_vanjaram_init_soc_config()
548 inst_mask >>= adev->sdma.num_inst_per_aid, ++i) { in aqua_vanjaram_init_soc_config()
A D aldebaran.c 377 if (tmp_adev->sdma.ras && in aldebaran_mode2_restore_hwcontext()
378 tmp_adev->sdma.ras->ras_block.ras_late_init) { in aldebaran_mode2_restore_hwcontext()
379 r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev, in aldebaran_mode2_restore_hwcontext()
380 &tmp_adev->sdma.ras->ras_block.ras_comm); in aldebaran_mode2_restore_hwcontext()
A D sdma_v4_4.c 243 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_reset_ras_error_count()
256 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_query_ras_error_count()
A D amdgpu_dev_coredump.c 159 for (int i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_devcoredump_fw_info()
162 i, adev->sdma.instance[i].feature_version, in amdgpu_devcoredump_fw_info()
163 adev->sdma.instance[i].fw_version); in amdgpu_devcoredump_fw_info()
A D amdgpu_amdkfd.c 456 return adev->sdma.instance[0].fw_version; in amdgpu_amdkfd_get_fw_version()
459 return adev->sdma.instance[1].fw_version; in amdgpu_amdkfd_get_fw_version()
650 ring = &adev->sdma.instance[0].ring; in amdgpu_amdkfd_submit_ib()
653 ring = &adev->sdma.instance[1].ring; in amdgpu_amdkfd_submit_ib()
A D amdgpu_mes_ctx.h 104 } __aligned(PAGE_SIZE) sdma[AMDGPU_MES_CTX_MAX_SDMA_RINGS];
A D amdgpu_discovery.c 760 adev->sdma.sdma_mask &= in amdgpu_discovery_read_from_harvest_table()
1344 adev->sdma.sdma_mask = 0; in amdgpu_discovery_reg_base_init()
1413 if (adev->sdma.num_instances < in amdgpu_discovery_reg_base_init()
1415 adev->sdma.num_instances++; in amdgpu_discovery_reg_base_init()
1416 adev->sdma.sdma_mask |= in amdgpu_discovery_reg_base_init()
2564 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
2591 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
2618 adev->sdma.num_instances = 1; in amdgpu_discovery_set_ip_blocks()
2664 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
2692 adev->sdma.num_instances = 8; in amdgpu_discovery_set_ip_blocks()
[all …]
/drivers/infiniband/hw/hfi1/
A D vnic_sdma.c 30 struct hfi1_vnic_sdma *sdma; member
42 struct hfi1_vnic_sdma *vnic_sdma = tx->sdma; in vnic_sdma_complete()
130 struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[q_idx]; in hfi1_vnic_send_dma()
147 tx->sdma = vnic_sdma; in hfi1_vnic_send_dma()
231 struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[q_idx]; in hfi1_vnic_sdma_write_avail()
241 struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[i]; in hfi1_vnic_sdma_init()
A D Kconfig 16 sdma completions for unit testing
/drivers/soc/fsl/qe/
A D qe.c 368 struct sdma __iomem *sdma = &qe_immr->sdma; in qe_sdma_init() local
380 &sdma->sdebcr); in qe_sdma_init()
382 &sdma->sdmr); in qe_sdma_init()
/drivers/scsi/isci/
A D host.c 816 &ihost->scu_registers->sdma.unsolicited_frame_get_pointer); in sci_controller_initialize_unsolicited_frame_queue()
820 &ihost->scu_registers->sdma.unsolicited_frame_put_pointer); in sci_controller_initialize_unsolicited_frame_queue()
2203 val = readl(&ihost->scu_registers->sdma.pdma_configuration); in sci_controller_initialize()
2205 writel(val, &ihost->scu_registers->sdma.pdma_configuration); in sci_controller_initialize()
2207 val = readl(&ihost->scu_registers->sdma.cdma_configuration); in sci_controller_initialize()
2209 writel(val, &ihost->scu_registers->sdma.cdma_configuration); in sci_controller_initialize()
2319 &ihost->scu_registers->sdma.uf_header_base_address_lower); in sci_controller_mem_init()
2321 &ihost->scu_registers->sdma.uf_header_base_address_upper); in sci_controller_mem_init()
2324 &ihost->scu_registers->sdma.uf_address_table_lower); in sci_controller_mem_init()
2326 &ihost->scu_registers->sdma.uf_address_table_upper); in sci_controller_mem_init()
[all …]

Completed in 712 milliseconds

123