Lines matching refs:sdma (drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c), grouped by containing function; gaps in the line numbers mark elided source lines.
In amdgpu_sdma_get_instance_from_ring():
   45  for (i = 0; i < adev->sdma.num_instances; i++)
   46      if (ring == &adev->sdma.instance[i].ring ||
   47          ring == &adev->sdma.instance[i].page)
   48          return &adev->sdma.instance[i];

In amdgpu_sdma_get_index_from_ring():
   58  for (i = 0; i < adev->sdma.num_instances; i++) {
   59      if (ring == &adev->sdma.instance[i].ring ||
   60          ring == &adev->sdma.instance[i].page) {
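
Both lookups above use the same idiom: a ring pointer is mapped back to its owning SDMA instance by comparing it against each instance's gfx ring and page queue. Below is a minimal user-space sketch of that idiom; the struct layouts are simplified stand-ins, not the real amdgpu definitions.

#include <stddef.h>

#define NUM_INSTANCES 2

struct ring { int id; };

struct sdma_instance {
    struct ring ring;   /* gfx queue */
    struct ring page;   /* optional page queue */
};

static struct sdma_instance instances[NUM_INSTANCES];

/* Pointer-returning variant, as in amdgpu_sdma_get_instance_from_ring(). */
static struct sdma_instance *instance_from_ring(const struct ring *ring)
{
    for (int i = 0; i < NUM_INSTANCES; i++)
        if (ring == &instances[i].ring || ring == &instances[i].page)
            return &instances[i];
    return NULL;    /* ring is not an SDMA ring */
}

/* Index-returning variant, as in amdgpu_sdma_get_index_from_ring(): the
 * slot comes back through an out parameter, the return value is a status. */
static int index_from_ring(const struct ring *ring, int *index)
{
    for (int i = 0; i < NUM_INSTANCES; i++) {
        if (ring == &instances[i].ring || ring == &instances[i].page) {
            *index = i;
            return 0;
        }
    }
    return -1;
}

The two return conventions presumably coexist because some callers only need the instance data while others need the numeric index, for example to address per-instance state.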
In amdgpu_sdma_ras_late_init():
  103  for (i = 0; i < adev->sdma.num_instances; i++) {
  104      r = amdgpu_irq_get(adev, &adev->sdma.ecc_irq,

In amdgpu_sdma_process_ecc_irq():
  136  struct ras_common_if *ras_if = adev->sdma.ras_if;
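
amdgpu_sdma_ras_late_init() enables one ECC interrupt source per SDMA instance inside a loop and bails out on the first failure. The sketch below shows that enable-all-or-unwind shape with hypothetical irq_get()/irq_put() stand-ins for amdgpu_irq_get()/amdgpu_irq_put(); the real driver routes failures through its RAS late-fini path rather than the inline unwind shown here.

static int irq_get(int instance) { (void)instance; return 0; }  /* stand-in, 0 = success */
static void irq_put(int instance) { (void)instance; }           /* stand-in */

static int ecc_irq_enable_all(int num_instances)
{
    int i, r;

    for (i = 0; i < num_instances; i++) {
        r = irq_get(i);     /* one ECC interrupt per instance */
        if (r)
            goto unwind;
    }
    return 0;

unwind:
    while (--i >= 0)        /* disable those already enabled */
        irq_put(i);
    return r;
}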
In amdgpu_sdma_destroy_inst_ctx():
  193  for (i = 0; i < adev->sdma.num_instances; i++) {
  194      amdgpu_ucode_release(&adev->sdma.instance[i].fw);
  199  memset((void *)adev->sdma.instance, 0,
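
Teardown in amdgpu_sdma_destroy_inst_ctx() is two steps: release each instance's firmware reference, then memset the whole instance array so no stale pointers survive. (Judging by the gap at lines 195-198, the loop body also handles the case where all instances share one duplicated firmware image.) A stand-alone sketch of the same shape, with an opaque stand-in for struct firmware:

#include <string.h>

struct firmware;    /* opaque stand-in */

struct sdma_instance { const struct firmware *fw; };

#define MAX_INSTANCES 16
static struct sdma_instance instance[MAX_INSTANCES];

/* Stand-in for amdgpu_ucode_release(): drop the reference and clear the
 * pointer so a second release is harmless. */
static void ucode_release(const struct firmware **fw)
{
    *fw = NULL;
}

static void destroy_inst_ctx(int num_instances)
{
    for (int i = 0; i < num_instances; i++)
        ucode_release(&instance[i].fw);

    /* wipe everything, not just the fw pointers */
    memset(instance, 0, sizeof(instance));
}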
In amdgpu_sdma_init_microcode():
  216      err = amdgpu_ucode_request(adev, &adev->sdma.instance[instance].fw,
  220      err = amdgpu_ucode_request(adev, &adev->sdma.instance[instance].fw,
  227          adev->sdma.instance[instance].fw->data;
  235  err = amdgpu_sdma_init_inst_ctx(&adev->sdma.instance[instance]);
  240  for (i = 1; i < adev->sdma.num_instances; i++)
  241      memcpy((void *)&adev->sdma.instance[i],
  242             (void *)&adev->sdma.instance[0],
  252  for (i = 0; i < adev->sdma.num_instances; i++) {
  266      adev->sdma.num_inst_per_aid == i) {
  271          info->fw = adev->sdma.instance[i].fw;
  279      adev->sdma.instance[0].fw->data;
  282      info->fw = adev->sdma.instance[0].fw;
  287      info->fw = adev->sdma.instance[0].fw;
  293      adev->sdma.instance[0].fw->data;
  296      info->fw = adev->sdma.instance[0].fw;
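
Lines 240-242 are the duplicate-firmware path: when every SDMA engine runs the same microcode, only instance 0 is requested and parsed, and its initialized state is then byte-copied into the remaining slots. A minimal sketch of that clone step, using a simplified stand-in struct:

#include <string.h>

struct sdma_instance {
    unsigned int fw_version;
    unsigned int feature_version;
    /* ... plus whatever else was filled in for instance 0 ... */
};

#define MAX_INSTANCES 16
static struct sdma_instance instance[MAX_INSTANCES];

static void clone_instance0(int num_instances)
{
    /* instance 0 already holds the parsed firmware state */
    for (int i = 1; i < num_instances; i++)
        memcpy(&instance[i], &instance[0], sizeof(struct sdma_instance));
}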
In amdgpu_sdma_ras_sw_init():
  319  if (!adev->sdma.ras)
  322  ras = adev->sdma.ras;
  333  adev->sdma.ras_if = &ras->ras_block.ras_comm;

In amdgpu_debugfs_sdma_sched_mask_set():
  363  if (adev->sdma.has_page_queue)
  371  mask = BIT_ULL(adev->sdma.num_instances * num_ring) - 1;
  376  for (i = 0; i < adev->sdma.num_instances; ++i) {
  377      ring = &adev->sdma.instance[i].ring;
  378      if (adev->sdma.has_page_queue)
  379          page = &adev->sdma.instance[i].page;
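
The debugfs setter computes its valid-bit mask from the instance count times the rings per instance: two when a page queue exists, otherwise one. A sketch of the arithmetic; BIT_ULL matches the kernel macro, while the per-ring bit layout (gfx ring at bit i * num_ring, page queue at the next bit up) is an assumption consistent with that stride, not something visible in the listing:

#include <stdint.h>
#include <stdbool.h>

#define BIT_ULL(n) (1ULL << (n))

static uint64_t valid_sched_mask(unsigned int num_instances, bool has_page_queue)
{
    unsigned int num_ring = has_page_queue ? 2 : 1;

    /* all-ones over every schedulable ring, e.g. 4 instances with page
     * queues -> 8 rings -> 0xff */
    return BIT_ULL(num_instances * num_ring) - 1;
}

/* assumed layout: instance i's gfx ring at bit i * num_ring, its page
 * queue (when present) at the bit above it */
static bool ring_bit_set(uint64_t val, unsigned int i,
                         unsigned int ring_in_inst, bool has_page_queue)
{
    unsigned int num_ring = has_page_queue ? 2 : 1;

    return val & BIT_ULL(i * num_ring + ring_in_inst);
}

The getter at lines 410-418 walks the same instance/ring layout in the opposite direction to rebuild the mask from the schedulers' current state.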
In amdgpu_debugfs_sdma_sched_mask_get():
  410  if (adev->sdma.has_page_queue)
  415  for (i = 0; i < adev->sdma.num_instances; ++i) {
  416      ring = &adev->sdma.instance[i].ring;
  417      if (adev->sdma.has_page_queue)
  418          page = &adev->sdma.instance[i].page;

In amdgpu_debugfs_sdma_sched_mask_init():
  450  if (!(adev->sdma.num_instances > 1))

In amdgpu_get_sdma_reset_mask():
  468  return amdgpu_show_reset_mask(buf, adev->sdma.supported_reset);

In amdgpu_sdma_sysfs_reset_mask_init():
  481  if (adev->sdma.num_instances) {

In amdgpu_sdma_sysfs_reset_mask_fini():
  496  if (adev->sdma.num_instances)

In amdgpu_sdma_get_shared_ring():
  503  if (adev->sdma.has_page_queue &&
  504      (ring->me < adev->sdma.num_instances) &&
  505      (ring == &adev->sdma.instance[ring->me].ring))
  506      return &adev->sdma.instance[ring->me].page;
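
amdgpu_sdma_get_shared_ring() is nearly complete in the listing: given a gfx ring it hands back the page queue that shares the same engine, and NULL in every other case. A stand-alone reconstruction with simplified stand-in types:

#include <stddef.h>
#include <stdbool.h>

struct ring { int me; /* index of the owning SDMA instance */ };

struct sdma_instance { struct ring ring; struct ring page; };

#define MAX_INSTANCES 16
static struct sdma_instance instance[MAX_INSTANCES];
static int num_instances = 2;
static bool has_page_queue = true;

/* Given an SDMA gfx ring, return the page queue sharing its engine;
 * anything else (no page queues, out-of-range index, or the ring
 * already being the page queue) yields NULL. */
static struct ring *get_shared_ring(struct ring *ring)
{
    if (has_page_queue &&
        ring->me < num_instances &&
        ring == &instance[ring->me].ring)
        return &instance[ring->me].page;
    return NULL;
}

amdgpu_sdma_is_shared_inv_eng() below is the mirror-image predicate: it reports whether a given ring is the page queue of a valid instance, subject to further conditions elided at lines 524-528.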
In amdgpu_sdma_is_shared_inv_eng():
  523  if (!adev->sdma.has_page_queue || i >= adev->sdma.num_instances)
  529  return (ring == &adev->sdma.instance[i].page);

In amdgpu_sdma_soft_reset():
  536  struct amdgpu_sdma_instance *sdma_instance = &adev->sdma.instance[instance_id];

In amdgpu_sdma_reset_engine():
  557  struct amdgpu_sdma_instance *sdma_instance = &adev->sdma.instance[instance_id];
  570  if (adev->sdma.has_page_queue)
  576  if (adev->sdma.has_page_queue)
  589  if (adev->sdma.has_page_queue)
  602  if (adev->sdma.has_page_queue) {
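
The four has_page_queue guards in amdgpu_sdma_reset_engine() suggest a stop/reset/restart choreography in which every page-queue step is conditional on the queue existing. The sketch below is only a guess at that shape; the helper names are stand-ins (the driver actually stops and restarts drm_sched work queues), and details such as locking and the soft-reset call at line 536 are omitted.

#include <stdbool.h>

static bool has_page_queue;

static void sched_stop(const char *queue)  { (void)queue; }  /* stand-in */
static void sched_start(const char *queue) { (void)queue; }  /* stand-in */
static int do_engine_reset(void) { return 0; }               /* stand-in */

static int reset_engine(void)
{
    int r;

    /* quiesce both queues before touching the engine... */
    sched_stop("gfx");
    if (has_page_queue)
        sched_stop("page");

    r = do_engine_reset();

    /* ...and restart them afterwards, the page queue only if it exists */
    if (has_page_queue)
        sched_start("page");
    sched_start("gfx");

    return r;
}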