Lines matching refs:sdma in drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c (format: file line number, matched source line, enclosing function):
111 release_firmware(adev->sdma.instance[0].fw); in sdma_v5_2_destroy_inst_ctx()
113 memset((void *)adev->sdma.instance, 0, in sdma_v5_2_destroy_inst_ctx()
164 err = request_firmware(&adev->sdma.instance[0].fw, fw_name, adev->dev); in sdma_v5_2_init_microcode()
168 err = sdma_v5_2_init_inst_ctx(&adev->sdma.instance[0]); in sdma_v5_2_init_microcode()
172 for (i = 1; i < adev->sdma.num_instances; i++) in sdma_v5_2_init_microcode()
173 memcpy((void *)&adev->sdma.instance[i], in sdma_v5_2_init_microcode()
174 (void *)&adev->sdma.instance[0], in sdma_v5_2_init_microcode()
184 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_init_microcode()
187 info->fw = adev->sdma.instance[i].fw; in sdma_v5_2_init_microcode()
318 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v5_2_ring_insert_nop() local
322 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_2_ring_insert_nop()
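The two "local" hits at lines 318-322 show NOP insertion asking the owning instance whether it supports burst NOPs; if so, only the first packet carries a count that covers the whole run. A hedged sketch of that pattern, assuming the SDMA_PKT_NOP_HEADER_COUNT() field macro from the SDMA packet headers:

/* Sketch: burst-NOP insertion on an SDMA ring (lines 318-322). */
static void sdma_v5_2_ring_insert_nop_sketch(struct amdgpu_ring *ring,
                                             uint32_t count)
{
        struct amdgpu_sdma_instance *sdma =
                amdgpu_sdma_get_instance_from_ring(ring);
        int i;

        for (i = 0; i < count; i++)
                if (sdma && sdma->burst_nop && (i == 0))
                        /* One burst NOP whose count field absorbs the rest. */
                        amdgpu_ring_write(ring, ring->funcs->nop |
                                SDMA_PKT_NOP_HEADER_COUNT(count - 1));
                else
                        amdgpu_ring_write(ring, ring->funcs->nop);
}

The same burst_nop check reappears in sdma_v5_2_ring_pad_ib() (lines 1125-1131), where the IB is padded with NOPs instead of the ring.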
474 struct amdgpu_ring *sdma0 = &adev->sdma.instance[0].ring; in sdma_v5_2_gfx_stop()
475 struct amdgpu_ring *sdma1 = &adev->sdma.instance[1].ring; in sdma_v5_2_gfx_stop()
476 struct amdgpu_ring *sdma2 = &adev->sdma.instance[2].ring; in sdma_v5_2_gfx_stop()
477 struct amdgpu_ring *sdma3 = &adev->sdma.instance[3].ring; in sdma_v5_2_gfx_stop()
487 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_stop()
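Lines 474-487 name one ring per instance and then loop over num_instances to stop the gfx queues. A sketch of that stop path; the register and field names (mmSDMA0_GFX_RB_CNTL/IB_CNTL, RB_ENABLE/IB_ENABLE) follow the driver's register headers but are used here only as an illustration:

/* Sketch: disable the ring buffer and IB fetch of every SDMA instance. */
static void sdma_v5_2_gfx_stop_sketch(struct amdgpu_device *adev)
{
        u32 rb_cntl, ib_cntl;
        int i;

        for (i = 0; i < adev->sdma.num_instances; i++) {
                rb_cntl = RREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_CNTL));
                rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, 0);
                WREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_CNTL), rb_cntl);

                ib_cntl = RREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_IB_CNTL));
                ib_cntl = REG_SET_FIELD(ib_cntl, SDMA0_GFX_IB_CNTL, IB_ENABLE, 0);
                WREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_IB_CNTL), ib_cntl);
        }
}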
546 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_ctx_switch_enable()
581 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_enable()
609 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_resume()
610 ring = &adev->sdma.instance[i].ring; in sdma_v5_2_gfx_resume()
783 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_load_microcode()
784 if (!adev->sdma.instance[i].fw) in sdma_v5_2_load_microcode()
787 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v5_2_load_microcode()
792 (adev->sdma.instance[i].fw->data + in sdma_v5_2_load_microcode()
803 WREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_UCODE_ADDR), adev->sdma.instance[i].fw_version); in sdma_v5_2_load_microcode()
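Lines 783-803 are the legacy (non-PSP) microcode load: for each instance that has firmware, the ucode payload behind the header is streamed through the UCODE_DATA register and the firmware version is written back to UCODE_ADDR. A hedged sketch for a single instance, assuming mmSDMA0_UCODE_DATA alongside the mmSDMA0_UCODE_ADDR seen at line 803:

/* Sketch: direct microcode load for SDMA instance i (lines 783-803). */
static int sdma_v5_2_load_one_microcode_sketch(struct amdgpu_device *adev, int i)
{
        const struct sdma_firmware_header_v1_0 *hdr;
        const __le32 *fw_data;
        u32 fw_size;
        int j;

        if (!adev->sdma.instance[i].fw)
                return -EINVAL;

        hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data;
        fw_size = le32_to_cpu(hdr->header.ucode_size_bytes) / 4;
        fw_data = (const __le32 *)(adev->sdma.instance[i].fw->data +
                   le32_to_cpu(hdr->header.ucode_array_offset_bytes));

        /* Reset the write pointer, stream the dwords, then report the version. */
        WREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_UCODE_ADDR), 0);
        for (j = 0; j < fw_size; j++)
                WREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_UCODE_DATA),
                       le32_to_cpu(fw_data[j]));
        WREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_UCODE_ADDR),
               adev->sdma.instance[i].fw_version);

        return 0;
}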
816 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_soft_reset()
1125 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v5_2_ring_pad_ib() local
1131 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_2_ring_pad_ib()
1269 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_sw_init()
1272 &adev->sdma.trap_irq); in sdma_v5_2_sw_init()
1283 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_sw_init()
1284 ring = &adev->sdma.instance[i].ring; in sdma_v5_2_sw_init()
1296 r = amdgpu_ring_init(adev, ring, 1024, &adev->sdma.trap_irq, in sdma_v5_2_sw_init()
1311 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v5_2_sw_fini()
1312 amdgpu_ring_fini(&adev->sdma.instance[i].ring); in sdma_v5_2_sw_fini()
1361 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_is_idle()
1461 amdgpu_fence_process(&adev->sdma.instance[0].ring); in sdma_v5_2_process_trap_irq()
1477 amdgpu_fence_process(&adev->sdma.instance[1].ring); in sdma_v5_2_process_trap_irq()
1493 amdgpu_fence_process(&adev->sdma.instance[2].ring); in sdma_v5_2_process_trap_irq()
1509 amdgpu_fence_process(&adev->sdma.instance[3].ring); in sdma_v5_2_process_trap_irq()
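Lines 1461-1509 show the trap handler fanning out to one ring per instance. A sketch of that dispatch, assuming the SOC15 IH client ids select the instance and that ring_id 0 is the instance's gfx queue; the exact case structure in the driver may differ:

/* Sketch: route an SDMA trap interrupt to the right instance's gfx ring. */
static int sdma_v5_2_process_trap_irq_sketch(struct amdgpu_device *adev,
                                             struct amdgpu_irq_src *source,
                                             struct amdgpu_iv_entry *entry)
{
        u32 instance;

        switch (entry->client_id) {
        case SOC15_IH_CLIENTID_SDMA0:
                instance = 0;
                break;
        case SOC15_IH_CLIENTID_SDMA1:
                instance = 1;
                break;
        case SOC15_IH_CLIENTID_SDMA2:
                instance = 2;
                break;
        case SOC15_IH_CLIENTID_SDMA3:
                instance = 3;
                break;
        default:
                return 0;
        }

        /* ring_id 0 is the instance's gfx queue. */
        if (entry->ring_id == 0)
                amdgpu_fence_process(&adev->sdma.instance[instance].ring);

        return 0;
}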
1539 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_update_medium_grain_clock_gating()
1541 …if (adev->sdma.instance[i].fw_version < 70 && adev->ip_versions[SDMA0_HWIP][0] == IP_VERSION(5, 2,… in sdma_v5_2_update_medium_grain_clock_gating()
1576 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_update_medium_grain_light_sleep()
1578 …if (adev->sdma.instance[i].fw_version < 70 && adev->ip_versions[SDMA0_HWIP][0] == IP_VERSION(5, 2,… in sdma_v5_2_update_medium_grain_light_sleep()
1705 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_set_ring_funcs()
1706 adev->sdma.instance[i].ring.funcs = &sdma_v5_2_ring_funcs; in sdma_v5_2_set_ring_funcs()
1707 adev->sdma.instance[i].ring.me = i; in sdma_v5_2_set_ring_funcs()
1722 adev->sdma.trap_irq.num_types = AMDGPU_SDMA_IRQ_INSTANCE0 + in sdma_v5_2_set_irq_funcs()
1723 adev->sdma.num_instances; in sdma_v5_2_set_irq_funcs()
1724 adev->sdma.trap_irq.funcs = &sdma_v5_2_trap_irq_funcs; in sdma_v5_2_set_irq_funcs()
1725 adev->sdma.illegal_inst_irq.funcs = &sdma_v5_2_illegal_inst_irq_funcs; in sdma_v5_2_set_irq_funcs()
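Lines 1722-1725 size the trap IRQ source so there is one interrupt type per SDMA instance. A brief sketch of that wiring, plus an illustrative (not lifted from this file) note on how a ring would later enable its own type with amdgpu_irq_get():

/* Sketch: one trap type per instance, one shared illegal-instruction source. */
static void sdma_v5_2_set_irq_funcs_sketch(struct amdgpu_device *adev)
{
        /* AMDGPU_SDMA_IRQ_INSTANCE0 + i addresses instance i's trap. */
        adev->sdma.trap_irq.num_types = AMDGPU_SDMA_IRQ_INSTANCE0 +
                                        adev->sdma.num_instances;
        adev->sdma.trap_irq.funcs = &sdma_v5_2_trap_irq_funcs;
        adev->sdma.illegal_inst_irq.funcs = &sdma_v5_2_illegal_inst_irq_funcs;
}

/* Illustrative use: instance i enables its own trap interrupt type.
 * amdgpu_irq_get(adev, &adev->sdma.trap_irq, AMDGPU_SDMA_IRQ_INSTANCE0 + i);
 */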
1794 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v5_2_set_buffer_funcs()
1811 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_set_vm_pte_funcs()
1813 &adev->sdma.instance[i].ring.sched; in sdma_v5_2_set_vm_pte_funcs()
1815 adev->vm_manager.vm_pte_num_scheds = adev->sdma.num_instances; in sdma_v5_2_set_vm_pte_funcs()
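Finally, lines 1794-1815 hand the SDMA rings to the rest of the driver: buffer moves always use instance 0's ring, while page-table updates can be scheduled on any instance. A condensed sketch that folds the two setters (set_buffer_funcs and set_vm_pte_funcs) into one illustration; the funcs tables themselves are the driver's and are not reproduced here:

/* Sketch: attach SDMA rings to the buffer-move and VM-PTE paths. */
static void sdma_v5_2_attach_rings_sketch(struct amdgpu_device *adev)
{
        unsigned i;

        /* Buffer-move path: instance 0's gfx ring (line 1794). */
        adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;

        /* VM page-table updates: one GPU scheduler per instance
         * (lines 1811-1815), so PTE work spreads across engines. */
        for (i = 0; i < adev->sdma.num_instances; i++)
                adev->vm_manager.vm_pte_scheds[i] =
                        &adev->sdma.instance[i].ring.sched;
        adev->vm_manager.vm_pte_num_scheds = adev->sdma.num_instances;
}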