Lines matching refs: vcn (all matches are in the AMDGPU VCN 1.0 driver, vcn_v1_0.c; each entry reads: <source line number> <matched line> in <enclosing function>())

71 adev->vcn.num_enc_rings = 2;  in vcn_v1_0_early_init()
97 VCN_1_0__SRCID__UVD_SYSTEM_MESSAGE_INTERRUPT, &adev->vcn.inst->irq); in vcn_v1_0_sw_init()
102 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v1_0_sw_init()
104 &adev->vcn.inst->irq); in vcn_v1_0_sw_init()
114 adev->vcn.idle_work.work.func = vcn_v1_0_idle_work_handler; in vcn_v1_0_sw_init()
122 ring = &adev->vcn.inst->ring_dec; in vcn_v1_0_sw_init()
124 r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0, in vcn_v1_0_sw_init()
129 adev->vcn.internal.scratch9 = adev->vcn.inst->external.scratch9 = in vcn_v1_0_sw_init()
131 adev->vcn.internal.data0 = adev->vcn.inst->external.data0 = in vcn_v1_0_sw_init()
133 adev->vcn.internal.data1 = adev->vcn.inst->external.data1 = in vcn_v1_0_sw_init()
135 adev->vcn.internal.cmd = adev->vcn.inst->external.cmd = in vcn_v1_0_sw_init()
137 adev->vcn.internal.nop = adev->vcn.inst->external.nop = in vcn_v1_0_sw_init()
140 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v1_0_sw_init()
143 ring = &adev->vcn.inst->ring_enc[i]; in vcn_v1_0_sw_init()
145 r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0, in vcn_v1_0_sw_init()
151 adev->vcn.pause_dpg_mode = vcn_v1_0_pause_dpg_mode; in vcn_v1_0_sw_init()
154 volatile struct amdgpu_fw_shared *fw_shared = adev->vcn.inst->fw_shared.cpu_addr; in vcn_v1_0_sw_init()
157 amdgpu_vcn_fwlog_init(adev->vcn.inst); in vcn_v1_0_sw_init()
198 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; in vcn_v1_0_hw_init()
205 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v1_0_hw_init()
206 ring = &adev->vcn.inst->ring_enc[i]; in vcn_v1_0_hw_init()
236 cancel_delayed_work_sync(&adev->vcn.idle_work); in vcn_v1_0_hw_fini()
239 (adev->vcn.cur_state != AMD_PG_STATE_GATE && in vcn_v1_0_hw_fini()
260 idle_work_unexecuted = cancel_delayed_work_sync(&adev->vcn.idle_work); in vcn_v1_0_suspend()
305 uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4); in vcn_v1_0_mc_resume_spg_mode()
318 lower_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v1_0_mc_resume_spg_mode()
320 upper_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v1_0_mc_resume_spg_mode()
330 lower_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v1_0_mc_resume_spg_mode()
332 upper_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v1_0_mc_resume_spg_mode()
338 lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v1_0_mc_resume_spg_mode()
340 upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v1_0_mc_resume_spg_mode()
372 uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4); in vcn_v1_0_mc_resume_dpg_mode()
388 lower_32_bits(adev->vcn.inst->gpu_addr), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
390 upper_32_bits(adev->vcn.inst->gpu_addr), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
400 lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
402 upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
410 lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), in vcn_v1_0_mc_resume_dpg_mode()
413 upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), in vcn_v1_0_mc_resume_dpg_mode()
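
The two mc_resume groups above program the same three-region layout in both SPG and DPG mode: the firmware image at the start of the VCN buffer object, the stack at a page-aligned offset past the firmware, and the context area AMDGPU_VCN_STACK_SIZE beyond that, each written as a lower/upper 32-bit register pair. A minimal standalone C model of that address split (sizes and addresses are made up; lower_32_bits/upper_32_bits mirror the kernel macros):

#include <stdint.h>
#include <stdio.h>

#define lower_32_bits(n) ((uint32_t)(n))
#define upper_32_bits(n) ((uint32_t)((n) >> 32))

#define VCN_STACK_SIZE  (128 * 1024)   /* stand-in for AMDGPU_VCN_STACK_SIZE */

/* One lo/hi register pair per region, as in the mc_resume functions. */
static void write_region(const char *name, uint64_t addr)
{
	printf("%s: lo=0x%08x hi=0x%08x\n", name,
	       lower_32_bits(addr), upper_32_bits(addr));
}

int main(void)
{
	uint64_t gpu_addr = 0x123456789000ULL;  /* made-up VCN BO address */
	uint64_t offset = 0x40000;              /* page-aligned firmware size */

	write_region("fw image", gpu_addr);
	write_region("stack   ", gpu_addr + offset);
	write_region("context ", gpu_addr + offset + VCN_STACK_SIZE);
	return 0;
}
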
789 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; in vcn_v1_0_start_spg_mode()
942 ring = &adev->vcn.inst->ring_enc[0]; in vcn_v1_0_start_spg_mode()
949 ring = &adev->vcn.inst->ring_enc[1]; in vcn_v1_0_start_spg_mode()
963 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; in vcn_v1_0_start_dpg_mode()
1218 if (adev->vcn.inst[inst_idx].pause_state.fw_based != new_state->fw_based) { in vcn_v1_0_pause_dpg_mode()
1220 adev->vcn.inst[inst_idx].pause_state.fw_based, in vcn_v1_0_pause_dpg_mode()
1221 adev->vcn.inst[inst_idx].pause_state.jpeg, in vcn_v1_0_pause_dpg_mode()
1244 ring = &adev->vcn.inst->ring_enc[0]; in vcn_v1_0_pause_dpg_mode()
1251 ring = &adev->vcn.inst->ring_enc[1]; in vcn_v1_0_pause_dpg_mode()
1258 ring = &adev->vcn.inst->ring_dec; in vcn_v1_0_pause_dpg_mode()
1270 adev->vcn.inst[inst_idx].pause_state.fw_based = new_state->fw_based; in vcn_v1_0_pause_dpg_mode()
1274 if (adev->vcn.inst[inst_idx].pause_state.jpeg != new_state->jpeg) { in vcn_v1_0_pause_dpg_mode()
1276 adev->vcn.inst[inst_idx].pause_state.fw_based, in vcn_v1_0_pause_dpg_mode()
1277 adev->vcn.inst[inst_idx].pause_state.jpeg, in vcn_v1_0_pause_dpg_mode()
1319 ring = &adev->vcn.inst->ring_dec; in vcn_v1_0_pause_dpg_mode()
1331 adev->vcn.inst[inst_idx].pause_state.jpeg = new_state->jpeg; in vcn_v1_0_pause_dpg_mode()
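
Every pause_dpg_mode hit follows the same pattern: the per-instance pause_state cache is compared against the requested state, the hardware is reprogrammed only when they differ, and the cache is updated afterwards. A minimal standalone sketch of that control flow (the names and printouts stand in for the actual register programming):

#include <stdio.h>

struct dpg_pause_state { int fw_based; int jpeg; };

static struct dpg_pause_state pause_state;   /* last state programmed */

static void pause_dpg_mode(const struct dpg_pause_state *new_state)
{
	if (pause_state.fw_based != new_state->fw_based) {
		printf("fw_based pause %d -> %d: reprogram hardware\n",
		       pause_state.fw_based, new_state->fw_based);
		pause_state.fw_based = new_state->fw_based;
	}
	if (pause_state.jpeg != new_state->jpeg) {
		printf("jpeg pause %d -> %d: reprogram hardware\n",
		       pause_state.jpeg, new_state->jpeg);
		pause_state.jpeg = new_state->jpeg;
	}
}

int main(void)
{
	struct dpg_pause_state s = { 1, 0 };

	pause_dpg_mode(&s);   /* fw_based changes, hardware touched once */
	pause_dpg_mode(&s);   /* no-op: cached state already matches */
	return 0;
}
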
1590 if (ring == &adev->vcn.inst->ring_enc[0]) in vcn_v1_0_enc_ring_get_rptr()
1607 if (ring == &adev->vcn.inst->ring_enc[0]) in vcn_v1_0_enc_ring_get_wptr()
1624 if (ring == &adev->vcn.inst->ring_enc[0]) in vcn_v1_0_enc_ring_set_wptr()
1730 amdgpu_fence_process(&adev->vcn.inst->ring_dec); in vcn_v1_0_process_interrupt()
1733 amdgpu_fence_process(&adev->vcn.inst->ring_enc[0]); in vcn_v1_0_process_interrupt()
1736 amdgpu_fence_process(&adev->vcn.inst->ring_enc[1]); in vcn_v1_0_process_interrupt()
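
The process_interrupt hits dispatch on the interrupt source ID: the system-message source completes fences on the decode ring, and the two encode sources map to ring_enc[0] and ring_enc[1]. A standalone model of that dispatch (the enum values are illustrative; the real driver matches the VCN_1_0__SRCID__UVD_* constants):

#include <stdio.h>

/* Illustrative source IDs, standing in for irqsrcs_vcn_1_0.h values. */
enum srcid {
	SRCID_SYSTEM_MESSAGE,
	SRCID_ENC_GENERAL_PURPOSE,
	SRCID_ENC_LOW_LATENCY,
};

/* Stand-in for amdgpu_fence_process() on the selected ring. */
static void fence_process(const char *ring)
{
	printf("process fences on %s\n", ring);
}

static int process_interrupt(enum srcid id)
{
	switch (id) {
	case SRCID_SYSTEM_MESSAGE:
		fence_process("ring_dec");
		break;
	case SRCID_ENC_GENERAL_PURPOSE:
		fence_process("ring_enc[0]");
		break;
	case SRCID_ENC_LOW_LATENCY:
		fence_process("ring_enc[1]");
		break;
	default:
		return -1;  /* unhandled source */
	}
	return 0;
}

int main(void)
{
	return process_interrupt(SRCID_ENC_LOW_LATENCY);
}
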
1773 if(state == adev->vcn.cur_state) in vcn_v1_0_set_powergating_state()
1782 adev->vcn.cur_state = state; in vcn_v1_0_set_powergating_state()
1789 container_of(work, struct amdgpu_device, vcn.idle_work.work); in vcn_v1_0_idle_work_handler()
1792 for (i = 0; i < adev->vcn.num_enc_rings; ++i) in vcn_v1_0_idle_work_handler()
1793 fences += amdgpu_fence_count_emitted(&adev->vcn.inst->ring_enc[i]); in vcn_v1_0_idle_work_handler()
1808 adev->vcn.pause_dpg_mode(adev, 0, &new_state); in vcn_v1_0_idle_work_handler()
1812 fences += amdgpu_fence_count_emitted(&adev->vcn.inst->ring_dec); in vcn_v1_0_idle_work_handler()
1822 schedule_delayed_work(&adev->vcn.idle_work, VCN_IDLE_TIMEOUT); in vcn_v1_0_idle_work_handler()
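
The idle-work hits show the gating heuristic: the handler sums the fences still outstanding on the decode and encode rings, then either gates VCN power when the count is zero or re-arms itself. A standalone C model of that loop (the DPG pause handling around source line 1808 is omitted; fence_count_emitted() stands in for amdgpu_fence_count_emitted()):

#include <stdio.h>

#define NUM_ENC_RINGS 2

struct ring { unsigned int fences_emitted; };

struct vcn_state {
	struct ring ring_dec;
	struct ring ring_enc[NUM_ENC_RINGS];
};

/* Stand-in for amdgpu_fence_count_emitted(): fences not yet signaled. */
static unsigned int fence_count_emitted(const struct ring *ring)
{
	return ring->fences_emitted;
}

static void idle_work_handler(struct vcn_state *vcn)
{
	unsigned int fences = 0;
	int i;

	for (i = 0; i < NUM_ENC_RINGS; ++i)
		fences += fence_count_emitted(&vcn->ring_enc[i]);
	fences += fence_count_emitted(&vcn->ring_dec);

	if (fences == 0)
		printf("idle: gate VCN power\n");          /* set_powergating_state(GATE) */
	else
		printf("busy (%u fences): re-arm timer\n", /* schedule_delayed_work() */
		       fences);
}

int main(void)
{
	struct vcn_state vcn = { .ring_dec = { 1 } };

	idle_work_handler(&vcn);           /* busy: one decode fence pending */
	vcn.ring_dec.fences_emitted = 0;
	idle_work_handler(&vcn);           /* idle: gate */
	return 0;
}
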
1829 bool set_clocks = !cancel_delayed_work_sync(&adev->vcn.idle_work); in vcn_v1_0_ring_begin_use()
1831 mutex_lock(&adev->vcn.vcn1_jpeg1_workaround); in vcn_v1_0_ring_begin_use()
1857 for (i = 0; i < adev->vcn.num_enc_rings; ++i) in vcn_v1_0_set_pg_for_begin_use()
1858 fences += amdgpu_fence_count_emitted(&adev->vcn.inst->ring_enc[i]); in vcn_v1_0_set_pg_for_begin_use()
1875 adev->vcn.pause_dpg_mode(adev, 0, &new_state); in vcn_v1_0_set_pg_for_begin_use()
1881 schedule_delayed_work(&ring->adev->vcn.idle_work, VCN_IDLE_TIMEOUT); in vcn_v1_0_ring_end_use()
1882 mutex_unlock(&ring->adev->vcn.vcn1_jpeg1_workaround); in vcn_v1_0_ring_end_use()
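
begin_use/end_use bracket every ring submission: begin_use synchronously cancels the idle work, and whether the cancel found the work still pending tells the driver if clocks/power must be re-enabled, while end_use re-arms the idle timer; the vcn1_jpeg1_workaround mutex serializes VCN 1.0 and JPEG 1.0 ring use around this. A standalone model of the cancel/reschedule handshake (cancel_sync() stands in for cancel_delayed_work_sync()):

#include <stdbool.h>
#include <stdio.h>

static bool idle_work_pending;

/* cancel_delayed_work_sync() stand-in: returns true if the work was
 * still pending, i.e. it was cancelled before it could run. */
static bool cancel_sync(void)
{
	bool was_pending = idle_work_pending;
	idle_work_pending = false;
	return was_pending;
}

static void ring_begin_use(void)
{
	bool set_clocks = !cancel_sync();

	/* mutex_lock(&adev->vcn.vcn1_jpeg1_workaround) would go here:
	 * VCN 1.0 and JPEG 1.0 share hardware, so ring use is serialized. */
	if (set_clocks)
		printf("idle work already ran: ungate clocks/power\n");
	else
		printf("idle work cancelled in time: hardware still up\n");
}

static void ring_end_use(void)
{
	idle_work_pending = true;  /* schedule_delayed_work(..., VCN_IDLE_TIMEOUT) */
	/* mutex_unlock(&adev->vcn.vcn1_jpeg1_workaround) */
}

int main(void)
{
	ring_begin_use();   /* first use: nothing pending, power must come up */
	ring_end_use();
	ring_begin_use();   /* back-to-back use: pending idle work cancelled */
	ring_end_use();
	return 0;
}
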
1959 if (reg == PACKET0(p->adev->vcn.internal.data0, 0)) { in vcn_v1_0_ring_patch_cs_in_place()
1961 } else if (reg == PACKET0(p->adev->vcn.internal.data1, 0)) { in vcn_v1_0_ring_patch_cs_in_place()
1963 } else if (reg == PACKET0(p->adev->vcn.internal.cmd, 0)) { in vcn_v1_0_ring_patch_cs_in_place()
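
These three comparisons are how the decode-ring CS patcher recognizes register writes in a user command stream: PACKET0(reg, 0) headers are matched against the internal data0/data1/cmd offsets cached during vcn_v1_0_sw_init() (source lines 129 to 137 above). A simplified sketch of that matching (packet0() is an illustrative encoder, not the kernel macro, and the offsets are made up):

#include <stdint.h>
#include <stdio.h>

/* Illustrative type-0 packet header encoder for a zero-length write. */
static uint32_t packet0(uint32_t reg) { return reg & 0xffff; }

struct internal_regs { uint32_t data0, data1, cmd; };

static void patch_reg_write(const struct internal_regs *ir, uint32_t header)
{
	if (header == packet0(ir->data0))
		printf("data0 write: record low 32 bits of an address\n");
	else if (header == packet0(ir->data1))
		printf("data1 write: record high 32 bits of an address\n");
	else if (header == packet0(ir->cmd))
		printf("cmd write: translate/patch the captured address\n");
}

int main(void)
{
	struct internal_regs ir = { 0x10, 0x11, 0x12 }; /* made-up offsets */

	patch_reg_write(&ir, packet0(0x12));
	return 0;
}
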
2043 adev->vcn.inst->ring_dec.funcs = &vcn_v1_0_dec_ring_vm_funcs; in vcn_v1_0_set_dec_ring_funcs()
2051 for (i = 0; i < adev->vcn.num_enc_rings; ++i) in vcn_v1_0_set_enc_ring_funcs()
2052 adev->vcn.inst->ring_enc[i].funcs = &vcn_v1_0_enc_ring_vm_funcs; in vcn_v1_0_set_enc_ring_funcs()
2064 adev->vcn.inst->irq.num_types = adev->vcn.num_enc_rings + 2; in vcn_v1_0_set_irq_funcs()
2065 adev->vcn.inst->irq.funcs = &vcn_v1_0_irq_funcs; in vcn_v1_0_set_irq_funcs()