Lines matching refs: umsch

The hits below come from the UMSCH 4.0 multimedia user-mode scheduler backend of the amdgpu driver (umsch_mm_v4_0.c); each hit is prefixed with its line number in that file and suffixed with the enclosing function.
48 static int umsch_mm_v4_0_load_microcode(struct amdgpu_umsch_mm *umsch) in umsch_mm_v4_0_load_microcode() argument
50 struct amdgpu_device *adev = umsch->ring.adev; in umsch_mm_v4_0_load_microcode()
54 r = amdgpu_umsch_mm_allocate_ucode_buffer(umsch); in umsch_mm_v4_0_load_microcode()
58 r = amdgpu_umsch_mm_allocate_ucode_data_buffer(umsch); in umsch_mm_v4_0_load_microcode()
62 umsch->cmd_buf_curr_ptr = umsch->cmd_buf_ptr; in umsch_mm_v4_0_load_microcode()
147 WREG32_SOC15_UMSCH(regVCN_MES_GP0_LO, lower_32_bits(umsch->log_gpu_addr)); in umsch_mm_v4_0_load_microcode()
148 WREG32_SOC15_UMSCH(regVCN_MES_GP0_HI, upper_32_bits(umsch->log_gpu_addr)); in umsch_mm_v4_0_load_microcode()
162 amdgpu_umsch_mm_psp_execute_cmd_buf(umsch); in umsch_mm_v4_0_load_microcode()
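The load_microcode hits trace a PSP-assisted firmware load: allocate an instruction buffer and a data buffer for the UMSCH firmware, rewind cmd_buf_curr_ptr so register writes can be staged into a command buffer, point the VCN_MES_GP0 scratch registers at the firmware log buffer, and hand the staged buffer to the PSP. The sketch below is a schematic reconstruction under those assumptions, not the verbatim source; the helpers and registers shown are the ones from the matched lines, the error handling and the elided register programming are guesses.

	static int umsch_mm_v4_0_load_microcode(struct amdgpu_umsch_mm *umsch)
	{
		struct amdgpu_device *adev = umsch->ring.adev;
		int r;

		/* back the firmware instruction and data images with GPU buffers */
		r = amdgpu_umsch_mm_allocate_ucode_buffer(umsch);
		if (r)
			return r;

		r = amdgpu_umsch_mm_allocate_ucode_data_buffer(umsch);
		if (r)
			return r;	/* the real code presumably also unwinds the first buffer */

		/* stage subsequent register writes into the command buffer */
		umsch->cmd_buf_curr_ptr = umsch->cmd_buf_ptr;

		/* ... ucode/data base addresses and engine setup staged here ... */

		/* tell the firmware where its log buffer lives */
		WREG32_SOC15_UMSCH(regVCN_MES_GP0_LO, lower_32_bits(umsch->log_gpu_addr));
		WREG32_SOC15_UMSCH(regVCN_MES_GP0_HI, upper_32_bits(umsch->log_gpu_addr));

		/* let the PSP replay the staged register writes */
		amdgpu_umsch_mm_psp_execute_cmd_buf(umsch);

		return 0;
	}

WREG32_SOC15_UMSCH appears to append register/value pairs to that command buffer when PSP firmware loading is in use, which is why cmd_buf_curr_ptr is reset before the writes rather than hitting MMIO directly.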
184 static void umsch_mm_v4_0_aggregated_doorbell_init(struct amdgpu_umsch_mm *umsch) in umsch_mm_v4_0_aggregated_doorbell_init() argument
186 struct amdgpu_device *adev = umsch->ring.adev; in umsch_mm_v4_0_aggregated_doorbell_init()
191 umsch->agdb_index[CONTEXT_PRIORITY_LEVEL_REALTIME]); in umsch_mm_v4_0_aggregated_doorbell_init()
197 umsch->agdb_index[CONTEXT_PRIORITY_LEVEL_FOCUS]); in umsch_mm_v4_0_aggregated_doorbell_init()
203 umsch->agdb_index[CONTEXT_PRIORITY_LEVEL_NORMAL]); in umsch_mm_v4_0_aggregated_doorbell_init()
209 umsch->agdb_index[CONTEXT_PRIORITY_LEVEL_IDLE]); in umsch_mm_v4_0_aggregated_doorbell_init()
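The aggregated_doorbell_init hits program one aggregated doorbell per scheduling priority level (REALTIME, FOCUS, NORMAL, IDLE) from umsch->agdb_index[]. A minimal sketch of the per-level pattern follows; only the indexing by CONTEXT_PRIORITY_LEVEL_* comes from the matched lines, while the control register name and its OFFSET/EN fields are placeholders.

	struct amdgpu_device *adev = umsch->ring.adev;
	uint32_t data;

	/* regVCN_AGDB_CTRL0 and its OFFSET/EN fields are placeholder names */
	data = RREG32_SOC15(VCN, 0, regVCN_AGDB_CTRL0);
	data = REG_SET_FIELD(data, VCN_AGDB_CTRL0, OFFSET,
			     umsch->agdb_index[CONTEXT_PRIORITY_LEVEL_REALTIME]);
	data = REG_SET_FIELD(data, VCN_AGDB_CTRL0, EN, 1);
	WREG32_SOC15(VCN, 0, regVCN_AGDB_CTRL0, data);

	/* repeated for CONTEXT_PRIORITY_LEVEL_FOCUS, _NORMAL and _IDLE */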
214 static int umsch_mm_v4_0_ring_start(struct amdgpu_umsch_mm *umsch) in umsch_mm_v4_0_ring_start() argument
216 struct amdgpu_ring *ring = &umsch->ring; in umsch_mm_v4_0_ring_start()
239 umsch_mm_v4_0_aggregated_doorbell_init(umsch); in umsch_mm_v4_0_ring_start()
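ring_start operates on &umsch->ring and finishes by calling umsch_mm_v4_0_aggregated_doorbell_init(), per the hits above. A hedged sketch of the expected shape is below: regVCN_UMSCH_RB_WPTR/RPTR are taken from the set_regs hits further down, while the doorbell-control and ring-base register names are assumptions. ring_stop, whose hits show only the ring pointer, presumably reverses the doorbell enable.

	static int umsch_mm_v4_0_ring_start(struct amdgpu_umsch_mm *umsch)
	{
		struct amdgpu_ring *ring = &umsch->ring;
		struct amdgpu_device *adev = ring->adev;

		/* route and enable the ring doorbell (register/field names assumed) */
		WREG32_SOC15(VCN, 0, regVCN_UMSCH_RB_DB_CTRL,
			     ring->doorbell_index << VCN_UMSCH_RB_DB_CTRL__OFFSET__SHIFT |
			     VCN_UMSCH_RB_DB_CTRL__EN_MASK);

		/* program ring base and size, then zero both pointers */
		WREG32_SOC15(VCN, 0, regVCN_UMSCH_RB_BASE_LO, lower_32_bits(ring->gpu_addr));
		WREG32_SOC15(VCN, 0, regVCN_UMSCH_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
		WREG32_SOC15(VCN, 0, regVCN_UMSCH_RB_SIZE, ring->ring_size);

		ring->wptr = 0;
		WREG32_SOC15(VCN, 0, regVCN_UMSCH_RB_RPTR, 0);
		WREG32_SOC15(VCN, 0, regVCN_UMSCH_RB_WPTR, 0);

		umsch_mm_v4_0_aggregated_doorbell_init(umsch);

		return 0;
	}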
244 static int umsch_mm_v4_0_ring_stop(struct amdgpu_umsch_mm *umsch) in umsch_mm_v4_0_ring_stop() argument
246 struct amdgpu_ring *ring = &umsch->ring; in umsch_mm_v4_0_ring_stop()
269 static int umsch_mm_v4_0_set_hw_resources(struct amdgpu_umsch_mm *umsch) in umsch_mm_v4_0_set_hw_resources() argument
272 struct amdgpu_device *adev = umsch->ring.adev; in umsch_mm_v4_0_set_hw_resources()
279 set_hw_resources.vmid_mask_mm_vcn = umsch->vmid_mask_mm_vcn; in umsch_mm_v4_0_set_hw_resources()
280 set_hw_resources.vmid_mask_mm_vpe = umsch->vmid_mask_mm_vpe; in umsch_mm_v4_0_set_hw_resources()
283 set_hw_resources.engine_mask = umsch->engine_mask; in umsch_mm_v4_0_set_hw_resources()
285 set_hw_resources.vcn0_hqd_mask[0] = umsch->vcn0_hqd_mask; in umsch_mm_v4_0_set_hw_resources()
286 set_hw_resources.vcn1_hqd_mask[0] = umsch->vcn1_hqd_mask; in umsch_mm_v4_0_set_hw_resources()
287 set_hw_resources.vcn_hqd_mask[0] = umsch->vcn_hqd_mask[0]; in umsch_mm_v4_0_set_hw_resources()
288 set_hw_resources.vcn_hqd_mask[1] = umsch->vcn_hqd_mask[1]; in umsch_mm_v4_0_set_hw_resources()
289 set_hw_resources.vpe_hqd_mask[0] = umsch->vpe_hqd_mask; in umsch_mm_v4_0_set_hw_resources()
291 set_hw_resources.g_sch_ctx_gpu_mc_ptr = umsch->sch_ctx_gpu_addr; in umsch_mm_v4_0_set_hw_resources()
310 set_hw_resources.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr; in umsch_mm_v4_0_set_hw_resources()
311 set_hw_resources.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq; in umsch_mm_v4_0_set_hw_resources()
313 r = amdgpu_umsch_mm_submit_pkt(umsch, &set_hw_resources.max_dwords_in_api, in umsch_mm_v4_0_set_hw_resources()
318 r = amdgpu_umsch_mm_query_fence(umsch); in umsch_mm_v4_0_set_hw_resources()
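The set_hw_resources hits show the standard UMSCH API-packet pattern: fill a SET_HW_RESOURCES descriptor from the cached VMID/engine/HQD masks and the scheduler-context GPU address, stamp it with a completion fence (the ring's fence address plus a freshly incremented sync_seq), submit it with amdgpu_umsch_mm_submit_pkt(), and wait in amdgpu_umsch_mm_query_fence() for the firmware to signal that fence. The schematic below mirrors the matched assignments; the union type, its header fields, and API_FRAME_SIZE_IN_DWORDS are assumptions.

	/* union UMSCHAPI__SET_HW_RESOURCES is assumed here as the packet type */
	union UMSCHAPI__SET_HW_RESOURCES set_hw_resources = {};
	struct amdgpu_device *adev = umsch->ring.adev;
	int r;

	set_hw_resources.vmid_mask_mm_vcn = umsch->vmid_mask_mm_vcn;
	set_hw_resources.vmid_mask_mm_vpe = umsch->vmid_mask_mm_vpe;
	set_hw_resources.engine_mask = umsch->engine_mask;

	set_hw_resources.vcn0_hqd_mask[0] = umsch->vcn0_hqd_mask;
	set_hw_resources.vcn1_hqd_mask[0] = umsch->vcn1_hqd_mask;
	set_hw_resources.vcn_hqd_mask[0] = umsch->vcn_hqd_mask[0];
	set_hw_resources.vcn_hqd_mask[1] = umsch->vcn_hqd_mask[1];
	set_hw_resources.vpe_hqd_mask[0] = umsch->vpe_hqd_mask;

	set_hw_resources.g_sch_ctx_gpu_mc_ptr = umsch->sch_ctx_gpu_addr;

	/* fence the packet: firmware writes fence_value to fence_addr when done */
	set_hw_resources.api_status.api_completion_fence_addr =
		umsch->ring.fence_drv.gpu_addr;
	set_hw_resources.api_status.api_completion_fence_value =
		++umsch->ring.fence_drv.sync_seq;

	/* API_FRAME_SIZE_IN_DWORDS is an assumed frame-size constant */
	r = amdgpu_umsch_mm_submit_pkt(umsch, &set_hw_resources.max_dwords_in_api,
				       API_FRAME_SIZE_IN_DWORDS);
	if (r)
		return r;

	/* block until the firmware writes the completion fence value */
	return amdgpu_umsch_mm_query_fence(umsch);

The add_queue and remove_queue hits that follow repeat this same submit-then-query-fence pattern with their own packet types, so they are not sketched separately.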
327 static int umsch_mm_v4_0_add_queue(struct amdgpu_umsch_mm *umsch, in umsch_mm_v4_0_add_queue() argument
330 struct amdgpu_device *adev = umsch->ring.adev; in umsch_mm_v4_0_add_queue()
360 add_queue.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr; in umsch_mm_v4_0_add_queue()
361 add_queue.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq; in umsch_mm_v4_0_add_queue()
363 r = amdgpu_umsch_mm_submit_pkt(umsch, &add_queue.max_dwords_in_api, in umsch_mm_v4_0_add_queue()
368 r = amdgpu_umsch_mm_query_fence(umsch); in umsch_mm_v4_0_add_queue()
377 static int umsch_mm_v4_0_remove_queue(struct amdgpu_umsch_mm *umsch, in umsch_mm_v4_0_remove_queue() argument
381 struct amdgpu_device *adev = umsch->ring.adev; in umsch_mm_v4_0_remove_queue()
392 remove_queue.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr; in umsch_mm_v4_0_remove_queue()
393 remove_queue.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq; in umsch_mm_v4_0_remove_queue()
395 r = amdgpu_umsch_mm_submit_pkt(umsch, &remove_queue.max_dwords_in_api, in umsch_mm_v4_0_remove_queue()
400 r = amdgpu_umsch_mm_query_fence(umsch); in umsch_mm_v4_0_remove_queue()
409 static int umsch_mm_v4_0_set_regs(struct amdgpu_umsch_mm *umsch) in umsch_mm_v4_0_set_regs() argument
411 struct amdgpu_device *adev = container_of(umsch, struct amdgpu_device, umsch_mm); in umsch_mm_v4_0_set_regs()
413 umsch->rb_wptr = SOC15_REG_OFFSET(VCN, 0, regVCN_UMSCH_RB_WPTR); in umsch_mm_v4_0_set_regs()
414 umsch->rb_rptr = SOC15_REG_OFFSET(VCN, 0, regVCN_UMSCH_RB_RPTR); in umsch_mm_v4_0_set_regs()
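set_regs is the one v4.0 hook that does not reach adev through the ring: it recovers the device with container_of(), which works because struct amdgpu_device embeds the amdgpu_umsch_mm instance by value as its umsch_mm member, and then caches the SOC15 offsets of the ring read/write pointer registers. Below is a stripped-down illustration of that container_of relationship; the structs are stand-ins for the real definitions and umsch_to_adev() is a hypothetical helper, not a driver function.

	#include <linux/kernel.h>	/* container_of() */

	struct amdgpu_umsch_mm { /* ... */ };

	struct amdgpu_device {
		/* ... */
		struct amdgpu_umsch_mm umsch_mm;	/* embedded by value, not a pointer */
		/* ... */
	};

	/* given only the embedded member, recover the enclosing device */
	static struct amdgpu_device *umsch_to_adev(struct amdgpu_umsch_mm *umsch)
	{
		return container_of(umsch, struct amdgpu_device, umsch_mm);
	}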
431 void umsch_mm_v4_0_set_funcs(struct amdgpu_umsch_mm *umsch) in umsch_mm_v4_0_set_funcs() argument
433 umsch->funcs = &umsch_mm_v4_0_funcs; in umsch_mm_v4_0_set_funcs()
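set_funcs simply publishes the version-specific vtable: umsch->funcs is pointed at a static umsch_mm_v4_0_funcs table that wires the common UMSCH layer to the v4.0 implementations listed above. A plausible shape of that table is sketched below; the struct name umsch_mm_funcs and its member names are assumptions, while the handler names are the ones appearing in these hits.

	/* struct umsch_mm_funcs and its member names are assumed here */
	static const struct umsch_mm_funcs umsch_mm_v4_0_funcs = {
		.set_hw_resources = umsch_mm_v4_0_set_hw_resources,
		.add_queue        = umsch_mm_v4_0_add_queue,
		.remove_queue     = umsch_mm_v4_0_remove_queue,
		.set_regs         = umsch_mm_v4_0_set_regs,
		.init_microcode   = umsch_mm_v4_0_load_microcode,
		.ring_start       = umsch_mm_v4_0_ring_start,
		.ring_stop        = umsch_mm_v4_0_ring_stop,
	};

	void umsch_mm_v4_0_set_funcs(struct amdgpu_umsch_mm *umsch)
	{
		umsch->funcs = &umsch_mm_v4_0_funcs;
	}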