Lines matching references to adev in drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c

37 int amdgpu_jpeg_sw_init(struct amdgpu_device *adev)  in amdgpu_jpeg_sw_init()  argument
41 INIT_DELAYED_WORK(&adev->jpeg.idle_work, amdgpu_jpeg_idle_work_handler); in amdgpu_jpeg_sw_init()
42 mutex_init(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_sw_init()
43 atomic_set(&adev->jpeg.total_submission_cnt, 0); in amdgpu_jpeg_sw_init()
45 if ((adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) && in amdgpu_jpeg_sw_init()
46 (adev->pg_flags & AMD_PG_SUPPORT_JPEG_DPG)) in amdgpu_jpeg_sw_init()
47 adev->jpeg.indirect_sram = true; in amdgpu_jpeg_sw_init()
49 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_sw_init()
50 if (adev->jpeg.harvest_config & (1 << i)) in amdgpu_jpeg_sw_init()
53 if (adev->jpeg.indirect_sram) { in amdgpu_jpeg_sw_init()
54 r = amdgpu_bo_create_kernel(adev, 64 * 2 * 4, PAGE_SIZE, in amdgpu_jpeg_sw_init()
57 &adev->jpeg.inst[i].dpg_sram_bo, in amdgpu_jpeg_sw_init()
58 &adev->jpeg.inst[i].dpg_sram_gpu_addr, in amdgpu_jpeg_sw_init()
59 &adev->jpeg.inst[i].dpg_sram_cpu_addr); in amdgpu_jpeg_sw_init()
61 dev_err(adev->dev, in amdgpu_jpeg_sw_init()
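
Taken together, the matched lines above outline the software-init path: the shared idle worker, power-gate lock and submission counter are set up once, then a DPG SRAM buffer is allocated per non-harvested instance when indirect SRAM is in use. A hedged reconstruction of how those lines likely connect; the BO domain flags and the error message text are assumptions, not taken from the listing:

        int amdgpu_jpeg_sw_init(struct amdgpu_device *adev)
        {
                int i, r;

                /* One idle worker, power-gate lock and submission counter for the whole block. */
                INIT_DELAYED_WORK(&adev->jpeg.idle_work, amdgpu_jpeg_idle_work_handler);
                mutex_init(&adev->jpeg.jpeg_pg_lock);
                atomic_set(&adev->jpeg.total_submission_cnt, 0);

                /* Indirect SRAM (DPG) is only usable with the PSP firmware load path. */
                if ((adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) &&
                    (adev->pg_flags & AMD_PG_SUPPORT_JPEG_DPG))
                        adev->jpeg.indirect_sram = true;

                for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) {
                        if (adev->jpeg.harvest_config & (1 << i))
                                continue;       /* skip harvested instances */

                        if (adev->jpeg.indirect_sram) {
                                /* 64 register/value pairs of 4 bytes each; domain flags assumed. */
                                r = amdgpu_bo_create_kernel(adev, 64 * 2 * 4, PAGE_SIZE,
                                                            AMDGPU_GEM_DOMAIN_VRAM |
                                                            AMDGPU_GEM_DOMAIN_GTT,
                                                            &adev->jpeg.inst[i].dpg_sram_bo,
                                                            &adev->jpeg.inst[i].dpg_sram_gpu_addr,
                                                            &adev->jpeg.inst[i].dpg_sram_cpu_addr);
                                if (r) {
                                        dev_err(adev->dev,
                                                "JPEG %d (%d) failed to allocate DPG bo\n", i, r);
                                        return r;
                                }
                        }
                }

                return 0;
        }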
71 int amdgpu_jpeg_sw_fini(struct amdgpu_device *adev) in amdgpu_jpeg_sw_fini() argument
75 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_sw_fini()
76 if (adev->jpeg.harvest_config & (1 << i)) in amdgpu_jpeg_sw_fini()
80 &adev->jpeg.inst[i].dpg_sram_bo, in amdgpu_jpeg_sw_fini()
81 &adev->jpeg.inst[i].dpg_sram_gpu_addr, in amdgpu_jpeg_sw_fini()
82 (void **)&adev->jpeg.inst[i].dpg_sram_cpu_addr); in amdgpu_jpeg_sw_fini()
84 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) in amdgpu_jpeg_sw_fini()
85 amdgpu_ring_fini(&adev->jpeg.inst[i].ring_dec[j]); in amdgpu_jpeg_sw_fini()
88 mutex_destroy(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_sw_fini()
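
The teardown mirrors the init path: per instance, the DPG SRAM BO is freed and each decode ring is torn down, then the shared power-gate lock is destroyed. A minimal sketch, assuming the order implied by the source line numbers above:

        int amdgpu_jpeg_sw_fini(struct amdgpu_device *adev)
        {
                int i, j;

                for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) {
                        if (adev->jpeg.harvest_config & (1 << i))
                                continue;

                        /* Release the DPG SRAM buffer allocated in amdgpu_jpeg_sw_init(). */
                        amdgpu_bo_free_kernel(&adev->jpeg.inst[i].dpg_sram_bo,
                                              &adev->jpeg.inst[i].dpg_sram_gpu_addr,
                                              (void **)&adev->jpeg.inst[i].dpg_sram_cpu_addr);

                        for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j)
                                amdgpu_ring_fini(&adev->jpeg.inst[i].ring_dec[j]);
                }

                mutex_destroy(&adev->jpeg.jpeg_pg_lock);

                return 0;
        }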
93 int amdgpu_jpeg_suspend(struct amdgpu_device *adev) in amdgpu_jpeg_suspend() argument
95 cancel_delayed_work_sync(&adev->jpeg.idle_work); in amdgpu_jpeg_suspend()
100 int amdgpu_jpeg_resume(struct amdgpu_device *adev) in amdgpu_jpeg_resume() argument
107 struct amdgpu_device *adev = in amdgpu_jpeg_idle_work_handler() local
112 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_idle_work_handler()
113 if (adev->jpeg.harvest_config & (1 << i)) in amdgpu_jpeg_idle_work_handler()
116 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) in amdgpu_jpeg_idle_work_handler()
117 fences += amdgpu_fence_count_emitted(&adev->jpeg.inst[i].ring_dec[j]); in amdgpu_jpeg_idle_work_handler()
120 if (!fences && !atomic_read(&adev->jpeg.total_submission_cnt)) in amdgpu_jpeg_idle_work_handler()
121 amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_JPEG, in amdgpu_jpeg_idle_work_handler()
124 schedule_delayed_work(&adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT); in amdgpu_jpeg_idle_work_handler()
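
The idle worker is the gating half of the power-management scheme: it counts fences still emitted on every non-harvested decode ring and, only when nothing is outstanding and no submission is in flight, asks the IP framework to power-gate the JPEG block; otherwise it re-arms itself. Hedged reconstruction (the container_of() source member and variable declarations are inferred from the matched lines):

        static void amdgpu_jpeg_idle_work_handler(struct work_struct *work)
        {
                struct amdgpu_device *adev =
                        container_of(work, struct amdgpu_device, jpeg.idle_work.work);
                unsigned int fences = 0;
                unsigned int i, j;

                for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) {
                        if (adev->jpeg.harvest_config & (1 << i))
                                continue;

                        for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j)
                                fences += amdgpu_fence_count_emitted(&adev->jpeg.inst[i].ring_dec[j]);
                }

                if (!fences && !atomic_read(&adev->jpeg.total_submission_cnt))
                        /* Nothing pending: gate the JPEG block. */
                        amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_JPEG,
                                                               AMD_PG_STATE_GATE);
                else
                        /* Still busy: check again after JPEG_IDLE_TIMEOUT. */
                        schedule_delayed_work(&adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT);
        }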
129 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_ring_begin_use() local
131 atomic_inc(&adev->jpeg.total_submission_cnt); in amdgpu_jpeg_ring_begin_use()
132 cancel_delayed_work_sync(&adev->jpeg.idle_work); in amdgpu_jpeg_ring_begin_use()
134 mutex_lock(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_ring_begin_use()
135 amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_JPEG, in amdgpu_jpeg_ring_begin_use()
137 mutex_unlock(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_ring_begin_use()
142 atomic_dec(&ring->adev->jpeg.total_submission_cnt); in amdgpu_jpeg_ring_end_use()
143 schedule_delayed_work(&ring->adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT); in amdgpu_jpeg_ring_end_use()
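
begin_use/end_use are the other half of that scheme: every submission bumps total_submission_cnt, cancels a pending gate and ungates the block under jpeg_pg_lock, while end_use drops the count and re-arms the idle worker. Sketch under the same assumptions as above:

        void amdgpu_jpeg_ring_begin_use(struct amdgpu_ring *ring)
        {
                struct amdgpu_device *adev = ring->adev;

                atomic_inc(&adev->jpeg.total_submission_cnt);
                cancel_delayed_work_sync(&adev->jpeg.idle_work);

                /* Serialize power-state changes against other submitters. */
                mutex_lock(&adev->jpeg.jpeg_pg_lock);
                amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_JPEG,
                                                       AMD_PG_STATE_UNGATE);
                mutex_unlock(&adev->jpeg.jpeg_pg_lock);
        }

        void amdgpu_jpeg_ring_end_use(struct amdgpu_ring *ring)
        {
                atomic_dec(&ring->adev->jpeg.total_submission_cnt);
                schedule_delayed_work(&ring->adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT);
        }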
148 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_dec_ring_test_ring() local
154 if (amdgpu_sriov_vf(adev)) in amdgpu_jpeg_dec_ring_test_ring()
161 WREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe], 0xCAFEDEAD); in amdgpu_jpeg_dec_ring_test_ring()
163 RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ring()
165 amdgpu_ring_write(ring, PACKET0(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0)); in amdgpu_jpeg_dec_ring_test_ring()
169 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_jpeg_dec_ring_test_ring()
170 tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ring()
176 if (i >= adev->usec_timeout) in amdgpu_jpeg_dec_ring_test_ring()
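
The ring test follows the usual scratch-register pattern: poison the per-pipe pitch register via MMIO, push a PACKET0 write of a different magic value through the ring, and poll until the register changes or adev->usec_timeout expires; SR-IOV VFs skip the test because they cannot access the register directly. Reconstruction; the 0xABADCAFE magic, the 3-dword ring allocation and the udelay() poll interval are assumptions:

        int amdgpu_jpeg_dec_ring_test_ring(struct amdgpu_ring *ring)
        {
                struct amdgpu_device *adev = ring->adev;
                uint32_t tmp = 0;
                unsigned int i;
                int r;

                /* VFs cannot touch the register directly; skip the test. */
                if (amdgpu_sriov_vf(adev))
                        return 0;

                r = amdgpu_ring_alloc(ring, 3);
                if (r)
                        return r;

                WREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe], 0xCAFEDEAD);
                /* Read back so the write is posted before the ring runs. */
                RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]);

                amdgpu_ring_write(ring, PACKET0(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0));
                amdgpu_ring_write(ring, 0xABADCAFE);
                amdgpu_ring_commit(ring);

                for (i = 0; i < adev->usec_timeout; i++) {
                        tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]);
                        if (tmp == 0xABADCAFE)
                                break;
                        udelay(1);
                }

                if (i >= adev->usec_timeout)
                        r = -ETIMEDOUT;

                return r;
        }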
185 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_dec_set_reg() local
192 r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL, ib_size_dw * 4, in amdgpu_jpeg_dec_set_reg()
199 ib->ptr[0] = PACKETJ(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0, 0, PACKETJ_TYPE0); in amdgpu_jpeg_dec_set_reg()
224 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_dec_ring_test_ib() local
244 if (!amdgpu_sriov_vf(adev)) { in amdgpu_jpeg_dec_ring_test_ib()
245 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_jpeg_dec_ring_test_ib()
246 tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ib()
254 if (i >= adev->usec_timeout) in amdgpu_jpeg_dec_ring_test_ib()
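
The IB test exercises the same register through a directly-submitted job: amdgpu_jpeg_dec_set_reg() builds a small IB whose first PACKETJ writes a magic value to the internal pitch register, and the test then waits on the returned fence and (outside SR-IOV) polls the external mirror. A condensed, hedged sketch; the IB size, magic value, NOP padding, direct-submit path and fence handling are assumptions beyond the listed lines:

        static int amdgpu_jpeg_dec_set_reg(struct amdgpu_ring *ring, uint32_t handle,
                                           struct dma_fence **fence)
        {
                struct amdgpu_device *adev = ring->adev;
                const unsigned int ib_size_dw = 16;
                struct dma_fence *f = NULL;
                struct amdgpu_job *job;
                struct amdgpu_ib *ib;
                int i, r;

                r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL, ib_size_dw * 4,
                                             AMDGPU_IB_POOL_DIRECT, &job);
                if (r)
                        return r;

                ib = &job->ibs[0];
                /* Write a magic value to the internal pitch register, then pad with NOP packets. */
                ib->ptr[0] = PACKETJ(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0, 0, PACKETJ_TYPE0);
                ib->ptr[1] = 0xDEADBEEF;
                for (i = 2; i < ib_size_dw; i += 2) {
                        ib->ptr[i] = PACKETJ(0, 0, 0, PACKETJ_TYPE6);
                        ib->ptr[i + 1] = 0;
                }
                ib->length_dw = ib_size_dw;

                r = amdgpu_job_submit_direct(job, ring, &f);
                if (r) {
                        amdgpu_job_free(job);
                        return r;
                }

                if (fence)
                        *fence = dma_fence_get(f);
                dma_fence_put(f);
                return 0;
        }

        int amdgpu_jpeg_dec_ring_test_ib(struct amdgpu_ring *ring, long timeout)
        {
                struct amdgpu_device *adev = ring->adev;
                struct dma_fence *fence = NULL;
                uint32_t tmp;
                unsigned int i;
                long r;

                r = amdgpu_jpeg_dec_set_reg(ring, 1, &fence);
                if (r)
                        return r;

                r = dma_fence_wait_timeout(fence, false, timeout);
                if (r == 0)
                        r = -ETIMEDOUT;
                else if (r > 0)
                        r = 0;

                if (!r && !amdgpu_sriov_vf(adev)) {
                        for (i = 0; i < adev->usec_timeout; i++) {
                                tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]);
                                if (tmp == 0xDEADBEEF)
                                        break;
                                udelay(1);
                        }
                        if (i >= adev->usec_timeout)
                                r = -ETIMEDOUT;
                }

                dma_fence_put(fence);
                return r;
        }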
263 int amdgpu_jpeg_process_poison_irq(struct amdgpu_device *adev, in amdgpu_jpeg_process_poison_irq() argument
267 struct ras_common_if *ras_if = adev->jpeg.ras_if; in amdgpu_jpeg_process_poison_irq()
276 amdgpu_ras_interrupt_dispatch(adev, &ih_data); in amdgpu_jpeg_process_poison_irq()
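
The poison interrupt handler just forwards the IH entry to the RAS core for the JPEG block, doing nothing if no ras_if has been registered. Sketch; the names of the unlisted parameters and the early-return check are assumed:

        int amdgpu_jpeg_process_poison_irq(struct amdgpu_device *adev,
                                           struct amdgpu_irq_src *source,
                                           struct amdgpu_iv_entry *entry)
        {
                struct ras_common_if *ras_if = adev->jpeg.ras_if;
                struct ras_dispatch_if ih_data = {
                        .entry = entry,
                };

                if (!ras_if)
                        return 0;

                /* Let the RAS core handle the poison event for this block. */
                ih_data.head = *ras_if;
                amdgpu_ras_interrupt_dispatch(adev, &ih_data);

                return 0;
        }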
281 int amdgpu_jpeg_ras_late_init(struct amdgpu_device *adev, struct ras_common_if *ras_block) in amdgpu_jpeg_ras_late_init() argument
285 r = amdgpu_ras_block_late_init(adev, ras_block); in amdgpu_jpeg_ras_late_init()
289 if (amdgpu_ras_is_supported(adev, ras_block->block)) { in amdgpu_jpeg_ras_late_init()
290 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_ras_late_init()
291 if (adev->jpeg.harvest_config & (1 << i) || in amdgpu_jpeg_ras_late_init()
292 !adev->jpeg.inst[i].ras_poison_irq.funcs) in amdgpu_jpeg_ras_late_init()
295 r = amdgpu_irq_get(adev, &adev->jpeg.inst[i].ras_poison_irq, 0); in amdgpu_jpeg_ras_late_init()
303 amdgpu_ras_block_late_fini(adev, ras_block); in amdgpu_jpeg_ras_late_init()
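
RAS late init performs the generic block-level late init first, then, if RAS is supported for this block, enables the per-instance poison interrupt on every instance that is neither harvested nor missing an irq source; a failure rolls the block back with amdgpu_ras_block_late_fini(). Hedged reconstruction of the control flow between the listed lines:

        int amdgpu_jpeg_ras_late_init(struct amdgpu_device *adev, struct ras_common_if *ras_block)
        {
                int r, i;

                r = amdgpu_ras_block_late_init(adev, ras_block);
                if (r)
                        return r;

                if (amdgpu_ras_is_supported(adev, ras_block->block)) {
                        for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) {
                                if (adev->jpeg.harvest_config & (1 << i) ||
                                    !adev->jpeg.inst[i].ras_poison_irq.funcs)
                                        continue;

                                /* Arm the poison interrupt for this instance. */
                                r = amdgpu_irq_get(adev, &adev->jpeg.inst[i].ras_poison_irq, 0);
                                if (r)
                                        goto late_fini;
                        }
                }

                return 0;

        late_fini:
                amdgpu_ras_block_late_fini(adev, ras_block);
                return r;
        }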
307 int amdgpu_jpeg_ras_sw_init(struct amdgpu_device *adev) in amdgpu_jpeg_ras_sw_init() argument
312 if (!adev->jpeg.ras) in amdgpu_jpeg_ras_sw_init()
315 ras = adev->jpeg.ras; in amdgpu_jpeg_ras_sw_init()
316 err = amdgpu_ras_register_ras_block(adev, &ras->ras_block); in amdgpu_jpeg_ras_sw_init()
318 dev_err(adev->dev, "Failed to register jpeg ras block!\n"); in amdgpu_jpeg_ras_sw_init()
325 adev->jpeg.ras_if = &ras->ras_block.ras_comm; in amdgpu_jpeg_ras_sw_init()
333 int amdgpu_jpeg_psp_update_sram(struct amdgpu_device *adev, int inst_idx, in amdgpu_jpeg_psp_update_sram() argument
338 .mc_addr = adev->jpeg.inst[inst_idx].dpg_sram_gpu_addr, in amdgpu_jpeg_psp_update_sram()
339 .ucode_size = ((uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_curr_addr - in amdgpu_jpeg_psp_update_sram()
340 (uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_cpu_addr), in amdgpu_jpeg_psp_update_sram()
343 return psp_execute_ip_fw_load(&adev->psp, &ucode); in amdgpu_jpeg_psp_update_sram()
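
The PSP update packs the per-instance DPG SRAM buffer into an amdgpu_firmware_info descriptor: mc_addr is the GPU address of the buffer and ucode_size is how far dpg_sram_curr_addr has advanced past the CPU base, i.e. the number of register/value bytes accumulated so far. Sketch; the second half of the signature and the ucode_id assignment are assumptions, only the two listed fields come from the source:

        int amdgpu_jpeg_psp_update_sram(struct amdgpu_device *adev, int inst_idx,
                                        enum AMDGPU_UCODE_ID ucode_id)
        {
                struct amdgpu_firmware_info ucode = {
                        .ucode_id = ucode_id,   /* which RAM image the PSP should load; assumed */
                        .mc_addr = adev->jpeg.inst[inst_idx].dpg_sram_gpu_addr,
                        .ucode_size = ((uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_curr_addr -
                                       (uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_cpu_addr),
                };

                /* Hand the accumulated register/value pairs to the PSP for the indirect load. */
                return psp_execute_ip_fw_load(&adev->psp, &ucode);
        }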