Lines matching refs:jpeg in drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
42 INIT_DELAYED_WORK(&adev->jpeg.idle_work, amdgpu_jpeg_idle_work_handler); in amdgpu_jpeg_sw_init()
43 mutex_init(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_sw_init()
44 atomic_set(&adev->jpeg.total_submission_cnt, 0); in amdgpu_jpeg_sw_init()
48 adev->jpeg.indirect_sram = true; in amdgpu_jpeg_sw_init()
50 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_sw_init()
51 if (adev->jpeg.harvest_config & (1U << i)) in amdgpu_jpeg_sw_init()
54 if (adev->jpeg.indirect_sram) { in amdgpu_jpeg_sw_init()
58 &adev->jpeg.inst[i].dpg_sram_bo, in amdgpu_jpeg_sw_init()
59 &adev->jpeg.inst[i].dpg_sram_gpu_addr, in amdgpu_jpeg_sw_init()
60 &adev->jpeg.inst[i].dpg_sram_cpu_addr); in amdgpu_jpeg_sw_init()
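
The amdgpu_jpeg_sw_init() lines above set up the shared idle worker, the power-gating mutex, and the submission counter, then allocate one DPG SRAM buffer per unharvested instance when indirect SRAM is enabled. A minimal sketch of that allocation loop, reconstructed from these fragments (the buffer size, domain flags, and error path are assumptions, not verbatim upstream code):

    /* sketch: per-instance DPG SRAM allocation (size/domains assumed) */
    for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) {
        if (adev->jpeg.harvest_config & (1U << i))
            continue; /* instance fused off on this ASIC */

        if (adev->jpeg.indirect_sram) {
            r = amdgpu_bo_create_kernel(adev, 64 * 2 * 4, PAGE_SIZE,
                    AMDGPU_GEM_DOMAIN_VRAM | AMDGPU_GEM_DOMAIN_GTT,
                    &adev->jpeg.inst[i].dpg_sram_bo,
                    &adev->jpeg.inst[i].dpg_sram_gpu_addr,
                    &adev->jpeg.inst[i].dpg_sram_cpu_addr);
            if (r)
                return r; /* assumed error handling */
        }
    }
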
76 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_sw_fini()
77 if (adev->jpeg.harvest_config & (1U << i)) in amdgpu_jpeg_sw_fini()
81 &adev->jpeg.inst[i].dpg_sram_bo, in amdgpu_jpeg_sw_fini()
82 &adev->jpeg.inst[i].dpg_sram_gpu_addr, in amdgpu_jpeg_sw_fini()
83 (void **)&adev->jpeg.inst[i].dpg_sram_cpu_addr); in amdgpu_jpeg_sw_fini()
85 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) in amdgpu_jpeg_sw_fini()
86 amdgpu_ring_fini(&adev->jpeg.inst[i].ring_dec[j]); in amdgpu_jpeg_sw_fini()
89 if (adev->jpeg.reg_list) in amdgpu_jpeg_sw_fini()
92 mutex_destroy(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_sw_fini()
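
amdgpu_jpeg_sw_fini() unwinds this in the expected order: per instance it frees the DPG SRAM buffer and tears down every decode ring, then releases the register-dump state and destroys the power-gating mutex. A hedged sketch of the per-instance loop (surrounding code omitted):

    for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) {
        if (adev->jpeg.harvest_config & (1U << i))
            continue;

        /* free the DPG SRAM BO and clear all three handles at once */
        amdgpu_bo_free_kernel(&adev->jpeg.inst[i].dpg_sram_bo,
                              &adev->jpeg.inst[i].dpg_sram_gpu_addr,
                              (void **)&adev->jpeg.inst[i].dpg_sram_cpu_addr);

        for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j)
            amdgpu_ring_fini(&adev->jpeg.inst[i].ring_dec[j]);
    }
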
99 cancel_delayed_work_sync(&adev->jpeg.idle_work); in amdgpu_jpeg_suspend()
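
Suspend quiesces the idle worker before anything else; cancel_delayed_work_sync() both cancels a pending run and waits for an in-flight handler, so a delayed power-gate cannot race the suspend sequence. A minimal sketch of that step:

    /* sketch: flush the idle worker before suspending the block */
    cancel_delayed_work_sync(&adev->jpeg.idle_work);
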
112 container_of(work, struct amdgpu_device, jpeg.idle_work.work); in amdgpu_jpeg_idle_work_handler()
116 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_idle_work_handler()
117 if (adev->jpeg.harvest_config & (1U << i)) in amdgpu_jpeg_idle_work_handler()
120 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) in amdgpu_jpeg_idle_work_handler()
121 fences += amdgpu_fence_count_emitted(&adev->jpeg.inst[i].ring_dec[j]); in amdgpu_jpeg_idle_work_handler()
124 if (!fences && !atomic_read(&adev->jpeg.total_submission_cnt)) in amdgpu_jpeg_idle_work_handler()
128 schedule_delayed_work(&adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT); in amdgpu_jpeg_idle_work_handler()
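
The idle worker counts outstanding fences across every unharvested instance and ring; only when nothing is emitted and no submission is in flight does it gate the block, otherwise it re-arms itself. A sketch assuming the usual amdgpu power-gating helper (the gating call itself does not appear in the fragments above):

    static void amdgpu_jpeg_idle_work_handler(struct work_struct *work)
    {
        struct amdgpu_device *adev =
            container_of(work, struct amdgpu_device, jpeg.idle_work.work);
        unsigned int fences = 0, i, j;

        for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) {
            if (adev->jpeg.harvest_config & (1U << i))
                continue;
            for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j)
                fences += amdgpu_fence_count_emitted(&adev->jpeg.inst[i].ring_dec[j]);
        }

        if (!fences && !atomic_read(&adev->jpeg.total_submission_cnt))
            /* assumed: gate the block via the common PG helper */
            amdgpu_device_ip_set_powergating_state(adev,
                    AMD_IP_BLOCK_TYPE_JPEG, AMD_PG_STATE_GATE);
        else
            schedule_delayed_work(&adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT);
    }
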
135 atomic_inc(&adev->jpeg.total_submission_cnt); in amdgpu_jpeg_ring_begin_use()
136 cancel_delayed_work_sync(&adev->jpeg.idle_work); in amdgpu_jpeg_ring_begin_use()
138 mutex_lock(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_ring_begin_use()
141 mutex_unlock(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_ring_begin_use()
146 atomic_dec(&ring->adev->jpeg.total_submission_cnt); in amdgpu_jpeg_ring_end_use()
147 schedule_delayed_work(&ring->adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT); in amdgpu_jpeg_ring_end_use()
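
begin_use/end_use bracket every submission: the counter is incremented before the idle worker is flushed, so the worker can never observe a zero count while a submission is being set up, and ungating is serialized under jpeg_pg_lock. A hedged sketch of the pair (the ungate call is assumed from the standard pattern):

    void amdgpu_jpeg_ring_begin_use(struct amdgpu_ring *ring)
    {
        struct amdgpu_device *adev = ring->adev;

        atomic_inc(&adev->jpeg.total_submission_cnt);
        cancel_delayed_work_sync(&adev->jpeg.idle_work);

        mutex_lock(&adev->jpeg.jpeg_pg_lock);
        /* assumed: ungate before the ring is used */
        amdgpu_device_ip_set_powergating_state(adev,
                AMD_IP_BLOCK_TYPE_JPEG, AMD_PG_STATE_UNGATE);
        mutex_unlock(&adev->jpeg.jpeg_pg_lock);
    }

    void amdgpu_jpeg_ring_end_use(struct amdgpu_ring *ring)
    {
        atomic_dec(&ring->adev->jpeg.total_submission_cnt);
        schedule_delayed_work(&ring->adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT);
    }
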
165 WREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe], 0xCAFEDEAD); in amdgpu_jpeg_dec_ring_test_ring()
167 RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ring()
169 amdgpu_ring_write(ring, PACKET0(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0)); in amdgpu_jpeg_dec_ring_test_ring()
174 tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ring()
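
The ring test is a scratch-register handshake: the CPU seeds the per-pipe pitch register with 0xCAFEDEAD (the bare RREG32 on the next line just forces the posted write to complete), a PACKET0 on the ring then rewrites it, and the CPU polls until the new value lands. Sketch, with the ring-written value (0xABADCAFE) and the timeout loop assumed from the usual amdgpu ring-test pattern:

    WREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe], 0xCAFEDEAD);
    RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); /* flush write */

    r = amdgpu_ring_alloc(ring, 3);
    if (r)
        return r;
    amdgpu_ring_write(ring, PACKET0(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0));
    amdgpu_ring_write(ring, 0xABADCAFE); /* assumed poll value */
    amdgpu_ring_commit(ring);

    for (i = 0; i < adev->usec_timeout; i++) {
        tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]);
        if (tmp == 0xABADCAFE)
            break;
        udelay(1);
    }
    if (i >= adev->usec_timeout)
        r = -ETIMEDOUT;
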
203 ib->ptr[0] = PACKETJ(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0, 0, PACKETJ_TYPE0); in amdgpu_jpeg_dec_set_reg()
250 tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ib()
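
The IB test performs the same handshake through the command-submission path: amdgpu_jpeg_dec_set_reg() builds an indirect buffer whose first packet writes the internal pitch register, and the test then polls the external mirror just as the ring test does. A sketch of the IB payload (the marker value and NOP padding are assumptions):

    /* first dword pair: register write via a PACKETJ TYPE0 packet */
    ib->ptr[0] = PACKETJ(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0, 0,
                         PACKETJ_TYPE0);
    ib->ptr[1] = 0xDEADBEEF; /* assumed marker polled by the IB test */
    /* pad the rest of the IB with TYPE6 NOPs */
    for (i = 2; i < 16; i += 2) {
        ib->ptr[i] = PACKETJ(0, 0, 0, PACKETJ_TYPE6);
        ib->ptr[i + 1] = 0;
    }
    ib->length_dw = 16;
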
271 struct ras_common_if *ras_if = adev->jpeg.ras_if; in amdgpu_jpeg_process_poison_irq()
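
A poison interrupt is simply forwarded to the common RAS core, and the handler does nothing if no RAS interface was registered for JPEG. Sketch assuming the standard ras_dispatch_if flow:

    int amdgpu_jpeg_process_poison_irq(struct amdgpu_device *adev,
                                       struct amdgpu_irq_src *source,
                                       struct amdgpu_iv_entry *entry)
    {
        struct ras_common_if *ras_if = adev->jpeg.ras_if;
        struct ras_dispatch_if ih_data = { .entry = entry };

        if (!ras_if)
            return 0; /* RAS not wired up for JPEG on this ASIC */

        ih_data.head = *ras_if;
        amdgpu_ras_interrupt_dispatch(adev, &ih_data);
        return 0;
    }
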
294 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_ras_late_init()
295 if (adev->jpeg.harvest_config & (1 << i) || in amdgpu_jpeg_ras_late_init()
296 !adev->jpeg.inst[i].ras_poison_irq.funcs) in amdgpu_jpeg_ras_late_init()
299 r = amdgpu_irq_get(adev, &adev->jpeg.inst[i].ras_poison_irq, 0); in amdgpu_jpeg_ras_late_init()
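
Late init enables the per-instance poison interrupt, but only for instances that are present and actually registered an IRQ source (the !funcs check skips instances whose poison IRQ was never hooked up). Sketch, assuming the common RAS late-init/late-fini helpers:

    r = amdgpu_ras_block_late_init(adev, ras_block);
    if (r)
        return r;

    if (amdgpu_ras_is_supported(adev, ras_block->block)) {
        for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) {
            if (adev->jpeg.harvest_config & (1 << i) ||
                !adev->jpeg.inst[i].ras_poison_irq.funcs)
                continue;

            r = amdgpu_irq_get(adev, &adev->jpeg.inst[i].ras_poison_irq, 0);
            if (r)
                goto late_fini; /* assumed unwind path */
        }
    }
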
316 if (!adev->jpeg.ras) in amdgpu_jpeg_ras_sw_init()
319 ras = adev->jpeg.ras; in amdgpu_jpeg_ras_sw_init()
329 adev->jpeg.ras_if = &ras->ras_block.ras_comm; in amdgpu_jpeg_ras_sw_init()
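
sw init registers the RAS block and publishes its common descriptor through adev->jpeg.ras_if, which is exactly the pointer the poison-IRQ handler above consults. Sketch, with field names assumed from the usual amdgpu RAS registration pattern:

    if (!adev->jpeg.ras)
        return 0; /* ASIC did not provide a JPEG RAS block */

    ras = adev->jpeg.ras;
    err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);
    if (err)
        return err;

    strcpy(ras->ras_block.ras_comm.name, "jpeg");
    ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__JPEG; /* assumed */
    ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__POISON; /* assumed */
    adev->jpeg.ras_if = &ras->ras_block.ras_comm;

    if (!ras->ras_block.ras_late_init)
        ras->ras_block.ras_late_init = amdgpu_jpeg_ras_late_init;
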
342 .mc_addr = adev->jpeg.inst[inst_idx].dpg_sram_gpu_addr, in amdgpu_jpeg_psp_update_sram()
343 .ucode_size = ((uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_curr_addr - in amdgpu_jpeg_psp_update_sram()
344 (uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_cpu_addr), in amdgpu_jpeg_psp_update_sram()
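
The DPG SRAM image is streamed into the CPU mapping between dpg_sram_cpu_addr and dpg_sram_curr_addr, so the pointer difference is the number of bytes to hand to PSP, and mc_addr is the same buffer seen from the GPU side. Sketch, assuming the PSP firmware-load helper used elsewhere in amdgpu:

    struct amdgpu_firmware_info ucode = {
        .ucode_id = AMDGPU_UCODE_ID_JPEG_RAM, /* assumed ucode id */
        .mc_addr = adev->jpeg.inst[inst_idx].dpg_sram_gpu_addr,
        .ucode_size = ((uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_curr_addr -
                       (uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_cpu_addr),
    };

    return psp_execute_ip_fw_load(&adev->psp, &ucode);
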
364 mask = (1ULL << (adev->jpeg.num_jpeg_inst * adev->jpeg.num_jpeg_rings)) - 1; in amdgpu_debugfs_jpeg_sched_mask_set()
368 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_debugfs_jpeg_sched_mask_set()
369 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) { in amdgpu_debugfs_jpeg_sched_mask_set()
370 ring = &adev->jpeg.inst[i].ring_dec[j]; in amdgpu_debugfs_jpeg_sched_mask_set()
371 if (val & (1ULL << ((i * adev->jpeg.num_jpeg_rings) + j))) in amdgpu_debugfs_jpeg_sched_mask_set()
391 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_debugfs_jpeg_sched_mask_get()
392 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) { in amdgpu_debugfs_jpeg_sched_mask_get()
393 ring = &adev->jpeg.inst[i].ring_dec[j]; in amdgpu_debugfs_jpeg_sched_mask_get()
395 mask |= 1ULL << ((i * adev->jpeg.num_jpeg_rings) + j); in amdgpu_debugfs_jpeg_sched_mask_get()
415 if (!(adev->jpeg.num_jpeg_inst > 1) && !(adev->jpeg.num_jpeg_rings > 1)) in amdgpu_debugfs_jpeg_sched_mask_init()
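
The debugfs mask flattens (instance, ring) into one bit at position i * num_jpeg_rings + j: the setter flips each ring's scheduler on or off according to its bit, the getter rebuilds the mask from sched.ready, and init only creates the file when there is more than one instance or ring to toggle. Sketch of the setter loop (the sched.ready assignment is assumed from the usual debugfs mask pattern):

    mask = (1ULL << (adev->jpeg.num_jpeg_inst * adev->jpeg.num_jpeg_rings)) - 1;
    if ((val & mask) == 0)
        return -EINVAL; /* assumed: refuse to disable every ring */

    for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) {
        for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) {
            ring = &adev->jpeg.inst[i].ring_dec[j];
            /* 64-bit shift to match the mask; a plain 1 << would
             * overflow once there are more than 32 rings */
            if (val & (1ULL << ((i * adev->jpeg.num_jpeg_rings) + j)))
                ring->sched.ready = true;
            else
                ring->sched.ready = false;
        }
    }
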
433 return amdgpu_show_reset_mask(buf, adev->jpeg.supported_reset); in amdgpu_get_jpeg_reset_mask()
443 if (adev->jpeg.num_jpeg_inst) { in amdgpu_jpeg_sysfs_reset_mask_init()
455 if (adev->jpeg.num_jpeg_inst) in amdgpu_jpeg_sysfs_reset_mask_fini()
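
The sysfs attribute is a plain read-only file that formats jpeg.supported_reset; init and fini just create and remove it when the ASIC has any JPEG instance. A sketch, with the attribute plumbing assumed:

    static DEVICE_ATTR(jpeg_reset_mask, 0444,
                       amdgpu_get_jpeg_reset_mask, NULL); /* assumed attr */

    int amdgpu_jpeg_sysfs_reset_mask_init(struct amdgpu_device *adev)
    {
        int r = 0;

        if (adev->jpeg.num_jpeg_inst)
            r = device_create_file(adev->dev, &dev_attr_jpeg_reset_mask);
        return r;
    }

    void amdgpu_jpeg_sysfs_reset_mask_fini(struct amdgpu_device *adev)
    {
        if (adev->jpeg.num_jpeg_inst)
            device_remove_file(adev->dev, &dev_attr_jpeg_reset_mask);
    }
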
463 adev->jpeg.ip_dump = kcalloc(adev->jpeg.num_jpeg_inst * count, in amdgpu_jpeg_reg_dump_init()
465 if (!adev->jpeg.ip_dump) { in amdgpu_jpeg_reg_dump_init()
470 adev->jpeg.reg_list = reg; in amdgpu_jpeg_reg_dump_init()
471 adev->jpeg.reg_count = count; in amdgpu_jpeg_reg_dump_init()
478 kfree(adev->jpeg.ip_dump); in amdgpu_jpeg_reg_dump_fini()
479 adev->jpeg.reg_list = NULL; in amdgpu_jpeg_reg_dump_fini()
480 adev->jpeg.reg_count = 0; in amdgpu_jpeg_reg_dump_fini()
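
The register-dump buffer is one flat array of u32 with reg_count slots per instance; init allocates it and records the register list, fini frees it and clears the bookkeeping so sw_fini's reg_list check sees a clean state. Sketch of the init body:

    adev->jpeg.ip_dump = kcalloc(adev->jpeg.num_jpeg_inst * count,
                                 sizeof(uint32_t), GFP_KERNEL);
    if (!adev->jpeg.ip_dump)
        return -ENOMEM; /* assumed error path */

    adev->jpeg.reg_list = reg;
    adev->jpeg.reg_count = count;
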
489 if (!adev->jpeg.ip_dump) in amdgpu_jpeg_dump_ip_state()
492 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_dump_ip_state()
493 if (adev->jpeg.harvest_config & (1 << i)) in amdgpu_jpeg_dump_ip_state()
497 inst_off = i * adev->jpeg.reg_count; in amdgpu_jpeg_dump_ip_state()
499 adev->jpeg.ip_dump[inst_off] = in amdgpu_jpeg_dump_ip_state()
500 RREG32(SOC15_REG_ENTRY_OFFSET_INST(adev->jpeg.reg_list[0], in amdgpu_jpeg_dump_ip_state()
502 is_powered = ((adev->jpeg.ip_dump[inst_off] & 0x1) != 1); in amdgpu_jpeg_dump_ip_state()
505 for (j = 1; j < adev->jpeg.reg_count; j++) in amdgpu_jpeg_dump_ip_state()
506 adev->jpeg.ip_dump[inst_off + j] = in amdgpu_jpeg_dump_ip_state()
507 RREG32(SOC15_REG_ENTRY_OFFSET_INST(adev->jpeg.reg_list[j], in amdgpu_jpeg_dump_ip_state()
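
The dump reads reg_list[0] (a power-status register) first and derives is_powered from bit 0; the remaining registers are read only when the instance is powered, since touching them on a gated instance could hang the register bus. Sketch of the per-instance body (the inst_id instance remapping is elided, as the fragments above truncate it):

    inst_off = i * adev->jpeg.reg_count;
    /* reg_list[0] is assumed to be the power-status register */
    adev->jpeg.ip_dump[inst_off] =
        RREG32(SOC15_REG_ENTRY_OFFSET_INST(adev->jpeg.reg_list[0], inst_id));
    is_powered = ((adev->jpeg.ip_dump[inst_off] & 0x1) != 1);

    if (is_powered)
        for (j = 1; j < adev->jpeg.reg_count; j++)
            adev->jpeg.ip_dump[inst_off + j] =
                RREG32(SOC15_REG_ENTRY_OFFSET_INST(adev->jpeg.reg_list[j],
                                                   inst_id));
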
518 if (!adev->jpeg.ip_dump) in amdgpu_jpeg_print_ip_state()
521 drm_printf(p, "num_instances:%d\n", adev->jpeg.num_jpeg_inst); in amdgpu_jpeg_print_ip_state()
522 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_print_ip_state()
523 if (adev->jpeg.harvest_config & (1 << i)) { in amdgpu_jpeg_print_ip_state()
528 inst_off = i * adev->jpeg.reg_count; in amdgpu_jpeg_print_ip_state()
529 is_powered = ((adev->jpeg.ip_dump[inst_off] & 0x1) != 1); in amdgpu_jpeg_print_ip_state()
533 for (j = 0; j < adev->jpeg.reg_count; j++) in amdgpu_jpeg_print_ip_state()
534 drm_printf(p, "%-50s \t 0x%08x\n", adev->jpeg.reg_list[j].reg_name, in amdgpu_jpeg_print_ip_state()
535 adev->jpeg.ip_dump[inst_off + j]); in amdgpu_jpeg_print_ip_state()
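
Printing walks the same layout: per instance it recomputes inst_off, re-derives is_powered from the saved power-status word, and dumps register names and values only for powered instances. Sketch of the inner loop:

    inst_off = i * adev->jpeg.reg_count;
    is_powered = ((adev->jpeg.ip_dump[inst_off] & 0x1) != 1);

    if (is_powered)
        for (j = 0; j < adev->jpeg.reg_count; j++)
            drm_printf(p, "%-50s \t 0x%08x\n",
                       adev->jpeg.reg_list[j].reg_name,
                       adev->jpeg.ip_dump[inst_off + j]);
    else
        drm_printf(p, "\nInstance %d is powered off\n", i); /* assumed message */
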