Home
last modified time | relevance | path

Searched refs: num_jpeg_inst (Results 1 – 14 of 14) sorted by relevance

/drivers/gpu/drm/amd/amdgpu/
A Damdgpu_jpeg.c50 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_sw_init()
76 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_sw_fini()
116 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_idle_work_handler()
294 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_ras_late_init()
368 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_debugfs_jpeg_sched_mask_set()
391 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_debugfs_jpeg_sched_mask_get()
443 if (adev->jpeg.num_jpeg_inst) { in amdgpu_jpeg_sysfs_reset_mask_init()
455 if (adev->jpeg.num_jpeg_inst) in amdgpu_jpeg_sysfs_reset_mask_fini()
463 adev->jpeg.ip_dump = kcalloc(adev->jpeg.num_jpeg_inst * count, in amdgpu_jpeg_reg_dump_init()
492 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_dump_ip_state()
[all …]
A Djpeg_v2_5.c81 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in jpeg_v2_5_early_init()
110 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v2_5_sw_init()
141 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v2_5_sw_init()
214 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v2_5_hw_init()
244 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v2_5_hw_fini()
390 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v2_5_start()
426 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v2_5_stop()
526 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v2_5_is_idle()
543 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v2_5_wait_for_idle()
564 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v2_5_set_clockgating_state()
[all …]
A Djpeg_v4_0_5.c91 adev->jpeg.num_jpeg_inst = 1; in jpeg_v4_0_5_early_init()
94 adev->jpeg.num_jpeg_inst = 2; in jpeg_v4_0_5_early_init()
124 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_5_sw_init()
155 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_5_sw_init()
227 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_5_hw_init()
254 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_5_hw_fini()
509 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_5_start()
575 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_5_stop()
657 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_5_is_idle()
673 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_5_wait_for_idle()
[all …]
A Djpeg_v5_0_1.c118 if (!adev->jpeg.num_jpeg_inst || adev->jpeg.num_jpeg_inst > AMDGPU_MAX_JPEG_INSTANCES) in jpeg_v5_0_1_early_init()
169 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v5_0_1_sw_init()
252 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v5_0_1_hw_init()
266 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v5_0_1_hw_init()
465 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in jpeg_v5_0_1_start_sriov()
566 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v5_0_1_start()
588 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) in jpeg_v5_0_1_stop()
655 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v5_0_1_is_idle()
675 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v5_0_1_wait_for_idle()
763 if (inst >= adev->jpeg.num_jpeg_inst) { in jpeg_v5_0_1_process_interrupt()
[all …]
A Djpeg_v4_0_3.c172 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_3_sw_init()
275 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in jpeg_v4_0_3_start_sriov()
381 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_3_hw_init()
396 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_3_hw_init()
624 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_3_start()
663 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) in jpeg_v4_0_3_stop()
976 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_3_is_idle()
994 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in jpeg_v4_0_3_wait_for_idle()
1078 if (inst >= adev->jpeg.num_jpeg_inst) { in jpeg_v4_0_3_process_interrupt()
1313 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) in jpeg_v4_0_3_query_ras_error_count()
[all …]
A Damdgpu_jpeg.h125 uint8_t num_jpeg_inst; member
A Djpeg_v4_0.c75 adev->jpeg.num_jpeg_inst = 1; in jpeg_v4_0_early_init()
850 for (inst = 0; inst < adev->jpeg.num_jpeg_inst; inst++) in jpeg_v4_0_query_ras_poison_status()
A Daqua_vanjaram.c287 max_res[AMDGPU_XCP_RES_JPEG] = adev->jpeg.num_jpeg_inst; in aqua_vanjaram_get_xcp_res_info()
563 adev->jpeg.num_jpeg_inst = hweight32(adev->jpeg.inst_mask); in aqua_vanjaram_init_soc_config()
A Djpeg_v3_0.c82 adev->jpeg.num_jpeg_inst = 1; in jpeg_v3_0_early_init()
A Djpeg_v5_0_0.c69 adev->jpeg.num_jpeg_inst = 1; in jpeg_v5_0_0_early_init()
A Djpeg_v1_0.c469 adev->jpeg.num_jpeg_inst = 1; in jpeg_v1_0_early_init()
A Djpeg_v2_0.c68 adev->jpeg.num_jpeg_inst = 1; in jpeg_v2_0_early_init()
A Damdgpu_kms.c521 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_hw_ip_info()
716 count = adev->jpeg.num_jpeg_inst * adev->jpeg.num_jpeg_rings; in amdgpu_info_ioctl()
/drivers/gpu/drm/amd/pm/swsmu/smu14/
A Dsmu_v14_0.c1603 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in smu_v14_0_set_jpeg_enable()

Completed in 30 milliseconds