Home
last modified time | relevance | path

Searched refs:num_compute_rings (Results 1 – 12 of 12) sorted by relevance

/drivers/gpu/drm/amd/amdgpu/
amdgpu_gfx.c:196 if (adev->gfx.num_compute_rings > 1 && in amdgpu_gfx_is_high_priority_compute_queue()
209 adev->gfx.num_compute_rings); in amdgpu_gfx_compute_queue_acquire()
445 j = i + xcc_id * adev->gfx.num_compute_rings; in amdgpu_gfx_mqd_sw_init()
486 j = i + xcc_id * adev->gfx.num_compute_rings; in amdgpu_gfx_mqd_sw_fini()
526 adev->gfx.num_compute_rings)) { in amdgpu_gfx_disable_kcq()
532 j = i + xcc_id * adev->gfx.num_compute_rings; in amdgpu_gfx_disable_kcq()
644 j = i + xcc_id * adev->gfx.num_compute_rings; in amdgpu_gfx_mes_enable_kcq()
691 adev->gfx.num_compute_rings + in amdgpu_gfx_enable_kcq()
1850 if (adev->gfx.num_compute_rings) { in amdgpu_gfx_sysfs_reset_mask_init()
1867 if (adev->gfx.num_compute_rings) in amdgpu_gfx_sysfs_reset_mask_fini()
[all …]
gfx_v9_4_3.c:993 (ring_id + xcc_id * adev->gfx.num_compute_rings) * in gfx_v9_4_3_compute_ring_init()
2198 for (j = 0; j < adev->gfx.num_compute_rings; j++) { in gfx_v9_4_3_xcc_kcq_fini_register()
2227 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_4_3_xcc_kcq_resume()
2229 adev->gfx.num_compute_rings]; in gfx_v9_4_3_xcc_kcq_resume()
2264 for (j = 0; j < adev->gfx.num_compute_rings; j++) { in gfx_v9_4_3_xcc_cp_resume()
2266 [j + xcc_id * adev->gfx.num_compute_rings]; in gfx_v9_4_3_xcc_cp_resume()
3316 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_4_3_eop_irq()
3319 xcc_id * adev->gfx.num_compute_rings]; in gfx_v9_4_3_eop_irq()
3352 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_4_3_fault()
3355 xcc_id * adev->gfx.num_compute_rings]; in gfx_v9_4_3_fault()
[all …]
gfx_v12_0.c:1395 unsigned num_compute_rings; in gfx_v12_0_sw_init() local
1452 if (adev->gfx.num_compute_rings) { in gfx_v12_0_sw_init()
1454 num_compute_rings = (adev->gfx.mec.num_pipe_per_mec * in gfx_v12_0_sw_init()
1456 adev->gfx.num_compute_rings = min(adev->gfx.num_compute_rings, in gfx_v12_0_sw_init()
1457 num_compute_rings); in gfx_v12_0_sw_init()
1522 if (adev->gfx.num_compute_rings) { in gfx_v12_0_sw_init()
1633 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v12_0_sw_fini()
3433 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v12_0_kcq_resume()
3496 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v12_0_cp_resume()
3858 adev->gfx.num_compute_rings = 0; in gfx_v12_0_early_init()
[all …]
amdgpu_amdkfd_arcturus.c:289 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in suspend_resume_compute_scheduler()
gfx_v8_0.c:1299 mec_hpd_size = adev->gfx.num_compute_rings * GFX8_MEC_HPD_SIZE; in gfx_v8_0_mec_init()
2037 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v8_0_sw_fini()
4335 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kiq_kcq_enable()
4671 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kcq_resume()
4698 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_cp_test_all_rings()
4766 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kcq_disable()
4973 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_pre_soft_reset()
5068 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_post_soft_reset()
6564 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_eop_irq()
6594 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_fault()
[all …]
gfx_v7_0.c:2721 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_fini()
3024 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_resume()
3034 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_resume()
4107 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v7_0_early_init()
4413 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v7_0_sw_fini()
4760 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_eop_irq()
4785 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_fault()
4976 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v7_0_set_ring_funcs()
gfx_v11_0.c:950 mec_hpd_size = adev->gfx.num_compute_rings * GFX11_MEC_HPD_SIZE; in gfx_v11_0_mec_init()
1779 if (adev->gfx.num_compute_rings) { in gfx_v11_0_sw_init()
1900 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v11_0_sw_fini()
4537 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v11_0_kcq_resume()
4615 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v11_0_cp_resume()
5122 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v11_0_check_soft_reset()
5227 adev->gfx.num_compute_rings = 0; in gfx_v11_0_early_init()
5230 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v11_0_early_init()
6459 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v11_0_eop_irq()
6623 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v11_0_handle_priv_fault()
[all …]
amdgpu_gfx.h:454 unsigned num_compute_rings; member
gfx_v6_0.c:3031 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v6_0_early_init()
3083 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v6_0_sw_init()
3116 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v6_0_sw_fini()
3508 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v6_0_set_ring_funcs()
gfx_v9_0.c:1884 mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE; in gfx_v9_0_mec_init()
2465 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_sw_fini()
3926 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_kcq_resume()
3981 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_cp_resume()
4804 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v9_0_early_init()
6223 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_eop_irq()
6253 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_fault()
7605 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_set_ring_funcs()
gfx_v10_0.c:4431 mec_hpd_size = adev->gfx.num_compute_rings * GFX10_MEC_HPD_SIZE; in gfx_v10_0_mec_init()
5027 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v10_0_sw_fini()
7211 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_kcq_resume()
7265 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_cp_resume()
7820 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v10_0_early_init()
9213 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_eop_irq()
9375 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_handle_priv_fault()
9968 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v10_0_set_ring_funcs()
amdgpu_kms.c:426 for (i = 0; i < adev->gfx.num_compute_rings; i++) in amdgpu_hw_ip_info()

Completed in 92 milliseconds