Searched refs:num_compute_rings (Results 1 – 9 of 9) sorted by relevance
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
    198  if (adev->gfx.num_compute_rings > 1 &&    in amdgpu_gfx_is_high_priority_compute_queue()
    211  adev->gfx.num_compute_rings);    in amdgpu_gfx_compute_queue_acquire()
    416  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in amdgpu_gfx_mqd_sw_init()
    452  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in amdgpu_gfx_mqd_sw_fini()
    478  adev->gfx.num_compute_rings)) {    in amdgpu_gfx_disable_kcq()
    483  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in amdgpu_gfx_disable_kcq()
    536  adev->gfx.num_compute_rings +    in amdgpu_gfx_enable_kcq()
    548  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in amdgpu_gfx_enable_kcq()
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
    327  unsigned num_compute_rings;    member
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
    1312  mec_hpd_size = adev->gfx.num_compute_rings * GFX8_MEC_HPD_SIZE;    in gfx_v8_0_mec_init()
    2050  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v8_0_sw_fini()
    4350  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v8_0_kiq_kcq_enable()
    4706  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v8_0_kcq_resume()
    4749  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v8_0_cp_test_all_rings()
    4817  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v8_0_kcq_disable()
    5016  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v8_0_pre_soft_reset()
    5111  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v8_0_post_soft_reset()
    6619  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v8_0_eop_irq()
    6649  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v8_0_fault()
    [all …]
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
    2713  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v7_0_cp_compute_fini()
    3055  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v7_0_cp_compute_resume()
    3065  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v7_0_cp_compute_resume()
    4182  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),    in gfx_v7_0_early_init()
    4487  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v7_0_sw_fini()
    4838  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v7_0_eop_irq()
    4863  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v7_0_fault()
    5053  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v7_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
    706  mec_hpd_size = adev->gfx.num_compute_rings * GFX11_MEC_HPD_SIZE;    in gfx_v11_0_mec_init()
    1456  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v11_0_sw_fini()
    4132  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v11_0_kcq_resume()
    4211  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v11_0_cp_resume()
    4648  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v11_0_check_soft_reset()
    4707  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),    in gfx_v11_0_early_init()
    5969  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v11_0_eop_irq()
    6047  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v11_0_handle_priv_fault()
    6299  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v11_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
    1710  mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE;    in gfx_v9_0_mec_init()
    2202  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v9_0_sw_fini()
    3625  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v9_0_kcq_resume()
    3689  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v9_0_cp_resume()
    4503  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),    in gfx_v9_0_early_init()
    5800  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v9_0_eop_irq()
    5830  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v9_0_fault()
    6938  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v9_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
    3032  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),    in gfx_v6_0_early_init()
    3084  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v6_0_sw_init()
    3117  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v6_0_sw_fini()
    3520  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v6_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
    4226  mec_hpd_size = adev->gfx.num_compute_rings * GFX10_MEC_HPD_SIZE;    in gfx_v10_0_mec_init()
    4688  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v10_0_sw_fini()
    6911  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v10_0_kcq_resume()
    6980  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v10_0_cp_resume()
    7541  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),    in gfx_v10_0_early_init()
    9044  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v10_0_eop_irq()
    9122  for (i = 0; i < adev->gfx.num_compute_rings; i++) {    in gfx_v10_0_handle_priv_fault()
    9387  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in gfx_v10_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
    385  for (i = 0; i < adev->gfx.num_compute_rings; i++)    in amdgpu_hw_ip_info()
Completed in 67 milliseconds
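
Read together, the hits show one consistent pattern: each gfx IP version picks the number of kernel compute queues once, in its early_init callback, as the min() of the requested kcq count (amdgpu_gfx_get_num_kcq()) and a per-ASIC cap, and every later path (MQD setup, KCQ enable/disable, resume, soft reset, the EOP and fault interrupt handlers) loops over exactly num_compute_rings entries of the compute ring array. The following is a minimal, self-contained user-space sketch of that shape, not the driver code itself: struct gfx, struct ring, get_num_kcq(), MAX_COMPUTE_RINGS and the numeric values are simplified stand-ins for the real amdgpu definitions.

/*
 * Stand-alone sketch (not the real driver code) modeling how the hits
 * above use num_compute_rings: set once at early-init time, clamped to
 * a compile-time cap, then used as the bound of every per-ring loop.
 */
#include <stdio.h>

#define MAX_COMPUTE_RINGS 8     /* stand-in for the driver's per-ASIC cap */

struct ring {                   /* stand-in for struct amdgpu_ring */
        int idx;
        int ready;
};

struct gfx {                    /* stand-in for struct amdgpu_gfx */
        unsigned num_compute_rings;
        struct ring compute_ring[MAX_COMPUTE_RINGS];
};

/* stand-in for amdgpu_gfx_get_num_kcq(): how many kernel compute
 * queues were requested (e.g. via a module parameter) */
static unsigned get_num_kcq(void)
{
        return 12;              /* deliberately above the cap to show the clamp */
}

/* mirrors the gfx_v*_early_init() pattern:
 * num_compute_rings = min(requested, cap) */
static void early_init(struct gfx *gfx)
{
        unsigned requested = get_num_kcq();

        gfx->num_compute_rings = requested < MAX_COMPUTE_RINGS ?
                                 requested : MAX_COMPUTE_RINGS;
}

/* mirrors the per-ring loops (sw_fini, kcq_resume, eop_irq, ...):
 * bounded by num_compute_rings, never by the array size */
static void resume_rings(struct gfx *gfx)
{
        unsigned i;

        for (i = 0; i < gfx->num_compute_rings; i++) {
                gfx->compute_ring[i].idx = i;
                gfx->compute_ring[i].ready = 1;
        }
}

int main(void)
{
        struct gfx gfx = { 0 };

        early_init(&gfx);
        resume_rings(&gfx);
        printf("compute rings in use: %u of %d\n",
               gfx.num_compute_rings, MAX_COMPUTE_RINGS);
        return 0;
}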