Searched refs:hive (Results 1 – 10 of 10) sorted by relevance
351    kfree(hive);   in amdgpu_xgmi_hive_release()
614    hive = kzalloc(sizeof(*hive), GFP_KERNEL);   in amdgpu_get_xgmi_hive()
615    if (!hive) {   in amdgpu_get_xgmi_hive()
679    if (hive)   in amdgpu_get_xgmi_hive()
682    return hive;   in amdgpu_get_xgmi_hive()
687    if (hive)   in amdgpu_put_xgmi_hive()
700    if (!hive)   in amdgpu_xgmi_set_pstate()
739    hive->pstate = hive->hi_req_count ?   in amdgpu_xgmi_set_pstate()
995    adev->hive = hive;   in amdgpu_xgmi_add_device()
1010   struct amdgpu_hive_info *hive = adev->hive;   in amdgpu_xgmi_remove_device() local
[all …]
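These hits outline the hive object's lifetime: amdgpu_get_xgmi_hive() allocates a fresh amdgpu_hive_info with kzalloc() when no matching hive exists and returns it with a reference held, amdgpu_put_xgmi_hive() drops that reference, amdgpu_xgmi_hive_release() kfree()s the object once the last reference is gone, and amdgpu_xgmi_add_device() caches the pointer in adev->hive. A minimal sketch of the allocate-on-miss half of the get path, with reference counting and the driver's real hive-list lookup left out (the cached adev->hive is reused here as a simplified lookup):

/* Sketch only: allocate-on-miss shape suggested by the
 * amdgpu_get_xgmi_hive() hits; not the driver's actual implementation.
 */
#include "amdgpu.h"
#include "amdgpu_xgmi.h"

static struct amdgpu_hive_info *example_get_hive(struct amdgpu_device *adev)
{
	struct amdgpu_hive_info *hive;

	/* the add_device hit caches the hive in adev->hive; reuse that as a
	 * simplified lookup instead of the real hive-list search */
	if (adev->hive)
		return adev->hive;

	hive = kzalloc(sizeof(*hive), GFP_KERNEL);
	if (!hive) {
		dev_err(adev->dev, "XGMI: failed to allocate hive info\n");
		return NULL;
	}

	/* ... locks, lists and the first reference would be set up here ... */
	return hive;
}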
56    struct amdgpu_hive_info *hive = NULL;   in amdgpu_job_core_dump() local
59    hive = amdgpu_get_xgmi_hive(adev);   in amdgpu_job_core_dump()
60    if (hive)   in amdgpu_job_core_dump()
61    mutex_lock(&hive->hive_lock);   in amdgpu_job_core_dump()
67    if (!amdgpu_sriov_vf(adev) && (adev->gmc.xgmi.num_physical_nodes > 1) && hive) {   in amdgpu_job_core_dump()
68    list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head)   in amdgpu_job_core_dump()
82    if (hive) {   in amdgpu_job_core_dump()
83    mutex_unlock(&hive->hive_lock);   in amdgpu_job_core_dump()
84    amdgpu_put_xgmi_hive(hive);   in amdgpu_job_core_dump()
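Taken together, the amdgpu_job_core_dump() hits show the recurring consumer pattern for the hive: take a reference with amdgpu_get_xgmi_hive(), hold hive->hive_lock while walking hive->device_list (devices are linked through gmc.xgmi.head), then unlock and drop the reference with amdgpu_put_xgmi_hive(). A hedged sketch of that pattern; the per-device work in the loop body is a placeholder, not what the core-dump path actually does per node:

/* Sketch of the get/lock/iterate/unlock/put pattern visible in the
 * amdgpu_job_core_dump() hits. The loop body is a placeholder.
 */
#include "amdgpu.h"
#include "amdgpu_xgmi.h"

static void example_for_each_hive_device(struct amdgpu_device *adev)
{
	struct amdgpu_hive_info *hive;
	struct amdgpu_device *tmp_adev;

	hive = amdgpu_get_xgmi_hive(adev);   /* may be NULL on non-XGMI parts */
	if (hive)
		mutex_lock(&hive->hive_lock);

	if (!amdgpu_sriov_vf(adev) && adev->gmc.xgmi.num_physical_nodes > 1 && hive) {
		/* every device in the hive is reachable via gmc.xgmi.head */
		list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) {
			/* ... per-device work goes here ... */
			dev_dbg(tmp_adev->dev, "visiting hive member\n");
		}
	} else {
		/* single-node case: only the local device */
		dev_dbg(adev->dev, "no hive, single device\n");
	}

	if (hive) {
		mutex_unlock(&hive->hive_lock);
		amdgpu_put_xgmi_hive(hive);
	}
}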
58    void amdgpu_put_xgmi_hive(struct amdgpu_hive_info *hive);
59    int amdgpu_xgmi_update_topology(struct amdgpu_hive_info *hive, struct amdgpu_device *adev);
158   reset_context->hive == NULL) {   in aldebaran_mode2_perform_reset()
339   reset_context->hive == NULL) {   in aldebaran_mode2_restore_hwcontext()
384   if (reset_context->hive &&   in aldebaran_mode2_restore_hwcontext()
386   r = amdgpu_xgmi_update_topology(reset_context->hive,   in aldebaran_mode2_restore_hwcontext()
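The aldebaran hits show how the mode-2 reset handlers consult reset_context->hive: the XGMI-specific work is skipped when no hive is attached, and after the hardware context is restored the topology is pushed back out with amdgpu_xgmi_update_topology(), whose prototype appears in the header hits above. A rough sketch of that post-restore step; the second half of the guard is truncated in the results, so the num_physical_nodes check below is an illustrative stand-in, and error handling is trimmed:

/* Sketch: re-publishing XGMI topology after a mode-2 restore, based on
 * the aldebaran_mode2_restore_hwcontext() hits. Simplified; not the
 * driver's full restore path.
 */
#include "amdgpu.h"
#include "amdgpu_reset.h"
#include "amdgpu_xgmi.h"

static int example_restore_xgmi_topology(struct amdgpu_reset_context *reset_context,
					 struct amdgpu_device *adev)
{
	int r = 0;

	/* only XGMI parts that actually belong to a hive need this */
	if (reset_context->hive &&
	    adev->gmc.xgmi.num_physical_nodes > 1)
		r = amdgpu_xgmi_update_topology(reset_context->hive, adev);

	return r;
}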
2910   if (WARN_ON(!hive)) {   in amdgpu_device_ip_init()
2915   if (!hive->reset_domain ||   in amdgpu_device_ip_init()
3836   if (WARN_ON(!hive))   in amdgpu_device_xgmi_reset_func()
3870   amdgpu_put_xgmi_hive(hive);   in amdgpu_device_xgmi_reset_func()
5082   if (hive)   in amdgpu_device_reset_sriov()
5083   amdgpu_put_xgmi_hive(hive);   in amdgpu_device_reset_sriov()
5472   if (reset_context->hive &&   in amdgpu_do_asic_reset()
5671   if (hive)   in amdgpu_device_gpu_recover()
5675   reset_context->hive = hive;   in amdgpu_device_gpu_recover()
5880   if (hive) {   in amdgpu_device_gpu_recover()
[all …]
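On the amdgpu_device.c side, the recovery path does the hand-off in the other direction: amdgpu_device_gpu_recover() looks up the hive, stores it into the reset context (reset_context->hive = hive) so lower-level handlers such as the aldebaran ones above can reach it, and drops its reference once recovery finishes. A hedged sketch of that hand-off, with the locking shown in the earlier sketch and the actual reset sequence elided:

/* Sketch of the hive -> reset_context hand-off suggested by the
 * amdgpu_device_gpu_recover() hits; the reset machinery itself is
 * omitted.
 */
#include "amdgpu.h"
#include "amdgpu_reset.h"
#include "amdgpu_xgmi.h"

static int example_recover_with_hive(struct amdgpu_device *adev,
				     struct amdgpu_reset_context *reset_context)
{
	struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev);
	int r = 0;

	/* publish the hive (possibly NULL) so the reset handlers can use it */
	reset_context->hive = hive;

	/* ... the actual reset/recovery sequence would run here ... */

	/* recovery finished: drop the reference taken above */
	if (hive)
		amdgpu_put_xgmi_hive(hive);

	return r;
}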
2523   if (hive) {   in amdgpu_ras_set_fed_all()
2537   if (hive) {   in amdgpu_ras_in_recovery()
2539   amdgpu_put_xgmi_hive(hive);   in amdgpu_ras_in_recovery()
2566   if (hive) {   in amdgpu_ras_do_recovery()
2634   if (hive) {   in amdgpu_ras_do_recovery()
2636   amdgpu_put_xgmi_hive(hive);   in amdgpu_ras_do_recovery()
3529   struct amdgpu_hive_info *hive;   in amdgpu_ras_event_mgr_init() local
3534   hive = amdgpu_get_xgmi_hive(adev);   in amdgpu_ras_event_mgr_init()
3535   ras->event_mgr = hive ? &hive->event_mgr : &ras->__event_mgr;   in amdgpu_ras_event_mgr_init()
3543   if (hive)   in amdgpu_ras_event_mgr_init()
[all …]
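The amdgpu_ras_event_mgr_init() hits show how RAS picks its event manager: if the device sits in a hive, the shared hive->event_mgr is used so all nodes report against one event stream; otherwise the device falls back to its private ras->__event_mgr. The lookup reference is dropped right after the pointer is cached. A short sketch of that selection, with field names taken from the hits and the surrounding init code omitted:

/* Sketch of the event-manager selection from the
 * amdgpu_ras_event_mgr_init() hits: one shared manager per hive,
 * otherwise the per-device fallback embedded in struct amdgpu_ras.
 */
#include "amdgpu.h"
#include "amdgpu_ras.h"
#include "amdgpu_xgmi.h"

static void example_pick_ras_event_mgr(struct amdgpu_device *adev,
				       struct amdgpu_ras *ras)
{
	struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev);

	/* hive members share hive->event_mgr; a lone device uses its own */
	ras->event_mgr = hive ? &hive->event_mgr : &ras->__event_mgr;

	/* the pointer is cached, so the lookup reference can be dropped */
	if (hive)
		amdgpu_put_xgmi_hive(hive);
}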
52 struct amdgpu_hive_info *hive; member
1376   struct amdgpu_hive_info *hive;   in psp_xgmi_reflect_topology_info() local
1382   hive = amdgpu_get_xgmi_hive(psp->adev);   in psp_xgmi_reflect_topology_info()
1383   if (WARN_ON(!hive))   in psp_xgmi_reflect_topology_info()
1386   list_for_each_entry(mirror_adev, &hive->device_list, gmc.xgmi.head) {   in psp_xgmi_reflect_topology_info()
1413   amdgpu_put_xgmi_hive(hive);   in psp_xgmi_reflect_topology_info()
842 struct amdgpu_hive_info *hive; member
40 ((adev)->hive ? (void *)(adev)->hive : (void *)(adev))
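The final hit is the body of a small helper macro: when the device belongs to a hive it yields the hive pointer, otherwise it falls back to the device pointer itself, giving callers a single opaque token shared by every node in the same hive. A hedged illustration of how such a macro could be defined and used for grouping; only the ternary body comes from the hit, the macro name and logging helper are made up for the example:

/* Illustrative only: the ternary body is the hit above; the macro name
 * and the helper around it are hypothetical.
 */
#include "amdgpu.h"

#define EXAMPLE_HIVE_OR_DEV(adev) \
	((adev)->hive ? (void *)(adev)->hive : (void *)(adev))

static void example_log_owner(struct amdgpu_device *adev)
{
	/* every device in the same hive yields the same token; a device
	 * outside any hive yields itself */
	dev_info(adev->dev, "owner token: %p\n", EXAMPLE_HIVE_OR_DEV(adev));
}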