Searched refs:tmp_adev (Results 1 – 8 of 8) sorted by relevance
/drivers/gpu/drm/amd/amdgpu/
aldebaran.c
    184  dev_err(tmp_adev->dev,  in aldebaran_mode2_perform_reset()
    196  r = tmp_adev->asic_reset_res;  in aldebaran_mode2_perform_reset()
    358  amdgpu_set_init_level(tmp_adev,  in aldebaran_mode2_restore_hwcontext()
    360  dev_info(tmp_adev->dev,  in aldebaran_mode2_restore_hwcontext()
    377  if (tmp_adev->sdma.ras &&  in aldebaran_mode2_restore_hwcontext()
    379  r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev,  in aldebaran_mode2_restore_hwcontext()
    387  if (tmp_adev->gfx.ras &&  in aldebaran_mode2_restore_hwcontext()
    389  r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev,  in aldebaran_mode2_restore_hwcontext()
    398  amdgpu_ras_resume(tmp_adev);  in aldebaran_mode2_restore_hwcontext()
    404  tmp_adev);  in aldebaran_mode2_restore_hwcontext()
    [all …]
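The aldebaran.c hits come from the mode2 reset path, where tmp_adev is the device currently being recovered and RAS late init is re-run per IP block after the reset. Below is a minimal sketch of that guarded re-init step; the ras_comm argument and the error handling are assumptions, since the matches above are truncated before that point.

```c
/*
 * Sketch of the per-block RAS re-init seen in aldebaran_mode2_restore_hwcontext().
 * The ras_comm argument and the error handling are assumptions; only the
 * guarded ras_late_init calls and amdgpu_ras_resume() appear in the matches.
 */
static int example_mode2_ras_reinit(struct amdgpu_device *tmp_adev)
{
	int r = 0;

	/* Re-run SDMA RAS late init only if the block registered a hook. */
	if (tmp_adev->sdma.ras &&
	    tmp_adev->sdma.ras->ras_block.ras_late_init) {
		r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev,
				&tmp_adev->sdma.ras->ras_block.ras_comm);
		if (r)
			return r;
	}

	/* Same guarded call for the GFX RAS block. */
	if (tmp_adev->gfx.ras &&
	    tmp_adev->gfx.ras->ras_block.ras_late_init) {
		r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev,
				&tmp_adev->gfx.ras->ras_block.ras_comm);
		if (r)
			return r;
	}

	/* Re-arm RAS once the per-block late init has succeeded. */
	amdgpu_ras_resume(tmp_adev);
	return 0;
}
```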
amdgpu_reset.c
    61   struct amdgpu_device *tmp_adev;  in amdgpu_reset_xgmi_reset_on_init_prep_hwctxt() local
    65   amdgpu_unregister_gpu_instance(tmp_adev);  in amdgpu_reset_xgmi_reset_on_init_prep_hwctxt()
    68   dev_err(tmp_adev->dev,  in amdgpu_reset_xgmi_reset_on_init_prep_hwctxt()
    82   struct amdgpu_device *tmp_adev = NULL;  in amdgpu_reset_xgmi_reset_on_init_restore_hwctxt() local
    89   if (!tmp_adev->kfd.init_complete) {  in amdgpu_reset_xgmi_reset_on_init_restore_hwctxt()
    90   kgd2kfd_init_zone_device(tmp_adev);  in amdgpu_reset_xgmi_reset_on_init_restore_hwctxt()
    91   amdgpu_amdkfd_device_init(tmp_adev);  in amdgpu_reset_xgmi_reset_on_init_restore_hwctxt()
    105  struct amdgpu_device *tmp_adev = NULL;  in amdgpu_reset_xgmi_reset_on_init_perform_reset() local
    112  tmp_adev->reset_cntl->active_reset =  in amdgpu_reset_xgmi_reset_on_init_perform_reset()
    122  dev_err(tmp_adev->dev,  in amdgpu_reset_xgmi_reset_on_init_perform_reset()
    [all …]
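In amdgpu_reset.c the matches belong to the XGMI reset-on-init handlers, which walk the devices being reset. During the restore step, KFD is brought up only on devices whose KFD init had not completed before the reset. A short sketch of that conditional bring-up follows; reset_device_list is an assumed name for the list of devices under reset, as it is not visible in the truncated matches.

```c
/*
 * Sketch of the restore-step pattern from amdgpu_reset_xgmi_reset_on_init_restore_hwctxt().
 * reset_device_list is an assumed parameter name; only the kfd.init_complete
 * check and the two KFD init calls are taken from the matches themselves.
 */
static void example_restore_kfd_after_reset(struct list_head *reset_device_list)
{
	struct amdgpu_device *tmp_adev = NULL;

	list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
		if (!tmp_adev->kfd.init_complete) {
			/* KFD never came up on this device, so initialize it now. */
			kgd2kfd_init_zone_device(tmp_adev);
			amdgpu_amdkfd_device_init(tmp_adev);
		}
	}
}
```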
sienna_cichlid.c
    221  struct amdgpu_device *tmp_adev = (struct amdgpu_device *)reset_ctl->handle;  in sienna_cichlid_mode2_restore_hwcontext() local
    223  amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_RESET_RECOVERY);  in sienna_cichlid_mode2_restore_hwcontext()
    224  dev_info(tmp_adev->dev,  in sienna_cichlid_mode2_restore_hwcontext()
    226  r = sienna_cichlid_mode2_restore_ip(tmp_adev);  in sienna_cichlid_mode2_restore_hwcontext()
    234  amdgpu_register_gpu_instance(tmp_adev);  in sienna_cichlid_mode2_restore_hwcontext()
    237  amdgpu_ras_resume(tmp_adev);  in sienna_cichlid_mode2_restore_hwcontext()
    239  amdgpu_irq_gpu_reset_resume_helper(tmp_adev);  in sienna_cichlid_mode2_restore_hwcontext()
    241  amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_DEFAULT);  in sienna_cichlid_mode2_restore_hwcontext()
    242  r = amdgpu_ib_ring_tests(tmp_adev);  in sienna_cichlid_mode2_restore_hwcontext()
    244  dev_err(tmp_adev->dev,  in sienna_cichlid_mode2_restore_hwcontext()
smu_v13_0_10.c
    222  struct amdgpu_device *tmp_adev = (struct amdgpu_device *)reset_ctl->handle;  in smu_v13_0_10_mode2_restore_hwcontext() local
    224  amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_RESET_RECOVERY);  in smu_v13_0_10_mode2_restore_hwcontext()
    225  dev_info(tmp_adev->dev,  in smu_v13_0_10_mode2_restore_hwcontext()
    227  r = smu_v13_0_10_mode2_restore_ip(tmp_adev);  in smu_v13_0_10_mode2_restore_hwcontext()
    231  amdgpu_register_gpu_instance(tmp_adev);  in smu_v13_0_10_mode2_restore_hwcontext()
    234  amdgpu_ras_resume(tmp_adev);  in smu_v13_0_10_mode2_restore_hwcontext()
    236  amdgpu_irq_gpu_reset_resume_helper(tmp_adev);  in smu_v13_0_10_mode2_restore_hwcontext()
    238  amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_DEFAULT);  in smu_v13_0_10_mode2_restore_hwcontext()
    239  r = amdgpu_ib_ring_tests(tmp_adev);  in smu_v13_0_10_mode2_restore_hwcontext()
    241  dev_err(tmp_adev->dev,  in smu_v13_0_10_mode2_restore_hwcontext()
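sienna_cichlid.c and smu_v13_0_10.c show essentially the same mode2 restore_hwcontext sequence and differ only in the per-ASIC restore_ip helper. A condensed sketch of that shared sequence is below; example_mode2_restore_ip stands in for the ASIC-specific helper, the log strings are assumptions, and most error handling is abbreviated.

```c
/* Stand-in for the ASIC-specific restore helper (sienna_cichlid_mode2_restore_ip()
 * or smu_v13_0_10_mode2_restore_ip()); body omitted in this sketch. */
static int example_mode2_restore_ip(struct amdgpu_device *adev)
{
	return 0; /* the real helpers re-initialize the ASIC's IP blocks */
}

/* Sketch of the shared mode2 restore_hwcontext flow visible in both files above. */
static int example_mode2_restore_hwcontext(struct amdgpu_reset_control *reset_ctl)
{
	struct amdgpu_device *tmp_adev = (struct amdgpu_device *)reset_ctl->handle;
	int r;

	/* Use the reduced init path while the device is still recovering. */
	amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_RESET_RECOVERY);
	dev_info(tmp_adev->dev, "GPU reset succeeded, trying to resume\n"); /* message assumed */

	r = example_mode2_restore_ip(tmp_adev);
	if (r)
		return r;

	amdgpu_register_gpu_instance(tmp_adev);

	/* Resume RAS and re-arm interrupts before exercising the rings. */
	amdgpu_ras_resume(tmp_adev);
	amdgpu_irq_gpu_reset_resume_helper(tmp_adev);

	/* Back to the normal init level, then sanity-check the rings. */
	amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_DEFAULT);
	r = amdgpu_ib_ring_tests(tmp_adev);
	if (r)
		dev_err(tmp_adev->dev, "ib ring test failed (%d)\n", r); /* message assumed */

	return r;
}
```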
amdgpu_xgmi.c
    944   struct amdgpu_device *tmp_adev;  in amdgpu_xgmi_initialize_hive_get_data_partition() local
    950   dev_err(tmp_adev->dev,  in amdgpu_xgmi_initialize_hive_get_data_partition()
    1044  if (tmp_adev != adev) {  in amdgpu_xgmi_add_device()
    1079  dev_err(tmp_adev->dev,  in amdgpu_xgmi_add_device()
    1081  tmp_adev->gmc.xgmi.node_id,  in amdgpu_xgmi_add_device()
    1102  dev_err(tmp_adev->dev,  in amdgpu_xgmi_add_device()
    1648  struct amdgpu_device *tmp_adev;  in amdgpu_xgmi_reset_on_init_work() local
    1676  dev_err(tmp_adev->dev,  in amdgpu_xgmi_reset_on_init_work()
    1719  struct amdgpu_device *tmp_adev;  in amdgpu_xgmi_request_nps_change() local
    1737  tmp_adev, req_nps_mode);  in amdgpu_xgmi_request_nps_change()
    [all …]
amdgpu_device.c
    5809  struct amdgpu_device *tmp_adev;  in amdgpu_device_reinit_after_reset() local
    5851  tmp_adev->dev,  in amdgpu_device_reinit_after_reset()
    5861  tmp_adev->xcp_mgr);  in amdgpu_device_reinit_after_reset()
    5936  tmp_adev->asic_reset_res = r;  in amdgpu_device_reinit_after_reset()
    5982  dev_err(tmp_adev->dev,  in amdgpu_do_asic_reset()
    6155  tmp_adev->shutdown = true;  in amdgpu_device_recovery_prepare()
    6181  tmp_adev =  in amdgpu_device_recovery_get_reset_lock()
    6193  tmp_adev =  in amdgpu_device_recovery_put_reset_lock()
    6345  if (tmp_adev->asic_reset_res)  in amdgpu_device_sched_resume()
    6348  tmp_adev->asic_reset_res = 0;  in amdgpu_device_sched_resume()
    [all …]
amdgpu_job.c
    55  struct amdgpu_device *tmp_adev = NULL;  in amdgpu_job_core_dump() local
    68  list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head)  in amdgpu_job_core_dump()
    69  list_add_tail(&tmp_adev->reset_list, &device_list);  in amdgpu_job_core_dump()
    79  list_for_each_entry(tmp_adev, device_list_handle, reset_list)  in amdgpu_job_core_dump()
    80  amdgpu_job_do_core_dump(tmp_adev, job);  in amdgpu_job_core_dump()
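amdgpu_job.c shows the common hive-walk idiom: every device in the XGMI hive is chained onto a local list through its reset_list node, and the list is then processed device by device. The sketch below reconstructs that idiom around the matched lines; the no-hive fallback and the hive get/put calls are assumptions not shown in the matches.

```c
/*
 * Sketch of the hive-walk idiom from amdgpu_job_core_dump(): gather the
 * devices to dump into a local list, then dump each one.
 */
static void example_job_core_dump(struct amdgpu_device *adev, struct amdgpu_job *job)
{
	struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev);
	struct amdgpu_device *tmp_adev = NULL;
	struct list_head device_list, *device_list_handle;

	INIT_LIST_HEAD(&device_list);

	if (hive) {
		/* Chain every hive member onto the local list via its reset_list node. */
		list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head)
			list_add_tail(&tmp_adev->reset_list, &device_list);
	} else {
		/* Assumed fallback: dump only the device that owns the job. */
		list_add_tail(&adev->reset_list, &device_list);
	}
	device_list_handle = &device_list;

	/* Dump every device gathered above. */
	list_for_each_entry(tmp_adev, device_list_handle, reset_list)
		amdgpu_job_do_core_dump(tmp_adev, job);

	if (hive)
		amdgpu_put_xgmi_hive(hive);
}
```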
amdgpu_ras.c
    2598  struct amdgpu_device *tmp_adev;  in amdgpu_ras_set_fed_all() local
    2601  list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head)  in amdgpu_ras_set_fed_all()
    2602  amdgpu_ras_set_fed(tmp_adev, status);  in amdgpu_ras_set_fed_all()
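The amdgpu_ras.c hit is small enough that the helper can be reconstructed almost entirely from the matched lines: it fans a fatal-error-detected (FED) flag out to every device in the hive. The sketch below infers the signature from the calls shown, so treat it as an approximation rather than the exact upstream code.

```c
/* Approximate reconstruction of amdgpu_ras_set_fed_all() from the matches above;
 * the parameter list is inferred, not copied from the source. */
static void amdgpu_ras_set_fed_all(struct amdgpu_hive_info *hive, bool status)
{
	struct amdgpu_device *tmp_adev;

	/* Propagate the FED status to every device in the XGMI hive. */
	list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head)
		amdgpu_ras_set_fed(tmp_adev, status);
}
```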
Completed in 35 milliseconds