Lines matching refs: tmp_adev (drivers/gpu/drm/amd/amdgpu/aldebaran.c; source line numbers at left)

In aldebaran_mode2_perform_reset():

  153  struct amdgpu_device *tmp_adev = NULL;  (local)
  167  list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
  168          mutex_lock(&tmp_adev->reset_cntl->reset_lock);
  169          tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_MODE2;
  175  list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
  177          if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {
  179                          &tmp_adev->reset_cntl->reset_work))
  182                  r = aldebaran_mode2_reset(tmp_adev);
  184                  dev_err(tmp_adev->dev,
  186                          r, adev_to_drm(tmp_adev)->unique);
  193  list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
  194          if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {
  195                  flush_work(&tmp_adev->reset_cntl->reset_work);
  196                  r = tmp_adev->asic_reset_res;
  203  list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
  204          mutex_unlock(&tmp_adev->reset_cntl->reset_lock);
  205          tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_NONE;
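
Taken together, these matches outline four passes over the reset list in
aldebaran_mode2_perform_reset(): lock every device and mark a MODE2 reset
active, kick off the per-device resets (queued as work items when the device
belongs to a multi-node XGMI hive, run synchronously otherwise), flush the
queued XGMI work and collect each device's asic_reset_res, then clear the
marker and drop the locks. Below is a condensed sketch of that flow,
reconstructed from the matched lines only; the wrapper name, the workqueue
call, and the log wording are assumptions marked in the comments, so read it
as an outline rather than the verbatim source.

    #include <linux/list.h>
    #include <linux/mutex.h>
    #include <linux/workqueue.h>
    /* amdgpu_device, reset_cntl, etc. come from the amdgpu driver headers */

    /* hypothetical wrapper name, standing in for the function above */
    static int mode2_perform_reset_sketch(struct list_head *reset_device_list)
    {
            struct amdgpu_device *tmp_adev = NULL;
            int r = 0;

            /* Pass 1: serialize against other resets, mark mode2 active */
            list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
                    mutex_lock(&tmp_adev->reset_cntl->reset_lock);
                    tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_MODE2;
            }

            /* Pass 2: start the resets; XGMI nodes reset in parallel via
             * their reset_work item (queue_work() is assumed, the matches
             * only show the work pointer), single nodes synchronously */
            list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
                    if (tmp_adev->gmc.xgmi.num_physical_nodes > 1)
                            queue_work(system_unbound_wq,
                                       &tmp_adev->reset_cntl->reset_work);
                    else
                            r = aldebaran_mode2_reset(tmp_adev);
                    if (r)
                            /* wording paraphrased; format elided in matches */
                            dev_err(tmp_adev->dev,
                                    "ASIC reset failed: %d for drm dev %s\n",
                                    r, adev_to_drm(tmp_adev)->unique);
            }

            /* Pass 3: wait for queued XGMI resets, pick up their result */
            list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
                    if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {
                            flush_work(&tmp_adev->reset_cntl->reset_work);
                            r = tmp_adev->asic_reset_res;
                    }
            }

            /* Pass 4: clear the marker and release the locks */
            list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
                    mutex_unlock(&tmp_adev->reset_cntl->reset_lock);
                    tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_NONE;
            }

            return r;
    }

The lock/queue/flush/unlock split matters: taking every reset_lock first
keeps the whole hive quiesced while any node is still resetting, and the
separate flush pass lets the XGMI resets overlap instead of running one
node at a time.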
In aldebaran_mode2_restore_hwcontext():

  343  struct amdgpu_device *tmp_adev = NULL;  (local)
  357  list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
  358          amdgpu_set_init_level(tmp_adev,
  360          dev_info(tmp_adev->dev,
  363          amdgpu_ras_clear_err_state(tmp_adev);
  364          r = aldebaran_mode2_restore_ip(tmp_adev);
  372          amdgpu_register_gpu_instance(tmp_adev);
  375          con = amdgpu_ras_get_context(tmp_adev);
  376          if (!amdgpu_sriov_vf(tmp_adev) && con) {
  377                  if (tmp_adev->sdma.ras &&
  378                      tmp_adev->sdma.ras->ras_block.ras_late_init) {
  379                          r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev,
  380                                          &tmp_adev->sdma.ras->ras_block.ras_comm);
  382                                  dev_err(tmp_adev->dev, "SDMA failed to execute ras_late_init! ret:%d\n", r);
  387                  if (tmp_adev->gfx.ras &&
  388                      tmp_adev->gfx.ras->ras_block.ras_late_init) {
  389                          r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev,
  390                                          &tmp_adev->gfx.ras->ras_block.ras_comm);
  392                                  dev_err(tmp_adev->dev, "GFX failed to execute ras_late_init! ret:%d\n", r);
  398          amdgpu_ras_resume(tmp_adev);
  402              tmp_adev->gmc.xgmi.num_physical_nodes > 1)
  404                                          tmp_adev);
  407          amdgpu_set_init_level(tmp_adev,
  409          amdgpu_irq_gpu_reset_resume_helper(tmp_adev);
  411          r = amdgpu_ib_ring_tests(tmp_adev);
  413          dev_err(tmp_adev->dev,
  416          tmp_adev->asic_reset_res = r;
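
The matches from aldebaran_mode2_restore_hwcontext() trace the per-device
recovery loop that runs after the reset above: each device is brought back
up at a reduced init level, its RAS error state is cleared and its IP blocks
restored, the device is re-registered, the SDMA and GFX ras_late_init hooks
run (skipped for SR-IOV VFs), RAS is resumed, the XGMI topology is refreshed
on multi-node hives, and the IB ring tests decide whether asic_reset_res
records a failure. A condensed sketch under the same caveats as above; the
init-level constants, the topology callee, the hive argument, and the log
wording are not visible in the matches and are assumptions.

    /* hypothetical wrapper name and hive parameter */
    static int mode2_restore_hwcontext_sketch(struct list_head *reset_device_list,
                                              struct amdgpu_hive_info *hive)
    {
            struct amdgpu_device *tmp_adev = NULL;
            struct amdgpu_ras *con;
            int r = 0;

            list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
                    /* re-init at the recovery level; constant assumed */
                    amdgpu_set_init_level(tmp_adev,
                                          AMDGPU_INIT_LEVEL_RESET_RECOVERY);
                    /* message elided in the matches */
                    dev_info(tmp_adev->dev, "resuming after mode2 reset\n");

                    amdgpu_ras_clear_err_state(tmp_adev);
                    r = aldebaran_mode2_restore_ip(tmp_adev);
                    if (r)
                            break;

                    /* track the ASIC again now that reset completed */
                    amdgpu_register_gpu_instance(tmp_adev);

                    /* RAS late init for SDMA and GFX, not on SR-IOV VFs */
                    con = amdgpu_ras_get_context(tmp_adev);
                    if (!amdgpu_sriov_vf(tmp_adev) && con) {
                            if (tmp_adev->sdma.ras &&
                                tmp_adev->sdma.ras->ras_block.ras_late_init) {
                                    r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev,
                                                    &tmp_adev->sdma.ras->ras_block.ras_comm);
                                    if (r)
                                            dev_err(tmp_adev->dev,
                                                    "SDMA failed to execute ras_late_init! ret:%d\n", r);
                            }
                            if (tmp_adev->gfx.ras &&
                                tmp_adev->gfx.ras->ras_block.ras_late_init) {
                                    r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev,
                                                    &tmp_adev->gfx.ras->ras_block.ras_comm);
                                    if (r)
                                            dev_err(tmp_adev->dev,
                                                    "GFX failed to execute ras_late_init! ret:%d\n", r);
                            }
                    }

                    amdgpu_ras_resume(tmp_adev);

                    /* refresh PSP topology on multi-node hives; callee and
                     * hive argument assumed, the matches show only the node
                     * count check and the trailing tmp_adev argument */
                    if (tmp_adev->gmc.xgmi.num_physical_nodes > 1)
                            r = amdgpu_xgmi_update_topology(hive, tmp_adev);

                    /* back to the default level, then sanity-check rings */
                    amdgpu_set_init_level(tmp_adev, AMDGPU_INIT_LEVEL_DEFAULT);
                    amdgpu_irq_gpu_reset_resume_helper(tmp_adev);
                    r = amdgpu_ib_ring_tests(tmp_adev);
                    if (r) {
                            /* wording paraphrased */
                            dev_err(tmp_adev->dev,
                                    "ib ring test failed (%d)\n", r);
                            tmp_adev->asic_reset_res = r;
                    }
            }

            return r;
    }

Note the ordering: ras_late_init and amdgpu_ras_resume run before the IB
ring tests, so RAS handling is live again before the rings are trusted with
work, and a ring-test failure is preserved in asic_reset_res for the caller
rather than silently dropped.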