Lines matching refs:tmp_adev (all hits are in drivers/gpu/drm/amd/amdgpu/amdgpu_device.c)

4539 struct amdgpu_device *tmp_adev = NULL; in amdgpu_do_asic_reset() local
4544 tmp_adev = list_first_entry(device_list_handle, struct amdgpu_device, in amdgpu_do_asic_reset()
4546 r = amdgpu_reset_perform_reset(tmp_adev, reset_context); in amdgpu_do_asic_reset()
4563 list_for_each_entry(tmp_adev, device_list_handle, reset_list) { in amdgpu_do_asic_reset()
4565 if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_do_asic_reset()
4566 tmp_adev->gmc.xgmi.pending_reset = false; in amdgpu_do_asic_reset()
4567 if (!queue_work(system_unbound_wq, &tmp_adev->xgmi_reset_work)) in amdgpu_do_asic_reset()
4570 r = amdgpu_asic_reset(tmp_adev); in amdgpu_do_asic_reset()
4573 dev_err(tmp_adev->dev, "ASIC reset failed with error, %d for drm dev, %s", in amdgpu_do_asic_reset()
4574 r, adev_to_drm(tmp_adev)->unique); in amdgpu_do_asic_reset()
4581 list_for_each_entry(tmp_adev, device_list_handle, reset_list) { in amdgpu_do_asic_reset()
4582 if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_do_asic_reset()
4583 flush_work(&tmp_adev->xgmi_reset_work); in amdgpu_do_asic_reset()
4584 r = tmp_adev->asic_reset_res; in amdgpu_do_asic_reset()
4593 list_for_each_entry(tmp_adev, device_list_handle, reset_list) { in amdgpu_do_asic_reset()
4594 if (tmp_adev->mmhub.ras_funcs && in amdgpu_do_asic_reset()
4595 tmp_adev->mmhub.ras_funcs->reset_ras_error_count) in amdgpu_do_asic_reset()
4596 tmp_adev->mmhub.ras_funcs->reset_ras_error_count(tmp_adev); in amdgpu_do_asic_reset()
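
The references above (kernel source lines 4563-4596) are the reset phase of amdgpu_do_asic_reset(): after trying the reset-handler path on the first list entry (amdgpu_reset_perform_reset()), the function walks the reset list itself, queuing each multi-node XGMI device's xgmi_reset_work on system_unbound_wq, then walks the list again to flush that work, pick up each device's asic_reset_res, and clear the MMHUB RAS error counters. Below is a minimal userspace sketch of that queue-then-flush pattern, with pthreads standing in for the workqueue (compile with -pthread); mock_adev and xgmi_reset_work_fn are hypothetical stand-ins, not driver code.

/* Sketch of the two-pass parallel reset in amdgpu_do_asic_reset():
 * pass 1 queues one reset work item per device, pass 2 flushes them
 * and collects per-device results.  Hypothetical names throughout. */
#include <pthread.h>
#include <stdio.h>

struct mock_adev {
	const char *name;
	int asic_reset_res;      /* per-device result, like adev->asic_reset_res */
	pthread_t reset_work;    /* stands in for adev->xgmi_reset_work */
};

static void *xgmi_reset_work_fn(void *data)
{
	struct mock_adev *adev = data;

	printf("%s: ASIC reset running in parallel\n", adev->name);
	adev->asic_reset_res = 0;	/* pretend the reset succeeded */
	return NULL;
}

int main(void)
{
	struct mock_adev hive[] = { { "gpu0" }, { "gpu1" } };
	const int n = 2;
	int i, r = 0;

	/* Pass 1: queue one reset per device (queue_work() in the driver). */
	for (i = 0; i < n; i++)
		pthread_create(&hive[i].reset_work, NULL,
			       xgmi_reset_work_fn, &hive[i]);

	/* Pass 2: flush the work and pick up each device's result
	 * (flush_work() + tmp_adev->asic_reset_res in the driver). */
	for (i = 0; i < n; i++) {
		pthread_join(hive[i].reset_work, NULL);
		if (hive[i].asic_reset_res)
			r = hive[i].asic_reset_res;
	}

	return r;
}
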
4602 list_for_each_entry(tmp_adev, device_list_handle, reset_list) { in amdgpu_do_asic_reset()
4605 r = amdgpu_device_asic_init(tmp_adev); in amdgpu_do_asic_reset()
4607 dev_warn(tmp_adev->dev, "asic atom init failed!"); in amdgpu_do_asic_reset()
4609 dev_info(tmp_adev->dev, "GPU reset succeeded, trying to resume\n"); in amdgpu_do_asic_reset()
4610 r = amdgpu_amdkfd_resume_iommu(tmp_adev); in amdgpu_do_asic_reset()
4614 r = amdgpu_device_ip_resume_phase1(tmp_adev); in amdgpu_do_asic_reset()
4618 vram_lost = amdgpu_device_check_vram_lost(tmp_adev); in amdgpu_do_asic_reset()
4621 amdgpu_inc_vram_lost(tmp_adev); in amdgpu_do_asic_reset()
4624 r = amdgpu_gtt_mgr_recover(ttm_manager_type(&tmp_adev->mman.bdev, TTM_PL_TT)); in amdgpu_do_asic_reset()
4628 r = amdgpu_device_fw_loading(tmp_adev); in amdgpu_do_asic_reset()
4632 r = amdgpu_device_ip_resume_phase2(tmp_adev); in amdgpu_do_asic_reset()
4637 amdgpu_device_fill_reset_magic(tmp_adev); in amdgpu_do_asic_reset()
4643 amdgpu_register_gpu_instance(tmp_adev); in amdgpu_do_asic_reset()
4646 tmp_adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_do_asic_reset()
4647 amdgpu_xgmi_add_device(tmp_adev); in amdgpu_do_asic_reset()
4649 r = amdgpu_device_ip_late_init(tmp_adev); in amdgpu_do_asic_reset()
4653 amdgpu_fbdev_set_suspend(tmp_adev, 0); in amdgpu_do_asic_reset()
4665 if (!amdgpu_ras_eeprom_check_err_threshold(tmp_adev)) { in amdgpu_do_asic_reset()
4667 amdgpu_ras_resume(tmp_adev); in amdgpu_do_asic_reset()
4675 tmp_adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_do_asic_reset()
4677 reset_context->hive, tmp_adev); in amdgpu_do_asic_reset()
4683 amdgpu_irq_gpu_reset_resume_helper(tmp_adev); in amdgpu_do_asic_reset()
4684 r = amdgpu_ib_ring_tests(tmp_adev); in amdgpu_do_asic_reset()
4686 dev_err(tmp_adev->dev, "ib ring test failed (%d).\n", r); in amdgpu_do_asic_reset()
4694 r = amdgpu_device_recover_vram(tmp_adev); in amdgpu_do_asic_reset()
4696 tmp_adev->asic_reset_res = r; in amdgpu_do_asic_reset()
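
Lines 4602-4696 are the resume pass of the same function: each device on the list is re-initialized step by step (atom init, IP resume phase 1, VRAM-lost check, GTT recovery, firmware loading, IP resume phase 2, late init, IB ring tests, VRAM recovery) and the first failing step is remembered in asic_reset_res. The following is a minimal userspace sketch of that per-device step chain, assuming hypothetical stub names (asic_init, ip_resume_phase1, ...); it illustrates the goto-on-error structure only, not the real re-init sequence.

/* Sketch of the per-device resume pass at the end of
 * amdgpu_do_asic_reset(): walk the list again and re-initialize each
 * device, recording the first failure.  Hypothetical stand-ins only. */
#include <stdio.h>

struct mock_adev {
	const char *name;
	int asic_reset_res;
};

/* Stubs for the re-init steps; each returns 0 on success. */
static int asic_init(struct mock_adev *a)        { printf("%s: atom init\n", a->name); return 0; }
static int ip_resume_phase1(struct mock_adev *a) { printf("%s: IP resume phase 1\n", a->name); return 0; }
static int fw_loading(struct mock_adev *a)       { printf("%s: firmware loading\n", a->name); return 0; }
static int ip_resume_phase2(struct mock_adev *a) { printf("%s: IP resume phase 2\n", a->name); return 0; }
static int ib_ring_tests(struct mock_adev *a)    { printf("%s: IB ring tests\n", a->name); return 0; }

static int resume_one(struct mock_adev *adev)
{
	int r;

	r = asic_init(adev);
	if (r)
		goto out;
	r = ip_resume_phase1(adev);
	if (r)
		goto out;
	r = fw_loading(adev);
	if (r)
		goto out;
	r = ip_resume_phase2(adev);
	if (r)
		goto out;
	r = ib_ring_tests(adev);
out:
	adev->asic_reset_res = r;	/* kept per device, as the driver does */
	return r;
}

int main(void)
{
	struct mock_adev list[] = { { "gpu0" }, { "gpu1" } };

	/* The driver iterates with list_for_each_entry() over the reset list. */
	for (int i = 0; i < 2; i++)
		resume_one(&list[i]);
	return 0;
}
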
4750 struct amdgpu_device *tmp_adev = NULL; in amdgpu_device_lock_hive_adev() local
4757 list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) { in amdgpu_device_lock_hive_adev()
4758 if (!amdgpu_device_lock_adev(tmp_adev, hive)) in amdgpu_device_lock_hive_adev()
4766 if (!list_is_first(&tmp_adev->gmc.xgmi.head, &hive->device_list)) { in amdgpu_device_lock_hive_adev()
4774 dev_warn(tmp_adev->dev, "Hive lock iteration broke in the middle. Rolling back to unlock"); in amdgpu_device_lock_hive_adev()
4775 list_for_each_entry_continue_reverse(tmp_adev, &hive->device_list, gmc.xgmi.head) { in amdgpu_device_lock_hive_adev()
4776 amdgpu_device_unlock_adev(tmp_adev); in amdgpu_device_lock_hive_adev()
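
Lines 4750-4776 belong to amdgpu_device_lock_hive_adev(), which tries to take the per-device lock on every adapter in an XGMI hive and, if a lock attempt fails part-way through, walks the list back with list_for_each_entry_continue_reverse() to drop the locks already taken. Below is a minimal userspace sketch of that all-or-nothing lock/rollback pattern over an array; the names (mock_adev, try_lock_adev, unlock_adev, lock_hive) are hypothetical.

/* Sketch of the lock-all-or-roll-back pattern in
 * amdgpu_device_lock_hive_adev(): lock each device in order; on the
 * first failure, unlock the ones already locked, in reverse order.
 * Hypothetical userspace stand-in, not driver code. */
#include <stdbool.h>
#include <stdio.h>

struct mock_adev { const char *name; };

static bool try_lock_adev(struct mock_adev *adev)
{
	printf("%s: locked\n", adev->name);
	return true;	/* pretend locking always succeeds here */
}

static void unlock_adev(struct mock_adev *adev)
{
	printf("%s: unlocked (rollback)\n", adev->name);
}

static bool lock_hive(struct mock_adev *devs, int count)
{
	int i;

	for (i = 0; i < count; i++) {
		if (!try_lock_adev(&devs[i]))
			break;
	}
	if (i == count)
		return true;	/* every device locked */

	/* Roll back in reverse, mirroring
	 * list_for_each_entry_continue_reverse() in the driver. */
	while (--i >= 0)
		unlock_adev(&devs[i]);
	return false;
}

int main(void)
{
	struct mock_adev hive[] = { { "gpu0" }, { "gpu1" }, { "gpu2" } };

	if (!lock_hive(hive, 3))
		fprintf(stderr, "hive lock failed, rolled back\n");
	return 0;
}
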
4929 struct amdgpu_device *tmp_adev = NULL; in amdgpu_device_gpu_recover() local
5006 list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head) in amdgpu_device_gpu_recover()
5007 list_add_tail(&tmp_adev->reset_list, &device_list); in amdgpu_device_gpu_recover()
5017 list_for_each_entry(tmp_adev, device_list_handle, reset_list) { in amdgpu_device_gpu_recover()
5028 if (!amdgpu_device_suspend_display_audio(tmp_adev)) in amdgpu_device_gpu_recover()
5031 amdgpu_ras_set_error_query_ready(tmp_adev, false); in amdgpu_device_gpu_recover()
5033 cancel_delayed_work_sync(&tmp_adev->delayed_init_work); in amdgpu_device_gpu_recover()
5035 if (!amdgpu_sriov_vf(tmp_adev)) in amdgpu_device_gpu_recover()
5036 amdgpu_amdkfd_pre_reset(tmp_adev); in amdgpu_device_gpu_recover()
5042 amdgpu_unregister_gpu_instance(tmp_adev); in amdgpu_device_gpu_recover()
5044 amdgpu_fbdev_set_suspend(tmp_adev, 1); in amdgpu_device_gpu_recover()
5048 amdgpu_device_ip_need_full_reset(tmp_adev)) in amdgpu_device_gpu_recover()
5049 amdgpu_ras_suspend(tmp_adev); in amdgpu_device_gpu_recover()
5052 struct amdgpu_ring *ring = tmp_adev->rings[i]; in amdgpu_device_gpu_recover()
5062 atomic_inc(&tmp_adev->gpu_reset_counter); in amdgpu_device_gpu_recover()
5082 list_for_each_entry(tmp_adev, device_list_handle, reset_list) { in amdgpu_device_gpu_recover()
5083 r = amdgpu_device_pre_asic_reset(tmp_adev, &reset_context); in amdgpu_device_gpu_recover()
5086 dev_err(tmp_adev->dev, "GPU pre asic reset failed with err, %d for drm dev, %s ", in amdgpu_device_gpu_recover()
5087 r, adev_to_drm(tmp_adev)->unique); in amdgpu_device_gpu_recover()
5088 tmp_adev->asic_reset_res = r; in amdgpu_device_gpu_recover()
5108 list_for_each_entry(tmp_adev, device_list_handle, reset_list) { in amdgpu_device_gpu_recover()
5120 tmp_adev, device_list_handle, &reset_context); in amdgpu_device_gpu_recover()
5123 struct amdgpu_ring *ring = tmp_adev->rings[i]; in amdgpu_device_gpu_recover()
5129 if (!tmp_adev->asic_reset_res && !job_signaled) in amdgpu_device_gpu_recover()
5132 drm_sched_start(&ring->sched, !tmp_adev->asic_reset_res); in amdgpu_device_gpu_recover()
5135 if (!drm_drv_uses_atomic_modeset(adev_to_drm(tmp_adev)) && !job_signaled) { in amdgpu_device_gpu_recover()
5136 drm_helper_resume_force_mode(adev_to_drm(tmp_adev)); in amdgpu_device_gpu_recover()
5139 tmp_adev->asic_reset_res = 0; in amdgpu_device_gpu_recover()
5143 dev_info(tmp_adev->dev, "GPU reset(%d) failed\n", atomic_read(&tmp_adev->gpu_reset_counter)); in amdgpu_device_gpu_recover()
5144 amdgpu_vf_error_put(tmp_adev, AMDGIM_ERROR_VF_GPU_RESET_FAIL, 0, r); in amdgpu_device_gpu_recover()
5146 dev_info(tmp_adev->dev, "GPU reset(%d) succeeded!\n", atomic_read(&tmp_adev->gpu_reset_counter)); in amdgpu_device_gpu_recover()
5147 if (amdgpu_acpi_smart_shift_update(adev_to_drm(tmp_adev), AMDGPU_SS_DEV_D0)) in amdgpu_device_gpu_recover()
5153 list_for_each_entry(tmp_adev, device_list_handle, reset_list) { in amdgpu_device_gpu_recover()
5155 if (!need_emergency_restart && !amdgpu_sriov_vf(tmp_adev)) in amdgpu_device_gpu_recover()
5156 amdgpu_amdkfd_post_reset(tmp_adev); in amdgpu_device_gpu_recover()
5165 amdgpu_device_resume_display_audio(tmp_adev); in amdgpu_device_gpu_recover()
5166 amdgpu_device_unlock_adev(tmp_adev); in amdgpu_device_gpu_recover()
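
The remaining references (lines 4929-5166) are in amdgpu_device_gpu_recover(), where tmp_adev walks the recovery list in stages: the list is built first (the single device, or every member of the XGMI hive), then a pre-reset pass suspends display audio, cancels delayed work, runs KFD pre-reset and stops the schedulers; the reset itself goes through amdgpu_do_asic_reset(); a further pass restarts the schedulers and reports per-device success or failure; and a final pass does KFD post-reset, resumes audio and unlocks each device. Below is a minimal userspace sketch of that staged walk, assuming hypothetical stubs (prepare_device, reset_all, restart_schedulers, finish_device); it shows only the stage-per-pass structure, not the driver's logic.

/* Sketch of the staged recovery walk in amdgpu_device_gpu_recover():
 * every device in the list goes through prepare -> reset -> restart ->
 * finish, each stage as its own pass over the whole list.
 * All names are hypothetical userspace stand-ins. */
#include <stdio.h>

struct mock_adev {
	const char *name;
	int asic_reset_res;
	int gpu_reset_counter;
};

static void prepare_device(struct mock_adev *adev)
{
	adev->gpu_reset_counter++;	/* like atomic_inc(&adev->gpu_reset_counter) */
	printf("%s: pre-reset (suspend audio, stop schedulers)\n", adev->name);
}

static void reset_all(struct mock_adev *devs, int count)
{
	for (int i = 0; i < count; i++)
		devs[i].asic_reset_res = 0;	/* pretend every reset succeeded */
}

static void restart_schedulers(struct mock_adev *adev)
{
	if (!adev->asic_reset_res)
		printf("%s: GPU reset(%d) succeeded, schedulers restarted\n",
		       adev->name, adev->gpu_reset_counter);
	else
		printf("%s: GPU reset(%d) failed\n",
		       adev->name, adev->gpu_reset_counter);
}

static void finish_device(struct mock_adev *adev)
{
	printf("%s: post-reset (resume audio, unlock)\n", adev->name);
}

int main(void)
{
	struct mock_adev list[] = { { "gpu0" }, { "gpu1" } };
	const int n = 2;

	for (int i = 0; i < n; i++)
		prepare_device(&list[i]);

	reset_all(list, n);

	for (int i = 0; i < n; i++)
		restart_schedulers(&list[i]);

	for (int i = 0; i < n; i++)
		finish_device(&list[i]);

	return 0;
}
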