Lines matching references to the identifier ras
1019 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_ras_get_ecc_info() local
1026 ret = amdgpu_dpm_get_ecc_info(adev, (void *)&(ras->umc_ecc)); in amdgpu_ras_get_ecc_info()
1028 if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops && in amdgpu_ras_get_ecc_info()
1029 adev->umc.ras->ras_block.hw_ops->query_ras_error_count) in amdgpu_ras_get_ecc_info()
1030 adev->umc.ras->ras_block.hw_ops->query_ras_error_count(adev, err_data); in amdgpu_ras_get_ecc_info()
1035 if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops && in amdgpu_ras_get_ecc_info()
1036 adev->umc.ras->ras_block.hw_ops->query_ras_error_address) in amdgpu_ras_get_ecc_info()
1037 adev->umc.ras->ras_block.hw_ops->query_ras_error_address(adev, err_data); in amdgpu_ras_get_ecc_info()
1039 if (adev->umc.ras && in amdgpu_ras_get_ecc_info()
1040 adev->umc.ras->ecc_info_query_ras_error_count) in amdgpu_ras_get_ecc_info()
1041 adev->umc.ras->ecc_info_query_ras_error_count(adev, err_data); in amdgpu_ras_get_ecc_info()
1043 if (adev->umc.ras && in amdgpu_ras_get_ecc_info()
1044 adev->umc.ras->ecc_info_query_ras_error_address) in amdgpu_ras_get_ecc_info()
1045 adev->umc.ras->ecc_info_query_ras_error_address(adev, err_data); in amdgpu_ras_get_ecc_info()
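The amdgpu_ras_get_ecc_info() hits above all follow one pattern: each optional callback is NULL-checked at every pointer level before it is invoked, so IP blocks that do not register RAS hw_ops are simply skipped. Below is a minimal, self-contained userspace sketch of that guarded-dispatch pattern; the struct and field names are illustrative stand-ins, not the driver's actual types.

/*
 * Guarded dispatch: mirror of "if (x && x->y && x->y->cb) x->y->cb(...)".
 * Simplified, hypothetical types for illustration only.
 */
#include <stdio.h>

struct hw_ops {
	void (*query_error_count)(void *dev, int *err_count);
	void (*query_error_address)(void *dev, int *err_count);
};

struct ras_block {
	struct hw_ops *hw_ops;	/* may be NULL if the block has no RAS support */
};

static void query_errors(void *dev, struct ras_block *blk, int *err_count)
{
	/* Check every level before the call, as in the driver hits above. */
	if (blk && blk->hw_ops && blk->hw_ops->query_error_count)
		blk->hw_ops->query_error_count(dev, err_count);

	if (blk && blk->hw_ops && blk->hw_ops->query_error_address)
		blk->hw_ops->query_error_address(dev, err_count);
}

static void count_cb(void *dev, int *err_count)
{
	(void)dev;
	(*err_count)++;
}

int main(void)
{
	struct hw_ops ops = { .query_error_count = count_cb };	/* address cb left NULL */
	struct ras_block blk = { .hw_ops = &ops };
	int errs = 0;

	query_errors(NULL, &blk, &errs);	/* only the count callback fires */
	printf("errors counted: %d\n", errs);
	return 0;
}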
2105 if (adev->nbio.ras && in amdgpu_ras_interrupt_fatal_error_handler()
2106 adev->nbio.ras->handle_ras_controller_intr_no_bifring) in amdgpu_ras_interrupt_fatal_error_handler()
2107 adev->nbio.ras->handle_ras_controller_intr_no_bifring(adev); in amdgpu_ras_interrupt_fatal_error_handler()
2109 if (adev->nbio.ras && in amdgpu_ras_interrupt_fatal_error_handler()
2110 adev->nbio.ras->handle_ras_err_event_athub_intr_no_bifring) in amdgpu_ras_interrupt_fatal_error_handler()
2111 adev->nbio.ras->handle_ras_err_event_athub_intr_no_bifring(adev); in amdgpu_ras_interrupt_fatal_error_handler()
2534 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_ras_in_recovery() local
2542 if (ras && (atomic_read(&ras->in_recovery) || hive_ras_recovery)) in amdgpu_ras_in_recovery()
2558 struct amdgpu_ras *ras = in amdgpu_ras_do_recovery() local
2561 struct amdgpu_device *adev = ras->adev; in amdgpu_ras_do_recovery()
2581 if (!ras->disable_ras_err_cnt_harvest) { in amdgpu_ras_do_recovery()
2601 if (amdgpu_device_should_recover_gpu(ras->adev)) { in amdgpu_ras_do_recovery()
2610 if (!amdgpu_ras_is_poison_mode_supported(ras->adev)) in amdgpu_ras_do_recovery()
2615 if (ras->gpu_reset_flags & AMDGPU_RAS_GPU_RESET_MODE2_RESET) { in amdgpu_ras_do_recovery()
2616 ras->gpu_reset_flags &= ~AMDGPU_RAS_GPU_RESET_MODE2_RESET; in amdgpu_ras_do_recovery()
2623 if (ras->gpu_reset_flags & AMDGPU_RAS_GPU_RESET_MODE1_RESET) { in amdgpu_ras_do_recovery()
2624 ras->gpu_reset_flags &= ~AMDGPU_RAS_GPU_RESET_MODE1_RESET; in amdgpu_ras_do_recovery()
2631 amdgpu_device_gpu_recover(ras->adev, NULL, &reset_context); in amdgpu_ras_do_recovery()
2633 atomic_set(&ras->in_recovery, 0); in amdgpu_ras_do_recovery()
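The amdgpu_ras_do_recovery() hits show the recovery worker consuming reset-mode bits from gpu_reset_flags and dropping in_recovery back to 0 when it finishes. The sketch below illustrates that flow under simplified assumptions: the names and flag values are made up, and C11 atomics stand in for the kernel's atomic_t API.

/* Illustrative recovery-flag handling; not the driver's actual code. */
#include <stdatomic.h>
#include <stdio.h>

#define RESET_MODE1	(1u << 0)
#define RESET_MODE2	(1u << 1)

struct ras_ctx {
	unsigned int gpu_reset_flags;
	atomic_int in_recovery;
};

static void do_recovery(struct ras_ctx *ras)
{
	if (ras->gpu_reset_flags & RESET_MODE2) {
		ras->gpu_reset_flags &= ~RESET_MODE2;	/* consume the request */
		puts("performing mode2 reset");
	}

	if (ras->gpu_reset_flags & RESET_MODE1) {
		ras->gpu_reset_flags &= ~RESET_MODE1;
		puts("performing mode1 (full) reset");
	}

	/* Recovery finished: allow the next fatal error to schedule work. */
	atomic_store(&ras->in_recovery, 0);
}

int main(void)
{
	struct ras_ctx ras = { .gpu_reset_flags = RESET_MODE1 };

	atomic_store(&ras.in_recovery, 1);	/* as set when the work was queued */
	do_recovery(&ras);
	printf("in_recovery = %d\n", atomic_load(&ras.in_recovery));
	return 0;
}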
2755 &adev->psp.ras_context.ras->eeprom_control; in amdgpu_ras_load_bad_pages()
2962 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_ras_poison_creation_handler() local
2972 ecc_log = &ras->umc_ecc_log; in amdgpu_ras_poison_creation_handler()
3011 schedule_delayed_work(&ras->page_retirement_dwork, 0); in amdgpu_ras_poison_creation_handler()
3408 adev->umc.ras && in amdgpu_ras_query_poison_mode()
3409 adev->umc.ras->query_ras_poison_mode) { in amdgpu_ras_query_poison_mode()
3413 adev->umc.ras->query_ras_poison_mode(adev); in amdgpu_ras_query_poison_mode()
3528 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_ras_event_mgr_init() local
3531 if (!ras) in amdgpu_ras_event_mgr_init()
3535 ras->event_mgr = hive ? &hive->event_mgr : &ras->__event_mgr; in amdgpu_ras_event_mgr_init()
3540 ras_event_mgr_init(ras->event_mgr); in amdgpu_ras_event_mgr_init()
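The amdgpu_ras_event_mgr_init() hits select between a hive-wide event manager and the context's own embedded one. A small sketch of that shared-vs-local selection, with illustrative types only:

#include <stdio.h>

struct event_mgr {
	unsigned long seqno;
};

struct hive {
	struct event_mgr event_mgr;	/* shared by all devices in the hive */
};

struct ras_ctx {
	struct event_mgr *event_mgr;	/* points at whichever manager is active */
	struct event_mgr __event_mgr;	/* per-device fallback */
};

static void event_mgr_init(struct ras_ctx *ras, struct hive *hive)
{
	if (!ras)
		return;
	/* Prefer the hive-wide manager, fall back to the local one. */
	ras->event_mgr = hive ? &hive->event_mgr : &ras->__event_mgr;
	ras->event_mgr->seqno = 0;
}

int main(void)
{
	struct ras_ctx a = { 0 }, b = { 0 };
	struct hive h = { { 0 } };

	event_mgr_init(&a, &h);		/* shares the hive manager */
	event_mgr_init(&b, NULL);	/* uses its own embedded manager */
	printf("a shares hive mgr: %d\n", a.event_mgr == &h.event_mgr);
	printf("b uses local mgr:  %d\n", b.event_mgr == &b.__event_mgr);
	return 0;
}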
3620 adev->nbio.ras = &nbio_v7_4_ras; in amdgpu_ras_init()
3630 adev->nbio.ras = &nbio_v4_3_ras; in amdgpu_ras_init()
3634 adev->nbio.ras = &nbio_v7_9_ras; in amdgpu_ras_init()
3647 if (adev->nbio.ras && in amdgpu_ras_init()
3648 adev->nbio.ras->init_ras_controller_interrupt) { in amdgpu_ras_init()
3649 r = adev->nbio.ras->init_ras_controller_interrupt(adev); in amdgpu_ras_init()
3654 if (adev->nbio.ras && in amdgpu_ras_init()
3655 adev->nbio.ras->init_ras_err_event_athub_interrupt) { in amdgpu_ras_init()
3656 r = adev->nbio.ras->init_ras_err_event_athub_interrupt(adev); in amdgpu_ras_init()
4005 struct amdgpu_ras *ras; in amdgpu_ras_get_fed_status() local
4007 ras = amdgpu_ras_get_context(adev); in amdgpu_ras_get_fed_status()
4008 if (!ras) in amdgpu_ras_get_fed_status()
4011 return atomic_read(&ras->fed); in amdgpu_ras_get_fed_status()
4016 struct amdgpu_ras *ras; in amdgpu_ras_set_fed() local
4018 ras = amdgpu_ras_get_context(adev); in amdgpu_ras_set_fed()
4019 if (ras) in amdgpu_ras_set_fed()
4020 atomic_set(&ras->fed, !!status); in amdgpu_ras_set_fed()
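The amdgpu_ras_get_fed_status()/amdgpu_ras_set_fed() hits NULL-check the RAS context and then access the fed ("fatal error detected") flag atomically, normalizing the stored value with !!status. A minimal sketch of those accessors, using a simplified stand-in context and C11 atomics in place of the kernel API:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct ras_ctx {
	atomic_int fed;
};

static bool get_fed_status(struct ras_ctx *ras)
{
	if (!ras)		/* RAS may not be initialized on this device */
		return false;
	return atomic_load(&ras->fed);
}

static void set_fed(struct ras_ctx *ras, bool status)
{
	if (ras)
		atomic_store(&ras->fed, !!status);	/* normalize to 0/1 */
}

int main(void)
{
	struct ras_ctx ctx = { 0 };

	set_fed(&ctx, true);
	printf("fed = %d\n", get_fed_status(&ctx));
	set_fed(NULL, true);	/* safe no-op when the context is absent */
	return 0;
}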
4025 struct amdgpu_ras *ras; in __get_ras_event_mgr() local
4027 ras = amdgpu_ras_get_context(adev); in __get_ras_event_mgr()
4028 if (!ras) in __get_ras_event_mgr()
4031 return ras->event_mgr; in __get_ras_event_mgr()
4094 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_ras_global_ras_isr() local
4107 ras->gpu_reset_flags |= AMDGPU_RAS_GPU_RESET_MODE1_RESET; in amdgpu_ras_global_ras_isr()
4244 return adev->psp.ras_context.ras; in amdgpu_ras_get_context()
4252 adev->psp.ras_context.ras = ras_con; in amdgpu_ras_set_context()
4261 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_ras_is_supported() local
4266 ret = ras && (adev->ras_enabled & (1 << block)); in amdgpu_ras_is_supported()
4289 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_ras_reset_gpu() local
4293 ras->gpu_reset_flags = 0; in amdgpu_ras_reset_gpu()
4294 ras->gpu_reset_flags |= AMDGPU_RAS_GPU_RESET_MODE1_RESET; in amdgpu_ras_reset_gpu()
4297 if (atomic_cmpxchg(&ras->in_recovery, 0, 1) == 0) in amdgpu_ras_reset_gpu()
4298 amdgpu_reset_domain_schedule(ras->adev->reset_domain, &ras->recovery_work); in amdgpu_ras_reset_gpu()
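The amdgpu_ras_reset_gpu() hits end with a single-entry guard: atomic_cmpxchg(&ras->in_recovery, 0, 1) lets only the first caller schedule the recovery work, and later callers see the flag already set and back off. The sketch below shows the same guard with C11 atomics standing in for the kernel primitive; the function and variable names are illustrative.

#include <stdatomic.h>
#include <stdio.h>

static atomic_int in_recovery;

static void try_schedule_recovery(const char *who)
{
	int expected = 0;

	/* Equivalent of "if (atomic_cmpxchg(&in_recovery, 0, 1) == 0)". */
	if (atomic_compare_exchange_strong(&in_recovery, &expected, 1))
		printf("%s: recovery work scheduled\n", who);
	else
		printf("%s: recovery already in progress, skipping\n", who);
}

int main(void)
{
	try_schedule_recovery("first error");	/* wins the race, schedules work */
	try_schedule_recovery("second error");	/* flag already set, backs off */
	return 0;
}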