/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_umc.c
    109  if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops &&    [in amdgpu_umc_handle_bad_pages()]
    113  if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops &&    [in amdgpu_umc_handle_bad_pages()]
    136  if (adev->umc.ras &&    [in amdgpu_umc_handle_bad_pages()]
    140  if (adev->umc.ras &&    [in amdgpu_umc_handle_bad_pages()]
    286  struct amdgpu_umc_ras *ras;    [in amdgpu_umc_ras_sw_init(), local]
    288  if (!adev->umc.ras)    [in amdgpu_umc_ras_sw_init()]
    291  ras = adev->umc.ras;    [in amdgpu_umc_ras_sw_init()]
    304  if (!ras->ras_block.ras_late_init)    [in amdgpu_umc_ras_sw_init()]
    307  if (!ras->ras_block.ras_cb)    [in amdgpu_umc_ras_sw_init()]
    328  if (adev->umc.ras &&    [in amdgpu_umc_ras_late_init()]
    [all …]
|
amdgpu_hdp.c
     29  struct amdgpu_hdp_ras *ras;    [in amdgpu_hdp_ras_sw_init(), local]
     31  if (!adev->hdp.ras)    [in amdgpu_hdp_ras_sw_init()]
     34  ras = adev->hdp.ras;    [in amdgpu_hdp_ras_sw_init()]
     35  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);    [in amdgpu_hdp_ras_sw_init()]
     41  strcpy(ras->ras_block.ras_comm.name, "hdp");    [in amdgpu_hdp_ras_sw_init()]
     42  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__HDP;    [in amdgpu_hdp_ras_sw_init()]
     43  ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE;    [in amdgpu_hdp_ras_sw_init()]
     44  adev->hdp.ras_if = &ras->ras_block.ras_comm;    [in amdgpu_hdp_ras_sw_init()]
|
amdgpu_mmhub.c
     27  struct amdgpu_mmhub_ras *ras;    [in amdgpu_mmhub_ras_sw_init(), local]
     29  if (!adev->mmhub.ras)    [in amdgpu_mmhub_ras_sw_init()]
     32  ras = adev->mmhub.ras;    [in amdgpu_mmhub_ras_sw_init()]
     33  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);    [in amdgpu_mmhub_ras_sw_init()]
     39  strcpy(ras->ras_block.ras_comm.name, "mmhub");    [in amdgpu_mmhub_ras_sw_init()]
     40  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__MMHUB;    [in amdgpu_mmhub_ras_sw_init()]
     41  ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE;    [in amdgpu_mmhub_ras_sw_init()]
     42  adev->mmhub.ras_if = &ras->ras_block.ras_comm;    [in amdgpu_mmhub_ras_sw_init()]
|
amdgpu_nbio.c
     28  struct amdgpu_nbio_ras *ras;    [in amdgpu_nbio_ras_sw_init(), local]
     30  if (!adev->nbio.ras)    [in amdgpu_nbio_ras_sw_init()]
     33  ras = adev->nbio.ras;    [in amdgpu_nbio_ras_sw_init()]
     34  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);    [in amdgpu_nbio_ras_sw_init()]
     40  strcpy(ras->ras_block.ras_comm.name, "pcie_bif");    [in amdgpu_nbio_ras_sw_init()]
     41  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__PCIE_BIF;    [in amdgpu_nbio_ras_sw_init()]
     42  ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE;    [in amdgpu_nbio_ras_sw_init()]
     43  adev->nbio.ras_if = &ras->ras_block.ras_comm;    [in amdgpu_nbio_ras_sw_init()]
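
The amdgpu_hdp.c, amdgpu_mmhub.c and amdgpu_nbio.c hits above are three instances of the same RAS software-init shape. A minimal composite sketch of that shape follows, written against a hypothetical "xxx" IP block; AMDGPU_RAS_BLOCK__XXX and the error handling after the register call are assumptions, only the lines visible in the hits are taken as given:

/* Composite sketch of the <ip>_ras_sw_init() pattern; "xxx" is a
 * hypothetical IP block, not a real amdgpu structure. */
int amdgpu_xxx_ras_sw_init(struct amdgpu_device *adev)
{
	struct amdgpu_xxx_ras *ras;
	int err;

	/* Nothing to register if the IP block provides no RAS object. */
	if (!adev->xxx.ras)
		return 0;

	ras = adev->xxx.ras;
	err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);
	if (err)	/* assumed: the hits skip over the error path */
		return err;

	/* Describe the block to the common RAS core. */
	strcpy(ras->ras_block.ras_comm.name, "xxx");
	ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__XXX;	/* hypothetical */
	ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE;
	adev->xxx.ras_if = &ras->ras_block.ras_comm;

	return 0;
}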
|
amdgpu_mca.c
     33  if (adev->umc.ras->check_ecc_err_status)    [in amdgpu_mca_is_deferred_error()]
     34  return adev->umc.ras->check_ecc_err_status(adev,    [in amdgpu_mca_is_deferred_error()]
     87  struct amdgpu_mca_ras_block *ras;    [in amdgpu_mca_mp0_ras_sw_init(), local]
     89  if (!adev->mca.mp0.ras)    [in amdgpu_mca_mp0_ras_sw_init()]
     92  ras = adev->mca.mp0.ras;    [in amdgpu_mca_mp0_ras_sw_init()]
    111  struct amdgpu_mca_ras_block *ras;    [in amdgpu_mca_mp1_ras_sw_init(), local]
    113  if (!adev->mca.mp1.ras)    [in amdgpu_mca_mp1_ras_sw_init()]
    116  ras = adev->mca.mp1.ras;    [in amdgpu_mca_mp1_ras_sw_init()]
    135  struct amdgpu_mca_ras_block *ras;    [in amdgpu_mca_mpio_ras_sw_init(), local]
    137  if (!adev->mca.mpio.ras)    [in amdgpu_mca_mpio_ras_sw_init()]
    [all …]
|
amdgpu_sdma.c
    315  struct amdgpu_sdma_ras *ras = NULL;    [in amdgpu_sdma_ras_sw_init(), local]
    320  if (!adev->sdma.ras)    [in amdgpu_sdma_ras_sw_init()]
    323  ras = adev->sdma.ras;    [in amdgpu_sdma_ras_sw_init()]
    325  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);    [in amdgpu_sdma_ras_sw_init()]
    331  strcpy(ras->ras_block.ras_comm.name, "sdma");    [in amdgpu_sdma_ras_sw_init()]
    332  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__SDMA;    [in amdgpu_sdma_ras_sw_init()]
    334  adev->sdma.ras_if = &ras->ras_block.ras_comm;    [in amdgpu_sdma_ras_sw_init()]
    337  if (!ras->ras_block.ras_late_init)    [in amdgpu_sdma_ras_sw_init()]
    338  ras->ras_block.ras_late_init = amdgpu_sdma_ras_late_init;    [in amdgpu_sdma_ras_sw_init()]
    341  if (!ras->ras_block.ras_cb)    [in amdgpu_sdma_ras_sw_init()]
    [all …]
|
amdgpu_jpeg.c
    310  struct amdgpu_jpeg_ras *ras;    [in amdgpu_jpeg_ras_sw_init(), local]
    312  if (!adev->jpeg.ras)    [in amdgpu_jpeg_ras_sw_init()]
    315  ras = adev->jpeg.ras;    [in amdgpu_jpeg_ras_sw_init()]
    316  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);    [in amdgpu_jpeg_ras_sw_init()]
    322  strcpy(ras->ras_block.ras_comm.name, "jpeg");    [in amdgpu_jpeg_ras_sw_init()]
    323  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__JPEG;    [in amdgpu_jpeg_ras_sw_init()]
    324  ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__POISON;    [in amdgpu_jpeg_ras_sw_init()]
    325  adev->jpeg.ras_if = &ras->ras_block.ras_comm;    [in amdgpu_jpeg_ras_sw_init()]
    327  if (!ras->ras_block.ras_late_init)    [in amdgpu_jpeg_ras_sw_init()]
    328  ras->ras_block.ras_late_init = amdgpu_jpeg_ras_late_init;    [in amdgpu_jpeg_ras_sw_init()]
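
The amdgpu_sdma.c and amdgpu_jpeg.c hits (and the amdgpu_vcn.c and amdgpu_gfx.c ones further down) add one more step to that init shape: generic late-init and error-handling callbacks are installed only when the ASIC-specific code has not already set its own. A short sketch of that tail, continuing the hypothetical "xxx" block above; both fallback names are placeholders:

	/* Fall back to generic hooks only if the ASIC code left them unset;
	 * the two fallback names below are hypothetical. */
	if (!ras->ras_block.ras_late_init)
		ras->ras_block.ras_late_init = amdgpu_xxx_ras_late_init;

	if (!ras->ras_block.ras_cb)
		ras->ras_block.ras_cb = amdgpu_xxx_process_ras_data_cb;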
|
amdgpu_ras_eeprom.c
    754  ras->is_rma = true;    [in amdgpu_ras_eeprom_update_header()]
    804  ras->bad_page_cnt_threshold;    [in amdgpu_ras_eeprom_update_header()]
   1029  struct amdgpu_ras_eeprom_control *control = ras ? &ras->eeprom_control : NULL;    [in amdgpu_ras_debugfs_eeprom_size_read()]
   1036  if (!ras || !control) {    [in amdgpu_ras_debugfs_eeprom_size_read()]
   1087  struct dentry *de = ras->de_ras_eeprom_table;    [in amdgpu_ras_debugfs_set_ret_size()]
   1213  struct amdgpu_ras_eeprom_control *control = ras ? &ras->eeprom_control : NULL;    [in amdgpu_ras_debugfs_eeprom_table_read()]
   1220  if (!ras || !control) {    [in amdgpu_ras_debugfs_eeprom_table_read()]
   1338  ras->is_rma = false;    [in amdgpu_ras_eeprom_init()]
   1396  ras->bad_page_cnt_threshold);    [in amdgpu_ras_eeprom_init()]
   1420  ras->bad_page_cnt_threshold);    [in amdgpu_ras_eeprom_init()]
    [all …]
|
amdgpu_ras.c
   1028  if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops &&    [in amdgpu_ras_get_ecc_info()]
   1035  if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops &&    [in amdgpu_ras_get_ecc_info()]
   1039  if (adev->umc.ras &&    [in amdgpu_ras_get_ecc_info()]
   1043  if (adev->umc.ras &&    [in amdgpu_ras_get_ecc_info()]
   2542  if (ras && (atomic_read(&ras->in_recovery) || hive_ras_recovery))    [in amdgpu_ras_in_recovery()]
   3408  adev->umc.ras &&    [in amdgpu_ras_query_poison_mode()]
   3531  if (!ras)    [in amdgpu_ras_event_mgr_init()]
   3535  ras->event_mgr = hive ? &hive->event_mgr : &ras->__event_mgr;    [in amdgpu_ras_event_mgr_init()]
   4008  if (!ras)    [in amdgpu_ras_get_fed_status()]
   4019  if (ras)    [in amdgpu_ras_set_fed()]
    [all …]
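
The truncated conditions in amdgpu_umc_handle_bad_pages() above and in amdgpu_ras_get_ecc_info() here are the same guard pattern: a UMC hardware op is only invoked when the ras object, its hw_ops table and the specific callback all exist. A sketch of one full guard, using query_ras_error_count as an assumed example op and err_data as an assumed argument:

	/* Guard-pattern sketch; the op and argument names are assumptions,
	 * the real call sites dispatch to several different hw_ops. */
	if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops &&
	    adev->umc.ras->ras_block.hw_ops->query_ras_error_count)
		adev->umc.ras->ras_block.hw_ops->query_ras_error_count(adev, err_data);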
|
aldebaran.c
    360  if (tmp_adev->sdma.ras &&    [in aldebaran_mode2_restore_hwcontext()]
    361  tmp_adev->sdma.ras->ras_block.ras_late_init) {    [in aldebaran_mode2_restore_hwcontext()]
    362  r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev,    [in aldebaran_mode2_restore_hwcontext()]
    363  &tmp_adev->sdma.ras->ras_block.ras_comm);    [in aldebaran_mode2_restore_hwcontext()]
    370  if (tmp_adev->gfx.ras &&    [in aldebaran_mode2_restore_hwcontext()]
    371  tmp_adev->gfx.ras->ras_block.ras_late_init) {    [in aldebaran_mode2_restore_hwcontext()]
    372  r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev,    [in aldebaran_mode2_restore_hwcontext()]
    373  &tmp_adev->gfx.ras->ras_block.ras_comm);    [in aldebaran_mode2_restore_hwcontext()]
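
These aldebaran.c hits show the consumer side of the ras_late_init hook: after a mode-2 reset, aldebaran_mode2_restore_hwcontext() re-runs each registered block's late init. A compact sketch for one block; everything past the return-code check, including the error message, is an assumption:

	/* Re-run SDMA RAS late init after a mode-2 reset; error handling
	 * beyond checking r is assumed, the hits stop before it. */
	if (tmp_adev->sdma.ras &&
	    tmp_adev->sdma.ras->ras_block.ras_late_init) {
		r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev,
				&tmp_adev->sdma.ras->ras_block.ras_comm);
		if (r)
			dev_err(tmp_adev->dev, "SDMA ras_late_init failed after mode-2 reset\n");
	}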
|
umc_v6_7.c
    101  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in umc_v6_7_ecc_info_query_correctable_error_count(), local]
    109  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;    [in umc_v6_7_ecc_info_query_correctable_error_count()]
    116  if (ras->umc_ecc.record_ce_addr_supported) {    [in umc_v6_7_ecc_info_query_correctable_error_count()]
    121  err_addr = ras->umc_ecc.ecc[eccinfo_table_idx].mca_ceumc_addr;    [in umc_v6_7_ecc_info_query_correctable_error_count()]
    143  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in umc_v6_7_ecc_info_querry_uncorrectable_error_count(), local]
    150  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;    [in umc_v6_7_ecc_info_querry_uncorrectable_error_count()]
    228  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in umc_v6_7_ecc_info_query_error_address(), local]
    232  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;    [in umc_v6_7_ecc_info_query_error_address()]
    244  err_addr = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_addr;    [in umc_v6_7_ecc_info_query_error_address()]
|
umc_v8_10.c
    341  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in umc_v8_10_ecc_info_query_correctable_error_count(), local]
    349  ecc_ce_cnt = ras->umc_ecc.ecc[eccinfo_table_idx].ce_count_lo_chip;    [in umc_v8_10_ecc_info_query_correctable_error_count()]
    360  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in umc_v8_10_ecc_info_query_uncorrectable_error_count(), local]
    368  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;    [in umc_v8_10_ecc_info_query_uncorrectable_error_count()]
    408  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in umc_v8_10_ecc_info_query_error_address(), local]
    415  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;    [in umc_v8_10_ecc_info_query_error_address()]
    428  err_addr = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_addr;    [in umc_v8_10_ecc_info_query_error_address()]
|
umc_v8_7.c
     56  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in umc_v8_7_ecc_info_query_correctable_error_count(), local]
     63  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;    [in umc_v8_7_ecc_info_query_correctable_error_count()]
     75  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in umc_v8_7_ecc_info_querry_uncorrectable_error_count(), local]
     80  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;    [in umc_v8_7_ecc_info_querry_uncorrectable_error_count()]
    137  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in umc_v8_7_ecc_info_query_error_address(), local]
    140  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;    [in umc_v8_7_ecc_info_query_error_address()]
    152  err_addr = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_addr;    [in umc_v8_7_ecc_info_query_error_address()]
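
The umc_v6_7.c, umc_v8_10.c and umc_v8_7.c entries above share one idea: the ecc_info query paths read error counts and addresses from the umc_ecc table cached in the per-device amdgpu_ras context instead of touching MCA registers directly. A reduced sketch, assuming a single non-zero status check where the real code decodes individual status fields:

/* Reduced sketch of the ecc_info query pattern; the index handling and the
 * bit-level decoding of mca_umc_status are simplified assumptions. */
static void umc_ecc_info_query_count_sketch(struct amdgpu_device *adev,
					    uint32_t eccinfo_table_idx,
					    unsigned long *error_count)
{
	struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);
	uint64_t mc_umc_status;

	mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;

	/* Treat any non-zero status as one error, for illustration only. */
	if (mc_umc_status)
		(*error_count)++;
}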
|
amdgpu_gfx.c
    916  struct amdgpu_gfx_ras *ras = NULL;    [in amdgpu_gfx_ras_sw_init(), local]
    921  if (!adev->gfx.ras)    [in amdgpu_gfx_ras_sw_init()]
    924  ras = adev->gfx.ras;    [in amdgpu_gfx_ras_sw_init()]
    932  strcpy(ras->ras_block.ras_comm.name, "gfx");    [in amdgpu_gfx_ras_sw_init()]
    933  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__GFX;    [in amdgpu_gfx_ras_sw_init()]
    935  adev->gfx.ras_if = &ras->ras_block.ras_comm;    [in amdgpu_gfx_ras_sw_init()]
    938  if (!ras->ras_block.ras_late_init)    [in amdgpu_gfx_ras_sw_init()]
    942  if (!ras->ras_block.ras_cb)    [in amdgpu_gfx_ras_sw_init()]
    943  ras->ras_block.ras_cb = amdgpu_gfx_process_ras_data_cb;    [in amdgpu_gfx_ras_sw_init()]
    951  if (adev->gfx.ras && adev->gfx.ras->poison_consumption_handler)    [in amdgpu_gfx_poison_consumption_handler()]
    [all …]
|
gfx_v11_0_3.c
     95  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in gfx_v11_0_3_poison_consumption_handler(), local]
     97  ras->gpu_reset_flags |= AMDGPU_RAS_GPU_RESET_MODE2_RESET;    [in gfx_v11_0_3_poison_consumption_handler()]
|
amdgpu_vcn.c
   1243  struct amdgpu_vcn_ras *ras;    [in amdgpu_vcn_ras_sw_init(), local]
   1245  if (!adev->vcn.ras)    [in amdgpu_vcn_ras_sw_init()]
   1248  ras = adev->vcn.ras;    [in amdgpu_vcn_ras_sw_init()]
   1249  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);    [in amdgpu_vcn_ras_sw_init()]
   1255  strcpy(ras->ras_block.ras_comm.name, "vcn");    [in amdgpu_vcn_ras_sw_init()]
   1256  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__VCN;    [in amdgpu_vcn_ras_sw_init()]
   1257  ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__POISON;    [in amdgpu_vcn_ras_sw_init()]
   1258  adev->vcn.ras_if = &ras->ras_block.ras_comm;    [in amdgpu_vcn_ras_sw_init()]
   1260  if (!ras->ras_block.ras_late_init)    [in amdgpu_vcn_ras_sw_init()]
   1261  ras->ras_block.ras_late_init = amdgpu_vcn_ras_late_init;    [in amdgpu_vcn_ras_sw_init()]
|
soc15.c
    485  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in soc15_asic_baco_reset(), local]
    489  if (ras && adev->ras_enabled)    [in soc15_asic_baco_reset()]
    497  if (ras && adev->ras_enabled)    [in soc15_asic_baco_reset()]
    508  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in soc15_asic_reset_method(), local]
    542  if (ras && adev->ras_enabled &&    [in soc15_asic_reset_method()]
   1301  if (adev->nbio.ras &&    [in soc15_common_hw_fini()]
   1302  adev->nbio.ras->init_ras_controller_interrupt)    [in soc15_common_hw_fini()]
   1304  if (adev->nbio.ras &&    [in soc15_common_hw_fini()]
   1305  adev->nbio.ras->init_ras_err_event_athub_interrupt)    [in soc15_common_hw_fini()]
|
amdgpu_hdp.h
     43  struct amdgpu_hdp_ras *ras;    [member]
|
/linux/drivers/gpu/drm/amd/pm/powerplay/hwmgr/
vega20_baco.c
     75  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);    [in vega20_baco_set_state(), local]
     86  if (!ras || !adev->ras_enabled) {    [in vega20_baco_set_state()]
|
/linux/drivers/edac/
i5000_edac.c
    471  int ras, cas;    [in i5000_process_fatal_error_info(), local]
    484  ras = NREC_RAS(info->nrecmemb);    [in i5000_process_fatal_error_info()]
    489  rdwr ? "Write" : "Read", ras, cas);    [in i5000_process_fatal_error_info()]
    525  bank, ras, cas, allErrors, specific);    [in i5000_process_fatal_error_info()]
    556  int ras, cas;    [in i5000_process_nonfatal_error_info(), local]
    579  ras = NREC_RAS(info->nrecmemb);    [in i5000_process_nonfatal_error_info()]
    584  rdwr ? "Write" : "Read", ras, cas);    [in i5000_process_nonfatal_error_info()]
    624  rank, bank, ras, cas, ue_errors, specific);    [in i5000_process_nonfatal_error_info()]
    651  ras = REC_RAS(info->recmemb);    [in i5000_process_nonfatal_error_info()]
    656  rdwr ? "Write" : "Read", ras, cas);    [in i5000_process_nonfatal_error_info()]
    [all …]
|
i5100_edac.c
    433  unsigned ras,    [in i5100_handle_ce(), argument]
    441  bank, cas, ras);    [in i5100_handle_ce()]
    455  unsigned ras,    [in i5100_handle_ue(), argument]
    463  bank, cas, ras);    [in i5100_handle_ue()]
    483  unsigned ras;    [in i5100_read_log(), local]
    503  ras = i5100_recmemb_ras(dw2);    [in i5100_read_log()]
    512  i5100_handle_ce(mci, chan, bank, rank, syndrome, cas, ras, msg);    [in i5100_read_log()]
    525  ras = i5100_nrecmemb_ras(dw2);    [in i5100_read_log()]
    534  i5100_handle_ue(mci, chan, bank, rank, syndrome, cas, ras, msg);    [in i5100_read_log()]
|
i5400_edac.c
    523  int ras, cas;    [in i5400_proccess_non_recoverable_info(), local]
    549  ras = nrec_ras(info);    [in i5400_proccess_non_recoverable_info()]
    554  buf_id, rdwr_str(rdwr), ras, cas);    [in i5400_proccess_non_recoverable_info()]
    562  bank, buf_id, ras, cas, allErrors, error_name[errnum]);    [in i5400_proccess_non_recoverable_info()]
    587  int ras, cas;    [in i5400_process_nonfatal_error_info(), local]
    619  ras = rec_ras(info);    [in i5400_process_nonfatal_error_info()]
    627  rdwr_str(rdwr), ras, cas);    [in i5400_process_nonfatal_error_info()]
    633  branch >> 1, bank, rdwr_str(rdwr), ras, cas,    [in i5400_process_nonfatal_error_info()]
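
In these EDAC drivers "ras" is the DRAM row address strobe, not the GPU RAS context: the i5000, i5100 and i5400 drivers pull the failing row (ras) and column (cas) out of the chipset's memory error log registers and fold them into the reported error string. A sketch of that decode in the i5000 style; NREC_CAS is assumed to mirror the NREC_RAS accessor shown in the hits, and the struct name and message text are illustrative only:

/* Sketch only: extract row/column from a non-recoverable memory error log
 * and format them; i5000_error_info and the message text are assumptions. */
static void sketch_decode_mem_error(struct i5000_error_info *info, int rdwr,
				    char *msg, size_t len)
{
	int ras = NREC_RAS(info->nrecmemb);	/* failing row address */
	int cas = NREC_CAS(info->nrecmemb);	/* failing column address */

	snprintf(msg, len, "%s error: RAS=%d CAS=%d",
		 rdwr ? "Write" : "Read", ras, cas);
}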
|
/linux/net/netfilter/
nf_conntrack_h323_main.c
   1627  switch (ras->choice) {    [in process_ras()]
   1630  &ras->gatekeeperRequest);    [in process_ras()]
   1633  &ras->gatekeeperConfirm);    [in process_ras()]
   1636  &ras->registrationRequest);    [in process_ras()]
   1639  &ras->registrationConfirm);    [in process_ras()]
   1645  &ras->admissionRequest);    [in process_ras()]
   1648  &ras->admissionConfirm);    [in process_ras()]
   1651  &ras->locationRequest);    [in process_ras()]
   1654  &ras->locationConfirm);    [in process_ras()]
   1657  &ras->infoRequestResponse);    [in process_ras()]
   [all …]
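
Here "ras" carries yet another meaning: an H.225 RAS (Registration, Admission and Status) message handled by the H.323 connection-tracking helper. process_ras() switches on ras->choice and forwards the matching union member to a per-message handler. A trimmed sketch with only two arms; the enum values and handler signatures follow the file's naming pattern but are assumptions here:

	/* Trimmed sketch of the dispatch in process_ras(); names assumed. */
	switch (ras->choice) {
	case eRasMessage_gatekeeperRequest:
		return process_grq(skb, ct, ctinfo, protoff, data,
				   &ras->gatekeeperRequest);
	case eRasMessage_registrationRequest:
		return process_rrq(skb, ct, ctinfo, protoff, data,
				   &ras->registrationRequest);
	default:
		return 0;
	}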
|
/linux/drivers/ras/
Makefile
      2  obj-$(CONFIG_RAS) += ras.o
|
Kconfig
     34  source "arch/x86/ras/Kconfig"
     35  source "drivers/ras/amd/atl/Kconfig"
|