/drivers/gpu/drm/amd/amdgpu/
amdgpu_umc.c
      59  err_data.err_addr =                                    in amdgpu_umc_page_retirement_mca()
     116  err_data->err_addr =                                   in amdgpu_umc_handle_bad_pages()
     164  if (err_data->ue_count || err_data->de_count) {        in amdgpu_umc_handle_bad_pages()
     165  err_count = err_data->ue_count + err_data->de_count;   in amdgpu_umc_handle_bad_pages()
     199  if ((err_data->ue_count || err_data->de_count) &&      in amdgpu_umc_do_page_retirement()
     241  obj->err_data.ue_count += err_data.ue_count;           in amdgpu_umc_pasid_poison_handler()
     242  obj->err_data.ce_count += err_data.ce_count;           in amdgpu_umc_pasid_poison_handler()
     243  obj->err_data.de_count += err_data.de_count;           in amdgpu_umc_pasid_poison_handler()
     369  if (!err_data ||                                       in amdgpu_umc_fill_error_record()
     371  (err_data->err_addr_cnt >= err_data->err_addr_len))    in amdgpu_umc_fill_error_record()
     [all …]
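
A pattern worth noting in this file: `amdgpu_umc_fill_error_record()` (hits at 369/371) refuses to append when the caller passed no record buffer or the buffer is already full. Below is a minimal userspace sketch of that bounds-checked append; the struct fields are stand-ins, not the driver's exact layout.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the record type stored inside ras_err_data. */
struct err_record {
    uint64_t err_addr;
    uint64_t retired_page;
};

struct ras_err_data {
    struct err_record *err_addr;  /* record buffer, may be NULL */
    uint32_t err_addr_cnt;        /* records filled so far */
    uint32_t err_addr_len;        /* capacity of the buffer */
};

/* Append one bad-page record; mirrors the NULL/overflow guard seen at
 * amdgpu_umc.c:369-371. Returns 0 on success, -1 when rejected. */
static int fill_error_record(struct ras_err_data *err_data,
                             uint64_t err_addr, uint64_t retired_page)
{
    if (!err_data || !err_data->err_addr ||
        err_data->err_addr_cnt >= err_data->err_addr_len)
        return -1;   /* no buffer, or buffer already full */

    err_data->err_addr[err_data->err_addr_cnt].err_addr = err_addr;
    err_data->err_addr[err_data->err_addr_cnt].retired_page = retired_page;
    err_data->err_addr_cnt++;
    return 0;
}

int main(void)
{
    struct err_record recs[2];
    struct ras_err_data d = { .err_addr = recs, .err_addr_len = 2 };

    fill_error_record(&d, 0x1000, 0x1);
    fill_error_record(&d, 0x2000, 0x2);
    /* A third append trips the capacity check and is rejected. */
    printf("stored %u, overflow rc=%d\n",
           (unsigned)d.err_addr_cnt, fill_error_record(&d, 0x3000, 0x3));
    return 0;
}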
|
amdgpu_ras.c
    1232  new_ce = err_data->ce_count - obj->err_data.ce_count;   in amdgpu_ras_virt_error_generate_report()
    1275  obj->err_data.ue_count += err_data->ue_count;           in amdgpu_rasmgr_error_data_statistic_update()
    1276  obj->err_data.ce_count += err_data->ce_count;           in amdgpu_rasmgr_error_data_statistic_update()
    1277  obj->err_data.de_count += err_data->de_count;           in amdgpu_rasmgr_error_data_statistic_update()
    1285  obj->err_data.ue_count = err_data->ue_count;            in amdgpu_ras_mgr_virt_error_data_statistics_update()
    1286  obj->err_data.ce_count = err_data->ce_count;            in amdgpu_ras_mgr_virt_error_data_statistics_update()
    1287  obj->err_data.de_count = err_data->de_count;            in amdgpu_ras_mgr_virt_error_data_statistics_update()
    2298  obj->err_data.ue_count += err_data.ue_count;            in amdgpu_ras_interrupt_umc_handler()
    2299  obj->err_data.ce_count += err_data.ce_count;            in amdgpu_ras_interrupt_umc_handler()
    2300  obj->err_data.de_count += err_data.de_count;            in amdgpu_ras_interrupt_umc_handler()
    [all …]
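
Two update styles show up here: the statistic-update path accumulates with `+=` (1275-1277), while the virtualization path computes a delta against the cached totals (1232) and then overwrites them (1285-1287). A hedged sketch of that delta-then-snapshot idea, with illustrative names:

#include <stdint.h>
#include <stdio.h>

struct err_counts {
    uint64_t ue_count, ce_count, de_count;
};

/* Report only what changed since the last poll, then snapshot the new
 * totals: the shape of amdgpu_ras.c:1232 plus 1285-1287. */
static void virt_update_and_report(struct err_counts *cached,
                                   const struct err_counts *fresh)
{
    uint64_t new_ce = fresh->ce_count - cached->ce_count;
    uint64_t new_ue = fresh->ue_count - cached->ue_count;

    if (new_ce || new_ue)
        printf("delta: %llu new CE, %llu new UE\n",
               (unsigned long long)new_ce, (unsigned long long)new_ue);

    *cached = *fresh;   /* overwrite: cache now matches the source */
}

int main(void)
{
    struct err_counts cached = {0}, hw = { .ce_count = 3 };

    virt_update_and_report(&cached, &hw);   /* reports 3 new CE */
    hw.ce_count = 5;
    virt_update_and_report(&cached, &hw);   /* reports 2 new CE */
    return 0;
}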
|
umc_v8_7.c
     104  &(err_data->ce_count));                                 in umc_v8_7_ecc_info_query_ras_error_count()
     107  &(err_data->ue_count));                                 in umc_v8_7_ecc_info_query_ras_error_count()
     126  amdgpu_umc_fill_error_record(err_data, err_addr,        in umc_v8_7_convert_error_address()
     131  struct ras_err_data *err_data,                          in umc_v8_7_ecc_info_query_error_address() argument
     145  if (!err_data->err_addr)                                in umc_v8_7_ecc_info_query_error_address()
     174  err_data,                                               in umc_v8_7_ecc_info_query_ras_error_address()
     317  &(err_data->ce_count));                                 in umc_v8_7_query_ras_error_count()
     320  &(err_data->ue_count));                                 in umc_v8_7_query_ras_error_count()
     327  struct ras_err_data *err_data,                          in umc_v8_7_query_error_address() argument
     344  if (!err_data->err_addr) {                              in umc_v8_7_query_error_address()
     [all …]
|
umc_v8_10.c
     147  struct ras_err_data *err_data = (struct ras_err_data *)data;   in umc_v8_10_query_ecc_error_count() local
     153  &(err_data->ce_count));                                 in umc_v8_10_query_ecc_error_count()
     156  &(err_data->ue_count));                                 in umc_v8_10_query_ecc_error_count()
     206  struct ras_err_data *err_data, uint64_t err_addr,       in umc_v8_10_convert_error_address() argument
     239  amdgpu_umc_fill_error_record(err_data, na_err_addr,     in umc_v8_10_convert_error_address()
     262  if (!err_data->err_addr) {                              in umc_v8_10_query_error_address()
     277  umc_v8_10_convert_error_address(adev, err_data, err_addr,   in umc_v8_10_query_error_address()
     387  &(err_data->ce_count));                                 in umc_v8_10_ecc_info_query_ecc_error_count()
     390  &(err_data->ue_count));                                 in umc_v8_10_ecc_info_query_ecc_error_count()
     420  if (!err_data->err_addr)                                in umc_v8_10_ecc_info_query_error_address()
     [all …]
|
umc_v6_7.c
     171  &(err_data->ce_count));                                 in umc_v6_7_ecc_info_querry_ecc_error_count()
     175  &(err_data->ue_count));                                 in umc_v6_7_ecc_info_querry_ecc_error_count()
     188  struct ras_err_data *err_data, uint64_t err_addr,       in umc_v6_7_convert_error_address() argument
     211  amdgpu_umc_fill_error_record(err_data, err_addr,        in umc_v6_7_convert_error_address()
     217  amdgpu_umc_fill_error_record(err_data, err_addr,        in umc_v6_7_convert_error_address()
     237  if (!err_data->err_addr)                                in umc_v6_7_ecc_info_query_error_address()
     247  umc_v6_7_convert_error_address(adev, err_data, err_addr,   in umc_v6_7_ecc_info_query_error_address()
     422  &(err_data->ce_count),                                  in umc_v6_7_query_ecc_error_count()
     427  &(err_data->ue_count));                                 in umc_v6_7_query_ecc_error_count()
     461  if (!err_data->err_addr) {                              in umc_v6_7_query_error_address()
     [all …]
|
nbio_v7_4.c
     363  struct ras_err_data err_data;                           in nbio_v7_4_handle_ras_controller_intr_no_bifring() local
     366  if (amdgpu_ras_error_data_init(&err_data))              in nbio_v7_4_handle_ras_controller_intr_no_bifring()
     394  obj->err_data.ue_count += err_data.ue_count;            in nbio_v7_4_handle_ras_controller_intr_no_bifring()
     395  obj->err_data.ce_count += err_data.ce_count;            in nbio_v7_4_handle_ras_controller_intr_no_bifring()
     397  if (err_data.ce_count)                                  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
     400  obj->err_data.ce_count,                                 in nbio_v7_4_handle_ras_controller_intr_no_bifring()
     403  if (err_data.ue_count)                                  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
     406  obj->err_data.ue_count,                                 in nbio_v7_4_handle_ras_controller_intr_no_bifring()
     419  amdgpu_ras_error_data_fini(&err_data);                  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
     609  err_data->ce_count++;                                   in nbio_v7_4_query_ras_error_count()
     [all …]
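
The interrupt handler here has a clear lifecycle: init a stack-local `err_data`, query the hardware, fold the counts into the long-lived `obj->err_data`, log, and always call the fini routine. A compressed userspace sketch of that shape; the mock helpers stand in for `amdgpu_ras_error_data_init()`/`_fini()`, which in the kernel also manage a node list:

#include <stdio.h>

struct ras_err_data { unsigned long ue_count, ce_count; };
struct ras_obj { struct ras_err_data err_data; };   /* long-lived totals */

/* Mock init/fini pair. */
static int error_data_init(struct ras_err_data *d)
{
    d->ue_count = d->ce_count = 0;
    return 0;
}
static void error_data_fini(struct ras_err_data *d) { (void)d; }

/* Fake hardware query: pretend one correctable error was latched. */
static void query_hw_counters(struct ras_err_data *d) { d->ce_count = 1; }

static void handle_ras_controller_intr(struct ras_obj *obj)
{
    struct ras_err_data err_data;   /* short-lived, one per interrupt */

    if (error_data_init(&err_data))
        return;

    query_hw_counters(&err_data);
    obj->err_data.ue_count += err_data.ue_count;   /* fold into totals */
    obj->err_data.ce_count += err_data.ce_count;

    if (err_data.ce_count)
        printf("%lu new correctable errors, %lu total\n",
               err_data.ce_count, obj->err_data.ce_count);

    error_data_fini(&err_data);   /* always released, as at :419 */
}

int main(void)
{
    struct ras_obj obj = { { 0, 0 } };

    handle_ras_controller_intr(&obj);
    handle_ras_controller_intr(&obj);
    return 0;
}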
|
umc_v12_0.c
     141  struct ras_err_data *err_data = (struct ras_err_data *)data;   in umc_v12_0_query_error_count() local
     161  amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, ue_count);   in umc_v12_0_query_error_count()
     236  struct ras_err_data *err_data,                          in umc_v12_0_convert_error_address() argument
     297  if (!err_data && !dump_addr)                            in umc_v12_0_convert_error_address()
     317  if (err_data)                                           in umc_v12_0_convert_error_address()
     318  amdgpu_umc_fill_error_record(err_data, err_addr,        in umc_v12_0_convert_error_address()
     330  struct ras_err_data *err_data = (struct ras_err_data *)data;   in umc_v12_0_query_error_address() local
     346  if (!err_data->err_addr) {                              in umc_v12_0_query_error_address()
     635  if (!err_data || !ecc_err)                              in umc_v12_0_fill_error_record()
     644  ret = amdgpu_umc_fill_error_record(err_data,            in umc_v12_0_fill_error_record()
     [all …]
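
umc_v12_0's converter serves two optional consumers: it bails only when neither an `err_data` buffer nor a dump address was supplied (line 297), and later fills each output only if present (line 317). A small sketch of that optional-output pattern; the translation and field names are invented:

#include <errno.h>
#include <stdint.h>
#include <stdio.h>

struct ras_err_data {
    uint64_t addrs[8];
    unsigned int cnt;
};

/* Translate an error address and write it to whichever outputs the
 * caller supplied; both are optional, but not both may be absent. */
static int convert_error_address(uint64_t mca_addr,
                                 struct ras_err_data *err_data,  /* optional */
                                 uint64_t *dump_addr)            /* optional */
{
    uint64_t pa;

    if (!err_data && !dump_addr)
        return -EINVAL;       /* nothing to do for this caller */

    pa = mca_addr << 12;      /* stand-in for the real translation */

    if (dump_addr)
        *dump_addr = pa;
    if (err_data && err_data->cnt < 8)
        err_data->addrs[err_data->cnt++] = pa;
    return 0;
}

int main(void)
{
    uint64_t dump = 0;

    convert_error_address(0x5, NULL, &dump);   /* dump-only caller */
    printf("dumped 0x%llx\n", (unsigned long long)dump);
    printf("no outputs -> %d\n", convert_error_address(0x5, NULL, NULL));
    return 0;
}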
|
nbio_v7_9.c
     520  struct ras_err_data err_data;                           in nbio_v7_9_handle_ras_controller_intr_no_bifring() local
     523  if (amdgpu_ras_error_data_init(&err_data))              in nbio_v7_9_handle_ras_controller_intr_no_bifring()
     542  nbio_v7_9_query_ras_error_count(adev, &err_data);       in nbio_v7_9_handle_ras_controller_intr_no_bifring()
     545  obj->err_data.ue_count += err_data.ue_count;            in nbio_v7_9_handle_ras_controller_intr_no_bifring()
     546  obj->err_data.ce_count += err_data.ce_count;            in nbio_v7_9_handle_ras_controller_intr_no_bifring()
     548  if (err_data.ce_count)                                  in nbio_v7_9_handle_ras_controller_intr_no_bifring()
     551  obj->err_data.ce_count,                                 in nbio_v7_9_handle_ras_controller_intr_no_bifring()
     554  if (err_data.ue_count)                                  in nbio_v7_9_handle_ras_controller_intr_no_bifring()
     557  obj->err_data.ue_count,                                 in nbio_v7_9_handle_ras_controller_intr_no_bifring()
     565  amdgpu_ras_error_data_fini(&err_data);                  in nbio_v7_9_handle_ras_controller_intr_no_bifring()
|
umc_v6_1.c
     257  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   in umc_v6_1_query_ras_error_count() local
     279  &(err_data->ce_count));                                 in umc_v6_1_query_ras_error_count()
     282  &(err_data->ue_count));                                 in umc_v6_1_query_ras_error_count()
     296  struct ras_err_data *err_data,                          in umc_v6_1_query_error_address() argument
     324  if (!err_data->err_addr) {                              in umc_v6_1_query_error_address()
     345  amdgpu_umc_fill_error_record(err_data, err_addr,        in umc_v6_1_query_error_address()
     356  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   in umc_v6_1_query_ras_error_address() local
     377  err_data,                                               in umc_v6_1_query_ras_error_address()
|
hdp_v4_0.c
      59  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   in hdp_v4_0_query_ras_error_count() local
      61  err_data->ue_count = 0;                                 in hdp_v4_0_query_ras_error_count()
      62  err_data->ce_count = 0;                                 in hdp_v4_0_query_ras_error_count()
      68  err_data->ue_count += RREG32_SOC15(HDP, 0, mmHDP_EDC_CNT);   in hdp_v4_0_query_ras_error_count()
|
gfx_v9_4.c
     715  err_data->ce_count += sec_count;                        in gfx_v9_4_query_utc_edc_status()
     723  err_data->ue_count += ded_count;                        in gfx_v9_4_query_utc_edc_status()
     737  err_data->ce_count += sec_count;                        in gfx_v9_4_query_utc_edc_status()
     746  err_data->ue_count += ded_count;                        in gfx_v9_4_query_utc_edc_status()
     759  err_data->ce_count += sec_count;                        in gfx_v9_4_query_utc_edc_status()
     767  err_data->ue_count += ded_count;                        in gfx_v9_4_query_utc_edc_status()
     781  err_data->ce_count += sec_count;                        in gfx_v9_4_query_utc_edc_status()
     876  err_data->ue_count = 0;                                 in gfx_v9_4_query_ras_error_count()
     877  err_data->ce_count = 0;                                 in gfx_v9_4_query_ras_error_count()
     897  err_data->ce_count += sec_count;                        in gfx_v9_4_query_ras_error_count()
     [all …]
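
Across the GFX and MMHUB queries, EDC SEC counts (single-error-correct, i.e. corrected) feed `ce_count` and DED counts (double-error-detect, uncorrected) feed `ue_count`, after both totals are zeroed at the top of the query (876-877). A sketch of that zero-then-accumulate walk; the register table is fabricated:

#include <stdio.h>

struct ras_err_data { unsigned long ue_count, ce_count; };

/* One EDC counter pair as decoded from a status register: SEC = single
 * errors corrected, DED = double errors detected (not corrected). */
struct edc_cnt { unsigned int sec, ded; };

static void query_ras_error_count(const struct edc_cnt *regs, int n,
                                  struct ras_err_data *err_data)
{
    int i;

    err_data->ue_count = 0;   /* fresh query, as at gfx_v9_4.c:876-877 */
    err_data->ce_count = 0;

    for (i = 0; i < n; i++) {
        err_data->ce_count += regs[i].sec;   /* correctable */
        err_data->ue_count += regs[i].ded;   /* uncorrectable */
    }
}

int main(void)
{
    struct edc_cnt regs[] = { { 2, 0 }, { 1, 1 } };   /* pretend readings */
    struct ras_err_data d;

    query_ras_error_count(regs, 2, &d);
    printf("ce=%lu ue=%lu\n", d.ce_count, d.ue_count);
    return 0;
}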
|
amdgpu_mca.c
      76  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   in amdgpu_mca_query_ras_error_count() local
      78  amdgpu_mca_query_correctable_error_count(adev, mc_status_addr, &(err_data->ce_count));   in amdgpu_mca_query_ras_error_count()
      79  amdgpu_mca_query_uncorrectable_error_count(adev, mc_status_addr, &(err_data->ue_count));   in amdgpu_mca_query_ras_error_count()
     416  struct mca_bank_set *mca_set, struct ras_err_data *err_data)   in amdgpu_mca_dispatch_mca_set() argument
     447  amdgpu_ras_error_statistic_ue_count(err_data,           in amdgpu_mca_dispatch_mca_set()
     451  amdgpu_ras_error_statistic_de_count(err_data,           in amdgpu_mca_dispatch_mca_set()
     454  amdgpu_ras_error_statistic_ce_count(err_data,           in amdgpu_mca_dispatch_mca_set()
     477  struct ras_err_data *err_data, struct ras_query_context *qctx)   in amdgpu_mca_smu_log_ras_error() argument
     489  ret = amdgpu_mca_dispatch_mca_set(adev, blk, type, &mca_set, err_data);   in amdgpu_mca_smu_log_ras_error()
     503  ret = amdgpu_mca_dispatch_mca_set(adev, blk, type, &mca_cache->mca_set, err_data);   in amdgpu_mca_smu_log_ras_error()
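
`amdgpu_mca_dispatch_mca_set()` routes each bank's count to the ue/de/ce statistic helper matching its error type (447-454). A trivial sketch of that dispatch, with invented enum names:

#include <stdio.h>

struct ras_err_data { unsigned long ue, ce, de; };
enum err_type { TYPE_UE, TYPE_CE, TYPE_DE };

/* Route one bank's count to the matching bucket, echoing the
 * ue/de/ce dispatch at amdgpu_mca.c:447-454. */
static void dispatch_bank(struct ras_err_data *err_data,
                          enum err_type type, unsigned long count)
{
    switch (type) {
    case TYPE_UE: err_data->ue += count; break;
    case TYPE_DE: err_data->de += count; break;
    case TYPE_CE: err_data->ce += count; break;
    }
}

int main(void)
{
    struct ras_err_data d = { 0, 0, 0 };

    dispatch_bank(&d, TYPE_CE, 2);
    dispatch_bank(&d, TYPE_UE, 1);
    printf("ue=%lu ce=%lu de=%lu\n", d.ue, d.ce, d.de);
    return 0;
}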
|
amdgpu_ras.h
     603  #define for_each_ras_error(err_node, err_data) \        argument
     604  list_for_each_entry(err_node, &(err_data)->err_node_list, node)
     616  void *err_data,
     652  struct ras_err_data err_data;                           member
     944  int amdgpu_ras_error_data_init(struct ras_err_data *err_data);
     945  void amdgpu_ras_error_data_fini(struct ras_err_data *err_data);
     946  int amdgpu_ras_error_statistic_ce_count(struct ras_err_data *err_data,
     949  int amdgpu_ras_error_statistic_ue_count(struct ras_err_data *err_data,
     952  int amdgpu_ras_error_statistic_de_count(struct ras_err_data *err_data,
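
`for_each_ras_error` (line 603) is a thin wrapper over `list_for_each_entry`, so per-MCM error nodes hang off `err_data->err_node_list` as an intrusive linked list rather than a fixed array. A userspace re-creation of the macro's shape; the kernel version walks a doubly linked `list_head` via `container_of`, which a plain `next` pointer stands in for here:

#include <stdio.h>

/* Minimal stand-in for the kernel's intrusive list node. */
struct err_node {
    unsigned long ue_count;
    struct err_node *next;
};

struct ras_err_data {
    struct err_node *err_node_list;   /* head of per-MCM nodes */
};

/* Same spirit as amdgpu_ras.h:603-604, minus the container_of plumbing. */
#define for_each_ras_error(err_node, err_data) \
    for ((err_node) = (err_data)->err_node_list; (err_node); \
         (err_node) = (err_node)->next)

int main(void)
{
    struct err_node b = { 2, NULL };
    struct err_node a = { 1, &b };
    struct ras_err_data err_data = { &a };
    struct err_node *err_node;
    unsigned long total = 0;

    for_each_ras_error(err_node, &err_data)   /* visit every node */
        total += err_node->ue_count;
    printf("total ue=%lu\n", total);
    return 0;
}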
|
amdgpu_aca.c
     513  amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, count);   in aca_log_aca_error_data()
     516  amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, count);   in aca_log_aca_error_data()
     519  amdgpu_ras_error_statistic_de_count(err_data, &mcm_info, count);   in aca_log_aca_error_data()
     528  …a_log_aca_error(struct aca_handle *handle, enum aca_error_type type, struct ras_err_data *err_data)   in aca_log_aca_error() argument
     540  aca_log_aca_error_data(bank_error, type, err_data);     in aca_log_aca_error()
     551  struct ras_err_data *err_data, struct ras_query_context *qctx)   in __aca_get_error_data() argument
     575  aca_log_aca_error(handle, ACA_ERROR_TYPE_DEFERRED, err_data);   in __aca_get_error_data()
     577  return aca_log_aca_error(handle, type, err_data);       in __aca_get_error_data()
     589  enum aca_error_type type, struct ras_err_data *err_data,   in amdgpu_aca_get_error_data() argument
     592  if (!handle || !err_data)                               in amdgpu_aca_get_error_data()
     [all …]
|
umc_v8_14.c
      96  struct ras_err_data *err_data = (struct ras_err_data *)data;   in umc_v8_14_query_error_count_per_channel() local
     102  &(err_data->ce_count));                                 in umc_v8_14_query_error_count_per_channel()
     105  &(err_data->ue_count));                                 in umc_v8_14_query_error_count_per_channel()
|
amdgpu_umc.h
     109  struct ras_err_data *err_data,
     162  int amdgpu_umc_fill_error_record(struct ras_err_data *err_data,
     185  struct ras_err_data *err_data, uint64_t pa_addr);
|
sdma_v4_4.c
     199  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   in sdma_v4_4_query_ras_error_count_by_instance() local
     225  err_data->ue_count += sec_count;                        in sdma_v4_4_query_ras_error_count_by_instance()
     231  err_data->ce_count = 0;                                 in sdma_v4_4_query_ras_error_count_by_instance()
|
amdgpu_xgmi.c
    1400  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   in amdgpu_xgmi_legacy_query_ras_error_count() local
    1408  err_data->ue_count = 0;                                 in amdgpu_xgmi_legacy_query_ras_error_count()
    1409  err_data->ce_count = 0;                                 in amdgpu_xgmi_legacy_query_ras_error_count()
    1490  err_data->ue_count += ue_cnt;                           in amdgpu_xgmi_legacy_query_ras_error_count()
    1491  err_data->ce_count += ce_cnt;                           in amdgpu_xgmi_legacy_query_ras_error_count()
    1519  u64 mca_base, struct ras_err_data *err_data)            in __xgmi_v6_4_0_query_error_count() argument
    1530  amdgpu_ras_error_statistic_ue_count(err_data, mcm_info, 1ULL);   in __xgmi_v6_4_0_query_error_count()
    1533  amdgpu_ras_error_statistic_ce_count(err_data, mcm_info, 1ULL);   in __xgmi_v6_4_0_query_error_count()
    1551  __xgmi_v6_4_0_query_error_count(adev, &mcm_info, xgmi_v6_4_0_mca_base_array[i], err_data);   in xgmi_v6_4_0_query_error_count()
    1556  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   in xgmi_v6_4_0_query_ras_error_count() local
    [all …]
|
mmhub_v1_0.c
     797  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   in mmhub_v1_0_query_ras_error_count() local
     802  err_data->ue_count = 0;                                 in mmhub_v1_0_query_ras_error_count()
     803  err_data->ce_count = 0;                                 in mmhub_v1_0_query_ras_error_count()
     814  err_data->ce_count += sec_count;                        in mmhub_v1_0_query_ras_error_count()
     815  err_data->ue_count += ded_count;                        in mmhub_v1_0_query_ras_error_count()
|
umc_v6_7.h
      75  struct ras_err_data *err_data, uint64_t err_addr,
|
mmhub_v1_7.c
    1271  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   in mmhub_v1_7_query_ras_error_count() local
    1276  err_data->ue_count = 0;                                 in mmhub_v1_7_query_ras_error_count()
    1277  err_data->ce_count = 0;                                 in mmhub_v1_7_query_ras_error_count()
    1287  err_data->ce_count += sec_count;                        in mmhub_v1_7_query_ras_error_count()
    1288  err_data->ue_count += ded_count;                        in mmhub_v1_7_query_ras_error_count()
|
gfx_v9_4_2.c
    1648  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   in gfx_v9_4_2_query_ras_error_count() local
    1654  err_data->ue_count = 0;                                 in gfx_v9_4_2_query_ras_error_count()
    1655  err_data->ce_count = 0;                                 in gfx_v9_4_2_query_ras_error_count()
    1658  err_data->ce_count += sec_count;                        in gfx_v9_4_2_query_ras_error_count()
    1659  err_data->ue_count += ded_count;                        in gfx_v9_4_2_query_ras_error_count()
    1662  err_data->ce_count += sec_count;                        in gfx_v9_4_2_query_ras_error_count()
    1663  err_data->ue_count += ded_count;                        in gfx_v9_4_2_query_ras_error_count()
|
/drivers/misc/
smpro-errmon.c
     251  unsigned char err_data[MAX_READ_BLOCK_LENGTH];          in smpro_error_data_read() local
     271  memset(err_data, 0x00, MAX_READ_BLOCK_LENGTH);          in smpro_error_data_read()
     272  ret = regmap_noinc_read(errmon->regmap, err_info->data, err_data, err_length);   in smpro_error_data_read()
     285  return sysfs_emit(buf, "%*phN\n", MAX_READ_BLOCK_LENGTH, err_data);   in smpro_error_data_read()
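
The read path zeroes the whole fixed-size buffer before a possibly shorter `regmap_noinc_read()`, then prints the full width with the kernel's `%*phN` hex specifier, so short reads come out zero-padded. A userspace sketch of that zero-pad-then-hex-dump behavior (the device read is faked):

#include <stdio.h>
#include <string.h>

#define MAX_READ_BLOCK_LENGTH 48

/* Fake device read: fills only the first few bytes, like a short
 * regmap_noinc_read() of a small error record. */
static int read_error_block(unsigned char *buf, size_t len)
{
    const unsigned char sample[4] = { 0xde, 0xad, 0xbe, 0xef };

    memcpy(buf, sample, len < sizeof(sample) ? len : sizeof(sample));
    return 0;
}

int main(void)
{
    unsigned char err_data[MAX_READ_BLOCK_LENGTH];
    int i;

    /* Zero first, so a short read still yields a fixed-width,
     * zero-padded record when the whole buffer is printed. */
    memset(err_data, 0x00, MAX_READ_BLOCK_LENGTH);
    if (read_error_block(err_data, 4))
        return 1;

    for (i = 0; i < MAX_READ_BLOCK_LENGTH; i++)
        printf("%02hhx", err_data[i]);   /* userspace analog of %*phN */
    putchar('\n');
    return 0;
}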
|
/drivers/mtd/nand/raw/
nuvoton-ma35d1-nand-controller.c
     290  u32 err_data[6];                                        in ma35_nfi_correct() local
     317  err_data[i] = readl(nand->regs + MA35_NFI_REG_NANDECCED0 + i * 4);   in ma35_nfi_correct()
     320  temp_data[i * 4 + 0] = err_data[i] & 0xff;              in ma35_nfi_correct()
     321  temp_data[i * 4 + 1] = (err_data[i] >> 8) & 0xff;       in ma35_nfi_correct()
     322  temp_data[i * 4 + 2] = (err_data[i] >> 16) & 0xff;      in ma35_nfi_correct()
     323  temp_data[i * 4 + 3] = (err_data[i] >> 24) & 0xff;      in ma35_nfi_correct()
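
Lines 320-323 unpack each 32-bit ECC error-data register into bytes, least significant first. A standalone version of that little-endian unpack (in kernel code, `put_unaligned_le32()` would express the same intent):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* Pretend these came from readl() on the ECC error-data registers. */
    uint32_t err_data[6] = { 0x44332211, 0x88776655 };
    uint8_t temp_data[24];
    int i;

    /* Little-endian unpack, byte by byte, as at lines 320-323. */
    for (i = 0; i < 6; i++) {
        temp_data[i * 4 + 0] = err_data[i] & 0xff;
        temp_data[i * 4 + 1] = (err_data[i] >> 8) & 0xff;
        temp_data[i * 4 + 2] = (err_data[i] >> 16) & 0xff;
        temp_data[i * 4 + 3] = (err_data[i] >> 24) & 0xff;
    }

    printf("%02x %02x %02x %02x\n", temp_data[0], temp_data[1],
           temp_data[2], temp_data[3]);   /* prints: 11 22 33 44 */
    return 0;
}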
|
/drivers/clk/hisilicon/
clk.c
      78  goto err_data;                                          in hisi_clk_init()
      84  err_data:                                               in hisi_clk_init()
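
Here `err_data` is not a variable but a cleanup label: `hisi_clk_init()` unwinds partially acquired resources with the kernel's usual goto-based error handling. A generic sketch of the idiom, with `malloc` standing in for the real allocations:

#include <stdlib.h>

/* Two-stage init with goto unwinding: each label releases exactly what
 * was acquired before the failing step, in reverse order. */
static int init_example(void)
{
    char *base, *data;

    base = malloc(64);
    if (!base)
        goto err;

    data = malloc(64);
    if (!data)
        goto err_data;   /* only 'base' is live and needs freeing */

    /* ...real setup work would happen here... */
    free(data);
    free(base);
    return 0;

err_data:
    free(base);
err:
    return -1;
}

int main(void)
{
    return init_example() ? 1 : 0;
}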
|