Searched refs:err_data (Results 1 – 25 of 43) sorted by relevance

/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_umc.c
59 err_data.err_addr = in amdgpu_umc_page_retirement_mca()
116 err_data->err_addr = in amdgpu_umc_handle_bad_pages()
164 if (err_data->ue_count || err_data->de_count) { in amdgpu_umc_handle_bad_pages()
165 err_count = err_data->ue_count + err_data->de_count; in amdgpu_umc_handle_bad_pages()
198 if ((err_data->ue_count || err_data->de_count) && in amdgpu_umc_do_page_retirement()
240 obj->err_data.ue_count += err_data.ue_count; in amdgpu_umc_pasid_poison_handler()
241 obj->err_data.ce_count += err_data.ce_count; in amdgpu_umc_pasid_poison_handler()
242 obj->err_data.de_count += err_data.de_count; in amdgpu_umc_pasid_poison_handler()
365 if (!err_data || in amdgpu_umc_fill_error_record()
367 (err_data->err_addr_cnt >= err_data->err_addr_len)) in amdgpu_umc_fill_error_record()
[all …]
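
Note: the amdgpu_umc.c hits cluster around amdgpu_umc_fill_error_record() and the bad-page handling that feeds it; lines 365-367 show the guard that stops recording once err_addr_cnt reaches err_addr_len. Below is a minimal user-space sketch of just that guard; the mock_-prefixed types and their simplified fields are assumptions for illustration, not the driver's real struct ras_err_data.

/*
 * Sketch of the bounds check seen at amdgpu_umc.c:365-367: refuse to
 * record another bad address once the record array is full or absent.
 */
#include <stdint.h>
#include <stdio.h>

struct mock_err_record {
    uint64_t err_addr;                 /* retired page address */
};

struct mock_err_data {
    struct mock_err_record *err_addr;  /* array of pending records */
    uint32_t err_addr_cnt;             /* records filled so far */
    uint32_t err_addr_len;             /* capacity of the array */
};

static int mock_fill_error_record(struct mock_err_data *err_data, uint64_t addr)
{
    if (!err_data || !err_data->err_addr ||
        err_data->err_addr_cnt >= err_data->err_addr_len)
        return -1;

    err_data->err_addr[err_data->err_addr_cnt++].err_addr = addr;
    return 0;
}

int main(void)
{
    struct mock_err_record records[2];
    struct mock_err_data data = { records, 0, 2 };

    mock_fill_error_record(&data, 0x1000);
    mock_fill_error_record(&data, 0x2000);
    /* third call fails: capacity reached */
    printf("third fill -> %d\n", mock_fill_error_record(&data, 0x3000));
    return 0;
}
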
umc_v8_7.c
104 &(err_data->ce_count)); in umc_v8_7_ecc_info_query_ras_error_count()
107 &(err_data->ue_count)); in umc_v8_7_ecc_info_query_ras_error_count()
126 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v8_7_convert_error_address()
131 struct ras_err_data *err_data, in umc_v8_7_ecc_info_query_error_address() argument
145 if (!err_data->err_addr) in umc_v8_7_ecc_info_query_error_address()
174 err_data, in umc_v8_7_ecc_info_query_ras_error_address()
317 &(err_data->ce_count)); in umc_v8_7_query_ras_error_count()
320 &(err_data->ue_count)); in umc_v8_7_query_ras_error_count()
327 struct ras_err_data *err_data, in umc_v8_7_query_error_address() argument
344 if (!err_data->err_addr) { in umc_v8_7_query_error_address()
[all …]
amdgpu_ras.c
1147 if (err_data->ce_count) { in amdgpu_ras_error_generate_report()
1170 if (err_data->ue_count) { in amdgpu_ras_error_generate_report()
1193 if (err_data->de_count) { in amdgpu_ras_error_generate_report()
1234 obj->err_data.ue_count += err_data->ue_count; in amdgpu_rasmgr_error_data_statistic_update()
1235 obj->err_data.ce_count += err_data->ce_count; in amdgpu_rasmgr_error_data_statistic_update()
1236 obj->err_data.de_count += err_data->de_count; in amdgpu_rasmgr_error_data_statistic_update()
1401 &err_data, in amdgpu_ras_query_error_status_with_event()
2221 obj->err_data.ue_count += err_data.ue_count; in amdgpu_ras_interrupt_umc_handler()
2222 obj->err_data.ce_count += err_data.ce_count; in amdgpu_ras_interrupt_umc_handler()
2223 obj->err_data.de_count += err_data.de_count; in amdgpu_ras_interrupt_umc_handler()
[all …]
umc_v8_10.c
147 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v8_10_query_ecc_error_count() local
153 &(err_data->ce_count)); in umc_v8_10_query_ecc_error_count()
156 &(err_data->ue_count)); in umc_v8_10_query_ecc_error_count()
206 struct ras_err_data *err_data, uint64_t err_addr, in umc_v8_10_convert_error_address() argument
239 amdgpu_umc_fill_error_record(err_data, na_err_addr, in umc_v8_10_convert_error_address()
262 if (!err_data->err_addr) { in umc_v8_10_query_error_address()
277 umc_v8_10_convert_error_address(adev, err_data, err_addr, in umc_v8_10_query_error_address()
387 &(err_data->ce_count)); in umc_v8_10_ecc_info_query_ecc_error_count()
390 &(err_data->ue_count)); in umc_v8_10_ecc_info_query_ecc_error_count()
420 if (!err_data->err_addr) in umc_v8_10_ecc_info_query_error_address()
[all …]
umc_v6_7.c
171 &(err_data->ce_count)); in umc_v6_7_ecc_info_querry_ecc_error_count()
175 &(err_data->ue_count)); in umc_v6_7_ecc_info_querry_ecc_error_count()
188 struct ras_err_data *err_data, uint64_t err_addr, in umc_v6_7_convert_error_address() argument
211 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v6_7_convert_error_address()
217 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v6_7_convert_error_address()
237 if (!err_data->err_addr) in umc_v6_7_ecc_info_query_error_address()
247 umc_v6_7_convert_error_address(adev, err_data, err_addr, in umc_v6_7_ecc_info_query_error_address()
422 &(err_data->ce_count), in umc_v6_7_query_ecc_error_count()
427 &(err_data->ue_count)); in umc_v6_7_query_ecc_error_count()
461 if (!err_data->err_addr) { in umc_v6_7_query_error_address()
[all …]
umc_v12_0.c
140 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v12_0_query_error_count() local
160 amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, ue_count); in umc_v12_0_query_error_count()
177 struct ras_err_data *err_data, in umc_v12_0_convert_error_address() argument
215 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v12_0_convert_error_address()
223 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v12_0_convert_error_address()
332 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v12_0_query_error_address() local
348 if (!err_data->err_addr) { in umc_v12_0_query_error_address()
377 umc_v12_0_convert_error_address(adev, err_data, &addr_in); in umc_v12_0_query_error_address()
628 if (!err_data || !ecc_err) in umc_v12_0_fill_error_record()
637 ret = amdgpu_umc_fill_error_record(err_data, in umc_v12_0_fill_error_record()
[all …]
nbio_v7_4.c
364 struct ras_err_data err_data; in nbio_v7_4_handle_ras_controller_intr_no_bifring() local
367 if (amdgpu_ras_error_data_init(&err_data)) in nbio_v7_4_handle_ras_controller_intr_no_bifring()
395 obj->err_data.ue_count += err_data.ue_count; in nbio_v7_4_handle_ras_controller_intr_no_bifring()
396 obj->err_data.ce_count += err_data.ce_count; in nbio_v7_4_handle_ras_controller_intr_no_bifring()
398 if (err_data.ce_count) in nbio_v7_4_handle_ras_controller_intr_no_bifring()
401 obj->err_data.ce_count, in nbio_v7_4_handle_ras_controller_intr_no_bifring()
404 if (err_data.ue_count) in nbio_v7_4_handle_ras_controller_intr_no_bifring()
407 obj->err_data.ue_count, in nbio_v7_4_handle_ras_controller_intr_no_bifring()
421 amdgpu_ras_error_data_fini(&err_data); in nbio_v7_4_handle_ras_controller_intr_no_bifring()
611 err_data->ce_count++; in nbio_v7_4_query_ras_error_count()
[all …]
nbio_v7_9.c
525 struct ras_err_data err_data; in nbio_v7_9_handle_ras_controller_intr_no_bifring() local
528 if (amdgpu_ras_error_data_init(&err_data)) in nbio_v7_9_handle_ras_controller_intr_no_bifring()
547 nbio_v7_9_query_ras_error_count(adev, &err_data); in nbio_v7_9_handle_ras_controller_intr_no_bifring()
550 obj->err_data.ue_count += err_data.ue_count; in nbio_v7_9_handle_ras_controller_intr_no_bifring()
551 obj->err_data.ce_count += err_data.ce_count; in nbio_v7_9_handle_ras_controller_intr_no_bifring()
553 if (err_data.ce_count) in nbio_v7_9_handle_ras_controller_intr_no_bifring()
556 obj->err_data.ce_count, in nbio_v7_9_handle_ras_controller_intr_no_bifring()
559 if (err_data.ue_count) in nbio_v7_9_handle_ras_controller_intr_no_bifring()
562 obj->err_data.ue_count, in nbio_v7_9_handle_ras_controller_intr_no_bifring()
570 amdgpu_ras_error_data_fini(&err_data); in nbio_v7_9_handle_ras_controller_intr_no_bifring()
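
Note: the two nbio handlers above show the usual lifecycle of a stack-local err_data: amdgpu_ras_error_data_init(), a query that fills the counters, accumulation into the long-lived obj->err_data totals, conditional logging, then amdgpu_ras_error_data_fini(). The sketch below reproduces that flow in plain user-space C; every mock_-prefixed name is a stand-in, not the kernel API.

#include <stdio.h>

struct mock_err_data {
    unsigned long ue_count;   /* uncorrectable errors */
    unsigned long ce_count;   /* correctable errors */
};

struct mock_ras_obj {
    struct mock_err_data err_data;   /* running totals for the block */
};

static void mock_query_error_count(struct mock_err_data *err_data)
{
    /* A real handler reads hardware counters; fake one CE here. */
    err_data->ce_count += 1;
}

static void mock_handle_ras_controller_intr(struct mock_ras_obj *obj)
{
    struct mock_err_data err_data = { 0, 0 };     /* "init" */

    mock_query_error_count(&err_data);            /* "query" */

    obj->err_data.ue_count += err_data.ue_count;  /* accumulate totals */
    obj->err_data.ce_count += err_data.ce_count;

    if (err_data.ce_count)
        printf("%lu correctable errors detected (total %lu)\n",
               err_data.ce_count, obj->err_data.ce_count);
    if (err_data.ue_count)
        printf("%lu uncorrectable errors detected (total %lu)\n",
               err_data.ue_count, obj->err_data.ue_count);
    /* "fini" would release any per-query allocations here */
}

int main(void)
{
    struct mock_ras_obj obj = { { 0, 0 } };

    mock_handle_ras_controller_intr(&obj);
    mock_handle_ras_controller_intr(&obj);
    return 0;
}
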
umc_v6_1.c
257 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in umc_v6_1_query_ras_error_count() local
279 &(err_data->ce_count)); in umc_v6_1_query_ras_error_count()
282 &(err_data->ue_count)); in umc_v6_1_query_ras_error_count()
296 struct ras_err_data *err_data, in umc_v6_1_query_error_address() argument
324 if (!err_data->err_addr) { in umc_v6_1_query_error_address()
345 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v6_1_query_error_address()
356 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in umc_v6_1_query_ras_error_address() local
377 err_data, in umc_v6_1_query_ras_error_address()
hdp_v4_0.c
67 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in hdp_v4_0_query_ras_error_count() local
69 err_data->ue_count = 0; in hdp_v4_0_query_ras_error_count()
70 err_data->ce_count = 0; in hdp_v4_0_query_ras_error_count()
76 err_data->ue_count += RREG32_SOC15(HDP, 0, mmHDP_EDC_CNT); in hdp_v4_0_query_ras_error_count()
amdgpu_mca.c
76 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in amdgpu_mca_query_ras_error_count() local
78 amdgpu_mca_query_correctable_error_count(adev, mc_status_addr, &(err_data->ce_count)); in amdgpu_mca_query_ras_error_count()
79 amdgpu_mca_query_uncorrectable_error_count(adev, mc_status_addr, &(err_data->ue_count)); in amdgpu_mca_query_ras_error_count()
397 struct mca_bank_set *mca_set, struct ras_err_data *err_data) in amdgpu_mca_dispatch_mca_set() argument
428 amdgpu_ras_error_statistic_ue_count(err_data, in amdgpu_mca_dispatch_mca_set()
432 amdgpu_ras_error_statistic_de_count(err_data, in amdgpu_mca_dispatch_mca_set()
435 amdgpu_ras_error_statistic_ce_count(err_data, in amdgpu_mca_dispatch_mca_set()
458 struct ras_err_data *err_data, struct ras_query_context *qctx) in amdgpu_mca_smu_log_ras_error() argument
470 ret = amdgpu_mca_dispatch_mca_set(adev, blk, type, &mca_set, err_data); in amdgpu_mca_smu_log_ras_error()
484 ret = amdgpu_mca_dispatch_mca_set(adev, blk, type, &mca_cache->mca_set, err_data); in amdgpu_mca_smu_log_ras_error()
gfx_v9_4.c
716 err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
724 err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
738 err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
747 err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
760 err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
768 err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
782 err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
877 err_data->ue_count = 0; in gfx_v9_4_query_ras_error_count()
878 err_data->ce_count = 0; in gfx_v9_4_query_ras_error_count()
898 err_data->ce_count += sec_count; in gfx_v9_4_query_ras_error_count()
[all …]
amdgpu_ras.h
598 #define for_each_ras_error(err_node, err_data) \ argument
599 list_for_each_entry(err_node, &(err_data)->err_node_list, node)
611 void *err_data,
647 struct ras_err_data err_data; member
933 int amdgpu_ras_error_data_init(struct ras_err_data *err_data);
934 void amdgpu_ras_error_data_fini(struct ras_err_data *err_data);
935 int amdgpu_ras_error_statistic_ce_count(struct ras_err_data *err_data,
938 int amdgpu_ras_error_statistic_ue_count(struct ras_err_data *err_data,
941 int amdgpu_ras_error_statistic_de_count(struct ras_err_data *err_data,
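
Note: the amdgpu_ras.h hits show that struct ras_err_data keeps per-source statistics on a list (err_node_list) and that for_each_ras_error() is a list_for_each_entry() wrapper over it, with the amdgpu_ras_error_statistic_{ce,ue,de}_count() helpers populating the nodes. The sketch below imitates only that shape with a plain singly linked list; the node layout and the mock_ names are invented for illustration, not the kernel structures.

#include <stdio.h>

struct mock_err_info {
    unsigned long ue_count;
    unsigned long ce_count;
    unsigned long de_count;   /* deferred errors */
};

struct mock_err_node {
    struct mock_err_info err_info;
    struct mock_err_node *next;
};

/* Simplified analogue of iterating err_data->err_node_list. */
#define for_each_mock_error(node, head) \
    for ((node) = (head); (node); (node) = (node)->next)

int main(void)
{
    struct mock_err_node b = { { 0, 2, 0 }, NULL };
    struct mock_err_node a = { { 1, 3, 0 }, &b };
    struct mock_err_node *node;
    unsigned long total_ce = 0, total_ue = 0;

    /* Walk every per-source node and sum the counters, as a consumer
     * of the per-node statistics would. */
    for_each_mock_error(node, &a) {
        total_ce += node->err_info.ce_count;
        total_ue += node->err_info.ue_count;
    }

    printf("ce=%lu ue=%lu\n", total_ce, total_ue);
    return 0;
}
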
amdgpu_aca.c
440 …or_data(struct aca_bank_error *bank_error, enum aca_error_type type, struct ras_err_data *err_data) in aca_log_aca_error_data() argument
459 amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, count); in aca_log_aca_error_data()
462 amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, count); in aca_log_aca_error_data()
465 amdgpu_ras_error_statistic_de_count(err_data, &mcm_info, count); in aca_log_aca_error_data()
474 …a_log_aca_error(struct aca_handle *handle, enum aca_error_type type, struct ras_err_data *err_data) in aca_log_aca_error() argument
486 aca_log_aca_error_data(bank_error, type, err_data); in aca_log_aca_error()
497 struct ras_err_data *err_data, struct ras_query_context *qctx) in __aca_get_error_data() argument
519 return aca_log_aca_error(handle, type, err_data); in __aca_get_error_data()
531 enum aca_error_type type, struct ras_err_data *err_data, in amdgpu_aca_get_error_data() argument
534 if (!handle || !err_data) in amdgpu_aca_get_error_data()
[all …]
sdma_v4_4.c
199 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in sdma_v4_4_query_ras_error_count_by_instance() local
225 err_data->ue_count += sec_count; in sdma_v4_4_query_ras_error_count_by_instance()
231 err_data->ce_count = 0; in sdma_v4_4_query_ras_error_count_by_instance()
amdgpu_xgmi.c
1263 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in amdgpu_xgmi_legacy_query_ras_error_count() local
1271 err_data->ue_count = 0; in amdgpu_xgmi_legacy_query_ras_error_count()
1272 err_data->ce_count = 0; in amdgpu_xgmi_legacy_query_ras_error_count()
1352 err_data->ue_count += ue_cnt; in amdgpu_xgmi_legacy_query_ras_error_count()
1353 err_data->ce_count += ce_cnt; in amdgpu_xgmi_legacy_query_ras_error_count()
1381 u64 mca_base, struct ras_err_data *err_data) in __xgmi_v6_4_0_query_error_count() argument
1392 amdgpu_ras_error_statistic_ue_count(err_data, mcm_info, 1ULL); in __xgmi_v6_4_0_query_error_count()
1395 amdgpu_ras_error_statistic_ce_count(err_data, mcm_info, 1ULL); in __xgmi_v6_4_0_query_error_count()
1413 __xgmi_v6_4_0_query_error_count(adev, &mcm_info, xgmi_v6_4_0_mca_base_array[i], err_data); in xgmi_v6_4_0_query_error_count()
1418 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in xgmi_v6_4_0_query_ras_error_count() local
[all …]
umc_v6_7.h
75 struct ras_err_data *err_data, uint64_t err_addr,
mmhub_v1_0.c
748 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in mmhub_v1_0_query_ras_error_count() local
753 err_data->ue_count = 0; in mmhub_v1_0_query_ras_error_count()
754 err_data->ce_count = 0; in mmhub_v1_0_query_ras_error_count()
765 err_data->ce_count += sec_count; in mmhub_v1_0_query_ras_error_count()
766 err_data->ue_count += ded_count; in mmhub_v1_0_query_ras_error_count()
amdgpu_sdma.h
168 void *err_data,
amdgpu_umc.h
115 int amdgpu_umc_fill_error_record(struct ras_err_data *err_data,
amdgpu_mca.h
167 struct ras_err_data *err_data, struct ras_query_context *qctx);
mmhub_v1_8.c
629 struct ras_err_data *err_data = (struct ras_err_data *)ras_err_status; in mmhub_v1_8_inst_query_ras_error_count() local
656 amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, ce_count); in mmhub_v1_8_inst_query_ras_error_count()
657 amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, ue_count); in mmhub_v1_8_inst_query_ras_error_count()
mmhub_v1_7.c
1246 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in mmhub_v1_7_query_ras_error_count() local
1251 err_data->ue_count = 0; in mmhub_v1_7_query_ras_error_count()
1252 err_data->ce_count = 0; in mmhub_v1_7_query_ras_error_count()
1262 err_data->ce_count += sec_count; in mmhub_v1_7_query_ras_error_count()
1263 err_data->ue_count += ded_count; in mmhub_v1_7_query_ras_error_count()
/linux/drivers/misc/
smpro-errmon.c
251 unsigned char err_data[MAX_READ_BLOCK_LENGTH]; in smpro_error_data_read() local
271 memset(err_data, 0x00, MAX_READ_BLOCK_LENGTH); in smpro_error_data_read()
272 ret = regmap_noinc_read(errmon->regmap, err_info->data, err_data, err_length); in smpro_error_data_read()
285 return sysfs_emit(buf, "%*phN\n", MAX_READ_BLOCK_LENGTH, err_data); in smpro_error_data_read()
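
Note: the smpro-errmon.c hit is unrelated to the GPU RAS structures; err_data there is a fixed-size byte buffer that is zeroed, filled with at most err_length bytes of an error record via regmap_noinc_read(), and emitted as one hex string through sysfs_emit() and the %*phN extension. A rough user-space rendering of the same idea follows; read_block() and MAX_BLOCK_LEN are hypothetical stand-ins for the device access and the driver's constant.

#include <stdio.h>
#include <string.h>

#define MAX_BLOCK_LEN 48

/* Hypothetical device read: copies at most len bytes into buf. */
static int read_block(unsigned char *buf, int len)
{
    static const unsigned char sample[] = { 0xde, 0xad, 0xbe, 0xef };
    int n = len < (int)sizeof(sample) ? len : (int)sizeof(sample);

    memcpy(buf, sample, n);
    return n;
}

int main(void)
{
    unsigned char err_data[MAX_BLOCK_LEN];
    int i;

    memset(err_data, 0x00, MAX_BLOCK_LEN);   /* unread tail stays zeroed */
    read_block(err_data, MAX_BLOCK_LEN);

    /* Emit the whole buffer as hex, padding included. */
    for (i = 0; i < MAX_BLOCK_LEN; i++)
        printf("%02x", err_data[i]);
    printf("\n");
    return 0;
}
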
/linux/drivers/clk/hisilicon/
clk.c
78 goto err_data; in hisi_clk_init()
84 err_data: in hisi_clk_init()
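
Note: the clk.c match is a different use of the name altogether: err_data is a goto label in hisi_clk_init()'s error-unwind path, not a structure. A minimal sketch of that cleanup idiom is below; the allocation sequence and names are invented, only the label pattern mirrors the hit.

#include <stdlib.h>

struct clk_data {
    void *table;
    void *data;
};

static struct clk_data *mock_clk_init(void)
{
    struct clk_data *clk = malloc(sizeof(*clk));

    if (!clk)
        return NULL;

    clk->table = malloc(64);
    if (!clk->table)
        goto err;

    clk->data = malloc(64);
    if (!clk->data)
        goto err_data;   /* undo everything allocated so far */

    return clk;

err_data:
    free(clk->table);
err:
    free(clk);
    return NULL;
}

int main(void)
{
    struct clk_data *clk = mock_clk_init();

    if (clk) {
        free(clk->table);
        free(clk->data);
        free(clk);
    }
    return 0;
}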

Completed in 68 milliseconds
