Searched refs:read_errors (Results 1 – 7 of 7) sorted by relevance
  80  unsigned int read_errors;                                member
 268  cw_bat->read_errors++;                                   in cw_get_soc()
 269  if (cw_bat->read_errors > max_error_cycles) {            in cw_get_soc()
 273  cw_bat->read_errors = 0;                                 in cw_get_soc()
 277  cw_bat->read_errors = 0;                                 in cw_get_soc()
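
The cw_get_soc() fragments above appear to implement a consecutive-error counter: every bad read bumps read_errors, and only a run longer than max_error_cycles is treated as a real fault, after which the counter is cleared. A minimal userspace sketch of that pattern follows; read_sensor(), get_value() and MAX_ERROR_CYCLES are illustrative names, not taken from the driver.

    /* Sketch of a consecutive-error counter with a threshold, modelled on
     * the cw_get_soc() hits above. read_sensor(), get_value() and
     * MAX_ERROR_CYCLES are illustrative placeholders. */
    #include <stdio.h>

    #define MAX_ERROR_CYCLES 5

    static unsigned int read_errors;

    /* Hypothetical read that fails now and then; returns <0 on error. */
    static int read_sensor(int i)
    {
        return (i % 7 == 3) ? -1 : 42;
    }

    static int get_value(int i)
    {
        int v = read_sensor(i);

        if (v < 0) {
            read_errors++;                     /* another consecutive failure */
            if (read_errors > MAX_ERROR_CYCLES) {
                read_errors = 0;               /* give up and start over */
                return -1;
            }
            return 0;                          /* tolerate transient failures */
        }

        read_errors = 0;                       /* any success resets the run */
        return v;
    }

    int main(void)
    {
        for (int i = 0; i < 20; i++) {
            int v = get_value(i);
            printf("%d -> %d (errors=%u)\n", i, v, read_errors);
        }
        return 0;
    }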
2656  unsigned int read_errors = atomic_read(&rdev->read_errors);       in check_decay_read_errors() local
2676  if (hours_since_last >= 8 * sizeof(read_errors))                  in check_decay_read_errors()
2677  atomic_set(&rdev->read_errors, 0);                                 in check_decay_read_errors()
2679  atomic_set(&rdev->read_errors, read_errors >> hours_since_last);   in check_decay_read_errors()
2733  atomic_inc(&rdev->read_errors);                                    in fix_read_error()
2734  if (atomic_read(&rdev->read_errors) > max_read_errors) {           in fix_read_error()
2737  atomic_read(&rdev->read_errors), max_read_errors);                 in fix_read_error()
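
check_decay_read_errors() above ages the per-device counter instead of letting old failures accumulate forever: the stored count is right-shifted once per elapsed hour, and cleared outright when the shift distance would reach the counter's bit width. A small standalone sketch of that decay rule; decay_read_errors() and its arguments are illustrative, not the md code.

    /* Sketch of the decay rule from the check_decay_read_errors() hits:
     * halve the accumulated error count once per elapsed hour, clearing it
     * when the shift would exceed the counter width. */
    #include <stdio.h>

    static unsigned int decay_read_errors(unsigned int read_errors,
                                          unsigned int hours_since_last)
    {
        /* Shifting by >= the bit width is undefined in C, so clear instead. */
        if (hours_since_last >= 8 * sizeof(read_errors))
            return 0;
        return read_errors >> hours_since_last;
    }

    int main(void)
    {
        printf("%u\n", decay_read_errors(40, 0));   /* 40 */
        printf("%u\n", decay_read_errors(40, 2));   /* 10 */
        printf("%u\n", decay_read_errors(40, 64));  /* 0  */
        return 0;
    }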
112 atomic_t read_errors; /* number of consecutive read errors that member
2807  if (atomic_read(&rdev->read_errors))                              in raid5_end_read_request()
2808  atomic_set(&rdev->read_errors, 0);                                 in raid5_end_read_request()
2815  atomic_inc(&rdev->read_errors);                                    in raid5_end_read_request()
2837  } else if (atomic_read(&rdev->read_errors)                         in raid5_end_read_request()
2842  atomic_read(&rdev->read_errors),                                   in raid5_end_read_request()
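
raid5_end_read_request() shows the completion-side half of the same bookkeeping: a successful read resets the consecutive-error count, a failed one increments it, and crossing a limit takes the device out of service. A rough model using C11 atomics, with illustrative names (handle_read_completion(), MAX_READ_ERRORS, struct dev) rather than the md data structures.

    /* Sketch of the completion-path pattern above: success clears the
     * consecutive-error count, failure bumps it, and too many failures in
     * a row mark the device faulty. Names are illustrative. */
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define MAX_READ_ERRORS 16

    struct dev {
        atomic_uint read_errors;
        bool faulty;
    };

    static void handle_read_completion(struct dev *d, bool uptodate)
    {
        if (uptodate) {
            /* Only consecutive failures matter, so success resets the count. */
            if (atomic_load(&d->read_errors))
                atomic_store(&d->read_errors, 0);
            return;
        }

        if (atomic_fetch_add(&d->read_errors, 1) + 1 > MAX_READ_ERRORS)
            d->faulty = true;                  /* too many failures in a row */
    }

    int main(void)
    {
        struct dev d = { .read_errors = 0, .faulty = false };

        for (int i = 0; i < 20 && !d.faulty; i++)
            handle_read_completion(&d, false);
        printf("errors=%u faulty=%d\n", atomic_load(&d.read_errors), d.faulty);
        return 0;
    }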
3619 atomic_set(&rdev->read_errors, 0); in md_rdev_init()
161 __u64 read_errors; /* # of read errors encountered (EIO) */ member
1073  sctx->stat.read_errors++;           in scrub_handle_errored_block()
1123  sctx->stat.read_errors++;           in scrub_handle_errored_block()
1135  sctx->stat.read_errors++;           in scrub_handle_errored_block()
1169  sctx->stat.read_errors++;           in scrub_handle_errored_block()
2360  sctx->stat.read_errors++;           in scrub_missing_raid56_worker()
2951  sctx->stat.read_errors += nbits;    in scrub_free_parity()
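
The scrub hits accumulate into a per-context statistics structure rather than a per-device counter: single errored blocks add one, and the scrub_free_parity() case adds a whole count at once (the += nbits line). A toy version of that accounting, with an illustrative struct and bit-count helper rather than the scrub code's types:

    /* Sketch of the accounting pattern above: a stats struct whose
     * read_errors field is bumped per failed block and by a bit count in
     * the bulk case. struct scrub_stats and count_set_bits() are
     * illustrative names. */
    #include <stdint.h>
    #include <stdio.h>

    struct scrub_stats {
        uint64_t read_errors;
    };

    static unsigned int count_set_bits(uint64_t bitmap)
    {
        unsigned int n = 0;

        for (; bitmap; bitmap &= bitmap - 1)
            n++;
        return n;
    }

    int main(void)
    {
        struct scrub_stats stat = { 0 };
        uint64_t failed_sectors = 0xF0;        /* four sectors still unreadable */

        stat.read_errors++;                    /* one errored block */
        stat.read_errors += count_set_bits(failed_sectors);

        printf("read_errors=%llu\n", (unsigned long long)stat.read_errors);
        return 0;
    }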
Completed in 68 milliseconds