Searched refs:layout_map_count (Results 1 – 4 of 4) sorted by relevance
1020  __le16 layout_map_count;  /* layout maps (1 map per */    member
1053  u16 layout_map_count;    member
1352  if (get_unaligned_le16(&raid_map->layout_map_count) != 2) {    in pqi_validate_raid_map()
1357  if (get_unaligned_le16(&raid_map->layout_map_count) != 3) {    in pqi_validate_raid_map()
1363  get_unaligned_le16(&raid_map->layout_map_count) > 1) {    in pqi_validate_raid_map()
2702  rmd->layout_map_count = get_unaligned_le16(&raid_map->layout_map_count);    in pci_get_aio_common_raid_map_values()
2763  rmd->stripesize = rmd->blocks_per_row * rmd->layout_map_count;    in pqi_calc_aio_r5_or_r6()
2910  if (rmd->layout_map_count > 2) {    in pqi_calc_aio_r1_nexus()
2915  rmd->num_it_nexus_entries = rmd->layout_map_count;    in pqi_calc_aio_r1_nexus()
2955  if (next_bypass_group >= rmd.layout_map_count)    in pqi_raid_bypass_submit_scsi_cmd()
2962  (rmd.layout_map_count > 1 || rmd.is_write)) {    in pqi_raid_bypass_submit_scsi_cmd()
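
Taken together, the smartpqi hits read as follows: layout_map_count is a little-endian 16-bit count of layout maps in the controller's RAID map (the struct comment ties it to mirror/parity grouping), pqi_validate_raid_map() checks it against the count expected for the RAID level, and the AIO bypass path multiplies it into the stripe size, uses it as the number of IT nexus entries, and wraps a bypass group index at it. The sketch below is a minimal userspace illustration of that pattern, not the driver code: the raid_map_sketch struct, the raid_level enum, read_le16() (standing in for the kernel's get_unaligned_le16()), and the pairing of each count check with a specific RAID level are assumptions made for illustration.

/*
 * Minimal userspace sketch of the checks visible in the smartpqi hits above.
 * Everything except the layout_map_count field itself is an illustrative
 * stand-in; read_le16() plays the role of get_unaligned_le16().
 */
#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

enum raid_level { RAID_1, RAID_TRIPLE, RAID_5, RAID_6 };	/* illustrative */

struct raid_map_sketch {
	uint8_t layout_map_count_le[2];	/* stored little-endian, like __le16 */
	uint32_t blocks_per_row;
};

static uint16_t read_le16(const uint8_t *p)
{
	return (uint16_t)(p[0] | (p[1] << 8));
}

/*
 * Mirrors the shape of pqi_validate_raid_map(): two-way mirroring wants
 * exactly two layout maps, triple mirroring wants three, and parity RAID is
 * assumed here to allow at most one (the RAID-level pairing is inferred).
 */
static bool validate_layout_map_count(enum raid_level level,
				      const struct raid_map_sketch *map)
{
	uint16_t count = read_le16(map->layout_map_count_le);

	switch (level) {
	case RAID_1:
		return count == 2;
	case RAID_TRIPLE:
		return count == 3;
	case RAID_5:
	case RAID_6:
		return count <= 1;
	default:
		return false;
	}
}

int main(void)
{
	struct raid_map_sketch map = {
		.layout_map_count_le = { 2, 0 },	/* two mirror groups */
		.blocks_per_row = 1024,
	};
	uint16_t count = read_le16(map.layout_map_count_le);

	/* Same arithmetic as the pqi_calc_aio_r5_or_r6() hit: one row of
	 * blocks per layout map makes up the stripe. */
	uint32_t stripesize = map.blocks_per_row * count;

	printf("RAID 1 map valid: %d, stripesize: %u\n",
	       (int)validate_layout_map_count(RAID_1, &map), stripesize);
	return 0;
}
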
246  __le16 layout_map_count;  /* layout maps (1 map per mirror/parity    member
1711  le16_to_cpu(map->layout_map_count) *    in hpsa_figure_phys_disk_ptrs()
1713  int nphys_disk = le16_to_cpu(map->layout_map_count) *    in hpsa_figure_phys_disk_ptrs()
3270  le16_to_cpu(map_buff->layout_map_count));    in hpsa_debug_map_buff()
3278  map_cnt = le16_to_cpu(map_buff->layout_map_count);    in hpsa_debug_map_buff()
5089  if (*current_group < le16_to_cpu(map->layout_map_count) - 1) {    in raid_map_helper()
5267  if (le16_to_cpu(map->layout_map_count) != 2) {    in hpsa_scsi_ioaccel_raid_map()
5280  if (le16_to_cpu(map->layout_map_count) != 3) {    in hpsa_scsi_ioaccel_raid_map()
5291  le16_to_cpu(map->layout_map_count) - 1)    in hpsa_scsi_ioaccel_raid_map()
5301  if (le16_to_cpu(map->layout_map_count) <= 1)    in hpsa_scsi_ioaccel_raid_map()
5313  le16_to_cpu(map->layout_map_count);    in hpsa_scsi_ioaccel_raid_map()
[all …]
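
The hpsa hits show the same two roles: hpsa_figure_phys_disk_ptrs() multiplies layout_map_count into the number of physical disks the map references, and raid_map_helper() / hpsa_scsi_ioaccel_raid_map() compare a running group index against layout_map_count - 1, which looks like round-robin selection of a mirror group for reads. Both expressions are truncated in the output, so the sketch below is a guess at the shape of that logic: the hpsa_map_sketch struct, the data/metadata disks-per-row fields, le16_to_host() (standing in for le16_to_cpu()), and both helpers are illustrative stand-ins, not the hpsa code.

/*
 * Userspace sketch of the two uses visible in the hpsa hits above: sizing a
 * physical-disk array and round-robin mirror group selection.
 */
#include <stdint.h>
#include <stdio.h>

struct hpsa_map_sketch {
	uint8_t layout_map_count_le[2];	/* little-endian, like __le16 */
	uint16_t data_disks_per_row;	/* assumed companion fields */
	uint16_t metadata_disks_per_row;
};

static uint16_t le16_to_host(const uint8_t *p)
{
	return (uint16_t)(p[0] | (p[1] << 8));
}

/* Mirrors the truncated multiplication in hpsa_figure_phys_disk_ptrs():
 * the map holds one full row of disks per layout map. */
static int total_phys_disks(const struct hpsa_map_sketch *map)
{
	return le16_to_host(map->layout_map_count_le) *
	       (map->data_disks_per_row + map->metadata_disks_per_row);
}

/* Mirrors the wrap-around pattern around raid_map_helper(): advance to the
 * next mirror group, wrapping once the last group has been used. */
static void next_mirror_group(const struct hpsa_map_sketch *map,
			      uint16_t *current_group)
{
	if (*current_group < le16_to_host(map->layout_map_count_le) - 1)
		(*current_group)++;
	else
		*current_group = 0;
}

int main(void)
{
	struct hpsa_map_sketch map = {
		.layout_map_count_le = { 3, 0 },	/* e.g. triple mirroring */
		.data_disks_per_row = 1,
		.metadata_disks_per_row = 0,
	};
	uint16_t group = 0;
	int i;

	printf("physical disks referenced by the map: %d\n",
	       total_phys_disks(&map));
	for (i = 0; i < 5; i++) {
		next_mirror_group(&map, &group);
		printf("read %d -> mirror group %u\n", i, group);
	}
	return 0;
}
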
Completed in 48 milliseconds