/linux/drivers/nvdimm/
region_devs.c
      62  int nd_region_activate(struct nd_region *nd_region)  in nd_region_activate() argument
     126  struct nd_region *nd_region = to_nd_region(dev);  in nd_region_release() local
     145  struct nd_region *nd_region = container_of(dev, struct nd_region, dev);  in to_nd_region() local
     162  struct nd_region *nd_region = to_nd_region(dev);  in to_nd_blk_region() local
    1040  struct nd_region *nd_region;  in nd_region_create() local
    1077  nd_region = &ndbr->nd_region;  in nd_region_create()
    1193  if (nd_region->flush(nd_region, bio))  in nvdimm_flush()
    1289  struct nd_region *nd_region;  member
    1295  struct nd_region *nd_region;  in region_conflict() local
    1303  if (nd_region == ctx->nd_region)  in region_conflict()
    [all …]
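
The hit at line 1193 shows nvdimm_flush() dispatching through the region's per-driver flush callback. A minimal sketch of that dispatch, assuming the fallback to generic_nvdimm_flush() declared in include/linux/libnvdimm.h; the in-tree error handling may differ:

```c
/*
 * Hedged sketch of nvdimm_flush() dispatch, reconstructed from the call at
 * region_devs.c line 1193 and the declarations in libnvdimm.h. The -EIO
 * mapping of a callback failure is an assumption.
 */
int nvdimm_flush_sketch(struct nd_region *nd_region, struct bio *bio)
{
	int rc = 0;

	if (!nd_region->flush)
		/* no driver override: generic write-pending-queue flush */
		rc = generic_nvdimm_flush(nd_region);
	else if (nd_region->flush(nd_region, bio))	/* line 1193 */
		rc = -EIO;

	return rc;
}
```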
|
region.c
      17  struct nd_region *nd_region = to_nd_region(dev);  in nd_region_probe() local
      26  nd_region->num_lanes);  in nd_region_probe()
      40  .end = nd_region->ndr_start + nd_region->ndr_size - 1,  in nd_region_probe()
      45  nd_region->bb_state = sysfs_get_dirent(nd_region->dev.kobj.sd,  in nd_region_probe()
      47  if (!nd_region->bb_state)  in nd_region_probe()
      50  nvdimm_badblocks_populate(nd_region, &nd_region->bb, &range);  in nd_region_probe()
      64  nd_region->btt_seed = nd_btt_create(nd_region);  in nd_region_probe()
      65  nd_region->pfn_seed = nd_pfn_create(nd_region);  in nd_region_probe()
      66  nd_region->dax_seed = nd_dax_create(nd_region);  in nd_region_probe()
      92  struct nd_region *nd_region = to_nd_region(dev);  in nd_region_remove() local
    [all …]
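
The region.c hits trace the probe-time pattern: build the physical range from ndr_start/ndr_size, populate the region's badblocks, then instantiate the btt/pfn/dax seed child devices. An illustrative sketch (not the verbatim nd_region_probe(); headers and error handling elided):

```c
/* Sketch of the probe pattern visible in the region.c references above. */
static int nd_region_probe_sketch(struct nd_region *nd_region)
{
	struct range range = {
		.start = nd_region->ndr_start,
		.end = nd_region->ndr_start + nd_region->ndr_size - 1,	/* line 40 */
	};

	/* seed the region-level badblocks list from the bus badrange data */
	nvdimm_badblocks_populate(nd_region, &nd_region->bb, &range);	/* line 50 */

	/* create the idle "seed" devices userspace can claim later */
	nd_region->btt_seed = nd_btt_create(nd_region);			/* line 64 */
	nd_region->pfn_seed = nd_pfn_create(nd_region);			/* line 65 */
	nd_region->dax_seed = nd_dax_create(nd_region);			/* line 66 */

	return 0;
}
```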
|
namespace_devs.c
      26  struct nd_region *nd_region = to_nd_region(dev->parent);  in namespace_pmem_release() local
      38  struct nd_region *nd_region = to_nd_region(dev->parent);  in namespace_blk_release() local
     107  struct nd_region *nd_region = to_nd_region(dev->parent);  in pmem_should_map_pages() local
     419  static int scan_free(struct nd_region *nd_region,  in scan_free() argument
     545  align = nd_region->align / nd_region->ndr_mappings;  in space_valid()
     744  static int merge_dpa(struct nd_region *nd_region,  in merge_dpa() argument
     778  struct nd_region *nd_region;  in __reserve_free_pmem() local
    2185  nd_region->dax_seed = nd_dax_create(nd_region);  in nd_region_create_dax_seed()
    2197  nd_region->pfn_seed = nd_pfn_create(nd_region);  in nd_region_create_pfn_seed()
    2209  nd_region->btt_seed = nd_btt_create(nd_region);  in nd_region_create_btt_seed()
    [all …]
|
nd-core.h
     118  struct nd_region;
     120  void nd_region_create_ns_seed(struct nd_region *nd_region);
     121  void nd_region_create_btt_seed(struct nd_region *nd_region);
     122  void nd_region_create_pfn_seed(struct nd_region *nd_region);
     123  void nd_region_create_dax_seed(struct nd_region *nd_region);
     132  struct nd_region;
     141  resource_size_t nd_pmem_max_contiguous_dpa(struct nd_region *nd_region,
     144  resource_size_t nd_pmem_available_dpa(struct nd_region *nd_region,
     146  resource_size_t nd_blk_available_dpa(struct nd_region *nd_region);
     147  resource_size_t nd_region_available_dpa(struct nd_region *nd_region);
    [all …]
|
dax_devs.c
      15  struct nd_region *nd_region = to_nd_region(dev->parent);  in nd_dax_release() local
      21  ida_simple_remove(&nd_region->dax_ida, nd_pfn->id);  in nd_dax_release()
      47  static struct nd_dax *nd_dax_alloc(struct nd_region *nd_region)  in nd_dax_alloc() argument
      58  nd_pfn->id = ida_simple_get(&nd_region->dax_ida, 0, 0, GFP_KERNEL);  in nd_dax_alloc()
      65  dev_set_name(dev, "dax%d.%d", nd_region->id, nd_pfn->id);  in nd_dax_alloc()
      67  dev->parent = &nd_region->dev;  in nd_dax_alloc()
      72  struct device *nd_dax_create(struct nd_region *nd_region)  in nd_dax_create() argument
      77  if (!is_memory(&nd_region->dev))  in nd_dax_create()
      80  nd_dax = nd_dax_alloc(nd_region);  in nd_dax_create()
      94  struct nd_region *nd_region = to_nd_region(ndns->dev.parent);  in nd_dax_probe() local
    [all …]
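
The dax_devs.c hits show the seed-device naming pattern (btt_devs.c and pfn_devs.c below follow the same shape): take an id from a region-owned IDA, derive the child device name from the region id, and parent the new device to the region. A hedged sketch of just those steps, with the surrounding allocation and release logic elided:

```c
/*
 * Sketch of the seed-device init steps visible in dax_devs.c; the helper name
 * and the pre-allocated dev argument are illustrative, not from the tree.
 */
static int seed_dev_init_sketch(struct nd_region *nd_region, struct device *dev)
{
	int id = ida_simple_get(&nd_region->dax_ida, 0, 0, GFP_KERNEL);	/* line 58 */

	if (id < 0)
		return id;

	dev_set_name(dev, "dax%d.%d", nd_region->id, id);	/* line 65 */
	dev->parent = &nd_region->dev;				/* line 67 */
	return 0;
}
```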
|
nd.h
     406  struct nd_region {  struct
     427  int (*flush)(struct nd_region *nd_region, struct bio *bio);  argument
     445  struct nd_region nd_region;  member
     556  struct device *nd_btt_create(struct nd_region *nd_region);
     582  struct device *nd_pfn_create(struct nd_region *nd_region);
     614  struct device *nd_dax_create(struct nd_region *nd_region);
     633  int nd_region_to_nstype(struct nd_region *nd_region);
     635  u64 nd_region_interleave_set_cookie(struct nd_region *nd_region,
     658  void nvdimm_badblocks_populate(struct nd_region *nd_region,
     675  int nd_blk_region_init(struct nd_region *nd_region);
    [all …]
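
The hit at nd.h line 406 is the struct definition itself. As a reading aid, here is an abridged, illustrative view limited to members that actually appear elsewhere in this listing; except for the flush callback (shown verbatim at line 427), the member types are guesses and the real struct contains more fields:

```c
/* Illustrative, abridged view of struct nd_region; types are assumptions. */
struct nd_region_abridged {
	struct device dev;			/* &nd_region->dev throughout */
	u64 ndr_start, ndr_size;		/* region span, region.c line 40 */
	u16 ndr_mappings;			/* DIMM mappings, dimm_devs.c dpa_align() */
	unsigned long align;
	int id, num_lanes, ro, target_node;
	struct ida btt_ida, dax_ida;		/* ids for seed child devices */
	struct device *btt_seed, *pfn_seed, *dax_seed;
	struct badblocks bb;
	struct kernfs_node *bb_state;		/* sysfs badblocks dirent, region.c line 45 */
	struct nd_interleave_set *nd_set;	/* label.c __pmem_label_update() */
	void *provider_data;			/* e.g. the virtio device in virtio_pmem */
	int (*flush)(struct nd_region *nd_region, struct bio *bio);	/* line 427 */
};
```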
|
dimm_devs.c
     216  struct nd_region *nd_region = &ndbr->nd_region;  in nd_blk_region_to_dimm() local
     714  static unsigned long dpa_align(struct nd_region *nd_region)  in dpa_align() argument
     721  if (dev_WARN_ONCE(dev, !nd_region->ndr_mappings || nd_region->align  in dpa_align()
     722  % nd_region->ndr_mappings,  in dpa_align()
     724  nd_region->align, nd_region->ndr_mappings))  in dpa_align()
     726  return nd_region->align / nd_region->ndr_mappings;  in dpa_align()
     734  struct nd_region *nd_region;  in alias_dpa_busy() local
     775  align = dpa_align(nd_region);  in alias_dpa_busy()
     818  resource_size_t nd_blk_available_dpa(struct nd_region *nd_region)  in nd_blk_available_dpa() argument
     862  resource_size_t nd_pmem_max_contiguous_dpa(struct nd_region *nd_region,  in nd_pmem_max_contiguous_dpa() argument
    [all …]
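
The dpa_align() hits at lines 714-726 spell out the per-DIMM alignment rule: the region alignment divided evenly across its mappings, with a warning (and 0 returned) when it does not divide cleanly. A compact sketch of that logic, minus the dev_WARN_ONCE() plumbing:

```c
/* Sketch of dpa_align(), reconstructed from dimm_devs.c lines 714-726. */
static unsigned long dpa_align_sketch(struct nd_region *nd_region)
{
	if (!nd_region->ndr_mappings ||
	    nd_region->align % nd_region->ndr_mappings)
		return 0;	/* the real code warns via dev_WARN_ONCE() here */

	return nd_region->align / nd_region->ndr_mappings;
}
```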
|
btt_devs.c
      18  struct nd_region *nd_region = to_nd_region(dev->parent);  in nd_btt_release() local
      23  ida_simple_remove(&nd_region->btt_ida, nd_btt->id);  in nd_btt_release()
     182  static struct device *__nd_btt_create(struct nd_region *nd_region,  in __nd_btt_create() argument
     193  nd_btt->id = ida_simple_get(&nd_region->btt_ida, 0, 0, GFP_KERNEL);  in __nd_btt_create()
     205  dev_set_name(dev, "btt%d.%d", nd_region->id, nd_btt->id);  in __nd_btt_create()
     206  dev->parent = &nd_region->dev;  in __nd_btt_create()
     218  ida_simple_remove(&nd_region->btt_ida, nd_btt->id);  in __nd_btt_create()
     225  struct device *nd_btt_create(struct nd_region *nd_region)  in nd_btt_create() argument
     227  struct device *dev = __nd_btt_create(nd_region, 0, NULL, NULL);  in nd_btt_create()
     338  struct nd_region *nd_region = to_nd_region(ndns->dev.parent);  in nd_btt_probe() local
    [all …]
|
pfn_devs.c
      19  struct nd_region *nd_region = to_nd_region(dev->parent);  in nd_pfn_release() local
     316  static struct nd_pfn *nd_pfn_alloc(struct nd_region *nd_region)  in nd_pfn_alloc() argument
     334  dev->parent = &nd_region->dev;  in nd_pfn_alloc()
     339  struct device *nd_pfn_create(struct nd_region *nd_region)  in nd_pfn_create() argument
     344  if (!is_memory(&nd_region->dev))  in nd_pfn_create()
     347  nd_pfn = nd_pfn_alloc(nd_region);  in nd_pfn_create()
     362  struct nd_region *nd_region = to_nd_region(nd_pfn->dev.parent);  in nd_pfn_clear_memmap_errors() local
     619  struct nd_region *nd_region = to_nd_region(ndns->dev.parent);  in nd_pfn_probe() local
     723  struct nd_region *nd_region;  in nd_pfn_init() local
     752  if (nd_region->ro) {  in nd_pfn_init()
    [all …]
|
nd_virtio.c
      38  static int virtio_pmem_flush(struct nd_region *nd_region)  in virtio_pmem_flush() argument
      40  struct virtio_device *vdev = nd_region->provider_data;  in virtio_pmem_flush()
     101  int async_pmem_flush(struct nd_region *nd_region, struct bio *bio)  in async_pmem_flush() argument
     119  if (virtio_pmem_flush(nd_region))  in async_pmem_flush()
|
virtio_pmem.c
      36  struct nd_region *nd_region;  in virtio_pmem_probe() local
      86  nd_region = nvdimm_pmem_region_create(vpmem->nvdimm_bus, &ndr_desc);  in virtio_pmem_probe()
      87  if (!nd_region) {  in virtio_pmem_probe()
      92  nd_region->provider_data = dev_to_virtio(nd_region->dev.parent->parent);  in virtio_pmem_probe()
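
Together, the nd_virtio.c and virtio_pmem.c hits show how a provider substitutes its own flush path: the region is created with a driver flush callback (the callback slot at libnvdimm.h line 130), and the virtio device is stashed in provider_data so virtio_pmem_flush() can fetch it back (nd_virtio.c line 40). A hedged sketch of the probe-side wiring; names other than those shown in the listing are assumptions:

```c
/* Illustrative probe-side wiring for a virtio-backed pmem region. */
static int virtio_pmem_region_sketch(struct nvdimm_bus *bus,
		struct nd_region_desc *ndr_desc)
{
	struct nd_region *nd_region;

	/* async flush routed through the virtio request queue */
	ndr_desc->flush = async_pmem_flush;		/* virtio_pmem.h line 54 */

	nd_region = nvdimm_pmem_region_create(bus, ndr_desc);	/* line 86 */
	if (!nd_region)
		return -ENXIO;

	/* line 92: the flush path reads this back as the virtio device */
	nd_region->provider_data = dev_to_virtio(nd_region->dev.parent->parent);
	return 0;
}
```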
|
bus.c
     157  void nvdimm_region_notify(struct nd_region *nd_region, enum nvdimm_event event)  in nvdimm_region_notify() argument
     176  struct nd_region *nd_region;  in nvdimm_clear_badblocks_region() local
     184  nd_region = to_nd_region(dev);  in nvdimm_clear_badblocks_region()
     185  ndr_end = nd_region->ndr_start + nd_region->ndr_size - 1;  in nvdimm_clear_badblocks_region()
     195  if (nd_region->bb_state)  in nvdimm_clear_badblocks_region()
     625  struct nd_region *nd_region = to_nd_region(dev->parent);  in nvdimm_check_and_set_ro()
     629  if (disk_ro == nd_region->ro)  in nvdimm_check_and_set_ro()
     633  dev_name(&nd_region->dev), nd_region->ro ? "only" : "write",  in nvdimm_check_and_set_ro()
     677  struct nd_region *nd_region = NULL;  in nvdimm_dev_to_target_node() local
     684  if (!nd_region)  in nvdimm_dev_to_target_node()
    [all …]
|
pmem.c
      43  static struct nd_region *to_region(struct pmem_device *pmem)  in to_region()
     202  struct nd_region *nd_region = to_region(pmem);  in pmem_submit_bio() local
     205  ret = nvdimm_flush(nd_region, bio);  in pmem_submit_bio()
     226  ret = nvdimm_flush(nd_region, bio);  in pmem_submit_bio()
     393  struct nd_region *nd_region = to_nd_region(dev->parent);  in pmem_attach_disk() local
     429  fua = nvdimm_has_flush(nd_region);  in pmem_attach_disk()
     498  if (is_nvdimm_sync(nd_region))  in pmem_attach_disk()
     606  struct nd_region *nd_region;  in pmem_revalidate_poison() local
     618  nd_region = to_nd_region(ndns->dev.parent);  in pmem_revalidate_poison()
     625  nd_region = to_region(pmem);  in pmem_revalidate_poison()
    [all …]
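
On the consumer side, pmem.c probes the region's flush capability once at attach time (line 429, used for FUA support) and calls nvdimm_flush() from the bio submission path (lines 205 and 226). A hedged sketch of that data-path shape; the real pmem_submit_bio() does the actual pmem copy and accounting in between:

```c
/* Sketch of the flush placement in the pmem bio path; details elided. */
static void pmem_submit_bio_sketch(struct bio *bio, struct pmem_device *pmem)
{
	struct nd_region *nd_region = to_region(pmem);		/* line 202 */
	int ret = 0;

	if (bio->bi_opf & REQ_PREFLUSH)
		ret = nvdimm_flush(nd_region, bio);		/* line 205 */

	/* ... copy each bio segment to/from persistent memory ... */

	if (bio->bi_opf & REQ_FUA)
		ret = nvdimm_flush(nd_region, bio);		/* line 226 */

	if (ret)
		bio->bi_status = BLK_STS_IOERR;
	bio_endio(bio);
}
```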
|
label.h
     222  struct nd_region;
     225  int nd_pmem_namespace_label_update(struct nd_region *nd_region,
     227  int nd_blk_namespace_label_update(struct nd_region *nd_region,
|
label.c
     411  struct nd_region *nd_region = NULL;  in nd_label_reserve_dpa() local
     430  nd_dbg_dpa(nd_region, ndd, res, "reserve\n");  in nd_label_reserve_dpa()
     877  static int __pmem_label_update(struct nd_region *nd_region,  in __pmem_label_update() argument
     882  struct nd_interleave_set *nd_set = nd_region->nd_set;  in __pmem_label_update()
     931  nd_dbg_dpa(nd_region, ndd, res, "\n");  in __pmem_label_update()
    1057  static int __blk_label_update(struct nd_region *nd_region,  in __blk_label_update() argument
    1062  struct nd_interleave_set *nd_set = nd_region->nd_set;  in __blk_label_update()
    1380  int nd_pmem_namespace_label_update(struct nd_region *nd_region,  in nd_pmem_namespace_label_update() argument
    1385  for (i = 0; i < nd_region->ndr_mappings; i++) {  in nd_pmem_namespace_label_update()
    1417  for (i = 0; i < nd_region->ndr_mappings; i++) {  in nd_pmem_namespace_label_update()
    [all …]
|
blk.c
      54  struct nd_region *nd_region;  in to_ndbr() local
      58  nd_region = container_of(parent, struct nd_region, dev);  in to_ndbr()
      59  return container_of(nd_region, struct nd_blk_region, nd_region);  in to_ndbr()
|
badrange.c
     269  void nvdimm_badblocks_populate(struct nd_region *nd_region,  in nvdimm_badblocks_populate() argument
     274  if (!is_memory(&nd_region->dev)) {  in nvdimm_badblocks_populate()
     275  dev_WARN_ONCE(&nd_region->dev, 1,  in nvdimm_badblocks_populate()
     279  nvdimm_bus = walk_to_nvdimm_bus(&nd_region->dev);  in nvdimm_badblocks_populate()
|
claim.c
      79  struct nd_region *nd_region = to_nd_region(dev->parent);  in is_idle() local
      83  seed = nd_region->btt_seed;  in is_idle()
      85  seed = nd_region->pfn_seed;  in is_idle()
      87  seed = nd_region->dax_seed;  in is_idle()
|
virtio_pmem.h
      54  int async_pmem_flush(struct nd_region *nd_region, struct bio *bio);
|
btt.c
    1210  lane = nd_region_acquire_lane(btt->nd_region);  in btt_read_pg()
    1280  nd_region_release_lane(btt->nd_region, lane);  in btt_read_pg()
    1292  nd_region_release_lane(btt->nd_region, lane);  in btt_read_pg()
    1404  nd_region_release_lane(btt->nd_region, lane);  in btt_write_pg()
    1422  nd_region_release_lane(btt->nd_region, lane);  in btt_write_pg()
    1583  struct nd_region *nd_region)  in btt_init() argument
    1600  btt->nd_region = nd_region;  in btt_init()
    1612  dev_name(&nd_region->dev));  in btt_init()
    1665  struct nd_region *nd_region;  in nvdimm_namespace_attach_btt() local
    1700  nd_region = to_nd_region(nd_btt->dev.parent);  in nvdimm_namespace_attach_btt()
    [all …]
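
The btt.c hits show the lane protocol: each read/write acquires a lane from the parent region and releases it once the transfer (and any map/log update) is done. A short sketch of that pattern, using the accessor signatures declared in include/linux/libnvdimm.h; the I/O body itself is elided:

```c
/* Sketch of the per-I/O lane usage visible in btt_read_pg()/btt_write_pg(). */
static int btt_lane_io_sketch(struct btt *btt)
{
	unsigned int lane = nd_region_acquire_lane(btt->nd_region);	/* line 1210 */
	int ret = 0;

	/* ... perform the per-lane read or write against the arena ... */

	nd_region_release_lane(btt->nd_region, lane);			/* line 1280 */
	return ret;
}
```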
|
/linux/include/linux/
libnvdimm.h
     117  struct nd_region;
     130  int (*flush)(struct nd_region *nd_region, struct bio *bio);
     259  struct device *nd_region_dev(struct nd_region *nd_region);
     297  void *nd_region_provider_data(struct nd_region *nd_region);
     302  unsigned int nd_region_acquire_lane(struct nd_region *nd_region);
     303  void nd_region_release_lane(struct nd_region *nd_region, unsigned int lane);
     305  int nvdimm_flush(struct nd_region *nd_region, struct bio *bio);
     306  int generic_nvdimm_flush(struct nd_region *nd_region);
     307  int nvdimm_has_flush(struct nd_region *nd_region);
     308  int nvdimm_has_cache(struct nd_region *nd_region);
    [all …]
|
nd.h
     172  struct nd_region;
     173  void nvdimm_region_notify(struct nd_region *nd_region, enum nvdimm_event event);
|
/linux/drivers/dax/pmem/
core.c
      24  struct nd_region *nd_region = to_nd_region(dev->parent);  in __dax_pmem_probe() local
      57  nd_region->target_node, le32_to_cpu(pfn_sb->align),  in __dax_pmem_probe()
|
/linux/drivers/acpi/nfit/
nfit.h
     165  struct nd_region *nd_region;  member
     306  struct nd_region *nd_region;  member
|
/linux/tools/testing/nvdimm/test/
ndtest.h
      67  struct nd_region *blk_region;
      93  struct nd_region *region;
|