Lines Matching refs:ndd (Linux, drivers/nvdimm/dimm_devs.c)

40 static int validate_dimm(struct nvdimm_drvdata *ndd)  in validate_dimm()  argument
44 if (!ndd) in validate_dimm()
47 rc = nvdimm_check_config_data(ndd->dev); in validate_dimm()
49 dev_dbg(ndd->dev, "%ps: %s error: %d\n", in validate_dimm()
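The validate_dimm() hits above (source lines 40-49) show a guard-then-delegate helper: reject a NULL nvdimm_drvdata, call nvdimm_check_config_data() on the backing device, and report any failure through dev_dbg(). A minimal user-space sketch of that shape; dimm_data, check_config_data() and the chosen error values are illustrative stand-ins, not the kernel API:

#include <errno.h>
#include <stdio.h>

/* Stand-in for the driver-private data the listing calls "ndd". */
struct dimm_data {
	const char *name;
	int config_ok;	/* models whatever nvdimm_check_config_data() inspects */
};

/* Hypothetical stand-in for nvdimm_check_config_data(). */
static int check_config_data(const struct dimm_data *d)
{
	return d->config_ok ? 0 : -ENXIO;
}

static int validate_dimm(const struct dimm_data *d)
{
	int rc;

	if (!d)
		return -EINVAL;		/* no driver data bound */

	rc = check_config_data(d);
	if (rc)
		fprintf(stderr, "%s: config data error: %d\n", d->name, rc);
	return rc;
}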
58 int nvdimm_init_nsarea(struct nvdimm_drvdata *ndd) in nvdimm_init_nsarea() argument
60 struct nd_cmd_get_config_size *cmd = &ndd->nsarea; in nvdimm_init_nsarea()
61 struct nvdimm_bus *nvdimm_bus = walk_to_nvdimm_bus(ndd->dev); in nvdimm_init_nsarea()
63 int rc = validate_dimm(ndd); in nvdimm_init_nsarea()
74 rc = nd_desc->ndctl(nd_desc, to_nvdimm(ndd->dev), in nvdimm_init_nsarea()
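nvdimm_init_nsarea() (source lines 58-74) resolves the parent bus with walk_to_nvdimm_bus() and asks the bus descriptor's ndctl() callback to fill ndd->nsarea, the struct nd_cmd_get_config_size describing the DIMM's config area. A standalone sketch of that resolve-then-dispatch pattern; every name below (config_size, bus_desc, dimm, CMD_GET_CONFIG_SIZE) is an illustrative stand-in, not a kernel symbol:

/* Models struct nd_cmd_get_config_size: size and per-command transfer
 * limit of the DIMM's config/label area. */
struct config_size {
	unsigned int config_size;
	unsigned int max_xfer;
};

struct dimm;

/* Models the bus descriptor's ndctl() hook: one entry point for DIMM commands. */
struct bus_desc {
	int (*ndctl)(struct bus_desc *desc, struct dimm *dimm,
		     unsigned int cmd, void *buf, unsigned int buf_len);
};

struct dimm {
	struct bus_desc *bus;		/* what walk_to_nvdimm_bus() resolves */
	struct config_size nsarea;	/* cached result, like ndd->nsarea */
};

enum { CMD_GET_CONFIG_SIZE = 1 };	/* illustrative command number */

static int init_nsarea(struct dimm *dimm)
{
	/* Dispatch the "get config size" command through the bus callback
	 * and cache the answer on the DIMM, mirroring nvdimm_init_nsarea(). */
	return dimm->bus->ndctl(dimm->bus, dimm, CMD_GET_CONFIG_SIZE,
				&dimm->nsarea, sizeof(dimm->nsarea));
}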
81 int nvdimm_get_config_data(struct nvdimm_drvdata *ndd, void *buf, in nvdimm_get_config_data() argument
84 struct nvdimm_bus *nvdimm_bus = walk_to_nvdimm_bus(ndd->dev); in nvdimm_get_config_data()
86 int rc = validate_dimm(ndd), cmd_rc = 0; in nvdimm_get_config_data()
93 if (offset + len > ndd->nsarea.config_size) in nvdimm_get_config_data()
96 max_cmd_size = min_t(u32, len, ndd->nsarea.max_xfer); in nvdimm_get_config_data()
110 rc = nd_desc->ndctl(nd_desc, to_nvdimm(ndd->dev), in nvdimm_get_config_data()
127 int nvdimm_set_config_data(struct nvdimm_drvdata *ndd, size_t offset, in nvdimm_set_config_data() argument
132 int rc = validate_dimm(ndd), cmd_rc = 0; in nvdimm_set_config_data()
133 struct nvdimm_bus *nvdimm_bus = walk_to_nvdimm_bus(ndd->dev); in nvdimm_set_config_data()
139 if (offset + len > ndd->nsarea.config_size) in nvdimm_set_config_data()
142 max_cmd_size = min_t(u32, len, ndd->nsarea.max_xfer); in nvdimm_set_config_data()
158 rc = nd_desc->ndctl(nd_desc, to_nvdimm(ndd->dev), in nvdimm_set_config_data()
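nvdimm_get_config_data() (source lines 81-110) and nvdimm_set_config_data() (127-158) share one shape: reject any request where offset + len exceeds ndd->nsarea.config_size, then move the data in pieces no larger than ndd->nsarea.max_xfer, one ndctl() call per piece. A standalone sketch of the read direction; the write path is the same loop with the copy reversed. label_area and read_chunk() are stand-ins, not kernel symbols:

#include <errno.h>
#include <stddef.h>
#include <string.h>

struct label_area {
	unsigned char *backing;	/* stands in for the DIMM's config/label area */
	size_t config_size;	/* total size, cf. ndd->nsarea.config_size */
	size_t max_xfer;	/* per-command limit, cf. ndd->nsarea.max_xfer */
};

/* Models one "get config data" round trip for a single chunk. */
static int read_chunk(struct label_area *la, void *dst, size_t off, size_t len)
{
	memcpy(dst, la->backing + off, len);
	return 0;
}

/* Mirrors the shape of nvdimm_get_config_data(): bounds-check the request,
 * then loop in max_xfer-sized pieces. */
static int get_config_data(struct label_area *la, void *buf,
			   size_t offset, size_t len)
{
	size_t done;

	if (offset + len > la->config_size)
		return -ENXIO;

	for (done = 0; done < len; ) {
		size_t chunk = len - done;

		if (chunk > la->max_xfer)
			chunk = la->max_xfer;

		int rc = read_chunk(la, (unsigned char *)buf + done,
				    offset + done, chunk);
		if (rc)
			return rc;
		done += chunk;
	}
	return 0;
}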
222 struct nvdimm_drvdata *ndd = container_of(kref, typeof(*ndd), kref); in nvdimm_drvdata_release() local
223 struct device *dev = ndd->dev; in nvdimm_drvdata_release()
228 for_each_dpa_resource_safe(ndd, res, _r) in nvdimm_drvdata_release()
229 nvdimm_free_dpa(ndd, res); in nvdimm_drvdata_release()
232 kvfree(ndd->data); in nvdimm_drvdata_release()
233 kfree(ndd); in nvdimm_drvdata_release()
237 void get_ndd(struct nvdimm_drvdata *ndd) in get_ndd() argument
239 kref_get(&ndd->kref); in get_ndd()
242 void put_ndd(struct nvdimm_drvdata *ndd) in put_ndd() argument
244 if (ndd) in put_ndd()
245 kref_put(&ndd->kref, nvdimm_drvdata_release); in put_ndd()
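Source lines 222-245 cover the drvdata lifetime: nvdimm_drvdata_release() is the kref release callback (it frees every DPA resource via for_each_dpa_resource_safe()/nvdimm_free_dpa(), kvfree()s the cached data and kfree()s the ndd), while get_ndd()/put_ndd() wrap kref_get()/kref_put() and tolerate a NULL ndd on put. A user-space model of that lifetime pattern, assuming only the usual kref semantics (counted references, release runs once when the count hits zero); the names are stand-ins:

#include <stdatomic.h>
#include <stdlib.h>

struct dimm_data {
	atomic_int refcount;
	void *label_data;	/* cf. ndd->data, freed by the release callback */
};

/* Mirrors nvdimm_drvdata_release(): runs exactly once, when the last
 * reference is dropped, and tears everything down. (The kernel version
 * also walks and frees every DPA resource here.) */
static void dimm_data_release(struct dimm_data *d)
{
	free(d->label_data);
	free(d);
}

/* cf. get_ndd(): take an additional reference. */
static void get_dimm_data(struct dimm_data *d)
{
	atomic_fetch_add(&d->refcount, 1);
}

/* cf. put_ndd(): tolerate NULL, drop a reference, release at zero. */
static void put_dimm_data(struct dimm_data *d)
{
	if (!d)
		return;
	if (atomic_fetch_sub(&d->refcount, 1) == 1)
		dimm_data_release(d);
}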
317 static ssize_t __available_slots_show(struct nvdimm_drvdata *ndd, char *buf) in __available_slots_show() argument
323 if (!ndd) in __available_slots_show()
326 dev = ndd->dev; in __available_slots_show()
328 nfree = nd_label_nfree(ndd); in __available_slots_show()
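__available_slots_show() (source lines 317-328) is the sysfs read side: bail out if no driver data is bound, then ask nd_label_nfree() how many label slots remain and format the answer. A small standalone sketch of that shape; dimm_data and label_nfree() are stand-ins for the kernel structures:

#include <errno.h>
#include <stdio.h>
#include <sys/types.h>

struct dimm_data {
	unsigned int nslots;	/* total label slots in the config area */
	unsigned int nused;	/* slots currently claimed by namespaces */
};

/* Stand-in for nd_label_nfree(): how many label slots remain unclaimed. */
static unsigned int label_nfree(const struct dimm_data *d)
{
	return d->nslots - d->nused;
}

/* Mirrors the shape of __available_slots_show(): bail out if no driver
 * data is bound, otherwise format the free-slot count. */
static ssize_t available_slots_show(const struct dimm_data *d,
				    char *buf, size_t buflen)
{
	if (!d)
		return -ENXIO;
	return snprintf(buf, buflen, "%u\n", label_nfree(d));
}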
729 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in nd_pmem_max_contiguous_dpa() local
736 if (!ndd) in nd_pmem_max_contiguous_dpa()
743 nvdimm_bus = walk_to_nvdimm_bus(ndd->dev); in nd_pmem_max_contiguous_dpa()
746 for_each_dpa_resource(ndd, res) { in nd_pmem_max_contiguous_dpa()
774 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in nd_pmem_available_dpa() local
779 if (!ndd) in nd_pmem_available_dpa()
788 for_each_dpa_resource(ndd, res) { in nd_pmem_available_dpa()
795 nd_dbg_dpa(nd_region, ndd, res, in nd_pmem_available_dpa()
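nd_pmem_max_contiguous_dpa() (source lines 729-746) and nd_pmem_available_dpa() (774-795) both walk the DIMM's DPA resource tree with for_each_dpa_resource() to size the space still free for pmem: the first wants the largest free extent, the second the total. A standalone sketch of the largest-free-extent walk over a sorted list of claims (the total is the same walk summing the gaps instead of taking the max); dpa_res and the window arguments are illustrative stand-ins:

/* One allocated range inside a DIMM physical address (DPA) window,
 * standing in for the resources walked by for_each_dpa_resource(). */
struct dpa_res {
	unsigned long long start;
	unsigned long long end;		/* inclusive, like struct resource */
	struct dpa_res *next;		/* kept sorted by start */
};

/* Largest free extent between allocations in [win_start, win_end],
 * the quantity nd_pmem_max_contiguous_dpa() is after. */
static unsigned long long max_contiguous_free(const struct dpa_res *head,
					      unsigned long long win_start,
					      unsigned long long win_end)
{
	unsigned long long best = 0, cursor = win_start;

	for (const struct dpa_res *r = head; r; r = r->next) {
		if (r->start > cursor && r->start - cursor > best)
			best = r->start - cursor;	/* gap before this claim */
		if (r->end + 1 > cursor)
			cursor = r->end + 1;		/* skip past the claim */
	}
	if (win_end >= cursor && win_end - cursor + 1 > best)
		best = win_end - cursor + 1;		/* trailing gap */
	return best;
}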
813 void nvdimm_free_dpa(struct nvdimm_drvdata *ndd, struct resource *res) in nvdimm_free_dpa() argument
815 WARN_ON_ONCE(!is_nvdimm_bus_locked(ndd->dev)); in nvdimm_free_dpa()
817 __release_region(&ndd->dpa, res->start, resource_size(res)); in nvdimm_free_dpa()
820 struct resource *nvdimm_allocate_dpa(struct nvdimm_drvdata *ndd, in nvdimm_allocate_dpa() argument
830 WARN_ON_ONCE(!is_nvdimm_bus_locked(ndd->dev)); in nvdimm_allocate_dpa()
831 res = __request_region(&ndd->dpa, start, n, name, 0); in nvdimm_allocate_dpa()
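nvdimm_allocate_dpa() (source lines 820-831) and nvdimm_free_dpa() (813-817) claim and release DPA ranges against ndd->dpa through __request_region()/__release_region(), with both paths asserting that the nvdimm bus lock is held. A user-space model of the claim/release semantics (a sorted list of non-overlapping named ranges); region, dpa_root and the helpers are stand-ins, and the locking is omitted:

#include <stdio.h>
#include <stdlib.h>

/* Minimal model of a resource root (cf. ndd->dpa) holding
 * non-overlapping named ranges, as __request_region() enforces. */
struct region {
	unsigned long long start, end;	/* inclusive */
	char name[32];
	struct region *next;		/* sorted by start */
};

struct dpa_root {
	struct region *head;
};

/* Mirrors nvdimm_allocate_dpa(): claim [start, start + n - 1] for "name",
 * failing if it overlaps an existing claim. */
static struct region *allocate_dpa(struct dpa_root *root, const char *name,
				   unsigned long long start, unsigned long long n)
{
	struct region **link = &root->head;

	if (!n)
		return NULL;

	unsigned long long end = start + n - 1;

	while (*link && (*link)->end < start)
		link = &(*link)->next;
	if (*link && (*link)->start <= end)
		return NULL;			/* overlap: claim refused */

	struct region *res = calloc(1, sizeof(*res));
	if (!res)
		return NULL;
	res->start = start;
	res->end = end;
	snprintf(res->name, sizeof(res->name), "%s", name);
	res->next = *link;
	*link = res;
	return res;
}

/* Mirrors nvdimm_free_dpa(): drop the claim again. */
static void free_dpa(struct dpa_root *root, struct region *res)
{
	for (struct region **link = &root->head; *link; link = &(*link)->next)
		if (*link == res) {
			*link = res->next;
			free(res);
			return;
		}
}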
842 resource_size_t nvdimm_allocated_dpa(struct nvdimm_drvdata *ndd, in nvdimm_allocated_dpa() argument
848 for_each_dpa_resource(ndd, res) in nvdimm_allocated_dpa()
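nvdimm_allocated_dpa() (source lines 842-848) walks the same resource tree with for_each_dpa_resource(); its remaining arguments are not visible in the listing, so the sketch below assumes the usual accounting behaviour of totalling the ranges owned by one label id, matched by resource name. All names are stand-ins:

#include <string.h>

/* One claimed DPA range; name carries the owning label id, as the
 * kernel's dpa resource names do. */
struct dpa_claim {
	unsigned long long start, end;	/* inclusive */
	const char *name;
	struct dpa_claim *next;
};

/* Mirrors the shape of nvdimm_allocated_dpa(): walk every claim and
 * total the ranges that belong to the given id. */
static unsigned long long allocated_dpa(const struct dpa_claim *head,
					const char *id)
{
	unsigned long long total = 0;

	for (const struct dpa_claim *c = head; c; c = c->next)
		if (strcmp(c->name, id) == 0)
			total += c->end - c->start + 1;
	return total;
}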