/drivers/cxl/core/
`region.c`

| Line | Code | Context |
| ---- | ---- | ------- |
| 222 | `&cxlr->dev,` | `cxl_region_invalidate_memregion()` |
| 2119 | `if (!cxlr)` | `__cxl_decoder_detach()` |
| 2158 | `return cxlr;` | `__cxl_decoder_detach()` |
| 2415 | `cxlr = kzalloc(sizeof(*cxlr), GFP_KERNEL);` | `cxl_region_alloc()` |
| 2875 | `if (cxlr)` | `__cxl_dpa_to_region()` |
| 2882 | `ctx->cxlr = cxlr;` | `__cxl_dpa_to_region()` |
| 3043 | `cxlr_pmem->cxlr = cxlr;` | `cxl_pmem_region_alloc()` |
| 3101 | `cxlr_dax->cxlr = cxlr;` | `cxl_dax_region_alloc()` |
| 3115 | `struct cxl_region *cxlr = cxlr_pmem->cxlr;` | `cxlr_pmem_unregister()` (local) |
| 3419 | `} while (IS_ERR(cxlr) && PTR_ERR(cxlr) == -EBUSY);` | `construct_region()` |

[all …]
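
The `construct_region()` hit at region.c:3419 shows region construction being retried while the decoder is still busy. A minimal sketch of that retry shape follows; `__construct_region_once()` is a hypothetical stand-in for the loop body, and the decoder argument types are assumptions taken from the surrounding CXL code rather than from the listing.

```c
#include <linux/err.h>

/*
 * Illustrative only: keep retrying while construction fails with
 * -EBUSY, mirroring the IS_ERR()/PTR_ERR() loop at region.c:3419.
 * __construct_region_once() is a hypothetical helper.
 */
static struct cxl_region *construct_region_retry(struct cxl_root_decoder *cxlrd,
						 struct cxl_endpoint_decoder *cxled)
{
	struct cxl_region *cxlr;

	do {
		cxlr = __construct_region_once(cxlrd, cxled);
	} while (IS_ERR(cxlr) && PTR_ERR(cxlr) == -EBUSY);

	return cxlr;
}
```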
|
`cdat.c`

| Line | Code | Context |
| ---- | ---- | ------- |
| 848 | `dev_dbg(&cxlr->dev,` | `DEFINE_FREE()` |
| 969 | `cxlr->coord[i].read_bandwidth = coord[i].read_bandwidth;` | `cxl_region_update_bandwidth()` |
| 1001 | `for (int i = 0; i < cxlr->params.nr_targets; i++) {` | `cxl_region_shared_upstream_bandwidth_update()` |
| 1012 | `if (root_count && root_count != cxlr->params.nr_targets) {` | `cxl_region_shared_upstream_bandwidth_update()` |
| 1013 | `dev_dbg(&cxlr->dev,` | `cxl_region_shared_upstream_bandwidth_update()` |
| 1025 | `working_xa = cxl_switch_gather_bandwidth(cxlr, usp_xa,` | `cxl_region_shared_upstream_bandwidth_update()` |
| 1052 | `cxl_region_update_bandwidth(cxlr, usp_xa);` | `cxl_region_shared_upstream_bandwidth_update()` |
| 1068 | `cxlr->coord[i].read_latency = max_t(unsigned int,` | `cxl_region_perf_data_calculate()` |
| 1069 | `cxlr->coord[i].read_latency,` | `cxl_region_perf_data_calculate()` |
| 1071 | `cxlr->coord[i].write_latency = max_t(unsigned int,` | `cxl_region_perf_data_calculate()` |

[all …]
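
The `cxl_region_perf_data_calculate()` hits at cdat.c:1068-1071 show per-class latencies being merged with `max_t()`, i.e. the region reports the worst latency seen across its endpoints. A minimal sketch of that merge, assuming the incoming values arrive as a `struct access_coordinate` array; everything apart from the `cxlr->coord[]` updates and the `max_t()` usage is an assumption.

```c
#include <linux/minmax.h>
#include <linux/node.h>

/* Sketch: fold one endpoint's access coordinates into the region's. */
static void region_merge_coordinates(struct cxl_region *cxlr,
				     struct access_coordinate *perf)
{
	for (int i = 0; i < ACCESS_COORDINATE_MAX; i++) {
		cxlr->coord[i].read_latency = max_t(unsigned int,
						    cxlr->coord[i].read_latency,
						    perf[i].read_latency);
		cxlr->coord[i].write_latency = max_t(unsigned int,
						     cxlr->coord[i].write_latency,
						     perf[i].write_latency);
	}
}
```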
|
`trace.h`

| Line | Code | Context |
| ---- | ---- | ------- |
| 455 | `TP_ARGS(cxlmd, log, cxlr, hpa, hpa_alias0, rec),` | |
| 477 | `__string(region_name, cxlr ? dev_name(&cxlr->dev) : "")` | |
| 501 | `if (cxlr) {` | |
| 598 | `TP_ARGS(cxlmd, log, cxlr, hpa, hpa_alias0, rec),` | |
| 626 | `__string(region_name, cxlr ? dev_name(&cxlr->dev) : "")` | |
| 653 | `if (cxlr) {` | |
| 1044 | `__string(region, cxlr ? dev_name(&cxlr->dev) : "")` | |
| 1065 | `if (cxlr) {` | |
| 1067 | `memcpy(__entry->uuid, &cxlr->params.uuid, 16);` | |
| 1068 | `__entry->hpa = cxl_dpa_to_hpa(cxlr, cxlmd,` | |

[all …]
|
`core.h`

| Line | Code | Context |
| ---- | ---- | ------- |
| 30 | `int cxl_decoder_detach(struct cxl_region *cxlr,` | |
| 43 | `u64 cxl_dpa_to_hpa(struct cxl_region *cxlr, const struct cxl_memdev *cxlmd,` | |
| 47 | `static inline u64 cxl_dpa_to_hpa(struct cxl_region *cxlr,` | `cxl_dpa_to_hpa()` (argument) |
| 61 | `static inline int cxl_decoder_detach(struct cxl_region *cxlr,` | `cxl_decoder_detach()` (argument) |
| 140 | `int cxl_update_hmat_access_coordinates(int nid, struct cxl_region *cxlr,` | |
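
Lines 43 and 47 of core.h are the declaration and the `static inline` fallback of the same function, which suggests a config-gated pair. A sketch of that shape, assuming the gate is `CONFIG_CXL_REGION` and that the stub returns an invalid HPA; both details are assumptions, not shown in the listing.

```c
/* Sketch of the declaration/stub split implied by core.h:43 and :47. */
#ifdef CONFIG_CXL_REGION
u64 cxl_dpa_to_hpa(struct cxl_region *cxlr, const struct cxl_memdev *cxlmd,
		   u64 dpa);
#else
static inline u64 cxl_dpa_to_hpa(struct cxl_region *cxlr,
				 const struct cxl_memdev *cxlmd, u64 dpa)
{
	return ULLONG_MAX;	/* assumed "no translation" sentinel */
}
#endif
```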
|
`edac.c`

| Line | Code | Context |
| ---- | ---- | ------- |
| 37 | `struct cxl_region *cxlr;` | (member) |
| 108 | `struct cxl_region *cxlr;` | `cxl_scrub_get_attrbs()` (local) |
| 112 | `if (!cxl_ps_ctx->cxlr) {` | `cxl_scrub_get_attrbs()` |
| 122 | `cxlr = cxl_ps_ctx->cxlr;` | `cxl_scrub_get_attrbs()` |
| 123 | `p = &cxlr->params;` | `cxl_scrub_get_attrbs()` |
| 157 | `struct cxl_region *cxlr;` | `cxl_scrub_set_attrbs_region()` (local) |
| 164 | `cxlr = cxl_ps_ctx->cxlr;` | `cxl_scrub_set_attrbs_region()` |
| 165 | `p = &cxlr->params;` | `cxl_scrub_set_attrbs_region()` |
| 191 | `cxlmd->scrub_region_id = cxlr->id;` | `cxl_scrub_set_attrbs_region()` |
| 238 | `if (cxl_ps_ctx->cxlr)` | `cxl_scrub_set_attrbs()` |

[all …]
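
The scrub context in edac.c carries an optional region pointer (the member at line 37); `cxl_scrub_get_attrbs()` bails out when it is NULL and otherwise reads the region's parameters. A sketch of that split; the context type name and any field other than `->cxlr` and `->params` are illustrative assumptions.

```c
/*
 * Sketch: a scrub context may be device-scoped (cxlr == NULL) or
 * region-scoped, in which case attributes come from the region params.
 */
struct cxl_scrub_ctx_sketch {
	struct cxl_region *cxlr;	/* NULL for device-scoped scrub */
	/* ... */
};

static struct cxl_region_params *scrub_region_params(struct cxl_scrub_ctx_sketch *ctx)
{
	if (!ctx->cxlr)
		return NULL;		/* no region bound to this context */

	return &ctx->cxlr->params;
}
```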
|
`memdev.c`

| Line | Code | Context |
| ---- | ---- | ------- |
| 285 | `struct cxl_region *cxlr;` | `cxl_inject_poison()` (local) |
| 313 | `cxlr = cxl_dpa_to_region(cxlmd, dpa);` | `cxl_inject_poison()` |
| 314 | `if (cxlr)` | `cxl_inject_poison()` |
| 317 | `dev_name(&cxlr->dev));` | `cxl_inject_poison()` |
| 323 | `trace_cxl_poison(cxlmd, cxlr, &record, 0, 0, CXL_POISON_TRACE_INJECT);` | `cxl_inject_poison()` |
| 335 | `struct cxl_region *cxlr;` | `cxl_clear_poison()` (local) |
| 372 | `cxlr = cxl_dpa_to_region(cxlmd, dpa);` | `cxl_clear_poison()` |
| 373 | `if (cxlr)` | `cxl_clear_poison()` |
| 376 | `dev_name(&cxlr->dev));` | `cxl_clear_poison()` |
| 382 | `trace_cxl_poison(cxlmd, cxlr, &record, 0, 0, CXL_POISON_TRACE_CLEAR);` | `cxl_clear_poison()` |
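
Both `cxl_inject_poison()` and `cxl_clear_poison()` follow the same sequence visible above: resolve the region (if any) backing the DPA, name it in a warning, then emit the poison tracepoint. A condensed sketch of the inject side; the warning text and the `dev_warn()` choice are assumptions, while `cxl_dpa_to_region()`, `trace_cxl_poison()` and `CXL_POISON_TRACE_INJECT` come from the listing itself.

```c
/* Sketch of the lookup-then-trace pattern from cxl_inject_poison(). */
static void report_injected_poison(struct cxl_memdev *cxlmd, u64 dpa,
				   struct cxl_poison_record *record)
{
	struct cxl_region *cxlr;

	cxlr = cxl_dpa_to_region(cxlmd, dpa);
	if (cxlr)
		dev_warn(&cxlmd->dev, "poison injected while %s is mapped\n",
			 dev_name(&cxlr->dev));

	/* cxlr may be NULL; the tracepoint tolerates that case */
	trace_cxl_poison(cxlmd, cxlr, record, 0, 0, CXL_POISON_TRACE_INJECT);
}
```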
|
`mbox.c`

| Line | Code | Context |
| ---- | ---- | ------- |
| 909 | `struct cxl_region *cxlr;` | `cxl_event_trace_record()` (local) |
| 920 | `cxlr = cxl_dpa_to_region(cxlmd, dpa);` | `cxl_event_trace_record()` |
| 921 | `if (cxlr) {` | `cxl_event_trace_record()` |
| 922 | `u64 cache_size = cxlr->params.cache_size;` | `cxl_event_trace_record()` |
| 924 | `hpa = cxl_dpa_to_hpa(cxlr, cxlmd, dpa);` | `cxl_event_trace_record()` |
| 942 | `trace_cxl_general_media(cxlmd, type, cxlr, hpa,` | `cxl_event_trace_record()` |
| 957 | `trace_cxl_dram(cxlmd, type, cxlr, hpa, hpa_alias,` | `cxl_event_trace_record()` |
| 1419 | `struct cxl_region *cxlr)` | `cxl_mem_get_poison()` (argument) |
| 1451 | `trace_cxl_poison(cxlmd, cxlr, &po->record[i],` | `cxl_mem_get_poison()` |
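
The `cxl_event_trace_record()` hits show the translation step that feeds the general-media and DRAM tracepoints: a DPA only maps to an HPA when it lands in an assembled region. A minimal sketch of that step, assuming `ULLONG_MAX` as the "no translation" value; the helper name is hypothetical.

```c
/* Sketch: translate an event's DPA to an HPA if a region maps it. */
static u64 event_dpa_to_hpa(struct cxl_memdev *cxlmd, u64 dpa)
{
	struct cxl_region *cxlr;
	u64 hpa = ULLONG_MAX;

	cxlr = cxl_dpa_to_region(cxlmd, dpa);
	if (cxlr)
		hpa = cxl_dpa_to_hpa(cxlr, cxlmd, dpa);

	return hpa;
}
```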
|
/drivers/dax/
`cxl.c`

| Line | Code | Context |
| ---- | ---- | ------- |
| 13 | `struct cxl_region *cxlr = cxlr_dax->cxlr;` | `cxl_dax_region_probe()` (local) |
| 20 | `dax_region = alloc_dax_region(dev, cxlr->id, &cxlr_dax->hpa_range, nid,` | `cxl_dax_region_probe()` |
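
The two hits in drivers/dax/cxl.c are enough to see the hand-off: the dax region reuses the CXL region's id and the cxl_dax_region's HPA range. A sketch of the probe, assuming the truncated trailing `alloc_dax_region()` arguments are an alignment and a resource flag; those, the error handling, and the follow-on device creation are assumptions.

```c
/* Sketch of the cxl_dax_region_probe() flow suggested by the hits above. */
static int cxl_dax_region_probe_sketch(struct device *dev)
{
	struct cxl_dax_region *cxlr_dax = to_cxl_dax_region(dev);
	struct cxl_region *cxlr = cxlr_dax->cxlr;
	int nid = phys_to_target_node(cxlr_dax->hpa_range.start);
	struct dax_region *dax_region;

	dax_region = alloc_dax_region(dev, cxlr->id, &cxlr_dax->hpa_range, nid,
				      PMD_SIZE, IORESOURCE_DAX_KMEM);
	if (!dax_region)
		return -ENOMEM;

	/* the real driver goes on to create the dev_dax child device */
	return 0;
}
```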
|
/drivers/cxl/
`pmem.c`

| Line | Code | Context |
| ---- | ---- | ------- |
| 379 | `struct cxl_region *cxlr = cxlr_pmem->cxlr;` | `cxl_pmem_region_probe()` (local) |
| 380 | `struct cxl_nvdimm_bridge *cxl_nvb = cxlr->cxl_nvb;` | `cxl_pmem_region_probe()` |
| 417 | `dev_dbg(&cxlr->dev, "changing target node from %d to %d",` | `cxl_pmem_region_probe()` |
| 425 | `ndr_desc.memregion = cxlr->id;` | `cxl_pmem_region_probe()` |
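
The pmem.c hits show how `cxl_pmem_region_probe()` seeds the nvdimm region descriptor from the CXL region: the descriptor's `memregion` takes the region id, and the target node is overridden (with a debug message) when it disagrees with the node chosen for onlining. The helper below is a condensed, hypothetical arrangement of just those two steps.

```c
#include <linux/libnvdimm.h>

/* Sketch: seed an nd_region_desc from the parent CXL region. */
static void seed_ndr_desc(struct nd_region_desc *ndr_desc,
			  struct cxl_pmem_region *cxlr_pmem, int online_nid)
{
	struct cxl_region *cxlr = cxlr_pmem->cxlr;

	if (ndr_desc->target_node != online_nid) {
		dev_dbg(&cxlr->dev, "changing target node from %d to %d",
			ndr_desc->target_node, online_nid);
		ndr_desc->target_node = online_nid;
	}

	ndr_desc->memregion = cxlr->id;
}
```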
|
`cxl.h`

| Line | Code | Context |
| ---- | ---- | ------- |
| 563 | `struct cxl_region *cxlr;` | (member) |
| 572 | `struct cxl_region *cxlr;` | (member) |
| 897 | `void cxl_region_perf_data_calculate(struct cxl_region *cxlr,` | |
| 899 | `void cxl_region_shared_upstream_bandwidth_update(struct cxl_region *cxlr);` | |
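
The two "member" hits at cxl.h:563 and :572 are the back pointers that region.c:3043 and :3101 fill in for the pmem and dax flavors of a region. Trimmed sketches of those containers follow; fields other than `->cxlr` and the `hpa_range` seen in the dax hits are assumptions.

```c
/* Sketch: both derived region types keep a back pointer to the region. */
struct cxl_pmem_region {
	struct device dev;		/* assumed embedding device */
	struct cxl_region *cxlr;	/* set in cxl_pmem_region_alloc() */
	/* ... */
};

struct cxl_dax_region {
	struct device dev;		/* assumed embedding device */
	struct cxl_region *cxlr;	/* set in cxl_dax_region_alloc() */
	struct range hpa_range;
};
```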
|
`cxlmem.h`

| Line | Code | Context |
| ---- | ---- | ------- |
| 868 | `struct cxl_region *cxlr);` | |
| 875 | `int devm_cxl_region_edac_register(struct cxl_region *cxlr);` | |
| 882 | `static inline int devm_cxl_region_edac_register(struct cxl_region *cxlr)` | `devm_cxl_region_edac_register()` (argument) |
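
cxlmem.h lines 875 and 882 are the declaration and `static inline` stub of the same EDAC registration hook, the usual pattern for a feature that can be compiled out. A sketch of that pair; the config symbol name and the stub's return value are assumptions.

```c
/* Sketch of the declaration/stub pair implied by cxlmem.h:875 and :882. */
#ifdef CONFIG_CXL_EDAC_MEM_FEATURES
int devm_cxl_region_edac_register(struct cxl_region *cxlr);
#else
static inline int devm_cxl_region_edac_register(struct cxl_region *cxlr)
{
	return 0;	/* nothing to register when the feature is compiled out */
}
#endif
```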
|