Lines Matching refs:cxld

263 struct cxl_decoder *cxld; in cxl_region_decode_reset() local
266 cxld = cxl_rr->decoder; in cxl_region_decode_reset()
267 if (cxld->reset) in cxl_region_decode_reset()
268 cxld->reset(cxld); in cxl_region_decode_reset()
273 cxled->cxld.reset(&cxled->cxld); in cxl_region_decode_reset()
281 static int commit_decoder(struct cxl_decoder *cxld) in commit_decoder() argument
285 if (cxld->commit) in commit_decoder()
286 return cxld->commit(cxld); in commit_decoder()
288 if (is_switch_decoder(&cxld->dev)) in commit_decoder()
289 cxlsd = to_cxl_switch_decoder(&cxld->dev); in commit_decoder()
291 if (dev_WARN_ONCE(&cxld->dev, !cxlsd || cxlsd->nr_targets > 1, in commit_decoder()
306 struct cxl_decoder *cxld; in cxl_region_decode_commit() local
314 cxld = cxl_rr->decoder; in cxl_region_decode_commit()
315 rc = commit_decoder(cxld); in cxl_region_decode_commit()
325 cxld = cxl_rr->decoder; in cxl_region_decode_commit()
326 if (cxld->reset) in cxl_region_decode_commit()
327 cxld->reset(cxld); in cxl_region_decode_commit()
330 cxled->cxld.reset(&cxled->cxld); in cxl_region_decode_commit()
491 struct cxl_decoder *cxld = &cxlrd->cxlsd.cxld; in interleave_ways_store() local
510 if (!is_power_of_2(val / cxld->interleave_ways) || in interleave_ways_store()
511 (val % cxld->interleave_ways)) { in interleave_ways_store()
554 struct cxl_decoder *cxld = &cxlrd->cxlsd.cxld; in interleave_granularity_store() local
576 if (cxld->interleave_ways > 1 && val != cxld->interleave_granularity) in interleave_granularity_store()
784 return sysfs_emit(buf, "%s\n", dev_name(&cxled->cxld.dev)); in show_targetN()
789 struct cxl_decoder *cxld = to_cxl_decoder(dev); in check_commit_order() local
796 if (((cxld->flags & CXL_DECODER_F_ENABLE) == 0)) in check_commit_order()
804 struct cxl_decoder *cxld; in match_free_decoder() local
810 cxld = to_cxl_decoder(dev); in match_free_decoder()
812 if (cxld->id != port->commit_end + 1) in match_free_decoder()
815 if (cxld->region) { in match_free_decoder()
818 dev_name(dev), dev_name(&cxld->region->dev)); in match_free_decoder()
851 struct cxl_decoder *cxld; in match_auto_decoder() local
857 cxld = to_cxl_decoder(dev); in match_auto_decoder()
858 r = &cxld->hpa_range; in match_auto_decoder()
887 return &cxled->cxld; in cxl_port_pick_region_decoder()
907 struct cxl_decoder *cxld) in auto_order_ok() argument
918 dev_dbg(&cxld->dev, "check for HPA violation %s:%d < %s:%d\n", in auto_order_ok()
919 dev_name(&cxld->dev), cxld->id, in auto_order_ok()
922 if (cxld_iter->id > cxld->id) in auto_order_ok()
931 struct cxl_decoder *cxld) in alloc_region_ref() argument
945 if (auto_order_ok(port, iter->region, cxld)) in alloc_region_ref()
978 struct cxl_decoder *cxld = cxl_rr->decoder; in cxl_rr_free_decoder() local
980 if (!cxld) in cxl_rr_free_decoder()
983 dev_WARN_ONCE(&cxlr->dev, cxld->region != cxlr, "region mismatch\n"); in cxl_rr_free_decoder()
984 if (cxld->region == cxlr) { in cxl_rr_free_decoder()
985 cxld->region = NULL; in cxl_rr_free_decoder()
1007 struct cxl_decoder *cxld = cxl_rr->decoder; in cxl_rr_ep_add() local
1018 if (!cxld->region) { in cxl_rr_ep_add()
1019 cxld->region = cxlr; in cxl_rr_ep_add()
1029 struct cxl_decoder *cxld) in cxl_rr_assign_decoder() argument
1031 if (cxld->region) { in cxl_rr_assign_decoder()
1033 dev_name(&port->dev), dev_name(&cxld->dev), in cxl_rr_assign_decoder()
1034 dev_name(&cxld->region->dev)); in cxl_rr_assign_decoder()
1045 cxld->target_type != cxlr->type, in cxl_rr_assign_decoder()
1048 dev_name(&cxld->dev), cxld->target_type, cxlr->type); in cxl_rr_assign_decoder()
1049 cxld->target_type = cxlr->type; in cxl_rr_assign_decoder()
1050 cxl_rr->decoder = cxld; in cxl_rr_assign_decoder()
1086 struct cxl_decoder *cxld; in cxl_port_attach_region() local
1121 struct cxl_decoder *cxld; in cxl_port_attach_region() local
1123 cxld = cxl_port_pick_region_decoder(port, cxled, cxlr); in cxl_port_attach_region()
1124 if (!cxld) { in cxl_port_attach_region()
1130 cxl_rr = alloc_region_ref(port, cxlr, cxled, cxld); in cxl_port_attach_region()
1139 rc = cxl_rr_assign_decoder(port, cxlr, cxled, cxl_rr, cxld); in cxl_port_attach_region()
1143 cxld = cxl_rr->decoder; in cxl_port_attach_region()
1149 if (is_switch_decoder(&cxld->dev)) { in cxl_port_attach_region()
1152 cxlsd = to_cxl_switch_decoder(&cxld->dev); in cxl_port_attach_region()
1157 dev_name(&cxld->dev), dev_name(&cxlmd->dev), in cxl_port_attach_region()
1158 dev_name(&cxled->cxld.dev), pos, in cxl_port_attach_region()
1170 dev_name(&cxld->dev)); in cxl_port_attach_region()
1177 dev_name(&cxld->dev), dev_name(&cxlmd->dev), in cxl_port_attach_region()
1178 dev_name(&cxled->cxld.dev), pos, in cxl_port_attach_region()
1210 if (cxl_rr->decoder == &cxled->cxld) in cxl_port_detach_region()
1255 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), pos); in check_last_peer()
1265 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), pos, in check_last_peer()
1267 dev_name(&cxled_peer->cxld.dev)); in check_last_peer()
1274 static int check_interleave_cap(struct cxl_decoder *cxld, int iw, int ig) in check_interleave_cap() argument
1276 struct cxl_port *port = to_cxl_port(cxld->dev.parent); in check_interleave_cap()
1329 struct cxl_decoder *cxld = cxl_rr->decoder; in cxl_port_setup_targets() local
1346 cxlsd = to_cxl_switch_decoder(&cxld->dev); in cxl_port_setup_targets()
1366 distance *= cxlrd->cxlsd.cxld.interleave_ways; in cxl_port_setup_targets()
1388 parent_iw = cxlrd->cxlsd.cxld.interleave_ways; in cxl_port_setup_targets()
1453 dev_name(&cxld->dev), iw, cxlsd->nr_targets); in cxl_port_setup_targets()
1458 if (cxld->interleave_ways != iw || in cxl_port_setup_targets()
1459 (iw > 1 && cxld->interleave_granularity != ig) || in cxl_port_setup_targets()
1460 !region_res_match_cxl_range(p, &cxld->hpa_range) || in cxl_port_setup_targets()
1461 ((cxld->flags & CXL_DECODER_F_ENABLE) == 0)) { in cxl_port_setup_targets()
1469 __func__, cxld->interleave_ways, in cxl_port_setup_targets()
1470 cxld->interleave_granularity, in cxl_port_setup_targets()
1471 (cxld->flags & CXL_DECODER_F_ENABLE) ? in cxl_port_setup_targets()
1474 cxld->hpa_range.start, cxld->hpa_range.end); in cxl_port_setup_targets()
1478 rc = check_interleave_cap(cxld, iw, ig); in cxl_port_setup_targets()
1487 cxld->interleave_ways = iw; in cxl_port_setup_targets()
1488 cxld->interleave_granularity = ig; in cxl_port_setup_targets()
1489 cxld->hpa_range = (struct range) { in cxl_port_setup_targets()
1501 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), pos); in cxl_port_setup_targets()
1508 dev_name(&cxlsd->cxld.dev), in cxl_port_setup_targets()
1521 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), pos); in cxl_port_setup_targets()
1530 struct cxl_decoder *cxld; in cxl_port_reset_targets() local
1540 cxld = cxl_rr->decoder; in cxl_port_reset_targets()
1541 cxld->hpa_range = (struct range) { in cxl_port_reset_targets()
1655 dev_name(&cxled_target->cxld.dev)); in cxl_region_validate_position()
1672 dev_name(&cxled_target->cxld.dev)); in cxl_region_validate_position()
1687 struct cxl_decoder *cxld = &cxlsd->cxld; in cxl_region_attach_position() local
1688 int iw = cxld->interleave_ways; in cxl_region_attach_position()
1694 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), in cxl_region_attach_position()
1695 dev_name(&cxlrd->cxlsd.cxld.dev)); in cxl_region_attach_position()
1723 dev_name(&cxled->cxld.dev)); in cxl_region_attach_auto()
1729 dev_name(&cxled->cxld.dev), pos); in cxl_region_attach_auto()
1735 dev_name(&cxled->cxld.dev)); in cxl_region_attach_auto()
1772 r1 = &cxlsd->cxld.hpa_range; in match_switch_decoder_by_range()
1800 *ways = cxlsd->cxld.interleave_ways; in find_pos_and_ways()
1816 dev_name(&cxlsd->cxld.dev)); in find_pos_and_ways()
1840 struct range *range = &cxled->cxld.hpa_range; in cxl_calc_interleave_pos()
1887 dev_name(&cxled->cxld.dev), dev_name(cxlmd->dev.parent), in cxl_calc_interleave_pos()
1929 rc = check_interleave_cap(&cxled->cxld, p->interleave_ways, in cxl_region_attach()
1933 dev_name(&cxled->cxld.dev), p->interleave_ways, in cxl_region_attach()
1939 dev_dbg(&cxlr->dev, "%s dead\n", dev_name(&cxled->cxld.dev)); in cxl_region_attach()
1945 dev_name(&cxled->cxld.dev), cxlr->mode); in cxl_region_attach()
1971 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), in cxl_region_attach()
1976 if (cxled->cxld.target_type != cxlr->type) { in cxl_region_attach()
1978 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), in cxl_region_attach()
1979 cxled->cxld.target_type, cxlr->type); in cxl_region_attach()
1985 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev)); in cxl_region_attach()
1993 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), in cxl_region_attach()
2066 cxled->cxld.interleave_ways = p->interleave_ways; in cxl_region_attach()
2067 cxled->cxld.interleave_granularity = p->interleave_granularity; in cxl_region_attach()
2068 cxled->cxld.hpa_range = (struct range) { in cxl_region_attach()
2087 dev_dbg(&cxled->cxld.dev, in cxl_region_attach()
2118 cxlr = cxled->cxld.region; in __cxl_decoder_detach()
2141 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), in __cxl_decoder_detach()
2152 cxled->cxld.hpa_range = (struct range) { in __cxl_decoder_detach()
2222 dev_warn(cxled->cxld.dev.parent, "failed to attach %s to %s: %d\n", in attach_target()
2223 dev_name(&cxled->cxld.dev), dev_name(&cxlr->dev), rc); in attach_target()
2424 dev->parent = &cxlrd->cxlsd.cxld.dev; in cxl_region_alloc()
2539 struct cxl_port *port = to_cxl_port(cxlrd->cxlsd.cxld.dev.parent); in devm_cxl_add_region()
2564 dev_name(&cxlrd->cxlsd.cxld.dev), dev_name(dev)); in devm_cxl_add_region()
2599 dev_err(&cxlrd->cxlsd.cxld.dev, "unsupported mode %d\n", mode); in __create_region()
2652 struct cxl_decoder *cxld = to_cxl_decoder(dev); in region_show() local
2659 if (cxld->region) in region_show()
2660 return sysfs_emit(buf, "%s\n", dev_name(&cxld->region->dev)); in region_show()
2668 struct cxl_decoder *cxld = &cxlrd->cxlsd.cxld; in cxl_find_region_by_name() local
2671 region_dev = device_find_child_by_name(&cxld->dev, name); in cxl_find_region_by_name()
2813 rc = cxl_mem_get_poison(cxlmd, offset, length, cxled->cxld.region); in poison_by_decoder()
2820 if (cxled->cxld.id == ctx->port->commit_end) { in poison_by_decoder()
2874 cxlr = cxled->cxld.region; in __cxl_dpa_to_region()
3237 struct cxl_decoder *cxld; in match_decoder_by_range() local
3242 cxld = to_cxl_decoder(dev); in match_decoder_by_range()
3243 r1 = &cxld->hpa_range; in match_decoder_by_range()
3262 struct cxl_decoder *root, *cxld = &cxled->cxld; in cxl_find_root_decoder() local
3263 struct range *hpa = &cxld->hpa_range; in cxl_find_root_decoder()
3269 dev_name(&cxlmd->dev), dev_name(&cxld->dev), in cxl_find_root_decoder()
3270 cxld->hpa_range.start, cxld->hpa_range.end); in cxl_find_root_decoder()
3339 struct range *hpa = &cxled->cxld.hpa_range; in __construct_region()
3349 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), in __construct_region()
3382 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), in __construct_region()
3387 p->interleave_ways = cxled->cxld.interleave_ways; in __construct_region()
3388 p->interleave_granularity = cxled->cxld.interleave_granularity; in __construct_region()
3396 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), __func__, in __construct_region()
3424 dev_name(&cxlmd->dev), dev_name(&cxled->cxld.dev), in construct_region()
3443 region_dev = device_find_child(&cxlrd->cxlsd.cxld.dev, hpa, in cxl_find_region_by_range()
3453 struct range *hpa = &cxled->cxld.hpa_range; in cxl_add_to_region()
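The functions named in this listing come from the Linux kernel's CXL region driver (drivers/cxl/core/region.c). The entries from cxl_region_decode_reset(), commit_decoder(), and cxl_region_decode_commit() near the top all follow one shape: a decoder's commit/reset ops are optional function pointers that callers invoke only when present, and the dev_WARN_ONCE() in commit_decoder() suggests the no-op fallback is only expected for single-target (pass-through) switch decoders that need no hardware programming. The standalone C sketch below illustrates that optional-callback pattern only; struct fake_decoder and the fake_* helpers are illustrative stand-ins, not the real struct cxl_decoder API.

/*
 * Minimal userspace sketch (not kernel code) of the optional
 * commit/reset callback pattern visible in the listing above.
 * struct fake_decoder and the fake_* names are hypothetical
 * stand-ins, not the real CXL decoder types.
 */
#include <stdio.h>

struct fake_decoder {
	int id;
	int (*commit)(struct fake_decoder *d);	/* optional op */
	void (*reset)(struct fake_decoder *d);	/* optional op */
};

static int fake_commit(struct fake_decoder *d)
{
	printf("decoder%d: commit\n", d->id);
	return 0;
}

static void fake_reset(struct fake_decoder *d)
{
	printf("decoder%d: reset\n", d->id);
}

/* Same shape as commit_decoder(): call ->commit only if provided. */
static int commit_decoder_sketch(struct fake_decoder *d)
{
	if (d->commit)
		return d->commit(d);
	/* No ->commit op: nothing to program, report success. */
	return 0;
}

/* Same shape as the reset paths: call ->reset only if provided. */
static void reset_decoder_sketch(struct fake_decoder *d)
{
	if (d->reset)
		d->reset(d);
}

int main(void)
{
	struct fake_decoder d0 = { .id = 0, .commit = fake_commit, .reset = fake_reset };
	struct fake_decoder d1 = { .id = 1 };	/* no ops registered */

	commit_decoder_sketch(&d0);	/* invokes fake_commit */
	commit_decoder_sketch(&d1);	/* falls through, returns 0 */
	reset_decoder_sketch(&d0);	/* invokes fake_reset */
	reset_decoder_sketch(&d1);	/* no-op */
	return 0;
}

Keeping the ops optional lets one commit/reset walk cover every decoder in the region's port hierarchy, with decoders that need no hardware programming simply leaving the callbacks unset.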