Lines matching references to cxld

22 static int add_hdm_decoder(struct cxl_port *port, struct cxl_decoder *cxld,  in add_hdm_decoder()  argument
27 rc = cxl_decoder_add_locked(cxld, target_map); in add_hdm_decoder()
29 put_device(&cxld->dev); in add_hdm_decoder()
34 rc = cxl_decoder_autoremove(&port->dev, cxld); in add_hdm_decoder()
38 dev_dbg(&cxld->dev, "Added to port %s\n", dev_name(&port->dev)); in add_hdm_decoder()
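
The add_hdm_decoder() fragments above show the registration/cleanup pattern: cxl_decoder_add_locked() registers the decoder, the caller's device reference is dropped with put_device() when registration fails, and on success cxl_decoder_autoremove() appears to hand final cleanup to a devm action on the port. A minimal user-space model of that "consume the reference on failure" flow, with made-up names standing in for the kernel helpers:

/* Illustrative model only; names and flow are stand-ins, not the kernel code. */
#include <stdio.h>
#include <stdlib.h>

struct fake_decoder {
    int refcount;
    int id;
};

static void fake_put(struct fake_decoder *d)
{
    if (--d->refcount == 0) {
        printf("decoder%d released\n", d->id);
        free(d);
    }
}

/* Stand-in for cxl_decoder_add_locked(): fails for odd ids in this model. */
static int fake_decoder_add(struct fake_decoder *d)
{
    return (d->id & 1) ? -1 : 0;
}

static int fake_add_hdm_decoder(struct fake_decoder *d)
{
    int rc = fake_decoder_add(d);

    if (rc) {
        /* registration failed: drop the caller's reference here */
        fake_put(d);
        return rc;
    }
    /* success: a devm-style action (not modeled) would drop it at teardown */
    printf("decoder%d added\n", d->id);
    return 0;
}

int main(void)
{
    for (int id = 0; id < 2; id++) {
        struct fake_decoder *d = malloc(sizeof(*d));

        d->refcount = 1;
        d->id = id;
        if (fake_add_hdm_decoder(d))
            printf("decoder%d add failed\n", id);
        else
            fake_put(d); /* in place of the deferred autoremove action */
    }
    return 0;
}
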
67 return add_hdm_decoder(port, &cxlsd->cxld, single_port_map); in devm_cxl_add_passthrough_decoder()
212 put_device(&cxled->cxld.dev); in __cxl_dpa_release()
253 port->id, cxled->cxld.id, cxled->dpa_res); in __cxl_dpa_reserve()
257 if (port->hdm_end + 1 != cxled->cxld.id) { in __cxl_dpa_reserve()
265 cxled->cxld.id, port->id, port->hdm_end + 1); in __cxl_dpa_reserve()
271 dev_name(&cxled->cxld.dev), 0); in __cxl_dpa_reserve()
275 port->id, cxled->cxld.id); in __cxl_dpa_reserve()
280 dev_name(&cxled->cxld.dev), 0); in __cxl_dpa_reserve()
283 port->id, cxled->cxld.id); in __cxl_dpa_reserve()
298 cxled->cxld.id, cxled->dpa_res); in __cxl_dpa_reserve()
304 get_device(&cxled->cxld.dev); in __cxl_dpa_reserve()
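
The __cxl_dpa_reserve() fragments show two invariants: a decoder may only claim device physical address (DPA) capacity if its id is exactly port->hdm_end + 1 (reservations happen in decoder order), and a successful reservation pins the decoder with get_device() until __cxl_dpa_release() drops it. A small sketch of the ordering rule, with illustrative names:

/* Illustrative sketch of the in-order allocation rule; not the kernel code. */
#include <stdio.h>

struct fake_port {
    int id;
    int hdm_end;    /* id of the last decoder with a DPA reservation, -1 if none */
};

static int fake_dpa_reserve(struct fake_port *port, int decoder_id)
{
    if (port->hdm_end + 1 != decoder_id) {
        fprintf(stderr, "port%d: decoder%d allocated out of order\n",
                port->id, decoder_id);
        return -1;
    }
    port->hdm_end++;
    /* the real code would also insert the DPA range into the device's
     * resource tree and pin the decoder with get_device() here */
    return 0;
}

int main(void)
{
    struct fake_port port = { .id = 0, .hdm_end = -1 };

    fake_dpa_reserve(&port, 0);    /* ok */
    fake_dpa_reserve(&port, 2);    /* rejected: decoder1 must come first */
    return 0;
}
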
353 struct device *dev = &cxled->cxld.dev; in cxl_dpa_free()
361 if (cxled->cxld.region) { in cxl_dpa_free()
363 dev_name(&cxled->cxld.region->dev)); in cxl_dpa_free()
367 if (cxled->cxld.flags & CXL_DECODER_F_ENABLE) { in cxl_dpa_free()
372 if (cxled->cxld.id != port->hdm_end) { in cxl_dpa_free()
390 struct device *dev = &cxled->cxld.dev; in cxl_dpa_set_mode()
403 if (cxled->cxld.flags & CXL_DECODER_F_ENABLE) { in cxl_dpa_set_mode()
437 struct device *dev = &cxled->cxld.dev; in cxl_dpa_alloc()
443 if (cxled->cxld.region) { in cxl_dpa_alloc()
445 dev_name(&cxled->cxld.region->dev)); in cxl_dpa_alloc()
450 if (cxled->cxld.flags & CXL_DECODER_F_ENABLE) { in cxl_dpa_alloc()
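
cxl_dpa_free(), cxl_dpa_set_mode() and cxl_dpa_alloc() all run the same guards before touching the DPA map: bail out if the decoder is already assigned to a region (cxled->cxld.region) and bail out if it has already been committed to hardware (CXL_DECODER_F_ENABLE). A condensed sketch of just that guard order, assuming both conditions reduce to simple pointer/flag tests:

/* Sketch of the guard checks only; names and messages are illustrative. */
#include <stdio.h>
#include <stdbool.h>
#include <errno.h>

struct fake_endpoint_decoder {
    bool attached_to_region;    /* models cxled->cxld.region != NULL */
    bool enabled;               /* models CXL_DECODER_F_ENABLE */
};

static int fake_dpa_alloc(struct fake_endpoint_decoder *cxled,
                          unsigned long long size)
{
    if (cxled->attached_to_region) {
        fprintf(stderr, "decoder is assigned to a region, refusing\n");
        return -EBUSY;
    }
    if (cxled->enabled) {
        fprintf(stderr, "decoder already committed to hardware, refusing\n");
        return -EBUSY;
    }
    printf("would reserve %llu bytes of DPA\n", size);
    return 0;
}

int main(void)
{
    struct fake_endpoint_decoder cxled = { .enabled = true };

    return fake_dpa_alloc(&cxled, 1ULL << 28) ? 1 : 0;
}
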
515 static void cxld_set_interleave(struct cxl_decoder *cxld, u32 *ctrl) in cxld_set_interleave() argument
524 if (WARN_ONCE(ways_to_eiw(cxld->interleave_ways, &eiw), in cxld_set_interleave()
525 "invalid interleave_ways: %d\n", cxld->interleave_ways)) in cxld_set_interleave()
527 if (WARN_ONCE(granularity_to_eig(cxld->interleave_granularity, &eig), in cxld_set_interleave()
529 cxld->interleave_granularity)) in cxld_set_interleave()
537 static void cxld_set_type(struct cxl_decoder *cxld, u32 *ctrl) in cxld_set_type() argument
539 u32p_replace_bits(ctrl, !!(cxld->target_type == 3), in cxld_set_type()
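
cxld_set_interleave() and cxld_set_type() translate the decoder's software settings into HDM decoder control register fields; ways_to_eiw() and granularity_to_eig() perform the CXL-defined encodings (power-of-two granularities encode as log2(granularity) - 8, and 3/6/12-way interleave gets a separate encoding range). The model below is my approximation of those conversions, not the kernel helpers; verify against the CXL specification before relying on it:

/* Approximate model of the CXL interleave encodings; verify against the spec.
 * The real helpers also bound the values (e.g. reject ways > 16); omitted here. */
#include <stdio.h>

/* encoded interleave granularity: 0 -> 256B, 1 -> 512B, ... (power of two only) */
static int granularity_to_eig(unsigned int granularity, unsigned int *eig)
{
    if (granularity < 256 || (granularity & (granularity - 1)))
        return -1;
    *eig = __builtin_ctz(granularity) - 8;
    return 0;
}

/* encoded interleave ways: power-of-two ways use log2,
 * 3/6/12-way use log2(ways / 3) + 8 */
static int ways_to_eiw(unsigned int ways, unsigned int *eiw)
{
    if (ways && !(ways & (ways - 1))) {
        *eiw = __builtin_ctz(ways);
        return 0;
    }
    if (ways >= 3 && ways % 3 == 0 && !((ways / 3) & (ways / 3 - 1))) {
        *eiw = __builtin_ctz(ways / 3) + 8;
        return 0;
    }
    return -1;
}

int main(void)
{
    unsigned int eig, eiw;

    if (!granularity_to_eig(4096, &eig) && !ways_to_eiw(6, &eiw))
        printf("granularity 4096 -> eig %u, 6-way -> eiw %u\n", eig, eiw);
    return 0;
}
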
546 int ways = cxlsd->cxld.interleave_ways; in cxlsd_set_targets()
548 if (dev_WARN_ONCE(&cxlsd->cxld.dev, in cxlsd_set_targets()
599 static int cxl_decoder_commit(struct cxl_decoder *cxld) in cxl_decoder_commit() argument
601 struct cxl_port *port = to_cxl_port(cxld->dev.parent); in cxl_decoder_commit()
604 int id = cxld->id, rc; in cxl_decoder_commit()
608 if (cxld->flags & CXL_DECODER_F_ENABLE) in cxl_decoder_commit()
614 dev_name(&cxld->dev), port->id, port->commit_end + 1); in cxl_decoder_commit()
620 ctrl = readl(hdm + CXL_HDM_DECODER0_CTRL_OFFSET(cxld->id)); in cxl_decoder_commit()
621 cxld_set_interleave(cxld, &ctrl); in cxl_decoder_commit()
622 cxld_set_type(cxld, &ctrl); in cxl_decoder_commit()
623 base = cxld->hpa_range.start; in cxl_decoder_commit()
624 size = range_len(&cxld->hpa_range); in cxl_decoder_commit()
631 if (is_switch_decoder(&cxld->dev)) { in cxl_decoder_commit()
633 to_cxl_switch_decoder(&cxld->dev); in cxl_decoder_commit()
641 dev_name(&cxld->dev)); in cxl_decoder_commit()
649 to_cxl_endpoint_decoder(&cxld->dev); in cxl_decoder_commit()
661 rc = cxld_await_commit(hdm, cxld->id); in cxl_decoder_commit()
665 dev_name(&cxld->dev), rc); in cxl_decoder_commit()
666 cxld->reset(cxld); in cxl_decoder_commit()
669 cxld->flags |= CXL_DECODER_F_ENABLE; in cxl_decoder_commit()
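
Reading the cxl_decoder_commit() fragments in order: the control register is read back, interleave and type fields are merged in, base and size come from cxld->hpa_range, switch and endpoint decoders get their flavor-specific programming, and only after cxld_await_commit() succeeds is CXL_DECODER_F_ENABLE recorded; on error the decoder is immediately reset via cxld->reset(). The sequencing is the interesting part, so here is a purely illustrative model of it with no real MMIO:

/* Illustrative commit/await/reset ordering; the register layout is not modeled. */
#include <stdio.h>
#include <stdbool.h>
#include <errno.h>

#define DECODER_F_ENABLE 0x1

struct fake_decoder {
    int id;
    unsigned int flags;
};

static bool fake_hw_committed(int id)
{
    return id != 3;    /* pretend decoder3's commit never completes */
}

static void fake_reset(struct fake_decoder *d)
{
    printf("decoder%d: reset after failed commit\n", d->id);
}

static int fake_commit(struct fake_decoder *d)
{
    /* 1) program ctrl/base/size and set the commit bit (not modeled) */
    if (!fake_hw_committed(d->id)) {
        /* 2) on timeout or error, undo the partial programming */
        fake_reset(d);
        return -ETIMEDOUT;
    }
    /* 3) only now mark the decoder as enabled in software */
    d->flags |= DECODER_F_ENABLE;
    return 0;
}

int main(void)
{
    struct fake_decoder ok = { .id = 0 }, bad = { .id = 3 };

    printf("decoder0 commit: %d\n", fake_commit(&ok));
    printf("decoder3 commit: %d\n", fake_commit(&bad));
    return 0;
}
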
674 static int cxl_decoder_reset(struct cxl_decoder *cxld) in cxl_decoder_reset() argument
676 struct cxl_port *port = to_cxl_port(cxld->dev.parent); in cxl_decoder_reset()
679 int id = cxld->id; in cxl_decoder_reset()
682 if ((cxld->flags & CXL_DECODER_F_ENABLE) == 0) in cxl_decoder_reset()
688 dev_name(&cxld->dev), port->id, port->commit_end); in cxl_decoder_reset()
704 cxld->flags &= ~CXL_DECODER_F_ENABLE; in cxl_decoder_reset()
707 if (is_endpoint_decoder(&cxld->dev)) { in cxl_decoder_reset()
710 cxled = to_cxl_endpoint_decoder(&cxld->dev); in cxl_decoder_reset()
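
cxl_decoder_reset() is the inverse path: it returns early when CXL_DECODER_F_ENABLE is not set, and the debug message referencing port->commit_end (line 688) suggests decoders are expected to be torn down in last-committed-first order before the flag is cleared and, for endpoint decoders, the endpoint state is rewound. A sketch of that last-in-first-out rule, with illustrative names and return codes:

/* Sketch of the LIFO teardown rule; names and return codes are illustrative. */
#include <stdio.h>
#include <errno.h>

#define DECODER_F_ENABLE 0x1

struct fake_port { int id; int commit_end; };
struct fake_decoder { int id; unsigned int flags; };

static int fake_reset(struct fake_port *port, struct fake_decoder *d)
{
    if (!(d->flags & DECODER_F_ENABLE))
        return 0;    /* nothing committed, nothing to undo */
    if (port->commit_end != d->id) {
        fprintf(stderr, "port%d: out of order reset of decoder%d\n",
                port->id, d->id);
        return -EBUSY;
    }
    /* clear the hardware programming (not modeled), then the soft state */
    port->commit_end--;
    d->flags &= ~DECODER_F_ENABLE;
    return 0;
}

int main(void)
{
    struct fake_port port = { .id = 0, .commit_end = 1 };
    struct fake_decoder d0 = { .id = 0, .flags = DECODER_F_ENABLE };
    struct fake_decoder d1 = { .id = 1, .flags = DECODER_F_ENABLE };

    fake_reset(&port, &d0);    /* rejected: decoder1 is still committed */
    fake_reset(&port, &d1);    /* ok, commit_end drops to 0 */
    fake_reset(&port, &d0);    /* now ok */
    return 0;
}
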
718 struct cxl_decoder *cxld, int which, in cxl_setup_hdm_decoder_from_dvsec() argument
727 cxld->target_type = CXL_DECODER_EXPANDER; in cxl_setup_hdm_decoder_from_dvsec()
728 cxld->commit = NULL; in cxl_setup_hdm_decoder_from_dvsec()
729 cxld->reset = NULL; in cxl_setup_hdm_decoder_from_dvsec()
730 cxld->hpa_range = info->dvsec_range[which]; in cxl_setup_hdm_decoder_from_dvsec()
736 cxld->flags |= CXL_DECODER_F_ENABLE | CXL_DECODER_F_LOCK; in cxl_setup_hdm_decoder_from_dvsec()
737 port->commit_end = cxld->id; in cxl_setup_hdm_decoder_from_dvsec()
768 static int init_hdm_decoder(struct cxl_port *port, struct cxl_decoder *cxld, in init_hdm_decoder() argument
784 return cxl_setup_hdm_decoder_from_dvsec(port, cxld, which, info); in init_hdm_decoder()
786 if (is_endpoint_decoder(&cxld->dev)) in init_hdm_decoder()
787 cxled = to_cxl_endpoint_decoder(&cxld->dev); in init_hdm_decoder()
793 cxld->commit = cxl_decoder_commit; in init_hdm_decoder()
794 cxld->reset = cxl_decoder_reset; in init_hdm_decoder()
800 port->id, cxld->id); in init_hdm_decoder()
804 cxld->hpa_range = (struct range) { in init_hdm_decoder()
810 return cxl_setup_hdm_decoder_from_dvsec(port, cxld, which, info); in init_hdm_decoder()
814 cxld->flags |= CXL_DECODER_F_ENABLE; in init_hdm_decoder()
816 cxld->flags |= CXL_DECODER_F_LOCK; in init_hdm_decoder()
818 cxld->target_type = CXL_DECODER_EXPANDER; in init_hdm_decoder()
820 cxld->target_type = CXL_DECODER_ACCELERATOR; in init_hdm_decoder()
821 if (cxld->id != port->commit_end + 1) { in init_hdm_decoder()
824 port->id, cxld->id); in init_hdm_decoder()
827 port->commit_end = cxld->id; in init_hdm_decoder()
834 cxld->target_type = CXL_DECODER_EXPANDER; in init_hdm_decoder()
837 &cxld->interleave_ways); in init_hdm_decoder()
841 port->id, cxld->id, ctrl); in init_hdm_decoder()
845 &cxld->interleave_granularity); in init_hdm_decoder()
852 for (i = 0; i < cxld->interleave_ways; i++) in init_hdm_decoder()
861 dpa_size = div_u64_rem(size, cxld->interleave_ways, &remainder); in init_hdm_decoder()
865 port->id, cxld->id, size, cxld->interleave_ways); in init_hdm_decoder()
873 port->id, cxld->id, *dpa_base, in init_hdm_decoder()
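
The tail of init_hdm_decoder() (line 861 onward) converts a committed decoder's HPA size into the per-device DPA it consumes: the size is divided by interleave_ways and a non-zero remainder is rejected. A worked example with illustrative numbers, mirroring that arithmetic:

/* Worked example of the per-device DPA accounting; the numbers are made up. */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint64_t hpa_size = 1ULL << 30;    /* 1 GiB committed HPA range */
    unsigned int ways = 4;             /* 4-way interleave */
    uint64_t dpa_size = hpa_size / ways;
    uint64_t remainder = hpa_size % ways;

    if (remainder) {
        fprintf(stderr, "HPA size not divisible by interleave ways\n");
        return 1;
    }
    /* each device contributes dpa_size; the running dpa_base advances by it */
    printf("per-device DPA contribution: %llu MiB\n",
           (unsigned long long)(dpa_size >> 20));
    return 0;
}
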
929 struct cxl_decoder *cxld; in devm_cxl_enumerate_decoders() local
941 cxld = &cxled->cxld; in devm_cxl_enumerate_decoders()
952 cxld = &cxlsd->cxld; in devm_cxl_enumerate_decoders()
955 rc = init_hdm_decoder(port, cxld, target_map, hdm, i, in devm_cxl_enumerate_decoders()
961 put_device(&cxld->dev); in devm_cxl_enumerate_decoders()
964 rc = add_hdm_decoder(port, cxld, target_map); in devm_cxl_enumerate_decoders()
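
devm_cxl_enumerate_decoders() works on the common struct cxl_decoder embedded in either a cxl_endpoint_decoder or a cxl_switch_decoder (cxld = &cxled->cxld / cxld = &cxlsd->cxld above), and helpers such as to_cxl_endpoint_decoder() recover the containing structure when flavor-specific fields are needed. A self-contained illustration of that embedded-base pattern in plain C, with hypothetical type names:

/* Sketch of the embedded-base-struct pattern the enumeration loop relies on. */
#include <stdio.h>
#include <stddef.h>

#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct base_decoder { int id; };

struct endpoint_decoder {
    struct base_decoder base;      /* models cxled->cxld */
    unsigned long long dpa_base;
};

static void init_common(struct base_decoder *d, int id)
{
    d->id = id;    /* code that only needs common fields takes the base pointer */
}

int main(void)
{
    struct endpoint_decoder ep = { .dpa_base = 256 };
    struct base_decoder *d = &ep.base;
    struct endpoint_decoder *rec;

    init_common(d, 0);
    /* recover the containing endpoint decoder from the base pointer */
    rec = container_of(d, struct endpoint_decoder, base);
    printf("decoder%d dpa_base %llu\n", rec->base.id, rec->dpa_base);
    return 0;
}
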