Lines Matching refs:tunnel
301 const char *drm_dp_tunnel_name(const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_name() argument
303 return tunnel->name; in drm_dp_tunnel_name()
364 tunnel_get(struct drm_dp_tunnel *tunnel) in tunnel_get() argument
366 kref_get(&tunnel->kref); in tunnel_get()
368 return tunnel; in tunnel_get()
373 struct drm_dp_tunnel *tunnel = container_of(kref, typeof(*tunnel), kref); in free_tunnel() local
374 struct drm_dp_tunnel_group *group = tunnel->group; in free_tunnel()
376 list_del(&tunnel->node); in free_tunnel()
380 kfree(tunnel); in free_tunnel()
383 static void tunnel_put(struct drm_dp_tunnel *tunnel) in tunnel_put() argument
385 kref_put(&tunnel->kref, free_tunnel); in tunnel_put()
389 static void track_tunnel_ref(struct drm_dp_tunnel *tunnel, in track_tunnel_ref() argument
392 ref_tracker_alloc(&tunnel->group->mgr->ref_tracker, in track_tunnel_ref()
396 static void untrack_tunnel_ref(struct drm_dp_tunnel *tunnel, in untrack_tunnel_ref() argument
399 ref_tracker_free(&tunnel->group->mgr->ref_tracker, in untrack_tunnel_ref()
403 static void track_tunnel_ref(struct drm_dp_tunnel *tunnel, in track_tunnel_ref() argument
408 static void untrack_tunnel_ref(struct drm_dp_tunnel *tunnel, in untrack_tunnel_ref() argument
428 drm_dp_tunnel_get(struct drm_dp_tunnel *tunnel, in drm_dp_tunnel_get() argument
431 track_tunnel_ref(tunnel, tracker); in drm_dp_tunnel_get()
433 return tunnel_get(tunnel); in drm_dp_tunnel_get()
445 void drm_dp_tunnel_put(struct drm_dp_tunnel *tunnel, in drm_dp_tunnel_put() argument
448 untrack_tunnel_ref(tunnel, tracker); in drm_dp_tunnel_put()
450 tunnel_put(tunnel); in drm_dp_tunnel_put()
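
A hedged usage sketch of the get/put pair above: the tracker cookie is what track_tunnel_ref()/untrack_tunnel_ref() hand to the manager's ref_tracker, letting CONFIG_REF_TRACKER builds pinpoint leaked tunnel references. The container struct and helper names below are illustrative, not from the source.

	struct my_encoder_state {			/* illustrative container */
		struct drm_dp_tunnel *tunnel;
		struct ref_tracker *tunnel_tracker;
	};

	static void my_state_take_tunnel(struct my_encoder_state *s,
					 struct drm_dp_tunnel *tunnel)
	{
		/* Takes a kref and registers it with the manager's ref_tracker. */
		s->tunnel = drm_dp_tunnel_get(tunnel, &s->tunnel_tracker);
	}

	static void my_state_drop_tunnel(struct my_encoder_state *s)
	{
		/* Unregisters the tracked reference, then drops the kref. */
		drm_dp_tunnel_put(s->tunnel, &s->tunnel_tracker);
		s->tunnel = NULL;
	}
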
456 struct drm_dp_tunnel *tunnel) in add_tunnel_to_group() argument
464 tunnel->group = group; in add_tunnel_to_group()
465 list_add(&tunnel->node, &group->tunnels); in add_tunnel_to_group()
476 struct drm_dp_tunnel *tunnel; in create_tunnel() local
478 tunnel = kzalloc(sizeof(*tunnel), GFP_KERNEL); in create_tunnel()
479 if (!tunnel) in create_tunnel()
482 INIT_LIST_HEAD(&tunnel->node); in create_tunnel()
484 kref_init(&tunnel->kref); in create_tunnel()
486 tunnel->aux = aux; in create_tunnel()
488 tunnel->adapter_id = tunnel_reg(regs, DP_IN_ADAPTER_INFO) & DP_IN_ADAPTER_NUMBER_MASK; in create_tunnel()
490 snprintf(tunnel->name, sizeof(tunnel->name), "%d:%d:%d", in create_tunnel()
493 tunnel->adapter_id & ((1 << DP_IN_ADAPTER_NUMBER_BITS) - 1)); in create_tunnel()
495 tunnel->bw_granularity = tunnel_reg_bw_granularity(regs); in create_tunnel()
496 tunnel->allocated_bw = tunnel_reg(regs, DP_ALLOCATED_BW) * in create_tunnel()
497 tunnel->bw_granularity; in create_tunnel()
505 if (!tunnel->allocated_bw) in create_tunnel()
506 tunnel->allocated_bw = -1; in create_tunnel()
508 tunnel->bw_alloc_supported = tunnel_reg_bw_alloc_supported(regs); in create_tunnel()
509 tunnel->bw_alloc_enabled = tunnel_reg_bw_alloc_enabled(regs); in create_tunnel()
511 if (!add_tunnel_to_group(mgr, drv_group_id, tunnel)) { in create_tunnel()
512 kfree(tunnel); in create_tunnel()
517 track_tunnel_ref(tunnel, &tunnel->tracker); in create_tunnel()
519 return tunnel; in create_tunnel()
522 static void destroy_tunnel(struct drm_dp_tunnel *tunnel) in destroy_tunnel() argument
524 untrack_tunnel_ref(tunnel, &tunnel->tracker); in destroy_tunnel()
525 tunnel_put(tunnel); in destroy_tunnel()
541 void drm_dp_tunnel_set_io_error(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_set_io_error() argument
543 tunnel->has_io_error = true; in drm_dp_tunnel_set_io_error()
615 static int tunnel_allocated_bw(const struct drm_dp_tunnel *tunnel) in tunnel_allocated_bw() argument
617 return max(tunnel->allocated_bw, 0); in tunnel_allocated_bw()
620 static bool tunnel_info_changes_are_valid(struct drm_dp_tunnel *tunnel, in tunnel_info_changes_are_valid() argument
627 if (tunnel->bw_alloc_supported != tunnel_reg_bw_alloc_supported(regs)) { in tunnel_info_changes_are_valid()
628 tun_dbg(tunnel, in tunnel_info_changes_are_valid()
630 str_yes_no(tunnel->bw_alloc_supported), in tunnel_info_changes_are_valid()
636 if (tunnel->group->drv_group_id != new_drv_group_id) { in tunnel_info_changes_are_valid()
637 tun_dbg(tunnel, in tunnel_info_changes_are_valid()
639 tunnel_group_drv_id(tunnel->group->drv_group_id), in tunnel_info_changes_are_valid()
640 tunnel_group_id(tunnel->group->drv_group_id), in tunnel_info_changes_are_valid()
647 if (!tunnel->bw_alloc_supported) in tunnel_info_changes_are_valid()
650 if (tunnel->bw_granularity != tunnel_reg_bw_granularity(regs)) { in tunnel_info_changes_are_valid()
651 tun_dbg(tunnel, in tunnel_info_changes_are_valid()
653 DPTUN_BW_ARG(tunnel->bw_granularity), in tunnel_info_changes_are_valid()
665 tunnel_allocated_bw(tunnel) != in tunnel_info_changes_are_valid()
666 tunnel_reg(regs, DP_ALLOCATED_BW) * tunnel->bw_granularity) { in tunnel_info_changes_are_valid()
667 tun_dbg(tunnel, in tunnel_info_changes_are_valid()
669 DPTUN_BW_ARG(tunnel->allocated_bw), in tunnel_info_changes_are_valid()
670 DPTUN_BW_ARG(tunnel_reg(regs, DP_ALLOCATED_BW) * tunnel->bw_granularity)); in tunnel_info_changes_are_valid()
679 read_and_verify_tunnel_regs(struct drm_dp_tunnel *tunnel, in read_and_verify_tunnel_regs() argument
685 err = read_tunnel_regs(tunnel->aux, regs); in read_and_verify_tunnel_regs()
687 drm_dp_tunnel_set_io_error(tunnel); in read_and_verify_tunnel_regs()
692 if (!tunnel_regs_are_valid(tunnel->group->mgr, regs, flags)) in read_and_verify_tunnel_regs()
695 if (!tunnel_info_changes_are_valid(tunnel, regs, flags)) in read_and_verify_tunnel_regs()
701 static bool update_dprx_caps(struct drm_dp_tunnel *tunnel, const struct drm_dp_tunnel_regs *regs) in update_dprx_caps() argument
705 if (tunnel_reg_max_dprx_rate(regs) != tunnel->max_dprx_rate) { in update_dprx_caps()
706 tunnel->max_dprx_rate = tunnel_reg_max_dprx_rate(regs); in update_dprx_caps()
710 if (tunnel_reg_max_dprx_lane_count(regs) != tunnel->max_dprx_lane_count) { in update_dprx_caps()
711 tunnel->max_dprx_lane_count = tunnel_reg_max_dprx_lane_count(regs); in update_dprx_caps()
726 static int get_max_dprx_bw(const struct drm_dp_tunnel *tunnel) in get_max_dprx_bw() argument
728 int max_dprx_bw = drm_dp_max_dprx_data_rate(tunnel->max_dprx_rate, in get_max_dprx_bw()
729 tunnel->max_dprx_lane_count); in get_max_dprx_bw()
736 return min(roundup(max_dprx_bw, tunnel->bw_granularity), in get_max_dprx_bw()
737 MAX_DP_REQUEST_BW * tunnel->bw_granularity); in get_max_dprx_bw()
740 static int get_max_tunnel_bw(const struct drm_dp_tunnel *tunnel) in get_max_tunnel_bw() argument
742 return min(get_max_dprx_bw(tunnel), tunnel->group->available_bw); in get_max_tunnel_bw()
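
The clamp in get_max_dprx_bw() bounds the DPRX data rate both up (to a whole number of granularity units) and down (to what a write of MAX_DP_REQUEST_BW units can express), and get_max_tunnel_bw() further caps it by the group's available BW. A minimal stand-alone sketch of that arithmetic with illustrative numbers; the 255 ceiling and the units are assumptions, not taken from the source:

	#include <stdio.h>

	#define MAX_DP_REQUEST_BW	255	/* assumed one-byte request ceiling */

	static int roundup_to(int x, int step)
	{
		return (x + step - 1) / step * step;
	}

	int main(void)
	{
		int raw_dprx_bw = 10000;	/* illustrative DPRX data rate */
		int bw_granularity = 300;	/* illustrative granularity */
		int max_bw = roundup_to(raw_dprx_bw, bw_granularity);	/* 10200 */

		if (max_bw > MAX_DP_REQUEST_BW * bw_granularity)	/* 76500 cap */
			max_bw = MAX_DP_REQUEST_BW * bw_granularity;

		printf("max DPRX BW: %d\n", max_bw);	/* prints 10200 */
		return 0;
	}
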
761 struct drm_dp_tunnel *tunnel; in drm_dp_tunnel_detect() local
776 tunnel = create_tunnel(mgr, aux, &regs); in drm_dp_tunnel_detect()

777 if (!tunnel) in drm_dp_tunnel_detect()
780 tun_dbg(tunnel, in drm_dp_tunnel_detect()
794 str_yes_no(tunnel->bw_alloc_supported), in drm_dp_tunnel_detect()
795 str_yes_no(tunnel->bw_alloc_enabled)); in drm_dp_tunnel_detect()
797 return tunnel; in drm_dp_tunnel_detect()
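
A hedged sketch of how a driver might wire up detection, assuming drm_dp_tunnel_detect() follows the usual ERR_PTR convention on failure (consistent with the NULL check at 777 feeding an error return); probe_tunnel() and the fallback policy are illustrative:

	static struct drm_dp_tunnel *probe_tunnel(struct drm_dp_tunnel_mgr *mgr,
						  struct drm_dp_aux *aux)
	{
		struct drm_dp_tunnel *tunnel;

		tunnel = drm_dp_tunnel_detect(mgr, aux);
		if (IS_ERR(tunnel))
			return tunnel;	/* no tunnel, or detection failed */

		/*
		 * BWA is optional: if enabling fails, keep the tunnel
		 * reference and run without dynamic BW allocation.
		 */
		drm_dp_tunnel_enable_bw_alloc(tunnel);

		return tunnel;
	}
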
809 int drm_dp_tunnel_destroy(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_destroy() argument
811 if (!tunnel) in drm_dp_tunnel_destroy()
814 if (drm_WARN_ON(tunnel->group->mgr->dev, tunnel->destroyed)) in drm_dp_tunnel_destroy()
817 tun_dbg(tunnel, "destroying\n"); in drm_dp_tunnel_destroy()
819 tunnel->destroyed = true; in drm_dp_tunnel_destroy()
820 destroy_tunnel(tunnel); in drm_dp_tunnel_destroy()
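
The destroy path is the counterpart of detect: it marks the tunnel dead and drops the reference taken in create_tunnel(). A hedged teardown sketch pairing it with a BWA disable; the ordering and the wrapper are assumptions (note drm_dp_tunnel_destroy() itself tolerates NULL, per 811):

	static void remove_tunnel(struct drm_dp_tunnel *tunnel)
	{
		if (drm_dp_tunnel_bw_alloc_is_enabled(tunnel))
			drm_dp_tunnel_disable_bw_alloc(tunnel);

		/* Drops the reference created by create_tunnel() at detect time. */
		drm_dp_tunnel_destroy(tunnel);
	}
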
826 static int check_tunnel(const struct drm_dp_tunnel *tunnel) in check_tunnel() argument
828 if (tunnel->destroyed) in check_tunnel()
831 if (tunnel->has_io_error) in check_tunnel()
839 struct drm_dp_tunnel *tunnel; in group_allocated_bw() local
842 for_each_tunnel_in_group(group, tunnel) { in group_allocated_bw()
843 if (check_tunnel(tunnel) == 0 && in group_allocated_bw()
844 tunnel->bw_alloc_enabled) in group_allocated_bw()
845 group_allocated_bw += tunnel_allocated_bw(tunnel); in group_allocated_bw()
856 static int group_free_bw(const struct drm_dp_tunnel *tunnel) in group_free_bw() argument
858 return tunnel->estimated_bw - tunnel_allocated_bw(tunnel); in group_free_bw()
861 static int calc_group_available_bw(const struct drm_dp_tunnel *tunnel) in calc_group_available_bw() argument
863 return group_allocated_bw(tunnel->group) + in calc_group_available_bw()
864 group_free_bw(tunnel); in calc_group_available_bw()
867 static int update_group_available_bw(struct drm_dp_tunnel *tunnel, in update_group_available_bw() argument
874 tunnel->estimated_bw = tunnel_reg(regs, DP_ESTIMATED_BW) * tunnel->bw_granularity; in update_group_available_bw()
876 if (calc_group_available_bw(tunnel) == tunnel->group->available_bw) in update_group_available_bw()
879 for_each_tunnel_in_group(tunnel->group, tunnel_iter) { in update_group_available_bw()
882 if (tunnel_iter == tunnel) in update_group_available_bw()
898 group_available_bw = calc_group_available_bw(tunnel); in update_group_available_bw()
900 tun_dbg(tunnel, "Updated group available BW: %d->%d\n", in update_group_available_bw()
901 DPTUN_BW_ARG(tunnel->group->available_bw), in update_group_available_bw()
904 changed = tunnel->group->available_bw != group_available_bw; in update_group_available_bw()
906 tunnel->group->available_bw = group_available_bw; in update_group_available_bw()
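
Worked example of the group accounting above, with illustrative numbers: allocated_bw is -1 while unknown (a zero read at creation per 505-506, or after BWA is disabled at 1014), so tunnel_allocated_bw() clamps it to 0 at 617. With two BWA-enabled tunnels in a group allocated 2000 and 1000 units, and the tunnel being updated (the 2000-unit one) reporting estimated_bw = 3500, group_free_bw() returns 3500 - 2000 = 1500 and the group's available BW becomes (2000 + 1000) + 1500 = 4500 units.
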
911 static int set_bw_alloc_mode(struct drm_dp_tunnel *tunnel, bool enable) in set_bw_alloc_mode() argument
916 if (drm_dp_dpcd_readb(tunnel->aux, DP_DPTX_BW_ALLOCATION_MODE_CONTROL, &val) < 0) in set_bw_alloc_mode()
924 if (drm_dp_dpcd_writeb(tunnel->aux, DP_DPTX_BW_ALLOCATION_MODE_CONTROL, val) < 0) in set_bw_alloc_mode()
927 tunnel->bw_alloc_enabled = enable; in set_bw_alloc_mode()
932 drm_dp_tunnel_set_io_error(tunnel); in set_bw_alloc_mode()
945 int drm_dp_tunnel_enable_bw_alloc(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_enable_bw_alloc() argument
950 err = check_tunnel(tunnel); in drm_dp_tunnel_enable_bw_alloc()
954 if (!tunnel->bw_alloc_supported) in drm_dp_tunnel_enable_bw_alloc()
957 if (!tunnel_group_id(tunnel->group->drv_group_id)) in drm_dp_tunnel_enable_bw_alloc()
960 err = set_bw_alloc_mode(tunnel, true); in drm_dp_tunnel_enable_bw_alloc()
971 err = read_and_verify_tunnel_regs(tunnel, &regs, in drm_dp_tunnel_enable_bw_alloc()
974 set_bw_alloc_mode(tunnel, false); in drm_dp_tunnel_enable_bw_alloc()
979 if (!tunnel->max_dprx_rate) in drm_dp_tunnel_enable_bw_alloc()
980 update_dprx_caps(tunnel, &regs); in drm_dp_tunnel_enable_bw_alloc()
982 if (tunnel->group->available_bw == -1) { in drm_dp_tunnel_enable_bw_alloc()
983 err = update_group_available_bw(tunnel, &regs); in drm_dp_tunnel_enable_bw_alloc()
988 tun_dbg_stat(tunnel, err, in drm_dp_tunnel_enable_bw_alloc()
990 tunnel->max_dprx_rate / 100, tunnel->max_dprx_lane_count, in drm_dp_tunnel_enable_bw_alloc()
991 DPTUN_BW_ARG(group_allocated_bw(tunnel->group)), in drm_dp_tunnel_enable_bw_alloc()
992 DPTUN_BW_ARG(tunnel->group->available_bw)); in drm_dp_tunnel_enable_bw_alloc()
1006 int drm_dp_tunnel_disable_bw_alloc(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_disable_bw_alloc() argument
1010 err = check_tunnel(tunnel); in drm_dp_tunnel_disable_bw_alloc()
1014 tunnel->allocated_bw = -1; in drm_dp_tunnel_disable_bw_alloc()
1016 err = set_bw_alloc_mode(tunnel, false); in drm_dp_tunnel_disable_bw_alloc()
1018 tun_dbg_stat(tunnel, err, "Disabling BW alloc mode"); in drm_dp_tunnel_disable_bw_alloc()
1032 bool drm_dp_tunnel_bw_alloc_is_enabled(const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_bw_alloc_is_enabled() argument
1034 return tunnel && tunnel->bw_alloc_enabled; in drm_dp_tunnel_bw_alloc_is_enabled()
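
Note that the predicate at 1034 is NULL-safe, so callers can gate their BW bookkeeping on it without a separate NULL check; a one-line sketch with an illustrative port struct:

	struct my_port {			/* illustrative container */
		struct drm_dp_tunnel *tunnel;
	};

	static bool port_needs_bw_request(const struct my_port *port)
	{
		/* Safe even before a tunnel was detected (tunnel == NULL). */
		return drm_dp_tunnel_bw_alloc_is_enabled(port->tunnel);
	}
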
1072 static int allocate_tunnel_bw(struct drm_dp_tunnel *tunnel, int bw) in allocate_tunnel_bw() argument
1074 struct drm_dp_tunnel_mgr *mgr = tunnel->group->mgr; in allocate_tunnel_bw()
1075 int request_bw = DIV_ROUND_UP(bw, tunnel->bw_granularity); in allocate_tunnel_bw()
1085 if (request_bw * tunnel->bw_granularity == tunnel->allocated_bw) in allocate_tunnel_bw()
1094 err = clear_bw_req_state(tunnel->aux); in allocate_tunnel_bw()
1098 if (drm_dp_dpcd_writeb(tunnel->aux, DP_REQUEST_BW, request_bw) < 0) { in allocate_tunnel_bw()
1109 err = bw_req_complete(tunnel->aux, &status_changed); in allocate_tunnel_bw()
1116 err = read_and_verify_tunnel_regs(tunnel, &regs, in allocate_tunnel_bw()
1135 tunnel->allocated_bw = request_bw * tunnel->bw_granularity; in allocate_tunnel_bw()
1138 tun_dbg_stat(tunnel, err, "Allocating %d/%d Mb/s for tunnel: Group alloc:%d/%d Mb/s", in allocate_tunnel_bw()
1139 DPTUN_BW_ARG(request_bw * tunnel->bw_granularity), in allocate_tunnel_bw()
1140 DPTUN_BW_ARG(get_max_tunnel_bw(tunnel)), in allocate_tunnel_bw()
1141 DPTUN_BW_ARG(group_allocated_bw(tunnel->group)), in allocate_tunnel_bw()
1142 DPTUN_BW_ARG(tunnel->group->available_bw)); in allocate_tunnel_bw()
1145 drm_dp_tunnel_set_io_error(tunnel); in allocate_tunnel_bw()
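
The request written to DP_REQUEST_BW at 1098 is expressed in granularity units and rounded up at 1075, so the BW actually reserved (1135) can slightly exceed what was asked for. A worked example with illustrative numbers (DIV_ROUND_UP as in linux/math.h):

	#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

	static int example_request_units(void)
	{
		int bw = 1000;			/* requested BW, driver units */
		int bw_granularity = 64;	/* illustrative granularity */
		int request_bw = DIV_ROUND_UP(bw, bw_granularity);	/* 16 */

		/*
		 * On success, allocated_bw becomes request_bw * bw_granularity
		 * = 1024 units, i.e. 24 units more than asked for.
		 */
		return request_bw;
	}
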
1160 int drm_dp_tunnel_alloc_bw(struct drm_dp_tunnel *tunnel, int bw) in drm_dp_tunnel_alloc_bw() argument
1164 err = check_tunnel(tunnel); in drm_dp_tunnel_alloc_bw()
1168 return allocate_tunnel_bw(tunnel, bw); in drm_dp_tunnel_alloc_bw()
1184 int drm_dp_tunnel_get_allocated_bw(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_get_allocated_bw() argument
1186 return tunnel->allocated_bw; in drm_dp_tunnel_get_allocated_bw()
1194 static int check_and_clear_status_change(struct drm_dp_tunnel *tunnel) in check_and_clear_status_change() argument
1199 if (drm_dp_dpcd_readb(tunnel->aux, DP_TUNNELING_STATUS, &val) < 0) in check_and_clear_status_change()
1205 if (drm_dp_dpcd_writeb(tunnel->aux, DP_TUNNELING_STATUS, val) < 0) in check_and_clear_status_change()
1211 if (!drm_dp_tunnel_bw_alloc_is_enabled(tunnel)) in check_and_clear_status_change()
1218 if (drm_dp_dpcd_readb(tunnel->aux, DP_ESTIMATED_BW, &val) < 0) in check_and_clear_status_change()
1221 if (val * tunnel->bw_granularity != tunnel->estimated_bw) in check_and_clear_status_change()
1227 drm_dp_tunnel_set_io_error(tunnel); in check_and_clear_status_change()
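
Two details worth noting in the check above: the value read from DP_TUNNELING_STATUS at 1199 is written straight back at 1205, which (assuming the status bits are clear-on-write, as the read-then-write-back pattern suggests) acknowledges them; and DP_ESTIMATED_BW is re-read explicitly at 1218 so a changed estimate is still noticed, presumably to cover change notifications that were lost.
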
1241 int drm_dp_tunnel_update_state(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_update_state() argument
1247 ret = check_tunnel(tunnel); in drm_dp_tunnel_update_state()
1251 ret = check_and_clear_status_change(tunnel); in drm_dp_tunnel_update_state()
1258 ret = read_and_verify_tunnel_regs(tunnel, &regs, 0); in drm_dp_tunnel_update_state()
1262 if (update_dprx_caps(tunnel, &regs)) in drm_dp_tunnel_update_state()
1265 ret = update_group_available_bw(tunnel, &regs); in drm_dp_tunnel_update_state()
1270 tun_dbg_stat(tunnel, ret < 0 ? ret : 0, in drm_dp_tunnel_update_state()
1273 tunnel->max_dprx_rate / 100, tunnel->max_dprx_lane_count, in drm_dp_tunnel_update_state()
1274 DPTUN_BW_ARG(tunnel->allocated_bw), in drm_dp_tunnel_update_state()
1275 DPTUN_BW_ARG(get_max_tunnel_bw(tunnel)), in drm_dp_tunnel_update_state()
1276 DPTUN_BW_ARG(group_allocated_bw(tunnel->group)), in drm_dp_tunnel_update_state()
1277 DPTUN_BW_ARG(tunnel->group->available_bw)); in drm_dp_tunnel_update_state()
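
A hedged sketch of the caller side of drm_dp_tunnel_update_state(): a positive return indicates the tunnel's DPRX caps or group BW changed (1262/1265) and a modeset re-check is due. The hotplug plumbing shown is a stand-in for driver-specific handling, not taken from the source:

	static void handle_tunnel_irq(struct drm_device *dev,
				      struct drm_dp_tunnel *tunnel)
	{
		int ret = drm_dp_tunnel_update_state(tunnel);

		if (ret > 0)
			/* DPRX caps or group BW changed: re-check the config. */
			drm_kms_helper_hotplug_event(dev);
		else if (ret < 0)
			drm_dbg_kms(dev, "tunnel state update failed (%d)\n", ret);
	}
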
1327 int drm_dp_tunnel_max_dprx_rate(const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_max_dprx_rate() argument
1329 return tunnel->max_dprx_rate; in drm_dp_tunnel_max_dprx_rate()
1344 int drm_dp_tunnel_max_dprx_lane_count(const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_max_dprx_lane_count() argument
1346 return tunnel->max_dprx_lane_count; in drm_dp_tunnel_max_dprx_lane_count()
1364 int drm_dp_tunnel_available_bw(const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_available_bw() argument
1366 return tunnel->group->available_bw; in drm_dp_tunnel_available_bw()
1372 const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_atomic_get_group_state() argument
1376 &tunnel->group->base); in drm_dp_tunnel_atomic_get_group_state()
1381 struct drm_dp_tunnel *tunnel) in add_tunnel_state() argument
1385 tun_dbg_atomic(tunnel, in add_tunnel_state()
1387 tunnel, group_state); in add_tunnel_state()
1395 drm_dp_tunnel_ref_get(tunnel, &tunnel_state->tunnel_ref); in add_tunnel_state()
1405 tun_dbg_atomic(tunnel_state->tunnel_ref.tunnel, in free_tunnel_state()
1407 tunnel_state->tunnel_ref.tunnel); in free_tunnel_state()
1430 const struct drm_dp_tunnel *tunnel) in get_tunnel_state() argument
1435 if (tunnel_state->tunnel_ref.tunnel == tunnel) in get_tunnel_state()
1443 struct drm_dp_tunnel *tunnel) in get_or_add_tunnel_state() argument
1447 tunnel_state = get_tunnel_state(group_state, tunnel); in get_or_add_tunnel_state()
1451 return add_tunnel_state(group_state, tunnel); in get_or_add_tunnel_state()
1472 tunnel_state->tunnel_ref.tunnel); in tunnel_group_duplicate_state()
1516 struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_atomic_get_state() argument
1521 group_state = drm_dp_tunnel_atomic_get_group_state(state, tunnel); in drm_dp_tunnel_atomic_get_state()
1525 tunnel_state = get_or_add_tunnel_state(group_state, tunnel); in drm_dp_tunnel_atomic_get_state()
1544 const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_atomic_get_old_state() argument
1550 if (to_group(old_group_state->base.obj) == tunnel->group) in drm_dp_tunnel_atomic_get_old_state()
1551 return get_tunnel_state(old_group_state, tunnel); in drm_dp_tunnel_atomic_get_old_state()
1568 const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_atomic_get_new_state() argument
1574 if (to_group(new_group_state->base.obj) == tunnel->group) in drm_dp_tunnel_atomic_get_new_state()
1575 return get_tunnel_state(new_group_state, tunnel); in drm_dp_tunnel_atomic_get_new_state()
1616 tunnel_state->tunnel_ref.tunnel->name, in check_unique_stream_ids()
1702 struct drm_dp_tunnel *tunnel, in drm_dp_tunnel_atomic_set_stream_bw() argument
1709 if (drm_WARN_ON(tunnel->group->mgr->dev, in drm_dp_tunnel_atomic_set_stream_bw()
1713 tun_dbg(tunnel, in drm_dp_tunnel_atomic_set_stream_bw()
1717 new_group_state = drm_dp_tunnel_atomic_get_group_state(state, tunnel); in drm_dp_tunnel_atomic_set_stream_bw()
1722 tunnel_state = get_tunnel_state(new_group_state, tunnel); in drm_dp_tunnel_atomic_set_stream_bw()
1729 tunnel_state = get_or_add_tunnel_state(new_group_state, tunnel); in drm_dp_tunnel_atomic_set_stream_bw()
1779 const struct drm_dp_tunnel *tunnel, in drm_dp_tunnel_atomic_get_group_streams_in_state() argument
1785 group_state = drm_dp_tunnel_atomic_get_group_state(state, tunnel); in drm_dp_tunnel_atomic_get_group_streams_in_state()
1807 struct drm_dp_tunnel *tunnel = new_tunnel_state->tunnel_ref.tunnel; in drm_dp_tunnel_atomic_check_group_bw() local
1808 int max_dprx_bw = get_max_dprx_bw(tunnel); in drm_dp_tunnel_atomic_check_group_bw()
1811 tun_dbg(tunnel, in drm_dp_tunnel_atomic_check_group_bw()
1822 group_bw += min(roundup(tunnel_bw, tunnel->bw_granularity), in drm_dp_tunnel_atomic_check_group_bw()
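
A hedged sketch of the driver's atomic_check side: each stream's BW is recorded into the tunnel group's atomic state, and the group-level check above then sums the per-tunnel stream BWs (rounded up to the granularity and capped by the DPRX limit at 1808) against the group's available BW. The wrapper and its parameters are illustrative:

	static int record_stream_bw(struct drm_atomic_state *state,
				    struct drm_dp_tunnel *tunnel,
				    u8 stream_id, int stream_bw)
	{
		/*
		 * A zero BW appears to remove the stream from the tunnel's
		 * atomic state (see the get_tunnel_state() vs
		 * get_or_add_tunnel_state() split at 1722/1729 above).
		 */
		return drm_dp_tunnel_atomic_set_stream_bw(state, tunnel,
							  stream_id, stream_bw);
	}
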