| /drivers/gpu/drm/amd/display/dc/resource/dcn30/ |
| dcn30_resource.h |
|      88  struct dc_transfer_func **shaper);
|      94  struct dc_transfer_func **shaper);
|
| dcn30_resource.c |
|    1433  struct dc_transfer_func **shaper)  in dcn30_acquire_post_bldn_3dlut() argument
|    1439  ASSERT(*lut == NULL && *shaper == NULL);  in dcn30_acquire_post_bldn_3dlut()
|    1441  *shaper = NULL;  in dcn30_acquire_post_bldn_3dlut()
|    1446  *shaper = pool->mpc_shaper[i];  in dcn30_acquire_post_bldn_3dlut()
|    1468  struct dc_transfer_func **shaper)  in dcn30_release_post_bldn_3dlut() argument
|    1474  if (pool->mpc_lut[i] == *lut && pool->mpc_shaper[i] == *shaper) {  in dcn30_release_post_bldn_3dlut()
|    1478  *shaper = NULL;  in dcn30_release_post_bldn_3dlut()
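
The dcn30 pair above (mirrored by the dcn32 variant further down) shows the acquire/release idiom for the post-blending MPC 3D LUT and its companion shaper: acquire asserts that the caller holds neither object, scans the pool for a free slot, and hands both pointers out together; release finds the slot whose lut and shaper match the caller's pointers and NULLs them. Below is a minimal stand-alone sketch of that pairing with the pool reduced to plain arrays; POOL_SLOTS, pool_lut, pool_shaper and slot_busy are invented for the example, and the real functions additionally take a resource context, pool and mpcc_id.

    /*
     * Illustrative sketch only: acquire/release of a paired LUT + shaper
     * from a fixed-size pool, modeled on the dcn30 functions above.
     */
    #include <stdbool.h>
    #include <stddef.h>

    #define POOL_SLOTS 4

    struct dc_transfer_func;                    /* opaque for this sketch */

    /* Would be populated once at pool construction time. */
    static struct dc_transfer_func *pool_lut[POOL_SLOTS];
    static struct dc_transfer_func *pool_shaper[POOL_SLOTS];
    static bool slot_busy[POOL_SLOTS];

    /* Hand out a free LUT/shaper pair; both out-pointers are set together. */
    static bool acquire_post_bldn_3dlut(struct dc_transfer_func **lut,
                                        struct dc_transfer_func **shaper)
    {
            int i;

            /* Caller must not already hold a pair (mirrors the ASSERT above). */
            if (*lut || *shaper)
                    return false;

            for (i = 0; i < POOL_SLOTS; i++) {
                    if (!slot_busy[i]) {
                            slot_busy[i] = true;
                            *lut = pool_lut[i];
                            *shaper = pool_shaper[i];
                            return true;
                    }
            }
            return false;
    }

    /* Find the slot owning this exact pair and clear the caller's pointers. */
    static bool release_post_bldn_3dlut(struct dc_transfer_func **lut,
                                        struct dc_transfer_func **shaper)
    {
            int i;

            for (i = 0; i < POOL_SLOTS; i++) {
                    if (pool_lut[i] == *lut && pool_shaper[i] == *shaper) {
                            slot_busy[i] = false;
                            *lut = NULL;
                            *shaper = NULL;
                            return true;
                    }
            }
            return false;
    }
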
|
| /drivers/gpu/drm/amd/display/dc/inc/ |
| core_types.h |
|     196  struct dc_transfer_func **shaper);
|     202  struct dc_transfer_func **shaper);
|
| /drivers/gpu/drm/amd/display/amdgpu_dm/ |
| amdgpu_dm_color.c |
|     822  const struct drm_color_lut *shaper = NULL, *lut3d = NULL;  in amdgpu_dm_verify_lut3d_size() local
|     828  shaper = __extract_blob_lut(dm_plane_state->shaper_lut, &size);  in amdgpu_dm_verify_lut3d_size()
|     830  if (shaper && size != exp_size) {  in amdgpu_dm_verify_lut3d_size()
|
| /drivers/net/ethernet/intel/iavf/ |
| iavf_main.c |
|    5072  const struct net_shaper *shaper,  in iavf_verify_shaper() argument
|    5078  if (shaper->handle.scope == NET_SHAPER_SCOPE_QUEUE) {  in iavf_verify_shaper()
|    5079  vf_max = adapter->qos_caps->cap[0].shaper.peak;  in iavf_verify_shaper()
|    5080  if (vf_max && shaper->bw_max > vf_max) {  in iavf_verify_shaper()
|    5082  shaper->bw_max, shaper->handle.id,  in iavf_verify_shaper()
|    5092  const struct net_shaper *shaper,  in iavf_shaper_set() argument
|    5096  const struct net_shaper_handle *handle = &shaper->handle;  in iavf_shaper_set()
|    5105  ret = iavf_verify_shaper(binding, shaper, extack);  in iavf_shaper_set()
|    5111  tx_ring->q_shaper.bw_min = div_u64(shaper->bw_min, 1000);  in iavf_shaper_set()
|    5112  tx_ring->q_shaper.bw_max = div_u64(shaper->bw_max, 1000);  in iavf_shaper_set()
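
The iavf hits above trace the driver side of the net-shaper API for queue scope: iavf_verify_shaper() rejects a queue-scope request whose bw_max exceeds the peak advertised in the VF's QoS caps, and iavf_shaper_set() then stores committed/peak rates per TX ring, scaling the values from the core down by 1000 (the mana entry further down labels the same conversion as bps to Kbps). Below is a stand-alone model of that validate-then-convert flow; the structs are simplified stand-ins for the kernel's struct net_shaper and struct net_shaper_handle, and every name and unit in it is illustrative.

    /*
     * Toy model of a queue-scope shaper ->set(): check scope, cap the
     * requested peak against a device limit, then record per-queue rates
     * scaled down by 1000 as in the driver hits above.
     */
    #include <stdint.h>
    #include <stdio.h>

    enum shaper_scope { SCOPE_NETDEV, SCOPE_QUEUE };

    struct shaper_handle {
            enum shaper_scope scope;
            uint32_t id;                    /* queue index for queue scope */
    };

    struct shaper {
            struct shaper_handle handle;
            uint64_t bw_min;                /* committed rate (bit/s in this model) */
            uint64_t bw_max;                /* peak rate (bit/s in this model) */
    };

    struct queue_cfg {
            uint64_t bw_min_kbps;
            uint64_t bw_max_kbps;
    };

    /* dev_peak is the device-wide per-queue peak; 0 means "no limit". */
    static int shaper_set(const struct shaper *s, uint64_t dev_peak,
                          struct queue_cfg *queues, unsigned int num_queues)
    {
            if (s->handle.scope != SCOPE_QUEUE || s->handle.id >= num_queues)
                    return -1;              /* only per-queue shaping handled here */

            if (dev_peak && s->bw_max > dev_peak)
                    return -1;              /* request exceeds the advertised cap */

            /* Scale down by 1000 before "programming" the queue. */
            queues[s->handle.id].bw_min_kbps = s->bw_min / 1000;
            queues[s->handle.id].bw_max_kbps = s->bw_max / 1000;
            return 0;
    }

    int main(void)
    {
            struct queue_cfg q[4] = { { 0, 0 } };
            struct shaper s = {
                    .handle = { .scope = SCOPE_QUEUE, .id = 2 },
                    .bw_min = 50000000ULL,          /* committed */
                    .bw_max = 200000000ULL,         /* peak */
            };

            if (shaper_set(&s, 0, q, 4) == 0)
                    printf("queue 2: min %llu, max %llu (scaled)\n",
                           (unsigned long long)q[2].bw_min_kbps,
                           (unsigned long long)q[2].bw_max_kbps);
            return 0;
    }
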
|
| iavf_virtchnl.c |
|    1791  qs_bw_cfg->cfg[inx].shaper.peak = q_shaper->bw_max;  in iavf_cfg_queues_bw()
|    1792  qs_bw_cfg->cfg[inx].shaper.committed = q_shaper->bw_min;  in iavf_cfg_queues_bw()
|
| /drivers/gpu/drm/amd/display/dc/resource/dcn32/ |
| dcn32_resource.c |
|    1600  struct dc_transfer_func **shaper)  in dcn32_acquire_post_bldn_3dlut() argument
|    1604  ASSERT(*lut == NULL && *shaper == NULL);  in dcn32_acquire_post_bldn_3dlut()
|    1606  *shaper = NULL;  in dcn32_acquire_post_bldn_3dlut()
|    1610  *shaper = pool->mpc_shaper[mpcc_id];  in dcn32_acquire_post_bldn_3dlut()
|    1621  struct dc_transfer_func **shaper)  in dcn32_release_post_bldn_3dlut() argument
|    1627  if (pool->mpc_lut[i] == *lut && pool->mpc_shaper[i] == *shaper) {  in dcn32_release_post_bldn_3dlut()
|    1631  *shaper = NULL;  in dcn32_release_post_bldn_3dlut()
|
| dcn32_resource.h |
|      87  struct dc_transfer_func **shaper);
|      93  struct dc_transfer_func **shaper);
|
| /drivers/net/ethernet/intel/ice/ |
| ice_virtchnl.c |
|    1113  cfg->shaper.committed = vsi_ctx->sched.bw_t_info[i].cir_bw.bw;  in ice_vc_get_qos_caps()
|    1114  cfg->shaper.peak = vsi_ctx->sched.bw_t_info[i].eir_bw.bw;  in ice_vc_get_qos_caps()
|    1866  if (qbw->cfg[i].shaper.peak != 0 && vf->max_tx_rate != 0 &&  in ice_vc_cfg_q_bw()
|    1867  qbw->cfg[i].shaper.peak > vf->max_tx_rate) {  in ice_vc_cfg_q_bw()
|    1874  if (qbw->cfg[i].shaper.committed != 0 && vf->min_tx_rate != 0 &&  in ice_vc_cfg_q_bw()
|    1875  qbw->cfg[i].shaper.committed < vf->min_tx_rate) {  in ice_vc_cfg_q_bw()
|    1898  vf->qs_bw[i].peak = qbw->cfg[i].shaper.peak;  in ice_vc_cfg_q_bw()
|    1899  vf->qs_bw[i].committed = qbw->cfg[i].shaper.committed;  in ice_vc_cfg_q_bw()
|
| /drivers/net/ethernet/microsoft/mana/ |
| mana_en.c |
|     737  const struct net_shaper *shaper,  in mana_shaper_set() argument
|     744  if (shaper->handle.scope != NET_SHAPER_SCOPE_NETDEV) {  in mana_shaper_set()
|     749  if (apc->handle.id && shaper->handle.id != apc->handle.id) {  in mana_shaper_set()
|     754  if (!shaper->bw_max || (shaper->bw_max % 100000000)) {  in mana_shaper_set()
|     759  rate = div_u64(shaper->bw_max, 1000); /* Convert bps to Kbps */  in mana_shaper_set()
|     769  apc->handle = (err) ? apc->handle : shaper->handle;  in mana_shaper_set()
|
| /drivers/gpu/drm/amd/display/dc/hwss/dcn401/ |
| dcn401_hwseq.c |
|     441  if (mcm_luts.shaper && mcm_luts.lut3d_data.mpc_3dlut_enable) {  in dcn401_populate_mcm_luts()
|     443  if (mcm_luts.shaper->type == TF_TYPE_HWPWL)  in dcn401_populate_mcm_luts()
|     444  m_lut_params.pwl = &mcm_luts.shaper->pwl;  in dcn401_populate_mcm_luts()
|     445  else if (mcm_luts.shaper->type == TF_TYPE_DISTRIBUTED_POINTS) {  in dcn401_populate_mcm_luts()
|     448  mcm_luts.shaper,  in dcn401_populate_mcm_luts()
|
| /drivers/gpu/drm/amd/display/dc/ |
| dc_types.h |
|    1307  const struct dc_transfer_func *shaper;  member
|
| dc.h |
|    1853  struct dc_transfer_func **shaper);
|
| /drivers/net/ethernet/chelsio/cxgb4/ |
| cxgb4_tc_mqprio.c |
|      28  } else if (mqprio->shaper != TC_MQPRIO_SHAPER_BW_RATE) {  in cxgb4_mqprio_validate()
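
The same guard appears in cxgb4 here and again in the am65-cpsw and mvneta entries below: an mqprio hardware offload is only meaningful to these drivers when the qdisc passes TC_MQPRIO_SHAPER_BW_RATE, i.e. when min_rate[]/max_rate[] carry per-traffic-class rates rather than DCB/ETS parameters. A hedged sketch of that validation step follows; struct tc_mqprio_qopt_offload and TC_MQPRIO_SHAPER_BW_RATE are the real kernel definitions, while the function name and the min/max consistency check are invented here, not taken from any one of the drivers listed.

    #include <linux/netdevice.h>
    #include <net/pkt_cls.h>

    static int example_mqprio_validate(struct net_device *ndev,
                                       struct tc_mqprio_qopt_offload *mqprio)
    {
            int tc;

            /* Only the bandwidth-rate shaper mode carries per-TC rates. */
            if (mqprio->shaper != TC_MQPRIO_SHAPER_BW_RATE) {
                    netdev_err(ndev, "only bandwidth-rate shaping is supported\n");
                    return -EOPNOTSUPP;
            }

            for (tc = 0; tc < mqprio->qopt.num_tc; tc++) {
                    /* A zero max_rate means "unlimited" for that class. */
                    if (mqprio->max_rate[tc] &&
                        mqprio->min_rate[tc] > mqprio->max_rate[tc])
                            return -EINVAL;
            }

            return 0;
    }
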
|
| /drivers/net/netdevsim/ |
| netdev.c |
|     581  const struct net_shaper *shaper,  in nsim_shaper_set() argument
|
| /drivers/net/ethernet/ti/ |
| am65-cpsw-qos.c |
|     116  if (mqprio->shaper != TC_MQPRIO_SHAPER_BW_RATE)  in am65_cpsw_mqprio_verify_shaper()
|
| /drivers/gpu/drm/amd/display/dc/core/ |
| dc.c |
|    2436  struct dc_transfer_func **shaper)  in dc_acquire_release_mpc_3dlut() argument
|    2460  ret = pool->funcs->acquire_post_bldn_3dlut(res_ctx, pool, mpcc_id, lut, shaper);  in dc_acquire_release_mpc_3dlut()
|    2462  ret = pool->funcs->release_post_bldn_3dlut(res_ctx, pool, lut, shaper);  in dc_acquire_release_mpc_3dlut()
|    3145  surface->mcm_luts.shaper = &surface->in_shaper_func;  in copy_surface_update_to_plane()
|
| /drivers/net/dsa/microchip/ |
| ksz_common.c |
|    4147  u8 shaper)  in ksz_setup_tc_mode() argument
|    4151  FIELD_PREP(MTI_SHAPING_M, shaper));  in ksz_setup_tc_mode()
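
ksz_setup_tc_mode() above packs the shaper selection into a switch register with FIELD_PREP(), which shifts a value into the bit positions described by a mask. A small sketch of that read-modify-write pattern is below; MTI_SHAPING_M is the driver's real mask, but EXAMPLE_SHAPING_M and the helper are made up for illustration.

    #include <linux/bitfield.h>
    #include <linux/bits.h>
    #include <linux/types.h>

    /* Stand-in for the driver's MTI_SHAPING_M register field. */
    #define EXAMPLE_SHAPING_M  GENMASK(7, 6)

    /* Replace the shaping-mode field of a register value with 'shaper'. */
    static u32 example_pack_shaper(u32 regval, u8 shaper)
    {
            regval &= ~EXAMPLE_SHAPING_M;                     /* clear the field */
            regval |= FIELD_PREP(EXAMPLE_SHAPING_M, shaper);  /* insert new mode */
            return regval;
    }
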
|
| /drivers/net/ethernet/marvell/ |
| mvneta.c |
|    5281  if (mqprio->shaper != TC_MQPRIO_SHAPER_BW_RATE) {  in mvneta_setup_mqprio()
|