Searched refs:pfvf (Results 1 – 25 of 48) sorted by relevance

/drivers/net/ethernet/marvell/octeontx2/nic/
otx2_common.c
266 if (is_otx2_lbkvf(pfvf->pdev) || is_otx2_sdp_rep(pfvf->pdev)) in otx2_config_pause_frm()
623 dwrr_val = mtu_to_dwrr_weight(pfvf, pfvf->tx_max_pktlen); in otx2_txschq_config()
1010 if (pfvf->ptp && qidx < pfvf->hw.tx_queues) { in otx2_sq_init()
1200 pfvf->cq_op_addr = (__force u64 *)otx2_get_regaddr(pfvf, in otx2_config_nix_queues()
1204 pfvf->refill_wrk = devm_kcalloc(pfvf->dev, pfvf->qset.cq_cnt, in otx2_config_nix_queues()
1210 pfvf->refill_wrk[qidx].pf = pfvf; in otx2_config_nix_queues()
1223 pfvf->qset.xqe_size = pfvf->hw.xqe_size; in otx2_config_nix()
1363 devm_kfree(pfvf->dev, pfvf->qset.pool); in otx2_aura_pool_free()
1593 pfvf->hw_ops->aura_freeptr(pfvf, pool_id, bufptr); in otx2_sq_aura_pool_init()
1653 pfvf->hw_ops->aura_freeptr(pfvf, pool_id, in otx2_rq_aura_pool_init()
[all …]

otx2_dcbnl.c
13 u8 pfc_en = pfvf->pfc_en; in otx2_check_pfc_config()
18 dev_warn(pfvf->dev, in otx2_check_pfc_config()
33 pfc_en = pfvf->pfc_en; in otx2_pfc_txschq_config()
47 dev_err(pfvf->dev, in otx2_pfc_txschq_config()
98 pfvf->pfc_schq_list[lvl][prio] = pfvf->hw.txschq_list[lvl][0]; in otx2_pfc_txschq_alloc_one()
216 otx2_smq_flush(pfvf, pfvf->pfc_schq_list[NIX_TXSCH_LVL_SMQ][prio]); in otx2_pfc_txschq_update()
243 dev_err(pfvf->dev, in otx2_pfc_txschq_update()
306 if (pfvf->pfc_en) { in otx2_config_priority_flow_ctrl()
344 dev_warn(pfvf->dev, in otx2_update_bpid_in_rqctx()
394 dev_warn(pfvf->dev, in otx2_update_bpid_in_rqctx()
[all …]

qos_sq.c
23 if (!pfvf->qset.pool) in otx2_qos_aura_pool_free()
26 pool = &pfvf->qset.pool[pool_id]; in otx2_qos_aura_pool_free()
27 qmem_free(pfvf->dev, pool->stack); in otx2_qos_aura_pool_free()
37 struct otx2_hw *hw = &pfvf->hw; in otx2_qos_sq_aura_pool_init()
88 pfvf->hw_ops->aura_freeptr(pfvf, pool_id, bufptr); in otx2_qos_sq_aura_pool_init()
141 qmem_free(pfvf->dev, sq->sqe); in otx2_qos_sq_free_sqbs()
226 pfvf->hw.tc_tx_queues); in otx2_qos_get_qid()
246 mutex_lock(&pfvf->mbox.lock); in otx2_qos_enable_sq()
265 mutex_lock(&pfvf->mbox.lock); in otx2_qos_nix_npa_ndc_sync()
294 sq = &pfvf->qset.sq[sq_idx]; in otx2_qos_disable_sq()
[all …]

qos.c
28 struct otx2_hw *hw = &pfvf->hw; in otx2_qos_update_tx_netdev_queues()
38 netdev_err(pfvf->netdev, in otx2_qos_update_tx_netdev_queues()
100 mtu_to_dwrr_weight(pfvf, pfvf->tx_max_pktlen); in otx2_config_sched_shaping()
1037 netdev_dbg(pfvf->netdev, in otx2_qos_root_add()
1065 netdev_dbg(pfvf->netdev, in otx2_qos_root_add()
1232 netdev_dbg(pfvf->netdev, in otx2_qos_leaf_alloc_queue()
1325 netdev_err(pfvf->netdev, in otx2_qos_leaf_alloc_queue()
1371 netdev_dbg(pfvf->netdev, in otx2_qos_leaf_to_inner()
1502 int last = find_last_bit(pfvf->qos.qos_sq_bmap, pfvf->hw.tc_tx_queues); in otx2_qos_cur_leaf_nodes()
1593 otx2_reset_qdisc(pfvf->netdev, pfvf->hw.tx_queues + moved_qid); in otx2_qos_leaf_del()
[all …]

otx2_devlink.c
15 struct otx2_nic *pfvf = otx2_dl->pfvf; in otx2_dl_mcam_count_validate() local
18 if (!pfvf->flow_cfg) { in otx2_dl_mcam_count_validate()
39 struct otx2_nic *pfvf = otx2_dl->pfvf; in otx2_dl_mcam_count_set() local
54 struct otx2_nic *pfvf = otx2_dl->pfvf; in otx2_dl_mcam_count_get() local
73 struct otx2_nic *pfvf = otx2_dl->pfvf; in otx2_dl_ucast_flt_cnt_set() local
90 struct otx2_nic *pfvf = otx2_dl->pfvf; in otx2_dl_ucast_flt_cnt_get() local
92 ctx->val.vu8 = pfvf->flow_cfg ? pfvf->flow_cfg->ucast_flt_cnt : 0; in otx2_dl_ucast_flt_cnt_get()
102 struct otx2_nic *pfvf = otx2_dl->pfvf; in otx2_dl_ucast_flt_cnt_validate() local
149 struct otx2_nic *pfvf = otx2_dl->pfvf; in otx2_devlink_eswitch_mode_get() local
163 struct otx2_nic *pfvf = otx2_dl->pfvf; in otx2_devlink_eswitch_mode_set() local
[all …]

otx2_ethtool.c
141 if (!pfvf) in otx2_get_qset_stats()
192 otx2_get_dev_stats(pfvf); in otx2_get_ethtool_stats()
249 (pfvf->hw.rx_queues + otx2_get_total_tx_queues(pfvf)); in otx2_get_sset_count()
282 if (bitmap_weight(&pfvf->rq_bmap, pfvf->hw.rx_queues) > 1) { in otx2_set_channels()
301 if (pfvf->xdp_prog) in otx2_set_channels()
308 pfvf->hw.tx_queues, pfvf->hw.rx_queues); in otx2_set_channels()
319 if (is_otx2_lbkvf(pfvf->pdev) || is_otx2_sdp_rep(pfvf->pdev)) in otx2_get_pauseparam()
351 if (is_otx2_lbkvf(pfvf->pdev) || is_otx2_sdp_rep(pfvf->pdev)) in otx2_set_pauseparam()
958 if (is_otx2_lbkvf(pfvf->pdev) || is_otx2_sdp_rep(pfvf->pdev)) in otx2_get_link()
968 if (!pfvf->ptp) in otx2_get_ts_info()
[all …]

cn10k.c
31 pfvf->hw_ops = &otx2_hw_ops; in otx2_init_hw_ops()
35 pfvf->hw_ops = &cn10k_hw_ops; in otx2_init_hw_ops()
53 mutex_lock(&pfvf->mbox.lock); in cn10k_lmtst_init()
62 err = qmem_alloc(pfvf->dev, &pfvf->dync_lmt, pfvf->tot_lmt_lines, in cn10k_lmtst_init()
68 pfvf->hw.lmt_base = (u64 *)pfvf->dync_lmt->base; in cn10k_lmtst_init()
88 struct otx2_nic *pfvf = dev; in cn10k_sq_aq_init() local
100 aq->sq.smq_rr_weight = mtu_to_dwrr_weight(pfvf, pfvf->tx_max_pktlen); in cn10k_sq_aq_init()
122 struct otx2_nic *pfvf = dev; in cn10k_refill_pool_ptrs() local
156 struct otx2_nic *pfvf = dev; in cn10k_sqe_flush() local
183 if (is_dev_otx2(pfvf->pdev)) in cn10k_free_all_ipolicers()
[all …]

otx2_txrx.c
219 pfvf->hw_ops->aura_freeptr(pfvf, qidx, iova & ~0x07ULL); in otx2_skb_add_frag()
260 pfvf->hw_ops->aura_freeptr(pfvf, qidx, in otx2_free_rcv_seg()
354 if (otx2_xdp_rcv_pkt_handler(pfvf, pfvf->xdp_prog, cqe, cq, in otx2_rcv_pkt_handler()
500 otx2_snd_pkt_handler(pfvf, cq, &pfvf->qset.sq[qidx], in otx2_tx_napi_handler()
595 filled_cnt = pfvf->hw_ops->refill_pool_ptrs(pfvf, rx_cq); in otx2_napi_handler()
1009 pfvf->hw_ops->sqe_flush(pfvf, sq, offset, qidx); in otx2_sq_append_tso()
1271 pfvf->hw_ops->sqe_flush(pfvf, sq, offset, qidx); in otx2_sq_append_skb()
1310 otx2_free_bufs(pfvf, pool, iova, pfvf->rbsize); in otx2_cleanup_rx_cqes()
1483 pfvf->hw_ops->sqe_flush(pfvf, sq, offset, qidx); in otx2_xdp_sq_append_pkt()
1558 otx2_dma_unmap_page(pfvf, iova, pfvf->rbsize, in otx2_xdp_rcv_pkt_handler()
[all …]

otx2_common.h
173 struct otx2_nic *pfvf; member
624 struct otx2_hw *hw = &pfvf->hw; in otx2_setup_dev_hw_settings()
632 if (is_96xx_A0(pfvf->pdev)) { in otx2_setup_dev_hw_settings()
636 pfvf->hw.cq_qcount_wait = 0x0; in otx2_setup_dev_hw_settings()
641 pfvf->hw.rq_skid = 600; in otx2_setup_dev_hw_settings()
644 if (is_96xx_B0(pfvf->pdev)) in otx2_setup_dev_hw_settings()
808 struct otx2_nic *pfvf = dev; in cn10k_aura_freeptr() local
830 struct otx2_nic *pfvf = dev; in otx2_aura_freeptr() local
964 smq = pfvf->qos.qid_to_sqmap[qidx - pfvf->hw.non_qos_queues]; in otx2_get_smq_idx()
975 return pfvf->hw.non_qos_queues + pfvf->hw.tc_tx_queues; in otx2_get_total_tx_queues()
[all …]

otx2_flows.c
49 mutex_lock(&pfvf->mbox.lock); in otx2_free_ntuple_mcam_entries()
62 mutex_unlock(&pfvf->mbox.lock); in otx2_free_ntuple_mcam_entries()
83 netdev_err(pfvf->netdev, in otx2_alloc_mcam_entries()
89 mutex_lock(&pfvf->mbox.lock); in otx2_alloc_mcam_entries()
152 netdev_info(pfvf->netdev, in otx2_alloc_mcam_entries()
203 netdev_info(pfvf->netdev, in otx2_mcam_entry_init()
270 pfvf->flow_cfg = devm_kzalloc(pfvf->dev, in otx2vf_mcam_flow_init()
273 if (!pfvf->flow_cfg) in otx2vf_mcam_flow_init()
276 pfvf->flow_cfg->dmacflt_bmap = devm_kcalloc(pfvf->dev, in otx2vf_mcam_flow_init()
282 flow_cfg = pfvf->flow_cfg; in otx2vf_mcam_flow_init()
[all …]

cn10k_macsec.c
142 struct mbox *mbox = &pfvf->mbox; in cn10k_mcs_alloc_rsrc()
202 struct mbox *mbox = &pfvf->mbox; in cn10k_mcs_free_rsrc()
261 struct mbox *mbox = &pfvf->mbox; in cn10k_mcs_write_rx_secy()
315 struct mbox *mbox = &pfvf->mbox; in cn10k_mcs_write_rx_flowid()
360 struct mbox *mbox = &pfvf->mbox; in cn10k_mcs_write_sc_cam()
428 struct mbox *mbox = &pfvf->mbox; in cn10k_mcs_write_rx_sa_plcy()
473 struct mbox *mbox = &pfvf->mbox; in cn10k_mcs_write_rx_sa_pn()
500 struct mbox *mbox = &pfvf->mbox; in cn10k_mcs_write_tx_secy()
1781 pfvf->macsec_cfg = cfg; in cn10k_mcs_init()
1813 kfree(pfvf->macsec_cfg); in cn10k_mcs_free()
[all …]

otx2_xsk.c
87 otx2_mbox_reset(&pfvf->mbox.mbox, 0); in otx2_xsk_ctx_disable()
93 struct otx2_qset *qset = &pfvf->qset; in otx2_clean_up_rq()
99 if (pfvf->flags & OTX2_FLAG_INTF_DOWN) in otx2_clean_up_rq()
104 otx2_cleanup_rx_cqes(pfvf, cq, qidx); in otx2_clean_up_rq()
106 pool = &pfvf->qset.pool[qidx]; in otx2_clean_up_rq()
107 iova = otx2_aura_allocptr(pfvf, qidx); in otx2_clean_up_rq()
110 otx2_free_bufs(pfvf, pool, iova, pfvf->rbsize); in otx2_clean_up_rq()
114 mutex_lock(&pfvf->mbox.lock); in otx2_clean_up_rq()
116 mutex_unlock(&pfvf->mbox.lock); in otx2_clean_up_rq()
207 sq = &pfvf->qset.sq[qidx]; in otx2_xsk_sq_append_pkt()
[all …]

cn10k.h
13 static inline int mtu_to_dwrr_weight(struct otx2_nic *pfvf, int mtu) in mtu_to_dwrr_weight() argument
20 weight = mtu / pfvf->hw.dwrr_mtu; in mtu_to_dwrr_weight()
21 if (mtu % pfvf->hw.dwrr_mtu) in mtu_to_dwrr_weight()
30 int cn10k_lmtst_init(struct otx2_nic *pfvf);
31 int cn10k_free_all_ipolicers(struct otx2_nic *pfvf);
32 int cn10k_alloc_matchall_ipolicer(struct otx2_nic *pfvf);
33 int cn10k_free_matchall_ipolicer(struct otx2_nic *pfvf);
34 int cn10k_set_matchall_ipolicer_rate(struct otx2_nic *pfvf,
38 int cn10k_alloc_leaf_profile(struct otx2_nic *pfvf, u16 *leaf);
41 int cn10k_free_leaf_profile(struct otx2_nic *pfvf, u16 leaf);
[all …]

otx2_ptp.c
58 mutex_lock(&pfvf->mbox.lock); in otx2_ptp_hw_adjtime()
67 mutex_unlock(&pfvf->mbox.lock); in otx2_ptp_hw_adjtime()
127 mutex_lock(&pfvf->mbox.lock); in otx2_ptp_hw_settime()
236 mutex_lock(&pfvf->mbox.lock); in otx2_ptp_tc_adjtime()
390 pfvf->ptp = NULL; in otx2_ptp_init()
417 ptp_ptr->nic = pfvf; in otx2_ptp_init()
480 pfvf->ptp = ptp_ptr; in otx2_ptp_init()
498 pfvf->ptp = NULL; in otx2_ptp_destroy()
504 if (!pfvf->ptp) in otx2_ptp_clock_index()
513 if (!pfvf->ptp) in otx2_ptp_tstamp2time()
[all …]

otx2_ptp.h
26 int otx2_ptp_init(struct otx2_nic *pfvf);
27 void otx2_ptp_destroy(struct otx2_nic *pfvf);
29 int otx2_ptp_clock_index(struct otx2_nic *pfvf);
30 int otx2_ptp_tstamp2time(struct otx2_nic *pfvf, u64 tstamp, u64 *tsns);

otx2_pf.c
462 pf = vf_mbox->pfvf; in otx2_pfvf_mbox_handler()
652 mbox->pfvf = pf; in otx2_pfvf_mbox_init()
861 pf = af_mbox->pfvf; in otx2_pfaf_mbox_handler()
1205 mbox->pfvf = pf; in otx2_pfaf_mbox_init()
1281 mutex_lock(&pfvf->mbox.lock); in otx2_reset_mac_stats()
1835 if (is_otx2_vf(pfvf->pcifunc)) in otx2_promisc_use_mce_list()
1896 struct otx2_nic *pfvf; in otx2_dim_work() local
1907 otx2_set_irq_coalesce(pfvf); in otx2_dim_work()
2389 mutex_lock(&pfvf->mbox.lock); in otx2_config_hw_rx_tstamp()
2421 mutex_lock(&pfvf->mbox.lock); in otx2_config_hw_tx_tstamp()
[all …]

qos.h
26 int otx2_qos_get_qid(struct otx2_nic *pfvf);
27 void otx2_qos_free_qid(struct otx2_nic *pfvf, int qidx);
28 int otx2_qos_enable_sq(struct otx2_nic *pfvf, int qidx);
29 void otx2_qos_disable_sq(struct otx2_nic *pfvf, int qidx);

otx2_devlink.h
13 struct otx2_nic *pfvf; member
17 int otx2_register_dl(struct otx2_nic *pfvf);
18 void otx2_unregister_dl(struct otx2_nic *pfvf);
/drivers/net/ethernet/marvell/octeontx2/af/
rvu_npa.c
70 struct rvu_pfvf *pfvf; in rvu_npa_aq_enq_inst() local
75 if (!pfvf->aura_ctx || req->aura_id >= pfvf->aura_ctx->qsize) in rvu_npa_aq_enq_inst()
218 if (!pfvf->pool_ctx || !pfvf->aura_ctx) in npa_lf_hwctx_disable()
311 kfree(pfvf->aura_bmap); in npa_ctx_free()
312 pfvf->aura_bmap = NULL; in npa_ctx_free()
315 pfvf->aura_ctx = NULL; in npa_ctx_free()
321 pfvf->pool_ctx = NULL; in npa_ctx_free()
335 struct rvu_pfvf *pfvf; in rvu_mbox_handler_npa_lf_alloc() local
374 if (!pfvf->aura_bmap) in rvu_mbox_handler_npa_lf_alloc()
385 if (!pfvf->pool_bmap) in rvu_mbox_handler_npa_lf_alloc()
[all …]

rvu.c
378 attach ? pfvf->sso++ : pfvf->sso--; in rvu_update_rsrc_map()
382 attach ? pfvf->ssow++ : pfvf->ssow--; in rvu_update_rsrc_map()
386 attach ? pfvf->timlfs++ : pfvf->timlfs--; in rvu_update_rsrc_map()
390 attach ? pfvf->cptlfs++ : pfvf->cptlfs--; in rvu_update_rsrc_map()
394 attach ? pfvf->cpt1_lfs++ : pfvf->cpt1_lfs--; in rvu_update_rsrc_map()
629 pfvf->msix_lfmap = devm_kcalloc(rvu->dev, pfvf->msix.max, in rvu_setup_msix_resources()
790 ether_addr_copy(pfvf->default_mac, pfvf->mac_addr); in rvu_setup_pfvf_macaddress()
806 ether_addr_copy(pfvf->default_mac, pfvf->mac_addr); in rvu_setup_pfvf_macaddress()
1259 return pfvf->sso; in rvu_get_rsrc_mapcount()
1261 return pfvf->ssow; in rvu_get_rsrc_mapcount()
[all …]

rvu_nix.c
360 pfvf->tx_chan_base = pfvf->rx_chan_base; in nix_interface_init()
448 pfvf->tx_chan_base = pfvf->rx_chan_base; in nix_interface_init()
449 pfvf->tx_chan_cnt = pfvf->rx_chan_cnt; in nix_interface_init()
462 pfvf->rx_chan_base, pfvf->mac_addr); in nix_interface_init()
1071 if (!pfvf->rq_ctx || req->qidx >= pfvf->rq_ctx->qsize) in rvu_nix_blk_aq_enq_inst()
1075 if (!pfvf->sq_ctx || req->qidx >= pfvf->sq_ctx->qsize) in rvu_nix_blk_aq_enq_inst()
1079 if (!pfvf->cq_ctx || req->qidx >= pfvf->cq_ctx->qsize) in rvu_nix_blk_aq_enq_inst()
1384 if (!pfvf->cq_ctx || !pfvf->sq_ctx || !pfvf->rq_ctx) in nix_lf_hwctx_disable()
4585 pfvf->minlen && pfvf->minlen < minlen) in nix_find_link_frs()
4594 pfvf->minlen && pfvf->minlen < minlen) in nix_find_link_frs()
[all …]

rvu_switch.c
13 struct rvu_pfvf *pfvf = rvu_get_pfvf(rvu, pcifunc); in rvu_switch_enable_lbk_link() local
16 nix_hw = get_nix_hw(rvu->hw, pfvf->nix_blkaddr); in rvu_switch_enable_lbk_link()
18 rvu_nix_tx_tl2_cfg(rvu, pfvf->nix_blkaddr, pcifunc, in rvu_switch_enable_lbk_link()
27 struct rvu_pfvf *pfvf; in rvu_switch_install_rx_rule() local
29 pfvf = rvu_get_pfvf(rvu, pcifunc); in rvu_switch_install_rx_rule()
34 if (!test_bit(NIXLF_INITIALIZED, &pfvf->flags)) in rvu_switch_install_rx_rule()
42 req.channel = pfvf->rx_chan_base; in rvu_switch_install_rx_rule()
44 req.intf = pfvf->nix_rx_intf; in rvu_switch_install_rx_rule()
55 struct rvu_pfvf *pfvf; in rvu_switch_install_tx_rule() local
58 pfvf = rvu_get_pfvf(rvu, pcifunc); in rvu_switch_install_tx_rule()
[all …]

rvu_sdp.c
56 struct rvu_pfvf *pfvf; in rvu_sdp_init() local
61 pfvf = &rvu->pf[sdp_pf_num[0]]; in rvu_sdp_init()
62 pfvf->sdp_info = &rvu->fwdata->channel_data.info; in rvu_sdp_init()
72 pfvf = &rvu->pf[sdp_pf_num[i]]; in rvu_sdp_init()
74 pfvf->sdp_info = devm_kzalloc(rvu->dev, in rvu_sdp_init()
77 if (!pfvf->sdp_info) { in rvu_sdp_init()
97 struct rvu_pfvf *pfvf = rvu_get_pfvf(rvu, req->hdr.pcifunc); in rvu_mbox_handler_set_sdp_chan_info() local
99 memcpy(pfvf->sdp_info, &req->info, sizeof(struct sdp_node_info)); in rvu_mbox_handler_set_sdp_chan_info()

rvu_cn10k.c
134 if (!pfvf->lmt_base_addr) in rvu_update_lmtaddr()
135 pfvf->lmt_base_addr = val; in rvu_update_lmtaddr()
223 if (!pfvf->lmt_map_ent_w1) in rvu_mbox_handler_lmtst_tbl_setup()
224 pfvf->lmt_map_ent_w1 = val; in rvu_mbox_handler_lmtst_tbl_setup()
263 if (pfvf->lmt_base_addr || pfvf->lmt_map_ent_w1) { in rvu_reset_lmt_map_tbl()
269 if (pfvf->lmt_base_addr) { in rvu_reset_lmt_map_tbl()
271 &pfvf->lmt_base_addr, in rvu_reset_lmt_map_tbl()
277 pfvf->lmt_base_addr = 0; in rvu_reset_lmt_map_tbl()
282 if (pfvf->lmt_map_ent_w1) { in rvu_reset_lmt_map_tbl()
285 &pfvf->lmt_map_ent_w1, in rvu_reset_lmt_map_tbl()
[all …]

rvu_rep.c
45 ether_addr_copy(pfvf->mac_addr, event->evt_data.mac); in rvu_rep_up_notify()
239 struct rvu_pfvf *pfvf; in rvu_rep_install_rx_rule() local
242 pfvf = rvu_get_pfvf(rvu, pcifunc); in rvu_rep_install_rx_rule()
270 req.intf = pfvf->nix_rx_intf; in rvu_rep_install_rx_rule()
280 struct rvu_pfvf *pfvf; in rvu_rep_install_tx_rule() local
285 pfvf = rvu_get_pfvf(rvu, pcifunc); in rvu_rep_install_tx_rule()
294 lbkid = pfvf->nix_blkaddr == BLKADDR_NIX0 ? 0 : 1; in rvu_rep_install_tx_rule()
305 req.intf = pfvf->nix_tx_intf; in rvu_rep_install_tx_rule()
415 struct rvu_pfvf *pfvf; in rvu_rep_pf_init() local
417 pfvf = rvu_get_pfvf(rvu, pcifunc); in rvu_rep_pf_init()
[all …]

Completed in 97 milliseconds
