/drivers/net/ethernet/marvell/octeon_ep/
octep_ctrl_mbox.c
   135  u8 __iomem *qbuf;                                in octep_write_mbox_data() (local)
   139  qbuf = (q->hw_q + *pi);                          in octep_write_mbox_data()
   142  memcpy_toio(qbuf, buf, w_sz);                    in octep_write_mbox_data()
   147  memcpy_toio(qbuf, buf, cp_sz);                   in octep_write_mbox_data()
   153  qbuf = (q->hw_q + *pi);                          in octep_write_mbox_data()
   154  memcpy_toio(qbuf, buf, w_sz);                    in octep_write_mbox_data()
   200  u8 __iomem *qbuf;                                in octep_read_mbox_data() (local)
   204  qbuf = (q->hw_q + *ci);                          in octep_read_mbox_data()
   207  memcpy_fromio(buf, qbuf, r_sz);                  in octep_read_mbox_data()
   212  memcpy_fromio(buf, qbuf, cp_sz);                 in octep_read_mbox_data()
   [all …]
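The hits above trace a circular write into a device-mapped mailbox ring: qbuf is re-pointed at hw_q plus the producer index, and the copy is issued in two chunks when it would run past the end of the queue. A minimal sketch of that wrap-around pattern, assuming byte-granular indices and omitting the driver's free-space accounting; every name except memcpy_toio() is hypothetical:

```c
#include <linux/io.h>
#include <linux/minmax.h>
#include <linux/types.h>

struct demo_mbox_q {
	u8 __iomem *hw_q;	/* base of the device-mapped ring */
	u32 sz;			/* ring size in bytes */
};

static void demo_mbox_write(struct demo_mbox_q *q, u32 *pi,
			    const void *buf, u32 len)
{
	u8 __iomem *qbuf = q->hw_q + *pi;
	u32 cp_sz = min(len, q->sz - *pi);	/* bytes until the wrap point */

	memcpy_toio(qbuf, buf, cp_sz);		/* first (possibly only) chunk */
	*pi = (*pi + cp_sz) % q->sz;
	len -= cp_sz;

	if (len) {	/* wrapped: write the remainder from the ring start */
		memcpy_toio(q->hw_q, (const u8 *)buf + cp_sz, len);
		*pi = len;
	}
}
```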
/drivers/infiniband/hw/erdma/
erdma_cmdq.c
    98  if (!sq->qbuf)                                                        in erdma_cmdq_sq_init()
   118  sq->qbuf, sq->qbuf_dma_addr);                                         in erdma_cmdq_sq_init()
   131  if (!cq->qbuf)                                                        in erdma_cmdq_cq_init()
   151  dma_free_coherent(&dev->pdev->dev, cq->depth << CQE_SHIFT, cq->qbuf,  in erdma_cmdq_cq_init()
   210  cmdq->cq.qbuf, cmdq->cq.qbuf_dma_addr);                               in erdma_cmdq_init()
   216  cmdq->sq.qbuf, cmdq->sq.qbuf_dma_addr);                               in erdma_cmdq_init()
   237  cmdq->sq.qbuf, cmdq->sq.qbuf_dma_addr);                               in erdma_cmdq_destroy()
   242  cmdq->cq.qbuf, cmdq->cq.qbuf_dma_addr);                               in erdma_cmdq_destroy()
   249  __be32 *cqe = get_queue_entry(cmdq->cq.qbuf, cmdq->cq.ci,             in get_next_valid_cmdq_cqe()
   267  wqe = get_queue_entry(cmdq->sq.qbuf, cmdq->sq.pi, cmdq->sq.depth,     in push_cmdq_sqe()
   [all …]
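Both get_next_valid_cmdq_cqe() and push_cmdq_sqe() index the command queues through get_queue_entry(), and the name of the former implies a validity check on the __be32 CQE head before a completion is consumed. A common way to do that check is an owner/phase bit that the producer flips on each lap of the ring; the sketch below uses that scheme, but the mask, the phase handling, and all demo_* names are assumptions rather than erdma's actual definitions:

```c
#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/kernel.h>
#include <asm/byteorder.h>

#define DEMO_CQE_OWNER_MASK	BIT(31)	/* assumed owner/phase bit position */

static __be32 *demo_next_valid_cqe(void *qbuf, u16 ci, u32 depth,
				   u32 shift, u8 phase)
{
	/* same indexing as get_queue_entry(): wrap, then scale */
	__be32 *cqe = qbuf + (((u32)ci & (depth - 1)) << shift);
	u32 hdr = be32_to_cpu(READ_ONCE(*cqe));

	/* entry is valid once the producer's phase matches the consumer's */
	return FIELD_GET(DEMO_CQE_OWNER_MASK, hdr) == phase ? cqe : NULL;
}
```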
erdma.h
    22  void *qbuf;                                      (member)
    41  void *qbuf;                                      (member)
    57  void *qbuf;                                      (member)
   220  static inline void *get_queue_entry(void *qbuf, u32 idx, u32 depth, u32 shift)  in get_queue_entry() (argument)
   224  return qbuf + (idx << shift);                    in get_queue_entry()
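Only the lines that mention qbuf are listed, but the depth parameter strongly suggests the helper wraps a free-running index before scaling it by the entry size, which is also why these queue depths are powers of two. A standalone (userspace) illustration under that reading, with a shift of 5 standing in for a 32-byte entry:

```c
#include <stdint.h>
#include <stdio.h>

/* wrap a free-running index, then scale by the entry size (1 << shift) */
static void *get_queue_entry(void *qbuf, uint32_t idx, uint32_t depth,
			     uint32_t shift)
{
	idx &= depth - 1;	/* cheap modulo: depth must be a power of two */
	return (char *)qbuf + ((size_t)idx << shift);
}

int main(void)
{
	char ring[8 << 5];	/* 8 entries of 32 bytes each */

	/* free-running index 9 lands in slot 1, i.e. byte offset 32 */
	printf("%td\n", (char *)get_queue_entry(ring, 9, 8, 5) - ring);
	return 0;
}
```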
erdma_eq.c
    24  u64 *eqe = get_queue_entry(eq->qbuf, eq->ci, eq->depth, EQE_SHIFT);   in get_next_valid_eqe()
    87  eq->qbuf = dma_alloc_coherent(&dev->pdev->dev, buf_size,              in erdma_eq_common_init()
    89  if (!eq->qbuf)                                                        in erdma_eq_common_init()
   105  dma_free_coherent(&dev->pdev->dev, buf_size, eq->qbuf,                in erdma_eq_common_init()
   114  dma_free_coherent(&dev->pdev->dev, eq->depth << EQE_SHIFT, eq->qbuf,  in erdma_eq_destroy()
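The event-queue hits cover the whole buffer lifecycle: one dma_alloc_coherent() of depth << EQE_SHIFT bytes at init, and matching dma_free_coherent() calls on the init error path and at destroy time. A sketch of that pairing with illustrative types; the 16-byte entry size is an assumption:

```c
#include <linux/dma-mapping.h>
#include <linux/types.h>

#define DEMO_EQE_SHIFT	4	/* assumed: 16-byte event-queue entries */

struct demo_eq {
	void *qbuf;			/* CPU address of the coherent buffer */
	dma_addr_t qbuf_dma_addr;	/* device address for queue setup */
	u32 depth;			/* number of entries, a power of two */
};

static int demo_eq_init(struct device *dev, struct demo_eq *eq, u32 depth)
{
	eq->depth = depth;
	eq->qbuf = dma_alloc_coherent(dev, depth << DEMO_EQE_SHIFT,
				      &eq->qbuf_dma_addr, GFP_KERNEL);
	return eq->qbuf ? 0 : -ENOMEM;
}

static void demo_eq_destroy(struct device *dev, struct demo_eq *eq)
{
	/* the size must match the allocation exactly */
	dma_free_coherent(dev, eq->depth << DEMO_EQE_SHIFT, eq->qbuf,
			  eq->qbuf_dma_addr);
}
```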
erdma_cq.c
    11  __be32 *cqe = get_queue_entry(cq->kern_cq.qbuf, cq->kern_cq.ci,   in get_next_valid_cqe()
   245  cqe = get_queue_entry(cq->kern_cq.qbuf, cur_cq_ci, cq->depth,     in erdma_remove_cqes_of_qp()
   251  dst_cqe = get_queue_entry(cq->kern_cq.qbuf,                       in erdma_remove_cqes_of_qp()
erdma_verbs.c
  1305  cq->kern_cq.qbuf, cq->kern_cq.qbuf_dma_addr);    in erdma_destroy_cq()
  1899  cq->kern_cq.qbuf =                               in erdma_init_kernel_cq()
  1902  if (!cq->kern_cq.qbuf)                           in erdma_init_kernel_cq()
  1918  cq->kern_cq.qbuf, cq->kern_cq.qbuf_dma_addr);    in erdma_init_kernel_cq()
  1986  cq->kern_cq.qbuf, cq->kern_cq.qbuf_dma_addr);    in erdma_create_cq()
erdma_verbs.h
   320  void *qbuf;                                      (member)
/drivers/net/ethernet/intel/ice/
ice_lag.c
   431  qbuf->queue_info[count].tc = tc;                 in ice_lag_qbuf_recfg()
   524  struct ice_aqc_cfg_txqs_buf *qbuf;               in ice_lag_move_vf_node_tc() (local)
   567  if (!qbuf) {                                     in ice_lag_move_vf_node_tc()
   586  kfree(qbuf);                                     in ice_lag_move_vf_node_tc()
   610  kfree(qbuf);                                     in ice_lag_move_vf_node_tc()
   957  if (!qbuf) {                                     in ice_lag_reclaim_vf_tc()
   977  kfree(qbuf);                                     in ice_lag_reclaim_vf_tc()
   999  kfree(qbuf);                                     in ice_lag_reclaim_vf_tc()
  2010  if (!qbuf) {                                     in ice_lag_move_vf_nodes_tc_sync()
  2029  kfree(qbuf);                                     in ice_lag_move_vf_nodes_tc_sync()
  [all …]
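ice_lag allocates a temporary ice_aqc_cfg_txqs_buf for each move, fills queue_info[] entry by entry (line 431), and kfree()s it on both the success and error paths. The trailing array makes this the classic flexible-array sizing problem; a sketch with hypothetical entry fields, where struct_size() is the real kernel helper that guards the header-plus-array multiplication:

```c
#include <linux/overflow.h>
#include <linux/slab.h>
#include <linux/types.h>

struct demo_q_entry {
	__le16 q_handle;
	u8 tc;			/* traffic class, as set on line 431 */
};

struct demo_cfg_txqs_buf {
	__le32 src_parent_teid;
	struct demo_q_entry queue_info[];	/* one slot per queue */
};

static struct demo_cfg_txqs_buf *demo_alloc_qbuf(u16 numq)
{
	struct demo_cfg_txqs_buf *qbuf;

	/* struct_size() = header + numq * entry, with overflow checking */
	qbuf = kzalloc(struct_size(qbuf, queue_info, numq), GFP_KERNEL);
	return qbuf;	/* caller checks for NULL and kfree()s when done */
}
```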
/drivers/net/wireless/ath/ath12k/
dp_rx.c
   620  dma_unmap_single(dp->ab->dev, rx_tid->qbuf.paddr_aligned, rx_tid->qbuf.size,  in ath12k_dp_reo_cmd_free()
   622  kfree(rx_tid->qbuf.vaddr);                       in ath12k_dp_reo_cmd_free()
   623  rx_tid->qbuf.vaddr = NULL;                       in ath12k_dp_reo_cmd_free()
   704  dma_unmap_single(ab->dev, rx_tid->qbuf.paddr_aligned, rx_tid->qbuf.size,      in ath12k_dp_reo_cache_flush()
   706  kfree(rx_tid->qbuf.vaddr);                       in ath12k_dp_reo_cache_flush()
   707  rx_tid->qbuf.vaddr = NULL;                       in ath12k_dp_reo_cache_flush()
   766  dma_unmap_single(ab->dev, rx_tid->qbuf.paddr_aligned, rx_tid->qbuf.size,      in ath12k_dp_rx_tid_del_func()
   768  kfree(rx_tid->qbuf.vaddr);                       in ath12k_dp_rx_tid_del_func()
   769  rx_tid->qbuf.vaddr = NULL;                       in ath12k_dp_rx_tid_del_func()
   850  kfree(rx_tid->qbuf.vaddr);                       in ath12k_dp_rx_peer_tid_delete()
   [all …]
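Every teardown site here repeats the same three steps: dma_unmap_single() on the aligned device address, kfree() on the CPU pointer, then NULL the pointer so a later cleanup path cannot double-free. A sketch of both halves of that lifecycle; the field names follow the excerpt, while the mapping direction and error handling are assumptions:

```c
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/types.h>

struct demo_reoq_buf {
	void *vaddr;			/* kmalloc'd CPU buffer */
	dma_addr_t paddr_aligned;	/* streaming-DMA mapping of it */
	u32 size;
};

static int demo_reoq_map(struct device *dev, struct demo_reoq_buf *qbuf,
			 u32 size)
{
	qbuf->vaddr = kzalloc(size, GFP_KERNEL);
	if (!qbuf->vaddr)
		return -ENOMEM;

	qbuf->size = size;
	qbuf->paddr_aligned = dma_map_single(dev, qbuf->vaddr, size,
					     DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, qbuf->paddr_aligned)) {
		kfree(qbuf->vaddr);
		qbuf->vaddr = NULL;
		return -ENOMEM;
	}
	return 0;
}

static void demo_reoq_free(struct device *dev, struct demo_reoq_buf *qbuf)
{
	if (!qbuf->vaddr)	/* already torn down elsewhere */
		return;

	dma_unmap_single(dev, qbuf->paddr_aligned, qbuf->size,
			 DMA_BIDIRECTIONAL);
	kfree(qbuf->vaddr);
	qbuf->vaddr = NULL;	/* makes repeated teardown harmless */
}
```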
dp_rx.h
    19  struct ath12k_reoq_buf qbuf;                     (member)
/drivers/net/ethernet/chelsio/cxgb4vf/
t4vf_hw.c
  1300  u16 qbuf[3];                                     in t4vf_config_rss_range() (local)
  1301  u16 *qbp = qbuf;                                 in t4vf_config_rss_range()
  1305  qbuf[0] = qbuf[1] = qbuf[2] = 0;                 in t4vf_config_rss_range()
  1312  *qp++ = cpu_to_be32(FW_RSS_IND_TBL_CMD_IQ0_V(qbuf[0]) |   in t4vf_config_rss_range()
  1313  FW_RSS_IND_TBL_CMD_IQ1_V(qbuf[1]) |              in t4vf_config_rss_range()
  1314  FW_RSS_IND_TBL_CMD_IQ2_V(qbuf[2]));              in t4vf_config_rss_range()
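t4vf_config_rss_range() stages the RSS indirection table three queue IDs at a time: qbuf[3] is zeroed up front (the range need not be a multiple of three), qbp walks it as entries are fetched, and one big-endian firmware word packs all three via the FW_RSS_IND_TBL_CMD_IQ{0,1,2}_V field macros. A sketch of the packing step with assumed 10-bit fields at shifts 20/10/0; the authoritative layout is in t4fw_api.h:

```c
#include <linux/types.h>
#include <asm/byteorder.h>

/* assumed field positions; the real macros live in t4fw_api.h */
#define DEMO_IQ0_V(x)	((u32)(x) << 20)
#define DEMO_IQ1_V(x)	((u32)(x) << 10)
#define DEMO_IQ2_V(x)	((u32)(x) << 0)

/* pack three queue IDs (assumed to fit their fields) into one word */
static __be32 demo_pack_rss_word(const u16 qbuf[3])
{
	return cpu_to_be32(DEMO_IQ0_V(qbuf[0]) |
			   DEMO_IQ1_V(qbuf[1]) |
			   DEMO_IQ2_V(qbuf[2]));
}
```

Zero-filling the staging array first means any unused trailing slots pack as queue 0 rather than stale data.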
/drivers/net/wireless/mediatek/mt76/
dma.c
   645  struct mt76_queue_buf qbuf = {};                 in mt76_dma_rx_fill_buf() (local)
   656  qbuf.addr = page_pool_get_dma_addr(virt_to_head_page(buf)) +   in mt76_dma_rx_fill_buf()
   659  qbuf.len = len - q->buf_offset;                  in mt76_dma_rx_fill_buf()
   660  qbuf.skip_unmap = false;                         in mt76_dma_rx_fill_buf()
   661  if (mt76_dma_add_rx_buf(dev, q, &qbuf, buf) < 0) {   in mt76_dma_rx_fill_buf()
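The refill loop builds one mt76_queue_buf per page-pool fragment: the DMA address is the pool page's pre-mapped address plus the fragment's offset, the usable length excludes the queue's headroom (q->buf_offset), and skip_unmap stays false as in the excerpt. A sketch of that computation against the generic page_pool API, assuming a pool created with DMA mapping enabled; the demo_* type stands in for mt76's own:

```c
#include <net/page_pool/helpers.h>
#include <linux/errno.h>
#include <linux/types.h>

struct demo_queue_buf {
	dma_addr_t addr;
	u16 len;
	bool skip_unmap;
};

static int demo_fill_one(struct page_pool *pool, struct demo_queue_buf *qbuf,
			 u32 len, u32 buf_offset)
{
	unsigned int offset;
	struct page *page = page_pool_dev_alloc_frag(pool, &offset, len);

	if (!page)
		return -ENOMEM;

	/* pages from a PP_FLAG_DMA_MAP pool are already DMA-mapped */
	qbuf->addr = page_pool_get_dma_addr(page) + offset + buf_offset;
	qbuf->len = len - buf_offset;	/* headroom is not device-visible */
	qbuf->skip_unmap = false;	/* as in the excerpt */
	return 0;
}
```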