| /drivers/net/ethernet/cisco/enic/ |
| A D | vnic_rq.c |
    25   rq->bufs[i] = kzalloc(VNIC_RQ_BUF_BLK_SZ(count), GFP_KERNEL);  in vnic_rq_alloc_bufs()
    26   if (!rq->bufs[i])  in vnic_rq_alloc_bufs()
    31   buf = rq->bufs[i];  in vnic_rq_alloc_bufs()
    37   buf->next = rq->bufs[0];  in vnic_rq_alloc_bufs()
    40   buf->next = rq->bufs[i + 1];  in vnic_rq_alloc_bufs()
    48   rq->to_use = rq->to_clean = rq->bufs[0];  in vnic_rq_alloc_bufs()
    63   if (rq->bufs[i]) {  in vnic_rq_free()
    64   kfree(rq->bufs[i]);  in vnic_rq_free()
    65   rq->bufs[i] = NULL;  in vnic_rq_free()
   121   &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES(count)]  in vnic_rq_init_start()
   [all …]
|
| A D | vnic_wq.c |
    25   wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ(count), GFP_KERNEL);  in vnic_wq_alloc_bufs()
    26   if (!wq->bufs[i])  in vnic_wq_alloc_bufs()
    31   buf = wq->bufs[i];  in vnic_wq_alloc_bufs()
    37   buf->next = wq->bufs[0];  in vnic_wq_alloc_bufs()
    41   buf->next = wq->bufs[i + 1];  in vnic_wq_alloc_bufs()
    51   wq->to_use = wq->to_clean = wq->bufs[0];  in vnic_wq_alloc_bufs()
    66   if (wq->bufs[i]) {  in vnic_wq_free()
    67   kfree(wq->bufs[i]);  in vnic_wq_free()
    68   wq->bufs[i] = NULL;  in vnic_wq_free()
   140   &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES(count)]  in enic_wq_init_start()
   [all …]
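
The enic entries above show the Cisco VNIC descriptor-ring idiom that fnic and snic repeat below: bookkeeping entries are allocated in fixed-size blocks, each entry's next pointer chains to its neighbour, a block boundary hops to the next block, and the final entry wraps to bufs[0] so the to_use/to_clean cursors can walk the ring indefinitely. A minimal sketch of that linking scheme, with illustrative names (ring, ring_buf, ENTRIES_PER_BLK) rather than the driver's own types:

```c
#include <linux/slab.h>

#define ENTRIES_PER_BLK 64

struct ring_buf {
	struct ring_buf *next;
	unsigned int index;
};

struct ring {
	unsigned int count;		/* total entries in the ring */
	struct ring_buf *bufs[32];	/* one pointer per allocated block */
	struct ring_buf *to_use;
	struct ring_buf *to_clean;
};

static int ring_alloc_bufs(struct ring *r)
{
	unsigned int blks = DIV_ROUND_UP(r->count, ENTRIES_PER_BLK);
	unsigned int i, j;
	struct ring_buf *buf;

	for (i = 0; i < blks; i++) {
		r->bufs[i] = kcalloc(ENTRIES_PER_BLK, sizeof(*buf), GFP_KERNEL);
		if (!r->bufs[i])
			return -ENOMEM;	/* caller frees earlier blocks */
	}

	for (i = 0; i < blks; i++) {
		for (j = 0, buf = r->bufs[i]; j < ENTRIES_PER_BLK; j++, buf++) {
			buf->index = i * ENTRIES_PER_BLK + j;
			if (buf->index == r->count - 1) {
				buf->next = r->bufs[0];	/* close the ring */
				break;
			}
			if (j + 1 == ENTRIES_PER_BLK)
				buf->next = r->bufs[i + 1]; /* hop blocks */
			else
				buf->next = buf + 1;
		}
	}

	r->to_use = r->to_clean = r->bufs[0];
	return 0;
}
```

The bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES(count)] expressions fall out of this layout: dividing a ring index by the block size selects the block, and the remainder indexes the entry within it.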
|
| /drivers/gpu/drm/xe/tests/ |
| A D | xe_guc_buf_kunit.c |
   112   struct xe_guc_buf *bufs;  in test_granular() local
   116   bufs = kunit_kcalloc(test, dwords, sizeof(*bufs), GFP_KERNEL);  in test_granular()
   117   KUNIT_EXPECT_NOT_NULL(test, bufs);  in test_granular()
   120   bufs[n] = xe_guc_buf_reserve(cache, 1);  in test_granular()
   126   xe_guc_buf_release(bufs[n]);  in test_granular()
   132   struct xe_guc_buf *bufs;  in test_unique() local
   136   bufs = kunit_kcalloc(test, dwords, sizeof(*bufs), GFP_KERNEL);  in test_unique()
   137   KUNIT_EXPECT_NOT_NULL(test, bufs);  in test_unique()
   140   bufs[n] = xe_guc_buf_reserve(cache, 1);  in test_unique()
   145   xe_guc_buf_cpu_ptr(bufs[m]), "n=%d, m=%d", n, m);  in test_unique()
   [all …]
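
The xe test leans on kunit_kcalloc(), whose allocation is owned by the test and freed automatically when it ends, so neither test body frees bufs. A standalone sketch of the same uniqueness check, with the driver-specific xe_guc_buf_reserve()/xe_guc_buf_cpu_ptr() calls replaced by plain test-managed allocations so it compiles without the xe driver:

```c
#include <kunit/test.h>

static void bufs_are_unique(struct kunit *test)
{
	int dwords = 16, n, m;
	void **bufs;

	/* test-managed allocation: freed automatically on test exit */
	bufs = kunit_kcalloc(test, dwords, sizeof(*bufs), GFP_KERNEL);
	KUNIT_EXPECT_NOT_NULL(test, bufs);

	for (n = 0; n < dwords; n++) {
		/* stand-in for xe_guc_buf_reserve(cache, 1) */
		bufs[n] = kunit_kzalloc(test, 4, GFP_KERNEL);
		KUNIT_ASSERT_NOT_NULL(test, bufs[n]);
	}

	/* every reservation must hand back a distinct buffer */
	for (n = 0; n < dwords; n++)
		for (m = n + 1; m < dwords; m++)
			KUNIT_EXPECT_PTR_NE_MSG(test, bufs[n], bufs[m],
						"n=%d, m=%d", n, m);
}

static struct kunit_case buf_cases[] = {
	KUNIT_CASE(bufs_are_unique),
	{}
};

static struct kunit_suite buf_suite = {
	.name = "buf-uniqueness-sketch",
	.test_cases = buf_cases,
};
kunit_test_suite(buf_suite);
```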
|
| /drivers/scsi/fnic/ |
| A D | vnic_rq.c |
    22   rq->bufs[i] = kzalloc(VNIC_RQ_BUF_BLK_SZ, GFP_ATOMIC);  in vnic_rq_alloc_bufs()
    23   if (!rq->bufs[i]) {  in vnic_rq_alloc_bufs()
    30   buf = rq->bufs[i];  in vnic_rq_alloc_bufs()
    36   buf->next = rq->bufs[0];  in vnic_rq_alloc_bufs()
    39   buf->next = rq->bufs[i + 1];  in vnic_rq_alloc_bufs()
    47   rq->to_use = rq->to_clean = rq->bufs[0];  in vnic_rq_alloc_bufs()
    63   kfree(rq->bufs[i]);  in vnic_rq_free()
    64   rq->bufs[i] = NULL;  in vnic_rq_free()
   118   &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES]  in vnic_rq_init()
   174   &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES]  in vnic_rq_clean()
|
| A D | vnic_wq.c |
    42   wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ, GFP_ATOMIC);  in vnic_wq_alloc_bufs()
    43   if (!wq->bufs[i]) {  in vnic_wq_alloc_bufs()
    50   buf = wq->bufs[i];  in vnic_wq_alloc_bufs()
    56   buf->next = wq->bufs[0];  in vnic_wq_alloc_bufs()
    59   buf->next = wq->bufs[i + 1];  in vnic_wq_alloc_bufs()
    67   wq->to_use = wq->to_clean = wq->bufs[0];  in vnic_wq_alloc_bufs()
    82   kfree(wq->bufs[i]);  in vnic_wq_free()
    83   wq->bufs[i] = NULL;  in vnic_wq_free()
   160   &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES]  in vnic_wq_init_start()
   227   wq->to_use = wq->to_clean = wq->bufs[0];  in vnic_wq_clean()
|
| /drivers/scsi/snic/ |
| A D | vnic_wq.c |
    36   wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ, GFP_ATOMIC);  in vnic_wq_alloc_bufs()
    37   if (!wq->bufs[i]) {  in vnic_wq_alloc_bufs()
    45   buf = wq->bufs[i];  in vnic_wq_alloc_bufs()
    51   buf->next = wq->bufs[0];  in vnic_wq_alloc_bufs()
    54   buf->next = wq->bufs[i + 1];  in vnic_wq_alloc_bufs()
    62   wq->to_use = wq->to_clean = wq->bufs[0];  in vnic_wq_alloc_bufs()
    77   kfree(wq->bufs[i]);  in svnic_wq_free()
    78   wq->bufs[i] = NULL;  in svnic_wq_free()
   159   &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES(count)]  in vnic_wq_init_start()
   216   wq->to_use = wq->to_clean = wq->bufs[0];  in svnic_wq_clean()
|
| /drivers/net/ethernet/marvell/prestera/ |
| A D | prestera_rxtx.c |
    81   struct prestera_sdma_buf *bufs;  member
    86   struct prestera_sdma_buf *bufs;  member
   297   buf = &ring->bufs[buf_idx];  in prestera_sdma_rx_poll()
   345   if (!ring->bufs)  in prestera_sdma_rx_fini()
   383   if (!ring->bufs)  in prestera_sdma_rx_init()
   388   tail = &ring->bufs[bnum - 1];  in prestera_sdma_rx_init()
   389   head = &ring->bufs[0];  in prestera_sdma_rx_init()
   533   if (!tx_ring->bufs)  in prestera_sdma_tx_init()
   536   tail = &tx_ring->bufs[bnum - 1];  in prestera_sdma_tx_init()
   537   head = &tx_ring->bufs[0];  in prestera_sdma_tx_init()
   [all …]
|
| /drivers/scsi/arm/ |
| A D | arm_scsi.h |
    35   int bufs = SCp->buffers_residual;  in copy_SCp_to_sg() local
    40   BUG_ON(bufs + 1 > max);  in copy_SCp_to_sg()
    44   if (bufs) {  in copy_SCp_to_sg()
    48   for_each_sg(sg_next(SCp->buffer), src_sg, bufs, i)  in copy_SCp_to_sg()
    53   return bufs + 1;  in copy_SCp_to_sg()
|
| A D | powertec.c |
   141   int bufs, map_dir, dma_dir;  in powertecscsi_dma_setup() local
   143   bufs = copy_SCp_to_sg(&info->sg[0], SCp, NR_SG);  in powertecscsi_dma_setup()
   153   dma_map_sg(dev, info->sg, bufs, map_dir);  in powertecscsi_dma_setup()
   156   set_dma_sg(dmach, info->sg, bufs);  in powertecscsi_dma_setup()
|
| A D | cumana_2.c |
   169   int bufs, map_dir, dma_dir, alatch_dir;  in cumanascsi_2_dma_setup() local
   171   bufs = copy_SCp_to_sg(&info->sg[0], SCp, NR_SG);  in cumanascsi_2_dma_setup()
   183   dma_map_sg(dev, info->sg, bufs, map_dir);  in cumanascsi_2_dma_setup()
   186   set_dma_sg(dmach, info->sg, bufs);  in cumanascsi_2_dma_setup()
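
powertec.c and cumana_2.c show the full DMA setup sequence: copy_SCp_to_sg() flattens the mid-command SCSI pointer into a local scatterlist, dma_map_sg() makes it device-visible, and the ARM-specific set_dma_sg() programs the controller. A sketch of the generic portion; note that modern callers must use dma_map_sg()'s return value (entries can be coalesced by an IOMMU) rather than the original count:

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Map 'nents' populated scatterlist entries for a transfer; 'nents'
 * would come from a helper such as copy_SCp_to_sg().
 */
static int map_for_dma(struct device *dev, struct scatterlist *sg,
		       int nents, bool to_device)
{
	enum dma_data_direction dir = to_device ? DMA_TO_DEVICE
						: DMA_FROM_DEVICE;
	int mapped = dma_map_sg(dev, sg, nents, dir);

	if (!mapped)			/* 0 means the mapping failed */
		return -EIO;

	/* ... program the DMA engine with 'mapped' entries here ... */

	/* from the completion path, unmap with the same original nents: */
	dma_unmap_sg(dev, sg, nents, dir);
	return mapped;
}
```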
|
| /drivers/soc/fsl/qbman/ |
| A D | bman.c |
   113   struct bm_buffer bufs[8];  member
   151   struct bm_buffer bufs[8];  member
   738   int bman_release(struct bman_pool *pool, const struct bm_buffer *bufs, u8 num)  in bman_release() argument
   772   bm_buffer_set64(r->bufs, bm_buffer_get64(bufs));  in bman_release()
   773   bm_buffer_set_bpid(r->bufs, pool->bpid);  in bman_release()
   775   memcpy(&r->bufs[1], &bufs[1], i * sizeof(bufs[0]));  in bman_release()
   786   int bman_acquire(struct bman_pool *pool, struct bm_buffer *bufs, u8 num)  in bman_acquire() argument
   805   if (bufs)  in bman_acquire()
   806   memcpy(&bufs[0], &mcr->bufs[0], num * sizeof(bufs[0]));  in bman_acquire()
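
bman_release() and bman_acquire() are the exported BMan hardware buffer-pool API; the two bufs[8] members show that each release ring entry and each management command carries at most eight bm_buffer tokens, which is why num is capped at 8. A usage sketch (pool creation via bman_new_pool() omitted; addresses are illustrative):

```c
#include <soc/fsl/bman.h>

/* Push two buffers into a BMan pool, then pull them back out. */
static int bman_roundtrip(struct bman_pool *pool, dma_addr_t a, dma_addr_t b)
{
	struct bm_buffer bufs[2];
	int ret;

	bm_buffer_set64(&bufs[0], a);
	bm_buffer_set64(&bufs[1], b);

	ret = bman_release(pool, bufs, 2);	/* num must be 1..8 */
	if (ret)
		return ret;

	ret = bman_acquire(pool, bufs, 2);	/* returns count acquired */
	if (ret != 2)
		return ret < 0 ? ret : -ENODATA;

	/* bm_buffer_get64(&bufs[i]) recovers the addresses */
	return 0;
}
```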
|
| /drivers/net/ethernet/fungible/funeth/ |
| A D | funeth_rx.c |
    55   c->bufs[c->prod_cnt & c->mask] = *buf;  in cache_offer()
    75   buf = &c->bufs[c->cons_cnt & c->mask];  in cache_get()
   538   struct funeth_rxbuf *b = q->bufs;  in fun_rxq_free_bufs()
   551   struct funeth_rxbuf *b = q->bufs;  in fun_rxq_alloc_bufs()
   561   q->cur_buf = q->bufs;  in fun_rxq_alloc_bufs()
   570   c->bufs = kvzalloc_node(depth * sizeof(*c->bufs), GFP_KERNEL, node);  in fun_rxq_init_cache()
   571   return c->bufs ? 0 : -ENOMEM;  in fun_rxq_init_cache()
   583   kvfree(q->cache.bufs);  in fun_rxq_free_cache()
   584   q->cache.bufs = NULL;  in fun_rxq_free_cache()
   669   q->rq_dma_addr, q->bufs);  in fun_rxq_create_sw()
   [all …]
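
The cache_offer()/cache_get() lines use the classic power-of-two ring: prod_cnt and cons_cnt are free-running counters masked only when indexing, so emptiness is counter equality and fullness is a difference greater than the mask. A generic sketch of that arithmetic, assuming depth is a power of two so mask = depth - 1:

```c
#include <linux/types.h>

struct buf_cache {
	unsigned int prod_cnt;	/* free-running; masked only on access */
	unsigned int cons_cnt;
	unsigned int mask;	/* depth - 1, depth a power of two */
	void **bufs;
};

static bool cache_offer(struct buf_cache *c, void *buf)
{
	if (c->prod_cnt - c->cons_cnt > c->mask)
		return false;				/* full */
	c->bufs[c->prod_cnt++ & c->mask] = buf;
	return true;
}

static void *cache_get(struct buf_cache *c)
{
	if (c->prod_cnt == c->cons_cnt)
		return NULL;				/* empty */
	return c->bufs[c->cons_cnt++ & c->mask];
}
```

Unsigned wraparound keeps prod_cnt - cons_cnt correct even after the counters overflow, which is the point of never masking them in place.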
|
| /drivers/net/ethernet/mellanox/mlx5/core/fpga/ |
| A D | conn.c |
   122   conn->qp.rq.bufs[ix] = buf;  in mlx5_fpga_conn_post_recv()
   171   conn->qp.sq.bufs[ix] = buf;  in mlx5_fpga_conn_post_send()
   257   buf = conn->qp.rq.bufs[ix];  in mlx5_fpga_conn_rq_cqe()
   258   conn->qp.rq.bufs[ix] = NULL;  in mlx5_fpga_conn_rq_cqe()
   300   buf = conn->qp.sq.bufs[ix];  in mlx5_fpga_conn_sq_cqe()
   539   if (!conn->qp.rq.bufs) {  in mlx5_fpga_conn_create_qp()
   547   if (!conn->qp.sq.bufs) {  in mlx5_fpga_conn_create_qp()
   595   kvfree(conn->qp.sq.bufs);  in mlx5_fpga_conn_create_qp()
   597   kvfree(conn->qp.rq.bufs);  in mlx5_fpga_conn_create_qp()
   652   kvfree(conn->qp.sq.bufs);  in mlx5_fpga_conn_destroy_qp()
   [all …]
|
| A D | conn.h |
    74   struct mlx5_fpga_dma_buf **bufs;  member
    81   struct mlx5_fpga_dma_buf **bufs;  member
|
| /drivers/md/ |
| A D | dm-verity-fec.c |
    97   return &fio->bufs[i][j * v->fec->rsn];  in fec_buffer_rs_block()
   320   if (fio->bufs[n])  in fec_alloc_bufs()
   323   fio->bufs[n] = mempool_alloc(&v->fec->prealloc_pool, GFP_NOWAIT);  in fec_alloc_bufs()
   324   if (unlikely(!fio->bufs[n])) {  in fec_alloc_bufs()
   332   if (fio->bufs[n])  in fec_alloc_bufs()
   335   fio->bufs[n] = mempool_alloc(&v->fec->extra_pool, GFP_NOWAIT);  in fec_alloc_bufs()
   337   if (unlikely(!fio->bufs[n]))  in fec_alloc_bufs()
   357   memset(fio->bufs[n], 0, v->fec->rsn << DM_VERITY_FEC_BUF_RS_BITS);  in fec_init_bufs()
   483   mempool_free(fio->bufs[n], &f->prealloc_pool);  in verity_fec_finish_io()
   486   mempool_free(fio->bufs[n], &f->extra_pool);  in verity_fec_finish_io()
   [all …]
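
fec_alloc_bufs() partitions fio->bufs[] by index between a guaranteed preallocated mempool and a best-effort extra pool, allocating with GFP_NOWAIT so the I/O path never sleeps and tolerating failures; verity_fec_finish_io() must free each buffer back to the pool its index implies, which is why the frees at 483/486 are split. A reduced sketch of that pairing (PREALLOC_BUFS is an illustrative stand-in for the driver's preallocated count):

```c
#include <linux/mempool.h>

#define PREALLOC_BUFS 1		/* illustrative split point */

static void *fec_buf_alloc(mempool_t *prealloc, mempool_t *extra,
			   unsigned int n)
{
	mempool_t *pool = n < PREALLOC_BUFS ? prealloc : extra;

	/* GFP_NOWAIT: never sleep here; a NULL return is tolerated
	 * and decoding proceeds with however many buffers were got. */
	return mempool_alloc(pool, GFP_NOWAIT);
}

static void fec_buf_free(mempool_t *prealloc, mempool_t *extra,
			 void *buf, unsigned int n)
{
	/* a buffer must go back to the pool its index implies */
	if (buf)
		mempool_free(buf, n < PREALLOC_BUFS ? prealloc : extra);
}
```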
|
| /drivers/infiniband/hw/hns/ |
| A D | hns_roce_alloc.c |
   132   int hns_roce_get_kmem_bufs(struct hns_roce_dev *hr_dev, dma_addr_t *bufs,  in hns_roce_get_kmem_bufs() argument
   149   bufs[total++] = hns_roce_buf_dma_addr(buf, offset);  in hns_roce_get_kmem_bufs()
   156   int hns_roce_get_umem_bufs(dma_addr_t *bufs, int buf_cnt, struct ib_umem *umem,  in hns_roce_get_umem_bufs() argument
   164   bufs[total++] = rdma_block_iter_dma_address(&biter);  in hns_roce_get_umem_bufs()
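
hns_roce_get_umem_bufs() is a thin wrapper over the core RDMA block iterator: it walks a pinned user memory region in fixed-size DMA blocks and records each block's bus address. A sketch of that walk using the generic <rdma/ib_umem.h> API (the bound check is added for illustration):

```c
#include <rdma/ib_umem.h>

/* Collect up to buf_cnt per-block DMA addresses from a pinned umem. */
static int collect_umem_blocks(dma_addr_t *bufs, int buf_cnt,
			       struct ib_umem *umem, unsigned long page_size)
{
	struct ib_block_iter biter;
	int total = 0;

	rdma_umem_for_each_dma_block(umem, &biter, page_size) {
		if (total >= buf_cnt)
			break;
		bufs[total++] = rdma_block_iter_dma_address(&biter);
	}

	return total;
}
```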
|
| /drivers/staging/rtl8723bs/os_dep/ |
| A D | osdep_service.c |
   191   cbuf->bufs[cbuf->write] = buf;  in rtw_cbuf_push()
   210   buf = cbuf->bufs[cbuf->read];  in rtw_cbuf_pop()
   226   cbuf = rtw_malloc(struct_size(cbuf, bufs, size));  in rtw_cbuf_alloc()
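
rtw_cbuf_alloc() sizes its allocation with struct_size(), which computes header-plus-flexible-array in one overflow-checked expression; push and pop then use plain modular read/write indices. A self-contained sketch of the same shape, with generic names rather than the driver's:

```c
#include <linux/overflow.h>
#include <linux/slab.h>

struct cbuf {
	u32 write;
	u32 read;
	u32 size;
	void *bufs[];		/* flexible array member */
};

static struct cbuf *cbuf_alloc(u32 size)
{
	/* struct_size() = sizeof(struct cbuf) + size * sizeof(void *),
	 * saturating on overflow so kzalloc() fails cleanly. */
	struct cbuf *c = kzalloc(struct_size(c, bufs, size), GFP_KERNEL);

	if (c)
		c->size = size;
	return c;
}

static bool cbuf_push(struct cbuf *c, void *buf)
{
	u32 next = (c->write + 1) % c->size;

	if (next == c->read)
		return false;		/* full: one slot kept empty */
	c->bufs[c->write] = buf;
	c->write = next;
	return true;
}

static void *cbuf_pop(struct cbuf *c)
{
	void *buf;

	if (c->read == c->write)
		return NULL;		/* empty */
	buf = c->bufs[c->read];
	c->read = (c->read + 1) % c->size;
	return buf;
}
```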
|
| /drivers/gpu/drm/tegra/ |
| A D | submit.c |
   262   struct drm_tegra_submit_buf *bufs;  in submit_process_bufs() local
   266   bufs = alloc_copy_user_array(u64_to_user_ptr(args->bufs_ptr), args->num_bufs,  in submit_process_bufs()
   267   sizeof(*bufs));  in submit_process_bufs()
   268   if (IS_ERR(bufs)) {  in submit_process_bufs()
   270   return PTR_ERR(bufs);  in submit_process_bufs()
   281   struct drm_tegra_submit_buf *buf = &bufs[i];  in submit_process_bufs()
   322   kvfree(bufs);  in submit_process_bufs()
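
alloc_copy_user_array() is a Tegra-local helper, but the shape is the standard one for ioctl array arguments: bound the count, allocate, and copy the whole user array through a u64-encoded pointer. A sketch under those assumptions (the 64 KiB cap is illustrative; the stock vmemdup_user() helper covers the allocate-and-copy step in many drivers):

```c
#include <linux/err.h>
#include <linux/sizes.h>
#include <linux/slab.h>
#include <linux/uaccess.h>

/* Copy a userspace array described by (u64 ptr, count, element size).
 * Mirrors the shape of Tegra's helper; not its actual code.
 */
static void *copy_user_array(u64 user_ptr, unsigned int count, size_t elem)
{
	void *data;

	if (!count || count > SZ_64K / elem)	/* illustrative bound */
		return ERR_PTR(-EINVAL);

	data = kvmalloc_array(count, elem, GFP_KERNEL);
	if (!data)
		return ERR_PTR(-ENOMEM);

	if (copy_from_user(data, u64_to_user_ptr(user_ptr), count * elem)) {
		kvfree(data);
		return ERR_PTR(-EFAULT);
	}
	return data;	/* caller kvfree()s, as submit_process_bufs() does */
}
```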
|
| /drivers/net/ethernet/atheros/alx/ |
| A D | main.c |
    82   cur_buf = &rxq->bufs[cur];  in alx_refill_rx_ring()
   127   cur_buf = &rxq->bufs[cur];  in alx_refill_rx_ring()
   182   skb = txq->bufs[sw_read_idx].skb;  in alx_clean_tx_irq()
   241   rxb = &rxq->bufs[rxq->read_idx];  in alx_clean_rx_irq()
   492   if (!txq->bufs)  in alx_free_txring_buf()
   511   if (!rxq->bufs)  in alx_free_rxring_buf()
   515   cur_buf = rxq->bufs + i;  in alx_free_rxring_buf()
   620   if (!txq->bufs)  in alx_alloc_tx_ring()
   634   if (!rxq->bufs)  in alx_alloc_rx_ring()
   697   kfree(alx->qnapi[i]->txq->bufs);  in alx_free_rings()
   [all …]
|
| A D | alx.h |
    63   struct alx_buffer *bufs;  member
    79   struct alx_buffer *bufs;  member
|
| /drivers/bluetooth/ |
| A D | btintel_pcie.c |
   156   if (!data->dbgc.bufs)  in btintel_pcie_setup_dbgc()
   182   buf = &data->dbgc.bufs[i];  in btintel_pcie_setup_dbgc()
   243   buf = &txq->bufs[tfd_index];  in btintel_pcie_prepare_tx()
  1714   kfree(txq->bufs);  in btintel_pcie_free_txq_bufs()
  1725   if (!txq->bufs)  in btintel_pcie_setup_txq_bufs()
  1736   kfree(txq->bufs);  in btintel_pcie_setup_txq_bufs()
  1744   buf = &txq->bufs[i];  in btintel_pcie_setup_txq_bufs()
  1758   kfree(rxq->bufs);  in btintel_pcie_free_rxq_bufs()
  1769   if (!rxq->bufs)  in btintel_pcie_setup_rxq_bufs()
  1780   kfree(rxq->bufs);  in btintel_pcie_setup_rxq_bufs()
   [all …]
|
| /drivers/net/ethernet/intel/ice/ |
| A D | ice_ddp.c |
  1316   if (!bufs || !count) {  in ice_dwnld_cfg_bufs_no_lock()
  1321   bufs += start;  in ice_dwnld_cfg_bufs_no_lock()
  1323   for (int i = 0; i < count; i++, bufs++) {  in ice_dwnld_cfg_bufs_no_lock()
  1324   bh = (struct ice_buf_hdr *)bufs;  in ice_dwnld_cfg_bufs_no_lock()
  1425   struct ice_buf_table *bufs;  in ice_download_pkg_config_seg() local
  1433   bufs = ice_find_buf_table(seg);  in ice_download_pkg_config_seg()
  1434   buf_count = le32_to_cpu(bufs->buf_count);  in ice_download_pkg_config_seg()
  1594   if (!bufs || !count)  in ice_dwnld_cfg_bufs()
  1601   bh = (struct ice_buf_hdr *)bufs;  in ice_dwnld_cfg_bufs()
  1612   ice_dwnld_cfg_bufs_no_lock(&ctx, bufs, 0, count);  in ice_dwnld_cfg_bufs()
   [all …]
|
| /drivers/net/ethernet/pensando/ionic/ |
| A D | ionic_txrx.c |
   187   buf_info = &desc_info->bufs[0];  in ionic_rx_build_skb()
   239   buf_info = &desc_info->bufs[0];  in ionic_rx_copybreak()
   314   buf_info = desc_info->bufs;  in ionic_xdp_post_frame()
   813   buf_info = &desc_info->bufs[0];  in ionic_rx_fill()
   891   for (j = 0; j < ARRAY_SIZE(desc_info->bufs); j++)  in ionic_rx_empty()
   892   ionic_rx_put_buf(q, &desc_info->bufs[j]);  in ionic_rx_empty()
  1111   struct ionic_buf_info *buf_info = desc_info->bufs;  in ionic_tx_map_skb()
  1148   dma_unmap_single(dev, desc_info->bufs[0].dma_addr,  in ionic_tx_map_skb()
  1149   desc_info->bufs[0].len, DMA_TO_DEVICE);  in ionic_tx_map_skb()
  1156   struct ionic_buf_info *buf_info = desc_info->bufs;  in ionic_tx_desc_unmap_bufs()
   [all …]
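
ionic_tx_map_skb() fills desc_info->bufs with one DMA mapping for the skb's linear head plus one per page fragment; the unmap paths walk the same array, unmapping the head with dma_unmap_single() and the frags with dma_unmap_page(). The calls involved are generic netdev API; a sketch with stand-in address/length arrays instead of the driver's ionic_buf_info:

```c
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Map an skb's linear head and all frags; returns entries used or <0. */
static int map_skb_for_tx(struct device *dev, struct sk_buff *skb,
			  dma_addr_t *addrs, unsigned int *lens)
{
	const skb_frag_t *frag;
	int i, n = 0;

	addrs[n] = dma_map_single(dev, skb->data, skb_headlen(skb),
				  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, addrs[n]))
		return -ENOMEM;
	lens[n++] = skb_headlen(skb);

	for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
		frag = &skb_shinfo(skb)->frags[i];
		addrs[n] = skb_frag_dma_map(dev, frag, 0,
					    skb_frag_size(frag),
					    DMA_TO_DEVICE);
		if (dma_mapping_error(dev, addrs[n]))
			goto unwind;
		lens[n++] = skb_frag_size(frag);
	}
	return n;

unwind:
	/* head was dma_map_single(); frags were page mappings */
	dma_unmap_single(dev, addrs[0], lens[0], DMA_TO_DEVICE);
	for (i = 1; i < n; i++)
		dma_unmap_page(dev, addrs[i], lens[i], DMA_TO_DEVICE);
	return -ENOMEM;
}
```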
|
| /drivers/net/ethernet/mscc/ |
| A D | ocelot_fdma.h |
    94   struct ocelot_fdma_tx_buf bufs[OCELOT_FDMA_TX_RING_SIZE];  member
   127   struct ocelot_fdma_rx_buf bufs[OCELOT_FDMA_RX_RING_SIZE];  member
|
| /drivers/net/ethernet/mellanox/mlxsw/ |
| A D | spectrum_buffers.c |
   340   hdroom->bufs.buf[i].lossy = true;  in mlxsw_sp_hdroom_bufs_reset_lossiness()
   344   hdroom->bufs.buf[hdroom->prios.prio[prio].buf_idx].lossy = false;  in mlxsw_sp_hdroom_bufs_reset_lossiness()
   420   struct mlxsw_sp_hdroom_buf *buf = &hdroom->bufs.buf[i];  in mlxsw_sp_hdroom_bufs_reset_sizes()
   461   dirty = memcmp(&mlxsw_sp_port->hdroom->bufs, &hdroom->bufs, sizeof(hdroom->bufs));  in mlxsw_sp_hdroom_configure_buffers()
   467   const struct mlxsw_sp_hdroom_buf *buf = &hdroom->bufs.buf[i];  in mlxsw_sp_hdroom_configure_buffers()
   480   mlxsw_sp_port->hdroom->bufs = hdroom->bufs;  in mlxsw_sp_hdroom_configure_buffers()
   535   taken_headroom_cells += hdroom->bufs.buf[i].size_cells;  in mlxsw_sp_hdroom_bufs_fit()
   560   if (hdroom->bufs.buf[i].size_cells)  in __mlxsw_sp_hdroom_configure()
   561   tmp_hdroom.bufs.buf[i] = hdroom->bufs.buf[i];  in __mlxsw_sp_hdroom_configure()
   621   hdroom.bufs.buf[9].size_cells = mlxsw_sp_bytes_cells(mlxsw_sp, size9);  in mlxsw_sp_port_headroom_init()
|