Searched refs:sq_depth (Results 1 – 25 of 33) sorted by relevance

/drivers/net/ethernet/fungible/funcore/
fun_queue.h:46    u32 sq_depth;    member
fun_queue.h:97    if (++tail == funq->sq_depth)    in funq_sq_post_tail()
fun_queue.h:123   u32 sq_depth;    member
fun_queue.h:133   u8 sqe_size_log2, u32 sq_depth, dma_addr_t dma_addr,
fun_queue.c:76    u8 sqe_size_log2, u32 sq_depth, dma_addr_t dma_addr,    in fun_sq_create() argument
fun_queue.c:89    if (sq_depth > fdev->q_depth)    in fun_sq_create()
fun_queue.c:94    wb_addr = dma_addr + (sq_depth << sqe_size_log2);    in fun_sq_create()
fun_queue.c:101   sq_depth - 1, dma_addr, 0,    in fun_sq_create()
fun_queue.c:356   funq->sq_cmds = fun_alloc_ring_mem(funq->fdev->dev, funq->sq_depth,    in fun_alloc_sqes()
fun_queue.c:390   fun_free_ring_mem(dev, funq->sq_depth, 1 << funq->sqe_size_log2,    in fun_free_queue()
fun_queue.c:442   funq->sq_depth = req->sq_depth;    in fun_alloc_queue()
fun_dev.c:231     .sq_depth = areq->sq_depth,    in fun_enable_admin_queue()
fun_dev.c:234     unsigned int ntags = areq->sq_depth - 1;    in fun_enable_admin_queue()
fun_dev.c:241     if (areq->sq_depth < AQA_MIN_QUEUE_SIZE ||    in fun_enable_admin_queue()
fun_dev.c:242     areq->sq_depth > AQA_MAX_QUEUE_SIZE ||    in fun_enable_admin_queue()
fun_dev.c:269     writel((funq->sq_depth - 1) << AQA_ASQS_SHIFT |    in fun_enable_admin_queue()
fun_dev.c:401     if (++funq->sq_tail == funq->sq_depth)    in fun_submit_admin_cmd()
fun_dev.h:93      u16 sq_depth;    member
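
The fun_queue hits above show the classic tail-advance-and-wrap pattern for a submission-queue ring ("if (++tail == funq->sq_depth) tail = 0"). A minimal userspace sketch of that pattern; the struct and sq_post() are illustrative names, not the funcore API:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical ring mirroring the funq->sq_tail / funq->sq_depth usage. */
    struct sq_ring {
            uint32_t sq_depth;      /* number of slots in the ring */
            uint32_t sq_tail;       /* next slot to fill */
            uint64_t *sq_cmds;      /* sq_depth entries of storage */
    };

    /* Post one entry and advance the tail, wrapping at sq_depth. */
    static uint32_t sq_post(struct sq_ring *sq, uint64_t cmd)
    {
            sq->sq_cmds[sq->sq_tail] = cmd;
            if (++sq->sq_tail == sq->sq_depth)
                    sq->sq_tail = 0;
            return sq->sq_tail;     /* new tail, e.g. the doorbell value */
    }

    int main(void)
    {
            uint64_t slots[4] = { 0 };
            struct sq_ring sq = { .sq_depth = 4, .sq_tail = 0, .sq_cmds = slots };

            for (int i = 0; i < 6; i++)
                    printf("posted %d, tail now %u\n", i, sq_post(&sq, i));
            return 0;
    }
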
/drivers/net/ethernet/fungible/funeth/
funeth.h:77           unsigned int sq_depth;    member
funeth.h:115          unsigned int sq_depth;    member
funeth_main.c:494     xdpqs = alloc_xdpqs(netdev, qset->nxdpqs, qset->sq_depth,    in fun_alloc_rings()
funeth_main.c:503     err = alloc_txqs(netdev, txqs, qset->ntxqs, qset->sq_depth,    in fun_alloc_rings()
funeth_main.c:843     .sq_depth = fp->sq_depth,    in funeth_open()
funeth_main.c:1085    xdpqs = alloc_xdpqs(dev, nqs, fp->sq_depth, 0, FUN_QSTATE_INIT_FULL);    in fun_enter_xdp()
funeth_main.c:1644    .sq_depth = fp->sq_depth,    in fun_change_num_queues()
funeth_main.c:1781    fp->sq_depth = min(SQ_DEPTH, fdev->q_depth);    in fun_create_netdev()
funeth_main.c:2002    .sq_depth = ADMIN_SQ_DEPTH,    in funeth_probe()
funeth_ethtool.c:567  ring->tx_pending = fp->sq_depth;    in fun_get_ringparam()
funeth_ethtool.c:593  if (fp->sq_depth == ring->tx_pending &&    in fun_set_ringparam()
funeth_ethtool.c:601  .sq_depth = ring->tx_pending    in fun_set_ringparam()
funeth_ethtool.c:609  fp->sq_depth = ring->tx_pending;    in fun_set_ringparam()
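
funeth_ethtool.c wires sq_depth to the generic ethtool ring parameters: fun_get_ringparam() reports it as tx_pending, and fun_set_ringparam() only rebuilds the rings when the requested value actually differs. A hedged sketch of that round-trip; struct ringparam and struct priv are stand-ins, not the kernel types:

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative stand-ins for struct ethtool_ringparam and the driver priv. */
    struct ringparam { unsigned int tx_pending; };
    struct priv      { unsigned int sq_depth; };

    static void get_ringparam(const struct priv *fp, struct ringparam *ring)
    {
            ring->tx_pending = fp->sq_depth;        /* report the current SQ depth */
    }

    /* Returns true when the rings would need to be rebuilt. */
    static bool set_ringparam(struct priv *fp, const struct ringparam *ring)
    {
            if (fp->sq_depth == ring->tx_pending)
                    return false;                   /* no-op, as in fun_set_ringparam() */
            fp->sq_depth = ring->tx_pending;        /* the driver reallocates rings here */
            return true;
    }

    int main(void)
    {
            struct priv fp = { .sq_depth = 512 };
            struct ringparam ring;

            get_ringparam(&fp, &ring);
            printf("tx_pending=%u\n", ring.tx_pending);
            printf("changed=%d\n",
                   set_ringparam(&fp, &(struct ringparam){ .tx_pending = 1024 }));
            return 0;
    }
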
/drivers/net/ethernet/huawei/hinic/
hinic_hw_dev.c:270    static int set_hw_ioctxt(struct hinic_hwdev *hwdev, unsigned int sq_depth,    in set_hw_ioctxt() argument
hinic_hw_dev.c:289    hw_ioctxt.sq_depth = ilog2(sq_depth);    in set_hw_ioctxt()
hinic_hw_dev.c:438    int hinic_hwdev_ifup(struct hinic_hwdev *hwdev, u16 sq_depth, u16 rq_depth)    in hinic_hwdev_ifup() argument
hinic_hw_dev.c:461    func_to_io->sq_depth = sq_depth;    in hinic_hwdev_ifup()
hinic_hw_dev.c:489    err = set_hw_ioctxt(hwdev, sq_depth, rq_depth);    in hinic_hwdev_ifup()
hinic_hw_io.h:73      u16 sq_depth;    member
hinic_dev.h:99        u16 sq_depth;    member
hinic_hw_dev.h:288    u16 sq_depth;    member
hinic_hw_dev.h:627    int hinic_hwdev_ifup(struct hinic_hwdev *hwdev, u16 sq_depth, u16 rq_depth);
hinic_ethtool.c:558   ring->tx_pending = nic_dev->sq_depth;    in hinic_get_ringparam()
hinic_ethtool.c:599   if (new_sq_depth == nic_dev->sq_depth &&    in hinic_set_ringparam()
hinic_ethtool.c:605   nic_dev->sq_depth, nic_dev->rq_depth,    in hinic_set_ringparam()
hinic_ethtool.c:608   nic_dev->sq_depth = new_sq_depth;    in hinic_set_ringparam()
hinic_hw_mbox.c:1291  (hw_ctxt)->sq_depth >= HINIC_QUEUE_MIN_DEPTH && \
hinic_hw_mbox.c:1292  (hw_ctxt)->sq_depth <= HINIC_QUEUE_MAX_DEPTH && \
hinic_hw_mbox.c:1300  if (!hw_ctxt->rq_depth && !hw_ctxt->sq_depth &&    in hw_ctxt_qps_param_valid()
hinic_main.c:424      err = hinic_hwdev_ifup(nic_dev->hwdev, nic_dev->sq_depth,    in hinic_open()
hinic_main.c:1214     nic_dev->sq_depth = HINIC_SQ_DEPTH;    in nic_dev_init()
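
Note how set_hw_ioctxt() hands the device ilog2(sq_depth): the hardware I/O context stores the depth as a base-2 exponent, which presumes a power-of-two depth. A small sketch of that encoding, with ilog2_u32() as a userspace stand-in for the kernel's ilog2():

    #include <stdint.h>
    #include <stdio.h>

    /* Userspace stand-in for the kernel's ilog2() on a power-of-two value. */
    static unsigned int ilog2_u32(uint32_t v)
    {
            unsigned int log = 0;

            while (v >>= 1)
                    log++;
            return log;
    }

    int main(void)
    {
            uint32_t sq_depth = 4096;       /* a HINIC_SQ_DEPTH-style power of two */

            /* The device receives the exponent: 4096 entries encode as 12. */
            printf("sq_depth=%u encoded=%u\n", sq_depth, ilog2_u32(sq_depth));
            return 0;
    }
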
/drivers/crypto/hisilicon/
qm.c:271       #define QM_MK_SQC_DW3_V2(sqe_sz, sq_depth) \    argument
qm.c:2089      sqc.w8 = cpu_to_le16(qp->sq_depth - 1);    in qm_sq_ctx_cfg()
qm.c:2203      u16 sq_depth = qp->sq_depth;    in qp_stop_fail_cb() local
qm.c:2204      u16 cur_head = (cur_tail + sq_depth - qp_used) % sq_depth;    in qp_stop_fail_cb()
qm.c:2210      pos = (i + cur_head) % sq_depth;    in qp_stop_fail_cb()
qm.c:2600      qp_info.sq_depth = qp->sq_depth;    in hisi_qm_uacce_ioctl()
qm.c:2751      u16 sq_depth, cq_depth;    in qm_alloc_uacce() local
qm.c:2908      u16 sq_depth, u16 cq_depth)    in hisi_qp_memory_init() argument
qm.c:2911      size_t off = qm->sqe_size * sq_depth;    in hisi_qp_memory_init()
qm.c:2931      qp->sq_depth = sq_depth;    in hisi_qp_memory_init()
[all …]
debugfs.c:322  u16 sq_depth = qm->qp_array->sq_depth;    in qm_sq_dump() local
debugfs.c:328  ret = q_dump_param_parse(qm, s, &sqe_id, &qp_id, sq_depth);    in qm_sq_dump()
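
qp_stop_fail_cb() in qm.c recovers the ring head from the tail and the in-flight count: cur_head = (cur_tail + sq_depth - qp_used) % sq_depth, where adding sq_depth keeps the dividend non-negative before the modulo. A sketch of that walk over the pending slots (dump_inflight() is an illustrative name):

    #include <stdint.h>
    #include <stdio.h>

    /* Walk the qp_used in-flight entries starting from the recovered head,
     * mirroring the qp_stop_fail_cb() arithmetic above. */
    static void dump_inflight(uint16_t cur_tail, uint16_t qp_used, uint16_t sq_depth)
    {
            /* + sq_depth keeps the dividend non-negative before the % */
            uint16_t cur_head = (cur_tail + sq_depth - qp_used) % sq_depth;

            for (uint16_t i = 0; i < qp_used; i++) {
                    uint16_t pos = (i + cur_head) % sq_depth;

                    printf("in-flight slot %u\n", (unsigned)pos);
            }
    }

    int main(void)
    {
            /* tail wrapped past 0 while 3 requests were still pending:
             * head recovers to (1 + 8 - 3) % 8 = 6, slots 6, 7, 0 */
            dump_inflight(/*cur_tail=*/1, /*qp_used=*/3, /*sq_depth=*/8);
            return 0;
    }
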
/drivers/net/ethernet/huawei/hinic3/
hinic3_nic_dev.h:46  u32 sq_depth;    member
hinic3_tx.c:40       txq->q_depth = nic_dev->q_params.sq_depth;    in hinic3_alloc_txqs()
hinic3_tx.c:41       txq->q_mask = nic_dev->q_params.sq_depth - 1;    in hinic3_alloc_txqs()
hinic3_main.c:83     nic_dev->q_params.sq_depth = HINIC3_SQ_DEPTH;    in hinic3_sw_init()
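
hinic3_tx.c derives an index mask from the depth (q_mask = sq_depth - 1), which is only valid when sq_depth is a power of two; masking then replaces a modulo on the transmit hot path. A sketch:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint32_t sq_depth = 1024;               /* must be a power of two */
            uint32_t q_mask = sq_depth - 1;         /* as in hinic3_alloc_txqs() */

            assert((sq_depth & (sq_depth - 1)) == 0);

            /* idx & q_mask is equivalent to idx % sq_depth, with no division */
            for (uint32_t idx = 1022; idx < 1026; idx++)
                    printf("idx=%u slot=%u\n", idx, idx & q_mask);
            return 0;
    }
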
/drivers/infiniband/hw/irdma/
user.h:300    u32 *sq_depth, u8 *sq_shift);
user.h:378    u32 sq_depth;    member
verbs.c:632   ret = irdma_uk_calc_depth_shift_sq(ukinfo, &ukinfo->sq_depth,    in irdma_setup_umode_qp()
verbs.c:643   (ukinfo->sq_depth - IRDMA_SQ_RSVD) >> ukinfo->sq_shift;    in irdma_setup_umode_qp()
verbs.c:646   ukinfo->sq_size = ukinfo->sq_depth >> ukinfo->sq_shift;    in irdma_setup_umode_qp()
verbs.c:672   status = irdma_uk_calc_depth_shift_sq(ukinfo, &ukinfo->sq_depth,    in irdma_setup_kmode_qp()
verbs.c:683   kcalloc(ukinfo->sq_depth, sizeof(*iwqp->kqp.sq_wrid_mem), GFP_KERNEL);    in irdma_setup_kmode_qp()
verbs.c:699   size = (ukinfo->sq_depth + ukinfo->rq_depth) * IRDMA_QP_WQE_MIN_SIZE;    in irdma_setup_kmode_qp()
verbs.c:715   ukinfo->rq = &ukinfo->sq[ukinfo->sq_depth];    in irdma_setup_kmode_qp()
verbs.c:716   info->rq_pa = info->sq_pa + (ukinfo->sq_depth * IRDMA_QP_WQE_MIN_SIZE);    in irdma_setup_kmode_qp()
verbs.c:720   ukinfo->sq_size = ukinfo->sq_depth >> ukinfo->sq_shift;    in irdma_setup_kmode_qp()
verbs.c:723   iwqp->max_send_wr = (ukinfo->sq_depth - IRDMA_SQ_RSVD) >> ukinfo->sq_shift;    in irdma_setup_kmode_qp()
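
The irdma hits share one allocation between both queues: SQ and RQ WQEs live in a single buffer, the RQ starts at &sq[sq_depth], the usable size is sq_depth >> sq_shift, and max_send_wr subtracts the reserved IRDMA_SQ_RSVD slots first. A hedged sketch of that layout arithmetic, with WQE_MIN_SIZE and SQ_RSVD as illustrative stand-ins for the driver's constants:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define WQE_MIN_SIZE 32         /* stand-in for IRDMA_QP_WQE_MIN_SIZE */
    #define SQ_RSVD      1          /* stand-in for IRDMA_SQ_RSVD */

    int main(void)
    {
            uint32_t sq_depth = 128, rq_depth = 64;
            uint8_t sq_shift = 1;   /* quanta per WQE, from the depth/shift helper */

            /* One contiguous buffer holds SQ then RQ, as in irdma_setup_kmode_qp(). */
            size_t size = (size_t)(sq_depth + rq_depth) * WQE_MIN_SIZE;
            uint8_t *sq = malloc(size);
            if (!sq)
                    return 1;

            /* &sq[sq_depth] in WQE units: the RQ begins right after the SQ */
            uint8_t *rq = sq + (size_t)sq_depth * WQE_MIN_SIZE;

            uint32_t sq_size = sq_depth >> sq_shift;
            uint32_t max_send_wr = (sq_depth - SQ_RSVD) >> sq_shift;

            printf("sq_size=%u max_send_wr=%u rq_offset=%td\n",
                   sq_size, max_send_wr, rq - sq);
            free(sq);
            return 0;
    }
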
/drivers/crypto/hisilicon/zip/
zip_crypto.c:447  u16 q_depth = ctx->qp_ctx[0].qp->sq_depth;    in hisi_zip_create_req_q()
zip_crypto.c:499  u16 q_depth = ctx->qp_ctx[0].qp->sq_depth;    in hisi_zip_create_sgl_pool()
/drivers/infiniband/hw/efa/
efa_com_cmd.h:23  u32 sq_depth;    member
/drivers/crypto/hisilicon/sec2/
sec_crypto.c:131  req_id = idr_alloc_cyclic(&qp_ctx->req_idr, NULL, 0, qp_ctx->qp->sq_depth, GFP_ATOMIC);    in sec_alloc_req_id()
sec_crypto.c:141  if (unlikely(req_id < 0 || req_id >= qp_ctx->qp->sq_depth)) {    in sec_free_req_id()
sec_crypto.c:214  if (atomic_read(&qp_ctx->qp->qp_status.used) == qp_ctx->qp->sq_depth - 1)    in qp_send_message()
sec_crypto.c:218  if (atomic_read(&qp_ctx->qp->qp_status.used) == qp_ctx->qp->sq_depth - 1) {    in qp_send_message()
sec_crypto.c:234  qp_ctx->send_head = (qp_ctx->send_head + 1) % qp_ctx->qp->sq_depth;    in qp_send_message()
sec_crypto.c:566  u16 q_depth = qp_ctx->qp->sq_depth;    in sec_alloc_qp_ctx_resource()
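
sec_crypto.c declares the queue full once the used counter reaches sq_depth - 1, sacrificing one slot so that full and empty are distinguishable, and advances send_head modulo sq_depth. A sketch with the atomic counter replaced by a plain int for illustration:

    #include <stdbool.h>
    #include <stdio.h>

    struct qp {
            int used;               /* stand-in for the driver's atomic used counter */
            int send_head;
            int sq_depth;
    };

    /* Returns false when the ring is full; one slot stays empty so that
     * used == sq_depth - 1 can signal "full", as in qp_send_message(). */
    static bool qp_send(struct qp *qp)
    {
            if (qp->used == qp->sq_depth - 1)
                    return false;   /* -EBUSY in the driver */
            qp->used++;
            qp->send_head = (qp->send_head + 1) % qp->sq_depth;
            return true;
    }

    int main(void)
    {
            struct qp qp = { .used = 0, .send_head = 0, .sq_depth = 4 };

            while (qp_send(&qp))
                    printf("queued, head=%d used=%d\n", qp.send_head, qp.used);
            printf("queue full at used=%d\n", qp.used);
            return 0;
    }
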
/drivers/net/ethernet/amazon/ena/
ena_admin_defs.h:269  u16 sq_depth;    member

Completed in 78 milliseconds
