
Searched refs:hwq (Results 1 – 24 of 24) sorted by relevance

/linux/drivers/infiniband/hw/bnxt_re/
qplib_res.h
66 #define HWQ_CMP(idx, hwq) ((idx) & ((hwq)->max_elements - 1)) argument
68 #define HWQ_FREE_SLOTS(hwq) (hwq->max_elements - \ argument
69 ((HWQ_CMP(hwq->prod, hwq)\
70 - HWQ_CMP(hwq->cons, hwq))\
328 return (void *)(hwq->pbl_ptr[pg_num] + hwq->element_size * pg_idx); in bnxt_qplib_get_qe()
333 idx += hwq->prod; in bnxt_qplib_get_prod_qe()
335 idx -= hwq->depth; in bnxt_qplib_get_prod_qe()
375 hwq->prod = (hwq->prod + cnt) % hwq->depth; in bnxt_qplib_hwq_incr_prod()
381 hwq->cons = (hwq->cons + cnt) % hwq->depth; in bnxt_qplib_hwq_incr_cons()
389 key = info->hwq->cons & (info->hwq->max_elements - 1); in bnxt_qplib_ring_db32()
[all …]
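The HWQ_CMP() and HWQ_FREE_SLOTS() macros above are the usual power-of-two ring idiom: producer and consumer indices run freely and are masked down to a slot number, and free space is the capacity minus the masked producer/consumer distance. A minimal user-space sketch of that idiom, assuming max_elements is a power of two (names here are illustrative, not the driver's):

#include <stdint.h>
#include <stdio.h>

/* Illustrative ring mirroring the HWQ_CMP()/HWQ_FREE_SLOTS() idiom:
 * indices increase monotonically; max_elements must be a power of two. */
struct ring {
        uint32_t prod;
        uint32_t cons;
        uint32_t max_elements;
};

/* Map a free-running index onto a slot number. */
static uint32_t ring_cmp(const struct ring *r, uint32_t idx)
{
        return idx & (r->max_elements - 1);
}

/* Free slots = capacity minus the masked producer/consumer distance. */
static uint32_t ring_free_slots(const struct ring *r)
{
        return r->max_elements -
               ((ring_cmp(r, r->prod) - ring_cmp(r, r->cons)) &
                (r->max_elements - 1));
}

int main(void)
{
        struct ring r = { .prod = 5, .cons = 2, .max_elements = 8 };

        printf("prod slot %u, free slots %u\n",
               ring_cmp(&r, r.prod), ring_free_slots(&r));
        return 0;
}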
qplib_fp.c
659 srq->dbinfo.hwq = &srq->hwq; in bnxt_qplib_create_srq()
901 sq->dbinfo.hwq = &sq->hwq; in bnxt_qplib_create_qp1()
906 rq->dbinfo.hwq = &rq->hwq; in bnxt_qplib_create_qp1()
939 hwq = &sq->hwq; in bnxt_qplib_init_psn_ptr()
1110 sq->dbinfo.hwq = &sq->hwq; in bnxt_qplib_create_qp()
1115 rq->dbinfo.hwq = &rq->hwq; in bnxt_qplib_create_qp()
1598 hwq = &qp->sq.hwq; in bnxt_qplib_put_inline()
1684 hwq = &sq->hwq; in bnxt_qplib_pull_psn_buff()
1719 hwq = &sq->hwq; in bnxt_qplib_post_send()
1733 hwq->prod, hwq->cons, hwq->depth, sq->q_full_delta); in bnxt_qplib_post_send()
[all …]
qplib_res.c
167 if (i == hwq->level) in bnxt_qplib_free_hwq()
168 __free_pbl(res, &hwq->pbl[i], hwq->is_user); in bnxt_qplib_free_hwq()
176 hwq->prod = 0; in bnxt_qplib_free_hwq()
177 hwq->cons = 0; in bnxt_qplib_free_hwq()
178 hwq->cp_bit = 0; in bnxt_qplib_free_hwq()
350 hwq->prod = 0; in bnxt_qplib_alloc_init_hwq()
351 hwq->cons = 0; in bnxt_qplib_alloc_init_hwq()
352 hwq->pdev = pdev; in bnxt_qplib_alloc_init_hwq()
358 lvl = hwq->level; in bnxt_qplib_alloc_init_hwq()
361 hwq->pbl_ptr = hwq->pbl[lvl].pg_arr; in bnxt_qplib_alloc_init_hwq()
[all …]
qplib_rcfw.c
92 struct bnxt_qplib_hwq *hwq = &cmdq->hwq; in __send_message() local
168 sw_prod = HWQ_CMP(hwq->prod, hwq); in __send_message()
180 hwq->prod++; in __send_message()
184 cmdq_prod = hwq->prod; in __send_message()
305 struct bnxt_qplib_hwq *hwq = &rcfw->cmdq.hwq; in bnxt_qplib_process_qp_event() local
382 struct bnxt_qplib_hwq *hwq = &creq->hwq; in bnxt_qplib_service_creq() local
389 raw_cons = hwq->cons; in bnxt_qplib_service_creq()
427 hwq->cons = raw_cons; in bnxt_qplib_service_creq()
442 hwq = &creq->hwq; in bnxt_qplib_creq_irq()
444 sw_cons = HWQ_CMP(hwq->cons, hwq); in bnxt_qplib_creq_irq()
[all …]
qplib_fp.h
101 struct bnxt_qplib_hwq hwq; member
249 struct bnxt_qplib_hwq hwq; member
358 struct bnxt_qplib_hwq *hwq; in bnxt_qplib_queue_full() local
361 hwq = &que->hwq; in bnxt_qplib_queue_full()
363 avail = hwq->cons - hwq->prod; in bnxt_qplib_queue_full()
364 if (hwq->cons <= hwq->prod) in bnxt_qplib_queue_full()
365 avail += hwq->depth; in bnxt_qplib_queue_full()
402 struct bnxt_qplib_hwq hwq; member
475 struct bnxt_qplib_hwq hwq; member
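The bnxt_qplib_queue_full() fragment above computes free space for a ring whose indices wrap at depth rather than at a power-of-two mask, matching the "(prod + cnt) % depth" increments shown in qplib_res.h: the raw cons - prod difference is adjusted by depth when the consumer is at or behind the producer. A hedged stand-alone sketch of that space check, with the driver's q_full_delta handling omitted and illustrative names:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative queue whose prod/cons wrap at depth, matching the
 * "(idx + cnt) % depth" increments in bnxt_qplib_hwq_incr_prod/cons. */
struct queue {
        uint32_t prod;
        uint32_t cons;
        uint32_t depth;
};

/* Free entries between prod and cons; add depth once the indices wrap,
 * mirroring the avail computation in bnxt_qplib_queue_full(). */
static uint32_t queue_avail(const struct queue *q)
{
        uint32_t avail = q->cons - q->prod;

        if (q->cons <= q->prod)
                avail += q->depth;
        return avail;
}

static bool queue_full(const struct queue *q, uint32_t slots)
{
        return queue_avail(q) <= slots;
}

int main(void)
{
        struct queue q = { .prod = 6, .cons = 2, .depth = 8 };

        printf("avail %u, full for 3 slots: %d\n",
               queue_avail(&q), queue_full(&q, 3));
        return 0;
}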
qplib_sp.c
596 if (mrw->hwq.max_elements) in bnxt_qplib_free_mrw()
597 bnxt_qplib_free_hwq(res, &mrw->hwq); in bnxt_qplib_free_mrw()
655 if (mrw->hwq.max_elements) { in bnxt_qplib_dereg_mrw()
658 bnxt_qplib_free_hwq(res, &mrw->hwq); in bnxt_qplib_dereg_mrw()
682 if (mr->hwq.max_elements) in bnxt_qplib_reg_mr()
683 bnxt_qplib_free_hwq(res, &mr->hwq); in bnxt_qplib_reg_mr()
705 if (mr->hwq.level == PBL_LVL_MAX) { in bnxt_qplib_reg_mr()
711 level = mr->hwq.level; in bnxt_qplib_reg_mr()
735 if (mr->hwq.max_elements) in bnxt_qplib_reg_mr()
736 bnxt_qplib_free_hwq(res, &mr->hwq); in bnxt_qplib_reg_mr()
[all …]
qplib_rcfw.h
150 struct bnxt_qplib_hwq hwq; member
170 struct bnxt_qplib_hwq hwq; member
qplib_sp.h
118 struct bnxt_qplib_hwq hwq; member
123 struct bnxt_qplib_hwq hwq; member
main.c
1033 nq->hwq.max_elements = BNXT_QPLIB_NQE_MAX_CNT; in bnxt_re_alloc_res()
1041 rattr.dma_arr = nq->hwq.pbl[PBL_LVL_0].pg_map_arr; in bnxt_re_alloc_res()
1042 rattr.pages = nq->hwq.pbl[rdev->nq[i].hwq.level].pg_count; in bnxt_re_alloc_res()
1445 rattr.dma_arr = creq->hwq.pbl[PBL_LVL_0].pg_map_arr; in bnxt_re_dev_init()
1446 rattr.pages = creq->hwq.pbl[creq->hwq.level].pg_count; in bnxt_re_dev_init()
ib_verbs.c
2458 wqe->frmr.pbl_ptr = (__le64 *)qplib_frpl->hwq.pbl_ptr[0]; in bnxt_re_build_reg_wqe()
2459 wqe->frmr.pbl_dma_ptr = qplib_frpl->hwq.pbl_dma_ptr[0]; in bnxt_re_build_reg_wqe()
2462 wqe->frmr.levels = qplib_frpl->hwq.level; in bnxt_re_build_reg_wqe()
2896 resp.tail = cq->qplib_cq.hwq.cons; in bnxt_re_create_cq()
3408 lib_qp->id, lib_qp->sq.hwq.prod, in send_phantom_wqe()
3409 HWQ_CMP(lib_qp->sq.hwq.prod, &lib_qp->sq.hwq), in send_phantom_wqe()
3596 mr->qplib_mr.hwq.level = PBL_LVL_MAX; in bnxt_re_get_dma_mr()
/linux/drivers/scsi/cxlflash/
main.c
693 struct hwq *hwq; in stop_afu() local
732 struct hwq *hwq; in term_intr() local
778 struct hwq *hwq; in term_mc() local
1334 struct hwq *hwq = (struct hwq *)data; in cxlflash_sync_err_irq() local
1472 struct hwq *hwq = (struct hwq *)data; in cxlflash_rrq_irq() local
1540 struct hwq *hwq = (struct hwq *)data; in cxlflash_async_err_irq() local
1711 struct hwq *hwq; in init_pcr() local
1746 struct hwq *hwq; in init_global() local
1872 struct hwq *hwq; in start_afu() local
2097 struct hwq *hwq; in init_afu() local
[all …]
common.h
196 struct hwq { struct
231 struct hwq hwqs[CXLFLASH_MAX_HWQS]; argument
233 int (*context_reset)(struct hwq *hwq);
255 static inline struct hwq *get_hwq(struct afu *afu, u32 index) in get_hwq()
superpipe.c
267 struct hwq *hwq = get_hwq(afu, PRIMARY_HWQ); in afu_attach() local
291 val = hwq->ctx_hndl; in afu_attach()
298 val = SISL_RHT_CNT_ID((u64)MAX_RHT_PER_CONTEXT, (u64)(hwq->ctx_hndl)); in afu_attach()
1658 struct hwq *hwq = get_hwq(afu, PRIMARY_HWQ); in cxlflash_afu_recover() local
1735 reg = readq_be(&hwq->ctrl_map->mbox_r); in cxlflash_afu_recover()
/linux/include/scsi/
scsi_tcq.h
26 u16 hwq; in scsi_host_find_tag() local
31 hwq = blk_mq_unique_tag_to_hwq(tag); in scsi_host_find_tag()
32 if (hwq < shost->tag_set.nr_hw_queues) { in scsi_host_find_tag()
33 req = blk_mq_tag_to_rq(shost->tag_set.tags[hwq], in scsi_host_find_tag()
/linux/drivers/net/wireless/ti/wlcore/
tx.c
1201 int hwq = wlcore_tx_get_mac80211_queue(wlvif, queue); in wlcore_stop_queue_locked() local
1202 bool stopped = !!wl->queue_stop_reasons[hwq]; in wlcore_stop_queue_locked()
1205 WARN_ON_ONCE(test_and_set_bit(reason, &wl->queue_stop_reasons[hwq])); in wlcore_stop_queue_locked()
1210 ieee80211_stop_queue(wl->hw, hwq); in wlcore_stop_queue_locked()
1227 int hwq = wlcore_tx_get_mac80211_queue(wlvif, queue); in wlcore_wake_queue() local
1234 if (wl->queue_stop_reasons[hwq]) in wlcore_wake_queue()
1237 ieee80211_wake_queue(wl->hw, hwq); in wlcore_wake_queue()
1304 int hwq = wlcore_tx_get_mac80211_queue(wlvif, queue); in wlcore_is_queue_stopped_by_reason_locked() local
1307 return test_bit(reason, &wl->queue_stop_reasons[hwq]); in wlcore_is_queue_stopped_by_reason_locked()
1313 int hwq = wlcore_tx_get_mac80211_queue(wlvif, queue); in wlcore_is_queue_stopped_locked() local
[all …]
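The wlcore fragments above keep one bitmask of stop reasons per mac80211 hardware queue: the queue is stopped when its first reason bit gets set and woken again only after the last reason is cleared. A minimal sketch of that bookkeeping, with the actual ieee80211 start/stop calls stubbed out and illustrative names:

#include <stdbool.h>
#include <stdio.h>

#define NUM_HW_QUEUES 4

/* One bitmask of stop reasons per hardware queue, as in
 * wl->queue_stop_reasons[hwq]. */
static unsigned long queue_stop_reasons[NUM_HW_QUEUES];

static void hw_stop_queue(int hwq) { printf("stop hwq %d\n", hwq); }
static void hw_wake_queue(int hwq) { printf("wake hwq %d\n", hwq); }

/* Record a stop reason; only the first reason actually stops the queue. */
static void stop_queue(int hwq, int reason)
{
        bool stopped = queue_stop_reasons[hwq] != 0;

        queue_stop_reasons[hwq] |= 1UL << reason;
        if (!stopped)
                hw_stop_queue(hwq);
}

/* Clear one reason; wake the queue only once no reasons remain. */
static void wake_queue(int hwq, int reason)
{
        queue_stop_reasons[hwq] &= ~(1UL << reason);
        if (!queue_stop_reasons[hwq])
                hw_wake_queue(hwq);
}

int main(void)
{
        stop_queue(1, 0);   /* first reason: queue 1 is stopped */
        stop_queue(1, 3);   /* already stopped, only records the reason */
        wake_queue(1, 0);   /* reason 3 still set, stays stopped */
        wake_queue(1, 3);   /* last reason gone: queue 1 wakes up */
        return 0;
}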
/linux/drivers/net/wireless/mediatek/mt76/
tx.c
394 struct mt76_queue *hwq = phy->q_tx[MT_TXQ_PSD]; in mt76_release_buffered_frames() local
397 spin_lock_bh(&hwq->lock); in mt76_release_buffered_frames()
421 dev->queue_ops->kick(dev, hwq); in mt76_release_buffered_frames()
426 spin_unlock_bh(&hwq->lock); in mt76_release_buffered_frames()
616 struct mt76_queue *hwq; in mt76_stop_tx_queues() local
622 hwq = phy->q_tx[mt76_txq_get_qid(txq)]; in mt76_stop_tx_queues()
625 spin_lock_bh(&hwq->lock); in mt76_stop_tx_queues()
627 spin_unlock_bh(&hwq->lock); in mt76_stop_tx_queues()
mac80211.c
1479 struct mt76_queue *hwq; in mt76_init_queue() local
1482 hwq = devm_kzalloc(dev->dev, sizeof(*hwq), GFP_KERNEL); in mt76_init_queue()
1483 if (!hwq) in mt76_init_queue()
1486 err = dev->queue_ops->alloc(dev, hwq, idx, n_desc, 0, ring_base); in mt76_init_queue()
1490 return hwq; in mt76_init_queue()
/linux/drivers/scsi/
virtio_scsi.c
548 u16 hwq = blk_mq_unique_tag_to_hwq(tag); in virtscsi_pick_vq_mq() local
550 return &vscsi->req_vqs[hwq]; in virtscsi_pick_vq_mq()
722 static void virtscsi_commit_rqs(struct Scsi_Host *shost, u16 hwq) in virtscsi_commit_rqs() argument
726 virtscsi_kick_vq(&vscsi->req_vqs[hwq]); in virtscsi_commit_rqs()
scsi_debug.c
4738 u16 hwq; in get_queue() local
4741 hwq = blk_mq_unique_tag_to_hwq(tag); in get_queue()
4743 pr_debug("tag=%#x, hwq=%d\n", tag, hwq); in get_queue()
4744 if (WARN_ON_ONCE(hwq >= submit_queues)) in get_queue()
4745 hwq = 0; in get_queue()
4747 return sdebug_q_arr + hwq; in get_queue()
/linux/drivers/net/wireless/intel/iwlegacy/
common.h
2245 il_set_swq_id(struct il_tx_queue *txq, u8 ac, u8 hwq) in il_set_swq_id() argument
2248 BUG_ON(hwq > 31); /* only use 5 bits */ in il_set_swq_id()
2250 txq->swq_id = (hwq << 2) | ac; in il_set_swq_id()
2271 u8 hwq = (queue >> 2) & 0x1f; in il_wake_queue() local
2273 if (test_and_clear_bit(hwq, il->queue_stopped)) in il_wake_queue()
2282 u8 hwq = (queue >> 2) & 0x1f; in il_stop_queue() local
2284 if (!test_and_set_bit(hwq, il->queue_stopped)) in il_stop_queue()
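il_set_swq_id() above packs two fields into one software queue id: the access category in the low two bits and the hardware queue, limited to five bits by the BUG_ON, above it; il_wake_queue() and il_stop_queue() recover the hwq with (queue >> 2) & 0x1f. A small stand-alone sketch of that packing and unpacking, with illustrative names:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Pack the access category into bits 0-1 and the hardware queue into
 * bits 2-6, mirroring il_set_swq_id(): swq_id = (hwq << 2) | ac. */
static uint8_t set_swq_id(uint8_t ac, uint8_t hwq)
{
        assert(ac < 4);    /* two bits for the access category */
        assert(hwq <= 31); /* five bits for the hardware queue */
        return (hwq << 2) | ac;
}

static uint8_t swq_id_to_hwq(uint8_t swq_id)
{
        return (swq_id >> 2) & 0x1f;
}

static uint8_t swq_id_to_ac(uint8_t swq_id)
{
        return swq_id & 0x3;
}

int main(void)
{
        uint8_t id = set_swq_id(2, 17);

        printf("swq_id %#x -> hwq %u, ac %u\n",
               id, swq_id_to_hwq(id), swq_id_to_ac(id));
        return 0;
}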
/linux/drivers/block/
nbd.c
730 u16 hwq; in nbd_handle_reply() local
736 hwq = blk_mq_unique_tag_to_hwq(tag); in nbd_handle_reply()
737 if (hwq < nbd->tag_set.nr_hw_queues) in nbd_handle_reply()
738 req = blk_mq_tag_to_rq(nbd->tag_set.tags[hwq], in nbd_handle_reply()
/linux/drivers/scsi/ibmvscsi/
ibmvfc.h
760 u16 hwq; member
ibmvfc.c
1562 evt->hwq = 0; in ibmvfc_init_event()
1931 u16 hwq = blk_mq_unique_tag_to_hwq(tag_and_hwq); in ibmvfc_queuecommand() local
1944 scsi_channel = hwq % vhost->scsi_scrqs.active_queues; in ibmvfc_queuecommand()
1946 evt->hwq = hwq % vhost->scsi_scrqs.active_queues; in ibmvfc_queuecommand()
/linux/drivers/scsi/qla2xxx/
qla_os.c
849 uint16_t hwq; in qla2xxx_queuecommand() local
853 hwq = blk_mq_unique_tag_to_hwq(tag); in qla2xxx_queuecommand()
854 qpair = ha->queue_pair_map[hwq]; in qla2xxx_queuecommand()

Completed in 107 milliseconds