Searched refs:queue_count (Results 1 – 25 of 53) sorted by relevance

/drivers/mailbox/
ti-msgmgr.c
74 u8 queue_count; member
650 if (qinst->queue_id > d->queue_count) { in ti_msgmgr_queue_setup()
652 idx, qinst->queue_id, d->queue_count); in ti_msgmgr_queue_setup()
774 .queue_count = 64,
789 .queue_count = 190,
819 int queue_count; in ti_msgmgr_probe() local
857 queue_count = desc->num_valid_queues; in ti_msgmgr_probe()
858 if (!queue_count || queue_count > desc->queue_count) { in ti_msgmgr_probe()
860 queue_count, desc->queue_count); in ti_msgmgr_probe()
863 inst->num_valid_queues = queue_count; in ti_msgmgr_probe()
[all …]
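
The ti-msgmgr results above show a descriptor-driven bounds check: each SoC descriptor advertises a total queue_count (64 or 190 above), the number of valid queues reported at probe time must not exceed it, and each requested queue ID is checked against it during setup. Below is a minimal userspace sketch of that pattern; the struct and function names are illustrative, not the driver's.

    /* Sketch of the ti-msgmgr bounds checks; names are invented. */
    #include <stdio.h>

    struct msgmgr_desc {
        unsigned char queue_count;   /* total hw queues (e.g. 64 or 190) */
        int num_valid_queues;        /* queues actually described */
    };

    static int validate_desc(const struct msgmgr_desc *d)
    {
        /* probe-time check: valid queues must fit in the hardware */
        if (!d->num_valid_queues || d->num_valid_queues > d->queue_count) {
            fprintf(stderr, "invalid num_valid_queues %d (max %d)\n",
                    d->num_valid_queues, d->queue_count);
            return -1;
        }
        return 0;
    }

    static int validate_queue_id(const struct msgmgr_desc *d, int queue_id)
    {
        /* setup-time check: requested queue ID must exist */
        if (queue_id > d->queue_count) {
            fprintf(stderr, "queue_id %d out of range (max %d)\n",
                    queue_id, d->queue_count);
            return -1;
        }
        return 0;
    }

    int main(void)
    {
        struct msgmgr_desc d = { .queue_count = 64, .num_valid_queues = 5 };

        validate_desc(&d);            /* ok: 5 <= 64 */
        validate_queue_id(&d, 10);    /* ok */
        validate_queue_id(&d, 100);   /* rejected: 100 > 64 */
        return 0;
    }
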
/drivers/s390/crypto/
ap_queue.c
147 aq->queue_count = max_t(int, 0, aq->queue_count - 1); in ap_sm_recv()
149 aq->queue_count++; in ap_sm_recv()
150 if (aq->queue_count > 0) in ap_sm_recv()
177 aq->queue_count = 0; in ap_sm_recv()
208 if (aq->queue_count > 0) { in ap_sm_read()
215 if (aq->queue_count > 0) in ap_sm_read()
266 aq->queue_count = max_t(int, 1, aq->queue_count + 1); in ap_sm_write()
267 if (aq->queue_count == 1) in ap_sm_write()
395 if (aq->queue_count > 0 && aq->reply) in ap_sm_setirq_wait()
410 if (aq->queue_count > 0) in ap_sm_setirq_wait()
[all …]
ap_bus.h
199 int queue_count; /* # messages currently on AP queue. */ member
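
The ap_queue.c hits above show queue_count used as a clamped message counter: it is decremented on receive but never below 0, raised on a successful write but never left below 1, and tested with queue_count > 0 before polling for replies. A small sketch of that idiom follows, with max_t() modelled as a plain macro; it is not the s390 code itself.

    /* Sketch of the clamped queue_count idiom from ap_queue.c. */
    #include <stdio.h>

    #define max_int(a, b) ((a) > (b) ? (a) : (b))

    struct ap_queue { int queue_count; };

    static void ap_recv_one(struct ap_queue *aq)
    {
        /* one reply consumed; never let the counter go negative */
        aq->queue_count = max_int(0, aq->queue_count - 1);
    }

    static void ap_write_one(struct ap_queue *aq)
    {
        /* one message queued; at least one is now outstanding */
        aq->queue_count = max_int(1, aq->queue_count + 1);
    }

    int main(void)
    {
        struct ap_queue aq = { .queue_count = 0 };

        ap_recv_one(&aq);               /* stays at 0, never negative */
        ap_write_one(&aq);              /* 1 */
        ap_write_one(&aq);              /* 2 */
        if (aq.queue_count > 0)
            printf("poll for replies, queue_count=%d\n", aq.queue_count);
        return 0;
    }
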
/drivers/nvme/target/
loop.c
224 BUG_ON(hctx_idx >= ctrl->ctrl.queue_count); in nvme_loop_init_hctx()
303 for (i = 1; i < ctrl->ctrl.queue_count; i++) { in nvme_loop_destroy_io_queues()
308 ctrl->ctrl.queue_count = 1; in nvme_loop_destroy_io_queues()
340 ctrl->ctrl.queue_count++; in nvme_loop_init_io_queues()
354 for (i = 1; i < ctrl->ctrl.queue_count; i++) { in nvme_loop_connect_io_queues()
376 ctrl->ctrl.queue_count = 1; in nvme_loop_configure_admin_queue()
420 if (ctrl->ctrl.queue_count > 1) { in nvme_loop_shutdown_ctrl()
483 ctrl->ctrl.queue_count - 1); in nvme_loop_reset_ctrl_work()
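
Across the NVMe target and host results in this listing, queue_count follows one convention: index 0 is the admin queue, I/O queues occupy indices 1 through queue_count - 1, so queue_count == 1 means admin-only and creating nr_io_queues I/O queues sets queue_count = nr_io_queues + 1. A minimal sketch of that convention, using made-up names, follows.

    /* Sketch of the NVMe queue_count numbering convention. */
    #include <stdio.h>

    struct fake_ctrl { unsigned int queue_count; };

    static void create_io_queues(struct fake_ctrl *ctrl, unsigned int nr_io_queues)
    {
        ctrl->queue_count = nr_io_queues + 1;   /* + admin queue at index 0 */
    }

    static void destroy_io_queues(struct fake_ctrl *ctrl)
    {
        unsigned int i;

        /* I/O queues live at indices 1..queue_count-1 */
        for (i = 1; i < ctrl->queue_count; i++)
            printf("tearing down I/O queue %u\n", i);
        ctrl->queue_count = 1;                  /* admin queue remains */
    }

    int main(void)
    {
        struct fake_ctrl ctrl = { .queue_count = 1 };

        create_io_queues(&ctrl, 4);
        printf("%u I/O queues\n", ctrl.queue_count - 1);
        destroy_io_queues(&ctrl);
        return 0;
    }
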
/drivers/net/ethernet/freescale/dpaa2/
dpaa2-ethtool.c
918 int queue_count = dpaa2_eth_queue_count(priv); in dpaa2_eth_get_channels() local
920 channels->max_rx = queue_count; in dpaa2_eth_get_channels()
921 channels->max_tx = queue_count; in dpaa2_eth_get_channels()
922 channels->rx_count = queue_count; in dpaa2_eth_get_channels()
923 channels->tx_count = queue_count; in dpaa2_eth_get_channels()
926 channels->max_other = queue_count + 1; in dpaa2_eth_get_channels()
931 channels->other_count = queue_count + 1; in dpaa2_eth_get_channels()
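
In dpaa2_eth_get_channels() above, one queue_count value feeds both the RX and TX channel limits and counts, with one extra entry reported for the "other" channels. A small sketch of that fill pattern follows; the struct below only mimics the ethtool channel report and is not the kernel's struct ethtool_channels.

    /* Sketch of the dpaa2 channel report; struct is illustrative. */
    #include <stdio.h>

    struct channel_report {
        unsigned int max_rx, max_tx, max_other;
        unsigned int rx_count, tx_count, other_count;
    };

    static void fill_channels(struct channel_report *ch, unsigned int queue_count)
    {
        ch->max_rx = queue_count;
        ch->max_tx = queue_count;
        ch->rx_count = queue_count;
        ch->tx_count = queue_count;
        /* one extra "other" channel on top of the per-queue ones */
        ch->max_other = queue_count + 1;
        ch->other_count = queue_count + 1;
    }

    int main(void)
    {
        struct channel_report ch;

        fill_channels(&ch, 8);
        printf("rx=%u tx=%u other=%u\n", ch.rx_count, ch.tx_count, ch.other_count);
        return 0;
    }
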
/drivers/gpu/drm/panthor/
panthor_sched.c
578 u32 queue_count; member
914 for (i = 0; i < group->queue_count; i++) in group_release_work()
989 for (u32 i = 0; i < group->queue_count; i++) in group_bind_locked()
1025 for (u32 i = 0; i < group->queue_count; i++) in group_unbind_locked()
1206 for (i = 0; i < group->queue_count; i++) { in csg_slot_sync_queues_state_locked()
1276 for (i = 0; i < group->queue_count; i++) { in csg_slot_sync_state_locked()
1308 for (i = 0; i < group->queue_count; i++) { in csg_slot_prog_locked()
2062 for (i = 0; i < group->queue_count; i++) { in group_term_post_processing()
2614 for (u32 i = 0; i < group->queue_count; i++) in panthor_group_stop()
3407 for (i = 0; i < group->queue_count; i++) { in add_group_kbo_sizes()
[all …]
/drivers/nvme/host/
fc.c
2274 for (i = 1; i < ctrl->ctrl.queue_count; i++) in nvme_fc_free_io_queues()
2327 for (i = 1; i < ctrl->ctrl.queue_count; i++) { in nvme_fc_connect_io_queues()
2347 for (i = 1; i < ctrl->ctrl.queue_count; i++) in nvme_fc_init_io_queues()
2448 if (ctrl->ctrl.queue_count > 1) { in __nvme_fc_abort_outstanding_ios()
2449 for (q = 1; q < ctrl->ctrl.queue_count; q++) in __nvme_fc_abort_outstanding_ios()
2466 if (ctrl->ctrl.queue_count > 1) { in __nvme_fc_abort_outstanding_ios()
2877 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_fc_create_io_queues()
2938 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_fc_recreate_io_queues()
2940 if (ctrl->ctrl.queue_count == 1) in nvme_fc_recreate_io_queues()
3131 if (ctrl->ctrl.queue_count > 1) { in nvme_fc_create_association()
[all …]
rdma.c
325 BUG_ON(hctx_idx >= ctrl->ctrl.queue_count); in nvme_rdma_init_hctx()
664 for (i = 1; i < ctrl->ctrl.queue_count; i++) in nvme_rdma_free_io_queues()
672 for (i = 1; i < ctrl->ctrl.queue_count; i++) in nvme_rdma_stop_io_queues()
733 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_rdma_alloc_io_queues()
738 for (i = 1; i < ctrl->ctrl.queue_count; i++) { in nvme_rdma_alloc_io_queues()
902 ctrl->ctrl.queue_count - 1); in nvme_rdma_configure_io_queues()
947 if (ctrl->ctrl.queue_count > 1) { in nvme_rdma_teardown_io_queues()
1057 if (ctrl->ctrl.queue_count > 1) { in nvme_rdma_setup_ctrl()
1083 if (ctrl->ctrl.queue_count > 1) { in nvme_rdma_setup_ctrl()
2286 ctrl->ctrl.queue_count = opts->nr_io_queues + opts->nr_write_queues + in nvme_rdma_alloc_ctrl()
[all …]
tcp.c
2030 for (i = 1; i < ctrl->queue_count; i++) in nvme_tcp_free_io_queues()
2038 for (i = 1; i < ctrl->queue_count; i++) in nvme_tcp_stop_io_queues()
2040 for (i = 1; i < ctrl->queue_count; i++) in nvme_tcp_stop_io_queues()
2122 for (i = 1; i < ctrl->queue_count; i++) { in __nvme_tcp_alloc_io_queues()
2154 ctrl->queue_count = nr_io_queues + 1; in nvme_tcp_alloc_io_queues()
2185 nr_queues = min(ctrl->tagset->nr_hw_queues + 1, ctrl->queue_count); in nvme_tcp_configure_io_queues()
2204 ctrl->queue_count - 1); in nvme_tcp_configure_io_queues()
2301 if (ctrl->queue_count <= 1) in nvme_tcp_teardown_io_queues()
2401 if (ctrl->queue_count > 1) { in nvme_tcp_setup_ctrl()
2426 if (ctrl->queue_count > 1) { in nvme_tcp_setup_ctrl()
[all …]
pci.c
1746 for (i = dev->ctrl.queue_count - 1; i >= lowest; i--) { in nvme_free_queues()
1747 dev->ctrl.queue_count--; in nvme_free_queues()
1773 for (i = dev->ctrl.queue_count - 1; i > 0; i--) in nvme_suspend_io_queues()
1787 for (i = dev->ctrl.queue_count - 1; i > 0; i--) { in nvme_reap_pending_cqes()
1849 if (dev->ctrl.queue_count > qid) in nvme_alloc_queue()
1869 dev->ctrl.queue_count++; in nvme_alloc_queue()
2125 for (i = dev->ctrl.queue_count; i <= dev->max_qid; i++) { in nvme_create_io_queues()
2132 max = min(dev->max_qid, dev->ctrl.queue_count - 1); in nvme_create_io_queues()
3004 if (!dead && dev->ctrl.queue_count > 0) { in nvme_dev_disable()
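
The pci.c hits above use dev->ctrl.queue_count as a high-water mark of allocated queues: allocation returns early when the slot is already covered and otherwise bumps the count, while freeing walks down from the top and decrements as it goes. A minimal sketch of that pattern, with invented names, follows.

    /* Sketch of queue_count as an allocation high-water mark. */
    #include <stdio.h>

    struct fake_dev { int queue_count; };

    static int alloc_queue(struct fake_dev *dev, int qid)
    {
        if (dev->queue_count > qid)     /* slot already allocated */
            return 0;
        printf("allocating queue %d\n", qid);
        dev->queue_count++;
        return 0;
    }

    static void free_queues(struct fake_dev *dev, int lowest)
    {
        int i;

        /* free from the top down, keeping queues below 'lowest' */
        for (i = dev->queue_count - 1; i >= lowest; i--) {
            dev->queue_count--;
            printf("freeing queue %d\n", i);
        }
    }

    int main(void)
    {
        struct fake_dev dev = { .queue_count = 0 };
        int qid;

        for (qid = 0; qid <= 4; qid++)
            alloc_queue(&dev, qid);     /* admin queue + 4 I/O queues */
        free_queues(&dev, 1);           /* keep the admin queue */
        printf("remaining: %d\n", dev.queue_count);
        return 0;
    }
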
/drivers/gpu/drm/amd/amdkfd/
kfd_packet_manager.c
51 unsigned int process_count, queue_count, compute_queue_count, gws_queue_count; in pm_calc_rlib_size() local
58 queue_count = pm->dqm->active_queue_count; in pm_calc_rlib_size()
87 queue_count * map_queue_size; in pm_calc_rlib_size()
kfd_packet_manager_v9.c
56 packet->bitfields14.num_queues = (qpd->is_debug) ? 0 : qpd->queue_count; in pm_map_process_v9()
116 packet->bitfields14.num_queues = (qpd->is_debug) ? 0 : qpd->queue_count; in pm_map_process_aldebaran()
/drivers/scsi/hisi_sas/
hisi_sas_v1_hw.c
657 (u32)((1ULL << hisi_hba->queue_count) - 1)); in init_reg_v1_hw()
699 for (i = 0; i < hisi_hba->queue_count; i++) { in init_reg_v1_hw()
1650 for (i = 0; i < hisi_hba->queue_count; i++, idx++) { in interrupt_init_v1_hw()
1664 idx = (hisi_hba->n_phy * HISI_SAS_PHY_INT_NR) + hisi_hba->queue_count; in interrupt_init_v1_hw()
1679 hisi_hba->cq_nvecs = hisi_hba->queue_count; in interrupt_init_v1_hw()
1746 if (hisi_hba->queue_count < 0 || hisi_hba->queue_count > 32) { in check_fw_info_v1_hw()
hisi_sas_v3_hw.c
659 for (i = 0; i < hisi_hba->queue_count; i++) in interrupt_enable_v3_hw()
683 (u32)((1ULL << hisi_hba->queue_count) - 1)); in init_reg_v3_hw()
758 for (i = 0; i < hisi_hba->queue_count; i++) { in init_reg_v3_hw()
2724 for (i = 0; i < hisi_hba->queue_count; i++) in interrupt_disable_v3_hw()
3449 if (hisi_hba->queue_count < 0 || hisi_hba->queue_count > 16) { in check_fw_info_v3_hw()
3515 for (i = 0; i < hisi_hba->queue_count; i++) in debugfs_snapshot_cq_reg_v3_hw()
3527 for (i = 0; i < hisi_hba->queue_count; i++) { in debugfs_snapshot_dq_reg_v3_hw()
3942 for (c = 0; c < hisi_hba->queue_count; c++) { in debugfs_create_files_v3_hw()
3952 for (d = 0; d < hisi_hba->queue_count; d++) { in debugfs_create_files_v3_hw()
4654 for (i = 0; i < hisi_hba->queue_count; i++) { in debugfs_release_v3_hw()
[all …]
hisi_sas_main.c
700 int queue = i % hisi_hba->queue_count; in hisi_sas_alloc_dev()
733 if (cq->id < hisi_hba->queue_count - hisi_hba->iopoll_q_cnt) in hisi_sas_queue_is_poll()
750 for (i = 0; i < hisi_hba->queue_count; i++) { in hisi_sas_sync_poll_cqs()
763 for (i = 0; i < hisi_hba->queue_count; i++) { in hisi_sas_sync_cqs()
2190 for (i = 0; i < hisi_hba->queue_count; i++) { in hisi_sas_init_mem()
2242 for (i = 0; i < hisi_hba->queue_count; i++) { in hisi_sas_alloc()
2468 &hisi_hba->queue_count)) { in hisi_sas_get_fw_info()
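
The hisi_sas results above show two related uses: check_fw_info_*() rejects a firmware-reported queue_count outside the hardware limit (32 for v1, 16 for v3), and the init paths build an all-queues enable mask as (1ULL << queue_count) - 1. A short sketch of both follows; it is illustrative only.

    /* Sketch of the hisi_sas queue_count check and enable mask. */
    #include <stdio.h>

    static int check_queue_count(int queue_count, int hw_max)
    {
        if (queue_count < 0 || queue_count > hw_max) {
            fprintf(stderr, "invalid queue_count %d (max %d)\n",
                    queue_count, hw_max);
            return -1;
        }
        return 0;
    }

    int main(void)
    {
        int queue_count = 16;
        unsigned int mask;

        if (check_queue_count(queue_count, 16))
            return 1;

        /* one enable bit per completion queue */
        mask = (unsigned int)((1ULL << queue_count) - 1);
        printf("cq enable mask: 0x%x\n", mask);
        return 0;
    }
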
/drivers/net/ethernet/pensando/ionic/
ionic_debugfs.c
70 (u32 *)&ionic->ident.lif.eth.config.queue_count[IONIC_QTYPE_TXQ]); in ionic_debugfs_add_sizes()
72 (u32 *)&ionic->ident.lif.eth.config.queue_count[IONIC_QTYPE_RXQ]); in ionic_debugfs_add_sizes()
/drivers/net/ethernet/intel/fm10k/
fm10k_pf.c
502 u16 glort, queue_count, vsi_count, pc_count; in fm10k_configure_dglort_map_pf() local
517 queue_count = BIT(dglort->rss_l + dglort->pc_l); in fm10k_configure_dglort_map_pf()
524 for (queue = 0; queue < queue_count; queue++, q_idx++) { in fm10k_configure_dglort_map_pf()
534 queue_count = BIT(dglort->queue_l + dglort->rss_l + dglort->vsi_l); in fm10k_configure_dglort_map_pf()
540 for (queue = 0; queue < queue_count; queue++) { in fm10k_configure_dglort_map_pf()
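
In fm10k_configure_dglort_map_pf() above, the number of queues to program is derived from bit-field widths, e.g. queue_count = BIT(rss_l + pc_l), and the code then loops over that many queues. A small sketch with example widths follows; BIT() is modelled as a shift and the widths are made up.

    /* Sketch of deriving queue_count from bit widths. */
    #include <stdio.h>

    #define BIT(n) (1u << (n))

    int main(void)
    {
        unsigned int rss_l = 2, pc_l = 1;           /* example widths */
        unsigned int queue_count = BIT(rss_l + pc_l);
        unsigned int queue;

        for (queue = 0; queue < queue_count; queue++)
            printf("programming queue %u\n", queue);
        return 0;
    }
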
/drivers/infiniband/sw/rxe/
rxe_cq.c
28 count = queue_count(cq->queue, QUEUE_TYPE_TO_CLIENT); in rxe_cq_chk_attr()
rxe_queue.c
118 if (!queue_empty(q, q->type) && (num_elem < queue_count(q, type))) in resize_finish()
rxe_queue.h
170 static inline u32 queue_count(const struct rxe_queue *q, in queue_count() function
/drivers/net/wireless/microchip/wilc1000/
netdev.c
747 int queue_count; in wilc_mac_xmit() local
768 queue_count = wilc_wlan_txq_add_net_pkt(ndev, tx_data, in wilc_mac_xmit()
772 if (queue_count > FLOW_CONTROL_UPPER_THRESHOLD) { in wilc_mac_xmit()
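
In wilc_mac_xmit() above, the enqueue helper returns the new depth of the transmit queue, and the driver pauses transmission once that count crosses an upper threshold. The sketch below models that flow-control check; the helper and the threshold value are assumptions for illustration, not the driver's definitions.

    /* Sketch of the flow-control check; threshold value is assumed. */
    #include <stdio.h>

    #define FLOW_CONTROL_UPPER_THRESHOLD 256    /* illustrative value */

    static int tx_queue_len;

    /* pretend to enqueue one packet and return the new queue depth */
    static int txq_add_pkt(void)
    {
        return ++tx_queue_len;
    }

    int main(void)
    {
        int i, queue_count = 0;

        for (i = 0; i < 300; i++)
            queue_count = txq_add_pkt();

        if (queue_count > FLOW_CONTROL_UPPER_THRESHOLD)
            printf("queue_count=%d: stop transmit queue\n", queue_count);
        return 0;
    }
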
/drivers/net/wireless/intel/iwlegacy/
4965-rs.c
234 while (tl->queue_count && tl->time_stamp < oldest_time) { in il4965_rs_tl_rm_old_stats()
238 tl->queue_count--; in il4965_rs_tl_rm_old_stats()
272 if (!(tl->queue_count)) { in il4965_rs_tl_add_packet()
275 tl->queue_count = 1; in il4965_rs_tl_add_packet()
293 if ((idx + 1) > tl->queue_count) in il4965_rs_tl_add_packet()
294 tl->queue_count = idx + 1; in il4965_rs_tl_add_packet()
317 if (!(tl->queue_count)) in il4965_rs_tl_get_load()
/drivers/net/wireless/intel/iwlwifi/dvm/
rs.h
309 u8 queue_count; /* number of queues that has member
rs.c
232 while (tl->queue_count && in rs_tl_rm_old_stats()
237 tl->queue_count--; in rs_tl_rm_old_stats()
271 if (!(tl->queue_count)) { in rs_tl_add_packet()
274 tl->queue_count = 1; in rs_tl_add_packet()
292 if ((index + 1) > tl->queue_count) in rs_tl_add_packet()
293 tl->queue_count = index + 1; in rs_tl_add_packet()
345 if (!(tl->queue_count)) in rs_tl_get_load()
/drivers/md/dm-vdo/
vdo.h
292 unsigned int queue_count, void *contexts[]);
