/drivers/infiniband/core/

uverbs_std_types_wq.c
    58    ret = uverbs_get_const(&wq_init_attr.wq_type, attrs,  in UVERBS_HANDLER()
    63    if (wq_init_attr.wq_type != IB_WQT_RQ)  in UVERBS_HANDLER()
    81    wq->wq_type = wq_init_attr.wq_type;  in UVERBS_HANDLER()

uverbs_cmd.c
  2948    wq_init_attr.wq_type = cmd.wq_type;  in ib_uverbs_ex_create_wq()
  2962    wq->wq_type = wq_init_attr.wq_type;  in ib_uverbs_ex_create_wq()

verbs.c
  2554    wq->wq_type = wq_attr->wq_type;  in ib_create_wq()

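The three core files above form one flow: the uverbs handlers read the WQ type from the user command, only IB_WQT_RQ is accepted, and ib_create_wq() copies the value onto the new ib_wq. Below is a minimal user-space sketch of that validate-and-copy pattern; the struct layouts, the max_wr field and the create_wq() helper are simplified stand-ins, not the kernel definitions.

/*
 * Mock of the uverbs create-WQ flow: not the kernel structures, just the
 * same shape.  Assumed names: create_wq(), max_wr.
 */
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

enum ib_wq_type { IB_WQT_RQ = 0 };              /* only the value used here */

struct ib_wq_init_attr { enum ib_wq_type wq_type; unsigned int max_wr; };
struct ib_wq           { enum ib_wq_type wq_type; };

/* Stand-in for the ib_create_wq() step: the verbs layer copies wq_type over. */
static struct ib_wq *create_wq(const struct ib_wq_init_attr *attr)
{
        struct ib_wq *wq = calloc(1, sizeof(*wq));

        if (!wq)
                return NULL;
        wq->wq_type = attr->wq_type;            /* verbs.c:2554 counterpart */
        return wq;
}

int main(void)
{
        struct ib_wq_init_attr init_attr = { .wq_type = IB_WQT_RQ, .max_wr = 64 };
        struct ib_wq *wq;

        /* uverbs_std_types_wq.c:63 counterpart: reject anything but an RQ. */
        if (init_attr.wq_type != IB_WQT_RQ)
                return EINVAL;

        wq = create_wq(&init_attr);
        if (!wq)
                return ENOMEM;
        printf("created WQ of type %d\n", wq->wq_type);
        free(wq);
        return 0;
}
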
/drivers/net/ethernet/mellanox/mlx5/core/en/

txrx.h
   392    if (rq->wq_type == MLX5_WQ_TYPE_LINKED_LIST_STRIDING_RQ) {  in mlx5e_rqwq_reset()
   418    switch (rq->wq_type) {  in mlx5e_rqwq_get_size()
   428    switch (rq->wq_type) {  in mlx5e_rqwq_get_cur_sz()
   438    switch (rq->wq_type) {  in mlx5e_rqwq_get_head()
   448    switch (rq->wq_type) {  in mlx5e_rqwq_get_wqe_counter()

params.c
   793    static u8 mlx5e_get_rqwq_log_stride(u8 wq_type, int ndsegs)  in mlx5e_get_rqwq_log_stride() argument
   797    switch (wq_type) {  in mlx5e_get_rqwq_log_stride()
   946    MLX5_SET(wq, wq, wq_type, params->rq_wq_type);  in mlx5e_build_rq_param()
   966    MLX5_SET(wq, wq, wq_type, MLX5_WQ_TYPE_CYCLIC);  in mlx5e_build_drop_rq_param()

trap.c
    45    rq->wq_type = params->rq_wq_type;  in mlx5e_init_trap_rq()

ptp.c
   708    rq->wq_type = params->rq_wq_type;  in mlx5e_init_ptp_rq()

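In en/txrx.h every mlx5e_rqwq_* helper dispatches on rq->wq_type, reading either the striding-RQ (linked-list) work queue or the cyclic one. Below is a sketch of that accessor pattern; the enum values, struct layouts and sizes are mocked-up stand-ins, not the mlx5/PRM definitions.

/*
 * Mocked-up RQ: the enum values and struct layouts are illustrative
 * stand-ins, not the mlx5/PRM definitions.
 */
#include <stdio.h>

enum {
        MOCK_WQ_TYPE_LINKED_LIST_STRIDING_RQ,
        MOCK_WQ_TYPE_CYCLIC,
};

struct mock_wq_ll  { unsigned int nwqes; };     /* striding RQ, linked list */
struct mock_wq_cyc { unsigned int nwqes; };     /* legacy RQ, cyclic        */

struct mock_rq {
        int wq_type;
        union {
                struct mock_wq_ll  ll;
                struct mock_wq_cyc cyc;
        } wqe;
};

/* Mirrors the shape of mlx5e_rqwq_get_size(): one switch per accessor. */
static unsigned int mock_rqwq_get_size(const struct mock_rq *rq)
{
        switch (rq->wq_type) {
        case MOCK_WQ_TYPE_LINKED_LIST_STRIDING_RQ:
                return rq->wqe.ll.nwqes;
        default:                                /* MOCK_WQ_TYPE_CYCLIC */
                return rq->wqe.cyc.nwqes;
        }
}

int main(void)
{
        struct mock_rq rq = {
                .wq_type = MOCK_WQ_TYPE_CYCLIC,
                .wqe.cyc = { .nwqes = 1024 },
        };

        printf("RQ size: %u WQEs\n", mock_rqwq_get_size(&rq));
        return 0;
}
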
/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/

setup.c
    70    rq->wq_type = params->rq_wq_type;  in mlx5e_init_xsk_rq()

/drivers/net/ethernet/mellanox/mlx5/core/

en.h
   168    static inline u16 mlx5_min_rx_wqes(int wq_type, u32 wq_size)  in mlx5_min_rx_wqes() argument
   170    switch (wq_type) {  in mlx5_min_rx_wqes()
   734    u8 wq_type;  member

wc.c
   178    MLX5_SET(wq, wq, wq_type, MLX5_WQ_TYPE_CYCLIC);  in create_wc_sq()

en_main.c
   728    rq->wq_type = params->rq_wq_type;  in mlx5e_init_rxq_rq()
   912    switch (rq->wq_type) {  in mlx5e_alloc_rq()
  1030    if (rq->wq_type == MLX5_WQ_TYPE_LINKED_LIST_STRIDING_RQ) {  in mlx5e_alloc_rq()
  1069    switch (rq->wq_type) {  in mlx5e_alloc_rq()
  1096    switch (rq->wq_type) {  in mlx5e_free_rq()
  1274    u16 min_wqes = mlx5_min_rx_wqes(rq->wq_type, mlx5e_rqwq_get_size(rq));  in mlx5e_wait_for_min_rx_wqes()
  1297    if (rq->wq_type != MLX5_WQ_TYPE_LINKED_LIST_STRIDING_RQ)  in mlx5e_free_rx_missing_descs()
  1331    if (rq->wq_type == MLX5_WQ_TYPE_LINKED_LIST_STRIDING_RQ) {  in mlx5e_free_rx_descs()
  1784    MLX5_SET(wq, wq, wq_type, MLX5_WQ_TYPE_CYCLIC);  in mlx5e_create_sq()

en_rx.c
   100    if (rq->wq_type == MLX5_WQ_TYPE_LINKED_LIST_STRIDING_RQ)  in mlx5e_read_enhanced_title_slot()
   176    if (rq->wq_type == MLX5_WQ_TYPE_LINKED_LIST_STRIDING_RQ)  in mlx5e_decompress_cqe()
  2716    switch (rq->wq_type) {  in mlx5e_rq_set_handlers()

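mlx5_min_rx_wqes() in en.h, used by mlx5e_wait_for_min_rx_wqes() in en_main.c, also switches on the WQ type: one striding-RQ WQE absorbs many packets, so the ready threshold for it can be much smaller. Below is a sketch of that idea; the threshold constants and names are placeholders, not the driver's defaults.

/*
 * Placeholder thresholds; only the per-type dispatch matches the pattern
 * of mlx5_min_rx_wqes() in en.h.
 */
#include <stdio.h>

enum {
        MOCK_WQ_TYPE_CYCLIC,
        MOCK_WQ_TYPE_STRIDING_RQ,
};

#define MOCK_MIN_RX_WQES        16U     /* placeholder for the cyclic default   */
#define MOCK_MIN_RX_WQES_MPW     2U     /* placeholder for the striding default */

static unsigned int min_u32(unsigned int a, unsigned int b)
{
        return a < b ? a : b;
}

/* Pick a per-type threshold, never larger than the queue itself. */
static unsigned int mock_min_rx_wqes(int wq_type, unsigned int wq_size)
{
        switch (wq_type) {
        case MOCK_WQ_TYPE_STRIDING_RQ:
                return min_u32(MOCK_MIN_RX_WQES_MPW, wq_size);
        default:
                return min_u32(MOCK_MIN_RX_WQES, wq_size);
        }
}

int main(void)
{
        printf("cyclic: %u, striding: %u\n",
               mock_min_rx_wqes(MOCK_WQ_TYPE_CYCLIC, 1024),
               mock_min_rx_wqes(MOCK_WQ_TYPE_STRIDING_RQ, 1024));
        return 0;
}
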
/drivers/net/ethernet/microsoft/mana/

mana_en.c
  1353    u32 wq_type, struct mana_obj_spec *wq_spec,  in mana_create_wq_obj() argument
  1365    req.wq_type = wq_type;  in mana_create_wq_obj()
  1406    void mana_destroy_wq_obj(struct mana_port_context *apc, u32 wq_type,  in mana_destroy_wq_obj() argument
  1416    req.wq_type = wq_type;  in mana_destroy_wq_obj()

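mana_create_wq_obj() and mana_destroy_wq_obj() take the WQ type as a plain u32 and copy it into the request sent to the device. Below is a sketch of that fill-and-send step; apart from wq_type, the request layout, field names and the send helper are illustrative, not the MANA definitions.

/*
 * Simplified request layout; only wq_type matches the listing above, the
 * other names and the "send" step are illustrative.
 */
#include <stdint.h>
#include <stdio.h>

struct mock_create_wq_req {
        uint32_t wq_type;               /* which kind of WQ the object backs */
        uint64_t wq_region;             /* placeholder for the queue memory  */
};

/* Stand-in for the fill-request-and-send-to-device step. */
static int mock_create_wq_obj(uint32_t wq_type, uint64_t region)
{
        struct mock_create_wq_req req = {
                .wq_type   = wq_type,   /* mana_en.c:1365 counterpart */
                .wq_region = region,
        };

        printf("create WQ object: type=%u region=0x%llx\n",
               (unsigned int)req.wq_type, (unsigned long long)req.wq_region);
        return 0;
}

int main(void)
{
        return mock_create_wq_obj(1 /* illustrative type value */, 0x1000);
}
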
/drivers/net/ethernet/mellanox/mlx5/core/lib/

aso.c
   205    MLX5_SET(wq, wq, wq_type, MLX5_WQ_TYPE_CYCLIC);  in create_aso_sq()

/drivers/infiniband/hw/mlx5/

qp.c
  1393    MLX5_SET(wq, wq, wq_type, MLX5_WQ_TYPE_CYCLIC);  in create_raw_packet_qp_sq()
  1479    MLX5_SET(wq, wq, wq_type, MLX5_WQ_TYPE_CYCLIC);  in create_raw_packet_qp_rq()
  5263    MLX5_SET(wq, wq, wq_type,  in create_rq()
  5483    switch (init_attr->wq_type) {  in mlx5_ib_create_wq()
  5497    init_attr->wq_type);  in mlx5_ib_create_wq()

/drivers/net/ethernet/mellanox/mlx5/core/steering/hws/

send.c
   766    MLX5_SET(wq, wq, wq_type, MLX5_WQ_TYPE_CYCLIC);  in hws_send_ring_create_sq()

/drivers/infiniband/hw/mlx4/

qp.c
  4157    if (init_attr->wq_type != IB_WQT_RQ) {  in mlx4_ib_create_wq()
  4158    pr_debug("unsupported wq type %d\n", init_attr->wq_type);  in mlx4_ib_create_wq()

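Like the switch on init_attr->wq_type in mlx5_ib_create_wq() further up, mlx4_ib_create_wq() only implements receive work queues and rejects anything else with a debug message. Below is a sketch of that guard; the init-attr layout and the error value are simplified stand-ins.

/*
 * Simplified init-attr layout and error value; the guard itself mirrors
 * mlx4 qp.c:4157-4158 above.
 */
#include <errno.h>
#include <stdio.h>

enum ib_wq_type { IB_WQT_RQ = 0 };

struct mock_wq_init_attr { enum ib_wq_type wq_type; };

static int mock_create_wq(const struct mock_wq_init_attr *init_attr)
{
        if (init_attr->wq_type != IB_WQT_RQ) {
                /* counterpart of the pr_debug() at mlx4 qp.c:4158 */
                fprintf(stderr, "unsupported wq type %d\n", init_attr->wq_type);
                return -EOPNOTSUPP;     /* illustrative error code */
        }
        return 0;
}

int main(void)
{
        struct mock_wq_init_attr attr = { .wq_type = (enum ib_wq_type)1 };

        return mock_create_wq(&attr) ? 1 : 0;
}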