Searched refs:cqe64 (Results 1 – 6 of 6) sorted by relevance
/linux/drivers/infiniband/hw/mlx5/cq.c
      85  struct mlx5_cqe64 *cqe64;                          in get_sw_cqe() local
     456  struct mlx5_cqe64 *cqe64;                          in mlx5_poll_one() local
     479  opcode = get_cqe_opcode(cqe64);                    in mlx5_poll_one()
     508  handle_good_req(wc, cqe64, wq, idx);               in mlx5_poll_one()
     518  handle_responder(wc, cqe64, *cur_qp);              in mlx5_poll_one()
     525  err_cqe = (struct mlx5_err_cqe *)cqe64;            in mlx5_poll_one()
     559  (struct mlx5_sig_err_cqe *)cqe64;                  in mlx5_poll_one()
     878  struct mlx5_cqe64 *cqe64;                          in init_cq_frag_buf() local
     883  cqe64->op_own = MLX5_CQE_INVALID << 4;             in init_cq_frag_buf()
    1082  struct mlx5_cqe64 *cqe64, *dest64;                 in __mlx5_ib_cq_clean() local
    [all …]
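The cq.c hits trace the IB completion path: get_sw_cqe() picks up a software-owned CQE, mlx5_poll_one() reads the opcode from the high nibble of op_own and hands the CQE to the requester, responder, or error handlers, and init_cq_frag_buf() stamps fresh CQEs invalid. A minimal sketch of that dispatch follows; the RESP_*/ERR opcode names come from the standard mlx5 CQE opcode enum in <linux/mlx5/device.h> rather than from these hits, so treat them as assumptions.

/*
 * Sketch only: condenses the opcode dispatch visible in mlx5_poll_one()
 * above.  The handle_*() calls stay as comments because their
 * surrounding state (wc, wq, idx, cur_qp) is not part of these hits.
 */
#include <linux/mlx5/device.h>

static void sketch_dispatch_cqe(struct mlx5_cqe64 *cqe64)
{
	u8 opcode = get_cqe_opcode(cqe64);	/* op_own >> 4 */

	switch (opcode) {
	case MLX5_CQE_REQ:
		/* send-side completion: handle_good_req(wc, cqe64, wq, idx); */
		break;
	case MLX5_CQE_RESP_WR_IMM:
	case MLX5_CQE_RESP_SEND:
	case MLX5_CQE_RESP_SEND_IMM:
		/* receive-side completion: handle_responder(wc, cqe64, *cur_qp); */
		break;
	case MLX5_CQE_REQ_ERR:
	case MLX5_CQE_RESP_ERR:
		/* error path: reinterpret as struct mlx5_err_cqe (line 525) */
		break;
	default:
		break;
	}
}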
/linux/drivers/net/ethernet/mellanox/mlx5/core/steering/hws/mlx5hws_send.c
     433  struct mlx5_cqe64 *cqe64)                                      in mlx5hws_parse_cqe() argument
     435  if (unlikely(get_cqe_opcode(cqe64) != MLX5_CQE_REQ)) {         in mlx5hws_parse_cqe()
     436  struct mlx5_err_cqe *err_cqe = (struct mlx5_err_cqe *)cqe64;   in mlx5hws_parse_cqe()
     438  mlx5_core_err(cq->mdev, "Bad OP in HWS SQ CQE: 0x%x\n", get_cqe_opcode(cqe64));  in mlx5hws_parse_cqe()
     452  struct mlx5_cqe64 *cqe64;                                      in mlx5hws_cq_poll_one() local
     455  cqe64 = mlx5_cqwq_get_cqe(&cq->wq);                            in mlx5hws_cq_poll_one()
     456  if (!cqe64) {                                                  in mlx5hws_cq_poll_one()
     467  err = mlx5hws_parse_cqe(cq, cqe64);                            in mlx5hws_cq_poll_one()
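The HWS hits show the minimal send-queue polling loop: fetch the next software-owned CQE from the CQ work queue and treat any opcode other than MLX5_CQE_REQ as an error completion. Below is a hedged sketch of that loop; mlx5_cqwq_pop() and the "wq.h" include are assumptions taken from the driver-private mlx5 core WQ helpers, not from these hits.

#include <linux/errno.h>
#include <linux/mlx5/device.h>
#include "wq.h"		/* assumption: driver-private mlx5_cqwq helpers */

/* Sketch of mlx5hws_cq_poll_one()/mlx5hws_parse_cqe() above. */
static int sketch_hws_poll_one(struct mlx5_cqwq *cqwq)
{
	struct mlx5_cqe64 *cqe64;

	cqe64 = mlx5_cqwq_get_cqe(cqwq);	/* NULL while HW still owns the entry */
	if (!cqe64)
		return -EAGAIN;

	if (unlikely(get_cqe_opcode(cqe64) != MLX5_CQE_REQ)) {
		/* lines 436/438: the driver reinterprets the entry as
		 * struct mlx5_err_cqe and logs the bad opcode */
		return -EIO;
	}

	mlx5_cqwq_pop(cqwq);			/* assumption: advance the consumer index */
	return 0;
}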
/linux/drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c
     189  static int dr_parse_cqe(struct mlx5dr_cq *dr_cq, struct mlx5_cqe64 *cqe64)  in dr_parse_cqe() argument
     194  opcode = get_cqe_opcode(cqe64);              in dr_parse_cqe()
     196  idx = be16_to_cpu(cqe64->wqe_counter) &      in dr_parse_cqe()
     202  idx = be16_to_cpu(cqe64->wqe_counter) &      in dr_parse_cqe()
     214  struct mlx5_cqe64 *cqe64;                    in dr_cq_poll_one() local
     217  cqe64 = mlx5_cqwq_get_cqe(&dr_cq->wq);       in dr_cq_poll_one()
     218  if (!cqe64) {                                in dr_cq_poll_one()
     229  err = dr_parse_cqe(dr_cq, cqe64);            in dr_cq_poll_one()
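dr_parse_cqe() recovers the work-queue slot that completed from the CQE itself: wqe_counter is a big-endian counter masked down to a ring index (the hits truncate the mask operand; lines 196 and 202 apply it to different rings depending on the opcode read at line 194). A sketch of that index recovery, with wqe_cnt as an assumed name for the power-of-two ring size:

#include <linux/mlx5/device.h>

/* Sketch of the index recovery in dr_parse_cqe() above.  wqe_cnt is an
 * assumed name; the hits cut off the right-hand side of the mask. */
static unsigned int sketch_cqe_to_wqe_idx(struct mlx5_cqe64 *cqe64,
					  unsigned int wqe_cnt)
{
	/* wqe_counter is big-endian on the wire; a power-of-two ring size
	 * lets the AND wrap the counter into the ring. */
	return be16_to_cpu(cqe64->wqe_counter) & (wqe_cnt - 1);
}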
/linux/drivers/vfio/pci/mlx5/cmd.c
    1013  struct mlx5_cqe64 *cqe64;                                         in init_cq_frag_buf() local
    1019  cqe64 = buf->cqe_size == 64 ? cqe : cqe + 64;                     in init_cq_frag_buf()
    1020  cqe64->op_own = MLX5_CQE_INVALID << 4;                            in init_cq_frag_buf()
    1672  struct mlx5_cqe64 *cqe64;                                         in get_sw_cqe() local
    1674  cqe64 = (cq->mcq.cqe_sz == 64) ? cqe : cqe + 64;                  in get_sw_cqe()
    1676  if (likely(get_cqe_opcode(cqe64) != MLX5_CQE_INVALID) &&          in get_sw_cqe()
    1677  !((cqe64->op_own & MLX5_CQE_OWNER_MASK) ^ !!(n & (cq->ncqe)))) {  in get_sw_cqe()
    1678  return cqe64;                                                     in get_sw_cqe()
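The vfio migration driver repeats two CQE idioms in compact form. First, when the device uses 128-byte CQEs, the struct mlx5_cqe64 proper sits in the second half of each entry, hence the `cqe + 64` at lines 1019 and 1674. Second, get_sw_cqe() only returns an entry whose opcode is valid and whose owner bit matches the lap parity of the consumer index. A sketch of both, with cqe_buf/cqe_sz/n/ncqe as assumed parameter names:

#include <linux/mlx5/device.h>

/* With 128-byte CQEs the mlx5_cqe64 occupies the second 64 bytes of the
 * entry, so the driver steps past the first half before reading op_own. */
static struct mlx5_cqe64 *sketch_cqe64_of(void *cqe_buf, int cqe_sz)
{
	return cqe_sz == 64 ? cqe_buf : cqe_buf + 64;
}

/* A CQE is consumable by software when its opcode is not INVALID and its
 * owner bit matches the lap parity of the consumer index n (ncqe is the
 * power-of-two ring size, so n & ncqe flips each time the ring wraps). */
static struct mlx5_cqe64 *sketch_get_sw_cqe(void *cqe_buf, int cqe_sz,
					    int n, int ncqe)
{
	struct mlx5_cqe64 *cqe64 = sketch_cqe64_of(cqe_buf, cqe_sz);

	if (get_cqe_opcode(cqe64) != MLX5_CQE_INVALID &&
	    !((cqe64->op_own & MLX5_CQE_OWNER_MASK) ^ !!(n & ncqe)))
		return cqe64;

	return NULL;
}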
/linux/drivers/vdpa/mlx5/net/mlx5_vnet.c
     345  struct mlx5_cqe64 *cqe64;                                       in cq_frag_buf_init() local
     351  cqe64 = cqe;                                                    in cq_frag_buf_init()
     352  cqe64->op_own = MLX5_CQE_INVALID << 4;                          in cq_frag_buf_init()
     358  struct mlx5_cqe64 *cqe64 = get_cqe(cq, n & (cq->cqe - 1));      in get_sw_cqe() local
     360  if (likely(get_cqe_opcode(cqe64) != MLX5_CQE_INVALID) &&        in get_sw_cqe()
     361  !((cqe64->op_own & MLX5_CQE_OWNER_MASK) ^ !!(n & cq->cqe)))     in get_sw_cqe()
     362  return cqe64;                                                   in get_sw_cqe()
     501  struct mlx5_cqe64 *cqe64;                                       in mlx5_vdpa_poll_one() local
     503  cqe64 = next_cqe_sw(vcq);                                       in mlx5_vdpa_poll_one()
     504  if (!cqe64)                                                     in mlx5_vdpa_poll_one()
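The vdpa hits show the other half of the protocol: cq_frag_buf_init() stamps every CQE with MLX5_CQE_INVALID before the CQ is armed, so the get_sw_cqe()/next_cqe_sw() checks can never mistake stale memory for a completion. A sketch of that initialization, simplified to a flat array of 64-byte CQEs (the real driver walks a fragmented buffer through get_cqe()):

#include <linux/mlx5/device.h>

/* Sketch of cq_frag_buf_init() above, assuming a contiguous array of
 * 64-byte CQEs rather than the driver's fragmented buffer. */
static void sketch_cq_buf_init(struct mlx5_cqe64 *cqes, int nent)
{
	int i;

	for (i = 0; i < nent; i++)
		/* opcode nibble = INVALID, so the sw-ownership test at
		 * lines 360-362 rejects the entry until HW rewrites it */
		cqes[i].op_own = MLX5_CQE_INVALID << 4;
}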
/linux/include/linux/mlx5/device.h
    1083  struct mlx5_cqe64 cqe64;      member
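The lone device.h hit is struct mlx5_cqe64 embedded as a member of a larger structure; this matches the `cqe + 64` arithmetic in the vfio and vdpa code above, where a 128-byte CQE carries the ordinary 64-byte CQE in its second half. A sketch of what that container plausibly looks like; only the cqe64 member itself is confirmed by the hit, the rest is an assumption:

#include <linux/mlx5/device.h>

/* Assumed shape of the enclosing struct at device.h:1083.  Only the
 * "struct mlx5_cqe64 cqe64;" member is confirmed by the search hit. */
struct sketch_mlx5_cqe128 {
	u8			first_half[64];	/* inline data / reserved */
	struct mlx5_cqe64	cqe64;		/* hit at line 1083 */
};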
Completed in 34 milliseconds