
Searched refs:hr_cq (Results 1 – 6 of 6) sorted by relevance

/linux-6.3-rc2/drivers/infiniband/hw/hns/
hns_roce_cq.c
120 hr_cq->cqn); in hns_roce_create_cqc()
124 hr_cq->cqn, ret); in hns_roce_create_cqc()
150 hr_cq->cqn, ret); in alloc_cqc()
154 ret = xa_err(xa_store(&cq_table->array, hr_cq->cqn, hr_cq, GFP_KERNEL)); in alloc_cqc()
184 hr_cq->cqn); in free_cqc()
207 buf_attr.region[0].size = hr_cq->cq_depth * hr_cq->cqe_size; in alloc_cq_buf()
249 hr_cq->set_ci_db = hr_cq->db.db_record; in alloc_cq_db()
419 hr_cq->arm_sn = 1; in hns_roce_create_cq()
456 if (!hr_cq) { in hns_roce_cq_completion()
462 ++hr_cq->arm_sn; in hns_roce_cq_completion()
[all …]
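
The hits above trace CQ setup in hns_roce_cq.c: the CQ context is created for hr_cq->cqn, the hr_cq pointer is stored in an xarray keyed by that CQN, the CQE buffer is sized as cq_depth * cqe_size, the software doorbell pointer is taken from the DB record, and arm_sn starts at 1. A minimal, simplified sketch of the xarray registration/lookup pattern these lines reflect (the demo_* names are illustrative, not the driver's):

#include <linux/xarray.h>
#include <linux/gfp.h>

struct demo_cq {
	unsigned long cqn;		/* CQ number, used as the xarray index */
};

static DEFINE_XARRAY(demo_cq_table);

/* Register a CQ so event handlers can later find it by CQN. */
static int demo_register_cq(struct demo_cq *cq)
{
	/* xa_store() returns the previous entry or an xa_err()-encoded pointer. */
	return xa_err(xa_store(&demo_cq_table, cq->cqn, cq, GFP_KERNEL));
}

/* Look up a CQ by CQN, e.g. when a completion event arrives. */
static struct demo_cq *demo_lookup_cq(unsigned long cqn)
{
	return xa_load(&demo_cq_table, cqn);
}
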
hns_roce_restrack.c
16 struct hns_roce_cq *hr_cq = to_hr_cq(ib_cq); in hns_roce_fill_res_cq_entry() local
23 if (rdma_nl_put_driver_u32(msg, "cq_depth", hr_cq->cq_depth)) in hns_roce_fill_res_cq_entry()
26 if (rdma_nl_put_driver_u32(msg, "cons_index", hr_cq->cons_index)) in hns_roce_fill_res_cq_entry()
29 if (rdma_nl_put_driver_u32(msg, "cqe_size", hr_cq->cqe_size)) in hns_roce_fill_res_cq_entry()
32 if (rdma_nl_put_driver_u32(msg, "arm_sn", hr_cq->arm_sn)) in hns_roce_fill_res_cq_entry()
48 struct hns_roce_cq *hr_cq = to_hr_cq(ib_cq); in hns_roce_fill_res_cq_entry_raw() local
57 ret = hr_dev->hw->query_cqc(hr_dev, hr_cq->cqn, &context); in hns_roce_fill_res_cq_entry_raw()
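
These hits show the restrack path exporting per-CQ fields (cq_depth, cons_index, cqe_size, arm_sn) as driver-specific netlink attributes, plus a raw context dump via hw->query_cqc(). A hedged sketch of the rdma_nl_put_driver_u32() pattern, using an illustrative demo_cq struct rather than the driver's types:

#include <linux/errno.h>
#include <linux/skbuff.h>
#include <rdma/restrack.h>

struct demo_cq {
	u32 cq_depth;
	u32 cqe_size;
};

/* Emit a few per-CQ counters as "name = u32" driver attributes. */
static int demo_fill_res_cq_entry(struct sk_buff *msg, struct demo_cq *cq)
{
	if (rdma_nl_put_driver_u32(msg, "cq_depth", cq->cq_depth))
		return -EMSGSIZE;
	if (rdma_nl_put_driver_u32(msg, "cqe_size", cq->cqe_size))
		return -EMSGSIZE;

	return 0;
}
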
hns_roce_hw_v2.c
2688 hr_cq = kzalloc(sizeof(*hr_cq), GFP_KERNEL); in free_mr_init_cq()
2692 cq = &hr_cq->ib_cq; in free_mr_init_cq()
2697 kfree(hr_cq); in free_mr_init_cq()
3566 return hns_roce_buf_offset(hr_cq->mtr.kmem, n * hr_cq->cqe_size); in get_cqe_v2()
3571 struct hns_roce_v2_cqe *cqe = get_cqe_v2(hr_cq, n & hr_cq->ib_cq.cqe); in get_sw_cqe_v2()
3582 *hr_cq->set_ci_db = hr_cq->cons_index & V2_CQ_DB_CONS_IDX_M; in update_cq_db()
3605 for (prod_index = hr_cq->cons_index; get_sw_cqe_v2(hr_cq, prod_index); in __hns_roce_v2_cq_clean()
3607 if (prod_index > hr_cq->cons_index + hr_cq->ib_cq.cqe) in __hns_roce_v2_cq_clean()
3616 cqe = get_cqe_v2(hr_cq, prod_index & hr_cq->ib_cq.cqe); in __hns_roce_v2_cq_clean()
3991 cqe = get_sw_cqe_v2(hr_cq, hr_cq->cons_index); in hns_roce_v2_poll_one()
[all …]
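
The hw_v2 hits show how a CQE is addressed and consumed: get_cqe_v2() offsets into the CQ buffer by n * cqe_size, get_sw_cqe_v2() masks the consumer index with ib_cq.cqe, and update_cq_db() writes the masked cons_index to the software doorbell. A toy sketch of that power-of-two ring indexing (flat buffer and demo_* names are simplifications, assuming the CQ depth is a power of two):

#include <linux/types.h>

struct demo_cqe {
	u8 data[64];			/* assume 64-byte CQEs */
};

struct demo_cq {
	struct demo_cqe *buf;		/* contiguous CQE ring */
	u32 cqe_mask;			/* cqe_count - 1 */
	u32 cons_index;			/* monotonically increasing consumer index */
};

/* Map a monotonically growing index onto the ring. */
static struct demo_cqe *demo_get_cqe(struct demo_cq *cq, u32 n)
{
	return &cq->buf[n & cq->cqe_mask];
}

/* The next CQE the software expects to consume. */
static struct demo_cqe *demo_next_cqe(struct demo_cq *cq)
{
	return demo_get_cqe(cq, cq->cons_index);
}
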
hns_roce_main.c
888 struct hns_roce_cq *hr_cq = to_hr_cq(cq); in check_and_get_armed_cq() local
891 spin_lock_irqsave(&hr_cq->lock, flags); in check_and_get_armed_cq()
893 if (!hr_cq->is_armed) { in check_and_get_armed_cq()
894 hr_cq->is_armed = 1; in check_and_get_armed_cq()
895 list_add_tail(&hr_cq->node, cq_list); in check_and_get_armed_cq()
898 spin_unlock_irqrestore(&hr_cq->lock, flags); in check_and_get_armed_cq()
904 struct hns_roce_cq *hr_cq; in hns_roce_handle_device_err() local
924 list_for_each_entry(hr_cq, &cq_list, node) in hns_roce_handle_device_err()
925 hns_roce_cq_completion(hr_dev, hr_cq->cqn); in hns_roce_handle_device_err()
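
The main.c hits show the device-error path: each armed CQ is marked and collected onto a local list under its spinlock, then the list is walked and hns_roce_cq_completion() is raised per CQN. A simplified sketch of that collect-then-flush pattern (demo_* names are illustrative):

#include <linux/list.h>
#include <linux/spinlock.h>

struct demo_cq {
	spinlock_t lock;
	int is_armed;
	struct list_head node;
	unsigned long cqn;
};

/* Mark a CQ as armed once and queue it for a later completion flush. */
static void demo_collect_armed_cq(struct demo_cq *cq, struct list_head *cq_list)
{
	unsigned long flags;

	spin_lock_irqsave(&cq->lock, flags);
	if (!cq->is_armed) {
		cq->is_armed = 1;
		list_add_tail(&cq->node, cq_list);
	}
	spin_unlock_irqrestore(&cq->lock, flags);
}
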
hns_roce_qp.c
1429 struct hns_roce_cq *hr_cq; in hns_roce_wq_overflow() local
1436 hr_cq = to_hr_cq(ib_cq); in hns_roce_wq_overflow()
1437 spin_lock(&hr_cq->lock); in hns_roce_wq_overflow()
1439 spin_unlock(&hr_cq->lock); in hns_roce_wq_overflow()
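
The qp.c hits only show the CQ lock being taken and released inside hns_roce_wq_overflow(); a plausible reading is the common double-check idiom: test for work-queue overflow cheaply, then re-read head/tail under the CQ lock to serialise against the poll path. A hedged sketch of that idiom with illustrative demo_* names (not the driver's actual fields):

#include <linux/spinlock.h>
#include <linux/types.h>

struct demo_wq {
	u32 head;
	u32 tail;
	u32 wqe_cnt;
};

static bool demo_wq_overflow(struct demo_wq *wq, u32 nreq, spinlock_t *cq_lock)
{
	u32 cur = wq->head - wq->tail;

	if (likely(cur + nreq < wq->wqe_cnt))
		return false;

	/* Slow path: re-check under the CQ lock. */
	spin_lock(cq_lock);
	cur = wq->head - wq->tail;
	spin_unlock(cq_lock);

	return cur + nreq >= wq->wqe_cnt;
}
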
hns_roce_device.h
866 struct hns_roce_cq *hr_cq, void *mb_buf, u64 *mtts,
