
Search results for refs:qp (results 1 – 25 of 321), sorted by relevance


/drivers/infiniband/sw/rxe/
rxe_qp.c
156 qp->mtu = ib_mtu_enum_to_int(qp->attr.path_mtu); in rxe_qp_init_misc()
211 qp->sq.queue->buf, qp->sq.queue->buf_size, in rxe_init_sq()
262 qp->req.wqe_index = queue_get_producer(qp->sq.queue, in rxe_qp_init_req()
268 rxe_init_task(&qp->send_task, qp, rxe_sender); in rxe_qp_init_req()
301 qp->rq.queue->buf, qp->rq.queue->buf_size, in rxe_init_rq()
339 rxe_init_task(&qp->recv_task, qp, rxe_receiver); in rxe_qp_init_resp()
366 qp->pd = pd; in rxe_qp_from_init()
726 qp->resp.psn = qp->attr.rq_psn; in rxe_qp_from_attr()
738 qp->req.psn = qp->attr.sq_psn; in rxe_qp_from_attr()
739 qp->comp.psn = qp->attr.sq_psn; in rxe_qp_from_attr()
[all …]
rxe_comp.c
117 struct rxe_qp *qp = timer_container_of(qp, t, retrans_timer); in retransmit_timer() local
167 qp->comp.retry_cnt = qp->attr.retry_cnt; in reset_retry_counters()
168 qp->comp.rnr_retry = qp->attr.rnr_retry; in reset_retry_counters()
404 wc->qp = &qp->ibqp; in make_send_cqe()
480 if (qp->attr.sq_draining && qp->comp.psn == qp->req.psn) { in comp_check_sq_drain_done()
488 ev.element.qp = &qp->ibqp; in comp_check_sq_drain_done()
571 wc->qp = &qp->ibqp; in flush_send_wqe()
608 struct rxe_qp *qp = pkt->qp; in free_pkt() local
612 rxe_put(qp); in free_pkt()
631 psn_compare(qp->req.psn, qp->comp.psn) > 0) in reset_retry_timer()
[all …]
rxe_resp.c
289 qp->resp.wqe = &qp->resp.srq_wqe.wqe; in get_srq_wqe()
586 err = rxe_mr_copy(qp->resp.mr, qp->resp.va + qp->resp.offset, in write_data_in()
682 qp->resp.ack_psn = qp->resp.psn; in process_flush()
722 qp->resp.ack_psn = qp->resp.psn; in atomic_reply()
750 iova = qp->resp.va + qp->resp.offset; in atomic_write_reply()
770 qp->resp.ack_psn = qp->resp.psn; in atomic_write_reply()
801 ack->qp = qp; in prepare_ack_packet()
1056 qp->resp.ack_psn = qp->resp.psn; in execute()
1090 wc->qp = &qp->ibqp; in do_complete()
1458 wc->qp = &qp->ibqp; in flush_recv_wqe()
[all …]
rxe_req.c
25 qp->req.opcode = next_opcode(qp, wqe, in retry_first_write_send()
52 qp->req.psn = qp->comp.psn; in req_retry()
89 qp->mtu; in req_retry()
150 ev.element.qp = &qp->ibqp; in req_check_sq_drain_done()
575 qp->req.psn = (qp->req.psn + 1) & BTH_PSN_MASK; in update_wqe_psn()
583 qp->req.wqe_index = queue_next_index(qp->sq.queue, in update_state()
634 qp->req.wqe_index = queue_next_index(qp->sq.queue, qp->req.wqe_index); in rxe_do_local_ops()
715 psn_compare(qp->req.psn, (qp->comp.psn + in rxe_requester()
755 qp->req.psn = (qp->req.psn + 1) & BTH_PSN_MASK; in rxe_requester()
768 pkt.qp = qp; in rxe_requester()
[all …]
/drivers/infiniband/hw/hfi1/
rc.c
450 hfi1_trdma_send_complete(qp, wqe, qp->s_last != qp->s_acked ? in hfi1_make_rc_req()
906 qp->s_len = restart_sge(&qp->s_sge, wqe, qp->s_psn, pmtu); in hfi1_make_rc_req()
951 qp->s_len = restart_sge(&qp->s_sge, wqe, qp->s_psn, pmtu); in hfi1_make_rc_req()
1885 qp->s_acked = qp->s_cur; in do_rc_completion()
1894 if (qp->state == IB_QPS_SQD && qp->s_acked == qp->s_cur) in do_rc_completion()
2945 wc.qp = &qp->ibqp; in hfi1_rc_rcv()
2987 ok = rvt_rkey_ok(qp, &qp->r_sge.sge, qp->r_len, vaddr, in hfi1_rc_rcv()
3176 qp->r_ack_psn = qp->r_psn; in hfi1_rc_rcv()
3184 qp->r_ack_psn = qp->r_psn; in hfi1_rc_rcv()
3194 qp->r_ack_psn = qp->r_psn; in hfi1_rc_rcv()
[all …]
qp.c
449 qp = tx->qp; in iowait_sleep()
587 qp->s_last == qp->s_acked && in qp_idle()
588 qp->s_acked == qp->s_cur && in qp_idle()
589 qp->s_cur == qp->s_tail && in qp_idle()
590 qp->s_tail == qp->s_head; in qp_idle()
601 struct rvt_qp *qp = iter->qp; in qp_iter_print() local
631 qp->s_psn, qp->s_next_psn, in qp_iter_print()
634 qp->s_last, qp->s_acked, qp->s_cur, in qp_iter_print()
635 qp->s_tail, qp->s_head, qp->s_size, in qp_iter_print()
789 ev.element.qp = &qp->ibqp; in hfi1_migrate_qp()
[all …]
uc.c
49 wqe = rvt_get_swqe_ptr(qp, qp->s_last); in hfi1_make_uc_req()
72 wqe = rvt_get_swqe_ptr(qp, qp->s_cur); in hfi1_make_uc_req()
93 if (qp->s_last != qp->s_cur) in hfi1_make_uc_req()
95 if (++qp->s_cur == qp->s_size) in hfi1_make_uc_req()
138 if (++qp->s_cur >= qp->s_size) in hfi1_make_uc_req()
167 if (++qp->s_cur >= qp->s_size) in hfi1_make_uc_req()
197 if (++qp->s_cur >= qp->s_size) in hfi1_make_uc_req()
223 if (++qp->s_cur >= qp->s_size) in hfi1_make_uc_req()
264 struct rvt_qp *qp = packet->qp; in hfi1_uc_rcv() local
412 wc.qp = &qp->ibqp; in hfi1_uc_rcv()
[all …]
tid_rdma.c
1669 req->qp = qp; in hfi1_init_trdma_req()
1709 struct rvt_qp *qp = req->qp; in hfi1_build_tid_rdma_read_packet() local
2006 qp->r_ack_psn = qp->r_psn; in tid_rdma_rcv_error()
2333 qp->r_ack_psn = qp->r_psn; in hfi1_rc_rcv_tid_rdma_read_req()
2341 qp->r_ack_psn = qp->r_psn; in hfi1_rc_rcv_tid_rdma_read_req()
2892 packet->qp = qp; in hfi1_handle_kdeth_eflags()
3217 s_prev = (qp->s_cur == 0 ? qp->s_size : qp->s_cur) - 1; in hfi1_tid_rdma_wqe_interlock()
3603 qp->r_ack_psn = qp->r_psn; in hfi1_tid_write_alloc_resources()
3824 qp->r_ack_psn = qp->r_psn; in hfi1_rc_rcv_tid_rdma_write_req()
3832 qp->r_ack_psn = qp->r_psn; in hfi1_rc_rcv_tid_rdma_write_req()
[all …]
trace_tid.h
196 TP_ARGS(qp),
226 TP_ARGS(qp)
232 TP_ARGS(qp)
238 TP_ARGS(qp)
244 TP_ARGS(qp)
250 TP_ARGS(qp)
360 __entry->qpn = qp ? qp->ibqp.qp_num : 0;
931 TP_ARGS(qp)
937 TP_ARGS(qp)
943 TP_ARGS(qp)
[all …]
ruc.c
31 struct rvt_qp *qp = packet->qp; in hfi1_ruc_check_hdr() local
74 hfi1_migrate_qp(qp); in hfi1_ruc_check_hdr()
168 clear_ahg(qp); in build_ahg()
171 if (qp->s_ahgidx < 0) in build_ahg()
174 qp->s_ahgpsn = npsn; in build_ahg()
282 build_ahg(qp, bth2); in hfi1_make_ruc_header_16B()
360 build_ahg(qp, bth2); in hfi1_make_ruc_header_9B()
481 hfi1_do_send(qp, true); in _hfi1_do_send()
500 ps.ibp = to_iport(qp->ibqp.device, qp->port_num); in hfi1_do_send()
512 rvt_ruc_loopback(qp); in hfi1_do_send()
[all …]
ud.c
172 rvt_copy_sge(qp, &qp->r_sge, &grh, in ud_loopback()
196 wc.qp = &qp->ibqp; in ud_loopback()
231 ibp = to_iport(qp->ibqp.device, qp->port_num); in hfi1_make_bth_deth()
453 if (qp->s_last == READ_ONCE(qp->s_head)) in hfi1_make_ud_req()
460 wqe = rvt_get_swqe_ptr(qp, qp->s_last); in hfi1_make_ud_req()
466 if (qp->s_cur == READ_ONCE(qp->s_head)) in hfi1_make_ud_req()
469 wqe = rvt_get_swqe_ptr(qp, qp->s_cur); in hfi1_make_ud_req()
819 struct rvt_qp *qp = packet->qp; in hfi1_ud_rcv() local
962 rvt_copy_sge(qp, &qp->r_sge, packet->grh, in hfi1_ud_rcv()
973 rvt_copy_sge(qp, &qp->r_sge, &grh, in hfi1_ud_rcv()
[all …]
qp.h
51 struct hfi1_qp_priv *priv = qp->priv; in hfi1_send_ok()
55 (qp->s_flags & RVT_S_RESP_PENDING) || in hfi1_send_ok()
56 !(qp->s_flags & RVT_S_ANY_WAIT_SEND)); in hfi1_send_ok()
64 struct hfi1_qp_priv *priv = qp->priv; in clear_ahg()
68 if (priv->s_sde && qp->s_ahgidx >= 0) in clear_ahg()
70 qp->s_ahgidx = -1; in clear_ahg()
88 void hfi1_migrate_qp(struct rvt_qp *qp);
96 void notify_qp_reset(struct rvt_qp *qp);
100 void notify_error_qp(struct rvt_qp *qp);
101 void stop_send_queue(struct rvt_qp *qp);
[all …]
/drivers/infiniband/sw/rdmavt/
qp.c
715 ev.element.qp = &qp->ibqp; in rvt_qp_mr_clean()
1315 wc.qp = &qp->ibqp; in rvt_error_qp()
1380 rcu_assign_pointer(rvp->qp[qp->ibqp.qp_num], qp); in rvt_insert_qp()
1541 qp->s_draining = qp->s_last != qp->s_cur; in rvt_modify_qp()
1647 ev.element.qp = &qp->ibqp; in rvt_modify_qp()
1818 wc.qp = &qp->ibqp; in rvt_post_recv()
1947 qp->ibqp.qp_num, qp->s_size, qp->s_avail, in rvt_qp_is_avail()
1948 qp->s_head, qp->s_tail, qp->s_cur, in rvt_qp_is_avail()
2307 wc.qp = &qp->ibqp; in init_sge()
2722 iter->qp = qp; in rvt_qp_iter_next()
[all …]
/drivers/ntb/
ntb_transport.c
474 if (!qp || !qp->link_is_up) in debugfs_read()
663 entry->qp = qp; in ntb_transport_setup_qp_mw()
968 qp->event_handler(qp->cb_data, qp->link_is_up); in ntb_qp_link_cleanup()
1169 qp->event_handler(qp->cb_data, qp->link_is_up); in ntb_qp_link_work()
1239 qp->debugfs_dir, qp, in ntb_transport_init_queue()
1485 qp->rx_handler(qp, qp->cb_data, cb_data, len); in ntb_complete_rxc()
1642 offset = qp->rx_buff + qp->rx_max_frame * qp->rx_index; in ntb_process_rxc()
1796 qp->tx_handler(qp, qp->cb_data, entry->cb_data, in ntb_tx_copy_callback()
1926 qp->tx_handler(qp, qp->cb_data, NULL, -EIO); in ntb_process_tx()
2078 entry->qp = qp; in ntb_transport_create_queue()
[all …]
/drivers/infiniband/sw/siw/
siw_qp.c
171 if (qp->cep) { in siw_qp_llp_close()
173 qp->cep = NULL; in siw_qp_llp_close()
344 memset(&qp->term_info, 0, sizeof(qp->term_info)); in siw_init_terminate()
656 qp->attrs.orq_size, qp->attrs.irq_size); in siw_qp_nextstate_from_idle()
897 if (qp->attrs.orq_size && !siw_orq_empty(qp)) { in siw_activate_tx_from_sq()
933 qp->sq_get++; in siw_activate_tx_from_sq()
956 irqe = &qp->irq[qp->irq_get % qp->attrs.irq_size]; in siw_activate_tx()
1169 sqe = &qp->orq[qp->orq_get % qp->attrs.orq_size]; in siw_sq_flush()
1205 sqe = &qp->sendq[qp->sq_get % qp->attrs.sq_size]; in siw_sq_flush()
1269 &qp->recvq[qp->rq_get % qp->attrs.rq_size]; in siw_rq_flush()
[all …]
/drivers/infiniband/hw/mthca/
mthca_qp.c
257 qp->port = qp->alt_port; in mthca_qp_event()
261 event.element.qp = &qp->ibqp; in mthca_qp_event()
841 qp->sq.last = get_send_wqe(qp, qp->sq.max - 1); in __mthca_modify_qp()
844 qp->rq.last = get_recv_wqe(qp, qp->rq.max - 1); in __mthca_modify_qp()
1052 qp->send_wqe_offset = ALIGN(qp->rq.max << qp->rq.wqe_shift, in mthca_alloc_wqe_buf()
1072 &qp->queue, &qp->is_direct, pd, 0, &qp->mr); in mthca_alloc_wqe_buf()
1088 &qp->queue, qp->is_direct, &qp->mr); in mthca_free_wqe_buf()
1138 qp->qpn, &qp->rq.db); in mthca_alloc_memfree()
1143 qp->qpn, &qp->sq.db); in mthca_alloc_memfree()
1246 qp->sq.last = get_send_wqe(qp, qp->sq.max - 1); in mthca_alloc_qp_common()
[all …]
/drivers/net/ethernet/qlogic/qed/
qed_roce.c
210 if (qp->vlan_id) { in qed_roce_get_qp_tc()
217 qp->icid, tc, qp->vlan_id ? "enabled" : "disabled"); in qed_roce_get_qp_tc()
244 if (!qp->irq) { in qed_roce_sp_create_responder()
343 qp->cq_prod = 0; in qed_roce_sp_create_responder()
355 qp->irq, qp->irq_phys_addr); in qed_roce_sp_create_responder()
382 if (!qp->orq) { in qed_roce_sp_create_requester()
486 qp->orq, qp->orq_phys_addr); in qed_roce_sp_create_requester()
736 qp->irq, qp->irq_phys_addr); in qed_roce_sp_destroy_qp_responder()
798 qp->orq, qp->orq_phys_addr); in qed_roce_sp_destroy_qp_requester()
828 if ((!(qp->resp_offloaded)) && (!(qp->req_offloaded))) { in qed_roce_query_qp()
[all …]
/drivers/infiniband/hw/mlx4/
qp.c
181 return get_wqe(qp, qp->rq.offset + (n << qp->rq.wqe_shift)); in get_recv_wqe()
186 return get_wqe(qp, qp->sq.offset + (n << qp->sq.wqe_shift)); in get_send_wqe()
274 qpe_work->qp = qp; in mlx4_ib_qp_event()
425 qp->buf_size = (qp->rq.wqe_cnt << qp->rq.wqe_shift) + in set_kernel_sq_size()
429 qp->sq.offset = qp->rq.wqe_cnt << qp->rq.wqe_shift; in set_kernel_sq_size()
431 qp->rq.offset = qp->sq.wqe_cnt << qp->sq.wqe_shift; in set_kernel_sq_size()
464 qp->buf_size = (qp->rq.wqe_cnt << qp->rq.wqe_shift) + in set_user_sq_size()
774 qp->ibqp.qp_num = qp->mqp.qpn; in _mlx4_ib_create_qp_rss()
918 qp->buf_size = (qp->rq.wqe_cnt << qp->rq.wqe_shift) + in create_rq()
1420 if (qp->pri.smac || (!qp->pri.smac && qp->pri.smac_port)) { in destroy_qp_common()
[all …]
/drivers/infiniband/hw/erdma/
erdma_qp.c
33 if (qp->cep) { in erdma_qp_llp_close()
35 qp->cep = NULL; in erdma_qp_llp_close()
45 if (qp) in erdma_get_ibqp()
46 return &qp->ibqp; in erdma_get_ibqp()
208 mod_delayed_work(qp->dev->reflush_wq, &qp->reflush_dwork, in erdma_modify_qp_state_iwarp()
264 erdma_remove_cqes_of_qp(&qp->scq->ibcq, QP_ID(qp)); in erdma_reset_qp()
265 if (qp->rcq != qp->scq) in erdma_reset_qp()
266 erdma_remove_cqes_of_qp(&qp->rcq->ibcq, QP_ID(qp)); in erdma_reset_qp()
695 mod_delayed_work(qp->dev->reflush_wq, &qp->reflush_dwork, in erdma_post_send()
705 get_queue_entry(qp->kern_qp.rq_buf, qp->kern_qp.rq_pi, in erdma_post_recv_one()
[all …]
/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_qp.c
150 qp->npages_recv = (qp->rq.wqe_cnt * qp->rq.wqe_size + PAGE_SIZE - 1) / in pvrdma_set_rq_size()
177 (qp->sq.wqe_cnt * qp->sq.wqe_size + PAGE_SIZE - 1) / in pvrdma_set_sq_size()
300 qp->npages = qp->npages_send + qp->npages_recv; in pvrdma_create_qp()
312 qp->npages = qp->npages_send + qp->npages_recv; in pvrdma_create_qp()
318 qp->rq.offset = qp->npages_send * PAGE_SIZE; in pvrdma_create_qp()
344 qp->sq.ring = qp->pdir.pages[0]; in pvrdma_create_qp()
345 qp->rq.ring = is_srq ? NULL : &qp->sq.ring[1]; in pvrdma_create_qp()
637 qp->sq.offset + n * qp->sq.wqe_size); in get_sq_wqe()
643 qp->rq.offset + n * qp->rq.wqe_size); in get_rq_wqe()
696 qp->sq.ring, qp->sq.wqe_cnt, &tail))) { in pvrdma_post_send()
[all …]
/drivers/infiniband/hw/irdma/
uk.c
134 writel(qp->qp_id, qp->wqe_alloc_db); in irdma_uk_qp_post_wr()
138 writel(qp->qp_id, qp->wqe_alloc_db); in irdma_uk_qp_post_wr()
143 qp->initial_ring.head = qp->sq_ring.head; in irdma_uk_qp_post_wr()
184 qp->swqe_polarity = !qp->swqe_polarity; in irdma_qp_get_next_send_wqe()
191 wqe_0 = qp->sq_base[IRDMA_RING_CURRENT_HEAD(qp->sq_ring)].elem; in irdma_qp_get_next_send_wqe()
219 qp->rwqe_polarity = !qp->rwqe_polarity; in irdma_qp_get_next_recv_wqe()
221 wqe = qp->rq_base[*wqe_idx * qp->rq_wqe_size_multiplier].elem; in irdma_qp_get_next_recv_wqe()
1080 if (!qp || qp->destroy_pending) { in irdma_uk_cq_poll_cmpl()
1100 info->wr_id = qp->rq_wrid_array[qp->rq_ring.tail]; in irdma_uk_cq_poll_cmpl()
1128 if (wqe_idx < qp->conn_wqes && qp->sq_ring.head == qp->sq_ring.tail) { in irdma_uk_cq_poll_cmpl()
[all …]
/drivers/infiniband/hw/bnxt_re/
qplib_fp.c
101 if (qp->scq == qp->rcq) in bnxt_qplib_acquire_cq_flush_locks()
111 if (qp->scq == qp->rcq) in bnxt_qplib_release_cq_flush_locks()
146 __clean_cq(qp->scq, (u64)(unsigned long)qp); in bnxt_qplib_clean_qp()
149 __clean_cq(qp->rcq, (u64)(unsigned long)qp); in bnxt_qplib_clean_qp()
187 qp->rq_hdr_buf, qp->rq_hdr_buf_map); in bnxt_qplib_free_qp_hdr_buf()
191 qp->sq_hdr_buf, qp->sq_hdr_buf_map); in bnxt_qplib_free_qp_hdr_buf()
1454 qp->cur_qp_state = qp->state; in bnxt_qplib_modify_qp()
1713 qp->msn %= qp->msn_tbl_sz; in bnxt_qplib_fill_msn_search()
1893 qp->id, qp->state); in bnxt_qplib_post_send()
2150 qp->id, qp->state); in bnxt_qplib_post_recv()
[all …]
/drivers/interconnect/qcom/
icc-rpm.c
197 switch (qp->type) { in qcom_icc_qos_set()
375 if (!qp->bus_clk_desc && !qp->bus_clk) in qcom_icc_set()
382 if (qp->keep_alive) in qcom_icc_set()
386 if (qp->bus_clk) { in qcom_icc_set()
480 qp = devm_kzalloc(dev, sizeof(*qp), GFP_KERNEL); in qnoc_probe()
481 if (!qp) in qnoc_probe()
489 qp->bus_clk_desc = devm_kzalloc(dev, sizeof(*qp->bus_clk_desc), in qnoc_probe()
524 if (qp->regmap) in qnoc_probe()
545 ret = devm_clk_bulk_get(dev, qp->num_intf_clks, qp->intf_clks); in qnoc_probe()
560 ret = clk_bulk_prepare_enable(qp->num_intf_clks, qp->intf_clks); in qnoc_probe()
[all …]
/drivers/infiniband/hw/qedr/
verbs.c
2190 qp->wqe_wr_id = kcalloc(qp->sq.max_wr, sizeof(*qp->wqe_wr_id), in qedr_create_kernel_qp()
2208 qp->rqe_wr_id = kcalloc(qp->rq.max_wr, sizeof(*qp->rqe_wr_id), in qedr_create_kernel_qp()
2306 qp->ibqp.qp_num = qp->qp_id; in qedr_create_qp()
2448 qp->rq.prod, qp->rq.cons, qp->sq.prod, in qedr_update_qp_state()
2820 qp, qp->qp_type); in qedr_destroy_qp()
3783 qp->icid, qp->state); in qedr_post_send()
4076 wc->qp = &qp->ibqp; in process_req()
4294 wc->qp = &qp->ibqp; in __process_resp_one()
4314 wc->qp = &qp->ibqp; in process_resp_one_srq()
4351 wc->qp = &qp->ibqp; in process_resp_flush()
[all …]
qedr_roce_cm.c
108 qp->rqe_wr_id[qp->rq.gsi_cons].vlan = data->vlan; in qedr_ll2_complete_rx_packet()
110 qp->rqe_wr_id[qp->rq.gsi_cons].sg_list[0].length = in qedr_ll2_complete_rx_packet()
342 qp->rqe_wr_id = kcalloc(qp->rq.max_wr, sizeof(*qp->rqe_wr_id), in qedr_create_gsi_qp()
346 qp->wqe_wr_id = kcalloc(qp->sq.max_wr, sizeof(*qp->wqe_wr_id), in qedr_create_gsi_qp()
651 memset(&qp->rqe_wr_id[qp->rq.prod], 0, in qedr_gsi_post_recv()
652 sizeof(qp->rqe_wr_id[qp->rq.prod])); in qedr_gsi_post_recv()
684 wc[i].qp = &qp->ibqp; in qedr_gsi_poll_cq()
696 vlan_id = qp->rqe_wr_id[qp->rq.cons].vlan & in qedr_gsi_poll_cq()
712 wc[i].qp = &qp->ibqp; in qedr_gsi_poll_cq()
725 num_entries, i, qp->rq.cons, qp->rq.gsi_cons, qp->sq.cons, in qedr_gsi_poll_cq()
[all …]

