Lines Matching refs:sq (send-queue references in the cxgb4 iWARP driver's queue-pair code)
95 static void dealloc_oc_sq(struct c4iw_rdev *rdev, struct t4_sq *sq) in dealloc_oc_sq() argument
97 c4iw_ocqp_pool_free(rdev, sq->dma_addr, sq->memsize); in dealloc_oc_sq()
100 static void dealloc_host_sq(struct c4iw_rdev *rdev, struct t4_sq *sq) in dealloc_host_sq() argument
102 dma_free_coherent(&(rdev->lldi.pdev->dev), sq->memsize, sq->queue, in dealloc_host_sq()
103 dma_unmap_addr(sq, mapping)); in dealloc_host_sq()
106 static void dealloc_sq(struct c4iw_rdev *rdev, struct t4_sq *sq) in dealloc_sq() argument
108 if (t4_sq_onchip(sq)) in dealloc_sq()
109 dealloc_oc_sq(rdev, sq); in dealloc_sq()
111 dealloc_host_sq(rdev, sq); in dealloc_sq()
114 static int alloc_oc_sq(struct c4iw_rdev *rdev, struct t4_sq *sq) in alloc_oc_sq() argument
118 sq->dma_addr = c4iw_ocqp_pool_alloc(rdev, sq->memsize); in alloc_oc_sq()
119 if (!sq->dma_addr) in alloc_oc_sq()
121 sq->phys_addr = rdev->oc_mw_pa + sq->dma_addr - in alloc_oc_sq()
123 sq->queue = (__force union t4_wr *)(rdev->oc_mw_kva + sq->dma_addr - in alloc_oc_sq()
125 sq->flags |= T4_SQ_ONCHIP; in alloc_oc_sq()
129 static int alloc_host_sq(struct c4iw_rdev *rdev, struct t4_sq *sq) in alloc_host_sq() argument
131 sq->queue = dma_alloc_coherent(&(rdev->lldi.pdev->dev), sq->memsize, in alloc_host_sq()
132 &(sq->dma_addr), GFP_KERNEL); in alloc_host_sq()
133 if (!sq->queue) in alloc_host_sq()
135 sq->phys_addr = virt_to_phys(sq->queue); in alloc_host_sq()
136 dma_unmap_addr_set(sq, mapping, sq->dma_addr); in alloc_host_sq()
140 static int alloc_sq(struct c4iw_rdev *rdev, struct t4_sq *sq, int user) in alloc_sq() argument
144 ret = alloc_oc_sq(rdev, sq); in alloc_sq()
146 ret = alloc_host_sq(rdev, sq); in alloc_sq()
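Read together with dealloc_sq() above, these alloc_sq() lines show a try-preferred-then-fallback pattern: alloc_oc_sq() is attempted first and sets T4_SQ_ONCHIP on success (line 125 above), alloc_host_sq() is the fallback, and the free path dispatches on t4_sq_onchip(). The conditionals between the matched lines are elided because they do not reference sq. Below is a minimal, standalone sketch of the same pattern; sq_buf, oc_pool_alloc(), host_alloc() and SQ_ONCHIP are hypothetical stand-ins, not driver APIs.

    /* Hypothetical stand-ins for the on-chip pool and host allocator. */
    #include <stdio.h>
    #include <stdlib.h>

    #define SQ_ONCHIP 0x1                   /* plays the role of T4_SQ_ONCHIP */

    struct sq_buf {
        void *queue;
        size_t memsize;
        unsigned int flags;
    };

    static void *oc_pool_alloc(size_t size) { (void)size; return NULL; } /* pool exhausted */
    static void oc_pool_free(void *p)       { free(p); }
    static void *host_alloc(size_t size)    { return calloc(1, size); }
    static void host_free(void *p)          { free(p); }

    /* Try the preferred (on-chip) pool first, fall back to host memory, and
     * record which pool succeeded so the free path can dispatch on it. */
    static int alloc_sq_like(struct sq_buf *sq, int prefer_onchip)
    {
        if (prefer_onchip) {
            sq->queue = oc_pool_alloc(sq->memsize);
            if (sq->queue) {
                sq->flags |= SQ_ONCHIP;
                return 0;
            }
        }
        sq->queue = host_alloc(sq->memsize);
        return sq->queue ? 0 : -1;
    }

    static void dealloc_sq_like(struct sq_buf *sq)
    {
        if (sq->flags & SQ_ONCHIP)          /* mirrors the t4_sq_onchip() test */
            oc_pool_free(sq->queue);
        else
            host_free(sq->queue);
    }

    int main(void)
    {
        struct sq_buf sq = { .memsize = 4096 };

        if (alloc_sq_like(&sq, 1))
            return 1;
        printf("allocated from %s memory\n",
               (sq.flags & SQ_ONCHIP) ? "on-chip" : "host");
        dealloc_sq_like(&sq);
        return 0;
    }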
157 dealloc_sq(rdev, &wq->sq); in destroy_qp()
158 kfree(wq->sq.sw_sq); in destroy_qp()
159 c4iw_put_qpid(rdev, wq->sq.qid, uctx); in destroy_qp()
213 wq->sq.qid = c4iw_get_qpid(rdev, uctx); in create_qp()
214 if (!wq->sq.qid) in create_qp()
226 wq->sq.sw_sq = kcalloc(wq->sq.size, sizeof(*wq->sq.sw_sq), in create_qp()
228 if (!wq->sq.sw_sq) { in create_qp()
257 ret = alloc_sq(rdev, &wq->sq, user); in create_qp()
260 memset(wq->sq.queue, 0, wq->sq.memsize); in create_qp()
261 dma_unmap_addr_set(&wq->sq, mapping, wq->sq.dma_addr); in create_qp()
273 wq->sq.queue, in create_qp()
274 (unsigned long long)virt_to_phys(wq->sq.queue), in create_qp()
282 wq->sq.bar2_va = c4iw_bar2_addrs(rdev, wq->sq.qid, in create_qp()
284 &wq->sq.bar2_qid, in create_qp()
285 user ? &wq->sq.bar2_pa : NULL); in create_qp()
295 if (user && (!wq->sq.bar2_pa || (need_rq && !wq->rq.bar2_pa))) { in create_qp()
297 pci_name(rdev->lldi.pdev), wq->sq.qid, wq->rq.qid); in create_qp()
330 eqsize = wq->sq.size * T4_SQ_NUM_SLOTS + in create_qp()
337 (t4_sq_onchip(&wq->sq) ? FW_RI_RES_WR_ONCHIP_F : 0) | in create_qp()
343 (t4_sq_onchip(&wq->sq) ? FW_RI_RES_WR_FBMAX_V(2) : in create_qp()
348 res->u.sqrq.eqid = cpu_to_be32(wq->sq.qid); in create_qp()
349 res->u.sqrq.eqaddr = cpu_to_be64(wq->sq.dma_addr); in create_qp()
382 ret = c4iw_ref_send_wait(rdev, skb, wr_waitp, 0, wq->sq.qid, __func__); in create_qp()
387 wq->sq.qid, wq->rq.qid, wq->db, in create_qp()
388 wq->sq.bar2_va, wq->rq.bar2_va); in create_qp()
397 dealloc_sq(rdev, &wq->sq); in create_qp()
405 kfree(wq->sq.sw_sq); in create_qp()
410 c4iw_put_qpid(rdev, wq->sq.qid, uctx); in create_qp()
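The matched create_qp() lines show the acquisition order (queue id from c4iw_get_qpid(), the sw_sq shadow array via kcalloc(), queue memory via alloc_sq(), then the FW_RI_RES_WR carrying eqid/eqaddr), while the error labels and destroy_qp() release everything in reverse. A standalone goto-unwind sketch of that ordering follows; fake_qp, get_qid(), alloc_queue() and friends are hypothetical stand-ins, not driver functions.

    #include <stdlib.h>

    struct fake_qp {
        unsigned int qid;       /* queue id, cf. c4iw_get_qpid()            */
        void *sw_sq;            /* shadow descriptors, cf. wq->sq.sw_sq     */
        void *queue;            /* hardware-visible queue, cf. wq->sq.queue */
    };

    static unsigned int get_qid(void)     { static unsigned int n; return ++n; }
    static void put_qid(unsigned int qid) { (void)qid; }
    static void *alloc_queue(size_t sz)   { return calloc(1, sz); }
    static void free_queue(void *q)       { free(q); }

    static int create_qp_like(struct fake_qp *qp, size_t nslots, size_t slotsz)
    {
        qp->qid = get_qid();
        if (!qp->qid)
            return -1;

        qp->sw_sq = calloc(nslots, sizeof(void *));
        if (!qp->sw_sq)
            goto free_qid;

        qp->queue = alloc_queue(nslots * slotsz);
        if (!qp->queue)
            goto free_sw_sq;

        /* In the driver a FW_RI_RES_WR carrying the queue id and DMA address
         * would be sent to firmware here; a failure unwinds through the same
         * labels below. */
        return 0;

    free_sw_sq:
        free(qp->sw_sq);
    free_qid:
        put_qid(qp->qid);
        return -1;
    }

    static void destroy_qp_like(struct fake_qp *qp)
    {
        free_queue(qp->queue);  /* cf. dealloc_sq(rdev, &wq->sq)      */
        free(qp->sw_sq);        /* cf. kfree(wq->sq.sw_sq)            */
        put_qid(qp->qid);       /* cf. c4iw_put_qpid(rdev, ..., uctx) */
    }

    int main(void)
    {
        struct fake_qp qp = { 0 };

        if (create_qp_like(&qp, 16, 64))
            return 1;
        destroy_qp_like(&qp);
        return 0;
    }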
414 static int build_immd(struct t4_sq *sq, struct fw_ri_immd *immdp, in build_immd() argument
430 if (dstp == (u8 *)&sq->queue[sq->size]) in build_immd()
431 dstp = (u8 *)sq->queue; in build_immd()
432 if (rem <= (u8 *)&sq->queue[sq->size] - dstp) in build_immd()
435 len = (u8 *)&sq->queue[sq->size] - dstp; in build_immd()
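build_immd() copies the scatter-gather payload inline into the work-queue ring, and the matched lines show the wrap handling: when the destination pointer reaches one past the last slot (&sq->queue[sq->size]) it wraps back to sq->queue, and each copy is clamped to the bytes left before that boundary. Below is a standalone sketch of that clamped, wrapping copy; struct ring and ring_copy() are hypothetical names for the sketch only.

    #include <stdio.h>
    #include <string.h>

    struct ring {
        unsigned char *base;    /* start of the ring buffer */
        size_t size;            /* total bytes in the ring  */
    };

    /* Copy len bytes into the ring at *dstp, wrapping from one-past-the-end
     * back to the start and clamping each memcpy to the space left before
     * the wrap point, as the matched build_immd() lines do. */
    static void ring_copy(struct ring *r, unsigned char **dstp,
                          const unsigned char *src, size_t len)
    {
        unsigned char *end = r->base + r->size;

        while (len) {
            size_t chunk;

            if (*dstp == end)           /* reached &queue[size]: wrap */
                *dstp = r->base;
            chunk = (size_t)(end - *dstp);
            if (len < chunk)
                chunk = len;
            memcpy(*dstp, src, chunk);
            *dstp += chunk;
            src += chunk;
            len -= chunk;
        }
    }

    int main(void)
    {
        unsigned char buf[8];
        struct ring r = { buf, sizeof(buf) };
        unsigned char *dst = buf + 6;       /* two bytes before the end */

        memset(buf, '.', sizeof(buf));
        ring_copy(&r, &dst, (const unsigned char *)"abcdef", 6);
        printf("%.8s\n", (char *)buf);      /* prints "cdef..ab"        */
        return 0;
    }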
489 static int build_rdma_send(struct t4_sq *sq, union t4_wr *wqe, in build_rdma_send() argument
527 ret = build_immd(sq, wqe->send.u.immd_src, wr, in build_rdma_send()
534 ret = build_isgl((__be64 *)sq->queue, in build_rdma_send()
535 (__be64 *)&sq->queue[sq->size], in build_rdma_send()
556 static int build_rdma_write(struct t4_sq *sq, union t4_wr *wqe, in build_rdma_write() argument
578 ret = build_immd(sq, wqe->write.u.immd_src, wr, in build_rdma_write()
585 ret = build_isgl((__be64 *)sq->queue, in build_rdma_write()
586 (__be64 *)&sq->queue[sq->size], in build_rdma_write()
607 static void build_immd_cmpl(struct t4_sq *sq, struct fw_ri_immd_cmpl *immdp, in build_immd_cmpl() argument
616 static void build_rdma_write_cmpl(struct t4_sq *sq, in build_rdma_write_cmpl() argument
645 build_immd_cmpl(sq, &wcwr->u_cmpl.immd_src, wr->next); in build_rdma_write_cmpl()
647 build_isgl((__be64 *)sq->queue, (__be64 *)&sq->queue[sq->size], in build_rdma_write_cmpl()
651 build_isgl((__be64 *)sq->queue, (__be64 *)&sq->queue[sq->size], in build_rdma_write_cmpl()
706 wqe = (union t4_wr *)((u8 *)qhp->wq.sq.queue + in post_write_cmpl()
707 qhp->wq.sq.wq_pidx * T4_EQ_ENTRY_SIZE); in post_write_cmpl()
708 build_rdma_write_cmpl(&qhp->wq.sq, &wqe->write_cmpl, wr, &len16); in post_write_cmpl()
711 swsqe = &qhp->wq.sq.sw_sq[qhp->wq.sq.pidx]; in post_write_cmpl()
713 swsqe->idx = qhp->wq.sq.pidx; in post_write_cmpl()
724 write_wrid = qhp->wq.sq.pidx; in post_write_cmpl()
727 qhp->wq.sq.in_use++; in post_write_cmpl()
728 if (++qhp->wq.sq.pidx == qhp->wq.sq.size) in post_write_cmpl()
729 qhp->wq.sq.pidx = 0; in post_write_cmpl()
732 swsqe = &qhp->wq.sq.sw_sq[qhp->wq.sq.pidx]; in post_write_cmpl()
737 swsqe->idx = qhp->wq.sq.pidx; in post_write_cmpl()
749 wqe->write_cmpl.wrid_send = qhp->wq.sq.pidx; in post_write_cmpl()
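post_write_cmpl() consumes two consecutive slots, one for the write and one for the completion-carrying send (wrid_send records the second index), and the matched lines show the shadow bookkeeping each time: sw_sq[pidx] is filled in, in_use is bumped, and pidx wraps to zero when it reaches the queue size. A standalone sketch of that producer-index bookkeeping follows; soft_sq, shadow_entry and sq_produce() are hypothetical names.

    #include <stdio.h>

    #define SQ_SIZE 4

    struct shadow_entry {
        unsigned int idx;              /* slot index, cf. swsqe->idx */
        unsigned long long wr_id;
    };

    struct soft_sq {
        struct shadow_entry sw[SQ_SIZE];
        unsigned int pidx;             /* producer index */
        unsigned int in_use;
    };

    static unsigned int sq_produce(struct soft_sq *sq, unsigned long long wr_id)
    {
        struct shadow_entry *e = &sq->sw[sq->pidx];
        unsigned int idx = sq->pidx;

        e->idx = idx;
        e->wr_id = wr_id;
        sq->in_use++;
        if (++sq->pidx == SQ_SIZE)     /* the wrap test from the matched lines */
            sq->pidx = 0;
        return idx;
    }

    int main(void)
    {
        struct soft_sq sq = { .pidx = SQ_SIZE - 1 };
        unsigned int first, second;

        /* A write-with-completion consumes two consecutive slots, so the
         * second claim exercises the wrap back to slot 0. */
        first = sq_produce(&sq, 0x1111);
        second = sq_produce(&sq, 0x2222);
        printf("slots %u and %u, in_use %u\n", first, second, sq.in_use);
        return 0;
    }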
820 static int build_memreg(struct t4_sq *sq, union t4_wr *wqe, in build_memreg() argument
869 if (++p == (__be64 *)&sq->queue[sq->size]) in build_memreg()
870 p = (__be64 *)sq->queue; in build_memreg()
875 if (++p == (__be64 *)&sq->queue[sq->size]) in build_memreg()
876 p = (__be64 *)sq->queue; in build_memreg()
922 qhp->wq.sq.wq_pidx_inc += inc; in ring_kernel_sq_db()
1001 CQE_QPID_V(qhp->wq.sq.qid)); in complete_sq_drain_wr()
1052 CQE_QPID_V(qhp->wq.sq.qid)); in complete_rq_drain_wr()
1142 wqe = (union t4_wr *)((u8 *)qhp->wq.sq.queue + in c4iw_post_send()
1143 qhp->wq.sq.wq_pidx * T4_EQ_ENTRY_SIZE); in c4iw_post_send()
1150 swsqe = &qhp->wq.sq.sw_sq[qhp->wq.sq.pidx]; in c4iw_post_send()
1161 err = build_rdma_send(&qhp->wq.sq, wqe, wr, &len16); in c4iw_post_send()
1173 err = build_rdma_write(&qhp->wq.sq, wqe, wr, &len16); in c4iw_post_send()
1189 if (!qhp->wq.sq.oldest_read) in c4iw_post_send()
1190 qhp->wq.sq.oldest_read = swsqe; in c4iw_post_send()
1203 err = build_memreg(&qhp->wq.sq, wqe, reg_wr(wr), in c4iw_post_send()
1229 swsqe->idx = qhp->wq.sq.pidx; in c4iw_post_send()
1241 init_wr_hdr(wqe, qhp->wq.sq.pidx, fw_opcode, fw_flags, len16); in c4iw_post_send()
1244 (unsigned long long)wr->wr_id, qhp->wq.sq.pidx, in c4iw_post_send()
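c4iw_post_send() locates the next WQE from the producer index (wq_pidx * T4_EQ_ENTRY_SIZE), dispatches on the opcode to build_rdma_send(), build_rdma_write() or build_memreg(), and records the slot in the sw_sq shadow entry before writing the header. Below is a standalone sketch of that per-opcode dispatch; ENTRY_SIZE, build_send() and build_write() are illustrative stand-ins, not the driver's builders.

    #include <stdio.h>
    #include <string.h>

    #define ENTRY_SIZE 64
    #define NSLOTS 4

    enum op { OP_SEND, OP_WRITE };

    static int build_send(void *wqe, const char *payload, size_t *len)
    {
        *len = strlen(payload);
        memcpy(wqe, payload, *len);
        return 0;
    }

    static int build_write(void *wqe, const char *payload, size_t *len)
    {
        return build_send(wqe, payload, len);   /* same shape for the sketch */
    }

    int main(void)
    {
        unsigned char queue[NSLOTS * ENTRY_SIZE] = { 0 };
        enum op ops[] = { OP_SEND, OP_WRITE };
        unsigned int pidx = 0;
        size_t len;

        for (unsigned int i = 0; i < 2; i++) {
            /* Slot address computed from the producer index, as the matched
             * line does with wq_pidx * T4_EQ_ENTRY_SIZE. */
            void *wqe = queue + pidx * ENTRY_SIZE;
            int err = 0;

            switch (ops[i]) {
            case OP_SEND:
                err = build_send(wqe, "send", &len);
                break;
            case OP_WRITE:
                err = build_write(wqe, "write", &len);
                break;
            }
            if (err)
                return 1;
            printf("op %d at slot %u, len %zu\n", ops[i], pidx, len);
            if (++pidx == NSLOTS)
                pidx = 0;
        }
        return 0;
    }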
1567 pr_debug("qhp %p qid 0x%x tid %u\n", qhp, qhp->wq.sq.qid, in post_terminate()
1699 pr_debug("qhp %p qid 0x%x tid %u\n", qhp, qhp->wq.sq.qid, ep->hwtid); in rdma_fini()
1719 qhp->ep->hwtid, qhp->wq.sq.qid, __func__); in rdma_fini()
1756 qhp->wq.sq.qid, qhp->ep->hwtid, qhp->ep->ird, qhp->ep->ord); in rdma_init()
1801 wqe->u.init.qpid = cpu_to_be32(qhp->wq.sq.qid); in rdma_init()
1802 wqe->u.init.sq_eqid = cpu_to_be32(qhp->wq.sq.qid); in rdma_init()
1822 qhp->ep->hwtid, qhp->wq.sq.qid, __func__); in rdma_init()
1846 qhp, qhp->wq.sq.qid, qhp->wq.rq.qid, qhp->ep, qhp->attr.state, in c4iw_modify_qp()
2031 qhp->wq.sq.qid); in c4iw_modify_qp()
2089 __xa_erase(&rhp->qps, qhp->wq.sq.qid); in c4iw_destroy_qp()
2099 pr_debug("ib_qp %p qpid 0x%0x\n", ib_qp, qhp->wq.sq.qid); in c4iw_destroy_qp()
2157 qhp->wq.sq.size = sqsize; in c4iw_create_qp()
2158 qhp->wq.sq.memsize = in c4iw_create_qp()
2160 sizeof(*qhp->wq.sq.queue) + 16 * sizeof(__be64); in c4iw_create_qp()
2161 qhp->wq.sq.flush_cidx = -1; in c4iw_create_qp()
2170 qhp->wq.sq.memsize = roundup(qhp->wq.sq.memsize, PAGE_SIZE); in c4iw_create_qp()
2211 ret = xa_insert_irq(&rhp->qps, qhp->wq.sq.qid, qhp, GFP_KERNEL); in c4iw_create_qp()
2242 if (t4_sq_onchip(&qhp->wq.sq)) { in c4iw_create_qp()
2254 uresp.sqid = qhp->wq.sq.qid; in c4iw_create_qp()
2255 uresp.sq_size = qhp->wq.sq.size; in c4iw_create_qp()
2256 uresp.sq_memsize = qhp->wq.sq.memsize; in c4iw_create_qp()
2284 sq_key_mm->addr = qhp->wq.sq.phys_addr; in c4iw_create_qp()
2285 sq_key_mm->len = PAGE_ALIGN(qhp->wq.sq.memsize); in c4iw_create_qp()
2294 sq_db_key_mm->addr = (u64)(unsigned long)qhp->wq.sq.bar2_pa; in c4iw_create_qp()
2320 &qhp->wq.sq.queue[qhp->wq.sq.size].status.qp_err; in c4iw_create_qp()
2322 &qhp->wq.sq.queue[qhp->wq.sq.size].status.srqidx; in c4iw_create_qp()
2325 qhp->ibqp.qp_num = qhp->wq.sq.qid; in c4iw_create_qp()
2330 qhp->wq.sq.qid, qhp->wq.sq.size, qhp->wq.sq.memsize, in c4iw_create_qp()
2347 xa_erase_irq(&rhp->qps, qhp->wq.sq.qid); in c4iw_create_qp()
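The c4iw_create_qp() lines above size the send queue as a per-entry term plus a 16 * sizeof(__be64) tail and, for user QPs, round the result up to PAGE_SIZE so the queue and its BAR2 doorbell page can be handed to userspace through the mmap keys set up further down. A standalone arithmetic sketch follows; the page size, slot size and status-entry count are illustrative assumptions, since the matcher elides the middle of the memsize expression.

    #include <stdio.h>
    #include <stdint.h>

    #define PAGE_SIZE_ASSUMED 4096u     /* typical, architecture dependent    */
    #define SLOT_SIZE_ASSUMED 64u       /* hypothetical per-entry size        */
    #define STATUS_ENTRIES 1u           /* hypothetical trailing status slots */

    static unsigned int roundup_to(unsigned int v, unsigned int to)
    {
        return ((v + to - 1) / to) * to;
    }

    int main(void)
    {
        unsigned int sqsize = 128;      /* requested queue depth */
        unsigned int memsize;

        /* Per-entry memory plus the 16 * sizeof(__be64) tail shown above. */
        memsize = (sqsize + STATUS_ENTRIES) * SLOT_SIZE_ASSUMED
                  + 16 * (unsigned int)sizeof(uint64_t);

        printf("raw memsize  = %u bytes\n", memsize);
        printf("user memsize = %u bytes (page aligned for mmap)\n",
               roundup_to(memsize, PAGE_SIZE_ASSUMED));
        return 0;
    }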