Lines matching refs: send_wr

Cross-reference hits for the send_wr work-request pointer along the ERDMA send path (fill_inline_data(), fill_sgl(), erdma_push_one_sqe(), erdma_post_send()). The leading number on each hit is the line number in the driver source, and "argument" marks the hits where send_wr is declared as a function parameter.

208 const struct ib_send_wr *send_wr, u16 wqe_idx, in fill_inline_data() argument
220 while (i < send_wr->num_sge) { in fill_inline_data()
221 bytes += send_wr->sg_list[i].length; in fill_inline_data()
225 remain_size = send_wr->sg_list[i].length; in fill_inline_data()
232 (void *)(uintptr_t)send_wr->sg_list[i].addr + in fill_inline_data()
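
The fill_inline_data() hits above show the inline path: a loop walks send_wr->sg_list, accumulates each sg_list[i].length into a running byte count, and copies each segment's payload (addressed through sg_list[i].addr) straight into the WQE. Below is a minimal user-space sketch of that loop; the struct layouts and the MAX_INLINE capacity are simplified stand-ins, and the in-kernel code additionally chunks each copy (the remain_size hit) so a segment can straddle internal WQE boundaries, which this sketch omits.

#include <errno.h>
#include <stdint.h>
#include <string.h>

/* Simplified stand-ins for the ib_sge / ib_send_wr fields used here. */
struct sge { uint64_t addr; uint32_t length; uint32_t lkey; };
struct send_wr { int num_sge; struct sge *sg_list; };

#define MAX_INLINE 96u /* hypothetical inline capacity of one WQE */

/* Walk the scatter/gather list and copy each segment's payload
 * directly into the WQE's inline area; return the total byte count,
 * or -EINVAL if the segments do not fit. */
static int fill_inline_data(void *inline_area, const struct send_wr *wr)
{
    char *dst = inline_area;
    uint32_t bytes = 0;
    int i = 0;

    while (i < wr->num_sge) {
        if (bytes + wr->sg_list[i].length > MAX_INLINE)
            return -EINVAL;
        memcpy(dst + bytes,
               (void *)(uintptr_t)wr->sg_list[i].addr,
               wr->sg_list[i].length);
        bytes += wr->sg_list[i].length;
        i++;
    }
    return (int)bytes;
}
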
254 static int fill_sgl(struct erdma_qp *qp, const struct ib_send_wr *send_wr, in fill_sgl() argument
261 if (send_wr->num_sge > qp->dev->attrs.max_send_sge) in fill_sgl()
267 while (i < send_wr->num_sge) { in fill_sgl()
273 bytes += send_wr->sg_list[i].length; in fill_sgl()
274 memcpy(sgl + sgl_offset, &send_wr->sg_list[i], in fill_sgl()
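
The fill_sgl() hits show the non-inline path: after checking num_sge against the device's max_send_sge cap, the loop copies the SGE descriptors themselves, not their payloads, into the WQE, accumulating the total length the same way. A sketch under the same simplified stand-in types:

#include <errno.h>
#include <stdint.h>
#include <string.h>

/* Same simplified stand-ins as the inline sketch above. */
struct sge { uint64_t addr; uint32_t length; uint32_t lkey; };
struct send_wr { int num_sge; struct sge *sg_list; };

/* Copy the SGE descriptors (not the payloads) into the WQE's SGL
 * area; the device later DMAs each payload via addr/lkey. Returns the
 * summed payload length, or -EINVAL when the caller passes more
 * segments than the device supports. */
static int fill_sgl(struct sge *sgl, const struct send_wr *wr,
                    int max_send_sge)
{
    uint32_t bytes = 0;
    int i = 0;

    if (wr->num_sge > max_send_sge)
        return -EINVAL;

    while (i < wr->num_sge) {
        bytes += wr->sg_list[i].length;
        memcpy(&sgl[i], &wr->sg_list[i], sizeof(*sgl));
        i++;
    }
    return (int)bytes;
}
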
286 const struct ib_send_wr *send_wr) in erdma_push_one_sqe() argument
290 enum ib_wr_opcode op = send_wr->opcode; in erdma_push_one_sqe()
310 qp->kern_qp.swr_tbl[idx] = send_wr->wr_id; in erdma_push_one_sqe()
311 flags = send_wr->send_flags; in erdma_push_one_sqe()
330 rdma_wr = container_of(send_wr, struct ib_rdma_wr, wr); in erdma_push_one_sqe()
333 write_sqe->imm_data = send_wr->ex.imm_data; in erdma_push_one_sqe()
347 if (unlikely(send_wr->num_sge != 1)) in erdma_push_one_sqe()
353 cpu_to_le32(send_wr->ex.invalidate_rkey); in erdma_push_one_sqe()
357 rdma_wr = container_of(send_wr, struct ib_rdma_wr, wr); in erdma_push_one_sqe()
358 read_sqe->length = cpu_to_le32(send_wr->sg_list[0].length); in erdma_push_one_sqe()
359 read_sqe->sink_stag = cpu_to_le32(send_wr->sg_list[0].lkey); in erdma_push_one_sqe()
361 cpu_to_le32(lower_32_bits(send_wr->sg_list[0].addr)); in erdma_push_one_sqe()
363 cpu_to_le32(upper_32_bits(send_wr->sg_list[0].addr)); in erdma_push_one_sqe()
369 sge->length = cpu_to_le32(send_wr->sg_list[0].length); in erdma_push_one_sqe()
371 send_wr->num_sge * sizeof(struct ib_sge); in erdma_push_one_sqe()
381 send_sqe->imm_data = send_wr->ex.imm_data; in erdma_push_one_sqe()
385 cpu_to_le32(send_wr->ex.invalidate_rkey); in erdma_push_one_sqe()
397 mr = to_emr(reg_wr(send_wr)->mr); in erdma_push_one_sqe()
400 to_erdma_access_flags(reg_wr(send_wr)->access); in erdma_push_one_sqe()
403 regmr_sge->stag = cpu_to_le32(reg_wr(send_wr)->key); in erdma_push_one_sqe()
427 regmr_sge->stag = cpu_to_le32(send_wr->ex.invalidate_rkey); in erdma_push_one_sqe()
437 cpu_to_le64(atomic_wr(send_wr)->swap); in erdma_push_one_sqe()
439 cpu_to_le64(atomic_wr(send_wr)->compare_add); in erdma_push_one_sqe()
444 cpu_to_le64(atomic_wr(send_wr)->compare_add); in erdma_push_one_sqe()
449 sge->addr = cpu_to_le64(atomic_wr(send_wr)->remote_addr); in erdma_push_one_sqe()
450 sge->key = cpu_to_le32(atomic_wr(send_wr)->rkey); in erdma_push_one_sqe()
453 sge->addr = cpu_to_le64(send_wr->sg_list[0].addr); in erdma_push_one_sqe()
454 sge->key = cpu_to_le32(send_wr->sg_list[0].lkey); in erdma_push_one_sqe()
455 sge->length = cpu_to_le32(send_wr->sg_list[0].length); in erdma_push_one_sqe()
464 ret = fill_inline_data(qp, send_wr, idx, sgl_offset, in erdma_push_one_sqe()
471 ret = fill_sgl(qp, send_wr, idx, sgl_offset, length_field); in erdma_push_one_sqe()
474 wqe_size += send_wr->num_sge * sizeof(struct ib_sge); in erdma_push_one_sqe()
476 send_wr->num_sge); in erdma_push_one_sqe()
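
Most of the erdma_push_one_sqe() hits are a dispatch on send_wr->opcode: plain sends read imm_data or invalidate_rkey straight off the send_wr->ex union, READs use sg_list[0], and the RDMA, memory-registration, and atomic opcodes first recover their extended request structs via container_of() or the rdma_wr()/reg_wr()/atomic_wr() helpers, which works because those verbs embed the base struct ib_send_wr as a member. A self-contained sketch of that recovery pattern, with stand-in types modeling only the fields used:

#include <stddef.h>
#include <stdint.h>

/* Stand-ins for the verbs structures; the real declarations live in
 * <rdma/ib_verbs.h>, and only the fields used below are modeled. */
struct ib_send_wr { int opcode; };
struct ib_rdma_wr {
    struct ib_send_wr wr;   /* embedded base work request */
    uint64_t remote_addr;
    uint32_t rkey;
};

#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

/* rdma_wr()-style accessor: an RDMA READ/WRITE request arrives as the
 * embedded 'wr' member, so step back to the enclosing struct to reach
 * remote_addr and rkey. The reg_wr() and atomic_wr() helpers follow
 * the same container_of() pattern for their request types. */
static struct ib_rdma_wr *to_rdma_wr(struct ib_send_wr *send_wr)
{
    return container_of(send_wr, struct ib_rdma_wr, wr);
}
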
499 int erdma_post_send(struct ib_qp *ibqp, const struct ib_send_wr *send_wr, in erdma_post_send() argument
504 const struct ib_send_wr *wr = send_wr; in erdma_post_send()
508 if (!send_wr) in erdma_post_send()
517 *bad_send_wr = send_wr; in erdma_post_send()
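
Finally, the erdma_post_send() hits outline the posting loop itself: reject a NULL send_wr, walk the ->next chain pushing one SQE per work request, and on failure report through the bad_send_wr out-pointer so the caller knows where posting stopped. A sketch of that contract follows; push_one() and the trimmed-down types are hypothetical stand-ins, and per the usual ibverbs convention the sketch hands back the first WR that failed.

#include <errno.h>

struct qp;                       /* opaque queue pair for this sketch */
struct ib_send_wr { struct ib_send_wr *next; /* opcode, sg_list, ... */ };

/* Hypothetical per-WR worker standing in for erdma_push_one_sqe(); a
 * real implementation would build the SQE and advance the producer
 * index here. */
static int push_one(struct qp *qp, const struct ib_send_wr *wr)
{
    (void)qp;
    (void)wr;
    return 0;
}

static int post_send(struct qp *qp, const struct ib_send_wr *send_wr,
                     const struct ib_send_wr **bad_send_wr)
{
    const struct ib_send_wr *wr = send_wr;
    int ret = 0;

    if (!send_wr)
        return -EINVAL;

    /* Post the whole chain; stop at the first failure and hand that
     * WR back so the caller can re-post from where it stopped. */
    while (wr) {
        ret = push_one(qp, wr);
        if (ret) {
            *bad_send_wr = wr;
            break;
        }
        wr = wr->next;
    }
    return ret;
}
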