Lines matching refs: psn (a short illustrative sketch of the PSN helper arithmetic follows the listing)
130 struct rvt_qp *qp, u32 psn, int diff, bool fecn);
349 qpriv->flow_state.psn = 0; in hfi1_qp_priv_init()
806 fs->psn = 0; in hfi1_kern_setup_hw_flow()
833 fs->psn = 0; in hfi1_kern_clear_hw_flow()
1522 flow->flow_state.spsn = qpriv->flow_state.psn; in hfi1_kern_exp_rcv_setup()
1526 qpriv->flow_state.psn += flow->npkts; in hfi1_kern_exp_rcv_setup()
1682 u32 psn, u16 *fidx) in find_flow_ib() argument
1692 if (cmp_psn(psn, flow->flow_state.ib_spsn) >= 0 && in find_flow_ib()
1693 cmp_psn(psn, flow->flow_state.ib_lpsn) <= 0) { in find_flow_ib()
1800 trace_hfi1_tid_req_build_read_req(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_build_tid_rdma_read_req()
1839 if ((qpriv->flow_state.psn + npkts) > MAX_TID_FLOW_PSN - 1) { in hfi1_build_tid_rdma_read_req()
1893 u32 bth0, u32 psn, u64 vaddr, u32 len) in tid_rdma_rcv_read_request() argument
1951 flow->flow_state.ib_spsn = psn; in tid_rdma_rcv_read_request()
1965 e->psn = psn; in tid_rdma_rcv_read_request()
1966 e->lpsn = psn + flow->npkts - 1; in tid_rdma_rcv_read_request()
1978 req->r_flow_psn = e->psn; in tid_rdma_rcv_read_request()
1980 trace_hfi1_tid_req_rcv_read_req(qp, 0, e->opcode, e->psn, e->lpsn, in tid_rdma_rcv_read_request()
1987 struct rvt_qp *qp, u32 psn, int diff) in tid_rdma_rcv_error() argument
1999 trace_hfi1_rsp_tid_rcv_error(qp, psn); in tid_rdma_rcv_error()
2000 trace_hfi1_tid_rdma_rcv_err(qp, 0, psn, diff); in tid_rdma_rcv_error()
2015 e = find_prev_entry(qp, psn, &prev, NULL, &old_req); in tid_rdma_rcv_error()
2021 req->r_flow_psn = psn; in tid_rdma_rcv_error()
2022 trace_hfi1_tid_req_rcv_err(qp, 0, e->opcode, e->psn, e->lpsn, req); in tid_rdma_rcv_error()
2037 if (psn != e->psn || len != req->total_len) in tid_rdma_rcv_error()
2062 if (tid_rdma_rcv_read_request(qp, e, packet, ohdr, bth0, psn, in tid_rdma_rcv_error()
2133 delta_psn(psn, fstate->resp_ib_psn), in tid_rdma_rcv_error()
2136 delta_psn(psn, fstate->resp_ib_psn); in tid_rdma_rcv_error()
2146 req->cur_seg = delta_psn(psn, e->psn); in tid_rdma_rcv_error()
2162 trace_hfi1_tid_req_rcv_err(qp, 0, e->opcode, e->psn, in tid_rdma_rcv_error()
2238 u32 bth0, psn, len, rkey; in hfi1_rc_rcv_tid_rdma_read_req() local
2250 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_read_req()
2251 trace_hfi1_rsp_rcv_tid_read_req(qp, psn); in hfi1_rc_rcv_tid_rdma_read_req()
2266 diff = delta_psn(psn, qp->r_psn); in hfi1_rc_rcv_tid_rdma_read_req()
2268 tid_rdma_rcv_err(packet, ohdr, qp, psn, diff, fecn); in hfi1_rc_rcv_tid_rdma_read_req()
2295 if (tid_rdma_rcv_read_request(qp, e, packet, ohdr, bth0, psn, vaddr, in hfi1_rc_rcv_tid_rdma_read_req()
2307 qp->r_psn += e->lpsn - e->psn + 1; in hfi1_rc_rcv_tid_rdma_read_req()
2415 find_tid_request(struct rvt_qp *qp, u32 psn, enum ib_wr_opcode opcode) in find_tid_request() argument
2427 if (cmp_psn(psn, wqe->psn) >= 0 && in find_tid_request()
2428 cmp_psn(psn, wqe->lpsn) <= 0) { in find_tid_request()
2529 req->e.swqe->psn, req->e.swqe->lpsn, in hfi1_rc_rcv_tid_rdma_read_resp()
2657 u8 rte, u32 psn, u32 ibpsn) in handle_read_kdeth_eflags() argument
2741 trace_hfi1_tid_req_read_kdeth_eflags(qp, 0, wqe->wr.opcode, wqe->psn, in handle_read_kdeth_eflags()
2761 diff = cmp_psn(psn, in handle_read_kdeth_eflags()
2787 if (cmp_psn(fpsn, psn) == 0) { in handle_read_kdeth_eflags()
2794 mask_psn(psn + 1); in handle_read_kdeth_eflags()
2859 u32 qp_num, psn, ibpsn; in hfi1_handle_kdeth_eflags() local
2884 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_handle_kdeth_eflags()
2924 ret = handle_read_kdeth_eflags(rcd, packet, rcv_type, rte, psn, in hfi1_handle_kdeth_eflags()
2946 trace_hfi1_eflags_err_write(qp, rcv_type, rte, psn); in hfi1_handle_kdeth_eflags()
2947 trace_hfi1_rsp_handle_kdeth_eflags(qp, psn); in hfi1_handle_kdeth_eflags()
2949 trace_hfi1_tid_req_handle_kdeth_eflags(qp, 0, e->opcode, e->psn, in hfi1_handle_kdeth_eflags()
2974 diff = cmp_psn(psn, in hfi1_handle_kdeth_eflags()
2987 if (psn == full_flow_psn(flow, in hfi1_handle_kdeth_eflags()
2991 mask_psn(psn + 1); in hfi1_handle_kdeth_eflags()
3064 wqe->psn, wqe->lpsn, in hfi1_tid_rdma_restart_req()
3132 trace_hfi1_tid_req_restart_req(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_tid_rdma_restart_req()
3335 wqe->lpsn = wqe->psn; in setup_tid_rdma_wqe()
3357 wqe->psn, wqe->lpsn, in setup_tid_rdma_wqe()
3510 trace_hfi1_tid_req_write_alloc_res(qp, 0, e->opcode, e->psn, in hfi1_tid_write_alloc_resources()
3548 if (qpriv->flow_state.psn + npkts > MAX_TID_FLOW_PSN - 1) { in hfi1_tid_write_alloc_resources()
3602 qp->r_psn = e->psn + req->alloc_seg; in hfi1_tid_write_alloc_resources()
3668 u32 bth0, psn, len, rkey, num_segs; in hfi1_rc_rcv_tid_rdma_write_req() local
3679 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_write_req()
3680 trace_hfi1_rsp_rcv_tid_write_req(qp, psn); in hfi1_rc_rcv_tid_rdma_write_req()
3693 diff = delta_psn(psn, qp->r_psn); in hfi1_rc_rcv_tid_rdma_write_req()
3695 tid_rdma_rcv_err(packet, ohdr, qp, psn, diff, fecn); in hfi1_rc_rcv_tid_rdma_write_req()
3753 e->psn = psn; in hfi1_rc_rcv_tid_rdma_write_req()
3767 req->r_flow_psn = e->psn; in hfi1_rc_rcv_tid_rdma_write_req()
3785 trace_hfi1_tid_req_rcv_write_req(qp, 0, e->opcode, e->psn, e->lpsn, in hfi1_rc_rcv_tid_rdma_write_req()
3848 trace_hfi1_tid_req_build_write_resp(qp, 0, e->opcode, e->psn, e->lpsn, in hfi1_build_tid_rdma_write_resp()
4048 u32 opcode, aeth, psn, flow_psn, i, tidlen = 0, pktlen; in hfi1_rc_rcv_tid_rdma_write_resp() local
4053 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_write_resp()
4060 if (cmp_psn(psn, qp->s_next_psn) >= 0) in hfi1_rc_rcv_tid_rdma_write_resp()
4064 if (unlikely(cmp_psn(psn, qp->s_last_psn) <= 0)) in hfi1_rc_rcv_tid_rdma_write_resp()
4076 if (cmp_psn(psn, qp->s_last_psn + 1) != 0) in hfi1_rc_rcv_tid_rdma_write_resp()
4101 if (!do_rc_ack(qp, aeth, psn, opcode, 0, rcd)) in hfi1_rc_rcv_tid_rdma_write_resp()
4104 trace_hfi1_ack(qp, psn); in hfi1_rc_rcv_tid_rdma_write_resp()
4118 flow->flow_state.resp_ib_psn = psn; in hfi1_rc_rcv_tid_rdma_write_resp()
4155 trace_hfi1_tid_req_rcv_write_resp(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_rc_rcv_tid_rdma_write_resp()
4161 if (!cmp_psn(psn, wqe->psn)) { in hfi1_rc_rcv_tid_rdma_write_resp()
4162 req->r_last_acked = mask_psn(wqe->psn - 1); in hfi1_rc_rcv_tid_rdma_write_resp()
4280 u32 psn, next; in hfi1_rc_rcv_tid_rdma_write_data() local
4285 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_write_data()
4296 if (cmp_psn(psn, full_flow_psn(flow, flow->flow_state.lpsn))) { in hfi1_rc_rcv_tid_rdma_write_data()
4299 if (cmp_psn(psn, flow->flow_state.r_next_psn)) in hfi1_rc_rcv_tid_rdma_write_data()
4302 flow->flow_state.r_next_psn = mask_psn(psn + 1); in hfi1_rc_rcv_tid_rdma_write_data()
4323 len += delta_psn(psn, in hfi1_rc_rcv_tid_rdma_write_data()
4341 priv->r_next_psn_kdeth = mask_psn(psn + 1); in hfi1_rc_rcv_tid_rdma_write_data()
4346 flow->flow_state.r_next_psn = mask_psn(psn + 1); in hfi1_rc_rcv_tid_rdma_write_data()
4349 rcd->flows[flow->idx].psn = psn & HFI1_KDETH_BTH_SEQ_MASK; in hfi1_rc_rcv_tid_rdma_write_data()
4360 trace_hfi1_rsp_rcv_tid_write_data(qp, psn); in hfi1_rc_rcv_tid_rdma_write_data()
4361 trace_hfi1_tid_req_rcv_write_data(qp, 0, e->opcode, e->psn, e->lpsn, in hfi1_rc_rcv_tid_rdma_write_data()
4420 static bool hfi1_tid_rdma_is_resync_psn(u32 psn) in hfi1_tid_rdma_is_resync_psn() argument
4422 return (bool)((psn & HFI1_KDETH_BTH_SEQ_MASK) == in hfi1_tid_rdma_is_resync_psn()
4505 u32 aeth, psn, req_psn, ack_psn, flpsn, resync_psn, ack_kpsn; in hfi1_rc_rcv_tid_rdma_ack() local
4511 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_ack()
4517 trace_hfi1_rcv_tid_ack(qp, aeth, psn, req_psn, resync_psn); in hfi1_rc_rcv_tid_rdma_ack()
4521 cmp_psn(psn, qpriv->s_resync_psn)) in hfi1_rc_rcv_tid_rdma_ack()
4525 if (hfi1_tid_rdma_is_resync_psn(psn)) in hfi1_rc_rcv_tid_rdma_ack()
4528 ack_kpsn = psn; in hfi1_rc_rcv_tid_rdma_ack()
4543 trace_hfi1_tid_req_rcv_tid_ack(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_rc_rcv_tid_rdma_ack()
4549 if (cmp_psn(psn, full_flow_psn(flow, flow->flow_state.spsn)) < 0 || in hfi1_rc_rcv_tid_rdma_ack()
4560 trace_hfi1_tid_req_rcv_tid_ack(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_rc_rcv_tid_rdma_ack()
4579 trace_hfi1_tid_req_rcv_tid_ack(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_rc_rcv_tid_rdma_ack()
4585 if (!hfi1_tid_rdma_is_resync_psn(psn)) { in hfi1_rc_rcv_tid_rdma_ack()
4626 psn = mask_psn(psn + 1); in hfi1_rc_rcv_tid_rdma_ack()
4627 generation = psn >> HFI1_KDETH_BTH_SEQ_SHIFT; in hfi1_rc_rcv_tid_rdma_ack()
4648 req->r_ack_psn = psn; in hfi1_rc_rcv_tid_rdma_ack()
4725 if (cmp_psn(psn, flpsn) > 0) in hfi1_rc_rcv_tid_rdma_ack()
4824 qp, 0, wqe->wr.opcode, wqe->psn, wqe->lpsn, req); in hfi1_tid_retry_timeout()
4880 u32 psn, generation, idx, gen_next; in hfi1_rc_rcv_tid_rdma_resync() local
4885 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_resync()
4887 generation = mask_psn(psn + 1) >> HFI1_KDETH_BTH_SEQ_SHIFT; in hfi1_rc_rcv_tid_rdma_resync()
4915 fs->psn = 0; in hfi1_rc_rcv_tid_rdma_resync()
4936 trace_hfi1_tid_req_rcv_resync(qp, 0, e->opcode, e->psn, in hfi1_rc_rcv_tid_rdma_resync()
4953 flow->flow_state.spsn = fs->psn; in hfi1_rc_rcv_tid_rdma_resync()
4959 fs->psn += flow->npkts; in hfi1_rc_rcv_tid_rdma_resync()
5070 trace_hfi1_tid_req_make_tid_pkt(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_make_tid_rdma_pkt()
5109 wqe->psn, wqe->lpsn, req); in hfi1_make_tid_rdma_pkt()
5231 trace_hfi1_rsp_make_tid_ack(qp, e->psn); in make_tid_rdma_ack()
5232 trace_hfi1_tid_req_make_tid_ack(qp, 0, e->opcode, e->psn, e->lpsn, in make_tid_rdma_ack()
5306 trace_hfi1_tid_req_make_tid_ack(qp, 0, e->opcode, e->psn, e->lpsn, in make_tid_rdma_ack()
5503 struct rvt_qp *qp, u32 psn, int diff, bool fecn) in tid_rdma_rcv_err() argument
5507 tid_rdma_rcv_error(packet, ohdr, qp, psn, diff); in tid_rdma_rcv_err()
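Nearly every match above funnels through a small set of PSN helpers: mask_psn() to truncate to the 24-bit IB packet sequence number, cmp_psn()/delta_psn() for wraparound-aware ordering and distance, and a generation/sequence split on the TID flow PSN (the `psn >> HFI1_KDETH_BTH_SEQ_SHIFT` and `psn & HFI1_KDETH_BTH_SEQ_MASK` lines). The standalone C sketch below is an illustration of that arithmetic, assuming a 24-bit PSN and an 11-bit KDETH sequence field; it mirrors the behavior implied by the listing, not the exact in-tree hfi1 definitions.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative constants: IB PSNs are 24 bits wide; the 11-bit KDETH
     * BTH sequence split is an assumed value for this sketch only. */
    #define PSN_MASK            0xFFFFFFu
    #define KDETH_BTH_SEQ_SHIFT 11
    #define KDETH_BTH_SEQ_MASK  ((1u << KDETH_BTH_SEQ_SHIFT) - 1)

    /* Keep only the low 24 bits of a PSN (cf. mask_psn() in the listing). */
    static inline uint32_t mask_psn(uint32_t a)
    {
            return a & PSN_MASK;
    }

    /* Signed circular distance between two 24-bit PSNs: take the 24-bit
     * difference and sign-extend it, so a small forward step across the
     * 0xFFFFFF -> 0 wrap still comes out positive (cf. delta_psn()). */
    static inline int delta_psn(uint32_t a, uint32_t b)
    {
            uint32_t d = (a - b) & PSN_MASK;

            return (d & 0x800000u) ? (int)d - 0x1000000 : (int)d;
    }

    /* Wraparound-aware compare: <0, 0, or >0, like the cmp_psn() calls above. */
    static inline int cmp_psn(uint32_t a, uint32_t b)
    {
            return delta_psn(a, b);
    }

    int main(void)
    {
            uint32_t last = 0xFFFFFE;            /* just before the 24-bit wrap */
            uint32_t next = mask_psn(last + 3);  /* wraps to 0x000001 */
            uint32_t flow_psn = mask_psn((5u << KDETH_BTH_SEQ_SHIFT) | 66);

            /* Despite the numeric wrap, 'next' still orders after 'last'. */
            printf("next=0x%06x cmp>0:%d delta:%d\n",
                   (unsigned)next, cmp_psn(next, last) > 0, delta_psn(next, last));

            /* Generation/sequence split used by the TID RDMA flow PSNs:
             * upper bits carry a generation, lower bits a per-flow sequence. */
            printf("generation=%u seq=%u\n",
                   (unsigned)(flow_psn >> KDETH_BTH_SEQ_SHIFT),
                   (unsigned)(flow_psn & KDETH_BTH_SEQ_MASK));
            return 0;
    }

The wraparound-aware compare is why checks such as `cmp_psn(psn, wqe->psn) >= 0 && cmp_psn(psn, wqe->lpsn) <= 0` are used instead of plain integer comparisons: a raw `<`/`>` would misorder packets once the 24-bit PSN wraps. The generation split mirrors lines like `generation = psn >> HFI1_KDETH_BTH_SEQ_SHIFT` and the RESYNC handling, where the upper bits of the flow PSN advance as a generation counter while the low bits sequence packets within a flow.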