Lines matching refs: sq, in drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c. The leading number on each entry is the line number in that file; the trailing "argument"/"local" tags mark how sq is used at that site.
108 struct otx2_snd_queue *sq, in otx2_xdp_snd_pkt_handler() argument
116 sg = &sq->sg[snd_comp->sqe_id]; in otx2_xdp_snd_pkt_handler()
127 struct otx2_snd_queue *sq, in otx2_snd_pkt_handler() argument
143 sg = &sq->sg[snd_comp->sqe_id]; in otx2_snd_pkt_handler()
149 timestamp = ((u64 *)sq->timestamps->base)[snd_comp->sqe_id]; in otx2_snd_pkt_handler()
455 otx2_xdp_snd_pkt_handler(pfvf, &pfvf->qset.sq[qidx], in otx2_tx_napi_handler()
459 &pfvf->qset.sq[cq->cint_idx], in otx2_tx_napi_handler()
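
Both completion handlers above recover the software state for a finished descriptor by indexing sq->sg[] with the sqe_id echoed back in the completion entry (lines 116 and 143). A minimal userspace model of that lookup; sg_ctx and snd_comp here are simplified stand-ins, not the driver's types:

    #include <stdlib.h>

    struct sg_ctx {                       /* hypothetical per-SQE TX context */
        void *skb;                        /* buffer owned by this slot */
    };

    struct snd_comp {                     /* hypothetical completion entry */
        unsigned int sqe_id;              /* index stamped into the SQE header */
    };

    /* sg_tbl has one entry per SQE; sqe_id selects it directly,
     * as in sg = &sq->sg[snd_comp->sqe_id] above. */
    static void snd_pkt_handler(struct sg_ctx *sg_tbl,
                                const struct snd_comp *comp)
    {
        struct sg_ctx *sg = &sg_tbl[comp->sqe_id];

        if (!sg->skb)
            return;                       /* slot already reclaimed */
        free(sg->skb);                    /* stands in for unmap + kfree_skb */
        sg->skb = NULL;
    }
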
529 void otx2_sqe_flush(void *dev, struct otx2_snd_queue *sq, in otx2_sqe_flush() argument
538 memcpy(sq->lmt_addr, sq->sqe_base, size); in otx2_sqe_flush()
539 status = otx2_lmt_flush(sq->io_addr); in otx2_sqe_flush()
542 sq->head++; in otx2_sqe_flush()
543 sq->head &= (sq->sqe_cnt - 1); in otx2_sqe_flush()
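
otx2_sqe_flush() (lines 538-543) hands a staged SQE to hardware: copy it into the LMT region, trigger an LMTST flush, then advance the producer index. The mask on line 543 assumes sqe_cnt is a power of two; a runnable sketch of the wrap arithmetic, with the OcteonTX2-specific LMTST store reduced to a comment:

    #include <assert.h>
    #include <stdio.h>

    int main(void)
    {
        const unsigned int sqe_cnt = 1024;       /* must be a power of two */
        unsigned int head = sqe_cnt - 1;         /* last slot in the ring */

        assert((sqe_cnt & (sqe_cnt - 1)) == 0);  /* precondition for the mask */

        /* memcpy(sq->lmt_addr, sq->sqe_base, size);
         * status = otx2_lmt_flush(sq->io_addr);   (lines 538-539) */

        head++;
        head &= sqe_cnt - 1;                     /* 1023 + 1 wraps to 0 */
        printf("head after wrap: %u\n", head);   /* prints 0 */
        return 0;
    }
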
548 static bool otx2_sqe_add_sg(struct otx2_nic *pfvf, struct otx2_snd_queue *sq, in otx2_sqe_add_sg() argument
556 sq->sg[sq->head].num_segs = 0; in otx2_sqe_add_sg()
560 sg = (struct nix_sqe_sg_s *)(sq->sqe_base + *offset); in otx2_sqe_add_sg()
583 sq->sg[sq->head].dma_addr[seg] = dma_addr; in otx2_sqe_add_sg()
584 sq->sg[sq->head].size[seg] = len; in otx2_sqe_add_sg()
585 sq->sg[sq->head].num_segs++; in otx2_sqe_add_sg()
588 sq->sg[sq->head].skb = (u64)skb; in otx2_sqe_add_sg()
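
otx2_sqe_add_sg() (lines 556-588) does double bookkeeping: it writes gather pointers into the SQE for the NIC and mirrors every (dma_addr, len) pair into sq->sg[sq->head] so the completion path can unmap them, finishing by stashing the skb pointer in the same slot (line 588). A simplified model of that loop, assuming the usual three segments per SG subdescriptor; sg_subdesc is illustrative, not the real nix_sqe_sg_s layout:

    #include <stdint.h>
    #include <string.h>

    #define MAX_SEGS_PER_SG 3             /* assumed segments per SG subdesc */
    #define MAX_SEGS        9             /* cap for this sketch */

    struct sg_subdesc {                   /* simplified, not the real layout */
        uint8_t  segs;
        uint16_t size[MAX_SEGS_PER_SG];
        uint64_t iova[MAX_SEGS_PER_SG];
    };

    struct sg_ctx {                       /* software mirror for later unmap */
        int      num_segs;
        uint64_t dma_addr[MAX_SEGS];
        uint16_t size[MAX_SEGS];
        void    *skb;
    };

    /* Caller guarantees num_segs <= MAX_SEGS and sqe[] holds enough subdescs. */
    static void sqe_add_sg(struct sg_subdesc *sqe, struct sg_ctx *ctx,
                           const uint64_t *addrs, const uint16_t *lens,
                           int num_segs, void *skb)
    {
        struct sg_subdesc *sg = NULL;

        ctx->num_segs = 0;
        for (int seg = 0; seg < num_segs; seg++) {
            if (!(seg % MAX_SEGS_PER_SG)) {          /* start a new subdesc */
                sg = &sqe[seg / MAX_SEGS_PER_SG];
                memset(sg, 0, sizeof(*sg));
            }
            sg->iova[seg % MAX_SEGS_PER_SG] = addrs[seg];
            sg->size[seg % MAX_SEGS_PER_SG] = lens[seg];
            sg->segs++;
            ctx->dma_addr[seg] = addrs[seg];         /* mirrored for unmap */
            ctx->size[seg]     = lens[seg];
            ctx->num_segs++;
        }
        ctx->skb = skb;                              /* reclaimed at completion */
    }
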
593 static void otx2_sqe_add_ext(struct otx2_nic *pfvf, struct otx2_snd_queue *sq, in otx2_sqe_add_ext() argument
598 ext = (struct nix_sqe_ext_s *)(sq->sqe_base + *offset); in otx2_sqe_add_ext()
663 static void otx2_sqe_add_mem(struct otx2_snd_queue *sq, int *offset, in otx2_sqe_add_mem() argument
668 mem = (struct nix_sqe_mem_s *)(sq->sqe_base + *offset); in otx2_sqe_add_mem()
678 static void otx2_sqe_add_hdr(struct otx2_nic *pfvf, struct otx2_snd_queue *sq, in otx2_sqe_add_hdr() argument
690 sqe_hdr->aura = sq->aura_id; in otx2_sqe_add_hdr()
693 sqe_hdr->sq = qidx; in otx2_sqe_add_hdr()
697 sqe_hdr->sqe_id = sq->head; in otx2_sqe_add_hdr()
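
otx2_sqe_add_hdr() (lines 690-697) stamps the fields that tie the path together: the aura (buffer pool) to free into, the destination queue index, and sqe_id = sq->head. The hardware copies sqe_id into the completion entry, which is exactly the index the handlers at lines 116 and 143 use to find sq->sg[]. A tiny stand-in illustration (the real struct nix_sqe_hdr_s has more fields):

    #include <stdint.h>

    struct sqe_hdr {              /* stand-in for struct nix_sqe_hdr_s */
        uint32_t aura;            /* pool that owns the packet's buffers */
        uint16_t sq;              /* destination send queue */
        uint16_t sqe_id;          /* echoed back in the completion entry */
    };

    static void sqe_add_hdr(struct sqe_hdr *h, uint32_t aura_id,
                            uint16_t qidx, uint16_t head)
    {
        h->aura   = aura_id;
        h->sq     = qidx;
        h->sqe_id = head;         /* completion indexes sg[] with this */
    }
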
726 struct otx2_snd_queue *sq, in otx2_dma_map_tso_skb() argument
730 struct sg_list *sg = &sq->sg[sqe]; in otx2_dma_map_tso_skb()
758 static u64 otx2_tso_frag_dma_addr(struct otx2_snd_queue *sq, in otx2_tso_frag_dma_addr() argument
762 struct sg_list *sg = &sq->sg[sqe]; in otx2_tso_frag_dma_addr()
776 static void otx2_sqe_tso_add_sg(struct otx2_snd_queue *sq, in otx2_sqe_tso_add_sg() argument
787 sg = (struct nix_sqe_sg_s *)(sq->sqe_base + *offset); in otx2_sqe_tso_add_sg()
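
The TSO helpers avoid per-segment DMA mapping: otx2_dma_map_tso_skb() maps the skb's pieces once and records them in sq->sg[sqe] (line 730), and otx2_tso_frag_dma_addr() later resolves any byte offset in the packet to an address inside one of those mappings. A runnable model of that base-plus-offset resolution; the chunk table and names are hypothetical reconstructions, not the driver's code:

    #include <stdint.h>
    #include <stdio.h>

    struct chunk {                       /* one mapped piece of the skb */
        uint64_t dma_addr;               /* base IOVA from the one-time map */
        uint32_t len;
    };

    /* Walk the already-mapped chunks instead of calling dma_map again. */
    static uint64_t tso_frag_dma_addr(const struct chunk *c, int nchunks,
                                      uint32_t offset)
    {
        for (int i = 0; i < nchunks; i++) {
            if (offset < c[i].len)
                return c[i].dma_addr + offset;
            offset -= c[i].len;
        }
        return 0;                        /* out of range: caller bug */
    }

    int main(void)
    {
        struct chunk c[] = { { 0x1000, 100 }, { 0x9000, 4096 } };
        /* offset 150 is 50 bytes into the second chunk: prints 0x9032 */
        printf("0x%llx\n", (unsigned long long)tso_frag_dma_addr(c, 2, 150));
        return 0;
    }
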
807 static void otx2_sq_append_tso(struct otx2_nic *pfvf, struct otx2_snd_queue *sq, in otx2_sq_append_tso() argument
813 int first_sqe = sq->head; in otx2_sq_append_tso()
822 if (otx2_dma_map_tso_skb(pfvf, sq, skb, first_sqe, hdr_len)) { in otx2_sq_append_tso()
837 memset(sq->sqe_base, 0, sq->sqe_size); in otx2_sq_append_tso()
838 sqe_hdr = (struct nix_sqe_hdr_s *)(sq->sqe_base); in otx2_sq_append_tso()
839 otx2_sqe_add_hdr(pfvf, sq, sqe_hdr, skb, qidx); in otx2_sq_append_tso()
843 hdr = sq->tso_hdrs->base + (sq->head * TSO_HEADER_SIZE); in otx2_sq_append_tso()
846 sq->tso_hdrs->iova + (sq->head * TSO_HEADER_SIZE); in otx2_sq_append_tso()
859 otx2_tso_frag_dma_addr(sq, skb, in otx2_sq_append_tso()
869 otx2_sqe_tso_add_sg(sq, &list, &offset); in otx2_sq_append_tso()
880 sq->sg[first_sqe].skb = (u64)skb; in otx2_sq_append_tso()
888 pfvf->hw_ops->sqe_flush(pfvf, sq, offset, qidx); in otx2_sq_append_tso()
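
otx2_sq_append_tso() (lines 813-888) emits one SQE per TSO segment, each with its own rewritten copy of the packet headers. Lines 843-846 show the staging scheme: the CPU composes the header at tso_hdrs->base + head * TSO_HEADER_SIZE while the SQE points the NIC at tso_hdrs->iova plus the same offset, i.e. one DMA-coherent array of fixed-size slots indexed by ring position. Note also line 880: only the first SQE of the train records the skb pointer, so the packet is freed once rather than per segment. A small sketch of the paired slot addressing (names assumed, slot size illustrative):

    #include <stdint.h>
    #include <stddef.h>

    #define TSO_HEADER_SIZE 128          /* illustrative fixed slot size */

    struct hdr_ring {
        uint8_t *base;                   /* CPU view of the staging buffer */
        uint64_t iova;                   /* device view of the same memory */
    };

    /* The CPU writes the rewritten headers here... */
    static uint8_t *hdr_cpu_slot(const struct hdr_ring *r, unsigned int head)
    {
        return r->base + (size_t)head * TSO_HEADER_SIZE;
    }

    /* ...and the SQE's gather pointer carries the matching device address. */
    static uint64_t hdr_dev_slot(const struct hdr_ring *r, unsigned int head)
    {
        return r->iova + (uint64_t)head * TSO_HEADER_SIZE;
    }
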
931 struct otx2_snd_queue *sq, int *offset) in otx2_set_txtstamp() argument
938 iova = sq->timestamps->iova + (sq->head * sizeof(u64)); in otx2_set_txtstamp()
939 otx2_sqe_add_mem(sq, offset, NIX_SENDMEMALG_E_SETTSTMP, iova); in otx2_set_txtstamp()
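
For hardware TX timestamping, otx2_set_txtstamp() appends a SEND MEM subdescriptor (line 939) pointing the NIC at timestamps->iova + head * sizeof(u64); the completion handler then reads the same slot through the CPU mapping, indexed by sqe_id (line 149). A compact model of that producer/consumer agreement on the slot; ts_ring is a stand-in:

    #include <stdint.h>

    struct ts_ring {
        uint64_t *base;                  /* CPU view: read at completion */
        uint64_t  iova;                  /* device view: NIC writes here */
    };

    /* At submit: target address carried by the SEND MEM subdescriptor. */
    static uint64_t ts_write_addr(const struct ts_ring *r, unsigned int head)
    {
        return r->iova + head * sizeof(uint64_t);
    }

    /* At completion: same index, CPU side (cf. line 149 above). */
    static uint64_t ts_read(const struct ts_ring *r, unsigned int sqe_id)
    {
        return r->base[sqe_id];
    }
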
945 bool otx2_sq_append_skb(struct net_device *netdev, struct otx2_snd_queue *sq, in otx2_sq_append_skb() argument
957 free_sqe = (sq->num_sqbs - *sq->aura_fc_addr) * sq->sqe_per_sqb; in otx2_sq_append_skb()
959 if (free_sqe < sq->sqe_thresh || in otx2_sq_append_skb()
980 otx2_sq_append_tso(pfvf, sq, skb, qidx); in otx2_sq_append_skb()
987 memset(sq->sqe_base + 8, 0, sq->sqe_size - 8); in otx2_sq_append_skb()
988 sqe_hdr = (struct nix_sqe_hdr_s *)(sq->sqe_base); in otx2_sq_append_skb()
989 otx2_sqe_add_hdr(pfvf, sq, sqe_hdr, skb, qidx); in otx2_sq_append_skb()
993 otx2_sqe_add_ext(pfvf, sq, skb, &offset); in otx2_sq_append_skb()
996 if (!otx2_sqe_add_sg(pfvf, sq, skb, num_segs, &offset)) { in otx2_sq_append_skb()
997 otx2_dma_unmap_skb_frags(pfvf, &sq->sg[sq->head]); in otx2_sq_append_skb()
1001 otx2_set_txtstamp(pfvf, skb, sq, &offset); in otx2_sq_append_skb()
1008 pfvf->hw_ops->sqe_flush(pfvf, sq, offset, qidx); in otx2_sq_append_skb()
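
otx2_sq_append_skb() gates admission on line 957 without a software free counter: the hardware-maintained count at aura_fc_addr is subtracted from num_sqbs and scaled by sqe_per_sqb to get the remaining descriptors, and line 959 bails out below a threshold (the second operand of the || is elided in this listing). A runnable sketch of the check; reading the fill count as "consumed SQBs" is an assumption on my part:

    #include <stdbool.h>
    #include <stdio.h>

    /* aura_fc is a stand-in for the hardware-updated *aura_fc_addr. */
    static bool sq_has_room(unsigned int num_sqbs, unsigned int aura_fc,
                            unsigned int sqe_per_sqb, unsigned int sqe_thresh)
    {
        unsigned int free_sqe = (num_sqbs - aura_fc) * sqe_per_sqb;

        /* the listing elides a second condition after the || on line 959 */
        return free_sqe >= sqe_thresh;
    }

    int main(void)
    {
        /* 128 SQBs, fill count 120, 32 SQEs per SQB -> 256 free SQEs */
        printf("%d\n", sq_has_room(128, 120, 32, 16));   /* prints 1 */
        return 0;
    }
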
1051 struct otx2_snd_queue *sq; in otx2_cleanup_tx_cqes() local
1056 sq = &pfvf->qset.sq[cq->cint_idx]; in otx2_cleanup_tx_cqes()
1068 sg = &sq->sg[cqe->comp.sqe_id]; in otx2_cleanup_tx_cqes()
1103 static void otx2_xdp_sqe_add_sg(struct otx2_snd_queue *sq, u64 dma_addr, in otx2_xdp_sqe_add_sg() argument
1109 sg = (struct nix_sqe_sg_s *)(sq->sqe_base + *offset); in otx2_xdp_sqe_add_sg()
1118 sq->sg[sq->head].dma_addr[0] = dma_addr; in otx2_xdp_sqe_add_sg()
1119 sq->sg[sq->head].size[0] = len; in otx2_xdp_sqe_add_sg()
1120 sq->sg[sq->head].num_segs = 1; in otx2_xdp_sqe_add_sg()
1126 struct otx2_snd_queue *sq; in otx2_xdp_sq_append_pkt() local
1129 sq = &pfvf->qset.sq[qidx]; in otx2_xdp_sq_append_pkt()
1130 free_sqe = (sq->num_sqbs - *sq->aura_fc_addr) * sq->sqe_per_sqb; in otx2_xdp_sq_append_pkt()
1131 if (free_sqe < sq->sqe_thresh) in otx2_xdp_sq_append_pkt()
1134 memset(sq->sqe_base + 8, 0, sq->sqe_size - 8); in otx2_xdp_sq_append_pkt()
1136 sqe_hdr = (struct nix_sqe_hdr_s *)(sq->sqe_base); in otx2_xdp_sq_append_pkt()
1139 sqe_hdr->aura = sq->aura_id; in otx2_xdp_sq_append_pkt()
1141 sqe_hdr->sq = qidx; in otx2_xdp_sq_append_pkt()
1145 sqe_hdr->sqe_id = sq->head; in otx2_xdp_sq_append_pkt()
1149 otx2_xdp_sqe_add_sg(sq, iova, len, &offset); in otx2_xdp_sq_append_pkt()
1151 pfvf->hw_ops->sqe_flush(pfvf, sq, offset, qidx); in otx2_xdp_sq_append_pkt()
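
The XDP transmit path (lines 1126-1151) is the skb path stripped to essentials: the same aura-based space check as line 957 (a single condition here, line 1131), the same header stamping as lines 690-697 including sqe_id = sq->head, a single-segment SG write (lines 1118-1120), and the same hw_ops->sqe_flush() hand-off. One detail worth noting: the memset on lines 987 and 1134 deliberately skips the first 8 bytes of the SQE; the skeleton below just clears everything. Pulling the earlier sketches together, with all names being the simplified stand-ins from above:

    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    struct xdp_sq {                      /* simplified send queue */
        unsigned int head, sqe_cnt;      /* sqe_cnt is a power of two */
        unsigned int num_sqbs, sqe_per_sqb, sqe_thresh;
        unsigned int aura_fc;            /* stand-in for *aura_fc_addr */
        uint8_t      sqe_base[128];      /* staging area for one SQE */
    };

    static bool xdp_sq_append_pkt(struct xdp_sq *sq, uint64_t iova,
                                  uint16_t len, uint16_t qidx)
    {
        unsigned int free_sqe = (sq->num_sqbs - sq->aura_fc) * sq->sqe_per_sqb;

        if (free_sqe < sq->sqe_thresh)
            return false;                /* back-pressure the XDP caller */

        memset(sq->sqe_base, 0, sizeof(sq->sqe_base));
        /* stamp header: aura, sq = qidx, sqe_id = sq->head (lines 1139-1145) */
        /* add one SG segment for (iova, len)               (lines 1118-1120) */
        /* hw_ops->sqe_flush(): LMT copy + LMTST flush      (lines 538-539)   */
        (void)iova; (void)len; (void)qidx;
        sq->head = (sq->head + 1) & (sq->sqe_cnt - 1);
        return true;
    }
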