Lines Matching refs:cached_cons

44 	u32 cached_cons;  member
120 static inline void __xskq_cons_read_addr_unchecked(struct xsk_queue *q, u32 cached_cons, u64 *addr) in __xskq_cons_read_addr_unchecked() argument
123 u32 idx = cached_cons & q->ring_mask; in __xskq_cons_read_addr_unchecked()
130 if (q->cached_cons != q->cached_prod) { in xskq_cons_read_addr_unchecked()
131 __xskq_cons_read_addr_unchecked(q, q->cached_cons, addr); in xskq_cons_read_addr_unchecked()
194 return q->cached_cons != q->cached_prod; in xskq_has_descs()
212 if (q->cached_cons != q->cached_prod) { in xskq_cons_read_desc()
214 u32 idx = q->cached_cons & q->ring_mask; in xskq_cons_read_desc()
226 q->cached_cons += cnt; in xskq_cons_release_n()
240 u32 cached_cons = q->cached_cons, nb_entries = 0; in xskq_cons_read_desc_batch() local
247 while (cached_cons != q->cached_prod && nb_entries < max) { in xskq_cons_read_desc_batch()
249 u32 idx = cached_cons & q->ring_mask; in xskq_cons_read_desc_batch()
253 cached_cons++; in xskq_cons_read_desc_batch()
271 cached_cons -= nr_frags; in xskq_cons_read_desc_batch()
273 xskq_cons_release_n(q, cached_cons - q->cached_cons); in xskq_cons_read_desc_batch()
281 smp_store_release(&q->ring->consumer, q->cached_cons); /* D, matches A */ in __xskq_cons_release()
298 u32 entries = q->cached_prod - q->cached_cons; in xskq_cons_nb_entries()
304 entries = q->cached_prod - q->cached_cons; in xskq_cons_nb_entries()
311 if (q->cached_prod == q->cached_cons) in xskq_cons_peek_addr_unchecked()
320 if (q->cached_prod == q->cached_cons) in xskq_cons_peek_desc()
331 q->cached_cons++; in xskq_cons_release()
336 q->cached_cons -= cnt; in xskq_cons_cancel_n()
349 u32 free_entries = q->nentries - (q->cached_prod - q->cached_cons); in xskq_prod_nb_free()
355 q->cached_cons = READ_ONCE(q->ring->consumer); in xskq_prod_nb_free()
356 free_entries = q->nentries - (q->cached_prod - q->cached_cons); in xskq_prod_nb_free()
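
The listing above traces the cached consumer index through its full lifecycle: peek and read entries against the locally cached producer index, advance cached_cons without touching shared memory, then publish it back to ring->consumer with release semantics. The sketch below is a rough userspace illustration of that pattern, not the kernel code itself: it stands in for struct xsk_queue with a made-up struct, models only the fields visible in the listing, and approximates READ_ONCE/smp_load_acquire/smp_store_release with C11 <stdatomic.h> operations.

/*
 * Simplified sketch of the cached-consumer pattern, assuming a
 * power-of-two ring. Names (sketch_queue, cons_*) are illustrative
 * and do not exist in the kernel.
 */
#include <stdatomic.h>
#include <stdint.h>

struct sketch_queue {
	uint32_t cached_prod;      /* producer index cached on the consumer side */
	uint32_t cached_cons;      /* local consumer index, advanced with no fences */
	uint32_t ring_mask;        /* nentries - 1 */
	uint32_t nentries;
	_Atomic uint32_t producer; /* shared indices (ring->producer / ring->consumer) */
	_Atomic uint32_t consumer;
	uint64_t ring[8];          /* descriptor/address slots, nentries == 8 here */
};

/* Roughly mirrors xskq_cons_nb_entries(): refresh cached_prod only when
 * the cached window runs dry. */
static uint32_t cons_nb_entries(struct sketch_queue *q, uint32_t max)
{
	uint32_t entries = q->cached_prod - q->cached_cons;

	if (entries >= max)
		return max;

	/* The kernel uses an acquire load of the shared producer index here. */
	q->cached_prod = atomic_load_explicit(&q->producer, memory_order_acquire);
	entries = q->cached_prod - q->cached_cons;

	return entries >= max ? max : entries;
}

/* Roughly mirrors xskq_cons_read_addr_unchecked(): consume one entry by
 * masking cached_cons into the ring and advancing it locally. */
static int cons_read_addr(struct sketch_queue *q, uint64_t *addr)
{
	if (q->cached_cons == q->cached_prod)
		return 0;

	*addr = q->ring[q->cached_cons & q->ring_mask];
	q->cached_cons++;
	return 1;
}

/* Roughly mirrors __xskq_cons_release(): publish the cached index to the
 * shared ring with release semantics ("D, matches A" in the kernel comment). */
static void cons_release(struct sketch_queue *q)
{
	atomic_store_explicit(&q->consumer, q->cached_cons, memory_order_release);
}

A typical consume loop under these assumptions would call cons_nb_entries() to size a batch, cons_read_addr() repeatedly while it returns 1, and cons_release() once per batch, which is the same batching the kernel gets from xskq_cons_release_n() and __xskq_cons_release().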