Lines Matching refs:cached_prod
43 u32 cached_prod; member
125 if (q->cached_cons != q->cached_prod) { in xskq_cons_read_addr_unchecked()
195 while (q->cached_cons != q->cached_prod) { in xskq_cons_read_desc()
220 while (cached_cons != q->cached_prod && nb_entries < max) { in xskq_cons_read_desc_batch()
250 q->cached_prod = smp_load_acquire(&q->ring->producer); /* C, matches B */ in __xskq_cons_peek()
261 u32 entries = q->cached_prod - q->cached_cons; in xskq_cons_nb_entries()
267 entries = q->cached_prod - q->cached_cons; in xskq_cons_nb_entries()
279 if (q->cached_prod == q->cached_cons) in xskq_cons_peek_addr_unchecked()
288 if (q->cached_prod == q->cached_cons) in xskq_cons_peek_desc()
312 u32 free_entries = q->nentries - (q->cached_prod - q->cached_cons); in xskq_prod_nb_free()
319 free_entries = q->nentries - (q->cached_prod - q->cached_cons); in xskq_prod_nb_free()
331 q->cached_prod--; in xskq_prod_cancel()
340 q->cached_prod++; in xskq_prod_reserve()
352 ring->desc[q->cached_prod++ & q->ring_mask] = addr; in xskq_prod_reserve_addr()
360 u32 i, cached_prod; in xskq_prod_write_addr_batch() local
363 cached_prod = q->cached_prod; in xskq_prod_write_addr_batch()
365 ring->desc[cached_prod++ & q->ring_mask] = descs[i].addr; in xskq_prod_write_addr_batch()
366 q->cached_prod = cached_prod; in xskq_prod_write_addr_batch()
379 idx = q->cached_prod++ & q->ring_mask; in xskq_prod_reserve_desc()
393 __xskq_prod_submit(q, q->cached_prod); in xskq_prod_submit()
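
Taken together, the references above show the cached-index pattern used by the AF_XDP queues: cached_prod and cached_cons are private snapshots of the shared ring's producer and consumer indices, and the shared words are touched only when the cached view says the ring looks empty (consumer side) or full (producer side). Below is a minimal sketch of that pattern, assuming Linux kernel primitives (u32/u64, READ_ONCE, smp_load_acquire, smp_store_release, -ENOSPC); the struct and helper names (sketch_queue, sketch_cons_nb_entries, and so on) are illustrative stand-ins for the kernel's struct xsk_queue and xskq_* helpers, not the verbatim source.

struct sketch_ring {
	u32 producer;		/* written by the producer, read by the consumer */
	u32 consumer;		/* written by the consumer, read by the producer */
	u64 desc[];		/* ring entries, nentries of them */
};

struct sketch_queue {
	u32 ring_mask;		/* nentries - 1; nentries is a power of two */
	u32 nentries;
	u32 cached_prod;	/* local snapshot of ring->producer */
	u32 cached_cons;	/* local snapshot of ring->consumer */
	struct sketch_ring *ring;	/* shared with the other side */
};

/* Consumer side: refresh the cached producer index.  The acquire load
 * pairs with the producer's release store in sketch_prod_submit(), so
 * entries written before the index was bumped are visible afterwards.
 */
static inline void sketch_cons_refresh(struct sketch_queue *q)
{
	q->cached_prod = smp_load_acquire(&q->ring->producer);
}

/* Entries available to consume, capped at @max.  The shared producer
 * word is only re-read when the cached view says the ring might be empty.
 */
static inline u32 sketch_cons_nb_entries(struct sketch_queue *q, u32 max)
{
	u32 entries = q->cached_prod - q->cached_cons;

	if (entries >= max)
		return max;

	sketch_cons_refresh(q);
	entries = q->cached_prod - q->cached_cons;

	return entries >= max ? max : entries;
}

/* Producer side: free slots, capped at @max.  Unsigned wrap-around keeps
 * cached_prod - cached_cons equal to the in-flight count even after the
 * 32-bit indices overflow.
 */
static inline u32 sketch_prod_nb_free(struct sketch_queue *q, u32 max)
{
	u32 free_entries = q->nentries - (q->cached_prod - q->cached_cons);

	if (free_entries >= max)
		return max;

	q->cached_cons = READ_ONCE(q->ring->consumer);
	free_entries = q->nentries - (q->cached_prod - q->cached_cons);

	return free_entries >= max ? max : free_entries;
}

/* Reserve one slot and fill in an address.  Bumping cached_prod is a
 * purely local reservation; nothing is visible to the consumer yet.
 */
static inline int sketch_prod_reserve_addr(struct sketch_queue *q, u64 addr)
{
	if (!sketch_prod_nb_free(q, 1))
		return -ENOSPC;

	q->ring->desc[q->cached_prod++ & q->ring_mask] = addr;
	return 0;
}

/* Publish everything reserved so far.  Pairs with the acquire load in
 * sketch_cons_refresh().
 */
static inline void sketch_prod_submit(struct sketch_queue *q)
{
	smp_store_release(&q->ring->producer, q->cached_prod);
}

The payoff of the cached copies is that the hot paths compare and bump only private state: in the listing, the shared producer index is re-read only in __xskq_cons_peek() (line 250) and published once per batch in xskq_prod_submit() (line 393), while every other reference works on cached_prod alone.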