Lines matching refs: rx_buffer
The hits below trace an Rx buffer through the ixgbevf receive path: fetch (ixgbevf_get_rx_buffer), the page-reuse decision (ixgbevf_can_reuse_rx_page), fragment attach and skb construction (ixgbevf_add_rx_frag, ixgbevf_construct_skb, ixgbevf_build_skb), offset flipping for recycling (ixgbevf_rx_buffer_flip), the per-packet clean loop (ixgbevf_clean_rx_irq), release (ixgbevf_put_rx_buffer), and ring teardown (ixgbevf_clean_rx_ring).
115 static bool ixgbevf_can_reuse_rx_page(struct ixgbevf_rx_buffer *rx_buffer);
535 struct ixgbevf_rx_buffer *rx_buffer; in ixgbevf_get_rx_buffer() local
537 rx_buffer = &rx_ring->rx_buffer_info[rx_ring->next_to_clean]; in ixgbevf_get_rx_buffer()
538 prefetchw(rx_buffer->page); in ixgbevf_get_rx_buffer()
542 rx_buffer->dma, in ixgbevf_get_rx_buffer()
543 rx_buffer->page_offset, in ixgbevf_get_rx_buffer()
547 rx_buffer->pagecnt_bias--; in ixgbevf_get_rx_buffer()
549 return rx_buffer; in ixgbevf_get_rx_buffer()
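Read together, these hits cover the fetch path: look up the rx_buffer_info slot at next_to_clean, prefetch its page for write, sync only the region the NIC just wrote, and take one reference out of the pagecnt_bias pool for the data about to be handed up. A minimal reconstruction from those fragments; the parameter list and the DMA direction are filled in as assumptions:

static struct ixgbevf_rx_buffer *
ixgbevf_get_rx_buffer(struct ixgbevf_ring *rx_ring, const unsigned int size)
{
	struct ixgbevf_rx_buffer *rx_buffer;

	rx_buffer = &rx_ring->rx_buffer_info[rx_ring->next_to_clean];
	prefetchw(rx_buffer->page);

	/* sync only the portion the hardware wrote back */
	dma_sync_single_range_for_cpu(rx_ring->dev,
				      rx_buffer->dma,
				      rx_buffer->page_offset,
				      size,
				      DMA_FROM_DEVICE);

	/* consume one reference from the bias pool for this frame */
	rx_buffer->pagecnt_bias--;

	return rx_buffer;
}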
553 struct ixgbevf_rx_buffer *rx_buffer, in ixgbevf_put_rx_buffer() argument
556 if (ixgbevf_can_reuse_rx_page(rx_buffer)) { in ixgbevf_put_rx_buffer()
558 ixgbevf_reuse_rx_page(rx_ring, rx_buffer); in ixgbevf_put_rx_buffer()
564 dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma, in ixgbevf_put_rx_buffer()
568 __page_frag_cache_drain(rx_buffer->page, in ixgbevf_put_rx_buffer()
569 rx_buffer->pagecnt_bias); in ixgbevf_put_rx_buffer()
573 rx_buffer->page = NULL; in ixgbevf_put_rx_buffer()
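The put path is the mirror image: if the page can still be recycled it goes back onto the ring via ixgbevf_reuse_rx_page(); otherwise the whole page is unmapped and every reference still parked in pagecnt_bias is dropped in one __page_frag_cache_drain() call. A sketch; the page-size helper and the DMA attribute macro are named from memory and should be treated as assumptions:

static void ixgbevf_put_rx_buffer(struct ixgbevf_ring *rx_ring,
				  struct ixgbevf_rx_buffer *rx_buffer,
				  struct sk_buff *skb)
{
	if (ixgbevf_can_reuse_rx_page(rx_buffer)) {
		/* hand the unused half of the page back to the ring */
		ixgbevf_reuse_rx_page(rx_ring, rx_buffer);
	} else {
		/* not reusable: unmap the page and drop the references
		 * still held in pagecnt_bias
		 */
		dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma,
				     ixgbevf_rx_pg_size(rx_ring),
				     DMA_FROM_DEVICE,
				     IXGBEVF_RX_DMA_ATTR);
		__page_frag_cache_drain(rx_buffer->page,
					rx_buffer->pagecnt_bias);
	}

	/* the ring no longer owns this page */
	rx_buffer->page = NULL;
}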
784 static bool ixgbevf_can_reuse_rx_page(struct ixgbevf_rx_buffer *rx_buffer) in ixgbevf_can_reuse_rx_page() argument
786 unsigned int pagecnt_bias = rx_buffer->pagecnt_bias; in ixgbevf_can_reuse_rx_page()
787 struct page *page = rx_buffer->page; in ixgbevf_can_reuse_rx_page()
801 if (rx_buffer->page_offset > IXGBEVF_LAST_OFFSET) in ixgbevf_can_reuse_rx_page()
812 rx_buffer->pagecnt_bias = USHRT_MAX; in ixgbevf_can_reuse_rx_page()
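The reuse test combines an ownership check with bias bookkeeping: the page must not be a reserved/remote page, on systems with PAGE_SIZE < 8192 the driver must be the only owner once pagecnt_bias is subtracted from the page refcount, and on larger pages page_offset must not have walked past IXGBEVF_LAST_OFFSET. When the bias pool is about to run dry it is restocked to USHRT_MAX with a single page_ref_add(). A sketch under those assumptions (the reserved-page helper name is a guess):

static bool ixgbevf_can_reuse_rx_page(struct ixgbevf_rx_buffer *rx_buffer)
{
	unsigned int pagecnt_bias = rx_buffer->pagecnt_bias;
	struct page *page = rx_buffer->page;

	/* never recycle remote or pfmemalloc pages */
	if (unlikely(ixgbevf_page_is_reserved(page)))
		return false;

#if (PAGE_SIZE < 8192)
	/* the driver must be the sole owner of the page */
	if (unlikely((page_ref_count(page) - pagecnt_bias) > 1))
		return false;
#else
	/* on large pages, stop once the offset passes the last usable slot */
	if (rx_buffer->page_offset > IXGBEVF_LAST_OFFSET)
		return false;
#endif

	/* restock the bias pool before it reaches zero */
	if (unlikely(!pagecnt_bias)) {
		page_ref_add(page, USHRT_MAX);
		rx_buffer->pagecnt_bias = USHRT_MAX;
	}

	return true;
}

Carrying a large bias instead of taking a page reference per frame means the atomic refcount is only touched when the pool is restocked, which is the point of the pagecnt_bias scheme.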
828 struct ixgbevf_rx_buffer *rx_buffer, in ixgbevf_add_rx_frag() argument
839 skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags, rx_buffer->page, in ixgbevf_add_rx_frag()
840 rx_buffer->page_offset, size, truesize); in ixgbevf_add_rx_frag()
842 rx_buffer->page_offset ^= truesize; in ixgbevf_add_rx_frag()
844 rx_buffer->page_offset += truesize; in ixgbevf_add_rx_frag()
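ixgbevf_add_rx_frag() attaches the received region to the skb as a page fragment and then moves page_offset so the next refill lands in a fresh part of the page: an XOR toggle between the two halves when pages are split (PAGE_SIZE < 8192), a plain increment otherwise. A sketch; the truesize calculation is simplified and should be read as an assumption:

static void ixgbevf_add_rx_frag(struct ixgbevf_ring *rx_ring,
				struct ixgbevf_rx_buffer *rx_buffer,
				struct sk_buff *skb,
				unsigned int size)
{
#if (PAGE_SIZE < 8192)
	unsigned int truesize = ixgbevf_rx_pg_size(rx_ring) / 2;
#else
	unsigned int truesize = SKB_DATA_ALIGN(size);	/* rounding simplified */
#endif

	skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags, rx_buffer->page,
			rx_buffer->page_offset, size, truesize);

#if (PAGE_SIZE < 8192)
	rx_buffer->page_offset ^= truesize;	/* flip to the other half */
#else
	rx_buffer->page_offset += truesize;	/* advance through the page */
#endif
}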
850 struct ixgbevf_rx_buffer *rx_buffer, in ixgbevf_construct_skb() argument
900 skb_add_rx_frag(skb, 0, rx_buffer->page, in ixgbevf_construct_skb()
902 page_address(rx_buffer->page), in ixgbevf_construct_skb()
905 rx_buffer->page_offset ^= truesize; in ixgbevf_construct_skb()
907 rx_buffer->page_offset += truesize; in ixgbevf_construct_skb()
910 rx_buffer->pagecnt_bias++; in ixgbevf_construct_skb()
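In the construct path the headers are copied into a freshly allocated skb head; whatever remains is attached as fragment 0 with the usual offset flip, and if the whole frame fit in the copy the page was never consumed, so the reference is returned by bumping pagecnt_bias. A simplified sketch: the allocation, the header-length trimming (the in-tree driver uses eth_get_headlen()), and the truesize math are assumptions:

static struct sk_buff *ixgbevf_construct_skb(struct ixgbevf_ring *rx_ring,
					     struct ixgbevf_rx_buffer *rx_buffer,
					     struct xdp_buff *xdp,
					     union ixgbe_adv_rx_desc *rx_desc)
{
	unsigned int size = xdp->data_end - xdp->data;
#if (PAGE_SIZE < 8192)
	unsigned int truesize = ixgbevf_rx_pg_size(rx_ring) / 2;
#else
	unsigned int truesize = SKB_DATA_ALIGN(size);
#endif
	unsigned int headlen;
	struct sk_buff *skb;

	skb = napi_alloc_skb(&rx_ring->q_vector->napi, IXGBEVF_RX_HDR_SIZE);
	if (unlikely(!skb))
		return NULL;

	/* pull at most IXGBEVF_RX_HDR_SIZE bytes into the linear area */
	headlen = min_t(unsigned int, size, IXGBEVF_RX_HDR_SIZE);
	memcpy(__skb_put(skb, headlen), xdp->data, ALIGN(headlen, sizeof(long)));

	size -= headlen;
	if (size) {
		/* hang the rest of the page off the skb as frag 0 */
		skb_add_rx_frag(skb, 0, rx_buffer->page,
				(xdp->data + headlen) -
					page_address(rx_buffer->page),
				size, truesize);
#if (PAGE_SIZE < 8192)
		rx_buffer->page_offset ^= truesize;
#else
		rx_buffer->page_offset += truesize;
#endif
	} else {
		/* everything fit in the copy: give the reference back */
		rx_buffer->pagecnt_bias++;
	}

	return skb;
}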
925 struct ixgbevf_rx_buffer *rx_buffer, in ixgbevf_build_skb() argument
959 rx_buffer->page_offset ^= truesize; in ixgbevf_build_skb()
961 rx_buffer->page_offset += truesize; in ixgbevf_build_skb()
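ixgbevf_build_skb() takes the zero-copy route: an skb is built directly around the already-DMA'd page data, headroom is reserved up to xdp->data, and the buffer keeps the other half of the page via the same offset flip. The sketch below is modeled on the build_skb() paths of the related Intel drivers; the metadata handling and the truesize math in particular are assumptions:

static struct sk_buff *ixgbevf_build_skb(struct ixgbevf_ring *rx_ring,
					 struct ixgbevf_rx_buffer *rx_buffer,
					 struct xdp_buff *xdp,
					 union ixgbe_adv_rx_desc *rx_desc)
{
	unsigned int metasize = xdp->data - xdp->data_meta;
#if (PAGE_SIZE < 8192)
	unsigned int truesize = ixgbevf_rx_pg_size(rx_ring) / 2;
#else
	unsigned int truesize = SKB_DATA_ALIGN(sizeof(struct skb_shared_info)) +
				SKB_DATA_ALIGN(xdp->data_end -
					       xdp->data_hard_start);
#endif
	struct sk_buff *skb;

	/* build the skb around the existing page data, no copy */
	skb = build_skb(xdp->data_hard_start, truesize);
	if (unlikely(!skb))
		return NULL;

	skb_reserve(skb, xdp->data - xdp->data_hard_start);
	__skb_put(skb, xdp->data_end - xdp->data);
	if (metasize)
		skb_metadata_set(skb, metasize);

	/* keep the other half of the page armed for the NIC */
#if (PAGE_SIZE < 8192)
	rx_buffer->page_offset ^= truesize;
#else
	rx_buffer->page_offset += truesize;
#endif

	return skb;
}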
1104 struct ixgbevf_rx_buffer *rx_buffer, in ixgbevf_rx_buffer_flip() argument
1110 rx_buffer->page_offset ^= truesize; in ixgbevf_rx_buffer_flip()
1112 rx_buffer->page_offset += truesize; in ixgbevf_rx_buffer_flip()
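The flip helper is the whole recycling trick in two lines. With PAGE_SIZE < 8192 each page is treated as two truesize halves, so XORing page_offset by truesize toggles between the half just given to the stack and the half that can be re-armed for the NIC. With larger pages the offset simply advances until ixgbevf_can_reuse_rx_page() reports the page exhausted. A sketch, with the large-page truesize rounding again simplified:

static void ixgbevf_rx_buffer_flip(struct ixgbevf_ring *rx_ring,
				   struct ixgbevf_rx_buffer *rx_buffer,
				   unsigned int size)
{
#if (PAGE_SIZE < 8192)
	unsigned int truesize = ixgbevf_rx_pg_size(rx_ring) / 2;

	rx_buffer->page_offset ^= truesize;	/* toggle between the halves */
#else
	unsigned int truesize = SKB_DATA_ALIGN(size);	/* rounding simplified */

	rx_buffer->page_offset += truesize;	/* walk forward through the page */
#endif
}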
1135 struct ixgbevf_rx_buffer *rx_buffer; in ixgbevf_clean_rx_irq() local
1156 rx_buffer = ixgbevf_get_rx_buffer(rx_ring, size); in ixgbevf_clean_rx_irq()
1163 hard_start = page_address(rx_buffer->page) + in ixgbevf_clean_rx_irq()
1164 rx_buffer->page_offset - offset; in ixgbevf_clean_rx_irq()
1176 ixgbevf_rx_buffer_flip(rx_ring, rx_buffer, in ixgbevf_clean_rx_irq()
1179 rx_buffer->pagecnt_bias++; in ixgbevf_clean_rx_irq()
1184 ixgbevf_add_rx_frag(rx_ring, rx_buffer, skb, size); in ixgbevf_clean_rx_irq()
1186 skb = ixgbevf_build_skb(rx_ring, rx_buffer, in ixgbevf_clean_rx_irq()
1189 skb = ixgbevf_construct_skb(rx_ring, rx_buffer, in ixgbevf_clean_rx_irq()
1196 rx_buffer->pagecnt_bias++; in ixgbevf_clean_rx_irq()
1200 ixgbevf_put_rx_buffer(rx_ring, rx_buffer, skb); in ixgbevf_clean_rx_irq()
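Those call sites give the shape of the per-packet loop in ixgbevf_clean_rx_irq(): fetch the buffer, hand the frame to XDP (hard_start backs up from page_offset by the configured headroom), then act on the verdict. A frame consumed by XDP_TX/XDP_REDIRECT flips the buffer so the used half is retired; a dropped frame returns its reference by bumping pagecnt_bias. Otherwise the fragment is appended to an in-progress skb or turned into a new one, a failed allocation again hands the reference back, and ixgbevf_put_rx_buffer() finishes the descriptor. A condensed sketch of that loop body; local names, the XDP verdict constant, and the statistics/EOP bookkeeping omitted here are assumptions:

	/* inside the per-packet while loop of ixgbevf_clean_rx_irq() */
	rx_buffer = ixgbevf_get_rx_buffer(rx_ring, size);

	if (!skb) {
		unsigned int offset = ixgbevf_rx_offset(rx_ring);
		unsigned char *hard_start;

		/* back up from the frame start by the headroom */
		hard_start = page_address(rx_buffer->page) +
			     rx_buffer->page_offset - offset;
		xdp_prepare_buff(&xdp, hard_start, offset, size, true);
		skb = ixgbevf_run_xdp(adapter, rx_ring, &xdp);
	}

	if (IS_ERR(skb)) {
		if (PTR_ERR(skb) == -IXGBEVF_XDP_TX) {
			/* consumed by XDP: retire the used half */
			xdp_xmit = true;
			ixgbevf_rx_buffer_flip(rx_ring, rx_buffer, size);
		} else {
			/* dropped: hand the reference straight back */
			rx_buffer->pagecnt_bias++;
		}
	} else if (skb) {
		/* continuation of a frame already being assembled */
		ixgbevf_add_rx_frag(rx_ring, rx_buffer, skb, size);
	} else if (ring_uses_build_skb(rx_ring)) {
		skb = ixgbevf_build_skb(rx_ring, rx_buffer, &xdp, rx_desc);
	} else {
		skb = ixgbevf_construct_skb(rx_ring, rx_buffer, &xdp, rx_desc);
	}

	if (!skb) {
		/* allocation failed: return the reference and stop */
		rx_buffer->pagecnt_bias++;
		break;
	}

	ixgbevf_put_rx_buffer(rx_ring, rx_buffer, skb);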
2370 struct ixgbevf_rx_buffer *rx_buffer; in ixgbevf_clean_rx_ring() local
2372 rx_buffer = &rx_ring->rx_buffer_info[i]; in ixgbevf_clean_rx_ring()
2378 rx_buffer->dma, in ixgbevf_clean_rx_ring()
2379 rx_buffer->page_offset, in ixgbevf_clean_rx_ring()
2385 rx_buffer->dma, in ixgbevf_clean_rx_ring()
2390 __page_frag_cache_drain(rx_buffer->page, in ixgbevf_clean_rx_ring()
2391 rx_buffer->pagecnt_bias); in ixgbevf_clean_rx_ring()
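At teardown the same ownership rules are applied to every slot that still holds a page: the in-flight region is synced back to the CPU first (so stale device writes cannot corrupt memory after the unmap), the page is unmapped with the attributes it was mapped with, and all references remaining in pagecnt_bias are dropped. A sketch of the per-slot body; the iteration bounds and the buffer-size/page-size helpers are assumptions:

	/* per-slot body of the teardown loop in ixgbevf_clean_rx_ring() */
	rx_buffer = &rx_ring->rx_buffer_info[i];

	/* invalidate cache lines the device may have written */
	dma_sync_single_range_for_cpu(rx_ring->dev,
				      rx_buffer->dma,
				      rx_buffer->page_offset,
				      ixgbevf_rx_bufsz(rx_ring),
				      DMA_FROM_DEVICE);

	/* release the DMA mapping for the whole page */
	dma_unmap_page_attrs(rx_ring->dev,
			     rx_buffer->dma,
			     ixgbevf_rx_pg_size(rx_ring),
			     DMA_FROM_DEVICE,
			     IXGBEVF_RX_DMA_ATTR);

	/* drop every reference still held via pagecnt_bias */
	__page_frag_cache_drain(rx_buffer->page,
				rx_buffer->pagecnt_bias);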