Lines matching refs: rx_buffer (ixgbevf driver, drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c)

111 static bool ixgbevf_can_reuse_rx_page(struct ixgbevf_rx_buffer *rx_buffer);
531 struct ixgbevf_rx_buffer *rx_buffer; in ixgbevf_get_rx_buffer() local
533 rx_buffer = &rx_ring->rx_buffer_info[rx_ring->next_to_clean]; in ixgbevf_get_rx_buffer()
534 prefetchw(rx_buffer->page); in ixgbevf_get_rx_buffer()
538 rx_buffer->dma, in ixgbevf_get_rx_buffer()
539 rx_buffer->page_offset, in ixgbevf_get_rx_buffer()
543 rx_buffer->pagecnt_bias--; in ixgbevf_get_rx_buffer()
545 return rx_buffer; in ixgbevf_get_rx_buffer()
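
Lines 531-545 are the fetch path: grab the buffer that next_to_clean points at, prefetch its page, DMA-sync the received region for the CPU, and consume one of the pre-charged page references. A minimal sketch of that logic, assuming only the rx_buffer fields visible above (page, dma, page_offset, pagecnt_bias) plus the generic prefetch/DMA helpers; prototypes are simplified and may differ by kernel version:

static struct ixgbevf_rx_buffer *
ixgbevf_get_rx_buffer(struct ixgbevf_ring *rx_ring, unsigned int size)
{
        struct ixgbevf_rx_buffer *rx_buffer;

        /* buffer backing the descriptor at next_to_clean */
        rx_buffer = &rx_ring->rx_buffer_info[rx_ring->next_to_clean];
        prefetchw(rx_buffer->page);

        /* make the bytes the device just wrote visible to the CPU */
        dma_sync_single_range_for_cpu(rx_ring->dev, rx_buffer->dma,
                                      rx_buffer->page_offset, size,
                                      DMA_FROM_DEVICE);

        /* consume one of the references pre-charged in pagecnt_bias */
        rx_buffer->pagecnt_bias--;

        return rx_buffer;
}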
549 struct ixgbevf_rx_buffer *rx_buffer, in ixgbevf_put_rx_buffer() argument
552 if (ixgbevf_can_reuse_rx_page(rx_buffer)) { in ixgbevf_put_rx_buffer()
554 ixgbevf_reuse_rx_page(rx_ring, rx_buffer); in ixgbevf_put_rx_buffer()
560 dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma, in ixgbevf_put_rx_buffer()
564 __page_frag_cache_drain(rx_buffer->page, in ixgbevf_put_rx_buffer()
565 rx_buffer->pagecnt_bias); in ixgbevf_put_rx_buffer()
569 rx_buffer->page = NULL; in ixgbevf_put_rx_buffer()
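
Lines 549-569 are the matching release path: once the frame has been handed to an skb (or dropped), the page is either recycled back to the ring or unmapped and its remaining pre-charged references drained. A simplified sketch; ixgbevf_rx_pg_size() and IXGBEVF_RX_DMA_ATTR are driver-internal names assumed from the surrounding code:

static void ixgbevf_put_rx_buffer(struct ixgbevf_ring *rx_ring,
                                  struct ixgbevf_rx_buffer *rx_buffer,
                                  struct sk_buff *skb)
{
        if (ixgbevf_can_reuse_rx_page(rx_buffer)) {
                /* hand the unused half of the page back to the ring */
                ixgbevf_reuse_rx_page(rx_ring, rx_buffer);
        } else {
                /* page cannot be recycled: unmap it and release every
                 * reference still held via pagecnt_bias
                 */
                dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma,
                                     ixgbevf_rx_pg_size(rx_ring),
                                     DMA_FROM_DEVICE, IXGBEVF_RX_DMA_ATTR);
                __page_frag_cache_drain(rx_buffer->page,
                                        rx_buffer->pagecnt_bias);
        }

        /* this ring slot no longer owns a page */
        rx_buffer->page = NULL;
}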
784 static bool ixgbevf_can_reuse_rx_page(struct ixgbevf_rx_buffer *rx_buffer) in ixgbevf_can_reuse_rx_page() argument
786 unsigned int pagecnt_bias = rx_buffer->pagecnt_bias; in ixgbevf_can_reuse_rx_page()
787 struct page *page = rx_buffer->page; in ixgbevf_can_reuse_rx_page()
801 if (rx_buffer->page_offset > IXGBEVF_LAST_OFFSET) in ixgbevf_can_reuse_rx_page()
812 rx_buffer->pagecnt_bias = USHRT_MAX; in ixgbevf_can_reuse_rx_page()
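
Lines 784-812 implement the recycling check that line 111 forward-declares: a page may be reused only if the driver is effectively its sole owner, and the pre-charged reference pool is topped back up to USHRT_MAX before it runs dry. A condensed sketch; ixgbevf_page_is_reserved() and IXGBEVF_LAST_OFFSET are driver-internal names assumed here:

static bool ixgbevf_can_reuse_rx_page(struct ixgbevf_rx_buffer *rx_buffer)
{
        unsigned int pagecnt_bias = rx_buffer->pagecnt_bias;
        struct page *page = rx_buffer->page;

        /* never recycle reserved pages or pages from a remote node */
        if (unlikely(ixgbevf_page_is_reserved(page)))
                return false;

#if (PAGE_SIZE < 8192)
        /* reuse only if the driver holds the sole reference */
        if (unlikely((page_ref_count(page) - pagecnt_bias) > 1))
                return false;
#else
        /* on larger pages, stop once the offset has walked past the
         * last usable buffer (line 801)
         */
        if (rx_buffer->page_offset > IXGBEVF_LAST_OFFSET)
                return false;
#endif

        /* replenish the reference pool before pagecnt_bias hits zero */
        if (unlikely(!pagecnt_bias)) {
                page_ref_add(page, USHRT_MAX);
                rx_buffer->pagecnt_bias = USHRT_MAX;    /* line 812 */
        }

        return true;
}

The pagecnt_bias scheme is why the decrement at line 543 and the increments at lines 910, 1178 and 1195 are cheap: the driver charges a large batch of page references once and pays them back one at a time, instead of doing an atomic get_page()/put_page() per received frame.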
828 struct ixgbevf_rx_buffer *rx_buffer, in ixgbevf_add_rx_frag() argument
839 skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags, rx_buffer->page, in ixgbevf_add_rx_frag()
840 rx_buffer->page_offset, size, truesize); in ixgbevf_add_rx_frag()
842 rx_buffer->page_offset ^= truesize; in ixgbevf_add_rx_frag()
844 rx_buffer->page_offset += truesize; in ixgbevf_add_rx_frag()
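
Lines 828-844 append the received region to an existing skb as a page fragment and then advance the buffer for reuse: with 4K pages the offset simply flips between the two halves of the page (^= truesize), with larger pages it moves forward (+= truesize). A sketch with the truesize computation simplified:

static void ixgbevf_add_rx_frag(struct ixgbevf_ring *rx_ring,
                                struct ixgbevf_rx_buffer *rx_buffer,
                                struct sk_buff *skb, unsigned int size)
{
#if (PAGE_SIZE < 8192)
        unsigned int truesize = ixgbevf_rx_pg_size(rx_ring) / 2;
#else
        unsigned int truesize = SKB_DATA_ALIGN(size);
#endif

        /* attach the page region to the skb without copying it */
        skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags, rx_buffer->page,
                        rx_buffer->page_offset, size, truesize);

        /* point the buffer at the other half (or the next chunk) */
#if (PAGE_SIZE < 8192)
        rx_buffer->page_offset ^= truesize;     /* line 842 */
#else
        rx_buffer->page_offset += truesize;     /* line 844 */
#endif
}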
850 struct ixgbevf_rx_buffer *rx_buffer, in ixgbevf_construct_skb() argument
900 skb_add_rx_frag(skb, 0, rx_buffer->page, in ixgbevf_construct_skb()
902 page_address(rx_buffer->page), in ixgbevf_construct_skb()
905 rx_buffer->page_offset ^= truesize; in ixgbevf_construct_skb()
907 rx_buffer->page_offset += truesize; in ixgbevf_construct_skb()
910 rx_buffer->pagecnt_bias++; in ixgbevf_construct_skb()
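
Lines 850-910 are the copy-headers path used when build_skb is not enabled: allocate a small skb, copy the packet headers into its linear area, and either hang the rest of the page off it as a fragment or, if the whole frame was copied, hand back the reference taken in ixgbevf_get_rx_buffer() via pagecnt_bias++ (line 910). A much-simplified sketch; the real function works on an xdp_buff and the Rx descriptor, and IXGBEVF_RX_HDR_SIZE is a driver constant assumed here:

static struct sk_buff *
ixgbevf_construct_skb(struct ixgbevf_ring *rx_ring,
                      struct ixgbevf_rx_buffer *rx_buffer,
                      void *va, unsigned int size)
{
        unsigned int truesize = ixgbevf_rx_pg_size(rx_ring) / 2; /* 4K case */
        unsigned int headlen = min_t(unsigned int, size, IXGBEVF_RX_HDR_SIZE);
        struct sk_buff *skb;

        skb = napi_alloc_skb(&rx_ring->q_vector->napi, IXGBEVF_RX_HDR_SIZE);
        if (unlikely(!skb))
                return NULL;

        /* pull the headers into the linear part of the new skb */
        memcpy(__skb_put(skb, headlen), va, headlen);

        if (size > headlen) {
                /* reference the rest of the page instead of copying it */
                skb_add_rx_frag(skb, 0, rx_buffer->page,
                                (va + headlen) - page_address(rx_buffer->page),
                                size - headlen, truesize);
                rx_buffer->page_offset ^= truesize;     /* line 905 */
        } else {
                /* everything was copied: return the reference we took */
                rx_buffer->pagecnt_bias++;              /* line 910 */
        }

        return skb;
}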
925 struct ixgbevf_rx_buffer *rx_buffer, in ixgbevf_build_skb() argument
959 rx_buffer->page_offset ^= truesize; in ixgbevf_build_skb()
961 rx_buffer->page_offset += truesize; in ixgbevf_build_skb()
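
Lines 925-961 are the zero-copy alternative: build_skb() wraps the skb around the buffer in place, using the headroom in front of the packet and putting skb_shared_info at the end of the truesize region, so no data is copied; the page offset is then flipped or advanced exactly as above. Simplified sketch, assuming the driver's IXGBEVF_SKB_PAD headroom constant:

static struct sk_buff *
ixgbevf_build_skb(struct ixgbevf_ring *rx_ring,
                  struct ixgbevf_rx_buffer *rx_buffer, unsigned int size)
{
        void *va = page_address(rx_buffer->page) + rx_buffer->page_offset;
        unsigned int truesize = ixgbevf_rx_pg_size(rx_ring) / 2; /* 4K case */
        struct sk_buff *skb;

        /* build the skb metadata around the existing buffer */
        skb = build_skb(va - IXGBEVF_SKB_PAD, truesize);
        if (unlikely(!skb))
                return NULL;

        skb_reserve(skb, IXGBEVF_SKB_PAD);
        __skb_put(skb, size);

        /* give the other half of the page to the next descriptor */
        rx_buffer->page_offset ^= truesize;             /* line 959 */

        return skb;
}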
1104 struct ixgbevf_rx_buffer *rx_buffer, in ixgbevf_rx_buffer_flip() argument
1110 rx_buffer->page_offset ^= truesize; in ixgbevf_rx_buffer_flip()
1112 rx_buffer->page_offset += truesize; in ixgbevf_rx_buffer_flip()
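
Lines 1104-1112 factor the same offset bookkeeping into a helper used from the XDP branch of the receive loop (see line 1175), where the frame is kept by the XDP program and the buffer must advance without any skb being built:

static void ixgbevf_rx_buffer_flip(struct ixgbevf_ring *rx_ring,
                                   struct ixgbevf_rx_buffer *rx_buffer,
                                   unsigned int size)
{
#if (PAGE_SIZE < 8192)
        unsigned int truesize = ixgbevf_rx_pg_size(rx_ring) / 2;

        rx_buffer->page_offset ^= truesize;     /* switch halves, line 1110 */
#else
        unsigned int truesize = SKB_DATA_ALIGN(size);

        rx_buffer->page_offset += truesize;     /* next chunk, line 1112 */
#endif
}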
1134 struct ixgbevf_rx_buffer *rx_buffer; in ixgbevf_clean_rx_irq() local
1155 rx_buffer = ixgbevf_get_rx_buffer(rx_ring, size); in ixgbevf_clean_rx_irq()
1162 hard_start = page_address(rx_buffer->page) + in ixgbevf_clean_rx_irq()
1163 rx_buffer->page_offset - offset; in ixgbevf_clean_rx_irq()
1175 ixgbevf_rx_buffer_flip(rx_ring, rx_buffer, in ixgbevf_clean_rx_irq()
1178 rx_buffer->pagecnt_bias++; in ixgbevf_clean_rx_irq()
1183 ixgbevf_add_rx_frag(rx_ring, rx_buffer, skb, size); in ixgbevf_clean_rx_irq()
1185 skb = ixgbevf_build_skb(rx_ring, rx_buffer, in ixgbevf_clean_rx_irq()
1188 skb = ixgbevf_construct_skb(rx_ring, rx_buffer, in ixgbevf_clean_rx_irq()
1195 rx_buffer->pagecnt_bias++; in ixgbevf_clean_rx_irq()
1199 ixgbevf_put_rx_buffer(rx_ring, rx_buffer, skb); in ixgbevf_clean_rx_irq()
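
Lines 1134-1199 tie the helpers together in the NAPI receive loop. The sketch below is heavily condensed, with descriptor parsing, statistics and budget accounting omitted and the simplified call forms from the sketches above; ixgbevf_run_xdp(), IXGBEVF_XDP_TX and ring_uses_build_skb() are names from the driver's XDP support and are assumptions here, not shown in the listing:

        rx_buffer = ixgbevf_get_rx_buffer(rx_ring, size);      /* line 1155 */

        /* let an attached XDP program see the raw frame first */
        hard_start = page_address(rx_buffer->page) +
                     rx_buffer->page_offset - offset;          /* line 1162 */
        skb = ixgbevf_run_xdp(adapter, rx_ring, &xdp);

        if (IS_ERR(skb)) {
                /* XDP kept the frame (XDP_TX/REDIRECT) or dropped it */
                if (PTR_ERR(skb) == -IXGBEVF_XDP_TX)
                        ixgbevf_rx_buffer_flip(rx_ring, rx_buffer, size);
                else
                        rx_buffer->pagecnt_bias++;              /* line 1178 */
        } else if (skb) {
                /* continuation of a multi-buffer frame */
                ixgbevf_add_rx_frag(rx_ring, rx_buffer, skb, size);
        } else if (ring_uses_build_skb(rx_ring)) {
                skb = ixgbevf_build_skb(rx_ring, rx_buffer, size);
        } else {
                skb = ixgbevf_construct_skb(rx_ring, rx_buffer,
                                            hard_start + offset, size);
        }

        if (!skb) {
                /* skb allocation failed: give the reference back */
                rx_buffer->pagecnt_bias++;                      /* line 1195 */
                break;
        }

        /* recycle or free the page now that the skb owns the data */
        ixgbevf_put_rx_buffer(rx_ring, rx_buffer, skb);         /* line 1199 */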
2369 struct ixgbevf_rx_buffer *rx_buffer; in ixgbevf_clean_rx_ring() local
2371 rx_buffer = &rx_ring->rx_buffer_info[i]; in ixgbevf_clean_rx_ring()
2377 rx_buffer->dma, in ixgbevf_clean_rx_ring()
2378 rx_buffer->page_offset, in ixgbevf_clean_rx_ring()
2384 rx_buffer->dma, in ixgbevf_clean_rx_ring()
2389 __page_frag_cache_drain(rx_buffer->page, in ixgbevf_clean_rx_ring()
2390 rx_buffer->pagecnt_bias); in ixgbevf_clean_rx_ring()
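
Lines 2369-2390 are the ring teardown: each still-populated slot is synced, unmapped and has its remaining pre-charged references drained in a single call. Sketch of the per-slot work; ixgbevf_rx_bufsz(), ixgbevf_rx_pg_size() and IXGBEVF_RX_DMA_ATTR are driver-internal names assumed here:

        rx_buffer = &rx_ring->rx_buffer_info[i];

        /* invalidate cache lines the device may still have written */
        dma_sync_single_range_for_cpu(rx_ring->dev, rx_buffer->dma,
                                      rx_buffer->page_offset,
                                      ixgbevf_rx_bufsz(rx_ring),
                                      DMA_FROM_DEVICE);

        /* release the DMA mapping for the whole page */
        dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma,
                             ixgbevf_rx_pg_size(rx_ring),
                             DMA_FROM_DEVICE, IXGBEVF_RX_DMA_ATTR);

        /* drop every reference still pre-charged on the page */
        __page_frag_cache_drain(rx_buffer->page, rx_buffer->pagecnt_bias);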