Lines Matching refs:desc_cb
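
The matches below all come from the HNS3 Ethernet driver's ring datapath (the hns3_enet.c functions named at the end of each line). For orientation, here is a minimal model of the per-descriptor control block those lines manipulate; the field set is inferred from the accesses in this listing, while the exact types, the *_model names and the flat layout are illustrative assumptions — the authoritative definition (which packs some of these fields into unions) is in the driver's header.

#include <stdint.h>

typedef uint64_t dma_addr_t_model;	/* stand-in for the kernel's dma_addr_t */

/* Simplified model of struct hns3_desc_cb, reconstructed from this listing. */
struct hns3_desc_cb_model {
	dma_addr_t_model dma;	/* DMA address programmed into the hardware descriptor */
	void *buf;		/* CPU address of the buffer (RX page / bounce area) */
	void *priv;		/* owner: skb on TX, page on RX */

	uint32_t length;	/* mapped length, needed to unmap on cleanup */
	uint32_t send_bytes;	/* TX: bytes reported to BQL for this packet */
	uint32_t page_offset;	/* RX: current offset into the page */

	uint16_t reuse_flag;	/* RX: page can stay in the ring and be reused */
	uint16_t refill;	/* RX: slot has been restocked since last clean */
	uint16_t type;		/* DESC_TYPE_SKB / _PAGE / _SGL_SKB / _PP_FRAG ... */
	uint16_t pagecnt_bias;	/* RX: page references amortised via a local bias */
};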

1606 struct hns3_desc_cb *desc_cb, in hns3_handle_csum_partial() argument
1640 &param->type_cs_vlan_tso, &desc_cb->send_bytes); in hns3_handle_csum_partial()
1650 struct hns3_desc_cb *desc_cb) in hns3_fill_skb_desc() argument
1660 desc_cb->send_bytes = skb->len; in hns3_fill_skb_desc()
1663 ret = hns3_handle_csum_partial(ring, skb, desc_cb, &param); in hns3_fill_skb_desc()
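
The first two groups are the TX fill path: hns3_fill_skb_desc() seeds desc_cb->send_bytes with skb->len (line 1660) and then hands the checksum/TSO handler a pointer to that field (line 1640) so it can rewrite the value for GSO packets. A minimal sketch of that ordering, reusing the model struct above; model_handle_tso(), the gso fields and the recomputed formula are illustrative assumptions, not the driver's exact code.

struct sk_buff_model {		/* only the fields this sketch needs */
	unsigned int len;	/* total byte length of the skb */
	unsigned int gso_segs;	/* segments the hardware will emit */
	unsigned int hdr_len;	/* header bytes replicated per segment (assumed) */
};

static void model_handle_tso(const struct sk_buff_model *skb, uint32_t *send_bytes)
{
	/* assumption: for GSO the on-wire byte count grows by one header copy
	 * per extra segment, so the BQL figure is recomputed through the
	 * pointer that the checksum/TSO handler received */
	if (skb->gso_segs > 1)
		*send_bytes = skb->len + (skb->gso_segs - 1) * skb->hdr_len;
}

static void model_fill_skb_desc(const struct sk_buff_model *skb,
				struct hns3_desc_cb_model *desc_cb)
{
	desc_cb->send_bytes = skb->len;			/* line 1660 */
	model_handle_tso(skb, &desc_cb->send_bytes);	/* lines 1663 / 1640 */
}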
1726 struct hns3_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use]; in hns3_map_and_fill_desc() local
1759 desc_cb->priv = priv; in hns3_map_and_fill_desc()
1760 desc_cb->length = size; in hns3_map_and_fill_desc()
1761 desc_cb->dma = dma; in hns3_map_and_fill_desc()
1762 desc_cb->type = type; in hns3_map_and_fill_desc()
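
hns3_map_and_fill_desc() records, per filled slot, everything later code needs: who owns the buffer (priv), how much was mapped (length), where (dma) and how to release it (type). A sketch of that bookkeeping, reusing the model struct above; the type flags and ring layout below stand in for the driver's DESC_TYPE_* values and struct hns3_enet_ring, and the DMA mapping itself (dma_map_single()/skb_frag_dma_map() in the driver) is assumed to have been done by the caller of this model.

enum {
	MODEL_TYPE_UNKNOWN	 = 0,
	MODEL_TYPE_SKB		 = 1 << 0,	/* head descriptor owning an skb */
	MODEL_TYPE_FRAGLIST_SKB	 = 1 << 1,
	MODEL_TYPE_PAGE		 = 1 << 2,	/* frag mapped with dma_map_page() */
	MODEL_TYPE_BOUNCE_OR_SGL = 1 << 3,	/* data lives in the TX spare buffer */
};

struct ring_model {
	struct hns3_desc_cb_model *desc_cb;	/* one control block per descriptor */
	int desc_num;
	int next_to_use;	/* producer index (TX fill / RX refill) */
	int next_to_clean;	/* consumer index (TX reclaim / RX receive) */
};

static void model_map_and_fill_desc(struct ring_model *ring, void *priv,
				    dma_addr_t_model dma, uint32_t size,
				    uint16_t type)
{
	struct hns3_desc_cb_model *desc_cb = &ring->desc_cb[ring->next_to_use];

	desc_cb->priv = priv;	/* lines 1759-1762 */
	desc_cb->length = size;
	desc_cb->dma = dma;
	desc_cb->type = type;

	ring->next_to_use = (ring->next_to_use + 1) % ring->desc_num;
}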
1977 struct hns3_desc_cb *desc_cb; in hns3_clear_desc() local
1988 desc_cb = &ring->desc_cb[ring->next_to_use]; in hns3_clear_desc()
1990 if (!desc_cb->dma) in hns3_clear_desc()
1994 if (desc_cb->type & (DESC_TYPE_SKB | DESC_TYPE_FRAGLIST_SKB)) in hns3_clear_desc()
1995 dma_unmap_single(dev, desc_cb->dma, desc_cb->length, in hns3_clear_desc()
1997 else if (desc_cb->type & in hns3_clear_desc()
1999 hns3_tx_spare_rollback(ring, desc_cb->length); in hns3_clear_desc()
2000 else if (desc_cb->length) in hns3_clear_desc()
2001 dma_unmap_page(dev, desc_cb->dma, desc_cb->length, in hns3_clear_desc()
2004 desc_cb->length = 0; in hns3_clear_desc()
2005 desc_cb->dma = 0; in hns3_clear_desc()
2006 desc_cb->type = DESC_TYPE_UNKNOWN; in hns3_clear_desc()
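
The hns3_clear_desc() hits are the error-unwind path: each populated slot is undone according to the type recorded at fill time, then scrubbed so it cannot be unmapped twice. A simplified per-slot version, reusing the type flags above; the DMA unmap and spare-buffer rollback calls are left as comments because they only make sense in kernel context.

static void model_clear_one_desc(struct hns3_desc_cb_model *desc_cb)
{
	if (!desc_cb->dma)
		return;				/* slot was never mapped (line 1990) */

	if (desc_cb->type & (MODEL_TYPE_SKB | MODEL_TYPE_FRAGLIST_SKB)) {
		/* dma_unmap_single(dev, desc_cb->dma, desc_cb->length, DMA_TO_DEVICE); */
	} else if (desc_cb->type & MODEL_TYPE_BOUNCE_OR_SGL) {
		/* hns3_tx_spare_rollback(ring, desc_cb->length); */
	} else if (desc_cb->length) {
		/* dma_unmap_page(dev, desc_cb->dma, desc_cb->length, DMA_TO_DEVICE); */
	}

	desc_cb->length = 0;			/* lines 2004-2006 */
	desc_cb->dma = 0;
	desc_cb->type = MODEL_TYPE_UNKNOWN;
}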
2143 struct hns3_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use]; in hns3_handle_tx_bounce() local
2169 desc_cb->priv = skb; in hns3_handle_tx_bounce()
2170 desc_cb->length = cb_len; in hns3_handle_tx_bounce()
2171 desc_cb->dma = dma; in hns3_handle_tx_bounce()
2172 desc_cb->type = type; in hns3_handle_tx_bounce()
2196 struct hns3_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use]; in hns3_handle_tx_sgl() local
2232 desc_cb->priv = skb; in hns3_handle_tx_sgl()
2233 desc_cb->length = cb_len; in hns3_handle_tx_sgl()
2234 desc_cb->dma = dma; in hns3_handle_tx_sgl()
2235 desc_cb->type = DESC_TYPE_SGL_SKB; in hns3_handle_tx_sgl()
2267 struct hns3_desc_cb *desc_cb, in hns3_handle_skb_desc() argument
2273 desc_cb); in hns3_handle_skb_desc()
2294 struct hns3_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use]; in hns3_nic_net_xmit() local
2322 ret = hns3_handle_skb_desc(ring, skb, desc_cb, ring->next_to_use); in hns3_nic_net_xmit()
2340 doorbell = __netdev_tx_sent_queue(dev_queue, desc_cb->send_bytes, in hns3_nic_net_xmit()
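
hns3_nic_net_xmit() is where the recorded send_bytes is consumed on the transmit side: line 2340 charges it to the queue's byte limits through __netdev_tx_sent_queue(), and the matching credit shows up later in hns3_nic_reclaim_desc() (line 3589). A minimal model of that pairing, with a plain counter pair standing in for the kernel's dynamic queue limits.

struct txq_model {
	unsigned long bytes_queued;	/* charged when the packet is handed to hardware */
	unsigned long bytes_completed;	/* credited back when the descriptor is reclaimed */
};

static void model_tx_sent(struct txq_model *q,
			  const struct hns3_desc_cb_model *desc_cb)
{
	/* __netdev_tx_sent_queue(dev_queue, desc_cb->send_bytes, ...) at line 2340 */
	q->bytes_queued += desc_cb->send_bytes;
}

static void model_tx_completed(struct txq_model *q,
			       const struct hns3_desc_cb_model *desc_cb)
{
	/* (*bytes) += desc_cb->send_bytes at line 3589 */
	q->bytes_completed += desc_cb->send_bytes;
}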
3432 hns3_unmap_buffer(ring, &ring->desc_cb[i]); in hns3_buffer_detach()
3434 ring->desc_cb[i].refill = 0; in hns3_buffer_detach()
3440 struct hns3_desc_cb *cb = &ring->desc_cb[i]; in hns3_free_buffer_detach()
3442 if (!ring->desc_cb[i].dma) in hns3_free_buffer_detach()
3506 int ret = hns3_alloc_and_map_buffer(ring, &ring->desc_cb[i]); in hns3_alloc_and_attach_buffer()
3511 ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma + in hns3_alloc_and_attach_buffer()
3512 ring->desc_cb[i].page_offset); in hns3_alloc_and_attach_buffer()
3513 ring->desc_cb[i].refill = 1; in hns3_alloc_and_attach_buffer()
3541 hns3_unmap_buffer(ring, &ring->desc_cb[i]); in hns3_replace_buffer()
3542 ring->desc_cb[i] = *res_cb; in hns3_replace_buffer()
3543 ring->desc_cb[i].refill = 1; in hns3_replace_buffer()
3544 ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma + in hns3_replace_buffer()
3545 ring->desc_cb[i].page_offset); in hns3_replace_buffer()
3551 ring->desc_cb[i].reuse_flag = 0; in hns3_reuse_buffer()
3552 ring->desc_cb[i].refill = 1; in hns3_reuse_buffer()
3553 ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma + in hns3_reuse_buffer()
3554 ring->desc_cb[i].page_offset); in hns3_reuse_buffer()
3558 ring->desc_cb[i].dma + ring->desc_cb[i].page_offset, in hns3_reuse_buffer()
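
The three RX attach paths above (alloc-and-attach, replace, reuse) all finish the same way: the hardware descriptor is programmed with the page's DMA base plus the control block's current page_offset, and refill = 1 marks the slot as freshly stocked. A sketch of that shared step; hw_desc_model stands in for the real hns3_desc, and the cpu_to_le64() byte-swap is omitted.

struct hw_desc_model {
	uint64_t addr_le;	/* the buffer address word the NIC reads */
};

static void model_attach_buffer(struct hw_desc_model *desc,
				struct hns3_desc_cb_model *desc_cb)
{
	/* cpu_to_le64(desc_cb->dma + desc_cb->page_offset) in the driver */
	desc->addr_le = desc_cb->dma + desc_cb->page_offset;
	desc_cb->refill = 1;
}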
3572 struct hns3_desc_cb *desc_cb; in hns3_nic_reclaim_desc() local
3583 desc_cb = &ring->desc_cb[ntc]; in hns3_nic_reclaim_desc()
3585 if (desc_cb->type & (DESC_TYPE_SKB | DESC_TYPE_BOUNCE_ALL | in hns3_nic_reclaim_desc()
3589 (*bytes) += desc_cb->send_bytes; in hns3_nic_reclaim_desc()
3599 prefetch(&ring->desc_cb[ntc]); in hns3_nic_reclaim_desc()
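
The hns3_nic_reclaim_desc() lines show the completion walk: starting at next_to_clean, only head descriptors that own a packet (SKB / bounce / SGL types, line 3585) add their send_bytes to the totals reported back to the stack; continuation descriptors for frags are skipped. A simplified walk reusing the ring_model above — how many descriptors have completed is passed in here, whereas the real loop derives it from the ring state.

static void model_reclaim_desc(struct ring_model *ring, int num_completed,
			       unsigned int *bytes, unsigned int *pkts)
{
	int ntc = ring->next_to_clean;

	while (num_completed--) {
		struct hns3_desc_cb_model *desc_cb = &ring->desc_cb[ntc];

		/* only packet-owning descriptors contribute to the BQL totals */
		if (desc_cb->type & (MODEL_TYPE_SKB | MODEL_TYPE_BOUNCE_OR_SGL)) {
			(*bytes) += desc_cb->send_bytes;	/* line 3589 */
			(*pkts)++;
		}

		ntc = (ntc + 1) % ring->desc_num;
	}

	ring->next_to_clean = ntc;
}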
3659 if (unlikely(ntc == ntu && !ring->desc_cb[ntc].refill)) in hns3_desc_unused()
3669 struct hns3_desc_cb *desc_cb; in hns3_nic_alloc_rx_buffers() local
3674 desc_cb = &ring->desc_cb[ring->next_to_use]; in hns3_nic_alloc_rx_buffers()
3675 if (desc_cb->reuse_flag) { in hns3_nic_alloc_rx_buffers()
3712 struct hns3_desc_cb *desc_cb) in hns3_handle_rx_copybreak() argument
3715 u32 frag_offset = desc_cb->page_offset + pull_len; in hns3_handle_rx_copybreak()
3728 desc_cb->reuse_flag = 1; in hns3_handle_rx_copybreak()
3729 memcpy(frag, desc_cb->buf + frag_offset, frag_size); in hns3_handle_rx_copybreak()
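
hns3_handle_rx_copybreak() copies a small fragment out of the DMA page into a freshly allocated buffer so the page itself can stay in the ring (reuse_flag = 1 at line 3728) instead of being donated to the skb. In the driver the size threshold appears to be checked by the caller and the destination comes from a frag allocation; both are folded into this sketch for brevity, so treat the threshold name and the dst parameter as assumptions.

#include <string.h>

#define MODEL_RX_COPYBREAK 256		/* illustrative threshold */

static int model_rx_copybreak(struct hns3_desc_cb_model *desc_cb,
			      unsigned int pull_len, unsigned int frag_size,
			      void *dst)
{
	unsigned int frag_offset = desc_cb->page_offset + pull_len;	/* line 3715 */

	if (frag_size > MODEL_RX_COPYBREAK)
		return -1;		/* too big: donate the page instead */

	desc_cb->reuse_flag = 1;	/* the page stays with the ring */
	memcpy(dst, (char *)desc_cb->buf + frag_offset, frag_size);	/* line 3729 */
	return 0;
}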
3739 struct hns3_desc_cb *desc_cb) in hns3_nic_reuse_page() argument
3742 u32 frag_offset = desc_cb->page_offset + pull_len; in hns3_nic_reuse_page()
3750 skb_add_rx_frag(skb, i, desc_cb->priv, frag_offset, in hns3_nic_reuse_page()
3756 if (unlikely(!dev_page_is_reusable(desc_cb->priv))) in hns3_nic_reuse_page()
3759 reused = hns3_can_reuse_page(desc_cb); in hns3_nic_reuse_page()
3772 if ((!desc_cb->page_offset && reused) || in hns3_nic_reuse_page()
3773 ((desc_cb->page_offset + truesize + truesize) <= in hns3_nic_reuse_page()
3774 hns3_page_size(ring) && desc_cb->page_offset)) { in hns3_nic_reuse_page()
3775 desc_cb->page_offset += truesize; in hns3_nic_reuse_page()
3776 desc_cb->reuse_flag = 1; in hns3_nic_reuse_page()
3777 } else if (desc_cb->page_offset && reused) { in hns3_nic_reuse_page()
3778 desc_cb->page_offset = 0; in hns3_nic_reuse_page()
3779 desc_cb->reuse_flag = 1; in hns3_nic_reuse_page()
3781 ret = hns3_handle_rx_copybreak(skb, i, ring, pull_len, desc_cb); in hns3_nic_reuse_page()
3787 desc_cb->pagecnt_bias--; in hns3_nic_reuse_page()
3789 if (unlikely(!desc_cb->pagecnt_bias)) { in hns3_nic_reuse_page()
3790 page_ref_add(desc_cb->priv, USHRT_MAX); in hns3_nic_reuse_page()
3791 desc_cb->pagecnt_bias = USHRT_MAX; in hns3_nic_reuse_page()
3794 skb_add_rx_frag(skb, i, desc_cb->priv, frag_offset, in hns3_nic_reuse_page()
3797 if (unlikely(!desc_cb->reuse_flag)) in hns3_nic_reuse_page()
3798 __page_frag_cache_drain(desc_cb->priv, desc_cb->pagecnt_bias); in hns3_nic_reuse_page()
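
hns3_nic_reuse_page() is the page-flipping logic: the RX page is carved into truesize-sized slices, and if the next slice (or the start of the page) is free the offset simply advances or flips back to 0 and the page is kept; otherwise the driver falls back to copybreak. pagecnt_bias amortises page refcounting — one large page_ref_add() up front, a local decrement per use, topped up when it hits zero (lines 3787-3791). A sketch of that decision, reusing the model struct; the reusability test (hns3_can_reuse_page() in the driver) is passed in as a flag.

static void model_reuse_page(struct hns3_desc_cb_model *desc_cb,
			     unsigned int truesize, unsigned int page_size,
			     int can_reuse)
{
	if ((!desc_cb->page_offset && can_reuse) ||
	    (desc_cb->page_offset &&
	     desc_cb->page_offset + 2 * truesize <= page_size)) {
		desc_cb->page_offset += truesize;	/* move to the next slice */
		desc_cb->reuse_flag = 1;
	} else if (desc_cb->page_offset && can_reuse) {
		desc_cb->page_offset = 0;		/* flip back to the start */
		desc_cb->reuse_flag = 1;
	}
	/* else: fall back to hns3_handle_rx_copybreak() (line 3781) */

	desc_cb->pagecnt_bias--;
	if (!desc_cb->pagecnt_bias) {
		/* page_ref_add(desc_cb->priv, USHRT_MAX) in the driver */
		desc_cb->pagecnt_bias = 0xffff;		/* USHRT_MAX */
	}
}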
4011 ring->desc_cb[ring->next_to_clean].refill = 0; in hns3_rx_ring_move_fw()
4021 struct hns3_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_clean]; in hns3_alloc_skb() local
4044 if (dev_page_is_reusable(desc_cb->priv)) in hns3_alloc_skb()
4045 desc_cb->reuse_flag = 1; in hns3_alloc_skb()
4046 else if (desc_cb->type & DESC_TYPE_PP_FRAG) in hns3_alloc_skb()
4047 page_pool_put_full_page(ring->page_pool, desc_cb->priv, in hns3_alloc_skb()
4050 __page_frag_cache_drain(desc_cb->priv, in hns3_alloc_skb()
4051 desc_cb->pagecnt_bias); in hns3_alloc_skb()
4065 desc_cb); in hns3_alloc_skb()
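
In hns3_alloc_skb(), once the packet head has been pulled into the new skb, the RX page backing the head descriptor is disposed of in one of three ways: kept for reuse, returned to the page pool, or released through the frag-cache drain, chosen from the reusability check and the PP_FRAG type (lines 4044-4051). A sketch of that dispatch; the kernel release calls are comments and the reusability check is a parameter.

enum { MODEL_TYPE_PP_FRAG = 1 << 4 };	/* page was handed out by a page pool */

static void model_dispose_head_page(struct hns3_desc_cb_model *desc_cb,
				    int page_is_reusable)
{
	if (page_is_reusable) {
		desc_cb->reuse_flag = 1;	/* leave the page in the ring (line 4045) */
	} else if (desc_cb->type & MODEL_TYPE_PP_FRAG) {
		/* page_pool_put_full_page(ring->page_pool, desc_cb->priv, false); */
	} else {
		/* __page_frag_cache_drain(desc_cb->priv, desc_cb->pagecnt_bias); */
	}
}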
4076 struct hns3_desc_cb *desc_cb; in hns3_add_frag() local
4082 desc_cb = &ring->desc_cb[ring->next_to_clean]; in hns3_add_frag()
4119 desc_cb->dma + desc_cb->page_offset, in hns3_add_frag()
4123 hns3_nic_reuse_page(skb, ring->frag_num++, ring, 0, desc_cb); in hns3_add_frag()
4314 struct hns3_desc_cb *desc_cb; in hns3_handle_rx_bd() local
4321 desc_cb = &ring->desc_cb[ring->next_to_clean]; in hns3_handle_rx_bd()
4334 ring->va = desc_cb->buf + desc_cb->page_offset; in hns3_handle_rx_bd()
4337 desc_cb->dma + desc_cb->page_offset, in hns3_handle_rx_bd()
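
Both hns3_add_frag() (line 4119) and hns3_handle_rx_bd() (lines 4334-4337) address the RX payload as buffer base plus page_offset; the truncated lines ending in "desc_cb->dma + desc_cb->page_offset," look like the arguments of a DMA sync over that same slice, but the call itself is not visible in this listing, so it is only noted as an assumption in the comment below.

static void *model_rx_payload(struct hns3_desc_cb_model *desc_cb)
{
	/* presumably preceded in the driver by a dma_sync_single_for_cpu() on
	 * desc_cb->dma + desc_cb->page_offset (lines 4119 / 4337) */
	return (char *)desc_cb->buf + desc_cb->page_offset;	/* line 4334 */
}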
4882 ring->desc_cb = NULL; in hns3_ring_get_cfg()
4956 ring->desc_cb = devm_kcalloc(ring_to_dev(ring), ring->desc_num, in hns3_alloc_ring_memory()
4957 sizeof(ring->desc_cb[0]), GFP_KERNEL); in hns3_alloc_ring_memory()
4958 if (!ring->desc_cb) { in hns3_alloc_ring_memory()
4983 devm_kfree(ring_to_dev(ring), ring->desc_cb); in hns3_alloc_ring_memory()
4984 ring->desc_cb = NULL; in hns3_alloc_ring_memory()
4992 devm_kfree(ring_to_dev(ring), ring->desc_cb); in hns3_fini_ring()
4993 ring->desc_cb = NULL; in hns3_fini_ring()
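
The last groups cover the desc_cb array's lifetime: hns3_alloc_ring_memory() sizes it by desc_num and allocates it zeroed with devm_kcalloc() (lines 4956-4958), and both the error path (4983-4984) and hns3_fini_ring() (4992-4993) free it and clear the pointer so later teardown sees an empty ring. A userspace model of the same pattern, reusing ring_model; calloc()/free() stand in for the devm-managed kernel allocator.

#include <stdlib.h>

static int model_alloc_ring_cb(struct ring_model *ring, int desc_num)
{
	ring->desc_cb = calloc(desc_num, sizeof(ring->desc_cb[0]));
	if (!ring->desc_cb)
		return -1;		/* -ENOMEM in the driver */

	ring->desc_num = desc_num;
	return 0;
}

static void model_fini_ring_cb(struct ring_model *ring)
{
	free(ring->desc_cb);
	ring->desc_cb = NULL;		/* mirrors ring->desc_cb = NULL in hns3_fini_ring() */
}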
5490 if (!ring->desc_cb[ring->next_to_use].reuse_flag) { in hns3_clear_rx_ring()
5524 if (!ring->desc_cb[ring->next_to_use].reuse_flag) { in hns3_force_clear_rx_ring()
5526 &ring->desc_cb[ring->next_to_use]); in hns3_force_clear_rx_ring()
5527 ring->desc_cb[ring->next_to_use].dma = 0; in hns3_force_clear_rx_ring()