Lines matching refs:skb_dma

110 static bool bam_dmux_skb_dma_map(struct bam_dmux_skb_dma *skb_dma,  in bam_dmux_skb_dma_map()  argument
113 struct device *dev = skb_dma->dmux->dev; in bam_dmux_skb_dma_map()
115 skb_dma->addr = dma_map_single(dev, skb_dma->skb->data, skb_dma->skb->len, dir); in bam_dmux_skb_dma_map()
116 if (dma_mapping_error(dev, skb_dma->addr)) { in bam_dmux_skb_dma_map()
118 skb_dma->addr = 0; in bam_dmux_skb_dma_map()
125 static void bam_dmux_skb_dma_unmap(struct bam_dmux_skb_dma *skb_dma, in bam_dmux_skb_dma_unmap() argument
128 dma_unmap_single(skb_dma->dmux->dev, skb_dma->addr, skb_dma->skb->len, dir); in bam_dmux_skb_dma_unmap()
129 skb_dma->addr = 0; in bam_dmux_skb_dma_unmap()
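
The matches at lines 110-129 are the two low-level mapping helpers. Below is a minimal sketch of how they fit together, reconstructed only from the matched lines; the struct layouts, constant values, error message and anything else not visible above are assumptions, and the later sketches in this listing reuse these definitions.

    #include <linux/atomic.h>
    #include <linux/bitops.h>
    #include <linux/dma-mapping.h>
    #include <linux/dmaengine.h>
    #include <linux/netdevice.h>
    #include <linux/skbuff.h>

    #define BAM_DMUX_NUM_SKB	32	/* value assumed; the constant itself appears in the matches below */
    #define BAM_DMUX_BUFFER_SIZE	2048	/* value assumed */

    struct bam_dmux;

    struct bam_dmux_skb_dma {
    	struct bam_dmux *dmux;		/* back-pointer to the owning device context */
    	struct sk_buff *skb;		/* buffer currently bound to this DMA slot */
    	dma_addr_t addr;		/* 0 while the slot is unmapped */
    };

    struct bam_dmux {			/* only members visible in the matches; the rest is assumed */
    	struct device *dev;
    	struct dma_chan *tx, *rx;
    	struct bam_dmux_skb_dma tx_skbs[BAM_DMUX_NUM_SKB];
    	unsigned int tx_next_skb;
    	atomic_long_t tx_deferred_skb;	/* bitmap of deferred TX slots; field name assumed */
    };

    static bool bam_dmux_skb_dma_map(struct bam_dmux_skb_dma *skb_dma,
    				 enum dma_data_direction dir)
    {
    	struct device *dev = skb_dma->dmux->dev;

    	skb_dma->addr = dma_map_single(dev, skb_dma->skb->data, skb_dma->skb->len, dir);
    	if (dma_mapping_error(dev, skb_dma->addr)) {
    		dev_err(dev, "Failed to map skb for DMA\n");	/* message assumed */
    		skb_dma->addr = 0;	/* line 118: keep addr == 0 meaning "not mapped" */
    		return false;
    	}

    	return true;
    }

    static void bam_dmux_skb_dma_unmap(struct bam_dmux_skb_dma *skb_dma,
    				   enum dma_data_direction dir)
    {
    	dma_unmap_single(skb_dma->dmux->dev, skb_dma->addr, skb_dma->skb->len, dir);
    	skb_dma->addr = 0;
    }
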
160 static void bam_dmux_tx_done(struct bam_dmux_skb_dma *skb_dma) in bam_dmux_tx_done() argument
162 struct bam_dmux *dmux = skb_dma->dmux; in bam_dmux_tx_done()
168 if (skb_dma->addr) in bam_dmux_tx_done()
169 bam_dmux_skb_dma_unmap(skb_dma, DMA_TO_DEVICE); in bam_dmux_tx_done()
172 skb_dma->skb = NULL; in bam_dmux_tx_done()
173 if (skb_dma == &dmux->tx_skbs[dmux->tx_next_skb % BAM_DMUX_NUM_SKB]) in bam_dmux_tx_done()
180 struct bam_dmux_skb_dma *skb_dma = data; in bam_dmux_tx_callback() local
181 struct sk_buff *skb = skb_dma->skb; in bam_dmux_tx_callback()
183 bam_dmux_tx_done(skb_dma); in bam_dmux_tx_callback()
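
Lines 160-183 are the TX completion path: the dmaengine callback returns the slot and releases the skb. A hedged sketch follows; the runtime-PM calls, locking and the actual queue wake-up live in lines that did not match and are represented by an assumed stub, and dev_consume_skb_any() is likewise an assumption.

    static void bam_dmux_tx_wake_queues(struct bam_dmux *dmux)
    {
    	/* Stub for the omitted wake-up of the stalled netdev queues (assumed). */
    }

    static void bam_dmux_tx_done(struct bam_dmux_skb_dma *skb_dma)
    {
    	struct bam_dmux *dmux = skb_dma->dmux;

    	/* Unmap only if the map step succeeded; addr stays 0 otherwise. */
    	if (skb_dma->addr)
    		bam_dmux_skb_dma_unmap(skb_dma, DMA_TO_DEVICE);

    	skb_dma->skb = NULL;	/* the slot is free again */

    	/*
    	 * Line 173: if this slot is the one the producer would claim next,
    	 * the ring has room again, so transmission can be restarted.
    	 */
    	if (skb_dma == &dmux->tx_skbs[dmux->tx_next_skb % BAM_DMUX_NUM_SKB])
    		bam_dmux_tx_wake_queues(dmux);
    }

    static void bam_dmux_tx_callback(void *data)
    {
    	struct bam_dmux_skb_dma *skb_dma = data;
    	struct sk_buff *skb = skb_dma->skb;

    	bam_dmux_tx_done(skb_dma);
    	dev_consume_skb_any(skb);	/* assumed: drop the reference after completion */
    }
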
187 static bool bam_dmux_skb_dma_submit_tx(struct bam_dmux_skb_dma *skb_dma) in bam_dmux_skb_dma_submit_tx() argument
189 struct bam_dmux *dmux = skb_dma->dmux; in bam_dmux_skb_dma_submit_tx()
192 desc = dmaengine_prep_slave_single(dmux->tx, skb_dma->addr, in bam_dmux_skb_dma_submit_tx()
193 skb_dma->skb->len, DMA_MEM_TO_DEV, in bam_dmux_skb_dma_submit_tx()
201 desc->callback_param = skb_dma; in bam_dmux_skb_dma_submit_tx()
209 struct bam_dmux_skb_dma *skb_dma; in bam_dmux_tx_queue() local
214 skb_dma = &dmux->tx_skbs[dmux->tx_next_skb % BAM_DMUX_NUM_SKB]; in bam_dmux_tx_queue()
215 if (skb_dma->skb) { in bam_dmux_tx_queue()
220 skb_dma->skb = skb; in bam_dmux_tx_queue()
227 return skb_dma; in bam_dmux_tx_queue()
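
Lines 187-227 show the TX submit side: bam_dmux_tx_queue() claims a slot in the tx_skbs ring and bam_dmux_skb_dma_submit_tx() hands the mapped buffer to the dmaengine. A sketch; the prep flags, the callback wiring, the submit call, the ring-index update and any locking are assumptions about the omitted lines.

    static bool bam_dmux_skb_dma_submit_tx(struct bam_dmux_skb_dma *skb_dma)
    {
    	struct bam_dmux *dmux = skb_dma->dmux;
    	struct dma_async_tx_descriptor *desc;

    	desc = dmaengine_prep_slave_single(dmux->tx, skb_dma->addr,
    					   skb_dma->skb->len, DMA_MEM_TO_DEV,
    					   DMA_PREP_INTERRUPT);	/* flags assumed */
    	if (!desc) {
    		dev_err(dmux->dev, "Failed to prepare TX descriptor\n");	/* assumed */
    		return false;
    	}

    	desc->callback = bam_dmux_tx_callback;	/* pairing implied by callback_param at line 201 */
    	desc->callback_param = skb_dma;
    	dmaengine_submit(desc);			/* assumed */
    	return true;
    }

    static struct bam_dmux_skb_dma *bam_dmux_tx_queue(struct bam_dmux *dmux,
    						  struct sk_buff *skb)
    {
    	struct bam_dmux_skb_dma *skb_dma;

    	/* Locking around the ring index is omitted in this sketch. */
    	skb_dma = &dmux->tx_skbs[dmux->tx_next_skb % BAM_DMUX_NUM_SKB];
    	if (skb_dma->skb) {
    		/* Next slot still in flight: the ring is full (assumed handling). */
    		return NULL;
    	}

    	skb_dma->skb = skb;
    	dmux->tx_next_skb++;	/* advance the producer index (assumed) */

    	return skb_dma;
    }
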
233 struct bam_dmux_skb_dma *skb_dma; in bam_dmux_send_cmd() local
247 skb_dma = bam_dmux_tx_queue(dmux, skb); in bam_dmux_send_cmd()
248 if (!skb_dma) { in bam_dmux_send_cmd()
257 if (!bam_dmux_skb_dma_map(skb_dma, DMA_TO_DEVICE)) { in bam_dmux_send_cmd()
262 if (!bam_dmux_skb_dma_submit_tx(skb_dma)) { in bam_dmux_send_cmd()
271 bam_dmux_tx_done(skb_dma); in bam_dmux_send_cmd()
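
Lines 233-271 are the command-send path; the visible control flow is claim slot, map, submit, and unwind through bam_dmux_tx_done() on failure. The sketch below uses a hypothetical name (bam_dmux_send_cmd_flow) and assumed error codes because the real signature, header setup and labels are not among the matches.

    static int bam_dmux_send_cmd_flow(struct bam_dmux *dmux, struct sk_buff *skb)
    {
    	struct bam_dmux_skb_dma *skb_dma;
    	int ret;

    	skb_dma = bam_dmux_tx_queue(dmux, skb);
    	if (!skb_dma) {
    		ret = -EAGAIN;		/* error code assumed */
    		goto free_skb;
    	}

    	if (!bam_dmux_skb_dma_map(skb_dma, DMA_TO_DEVICE)) {
    		ret = -ENOMEM;		/* error code assumed */
    		goto tx_fail;
    	}

    	if (!bam_dmux_skb_dma_submit_tx(skb_dma)) {
    		ret = -EIO;		/* error code assumed */
    		goto tx_fail;
    	}

    	dma_async_issue_pending(dmux->tx);	/* kick the TX channel (assumed) */
    	return 0;

    tx_fail:
    	bam_dmux_tx_done(skb_dma);		/* line 271: unwind via the common completion path */
    free_skb:
    	dev_kfree_skb(skb);			/* assumed */
    	return ret;
    }
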
339 struct bam_dmux_skb_dma *skb_dma; in bam_dmux_netdev_start_xmit() local
342 skb_dma = bam_dmux_tx_queue(dmux, skb); in bam_dmux_netdev_start_xmit()
343 if (!skb_dma) in bam_dmux_netdev_start_xmit()
354 if (!bam_dmux_skb_dma_map(skb_dma, DMA_TO_DEVICE)) in bam_dmux_netdev_start_xmit()
359 if (!atomic_long_fetch_or(BIT(skb_dma - dmux->tx_skbs), in bam_dmux_netdev_start_xmit()
365 if (!bam_dmux_skb_dma_submit_tx(skb_dma)) in bam_dmux_netdev_start_xmit()
372 bam_dmux_tx_done(skb_dma); in bam_dmux_netdev_start_xmit()
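
Lines 339-372 are the ndo_start_xmit path, which repeats the same claim/map/submit/tx_done sequence. The extra step at line 359 turns the slot pointer into an index by pointer arithmetic (skb_dma - dmux->tx_skbs) and records it in a deferred-TX bitmap with atomic_long_fetch_or(); a zero return value means the bitmap was empty, i.e. this is the first deferred slot and follow-up work presumably gets scheduled. A minimal illustration of just that idiom, with the bitmap field name assumed:

    /* Hypothetical helper showing only the deferred-slot bookkeeping at line 359. */
    static bool bam_dmux_defer_tx_slot(struct bam_dmux *dmux,
    				   struct bam_dmux_skb_dma *skb_dma)
    {
    	unsigned long bit = BIT(skb_dma - dmux->tx_skbs);	/* slot index -> bitmap bit */

    	/*
    	 * atomic_long_fetch_or() returns the previous bitmap contents, so a
    	 * zero result means no other slot was deferred yet and the caller
    	 * should schedule the wakeup/resubmit work (done in an omitted line).
    	 */
    	return !atomic_long_fetch_or(bit, &dmux->tx_deferred_skb);
    }

On the failure paths (line 372) the slot is returned through bam_dmux_tx_done(), the same unwind used by the command path above.
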
470 static bool bam_dmux_skb_dma_submit_rx(struct bam_dmux_skb_dma *skb_dma) in bam_dmux_skb_dma_submit_rx() argument
472 struct bam_dmux *dmux = skb_dma->dmux; in bam_dmux_skb_dma_submit_rx()
475 desc = dmaengine_prep_slave_single(dmux->rx, skb_dma->addr, in bam_dmux_skb_dma_submit_rx()
476 skb_dma->skb->len, DMA_DEV_TO_MEM, in bam_dmux_skb_dma_submit_rx()
484 desc->callback_param = skb_dma; in bam_dmux_skb_dma_submit_rx()
489 static bool bam_dmux_skb_dma_queue_rx(struct bam_dmux_skb_dma *skb_dma, gfp_t gfp) in bam_dmux_skb_dma_queue_rx() argument
491 if (!skb_dma->skb) { in bam_dmux_skb_dma_queue_rx()
492 skb_dma->skb = __netdev_alloc_skb(NULL, BAM_DMUX_BUFFER_SIZE, gfp); in bam_dmux_skb_dma_queue_rx()
493 if (!skb_dma->skb) in bam_dmux_skb_dma_queue_rx()
495 skb_put(skb_dma->skb, BAM_DMUX_BUFFER_SIZE); in bam_dmux_skb_dma_queue_rx()
498 return bam_dmux_skb_dma_map(skb_dma, DMA_FROM_DEVICE) && in bam_dmux_skb_dma_queue_rx()
499 bam_dmux_skb_dma_submit_rx(skb_dma); in bam_dmux_skb_dma_queue_rx()
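
Lines 470-499 mirror the TX helpers for the receive direction: each RX slot keeps a BAM_DMUX_BUFFER_SIZE skb mapped DMA_FROM_DEVICE and parked on the RX channel. A sketch, with the callback wiring, submit call and error message assumed:

    static void bam_dmux_rx_callback(void *data);	/* completion handler, sketched further below */

    static bool bam_dmux_skb_dma_submit_rx(struct bam_dmux_skb_dma *skb_dma)
    {
    	struct bam_dmux *dmux = skb_dma->dmux;
    	struct dma_async_tx_descriptor *desc;

    	desc = dmaengine_prep_slave_single(dmux->rx, skb_dma->addr,
    					   skb_dma->skb->len, DMA_DEV_TO_MEM,
    					   DMA_PREP_INTERRUPT);	/* flags assumed */
    	if (!desc) {
    		dev_err(dmux->dev, "Failed to prepare RX descriptor\n");	/* assumed */
    		return false;
    	}

    	desc->callback = bam_dmux_rx_callback;	/* pairing implied by callback_param at line 484 */
    	desc->callback_param = skb_dma;
    	dmaengine_submit(desc);			/* assumed */
    	return true;
    }

    static bool bam_dmux_skb_dma_queue_rx(struct bam_dmux_skb_dma *skb_dma, gfp_t gfp)
    {
    	if (!skb_dma->skb) {
    		skb_dma->skb = __netdev_alloc_skb(NULL, BAM_DMUX_BUFFER_SIZE, gfp);
    		if (!skb_dma->skb)
    			return false;
    		/* Expose the whole buffer so the hardware can fill it completely. */
    		skb_put(skb_dma->skb, BAM_DMUX_BUFFER_SIZE);
    	}

    	return bam_dmux_skb_dma_map(skb_dma, DMA_FROM_DEVICE) &&
    	       bam_dmux_skb_dma_submit_rx(skb_dma);
    }
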
502 static void bam_dmux_cmd_data(struct bam_dmux_skb_dma *skb_dma) in bam_dmux_cmd_data() argument
504 struct bam_dmux *dmux = skb_dma->dmux; in bam_dmux_cmd_data()
505 struct sk_buff *skb = skb_dma->skb; in bam_dmux_cmd_data()
520 skb_dma->skb = NULL; /* Hand over to network stack */ in bam_dmux_cmd_data()
578 struct bam_dmux_skb_dma *skb_dma = data; in bam_dmux_rx_callback() local
579 struct bam_dmux *dmux = skb_dma->dmux; in bam_dmux_rx_callback()
580 struct sk_buff *skb = skb_dma->skb; in bam_dmux_rx_callback()
583 bam_dmux_skb_dma_unmap(skb_dma, DMA_FROM_DEVICE); in bam_dmux_rx_callback()
597 bam_dmux_cmd_data(skb_dma); in bam_dmux_rx_callback()
612 if (bam_dmux_skb_dma_queue_rx(skb_dma, GFP_ATOMIC)) in bam_dmux_rx_callback()
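
Lines 502-612 are the RX completion side: bam_dmux_cmd_data() detaches the skb from the slot before handing it to the network stack (line 520), and the dmaengine callback unmaps the slot, dispatches the payload and then re-queues the slot with GFP_ATOMIC. In the sketch below the header parsing, the command dispatch, the netdev selection and the issue_pending call are all assumptions about the omitted lines.

    static void bam_dmux_cmd_data(struct bam_dmux_skb_dma *skb_dma)
    {
    	struct sk_buff *skb = skb_dma->skb;

    	/*
    	 * The channel lookup via skb_dma->dmux and the header stripping are
    	 * omitted here (assumed). The slot must forget the skb before the
    	 * hand-over so the buffer is neither reused nor freed twice.
    	 */
    	skb_dma->skb = NULL;	/* Hand over to network stack */
    	netif_rx(skb);		/* assumed: deliver to the per-channel netdev */
    }

    static void bam_dmux_rx_callback(void *data)
    {
    	struct bam_dmux_skb_dma *skb_dma = data;
    	struct bam_dmux *dmux = skb_dma->dmux;

    	bam_dmux_skb_dma_unmap(skb_dma, DMA_FROM_DEVICE);

    	/* Header validation and command dispatch omitted; a data command
    	 * ends up in bam_dmux_cmd_data() as seen at line 597. */
    	bam_dmux_cmd_data(skb_dma);

    	/* Recycle the slot: allocate/map a fresh buffer and park it on the
    	 * RX channel again; GFP_ATOMIC because this runs in completion context. */
    	if (bam_dmux_skb_dma_queue_rx(skb_dma, GFP_ATOMIC))
    		dma_async_issue_pending(dmux->rx);	/* assumed */
    }
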
648 struct bam_dmux_skb_dma *skb_dma = &skbs[i]; in bam_dmux_free_skbs() local
650 if (skb_dma->addr) in bam_dmux_free_skbs()
651 bam_dmux_skb_dma_unmap(skb_dma, dir); in bam_dmux_free_skbs()
652 if (skb_dma->skb) { in bam_dmux_free_skbs()
653 dev_kfree_skb(skb_dma->skb); in bam_dmux_free_skbs()
654 skb_dma->skb = NULL; in bam_dmux_free_skbs()
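
Lines 648-654 are the teardown loop: every slot that is still mapped gets unmapped and any leftover skb is freed. Because both bam_dmux_skb_dma_map() (on failure) and bam_dmux_skb_dma_unmap() reset addr to 0, the addr check is enough to avoid double unmaps. A sketch, with the signature and loop bounds assumed:

    static void bam_dmux_free_skbs(struct bam_dmux_skb_dma skbs[BAM_DMUX_NUM_SKB],
    			       enum dma_data_direction dir)
    {
    	int i;

    	for (i = 0; i < BAM_DMUX_NUM_SKB; i++) {
    		struct bam_dmux_skb_dma *skb_dma = &skbs[i];

    		if (skb_dma->addr)
    			bam_dmux_skb_dma_unmap(skb_dma, dir);
    		if (skb_dma->skb) {
    			dev_kfree_skb(skb_dma->skb);
    			skb_dma->skb = NULL;
    		}
    	}
    }
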