Searched refs:DMA_ATTR_WEAK_ORDERING (Results 1 – 16 of 16) sorted by relevance
8 #define STMMAC_RX_DMA_ATTR (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
8 DMA_ATTR_WEAK_ORDERING section in DMA attributes
11 DMA_ATTR_WEAK_ORDERING specifies that reads and writes to the mapping
14 Since it is optional for platforms to implement DMA_ATTR_WEAK_ORDERING,
241 dma_attr |= DMA_ATTR_WEAK_ORDERING; in ib_umem_get()
201 if (attrs & DMA_ATTR_WEAK_ORDERING) in dma_4v_alloc_coherent()
398 if (attrs & DMA_ATTR_WEAK_ORDERING) in dma_4v_map_page()
496 if (attrs & DMA_ATTR_WEAK_ORDERING) in dma_4v_map_sg()
136 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
116 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
103 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
317 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
180 DMA_ATTR_WEAK_ORDERING); in bnxt_rx_xdp()
734 DMA_ATTR_WEAK_ORDERING); in __bnxt_alloc_rx_page()
755 DMA_ATTR_WEAK_ORDERING); in __bnxt_alloc_rx_data()
861 DMA_ATTR_WEAK_ORDERING); in bnxt_alloc_rx_page()
981 DMA_ATTR_WEAK_ORDERING); in bnxt_rx_page_skb()
1025 bp->rx_dir, DMA_ATTR_WEAK_ORDERING); in bnxt_rx_skb()
1101 DMA_ATTR_WEAK_ORDERING); in bnxt_rx_pages()
1629 DMA_ATTR_WEAK_ORDERING); in bnxt_tpa_end()
2773 DMA_ATTR_WEAK_ORDERING); in bnxt_free_one_rx_ring_skbs()
2797 DMA_ATTR_WEAK_ORDERING); in bnxt_free_one_rx_ring_skbs()
2802 DMA_ATTR_WEAK_ORDERING); in bnxt_free_one_rx_ring_skbs()
[all …]
371 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
207 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
597 imem->attrs = DMA_ATTR_WEAK_ORDERING | in gk20a_instmem_new()
23 #define DMA_ATTR_WEAK_ORDERING (1UL << 1) macro
183 if (unlikely(attrs & DMA_ATTR_WEAK_ORDERING)) in tce_build_cell()
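
The hits above follow a common pattern: a driver builds an attribute mask (often combined with DMA_ATTR_SKIP_CPU_SYNC, as in the *_RX_DMA_ATTR defines) and passes it to the *_attrs mapping helpers; per the documentation hit, platforms that do not implement DMA_ATTR_WEAK_ORDERING simply ignore the hint. Below is a minimal sketch of that pattern, not taken from any of the files listed; EXAMPLE_RX_DMA_ATTR, example_map_rx_page() and example_unmap_rx_page() are hypothetical names used only for illustration.

    #include <linux/dma-mapping.h>
    #include <linux/mm.h>

    /* Hypothetical mask, mirroring the *_RX_DMA_ATTR defines in the hits above. */
    #define EXAMPLE_RX_DMA_ATTR (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)

    static dma_addr_t example_map_rx_page(struct device *dev, struct page *page)
    {
        dma_addr_t mapping;

        /* Map a full page for device-to-CPU DMA, passing the attribute mask. */
        mapping = dma_map_page_attrs(dev, page, 0, PAGE_SIZE,
                                     DMA_FROM_DEVICE, EXAMPLE_RX_DMA_ATTR);
        if (dma_mapping_error(dev, mapping))
            return DMA_MAPPING_ERROR;

        return mapping;
    }

    static void example_unmap_rx_page(struct device *dev, dma_addr_t mapping)
    {
        /* Unmap with the same attribute mask that was used for mapping. */
        dma_unmap_page_attrs(dev, mapping, PAGE_SIZE, DMA_FROM_DEVICE,
                             EXAMPLE_RX_DMA_ATTR);
    }

On platforms that honour the attribute (for example the sparc sun4v and powerpc cell IOMMU code in the hits above), the mapping may be created with relaxed read/write ordering; everywhere else the flag is a no-op, so drivers can pass it unconditionally.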