| /include/linux/ |
| A D | dma-direct.h | 22 dma_addr_t dma_start; 26 static inline dma_addr_t translate_phys_to_dma(struct device *dev, in translate_phys_to_dma() 43 dma_addr_t dma_addr) in translate_dma_to_phys() 57 static inline dma_addr_t dma_range_map_min(const struct bus_dma_region *map) in dma_range_map_min() 59 dma_addr_t ret = (dma_addr_t)U64_MAX; in dma_range_map_min() 68 dma_addr_t ret = 0; in dma_range_map_max() 88 static inline dma_addr_t phys_to_dma_unencrypted(struct device *dev, in phys_to_dma_unencrypted() 130 dma_addr_t end = addr + size - 1; in dma_capable() 145 dma_addr_t dma_addr, unsigned long attrs); 147 dma_addr_t *dma_handle, enum dma_data_direction dir, gfp_t gfp); [all …]
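The helpers in dma-direct.h are internals of the direct-mapping DMA path and are not called by ordinary drivers. As a minimal sketch of what the region-table translation shown above does conceptually, the snippet below walks a stand-in `bus_dma_region`-style table; the struct and function names are hypothetical stand-ins, not the kernel's actual implementation.

```c
#include <linux/types.h>
#include <linux/dma-mapping.h>

/* Stand-in for struct bus_dma_region, following the excerpts above. */
struct example_dma_region {
	phys_addr_t	cpu_start;
	dma_addr_t	dma_start;
	u64		size;
};

/*
 * Hedged sketch: translate a CPU physical address to a bus (DMA) address
 * through a region table, in the spirit of translate_phys_to_dma().
 * The real table is terminated by an entry with size == 0.
 */
static dma_addr_t example_phys_to_dma(const struct example_dma_region *map,
				      phys_addr_t paddr)
{
	const struct example_dma_region *m;

	for (m = map; m->size; m++) {
		if (paddr >= m->cpu_start && paddr - m->cpu_start < m->size)
			return (dma_addr_t)(paddr - m->cpu_start) + m->dma_start;
	}
	return DMA_MAPPING_ERROR;	/* no region covers this address */
}
```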
|
| A D | iommu-dma.h | 24 dma_addr_t iommu_dma_map_page(struct device *dev, struct page *page, 27 void iommu_dma_unmap_page(struct device *dev, dma_addr_t dma_handle, 33 void *iommu_dma_alloc(struct device *dev, size_t size, dma_addr_t *handle, 36 void *cpu_addr, dma_addr_t dma_addr, size_t size, 39 void *cpu_addr, dma_addr_t dma_addr, size_t size, 45 dma_addr_t handle, unsigned long attrs); 46 dma_addr_t iommu_dma_map_resource(struct device *dev, phys_addr_t phys, 48 void iommu_dma_unmap_resource(struct device *dev, dma_addr_t handle, 60 void iommu_dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle, 62 void iommu_dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle,
|
| A D | dma-mapping.h | 71 #define DMA_MAPPING_ERROR (~(dma_addr_t)0) 76 dma_addr_t addr; 97 dma_addr_t dma_addr) in debug_dma_mapping_error() 135 dma_addr_t dma_handle, unsigned long attrs); 139 dma_addr_t dma_handle); 211 dma_addr_t dma_handle, unsigned long attrs) in dma_free_attrs() 220 void *vaddr, dma_addr_t dma_handle) in dmam_free_coherent() 557 dma_addr_t *dma_handle, gfp_t gfp) in dma_alloc_coherent() 564 void *cpu_addr, dma_addr_t dma_handle) in dma_free_coherent() 674 dma_addr_t *dma_handle, gfp_t gfp) in dmam_alloc_coherent() [all …]
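dma-mapping.h is the core driver-facing DMA API. Below is a minimal, hedged sketch of the usual pattern for a hypothetical device: allocate a coherent descriptor ring, streaming-map a data buffer, and check the result with dma_mapping_error(); the ring size and function name are made up for illustration.

```c
#include <linux/dma-mapping.h>
#include <linux/device.h>

#define EXAMPLE_RING_BYTES 4096		/* hypothetical descriptor ring size */

static int example_setup_dma(struct device *dev, void *buf, size_t len)
{
	dma_addr_t ring_dma, buf_dma;
	void *ring;

	/* Coherent (consistent) memory for a long-lived descriptor ring. */
	ring = dma_alloc_coherent(dev, EXAMPLE_RING_BYTES, &ring_dma, GFP_KERNEL);
	if (!ring)
		return -ENOMEM;

	/* Streaming mapping for a one-off data buffer. */
	buf_dma = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, buf_dma)) {
		dma_free_coherent(dev, EXAMPLE_RING_BYTES, ring, ring_dma);
		return -ENOMEM;
	}

	/* ... hand ring_dma and buf_dma to the hardware ... */

	dma_unmap_single(dev, buf_dma, len, DMA_TO_DEVICE);
	dma_free_coherent(dev, EXAMPLE_RING_BYTES, ring, ring_dma);
	return 0;
}
```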
|
| A D | dma-map-ops.h | 18 dma_addr_t *dma_handle, gfp_t gfp, 21 dma_addr_t dma_handle, unsigned long attrs); 28 void *, dma_addr_t, size_t, unsigned long attrs); 31 void *cpu_addr, dma_addr_t dma_addr, size_t size, 58 dma_addr_t dma_handle, size_t size, 160 dma_addr_t device_addr, size_t size); 163 dma_addr_t *dma_handle, void **ret); 182 dma_addr_t *dma_handle); 189 ssize_t size, dma_addr_t *dma_handle) in dma_alloc_from_global_coherent() 228 dma_addr_t dma_start, u64 size); [all …]
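dma-map-ops.h is aimed at architecture, bus, and IOMMU code that implements the mapping backend rather than at drivers. The sketch below is heavily hedged: it shows only the two callbacks whose signatures are visible in the excerpts above, with placeholder bodies, and assumes set_dma_ops() is available (CONFIG_DMA_OPS).

```c
#include <linux/dma-map-ops.h>
#include <linux/device.h>

/* Hedged sketch of a backend supplying its own dma_map_ops. */
static void *example_dma_alloc(struct device *dev, size_t size,
			       dma_addr_t *dma_handle, gfp_t gfp,
			       unsigned long attrs)
{
	return NULL;	/* placeholder: allocate and return a CPU address */
}

static void example_dma_free(struct device *dev, size_t size, void *vaddr,
			     dma_addr_t dma_handle, unsigned long attrs)
{
	/* placeholder: undo example_dma_alloc() */
}

static const struct dma_map_ops example_dma_ops = {
	.alloc	= example_dma_alloc,
	.free	= example_dma_free,
	/* a real backend must also provide the mapping/sync callbacks */
};

static void example_attach_ops(struct device *dev)
{
	set_dma_ops(dev, &example_dma_ops);	/* assumes CONFIG_DMA_OPS */
}
```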
|
| A D | dmapool.h | 28 dma_addr_t *handle); 29 void dma_pool_free(struct dma_pool *pool, void *vaddr, dma_addr_t addr); 47 dma_addr_t *handle) { return NULL; } in dma_pool_alloc() 49 dma_addr_t addr) { } in dma_pool_free() 72 dma_addr_t *handle) in dma_pool_zalloc()
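DMA pools hand out small, fixed-size coherent blocks (typically hardware descriptors) that would be wasteful to allocate a full page at a time with dma_alloc_coherent(). A minimal hedged sketch follows; the 64-byte size/alignment and the pool name are illustrative only.

```c
#include <linux/dmapool.h>
#include <linux/device.h>

static int example_use_dma_pool(struct device *dev)
{
	struct dma_pool *pool;
	dma_addr_t desc_dma;
	void *desc;

	/* Pool of 64-byte, 64-byte-aligned descriptors (sizes illustrative). */
	pool = dma_pool_create("example-desc", dev, 64, 64, 0);
	if (!pool)
		return -ENOMEM;

	desc = dma_pool_zalloc(pool, GFP_KERNEL, &desc_dma);
	if (!desc) {
		dma_pool_destroy(pool);
		return -ENOMEM;
	}

	/* ... fill the descriptor, hand desc_dma to the hardware ... */

	dma_pool_free(pool, desc, desc_dma);
	dma_pool_destroy(pool);
	return 0;
}
```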
|
| A D | pch_dma.h | 20 dma_addr_t tx_reg; 21 dma_addr_t rx_reg;
|
| A D | virtio.h | 108 dma_addr_t virtqueue_get_desc_addr(const struct virtqueue *vq); 109 dma_addr_t virtqueue_get_avail_addr(const struct virtqueue *vq); 110 dma_addr_t virtqueue_get_used_addr(const struct virtqueue *vq); 265 dma_addr_t virtqueue_dma_map_single_attrs(struct virtqueue *_vq, void *ptr, size_t size, 267 void virtqueue_dma_unmap_single_attrs(struct virtqueue *_vq, dma_addr_t addr, 270 int virtqueue_dma_mapping_error(struct virtqueue *_vq, dma_addr_t addr); 272 bool virtqueue_dma_need_sync(struct virtqueue *_vq, dma_addr_t addr); 273 void virtqueue_dma_sync_single_range_for_cpu(struct virtqueue *_vq, dma_addr_t addr, 276 void virtqueue_dma_sync_single_range_for_device(struct virtqueue *_vq, dma_addr_t addr,
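Besides the virtqueue address getters, recent kernels expose virtqueue_dma_* helpers so a driver that premaps its own buffers (e.g. for AF_XDP zero-copy) can map them through the virtqueue's DMA device. The sketch below is hedged and based on the signatures excerpted above; the direction and attrs arguments are assumptions.

```c
#include <linux/virtio.h>
#include <linux/dma-mapping.h>

/* Hedged sketch: premap a driver-owned buffer via the virtqueue's DMA device. */
static int example_premap(struct virtqueue *vq, void *buf, size_t len,
			  dma_addr_t *dma)
{
	*dma = virtqueue_dma_map_single_attrs(vq, buf, len, DMA_TO_DEVICE, 0);
	if (virtqueue_dma_mapping_error(vq, *dma))
		return -ENOMEM;
	return 0;
}

static void example_unmap(struct virtqueue *vq, dma_addr_t dma, size_t len)
{
	virtqueue_dma_unmap_single_attrs(vq, dma, len, DMA_TO_DEVICE, 0);
}
```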
|
| A D | shdma-base.h | 95 dma_addr_t (*slave_addr)(struct shdma_chan *); 97 dma_addr_t, dma_addr_t, size_t *); 98 int (*set_slave)(struct shdma_chan *, int, dma_addr_t, bool);
|
| A D | hmm-dma.h | 22 dma_addr_t *dma_list; 29 dma_addr_t hmm_dma_map_pfn(struct device *dev, struct hmm_dma_map *map,
|
| A D | genalloc.h | 157 dma_addr_t *dma); 159 dma_addr_t *dma, genpool_algo_t algo, void *data); 161 dma_addr_t *dma, int align); 162 extern void *gen_pool_dma_zalloc(struct gen_pool *pool, size_t size, dma_addr_t *dma); 164 dma_addr_t *dma, genpool_algo_t algo, void *data); 166 dma_addr_t *dma, int align);
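The gen_pool_dma_* allocators return both a CPU address and the matching dma_addr_t for pools built on DMA-capable memory such as on-chip SRAM. A minimal hedged sketch, assuming a pool that was created and populated with such memory elsewhere:

```c
#include <linux/genalloc.h>

/* Hedged sketch: carve a DMA-capable buffer out of an existing gen_pool. */
static void *example_sram_alloc(struct gen_pool *pool, size_t size,
				dma_addr_t *dma)
{
	void *va = gen_pool_dma_zalloc(pool, size, dma);

	if (!va)
		return NULL;

	/* ... use va from the CPU, hand *dma to the peripheral ... */
	return va;
}

static void example_sram_free(struct gen_pool *pool, void *va, size_t size)
{
	gen_pool_free(pool, (unsigned long)va, size);
}
```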
|
| A D | iova.h | 58 static inline size_t iova_offset(struct iova_domain *iovad, dma_addr_t iova) in iova_offset() 73 static inline dma_addr_t iova_dma_addr(struct iova_domain *iovad, struct iova *iova) in iova_dma_addr() 75 return (dma_addr_t)iova->pfn_lo << iova_shift(iovad); in iova_dma_addr() 78 static inline unsigned long iova_pfn(struct iova_domain *iovad, dma_addr_t iova) in iova_pfn()
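The iova allocator hands out device-address ranges in granule units; iova_dma_addr() and iova_pfn() convert between struct iova and dma_addr_t. A hedged sketch, assuming an iova_domain that was already set up with init_iova_domain() and using 0 as an ad-hoc failure value:

```c
#include <linux/iova.h>

/* Hedged sketch: allocate a device-address range and return it as a dma_addr_t. */
static dma_addr_t example_alloc_iova(struct iova_domain *iovad, size_t size,
				     dma_addr_t dma_limit)
{
	unsigned long nr_pages = iova_align(iovad, size) >> iova_shift(iovad);
	struct iova *iova;

	iova = alloc_iova(iovad, nr_pages, iova_pfn(iovad, dma_limit), true);
	if (!iova)
		return 0;	/* illustrative failure value */

	return iova_dma_addr(iovad, iova);
}

static void example_free_iova(struct iova_domain *iovad, dma_addr_t dma)
{
	free_iova(iovad, iova_pfn(iovad, dma));
}
```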
|
| /include/linux/soc/ti/ |
| A D | knav_qmss.h | 66 dma_addr_t knav_queue_pop(void *qhandle, unsigned *size); 67 int knav_queue_push(void *qhandle, dma_addr_t dma, 77 dma_addr_t *dma, unsigned *dma_sz); 78 void *knav_pool_desc_unmap(void *ph, dma_addr_t dma, unsigned dma_sz); 79 dma_addr_t knav_pool_desc_virt_to_dma(void *ph, void *virt); 80 void *knav_pool_desc_dma_to_virt(void *ph, dma_addr_t dma);
|
| /include/linux/dma/ |
| A D | k3-udma-glue.h | 37 dma_addr_t desc_dma); 39 dma_addr_t *desc_dma); 45 void *data, void (*cleanup)(void *data, dma_addr_t desc_dma)); 52 dma_addr_t *addr); 54 dma_addr_t *addr); 129 dma_addr_t desc_dma); 131 u32 flow_num, dma_addr_t *desc_dma); 141 void (*cleanup)(void *data, dma_addr_t desc_dma)); 149 dma_addr_t *addr); 151 dma_addr_t *addr);
|
| /include/soc/fsl/ |
| A D | dpaa2-fd.h | 104 static inline dma_addr_t dpaa2_fd_get_addr(const struct dpaa2_fd *fd) in dpaa2_fd_get_addr() 106 return (dma_addr_t)le64_to_cpu(fd->simple.addr); in dpaa2_fd_get_addr() 114 static inline void dpaa2_fd_set_addr(struct dpaa2_fd *fd, dma_addr_t addr) in dpaa2_fd_set_addr() 167 static inline dma_addr_t dpaa2_fd_get_flc(const struct dpaa2_fd *fd) in dpaa2_fd_get_flc() 169 return (dma_addr_t)le64_to_cpu(fd->simple.flc); in dpaa2_fd_get_flc() 310 static inline dma_addr_t dpaa2_sg_get_addr(const struct dpaa2_sg_entry *sg) in dpaa2_sg_get_addr() 312 return (dma_addr_t)le64_to_cpu(sg->addr); in dpaa2_sg_get_addr() 482 return (dma_addr_t)le64_to_cpu(fle->addr); in dpaa2_fl_get_addr() 491 dma_addr_t addr) in dpaa2_fl_set_addr() 546 return (dma_addr_t)le64_to_cpu(fle->flc); in dpaa2_fl_get_flc() [all …]
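The DPAA2 frame-descriptor accessors pack and unpack little-endian hardware fields; dpaa2_fd_set_addr() records the bus address a buffer was DMA-mapped to. A minimal hedged sketch using only the accessor shown above; the length/format fields are deliberately left out.

```c
#include <linux/dma-mapping.h>
#include <linux/string.h>
#include <soc/fsl/dpaa2-fd.h>

/* Hedged sketch: map a buffer and record its bus address in a frame descriptor. */
static int example_fill_fd(struct device *dev, struct dpaa2_fd *fd,
			   void *buf, size_t len)
{
	dma_addr_t addr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);

	if (dma_mapping_error(dev, addr))
		return -ENOMEM;

	memset(fd, 0, sizeof(*fd));
	dpaa2_fd_set_addr(fd, addr);
	/* length/format fields omitted; see the rest of dpaa2-fd.h */
	return 0;
}
```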
|
| /include/linux/mailbox/ |
| A D | brcm-message.h | 25 dma_addr_t cmd_dma_addr; 32 dma_addr_t resp; 34 dma_addr_t data;
|
| /include/media/ |
| A D | vsp1.h | 66 dma_addr_t mem[3]; 107 dma_addr_t mem[3]; 142 dma_addr_t dma_addr; 177 dma_addr_t mem; 181 dma_addr_t mem;
|
| A D | videobuf2-dma-contig.h | 19 static inline dma_addr_t 22 dma_addr_t *addr = vb2_plane_cookie(vb, plane_no); in vb2_dma_contig_plane_dma_addr()
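vb2_dma_contig_plane_dma_addr() is what capture drivers using the dma-contig allocator call to get the bus address of a queued buffer plane. A minimal hedged sketch of a .buf_queue callback; the register-programming step is a placeholder.

```c
#include <media/videobuf2-dma-contig.h>

/* Hedged sketch of a vb2 .buf_queue callback for a dma-contig driver. */
static void example_buf_queue(struct vb2_buffer *vb)
{
	dma_addr_t addr = vb2_dma_contig_plane_dma_addr(vb, 0);

	/* program 'addr' into the hardware's DMA address register */
	(void)addr;
}
```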
|
| /include/xen/ |
| A D | swiotlb-xen.h | 8 void xen_dma_sync_for_cpu(struct device *dev, dma_addr_t handle, 10 void xen_dma_sync_for_device(struct device *dev, dma_addr_t handle,
|
| /include/soc/tegra/ |
| A D | ivc.h | 22 dma_addr_t phys; 97 dma_addr_t rx_phys, const struct iosys_map *tx, dma_addr_t tx_phys,
|
| /include/net/ |
| A D | xsk_buff_pool.h | 28 dma_addr_t dma; 29 dma_addr_t frame_dma; 38 dma_addr_t *dma_pages; 69 dma_addr_t *dma_pages; 128 dma_addr_t *dma_pages, u64 addr) in xp_init_xskb_dma() 145 dma_addr_t xp_raw_get_dma(struct xsk_buff_pool *pool, u64 addr); 148 dma_addr_t dma; 154 static inline dma_addr_t xp_get_dma(struct xdp_buff_xsk *xskb) in xp_get_dma() 159 static inline dma_addr_t xp_get_frame_dma(struct xdp_buff_xsk *xskb) in xp_get_frame_dma() 172 dma_addr_t dma, size_t size) in xp_dma_sync_for_device()
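For zero-copy AF_XDP, the buffer pool premaps the UMEM once and drivers fetch per-descriptor bus addresses from it. The sketch below is hedged and uses xp_raw_get_dma() and xp_dma_sync_for_device() from the excerpts above; real drivers usually go through the xsk_buff_* wrappers in net/xdp_sock_drv.h instead.

```c
#include <net/xsk_buff_pool.h>
#include <net/xdp_sock_drv.h>

/* Hedged sketch: turn an AF_XDP Tx descriptor into a synced device address. */
static dma_addr_t example_xsk_tx_dma(struct xsk_buff_pool *pool,
				     const struct xdp_desc *desc)
{
	dma_addr_t dma = xp_raw_get_dma(pool, desc->addr);

	xp_dma_sync_for_device(pool, dma, desc->len);
	return dma;
}
```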
|
| /include/linux/soc/nxp/ |
| A D | lpc32xx-misc.h | 15 extern u32 lpc32xx_return_iram(void __iomem **mapbase, dma_addr_t *dmaaddr); 19 static inline u32 lpc32xx_return_iram(void __iomem **mapbase, dma_addr_t *dmaaddr) in lpc32xx_return_iram()
|
| /include/drm/ |
| A D | drm_pagemap.h | 36 dma_addr_t addr; 52 drm_pagemap_device_addr_encode(dma_addr_t addr, in drm_pagemap_device_addr_encode() 181 dma_addr_t *dma_addr, 195 dma_addr_t *dma_addr,
|
| /include/linux/qed/ |
| A D | qed_fcoe_if.h | 38 dma_addr_t sq_pbl_addr; 39 dma_addr_t sq_curr_page_addr; 40 dma_addr_t sq_next_page_addr; 143 u32 handle, dma_addr_t terminate_params);
|
| /include/linux/mtd/ |
| A D | nand-ecc-mxic.h | 23 unsigned int direction, dma_addr_t dirmap); 42 dma_addr_t dirmap) in mxic_ecc_process_data_pipelined()
|
| /include/drm/intel/ |
| A D | intel-gtt.h | 24 void intel_gmch_gtt_insert_page(dma_addr_t addr, 31 dma_addr_t intel_gmch_gtt_read_entry(unsigned int pg,
|