Lines Matching refs:dma_handle
28 rt_ubase_t *dma_handle, rt_ubase_t flags);
30 void *cpu_addr, rt_ubase_t dma_handle, rt_ubase_t flags);
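The two prototypes above look like the alloc/free members of the per-device DMA ops table that the rt_dma_* entry points dispatch through (compare the ops->free and ops->sync_in_data call sites further down). A minimal sketch of such a table, with the struct name and return types assumed rather than taken from the real header:

    /* Hypothetical ops table; member names and parameter lists are inferred
     * from the ops->... call sites and helper signatures in this listing. */
    struct rt_dma_map_ops
    {
        void *(*alloc)(struct rt_device *dev, rt_size_t size,
                       rt_ubase_t *dma_handle, rt_ubase_t flags);
        void (*free)(struct rt_device *dev, rt_size_t size,
                     void *cpu_addr, rt_ubase_t dma_handle, rt_ubase_t flags);
        rt_err_t (*sync_out_data)(struct rt_device *dev, void *data, rt_size_t size,
                                  rt_ubase_t *dma_handle, rt_ubase_t flags);
        rt_err_t (*sync_in_data)(struct rt_device *dev, void *out_data, rt_size_t size,
                                 rt_ubase_t dma_handle, rt_ubase_t flags);
    };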
43 void *data, rt_size_t size, rt_ubase_t *dma_handle, rt_ubase_t flags) in dma_map_coherent_sync_out_data() argument
45 if (dma_handle) in dma_map_coherent_sync_out_data()
47 *dma_handle = (rt_ubase_t)rt_kmem_v2p(data); in dma_map_coherent_sync_out_data()
55 void *out_data, rt_size_t size, rt_ubase_t dma_handle, rt_ubase_t flags) in dma_map_coherent_sync_in_data() argument
69 void *data, rt_size_t size, rt_ubase_t *dma_handle, rt_ubase_t flags) in dma_map_nocoherent_sync_out_data() argument
71 if (dma_handle) in dma_map_nocoherent_sync_out_data()
73 *dma_handle = (rt_ubase_t)rt_kmem_v2p(data); in dma_map_nocoherent_sync_out_data()
80 void *out_data, rt_size_t size, rt_ubase_t dma_handle, rt_ubase_t flags) in dma_map_nocoherent_sync_in_data() argument
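For the non-coherent variants, the usual pattern is to write dirty cache lines back before the device reads a buffer (sync_out) and to invalidate them before the CPU reads device-written data (sync_in), with the bus address taken from rt_kmem_v2p() as in the sync_out match above. A sketch of that pattern, assuming rt_hw_cpu_dcache_ops() from <rthw.h> is the cache-maintenance primitive (the real function bodies are not part of this match list):

    /* Sketch only; cache-op names assume rt_hw_cpu_dcache_ops(). */
    static rt_err_t sketch_nocoherent_sync_out(void *data, rt_size_t size,
                                               rt_ubase_t *dma_handle)
    {
        /* Write dirty cache lines back so the device sees the CPU's data */
        rt_hw_cpu_dcache_ops(RT_HW_CACHE_FLUSH, data, size);

        if (dma_handle)
        {
            /* Hand the device the physical address of the buffer */
            *dma_handle = (rt_ubase_t)rt_kmem_v2p(data);
        }

        return RT_EOK;
    }

    static rt_err_t sketch_nocoherent_sync_in(void *out_data, rt_size_t size)
    {
        /* Drop stale cache lines so the CPU reads what the device wrote */
        rt_hw_cpu_dcache_ops(RT_HW_CACHE_INVALIDATE, out_data, size);

        return RT_EOK;
    }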
103 rt_ubase_t *dma_handle, rt_ubase_t flags) in ofw_dma_map_alloc() argument
105 void *cpu_addr = dma_alloc(dev, size, dma_handle, flags); in ofw_dma_map_alloc()
107 if (cpu_addr && dma_handle) in ofw_dma_map_alloc()
109 *dma_handle = ofw_addr_cpu2dma(dev, *dma_handle); in ofw_dma_map_alloc()
116 void *cpu_addr, rt_ubase_t dma_handle, rt_ubase_t flags) in ofw_dma_map_free() argument
118 dma_handle = ofw_addr_dma2cpu(dev, dma_handle); in ofw_dma_map_free()
120 dma_free(dev, size, cpu_addr, dma_handle, flags); in ofw_dma_map_free()
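ofw_dma_map_alloc() and ofw_dma_map_free() wrap the plain allocator and translate the handle between CPU and bus address space with ofw_addr_cpu2dma()/ofw_addr_dma2cpu(). On hardware described by a device-tree dma-ranges property that translation is typically a per-range constant offset; a purely hypothetical fixed-offset version (the real helpers walk the device's OFW node and apply its dma-ranges entries) could look like:

    /* Hypothetical translation; the offset value and single-range shape are
     * assumptions for illustration only. */
    #define SKETCH_DMA_OFFSET 0x80000000UL  /* assumed bus-to-CPU offset */

    static rt_ubase_t sketch_addr_cpu2dma(rt_ubase_t cpu_addr)
    {
        return cpu_addr - SKETCH_DMA_OFFSET;
    }

    static rt_ubase_t sketch_addr_dma2cpu(rt_ubase_t dma_addr)
    {
        return dma_addr + SKETCH_DMA_OFFSET;
    }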
125 rt_ubase_t *dma_handle, rt_ubase_t flags) in ofw_dma_map_sync_out_data() argument
131 err = dma_map_nocoherent_sync_out_data(dev, data, size, dma_handle, flags); in ofw_dma_map_sync_out_data()
135 err = dma_map_coherent_sync_out_data(dev, data, size, dma_handle, flags); in ofw_dma_map_sync_out_data()
138 if (!err && dma_handle) in ofw_dma_map_sync_out_data()
140 *dma_handle = ofw_addr_cpu2dma(dev, *dma_handle); in ofw_dma_map_sync_out_data()
148 rt_ubase_t dma_handle, rt_ubase_t flags) in ofw_dma_map_sync_in_data() argument
150 dma_handle = ofw_addr_dma2cpu(dev, dma_handle); in ofw_dma_map_sync_in_data()
154 return dma_map_nocoherent_sync_in_data(dev, out_data, size, dma_handle, flags); in ofw_dma_map_sync_in_data()
157 return dma_map_coherent_sync_in_data(dev, out_data, size, dma_handle, flags); in ofw_dma_map_sync_in_data()
316 rt_ubase_t *dma_handle, rt_ubase_t flags) in dma_alloc() argument
352 *dma_handle = dma_pool_alloc(pool, size); in dma_alloc()
354 if (*dma_handle && !(flags & RT_DMA_F_NOMAP)) in dma_alloc()
358 dma_buffer = rt_ioremap_nocache((void *)*dma_handle, size); in dma_alloc()
362 dma_buffer = rt_ioremap_cached((void *)*dma_handle, size); in dma_alloc()
367 dma_pool_free(pool, *dma_handle, size); in dma_alloc()
374 else if (*dma_handle) in dma_alloc()
376 dma_buffer = (void *)*dma_handle; in dma_alloc()
388 void *cpu_addr, rt_ubase_t dma_handle, rt_ubase_t flags) in dma_free() argument
396 if (dma_handle >= pool->region.start && in dma_free()
397 dma_handle <= pool->region.end) in dma_free()
401 dma_pool_free(pool, dma_handle, size); in dma_free()
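The dma_alloc() matches above show the generic path reserving space from a DMA pool and, unless RT_DMA_F_NOMAP is set, mapping the physical range into kernel space with rt_ioremap_nocache() or rt_ioremap_cached(); dma_free() returns the handle to whichever pool's region contains it. A compressed sketch of that allocate-then-map flow, with the pool type name assumed and the error handling abridged:

    /* Sketch of the pool-allocate-then-map flow; dma_pool_alloc()/free() and
     * RT_DMA_F_NOMAP are taken from the match list, the rest is assumed. */
    static void *sketch_dma_alloc(struct rt_dma_pool *pool, rt_size_t size,
                                  rt_ubase_t *dma_handle, rt_ubase_t flags)
    {
        void *dma_buffer = RT_NULL;

        *dma_handle = dma_pool_alloc(pool, size);

        if (!*dma_handle)
        {
            return RT_NULL;
        }

        if (flags & RT_DMA_F_NOMAP)
        {
            /* Caller wants the raw physical address, no kernel mapping */
            dma_buffer = (void *)*dma_handle;
        }
        else
        {
            /* Map the range uncached so CPU and device see the same data;
             * a cached mapping is used instead when the flags ask for it */
            dma_buffer = rt_ioremap_nocache((void *)*dma_handle, size);

            if (!dma_buffer)
            {
                dma_pool_free(pool, *dma_handle, size);
            }
        }

        return dma_buffer;
    }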
411 rt_ubase_t *dma_handle, rt_ubase_t flags) in rt_dma_alloc() argument
438 if (dma_handle) in rt_dma_alloc()
440 *dma_handle = dma_handle_s; in rt_dma_alloc()
447 void *cpu_addr, rt_ubase_t dma_handle, rt_ubase_t flags) in rt_dma_free() argument
460 ops->free(dev, size, cpu_addr, dma_handle, flags); in rt_dma_free()
464 dma_free(dev, size, cpu_addr, dma_handle, flags); in rt_dma_free()
469 rt_ubase_t *dma_handle, rt_ubase_t flags) in rt_dma_sync_out_data() argument
483 if (dma_handle) in rt_dma_sync_out_data()
485 *dma_handle = dma_handle_s; in rt_dma_sync_out_data()
492 rt_ubase_t dma_handle, rt_ubase_t flags) in rt_dma_sync_in_data() argument
503 err = ops->sync_in_data(dev, out_data, size, dma_handle, flags); in rt_dma_sync_in_data()
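The rt_dma_* functions at the bottom are the public entry points: they dispatch to the device's installed ops table when one exists and fall back to the generic helpers otherwise. A hedged usage sketch for a driver doing one long-lived allocation plus one streaming transfer, with the full prototypes inferred from the argument lists in this listing (they may not match the real header exactly):

    /* Usage sketch; dev is the driver's struct rt_device *, flags of 0 are
     * assumed to select the default coherent behaviour. */
    static rt_err_t sketch_driver_xfer(struct rt_device *dev, void *pkt, rt_size_t len)
    {
        rt_ubase_t ring_handle, pkt_handle;
        rt_err_t err;

        /* Long-lived, device-visible ring buffer */
        void *ring = rt_dma_alloc(dev, 4096, &ring_handle, 0);

        if (!ring)
        {
            return -RT_ENOMEM;
        }

        /* Streaming TX: make pkt visible to the device, get its bus address */
        err = rt_dma_sync_out_data(dev, pkt, len, &pkt_handle, 0);

        if (!err)
        {
            /* ... program the device with pkt_handle, wait for completion ... */

            /* RX direction: make the device's writes visible to the CPU */
            err = rt_dma_sync_in_data(dev, pkt, len, pkt_handle, 0);
        }

        rt_dma_free(dev, 4096, ring, ring_handle, 0);

        return err;
    }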