Lines Matching refs:ops

121 		const struct dma_map_ops *ops)  in dma_go_direct()  argument
126 if (likely(!ops)) in dma_go_direct()
144 const struct dma_map_ops *ops) in dma_alloc_direct() argument
146 return dma_go_direct(dev, dev->coherent_dma_mask, ops); in dma_alloc_direct()
150 const struct dma_map_ops *ops) in dma_map_direct() argument
152 return dma_go_direct(dev, *dev->dma_mask, ops); in dma_map_direct()
159 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_page_attrs() local
167 if (dma_map_direct(dev, ops) || in dma_map_page_attrs()
173 addr = ops->map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
186 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_page_attrs() local
189 if (dma_map_direct(dev, ops) || in dma_unmap_page_attrs()
195 ops->unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
204 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_map_sg_attrs() local
212 if (dma_map_direct(dev, ops) || in __dma_map_sg_attrs()
218 ents = ops->map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
306 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_sg_attrs() local
311 if (dma_map_direct(dev, ops) || in dma_unmap_sg_attrs()
316 else if (ops->unmap_sg) in dma_unmap_sg_attrs()
317 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
324 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_resource() local
332 if (dma_map_direct(dev, ops)) in dma_map_resource()
336 else if (ops->map_resource) in dma_map_resource()
337 addr = ops->map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
348 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_resource() local
351 if (dma_map_direct(dev, ops)) in dma_unmap_resource()
355 else if (ops->unmap_resource) in dma_unmap_resource()
356 ops->unmap_resource(dev, addr, size, dir, attrs); in dma_unmap_resource()
366 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_sync_single_for_cpu() local
369 if (dma_map_direct(dev, ops)) in __dma_sync_single_for_cpu()
373 else if (ops->sync_single_for_cpu) in __dma_sync_single_for_cpu()
374 ops->sync_single_for_cpu(dev, addr, size, dir); in __dma_sync_single_for_cpu()
383 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_sync_single_for_device() local
386 if (dma_map_direct(dev, ops)) in __dma_sync_single_for_device()
390 else if (ops->sync_single_for_device) in __dma_sync_single_for_device()
391 ops->sync_single_for_device(dev, addr, size, dir); in __dma_sync_single_for_device()
400 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_sync_sg_for_cpu() local
403 if (dma_map_direct(dev, ops)) in __dma_sync_sg_for_cpu()
407 else if (ops->sync_sg_for_cpu) in __dma_sync_sg_for_cpu()
408 ops->sync_sg_for_cpu(dev, sg, nelems, dir); in __dma_sync_sg_for_cpu()
417 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_sync_sg_for_device() local
420 if (dma_map_direct(dev, ops)) in __dma_sync_sg_for_device()
424 else if (ops->sync_sg_for_device) in __dma_sync_sg_for_device()
425 ops->sync_sg_for_device(dev, sg, nelems, dir); in __dma_sync_sg_for_device()
433 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_need_sync() local
435 if (dma_map_direct(dev, ops)) in __dma_need_sync()
466 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_setup_need_sync() local
468 if (dma_map_direct(dev, ops) || use_dma_iommu(dev)) in dma_setup_need_sync()
475 else if (!ops->sync_single_for_device && !ops->sync_single_for_cpu && in dma_setup_need_sync()
476 !ops->sync_sg_for_device && !ops->sync_sg_for_cpu) in dma_setup_need_sync()
504 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_sgtable_attrs() local
506 if (dma_alloc_direct(dev, ops)) in dma_get_sgtable_attrs()
512 if (!ops->get_sgtable) in dma_get_sgtable_attrs()
514 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs); in dma_get_sgtable_attrs()
544 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_can_mmap() local
546 if (dma_alloc_direct(dev, ops)) in dma_can_mmap()
550 return ops->mmap != NULL; in dma_can_mmap()
571 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_mmap_attrs() local
573 if (dma_alloc_direct(dev, ops)) in dma_mmap_attrs()
579 if (!ops->mmap) in dma_mmap_attrs()
581 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); in dma_mmap_attrs()
587 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_required_mask() local
589 if (dma_alloc_direct(dev, ops)) in dma_get_required_mask()
595 if (ops->get_required_mask) in dma_get_required_mask()
596 return ops->get_required_mask(dev); in dma_get_required_mask()
613 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_alloc_attrs() local
635 if (dma_alloc_direct(dev, ops)) { in dma_alloc_attrs()
639 } else if (ops->alloc) { in dma_alloc_attrs()
640 cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
657 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_free_attrs() local
676 if (dma_alloc_direct(dev, ops)) in dma_free_attrs()
680 else if (ops->free) in dma_free_attrs()
681 ops->free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
688 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_alloc_pages() local
698 if (dma_alloc_direct(dev, ops)) in __dma_alloc_pages()
702 if (!ops->alloc_pages_op) in __dma_alloc_pages()
704 return ops->alloc_pages_op(dev, size, dma_handle, dir, gfp); in __dma_alloc_pages()
726 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_free_pages() local
729 if (dma_alloc_direct(dev, ops)) in __dma_free_pages()
733 else if (ops->free_pages) in __dma_free_pages()
734 ops->free_pages(dev, size, page, dma_handle, dir); in __dma_free_pages()
860 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_supported() local
863 if (WARN_ON(ops)) in dma_supported()
872 if (ops) { in dma_supported()
873 if (!ops->dma_supported) in dma_supported()
875 return ops->dma_supported(dev, mask); in dma_supported()
883 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_pci_p2pdma_supported() local
892 return !ops; in dma_pci_p2pdma_supported()
933 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_addressing_limited() local
939 if (unlikely(ops) || use_dma_iommu(dev)) in __dma_addressing_limited()
964 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_max_mapping_size() local
967 if (dma_map_direct(dev, ops)) in dma_max_mapping_size()
971 else if (ops && ops->max_mapping_size) in dma_max_mapping_size()
972 size = ops->max_mapping_size(dev); in dma_max_mapping_size()
980 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_opt_mapping_size() local
985 else if (ops && ops->opt_mapping_size) in dma_opt_mapping_size()
986 size = ops->opt_mapping_size(); in dma_opt_mapping_size()
994 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_merge_boundary() local
999 if (!ops || !ops->get_merge_boundary) in dma_get_merge_boundary()
1002 return ops->get_merge_boundary(dev); in dma_get_merge_boundary()
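
Every hit above follows the same dispatch pattern: the entry point fetches the per-device dma_map_ops with get_dma_ops(), takes the direct-mapping fast path when dma_map_direct()/dma_alloc_direct() reports there are no ops to go through, and otherwise calls the corresponding ops-> method if the implementation provides one. The standalone C sketch below only models that shape outside the kernel; every fake_*-prefixed name is an invented stand-in for illustration, not a kernel API.

/*
 * Standalone model of the ops-vs-direct dispatch seen in the listing above.
 * All fake_* names are invented for illustration; only the control flow
 * mirrors the shape of dma_map_page_attrs() and dma_map_direct().
 */
#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

typedef uint64_t fake_dma_addr_t;

struct fake_device;

struct fake_dma_map_ops {
	/* Optional per-bus mapping hook, analogous to ops->map_page. */
	fake_dma_addr_t (*map_page)(struct fake_device *dev, void *page,
				    size_t offset, size_t size);
};

struct fake_device {
	const struct fake_dma_map_ops *dma_ops;	/* NULL => direct mapping */
};

static const struct fake_dma_map_ops *fake_get_dma_ops(struct fake_device *dev)
{
	return dev->dma_ops;
}

/* Mirrors the role of dma_map_direct(): no ops means the direct path. */
static int fake_map_direct(const struct fake_dma_map_ops *ops)
{
	return ops == NULL;
}

static fake_dma_addr_t fake_direct_map_page(void *page, size_t offset)
{
	/* Pretend the bus address is simply the CPU address. */
	return (fake_dma_addr_t)(uintptr_t)page + offset;
}

/* Shape of dma_map_page_attrs(): direct fast path, else ops->map_page(). */
static fake_dma_addr_t fake_dma_map_page(struct fake_device *dev, void *page,
					 size_t offset, size_t size)
{
	const struct fake_dma_map_ops *ops = fake_get_dma_ops(dev);

	if (fake_map_direct(ops))
		return fake_direct_map_page(page, offset);
	return ops->map_page(dev, page, offset, size);
}

static fake_dma_addr_t fake_iommu_map_page(struct fake_device *dev, void *page,
					   size_t offset, size_t size)
{
	(void)dev; (void)page; (void)size;
	/* Stand-in for an IOMMU-backed implementation. */
	return 0x1000 + offset;
}

int main(void)
{
	static const struct fake_dma_map_ops iommu_ops = {
		.map_page = fake_iommu_map_page,
	};
	struct fake_device direct_dev = { .dma_ops = NULL };
	struct fake_device iommu_dev = { .dma_ops = &iommu_ops };
	char buf[64];

	printf("direct: %#llx\n",
	       (unsigned long long)fake_dma_map_page(&direct_dev, buf, 8, 16));
	printf("ops:    %#llx\n",
	       (unsigned long long)fake_dma_map_page(&iommu_dev, buf, 8, 16));
	return 0;
}

The same split explains the two helper predicates in the listing: dma_alloc_direct() checks the coherent_dma_mask for the allocation paths, while dma_map_direct() checks *dev->dma_mask for the streaming paths, and both short-circuit to the direct implementation whenever ops is absent.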