| /drivers/gpu/drm/virtio/ |
| virtgpu_prime.c |
      58  return drm_gem_map_dma_buf(attach, dir);  in virtgpu_gem_map_dma_buf()
      78  .attach = virtio_dma_buf_attach,
     216  dma_buf_detach(dmabuf, attach);  in virtgpu_dma_buf_free_obj()
     247  ret = dma_buf_pin(attach);  in virtgpu_dma_buf_init_obj()
     258  params.size = attach->dmabuf->size;  in virtgpu_dma_buf_init_obj()
     264  dma_buf_unpin(attach);  in virtgpu_dma_buf_init_obj()
     270  dma_buf_unpin(attach);  in virtgpu_dma_buf_init_obj()
     298  struct dma_buf_attachment *attach;  in virtgpu_gem_prime_import() local
     329  if (IS_ERR(attach)) {  in virtgpu_gem_prime_import()
     331  return ERR_CAST(attach);  in virtgpu_gem_prime_import()
     [all …]
|
| /drivers/gpu/drm/xe/ |
| xe_dma_buf.c |
      26  struct dma_buf_attachment *attach)  in xe_dma_buf_attach() argument
      30  if (attach->peer2peer &&  in xe_dma_buf_attach()
      32  attach->peer2peer = false;  in xe_dma_buf_attach()
      42  struct dma_buf_attachment *attach)  in xe_dma_buf_detach() argument
      92  struct dma_buf *dma_buf = attach->dmabuf;  in xe_dma_buf_map()
     102  if (!attach->peer2peer)  in xe_dma_buf_map()
     177  .attach = xe_dma_buf_attach,
     264  struct dma_buf_attachment *attach;  in xe_gem_prime_import() local
     297  if (IS_ERR(attach)) {  in xe_gem_prime_import()
     298  obj = ERR_CAST(attach);  in xe_gem_prime_import()
     [all …]
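Both xe here and amdgpu further down gate peer-to-peer transfers at attach time: if the importer advertised P2P support but the exporter cannot serve this buffer over the bus, the exporter clears `attach->peer2peer` so the map path falls back to system memory. A minimal sketch of that exporter-side check; `my_can_do_p2p()` is a hypothetical stand-in for the driver-specific capability test:

```c
#include <linux/dma-buf.h>

/* Hypothetical: real drivers check PCIe P2P reachability, VRAM placement, etc. */
static bool my_can_do_p2p(struct dma_buf *dmabuf,
			  struct dma_buf_attachment *attach)
{
	return false;
}

/* Exporter .attach callback: veto P2P when the buffer can't support it. */
static int my_dma_buf_attach(struct dma_buf *dmabuf,
			     struct dma_buf_attachment *attach)
{
	if (attach->peer2peer && !my_can_do_p2p(dmabuf, attach))
		attach->peer2peer = false;

	return 0;
}
```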
|
| /drivers/dma-buf/ |
| dma-buf.c |
     962  attach = kzalloc(sizeof(*attach), GFP_KERNEL);  in dma_buf_dynamic_attach()
     963  if (!attach)  in dma_buf_dynamic_attach()
     974  ret = dmabuf->ops->attach(dmabuf, attach);  in dma_buf_dynamic_attach()
    1110  if (WARN_ON(!attach || !attach->dmabuf))  in dma_buf_map_attachment()
    1116  ret = attach->dmabuf->ops->pin(attach);  in dma_buf_map_attachment()
    1169  attach->dmabuf->ops->unpin(attach);  in dma_buf_map_attachment()
    1192  if (WARN_ON(!attach || !attach->dmabuf))  in dma_buf_map_attachment_unlocked()
    1219  if (WARN_ON(!attach || !attach->dmabuf || !sg_table))  in dma_buf_unmap_attachment()
    1228  attach->dmabuf->ops->unpin(attach);  in dma_buf_unmap_attachment()
    1248  if (WARN_ON(!attach || !attach->dmabuf || !sg_table))  in dma_buf_unmap_attachment_unlocked()
    [all …]
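The core matches cover the dynamic-attachment lifecycle: `dma_buf_dynamic_attach()` allocates the attachment and calls the exporter's `.attach`, and the locked map/unmap paths pin and unpin on behalf of importers that did not supply a `move_notify`. A sketch of a dynamic importer that does supply `move_notify` and therefore maps under the reservation lock itself; the `my_*` names are hypothetical:

```c
#include <linux/dma-buf.h>
#include <linux/dma-direction.h>
#include <linux/dma-resv.h>
#include <linux/err.h>

/*
 * Called by the exporter (with dmabuf->resv held) when the backing storage
 * moves; the importer must invalidate any cached mapping here.
 */
static void my_move_notify(struct dma_buf_attachment *attach)
{
	/* tear down device page tables that reference the old mapping */
}

static const struct dma_buf_attach_ops my_attach_ops = {
	.allow_peer2peer = true,
	.move_notify = my_move_notify,
};

/* Attach dynamically, then map under the reservation lock. */
static struct sg_table *my_map(struct dma_buf *dmabuf, struct device *dev)
{
	struct dma_buf_attachment *attach;
	struct sg_table *sgt;

	attach = dma_buf_dynamic_attach(dmabuf, dev, &my_attach_ops, NULL);
	if (IS_ERR(attach))
		return ERR_CAST(attach);

	dma_resv_lock(dmabuf->resv, NULL);
	sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
	dma_resv_unlock(dmabuf->resv);
	if (IS_ERR(sgt))
		dma_buf_detach(dmabuf, attach);

	/* valid only until move_notify fires; a real driver caches attach too */
	return sgt;
}
```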
|
| /drivers/gpu/drm/amd/amdgpu/ |
| amdgpu_dma_buf.c |
      78  struct dma_buf_attachment *attach)  in amdgpu_dma_buf_attach() argument
      87  attach->peer2peer = false;  in amdgpu_dma_buf_attach()
     103  struct dma_buf *dmabuf = attach->dmabuf;  in amdgpu_dma_buf_pin()
     121  if (!attach->peer2peer)  in amdgpu_dma_buf_pin()
     178  attach->peer2peer) {  in amdgpu_dma_buf_map()
     204  dma_buf_attach_adev(attach), bo)))  in amdgpu_dma_buf_map()
     289  .attach = amdgpu_dma_buf_attach,
     467  struct dma_buf_attachment *attach;  in amdgpu_gem_prime_import() local
     488  if (IS_ERR(attach)) {  in amdgpu_gem_prime_import()
     490  return ERR_CAST(attach);  in amdgpu_gem_prime_import()
     [all …]
|
| /drivers/infiniband/core/ |
| umem_dmabuf.c |
      24  dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_map_pages()
      32  sgt = dma_buf_map_attachment(umem_dmabuf->attach,  in ib_umem_dmabuf_map_pages()
     158  umem_dmabuf->attach = dma_buf_dynamic_attach(  in ib_umem_dmabuf_get_with_dma_device()
     163  if (IS_ERR(umem_dmabuf->attach)) {  in ib_umem_dmabuf_get_with_dma_device()
     164  ret = ERR_CAST(umem_dmabuf->attach);  in ib_umem_dmabuf_get_with_dma_device()
     217  err = dma_buf_pin(umem_dmabuf->attach);  in ib_umem_dmabuf_get_pinned_with_dma_device()
     225  dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_get_pinned_with_dma_device()
     230  dma_buf_unpin(umem_dmabuf->attach);  in ib_umem_dmabuf_get_pinned_with_dma_device()
     232  dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_get_pinned_with_dma_device()
     257  dma_buf_unpin(umem_dmabuf->attach);  in ib_umem_dmabuf_revoke()
     [all …]
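The RDMA umem code shows the pinned variant of the dynamic API: devices that cannot fault or handle invalidation pin the attachment so `move_notify` never fires, then map once. A condensed sketch of that sequence, assuming `attach` came from `dma_buf_dynamic_attach()` (pinning is a dynamic-attachment operation):

```c
#include <linux/dma-buf.h>
#include <linux/dma-direction.h>
#include <linux/dma-resv.h>
#include <linux/err.h>

/* Pin the buffer under the reservation lock, then map it. */
static struct sg_table *my_pinned_map(struct dma_buf_attachment *attach)
{
	struct sg_table *sgt;
	int err;

	dma_resv_lock(attach->dmabuf->resv, NULL);

	err = dma_buf_pin(attach);	/* blocks exporter migration */
	if (err) {
		sgt = ERR_PTR(err);
		goto out_unlock;
	}

	sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
	if (IS_ERR(sgt))
		dma_buf_unpin(attach);

out_unlock:
	dma_resv_unlock(attach->dmabuf->resv);
	return sgt;
}
```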
|
| /drivers/media/common/videobuf2/ |
| videobuf2-vmalloc.c |
     224  attach = kzalloc(sizeof(*attach), GFP_KERNEL);  in vb2_vmalloc_dmabuf_ops_attach()
     225  if (!attach)  in vb2_vmalloc_dmabuf_ops_attach()
     228  sgt = &attach->sgt;  in vb2_vmalloc_dmabuf_ops_attach()
     231  kfree(attach);  in vb2_vmalloc_dmabuf_ops_attach()
     239  kfree(attach);  in vb2_vmalloc_dmabuf_ops_attach()
     246  attach->dma_dir = DMA_NONE;  in vb2_vmalloc_dmabuf_ops_attach()
     257  if (!attach)  in vb2_vmalloc_dmabuf_ops_detach()
     260  sgt = &attach->sgt;  in vb2_vmalloc_dmabuf_ops_detach()
     266  kfree(attach);  in vb2_vmalloc_dmabuf_ops_detach()
     276  sgt = &attach->sgt;  in vb2_vmalloc_dmabuf_ops_map()
     [all …]
|
| videobuf2-dma-sg.c |
     378  attach = kzalloc(sizeof(*attach), GFP_KERNEL);  in vb2_dma_sg_dmabuf_ops_attach()
     379  if (!attach)  in vb2_dma_sg_dmabuf_ops_attach()
     382  sgt = &attach->sgt;  in vb2_dma_sg_dmabuf_ops_attach()
     388  kfree(attach);  in vb2_dma_sg_dmabuf_ops_attach()
     400  attach->dma_dir = DMA_NONE;  in vb2_dma_sg_dmabuf_ops_attach()
     401  dbuf_attach->priv = attach;  in vb2_dma_sg_dmabuf_ops_attach()
     412  if (!attach)  in vb2_dma_sg_dmabuf_ops_detach()
     415  sgt = &attach->sgt;  in vb2_dma_sg_dmabuf_ops_detach()
     421  kfree(attach);  in vb2_dma_sg_dmabuf_ops_detach()
     431  sgt = &attach->sgt;  in vb2_dma_sg_dmabuf_ops_map()
     [all …]
|
| videobuf2-dma-contig.c |
     327  attach = kzalloc(sizeof(*attach), GFP_KERNEL);  in vb2_dc_dmabuf_ops_attach()
     328  if (!attach)  in vb2_dc_dmabuf_ops_attach()
     331  sgt = &attach->sgt;  in vb2_dc_dmabuf_ops_attach()
     337  kfree(attach);  in vb2_dc_dmabuf_ops_attach()
     349  attach->dma_dir = DMA_NONE;  in vb2_dc_dmabuf_ops_attach()
     350  dbuf_attach->priv = attach;  in vb2_dc_dmabuf_ops_attach()
     361  if (!attach)  in vb2_dc_dmabuf_ops_detach()
     364  sgt = &attach->sgt;  in vb2_dc_dmabuf_ops_detach()
     377  kfree(attach);  in vb2_dc_dmabuf_ops_detach()
     387  sgt = &attach->sgt;  in vb2_dc_dmabuf_ops_map()
     [all …]
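All three videobuf2 allocators above share one exporter-side attach pattern: duplicate the buffer's scatterlist into a per-attachment `sg_table`, mark it unmapped with `DMA_NONE`, and stash it in `dbuf_attach->priv` so each importer can be mapped independently. A condensed sketch, assuming (hypothetically) that the exporter keeps its master scatterlist in `dbuf->priv`:

```c
#include <linux/dma-buf.h>
#include <linux/dma-direction.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Per-attachment state, mirroring the vb2 allocators above. */
struct my_attachment {
	struct sg_table sgt;
	enum dma_data_direction dma_dir;
};

static int my_dmabuf_attach(struct dma_buf *dbuf,
			    struct dma_buf_attachment *dbuf_attach)
{
	struct sg_table *buf_sgt = dbuf->priv;	/* assumption: master sgt */
	struct my_attachment *attach;
	struct scatterlist *rd, *wr;
	int ret, i;

	attach = kzalloc(sizeof(*attach), GFP_KERNEL);
	if (!attach)
		return -ENOMEM;

	/* Copy the master scatterlist into this attachment's own table. */
	ret = sg_alloc_table(&attach->sgt, buf_sgt->orig_nents, GFP_KERNEL);
	if (ret) {
		kfree(attach);
		return -ENOMEM;
	}

	rd = buf_sgt->sgl;
	wr = attach->sgt.sgl;
	for (i = 0; i < attach->sgt.orig_nents; i++) {
		sg_set_page(wr, sg_page(rd), rd->length, rd->offset);
		rd = sg_next(rd);
		wr = sg_next(wr);
	}

	attach->dma_dir = DMA_NONE;	/* nothing mapped yet */
	dbuf_attach->priv = attach;
	return 0;
}
```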
|
| /drivers/xen/ |
| gntdev-dmabuf.c |
      48  struct dma_buf_attachment *attach;  member
     253  attach->priv = NULL;  in dmabuf_exp_ops_detach()
     266  attach->dev);  in dmabuf_exp_ops_map_dma_buf()
     340  .attach = dmabuf_exp_ops_attach,
     583  if (IS_ERR(attach)) {  in dmabuf_imp_to_refs()
     584  ret = ERR_CAST(attach);  in dmabuf_imp_to_refs()
     588  gntdev_dmabuf->u.imp.attach = attach;  in dmabuf_imp_to_refs()
     655  dma_buf_detach(dma_buf, attach);  in dmabuf_imp_to_refs()
     700  attach = gntdev_dmabuf->u.imp.attach;  in dmabuf_imp_release()
     705  dma_buf = attach->dmabuf;  in dmabuf_imp_release()
     [all …]
|
| /drivers/iommu/iommufd/ |
| device.c |
     363  if (attach)  in iommufd_group_device_num()
     471  if (attach && attach->hwpt && !attach->hwpt->pasid_compat)  in iommufd_hwpt_pasid_compat()
     621  if (!attach) {  in iommufd_hw_pagetable_attach()
     622  attach = kzalloc(sizeof(*attach), GFP_KERNEL);  in iommufd_hw_pagetable_attach()
     623  if (!attach) {  in iommufd_hw_pagetable_attach()
     677  kfree(attach);  in iommufd_hw_pagetable_attach()
     696  if (!attach) {  in iommufd_hw_pagetable_detach()
     701  hwpt = attach->hwpt;  in iommufd_hw_pagetable_detach()
     708  kfree(attach);  in iommufd_hw_pagetable_detach()
     794  if (!attach) {  in iommufd_device_do_replace()
     [all …]
|
| /drivers/vfio/ |
| device_cdev.c |
     197  struct vfio_device_attach_iommufd_pt attach;  in vfio_df_ioctl_attach_pt() local
     204  if (copy_from_user(&attach, arg, minsz))  in vfio_df_ioctl_attach_pt()
     207  if (attach.argsz < minsz)  in vfio_df_ioctl_attach_pt()
     210  if (attach.flags & ~VFIO_DEVICE_ATTACH_PASID)  in vfio_df_ioctl_attach_pt()
     213  if (attach.flags & VFIO_DEVICE_ATTACH_PASID) {  in vfio_df_ioctl_attach_pt()
     220  if (attach.argsz < xend)  in vfio_df_ioctl_attach_pt()
     223  if (copy_from_user((void *)&attach + minsz,  in vfio_df_ioctl_attach_pt()
     229  if (attach.flags & VFIO_DEVICE_ATTACH_PASID)  in vfio_df_ioctl_attach_pt()
     231  attach.pasid,  in vfio_df_ioctl_attach_pt()
     232  &attach.pt_id);  in vfio_df_ioctl_attach_pt()
     [all …]
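This VFIO ioctl is a different kind of "attach", but the matches show the extensible-struct convention nicely: copy and validate the fixed header (`argsz`, `flags`) first, and only then copy the flag-dependent tail. A sketch with a hypothetical `my_attach_arg` layout standing in for the real UAPI struct:

```c
#include <linux/bits.h>
#include <linux/errno.h>
#include <linux/stddef.h>
#include <linux/types.h>
#include <linux/uaccess.h>

/* Hypothetical extensible ioctl payload in the style of the VFIO code above. */
struct my_attach_arg {
	__u32 argsz;	/* userspace tells us how big its struct is */
	__u32 flags;
	__u32 pt_id;
	__u32 pasid;	/* only present/valid with MY_ATTACH_PASID */
};

#define MY_ATTACH_PASID		BIT(0)

static int my_ioctl_attach(void __user *arg)
{
	const size_t minsz = offsetofend(struct my_attach_arg, pt_id);
	struct my_attach_arg attach;

	if (copy_from_user(&attach, arg, minsz))
		return -EFAULT;
	if (attach.argsz < minsz)
		return -EINVAL;
	if (attach.flags & ~MY_ATTACH_PASID)
		return -EINVAL;

	/* Flag set: the user struct must extend far enough to cover 'pasid'. */
	if (attach.flags & MY_ATTACH_PASID) {
		size_t xend = offsetofend(struct my_attach_arg, pasid);

		if (attach.argsz < xend)
			return -EINVAL;
		if (copy_from_user((void *)&attach + minsz,
				   arg + minsz, xend - minsz))
			return -EFAULT;
	}

	return 0;	/* a real handler would now attach to attach.pt_id */
}
```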
|
| /drivers/gpu/drm/omapdrm/ |
| omap_gem_dmabuf.c |
     100  struct dma_buf_attachment *attach;  in omap_gem_prime_import() local
     117  attach = dma_buf_attach(dma_buf, dev->dev);  in omap_gem_prime_import()
     118  if (IS_ERR(attach))  in omap_gem_prime_import()
     119  return ERR_CAST(attach);  in omap_gem_prime_import()
     123  sgt = dma_buf_map_attachment_unlocked(attach, DMA_TO_DEVICE);  in omap_gem_prime_import()
     135  obj->import_attach = attach;  in omap_gem_prime_import()
     140  dma_buf_unmap_attachment_unlocked(attach, sgt, DMA_TO_DEVICE);  in omap_gem_prime_import()
     142  dma_buf_detach(dma_buf, attach);  in omap_gem_prime_import()
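omapdrm is the textbook static importer: attach, map with the unlocked helper (which takes the reservation lock internally), and unwind in reverse order on failure. A sketch of that flow with the driver-object wrapping elided; `my_import()` is hypothetical:

```c
#include <linux/dma-buf.h>
#include <linux/dma-direction.h>
#include <linux/err.h>

static int my_import(struct device *dev, struct dma_buf *dma_buf)
{
	struct dma_buf_attachment *attach;
	struct sg_table *sgt;
	int ret;

	attach = dma_buf_attach(dma_buf, dev);	/* static: no move_notify */
	if (IS_ERR(attach))
		return PTR_ERR(attach);

	sgt = dma_buf_map_attachment_unlocked(attach, DMA_TO_DEVICE);
	if (IS_ERR(sgt)) {
		ret = PTR_ERR(sgt);
		goto fail_detach;
	}

	/* ... wrap sgt in a driver object; keep attach for teardown ... */
	return 0;

fail_detach:
	dma_buf_detach(dma_buf, attach);
	return ret;
}
```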
|
| /drivers/gpu/drm/ |
| drm_prime.c |
     641  struct dma_buf_attachment *attach)  in drm_gem_map_detach() argument
     831  .attach = drm_gem_map_attach,
     975  struct dma_buf_attachment *attach;  in drm_gem_prime_import_dev() local
     994  if (IS_ERR(attach))  in drm_gem_prime_import_dev()
     995  return ERR_CAST(attach);  in drm_gem_prime_import_dev()
    1011  obj->import_attach = attach;  in drm_gem_prime_import_dev()
    1019  dma_buf_detach(dma_buf, attach);  in drm_gem_prime_import_dev()
    1110  struct dma_buf_attachment *attach;  in drm_prime_gem_destroy() local
    1113  attach = obj->import_attach;  in drm_prime_gem_destroy()
    1116  dma_buf = attach->dmabuf;  in drm_prime_gem_destroy()
    [all …]
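drm_prime.c also shows the destroy side: `drm_prime_gem_destroy()` unmaps before detaching, and detaches before dropping the dma_buf reference, since the attachment keeps the dma_buf alive. A sketch of that teardown order; `my_import_destroy()` is hypothetical:

```c
#include <linux/dma-buf.h>
#include <linux/dma-direction.h>

static void my_import_destroy(struct dma_buf_attachment *attach,
			      struct sg_table *sgt)
{
	struct dma_buf *dma_buf = attach->dmabuf;

	if (sgt)	/* unmap first: the mapping references the attachment */
		dma_buf_unmap_attachment_unlocked(attach, sgt,
						  DMA_BIDIRECTIONAL);
	dma_buf_detach(dma_buf, attach);
	dma_buf_put(dma_buf);	/* drop the import's buffer reference last */
}
```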
|
| drm_gem_dma_helper.c |
     463  struct dma_buf_attachment *attach,  in drm_gem_dma_prime_import_sg_table() argument
     469  if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size)  in drm_gem_dma_prime_import_sg_table()
     473  dma_obj = __drm_gem_dma_create(dev, attach->dmabuf->size, true);  in drm_gem_dma_prime_import_sg_table()
     481  attach->dmabuf->size);  in drm_gem_dma_prime_import_sg_table()
     575  struct dma_buf_attachment *attach,  in drm_gem_dma_prime_import_sg_table_vmap() argument
     583  ret = dma_buf_vmap_unlocked(attach->dmabuf, &map);  in drm_gem_dma_prime_import_sg_table_vmap()
     589  obj = drm_gem_dma_prime_import_sg_table(dev, attach, sgt);  in drm_gem_dma_prime_import_sg_table_vmap()
     591  dma_buf_vunmap_unlocked(attach->dmabuf, &map);  in drm_gem_dma_prime_import_sg_table_vmap()
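The DMA helper refuses imports whose mapped scatterlist is not one contiguous span covering the whole buffer, using `drm_prime_get_contiguous_size()`. That check in isolation, as a sketch:

```c
#include <drm/drm_prime.h>
#include <linux/dma-buf.h>

/*
 * A DMA-contig driver can only consume an imported buffer if the mapped
 * scatterlist is a single contiguous block at least as large as the dma-buf.
 */
static int my_check_contiguous(struct dma_buf_attachment *attach,
			       struct sg_table *sgt)
{
	if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size)
		return -EINVAL;	/* exporter handed us a fragmented buffer */

	return 0;
}
```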
|
| /drivers/gpu/drm/tegra/ |
| gem.c |
      81  map->attach = dma_buf_attach(buf, dev);  in tegra_bo_pin()
      82  if (IS_ERR(map->attach)) {  in tegra_bo_pin()
      83  err = PTR_ERR(map->attach);  in tegra_bo_pin()
      89  dma_buf_detach(buf, map->attach);  in tegra_bo_pin()
     162  if (map->attach) {  in tegra_bo_unpin()
     165  dma_buf_detach(map->attach->dmabuf, map->attach);  in tegra_bo_unpin()
     459  struct dma_buf_attachment *attach;  in tegra_bo_import() local
     473  if (IS_ERR(attach)) {  in tegra_bo_import()
     474  err = PTR_ERR(attach);  in tegra_bo_import()
     488  bo->gem.import_attach = attach;  in tegra_bo_import()
     [all …]
|
| /drivers/gpu/drm/i915/gem/ |
| i915_gem_dmabuf.c |
      30  struct drm_i915_gem_object *obj = dma_buf_to_obj(attach->dmabuf);  in i915_gem_map_dma_buf()
      55  ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);  in i915_gem_map_dma_buf()
     167  struct dma_buf_attachment *attach)  in i915_gem_dmabuf_attach() argument
     196  struct dma_buf_attachment *attach)  in i915_gem_dmabuf_detach() argument
     204  .attach = i915_gem_dmabuf_attach,
     285  struct dma_buf_attachment *attach;  in i915_gem_prime_import() local
     307  attach = dma_buf_attach(dma_buf, dev->dev);  in i915_gem_prime_import()
     308  if (IS_ERR(attach))  in i915_gem_prime_import()
     309  return ERR_CAST(attach);  in i915_gem_prime_import()
     322  obj->base.import_attach = attach;  in i915_gem_prime_import()
     [all …]
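i915's `map_dma_buf` maps a prepared table with `dma_map_sgtable()` and `DMA_ATTR_SKIP_CPU_SYNC`, since the exporter handles CPU cache maintenance itself. A sketch of such a map callback, assuming (hypothetically) the table was built at attach time and stored in `attach->priv`:

```c
#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>

static struct sg_table *my_map_dma_buf(struct dma_buf_attachment *attach,
				       enum dma_data_direction dir)
{
	struct sg_table *sgt = attach->priv;	/* assumption: built in .attach */
	int ret;

	/* Skip CPU sync: this exporter does its own cache maintenance. */
	ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (ret)
		return ERR_PTR(ret);

	return sgt;
}
```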
|
| /drivers/iio/ |
| industrialio-buffer.c |
      53  struct dma_buf_attachment *attach;  member
    1562  struct dma_buf_attachment *attach = priv->attach;  in iio_buffer_dmabuf_release() local
    1572  dma_buf_detach(attach->dmabuf, attach);  in iio_buffer_dmabuf_release()
    1640  attach = priv->attach;  in iio_buffer_find_attachment()
    1645  if (attach)  in iio_buffer_find_attachment()
    1683  if (IS_ERR(attach)) {  in iio_buffer_attach_dmabuf()
    1684  err = PTR_ERR(attach);  in iio_buffer_attach_dmabuf()
    1704  priv->attach = attach;  in iio_buffer_attach_dmabuf()
    1849  if (IS_ERR(attach)) {  in iio_buffer_enqueue_dmabuf()
    1850  ret = PTR_ERR(attach);  in iio_buffer_enqueue_dmabuf()
    [all …]
|
| /drivers/gpu/drm/vmwgfx/ |
| vmwgfx_prime.c |
      45  struct dma_buf_attachment *attach)  in vmw_prime_map_attach() argument
      51  struct dma_buf_attachment *attach)  in vmw_prime_map_detach() argument
      55  static struct sg_table *vmw_prime_map_dma_buf(struct dma_buf_attachment *attach,  in vmw_prime_map_dma_buf() argument
      61  static void vmw_prime_unmap_dma_buf(struct dma_buf_attachment *attach,  in vmw_prime_unmap_dma_buf() argument
      68  .attach = vmw_prime_map_attach,
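vmwgfx wires up deliberately inert callbacks: its buffers are only shared within the same device, so cross-device attach simply fails. A sketch of such a stub `dma_buf_ops` table (all `my_*` names hypothetical):

```c
#include <linux/dma-buf.h>
#include <linux/err.h>

/* Refuse cross-device sharing outright. */
static int my_prime_attach(struct dma_buf *dma_buf,
			   struct dma_buf_attachment *attach)
{
	return -ENOSYS;
}

static void my_prime_detach(struct dma_buf *dma_buf,
			    struct dma_buf_attachment *attach)
{
}

/* Never reached in practice, since attach already failed. */
static struct sg_table *my_prime_map(struct dma_buf_attachment *attach,
				     enum dma_data_direction dir)
{
	return ERR_PTR(-ENOSYS);
}

static void my_prime_unmap(struct dma_buf_attachment *attach,
			   struct sg_table *sgt, enum dma_data_direction dir)
{
}

static const struct dma_buf_ops my_prime_dmabuf_ops = {
	.attach = my_prime_attach,
	.detach = my_prime_detach,
	.map_dma_buf = my_prime_map,
	.unmap_dma_buf = my_prime_unmap,
};
```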
|
| /drivers/gpu/drm/armada/ |
| armada_gem.c |
     389  struct drm_gem_object *obj = attach->dmabuf->priv;  in armada_gem_prime_map_dma_buf()
     419  if (dma_map_sgtable(attach->dev, sgt, dir, 0))  in armada_gem_prime_map_dma_buf()
     428  if (dma_map_sgtable(attach->dev, sgt, dir, 0))  in armada_gem_prime_map_dma_buf()
     455  struct drm_gem_object *obj = attach->dmabuf->priv;  in armada_gem_prime_unmap_dma_buf()
     460  dma_unmap_sgtable(attach->dev, sgt, dir, 0);  in armada_gem_prime_unmap_dma_buf()
     502  struct dma_buf_attachment *attach;  in armada_gem_prime_import() local
     517  attach = dma_buf_attach(buf, dev->dev);  in armada_gem_prime_import()
     518  if (IS_ERR(attach))  in armada_gem_prime_import()
     519  return ERR_CAST(attach);  in armada_gem_prime_import()
     523  dma_buf_detach(buf, attach);  in armada_gem_prime_import()
     [all …]
|
| /drivers/virtio/ |
| virtio_dma_buf.c |
      28  exp_info->ops->attach != &virtio_dma_buf_attach ||  in virtio_dma_buf_export()
      43  struct dma_buf_attachment *attach)  in virtio_dma_buf_attach() argument
      51  ret = ops->device_attach(dma_buf, attach);  in virtio_dma_buf_attach()
      65  return dma_buf->ops->attach == &virtio_dma_buf_attach;  in is_virtio_dma_buf()
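virtio makes its `.attach` callback mandatory at export time precisely so that `is_virtio_dma_buf()` can recognize its own buffers by pointer identity. The same trick in a hypothetical driver:

```c
#include <linux/dma-buf.h>

/* Every buffer this driver exports must use this exact .attach callback. */
static int my_dma_buf_attach(struct dma_buf *dma_buf,
			     struct dma_buf_attachment *attach)
{
	return 0;
}

/*
 * Comparing function pointers is enough to tell "one of ours" apart from
 * foreign dma-bufs, provided export enforces the callback above.
 */
static bool is_my_dma_buf(struct dma_buf *dma_buf)
{
	return dma_buf->ops->attach == &my_dma_buf_attach;
}
```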
|
| /drivers/accel/ivpu/ |
| ivpu_gem.c |
     186  struct dma_buf_attachment *attach;  in ivpu_gem_prime_import() local
     191  attach = dma_buf_attach(dma_buf, attach_dev);  in ivpu_gem_prime_import()
     192  if (IS_ERR(attach))  in ivpu_gem_prime_import()
     193  return ERR_CAST(attach);  in ivpu_gem_prime_import()
     197  sgt = dma_buf_map_attachment_unlocked(attach, DMA_BIDIRECTIONAL);  in ivpu_gem_prime_import()
     203  obj = drm_gem_shmem_prime_import_sg_table(dev, attach, sgt);  in ivpu_gem_prime_import()
     209  obj->import_attach = attach;  in ivpu_gem_prime_import()
     215  dma_buf_unmap_attachment_unlocked(attach, sgt, DMA_BIDIRECTIONAL);  in ivpu_gem_prime_import()
     217  dma_buf_detach(dma_buf, attach);  in ivpu_gem_prime_import()
|
| /drivers/nvdimm/ |
| claim.c |
      44  bool __nd_attach_ndns(struct device *dev, struct nd_namespace_common *attach,  in __nd_attach_ndns() argument
      47  struct nvdimm_bus *nvdimm_bus = walk_to_nvdimm_bus(&attach->dev);  in __nd_attach_ndns()
      49  if (attach->claim)  in __nd_attach_ndns()
      53  attach->claim = dev;  in __nd_attach_ndns()
      54  *_ndns = attach;  in __nd_attach_ndns()
      55  get_device(&attach->dev);  in __nd_attach_ndns()
|
| /drivers/accel/amdxdna/ |
| amdxdna_gem.c |
     384  .attach = drm_gem_map_attach,
     410  dma_buf_detach(abo->dma_buf, abo->attach);  in amdxdna_imported_obj_free()
     500  struct dma_buf_attachment *attach;  in amdxdna_gem_prime_import() local
     508  attach = dma_buf_attach(dma_buf, dev->dev);  in amdxdna_gem_prime_import()
     509  if (IS_ERR(attach)) {  in amdxdna_gem_prime_import()
     510  ret = PTR_ERR(attach);  in amdxdna_gem_prime_import()
     514  sgt = dma_buf_map_attachment_unlocked(attach, DMA_BIDIRECTIONAL);  in amdxdna_gem_prime_import()
     520  gobj = drm_gem_shmem_prime_import_sg_table(dev, attach, sgt);  in amdxdna_gem_prime_import()
     527  abo->attach = attach;  in amdxdna_gem_prime_import()
     533  dma_buf_unmap_attachment_unlocked(attach, sgt, DMA_BIDIRECTIONAL);  in amdxdna_gem_prime_import()
     [all …]
|
| /drivers/power/supply/ |
| mt6370-charger.c |
     100  int attach;  member
     260  unsigned int attach, usb_stat;  in mt6370_chg_bc12_work_func() local
     263  attach = priv->attach;  in mt6370_chg_bc12_work_func()
     265  switch (attach) {  in mt6370_chg_bc12_work_func()
     270  ret = mt6370_chg_field_set(priv, F_USBCHGEN, attach);  in mt6370_chg_bc12_work_func()
     429  val->intval = !!priv->attach;  in mt6370_chg_get_online()
     515  if (pwr_rdy == !!priv->attach) {  in mt6370_chg_set_online()
     521  priv->attach = pwr_rdy;  in mt6370_chg_set_online()
     795  priv->attach = MT6370_ATTACH_STAT_ATTACH_BC12_DONE;  in mt6370_attach_i_handler()
     903  priv->attach = MT6370_ATTACH_STAT_DETACH;  in mt6370_chg_probe()
|
| /drivers/net/ethernet/marvell/octeontx2/af/ |
| rvu.c |
    1536  blkaddr = attach->cpt_blkaddr ? attach->cpt_blkaddr :  in rvu_get_attach_blkaddr()
    1706  attach->hdr.pcifunc, attach);  in rvu_attach_from_same_block()
    1724  if (!attach->modify)  in rvu_mbox_handler_attach_resources()
    1735  if (attach->npalf)  in rvu_mbox_handler_attach_resources()
    1738  if (attach->nixlf)  in rvu_mbox_handler_attach_resources()
    1741  if (attach->sso) {  in rvu_mbox_handler_attach_resources()
    1750  attach->sso, attach);  in rvu_mbox_handler_attach_resources()
    1753  if (attach->ssow) {  in rvu_mbox_handler_attach_resources()
    1757  attach->ssow, attach);  in rvu_mbox_handler_attach_resources()
    1764  attach->timlfs, attach);  in rvu_mbox_handler_attach_resources()
    [all …]
|