/drivers/media/usb/uvc/

uvc_ctrl.c
   406  mapping->set(mapping, sizeof(data_in), &data_in, data_out);  in uvc_mapping_set_s32()
  1162  if ((*mapping == NULL || (*mapping)->id > map->id) &&  in __uvc_find_control()
  1648  mapping->id, uvc_map_get_name(mapping), ret);  in __uvc_query_v4l2_ctrl()
  2518  return mapping->set(mapping, size, data,  in uvc_mapping_set_xctrl_compound()
  2957  map = kmemdup(mapping, sizeof(*mapping), GFP_KERNEL);  in __uvc_ctrl_add_mapping()
  2974  if (mapping->menu_mapping && mapping->menu_mask) {  in __uvc_ctrl_add_mapping()
  2982  if (mapping->menu_names && mapping->menu_mask) {  in __uvc_ctrl_add_mapping()
  3040  uvc_map_get_name(mapping), mapping->id);  in uvc_ctrl_add_mapping()
  3078  mapping->offset + mapping->size > ctrl->info.size * 8) {  in uvc_ctrl_add_mapping()
  3087  uvc_map_get_name(mapping), mapping->id);  in uvc_ctrl_add_mapping()
  [all …]
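
One hit worth a second look: the bounds check at line 3078 mixes units on purpose. ctrl->info.size is the control payload size in bytes, while mapping->offset and mapping->size describe a bit range, hence the multiply by 8. A minimal restatement of that check (the helper name is mine, not the driver's):

    #include <linux/types.h>

    /* A mapping covers bits [offset, offset + size) of a control whose
     * payload is payload_bytes long; reject it if it reaches past the end. */
    static bool uvc_mapping_fits(u32 offset, u32 size, u32 payload_bytes)
    {
        return offset + size <= payload_bytes * 8;
    }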
|
/drivers/gpu/drm/tegra/

uapi.c
    21  host1x_bo_put(mapping->bo);  in tegra_drm_mapping_release()
    23  kfree(mapping);  in tegra_drm_mapping_release()
   206  mapping = kzalloc(sizeof(*mapping), GFP_KERNEL);  in tegra_drm_ioctl_channel_map()
   207  if (!mapping) {  in tegra_drm_ioctl_channel_map()
   220  if (!mapping->bo) {  in tegra_drm_ioctl_channel_map()
   243  mapping->map = host1x_bo_pin(mapping_dev, mapping->bo, direction, NULL);  in tegra_drm_ioctl_channel_map()
   249  mapping->iova = mapping->map->phys;  in tegra_drm_ioctl_channel_map()
   250  mapping->iova_end = mapping->iova + host1x_to_tegra_bo(mapping->bo)->gem.size;  in tegra_drm_ioctl_channel_map()
   266  kfree(mapping);  in tegra_drm_ioctl_channel_map()
   287  mapping = xa_erase(&context->mappings, args->mapping);  in tegra_drm_ioctl_channel_unmap()
  [all …]
|
submit.c
   148  struct tegra_drm_mapping *mapping;  in tegra_drm_mapping_get() (local)
   152  mapping = xa_load(&context->mappings, id);  in tegra_drm_mapping_get()
   153  if (mapping)  in tegra_drm_mapping_get()
   154  kref_get(&mapping->ref);  in tegra_drm_mapping_get()
   158  return mapping;  in tegra_drm_mapping_get()
   282  struct tegra_drm_mapping *mapping;  in submit_process_bufs() (local)
   290  mapping = tegra_drm_mapping_get(context, buf->mapping);  in submit_process_bufs()
   291  if (!mapping) {  in submit_process_bufs()
   299  tegra_drm_mapping_put(mapping);  in submit_process_bufs()
   303  mappings[i].mapping = mapping;  in submit_process_bufs()
  [all …]
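
tegra_drm_mapping_get() above is the canonical look-up-and-take-a-reference idiom: the xa_load() and the kref_get() happen under the same lock, so the mapping cannot be freed between the two. A self-contained sketch of the pattern, assuming a mutex protects the xarray; all my_* names are hypothetical, not the Tegra types:

    #include <linux/kref.h>
    #include <linux/mutex.h>
    #include <linux/slab.h>
    #include <linux/xarray.h>

    struct my_mapping {
        struct kref ref;
        /* ... payload ... */
    };

    struct my_context {
        struct mutex lock;
        struct xarray mappings;    /* id -> struct my_mapping * */
    };

    static void my_mapping_release(struct kref *ref)
    {
        kfree(container_of(ref, struct my_mapping, ref));
    }

    /* Lookup and kref_get() under one lock: no window in which the
     * final put could free the object we are about to reference. */
    static struct my_mapping *my_mapping_get(struct my_context *ctx, u32 id)
    {
        struct my_mapping *mapping;

        mutex_lock(&ctx->lock);
        mapping = xa_load(&ctx->mappings, id);
        if (mapping)
            kref_get(&mapping->ref);
        mutex_unlock(&ctx->lock);

        return mapping;
    }

    static void my_mapping_put(struct my_mapping *mapping)
    {
        if (mapping)
            kref_put(&mapping->ref, my_mapping_release);
    }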
|
/drivers/gpu/drm/panfrost/

panfrost_gem.c
   100  mapping = iter;  in panfrost_gem_mapping_get()
   106  return mapping;  in panfrost_gem_mapping_get()
   112  if (mapping->active)  in panfrost_gem_teardown_mapping()
   113  panfrost_mmu_unmap(mapping);  in panfrost_gem_teardown_mapping()
   130  kfree(mapping);  in panfrost_gem_mapping_release()
   135  if (!mapping)  in panfrost_gem_mapping_put()
   159  mapping = kzalloc(sizeof(*mapping), GFP_KERNEL);  in panfrost_gem_open()
   160  if (!mapping)  in panfrost_gem_open()
   166  mapping->obj = bo;  in panfrost_gem_open()
   181  ret = drm_mm_insert_node_generic(&mapping->mmu->mm, &mapping->mmnode,  in panfrost_gem_open()
  [all …]
|
panfrost_perfcnt.c
    30  struct panfrost_gem_mapping *mapping;  (member)
    54  gpuva = pfdev->perfcnt->mapping->mmnode.start << PAGE_SHIFT;  in panfrost_perfcnt_dump_locked()
   102  perfcnt->mapping = panfrost_gem_mapping_get(to_panfrost_bo(&bo->base),  in panfrost_perfcnt_enable_locked()
   104  if (!perfcnt->mapping) {  in panfrost_perfcnt_enable_locked()
   135  as = panfrost_mmu_as_get(pfdev, perfcnt->mapping->mmu);  in panfrost_perfcnt_enable_locked()
   172  panfrost_gem_mapping_put(perfcnt->mapping);  in panfrost_perfcnt_enable_locked()
   200  drm_gem_vunmap(&perfcnt->mapping->obj->base.base, &map);  in panfrost_perfcnt_disable_locked()
   202  panfrost_gem_close(&perfcnt->mapping->obj->base.base, file_priv);  in panfrost_perfcnt_disable_locked()
   203  panfrost_mmu_as_put(pfdev, perfcnt->mapping->mmu);  in panfrost_perfcnt_disable_locked()
   204  panfrost_gem_mapping_put(perfcnt->mapping);  in panfrost_perfcnt_disable_locked()
  [all …]
|
panfrost_mmu.c
   435  if (WARN_ON(mapping->active))  in panfrost_mmu_map()
   445  mmu_map_sg(pfdev, mapping->mmu, mapping->mmnode.start << PAGE_SHIFT,  in panfrost_mmu_map()
   447  mapping->active = true;  in panfrost_mmu_map()
   462  if (WARN_ON(!mapping->active))  in panfrost_mmu_unmap()
   466  mapping->mmu->as, iova, len);  in panfrost_mmu_unmap()
   484  mapping->active = false;  in panfrost_mmu_unmap()
   531  kref_get(&mapping->refcount);  in addr_to_mapping()
   539  return mapping;  in addr_to_mapping()
   550  struct address_space *mapping;  in panfrost_mmu_map_fault_addr() (local)
   604  mapping = bo->base.base.filp->f_mapping;  in panfrost_mmu_map_fault_addr()
  [all …]
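
Both halves of the panfrost map/unmap pair guard against misuse with an active flag and WARN_ON(): mapping twice or unmapping something never mapped complains loudly instead of corrupting MMU state. Just that guard, reduced to a sketch (struct and names hypothetical):

    #include <linux/bug.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    struct my_mapping {
        bool active;    /* true while MMU entries exist */
    };

    static int my_mmu_map(struct my_mapping *mapping)
    {
        if (WARN_ON(mapping->active))    /* double map is a driver bug */
            return -EBUSY;

        /* ... program the GPU MMU here ... */
        mapping->active = true;
        return 0;
    }

    static void my_mmu_unmap(struct my_mapping *mapping)
    {
        if (WARN_ON(!mapping->active))   /* unmap without a prior map */
            return;

        /* ... invalidate the GPU MMU entries here ... */
        mapping->active = false;
    }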
|
/drivers/gpu/drm/etnaviv/

etnaviv_mmu.c
   129  WARN_ON(mapping->use);  in etnaviv_iommu_reap_mapping()
   133  mapping->context = NULL;  in etnaviv_iommu_reap_mapping()
   283  mapping->iova = iova;  in etnaviv_iommu_map_gem()
   291  node = &mapping->vram_node;  in etnaviv_iommu_map_gem()
   300  mapping->iova = node->start;  in etnaviv_iommu_map_gem()
   320  WARN_ON(mapping->use);  in etnaviv_iommu_unmap_gem()
   325  if (!mapping->context) {  in etnaviv_iommu_unmap_gem()
   402  if (mapping->use > 0) {  in etnaviv_iommu_get_suballoc_va()
   403  mapping->use++;  in etnaviv_iommu_get_suballoc_va()
   439  mapping->use = 1;  in etnaviv_iommu_get_suballoc_va()
  [all …]
|
etnaviv_gem.c
   222  return mapping;  in etnaviv_gem_get_vram_mapping()
   234  mapping->use -= 1;  in etnaviv_gem_mapping_unreference()
   251  if (mapping) {  in etnaviv_gem_mapping_get()
   270  if (mapping)  in etnaviv_gem_mapping_get()
   289  if (!mapping) {  in etnaviv_gem_mapping_get()
   290  mapping = kzalloc(sizeof(*mapping), GFP_KERNEL);  in etnaviv_gem_mapping_get()
   291  if (!mapping) {  in etnaviv_gem_mapping_get()
   302  mapping->use = 1;  in etnaviv_gem_mapping_get()
   308  kfree(mapping);  in etnaviv_gem_mapping_get()
   320  return mapping;  in etnaviv_gem_mapping_get()
  [all …]
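
etnaviv counts mapping users with a plain use field under the object lock instead of a kref; etnaviv_gem_mapping_get() reuses an existing mapping when one matches and allocates otherwise. A compressed sketch of that get-or-create flow, with hypothetical find_mapping() and map_gem() helpers standing in for the driver's lookup and IOMMU-map steps:

    #include <linux/mutex.h>
    #include <linux/slab.h>

    struct my_context;

    struct my_obj {
        struct mutex lock;
    };

    struct my_vram_mapping {
        struct my_context *context;
        unsigned int use;    /* guarded by my_obj.lock */
    };

    /* assumed helpers standing in for the driver's own: */
    struct my_vram_mapping *find_mapping(struct my_obj *obj,
                                         struct my_context *ctx);
    int map_gem(struct my_context *ctx, struct my_obj *obj,
                struct my_vram_mapping *mapping);

    static struct my_vram_mapping *
    vram_mapping_get(struct my_obj *obj, struct my_context *ctx)
    {
        struct my_vram_mapping *mapping;

        mutex_lock(&obj->lock);
        mapping = find_mapping(obj, ctx);
        if (mapping) {
            mapping->use += 1;           /* fast path: reuse */
            goto out_unlock;
        }

        mapping = kzalloc(sizeof(*mapping), GFP_KERNEL);
        if (!mapping)
            goto out_unlock;

        mapping->context = ctx;
        mapping->use = 1;
        if (map_gem(ctx, obj, mapping)) {
            kfree(mapping);
            mapping = NULL;
        }
    out_unlock:
        mutex_unlock(&obj->lock);
        return mapping;
    }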
|
etnaviv_cmdbuf.c
    63  struct etnaviv_vram_mapping *mapping,  in etnaviv_cmdbuf_suballoc_map() (argument)
    66  return etnaviv_iommu_get_suballoc_va(context, mapping, memory_base,  in etnaviv_cmdbuf_suballoc_map()
    71  struct etnaviv_vram_mapping *mapping)  in etnaviv_cmdbuf_suballoc_unmap() (argument)
    73  etnaviv_iommu_put_suballoc_va(context, mapping);  in etnaviv_cmdbuf_suballoc_unmap()
   136  struct etnaviv_vram_mapping *mapping)  in etnaviv_cmdbuf_get_va() (argument)
   138  return mapping->iova + buf->suballoc_offset;  in etnaviv_cmdbuf_get_va()
|
/drivers/gpu/drm/exynos/

exynos_drm_dma.c
    66  ret = arm_iommu_attach_device(subdrv_dev, priv->mapping);  in drm_iommu_attach_device()
    68  ret = iommu_attach_device(priv->mapping, subdrv_dev);  in drm_iommu_attach_device()
    92  iommu_detach_device(priv->mapping, subdrv_dev);  in drm_iommu_detach_device()
   109  if (!priv->mapping) {  in exynos_drm_register_dma()
   110  void *mapping = NULL;  in exynos_drm_register_dma() (local)
   113  mapping = arm_iommu_create_mapping(dev,  in exynos_drm_register_dma()
   116  mapping = iommu_get_domain_for_dev(priv->dma_dev);  in exynos_drm_register_dma()
   118  if (!mapping)  in exynos_drm_register_dma()
   120  priv->mapping = mapping;  in exynos_drm_register_dma()
   140  arm_iommu_release_mapping(priv->mapping);  in exynos_drm_cleanup_dma()
  [all …]
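
The two attach calls at lines 66 and 68 are the same logical step compiled two ways: with CONFIG_ARM_DMA_USE_IOMMU the driver goes through the ARM dma-mapping glue, otherwise through the generic IOMMU API. The void *mapping local at line 110 is what lets one field hold either a struct dma_iommu_mapping * or a struct iommu_domain *. A sketch of just the branch, assuming the pointer is kept as void * the way the driver does:

    #include <linux/device.h>
    #include <linux/iommu.h>
    #if defined(CONFIG_ARM_DMA_USE_IOMMU)
    #include <asm/dma-iommu.h>
    #endif

    static int my_attach(void *mapping, struct device *subdrv_dev)
    {
    #if defined(CONFIG_ARM_DMA_USE_IOMMU)
        /* mapping is a struct dma_iommu_mapping * on this path */
        return arm_iommu_attach_device(subdrv_dev, mapping);
    #else
        /* ...and a struct iommu_domain * on this one */
        return iommu_attach_device(mapping, subdrv_dev);
    #endif
    }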
|
/drivers/sh/clk/

core.c
   340  struct clk_mapping *mapping = clk->mapping;  in clk_establish_mapping() (local)
   345  if (!mapping) {  in clk_establish_mapping()
   361  mapping = clkp->mapping;  in clk_establish_mapping()
   362  BUG_ON(!mapping);  in clk_establish_mapping()
   368  if (!mapping->base && mapping->phys) {  in clk_establish_mapping()
   371  mapping->base = ioremap(mapping->phys, mapping->len);  in clk_establish_mapping()
   378  kref_get(&mapping->ref);  in clk_establish_mapping()
   381  clk->mapping = mapping;  in clk_establish_mapping()
   394  iounmap(mapping->base);  in clk_destroy_mapping()
   399  struct clk_mapping *mapping = clk->mapping;  in clk_teardown_mapping() (local)
  [all …]
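
Lines 368-381 show the whole lifetime trick: the first clock to use a mapping ioremap()s it, later clocks just take a reference, and the kref release unmaps. A minimal sketch, assuming struct clk_mapping carries exactly the fields referenced above (phys, base, len, ref):

    #include <linux/errno.h>
    #include <linux/io.h>
    #include <linux/kref.h>

    struct clk_mapping {
        phys_addr_t   phys;
        void __iomem *base;
        unsigned long len;
        struct kref   ref;
    };

    /* First user maps the registers; every user takes a reference. */
    static int mapping_establish(struct clk_mapping *mapping)
    {
        if (!mapping->base && mapping->phys) {
            mapping->base = ioremap(mapping->phys, mapping->len);
            if (!mapping->base)
                return -ENXIO;
        }
        kref_get(&mapping->ref);
        return 0;
    }

    /* kref release: the last teardown unmaps. */
    static void mapping_destroy(struct kref *kref)
    {
        struct clk_mapping *mapping =
            container_of(kref, struct clk_mapping, ref);

        iounmap(mapping->base);
    }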
|
/drivers/net/wireless/marvell/mwifiex/

util.h
    57  struct mwifiex_dma_mapping *mapping)  in mwifiex_store_mapping() (argument)
    61  memcpy(&cb->dma_mapping, mapping, sizeof(*mapping));  in mwifiex_store_mapping()
    65  struct mwifiex_dma_mapping *mapping)  in mwifiex_get_mapping() (argument)
    69  memcpy(mapping, &cb->dma_mapping, sizeof(*mapping));  in mwifiex_get_mapping()
    74  struct mwifiex_dma_mapping mapping;  in MWIFIEX_SKB_DMA_ADDR() (local)
    76  mwifiex_get_mapping(skb, &mapping);  in MWIFIEX_SKB_DMA_ADDR()
    78  return mapping.addr;  in MWIFIEX_SKB_DMA_ADDR()
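
These helpers stash the DMA descriptor inside the skb's 48-byte control buffer, so the address travels with the packet instead of needing a side table. A self-contained sketch of the same idea with the cb layout reduced to just the mapping field (my_* names are illustrative):

    #include <linux/build_bug.h>
    #include <linux/skbuff.h>
    #include <linux/string.h>
    #include <linux/types.h>

    struct my_dma_mapping {
        dma_addr_t addr;
        size_t len;
    };

    struct my_cb {
        struct my_dma_mapping dma_mapping;
    };

    static inline void my_store_mapping(struct sk_buff *skb,
                                        struct my_dma_mapping *mapping)
    {
        struct my_cb *cb = (struct my_cb *)skb->cb;

        /* the whole struct must fit in skb->cb */
        BUILD_BUG_ON(sizeof(struct my_cb) > sizeof(skb->cb));
        memcpy(&cb->dma_mapping, mapping, sizeof(*mapping));
    }

    static inline dma_addr_t my_skb_dma_addr(struct sk_buff *skb)
    {
        struct my_cb *cb = (struct my_cb *)skb->cb;

        return cb->dma_mapping.addr;
    }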
|
/drivers/gpu/drm/amd/amdgpu/

amdgpu_vm.c
  1491  kfree(mapping);  in amdgpu_vm_free_mapping()
  1556  &sync, mapping->start, mapping->last,  in amdgpu_vm_clear_freed()
  1852  mapping = kmalloc(sizeof(*mapping), GFP_KERNEL);  in amdgpu_vm_bo_map()
  1853  if (!mapping)  in amdgpu_vm_bo_map()
  1899  mapping = kmalloc(sizeof(*mapping), GFP_KERNEL);  in amdgpu_vm_bo_replace_map()
  1900  if (!mapping)  in amdgpu_vm_bo_replace_map()
  1905  kfree(mapping);  in amdgpu_vm_bo_replace_map()
  2130  for (mapping = amdgpu_vm_it_iter_first(&vm->va, 0, U64_MAX); mapping;  in amdgpu_vm_bo_trace_cs()
  2131  mapping = amdgpu_vm_it_iter_next(mapping, 0, U64_MAX)) {  in amdgpu_vm_bo_trace_cs()
  2132  if (mapping->bo_va && mapping->bo_va->base.bo) {  in amdgpu_vm_bo_trace_cs()
  [all …]
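
The amdgpu_vm_it_iter_first()/_iter_next() walk at lines 2130-2131 is not hand-written; it comes from the kernel's generic interval-tree template, which generates a typed iterator from a struct's start/last fields. A sketch of how such an iterator is stamped out and used, with all my_* names hypothetical:

    #include <linux/interval_tree_generic.h>
    #include <linux/kernel.h>
    #include <linux/rbtree.h>

    struct my_mapping {
        struct rb_node rb;
        u64 start, last;        /* inclusive range, as in amdgpu */
        u64 __subtree_last;     /* maintained by the template */
    };

    static inline u64 my_start(struct my_mapping *m) { return m->start; }
    static inline u64 my_last(struct my_mapping *m)  { return m->last; }

    /* generates my_it_insert/remove/iter_first/iter_next */
    INTERVAL_TREE_DEFINE(struct my_mapping, rb, u64, __subtree_last,
                         my_start, my_last, static, my_it)

    static void my_walk_all(struct rb_root_cached *root)
    {
        struct my_mapping *m;

        /* [0, U64_MAX] overlaps everything, so this visits each node */
        for (m = my_it_iter_first(root, 0, U64_MAX); m;
             m = my_it_iter_next(m, 0, U64_MAX)) {
            /* ... trace or process the mapping ... */
        }
    }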
|
amdgpu_trace.h
   244  TP_ARGS(bo_va, mapping),
   255  __entry->start = mapping->start;
   256  __entry->last = mapping->last;
   258  __entry->flags = mapping->flags;
   268  TP_ARGS(bo_va, mapping),
   279  __entry->start = mapping->start;
   280  __entry->last = mapping->last;
   291  TP_ARGS(mapping),
   309  TP_ARGS(mapping)
   314  TP_ARGS(mapping)
  [all …]
|
/drivers/net/ethernet/broadcom/bnxt/

bnxt_xdp.c
    28  dma_addr_t mapping, u32 len,  in bnxt_xmit_bd() (argument)
    56  txbd->tx_bd_haddr = cpu_to_le64(mapping);  in bnxt_xmit_bd()
   109  dma_addr_t mapping, u32 len,  in __bnxt_xmit_xdp_redirect() (argument)
   117  dma_unmap_addr_set(tx_buf, mapping, mapping);  in __bnxt_xmit_xdp_redirect()
   142  dma_unmap_addr(tx_buf, mapping),  in bnxt_tx_int_xdp()
   189  dma_addr_t mapping;  in bnxt_xdp_buff_init() (local)
   196  mapping = rx_buf->mapping - bp->rx_dma_offset;  in bnxt_xdp_buff_init()
   232  dma_addr_t mapping;  in bnxt_rx_xdp() (local)
   270  mapping = rx_buf->mapping - bp->rx_dma_offset;  in bnxt_rx_xdp()
   338  dma_addr_t mapping;  in bnxt_xdp_xmit() (local)
  [all …]
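
Lines 56, 117 and 142 tell one story: the DMA address goes into the hardware descriptor and is also recorded in the software tx buffer with dma_unmap_addr_set(), so the completion path can unmap without re-deriving it. A generic sketch of that bookkeeping (the tx-buffer layout is illustrative, not bnxt's):

    #include <linux/device.h>
    #include <linux/dma-mapping.h>

    struct my_tx_buf {
        void *data;    /* e.g. the frame this buffer carries */
        /* expand to real fields only on configs that need unmap info */
        DEFINE_DMA_UNMAP_ADDR(mapping);
        DEFINE_DMA_UNMAP_LEN(len);
    };

    static void my_tx_save(struct my_tx_buf *tx_buf,
                           dma_addr_t mapping, unsigned int len)
    {
        dma_unmap_addr_set(tx_buf, mapping, mapping);
        dma_unmap_len_set(tx_buf, len, len);
    }

    /* completion path: unmap using the recorded address and length */
    static void my_tx_complete(struct device *dev, struct my_tx_buf *tx_buf)
    {
        dma_unmap_single(dev, dma_unmap_addr(tx_buf, mapping),
                         dma_unmap_len(tx_buf, len), DMA_TO_DEVICE);
    }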
|
/drivers/gpu/drm/i915/gem/

i915_gem_shmem.c
    40  mapping_clear_unevictable(mapping);  in shmem_sg_free_table()
    65  struct address_space *mapping,  in shmem_sg_alloc_table() (argument)
    95  mapping_set_unevictable(mapping);  in shmem_sg_alloc_table()
   112  folio = shmem_read_folio_gfp(mapping, i, gfp);  in shmem_sg_alloc_table()
   134  gfp = mapping_gfp_mask(mapping);  in shmem_sg_alloc_table()
   184  shmem_sg_free_table(st, mapping, false, false);  in shmem_sg_alloc_table()
   186  mapping_clear_unevictable(mapping);  in shmem_sg_alloc_table()
   264  shmem_sg_free_table(st, mapping, false, false);  in shmem_get_pages()
   530  struct address_space *mapping;  in shmem_object_init() (local)
   546  mapping = obj->base.filp->f_mapping;  in shmem_object_init()
  [all …]
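
Here mapping is not a DMA address but the shmem file's address_space: shmem_sg_alloc_table() pulls the object's backing store in folio by folio (line 112), with the gfp mask taken from the mapping itself (line 134). A reduced sketch of that per-folio step, with the sg-table assembly elided:

    #include <linux/err.h>
    #include <linux/pagemap.h>
    #include <linux/shmem_fs.h>

    static int my_pin_folio(struct address_space *mapping, pgoff_t index)
    {
        struct folio *folio;

        /* read (or allocate) the folio backing page 'index' */
        folio = shmem_read_folio_gfp(mapping, index,
                                     mapping_gfp_mask(mapping));
        if (IS_ERR(folio))
            return PTR_ERR(folio);

        /* ... add the folio's pages to the sg table ... */
        folio_put(folio);
        return 0;
    }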
|
/drivers/pci/

devres.c
   540  void __iomem *mapping;  in pcim_iomap() (local)
   552  if (!mapping)  in pcim_iomap()
   554  res->baseaddr = mapping;  in pcim_iomap()
   560  return mapping;  in pcim_iomap()
   683  void __iomem *mapping;  in pcim_iomap_regions() (local)
   690  if (IS_ERR(mapping)) {  in pcim_iomap_regions()
   691  ret = PTR_ERR(mapping);  in pcim_iomap_regions()
   837  void __iomem *mapping;  in pcim_iomap_range() (local)
   848  if (!mapping) {  in pcim_iomap_range()
   854  res->baseaddr = mapping;  in pcim_iomap_range()
  [all …]
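
These are the internals behind the managed PCI iomap helpers; the point for a driver is that the mapping is a devres resource, released automatically at unbind, so there is no explicit iounmap(). A minimal hypothetical probe using it:

    #include <linux/errno.h>
    #include <linux/pci.h>

    static int my_probe(struct pci_dev *pdev, const struct pci_device_id *id)
    {
        void __iomem *regs;
        int ret;

        ret = pcim_enable_device(pdev);   /* managed enable */
        if (ret)
            return ret;

        regs = pcim_iomap(pdev, 0, 0);    /* BAR 0, full length */
        if (!regs)
            return -ENOMEM;

        /* use readl()/writel() on regs; cleanup is automatic */
        return 0;
    }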
|
/drivers/gpu/host1x/

bus.c
   889  if (mapping->bo == bo && mapping->direction == dir) {  in host1x_bo_pin()
   890  kref_get(&mapping->ref);  in host1x_bo_pin()
   897  if (IS_ERR(mapping))  in host1x_bo_pin()
   900  spin_lock(&mapping->bo->lock);  in host1x_bo_pin()
   906  mapping->cache = cache;  in host1x_bo_pin()
   911  kref_get(&mapping->ref);  in host1x_bo_pin()
   918  return mapping;  in host1x_bo_pin()
   930  if (mapping->cache)  in __host1x_bo_unpin()
   931  list_del(&mapping->entry);  in __host1x_bo_unpin()
   934  list_del(&mapping->list);  in __host1x_bo_unpin()
  [all …]
|
/drivers/net/ethernet/sunplus/

spl2sw_desc.c
    23  rx_desc[j].addr1 = rx_skbinfo[j].mapping;  in spl2sw_rx_descs_flush()
    47  if (comm->tx_temp_skb_info[i].mapping) {  in spl2sw_tx_descs_clean()
    48  dma_unmap_single(&comm->pdev->dev, comm->tx_temp_skb_info[i].mapping,  in spl2sw_tx_descs_clean()
    50  comm->tx_temp_skb_info[i].mapping = 0;  in spl2sw_tx_descs_clean()
    79  dma_unmap_single(&comm->pdev->dev, rx_skbinfo[j].mapping,  in spl2sw_rx_descs_clean()
    83  rx_skbinfo[j].mapping = 0;  in spl2sw_rx_descs_clean()
   128  u32 mapping;  in spl2sw_rx_descs_init() (local)
   145  mapping = dma_map_single(&comm->pdev->dev, skb->data,  in spl2sw_rx_descs_init()
   148  if (dma_mapping_error(&comm->pdev->dev, mapping))  in spl2sw_rx_descs_init()
   151  rx_skbinfo[j].mapping = mapping;  in spl2sw_rx_descs_init()
  [all …]
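
The rx-init path maps each fresh receive buffer and checks dma_mapping_error() before trusting the address (lines 145-151); teardown unmaps and zeroes the stored handle so it doubles as an "is mapped" flag (lines 47-50). A generic sketch of the map step, using the portable dma_addr_t where this driver stores a u32 (apparently fine on its 32-bit platform):

    #include <linux/dma-mapping.h>
    #include <linux/skbuff.h>

    static int my_rx_map_skb(struct device *dev, struct sk_buff *skb,
                             unsigned int buf_len, dma_addr_t *mapping)
    {
        *mapping = dma_map_single(dev, skb->data, buf_len,
                                  DMA_FROM_DEVICE);
        if (dma_mapping_error(dev, *mapping))
            return -ENOMEM;    /* never hand a bad address to hardware */

        return 0;
    }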
|
/drivers/net/ethernet/dec/tulip/

interrupt.c
    70  dma_addr_t mapping;  in tulip_refill_rx() (local)
    85  tp->rx_buffers[entry].mapping = mapping;  in tulip_refill_rx()
   214  tp->rx_buffers[entry].mapping,  in tulip_poll()
   246  tp->rx_buffers[entry].mapping,  in tulip_poll()
   444  tp->rx_buffers[entry].mapping,  in tulip_rx()
   476  tp->rx_buffers[entry].mapping,  in tulip_rx()
   480  tp->rx_buffers[entry].mapping = 0;  in tulip_rx()
   606  if (tp->tx_buffers[entry].mapping)  in tulip_interrupt()
   608  tp->tx_buffers[entry].mapping,  in tulip_interrupt()
   640  tp->tx_buffers[entry].mapping,  in tulip_interrupt()
  [all …]
|
/drivers/nvdimm/

region_devs.c
   714  nd_mapping = &nd_region->mapping[n];  in mappingN()
   728  static DEVICE_ATTR_RO(mapping##idx)
  1010  struct nd_mapping_desc *mapping = &ndr_desc->mapping[i];  in nd_region_create() (local)
  1011  struct nvdimm *nvdimm = mapping->nvdimm;  in nd_region_create()
  1013  if ((mapping->start | mapping->size) % PAGE_SIZE) {  in nd_region_create()
  1054  struct nd_mapping_desc *mapping = &ndr_desc->mapping[i];  in nd_region_create() (local)
  1055  struct nvdimm *nvdimm = mapping->nvdimm;  in nd_region_create()
  1057  nd_region->mapping[i].nvdimm = nvdimm;  in nd_region_create()
  1058  nd_region->mapping[i].start = mapping->start;  in nd_region_create()
  1059  nd_region->mapping[i].size = mapping->size;  in nd_region_create()
  [all …]
|
/drivers/net/xen-netback/

hash.c
   328  memset(vif->hash.mapping[vif->hash.mapping_sel], 0,  in xenvif_set_hash_mapping_size()
   337  u32 *mapping = vif->hash.mapping[!vif->hash.mapping_sel];  in xenvif_set_hash_mapping() (local)
   343  .len = len * sizeof(*mapping),  in xenvif_set_hash_mapping()
   348  len > XEN_PAGE_SIZE / sizeof(*mapping))  in xenvif_set_hash_mapping()
   351  copy_op[0].dest.u.gmfn = virt_to_gfn(mapping + off);  in xenvif_set_hash_mapping()
   352  copy_op[0].dest.offset = xen_offset_in_page(mapping + off);  in xenvif_set_hash_mapping()
   363  memcpy(mapping, vif->hash.mapping[vif->hash.mapping_sel],  in xenvif_set_hash_mapping()
   364  vif->hash.size * sizeof(*mapping));  in xenvif_set_hash_mapping()
   375  if (mapping[off++] >= vif->num_queues)  in xenvif_set_hash_mapping()
   431  const u32 *mapping = vif->hash.mapping[vif->hash.mapping_sel];  in xenvif_dump_hash_info()) (local)
  [all …]
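
vif->hash.mapping is double-buffered: mapping_sel picks the live table, updates are built in the other copy (line 337), seeded from the live one (lines 363-364), validated (line 375), and only then published by flipping the selector. A generic sketch of that update scheme; the table length and names are assumptions, and the real code fills the shadow via a grant copy from the frontend rather than a plain memcpy:

    #include <linux/string.h>
    #include <linux/types.h>

    #define MY_MAP_LEN 64    /* illustrative table size */

    struct my_hash_state {
        u32 mapping[2][MY_MAP_LEN];
        unsigned int mapping_sel;    /* index of the live table */
    };

    static void my_update_mapping(struct my_hash_state *h,
                                  const u32 *entries,
                                  unsigned int off, unsigned int len)
    {
        u32 *shadow = h->mapping[!h->mapping_sel];

        /* start from the live table, then apply the update window */
        memcpy(shadow, h->mapping[h->mapping_sel], sizeof(h->mapping[0]));
        memcpy(shadow + off, entries, len * sizeof(*shadow));

        /* ... validate every entry against the queue count here ... */

        /* publish: readers see the new table on their next lookup */
        h->mapping_sel = !h->mapping_sel;
    }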
|
/drivers/net/wwan/iosm/

iosm_ipc_pcie.c
   455  size_t size, dma_addr_t *mapping, int direction)  in ipc_pcie_addr_map() (argument)
   458  *mapping = dma_map_single(&ipc_pcie->pci->dev, data, size,  in ipc_pcie_addr_map()
   460  if (dma_mapping_error(&ipc_pcie->pci->dev, *mapping)) {  in ipc_pcie_addr_map()
   469  dma_addr_t mapping, int direction)  in ipc_pcie_addr_unmap() (argument)
   471  if (!mapping)  in ipc_pcie_addr_unmap()
   474  dma_unmap_single(&ipc_pcie->pci->dev, mapping, size, direction);  in ipc_pcie_addr_unmap()
   492  IPC_CB(skb)->mapping = 0;  in ipc_pcie_alloc_local_skb()
   498  gfp_t flags, dma_addr_t *mapping,  in ipc_pcie_alloc_skb() (argument)
   517  IPC_CB(skb)->mapping = *mapping;  in ipc_pcie_alloc_skb()
   529  ipc_pcie_addr_unmap(ipc_pcie, IPC_CB(skb)->len, IPC_CB(skb)->mapping,  in ipc_pcie_kfree_skb()
  [all …]
|
/drivers/gpu/drm/vmwgfx/

vmwgfx_page_dirty.c
    72  struct address_space *mapping = vbo->tbo.bdev->dev_mapping;  in vmw_bo_dirty_scan_pagetable() (local)
    76  (mapping,  in vmw_bo_dirty_scan_pagetable()
    88  wp_shared_mapping_range(mapping,  in vmw_bo_dirty_scan_pagetable()
    90  clean_record_shared_mapping_range(mapping,  in vmw_bo_dirty_scan_pagetable()
   110  struct address_space *mapping = vbo->tbo.bdev->dev_mapping;  in vmw_bo_dirty_scan_mkwrite() (local)
   175  struct address_space *mapping = vbo->tbo.bdev->dev_mapping;  in vmw_bo_dirty_pre_unmap() (local)
   180  wp_shared_mapping_range(mapping, start + offset, end - start);  in vmw_bo_dirty_pre_unmap()
   181  clean_record_shared_mapping_range(mapping, start + offset,  in vmw_bo_dirty_pre_unmap()
   199  struct address_space *mapping = vbo->tbo.bdev->dev_mapping;  in vmw_bo_dirty_unmap() (local)
   242  struct address_space *mapping = vbo->tbo.bdev->dev_mapping;  in vmw_bo_dirty_add() (local)
  [all …]
|
/drivers/infiniband/ulp/ipoib/

ipoib_ib.c
    95  u64 mapping[IPOIB_UD_RX_SG])  in ipoib_ud_dma_unmap_rx()
    97  ib_dma_unmap_single(priv->ca, mapping[0],  in ipoib_ud_dma_unmap_rx()
   128  u64 *mapping;  in ipoib_alloc_rx_skb() (local)
   142  mapping = priv->rx_ring[id].mapping;  in ipoib_alloc_rx_skb()
   179  u64 mapping[IPOIB_UD_RX_SG];  in ipoib_ib_handle_rx_wc() (local)
   205  memcpy(mapping, priv->rx_ring[wr_id].mapping,  in ipoib_ib_handle_rx_wc()
   220  ipoib_ud_dma_unmap_rx(priv, mapping);  in ipoib_ib_handle_rx_wc()
   279  u64 *mapping = tx_req->mapping;  in ipoib_dma_map_tx() (local)
   295  mapping[i + off] = ib_dma_map_page(ca,  in ipoib_dma_map_tx()
   322  u64 *mapping = tx_req->mapping;  in ipoib_dma_unmap_tx() (local)
  [all …]
|