| /drivers/gpu/drm/xe/ |
| xe_bo_evict.c |
|    24  spin_lock(&xe->pinned.lock);  in xe_bo_apply_to_pinned()
|    32  spin_unlock(&xe->pinned.lock);  in xe_bo_apply_to_pinned()
|    36  spin_lock(&xe->pinned.lock);  in xe_bo_apply_to_pinned()
|    44  spin_unlock(&xe->pinned.lock);  in xe_bo_apply_to_pinned()
|    47  spin_lock(&xe->pinned.lock);  in xe_bo_apply_to_pinned()
|    50  spin_unlock(&xe->pinned.lock);  in xe_bo_apply_to_pinned()
|    94  &xe->pinned.late.kernel_bo_present,  in xe_bo_notifier_unprepare_all_pinned()
|   179  &xe->pinned.early.evicted,  in xe_bo_evict_all()
|   262  &xe->pinned.late.external,  in xe_bo_restore_late()
|   278  &xe->pinned.late.external,  in xe_bo_pci_dev_remove_pinned()
|   [all …]
|
| xe_bo.c |
|  2292  spin_lock(&xe->pinned.lock);  in xe_bo_pin_external()
|  2293  list_add_tail(&bo->pinned_link, &xe->pinned.late.external);  in xe_bo_pin_external()
|  2294  spin_unlock(&xe->pinned.lock);  in xe_bo_pin_external()
|  2337  spin_lock(&xe->pinned.lock);  in xe_bo_pin()
|  2339  list_add_tail(&bo->pinned_link, &xe->pinned.late.kernel_bo_present);  in xe_bo_pin()
|  2341  list_add_tail(&bo->pinned_link, &xe->pinned.early.kernel_bo_present);  in xe_bo_pin()
|  2342  spin_unlock(&xe->pinned.lock);  in xe_bo_pin()
|  2376  spin_lock(&xe->pinned.lock);  in xe_bo_unpin_external()
|  2379  spin_unlock(&xe->pinned.lock);  in xe_bo_unpin_external()
|  2401  spin_lock(&xe->pinned.lock);  in xe_bo_unpin()
|  [all …]
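The two xe entries above revolve around one idea: the driver keeps device-wide lists of pinned buffer objects (kernel-internal and external) and guards every list_add_tail()/removal with the xe->pinned.lock spinlock. The sketch below shows that bookkeeping shape in isolation; the demo_* struct and function names are hypothetical stand-ins, not the real xe definitions.

/*
 * Minimal sketch of spinlock-protected pinned-BO list bookkeeping,
 * assuming the lock and lists were initialised at device creation.
 */
#include <linux/list.h>
#include <linux/spinlock.h>

struct demo_bo {
	struct list_head pinned_link;	/* node on one of the pinned lists */
};

struct demo_device {
	spinlock_t pinned_lock;		  /* protects the lists below */
	struct list_head pinned_kernel;	  /* kernel-internal pinned BOs */
	struct list_head pinned_external; /* userspace/dma-buf pinned BOs */
};

static void demo_bo_pin(struct demo_device *dev, struct demo_bo *bo, bool external)
{
	spin_lock(&dev->pinned_lock);
	list_add_tail(&bo->pinned_link,
		      external ? &dev->pinned_external : &dev->pinned_kernel);
	spin_unlock(&dev->pinned_lock);
}

static void demo_bo_unpin(struct demo_device *dev, struct demo_bo *bo)
{
	spin_lock(&dev->pinned_lock);
	list_del_init(&bo->pinned_link);
	spin_unlock(&dev->pinned_lock);
}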
|
| /drivers/infiniband/core/ |
| umem.c |
|   169  int pinned, ret;  in ib_umem_get() local
|   229  pinned = pin_user_pages_fast(cur_base,  in ib_umem_get()
|   234  if (pinned < 0) {  in ib_umem_get()
|   235  ret = pinned;  in ib_umem_get()
|   239  cur_base += pinned * PAGE_SIZE;  in ib_umem_get()
|   240  npages -= pinned;  in ib_umem_get()
|   242  &umem->sgt_append, page_list, pinned, 0,  in ib_umem_get()
|   243  pinned << PAGE_SHIFT, ib_dma_max_seg_size(device),  in ib_umem_get()
|   246  unpin_user_pages_dirty_lock(page_list, pinned, 0);  in ib_umem_get()
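The ib_umem_get() lines outline the common chunked pinning loop: pin up to one page_list worth of user pages per pin_user_pages_fast() call, feed the result into the umem's scatter list, advance the user address, and unpin on failure. A minimal, hedged version of that loop, with the scatter-list step reduced to a placeholder comment (demo_pin_range() is hypothetical, not the ib_umem code), looks roughly like this:

#include <linux/mm.h>
#include <linux/sched.h>
#include <linux/minmax.h>
#include <linux/gfp.h>

static int demo_pin_range(unsigned long uaddr, unsigned long npages)
{
	struct page **page_list;
	unsigned long cur_base = uaddr;
	int pinned, ret = 0;

	/* one page worth of struct page pointers per batch */
	page_list = (struct page **)__get_free_page(GFP_KERNEL);
	if (!page_list)
		return -ENOMEM;

	while (npages) {
		cond_resched();
		pinned = pin_user_pages_fast(cur_base,
					     min_t(unsigned long, npages,
						   PAGE_SIZE / sizeof(struct page *)),
					     FOLL_WRITE | FOLL_LONGTERM,
					     page_list);
		if (pinned <= 0) {
			/* treat "pinned nothing" as a failure too */
			ret = pinned ? pinned : -EFAULT;
			break;
		}

		cur_base += pinned * PAGE_SIZE;
		npages -= pinned;

		/*
		 * Real users append page_list[0..pinned) to an sg table
		 * here and call unpin_user_pages(page_list, pinned) if
		 * that step fails.
		 */
	}

	free_page((unsigned long)page_list);
	return ret;
}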
|
| umem_dmabuf.c |
|   220  umem_dmabuf->pinned = 1;  in ib_umem_dmabuf_get_pinned_with_dma_device()
|   256  if (umem_dmabuf->pinned) {  in ib_umem_dmabuf_revoke()
|   258  umem_dmabuf->pinned = 0;  in ib_umem_dmabuf_revoke()
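Here pinned is a flag rather than a count: the get-pinned path records that the dma-buf attachment was pinned, and ib_umem_dmabuf_revoke() only unpins (and clears the flag) when that record is set. Below is a hedged sketch of that flag-guarded pairing using the generic dma_buf_pin()/dma_buf_unpin() API under the buffer's reservation lock; the demo_* struct is illustrative, not the ib_umem_dmabuf layout.

#include <linux/dma-buf.h>
#include <linux/dma-resv.h>

struct demo_umem_dmabuf {
	struct dma_buf_attachment *attach;
	unsigned int pinned : 1;	/* did we pin the attachment? */
};

static int demo_umem_dmabuf_pin(struct demo_umem_dmabuf *u)
{
	int err;

	dma_resv_lock(u->attach->dmabuf->resv, NULL);
	err = dma_buf_pin(u->attach);
	if (!err)
		u->pinned = 1;
	dma_resv_unlock(u->attach->dmabuf->resv);

	return err;
}

static void demo_umem_dmabuf_revoke(struct demo_umem_dmabuf *u)
{
	dma_resv_lock(u->attach->dmabuf->resv, NULL);
	if (u->pinned) {
		dma_buf_unpin(u->attach);
		u->pinned = 0;
	}
	dma_resv_unlock(u->attach->dmabuf->resv);
}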
|
| /drivers/infiniband/hw/hfi1/ |
| pin_system.c |
|   119  int pinned, cleared;  in pin_system_pages() local
|   138  pinned = hfi1_acquire_user_pages(current->mm, start_address, npages, 0,  in pin_system_pages()
|   141  if (pinned < 0) {  in pin_system_pages()
|   143  SDMA_DBG(req, "pinned %d", pinned);  in pin_system_pages()
|   144  return pinned;  in pin_system_pages()
|   146  if (pinned != npages) {  in pin_system_pages()
|   147  unpin_vector_pages(current->mm, pages, node->npages, pinned);  in pin_system_pages()
|   148  SDMA_DBG(req, "npages %u pinned %d", npages, pinned);  in pin_system_pages()
|   155  atomic_add(pinned, &pq->n_locked);  in pin_system_pages()
|   156  SDMA_DBG(req, "done. pinned %d", pinned);  in pin_system_pages()
|
| user_exp_rcv.c |
|   161  int pinned;  in pin_rcv_pages() local
|   188  if (pinned <= 0) {  in pin_rcv_pages()
|   190  return pinned;  in pin_rcv_pages()
|   193  fd->tid_n_pinned += pinned;  in pin_rcv_pages()
|   194  return pinned;  in pin_rcv_pages()
|   249  int ret = 0, need_group = 0, pinned;  in hfi1_user_exp_rcv_setup() local
|   288  pinned = pin_rcv_pages(fd, tidbuf);  in hfi1_user_exp_rcv_setup()
|   289  if (pinned <= 0) {  in hfi1_user_exp_rcv_setup()
|   290  ret = (pinned < 0) ? pinned : -ENOSPC;  in hfi1_user_exp_rcv_setup()
|   464  pinned = 0; /* nothing left to unpin */  in hfi1_user_exp_rcv_setup()
|   [all …]
|
| /drivers/fpga/ |
| dfl-afu-dma-region.c |
|    39  int ret, pinned;  in afu_dma_pin_pages() local
|    51  pinned = pin_user_pages_fast(region->user_addr, npages, FOLL_WRITE,  in afu_dma_pin_pages()
|    53  if (pinned < 0) {  in afu_dma_pin_pages()
|    54  ret = pinned;  in afu_dma_pin_pages()
|    56  } else if (pinned != npages) {  in afu_dma_pin_pages()
|    61  dev_dbg(dev, "%d pages pinned\n", pinned);  in afu_dma_pin_pages()
|    66  unpin_user_pages(region->pages, pinned);  in afu_dma_pin_pages()
|
| /drivers/gpu/drm/i915/gem/ |
| i915_gem_userptr.c |
|   238  int pinned, ret;  in i915_gem_object_userptr_submit_init() local
|   266  pinned = 0;  in i915_gem_object_userptr_submit_init()
|   267  while (pinned < num_pages) {  in i915_gem_object_userptr_submit_init()
|   268  ret = pin_user_pages_fast(obj->userptr.ptr + pinned * PAGE_SIZE,  in i915_gem_object_userptr_submit_init()
|   269  num_pages - pinned, gup_flags,  in i915_gem_object_userptr_submit_init()
|   270  &pvec[pinned]);  in i915_gem_object_userptr_submit_init()
|   274  pinned += ret;  in i915_gem_object_userptr_submit_init()
|   302  unpin_user_pages(pvec, pinned);  in i915_gem_object_userptr_submit_init()
|
| i915_gem_pages.c |
|   504  bool pinned;  in i915_gem_object_pin_map() local
|   517  pinned = !(type & I915_MAP_OVERRIDE);  in i915_gem_object_pin_map()
|   531  pinned = false;  in i915_gem_object_pin_map()
|   565  if (pinned) {  in i915_gem_object_pin_map()
|
| /drivers/virt/acrn/ |
| mm.c |
|   164  int ret, pinned;  in acrn_vm_ram_map() local
|   242  pinned = pin_user_pages_fast(memmap->vma_base,  in acrn_vm_ram_map()
|   245  if (pinned < 0) {  in acrn_vm_ram_map()
|   246  ret = pinned;  in acrn_vm_ram_map()
|   248  } else if (pinned != nr_pages) {  in acrn_vm_ram_map()
|   343  for (i = 0; i < pinned; i++)  in acrn_vm_ram_map()
|
| /drivers/gpu/drm/i915/gt/ |
| intel_gt_buffer_pool.c |
|   107  if (node->pinned) {  in pool_retire()
|   112  node->pinned = false;  in pool_retire()
|   129  if (node->pinned)  in intel_gt_buffer_pool_mark_used()
|   135  node->pinned = true;  in intel_gt_buffer_pool_mark_used()
|   153  node->pinned = false;  in node_create()
|
| intel_gt_buffer_pool_types.h |
|    33  u32 pinned;  member
|
| intel_gt_buffer_pool.h |
|    28  GEM_WARN_ON(!node->pinned);  in intel_gt_buffer_pool_mark_active()
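Across these three intel_gt_buffer_pool files, pinned is a per-node flag with a simple lifecycle: mark_used sets it (taking whatever reference the node needs), pool_retire drops that reference and clears it, and the mark_active path asserts it is already set. An illustrative reduction of that lifecycle, with all demo_* names hypothetical:

#include <linux/bug.h>
#include <linux/types.h>

struct demo_pool_node {
	bool pinned;	/* backing resources held for an active user */
};

static void demo_pool_mark_used(struct demo_pool_node *node)
{
	if (node->pinned)	/* already accounted for */
		return;
	/* ...grab/pin the node's backing pages here... */
	node->pinned = true;
}

static void demo_pool_mark_active(struct demo_pool_node *node)
{
	WARN_ON(!node->pinned);	/* must have been marked used first */
}

static void demo_pool_retire(struct demo_pool_node *node)
{
	if (node->pinned) {
		/* ...drop the reference taken in demo_pool_mark_used()... */
		node->pinned = false;
	}
}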
|
| /drivers/s390/cio/ |
| vfio_ccw_cp.c |
|   133  int pinned = 0, npage = 1;  in page_array_pin() local
|   136  while (pinned < pa->pa_nr) {  in page_array_pin()
|   137  dma_addr_t *first = &pa->pa_iova[pinned];  in page_array_pin()
|   140  if (pinned + npage < pa->pa_nr &&  in page_array_pin()
|   149  &pa->pa_page[pinned]);  in page_array_pin()
|   153  pinned += ret;  in page_array_pin()
|   157  pinned += npage;  in page_array_pin()
|   164  page_array_unpin(pa, vdev, pinned, unaligned);  in page_array_pin()
|
| /drivers/gpu/drm/etnaviv/ |
| etnaviv_gem.c |
|   669  int ret, pinned = 0, npages = etnaviv_obj->base.size >> PAGE_SHIFT;  in etnaviv_gem_userptr_get_pages() local
|   685  unsigned num_pages = npages - pinned;  in etnaviv_gem_userptr_get_pages()
|   686  uint64_t ptr = userptr->ptr + pinned * PAGE_SIZE;  in etnaviv_gem_userptr_get_pages()
|   687  struct page **pages = pvec + pinned;  in etnaviv_gem_userptr_get_pages()
|   691  unpin_user_pages(pvec, pinned);  in etnaviv_gem_userptr_get_pages()
|   696  pinned += ret;  in etnaviv_gem_userptr_get_pages()
|   698  } while (pinned < npages);  in etnaviv_gem_userptr_get_pages()
|
| /drivers/vhost/ |
| vdpa.c |
|   924  unsigned long pfn, pinned;  in vhost_vdpa_pa_unmap() local
|   927  pinned = PFN_DOWN(map->size);  in vhost_vdpa_pa_unmap()
|   929  pinned > 0; pfn++, pinned--) {  in vhost_vdpa_pa_unmap()
|  1104  long pinned;  in vhost_vdpa_pa_map() local
|  1137  if (sz2pin != pinned) {  in vhost_vdpa_pa_map()
|  1138  if (pinned < 0) {  in vhost_vdpa_pa_map()
|  1139  ret = pinned;  in vhost_vdpa_pa_map()
|  1151  for (i = 0; i < pinned; i++) {  in vhost_vdpa_pa_map()
|  1171  pinned - i);  in vhost_vdpa_pa_map()
|  1183  cur_base += PFN_PHYS(pinned);  in vhost_vdpa_pa_map()
|  [all …]
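In vhost_vdpa_pa_unmap(), pinned is derived from the mapping size with PFN_DOWN() and counts down as each page frame in the mapping is released; the vhost_vdpa_pa_map() lines show the usual partial-pin error handling on the way in. A rough sketch of just the unmap walk, with an invented demo_map struct standing in for vhost's iotlb map entry:

#include <linux/mm.h>
#include <linux/pfn.h>
#include <linux/types.h>

struct demo_map {
	u64 addr;	/* physical address of the first pinned page */
	u64 size;	/* bytes covered by the mapping */
	bool writable;
};

static void demo_pa_unmap(struct demo_map *map)
{
	unsigned long pfn, pinned;

	/* how many pages were pinned when the mapping was created */
	pinned = PFN_DOWN(map->size);
	for (pfn = PFN_DOWN(map->addr); pinned > 0; pfn++, pinned--) {
		struct page *page = pfn_to_page(pfn);

		if (map->writable)
			set_page_dirty_lock(page);
		unpin_user_page(page);
	}
}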
|
| /drivers/gpu/drm/radeon/ |
| radeon_ttm.c |
|   327  unsigned pinned = 0;  in radeon_ttm_tt_pin_userptr() local
|   348  unsigned num_pages = ttm->num_pages - pinned;  in radeon_ttm_tt_pin_userptr()
|   349  uint64_t userptr = gtt->userptr + pinned * PAGE_SIZE;  in radeon_ttm_tt_pin_userptr()
|   350  struct page **pages = ttm->pages + pinned;  in radeon_ttm_tt_pin_userptr()
|   357  pinned += r;  in radeon_ttm_tt_pin_userptr()
|   359  } while (pinned < ttm->num_pages);  in radeon_ttm_tt_pin_userptr()
|   380  release_pages(ttm->pages, pinned);  in radeon_ttm_tt_pin_userptr()
|
| /drivers/gpu/drm/qxl/ |
| qxl_object.h |
|    58  bool kernel, bool pinned, u32 domain,
|
| qxl_object.c |
|   105  bool kernel, bool pinned, u32 domain, u32 priority,  in qxl_bo_create() argument
|   149  if (pinned)  in qxl_bo_create()
|
| /drivers/gpu/drm/i915/selftests/ |
| i915_vma.c |
|   113  int pinned;  in create_vmas() local
|   116  for (pinned = 0; pinned <= 1; pinned++) {  in create_vmas()
|   133  if (!pinned) {  in create_vmas()
|
| /drivers/vdpa/vdpa_user/ |
| vduse_dev.c |
|  1067  long pinned = 0;  in vduse_dev_reg_umem() local
|  1095  pinned = pin_user_pages(uaddr, npages, FOLL_LONGTERM | FOLL_WRITE,  in vduse_dev_reg_umem()
|  1097  if (pinned != npages) {  in vduse_dev_reg_umem()
|  1098  ret = pinned < 0 ? pinned : -ENOMEM;  in vduse_dev_reg_umem()
|  1103  page_list, pinned);  in vduse_dev_reg_umem()
|  1110  umem->npages = pinned;  in vduse_dev_reg_umem()
|  1117  if (ret && pinned > 0)  in vduse_dev_reg_umem()
|  1118  unpin_user_pages(page_list, pinned);  in vduse_dev_reg_umem()
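vduse_dev_reg_umem() treats the pin as all-or-nothing: it pins the whole user range with FOLL_LONGTERM (the flag for pins that outlive the current call), builds a scatter table from the pinned pages, and unpins everything if any step fails. The sketch below keeps that shape but uses pin_user_pages_fast() so it does not have to take mmap_lock the way the real code does; demo_reg_umem() and its error policy are assumptions, not the vduse implementation.

#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int demo_reg_umem(unsigned long uaddr, unsigned long npages,
			 struct sg_table *sgt)
{
	struct page **page_list;
	long pinned;
	int ret;

	page_list = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);
	if (!page_list)
		return -ENOMEM;

	pinned = pin_user_pages_fast(uaddr, npages,
				     FOLL_LONGTERM | FOLL_WRITE, page_list);
	if (pinned != npages) {
		/* a partial pin counts as failure for a long-term mapping */
		ret = pinned < 0 ? pinned : -ENOMEM;
		goto out_unpin;
	}

	ret = sg_alloc_table_from_pages(sgt, page_list, npages, 0,
					npages << PAGE_SHIFT, GFP_KERNEL);
	if (ret)
		goto out_unpin;

	kvfree(page_list);
	return 0;

out_unpin:
	if (pinned > 0)
		unpin_user_pages(page_list, pinned);
	kvfree(page_list);
	return ret;
}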
|
| /drivers/accel/amdxdna/ |
| amdxdna_gem.h |
|    38  bool pinned;  member
|
| /drivers/gpu/drm/vmwgfx/ |
| vmwgfx_stdu.c |
|   949  WARN_ON(!vps->pinned);  in vmw_stdu_primary_plane_cleanup_fb()
|   987  WARN_ON(vps->pinned != 0);  in vmw_stdu_primary_plane_prepare_fb()
|  1051  WARN_ON(vps->pinned != 0);  in vmw_stdu_primary_plane_prepare_fb()
|  1073  WARN_ON(vps->pinned != 0);  in vmw_stdu_primary_plane_prepare_fb()
|  1088  vps->pinned++;  in vmw_stdu_primary_plane_prepare_fb()
|
| /drivers/xen/ |
| privcmd.c |
|   595  struct page *pages[], unsigned int nr_pages, unsigned int *pinned)  in lock_pages() argument
|   615  *pinned += page_count;  in lock_pages()
|   641  unsigned int pinned = 0;  in privcmd_ioctl_dm_op() local
|   695  rc = lock_pages(kbufs, kdata.num, pages, nr_pages, &pinned);  in privcmd_ioctl_dm_op()
|   709  unlock_pages(pages, pinned);  in privcmd_ioctl_dm_op()
|
| /drivers/gpu/drm/i915/ |
| i915_gem.c |
|    96  u64 pinned;  in i915_gem_get_aperture_ioctl() local
|   101  pinned = ggtt->vm.reserved;  in i915_gem_get_aperture_ioctl()
|   104  pinned += vma->node.size;  in i915_gem_get_aperture_ioctl()
|   109  args->aper_available_size = args->aper_size - pinned;  in i915_gem_get_aperture_ioctl()
|