Searched refs:hmm_range (Results 1 – 15 of 15) sorted by relevance
/drivers/gpu/drm/amd/amdgpu/

  amdgpu_hmm.c
    173  struct hmm_range *hmm_range;  in amdgpu_hmm_range_get_pages() local
    180  hmm_range = kzalloc(sizeof(*hmm_range), GFP_KERNEL);  in amdgpu_hmm_range_get_pages()
    181  if (unlikely(!hmm_range))  in amdgpu_hmm_range_get_pages()
    195  hmm_range->start = start;  in amdgpu_hmm_range_get_pages()
    200  hmm_range->end = min(hmm_range->start + MAX_WALK_BYTE, end);  in amdgpu_hmm_range_get_pages()
    203  hmm_range->start, hmm_range->end);  in amdgpu_hmm_range_get_pages()
    219  hmm_range->start = hmm_range->end;  in amdgpu_hmm_range_get_pages()
    233  *phmm_range = hmm_range;  in amdgpu_hmm_range_get_pages()
    240  kfree(hmm_range);  in amdgpu_hmm_range_get_pages()
    247  bool amdgpu_hmm_range_get_pages_done(struct hmm_range *hmm_range)  in amdgpu_hmm_range_get_pages_done() argument
    [all …]
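The amdgpu hits above outline a split get/done pattern: amdgpu_hmm_range_get_pages() heap-allocates a struct hmm_range, faults the user range in MAX_WALK_BYTE chunks, and hands the range back to the caller, while amdgpu_hmm_range_get_pages_done() later validates it against the notifier. Below is a minimal sketch of that shape only, assuming an already registered mmu_interval_notifier and a caller-supplied pfn array; the chunk size, locking, and the demo_* names are illustrative assumptions, not the driver's actual code.

    #include <linux/hmm.h>
    #include <linux/mm.h>
    #include <linux/mmu_notifier.h>
    #include <linux/slab.h>

    /* Illustrative chunk size; amdgpu's real MAX_WALK_BYTE may differ. */
    #define DEMO_MAX_WALK_BYTE	(64UL << 20)

    /*
     * Hypothetical helper mirroring the shape of amdgpu_hmm_range_get_pages():
     * heap-allocate a struct hmm_range, fault the user range in fixed-size
     * chunks, and return the range so the caller can later check it with
     * mmu_interval_read_retry() (the job of amdgpu_hmm_range_get_pages_done()).
     */
    static int demo_hmm_range_get_pages(struct mmu_interval_notifier *notifier,
                                        unsigned long start, unsigned long end,
                                        unsigned long *pfns,
                                        struct hmm_range **phmm_range)
    {
        struct hmm_range *hmm_range;
        int r;

        hmm_range = kzalloc(sizeof(*hmm_range), GFP_KERNEL);
        if (unlikely(!hmm_range))
            return -ENOMEM;

        hmm_range->notifier = notifier;
        hmm_range->default_flags = HMM_PFN_REQ_FAULT;
        hmm_range->hmm_pfns = pfns;
        hmm_range->start = start;

        do {
            /* Fault at most DEMO_MAX_WALK_BYTE per hmm_range_fault() call. */
            hmm_range->end = min(hmm_range->start + DEMO_MAX_WALK_BYTE, end);
            hmm_range->notifier_seq = mmu_interval_read_begin(notifier);

            mmap_read_lock(notifier->mm);
            r = hmm_range_fault(hmm_range);
            mmap_read_unlock(notifier->mm);
            if (r == -EBUSY)
                continue;    /* an invalidation raced the fault, retry this chunk */
            if (r)
                goto out_free;

            /* Advance the pfn cursor and move on to the next chunk. */
            hmm_range->hmm_pfns += (hmm_range->end - hmm_range->start) >> PAGE_SHIFT;
            hmm_range->start = hmm_range->end;
        } while (hmm_range->start < end);

        /* Hand back a range describing the whole walk. */
        hmm_range->start = start;
        hmm_range->hmm_pfns = pfns;
        *phmm_range = hmm_range;
        return 0;

    out_free:
        kfree(hmm_range);
        return r;
    }

Splitting the fault from the "done" check lets the caller take its own locks first and only then ask mmu_interval_read_retry() whether the snapshot is still valid, which is what the kfd_svm.c hit further down relies on.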
  amdgpu_ttm.h
    45   struct hmm_range;
    193  struct hmm_range **range);
    195  struct hmm_range *range);
    197  struct hmm_range *range);
    201  struct hmm_range **range)  in amdgpu_ttm_tt_get_user_pages()
    206  struct hmm_range *range)  in amdgpu_ttm_tt_discard_user_pages()
    210  struct hmm_range *range)  in amdgpu_ttm_tt_get_user_pages_done()

  amdgpu_hmm.h
    37  struct hmm_range **phmm_range);
    38  bool amdgpu_hmm_range_get_pages_done(struct hmm_range *hmm_range);

  amdgpu_bo_list.h
    28  struct hmm_range;
    42  struct hmm_range *range;

  amdgpu_amdkfd.h
    74  struct hmm_range *range;

  amdgpu_ttm.c
    699  struct hmm_range **range)  in amdgpu_ttm_tt_get_user_pages()
    749  struct hmm_range *range)  in amdgpu_ttm_tt_discard_user_pages()
    764  struct hmm_range *range)  in amdgpu_ttm_tt_get_user_pages_done()

  amdgpu_gem.c
    538  struct hmm_range *range;  in amdgpu_gem_userptr_ioctl()

  amdgpu_amdkfd_gpuvm.c
    1043  struct hmm_range *range;  in init_user_pages()
/drivers/gpu/drm/xe/

  xe_hmm.c
    23   struct hmm_range *range, struct rw_semaphore *notifier_sem)  in xe_alloc_sg()
    92   static int xe_build_sg(struct xe_device *xe, struct hmm_range *range,  in xe_build_sg()
    225  struct hmm_range hmm_range = {  in xe_hmm_userptr_populate_range() local
    259  hmm_range.default_flags |= HMM_PFN_REQ_WRITE;  in xe_hmm_userptr_populate_range()
    266  hmm_range.hmm_pfns = pfns;  in xe_hmm_userptr_populate_range()
    269  hmm_range.notifier_seq = mmu_interval_read_begin(&userptr->notifier);  in xe_hmm_userptr_populate_range()
    274  ret = hmm_range_fault(&hmm_range);  in xe_hmm_userptr_populate_range()
    293  ret = xe_alloc_sg(vm->xe, &userptr->sgt, &hmm_range, &vm->userptr.notifier_lock);  in xe_hmm_userptr_populate_range()
    301  if (mmu_interval_read_retry(hmm_range.notifier, hmm_range.notifier_seq)) {  in xe_hmm_userptr_populate_range()
    306  ret = xe_build_sg(vm->xe, &hmm_range, &userptr->sgt,  in xe_hmm_userptr_populate_range()
    [all …]
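The xe_hmm.c hits (and the drm_gpusvm.c ones below) show the canonical fault-and-retry loop documented in Documentation/mm/hmm.rst: snapshot the notifier sequence with mmu_interval_read_begin(), call hmm_range_fault(), and only commit the pfns if mmu_interval_read_retry() confirms no invalidation raced the walk. A minimal sketch of that loop, assuming a caller-owned notifier, pfn array, and commit lock; the demo_* name and the choice of a plain rw_semaphore are assumptions for illustration, not xe's implementation.

    #include <linux/hmm.h>
    #include <linux/mm.h>
    #include <linux/mmu_notifier.h>
    #include <linux/rwsem.h>

    /*
     * Hypothetical fault-and-retry loop: fault the range, then take the
     * commit lock and re-check the notifier sequence before using the pfns.
     */
    static int demo_populate_range(struct mmu_interval_notifier *notifier,
                                   struct rw_semaphore *commit_lock,
                                   unsigned long start, unsigned long end,
                                   unsigned long *pfns, bool write)
    {
        struct hmm_range hmm_range = {
            .notifier      = notifier,
            .start         = start,
            .end           = end,
            .hmm_pfns      = pfns,
            .default_flags = HMM_PFN_REQ_FAULT,
        };
        int ret;

        if (write)
            hmm_range.default_flags |= HMM_PFN_REQ_WRITE;

    again:
        hmm_range.notifier_seq = mmu_interval_read_begin(notifier);

        mmap_read_lock(notifier->mm);
        ret = hmm_range_fault(&hmm_range);
        mmap_read_unlock(notifier->mm);
        if (ret == -EBUSY)
            goto again;        /* the faulted pages were invalidated, retry */
        if (ret)
            return ret;

        down_read(commit_lock);
        if (mmu_interval_read_retry(notifier, hmm_range.notifier_seq)) {
            up_read(commit_lock);
            goto again;        /* an invalidation raced us, start over */
        }

        /* pfns[] is stable while commit_lock is held: build the sg table or
         * program the device page tables here, then drop the lock. */
        up_read(commit_lock);
        return 0;
    }

Judging from the hits above, xe's sg-table construction (xe_alloc_sg()/xe_build_sg(), under vm->userptr.notifier_lock) occupies the commit step in this sketch.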
/drivers/gpu/drm/

  drm_gpusvm.c
    727   struct hmm_range hmm_range = {  in drm_gpusvm_check_pages() local
    747   hmm_range.hmm_pfns = pfns;  in drm_gpusvm_check_pages()
    750   err = hmm_range_fault(&hmm_range);  in drm_gpusvm_check_pages()
    755   hmm_range.notifier_seq =  in drm_gpusvm_check_pages()
    1248  struct hmm_range hmm_range = {  in drm_gpusvm_range_get_pages() local
    1285  hmm_range.hmm_pfns = pfns;  in drm_gpusvm_range_get_pages()
    1288  err = hmm_range_fault(&hmm_range);  in drm_gpusvm_range_get_pages()
    1295  hmm_range.notifier_seq =  in drm_gpusvm_range_get_pages()
    1483  struct hmm_range hmm_range = {  in drm_gpusvm_range_evict() local
    1505  hmm_range.hmm_pfns = pfns;  in drm_gpusvm_range_evict()
    [all …]
/drivers/gpu/drm/nouveau/

  nouveau_dmem.h
    29  struct hmm_range;

  nouveau_svm.c
    538  struct hmm_range *range,  in nouveau_hmm_convert_pfn()
    662  struct hmm_range range = {  in nouveau_range_fault()
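nouveau_hmm_convert_pfn() above points at the step that follows a successful fault: translating the HMM_PFN_*-encoded entries in hmm_pfns[] into whatever the device page tables expect. A rough sketch of that walk, using the real hmm_pfn_to_page() helper and flag bits but an invented destination encoding; demo_convert_pfns() and the dst[] layout are purely illustrative.

    #include <linux/hmm.h>
    #include <linux/mm.h>

    /* Hypothetical post-fault conversion of hmm_pfns[] into device entries. */
    static void demo_convert_pfns(struct hmm_range *range, u64 *dst,
                                  unsigned long npages)
    {
        unsigned long i;

        for (i = 0; i < npages; i++) {
            struct page *page;

            if (!(range->hmm_pfns[i] & HMM_PFN_VALID)) {
                dst[i] = 0;    /* not present */
                continue;
            }

            /* Recover the struct page behind the flag-tagged pfn. */
            page = hmm_pfn_to_page(range->hmm_pfns[i]);
            dst[i] = (u64)page_to_pfn(page) << PAGE_SHIFT;
            if (range->hmm_pfns[i] & HMM_PFN_WRITE)
                dst[i] |= 1;   /* writable bit, invented for this sketch */
        }
    }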
/drivers/accel/amdxdna/

  amdxdna_gem.h
    14  struct hmm_range range;
/drivers/infiniband/core/

  umem_odp.c
    333  struct hmm_range range = {};  in ib_umem_odp_map_dma_and_lock()
/drivers/gpu/drm/amd/amdkfd/

  kfd_svm.c
    1702  struct hmm_range *hmm_range = NULL;  in svm_range_validate_and_map() local
    1720  &hmm_range);  in svm_range_validate_and_map()
    1731  hmm_range->hmm_pfns);  in svm_range_validate_and_map()
    1742  if (hmm_range && amdgpu_hmm_range_get_pages_done(hmm_range) && !r) {  in svm_range_validate_and_map()
Completed in 37 milliseconds