
Searched refs:hmm_range (Results 1 – 15 of 15) sorted by relevance

/drivers/gpu/drm/amd/amdgpu/
amdgpu_hmm.c:173 struct hmm_range *hmm_range; in amdgpu_hmm_range_get_pages() local
180 hmm_range = kzalloc(sizeof(*hmm_range), GFP_KERNEL); in amdgpu_hmm_range_get_pages()
181 if (unlikely(!hmm_range)) in amdgpu_hmm_range_get_pages()
195 hmm_range->start = start; in amdgpu_hmm_range_get_pages()
200 hmm_range->end = min(hmm_range->start + MAX_WALK_BYTE, end); in amdgpu_hmm_range_get_pages()
203 hmm_range->start, hmm_range->end); in amdgpu_hmm_range_get_pages()
219 hmm_range->start = hmm_range->end; in amdgpu_hmm_range_get_pages()
233 *phmm_range = hmm_range; in amdgpu_hmm_range_get_pages()
240 kfree(hmm_range); in amdgpu_hmm_range_get_pages()
247 bool amdgpu_hmm_range_get_pages_done(struct hmm_range *hmm_range) in amdgpu_hmm_range_get_pages_done() argument
[all …]
amdgpu_ttm.h:45 struct hmm_range;
193 struct hmm_range **range);
195 struct hmm_range *range);
197 struct hmm_range *range);
201 struct hmm_range **range) in amdgpu_ttm_tt_get_user_pages()
206 struct hmm_range *range) in amdgpu_ttm_tt_discard_user_pages()
210 struct hmm_range *range) in amdgpu_ttm_tt_get_user_pages_done()
amdgpu_hmm.h:37 struct hmm_range **phmm_range);
38 bool amdgpu_hmm_range_get_pages_done(struct hmm_range *hmm_range);
amdgpu_bo_list.h:28 struct hmm_range;
42 struct hmm_range *range;
amdgpu_amdkfd.h:74 struct hmm_range *range;
amdgpu_ttm.c:699 struct hmm_range **range) in amdgpu_ttm_tt_get_user_pages()
749 struct hmm_range *range) in amdgpu_ttm_tt_discard_user_pages()
764 struct hmm_range *range) in amdgpu_ttm_tt_get_user_pages_done()
amdgpu_gem.c:538 struct hmm_range *range; in amdgpu_gem_userptr_ioctl()
amdgpu_amdkfd_gpuvm.c:1043 struct hmm_range *range; in init_user_pages()
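
For orientation, the amdgpu_hmm.c hits above outline a chunked walk: the helper allocates a struct hmm_range, faults the user range one MAX_WALK_BYTE-sized piece at a time, and advances hmm_range->start to hmm_range->end after each hmm_range_fault() call before handing the range back through *phmm_range. A minimal sketch of that pattern follows; drv_walk_user_range(), WALK_CHUNK and the retry/error handling are assumptions for illustration, not the actual amdgpu implementation.

    /*
     * Hypothetical chunked hmm_range walk, loosely following the
     * amdgpu_hmm.c hits above.  Only the hmm_range/hmm_range_fault()
     * usage reflects the upstream API; everything else is a placeholder.
     */
    #include <linux/errno.h>
    #include <linux/hmm.h>
    #include <linux/minmax.h>
    #include <linux/mm.h>
    #include <linux/mmu_notifier.h>
    #include <linux/sizes.h>
    #include <linux/slab.h>

    #define WALK_CHUNK SZ_512M /* assumed chunk size */

    static int drv_walk_user_range(struct mmu_interval_notifier *notifier,
                                   struct mm_struct *mm,
                                   unsigned long start, unsigned long end,
                                   unsigned long *pfns,
                                   struct hmm_range **phmm_range)
    {
        struct hmm_range *range;
        int ret;

        range = kzalloc(sizeof(*range), GFP_KERNEL);
        if (unlikely(!range))
            return -ENOMEM;

        range->notifier = notifier;
        range->default_flags = HMM_PFN_REQ_FAULT;
        range->hmm_pfns = pfns;
        range->start = start;

        while (range->start < end) {
            /* Fault in at most one chunk per hmm_range_fault() call. */
            range->end = min(range->start + WALK_CHUNK, end);
            range->notifier_seq = mmu_interval_read_begin(notifier);

            mmap_read_lock(mm);
            ret = hmm_range_fault(range);
            mmap_read_unlock(mm);
            if (ret == -EBUSY)
                continue; /* invalidated mid-fault; a real driver bounds retries */
            if (ret) {
                kfree(range);
                return ret;
            }

            /* The next chunk writes into the following part of pfns[]. */
            range->hmm_pfns += (range->end - range->start) >> PAGE_SHIFT;
            range->start = range->end;
        }

        /* Hand the range back so the caller can run a read_retry check and free it. */
        range->hmm_pfns = pfns;
        range->start = start;
        *phmm_range = range;
        return 0;
    }

Returning the struct hmm_range to the caller is what allows a later "pages done" check, presumably built on mmu_interval_read_retry(), to decide whether the faulted snapshot is still valid.
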
/drivers/gpu/drm/xe/
xe_hmm.c:23 struct hmm_range *range, struct rw_semaphore *notifier_sem) in xe_alloc_sg()
92 static int xe_build_sg(struct xe_device *xe, struct hmm_range *range, in xe_build_sg()
225 struct hmm_range hmm_range = { in xe_hmm_userptr_populate_range() local
259 hmm_range.default_flags |= HMM_PFN_REQ_WRITE; in xe_hmm_userptr_populate_range()
266 hmm_range.hmm_pfns = pfns; in xe_hmm_userptr_populate_range()
269 hmm_range.notifier_seq = mmu_interval_read_begin(&userptr->notifier); in xe_hmm_userptr_populate_range()
274 ret = hmm_range_fault(&hmm_range); in xe_hmm_userptr_populate_range()
293 ret = xe_alloc_sg(vm->xe, &userptr->sgt, &hmm_range, &vm->userptr.notifier_lock); in xe_hmm_userptr_populate_range()
301 if (mmu_interval_read_retry(hmm_range.notifier, hmm_range.notifier_seq)) { in xe_hmm_userptr_populate_range()
306 ret = xe_build_sg(vm->xe, &hmm_range, &userptr->sgt, in xe_hmm_userptr_populate_range()
[all …]
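
The xe_hmm.c hits above (and the drm_gpusvm.c hits in the next section) follow the snapshot-and-retry idiom described in Documentation/mm/hmm.rst: record a sequence number with mmu_interval_read_begin(), call hmm_range_fault() under mmap_read_lock(), and only commit the pfns after mmu_interval_read_retry() confirms, under the driver's notifier lock (a rw_semaphore in xe, per xe_alloc_sg() above), that no invalidation raced in. A condensed sketch; drv_populate() and drv_commit_pfns() are placeholders for the driver-specific pieces such as building the sg table:

    /*
     * Condensed snapshot-and-retry idiom from Documentation/mm/hmm.rst.
     * drv_commit_pfns() stands in for the driver-specific commit step
     * (e.g. turning pfns into an sg table).
     */
    #include <linux/errno.h>
    #include <linux/hmm.h>
    #include <linux/mm.h>
    #include <linux/mmu_notifier.h>
    #include <linux/rwsem.h>
    #include <linux/types.h>

    /* Placeholder commit step. */
    static int drv_commit_pfns(const unsigned long *pfns, unsigned long npages)
    {
        return 0;
    }

    static int drv_populate(struct mmu_interval_notifier *notifier,
                            struct mm_struct *mm,
                            struct rw_semaphore *notifier_lock,
                            unsigned long start, unsigned long end,
                            unsigned long *pfns, bool write)
    {
        struct hmm_range range = {
            .notifier      = notifier,
            .start         = start,
            .end           = end,
            .hmm_pfns      = pfns,
            .default_flags = HMM_PFN_REQ_FAULT |
                             (write ? HMM_PFN_REQ_WRITE : 0),
        };
        int ret;

    again:
        range.notifier_seq = mmu_interval_read_begin(notifier);

        mmap_read_lock(mm);
        ret = hmm_range_fault(&range);
        mmap_read_unlock(mm);
        if (ret) {
            if (ret == -EBUSY)
                goto again; /* invalidated while faulting */
            return ret;
        }

        down_read(notifier_lock);
        /* Anything invalidated since read_begin()?  Then redo the fault. */
        if (mmu_interval_read_retry(notifier, range.notifier_seq)) {
            up_read(notifier_lock);
            goto again;
        }

        /* pfns[] is stable until the lock is dropped: commit it now. */
        ret = drv_commit_pfns(pfns, (end - start) >> PAGE_SHIFT);
        up_read(notifier_lock);
        return ret;
    }
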
/drivers/gpu/drm/
drm_gpusvm.c:727 struct hmm_range hmm_range = { in drm_gpusvm_check_pages() local
747 hmm_range.hmm_pfns = pfns; in drm_gpusvm_check_pages()
750 err = hmm_range_fault(&hmm_range); in drm_gpusvm_check_pages()
755 hmm_range.notifier_seq = in drm_gpusvm_check_pages()
1248 struct hmm_range hmm_range = { in drm_gpusvm_range_get_pages() local
1285 hmm_range.hmm_pfns = pfns; in drm_gpusvm_range_get_pages()
1288 err = hmm_range_fault(&hmm_range); in drm_gpusvm_range_get_pages()
1295 hmm_range.notifier_seq = in drm_gpusvm_range_get_pages()
1483 struct hmm_range hmm_range = { in drm_gpusvm_range_evict() local
1505 hmm_range.hmm_pfns = pfns; in drm_gpusvm_range_evict()
[all …]
/drivers/gpu/drm/nouveau/
nouveau_dmem.h:29 struct hmm_range;
nouveau_svm.c:538 struct hmm_range *range, in nouveau_hmm_convert_pfn()
662 struct hmm_range range = { in nouveau_range_fault()
/drivers/accel/amdxdna/
amdxdna_gem.h:14 struct hmm_range range;
/drivers/infiniband/core/
umem_odp.c:333 struct hmm_range range = {}; in ib_umem_odp_map_dma_and_lock()
/drivers/gpu/drm/amd/amdkfd/
kfd_svm.c:1702 struct hmm_range *hmm_range = NULL; in svm_range_validate_and_map() local
1720 &hmm_range); in svm_range_validate_and_map()
1731 hmm_range->hmm_pfns); in svm_range_validate_and_map()
1742 if (hmm_range && amdgpu_hmm_range_get_pages_done(hmm_range) && !r) { in svm_range_validate_and_map()
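
The kfd_svm.c hit shows the consumer side of the amdgpu helper pair: amdgpu_hmm_range_get_pages() hands back the struct hmm_range, the caller maps from hmm_range->hmm_pfns, and amdgpu_hmm_range_get_pages_done() then reports whether the work has to be redone. The outline below is a hedged reconstruction: drv_get_user_pages() and drv_map_pfns() are placeholders (the full amdgpu_hmm_range_get_pages() parameter list is not shown in these results), and treating a true return from the done helper as "retry" follows the svm_range_validate_and_map() usage above.

    /*
     * Hypothetical consumer loop modelled on svm_range_validate_and_map().
     * drv_get_user_pages() stands in for amdgpu_hmm_range_get_pages() and
     * drv_map_pfns() for the driver's page-table update; both are stubs.
     */
    #include <linux/errno.h>
    #include <linux/hmm.h>
    #include <linux/types.h>

    /* Declared in amdgpu_hmm.h in the tree (see the amdgpu results above). */
    bool amdgpu_hmm_range_get_pages_done(struct hmm_range *hmm_range);

    static int drv_get_user_pages(unsigned long start, unsigned long npages,
                                  struct hmm_range **phmm_range)
    {
        return -ENOSYS; /* placeholder */
    }

    static int drv_map_pfns(const unsigned long *pfns, unsigned long npages)
    {
        return 0; /* placeholder */
    }

    static int drv_validate_and_map(unsigned long start, unsigned long npages)
    {
        struct hmm_range *hmm_range = NULL;
        bool retry;
        int r;

        do {
            r = drv_get_user_pages(start, npages, &hmm_range);
            if (r)
                return r;

            /* Map from the faulted pfns; an invalidation may still race in. */
            r = drv_map_pfns(hmm_range->hmm_pfns, npages);

            /*
             * A true return is taken to mean the interval was invalidated
             * in between, so the mapping must be redone (a real driver
             * would bound the retries or hand back a "try again" error).
             */
            retry = amdgpu_hmm_range_get_pages_done(hmm_range) && !r;
        } while (retry);

        return r;
    }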
