Lines Matching refs:hmm_vma_walk
31 struct hmm_vma_walk { struct
66 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_fault() local
71 hmm_vma_walk->last = addr; in hmm_vma_fault()
86 static unsigned int hmm_pte_need_fault(const struct hmm_vma_walk *hmm_vma_walk, in hmm_pte_need_fault() argument
90 struct hmm_range *range = hmm_vma_walk->range; in hmm_pte_need_fault()
121 hmm_range_need_fault(const struct hmm_vma_walk *hmm_vma_walk, in hmm_range_need_fault() argument
125 struct hmm_range *range = hmm_vma_walk->range; in hmm_range_need_fault()
139 required_fault |= hmm_pte_need_fault(hmm_vma_walk, hmm_pfns[i], in hmm_range_need_fault()
150 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_walk_hole() local
151 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_hole()
160 hmm_range_need_fault(hmm_vma_walk, hmm_pfns, npages, 0); in hmm_vma_walk_hole()
191 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_handle_pmd() local
192 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_handle_pmd()
200 hmm_range_need_fault(hmm_vma_walk, hmm_pfns, npages, cpu_flags); in hmm_vma_handle_pmd()
227 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_handle_pte() local
228 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_handle_pte()
236 hmm_pte_need_fault(hmm_vma_walk, pfn_req_flags, 0); in hmm_vma_handle_pte()
261 hmm_pte_need_fault(hmm_vma_walk, pfn_req_flags, 0); in hmm_vma_handle_pte()
278 hmm_vma_walk->last = addr; in hmm_vma_handle_pte()
290 hmm_pte_need_fault(hmm_vma_walk, pfn_req_flags, cpu_flags); in hmm_vma_handle_pte()
303 if (hmm_pte_need_fault(hmm_vma_walk, pfn_req_flags, 0)) { in hmm_vma_handle_pte()
325 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_walk_pmd() local
326 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_pmd()
340 if (hmm_range_need_fault(hmm_vma_walk, hmm_pfns, npages, 0)) { in hmm_vma_walk_pmd()
341 hmm_vma_walk->last = addr; in hmm_vma_walk_pmd()
349 if (hmm_range_need_fault(hmm_vma_walk, hmm_pfns, npages, 0)) in hmm_vma_walk_pmd()
378 if (hmm_range_need_fault(hmm_vma_walk, hmm_pfns, npages, 0)) in hmm_vma_walk_pmd()
412 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_walk_pud() local
413 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_pud()
446 required_fault = hmm_range_need_fault(hmm_vma_walk, hmm_pfns, in hmm_vma_walk_pud()
476 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_walk_hugetlb_entry() local
477 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_hugetlb_entry()
493 hmm_pte_need_fault(hmm_vma_walk, pfn_req_flags, cpu_flags); in hmm_vma_walk_hugetlb_entry()
526 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_walk_test() local
527 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_test()
545 if (hmm_range_need_fault(hmm_vma_walk, in hmm_vma_walk_test()
586 struct hmm_vma_walk hmm_vma_walk = { in hmm_range_fault() local
600 ret = walk_page_range(mm, hmm_vma_walk.last, range->end, in hmm_range_fault()
601 &hmm_walk_ops, &hmm_vma_walk); in hmm_range_fault()
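
These references appear to come from the Linux kernel's HMM page-walk code (mm/hmm.c). Read together, they show one pattern: hmm_range_fault() builds an on-stack struct hmm_vma_walk that pairs the caller's struct hmm_range with a "last" restart address, and hands it to walk_page_range() as the private cookie that every callback (hmm_vma_walk_pmd(), hmm_vma_handle_pte(), hmm_vma_walk_hugetlb_entry(), and so on) recovers from walk->private. The sketch below is reconstructed from the listed lines only, not copied from the file; the member layout and the -EBUSY retry around walk_page_range() are assumptions inferred from how hmm_vma_walk->last is updated in the fault paths.

/*
 * Minimal sketch reconstructed from the cross-references above; it assumes
 * the Linux mm/hmm.c context and is not a verbatim copy of that file.
 */
#include <linux/mm.h>
#include <linux/hmm.h>
#include <linux/pagewalk.h>
#include <linux/mmu_notifier.h>

/* Private cookie threaded through the page walk (the struct at line 31). */
struct hmm_vma_walk {
	struct hmm_range	*range;	/* caller's request, read by every callback */
	unsigned long		last;	/* restart address, advanced on faults */
};

/*
 * Callback table defined elsewhere in the same file; its entries are the
 * hmm_vma_walk_*() / hmm_vma_handle_*() functions in the listing above.
 */
static const struct mm_walk_ops hmm_walk_ops;

int hmm_range_fault(struct hmm_range *range)
{
	/* On-stack instance, cf. line 586 above. */
	struct hmm_vma_walk hmm_vma_walk = {
		.range = range,
		.last = range->start,
	};
	struct mm_struct *mm = range->notifier->mm;
	int ret;

	mmap_assert_locked(mm);

	do {
		/* Assumption: bail out and retry if the interval was invalidated. */
		if (mmu_interval_check_retry(range->notifier,
					     range->notifier_seq))
			return -EBUSY;
		/* Lines 600-601 above: the cookie rides in walk->private. */
		ret = walk_page_range(mm, hmm_vma_walk.last, range->end,
				      &hmm_walk_ops, &hmm_vma_walk);
	} while (ret == -EBUSY);	/* assumption: -EBUSY means restart from ->last */
	return ret;
}

Each callback then starts with struct hmm_vma_walk *hmm_vma_walk = walk->private; (lines 66, 150, 191, 227, 325, 412, 476, 526 above) and reaches the caller's request through hmm_vma_walk->range, which is why the cookie, rather than the range itself, is what gets threaded through the walk.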