Lines matching refs:range — references to the `range` symbol in mm/hmm.c; each hit lists the source line number, the source line, the enclosing function, and the symbol role (member, argument, or local).

34 struct hmm_range *range; member
51 struct hmm_range *range, unsigned long cpu_flags) in hmm_pfns_fill() argument
53 unsigned long i = (addr - range->start) >> PAGE_SHIFT; in hmm_pfns_fill()
56 range->hmm_pfns[i] &= HMM_PFN_INOUT_FLAGS; in hmm_pfns_fill()
57 range->hmm_pfns[i] |= cpu_flags; in hmm_pfns_fill()
100 struct hmm_range *range = hmm_vma_walk->range; in hmm_pte_need_fault() local
112 pfn_req_flags &= range->pfn_flags_mask; in hmm_pte_need_fault()
113 pfn_req_flags |= range->default_flags; in hmm_pte_need_fault()
135 struct hmm_range *range = hmm_vma_walk->range; in hmm_range_need_fault() local
144 if (!((range->default_flags | range->pfn_flags_mask) & in hmm_range_need_fault()
161 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_hole() local
166 i = (addr - range->start) >> PAGE_SHIFT; in hmm_vma_walk_hole()
168 hmm_pfns = &range->hmm_pfns[i]; in hmm_vma_walk_hole()
174 return hmm_pfns_fill(addr, end, range, HMM_PFN_ERROR); in hmm_vma_walk_hole()
178 return hmm_pfns_fill(addr, end, range, 0); in hmm_vma_walk_hole()
187 static inline unsigned long pmd_to_hmm_pfn_flags(struct hmm_range *range, in pmd_to_hmm_pfn_flags() argument
202 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_handle_pmd() local
208 cpu_flags = pmd_to_hmm_pfn_flags(range, pmd); in hmm_vma_handle_pmd()
227 static inline unsigned long pte_to_hmm_pfn_flags(struct hmm_range *range, in pte_to_hmm_pfn_flags() argument
240 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_handle_pte() local
264 range->dev_private_owner) { in hmm_vma_handle_pte()
298 cpu_flags = pte_to_hmm_pfn_flags(range, pte); in hmm_vma_handle_pte()
335 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_pmd() local
337 &range->hmm_pfns[(start - range->start) >> PAGE_SHIFT]; in hmm_vma_walk_pmd()
354 return hmm_pfns_fill(start, end, range, 0); in hmm_vma_walk_pmd()
360 return hmm_pfns_fill(start, end, range, HMM_PFN_ERROR); in hmm_vma_walk_pmd()
389 return hmm_pfns_fill(start, end, range, HMM_PFN_ERROR); in hmm_vma_walk_pmd()
409 static inline unsigned long pud_to_hmm_pfn_flags(struct hmm_range *range, in pud_to_hmm_pfn_flags() argument
423 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_pud() local
446 i = (addr - range->start) >> PAGE_SHIFT; in hmm_vma_walk_pud()
448 hmm_pfns = &range->hmm_pfns[i]; in hmm_vma_walk_pud()
450 cpu_flags = pud_to_hmm_pfn_flags(range, pud); in hmm_vma_walk_pud()
484 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_hugetlb_entry() local
495 i = (start - range->start) >> PAGE_SHIFT; in hmm_vma_walk_hugetlb_entry()
496 pfn_req_flags = range->hmm_pfns[i]; in hmm_vma_walk_hugetlb_entry()
497 cpu_flags = pte_to_hmm_pfn_flags(range, entry) | in hmm_vma_walk_hugetlb_entry()
521 range->hmm_pfns[i] &= HMM_PFN_INOUT_FLAGS; in hmm_vma_walk_hugetlb_entry()
522 range->hmm_pfns[i] |= pfn | cpu_flags; in hmm_vma_walk_hugetlb_entry()
536 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_test() local
555 range->hmm_pfns + in hmm_vma_walk_test()
556 ((start - range->start) >> PAGE_SHIFT), in hmm_vma_walk_test()
560 hmm_pfns_fill(start, end, range, HMM_PFN_ERROR); in hmm_vma_walk_test()
594 int hmm_range_fault(struct hmm_range *range) in hmm_range_fault() argument
597 .range = range, in hmm_range_fault()
598 .last = range->start, in hmm_range_fault()
600 struct mm_struct *mm = range->notifier->mm; in hmm_range_fault()
607 if (mmu_interval_check_retry(range->notifier, in hmm_range_fault()
608 range->notifier_seq)) in hmm_range_fault()
610 ret = walk_page_range(mm, hmm_vma_walk.last, range->end, in hmm_range_fault()
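
The hmm_range_fault() hits above (source lines 594-610) show the core of the fault path: the walk resumes from range->start, the mm comes from range->notifier->mm, mmu_interval_check_retry() is checked against range->notifier_seq, and walk_page_range() covers up to range->end. Below is a minimal sketch of the typical caller-side retry loop, following the pattern described in the kernel's HMM documentation; the names my_fault_range and my_device_lock, and the assumption that the driver already registered interval_sub with mmu_interval_notifier_insert(), are illustrative placeholders rather than anything defined in mm/hmm.c.

#include <linux/hmm.h>
#include <linux/mmu_notifier.h>
#include <linux/mm.h>
#include <linux/mutex.h>

/* Sketch only: my_device_lock stands in for whatever lock serializes the
 * driver's device page-table updates. */
static int my_fault_range(struct mmu_interval_notifier *interval_sub,
			  struct mm_struct *mm, struct mutex *my_device_lock,
			  unsigned long addr, unsigned long *hmm_pfns,
			  unsigned long npages)
{
	struct hmm_range range = {
		.notifier       = interval_sub,
		.start          = addr,
		.end            = addr + (npages << PAGE_SHIFT),
		.hmm_pfns       = hmm_pfns,
		.default_flags  = HMM_PFN_REQ_FAULT,	/* fault in every page */
		.pfn_flags_mask = 0,			/* ignore per-pfn request bits */
	};
	int ret;

	do {
		range.notifier_seq = mmu_interval_read_begin(interval_sub);
		mmap_read_lock(mm);
		ret = hmm_range_fault(&range);
		mmap_read_unlock(mm);
		if (ret) {
			if (ret == -EBUSY)
				continue;	/* raced with an invalidation, retry */
			return ret;
		}

		mutex_lock(my_device_lock);
		if (mmu_interval_read_retry(interval_sub, range.notifier_seq)) {
			mutex_unlock(my_device_lock);
			continue;		/* sequence moved on, redo the walk */
		}
		break;
	} while (true);

	/* hmm_pfns[] stays valid while my_device_lock is held: program the
	 * device here, then drop the lock. */
	mutex_unlock(my_device_lock);
	return 0;
}

The default_flags/pfn_flags_mask pair in the sketch mirrors what hmm_pte_need_fault() does in the hits at source lines 112-113: the per-pfn request bits are ANDed with range->pfn_flags_mask and then ORed with range->default_flags, so a caller that zeroes pfn_flags_mask makes default_flags apply uniformly to the whole range.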