
Searched refs:pmd (Results 1 – 25 of 40) sorted by relevance

/mm/kasan/
init.c
124 if (pmd_none(*pmd)) { in zero_pmd_populate()
153 pmd_t *pmd; in zero_pud_populate() local
192 pmd_t *pmd; in zero_p4d_populate() local
244 pmd_t *pmd; in kasan_populate_early_shadow() local
298 pmd_clear(pmd); in kasan_free_pte()
303 pmd_t *pmd; in kasan_free_pmd() local
307 pmd = pmd_start + i; in kasan_free_pmd()
308 if (!pmd_none(*pmd)) in kasan_free_pmd()
384 pmd_clear(pmd); in kasan_remove_pmd_table()
390 kasan_free_pte(pte_offset_kernel(pmd, 0), pmd); in kasan_remove_pmd_table()
[all …]
shadow.c
191 pmd_t *pmd; in shadow_mapped() local
204 pmd = pmd_offset(pud, addr); in shadow_mapped()
205 if (pmd_none(*pmd)) in shadow_mapped()
207 if (pmd_leaf(*pmd)) in shadow_mapped()
209 pte = pte_offset_kernel(pmd, addr); in shadow_mapped()
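The shadow_mapped() hits above are one instance of the canonical top-down descent that recurs throughout these results: walk pgd, p4d, pud, pmd in order, stop early on an empty or leaf entry, and only then touch the PTE level. A minimal sketch of that pattern for a kernel address, assuming a kernel build context; kernel_addr_mapped() is an illustrative name, not a function from the source, and the real shadow_mapped() also handles huge entries at the PUD level:

    static bool kernel_addr_mapped(unsigned long addr)
    {
        pgd_t *pgd = pgd_offset_k(addr);   /* descend from init_mm's pgd */
        p4d_t *p4d;
        pud_t *pud;
        pmd_t *pmd;

        if (pgd_none(*pgd))
            return false;
        p4d = p4d_offset(pgd, addr);
        if (p4d_none(*p4d))
            return false;
        pud = pud_offset(p4d, addr);
        if (pud_none(*pud))
            return false;
        pmd = pmd_offset(pud, addr);
        if (pmd_none(*pmd))
            return false;
        if (pmd_leaf(*pmd))                /* huge mapping: no PTE level below */
            return true;
        return !pte_none(ptep_get(pte_offset_kernel(pmd, addr)));
    }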
/mm/
debug_vm_pgtable.c
171 pmd_t pmd; in pmd_basic_tests() local
189 WARN_ON(!pmd_same(pmd, pmd)); in pmd_basic_tests()
208 pmd_t pmd; in pmd_advanced_tests() local
242 pmd = pmd_wrprotect(pmd); in pmd_advanced_tests()
243 pmd = pmd_mkclean(pmd); in pmd_advanced_tests()
246 pmd = pmd_mkwrite(pmd, args->vma); in pmd_advanced_tests()
247 pmd = pmd_mkdirty(pmd); in pmd_advanced_tests()
250 WARN_ON(!(pmd_write(pmd) && pmd_dirty(pmd))); in pmd_advanced_tests()
256 pmd = pmd_mkyoung(pmd); in pmd_advanced_tests()
270 pmd_t pmd; in pmd_leaf_tests() local
[all …]
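pmd_advanced_tests() exercises the algebra of the pmd_*() helpers: each mk/clear helper must be observable through the matching predicate. A condensed sketch of the identities being asserted, assuming a THP-enabled config and a valid vma; check_pmd_helper_identities() is an illustrative name:

    static void check_pmd_helper_identities(pmd_t pmd, struct vm_area_struct *vma)
    {
        /* Write protection must be visible through pmd_write(). */
        pmd = pmd_wrprotect(pmd);
        WARN_ON(pmd_write(pmd));

        /* Re-enabling write plus dirtying must stick together. */
        pmd = pmd_mkwrite(pmd, vma);   /* pmd_mkwrite() takes the vma in current kernels */
        pmd = pmd_mkdirty(pmd);
        WARN_ON(!(pmd_write(pmd) && pmd_dirty(pmd)));

        /* The accessed bit follows the same pattern. */
        pmd = pmd_mkold(pmd);
        WARN_ON(pmd_young(pmd));
        pmd = pmd_mkyoung(pmd);
        WARN_ON(!pmd_young(pmd));
    }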
huge_memory.c
1055 pmd = pmd_mkwrite(pmd, vma); in maybe_pmd_mkwrite()
1722 pmd = pmd_swp_mkuffd_wp(pmd); in copy_huge_pmd()
1775 pmd = pmd_clear_uffd_wp(pmd); in copy_huge_pmd()
1776 pmd = pmd_wrprotect(pmd); in copy_huge_pmd()
1778 pmd = pmd_mkold(pmd); in copy_huge_pmd()
2087 pmd = pmd_mkyoung(pmd); in do_huge_pmd_numa_page()
2089 pmd = pmd_mkwrite(pmd, vma); in do_huge_pmd_numa_page()
2278 pmd = pmd_mksoft_dirty(pmd); in move_soft_dirty_pmd()
2286 pmd = pmd_clear_uffd_wp(pmd); in clear_uffd_wp_pmd()
2330 pmd = move_soft_dirty_pmd(pmd); in move_huge_pmd()
[all …]
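Several of the huge_memory.c hits are pure flag plumbing: when a huge PMD is copied or moved, bits such as soft-dirty and uffd-wp must be carried over, re-set, or deliberately dropped. A hedged reconstruction of the move_soft_dirty_pmd() shape suggested by line 2278, assuming the usual soft-dirty config guard:

    static pmd_t mark_moved_pmd_soft_dirty(pmd_t pmd)
    {
    #ifdef CONFIG_HAVE_ARCH_SOFT_DIRTY
        /* Migration entries carry the bit in swap format. */
        if (unlikely(is_pmd_migration_entry(pmd)))
            pmd = pmd_swp_mksoft_dirty(pmd);
        else if (pmd_present(pmd))
            pmd = pmd_mksoft_dirty(pmd);
    #endif
        return pmd;
    }

Marking the destination soft-dirty after a move keeps userspace dirty tracking conservative: the page may not have been written, but it has certainly changed address.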
page_table_check.c
164 if (pmd_user_accessible_page(pmd)) { in __page_table_check_pmd_clear()
213 static inline void page_table_check_pmd_flags(pmd_t pmd) in page_table_check_pmd_flags() argument
215 if (pmd_present(pmd) && pmd_uffd_wp(pmd)) in page_table_check_pmd_flags()
216 WARN_ON_ONCE(pmd_write(pmd)); in page_table_check_pmd_flags()
217 else if (is_swap_pmd(pmd) && pmd_swp_uffd_wp(pmd)) in page_table_check_pmd_flags()
230 page_table_check_pmd_flags(pmd); in __page_table_check_pmds_set()
234 if (pmd_user_accessible_page(pmd)) in __page_table_check_pmds_set()
235 page_table_check_set(pmd_pfn(pmd), stride * nr, pmd_write(pmd)); in __page_table_check_pmds_set()
257 pmd_t pmd) in __page_table_check_pte_clear_range() argument
262 if (!pmd_bad(pmd) && !pmd_leaf(pmd)) { in __page_table_check_pte_clear_range()
[all …]
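The flag check at lines 213–217 encodes a userfaultfd invariant: a PMD carrying the uffd write-protect marker must never be writable, whether it is present or sitting in swap/migration format. A minimal sketch of the present-PMD half, assuming a config with userfaultfd write-protect support:

    static void assert_pmd_uffd_wp_sane(pmd_t pmd)
    {
        /* A present, wp-marked PMD must be read-only. */
        if (pmd_present(pmd) && pmd_uffd_wp(pmd))
            WARN_ON_ONCE(pmd_write(pmd));
        /* Non-present PMDs carry the marker in pmd_swp_uffd_wp() instead,
         * checked by the else branch visible at line 217. */
    }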
pagewalk.c
100 pmd_t *pmd; in walk_pmd_range() local
112 if (pmd_none(*pmd)) { in walk_pmd_range()
146 if (pmd_present(*pmd) && pmd_trans_huge(*pmd)) in walk_pmd_range()
152 else if (pmd_leaf(*pmd) || !pmd_present(*pmd)) in walk_pmd_range()
871 pmd_t *pmdp, pmd; in folio_walk_start() local
928 if (pmd_none(pmd)) in folio_walk_start()
931 (!pmd_present(pmd) || pmd_leaf(pmd))) { in folio_walk_start()
938 fw->pmd = pmd; in folio_walk_start()
940 if (pmd_none(pmd)) { in folio_walk_start()
943 } else if (pmd_present(pmd) && !pmd_leaf(pmd)) { in folio_walk_start()
[all …]
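walk_pmd_range() is the engine behind the mm_walk API that later hits (madvise.c, mlock.c, mincore.c, damon/vaddr.c) plug into: a client supplies callbacks in struct mm_walk_ops and walk_page_range() drives the descent. A minimal client sketch; count_present_pmds() and its counter are illustrative, not from the source:

    static int count_pmd_entry(pmd_t *pmd, unsigned long addr,
                               unsigned long next, struct mm_walk *walk)
    {
        unsigned long *count = walk->private;

        /* The core walker skips pmd_none() entries before calling this. */
        if (pmd_present(*pmd))
            (*count)++;
        return 0;               /* non-zero would abort the walk */
    }

    static const struct mm_walk_ops count_ops = {
        .pmd_entry = count_pmd_entry,
    };

    static unsigned long count_present_pmds(struct mm_struct *mm,
                                            unsigned long start,
                                            unsigned long end)
    {
        unsigned long count = 0;

        mmap_read_lock(mm);     /* walk_page_range() asserts the lock is held */
        walk_page_range(mm, start, end, &count_ops, &count);
        mmap_read_unlock(mm);
        return count;
    }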
pgtable-generic.c
52 void pmd_clear_bad(pmd_t *pmd) in pmd_clear_bad() argument
54 pmd_ERROR(*pmd); in pmd_clear_bad()
55 pmd_clear(pmd); in pmd_clear_bad()
140 pmd_t pmd; in pmdp_huge_clear_flush() local
145 return pmd; in pmdp_huge_clear_flush()
224 pmd_t pmd; in pmdp_collapse_flush() local
232 return pmd; in pmdp_collapse_flush()
288 pmdval = pmdp_get_lockless(pmd); in ___pte_offset_map()
298 pmd_clear_bad(pmd); in ___pte_offset_map()
313 pte = __pte_offset_map(pmd, addr, &pmdval); in pte_offset_map_ro_nolock()
[all …]
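pmd_clear_bad() at lines 52–55 is the repair half of a defensive idiom: walkers first ask whether an entry is empty or corrupt, and clear-and-log corrupt ones rather than dereferencing them. The generic pmd_none_or_clear_bad() wraps both checks; a sketch of its shape:

    static int pmd_empty_or_repaired(pmd_t *pmd)
    {
        if (pmd_none(*pmd))
            return 1;
        if (unlikely(pmd_bad(*pmd))) {
            pmd_clear_bad(pmd);    /* logs via pmd_ERROR(), then zeroes the entry */
            return 1;
        }
        return 0;                  /* safe to descend to the PTE level */
    }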
pt_reclaim.c
14 bool try_get_and_clear_pmd(struct mm_struct *mm, pmd_t *pmd, pmd_t *pmdval) in try_get_and_clear_pmd() argument
16 spinlock_t *pml = pmd_lockptr(mm, pmd); in try_get_and_clear_pmd()
21 *pmdval = pmdp_get_lockless(pmd); in try_get_and_clear_pmd()
22 pmd_clear(pmd); in try_get_and_clear_pmd()
35 void try_to_free_pte(struct mm_struct *mm, pmd_t *pmd, unsigned long addr, in try_to_free_pte() argument
43 pml = pmd_lock(mm, pmd); in try_to_free_pte()
44 start_pte = pte_offset_map_rw_nolock(mm, pmd, addr, &pmdval, &ptl); in try_to_free_pte()
57 pmd_clear(pmd); in try_to_free_pte()
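try_get_and_clear_pmd() is a small transaction: take the PMD lock via pmd_lockptr(), snapshot the entry with pmdp_get_lockless(), clear it, and hand the snapshot back so the caller can free the detached PTE page outside the lock. A simplified sketch of that shape; the real function layers extra checks not visible in these hits:

    static void snapshot_and_clear_pmd(struct mm_struct *mm, pmd_t *pmd,
                                       pmd_t *pmdval)
    {
        spinlock_t *pml = pmd_lockptr(mm, pmd);

        spin_lock(pml);
        *pmdval = pmdp_get_lockless(pmd);   /* remember what was detached */
        pmd_clear(pmd);
        spin_unlock(pml);
    }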
sparse-vmemmap.c
158 pte_t *pte = pte_offset_kernel(pmd, addr); in vmemmap_pte_populate()
200 pmd_t *pmd = pmd_offset(pud, addr); in vmemmap_pmd_populate() local
201 if (pmd_none(*pmd)) { in vmemmap_pmd_populate()
206 pmd_populate_kernel(&init_mm, pmd, p); in vmemmap_pmd_populate()
208 return pmd; in vmemmap_pmd_populate()
257 pmd_t *pmd; in vmemmap_populate_address() local
269 pmd = vmemmap_pmd_populate(pud, addr, node); in vmemmap_populate_address()
270 if (!pmd) in vmemmap_populate_address()
424 pmd_t *pmd; in vmemmap_populate_hugepages() local
441 pmd = pmd_offset(pud, addr); in vmemmap_populate_hugepages()
[all …]
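vmemmap_pmd_populate() at lines 200–208 shows the allocate-then-populate idiom used at every level of the early mappers: if the PMD is empty, allocate a zeroed PTE page and hook it in with pmd_populate_kernel(). A condensed sketch; get_zeroed_page() stands in here for the file's node-aware block allocator:

    static pmd_t *pmd_populate_if_none(pud_t *pud, unsigned long addr)
    {
        pmd_t *pmd = pmd_offset(pud, addr);

        if (pmd_none(*pmd)) {
            /* Stand-in for the file's node-aware allocator. */
            void *p = (void *)get_zeroed_page(GFP_KERNEL);

            if (!p)
                return NULL;
            pmd_populate_kernel(&init_mm, pmd, p);
        }
        return pmd;
    }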
khugepaged.c
762 pmd_t *pmd, in __collapse_huge_page_copy_failed() argument
974 pmd_t **pmd) in find_pmd_or_thp_or_none() argument
977 if (!*pmd) in find_pmd_or_thp_or_none()
985 pmd_t *pmd) in check_pmd_still_valid() argument
992 if (new_pmd != pmd) in check_pmd_still_valid()
1022 .pmd = pmd, in __collapse_huge_page_swapin()
1113 pmd_t *pmd, _pmd; in collapse_huge_page() local
1276 pmd_t *pmd; in hpage_collapse_scan_pmd() local
1479 .pmd = pmdp, in set_huge_pmd()
1515 pmd_t *pmd, pgt_pmd; in collapse_pte_mapped_thp() local
[all …]
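check_pmd_still_valid() at lines 985–992 captures khugepaged's revalidation discipline: after dropping locks to sleep or allocate, re-walk the tree and compare the PMD pointer with the one found earlier; any mismatch means the table changed underneath. A sketch using the mm_find_pmd() lookup helper that appears in the rmap.c hits below; the -EAGAIN convention is an assumption, not the file's actual return code:

    static int revalidate_pmd(struct mm_struct *mm, unsigned long addr,
                              pmd_t *pmd)
    {
        pmd_t *new_pmd = mm_find_pmd(mm, addr);

        if (new_pmd != pmd)
            return -EAGAIN;    /* table replaced while unlocked: restart */
        return 0;
    }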
memory.c
201 pmd_t *pmd; in free_pmd_range() local
1789 pmd_t *pmd; in zap_pmd_range() local
1795 if (is_swap_pmd(*pmd) || pmd_trans_huge(*pmd)) { in zap_pmd_range()
1820 pmd--; in zap_pmd_range()
2072 pmd_t *pmd; in walk_to_pmd() local
2082 if (!pmd) in walk_to_pmd()
2094 if (!pmd) in __get_locked_pte()
2238 if (!pmd) in insert_pages()
2718 if (!pmd) in remap_pmd_range()
3043 if (!pmd) in apply_to_pmd_range()
[all …]
hmm.c
188 pmd_t pmd) in pmd_to_hmm_pfn_flags() argument
190 if (pmd_protnone(pmd)) in pmd_to_hmm_pfn_flags()
199 pmd_t pmd) in hmm_vma_handle_pmd() argument
341 pmd_t pmd; in hmm_vma_walk_pmd() local
344 pmd = pmdp_get_lockless(pmdp); in hmm_vma_walk_pmd()
345 if (pmd_none(pmd)) in hmm_vma_walk_pmd()
357 if (!pmd_present(pmd)) { in hmm_vma_walk_pmd()
363 if (pmd_trans_huge(pmd)) { in hmm_vma_walk_pmd()
373 pmd = pmdp_get_lockless(pmdp); in hmm_vma_walk_pmd()
374 if (!pmd_trans_huge(pmd)) in hmm_vma_walk_pmd()
[all …]
page_vma_mapped.c
23 pvmw->pte = pte_offset_map_lock(pvmw->vma->vm_mm, pvmw->pmd, in map_pte()
37 pvmw->pte = pte_offset_map_rw_nolock(pvmw->vma->vm_mm, pvmw->pmd, in map_pte()
73 if (unlikely(!pmd_same(*pmdvalp, pmdp_get_lockless(pvmw->pmd)))) { in map_pte()
196 if (pvmw->pmd && !pvmw->pte) in page_vma_mapped_walk()
241 pvmw->pmd = pmd_offset(pud, pvmw->address); in page_vma_mapped_walk()
247 pmde = pmdp_get_lockless(pvmw->pmd); in page_vma_mapped_walk()
250 pvmw->ptl = pmd_lock(mm, pvmw->pmd); in page_vma_mapped_walk()
251 pmde = *pvmw->pmd; in page_vma_mapped_walk()
284 spinlock_t *ptl = pmd_lock(mm, pvmw->pmd); in page_vma_mapped_walk()
319 if (unlikely(!pmd_same(pmde, pmdp_get_lockless(pvmw->pmd)))) { in page_vma_mapped_walk()
gup.c
702 if (pmd_write(pmd)) in can_follow_write_pmd()
720 pmd_t pmdval = *pmd; in follow_huge_pmd()
917 pmd_t *pmd, pmdval; in follow_pmd_mask() local
922 pmd = pmd_offset(pudp, address); in follow_pmd_mask()
934 ptl = pmd_lock(mm, pmd); in follow_pmd_mask()
935 pmdval = *pmd; in follow_pmd_mask()
1055 pmd_t *pmd; in get_gate_page() local
1072 pmd = pmd_offset(pud, address); in get_gate_page()
1073 if (!pmd_present(*pmd)) in get_gate_page()
3046 if (!pmd_present(pmd)) in gup_fast_pmd_range()
[all …]
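follow_pmd_mask() shows GUP's lock-then-reread discipline: an unlocked read decides which path to take, but before acting, the PMD lock is taken and the entry re-read, because it may have changed in between. A minimal sketch of that discipline, with the locked-section work elided:

    static void with_stable_pmd(struct mm_struct *mm, pmd_t *pmd)
    {
        spinlock_t *ptl;
        pmd_t pmdval = pmdp_get_lockless(pmd);   /* unlocked peek */

        if (pmd_none(pmdval))
            return;

        ptl = pmd_lock(mm, pmd);
        pmdval = *pmd;                           /* re-read under the lock */
        if (pmd_present(pmdval)) {
            /* pmdval is now stable; act on it here */
        }
        spin_unlock(ptl);
    }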
mprotect.c
485 #define change_pmd_prepare(vma, pmd, cp_flags) \ argument
489 if (pte_alloc(vma->vm_mm, pmd)) \
515 pmd_t *pmd; in change_pmd_range() local
520 pmd = pmd_offset(pud, addr); in change_pmd_range()
527 ret = change_pmd_prepare(vma, pmd, cp_flags); in change_pmd_range()
533 if (pmd_none(*pmd)) in change_pmd_range()
536 _pmd = pmdp_get_lockless(pmd); in change_pmd_range()
540 __split_huge_pmd(vma, pmd, addr, false); in change_pmd_range()
546 ret = change_pmd_prepare(vma, pmd, cp_flags); in change_pmd_range()
552 ret = change_huge_pmd(tlb, vma, pmd, in change_pmd_range()
[all …]
pgalloc-track.h
46 #define pte_alloc_kernel_track(pmd, address, mask) \ argument
47 ((unlikely(pmd_none(*(pmd))) && \
48 (__pte_alloc_kernel(pmd) || ({*(mask)|=PGTBL_PMD_MODIFIED;0;})))?\
49 NULL: pte_offset_kernel(pmd, address))
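pte_alloc_kernel_track() wraps the usual allocate-if-none dance while also recording, in a caller-supplied pgtbl_mod_mask, that the PMD level was modified, so kernel-mapping users can sync lazily. A hedged usage sketch; map_kernel_pte() is an illustrative caller, not from the source:

    static pte_t *map_kernel_pte(pmd_t *pmd, unsigned long addr,
                                 pgtbl_mod_mask *mask)
    {
        pte_t *pte = pte_alloc_kernel_track(pmd, addr, mask);

        /* On success, PGTBL_PMD_MODIFIED in *mask tells the caller a new
         * PTE page was installed, i.e. kernel mappings may need syncing. */
        return pte;
    }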
madvise.c
380 if (pmd_trans_huge(*pmd)) { in madvise_cold_or_pageout_pte_range()
385 ptl = pmd_trans_huge_lock(pmd, vma); in madvise_cold_or_pageout_pte_range()
389 orig_pmd = *pmd; in madvise_cold_or_pageout_pte_range()
423 pmdp_invalidate(vma, addr, pmd); in madvise_cold_or_pageout_pte_range()
426 set_pmd_at(mm, addr, pmd, orig_pmd); in madvise_cold_or_pageout_pte_range()
427 tlb_remove_pmd_tlb_entry(tlb, pmd, addr); in madvise_cold_or_pageout_pte_range()
509 pte_offset_map_lock(mm, pmd, addr, &ptl); in madvise_cold_or_pageout_pte_range()
671 if (pmd_trans_huge(*pmd)) in madvise_free_pte_range()
736 pte = pte_offset_map_lock(mm, pmd, addr, &ptl); in madvise_free_pte_range()
1090 pmd_t pmdval = pmdp_get(pmd); in guard_install_pmd_entry()
[all …]
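The madvise walkers use pmd_trans_huge_lock() as a combined test-and-lock: it returns the held PMD lock only for a stable transparent huge page, otherwise NULL, so the caller falls through to the PTE level; the same idiom reappears in the mlock.c and mincore.c hits below. A hedged sketch with the per-entry work elided:

    static int visit_range(pmd_t *pmd, unsigned long addr, unsigned long end,
                           struct vm_area_struct *vma)
    {
        spinlock_t *ptl;
        pte_t *start_pte, *pte;

        ptl = pmd_trans_huge_lock(pmd, vma);
        if (ptl) {
            /* Locked THP: act on the whole PMD here. */
            spin_unlock(ptl);
            return 0;
        }

        start_pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl);
        if (!start_pte)
            return 0;       /* raced with a collapse/zap; caller may retry */
        for (pte = start_pte; addr < end; addr += PAGE_SIZE, pte++) {
            /* per-pte work on ptep_get(pte) */
        }
        pte_unmap_unlock(start_pte, ptl);
        return 0;
    }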
mremap.c
99 pmd_t *pmd; in get_old_pmd() local
105 pmd = pmd_offset(pud, addr); in get_old_pmd()
106 if (pmd_none(*pmd)) in get_old_pmd()
109 return pmd; in get_old_pmd()
128 pmd_t *pmd; in alloc_new_pmd() local
134 pmd = pmd_alloc(mm, pud, addr); in alloc_new_pmd()
135 if (!pmd) in alloc_new_pmd()
140 return pmd; in alloc_new_pmd()
334 pmd_t pmd; in move_normal_pmd() local
382 pmd = *old_pmd; in move_normal_pmd()
[all …]
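get_old_pmd() and alloc_new_pmd() are the two sides of a move: the source side only looks up (pmd_offset() plus a pmd_none() check), while the destination side creates missing levels on the way down. A condensed sketch of the allocating variant; each *_alloc() call populates its level only if it is empty and returns NULL on allocation failure:

    static pmd_t *walk_alloc_pmd(struct mm_struct *mm, unsigned long addr)
    {
        pgd_t *pgd = pgd_offset(mm, addr);
        p4d_t *p4d = p4d_alloc(mm, pgd, addr);
        pud_t *pud;

        if (!p4d)
            return NULL;
        pud = pud_alloc(mm, p4d, addr);
        if (!pud)
            return NULL;
        return pmd_alloc(mm, pud, addr);
    }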
hugetlb_vmemmap.c
51 static int vmemmap_split_pmd(pmd_t *pmd, struct page *head, unsigned long start, in vmemmap_split_pmd() argument
75 if (likely(pmd_leaf(*pmd))) { in vmemmap_split_pmd()
86 pmd_populate_kernel(&init_mm, pmd, pgtable); in vmemmap_split_pmd()
97 static int vmemmap_pmd_entry(pmd_t *pmd, unsigned long addr, in vmemmap_pmd_entry() argument
109 head = pmd_leaf(*pmd) ? pmd_page(*pmd) : NULL; in vmemmap_pmd_entry()
126 pte_page(ptep_get(pte_offset_kernel(pmd, addr))); in vmemmap_pmd_entry()
135 return vmemmap_split_pmd(pmd, head, addr & PMD_MASK, vmemmap_walk); in vmemmap_pmd_entry()
vmalloc.c
158 if (pmd_present(*pmd) && !pmd_free_pte_page(pmd, addr)) in vmap_try_huge_pmd()
168 pmd_t *pmd; in vmap_pmd_range() local
172 if (!pmd) in vmap_pmd_range()
388 pmd_t *pmd; in vunmap_pmd_range() local
392 pmd = pmd_offset(pud, addr); in vunmap_pmd_range()
561 pmd_t *pmd; in vmap_pages_pmd_range() local
565 if (!pmd) in vmap_pages_pmd_range()
786 pmd_t *pmd; in vmalloc_to_page() local
818 pmd = pmd_offset(pud, addr); in vmalloc_to_page()
819 if (pmd_none(*pmd)) in vmalloc_to_page()
[all …]
mlock.c
350 static int mlock_pte_range(pmd_t *pmd, unsigned long addr, in mlock_pte_range() argument
362 ptl = pmd_trans_huge_lock(pmd, vma); in mlock_pte_range()
364 if (!pmd_present(*pmd)) in mlock_pte_range()
366 if (is_huge_zero_pmd(*pmd)) in mlock_pte_range()
368 folio = pmd_folio(*pmd); in mlock_pte_range()
378 start_pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl); in mlock_pte_range()
mincore.c
104 static int mincore_pte_range(pmd_t *pmd, unsigned long addr, unsigned long end, in mincore_pte_range() argument
114 ptl = pmd_trans_huge_lock(pmd, vma); in mincore_pte_range()
121 ptep = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in mincore_pte_range()
rmap.c
820 pmd_t *pmd = NULL; in mm_find_pmd() local
834 pmd = pmd_offset(pud, address); in mm_find_pmd()
836 return pmd; in mm_find_pmd()
907 pvmw.pmd)) in folio_referenced_one()
1066 pmd_t *pmd = pvmw->pmd; in page_vma_mkclean_one() local
1069 if (!pmd_dirty(*pmd) && !pmd_write(*pmd)) in page_vma_mkclean_one()
1074 entry = pmdp_invalidate(vma, address, pmd); in page_vma_mkclean_one()
1077 set_pmd_at(vma->vm_mm, address, pmd, entry); in page_vma_mkclean_one()
1954 pvmw.pmd, false); in try_to_unmap_one()
2331 pvmw.pmd, true); in try_to_migrate_one()
[all …]
/mm/damon/
vaddr.c
313 if (pmd_trans_huge(pmdp_get(pmd))) { in damon_mkold_pmd_entry()
314 ptl = pmd_lock(walk->mm, pmd); in damon_mkold_pmd_entry()
315 pmde = pmdp_get(pmd); in damon_mkold_pmd_entry()
323 damon_pmdp_mkold(pmd, walk->vma, addr); in damon_mkold_pmd_entry()
330 pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in damon_mkold_pmd_entry()
451 if (pmd_trans_huge(pmdp_get(pmd))) { in damon_young_pmd_entry()
454 ptl = pmd_lock(walk->mm, pmd); in damon_young_pmd_entry()
455 pmde = pmdp_get(pmd); in damon_young_pmd_entry()
483 pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in damon_young_pmd_entry()
737 ptl = pmd_lock(walk->mm, pmd); in damos_va_migrate_pmd_entry()
[all …]
ops-common.c
75 void damon_pmdp_mkold(pmd_t *pmd, struct vm_area_struct *vma, unsigned long addr) in damon_pmdp_mkold() argument
78 struct folio *folio = damon_get_folio(pmd_pfn(pmdp_get(pmd))); in damon_pmdp_mkold()
83 if (pmdp_clear_young_notify(vma, addr, pmd)) in damon_pmdp_mkold()
154 damon_pmdp_mkold(pvmw.pmd, vma, addr); in damon_folio_mkold_one()
206 *accessed = pmd_young(pmdp_get(pvmw.pmd)) || in damon_folio_young_one()
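damon_pmdp_mkold() harvests the accessed bit at PMD granularity: snapshot the entry with pmdp_get(), and if it is present and young, clear the bit through the MMU-notifier-aware helper so secondary MMUs (e.g. KVM) observe the clear too. A minimal sketch of that read-and-clear step; the wrapper name is illustrative:

    static bool pmd_test_and_clear_accessed(struct vm_area_struct *vma,
                                            unsigned long addr, pmd_t *pmd)
    {
        pmd_t pmde = pmdp_get(pmd);     /* tearing-safe snapshot */

        if (!pmd_present(pmde) || !pmd_young(pmde))
            return false;
        /* Clears the young bit and notifies secondary MMUs. */
        return pmdp_clear_young_notify(vma, addr, pmd);
    }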
