/linux-6.3-rc2/mm/

bootmem_info.c
     60  page = virt_to_page(memmap);   in register_page_bootmem_info_section()
     69  page = virt_to_page(usage);   in register_page_bootmem_info_section()
     93  page = virt_to_page(usage);   in register_page_bootmem_info_section()
    109  page = virt_to_page(pgdat);   in register_page_bootmem_info_node()

ptdump.c
     36  if (pgd_page(val) == virt_to_page(lm_alias(kasan_early_shadow_p4d)))   in ptdump_pgd_entry()
     59  if (p4d_page(val) == virt_to_page(lm_alias(kasan_early_shadow_pud)))   in ptdump_p4d_entry()
     82  if (pud_page(val) == virt_to_page(lm_alias(kasan_early_shadow_pmd)))   in ptdump_pud_entry()
    104  if (pmd_page(val) == virt_to_page(lm_alias(kasan_early_shadow_pte)))   in ptdump_pmd_entry()

z3fold.c
    280  struct page *page = virt_to_page(zhdr);   in put_z3fold_header()
    447  struct page *page = virt_to_page(zhdr);   in __release_z3fold_page()
    499  struct page *page = virt_to_page(zhdr);   in free_pages_work()
    689  struct page *page = virt_to_page(zhdr);   in z3fold_compact_page()
    739  page = virt_to_page(zhdr);   in do_compact_page()
    822  page = virt_to_page(zhdr);   in __z3fold_alloc()
    867  page = virt_to_page(zhdr);   in __z3fold_alloc()
   1031  page = virt_to_page(zhdr);   in z3fold_alloc()
   1108  page = virt_to_page(zhdr);   in z3fold_free()
   1382  page = virt_to_page(zhdr);   in z3fold_map()
    [all …]

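Every hit in this mm/ group starts from a pointer that is known to live in the kernel's linear map (a section memmap, a bootmem usage block, a z3fold page header) and uses virt_to_page() to recover the struct page that backs it. A minimal sketch of that round trip, assuming a lowmem page allocation; the function name is made up, and page_address() appears only to illustrate that it is the inverse mapping:

#include <linux/gfp.h>
#include <linux/mm.h>

/* Sketch only: virt_to_page() is valid for lowmem/linear-map addresses
 * such as page or slab allocations, never for vmalloc()/ioremap() ranges. */
static void virt_to_page_roundtrip_sketch(void)
{
	unsigned long addr = __get_free_page(GFP_KERNEL);
	struct page *page;

	if (!addr)
		return;

	page = virt_to_page((void *)addr);		/* kernel VA -> struct page */
	WARN_ON(page_address(page) != (void *)addr);	/* page_address() inverts it */

	free_page(addr);
}

For addresses outside the linear map (vmalloc mappings), the kernel provides vmalloc_to_page() instead.
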
/linux-6.3-rc2/arch/powerpc/mm/

pgtable_64.c
    108  return virt_to_page(p4d_pgtable(p4d));   in p4d_page()
    119  return virt_to_page(pud_pgtable(pud));   in pud_page()
    138  return virt_to_page(pmd_page_vaddr(pmd));   in pmd_page()

pgtable-frag.c
     23  page = virt_to_page(pte_frag);   in pte_frag_destroy()
    111  struct page *page = virt_to_page(table);   in pte_fragment_free()

/linux-6.3-rc2/arch/x86/mm/

pgtable.c
     63  struct page *page = virt_to_page(pmd);   in ___pmd_free_tlb()
     80  paravirt_tlb_remove_table(tlb, virt_to_page(pud));   in ___pud_free_tlb()
     87  paravirt_tlb_remove_table(tlb, virt_to_page(p4d));   in ___p4d_free_tlb()
     95  struct page *page = virt_to_page(pgd);   in pgd_list_add()
    102  struct page *page = virt_to_page(pgd);   in pgd_list_del()
    115  virt_to_page(pgd)->pt_mm = mm;   in pgd_set_mm()
    219  pgtable_pmd_page_dtor(virt_to_page(pmds[i]));   in free_pmds()
    238  if (pmd && !pgtable_pmd_page_ctor(virt_to_page(pmd))) {   in preallocate_pmds()
    841  pgtable_pmd_page_dtor(virt_to_page(pmd));   in pud_free_pmd_page()

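The free_pmds()/preallocate_pmds() hits (and the s390 pgalloc.h ones further down) show the usual page-table accounting pairing: pgtable_pmd_page_ctor() runs on the backing struct page when the table is allocated, and pgtable_pmd_page_dtor() runs, again via virt_to_page(), right before the table is freed. A simplified sketch loosely modelled on the generic pgalloc helpers, assuming one full page per PMD table; the sketch_* names and the plain GFP flags are illustrative, not the x86 code:

#include <linux/gfp.h>
#include <linux/mm.h>

static pmd_t *sketch_pmd_alloc_one(struct mm_struct *mm, unsigned long addr)
{
	struct page *page = alloc_pages(GFP_KERNEL | __GFP_ZERO, 0);

	if (!page)
		return NULL;
	if (!pgtable_pmd_page_ctor(page)) {	/* split ptlock / accounting setup */
		__free_pages(page, 0);
		return NULL;
	}
	return (pmd_t *)page_address(page);
}

static void sketch_pmd_free(struct mm_struct *mm, pmd_t *pmd)
{
	pgtable_pmd_page_dtor(virt_to_page(pmd));	/* undo the ctor first */
	free_page((unsigned long)pmd);
}
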
/linux-6.3-rc2/arch/um/include/asm/

pgalloc.h
     38  pgtable_pmd_page_dtor(virt_to_page(pmd)); \
     39  tlb_remove_page((tlb),virt_to_page(pmd)); \

/linux-6.3-rc2/arch/powerpc/mm/kasan/

init_book3e_64.c
     19  return p4d_page(p4d) == virt_to_page(lm_alias(kasan_early_shadow_pud));   in kasan_pud_table()
     24  return pud_page(pud) == virt_to_page(lm_alias(kasan_early_shadow_pmd));   in kasan_pmd_table()
     29  return pmd_page(pmd) == virt_to_page(lm_alias(kasan_early_shadow_pte));   in kasan_pte_table()

/linux-6.3-rc2/arch/arc/include/asm/

pgtable-levels.h
    112  #define p4d_page(p4d) virt_to_page(p4d_pgtable(p4d))
    134  #define pud_page(pud) virt_to_page(pud_pgtable(pud))
    162  #define pmd_page(pmd) virt_to_page(pmd_page_vaddr(pmd))

pgtable.h
     25  #define ZERO_PAGE(vaddr) (virt_to_page(empty_zero_page))

/linux-6.3-rc2/arch/arm64/include/asm/

tlb.h
     86  struct page *page = virt_to_page(pmdp);   in __pmd_free_tlb()
     97  tlb_remove_table(tlb, virt_to_page(pudp));   in __pud_free_tlb()

/linux-6.3-rc2/arch/parisc/mm/

ioremap.c
     58  for (page = virt_to_page(t_addr);   in ioremap()
     59  page <= virt_to_page(t_end); page++) {   in ioremap()

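The two ioremap() hits here (and the matching nios2 ioremap() hits further down) step a struct page cursor across a contiguous lowmem range, which works because the linear map keeps virtual addresses, physical addresses, and struct pages in the same order. A hedged sketch of that walk; the PageReserved() test is an illustrative stand-in, not necessarily what either ioremap() checks per page:

#include <linux/mm.h>
#include <linux/page-flags.h>

/* Sketch: visit every struct page backing the lowmem range
 * [t_addr, t_end] and apply some per-page predicate. */
static bool sketch_lowmem_range_check(unsigned long t_addr, unsigned long t_end)
{
	struct page *page;

	for (page = virt_to_page(t_addr); page <= virt_to_page(t_end); page++) {
		if (!PageReserved(page))	/* illustrative per-page test */
			return false;
	}
	return true;
}
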
/linux-6.3-rc2/arch/m68k/include/asm/

mcf_pgalloc.h
     38  struct page *page = virt_to_page(pgtable);   in __pte_free_tlb()
     64  struct page *page = virt_to_page(pgtable);   in pte_free()

/linux-6.3-rc2/fs/ubifs/

crypto.c
     53  err = fscrypt_encrypt_block_inplace(inode, virt_to_page(p), pad_len,   in ubifs_encrypt()
     78  err = fscrypt_decrypt_block_inplace(inode, virt_to_page(&dn->data),   in ubifs_decrypt()

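Here the data sits in an ordinary kmalloc'd node buffer, but the fscrypt in-place helpers take a struct page. Because slab memory lives in the linear map, the buffer can be expressed as the (page, offset-in-page) pair such page-based APIs expect. A minimal sketch of that split; the helper name is made up:

#include <linux/mm.h>

/* Sketch: express a slab/linear-map buffer as a (page, offset) pair.
 * Not valid for vmalloc()ed buffers. */
static void sketch_buf_to_page_offset(void *buf, struct page **page,
				      unsigned int *offs)
{
	*page = virt_to_page(buf);
	*offs = offset_in_page(buf);
}
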
/linux-6.3-rc2/arch/xtensa/mm/

kasan_init.c
     27  mk_pte(virt_to_page(kasan_early_shadow_page),   in kasan_early_init()
     95  mk_pte(virt_to_page(kasan_early_shadow_page),   in kasan_init()

/linux-6.3-rc2/arch/csky/kernel/

vdso.c
     46  pg = virt_to_page(vdso_start + (i << PAGE_SHIFT));   in vdso_init()
     49  vdso_pagelist[i] = virt_to_page(vdso_data);   in vdso_init()

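The csky vdso_init() (like the arm vdso.c hits at the end of this listing) turns the page-aligned vdso image, which is part of the kernel image and therefore in the linear map, into an array of struct page pointers for later mapping into user space. A rough sketch, assuming vdso_start/vdso_end symbols that bound a page-aligned image; the function name and the kcalloc-based allocation are illustrative:

#include <linux/mm.h>
#include <linux/slab.h>

extern char vdso_start[], vdso_end[];	/* assumed page-aligned image bounds */

static struct page **sketch_build_vdso_pagelist(void)
{
	unsigned long i, npages = (vdso_end - vdso_start) >> PAGE_SHIFT;
	struct page **pagelist;

	pagelist = kcalloc(npages, sizeof(struct page *), GFP_KERNEL);
	if (!pagelist)
		return NULL;

	for (i = 0; i < npages; i++)
		pagelist[i] = virt_to_page(vdso_start + i * PAGE_SIZE);

	return pagelist;
}
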
/linux-6.3-rc2/mm/kasan/

init.c
     35  return pgd_page(pgd) == virt_to_page(lm_alias(kasan_early_shadow_p4d));   in kasan_p4d_table()
     47  return p4d_page(p4d) == virt_to_page(lm_alias(kasan_early_shadow_pud));   in kasan_pud_table()
     59  return pud_page(pud) == virt_to_page(lm_alias(kasan_early_shadow_pmd));   in kasan_pmd_table()
     72  return pmd_page(pmd) == virt_to_page(lm_alias(kasan_early_shadow_pte));   in kasan_pte_table()
     77  return pte_page(pte) == virt_to_page(lm_alias(kasan_early_shadow_page));   in kasan_early_shadow_page_entry()

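These checks, like the powerpc book3e ones above and the ptdump.c ones at the top, follow one pattern: an entry points at the shared early-shadow table exactly when the struct page behind it is the struct page of the corresponding kasan_early_shadow_* array. lm_alias() folds the kernel-image address of that array into the linear map so that virt_to_page() is legitimate on it. A paraphrased sketch of the PMD-level check (sketch_* name is made up):

#include <linux/kasan.h>
#include <linux/mm.h>
#include <linux/pgtable.h>

/* Sketch: does this PMD point at the shared early-shadow PTE table? */
static bool sketch_pmd_is_early_shadow(pmd_t pmd)
{
	return pmd_page(pmd) == virt_to_page(lm_alias(kasan_early_shadow_pte));
}
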
/linux-6.3-rc2/arch/s390/include/asm/

pgalloc.h
     89  if (!pgtable_pmd_page_ctor(virt_to_page(table))) {   in pmd_alloc_one()
    100  pgtable_pmd_page_dtor(virt_to_page(pmd));   in pmd_free()

kfence.h
     38  __kernel_map_pages(virt_to_page(addr), 1, !protect);   in kfence_protect_page()

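The kfence.h hit toggles the kernel linear mapping of a single page: virt_to_page() names the page and __kernel_map_pages() maps or unmaps it, so that a stray access to a protected KFENCE object faults. A hedged one-function sketch of that toggle; the wrapper name is made up and __kernel_map_pages() is only available on architectures/configs that provide it:

#include <linux/mm.h>

/* Sketch: unmap (protect == true) or remap (protect == false) one lowmem page. */
static void sketch_toggle_page_mapping(unsigned long addr, bool protect)
{
	__kernel_map_pages(virt_to_page(addr), 1, !protect);
}
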
/linux-6.3-rc2/arch/sparc/kernel/

leon_smp.c
    248  free_reserved_page(virt_to_page(&trapbase_cpu1));   in leon_smp_done()
    251  free_reserved_page(virt_to_page(&trapbase_cpu2));   in leon_smp_done()
    254  free_reserved_page(virt_to_page(&trapbase_cpu3));   in leon_smp_done()

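leon_smp_done() gives per-CPU trap-table pages that are no longer needed back to the page allocator: virt_to_page() locates the struct page behind the kernel-image symbol and free_reserved_page() clears its reserved state and frees it. A sketch of the same idea, using a hypothetical page-sized object:

#include <linux/mm.h>

/* Hypothetical page-sized, page-aligned object baked into the kernel image. */
extern char sketch_boot_scratch[PAGE_SIZE] __aligned(PAGE_SIZE);

/* Sketch: hand a no-longer-needed reserved page back to the buddy allocator. */
static void sketch_release_boot_scratch(void)
{
	free_reserved_page(virt_to_page(sketch_boot_scratch));
}
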
/linux-6.3-rc2/kernel/events/

ring_buffer.c
    636  struct page *page = virt_to_page(rb->aux_pages[idx]);   in rb_free_aux_page()
    730  struct page *page = virt_to_page(rb->aux_pages[0]);   in rb_alloc_aux()
    782  return virt_to_page(rb->user_page);   in __perf_mmap_to_page()
    784  return virt_to_page(rb->data_pages[pgoff - 1]);   in __perf_mmap_to_page()
    802  struct page *page = virt_to_page(addr);   in perf_mmap_free_page()
    957  return virt_to_page(rb->aux_pages[aux_pgoff]);   in perf_mmap_to_page()

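In the non-vmalloc layout these hits come from, the perf ring buffer keeps one kernel virtual address per buffer page, so the mmap path just indexes the right address and converts it with virt_to_page(). A simplified sketch with a made-up struct; the real struct perf_buffer carries considerably more state:

#include <linux/mm.h>

/* Simplified stand-in for the ring-buffer bookkeeping. */
struct sketch_rb {
	void *user_page;		/* control/meta page seen by userspace */
	unsigned long nr_pages;		/* number of data pages */
	void *data_pages[];		/* one linear-map address per data page */
};

/* Sketch: translate an mmap page offset to the backing struct page. */
static struct page *sketch_mmap_to_page(struct sketch_rb *rb, unsigned long pgoff)
{
	if (pgoff > rb->nr_pages)
		return NULL;
	if (pgoff == 0)
		return virt_to_page(rb->user_page);
	return virt_to_page(rb->data_pages[pgoff - 1]);
}
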
/linux-6.3-rc2/arch/nios2/mm/

ioremap.c
    139  for (page = virt_to_page(t_addr);   in ioremap()
    140  page <= virt_to_page(t_end); page++)   in ioremap()

/linux-6.3-rc2/arch/ia64/include/asm/

pgtable.h
    232  #define pte_page(pte) virt_to_page(((pte_val(pte) & _PFN_MASK) + PAGE_OFFSET))
    240  #define pmd_page(pmd) virt_to_page((pmd_val(pmd) + PAGE_OFFSET))
    247  #define pud_page(pud) virt_to_page((pud_val(pud) + PAGE_OFFSET))
    255  #define p4d_page(p4d) virt_to_page((p4d_val(p4d) + PAGE_OFFSET))

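These macros convert in two steps: the table entry holds a physical address (masked down to a frame at the PTE level), adding PAGE_OFFSET turns that into its linear-map virtual alias, and virt_to_page() then yields the struct page. The same arithmetic written with the generic helper, as a generic sketch rather than ia64 code:

#include <linux/mm.h>

/* Sketch: entry's physical address -> linear-map alias -> struct page. */
static struct page *sketch_phys_to_struct_page(phys_addr_t pa)
{
	return virt_to_page(__va(pa));	/* __va(pa) is pa + PAGE_OFFSET on direct-map configs */
}
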
/linux-6.3-rc2/arch/arm/include/asm/vdso/

vsyscall.h
     27  flush_dcache_page(virt_to_page(vdata));   in __arm_sync_vdso_data()

/linux-6.3-rc2/arch/arm/kernel/

vdso.c
    201  vdso_data_page = virt_to_page(vdso_data);   in vdso_init()
    207  page = virt_to_page(vdso_start + i * PAGE_SIZE);   in vdso_init()