Searched refs:virt_to_page (Results 1 – 25 of 107) sorted by relevance
108 return virt_to_page(p4d_pgtable(p4d)); in p4d_page()
119 return virt_to_page(pud_pgtable(pud)); in pud_page()
138 return virt_to_page(pmd_page_vaddr(pmd)); in pmd_page()
37 for (page = virt_to_page(t_addr); in ioremap_prot()
38 page <= virt_to_page(t_end); page++) { in ioremap_prot()
82 #define virt_to_page(kaddr) phys_to_page(__pa(kaddr)) macro
94 #define virt_to_page(kaddr) \ macro
103 #define virt_to_pfn(kaddr) page_to_pfn(virt_to_page(kaddr))
43 set_page_address(virt_to_page(kaddr), vaddr); in arch_kfence_init_pool()
19 return p4d_page(p4d) == virt_to_page(lm_alias(kasan_early_shadow_pud)); in kasan_pud_table()
24 return pud_page(pud) == virt_to_page(lm_alias(kasan_early_shadow_pmd)); in kasan_pmd_table()
29 return pmd_page(pmd) == virt_to_page(lm_alias(kasan_early_shadow_pte)); in kasan_pte_table()
112 #define p4d_page(p4d) virt_to_page(p4d_pgtable(p4d))
134 #define pud_page(pud) virt_to_page(pud_pgtable(pud))
161 #define pmd_page(pmd) virt_to_page((void *)pmd_page_vaddr(pmd))
25 #define ZERO_PAGE(vaddr) (virt_to_page(empty_zero_page))
26 mk_pte(virt_to_page(kasan_early_shadow_page), in kasan_early_init()
89 mk_pte(virt_to_page(kasan_early_shadow_page), in kasan_init()
15 flush_dcache_page(virt_to_page(vdata)); in __arch_sync_vdso_time_data()
27 __kernel_map_pages(virt_to_page((void *)addr), 1, !protect); in kfence_protect_page()
38 #define ZERO_PAGE(vaddr) (virt_to_page(empty_zero_page))
32 #define virt_to_page(addr) (mem_map + (((unsigned long)(addr)-PAGE_OFFSET) >> PAGE_SHIFT)) macro
107 #define pte_page(pte) virt_to_page(__pte_page(pte))
109 #define pmd_page(pmd) virt_to_page((void *)pmd_page_vaddr(pmd))
39 struct page *page = virt_to_page((void *)addr); in kfence_protect_page()
139 for (page = virt_to_page(t_addr); in ioremap()
140 page <= virt_to_page(t_end); page++) in ioremap()
248 free_reserved_page(virt_to_page(&trapbase_cpu1[0])); in leon_smp_done()
251 free_reserved_page(virt_to_page(&trapbase_cpu2[0])); in leon_smp_done()
254 free_reserved_page(virt_to_page(&trapbase_cpu3[0])); in leon_smp_done()
71 #define virt_to_page(addr) \ macro
68 #define virt_to_page(kaddr) pfn_to_page(__pa(kaddr) >> PAGE_SHIFT) macro
32 pg = virt_to_page(vdso_start + (i << PAGE_SHIFT)); in vdso_init()
84 # define virt_to_page(vaddr) pfn_to_page(PFN_DOWN(virt_to_phys(vaddr))) macro
57 = pte_val(mk_pte(virt_to_page(ret), PAGE_KERNEL)); in pgd_alloc()
87 return pte_mkdirty(mk_pte(virt_to_page(EMPTY_PGE), PAGE_SHARED)); in __bad_page()
90 #define virt_to_page(kaddr) pfn_to_page(PFN_DOWN(__pa(kaddr))) macro
80 #define virt_to_page(x) (mem_map + MAP_NR(x)) macro
60 syscall_pages[0] = virt_to_page(syscall_page); in vsyscall_init()
82 #define virt_to_page(kaddr) pfn_to_page(__pa(kaddr) >> PAGE_SHIFT) macro
Completed in 35 milliseconds
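
Note: most of the per-architecture definitions in the hits above reduce virt_to_page() to the same arithmetic: turn the kernel linear-map virtual address into a physical address (or an offset from PAGE_OFFSET), shift right by PAGE_SHIFT to get a page frame number, and index the page array. The following is a minimal, self-contained C model of that flat-mem_map form (as in "mem_map + (((unsigned long)(addr) - PAGE_OFFSET) >> PAGE_SHIFT)"); the MOCK_* constants and mock_mem_map array are illustrative stand-ins, not values from any real architecture.

#include <stdio.h>

/* Illustrative values only; real kernels define these per architecture. */
#define MOCK_PAGE_SHIFT   12UL          /* 4 KiB pages */
#define MOCK_PAGE_OFFSET  0xC0000000UL  /* start of the kernel linear map (assumed) */

/* Minimal stand-in for the kernel's struct page. */
struct mock_page {
    unsigned long flags;
};

/* Mock mem_map covering 16 pages of "linear-mapped" memory. */
static struct mock_page mock_mem_map[16];

/*
 * Model of the flat-mem_map virt_to_page() pattern:
 * subtract PAGE_OFFSET, shift down to a PFN, index mem_map.
 */
static struct mock_page *mock_virt_to_page(const void *kaddr)
{
    unsigned long pfn = ((unsigned long)kaddr - MOCK_PAGE_OFFSET) >> MOCK_PAGE_SHIFT;
    return &mock_mem_map[pfn];
}

int main(void)
{
    /* A made-up linear-map address three pages above PAGE_OFFSET. */
    const void *kaddr =
        (const void *)(MOCK_PAGE_OFFSET + 3 * (1UL << MOCK_PAGE_SHIFT) + 0x123);

    struct mock_page *page = mock_virt_to_page(kaddr);
    printf("page index = %td\n", page - mock_mem_map); /* prints 3 */
    return 0;
}

The phys_to_page()/pfn_to_page() variants in the hits differ only in whether the virtual-to-physical step goes through __pa() or virt_to_phys() first; sparse-memory configurations replace the single mem_map array with a lookup, which this sketch does not model.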