Searched refs: P4D_SIZE (results 1 – 23 of 23, sorted by relevance)
/arch/riscv/mm/
kasan_init.c
    128: if (p4d_none(p4dp_get(p4dp)) && IS_ALIGNED(vaddr, P4D_SIZE) &&  in kasan_populate_p4d()
    129: (next - vaddr) >= P4D_SIZE) {  in kasan_populate_p4d()
    130: phys_addr = memblock_phys_alloc(P4D_SIZE, P4D_SIZE);  in kasan_populate_p4d()
    133: memset(__va(phys_addr), KASAN_SHADOW_INIT, P4D_SIZE);  in kasan_populate_p4d()
    206: if (pgtable_l4_enabled && IS_ALIGNED(vaddr, P4D_SIZE) &&  in kasan_early_clear_p4d()
    207: (next - vaddr) >= P4D_SIZE) {  in kasan_early_clear_p4d()
    290: if (p4d_none(p4dp_get(p4dp)) && IS_ALIGNED(vaddr, P4D_SIZE) &&  in kasan_early_populate_p4d()
    291: (next - vaddr) >= P4D_SIZE) {  in kasan_early_populate_p4d()
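A note on the riscv KASAN hits above: they are all one idiom, take the large-granule path only when the current virtual address is P4D-aligned and the remaining range still covers a full P4D entry, otherwise fall through to the next page-table level. A minimal userspace sketch of that check (assuming P4D_SHIFT = 39, the Sv48 value, with a local stand-in for the kernel's IS_ALIGNED() helper):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define P4D_SHIFT 39                   /* assumption: riscv Sv48 value */
    #define P4D_SIZE  (1ULL << P4D_SHIFT)

    /* Local stand-in for the kernel's IS_ALIGNED() helper. */
    static bool is_aligned(uint64_t x, uint64_t a)
    {
        return (x & (a - 1)) == 0;
    }

    /*
     * Shape of the check in kasan_populate_p4d(): take the one-shot path
     * only when a whole, aligned P4D-sized block fits in [vaddr, next).
     */
    static bool can_map_whole_p4d(uint64_t vaddr, uint64_t next)
    {
        return is_aligned(vaddr, P4D_SIZE) && (next - vaddr) >= P4D_SIZE;
    }

    int main(void)
    {
        printf("%d\n", can_map_whole_p4d(0, P4D_SIZE));        /* 1 */
        printf("%d\n", can_map_whole_p4d(4096, 2 * P4D_SIZE)); /* 0: unaligned */
        return 0;
    }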
tlbflush.c
    188: else if (stride_size >= P4D_SIZE)  in flush_tlb_range()
    189: stride_size = P4D_SIZE;  in flush_tlb_range()
init.c
    671: if (sz == P4D_SIZE) {  in create_p4d_mapping()
    753: !(pa & (P4D_SIZE - 1)) && !(va & (P4D_SIZE - 1)) && size >= P4D_SIZE)  in best_map_size()
    754: return P4D_SIZE;  in best_map_size()
    877: P4D_SIZE, PAGE_TABLE);  in set_satp_mode()
    1190: (uintptr_t)fixmap_pud, P4D_SIZE, PAGE_TABLE);  in setup_vm()
    1202: (uintptr_t)trampoline_pud, P4D_SIZE, PAGE_TABLE);  in setup_vm()
    1750: free_vmemmap_storage(p4d_page(p4d), P4D_SIZE, altmap);  in remove_p4d_mapping()
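best_map_size() (hit at line 753) is the selection policy for the linear mapping: use the largest block size to which both the physical and the virtual address are aligned, provided the remaining size still covers it. A compilable sketch of that cascade (Sv48-style shift values assumed; the real riscv code also gates the larger granules on pgtable_l4/l5_enabled):

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE (1ULL << 12)
    #define PMD_SIZE  (1ULL << 21)
    #define PUD_SIZE  (1ULL << 30)
    #define P4D_SIZE  (1ULL << 39)    /* assumption: riscv Sv48 layout */

    /* Largest granule for which pa, va, and size all qualify. */
    static uint64_t best_map_size(uint64_t pa, uint64_t va, uint64_t size)
    {
        if (!(pa & (P4D_SIZE - 1)) && !(va & (P4D_SIZE - 1)) && size >= P4D_SIZE)
            return P4D_SIZE;
        if (!(pa & (PUD_SIZE - 1)) && !(va & (PUD_SIZE - 1)) && size >= PUD_SIZE)
            return PUD_SIZE;
        if (!(pa & (PMD_SIZE - 1)) && !(va & (PMD_SIZE - 1)) && size >= PMD_SIZE)
            return PMD_SIZE;
        return PAGE_SIZE;
    }

    int main(void)
    {
        /* 1 GiB-aligned, 2 GiB mapping: PUD-sized (1 GiB) blocks win. */
        uint64_t sz = best_map_size(1ULL << 30, 1ULL << 30, 2ULL << 30);
        printf("%llu MiB\n", (unsigned long long)(sz >> 20)); /* 1024 MiB */
        return 0;
    }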
hugetlbpage.c
    135: return P4D_SIZE - PUD_SIZE;  in hugetlb_mask_last_page()
    224: else if (sz >= P4D_SIZE)  in num_contig_ptes_from_size()
pageattr.c
    196: if (next - vaddr >= P4D_SIZE &&  in __split_linear_mapping_p4d()
/arch/x86/include/asm/
pgtable_64_types.h
    56: #define P4D_SIZE (_AC(1, UL) << P4D_SHIFT)  (macro)
    57: #define P4D_MASK (~(P4D_SIZE - 1))
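The definition pattern here is the same one arm64 (pgtable-hwdef.h) and riscv (pgtable-64.h) use below: P4D_SIZE is the span of one p4d entry (1 << P4D_SHIFT), and P4D_MASK clears the offset bits beneath it, so rounding down and stepping to the next boundary are single bit operations. A small sketch (assuming P4D_SHIFT = 39, the x86-64 5-level value):

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define P4D_SHIFT 39                  /* assumption: x86-64 5-level value */
    #define P4D_SIZE  (1ULL << P4D_SHIFT)
    #define P4D_MASK  (~(P4D_SIZE - 1))

    int main(void)
    {
        uint64_t vaddr = (123ULL << P4D_SHIFT) + 0xabcde;

        /* Round down to the start of the covering p4d entry. */
        uint64_t start = vaddr & P4D_MASK;

        /* Step to the next P4D boundary. */
        uint64_t next = (vaddr & P4D_MASK) + P4D_SIZE;

        printf("start 0x%" PRIx64 ", next 0x%" PRIx64 "\n", start, next);
        return 0;
    }

The second form is exactly how phys_p4d_init() (init_64.c, line 692) and debugfs_radix_read() (book3s_64_mmu_radix.c, line 1367) advance their walks in the hits below.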
pgtable_areas.h
    19: #define CPU_ENTRY_AREA_MAP_SIZE P4D_SIZE
/arch/arm64/include/asm/
pgtable-hwdef.h
    73: #define P4D_SIZE (_AC(1, UL) << P4D_SHIFT)  (macro)
    74: #define P4D_MASK (~(P4D_SIZE-1))
/arch/powerpc/include/asm/nohash/
pgtable.h
    78: else if (sz < P4D_SIZE)  in pte_update()
    81: pdsize = P4D_SIZE;  in pte_update()
/arch/riscv/include/asm/
pgtable-64.h
    31: #define P4D_SIZE (_AC(1, UL) << P4D_SHIFT)  (macro)
    32: #define P4D_MASK (~(P4D_SIZE - 1))
/arch/powerpc/mm/
hugetlbpage.c
    53: if (!mm_pud_folded(mm) && sz >= P4D_SIZE)  in huge_pte_alloc()
pgtable.c
    359: else if (sz < P4D_SIZE)  in set_huge_pte_at()
    362: pdsize = P4D_SIZE;  in set_huge_pte_at()
/arch/x86/boot/startup/
sme.c
    267: entries += (DIV_ROUND_UP(len, P4D_SIZE) + 1) * sizeof(pud_t) * PTRS_PER_PUD;  in sme_pgtable_calc()
    278: tables += DIV_ROUND_UP(entries, P4D_SIZE) * sizeof(pud_t) * PTRS_PER_PUD;  in sme_pgtable_calc()
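The sme_pgtable_calc() hits are worst-case sizing arithmetic: one page of PUD entries covers P4D_SIZE of virtual address space, so mapping len bytes needs DIV_ROUND_UP(len, P4D_SIZE) such pages, plus one spare in case the range straddles a P4D boundary. A sketch of the line-267 estimate (sizeof(pud_t) taken as 8 and PTRS_PER_PUD as 512, the usual x86-64 values):

    #include <stdint.h>
    #include <stdio.h>

    #define P4D_SIZE     (1ULL << 39)
    #define PTRS_PER_PUD 512
    #define PUD_ENTRY_SZ 8    /* assumption: sizeof(pud_t) on x86-64 */

    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    /* Bytes of PUD-level tables needed to map len bytes of VA. */
    static uint64_t pud_table_bytes(uint64_t len)
    {
        return (DIV_ROUND_UP(len, P4D_SIZE) + 1) * PUD_ENTRY_SZ * PTRS_PER_PUD;
    }

    int main(void)
    {
        /* 1 GiB still needs one full PUD page plus the boundary spare. */
        printf("%llu\n", (unsigned long long)pud_table_bytes(1ULL << 30)); /* 8192 */
        return 0;
    }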
/arch/s390/boot/
vmem.c
    153: IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) {  in kasan_p4d_populate_zero_shadow()
/arch/arm64/mm/
kasan_init.c
    182: #define SHADOW_ALIGN P4D_SIZE
/arch/arm/mm/
dump.c
    389: addr = start + i * P4D_SIZE;  in walk_p4d()
/arch/x86/mm/
kasan_init_64.c
    187: for (; start < end; start += P4D_SIZE)  in clear_pgds()
init_64.c
    692: vaddr_next = (vaddr & P4D_MASK) + P4D_SIZE;  in phys_p4d_init()
/arch/powerpc/mm/ptdump/
hashpagetable.c
    443: addr = start + i * P4D_SIZE;  in walk_p4d()
/arch/powerpc/mm/book3s64/
radix_pgtable.c
    913: if (!IS_ALIGNED(addr, P4D_SIZE) ||  in remove_pagetable()
    914: !IS_ALIGNED(next, P4D_SIZE)) {  in remove_pagetable()
/arch/x86/xen/
mmu_pv.c
    1158: xen_free_ro_pages(pa, P4D_SIZE);  in xen_cleanmfnmap_p4d()
    1955: n_pud = roundup(size, P4D_SIZE) >> P4D_SHIFT;  in xen_relocate_p2m()
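xen_relocate_p2m() (line 1955) counts p4d slots by rounding the size up to a P4D multiple and shifting; each slot is backed by one page of PUD entries, so the count is the number of PUD pages to reserve. A sketch, with the kernel's roundup() reimplemented locally for power-of-two steps:

    #include <stdint.h>
    #include <stdio.h>

    #define P4D_SHIFT 39
    #define P4D_SIZE  (1ULL << P4D_SHIFT)

    /* Power-of-two round-up; the kernel's roundup() handles other steps too. */
    static uint64_t roundup_pow2(uint64_t x, uint64_t step)
    {
        return (x + step - 1) & ~(step - 1);
    }

    int main(void)
    {
        uint64_t size = 3ULL << 38;    /* 1.5 * P4D_SIZE */

        /* As in xen_relocate_p2m(): p4d slots needed to cover size. */
        uint64_t n_pud = roundup_pow2(size, P4D_SIZE) >> P4D_SHIFT;

        printf("%llu\n", (unsigned long long)n_pud); /* 2 */
        return 0;
    }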
/arch/powerpc/kvm/
book3s_64_mmu_radix.c
    1367: gpa = (gpa & P4D_MASK) + P4D_SIZE;  in debugfs_radix_read()
/arch/s390/include/asm/
pgtable.h
    380: #define P4D_SIZE _REGION2_SIZE  (macro)