/arch/arm64/include/asm/kernel-pgtable.h
  45: #define SPAN_NR_ENTRIES(vstart, vend, shift) \   [argument]
  46:         ((((vend) - 1) >> (shift)) - ((vstart) >> (shift)) + 1)
  48: #define EARLY_ENTRIES(lvl, vstart, vend) \   [argument]
  49:         SPAN_NR_ENTRIES(vstart, vend, SWAPPER_BLOCK_SHIFT + lvl * PTDESC_TABLE_SHIFT)
  51: #define EARLY_LEVEL(lvl, lvls, vstart, vend, add) \   [argument]
  52:         ((lvls) > (lvl) ? EARLY_ENTRIES(lvl, vstart, vend) + (add) : 0)
  54: #define EARLY_PAGES(lvls, vstart, vend, add) (1 /* PGDIR page */ \   [argument]
  55:         + EARLY_LEVEL(3, (lvls), (vstart), (vend), add) /* each entry needs a next level page table */ \
  56:         + EARLY_LEVEL(2, (lvls), (vstart), (vend), add) /* each entry needs a next level page table */ \
  57:         + EARLY_LEVEL(1, (lvls), (vstart), (vend), add))/* each entry needs a next level page table */
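These macros size the early boot page tables for a [vstart, vend) span: SPAN_NR_ENTRIES counts every translation-table entry the range touches at a given shift, including partially covered entries at both ends, which is what the "- 1 ... + 1" arithmetic achieves. A minimal userspace restatement with made-up addresses (a sketch, not kernel code):

    #include <stdio.h>

    /* Same arithmetic as the kernel macro above: index of the last byte
     * minus index of the first byte, plus one, counts every entry the
     * range touches. */
    #define SPAN_NR_ENTRIES(vstart, vend, shift) \
            ((((vend) - 1) >> (shift)) - ((vstart) >> (shift)) + 1)

    int main(void)
    {
            /* 4 MiB range, 2 MiB-aligned, counted in 2 MiB blocks
             * (shift 21): exactly two entries */
            printf("%lu\n", SPAN_NR_ENTRIES(0x40200000UL, 0x40600000UL, 21));

            /* grow the end by one page: the range now straddles a third
             * block, so the count becomes three */
            printf("%lu\n", SPAN_NR_ENTRIES(0x40200000UL, 0x40601000UL, 21));
            return 0;
    }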
|
/arch/x86/lib/usercopy_64.c
  32: void *vend = addr + size;   [local, in clean_cache_range()]
  36:      p < vend; p += x86_clflush_size)   [in clean_cache_range()]
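Here vend is the exclusive end of the range, while the loop start is rounded down to a cache-line boundary so a partially covered first line still gets written back. A sketch of the same shape in plain C, assuming a flush_line() callback in place of the kernel's clwb() and a caller-supplied line size in place of boot_cpu_data.x86_clflush_size:

    #include <stddef.h>
    #include <stdint.h>

    static void clean_range(char *addr, size_t size, size_t line_size,
                            void (*flush_line)(char *))
    {
            char *vend = addr + size;       /* one past the last byte */
            /* round the start down so a partially covered first line
             * is flushed too */
            char *p = (char *)((uintptr_t)addr & ~(uintptr_t)(line_size - 1));

            for (; p < vend; p += line_size)
                    flush_line(p);
    }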
|
/arch/alpha/boot/bootpz.c
  76: check_range(unsigned long vstart, unsigned long vend,   [argument, in check_range()]
  83:            vstart, vend, kstart, kend);   [in check_range()]
  86: for (vaddr = vstart; vaddr <= vend; vaddr += PAGE_SIZE)   [in check_range()]
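Note the bound at line 86: vend here is inclusive (the loop runs while vaddr <= vend), unlike the exclusive one-past-the-end convention most other files in this listing use. A sketch of the loop shape, with page_maps_into() as a hypothetical stand-in for the bootloader's actual overlap test:

    #define PAGE_SIZE 0x2000UL      /* alpha uses 8 KiB pages */

    /* hypothetical: does the page at vaddr land inside [kstart, kend)?
     * An identity mapping is assumed here for illustration. */
    static int page_maps_into(unsigned long vaddr,
                              unsigned long kstart, unsigned long kend)
    {
            return vaddr >= kstart && vaddr < kend;
    }

    static int check_range(unsigned long vstart, unsigned long vend,
                           unsigned long kstart, unsigned long kend)
    {
            unsigned long vaddr;

            /* <= : vend names the last page to test, not one past it */
            for (vaddr = vstart; vaddr <= vend; vaddr += PAGE_SIZE)
                    if (page_maps_into(vaddr, kstart, kend))
                            return 1;       /* ranges overlap */
            return 0;
    }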
|
/arch/sparc/mm/init_64.c
  1679: unsigned long vend,   [argument, in kernel_map_hugepud()]
  1697: vend = vstart + mask16gb + 1UL;   [in kernel_map_hugepud()]
  1698: while (vstart < vend) {   [in kernel_map_hugepud()]
  1718: unsigned long vend,   [argument, in kernel_map_hugepmd()]
  1742: vend = vstart + mask2gb + 1UL;   [in kernel_map_hugepmd()]
  1745: while (vstart < vend) {   [in kernel_map_hugepmd()]
  1775: vstart, vend);   [in kernel_map_range()]
  1779: while (vstart < vend) {   [in kernel_map_range()]
  1844: if (this_end > vend)   [in kernel_map_range()]
  1845: this_end = vend;   [in kernel_map_range()]
  … (further matches not shown)
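Lines 1844-1845 show the usual chunked-walk clamp: advance through [vstart, vend) one aligned chunk at a time and cut the final chunk short at vend. A sketch of that pattern, where map_chunk() is a hypothetical worker and chunk is assumed to be a power of two:

    #include <stdio.h>

    /* hypothetical worker: just report each chunk */
    static void map_chunk(unsigned long start, unsigned long end)
    {
            printf("map [%#lx, %#lx)\n", start, end);
    }

    static void map_range(unsigned long vstart, unsigned long vend,
                          unsigned long chunk)
    {
            while (vstart < vend) {
                    /* end of the aligned chunk that contains vstart */
                    unsigned long this_end = (vstart | (chunk - 1)) + 1;

                    if (this_end > vend)
                            this_end = vend;   /* clamp the final chunk */

                    map_chunk(vstart, this_end);
                    vstart = this_end;
            }
    }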
|
/arch/sparc/mm/srmmu.c
  854: unsigned long vend = PGDIR_ALIGN(vbase + sp_banks[sp_entry].num_bytes);   [local, in map_spbank()]
  862: if (vend > max_vaddr || vend < min_vaddr)   [in map_spbank()]
  863:         vend = max_vaddr;   [in map_spbank()]
  865: while (vstart < vend) {   [in map_spbank()]
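The test at lines 862-863 doubles as an unsigned-overflow guard: if aligning the end of the bank wrapped past the top of the address space, vend lands below min_vaddr and is clamped to max_vaddr too. Restated as a standalone helper (illustrative only; the limit names follow srmmu.c):

    static unsigned long clamp_vend(unsigned long vend,
                                    unsigned long min_vaddr,
                                    unsigned long max_vaddr)
    {
            /* vend < min_vaddr can only happen if the addition that
             * produced vend wrapped around zero */
            if (vend > max_vaddr || vend < min_vaddr)
                    vend = max_vaddr;
            return vend;
    }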
|
/arch/riscv/mm/kasan_init.c
  372: unsigned long vend = PAGE_ALIGN((unsigned long)end);   [local, in kasan_populate()]
  374: kasan_populate_pgd(pgd_offset_k(vaddr), vaddr, vend);   [in kasan_populate()]
  439: unsigned long vend = PAGE_ALIGN((unsigned long)end);   [local, in kasan_shallow_populate()]
  441: kasan_shallow_populate_pgd(vaddr, vend);   [in kasan_shallow_populate()]
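Both call sites round end up with PAGE_ALIGN() before populating, so a range that stops mid-page still gets its last shadow page. The idiom, restated standalone with an assumed 4 KiB page:

    #include <stdio.h>

    #define PAGE_SIZE        0x1000UL
    #define PAGE_ALIGN(addr) (((addr) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))

    int main(void)
    {
            printf("%#lx\n", PAGE_ALIGN(0x2001UL));  /* 0x3000: rounded up */
            printf("%#lx\n", PAGE_ALIGN(0x2000UL));  /* 0x2000: already aligned */
            return 0;
    }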
|
/arch/powerpc/mm/book3s64/hash_utils.c
  618: int htab_bolt_mapping(unsigned long vstart, unsigned long vend,   [argument, in htab_bolt_mapping()]
  632:            vstart, vend, pstart, prot, psize, ssize);   [in htab_bolt_mapping()]
  637: vend = ALIGN_DOWN(vend, step);   [in htab_bolt_mapping()]
  639: for (; vaddr < vend; vaddr += step, paddr += step) {   [in htab_bolt_mapping()]
  704: int htab_remove_mapping(unsigned long vstart, unsigned long vend,   [argument, in htab_remove_mapping()]
  722: for (;vaddr < vend; vaddr += step) {   [in htab_remove_mapping()]
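Line 637 pairs with a matching round-up of vstart just before it (not shown in the excerpt): the start is aligned up and the end aligned down, so only steps lying entirely inside [vstart, vend) are bolted into the hash table. A sketch with the alignment macros restated locally (step assumed to be a power of two, bolt_one() a hypothetical worker):

    #include <stdio.h>

    #define ALIGN_DOWN(x, a) ((x) & ~((a) - 1))
    #define ALIGN_UP(x, a)   ALIGN_DOWN((x) + (a) - 1, (a))

    /* hypothetical worker: just report each bolted step */
    static void bolt_one(unsigned long vaddr)
    {
            printf("bolt %#lx\n", vaddr);
    }

    static void bolt_range(unsigned long vstart, unsigned long vend,
                           unsigned long step)
    {
            unsigned long vaddr = ALIGN_UP(vstart, step);   /* round up */

            vend = ALIGN_DOWN(vend, step);                  /* round down */
            for (; vaddr < vend; vaddr += step)
                    bolt_one(vaddr);   /* only fully covered steps */
    }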
|
/arch/powerpc/include/asm/book3s/64/mmu-hash.h
  495: extern int htab_bolt_mapping(unsigned long vstart, unsigned long vend,
  498: int htab_remove_mapping(unsigned long vstart, unsigned long vend,
|
/arch/mips/include/asm/sgiarcs.h
  96: char vend[8], prod[8];   [member]
|
/arch/x86/mm/pat/set_memory.c
  331: void *vend = vaddr + size;   [local, in clflush_cache_range_opt()]
  333: if (p >= vend)   [in clflush_cache_range_opt()]
  336: for (; p < vend; p += clflush_size)   [in clflush_cache_range_opt()]
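The guard at line 333 covers the empty-range case: after p is rounded down to a line boundary, an aligned zero-length request leaves p == vend and the function returns before flushing anything. A counting sketch of the same control flow (plain C, not the kernel's clflushopt() path):

    #include <stddef.h>
    #include <stdint.h>

    static size_t lines_to_flush(char *vaddr, unsigned int size, size_t line)
    {
            char *p = (char *)((uintptr_t)vaddr & ~(uintptr_t)(line - 1));
            char *vend = vaddr + size;
            size_t n = 0;

            if (p >= vend)
                    return 0;       /* empty range: nothing to flush */

            for (; p < vend; p += line)
                    n++;            /* one flush per covered line */
            return n;
    }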
|