| /linux/arch/hexagon/kernel/ |
| vdso.c |
     53  unsigned long vdso_base;    in arch_setup_additional_pages() local
     64  vdso_base = STACK_TOP;    in arch_setup_additional_pages()
     66  vdso_base = get_unmapped_area(NULL, vdso_base, PAGE_SIZE, 0, 0);    in arch_setup_additional_pages()
     67  if (IS_ERR_VALUE(vdso_base)) {    in arch_setup_additional_pages()
     68  ret = vdso_base;    in arch_setup_additional_pages()
     74  vma = _install_special_mapping(mm, vdso_base, PAGE_SIZE,    in arch_setup_additional_pages()
     83  mm->context.vdso = (void *)vdso_base;    in arch_setup_additional_pages()
|
| /linux/arch/csky/kernel/ |
| vdso.c |
     50  unsigned long vdso_base, vdso_len;    in arch_setup_additional_pages() local
     62  vdso_base = get_unmapped_area(NULL, 0, vdso_len, 0, 0);    in arch_setup_additional_pages()
     63  if (IS_ERR_VALUE(vdso_base)) {    in arch_setup_additional_pages()
     64  ret = vdso_base;    in arch_setup_additional_pages()
     73  mm->context.vdso = (void *)vdso_base;    in arch_setup_additional_pages()
     77  _install_special_mapping(mm, vdso_base, vdso_pages << PAGE_SHIFT,    in arch_setup_additional_pages()
     87  vdso_base += (vdso_pages << PAGE_SHIFT);    in arch_setup_additional_pages()
     89  vma = _install_special_mapping(mm, vdso_base, PAGE_SIZE,    in arch_setup_additional_pages()
|
| /linux/arch/riscv/kernel/ |
| vdso.c |
    214  unsigned long vdso_base, vdso_text_len, vdso_mapping_len;    in __setup_additional_pages() local
    223  vdso_base = get_unmapped_area(NULL, 0, vdso_mapping_len, 0, 0);    in __setup_additional_pages()
    224  if (IS_ERR_VALUE(vdso_base)) {    in __setup_additional_pages()
    225  ret = ERR_PTR(vdso_base);    in __setup_additional_pages()
    229  ret = _install_special_mapping(mm, vdso_base, VVAR_SIZE,    in __setup_additional_pages()
    234  vdso_base += VVAR_SIZE;    in __setup_additional_pages()
    235  mm->context.vdso = (void *)vdso_base;    in __setup_additional_pages()
    238  _install_special_mapping(mm, vdso_base, vdso_text_len,    in __setup_additional_pages()
|
| /linux/arch/powerpc/kernel/ |
| vdso.c |
    189  unsigned long vdso_size, vdso_base, mappings_size;    in __arch_setup_additional_pages() local
    210  vdso_base = get_unmapped_area(NULL, 0, mappings_size, 0, 0);    in __arch_setup_additional_pages()
    211  if (IS_ERR_VALUE(vdso_base))    in __arch_setup_additional_pages()
    212  return vdso_base;    in __arch_setup_additional_pages()
    215  vdso_base = ALIGN(vdso_base, VDSO_ALIGNMENT);    in __arch_setup_additional_pages()
    217  vma = _install_special_mapping(mm, vdso_base, vvar_size,    in __arch_setup_additional_pages()
    233  vma = _install_special_mapping(mm, vdso_base + vvar_size, vdso_size,    in __arch_setup_additional_pages()
    237  do_munmap(mm, vdso_base, vvar_size, NULL);    in __arch_setup_additional_pages()
    242  mm->context.vdso = (void __user *)vdso_base + vvar_size;    in __arch_setup_additional_pages()
|
| /linux/arch/arm64/kernel/ |
| vdso.c |
    184  unsigned long vdso_base, vdso_text_len, vdso_mapping_len;    in __setup_additional_pages() local
    194  vdso_base = get_unmapped_area(NULL, 0, vdso_mapping_len, 0, 0);    in __setup_additional_pages()
    195  if (IS_ERR_VALUE(vdso_base)) {    in __setup_additional_pages()
    196  ret = ERR_PTR(vdso_base);    in __setup_additional_pages()
    200  ret = _install_special_mapping(mm, vdso_base, VVAR_NR_PAGES * PAGE_SIZE,    in __setup_additional_pages()
    209  vdso_base += VVAR_NR_PAGES * PAGE_SIZE;    in __setup_additional_pages()
    210  mm->context.vdso = (void *)vdso_base;    in __setup_additional_pages()
    211  ret = _install_special_mapping(mm, vdso_base, vdso_text_len,    in __setup_additional_pages()
|
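The hexagon, csky, riscv, powerpc and arm64 hits above all share one shape: reserve a free range with get_unmapped_area(), install one special mapping for the vDSO data (vvar) pages and another for the vDSO text, record the text base, and unwind the data mapping if the text mapping fails. A minimal sketch of that shape, assuming hypothetical vvar_spec/vdso_spec descriptors and a generic vvar_size/text_len split rather than any single architecture's exact code:

#include <linux/err.h>
#include <linux/mm.h>
#include <linux/mm_types.h>

/*
 * Sketch only: vvar_spec/vdso_spec, vvar_size and text_len are placeholders
 * for the per-architecture vm_special_mapping descriptors and sizes.
 * On success the vDSO text base is returned; on failure a negative errno
 * is carried in the same unsigned long, as the callers above expect.
 */
static unsigned long map_vdso_sketch(struct mm_struct *mm,
				     const struct vm_special_mapping *vvar_spec,
				     const struct vm_special_mapping *vdso_spec,
				     unsigned long vvar_size,
				     unsigned long text_len)
{
	unsigned long vdso_base;
	struct vm_area_struct *vma;

	/* Find a free range large enough for the data pages plus the text. */
	vdso_base = get_unmapped_area(NULL, 0, vvar_size + text_len, 0, 0);
	if (IS_ERR_VALUE(vdso_base))
		return vdso_base;

	/* vDSO data (vvar) pages: readable by userspace, never executable. */
	vma = _install_special_mapping(mm, vdso_base, vvar_size,
				       VM_READ | VM_MAYREAD | VM_PFNMAP,
				       vvar_spec);
	if (IS_ERR(vma))
		return PTR_ERR(vma);

	/* vDSO text follows the data pages; VM_MAYWRITE lets debuggers COW it. */
	vdso_base += vvar_size;
	vma = _install_special_mapping(mm, vdso_base, text_len,
				       VM_READ | VM_EXEC |
				       VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC,
				       vdso_spec);
	if (IS_ERR(vma)) {
		/* The powerpc hit shows the cleanup: drop the data mapping again. */
		do_munmap(mm, vdso_base - vvar_size, vvar_size, NULL);
		return PTR_ERR(vma);
	}

	return vdso_base;
}

A caller would take mmap_write_lock_killable() around this and store the returned base in its arch-specific mm context field: context.vdso on most of the architectures above, context.vdso_base on s390 and parisc.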
| /linux/arch/s390/include/asm/ |
| vdso-symbols.h |
     10  #define VDSO64_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso64_offset_##name))
     12  #define VDSO32_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso32_offset_##name))
|
| mmu.h |
     18  unsigned long vdso_base;    member
|
| elf.h |
    302  (unsigned long)current->mm->context.vdso_base); \
|
| /linux/arch/parisc/include/asm/ |
| vdso.h |
     12  #define VDSO64_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso64_offset_##name))
     13  #define VDSO32_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso32_offset_##name))
|
| mmu.h |
      7  unsigned long vdso_base;    member
|
| elf.h |
    359  #define VDSO_CURRENT_BASE current->mm->context.vdso_base
|
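On s390 and parisc the recorded vdso_base is also how individual vDSO entry points are resolved: VDSO64_SYMBOL()/VDSO32_SYMBOL() add a linker-generated vdso*_offset_<name> constant to the base, selecting the constant by token pasting. A self-contained userspace mock of that mechanism, with invented offsets and symbol names and the base passed directly instead of a task pointer:

#include <stdio.h>

/* Mock of the kernel macro: a base address plus a per-symbol offset
 * constant, selected by token pasting. Offsets and names are made up. */
#define vdso64_offset_rt_sigreturn	0x600UL
#define vdso64_offset_clock_gettime	0x040UL
#define VDSO64_SYMBOL(base, name)	((base) + (vdso64_offset_##name))

int main(void)
{
	unsigned long vdso_base = 0x7fffe5be2000UL;	/* pretend mapping base */

	printf("rt_sigreturn  at %#lx\n", VDSO64_SYMBOL(vdso_base, rt_sigreturn));
	printf("clock_gettime at %#lx\n", VDSO64_SYMBOL(vdso_base, clock_gettime));
	return 0;
}

In the kernel the offsets come from the vDSO's generated symbol header, and the macro is typically consumed when setting up signal frames, so the return trampoline address points into the task's own vDSO mapping.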
| /linux/arch/parisc/kernel/ |
| vdso.c |
     32  current->mm->context.vdso_base = vma->vm_start;    in vdso_mremap()
     91  current->mm->context.vdso_base = vdso_text_start;    in arch_setup_additional_pages()
|
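The parisc and s390 vdso_mremap() hits keep context.vdso_base in sync when userspace relocates the vDSO mapping, as checkpoint/restore does. That happens through the .mremap hook of the vm_special_mapping handed to _install_special_mapping(); a sketch of its shape, using the s390/parisc field spelling vdso_base:

#include <linux/mm.h>
#include <linux/mm_types.h>
#include <linux/sched.h>

/*
 * Sketch only: a real descriptor also supplies .pages or a .fault handler
 * so the mapping has backing pages; only the relocation hook is shown.
 */
static int vdso_mremap_sketch(const struct vm_special_mapping *sm,
			      struct vm_area_struct *new_vma)
{
	/* Remember the new location so VDSO*_SYMBOL() keeps working. */
	current->mm->context.vdso_base = new_vma->vm_start;
	return 0;
}

static struct vm_special_mapping vdso_text_mapping_sketch = {
	.name   = "[vdso]",
	.mremap = vdso_mremap_sketch,
};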
| /linux/arch/mips/kernel/ |
| vdso.c |
     72  static unsigned long vdso_base(void)    in vdso_base() function
    131  base = get_unmapped_area(NULL, vdso_base(), size, 0, 0);    in arch_setup_additional_pages()
|
| /linux/arch/loongarch/kernel/ |
| vdso.c |
    150  static unsigned long vdso_base(void)    in vdso_base() function
    179  data_addr = get_unmapped_area(NULL, vdso_base(), size, 0, 0);    in arch_setup_additional_pages()
|
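In the mips and loongarch hits, vdso_base() is not the mapping address itself but the placement hint handed to get_unmapped_area(): a base near STACK_TOP, optionally pushed up by a random offset when the task has address-space randomization enabled. A rough sketch of such a helper, with an invented randomization window:

#include <linux/mm.h>
#include <linux/random.h>
#include <linux/sched.h>

/* How far above the base the vDSO may be scattered; this value is made up. */
#define VDSO_RANDOMIZE_SIZE_SKETCH	(64UL * 1024 * 1024)

/*
 * Sketch only: the exact base and randomisation window differ per
 * architecture and kernel version.
 */
static unsigned long vdso_base_sketch(void)
{
	unsigned long base = STACK_TOP;

	if (current->flags & PF_RANDOMIZE) {
		base += get_random_u32_below(VDSO_RANDOMIZE_SIZE_SKETCH);
		base = PAGE_ALIGN(base);
	}

	return base;
}

get_unmapped_area() treats the value only as a hint, so the vDSO still lands elsewhere if that range is already occupied.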
| /linux/arch/s390/kernel/ |
| vdso.c |
    111  current->mm->context.vdso_base = vma->vm_start;    in vdso_mremap()
    177  current->mm->context.vdso_base = vdso_text_start;    in map_vdso()
|
| stacktrace.c |
     97  return in_range(ip, current->mm->context.vdso_base, vdso_text_size());    in ip_within_vdso()
|
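The s390 stacktrace.c hit shows why the base is worth recording at all: once stored, it lets other code classify user addresses, here deciding whether a saved instruction pointer lies inside the vDSO text. A minimal sketch of that check, with the mapping size passed in rather than taken from s390's vdso_text_size(), and using the s390/parisc context.vdso_base field name:

#include <linux/minmax.h>
#include <linux/sched.h>
#include <linux/types.h>

/* Sketch only: true if ip falls inside [vdso_base, vdso_base + vdso_size). */
static bool ip_within_vdso_sketch(unsigned long ip, unsigned long vdso_size)
{
	return in_range(ip, current->mm->context.vdso_base, vdso_size);
}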