
Searched refs:kernel_map (Results 1 – 11 of 11) sorted by relevance

/linux/arch/riscv/include/asm/
page.h:31  #define PAGE_OFFSET kernel_map.page_offset
125 extern struct kernel_mapping kernel_map;
129 ((x) >= kernel_map.virt_addr && (x) < (kernel_map.virt_addr + kernel_map.size))
135 #define linear_mapping_pa_to_va(x) ((void *)((unsigned long)(x) + kernel_map.va_pa_offset))
144 (void *)(_y + kernel_map.va_kernel_xip_text_pa_offset) : \
145 (void *)(_y + kernel_map.va_kernel_xip_data_pa_offset); \
154 #define linear_mapping_va_to_pa(x) ((unsigned long)(x) - kernel_map.va_pa_offset)
162 (_y < kernel_map.virt_addr + kernel_map.xiprom_sz) ? \
163 (_y - kernel_map.va_kernel_xip_text_pa_offset) : \
164 (_y - kernel_map.va_kernel_xip_data_pa_offset); \
[all …]
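
The page.h hits above boil down to a constant-offset translation: the linear map converts between physical and virtual addresses by adding or subtracting kernel_map.va_pa_offset (physaddr.c below adds BUG_ON checks that the offset has been initialised). Below is a minimal userspace sketch of that arithmetic, with hypothetical addresses and a simplified struct that keeps only the fields referenced in these hits; it is an illustration, not the kernel's definition.

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for the fields used by the page.h macros above; the
 * real struct kernel_mapping is declared in arch/riscv/include/asm/page.h
 * and filled in at boot by setup_vm() (see mm/init.c below).
 * All values here are hypothetical. */
struct kernel_mapping {
	uintptr_t virt_addr;    /* start of the kernel image mapping */
	uintptr_t phys_addr;    /* physical load address of the kernel */
	uintptr_t size;         /* size of the kernel mapping */
	uintptr_t va_pa_offset; /* linear map: virtual minus physical */
};

static struct kernel_mapping kernel_map = {
	.virt_addr    = 0xffffffff80000000UL,
	.phys_addr    = 0x80200000UL,
	.size         = 0x02000000UL,
	.va_pa_offset = 0xff60000000000000UL, /* hypothetical linear-map shift */
};

/* Mirrors linear_mapping_pa_to_va()/linear_mapping_va_to_pa(): because the
 * linear map is a constant shift, translation is one add or one subtract. */
static void *linear_mapping_pa_to_va(uintptr_t pa)
{
	return (void *)(pa + kernel_map.va_pa_offset);
}

static uintptr_t linear_mapping_va_to_pa(const void *va)
{
	return (uintptr_t)va - kernel_map.va_pa_offset;
}

int main(void)
{
	uintptr_t pa = 0x80400000UL;            /* hypothetical physical address */
	void *va = linear_mapping_pa_to_va(pa); /* pa + va_pa_offset */

	printf("pa 0x%lx -> va %p -> pa 0x%lx\n",
	       (unsigned long)pa, va,
	       (unsigned long)linear_mapping_va_to_pa(va));
	return 0;
}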
/linux/arch/riscv/mm/
init.c:45  #define kernel_map (*(struct kernel_mapping *)XIP_FIXUP(&kernel_map))  (macro)
925 end_va = kernel_map.virt_addr + kernel_map.xiprom_sz; in create_kernel_page_table()
928 kernel_map.xiprom + (va - kernel_map.virt_addr), in create_kernel_page_table()
933 end_va = kernel_map.virt_addr + kernel_map.size; in create_kernel_page_table()
944 end_va = kernel_map.virt_addr + kernel_map.size; in create_kernel_page_table()
947 kernel_map.phys_addr + (va - kernel_map.virt_addr), in create_kernel_page_table()
1092 kernel_map.virt_addr = KERNEL_LINK_ADDR + kernel_map.virt_offset; in setup_vm()
1107 kernel_map.va_kernel_xip_text_pa_offset = kernel_map.virt_addr - kernel_map.xiprom; in setup_vm()
1108 kernel_map.va_kernel_xip_data_pa_offset = kernel_map.virt_addr - kernel_map.phys_addr in setup_vm()
1113 kernel_map.size = (uintptr_t)(&_end) - kernel_map.phys_addr; in setup_vm()
[all …]
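
The create_kernel_page_table() hits above show the shape of the kernel mapping loop: walk the image from kernel_map.virt_addr up to virt_addr + size in fixed-size steps and map each virtual address to phys_addr + (va - virt_addr). Below is a hedged sketch of that loop only; map_one() is a hypothetical stand-in for the kernel's page-table helper, and STEP_SIZE stands in for PMD_SIZE.

#include <stdint.h>
#include <stdio.h>

#define STEP_SIZE (2UL * 1024 * 1024) /* 2 MiB, a typical PMD granule */

static void map_one(uintptr_t va, uintptr_t pa, unsigned long granule)
{
	/* Placeholder for the real page-table call: just log the entry. */
	printf("map va 0x%lx -> pa 0x%lx (0x%lx bytes)\n",
	       (unsigned long)va, (unsigned long)pa, granule);
}

static void create_kernel_mapping_sketch(uintptr_t virt_addr,
					 uintptr_t phys_addr,
					 uintptr_t size)
{
	/* Mirrors the loop bounds seen above: end_va = virt_addr + size,
	 * physical target = phys_addr + (va - virt_addr). */
	uintptr_t va, end_va = virt_addr + size;

	for (va = virt_addr; va < end_va; va += STEP_SIZE)
		map_one(va, phys_addr + (va - virt_addr), STEP_SIZE);
}

int main(void)
{
	/* Hypothetical values, echoing the kernel_map fields set in setup_vm(). */
	create_kernel_mapping_sketch(0xffffffff80000000UL, 0x80200000UL,
				     4 * STEP_SIZE);
	return 0;
}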
physaddr.c:24  unsigned long kernel_start = kernel_map.virt_addr;  in __phys_addr_symbol()
25 unsigned long kernel_end = kernel_start + kernel_map.size; in __phys_addr_symbol()
39 BUG_ON(!kernel_map.va_pa_offset); in linear_mapping_va_to_pa()
41 return ((unsigned long)(x) - kernel_map.va_pa_offset); in linear_mapping_va_to_pa()
47 BUG_ON(!kernel_map.va_pa_offset); in linear_mapping_pa_to_va()
49 return ((void *)((unsigned long)(x) + kernel_map.va_pa_offset)); in linear_mapping_pa_to_va()
ptdump.c:404  address_markers[KERNEL_MAPPING_NR].start_address = kernel_map.virt_addr;  in ptdump_init()
/linux/arch/riscv/kernel/
vmcore_info.c:29  kernel_map.va_kernel_pa_offset);  in arch_crash_save_vmcoreinfo()
machine_kexec.c:203  this_hart_id, kernel_map.va_pa_offset);  in machine_kexec()
setup.c:322  kernel_map.virt_offset,  in dump_kernel_offset()
head.S:76  la a1, kernel_map
/linux/tools/perf/
builtin-report.c:614  struct map *kernel_map = machine__kernel_map(&rep->session->machines.host);  in report__warn_kptr_restrict()  (local)
615 struct kmap *kernel_kmap = kernel_map ? map__kmap(kernel_map) : NULL; in report__warn_kptr_restrict()
620 if (kernel_map == NULL || in report__warn_kptr_restrict()
621 (dso__hit(map__dso(kernel_map)) && in report__warn_kptr_restrict()
628 if (kernel_map && map__has_symbols(kernel_map)) { in report__warn_kptr_restrict()
builtin-kmem.c:346  struct map *kernel_map;  in build_alloc_func_list()  (local)
363 kernel_map = machine__kernel_map(machine); in build_alloc_func_list()
364 if (map__load(kernel_map) < 0) { in build_alloc_func_list()
369 map__for_each_symbol(kernel_map, sym, node) { in build_alloc_func_list()
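
The build_alloc_func_list() hits above show the usual perf pattern for walking kernel symbols: fetch the kernel map with machine__kernel_map(), load its symbol table with map__load(), then iterate with map__for_each_symbol(). The sketch below follows that pattern; it assumes perf's internal headers and only builds inside tools/perf, and dump_kernel_symbols() is a hypothetical helper, not part of perf.

#include <stdio.h>
#include "util/machine.h"
#include "util/map.h"
#include "util/symbol.h"

/* Hypothetical helper following the builtin-kmem.c usage shown above. */
static int dump_kernel_symbols(struct machine *machine)
{
	struct map *kernel_map = machine__kernel_map(machine);
	struct symbol *sym;
	struct rb_node *node;

	/* map__load() fills in the kernel map's symbol table. */
	if (map__load(kernel_map) < 0) {
		fprintf(stderr, "cannot load kernel map\n");
		return -1;
	}

	map__for_each_symbol(kernel_map, sym, node)
		printf("%s\n", sym->name);

	return 0;
}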
/linux/tools/perf/util/
map.c:542  struct map *kernel_map = machine__kernel_map(machine);  in map__rip_2objdump()  (local)
544 if (kernel_map) in map__rip_2objdump()
545 map = kernel_map; in map__rip_2objdump()

Completed in 25 milliseconds