Searched refs:kernel_start (Results 1 – 13 of 13) sorted by relevance
/arch/riscv/mm/
  physaddr.c
     24  unsigned long kernel_start = kernel_map.virt_addr;  [in __phys_addr_symbol(), local]
     25  unsigned long kernel_end = kernel_start + kernel_map.size;  [in __phys_addr_symbol()]
     31  VIRTUAL_BUG_ON(x < kernel_start || x > kernel_end);  [in __phys_addr_symbol()]
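These hits show the usual sanity check when translating a kernel-image symbol address to a physical address: derive the image bounds from the kernel mapping and refuse anything outside them. A minimal standalone sketch of that pattern (the struct and helper below are illustrative stand-ins, not the exact riscv definitions; assert() plays the role of VIRTUAL_BUG_ON()):

    #include <assert.h>

    struct kernel_mapping {
        unsigned long virt_addr;   /* virtual base of the kernel image  */
        unsigned long phys_addr;   /* physical base of the kernel image */
        unsigned long size;        /* mapped image size in bytes        */
    };

    /* Translate a kernel-image virtual address to physical, refusing addresses
     * that fall outside [virt_addr, virt_addr + size]. */
    static unsigned long symbol_to_phys(const struct kernel_mapping *map, unsigned long x)
    {
        unsigned long kernel_start = map->virt_addr;
        unsigned long kernel_end = kernel_start + map->size;

        assert(x >= kernel_start && x <= kernel_end);
        return map->phys_addr + (x - kernel_start);
    }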
/arch/s390/boot/
  startup.c
    328  unsigned long kernel_start;  [in setup_kernel_memory_layout(), local]
    384  kernel_start = round_down(kernel_end - kernel_size, THREAD_SIZE);  [in setup_kernel_memory_layout()]
    387  kernel_start + kernel_size);  [in setup_kernel_memory_layout()]
    389  kernel_start = round_down(vmax - kernel_size, THREAD_SIZE);  [in setup_kernel_memory_layout()]
    391  kernel_start + kernel_size);  [in setup_kernel_memory_layout()]
    393  kernel_start = __NO_KASLR_START_KERNEL;  [in setup_kernel_memory_layout()]
    395  kernel_start + kernel_size);  [in setup_kernel_memory_layout()]
    397  __kaslr_offset = kernel_start;  [in setup_kernel_memory_layout()]
    400  MODULES_END = round_down(kernel_start, _SEGMENT_SIZE);  [in setup_kernel_memory_layout()]
    457  static void clear_bss_section(unsigned long kernel_start)  [in clear_bss_section(), argument]
    [all …]
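The setup_kernel_memory_layout() hits all revolve around one calculation: place the kernel image as high as possible below some limit, aligned down to a fixed boundary, and derive neighbouring regions such as MODULES_END from the result. A small self-contained sketch of that arithmetic (the alignment values in the usage comment are placeholders, not the real THREAD_SIZE or _SEGMENT_SIZE):

    /* Mirrors the kernel's round_down(): 'align' must be a power of two. */
    #define ROUND_DOWN(x, align) ((x) & ~((unsigned long)(align) - 1))

    /* Highest aligned start address for an image of 'size' bytes that still fits below 'limit'. */
    static unsigned long place_below(unsigned long limit, unsigned long size, unsigned long align)
    {
        return ROUND_DOWN(limit - size, align);
    }

    /* Usage in the spirit of the lines above:
     *   kernel_start = place_below(vmax, kernel_size, 16384);
     *   modules_end  = ROUND_DOWN(kernel_start, 1UL << 20);
     */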
  vmem.c
     97  static void kasan_populate_shadow(unsigned long kernel_start, unsigned long kernel_end)  [in kasan_populate_shadow(), argument]
    127  kasan_populate(kernel_start + TEXT_OFFSET, kernel_end, POPULATE_KASAN_MAP_SHADOW);  [in kasan_populate_shadow()]
    192  static inline void kasan_populate_shadow(unsigned long kernel_start, unsigned long kernel_end)  [in kasan_populate_shadow(), argument]
    481  void setup_vmem(unsigned long kernel_start, unsigned long kernel_end, unsigned long asce_limit)  [in setup_vmem(), argument]
    547  pgtable_populate(kernel_start + TEXT_OFFSET, kernel_end, POPULATE_KERNEL);  [in setup_vmem()]
    555  kasan_populate_shadow(kernel_start, kernel_end);  [in setup_vmem()]
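setup_vmem() populates page tables for the kernel range itself and then asks kasan_populate_shadow() to cover the KASAN shadow for the same range. With generic KASAN, one shadow byte tracks eight bytes of memory, so the shadow range is derived from the kernel range roughly as in this sketch (the offset value below is a placeholder; the real constant is architecture-specific):

    #define KASAN_SHADOW_SCALE_SHIFT 3                     /* 8 bytes of memory per shadow byte */
    #define KASAN_SHADOW_OFFSET      0x0000001000000000UL  /* placeholder, arch-specific value  */

    static inline unsigned long mem_to_shadow(unsigned long addr)
    {
        return (addr >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET;
    }

    /* The shadow to populate for [kernel_start, kernel_end) then spans
     * [mem_to_shadow(kernel_start), mem_to_shadow(kernel_end)). */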
  boot.h
     71  void setup_vmem(unsigned long kernel_start, unsigned long kernel_end, unsigned long asce_limit);
/arch/arm/boot/bootp/
  kernel.S
      2  .globl kernel_start
      3  kernel_start:  [label]
  init.S
     58  b kernel_start @ call kernel
/arch/x86/boot/startup/
  sme.c
    288  unsigned long kernel_start, kernel_end, kernel_len;  [in sme_encrypt_kernel(), local]
    318  kernel_start = (unsigned long)rip_rel_ptr(_text);  [in sme_encrypt_kernel()]
    320  kernel_len = kernel_end - kernel_start;  [in sme_encrypt_kernel()]
    354  pgtable_area_len += sme_pgtable_calc(execute_end - kernel_start) * 2;  [in sme_encrypt_kernel()]
    419  ppd.paddr = kernel_start;  [in sme_encrypt_kernel()]
    420  ppd.vaddr = kernel_start;  [in sme_encrypt_kernel()]
    425  ppd.paddr = kernel_start;  [in sme_encrypt_kernel()]
    426  ppd.vaddr = kernel_start + decrypted_base;  [in sme_encrypt_kernel()]
    457  sme_encrypt_execute(kernel_start, kernel_start + decrypted_base,  [in sme_encrypt_kernel()]
    470  ppd.vaddr = kernel_start + decrypted_base;  [in sme_encrypt_kernel()]
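The ppd lines show the core trick of in-place SME encryption: the same physical kernel range is mapped twice, once at its normal virtual address (through an encrypted mapping) and once at kernel_start + decrypted_base (through a temporary decrypted alias), and sme_encrypt_execute() is handed both aliases. A stripped-down sketch of that bookkeeping (the struct and helper names are simplified stand-ins for the real ppd setup, not the x86 code):

    struct mapping_desc {
        unsigned long paddr;   /* physical address being mapped    */
        unsigned long vaddr;   /* virtual address used for access  */
        unsigned long len;     /* length of the mapping in bytes   */
    };

    /* Encrypted alias: the kernel's normal, identity-style virtual address. */
    static void desc_encrypted(struct mapping_desc *d, unsigned long kernel_start, unsigned long len)
    {
        d->paddr = kernel_start;
        d->vaddr = kernel_start;
        d->len = len;
    }

    /* Decrypted alias: the same physical range, offset by decrypted_base virtually. */
    static void desc_decrypted(struct mapping_desc *d, unsigned long kernel_start,
                               unsigned long decrypted_base, unsigned long len)
    {
        d->paddr = kernel_start;
        d->vaddr = kernel_start + decrypted_base;
        d->len = len;
    }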
/arch/powerpc/kexec/
  core.c
    105  unsigned long long kernel_start, kernel_size;  [in arch_reserve_crashkernel(), local]
    121  kernel_start = __pa(_stext);  [in arch_reserve_crashkernel()]
    125  if ((kernel_start + kernel_size > crash_base) && (kernel_start <= crash_end)) {  [in arch_reserve_crashkernel()]
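Line 125 is a plain interval-overlap test: the requested crashkernel window conflicts with the running kernel if it starts below the kernel's end and ends at or above the kernel's start. The same check, lifted out as a tiny standalone helper:

    #include <stdbool.h>

    /* True if [kernel_start, kernel_start + kernel_size) overlaps [crash_base, crash_end]. */
    static bool crash_overlaps_kernel(unsigned long long kernel_start, unsigned long long kernel_size,
                                      unsigned long long crash_base, unsigned long long crash_end)
    {
        return (kernel_start + kernel_size > crash_base) && (kernel_start <= crash_end);
    }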
/arch/riscv/kernel/
  machine_kexec_file.c
    256  int load_extra_segments(struct kimage *image, unsigned long kernel_start,  [in load_extra_segments(), argument]
    268  kbuf.buf_min = kernel_start + kernel_len;  [in load_extra_segments()]
    325  &kernel_start,  [in load_extra_segments()]
    326  sizeof(kernel_start), 0);  [in load_extra_segments()]
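Setting kbuf.buf_min to kernel_start + kernel_len tells the kexec buffer allocator to place the extra segments this function loads only above the already-loaded kernel image, so they cannot collide with it. Reduced to its essentials, that lower-bound-plus-alignment step looks like the hypothetical helper below (not the kbuf machinery itself):

    /* First candidate address for a new segment above an occupied [base, base + occupied)
     * region, aligned up to 'align' (a power of two); the caller must still verify that
     * candidate + segment_len stays below its upper limit. */
    static unsigned long first_fit_above(unsigned long base, unsigned long occupied, unsigned long align)
    {
        unsigned long min = base + occupied;   /* plays the role of kbuf.buf_min */
        return (min + align - 1) & ~(align - 1);
    }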
/arch/riscv/include/asm/
  kexec.h
     72  int load_extra_segments(struct kimage *image, unsigned long kernel_start,
/arch/arm64/mm/
  init.c
    461  u64 kernel_start = (u64)_text;  [in module_init_limits(), local]
    462  u64 kernel_size = kernel_end - kernel_start;  [in module_init_limits()]
    477  u64 min = kernel_start;  [in module_init_limits()]
  mmu.c
    646  phys_addr_t kernel_start = __pa_symbol(_stext);  [in map_mem(), local]
    677  memblock_mark_nomap(kernel_start, kernel_end - kernel_start);  [in map_mem()]
    702  __map_memblock(pgdp, kernel_start, kernel_end,  [in map_mem()]
    704  memblock_clear_nomap(kernel_start, kernel_end - kernel_start);  [in map_mem()]
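The map_mem() hits outline a carve-out ordering: the kernel image is first marked NOMAP so the generic loop over memblock skips it, then it is mapped on its own, and finally the NOMAP flag is cleared so the region counts as ordinary memory again. A self-contained toy version of that ordering (it assumes the image sits in a region of its own and uses simplified data structures, not the memblock or arm64 interfaces):

    #include <stdbool.h>
    #include <stdio.h>

    struct region {
        unsigned long start, end;
        bool nomap;                 /* mimics memblock's "skip this region" flag */
    };

    static void map_range(unsigned long start, unsigned long end, const char *prot)
    {
        printf("map [%#lx, %#lx) as %s\n", start, end, prot);
    }

    /* Carve the kernel image out of the generic mapping loop, map it separately,
     * then drop the carve-out flag again. */
    static void map_mem_toy(struct region *regions, int n,
                            unsigned long kernel_start, unsigned long kernel_end)
    {
        for (int i = 0; i < n; i++)                     /* 1. hide the image       */
            if (regions[i].start == kernel_start && regions[i].end == kernel_end)
                regions[i].nomap = true;

        for (int i = 0; i < n; i++)                     /* 2. map everything else  */
            if (!regions[i].nomap)
                map_range(regions[i].start, regions[i].end, "default");

        map_range(kernel_start, kernel_end, "kernel image");   /* 3. map the image */

        for (int i = 0; i < n; i++)                     /* 4. expose it again      */
            if (regions[i].start == kernel_start && regions[i].end == kernel_end)
                regions[i].nomap = false;
    }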
/arch/parisc/mm/
  init.c
    358  unsigned long kernel_start, kernel_end;  [in map_pages(), local]
    362  kernel_start = __pa((unsigned long)&__init_begin);  [in map_pages()]
    401  } else if (address < kernel_start || address >= kernel_end) {  [in map_pages()]
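Here the image bounds feed a policy decision rather than a sanity check: while building the mappings, map_pages() tests whether each address lies inside [kernel_start, kernel_end) and treats addresses outside that range differently. A neutral sketch of the decision (the protection labels are illustrative; which concrete protections the parisc code applies on each side is not shown by these hits):

    enum page_prot { PROT_IMAGE, PROT_DEFAULT };   /* illustrative labels, not parisc values */

    /* Pick a protection class based on whether the address lies inside the
     * kernel image range [kernel_start, kernel_end). */
    static enum page_prot prot_for(unsigned long address,
                                   unsigned long kernel_start, unsigned long kernel_end)
    {
        if (address < kernel_start || address >= kernel_end)
            return PROT_DEFAULT;
        return PROT_IMAGE;
    }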
Completed in 26 milliseconds