| /arch/powerpc/include/asm/ |
| edac.h |
|    21  unsigned int *virt_addr = va;   in edac_atomic_scrub() (local)
|    25  for (i = 0; i < size / sizeof(*virt_addr); i++, virt_addr++) {   in edac_atomic_scrub()
|    35  : "r"(virt_addr)   in edac_atomic_scrub()
|
| /arch/arm/include/asm/ |
| edac.h |
|    18  unsigned int *virt_addr = va;   in edac_atomic_scrub() (local)
|    22  for (i = 0; i < size / sizeof(*virt_addr); i++, virt_addr++) {   in edac_atomic_scrub()
|    32  : "r"(virt_addr)   in edac_atomic_scrub()
|
| /arch/mips/include/asm/ |
| edac.h |
|    11  unsigned long *virt_addr = va;   in edac_atomic_scrub() (local)
|    31  : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*virt_addr)   in edac_atomic_scrub()
|    32  : GCC_OFF_SMALL_ASM() (*virt_addr));   in edac_atomic_scrub()
|    34  virt_addr++;   in edac_atomic_scrub()
|
| /arch/x86/include/asm/ |
| edac.h |
|     9  u32 i, *virt_addr = va;   in edac_atomic_scrub() (local)
|    15  for (i = 0; i < size / 4; i++, virt_addr++)   in edac_atomic_scrub()
|    16  asm volatile("lock addl $0, %0"::"m" (*virt_addr));   in edac_atomic_scrub()
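The four edac.h entries above (powerpc, arm, mips, x86) are all edac_atomic_scrub(), which walks a memory range and rewrites every word with an atomic read-modify-write so that a correctable ECC error is scrubbed back to corrected data without changing the stored value. A minimal sketch of the x86 form of that idea, written as an illustration rather than a copy of the kernel header (the "+m" constraint is my addition to make the read-write intent explicit):

/*
 * Minimal sketch: a locked "add $0" forces an atomic read-modify-write of
 * each 32-bit word, so corrected data is written back while the value is
 * left unchanged. Illustration only, not arch/x86/include/asm/edac.h.
 */
static inline void atomic_scrub_sketch(void *va, unsigned int size)
{
	unsigned int i, *virt_addr = va;

	for (i = 0; i < size / 4; i++, virt_addr++)
		asm volatile("lock addl $0, %0" : "+m" (*virt_addr));
}

The powerpc, arm and mips entries do the same job with load-linked/store-conditional style loops (lwarx/stwcx., ldrex/strex, ll/sc) instead of a locked add, which is why their hits show inline-asm operand lines rather than a single instruction.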
|
| boot.h |
|    87  unsigned long decompress_kernel(unsigned char *outbuf, unsigned long virt_addr,
|
| /arch/x86/boot/compressed/ |
| misc.c |
|   199  unsigned long virt_addr)   in handle_relocations() (argument)
|   227  delta = virt_addr - LOAD_PHYSICAL_ADDR;   in handle_relocations()
|   277  unsigned long virt_addr)   in handle_relocations() (argument)
|   342  unsigned long decompress_kernel(unsigned char *outbuf, unsigned long virt_addr,   in decompress_kernel() (argument)
|   357  handle_relocations(outbuf, output_len, virt_addr);   in decompress_kernel()
|   407  unsigned long virt_addr = LOAD_PHYSICAL_ADDR;   in extract_kernel() (local)
|   491  &virt_addr);   in extract_kernel()
|   496  if (virt_addr & (MIN_KERNEL_ALIGN - 1))   in extract_kernel()
|   501  if (virt_addr + needed_size > KERNEL_IMAGE_SIZE)   in extract_kernel()
|   508  if (virt_addr != LOAD_PHYSICAL_ADDR)   in extract_kernel()
|   [all …]
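Taken together, the misc.c hits trace how the decompressor handles a possibly KASLR-randomized virtual load address: extract_kernel() starts from LOAD_PHYSICAL_ADDR, lets the randomizer update virt_addr, rejects values that are misaligned or would overrun KERNEL_IMAGE_SIZE, and decompress_kernel() only applies relocations when virt_addr differs from the link-time address, using delta = virt_addr - LOAD_PHYSICAL_ADDR in handle_relocations(). A hedged sketch of that validation and delta arithmetic; the constants below are placeholders, not the kernel's configured values:

/*
 * Illustrative check of a KASLR-chosen virtual address, mirroring the
 * conditions visible in the extract_kernel() hits above. The constants
 * are hypothetical placeholders.
 */
#define LOAD_PHYSICAL_ADDR_SK	0x1000000UL	/* hypothetical link-time address */
#define MIN_KERNEL_ALIGN_SK	0x200000UL	/* hypothetical minimum alignment */
#define KERNEL_IMAGE_SIZE_SK	0x40000000UL	/* hypothetical image window */

static int virt_addr_is_usable(unsigned long virt_addr, unsigned long needed_size)
{
	if (virt_addr & (MIN_KERNEL_ALIGN_SK - 1))
		return 0;	/* not aligned to the minimum kernel alignment */
	if (virt_addr + needed_size > KERNEL_IMAGE_SIZE_SK)
		return 0;	/* would not fit inside the image window */
	return 1;
}

/*
 * Relocations are only applied when the kernel will not run at its
 * link-time virtual address; each relocation entry is adjusted by this delta.
 */
static long reloc_delta(unsigned long virt_addr)
{
	return (long)(virt_addr - LOAD_PHYSICAL_ADDR_SK);
}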
|
| misc.h |
|   107  unsigned long *virt_addr);
|   113  unsigned long *virt_addr)   in choose_random_location() (argument)
|
| /arch/powerpc/platforms/ps3/ |
| mm.c |
|   780  unsigned long phys_addr = is_kernel_addr(virt_addr) ? __pa(virt_addr)   in dma_sb_map_area()
|   781  : virt_addr;   in dma_sb_map_area()
|   791  virt_addr);   in dma_sb_map_area()
|   835  unsigned long phys_addr = is_kernel_addr(virt_addr) ? __pa(virt_addr)   in dma_ioc0_map_area()
|   836  : virt_addr;   in dma_ioc0_map_area()
|   842  virt_addr, len);   in dma_ioc0_map_area()
|   972  unsigned long virt_addr, len;   in dma_sb_region_create_linear() (local)
|   990  virt_addr = map.rm.base + r->offset;   in dma_sb_region_create_linear()
|  1002  virt_addr = map.rm.size;   in dma_sb_region_create_linear()
|  1076  unsigned long phys_addr = is_kernel_addr(virt_addr) ? __pa(virt_addr)   in dma_sb_map_area_linear()
|   [all …]
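Several of these mm.c hits use the same powerpc idiom: an address that lies in the kernel linear map is translated with __pa(), while anything else is passed through unchanged. A small sketch of that check; the wrapper name is illustrative, but is_kernel_addr() and __pa() are the kernel's own helpers:

#include <asm/page.h>	/* __pa(), is_kernel_addr() on powerpc */

/*
 * Sketch of the "translate only if it is a kernel linear-map address"
 * idiom from the dma_sb_map_area() hits above. Hypothetical wrapper name.
 */
static unsigned long to_phys_if_kernel(unsigned long virt_addr)
{
	return is_kernel_addr(virt_addr) ? __pa(virt_addr) : virt_addr;
}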
|
| system-bus.c |
|   511  unsigned long virt_addr;   in ps3_alloc_coherent() (local)
|   516  virt_addr = __get_free_pages(flag, get_order(size));   in ps3_alloc_coherent()
|   518  if (!virt_addr) {   in ps3_alloc_coherent()
|   523  result = ps3_dma_map(dev->d_region, virt_addr, size, dma_handle,   in ps3_alloc_coherent()
|   534  return (void*)virt_addr;   in ps3_alloc_coherent()
|   537  free_pages(virt_addr, get_order(size));   in ps3_alloc_coherent()
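ps3_alloc_coherent() follows a common allocate/map/unwind shape: grab pages with __get_free_pages(), hand them to the bus DMA-mapping call, and free the pages again if the mapping step fails. A condensed sketch of that control flow, with a hypothetical map_for_dma() callback standing in for ps3_dma_map(), whose full parameter list is not shown in the listing:

#include <linux/gfp.h>	/* __get_free_pages(), free_pages() */
#include <linux/mm.h>	/* get_order() */
#include <linux/types.h>

/*
 * Condensed sketch of the allocate/map/unwind shape in ps3_alloc_coherent().
 * map_for_dma() is a hypothetical stand-in for the bus-specific mapping call.
 */
static void *alloc_coherent_sketch(size_t size, gfp_t flag, dma_addr_t *dma_handle,
				   int (*map_for_dma)(unsigned long virt_addr,
						      size_t len,
						      dma_addr_t *handle))
{
	unsigned long virt_addr;

	virt_addr = __get_free_pages(flag, get_order(size));
	if (!virt_addr)
		return NULL;

	if (map_for_dma(virt_addr, size, dma_handle)) {
		free_pages(virt_addr, get_order(size));	/* unwind on mapping failure */
		return NULL;
	}

	return (void *)virt_addr;
}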
|
| /arch/m68k/include/asm/ |
| oplib.h |
|    62  extern void prom_unmapio(char *virt_addr, unsigned int num_bytes);
|   203  extern void prom_free(char *virt_addr, unsigned int size);
|   210  extern void prom_putsegment(int context, unsigned long virt_addr,
|
| /arch/riscv/include/asm/ |
| page.h |
|   102  unsigned long virt_addr;   (member)
|   125  ((x) >= kernel_map.virt_addr && (x) < (kernel_map.virt_addr + kernel_map.size))
|   158  (_y < kernel_map.virt_addr + kernel_map.xiprom_sz) ? \
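kernel_map here is the RISC-V descriptor of where the kernel image sits in virtual space, and the macro at line 125 is a plain half-open range test against [virt_addr, virt_addr + size). A tiny sketch of that test; the struct and function names below are illustrative, not the kernel's:

/*
 * Sketch of the range test at page.h line 125: an address belongs to the
 * kernel mapping iff it falls in [virt_addr, virt_addr + size).
 */
struct kernel_map_sketch {
	unsigned long virt_addr;	/* virtual base of the kernel image */
	unsigned long size;		/* size of the mapping in bytes */
};

static inline int in_kernel_mapping(const struct kernel_map_sketch *km,
				    unsigned long x)
{
	return x >= km->virt_addr && x < km->virt_addr + km->size;
}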
|
| /arch/riscv/mm/ |
| init.c |
|   164  print_ml("kernel", (unsigned long)kernel_map.virt_addr,   in print_vm_layout()
|   516  BUG_ON((va - kernel_map.virt_addr) >> PUD_SHIFT);   in alloc_pmd_early()
|   581  BUG_ON((va - kernel_map.virt_addr) >> PGDIR_SHIFT);   in alloc_pud_early()
|   619  BUG_ON((va - kernel_map.virt_addr) >> PGDIR_SHIFT);   in alloc_p4d_early()
|   944  end_va = kernel_map.virt_addr + kernel_map.xiprom_sz;   in create_kernel_page_table()
|   947  kernel_map.xiprom + (va - kernel_map.virt_addr),   in create_kernel_page_table()
|   952  end_va = kernel_map.virt_addr + kernel_map.size;   in create_kernel_page_table()
|   963  end_va = kernel_map.virt_addr + kernel_map.size;   in create_kernel_page_table()
|  1207  create_pmd_mapping(trampoline_pmd, kernel_map.virt_addr,   in setup_vm()
|  1210  create_pmd_mapping(trampoline_pmd, kernel_map.virt_addr,   in setup_vm()
|   [all …]
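Most of these init.c hits compute offsets relative to kernel_map.virt_addr; the create_kernel_page_table() hit at line 947, for instance, converts a kernel virtual address to its backing (XIP flash) address by adding the base difference. A sketch of that arithmetic only, with illustrative names:

/*
 * Sketch of the offset translation at init.c line 947: a VA inside the
 * kernel mapping maps to backing_base plus its offset from the virtual base.
 */
static inline unsigned long kernel_va_to_backing(unsigned long va,
						 unsigned long virt_base,
						 unsigned long backing_base)
{
	return backing_base + (va - virt_base);
}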
|
| physaddr.c |
|    24  unsigned long kernel_start = kernel_map.virt_addr;   in __phys_addr_symbol()
|
| /arch/arm64/kvm/hyp/nvhe/ |
| mm.c |
|   117  unsigned long virt_addr;   in pkvm_create_mappings_locked() (local)
|   125  for (virt_addr = start; virt_addr < end; virt_addr += PAGE_SIZE) {   in pkvm_create_mappings_locked()
|   128  phys = hyp_virt_to_phys((void *)virt_addr);   in pkvm_create_mappings_locked()
|   129  err = kvm_pgtable_hyp_map(&pkvm_pgtable, virt_addr, PAGE_SIZE,   in pkvm_create_mappings_locked()
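pkvm_create_mappings_locked() maps a hypervisor range one page at a time: step by PAGE_SIZE, resolve each virtual address to a physical one, install a single-page mapping, and stop at the first error. A hedged sketch of that loop shape, with the pKVM-specific helpers (hyp_virt_to_phys(), kvm_pgtable_hyp_map()) replaced by callbacks and an illustrative page size:

#include <linux/types.h>	/* phys_addr_t */

#define PAGE_SIZE_SK	4096UL	/* illustrative page size */

/*
 * Sketch of the per-page loop in pkvm_create_mappings_locked(): the two
 * callbacks stand in for hyp_virt_to_phys() and kvm_pgtable_hyp_map().
 */
static int map_range_sketch(unsigned long start, unsigned long end,
			    phys_addr_t (*va_to_pa)(unsigned long virt_addr),
			    int (*map_page)(unsigned long virt_addr, phys_addr_t pa))
{
	unsigned long virt_addr;
	int err;

	for (virt_addr = start; virt_addr < end; virt_addr += PAGE_SIZE_SK) {
		phys_addr_t phys = va_to_pa(virt_addr);

		err = map_page(virt_addr, phys);
		if (err)
			return err;
	}
	return 0;
}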
|
| /arch/x86/kernel/cpu/sgx/ |
| sgx.h |
|    59  void *virt_addr;   (member)
|    83  return section->virt_addr + index * PAGE_SIZE;   in sgx_get_epc_virt_addr()
|
| main.c |
|   630  section->virt_addr = memremap(phys_addr, size, MEMREMAP_WB);   in sgx_setup_epc_section()
|   631  if (!section->virt_addr)   in sgx_setup_epc_section()
|   636  memunmap(section->virt_addr);   in sgx_setup_epc_section()
|   964  memunmap(sgx_epc_sections[i].virt_addr);   in sgx_init()
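Together with the sgx.h hits above, these lines show the EPC section lifecycle: the physical EPC range is mapped write-back cacheable with memremap(), individual EPC pages are then addressed as virt_addr + index * PAGE_SIZE (sgx_get_epc_virt_addr()), and memunmap() tears the mapping down on failure or at exit. A sketch of that setup, lookup and teardown; the struct here is illustrative, not the kernel's struct sgx_epc_section:

#include <linux/errno.h>
#include <linux/io.h>	/* memremap(), memunmap(), MEMREMAP_WB */
#include <linux/mm.h>	/* PAGE_SIZE */
#include <linux/types.h>

/* Illustrative stand-in for the EPC-section bookkeeping implied above. */
struct epc_section_sketch {
	phys_addr_t phys_addr;
	void *virt_addr;
};

static int epc_section_setup(struct epc_section_sketch *s,
			     phys_addr_t phys_addr, resource_size_t size)
{
	s->phys_addr = phys_addr;
	s->virt_addr = memremap(phys_addr, size, MEMREMAP_WB);	/* cacheable map */
	return s->virt_addr ? 0 : -ENOMEM;
}

/* Page index -> virtual address, as in sgx_get_epc_virt_addr(). */
static void *epc_page_addr(struct epc_section_sketch *s, unsigned long index)
{
	return s->virt_addr + index * PAGE_SIZE;
}

static void epc_section_teardown(struct epc_section_sketch *s)
{
	memunmap(s->virt_addr);
}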
|
| /arch/riscv/kernel/ |
| efi.c |
|    54  create_pgd_mapping(mm->pgd, md->virt_addr + i * PAGE_SIZE,   in efi_create_mapping()
|    94  return apply_to_page_range(mm, md->virt_addr,   in efi_set_mapping_permissions()
|
| /arch/arm/kernel/ |
| efi.c |
|    33  base = md->virt_addr;   in efi_set_mapping_permissions()
|    52  .virtual = md->virt_addr,   in efi_create_mapping()
|
| /arch/x86/platform/efi/ |
| efi_32.c |
|    56  md->virt_addr = (unsigned long)va;   in efi_map_region()
|   139  set_memory_x(md->virt_addr, md->num_pages);   in efi_runtime_update_mappings()
|
| efi_64.c |
|   324  md->virt_addr = md->phys_addr;   in efi_map_region()
|   351  md->virt_addr = efi_va;   in efi_map_region()
|   362  __map_region(md, md->virt_addr);   in efi_map_region_fixed()
|   381  md->phys_addr, md->virt_addr);   in efi_update_mappings()
|   384  err2 = kernel_map_pages_in_pgd(pgd, pfn, md->virt_addr, md->num_pages, pf);   in efi_update_mappings()
|   387  md->phys_addr, md->virt_addr);   in efi_update_mappings()
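The efi_64.c hits show the two placements efi_map_region() can pick for a runtime descriptor: keep it identity-mapped (virt_addr = phys_addr) or assign it the next slot carved out of a dedicated, downward-growing EFI virtual window (virt_addr = efi_va). A loose sketch of that choice; the window base and the 4 KiB shift are placeholders rather than the kernel's EFI_VA_START handling:

/*
 * Loose sketch of the two placements in efi_map_region(): identity-map the
 * region, or hand out the next slot from a downward-growing virtual window.
 */
#define EFI_VA_TOP_SK	0xffffffef00000000UL	/* hypothetical window top */

static unsigned long efi_va_sk = EFI_VA_TOP_SK;

static unsigned long place_region_sketch(unsigned long phys_addr,
					 unsigned long num_pages,
					 int identity_map)
{
	if (identity_map)
		return phys_addr;		/* md->virt_addr = md->phys_addr */

	efi_va_sk -= num_pages << 12;		/* carve the next descending slot */
	return efi_va_sk;			/* md->virt_addr = efi_va */
}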
|
| runtime-map.c |
|    47  EFI_RUNTIME_U64_ATTR_SHOW(virt_addr);
|    67  static struct map_attribute map_virt_addr_attr = __ATTR_RO_MODE(virt_addr, 0400);
|
| /arch/sparc/mm/ |
| srmmu.c |
|   497  pgdp = pgd_offset_k(virt_addr);   in srmmu_mapioaddr()
|   498  p4dp = p4d_offset(pgdp, virt_addr);   in srmmu_mapioaddr()
|   499  pudp = pud_offset(p4dp, virt_addr);   in srmmu_mapioaddr()
|   500  pmdp = pmd_offset(pudp, virt_addr);   in srmmu_mapioaddr()
|   510  __flush_page_to_ram(virt_addr);   in srmmu_mapioaddr()
|   535  pgdp = pgd_offset_k(virt_addr);   in srmmu_unmapioaddr()
|   536  p4dp = p4d_offset(pgdp, virt_addr);   in srmmu_unmapioaddr()
|   537  pudp = pud_offset(p4dp, virt_addr);   in srmmu_unmapioaddr()
|   538  pmdp = pmd_offset(pudp, virt_addr);   in srmmu_unmapioaddr()
|   549  srmmu_unmapioaddr(virt_addr);   in srmmu_unmapiorange()
|   [all …]
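srmmu_mapioaddr() and srmmu_unmapioaddr() open with the same descent through the page-table levels for a kernel virtual address: pgd to p4d to pud to pmd. A sketch of that walk using the generic accessors, returning the covering pmd and leaving out the srmmu-specific PTE manipulation that follows in the real functions:

#include <linux/mm.h>
#include <linux/pgtable.h>

/*
 * Sketch of the descent shared by srmmu_mapioaddr()/srmmu_unmapioaddr():
 * return the pmd entry covering virt_addr, or NULL if an upper level is
 * absent. The real code goes on to edit the PTE underneath.
 */
static pmd_t *walk_to_pmd_sketch(unsigned long virt_addr)
{
	pgd_t *pgdp = pgd_offset_k(virt_addr);
	p4d_t *p4dp;
	pud_t *pudp;
	pmd_t *pmdp;

	if (pgd_none(*pgdp))
		return NULL;
	p4dp = p4d_offset(pgdp, virt_addr);
	if (p4d_none(*p4dp))
		return NULL;
	pudp = pud_offset(p4dp, virt_addr);
	if (pud_none(*pudp))
		return NULL;
	pmdp = pmd_offset(pudp, virt_addr);
	if (pmd_none(*pmdp))
		return NULL;
	return pmdp;
}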
|
| /arch/powerpc/kexec/ |
| vmcore_info.c |
|    23  VMCOREINFO_OFFSET(vmemmap_backing, virt_addr);   in arch_crash_save_vmcoreinfo()
|
| /arch/arm64/kernel/ |
| efi.c |
|   102  create_pgd_mapping(mm, md->phys_addr, md->virt_addr,   in efi_create_mapping()
|   148  return apply_to_page_range(mm, md->virt_addr,   in efi_set_mapping_permissions()
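As in the riscv and arm efi.c entries above, the arm64 EFI code maps each runtime region at md->virt_addr and later revisits the same range with apply_to_page_range() to adjust permissions one PTE at a time. A sketch of that usage; the callback body (write-protecting each PTE) is illustrative rather than the kernel's EFI permission policy:

#include <linux/mm.h>		/* apply_to_page_range() */
#include <linux/pgtable.h>

/*
 * Sketch of the apply_to_page_range() pattern implied by
 * efi_set_mapping_permissions(): visit every PTE in the range and rewrite
 * its attributes; here each PTE is simply write-protected.
 */
static int wrprotect_pte_sketch(pte_t *ptep, unsigned long addr, void *data)
{
	set_pte(ptep, pte_wrprotect(ptep_get(ptep)));
	return 0;
}

static int set_range_ro_sketch(struct mm_struct *mm, unsigned long virt_addr,
			       unsigned long size)
{
	return apply_to_page_range(mm, virt_addr, size, wrprotect_pte_sketch, NULL);
}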
|
| /arch/powerpc/include/asm/nohash/64/ |
| pgalloc.h |
|    14  unsigned long virt_addr;   (member)
|