/arch/x86/platform/intel-quark/imr_selftest.c
   72: imr_self_test_result(ret < 0, fmt_over, __va(base), __va(base + size));  (in imr_self_test)
   77: imr_self_test_result(ret < 0, fmt_over, __va(base), __va(base + size));  (in imr_self_test)
   82: imr_self_test_result(ret < 0, fmt_over, __va(base), __va(base + size));  (in imr_self_test)

/arch/x86/include/asm/page.h
   57: #ifndef __va
   58: #define __va(x) ((void *)((unsigned long)(x)+PAGE_OFFSET))  (macro definition)
   61: #define __boot_va(x) __va(x)
   74: return __va(pfn << PAGE_SHIFT);  (in pfn_to_kaddr)

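The x86 definition above is the canonical shape of __va(): translating a physical address into the kernel's direct (linear) mapping is a constant offset, and pfn_to_kaddr() is just a page-shift composed with it. A minimal userspace sketch of that arithmetic; the model_* names and the concrete PAGE_OFFSET value are ours, for illustration only (the constant shown is x86-64's typical direct-map base without KASLR):

```c
#include <stdio.h>
#include <stdint.h>

/* Illustrative values only; the real ones are per-architecture
 * kernel configuration, not visible to user code. */
#define PAGE_SHIFT  12
#define PAGE_OFFSET 0xffff888000000000ULL

/* Models of the macros above: virt = phys + PAGE_OFFSET, and back. */
static void *model_va(uint64_t paddr)
{
	return (void *)(uintptr_t)(paddr + PAGE_OFFSET);
}

static uint64_t model_pa(void *vaddr)
{
	return (uint64_t)(uintptr_t)vaddr - PAGE_OFFSET;
}

static void *model_pfn_to_kaddr(uint64_t pfn)
{
	return model_va(pfn << PAGE_SHIFT);
}

int main(void)
{
	uint64_t phys = 0x1234000;	/* an arbitrary page-aligned physical address */

	void *virt = model_va(phys);
	printf("phys 0x%llx -> virt %p\n", (unsigned long long)phys, virt);

	/* The round trip is an identity for any linear-map address. */
	printf("model_pa(model_va(x)) == x: %d\n", model_pa(virt) == phys);

	printf("pfn 0x%llx -> kaddr %p\n",
	       (unsigned long long)(phys >> PAGE_SHIFT),
	       model_pfn_to_kaddr(phys >> PAGE_SHIFT));
	return 0;
}
```

The pointers printed here are of course not dereferenceable; only the arithmetic is being demonstrated.
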
/arch/m68k/include/asm/page_no.h
   20: #define __va(paddr) ((void *)((unsigned long)(paddr)))  (macro definition)
   29: return __va(pfn << PAGE_SHIFT);  (in pfn_to_virt)
   33: #define page_to_virt(page) __va(((((page) - mem_map) << PAGE_SHIFT) + PAGE_OFFSET))

/arch/m68k/include/asm/page_mm.h
   81: static inline void *__va(unsigned long paddr)  (function definition)
  105: static inline void *__va(unsigned long x)  (function definition)
  130: return __va(pfn << PAGE_SHIFT);  (in pfn_to_virt)

/arch/m68k/include/asm/motorola_pgtable.h
  100: #define __pte_page(pte) ((unsigned long)__va(pte_val(pte) & PAGE_MASK))
  101: #define pmd_page_vaddr(pmd) ((unsigned long)__va(pmd_val(pmd) & _TABLE_MASK))
  102: #define pud_pgtable(pud) ((pmd_t *)__va(pud_val(pud) & _TABLE_MASK))
  110: #define pte_page(pte) virt_to_page(__va(pte_val(pte)))
  132: #define pud_page(pud) (mem_map + ((unsigned long)(__va(pud_val(pud)) - PAGE_OFFSET) >> PAGE_SHIFT))

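The motorola_pgtable.h hits show the page-table-walk use of __va(): a table entry stores the next level's physical address with flag bits packed into the low bits, so the walker masks the flags off and converts the remainder into a dereferenceable pointer. A toy model of that pattern; the flag values, the mask width, and the userspace phys_mem stand-in are all invented (real m68k's _TABLE_MASK is narrower than PAGE_MASK because its tables are smaller than a page):

```c
#include <stdio.h>
#include <stdint.h>

/* Toy "physical memory" so the sketch runs in userspace: physical
 * address p corresponds to &phys_mem[p], making __va/__pa simple casts. */
static uint8_t phys_mem[1 << 20];
#define __va(p)	((void *)(phys_mem + (p)))
#define __pa(v)	((uint64_t)((uint8_t *)(v) - phys_mem))

#define _TABLE_MASK	(~0xfffULL)	/* assumed mask, for illustration */

typedef uint64_t pmd_t;

/* Same shape as pmd_page_vaddr() above: mask off the flag bits to
 * recover the physical table address, then translate it with __va(). */
#define pmd_page_vaddr(pmd)	((unsigned long)__va((pmd) & _TABLE_MASK))

int main(void)
{
	uint64_t table_phys = 0x3000;	/* next-level table lives here */
	pmd_t pmd = table_phys | 0x3;	/* low bits: invented "present" flags */

	unsigned long vaddr = pmd_page_vaddr(pmd);
	printf("entry 0x%llx -> table at virt %p (phys 0x%llx)\n",
	       (unsigned long long)pmd, (void *)vaddr,
	       (unsigned long long)__pa((void *)vaddr));
	return 0;
}
```
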
/arch/loongarch/include/asm/page.h
   68: #define __va(x) ((void *)((unsigned long)(x) + PAGE_OFFSET - PHYS_OFFSET))  (macro definition)
   70: #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT)
   81: #define page_to_virt(page) __va(page_to_phys(page))
   91: (__kfence_pool == NULL) ? __va(page_to_phys(page)) : page_address(page); \

/arch/powerpc/include/asm/page.h  (three definitions, selected by kernel configuration)
  189: #define __va(x) ((void *)(unsigned long)((phys_addr_t)(x) + VIRT_PHYS_OFFSET))  (macro definition)
  201: #define __va(x) \  (macro definition; body continues on lines not captured here)
  214: #define __va(x) ((void *)(unsigned long)((phys_addr_t)(x) + PAGE_OFFSET - MEMORY_START))  (macro definition)
  227: return __va(pfn << PAGE_SHIFT);  (in pfn_to_kaddr)

/arch/powerpc/include/asm/sections.h
   68: return start < (unsigned long)__va(real_end) &&  (in overlaps_interrupt_vector_text)
   69: (unsigned long)__va(real_start) < end;  (in overlaps_interrupt_vector_text)

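The sections.h hit is a range-overlap test: the interrupt-vector region is known by its physical bounds, so both bounds go through __va() before the usual "each range starts before the other ends" comparison. A standalone sketch with an invented offset and invented ranges:

```c
#include <stdbool.h>
#include <stdio.h>

/* Invented constant standing in for the kernel's linear-map base. */
#define PAGE_OFFSET 0xc0000000UL
#define __va(x)	((x) + PAGE_OFFSET)	/* numeric model, not a pointer */

/* Same shape as overlaps_interrupt_vector_text(): translate the
 * physical bounds to virtual, then apply the classic half-open
 * interval test. */
static bool overlaps(unsigned long start, unsigned long end,
		     unsigned long real_start, unsigned long real_end)
{
	return start < __va(real_end) && __va(real_start) < end;
}

int main(void)
{
	/* Virtual [0xc0000100, 0xc0000200) vs physical [0x180, 0x300): overlap. */
	printf("%d\n", overlaps(0xc0000100, 0xc0000200, 0x180, 0x300));	/* 1 */
	/* Touching ranges do not overlap under the half-open convention. */
	printf("%d\n", overlaps(0xc0000100, 0xc0000200, 0x200, 0x300));	/* 0 */
	return 0;
}
```
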
/arch/loongarch/mm/kasan_init.c
  124: memcpy(__va(pte_phys), kasan_early_shadow_pte, sizeof(kasan_early_shadow_pte));  (in kasan_pte_offset)
  125: pmd_populate_kernel(NULL, pmdp, (pte_t *)__va(pte_phys));  (in kasan_pte_offset)
  137: memcpy(__va(pmd_phys), kasan_early_shadow_pmd, sizeof(kasan_early_shadow_pmd));  (in kasan_pmd_offset)
  138: pud_populate(&init_mm, pudp, (pmd_t *)__va(pmd_phys));  (in kasan_pmd_offset)
  150: memcpy(__va(pud_phys), kasan_early_shadow_pud, sizeof(kasan_early_shadow_pud));  (in kasan_pud_offset)
  151: p4d_populate(&init_mm, p4dp, (pud_t *)__va(pud_phys));  (in kasan_pud_offset)
  163: memcpy(__va(p4d_phys), kasan_early_shadow_p4d, sizeof(kasan_early_shadow_p4d));  (in kasan_p4d_offset)
  164: pgd_populate(&init_mm, pgdp, (p4d_t *)__va(p4d_phys));  (in kasan_p4d_offset)

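Every hit in kasan_init.c is the same two-step idiom at each page-table level: a next-level table known so far only by its physical address is filled in through its linear-map virtual address (memcpy via __va()), and then the physical address is installed in the parent entry. A toy model of the sequence; phys_mem, the table size, and the flag bit are invented:

```c
#include <stdio.h>
#include <stdint.h>
#include <string.h>

/* Toy physical memory; __va() turns a "physical" offset into a pointer. */
static uint8_t phys_mem[1 << 16];
#define __va(p)	((void *)(phys_mem + (p)))

#define PTRS_PER_PTE	8	/* shrunk for the sketch; the real count is arch-specific */
typedef uint64_t pte_t;

/* Stand-in for kasan_early_shadow_pte: a template that every newly
 * allocated shadow table starts out as a copy of. */
static pte_t shadow_template[PTRS_PER_PTE] = {
	0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
};

int main(void)
{
	uint64_t pte_phys = 0x2000;	/* the freshly "allocated" table page */
	uint64_t parent_entry;

	/* Step 1: initialize the new table through its linear-map address. */
	memcpy(__va(pte_phys), shadow_template, sizeof(shadow_template));

	/* Step 2: install the *physical* address in the parent entry; this
	 * is the role pmd_populate_kernel() and friends play in the kernel. */
	parent_entry = pte_phys | 0x1;	/* invented "present" bit */

	printf("parent entry 0x%llx, first slot 0x%llx\n",
	       (unsigned long long)parent_entry,
	       (unsigned long long)((pte_t *)__va(pte_phys))[0]);
	return 0;
}
```
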
/arch/microblaze/include/asm/page.h
  100: # define page_to_virt(page) __va(page_to_pfn(page) << PAGE_SHIFT)
  119: # define __va(x) ((void *)__phys_to_virt((unsigned long)(x)))  (macro definition)
  128: return __va(pfn_to_phys((pfn)));  (in pfn_to_virt)

/arch/arm/mm/dma-mapping-nommu.c
   19: dmac_map_area(__va(paddr), size, dir);  (in arch_sync_dma_for_device)
   32: dmac_unmap_area(__va(paddr), size, dir);  (in arch_sync_dma_for_cpu)

/arch/x86/mm/mem_encrypt_amd.c
   67: early_snp_set_memory_shared((unsigned long)__va(paddr), paddr, npages);  (in snp_memcpy)
   72: early_snp_set_memory_private((unsigned long)__va(paddr), paddr, npages);  (in snp_memcpy)
  194: __sme_early_map_unmap_mem(__va(cmdline_paddr), COMMAND_LINE_SIZE, false);  (in sme_unmap_bootdata)
  214: __sme_early_map_unmap_mem(__va(cmdline_paddr), COMMAND_LINE_SIZE, true);  (in sme_map_bootdata)
  344: clflush_cache_range(__va(d->pa), d->size);  (in prepare_pte_enc)
  379: early_snp_set_memory_shared((unsigned long)__va(d.pa), d.pa, 1);  (in __set_clr_pte_enc)
  389: early_snp_set_memory_private((unsigned long)__va(d.pa), d.pa, 1);  (in __set_clr_pte_enc)

/arch/x86/include/asm/numachip/numachip_csr.h
   41: return __va(NUMACHIP_LCSR_BASE | (1UL << 15) |  (in lcsr_address)
   69: return (void __iomem *)__va(NUMACHIP2_LCSR_BASE |  (in numachip2_lcsr_address)

/arch/nios2/include/asm/page.h
   76: # define __va(x) \  (macro definition; body continues on lines not captured here)
   82: # define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT)

/arch/s390/include/asm/dma-types.h
   45: return __va((__force unsigned long)addr);  (in dma32_to_virt)
   80: return __va((__force unsigned long)addr);  (in dma64_to_virt)

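On s390, DMA addresses travel as distinct sparse-checked types (dma32_t/dma64_t), so converting one back to a kernel pointer first strips the special type with a __force cast and then applies the ordinary __va() translation. A userspace approximation, with sparse's __force reduced to a no-op and the handle modeled as a plain typedef:

```c
#include <stdio.h>
#include <stdint.h>

/* Outside the kernel there is no sparse, so the annotation is a no-op. */
#define __force
typedef uint64_t dma64_t;	/* stand-in for s390's bitwise-typed handle */

/* Toy linear map so the conversion yields a real pointer. */
static uint8_t phys_mem[1 << 16];
#define __va(p)	((void *)(phys_mem + (p)))

/* Mirrors dma64_to_virt() above: strip the handle's type with __force,
 * then translate the raw value like any physical address. */
static void *dma64_to_virt(dma64_t addr)
{
	return __va((__force unsigned long)addr);
}

int main(void)
{
	dma64_t handle = 0x2000;

	printf("handle 0x%llx -> virt %p\n",
	       (unsigned long long)handle, dma64_to_virt(handle));
	return 0;
}
```
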
/arch/x86/realmode/init.c
  140: __va(real_mode_header->trampoline_header);  (in setup_real_mode)
  163: trampoline_pgd = (u64 *) __va(real_mode_header->trampoline_pgd);  (in setup_real_mode)
  202: (unsigned long) __va(real_mode_header->text_start);  (in set_real_mode_permissions)

/arch/sparc/include/asm/page_64.h
  145: #define __va(x) ((void *)((unsigned long) (x) + PAGE_OFFSET))  (macro definition)
  147: #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT)
  154: #define phys_to_virt __va

/arch/sparc/include/asm/pgtsrmmu.h
  107: #define __nocache_va(PADDR) (__va((unsigned long)PADDR) - (unsigned long)srmmu_nocache_pool + SRMMU…  (line truncated in the index)
  108: #define __nocache_fix(VADDR) ((__typeof__(VADDR))__va(__nocache_pa(VADDR)))

/arch/riscv/mm/init.c
  424: return (pte_t *) __va(pa);  (in get_pte_virt_late)
  511: return (pmd_t *) __va(pa);  (in get_pmd_virt_late)
  575: return (pud_t *)__va(pa);  (in get_pud_virt_late)
  613: return (p4d_t *)__va(pa);  (in get_p4d_virt_late)
  691: #define alloc_pgd_next(__va) (pgtable_l5_enabled ? \  (__va here is a macro argument, not the page.h macro)
  692: pt_ops.alloc_p4d(__va) : (pgtable_l4_enabled ? \
  693: pt_ops.alloc_pud(__va) : pt_ops.alloc_pmd(__va)))
  711: #define alloc_pgd_next(__va) pt_ops.alloc_pte(__va)  (macro argument)
 1269: va = (uintptr_t)__va(pa);  (in create_linear_mapping_range)
 1305: __kfence_pool = __va(kfence_pool);  (in create_linear_mapping_page_table)
 [all …]  (remaining matches truncated)

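One detail worth flagging in the init.c hits: in the alloc_pgd_next() definitions at lines 691-711, __va is not the page.h translation macro at all but the name of the macro's own parameter, which shadows it inside the replacement list. A minimal demonstration of that preprocessor behavior (the names and the offset are invented):

```c
#include <stdio.h>

/* Pretend page.h macro: translate by a made-up constant offset. */
#define __va(x)	((void *)((unsigned long)(x) + 0x1000))

/* Inside a function-like macro, a parameter named __va refers to the
 * parameter, not to the macro above -- the same shadowing the riscv
 * alloc_pgd_next() definitions rely on. */
#define call_with(fn, __va)	fn(__va)

static void alloc_pte(unsigned long va)
{
	printf("would allocate a table for 0x%lx\n", va);
}

int main(void)
{
	call_with(alloc_pte, 0x42UL);	/* 0x42 passes through untranslated */
	return 0;
}
```
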
/arch/hexagon/include/asm/page.h
   84: #define __va(x) ((void *)((unsigned long)(x) - PHYS_OFFSET + PAGE_OFFSET))  (macro definition)
  124: #define page_to_virt(page) __va(page_to_phys(page))

/arch/csky/include/asm/page.h
   69: #define __va(x) ((void *)((unsigned long)(x) + PAGE_OFFSET - va_pa_offset))  (macro definition)
   82: #define pfn_to_kaddr(x) __va(PFN_PHYS(x))

/arch/x86/include/asm/uv/uv_hub.h
  538: return __va(((unsigned long)pnode << m_val) | offset);  (in uv_pnode_offset_to_vaddr)
  544: return __va((unsigned long)offset);  (in uv_pnode_offset_to_vaddr)
  547: return __va(base << UV_GAM_RANGE_SHFT | offset);  (in uv_pnode_offset_to_vaddr)
  565: return __va(UV_GLOBAL_MMR32_BASE |  (in uv_global_mmr32_address)
  585: return __va(UV_GLOBAL_MMR64_BASE |  (in uv_global_mmr64_address)
  615: return __va(UV_LOCAL_MMR_BASE | offset);  (in uv_local_mmr_address)

/arch/parisc/kernel/setup.c
   54: strscpy(boot_command_line, (char *)__va(boot_args[1]),  (in setup_cmdline)
   74: initrd_start = (unsigned long)__va(boot_args[2]);  (in setup_cmdline)
   75: initrd_end = (unsigned long)__va(boot_args[3]);  (in setup_cmdline)

/arch/openrisc/mm/init.c
   83: v = (u32) __va(p);  (in map_ram)
  156: unsigned long *dtlb_vector = __va(0x900);  (in paging_init)
  157: unsigned long *itlb_vector = __va(0xa00);  (in paging_init)

/arch/riscv/include/asm/page.h
  182: #define __va(x) ((void *)__pa_to_va_nodebug((phys_addr_t)(x)))  (macro definition)
  188: #define pfn_to_virt(pfn) (__va(pfn_to_phys(pfn)))
  199: return __va(pfn << PAGE_SHIFT);  (in pfn_to_kaddr)