| /arch/riscv/include/asm/ |
| sections.h |
   18: static inline bool is_va_kernel_text(uintptr_t va)  (in is_va_kernel_text(), argument)
   23: return va >= start && va < end;  (in is_va_kernel_text())
   26: static inline bool is_va_kernel_lm_alias_text(uintptr_t va)  (in is_va_kernel_lm_alias_text(), argument)
   31: return va >= start && va < end;  (in is_va_kernel_lm_alias_text())
|
| /arch/x86/kernel/ |
| irq_64.c |
   40: void *va;  (in map_irq_stack(), local)
   49: va = vmap(pages, IRQ_STACK_SIZE / PAGE_SIZE, VM_MAP, PAGE_KERNEL);  (in map_irq_stack())
   50: if (!va)  (in map_irq_stack())
   54: per_cpu(hardirq_stack_ptr, cpu) = va + IRQ_STACK_SIZE - 8;  (in map_irq_stack())
   64: void *va = per_cpu_ptr(&irq_stack_backing_store, cpu);  (in map_irq_stack(), local)
   67: per_cpu(hardirq_stack_ptr, cpu) = va + IRQ_STACK_SIZE - 8;  (in map_irq_stack())
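
The two map_irq_stack() variants above are the vmalloc-backed and direct-mapped ways of pointing hardirq_stack_ptr at the top of a per-CPU IRQ stack. A minimal sketch of the vmap() idiom, assuming the caller already has the backing pages (demo_map_stack and DEMO_STACK_SIZE are hypothetical names):

    #include <linux/vmalloc.h>
    #include <linux/mm.h>

    #define DEMO_STACK_SIZE (4 * PAGE_SIZE)

    /* Map physically scattered pages contiguously in vmalloc space;
     * the vmalloc guard pages then catch stack overruns. */
    static void *demo_map_stack(struct page **pages)
    {
            void *va = vmap(pages, DEMO_STACK_SIZE / PAGE_SIZE,
                            VM_MAP, PAGE_KERNEL);

            if (!va)
                    return NULL;
            /* Stacks grow down: point just below the top, leaving the
             * 8 bytes of padding seen in the fragment. */
            return va + DEMO_STACK_SIZE - 8;
    }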
|
| ldt.c |
  223: p4d = p4d_offset(pgd, va);  (in pgd_to_pmd_walk())
  227: pud = pud_offset(p4d, va);  (in pgd_to_pmd_walk())
  231: return pmd_offset(pud, va);  (in pgd_to_pmd_walk())
  290: unsigned long va;  (in map_ldt_struct(), local)
  326: ptep = get_locked_pte(mm, va, &ptl);  (in map_ldt_struct())
  338: set_pte_at(mm, va, ptep, pte);  (in map_ldt_struct())
  351: unsigned long va;  (in unmap_ldt_struct(), local)
  369: ptep = get_locked_pte(mm, va, &ptl);  (in unmap_ldt_struct())
  371: pte_clear(mm, va, ptep);  (in unmap_ldt_struct())
  376: va = (unsigned long)ldt_slot_va(ldt->slot);  (in unmap_ldt_struct())
  [all …]
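
map_ldt_struct() installs its user-visible mapping one PTE at a time: get_locked_pte() walks the page tables down to the PTE (allocating intermediate levels as needed) and returns it with the page-table lock held, then set_pte_at() stores the entry. A sketch of that sequence using only the generic mm API (demo_install_pte is a hypothetical name):

    #include <linux/mm.h>

    /* Map pfn at va in the given mm with the given protection. */
    static int demo_install_pte(struct mm_struct *mm, unsigned long va,
                                unsigned long pfn, pgprot_t prot)
    {
            spinlock_t *ptl;
            pte_t *ptep = get_locked_pte(mm, va, &ptl);

            if (!ptep)
                    return -ENOMEM;
            set_pte_at(mm, va, ptep, pfn_pte(pfn, prot));
            pte_unmap_unlock(ptep, ptl);    /* drop the lock taken above */
            return 0;
    }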
|
| /arch/xtensa/mm/ |
| ioremap.c |
   25: unsigned long va = (unsigned long) addr;  (in iounmap(), local)
   27: if ((va >= XCHAL_KIO_CACHED_VADDR &&  (in iounmap())
   28: va - XCHAL_KIO_CACHED_VADDR < XCHAL_KIO_SIZE) ||  (in iounmap())
   29: (va >= XCHAL_KIO_BYPASS_VADDR &&  (in iounmap())
   30: va - XCHAL_KIO_BYPASS_VADDR < XCHAL_KIO_SIZE))  (in iounmap())
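
iounmap() here refuses to tear down addresses inside xtensa's two fixed KIO windows (cached and bypass), since those were never created by ioremap(). The range test is the usual unsigned-subtraction idiom; a generic sketch:

    #include <linux/types.h>

    /* True if va lies in [base, base + size).  With unsigned arithmetic
     * the subtraction also rejects va < base, because the difference
     * wraps to a value >= size, so the explicit va >= base check in the
     * fragment is belt-and-braces. */
    static inline bool demo_in_window(unsigned long va,
                                      unsigned long base, unsigned long size)
    {
            return va - base < size;
    }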
|
| /arch/parisc/include/asm/ |
| special_insns.h |
    5: #define lpa(va) ({ \  (argument)
   14: : "r" (va) \
   20: #define lpa_user(va) ({ \  (argument)
   29: : "r" (va) \
   47: #define prober_user(sr, va) ({ \  (argument)
   57: : "i" (sr), "r" (va), "i" (PRIV_USER) \
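
lpa(), lpa_user() and prober_user() are built on the GNU statement-expression extension: a ({ ... }) block evaluates to its last expression, so a macro can declare a local, run inline asm into it, and yield the result as a value. A generic sketch of the shape; the asm body is an illustrative x86 mov, not parisc's lpa instruction:

    #define demo_read_via_asm(input) ({                     \
            unsigned long __ret;                            \
            asm volatile("mov %1, %0"   /* copy input */    \
                         : "=r" (__ret)                     \
                         : "r" (input));                    \
            __ret;  /* value of the whole ({ ... }) */      \
    })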
|
| /arch/openrisc/kernel/ |
| dma.c |
   67: unsigned long va = (unsigned long)cpu_addr;  (in arch_dma_set_uncached(), local)
   75: error = walk_kernel_page_table_range(va, va + size,  (in arch_dma_set_uncached())
   86: unsigned long va = (unsigned long)cpu_addr;  (in arch_dma_clear_uncached(), local)
   90: WARN_ON(walk_kernel_page_table_range(va, va + size,  (in arch_dma_clear_uncached())
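
arch_dma_set_uncached() walks the kernel page tables over [va, va + size) and rewrites each PTE with uncached attributes. Such walks are driven by an mm_walk_ops table of callbacks; a sketch of the per-PTE hook (the ops structure and callback signature are the generic pagewalk API, while demo_set_uncached_pte and pte_mkuncached_demo are hypothetical):

    #include <linux/pagewalk.h>

    /* Invoked once for every PTE in the walked range. */
    static int demo_set_uncached_pte(pte_t *ptep, unsigned long addr,
                                     unsigned long next, struct mm_walk *walk)
    {
            pte_t pte = ptep_get(ptep);

            /* pte_mkuncached_demo() stands in for an arch-specific helper
             * that sets the uncached attribute bits in the entry. */
            set_pte(ptep, pte_mkuncached_demo(pte));
            return 0;
    }

    static const struct mm_walk_ops demo_walk_ops = {
            .pte_entry = demo_set_uncached_pte,
    };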
|
| /arch/alpha/kernel/ |
| traps.c |
  386: unaligned[0].va = (unsigned long) va;  (in do_entUna())
  404: : "r"(va), "0"(0));  (in do_entUna())
  420: : "r"(va), "0"(0));  (in do_entUna())
  436: : "r"(va), "0"(0));  (in do_entUna())
  519: pc, va, opcode, reg);  (in do_entUna())
  695: unaligned[1].va = (unsigned long)va;  (in do_entUnaUser())
  727: : "r"(va), "0"(0));  (in do_entUnaUser())
  743: : "r"(va), "0"(0));  (in do_entUnaUser())
  759: : "r"(va), "0"(0));  (in do_entUnaUser())
  775: : "r"(va), "0"(0));  (in do_entUnaUser())
  [all …]
|
| /arch/powerpc/mm/nohash/ |
| 8xx.c |
   26: phys_addr_t v_block_mapped(unsigned long va)  (in v_block_mapped(), argument)
   30: if (va >= VIRT_IMMR_BASE && va < VIRT_IMMR_BASE + IMMR_SIZE)  (in v_block_mapped())
   31: return p + va - VIRT_IMMR_BASE;  (in v_block_mapped())
   32: if (va >= PAGE_OFFSET && va < PAGE_OFFSET + block_mapped_ram)  (in v_block_mapped())
   33: return __pa(va);  (in v_block_mapped())
   52: static int __ref __early_map_kernel_hugepage(unsigned long va, phys_addr_t pa,  (in __early_map_kernel_hugepage(), argument)
   55: pmd_t *pmdp = pmd_off_k(va);  (in __early_map_kernel_hugepage())
   75: ptep = early_pte_alloc_kernel(pmdp, va);  (in __early_map_kernel_hugepage())
   84: ptep = pte_offset_kernel(pmdp, va);  (in __early_map_kernel_hugepage())
   90: set_huge_pte_at(&init_mm, va, ptep,  (in __early_map_kernel_hugepage())
|
| /arch/powerpc/mm/book3s64/ |
| hash_native.c |
   70: unsigned long va;  (in ___tlbie(), local)
   94: va |= ssize << 8;  (in ___tlbie())
   96: va |= sllp << 5;  (in ___tlbie())
  105: va |= penc << 12;  (in ___tlbie())
  106: va |= ssize << 8;  (in ___tlbie())
  115: va |= 1; /* L */  (in ___tlbie())
  121: return va;  (in ___tlbie())
  166: unsigned long va;  (in __tlbiel(), local)
  184: va |= ssize << 8;  (in __tlbiel())
  186: va |= sllp << 5;  (in __tlbiel())
  [all …]
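
___tlbie() assembles the operand of the tlbie instruction by masking the effective address and OR-ing in the encoding fields at the bit positions shown above. A schematic of the packing for the base-page-size path (demo_pack_rb is a hypothetical name; the field positions are the ones at lines 94 and 96, the mask is illustrative):

    static unsigned long demo_pack_rb(unsigned long va,
                                      unsigned long ssize, unsigned long sllp)
    {
            unsigned long rb = va & ~0xffful;   /* keep the address bits */

            rb |= ssize << 8;   /* segment size (256M/1T) field */
            rb |= sllp << 5;    /* page-size encoding from the SLB LP bits */
            return rb;
    }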
|
| radix_tlb.c |
  163: rb = va & ~(PPC_BITMASK(52, 63));  (in __tlbiel_va())
  179: rb = va & ~(PPC_BITMASK(52, 63));  (in __tlbie_va())
  195: rb = va & ~(PPC_BITMASK(52, 63));  (in __tlbie_lpid_va())
  448: __tlbiel_va(va, pid, ap, ric);  (in _tlbiel_va())
  482: __tlbie_va(va, pid, ap, ric);  (in _tlbie_va())
  483: fixup_tlbie_va(va, pid, ap);  (in _tlbie_va())
  489: unsigned long va;  (member)
  511: struct tlbiel_va t = { .va = va, .pid = pid, .psize = psize, .ric = ric };  (in _tlbiel_va_multicast())
  540: __tlbie_lpid_va(va, lpid, ap, ric);  (in _tlbie_lpid_va())
  541: fixup_tlbie_lpid_va(va, lpid, ap);  (in _tlbie_lpid_va())
  [all …]
|
| /arch/riscv/mm/ |
| init.c |
  785: if (is_va_kernel_text(va))  (in pgprot_from_va())
  941: uintptr_t va, start_va, end_va;  (in create_kernel_page_table(), local)
  945: for (va = kernel_map.virt_addr; va < end_va; va += PMD_SIZE)  (in create_kernel_page_table())
  946: create_pgd_mapping(pgdir, va,  (in create_kernel_page_table())
  953: for (va = start_va; va < end_va; va += PMD_SIZE)  (in create_kernel_page_table())
  954: create_pgd_mapping(pgdir, va,  (in create_kernel_page_table())
  961: uintptr_t va, end_va;  (in create_kernel_page_table(), local)
  964: for (va = kernel_map.virt_addr; va < end_va; va += PMD_SIZE)  (in create_kernel_page_table())
  965: create_pgd_mapping(pgdir, va,  (in create_kernel_page_table())
 1266: uintptr_t va, map_size;  (in create_linear_mapping_range(), local)
  [all …]
|
| /arch/arm/mach-omap1/ |
| irq.c |
   59: void __iomem *va;  (member)
   71: return readl_relaxed(irq_banks[bank].va + offset);  (in irq_bank_readl())
   75: writel_relaxed(value, irq_banks[bank].va + offset);  (in irq_bank_writel())
   81: writel_relaxed(0x1, irq_banks[1].va + IRQ_CONTROL_REG_OFFSET);  (in omap_ack_irq())
   83: writel_relaxed(0x1, irq_banks[0].va + IRQ_CONTROL_REG_OFFSET);  (in omap_ack_irq())
  137: void __iomem *l1 = irq_banks[0].va;  (in omap1_handle_irq())
  138: void __iomem *l2 = irq_banks[1].va;  (in omap1_handle_irq())
  208: irq_banks[i].va = ioremap(irq_banks[i].base_reg, 0xff);  (in omap1_init_irq())
  209: if (WARN_ON(!irq_banks[i].va))  (in omap1_init_irq())
  247: omap_alloc_gc(irq_banks[i].va, irq_base + i * 32, 32);  (in omap1_init_irq())
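
The OMAP1 interrupt code keeps one ioremap()ed base per register bank and performs all accesses as offsets from the saved cookie. A minimal sketch of the pattern (demo_bank, demo_bank_read and demo_bank_init are hypothetical; readl_relaxed()/ioremap() are the standard kernel API used above):

    #include <linux/io.h>
    #include <linux/errno.h>

    struct demo_bank {
            unsigned long base_reg;     /* physical base address */
            void __iomem *va;           /* mapped base, filled at init */
    };

    static u32 demo_bank_read(struct demo_bank *bank, unsigned int offset)
    {
            return readl_relaxed(bank->va + offset);
    }

    static int demo_bank_init(struct demo_bank *bank)
    {
            bank->va = ioremap(bank->base_reg, 0xff);   /* size as in the fragment */
            return bank->va ? 0 : -ENOMEM;
    }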
|
| /arch/arm/mm/ |
| mm.h |
   25: static inline void set_top_pte(unsigned long va, pte_t pte)  (in set_top_pte(), argument)
   27: pte_t *ptep = pte_offset_kernel(top_pmd, va);  (in set_top_pte())
   29: local_flush_tlb_kernel_page(va);  (in set_top_pte())
   32: static inline pte_t get_top_pte(unsigned long va)  (in get_top_pte(), argument)
   34: pte_t *ptep = pte_offset_kernel(top_pmd, va);  (in get_top_pte())
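
set_top_pte() pairs the PTE store with a single-page TLB flush so the new translation is usable immediately; get_top_pte() is the read side. A sketch of the write path (the set_pte_ext() store is an assumption about the elided line 28; top_pmd comes from the fragment's own context):

    static inline void demo_set_top_pte(unsigned long va, pte_t pte)
    {
            pte_t *ptep = pte_offset_kernel(top_pmd, va);   /* slot for va */

            set_pte_ext(ptep, pte, 0);          /* store the new entry */
            local_flush_tlb_kernel_page(va);    /* evict the stale TLB entry */
    }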
|
| cache-xsc3l2.c |
   58: static inline void l2_unmap_va(unsigned long va)  (in l2_unmap_va(), argument)
   61: if (va != -1)  (in l2_unmap_va())
   62: kunmap_atomic((void *)va);  (in l2_unmap_va())
   69: unsigned long va = prev_va & PAGE_MASK;  (in l2_map_va(), local)
   78: va = (unsigned long)kmap_atomic_pfn(pa >> PAGE_SHIFT);  (in l2_map_va())
   80: return va + (pa_offset >> (32 - PAGE_SHIFT));  (in l2_map_va())
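
l2_map_va() caches the previous kmap and only remaps when the target moves to a different page; l2_unmap_va() undoes it, using -1 as the "nothing mapped" sentinel. The underlying idiom is a short-lived atomic mapping of an arbitrary physical page; a sketch (demo_peek_phys is hypothetical):

    #include <linux/highmem.h>

    /* Read a 32-bit word at physical address pa via a temporary mapping. */
    static u32 demo_peek_phys(unsigned long pa)
    {
            void *va = kmap_atomic_pfn(pa >> PAGE_SHIFT);
            u32 val = *(u32 *)(va + (pa & ~PAGE_MASK));

            kunmap_atomic(va);
            return val;
    }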
|
| /arch/openrisc/mm/ |
| init.c |
  205: static int __init map_page(unsigned long va, phys_addr_t pa, pgprot_t prot)  (in map_page(), argument)
  212: p4d = p4d_offset(pgd_offset_k(va), va);  (in map_page())
  213: pud = pud_offset(p4d, va);  (in map_page())
  214: pmd = pmd_offset(pud, va);  (in map_page())
  215: pte = pte_alloc_kernel(pmd, va);  (in map_page())
  221: set_pte_at(&init_mm, va, pte, pfn_pte(pa >> PAGE_SHIFT, prot));  (in map_page())
  223: pte_clear(&init_mm, va, pte);  (in map_page())
  225: local_flush_tlb_page(NULL, va);  (in map_page())
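
map_page() shows the canonical descent through the page-table API: from the kernel pgd down to the PTE, with pte_alloc_kernel() allocating the leaf table on demand. A condensed sketch of just the walk (demo_kernel_pte is a hypothetical name):

    #include <linux/mm.h>

    /* Return the PTE slot for a kernel virtual address, or NULL if the
     * PTE table cannot be allocated. */
    static pte_t *demo_kernel_pte(unsigned long va)
    {
            p4d_t *p4d = p4d_offset(pgd_offset_k(va), va);
            pud_t *pud = pud_offset(p4d, va);
            pmd_t *pmd = pmd_offset(pud, va);

            return pte_alloc_kernel(pmd, va);
    }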
|
| /arch/arm64/crypto/ |
| aes-ce-ccm-core.S |
   26: .macro dround, va, vb, vk
   27: aese \va\().16b, \vk\().16b
   28: aesmc \va\().16b, \va\().16b
   33: .macro aes_encrypt, va, vb, nr
   35: dround \va, \vb, v10
   36: dround \va, \vb, v11
   38: dround \va, \vb, v12
   39: dround \va, \vb, v13
   41: dround \va, \vb, \v
   43: aese \va\().16b, v4.16b
|
| /arch/arm64/kvm/hyp/vhe/ |
| tlb.c |
  232: int __kvm_tlbi_s1e2(struct kvm_s2_mmu *mmu, u64 va, u64 sys_encoding)  (in __kvm_tlbi_s1e2(), argument)
  274: __tlbi(vae1is, va);  (in __kvm_tlbi_s1e2())
  288: __tlbi(vale1is, va);  (in __kvm_tlbi_s1e2())
  296: __tlbi(aside1is, va);  (in __kvm_tlbi_s1e2())
  304: __tlbi(vaae1is, va);  (in __kvm_tlbi_s1e2())
  312: __tlbi(vaale1is, va);  (in __kvm_tlbi_s1e2())
  326: __tlbi(rvae1is, va);  (in __kvm_tlbi_s1e2())
  340: __tlbi(rvale1is, va);  (in __kvm_tlbi_s1e2())
  348: __tlbi(rvaae1is, va);  (in __kvm_tlbi_s1e2())
  356: __tlbi(rvaale1is, va);  (in __kvm_tlbi_s1e2())
|
| /arch/x86/platform/efi/ |
| efi_32.c |
   40: void *va;  (in efi_map_region(), local)
   48: va = __va(md->phys_addr);  (in efi_map_region())
   51: set_memory_uc((unsigned long)va, md->num_pages);  (in efi_map_region())
   53: va = ioremap_cache(md->phys_addr, size);  (in efi_map_region())
   56: md->virt_addr = (unsigned long)va;  (in efi_map_region())
   57: if (!va)  (in efi_map_region())
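
efi_map_region() picks between two ways of getting a virtual address for a firmware region: RAM is already covered by the linear map (__va), while anything else needs an explicit ioremap_cache(). A schematic of the choice (demo_map_region is hypothetical and the RAM test is simplified to a type check; the md fields are the standard EFI memory-descriptor layout):

    #include <linux/efi.h>
    #include <linux/io.h>

    static void demo_map_region(efi_memory_desc_t *md)
    {
            u64 size = md->num_pages << EFI_PAGE_SHIFT;
            void *va;

            if (md->type == EFI_CONVENTIONAL_MEMORY)
                    va = __va(md->phys_addr);   /* already in the linear map */
            else
                    va = ioremap_cache(md->phys_addr, size);

            md->virt_addr = (unsigned long)va;
    }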
|
| /arch/powerpc/mm/ |
| pgtable_32.c |
   57: pte_t __init *early_pte_alloc_kernel(pmd_t *pmdp, unsigned long va)  (in early_pte_alloc_kernel(), argument)
   64: return pte_offset_kernel(pmdp, va);  (in early_pte_alloc_kernel())
   68: int __ref map_kernel_page(unsigned long va, phys_addr_t pa, pgprot_t prot)  (in map_kernel_page(), argument)
   75: pd = pmd_off_k(va);  (in map_kernel_page())
   78: pg = pte_alloc_kernel(pd, va);  (in map_kernel_page())
   80: pg = early_pte_alloc_kernel(pd, va);  (in map_kernel_page())
   87: set_pte_at(&init_mm, va, pg, pfn_pte(pa >> PAGE_SHIFT, prot));  (in map_kernel_page())
|
| /arch/powerpc/math-emu/ |
| math_efp.c |
  183: union dw_union vc, va, vb;  (in do_spe_mathemu(), local)
  203: va.wp[1] = regs->gpr[fa];  (in do_spe_mathemu())
  211: pr_debug("va: %08x %08x\n", va.wp[0], va.wp[1]);  (in do_spe_mathemu())
  221: FP_UNPACK_SP(SA, va.wp + 1);  (in do_spe_mathemu())
  227: FP_UNPACK_SP(SA, va.wp + 1);  (in do_spe_mathemu())
  350: FP_UNPACK_DP(DA, va.dp);  (in do_spe_mathemu())
  356: FP_UNPACK_DP(DA, va.dp);  (in do_spe_mathemu())
  496: FP_UNPACK_SP(SA0, va.wp);  (in do_spe_mathemu())
  497: FP_UNPACK_SP(SA1, va.wp + 1);  (in do_spe_mathemu())
  504: FP_UNPACK_SP(SA0, va.wp);  (in do_spe_mathemu())
  [all …]
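
dw_union gives the emulator two views of one 64-bit operand: va.dp for the whole doubleword (fed to FP_UNPACK_DP) and va.wp[0]/va.wp[1] for the two 32-bit halves used by the single-precision and vector paths. A minimal sketch; making dp an array is an assumption, chosen so that va.dp decays to a pointer as the FP_UNPACK_DP() call above requires:

    #include <linux/types.h>

    union demo_dw {
            u64 dp[1];  /* doubleword view; decays to u64 * in macro calls */
            u32 wp[2];  /* word view: two 32-bit halves of the same storage */
    };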
|
| /arch/microblaze/mm/ |
| pgtable.c |
  137: int map_page(unsigned long va, phys_addr_t pa, int flags)  (in map_page(), argument)
  146: p4d = p4d_offset(pgd_offset_k(va), va);  (in map_page())
  147: pud = pud_offset(p4d, va);  (in map_page())
  148: pd = pmd_offset(pud, va);  (in map_page())
  150: pg = pte_alloc_kernel(pd, va); /* from powerpc - pgtable.c */  (in map_page())
  155: set_pte_at(&init_mm, va, pg, pfn_pte(pa >> PAGE_SHIFT,  (in map_page())
  158: _tlbie(va);  (in map_page())
|
| /arch/parisc/kernel/ |
| entry.S |
  189: mfctl %pcoq, va
  206: mfctl %pcoq, va
  220: mfctl %ior,va
  238: mfctl %ior,va
  252: mfctl %ior, va
  270: mfctl %ior, va
  282: mfctl %ior,va
  298: mfctl %ior,va
  312: mfctl %ior,va
  330: mfctl %ior,va
  [all …]
|
| /arch/powerpc/mm/kasan/ |
| init_book3s_64.c |
   27: void *va;  (in kasan_init_phys_region(), local)
   35: va = memblock_alloc_or_panic(k_end - k_start, PAGE_SIZE);  (in kasan_init_phys_region())
   36: for (k_cur = k_start; k_cur < k_end; k_cur += PAGE_SIZE, va += PAGE_SIZE)  (in kasan_init_phys_region())
   37: map_kernel_page(k_cur, __pa(va), PAGE_KERNEL);  (in kasan_init_phys_region())
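
kasan_init_phys_region() backs a whole stretch of KASAN shadow with one memblock allocation and then maps it page by page. A condensed restatement of the loop above (demo_map_shadow is a hypothetical wrapper; memblock_alloc_or_panic() and the powerpc map_kernel_page() are the calls from the fragment):

    #include <linux/memblock.h>

    static void __init demo_map_shadow(unsigned long k_start, unsigned long k_end)
    {
            void *va = memblock_alloc_or_panic(k_end - k_start, PAGE_SIZE);
            unsigned long k_cur;

            for (k_cur = k_start; k_cur < k_end;
                 k_cur += PAGE_SIZE, va += PAGE_SIZE)
                    map_kernel_page(k_cur, __pa(va), PAGE_KERNEL);
    }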
|
| /arch/arm64/kvm/hyp/nvhe/ |
| pkvm.c |
  552: if (!PAGE_ALIGNED(va))  (in map_donated_memory_noclear())
  559: return va;  (in map_donated_memory_noclear())
  566: if (va)  (in map_donated_memory())
  567: memset(va, 0, size);  (in map_donated_memory())
  569: return va;  (in map_donated_memory())
  574: kvm_flush_dcache_to_poc(va, size);  (in __unmap_donated_memory())
  581: if (!va)  (in unmap_donated_memory())
  584: memset(va, 0, size);  (in unmap_donated_memory())
  585: __unmap_donated_memory(va, size);  (in unmap_donated_memory())
  590: if (!va)  (in unmap_donated_memory_noclear())
  [all …]
|
| /arch/powerpc/kernel/ |
| vecemu.c |
  266: unsigned int va, vb, vc, vd;  (in emulate_altivec(), local)
  276: va = (word >> 16) & 0x1f;  (in emulate_altivec())
  285: vaddfp(&vrs[vd], &vrs[va], &vrs[vb]);  (in emulate_altivec())
  288: vsubfp(&vrs[vd], &vrs[va], &vrs[vb]);  (in emulate_altivec())
  328: vrs[vd].u[i] = ctuxs(vrs[vb].u[i], va,  (in emulate_altivec())
  333: vrs[vd].u[i] = ctsxs(vrs[vb].u[i], va,  (in emulate_altivec())
  341: vmaddfp(&vrs[vd], &vrs[va], &vrs[vb], &vrs[vc]);  (in emulate_altivec())
  344: vnmsubfp(&vrs[vd], &vrs[va], &vrs[vb], &vrs[vc]);  (in emulate_altivec())
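
emulate_altivec() pulls register numbers out of the 32-bit opcode with shift-and-mask; only the va field (shifted by 16) appears above, and the other positions below follow the standard PowerPC VA-form layout (demo_decode is a hypothetical helper):

    /* Split a VA-form AltiVec opcode into its register fields. */
    static inline void demo_decode(unsigned int word, unsigned int *vd,
                                   unsigned int *va, unsigned int *vb,
                                   unsigned int *vc)
    {
            *vd = (word >> 21) & 0x1f;  /* destination register */
            *va = (word >> 16) & 0x1f;  /* first source, as at line 276 */
            *vb = (word >> 11) & 0x1f;  /* second source */
            *vc = (word >> 6) & 0x1f;   /* third source */
    }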
|