
Searched refs:paddr (Results 1 – 25 of 151), sorted by relevance


/arch/m68k/mm/
memory.c
31 : : "a" (paddr)); in clear040()
42 : : "a" (paddr)); in cleari040()
54 : : "a" (paddr)); in push040()
64 push040(paddr); in pushcl040()
66 clear040(paddr); in pushcl040()
113 paddr += tmp; in cache_clear()
116 paddr &= PAGE_MASK; in cache_clear()
119 paddr += tmp; in cache_clear()
164 paddr &= PAGE_MASK; in cache_push()
167 push040(paddr); in cache_push()
[all …]
/arch/mips/cavium-octeon/
dma-octeon.c
40 return paddr; in octeon_hole_phys_to_dma()
53 if (paddr >= 0x410000000ull && paddr < 0x420000000ull) in octeon_gen1_phys_to_dma()
54 paddr -= 0x400000000ull; in octeon_gen1_phys_to_dma()
90 if (paddr >= 0x410000000ull && paddr < 0x420000000ull) in octeon_big_phys_to_dma()
95 paddr = OCTEON_BAR2_PCI_ADDRESS + paddr; in octeon_big_phys_to_dma()
97 return paddr; in octeon_big_phys_to_dma()
118 if (paddr >= 0x410000000ull && paddr < 0x420000000ull) in octeon_small_phys_to_dma()
123 paddr = paddr - octeon_bar1_pci_phys; in octeon_small_phys_to_dma()
125 paddr = OCTEON_BAR2_PCI_ADDRESS + paddr; in octeon_small_phys_to_dma()
127 return paddr; in octeon_small_phys_to_dma()
[all …]
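The Octeon helpers above convert CPU physical addresses into PCI bus addresses by remapping fixed windows. A minimal standalone sketch of just the gen1-style window remap visible at lines 53–54 (hypothetical helper name, not the full octeon_gen1_phys_to_dma()):

#include <linux/types.h>

/* Physical addresses in the remapped window are shifted down by 0x400000000
 * before being handed to the PCI host bridge; everything else passes through. */
static dma_addr_t octeon_gen1_window_remap(phys_addr_t paddr)
{
        if (paddr >= 0x410000000ull && paddr < 0x420000000ull)
                paddr -= 0x400000000ull;

        return paddr;
}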
/arch/riscv/mm/
dma-noncoherent.c
20 void *vaddr = phys_to_virt(paddr); in arch_dma_cache_wback()
24 noncoherent_cache_ops.wback(paddr, size); in arch_dma_cache_wback()
33 void *vaddr = phys_to_virt(paddr); in arch_dma_cache_inv()
37 noncoherent_cache_ops.inv(paddr, size); in arch_dma_cache_inv()
47 void *vaddr = phys_to_virt(paddr); in arch_dma_cache_wback_inv()
51 noncoherent_cache_ops.wback_inv(paddr, size); in arch_dma_cache_wback_inv()
74 arch_dma_cache_wback(paddr, size); in arch_sync_dma_for_device()
79 arch_dma_cache_inv(paddr, size); in arch_sync_dma_for_device()
88 arch_dma_cache_wback(paddr, size); in arch_sync_dma_for_device()
90 arch_dma_cache_wback_inv(paddr, size); in arch_sync_dma_for_device()
[all …]
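The riscv hits above, like the arc, microblaze, csky, mips, powerpc and nios2 hits further down, implement the arch_sync_dma_for_device() hook, which selects a cache-maintenance primitive from the DMA direction. A simplified sketch of that dispatch, using the three helpers seen in this file as stand-ins (declared, not defined, here); the real riscv version adds pre-/post-DMA flush quirks on top of this shape:

#include <linux/dma-direction.h>
#include <linux/types.h>

void arch_dma_cache_wback(phys_addr_t paddr, size_t size);
void arch_dma_cache_inv(phys_addr_t paddr, size_t size);
void arch_dma_cache_wback_inv(phys_addr_t paddr, size_t size);

void arch_sync_dma_for_device(phys_addr_t paddr, size_t size,
                              enum dma_data_direction dir)
{
        switch (dir) {
        case DMA_TO_DEVICE:
                /* CPU filled the buffer: write dirty lines back to RAM */
                arch_dma_cache_wback(paddr, size);
                break;
        case DMA_FROM_DEVICE:
                /* device will write the buffer: drop stale CPU lines */
                arch_dma_cache_inv(paddr, size);
                break;
        case DMA_BIDIRECTIONAL:
                /* both directions: clean and invalidate */
                arch_dma_cache_wback_inv(paddr, size);
                break;
        default:
                break;
        }
}

The arc dma.c hits below (lines 56, 60, 64) follow exactly this simplified form with their own dma_cache_* primitives.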
/arch/arc/mm/
ioremap.c
13 static inline bool arc_uncached_addr_space(phys_addr_t paddr) in arc_uncached_addr_space() argument
16 if (paddr >= ARC_UNCACHED_ADDR_SPACE) in arc_uncached_addr_space()
18 } else if (paddr >= perip_base && paddr <= perip_end) { in arc_uncached_addr_space()
25 void __iomem *ioremap(phys_addr_t paddr, unsigned long size) in ioremap() argument
31 if (arc_uncached_addr_space(paddr)) in ioremap()
32 return (void __iomem *)(u32)paddr; in ioremap()
34 return ioremap_prot(paddr, size, in ioremap()
46 void __iomem *ioremap_prot(phys_addr_t paddr, size_t size, in ioremap_prot() argument
50 return generic_ioremap_prot(paddr, size, pgprot_noncached(prot)); in ioremap_prot()
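The ARC ioremap() above takes a fast path for physical addresses that already sit in an untranslated, uncached window and only builds a page-table mapping otherwise. Roughly (PAGE_KERNEL is assumed as the base protection; ioremap_prot() forces it non-cached itself, as line 50 shows):

void __iomem *ioremap(phys_addr_t paddr, unsigned long size)
{
        /* Peripheral space and other fixed uncached windows need no mapping:
         * the physical address can be handed back as the MMIO cookie. */
        if (arc_uncached_addr_space(paddr))
                return (void __iomem *)(u32)paddr;

        /* Otherwise fall back to a regular non-cached kernel mapping. */
        return ioremap_prot(paddr, size, PAGE_KERNEL);
}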
cache.c
210 paddr &= CACHE_LINE_MASK; in __cache_line_loop_v3()
235 paddr += L1_CACHE_BYTES; in __cache_line_loop_v3()
269 paddr &= CACHE_LINE_MASK; in __cache_line_loop_v4()
292 paddr += L1_CACHE_BYTES; in __cache_line_loop_v4()
319 paddr &= CACHE_LINE_MASK; in __cache_line_loop_v4()
338 write_aux_reg(s, paddr); in __cache_line_loop_v4()
514 phys_addr_t paddr, vaddr; member
529 .paddr = paddr, in __ic_line_inv_vaddr()
641 paddr &= SLC_LINE_MASK; in slc_op_line()
647 paddr += l2_line_sz; in slc_op_line()
[all …]
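The __cache_line_loop_v*() hits share one shape: align paddr down to a cache line, widen the size to cover the partial first line, then issue one per-line operation until the range is covered. A version-agnostic sketch (do_line_op() is a hypothetical stand-in for the write_aux_reg() call seen at line 338):

void do_line_op(phys_addr_t paddr);     /* hypothetical per-line cache op */

static void cache_line_loop(phys_addr_t paddr, unsigned long sz)
{
        int num_lines;

        /* extend the size so the partial first line is covered too */
        sz += paddr & ~CACHE_LINE_MASK;
        paddr &= CACHE_LINE_MASK;

        num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES);

        while (num_lines-- > 0) {
                do_line_op(paddr);
                paddr += L1_CACHE_BYTES;
        }
}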
dma.c
51 void arch_sync_dma_for_device(phys_addr_t paddr, size_t size, in arch_sync_dma_for_device() argument
56 dma_cache_wback(paddr, size); in arch_sync_dma_for_device()
60 dma_cache_inv(paddr, size); in arch_sync_dma_for_device()
64 dma_cache_wback_inv(paddr, size); in arch_sync_dma_for_device()
72 void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size, in arch_sync_dma_for_cpu() argument
82 dma_cache_inv(paddr, size); in arch_sync_dma_for_cpu()
/arch/arm/mm/
dma-mapping-nommu.c
16 void arch_sync_dma_for_device(phys_addr_t paddr, size_t size, in arch_sync_dma_for_device() argument
19 dmac_map_area(__va(paddr), size, dir); in arch_sync_dma_for_device()
22 outer_inv_range(paddr, paddr + size); in arch_sync_dma_for_device()
24 outer_clean_range(paddr, paddr + size); in arch_sync_dma_for_device()
27 void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size, in arch_sync_dma_for_cpu() argument
31 outer_inv_range(paddr, paddr + size); in arch_sync_dma_for_cpu()
32 dmac_unmap_area(__va(paddr), size, dir); in arch_sync_dma_for_cpu()
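The !MMU ARM hooks above split the work between the inner cache, reached through dmac_map_area()/dmac_unmap_area() on the kernel virtual alias, and the outer (L2) cache, which takes the physical range directly. The for-device half, essentially as the hits show it:

void arch_sync_dma_for_device(phys_addr_t paddr, size_t size,
                              enum dma_data_direction dir)
{
        /* inner cache: operate on the virtual alias of the buffer */
        dmac_map_area(__va(paddr), size, dir);

        if (dir == DMA_FROM_DEVICE)
                outer_inv_range(paddr, paddr + size);   /* device writes: invalidate */
        else
                outer_clean_range(paddr, paddr + size); /* CPU wrote: clean */
}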
/arch/microblaze/kernel/
dma.c
17 static void __dma_sync(phys_addr_t paddr, size_t size, in __dma_sync() argument
23 flush_dcache_range(paddr, paddr + size); in __dma_sync()
26 invalidate_dcache_range(paddr, paddr + size); in __dma_sync()
33 void arch_sync_dma_for_device(phys_addr_t paddr, size_t size, in arch_sync_dma_for_device() argument
36 __dma_sync(paddr, size, dir); in arch_sync_dma_for_device()
39 void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size, in arch_sync_dma_for_cpu() argument
42 __dma_sync(paddr, size, dir); in arch_sync_dma_for_cpu()
/arch/m68k/kernel/
sys_m68k.c
70 unsigned long paddr, i; in cache_flush_040() local
135 : : "a" (paddr)); in cache_flush_040()
142 : : "a" (paddr)); in cache_flush_040()
150 : : "a" (paddr)); in cache_flush_040()
174 paddr += 16; in cache_flush_040()
192 : : "a" (paddr)); in cache_flush_040()
199 : : "a" (paddr)); in cache_flush_040()
207 : : "a" (paddr)); in cache_flush_040()
222 : "=a" (paddr) \
230 unsigned long paddr, i; in cache_flush_060() local
[all …]
/arch/xtensa/kernel/
pci-dma.c
24 static void do_cache_op(phys_addr_t paddr, size_t size, in do_cache_op() argument
27 unsigned long off = paddr & (PAGE_SIZE - 1); in do_cache_op()
28 unsigned long pfn = PFN_DOWN(paddr); in do_cache_op()
32 fn((unsigned long)phys_to_virt(paddr), size); in do_cache_op()
46 void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size, in arch_sync_dma_for_cpu() argument
52 do_cache_op(paddr, size, __invalidate_dcache_range); in arch_sync_dma_for_cpu()
64 void arch_sync_dma_for_device(phys_addr_t paddr, size_t size, in arch_sync_dma_for_device() argument
71 do_cache_op(paddr, size, __flush_dcache_range); in arch_sync_dma_for_device()
/arch/csky/mm/
dma-mapping.c
14 static inline void cache_op(phys_addr_t paddr, size_t size, in cache_op() argument
17 struct page *page = phys_to_page(paddr); in cache_op()
19 unsigned long offset = offset_in_page(paddr); in cache_op()
58 void arch_sync_dma_for_device(phys_addr_t paddr, size_t size, in arch_sync_dma_for_device() argument
63 cache_op(paddr, size, dma_wb_range); in arch_sync_dma_for_device()
67 cache_op(paddr, size, dma_wbinv_range); in arch_sync_dma_for_device()
74 void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size, in arch_sync_dma_for_cpu() argument
82 cache_op(paddr, size, dma_inv_range); in arch_sync_dma_for_cpu()
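The csky cache_op() above and the mips dma_sync_phys() further down both take a physical range, derive the struct page and intra-page offset, and then walk it page by page so highmem pages can be mapped temporarily before the flush/invalidate callback runs. A simplified lowmem-only sketch (line_op() is a hypothetical callback; the kmap handling is omitted):

#include <linux/mm.h>
#include <linux/minmax.h>

static void cache_op_phys(phys_addr_t paddr, size_t size,
                          void (*line_op)(unsigned long start, unsigned long len))
{
        struct page *page = phys_to_page(paddr);
        unsigned long offset = offset_in_page(paddr);
        size_t left = size;

        do {
                size_t len = min_t(size_t, left, PAGE_SIZE - offset);
                unsigned long start = (unsigned long)page_address(page) + offset;

                line_op(start, len);

                offset = 0;     /* only the first page can start mid-page */
                page++;
                left -= len;
        } while (left);
}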
tcm.c
29 unsigned long vaddr, paddr; local
32 paddr = CONFIG_ITCM_RAM_BASE;
47 set_pte(tcm_pte, pfn_pte(__phys_to_pfn(paddr), PAGE_KERNEL));
51 paddr = paddr + PAGE_SIZE;
58 paddr = CONFIG_DTCM_RAM_BASE;
66 set_pte(tcm_pte, pfn_pte(__phys_to_pfn(paddr), PAGE_KERNEL));
70 paddr = paddr + PAGE_SIZE;
/arch/openrisc/mm/
cache.c
31 static __always_inline void cache_loop(unsigned long paddr, unsigned long end, in cache_loop() argument
37 while (paddr < end) { in cache_loop()
38 mtspr(reg, paddr); in cache_loop()
39 paddr += L1_CACHE_BYTES; in cache_loop()
46 unsigned long paddr = page_to_pfn(page) << PAGE_SHIFT; in cache_loop_page() local
47 unsigned long end = paddr + PAGE_SIZE; in cache_loop_page()
49 paddr &= ~(L1_CACHE_BYTES - 1); in cache_loop_page()
51 cache_loop(paddr, end, reg, cache_type); in cache_loop_page()
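The openrisc helpers above drive cache maintenance by writing each physical line address into a special-purpose register. A simplified sketch of cache_loop(), dropping the cache_type bookkeeping visible in the call at line 51:

static void cache_loop(unsigned long paddr, unsigned long end,
                       unsigned long reg)
{
        /* align the start down to a cache line boundary */
        paddr &= ~(L1_CACHE_BYTES - 1);

        while (paddr < end) {
                mtspr(reg, paddr);      /* one SPR write operates on one line */
                paddr += L1_CACHE_BYTES;
        }
}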
/arch/x86/mm/pat/
memtype.c
659 if (x86_platform.is_untracked_pat_range(paddr, paddr + PAGE_SIZE)) in lookup_memtype()
662 if (pat_pagerange_is_ram(paddr, paddr + PAGE_SIZE)) { in lookup_memtype()
845 is_ram = pat_pagerange_is_ram(paddr, paddr + size); in reserve_pfn_range()
869 ret = memtype_reserve(paddr, paddr + size, want_pcm, &pcm); in reserve_pfn_range()
875 memtype_free(paddr, paddr + size); in reserve_pfn_range()
888 memtype_free(paddr, paddr + size); in reserve_pfn_range()
902 is_ram = pat_pagerange_is_ram(paddr, paddr + size); in free_pfn_range()
904 memtype_free(paddr, paddr + size); in free_pfn_range()
915 pcm = lookup_memtype(paddr); in pfnmap_setup_cachemode()
920 paddr += PAGE_SIZE; in pfnmap_setup_cachemode()
[all …]
/arch/mips/sgi-ip32/
ip32-dma.c
21 dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr) in phys_to_dma() argument
23 dma_addr_t dma_addr = paddr & RAM_OFFSET_MASK; in phys_to_dma()
32 phys_addr_t paddr = dma_addr & RAM_OFFSET_MASK; in dma_to_phys() local
35 paddr += CRIME_HI_MEM_BASE; in dma_to_phys()
36 return paddr; in dma_to_phys()
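The Octeon and SGI IP32 hits both implement the dma-direct hooks phys_to_dma()/dma_to_phys(), which must be exact inverses of each other. A minimal sketch of such a pair for a hypothetical bus that sees RAM at a fixed offset (BUS_DMA_OFFSET is invented for illustration; the real IP32 code masks with RAM_OFFSET_MASK and re-adds CRIME_HI_MEM_BASE, as the hits show):

#include <linux/dma-direct.h>

#define BUS_DMA_OFFSET  0x80000000ull   /* hypothetical bus offset */

dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
        return (dma_addr_t)paddr + BUS_DMA_OFFSET;
}

phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dma_addr)
{
        return (phys_addr_t)(dma_addr - BUS_DMA_OFFSET);
}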
/arch/x86/boot/startup/
sev-startup.c
176 early_set_pages_state(unsigned long vaddr, unsigned long paddr, in early_set_pages_state() argument
184 paddr = paddr & PAGE_MASK; in early_set_pages_state()
185 paddr_end = paddr + (npages << PAGE_SHIFT); in early_set_pages_state()
187 while (paddr < paddr_end) { in early_set_pages_state()
190 pvalidate_4k_page(vaddr, paddr, false); in early_set_pages_state()
196 sev_es_wr_ghcb_msr(GHCB_MSR_PSC_REQ_GFN(paddr >> PAGE_SHIFT, op)); in early_set_pages_state()
209 pvalidate_4k_page(vaddr, paddr, true); in early_set_pages_state()
212 paddr += PAGE_SIZE; in early_set_pages_state()
237 early_set_pages_state(vaddr, paddr, npages, SNP_PAGE_STATE_PRIVATE); in early_snp_set_memory_private()
240 void __head early_snp_set_memory_shared(unsigned long vaddr, unsigned long paddr, in early_snp_set_memory_shared() argument
[all …]
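The early_set_pages_state() hit above shows the paddr bookkeeping used while changing SNP page states: align down to a page boundary, derive the end address from the page count, and advance one 4K page per iteration, with paddr >> PAGE_SHIFT serving as the guest frame number for the GHCB MSR protocol. Stripped to just that arithmetic (the PVALIDATE and GHCB calls are elided):

static void for_each_4k_page(unsigned long vaddr, unsigned long paddr,
                             unsigned long npages)
{
        unsigned long paddr_end;

        vaddr &= PAGE_MASK;
        paddr &= PAGE_MASK;
        paddr_end = paddr + (npages << PAGE_SHIFT);

        while (paddr < paddr_end) {
                /* per-page work goes here: pvalidate_4k_page(vaddr, paddr, ...)
                 * plus a GHCB MSR request keyed by paddr >> PAGE_SHIFT */
                vaddr += PAGE_SIZE;
                paddr += PAGE_SIZE;
        }
}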
/arch/sparc/include/asm/
mxcc.h
89 static inline void mxcc_set_stream_src(unsigned long *paddr) in mxcc_set_stream_src() argument
91 unsigned long data0 = paddr[0]; in mxcc_set_stream_src()
92 unsigned long data1 = paddr[1]; in mxcc_set_stream_src()
102 static inline void mxcc_set_stream_dst(unsigned long *paddr) in mxcc_set_stream_dst() argument
104 unsigned long data0 = paddr[0]; in mxcc_set_stream_dst()
105 unsigned long data1 = paddr[1]; in mxcc_set_stream_dst()
/arch/s390/include/asm/
page-states.h
25 static __always_inline unsigned long essa(unsigned long paddr, unsigned char cmd) in essa() argument
32 : [paddr] "d" (paddr), in essa()
39 unsigned long paddr = __pa(addr) & PAGE_MASK; in __set_page_state() local
42 essa(paddr, cmd); in __set_page_state()
43 paddr += PAGE_SIZE; in __set_page_state()
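The s390 header above pairs the essa() inline-asm wrapper with __set_page_state(), which converts the kernel virtual address to a physical one and issues one ESSA operation per page. The loop shape, as visible in the hits (hypothetical standalone name):

static void set_page_states(void *addr, unsigned long num_pages,
                            unsigned char cmd)
{
        unsigned long paddr = __pa(addr) & PAGE_MASK;

        while (num_pages--) {
                essa(paddr, cmd);       /* declared at line 25 above */
                paddr += PAGE_SIZE;
        }
}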
/arch/x86/mm/
mem_encrypt_amd.c
58 unsigned long paddr, bool decrypt) in snp_memcpy() argument
67 early_snp_set_memory_shared((unsigned long)__va(paddr), paddr, npages); in snp_memcpy()
72 early_snp_set_memory_private((unsigned long)__va(paddr), paddr, npages); in snp_memcpy()
114 early_memremap_encrypted_wp(paddr, len); in __sme_early_enc_dec()
116 dst = enc ? early_memremap_encrypted(paddr, len) : in __sme_early_enc_dec()
117 early_memremap_decrypted(paddr, len); in __sme_early_enc_dec()
141 paddr += len; in __sme_early_enc_dec()
148 __sme_early_enc_dec(paddr, size, true); in sme_early_encrypt()
153 __sme_early_enc_dec(paddr, size, false); in sme_early_decrypt()
166 pmd = map ? (paddr & PMD_MASK) + pmd_flags : 0; in __sme_early_map_unmap_mem()
[all …]
ioremap.c
605 u64 paddr; in memremap_is_efi_data() local
612 paddr <<= 32; in memremap_is_efi_data()
614 if (phys_addr == paddr) in memremap_is_efi_data()
618 paddr <<= 32; in memremap_is_efi_data()
620 if (phys_addr == paddr) in memremap_is_efi_data()
646 u64 paddr, paddr_next; in __memremap_is_setup_data() local
649 while (paddr) { in __memremap_is_setup_data()
652 if (phys_addr == paddr) in __memremap_is_setup_data()
695 paddr = indirect->addr; in __memremap_is_setup_data()
705 if ((phys_addr > paddr) && (phys_addr < (paddr + len))) in __memremap_is_setup_data()
[all …]
/arch/mips/mm/
dma-noncoherent.c
95 static inline void dma_sync_phys(phys_addr_t paddr, size_t size, in dma_sync_phys() argument
98 struct page *page = pfn_to_page(paddr >> PAGE_SHIFT); in dma_sync_phys()
99 unsigned long offset = paddr & ~PAGE_MASK; in dma_sync_phys()
124 void arch_sync_dma_for_device(phys_addr_t paddr, size_t size, in arch_sync_dma_for_device() argument
127 dma_sync_phys(paddr, size, dir, true); in arch_sync_dma_for_device()
131 void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size, in arch_sync_dma_for_cpu() argument
135 dma_sync_phys(paddr, size, dir, false); in arch_sync_dma_for_cpu()
/arch/powerpc/mm/
dma-noncoherent.c
94 static void __dma_sync_page(phys_addr_t paddr, size_t size, int dir) in __dma_sync_page() argument
96 struct page *page = pfn_to_page(paddr >> PAGE_SHIFT); in __dma_sync_page()
97 unsigned offset = paddr & ~PAGE_MASK; in __dma_sync_page()
107 void arch_sync_dma_for_device(phys_addr_t paddr, size_t size, in arch_sync_dma_for_device() argument
110 __dma_sync_page(paddr, size, dir); in arch_sync_dma_for_device()
113 void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size, in arch_sync_dma_for_cpu() argument
116 __dma_sync_page(paddr, size, dir); in arch_sync_dma_for_cpu()
/arch/x86/include/asm/uv/
uv_hub.h
422 return uv_gam_range(paddr)->nasid; in uv_soc_phys_ram_to_nasid()
441 paddr |= uv_hub_info->gnode_upper; in uv_soc_phys_ram_to_gpa()
442 paddr = ((paddr << uv_hub_info->m_shift) in uv_soc_phys_ram_to_gpa()
444 ((paddr >> uv_hub_info->m_val) in uv_soc_phys_ram_to_gpa()
447 paddr |= uv_soc_phys_ram_to_nasid(paddr) in uv_soc_phys_ram_to_gpa()
450 return paddr; in uv_soc_phys_ram_to_gpa()
469 unsigned long paddr; in uv_gpa_to_soc_phys_ram() local
478 paddr = gpa & uv_hub_info->gpa_mask; in uv_gpa_to_soc_phys_ram()
479 if (paddr >= remap_base && paddr < remap_base + remap_top) in uv_gpa_to_soc_phys_ram()
480 paddr -= remap_base; in uv_gpa_to_soc_phys_ram()
[all …]
/arch/x86/kernel/
ksysfs.c
78 *paddr = pa_data; in get_setup_data_paddr()
141 u64 paddr; in type_show() local
148 ret = get_setup_data_paddr(nr, &paddr); in type_show()
158 data = memremap(paddr, len, MEMREMAP_WB); in type_show()
182 u64 paddr, len; in setup_data_data_read() local
189 ret = get_setup_data_paddr(nr, &paddr); in setup_data_data_read()
199 data = memremap(paddr, len, MEMREMAP_WB); in setup_data_data_read()
206 paddr = indirect->addr; in setup_data_data_read()
214 paddr += sizeof(*data); in setup_data_data_read()
218 paddr += sizeof(*data); in setup_data_data_read()
[all …]
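The x86 ioremap.c and ksysfs.c hits both walk the boot_params setup_data chain, whose nodes live at physical addresses recorded in each node's next field, so every hop needs a memremap() before paddr_next can be read. A minimal sketch of that walk, assuming struct setup_data from asm/bootparam.h (the SETUP_INDIRECT handling both files also do is ignored here):

#include <linux/io.h>
#include <asm/bootparam.h>      /* struct setup_data */

static bool phys_addr_in_setup_data(u64 paddr, u64 phys_addr)
{
        while (paddr) {
                struct setup_data *data;
                u64 paddr_next;
                u32 len;

                data = memremap(paddr, sizeof(*data), MEMREMAP_WB);
                if (!data)
                        return false;

                paddr_next = data->next;
                len = data->len;
                memunmap(data);

                if (phys_addr >= paddr && phys_addr < paddr + sizeof(*data) + len)
                        return true;

                paddr = paddr_next;
        }

        return false;
}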
/arch/nios2/mm/
dma-mapping.c
21 void arch_sync_dma_for_device(phys_addr_t paddr, size_t size, in arch_sync_dma_for_device() argument
24 void *vaddr = phys_to_virt(paddr); in arch_sync_dma_for_device()
45 void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size, in arch_sync_dma_for_cpu() argument
48 void *vaddr = phys_to_virt(paddr); in arch_sync_dma_for_cpu()
