Lines matching refs:phys_addr (arch/x86/mm/ioremap.c)

178 __ioremap_caller(resource_size_t phys_addr, unsigned long size,  in __ioremap_caller()  argument
183 const resource_size_t unaligned_phys_addr = phys_addr; in __ioremap_caller()
193 last_addr = phys_addr + size - 1; in __ioremap_caller()
194 if (!size || last_addr < phys_addr) in __ioremap_caller()
197 if (!phys_addr_valid(phys_addr)) { in __ioremap_caller()
199 (unsigned long long)phys_addr); in __ioremap_caller()
204 __ioremap_check_mem(phys_addr, size, &io_desc); in __ioremap_caller()
211 &phys_addr, &last_addr); in __ioremap_caller()
218 offset = phys_addr & ~PAGE_MASK; in __ioremap_caller()
219 phys_addr &= PHYSICAL_PAGE_MASK; in __ioremap_caller()
220 size = PAGE_ALIGN(last_addr+1) - phys_addr; in __ioremap_caller()
222 retval = memtype_reserve(phys_addr, (u64)phys_addr + size, in __ioremap_caller()
230 if (!is_new_memtype_allowed(phys_addr, size, pcm, new_pcm)) { in __ioremap_caller()
233 (unsigned long long)phys_addr, in __ioremap_caller()
234 (unsigned long long)(phys_addr + size), in __ioremap_caller()
278 area->phys_addr = phys_addr; in __ioremap_caller()
281 if (memtype_kernel_map_sync(phys_addr, size, pcm)) in __ioremap_caller()
284 if (ioremap_page_range(vaddr, vaddr + size, phys_addr, prot)) in __ioremap_caller()
301 memtype_free(phys_addr, phys_addr + size); in __ioremap_caller()
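
The __ioremap_caller() lines above first reject empty or wrapping requests (last_addr < phys_addr) and then split the request into a page-aligned base, a whole-page mapping size and an intra-page offset that is later added back to the returned virtual address. Below is a minimal stand-alone sketch of that arithmetic, assuming 4 KiB pages; PAGE_MASK, PAGE_ALIGN, resource_size_t and split_mapping() are redefined locally for illustration, and PHYSICAL_PAGE_MASK is approximated by PAGE_MASK.

/* Stand-alone sketch of the wrap-around check and page-alignment split
 * performed near the top of __ioremap_caller().  Assumes 4 KiB pages and
 * approximates PHYSICAL_PAGE_MASK with PAGE_MASK purely for illustration. */
#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

#define PAGE_SHIFT    12
#define PAGE_SIZE     (1UL << PAGE_SHIFT)
#define PAGE_MASK     (~(PAGE_SIZE - 1))
#define PAGE_ALIGN(x) (((x) + PAGE_SIZE - 1) & PAGE_MASK)

typedef uint64_t resource_size_t;

static bool split_mapping(resource_size_t phys_addr, unsigned long size,
                          resource_size_t *page_base, unsigned long *map_size,
                          unsigned long *offset)
{
        resource_size_t last_addr = phys_addr + size - 1;

        /* Reject empty requests and requests that wrap past the end of
         * the physical address space (last_addr < phys_addr). */
        if (!size || last_addr < phys_addr)
                return false;

        *offset    = phys_addr & ~PAGE_MASK;                  /* intra-page offset */
        *page_base = phys_addr & PAGE_MASK;                   /* page-aligned base */
        *map_size  = PAGE_ALIGN(last_addr + 1) - *page_base;  /* whole pages       */
        return true;
}

int main(void)
{
        resource_size_t base;
        unsigned long size, off;

        if (split_mapping(0xfed00123, 0x200, &base, &size, &off))
                printf("base=%#llx size=%#lx offset=%#lx\n",
                       (unsigned long long)base, size, off);
        return 0;
}

For 0xfed00123 and 0x200 bytes this prints base=0xfed00000 size=0x1000 offset=0x123, i.e. the sub-page request still maps one full page.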
326 void __iomem *ioremap(resource_size_t phys_addr, unsigned long size) in ioremap() argument
338 return __ioremap_caller(phys_addr, size, pcm, in ioremap()
367 void __iomem *ioremap_uc(resource_size_t phys_addr, unsigned long size) in ioremap_uc() argument
371 return __ioremap_caller(phys_addr, size, pcm, in ioremap_uc()
386 void __iomem *ioremap_wc(resource_size_t phys_addr, unsigned long size) in ioremap_wc() argument
388 return __ioremap_caller(phys_addr, size, _PAGE_CACHE_MODE_WC, in ioremap_wc()
403 void __iomem *ioremap_wt(resource_size_t phys_addr, unsigned long size) in ioremap_wt() argument
405 return __ioremap_caller(phys_addr, size, _PAGE_CACHE_MODE_WT, in ioremap_wt()
410 void __iomem *ioremap_encrypted(resource_size_t phys_addr, unsigned long size) in ioremap_encrypted() argument
412 return __ioremap_caller(phys_addr, size, _PAGE_CACHE_MODE_WB, in ioremap_encrypted()
417 void __iomem *ioremap_cache(resource_size_t phys_addr, unsigned long size) in ioremap_cache() argument
419 return __ioremap_caller(phys_addr, size, _PAGE_CACHE_MODE_WB, in ioremap_cache()
424 void __iomem *ioremap_prot(resource_size_t phys_addr, unsigned long size, in ioremap_prot() argument
427 return __ioremap_caller(phys_addr, size, in ioremap_prot()
477 memtype_free(p->phys_addr, p->phys_addr + get_vm_area_size(p)); in iounmap()
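
The wrappers from ioremap() through ioremap_prot() above all funnel into __ioremap_caller() with a different cache mode, and iounmap() undoes both the vmalloc-area mapping and the memtype reservation. The following is a minimal consumer-side sketch of that pairing for a hypothetical MMIO device; MY_DEV_PHYS_BASE, MY_DEV_WIN_SIZE, MY_DEV_REG_STATUS and my_dev_read_status() are illustrative placeholders, not real hardware or kernel interfaces.

/* Sketch of the usual consumer pattern for the ioremap()/iounmap() pair
 * listed above: map a (hypothetical) device's MMIO window, read one
 * register, then drop the mapping again. */
#include <linux/io.h>
#include <linux/types.h>
#include <linux/errno.h>

#define MY_DEV_PHYS_BASE   0xfed00000UL   /* hypothetical MMIO base       */
#define MY_DEV_WIN_SIZE    0x1000UL       /* hypothetical window size     */
#define MY_DEV_REG_STATUS  0x04           /* hypothetical register offset */

static int my_dev_read_status(u32 *status)
{
        void __iomem *regs;

        /* ioremap() ends up in __ioremap_caller() above with an uncached
         * mapping type resolved for the range. */
        regs = ioremap(MY_DEV_PHYS_BASE, MY_DEV_WIN_SIZE);
        if (!regs)
                return -ENOMEM;

        *status = readl(regs + MY_DEV_REG_STATUS);

        /* iounmap() releases the vmalloc area and frees the memtype
         * reservation (the memtype_free() call in the iounmap() line). */
        iounmap(regs);
        return 0;
}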
523 static bool memremap_should_map_decrypted(resource_size_t phys_addr, in memremap_should_map_decrypted() argument
532 is_pmem = region_intersects(phys_addr, size, IORESOURCE_MEM, in memremap_should_map_decrypted()
542 switch (efi_mem_type(phys_addr)) { in memremap_should_map_decrypted()
544 if (efi_mem_attributes(phys_addr) & EFI_MEMORY_NV) in memremap_should_map_decrypted()
553 switch (e820__get_entry_type(phys_addr, phys_addr + size - 1)) { in memremap_should_map_decrypted()
576 static bool memremap_is_efi_data(resource_size_t phys_addr, in memremap_is_efi_data() argument
588 if (phys_addr == paddr) in memremap_is_efi_data()
594 if (phys_addr == paddr) in memremap_is_efi_data()
597 if (efi_is_table_address(phys_addr)) in memremap_is_efi_data()
600 switch (efi_mem_type(phys_addr)) { in memremap_is_efi_data()
615 static bool memremap_is_setup_data(resource_size_t phys_addr, in memremap_is_setup_data() argument
625 if (phys_addr == paddr) in memremap_is_setup_data()
634 if ((phys_addr > paddr) && (phys_addr < (paddr + len))) { in memremap_is_setup_data()
647 if ((phys_addr > paddr) && (phys_addr < (paddr + len))) in memremap_is_setup_data()
660 static bool __init early_memremap_is_setup_data(resource_size_t phys_addr, in early_memremap_is_setup_data() argument
670 if (phys_addr == paddr) in early_memremap_is_setup_data()
680 if ((phys_addr > paddr) && (phys_addr < (paddr + len))) in early_memremap_is_setup_data()
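
Both memremap_is_setup_data() and early_memremap_is_setup_data() apply the same containment test to each setup_data blob: an exact hit on the blob's start (phys_addr == paddr) or an address strictly inside it (paddr < phys_addr < paddr + len). A stand-alone sketch of just that test follows; hits_setup_data() is a local illustration, not a kernel helper, and, as in the listed lines, only phys_addr itself is tested.

/* Stand-alone sketch of the containment test used by
 * memremap_is_setup_data()/early_memremap_is_setup_data() above. */
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

static bool hits_setup_data(uint64_t phys_addr, uint64_t paddr, uint64_t len)
{
        if (phys_addr == paddr)                               /* header itself */
                return true;
        return phys_addr > paddr && phys_addr < paddr + len;  /* inside blob   */
}

int main(void)
{
        printf("%d %d %d\n",
               hits_setup_data(0x1000, 0x1000, 0x40),   /* header:   1 */
               hits_setup_data(0x1020, 0x1000, 0x40),   /* inside:   1 */
               hits_setup_data(0x1040, 0x1000, 0x40));  /* past end: 0 */
        return 0;
}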
694 bool arch_memremap_can_ram_remap(resource_size_t phys_addr, unsigned long size, in arch_memremap_can_ram_remap() argument
707 if (memremap_is_setup_data(phys_addr, size) || in arch_memremap_can_ram_remap()
708 memremap_is_efi_data(phys_addr, size)) in arch_memremap_can_ram_remap()
712 return !memremap_should_map_decrypted(phys_addr, size); in arch_memremap_can_ram_remap()
721 pgprot_t __init early_memremap_pgprot_adjust(resource_size_t phys_addr, in early_memremap_pgprot_adjust() argument
733 if (early_memremap_is_setup_data(phys_addr, size) || in early_memremap_pgprot_adjust()
734 memremap_is_efi_data(phys_addr, size)) in early_memremap_pgprot_adjust()
738 if (encrypted_prot && memremap_should_map_decrypted(phys_addr, size)) in early_memremap_pgprot_adjust()
745 bool phys_mem_access_encrypted(unsigned long phys_addr, unsigned long size) in phys_mem_access_encrypted() argument
747 return arch_memremap_can_ram_remap(phys_addr, size, 0); in phys_mem_access_encrypted()
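
When memory encryption (SME/SEV) is active, arch_memremap_can_ram_remap(), early_memremap_pgprot_adjust() and phys_mem_access_encrypted() above all reduce to the same question: boot/firmware data (setup_data, EFI data) and ranges flagged by memremap_should_map_decrypted() (persistent memory, certain EFI/E820 regions) must be accessed through a decrypted mapping, while everything else keeps the encrypted kernel protection. Below is a stand-alone sketch of that decision; the three predicates are fixed-value stand-ins for the real walks over setup_data, the EFI tables and the E820 map, and the flags handling and "is encryption active" checks of the real functions are omitted.

/* Sketch of the policy expressed by arch_memremap_can_ram_remap() and
 * early_memremap_pgprot_adjust() above.  The predicates are placeholders. */
#include <stdbool.h>
#include <stdio.h>

static bool is_setup_data(void)        { return false; }  /* placeholder */
static bool is_efi_data(void)          { return false; }  /* placeholder */
static bool should_map_decrypted(void) { return false; }  /* placeholder */

/* Mirrors the listed checks: the existing (encrypted) kernel mapping may
 * only be reused when the range is neither firmware/boot data nor a range
 * that has to be accessed decrypted. */
static bool can_reuse_encrypted_mapping(void)
{
        if (is_setup_data() || is_efi_data())
                return false;
        return !should_map_decrypted();
}

int main(void)
{
        printf("map %s\n",
               can_reuse_encrypted_mapping() ? "encrypted" : "decrypted");
        return 0;
}

With all three predicates false the sketch prints "map encrypted", mirroring the !memremap_should_map_decrypted() return in arch_memremap_can_ram_remap().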
751 void __init *early_memremap_encrypted(resource_size_t phys_addr, in early_memremap_encrypted() argument
754 return early_memremap_prot(phys_addr, size, __PAGE_KERNEL_ENC); in early_memremap_encrypted()
761 void __init *early_memremap_encrypted_wp(resource_size_t phys_addr, in early_memremap_encrypted_wp() argument
766 return early_memremap_prot(phys_addr, size, __PAGE_KERNEL_ENC_WP); in early_memremap_encrypted_wp()
770 void __init *early_memremap_decrypted(resource_size_t phys_addr, in early_memremap_decrypted() argument
773 return early_memremap_prot(phys_addr, size, __PAGE_KERNEL_NOENC); in early_memremap_decrypted()
780 void __init *early_memremap_decrypted_wp(resource_size_t phys_addr, in early_memremap_decrypted_wp() argument
785 return early_memremap_prot(phys_addr, size, __PAGE_KERNEL_NOENC_WP); in early_memremap_decrypted_wp()