Searched refs:phys (results 1–8 of 8, sorted by relevance)

/arch/xtensa/core/mem_manage.c
    13  __weak bool sys_mm_is_phys_addr_in_range(uintptr_t phys)
    16          uintptr_t cached = (uintptr_t)sys_cache_cached_ptr_get((void *)phys);
    18          valid = ((phys >= CONFIG_SRAM_BASE_ADDRESS) &&
    19                   (phys < (CONFIG_SRAM_BASE_ADDRESS + (CONFIG_SRAM_SIZE * 1024UL))));
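
The weak xtensa implementation above accepts any physical address inside SRAM. Since the symbol is weak, a board with additional physical memory can supply its own check; the following is a minimal sketch of such an override, where FLASH_BASE_ADDR and FLASH_SIZE_KB are illustrative constants, not symbols from the results above:

    #include <stdbool.h>
    #include <stdint.h>
    #include <zephyr/kernel.h>

    #define FLASH_BASE_ADDR 0x60000000UL   /* illustrative, not a Kconfig symbol */
    #define FLASH_SIZE_KB   4096UL         /* illustrative, not a Kconfig symbol */

    /* Hypothetical strong override of the weak symbol: accept SRAM plus
     * a memory-mapped flash window.
     */
    bool sys_mm_is_phys_addr_in_range(uintptr_t phys)
    {
        uintptr_t sram_start = CONFIG_SRAM_BASE_ADDRESS;
        uintptr_t sram_end = sram_start + (CONFIG_SRAM_SIZE * 1024UL);

        return (phys >= sram_start && phys < sram_end) ||
               (phys >= FLASH_BASE_ADDR &&
                phys < FLASH_BASE_ADDR + (FLASH_SIZE_KB * 1024UL));
    }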

/arch/xtensa/core/ptables.c
   362  static bool l2_page_table_map(uint32_t *l1_table, void *vaddr, uintptr_t phys,
   387          table[l2_pos] = XTENSA_MMU_PTE(phys, is_user ? XTENSA_MMU_USER_RING :   [in l2_page_table_map()]
   469  void arch_mem_map(void *virt, uintptr_t phys, size_t size, uint32_t flags)
   472          uint32_t pa = (uint32_t)phys;   [in arch_mem_map()]
   480          "zero size", (uint32_t)phys);   [in arch_mem_map()]
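
Here l2_page_table_map() writes the PTE for a single page into a second-level table, and arch_mem_map() walks the requested range installing such entries. A sketch of the virtual-address split this implies, assuming a two-level layout with 4 KiB pages and 1024-entry tables (so each L1 entry covers 4 MiB); the helper and macro names are illustrative, not the ptables.c ones:

    #include <stdint.h>

    #define L1_SHIFT 22U   /* bits 31..22 index the L1 table */
    #define L2_SHIFT 12U   /* bits 21..12 index the L2 table */

    static inline uint32_t l1_pos(uintptr_t vaddr)
    {
        return (uint32_t)(vaddr >> L1_SHIFT) & 0x3ffU;
    }

    static inline uint32_t l2_pos(uintptr_t vaddr)
    {
        return (uint32_t)(vaddr >> L2_SHIFT) & 0x3ffU;
    }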

/arch/x86/include/x86_mmu.h
   158  static inline void z_x86_cr3_set(uintptr_t phys)
   160          __ASSERT((phys & PTABLES_ALIGN) == 0U, "unaligned page tables");
   162          __asm__ volatile("movq %0, %%cr3\n\t" : : "r" (phys) : "memory");
   164          __asm__ volatile("movl %0, %%cr3\n\t" : : "r" (phys) : "memory");
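
CR3 takes the physical, not virtual, address of the top-level page table, which is why the helper asserts alignment before the write. On a typical configuration, rewriting CR3 also invalidates all non-global TLB entries as a side effect, which gives the cheap flush idiom sketched below for x86_64 (a standalone sketch, not the Zephyr helper):

    #include <stdint.h>

    static inline uintptr_t cr3_get(void)
    {
        uintptr_t cr3;

        __asm__ volatile("movq %%cr3, %0" : "=r" (cr3));
        return cr3;
    }

    /* Reloading CR3 with its current value drops every non-global TLB
     * entry: a common "flush everything" idiom.
     */
    static inline void tlb_flush_nonglobal(void)
    {
        __asm__ volatile("movq %0, %%cr3" : : "r" (cr3_get()) : "memory");
    }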

/arch/arm64/core/mmu.c
   745          name, virt, phys, size, desc,   [in __add_map()]
   749          desc |= phys;   [in __add_map()]
  1159          if (phys) {   [in arch_page_phys_get()]
  1160                  *phys = par & GENMASK64(47, 12);   [in arch_page_phys_get()]
  1178          if ((phys & (level_size - 1))) {   [in arch_virt_region_align()]
  1466          desc |= phys;   [in arch_mem_page_in()]
  1538          if (phys) {   [in arch_page_info_get()]
  1539          *phys = desc & PTE_PHYSADDR_MASK;   [in arch_page_info_get()]
  1603          uintptr_t phys;   [local in z_arm64_do_demand_paging()]
  1673          phys = desc & PTE_PHYSADDR_MASK;   [in z_arm64_do_demand_paging()]
  [all …]
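
The `par & GENMASK64(47, 12)` line is the tail of arm64's arch_page_phys_get(): rather than walking the tables in software, it asks the MMU to translate via an AT instruction and reads the result from PAR_EL1, where bit 0 flags a failed translation and bits 47:12 hold the output frame. A sketch of that sequence under those assumptions (not the Zephyr code itself):

    #include <stdint.h>

    /* Stage-1 EL1 read translation via the AT instruction. */
    static int at_virt_to_phys(void *virt, uintptr_t *phys)
    {
        uint64_t par;

        __asm__ volatile("at s1e1r, %0" : : "r" (virt));
        __asm__ volatile("isb");
        __asm__ volatile("mrs %0, par_el1" : "=r" (par));

        if (par & 1ULL) {
            return -1;          /* PAR_EL1.F set: translation aborted */
        }

        if (phys != NULL) {
            *phys = par & 0x0000fffffffff000ULL;   /* bits 47:12 */
        }
        return 0;
    }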

/arch/arm64/core/fatal.c
   209          uintptr_t *phys = NULL;   [local in is_address_mapped()]
   220          return !arch_page_phys_get((void *) addr, phys);   [in is_address_mapped()]
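
fatal.c leans on the fact that arch_page_phys_get() tolerates a NULL output pointer (the implementations above guard the store with `if (phys)`), which turns the call into a pure "is this address mapped?" probe. The same pattern, written directly:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Declared in Zephyr's internal kernel_arch_interface.h; returns 0
     * when the VA is mapped and -EFAULT otherwise.
     */
    int arch_page_phys_get(void *virt, uintptr_t *phys);

    static bool is_mapped(const void *addr)
    {
        return arch_page_phys_get((void *)addr, NULL) == 0;
    }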

/arch/x86/core/x86_mmu.c
   587          uintptr_t phys = get_entry_phys(entry, level);   [local in print_entries()]
   593          if (phys == virt) {   [in print_entries()]
   596          } else if (phys + K_MEM_VIRT_OFFSET == virt) {   [in print_entries()]
   617          phys = get_entry_phys(entry, level);   [in print_entries()]
   618          if (phys == virt) {   [in print_entries()]
  1173          PRI_ENTRY " opt 0x%x", __func__, phys, virt, size,   [in range_map()]
  2067  int arch_page_phys_get(void *virt, uintptr_t *phys)
  2078          if (phys != NULL) {   [in arch_page_phys_get()]
  2109  void arch_mem_page_in(void *addr, uintptr_t phys)
  2121  void arch_mem_scratch(uintptr_t phys)
  [all …]
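
print_entries() classifies each mapping as either identity (`phys == virt`) or part of the kernel's linear mapping, which sits at a constant distance K_MEM_VIRT_OFFSET from physical memory. That constant offset is what makes phys/virt conversion cheap inside the mapped region; a sketch under that assumption, with illustrative helper names:

    #include <zephyr/kernel.h>
    #include <zephyr/kernel/mm.h>

    /* Constant-offset conversion for the kernel's linear mapping; only
     * valid for addresses inside that region.
     */
    static inline void *linear_virt(uintptr_t phys)
    {
        return (void *)(phys + K_MEM_VIRT_OFFSET);
    }

    static inline uintptr_t linear_phys(const void *virt)
    {
        return (uintptr_t)virt - K_MEM_VIRT_OFFSET;
    }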

/arch/arm/core/mmu/arm_mmu.c
   876  static int __arch_mem_map(void *virt, uintptr_t phys, size_t size, uint32_t flags)
   879          uint32_t pa = (uint32_t)phys;   [in __arch_mem_map()]
   887          "zero size", (uint32_t)phys);   [in __arch_mem_map()]
   953  void arch_mem_map(void *virt, uintptr_t phys, size_t size, uint32_t flags)
   955          int ret = __arch_mem_map(virt, phys, size, flags);   [in arch_mem_map()]
  1037  int arch_page_phys_get(void *virt, uintptr_t *phys)
  1098          if (phys) {   [in arch_page_phys_get()]
  1099          *phys = (uintptr_t)pa_resolved;   [in arch_page_phys_get()]
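
The split between a fallible __arch_mem_map() and the void arch_mem_map() wrapper reflects the arch interface: arch_mem_map() returns nothing, so a failed mapping can only be handled by panicking inside the wrapper. From the caller's side, usage looks like the sketch below; both addresses are illustrative constants, not values from the results above:

    #include <zephyr/kernel.h>
    #include <zephyr/kernel/mm.h>

    /* Declared in Zephyr's internal kernel_arch_interface.h. */
    void arch_mem_map(void *virt, uintptr_t phys, size_t size, uint32_t flags);

    #define UART_PHYS 0x09000000UL   /* hypothetical device MMIO page */
    #define UART_VIRT 0xe0000000UL   /* hypothetical free virtual slot */

    /* Wire one page of device MMIO as uncached read/write memory. Real
     * code would reserve the virtual region rather than hard-code it.
     */
    static void map_uart(void)
    {
        arch_mem_map((void *)UART_VIRT, UART_PHYS, 4096,
                     K_MEM_PERM_RW | K_MEM_CACHE_NONE);
    }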

/arch/x86/gen_mmu.py
   706          phys = one_map['phys']
   720          pt.map(phys, virt, size, flags, level)