Lines Matching refs:vaddr

46 static bool x86_mmu_check_vaddr(vaddr_t vaddr) {  in x86_mmu_check_vaddr()  argument
47 uint64_t addr = (uint64_t)vaddr; in x86_mmu_check_vaddr()
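
The two matches above are from x86_mmu_check_vaddr(), which casts the virtual address to uint64_t and validates it before any table walk. The body is not shown in the matches, so here is a minimal sketch of one plausible check, assuming the standard x86-64 48-bit canonical-address rule; is_canonical_vaddr() is an illustrative name, not the real helper:

    #include <stdbool.h>
    #include <stdint.h>

    /* A 48-bit virtual address is canonical when bits 63:47 are all
     * copies of bit 47 (assumed rule; the real check may differ). */
    static bool is_canonical_vaddr(uint64_t addr) {
        /* Sign-extend from bit 47, then compare with the original. */
        int64_t extended = ((int64_t)addr << 16) >> 16;
        return (uint64_t)extended == addr;
    }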
89 static inline uint64_t get_pml4_entry_from_pml4_table(vaddr_t vaddr, addr_t pml4_addr) { in get_pml4_entry_from_pml4_table() argument
93 pml4_index = (((uint64_t)vaddr >> PML4_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in get_pml4_entry_from_pml4_table()
97 static inline uint64_t get_pdp_entry_from_pdp_table(vaddr_t vaddr, uint64_t pml4e) { in get_pdp_entry_from_pdp_table() argument
101 pdp_index = (((uint64_t)vaddr >> PDP_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in get_pdp_entry_from_pdp_table()
106 static inline uint64_t get_pd_entry_from_pd_table(vaddr_t vaddr, uint64_t pdpe) { in get_pd_entry_from_pd_table() argument
110 pd_index = (((uint64_t)vaddr >> PD_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in get_pd_entry_from_pd_table()
115 static inline uint64_t get_pt_entry_from_pt_table(vaddr_t vaddr, uint64_t pde) { in get_pt_entry_from_pt_table() argument
119 pt_index = (((uint64_t)vaddr >> PT_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in get_pt_entry_from_pt_table()
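
The four get_*_entry_from_*_table() helpers above all compute their table index the same way: shift the virtual address by that level's shift, then mask off ADDR_OFFSET bits. A sketch of that shared pattern, assuming the conventional x86-64 4-KiB four-level paging values for the shift macros (the matches show the expressions but not the constants):

    #include <stdint.h>

    /* Conventional x86-64 four-level paging shifts (assumed values). */
    #define PML4_SHIFT  39
    #define PDP_SHIFT   30
    #define PD_SHIFT    21
    #define PT_SHIFT    12
    #define ADDR_OFFSET 9   /* each level indexes with 9 bits (512 entries) */

    /* Extract the 9-bit table index for one level of the walk; this is
     * exactly the masking expression repeated in the helpers above. */
    static inline uint64_t paging_index(uint64_t vaddr, uint32_t shift) {
        return (vaddr >> shift) & ((1ul << ADDR_OFFSET) - 1);
    }

For example, paging_index(vaddr, PML4_SHIFT) reproduces the pml4_index computation in get_pml4_entry_from_pml4_table().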
190 status_t x86_mmu_get_mapping(map_addr_t pml4, vaddr_t vaddr, uint32_t *ret_level, in x86_mmu_get_mapping() argument
205 pml4e = get_pml4_entry_from_pml4_table(vaddr, pml4); in x86_mmu_get_mapping()
211 pdpe = get_pdp_entry_from_pdp_table(vaddr, pml4e); in x86_mmu_get_mapping()
219 pde = get_pd_entry_from_pd_table(vaddr, pdpe); in x86_mmu_get_mapping()
230 …*last_valid_entry = get_pfn_from_pde(X86_VIRT_TO_PHYS(pde)) + ((uint64_t)vaddr & PAGE_OFFSET_MASK_… in x86_mmu_get_mapping()
236 pte = get_pt_entry_from_pt_table(vaddr, pde); in x86_mmu_get_mapping()
244 …*last_valid_entry = get_pfn_from_pte(X86_VIRT_TO_PHYS(pte)) + ((uint64_t)vaddr & PAGE_OFFSET_MASK_… in x86_mmu_get_mapping()
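
x86_mmu_get_mapping() chains those helpers into a PML4 → PDP → PD → PT descent, stopping early when an entry is not present and reporting both the level reached and the final translation. A compressed sketch of that control flow, reusing paging_index() from the previous sketch; phys_to_virt(), the present-bit test, and the address mask are assumptions, and the 2-MiB large-page path visible in the matches (PAGE_OFFSET_MASK at the PD level) is omitted for brevity:

    #define PT_ADDR_MASK 0x000ffffffffff000ul  /* bits 51:12 hold the frame */

    /* Assumed kernel helper: physical address -> directly usable pointer. */
    extern void *phys_to_virt(uint64_t paddr);

    static inline uint64_t *next_table(uint64_t entry) {
        /* Strip flag bits to recover the child table's physical address. */
        return (uint64_t *)phys_to_virt(entry & PT_ADDR_MASK);
    }

    /* Sketch of the descent in x86_mmu_get_mapping(). */
    static int walk_example(const uint64_t *pml4, uint64_t vaddr,
                            uint32_t *ret_level, uint64_t *out_paddr) {
        static const uint32_t shifts[] = { PML4_SHIFT, PDP_SHIFT, PD_SHIFT, PT_SHIFT };
        const uint64_t *table = pml4;
        for (int level = 0; level < 4; level++) {
            uint64_t entry = table[paging_index(vaddr, shifts[level])];
            *ret_level = 4 - level;        /* deepest level examined */
            if (!(entry & 0x1))            /* bit 0 is the present bit */
                return -1;                 /* walk stops: no mapping here */
            if (level == 3) {              /* PTE reached: 4-KiB frame */
                *out_paddr = (entry & PT_ADDR_MASK) | (vaddr & 0xfff);
                return 0;
            }
            table = next_table(entry);
        }
        return -1;                         /* not reached */
    }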
258 vaddr_t vaddr, arch_flags_t in_flags, in x86_mmu_check_mapping() argument
266 (!x86_mmu_check_vaddr(vaddr)) || in x86_mmu_check_mapping()
271 status = x86_mmu_get_mapping(pml4, vaddr, ret_level, &existing_flags, last_valid_entry); in x86_mmu_check_mapping()
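
x86_mmu_check_mapping() first rejects invalid inputs (including a failed x86_mmu_check_vaddr()), then looks up the existing mapping and compares its flags against in_flags. The comparison itself is not shown in the matches, so this sketch treats it as exact equality, which is only one plausible reading:

    /* Sketch of x86_mmu_check_mapping(): existing_flags stands in for the
     * value x86_mmu_get_mapping() returns through its out-parameter. */
    static int check_mapping_example(uint64_t vaddr, uint64_t existing_flags,
                                     uint64_t in_flags) {
        if (!is_canonical_vaddr(vaddr))
            return -1;                     /* reject bad input first */
        return (existing_flags == in_flags) ? 0 : -1;
    }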
290 static void update_pt_entry(vaddr_t vaddr, paddr_t paddr, uint64_t pde, arch_flags_t flags) { in update_pt_entry() argument
294 pt_index = (((uint64_t)vaddr >> PT_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in update_pt_entry()
301 static void update_pd_entry(vaddr_t vaddr, uint64_t pdpe, map_addr_t m, arch_flags_t flags) { in update_pd_entry() argument
305 pd_index = (((uint64_t)vaddr >> PD_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in update_pd_entry()
314 static void update_pdp_entry(vaddr_t vaddr, uint64_t pml4e, map_addr_t m, arch_flags_t flags) { in update_pdp_entry() argument
318 pdp_index = (((uint64_t)vaddr >> PDP_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in update_pdp_entry()
327 static void update_pml4_entry(vaddr_t vaddr, addr_t pml4_addr, map_addr_t m, arch_flags_t flags) { in update_pml4_entry() argument
331 pml4_index = (((uint64_t)vaddr >> PML4_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in update_pml4_entry()
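
The update_*_entry() helpers above mirror the get_*_entry helpers: each computes the same per-level index, then stores the target physical address together with the architectural flags into that slot. The store itself is not visible in the matches; a sketch of the assumed pattern, reusing paging_index() and PT_ADDR_MASK:

    /* Sketch of the shared update_*_entry() pattern: pick the slot for
     * this level, then write target-address | flags | present. */
    static void update_entry_example(uint64_t *table, uint64_t vaddr,
                                     uint32_t shift, uint64_t target_paddr,
                                     uint64_t arch_flags) {
        uint64_t index = paging_index(vaddr, shift);
        table[index] = (target_paddr & PT_ADDR_MASK) | arch_flags | 0x1;
    }

Under this reading, update_pml4_entry() corresponds to update_entry_example(pml4_table, vaddr, PML4_SHIFT, ...), and likewise for the PDP, PD, and PT variants.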
365 vaddr_t vaddr, arch_flags_t mmu_flags) { in x86_mmu_add_mapping() argument
372 LTRACEF("pml4 0x%llx paddr 0x%llx vaddr 0x%lx flags 0x%llx\n", pml4, paddr, vaddr, mmu_flags); in x86_mmu_add_mapping()
375 if ((!x86_mmu_check_vaddr(vaddr)) || (!x86_mmu_check_paddr(paddr)) ) in x86_mmu_add_mapping()
378 pml4e = get_pml4_entry_from_pml4_table(vaddr, pml4); in x86_mmu_add_mapping()
388 update_pml4_entry(vaddr, pml4, X86_VIRT_TO_PHYS(m), get_x86_arch_flags(mmu_flags)); in x86_mmu_add_mapping()
394 pdpe = get_pdp_entry_from_pdp_table(vaddr, pml4e); in x86_mmu_add_mapping()
406 update_pdp_entry(vaddr, pml4e, X86_VIRT_TO_PHYS(m), get_x86_arch_flags(mmu_flags)); in x86_mmu_add_mapping()
412 pde = get_pd_entry_from_pd_table(vaddr, pdpe); in x86_mmu_add_mapping()
442 update_pd_entry(vaddr, pdpe, X86_VIRT_TO_PHYS(m), get_x86_arch_flags(mmu_flags)); in x86_mmu_add_mapping()
447 update_pt_entry(vaddr, paddr, pde, get_x86_arch_flags(mmu_flags)); in x86_mmu_add_mapping()
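
x86_mmu_add_mapping() walks top-down and, wherever a level's entry is missing, allocates a fresh table, links it in via the matching update_*_entry() call, and finally writes the leaf PTE with update_pt_entry(). A sketch of that allocate-if-missing shape; alloc_page_table() and virt_to_phys() are assumed helpers, and error unwinding is omitted:

    #include <stddef.h>

    extern uint64_t *alloc_page_table(void);    /* assumed: returns a zeroed page */
    extern uint64_t virt_to_phys(void *vaddr);  /* assumed inverse of phys_to_virt */

    /* One level of x86_mmu_add_mapping(): create the child table if the
     * entry is not present, then descend into it. */
    static uint64_t *get_or_create_table(uint64_t *table, uint64_t index,
                                         uint64_t flags) {
        if (!(table[index] & 0x1)) {            /* entry not present */
            uint64_t *child = alloc_page_table();
            if (!child)
                return NULL;                    /* out of memory */
            table[index] = virt_to_phys(child) | flags | 0x1;
        }
        return next_table(table[index]);
    }

    /* Sketch: map one 4-KiB page by chaining the levels, then writing the
     * leaf PTE, mirroring the overall shape of x86_mmu_add_mapping(). */
    static int add_one_page(uint64_t *pml4, uint64_t vaddr, uint64_t paddr,
                            uint64_t flags) {
        uint64_t *pdp = get_or_create_table(pml4, paging_index(vaddr, PML4_SHIFT), flags);
        uint64_t *pd  = pdp ? get_or_create_table(pdp, paging_index(vaddr, PDP_SHIFT), flags) : NULL;
        uint64_t *pt  = pd  ? get_or_create_table(pd,  paging_index(vaddr, PD_SHIFT),  flags) : NULL;
        if (!pt)
            return -1;                          /* a table allocation failed */
        pt[paging_index(vaddr, PT_SHIFT)] = (paddr & PT_ADDR_MASK) | flags | 0x1;
        return 0;
    }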
467 static void x86_mmu_unmap_entry(vaddr_t vaddr, int level, vaddr_t table_entry) { in x86_mmu_unmap_entry() argument
471 LTRACEF("vaddr 0x%lx level %d table_entry 0x%lx\n", vaddr, level, table_entry); in x86_mmu_unmap_entry()
479 offset = (((uint64_t)vaddr >> PML4_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in x86_mmu_unmap_entry()
487 offset = (((uint64_t)vaddr >> PDP_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in x86_mmu_unmap_entry()
495 offset = (((uint64_t)vaddr >> PD_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in x86_mmu_unmap_entry()
503 offset = (((uint64_t)vaddr >> PT_SHIFT) & ((1ul << ADDR_OFFSET) - 1)); in x86_mmu_unmap_entry()
519 x86_mmu_unmap_entry(vaddr, level, (vaddr_t)next_table_addr); in x86_mmu_unmap_entry()
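
x86_mmu_unmap_entry() is recursive: at each level it computes the offset for vaddr with that level's shift (the four offset lines above), descends into the next table, and clears the leaf entry at the bottom. A sketch of the recursion, reusing the earlier helpers; the level numbering here (3 = PML4 down to 0 = PT) is an assumption about the function's convention:

    /* Sketch of the x86_mmu_unmap_entry() recursion. */
    static void unmap_entry_example(uint64_t *table, uint64_t vaddr, int level) {
        static const uint32_t shifts[] = { PT_SHIFT, PD_SHIFT, PDP_SHIFT, PML4_SHIFT };
        uint64_t index = paging_index(vaddr, shifts[level]);
        if (level == 0) {
            table[index] = 0;              /* clear the leaf PTE */
            return;
        }
        if (table[index] & 0x1)            /* recurse only through present entries */
            unmap_entry_example(next_table(table[index]), vaddr, level - 1);
        /* The real code can additionally free a child table once it has
         * emptied and clear table[index]; that step is elided here. */
    }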
543 status_t x86_mmu_unmap(map_addr_t pml4, vaddr_t vaddr, uint count) { in x86_mmu_unmap() argument
547 if (!(x86_mmu_check_vaddr(vaddr))) in x86_mmu_unmap()
553 next_aligned_v_addr = vaddr; in x86_mmu_unmap()
562 int arch_mmu_unmap(arch_aspace_t *aspace, vaddr_t vaddr, uint count) { in arch_mmu_unmap() argument
565 LTRACEF("aspace %p, vaddr 0x%lx, count %u\n", aspace, vaddr, count); in arch_mmu_unmap()
569 if (!(x86_mmu_check_vaddr(vaddr))) in arch_mmu_unmap()
578 return (x86_mmu_unmap(X86_PHYS_TO_VIRT(current_cr3_val), vaddr, count)); in arch_mmu_unmap()
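
x86_mmu_unmap() validates the base address, then advances next_aligned_v_addr one page at a time for count pages; arch_mmu_unmap() is a thin wrapper that revalidates and resolves the root table from CR3 before delegating. A sketch of the loop, assuming 4-KiB base pages:

    #define PAGE_SIZE 4096ul               /* assumed 4-KiB base pages */

    /* Sketch of the x86_mmu_unmap() loop: one unmap-entry call per page. */
    static int unmap_range_example(uint64_t *pml4, uint64_t vaddr, unsigned count) {
        if (!is_canonical_vaddr(vaddr))
            return -1;                     /* mirror the vaddr sanity check */
        uint64_t next_aligned_v_addr = vaddr;
        while (count-- > 0) {
            unmap_entry_example(pml4, next_aligned_v_addr, 3 /* PML4 level */);
            next_aligned_v_addr += PAGE_SIZE;
        }
        return 0;
    }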
621 status_t arch_mmu_query(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t *paddr, uint *flags) { in arch_mmu_query() argument
628 LTRACEF("aspace %p, vaddr 0x%lx, paddr %p, flags %p\n", aspace, vaddr, paddr, flags); in arch_mmu_query()
638 …stat = x86_mmu_get_mapping(X86_PHYS_TO_VIRT(current_cr3_val), vaddr, &ret_level, &ret_flags, &last… in arch_mmu_query()
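
arch_mmu_query() reads CR3, converts it to a virtual pointer with X86_PHYS_TO_VIRT(), and delegates to x86_mmu_get_mapping(), translating the returned level, flags, and last valid entry into the paddr/flags out-parameters. A sketch reusing walk_example(); read_cr3() is an assumed accessor for the CR3 register:

    extern uint64_t read_cr3(void);        /* assumed CR3 accessor */

    /* Sketch of arch_mmu_query(): resolve the live root table, then reuse
     * the generic walk and hand back the physical address it found. */
    static int query_example(uint64_t vaddr, uint64_t *paddr, uint32_t *level) {
        uint64_t *pml4 = (uint64_t *)phys_to_virt(read_cr3() & PT_ADDR_MASK);
        return walk_example(pml4, vaddr, level, paddr);
    }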
652 int arch_mmu_map(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t paddr, uint count, uint flags) { in arch_mmu_map() argument
658 …LTRACEF("aspace %p, vaddr 0x%lx paddr 0x%lx count %u flags 0x%x\n", aspace, vaddr, paddr, count, f… in arch_mmu_map()
663 if (!x86_mmu_check_vaddr(vaddr)) in arch_mmu_map()
672 range.start_vaddr = vaddr; in arch_mmu_map()
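
arch_mmu_map() validates vaddr and paddr, records the request in a range structure (range.start_vaddr = vaddr, plus the start paddr and size), and then maps it page by page. The range plumbing is not shown in the matches, so this sketch collapses it into a direct loop over add_one_page() from the earlier sketch; the partial-failure handling is an assumption:

    /* Sketch of the arch_mmu_map() range loop: one add-mapping per page. */
    static int map_range_example(uint64_t *pml4, uint64_t vaddr, uint64_t paddr,
                                 unsigned count, uint64_t flags) {
        if (!is_canonical_vaddr(vaddr) || (paddr & (PAGE_SIZE - 1)))
            return -1;                     /* basic input sanity checks */
        for (unsigned i = 0; i < count; i++, vaddr += PAGE_SIZE, paddr += PAGE_SIZE) {
            if (add_one_page(pml4, vaddr, paddr, flags) != 0)
                return -1;                 /* real code may unwind partial maps */
        }
        return 0;
    }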