
Searched refs:paddr (Results 1 – 11 of 11) sorted by relevance

/arch/arm64/
mmu.c
162 if (paddr) in arch_mmu_query()
163 *paddr = pte_addr + vaddr_rem; in arch_mmu_query()
207 vaddr, paddr ? *paddr : ~0UL, flags ? *flags : ~0U); in arch_mmu_query()
249 page = paddr_to_vm_page(paddr); in free_page_table()
260 paddr_t paddr; in arm64_mmu_get_page_table() local
272 vaddr = paddr_to_kvaddr(paddr); in arm64_mmu_get_page_table()
287 return paddr_to_kvaddr(paddr); in arm64_mmu_get_page_table()
382 paddr_t paddr = paddr_in; in arm64_mmu_map_pt() local
427 pte = paddr | attrs; in arm64_mmu_map_pt()
438 paddr += chunk_size; in arm64_mmu_map_pt()
[all …]
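
Note: lines 162-163 are the heart of the arm64 query path: the physical result is the output address held in the leaf PTE plus the remainder of the virtual address within the page. A minimal sketch, assuming 4 KiB granules and 48-bit output addresses (the mask below is an assumption, not LK's exact macro):

    #include <stdint.h>

    typedef uintptr_t paddr_t;
    typedef uintptr_t vaddr_t;

    #define PAGE_MASK ((1UL << 12) - 1)

    /* compose the physical address returned by arch_mmu_query() */
    static paddr_t pte_to_paddr(uint64_t pte, vaddr_t vaddr) {
        paddr_t pte_addr  = pte & 0x0000fffffffff000UL; /* OA bits 47:12 */
        vaddr_t vaddr_rem = vaddr & PAGE_MASK;          /* offset in page */
        return pte_addr + vaddr_rem;                    /* lines 162-163 */
    }
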
start.S
25 paddr .req x24 label
104 ldp paddr, vaddr, [mmu_initial_mapping, #__MMU_INITIAL_MAPPING_PHYS_OFFSET]
108 adr paddr, _start
110 str paddr, [mmu_initial_mapping, #__MMU_INITIAL_MAPPING_PHYS_OFFSET]
173 orr tmp, vaddr, paddr
202 orr tmp2, vaddr, paddr
220 orr tmp, tmp, paddr
227 add paddr, paddr, tmp
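
Note: lines 104-110 run before the MMU is enabled, so 'adr paddr, _start' yields the run-time physical load address, which is stored back over the link-time phys field of the first initial mapping. A hedged C rendering; the struct fields follow LK's struct mmu_initial_mapping and should be treated as assumptions here:

    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t paddr_t;
    typedef uintptr_t vaddr_t;

    struct mmu_initial_mapping {
        paddr_t phys;          /* __MMU_INITIAL_MAPPING_PHYS_OFFSET */
        vaddr_t virt;
        size_t size;
        unsigned int flags;
        const char *name;
    };

    extern struct mmu_initial_mapping mmu_initial_mappings[];
    extern char _start[];      /* where the image actually booted */

    /* equivalent of 'adr paddr, _start' + 'str paddr, [...]' */
    static void fixup_initial_mapping_phys(void) {
        mmu_initial_mappings[0].phys = (paddr_t)_start;
    }
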
/arch/or1k/
mmu.c
71 status_t arch_mmu_query(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t *paddr, uint *flags) { in arch_mmu_query() argument
87 if (paddr) in arch_mmu_query()
88 *paddr = (pte & ~OR1K_MMU_PG_FLAGS_MASK) | (vaddr & vmask); in arch_mmu_query()
134 int arch_mmu_map(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t paddr, uint count, uint flags) { in arch_mmu_map() argument
139 LTRACEF("vaddr = 0x%lx, paddr = 0x%lx, count = %d, flags = 0x%x\n", vaddr, paddr, count, flags); in arch_mmu_map()
141 if (!IS_PAGE_ALIGNED(vaddr) || !IS_PAGE_ALIGNED(paddr)) in arch_mmu_map()
156 …if (IS_ALIGNED(vaddr, SECTION_SIZE) && IS_ALIGNED(paddr, SECTION_SIZE) && count >= SECTION_SIZE / … in arch_mmu_map()
161 paddr += SECTION_SIZE; in arch_mmu_map()
191 LTRACEF("l2_index = 0x%x, vaddr = 0x%lx, paddr = 0x%lx\n", l2_index, vaddr, paddr); in arch_mmu_map()
192 l2_table[l2_index] = paddr | arch_flags | OR1K_MMU_PG_PRESENT | OR1K_MMU_PG_L; in arch_mmu_map()
[all …]
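
Note: lines 156-161 are the large-page fast path: when both addresses are SECTION_SIZE-aligned and at least a section's worth of pages remains, a single first-level entry maps the whole section. A sketch of the loop shape; the sizes are illustrative and or1k's real values may differ:

    typedef unsigned long vaddr_t;
    typedef unsigned long paddr_t;

    #define PAGE_SIZE    8192u                 /* assumed */
    #define SECTION_SIZE (16u * 1024 * 1024)   /* assumed */
    #define IS_ALIGNED(a, b) (((a) & ((b) - 1)) == 0)

    extern void map_section(vaddr_t vaddr, paddr_t paddr); /* one L1 entry */
    extern void map_page(vaddr_t vaddr, paddr_t paddr);    /* one L2 entry */

    static void map_range(vaddr_t vaddr, paddr_t paddr, unsigned count) {
        while (count > 0) {
            if (IS_ALIGNED(vaddr, SECTION_SIZE) &&
                IS_ALIGNED(paddr, SECTION_SIZE) &&
                count >= SECTION_SIZE / PAGE_SIZE) {
                map_section(vaddr, paddr);     /* lines 156-161 */
                vaddr += SECTION_SIZE;
                paddr += SECTION_SIZE;
                count -= SECTION_SIZE / PAGE_SIZE;
            } else {
                map_page(vaddr, paddr);        /* line 192 */
                vaddr += PAGE_SIZE;
                paddr += PAGE_SIZE;
                count -= 1;
            }
        }
    }
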
/arch/x86/32/
mmu.c
105 DEBUG_ASSERT(paddr); in x86_mmu_get_mapping()
108 *paddr = 0; in x86_mmu_get_mapping()
123 *paddr = get_pfn_from_pde(pde) + (vaddr & PAGE_OFFSET_MASK_4MB); in x86_mmu_get_mapping()
135 *paddr = 0; in x86_mmu_get_mapping()
140 *paddr = get_pfn_from_pte(pte) + (vaddr & PAGE_OFFSET_MASK_4KB); in x86_mmu_get_mapping()
151 map_addr_t entry = paddr; in update_pt_entry()
163 map_addr_t entry = paddr; in update_pd_entry()
234 update_pt_entry(vaddr, pt, paddr, get_x86_arch_flags(mmu_flags)); in x86_mmu_add_mapping()
374 if (!paddr) in arch_mmu_query()
387 LTRACEF("returning paddr %#lx flags %#x\n", *paddr, ret_flags); in arch_mmu_query()
[all …]
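
Note: lines 123 and 140 are the two terminal cases of the 32-bit walk: a PDE with the page-size bit set maps 4 MB directly, otherwise the walk descends to a 4 KB PTE; in both cases the matching offset bits of vaddr are added back. A sketch assuming non-PAE paging; the helper names stand in for LK's get_pfn_from_pde()/get_pfn_from_pte():

    #include <stdint.h>

    #define PAGE_OFFSET_MASK_4KB 0x00000fffu
    #define PAGE_OFFSET_MASK_4MB 0x003fffffu
    #define X86_PDE_PS           (1u << 7)    /* page-size bit */

    static uint32_t pfn_from_pde_4mb(uint32_t pde) { return pde & 0xffc00000u; }
    static uint32_t pfn_from_pte(uint32_t pte)     { return pte & 0xfffff000u; }

    static uint32_t resolve_paddr(uint32_t pde, uint32_t pte, uint32_t vaddr) {
        if (pde & X86_PDE_PS)   /* 4 MB mapping: line 123 */
            return pfn_from_pde_4mb(pde) + (vaddr & PAGE_OFFSET_MASK_4MB);
        return pfn_from_pte(pte) + (vaddr & PAGE_OFFSET_MASK_4KB); /* line 140 */
    }
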
/arch/x86/64/
mmu.c
88 if (!IS_ALIGNED(paddr, PAGE_SIZE)) in x86_mmu_check_paddr()
93 return paddr <= max_paddr; in x86_mmu_check_paddr()
204 if ((!ret_level) || (!paddr) || (!mmu_flags)) { in x86_mmu_get_mapping()
209 *paddr = 0; in x86_mmu_get_mapping()
265 pt_table[pt_index] = paddr; in update_pt_entry()
275 pd_table[pd_index] = paddr; in update_pd_entry()
285 pdp_table[pdp_index] = paddr; in update_pdp_entry()
294 pml4_table[pml4_index] = paddr; in update_pml4_entry()
583 if (!paddr) in arch_mmu_query()
611 if ((!x86_mmu_check_paddr(paddr))) in arch_mmu_map()
[all …]
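
Note: lines 88-93 show the whole of the physical-address sanity check: page alignment plus an upper bound. A compact sketch; max_paddr stands in for the MAXPHYADDR-derived limit:

    #include <stdbool.h>
    #include <stdint.h>

    #define PAGE_SIZE 4096ull

    static bool check_paddr(uint64_t paddr, uint64_t max_paddr) {
        if (paddr & (PAGE_SIZE - 1))
            return false;            /* not page-aligned (line 88) */
        return paddr <= max_paddr;   /* within range (line 93) */
    }
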
/arch/x86/
pv.c
70 paddr_t paddr; in pvclock_init() local
71 arch_mmu_query(&vmm_get_kernel_aspace()->arch_aspace, (vaddr_t)clocksource_page, &paddr, NULL); in pvclock_init()
72 LTRACEF("clocksource page %p, paddr %#" PRIxPTR "\n", clocksource_page, paddr); in pvclock_init()
74 write_msr(clocksource_msr_base, paddr); in pvclock_init()
75 write_msr(clocksource_msr_base + 1, paddr + sizeof(struct pvclock_wall_clock) + 1); in pvclock_init()
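
Note: the "+ 1" on line 75 does double duty: the pvclock MSR pair is laid out as base (wall clock) and base + 1 (system time), and bit 0 of the system-time value is the enable flag, so adding 1 both skips past the wall-clock struct and turns updates on. A sketch assuming the KVM MSR numbering and ABI struct layout; LK's clocksource_msr_base may instead be the legacy pair (0x11/0x12), which follows the same base/base+1 scheme:

    #include <stdint.h>

    #define MSR_KVM_WALL_CLOCK_NEW  0x4b564d00u  /* assumed base */
    #define MSR_KVM_SYSTEM_TIME_NEW 0x4b564d01u  /* base + 1 */

    extern void write_msr(uint32_t msr, uint64_t value);

    struct pvclock_wall_clock { uint32_t version, sec, nsec; }; /* KVM ABI */

    static void register_pvclock(uint64_t paddr) {
        /* wall-clock struct sits at the start of the shared page */
        write_msr(MSR_KVM_WALL_CLOCK_NEW, paddr);
        /* per-vcpu time info follows it; "+ 1" sets the enable bit,
         * which is free because the address stays 4-byte aligned */
        write_msr(MSR_KVM_SYSTEM_TIME_NEW,
                  paddr + sizeof(struct pvclock_wall_clock) + 1);
    }
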
/arch/arm/arm/
mmu.c
32 static void arm_mmu_map_section(arch_aspace_t *aspace, addr_t paddr, addr_t vaddr, uint flags);
142 DEBUG_ASSERT(IS_SECTION_ALIGNED(paddr)); in arm_mmu_map_section()
256 if (paddr) in arch_mmu_query()
306 if (paddr) in arch_mmu_query()
477 LTRACEF("vaddr 0x%lx paddr 0x%lx count %u flags 0x%x\n", vaddr, paddr, count, flags); in arch_mmu_map()
495 DEBUG_ASSERT(IS_PAGE_ALIGNED(paddr)); in arch_mmu_map()
496 if (!IS_PAGE_ALIGNED(vaddr) || !IS_PAGE_ALIGNED(paddr)) in arch_mmu_map()
513 arm_mmu_map_section(aspace, paddr, vaddr, arch_flags); in arch_mmu_map()
517 paddr += SECTION_SIZE; in arch_mmu_map()
555 l2_table[l2_index++] = paddr | arch_flags; in arch_mmu_map()
[all …]
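
Note: lines 513-517 advance by SECTION_SIZE per first-level entry. A sketch of the section-mapping step, assuming the ARM short-descriptor format where the first-level index is vaddr[31:20] and each entry maps a 1 MiB section; descriptor-type bits are assumed folded into arch_flags, matching the L2 case on line 555:

    #include <stdint.h>

    typedef uint32_t vaddr_t;
    typedef uint32_t paddr_t;

    #define SECTION_SIZE (1u << 20)

    static void map_sections(uint32_t *tt, vaddr_t vaddr, paddr_t paddr,
                             uint32_t arch_flags, unsigned nsections) {
        while (nsections--) {
            /* one first-level descriptor per 1 MiB section */
            tt[vaddr >> 20] = (paddr & ~(SECTION_SIZE - 1)) | arch_flags;
            vaddr += SECTION_SIZE;
            paddr += SECTION_SIZE;
        }
    }
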
/arch/riscv/include/arch/riscv/
mmu.h
79 #define RISCV_PTE_PPN_TO_PTE(paddr) (((paddr) >> PAGE_SIZE_SHIFT) << RISCV_PTE_PPN_SHIFT) argument
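
Note: a worked example of what the macro computes, assuming 4 KiB base pages (PAGE_SIZE_SHIFT == 12) and the Sv39/Sv48 PTE layout (RISCV_PTE_PPN_SHIFT == 10, flag bits in 9:0):

    /* RISCV_PTE_PPN_TO_PTE(0x80200000ul) == 0x20080000ul:
     *   0x80200000 >> 12 == 0x80200     (drop the page offset -> PPN)
     *   0x80200    << 10 == 0x20080000  (place the PPN at PTE bit 10)
     * Bits 9:0 stay free for the V/R/W/X/U/G/A/D flag bits. */
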
/arch/include/arch/
mmu.h
49 int arch_mmu_map(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t paddr, uint count, uint flags) __NON…
51 status_t arch_mmu_query(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t *paddr, uint *flags) __NONNUL…
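
Note: a hypothetical round trip through the generic interface declared above (lines 49 and 51): map one page, then query it back. The aspace is assumed already initialized, and flags value 0 stands in for real ARCH_MMU_FLAG_* bits:

    typedef unsigned long vaddr_t;
    typedef unsigned long paddr_t;
    typedef unsigned int uint;
    typedef int status_t;
    typedef struct arch_aspace arch_aspace_t;

    extern int arch_mmu_map(arch_aspace_t *aspace, vaddr_t vaddr,
                            paddr_t paddr, uint count, uint flags);
    extern status_t arch_mmu_query(arch_aspace_t *aspace, vaddr_t vaddr,
                                   paddr_t *paddr, uint *flags);

    static status_t map_and_check(arch_aspace_t *aspace) {
        vaddr_t va = 0xffff000000010000ul;   /* example addresses */
        paddr_t pa = 0x80000000ul;

        if (arch_mmu_map(aspace, va, pa, 1, 0) < 0)
            return -1;

        paddr_t out;
        uint flags;
        status_t err = arch_mmu_query(aspace, va, &out, &flags);
        /* on success, out == pa and flags describes the mapping */
        return err;
    }
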
/arch/riscv/
mmu.cpp
394 int arch_mmu_map(arch_aspace_t *aspace, const vaddr_t _vaddr, paddr_t paddr, uint count, const uint… in arch_mmu_map() argument
395 LTRACEF("vaddr %#lx paddr %#lx count %u flags %#x\n", _vaddr, paddr, count, flags); in arch_mmu_map()
417 …auto map_cb = [&paddr, &count, aspace, flags](uint level, uint index, riscv_pte_t pte, vaddr_t *va… in arch_mmu_map()
419 level, index, pte, *vaddr, paddr, count, flags); in arch_mmu_map()
444 riscv_pte_t temp_pte = RISCV_PTE_PPN_TO_PTE(paddr); in arch_mmu_map()
455 paddr += PAGE_SIZE; in arch_mmu_map()
469 status_t arch_mmu_query(arch_aspace_t *aspace, const vaddr_t _vaddr, paddr_t *paddr, uint *flags) { in arch_mmu_query() argument
483 …auto query_cb = [paddr, flags](uint level, uint index, riscv_pte_t pte, vaddr_t *vaddr) -> walk_cb… in arch_mmu_query()
491 if (paddr) { in arch_mmu_query()
497 *paddr = pa | (*vaddr & page_mask); in arch_mmu_query()
[all …]
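
Note: line 497 composes the leaf result as pa | (*vaddr & page_mask), where the mask depends on the level reported to the callback, so mega/giga pages keep more offset bits from the virtual address. A sketch; the 9-bits-per-level Sv39 arithmetic and the level-0-is-leaf numbering are assumptions:

    #include <stdint.h>

    typedef uintptr_t paddr_t;
    typedef uintptr_t vaddr_t;

    #define PAGE_SIZE_SHIFT 12

    static paddr_t leaf_paddr(paddr_t pa, vaddr_t vaddr, unsigned level) {
        /* level 0: 4 KiB page; level 1: 2 MiB; level 2: 1 GiB */
        uintptr_t page_mask = (1ul << (PAGE_SIZE_SHIFT + 9u * level)) - 1;
        return (pa & ~page_mask) | (vaddr & page_mask);
    }
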
/arch/arm64/include/arch/arm64/
mmu.h
271 int arm64_mmu_map(vaddr_t vaddr, paddr_t paddr, size_t size, pte_t attrs,
