Searched refs:p_addr (Results 1 – 8 of 8) sorted by relevance

/libcpu/mips/gs264/
mmu.c  419  size_t loop_pa = (size_t)p_addr & ~ARCH_PAGE_MASK; in __rt_hw_mmu_map()
495 pa_s = (size_t)p_addr; in _rt_hw_mmu_map()
496 pa_e = (size_t)p_addr + size - 1; in _rt_hw_mmu_map()
503 pa_s = (size_t)p_addr; in _rt_hw_mmu_map()
536 pa_s = (size_t)p_addr; in _rt_hw_mmu_map()
537 pa_e = (size_t)p_addr + size - 1; in _rt_hw_mmu_map()
688 void *p_addr = 0; in rt_hw_kernel_virt_to_phys() local
690 return p_addr; in rt_hw_kernel_virt_to_phys()
694 void *rt_hw_kernel_phys_to_virt(void *p_addr, size_t size) in rt_hw_kernel_phys_to_virt() argument
702 v_addr = p_addr; in rt_hw_kernel_phys_to_virt()
[all …]
mmu.h  102  void *rt_hw_mmu_map(rt_mmu_info *mmu_info, void *v_addr, void* p_addr, size_t size, size_t attr);
105 void *rt_hw_mmu_map(rt_mmu_info *mmu_info, void* p_addr, size_t size, size_t attr);
110 void *rt_hw_kernel_phys_to_virt(void *p_addr, size_t size);
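
Taken together, the gs264 hits above give the physical-address (p_addr) side of this port's MMU API: rt_hw_mmu_map() to establish a mapping and rt_hw_kernel_phys_to_virt() to translate an already-mapped kernel physical address. The header declares both a (v_addr, p_addr, ...) form and a p_addr-only form of rt_hw_mmu_map(), presumably selected by configuration; the sketch below assumes the p_addr-only prototype from mmu.h line 105 together with line 110. The mmu_info instance, the attribute value and the addresses are placeholders, not code from this tree.

/* Hedged usage sketch based only on the gs264 mmu.h prototypes above.
 * g_kernel_mmu_info and EXAMPLE_ATTR are assumptions for illustration. */
#include <stddef.h>
#include "mmu.h"                              /* the gs264 header listed above */

#define EXAMPLE_ATTR     ((size_t)0)          /* placeholder page-attribute bits */
extern rt_mmu_info g_kernel_mmu_info;         /* assumed rt_mmu_info instance */

void p_addr_usage_sketch(void)
{
    void *p_addr = (void *)0x1fe00000;        /* example physical address */
    size_t size  = 0x1000;

    /* With this prototype the kernel picks the virtual address and returns it. */
    void *v_addr = rt_hw_mmu_map(&g_kernel_mmu_info, p_addr, size, EXAMPLE_ATTR);

    /* Direct translation for memory the kernel already maps. */
    void *kva = rt_hw_kernel_phys_to_virt(p_addr, size);

    (void)v_addr;
    (void)kva;
}
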
/libcpu/arm/cortex-a/
mmu.c  311  static int _kenrel_map_4K(unsigned long *lv0_tbl, void *v_addr, void *p_addr, in _kenrel_map_4K() argument
315 size_t loop_pa = (size_t)p_addr & ~ARCH_PAGE_MASK; in _kenrel_map_4K()
358 void *rt_hw_mmu_map(rt_aspace_t aspace, void *v_addr, void *p_addr, size_t size, in rt_hw_mmu_map() argument
368 ret = _kenrel_map_4K(aspace->page_table, v_addr, p_addr, attr); in rt_hw_mmu_map()
383 p_addr += ARCH_PAGE_SIZE; in rt_hw_mmu_map()
mmu.h  117  void *rt_hw_mmu_map(struct rt_aspace *aspace, void *v_addr, void *p_addr, size_t size, size_t attr);
/libcpu/aarch64/common/
mmu.c  272  void *rt_hw_mmu_map(rt_aspace_t aspace, void *v_addr, void *p_addr, size_t size, in rt_hw_mmu_map() argument
287 ((rt_ubase_t)p_addr & ARCH_SECTION_MASK) || in rt_hw_mmu_map()
304 if (rt_page_is_member((rt_base_t)p_addr) && _ALIAS_OFFSET(v_addr) != _ALIAS_OFFSET(p_addr)) in rt_hw_mmu_map()
306 LOG_W("Possibly aliasing on va(0x%lx) to pa(0x%lx)", v_addr, p_addr); in rt_hw_mmu_map()
313 ret = mapper(aspace->page_table, v_addr, p_addr, attr); in rt_hw_mmu_map()
333 p_addr = (char *)p_addr + stride; in rt_hw_mmu_map()
/libcpu/risc-v/common64/
mmu.h  63   void *rt_hw_mmu_map(rt_aspace_t aspace, void *v_addr, void *p_addr, size_t size,
mmu.c  182  void *rt_hw_mmu_map(struct rt_aspace *aspace, void *v_addr, void *p_addr, in rt_hw_mmu_map() argument
193 ret = _map_one_page(aspace, v_addr, p_addr, attr); in rt_hw_mmu_map()
208 p_addr += ARCH_PAGE_SIZE; in rt_hw_mmu_map()
/libcpu/aarch64/common/include/
mmu.h  113  void *rt_hw_mmu_map(struct rt_aspace *aspace, void *v_addr, void *p_addr,
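
Across the cortex-a, aarch64 and risc-v ports the .c hits follow the same pattern: rt_hw_mmu_map() checks alignment, then walks the requested range one page at a time, letting a port helper (_kenrel_map_4K(), _map_one_page()) install each entry and advancing p_addr by ARCH_PAGE_SIZE (or the mapped stride) between iterations. Below is a minimal sketch of that loop shape, assuming 4 KiB pages; _map_one_page_stub() and the error handling are placeholders, not the tree's code.

/* Sketch of the per-page mapping loop visible in the mmu.c hits above.
 * _map_one_page_stub() stands in for the port-specific helpers; a real
 * port writes a page-table entry here and unmaps on failure. */
#include <stddef.h>

#define ARCH_PAGE_SIZE 0x1000UL                   /* assumed 4 KiB pages */
#define ARCH_PAGE_MASK (ARCH_PAGE_SIZE - 1UL)

static int _map_one_page_stub(void *v_addr, void *p_addr, size_t attr)
{
    (void)v_addr; (void)p_addr; (void)attr;
    return 0;                                     /* placeholder: pretend the PTE was written */
}

void *map_region_sketch(void *v_addr, void *p_addr, size_t size, size_t attr)
{
    /* The real ports reject requests that are not page aligned. */
    if (((size_t)v_addr & ARCH_PAGE_MASK) || ((size_t)p_addr & ARCH_PAGE_MASK))
        return NULL;

    char  *va     = v_addr;
    char  *pa     = p_addr;
    size_t npages = (size + ARCH_PAGE_MASK) / ARCH_PAGE_SIZE;

    while (npages--)
    {
        if (_map_one_page_stub(va, pa, attr) != 0)
            return NULL;                          /* real ports roll back partial mappings */
        va += ARCH_PAGE_SIZE;
        pa += ARCH_PAGE_SIZE;                     /* the "p_addr += ARCH_PAGE_SIZE" hits above */
    }
    return v_addr;
}
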

Completed in 14 milliseconds