
Searched refs: v_addr (Results 1 – 8 of 8) sorted by relevance

/libcpu/mips/gs264/
mmu.c
500 if (v_addr) in _rt_hw_mmu_map()
502 vaddr = (size_t)v_addr; in _rt_hw_mmu_map()
643 offset = (size_t)v_addr & ARCH_PAGE_MASK; in _rt_hw_mmu_map_auto()
646 if (v_addr) in _rt_hw_mmu_map_auto()
648 vaddr = (size_t)v_addr; in _rt_hw_mmu_map_auto()
676 va_s = (size_t)v_addr; in _rt_hw_mmu_unmap()
677 va_e = (size_t)v_addr + size - 1; in _rt_hw_mmu_unmap()
696 void *v_addr = 0; in rt_hw_kernel_phys_to_virt() local
702 v_addr = p_addr; in rt_hw_kernel_phys_to_virt()
705 return v_addr; in rt_hw_kernel_phys_to_virt()
[all …]
mmu.h
102 void *rt_hw_mmu_map(rt_mmu_info *mmu_info, void *v_addr, void* p_addr, size_t size, size_t attr);
103 void *rt_hw_mmu_map_auto(rt_mmu_info *mmu_info, void *v_addr, size_t size, size_t attr);
107 void rt_hw_mmu_unmap(rt_mmu_info *mmu_info, void* v_addr, size_t size);
108 void *rt_hw_mmu_v2p(rt_mmu_info *mmu_info, void* v_addr);
111 void *rt_hw_kernel_virt_to_phys(void *v_addr, size_t size);
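The gs264 port still uses the older rt_mmu_info-based API. Below is a minimal usage sketch based only on the prototypes above; the device physical address and the dev_attr value are placeholders, and the "let the mapper pick a virtual address" behaviour of a NULL v_addr is inferred from the if (v_addr) branches at mmu.c:500 and mmu.c:646:

    #include <rtthread.h>
    #include <mmu.h>

    /* Hypothetical helper: map a 4 KiB device page, check the mapping,
     * then tear it down. dev_attr is whatever page attribute the port
     * expects for device memory (not shown in the hits above). */
    static void map_device_sketch(rt_mmu_info *mmu_info, size_t dev_attr)
    {
        /* Passing RT_NULL as v_addr appears to let the mapper choose the
         * virtual address (see the v_addr checks in _rt_hw_mmu_map). */
        void *va = rt_hw_mmu_map(mmu_info, RT_NULL,
                                 (void *)0x1fe001e0 /* placeholder PA */,
                                 0x1000, dev_attr);
        if (va)
        {
            /* Round-trip through the translation to sanity-check it. */
            void *pa = rt_hw_mmu_v2p(mmu_info, va);
            rt_kprintf("mapped va=%p -> pa=%p\n", va, pa);
            rt_hw_mmu_unmap(mmu_info, va, 0x1000);
        }
    }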
/libcpu/arm/cortex-a/
mmu.c
278 size_t loop_va = (size_t)v_addr & ~ARCH_PAGE_MASK; in _kenrel_unmap_4K()
314 size_t loop_va = (size_t)v_addr & ~ARCH_PAGE_MASK; in _kenrel_map_4K()
362 void *unmap_va = v_addr; in rt_hw_mmu_map()
372 while (unmap_va != v_addr) in rt_hw_mmu_map()
382 v_addr += ARCH_PAGE_SIZE; in rt_hw_mmu_map()
388 return v_addr; in rt_hw_mmu_map()
407 _kenrel_unmap_4K(aspace->page_table, v_addr); in rt_hw_mmu_unmap()
410 v_addr += ARCH_PAGE_SIZE; in rt_hw_mmu_unmap()
427 void *rt_hw_mmu_v2p(rt_aspace_t aspace, void* v_addr) in rt_hw_mmu_v2p() argument
434 l1_off = (size_t)v_addr >> ARCH_SECTION_SHIFT; in rt_hw_mmu_v2p()
[all …]
mmu.h
117 void *rt_hw_mmu_map(struct rt_aspace *aspace, void *v_addr, void *p_addr, size_t size, size_t attr);
118 void rt_hw_mmu_unmap(struct rt_aspace *aspace, void *v_addr, size_t size);
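The hits at mmu.c:362-388 show the failure-handling idiom this port uses: map page by page, and if one page fails, walk unmap_va forward to undo everything mapped so far. Here is a generic sketch of that pattern; _map_one()/_unmap_one() are hypothetical stand-ins for the real per-page helpers (_kenrel_map_4K/_kenrel_unmap_4K):

    #include <stddef.h>

    #define PAGE_SIZE 4096UL            /* assumed, matching the 4K helpers */

    extern int  _map_one(void *va, void *pa);   /* 0 on success (stand-in) */
    extern void _unmap_one(void *va);           /* stand-in */

    static void *map_with_rollback(void *v_addr, void *p_addr, size_t npages)
    {
        void *unmap_va = v_addr;        /* remembers where mapping started */

        for (size_t i = 0; i < npages; i++)
        {
            if (_map_one(v_addr, p_addr) != 0)
            {
                /* Mid-way failure: undo every page already mapped,
                 * like the while loop at mmu.c:372. */
                while (unmap_va != v_addr)
                {
                    _unmap_one(unmap_va);
                    unmap_va = (char *)unmap_va + PAGE_SIZE;
                }
                return NULL;
            }
            v_addr = (char *)v_addr + PAGE_SIZE;
            p_addr = (char *)p_addr + PAGE_SIZE;
        }
        return unmap_va;                /* start of the mapped range */
    }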
/libcpu/aarch64/common/
mmu.c
68 static void _kenrel_unmap_4K(unsigned long *lv0_tbl, void *v_addr) in _kenrel_unmap_4K() argument
71 unsigned long va = (unsigned long)v_addr; in _kenrel_unmap_4K()
277 void *unmap_va = v_addr; in rt_hw_mmu_map()
286 if (((rt_ubase_t)v_addr & ARCH_SECTION_MASK) || in rt_hw_mmu_map()
321 while (unmap_va != v_addr) in rt_hw_mmu_map()
332 v_addr = (char *)v_addr + stride; in rt_hw_mmu_map()
357 if (rt_hw_mmu_v2p(aspace, v_addr) != ARCH_MAP_FAILED) in rt_hw_mmu_unmap()
358 _kenrel_unmap_4K(aspace->page_table, v_addr); in rt_hw_mmu_unmap()
360 v_addr = (char *)v_addr + ARCH_PAGE_SIZE; in rt_hw_mmu_unmap()
806 void *rt_hw_mmu_v2p(rt_aspace_t aspace, void *v_addr) in rt_hw_mmu_v2p() argument
[all …]
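Lines 286 and 332 of this file hint at how the AArch64 port picks its mapping granularity: if the virtual address, physical address, or size is not aligned to a section (a 2 MiB block at 4K granule), it falls back to 4 KiB pages, and the loop then advances by that stride. A sketch of the selection follows; the mask and size values are the usual 4K-granule figures and are assumptions here:

    #define ARCH_PAGE_SIZE    (1UL << 12)           /* 4 KiB, assumed */
    #define ARCH_SECTION_SIZE (1UL << 21)           /* 2 MiB block, assumed */
    #define ARCH_SECTION_MASK (ARCH_SECTION_SIZE - 1)

    /* Use 2 MiB block entries only when the whole region is block-aligned;
     * otherwise map with 4 KiB pages (cf. the alignment test at mmu.c:286). */
    static unsigned long pick_stride(unsigned long v_addr,
                                     unsigned long p_addr,
                                     unsigned long size)
    {
        if ((v_addr & ARCH_SECTION_MASK) || (p_addr & ARCH_SECTION_MASK) ||
            (size & ARCH_SECTION_MASK))
            return ARCH_PAGE_SIZE;
        return ARCH_SECTION_SIZE;
    }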
/libcpu/aarch64/common/include/
mmu.h
113 void *rt_hw_mmu_map(struct rt_aspace *aspace, void *v_addr, void *p_addr,
115 void rt_hw_mmu_unmap(struct rt_aspace *aspace, void *v_addr, size_t size);
122 static inline void *rt_hw_mmu_kernel_v2p(void *v_addr) in rt_hw_mmu_kernel_v2p() argument
126 __asm__ volatile("at s1e1w, %0"::"r"(v_addr):"memory"); in rt_hw_mmu_kernel_v2p()
137 par |= (rt_ubase_t)v_addr & ARCH_PAGE_MASK; in rt_hw_mmu_kernel_v2p()
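The listing shows only fragments of rt_hw_mmu_kernel_v2p (mmu.h:122, 126, 137). A hedged reconstruction of the flow: AT S1E1W asks the MMU to run a stage-1 EL1 write translation and leaves the result in PAR_EL1, whose F flag (bit 0) signals failure and whose PA field sits in bits [47:12]. That register layout is architectural, but the exact masks, the ISB, and the failure value below are assumptions, not the header's real code:

    /* Sketch of a PAR_EL1-based kernel virtual-to-physical lookup. */
    static inline void *kernel_v2p_sketch(void *v_addr)
    {
        unsigned long par;

        __asm__ volatile("at s1e1w, %0" :: "r"(v_addr) : "memory");
        __asm__ volatile("isb" ::: "memory");    /* order AT before the read */
        __asm__ volatile("mrs %0, par_el1" : "=r"(par) :: "memory");

        if (par & 1UL)                  /* PAR_EL1.F set: translation failed */
            return (void *)-1;          /* stand-in for ARCH_MAP_FAILED */

        par &= 0x0000fffffffff000UL;    /* keep the PA field, bits [47:12] */
        par |= (unsigned long)v_addr & 0xfffUL; /* re-add page offset (mmu.h:137) */
        return (void *)par;
    }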
/libcpu/risc-v/common64/
mmu.c
39 static size_t _unmap_area(struct rt_aspace *aspace, void *v_addr);
182 void *rt_hw_mmu_map(struct rt_aspace *aspace, void *v_addr, void *p_addr, in rt_hw_mmu_map() argument
186 void *unmap_va = v_addr; in rt_hw_mmu_map()
193 ret = _map_one_page(aspace, v_addr, p_addr, attr); in rt_hw_mmu_map()
198 while (unmap_va != v_addr) in rt_hw_mmu_map()
207 v_addr += ARCH_PAGE_SIZE; in rt_hw_mmu_map()
249 static size_t _unmap_area(struct rt_aspace *aspace, void *v_addr) in _unmap_area() argument
251 rt_ubase_t loop_va = __UMASKVALUE((rt_ubase_t)v_addr, PAGE_OFFSET_MASK); in _unmap_area()
323 void rt_hw_mmu_unmap(struct rt_aspace *aspace, void *v_addr, size_t size) in rt_hw_mmu_unmap() argument
335 unmapped = _unmap_area(aspace, v_addr); in rt_hw_mmu_unmap()
[all …]
mmu.h
63 void *rt_hw_mmu_map(rt_aspace_t aspace, void *v_addr, void *p_addr, size_t size,
65 void rt_hw_mmu_unmap(rt_aspace_t aspace, void *v_addr, size_t size);
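In the risc-v port, rt_hw_mmu_unmap (mmu.c:323) repeatedly calls _unmap_area (mmu.c:249), which removes one mapping at a time. A sketch of the driving loop, under the assumption that _unmap_area reports how many bytes the removed entry covered (its size_t return at mmu.c:39 and the unmapped variable at mmu.c:335 suggest this); the stub name is hypothetical:

    #include <stddef.h>

    extern size_t _unmap_area_stub(void *v_addr);  /* bytes unmapped (stand-in) */

    static void unmap_range_sketch(void *v_addr, size_t size)
    {
        while (size > 0)
        {
            size_t unmapped = _unmap_area_stub(v_addr);
            if (unmapped == 0 || unmapped >= size)
                break;                  /* nothing left, or range fully covered */
            v_addr = (char *)v_addr + unmapped;
            size   -= unmapped;
        }
    }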
