Searched refs:pv_off (Results 1 – 10 of 10) sorted by relevance

/libcpu/risc-v/common64/
mmu.h
46 #define VPN_TO_PPN(vaddr, pv_off) (((rt_uintptr_t)(vaddr)) + (pv_off)) argument
47 #define PPN_TO_VPN(paddr, pv_off) (((rt_uintptr_t)(paddr)) - (pv_off)) argument
61 rt_ubase_t *vtable, rt_ubase_t pv_off);
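
The two macros above capture the whole pv_off convention: pv_off is the constant difference between the address the kernel is loaded at and the address it is linked for, so physical = virtual + pv_off and virtual = physical - pv_off. A minimal, self-contained sketch of the round trip (the addresses are invented example values for a 64-bit build, not RT-Thread defaults):

    /* Sketch only: demonstrates the VPN_TO_PPN / PPN_TO_VPN round trip with
     * made-up addresses; unsigned wrap-around makes a "negative" offset fine. */
    #include <stdint.h>
    #include <stdio.h>

    typedef uintptr_t rt_uintptr_t;

    #define VPN_TO_PPN(vaddr, pv_off) (((rt_uintptr_t)(vaddr)) + (pv_off))
    #define PPN_TO_VPN(paddr, pv_off) (((rt_uintptr_t)(paddr)) - (pv_off))

    int main(void)
    {
        rt_uintptr_t load_pa = 0x40200000UL;            /* assumed load address */
        rt_uintptr_t link_va = 0xffffffc000200000UL;    /* assumed link address */
        rt_uintptr_t pv_off  = load_pa - link_va;       /* pa - va, may wrap */

        rt_uintptr_t va = link_va + 0x1000;
        rt_uintptr_t pa = VPN_TO_PPN(va, pv_off);

        printf("pa=%#lx back to va=%#lx\n",
               (unsigned long)pa, (unsigned long)PPN_TO_VPN(pa, pv_off));
        return 0;
    }
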
mmu.c
392 rt_ubase_t *vtable, rt_ubase_t pv_off) in rt_hw_mmu_map_init() argument
667 rt_ubase_t pv_off; in rt_hw_mem_setup_early() local
678 pv_off = symb_pc - symb_linker; in rt_hw_mem_setup_early()
679 rt_kmem_pvoff_set(pv_off); in rt_hw_mem_setup_early()
681 if (pv_off) in rt_hw_mem_setup_early()
683 if (pv_off & ((1ul << (ARCH_INDEX_WIDTH * 2 + ARCH_PAGE_SHIFT)) - 1)) in rt_hw_mem_setup_early()
686 pv_off); in rt_hw_mem_setup_early()
705 vs = ps - pv_off; in rt_hw_mem_setup_early()
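
rt_hw_mem_setup_early() above derives pv_off at runtime by comparing a symbol's PC-relative (physical) address with its link-time address, records it via rt_kmem_pvoff_set(), and bails out if the offset is not aligned to a first-level mapping (with ARCH_INDEX_WIDTH = 9 and ARCH_PAGE_SHIFT = 12 that is 1 GiB). A hedged sketch of the derivation; the symbol and constant names are illustrative, not the port's actual ones:

    /* Sketch, not the RT-Thread implementation. Assumes a RISC-V medany build,
     * where "la" expands PC-relatively and therefore yields the running
     * (physical) address of _start before the MMU is enabled. */
    #define ARCH_PAGE_SHIFT   12
    #define ARCH_INDEX_WIDTH  9
    #define KERNEL_LINK_ADDR  0xffffffc000200000UL   /* assumed link address */

    static unsigned long compute_pv_off(void)
    {
        unsigned long symb_pc, pv_off;

        __asm__ volatile ("la %0, _start" : "=r"(symb_pc)); /* load address */
        pv_off = symb_pc - KERNEL_LINK_ADDR;                 /* pa - va */

        /* Early boot maps with first-level entries only, so the offset must
         * be aligned to one such entry (1 GiB for Sv39). */
        if (pv_off & ((1UL << (ARCH_INDEX_WIDTH * 2 + ARCH_PAGE_SHIFT)) - 1))
            return 0; /* unsupported load offset, in this sketch */

        return pv_off;
    }
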
/libcpu/aarch64/common/
setup.c
211 const rt_ubase_t pv_off = PV_OFFSET; in rt_hw_common_setup() local
216 rt_hw_mmu_map_init(&rt_kernel_space, (void*)0xfffffffff0000000, 0x10000000, MMUTable, pv_off); in rt_hw_common_setup()
239 fdt_ptr = (void *)(fdt_ptr - pv_off); in rt_hw_common_setup()
243 fdt_ptr = rt_ioremap_early(fdt_ptr + pv_off, fdt_size); in rt_hw_common_setup()
248 rt_memmove((void *)(fdt_start - pv_off), fdt_ptr, fdt_size); in rt_hw_common_setup()
249 fdt_ptr = (void *)fdt_start - pv_off; in rt_hw_common_setup()
251 rt_system_heap_init((void *)(heap_start - pv_off), (void *)(heap_end - pv_off)); in rt_hw_common_setup()
253 init_page_region.start = init_page_start - pv_off; in rt_hw_common_setup()
254 init_page_region.end = init_page_end - pv_off; in rt_hw_common_setup()
262 platform_mem_desc.vaddr_start = platform_mem_region.start - pv_off; in rt_hw_common_setup()
[all …]
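
Throughout rt_hw_common_setup() the same convention runs in both directions: boot-time physical addresses (the FDT pointer, heap bounds, the initial page region) become kernel virtual addresses by subtracting pv_off, and virtual addresses are turned back into physical ones by adding it (as for the rt_ioremap_early() call). A minimal sketch of that convention; the helper names are hypothetical, not RT-Thread APIs:

    /* Sketch only: pa_to_va()/va_to_pa() just name the two directions of the
     * pv_off conversion used in the setup code above. */
    #include <stdint.h>

    static inline void *pa_to_va(uintptr_t pa, uintptr_t pv_off)
    {
        return (void *)(pa - pv_off);     /* e.g. fdt_ptr - pv_off */
    }

    static inline uintptr_t va_to_pa(const void *va, uintptr_t pv_off)
    {
        return (uintptr_t)va + pv_off;    /* e.g. fdt_ptr + pv_off for ioremap */
    }
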
mmu.c
532 size_t *vtable, size_t pv_off) in rt_hw_mmu_map_init() argument
910 unsigned long size, unsigned long pv_off) in rt_hw_mem_setup_early() argument
916 unsigned long va = (unsigned long) &_start - pv_off; in rt_hw_mem_setup_early()
920 rt_kmem_pvoff_set(pv_off); in rt_hw_mem_setup_early()
926 ret = _init_map_2M(tbl1, va, va + pv_off, count, normal_attr); in rt_hw_mem_setup_early()
931 ret = _init_map_2M(tbl0, va + pv_off, va + pv_off, count, normal_attr); in rt_hw_mem_setup_early()
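
The two _init_map_2M() calls above build the usual early-boot pair of mappings: tbl1 maps the kernel's linked virtual range onto its physical load range (va -> va + pv_off), while tbl0 identity-maps the physical range (va + pv_off -> va + pv_off) so instruction fetch stays valid while the MMU is switched on. A sketch of that shape; map_2m_blocks() is a hypothetical stand-in for the port's 2 MiB block mapper, not an RT-Thread function:

    /* Sketch: map_2m_blocks(tbl, va, pa, count, attr) is assumed to install
     * 'count' 2 MiB block descriptors starting at va -> pa. */
    extern int map_2m_blocks(unsigned long *tbl, unsigned long va,
                             unsigned long pa, unsigned long count,
                             unsigned long attr);

    static int early_double_map(unsigned long *tbl0, unsigned long *tbl1,
                                unsigned long va, unsigned long pv_off,
                                unsigned long count, unsigned long attr)
    {
        int ret;

        /* high alias: linked VA -> loaded PA, used once the MMU is on */
        ret = map_2m_blocks(tbl1, va, va + pv_off, count, attr);
        if (ret)
            return ret;

        /* identity alias: loaded PA -> loaded PA, keeps the current PC
         * translatable across the MMU enable itself */
        return map_2m_blocks(tbl0, va + pv_off, va + pv_off, count, attr);
    }
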
/libcpu/mips/gs264/
mmu.c
252 mmu_info->pv_off = pv_off; in rt_hw_mmu_map_init()
282 mmu_l2 = (size_t *)((*mmu_l1 & ~ARCH_PAGE_TBL_MASK) - mmu_info->pv_off); in find_vaddr()
347 mmu_l2 = (size_t *)((*mmu_l1 & ~ARCH_PAGE_TBL_MASK) - mmu_info->pv_off); in check_vaddr()
452 *mmu_l1 = (((size_t)mmu_l2 + mmu_info->pv_off) | 0x1); in __rt_hw_mmu_map()
570 loop_pa = (size_t)rt_pages_alloc(0) + mmu_info->pv_off; in __rt_hw_mmu_map_auto()
593 *mmu_l1 = (((size_t)mmu_l2 + mmu_info->pv_off) | 0x1); in __rt_hw_mmu_map_auto()
622 pa -= mmu_info->pv_off; in __rt_hw_mmu_map_auto()
764 mmu_l2 = (size_t *)((tmp & ~ARCH_PAGE_TBL_MASK) - mmu_info->pv_off); in _rt_hw_mmu_v2p()
806 void init_mm_setup(unsigned int *mtbl, unsigned int size, unsigned int pv_off) in init_mm_setup() argument
813 mtbl[va] = ((va << 20) + pv_off) | NORMAL_MEM; in init_mm_setup()
[all …]
mmu.h
94 size_t pv_off; member
100 …w_mmu_map_init(rt_mmu_info *mmu_info, void* v_address, size_t size, size_t *vtable, size_t pv_off);
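
In the gs264 walker the L1 entries store physical table addresses (plus a valid bit), so each dereference strips the flag bits and subtracts mmu_info->pv_off to obtain a pointer the kernel can actually follow, and each install adds pv_off back before setting the valid bit. A hedged sketch of that pair of conversions; the mask value and the reduced struct are assumptions for illustration only:

    /* Sketch only: ARCH_PAGE_TBL_MASK is assumed to cover the flag bits of an
     * L1 entry, and the struct is reduced to the single field used here. */
    #include <stddef.h>

    #define ARCH_PAGE_TBL_MASK 0xfffUL          /* assumed flag-bit mask */

    typedef struct { size_t pv_off; } mmu_info_sketch;

    static size_t *l2_table_from_entry(mmu_info_sketch *mi, size_t l1_entry)
    {
        /* stored value is physical: strip flags, then PA -> VA */
        return (size_t *)((l1_entry & ~ARCH_PAGE_TBL_MASK) - mi->pv_off);
    }

    static size_t entry_from_l2_table(mmu_info_sketch *mi, size_t *mmu_l2)
    {
        /* the other direction: VA -> PA, then mark the entry valid (bit 0) */
        return ((size_t)mmu_l2 + mi->pv_off) | 0x1;
    }
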
/libcpu/arm/cortex-a/
mmu.c
61 void init_mm_setup(unsigned int *mtbl, unsigned int size, unsigned int pv_off) in init_mm_setup() argument
71 mtbl[va] = ((va << 20) + pv_off) | NORMAL_MEM; in init_mm_setup()
73 … else if (vaddr >= (KERNEL_VADDR_START + pv_off) && vaddr - (KERNEL_VADDR_START + pv_off) < size) in init_mm_setup()
101 rt_uint32_t pv_off) in rt_hw_mem_setup_early() argument
109 init_mm_setup(early_mmu_talbe, size, pv_off); in rt_hw_mem_setup_early()
117 _init_map_section(early_mmu_talbe, va, size, va + pv_off, normal_attr); in rt_hw_mem_setup_early()
171 …mmu_map_init(struct rt_aspace *aspace, void* v_address, size_t size, size_t *vtable, size_t pv_off) in rt_hw_mmu_map_init() argument
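
init_mm_setup() on Cortex-A fills a flat table of 1 MiB section descriptors: the table index is the top 12 bits of the virtual address, and the physical section base is that index shifted back up plus pv_off. A compact sketch of the fill loop; NORMAL_MEM and the loop bounds are placeholders, not the port's real values:

    /* Sketch only: NORMAL_MEM stands in for the port's section attribute bits. */
    #define NORMAL_MEM 0x1UL   /* placeholder attribute, not the real encoding */

    static void fill_sections(unsigned int *mtbl, unsigned int pv_off,
                              unsigned int first_mb, unsigned int count_mb)
    {
        for (unsigned int idx = first_mb; idx < first_mb + count_mb; idx++)
        {
            /* each index covers 1 MiB: VA = idx << 20, PA = VA + pv_off */
            mtbl[idx] = ((idx << 20) + pv_off) | NORMAL_MEM;
        }
    }
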
start_gcc.S
43 pv_off .req r11 /* Used to store the offset between physical address and the virtual address … label
50 get_pvoff r0, pv_off
66 get_phy r0, svc_stack_top, pv_off
85 get_phy r1, __bss_start, pv_off
86 get_phy r2, __bss_end, pv_off
182 get_phy r0, init_mtbl, pv_off
183 mov r1, pv_off
218 get_phy r0, init_mtbl, pv_off
273 mov r0 ,pv_off
295 adr pv_off, _reset
[all …]
mmu.h
116 …mu_map_init(struct rt_aspace *aspace, void *v_address, size_t size, size_t *vtable, size_t pv_off);
/libcpu/aarch64/common/include/
mmu.h
108 unsigned long size, unsigned long pv_off);
112 …t_hw_mmu_map_init(rt_aspace_t aspace, void *v_address, size_t size, size_t *vtable, size_t pv_off);
