
Searched refs:vaddr_t (Results 1 – 25 of 49) sorted by relevance


/lk-master/arch/arm64/
thread.c  19 vaddr_t lr;
23 vaddr_t r18;
24 vaddr_t r19;
25 vaddr_t r20;
26 vaddr_t r21;
27 vaddr_t r22;
28 vaddr_t r23;
29 vaddr_t r24;
30 vaddr_t r25;
31 vaddr_t r26;
[all …]
mmu.c  91 vaddr_t vaddr_rem; in arch_mmu_query()
295 static void arm64_mmu_unmap_pt(vaddr_t vaddr, vaddr_t vaddr_rel, in arm64_mmu_unmap_pt()
300 vaddr_t index; in arm64_mmu_unmap_pt()
302 vaddr_t vaddr_rem; in arm64_mmu_unmap_pt()
303 vaddr_t block_size; in arm64_mmu_unmap_pt()
304 vaddr_t block_mask; in arm64_mmu_unmap_pt()
352 static int arm64_mmu_map_pt(vaddr_t vaddr_in, vaddr_t vaddr_rel_in, in arm64_mmu_map_pt()
359 vaddr_t index; in arm64_mmu_map_pt()
365 vaddr_t vaddr_rem; in arm64_mmu_map_pt()
366 vaddr_t block_size; in arm64_mmu_map_pt()
[all …]
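The map/unmap hits above all derive a table index and a remainder from the virtual address at each level before recursing. A minimal sketch of that split, assuming a power-of-two block size per level (names here are illustrative, not the arm64_mmu_* internals):

    #include <stdint.h>

    typedef uintptr_t vaddr_t;

    /* Split a level-relative virtual address into the index that selects a
     * page-table entry and the remainder passed to the next level down.
     * block_size must be a power of two (e.g. 4 KiB, 2 MiB, 1 GiB). */
    static void split_vaddr(vaddr_t vaddr_rel, vaddr_t block_size,
                            vaddr_t *index, vaddr_t *vaddr_rem) {
        vaddr_t block_mask = block_size - 1;
        *index = vaddr_rel / block_size;     /* which entry in this table */
        *vaddr_rem = vaddr_rel & block_mask; /* offset within that block */
    }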
arch.c  105 void arch_enter_uspace(vaddr_t entry_point, vaddr_t user_stack_top) { in arch_enter_uspace()
110 vaddr_t kernel_stack_top = (uintptr_t)ct->stack + ct->stack_size; in arch_enter_uspace()
/lk-master/arch/arm/arm/
thread.c  17 vaddr_t r4;
18 vaddr_t r5;
19 vaddr_t r6;
20 vaddr_t r7;
21 vaddr_t r8;
22 vaddr_t r9;
23 vaddr_t r10;
24 vaddr_t r11;
25 vaddr_t lr;
51 vaddr_t stack_top = (vaddr_t)t->stack + t->stack_size; in arch_thread_initialize()
[all …]
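The thread.c hits are the fields of the ARM context-switch frame: the callee-saved registers are stored as vaddr_t-sized slots, and arch_thread_initialize starts the stack pointer at the top of the allocation. A hypothetical frame in the same spirit (the real struct may carry more or differently ordered fields):

    #include <stdint.h>
    #include <stddef.h>

    typedef uintptr_t vaddr_t;

    /* Hypothetical frame holding the ARM callee-saved registers visible in
     * the hits above; the kernel's actual context frame may differ. */
    struct context_frame {
        vaddr_t r4, r5, r6, r7, r8, r9, r10, r11;
        vaddr_t lr;
    };

    /* Initial stack top as computed in the arch_thread_initialize() hit:
     * the address just past the end of the thread's stack allocation. */
    static inline vaddr_t initial_stack_top(void *stack, size_t stack_size) {
        return (vaddr_t)stack + stack_size;
    }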
mmu.c  163 arm_invalidate_tlb_mva_no_barrier((vaddr_t)index * SECTION_SIZE); in arm_mmu_unmap_l1_entry()
180 vaddr_t va = map->virt; in arm_mmu_init()
235 status_t arch_mmu_query(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t *paddr, uint *flags) { in arch_mmu_query()
395 ret = arm_vtop((vaddr_t)l2_va, &pa); in get_l2_table()
399 DEBUG_ASSERT(IS_PAGE_ALIGNED((vaddr_t)l2_va)); in get_l2_table()
451 vaddr_t arch_mmu_pick_spot(vaddr_t base, uint prev_region_flags, in arch_mmu_pick_spot()
452 vaddr_t end, uint next_region_flags, in arch_mmu_pick_spot()
453 vaddr_t align, size_t size, uint flags) { in arch_mmu_pick_spot()
457 vaddr_t spot; in arch_mmu_pick_spot()
466 vaddr_t spot_end = spot + size - 1; in arch_mmu_pick_spot()
[all …]
/lk-master/arch/include/arch/
mmu.h  38 status_t arch_mmu_init_aspace(arch_aspace_t *aspace, vaddr_t base, size_t size, uint flags) __NONNU…
42 int arch_mmu_map(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t paddr, uint count, uint flags) __NON…
43 int arch_mmu_unmap(arch_aspace_t *aspace, vaddr_t vaddr, uint count) __NONNULL((1));
44 status_t arch_mmu_query(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t *paddr, uint *flags) __NONNUL…
46 vaddr_t arch_mmu_pick_spot(arch_aspace_t *aspace,
47 vaddr_t base, uint prev_region_arch_mmu_flags,
48 vaddr_t end, uint next_region_arch_mmu_flags,
49 vaddr_t align, size_t size, uint arch_mmu_flags) __NONNULL((1));
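These prototypes form the per-architecture MMU interface that the kernel VM layer drives. A minimal usage sketch under the prototypes shown above; the return-value conventions and the flag argument are treated generically here, so the error handling is illustrative rather than the kernel's exact convention:

    #include <stdint.h>

    typedef uintptr_t vaddr_t;
    typedef uintptr_t paddr_t;
    typedef int status_t;
    typedef unsigned int uint;
    typedef struct arch_aspace arch_aspace_t;

    /* Prototypes as listed above (the __NONNULL attributes omitted). */
    int arch_mmu_map(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t paddr, uint count, uint flags);
    int arch_mmu_unmap(arch_aspace_t *aspace, vaddr_t vaddr, uint count);
    status_t arch_mmu_query(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t *paddr, uint *flags);

    /* Map `count` pages, confirm the translation, then tear it down.
     * A negative return is taken as failure and 0 as success (illustrative). */
    static int map_check_unmap(arch_aspace_t *as, vaddr_t va, paddr_t pa,
                               uint count, uint mmu_flags) {
        if (arch_mmu_map(as, va, pa, count, mmu_flags) < 0)
            return -1;

        paddr_t found_pa;
        uint found_flags;
        if (arch_mmu_query(as, va, &found_pa, &found_flags) != 0 || found_pa != pa)
            return -1;

        return arch_mmu_unmap(as, va, count);
    }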
/lk-master/kernel/include/kernel/
vm.h  52 vaddr_t virt;
82 #define KERNEL_ASPACE_BASE ((vaddr_t)0x80000000UL)
85 #define KERNEL_ASPACE_SIZE ((vaddr_t)0x80000000UL)
90 static inline bool is_kernel_address(vaddr_t va) { in is_kernel_address()
91 …return (va >= (vaddr_t)KERNEL_ASPACE_BASE && va <= ((vaddr_t)KERNEL_ASPACE_BASE + ((vaddr_t)KERNEL… in is_kernel_address()
96 #define USER_ASPACE_BASE ((vaddr_t)0x01000000UL)
99 #define USER_ASPACE_SIZE ((vaddr_t)KERNEL_ASPACE_BASE - USER_ASPACE_BASE - 0x01000000UL)
104 static inline bool is_user_address(vaddr_t va) { in is_user_address()
191 vaddr_t base;
208 vaddr_t base;
[all …]
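vm.h pins the kernel window to the upper 2 GiB (base and size are both 0x80000000) and carves the user window out of the space below it, leaving a 16 MiB gap under the kernel base. A worked check of both predicates, assuming those constants; is_user_address's body is truncated above, so the user-range test below is an inferred mirror of the kernel one:

    #include <stdbool.h>
    #include <stdint.h>

    typedef uintptr_t vaddr_t;

    #define KERNEL_ASPACE_BASE ((vaddr_t)0x80000000UL)
    #define KERNEL_ASPACE_SIZE ((vaddr_t)0x80000000UL)
    #define USER_ASPACE_BASE   ((vaddr_t)0x01000000UL)
    #define USER_ASPACE_SIZE   ((vaddr_t)KERNEL_ASPACE_BASE - USER_ASPACE_BASE - 0x01000000UL)

    /* Kernel test as shown in the vm.h hit above. */
    static inline bool is_kernel_address(vaddr_t va) {
        return (va >= KERNEL_ASPACE_BASE &&
                va <= (KERNEL_ASPACE_BASE + (KERNEL_ASPACE_SIZE - 1)));
    }

    /* User test inferred as the same pattern over the user window. */
    static inline bool is_user_address(vaddr_t va) {
        return (va >= USER_ASPACE_BASE &&
                va <= (USER_ASPACE_BASE + (USER_ASPACE_SIZE - 1)));
    }

    /* Example: 0x80000000 is the first kernel address; 0x7f000000 falls in
     * the 16 MiB guard gap below it, so neither predicate accepts it. */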
/lk-master/arch/x86/64/
mmu.c  46 static bool x86_mmu_check_vaddr(vaddr_t vaddr) { in x86_mmu_check_vaddr()
258 vaddr_t vaddr, arch_flags_t in_flags, in x86_mmu_check_mapping()
365 vaddr_t vaddr, arch_flags_t mmu_flags) { in x86_mmu_add_mapping()
467 static void x86_mmu_unmap_entry(vaddr_t vaddr, int level, vaddr_t table_entry) { in x86_mmu_unmap_entry()
469 vaddr_t *table, *next_table_addr, value; in x86_mmu_unmap_entry()
474 table = (vaddr_t *)(table_entry & X86_PG_FRAME); in x86_mmu_unmap_entry()
519 x86_mmu_unmap_entry(vaddr, level, (vaddr_t)next_table_addr); in x86_mmu_unmap_entry()
524 next_table_addr = (vaddr_t *)((vaddr_t)(next_table_addr) & X86_PG_FRAME); in x86_mmu_unmap_entry()
543 status_t x86_mmu_unmap(map_addr_t pml4, vaddr_t vaddr, uint count) { in x86_mmu_unmap()
544 vaddr_t next_aligned_v_addr; in x86_mmu_unmap()
[all …]
/lk-master/kernel/vm/
vmm.c  169 vaddr_t *pva, vaddr_t align, size_t size, in check_gap()
221 vaddr_t spot; in alloc_spot()
271 if (vaddr == (vaddr_t)-1) { in alloc_region()
279 r->base = (vaddr_t)vaddr; in alloc_region()
346 vaddr_t vaddr = 0; in vmm_alloc_physical()
354 vaddr = (vaddr_t)*ptr; in vmm_alloc_physical()
398 vaddr_t vaddr = 0; in vmm_alloc_contiguous()
407 vaddr = (vaddr_t)*ptr; in vmm_alloc_contiguous()
472 vaddr_t vaddr = 0; in vmm_alloc()
481 vaddr = (vaddr_t)*ptr; in vmm_alloc()
[all …]
vm.c  28 static void mark_pages_in_use(vaddr_t va, size_t len) { in mark_pages_in_use()
64 mark_pages_in_use((vaddr_t)&_start, ((uintptr_t)&_end - (uintptr_t)&_start)); in vm_init_preheap()
115 status_t rc = arch_mmu_query(&aspace->arch_aspace, (vaddr_t)ptr, &pa, NULL); in vaddr_to_paddr()
123 if (is_kernel_address((vaddr_t)ptr)) { in vaddr_to_aspace()
125 } else if (is_user_address((vaddr_t)ptr)) { in vaddr_to_aspace()
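vaddr_to_paddr resolves a pointer through arch_mmu_query on the owning address space, and vaddr_to_aspace picks kernel versus user with the predicates from vm.h. A condensed sketch of that lookup path; the aspace globals and the int return convention are stand-ins, not the exact vm.c code:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t vaddr_t;
    typedef uintptr_t paddr_t;
    typedef int status_t;
    typedef unsigned int uint;
    typedef struct arch_aspace arch_aspace_t;

    status_t arch_mmu_query(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t *paddr, uint *flags);
    bool is_kernel_address(vaddr_t va);
    bool is_user_address(vaddr_t va);

    /* Hypothetical globals standing in for the kernel and current user aspaces. */
    extern arch_aspace_t *kernel_arch_aspace;
    extern arch_aspace_t *user_arch_aspace;

    /* Pick the aspace that owns `ptr`, then ask the MMU for its physical
     * address; returns 0 and fills *pa_out on success, -1 otherwise. */
    static int lookup_paddr(void *ptr, paddr_t *pa_out) {
        arch_aspace_t *as;

        if (is_kernel_address((vaddr_t)ptr))
            as = kernel_arch_aspace;
        else if (is_user_address((vaddr_t)ptr))
            as = user_arch_aspace;
        else
            return -1;

        return (arch_mmu_query(as, (vaddr_t)ptr, pa_out, NULL) == 0) ? 0 : -1;
    }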
/lk-master/arch/x86/
thread.c  38 vaddr_t stack_top = (vaddr_t)t->stack + t->stack_size; in arch_thread_initialize()
61 frame->eip = (vaddr_t) &initial_thread_func; in arch_thread_initialize()
66 frame->rip = (vaddr_t) &initial_thread_func; in arch_thread_initialize()
76 t->arch.sp = (vaddr_t)frame; in arch_thread_initialize()
arch.c  66 void arch_enter_uspace(vaddr_t entry_point, vaddr_t user_stack_top) { in arch_enter_uspace()
73 vaddr_t kernel_stack_top = (uintptr_t)ct->stack + ct->stack_size; in arch_enter_uspace()
/lk-master/arch/x86/include/arch/x86/
mmu.h  113 vaddr_t start_vaddr;
132 vaddr_t vaddr, arch_flags_t in_flags,
137 vaddr_t vaddr, arch_flags_t in_flags,
142 status_t x86_mmu_get_mapping(map_addr_t init_table, vaddr_t vaddr, uint32_t *ret_level,
147 vaddr_t vaddr, arch_flags_t flags);
148 status_t x86_mmu_unmap(map_addr_t init_table, vaddr_t vaddr, uint count);
/lk-master/arch/x86/32/
mmu.c  40 static inline map_addr_t get_pdp_entry_from_pdp_table(vaddr_t vaddr, map_addr_t pdpt) { in get_pdp_entry_from_pdp_table()
65 static inline map_addr_t get_pd_entry_from_pd_table(vaddr_t vaddr, map_addr_t pdt) { in get_pd_entry_from_pd_table()
74 static inline map_addr_t get_pt_entry_from_page_table(vaddr_t vaddr, map_addr_t pt) { in get_pt_entry_from_page_table()
138 status_t x86_mmu_get_mapping(map_addr_t init_table, vaddr_t vaddr, uint32_t *ret_level, in x86_mmu_get_mapping()
216 vaddr_t vaddr, arch_flags_t in_flags, in x86_mmu_check_mapping()
305 vaddr_t vaddr, arch_flags_t mmu_flags) { in x86_mmu_add_mapping()
384 static void x86_mmu_unmap_entry(vaddr_t vaddr, int level, map_addr_t table_entry) { in x86_mmu_unmap_entry()
441 status_t x86_mmu_unmap(map_addr_t init_table, vaddr_t vaddr, uint count) { in x86_mmu_unmap()
442 vaddr_t next_aligned_v_addr; in x86_mmu_unmap()
464 int arch_mmu_unmap(arch_aspace_t *aspace, vaddr_t vaddr, uint count) { in arch_mmu_unmap()
[all …]
/lk-master/arch/x86/include/arch/
arch_thread.h  14 vaddr_t sp;
16 vaddr_t *fpu_states;
/lk-master/arch/riscv/
thread.c  48 vaddr_t stack_top = ROUNDDOWN((vaddr_t)t->stack + t->stack_size, 16); in arch_thread_initialize()
51 t->arch.cs_frame.ra = (vaddr_t)&initial_thread_func; in arch_thread_initialize()
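The RISC-V calling convention requires a 16-byte-aligned stack pointer, which is why the stack top is rounded down before use. A tiny worked example, assuming the usual power-of-two round-down macro (the definition below is an assumption, not copied from the kernel headers):

    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t vaddr_t;

    /* Common power-of-two round-down; assumed to match the kernel's ROUNDDOWN. */
    #define ROUNDDOWN(a, b) ((a) & ~((vaddr_t)(b) - 1))

    /* If the stack buffer ends at 0x80001238, the usable ABI-aligned top is
     * 0x80001230: the low 8 bytes are sacrificed to 16-byte alignment. */
    static vaddr_t aligned_stack_top(void *stack, size_t stack_size) {
        return ROUNDDOWN((vaddr_t)stack + stack_size, 16);
    }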
mmu.cpp  75 constexpr uint vaddr_to_index(vaddr_t va, uint level) { in vaddr_to_index()
129 void riscv_tlb_flush_vma_range(vaddr_t base, size_t count) { in riscv_tlb_flush_vma_range()
200 status_t arch_mmu_init_aspace(arch_aspace_t *aspace, vaddr_t base, size_t size, uint flags) { in arch_mmu_init_aspace()
310 using page_walk_cb = walk_cb_ret(*)(uint level, uint index, riscv_pte_t pte, vaddr_t *vaddr);
314 int riscv_pt_walk(arch_aspace_t *aspace, vaddr_t vaddr, F callback) { in riscv_pt_walk()
396 int arch_mmu_map(arch_aspace_t *aspace, const vaddr_t _vaddr, paddr_t paddr, uint count, const uint… in arch_mmu_map()
414 …auto map_cb = [&paddr, &count, aspace, flags](uint level, uint index, riscv_pte_t pte, vaddr_t *va… in arch_mmu_map()
466 status_t arch_mmu_query(arch_aspace_t *aspace, const vaddr_t _vaddr, paddr_t *paddr, uint *flags) { in arch_mmu_query()
480 …auto query_cb = [paddr, flags](uint level, uint index, riscv_pte_t pte, vaddr_t *vaddr) -> walk_cb… in arch_mmu_query()
514 int arch_mmu_unmap(arch_aspace_t *aspace, const vaddr_t _vaddr, const uint _count) { in arch_mmu_unmap()
[all …]
/lk-master/app/lkboot/
dcc.c  97 arch_invalidate_cache_range((vaddr_t)htod_buffer, DCC_BUFLEN); in dcc_process_opcode()
102 arch_invalidate_cache_range((vaddr_t)dtoh_buffer, DCC_BUFLEN); in dcc_process_opcode()
145 arch_invalidate_cache_range((vaddr_t)htod_buffer, DCC_BUFLEN); in dcc_read()
169 arch_clean_cache_range((vaddr_t)dtoh_buffer, DCC_BUFLEN); in dcc_write()
232 arch_clean_cache_range((vaddr_t)&buffer_desc, sizeof(buffer_desc)); in lkboot_dcc_init()
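The DCC buffers are shared with the host debugger, so the code invalidates the host-to-device buffer before reading what the host wrote and cleans the device-to-host buffer before handing it off; both cache operations take a start address cast to vaddr_t plus a byte length. A hedged sketch of that discipline; the buffer names mirror the hits, but the prototypes are declared locally with vaddr_t to match the casts above and DCC_BUFLEN is given a placeholder value:

    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t vaddr_t;

    /* Cache maintenance entry points; the kernel headers provide the real
     * prototypes, declared here only so the sketch is self-contained. */
    void arch_clean_cache_range(vaddr_t start, size_t len);
    void arch_invalidate_cache_range(vaddr_t start, size_t len);

    #define DCC_BUFLEN 1024 /* placeholder size, not the real constant */

    static uint8_t htod_buffer[DCC_BUFLEN]; /* written by the host, read by us */
    static uint8_t dtoh_buffer[DCC_BUFLEN]; /* written by us, read by the host */

    static void dcc_exchange_example(void) {
        /* The host may have written behind the cache: drop any stale cached
         * lines before reading the host-to-device buffer. */
        arch_invalidate_cache_range((vaddr_t)htod_buffer, DCC_BUFLEN);

        /* ... consume htod_buffer, fill dtoh_buffer ... */

        /* Push our writes out to memory so the host sees them. */
        arch_clean_cache_range((vaddr_t)dtoh_buffer, DCC_BUFLEN);
    }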
/lk-master/arch/include/
arch.h  19 void arch_enter_uspace(vaddr_t entry_point, vaddr_t user_stack_top) __NO_RETURN;
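arch_enter_uspace is the one-way door into user mode: it takes the user entry point and the top of the user stack, both as vaddr_t, and never returns. A minimal call-site sketch; the function name and addresses around it are hypothetical:

    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t vaddr_t;

    /* As declared in arch.h; __NO_RETURN expands to a noreturn attribute. */
    void arch_enter_uspace(vaddr_t entry_point, vaddr_t user_stack_top)
        __attribute__((noreturn));

    static void start_first_user_thread(vaddr_t entry, vaddr_t stack_base,
                                        size_t stack_size) {
        /* Stacks grow down, so pass the address just past the mapping's end. */
        arch_enter_uspace(entry, stack_base + stack_size);
        /* not reached */
    }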
/lk-master/arch/arm/arm/include/arch/
aspace.h  21 vaddr_t base;
28 static inline bool arch_mmu_is_valid_vaddr(struct arch_aspace *aspace, vaddr_t vaddr) { in arch_mmu_is_valid_vaddr()
/lk-master/arch/arm/include/arch/arm/
mmu.h  230 static inline void arm_invalidate_tlb_mva_no_barrier(vaddr_t va) { in arm_invalidate_tlb_mva_no_barrier()
238 static inline void arm_invalidate_tlb_mva(vaddr_t va) { in arm_invalidate_tlb_mva()
259 static inline void arm_invalidate_tlb_mva_asid_no_barrier(vaddr_t va, uint8_t asid) { in arm_invalidate_tlb_mva_asid_no_barrier()
267 static inline void arm_invalidate_tlb_mva_asid(vaddr_t va, uint8_t asid) { in arm_invalidate_tlb_mva_asid()
/lk-master/arch/or1k/
mmu.c  28 void or1k_invalidate_tlb(vaddr_t vaddr, uint count) { in or1k_invalidate_tlb()
71 status_t arch_mmu_query(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t *paddr, uint *flags) { in arch_mmu_query()
105 int arch_mmu_unmap(arch_aspace_t *aspace, vaddr_t vaddr, uint count) { in arch_mmu_unmap()
134 int arch_mmu_map(arch_aspace_t *aspace, vaddr_t vaddr, paddr_t paddr, uint count, uint flags) { in arch_mmu_map()
204 status_t arch_mmu_init_aspace(arch_aspace_t *aspace, vaddr_t base, size_t size, uint flags) { in arch_mmu_init_aspace()
/lk-master/lib/heap/miniheap/
miniheap.c  71 …dprintf(INFO, "\t\tbase %p, end 0x%lx, len 0x%zx\n", chunk, (vaddr_t)chunk + chunk->len, chunk->le… in dump_free_chunk()
92 vaddr_t chunk_end = (vaddr_t)chunk + chunk->len; in heap_insert_free_chunk()
106 DEBUG_ASSERT(chunk_end <= (vaddr_t)next_chunk); in heap_insert_free_chunk()
121 if ((vaddr_t)last_chunk + last_chunk->len == (vaddr_t)chunk) { in heap_insert_free_chunk()
136 if ((vaddr_t)chunk + chunk->len == (vaddr_t)next_chunk) { in heap_insert_free_chunk()
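heap_insert_free_chunk keeps the free list address-ordered and merges neighbours whose vaddr_t ranges touch: a chunk whose base plus length equals the next chunk's base can be coalesced with it. A small sketch of that adjacency test, with the chunk struct reduced to the one field the comparison needs (the real header also carries list linkage):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t vaddr_t;

    /* Reduced free-chunk header for illustration only. */
    struct free_heap_chunk {
        size_t len; /* length of the chunk, header included */
    };

    /* True when `a` ends exactly where `b` begins, so the two free chunks
     * can be merged into one, as in the two coalescing checks above. */
    static bool chunks_adjacent(const struct free_heap_chunk *a,
                                const struct free_heap_chunk *b) {
        return (vaddr_t)a + a->len == (vaddr_t)b;
    }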
/lk-master/arch/arm64/include/arch/
arm64.h  35 void arm64_context_switch(vaddr_t *old_sp, vaddr_t new_sp);
/lk-master/arch/mips/
thread.c  46 t->arch.cs_frame.ra = (vaddr_t)&initial_thread_func; in arch_thread_initialize()
47 t->arch.cs_frame.sp = (vaddr_t)t->stack + t->stack_size; in arch_thread_initialize()

Completed in 31 milliseconds
