
Searched refs:base (Results 1 – 15 of 15) sorted by relevance

/arch/arm/arm/include/arch/
aspace.h  21 vaddr_t base; member
29 return (vaddr >= aspace->base && vaddr <= aspace->base + aspace->size - 1); in arch_mmu_is_valid_vaddr()
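The range check above writes the upper bound as base + size - 1 so a region that ends at the very top of the address space does not wrap to zero. A minimal sketch of the same idiom (the struct and helper names here are illustrative stand-ins, not the kernel's types):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t vaddr_t;

    struct example_aspace {
        vaddr_t base;   /* first valid virtual address */
        size_t  size;   /* length of the region in bytes */
    };

    /* Inclusive-bound check: base + size - 1 stays representable even when the
     * region runs up to the top of the address space, whereas base + size
     * would overflow and make the comparison always false. */
    static bool example_is_valid_vaddr(const struct example_aspace *as, vaddr_t va) {
        return va >= as->base && va <= as->base + as->size - 1;
    }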
/arch/x86/
descriptor.c  15 void x86_set_gdt_descriptor(seg_sel_t sel, void *base, uint32_t limit, in x86_set_gdt_descriptor() argument
57 desc.seg_desc_legacy.base_15_0 = ((uintptr_t) base) & 0x0000ffff; in x86_set_gdt_descriptor()
58 desc.seg_desc_legacy.base_23_16 = (((uintptr_t) base) & 0x00ff0000) >> 16; in x86_set_gdt_descriptor()
59 desc.seg_desc_legacy.base_31_24 = ((uintptr_t) base) >> 24; in x86_set_gdt_descriptor()
84 desc.seg_desc_64.base_63_32 = (uint32_t)((uintptr_t) base >> 32); in x86_set_gdt_descriptor()
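descriptor.c scatters the base across base_15_0 / base_23_16 / base_31_24 (plus base_63_32 for 64-bit system descriptors) because the legacy segment descriptor layout is not contiguous. A hedged sketch of the same slicing with plain masks and shifts (the struct is a simplified stand-in, not the kernel's real seg_desc layout):

    #include <stdint.h>

    /* Simplified stand-in for the legacy descriptor's base fields. */
    struct base_fields {
        uint16_t base_15_0;
        uint8_t  base_23_16;
        uint8_t  base_31_24;
    };

    static struct base_fields split_base(uintptr_t base) {
        struct base_fields f;
        f.base_15_0  = (uint16_t)(base & 0x0000ffffu);          /* bits 15..0  */
        f.base_23_16 = (uint8_t)((base & 0x00ff0000u) >> 16);   /* bits 23..16 */
        f.base_31_24 = (uint8_t)((base >> 24) & 0xffu);         /* bits 31..24 */
        return f;
    }

    /* Reassembling the pieces recovers the low 32 bits of the original base. */
    static uint32_t join_base(struct base_fields f) {
        return (uint32_t)f.base_15_0 |
               ((uint32_t)f.base_23_16 << 16) |
               ((uint32_t)f.base_31_24 << 24);
    }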
/arch/riscv/
mmu.cpp  133 sbi_rfence_vma(&hart_mask, base, count * PAGE_SIZE); in riscv_tlb_flush_vma_range()
138 asm volatile("sfence.vma %0, zero" :: "r"(base)); in riscv_tlb_flush_vma_range()
139 base += PAGE_SIZE; in riscv_tlb_flush_vma_range()
206 DEBUG_ASSERT(base + size - 1 > base); in arch_mmu_init_aspace()
213 DEBUG_ASSERT(base == KERNEL_ASPACE_BASE); in arch_mmu_init_aspace()
217 aspace->base = base; in arch_mmu_init_aspace()
227 DEBUG_ASSERT(base == USER_ASPACE_BASE); in arch_mmu_init_aspace()
230 aspace->base = base; in arch_mmu_init_aspace()
405 if (_vaddr < aspace->base || _vaddr > aspace->base + aspace->size - 1) { in arch_mmu_map()
476 if (_vaddr < aspace->base || _vaddr > aspace->base + aspace->size - 1) { in arch_mmu_query()
[all …]
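The flush above either hands the whole range to the SBI remote-fence call or, on the local hart, issues one sfence.vma per page. A rough sketch of the local loop (riscv64 inline asm, assuming a page-aligned base; the remote-hart path and error handling are omitted):

    #include <stddef.h>
    #include <stdint.h>

    #define EXAMPLE_PAGE_SIZE 4096UL

    /* Flush one page's translations at a time on the current hart.
     * "sfence.vma rs1, zero" fences only the address in rs1, for all ASIDs. */
    static void example_tlb_flush_vma_range(uintptr_t base, size_t count) {
        while (count-- > 0) {
            __asm__ volatile("sfence.vma %0, zero" :: "r"(base) : "memory");
            base += EXAMPLE_PAGE_SIZE;
        }
    }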
/arch/or1k/
mmu.c  204 status_t arch_mmu_init_aspace(arch_aspace_t *aspace, vaddr_t base, size_t size, uint flags) { in arch_mmu_init_aspace() argument
205 LTRACEF("aspace %p, base %#lx, size %#zx, flags %#x\n", aspace, base, size, flags); in arch_mmu_init_aspace()
211 DEBUG_ASSERT(base + size - 1 > base); in arch_mmu_init_aspace()
217 DEBUG_ASSERT(base == KERNEL_ASPACE_BASE); in arch_mmu_init_aspace()
220 aspace->base = base; in arch_mmu_init_aspace()
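The or1k hit, like the x86/32, x86/64, and arm64 blocks further down, follows the same arch_mmu_init_aspace shape: assert the range does not wrap, assert the base matches the fixed kernel or user aspace window, then record base and size in the aspace. A condensed, illustrative version of that pattern (the window constants, flag, and assert here are placeholders, not the kernel's real values):

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t vaddr_t;

    /* Placeholder window constants for the sketch. */
    #define EXAMPLE_KERNEL_ASPACE_BASE 0x80000000UL
    #define EXAMPLE_USER_ASPACE_BASE   0x00001000UL
    #define EXAMPLE_FLAG_KERNEL        (1u << 0)

    struct example_aspace { vaddr_t base; size_t size; };

    static int example_init_aspace(struct example_aspace *as,
                                   vaddr_t base, size_t size, unsigned flags) {
        /* base + size - 1 > base rejects zero-sized and wrapping ranges. */
        assert(base + size - 1 > base);

        if (flags & EXAMPLE_FLAG_KERNEL) {
            assert(base == EXAMPLE_KERNEL_ASPACE_BASE);   /* fixed kernel window */
        } else {
            assert(base == EXAMPLE_USER_ASPACE_BASE);     /* fixed user window */
        }

        as->base = base;
        as->size = size;
        return 0;
    }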
/arch/arm/arm/
mmu.c  447 vaddr_t arch_mmu_pick_spot(vaddr_t base, uint prev_region_flags, in arch_mmu_pick_spot() argument
451 base, end, align, size, flags); in arch_mmu_pick_spot()
457 spot = ALIGN(base, align); in arch_mmu_pick_spot()
459 spot = ALIGN(base, SECTION_SIZE); in arch_mmu_pick_spot()
668 status_t arch_mmu_init_aspace(arch_aspace_t *aspace, vaddr_t base, size_t size, uint flags) { in arch_mmu_init_aspace() argument
669 LTRACEF("aspace %p, base 0x%lx, size 0x%zx, flags 0x%x\n", aspace, base, size, flags); in arch_mmu_init_aspace()
675 DEBUG_ASSERT(base + size - 1 > base); in arch_mmu_init_aspace()
680 aspace->base = base; in arch_mmu_init_aspace()
688 DEBUG_ASSERT(base < GB && (base + size) <= GB); in arch_mmu_init_aspace()
690 aspace->base = base; in arch_mmu_init_aspace()
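arch_mmu_pick_spot in the ARM mmu.c rounds the candidate address up to either the caller's alignment or to SECTION_SIZE (1 MB), depending on the mapping flags, so that section mappings can be used where possible. A small sketch of the round-up step it relies on (ALIGN written out explicitly; the flag test is a placeholder):

    #include <stdint.h>

    #define EXAMPLE_SECTION_SIZE (1024u * 1024u)   /* 1 MB ARM section */

    /* Round x up to the next multiple of a (a must be a power of two). */
    #define EXAMPLE_ALIGN(x, a) (((x) + ((a) - 1)) & ~((uintptr_t)(a) - 1))

    static uintptr_t example_pick_spot(uintptr_t base, uintptr_t align, int want_section) {
        /* Prefer section alignment when the region can use 1 MB mappings,
         * otherwise honor the requested alignment. */
        return want_section ? EXAMPLE_ALIGN(base, EXAMPLE_SECTION_SIZE)
                            : EXAMPLE_ALIGN(base, align);
    }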
/arch/include/arch/
mmu.h  45 status_t arch_mmu_init_aspace(arch_aspace_t *aspace, vaddr_t base, size_t size, uint flags) __NONNU…
54 vaddr_t base, uint prev_region_arch_mmu_flags,
/arch/arm64/include/arch/
aspace.h  24 vaddr_t base; member
/arch/x86/include/arch/
aspace.h  24 vaddr_t base; member
x86.h  268 static inline void x86_lidt(uintptr_t base) { __asm volatile("lidt (%0)" ::"r"(base) : "memory"); } in x86_lidt() argument
269 static inline void x86_lgdt(uintptr_t base) { __asm volatile("lgdt (%0)" ::"r"(base) : "memory"); } in x86_lgdt() argument
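x86_lidt()/x86_lgdt() take the address of a descriptor-table pointer, not the table itself: lgdt/lidt read a packed pseudo-descriptor holding a 16-bit limit followed by the table's base address. A hedged usage sketch (GCC/Clang syntax; table contents left out, names here are illustrative):

    #include <stdint.h>

    /* Pseudo-descriptor consumed by lgdt/lidt: 16-bit limit, then the base.
     * Must be byte-packed; limit is the table size in bytes minus one. */
    struct __attribute__((packed)) example_table_ptr {
        uint16_t  limit;
        uintptr_t base;
    };

    static inline void example_lgdt(uintptr_t ptr) {
        __asm__ volatile("lgdt (%0)" ::"r"(ptr) : "memory");
    }

    /* Usage: point the pseudo-descriptor at an in-memory GDT and load it. */
    static uint64_t example_gdt[8];

    static void example_load_gdt(void) {
        struct example_table_ptr gdtr = {
            .limit = (uint16_t)(sizeof(example_gdt) - 1),
            .base  = (uintptr_t)example_gdt,
        };
        example_lgdt((uintptr_t)&gdtr);
    }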
/arch/riscv/include/arch/
aspace.h  29 vaddr_t base; member
/arch/x86/32/
mmu.c  457 status_t arch_mmu_init_aspace(arch_aspace_t * const aspace, const vaddr_t base, const size_t size, … in arch_mmu_init_aspace() argument
460 LTRACEF("aspace %p, base %#lx, size %#zx, flags %#x\n", aspace, base, size, flags); in arch_mmu_init_aspace()
464 DEBUG_ASSERT(base + size - 1 > base); in arch_mmu_init_aspace()
470 DEBUG_ASSERT(base == KERNEL_ASPACE_BASE); in arch_mmu_init_aspace()
473 aspace->base = base; in arch_mmu_init_aspace()
478 DEBUG_ASSERT(base == USER_ASPACE_BASE); in arch_mmu_init_aspace()
481 aspace->base = base; in arch_mmu_init_aspace()
/arch/arm64/
mmu.c  38 return (vaddr >= aspace->base && vaddr <= aspace->base + aspace->size - 1); in is_valid_vaddr()
575 status_t arch_mmu_init_aspace(arch_aspace_t *aspace, vaddr_t base, size_t size, uint flags) { in arch_mmu_init_aspace() argument
576 LTRACEF("aspace %p, base 0x%lx, size 0x%zx, flags 0x%x\n", aspace, base, size, flags); in arch_mmu_init_aspace()
582 DEBUG_ASSERT(base + size - 1 > base); in arch_mmu_init_aspace()
587 DEBUG_ASSERT(base == ~0UL << MMU_KERNEL_SIZE_SHIFT); in arch_mmu_init_aspace()
590 aspace->base = base; in arch_mmu_init_aspace()
596 DEBUG_ASSERT(base + size <= 1UL << MMU_USER_SIZE_SHIFT); in arch_mmu_init_aspace()
598 aspace->base = base; in arch_mmu_init_aspace()
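The arm64 asserts derive the windows from the configured VA sizes rather than fixed constants: the kernel aspace must start at ~0UL << MMU_KERNEL_SIZE_SHIFT (the top of the canonical address space) and the user aspace must fit below 1UL << MMU_USER_SIZE_SHIFT. A tiny worked example of that arithmetic, assuming 48-bit kernel and user VA sizes (the shift values are assumptions for the sketch, not the kernel's configuration):

    #include <assert.h>
    #include <stdint.h>

    #define EXAMPLE_KERNEL_SIZE_SHIFT 48
    #define EXAMPLE_USER_SIZE_SHIFT   48

    int main(void) {
        /* Kernel window starts at the lowest address whose top bits are all 1s. */
        uint64_t kernel_base = ~UINT64_C(0) << EXAMPLE_KERNEL_SIZE_SHIFT;
        assert(kernel_base == 0xffff000000000000ULL);

        /* User window is [0, 1 << shift): a 48-bit shift gives a 256 TB limit. */
        uint64_t user_limit = UINT64_C(1) << EXAMPLE_USER_SIZE_SHIFT;
        assert(user_limit == 0x0001000000000000ULL);
        return 0;
    }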
/arch/x86/64/
mmu.c  98 return (vaddr >= aspace->base && vaddr <= aspace->base + aspace->size - 1); in is_valid_vaddr()
685 status_t arch_mmu_init_aspace(arch_aspace_t * const aspace, const vaddr_t base, const size_t size, … in arch_mmu_init_aspace() argument
688 LTRACEF("aspace %p, base %#lx, size %#zx, flags %#x\n", aspace, base, size, flags); in arch_mmu_init_aspace()
692 DEBUG_ASSERT(base + size - 1 > base); in arch_mmu_init_aspace()
697 DEBUG_ASSERT(base == KERNEL_ASPACE_BASE); in arch_mmu_init_aspace()
700 aspace->base = base; in arch_mmu_init_aspace()
705 DEBUG_ASSERT(base == USER_ASPACE_BASE); in arch_mmu_init_aspace()
708 aspace->base = base; in arch_mmu_init_aspace()
/arch/arm64/include/arch/arm64/
mmu.h  91 #define BM(base, count, val) (((val) & ((1UL << (count)) - 1)) << (base)) argument
93 #define BM(base, count, val) (((val) & ((0x1 << (count)) - 1)) << (base)) argument
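The BM(base, count, val) helper in the arm64 mmu.h builds a bit field: it masks val to count bits and shifts it up to bit position base (the 1UL and 0x1 variants differ only in the type used for the mask). A quick illustrative use, placing a hypothetical 8-bit field at bit 16:

    #include <assert.h>
    #include <stdint.h>

    /* Same shape as the snippet: keep the low `count` bits of `val`,
     * then shift them to start at bit `base`. */
    #define BM(base, count, val) (((val) & ((1UL << (count)) - 1)) << (base))

    int main(void) {
        /* Place the value 0xAB in an 8-bit field that starts at bit 16. */
        uint64_t field = BM(16, 8, 0xABUL);
        assert(field == 0x00AB0000UL);

        /* Out-of-range bits are masked off before the shift. */
        assert(BM(16, 8, 0x1FFUL) == 0x00FF0000UL);
        return 0;
    }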
/arch/x86/include/arch/x86/
descriptor.h  103 void x86_set_gdt_descriptor(seg_sel_t sel, void *base, uint32_t limit,
