
Searched refs:index (Results 1 – 10 of 10) sorted by relevance

/arch/x86/
feature.c
203 uint32_t index = i - X86_CPUID_EXT_BASE; in x86_feature_early_init() local
204 … cpuid_c(i, 0, &saved_cpuids_ext[index].a, &saved_cpuids_ext[index].b, &saved_cpuids_ext[index].c, in x86_feature_early_init()
205 &saved_cpuids_ext[index].d); in x86_feature_early_init()
211 uint32_t index = i - X86_CPUID_HYP_BASE; in x86_feature_early_init() local
212 … cpuid_c(i, 0, &saved_cpuids_hyp[index].a, &saved_cpuids_hyp[index].b, &saved_cpuids_hyp[index].c, in x86_feature_early_init()
213 &saved_cpuids_hyp[index].d); in x86_feature_early_init()
225 uint32_t index = i - X86_CPUID_HYP_BASE; in x86_feature_dump_cpuid() local
227 …saved_cpuids_hyp[index].a, saved_cpuids_hyp[index].b, saved_cpuids_hyp[index].c, saved_cpuids_hyp[ in x86_feature_dump_cpuid()
230 uint32_t index = i - X86_CPUID_EXT_BASE; in x86_feature_dump_cpuid() local
232 … saved_cpuids[index].a, saved_cpuids[index].b, saved_cpuids[index].c, saved_cpuids[index].d); in x86_feature_dump_cpuid()
descriptor.c
71 uint16_t index = sel >> 3; in x86_set_gdt_descriptor() local
72 seg_desc_t *entry = (seg_desc_t *)&_gdt[index]; in x86_set_gdt_descriptor()
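
Note: the descriptor.c hit above derives a GDT slot from a segment selector with a plain shift (index = sel >> 3). That works because the architectural selector layout keeps the requested privilege level in bits 0-1 and the table-indicator bit in bit 2, leaving bits 3-15 as the descriptor index. A minimal standalone sketch of that decomposition follows; the helper names are illustrative, not taken from the kernel source.

    #include <stdint.h>
    #include <stdio.h>

    /* x86 segment selector layout: | index (13 bits) | TI (1 bit) | RPL (2 bits) | */
    static uint16_t selector_to_index(uint16_t sel) {
        return sel >> 3;      /* drop RPL and TI, leaving the descriptor table index */
    }

    static uint8_t selector_rpl(uint16_t sel) {
        return sel & 0x3;     /* requested privilege level */
    }

    int main(void) {
        uint16_t sel = 0x10;  /* e.g. a kernel data selector: index 2, TI 0, RPL 0 */
        printf("index %u, rpl %u\n",
               (unsigned)selector_to_index(sel), (unsigned)selector_rpl(sel));
        return 0;
    }
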
/arch/x86/64/
mmu.c
114 …RACEF_LEVEL(2, "pml4_table %p, index %u, returning %#llx\n", pml4_table, index, pml4_table[index]); in get_pml4_entry_from_pml4_table()
116 return pml4_table[index]; in get_pml4_entry_from_pml4_table()
125 return pdpe_table[index]; in get_pdp_entry_from_pdp_table()
133 … LTRACEF_LEVEL(2, "pde_table %p, index %u, returning %#llx\n", pde_table, index, pde_table[index]); in get_pd_entry_from_pd_table()
134 return pde_table[index]; in get_pd_entry_from_pd_table()
142 … LTRACEF_LEVEL(2, "pte_table %p, index %u, returning %#llx\n", pte_table, index, pte_table[index]); in get_pt_entry_from_pt_table()
143 return pte_table[index]; in get_pt_entry_from_pt_table()
443 uint32_t index = 0; in x86_mmu_unmap_entry() local
480 table[index] = 0; in x86_mmu_unmap_entry()
504 table[index] = 0; in x86_mmu_unmap_entry()
[all …]
/arch/arm64/
mmu.c
95 uint index; in arch_mmu_query() local
121 index = vaddr_rem >> index_shift; in arch_mmu_query()
137 pte = page_table[index]; in arch_mmu_query()
264 pte = page_table[index]; in arm64_mmu_get_page_table()
280 page_table[index] = pte; in arm64_mmu_get_page_table()
320 vaddr_t index; in arm64_mmu_unmap_pt() local
338 pte = page_table[index]; in arm64_mmu_unmap_pt()
379 vaddr_t index; in arm64_mmu_map_pt() local
420 pte = page_table[index]; in arm64_mmu_map_pt()
423 index, pte); in arm64_mmu_map_pt()
[all …]
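
Note: in the arm64 arch_mmu_query()/map/unmap hits above, the per-level table slot is just a shifted slice of the remaining virtual address (index = vaddr_rem >> index_shift), which is then used to load page_table[index]. A rough, self-contained sketch of that computation, assuming a 4 KB granule, 48-bit virtual addresses, and 9 index bits per level; the constants and helper name below are illustrative rather than the kernel's own.

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT      12u   /* 4 KB granule (assumed) */
    #define BITS_PER_LEVEL  9u    /* 512 entries per table at that granule */

    /* Slot in the page table at a given level (0 = top) of a 4-level walk. */
    static uint32_t pt_index(uint64_t vaddr, uint32_t level) {
        uint32_t shift = PAGE_SHIFT + BITS_PER_LEVEL * (3u - level);
        return (uint32_t)((vaddr >> shift) & ((1u << BITS_PER_LEVEL) - 1u));
    }

    int main(void) {
        uint64_t va = 0x0000000012345000ull;
        for (uint32_t level = 0; level < 4; level++)
            printf("level %u -> index %u\n", level, pt_index(va, level));
        return 0;
    }
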
/arch/riscv/
mmu.cpp
81 LTRACEF_LEVEL(3, "canonical va %#lx, level %u = index %#x\n", va, level, index); in vaddr_to_index()
83 return index; in vaddr_to_index()
320 uint index = vaddr_to_index(vaddr, level); in riscv_pt_walk() local
321 volatile riscv_pte_t *ptep = aspace->pt_virt + index; in riscv_pt_walk()
325 level, index, ptep, *ptep, vaddr); in riscv_pt_walk()
338 index = vaddr_to_index(vaddr, level); in riscv_pt_walk()
339 ptep = ptv + index; in riscv_pt_walk()
343 auto ret = callback(level, index, pte, &vaddr); in riscv_pt_walk()
379 index = vaddr_to_index(vaddr, level); in riscv_pt_walk()
380 ptep = ptv + index; in riscv_pt_walk()
[all …]
/arch/x86/32/
mmu.c
247 uint32_t index = 0; in x86_mmu_unmap_entry() local
253 LTRACEF_LEVEL(2, "index %u\n", index); in x86_mmu_unmap_entry()
254 if ((table[index] & X86_MMU_PG_P) == 0) in x86_mmu_unmap_entry()
256 next_table_pa = get_pfn_from_pte(table[index]); in x86_mmu_unmap_entry()
262 LTRACEF_LEVEL(2, "index %u\n", index); in x86_mmu_unmap_entry()
263 if ((table[index] & X86_MMU_PG_P) == 0) in x86_mmu_unmap_entry()
268 table[index] = 0; in x86_mmu_unmap_entry()
291 if ((table[index] & X86_MMU_PG_P) != 0) { in x86_mmu_unmap_entry()
292 table[index] = 0; in x86_mmu_unmap_entry()
354 for (uint32_t index = 0; index < no_of_pages; index++) { in x86_mmu_map_range() local
[all …]
/arch/or1k/
mmu.c
72 uint index = vaddr / SECTION_SIZE; in arch_mmu_query() local
73 uint32_t pte = or1k_kernel_translation_table[index]; in arch_mmu_query()
82 index = (vaddr % SECTION_SIZE) / PAGE_SIZE; in arch_mmu_query()
83 pte = l2_table[index]; in arch_mmu_query()
113 uint index = vaddr / SECTION_SIZE; in arch_mmu_unmap() local
114 uint32_t pte = or1k_kernel_translation_table[index]; in arch_mmu_unmap()
124 or1k_kernel_translation_table[index] = 0; in arch_mmu_unmap()
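
Note: the or1k arch_mmu_query() hits above use a simple two-level split: the first-level slot is vaddr / SECTION_SIZE and the second-level slot is (vaddr % SECTION_SIZE) / PAGE_SIZE. A small sketch of that arithmetic; the section and page sizes below are placeholders for illustration, not the kernel's actual constants.

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder geometry; the real SECTION_SIZE / PAGE_SIZE come from the or1k headers. */
    #define SECTION_SIZE  (16u * 1024u * 1024u)
    #define PAGE_SIZE     (8u * 1024u)

    int main(void) {
        uint32_t vaddr = 0x12345678;

        uint32_t l1_index = vaddr / SECTION_SIZE;                /* slot in the top-level table */
        uint32_t l2_index = (vaddr % SECTION_SIZE) / PAGE_SIZE;  /* slot in the per-section L2 table */

        printf("vaddr %#x -> l1 %u, l2 %u\n", vaddr, l1_index, l2_index);
        return 0;
    }
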
/arch/arm/arm/
mmu.c
136 int index; in arm_mmu_map_section() local
147 index = vaddr / SECTION_SIZE; in arm_mmu_map_section()
154 …aspace->tt_virt[index] = (paddr & ~(MB-1)) | (MMU_MEMORY_DOMAIN_MEM << 5) | MMU_MEMORY_L1_DESCRIPT… in arm_mmu_map_section()
157 static void arm_mmu_unmap_l1_entry(uint32_t *translation_table, uint32_t index) { in arm_mmu_unmap_l1_entry() argument
159 DEBUG_ASSERT(index < TT_ENTRY_COUNT); in arm_mmu_unmap_l1_entry()
161 translation_table[index] = 0; in arm_mmu_unmap_l1_entry()
163 arm_invalidate_tlb_mva_no_barrier((vaddr_t)index * SECTION_SIZE); in arm_mmu_unmap_l1_entry()
242 uint index = vaddr / MB; in arch_mmu_query() local
245 uint32_t tt_entry = aspace->tt_virt[index]; in arch_mmu_query()
/arch/arm64/include/arch/arm64/
mmu.h
123 #define MMU_MAIR_ATTR(index, attr) BM(index * 8, 8, (attr)) argument
/arch/mips/
mips.ld
196 .stab.index 0 : { *(.stab.index) }
