| /arch/xtensa/core/ |
| A D | mmu.c |
    25    uint32_t user_asid = domain->asid;   in xtensa_mmu_compute_domain_regs()
    43    domain->reg_ptepin_as = XTENSA_MMU_PTE_ENTRY_VADDR(domain->reg_ptevaddr,   in xtensa_mmu_compute_domain_regs()
    44    domain->reg_ptevaddr)   in xtensa_mmu_compute_domain_regs()
    57    domain->reg_vecpin_at = vb_pte;   in xtensa_mmu_compute_domain_regs()
    58    domain->reg_vecpin_as = XTENSA_MMU_PTE_ENTRY_VADDR(domain->reg_ptevaddr,   in xtensa_mmu_compute_domain_regs()
    88    :: "r"(domain->reg_ptevaddr), "r"(domain->reg_asid),   in xtensa_mmu_set_paging()
    89    "r"(domain->reg_ptepin_at), "r"(domain->reg_ptepin_as),   in xtensa_mmu_set_paging()
    90    "r"(domain->reg_vecpin_at), "r"(domain->reg_vecpin_as));   in xtensa_mmu_set_paging()
    143   domain.asid = ASID_INVALID;   in xtensa_mmu_init_paging()
    171   "r"(domain.reg_ptepin_at), "r"(domain.reg_ptepin_as),   in xtensa_mmu_init_paging()
    [all …]
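
The mmu.c hits show a two-step split: xtensa_mmu_compute_domain_regs() derives the per-domain register values (ASID, page-table base, pinned TLB entries) once and caches them in the domain struct, while xtensa_mmu_set_paging() only replays those cached values through inline assembly. The self-contained C sketch below models that precompute-then-apply split; every name in it (fake_domain_regs, hw_write_reg, the placeholder math) is invented for illustration and is not the Zephyr or Xtensa API.

```c
/*
 * Precompute-then-apply sketch: expensive derivation happens once per
 * domain, the context-switch path only replays cached values.
 * All names here are invented for illustration.
 */
#include <stdint.h>
#include <stdio.h>

struct fake_domain_regs {
	uint32_t reg_asid;      /* address-space ID for this domain   */
	uint32_t reg_ptevaddr;  /* page-table virtual base            */
	uint32_t reg_ptepin_as; /* pinned TLB entry: address/way      */
	uint32_t reg_ptepin_at; /* pinned TLB entry: attributes       */
};

/* Pretend this costs something: derive the pinned-entry values
 * from the page-table base and the ASID (placeholder math). */
static void compute_domain_regs(struct fake_domain_regs *r,
				uint32_t asid, uint32_t pt_base)
{
	r->reg_asid = asid;
	r->reg_ptevaddr = pt_base;
	r->reg_ptepin_as = pt_base | (asid & 0xF);
	r->reg_ptepin_at = (asid << 8) | 0x7;
}

static void hw_write_reg(const char *name, uint32_t val)
{
	printf("write %-12s <= 0x%08x\n", name, (unsigned int)val);
}

/* Hot path: nothing is derived here, only cached values are written. */
static void set_paging(const struct fake_domain_regs *r)
{
	hw_write_reg("PTEVADDR", r->reg_ptevaddr);
	hw_write_reg("RASID", r->reg_asid);
	hw_write_reg("PTEPIN_AS", r->reg_ptepin_as);
	hw_write_reg("PTEPIN_AT", r->reg_ptepin_at);
}

int main(void)
{
	struct fake_domain_regs regs;

	compute_domain_regs(&regs, 3, 0x20000000);
	set_paging(&regs);   /* what a context switch would replay */
	return 0;
}
```

In this sketch, keeping the derivation out of set_paging() is what keeps the switch path down to a handful of register writes.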
|
| A D | ptables.c |
    331   struct arch_mem_domain *domain =   in xtensa_mmu_reinit() local
    336   xtensa_mmu_set_paging(domain);   in xtensa_mmu_reinit()
    444   struct arch_mem_domain *domain;   in __arch_mem_map() local
    454   vaddr, domain);   in __arch_mem_map()
    461   vaddr_uc, domain);   in __arch_mem_map()
    599   struct arch_mem_domain *domain;   in __arch_mem_unmap() local
    705   xtensa_mmu_set_paging(domain);   in xtensa_mmu_tlb_shootdown()
    823   domain->arch.asid = asid_count;   in arch_mem_domain_init()
    835   domain->arch.ptables = ptables;   in arch_mem_domain_init()
    974   struct k_mem_domain *domain;   in arch_mem_domain_thread_add() local
    [all …]
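
The ptables.c hits show arch_mem_domain_init() handing each new domain its own ASID (domain->arch.asid = asid_count) and a page-table pointer (domain->arch.ptables = ptables). Below is a minimal sketch of that bookkeeping under simplified, assumed types (fake_arch_mem_domain, MAX_ASID); it illustrates the pattern only and is not the actual xtensa implementation.

```c
/*
 * Per-domain bookkeeping sketch: each new memory domain receives its
 * own ASID and a page-table pointer.  Types and the MAX_ASID limit
 * are illustrative stand-ins, not the real Zephyr definitions.
 */
#include <stdint.h>
#include <stddef.h>
#include <errno.h>

#define MAX_ASID 255U   /* assumed hardware limit, not the real value */

struct fake_arch_mem_domain {
	uint32_t asid;      /* address-space ID programmed into the MMU */
	uint32_t *ptables;  /* top-level page table for this domain     */
};

static uint32_t asid_count;          /* last ASID handed out          */
static uint32_t kernel_ptables[8];   /* stand-in for the real tables  */

static int fake_arch_mem_domain_init(struct fake_arch_mem_domain *dom)
{
	if (asid_count >= MAX_ASID) {
		return -ENOSPC;      /* no address-space IDs left */
	}

	/* A fresh ASID keeps this domain's TLB entries from aliasing
	 * with any other domain's; the table pointer is what the
	 * switch path later programs into the MMU. */
	dom->asid = ++asid_count;
	dom->ptables = kernel_ptables;
	return 0;
}

int main(void)
{
	struct fake_arch_mem_domain dom;

	return fake_arch_mem_domain_init(&dom);
}
```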
|
| A D | mpu.c |
    750   int arch_mem_domain_init(struct k_mem_domain *domain)   in arch_mem_domain_init() argument
    752   domain->arch.mpu_map = xtensa_mpu_map_fg_kernel;   in arch_mem_domain_init()
    771   int arch_mem_domain_partition_remove(struct k_mem_domain *domain,   in arch_mem_domain_partition_remove() argument
    777   struct xtensa_mpu_map *map = &domain->arch.mpu_map;   in arch_mem_domain_partition_remove()
    850   if (cur_thread->mem_domain_info.mem_domain == domain) {   in arch_mem_domain_partition_remove()
    858   int arch_mem_domain_partition_add(struct k_mem_domain *domain,   in arch_mem_domain_partition_add() argument
    863   struct xtensa_mpu_map *map = &domain->arch.mpu_map;   in arch_mem_domain_partition_add()
    887   (cur_thread->mem_domain_info.mem_domain == domain)) {   in arch_mem_domain_partition_add()
    925   ret = mpu_map_region_add(&domain->arch.mpu_map,   in arch_mem_domain_thread_add()
    935   thread->arch.mpu_map = &domain->arch.mpu_map;   in arch_mem_domain_thread_add()
    [all …]
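
The mpu.c hits sketch an MPU-based backend: partition add/remove edits the domain's in-memory map (domain->arch.mpu_map) and only touches the hardware when the running thread's domain is the one being modified (the cur_thread->mem_domain_info.mem_domain == domain checks). A simplified model of that idea follows, with invented types and a stub "hardware write".

```c
/*
 * Minimal model of the MPU-backend pattern: partition add edits the
 * domain's in-memory region map, and the hardware is only reprogrammed
 * when the currently running thread belongs to the domain being
 * changed.  Everything here is a simplified stand-in.
 */
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

#define MAX_REGIONS 8

struct fake_mpu_map {
	uintptr_t start[MAX_REGIONS];
	size_t    size[MAX_REGIONS];
	int       count;
};

struct fake_domain {
	struct fake_mpu_map mpu_map;
};

struct fake_thread {
	struct fake_domain *mem_domain;
};

static struct fake_thread *cur_thread;

static void write_map_to_hardware(const struct fake_mpu_map *map)
{
	printf("reprogramming MPU with %d region(s)\n", map->count);
}

static int partition_add(struct fake_domain *dom, uintptr_t start, size_t size)
{
	struct fake_mpu_map *map = &dom->mpu_map;

	if (map->count == MAX_REGIONS) {
		return -1;
	}
	map->start[map->count] = start;
	map->size[map->count] = size;
	map->count++;

	/* Only touch the hardware if the change affects the domain the
	 * running thread is currently using; otherwise the updated map
	 * is picked up at the next switch into that domain. */
	if (cur_thread != NULL && cur_thread->mem_domain == dom) {
		write_map_to_hardware(map);
	}
	return 0;
}

int main(void)
{
	struct fake_domain dom = { 0 };
	struct fake_thread thread = { .mem_domain = &dom };

	cur_thread = &thread;
	return partition_add(&dom, 0x20000000, 0x1000);
}
```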
|
| A D | README_MMU.txt |
    235   (e.g. to re-use it for another memory domain, or just for any runtime
    263   system-wide flushes on all cpus every time a memory domain is
|
| /arch/riscv/core/ |
| A D | pmp.c |
    607   struct k_mem_domain *domain)   in resync_pmp_domain() argument
    615   remaining_partitions = domain->num_partitions;   in resync_pmp_domain()
    617   struct k_mem_partition *part = &domain->partitions[p_idx];   in resync_pmp_domain()
    637   remaining_partitions + 1, domain);   in resync_pmp_domain()
    667   resync_pmp_domain(thread, domain);   in z_riscv_pmp_usermode_enable()
    715   int arch_mem_domain_init(struct k_mem_domain *domain)   in arch_mem_domain_init() argument
    717   domain->arch.pmp_update_nr = 0;   in arch_mem_domain_init()
    727   domain->arch.pmp_update_nr += 1;   in arch_mem_domain_partition_add()
    737   domain->arch.pmp_update_nr += 1;   in arch_mem_domain_partition_remove()
    785   remaining_partitions = domain->num_partitions;   in arch_buffer_validate()
    [all …]
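
The pmp.c hits suggest a generation-counter scheme: arch_mem_domain_init() zeroes domain->arch.pmp_update_nr, every partition add/remove bumps it, and resync_pmp_domain() walks domain->partitions[] to rebuild a thread's PMP view. Below is a hedged sketch of that pattern, assuming a per-thread cached counter (not shown in the hits) and simplified types.

```c
/*
 * "Update generation" sketch: the domain keeps a counter bumped on
 * every partition add/remove, and a thread resynchronizes its PMP
 * view only when its cached counter no longer matches.  Names and
 * types are illustrative, not the RISC-V backend's.
 */
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

#define MAX_PARTITIONS 8

struct fake_partition {
	uintptr_t start;
	size_t    size;
};

struct fake_domain {
	struct fake_partition partitions[MAX_PARTITIONS];
	int       num_partitions;
	uint32_t  pmp_update_nr;   /* bumped on every add/remove */
};

struct fake_thread {
	uint32_t synced_update_nr; /* generation this thread last saw */
};

static void partition_add(struct fake_domain *dom, uintptr_t start, size_t size)
{
	if (dom->num_partitions == MAX_PARTITIONS) {
		return;
	}
	dom->partitions[dom->num_partitions++] =
		(struct fake_partition){ .start = start, .size = size };
	dom->pmp_update_nr += 1;
}

static void resync_pmp_domain(struct fake_thread *t, const struct fake_domain *dom)
{
	for (int i = 0; i < dom->num_partitions; i++) {
		printf("program PMP region %d: %#lx + %zu\n", i,
		       (unsigned long)dom->partitions[i].start,
		       dom->partitions[i].size);
	}
	t->synced_update_nr = dom->pmp_update_nr;
}

/* Called when entering user mode: resync only if the domain changed. */
static void usermode_enable(struct fake_thread *t, const struct fake_domain *dom)
{
	if (t->synced_update_nr != dom->pmp_update_nr) {
		resync_pmp_domain(t, dom);
	}
}

int main(void)
{
	struct fake_domain dom = { 0 };
	struct fake_thread thread = { 0 };

	partition_add(&dom, 0x80000000, 0x1000);
	usermode_enable(&thread, &dom);  /* stale -> resync */
	usermode_enable(&thread, &dom);  /* up to date -> no work */
	return 0;
}
```

In this model, threads that never re-enter user mode after a domain change pay nothing for it; the comparison happens lazily on the next entry.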
|
| /arch/x86/core/ |
| A D | x86_mmu.c |
    1203   struct arch_mem_domain *domain =   in range_map() local
    1592   if (domain != current_domain) {   in arch_mem_domain_partition_remove()
    1607   if (domain != current_domain) {   in arch_mem_domain_partition_add()
    1809   LOG_DBG("%s(%p)", __func__, domain);   in arch_mem_domain_init()
    1818   __ASSERT(list_domain != &domain->arch,   in arch_mem_domain_init()
    1830   if (domain == &k_mem_domain_default) {   in arch_mem_domain_init()
    1840   (void)memset(domain->arch.pdpt, 0, sizeof(domain->arch.pdpt));   in arch_mem_domain_init()
    1841   domain->arch.ptables = domain->arch.pdpt;   in arch_mem_domain_init()
    1844   domain->arch.ptables = page_pool_get();   in arch_mem_domain_init()
    1845   if (domain->arch.ptables == NULL) {   in arch_mem_domain_init()
    [all …]
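
The x86_mmu.c hits show arch_mem_domain_init() treating the default domain specially: it reuses the statically embedded pdpt, while any other domain pulls its top-level tables from a page pool and fails cleanly when page_pool_get() returns NULL. A self-contained sketch of that allocation split follows; the pool, sizes, and names are invented for illustration.

```c
/*
 * Allocation-split sketch: the default domain reuses its embedded
 * table, additional domains allocate from a fixed page pool and
 * init fails cleanly when the pool is exhausted.
 */
#include <stdint.h>
#include <string.h>
#include <stddef.h>
#include <errno.h>

#define PAGE_SIZE  4096
#define POOL_PAGES 4

struct fake_domain {
	void *ptables;      /* top-level paging structure              */
	uint8_t pdpt[64];   /* embedded table, default domain only     */
};

static struct fake_domain default_domain;

static uint8_t page_pool[POOL_PAGES][PAGE_SIZE];
static int pages_used;

static void *page_pool_get(void)
{
	return (pages_used < POOL_PAGES) ? page_pool[pages_used++] : NULL;
}

static int fake_domain_init(struct fake_domain *dom)
{
	if (dom == &default_domain) {
		/* Default domain: no allocation, just zero and use the
		 * embedded table. */
		memset(dom->pdpt, 0, sizeof(dom->pdpt));
		dom->ptables = dom->pdpt;
	} else {
		dom->ptables = page_pool_get();
		if (dom->ptables == NULL) {
			return -ENOMEM;   /* pool exhausted */
		}
	}
	return 0;
}

int main(void)
{
	struct fake_domain extra;

	(void)fake_domain_init(&default_domain);
	return fake_domain_init(&extra);
}
```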
|
| /arch/arm/core/mmu/ |
| A D | arm_mmu_priv.h |
    93    uint32_t domain : 4;   member
    109   uint32_t domain : 4;   member
    199   uint32_t domain : 4;   member
|
| A D | arm_mmu.c |
    297   perms_attrs.domain = ARM_MMU_DOMAIN_DEVICE;   in arm_mmu_convert_attr_flags()
    305   perms_attrs.domain = ARM_MMU_DOMAIN_DEVICE;   in arm_mmu_convert_attr_flags()
    323   perms_attrs.domain = ARM_MMU_DOMAIN_OS;   in arm_mmu_convert_attr_flags()
    424   l1_page_table.entries[l1_index].l1_section_1m.domain = perms_attrs.domain;   in arm_mmu_l1_map_section()
    486   perms_attrs.domain = l1_page_table.entries[l1_index].l1_section_1m.domain;   in arm_mmu_remap_l1_section_to_l2_table()
    522   l1_page_table.entries[l1_index].l2_page_table_ref.domain = perms_attrs.domain;   in arm_mmu_remap_l1_section_to_l2_table()
    606   l1_page_table.entries[l1_index].l2_page_table_ref.domain = 0; /* TODO */   in arm_mmu_l2_map_page()
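
Together with the arm_mmu_priv.h bitfields above, these hits show that the ARMv7 short-descriptor L1 entries carry a 4-bit domain field, filled from values such as ARM_MMU_DOMAIN_DEVICE or ARM_MMU_DOMAIN_OS when sections and L2 table references are built. The bitfield sketch below illustrates the idea only; the field layout and constants are placeholders, not the authoritative descriptor encoding.

```c
/*
 * Bitfield sketch: an L1 section-style entry whose access-control
 * "domain" nibble is filled from a permissions/attributes struct.
 * Field widths and constants are illustrative placeholders.
 */
#include <stdint.h>
#include <stdio.h>

enum fake_mmu_domain {
	FAKE_MMU_DOMAIN_OS     = 0,
	FAKE_MMU_DOMAIN_DEVICE = 1,
};

struct fake_l1_section {
	uint32_t type       : 2;
	uint32_t bufferable : 1;
	uint32_t cacheable  : 1;
	uint32_t exec_never : 1;
	uint32_t domain     : 4;   /* selects one of 16 access-control domains */
	uint32_t impl_def   : 1;
	uint32_t ap         : 2;
	uint32_t reserved   : 8;
	uint32_t base_addr  : 12;  /* 1 MiB-aligned physical base */
};

struct fake_perms_attrs {
	uint32_t domain;
	uint32_t ap;
};

static void map_section(struct fake_l1_section *entry, uint32_t pa_mib,
			const struct fake_perms_attrs *perms)
{
	entry->type = 2;                   /* "section" entry type       */
	entry->base_addr = pa_mib & 0xFFF;
	entry->ap = perms->ap;
	entry->domain = perms->domain;     /* the field the hits above set */
}

int main(void)
{
	struct fake_l1_section entry = { 0 };
	struct fake_perms_attrs perms = {
		.domain = FAKE_MMU_DOMAIN_DEVICE, .ap = 1,
	};

	map_section(&entry, 0x480, &perms);
	printf("domain nibble = %u\n", (unsigned int)entry.domain);
	return 0;
}
```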
|
| /arch/arm64/core/ |
| A D | mmu.c |
    1026   struct arch_mem_domain *domain;   in sync_domains() local
    1033   domain = CONTAINER_OF(node, struct arch_mem_domain, node);   in sync_domains()
    1034   domain_ptables = &domain->ptables;   in sync_domains()
    1204   int arch_mem_domain_init(struct k_mem_domain *domain)   in arch_mem_domain_init() argument
    1234   sys_slist_append(&domain_list, &domain->arch.node);   in arch_mem_domain_init()
    1296   struct k_mem_domain *domain;   in arch_mem_domain_thread_add() local
    1300   domain = thread->mem_domain_info.mem_domain;   in arch_mem_domain_thread_add()
    1301   domain_ptables = &domain->arch.ptables;   in arch_mem_domain_thread_add()
    1332   struct k_mem_domain *domain;   in arch_mem_domain_thread_remove() local
    1334   domain = thread->mem_domain_info.mem_domain;   in arch_mem_domain_thread_remove()
    [all …]
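
The arm64 mmu.c hits show each initialized domain registering a node on a global list (sys_slist_append(&domain_list, &domain->arch.node)), which sync_domains() later walks with CONTAINER_OF to reach each domain's page tables. Below is a simplified stand-in for that list-plus-container_of pattern; the single-pointer list and all names are invented, not Zephyr's sys_slist_t.

```c
/*
 * Domain-list sketch: every domain registers a node on a global list,
 * and a sync pass recovers each domain from its node with a
 * container_of construct to reach its translation tables.
 */
#include <stddef.h>
#include <stdio.h>

#define FAKE_CONTAINER_OF(ptr, type, field) \
	((type *)((char *)(ptr) - offsetof(type, field)))

struct fake_node {
	struct fake_node *next;
};

struct fake_arch_domain {
	void *ptables;           /* this domain's translation tables */
	struct fake_node node;   /* linkage on the global domain list */
};

static struct fake_node *domain_list;

static void domain_list_register(struct fake_arch_domain *dom)
{
	dom->node.next = domain_list;
	domain_list = &dom->node;
}

/* Walk every registered domain and "sync" its tables (here: print). */
static void sync_domains(void)
{
	for (struct fake_node *n = domain_list; n != NULL; n = n->next) {
		struct fake_arch_domain *dom =
			FAKE_CONTAINER_OF(n, struct fake_arch_domain, node);

		printf("syncing domain with ptables %p\n", dom->ptables);
	}
}

int main(void)
{
	static int tables_a, tables_b;
	struct fake_arch_domain a = { .ptables = &tables_a };
	struct fake_arch_domain b = { .ptables = &tables_b };

	domain_list_register(&a);
	domain_list_register(&b);
	sync_domains();
	return 0;
}
```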
|
| /arch/arc/core/mpu/ |
| A D | arc_mpu_common_internal.h |
    188   void arc_core_mpu_remove_mem_partition(struct k_mem_domain *domain, uint32_t part_id)   in arc_core_mpu_remove_mem_partition() argument
    190   ARG_UNUSED(domain);   in arc_core_mpu_remove_mem_partition()
|
| A D | arc_mpu_v4_internal.h |
    745   void arc_core_mpu_remove_mem_partition(struct k_mem_domain *domain,   in arc_core_mpu_remove_mem_partition() argument
    748   struct k_mem_partition *partition = &domain->partitions[partition_id];   in arc_core_mpu_remove_mem_partition()
|
| /arch/arm64/core/cortex_r/ |
| A D | arm_mpu.c |
    758   static int configure_domain_partitions(struct k_mem_domain *domain)   in configure_domain_partitions() argument
    763   SYS_DLIST_FOR_EACH_CONTAINER(&domain->mem_domain_q, thread,   in configure_domain_partitions()
    778   int arch_mem_domain_partition_add(struct k_mem_domain *domain, uint32_t partition_id)   in arch_mem_domain_partition_add() argument
    782   return configure_domain_partitions(domain);   in arch_mem_domain_partition_add()
    785   int arch_mem_domain_partition_remove(struct k_mem_domain *domain, uint32_t partition_id)   in arch_mem_domain_partition_remove() argument
    789   return configure_domain_partitions(domain);   in arch_mem_domain_partition_remove()
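
The cortex_r arm_mpu.c hits show both partition add and partition remove ignoring which partition changed and funnelling into configure_domain_partitions(), which walks the domain's thread queue and reprograms every attached thread. A compact sketch of that rebuild-everything strategy, with placeholder types:

```c
/*
 * Rebuild-everything sketch: add and remove both re-derive the full
 * region configuration for every thread attached to the domain,
 * instead of patching the one partition that changed.
 */
#include <stdio.h>

#define MAX_PARTITIONS 8

struct fake_partition {
	unsigned long start;
	unsigned long size;
};

struct fake_domain {
	struct fake_partition partitions[MAX_PARTITIONS];
	int num_partitions;
	int num_threads;            /* threads attached to this domain */
};

/* Re-derive the region set for every thread in the domain. */
static int configure_domain_partitions(const struct fake_domain *dom)
{
	for (int t = 0; t < dom->num_threads; t++) {
		printf("thread %d: programming %d partition(s)\n",
		       t, dom->num_partitions);
	}
	return 0;
}

static int partition_add(struct fake_domain *dom,
			 unsigned long start, unsigned long size)
{
	if (dom->num_partitions == MAX_PARTITIONS) {
		return -1;
	}
	dom->partitions[dom->num_partitions++] =
		(struct fake_partition){ .start = start, .size = size };

	/* Which slot changed does not matter: rebuild from scratch. */
	return configure_domain_partitions(dom);
}

static int partition_remove(struct fake_domain *dom, int partition_id)
{
	dom->partitions[partition_id] = (struct fake_partition){ 0 };

	return configure_domain_partitions(dom);
}

int main(void)
{
	struct fake_domain dom = { .num_threads = 2 };

	(void)partition_add(&dom, 0x08000000, 0x2000);
	return partition_remove(&dom, 0);
}
```

In this sketch, the full rebuild trades a little extra work on each update for much simpler bookkeeping, since no per-partition hardware state has to be tracked.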
|
| /arch/arm64/core/xen/ |
| A D | Kconfig |
    20    Built binary will be used as Xen privileged domain (Domain 0).
|
| /arch/xtensa/include/ |
| A D | xtensa_mmu_priv.h |
    544   void xtensa_mmu_set_paging(struct arch_mem_domain *domain);
    551   void xtensa_mmu_compute_domain_regs(struct arch_mem_domain *domain);
|
| /arch/xtensa/ |
| A D | Kconfig |
    302   the region being removed from a memory domain.
|
| /arch/arm/core/ |
| A D | Kconfig |
    275   FPU usage or security domain.
|
| /arch/ |
| A D | Kconfig |
    272   (Normal) domain.
    283   interrupts etc.) belonging to the Secure domain.
    952   regions available for application memory domain programming.
|
| /arch/x86/ |
| A D | Kconfig |
    349   default memory domain. Instantiation of additional memory domains
|