| /libcpu/risc-v/t-head/c908/ |
| A D | riscv_mmu.h |
      73   #define PTE_WRAP(attr) (attr | PTE_A | PTE_D)   argument
      97   #define MMU_MAP_TRACE(attr) (attr)   argument
      136  attr &= ~PTE_W;   in rt_hw_mmu_attr_rm_perm()
      140  attr &= ~PTE_W;   in rt_hw_mmu_attr_rm_perm()
      145  return attr;   in rt_hw_mmu_attr_rm_perm()
      161  attr |= (PTE_R | PTE_W | PTE_U);   in rt_hw_mmu_attr_add_perm()
      166  return attr;   in rt_hw_mmu_attr_add_perm()
      183  rc = ((attr & PTE_W) && (attr & PTE_R));   in rt_hw_mmu_attr_test_perm()
      186  rc = !!(attr & PTE_R);   in rt_hw_mmu_attr_test_perm()
      189  rc = !!(attr & PTE_X);   in rt_hw_mmu_attr_test_perm()
      [all …]
|
| /libcpu/risc-v/t-head/c906/ |
| A D | riscv_mmu.h |
      77   #define PTE_WRAP(attr) (attr | PTE_A | PTE_D)   argument
      102  #define MMU_MAP_TRACE(attr) (attr)   argument
      143  attr &= ~PTE_W;   in rt_hw_mmu_attr_rm_perm()
      147  attr &= ~PTE_W;   in rt_hw_mmu_attr_rm_perm()
      152  return attr;   in rt_hw_mmu_attr_rm_perm()
      168  attr |= (PTE_R | PTE_W | PTE_U);   in rt_hw_mmu_attr_add_perm()
      173  return attr;   in rt_hw_mmu_attr_add_perm()
      190  rc = ((attr & PTE_W) && (attr & PTE_R));   in rt_hw_mmu_attr_test_perm()
      193  rc = !!(attr & PTE_R);   in rt_hw_mmu_attr_test_perm()
      196  rc = !!(attr & PTE_X);   in rt_hw_mmu_attr_test_perm()
      [all …]
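
Both T-Head ports share the same attribute helpers: rt_hw_mmu_attr_rm_perm() clears PTE_W, rt_hw_mmu_attr_add_perm() sets PTE_R | PTE_W | PTE_U, and PTE_WRAP() pre-sets the accessed/dirty bits so the hardware never has to fault just to update them. A minimal sketch of that shape, assuming the standard Sv39 PTE bit positions and placeholder prot selectors (the real RT-Thread case labels are truncated out of the hits above):

    /* Sketch only: PTE bit values follow the RISC-V Sv39 layout; the
     * PROT_* selector values are placeholders, not the port's names. */
    #include <stddef.h>

    #define PTE_R (1UL << 1)   /* readable   */
    #define PTE_W (1UL << 2)   /* writable   */
    #define PTE_X (1UL << 3)   /* executable */
    #define PTE_U (1UL << 4)   /* user mode  */
    #define PTE_A (1UL << 6)   /* accessed   */
    #define PTE_D (1UL << 7)   /* dirty      */

    /* PTE_WRAP pre-sets A/D so no exception is ever taken just to set them. */
    #define PTE_WRAP(attr) ((attr) | PTE_A | PTE_D)

    #define PROT_WRITE 1       /* placeholder selector values */
    #define PROT_USER  2

    static inline size_t attr_rm_perm_sketch(size_t attr, int prot)
    {
        if (prot == PROT_WRITE)      /* revoke write access */
            attr &= ~PTE_W;
        return attr;
    }

    static inline size_t attr_add_perm_sketch(size_t attr, int prot)
    {
        if (prot == PROT_USER)       /* grant user read/write */
            attr |= (PTE_R | PTE_W | PTE_U);
        return attr;
    }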
|
| /libcpu/arm/cortex-a/ |
| A D | mmu.h |
      55   rt_uint32_t attr;   member
      79   #define MMU_MAP_TRACE(attr) (attr)   argument
      148  if ((attr & AP_APX_MASK) == AP_APX_URW_KRW)   in rt_hw_mmu_attr_rm_perm()
      149  attr &= ~MMU_MAP_MTBL_AP01(0x1);   in rt_hw_mmu_attr_rm_perm()
      152  switch (attr & AP_APX_MASK)   in rt_hw_mmu_attr_rm_perm()
      157  attr = (attr & AP_APX_MASK) | AP_APX_URO_KRO;   in rt_hw_mmu_attr_rm_perm()
      159  attr |= MMU_MAP_MTBL_AP2(0x1);   in rt_hw_mmu_attr_rm_perm()
      166  return attr;   in rt_hw_mmu_attr_rm_perm()
      183  attr |= MMU_MAP_MTBL_AP01(0x3);   in rt_hw_mmu_attr_add_perm()
      184  attr &= ~MMU_MAP_MTBL_AP2(0x1);   in rt_hw_mmu_attr_add_perm()
      [all …]
|
| A D | mmu.c |
      48   rt_uint32_t paddrStart, rt_uint32_t attr)   in rt_hw_mmu_setmtt() argument
      56   *pTT = attr | (((paddrStart >> 20) + i) << 20);   in rt_hw_mmu_setmtt()
      86   rt_uint32_t size,rt_uint32_t pa, rt_uint32_t attr)   in _init_map_section() argument
      94   *ptt = attr | (((pa >> ARCH_SECTION_SHIFT) + i) << ARCH_SECTION_SHIFT);   in _init_map_section()
      142  mdesc->attr, MMF_MAP_FIXED, &rt_mm_dummy_mapper, 0);   in rt_hw_init_mmu_table()
      145  mdesc->paddr_start, mdesc->attr);   in rt_hw_init_mmu_table()
      312  size_t attr)   in _kenrel_map_4K() argument
      348  *(mmu_l2 + l2_off) = (loop_pa | attr);   in _kenrel_map_4K()
      359  size_t attr)   in rt_hw_mmu_map() argument
      368  ret = _kenrel_map_4K(aspace->page_table, v_addr, p_addr, attr);   in rt_hw_mmu_map()
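
The rt_hw_mmu_setmtt()/_init_map_section() hits show the classic ARMv7-A short-descriptor pattern: one first-level entry per 1 MiB section, built by OR-ing the attribute word with the section's physical base in bits [31:20]. A self-contained sketch of that fill loop; the port's exact loop bounds are not visible in the hits, so the inclusive range below is an assumption:

    /* Sketch of the 1 MiB section fill: every first-level descriptor holds
     * the section's physical base in bits [31:20] OR'ed with a shared
     * attribute word (AP/TEX/C/B/domain bits).  Names are illustrative. */
    #include <stdint.h>

    #define ARCH_SECTION_SHIFT 20u   /* 1 MiB per first-level section */

    static void setmtt_sketch(uint32_t *ttb, uint32_t vaddr_start, uint32_t vaddr_end,
                              uint32_t paddr_start, uint32_t attr)
    {
        uint32_t *pTT  = ttb + (vaddr_start >> ARCH_SECTION_SHIFT);
        uint32_t nsect = (vaddr_end >> ARCH_SECTION_SHIFT) - (vaddr_start >> ARCH_SECTION_SHIFT);

        for (uint32_t i = 0; i <= nsect; i++)
        {
            /* descriptor = attributes | ((physical section number + i) << 20) */
            *pTT++ = attr | (((paddr_start >> ARCH_SECTION_SHIFT) + i) << ARCH_SECTION_SHIFT);
        }
    }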
|
| /libcpu/risc-v/virt64/ |
| A D | riscv_mmu.h |
      117  rt_inline size_t rt_hw_mmu_attr_rm_perm(size_t attr, rt_base_t prot)   in rt_hw_mmu_attr_rm_perm() argument
      123  attr &= ~PTE_W;   in rt_hw_mmu_attr_rm_perm()
      127  attr &= ~PTE_W;   in rt_hw_mmu_attr_rm_perm()
      132  return attr;   in rt_hw_mmu_attr_rm_perm()
      142  rt_inline size_t rt_hw_mmu_attr_add_perm(size_t attr, rt_base_t prot)   in rt_hw_mmu_attr_add_perm() argument
      148  attr |= (PTE_R | PTE_W | PTE_U);   in rt_hw_mmu_attr_add_perm()
      153  return attr;   in rt_hw_mmu_attr_add_perm()
      170  rc = ((attr & PTE_W) && (attr & PTE_R));   in rt_hw_mmu_attr_test_perm()
      173  rc = !!(attr & PTE_R);   in rt_hw_mmu_attr_test_perm()
      176  rc = !!(attr & PTE_X);   in rt_hw_mmu_attr_test_perm()
      [all …]
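
rt_hw_mmu_attr_test_perm() answers "does this PTE already grant the requested permission?"; the write check requires both PTE_W and PTE_R because a PTE with W set but R clear is a reserved encoding in the RISC-V privileged spec. A sketch of that query with placeholder selector values (the port's actual case labels are truncated out of the hits):

    /* Sketch only: PTE bits per the Sv39 layout, selector values assumed. */
    #define PTE_R (1UL << 1)
    #define PTE_W (1UL << 2)
    #define PTE_X (1UL << 3)

    #define PROT_READ    0   /* placeholder selector values */
    #define PROT_WRITE   1
    #define PROT_EXECUTE 2

    static inline int attr_test_perm_sketch(unsigned long attr, int prot)
    {
        int rc = 0;

        switch (prot)
        {
        case PROT_WRITE:
            /* W without R is reserved, so a writable page must carry both bits */
            rc = ((attr & PTE_W) && (attr & PTE_R));
            break;
        case PROT_READ:
            rc = !!(attr & PTE_R);
            break;
        case PROT_EXECUTE:
            rc = !!(attr & PTE_X);
            break;
        }
        return rc;
    }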
|
| /libcpu/aarch64/common/include/ |
| A D | mmu.h |
      31   unsigned long attr;   member
      76   #define MMU_MAP_TRACE(attr) ((attr) & ~(MMU_ATTR_AF | MMU_ATTR_DBM))   argument
      114  size_t size, size_t attr);
      150  rt_inline size_t rt_hw_mmu_attr_add_perm(size_t attr, rt_base_t prot)   in rt_hw_mmu_attr_add_perm() argument
      156  attr = (attr & ~MMU_AP_MASK) | (MMU_AP_KAUA << MMU_AP_SHIFT);   in rt_hw_mmu_attr_add_perm()
      161  return attr;   in rt_hw_mmu_attr_add_perm()
      171  rt_inline size_t rt_hw_mmu_attr_rm_perm(size_t attr, rt_base_t prot)   in rt_hw_mmu_attr_rm_perm() argument
      177  if (attr & 0x40)   in rt_hw_mmu_attr_rm_perm()
      178  attr |= 0x80;   in rt_hw_mmu_attr_rm_perm()
      183  return attr;   in rt_hw_mmu_attr_rm_perm()
      [all …]
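
On AArch64 the permission helpers operate on the AP[2:1] field of the stage-1 descriptor (bits [7:6]): rt_hw_mmu_attr_add_perm() rewrites the whole field to kernel-plus-user access, while rt_hw_mmu_attr_rm_perm() sets AP[2] (read-only) whenever AP[1] (EL0-accessible) is already set, which is what the 0x40/0x80 test does. MMU_MAP_TRACE() also masks off AF and DBM, presumably so hardware-managed access/dirty updates don't leak into traced attribute values. A sketch of the two helpers; the MMU_AP_* macro values are assumptions consistent with the architectural layout, not taken from the port's header:

    #include <stdint.h>

    #define MMU_AP_SHIFT  6
    #define MMU_AP_MASK   (0x3ULL << MMU_AP_SHIFT)
    #define MMU_AP_KAUA   0x1ULL      /* assumed: kernel RW + user RW (AP = 01) */

    /* grant user access: replace the whole AP field (as in rt_hw_mmu_attr_add_perm) */
    static inline uint64_t ap_add_user_rw(uint64_t attr)
    {
        return (attr & ~MMU_AP_MASK) | (MMU_AP_KAUA << MMU_AP_SHIFT);
    }

    /* drop write access: if AP[1] (bit 6, EL0-accessible) is set, also set
     * AP[2] (bit 7, read-only) -- the 0x40/0x80 test seen in the hits */
    static inline uint64_t ap_remove_write(uint64_t attr)
    {
        if (attr & 0x40)
            attr |= 0x80;
        return attr;
    }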
|
| /libcpu/arm/cortex-m33/ |
| A D | mpu.c |
      39   rt_uint8_t attr = 0U;   in rt_hw_mpu_region_default_attr() local
      48   return attr;   in rt_hw_mpu_region_default_attr()
      57   region->attr.rbar = (rt_uint32_t)region->start | (region->attr.rbar & (~MPU_RBAR_BASE_Msk));   in _mpu_rbar_rlar()
      59   if (region->attr.mair_attr == RT_ARM_DEFAULT_MAIR_ATTR)   in _mpu_rbar_rlar()
      65   mair_attr = (rt_uint8_t)region->attr.mair_attr;   in _mpu_rbar_rlar()
      99   region->attr.rlar = rlar;   in _mpu_rbar_rlar()
      171  ARM_MPU_SetRegion(index, static_regions[index].attr.rbar, static_regions[index].attr.rlar);   in rt_hw_mpu_init()
      210  ARM_MPU_SetRegion(index, region->attr.rbar, region->attr.rlar);   in rt_hw_mpu_add_region()
      262  ARM_MPU_SetRegion(index, region->attr.rbar, region->attr.rlar);   in rt_hw_mpu_update_region()
      285  …x, ((rt_mem_region_t *)thread->mem_regions)[i].attr.rbar, ((rt_mem_region_t *)thread->mem_regions)…   in rt_hw_mpu_table_switch()
      [all …]
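
_mpu_rbar_rlar() assembles the ARMv8-M region registers: the region base is merged into RBAR while the low control bits (XN/AP/SH) are preserved, and the RBAR/RLAR pair is then written through the CMSIS ARM_MPU_SetRegion() helper. A reduced sketch of that step; MPU_RBAR_BASE_Msk and ARM_MPU_SetRegion() come from the CMSIS core headers (core_cm33.h / mpu_armv8.h), and the function and parameter names here are illustrative:

    #include <stdint.h>

    static void program_region_sketch(uint32_t index, void *start,
                                      uint32_t rbar_ctrl, uint32_t rlar)
    {
        /* keep only the XN/AP/SH control bits from rbar_ctrl,
         * take the region base address from 'start' */
        uint32_t rbar = (uint32_t)start | (rbar_ctrl & ~MPU_RBAR_BASE_Msk);

        ARM_MPU_SetRegion(index, rbar, rlar);   /* select RNR = index, write RBAR/RLAR */
    }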
|
| A D | cpuport.c |
      249  stack_top_region.attr = RT_MEM_REGION_P_RO_U_NA;   in rt_hw_stack_guard_init()
      252  stack_bottom_region.attr = RT_MEM_REGION_P_RO_U_NA;   in rt_hw_stack_guard_init()
|
| /libcpu/arm/cortex-m7/ |
| A D | mpu.c |
      38   rt_uint32_t attr = 0U;   in rt_hw_mpu_region_default_attr() local
      45   attr = default_mem_attr[((rt_uint32_t)region->start & ~0xFFFFFFFU) >> 29U];   in rt_hw_mpu_region_default_attr()
      47   return attr;   in rt_hw_mpu_region_default_attr()
      53   if ((region->attr.rasr & RESERVED) == RESERVED)   in _mpu_rasr()
      56   rasr |= region->attr.rasr & (MPU_RASR_XN_Msk | MPU_RASR_AP_Msk);   in _mpu_rasr()
      60   rasr |= region->attr.rasr & MPU_RASR_ATTRS_Msk;   in _mpu_rasr()
      125  static_regions[index].attr.rasr = _mpu_rasr(&(static_regions[index]));   in rt_hw_mpu_init()
      142  region->attr.rasr = _mpu_rasr(region);   in rt_hw_mpu_add_region()
      159  ARM_MPU_SetRegion(ARM_MPU_RBAR(index, (rt_uint32_t)region->start), region->attr.rasr);   in rt_hw_mpu_add_region()
      193  region->attr.rasr = _mpu_rasr(region);   in rt_hw_mpu_update_region()
      [all …]
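
On the ARMv7-M side, _mpu_rasr() starts from a default attribute chosen by address (the Cortex-M default memory map is split into eight 512 MiB areas selected by address bits [31:29]) and lets the caller override only the XN and AP fields. A sketch of that combination; MPU_RASR_XN_Msk and MPU_RASR_AP_Msk are CMSIS definitions (core_cm7.h / mpu_armv7.h), and default_mem_attr stands in for the port's per-area table:

    #include <stdint.h>

    static uint32_t build_rasr_sketch(uint32_t region_start, uint32_t requested_rasr,
                                      const uint32_t default_mem_attr[8])
    {
        /* default TEX/C/B/S attributes for the 512 MiB area this address sits in
         * (selected by address bits [31:29], as in the hit at line 45 above) */
        uint32_t rasr = default_mem_attr[(region_start & ~0xFFFFFFFU) >> 29U];

        /* let the caller override only execute-never and access permissions */
        rasr |= requested_rasr & (MPU_RASR_XN_Msk | MPU_RASR_AP_Msk);

        return rasr;
    }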
|
| A D | cpuport.c |
      191  stack_top_region.attr = RT_MEM_REGION_P_NA_U_NA;   in rt_hw_stack_guard_init()
      194  stack_bottom_region.attr = RT_MEM_REGION_P_NA_U_NA;   in rt_hw_stack_guard_init()
|
| /libcpu/risc-v/common64/ |
| A D | mmu.h |
      34   rt_size_t attr;   member
      51   #define COMBINEPTE(paddr, attr) \   argument
      52   ((((paddr) >> PAGE_OFFSET_BIT) << PTE_PPN_SHIFT) | (attr))
      64   size_t attr);
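
COMBINEPTE() packs a physical frame and the permission bits into one Sv39 PTE: the 12 page-offset bits are stripped, the PPN is shifted up to bit 10, and the attribute bits occupy bits [9:0]. A small worked example, assuming the usual PAGE_OFFSET_BIT = 12 and PTE_PPN_SHIFT = 10:

    #include <stdio.h>
    #include <stdint.h>

    #define PAGE_OFFSET_BIT 12   /* 4 KiB pages              (assumed value)  */
    #define PTE_PPN_SHIFT   10   /* PPN starts at PTE bit 10 (Sv39 layout)    */

    #define COMBINEPTE(paddr, attr) \
        ((((paddr) >> PAGE_OFFSET_BIT) << PTE_PPN_SHIFT) | (attr))

    int main(void)
    {
        uint64_t pa   = 0x80200000ULL;   /* example physical frame            */
        uint64_t attr = 0xCF;            /* e.g. D|A|X|W|R|V style bit pattern */

        /* (0x80200000 >> 12) << 10 = 0x20080000, then OR in the attributes   */
        printf("PTE = 0x%llx\n", (unsigned long long)COMBINEPTE(pa, attr));
        return 0;
    }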
|
| A D | mmu.c |
      93   size_t attr)   in _map_one_page() argument
      156  *(mmu_l3 + l3_off) = COMBINEPTE((rt_ubase_t)pa, attr);   in _map_one_page()
      183  size_t size, size_t attr)   in rt_hw_mmu_map() argument
      193  ret = _map_one_page(aspace, v_addr, p_addr, attr);   in rt_hw_mmu_map()
      612  size_t attr;   in rt_hw_mmu_setup() local
      613  switch (mdesc->attr)   in rt_hw_mmu_setup()
      616  attr = MMU_MAP_K_RWCB;   in rt_hw_mmu_setup()
      619  attr = MMU_MAP_K_RWCB;   in rt_hw_mmu_setup()
      622  attr = MMU_MAP_K_DEVICE;   in rt_hw_mmu_setup()
      625  attr = MMU_MAP_K_DEVICE;   in rt_hw_mmu_setup()
      [all …]
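
rt_hw_mmu_setup() translates each boot memory descriptor's coarse attr into a concrete kernel mapping attribute before the range is mapped. Only the assigned values are visible in the hits; the case labels below (NORMAL_MEM, NORMAL_NOCACHE_MEM, DEVICE_MEM) are assumptions based on the aarch64 setup.c entry further down, and the sketch assumes the port's mmu.h definitions of MMU_MAP_K_RWCB / MMU_MAP_K_DEVICE:

    /* Sketch only: case labels are assumed, the returned values mirror the
     * assignments visible in the rt_hw_mmu_setup() hits above. */
    static size_t mdesc_attr_to_kernel_attr(size_t desc_attr)
    {
        switch (desc_attr)
        {
        case NORMAL_MEM:         return MMU_MAP_K_RWCB;   /* cacheable RAM            */
        case NORMAL_NOCACHE_MEM: return MMU_MAP_K_RWCB;   /* this port still maps RWCB */
        case DEVICE_MEM:         return MMU_MAP_K_DEVICE; /* MMIO / device memory     */
        default:                 return MMU_MAP_K_DEVICE; /* reserved ranges          */
        }
    }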
|
| /libcpu/aarch64/common/ |
| A D | mmu.c |
      191  attr &= MMU_ATTRIB_MASK;   in _kernel_map_4K()
      192  pa |= (attr | MMU_TYPE_PAGE); /* page */   in _kernel_map_4K()
      260  attr &= MMU_ATTRIB_MASK;   in _kernel_map_2M()
      273  size_t attr)   in rt_hw_mmu_map() argument
      472  size_t attr;   in rt_hw_mmu_setup() local
      473  switch (mdesc->attr)   in rt_hw_mmu_setup()
      476  attr = MMU_MAP_K_RWCB;   in rt_hw_mmu_setup()
      479  attr = MMU_MAP_K_RW;   in rt_hw_mmu_setup()
      482  attr = MMU_MAP_K_DEVICE;   in rt_hw_mmu_setup()
      485  attr = MMU_MAP_K_DEVICE;   in rt_hw_mmu_setup()
      [all …]
|
| A D | setup.c | 264 platform_mem_desc.attr = NORMAL_MEM; in rt_hw_common_setup()
|
| /libcpu/mips/gs264/ |
| A D | mmu.c |
      148  rt_uint32_t attr)   in rt_hw_mmu_setmtt() argument
      156  *pTT = attr | (((paddrStart >> 20) + i) << 20);   in rt_hw_mmu_setmtt()
      178  mdesc->paddr_start, mdesc->attr);   in rt_hw_init_mmu_table()
      467  *(mmu_l2 + l2_off) = (loop_pa | attr);   in __rt_hw_mmu_map()
      519  ret = __rt_hw_mmu_map(mmu_info, (void*)vaddr, p_addr, pages, attr);   in _rt_hw_mmu_map()
      529  void *_rt_hw_mmu_map(rt_mmu_info *mmu_info, void* p_addr, size_t size, size_t attr)   in _rt_hw_mmu_map() argument
      543  ret = __rt_hw_mmu_map(mmu_info, (void*)vaddr, p_addr, pages, attr);   in _rt_hw_mmu_map()
      605  *(mmu_l2 + l2_off) = (loop_pa | attr);   in __rt_hw_mmu_map_auto()
      660  ret = __rt_hw_mmu_map_auto(mmu_info, (void*)vaddr, pages, attr);   in _rt_hw_mmu_map_auto()
      715  ret = _rt_hw_mmu_map(mmu_info, v_addr, p_addr, size, attr);   in rt_hw_mmu_map()
      [all …]
|
| A D | mmu.h |
      54   rt_uint32_t attr;   member
      102  void *rt_hw_mmu_map(rt_mmu_info *mmu_info, void *v_addr, void* p_addr, size_t size, size_t attr);
      103  void *rt_hw_mmu_map_auto(rt_mmu_info *mmu_info, void *v_addr, size_t size, size_t attr);
      105  void *rt_hw_mmu_map(rt_mmu_info *mmu_info, void* p_addr, size_t size, size_t attr);
|
| /libcpu/arm/am335x/ |
| A D | mmu.c |
      145  … mmu_setmtt(rt_uint32_t vaddrStart, rt_uint32_t vaddrEnd, rt_uint32_t paddrStart, rt_uint32_t attr)   in mmu_setmtt() argument
      153  *pTT = attr |(((paddrStart>>20)+i)<<20);   in mmu_setmtt()
|
| /libcpu/arm/realview-a8-vmm/ |
| A D | mmu.c |
      158  rt_uint32_t attr)   in rt_hw_mmu_setmtt() argument
      166  *pTT = attr | (((paddrStart >> 20) + i) << 20);   in rt_hw_mmu_setmtt()
|
| /libcpu/arm/arm926/ |
| A D | mmu.h | 45 rt_uint32_t attr; member
|
| A D | mmu.c |
      399  rt_uint32_t paddrStart, rt_uint32_t attr)   in mmu_setmtt() argument
      408  *pTT = attr | (((paddrStart >> 20) + i) << 20);   in mmu_setmtt()
      425  mdesc->paddr_start, mdesc->attr);   in rt_hw_mmu_init()
|
| /libcpu/sim/posix/ |
| A D | cpu_port.c |
      186  pthread_attr_t attr;   in thread_create() local
      198  pthread_attr_init(&attr);   in thread_create()
      199  pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);   in thread_create()
      202  res = pthread_create(&thread->pthread, &attr, &thread_run, (void *)thread);   in thread_create()
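
Here attr is not an MMU attribute at all but a POSIX pthread_attr_t: the simulator port creates each RT-Thread thread as a detached pthread so its resources are reclaimed automatically on exit. A self-contained example of the same pattern; worker() and its argument are illustrative:

    #include <pthread.h>
    #include <stdio.h>
    #include <unistd.h>

    static void *worker(void *arg)
    {
        printf("hello from %s\n", (const char *)arg);
        return NULL;
    }

    int main(void)
    {
        pthread_t tid;
        pthread_attr_t attr;

        pthread_attr_init(&attr);
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

        if (pthread_create(&tid, &attr, worker, "detached worker") != 0)
            return 1;

        pthread_attr_destroy(&attr);  /* attr may be destroyed once the thread exists */
        sleep(1);                     /* give the detached thread time to run */
        return 0;
    }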
|
| /libcpu/arm/s3c24x0/ |
| A D | mmu.c |
      315  void mmu_setmtt(int vaddrStart,int vaddrEnd,int paddrStart,int attr)   in mmu_setmtt() argument
      323  *pTT = attr |(((paddrStart>>20)+i)<<20);   in mmu_setmtt()
|