/*
 * Copyright (c) 2008-2014 Travis Geiselbrecht
 * Copyright (c) 2012, NVIDIA CORPORATION. All rights reserved
 *
 * Use of this source code is governed by an MIT-style
 * license that can be found in the LICENSE file or at
 * https://opensource.org/licenses/MIT
 */
#ifndef __ARCH_ARM_MMU_H
#define __ARCH_ARM_MMU_H

#define KB                (1024UL)
#define MB                (1024UL*1024UL)
#define GB                (1024UL*1024UL*1024UL)

#define SECTION_SIZE      MB
#define SUPERSECTION_SIZE (16 * MB)
#define TT_ENTRY_COUNT    (4096)
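
/*
 * Illustrative note (not part of this header): with 1MB sections, a full
 * 4GB address space takes exactly TT_ENTRY_COUNT (4096) first-level
 * entries, and the L1 table index for a virtual address is its top 12 bits:
 *
 *   uint32_t l1_index = va / SECTION_SIZE;   // equivalently, va >> 20
 */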

#if defined(ARM_ISA_ARMV6) || defined(ARM_ISA_ARMV7)

#define MMU_MEMORY_L1_DESCRIPTOR_INVALID                 (0x0 << 0)
#define MMU_MEMORY_L1_DESCRIPTOR_PAGE_TABLE              (0x1 << 0)
#define MMU_MEMORY_L1_DESCRIPTOR_SECTION                 (0x2 << 0)
#define MMU_MEMORY_L1_DESCRIPTOR_SUPERSECTION            ((0x2 << 0) | (0x1 << 18))
#define MMU_MEMORY_L1_DESCRIPTOR_MASK                    (0x3 << 0)

#define MMU_MEMORY_L2_DESCRIPTOR_INVALID                 (0x0 << 0)
#define MMU_MEMORY_L2_DESCRIPTOR_LARGE_PAGE              (0x1 << 0)
#define MMU_MEMORY_L2_DESCRIPTOR_SMALL_PAGE              (0x2 << 0)
#define MMU_MEMORY_L2_DESCRIPTOR_SMALL_PAGE_XN           (0x3 << 0)
#define MMU_MEMORY_L2_DESCRIPTOR_MASK                    (0x3 << 0)

/* C, B and TEX[2:0] encodings without TEX remap (for first level descriptors) */
/*                                                   TEX     |     CB      */
#define MMU_MEMORY_L1_TYPE_STRONGLY_ORDERED              ((0x0 << 12) | (0x0 << 2))
#define MMU_MEMORY_L1_TYPE_DEVICE_SHARED                 ((0x0 << 12) | (0x1 << 2))
#define MMU_MEMORY_L1_TYPE_DEVICE_NON_SHARED             ((0x2 << 12) | (0x0 << 2))
#define MMU_MEMORY_L1_TYPE_NORMAL                        ((0x1 << 12) | (0x0 << 2))
#define MMU_MEMORY_L1_TYPE_NORMAL_WRITE_THROUGH          ((0x0 << 12) | (0x2 << 2))
#define MMU_MEMORY_L1_TYPE_NORMAL_WRITE_BACK_NO_ALLOCATE ((0x0 << 12) | (0x3 << 2))
#define MMU_MEMORY_L1_TYPE_NORMAL_WRITE_BACK_ALLOCATE    ((0x1 << 12) | (0x3 << 2))
#define MMU_MEMORY_L1_TYPE_MASK                          ((0x7 << 12) | (0x3 << 2))

#define MMU_MEMORY_L1_TYPE_INNER_WRITE_BACK_ALLOCATE     ((0x4 << 12) | (0x1 << 2))

/* C, B and TEX[2:0] encodings without TEX remap (for second level descriptors) */
/*                                                   TEX    |     CB      */
#define MMU_MEMORY_L2_TYPE_STRONGLY_ORDERED              ((0x0 << 6) | (0x0 << 2))
#define MMU_MEMORY_L2_TYPE_DEVICE_SHARED                 ((0x0 << 6) | (0x1 << 2))
#define MMU_MEMORY_L2_TYPE_DEVICE_NON_SHARED             ((0x2 << 6) | (0x0 << 2))
#define MMU_MEMORY_L2_TYPE_NORMAL                        ((0x1 << 6) | (0x0 << 2))
#define MMU_MEMORY_L2_TYPE_NORMAL_WRITE_THROUGH          ((0x0 << 6) | (0x2 << 2))
#define MMU_MEMORY_L2_TYPE_NORMAL_WRITE_BACK_NO_ALLOCATE ((0x0 << 6) | (0x3 << 2))
#define MMU_MEMORY_L2_TYPE_NORMAL_WRITE_BACK_ALLOCATE    ((0x1 << 6) | (0x3 << 2))
#define MMU_MEMORY_L2_TYPE_MASK                          ((0x7 << 6) | (0x3 << 2))

#define MMU_MEMORY_DOMAIN_MEM                            (0)

/*
 * AP (Access Permissions, encoded as APX:AP[1:0])
 * +-------------------------+
 * | AP        P         U   |
 * +-------------------------+
 * | 000      NA        NA   |
 * | 001      RW        NA   |
 * | 010      RW        R    |
 * | 011      RW        RW   |
 * | 101      R         NA   |
 * | 111      R         R    |
 * +-------------------------+
 *
 * NA = No Access
 * RW = Read/Write
 * R  = Read only
 *
 * P = Privileged modes
 * U = Unprivileged (user) modes
 */
#define MMU_MEMORY_L1_AP_P_NA_U_NA          ((0x0 << 15) | (0x0 << 10))
#define MMU_MEMORY_L1_AP_P_RW_U_RO          ((0x0 << 15) | (0x2 << 10)) /* Obsolete */
#define MMU_MEMORY_L1_AP_P_RW_U_RW          ((0x0 << 15) | (0x3 << 10))
#define MMU_MEMORY_L1_AP_P_RW_U_NA          ((0x0 << 15) | (0x1 << 10))
#define MMU_MEMORY_L1_AP_P_RO_U_RO          ((0x1 << 15) | (0x3 << 10))
#define MMU_MEMORY_L1_AP_P_RO_U_NA          ((0x1 << 15) | (0x1 << 10))
#define MMU_MEMORY_L1_AP_MASK               ((0x1 << 15) | (0x3 << 10))

#define MMU_MEMORY_L2_AP_P_NA_U_NA          ((0x0 << 9) | (0x0 << 4))
#define MMU_MEMORY_L2_AP_P_RW_U_RO          ((0x0 << 9) | (0x2 << 4)) /* Obsolete */
#define MMU_MEMORY_L2_AP_P_RW_U_RW          ((0x0 << 9) | (0x3 << 4))
#define MMU_MEMORY_L2_AP_P_RW_U_NA          ((0x0 << 9) | (0x1 << 4))
#define MMU_MEMORY_L2_AP_P_RO_U_RO          ((0x1 << 9) | (0x3 << 4))
#define MMU_MEMORY_L2_AP_P_RO_U_NA          ((0x1 << 9) | (0x1 << 4))
#define MMU_MEMORY_L2_AP_MASK               ((0x1 << 9) | (0x3 << 4))
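
/*
 * Illustrative example (not part of this header): the AP masks let a
 * caller swap permissions on an existing entry without disturbing the
 * other fields, e.g. downgrading an L2 entry to read-only for all modes:
 *
 *   pte = (pte & ~MMU_MEMORY_L2_AP_MASK) | MMU_MEMORY_L2_AP_P_RO_U_RO;
 */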

#define MMU_MEMORY_L1_PAGETABLE_NON_SECURE  (1 << 3)

#define MMU_MEMORY_L1_SECTION_NON_SECURE    (1 << 19)
#define MMU_MEMORY_L1_SECTION_SHAREABLE     (1 << 16)
#define MMU_MEMORY_L1_SECTION_NON_GLOBAL    (1 << 17)
#define MMU_MEMORY_L1_SECTION_XN            (1 << 4)

#define MMU_MEMORY_L1_CB_SHIFT              2
#define MMU_MEMORY_L1_TEX_SHIFT             12

#define MMU_MEMORY_SET_L1_INNER(val)        (((val) & 0x3) << MMU_MEMORY_L1_CB_SHIFT)
#define MMU_MEMORY_SET_L1_OUTER(val)        (((val) & 0x3) << MMU_MEMORY_L1_TEX_SHIFT)
#define MMU_MEMORY_SET_L1_CACHEABLE_MEM     (0x4 << MMU_MEMORY_L1_TEX_SHIFT)

#define MMU_MEMORY_L2_SHAREABLE             (1 << 10)
#define MMU_MEMORY_L2_NON_GLOBAL            (1 << 11)

#define MMU_MEMORY_L2_CB_SHIFT              2
#define MMU_MEMORY_L2_TEX_SHIFT             6

#define MMU_MEMORY_NON_CACHEABLE            0
#define MMU_MEMORY_WRITE_BACK_ALLOCATE      1
#define MMU_MEMORY_WRITE_THROUGH_NO_ALLOCATE 2
#define MMU_MEMORY_WRITE_BACK_NO_ALLOCATE   3

#define MMU_MEMORY_SET_L2_INNER(val)        (((val) & 0x3) << MMU_MEMORY_L2_CB_SHIFT)
#define MMU_MEMORY_SET_L2_OUTER(val)        (((val) & 0x3) << MMU_MEMORY_L2_TEX_SHIFT)
#define MMU_MEMORY_SET_L2_CACHEABLE_MEM     (0x4 << MMU_MEMORY_L2_TEX_SHIFT)
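
/*
 * Illustrative example (not part of this header): the SET_*_INNER/OUTER
 * helpers build the TEX-remap-free "cacheable memory" encoding, where
 * TEX[2]=1, TEX[1:0] selects the outer policy and CB selects the inner
 * policy. For instance, inner write-back/write-allocate with outer
 * write-through at L1:
 *
 *   uint32_t type = MMU_MEMORY_SET_L1_CACHEABLE_MEM |
 *                   MMU_MEMORY_SET_L1_OUTER(MMU_MEMORY_WRITE_THROUGH_NO_ALLOCATE) |
 *                   MMU_MEMORY_SET_L1_INNER(MMU_MEMORY_WRITE_BACK_ALLOCATE);
 */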

#define MMU_MEMORY_L1_SECTION_ADDR(x)       ((x) & ~((1<<20)-1))
#define MMU_MEMORY_L1_PAGE_TABLE_ADDR(x)    ((x) & ~((1<<10)-1))

#define MMU_MEMORY_L2_SMALL_PAGE_ADDR(x)    ((x) & ~((1<<12)-1))
#define MMU_MEMORY_L2_LARGE_PAGE_ADDR(x)    ((x) & ~((1<<16)-1))
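
/*
 * Illustrative example (not part of this header): a complete L1 section
 * entry for normal kernel memory combines the section-aligned physical
 * base with the descriptor type, memory type and permissions:
 *
 *   uint32_t sec = MMU_MEMORY_L1_SECTION_ADDR(pa) |
 *                  MMU_MEMORY_L1_DESCRIPTOR_SECTION |
 *                  MMU_MEMORY_L1_TYPE_NORMAL_WRITE_BACK_ALLOCATE |
 *                  MMU_MEMORY_L1_AP_P_RW_U_NA;
 */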

#define MMU_MEMORY_TTBR_RGN(x)              (((x) & 0x3) << 3)
/* IRGN[1:0] is encoded as: IRGN[0] in TTBRx[6], and IRGN[1] in TTBRx[0] */
#define MMU_MEMORY_TTBR_IRGN(x)             ((((x) & 0x1) << 6) | \
                                            ((((x) >> 1) & 0x1) << 0))
#define MMU_MEMORY_TTBR_S                   (1 << 1)
#define MMU_MEMORY_TTBR_NOS                 (1 << 5)
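
/*
 * Worked example: MMU_MEMORY_WRITE_BACK_ALLOCATE is 1, so
 * MMU_MEMORY_TTBR_IRGN(1) sets IRGN[0] (TTBRx bit 6) and clears IRGN[1]
 * (TTBRx bit 0), yielding 0x40, while MMU_MEMORY_TTBR_RGN(1) yields 0x08.
 */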

/* Default configuration for main kernel page table:
 *    - section mappings for memory
 *    - do cached translation walks
 */

/* Enable cached page table walks:
 * inner/outer (IRGN/RGN): write-back + write-allocate
 * (select inner shareable on SMP)
 */
#if WITH_SMP
#define MMU_TTBRx_SHARABLE_FLAGS (MMU_MEMORY_TTBR_S | MMU_MEMORY_TTBR_NOS)
#else
#define MMU_TTBRx_SHARABLE_FLAGS (0)
#endif
#define MMU_TTBRx_FLAGS \
    (MMU_MEMORY_TTBR_RGN(MMU_MEMORY_WRITE_BACK_ALLOCATE) |\
     MMU_MEMORY_TTBR_IRGN(MMU_MEMORY_WRITE_BACK_ALLOCATE) | \
     MMU_TTBRx_SHARABLE_FLAGS)
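
/*
 * Illustrative usage (hypothetical accessor name): when installing a
 * translation table, the walk attributes are OR'd into the table's
 * physical address before writing it to TTBR0/TTBR1, roughly:
 *
 *   write_ttbr0(kernel_tt_phys | MMU_TTBRx_FLAGS);  // write_ttbr0() is an
 *                                                   // assumed helper
 */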

/* Section mapping, TEX[2:0]=001, CB=11, AP[2:0]=001; S=1 when SMP */
#if WITH_SMP
#define MMU_KERNEL_L1_PTE_FLAGS \
    (MMU_MEMORY_L1_DESCRIPTOR_SECTION | \
     MMU_MEMORY_L1_TYPE_NORMAL_WRITE_BACK_ALLOCATE | \
     MMU_MEMORY_L1_AP_P_RW_U_NA | \
     MMU_MEMORY_L1_SECTION_SHAREABLE)
#else
#define MMU_KERNEL_L1_PTE_FLAGS \
    (MMU_MEMORY_L1_DESCRIPTOR_SECTION | \
     MMU_MEMORY_L1_TYPE_NORMAL_WRITE_BACK_ALLOCATE | \
     MMU_MEMORY_L1_AP_P_RW_U_NA)
#endif

#define MMU_INITIAL_MAP_STRONGLY_ORDERED \
    (MMU_MEMORY_L1_DESCRIPTOR_SECTION | \
    MMU_MEMORY_L1_TYPE_STRONGLY_ORDERED | \
    MMU_MEMORY_L1_AP_P_RW_U_NA)

#define MMU_INITIAL_MAP_DEVICE \
    (MMU_MEMORY_L1_DESCRIPTOR_SECTION | \
    MMU_MEMORY_L1_TYPE_DEVICE_SHARED | \
    MMU_MEMORY_L1_AP_P_RW_U_NA)
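
/*
 * Illustrative example (not part of this header): an early boot mapping
 * of a device region at physical address pa would combine one of the
 * MMU_INITIAL_MAP_* bundles with the section base:
 *
 *   tt[va / SECTION_SIZE] = MMU_MEMORY_L1_SECTION_ADDR(pa) |
 *                           MMU_INITIAL_MAP_DEVICE;
 */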

#endif /* ARM_ISA_ARMV6 || ARM_ISA_ARMV7 */

#ifndef ASSEMBLY

#include <sys/types.h>
#include <assert.h>
#include <lk/compiler.h>
#include <arch/arm.h>

__BEGIN_CDECLS

void arm_mmu_early_init(void);
void arm_mmu_init(void);
status_t arm_vtop(addr_t va, addr_t *pa);

/* tlb routines */

/* complete a TLB invalidate: flush the branch predictor, then
 * synchronize with barriers */
static inline void arm_after_invalidate_tlb_barrier(void) {
#if WITH_SMP
    arm_write_bpiallis(0);
#else
    arm_write_bpiall(0);
#endif
    DSB;
    ISB;
}

/* invalidate the entire TLB (all ASIDs), without barriers */
static inline void arm_invalidate_tlb_global_no_barrier(void) {
#if WITH_SMP
    arm_write_tlbiallis(0);
#else
    arm_write_tlbiall(0);
#endif
}

/* invalidate the entire TLB and synchronize */
static inline void arm_invalidate_tlb_global(void) {
    DSB;
    arm_invalidate_tlb_global_no_barrier();
    arm_after_invalidate_tlb_barrier();
}

/* invalidate one page's entries for all ASIDs, without barriers */
static inline void arm_invalidate_tlb_mva_no_barrier(vaddr_t va) {
#if WITH_SMP
    arm_write_tlbimvaais(va & 0xfffff000);
#else
    arm_write_tlbimvaa(va & 0xfffff000);
#endif
}

/* invalidate one page's entries for all ASIDs and synchronize */
static inline void arm_invalidate_tlb_mva(vaddr_t va) {
    DSB;
    arm_invalidate_tlb_mva_no_barrier(va);
    arm_after_invalidate_tlb_barrier();
}

/* invalidate all entries for one ASID, without barriers */
static inline void arm_invalidate_tlb_asid_no_barrier(uint8_t asid) {
#if WITH_SMP
    arm_write_tlbiasidis(asid);
#else
    arm_write_tlbiasid(asid);
#endif
}

/* invalidate all entries for one ASID and synchronize */
static inline void arm_invalidate_tlb_asid(uint8_t asid) {
    DSB;
    arm_invalidate_tlb_asid_no_barrier(asid);
    arm_after_invalidate_tlb_barrier();
}

/* invalidate one page's entry for a single ASID, without barriers */
static inline void arm_invalidate_tlb_mva_asid_no_barrier(vaddr_t va, uint8_t asid) {
#if WITH_SMP
    arm_write_tlbimvais((va & 0xfffff000) | asid);
#else
    arm_write_tlbimva((va & 0xfffff000) | asid);
#endif
}

/* invalidate one page's entry for a single ASID and synchronize */
static inline void arm_invalidate_tlb_mva_asid(vaddr_t va, uint8_t asid) {
    DSB;
    arm_invalidate_tlb_mva_asid_no_barrier(va, asid);
    arm_after_invalidate_tlb_barrier();
}
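
/*
 * Illustrative usage (hypothetical caller, not part of this header):
 * after modifying a page table entry, the stale TLB entry must be
 * dropped before the new mapping is relied upon, e.g.:
 *
 *   l2_table[index] = new_pte;               // assumed page table update
 *   arm_invalidate_tlb_mva_asid(va, asid);
 */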

__END_CDECLS

#endif /* ASSEMBLY */

#endif /* __ARCH_ARM_MMU_H */