1 /*
2 * Copyright (c) 2006 - 2021, RT-Thread Development Team
3 * Copyright (c) 2014 - 2020 Xilinx, Inc. All rights reserved.
4 * Copyright (c) 2021 WangHuachen. All rights reserved.
5 * SPDX-License-Identifier: MIT
6 *
7 * Change Logs:
8 * Date Author Notes
9 * 2020-03-19 WangHuachen first version
10 * 2021-05-10 WangHuachen add more functions
11 */
12
13 #include <stdint.h>
14 #include <rthw.h>
15 #include <rtdef.h>
16
17 #include "xpseudo_asm_gcc.h"
18 #include "xreg_cortexr5.h"
19
#define IRQ_FIQ_MASK 0xC0 /* Mask IRQ and FIQ interrupts in cpsr */

/* Local aliases matching the Xilinx BSP naming used throughout this file. */
typedef intptr_t INTPTR;
typedef rt_uint32_t u32;
24
/*
 * Thin wrappers around the "mcr" coprocessor-write instruction for the CP15
 * cache maintenance operations used below. One spelling per supported
 * compiler: GCC extended inline asm vs. the IAR (__ICCARM__) dialect.
 * Each macro writes `param` (an address or set/way descriptor) to the
 * corresponding CP15 maintenance register named by the XREG_* string.
 */
#if defined (__GNUC__)
#define asm_inval_dc_line_mva_poc(param) __asm__ __volatile__("mcr " \
        XREG_CP15_INVAL_DC_LINE_MVA_POC :: "r" (param))

#define asm_clean_inval_dc_line_sw(param) __asm__ __volatile__("mcr " \
        XREG_CP15_CLEAN_INVAL_DC_LINE_SW :: "r" (param))

#define asm_clean_inval_dc_line_mva_poc(param) __asm__ __volatile__("mcr " \
        XREG_CP15_CLEAN_INVAL_DC_LINE_MVA_POC :: "r" (param))

#define asm_inval_ic_line_mva_pou(param) __asm__ __volatile__("mcr " \
        XREG_CP15_INVAL_IC_LINE_MVA_POU :: "r" (param))
#elif defined (__ICCARM__)
#define asm_inval_dc_line_mva_poc(param) __asm volatile("mcr " \
        XREG_CP15_INVAL_DC_LINE_MVA_POC :: "r" (param))

#define asm_clean_inval_dc_line_sw(param) __asm volatile("mcr " \
        XREG_CP15_CLEAN_INVAL_DC_LINE_SW :: "r" (param))

#define asm_clean_inval_dc_line_mva_poc(param) __asm volatile("mcr " \
        XREG_CP15_CLEAN_INVAL_DC_LINE_MVA_POC :: "r" (param))

#define asm_inval_ic_line_mva_pou(param) __asm volatile("mcr " \
        XREG_CP15_INVAL_IC_LINE_MVA_POU :: "r" (param))
#endif
50
/* Forward declarations for the Xilinx-style L1 cache maintenance API
 * implemented below (D-cache: enable/disable, whole-cache, range and
 * single-line operations; I-cache likewise). */
void Xil_DCacheEnable(void);
void Xil_DCacheDisable(void);
void Xil_DCacheInvalidate(void);
void Xil_DCacheInvalidateRange(INTPTR adr, u32 len);
void Xil_DCacheFlush(void);
void Xil_DCacheFlushRange(INTPTR adr, u32 len);
void Xil_DCacheInvalidateLine(INTPTR adr);
void Xil_DCacheFlushLine(INTPTR adr);
void Xil_DCacheStoreLine(INTPTR adr);
void Xil_ICacheEnable(void);
void Xil_ICacheDisable(void);
void Xil_ICacheInvalidate(void);
void Xil_ICacheInvalidateRange(INTPTR adr, u32 len);
void Xil_ICacheInvalidateLine(INTPTR adr);
65
Xil_DCacheEnable(void)66 void Xil_DCacheEnable(void)
67 {
68 register u32 CtrlReg;
69
70 /* enable caches only if they are disabled */
71 #if defined (__GNUC__)
72 CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
73 #elif defined (__ICCARM__)
74 mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
75 #endif
76 if ((CtrlReg & XREG_CP15_CONTROL_C_BIT) == 0x00000000U)
77 {
78 /* invalidate the Data cache */
79 Xil_DCacheInvalidate();
80
81 /* enable the Data cache */
82 CtrlReg |= (XREG_CP15_CONTROL_C_BIT);
83
84 mtcp(XREG_CP15_SYS_CONTROL, CtrlReg);
85 }
86 }
87
Xil_DCacheDisable(void)88 void Xil_DCacheDisable(void)
89 {
90 register u32 CtrlReg;
91
92 /* clean and invalidate the Data cache */
93 Xil_DCacheFlush();
94
95 /* disable the Data cache */
96 #if defined (__GNUC__)
97 CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
98 #elif defined (__ICCARM__)
99 mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
100 #endif
101
102 CtrlReg &= ~(XREG_CP15_CONTROL_C_BIT);
103
104 mtcp(XREG_CP15_SYS_CONTROL, CtrlReg);
105 }
106
Xil_DCacheInvalidate(void)107 void Xil_DCacheInvalidate(void)
108 {
109 u32 currmask;
110
111 currmask = mfcpsr();
112 mtcpsr(currmask | IRQ_FIQ_MASK);
113
114 mtcp(XREG_CP15_CACHE_SIZE_SEL, 0);
115
116 /*invalidate all D cache*/
117 mtcp(XREG_CP15_INVAL_DC_ALL, 0);
118
119 mtcpsr(currmask);
120 }
121
/*
 * Invalidate the single data cache line containing `adr` (to the point of
 * coherency). The address is rounded down to a 32-byte line boundary.
 */
void Xil_DCacheInvalidateLine(INTPTR adr)
{
    u32 saved_cpsr = mfcpsr();

    mtcpsr(saved_cpsr | IRQ_FIQ_MASK);

    /* Select the L1 D-cache, then invalidate the line holding adr. */
    mtcp(XREG_CP15_CACHE_SIZE_SEL, 0);
    mtcp(XREG_CP15_INVAL_DC_LINE_MVA_POC, (adr & (~0x1F)));

    /* Wait for invalidate to complete */
    dsb();

    mtcpsr(saved_cpsr);
}
137
/*
 * Invalidate the data cache for the byte range [adr, adr + len).
 *
 * Lines only partially covered by the range (an unaligned start or end)
 * are flushed (clean + invalidate) rather than just invalidated, so bytes
 * outside the range that share those lines are written back to memory
 * first. Fully covered interior lines are simply invalidated.
 * IRQ and FIQ are masked while the operation runs; len == 0 is a no-op.
 */
void Xil_DCacheInvalidateRange(INTPTR adr, u32 len)
{
    const u32 cacheline = 32U; /* line size in bytes; matches the ~0x1F masks used elsewhere in this file */
    u32 end;
    u32 tempadr = adr;
    u32 tempend;
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    if (len != 0U)
    {
        end = tempadr + len;
        tempend = end;
        /* Select L1 Data cache in CSSR */
        mtcp(XREG_CP15_CACHE_SIZE_SEL, 0U);

        /* Unaligned start: flush the first line so bytes below adr are
         * preserved in memory before the line is dropped. */
        if ((tempadr & (cacheline - 1U)) != 0U)
        {
            tempadr &= (~(cacheline - 1U));

            Xil_DCacheFlushLine(tempadr);
        }
        /* Unaligned end: likewise preserve bytes at/after adr + len. */
        if ((tempend & (cacheline - 1U)) != 0U)
        {
            tempend &= (~(cacheline - 1U));

            Xil_DCacheFlushLine(tempend);
        }

        /* Invalidate every line from the (aligned) start up to, but not
         * including, the (aligned) end. */
        while (tempadr < tempend)
        {

            /* Invalidate Data cache line */
            asm_inval_dc_line_mva_poc(tempadr);

            tempadr += cacheline;
        }
    }

    dsb();
    mtcpsr(currmask);
}
182
Xil_DCacheFlush(void)183 void Xil_DCacheFlush(void)
184 {
185 register u32 CsidReg, C7Reg;
186 u32 CacheSize, LineSize, NumWays;
187 u32 Way, WayIndex, Set, SetIndex, NumSet;
188 u32 currmask;
189
190 currmask = mfcpsr();
191 mtcpsr(currmask | IRQ_FIQ_MASK);
192
193 /* Select cache level 0 and D cache in CSSR */
194 mtcp(XREG_CP15_CACHE_SIZE_SEL, 0);
195
196 #if defined (__GNUC__)
197 CsidReg = mfcp(XREG_CP15_CACHE_SIZE_ID);
198 #elif defined (__ICCARM__)
199 mfcp(XREG_CP15_CACHE_SIZE_ID, CsidReg);
200 #endif
201 /* Determine Cache Size */
202
203 CacheSize = (CsidReg >> 13U) & 0x000001FFU;
204 CacheSize += 0x00000001U;
205 CacheSize *= (u32)128; /* to get number of bytes */
206
207 /* Number of Ways */
208 NumWays = (CsidReg & 0x000003ffU) >> 3U;
209 NumWays += 0x00000001U;
210
211 /* Get the cacheline size, way size, index size from csidr */
212 LineSize = (CsidReg & 0x00000007U) + 0x00000004U;
213
214 NumSet = CacheSize / NumWays;
215 NumSet /= (0x00000001U << LineSize);
216
217 Way = 0U;
218 Set = 0U;
219
220 /* Invalidate all the cachelines */
221 for (WayIndex = 0U; WayIndex < NumWays; WayIndex++)
222 {
223 for (SetIndex = 0U; SetIndex < NumSet; SetIndex++)
224 {
225 C7Reg = Way | Set;
226 /* Flush by Set/Way */
227 asm_clean_inval_dc_line_sw(C7Reg);
228
229 Set += (0x00000001U << LineSize);
230 }
231 Set = 0U;
232 Way += 0x40000000U;
233 }
234
235 /* Wait for flush to complete */
236 dsb();
237 mtcpsr(currmask);
238
239 mtcpsr(currmask);
240 }
241
/*
 * Clean and invalidate (flush) the single data cache line containing
 * `adr`, to the point of coherency. The address is rounded down to a
 * 32-byte line boundary.
 */
void Xil_DCacheFlushLine(INTPTR adr)
{
    u32 saved_cpsr = mfcpsr();

    mtcpsr(saved_cpsr | IRQ_FIQ_MASK);

    /* Select the L1 D-cache, then clean + invalidate the line. */
    mtcp(XREG_CP15_CACHE_SIZE_SEL, 0);
    mtcp(XREG_CP15_CLEAN_INVAL_DC_LINE_MVA_POC, (adr & (~0x1F)));

    /* Wait for flush to complete */
    dsb();

    mtcpsr(saved_cpsr);
}
257
/*
 * Clean and invalidate (flush) the data cache for [adr, adr + len).
 * The start is rounded down to a 32-byte line boundary and every line up
 * to adr + len is flushed. len == 0 is a no-op. IRQ/FIQ are masked.
 */
void Xil_DCacheFlushRange(INTPTR adr, u32 len)
{
    const u32 line_bytes = 32U;
    u32 cur = adr;
    u32 stop;
    u32 saved_cpsr = mfcpsr();

    mtcpsr(saved_cpsr | IRQ_FIQ_MASK);

    if (len != 0x00000000U)
    {
        /* Back the start up to a line boundary and walk the range. */
        stop = cur + len;
        cur &= ~(line_bytes - 1U);

        for (; cur < stop; cur += line_bytes)
        {
            /* Clean + invalidate one line to the point of coherency. */
            asm_clean_inval_dc_line_mva_poc(cur);
        }
    }

    dsb();
    mtcpsr(saved_cpsr);
}
287
/*
 * Clean (write back without invalidating) the data cache line containing
 * `adr`. The address is rounded down to a 32-byte line boundary.
 */
void Xil_DCacheStoreLine(INTPTR adr)
{
    u32 saved_cpsr = mfcpsr();

    mtcpsr(saved_cpsr | IRQ_FIQ_MASK);

    /* Select the L1 D-cache and clean the line holding adr. */
    mtcp(XREG_CP15_CACHE_SIZE_SEL, 0);
    mtcp(XREG_CP15_CLEAN_DC_LINE_MVA_POC, (adr & (~0x1F)));

    /* Wait for store to complete */
    dsb();
    isb();

    mtcpsr(saved_cpsr);
}
304
Xil_ICacheEnable(void)305 void Xil_ICacheEnable(void)
306 {
307 register u32 CtrlReg;
308
309 /* enable caches only if they are disabled */
310 #if defined (__GNUC__)
311 CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
312 #elif defined (__ICCARM__)
313 mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
314 #endif
315 if ((CtrlReg & XREG_CP15_CONTROL_I_BIT) == 0x00000000U)
316 {
317 /* invalidate the instruction cache */
318 mtcp(XREG_CP15_INVAL_IC_POU, 0);
319
320 /* enable the instruction cache */
321 CtrlReg |= (XREG_CP15_CONTROL_I_BIT);
322
323 mtcp(XREG_CP15_SYS_CONTROL, CtrlReg);
324 }
325 }
326
Xil_ICacheDisable(void)327 void Xil_ICacheDisable(void)
328 {
329 register u32 CtrlReg;
330
331 dsb();
332
333 /* invalidate the instruction cache */
334 mtcp(XREG_CP15_INVAL_IC_POU, 0);
335
336 /* disable the instruction cache */
337 #if defined (__GNUC__)
338 CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
339 #elif defined (__ICCARM__)
340 mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
341 #endif
342
343 CtrlReg &= ~(XREG_CP15_CONTROL_I_BIT);
344
345 mtcp(XREG_CP15_SYS_CONTROL, CtrlReg);
346 }
347
Xil_ICacheInvalidate(void)348 void Xil_ICacheInvalidate(void)
349 {
350 u32 currmask;
351
352 currmask = mfcpsr();
353 mtcpsr(currmask | IRQ_FIQ_MASK);
354
355 mtcp(XREG_CP15_CACHE_SIZE_SEL, 1);
356
357 /* invalidate the instruction cache */
358 mtcp(XREG_CP15_INVAL_IC_POU, 0);
359
360 /* Wait for invalidate to complete */
361 dsb();
362 mtcpsr(currmask);
363 }
364
/*
 * Invalidate the single instruction cache line containing `adr` (to the
 * point of unification). The address is rounded down to a 32-byte line
 * boundary.
 */
void Xil_ICacheInvalidateLine(INTPTR adr)
{
    u32 saved_cpsr = mfcpsr();

    mtcpsr(saved_cpsr | IRQ_FIQ_MASK);

    /* Select the I-cache, then invalidate the line holding adr. */
    mtcp(XREG_CP15_CACHE_SIZE_SEL, 1);
    mtcp(XREG_CP15_INVAL_IC_LINE_MVA_POU, (adr & (~0x1F)));

    /* Wait for invalidate to complete */
    dsb();
    mtcpsr(saved_cpsr);
}
379
/*
 * Invalidate the instruction cache for the byte range [adr, adr + len).
 * The start is rounded down to a 32-byte line boundary and every line up
 * to adr + len is invalidated. len == 0 is a no-op. IRQ/FIQ are masked.
 */
void Xil_ICacheInvalidateRange(INTPTR adr, u32 len)
{
    const u32 line_bytes = 32U;
    u32 cur = adr;
    u32 stop;
    u32 saved_cpsr = mfcpsr();

    mtcpsr(saved_cpsr | IRQ_FIQ_MASK);

    if (len != 0x00000000U)
    {
        /* Back the start up to a line boundary and walk the range. */
        stop = cur + len;
        cur &= ~(line_bytes - 1U);

        /* Select the L1 I-cache in the cache size selection register. */
        mtcp(XREG_CP15_CACHE_SIZE_SEL, 1U);

        for (; cur < stop; cur += line_bytes)
        {
            /* Invalidate one I-cache line to the point of unification. */
            asm_inval_ic_line_mva_pou(cur);
        }
    }

    /* Wait for invalidate to complete */
    dsb();
    mtcpsr(saved_cpsr);
}
414
rt_hw_cpu_icache_ops(int ops,void * addr,int size)415 void rt_hw_cpu_icache_ops(int ops, void *addr, int size)
416 {
417 if (ops == RT_HW_CACHE_INVALIDATE)
418 Xil_ICacheInvalidateRange((INTPTR)addr, size);
419 }
420
rt_hw_cpu_dcache_ops(int ops,void * addr,int size)421 void rt_hw_cpu_dcache_ops(int ops, void *addr, int size)
422 {
423 if (ops == RT_HW_CACHE_FLUSH)
424 Xil_DCacheFlushRange((intptr_t)addr, size);
425 else if (ops == RT_HW_CACHE_INVALIDATE)
426 Xil_DCacheInvalidateRange((intptr_t)addr, size);
427 }
428
/*
 * Report whether the instruction cache is enabled.
 * Returns the raw SCTLR I bit: non-zero when the I-cache is on, 0 when off.
 */
rt_base_t rt_hw_cpu_icache_status(void)
{
    register u32 CtrlReg;
#if defined (__GNUC__)
    CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
#elif defined (__ICCARM__)
    mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
#endif
    return CtrlReg & XREG_CP15_CONTROL_I_BIT;
}
439
/*
 * Report whether the data cache is enabled.
 * Returns the raw SCTLR C bit: non-zero when the D-cache is on, 0 when off.
 */
rt_base_t rt_hw_cpu_dcache_status(void)
{
    register u32 CtrlReg;
#if defined (__GNUC__)
    CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
#elif defined (__ICCARM__)
    mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
#endif
    return CtrlReg & XREG_CP15_CONTROL_C_BIT;
}
450