/******************************************************************************
 * @file     cachel1_armv7.h
 * @brief    CMSIS Level 1 Cache API for Armv7-M and later
 * @version  V1.0.1
 * @date     19. April 2021
 ******************************************************************************/
/*
 * Copyright (c) 2020-2021 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#if defined ( __ICCARM__ )
  #pragma system_include         /* treat file as system include file for MISRA check */
#elif defined (__clang__)
  #pragma clang system_header    /* treat file as system include file */
#endif

#ifndef ARM_CACHEL1_ARMV7_H
#define ARM_CACHEL1_ARMV7_H

/**
  \ingroup  CMSIS_Core_FunctionInterface
  \defgroup CMSIS_Core_CacheFunctions Cache Functions
  \brief    Functions that configure Instruction and Data cache.
  @{
 */

/* Cache Size ID Register Macros */
#define CCSIDR_WAYS(x)         (((x) & SCB_CCSIDR_ASSOCIATIVITY_Msk) >> SCB_CCSIDR_ASSOCIATIVITY_Pos)
#define CCSIDR_SETS(x)         (((x) & SCB_CCSIDR_NUMSETS_Msk      ) >> SCB_CCSIDR_NUMSETS_Pos      )
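
/* Usage sketch (illustrative only, not part of the CMSIS API): the CCSIDR fields
 * encode "count minus one", so the macros above return (ways - 1) and (sets - 1).
 * Assuming the device header provides SCB and the SCB_CCSIDR_LINESIZE_* definitions,
 * the L1 D-Cache geometry could be derived like this:
 *
 *   SCB->CSSELR = 0U;                            // select Level 1 data cache
 *   __DSB();
 *   uint32_t ccsidr = SCB->CCSIDR;
 *   uint32_t ways   = CCSIDR_WAYS(ccsidr) + 1U;  // associativity
 *   uint32_t sets   = CCSIDR_SETS(ccsidr) + 1U;  // number of sets
 *   uint32_t line   = 4U << (((ccsidr & SCB_CCSIDR_LINESIZE_Msk) >> SCB_CCSIDR_LINESIZE_Pos) + 2U);
 *   uint32_t bytes  = ways * sets * line;        // total D-Cache size in bytes
 */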

#ifndef   __SCB_DCACHE_LINE_SIZE
  #define __SCB_DCACHE_LINE_SIZE  32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
#endif

#ifndef   __SCB_ICACHE_LINE_SIZE
  #define __SCB_ICACHE_LINE_SIZE  32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
#endif
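
/* Usage sketch (illustrative only): buffers maintained with the by-address functions
 * below are easiest to handle when they are aligned to, and sized in multiples of,
 * the cache line size, so that no unrelated data shares a cache line with them.
 * __ALIGNED() is the CMSIS compiler abstraction for alignment; "dma_rx_buf" is a
 * hypothetical name.
 *
 *   __ALIGNED(__SCB_DCACHE_LINE_SIZE) static uint8_t dma_rx_buf[2U * __SCB_DCACHE_LINE_SIZE];
 */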

/**
  \brief   Enable I-Cache
  \details Turns on I-Cache
  */
__STATIC_FORCEINLINE void SCB_EnableICache (void)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    if (SCB->CCR & SCB_CCR_IC_Msk) return;  /* return if ICache is already enabled */

    __DSB();
    __ISB();
    SCB->ICIALLU = 0UL;                     /* invalidate I-Cache */
    __DSB();
    __ISB();
    SCB->CCR |=  (uint32_t)SCB_CCR_IC_Msk;  /* enable I-Cache */
    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Disable I-Cache
  \details Turns off I-Cache
  */
__STATIC_FORCEINLINE void SCB_DisableICache (void)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    __DSB();
    __ISB();
    SCB->CCR &= ~(uint32_t)SCB_CCR_IC_Msk;  /* disable I-Cache */
    SCB->ICIALLU = 0UL;                     /* invalidate I-Cache */
    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Invalidate I-Cache
  \details Invalidates I-Cache
  */
__STATIC_FORCEINLINE void SCB_InvalidateICache (void)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    __DSB();
    __ISB();
    SCB->ICIALLU = 0UL;
    __DSB();
    __ISB();
  #endif
}


/**
  \brief   I-Cache Invalidate by address
  \details Invalidates I-Cache for the given address.
           I-Cache is invalidated starting from a 32 byte aligned address in 32 byte granularity.
           I-Cache memory blocks which are part of given address + given size are invalidated.
  \param[in]   addr    address
  \param[in]   isize   size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_InvalidateICache_by_Addr (volatile void *addr, int32_t isize)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    if ( isize > 0 ) {
       int32_t op_size = isize + (((uint32_t)addr) & (__SCB_ICACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_ICACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->ICIMVAU = op_addr;             /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_ICACHE_LINE_SIZE;
        op_size -= __SCB_ICACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
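
/* Usage sketch (illustrative only): after copying executable code into RAM, the newly
 * written data must be cleaned from the D-Cache to memory and the stale I-Cache
 * contents for that region invalidated before the code is executed. "code_ram",
 * "code_image" and "code_size" are hypothetical names; SCB_CleanDCache_by_Addr() is
 * defined further below in this file.
 *
 *   memcpy(code_ram, code_image, code_size);
 *   SCB_CleanDCache_by_Addr(code_ram, (int32_t)code_size);       // push new code to memory
 *   SCB_InvalidateICache_by_Addr(code_ram, (int32_t)code_size);  // drop stale instructions
 *   // ... branch to the code in code_ram ...
 */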


/**
  \brief   Enable D-Cache
  \details Turns on D-Cache
  */
__STATIC_FORCEINLINE void SCB_EnableDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    if (SCB->CCR & SCB_CCR_DC_Msk) return;  /* return if DCache is already enabled */

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
                      ((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);
    __DSB();

    SCB->CCR |=  (uint32_t)SCB_CCR_DC_Msk;  /* enable D-Cache */

    __DSB();
    __ISB();
  #endif
}
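
/* Usage sketch (illustrative only): on Cortex-M7 class devices both caches are
 * typically enabled once, early during start-up (e.g. at the beginning of main()),
 * after the memory map / MPU attributes have been configured:
 *
 *   SCB_EnableICache();
 *   SCB_EnableDCache();
 */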


/**
  \brief   Disable D-Cache
  \details Turns off D-Cache
  */
__STATIC_FORCEINLINE void SCB_DisableDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    SCB->CCR &= ~(uint32_t)SCB_CCR_DC_Msk;  /* disable D-Cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* clean & invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCCISW = (((sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
                       ((ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Invalidate D-Cache
  \details Invalidates D-Cache
  */
__STATIC_FORCEINLINE void SCB_InvalidateDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
                      ((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Clean D-Cache
  \details Cleans D-Cache
  */
__STATIC_FORCEINLINE void SCB_CleanDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* clean D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCCSW = (((sets << SCB_DCCSW_SET_Pos) & SCB_DCCSW_SET_Msk) |
                      ((ways << SCB_DCCSW_WAY_Pos) & SCB_DCCSW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Clean & Invalidate D-Cache
  \details Cleans and Invalidates D-Cache
  */
__STATIC_FORCEINLINE void SCB_CleanInvalidateDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* clean & invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCCISW = (((sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
                       ((ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}


/**
  \brief   D-Cache Invalidate by address
  \details Invalidates D-Cache for the given address.
           D-Cache is invalidated starting from a 32 byte aligned address in 32 byte granularity.
           D-Cache memory blocks which are part of given address + given size are invalidated.
  \param[in]   addr    address
  \param[in]   dsize   size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_InvalidateDCache_by_Addr (volatile void *addr, int32_t dsize)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    if ( dsize > 0 ) {
       int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->DCIMVAC = op_addr;             /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_DCACHE_LINE_SIZE;
        op_size -= __SCB_DCACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
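
/* Usage sketch (illustrative only): before the CPU reads a buffer that a DMA
 * peripheral has written, the possibly stale cached copy must be discarded.
 * "dma_rx_buf" is a hypothetical, cache-line aligned receive buffer (see the
 * alignment sketch near the top of this file); "process" is a hypothetical function.
 *
 *   // DMA reception into dma_rx_buf has completed
 *   SCB_InvalidateDCache_by_Addr(dma_rx_buf, (int32_t)sizeof(dma_rx_buf));
 *   process(dma_rx_buf);                       // CPU now observes the DMA data
 */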


/**
  \brief   D-Cache Clean by address
  \details Cleans D-Cache for the given address.
           D-Cache is cleaned starting from a 32 byte aligned address in 32 byte granularity.
           D-Cache memory blocks which are part of given address + given size are cleaned.
  \param[in]   addr    address
  \param[in]   dsize   size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_CleanDCache_by_Addr (volatile void *addr, int32_t dsize)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    if ( dsize > 0 ) {
       int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->DCCMVAC = op_addr;             /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_DCACHE_LINE_SIZE;
        op_size -= __SCB_DCACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
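
/* Usage sketch (illustrative only): before a DMA peripheral reads a buffer that the
 * CPU has just written, the dirty cache lines must be written back to memory.
 * "dma_tx_buf" is a hypothetical transmit buffer; "prepare" is a hypothetical function.
 *
 *   prepare(dma_tx_buf);                       // CPU fills the buffer
 *   SCB_CleanDCache_by_Addr(dma_tx_buf, (int32_t)sizeof(dma_tx_buf));
 *   // start the DMA transmission from dma_tx_buf
 */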


/**
  \brief   D-Cache Clean and Invalidate by address
  \details Cleans and invalidates D-Cache for the given address.
           D-Cache is cleaned and invalidated starting from a 32 byte aligned address in 32 byte granularity.
           D-Cache memory blocks which are part of given address + given size are cleaned and invalidated.
  \param[in]   addr    address
  \param[in]   dsize   size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_CleanInvalidateDCache_by_Addr (volatile void *addr, int32_t dsize)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    if ( dsize > 0 ) {
       int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->DCCIMVAC = op_addr;            /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_DCACHE_LINE_SIZE;
        op_size -= __SCB_DCACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
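
/* Usage sketch (illustrative only): for memory that is both written by the CPU and
 * updated by a peripheral (for example a DMA descriptor ring), clean & invalidate
 * writes the CPU's changes back and discards the cached copy in one pass, so the
 * next read observes the peripheral's updates. "desc_ring" is a hypothetical,
 * cache-line aligned array of descriptors.
 *
 *   SCB_CleanInvalidateDCache_by_Addr(desc_ring, (int32_t)sizeof(desc_ring));
 */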

/*@} end of CMSIS_Core_CacheFunctions */

#endif /* ARM_CACHEL1_ARMV7_H */