/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_ARM_CACHETYPE_H
#define __ASM_ARM_CACHETYPE_H

#define CACHEID_VIVT			(1 << 0)
#define CACHEID_VIPT_NONALIASING	(1 << 1)
#define CACHEID_VIPT_ALIASING		(1 << 2)
#define CACHEID_VIPT			(CACHEID_VIPT_ALIASING|CACHEID_VIPT_NONALIASING)
#define CACHEID_ASID_TAGGED		(1 << 3)
#define CACHEID_VIPT_I_ALIASING		(1 << 4)
#define CACHEID_PIPT			(1 << 5)

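/*
 * cacheid holds the CACHEID_* flags describing the cache model detected
 * early during boot (VIVT/VIPT/PIPT, aliasing, ASID-tagged); query it
 * through the cache_is_*()/icache_is_*() helpers below rather than
 * reading it directly.
 */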
extern unsigned int cacheid;

#define cache_is_vivt()			cacheid_is(CACHEID_VIVT)
#define cache_is_vipt()			cacheid_is(CACHEID_VIPT)
#define cache_is_vipt_nonaliasing()	cacheid_is(CACHEID_VIPT_NONALIASING)
#define cache_is_vipt_aliasing()	cacheid_is(CACHEID_VIPT_ALIASING)
#define icache_is_vivt_asid_tagged()	cacheid_is(CACHEID_ASID_TAGGED)
#define icache_is_vipt_aliasing()	cacheid_is(CACHEID_VIPT_I_ALIASING)
#define icache_is_pipt()		cacheid_is(CACHEID_PIPT)

/*
 * __LINUX_ARM_ARCH__ is the minimum supported CPU architecture
 * Mask out support which will never be present on newer CPUs.
 * - v6+ is never VIVT
 * - v7+ VIPT never aliases on D-side
 */
#if __LINUX_ARM_ARCH__ >= 7
#define __CACHEID_ARCH_MIN	(CACHEID_VIPT_NONALIASING |\
				 CACHEID_ASID_TAGGED |\
				 CACHEID_VIPT_I_ALIASING |\
				 CACHEID_PIPT)
#elif __LINUX_ARM_ARCH__ >= 6
#define __CACHEID_ARCH_MIN	(~CACHEID_VIVT)
#else
#define __CACHEID_ARCH_MIN	(~0)
#endif

/*
 * Mask out support which isn't configured
 */
#if defined(CONFIG_CPU_CACHE_VIVT) && !defined(CONFIG_CPU_CACHE_VIPT)
#define __CACHEID_ALWAYS	(CACHEID_VIVT)
#define __CACHEID_NEVER		(~CACHEID_VIVT)
#elif !defined(CONFIG_CPU_CACHE_VIVT) && defined(CONFIG_CPU_CACHE_VIPT)
#define __CACHEID_ALWAYS	(0)
#define __CACHEID_NEVER		(CACHEID_VIVT)
#else
#define __CACHEID_ALWAYS	(0)
#define __CACHEID_NEVER		(0)
#endif

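/*
 * cacheid_is() returns non-zero when any of the CACHEID_* bits in @mask
 * is set for the cache detected at boot.  __CACHEID_ALWAYS,
 * __CACHEID_NEVER and __CACHEID_ARCH_MIN are compile-time constants, so
 * queries ruled out by the architecture level or the configuration
 * collapse to zero and the compiler can discard the code they guard.
 */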
static inline unsigned int __attribute__((pure)) cacheid_is(unsigned int mask)
{
	return (__CACHEID_ALWAYS & mask) |
	       (~__CACHEID_NEVER & __CACHEID_ARCH_MIN & mask & cacheid);
}
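/*
 * Illustrative use only (handle_dcache_alias() is a placeholder, not a
 * kernel function):
 *
 *	if (cache_is_vipt_aliasing())
 *		handle_dcache_alias(page);
 *
 * On a v7-only build CACHEID_VIPT_ALIASING is cleared from
 * __CACHEID_ARCH_MIN, so the test folds to zero at compile time and the
 * guarded call is discarded.
 */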
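/*
 * Cache Size Selection Register (CSSELR) encoding: bit 0 selects the
 * instruction or data/unified cache, bits [3:1] select the cache level
 * (0 = L1).
 */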
#define CSSELR_ICACHE	1
#define CSSELR_DCACHE	0

#define CSSELR_L1	(0 << 1)
#define CSSELR_L2	(1 << 1)
#define CSSELR_L3	(2 << 1)
#define CSSELR_L4	(3 << 1)
#define CSSELR_L5	(4 << 1)
#define CSSELR_L6	(5 << 1)
#define CSSELR_L7	(6 << 1)

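/*
 * Select a cache with set_csselr(), then read its geometry from the
 * Cache Size ID Register with read_ccsidr().  A/R-profile CPUs access
 * these registers through CP15; V7M uses the memory-mapped System
 * Control Block registers instead.
 */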
#ifndef CONFIG_CPU_V7M
static inline void set_csselr(unsigned int cache_selector)
{
	asm volatile("mcr p15, 2, %0, c0, c0, 0" : : "r" (cache_selector));
}

static inline unsigned int read_ccsidr(void)
{
	unsigned int val;

	asm volatile("mrc p15, 1, %0, c0, c0, 0" : "=r" (val));
	return val;
}
#else /* CONFIG_CPU_V7M */
#include <linux/io.h>
#include "asm/v7m.h"

static inline void set_csselr(unsigned int cache_selector)
{
	writel(cache_selector, BASEADDR_V7M_SCB + V7M_SCB_CTR);
}

static inline unsigned int read_ccsidr(void)
{
	return readl(BASEADDR_V7M_SCB + V7M_SCB_CCSIDR);
}
#endif
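/*
 * Illustrative sketch (not used by this header): reading the L1 D-cache
 * line size.  CCSIDR[2:0] encodes log2(words per line) - 2, so the line
 * size in bytes is 16 << CCSIDR[2:0].  On the CP15 path an ISB is
 * needed so the CSSELR write is observed by the CCSIDR read:
 *
 *	set_csselr(CSSELR_L1 | CSSELR_DCACHE);
 *	isb();
 *	line_size = 16 << (read_ccsidr() & 0x7);
 */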

#endif