1 #ifndef __ASM_ARM_PROCESSOR_H
2 #define __ASM_ARM_PROCESSOR_H
3 
4 #ifndef __ASSEMBLY__
5 #include <xen/types.h>
6 #endif
7 #include <public/arch-arm.h>
8 
/*
 * CTR Cache Type Register
 *
 * The *_MASK values below apply after shifting the register right by the
 * matching *_SHIFT (they are not pre-shifted masks).
 */
#define CTR_L1IP_MASK       0x3
#define CTR_L1IP_SHIFT      14
#define CTR_DMINLINE_SHIFT  16
#define CTR_IMINLINE_SHIFT  0
#define CTR_IMINLINE_MASK   0xf
#define CTR_ERG_SHIFT       20
#define CTR_CWG_SHIFT       24
#define CTR_CWG_MASK        15
#define CTR_IDC_SHIFT       28
#define CTR_DIC_SHIFT       29
20 
/*
 * CCSIDR Current Cache Size ID Register
 *
 * Without FEAT_CCIDX, NumSets occupies bits [27:13] (15 bits); with
 * FEAT_CCIDX (64-bit layout) it occupies bits [55:32] (24 bits).
 * The masks apply after shifting right by the matching shift.
 */
#define CCSIDR_LINESIZE_MASK            _AC(0x7, UL)
#define CCSIDR_NUMSETS_SHIFT            13
/* NumSets is 15 bits wide ([27:13]); a 0x3fff mask would drop bit 27. */
#define CCSIDR_NUMSETS_MASK             _AC(0x7fff, UL)
#define CCSIDR_NUMSETS_SHIFT_FEAT_CCIDX 32
#define CCSIDR_NUMSETS_MASK_FEAT_CCIDX  _AC(0xffffff, UL)
27 
/* CSSELR Cache Size Selection Register */
#define CSSELR_LEVEL_SHIFT 1

/* CLIDR Cache Level ID Register */
/* Ctype fields are 3 bits each; Ctype1 lives at bits [2:0], up to Ctype7. */
#define CLIDR_CTYPEn_SHIFT(n) (3 * ((n) - 1))
#define CLIDR_CTYPEn_MASK     _AC(0x7, UL)
#define CLIDR_CTYPEn_LEVELS   7

/* Instruction cache policy values (cf. CTR_L1IP_*) */
#define ICACHE_POLICY_VPIPT  0
#define ICACHE_POLICY_AIVIVT 1
#define ICACHE_POLICY_VIPT   2
#define ICACHE_POLICY_PIPT   3
40 
/* MIDR Main ID Register */
#define MIDR_REVISION_MASK      0xf
#define MIDR_REVISION(midr)     ((midr) & MIDR_REVISION_MASK)
/*
 * Historical misspelling of MIDR_REVISION(), kept as an alias so existing
 * callers continue to build; prefer MIDR_REVISION() in new code.
 */
#define MIDR_RESIVION(midr)     MIDR_REVISION(midr)
#define MIDR_PARTNUM_SHIFT      4
#define MIDR_PARTNUM_MASK       (0xfff << MIDR_PARTNUM_SHIFT)
#define MIDR_PARTNUM(midr) \
    (((midr) & MIDR_PARTNUM_MASK) >> MIDR_PARTNUM_SHIFT)
#define MIDR_ARCHITECTURE_SHIFT 16
#define MIDR_ARCHITECTURE_MASK  (0xf << MIDR_ARCHITECTURE_SHIFT)
#define MIDR_ARCHITECTURE(midr) \
    (((midr) & MIDR_ARCHITECTURE_MASK) >> MIDR_ARCHITECTURE_SHIFT)
#define MIDR_VARIANT_SHIFT      20
#define MIDR_VARIANT_MASK       (0xf << MIDR_VARIANT_SHIFT)
#define MIDR_VARIANT(midr) \
    (((midr) & MIDR_VARIANT_MASK) >> MIDR_VARIANT_SHIFT)
#define MIDR_IMPLEMENTOR_SHIFT  24
#define MIDR_IMPLEMENTOR_MASK   (0xffU << MIDR_IMPLEMENTOR_SHIFT)
#define MIDR_IMPLEMENTOR(midr) \
    (((midr) & MIDR_IMPLEMENTOR_MASK) >> MIDR_IMPLEMENTOR_SHIFT)

/* Compose a MIDR value from implementor and part number (architecture 0xf). */
#define MIDR_CPU_MODEL(imp, partnum)            \
    (((imp)     << MIDR_IMPLEMENTOR_SHIFT) |    \
     (0xf       << MIDR_ARCHITECTURE_SHIFT) |   \
     ((partnum) << MIDR_PARTNUM_SHIFT))

#define MIDR_CPU_MODEL_MASK \
     (MIDR_IMPLEMENTOR_MASK | MIDR_PARTNUM_MASK | MIDR_ARCHITECTURE_MASK)

/*
 * True if midr matches the given model (implementor/architecture/part) and
 * its combined variant+revision value lies within [rv_min, rv_max].
 */
#define MIDR_IS_CPU_MODEL_RANGE(midr, model, rv_min, rv_max)            \
({                                                                      \
        u32 _model = (midr) & MIDR_CPU_MODEL_MASK;                      \
        u32 _rv = (midr) & (MIDR_REVISION_MASK | MIDR_VARIANT_MASK);    \
                                                                        \
        _model == (model) && _rv >= (rv_min) && _rv <= (rv_max);        \
})
76 
#define ARM_CPU_IMP_ARM             0x41    /* 'A': Arm Ltd implementor code */

/* MIDR part numbers (bits [15:4] of MIDR) for Arm Ltd cores */
#define ARM_CPU_PART_CORTEX_A12     0xC0D
#define ARM_CPU_PART_CORTEX_A17     0xC0E
#define ARM_CPU_PART_CORTEX_A15     0xC0F
#define ARM_CPU_PART_CORTEX_A53     0xD03
#define ARM_CPU_PART_CORTEX_A35     0xD04
#define ARM_CPU_PART_CORTEX_A55     0xD05
#define ARM_CPU_PART_CORTEX_A57     0xD07
#define ARM_CPU_PART_CORTEX_A72     0xD08
#define ARM_CPU_PART_CORTEX_A73     0xD09
#define ARM_CPU_PART_CORTEX_A75     0xD0A
#define ARM_CPU_PART_CORTEX_A76     0xD0B
#define ARM_CPU_PART_NEOVERSE_N1    0xD0C
#define ARM_CPU_PART_CORTEX_A77     0xD0D
#define ARM_CPU_PART_NEOVERSE_V1    0xD40
#define ARM_CPU_PART_CORTEX_A78     0xD41
#define ARM_CPU_PART_CORTEX_X1      0xD44
#define ARM_CPU_PART_CORTEX_A710    0xD47
#define ARM_CPU_PART_CORTEX_X2      0xD48
#define ARM_CPU_PART_NEOVERSE_N2    0xD49
#define ARM_CPU_PART_CORTEX_A78C    0xD4B

/* Fully composed MIDR model values (implementor | architecture | partnum) */
#define MIDR_CORTEX_A12 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A12)
#define MIDR_CORTEX_A17 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A17)
#define MIDR_CORTEX_A15 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A15)
#define MIDR_CORTEX_A53 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A53)
#define MIDR_CORTEX_A35 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A35)
#define MIDR_CORTEX_A55 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A55)
#define MIDR_CORTEX_A57 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A57)
#define MIDR_CORTEX_A72 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A72)
#define MIDR_CORTEX_A73 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A73)
#define MIDR_CORTEX_A75 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A75)
#define MIDR_CORTEX_A76 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A76)
#define MIDR_NEOVERSE_N1 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_NEOVERSE_N1)
#define MIDR_CORTEX_A77 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A77)
#define MIDR_NEOVERSE_V1 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_NEOVERSE_V1)
#define MIDR_CORTEX_A78 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A78)
#define MIDR_CORTEX_X1  MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_X1)
#define MIDR_CORTEX_A710 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A710)
#define MIDR_CORTEX_X2  MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_X2)
#define MIDR_NEOVERSE_N2 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_NEOVERSE_N2)
#define MIDR_CORTEX_A78C MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A78C)
120 
/* MPIDR Multiprocessor Affinity Register */
#define _MPIDR_UP           (30)
#define MPIDR_UP            (_AC(1,UL) << _MPIDR_UP)
#define _MPIDR_SMP          (31)
#define MPIDR_SMP           (_AC(1,UL) << _MPIDR_SMP)
#define MPIDR_AFF0_SHIFT    (0)
#define MPIDR_AFF0_MASK     (_AC(0xff,UL) << MPIDR_AFF0_SHIFT)
#ifdef CONFIG_ARM_64
#define MPIDR_HWID_MASK     _AC(0xff00ffffff,UL)
#else
#define MPIDR_HWID_MASK     _AC(0xffffff,U)
#endif
#define MPIDR_INVALID       (~MPIDR_HWID_MASK)
#define MPIDR_LEVEL_BITS    (8)


/*
 * Macros to extract an affinity level from an MPIDR value (picked from
 * the Linux kernel).
 *
 * MPIDR_LEVEL_SHIFT(level) yields 0, 8, 16 for levels 0-2, then jumps to
 * 32 for level 3 (Aff3 lives in bits [39:32] on Arm64): ((1 << level) >> 1)
 * gives 0, 1, 2, 4 which is then scaled by 8.
 */

#define MPIDR_LEVEL_BITS_SHIFT  3
#define MPIDR_LEVEL_MASK        ((1 << MPIDR_LEVEL_BITS) - 1)

#define MPIDR_LEVEL_SHIFT(level) \
         (((1 << (level)) >> 1) << MPIDR_LEVEL_BITS_SHIFT)

#define MPIDR_AFFINITY_LEVEL(mpidr, level) \
         (((mpidr) >> MPIDR_LEVEL_SHIFT(level)) & MPIDR_LEVEL_MASK)

/* Mask keeping affinity fields at the given level and above */
#define AFFINITY_MASK(level)    ~((_AC(0x1,UL) << MPIDR_LEVEL_SHIFT(level)) - 1)
151 
/* TTBCR Translation Table Base Control Register */
#define TTBCR_EAE    _AC(0x80000000,U) /* Extended Address Enable (LPAE) */
/* N field: selects the size of the TTBR0 translation table */
#define TTBCR_N_MASK _AC(0x07,U)
#define TTBCR_N_16KB _AC(0x00,U)
#define TTBCR_N_8KB  _AC(0x01,U)
#define TTBCR_N_4KB  _AC(0x02,U)
#define TTBCR_N_2KB  _AC(0x03,U)
#define TTBCR_N_1KB  _AC(0x04,U)

/*
 * TTBCR_PD(0|1) can be applied only if LPAE is disabled, i.e., TTBCR.EAE==0
 * (ARM DDI 0487B.a G6-5203 and ARM DDI 0406C.b B4-1722).
 */
#define TTBCR_PD0       (_AC(1,U)<<4)
#define TTBCR_PD1       (_AC(1,U)<<5)
167 
/* SCTLR System Control Register. */

/* Bits specific to SCTLR_EL1 for Arm32 */

#define SCTLR_A32_EL1_V     BIT(13, UL)

/* Common bits for SCTLR_ELx for Arm32 */

#define SCTLR_A32_ELx_TE    BIT(30, UL)
#define SCTLR_A32_ELx_FI    BIT(21, UL)

/* Common bits for SCTLR_ELx for Arm64 */
#define SCTLR_A64_ELx_SA    BIT(3, UL)

/* Common bits for SCTLR_ELx on all architectures */
#define SCTLR_Axx_ELx_EE    BIT(25, UL)
#define SCTLR_Axx_ELx_WXN   BIT(19, UL)
#define SCTLR_Axx_ELx_I     BIT(12, UL)
#define SCTLR_Axx_ELx_C     BIT(2, UL)
#define SCTLR_Axx_ELx_A     BIT(1, UL)
#define SCTLR_Axx_ELx_M     BIT(0, UL)

#ifdef CONFIG_ARM_32

#define HSCTLR_RES1     (BIT( 3, UL) | BIT( 4, UL) | BIT( 5, UL) |\
                         BIT( 6, UL) | BIT(11, UL) | BIT(16, UL) |\
                         BIT(18, UL) | BIT(22, UL) | BIT(23, UL) |\
                         BIT(28, UL) | BIT(29, UL))

#define HSCTLR_RES0     (BIT(7, UL)  | BIT(8, UL)  | BIT(9, UL)  | BIT(10, UL) |\
                         BIT(13, UL) | BIT(14, UL) | BIT(15, UL) | BIT(17, UL) |\
                         BIT(20, UL) | BIT(24, UL) | BIT(26, UL) | BIT(27, UL) |\
                         BIT(31, UL))

/* Initial value for HSCTLR */
#define HSCTLR_SET      (HSCTLR_RES1    | SCTLR_Axx_ELx_A   | SCTLR_Axx_ELx_I)

/* Only used at pre-processing time... */
#define HSCTLR_CLEAR    (HSCTLR_RES0        | SCTLR_Axx_ELx_M   |\
                         SCTLR_Axx_ELx_C    | SCTLR_Axx_ELx_WXN |\
                         SCTLR_A32_ELx_FI   | SCTLR_Axx_ELx_EE  |\
                         SCTLR_A32_ELx_TE)

/* Every bit of the register must be covered by exactly one of SET/CLEAR. */
#if (HSCTLR_SET ^ HSCTLR_CLEAR) != 0xffffffffU
#error "Inconsistent HSCTLR set/clear bits"
#endif

#else

#define SCTLR_EL2_RES1  (BIT( 4, UL) | BIT( 5, UL) | BIT(11, UL) |\
                         BIT(16, UL) | BIT(18, UL) | BIT(22, UL) |\
                         BIT(23, UL) | BIT(28, UL) | BIT(29, UL))

#define SCTLR_EL2_RES0  (BIT( 6, UL) | BIT( 7, UL) | BIT( 8, UL) |\
                         BIT( 9, UL) | BIT(10, UL) | BIT(13, UL) |\
                         BIT(14, UL) | BIT(15, UL) | BIT(17, UL) |\
                         BIT(20, UL) | BIT(21, UL) | BIT(24, UL) |\
                         BIT(26, UL) | BIT(27, UL) | BIT(30, UL) |\
                         BIT(31, UL) | (0xffffffffULL << 32))

/* Initial value for SCTLR_EL2 */
#define SCTLR_EL2_SET   (SCTLR_EL2_RES1     | SCTLR_A64_ELx_SA  |\
                         SCTLR_Axx_ELx_I)

/* Only used at pre-processing time... */
#define SCTLR_EL2_CLEAR (SCTLR_EL2_RES0     | SCTLR_Axx_ELx_M   |\
                         SCTLR_Axx_ELx_A    | SCTLR_Axx_ELx_C   |\
                         SCTLR_Axx_ELx_WXN  | SCTLR_Axx_ELx_EE)

/*
 * Cppcheck preprocessor is wrongly throwing the error here so disable
 * this check for cppcheck runs.
 */
#ifndef CPPCHECK
#if (SCTLR_EL2_SET ^ SCTLR_EL2_CLEAR) != 0xffffffffffffffffUL
#error "Inconsistent SCTLR_EL2 set/clear bits"
#endif
#endif

#endif
248 
/* HCR Hyp Configuration Register (HCR on Arm32, HCR_EL2 on Arm64) */
#define HCR_RW          (_AC(1,UL)<<31) /* Register Width, ARM64 only */
#define HCR_TGE         (_AC(1,UL)<<27) /* Trap General Exceptions */
#define HCR_TVM         (_AC(1,UL)<<26) /* Trap Virtual Memory Controls */
#define HCR_TTLB        (_AC(1,UL)<<25) /* Trap TLB Maintenance Operations */
#define HCR_TPU         (_AC(1,UL)<<24) /* Trap Cache Maintenance Operations to PoU */
#define HCR_TPC         (_AC(1,UL)<<23) /* Trap Cache Maintenance Operations to PoC */
#define HCR_TSW         (_AC(1,UL)<<22) /* Trap Set/Way Cache Maintenance Operations */
#define HCR_TAC         (_AC(1,UL)<<21) /* Trap ACTLR Accesses */
#define HCR_TIDCP       (_AC(1,UL)<<20) /* Trap lockdown */
#define HCR_TSC         (_AC(1,UL)<<19) /* Trap SMC instruction */
#define HCR_TID3        (_AC(1,UL)<<18) /* Trap ID Register Group 3 */
#define HCR_TID2        (_AC(1,UL)<<17) /* Trap ID Register Group 2 */
#define HCR_TID1        (_AC(1,UL)<<16) /* Trap ID Register Group 1 */
#define HCR_TID0        (_AC(1,UL)<<15) /* Trap ID Register Group 0 */
#define HCR_TWE         (_AC(1,UL)<<14) /* Trap WFE instruction */
#define HCR_TWI         (_AC(1,UL)<<13) /* Trap WFI instruction */
#define HCR_DC          (_AC(1,UL)<<12) /* Default cacheable */
#define HCR_BSU_MASK    (_AC(3,UL)<<10) /* Barrier Shareability Upgrade */
/* BSU field values (bits [11:10]) */
#define HCR_BSU_NONE     (_AC(0,UL)<<10)
#define HCR_BSU_INNER    (_AC(1,UL)<<10)
#define HCR_BSU_OUTER    (_AC(2,UL)<<10)
#define HCR_BSU_FULL     (_AC(3,UL)<<10)
#define HCR_FB          (_AC(1,UL)<<9) /* Force Broadcast of Cache/BP/TLB operations */
#define HCR_VA          (_AC(1,UL)<<8) /* Virtual Asynchronous Abort */
#define HCR_VI          (_AC(1,UL)<<7) /* Virtual IRQ */
#define HCR_VF          (_AC(1,UL)<<6) /* Virtual FIQ */
#define HCR_AMO         (_AC(1,UL)<<5) /* Override CPSR.A */
#define HCR_IMO         (_AC(1,UL)<<4) /* Override CPSR.I */
#define HCR_FMO         (_AC(1,UL)<<3) /* Override CPSR.F */
#define HCR_PTW         (_AC(1,UL)<<2) /* Protected Walk */
#define HCR_SWIO        (_AC(1,UL)<<1) /* Set/Way Invalidation Override */
#define HCR_VM          (_AC(1,UL)<<0) /* Virtual MMU Enable */
282 
/* TCR: Stage 1 Translation Control */

#define TCR_T0SZ_SHIFT  (0)
#define TCR_T1SZ_SHIFT  (16)
#define TCR_T0SZ(x)     ((x)<<TCR_T0SZ_SHIFT)

/*
 * According to ARM DDI 0487B.a, TCR_EL1.{T0SZ,T1SZ} (AArch64, page D7-2480)
 * comprises 6 bits and TTBCR.{T0SZ,T1SZ} (AArch32, page G6-5204) comprises 3
 * bits following another 3 bits for RES0. Thus, the mask for both registers
 * should be 0x3f.
 */
#define TCR_SZ_MASK     (_AC(0x3f,UL))

/* EPDx: disable translation table walks using TTBRx */
#define TCR_EPD0        (_AC(0x1,UL)<<7)
#define TCR_EPD1        (_AC(0x1,UL)<<23)

/* Inner cacheability for TTBR0-controlled walks, bits [9:8] */
#define TCR_IRGN0_NC    (_AC(0x0,UL)<<8)
#define TCR_IRGN0_WBWA  (_AC(0x1,UL)<<8)
#define TCR_IRGN0_WT    (_AC(0x2,UL)<<8)
#define TCR_IRGN0_WB    (_AC(0x3,UL)<<8)

/* Outer cacheability for TTBR0-controlled walks, bits [11:10] */
#define TCR_ORGN0_NC    (_AC(0x0,UL)<<10)
#define TCR_ORGN0_WBWA  (_AC(0x1,UL)<<10)
#define TCR_ORGN0_WT    (_AC(0x2,UL)<<10)
#define TCR_ORGN0_WB    (_AC(0x3,UL)<<10)

/* Shareability for TTBR0-controlled walks, bits [13:12] */
#define TCR_SH0_NS      (_AC(0x0,UL)<<12)
#define TCR_SH0_OS      (_AC(0x2,UL)<<12)
#define TCR_SH0_IS      (_AC(0x3,UL)<<12)

/* Note that the fields TCR_EL1.{TG0,TG1} are not available on AArch32. */
#define TCR_TG0_SHIFT   (14)
#define TCR_TG0_MASK    (_AC(0x3,UL)<<TCR_TG0_SHIFT)
#define TCR_TG0_4K      (_AC(0x0,UL)<<TCR_TG0_SHIFT)
#define TCR_TG0_64K     (_AC(0x1,UL)<<TCR_TG0_SHIFT)
#define TCR_TG0_16K     (_AC(0x2,UL)<<TCR_TG0_SHIFT)

/* Note that the field TCR_EL2.TG1 exists only if HCR_EL2.E2H==1. */
#define TCR_EL1_TG1_SHIFT   (30)
#define TCR_EL1_TG1_MASK    (_AC(0x3,UL)<<TCR_EL1_TG1_SHIFT)
#define TCR_EL1_TG1_16K     (_AC(0x1,UL)<<TCR_EL1_TG1_SHIFT)
#define TCR_EL1_TG1_4K      (_AC(0x2,UL)<<TCR_EL1_TG1_SHIFT)
#define TCR_EL1_TG1_64K     (_AC(0x3,UL)<<TCR_EL1_TG1_SHIFT)

/*
 * Note that the field TCR_EL1.IPS is not available on AArch32. Also, the field
 * TCR_EL2.IPS exists only if HCR_EL2.E2H==1.
 */
#define TCR_EL1_IPS_SHIFT   (32)
#define TCR_EL1_IPS_MASK    (_AC(0x7,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_32_BIT  (_AC(0x0,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_36_BIT  (_AC(0x1,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_40_BIT  (_AC(0x2,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_42_BIT  (_AC(0x3,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_44_BIT  (_AC(0x4,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_48_BIT  (_AC(0x5,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_52_BIT  (_AC(0x6,ULL)<<TCR_EL1_IPS_SHIFT)

/*
 * The following values correspond to the bit masks represented by
 * TCR_EL1_IPS_XX_BIT defines.
 */
#define TCR_EL1_IPS_32_BIT_VAL  (32)
#define TCR_EL1_IPS_36_BIT_VAL  (36)
#define TCR_EL1_IPS_40_BIT_VAL  (40)
#define TCR_EL1_IPS_42_BIT_VAL  (42)
#define TCR_EL1_IPS_44_BIT_VAL  (44)
#define TCR_EL1_IPS_48_BIT_VAL  (48)
#define TCR_EL1_IPS_52_BIT_VAL  (52)
#define TCR_EL1_IPS_MIN_VAL     (25)

/* Note that the fields TCR_EL2.TBI(0|1) exist only if HCR_EL2.E2H==1. */
#define TCR_EL1_TBI0    (_AC(0x1,ULL)<<37)
#define TCR_EL1_TBI1    (_AC(0x1,ULL)<<38)

#ifdef CONFIG_ARM_64

/* PS/TBI positions as used in TCR_EL2 -- assumes HCR_EL2.E2H==0 layout */
#define TCR_PS(x)       ((x)<<16)
#define TCR_TBI         (_AC(0x1,UL)<<20)

#define TCR_RES1        (_AC(1,UL)<<31|_AC(1,UL)<<23)

#else

#define TCR_RES1        (_AC(1,UL)<<31)

#endif
371 
/* VTCR: Stage 2 Translation Control */

#define VTCR_T0SZ(x)    ((x)<<0)

#define VTCR_SL0(x)     ((x)<<6)    /* Starting level of the stage 2 walk */

/* Inner cacheability for stage 2 walks, bits [9:8] */
#define VTCR_IRGN0_NC   (_AC(0x0,UL)<<8)
#define VTCR_IRGN0_WBWA (_AC(0x1,UL)<<8)
#define VTCR_IRGN0_WT   (_AC(0x2,UL)<<8)
#define VTCR_IRGN0_WB   (_AC(0x3,UL)<<8)

/* Outer cacheability for stage 2 walks, bits [11:10] */
#define VTCR_ORGN0_NC   (_AC(0x0,UL)<<10)
#define VTCR_ORGN0_WBWA (_AC(0x1,UL)<<10)
#define VTCR_ORGN0_WT   (_AC(0x2,UL)<<10)
#define VTCR_ORGN0_WB   (_AC(0x3,UL)<<10)

/* Shareability for stage 2 walks, bits [13:12] */
#define VTCR_SH0_NS     (_AC(0x0,UL)<<12)
#define VTCR_SH0_OS     (_AC(0x2,UL)<<12)
#define VTCR_SH0_IS     (_AC(0x3,UL)<<12)

#ifdef CONFIG_ARM_64

/* Granule size for stage 2 walks, bits [15:14] */
#define VTCR_TG0_4K     (_AC(0x0,UL)<<14)
#define VTCR_TG0_64K    (_AC(0x1,UL)<<14)
#define VTCR_TG0_16K    (_AC(0x2,UL)<<14)

#define VTCR_PS(x)      ((x)<<16)

/* VS: VMID size select (set for 16-bit VMIDs) */
#define VTCR_VS         (_AC(0x1,UL)<<19)

#endif

#define VTCR_RES1       (_AC(1,UL)<<31)
405 
/* HCPTR Hyp. Coprocessor Trap Register */
#define HCPTR_TAM       ((_AC(1,U)<<30))        /* Trap Activity Monitor accesses */
#define HCPTR_TTA       ((_AC(1,U)<<20))        /* Trap trace registers */
#define HCPTR_CP(x)     ((_AC(1,U)<<(x)))       /* Trap Coprocessor x */
#define HCPTR_CP_MASK   ((_AC(1,U)<<14)-1)      /* All of CP0..CP13 */

/* HSTR Hyp. System Trap Register */
#define HSTR_T(x)       ((_AC(1,U)<<(x)))       /* Trap Cp15 c<x> */

/* HDCR Hyp. Debug Configuration Register */
#define HDCR_TDRA       (_AC(1,U)<<11)          /* Trap Debug ROM access */
#define HDCR_TDOSA      (_AC(1,U)<<10)          /* Trap Debug-OS-related register access */
#define HDCR_TDA        (_AC(1,U)<<9)           /* Trap Debug Access */
#define HDCR_TDE        (_AC(1,U)<<8)           /* Route Software Debug exceptions from EL0/EL1 to EL2 */
#define HDCR_TPM        (_AC(1,U)<<6)           /* Trap Performance Monitors accesses */
#define HDCR_TPMCR      (_AC(1,U)<<5)           /* Trap PMCR accesses */
422 
/* HSR.EC: Exception Class, bits [31:26] of HSR/ESR_EL2 */
#define HSR_EC_SHIFT                26

#define HSR_EC_UNKNOWN              0x00
#define HSR_EC_WFI_WFE              0x01
#define HSR_EC_CP15_32              0x03
#define HSR_EC_CP15_64              0x04
#define HSR_EC_CP14_32              0x05        /* Trapped MCR or MRC access to CP14 */
#define HSR_EC_CP14_DBG             0x06        /* Trapped LDC/STC access to CP14 (only for debug registers) */
#define HSR_EC_CP                   0x07        /* HCPTR-trapped access to CP0-CP13 */
#define HSR_EC_CP10                 0x08
#define HSR_EC_JAZELLE              0x09
#define HSR_EC_BXJ                  0x0a
#define HSR_EC_CP14_64              0x0c
#define HSR_EC_SVC32                0x11
#define HSR_EC_HVC32                0x12
#define HSR_EC_SMC32                0x13
#ifdef CONFIG_ARM_64
#define HSR_EC_SVC64                0x15
#define HSR_EC_HVC64                0x16
#define HSR_EC_SMC64                0x17
#define HSR_EC_SYSREG               0x18
#define HSR_EC_SVE                  0x19
#endif
#define HSR_EC_INSTR_ABORT_LOWER_EL 0x20
#define HSR_EC_INSTR_ABORT_CURR_EL  0x21
#define HSR_EC_DATA_ABORT_LOWER_EL  0x24
#define HSR_EC_DATA_ABORT_CURR_EL   0x25
#ifdef CONFIG_ARM_64
#define HSR_EC_BRK                  0x3c
#endif
453 
/* FSR format, common */
#define FSR_LPAE                (_AC(1,UL)<<9)
/* FSR short format */
#define FSRS_FS_DEBUG           (_AC(0,UL)<<10|_AC(0x2,UL)<<0)
/* FSR long format */
#define FSRL_STATUS_DEBUG       (_AC(0x22,UL)<<0)

#ifdef CONFIG_ARM_64
/* VMIDBits field encodings (presumably ID_AA64MMFR1_EL1.VMIDBits -- confirm) */
#define MM64_VMID_8_BITS_SUPPORT    0x0
#define MM64_VMID_16_BITS_SUPPORT   0x2
#endif
465 
#ifndef __ASSEMBLY__

/*
 * Per-logical-CPU hardware identifier map (presumably each CPU's MPIDR
 * affinity bits, filled during boot -- confirm against the SMP bring-up code).
 */
extern register_t __cpu_logical_map[];
#define cpu_logical_map(cpu) __cpu_logical_map[cpu]

#endif
472 
/* Physical Address Register */
/* The layout of the remaining bits depends on the F (fault) bit. */
#define PAR_F           (_AC(1,U)<<0)

/* .... If F == 1 */
#define PAR_FSC_SHIFT   (1)
#define PAR_FSC_MASK    (_AC(0x3f,U)<<PAR_FSC_SHIFT)
#define PAR_STAGE21     (_AC(1,U)<<8)     /* Stage 2 Fault During Stage 1 Walk */
#define PAR_STAGE2      (_AC(1,U)<<9)     /* Stage 2 Fault */

/* If F == 0 */
#define PAR_MAIR_SHIFT  56                       /* Memory Attributes */
#define PAR_MAIR_MASK   (0xffLL<<PAR_MAIR_SHIFT)
#define PAR_NS          (_AC(1,U)<<9)                   /* Non-Secure */
#define PAR_SH_SHIFT    7                        /* Shareability */
#define PAR_SH_MASK     (_AC(3,U)<<PAR_SH_SHIFT)
488 
/* Fault Status Register */
/*
 * 543210 BIT
 * 00XXLL -- XX Fault Level LL
 * ..01LL -- Translation Fault LL
 * ..10LL -- Access Fault LL
 * ..11LL -- Permission Fault LL
 * 01xxxx -- Abort/Parity
 * 10xxxx -- Other
 * 11xxxx -- Implementation Defined
 */
#define FSC_TYPE_MASK (_AC(0x3,U)<<4)
#define FSC_TYPE_FAULT (_AC(0x00,U)<<4)
#define FSC_TYPE_ABT   (_AC(0x01,U)<<4)
#define FSC_TYPE_OTH   (_AC(0x02,U)<<4)
#define FSC_TYPE_IMPL  (_AC(0x03,U)<<4)

#define FSC_FLT_TRANS  (0x04)
#define FSC_FLT_ACCESS (0x08)
#define FSC_FLT_PERM   (0x0c)
#define FSC_SEA        (0x10) /* Synchronous External Abort */
#define FSC_SPE        (0x18) /* Memory Access Synchronous Parity Error */
#define FSC_APE        (0x11) /* Memory Access Asynchronous Parity Error */
#define FSC_SEATT      (0x14) /* Sync. Ext. Abort Translation Table */
#define FSC_SPETT      (0x1c) /* Sync. Parity Error Translation Table */
#define FSC_AF         (0x21) /* Alignment Fault */
#define FSC_DE         (0x22) /* Debug Event */
#define FSC_LKD        (0x34) /* Lockdown Abort */
#define FSC_CPR        (0x3a) /* Coprocessor Abort */

#define FSC_LL_MASK    (_AC(0x03,U)<<0)
520 
/* HPFAR_EL2: Hypervisor IPA Fault Address Register */
/* FIPA field: bits [39:4] on Arm64, [31:4] on Arm32 */
#ifdef CONFIG_ARM_64
#define HPFAR_MASK      GENMASK(39, 4)
#else
#define HPFAR_MASK      GENMASK(31, 4)
#endif
527 
/* Time counter hypervisor control register */
#define CNTHCTL_EL2_EL1PCTEN (1u<<0) /* Kernel/user access to physical counter */
#define CNTHCTL_EL2_EL1PCEN  (1u<<1) /* Kernel/user access to CNTP timer regs */

/* Time counter kernel control register */
#define CNTKCTL_EL1_EL0PCTEN (1u<<0) /* Expose phys counters to EL0 */
#define CNTKCTL_EL1_EL0VCTEN (1u<<1) /* Expose virt counters to EL0 */
#define CNTKCTL_EL1_EL0VTEN  (1u<<8) /* Expose virt timer registers to EL0 */
#define CNTKCTL_EL1_EL0PTEN  (1u<<9) /* Expose phys timer registers to EL0 */

/* Timer control registers */
#define CNTx_CTL_ENABLE   (1UL<<0)  /* Enable timer */
#define CNTx_CTL_MASK     (1UL<<1)  /* Mask IRQ */
#define CNTx_CTL_PENDING  (1UL<<2)  /* IRQ pending */

/* Timer frequency mask */
#define CNTFRQ_MASK       GENMASK(31, 0)

/* Exception Vector offsets */
/* ... ARM32 */
#define VECTOR32_RST  0
#define VECTOR32_UND  4
#define VECTOR32_SVC  8
#define VECTOR32_PABT 12
#define VECTOR32_DABT 16
/*
 * ... ARM64: four 0x200-byte groups (current EL with SP0, current EL with
 * SPx, lower EL AArch64, lower EL AArch32), each containing four 0x80-byte
 * entries (Sync, IRQ, FIQ, SError).
 */
#define VECTOR64_CURRENT_SP0_BASE  0x000
#define VECTOR64_CURRENT_SPx_BASE  0x200
#define VECTOR64_LOWER64_BASE      0x400
#define VECTOR64_LOWER32_BASE      0x600

#define VECTOR64_SYNC_OFFSET       0x000
#define VECTOR64_IRQ_OFFSET        0x080
#define VECTOR64_FIQ_OFFSET        0x100
#define VECTOR64_ERROR_OFFSET      0x180
563 
564 
565 #if defined(CONFIG_ARM_32)
566 # include <asm/arm32/processor.h>
567 #elif defined(CONFIG_ARM_64)
568 # include <asm/arm64/processor.h>
569 #else
570 # error "unknown ARM variant"
571 #endif
572 
#ifndef __ASSEMBLY__
/* Report a fatal address-translation failure described by the PAR value. */
void noreturn panic_PAR(uint64_t par);

/* Debugging functions are declared with external linkage to aid development. */
void show_registers(const struct cpu_user_regs *regs);
void show_stack(const struct cpu_user_regs *regs);

//#define dump_execution_state() run_in_exception_handler(show_execution_state)
#define dump_execution_state() WARN()

#define cpu_relax() barrier() /* Could yield? */

/* All a bit UP for the moment */
#define cpu_to_core(_cpu)   (0)
#define cpu_to_socket(_cpu) (0)

struct vcpu;
/* Copy a vCPU's hypervisor-side register state into the guest-visible layout. */
void vcpu_regs_hyp_to_user(const struct vcpu *vcpu,
                           struct vcpu_guest_core_regs *regs);
/* Converse of vcpu_regs_hyp_to_user(). */
void vcpu_regs_user_to_hyp(struct vcpu *vcpu,
                           const struct vcpu_guest_core_regs *regs);

/* Handle an SError taken while running in the hypervisor. */
void do_trap_hyp_serror(struct cpu_user_regs *regs);

/* Handle an SError taken while running a guest. */
void do_trap_guest_serror(struct cpu_user_regs *regs);

register_t get_default_hcr_flags(void);

register_t get_default_cptr_flags(void);

/*
 * Synchronize SError unless the feature is selected.
 * This relies on SErrors being currently unmasked.
 */
#define SYNCHRONIZE_SERROR(feat)                                  \
    do {                                                          \
        ASSERT(local_abort_is_enabled());                         \
        asm_inline volatile (                                     \
            ALTERNATIVE("dsb sy; isb",                            \
                        "nop; nop", feat)                         \
            ::: "memory" );                                       \
    } while (0)

/*
 * Clear/Set flags in HCR_EL2 for a given vCPU. It only supports the current
 * vCPU for now.
 */
#define vcpu_hcr_clear_flags(v, flags)              \
    do {                                            \
        ASSERT((v) == current);                     \
        (v)->arch.hcr_el2 &= ~(flags);              \
        WRITE_SYSREG((v)->arch.hcr_el2, HCR_EL2);   \
    } while (0)

#define vcpu_hcr_set_flags(v, flags)                \
    do {                                            \
        ASSERT((v) == current);                     \
        (v)->arch.hcr_el2 |= (flags);               \
        WRITE_SYSREG((v)->arch.hcr_el2, HCR_EL2);   \
    } while (0)
633 
634 #endif /* __ASSEMBLY__ */
635 #endif /* __ASM_ARM_PROCESSOR_H */
636 /*
637  * Local variables:
638  * mode: C
639  * c-file-style: "BSD"
640  * c-basic-offset: 4
641  * indent-tabs-mode: nil
642  * End:
643  */
644