/*
 * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CONTEXT_H
#define CONTEXT_H

#include <lib/utils_def.h>

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_GPREGS_OFFSET	U(0x0)
#define CTX_GPREG_X0		U(0x0)
#define CTX_GPREG_X1		U(0x8)
#define CTX_GPREG_X2		U(0x10)
#define CTX_GPREG_X3		U(0x18)
#define CTX_GPREG_X4		U(0x20)
#define CTX_GPREG_X5		U(0x28)
#define CTX_GPREG_X6		U(0x30)
#define CTX_GPREG_X7		U(0x38)
#define CTX_GPREG_X8		U(0x40)
#define CTX_GPREG_X9		U(0x48)
#define CTX_GPREG_X10		U(0x50)
#define CTX_GPREG_X11		U(0x58)
#define CTX_GPREG_X12		U(0x60)
#define CTX_GPREG_X13		U(0x68)
#define CTX_GPREG_X14		U(0x70)
#define CTX_GPREG_X15		U(0x78)
#define CTX_GPREG_X16		U(0x80)
#define CTX_GPREG_X17		U(0x88)
#define CTX_GPREG_X18		U(0x90)
#define CTX_GPREG_X19		U(0x98)
#define CTX_GPREG_X20		U(0xa0)
#define CTX_GPREG_X21		U(0xa8)
#define CTX_GPREG_X22		U(0xb0)
#define CTX_GPREG_X23		U(0xb8)
#define CTX_GPREG_X24		U(0xc0)
#define CTX_GPREG_X25		U(0xc8)
#define CTX_GPREG_X26		U(0xd0)
#define CTX_GPREG_X27		U(0xd8)
#define CTX_GPREG_X28		U(0xe0)
#define CTX_GPREG_X29		U(0xe8)
#define CTX_GPREG_LR		U(0xf0)
#define CTX_GPREG_SP_EL0	U(0xf8)
#define CTX_GPREGS_END		U(0x100)
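
/*
 * Illustrative use of the offsets above (a sketch of the expected pattern,
 * not code from this header): with SP_EL3 pointing at a 'cpu_context_t'
 * instance (see below), exception handling assembly can reload a pair of
 * saved general purpose registers with, for example,
 *
 *	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
 *
 * The actual save/restore sequences live in the context handling assembly
 * sources, not in this header.
 */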

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are
 * only 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3		U(0x0)
#define CTX_ESR_EL3		U(0x8)
#define CTX_RUNTIME_SP		U(0x10)
#define CTX_SPSR_EL3		U(0x18)
#define CTX_ELR_EL3		U(0x20)
#define CTX_PMCR_EL0		U(0x28)
#define CTX_IS_IN_EL3		U(0x30)
#define CTX_CPTR_EL3		U(0x38)
#define CTX_ZCR_EL3		U(0x40)
#define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el1_sysregs'
 * structure at their correct offsets. Note that some of the registers are
 * only 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL1_SYSREGS_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_SPSR_EL1		U(0x0)
#define CTX_ELR_EL1		U(0x8)
#define CTX_SCTLR_EL1		U(0x10)
#define CTX_TCR_EL1		U(0x18)
#define CTX_CPACR_EL1		U(0x20)
#define CTX_CSSELR_EL1		U(0x28)
#define CTX_SP_EL1		U(0x30)
#define CTX_ESR_EL1		U(0x38)
#define CTX_TTBR0_EL1		U(0x40)
#define CTX_TTBR1_EL1		U(0x48)
#define CTX_MAIR_EL1		U(0x50)
#define CTX_AMAIR_EL1		U(0x58)
#define CTX_ACTLR_EL1		U(0x60)
#define CTX_TPIDR_EL1		U(0x68)
#define CTX_TPIDR_EL0		U(0x70)
#define CTX_TPIDRRO_EL0		U(0x78)
#define CTX_PAR_EL1		U(0x80)
#define CTX_FAR_EL1		U(0x88)
#define CTX_AFSR0_EL1		U(0x90)
#define CTX_AFSR1_EL1		U(0x98)
#define CTX_CONTEXTIDR_EL1	U(0xa0)
#define CTX_VBAR_EL1		U(0xa8)

/*
 * If the platform is AArch64-only, there is no need to save and restore these
 * AArch32 registers.
 */
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_SPSR_ABT		U(0xb0)	/* Align to the next 16 byte boundary */
#define CTX_SPSR_UND		U(0xb8)
#define CTX_SPSR_IRQ		U(0xc0)
#define CTX_SPSR_FIQ		U(0xc8)
#define CTX_DACR32_EL2		U(0xd0)
#define CTX_IFSR32_EL2		U(0xd8)
#define CTX_AARCH32_END		U(0xe0)	/* Align to the next 16 byte boundary */
#else
#define CTX_AARCH32_END		U(0xb0)	/* Align to the next 16 byte boundary */
#endif /* CTX_INCLUDE_AARCH32_REGS */

/*
 * If the timer registers aren't saved and restored, we don't have to reserve
 * space for them in the context.
 */
#if NS_TIMER_SWITCH
#define CTX_CNTP_CTL_EL0	(CTX_AARCH32_END + U(0x0))
#define CTX_CNTP_CVAL_EL0	(CTX_AARCH32_END + U(0x8))
#define CTX_CNTV_CTL_EL0	(CTX_AARCH32_END + U(0x10))
#define CTX_CNTV_CVAL_EL0	(CTX_AARCH32_END + U(0x18))
#define CTX_CNTKCTL_EL1		(CTX_AARCH32_END + U(0x20))
#define CTX_TIMER_SYSREGS_END	(CTX_AARCH32_END + U(0x30)) /* Align to the next 16 byte boundary */
#else
#define CTX_TIMER_SYSREGS_END	CTX_AARCH32_END
#endif /* NS_TIMER_SWITCH */

#if CTX_INCLUDE_MTE_REGS
#define CTX_TFSRE0_EL1		(CTX_TIMER_SYSREGS_END + U(0x0))
#define CTX_TFSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x8))
#define CTX_RGSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x10))
#define CTX_GCR_EL1		(CTX_TIMER_SYSREGS_END + U(0x18))

/* Align to the next 16 byte boundary */
#define CTX_MTE_REGS_END	(CTX_TIMER_SYSREGS_END + U(0x20))
#else
#define CTX_MTE_REGS_END	CTX_TIMER_SYSREGS_END
#endif /* CTX_INCLUDE_MTE_REGS */
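
/*
 * Worked example of the conditional layout above (for illustration only):
 * with CTX_INCLUDE_AARCH32_REGS=0, NS_TIMER_SWITCH=1 and
 * CTX_INCLUDE_MTE_REGS=0, CTX_AARCH32_END is 0xb0, so CTX_CNTP_CTL_EL0 lands
 * at offset 0xb0, CTX_CNTKCTL_EL1 at 0xd0, and CTX_TIMER_SYSREGS_END (and
 * hence CTX_EL1_SYSREGS_END below) at 0xe0.
 */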

/*
 * End of EL1 system registers.
 */
#define CTX_EL1_SYSREGS_END	CTX_MTE_REGS_END

/*
 * EL2 register set
 */

#if CTX_INCLUDE_EL2_REGS
/* For later discussion
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_LR<n>_EL2
 */
#define CTX_EL2_SYSREGS_OFFSET	(CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)

#define CTX_ACTLR_EL2		U(0x0)
#define CTX_AFSR0_EL2		U(0x8)
#define CTX_AFSR1_EL2		U(0x10)
#define CTX_AMAIR_EL2		U(0x18)
#define CTX_CNTHCTL_EL2		U(0x20)
#define CTX_CNTVOFF_EL2		U(0x28)
#define CTX_CPTR_EL2		U(0x30)
#define CTX_DBGVCR32_EL2	U(0x38)
#define CTX_ELR_EL2		U(0x40)
#define CTX_ESR_EL2		U(0x48)
#define CTX_FAR_EL2		U(0x50)
#define CTX_HACR_EL2		U(0x58)
#define CTX_HCR_EL2		U(0x60)
#define CTX_HPFAR_EL2		U(0x68)
#define CTX_HSTR_EL2		U(0x70)
#define CTX_ICC_SRE_EL2		U(0x78)
#define CTX_ICH_HCR_EL2		U(0x80)
#define CTX_ICH_VMCR_EL2	U(0x88)
#define CTX_MAIR_EL2		U(0x90)
#define CTX_MDCR_EL2		U(0x98)
#define CTX_PMSCR_EL2		U(0xa0)
#define CTX_SCTLR_EL2		U(0xa8)
#define CTX_SPSR_EL2		U(0xb0)
#define CTX_SP_EL2		U(0xb8)
#define CTX_TCR_EL2		U(0xc0)
#define CTX_TPIDR_EL2		U(0xc8)
#define CTX_TTBR0_EL2		U(0xd0)
#define CTX_VBAR_EL2		U(0xd8)
#define CTX_VMPIDR_EL2		U(0xe0)
#define CTX_VPIDR_EL2		U(0xe8)
#define CTX_VTCR_EL2		U(0xf0)
#define CTX_VTTBR_EL2		U(0xf8)

/* Only used if MTE registers are in use */
#define CTX_TFSR_EL2		U(0x100)

/* Only used if ENABLE_MPAM_FOR_LOWER_ELS==1 */
#define CTX_MPAM2_EL2		U(0x108)
#define CTX_MPAMHCR_EL2		U(0x110)
#define CTX_MPAMVPM0_EL2	U(0x118)
#define CTX_MPAMVPM1_EL2	U(0x120)
#define CTX_MPAMVPM2_EL2	U(0x128)
#define CTX_MPAMVPM3_EL2	U(0x130)
#define CTX_MPAMVPM4_EL2	U(0x138)
#define CTX_MPAMVPM5_EL2	U(0x140)
#define CTX_MPAMVPM6_EL2	U(0x148)
#define CTX_MPAMVPM7_EL2	U(0x150)
#define CTX_MPAMVPMV_EL2	U(0x158)

/* Registers introduced from Armv8.6 onwards */
#define CTX_HDFGRTR_EL2		U(0x160)
#define CTX_HAFGRTR_EL2		U(0x168)
#define CTX_HDFGWTR_EL2		U(0x170)
#define CTX_HFGITR_EL2		U(0x178)
#define CTX_HFGRTR_EL2		U(0x180)
#define CTX_HFGWTR_EL2		U(0x188)
#define CTX_CNTPOFF_EL2		U(0x190)

/* Registers introduced from Armv8.4 onwards */
#define CTX_CONTEXTIDR_EL2	U(0x198)
#define CTX_TTBR1_EL2		U(0x1a0)
#define CTX_VDISR_EL2		U(0x1a8)
#define CTX_VSESR_EL2		U(0x1b0)
#define CTX_VNCR_EL2		U(0x1b8)
#define CTX_TRFCR_EL2		U(0x1c0)

/* Registers introduced from Armv8.5 onwards */
#define CTX_SCXTNUM_EL2		U(0x1c8)

/* Register for FEAT_HCX */
#define CTX_HCRX_EL2		U(0x1d0)

/* Align to the next 16 byte boundary */
#define CTX_EL2_SYSREGS_END	U(0x1e0)

#endif /* CTX_INCLUDE_EL2_REGS */

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'fp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#if CTX_INCLUDE_EL2_REGS
# define CTX_FPREGS_OFFSET	(CTX_EL2_SYSREGS_OFFSET + CTX_EL2_SYSREGS_END)
#else
# define CTX_FPREGS_OFFSET	(CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
#endif
#if CTX_INCLUDE_FPREGS
#define CTX_FP_Q0		U(0x0)
#define CTX_FP_Q1		U(0x10)
#define CTX_FP_Q2		U(0x20)
#define CTX_FP_Q3		U(0x30)
#define CTX_FP_Q4		U(0x40)
#define CTX_FP_Q5		U(0x50)
#define CTX_FP_Q6		U(0x60)
#define CTX_FP_Q7		U(0x70)
#define CTX_FP_Q8		U(0x80)
#define CTX_FP_Q9		U(0x90)
#define CTX_FP_Q10		U(0xa0)
#define CTX_FP_Q11		U(0xb0)
#define CTX_FP_Q12		U(0xc0)
#define CTX_FP_Q13		U(0xd0)
#define CTX_FP_Q14		U(0xe0)
#define CTX_FP_Q15		U(0xf0)
#define CTX_FP_Q16		U(0x100)
#define CTX_FP_Q17		U(0x110)
#define CTX_FP_Q18		U(0x120)
#define CTX_FP_Q19		U(0x130)
#define CTX_FP_Q20		U(0x140)
#define CTX_FP_Q21		U(0x150)
#define CTX_FP_Q22		U(0x160)
#define CTX_FP_Q23		U(0x170)
#define CTX_FP_Q24		U(0x180)
#define CTX_FP_Q25		U(0x190)
#define CTX_FP_Q26		U(0x1a0)
#define CTX_FP_Q27		U(0x1b0)
#define CTX_FP_Q28		U(0x1c0)
#define CTX_FP_Q29		U(0x1d0)
#define CTX_FP_Q30		U(0x1e0)
#define CTX_FP_Q31		U(0x1f0)
#define CTX_FP_FPSR		U(0x200)
#define CTX_FP_FPCR		U(0x208)
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_FP_FPEXC32_EL2	U(0x210)
#define CTX_FPREGS_END		U(0x220) /* Align to the next 16 byte boundary */
#else
#define CTX_FPREGS_END		U(0x210) /* Align to the next 16 byte boundary */
#endif
#else
#define CTX_FPREGS_END		U(0)
#endif

/*******************************************************************************
 * Registers related to CVE-2018-3639
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET	(CTX_FPREGS_OFFSET + CTX_FPREGS_END)
#define CTX_CVE_2018_3639_DISABLE	U(0)
#define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Registers related to ARMv8.3-PAuth.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO		U(0x0)
#define CTX_PACIAKEY_HI		U(0x8)
#define CTX_PACIBKEY_LO		U(0x10)
#define CTX_PACIBKEY_HI		U(0x18)
#define CTX_PACDAKEY_LO		U(0x20)
#define CTX_PACDAKEY_HI		U(0x28)
#define CTX_PACDBKEY_LO		U(0x30)
#define CTX_PACDBKEY_HI		U(0x38)
#define CTX_PACGAKEY_LO		U(0x40)
#define CTX_PACGAKEY_HI		U(0x48)
#define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END	U(0)
#endif /* CTX_INCLUDE_PAUTH_REGS */

#ifndef __ASSEMBLER__

#include <stdint.h>

#include <lib/cassert.h>

/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
#define DWORD_SHIFT		U(3)
#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	}  __aligned(16) name##_t
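
/*
 * For illustration, an invocation such as the one used for the general
 * purpose registers further below,
 *
 *	DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);
 *
 * expands to
 *
 *	typedef struct gp_regs {
 *		uint64_t ctx_regs[CTX_GPREG_ALL];
 *	} __aligned(16) gp_regs_t;
 *
 * i.e. a 16-byte aligned array of 64-bit register values, sized by the
 * corresponding *_ALL constant defined next.
 */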

/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)
#define CTX_EL1_SYSREGS_ALL	(CTX_EL1_SYSREGS_END >> DWORD_SHIFT)
#if CTX_INCLUDE_EL2_REGS
# define CTX_EL2_SYSREGS_ALL	(CTX_EL2_SYSREGS_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_FPREGS
# define CTX_FPREG_ALL		(CTX_FPREGS_END >> DWORD_SHIFT)
#endif
#define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)
#if CTX_INCLUDE_PAUTH_REGS
# define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif

/*
 * AArch64 general purpose register context structure. Usually only x0-x18
 * and LR are saved, as the compiler is expected to preserve any callee-saved
 * registers it uses in the C runtime and the assembler does not touch the
 * rest. However, in the case of a world switch during exception handling the
 * callee-saved registers need to be saved as well.
 */
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);

/*
 * AArch64 EL1 system register context structure for preserving the
 * architectural state during world switches.
 */
DEFINE_REG_STRUCT(el1_sysregs, CTX_EL1_SYSREGS_ALL);

/*
 * AArch64 EL2 system register context structure for preserving the
 * architectural state during world switches.
 */
#if CTX_INCLUDE_EL2_REGS
DEFINE_REG_STRUCT(el2_sysregs, CTX_EL2_SYSREGS_ALL);
#endif

/*
 * AArch64 floating point register context structure for preserving
 * the floating point state during switches from one security state to
 * another.
 */
#if CTX_INCLUDE_FPREGS
DEFINE_REG_STRUCT(fp_regs, CTX_FPREG_ALL);
#endif

/*
 * Miscellaneous registers used by EL3 firmware to maintain its state
 * across exception entries and exits.
 */
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);

/* Registers associated with ARMv8.3-PAuth */
#if CTX_INCLUDE_PAUTH_REGS
DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
#endif

/*
 * Macros to access members of any of the above structures using their
 * offsets.
 */
#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (uint64_t) (val))
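
/*
 * Illustrative usage of the access macros above (a sketch only; 'state' is a
 * hypothetical pointer to one of the register structures, here 'el3_state_t'):
 *
 *	uint64_t spsr = read_ctx_reg(state, CTX_SPSR_EL3);
 *	write_ctx_reg(state, CTX_ELR_EL3, new_entrypoint);
 *
 * where 'new_entrypoint' stands for a caller-supplied value. The offset
 * passed in must belong to the structure being accessed.
 */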

/*
 * Top-level context structure which is used by EL3 firmware to preserve
 * the state of a core at the next lower EL in a given security state and
 * save enough EL3 metadata to be able to return to that EL and security
 * state. The context management library ensures that SP_EL3 always points
 * to an instance of this structure at exception entry and exit.
 */
typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;
	el1_sysregs_t el1_sysregs_ctx;
#if CTX_INCLUDE_EL2_REGS
	el2_sysregs_t el2_sysregs_ctx;
#endif
#if CTX_INCLUDE_FPREGS
	fp_regs_t fpregs_ctx;
#endif
	cve_2018_3639_t cve_2018_3639_ctx;
#if CTX_INCLUDE_PAUTH_REGS
	pauth_t pauth_ctx;
#endif
} cpu_context_t;

/* Macros to access members of the 'cpu_context_t' structure */
#define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)
#if CTX_INCLUDE_FPREGS
# define get_fpregs_ctx(h)	(&((cpu_context_t *) h)->fpregs_ctx)
#endif
#define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
#if CTX_INCLUDE_EL2_REGS
# define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
#endif
#define get_gpregs_ctx(h)	(&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)
#if CTX_INCLUDE_PAUTH_REGS
# define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
#endif
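
/*
 * Illustrative combined usage (a sketch with a hypothetical handle 'h', the
 * opaque pointer to a 'cpu_context_t' passed around by context management
 * code): the saved x0 of the lower EL can be updated with
 *
 *	write_ctx_reg(get_gpregs_ctx(h), CTX_GPREG_X0, ret_value);
 *
 * where 'ret_value' is a caller-chosen 64-bit value.
 */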

/*
 * Compile time assertions related to the 'cpu_context' structure to
 * ensure that the assembler and the compiler views of the offsets of
 * the structure members are the same.
 */
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx), \
	assert_core_context_gp_offset_mismatch);
CASSERT(CTX_EL1_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el1_sysregs_ctx), \
	assert_core_context_el1_sys_offset_mismatch);
#if CTX_INCLUDE_EL2_REGS
CASSERT(CTX_EL2_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el2_sysregs_ctx), \
	assert_core_context_el2_sys_offset_mismatch);
#endif
#if CTX_INCLUDE_FPREGS
CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx), \
	assert_core_context_fp_offset_mismatch);
#endif
CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx), \
	assert_core_context_el3state_offset_mismatch);
CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx), \
	assert_core_context_cve_2018_3639_offset_mismatch);
#if CTX_INCLUDE_PAUTH_REGS
CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx), \
	assert_core_context_pauth_offset_mismatch);
#endif

/*
 * Helper macros to set the general purpose registers that correspond to
 * parameters in an AAPCS64 call, i.e. x0-x7.
 */
#define set_aapcs_args0(ctx, x0)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
	} while (0)
#define set_aapcs_args1(ctx, x0, x1)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
		set_aapcs_args0(ctx, x0);				\
	} while (0)
#define set_aapcs_args2(ctx, x0, x1, x2)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
		set_aapcs_args1(ctx, x0, x1);				\
	} while (0)
#define set_aapcs_args3(ctx, x0, x1, x2, x3)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
		set_aapcs_args2(ctx, x0, x1, x2);			\
	} while (0)
#define set_aapcs_args4(ctx, x0, x1, x2, x3, x4)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
	} while (0)
#define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
	} while (0)
#define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
	} while (0)
#define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
	} while (0)
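
/*
 * Illustrative usage (a sketch with hypothetical values): to have the lower
 * EL resume with a return code and three arguments in x0-x3, a caller could
 * write
 *
 *	set_aapcs_args3(ctx, ret, arg1, arg2, arg3);
 *
 * where 'ctx' is the context handle described above and 'ret', 'arg1',
 * 'arg2' and 'arg3' are caller-defined 64-bit values.
 */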

/*******************************************************************************
 * Function prototypes
 ******************************************************************************/
void el1_sysregs_context_save(el1_sysregs_t *regs);
void el1_sysregs_context_restore(el1_sysregs_t *regs);

#if CTX_INCLUDE_EL2_REGS
void el2_sysregs_context_save_common(el2_sysregs_t *regs);
void el2_sysregs_context_restore_common(el2_sysregs_t *regs);
#if ENABLE_SPE_FOR_LOWER_ELS
void el2_sysregs_context_save_spe(el2_sysregs_t *regs);
void el2_sysregs_context_restore_spe(el2_sysregs_t *regs);
#endif /* ENABLE_SPE_FOR_LOWER_ELS */
#if CTX_INCLUDE_MTE_REGS
void el2_sysregs_context_save_mte(el2_sysregs_t *regs);
void el2_sysregs_context_restore_mte(el2_sysregs_t *regs);
#endif /* CTX_INCLUDE_MTE_REGS */
#if ENABLE_MPAM_FOR_LOWER_ELS
void el2_sysregs_context_save_mpam(el2_sysregs_t *regs);
void el2_sysregs_context_restore_mpam(el2_sysregs_t *regs);
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
#if ENABLE_FEAT_FGT
void el2_sysregs_context_save_fgt(el2_sysregs_t *regs);
void el2_sysregs_context_restore_fgt(el2_sysregs_t *regs);
#endif /* ENABLE_FEAT_FGT */
#if ENABLE_FEAT_ECV
void el2_sysregs_context_save_ecv(el2_sysregs_t *regs);
void el2_sysregs_context_restore_ecv(el2_sysregs_t *regs);
#endif /* ENABLE_FEAT_ECV */
#if ENABLE_FEAT_VHE
void el2_sysregs_context_save_vhe(el2_sysregs_t *regs);
void el2_sysregs_context_restore_vhe(el2_sysregs_t *regs);
#endif /* ENABLE_FEAT_VHE */
#if RAS_EXTENSION
void el2_sysregs_context_save_ras(el2_sysregs_t *regs);
void el2_sysregs_context_restore_ras(el2_sysregs_t *regs);
#endif /* RAS_EXTENSION */
#if CTX_INCLUDE_NEVE_REGS
void el2_sysregs_context_save_nv2(el2_sysregs_t *regs);
void el2_sysregs_context_restore_nv2(el2_sysregs_t *regs);
#endif /* CTX_INCLUDE_NEVE_REGS */
#if ENABLE_TRF_FOR_NS
void el2_sysregs_context_save_trf(el2_sysregs_t *regs);
void el2_sysregs_context_restore_trf(el2_sysregs_t *regs);
#endif /* ENABLE_TRF_FOR_NS */
#if ENABLE_FEAT_CSV2_2
void el2_sysregs_context_save_csv2(el2_sysregs_t *regs);
void el2_sysregs_context_restore_csv2(el2_sysregs_t *regs);
#endif /* ENABLE_FEAT_CSV2_2 */
#if ENABLE_FEAT_HCX
void el2_sysregs_context_save_hcx(el2_sysregs_t *regs);
void el2_sysregs_context_restore_hcx(el2_sysregs_t *regs);
#endif /* ENABLE_FEAT_HCX */
#endif /* CTX_INCLUDE_EL2_REGS */

#if CTX_INCLUDE_FPREGS
void fpregs_context_save(fp_regs_t *regs);
void fpregs_context_restore(fp_regs_t *regs);
#endif

#endif /* __ASSEMBLER__ */

#endif /* CONTEXT_H */