/*
 * Copyright (c) 2015-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef SMCCC_HELPERS_H
#define SMCCC_HELPERS_H

#include <lib/smccc.h>

/* Definitions to help the assembler access the SMC/ERET args structure */
#define SMC_ARGS_SIZE   0x40
#define SMC_ARG0        0x0
#define SMC_ARG1        0x8
#define SMC_ARG2        0x10
#define SMC_ARG3        0x18
#define SMC_ARG4        0x20
#define SMC_ARG5        0x28
#define SMC_ARG6        0x30
#define SMC_ARG7        0x38
#define SMC_ARGS_END    0x40
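
/*
 * Illustrative sketch (not part of the original header): how assembler code
 * might use the offsets above to unpack one SMC args structure. The base
 * register (x9) and the surrounding code are hypothetical assumptions made
 * only for this example.
 *
 *      ldr     x0, [x9, #SMC_ARG0]     // first argument
 *      ldr     x1, [x9, #SMC_ARG1]     // second argument
 *      ldr     x7, [x9, #SMC_ARG7]     // last argument
 *      add     x9, x9, #SMC_ARGS_SIZE  // step past one args structure
 */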

#ifndef __ASSEMBLER__

#include <stdbool.h>
#include <stdint.h>

#include <context.h>

#include <platform_def.h> /* For CACHE_WRITEBACK_GRANULE */

/* Convenience macros to return from SMC handler */
#define SMC_RET0(_h) { \
        return (uint64_t) (_h); \
}
#define SMC_RET1(_h, _x0) { \
        write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X0), (_x0)); \
        SMC_RET0(_h); \
}
#define SMC_RET2(_h, _x0, _x1) { \
        write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X1), (_x1)); \
        SMC_RET1(_h, (_x0)); \
}
#define SMC_RET3(_h, _x0, _x1, _x2) { \
        write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X2), (_x2)); \
        SMC_RET2(_h, (_x0), (_x1)); \
}
#define SMC_RET4(_h, _x0, _x1, _x2, _x3) { \
        write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X3), (_x3)); \
        SMC_RET3(_h, (_x0), (_x1), (_x2)); \
}
#define SMC_RET5(_h, _x0, _x1, _x2, _x3, _x4) { \
        write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X4), (_x4)); \
        SMC_RET4(_h, (_x0), (_x1), (_x2), (_x3)); \
}
#define SMC_RET6(_h, _x0, _x1, _x2, _x3, _x4, _x5) { \
        write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X5), (_x5)); \
        SMC_RET5(_h, (_x0), (_x1), (_x2), (_x3), (_x4)); \
}
#define SMC_RET7(_h, _x0, _x1, _x2, _x3, _x4, _x5, _x6) { \
        write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X6), (_x6)); \
        SMC_RET6(_h, (_x0), (_x1), (_x2), (_x3), (_x4), (_x5)); \
}
#define SMC_RET8(_h, _x0, _x1, _x2, _x3, _x4, _x5, _x6, _x7) { \
        write_ctx_reg((get_gpregs_ctx(_h)), (CTX_GPREG_X7), (_x7)); \
        SMC_RET7(_h, (_x0), (_x1), (_x2), (_x3), (_x4), (_x5), (_x6)); \
}
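
/*
 * Illustrative sketch (not part of the original header): how a runtime
 * service handler might use the SMC_RETx macros above to return values to
 * the caller. The handler name, function ID and return values below are
 * hypothetical assumptions; SMC_OK and SMC_UNK come from lib/smccc.h.
 *
 *      static uintptr_t example_smc_handler(uint32_t smc_fid, u_register_t x1,
 *                                           u_register_t x2, u_register_t x3,
 *                                           u_register_t x4, void *cookie,
 *                                           void *handle, u_register_t flags)
 *      {
 *              if (smc_fid == EXAMPLE_FID)
 *                      SMC_RET2(handle, SMC_OK, x1 + x2);
 *
 *              SMC_RET1(handle, SMC_UNK);
 *      }
 */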

/*
 * Convenience macros to access general-purpose registers using the handle
 * provided to the SMC handler. These take the offset values defined in
 * context.h.
 */
#define SMC_GET_GP(_h, _g) \
        read_ctx_reg((get_gpregs_ctx(_h)), (_g))
#define SMC_SET_GP(_h, _g, _v) \
        write_ctx_reg((get_gpregs_ctx(_h)), (_g), (_v))

/*
 * Convenience macros to access EL3 context registers using the handle
 * provided to the SMC handler. These take the offset values defined in
 * context.h.
 */
#define SMC_GET_EL3(_h, _e) \
        read_ctx_reg((get_el3state_ctx(_h)), (_e))
#define SMC_SET_EL3(_h, _e, _v) \
        write_ctx_reg((get_el3state_ctx(_h)), (_e), (_v))
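
/*
 * Illustrative sketch (not part of the original header): how a handler might
 * use the accessors above, assuming "handle" points to a valid cpu_context_t.
 * The CTX_* offsets come from context.h; "new_entrypoint" is a hypothetical
 * variable used only for illustration.
 *
 *      uint64_t x1 = SMC_GET_GP(handle, CTX_GPREG_X1);
 *      uint64_t spsr = SMC_GET_EL3(handle, CTX_SPSR_EL3);
 *
 *      SMC_SET_GP(handle, CTX_GPREG_X4, 0ULL);
 *      SMC_SET_EL3(handle, CTX_ELR_EL3, new_entrypoint);
 */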

/*
 * Helper macro to retrieve the SMC parameters from cpu_context_t.
 */
#define get_smc_params_from_ctx(_hdl, _x1, _x2, _x3, _x4) \
        do { \
                const gp_regs_t *regs = get_gpregs_ctx(_hdl); \
                _x1 = read_ctx_reg(regs, CTX_GPREG_X1); \
                _x2 = read_ctx_reg(regs, CTX_GPREG_X2); \
                _x3 = read_ctx_reg(regs, CTX_GPREG_X3); \
                _x4 = read_ctx_reg(regs, CTX_GPREG_X4); \
        } while (false)
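
/*
 * Illustrative sketch (not part of the original header): the macro above is
 * typically used at the top of a handler to pull x1-x4 out of the saved
 * context in one statement. The variable names here are hypothetical.
 *
 *      uint64_t x1, x2, x3, x4;
 *
 *      get_smc_params_from_ctx(handle, x1, x2, x3, x4);
 */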

typedef struct {
        uint64_t _regs[SMC_ARGS_END >> 3];
} __aligned(CACHE_WRITEBACK_GRANULE) smc_args_t;

/*
 * Ensure that the assembler's view of the size of the smc_args_t structure is
 * the same as the compiler's.
 */
CASSERT(sizeof(smc_args_t) == SMC_ARGS_SIZE, assert_sp_args_size_mismatch);

static inline smc_args_t smc_helper(uint32_t func, uint64_t arg0,
                                    uint64_t arg1, uint64_t arg2,
                                    uint64_t arg3, uint64_t arg4,
                                    uint64_t arg5, uint64_t arg6)
{
        smc_args_t ret_args = {0};

        register uint64_t r0 __asm__("x0") = func;
        register uint64_t r1 __asm__("x1") = arg0;
        register uint64_t r2 __asm__("x2") = arg1;
        register uint64_t r3 __asm__("x3") = arg2;
        register uint64_t r4 __asm__("x4") = arg3;
        register uint64_t r5 __asm__("x5") = arg4;
        register uint64_t r6 __asm__("x6") = arg5;
        register uint64_t r7 __asm__("x7") = arg6;

        /* Output registers, also used as inputs ('+' constraint). */
        __asm__ volatile("smc #0"
                         : "+r"(r0), "+r"(r1), "+r"(r2), "+r"(r3), "+r"(r4),
                           "+r"(r5), "+r"(r6), "+r"(r7));

        ret_args._regs[0] = r0;
        ret_args._regs[1] = r1;
        ret_args._regs[2] = r2;
        ret_args._regs[3] = r3;
        ret_args._regs[4] = r4;
        ret_args._regs[5] = r5;
        ret_args._regs[6] = r6;
        ret_args._regs[7] = r7;

        return ret_args;
}
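
/*
 * Illustrative sketch (not part of the original header): issuing an SMC from
 * C code with the helper above. The function ID and argument values are
 * hypothetical; on return, _regs[0] holds x0, the SMCCC result register.
 *
 *      smc_args_t ret = smc_helper(EXAMPLE_FID, 0ULL, 0ULL, 0ULL,
 *                                  0ULL, 0ULL, 0ULL, 0ULL);
 *      uint64_t result = ret._regs[0];
 */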

#endif /*__ASSEMBLER__*/

#endif /* SMCCC_HELPERS_H */