1 /* SPDX-License-Identifier: BSD-2-Clause */
2 /*
3  * Copyright 2022 NXP
4  */
5 
6 #ifndef RISCV_H
7 #define RISCV_H
8 
9 #include <compiler.h>
10 #include <encoding.h>
11 #include <stdint.h>
12 #include <sys/cdefs.h>
13 #include <util.h>
14 
/* Native register width (XLEN) as reported by the compiler */
#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

/* Byte offset of register slot @x in an array of XLEN-sized registers */
#define REGOFF(x)			((x) * RISCV_XLEN_BYTES)

/* Assembly aliases for the XLEN-sized store/load instructions */
#if __riscv_xlen == 32
#define STR       sw
#define LDR       lw
#else
#define STR       sd
#define LDR       ld
#endif
27 
28 #if defined(CFG_RISCV_M_MODE)
29 #define CSR_MODE_OFFSET	PRV_M
30 #define XRET			mret
31 #elif defined(CFG_RISCV_S_MODE)
32 #define CSR_MODE_OFFSET	PRV_S
33 #define XRET			sret
34 #endif
35 
/*
 * Privileged CSR numbers encode the owning mode in bits [9:8] (e.g.
 * mstatus = 0x300, sstatus = 0x100 per the RISC-V privileged spec), so
 * OR-ing the mode offset shifted into bits [9:8] with the low register
 * index yields the CSR number for the configured mode.
 * NOTE(review): assumes PRV_M/PRV_S from <encoding.h> are the spec
 * values 3/1 — confirm against that header.
 */
#define CSR_MODE_BITS	SHIFT_U64(CSR_MODE_OFFSET, 8)

#define CSR_XSTATUS		(CSR_MODE_BITS | 0x000)
#define CSR_XIE			(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC		(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH		(CSR_MODE_BITS | 0x040)
#define CSR_XEPC		(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE		(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL		(CSR_MODE_BITS | 0x043)
#define CSR_XIP			(CSR_MODE_BITS | 0x044)
46 
47 #ifndef __ASSEMBLER__
48 
/* Full memory barrier: FENCE orders all prior memory accesses before
 * all subsequent ones; the "memory" clobber stops compiler reordering.
 */
static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}
53 
read_tp(void)54 static inline __noprof unsigned long read_tp(void)
55 {
56 	unsigned long tp;
57 
58 	asm volatile("mv %0, tp" : "=&r"(tp));
59 	return tp;
60 }
61 
/* Wait-for-interrupt hint: stall the hart until an interrupt is pending. */
static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}
66 
flush_tlb(void)67 static inline __noprof void flush_tlb(void)
68 {
69 	asm volatile("sfence.vma zero, zero");
70 }
71 
/* Flush TLB entries covering virtual address @va (any ASID). */
static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}
76 
77 /* supervisor address translation and protection */
read_satp(void)78 static inline __noprof unsigned long read_satp(void)
79 {
80 	unsigned long satp;
81 
82 	asm volatile("csrr %0, satp" : "=r" (satp));
83 
84 	return satp;
85 }
86 
/* Write satp: installs a new translation mode/root page table/ASID. */
static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}
91 
92 /* machine trap-vector base-address register */
read_mtvec(void)93 static inline __noprof unsigned long read_mtvec(void)
94 {
95 	unsigned long mtvec;
96 
97 	asm volatile("csrr %0, mtvec" : "=r" (mtvec));
98 
99 	return mtvec;
100 }
101 
/* Write mtvec: sets the machine-mode trap handler base (and mode bits). */
static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}
106 
107 /* supervisor trap-vector base-address register */
read_stvec(void)108 static inline __noprof unsigned long read_stvec(void)
109 {
110 	unsigned long stvec;
111 
112 	asm volatile("csrr %0, stvec" : "=r" (stvec));
113 
114 	return stvec;
115 }
116 
/* Write stvec: sets the supervisor trap handler base (and mode bits). */
static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}
121 
122 /* machine status register */
read_mstatus(void)123 static inline __noprof unsigned long read_mstatus(void)
124 {
125 	unsigned long mstatus;
126 
127 	asm volatile("csrr %0, mstatus" : "=r" (mstatus));
128 
129 	return mstatus;
130 }
131 
/* Write mstatus (machine status register). */
static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}
136 
137 /* supervisor status register */
read_sstatus(void)138 static inline __noprof unsigned long read_sstatus(void)
139 {
140 	unsigned long sstatus;
141 
142 	asm volatile("csrr %0, sstatus" : "=r" (sstatus));
143 
144 	return sstatus;
145 }
146 
/* Write sstatus, replacing its whole value (cf. set_sstatus() below). */
static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}
151 
set_sstatus(unsigned long sstatus)152 static inline __noprof void set_sstatus(unsigned long sstatus)
153 {
154 	unsigned long x;
155 
156 	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
157 }
158 
159 /* machine exception delegation */
read_medeleg(void)160 static inline __noprof unsigned long read_medeleg(void)
161 {
162 	unsigned long medeleg;
163 
164 	asm volatile("csrr %0, medeleg" : "=r" (medeleg));
165 
166 	return medeleg;
167 }
168 
/* Write medeleg: each set bit delegates that exception to S-mode. */
static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}
173 
174 /* machine interrupt delegation */
read_mideleg(void)175 static inline __noprof unsigned long read_mideleg(void)
176 {
177 	unsigned long mideleg;
178 
179 	asm volatile("csrr %0, mideleg" : "=r" (mideleg));
180 
181 	return mideleg;
182 }
183 
/* Write mideleg: each set bit delegates that interrupt to S-mode. */
static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}
188 
189 /* machine interrupt-enable register */
read_mie(void)190 static inline __noprof unsigned long read_mie(void)
191 {
192 	unsigned long mie;
193 
194 	asm volatile("csrr %0, mie" : "=r" (mie));
195 
196 	return mie;
197 }
198 
/* Write mie (machine interrupt-enable register). */
static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}
203 
204 /* supervisor interrupt-enable register */
read_sie(void)205 static inline __noprof unsigned long read_sie(void)
206 {
207 	unsigned long sie;
208 
209 	asm volatile("csrr %0, sie" : "=r" (sie));
210 
211 	return sie;
212 }
213 
/* Write sie (supervisor interrupt-enable register). */
static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}
218 
219 /* machine exception program counter */
read_mepc(void)220 static inline __noprof unsigned long read_mepc(void)
221 {
222 	unsigned long mepc;
223 
224 	asm volatile("csrr %0, mepc" : "=r" (mepc));
225 
226 	return mepc;
227 }
228 
/* Write mepc: the address mret will return to. */
static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}
233 
234 /* supervisor exception program counter */
read_sepc(void)235 static inline __noprof unsigned long read_sepc(void)
236 {
237 	unsigned long sepc;
238 
239 	asm volatile("csrr %0, sepc" : "=r" (sepc));
240 
241 	return sepc;
242 }
243 
/* Write sepc: the address sret will return to. */
static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}
248 
249 /* machine scratch register */
read_mscratch(void)250 static inline __noprof unsigned long read_mscratch(void)
251 {
252 	unsigned long mscratch;
253 
254 	asm volatile("csrr %0, mscratch" : "=r" (mscratch));
255 
256 	return mscratch;
257 }
258 
/* Write mscratch (machine scratch register). */
static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}
263 
264 /* supervisor scratch register */
read_sscratch(void)265 static inline __noprof unsigned long read_sscratch(void)
266 {
267 	unsigned long sscratch;
268 
269 	asm volatile("csrr %0, sscratch" : "=r" (sscratch));
270 
271 	return sscratch;
272 }
273 
/* Write sscratch (supervisor scratch register). */
static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}
278 
279 /* trap-return instructions */
/* Return from a machine-mode trap (jumps to mepc, restores mstatus). */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}
284 
/* Return from a supervisor-mode trap (jumps to sepc, restores sstatus). */
static inline __noprof void sret(void)
{
	asm volatile("sret");
}
289 
/*
 * Return from a user-mode trap.
 * NOTE(review): uret belongs to the unratified N extension and was
 * dropped from the ratified privileged spec — confirm the target
 * toolchain/hardware still accepts it before relying on this.
 */
static inline __noprof void uret(void)
{
	asm volatile("uret");
}
294 
295 #endif /*__ASSEMBLER__*/
296 
297 #endif /*RISCV_H*/
298