/*
 * Copyright (c) 2021-2023 HPMicro
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */

#ifndef HPM_INTERRUPT_H
#define HPM_INTERRUPT_H
#include "hpm_common.h"
#include "hpm_csr_drv.h"
#include "hpm_plic_drv.h"

/**
 * @brief INTERRUPT driver APIs
 * @defgroup irq_interface INTERRUPT driver APIs
 * @{
 */

#define M_MODE 0 /*!< Machine mode */
#define S_MODE 1 /*!< Supervisor mode */

#ifdef __cplusplus
extern "C" {
#endif

/* Machine mode API: these APIs are intended to be called in machine mode */

/**
 * @brief Enable global IRQ with mask
 *
 * @param[in] mask interrupt mask to be enabled
 */
ATTR_ALWAYS_INLINE static inline void enable_global_irq(uint32_t mask)
{
    set_csr(CSR_MSTATUS, mask);
}

/**
 * @brief Disable global IRQ with mask and return mstatus
 *
 * @param[in] mask interrupt mask to be disabled
 * @retval current mstatus value before irq mask is disabled
 */
ATTR_ALWAYS_INLINE static inline uint32_t disable_global_irq(uint32_t mask)
{
    return read_clear_csr(CSR_MSTATUS, mask);
}

/**
 * @brief Restore global IRQ with mask
 *
 * @param[in] mask interrupt mask to be restored
 */
ATTR_ALWAYS_INLINE static inline void restore_global_irq(uint32_t mask)
{
    set_csr(CSR_MSTATUS, mask);
}
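
/*
 * Example (sketch): the three helpers above are typically paired to build a short
 * machine-mode critical section. disable_global_irq() returns the previous mstatus
 * value, so the caller can restore MIE only if it was set before. Only the
 * CSR_MSTATUS_MIE_MASK macro already used elsewhere in this header is assumed.
 *
 * @code
 * #include "hpm_interrupt.h"
 *
 * static void increment_shared_counter(volatile uint32_t *counter)
 * {
 *     uint32_t prev = disable_global_irq(CSR_MSTATUS_MIE_MASK); // previous mstatus
 *     (*counter)++;                                             // not preemptible by M-mode IRQs
 *     restore_global_irq(prev & CSR_MSTATUS_MIE_MASK);          // re-enable MIE only if it was set
 * }
 * @endcode
 */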

/**
 * @brief Enable IRQ from interrupt controller
 *
 */
ATTR_ALWAYS_INLINE static inline void enable_irq_from_intc(void)
{
    set_csr(CSR_MIE, CSR_MIE_MEIE_MASK);
}

/**
 * @brief Disable IRQ from interrupt controller
 *
 */
ATTR_ALWAYS_INLINE static inline void disable_irq_from_intc(void)
{
    clear_csr(CSR_MIE, CSR_MIE_MEIE_MASK);
}

/**
 * @brief Enable machine timer IRQ
 */
ATTR_ALWAYS_INLINE static inline void enable_mchtmr_irq(void)
{
    set_csr(CSR_MIE, CSR_MIE_MTIE_MASK);
}

/**
 * @brief Disable machine timer IRQ
 *
 */
ATTR_ALWAYS_INLINE static inline void disable_mchtmr_irq(void)
{
    clear_csr(CSR_MIE, CSR_MIE_MTIE_MASK);
}
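
/*
 * Example (sketch): enabling the machine timer interrupt end-to-end. The
 * SDK_DECLARE_MCHTMR_ISR() macro used here is defined later in this header; the
 * tick_isr name is a placeholder, and programming the machine timer compare value
 * is left to the separate machine timer driver.
 *
 * @code
 * #include "hpm_interrupt.h"
 *
 * SDK_DECLARE_MCHTMR_ISR(tick_isr)
 * void tick_isr(void)
 * {
 *     // reprogram the machine timer compare register here
 * }
 *
 * static void tick_init(void)
 * {
 *     enable_mchtmr_irq();                     // MIE.MTIE
 *     enable_global_irq(CSR_MSTATUS_MIE_MASK); // MSTATUS.MIE
 * }
 * @endcode
 */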

/**
 * @brief Delegate IRQ handling
 *
 * @param[in] mask interrupt mask to be delegated
 */
ATTR_ALWAYS_INLINE static inline void delegate_irq(uint32_t mask)
{
    set_csr(CSR_MIDELEG, mask);
}

/**
 * @brief Undelegate IRQ handling
 *
 * @param[in] mask interrupt mask to be undelegated
 */
ATTR_ALWAYS_INLINE static inline void undelegate_irq(uint32_t mask)
{
    clear_csr(CSR_MIDELEG, mask);
}


/* Supervisor mode API: these APIs are intended to be called in supervisor mode */

/**
 * @brief Enable global IRQ with mask for supervisor mode
 *
 * @param[in] mask interrupt mask to be enabled
 */
ATTR_ALWAYS_INLINE static inline void enable_s_global_irq(uint32_t mask)
{
    set_csr(CSR_SSTATUS, mask);
}

/**
 * @brief Disable global IRQ with mask and return sstatus for supervisor mode
 *
 * @param[in] mask interrupt mask to be disabled
 * @retval current sstatus value before irq mask is disabled
 */
ATTR_ALWAYS_INLINE static inline uint32_t disable_s_global_irq(uint32_t mask)
{
    return read_clear_csr(CSR_SSTATUS, mask);
}

/**
 * @brief Restore global IRQ with mask for supervisor mode
 *
 * @param[in] mask interrupt mask to be restored
 */
ATTR_ALWAYS_INLINE static inline void restore_s_global_irq(uint32_t mask)
{
    set_csr(CSR_SSTATUS, mask);
}

/**
 * @brief Disable IRQ from interrupt controller for supervisor mode
 *
 */
ATTR_ALWAYS_INLINE static inline void disable_s_irq_from_intc(void)
{
    clear_csr(CSR_SIE, CSR_SIE_SEIE_MASK);
}

/**
 * @brief Enable IRQ from interrupt controller for supervisor mode
 *
 */
ATTR_ALWAYS_INLINE static inline void enable_s_irq_from_intc(void)
{
    set_csr(CSR_SIE, CSR_SIE_SEIE_MASK);
}

/**
 * @brief Enable machine timer IRQ for supervisor mode
 */
ATTR_ALWAYS_INLINE static inline void enable_s_mchtmr_irq(void)
{
    set_csr(CSR_SIE, CSR_SIE_STIE_MASK);
}

/**
 * @brief Disable machine timer IRQ for supervisor mode
 *
 */
ATTR_ALWAYS_INLINE static inline void disable_s_mchtmr_irq(void)
{
    clear_csr(CSR_SIE, CSR_SIE_STIE_MASK);
}


/*
 * CPU Machine SWI control
 *
 * Machine SWI (MSIP) is connected to PLICSW irq 1.
 */
#define PLICSWI 1

/**
 * @brief Initialize software interrupt
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_m_init_swi(void)
{
    __plic_enable_irq(HPM_PLICSW_BASE, HPM_PLIC_TARGET_M_MODE, PLICSWI);
}


/**
 * @brief Enable software interrupt
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_m_enable_swi(void)
{
    set_csr(CSR_MIE, CSR_MIE_MSIE_MASK);
}


/**
 * @brief Disable software interrupt
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_m_disable_swi(void)
{
    clear_csr(CSR_MIE, CSR_MIE_MSIE_MASK);
}


/**
 * @brief Trigger software interrupt
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_m_trigger_swi(void)
{
    __plic_set_irq_pending(HPM_PLICSW_BASE, PLICSWI);
}

/**
 * @brief Claim software interrupt
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_m_claim_swi(void)
{
    __plic_claim_irq(HPM_PLICSW_BASE, 0);
}

/**
 * @brief Complete software interrupt
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_m_complete_swi(void)
{
    __plic_complete_irq(HPM_PLICSW_BASE, HPM_PLIC_TARGET_M_MODE, PLICSWI);
}
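
/*
 * Example (sketch): a machine-mode software interrupt round trip using the helpers
 * above. SDK_DECLARE_SWI_ISR() is defined later in this header and swi_handler is a
 * placeholder name; whether the handler itself must claim/complete depends on how
 * the startup code dispatches the machine software interrupt.
 *
 * @code
 * #include "hpm_interrupt.h"
 *
 * SDK_DECLARE_SWI_ISR(swi_handler)
 * void swi_handler(void)
 * {
 *     intc_m_claim_swi();                      // claim PLICSW IRQ 1
 *     // deferred work goes here
 *     intc_m_complete_swi();                   // signal completion to PLICSW
 * }
 *
 * static void swi_demo(void)
 * {
 *     intc_m_init_swi();                       // route PLICSW IRQ 1 to the M-mode target
 *     intc_m_enable_swi();                     // MIE.MSIE
 *     enable_global_irq(CSR_MSTATUS_MIE_MASK); // MSTATUS.MIE
 *     intc_m_trigger_swi();                    // pend the software interrupt
 * }
 * @endcode
 */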

/*
 * @brief Enable IRQ for machine mode
 *
 * @param[in] irq Interrupt number
 */
#define intc_m_enable_irq(irq) \
    intc_enable_irq(HPM_PLIC_TARGET_M_MODE, irq)

/*
 * @brief Disable IRQ for machine mode
 *
 * @param[in] irq Interrupt number
 */
#define intc_m_disable_irq(irq) \
    intc_disable_irq(HPM_PLIC_TARGET_M_MODE, irq)

#define intc_m_set_threshold(threshold) \
    intc_set_threshold(HPM_PLIC_TARGET_M_MODE, threshold)

/*
 * @brief Complete IRQ for machine mode
 *
 * @param[in] irq Interrupt number
 */
#define intc_m_complete_irq(irq) \
    intc_complete_irq(HPM_PLIC_TARGET_M_MODE, irq)

/*
 * @brief Claim IRQ for machine mode
 *
 */
#define intc_m_claim_irq() intc_claim_irq(HPM_PLIC_TARGET_M_MODE)

/*
 * @brief Enable IRQ for machine mode with priority
 *
 * @param[in] irq Interrupt number
 * @param[in] priority Priority of interrupt
 */
#define intc_m_enable_irq_with_priority(irq, priority) \
    do { \
        intc_set_irq_priority(irq, priority); \
        intc_m_enable_irq(irq); \
    } while (0)
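
/*
 * Example (sketch): typical machine-mode setup for one external interrupt. The IRQ
 * number is a placeholder; real values come from the SoC header. Note that the PLIC
 * only delivers interrupts whose priority is above the current threshold.
 *
 * @code
 * #include "hpm_interrupt.h"
 *
 * #define APP_UART_IRQ 10u                          // placeholder IRQ number
 *
 * static void app_irq_setup(void)
 * {
 *     intc_m_set_threshold(0);                      // accept every priority above 0
 *     intc_m_enable_irq_with_priority(APP_UART_IRQ, 1);
 *     enable_irq_from_intc();                       // MIE.MEIE
 *     enable_global_irq(CSR_MSTATUS_MIE_MASK);      // MSTATUS.MIE
 * }
 * @endcode
 */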



/* Supervisor mode */

/**
 * @brief Enable software interrupt for supervisor mode
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_s_enable_swi(void)
{
    set_csr(CSR_SIE, CSR_SIE_SSIE_MASK);
}


/**
 * @brief Disable software interrupt for supervisor mode
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_s_disable_swi(void)
{
    clear_csr(CSR_SIE, CSR_SIE_SSIE_MASK);
}


/**
 * @brief Trigger software interrupt for supervisor mode
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_s_trigger_swi(void)
{
    set_csr(CSR_SIP, CSR_SIP_SSIP_MASK);
}


/**
 * @brief Complete software interrupt for supervisor mode
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_s_complete_swi(void)
{
    clear_csr(CSR_SIP, CSR_SIP_SSIP_MASK);
}

/*
 * @brief Enable IRQ for supervisor mode
 *
 * @param[in] irq Interrupt number
 */
#define intc_s_enable_irq(irq) \
    intc_enable_irq(HPM_PLIC_TARGET_S_MODE, irq)

/*
 * @brief Disable IRQ for supervisor mode
 *
 * @param[in] irq Interrupt number
 */
#define intc_s_disable_irq(irq) \
    intc_disable_irq(HPM_PLIC_TARGET_S_MODE, irq)

#define intc_set_s_threshold(threshold) \
    intc_set_threshold(HPM_PLIC_TARGET_S_MODE, threshold)

/*
 * @brief Complete IRQ for supervisor mode
 *
 * @param[in] irq Interrupt number
 */
#define intc_s_complete_irq(irq) \
    intc_complete_irq(HPM_PLIC_TARGET_S_MODE, irq)

/*
 * @brief Claim IRQ for supervisor mode
 *
 */
#define intc_s_claim_irq() intc_claim_irq(HPM_PLIC_TARGET_S_MODE)

/*
 * @brief Enable IRQ for supervisor mode with priority
 *
 * @param[in] irq Interrupt number
 * @param[in] priority Priority of interrupt
 */
#define intc_s_enable_irq_with_priority(irq, priority) \
    do { \
        intc_set_irq_priority(irq, priority); \
        intc_s_enable_irq(irq); \
    } while (0)


/*
 * @brief Enable specific interrupt
 *
 * @param[in] target Target to handle specific interrupt
 * @param[in] irq Interrupt number
 */
ATTR_ALWAYS_INLINE static inline void intc_enable_irq(uint32_t target, uint32_t irq)
{
    __plic_enable_irq(HPM_PLIC_BASE, target, irq);
}

/**
 * @brief Set interrupt priority
 *
 * @param[in] irq Interrupt number
 * @param[in] priority Priority of interrupt
 */
ATTR_ALWAYS_INLINE static inline void intc_set_irq_priority(uint32_t irq, uint32_t priority)
{
    __plic_set_irq_priority(HPM_PLIC_BASE, irq, priority);
}

/**
 * @brief Disable specific interrupt
 *
 * @param[in] target Target to handle specific interrupt
 * @param[in] irq Interrupt number
 */
ATTR_ALWAYS_INLINE static inline void intc_disable_irq(uint32_t target, uint32_t irq)
{
    __plic_disable_irq(HPM_PLIC_BASE, target, irq);
}

/**
 * @brief Set interrupt threshold
 *
 * @param[in] target Target to handle specific interrupt
 * @param[in] threshold Priority threshold; only IRQs with a higher priority are serviced
 */
ATTR_ALWAYS_INLINE static inline void intc_set_threshold(uint32_t target, uint32_t threshold)
{
    __plic_set_threshold(HPM_PLIC_BASE, target, threshold);
}

/**
 * @brief Claim IRQ
 *
 * @param[in] target Target to handle specific interrupt
 *
 */
ATTR_ALWAYS_INLINE static inline uint32_t intc_claim_irq(uint32_t target)
{
    return __plic_claim_irq(HPM_PLIC_BASE, target);
}

/**
 * @brief Complete IRQ
 *
 * @param[in] target Target to handle specific interrupt
 * @param[in] irq Specific IRQ to be completed
 *
 */
ATTR_ALWAYS_INLINE static inline void intc_complete_irq(uint32_t target, uint32_t irq)
{
    __plic_complete_irq(HPM_PLIC_BASE, target, irq);
}
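
/*
 * Example (sketch): a claim/complete dispatch loop such as a non-vectored external
 * interrupt handler might use. The handler table is a placeholder; in vectored builds
 * the SDK_DECLARE_EXT_ISR_M() wrappers below perform the completion instead.
 *
 * @code
 * #include <stddef.h>
 * #include "hpm_interrupt.h"
 *
 * extern void (*const app_irq_table[128])(void);   // placeholder handler table
 *
 * void machine_external_irq_dispatch(void)
 * {
 *     uint32_t irq = intc_m_claim_irq();           // 0 means nothing is pending
 *     if (irq != 0) {
 *         if (app_irq_table[irq] != NULL) {
 *             app_irq_table[irq]();
 *         }
 *         intc_m_complete_irq(irq);                // re-arm this source at the PLIC
 *     }
 * }
 * @endcode
 */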

/*
 * Vectored-mode ISR install and uninstall
 */
/* Machine mode */
extern int __vector_table[];
extern void default_irq_entry(void);

/**
 * @brief Install ISR for a specific IRQ in the RAM-based vector table
 *
 * @param[in] irq Target interrupt number
 * @param[in] isr Interrupt service routine
 *
 */
ATTR_ALWAYS_INLINE static inline void install_isr(uint32_t irq, uint32_t isr)
{
    __vector_table[irq] = isr;
}

/**
 * @brief Uninstall ISR for a specific IRQ from the RAM-based vector table
 *
 * @param[in] irq Target interrupt number
 *
 */
ATTR_ALWAYS_INLINE static inline void uninstall_isr(uint32_t irq)
{
    __vector_table[irq] = (int) default_irq_entry;
}
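
/*
 * Example (sketch): swapping a handler at run time in the RAM-based vector table. The
 * IRQ number and my_dma_isr are placeholders; the cast mirrors the int storage type of
 * __vector_table on this 32-bit target.
 *
 * @code
 * #include <stdint.h>
 * #include "hpm_interrupt.h"
 *
 * #define APP_DMA_IRQ 20u                          // placeholder IRQ number
 * void my_dma_isr(void);                           // application handler defined elsewhere
 *
 * static void swap_dma_handler(void)
 * {
 *     install_isr(APP_DMA_IRQ, (uint32_t)(uintptr_t)my_dma_isr); // point the vector at the new ISR
 *     // ... later ...
 *     uninstall_isr(APP_DMA_IRQ);                                // fall back to default_irq_entry
 * }
 * @endcode
 */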

/* Supervisor mode */
extern int __vector_s_table[];
extern void default_s_irq_entry(void);
/**
 * @brief Install ISR for a specific IRQ in the RAM-based vector table for supervisor mode
 *
 * @param[in] irq Target interrupt number
 * @param[in] isr Interrupt service routine
 *
 */
ATTR_ALWAYS_INLINE static inline void install_s_isr(uint32_t irq, uint32_t isr)
{
    __vector_s_table[irq] = isr;
}

/**
 * @brief Uninstall ISR for a specific IRQ from the RAM-based vector table for supervisor mode
 *
 * @param[in] irq Target interrupt number
 *
 */
ATTR_ALWAYS_INLINE static inline void uninstall_s_isr(uint32_t irq)
{
    __vector_s_table[irq] = (int) default_s_irq_entry;
}


/*
 * Inline nested irq entry/exit macros
 */
/*
 * @brief Save CSR
 * @param[in] r Target CSR to be saved
 */
#define SAVE_CSR(r) register long __##r = read_csr(r);

/*
 * @brief Restore CSR
 *
 * @param[in] r Target CSR to be restored
 */
#define RESTORE_CSR(r) write_csr(r, __##r);

#if defined(SUPPORT_PFT_ARCH) && SUPPORT_PFT_ARCH
#define SAVE_MXSTATUS() SAVE_CSR(CSR_MXSTATUS)
#define RESTORE_MXSTATUS() RESTORE_CSR(CSR_MXSTATUS)
#else
#define SAVE_MXSTATUS()
#define RESTORE_MXSTATUS()
#endif

#ifdef __riscv_flen
#define SAVE_FCSR() register int __fcsr = read_fcsr();
#define RESTORE_FCSR() write_fcsr(__fcsr);
#else
#define SAVE_FCSR()
#define RESTORE_FCSR()
#endif

#ifdef __riscv_dsp
#define SAVE_UCODE() SAVE_CSR(CSR_UCODE)
#define RESTORE_UCODE() RESTORE_CSR(CSR_UCODE)
#else
#define SAVE_UCODE()
#define RESTORE_UCODE()
#endif

#ifdef __riscv_flen
#if __riscv_flen == 32
/* RV32I caller registers + MCAUSE + MEPC + MSTATUS + MXSTATUS + 20 FPU caller registers + FCSR + UCODE (DSP) */
#define CONTEXT_REG_NUM (4 * (16 + 4 + 20))
#else /* __riscv_flen == 64 */
/* RV32I caller registers + MCAUSE + MEPC + MSTATUS + MXSTATUS + 20 DFPU caller registers + FCSR + UCODE (DSP) */
#define CONTEXT_REG_NUM (4 * (16 + 4 + 20 * 2))
#endif

#else
/* RV32I caller registers + MCAUSE + MEPC + MSTATUS + MXSTATUS + UCODE (DSP) */
#define CONTEXT_REG_NUM (4 * (16 + 4))
#endif

561 #ifdef __riscv_flen
562 /*
563 * Save FPU caller registers:
564 * NOTE: To simplify the logic, the FPU caller registers are always stored at word offset 20 in the stack
565 */
566 #if __riscv_flen == 32
567 #ifdef __ICCRISCV__
568 #define SAVE_FPU_CONTEXT() { \
569 __asm volatile("\n\
570 c.fswsp ft0, 20*4\n\
571 c.fswsp ft1, 21*4 \n\
572 c.fswsp ft2, 22*4 \n\
573 c.fswsp ft3, 23*4 \n\
574 c.fswsp ft4, 24*4 \n\
575 c.fswsp ft5, 25*4 \n\
576 c.fswsp ft6, 26*4 \n\
577 c.fswsp ft7, 27*4 \n\
578 c.fswsp fa0, 28*4 \n\
579 c.fswsp fa1, 29*4 \n\
580 c.fswsp fa2, 30*4 \n\
581 c.fswsp fa3, 31*4 \n\
582 c.fswsp fa4, 32*4 \n\
583 c.fswsp fa5, 33*4 \n\
584 c.fswsp fa6, 34*4 \n\
585 c.fswsp fa7, 35*4 \n\
586 c.fswsp ft8, 36*4 \n\
587 c.fswsp ft9, 37*4 \n\
588 c.fswsp ft10, 38*4 \n\
589 c.fswsp ft11, 39*4 \n");\
590 }
591
592 /*
593 * Restore FPU caller registers:
594 * NOTE: To simplify the logic, the FPU caller registers are always stored at word offset 20 in the stack
595 */
596 #define RESTORE_FPU_CONTEXT() { \
597 __asm volatile("\n\
598 c.flwsp ft0, 20*4\n\
599 c.flwsp ft1, 21*4 \n\
600 c.flwsp ft2, 22*4 \n\
601 c.flwsp ft3, 23*4 \n\
602 c.flwsp ft4, 24*4 \n\
603 c.flwsp ft5, 25*4 \n\
604 c.flwsp ft6, 26*4 \n\
605 c.flwsp ft7, 27*4 \n\
606 c.flwsp fa0, 28*4 \n\
607 c.flwsp fa1, 29*4 \n\
608 c.flwsp fa2, 30*4 \n\
609 c.flwsp fa3, 31*4 \n\
610 c.flwsp fa4, 32*4 \n\
611 c.flwsp fa5, 33*4 \n\
612 c.flwsp fa6, 34*4 \n\
613 c.flwsp fa7, 35*4 \n\
614 c.flwsp ft8, 36*4 \n\
615 c.flwsp ft9, 37*4 \n\
616 c.flwsp ft10, 38*4 \n\
617 c.flwsp ft11, 39*4 \n");\
618 }
619 #else /* __ICCRISCV__ not defined */
620 #define SAVE_FPU_CONTEXT() { \
621 __asm volatile("\n\
622 c.fswsp ft0, 20*4(sp)\n\
623 c.fswsp ft1, 21*4(sp) \n\
624 c.fswsp ft2, 22*4(sp) \n\
625 c.fswsp ft3, 23*4(sp) \n\
626 c.fswsp ft4, 24*4(sp) \n\
627 c.fswsp ft5, 25*4(sp) \n\
628 c.fswsp ft6, 26*4(sp) \n\
629 c.fswsp ft7, 27*4(sp) \n\
630 c.fswsp fa0, 28*4(sp) \n\
631 c.fswsp fa1, 29*4(sp) \n\
632 c.fswsp fa2, 30*4(sp) \n\
633 c.fswsp fa3, 31*4(sp) \n\
634 c.fswsp fa4, 32*4(sp) \n\
635 c.fswsp fa5, 33*4(sp) \n\
636 c.fswsp fa6, 34*4(sp) \n\
637 c.fswsp fa7, 35*4(sp) \n\
638 c.fswsp ft8, 36*4(sp) \n\
639 c.fswsp ft9, 37*4(sp) \n\
640 c.fswsp ft10, 38*4(sp) \n\
641 c.fswsp ft11, 39*4(sp) \n");\
642 }
643
644 /*
645 * Restore FPU caller registers:
646 * NOTE: To simplify the logic, the FPU caller registers are always stored at word offset 20 in the stack
647 */
648 #define RESTORE_FPU_CONTEXT() { \
649 __asm volatile("\n\
650 c.flwsp ft0, 20*4(sp)\n\
651 c.flwsp ft1, 21*4(sp) \n\
652 c.flwsp ft2, 22*4(sp) \n\
653 c.flwsp ft3, 23*4(sp) \n\
654 c.flwsp ft4, 24*4(sp) \n\
655 c.flwsp ft5, 25*4(sp) \n\
656 c.flwsp ft6, 26*4(sp) \n\
657 c.flwsp ft7, 27*4(sp) \n\
658 c.flwsp fa0, 28*4(sp) \n\
659 c.flwsp fa1, 29*4(sp) \n\
660 c.flwsp fa2, 30*4(sp) \n\
661 c.flwsp fa3, 31*4(sp) \n\
662 c.flwsp fa4, 32*4(sp) \n\
663 c.flwsp fa5, 33*4(sp) \n\
664 c.flwsp fa6, 34*4(sp) \n\
665 c.flwsp fa7, 35*4(sp) \n\
666 c.flwsp ft8, 36*4(sp) \n\
667 c.flwsp ft9, 37*4(sp) \n\
668 c.flwsp ft10, 38*4(sp) \n\
669 c.flwsp ft11, 39*4(sp) \n");\
670 }
671 #endif
672 #else /*__riscv_flen == 64*/
673 #ifdef __ICCRISCV__
674 #define SAVE_FPU_CONTEXT() { \
675 __asm volatile("\n\
676 c.fsdsp ft0, 20*4\n\
677 c.fsdsp ft1, 22*4 \n\
678 c.fsdsp ft2, 24*4 \n\
679 c.fsdsp ft3, 26*4 \n\
680 c.fsdsp ft4, 28*4 \n\
681 c.fsdsp ft5, 30*4 \n\
682 c.fsdsp ft6, 32*4 \n\
683 c.fsdsp ft7, 34*4 \n\
684 c.fsdsp fa0, 36*4 \n\
685 c.fsdsp fa1, 38*4 \n\
686 c.fsdsp fa2, 40*4 \n\
687 c.fsdsp fa3, 42*4 \n\
688 c.fsdsp fa4, 44*4 \n\
689 c.fsdsp fa5, 46*4 \n\
690 c.fsdsp fa6, 48*4 \n\
691 c.fsdsp fa7, 50*4 \n\
692 c.fsdsp ft8, 52*4 \n\
693 c.fsdsp ft9, 54*4 \n\
694 c.fsdsp ft10, 56*4 \n\
695 c.fsdsp ft11, 58*4 \n");\
696 }
697
698 /*
699 * Restore FPU caller registers:
700 * NOTE: To simplify the logic, the FPU caller registers are always stored at word offset 20 in the stack
701 */
702 #define RESTORE_FPU_CONTEXT() { \
703 __asm volatile("\n\
704 c.fldsp ft0, 20*4\n\
705 c.fldsp ft1, 22*4 \n\
706 c.fldsp ft2, 24*4 \n\
707 c.fldsp ft3, 26*4 \n\
708 c.fldsp ft4, 28*4 \n\
709 c.fldsp ft5, 30*4 \n\
710 c.fldsp ft6, 32*4 \n\
711 c.fldsp ft7, 34*4 \n\
712 c.fldsp fa0, 36*4 \n\
713 c.fldsp fa1, 38*4 \n\
714 c.fldsp fa2, 40*4 \n\
715 c.fldsp fa3, 42*4 \n\
716 c.fldsp fa4, 44*4 \n\
717 c.fldsp fa5, 46*4 \n\
718 c.fldsp fa6, 48*4 \n\
719 c.fldsp fa7, 50*4 \n\
720 c.fldsp ft8, 52*4 \n\
721 c.fldsp ft9, 54*4 \n\
722 c.fldsp ft10, 56*4 \n\
723 c.fldsp ft11, 58*4 \n");\
724 }
#else /* __ICCRISCV__ not defined */
726 #define SAVE_FPU_CONTEXT() { \
727 __asm volatile("\n\
728 c.fsdsp ft0, 20*4(sp)\n\
729 c.fsdsp ft1, 22*4(sp) \n\
730 c.fsdsp ft2, 24*4(sp) \n\
731 c.fsdsp ft3, 26*4(sp) \n\
732 c.fsdsp ft4, 28*4(sp) \n\
733 c.fsdsp ft5, 30*4(sp) \n\
734 c.fsdsp ft6, 32*4(sp) \n\
735 c.fsdsp ft7, 34*4(sp) \n\
736 c.fsdsp fa0, 36*4(sp) \n\
737 c.fsdsp fa1, 38*4(sp) \n\
738 c.fsdsp fa2, 40*4(sp) \n\
739 c.fsdsp fa3, 42*4(sp) \n\
740 c.fsdsp fa4, 44*4(sp) \n\
741 c.fsdsp fa5, 46*4(sp) \n\
742 c.fsdsp fa6, 48*4(sp) \n\
743 c.fsdsp fa7, 50*4(sp) \n\
744 c.fsdsp ft8, 52*4(sp) \n\
745 c.fsdsp ft9, 54*4(sp) \n\
746 c.fsdsp ft10, 56*4(sp) \n\
747 c.fsdsp ft11, 58*4(sp) \n");\
748 }
749
750 /*
751 * Restore FPU caller registers:
752 * NOTE: To simplify the logic, the FPU caller registers are always stored at word offset 20 in the stack
753 */
754 #define RESTORE_FPU_CONTEXT() { \
755 __asm volatile("\n\
756 c.fldsp ft0, 20*4(sp)\n\
757 c.fldsp ft1, 22*4(sp) \n\
758 c.fldsp ft2, 24*4(sp) \n\
759 c.fldsp ft3, 26*4(sp) \n\
760 c.fldsp ft4, 28*4(sp) \n\
761 c.fldsp ft5, 30*4(sp) \n\
762 c.fldsp ft6, 32*4(sp) \n\
763 c.fldsp ft7, 34*4(sp) \n\
764 c.fldsp fa0, 36*4(sp) \n\
765 c.fldsp fa1, 38*4(sp) \n\
766 c.fldsp fa2, 40*4(sp) \n\
767 c.fldsp fa3, 42*4(sp) \n\
768 c.fldsp fa4, 44*4(sp) \n\
769 c.fldsp fa5, 46*4(sp) \n\
770 c.fldsp fa6, 48*4(sp) \n\
771 c.fldsp fa7, 50*4(sp) \n\
772 c.fldsp ft8, 52*4(sp) \n\
773 c.fldsp ft9, 54*4(sp) \n\
774 c.fldsp ft10, 56*4(sp) \n\
775 c.fldsp ft11, 58*4(sp) \n");\
776 }
777 #endif
778 #endif
779 #else
780 #define SAVE_FPU_CONTEXT()
781 #define RESTORE_FPU_CONTEXT()
782 #endif
783
784 #ifdef __ICCRISCV__
785 /**
786 * @brief Save the caller registers based on the RISC-V ABI specification
787 */
788 #define SAVE_CALLER_CONTEXT() { \
789 __asm volatile("addi sp, sp, %0" : : "i"(-CONTEXT_REG_NUM) :);\
790 __asm volatile("\n\
791 c.swsp ra, 0*4 \n\
792 c.swsp t0, 1*4 \n\
793 c.swsp t1, 2*4 \n\
794 c.swsp t2, 3*4 \n\
795 c.swsp s0, 4*4 \n\
796 c.swsp s1, 5*4 \n\
797 c.swsp a0, 6*4 \n\
798 c.swsp a1, 7*4 \n\
799 c.swsp a2, 8*4 \n\
800 c.swsp a3, 9*4 \n\
801 c.swsp a4, 10*4 \n\
802 c.swsp a5, 11*4 \n\
803 c.swsp a6, 12*4 \n\
804 c.swsp a7, 13*4 \n\
805 c.swsp s2, 14*4 \n\
806 c.swsp s3, 15*4 \n\
807 c.swsp t3, 16*4 \n\
808 c.swsp t4, 17*4 \n\
809 c.swsp t5, 18*4 \n\
810 c.swsp t6, 19*4"); \
811 SAVE_FPU_CONTEXT(); \
812 }
813
814 /**
815 * @brief Restore the caller registers based on the RISC-V ABI specification
816 */
817 #define RESTORE_CALLER_CONTEXT() { \
818 __asm volatile("\n\
819 c.lwsp ra, 0*4 \n\
820 c.lwsp t0, 1*4 \n\
821 c.lwsp t1, 2*4 \n\
822 c.lwsp t2, 3*4 \n\
823 c.lwsp s0, 4*4 \n\
824 c.lwsp s1, 5*4 \n\
825 c.lwsp a0, 6*4 \n\
826 c.lwsp a1, 7*4 \n\
827 c.lwsp a2, 8*4 \n\
828 c.lwsp a3, 9*4 \n\
829 c.lwsp a4, 10*4 \n\
830 c.lwsp a5, 11*4 \n\
831 c.lwsp a6, 12*4 \n\
832 c.lwsp a7, 13*4 \n\
833 c.lwsp s2, 14*4 \n\
834 c.lwsp s3, 15*4 \n\
835 c.lwsp t3, 16*4 \n\
836 c.lwsp t4, 17*4 \n\
837 c.lwsp t5, 18*4 \n\
838 c.lwsp t6, 19*4 \n");\
839 RESTORE_FPU_CONTEXT(); \
840 __asm volatile("addi sp, sp, %0" : : "i"(CONTEXT_REG_NUM) :);\
841 }
842 #else
843 /**
844 * @brief Save the caller registers based on the RISC-V ABI specification
845 */
846 #define SAVE_CALLER_CONTEXT() { \
847 __asm volatile("addi sp, sp, %0" : : "i"(-CONTEXT_REG_NUM) :);\
848 __asm volatile("\n\
849 c.swsp ra, 0*4(sp) \n\
850 c.swsp t0, 1*4(sp) \n\
851 c.swsp t1, 2*4(sp) \n\
852 c.swsp t2, 3*4(sp) \n\
853 c.swsp s0, 4*4(sp) \n\
854 c.swsp s1, 5*4(sp) \n\
855 c.swsp a0, 6*4(sp) \n\
856 c.swsp a1, 7*4(sp) \n\
857 c.swsp a2, 8*4(sp) \n\
858 c.swsp a3, 9*4(sp) \n\
859 c.swsp a4, 10*4(sp) \n\
860 c.swsp a5, 11*4(sp) \n\
861 c.swsp a6, 12*4(sp) \n\
862 c.swsp a7, 13*4(sp) \n\
863 c.swsp s2, 14*4(sp) \n\
864 c.swsp s3, 15*4(sp) \n\
865 c.swsp t3, 16*4(sp) \n\
866 c.swsp t4, 17*4(sp) \n\
867 c.swsp t5, 18*4(sp) \n\
868 c.swsp t6, 19*4(sp)"); \
869 SAVE_FPU_CONTEXT(); \
870 }
871
872 /**
873 * @brief Restore the caller registers based on the RISC-V ABI specification
874 */
875 #define RESTORE_CALLER_CONTEXT() { \
876 __asm volatile("\n\
877 c.lwsp ra, 0*4(sp) \n\
878 c.lwsp t0, 1*4(sp) \n\
879 c.lwsp t1, 2*4(sp) \n\
880 c.lwsp t2, 3*4(sp) \n\
881 c.lwsp s0, 4*4(sp) \n\
882 c.lwsp s1, 5*4(sp) \n\
883 c.lwsp a0, 6*4(sp) \n\
884 c.lwsp a1, 7*4(sp) \n\
885 c.lwsp a2, 8*4(sp) \n\
886 c.lwsp a3, 9*4(sp) \n\
887 c.lwsp a4, 10*4(sp) \n\
888 c.lwsp a5, 11*4(sp) \n\
889 c.lwsp a6, 12*4(sp) \n\
890 c.lwsp a7, 13*4(sp) \n\
891 c.lwsp s2, 14*4(sp) \n\
892 c.lwsp s3, 15*4(sp) \n\
893 c.lwsp t3, 16*4(sp) \n\
894 c.lwsp t4, 17*4(sp) \n\
895 c.lwsp t5, 18*4(sp) \n\
896 c.lwsp t6, 19*4(sp) \n");\
897 RESTORE_FPU_CONTEXT(); \
898 __asm volatile("addi sp, sp, %0" : : "i"(CONTEXT_REG_NUM) :);\
899 }
900 #endif
901
902 #ifdef __riscv_flen
903 #define SAVE_FPU_STATE() { \
904 __asm volatile("frcsr s1\n"); \
905 }
906
907 #define RESTORE_FPU_STATE() { \
908 __asm volatile("fscsr s1\n"); \
909 }
910 #else
911 #define SAVE_FPU_STATE()
912 #define RESTORE_FPU_STATE()
913 #endif
914
915 #ifdef __riscv_dsp
916 /*
917 * Save DSP context
918 * NOTE: DSP context registers are stored at word offset 41 in the stack
919 */
920 #define SAVE_DSP_CONTEXT() { \
921 __asm volatile("csrrs s0, %0, x0\n" ::"i"(CSR_UCODE):); \
922 }
923 /*
924 * @brief Restore DSP context
925 * @note DSP context registers are stored at word offset 41 in the stack
926 */
927 #define RESTORE_DSP_CONTEXT() {\
928 __asm volatile("csrw %0, s0\n" ::"i"(CSR_UCODE):); \
929 }
930
931 #else
932 #define SAVE_DSP_CONTEXT()
933 #define RESTORE_DSP_CONTEXT()
934 #endif
935
936 /*
937 * @brief Enter Nested IRQ Handling
938 * @note To simplify the logic, Nested IRQ related registers are stored in the stack as below:
939 * MCAUSE - word offset 16 (not used in the vectored mode)
940 * EPC - word offset 17
941 * MSTATUS = word offset 18
942 * MXSTATUS = word offset 19
943 */
944 #define ENTER_NESTED_IRQ_HANDLING_M() { \
945 __asm volatile("\n\
946 csrr s2, mepc \n\
947 csrr s3, mstatus \n");\
948 SAVE_FPU_STATE(); \
949 SAVE_DSP_CONTEXT(); \
950 __asm volatile("csrsi mstatus, 8"); \
951 }
952
953 /*
954 * @brief Complete IRQ Handling
955 */
956 #define COMPLETE_IRQ_HANDLING_M(irq_num) { \
957 __asm volatile("csrci mstatus, 8"); \
958 __asm volatile("lui a4, 0xe4200"); \
959 __asm volatile("li a3, %0" : : "i" (irq_num) :); \
960 __asm volatile("sw a3, 4(a4)"); \
961 }
962
963 /*
964 * @brief Exit Nested IRQ Handling
965 * @note To simplify the logic, Nested IRQ related registers are stored in the stack as below:
966 * MCAUSE - word offset 16 (not used in the vectored mode)
967 * EPC - word offset 17
968 * MSTATUS = word offset 18
969 * MXSTATUS = word offset 19
970 */
971 #define EXIT_NESTED_IRQ_HANDLING_M() { \
972 __asm volatile("\n\
973 csrw mstatus, s3 \n\
974 csrw mepc, s2 \n");\
975 RESTORE_FPU_STATE(); \
976 RESTORE_DSP_CONTEXT(); \
977 }
978
979
980 #define ENTER_NESTED_IRQ_HANDLING_S() {\
981 __asm volatile("\n\
982 csrr s2, sepc \n\
983 csrr s3, sstatus \n");\
984 SAVE_FPU_STATE(); \
985 SAVE_DSP_CONTEXT(); \
986 __asm volatile("csrsi sstatus, 2"); \
987 }
988 #define COMPLETE_IRQ_HANDLING_S(irq_num) {\
989 __asm volatile("csrci sstatus, 2"); \
990 __asm volatile("lui a4, 0xe4201"); \
991 __asm volatile("li a3, %0" : : "i" (irq_num) :); \
992 __asm volatile("sw a3, 4(a4)"); \
993 }
994
995 /*
996 * @brief Exit Nested IRQ Handling at supervisor mode
997 * @note To simplify the logic, Nested IRQ related registers are stored in the stack as below:
998 * SCAUSE - word offset 16 (not used in the vectored mode)
999 * EPC - word offset 17
1000 * SSTATUS = word offset 18
1001 */
1002 #define EXIT_NESTED_IRQ_HANDLING_S() { \
1003 __asm volatile("\n\
1004 csrw sstatus, s3 \n\
1005 csrw sepc, s2 \n");\
1006 RESTORE_FPU_STATE(); \
1007 RESTORE_DSP_CONTEXT(); \
1008 }
1009
1010 /* @brief Nested IRQ entry macro : Save CSRs and enable global irq. */
1011 #define NESTED_IRQ_ENTER() \
1012 SAVE_CSR(CSR_MEPC) \
1013 SAVE_CSR(CSR_MSTATUS) \
1014 SAVE_MXSTATUS() \
1015 SAVE_FCSR() \
1016 SAVE_UCODE() \
1017 set_csr(CSR_MSTATUS, CSR_MSTATUS_MIE_MASK);
1018
1019 /* @brief Nested IRQ exit macro : Restore CSRs */
1020 #define NESTED_IRQ_EXIT() \
1021 RESTORE_CSR(CSR_MSTATUS) \
1022 RESTORE_CSR(CSR_MEPC) \
1023 RESTORE_MXSTATUS() \
1024 RESTORE_FCSR() \
1025 RESTORE_UCODE()
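
/*
 * Example (sketch): one way an application interrupt handler can open a nesting
 * window itself, assuming the surrounding entry code has not already re-enabled
 * MIE. The two macros must be used in the same block because NESTED_IRQ_ENTER()
 * declares the locals that NESTED_IRQ_EXIT() restores.
 *
 * @code
 * #include "hpm_interrupt.h"
 *
 * void lengthy_isr(void)
 * {
 *     NESTED_IRQ_ENTER();    // save MEPC/MSTATUS (and MXSTATUS/FCSR/UCODE if present), then set MIE
 *     // long-running work, preemptible by other enabled interrupts
 *     NESTED_IRQ_EXIT();     // restore the saved CSRs before returning
 * }
 * @endcode
 */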
1026
1027 #ifdef __cplusplus
1028 #define EXTERN_C extern "C"
1029 #else
1030 #define EXTERN_C
1031 #endif
1032
1033 #define ISR_NAME_M(irq_num) default_isr_##irq_num
1034 #define ISR_NAME_S(irq_num) default_isr_s_##irq_num
1035 /**
1036 * @brief Declare an external interrupt handler for machine mode
1037 *
1038 * @param[in] irq_num - IRQ number index
1039 * @param[in] isr - Application IRQ handler function pointer
1040 */
1041 #if !defined(USE_NONVECTOR_MODE) || (USE_NONVECTOR_MODE == 0)
1042 #if defined(CONFIG_FREERTOS) && CONFIG_FREERTOS
1043 #define FREERTOS_VECTOR_ISR_WRAPPER_NAME(irq_num) irq_handler_wrapper_##irq_num
1044 #define SDK_DECLARE_EXT_ISR_M(irq_num, isr) \
1045 void isr(void) __attribute__((section(".isr_vector"))); \
1046 EXTERN_C void FREERTOS_VECTOR_ISR_WRAPPER_NAME(irq_num)(void) __attribute__((section(".isr_vector"))); \
1047 void FREERTOS_VECTOR_ISR_WRAPPER_NAME(irq_num)(void) \
1048 { \
1049 isr();\
1050 }
1051
1052 /**
1053 * @brief Declare an external interrupt handler for supervisor mode
1054 *
1055 * @param[in] irq_num - IRQ number index
1056 * @param[in] isr - Application IRQ handler function pointer
1057 */
1058 #define SDK_DECLARE_EXT_ISR_S(irq_num, isr) \
1059 void isr(void) __attribute__((section(".isr_s_vector")));\
1060 EXTERN_C void FREERTOS_VECTOR_ISR_WRAPPER_NAME(irq_num)(void) __attribute__((section(".isr_s_vector")));\
1061 void FREERTOS_VECTOR_ISR_WRAPPER_NAME(irq_num)(void) \
1062 { \
1063 isr();\
1064 }
1065 #else
1066
1067 #define SDK_DECLARE_EXT_ISR_M(irq_num, isr) \
1068 void isr(void) __attribute__((section(".isr_vector")));\
1069 EXTERN_C void ISR_NAME_M(irq_num)(void) __attribute__((section(".isr_vector")));\
1070 void ISR_NAME_M(irq_num)(void) \
1071 { \
1072 SAVE_CALLER_CONTEXT(); \
1073 ENTER_NESTED_IRQ_HANDLING_M();\
1074 __asm volatile("la t1, %0\n\t" : : "i" (isr) : );\
1075 __asm volatile("jalr t1\n");\
1076 COMPLETE_IRQ_HANDLING_M(irq_num);\
1077 EXIT_NESTED_IRQ_HANDLING_M();\
1078 RESTORE_CALLER_CONTEXT();\
1079 __asm volatile("fence io, io");\
1080 __asm volatile("mret\n");\
1081 }
1082
1083 /**
1084 * @brief Declare an external interrupt handler for supervisor mode
1085 *
1086 * @param[in] irq_num - IRQ number index
1087 * @param[in] isr - Application IRQ handler function pointer
1088 */
1089 #define SDK_DECLARE_EXT_ISR_S(irq_num, isr) \
1090 void isr(void) __attribute__((section(".isr_s_vector")));\
1091 EXTERN_C void ISR_NAME_S(irq_num)(void) __attribute__((section(".isr_s_vector")));\
1092 void ISR_NAME_S(irq_num)(void) {\
1093 SAVE_CALLER_CONTEXT(); \
1094 ENTER_NESTED_IRQ_HANDLING_S();\
1095 __asm volatile("la t1, %0\n\t" : : "i" (isr) : );\
1096 __asm volatile("jalr t1\n");\
1097 COMPLETE_IRQ_HANDLING_S(irq_num);\
1098 EXIT_NESTED_IRQ_HANDLING_S();\
1099 RESTORE_CALLER_CONTEXT();\
1100 __asm volatile("fence io, io");\
1101 __asm volatile("sret\n");\
1102 }
1103
1104 #endif
1105 #else
1106
1107 #define SDK_DECLARE_EXT_ISR_M(irq_num, isr) \
1108 void isr(void) __attribute__((section(".isr_vector")));\
1109 EXTERN_C void ISR_NAME_M(irq_num)(void) __attribute__((section(".isr_vector")));\
1110 void ISR_NAME_M(irq_num)(void) { \
1111 isr(); \
1112 }
1113
1114 #define SDK_DECLARE_EXT_ISR_S(irq_num, isr) \
1115 void isr(void) __attribute__((section(".isr_vector")));\
1116 EXTERN_C void ISR_NAME_S(irq_num)(void) __attribute__((section(".isr_vector")));\
1117 void ISR_NAME_S(irq_num)(void) { \
1118 isr(); \
1119 }
1120
1121 #endif
1122
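/*
 * Example (sketch): registering a vectored machine-mode handler. The IRQ number and
 * handler name are placeholders; in the default vectored configuration the macro emits
 * a wrapper in .isr_vector that saves context, sets up nesting and completes the IRQ
 * at the PLIC around the plain C handler.
 *
 * @code
 * #include "hpm_interrupt.h"
 *
 * #define APP_GPIO_IRQ 30u                          // placeholder IRQ number
 *
 * SDK_DECLARE_EXT_ISR_M(APP_GPIO_IRQ, app_gpio_isr)
 * void app_gpio_isr(void)
 * {
 *     // clear the peripheral interrupt flag here
 * }
 *
 * static void app_gpio_irq_enable(void)
 * {
 *     intc_m_enable_irq_with_priority(APP_GPIO_IRQ, 1);
 * }
 * @endcode
 */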
1123
1124 /**
1125 * @brief Declare machine timer interrupt handler
1126 *
1127 * @param[in] isr - MCHTMR IRQ handler function pointer
1128 */
1129 #define SDK_DECLARE_MCHTMR_ISR(isr) \
1130 void isr(void) __attribute__((section(".isr_vector")));\
1131 EXTERN_C void mchtmr_isr(void) __attribute__((section(".isr_vector"))); \
1132 void mchtmr_isr(void) {\
1133 isr();\
1134 }
1135
1136 /**
1137 * @brief Declare machine software interrupt handler
1138 *
1139 * @param[in] isr - SWI IRQ handler function pointer
1140 */
1141 #define SDK_DECLARE_SWI_ISR(isr)\
1142 void isr(void) __attribute__((section(".isr_vector")));\
1143 EXTERN_C void swi_isr(void) __attribute__((section(".isr_vector"))); \
1144 void swi_isr(void) {\
1145 isr();\
1146 }
1147
1148 /* Supervisor mode */
1149
/**
 * @brief Declare machine timer interrupt handler for supervisor mode
 *
 * @param[in] isr - MCHTMR IRQ handler function pointer
 */
1155 #define SDK_DECLARE_MCHTMR_ISR_S(isr) \
1156 void isr(void) __attribute__((section(".isr_vector")));\
1157 EXTERN_C void mchtmr_s_isr(void) __attribute__((section(".isr_vector"))); \
1158 void mchtmr_s_isr(void) {\
1159 isr();\
1160 }
1161
/**
 * @brief Declare software interrupt handler for supervisor mode
 *
 * @param[in] isr - SWI IRQ handler function pointer
 */
1167 #define SDK_DECLARE_SWI_ISR_S(isr)\
1168 void isr(void) __attribute__((section(".isr_vector")));\
1169 EXTERN_C void swi_s_isr(void) __attribute__((section(".isr_vector"))); \
1170 void swi_s_isr(void) {\
1171 isr();\
1172 }
1173
1174 #define CSR_MSTATUS_MPP_S_MODE (0x1)
1175 #define MODE_SWITCH_FROM_M(mstatus, mepc, label, mode) \
1176 do { \
1177 if (label) { \
1178 write_csr(mepc, label); \
1179 } \
1180 clear_csr(mstatus, CSR_MSTATUS_MPP_MASK); \
1181 set_csr(mstatus, CSR_MSTATUS_MPP_SET(mode)); \
1182 } while(0)
1183
1184 typedef void (*s_mode_entry)(void);
1185
/**
 * @brief Switch mode to supervisor from machine
 *
 * @param[in] entry - entry point after mode is switched
 */
static inline void switch_to_s_mode(s_mode_entry entry)
{
    write_csr(CSR_SEPC, entry);
    MODE_SWITCH_FROM_M(CSR_MSTATUS, CSR_MEPC, entry, CSR_MSTATUS_MPP_S_MODE);
    if (entry) {
        __asm("mret");
    }
}
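
/*
 * Example (sketch): dropping from machine mode into a supervisor-mode entry point.
 * PMP and interrupt delegation setup are SoC and application specific and only hinted
 * at here with architectural bit positions; s_main is a placeholder.
 *
 * @code
 * #include "hpm_interrupt.h"
 *
 * static void s_main(void)
 * {
 *     enable_s_irq_from_intc();       // SIE.SEIE
 *     enable_s_global_irq(1u << 1);   // SSTATUS.SIE is bit 1
 *     for (;;) {
 *     }
 * }
 *
 * static void drop_to_supervisor(void)
 * {
 *     delegate_irq((1u << 1) | (1u << 5) | (1u << 9)); // SSIP, STIP, SEIP in mideleg
 *     switch_to_s_mode(s_main);                        // programs SEPC/MEPC/MPP, then mret
 * }
 * @endcode
 */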
1199 #ifdef __cplusplus
1200 }
1201 #endif
1202
1203 /**
1204 * @}
1205 */
1206 #endif /* HPM_INTERRUPT_H */
1207