1 /*
2  * Copyright (c) 2006-2020, RT-Thread Development Team
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  *
6  * Change Logs:
7  * Date           Author       Notes
8  * 2024-03-28     Shell        Move vector handling codes from context_gcc.S
9  * 2024-04-08     Shell        Optimizing exception switch between u-space/kernel,
10  */
11 
12 #ifndef __ARM64_INC_VECTOR_H__
13 #define __ARM64_INC_VECTOR_H__
14 
15 #include "asm-generic.h"
16 
17 #include <rtconfig.h>
18 #include <asm-fpu.h>
19 #include <armv8.h>
20 
.macro SAVE_IRQ_CONTEXT
    /* Save the entire context. */
    /**
     * Push a full exception frame onto the current stack (sp, EL1).
     * Resulting layout, from the final sp upward:
     *   [sp + 0x00] elr_el1, spsr_el1      (return address / saved PSTATE)
     *   [sp + 0x10] sp_el0,  x30           (user stack pointer / link reg)
     *   [sp + 0x20] fpcr,    fpsr          (FP control / status)
     *   [sp + 0x30] x28..x29, ... x0..x1   (GPR pairs, descending)
     *   [above]     FPU/SIMD regs          (pushed first by SAVE_FPU)
     * NOTE(review): the push order must stay in exact lockstep with
     * RESTORE_IRQ_CONTEXT_NO_SPEL0 and with the CONTEXT_OFFSET_* constants
     * defined elsewhere in the project — do not reorder.
     */
    SAVE_FPU sp
    stp     x0, x1, [sp, #-0x10]!
    stp     x2, x3, [sp, #-0x10]!
    stp     x4, x5, [sp, #-0x10]!
    stp     x6, x7, [sp, #-0x10]!
    stp     x8, x9, [sp, #-0x10]!
    stp     x10, x11, [sp, #-0x10]!
    stp     x12, x13, [sp, #-0x10]!
    stp     x14, x15, [sp, #-0x10]!
    stp     x16, x17, [sp, #-0x10]!
    stp     x18, x19, [sp, #-0x10]!
    stp     x20, x21, [sp, #-0x10]!
    stp     x22, x23, [sp, #-0x10]!
    stp     x24, x25, [sp, #-0x10]!
    stp     x26, x27, [sp, #-0x10]!
    stp     x28, x29, [sp, #-0x10]!
    /* x28/x29 are already saved above, so reuse them as scratch here */
    mrs     x28, fpcr
    mrs     x29, fpsr
    stp     x28, x29, [sp, #-0x10]!
    mrs     x29, sp_el0             /* user-mode stack pointer */
    stp     x29, x30, [sp, #-0x10]!

    mrs     x3, spsr_el1            /* saved PSTATE at exception entry */
    mrs     x2, elr_el1             /* exception return address */

    stp     x2, x3, [sp, #-0x10]!
.endm
50 
51 #ifdef RT_USING_SMP
52 #include "../mp/context_gcc.h"
53 #else
54 #include "../up/context_gcc.h"
55 #endif
56 
.macro RESTORE_IRQ_CONTEXT_NO_SPEL0
    /**
     * Pop the exception frame pushed by SAVE_IRQ_CONTEXT, restoring
     * everything EXCEPT sp_el0 (hence the _NO_SPEL0 suffix): the saved
     * sp_el0 slot is loaded into x29 below but deliberately never written
     * back to the sp_el0 system register, and x29 is subsequently
     * overwritten with its real saved value.
     * Pop order must remain the exact mirror of SAVE_IRQ_CONTEXT.
     */
    ldp     x2, x3, [sp], #0x10
    msr     elr_el1, x2             /* restore exception return address */
    msr     spsr_el1, x3            /* restore saved PSTATE */

    ldp     x29, x30, [sp], #0x10   /* x29 = saved sp_el0 (discarded), x30 = lr */

    ldp     x28, x29, [sp], #0x10   /* x28/x29 used as scratch for FP state */
    msr     fpcr, x28
    msr     fpsr, x29

    ldp     x28, x29, [sp], #0x10   /* now the real x28/x29 values */
    ldp     x26, x27, [sp], #0x10
    ldp     x24, x25, [sp], #0x10
    ldp     x22, x23, [sp], #0x10
    ldp     x20, x21, [sp], #0x10
    ldp     x18, x19, [sp], #0x10
    ldp     x16, x17, [sp], #0x10
    ldp     x14, x15, [sp], #0x10
    ldp     x12, x13, [sp], #0x10
    ldp     x10, x11, [sp], #0x10
    ldp     x8, x9, [sp], #0x10
    ldp     x6, x7, [sp], #0x10
    ldp     x4, x5, [sp], #0x10
    ldp     x2, x3, [sp], #0x10
    ldp     x0, x1, [sp], #0x10

    RESTORE_FPU sp
.endm
86 
.macro EXCEPTION_SWITCH, eframex, tmpx
#ifdef RT_USING_SMART
    /**
     * Tail-branch to arch_ret_to_user iff the exception was taken from
     * user mode; kernel-mode frames fall through.
     * Test: SPSR_EL1.M[4:0] == 0b00000 (AArch64 EL0t).
     * \eframex: pointer to the saved exception frame
     * \tmpx:    scratch register (clobbered)
     * NOTE: `and` without the `s` suffix sets no flags; cbnz tests the
     * register value directly.
     */
    ldr     \tmpx, [\eframex, #CONTEXT_OFFSET_SPSR_EL1]
    and     \tmpx, \tmpx, 0x1f      /* isolate PSTATE mode field M[4:0] */
    cbnz    \tmpx, 1f               /* non-zero => taken from kernel */
    b       arch_ret_to_user        /* EL0t => return to user space */
1:
#endif /* RT_USING_SMART */
.endm
102 
.macro SAVE_USER_CTX, eframex, tmpx
#ifdef RT_USING_SMART
    /**
     * If the exception was taken from user mode (SPSR_EL1.M[4:0] ==
     * 0b00000, EL0t), call lwp_uthread_ctx_save(eframe); otherwise no-op.
     * Mask widened from 0xf to 0x1f for consistency with EXCEPTION_SWITCH
     * and RESTORE_USER_CTX (M[4] is 0 for AArch64-taken exceptions, so
     * the observable result is unchanged).
     * \eframex: pointer to the saved exception frame (passed in x0)
     * \tmpx:    scratch register (clobbered)
     * Clobbers: x0, x30 and caller-saved registers via `bl`.
     */
    mrs     \tmpx, spsr_el1
    and     \tmpx, \tmpx, 0x1f      /* isolate PSTATE mode field M[4:0] */
    cbnz    \tmpx, 1f               /* non-zero => kernel mode, skip */
    mov     x0, \eframex            /* arg0 = exception frame */
    bl      lwp_uthread_ctx_save
1:
#endif /* RT_USING_SMART */
.endm
115 
.macro RESTORE_USER_CTX, eframex, tmpx
#ifdef RT_USING_SMART
    /**
     * If the frame is returning to user mode (saved SPSR_EL1.M[4:0] ==
     * 0b00000, EL0t), call lwp_uthread_ctx_restore(); otherwise no-op.
     * \eframex: pointer to the saved exception frame (read-only here)
     * \tmpx:    scratch register (clobbered)
     * Clobbers: x30 and caller-saved registers via `bl`.
     */
    ldr     \tmpx, [\eframex, #CONTEXT_OFFSET_SPSR_EL1]
    and     \tmpx, \tmpx, 0x1f      /* isolate PSTATE mode field M[4:0] */
    cbnz    \tmpx, 1f               /* non-zero => kernel mode, skip */
    bl      lwp_uthread_ctx_restore
1:
#endif /* RT_USING_SMART */
.endm
127 
128 #endif /* __ARM64_INC_VECTOR_H__ */
129