1/*
2 * Copyright (c) 2006-2023, RT-Thread Development Team
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 *
6 * Change Logs:
7 * Date           Author       Notes
8 * 2021-05-18     Jesven       first version
9 * 2023-07-16     Shell        Move part of the codes to C from asm in signal handling
10 * 2023-08-03     Shell        Support of syscall restart (SA_RESTART)
11 */
12
13#ifndef __ASSEMBLY__
14#define __ASSEMBLY__
15#endif
16
17#include <rtconfig.h>
18#include <asm-generic.h>
19#include <asm-fpu.h>
20#include <vector_gcc.h>
21#include <armv8.h>
22#include <lwp_arch.h>
23
24/*********************
25 *      SPSR BIT     *
26 *********************/
27
28#define  SPSR_Mode(v)          ((v) << 0)
29#define  SPSR_A64              (0 << 4)
30#define  SPSR_RESEVRED_5       (0 << 5)
31#define  SPSR_FIQ_MASKED(v)    ((v) << 6)
32#define  SPSR_IRQ_MASKED(v)    ((v) << 7)
33#define  SPSR_SERROR_MASKED(v) ((v) << 8)
34#define  SPSR_D_MASKED(v)      ((v) << 9)
35#define  SPSR_RESEVRED_10_19   (0 << 10)
36#define  SPSR_IL(v)            ((v) << 20)
37#define  SPSR_SS(v)            ((v) << 21)
38#define  SPSR_RESEVRED_22_27   (0 << 22)
39#define  SPSR_V(v)             ((v) << 28)
40#define  SPSR_C(v)             ((v) << 29)
41#define  SPSR_Z(v)             ((v) << 30)
42#define  SPSR_N(v)             ((v) << 31)
43
44/**************************************************/
45
46.text
47
/*
 * void arch_start_umode(args, text, ustack, kstack);
 *
 * First drop from EL1 into EL0 (userspace) for a thread. Does not return.
 *   x0 = args   - delivered unchanged to the user entry point in x0
 *   x1 = text   - user entry PC (loaded into elr_el1)
 *   x2 = ustack - user stack top (loaded into sp_el0)
 *   x3 = kstack - kernel stack for this thread (loaded into sp)
 */
.global arch_start_umode
.type arch_start_umode, % function
arch_start_umode:
    mov sp, x3
    mov x4, #(SPSR_Mode(0) | SPSR_A64)  /* target state: EL0t, AArch64 */
    msr daifset, #3                     /* mask IRQ/FIQ while swapping EL0 state */
    dsb sy
    mrs x30, sp_el0                     /* NOTE(review): old sp_el0 -> lr before it is
                                         * overwritten; intent unclear from this file — confirm */
    /* user stack top */
    msr sp_el0, x2
    mov x3, x2                          /* NOTE(review): x3 appears unused after this point */
    msr spsr_el1, x4
    msr elr_el1, x1
    eret                                /* enter EL0 at x1 */
65
/*
 * void arch_crt_start_umode(args, text, ustack, kstack);
 *
 * Like arch_start_umode(), but first copies the lwp_thread_return
 * trampoline (three 4-byte instructions) to the user stack just below
 * ustack and uses that address as the link register for the user entry,
 * so that returning from the C runtime entry performs the exit syscall.
 *   x0 = args, x1 = text (entry PC), x2 = ustack, x3 = kstack
 * Does not return.
 */
.global arch_crt_start_umode
.type arch_crt_start_umode, % function
arch_crt_start_umode:
    sub x4, x2, #0x10       /* reserve 16 bytes on the user stack for the trampoline */
    adr x2, lwp_thread_return
    ldr x5, [x2]            /* copy trampoline via overlapping 8-byte loads/stores */
    str x5, [x4]
    ldr x5, [x2, #4]
    str x5, [x4, #4]
    ldr x5, [x2, #8]
    str x5, [x4, #8]

    /* make the copied instructions visible to instruction fetch */
    mov x5, x4
    dc cvau, x5             /* clean D-cache line to point of unification */
    add x5, x5, #8
    dc cvau, x5             /* copy may straddle a cache line */
    dsb sy
    ic ialluis              /* invalidate I-cache, inner shareable */
    dsb sy

    msr sp_el0, x4          /* user sp = trampoline base (ustack - 16) */

    mov sp, x3              /* kernel stack */
    mov x4, #(SPSR_Mode(0) | SPSR_A64)  /* target state: EL0t, AArch64 */
    msr daifset, #3
    dsb sy
    mrs x30, sp_el0         /* lr = trampoline: returning from entry triggers exit syscall */
    msr spsr_el1, x4
    msr elr_el1, x1
    eret                    /* enter EL0 at x1 */
99
/* void *arch_get_user_sp(void);
 * Return the current user stack pointer (banked sp_el0). */
.global arch_get_user_sp
arch_get_user_sp:
    mrs x0, sp_el0
    ret
104
/* Return path for the child side of fork()/clone():
 * the child's syscall return value is 0, then it takes the common
 * syscall exit path (signal preparation + return to user). */
.global arch_fork_exit
.global arch_clone_exit
arch_fork_exit:
arch_clone_exit:
    mov x0, xzr             /* child returns 0 */
    b  arch_syscall_exit
111
/*
void lwp_exec_user(void *args, void *kernel_stack, void *user_entry)

Enter EL0 at user_entry after an exec.
  x0 = args, x1 = kernel stack (-> sp), x2 = user entry PC (-> elr_el1)
NOTE(review): sp_el0 is not set here — presumably prepared by the caller;
confirm against the exec path.
*/
.global lwp_exec_user
lwp_exec_user:
    mov sp, x1
    mov x4, #(SPSR_Mode(0) | SPSR_A64)  /* target state: EL0t, AArch64 */
    ldr x3, =0x0000ffff80000000         /* NOTE(review): x3 appears unused below — confirm */
    msr daifset, #3
    msr spsr_el1, x4
    msr elr_el1, x2
    eret
124
/*
 * void SVC_Handler(regs);
 * since this routine resets the SP, we take it as a start point.
 * x0 = address of the saved exception frame (becomes the kernel sp).
 */
START_POINT(SVC_Handler)
    mov fp, xzr             /* terminate kernel backtraces here */
    mov lr, xzr

    /* x0 is initial sp */
    mov sp, x0

    bl _SVC_Handler         /* dispatch; returns with x0 = syscall rc */

    /* jump explicitly, make this code position independent */
    b arch_syscall_exit
START_POINT_END(SVC_Handler)
141
TRACE_SYMBOL(_SVC_Handler)
#define FRAME_REG x19

/**
 * x0 -> frame_addr (saved exception frame)
 *
 * Decode and dispatch one syscall from the frame's saved x8:
 *   (x8 & 0xf000) == 0xe000 -> sigreturn,     tail-jumps to arch_signal_quit
 *   (x8 & 0xf000) == 0xf000 -> debug return,  tail-jumps to ret_from_user
 *   otherwise the low byte of x8 selects the syscall handler via
 *   lwp_get_sys_api(); an empty slot exits with rc = 0.
 * Returns with x0 = syscall return value.
 */
_SVC_Handler:
    .local _SVC_Handler

    stp fp, lr, [sp, -16]!
    mov fp, sp

    mov FRAME_REG, x0   /* save the value of frame address */
    msr daifclr, #3  /* enable interrupt */

    GET_THREAD_SELF x0
    bl  lwp_user_setting_save

    ldp x8, x9, [FRAME_REG, #(CONTEXT_OFFSET_X8)]
    and x0, x8, #0xf000     /* magic syscall class in bits [15:12] */
    cmp x0, #0xe000
    beq arch_signal_quit

    cmp x0, #0xf000
    beq ret_from_user

    uxtb x0, w8             /* syscall number = low byte of x8 */
    bl lwp_get_sys_api
    cmp x0, xzr
    mov x30, x0             /* x30 = handler entry (may be NULL) */
    beq arch_syscall_exit   /* no handler: exit with rc = 0 (x0 is NULL here) */
    ldp x0, x1, [FRAME_REG, #(CONTEXT_OFFSET_X0)]   /* reload up to 8 syscall args */
    ldp x2, x3, [FRAME_REG, #(CONTEXT_OFFSET_X2)]
    ldp x4, x5, [FRAME_REG, #(CONTEXT_OFFSET_X4)]
    ldp x6, x7, [FRAME_REG, #(CONTEXT_OFFSET_X6)]
    blr x30                 /* call the syscall handler; rc in x0 */

    ldp fp, lr, [sp], 16
    ret
181
/**
 * void arch_syscall_exit(long rc)
 *
 * Common syscall epilogue: x0 = syscall return value; the kernel sp
 * still points at the saved exception frame. Prepares signal/restart
 * state, then falls into the return-to-user path.
 */
arch_syscall_exit:
    .global arch_syscall_exit

    /**
     * backup former x0 which is required to restart syscall, then setup
     * syscall return value in stack frame
     */
    mov x1, sp              /* x1 = exception frame */
    bl arch_syscall_prepare_signal

    /**
     * disable local irq so we don't mess up the spsr_el1 which is not saved
     * for kernel space IRQ/EXCEPTION
     */
    msr daifset, #3

    b arch_ret_to_user
202
/* the sp is reset to the outer most level, irq and fiq are disabled.
 *
 * Final return-to-user path: re-stage sp_el0/elr/spsr from the frame,
 * run debug/exit-request/signal checks (which may modify elr/spsr),
 * then restore the remaining registers and eret to EL0. */
START_POINT(arch_ret_to_user)
    msr daifset, #3

    /* stage user sp / return PC / pstate from the exception frame */
    ldr x2, [sp, #CONTEXT_OFFSET_SP_EL0]
    msr sp_el0, x2
    ldr x2, [sp, #CONTEXT_OFFSET_ELR_EL1]
    msr elr_el1, x2
    ldr x3, [sp, #CONTEXT_OFFSET_SPSR_EL1]
    msr spsr_el1, x3

    /* pre-action */
    bl lwp_check_debug

    bl lwp_check_exit_request
    cbz w0, 1f
    /* exit on event */
    msr daifclr, #3
    mov x0, xzr
    b sys_exit              /* does not return */

1:  /* handling dbg */
    /* check if dbg ops exist */
    ldr x0, =rt_dbg_ops
    ldr x0, [x0]
    cbz x0, 3f
    bl dbg_thread_in_debug
    mov x1, #(1 << 21)      /* SPSR_EL1.SS (software step) bit */
    mrs x2, spsr_el1
    cbz w0, 2f
    orr x2, x2, x1          /* thread in debug: enable single step */
    msr spsr_el1, x2
    b 3f
2:  /* clear software step */
    bic x2, x2, x1
    msr spsr_el1, x2
3:  /* handling signal */

    /**
     * push updated spsr & elr to exception frame.
     * Note: these 2 maybe updated after handling dbg
     */
    mrs x0, spsr_el1
    str x0, [sp, #CONTEXT_OFFSET_SPSR_EL1]
    mrs x1, elr_el1
    str x1, [sp, #CONTEXT_OFFSET_ELR_EL1]
    mov x0, sp              /* pass the frame to the signal catcher */

    /* restore the thread execution environment */
    msr daifclr, #3
    bl lwp_thread_signal_catch

    /* restore the exception-return exec-flow */
    msr daifset, #3

    /* check debug */

    ldr x0, =rt_dbg_ops
    ldr x0, [x0]
    cmp x0, xzr

    beq 1f

    ldr x0, [sp, #CONTEXT_OFFSET_ELR_EL1]
    bl dbg_attach_req

1:
    RESTORE_IRQ_CONTEXT_NO_SPEL0   /* sp_el0 was already staged above */

    eret
START_POINT_END(arch_ret_to_user)
274
/* If a debugger is attached (rt_dbg_ops set) and dbg_check_suspend()
 * reports a pending suspend, inject the 8-byte lwp_debugreturn
 * trampoline onto the user stack and eret to it with single-step set.
 * The trampoline issues svc 0xf000, which re-enters the kernel at
 * ret_from_user below; that pops the elr/spsr pair saved here and
 * returns to the original caller of lwp_check_debug. */
.global lwp_check_debug
lwp_check_debug:
    ldr x0, =rt_dbg_ops
    ldr x0, [x0]
    cbnz x0, 1f
    ret                     /* no debugger registered: fast path */
1:
    stp x29, x30, [sp, #-0x10]!
    bl dbg_check_suspend
    cbz w0, lwp_check_debug_quit

    /* copy the 2-instruction lwp_debugreturn trampoline to the user stack */
    mrs x2, sp_el0
    sub x2, x2, #0x10       /* reserve 16 bytes */
    mov x3, x2
    msr sp_el0, x2
    ldr x0, =lwp_debugreturn
    ldr w1, [x0]
    str w1, [x2]
    ldr w1, [x0, #4]
    str w1, [x2, #4]

    /* make the copied instructions visible to instruction fetch */
    dc cvau, x2
    add x2, x2, #4
    dc cvau, x2

    dsb sy
    isb sy

    ic ialluis
    isb sy

    /* stash original return state; restored in ret_from_user */
    mrs x0, elr_el1
    mrs x1, spsr_el1
    stp x0, x1, [sp, #-0x10]!
    msr elr_el1, x3 /* lwp_debugreturn */
    mov x1, #(SPSR_Mode(0) | SPSR_A64)
    orr x1, x1, #(1 << 21)  /* SPSR_EL1.SS: single-step the trampoline */
    msr spsr_el1, x1
    eret
ret_from_user:
    /* entered from _SVC_Handler on the 0xf000 magic syscall */
    /* sp_el0 += 16 for drop ins lwp_debugreturn */
    mrs x0, sp_el0
    add x0, x0, #0x10
    msr sp_el0, x0
    /* now is el1, sp is pos(empty) - sizeof(context) */
    mov x0, sp
    add x0, x0, #0x220      /* NOTE(review): presumably CONTEXT_SIZE + 0x10;
                             * a named constant would be safer — confirm */
    mov sp, x0
    ldp x0, x1, [sp], #0x10  /* x1 is origin spsr_el1 */
    msr elr_el1, x0          /* x0 is origin elr_el1 */
    msr spsr_el1, x1
lwp_check_debug_quit:
    ldp x29, x30, [sp], #0x10
    ret
329
/* Restart an interrupted syscall (SA_RESTART).
 *   x0 = user sp holding the outer exception frame to restore
 *   x1 = kernel sp to reset to
 * Restores the outer frame through the banked sp_el0 (spsel, #0 makes
 * sp alias sp_el0), then re-enters the exception vector so the syscall
 * is issued again. Does not return. */
.global arch_syscall_restart
arch_syscall_restart:
    msr daifset, 3

    mov sp, x1
    /* drop exception frame in user stack */
    msr sp_el0, x0

    /* restore previous exception frame */
    msr spsel, #0           /* sp now aliases sp_el0 */

    RESTORE_IRQ_CONTEXT_NO_SPEL0

    msr spsel, #1           /* back to the EL1 stack */

    b vector_exception
346
/* Sigreturn path, entered from _SVC_Handler on the 0xe000 magic syscall.
 * Restores the pre-signal user context saved by arch_thread_signal_enter,
 * then re-runs the normal return-to-user checks. */
arch_signal_quit:

    /* drop current exception frame & sigreturn */
    add sp, sp, #(CONTEXT_SIZE + 0x10)
    mov x1, sp
    mrs x0, sp_el0
    bl  arch_signal_ucontext_restore    /* returns user sp above the saved context */
    add x0, x0, #-CONTEXT_SIZE
    msr sp_el0, x0                      /* sp_el0 -> saved context on the user stack */

    /**
     * Note: Since we will reset spsr, but the reschedule will
     * corrupt the spsr, we disable irq for a short period here
     */
    msr daifset, #3

    /* restore previous exception frame */
    msr spsel, #0                       /* sp aliases sp_el0: pop frame from user stack */

    RESTORE_IRQ_CONTEXT_NO_SPEL0

    msr spsel, #1                       /* back to the EL1 stack */

    SAVE_IRQ_CONTEXT                    /* re-save the frame on the kernel stack */

    b arch_ret_to_user                  /* run exit/dbg/signal checks, then eret */
373
/**
 * rt_noreturn
 * void arch_thread_signal_enter(
 *      int signo,                      -> x0
 *      siginfo_t *psiginfo,            -> x1
 *      void *exp_frame,                -> x2
 *      void *entry_uaddr,              -> x3  (user handler; NULL -> default)
 *      lwp_sigset_t *save_sig_mask,    -> x4
 *      )
 *
 * Save the interrupted context + siginfo to the user stack, then eret
 * into the user-space signal handler with lr set to the sigreturn
 * trampoline so the handler's return issues the 0xe000 magic syscall
 * (handled by arch_signal_quit).
 */
.global arch_thread_signal_enter
arch_thread_signal_enter:
    mov x19, x0 /* signo */
    mov x20, x2 /* exp_frame */
    mov x21, x3 /* entry_uaddr */

    /**
     * move exception frame to user stack
     */
    mrs x0, sp_el0
    mov x3, x4

    /* arch_signal_ucontext_save(user_sp, psiginfo, exp_frame, save_sig_mask); */
    bl arch_signal_ucontext_save
    mov x22, x0 /* new user sp (points at the sigreturn trampoline area) */
    /* get and saved pointer to uframe */
    bl arch_signal_ucontext_get_frame
    mov x2, x0  /* x2 = uframe (3rd handler argument) */
    mov x0, x22

    /* the trampoline copied to the user stack must be fetchable */
    dc cvau, x0
    dsb sy
    ic ialluis
    dsb sy

    /**
     * Brief: Prepare the environment for signal handler
     */

    /**
     * reset the cpsr
     * and drop exp frame on kernel stack, reset kernel sp
     *
     * Note: Since we will reset spsr, but the reschedule will
     * corrupt the spsr, we disable irq for a short period here
     */
    msr daifset, #3
    ldr x1, [x20, #CONTEXT_OFFSET_SPSR_EL1]
    msr spsr_el1, x1
    add sp, x20, #CONTEXT_SIZE

    /** reset user sp */
    msr sp_el0, x0
    /** set the return address to the sigreturn */
    mov x30, x0

    cbnz x21, 1f
    mov x21, x30    /* no user handler: "handler" is the sigreturn itself */
1:
    /** set the entry address of signal handler */
    msr elr_el1, x21

    /* siginfo is above the return address */
    add x1, x30, UCTX_ABI_OFFSET_TO_SI
    /* uframe is saved in x2 */
    mov x0, x19

    /**
     * handler(signo, psi, ucontext);
     *
     */
    eret
446
/* User-mode trampolines. Each is copied to (or returned to on) the user
 * stack and issues a magic syscall number that _SVC_Handler decodes. */

lwp_debugreturn:
    mov x8, 0xf000      /* debug return: re-enters kernel at ret_from_user */
    svc #0

.global lwp_sigreturn
lwp_sigreturn:
    mov x8, #0xe000     /* sigreturn: handled by arch_signal_quit */
    svc #0

lwp_thread_return:
    mov x0, xzr         /* exit status 0 */
    mov x8, #0x01       /* presumably the exit syscall — confirm against the syscall table */
    svc #0
460
/* Accessors for tpidr_el0, the EL0 thread-ID / TLS pointer register. */

.globl arch_get_tidr
arch_get_tidr:
    mrs x0, tpidr_el0
    ret

/* arch_set_thread_area and arch_set_tidr share one body:
 * both store x0 into tpidr_el0. */
.global arch_set_thread_area
arch_set_thread_area:
.globl arch_set_tidr
arch_set_tidr:
    msr tpidr_el0, x0
    ret
472