1/*
2 * Copyright (c) 2006-2020, RT-Thread Development Team
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 *
6 * Change Logs:
7 * Date           Author       Notes
8 * 2018-12-10     Jesven       first version
9 * 2023-07-16     Shell        Move part of the codes to C from asm in signal handling
10 */
11
12#include "rtconfig.h"
13#include "asm-generic.h"
14
15#define  Mode_USR       0x10
16#define  Mode_FIQ       0x11
17#define  Mode_IRQ       0x12
18#define  Mode_SVC       0x13
19#define  Mode_MON       0x16
20#define  Mode_ABT       0x17
21#define  Mode_UDF       0x1B
22#define  Mode_SYS       0x1F
23
24#define A_Bit        0x100
25#define I_Bit        0x80 @; when I bit is set, IRQ is disabled
26#define F_Bit        0x40 @; when F bit is set, FIQ is disabled
27#define T_Bit        0x20
28
29.cpu cortex-a9
30.syntax unified
31.text
32
33/*
34 * void arch_start_umode(args, text, ustack, kstack);
35 */
36.global arch_start_umode
37.type arch_start_umode, % function
38arch_start_umode:
39    mrs     r9, cpsr
40    bic     r9, #0x1f
41    orr     r9, #Mode_USR
42    cpsid i
43    msr     spsr, r9
44    mov     sp, r3
45
46    /* set user stack top */
47    cps #Mode_SYS
48    mov sp, r2
49    cps #Mode_SVC
50    mov     r3, r2
51
52    /* set data address. */
53    movs    pc, r1
54
55/*
56 * void arch_crt_start_umode(args, text, ustack, kstack);
57 */
58.global arch_crt_start_umode
59.type arch_crt_start_umode, % function
60arch_crt_start_umode:
61    cps #Mode_SYS
62    sub sp, r2, #16
63    ldr r2, =lwp_thread_return
64    ldr r4, [r2]
65    str r4, [sp]
66    ldr r4, [r2, #4]
67    str r4, [sp, #4]
68    ldr r4, [r2, #8]
69    str r4, [sp, #8]
70
71    mov r4, sp
72    mcr p15, 0, r4, c7, c11, 1   ;//dc cmvau
73    add r4, #4
74    mcr p15, 0, r4, c7, c11, 1   ;//dc cmvau
75    add r4, #4
76    mcr p15, 0, r4, c7, c11, 1   ;//dc cmvau
77    dsb
78    isb
79    mcr p15, 0, r4, c7, c5, 0   ;//iciallu
80    dsb
81    isb
82
83    mov lr, sp
84    cps #Mode_SVC
85
86    mrs     r9, cpsr
87    bic     r9, #0x1f
88    orr     r9, #Mode_USR
89    cpsid i
90    msr     spsr, r9
91    mov     sp, r3
92
93    /* set data address. */
94    movs    pc, r1
95
96/*
97void arch_set_thread_context(void *exit_addr, void *new_thread_stack, void *user_stack, void **thread_sp);
98*/
99.global arch_set_thread_context
100arch_set_thread_context:
101    sub r1, #(10 * 4 + 4 * 4) /* {r4 - r12, lr} , {r4, r5, spsr, u_pc} */
102    stmfd r1!, {r0}
103    mov r12, #0
104    stmfd r1!, {r12}
105    stmfd r1!, {r1 - r12}
106    stmfd r1!, {r12} /* new thread return value */
107    mrs r12, cpsr
108    orr r12, #(1 << 7) /* disable irq */
109    stmfd r1!, {r12} /* spsr */
110    mov r12, #0
111    stmfd r1!, {r12} /* now user lr is 0 */
112    stmfd r1!, {r2} /* user sp */
113#ifdef RT_USING_FPU
114    stmfd r1!, {r12} /* not use fpu */
115#endif
116    str r1, [r3]
117    mov pc, lr
118
/* void *arch_get_user_sp(void);
 * Return the current thread's user-mode stack pointer.
 * USR and SYS share the same banked sp, so SYS mode is used to read it. */
.global arch_get_user_sp
arch_get_user_sp:
    cps #Mode_SYS
    mov r0, sp          @ r0 = banked USR/SYS sp
    cps #Mode_SVC
    mov pc, lr
125
/* fork/vfork syscall entries: preserve the callee-saved registers the C
 * implementation relies on, call _sys_fork(), then leave through the
 * common syscall-exit path.  arch_fork_exit is also the point where a
 * newly created child resumes (its frame is built by
 * arch_set_thread_context). */
.global sys_fork
.global sys_vfork
.global arch_fork_exit
sys_fork:
sys_vfork:
    push {r4 - r12, lr}
    bl _sys_fork
arch_fork_exit:
    pop {r4 - r12, lr}
    b arch_syscall_exit
136
/* clone syscall entry; mirrors sys_fork above.  arch_clone_exit is the
 * resume point for the cloned child. */
.global sys_clone
.global arch_clone_exit
sys_clone:
    push {r4 - r12, lr}
    bl _sys_clone
arch_clone_exit:
    pop {r4 - r12, lr}
    b arch_syscall_exit
145/*
146void lwp_exec_user(void *args, void *kernel_stack, void *user_entry)
147*/
148.global lwp_exec_user
149lwp_exec_user:
150    cpsid i
151    mov sp, r1
152    mov lr, r2
153    mov r2, #Mode_USR
154    msr spsr_cxsf, r2
155    ldr r3, =0x80000000
156    b arch_ret_to_user
157
158/*
159 * void SVC_Handler(void);
160 */
161.global vector_swi
162.type vector_swi, % function
163START_POINT(vector_swi)
164    push {lr}
165    mrs lr, spsr
166    push {r4, r5, lr}
167
168    cpsie i
169
170    push {r0 - r3, r12}
171
172    bl rt_thread_self
173    bl lwp_user_setting_save
174
175    and r0, r7, #0xf000
176    cmp r0, #0xe000
177    beq arch_signal_quit
178
179    cmp r0, #0xf000
180    beq ret_from_user
181    and r0, r7, #0xff
182    bl lwp_get_sys_api
183    cmp r0, #0           /* r0 = api */
184    mov lr, r0
185
186    pop {r0 - r3, r12}
187    beq arch_syscall_exit
188    blx lr
189START_POINT_END(vector_swi)
190
/* Common syscall epilogue: unwind the frame pushed by vector_swi.
 * r0 carries the syscall return value and is deliberately left alone;
 * execution falls through into arch_ret_to_user. */
.global arch_syscall_exit
arch_syscall_exit:
    cpsid i
    pop {r4, r5, lr}
    msr spsr_cxsf, lr       @ restore the user cpsr for the exception return
    pop {lr}                @ user return address

/* Final gate before dropping to user space: handle exit requests,
 * debugger suspends and pending signals, then exception-return. */
.global arch_ret_to_user
arch_ret_to_user:
    /* save all context for signal handler */
    push {r0-r12, lr}
    bl lwp_check_debug
    bl lwp_check_exit_request
    cmp r0, #0
    beq 1f
    /* exit requested: terminate this thread instead of returning */
    mov r0, #0
    b sys_exit
1:
    mov r0, sp
    /* r0 -> exp frame */
    bl lwp_thread_signal_catch

    /* if a debugger is registered, report this stop */
    ldr r0, =rt_dbg_ops
    ldr r0, [r0]
    cmp r0, #0
    beq 2f
    mov r0, lr              @ NOTE(review): lr here is the bl return address, not the user pc - confirm intended
    bl dbg_attach_req
2:
    pop {r0-r12, lr}
    movs pc, lr             @ exception return to user mode
222
223#ifdef RT_USING_SMART
/* Debugger suspend support.  When dbg_check_suspend() says the thread
 * must stop, the 2-instruction lwp_debugreturn trampoline (svc with
 * r7 = 0xf000) is copied onto the user stack and run in USR mode; the
 * resulting svc re-enters the kernel at ret_from_user below, which
 * undoes the temporary frames and resumes the original return path. */
.global lwp_check_debug
lwp_check_debug:
    ldr r0, =rt_dbg_ops
    ldr r0, [r0]
    cmp r0, #0
    bne 1f
    bx  lr              @ no debugger registered: plain return
1:
    push {lr}
    bl dbg_check_suspend
    cmp r0, #0
    beq lwp_check_debug_quit

    /* copy the 2 words of lwp_debugreturn onto the user stack */
    cps #Mode_SYS
    sub sp, #8
    ldr r0, =lwp_debugreturn
    ldr r1, [r0]
    str r1, [sp]
    ldr r1, [r0, #4]
    str r1, [sp, #4]

    /* written words will be executed: clean dcache, invalidate icache */
    mov r1, sp
    mcr p15, 0, r1, c7, c11, 1   ;//dc cmvau
    add r1, #4
    mcr p15, 0, r1, c7, c11, 1   ;//dc cmvau
    dsb
    isb
    mcr p15, 0, r0, c7, c5, 0   ;//iciallu
    dsb
    isb

    mov r0, sp /* lwp_debugreturn */
    cps #Mode_SVC

    /* stash the original spsr, then run the trampoline in USR mode */
    mrs r1, spsr
    push {r1}
    mov r1, #Mode_USR
    msr spsr_cxsf, r1
    movs pc, r0
ret_from_user:
    /* back from the trampoline via svc 0xf000: drop it off the user stack */
    cps #Mode_SYS
    add sp, #8
    cps #Mode_SVC
    /*
    pop {r0 - r3, r12}
    pop {r4 - r6, lr}
    */
    add sp, #(4*9)      @ discard the 9-word frame pushed by vector_swi
    pop {r4}            @ spsr saved just before entering the trampoline
    msr spsr_cxsf, r4
lwp_check_debug_quit:
    pop {pc}
276
/* Entered from vector_swi when r7 == 0xe000 (lwp_sigreturn): a user
 * signal handler has finished.  Discard its kernel-side frame and
 * restore the user context captured when the signal was delivered. */
arch_signal_quit:
    cpsid i
    /* drop context of signal handler */
    pop {r0 - r3, r12}
    pop {r4, r5, lr}
    pop {lr}

    /* restore context: user sp currently points at the saved ucontext */
    cps #Mode_SYS
    mov r0, sp
    cps #Mode_SVC
    bl arch_signal_ucontext_restore     @ r0 = frame; returns the adjusted frame pointer

    /* lr <- *(&frame.ip) */
    ldr lr, [r0]
    cps #Mode_SYS
    mov sp, r0          @ rewind the user sp to the frame

    /* drop ip in the frame and restore cpsr */
    pop {r0}
    pop {r0}
    msr spsr_cxsf, r0
    pop {r0-r12, lr}    @ reload the interrupted user registers
    cps #Mode_SVC

    b arch_ret_to_user
303
304/**
305 * rt_noreturn
306 * void arch_thread_signal_enter(
307 *      int signo,                      -> r0
308 *      siginfo_t *psiginfo,            -> r1
309 *      void *exp_frame,                -> r2
310 *      void *entry_uaddr,              -> r3
311 *      lwp_sigset_t *save_sig_mask,    -> ??
312 * )
313 */
314.global arch_thread_signal_enter
315arch_thread_signal_enter:
316    mov r4, r0
317    mov r5, r3
318    mov r6, r2
319
320    cps #Mode_SYS
321    mov r0, lr
322    mov r3, sp
323    cps #Mode_SVC
324    bl arch_signal_ucontext_save
325
326    /* drop volatile frame {r0-r12, lr} */
327    add sp, r6, #14*4
328
329    /* reset user sp */
330    cps #Mode_SYS
331    mov sp, r0
332    mov lr, r0
333    cps #Mode_SVC
334
335    /* r1,r2 <- new_user_sp */
336    mov r1, r0
337    mov r2, r0
338
339    mcr p15, 0, r0, c7, c11, 1   ;//dc cmvau
340    add r0, #4
341    mcr p15, 0, r0, c7, c11, 1   ;//dc cmvau
342    dsb
343    isb
344    mcr p15, 0, r1, c7, c5, 0   ;//iciallu
345    dsb
346    isb
347
348    /* r0 <- signo */
349    mov r0, r4
350
351    /* r4 <- &sigreturn */
352    mov r4, r2
353
354    /* lr <- user_handler() */
355    mov lr, r5
356    cmp lr, #0
357    moveq lr, r4
358
359    /* r1 <- siginfo */
360    mov r1, r2
361    add r1, #8
362    /* handler(signo, siginfo, ucontext) */
363    movs pc, lr
364
/* User-mode trampolines.  These few instructions are copied onto user
 * stacks (see arch_crt_start_umode / lwp_check_debug /
 * arch_signal_ucontext_save) and executed in USR mode; each traps back
 * into the kernel through vector_swi with a distinguishing r7 value. */
lwp_debugreturn:
    mov r7, #0xf000     @ magic: debug-trampoline return
    svc #0

.global lwp_sigreturn
lwp_sigreturn:
    mov r7, #0xe000     @ magic: signal-handler return -> arch_signal_quit
    svc #0

lwp_thread_return:
    mov r0, #0          @ exit code 0
    mov r7, #0x01       @ syscall 0x01 - presumably thread exit; confirm against the syscall table
    svc #0
378#endif
379
/* int check_vfp(void);
 * Return the EN bit (bit 30) of FPEXC: 1 when the VFP unit is enabled,
 * 0 otherwise.  Always returns 0 when built without RT_USING_FPU. */
.global check_vfp
check_vfp:
#ifdef RT_USING_FPU
    vmrs    r0, fpexc           @ r0 = FPEXC
    lsr     r0, r0, #30         @ shift EN (bit 30) down to bit 0
    and     r0, r0, #1          @ isolate it
#else
    mov     r0, #0              @ no FPU configured
#endif
    bx      lr
389
/* void get_vfp(void *buf);
 * Dump the full VFP state into buf: d0-d31 (32 x 8 bytes) followed by
 * FPSCR (4 bytes).  No-op (beyond returning) without RT_USING_FPU. */
.global get_vfp
get_vfp:
#ifdef RT_USING_FPU
    vstmia r0!, {d0-d15}
    vstmia r0!, {d16-d31}
    vmrs r1, fpscr
    str  r1, [r0]
#endif
    mov pc, lr
399
/* long arch_get_tidr(void);
 * Read TPIDRURO (CP15 c13,c0,3) - the user read-only thread ID / TLS
 * pointer register. */
.globl arch_get_tidr
arch_get_tidr:
    mrc p15, 0, r0, c13, c0, 3
    bx lr
404
/* void arch_set_thread_area(void *p) / void arch_set_tidr(long tidr);
 * Write TPIDRURO (CP15 c13,c0,3), the user-visible TLS pointer.
 * Both names resolve to the same entry point. */
.global arch_set_thread_area
arch_set_thread_area:
.globl arch_set_tidr
arch_set_tidr:
    mcr p15, 0, r0, c13, c0, 3
    bx lr
411
/* kuser support */
    /* kuser_pad sym, size: pad from \sym up to exactly \size bytes.
       First byte-align to a word boundary, then fill with 0xe7fddef1
       (a permanently-undefined instruction) so that straying into the
       padding traps instead of executing garbage.  Keeps each kuser
       helper at its fixed, ABI-defined offset. */
    .macro  kuser_pad, sym, size
    .if (. - \sym) & 3
    .rept   4 - (. - \sym) & 3
    .byte   0
    .endr
    .endif
    .rept   (\size - (. - \sym)) / 4
    .word   0xe7fddef1
    .endr
    .endm
423
/* Linux-compatible kuser helpers.  This block is mapped at the fixed
 * user addresses noted in the comments (0xffff0f60 etc.); each helper
 * therefore must stay at its exact offset - do not change code sizes
 * (kuser_pad enforces the spacing). */
.align  5
.globl  __kuser_helper_start
__kuser_helper_start:
__kuser_cmpxchg64:              @ 0xffff0f60
    /* 64-bit compare-and-swap: *(u64*)r2 = {r6,lr} if it equals {r4,r5}.
       Returns 0 (and C set) on success, nonzero on failure.
       NOTE(review): non-atomic (no ldrex/strex) - presumably relies on
       the kernel fixing up preemption within this range; confirm. */
    stmfd   sp!, {r4, r5, r6, lr}
    ldmia   r0, {r4, r5}            @ load old val
    ldmia   r1, {r6, lr}            @ load new val
1:  ldmia   r2, {r0, r1}            @ load current val
    eors    r3, r0, r4          @ compare with oldval (1)
    eorseq  r3, r1, r5          @ compare with oldval (2)
2:  stmiaeq r2, {r6, lr}            @ store newval if eq
    rsbs    r0, r3, #0          @ set return val and C flag
    ldmfd   sp!, {r4, r5, r6, pc}

    kuser_pad __kuser_cmpxchg64, 64

__kuser_memory_barrier:             @ 0xffff0fa0
    dmb                         @ full data memory barrier for user code
    mov pc, lr

    kuser_pad __kuser_memory_barrier, 32

__kuser_cmpxchg:                @ 0xffff0fc0
    /* 32-bit compare-and-swap: *(u32*)r2 = r1 if it equals r0.
       Returns 0 (and C set) on success, nonzero on failure. */
1:  ldr r3, [r2]            @ load current val
    subs    r3, r3, r0          @ compare with oldval
2:  streq   r1, [r2]            @ store newval if eq
    rsbs    r0, r3, #0          @ set return val and C flag
    mov pc, lr

kuser_pad __kuser_cmpxchg, 32

__kuser_get_tls:                @ 0xffff0fe0
    /* return the TLS pointer; hardware TPIDRURO path runs first, the
       trailing ldr is the software-TLS variant (patched in by init) */
    mrc p15, 0, r0, c13, c0, 3  @ 0xffff0fe8 hardware TLS code
    mov pc, lr
    ldr r0, [pc, #(16 - 8)] @ read TLS, set in kuser_get_tls_init

    kuser_pad __kuser_get_tls, 16

    .rep    3
    .word   0           @ 0xffff0ff0 software TLS value, then
    .endr               @ pad up to __kuser_helper_version

__kuser_helper_version:             @ 0xffff0ffc
    /* helper count = total size / 32, as in the Linux kuser ABI */
    .word   ((__kuser_helper_end - __kuser_helper_start) >> 5)

    .globl  __kuser_helper_end
__kuser_helper_end:
471