/*
 * Copyright (c) 2006-2024, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Date           Author       Notes
 * 2018-10-06     ZhaoXiaowei  the first version (cpu_gcc.S)
 * 2021-05-18     Jesven       the first version (context_gcc.S)
 * 2024-01-06     Shell        Fix barrier on irq_disable/enable
 * 2024-01-18     Shell        fix implicit dependency of cpuid management
 * 2024-03-28     Shell        Move cpu codes from context_gcc.S
 */

#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"
#include "armv8.h"

#ifdef RT_USING_SMP
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable rt_hw_local_irq_enable
#endif /* RT_USING_SMP */

.text

/**
 * #ifdef RT_USING_OFW
 * void rt_hw_cpu_id_set(long cpuid)
 * #else
 * void rt_hw_cpu_id_set(void)
 * #endif
 */
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
#ifdef ARCH_USING_GENERIC_CPUID
    .globl  rt_hw_cpu_id_set
#else /* !ARCH_USING_GENERIC_CPUID */
    .weak   rt_hw_cpu_id_set
#endif /* ARCH_USING_GENERIC_CPUID */

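    /*
     * Publish the id of the calling core so that rt_hw_cpu_id() can read
     * it back with a single mrs. Without RT_USING_OFW the id is derived
     * from the MPIDR_EL1 affinity fields below; with RT_USING_OFW the
     * caller passes it in x0.
     */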
#ifndef RT_USING_OFW
    mrs     x0, mpidr_el1           /* MPIDR_EL1: Multi-Processor Affinity Register */
#ifdef ARCH_ARM_CORTEX_A55
    lsr     x0, x0, #8              /* DynamIQ (e.g. Cortex-A55): the core number is in Aff1 */
#endif /* ARCH_ARM_CORTEX_A55 */
    and     x0, x0, #15
#endif /* !RT_USING_OFW */

#ifdef ARCH_USING_HW_THREAD_SELF
    msr     tpidrro_el0, x0
#else /* !ARCH_USING_HW_THREAD_SELF */
    msr     tpidr_el1, x0
#endif /* ARCH_USING_HW_THREAD_SELF */
    ret

/**
 * int rt_hw_cpu_id(void)
 * return the id of the calling cpu (published by rt_hw_cpu_id_set)
 */
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
#ifdef ARCH_USING_GENERIC_CPUID
    .globl rt_hw_cpu_id
#else /* !ARCH_USING_GENERIC_CPUID */
    .weak rt_hw_cpu_id
#endif /* ARCH_USING_GENERIC_CPUID */

#if RT_CPUS_NR > 1
    #ifdef ARCH_USING_GENERIC_CPUID
        mrs x0, tpidrro_el0
    #else /* !ARCH_USING_GENERIC_CPUID */
        mrs x0, tpidr_el1
    #endif /* ARCH_USING_GENERIC_CPUID */
#else /* RT_CPUS_NR == 1 */
    mov x0, xzr
#endif
    ret

/*
 * void rt_hw_set_process_id(size_t id)
 */
.global rt_hw_set_process_id
rt_hw_set_process_id:
    msr     CONTEXTIDR_EL1, x0
    ret

/*
 * enable gtimer
 */
.globl rt_hw_gtimer_enable
rt_hw_gtimer_enable:
    mov     x0, #1
    msr     CNTP_CTL_EL0, x0
    ret

/*
 * set gtimer CNTP_TVAL_EL0 value
 */
.globl rt_hw_set_gtimer_val
rt_hw_set_gtimer_val:
    msr     CNTP_TVAL_EL0, x0
    ret

/*
 * get gtimer CNTP_TVAL_EL0 value
 */
.globl rt_hw_get_gtimer_val
rt_hw_get_gtimer_val:
    mrs     x0, CNTP_TVAL_EL0
    ret

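/*
 * get the physical counter (CNTPCT_EL0) value
 */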
.globl rt_hw_get_cntpct_val
rt_hw_get_cntpct_val:
    mrs     x0, CNTPCT_EL0
    ret

/*
 * get gtimer frequency (CNTFRQ_EL0) value
 */
.globl rt_hw_get_gtimer_frq
rt_hw_get_gtimer_frq:
    mrs     x0, CNTFRQ_EL0
    ret

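/*
 * check whether IRQ/FIQ are masked in DAIF (returns non-zero if masked)
 */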
.global rt_hw_interrupt_is_disabled
rt_hw_interrupt_is_disabled:
    mrs     x0, DAIF
    tst     x0, #0xc0
    cset    x0, NE
    ret

/*
 * rt_base_t rt_hw_interrupt_disable();
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    mrs     x0, DAIF
    and     x0, x0, #0xc0
    cmp     x0, #0xc0
    /* branch if IRQ/FIQ are not both masked yet */
    bne     1f
    ret
1:
    msr     DAIFSet, #3
    dsb     nsh
    isb
    ret

/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 */
.globl rt_hw_interrupt_enable
rt_hw_interrupt_enable:
    and     x0, x0, #0xc0
    cmp     x0, #0xc0
    /* branch unless the saved level has both IRQ and FIQ masked */
    bne     1f
    ret
1:
    isb
    dsb     nsh
    and     x0, x0, #0xc0
    mrs     x1, DAIF
    bic     x1, x1, #0xc0
    orr     x0, x0, x1
    msr     DAIF, x0
    ret

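/*
 * return the current exception level (3, 2 or 1; 0 if it cannot be determined)
 */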
.globl rt_hw_get_current_el
rt_hw_get_current_el:
    mrs     x0, CurrentEL
    cmp     x0, 0xc                 /* CurrentEL.EL == 3 */
    b.eq    3f
    cmp     x0, 0x8                 /* CurrentEL.EL == 2 */
    b.eq    2f
    cmp     x0, 0x4                 /* CurrentEL.EL == 1 */
    b.eq    1f

    ldr     x0, =0
    b       0f
3:
    ldr     x0, =3
    b       0f
2:
    ldr     x0, =2
    b       0f
1:
    ldr     x0, =1
    b       0f
0:
    ret

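/*
 * install the exception vector base (VBAR_ELx) for the current exception level
 */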
.globl rt_hw_set_current_vbar
rt_hw_set_current_vbar:
    mrs     x1, CurrentEL
    cmp     x1, 0xc
    b.eq    3f
    cmp     x1, 0x8
    b.eq    2f
    cmp     x1, 0x4
    b.eq    1f
    b       0f
3:
    msr     VBAR_EL3, x0
    b       0f
2:
    msr     VBAR_EL2, x0
    b       0f
1:
    msr     VBAR_EL1, x0
    b       0f
0:
    ret

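/*
 * set up the execution environment for the current exception level:
 * at EL3 set SCR_EL3.NS|IRQ|FIQ|EA, at EL2 set the HCR_EL2 routing
 * bits (AMO|IMO|FMO); nothing needs to be done when running at EL1
 */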
.globl rt_hw_set_elx_env
rt_hw_set_elx_env:
    mrs     x1, CurrentEL
    cmp     x1, 0xc
    b.eq    3f
    cmp     x1, 0x8
    b.eq    2f
    cmp     x1, 0x4
    b.eq    1f
    b       0f
3:
    mrs     x0, SCR_EL3
    orr     x0, x0, #0xf            /* SCR_EL3.NS|IRQ|FIQ|EA */
    msr     SCR_EL3, x0
    b       0f
2:
    mrs     x0, HCR_EL2
    orr     x0, x0, #0x38           /* HCR_EL2.AMO|IMO|FMO */
    msr     HCR_EL2, x0
    b       0f
1:
    b       0f
0:
    ret

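/*
 * set VBAR_EL1 directly
 */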
.globl rt_cpu_vector_set_base
rt_cpu_vector_set_base:
    msr     VBAR_EL1, x0
    ret

/**
 * unsigned long rt_hw_ffz(unsigned long x)
 */
.globl rt_hw_ffz
rt_hw_ffz:
    mvn     x1, x0                  /* invert: the zero bits of x become ones */
    clz     x0, x1
    mov     x1, #0x3f
    sub     x0, x1, x0              /* return 63 - clz(~x) */
    ret

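/*
 * count leading zeros
 */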
.globl rt_hw_clz
rt_hw_clz:
    clz     x0, x0
    ret

/**
 * Spinlock (fallback implementation)
 */

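/*
 * These weak fallbacks implement a 16-bit ticket lock in a 32-bit word:
 * the halfword at offset 0 is the ticket currently being served (owner),
 * the halfword at offset 2 is the next ticket to hand out.
 */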
rt_hw_spin_lock_init:
    .weak   rt_hw_spin_lock_init
    stlr    wzr, [x0]               /* owner = next = 0: unlocked */
    ret

rt_hw_spin_trylock:
    .weak   rt_hw_spin_trylock
    sub     sp, sp, #16
    ldar    w2, [x0]                /* w2 = current lock word (owner | next << 16) */
    add     x1, sp, 8
    stlr    w2, [x1]                /* keep a scratch copy on the stack */
    ldarh   w1, [x1]                /* w1 = owner halfword */
    and     w1, w1, 65535
    add     x3, sp, 10
    ldarh   w3, [x3]                /* w3 = next halfword */
    cmp     w1, w3, uxth
    beq     1f                      /* owner == next: the lock is free */
    mov     w0, 0                   /* lock is held, fail immediately */
    add     sp, sp, 16
    ret
1:
    add     x1, sp, 10
2:
    ldaxrh  w3, [x1]                /* bump 'next' in the stack copy */
    add     w3, w3, 1
    stlxrh  w4, w3, [x1]
    cbnz    w4, 2b
    add     x1, sp, 8
    ldar    w1, [x1]                /* w1 = lock word with the new ticket */
3:
    ldaxr   w3, [x0]                /* compare-and-swap the whole lock word */
    cmp     w3, w2
    bne     4f
    stxr    w4, w1, [x0]
    cbnz    w4, 3b
4:
    cset    w0, eq                  /* 1 on success, 0 if someone raced us */
    add     sp, sp, 16
    ret

rt_hw_spin_lock:
    .weak   rt_hw_spin_lock
    add     x1, x0, 2               /* x1 = &next ticket */
1:
    ldxrh   w2, [x1]                /* atomically take a ticket (next++) */
    add     w3, w2, 1
    stxrh   w4, w3, [x1]
    cbnz    w4, 1b
    and     w2, w2, 65535           /* w2 = our ticket number */
    ldarh   w1, [x0]
    cmp     w2, w1, uxth
    beq     3f                      /* already being served: lock acquired */
    sevl
2:
    wfe                             /* wait until 'owner' reaches our ticket */
    ldaxrh  w1, [x0]
    cmp     w2, w1
    bne     2b
3:
    ret

rt_hw_spin_unlock:
    .weak   rt_hw_spin_unlock
    ldxrh   w1, [x0]                /* owner++ with release: serve the next ticket */
    add     w1, w1, 1
    stlxrh  w2, w1, [x0]
    cbnz    w2, rt_hw_spin_unlock
    ret
