/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2023-05-18     GuEe-GUI     first version
 */

#include <rthw.h>
#include <rtatomic.h>

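/* Atomically load *ptr; the trailing "dmb ish" keeps later memory accesses
 * from being reordered before the load. */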
rt_atomic_t rt_hw_atomic_load(volatile rt_atomic_t *ptr)
{
    rt_atomic_t ret;

    __asm__ volatile (
        "   ldr %0, %1\n"
        "   dmb ish"
        : "=r" (ret)
        : "Q" (*ptr)
        : "memory");

    return ret;
}

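/* Atomically store val to *ptr; the trailing "dmb ish" keeps the store from
 * being reordered past later memory accesses. */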
void rt_hw_atomic_store(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    __asm__ volatile (
        "   str %1, %0\n"
        "   dmb ish"
        : "=Q" (*ptr)
        : "r" (val)
        : "memory");
}

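/*
 * Generate a fetch-and-<op> routine built on an LDXR/STLXR retry loop: the
 * line is prefetched for store, the old value is loaded exclusively, <ins> is
 * applied, and the result is stored exclusively; if the exclusive monitor was
 * lost, the store fails and the loop retries. The value observed before the
 * operation is returned. "constraint" is the GCC immediate-operand letter
 * matching <ins> (see the instantiations below).
 */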
#define AARCH64_ATOMIC_OP_RETURN(op, ins, constraint)                         \
rt_atomic_t rt_hw_atomic_##op(volatile rt_atomic_t *ptr, rt_atomic_t in_val)  \
{                                                                             \
    rt_atomic_t tmp, val, result;                                             \
                                                                              \
    __asm__ volatile (                                                        \
        "   prfm pstl1strm, %3\n"                                             \
        "1: ldxr %0, %3\n"                                                    \
        "   "#ins " %1, %0, %4\n"                                             \
        "   stlxr %w2, %1, %3\n"                                              \
        "   cbnz %w2, 1b\n"                                                   \
        "   dmb ish"                                                          \
        : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Q" (*ptr)               \
        : __RT_STRINGIFY(constraint) "r" (in_val)                             \
        : "memory");                                                          \
                                                                              \
    return result;                                                            \
}

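/* Constraint letters: "I" matches ADD immediates, "J" the SUB immediate
 * range, and "K" 32-bit logical immediates ("L" is the 64-bit logical
 * letter). Since in_val is a runtime argument here, the "r" register
 * alternative is normally the one chosen. */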
AARCH64_ATOMIC_OP_RETURN(add, add, I)
AARCH64_ATOMIC_OP_RETURN(sub, sub, J)
AARCH64_ATOMIC_OP_RETURN(and, and, K)
AARCH64_ATOMIC_OP_RETURN(or, orr, K)
AARCH64_ATOMIC_OP_RETURN(xor, eor, K)

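/* Atomically swap *ptr with val and return the previous value, using the
 * same LDXR/STLXR retry loop as above. */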
rt_atomic_t rt_hw_atomic_exchange(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t ret, tmp;

    __asm__ volatile (
        "   prfm pstl1strm, %2\n"
        "1: ldxr %0, %2\n"
        "   stlxr %w1, %3, %2\n"
        "   cbnz %w1, 1b\n"
        "   dmb ish"
        : "=&r" (ret), "=&r" (tmp), "+Q" (*ptr)
        : "r" (val)
        : "memory");

    return ret;
}

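/* Clear an atomic flag by AND-ing it with 0. */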
void rt_hw_atomic_flag_clear(volatile rt_atomic_t *ptr)
{
    rt_hw_atomic_and(ptr, 0);
}

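/* Set an atomic flag and return its previous value: a non-zero return means
 * the flag was already set. */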
rt_atomic_t rt_hw_atomic_flag_test_and_set(volatile rt_atomic_t *ptr)
{
    return rt_hw_atomic_or(ptr, 1);
}

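/*
 * Strong compare-and-exchange: if *ptr equals *old, store new to *ptr and
 * return 1; otherwise write the value actually observed back to *old and
 * return 0.
 */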
rt_atomic_t rt_hw_atomic_compare_exchange_strong(volatile rt_atomic_t *ptr, rt_atomic_t *old, rt_atomic_t new)
{
    rt_atomic_t tmp, oldval;

    __asm__ volatile (
        "   prfm pstl1strm, %2\n"
        "1: ldxr %0, %2\n"
        "   eor %1, %0, %3\n"      /* compare the loaded value with *old */
        "   cbnz %1, 2f\n"         /* mismatch: fail without storing */
        "   stlxr %w1, %4, %2\n"
        "   cbnz %w1, 1b\n"        /* exclusive store failed: retry */
        "   dmb ish\n"
        "   mov %w1, #1\n"         /* success: return 1 */
        "   b 3f\n"
        "2: str %0, [%5]\n"        /* failure: report the observed value */
        "   mov %w1, #0\n"         /* failure: return 0 */
        "3:"
        : "=&r" (oldval), "=&r" (tmp), "+Q" (*ptr)
        : "Kr" (*old), "r" (new), "r" (old)
        : "memory");

    return tmp;
}