/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2022-07-27     flybreak       the first version
 */

#include <rtthread.h>

#if defined (__IAR_SYSTEMS_ICC__)     /* for IAR Compiler */
#include <intrinsics.h>
#include <iccarm_builtin.h>
#endif
/**
 \brief   LDR Exclusive (32 bit)
 \details Executes an exclusive LDR instruction for 32 bit values.
 \param [in]    ptr  Pointer to data
 \return        value of type rt_atomic_t at (*ptr)
 */
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#define __LDREXW        (rt_atomic_t)__builtin_arm_ldrex
#elif defined(__ARMCC_VERSION)          /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __LDREXW(ptr)                                                        ((rt_atomic_t ) __ldrex(ptr))
#else
#define __LDREXW(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((rt_atomic_t ) __ldrex(ptr))  _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__)     /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic_t __LDREXW(volatile rt_atomic_t *ptr)
{
    return __iar_builtin_LDREX((volatile unsigned int *)ptr);
}
#elif defined (__GNUC__)                /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic_t __LDREXW(volatile rt_atomic_t *addr)
{
    rt_atomic_t result;

    __asm volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
    return result;
}
#endif

/**
 \brief   STR Exclusive (32 bit)
 \details Executes an exclusive STR instruction for 32 bit values.
 \param [in]  value  Value to store
 \param [in]    ptr  Pointer to location
 \return          0  Function succeeded
 \return          1  Function failed
 */
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#define __STREXW        (rt_atomic_t)__builtin_arm_strex
#elif defined(__ARMCC_VERSION)          /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __STREXW(value, ptr)                                                 __strex(value, ptr)
#else
#define __STREXW(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__)     /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic_t __STREXW(rt_atomic_t value, volatile rt_atomic_t *ptr)
{
    return __STREX(value, (unsigned int *)ptr);
}
#elif defined (__GNUC__)                /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic_t __STREXW(volatile rt_atomic_t value, volatile rt_atomic_t *addr)
{
    rt_atomic_t result;

    __asm volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
    return result;
}
#endif

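/*
 * The rt_hw_atomic_* operations below all follow the same LDREX/STREX
 * pattern: load the location exclusively, compute the new value, then
 * retry the whole sequence whenever the exclusive store reports that the
 * reservation was lost (i.e. another context accessed the location).
 */

/**
 \brief   Atomic load (32 bit)
 \details Atomically reads the value at *ptr.
 \param [in]    ptr  Pointer to data
 \return        value at (*ptr)
 */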
rt_atomic_t rt_hw_atomic_load(volatile rt_atomic_t *ptr)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval, ptr)) != 0U);
    return oldval;
}

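/**
 \brief   Atomic store (32 bit)
 \details Atomically writes val to *ptr.
 \param [in]    ptr  Pointer to data
 \param [in]    val  Value to store
 */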
void rt_hw_atomic_store(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    do
    {
        __LDREXW(ptr);
    } while ((__STREXW(val, ptr)) != 0U);
}

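/**
 \brief   Atomic add (32 bit)
 \details Atomically adds val to *ptr.
 \param [in]    ptr  Pointer to data
 \param [in]    val  Value to add
 \return        value at (*ptr) before the addition
 */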
rt_atomic_t rt_hw_atomic_add(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval + val, ptr)) != 0U);
    return oldval;
}

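/**
 \brief   Atomic subtract (32 bit)
 \details Atomically subtracts val from *ptr.
 \param [in]    ptr  Pointer to data
 \param [in]    val  Value to subtract
 \return        value at (*ptr) before the subtraction
 */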
rt_atomic_t rt_hw_atomic_sub(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval - val, ptr)) != 0U);
    return oldval;
}

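/**
 \brief   Atomic AND (32 bit)
 \details Atomically ANDs *ptr with val.
 \param [in]    ptr  Pointer to data
 \param [in]    val  Value to AND with
 \return        value at (*ptr) before the operation
 */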
rt_atomic_t rt_hw_atomic_and(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval & val, ptr)) != 0U);
    return oldval;
}

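/**
 \brief   Atomic OR (32 bit)
 \details Atomically ORs *ptr with val.
 \param [in]    ptr  Pointer to data
 \param [in]    val  Value to OR with
 \return        value at (*ptr) before the operation
 */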
rt_atomic_t rt_hw_atomic_or(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval | val, ptr)) != 0U);
    return oldval;
}

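/**
 \brief   Atomic XOR (32 bit)
 \details Atomically XORs *ptr with val.
 \param [in]    ptr  Pointer to data
 \param [in]    val  Value to XOR with
 \return        value at (*ptr) before the operation
 */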
rt_atomic_t rt_hw_atomic_xor(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval ^ val, ptr)) != 0U);
    return oldval;
}

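/**
 \brief   Atomic exchange (32 bit)
 \details Atomically replaces the value at *ptr with val.
 \param [in]    ptr  Pointer to data
 \param [in]    val  New value
 \return        value at (*ptr) before the exchange
 */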
rt_atomic_t rt_hw_atomic_exchange(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(val, ptr)) != 0U);
    return oldval;
}

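/**
 \brief   Atomic flag clear (32 bit)
 \details Atomically clears the flag at *ptr (stores 0).
 \param [in]    ptr  Pointer to the flag
 */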
void rt_hw_atomic_flag_clear(volatile rt_atomic_t *ptr)
{
    do
    {
        __LDREXW(ptr);
    } while ((__STREXW(0, ptr)) != 0U);
}

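/**
 \brief   Atomic flag test and set (32 bit)
 \details Atomically sets the flag at *ptr (stores 1) and returns its previous value.
 \param [in]    ptr  Pointer to the flag
 \return        previous value of the flag (non-zero if it was already set)
 */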
rt_atomic_t rt_hw_atomic_flag_test_and_set(volatile rt_atomic_t *ptr)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(1, ptr)) != 0U);
    return oldval;
}

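/**
 \brief   Atomic compare and exchange, strong variant (32 bit)
 \details If *ptr equals *old, atomically stores new to *ptr; otherwise the
          value observed at *ptr is written back to *old.
 \param [in]     ptr  Pointer to data
 \param [in,out] old  Pointer to the expected value, updated on failure
 \param [in]     new  Value to store on success
 \return          1  Exchange performed
 \return          0  Exchange not performed, *old updated
 */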
rt_atomic_t rt_hw_atomic_compare_exchange_strong(volatile rt_atomic_t *ptr, rt_atomic_t *old, rt_atomic_t new)
{
    rt_atomic_t result;
    rt_atomic_t temp = *old;
    do
    {
        result = __LDREXW(ptr);
        if (result != temp)
        {
            *old = result;
            __STREXW(result, ptr);
            break;
        }
    } while ((__STREXW(new, ptr)) != 0U);
    return (result == temp);
}
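
/*
 * Usage sketch (illustrative only, compiled out): a lock-free capped
 * increment built on the primitives above. The counter and limit used here
 * are hypothetical and not part of this port.
 */
#if 0
static rt_atomic_t counter = 0;

static void counter_increment_capped(rt_atomic_t limit)
{
    rt_atomic_t expected = rt_hw_atomic_load(&counter);

    while (expected < limit)
    {
        /* on failure, expected is refreshed with the value observed at &counter */
        if (rt_hw_atomic_compare_exchange_strong(&counter, &expected, expected + 1))
        {
            break;
        }
    }
}
#endif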