// © 2021 Qualcomm Innovation Center, Inc. All rights reserved.
//
// SPDX-License-Identifier: BSD-3-Clause

// From ARMv8.0 onwards (but not ARMv7), losing the local monitor triggers an
// event, so we can obtain the required semantics by loading with LDAX* and
// polling with WFE. The default store-release updates are sufficient on their
// own. See the generic event header for detailed requirements.

#define asm_event_wait(p) __asm__ volatile("wfe" ::"m"(*p))

// Dispatch to the acquiring exclusive load that matches the pointed-to type.
// clang-format off
#define asm_event_load_before_wait(p) _Generic( \
	(p), \
	_Atomic uint64_t *: asm_event_load64_before_wait, \
	_Atomic uint32_t *: asm_event_load32_before_wait, \
	_Atomic uint16_t *: asm_event_load16_before_wait, \
	_Atomic uint8_t *: asm_event_load8_before_wait, \
	_Atomic bool *: asm_event_loadbool_before_wait)(p)
// clang-format on

// Bitfield variant: load through the generated raw atomic pointer accessor
// and convert the raw value back to the bitfield type.
#define asm_event_load_bf_before_wait(name, p) \
	name##_cast(asm_event_load_before_wait(name##_atomic_ptr_raw(p)))
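
// Illustrative only: a minimal sketch of the wait loop that the generic event
// layer (asm-generic/event.h, included below) is expected to build from the
// macros above. The wait_for_flag() function and the flag variable are
// assumptions made for illustration, not the generic header's actual code.
//
//	static void wait_for_flag(_Atomic bool *flag)
//	{
//		while (!asm_event_load_before_wait(flag)) {
//			asm_event_wait(flag);
//		}
//	}
//
// The LDAX* load arms the exclusive monitor, so a store-release to the flag
// on another CPU clears the monitor and generates the wake-up event that the
// WFE waits for; a plain load followed by WFE could instead miss an update
// made between the check and the wait.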

#include <asm-generic/event.h>

// Acquiring exclusive loads for each access size. LDAXR* both acquires and
// arms the exclusive monitor, so the WFE in asm_event_wait() is woken by a
// write to the loaded location.
static inline ALWAYS_INLINE bool
asm_event_loadbool_before_wait(_Atomic bool *p)
{
	uint8_t ret;
	__asm__("ldaxrb %w0, %1" : "=r"(ret) : "Q"(*p));
	return ret != 0U;
}

static inline ALWAYS_INLINE uint8_t
asm_event_load8_before_wait(_Atomic uint8_t *p)
{
	uint8_t ret;
	__asm__("ldaxrb %w0, %1" : "=r"(ret) : "Q"(*p));
	return ret;
}

static inline ALWAYS_INLINE uint16_t
asm_event_load16_before_wait(_Atomic uint16_t *p)
{
	uint16_t ret;
	__asm__("ldaxrh %w0, %1" : "=r"(ret) : "Q"(*p));
	return ret;
}

static inline ALWAYS_INLINE uint32_t
asm_event_load32_before_wait(_Atomic uint32_t *p)
{
	uint32_t ret;
	__asm__("ldaxr %w0, %1" : "=r"(ret) : "Q"(*p));
	return ret;
}

static inline ALWAYS_INLINE uint64_t
asm_event_load64_before_wait(_Atomic uint64_t *p)
{
	uint64_t ret;
	__asm__("ldaxr %0, %1" : "=r"(ret) : "Q"(*p));
	return ret;
}