/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ARCH_LOONGARCH_LOCAL_H
#define _ARCH_LOONGARCH_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <asm/cmpxchg.h>

typedef struct {
        atomic_long_t a;
} local_t;

#define LOCAL_INIT(i) { ATOMIC_LONG_INIT(i) }

#define local_read(l) atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l) atomic_long_inc(&(l)->a)
#define local_dec(l) atomic_long_dec(&(l)->a)

/*
 * Same as above, but return the result value
 */
static inline long local_add_return(long i, local_t *l)
{
        unsigned long result;

        __asm__ __volatile__(
        " " __AMADD " %1, %2, %0 \n"
        : "+ZB" (l->a.counter), "=&r" (result)
        : "r" (i)
        : "memory");
        result = result + i;

        return result;
}

static inline long local_sub_return(long i, local_t *l)
{
        unsigned long result;

        __asm__ __volatile__(
        " " __AMADD "%1, %2, %0 \n"
        : "+ZB" (l->a.counter), "=&r" (result)
        : "r" (-i)
        : "memory");

        result = result - i;

        return result;
}
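
/*
 * Illustrative usage sketch, not part of the original header: the
 * *_return helpers above hand back the counter value *after* the
 * update, which is what callers typically branch on.  The per-CPU
 * variable name "pkt_count" and the threshold are assumptions made up
 * for this example.
 *
 *      static DEFINE_PER_CPU(local_t, pkt_count) = LOCAL_INIT(0);
 *
 *      long total = local_add_return(1, this_cpu_ptr(&pkt_count));
 *      if (total >= 64)
 *              ;       // e.g. flush a batch once 64 packets are counted
 */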

#define local_cmpxchg(l, o, n) \
        ((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u) \
({ \
        long c, old; \
        c = local_read(l); \
        while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
                c = old; \
        c != (u); \
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
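
/*
 * Illustrative sketch, not part of the original header: what the
 * cmpxchg loop in local_add_unless() amounts to.  It re-reads the
 * counter until either it equals @u (nothing added, the expression
 * evaluates to 0) or the compare-and-exchange succeeds (the add took
 * effect, the expression is non-zero).  local_inc_not_zero() uses this
 * to bump a counter only while it is still non-zero; the variable name
 * below is an assumption for the example.
 *
 *      static DEFINE_PER_CPU(local_t, inflight) = LOCAL_INIT(1);
 *
 *      if (local_inc_not_zero(this_cpu_ptr(&inflight))) {
 *              // counter was non-zero and has now been incremented
 *      }
 */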

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations. Note they take
 * a variable, not an address.
 */

#define __local_inc(l) ((l)->a.counter++)
#define __local_dec(l) ((l)->a.counter--)
#define __local_add(i, l) ((l)->a.counter += (i))
#define __local_sub(i, l) ((l)->a.counter -= (i))
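
/*
 * Illustrative sketch, not part of the original header: the usual
 * split between the two families above on a per-CPU counter.  The
 * local_*() ops are built on atomic operations and are safe against
 * interrupts on the owning CPU; the __local_*() forms compile to plain
 * read-modify-write and may only be used when the caller already
 * excludes interrupts (or is the sole writer).  As defined here, both
 * take a local_t pointer.  The counter name is an assumption for the
 * example.
 *
 *      static DEFINE_PER_CPU(local_t, evt_count) = LOCAL_INIT(0);
 *
 *      // From any context on this CPU, including interrupt handlers:
 *      local_inc(this_cpu_ptr(&evt_count));
 *
 *      // Only with interrupts already disabled on this CPU:
 *      __local_inc(this_cpu_ptr(&evt_count));
 */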

#endif /* _ARCH_LOONGARCH_LOCAL_H */