1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2 /*
3 * Adapted from Linux's arch/powerpc/include/asm/bitops.h.
4 *
5 * Merged version by David Gibson <david@gibson.dropbear.id.au>.
6 * Based on ppc64 versions by: Dave Engebretsen, Todd Inglett, Don
7 * Reed, Pat McCarthy, Peter Bergner, Anton Blanchard. They
8 * originally took it from the ppc32 code.
9 */
10 #ifndef _ASM_PPC_BITOPS_H
11 #define _ASM_PPC_BITOPS_H
12
13 #include <asm/memory.h>
14
/*
 * Non-atomic aliases: on PPC the same set_bit()/clear_bit() implementations
 * serve for both the atomic and "__"-prefixed non-atomic variants.
 */
#define __set_bit(n, p) set_bit(n, p)
#define __clear_bit(n, p) clear_bit(n, p)

/* Generic bitops below operate on 32-bit words. */
#define BITOP_BITS_PER_WORD 32
/* Mask selecting bit 'nr' within its containing 32-bit word. */
#define BITOP_MASK(nr) (1U << ((nr) % BITOP_BITS_PER_WORD))
/* Index of the 32-bit word that contains bit 'nr'. */
#define BITOP_WORD(nr) ((nr) / BITOP_BITS_PER_WORD)
#define BITS_PER_BYTE 8

/*
 * PPC bit number conversion: PPC numbers bits from the MSB (bit 0) down,
 * so PPC_BIT() converts a big-endian bit number into a plain mask.
 * PPC_BITMASK(bs, be) builds a mask covering big-endian bits bs..be
 * inclusive (bs must be the higher-order, i.e. smaller, bit number).
 */
#define PPC_BITLSHIFT(be) (BITS_PER_LONG - 1 - (be))
#define PPC_BIT(bit) (1UL << PPC_BITLSHIFT(bit))
#define PPC_BITMASK(bs, be) ((PPC_BIT(bs) - PPC_BIT(be)) | PPC_BIT(bs))
27
/*
 * Macro for generating the ***_bits() functions: fn(mask, p) atomically
 * applies 'op' (or, xor, ...) between *p and mask using a
 * lwarx/stwcx. load-reserve / store-conditional retry loop.
 */
#define DEFINE_BITOP(fn, op) \
static inline void fn(unsigned int mask, \
                      volatile unsigned int *p_) \
{ \
    unsigned int old; \
    /* Cast away volatile: ordering is enforced by the asm "+m" operand. */ \
    unsigned int *p = (unsigned int *)p_; \
    asm volatile ( "1: lwarx %0,0,%3,0\n"   /* load word, set reservation */ \
                   #op "%I2 %0,%0,%2\n"     /* old = old <op> mask */ \
                   "stwcx. %0,0,%3\n"       /* store iff still reserved */ \
                   "bne- 1b\n"              /* retry if reservation lost */ \
                   : "=&r" (old), "+m" (*p) \
                   : "rK" (mask), "r" (p) \
                   : "cc", "memory" ); \
}

DEFINE_BITOP(set_bits, or)      /* *p |= mask, atomically */
DEFINE_BITOP(change_bits, xor)  /* *p ^= mask, atomically */
46
/*
 * Clearing needs a dedicated macro: there is no immediate form of andc,
 * so the mask is always supplied in a register ("r" rather than "rK").
 * fn(mask, p) atomically performs *p &= ~mask.
 */
#define DEFINE_CLROP(fn) \
static inline void fn(unsigned int mask, volatile unsigned int *p_) \
{ \
    unsigned int old; \
    /* Cast away volatile: ordering is enforced by the asm "+m" operand. */ \
    unsigned int *p = (unsigned int *)p_; \
    asm volatile ( "1: lwarx %0,0,%3,0\n"   /* load word, set reservation */ \
                   "andc %0,%0,%2\n"        /* old &= ~mask */ \
                   "stwcx. %0,0,%3\n"       /* store iff still reserved */ \
                   "bne- 1b\n"              /* retry if reservation lost */ \
                   : "=&r" (old), "+m" (*p) \
                   : "r" (mask), "r" (p) \
                   : "cc", "memory" ); \
}

DEFINE_CLROP(clear_bits)
62
/* Atomically set bit 'nr' in the bitmap starting at 'addr'. */
static inline void set_bit(int nr, volatile void *addr)
{
    volatile unsigned int *word = (volatile unsigned int *)addr + BITOP_WORD(nr);

    set_bits(BITOP_MASK(nr), word);
}
/* Atomically clear bit 'nr' in the bitmap starting at 'addr'. */
static inline void clear_bit(int nr, volatile void *addr)
{
    volatile unsigned int *word = (volatile unsigned int *)addr + BITOP_WORD(nr);

    clear_bits(BITOP_MASK(nr), word);
}
71
72 /**
73 * test_bit - Determine whether a bit is set
74 * @nr: bit number to test
75 * @addr: Address to start counting from
76 */
test_bit(int nr,const volatile void * addr)77 static inline int test_bit(int nr, const volatile void *addr)
78 {
79 const volatile unsigned int *p = addr;
80 return 1 & (p[BITOP_WORD(nr)] >> (nr & (BITOP_BITS_PER_WORD - 1)));
81 }
82
/*
 * Atomically clear the bits in 'mask' at *p and return the previous value
 * of those bits.  PPC_ATOMIC_ENTRY/EXIT_BARRIER (from asm/memory.h —
 * presumably full barriers; confirm there) bracket the update so the
 * operation can be used for synchronisation.
 */
static inline unsigned int test_and_clear_bits(
    unsigned int mask,
    volatile unsigned int *p)
{
    unsigned int old, t;

    asm volatile ( PPC_ATOMIC_ENTRY_BARRIER
                   "1: lwarx %0,0,%3,0\n"   /* old = *p, set reservation */
                   "andc %1,%0,%2\n"        /* t = old & ~mask */
                   "stwcx. %1,0,%3\n"       /* store iff still reserved */
                   "bne- 1b\n"              /* retry if reservation lost */
                   PPC_ATOMIC_EXIT_BARRIER
                   : "=&r" (old), "=&r" (t)
                   : "r" (mask), "r" (p)
                   : "cc", "memory" );

    return (old & mask);
}
101
/*
 * Atomically clear bit 'nr' at 'addr' and return its previous value
 * (1 if it was set, 0 otherwise).
 */
static inline int test_and_clear_bit(unsigned int nr,
                                     volatile void *addr)
{
    volatile unsigned int *word = (volatile unsigned int *)addr + BITOP_WORD(nr);

    return test_and_clear_bits(BITOP_MASK(nr), word) != 0;
}
109
/*
 * Atomically set the bits in 'mask' at *p and return the previous value
 * of those bits.  PPC_ATOMIC_ENTRY/EXIT_BARRIER (from asm/memory.h —
 * presumably full barriers; confirm there) bracket the update so the
 * operation can be used for synchronisation.
 */
static inline unsigned int test_and_set_bits(
    unsigned int mask,
    volatile unsigned int *p)
{
    unsigned int old, t;

    asm volatile ( PPC_ATOMIC_ENTRY_BARRIER
                   "1: lwarx %0,0,%3,0\n"   /* old = *p, set reservation */
                   "or%I2 %1,%0,%2\n"       /* t = old | mask */
                   "stwcx. %1,0,%3\n"       /* store iff still reserved */
                   "bne- 1b\n"              /* retry if reservation lost */
                   PPC_ATOMIC_EXIT_BARRIER
                   : "=&r" (old), "=&r" (t)
                   : "rK" (mask), "r" (p)
                   : "cc", "memory" );

    return (old & mask);
}
128
/*
 * Atomically set bit 'nr' at 'addr' and return its previous value
 * (1 if it was already set, 0 otherwise).
 */
static inline int test_and_set_bit(unsigned int nr, volatile void *addr)
{
    volatile unsigned int *word = (volatile unsigned int *)addr + BITOP_WORD(nr);

    return test_and_set_bits(BITOP_MASK(nr), word) != 0;
}
135
/**
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * Non-atomic and unordered: concurrent racing callers can each observe
 * the bit as clear.  Callers must serialise access with a lock.
 */
static inline int __test_and_set_bit(int nr, volatile void *addr)
{
    volatile unsigned int *word = (volatile unsigned int *)addr + BITOP_WORD(nr);
    unsigned int bit = BITOP_MASK(nr);
    unsigned int prev = *word;

    *word = prev | bit;
    return (prev & bit) != 0;
}
154
/**
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * Non-atomic and unordered: concurrent racing callers can each observe
 * the bit as set.  Callers must serialise access with a lock.
 */
static inline int __test_and_clear_bit(int nr, volatile void *addr)
{
    volatile unsigned int *word = (volatile unsigned int *)addr + BITOP_WORD(nr);
    unsigned int bit = BITOP_MASK(nr);
    unsigned int prev = *word;

    *word = prev & ~bit;
    return (prev & bit) != 0;
}
173
/*
 * arch_ffs/arch_ffsl: find first (least significant) set bit, returning a
 * 1-based index, or 0 when no bit is set.  arch_fls/arch_flsl: find last
 * (most significant) set bit likewise.
 * NOTE(review): each macro evaluates 'x' twice — callers must not pass
 * arguments with side effects.
 */
#define arch_ffs(x) ((x) ? 1 + __builtin_ctz(x) : 0)
#define arch_ffsl(x) ((x) ? 1 + __builtin_ctzl(x) : 0)
#define arch_fls(x) ((x) ? 32 - __builtin_clz(x) : 0)
#define arch_flsl(x) ((x) ? BITS_PER_LONG - __builtin_clzl(x) : 0)
178
/**
 * hweightN - returns the hamming weight of a N-bit word
 * @x: the word to weigh
 *
 * The Hamming Weight of a number is the total number of bits set in it.
 */
#define hweight64(x) __builtin_popcountll(x)
#define hweight32(x) __builtin_popcount(x)
/* The casts truncate the argument to 16/8 bits before counting. */
#define hweight16(x) __builtin_popcount((uint16_t)(x))
#define hweight8(x) __builtin_popcount((uint8_t)(x))
189
190 #endif /* _ASM_PPC_BITOPS_H */
191