@@ -35,6 +35,7 @@
 
 #include <linux/typecheck.h>
 #include <linux/compiler.h>
+#include <linux/types.h>
 #include <asm/atomic_ops.h>
 #include <asm/barrier.h>
 
---|
@@ -55,7 +56,7 @@
         return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
 }
 
-static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
+static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
 {
         unsigned long *addr = __bitops_word(nr, ptr);
         unsigned long mask;
---|
@@ -73,10 +74,10 @@
         }
 #endif
         mask = 1UL << (nr & (BITS_PER_LONG - 1));
-        __atomic64_or(mask, addr);
+        __atomic64_or(mask, (long *)addr);
 }
 
-static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
 {
         unsigned long *addr = __bitops_word(nr, ptr);
         unsigned long mask;
---|
@@ -94,10 +95,11 @@
         }
 #endif
         mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
-        __atomic64_and(mask, addr);
+        __atomic64_and(mask, (long *)addr);
 }
 
-static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
+static __always_inline void arch_change_bit(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         unsigned long *addr = __bitops_word(nr, ptr);
         unsigned long mask;
---|
@@ -115,66 +117,67 @@
         }
 #endif
         mask = 1UL << (nr & (BITS_PER_LONG - 1));
-        __atomic64_xor(mask, addr);
+        __atomic64_xor(mask, (long *)addr);
 }
 
-static inline int
-test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch_test_and_set_bit(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         unsigned long *addr = __bitops_word(nr, ptr);
         unsigned long old, mask;
 
         mask = 1UL << (nr & (BITS_PER_LONG - 1));
-        old = __atomic64_or_barrier(mask, addr);
+        old = __atomic64_or_barrier(mask, (long *)addr);
         return (old & mask) != 0;
 }
 
-static inline int
-test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch_test_and_clear_bit(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         unsigned long *addr = __bitops_word(nr, ptr);
         unsigned long old, mask;
 
         mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
-        old = __atomic64_and_barrier(mask, addr);
+        old = __atomic64_and_barrier(mask, (long *)addr);
         return (old & ~mask) != 0;
 }
 
-static inline int
-test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch_test_and_change_bit(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         unsigned long *addr = __bitops_word(nr, ptr);
         unsigned long old, mask;
 
         mask = 1UL << (nr & (BITS_PER_LONG - 1));
-        old = __atomic64_xor_barrier(mask, addr);
+        old = __atomic64_xor_barrier(mask, (long *)addr);
         return (old & mask) != 0;
 }
 
-static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch___set_bit(unsigned long nr, volatile unsigned long *ptr)
 {
         unsigned char *addr = __bitops_byte(nr, ptr);
 
         *addr |= 1 << (nr & 7);
 }
 
-static inline void
-__clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch___clear_bit(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         unsigned char *addr = __bitops_byte(nr, ptr);
 
         *addr &= ~(1 << (nr & 7));
 }
 
-static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch___change_bit(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         unsigned char *addr = __bitops_byte(nr, ptr);
 
         *addr ^= 1 << (nr & 7);
 }
 
-static inline int
-__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch___test_and_set_bit(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         unsigned char *addr = __bitops_byte(nr, ptr);
         unsigned char ch;
---|
@@ -184,8 +187,8 @@
         return (ch >> (nr & 7)) & 1;
 }
 
-static inline int
-__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch___test_and_clear_bit(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         unsigned char *addr = __bitops_byte(nr, ptr);
         unsigned char ch;
---|
@@ -195,8 +198,8 @@
         return (ch >> (nr & 7)) & 1;
 }
 
-static inline int
-__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch___test_and_change_bit(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         unsigned char *addr = __bitops_byte(nr, ptr);
         unsigned char ch;
---|
@@ -206,7 +209,8 @@
         return (ch >> (nr & 7)) & 1;
 }
 
-static inline int test_bit(unsigned long nr, const volatile unsigned long *ptr)
+static inline bool arch_test_bit(unsigned long nr,
+                const volatile unsigned long *ptr)
 {
         const volatile unsigned char *addr;
 
---|
@@ -215,27 +219,31 @@
         return (*addr >> (nr & 7)) & 1;
 }
 
-static inline int test_and_set_bit_lock(unsigned long nr,
-                volatile unsigned long *ptr)
+static inline bool arch_test_and_set_bit_lock(unsigned long nr,
+                volatile unsigned long *ptr)
 {
-        if (test_bit(nr, ptr))
+        if (arch_test_bit(nr, ptr))
                 return 1;
-        return test_and_set_bit(nr, ptr);
+        return arch_test_and_set_bit(nr, ptr);
 }
 
-static inline void clear_bit_unlock(unsigned long nr,
-                volatile unsigned long *ptr)
+static inline void arch_clear_bit_unlock(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         smp_mb__before_atomic();
-        clear_bit(nr, ptr);
+        arch_clear_bit(nr, ptr);
 }
 
-static inline void __clear_bit_unlock(unsigned long nr,
-                volatile unsigned long *ptr)
+static inline void arch___clear_bit_unlock(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         smp_mb();
-        __clear_bit(nr, ptr);
+        arch___clear_bit(nr, ptr);
 }
+
+#include <asm-generic/bitops/instrumented-atomic.h>
+#include <asm-generic/bitops/instrumented-non-atomic.h>
+#include <asm-generic/bitops/instrumented-lock.h>
 
 /*
  * Functions which use MSB0 bit numbering.
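The three asm-generic includes added in the hunk above supply the unprefixed set_bit(), test_and_set_bit(), clear_bit_unlock() and friends as thin wrappers that add sanitizer checking around the arch_*() helpers defined in this file, which is why the s390 primitives gain the arch_ prefix. A minimal sketch of that wrapper pattern, assuming the kasan_check_write() hook used by instrumented-atomic.h at the time (later kernels use instrument_atomic_write()):

static inline void set_bit(long nr, volatile unsigned long *addr)
{
        /* let KASAN check the word that is about to be modified */
        kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
        arch_set_bit(nr, addr);  /* the s390 implementation above */
}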
---|
@@ -261,7 +269,8 @@
         return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
 }
 
-static inline int test_and_clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
+static inline bool test_and_clear_bit_inv(unsigned long nr,
+                volatile unsigned long *ptr)
 {
         return test_and_clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
 }
---|
@@ -276,8 +285,8 @@
         return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
 }
 
-static inline int test_bit_inv(unsigned long nr,
-                const volatile unsigned long *ptr)
+static inline bool test_bit_inv(unsigned long nr,
+                const volatile unsigned long *ptr)
 {
         return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
 }
---|
@@ -397,9 +406,9 @@
  * This is defined the same way as ffs.
  * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
  */
-static inline int fls(int word)
+static inline int fls(unsigned int word)
 {
-        return fls64((unsigned int)word);
+        return fls64(word);
 }
 
 #else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */
---|