2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/arch/s390/include/asm/bitops.h
@@ -35,6 +35,7 @@
 
 #include <linux/typecheck.h>
 #include <linux/compiler.h>
+#include <linux/types.h>
 #include <asm/atomic_ops.h>
 #include <asm/barrier.h>
 
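
The new <linux/types.h> include supplies the bool type, which the converted predicates in the rest of this patch now return, for example (prototype taken from a later hunk of this same patch):

	bool arch_test_bit(unsigned long nr, const volatile unsigned long *ptr);
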
@@ -55,7 +56,7 @@
 	return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
 }
 
-static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
+static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
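
This rename is the heart of the patch: the s390 primitives move into the arch_ namespace so that the generic instrumented wrappers, included at the end of this patch, can provide the public names and add KASAN checking. Note the function also becomes __always_inline rather than plain inline. A minimal sketch of such a wrapper, assuming the asm-generic/bitops/instrumented-atomic.h of this era (the exact instrumentation hook varies across kernel versions):

	static inline void set_bit(long nr, volatile unsigned long *addr)
	{
		/* later kernels use instrument_atomic_write() here */
		kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
		arch_set_bit(nr, addr);
	}
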
@@ -73,10 +74,10 @@
 	}
 #endif
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	__atomic64_or(mask, addr);
+	__atomic64_or(mask, (long *)addr);
 }
 
-static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
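
The new (long *) casts adapt the bitmap word pointer to the s390 atomic primitives, whose asm/atomic_ops.h prototypes evidently take long * operands. The shapes below are an assumption inferred from the casts added in this patch, not something shown in it:

	void __atomic64_or(long val, long *ptr);		/* no return value */
	long __atomic64_or_barrier(long val, long *ptr);	/* returns old word */
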
@@ -94,10 +95,11 @@
 	}
 #endif
 	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
-	__atomic64_and(mask, addr);
+	__atomic64_and(mask, (long *)addr);
 }
 
-static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
+static __always_inline void arch_change_bit(unsigned long nr,
+					    volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
@@ -115,66 +117,67 @@
 	}
 #endif
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	__atomic64_xor(mask, addr);
+	__atomic64_xor(mask, (long *)addr);
 }
 
-static inline int
-test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch_test_and_set_bit(unsigned long nr,
+					 volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long old, mask;
 
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	old = __atomic64_or_barrier(mask, addr);
+	old = __atomic64_or_barrier(mask, (long *)addr);
 	return (old & mask) != 0;
 }
 
-static inline int
-test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch_test_and_clear_bit(unsigned long nr,
+					   volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long old, mask;
 
 	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
-	old = __atomic64_and_barrier(mask, addr);
+	old = __atomic64_and_barrier(mask, (long *)addr);
 	return (old & ~mask) != 0;
 }
 
-static inline int
-test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch_test_and_change_bit(unsigned long nr,
+					    volatile unsigned long *ptr)
 {
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long old, mask;
 
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	old = __atomic64_xor_barrier(mask, addr);
+	old = __atomic64_xor_barrier(mask, (long *)addr);
 	return (old & mask) != 0;
 }
 
-static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch___set_bit(unsigned long nr, volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 
 	*addr |= 1 << (nr & 7);
 }
 
-static inline void
-__clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch___clear_bit(unsigned long nr,
+				    volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 
 	*addr &= ~(1 << (nr & 7));
 }
 
-static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline void arch___change_bit(unsigned long nr,
+				     volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 
 	*addr ^= 1 << (nr & 7);
 }
 
-static inline int
-__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch___test_and_set_bit(unsigned long nr,
+					   volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 	unsigned char ch;
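
Two things change throughout this block: the test-and-modify helpers return bool instead of int, and every function moves into the arch_ namespace. The non-atomic arch___* variants touch only the single byte holding the bit, located by __bitops_byte(), whose body appears near the top of this patch. A worked example of that big-endian byte math, assuming BITS_PER_LONG == 64:

	/* __bitops_byte(nr, ptr) = (unsigned char *)ptr + ((nr ^ 56) >> 3)
	 *   nr = 0  -> byte offset 7: bit 0 lives in the last byte of the word
	 *   nr = 8  -> byte offset 6
	 *   nr = 63 -> byte offset 0: the most significant byte
	 * which is exactly where a big-endian machine keeps those bits.
	 */
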
@@ -184,8 +187,8 @@
 	return (ch >> (nr & 7)) & 1;
 }
 
-static inline int
-__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch___test_and_clear_bit(unsigned long nr,
+					     volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 	unsigned char ch;
@@ -195,8 +198,8 @@
 	return (ch >> (nr & 7)) & 1;
 }
 
-static inline int
-__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
+static inline bool arch___test_and_change_bit(unsigned long nr,
+					      volatile unsigned long *ptr)
 {
 	unsigned char *addr = __bitops_byte(nr, ptr);
 	unsigned char ch;
@@ -206,7 +209,8 @@
 	return (ch >> (nr & 7)) & 1;
 }
 
-static inline int test_bit(unsigned long nr, const volatile unsigned long *ptr)
+static inline bool arch_test_bit(unsigned long nr,
+				 const volatile unsigned long *ptr)
 {
 	const volatile unsigned char *addr;
 
@@ -215,27 +219,31 @@
 	return (*addr >> (nr & 7)) & 1;
 }
 
-static inline int test_and_set_bit_lock(unsigned long nr,
-					volatile unsigned long *ptr)
+static inline bool arch_test_and_set_bit_lock(unsigned long nr,
+					      volatile unsigned long *ptr)
 {
-	if (test_bit(nr, ptr))
+	if (arch_test_bit(nr, ptr))
 		return 1;
-	return test_and_set_bit(nr, ptr);
+	return arch_test_and_set_bit(nr, ptr);
 }
 
-static inline void clear_bit_unlock(unsigned long nr,
-				    volatile unsigned long *ptr)
+static inline void arch_clear_bit_unlock(unsigned long nr,
+					 volatile unsigned long *ptr)
 {
 	smp_mb__before_atomic();
-	clear_bit(nr, ptr);
+	arch_clear_bit(nr, ptr);
 }
 
-static inline void __clear_bit_unlock(unsigned long nr,
-				      volatile unsigned long *ptr)
+static inline void arch___clear_bit_unlock(unsigned long nr,
+					   volatile unsigned long *ptr)
 {
 	smp_mb();
-	__clear_bit(nr, ptr);
+	arch___clear_bit(nr, ptr);
 }
 
+
+#include <asm-generic/bitops/instrumented-atomic.h>
+#include <asm-generic/bitops/instrumented-non-atomic.h>
+#include <asm-generic/bitops/instrumented-lock.h>
 
 /*
  * Functions which use MSB0 bit numbering.
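
These three includes are what the arch_ renames were for: the asm-generic wrappers define the public names on top of the arch_* helpers and add instrumentation. instrumented-atomic.h provides set_bit(), clear_bit(), change_bit() and the test_and_* family; instrumented-non-atomic.h provides __set_bit(), __clear_bit(), __change_bit(), the __test_and_* family and test_bit(); instrumented-lock.h provides test_and_set_bit_lock(), clear_bit_unlock() and __clear_bit_unlock(). Callers need no change; a hypothetical example (map is illustrative only):

	DECLARE_BITMAP(map, 64);

	set_bit(3, map);			/* wrapper: instrumentation + arch_set_bit() */
	if (test_and_clear_bit(3, map))		/* wrapper + arch_test_and_clear_bit() */
		pr_debug("bit 3 was set\n");
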
@@ -261,7 +269,8 @@
 	return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
 }
 
-static inline int test_and_clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
+static inline bool test_and_clear_bit_inv(unsigned long nr,
+					  volatile unsigned long *ptr)
 {
 	return test_and_clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
 }
@@ -276,8 +285,8 @@
 	return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
 }
 
-static inline int test_bit_inv(unsigned long nr,
-			       const volatile unsigned long *ptr)
+static inline bool test_bit_inv(unsigned long nr,
+				const volatile unsigned long *ptr)
 {
 	return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
 }
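
The _inv helpers implement MSB0 bit numbering (bit 0 is the most significant bit of the word, matching how s390 hardware facilities number bits) by flipping the index with nr ^ (BITS_PER_LONG - 1). With BITS_PER_LONG == 64:

	set_bit_inv(0, ptr);	/* same as set_bit(63, ptr): sets 1UL << 63 */
	test_bit_inv(63, ptr);	/* same as test_bit(0, ptr): tests 1UL << 0 */
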
@@ -397,9 +406,9 @@
  * This is defined the same way as ffs.
  * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
  */
-static inline int fls(int word)
+static inline int fls(unsigned int word)
 {
-	return fls64((unsigned int)word);
+	return fls64(word);
 }
 
 #else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */
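
Taking unsigned int directly matches the generic fls() prototype and makes the old cast redundant: the argument now zero-extends into the 64-bit value that fls64() operates on, so the documented semantics hold unchanged:

	fls(0);			/* == 0 */
	fls(1);			/* == 1 */
	fls(0x80000000u);	/* == 32: fls64() sees 0x0000000080000000 */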