2024-02-20 102a0743326a03cd1a1202ceda21e175b7d3575c
kernel/arch/mips/include/asm/bitops.h
@@ -13,15 +13,54 @@
 #error only <linux/bitops.h> can be included directly
 #endif
 
+#include <linux/bits.h>
 #include <linux/compiler.h>
 #include <linux/types.h>
 #include <asm/barrier.h>
 #include <asm/byteorder.h>		/* sigh ... */
 #include <asm/compiler.h>
 #include <asm/cpu-features.h>
+#include <asm/isa-rev.h>
 #include <asm/llsc.h>
 #include <asm/sgidefs.h>
 #include <asm/war.h>
+
+#define __bit_op(mem, insn, inputs...) do {			\
+	unsigned long temp;					\
+								\
+	asm volatile(						\
+	"	.set		push			\n"	\
+	"	.set		" MIPS_ISA_LEVEL "	\n"	\
+	"	" __SYNC(full, loongson3_war) "		\n"	\
+	"1:	" __LL		"%0, %1			\n"	\
+	"	" insn		"			\n"	\
+	"	" __SC		"%0, %1			\n"	\
+	"	" __SC_BEQZ	"%0, 1b			\n"	\
+	"	.set		pop			\n"	\
+	: "=&r"(temp), "+" GCC_OFF_SMALL_ASM()(mem)		\
+	: inputs						\
+	: __LLSC_CLOBBER);					\
+} while (0)
+
+#define __test_bit_op(mem, ll_dst, insn, inputs...) ({		\
+	unsigned long orig, temp;				\
+								\
+	asm volatile(						\
+	"	.set		push			\n"	\
+	"	.set		" MIPS_ISA_LEVEL "	\n"	\
+	"	" __SYNC(full, loongson3_war) "		\n"	\
+	"1:	" __LL		ll_dst ", %2		\n"	\
+	"	" insn		"			\n"	\
+	"	" __SC		"%1, %2			\n"	\
+	"	" __SC_BEQZ	"%1, 1b			\n"	\
+	"	.set		pop			\n"	\
+	: "=&r"(orig), "=&r"(temp),				\
+	  "+" GCC_OFF_SMALL_ASM()(mem)				\
+	: inputs						\
+	: __LLSC_CLOBBER);					\
+								\
+	orig;							\
+})
 
 /*
  * These are the "slower" versions of the functions and are in bitops.c.
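
Both helpers above package the canonical MIPS LL/SC retry loop: load-linked the word, apply one caller-supplied modifying instruction, store-conditional the result, and branch back to 1: if the store failed. __bit_op() discards the loaded value; __test_bit_op() additionally hands the original word back so callers can recover the old bit. For orientation only, here is a hedged userspace C sketch of the same retry shape built on the GCC/Clang __atomic builtins instead of raw ll/sc; demo_set_bit and DEMO_BITS_PER_LONG are illustrative names, not part of this patch or of the kernel API.

/* Illustrative only: a compare-and-swap retry loop with the same
 * "load, modify, store-conditional, retry on failure" shape that
 * __bit_op() expresses with ll/sc. Builds with gcc or clang. */
#include <stdio.h>

#define DEMO_BITS_PER_LONG	(8 * sizeof(unsigned long))

static void demo_set_bit(unsigned long nr, unsigned long *addr)
{
	unsigned long *m = addr + nr / DEMO_BITS_PER_LONG;
	unsigned long mask = 1UL << (nr % DEMO_BITS_PER_LONG);
	unsigned long old = __atomic_load_n(m, __ATOMIC_RELAXED);

	/* Retry until our store wins, as the sc/__SC_BEQZ loop does. */
	while (!__atomic_compare_exchange_n(m, &old, old | mask, 0,
					    __ATOMIC_SEQ_CST,
					    __ATOMIC_RELAXED))
		;
}

int main(void)
{
	unsigned long word = 0;

	demo_set_bit(5, &word);
	printf("word = %#lx\n", word);	/* prints word = 0x20 */
	return 0;
}
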
@@ -30,8 +69,6 @@
 void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
 void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
 void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
-int __mips_test_and_set_bit(unsigned long nr,
-		volatile unsigned long *addr);
 int __mips_test_and_set_bit_lock(unsigned long nr,
 		volatile unsigned long *addr);
 int __mips_test_and_clear_bit(unsigned long nr,
@@ -52,44 +89,20 @@
  */
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	int bit = nr & SZLONG_MASK;
-	unsigned long temp;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	arch=r4000			\n"
-		"1:	" __LL "%0, %1		# set_bit	\n"
-		"	or	%0, %2				\n"
-		"	" __SC	"%0, %1				\n"
-		"	beqzl	%0, 1b				\n"
-		"	.set	mips0				\n"
-		: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
-		: "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
-		do {
-			__asm__ __volatile__(
-			"	" __LL "%0, %1	# set_bit	\n"
-			"	" __INS "%0, %3, %2, 1		\n"
-			"	" __SC "%0, %1			\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (bit), "r" (~0));
-		} while (unlikely(!temp));
-#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
-	} else if (kernel_uses_llsc) {
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"	\n"
-			"	" __LL "%0, %1	# set_bit	\n"
-			"	or	%0, %2			\n"
-			"	" __SC	"%0, %1			\n"
-			"	.set	mips0			\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (1UL << bit));
-		} while (unlikely(!temp));
-	} else
+	if (!kernel_uses_llsc) {
 		__mips_set_bit(nr, addr);
+		return;
+	}
+
+	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
+		__bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
+		return;
+	}
+
+	__bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
 }
 
 /*
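
set_bit() now locates the word with BIT_WORD(nr) and the bit offset with nr % BITS_PER_LONG, replacing the old SZLONG_LOG/SZLONG_MASK arithmetic, and feeds a single modifying instruction to __bit_op(). The bit >= 16 guard on the MIPS R2+ ins path reflects that a mask for bits 0-15 already fits the 16-bit immediate of ori, so the plain or alternative (the "i" half of the "ir" constraint) is just as cheap; only higher bits would otherwise force the mask into a register. clear_bit() in the next hunk has no such guard, since ~BIT(bit) never fits a zero-extended 16-bit immediate, making ins with $0 a win for any constant bit. A hedged plain-C sketch of the indexing arithmetic (the non-atomic view, not this patch's code):

/* Illustrative only: the word/bit arithmetic behind BIT_WORD() and BIT(),
 * shown as a non-atomic set_bit() equivalent. */
#include <stdio.h>

#define DEMO_BITS_PER_LONG	(8 * sizeof(unsigned long))

static void demo_nonatomic_set_bit(unsigned long nr, unsigned long *addr)
{
	unsigned long *m = &addr[nr / DEMO_BITS_PER_LONG];	/* BIT_WORD(nr) */
	unsigned long mask = 1UL << (nr % DEMO_BITS_PER_LONG);	/* BIT(bit) */

	*m |= mask;	/* the "or %0, %2" step, minus the ll/sc retry loop */
}

int main(void)
{
	unsigned long bitmap[2] = { 0, 0 };

	demo_nonatomic_set_bit(3, bitmap);	/* word 0, bit 3 */
	demo_nonatomic_set_bit(40, bitmap);	/* word 0 bit 40 on 64-bit; word 1 bit 8 on 32-bit */
	printf("%#lx %#lx\n", bitmap[0], bitmap[1]);
	return 0;
}
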
@@ -104,44 +117,20 @@
  */
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	int bit = nr & SZLONG_MASK;
-	unsigned long temp;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	arch=r4000			\n"
-		"1:	" __LL "%0, %1		# clear_bit	\n"
-		"	and	%0, %2				\n"
-		"	" __SC "%0, %1				\n"
-		"	beqzl	%0, 1b				\n"
-		"	.set	mips0				\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-		: "ir" (~(1UL << bit)));
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
-		do {
-			__asm__ __volatile__(
-			"	" __LL "%0, %1	# clear_bit	\n"
-			"	" __INS "%0, $0, %2, 1		\n"
-			"	" __SC "%0, %1			\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (bit));
-		} while (unlikely(!temp));
-#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
-	} else if (kernel_uses_llsc) {
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"	\n"
-			"	" __LL "%0, %1	# clear_bit	\n"
-			"	and	%0, %2			\n"
-			"	" __SC "%0, %1			\n"
-			"	.set	mips0			\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (~(1UL << bit)));
-		} while (unlikely(!temp));
-	} else
+	if (!kernel_uses_llsc) {
 		__mips_clear_bit(nr, addr);
+		return;
+	}
+
+	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
+		__bit_op(*m, __INS "%0, $0, %2, 1", "i"(bit));
+		return;
+	}
+
+	__bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
 }
 
 /*
@@ -169,93 +158,15 @@
  */
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000			\n"
-		"1:	" __LL "%0, %1		# change_bit	\n"
-		"	xor	%0, %2				\n"
-		"	" __SC	"%0, %1				\n"
-		"	beqzl	%0, 1b				\n"
-		"	.set	mips0				\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-		: "ir" (1UL << bit));
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"	\n"
-			"	" __LL "%0, %1	# change_bit	\n"
-			"	xor	%0, %2			\n"
-			"	" __SC	"%0, %1			\n"
-			"	.set	mips0			\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (1UL << bit));
-		} while (unlikely(!temp));
-	} else
+	if (!kernel_uses_llsc) {
 		__mips_change_bit(nr, addr);
-}
+		return;
+	}
 
-/*
- * test_and_set_bit - Set a bit and return its old value
- * @nr: Bit to set
- * @addr: Address to count from
- *
- * This operation is atomic and cannot be reordered.
- * It also implies a memory barrier.
- */
-static inline int test_and_set_bit(unsigned long nr,
-	volatile unsigned long *addr)
-{
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
-
-	smp_mb__before_llsc();
-
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
-		"	or	%2, %0, %3				\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	mips0					\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1	# test_and_set_bit	\n"
-			"	or	%2, %0, %3			\n"
-			"	" __SC	"%2, %1				\n"
-			"	.set	mips0				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
-		res = __mips_test_and_set_bit(nr, addr);
-
-	smp_llsc_mb();
-
-	return res != 0;
+	__bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
 }
 
 /*
@@ -269,48 +180,39 @@
 static inline int test_and_set_bit_lock(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
+	unsigned long res, orig;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
-		"	or	%2, %0, %3				\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	mips0					\n"
-		: "=&r" (temp), "+m" (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1	# test_and_set_bit	\n"
-			"	or	%2, %0, %3			\n"
-			"	" __SC	"%2, %1				\n"
-			"	.set	mips0				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
+	if (!kernel_uses_llsc) {
 		res = __mips_test_and_set_bit_lock(nr, addr);
+	} else {
+		orig = __test_bit_op(*m, "%0",
+				     "or\t%1, %0, %3",
+				     "ir"(BIT(bit)));
+		res = (orig & BIT(bit)) != 0;
+	}
 
 	smp_llsc_mb();
 
-	return res != 0;
+	return res;
 }
+
+/*
+ * test_and_set_bit - Set a bit and return its old value
+ * @nr: Bit to set
+ * @addr: Address to count from
+ *
+ * This operation is atomic and cannot be reordered.
+ * It also implies a memory barrier.
+ */
+static inline int test_and_set_bit(unsigned long nr,
+	volatile unsigned long *addr)
+{
+	smp_mb__before_atomic();
+	return test_and_set_bit_lock(nr, addr);
+}
+
 /*
  * test_and_clear_bit - Clear a bit and return its old value
  * @nr: Bit to clear
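
test_and_set_bit() is no longer open-coded: it issues the leading barrier itself (smp_mb__before_atomic()) and then reuses test_and_set_bit_lock(), whose trailing smp_llsc_mb() supplies the closing barrier, so the pair keeps the documented full-barrier, atomic semantics. The return convention is unchanged: nonzero if the bit was already set. A hedged userspace sketch of that call pattern with C11 atomics follows; demo_* names are illustrative, not kernel API.

/* Illustrative only: the "test and set, act once" pattern that
 * test_and_set_bit() serves, modelled with C11 atomics. */
#include <stdatomic.h>
#include <stdio.h>

static atomic_ulong demo_flags;

/* Returns the old value of bit nr in *addr, setting it as a side effect. */
static int demo_test_and_set_bit(unsigned long nr, atomic_ulong *addr)
{
	unsigned long mask = 1UL << nr;

	return (atomic_fetch_or(addr, mask) & mask) != 0;
}

int main(void)
{
	/* Only the caller that wins the bit performs the one-time work. */
	if (!demo_test_and_set_bit(0, &demo_flags))
		printf("first caller: doing one-time init\n");
	if (!demo_test_and_set_bit(0, &demo_flags))
		printf("never reached: the bit is already set\n");
	return 0;
}
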
@@ -322,67 +224,30 @@
 static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
+	unsigned long res, orig;
 
-	smp_mb__before_llsc();
+	smp_mb__before_atomic();
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
-		"	or	%2, %0, %3				\n"
-		"	xor	%2, %3					\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	mips0					\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	" __LL	"%0, %1	# test_and_clear_bit	\n"
-			"	" __EXT "%2, %0, %3, 1			\n"
-			"	" __INS	"%0, $0, %3, 1			\n"
-			"	" __SC	"%0, %1				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "ir" (bit)
-			: "memory");
-		} while (unlikely(!temp));
-#endif
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL	"%0, %1	# test_and_clear_bit	\n"
-			"	or	%2, %0, %3			\n"
-			"	xor	%2, %3				\n"
-			"	" __SC	"%2, %1				\n"
-			"	.set	mips0				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
+	if (!kernel_uses_llsc) {
 		res = __mips_test_and_clear_bit(nr, addr);
+	} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
+		res = __test_bit_op(*m, "%1",
+				    __EXT "%0, %1, %3, 1;"
+				    __INS "%1, $0, %3, 1",
+				    "i"(bit));
+	} else {
+		orig = __test_bit_op(*m, "%0",
+				     "or\t%1, %0, %3;"
+				     "xor\t%1, %1, %3",
+				     "ir"(BIT(bit)));
+		res = (orig & BIT(bit)) != 0;
+	}
 
 	smp_llsc_mb();
 
-	return res != 0;
+	return res;
 }
 
 /*
@@ -396,50 +261,28 @@
 static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
+	unsigned long res, orig;
 
-	smp_mb__before_llsc();
+	smp_mb__before_atomic();
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
-		"	xor	%2, %0, %3				\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	mips0					\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL	"%0, %1	# test_and_change_bit	\n"
-			"	xor	%2, %0, %3			\n"
-			"	" __SC	"\t%2, %1			\n"
-			"	.set	mips0				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
+	if (!kernel_uses_llsc) {
 		res = __mips_test_and_change_bit(nr, addr);
+	} else {
+		orig = __test_bit_op(*m, "%0",
+				     "xor\t%1, %0, %3",
+				     "ir"(BIT(bit)));
+		res = (orig & BIT(bit)) != 0;
+	}
 
 	smp_llsc_mb();
 
-	return res != 0;
+	return res;
 }
+
+#undef __bit_op
+#undef __test_bit_op
 
 #include <asm-generic/bitops/non-atomic.h>
 
@@ -463,7 +306,7 @@
  * Return the bit position (0..63) of the most significant 1 bit in a word
  * Returns -1 if no 1 bit exists
  */
-static inline unsigned long __fls(unsigned long word)
+static __always_inline unsigned long __fls(unsigned long word)
 {
 	int num;
 
@@ -529,7 +372,7 @@
  * Returns 0..SZLONG-1
  * Undefined if no bit exists, so code should check against 0 first.
  */
-static inline unsigned long __ffs(unsigned long word)
+static __always_inline unsigned long __ffs(unsigned long word)
 {
 	return __fls(word & -word);
 }
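
__ffs() finds the lowest set bit by delegating to __fls(): word & -word is the two's-complement identity that isolates the least significant 1 bit, and for a single-bit value the highest and lowest set positions coincide. A small hedged demonstration in portable C, with __builtin_clzl standing in for the MIPS CLZ-based __fls(); the demo_* names are illustrative only.

/* Illustrative only: why __ffs(word) can be written as __fls(word & -word).
 * word & -word keeps exactly the lowest set bit; for a one-bit value the
 * highest and lowest set bit positions are the same. */
#include <stdio.h>

static unsigned int demo_fls_pos(unsigned long word)
{
	/* position of the most significant set bit, like __fls() */
	return (8 * sizeof(unsigned long)) - 1 - __builtin_clzl(word);
}

static unsigned int demo_ffs_pos(unsigned long word)
{
	return demo_fls_pos(word & -word);
}

int main(void)
{
	unsigned long word = 0xb8;	/* bits 3, 4, 5, 7 set */

	printf("lowest bit isolated: %#lx\n", word & -word);	/* 0x8 */
	printf("__ffs analogue:      %u\n", demo_ffs_pos(word));	/* 3 */
	printf("__fls analogue:      %u\n", demo_fls_pos(word));	/* 7 */
	return 0;
}
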
@@ -541,7 +384,7 @@
  * This is defined the same way as ffs.
  * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
  */
-static inline int fls(int x)
+static inline int fls(unsigned int x)
 {
 	int r;
 