
hc
2024-05-10 748e4f3d702def1a4bff191e0cf93b6a05340f01
kernel/arch/xtensa/include/asm/bitops.h
@@ -13,8 +13,6 @@
 #ifndef _XTENSA_BITOPS_H
 #define _XTENSA_BITOPS_H
 
-#ifdef __KERNEL__
-
 #ifndef _LINUX_BITOPS_H
 #error only <linux/bitops.h> can be included directly
 #endif
@@ -98,130 +96,114 @@
 
 #include <asm-generic/bitops/fls64.h>
 
-#if XCHAL_HAVE_S32C1I
+#if XCHAL_HAVE_EXCLUSIVE
 
-static inline void set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1: l32i %1, %3, 0\n"
-			" wsr %1, scompare1\n"
-			" or %0, %1, %2\n"
-			" s32c1i %0, %3, 0\n"
-			" bne %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
+#define BIT_OP(op, insn, inv) \
+static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
+{ \
+	unsigned long tmp; \
+	unsigned long mask = 1UL << (bit & 31); \
+	\
+	p += bit >> 5; \
+	\
+	__asm__ __volatile__( \
+			"1: l32ex %[tmp], %[addr]\n" \
+			" "insn" %[tmp], %[tmp], %[mask]\n" \
+			" s32ex %[tmp], %[addr]\n" \
+			" getex %[tmp]\n" \
+			" beqz %[tmp], 1b\n" \
+			: [tmp] "=&a" (tmp) \
+			: [mask] "a" (inv mask), [addr] "a" (p) \
+			: "memory"); \
 }
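
The BIT_OP template above replaces three nearly identical hand-written functions. On cores with the EXCLUSIVE option the loop works as follows: l32ex loads the word and marks it for exclusive access, the substituted insn (or/and/xor) computes the new value, s32ex attempts the store, and getex fetches the store's success flag so beqz retries after any intervening write. A minimal C-level sketch of the same read-modify-write semantics, using a GCC/clang __atomic builtin purely for illustration (sketch_set_bit is a hypothetical name, not a kernel symbol):

/* Sketch: what the l32ex/s32ex retry loop achieves for set_bit. */
static inline void sketch_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;	/* select the 32-bit word containing the bit */
	__atomic_fetch_or(p, mask, __ATOMIC_RELAXED);
}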
 
-static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1: l32i %1, %3, 0\n"
-			" wsr %1, scompare1\n"
-			" and %0, %1, %2\n"
-			" s32c1i %0, %3, 0\n"
-			" bne %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (~mask), "a" (p)
-			: "memory");
+#define TEST_AND_BIT_OP(op, insn, inv) \
+static inline int \
+test_and_##op##_bit(unsigned int bit, volatile unsigned long *p) \
+{ \
+	unsigned long tmp, value; \
+	unsigned long mask = 1UL << (bit & 31); \
+	\
+	p += bit >> 5; \
+	\
+	__asm__ __volatile__( \
+			"1: l32ex %[value], %[addr]\n" \
+			" "insn" %[tmp], %[value], %[mask]\n" \
+			" s32ex %[tmp], %[addr]\n" \
+			" getex %[tmp]\n" \
+			" beqz %[tmp], 1b\n" \
+			: [tmp] "=&a" (tmp), [value] "=&a" (value) \
+			: [mask] "a" (inv mask), [addr] "a" (p) \
+			: "memory"); \
+	\
+	return value & mask; \
 }
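
TEST_AND_BIT_OP uses the same exclusive loop but keeps the pre-modification word in value, so the function can report whether the bit was set before the operation, which is the kernel contract for the test_and_*_bit family. A hedged C analogue, again with a hypothetical name and an __atomic builtin standing in for the assembly:

/* Sketch: returns nonzero iff the bit was already set before the call.
 * Ordering details of the real kernel primitives are glossed over. */
static inline int sketch_test_and_set_bit(unsigned int bit,
					  volatile unsigned long *p)
{
	unsigned long mask = 1UL << (bit & 31);
	unsigned long old;

	p += bit >> 5;
	old = __atomic_fetch_or(p, mask, __ATOMIC_RELAXED);
	return old & mask;
}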
 
-static inline void change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
+#elif XCHAL_HAVE_S32C1I
 
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1: l32i %1, %3, 0\n"
-			" wsr %1, scompare1\n"
-			" xor %0, %1, %2\n"
-			" s32c1i %0, %3, 0\n"
-			" bne %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
+#define BIT_OP(op, insn, inv) \
+static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
+{ \
+	unsigned long tmp, value; \
+	unsigned long mask = 1UL << (bit & 31); \
+	\
+	p += bit >> 5; \
+	\
+	__asm__ __volatile__( \
+			"1: l32i %[value], %[mem]\n" \
+			" wsr %[value], scompare1\n" \
+			" "insn" %[tmp], %[value], %[mask]\n" \
+			" s32c1i %[tmp], %[mem]\n" \
+			" bne %[tmp], %[value], 1b\n" \
+			: [tmp] "=&a" (tmp), [value] "=&a" (value), \
+			  [mem] "+m" (*p) \
+			: [mask] "a" (inv mask) \
+			: "memory"); \
 }
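
On cores without EXCLUSIVE but with S32C1I, the same template is built on compare-and-swap: the current word is loaded into value, copied into the scompare1 special register, the new value is computed into tmp, and s32c1i stores tmp only if memory still equals scompare1 (placing the observed old value in tmp either way); bne retries when another CPU got in between. A hedged C sketch of that retry loop, using the legacy __sync builtin only to illustrate the shape:

/* Sketch of the s32c1i loop: CAS old -> old|mask until it succeeds. */
static inline void sketch_cas_set_bit(unsigned int bit,
				      volatile unsigned long *p)
{
	unsigned long old, new;

	p += bit >> 5;
	do {
		old = *p;
		new = old | (1UL << (bit & 31));
	} while (__sync_val_compare_and_swap(p, old, new) != old);
}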
 
-static inline int
-test_and_set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1: l32i %1, %3, 0\n"
-			" wsr %1, scompare1\n"
-			" or %0, %1, %2\n"
-			" s32c1i %0, %3, 0\n"
-			" bne %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-
-	return tmp & mask;
-}
-
-static inline int
-test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1: l32i %1, %3, 0\n"
-			" wsr %1, scompare1\n"
-			" and %0, %1, %2\n"
-			" s32c1i %0, %3, 0\n"
-			" bne %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (~mask), "a" (p)
-			: "memory");
-
-	return tmp & mask;
-}
-
-static inline int
-test_and_change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1: l32i %1, %3, 0\n"
-			" wsr %1, scompare1\n"
-			" xor %0, %1, %2\n"
-			" s32c1i %0, %3, 0\n"
-			" bne %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-
-	return tmp & mask;
+#define TEST_AND_BIT_OP(op, insn, inv) \
+static inline int \
+test_and_##op##_bit(unsigned int bit, volatile unsigned long *p) \
+{ \
+	unsigned long tmp, value; \
+	unsigned long mask = 1UL << (bit & 31); \
+	\
+	p += bit >> 5; \
+	\
+	__asm__ __volatile__( \
+			"1: l32i %[value], %[mem]\n" \
+			" wsr %[value], scompare1\n" \
+			" "insn" %[tmp], %[value], %[mask]\n" \
+			" s32c1i %[tmp], %[mem]\n" \
+			" bne %[tmp], %[value], 1b\n" \
+			: [tmp] "=&a" (tmp), [value] "=&a" (value), \
+			  [mem] "+m" (*p) \
+			: [mask] "a" (inv mask) \
+			: "memory"); \
+	\
+	return tmp & mask; \
 }
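
Either branch gives the usual semantics: the bit is updated atomically and the return value reflects its previous state (here tmp holds the old word that s32c1i observed on the successful iteration, so tmp & mask is the prior bit). A short usage sketch against the real API, with flags as a made-up example bitmap:

/* Claim a slot in a shared bitmap; nonzero means it was already taken. */
static unsigned long flags[1];	/* example storage, 32 bits */

static int claim_slot(unsigned int slot)
{
	return test_and_set_bit(slot, flags);
}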
 
 #else
 
+#define BIT_OP(op, insn, inv)
+#define TEST_AND_BIT_OP(op, insn, inv)
+
 #include <asm-generic/bitops/atomic.h>
 
 #endif /* XCHAL_HAVE_S32C1I */
+
+#define BIT_OPS(op, insn, inv) \
+	BIT_OP(op, insn, inv) \
+	TEST_AND_BIT_OP(op, insn, inv)
+
+BIT_OPS(set, "or", )
+BIT_OPS(clear, "and", ~)
+BIT_OPS(change, "xor", )
+
+#undef BIT_OPS
+#undef BIT_OP
+#undef TEST_AND_BIT_OP
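
BIT_OPS stamps out both the plain and the test_and_ variant for each operation, so the three invocations generate all six public functions; on the fallback branch both helper macros are empty and asm-generic/bitops/atomic.h supplies the functions instead. The inv argument is substituted directly in front of mask, which is why BIT_OPS(clear, "and", ~) hands ~mask to the and instruction. In the C sketches above, the clear instantiation corresponds to:

/* C-level analogue of BIT_OPS(clear, "and", ~) (sketch only). */
static inline void sketch_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;
	__atomic_fetch_and(p, ~mask, __ATOMIC_RELAXED);
}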
 
 #include <asm-generic/bitops/find.h>
 #include <asm-generic/bitops/le.h>
@@ -231,7 +213,5 @@
 #include <asm-generic/bitops/hweight.h>
 #include <asm-generic/bitops/lock.h>
 #include <asm-generic/bitops/sched.h>
-
-#endif /* __KERNEL__ */
 
 #endif /* _XTENSA_BITOPS_H */