2024-01-31 f9004dbfff8a3fbbd7e2a88c8a4327c7f2f8e5b2
kernel/include/asm-generic/atomic-long.h
@@ -1,269 +1,1014 @@
-/* SPDX-License-Identifier: GPL-2.0 */
+// SPDX-License-Identifier: GPL-2.0
+
+// Generated by scripts/atomic/gen-atomic-long.sh
+// DO NOT MODIFY THIS FILE DIRECTLY
+
 #ifndef _ASM_GENERIC_ATOMIC_LONG_H
 #define _ASM_GENERIC_ATOMIC_LONG_H
-/*
- * Copyright (C) 2005 Silicon Graphics, Inc.
- * Christoph Lameter
- *
- * Allows to provide arch independent atomic definitions without the need to
- * edit all arch specific atomic.h files.
- */
 
+#include <linux/compiler.h>
 #include <asm/types.h>
 
-/*
- * Suppport for atomic_long_t
- *
- * Casts for parameters are avoided for existing atomic functions in order to
- * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
- * macros of a platform may have.
- */
-
-#if BITS_PER_LONG == 64
-
+#ifdef CONFIG_64BIT
 typedef atomic64_t atomic_long_t;
-
-#define ATOMIC_LONG_INIT(i)	ATOMIC64_INIT(i)
-#define ATOMIC_LONG_PFX(x)	atomic64 ## x
-#define ATOMIC_LONG_TYPE	s64
-
+#define ATOMIC_LONG_INIT(i)		ATOMIC64_INIT(i)
+#define atomic_long_cond_read_acquire	atomic64_cond_read_acquire
+#define atomic_long_cond_read_relaxed	atomic64_cond_read_relaxed
 #else
-
 typedef atomic_t atomic_long_t;
-
-#define ATOMIC_LONG_INIT(i)	ATOMIC_INIT(i)
-#define ATOMIC_LONG_PFX(x)	atomic ## x
-#define ATOMIC_LONG_TYPE	int
-
+#define ATOMIC_LONG_INIT(i)		ATOMIC_INIT(i)
+#define atomic_long_cond_read_acquire	atomic_cond_read_acquire
+#define atomic_long_cond_read_relaxed	atomic_cond_read_relaxed
 #endif
 
-#define ATOMIC_LONG_READ_OP(mo)						\
-static inline long atomic_long_read##mo(const atomic_long_t *l)	\
-{									\
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
-									\
-	return (long)ATOMIC_LONG_PFX(_read##mo)(v);			\
-}
-ATOMIC_LONG_READ_OP()
-ATOMIC_LONG_READ_OP(_acquire)
+#ifdef CONFIG_64BIT
 
-#undef ATOMIC_LONG_READ_OP
-
-#define ATOMIC_LONG_SET_OP(mo)						\
-static inline void atomic_long_set##mo(atomic_long_t *l, long i)	\
-{									\
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
-									\
-	ATOMIC_LONG_PFX(_set##mo)(v, i);				\
-}
-ATOMIC_LONG_SET_OP()
-ATOMIC_LONG_SET_OP(_release)
-
-#undef ATOMIC_LONG_SET_OP
-
-#define ATOMIC_LONG_ADD_SUB_OP(op, mo)					\
-static inline long							\
-atomic_long_##op##_return##mo(long i, atomic_long_t *l)		\
-{									\
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
-									\
-	return (long)ATOMIC_LONG_PFX(_##op##_return##mo)(i, v);	\
-}
-ATOMIC_LONG_ADD_SUB_OP(add,)
-ATOMIC_LONG_ADD_SUB_OP(add, _relaxed)
-ATOMIC_LONG_ADD_SUB_OP(add, _acquire)
-ATOMIC_LONG_ADD_SUB_OP(add, _release)
-ATOMIC_LONG_ADD_SUB_OP(sub,)
-ATOMIC_LONG_ADD_SUB_OP(sub, _relaxed)
-ATOMIC_LONG_ADD_SUB_OP(sub, _acquire)
-ATOMIC_LONG_ADD_SUB_OP(sub, _release)
-
-#undef ATOMIC_LONG_ADD_SUB_OP
-
-#define atomic_long_cmpxchg_relaxed(l, old, new) \
-	(ATOMIC_LONG_PFX(_cmpxchg_relaxed)((ATOMIC_LONG_PFX(_t) *)(l), \
-					   (old), (new)))
-#define atomic_long_cmpxchg_acquire(l, old, new) \
-	(ATOMIC_LONG_PFX(_cmpxchg_acquire)((ATOMIC_LONG_PFX(_t) *)(l), \
-					   (old), (new)))
-#define atomic_long_cmpxchg_release(l, old, new) \
-	(ATOMIC_LONG_PFX(_cmpxchg_release)((ATOMIC_LONG_PFX(_t) *)(l), \
-					   (old), (new)))
-#define atomic_long_cmpxchg(l, old, new) \
-	(ATOMIC_LONG_PFX(_cmpxchg)((ATOMIC_LONG_PFX(_t) *)(l), (old), (new)))
-
-
-#define atomic_long_try_cmpxchg_relaxed(l, old, new) \
-	(ATOMIC_LONG_PFX(_try_cmpxchg_relaxed)((ATOMIC_LONG_PFX(_t) *)(l), \
-		(ATOMIC_LONG_TYPE *)(old), (ATOMIC_LONG_TYPE)(new)))
-#define atomic_long_try_cmpxchg_acquire(l, old, new) \
-	(ATOMIC_LONG_PFX(_try_cmpxchg_acquire)((ATOMIC_LONG_PFX(_t) *)(l), \
-		(ATOMIC_LONG_TYPE *)(old), (ATOMIC_LONG_TYPE)(new)))
-#define atomic_long_try_cmpxchg_release(l, old, new) \
-	(ATOMIC_LONG_PFX(_try_cmpxchg_release)((ATOMIC_LONG_PFX(_t) *)(l), \
-		(ATOMIC_LONG_TYPE *)(old), (ATOMIC_LONG_TYPE)(new)))
-#define atomic_long_try_cmpxchg(l, old, new) \
-	(ATOMIC_LONG_PFX(_try_cmpxchg)((ATOMIC_LONG_PFX(_t) *)(l), \
-		(ATOMIC_LONG_TYPE *)(old), (ATOMIC_LONG_TYPE)(new)))
-
-
-#define atomic_long_xchg_relaxed(v, new) \
-	(ATOMIC_LONG_PFX(_xchg_relaxed)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
-#define atomic_long_xchg_acquire(v, new) \
-	(ATOMIC_LONG_PFX(_xchg_acquire)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
-#define atomic_long_xchg_release(v, new) \
-	(ATOMIC_LONG_PFX(_xchg_release)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
-#define atomic_long_xchg(v, new) \
-	(ATOMIC_LONG_PFX(_xchg)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
-
-static __always_inline void atomic_long_inc(atomic_long_t *l)
+static __always_inline long
+atomic_long_read(const atomic_long_t *v)
 {
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
-
-	ATOMIC_LONG_PFX(_inc)(v);
+	return atomic64_read(v);
 }
 
-static __always_inline void atomic_long_dec(atomic_long_t *l)
+static __always_inline long
+atomic_long_read_acquire(const atomic_long_t *v)
 {
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
-
-	ATOMIC_LONG_PFX(_dec)(v);
+	return atomic64_read_acquire(v);
 }
 
-#define ATOMIC_LONG_FETCH_OP(op, mo)					\
-static inline long							\
-atomic_long_fetch_##op##mo(long i, atomic_long_t *l)			\
-{									\
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
-									\
-	return (long)ATOMIC_LONG_PFX(_fetch_##op##mo)(i, v);		\
-}
-
-ATOMIC_LONG_FETCH_OP(add, )
-ATOMIC_LONG_FETCH_OP(add, _relaxed)
-ATOMIC_LONG_FETCH_OP(add, _acquire)
-ATOMIC_LONG_FETCH_OP(add, _release)
-ATOMIC_LONG_FETCH_OP(sub, )
-ATOMIC_LONG_FETCH_OP(sub, _relaxed)
-ATOMIC_LONG_FETCH_OP(sub, _acquire)
-ATOMIC_LONG_FETCH_OP(sub, _release)
-ATOMIC_LONG_FETCH_OP(and, )
-ATOMIC_LONG_FETCH_OP(and, _relaxed)
-ATOMIC_LONG_FETCH_OP(and, _acquire)
-ATOMIC_LONG_FETCH_OP(and, _release)
-ATOMIC_LONG_FETCH_OP(andnot, )
-ATOMIC_LONG_FETCH_OP(andnot, _relaxed)
-ATOMIC_LONG_FETCH_OP(andnot, _acquire)
-ATOMIC_LONG_FETCH_OP(andnot, _release)
-ATOMIC_LONG_FETCH_OP(or, )
-ATOMIC_LONG_FETCH_OP(or, _relaxed)
-ATOMIC_LONG_FETCH_OP(or, _acquire)
-ATOMIC_LONG_FETCH_OP(or, _release)
-ATOMIC_LONG_FETCH_OP(xor, )
-ATOMIC_LONG_FETCH_OP(xor, _relaxed)
-ATOMIC_LONG_FETCH_OP(xor, _acquire)
-ATOMIC_LONG_FETCH_OP(xor, _release)
-
-#undef ATOMIC_LONG_FETCH_OP
-
-#define ATOMIC_LONG_FETCH_INC_DEC_OP(op, mo)				\
-static inline long							\
-atomic_long_fetch_##op##mo(atomic_long_t *l)				\
-{									\
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
-									\
-	return (long)ATOMIC_LONG_PFX(_fetch_##op##mo)(v);		\
-}
-
-ATOMIC_LONG_FETCH_INC_DEC_OP(inc,)
-ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _relaxed)
-ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _acquire)
-ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _release)
-ATOMIC_LONG_FETCH_INC_DEC_OP(dec,)
-ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _relaxed)
-ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _acquire)
-ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _release)
-
-#undef ATOMIC_LONG_FETCH_INC_DEC_OP
-
-#define ATOMIC_LONG_OP(op)						\
-static __always_inline void						\
-atomic_long_##op(long i, atomic_long_t *l)				\
-{									\
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
-									\
-	ATOMIC_LONG_PFX(_##op)(i, v);					\
-}
-
-ATOMIC_LONG_OP(add)
-ATOMIC_LONG_OP(sub)
-ATOMIC_LONG_OP(and)
-ATOMIC_LONG_OP(andnot)
-ATOMIC_LONG_OP(or)
-ATOMIC_LONG_OP(xor)
-
-#undef ATOMIC_LONG_OP
-
-static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
+static __always_inline void
+atomic_long_set(atomic_long_t *v, long i)
 {
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
-
-	return ATOMIC_LONG_PFX(_sub_and_test)(i, v);
+	atomic64_set(v, i);
 }
 
-static inline int atomic_long_dec_and_test(atomic_long_t *l)
+static __always_inline void
+atomic_long_set_release(atomic_long_t *v, long i)
 {
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
-
-	return ATOMIC_LONG_PFX(_dec_and_test)(v);
+	atomic64_set_release(v, i);
 }
 
-static inline int atomic_long_inc_and_test(atomic_long_t *l)
+static __always_inline void
+atomic_long_add(long i, atomic_long_t *v)
 {
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
-
-	return ATOMIC_LONG_PFX(_inc_and_test)(v);
+	atomic64_add(i, v);
 }
 
-static inline int atomic_long_add_negative(long i, atomic_long_t *l)
+static __always_inline long
+atomic_long_add_return(long i, atomic_long_t *v)
 {
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
-
-	return ATOMIC_LONG_PFX(_add_negative)(i, v);
+	return atomic64_add_return(i, v);
 }
 
-#define ATOMIC_LONG_INC_DEC_OP(op, mo)					\
-static inline long							\
-atomic_long_##op##_return##mo(atomic_long_t *l)				\
-{									\
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
-									\
-	return (long)ATOMIC_LONG_PFX(_##op##_return##mo)(v);		\
-}
-ATOMIC_LONG_INC_DEC_OP(inc,)
-ATOMIC_LONG_INC_DEC_OP(inc, _relaxed)
-ATOMIC_LONG_INC_DEC_OP(inc, _acquire)
-ATOMIC_LONG_INC_DEC_OP(inc, _release)
-ATOMIC_LONG_INC_DEC_OP(dec,)
-ATOMIC_LONG_INC_DEC_OP(dec, _relaxed)
-ATOMIC_LONG_INC_DEC_OP(dec, _acquire)
-ATOMIC_LONG_INC_DEC_OP(dec, _release)
-
-#undef ATOMIC_LONG_INC_DEC_OP
-
-static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
+static __always_inline long
+atomic_long_add_return_acquire(long i, atomic_long_t *v)
 {
-	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
-
-	return (long)ATOMIC_LONG_PFX(_add_unless)(v, a, u);
+	return atomic64_add_return_acquire(i, v);
 }
 
-#define atomic_long_inc_not_zero(l) \
-	ATOMIC_LONG_PFX(_inc_not_zero)((ATOMIC_LONG_PFX(_t) *)(l))
+static __always_inline long
+atomic_long_add_return_release(long i, atomic_long_t *v)
+{
+	return atomic64_add_return_release(i, v);
+}
 
-#define atomic_long_cond_read_relaxed(v, c) \
-	ATOMIC_LONG_PFX(_cond_read_relaxed)((ATOMIC_LONG_PFX(_t) *)(v), (c))
-#define atomic_long_cond_read_acquire(v, c) \
-	ATOMIC_LONG_PFX(_cond_read_acquire)((ATOMIC_LONG_PFX(_t) *)(v), (c))
+static __always_inline long
+atomic_long_add_return_relaxed(long i, atomic_long_t *v)
+{
+	return atomic64_add_return_relaxed(i, v);
+}
 
-#endif /* _ASM_GENERIC_ATOMIC_LONG_H */
+static __always_inline long
+atomic_long_fetch_add(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_add(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_add_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_add_release(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_add_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_add_relaxed(i, v);
+}
+
+static __always_inline void
+atomic_long_sub(long i, atomic_long_t *v)
+{
+	atomic64_sub(i, v);
+}
+
+static __always_inline long
+atomic_long_sub_return(long i, atomic_long_t *v)
+{
+	return atomic64_sub_return(i, v);
+}
+
+static __always_inline long
+atomic_long_sub_return_acquire(long i, atomic_long_t *v)
+{
+	return atomic64_sub_return_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_sub_return_release(long i, atomic_long_t *v)
+{
+	return atomic64_sub_return_release(i, v);
+}
+
+static __always_inline long
+atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
+{
+	return atomic64_sub_return_relaxed(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_sub(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_sub(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_sub_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_sub_release(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_sub_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_sub_relaxed(i, v);
+}
+
+static __always_inline void
+atomic_long_inc(atomic_long_t *v)
+{
+	atomic64_inc(v);
+}
+
+static __always_inline long
+atomic_long_inc_return(atomic_long_t *v)
+{
+	return atomic64_inc_return(v);
+}
+
+static __always_inline long
+atomic_long_inc_return_acquire(atomic_long_t *v)
+{
+	return atomic64_inc_return_acquire(v);
+}
+
+static __always_inline long
+atomic_long_inc_return_release(atomic_long_t *v)
+{
+	return atomic64_inc_return_release(v);
+}
+
+static __always_inline long
+atomic_long_inc_return_relaxed(atomic_long_t *v)
+{
+	return atomic64_inc_return_relaxed(v);
+}
+
+static __always_inline long
+atomic_long_fetch_inc(atomic_long_t *v)
+{
+	return atomic64_fetch_inc(v);
+}
+
+static __always_inline long
+atomic_long_fetch_inc_acquire(atomic_long_t *v)
+{
+	return atomic64_fetch_inc_acquire(v);
+}
+
+static __always_inline long
+atomic_long_fetch_inc_release(atomic_long_t *v)
+{
+	return atomic64_fetch_inc_release(v);
+}
+
+static __always_inline long
+atomic_long_fetch_inc_relaxed(atomic_long_t *v)
+{
+	return atomic64_fetch_inc_relaxed(v);
+}
+
+static __always_inline void
+atomic_long_dec(atomic_long_t *v)
+{
+	atomic64_dec(v);
+}
+
+static __always_inline long
+atomic_long_dec_return(atomic_long_t *v)
+{
+	return atomic64_dec_return(v);
+}
+
+static __always_inline long
+atomic_long_dec_return_acquire(atomic_long_t *v)
+{
+	return atomic64_dec_return_acquire(v);
+}
+
+static __always_inline long
+atomic_long_dec_return_release(atomic_long_t *v)
+{
+	return atomic64_dec_return_release(v);
+}
+
+static __always_inline long
+atomic_long_dec_return_relaxed(atomic_long_t *v)
+{
+	return atomic64_dec_return_relaxed(v);
+}
+
+static __always_inline long
+atomic_long_fetch_dec(atomic_long_t *v)
+{
+	return atomic64_fetch_dec(v);
+}
+
+static __always_inline long
+atomic_long_fetch_dec_acquire(atomic_long_t *v)
+{
+	return atomic64_fetch_dec_acquire(v);
+}
+
+static __always_inline long
+atomic_long_fetch_dec_release(atomic_long_t *v)
+{
+	return atomic64_fetch_dec_release(v);
+}
+
+static __always_inline long
+atomic_long_fetch_dec_relaxed(atomic_long_t *v)
+{
+	return atomic64_fetch_dec_relaxed(v);
+}
+
+static __always_inline void
+atomic_long_and(long i, atomic_long_t *v)
+{
+	atomic64_and(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_and(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_and(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_and_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_and_release(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_and_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_and_relaxed(i, v);
+}
+
+static __always_inline void
+atomic_long_andnot(long i, atomic_long_t *v)
+{
+	atomic64_andnot(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_andnot(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_andnot(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_andnot_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_andnot_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_andnot_relaxed(i, v);
+}
+
+static __always_inline void
+atomic_long_or(long i, atomic_long_t *v)
+{
+	atomic64_or(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_or(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_or(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_or_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_or_release(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_or_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_or_relaxed(i, v);
+}
+
+static __always_inline void
+atomic_long_xor(long i, atomic_long_t *v)
+{
+	atomic64_xor(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_xor(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_xor(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_xor_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_xor_release(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_xor_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
+{
+	return atomic64_fetch_xor_relaxed(i, v);
+}
+
+static __always_inline long
+atomic_long_xchg(atomic_long_t *v, long i)
+{
+	return atomic64_xchg(v, i);
+}
+
+static __always_inline long
+atomic_long_xchg_acquire(atomic_long_t *v, long i)
+{
+	return atomic64_xchg_acquire(v, i);
+}
+
+static __always_inline long
+atomic_long_xchg_release(atomic_long_t *v, long i)
+{
+	return atomic64_xchg_release(v, i);
+}
+
+static __always_inline long
+atomic_long_xchg_relaxed(atomic_long_t *v, long i)
+{
+	return atomic64_xchg_relaxed(v, i);
+}
+
+static __always_inline long
+atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
+{
+	return atomic64_cmpxchg(v, old, new);
+}
+
+static __always_inline long
+atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
+{
+	return atomic64_cmpxchg_acquire(v, old, new);
+}
+
+static __always_inline long
+atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
+{
+	return atomic64_cmpxchg_release(v, old, new);
+}
+
+static __always_inline long
+atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
+{
+	return atomic64_cmpxchg_relaxed(v, old, new);
+}
+
+static __always_inline bool
+atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
+{
+	return atomic64_try_cmpxchg(v, (s64 *)old, new);
+}
+
+static __always_inline bool
+atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
+{
+	return atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
+}
+
+static __always_inline bool
+atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
+{
+	return atomic64_try_cmpxchg_release(v, (s64 *)old, new);
+}
+
+static __always_inline bool
+atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
+{
+	return atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
+}
+
+static __always_inline bool
+atomic_long_sub_and_test(long i, atomic_long_t *v)
+{
+	return atomic64_sub_and_test(i, v);
+}
+
+static __always_inline bool
+atomic_long_dec_and_test(atomic_long_t *v)
+{
+	return atomic64_dec_and_test(v);
+}
+
+static __always_inline bool
+atomic_long_inc_and_test(atomic_long_t *v)
+{
+	return atomic64_inc_and_test(v);
+}
+
+static __always_inline bool
+atomic_long_add_negative(long i, atomic_long_t *v)
+{
+	return atomic64_add_negative(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
+{
+	return atomic64_fetch_add_unless(v, a, u);
+}
+
+static __always_inline bool
+atomic_long_add_unless(atomic_long_t *v, long a, long u)
+{
+	return atomic64_add_unless(v, a, u);
+}
+
+static __always_inline bool
+atomic_long_inc_not_zero(atomic_long_t *v)
+{
+	return atomic64_inc_not_zero(v);
+}
+
+static __always_inline bool
+atomic_long_inc_unless_negative(atomic_long_t *v)
+{
+	return atomic64_inc_unless_negative(v);
+}
+
+static __always_inline bool
+atomic_long_dec_unless_positive(atomic_long_t *v)
+{
+	return atomic64_dec_unless_positive(v);
+}
+
+static __always_inline long
+atomic_long_dec_if_positive(atomic_long_t *v)
+{
+	return atomic64_dec_if_positive(v);
+}
+
+#else /* CONFIG_64BIT */
+
+static __always_inline long
+atomic_long_read(const atomic_long_t *v)
+{
+	return atomic_read(v);
+}
+
+static __always_inline long
+atomic_long_read_acquire(const atomic_long_t *v)
+{
+	return atomic_read_acquire(v);
+}
+
+static __always_inline void
+atomic_long_set(atomic_long_t *v, long i)
+{
+	atomic_set(v, i);
+}
+
+static __always_inline void
+atomic_long_set_release(atomic_long_t *v, long i)
+{
+	atomic_set_release(v, i);
+}
+
+static __always_inline void
+atomic_long_add(long i, atomic_long_t *v)
+{
+	atomic_add(i, v);
+}
+
+static __always_inline long
+atomic_long_add_return(long i, atomic_long_t *v)
+{
+	return atomic_add_return(i, v);
+}
+
+static __always_inline long
+atomic_long_add_return_acquire(long i, atomic_long_t *v)
+{
+	return atomic_add_return_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_add_return_release(long i, atomic_long_t *v)
+{
+	return atomic_add_return_release(i, v);
+}
+
+static __always_inline long
+atomic_long_add_return_relaxed(long i, atomic_long_t *v)
+{
+	return atomic_add_return_relaxed(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_add(long i, atomic_long_t *v)
+{
+	return atomic_fetch_add(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
+{
+	return atomic_fetch_add_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_add_release(long i, atomic_long_t *v)
+{
+	return atomic_fetch_add_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
+{
+	return atomic_fetch_add_relaxed(i, v);
+}
+
+static __always_inline void
+atomic_long_sub(long i, atomic_long_t *v)
+{
+	atomic_sub(i, v);
+}
+
+static __always_inline long
+atomic_long_sub_return(long i, atomic_long_t *v)
+{
+	return atomic_sub_return(i, v);
+}
+
+static __always_inline long
+atomic_long_sub_return_acquire(long i, atomic_long_t *v)
+{
+	return atomic_sub_return_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_sub_return_release(long i, atomic_long_t *v)
+{
+	return atomic_sub_return_release(i, v);
+}
+
+static __always_inline long
+atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
+{
+	return atomic_sub_return_relaxed(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_sub(long i, atomic_long_t *v)
+{
+	return atomic_fetch_sub(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
+{
+	return atomic_fetch_sub_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_sub_release(long i, atomic_long_t *v)
+{
+	return atomic_fetch_sub_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
+{
+	return atomic_fetch_sub_relaxed(i, v);
+}
+
+static __always_inline void
+atomic_long_inc(atomic_long_t *v)
+{
+	atomic_inc(v);
+}
+
+static __always_inline long
+atomic_long_inc_return(atomic_long_t *v)
+{
+	return atomic_inc_return(v);
+}
+
+static __always_inline long
+atomic_long_inc_return_acquire(atomic_long_t *v)
+{
+	return atomic_inc_return_acquire(v);
+}
+
+static __always_inline long
+atomic_long_inc_return_release(atomic_long_t *v)
+{
+	return atomic_inc_return_release(v);
+}
+
+static __always_inline long
+atomic_long_inc_return_relaxed(atomic_long_t *v)
+{
+	return atomic_inc_return_relaxed(v);
+}
+
+static __always_inline long
+atomic_long_fetch_inc(atomic_long_t *v)
+{
+	return atomic_fetch_inc(v);
+}
+
+static __always_inline long
+atomic_long_fetch_inc_acquire(atomic_long_t *v)
+{
+	return atomic_fetch_inc_acquire(v);
+}
+
+static __always_inline long
+atomic_long_fetch_inc_release(atomic_long_t *v)
+{
+	return atomic_fetch_inc_release(v);
+}
+
+static __always_inline long
+atomic_long_fetch_inc_relaxed(atomic_long_t *v)
+{
+	return atomic_fetch_inc_relaxed(v);
+}
+
+static __always_inline void
+atomic_long_dec(atomic_long_t *v)
+{
+	atomic_dec(v);
+}
+
+static __always_inline long
+atomic_long_dec_return(atomic_long_t *v)
+{
+	return atomic_dec_return(v);
+}
+
+static __always_inline long
+atomic_long_dec_return_acquire(atomic_long_t *v)
+{
+	return atomic_dec_return_acquire(v);
+}
+
+static __always_inline long
+atomic_long_dec_return_release(atomic_long_t *v)
+{
+	return atomic_dec_return_release(v);
+}
+
+static __always_inline long
+atomic_long_dec_return_relaxed(atomic_long_t *v)
+{
+	return atomic_dec_return_relaxed(v);
+}
+
+static __always_inline long
+atomic_long_fetch_dec(atomic_long_t *v)
+{
+	return atomic_fetch_dec(v);
+}
+
+static __always_inline long
+atomic_long_fetch_dec_acquire(atomic_long_t *v)
+{
+	return atomic_fetch_dec_acquire(v);
+}
+
+static __always_inline long
+atomic_long_fetch_dec_release(atomic_long_t *v)
+{
+	return atomic_fetch_dec_release(v);
+}
+
+static __always_inline long
+atomic_long_fetch_dec_relaxed(atomic_long_t *v)
+{
+	return atomic_fetch_dec_relaxed(v);
+}
+
+static __always_inline void
+atomic_long_and(long i, atomic_long_t *v)
+{
+	atomic_and(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_and(long i, atomic_long_t *v)
+{
+	return atomic_fetch_and(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
+{
+	return atomic_fetch_and_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_and_release(long i, atomic_long_t *v)
+{
+	return atomic_fetch_and_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
+{
+	return atomic_fetch_and_relaxed(i, v);
+}
+
+static __always_inline void
+atomic_long_andnot(long i, atomic_long_t *v)
+{
+	atomic_andnot(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_andnot(long i, atomic_long_t *v)
+{
+	return atomic_fetch_andnot(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
+{
+	return atomic_fetch_andnot_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
+{
+	return atomic_fetch_andnot_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
+{
+	return atomic_fetch_andnot_relaxed(i, v);
+}
+
+static __always_inline void
+atomic_long_or(long i, atomic_long_t *v)
+{
+	atomic_or(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_or(long i, atomic_long_t *v)
+{
+	return atomic_fetch_or(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
+{
+	return atomic_fetch_or_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_or_release(long i, atomic_long_t *v)
+{
+	return atomic_fetch_or_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
+{
+	return atomic_fetch_or_relaxed(i, v);
+}
+
+static __always_inline void
+atomic_long_xor(long i, atomic_long_t *v)
+{
+	atomic_xor(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_xor(long i, atomic_long_t *v)
+{
+	return atomic_fetch_xor(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
+{
+	return atomic_fetch_xor_acquire(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_xor_release(long i, atomic_long_t *v)
+{
+	return atomic_fetch_xor_release(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
+{
+	return atomic_fetch_xor_relaxed(i, v);
+}
+
+static __always_inline long
+atomic_long_xchg(atomic_long_t *v, long i)
+{
+	return atomic_xchg(v, i);
+}
+
+static __always_inline long
+atomic_long_xchg_acquire(atomic_long_t *v, long i)
+{
+	return atomic_xchg_acquire(v, i);
+}
+
+static __always_inline long
+atomic_long_xchg_release(atomic_long_t *v, long i)
+{
+	return atomic_xchg_release(v, i);
+}
+
+static __always_inline long
+atomic_long_xchg_relaxed(atomic_long_t *v, long i)
+{
+	return atomic_xchg_relaxed(v, i);
+}
+
+static __always_inline long
+atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
+{
+	return atomic_cmpxchg(v, old, new);
+}
+
+static __always_inline long
+atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
+{
+	return atomic_cmpxchg_acquire(v, old, new);
+}
+
+static __always_inline long
+atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
+{
+	return atomic_cmpxchg_release(v, old, new);
+}
+
+static __always_inline long
+atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
+{
+	return atomic_cmpxchg_relaxed(v, old, new);
+}
+
+static __always_inline bool
+atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
+{
+	return atomic_try_cmpxchg(v, (int *)old, new);
+}
+
+static __always_inline bool
+atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
+{
+	return atomic_try_cmpxchg_acquire(v, (int *)old, new);
+}
+
+static __always_inline bool
+atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
+{
+	return atomic_try_cmpxchg_release(v, (int *)old, new);
+}
+
+static __always_inline bool
+atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
+{
+	return atomic_try_cmpxchg_relaxed(v, (int *)old, new);
+}
+
+static __always_inline bool
+atomic_long_sub_and_test(long i, atomic_long_t *v)
+{
+	return atomic_sub_and_test(i, v);
+}
+
+static __always_inline bool
+atomic_long_dec_and_test(atomic_long_t *v)
+{
+	return atomic_dec_and_test(v);
+}
+
+static __always_inline bool
+atomic_long_inc_and_test(atomic_long_t *v)
+{
+	return atomic_inc_and_test(v);
+}
+
+static __always_inline bool
+atomic_long_add_negative(long i, atomic_long_t *v)
+{
+	return atomic_add_negative(i, v);
+}
+
+static __always_inline long
+atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
+{
+	return atomic_fetch_add_unless(v, a, u);
+}
+
+static __always_inline bool
+atomic_long_add_unless(atomic_long_t *v, long a, long u)
+{
+	return atomic_add_unless(v, a, u);
+}
+
+static __always_inline bool
+atomic_long_inc_not_zero(atomic_long_t *v)
+{
+	return atomic_inc_not_zero(v);
+}
+
+static __always_inline bool
+atomic_long_inc_unless_negative(atomic_long_t *v)
+{
+	return atomic_inc_unless_negative(v);
+}
+
+static __always_inline bool
+atomic_long_dec_unless_positive(atomic_long_t *v)
+{
+	return atomic_dec_unless_positive(v);
+}
+
+static __always_inline long
+atomic_long_dec_if_positive(atomic_long_t *v)
+{
+	return atomic_dec_if_positive(v);
+}
+
+#endif /* CONFIG_64BIT */
+#endif /* _ASM_GENERIC_ATOMIC_LONG_H */
+// a624200981f552b2c6be4f32fe44da8289f30d87
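
For reference, a minimal sketch of how a caller consumes the generated API (a hypothetical example, not part of the patch): atomic_long_t follows the machine word size, so the same counter code is 64 bits wide under CONFIG_64BIT and 32 bits wide elsewhere, and the try_cmpxchg variants update 'old' in place on failure, which keeps retry loops free of an explicit re-read.

/* Hypothetical caller, for illustration only -- not part of this patch. */
#include <linux/atomic.h>

static atomic_long_t total_bytes = ATOMIC_LONG_INIT(0);

/* Unconditional accounting: the void op is enough, no return value. */
static void charge_bytes(long nr)
{
	atomic_long_add(nr, &total_bytes);
}

/* Bounded accounting: charge only while the total stays under 'limit'. */
static bool try_charge_bytes(long nr, long limit)
{
	long old = atomic_long_read(&total_bytes);

	do {
		if (old + nr > limit)
			return false;
		/* on failure, try_cmpxchg reloads 'old' with the current value */
	} while (!atomic_long_try_cmpxchg(&total_bytes, &old, old + nr));

	return true;
}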