forked from ~ljy/RK356X_SDK_RELEASE

hc
2024-05-11 04dd17822334871b23ea2862f7798fb0e0007777
kernel/arch/arm/include/asm/atomic.h
@@ -1,12 +1,9 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * arch/arm/include/asm/atomic.h
  *
  * Copyright (C) 1996 Russell King.
  * Copyright (C) 2002 Deep Blue Solutions Ltd.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
  */
 #ifndef __ASM_ARM_ATOMIC_H
 #define __ASM_ARM_ATOMIC_H
@@ -17,8 +14,6 @@
 #include <linux/irqflags.h>
 #include <asm/barrier.h>
 #include <asm/cmpxchg.h>
-
-#define ATOMIC_INIT(i)	{ (i) }
 
 #ifdef __KERNEL__
 
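Note, not part of the diff: dropping ATOMIC_INIT from this arch header mirrors the upstream cleanup that keeps a single generic definition of the macro in include/linux/types.h, next to atomic_t itself, so existing initializers keep compiling. A minimal sketch, assuming a kernel build; `boot_count` and `bytes_total` are hypothetical names for illustration only:

	#include <linux/atomic.h>	/* pulls in the generic ATOMIC_INIT */

	/* hypothetical counters, not part of this patch */
	static atomic_t boot_count = ATOMIC_INIT(0);	/* generic define; no per-arch copy needed */
	static atomic64_t bytes_total = ATOMIC64_INIT(0);	/* ATOMIC64_INIT stays in this file, unchanged */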
@@ -249,15 +244,15 @@
 
 #ifndef CONFIG_GENERIC_ATOMIC64
 typedef struct {
-	long long counter;
+	s64 counter;
 } atomic64_t;
 
 #define ATOMIC64_INIT(i) { (i) }
 
 #ifdef CONFIG_ARM_LPAE
-static inline long long atomic64_read(const atomic64_t *v)
+static inline s64 atomic64_read(const atomic64_t *v)
 {
-	long long result;
+	s64 result;
 
 	__asm__ __volatile__("@ atomic64_read\n"
 "	ldrd	%0, %H0, [%1]"
@@ -268,7 +263,7 @@
 	return result;
 }
 
-static inline void atomic64_set(atomic64_t *v, long long i)
+static inline void atomic64_set(atomic64_t *v, s64 i)
 {
 	__asm__ __volatile__("@ atomic64_set\n"
 "	strd	%2, %H2, [%1]"
@@ -277,9 +272,9 @@
 	);
 }
 #else
-static inline long long atomic64_read(const atomic64_t *v)
+static inline s64 atomic64_read(const atomic64_t *v)
 {
-	long long result;
+	s64 result;
 
 	__asm__ __volatile__("@ atomic64_read\n"
 "	ldrexd	%0, %H0, [%1]"
@@ -290,9 +285,9 @@
 	return result;
 }
 
-static inline void atomic64_set(atomic64_t *v, long long i)
+static inline void atomic64_set(atomic64_t *v, s64 i)
 {
-	long long tmp;
+	s64 tmp;
 
 	prefetchw(&v->counter);
 	__asm__ __volatile__("@ atomic64_set\n"
@@ -307,9 +302,9 @@
 #endif
 
 #define ATOMIC64_OP(op, op1, op2)					\
-static inline void atomic64_##op(long long i, atomic64_t *v)		\
+static inline void atomic64_##op(s64 i, atomic64_t *v)			\
 {									\
-	long long result;						\
+	s64 result;							\
 	unsigned long tmp;						\
 									\
 	prefetchw(&v->counter);						\
@@ -326,10 +321,10 @@
 }									\
 
 #define ATOMIC64_OP_RETURN(op, op1, op2)				\
-static inline long long						\
-atomic64_##op##_return_relaxed(long long i, atomic64_t *v)		\
+static inline s64							\
+atomic64_##op##_return_relaxed(s64 i, atomic64_t *v)			\
 {									\
-	long long result;						\
+	s64 result;							\
 	unsigned long tmp;						\
 									\
 	prefetchw(&v->counter);						\
@@ -349,10 +344,10 @@
 }
 
 #define ATOMIC64_FETCH_OP(op, op1, op2)					\
-static inline long long						\
-atomic64_fetch_##op##_relaxed(long long i, atomic64_t *v)		\
+static inline s64							\
+atomic64_fetch_##op##_relaxed(s64 i, atomic64_t *v)			\
 {									\
-	long long result, val;						\
+	s64 result, val;						\
 	unsigned long tmp;						\
 									\
 	prefetchw(&v->counter);						\
@@ -406,10 +401,9 @@
 #undef ATOMIC64_OP_RETURN
 #undef ATOMIC64_OP
 
-static inline long long
-atomic64_cmpxchg_relaxed(atomic64_t *ptr, long long old, long long new)
+static inline s64 atomic64_cmpxchg_relaxed(atomic64_t *ptr, s64 old, s64 new)
 {
-	long long oldval;
+	s64 oldval;
 	unsigned long res;
 
 	prefetchw(&ptr->counter);
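Note, not part of the diff: the hunk above only narrows the signature from long long to s64; the compare-and-swap semantics are untouched. A minimal usage sketch against atomic64_cmpxchg(), the fully ordered wrapper the kernel generates over the _relaxed variant shown here; `max_seen` and `update_max` are hypothetical names:

	static atomic64_t max_seen = ATOMIC64_INIT(0);

	/* Lock-free running maximum: retry until our value is installed
	 * or a concurrent writer proves it stale. */
	static void update_max(s64 candidate)
	{
		s64 cur = atomic64_read(&max_seen);

		while (candidate > cur) {
			s64 old = atomic64_cmpxchg(&max_seen, cur, candidate);

			if (old == cur)
				break;		/* our value went in */
			cur = old;		/* raced; re-check against the winner */
		}
	}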
@@ -430,9 +424,9 @@
 }
 #define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
 
-static inline long long atomic64_xchg_relaxed(atomic64_t *ptr, long long new)
+static inline s64 atomic64_xchg_relaxed(atomic64_t *ptr, s64 new)
 {
-	long long result;
+	s64 result;
 	unsigned long tmp;
 
 	prefetchw(&ptr->counter);
@@ -450,9 +444,9 @@
 }
 #define atomic64_xchg_relaxed atomic64_xchg_relaxed
 
-static inline long long atomic64_dec_if_positive(atomic64_t *v)
+static inline s64 atomic64_dec_if_positive(atomic64_t *v)
 {
-	long long result;
+	s64 result;
 	unsigned long tmp;
 
 	smp_mb();
@@ -478,10 +472,9 @@
 }
 #define atomic64_dec_if_positive atomic64_dec_if_positive
 
-static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
-						  long long u)
+static inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
 {
-	long long oldval, newval;
+	s64 oldval, newval;
 	unsigned long tmp;
 
 	smp_mb();
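Note, not part of the diff (which is truncated above, partway through atomic64_fetch_add_unless()): a minimal sketch of the s64-typed API this patch settles on, assuming a kernel module context; `stats`, `record_sample`, and `drain` are hypothetical names:

	#include <linux/atomic.h>
	#include <linux/types.h>

	static atomic64_t stats = ATOMIC64_INIT(0);

	static void record_sample(s64 sample)
	{
		atomic64_add(sample, &stats);	/* takes s64 now, not long long */
	}

	static s64 drain(void)
	{
		return atomic64_xchg(&stats, 0);	/* returns the old s64 value atomically */
	}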