--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -1,12 +1,9 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  *  arch/arm/include/asm/atomic.h
  *
  *  Copyright (C) 1996 Russell King.
  *  Copyright (C) 2002 Deep Blue Solutions Ltd.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
  */
 #ifndef __ASM_ARM_ATOMIC_H
 #define __ASM_ARM_ATOMIC_H
@@ -17,8 +14,6 @@
 #include <linux/irqflags.h>
 #include <asm/barrier.h>
 #include <asm/cmpxchg.h>
-
-#define ATOMIC_INIT(i)	{ (i) }
 
 #ifdef __KERNEL__
 
@@ -249,15 +244,15 @@
 
 #ifndef CONFIG_GENERIC_ATOMIC64
 typedef struct {
-	long long counter;
+	s64 counter;
 } atomic64_t;
 
 #define ATOMIC64_INIT(i)	{ (i) }
 
 #ifdef CONFIG_ARM_LPAE
-static inline long long atomic64_read(const atomic64_t *v)
+static inline s64 atomic64_read(const atomic64_t *v)
 {
-	long long result;
+	s64 result;
 
 	__asm__ __volatile__("@ atomic64_read\n"
 "	ldrd	%0, %H0, [%1]"
@@ -268,7 +263,7 @@
 	return result;
 }
 
-static inline void atomic64_set(atomic64_t *v, long long i)
+static inline void atomic64_set(atomic64_t *v, s64 i)
 {
 	__asm__ __volatile__("@ atomic64_set\n"
 "	strd	%2, %H2, [%1]"
@@ -277,9 +272,9 @@
 	);
 }
 #else
-static inline long long atomic64_read(const atomic64_t *v)
+static inline s64 atomic64_read(const atomic64_t *v)
 {
-	long long result;
+	s64 result;
 
 	__asm__ __volatile__("@ atomic64_read\n"
 "	ldrexd	%0, %H0, [%1]"
@@ -290,9 +285,9 @@
 	return result;
 }
 
-static inline void atomic64_set(atomic64_t *v, long long i)
+static inline void atomic64_set(atomic64_t *v, s64 i)
 {
-	long long tmp;
+	s64 tmp;
 
 	prefetchw(&v->counter);
 	__asm__ __volatile__("@ atomic64_set\n"
@@ -307,9 +302,9 @@
 #endif
 
 #define ATOMIC64_OP(op, op1, op2)					\
-static inline void atomic64_##op(long long i, atomic64_t *v)		\
+static inline void atomic64_##op(s64 i, atomic64_t *v)			\
 {									\
-	long long result;						\
+	s64 result;							\
 	unsigned long tmp;						\
 									\
 	prefetchw(&v->counter);						\
@@ -326,10 +321,10 @@
 }									\
 
 #define ATOMIC64_OP_RETURN(op, op1, op2)				\
-static inline long long							\
-atomic64_##op##_return_relaxed(long long i, atomic64_t *v)		\
+static inline s64							\
+atomic64_##op##_return_relaxed(s64 i, atomic64_t *v)			\
 {									\
-	long long result;						\
+	s64 result;							\
 	unsigned long tmp;						\
 									\
 	prefetchw(&v->counter);						\
@@ -349,10 +344,10 @@
 }
 
 #define ATOMIC64_FETCH_OP(op, op1, op2)					\
-static inline long long							\
-atomic64_fetch_##op##_relaxed(long long i, atomic64_t *v)		\
+static inline s64							\
+atomic64_fetch_##op##_relaxed(s64 i, atomic64_t *v)			\
 {									\
-	long long result, val;						\
+	s64 result, val;						\
 	unsigned long tmp;						\
 									\
 	prefetchw(&v->counter);						\
@@ -406,10 +401,9 @@
 #undef ATOMIC64_OP_RETURN
 #undef ATOMIC64_OP
 
-static inline long long
-atomic64_cmpxchg_relaxed(atomic64_t *ptr, long long old, long long new)
+static inline s64 atomic64_cmpxchg_relaxed(atomic64_t *ptr, s64 old, s64 new)
 {
-	long long oldval;
+	s64 oldval;
 	unsigned long res;
 
 	prefetchw(&ptr->counter);
@@ -430,9 +424,9 @@
 }
 #define atomic64_cmpxchg_relaxed	atomic64_cmpxchg_relaxed
 
-static inline long long atomic64_xchg_relaxed(atomic64_t *ptr, long long new)
+static inline s64 atomic64_xchg_relaxed(atomic64_t *ptr, s64 new)
 {
-	long long result;
+	s64 result;
 	unsigned long tmp;
 
 	prefetchw(&ptr->counter);
@@ -450,9 +444,9 @@
 }
 #define atomic64_xchg_relaxed		atomic64_xchg_relaxed
 
-static inline long long atomic64_dec_if_positive(atomic64_t *v)
+static inline s64 atomic64_dec_if_positive(atomic64_t *v)
 {
-	long long result;
+	s64 result;
 	unsigned long tmp;
 
 	smp_mb();
@@ -478,10 +472,9 @@
 }
 #define atomic64_dec_if_positive atomic64_dec_if_positive
 
-static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
-						  long long u)
+static inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
 {
-	long long oldval, newval;
+	s64 oldval, newval;
 	unsigned long tmp;
 
 	smp_mb();
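
For context, and not part of the patch itself: a minimal sketch of how the atomic64_t interface touched above is used by callers. The example_counter/example_update/example_read names are hypothetical; the calls (ATOMIC64_INIT, atomic64_add, atomic64_dec_if_positive, atomic64_read) are the ones declared in this header. Since s64 is typedef'd to signed long long on 32-bit ARM, the conversion changes the spelling of the type but not the behaviour or ABI seen by callers.

#include <linux/atomic.h>
#include <linux/printk.h>
#include <linux/types.h>

/* Hypothetical 64-bit counter, used only for illustration. */
static atomic64_t example_counter = ATOMIC64_INIT(0);

static void example_update(void)
{
	/* Atomically add to the 64-bit counter. */
	atomic64_add(2, &example_counter);

	/*
	 * Decrement only if the result stays >= 0; a negative return
	 * means the counter was already zero (or negative) and was
	 * left unmodified.
	 */
	if (atomic64_dec_if_positive(&example_counter) < 0)
		pr_debug("example_counter was already depleted\n");
}

static s64 example_read(void)
{
	/* Plain atomic 64-bit load; returns s64 after this patch. */
	return atomic64_read(&example_counter);
}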