@@ -1,21 +1,10 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * Based on arch/arm/include/asm/atomic.h
  *
  * Copyright (C) 1996 Russell King.
  * Copyright (C) 2002 Deep Blue Solutions Ltd.
  * Copyright (C) 2012 ARM Ltd.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
  */
 #ifndef __ASM_ATOMIC_H
 #define __ASM_ATOMIC_H
@@ -24,142 +13,216 @@
 #include <linux/types.h>

 #include <asm/barrier.h>
+#include <asm/cmpxchg.h>
 #include <asm/lse.h>

-#ifdef __KERNEL__
+#define ATOMIC_OP(op) \
+static __always_inline void arch_##op(int i, atomic_t *v) \
+{ \
+        __lse_ll_sc_body(op, i, v); \
+}

-#define __ARM64_IN_ATOMIC_IMPL
+ATOMIC_OP(atomic_andnot)
+ATOMIC_OP(atomic_or)
+ATOMIC_OP(atomic_xor)
+ATOMIC_OP(atomic_add)
+ATOMIC_OP(atomic_and)
+ATOMIC_OP(atomic_sub)

-#if defined(CONFIG_ARM64_LSE_ATOMICS) && defined(CONFIG_AS_LSE)
-#include <asm/atomic_lse.h>
-#else
-#include <asm/atomic_ll_sc.h>
-#endif
+#undef ATOMIC_OP

-#undef __ARM64_IN_ATOMIC_IMPL
+#define ATOMIC_FETCH_OP(name, op) \
+static __always_inline int arch_##op##name(int i, atomic_t *v) \
+{ \
+        return __lse_ll_sc_body(op##name, i, v); \
+}

-#include <asm/cmpxchg.h>
+#define ATOMIC_FETCH_OPS(op) \
+        ATOMIC_FETCH_OP(_relaxed, op) \
+        ATOMIC_FETCH_OP(_acquire, op) \
+        ATOMIC_FETCH_OP(_release, op) \
+        ATOMIC_FETCH_OP( , op)

-#define ATOMIC_INIT(i) { (i) }
+ATOMIC_FETCH_OPS(atomic_fetch_andnot)
+ATOMIC_FETCH_OPS(atomic_fetch_or)
+ATOMIC_FETCH_OPS(atomic_fetch_xor)
+ATOMIC_FETCH_OPS(atomic_fetch_add)
+ATOMIC_FETCH_OPS(atomic_fetch_and)
+ATOMIC_FETCH_OPS(atomic_fetch_sub)
+ATOMIC_FETCH_OPS(atomic_add_return)
+ATOMIC_FETCH_OPS(atomic_sub_return)

-#define atomic_read(v)			READ_ONCE((v)->counter)
-#define atomic_set(v, i)		WRITE_ONCE(((v)->counter), (i))
+#undef ATOMIC_FETCH_OP
+#undef ATOMIC_FETCH_OPS

-#define atomic_add_return_relaxed	atomic_add_return_relaxed
-#define atomic_add_return_acquire	atomic_add_return_acquire
-#define atomic_add_return_release	atomic_add_return_release
-#define atomic_add_return		atomic_add_return
+#define ATOMIC64_OP(op) \
+static __always_inline void arch_##op(long i, atomic64_t *v) \
+{ \
+        __lse_ll_sc_body(op, i, v); \
+}

-#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
-#define atomic_sub_return_acquire	atomic_sub_return_acquire
-#define atomic_sub_return_release	atomic_sub_return_release
-#define atomic_sub_return		atomic_sub_return
+ATOMIC64_OP(atomic64_andnot)
+ATOMIC64_OP(atomic64_or)
+ATOMIC64_OP(atomic64_xor)
+ATOMIC64_OP(atomic64_add)
+ATOMIC64_OP(atomic64_and)
+ATOMIC64_OP(atomic64_sub)

-#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
-#define atomic_fetch_add_acquire	atomic_fetch_add_acquire
-#define atomic_fetch_add_release	atomic_fetch_add_release
-#define atomic_fetch_add		atomic_fetch_add
+#undef ATOMIC64_OP

-#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed
-#define atomic_fetch_sub_acquire	atomic_fetch_sub_acquire
-#define atomic_fetch_sub_release	atomic_fetch_sub_release
-#define atomic_fetch_sub		atomic_fetch_sub
+#define ATOMIC64_FETCH_OP(name, op) \
+static __always_inline long arch_##op##name(long i, atomic64_t *v) \
+{ \
+        return __lse_ll_sc_body(op##name, i, v); \
+}

-#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
-#define atomic_fetch_and_acquire	atomic_fetch_and_acquire
-#define atomic_fetch_and_release	atomic_fetch_and_release
-#define atomic_fetch_and		atomic_fetch_and
+#define ATOMIC64_FETCH_OPS(op) \
+        ATOMIC64_FETCH_OP(_relaxed, op) \
+        ATOMIC64_FETCH_OP(_acquire, op) \
+        ATOMIC64_FETCH_OP(_release, op) \
+        ATOMIC64_FETCH_OP( , op)

-#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot_relaxed
-#define atomic_fetch_andnot_acquire	atomic_fetch_andnot_acquire
-#define atomic_fetch_andnot_release	atomic_fetch_andnot_release
-#define atomic_fetch_andnot		atomic_fetch_andnot
+ATOMIC64_FETCH_OPS(atomic64_fetch_andnot)
+ATOMIC64_FETCH_OPS(atomic64_fetch_or)
+ATOMIC64_FETCH_OPS(atomic64_fetch_xor)
+ATOMIC64_FETCH_OPS(atomic64_fetch_add)
+ATOMIC64_FETCH_OPS(atomic64_fetch_and)
+ATOMIC64_FETCH_OPS(atomic64_fetch_sub)
+ATOMIC64_FETCH_OPS(atomic64_add_return)
+ATOMIC64_FETCH_OPS(atomic64_sub_return)

-#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
-#define atomic_fetch_or_acquire		atomic_fetch_or_acquire
-#define atomic_fetch_or_release		atomic_fetch_or_release
-#define atomic_fetch_or			atomic_fetch_or
+#undef ATOMIC64_FETCH_OP
+#undef ATOMIC64_FETCH_OPS

-#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed
-#define atomic_fetch_xor_acquire	atomic_fetch_xor_acquire
-#define atomic_fetch_xor_release	atomic_fetch_xor_release
-#define atomic_fetch_xor		atomic_fetch_xor
+static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
+{
+        return __lse_ll_sc_body(atomic64_dec_if_positive, v);
+}

-#define atomic_xchg_relaxed(v, new)	xchg_relaxed(&((v)->counter), (new))
-#define atomic_xchg_acquire(v, new)	xchg_acquire(&((v)->counter), (new))
-#define atomic_xchg_release(v, new)	xchg_release(&((v)->counter), (new))
-#define atomic_xchg(v, new)		xchg(&((v)->counter), (new))
+#define arch_atomic_read(v)		__READ_ONCE((v)->counter)
+#define arch_atomic_set(v, i)		__WRITE_ONCE(((v)->counter), (i))

-#define atomic_cmpxchg_relaxed(v, old, new) \
-        cmpxchg_relaxed(&((v)->counter), (old), (new))
-#define atomic_cmpxchg_acquire(v, old, new) \
-        cmpxchg_acquire(&((v)->counter), (old), (new))
-#define atomic_cmpxchg_release(v, old, new) \
-        cmpxchg_release(&((v)->counter), (old), (new))
-#define atomic_cmpxchg(v, old, new)	cmpxchg(&((v)->counter), (old), (new))
+#define arch_atomic_add_return_relaxed		arch_atomic_add_return_relaxed
+#define arch_atomic_add_return_acquire		arch_atomic_add_return_acquire
+#define arch_atomic_add_return_release		arch_atomic_add_return_release
+#define arch_atomic_add_return			arch_atomic_add_return

-#define atomic_andnot			atomic_andnot
+#define arch_atomic_sub_return_relaxed		arch_atomic_sub_return_relaxed
+#define arch_atomic_sub_return_acquire		arch_atomic_sub_return_acquire
+#define arch_atomic_sub_return_release		arch_atomic_sub_return_release
+#define arch_atomic_sub_return			arch_atomic_sub_return
+
+#define arch_atomic_fetch_add_relaxed		arch_atomic_fetch_add_relaxed
+#define arch_atomic_fetch_add_acquire		arch_atomic_fetch_add_acquire
+#define arch_atomic_fetch_add_release		arch_atomic_fetch_add_release
+#define arch_atomic_fetch_add			arch_atomic_fetch_add
+
+#define arch_atomic_fetch_sub_relaxed		arch_atomic_fetch_sub_relaxed
+#define arch_atomic_fetch_sub_acquire		arch_atomic_fetch_sub_acquire
+#define arch_atomic_fetch_sub_release		arch_atomic_fetch_sub_release
+#define arch_atomic_fetch_sub			arch_atomic_fetch_sub
+
+#define arch_atomic_fetch_and_relaxed		arch_atomic_fetch_and_relaxed
+#define arch_atomic_fetch_and_acquire		arch_atomic_fetch_and_acquire
+#define arch_atomic_fetch_and_release		arch_atomic_fetch_and_release
+#define arch_atomic_fetch_and			arch_atomic_fetch_and
+
+#define arch_atomic_fetch_andnot_relaxed	arch_atomic_fetch_andnot_relaxed
+#define arch_atomic_fetch_andnot_acquire	arch_atomic_fetch_andnot_acquire
+#define arch_atomic_fetch_andnot_release	arch_atomic_fetch_andnot_release
+#define arch_atomic_fetch_andnot		arch_atomic_fetch_andnot
+
+#define arch_atomic_fetch_or_relaxed		arch_atomic_fetch_or_relaxed
+#define arch_atomic_fetch_or_acquire		arch_atomic_fetch_or_acquire
+#define arch_atomic_fetch_or_release		arch_atomic_fetch_or_release
+#define arch_atomic_fetch_or			arch_atomic_fetch_or
+
+#define arch_atomic_fetch_xor_relaxed		arch_atomic_fetch_xor_relaxed
+#define arch_atomic_fetch_xor_acquire		arch_atomic_fetch_xor_acquire
+#define arch_atomic_fetch_xor_release		arch_atomic_fetch_xor_release
+#define arch_atomic_fetch_xor			arch_atomic_fetch_xor
+
+#define arch_atomic_xchg_relaxed(v, new) \
+        arch_xchg_relaxed(&((v)->counter), (new))
+#define arch_atomic_xchg_acquire(v, new) \
+        arch_xchg_acquire(&((v)->counter), (new))
+#define arch_atomic_xchg_release(v, new) \
+        arch_xchg_release(&((v)->counter), (new))
+#define arch_atomic_xchg(v, new) \
+        arch_xchg(&((v)->counter), (new))
+
+#define arch_atomic_cmpxchg_relaxed(v, old, new) \
+        arch_cmpxchg_relaxed(&((v)->counter), (old), (new))
+#define arch_atomic_cmpxchg_acquire(v, old, new) \
+        arch_cmpxchg_acquire(&((v)->counter), (old), (new))
+#define arch_atomic_cmpxchg_release(v, old, new) \
+        arch_cmpxchg_release(&((v)->counter), (old), (new))
+#define arch_atomic_cmpxchg(v, old, new) \
+        arch_cmpxchg(&((v)->counter), (old), (new))
+
+#define arch_atomic_andnot			arch_atomic_andnot

 /*
- * 64-bit atomic operations.
+ * 64-bit arch_atomic operations.
  */
-#define ATOMIC64_INIT			ATOMIC_INIT
-#define atomic64_read			atomic_read
-#define atomic64_set			atomic_set
+#define ATOMIC64_INIT				ATOMIC_INIT
+#define arch_atomic64_read			arch_atomic_read
+#define arch_atomic64_set			arch_atomic_set

-#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
-#define atomic64_add_return_acquire	atomic64_add_return_acquire
-#define atomic64_add_return_release	atomic64_add_return_release
-#define atomic64_add_return		atomic64_add_return
+#define arch_atomic64_add_return_relaxed	arch_atomic64_add_return_relaxed
+#define arch_atomic64_add_return_acquire	arch_atomic64_add_return_acquire
+#define arch_atomic64_add_return_release	arch_atomic64_add_return_release
+#define arch_atomic64_add_return		arch_atomic64_add_return

-#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
-#define atomic64_sub_return_acquire	atomic64_sub_return_acquire
-#define atomic64_sub_return_release	atomic64_sub_return_release
-#define atomic64_sub_return		atomic64_sub_return
+#define arch_atomic64_sub_return_relaxed	arch_atomic64_sub_return_relaxed
+#define arch_atomic64_sub_return_acquire	arch_atomic64_sub_return_acquire
+#define arch_atomic64_sub_return_release	arch_atomic64_sub_return_release
+#define arch_atomic64_sub_return		arch_atomic64_sub_return

-#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
-#define atomic64_fetch_add_acquire	atomic64_fetch_add_acquire
-#define atomic64_fetch_add_release	atomic64_fetch_add_release
-#define atomic64_fetch_add		atomic64_fetch_add
+#define arch_atomic64_fetch_add_relaxed		arch_atomic64_fetch_add_relaxed
+#define arch_atomic64_fetch_add_acquire		arch_atomic64_fetch_add_acquire
+#define arch_atomic64_fetch_add_release		arch_atomic64_fetch_add_release
+#define arch_atomic64_fetch_add			arch_atomic64_fetch_add

-#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed
-#define atomic64_fetch_sub_acquire	atomic64_fetch_sub_acquire
-#define atomic64_fetch_sub_release	atomic64_fetch_sub_release
-#define atomic64_fetch_sub		atomic64_fetch_sub
+#define arch_atomic64_fetch_sub_relaxed		arch_atomic64_fetch_sub_relaxed
+#define arch_atomic64_fetch_sub_acquire		arch_atomic64_fetch_sub_acquire
+#define arch_atomic64_fetch_sub_release		arch_atomic64_fetch_sub_release
+#define arch_atomic64_fetch_sub			arch_atomic64_fetch_sub

-#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
-#define atomic64_fetch_and_acquire	atomic64_fetch_and_acquire
-#define atomic64_fetch_and_release	atomic64_fetch_and_release
-#define atomic64_fetch_and		atomic64_fetch_and
+#define arch_atomic64_fetch_and_relaxed		arch_atomic64_fetch_and_relaxed
+#define arch_atomic64_fetch_and_acquire		arch_atomic64_fetch_and_acquire
+#define arch_atomic64_fetch_and_release		arch_atomic64_fetch_and_release
+#define arch_atomic64_fetch_and			arch_atomic64_fetch_and

-#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot_relaxed
-#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot_acquire
-#define atomic64_fetch_andnot_release	atomic64_fetch_andnot_release
-#define atomic64_fetch_andnot		atomic64_fetch_andnot
+#define arch_atomic64_fetch_andnot_relaxed	arch_atomic64_fetch_andnot_relaxed
+#define arch_atomic64_fetch_andnot_acquire	arch_atomic64_fetch_andnot_acquire
+#define arch_atomic64_fetch_andnot_release	arch_atomic64_fetch_andnot_release
+#define arch_atomic64_fetch_andnot		arch_atomic64_fetch_andnot

-#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
-#define atomic64_fetch_or_acquire	atomic64_fetch_or_acquire
-#define atomic64_fetch_or_release	atomic64_fetch_or_release
-#define atomic64_fetch_or		atomic64_fetch_or
+#define arch_atomic64_fetch_or_relaxed		arch_atomic64_fetch_or_relaxed
+#define arch_atomic64_fetch_or_acquire		arch_atomic64_fetch_or_acquire
+#define arch_atomic64_fetch_or_release		arch_atomic64_fetch_or_release
+#define arch_atomic64_fetch_or			arch_atomic64_fetch_or

-#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed
-#define atomic64_fetch_xor_acquire	atomic64_fetch_xor_acquire
-#define atomic64_fetch_xor_release	atomic64_fetch_xor_release
-#define atomic64_fetch_xor		atomic64_fetch_xor
+#define arch_atomic64_fetch_xor_relaxed		arch_atomic64_fetch_xor_relaxed
+#define arch_atomic64_fetch_xor_acquire		arch_atomic64_fetch_xor_acquire
+#define arch_atomic64_fetch_xor_release		arch_atomic64_fetch_xor_release
+#define arch_atomic64_fetch_xor			arch_atomic64_fetch_xor

-#define atomic64_xchg_relaxed		atomic_xchg_relaxed
-#define atomic64_xchg_acquire		atomic_xchg_acquire
-#define atomic64_xchg_release		atomic_xchg_release
-#define atomic64_xchg			atomic_xchg
+#define arch_atomic64_xchg_relaxed		arch_atomic_xchg_relaxed
+#define arch_atomic64_xchg_acquire		arch_atomic_xchg_acquire
+#define arch_atomic64_xchg_release		arch_atomic_xchg_release
+#define arch_atomic64_xchg			arch_atomic_xchg

-#define atomic64_cmpxchg_relaxed	atomic_cmpxchg_relaxed
-#define atomic64_cmpxchg_acquire	atomic_cmpxchg_acquire
-#define atomic64_cmpxchg_release	atomic_cmpxchg_release
-#define atomic64_cmpxchg		atomic_cmpxchg
+#define arch_atomic64_cmpxchg_relaxed		arch_atomic_cmpxchg_relaxed
+#define arch_atomic64_cmpxchg_acquire		arch_atomic_cmpxchg_acquire
+#define arch_atomic64_cmpxchg_release		arch_atomic_cmpxchg_release
+#define arch_atomic64_cmpxchg			arch_atomic_cmpxchg

-#define atomic64_andnot			atomic64_andnot
+#define arch_atomic64_andnot			arch_atomic64_andnot

-#define atomic64_dec_if_positive	atomic64_dec_if_positive
+#define arch_atomic64_dec_if_positive		arch_atomic64_dec_if_positive

-#endif
-#endif
+#define ARCH_ATOMIC
+
+#endif /* __ASM_ATOMIC_H */
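
For illustration, here is a minimal sketch of what one instantiation of the generator macros above produces once the preprocessor runs; it is derived from ATOMIC_OP(atomic_add) and ATOMIC_FETCH_OP(_acquire, atomic_fetch_add) in the diff and is not additional API. The sketch assumes, per the <asm/lse.h> include, that __lse_ll_sc_body() dispatches the named operation to either the LSE or the LL/SC back end.

/*
 * Illustrative expansion only (not part of the patch):
 * ATOMIC_OP(atomic_add) pastes arch_##op into arch_atomic_add, and
 * ATOMIC_FETCH_OP(_acquire, atomic_fetch_add) pastes op##name into
 * arch_atomic_fetch_add_acquire. __lse_ll_sc_body() is assumed to be
 * provided by <asm/lse.h> and to pick the LSE or LL/SC implementation.
 */
static __always_inline void arch_atomic_add(int i, atomic_t *v)
{
        __lse_ll_sc_body(atomic_add, i, v);
}

static __always_inline int arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
        return __lse_ll_sc_body(atomic_fetch_add_acquire, i, v);
}

With ARCH_ATOMIC defined at the end of the header, the generic atomic layer is expected to wrap these arch_atomic_*() operations in the instrumented atomic_*() API used by the rest of the kernel; that wiring lives outside this diff.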