@@ -19,7 +19,6 @@
 #include <asm/barrier.h>
 
 
-#define ATOMIC_INIT(i)		{ (i) }
 #define ATOMIC64_INIT(i)	{ (i) }
 
 #define atomic_read(v)		READ_ONCE((v)->counter)
@@ -124,10 +123,10 @@
 #undef ATOMIC_OP
 
 #define ATOMIC64_OP(op, c_op)						\
-static __inline__ long							\
-ia64_atomic64_##op (__s64 i, atomic64_t *v)				\
+static __inline__ s64							\
+ia64_atomic64_##op (s64 i, atomic64_t *v)				\
 {									\
-	__s64 old, new;							\
+	s64 old, new;							\
 	CMPXCHG_BUGCHECK_DECL						\
 									\
 	do {								\
@@ -139,10 +138,10 @@
 }
 
 #define ATOMIC64_FETCH_OP(op, c_op)					\
-static __inline__ long							\
-ia64_atomic64_fetch_##op (__s64 i, atomic64_t *v)			\
+static __inline__ s64							\
+ia64_atomic64_fetch_##op (s64 i, atomic64_t *v)				\
 {									\
-	__s64 old, new;							\
+	s64 old, new;							\
 	CMPXCHG_BUGCHECK_DECL						\
 									\
 	do {								\
@@ -162,7 +161,7 @@
 
 #define atomic64_add_return(i,v)					\
 ({									\
-	long __ia64_aar_i = (i);					\
+	s64 __ia64_aar_i = (i);						\
 	__ia64_atomic_const(i)						\
 		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
 		: ia64_atomic64_add(__ia64_aar_i, v);			\
@@ -170,7 +169,7 @@
 
 #define atomic64_sub_return(i,v)					\
 ({									\
-	long __ia64_asr_i = (i);					\
+	s64 __ia64_asr_i = (i);						\
 	__ia64_atomic_const(i)						\
 		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
 		: ia64_atomic64_sub(__ia64_asr_i, v);			\
@@ -178,7 +177,7 @@
 
 #define atomic64_fetch_add(i,v)						\
 ({									\
-	long __ia64_aar_i = (i);					\
+	s64 __ia64_aar_i = (i);						\
 	__ia64_atomic_const(i)						\
 		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
 		: ia64_atomic64_fetch_add(__ia64_aar_i, v);		\
@@ -186,7 +185,7 @@
 
 #define atomic64_fetch_sub(i,v)						\
 ({									\
-	long __ia64_asr_i = (i);					\
+	s64 __ia64_asr_i = (i);						\
 	__ia64_atomic_const(i)						\
 		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
 		: ia64_atomic64_fetch_sub(__ia64_asr_i, v);		\
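
For illustration, here is roughly what ATOMIC64_OP(add, +) generates once the type change above is applied. The body of the do/while loop is collapsed in the hunk, so the read/compute/retry sequence shown below is a sketch inferred from the visible old/new/CMPXCHG_BUGCHECK_DECL declarations, not quoted source:

/*
 * Sketch only: the loop body is elided in the hunk above, so the
 * CMPXCHG_BUGCHECK() call and the ia64_cmpxchg() retry condition are
 * assumed, following the usual compare-and-exchange pattern.
 */
static __inline__ s64
ia64_atomic64_add (s64 i, atomic64_t *v)
{
	s64 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);		/* assumed debug check on v */
		old = atomic64_read(v);		/* snapshot current counter */
		new = old + i;			/* c_op instantiated as '+' */
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old);
	return new;
}

Note that the callers above only take this cmpxchg path for non-constant operands: atomic64_add_return() tests __ia64_atomic_const(i) and uses the ia64_fetch_and_add() fast path when i is a suitable compile-time constant, falling back to ia64_atomic64_add() otherwise.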