#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }
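
/*
 * Illustrative only (nr_users is a made-up name): static
 * initialization with these macros:
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 */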

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v,i)		WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v,i)	WRITE_ONCE(((v)->counter), (i))
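
/*
 * ia64 has no atomic read-modify-write instruction that takes an
 * arbitrary operand, so the general case below is a compare-and-exchange
 * loop: sample the counter, compute the new value in a register, and
 * retry until cmpxchg.acq observes that nobody updated the counter in
 * between.
 */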

#define ATOMIC_OP(op, c_op)						\
static __inline__ int							\
ia64_atomic_##op (int i, atomic_t *v)					\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return new;							\
}
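
/*
 * ATOMIC_OP returns the value the counter was left with; ATOMIC_FETCH_OP
 * below is identical except that it returns the value the counter held
 * beforehand.  CMPXCHG_BUGCHECK is a debug hook from <asm/intrinsics.h>
 * that can flag a loop spinning suspiciously long; in non-debug
 * configurations it expands to nothing.
 */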

#define ATOMIC_FETCH_OP(op, c_op)					\
static __inline__ int							\
ia64_atomic_fetch_##op (int i, atomic_t *v)				\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return old;							\
}

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +)
ATOMIC_OPS(sub, -)
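
/*
 * The ia64 fetchadd instruction only accepts an immediate increment
 * from the set {-16, -8, -4, -1, 1, 4, 8, 16}.  When the operand is a
 * compile-time constant in that set, the macros below take the
 * single-instruction fetchadd fast path; any other value falls back to
 * the cmpxchg loops generated above.
 */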

#define atomic_add_return(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic_add(__ia64_aar_i, v);			\
})

#define atomic_sub_return(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))		\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic_sub(__ia64_asr_i, v);			\
})

#define atomic_fetch_add(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_add(__ia64_aar_i, v);		\
})

#define atomic_fetch_sub(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))		\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_sub(__ia64_asr_i, v);		\
})
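
/*
 * The bitwise operations have no fetchadd-style fast path, so only the
 * cmpxchg-loop form is generated for them.
 */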

ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

#define atomic_and(i,v)	(void)ia64_atomic_fetch_and(i,v)
#define atomic_or(i,v)	(void)ia64_atomic_fetch_or(i,v)
#define atomic_xor(i,v)	(void)ia64_atomic_fetch_xor(i,v)

#define atomic_fetch_and(i,v)	ia64_atomic_fetch_and(i,v)
#define atomic_fetch_or(i,v)	ia64_atomic_fetch_or(i,v)
#define atomic_fetch_xor(i,v)	ia64_atomic_fetch_xor(i,v)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP

#define ATOMIC64_OP(op, c_op)						\
static __inline__ long							\
ia64_atomic64_##op (__s64 i, atomic64_t *v)				\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return new;							\
}

#define ATOMIC64_FETCH_OP(op, c_op)					\
static __inline__ long							\
ia64_atomic64_fetch_##op (__s64 i, atomic64_t *v)			\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return old;							\
}

#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(add, +)
ATOMIC64_OPS(sub, -)
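
/*
 * The 64-bit fast paths below mirror the 32-bit ones: fetchadd accepts
 * the same small set of constant increments, this time applied to the
 * 64-bit counter.
 */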

#define atomic64_add_return(i,v)					\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

#define atomic64_sub_return(i,v)					\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))		\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

#define atomic64_fetch_add(i,v)						\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_add(__ia64_aar_i, v);		\
})

#define atomic64_fetch_sub(i,v)						\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))		\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_sub(__ia64_asr_i, v);		\
})

ATOMIC64_FETCH_OP(and, &)
ATOMIC64_FETCH_OP(or, |)
ATOMIC64_FETCH_OP(xor, ^)

#define atomic64_and(i,v)	(void)ia64_atomic64_fetch_and(i,v)
#define atomic64_or(i,v)	(void)ia64_atomic64_fetch_or(i,v)
#define atomic64_xor(i,v)	(void)ia64_atomic64_fetch_xor(i,v)

#define atomic64_fetch_and(i,v)	ia64_atomic64_fetch_and(i,v)
#define atomic64_fetch_or(i,v)	ia64_atomic64_fetch_or(i,v)
#define atomic64_fetch_xor(i,v)	ia64_atomic64_fetch_xor(i,v)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
	(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
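
/*
 * Illustrative only (v, ceiling, old and seen are made-up names): the
 * retry pattern these wrappers support, here clamping a counter at a
 * ceiling:
 *
 *	int old = atomic_read(&v);
 *	while (old < ceiling) {
 *		int seen = atomic_cmpxchg(&v, old, old + 1);
 *		if (seen == old)
 *			break;
 *		old = seen;
 *	}
 *
 * A return value equal to the expected value means our update won;
 * anything else is the value that beat us, which becomes the next
 * expected value.
 */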

static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
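
/*
 * __atomic_add_unless() returns the value the counter held before the
 * (possible) addition; generic code such as atomic_add_unless() turns
 * that into a boolean by comparing against @u.
 */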

static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
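
/*
 * Note the asymmetry with the 32-bit helper above: the 64-bit variant
 * folds the comparison in and returns the boolean itself.
 */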

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long c, old, dec;
	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
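
/*
 * A negative return from atomic64_dec_if_positive() means the counter
 * was not decremented: the loop breaks out before the cmpxchg when the
 * would-be result dips below zero, and returns that value unchanged.
 */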

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static __inline__ long
atomic64_add_negative (__s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)

#define atomic_add(i,v)			(void)atomic_add_return((i), (v))
#define atomic_sub(i,v)			(void)atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#define atomic64_add(i,v)		(void)atomic64_add_return((i), (v))
#define atomic64_sub(i,v)		(void)atomic64_sub_return((i), (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))
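
/*
 * Illustrative only (obj, refcnt and free_obj are made-up names): the
 * put side of a reference count built on these primitives:
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		free_obj(obj);
 *
 * dec_and_test fits here because the decrement and the zero test happen
 * in one atomic step, so exactly one CPU observes the count hit zero.
 */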

#endif /* _ASM_IA64_ATOMIC_H */