/*
 * Copyright IBM Corp. 1999, 2009
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *	      Arnd Bergmann <arndb@de.ibm.com>,
 *
 * Atomic operations that C can't guarantee us.
 * Useful for resource counting etc.
 * s390 uses 'Compare And Swap' for atomicity in SMP environment.
 */

#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)	{ (i) }

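/*
 * Example (illustrative, hypothetical name): a counter is declared
 * and statically initialized with ATOMIC_INIT:
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 */
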
#define __ATOMIC_NO_BARRIER	"\n"

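/*
 * Two implementations follow. Machines with the z196
 * interlocked-access facility can perform an atomic OR/AND/ADD/XOR
 * with a single load-and-<op> instruction; older machines fall back
 * to a compare-and-swap retry loop.
 */
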
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

#define __ATOMIC_OR	"lao"
#define __ATOMIC_AND	"lan"
#define __ATOMIC_ADD	"laa"
#define __ATOMIC_XOR	"lax"
#define __ATOMIC_BARRIER "bcr	14,0\n"

#define __ATOMIC_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	int old_val;							\
									\
	typecheck(atomic_t *, ptr);					\
	asm volatile(							\
		op_string "	%0,%2,%1\n"				\
		__barrier						\
		: "=d" (old_val), "+Q" ((ptr)->counter)		\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

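/*
 * Pre-z196 fallback: apply the operation to a private copy of the
 * old value and publish the result with COMPARE AND SWAP (cs),
 * retrying until no other CPU has changed the counter in between.
 */
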
#define __ATOMIC_OR	"or"
#define __ATOMIC_AND	"nr"
#define __ATOMIC_ADD	"ar"
#define __ATOMIC_XOR	"xr"
#define __ATOMIC_BARRIER "\n"

#define __ATOMIC_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	int old_val, new_val;						\
									\
	typecheck(atomic_t *, ptr);					\
	asm volatile(							\
		"	l	%0,%2\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	cs	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

static inline int atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		"	l	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic_set(atomic_t *v, int i)
{
	asm volatile(
		"	st	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}

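/*
 * The value-returning arithmetic ops below imply a full memory
 * barrier: on z196 this is the "bcr 14,0" fast-BCR-serialization
 * encoded in __ATOMIC_BARRIER; on older machines the cs instruction
 * itself serializes, so no extra barrier is needed.
 */
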
static inline int atomic_add_return(int i, atomic_t *v)
{
	return __ATOMIC_LOOP(v, i, __ATOMIC_ADD, __ATOMIC_BARRIER) + i;
}

static inline int atomic_fetch_add(int i, atomic_t *v)
{
	return __ATOMIC_LOOP(v, i, __ATOMIC_ADD, __ATOMIC_BARRIER);
}

static inline void atomic_add(int i, atomic_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	/* Constants that fit a signed byte can use ADD IMMEDIATE (asi). */
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		asm volatile(
			"asi	%0,%1\n"
			: "+Q" (v->counter)
			: "i" (i)
			: "cc", "memory");
		return;
	}
#endif
	__ATOMIC_LOOP(v, i, __ATOMIC_ADD, __ATOMIC_NO_BARRIER);
}

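/*
 * Example (illustrative): because 1 is a compile-time constant that
 * fits a signed byte, atomic_add(1, &v) compiles to a single
 * storage-immediate instruction on z196, roughly
 *
 *	asi	0(%r1),1
 *
 * instead of a load-and-add or a cs loop.
 */
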
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)
#define atomic_sub(_i, _v)		atomic_add(-(int)(_i), _v)
#define atomic_sub_return(_i, _v)	atomic_add_return(-(int)(_i), _v)
#define atomic_fetch_sub(_i, _v)	atomic_fetch_add(-(int)(_i), _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)

#define ATOMIC_OPS(op, OP)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	__ATOMIC_LOOP(v, i, __ATOMIC_##OP, __ATOMIC_NO_BARRIER);	\
}									\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	return __ATOMIC_LOOP(v, i, __ATOMIC_##OP, __ATOMIC_BARRIER);	\
}

ATOMIC_OPS(and, AND)
ATOMIC_OPS(or, OR)
ATOMIC_OPS(xor, XOR)

#undef ATOMIC_OPS

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "+Q" (v->counter)
		: "d" (new)
		: "cc", "memory");
	return old;
}

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

#undef __ATOMIC_LOOP

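/*
 * The atomic64_t operations below mirror the 32-bit ones, using the
 * 64-bit instruction forms: lg/stg for read/set, the "g"-suffixed
 * load-and-<op> instructions on z196, and a csg-based retry loop on
 * older machines.
 */
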
#define ATOMIC64_INIT(i)	{ (i) }

#define __ATOMIC64_NO_BARRIER	"\n"

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

#define __ATOMIC64_OR	"laog"
#define __ATOMIC64_AND	"lang"
#define __ATOMIC64_ADD	"laag"
#define __ATOMIC64_XOR	"laxg"
#define __ATOMIC64_BARRIER "bcr	14,0\n"

#define __ATOMIC64_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	long long old_val;						\
									\
	typecheck(atomic64_t *, ptr);					\
	asm volatile(							\
		op_string "	%0,%2,%1\n"				\
		__barrier						\
		: "=d" (old_val), "+Q" ((ptr)->counter)		\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define __ATOMIC64_OR	"ogr"
#define __ATOMIC64_AND	"ngr"
#define __ATOMIC64_ADD	"agr"
#define __ATOMIC64_XOR	"xgr"
#define __ATOMIC64_BARRIER "\n"

#define __ATOMIC64_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	long long old_val, new_val;					\
									\
	typecheck(atomic64_t *, ptr);					\
	asm volatile(							\
		"	lg	%0,%2\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	csg	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

static inline long long atomic64_read(const atomic64_t *v)
{
	long long c;

	asm volatile(
		"	lg	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	asm volatile(
		"	stg	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}

static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	return __ATOMIC64_LOOP(v, i, __ATOMIC64_ADD, __ATOMIC64_BARRIER) + i;
}

static inline long long atomic64_fetch_add(long long i, atomic64_t *v)
{
	return __ATOMIC64_LOOP(v, i, __ATOMIC64_ADD, __ATOMIC64_BARRIER);
}

static inline void atomic64_add(long long i, atomic64_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	/* Constants that fit a signed byte can use ADD IMMEDIATE (agsi). */
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		asm volatile(
			"agsi	%0,%1\n"
			: "+Q" (v->counter)
			: "i" (i)
			: "cc", "memory");
		return;
	}
#endif
	__ATOMIC64_LOOP(v, i, __ATOMIC64_ADD, __ATOMIC64_NO_BARRIER);
}

#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static inline long long atomic64_cmpxchg(atomic64_t *v,
					 long long old, long long new)
{
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "+Q" (v->counter)
		: "d" (new)
		: "cc", "memory");
	return old;
}

#define ATOMIC64_OPS(op, OP)						\
static inline void atomic64_##op(long i, atomic64_t *v)		\
{									\
	__ATOMIC64_LOOP(v, i, __ATOMIC64_##OP, __ATOMIC64_NO_BARRIER);	\
}									\
static inline long atomic64_fetch_##op(long i, atomic64_t *v)		\
{									\
	return __ATOMIC64_LOOP(v, i, __ATOMIC64_##OP, __ATOMIC64_BARRIER);	\
}

ATOMIC64_OPS(and, AND)
ATOMIC64_OPS(or, OR)
ATOMIC64_OPS(xor, XOR)

#undef ATOMIC64_OPS
#undef __ATOMIC64_LOOP

static inline int atomic64_add_unless(atomic64_t *v, long long i, long long u)
{
	long long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + i);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long c, old, dec;

	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}

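/*
 * Example (illustrative): with the counter at 1,
 * atomic64_dec_if_positive() stores 0 and returns 0; with the
 * counter already at 0 it leaves the value untouched and returns -1.
 */
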
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
#define atomic64_sub_return(_i, _v)	atomic64_add_return(-(long long)(_i), _v)
#define atomic64_fetch_sub(_i, _v)	atomic64_fetch_add(-(long long)(_i), _v)
#define atomic64_sub(_i, _v)		atomic64_add(-(long long)(_i), _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)

#endif /* __ARCH_S390_ATOMIC__ */