#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__

#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }

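/*
 * Usage sketch (illustrative only; 'nr_calls' is a made-up name):
 * ATOMIC_INIT() is for compile-time initialisation, atomic_set()
 * for run-time initialisation:
 *
 *	static atomic_t nr_calls = ATOMIC_INIT(0);
 */
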
static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

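/*
 * All the read-modify-write operations below are built on the
 * lwarx/stwcx. (32-bit) and ldarx/stdcx. (64-bit) load-reserve/
 * store-conditional pairs: the load sets a reservation on the word,
 * the conditional store succeeds only if the reservation still
 * holds, and the bne- retries when another CPU won the race.
 * PPC405_ERR77() expands to an erratum workaround on 405-class
 * cores and to nothing elsewhere.  The value-returning variants are
 * bracketed by PPC_ATOMIC_ENTRY_BARRIER/PPC_ATOMIC_EXIT_BARRIER to
 * give them full memory-barrier semantics.
 */
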
#define ATOMIC_OP(op, asm_op)						\
static __inline__ void atomic_##op(int a, atomic_t *v)			\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
"1:	lwarx	%0,0,%3		# atomic_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
	PPC405_ERR77(0,%3)						\
"	stwcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}

#define ATOMIC_OP_RETURN(op, asm_op)					\
static __inline__ int atomic_##op##_return(int a, atomic_t *v)		\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	lwarx	%0,0,%2		# atomic_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
	PPC405_ERR77(0,%2)						\
"	stwcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC_OPS(op, asm_op) ATOMIC_OP(op, asm_op) ATOMIC_OP_RETURN(op, asm_op)

ATOMIC_OPS(add, add)
ATOMIC_OPS(sub, subf)

ATOMIC_OP(and, and)
ATOMIC_OP(or, or)
ATOMIC_OP(xor, xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

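/*
 * For reference, ATOMIC_OPS(add, add) above generates both
 * atomic_add() (void, no barriers) and atomic_add_return() (returns
 * the new value and acts as a full barrier), e.g.:
 *
 *	atomic_add(5, &v);
 *	newval = atomic_add_return(5, &v);
 */
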
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

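/*
 * Usage sketch (illustrative; 'pending' is a made-up counter,
 * biased to -1 so the last of the N+1 users to increment it sees
 * the result hit zero):
 *
 *	if (atomic_inc_and_test(&pending))
 *		... we were last, tear down ...
 */
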
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

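/*
 * Usage sketch for atomic_cmpxchg() (illustrative; 'LIMIT' is a
 * made-up bound): the classic compare-and-swap retry loop, here
 * incrementing only while below a ceiling:
 *
 *	int old;
 *
 *	do {
 *		old = atomic_read(&v);
 *		if (old >= LIMIT)
 *			break;
 *	} while (atomic_cmpxchg(&v, old, old + 1) != old);
 */
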
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))

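/*
 * Usage sketch (illustrative; the lookup side of an RCU-style
 * pattern, all names made up):
 *
 *	rcu_read_lock();
 *	obj = lookup(id);
 *	if (obj && !atomic_inc_not_zero(&obj->refs))
 *		obj = NULL;	// lost the race with teardown
 *	rcu_read_unlock();
 */
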
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive

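/*
 * Usage sketch (illustrative; 'free_slots' is a made-up counter):
 * claim a slot only if one is left; a negative result means the
 * count was already zero and nothing was taken:
 *
 *	if (atomic_dec_if_positive(&free_slots) < 0)
 *		return -EBUSY;
 */
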
#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

#define ATOMIC64_OP(op, asm_op)						\
static __inline__ void atomic64_##op(long a, atomic64_t *v)		\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
"1:	ldarx	%0,0,%3		# atomic64_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
"	stdcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}

#define ATOMIC64_OP_RETURN(op, asm_op)					\
static __inline__ long atomic64_##op##_return(long a, atomic64_t *v)	\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	ldarx	%0,0,%2		# atomic64_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
"	stdcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC64_OPS(op, asm_op) ATOMIC64_OP(op, asm_op) ATOMIC64_OP_RETURN(op, asm_op)

ATOMIC64_OPS(add, add)
ATOMIC64_OPS(sub, subf)
ATOMIC64_OP(and, and)
ATOMIC64_OP(or, or)
ATOMIC64_OP(xor, xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

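/*
 * The atomic64_* operations mirror their 32-bit counterparts above,
 * so the usage sketches given there carry over, e.g. (illustrative,
 * 'events' is made up):
 *
 *	static atomic64_t events = ATOMIC64_INIT(-1);
 *
 *	if (atomic64_inc_and_test(&events))
 *		... counter just reached zero ...
 */
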
static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, and zero if @v was @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n\
	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1 != 0;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */