#ifndef _ALPHA_ATOMIC_H
#define _ALPHA_ATOMIC_H

#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc...
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 */
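
/*
 * A minimal usage sketch (illustration only, not part of this header):
 * counting in-flight instances of a resource.  nr_requests,
 * request_start() and request_done() are invented names.
 *
 *	static atomic_t nr_requests = ATOMIC_INIT(0);
 *
 *	static void request_start(void)
 *	{
 *		atomic_inc(&nr_requests);
 *	}
 *
 *	static void request_done(void)
 *	{
 *		atomic_dec(&nr_requests);
 *	}
 */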

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v,i)		WRITE_ONCE((v)->counter, (i))
#define atomic64_set(v,i)	WRITE_ONCE((v)->counter, (i))

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

#define ATOMIC_OP(op, asm_op)						\
static __inline__ void atomic_##op(int i, atomic_t * v)		\
{									\
	unsigned long temp;						\
	__asm__ __volatile__(						\
	"1:	ldl_l %0,%1\n"						\
	"	" #asm_op " %0,%2,%0\n"					\
	"	stl_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter)				\
	:"Ir" (i), "m" (v->counter));					\
}
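
/*
 * For illustration (not in the original source): ATOMIC_OPS(add) below
 * instantiates ATOMIC_OP(add, addl), which expands to roughly
 *
 *	static __inline__ void atomic_add(int i, atomic_t *v)
 *	{
 *		unsigned long temp;
 *		__asm__ __volatile__(
 *		"1:	ldl_l %0,%1\n"	   load-locked the old value
 *		"	addl %0,%2,%0\n"   temp += i
 *		"	stl_c %0,%1\n"	   store-conditional, 0 on failure
 *		"	beq %0,2f\n"	   failed: jump to out-of-line retry
 *		".subsection 2\n"
 *		"2:	br 1b\n"
 *		".previous"
 *		:"=&r" (temp), "=m" (v->counter)
 *		:"Ir" (i), "m" (v->counter));
 *	}
 *
 * with the unlikely retry branch placed out of the straight-line path,
 * as the comment above describes.
 */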

#define ATOMIC_OP_RETURN(op, asm_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	long temp, result;						\
	smp_mb();							\
	__asm__ __volatile__(						\
	"1:	ldl_l %0,%1\n"						\
	"	" #asm_op " %0,%3,%2\n"					\
	"	" #asm_op " %0,%3,%0\n"					\
	"	stl_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter), "=&r" (result)		\
	:"Ir" (i), "m" (v->counter) : "memory");			\
	smp_mb();							\
	return result;							\
}

#define ATOMIC64_OP(op, asm_op)						\
static __inline__ void atomic64_##op(long i, atomic64_t * v)		\
{									\
	unsigned long temp;						\
	__asm__ __volatile__(						\
	"1:	ldq_l %0,%1\n"						\
	"	" #asm_op " %0,%2,%0\n"					\
	"	stq_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter)				\
	:"Ir" (i), "m" (v->counter));					\
}

#define ATOMIC64_OP_RETURN(op, asm_op)					\
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	\
{									\
	long temp, result;						\
	smp_mb();							\
	__asm__ __volatile__(						\
	"1:	ldq_l %0,%1\n"						\
	"	" #asm_op " %0,%3,%2\n"					\
	"	" #asm_op " %0,%3,%0\n"					\
	"	stq_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
	"2:	br 1b\n"						\
	".previous"							\
	:"=&r" (temp), "=m" (v->counter), "=&r" (result)		\
	:"Ir" (i), "m" (v->counter) : "memory");			\
	smp_mb();							\
	return result;							\
}

#define ATOMIC_OPS(op)							\
	ATOMIC_OP(op, op##l)						\
	ATOMIC_OP_RETURN(op, op##l)					\
	ATOMIC64_OP(op, op##q)						\
	ATOMIC64_OP_RETURN(op, op##q)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#define atomic_andnot atomic_andnot
#define atomic64_andnot atomic64_andnot

ATOMIC_OP(and, and)
ATOMIC_OP(andnot, bic)
ATOMIC_OP(or, bis)
ATOMIC_OP(xor, xor)
ATOMIC64_OP(and, and)
ATOMIC64_OP(andnot, bic)
ATOMIC64_OP(or, bis)
ATOMIC64_OP(xor, xor)

#undef ATOMIC_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
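
/*
 * Usage sketch (illustration only): a lock-free read-modify-write
 * built on atomic_cmpxchg().  clamp_inc() is an invented name.
 *
 *	static int clamp_inc(atomic_t *v, int max)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			if (old >= max)
 *				return old;
 *			new = old + 1;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *		return new;
 *	}
 */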

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, new, old;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l	%[old],%[mem]\n"
	"	cmpeq	%[old],%[u],%[c]\n"
	"	addl	%[old],%[a],%[new]\n"
	"	bne	%[c],2f\n"
	"	stl_c	%[new],%[mem]\n"
	"	beq	%[new],3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: [old] "=&r"(old), [new] "=&r"(new), [c] "=&r"(c)
	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"((long)u)
	: "memory");
	smp_mb();
	return old;
}
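
/*
 * Usage sketch (illustration only): take a reference only while the
 * object is still live, i.e. its count has not already hit zero.
 * try_get_ref() is an invented name.
 *
 *	static int try_get_ref(atomic_t *refcount)
 *	{
 *		return __atomic_add_unless(refcount, 1, 0) != 0;
 *	}
 */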

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, tmp;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l	%[tmp],%[mem]\n"
	"	cmpeq	%[tmp],%[u],%[c]\n"
	"	addq	%[tmp],%[a],%[tmp]\n"
	"	bne	%[c],2f\n"
	"	stq_c	%[tmp],%[mem]\n"
	"	beq	%[tmp],3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: [tmp] "=&r"(tmp), [c] "=&r"(c)
	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"(u)
	: "memory");
	smp_mb();
	return !c;
}
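
/*
 * Usage sketch (illustration only): since the return value is a
 * boolean, it can gate a code path directly; atomic64_inc_not_zero()
 * below is exactly this with a = 1 and u = 0.
 *
 *	if (atomic64_add_unless(&obj->refcnt, 1, 0))
 *		use(obj);	// obj and use() are invented names
 */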

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	long old, tmp;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l	%[old],%[mem]\n"
	"	subq	%[old],1,%[tmp]\n"
	"	ble	%[old],2f\n"
	"	stq_c	%[tmp],%[mem]\n"
	"	beq	%[tmp],3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: [old] "=&r"(old), [tmp] "=&r"(tmp)
	: [mem] "m"(*v)
	: "memory");
	smp_mb();
	return old - 1;
}
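
/*
 * Usage sketch (illustration only): a "try-down" on a counting
 * semaphore-like value that must never go negative.  try_down() and
 * sem_count are invented names.
 *
 *	static int try_down(atomic64_t *sem_count)
 *	{
 *		// old - 1 is negative iff nothing was decremented
 *		return atomic64_dec_if_positive(sem_count) >= 0;
 *	}
 */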

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))

#define atomic_inc_return(v) atomic_add_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
#define atomic64_inc_and_test(v) (atomic64_add_return(1, (v)) == 0)

#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

#define atomic_inc(v) atomic_add(1,(v))
#define atomic64_inc(v) atomic64_add(1,(v))

#define atomic_dec(v) atomic_sub(1,(v))
#define atomic64_dec(v) atomic64_sub(1,(v))
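
/*
 * Usage sketch (illustration only): the canonical "put" side of a
 * reference count built from these wrappers.  struct widget,
 * widget_put() and widget_destroy() are invented names.
 *
 *	static void widget_put(struct widget *w)
 *	{
 *		if (atomic_dec_and_test(&w->refcnt))
 *			widget_destroy(w);	// last reference dropped
 *	}
 */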

#endif /* _ALPHA_ATOMIC_H */