/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif
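
/*
 * Illustrative sketch (not emitted verbatim by this header): the ATOMIC_OP()
 * family below generates ll/sc retry loops of roughly this shape, with
 * __scbeqz providing the branch back to the load-linked on a failed store:
 *
 *	1:	ll	t0, 0(a1)	# load-linked the old counter value
 *		addu	t0, t0, a0	# apply the operation (here: add)
 *		sc	t0, 0(a1)	# store-conditional the new value
 *		beqz	t0, 1b		# sc wrote 0 on failure, so retry
 *
 * With R10000_LLSC_WAR the final branch is the branch-likely "beqzl" instead
 * of "beqz".
 */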

#define ATOMIC_INIT(i)	  { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
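
/*
 * Usage sketch (illustrative; the identifiers below are made up and not part
 * of this header): a plain reference counter built from the 32-bit API.
 *
 *	static atomic_t example_users = ATOMIC_INIT(0);
 *
 *	static void example_get(void)
 *	{
 *		atomic_add(1, &example_users);
 *	}
 *
 *	static bool example_busy(void)
 *	{
 *		return atomic_read(&example_users) != 0;
 *	}
 */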

#define ATOMIC_OP(op, c_op, asm_op)					\
static __inline__ void atomic_##op(int i, atomic_t * v)		\
{									\
	if (kernel_uses_llsc) {						\
		int temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	ll	%0, %1		# atomic_" #op "\n"	\
		"	" #asm_op " %0, %2			\n"	\
		"	sc	%0, %1				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
}
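
/*
 * Illustrative expansion (for readability only, this is not additional code):
 * ATOMIC_OPS(add, +=, addu) further below turns the template above into
 *
 *	static __inline__ void atomic_add(int i, atomic_t *v);
 *
 * which either runs the ll/addu/sc retry loop, or - on CPUs without ll/sc -
 * falls back to "v->counter += i" under raw_local_irq_save()/restore().
 */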

#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
{									\
	int result;							\
									\
	if (kernel_uses_llsc) {						\
		int temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	ll	%1, %2	# atomic_" #op "_return	\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	sc	%0, %2				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	.set	pop				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		result c_op i;						\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}
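
/*
 * Usage sketch (illustrative; "example_pending" and example_kick_worker() are
 * hypothetical): the *_return variants hand back the value *after* the
 * operation, so the caller can act on the updated count without a separate
 * atomic_read():
 *
 *	static atomic_t example_pending = ATOMIC_INIT(0);
 *
 *	if (atomic_add_return_relaxed(1, &example_pending) == 1)
 *		example_kick_worker();
 */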

#define ATOMIC_FETCH_OP(op, c_op, asm_op)				\
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)	\
{									\
	int result;							\
									\
	if (kernel_uses_llsc) {						\
		int temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	ll	%1, %2	# atomic_fetch_" #op "	\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	sc	%0, %2				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	move	%0, %1				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}
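
/*
 * Illustrative contrast (the values are just a worked example): the fetch_*
 * variants return the value the counter held *before* the operation, while
 * the *_return variants return the value *after* it:
 *
 *	atomic_t example = ATOMIC_INIT(10);
 *
 *	atomic_fetch_add_relaxed(5, &example);	// returns 10, counter is 15
 *	atomic_add_return_relaxed(5, &example);	// returns 20, counter is 20
 */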

#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_OP_RETURN(op, c_op, asm_op)				\
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed
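
/*
 * Note (generic kernel behaviour, not something defined in this file): only
 * the _relaxed forms are provided here, so the generic <linux/atomic.h> layer
 * is expected to derive the fully ordered variants, conceptually like this
 * sketch:
 *
 *	static inline int atomic_add_return(int i, atomic_t *v)
 *	{
 *		int ret;
 *
 *		smp_mb__before_atomic();
 *		ret = atomic_add_return_relaxed(i, v);
 *		smp_mb__after_atomic();
 *		return ret;
 *	}
 */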

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
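
/*
 * Usage sketch (illustrative; the flag names are made up and not part of this
 * header): the bitwise fetch variants return the old mask, which makes
 * "test-and-set a flag" style updates race-free:
 *
 *	#define EXAMPLE_FLAG_BUSY	0x1
 *	static atomic_t example_flags = ATOMIC_INIT(0);
 *
 *	int was_busy = atomic_fetch_or_relaxed(EXAMPLE_FLAG_BUSY,
 *					       &example_flags) & EXAMPLE_FLAG_BUSY;
 */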

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	.set	pop					\n"
		"	subu	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 2f					\n"
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	sc	%1, %2					\n"
		"\t" __scbeqz "	%1, 1b					\n"
		"2:	.set	pop					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i) : __LLSC_CLOBBER);
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
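
/*
 * Usage sketch (illustrative; "example_credits" is made up): because the
 * subtraction is only committed when the result stays non-negative, this can
 * implement a simple credit/permit counter:
 *
 *	static atomic_t example_credits = ATOMIC_INIT(4);
 *
 *	if (atomic_sub_if_positive(1, &example_credits) < 0)
 *		return -EBUSY;	// no credit left, counter left untouched
 */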

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
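
/*
 * Usage sketch (illustrative; example_add_unless_zero() is made up): the
 * classic compare-and-swap retry loop built on atomic_cmpxchg(), adding to
 * the counter only while it is non-zero:
 *
 *	static int example_add_unless_zero(atomic_t *v, int a)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old != 0) {
 *			int seen = atomic_cmpxchg(v, old, old + a);
 *
 *			if (seen == old)
 *				return 1;	// swap happened
 *			old = seen;		// lost a race, retry with new value
 *		}
 *		return 0;
 *	}
 */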

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 */
#define atomic64_read(v)	READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))

#define ATOMIC64_OP(op, c_op, asm_op)					\
static __inline__ void atomic64_##op(s64 i, atomic64_t * v)		\
{									\
	if (kernel_uses_llsc) {						\
		s64 temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	lld	%0, %1	# atomic64_" #op "	\n"	\
		"	" #asm_op " %0, %2			\n"	\
		"	scd	%0, %1				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				\
static __inline__ s64 atomic64_##op##_return_relaxed(s64 i, atomic64_t * v) \
{									\
	s64 result;							\
									\
	if (kernel_uses_llsc) {						\
		s64 temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	lld	%1, %2	# atomic64_" #op "_return\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	scd	%0, %2				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	.set	pop				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		result c_op i;						\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}

#define ATOMIC64_FETCH_OP(op, c_op, asm_op)				\
static __inline__ s64 atomic64_fetch_##op##_relaxed(s64 i, atomic64_t * v) \
{									\
	s64 result;							\
									\
	if (kernel_uses_llsc) {						\
		s64 temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	lld	%1, %2	# atomic64_fetch_" #op "\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	scd	%0, %2				\n"	\
		"\t" __scbeqz "	%0, 1b				\n"	\
		"	move	%0, %1				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}

#define ATOMIC64_OPS(op, c_op, asm_op)					\
	ATOMIC64_OP(op, c_op, asm_op)					\
	ATOMIC64_OP_RETURN(op, c_op, asm_op)				\
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op)					\
	ATOMIC64_OP(op, c_op, asm_op)					\
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 * variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ s64 atomic64_sub_if_positive(s64 i, atomic64_t * v)
{
	s64 result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc) {
		s64 temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 2f					\n"
		"	scd	%1, %2					\n"
		"\t" __scbeqz "	%1, 1b					\n"
		"2:	.set	pop					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i) : __LLSC_CLOBBER);
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)
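
/*
 * Usage sketch (illustrative; the identifiers are made up): atomic64_t is the
 * natural type for counters that may overflow 32 bits, e.g. byte counts:
 *
 *	static atomic64_t example_bytes = ATOMIC64_INIT(0);
 *
 *	static void example_account(size_t len)
 *	{
 *		atomic64_add(len, &example_bytes);
 *	}
 *
 *	// later: (long long)atomic64_read(&example_bytes)
 */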

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */