1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_M32R_ATOMIC_H
3 #define _ASM_M32R_ATOMIC_H
/*
 *  linux/include/asm-m32r/atomic.h
 *
 *  M32R version:
 *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */
13 #include <linux/types.h>
14 #include <asm/assembler.h>
15 #include <asm/cmpxchg.h>
16 #include <asm/dcache_clear.h>
17 #include <asm/barrier.h>
/*
 *  Atomic operations that C can't guarantee us.  Useful for
 *  resource counting etc..
 */
/* Static initializer for an atomic_t, e.g. atomic_t x = ATOMIC_INIT(0). */
#define ATOMIC_INIT(i)	{ (i) }
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	READ_ONCE((v)->counter)
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)	WRITE_ONCE(((v)->counter), (i))
/*
 * On the M32700/TS1 chip the DCACHE_CLEAR() workaround uses r4 as a
 * scratch register inside the asm bodies below, so it must be listed
 * in the clobber set on that chip; elsewhere no extra clobber is needed.
 */
#ifdef CONFIG_CHIP_M32700_TS1
#define __ATOMIC_CLOBBER	, "r4"
#else
#define __ATOMIC_CLOBBER
#endif	/* CONFIG_CHIP_M32700_TS1 */
/*
 * ATOMIC_OP(op) - generate atomic_<op>(i, v): v->counter <op>= i, no return.
 * Atomicity comes from disabling interrupts around an M32R LOCK/UNLOCK
 * load-modify-store sequence on &v->counter.
 */
#define ATOMIC_OP(op)							\
static __inline__ void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
}
/*
 * ATOMIC_OP_RETURN(op) - generate atomic_<op>_return(i, v):
 * v->counter <op>= i and return the NEW value.  Same IRQ-off
 * LOCK/UNLOCK sequence as ATOMIC_OP, but the modified value is
 * kept in %0 and returned.
 */
#define ATOMIC_OP_RETURN(op)						\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "_return	\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}
/*
 * ATOMIC_FETCH_OP(op) - generate atomic_fetch_<op>(i, v):
 * v->counter <op>= i and return the OLD value.  The loaded value is
 * copied to %0 (result) with "mv" before op modifies %1 (val).
 */
#define ATOMIC_FETCH_OP(op)						\
static __inline__ int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result, val;						\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_fetch_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%2")				\
		M32R_LOCK" %1, @%2;		\n\t"			\
		"mv %0, %1			\n\t"			\
		#op " %1, %3;			\n\t"			\
		M32R_UNLOCK" %1, @%2;		\n\t"			\
		: "=&r" (result), "=&r" (val)				\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}
117 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)
123 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)
130 #undef ATOMIC_FETCH_OP
131 #undef ATOMIC_OP_RETURN
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
146 * atomic_inc_return - increment atomic variable and return it
147 * @v: pointer of type atomic_t
149 * Atomically increments @v by 1 and returns the result.
151 static __inline__ int atomic_inc_return(atomic_t *v)
156 local_irq_save(flags);
157 __asm__ __volatile__ (
158 "# atomic_inc_return \n\t"
159 DCACHE_CLEAR("%0", "r4", "%1")
160 M32R_LOCK" %0, @%1; \n\t"
162 M32R_UNLOCK" %0, @%1; \n\t"
168 local_irq_restore(flags);
174 * atomic_dec_return - decrement atomic variable and return it
175 * @v: pointer of type atomic_t
177 * Atomically decrements @v by 1 and returns the result.
179 static __inline__ int atomic_dec_return(atomic_t *v)
184 local_irq_save(flags);
185 __asm__ __volatile__ (
186 "# atomic_dec_return \n\t"
187 DCACHE_CLEAR("%0", "r4", "%1")
188 M32R_LOCK" %0, @%1; \n\t"
190 M32R_UNLOCK" %0, @%1; \n\t"
196 local_irq_restore(flags);
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
/* Forward cmpxchg/xchg on the embedded counter to the generic helpers. */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
252 * __atomic_add_unless - add unless the number is a given value
253 * @v: pointer of type atomic_t
254 * @a: the amount to add to v...
255 * @u: ...unless v is equal to u.
257 * Atomically adds @a to @v, so long as it was not @u.
258 * Returns the old value of @v.
260 static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
265 if (unlikely(c == (u)))
267 old = atomic_cmpxchg((v), c, c + (a));
268 if (likely(old == c))
275 #endif /* _ASM_M32R_ATOMIC_H */