/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
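
/*
 * Illustrative sketch (not part of this header): through the generic
 * <linux/atomic.h> wrappers, these operations back common patterns such
 * as reference counting.  The example_* names below are hypothetical.
 *
 *	static atomic_t nusers = ATOMIC_INIT(0);
 *
 *	void example_get(void)
 *	{
 *		atomic_inc(&nusers);
 *	}
 *
 *	void example_put(void)
 *	{
 *		if (atomic_dec_and_test(&nusers))
 *			example_cleanup();	/- last user gone -/
 *	}
 */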
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>

#define ATOMIC64_INIT(i)	{ (i) }

#define arch_atomic_read(v)	READ_ONCE((v)->counter)
#define arch_atomic64_read(v)	READ_ONCE((v)->counter)

#define arch_atomic_set(v,i)	WRITE_ONCE(((v)->counter), (i))
#define arch_atomic64_set(v,i)	WRITE_ONCE(((v)->counter), (i))
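
/*
 * The operation generators below implement each op as a cmpxchg.acq
 * retry loop: read the counter, compute the new value, and attempt to
 * publish it; if another CPU modified the counter in the meantime, the
 * cmpxchg fails and the loop retries.
 */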

#define ATOMIC_OP(op, c_op)						\
static __inline__ int							\
ia64_atomic_##op (int i, atomic_t *v)					\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = arch_atomic_read(v);				\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return new;							\
}
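
/*
 * ATOMIC_OP() returns the new value; ATOMIC_FETCH_OP() below is
 * identical except that it returns the value the counter held before
 * the operation.
 */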

#define ATOMIC_FETCH_OP(op, c_op)					\
static __inline__ int							\
ia64_atomic_fetch_##op (int i, atomic_t *v)				\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = arch_atomic_read(v);				\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return old;							\
}

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +)
ATOMIC_OPS(sub, -)
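
/*
 * The ia64 fetchadd instruction only accepts a small set of immediate
 * increments: -16, -8, -4, -1, 1, 4, 8 and 16.  When the operand is a
 * compile-time constant in that set, the macros below can use a single
 * fetchadd instead of the cmpxchg loop; __ia64_atomic_const() detects
 * that case (and only when the optimizer can fold the check away).
 */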

#ifdef __OPTIMIZE__
#define __ia64_atomic_const(i)						\
	static const int __ia64_atomic_p = __builtin_constant_p(i) ?	\
		((i) == 1 || (i) == 4 || (i) == 8 || (i) == 16 ||	\
		 (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16) : 0;\
	__ia64_atomic_p
#else
#define __ia64_atomic_const(i)	0
#endif

#define arch_atomic_add_return(i,v)					\
({									\
	int __ia64_aar_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic_add(__ia64_aar_i, v);			\
})

#define arch_atomic_sub_return(i,v)					\
({									\
	int __ia64_asr_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic_sub(__ia64_asr_i, v);			\
})

#define arch_atomic_fetch_add(i,v)					\
({									\
	int __ia64_aar_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_add(__ia64_aar_i, v);		\
})

#define arch_atomic_fetch_sub(i,v)					\
({									\
	int __ia64_asr_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_sub(__ia64_asr_i, v);		\
})
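
/*
 * There is no fetchadd-style instruction for the bitwise operations, so
 * and/or/xor always go through the cmpxchg loop.  The void-returning
 * arch_atomic_{and,or,xor}() forms simply discard the fetched value.
 */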

ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

#define arch_atomic_and(i,v)	(void)ia64_atomic_fetch_and(i,v)
#define arch_atomic_or(i,v)	(void)ia64_atomic_fetch_or(i,v)
#define arch_atomic_xor(i,v)	(void)ia64_atomic_fetch_xor(i,v)

#define arch_atomic_fetch_and(i,v)	ia64_atomic_fetch_and(i,v)
#define arch_atomic_fetch_or(i,v)	ia64_atomic_fetch_or(i,v)
#define arch_atomic_fetch_xor(i,v)	ia64_atomic_fetch_xor(i,v)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP

#define ATOMIC64_OP(op, c_op)						\
static __inline__ s64							\
ia64_atomic64_##op (s64 i, atomic64_t *v)				\
{									\
	s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = arch_atomic64_read(v);				\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return new;							\
}

#define ATOMIC64_FETCH_OP(op, c_op)					\
static __inline__ s64							\
ia64_atomic64_fetch_##op (s64 i, atomic64_t *v)				\
{									\
	s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = arch_atomic64_read(v);				\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return old;							\
}

#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(add, +)
ATOMIC64_OPS(sub, -)

#define arch_atomic64_add_return(i,v)					\
({									\
	s64 __ia64_aar_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

#define arch_atomic64_sub_return(i,v)					\
({									\
	s64 __ia64_asr_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

#define arch_atomic64_fetch_add(i,v)					\
({									\
	s64 __ia64_aar_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_add(__ia64_aar_i, v);		\
})

#define arch_atomic64_fetch_sub(i,v)					\
({									\
	s64 __ia64_asr_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_sub(__ia64_asr_i, v);		\
})

ATOMIC64_FETCH_OP(and, &)
ATOMIC64_FETCH_OP(or, |)
ATOMIC64_FETCH_OP(xor, ^)

#define arch_atomic64_and(i,v)	(void)ia64_atomic64_fetch_and(i,v)
#define arch_atomic64_or(i,v)	(void)ia64_atomic64_fetch_or(i,v)
#define arch_atomic64_xor(i,v)	(void)ia64_atomic64_fetch_xor(i,v)

#define arch_atomic64_fetch_and(i,v)	ia64_atomic64_fetch_and(i,v)
#define arch_atomic64_fetch_or(i,v)	ia64_atomic64_fetch_or(i,v)
#define arch_atomic64_fetch_xor(i,v)	ia64_atomic64_fetch_xor(i,v)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP
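
/*
 * xchg() and cmpxchg() on atomic{,64}_t simply operate on the counter
 * field through the generic arch_xchg()/arch_cmpxchg() helpers, which
 * on ia64 reduce to the native xchg and cmpxchg instructions.
 */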

#define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), old, new))
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))

#define arch_atomic64_cmpxchg(v, old, new) \
	(arch_cmpxchg(&((v)->counter), old, new))
#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), new))
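
/*
 * The void-returning add/sub forms reuse the *_return() macros and
 * discard the result, so they inherit the same fetchadd fast path for
 * small constant operands.
 */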

#define arch_atomic_add(i,v)		(void)arch_atomic_add_return((i), (v))
#define arch_atomic_sub(i,v)		(void)arch_atomic_sub_return((i), (v))

#define arch_atomic64_add(i,v)		(void)arch_atomic64_add_return((i), (v))
#define arch_atomic64_sub(i,v)		(void)arch_atomic64_sub_return((i), (v))

#endif /* _ASM_IA64_ATOMIC_H */