1 #ifndef _ALPHA_CMPXCHG_H
2 #error Do not include xchg.h directly!
5 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
6 * except that the local versions do not have the expensive memory barrier.
7 * So this file is included twice from asm/cmpxchg.h.
12 * Since it can be used to implement critical sections
13 * it must clobber "memory" (also for interrupts in UP).
15 * The leading and the trailing memory barriers guarantee that these
16 * operations are fully ordered.
/*
 * 8-bit exchange: atomically swap the byte at *m with val, returning the
 * previous byte value.
 *
 * NOTE(review): this excerpt is incomplete — the opening brace, the asm
 * template (the ll/sc retry loop), and the trailing return are missing
 * between the lines shown here; only the declarations and the asm operand
 * constraints survive.
 */
20 static inline unsigned long
21 ____xchg(_u8, volatile char *m, unsigned long val)
/* addr64: presumably the quadword-aligned address containing the target
 * byte, since Alpha ll/sc works on whole longwords/quadwords — TODO
 * confirm against the full source. */
23 unsigned long ret, tmp, addr64;
/* Outputs: ret (old value); val, tmp, addr64 used as scratch. Inputs: the
 * byte address m and the new value (tied to val via "1"). "memory" is
 * clobbered so the compiler cannot cache loads/stores across the swap. */
39 : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
40 : "r" ((long)m), "1" (val) : "memory");
/*
 * 16-bit exchange: atomically swap the halfword at *m with val, returning
 * the previous value.
 *
 * NOTE(review): incomplete excerpt — the asm template between the
 * declaration and the constraint lines is missing; same shape as the
 * _u8 variant above.
 */
45 static inline unsigned long
46 ____xchg(_u16, volatile short *m, unsigned long val)
/* addr64: presumably the aligned quadword containing the halfword —
 * TODO confirm against the full source. */
48 unsigned long ret, tmp, addr64;
/* Same operand layout as the _u8 variant: ret gets the old value; val,
 * tmp, addr64 are scratch; m and val are inputs; memory is clobbered. */
64 : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
65 : "r" ((long)m), "1" (val) : "memory");
/*
 * 32-bit exchange: atomically swap the int at *m with val, returning the
 * previous value.
 *
 * NOTE(review): incomplete excerpt — the body (including the declaration
 * of the "dummy" scratch variable referenced in the constraints, and the
 * asm template) is missing between the lines shown.
 */
70 static inline unsigned long
71 ____xchg(_u32, volatile int *m, unsigned long val)
/* Unlike the sub-word variants this operates on *m directly ("=m"/"m"
 * operands); "rI" lets a small immediate be used for the new value.
 * val is both input ("rI") and the returned old value (output 0). */
85 : "=&r" (val), "=&r" (dummy), "=m" (*m)
86 : "rI" (val), "m" (*m) : "memory");
/*
 * 64-bit exchange: atomically swap the quadword at *m with val, returning
 * the previous value.
 *
 * NOTE(review): incomplete excerpt — body and asm template missing; the
 * operand layout is identical to the _u32 variant above.
 */
91 static inline unsigned long
92 ____xchg(_u64, volatile long *m, unsigned long val)
/* Direct memory operand, no sub-word insertion needed at this width. */
106 : "=&r" (val), "=&r" (dummy), "=m" (*m)
107 : "rI" (val), "m" (*m) : "memory");
112 /* This function doesn't exist, so you'll get a linker error
113 if something tries to do an invalid xchg(). */
114 extern void __xchg_called_with_bad_pointer(void);
/*
 * Size dispatcher: routes xchg() to the width-specific implementation
 * based on sizeof(*ptr). Must be __always_inline so the switch on the
 * compile-time-constant size folds away entirely.
 *
 * NOTE(review): incomplete excerpt — the switch statement and case labels
 * between the return lines are missing; only the per-size returns and the
 * bad-size path are visible.
 */
116 static __always_inline unsigned long
117 ____xchg(, volatile void *ptr, unsigned long x, int size)
121 return ____xchg(_u8, ptr, x);
123 return ____xchg(_u16, ptr, x);
125 return ____xchg(_u32, ptr, x);
127 return ____xchg(_u64, ptr, x);
/* Undefined extern (declared above): an invalid size becomes a link-time
 * error rather than silent miscompilation. */
129 __xchg_called_with_bad_pointer();
134 * Atomic compare and exchange. Compare OLD with MEM, if identical,
135 * store NEW in MEM. Return the initial value in MEM. Success is
136 * indicated by comparing RETURN with OLD.
138 * The leading and the trailing memory barriers guarantee that these
139 * operations are fully ordered.
141 * The trailing memory barrier is placed in SMP unconditionally, in
142 * order to guarantee that dependency ordering is preserved when a
143 * dependency is headed by an unsuccessful operation.
/*
 * 8-bit compare-and-exchange: if the byte at *m equals old, store new;
 * return the previous byte value either way (caller compares result with
 * old to detect success — see the comment block above).
 *
 * NOTE(review): incomplete excerpt — most of the asm template (extract/
 * insert/mask sequence, stq_c, retry branch) is missing between the
 * ldq_l line and the constraints.
 */
146 static inline unsigned long
147 ____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
149 unsigned long prev, tmp, cmp, addr64;
152 __asm__ __volatile__(
/* Load-locked of the whole quadword at addr64 (%4) into tmp (%2);
 * the byte is manipulated within it in the missing lines. */
155 "1: ldq_l %2,0(%4)\n"
/* Outputs: prev (%0, old value), new/tmp/cmp/addr64 scratch. Inputs:
 * byte address m, comparison value old ("Ir" allows an immediate),
 * new tied to output 1. Memory clobbered. */
168 : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
169 : "r" ((long)m), "Ir" (old), "1" (new) : "memory");
/*
 * 16-bit compare-and-exchange: if the halfword at *m equals old, store
 * new; return the previous value either way.
 *
 * NOTE(review): incomplete excerpt — same gaps as the _u8 variant above;
 * only the load-locked line and the constraints survive.
 */
174 static inline unsigned long
175 ____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
177 unsigned long prev, tmp, cmp, addr64;
180 __asm__ __volatile__(
/* Load-locked of the containing quadword at addr64 (%4) into tmp (%2). */
183 "1: ldq_l %2,0(%4)\n"
/* Identical operand layout to the _u8 variant. */
196 : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
197 : "r" ((long)m), "Ir" (old), "1" (new) : "memory");
/*
 * 32-bit compare-and-exchange on the naturally-sized int at *m; returns
 * the previous value.
 *
 * NOTE(review): incomplete excerpt — the asm template (ll/sc compare
 * sequence) between __asm__ and the constraints is missing.
 */
202 static inline unsigned long
203 ____cmpxchg(_u32, volatile int *m, int old, int new)
205 unsigned long prev, cmp;
208 __asm__ __volatile__(
/* Direct memory operand ("=m"/"m"): no sub-word masking needed at this
 * width, hence the shorter scratch list than the _u8/_u16 variants. */
220 : "=&r"(prev), "=&r"(cmp), "=m"(*m)
221 : "r"((long) old), "r"(new), "m"(*m) : "memory");
/*
 * 64-bit compare-and-exchange on the quadword at *m; returns the previous
 * value.
 *
 * NOTE(review): incomplete excerpt — asm template missing; operand layout
 * is identical to the _u32 variant above.
 */
226 static inline unsigned long
227 ____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
229 unsigned long prev, cmp;
232 __asm__ __volatile__(
244 : "=&r"(prev), "=&r"(cmp), "=m"(*m)
245 : "r"((long) old), "r"(new), "m"(*m) : "memory");
250 /* This function doesn't exist, so you'll get a linker error
251 if something tries to do an invalid cmpxchg(). */
252 extern void __cmpxchg_called_with_bad_pointer(void);
/*
 * Size dispatcher for cmpxchg(): routes to the width-specific variant by
 * the compile-time size. __always_inline lets the switch fold away.
 *
 * NOTE(review): incomplete excerpt — the signature is cut mid-parameter-
 * list (the trailing size parameter line is missing), as are the switch
 * and case labels between the return statements.
 */
254 static __always_inline unsigned long
255 ____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
260 return ____cmpxchg(_u8, ptr, old, new);
262 return ____cmpxchg(_u16, ptr, old, new);
264 return ____cmpxchg(_u32, ptr, old, new);
266 return ____cmpxchg(_u64, ptr, old, new);
/* Undefined extern (declared above): invalid size becomes a link error. */
268 __cmpxchg_called_with_bad_pointer();