/* arch/powerpc/include/asm/cmpxchg.h (GNU Linux-libre 4.9.314-gnu1) */
#ifndef _ASM_POWERPC_CMPXCHG_H_
#define _ASM_POWERPC_CMPXCHG_H_

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <linux/types.h>
#include <linux/bug.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
/*
 * Atomic exchange
 *
 * Changes the memory location '*p' to be val and returns
 * the previous value stored there.
 */
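
/*
 * For example (a minimal usage sketch; 'pending' is just an
 * illustrative variable):
 *
 *      u32 pending = 1;
 *      u32 old = xchg_local(&pending, 0);
 *
 * leaves 'pending' at 0 and 'old' holding the value 'pending' had
 * before the store.  Only xchg_local() and xchg_relaxed() are
 * defined here; the fully ordered xchg() and its acquire/release
 * variants are built on top of xchg_relaxed() by linux/atomic.h.
 */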

static __always_inline unsigned long
__xchg_u32_local(volatile void *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%2\n"
        PPC405_ERR77(0, %2)
"       stwcx.  %3,0,%2\n"
"       bne-    1b"
        : "=&r" (prev), "+m" (*(volatile unsigned int *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__xchg_u32_relaxed(u32 *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%2\n"
        PPC405_ERR77(0, %2)
"       stwcx.  %3,0,%2\n"
"       bne-    1b"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (val)
        : "cc");

        return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__xchg_u64_local(volatile void *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%2\n"
        PPC405_ERR77(0, %2)
"       stdcx.  %3,0,%2\n"
"       bne-    1b"
        : "=&r" (prev), "+m" (*(volatile unsigned long *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__xchg_u64_relaxed(u64 *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%2\n"
        PPC405_ERR77(0, %2)
"       stdcx.  %3,0,%2\n"
"       bne-    1b"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (val)
        : "cc");

        return prev;
}
#endif

static __always_inline unsigned long
__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
{
        switch (size) {
        case 4:
                return __xchg_u32_local(ptr, x);
#ifdef CONFIG_PPC64
        case 8:
                return __xchg_u64_local(ptr, x);
#endif
        }
        BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_local");
        return x;
}

static __always_inline unsigned long
__xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
{
        switch (size) {
        case 4:
                return __xchg_u32_relaxed(ptr, x);
#ifdef CONFIG_PPC64
        case 8:
                return __xchg_u64_relaxed(ptr, x);
#endif
        }
        BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_relaxed");
        return x;
}

#define xchg_local(ptr, x)                                              \
({                                                                      \
        __typeof__(*(ptr)) _x_ = (x);                                   \
        (__typeof__(*(ptr))) __xchg_local((ptr),                        \
                        (unsigned long)_x_, sizeof(*(ptr)));            \
})

#define xchg_relaxed(ptr, x)                                            \
({                                                                      \
        __typeof__(*(ptr)) _x_ = (x);                                   \
        (__typeof__(*(ptr))) __xchg_relaxed((ptr),                      \
                        (unsigned long)_x_, sizeof(*(ptr)));            \
})

/*
 * Compare and exchange - if *p == old, set it to new,
 * and return the old value of *p.
 */
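
/*
 * For example, a cmpxchg()-based add can be written as the following
 * retry loop (a minimal sketch; my_add_return() is a made-up name
 * for illustration):
 *
 *      static inline u32 my_add_return(u32 *p, u32 inc)
 *      {
 *              u32 old, cur = READ_ONCE(*p);
 *
 *              do {
 *                      old = cur;
 *                      cur = cmpxchg(p, old, old + inc);
 *              } while (cur != old);
 *
 *              return old + inc;
 *      }
 *
 * cmpxchg() returns the value found in *p; the new value was stored
 * only if that value equals 'old'.
 */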

static __always_inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
{
        unsigned int prev;

        __asm__ __volatile__ (
        PPC_ATOMIC_ENTRY_BARRIER
"1:     lwarx   %0,0,%2         # __cmpxchg_u32\n"
"       cmpw    0,%0,%3\n"
"       bne-    2f\n"
        PPC405_ERR77(0, %2)
"       stwcx.  %4,0,%2\n"
"       bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        "\n"
"2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
                        unsigned long new)
{
        unsigned int prev;

        __asm__ __volatile__ (
"1:     lwarx   %0,0,%2         # __cmpxchg_u32_local\n"
"       cmpw    0,%0,%3\n"
"       bne-    2f\n"
        PPC405_ERR77(0, %2)
"       stwcx.  %4,0,%2\n"
"       bne-    1b\n"
"2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__cmpxchg_u32_relaxed(u32 *p, unsigned long old, unsigned long new)
{
        unsigned long prev;

        __asm__ __volatile__ (
"1:     lwarx   %0,0,%2         # __cmpxchg_u32_relaxed\n"
"       cmpw    0,%0,%3\n"
"       bne-    2f\n"
        PPC405_ERR77(0, %2)
"       stwcx.  %4,0,%2\n"
"       bne-    1b\n"
"2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc");

        return prev;
}

/*
 * The cmpxchg family provides no ordering guarantee when the compare
 * part fails, so we can avoid superfluous barriers by implementing
 * cmpxchg() and cmpxchg_acquire() in assembly.  We don't do the same
 * for cmpxchg_release(), because that would put a barrier in the
 * middle of a ll/sc loop, which is probably a bad idea; for example,
 * it might make the conditional store more likely to fail.
 */
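
/*
 * A typical beneficiary is a lock-acquire fast path (a minimal
 * sketch; my_lock() and the u32 lock word are made up for
 * illustration):
 *
 *      static inline void my_lock(u32 *lock)
 *      {
 *              while (cmpxchg_acquire(lock, 0, 1) != 0)
 *                      cpu_relax();
 *      }
 *
 * A failed compare branches straight to label 2 and skips
 * PPC_ACQUIRE_BARRIER, so spinning on a contended lock pays for no
 * barrier; the barrier runs only when the store that takes the lock
 * succeeds.
 */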
static __always_inline unsigned long
__cmpxchg_u32_acquire(u32 *p, unsigned long old, unsigned long new)
{
        unsigned long prev;

        __asm__ __volatile__ (
"1:     lwarx   %0,0,%2         # __cmpxchg_u32_acquire\n"
"       cmpw    0,%0,%3\n"
"       bne-    2f\n"
        PPC405_ERR77(0, %2)
"       stwcx.  %4,0,%2\n"
"       bne-    1b\n"
        PPC_ACQUIRE_BARRIER
        "\n"
"2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
{
        unsigned long prev;

        __asm__ __volatile__ (
        PPC_ATOMIC_ENTRY_BARRIER
"1:     ldarx   %0,0,%2         # __cmpxchg_u64\n"
"       cmpd    0,%0,%3\n"
"       bne-    2f\n"
"       stdcx.  %4,0,%2\n"
"       bne-    1b"
        PPC_ATOMIC_EXIT_BARRIER
        "\n"
"2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
                        unsigned long new)
{
        unsigned long prev;

        __asm__ __volatile__ (
"1:     ldarx   %0,0,%2         # __cmpxchg_u64_local\n"
"       cmpd    0,%0,%3\n"
"       bne-    2f\n"
"       stdcx.  %4,0,%2\n"
"       bne-    1b\n"
"2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_relaxed(u64 *p, unsigned long old, unsigned long new)
{
        unsigned long prev;

        __asm__ __volatile__ (
"1:     ldarx   %0,0,%2         # __cmpxchg_u64_relaxed\n"
"       cmpd    0,%0,%3\n"
"       bne-    2f\n"
"       stdcx.  %4,0,%2\n"
"       bne-    1b\n"
"2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc");

        return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new)
{
        unsigned long prev;

        __asm__ __volatile__ (
"1:     ldarx   %0,0,%2         # __cmpxchg_u64_acquire\n"
"       cmpd    0,%0,%3\n"
"       bne-    2f\n"
"       stdcx.  %4,0,%2\n"
"       bne-    1b\n"
        PPC_ACQUIRE_BARRIER
        "\n"
"2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}
#endif

static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
          unsigned int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32(ptr, old, new);
#ifdef CONFIG_PPC64
        case 8:
                return __cmpxchg_u64(ptr, old, new);
#endif
        }
        BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg");
        return old;
}

static __always_inline unsigned long
__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
                unsigned int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32_local(ptr, old, new);
#ifdef CONFIG_PPC64
        case 8:
                return __cmpxchg_u64_local(ptr, old, new);
#endif
        }
        BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_local");
        return old;
}

static __always_inline unsigned long
__cmpxchg_relaxed(void *ptr, unsigned long old, unsigned long new,
                  unsigned int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32_relaxed(ptr, old, new);
#ifdef CONFIG_PPC64
        case 8:
                return __cmpxchg_u64_relaxed(ptr, old, new);
#endif
        }
        BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_relaxed");
        return old;
}

static __always_inline unsigned long
__cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new,
                  unsigned int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32_acquire(ptr, old, new);
#ifdef CONFIG_PPC64
        case 8:
                return __cmpxchg_u64_acquire(ptr, old, new);
#endif
        }
        BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_acquire");
        return old;
}

#define cmpxchg(ptr, o, n)                                              \
({                                                                      \
        __typeof__(*(ptr)) _o_ = (o);                                   \
        __typeof__(*(ptr)) _n_ = (n);                                   \
        (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,       \
                        (unsigned long)_n_, sizeof(*(ptr)));            \
})

#define cmpxchg_local(ptr, o, n)                                        \
({                                                                      \
        __typeof__(*(ptr)) _o_ = (o);                                   \
        __typeof__(*(ptr)) _n_ = (n);                                   \
        (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_, \
                        (unsigned long)_n_, sizeof(*(ptr)));            \
})

#define cmpxchg_relaxed(ptr, o, n)                                      \
({                                                                      \
        __typeof__(*(ptr)) _o_ = (o);                                   \
        __typeof__(*(ptr)) _n_ = (n);                                   \
        (__typeof__(*(ptr))) __cmpxchg_relaxed((ptr),                   \
                        (unsigned long)_o_, (unsigned long)_n_,         \
                        sizeof(*(ptr)));                                \
})

#define cmpxchg_acquire(ptr, o, n)                                      \
({                                                                      \
        __typeof__(*(ptr)) _o_ = (o);                                   \
        __typeof__(*(ptr)) _n_ = (n);                                   \
        (__typeof__(*(ptr))) __cmpxchg_acquire((ptr),                   \
                        (unsigned long)_o_, (unsigned long)_n_,         \
                        sizeof(*(ptr)));                                \
})
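
/*
 * The wrappers above preserve the pointed-to type, so callers need
 * no casts.  For instance (a minimal sketch; 'state', FOO_IDLE,
 * FOO_BUSY and do_busy_work() are made up for illustration):
 *
 *      u32 state = FOO_IDLE;
 *
 *      if (cmpxchg(&state, FOO_IDLE, FOO_BUSY) == FOO_IDLE)
 *              do_busy_work();
 *
 * where a return value of FOO_IDLE means we won the IDLE -> BUSY
 * transition.  Only naturally aligned 4-byte (and, on PPC64,
 * 8-byte) objects are supported; any other size trips
 * BUILD_BUG_ON_MSG() at compile time.
 */
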
#ifdef CONFIG_PPC64
#define cmpxchg64(ptr, o, n)                                            \
({                                                                      \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg((ptr), (o), (n));                                       \
})
#define cmpxchg64_local(ptr, o, n)                                      \
({                                                                      \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
})
#define cmpxchg64_relaxed(ptr, o, n)                                    \
({                                                                      \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_relaxed((ptr), (o), (n));                               \
})
#define cmpxchg64_acquire(ptr, o, n)                                    \
({                                                                      \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_acquire((ptr), (o), (n));                               \
})
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CMPXCHG_H_ */