/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __ASM_CSKY_ATOMIC_H
#define __ASM_CSKY_ATOMIC_H

#include <linux/version.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

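/*
 * Two implementations are provided: cores with CONFIG_CPU_HAS_LDSTEX
 * use ldex.w/stex.w (load/store exclusive) retry loops, while cores
 * without them fall back to plain ldw/stw wrapped in
 * raw_local_irq_save()/raw_local_irq_restore().
 */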
#ifdef CONFIG_CPU_HAS_LDSTEX

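/*
 * Atomically add @a to @v->counter unless the counter already equals
 * @u; the old value is returned either way.  The loop retries until
 * stex.w succeeds, and smp_mb() is issued before the access and again
 * afterwards only if the add was actually performed (ret != u).
 */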
#define __atomic_add_unless __atomic_add_unless
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
        unsigned long tmp, ret;

        smp_mb();

        asm volatile (
        "1:     ldex.w          %0, (%3) \n"
        "       mov             %1, %0   \n"
        "       cmpne           %0, %4   \n"
        "       bf              2f       \n"
        "       add             %0, %2   \n"
        "       stex.w          %0, (%3) \n"
        "       bez             %0, 1b   \n"
        "2:                              \n"
                : "=&r" (tmp), "=&r" (ret)
                : "r" (a), "r"(&v->counter), "r"(u)
                : "memory");

        if (ret != u)
                smp_mb();

        return ret;
}

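/*
 * atomic_<op>(): atomic read-modify-write with no return value and no
 * ordering guarantee beyond atomicity.  @c_op is unused here; the
 * operation comes from the #op instruction mnemonic.
 */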
#define ATOMIC_OP(op, c_op)                                             \
static inline void atomic_##op(int i, atomic_t *v)                      \
{                                                                       \
        unsigned long tmp;                                              \
                                                                        \
        asm volatile (                                                  \
        "1:     ldex.w          %0, (%2) \n"                            \
        "       " #op "         %0, %1   \n"                            \
        "       stex.w          %0, (%2) \n"                            \
        "       bez             %0, 1b   \n"                            \
                : "=&r" (tmp)                                           \
                : "r" (i), "r"(&v->counter)                             \
                : "memory");                                            \
}

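/*
 * atomic_<op>_return(): returns the new value and is fully ordered,
 * with smp_mb() before and after the exclusive loop.
 */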
#define ATOMIC_OP_RETURN(op, c_op)                                      \
static inline int atomic_##op##_return(int i, atomic_t *v)              \
{                                                                       \
        unsigned long tmp, ret;                                         \
                                                                        \
        smp_mb();                                                       \
        asm volatile (                                                  \
        "1:     ldex.w          %0, (%3) \n"                            \
        "       " #op "         %0, %2   \n"                            \
        "       mov             %1, %0   \n"                            \
        "       stex.w          %0, (%3) \n"                            \
        "       bez             %0, 1b   \n"                            \
                : "=&r" (tmp), "=&r" (ret)                              \
                : "r" (i), "r"(&v->counter)                             \
                : "memory");                                            \
        smp_mb();                                                       \
                                                                        \
        return ret;                                                     \
}

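/*
 * atomic_fetch_<op>(): fully ordered like the _return variant, but the
 * old value is saved (mov %1, %0) before the operation and returned.
 */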
#define ATOMIC_FETCH_OP(op, c_op)                                       \
static inline int atomic_fetch_##op(int i, atomic_t *v)                 \
{                                                                       \
        unsigned long tmp, ret;                                         \
                                                                        \
        smp_mb();                                                       \
        asm volatile (                                                  \
        "1:     ldex.w          %0, (%3) \n"                            \
        "       mov             %1, %0   \n"                            \
        "       " #op "         %0, %2   \n"                            \
        "       stex.w          %0, (%3) \n"                            \
        "       bez             %0, 1b   \n"                            \
                : "=&r" (tmp), "=&r" (ret)                              \
                : "r" (i), "r"(&v->counter)                             \
                : "memory");                                            \
        smp_mb();                                                       \
                                                                        \
        return ret;                                                     \
}

#else /* CONFIG_CPU_HAS_LDSTEX */

#include <linux/irqflags.h>

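/*
 * Fallback for cores without ldex/stex: the plain ldw/stw sequences
 * below are made atomic with respect to the local CPU by disabling
 * interrupts around the read-modify-write.
 */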
#define __atomic_add_unless __atomic_add_unless
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
        unsigned long tmp, ret, flags;

        raw_local_irq_save(flags);

        asm volatile (
        "       ldw             %0, (%3) \n"
        "       mov             %1, %0   \n"
        "       cmpne           %0, %4   \n"
        "       bf              2f       \n"
        "       add             %0, %2   \n"
        "       stw             %0, (%3) \n"
        "2:                              \n"
                : "=&r" (tmp), "=&r" (ret)
                : "r" (a), "r"(&v->counter), "r"(u)
                : "memory");

        raw_local_irq_restore(flags);

        return ret;
}

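/* IRQ-protected read-modify-write, no return value. */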
#define ATOMIC_OP(op, c_op)                                             \
static inline void atomic_##op(int i, atomic_t *v)                      \
{                                                                       \
        unsigned long tmp, flags;                                       \
                                                                        \
        raw_local_irq_save(flags);                                      \
                                                                        \
        asm volatile (                                                  \
        "       ldw             %0, (%2) \n"                            \
        "       " #op "         %0, %1   \n"                            \
        "       stw             %0, (%2) \n"                            \
                : "=&r" (tmp)                                           \
                : "r" (i), "r"(&v->counter)                             \
                : "memory");                                            \
                                                                        \
        raw_local_irq_restore(flags);                                   \
}

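/* IRQ-protected read-modify-write returning the new value. */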
#define ATOMIC_OP_RETURN(op, c_op)                                      \
static inline int atomic_##op##_return(int i, atomic_t *v)              \
{                                                                       \
        unsigned long tmp, ret, flags;                                  \
                                                                        \
        raw_local_irq_save(flags);                                      \
                                                                        \
        asm volatile (                                                  \
        "       ldw             %0, (%3) \n"                            \
        "       " #op "         %0, %2   \n"                            \
        "       stw             %0, (%3) \n"                            \
        "       mov             %1, %0   \n"                            \
                : "=&r" (tmp), "=&r" (ret)                              \
                : "r" (i), "r"(&v->counter)                             \
                : "memory");                                            \
                                                                        \
        raw_local_irq_restore(flags);                                   \
                                                                        \
        return ret;                                                     \
}

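/* IRQ-protected read-modify-write returning the old value. */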
#define ATOMIC_FETCH_OP(op, c_op)                                       \
static inline int atomic_fetch_##op(int i, atomic_t *v)                 \
{                                                                       \
        unsigned long tmp, ret, flags;                                  \
                                                                        \
        raw_local_irq_save(flags);                                      \
                                                                        \
        asm volatile (                                                  \
        "       ldw             %0, (%3) \n"                            \
        "       mov             %1, %0   \n"                            \
        "       " #op "         %0, %2   \n"                            \
        "       stw             %0, (%3) \n"                            \
                : "=&r" (tmp), "=&r" (ret)                              \
                : "r" (i), "r"(&v->counter)                             \
                : "memory");                                            \
                                                                        \
        raw_local_irq_restore(flags);                                   \
                                                                        \
        return ret;                                                     \
}

#endif /* CONFIG_CPU_HAS_LDSTEX */

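/*
 * Instantiate the operations.  The "#define atomic_foo atomic_foo"
 * markers let <asm-generic/atomic.h>, included below, see that these
 * ops are already provided and skip its generic versions.
 */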
#define atomic_add_return atomic_add_return
ATOMIC_OP_RETURN(add, +)
#define atomic_sub_return atomic_sub_return
ATOMIC_OP_RETURN(sub, -)

#define atomic_fetch_add atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#define atomic_fetch_sub atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#define atomic_fetch_and atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#define atomic_fetch_or atomic_fetch_or
ATOMIC_FETCH_OP(or, |)
#define atomic_fetch_xor atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)

#define atomic_and atomic_and
ATOMIC_OP(and, &)
#define atomic_or atomic_or
ATOMIC_OP(or, |)
#define atomic_xor atomic_xor
ATOMIC_OP(xor, ^)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

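/*
 * asm-generic/atomic.h supplies atomic_read()/atomic_set() and generic
 * fallbacks for anything not defined above.
 */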
#include <asm-generic/atomic.h>

#endif /* __ASM_CSKY_ATOMIC_H */