2 * This file provides wrappers with KASAN instrumentation for atomic operations.
3 * To use this functionality an arch's atomic.h file needs to define all
4 * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include
5 * this file at the end. This file provides atomic_read() that forwards to
6 * arch_atomic_read() for actual atomic operation.
7 * Note: if an arch atomic operation is implemented by means of other atomic
8 * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
9 * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
10 * double instrumentation.
13 #ifndef _LINUX_ATOMIC_INSTRUMENTED_H
14 #define _LINUX_ATOMIC_INSTRUMENTED_H
16 #include <linux/build_bug.h>
17 #include <linux/kasan-checks.h>
19 static __always_inline int atomic_read(const atomic_t *v)
21 kasan_check_read(v, sizeof(*v));
22 return arch_atomic_read(v);
25 static __always_inline s64 atomic64_read(const atomic64_t *v)
27 kasan_check_read(v, sizeof(*v));
28 return arch_atomic64_read(v);
31 static __always_inline void atomic_set(atomic_t *v, int i)
33 kasan_check_write(v, sizeof(*v));
34 arch_atomic_set(v, i);
37 static __always_inline void atomic64_set(atomic64_t *v, s64 i)
39 kasan_check_write(v, sizeof(*v));
40 arch_atomic64_set(v, i);
43 static __always_inline int atomic_xchg(atomic_t *v, int i)
45 kasan_check_write(v, sizeof(*v));
46 return arch_atomic_xchg(v, i);
49 static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
51 kasan_check_write(v, sizeof(*v));
52 return arch_atomic64_xchg(v, i);
55 static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
57 kasan_check_write(v, sizeof(*v));
58 return arch_atomic_cmpxchg(v, old, new);
61 static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
63 kasan_check_write(v, sizeof(*v));
64 return arch_atomic64_cmpxchg(v, old, new);
#ifdef arch_atomic_try_cmpxchg
#define atomic_try_cmpxchg atomic_try_cmpxchg
/*
 * Instrumented atomic_try_cmpxchg(): *v may be written, *old is read
 * (and updated by the arch op on failure), so check both.
 */
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#endif
#ifdef arch_atomic64_try_cmpxchg
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
/*
 * Instrumented atomic64_try_cmpxchg(): check the write to *v and the
 * read of the expected value *old before forwarding to the arch op.
 */
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#endif
#ifdef arch_atomic_fetch_add_unless
#define atomic_fetch_add_unless atomic_fetch_add_unless
/* Instrumented atomic_fetch_add_unless(): conditional add writes *v. */
static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#endif
#ifdef arch_atomic64_fetch_add_unless
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
/* Instrumented atomic64_fetch_add_unless(): conditional add writes *v. */
static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#endif
#ifdef arch_atomic_inc
#define atomic_inc atomic_inc
/*
 * Instrumented atomic_inc(): check the write to *v, then perform the
 * increment.  Bug fix: the wrapper performed the KASAN check but never
 * called arch_atomic_inc(), so the increment was silently dropped
 * (compare the atomic64_inc() wrapper below, which does forward).
 */
static __always_inline void atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#endif
#ifdef arch_atomic64_inc
#define atomic64_inc atomic64_inc
/* Instrumented atomic64_inc(): check the write, then increment via arch. */
static __always_inline void atomic64_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#endif
#ifdef arch_atomic_dec
#define atomic_dec atomic_dec
/*
 * Instrumented atomic_dec(): check the write to *v, then perform the
 * decrement.  Bug fix: the wrapper checked the access but never called
 * arch_atomic_dec(), so the decrement was silently dropped (compare the
 * atomic64_dec() wrapper, which does forward).
 */
static __always_inline void atomic_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#endif
#ifdef arch_atomic64_dec
#define atomic64_dec atomic64_dec
/*
 * Instrumented atomic64_dec(): check the write, then decrement via arch.
 * Bug fixes: the #ifdef guard was misspelled "atch_atomic64_dec", so this
 * wrapper could never be compiled in, and the "#define atomic64_dec
 * atomic64_dec" alias (used by the generic fallback machinery to detect
 * that the op is provided) was missing.
 */
static __always_inline void atomic64_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#endif
141 static __always_inline void atomic_add(int i, atomic_t *v)
143 kasan_check_write(v, sizeof(*v));
144 arch_atomic_add(i, v);
147 static __always_inline void atomic64_add(s64 i, atomic64_t *v)
149 kasan_check_write(v, sizeof(*v));
150 arch_atomic64_add(i, v);
153 static __always_inline void atomic_sub(int i, atomic_t *v)
155 kasan_check_write(v, sizeof(*v));
156 arch_atomic_sub(i, v);
159 static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
161 kasan_check_write(v, sizeof(*v));
162 arch_atomic64_sub(i, v);
165 static __always_inline void atomic_and(int i, atomic_t *v)
167 kasan_check_write(v, sizeof(*v));
168 arch_atomic_and(i, v);
171 static __always_inline void atomic64_and(s64 i, atomic64_t *v)
173 kasan_check_write(v, sizeof(*v));
174 arch_atomic64_and(i, v);
177 static __always_inline void atomic_or(int i, atomic_t *v)
179 kasan_check_write(v, sizeof(*v));
180 arch_atomic_or(i, v);
183 static __always_inline void atomic64_or(s64 i, atomic64_t *v)
185 kasan_check_write(v, sizeof(*v));
186 arch_atomic64_or(i, v);
189 static __always_inline void atomic_xor(int i, atomic_t *v)
191 kasan_check_write(v, sizeof(*v));
192 arch_atomic_xor(i, v);
195 static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
197 kasan_check_write(v, sizeof(*v));
198 arch_atomic64_xor(i, v);
#ifdef arch_atomic_inc_return
#define atomic_inc_return atomic_inc_return
/* Instrumented atomic_inc_return(): check the write, increment, return new. */
static __always_inline int atomic_inc_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#endif
#ifdef arch_atomic64_inc_return
#define atomic64_inc_return atomic64_inc_return
/*
 * Instrumented atomic64_inc_return(): check the write, increment, return new.
 * Bug fix: the #ifdef guard was misspelled "arch_atomic64_in_return", so
 * this wrapper was never defined even when the arch provides the operation.
 */
static __always_inline s64 atomic64_inc_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#endif
#ifdef arch_atomic_dec_return
#define atomic_dec_return atomic_dec_return
/* Instrumented atomic_dec_return(): check the write, decrement, return new. */
static __always_inline int atomic_dec_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#endif
#ifdef arch_atomic64_dec_return
#define atomic64_dec_return atomic64_dec_return
/* Instrumented atomic64_dec_return(): check the write, decrement, return new. */
static __always_inline s64 atomic64_dec_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#endif
#ifdef arch_atomic64_inc_not_zero
#define atomic64_inc_not_zero atomic64_inc_not_zero
/* Instrumented atomic64_inc_not_zero(): conditional increment writes *v. */
static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#endif
#ifdef arch_atomic64_dec_if_positive
#define atomic64_dec_if_positive atomic64_dec_if_positive
/* Instrumented atomic64_dec_if_positive(): conditional decrement writes *v. */
static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#endif
#ifdef arch_atomic_dec_and_test
#define atomic_dec_and_test atomic_dec_and_test
/* Instrumented atomic_dec_and_test(): decrement writes *v; check first. */
static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#endif
#ifdef arch_atomic64_dec_and_test
#define atomic64_dec_and_test atomic64_dec_and_test
/* Instrumented atomic64_dec_and_test(): decrement writes *v; check first. */
static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#endif
#ifdef arch_atomic_inc_and_test
#define atomic_inc_and_test atomic_inc_and_test
/* Instrumented atomic_inc_and_test(): increment writes *v; check first. */
static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#endif
#ifdef arch_atomic64_inc_and_test
#define atomic64_inc_and_test atomic64_inc_and_test
/* Instrumented atomic64_inc_and_test(): increment writes *v; check first. */
static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#endif
291 static __always_inline int atomic_add_return(int i, atomic_t *v)
293 kasan_check_write(v, sizeof(*v));
294 return arch_atomic_add_return(i, v);
297 static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
299 kasan_check_write(v, sizeof(*v));
300 return arch_atomic64_add_return(i, v);
303 static __always_inline int atomic_sub_return(int i, atomic_t *v)
305 kasan_check_write(v, sizeof(*v));
306 return arch_atomic_sub_return(i, v);
309 static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
311 kasan_check_write(v, sizeof(*v));
312 return arch_atomic64_sub_return(i, v);
315 static __always_inline int atomic_fetch_add(int i, atomic_t *v)
317 kasan_check_write(v, sizeof(*v));
318 return arch_atomic_fetch_add(i, v);
321 static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
323 kasan_check_write(v, sizeof(*v));
324 return arch_atomic64_fetch_add(i, v);
327 static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
329 kasan_check_write(v, sizeof(*v));
330 return arch_atomic_fetch_sub(i, v);
333 static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
335 kasan_check_write(v, sizeof(*v));
336 return arch_atomic64_fetch_sub(i, v);
339 static __always_inline int atomic_fetch_and(int i, atomic_t *v)
341 kasan_check_write(v, sizeof(*v));
342 return arch_atomic_fetch_and(i, v);
345 static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
347 kasan_check_write(v, sizeof(*v));
348 return arch_atomic64_fetch_and(i, v);
351 static __always_inline int atomic_fetch_or(int i, atomic_t *v)
353 kasan_check_write(v, sizeof(*v));
354 return arch_atomic_fetch_or(i, v);
357 static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
359 kasan_check_write(v, sizeof(*v));
360 return arch_atomic64_fetch_or(i, v);
363 static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
365 kasan_check_write(v, sizeof(*v));
366 return arch_atomic_fetch_xor(i, v);
369 static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
371 kasan_check_write(v, sizeof(*v));
372 return arch_atomic64_fetch_xor(i, v);
#ifdef arch_atomic_sub_and_test
#define atomic_sub_and_test atomic_sub_and_test
/* Instrumented atomic_sub_and_test(): subtraction writes *v; check first. */
static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#endif
#ifdef arch_atomic64_sub_and_test
#define atomic64_sub_and_test atomic64_sub_and_test
/* Instrumented atomic64_sub_and_test(): subtraction writes *v; check first. */
static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#endif
#ifdef arch_atomic_add_negative
#define atomic_add_negative atomic_add_negative
/* Instrumented atomic_add_negative(): the add writes *v; check first. */
static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#endif
#ifdef arch_atomic64_add_negative
#define atomic64_add_negative atomic64_add_negative
/* Instrumented atomic64_add_negative(): the add writes *v; check first. */
static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#endif
/* Instrumented xchg(): evaluate ptr once, check the write, then arch_xchg(). */
#define xchg(ptr, new)							\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_xchg(__ai_ptr, (new));					\
})
/* Instrumented cmpxchg(): evaluate ptr once, check the write, then arch op. */
#define cmpxchg(ptr, old, new)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_cmpxchg(__ai_ptr, (old), (new));				\
})
/* Instrumented sync_cmpxchg(): check the write, then arch_sync_cmpxchg(). */
#define sync_cmpxchg(ptr, old, new)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_sync_cmpxchg(__ai_ptr, (old), (new));			\
})
/* Instrumented cmpxchg_local(): check the write, then arch_cmpxchg_local(). */
#define cmpxchg_local(ptr, old, new)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_cmpxchg_local(__ai_ptr, (old), (new));			\
})
/* Instrumented cmpxchg64(): check the write, then arch_cmpxchg64(). */
#define cmpxchg64(ptr, old, new)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_cmpxchg64(__ai_ptr, (old), (new));				\
})
/* Instrumented cmpxchg64_local(): check the write, then arch op. */
#define cmpxchg64_local(ptr, old, new)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_cmpxchg64_local(__ai_ptr, (old), (new));			\
})
/*
 * Instrumented cmpxchg_double(): the op covers two adjacent words starting
 * at p1, hence the 2 * sizeof(*__ai_p1) check span.
 */
#define cmpxchg_double(p1, p2, o1, o2, n1, n2)				\
({									\
	typeof(p1) __ai_p1 = (p1);					\
	kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1));		\
	arch_cmpxchg_double(__ai_p1, (p2), (o1), (o2), (n1), (n2));	\
})
/*
 * Instrumented cmpxchg_double_local(): same double-word check span as
 * cmpxchg_double(), forwarding to the _local arch variant.
 */
#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2)			\
({									\
	typeof(p1) __ai_p1 = (p1);					\
	kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1));		\
	arch_cmpxchg_double_local(__ai_p1, (p2), (o1), (o2), (n1), (n2)); \
})
467 #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */