/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__
/* The synchronize caches instruction ("sync") executes as a nop on
   systems in which all memory references are performed in order. */
#define synchronize_caches() __asm__ __volatile__ ("sync" : : : "memory")
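
/*
 * On SMP, the full barriers must actually emit "sync"; a UP kernel only
 * needs a compiler barrier, since a single CPU always observes its own
 * memory accesses in program order.
 */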
#if defined(CONFIG_SMP)
#define mb()		do { synchronize_caches(); } while (0)
#define rmb()		mb()
#define wmb()		mb()
#define dma_rmb()	mb()
#define dma_wmb()	mb()
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

#define __smp_mb()	mb()
#define __smp_rmb()	mb()
#define __smp_wmb()	mb()
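
/*
 * Release and acquire are implemented with ordered stores and loads
 * rather than a full "sync": the ",ma" (modify after) completer with a
 * zero displacement is used here as the encoding for an ordered access,
 * leaving the base register unchanged.
 */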
#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("stb,ma %0,0(%1)"				\
				: : "r"(*(__u8 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("sth,ma %0,0(%1)"				\
				: : "r"(*(__u16 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("stw,ma %0,0(%1)"				\
				: : "r"(*(__u32 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("std,ma %0,0(%1)"			\
				: : "r"(*(__u64 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
} while (0)
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("ldb,ma 0(%1),%0"				\
				: "=r"(*(__u8 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("ldh,ma 0(%1),%0"				\
				: "=r"(*(__u16 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("ldw,ma 0(%1),%0"				\
				: "=r"(*(__u32 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("ldd,ma 0(%1),%0"			\
				: "=r"(*(__u64 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
	__u.__val;							\
})
#include <asm-generic/barrier.h>
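
/*
 * Minimal usage sketch (hypothetical "data"/"ready" variables, not part
 * of this header):
 *
 *	producer:			consumer:
 *	WRITE_ONCE(data, 42);		while (!smp_load_acquire(&ready))
 *	smp_store_release(&ready, 1);		cpu_relax();
 *					val = READ_ONCE(data);
 *
 * The release orders the store to data before the store to ready; the
 * acquire orders the load of ready before the load of data, so the
 * consumer is guaranteed to read 42.
 */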
#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */