1 /* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
5 #ifndef __ASM_BARRIER_H
6 #define __ASM_BARRIER_H
/*
 * DBAR hint-immediate encoding:
 *
 * Bit4: ordering or completion (0: completion, 1: ordering)
 * Bit3: barrier for previous read (0: true, 1: false)
 * Bit2: barrier for previous write (0: true, 1: false)
 * Bit1: barrier for succeeding read (0: true, 1: false)
 * Bit0: barrier for succeeding write (0: true, 1: false)
 *
 * Hint 0x700: barrier for "read after read" from the same address
 */
20 #define DBAR(hint) __asm__ __volatile__("dbar %0 " : : "I"(hint) : "memory")
33 #define c_sync() DBAR(crwrw)
34 #define c_rsync() DBAR(cr_r_)
35 #define c_wsync() DBAR(c_w_w)
37 #define o_sync() DBAR(orwrw)
38 #define o_rsync() DBAR(or_r_)
39 #define o_wsync() DBAR(o_w_w)
41 #define ldacq_mb() DBAR(or_rw)
42 #define strel_mb() DBAR(orw_w)
45 #define rmb() c_rsync()
46 #define wmb() c_wsync()
47 #define iob() c_sync()
48 #define wbflush() c_sync()
50 #define __smp_mb() o_sync()
51 #define __smp_rmb() o_rsync()
52 #define __smp_wmb() o_wsync()
55 #define __WEAK_LLSC_MB " dbar 0x700 \n"
57 #define __WEAK_LLSC_MB " \n"
60 #define __smp_mb__before_atomic() barrier()
61 #define __smp_mb__after_atomic() barrier()
/*
 * array_index_mask_nospec() - generate a ~0 mask when index < size, 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 *
 * Returns:
 *     0 - (@index < @size)
 */
71 #define array_index_mask_nospec array_index_mask_nospec
72 static inline unsigned long array_index_mask_nospec(unsigned long index,
79 #if (__SIZEOF_LONG__ == 4)
80 "sub.w %0, $zero, %0\n\t"
81 #elif (__SIZEOF_LONG__ == 8)
82 "sub.d %0, $zero, %0\n\t"
85 : "r" (index), "r" (size)
91 #define __smp_load_acquire(p) \
93 typeof(*p) ___p1 = READ_ONCE(*p); \
94 compiletime_assert_atomic_type(*p); \
99 #define __smp_store_release(p, v) \
101 compiletime_assert_atomic_type(*p); \
106 #define __smp_store_mb(p, v) \
108 union { typeof(p) __val; char __c[1]; } __u = \
109 { .__val = (__force typeof(p)) (v) }; \
110 unsigned long __tmp; \
111 switch (sizeof(p)) { \
113 *(volatile __u8 *)&p = *(__u8 *)__u.__c; \
117 *(volatile __u16 *)&p = *(__u16 *)__u.__c; \
121 __asm__ __volatile__( \
122 "amswap_db.w %[tmp], %[val], %[mem] \n" \
123 : [mem] "+ZB" (*(u32 *)&p), [tmp] "=&r" (__tmp) \
124 : [val] "r" (*(__u32 *)__u.__c) \
128 __asm__ __volatile__( \
129 "amswap_db.d %[tmp], %[val], %[mem] \n" \
130 : [mem] "+ZB" (*(u64 *)&p), [tmp] "=&r" (__tmp) \
131 : [val] "r" (*(__u64 *)__u.__c) \
137 #include <asm-generic/barrier.h>
139 #endif /* __ASM_BARRIER_H */