GNU Linux-libre 5.4.274-gnu1
arch/arm64/include/asm/barrier.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>

#define __nops(n)       ".rept  " #n "\nnop\n.endr\n"
#define nops(n)         asm volatile(__nops(n))

#define sev()           asm volatile("sev" : : : "memory")
#define wfe()           asm volatile("wfe" : : : "memory")
#define wfi()           asm volatile("wfi" : : : "memory")

#define isb()           asm volatile("isb" : : : "memory")
#define dmb(opt)        asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)        asm volatile("dsb " #opt : : : "memory")

#define psb_csync()     asm volatile("hint #17" : : : "memory")
#define csdb()          asm volatile("hint #20" : : : "memory")

#define spec_bar()      asm volatile(ALTERNATIVE("dsb nsh\nisb\n",              \
                                                 SB_BARRIER_INSN"nop\n",        \
                                                 ARM64_HAS_SB))

#define mb()            dsb(sy)
#define rmb()           dsb(ld)
#define wmb()           dsb(st)

#define dma_rmb()       dmb(oshld)
#define dma_wmb()       dmb(oshst)

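/*
 * Illustrative sketch only (not part of the original header): dma_wmb()
 * orders CPU writes to coherent DMA memory before the write that hands a
 * descriptor to the device, and dma_rmb() orders the read of the device's
 * ownership flag before reads of the descriptor contents. The struct and
 * field names below are hypothetical.
 */
#if 0	/* usage sketch */
struct example_desc {
	u32 addr;
	u32 len;
	u32 status;		/* bit 31: owned by device */
};

static void example_post_to_device(struct example_desc *desc, u32 addr, u32 len)
{
	desc->addr = addr;
	desc->len = len;
	/* Make the payload visible before the device can observe OWN. */
	dma_wmb();
	desc->status = 1U << 31;
}
#endif
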
/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
                                                    unsigned long sz)
{
        unsigned long mask;

        asm volatile(
        "       cmp     %1, %2\n"
        "       sbc     %0, xzr, xzr\n"
        : "=r" (mask)
        : "r" (idx), "Ir" (sz)
        : "cc");

        csdb();
        return mask;
}
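
/*
 * Illustrative sketch only (not part of the original header): the mask is
 * consumed via the generic array_index_nospec() helper in <linux/nospec.h>,
 * roughly as below. The function and variable names are hypothetical.
 */
#if 0	/* usage sketch */
#include <linux/nospec.h>

static int example_read_entry(const int *table, unsigned long nr,
			      unsigned long idx)
{
	if (idx >= nr)
		return 0;
	/* array_index_nospec() clamps idx to [0, nr) even under speculation. */
	idx = array_index_nospec(idx, nr);
	return table[idx];
}
#endif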

/*
 * Ensure that reads of the counter are treated the same as memory reads
 * for the purposes of ordering by subsequent memory barriers.
 *
 * This insanity brought to you by speculative system register reads,
 * out-of-order memory accesses, sequence locks and Thomas Gleixner.
 *
 * http://lists.infradead.org/pipermail/linux-arm-kernel/2019-February/631195.html
 */
#define arch_counter_enforce_ordering(val) do {                         \
        u64 tmp, _val = (val);                                          \
                                                                        \
        asm volatile(                                                   \
        "       eor     %0, %1, %1\n"                                   \
        "       add     %0, sp, %0\n"                                   \
        "       ldr     xzr, [%0]"                                      \
        : "=r" (tmp) : "r" (_val));                                     \
} while (0)
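
/*
 * Illustrative sketch only (not part of the original header): the generic
 * timer code uses this roughly as below, feeding the counter value into a
 * dummy load so that subsequent memory barriers (e.g. the smp_rmb() in a
 * seqcount read section) order the CNTVCT_EL0 read like a normal memory
 * read. The function name is hypothetical; see asm/arch_timer.h for the
 * real helpers.
 */
#if 0	/* usage sketch */
static inline u64 example_read_cntvct(void)
{
	u64 cnt;

	isb();					/* don't speculate the sysreg read */
	asm volatile("mrs %0, cntvct_el0" : "=r" (cnt));
	arch_counter_enforce_ordering(cnt);	/* order it like a memory read */
	return cnt;
}
#endif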

#define __smp_mb()      dmb(ish)
#define __smp_rmb()     dmb(ishld)
#define __smp_wmb()     dmb(ishst)

#define __smp_store_release(p, v)                                       \
do {                                                                    \
        typeof(p) __p = (p);                                            \
        union { typeof(*p) __val; char __c[1]; } __u =                  \
                { .__val = (__force typeof(*p)) (v) };                  \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_write(__p, sizeof(*p));                             \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("stlrb %w1, %0"                           \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u8 *)__u.__c)                \
                                : "memory");                            \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("stlrh %w1, %0"                           \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u16 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("stlr %w1, %0"                            \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u32 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("stlr %1, %0"                             \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u64 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        }                                                               \
} while (0)

#define __smp_load_acquire(p)                                           \
({                                                                      \
        union { typeof(*p) __val; char __c[1]; } __u;                   \
        typeof(p) __p = (p);                                            \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_read(__p, sizeof(*p));                              \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("ldarb %w0, %1"                           \
                        : "=r" (*(__u8 *)__u.__c)                       \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("ldarh %w0, %1"                           \
                        : "=r" (*(__u16 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("ldar %w0, %1"                            \
                        : "=r" (*(__u32 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("ldar %0, %1"                             \
                        : "=r" (*(__u64 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        }                                                               \
        __u.__val;                                                      \
})
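
/*
 * Illustrative sketch only (not part of the original header): the
 * smp_store_release()/smp_load_acquire() wrappers built on the STLR/LDAR
 * sequences above are typically used for one-way message passing, e.g.
 * publishing data behind a flag. Variable names below are hypothetical.
 */
#if 0	/* usage sketch */
static int example_data;
static int example_ready;

static void example_producer(int v)
{
	example_data = v;
	/* Order the data write before the flag becomes visible. */
	smp_store_release(&example_ready, 1);
}

static int example_consumer(void)
{
	/* A reader that observes the flag also observes the data. */
	if (smp_load_acquire(&example_ready))
		return example_data;
	return -1;
}
#endif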

#define smp_cond_load_relaxed(ptr, cond_expr)                           \
({                                                                      \
        typeof(ptr) __PTR = (ptr);                                      \
        typeof(*ptr) VAL;                                               \
        for (;;) {                                                      \
                VAL = READ_ONCE(*__PTR);                                \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
        VAL;                                                            \
})

#define smp_cond_load_acquire(ptr, cond_expr)                           \
({                                                                      \
        typeof(ptr) __PTR = (ptr);                                      \
        typeof(*ptr) VAL;                                               \
        for (;;) {                                                      \
                VAL = smp_load_acquire(__PTR);                          \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
        VAL;                                                            \
})
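
/*
 * Illustrative sketch only (not part of the original header):
 * smp_cond_load_acquire() polls the loaded value (parking in WFE via the
 * exclusive-monitor-based __cmpwait_relaxed() between polls) until the
 * condition on VAL holds, then returns the value with acquire semantics.
 * The flag below is hypothetical.
 */
#if 0	/* usage sketch */
static int example_flag;

static void example_wait_for_flag(void)
{
	/* Wait in WFE between polls instead of busy-spinning. */
	smp_cond_load_acquire(&example_flag, VAL != 0);
}
#endif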

#include <asm-generic/barrier.h>

#endif  /* __ASSEMBLY__ */

#endif  /* __ASM_BARRIER_H */