/*
 * Copyright (C) 2013 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H
#include <asm/alternative.h>
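/*
 * The per-CPU offset lives in TPIDR_EL1, or in TPIDR_EL2 when the kernel
 * runs at EL2 with the Virtualization Host Extensions; ALTERNATIVE()
 * patches in the right system register at boot based on the CPU feature.
 */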
static inline void set_my_cpu_offset(unsigned long off)
{
	asm volatile(ALTERNATIVE("msr tpidr_el1, %0",
				 "msr tpidr_el2, %0",
				 ARM64_HAS_VIRT_HOST_EXTN)
			:: "r" (off) : "memory");
}
static inline unsigned long __my_cpu_offset(void)
{
	unsigned long off;

	/*
	 * We want to allow caching the value, so avoid using volatile and
	 * instead use a fake stack read to hazard against barrier().
	 */
	asm(ALTERNATIVE("mrs %0, tpidr_el1",
			"mrs %0, tpidr_el2",
			ARM64_HAS_VIRT_HOST_EXTN)
		: "=r" (off) :
		"Q" (*(const unsigned long *)current_stack_pointer));

	return off;
}
#define __my_cpu_offset __my_cpu_offset()
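/*
 * PERCPU_OP() generates __percpu_add/and/or: a load-exclusive/
 * store-exclusive retry loop that applies asm_op to the per-CPU
 * variable and returns the new value. The caller must have disabled
 * preemption so that the exclusive pair runs on one CPU.
 */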
#define PERCPU_OP(op, asm_op)						\
static inline unsigned long __percpu_##op(void *ptr,			\
			unsigned long val, int size)			\
{									\
	unsigned long loop, ret;					\
									\
	switch (size) {							\
	case 1:								\
		asm ("//__per_cpu_" #op "_1\n"				\
		"1:	ldxrb	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
		"	stxrb	  %w[loop], %w[ret], %[ptr]\n"		\
		"	cbnz	  %w[loop], 1b"				\
		: [loop] "=&r" (loop), [ret] "=&r" (ret),		\
		  [ptr] "+Q"(*(u8 *)ptr)				\
		: [val] "Ir" (val));					\
		break;							\
	case 2:								\
		asm ("//__per_cpu_" #op "_2\n"				\
		"1:	ldxrh	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
		"	stxrh	  %w[loop], %w[ret], %[ptr]\n"		\
		"	cbnz	  %w[loop], 1b"				\
		: [loop] "=&r" (loop), [ret] "=&r" (ret),		\
		  [ptr] "+Q"(*(u16 *)ptr)				\
		: [val] "Ir" (val));					\
		break;							\
	case 4:								\
		asm ("//__per_cpu_" #op "_4\n"				\
		"1:	ldxr	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
		"	stxr	  %w[loop], %w[ret], %[ptr]\n"		\
		"	cbnz	  %w[loop], 1b"				\
		: [loop] "=&r" (loop), [ret] "=&r" (ret),		\
		  [ptr] "+Q"(*(u32 *)ptr)				\
		: [val] "Ir" (val));					\
		break;							\
	case 8:								\
		asm ("//__per_cpu_" #op "_8\n"				\
		"1:	ldxr	  %[ret], %[ptr]\n"			\
			#asm_op " %[ret], %[ret], %[val]\n"		\
		"	stxr	  %w[loop], %[ret], %[ptr]\n"		\
		"	cbnz	  %w[loop], 1b"				\
		: [loop] "=&r" (loop), [ret] "=&r" (ret),		\
		  [ptr] "+Q"(*(u64 *)ptr)				\
		: [val] "Ir" (val));					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
									\
	return ret;							\
}

PERCPU_OP(add, add)
PERCPU_OP(and, and)
PERCPU_OP(or, orr)
#undef PERCPU_OP
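/*
 * Plain per-CPU reads and writes only need single-copy atomicity:
 * ACCESS_ONCE() forces exactly one access of the right width and stops
 * the compiler from tearing, caching or re-reading the value.
 */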
static inline unsigned long __percpu_read(void *ptr, int size)
{
	unsigned long ret;

	switch (size) {
	case 1:
		ret = ACCESS_ONCE(*(u8 *)ptr);
		break;
	case 2:
		ret = ACCESS_ONCE(*(u16 *)ptr);
		break;
	case 4:
		ret = ACCESS_ONCE(*(u32 *)ptr);
		break;
	case 8:
		ret = ACCESS_ONCE(*(u64 *)ptr);
		break;
	default:
		BUILD_BUG();
	}

	return ret;
}
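/* Mirror image of __percpu_read(): one plain store of the right width. */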
static inline void __percpu_write(void *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
		ACCESS_ONCE(*(u8 *)ptr) = (u8)val;
		break;
	case 2:
		ACCESS_ONCE(*(u16 *)ptr) = (u16)val;
		break;
	case 4:
		ACCESS_ONCE(*(u32 *)ptr) = (u32)val;
		break;
	case 8:
		ACCESS_ONCE(*(u64 *)ptr) = (u64)val;
		break;
	default:
		BUILD_BUG();
	}
}
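/*
 * Exchange: load-exclusive the old value, store-exclusive the new one,
 * retrying until the store succeeds. Like the other this_cpu ops, this
 * implies no memory barrier; the caller has disabled preemption.
 */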
static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
						int size)
{
	unsigned long ret, loop;

	switch (size) {
	case 1:
		asm ("//__percpu_xchg_1\n"
		"1:	ldxrb	%w[ret], %[ptr]\n"
		"	stxrb	%w[loop], %w[val], %[ptr]\n"
		"	cbnz	%w[loop], 1b"
		: [loop] "=&r"(loop), [ret] "=&r"(ret),
		  [ptr] "+Q"(*(u8 *)ptr)
		: [val] "r" (val));
		break;
	case 2:
		asm ("//__percpu_xchg_2\n"
		"1:	ldxrh	%w[ret], %[ptr]\n"
		"	stxrh	%w[loop], %w[val], %[ptr]\n"
		"	cbnz	%w[loop], 1b"
		: [loop] "=&r"(loop), [ret] "=&r"(ret),
		  [ptr] "+Q"(*(u16 *)ptr)
		: [val] "r" (val));
		break;
	case 4:
		asm ("//__percpu_xchg_4\n"
		"1:	ldxr	%w[ret], %[ptr]\n"
		"	stxr	%w[loop], %w[val], %[ptr]\n"
		"	cbnz	%w[loop], 1b"
		: [loop] "=&r"(loop), [ret] "=&r"(ret),
		  [ptr] "+Q"(*(u32 *)ptr)
		: [val] "r" (val));
		break;
	case 8:
		asm ("//__percpu_xchg_8\n"
		"1:	ldxr	%[ret], %[ptr]\n"
		"	stxr	%w[loop], %[val], %[ptr]\n"
		"	cbnz	%w[loop], 1b"
		: [loop] "=&r"(loop), [ret] "=&r"(ret),
		  [ptr] "+Q"(*(u64 *)ptr)
		: [val] "r" (val));
		break;
	default:
		BUILD_BUG();
	}

	return ret;
}
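/*
 * The _notrace preempt accessors keep these helpers safe to call from
 * the function tracer, which itself uses per-CPU data underneath.
 */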
#define _percpu_read(pcp)						\
({									\
	typeof(pcp) __retval;						\
	preempt_disable_notrace();					\
	__retval = (typeof(pcp))__percpu_read(raw_cpu_ptr(&(pcp)),	\
					      sizeof(pcp));		\
	preempt_enable_notrace();					\
	__retval;							\
})
#define _percpu_write(pcp, val)						\
do {									\
	preempt_disable_notrace();					\
	__percpu_write(raw_cpu_ptr(&(pcp)), (unsigned long)(val),	\
				sizeof(pcp));				\
	preempt_enable_notrace();					\
} while(0)
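/*
 * _pcp_protect() disables preemption around a read-modify-write so that
 * raw_cpu_ptr() and the operation both target the same CPU's copy of
 * the variable.
 */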
#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable();					\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					  (val), sizeof(pcp));	\
	preempt_enable();					\
	__retval;						\
})
#define _percpu_add(pcp, val) \
	_pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
	_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
	_pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) (typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val))
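/*
 * The generic this_cpu_*() wrappers pick the _1/_2/_4/_8 variant below
 * based on sizeof(pcp). Hypothetical usage:
 *
 *	DEFINE_PER_CPU(u32, counter);
 *	this_cpu_add(counter, 1);	// dispatches to this_cpu_add_4()
 */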
#define this_cpu_add_1(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_2(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_1(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_2(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_1(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_2(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_1(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_2(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)
#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */