1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_POWERPC_FUTEX_H
3 #define _ASM_POWERPC_FUTEX_H
7 #include <linux/futex.h>
8 #include <linux/uaccess.h>
10 #include <asm/synch.h>
/*
 * __futex_atomic_op - one atomic read-modify-write on a user futex word.
 *
 * Expands to a lwarx/stwcx. reservation sequence around the caller-supplied
 * instruction @insn, bracketed by PPC_ATOMIC_ENTRY_BARRIER /
 * PPC_ATOMIC_EXIT_BARRIER so the access is ordered like other powerpc
 * atomics.  %0 (@oldval) receives the value loaded by lwarx; %1 is the
 * new value computed by @insn from %4 (@oparg); %2 is @uaddr ("b"
 * constraint: a base register, never r0).  The "i" (-EFAULT) operand and
 * the .fixup section route faults at labels 1:/2: into @ret.
 *
 * NOTE(review): this excerpt is incomplete -- the residual line numbers
 * jump 15->17->19->21->27, so the stwcx. retry branch, the fixup body and
 * the __EX_TABLE entries, plus the macro's closing lines, are not visible
 * here.  Also "__volatile" (rather than the usual "__volatile__") on the
 * asm line looks like extraction damage -- verify against the full file
 * before editing.
 */
12 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
13 __asm__ __volatile ( \
14 PPC_ATOMIC_ENTRY_BARRIER \
15 "1: lwarx %0,0,%2\n" \
17 "2: stwcx. %1,0,%2\n" \
19 PPC_ATOMIC_EXIT_BARRIER \
21 "3: .section .fixup,\"ax\"\n" \
27 : "=&r" (oldval), "=&r" (ret) \
28 : "b" (uaddr), "i" (-EFAULT), "r" (oparg) \
/*
 * arch_futex_atomic_op_inuser - perform futex operation @op with operand
 * @oparg on the user futex word, returning the previous value through
 * @oval.  Opens a user-access window with user_access_begin() before
 * touching user memory.
 *
 * NOTE(review): this excerpt elides most of the function (residual line
 * numbers jump 31->36->41 and 53->66): the rest of the parameter list,
 * the switch (op) scaffolding with its FUTEX_OP_* case labels, the
 * default/-ENOSYS path, user_access_end() and the return are all
 * missing.  Each __futex_atomic_op line below presumably corresponds to
 * one op case -- mr=SET, add=ADD, or=OR, andc=ANDN, xor=XOR -- confirm
 * against the full file.
 */
31 static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
36 if (!user_access_begin(uaddr, sizeof(u32)))
41 __futex_atomic_op("mr %1,%4\n", ret, oldval, uaddr, oparg);
44 __futex_atomic_op("add %1,%0,%4\n", ret, oldval, uaddr, oparg);
47 __futex_atomic_op("or %1,%0,%4\n", ret, oldval, uaddr, oparg);
50 __futex_atomic_op("andc %1,%0,%4\n", ret, oldval, uaddr, oparg);
53 __futex_atomic_op("xor %1,%0,%4\n", ret, oldval, uaddr, oparg);
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user futex
 * word: a lwarx/stwcx. reservation sequence (with entry/exit barriers)
 * loads the current value into %1 (prev) and, presumably after comparing
 * it against @oldval, attempts to store @newval.  The "i" (-EFAULT)
 * operand and .fixup section turn faults at the labelled accesses into
 * an -EFAULT result in ret.  user_access_begin() opens the user-access
 * window before the asm.
 *
 * NOTE(review): this excerpt is incomplete (residual numbers jump
 * 67->72, 77->82, 83->89, 90->100): the return-type line above the
 * function name, the cmpw/bne compare, the stwcx. retry branch, label
 * 2:, the fixup body, user_access_end() and the epilogue that writes
 * prev to *uval and returns ret are not visible here -- consult the full
 * file before modifying.
 */
66 futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
67 u32 oldval, u32 newval)
72 if (!user_access_begin(uaddr, sizeof(u32)))
75 __asm__ __volatile__ (
76 PPC_ATOMIC_ENTRY_BARRIER
77 "1: lwarx %1,0,%3 # futex_atomic_cmpxchg_inatomic\n\
82 PPC_ATOMIC_EXIT_BARRIER
83 "3: .section .fixup,\"ax\"\n\
89 : "+r" (ret), "=&r" (prev), "+m" (*uaddr)
90 : "r" (uaddr), "r" (oldval), "r" (newval), "i" (-EFAULT)
100 #endif /* __KERNEL__ */
101 #endif /* _ASM_POWERPC_FUTEX_H */