1 #ifndef _ASM_X86_FUTEX_H
2 #define _ASM_X86_FUTEX_H
6 #include <linux/futex.h>
7 #include <linux/uaccess.h>
10 #include <asm/errno.h>
11 #include <asm/processor.h>
/*
 * Run a single-instruction atomic read-modify-write ("insn", e.g. "xchgl"
 * or LOCK_PREFIX "xaddl") on the user-space word *uaddr.
 *
 * ASM_STAC/ASM_CLAC open and close the user-access window (SMAP).  If the
 * access at label 1 faults, the exception-table entry routes control to
 * the .fixup stub at label 3, which stores -EFAULT (operand %3) into
 * "ret" (operand %1) and jumps back past the access to label 2.  On
 * success "ret" is 0 (preloaded via the "1" (0) input constraint) and
 * "oldval" holds the word's previous value.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\t" insn "\n"				\
		     "2:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0))
/*
 * Atomic read-modify-write on the user word *uaddr for operations that
 * are not a single x86 instruction: load the old value (label 1), copy it
 * into the scratch register "tem" and apply "insn" to compute the new
 * value, then try to install it with lock cmpxchgl (label 2), retrying
 * from the load if another CPU changed the word in between (jnz 1b).
 *
 * Either user access may fault; both exception-table entries route to the
 * .fixup stub at label 4, which stores -EFAULT (operand %5) into "ret"
 * and resumes after the CLAC at label 3.  "oldval" must live in %eax for
 * cmpxchgl, hence the "=&a" constraint; "tem" must be a local int in the
 * caller's scope.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\tmovl\t%2, %0\n"			\
		     "\tmovl\t%0, %3\n"				\
		     "\t" insn "\n"				\
		     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t1b\n"				\
		     "3:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "4:\tmov\t%5, %1\n"			\
		     "\tjmp\t3b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 4b)			\
		     _ASM_EXTABLE(2b, 4b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0))
44 static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
47 int oldval = 0, ret, tem;
53 __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
56 __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
60 __futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
63 __futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
66 __futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
80 static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
81 u32 oldval, u32 newval)
83 return user_atomic_cmpxchg_inatomic(uval, uaddr, oldval, newval);
87 #endif /* _ASM_X86_FUTEX_H */