[linux] arch/loongarch/include/asm/futex.h — LoongArch futex atomic operations
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
4  */
5 #ifndef _ASM_FUTEX_H
6 #define _ASM_FUTEX_H
7
8 #include <linux/futex.h>
9 #include <linux/uaccess.h>
10 #include <asm/asm-extable.h>
11 #include <asm/barrier.h>
12 #include <asm/errno.h>
13
/*
 * __futex_atomic_op() - atomically read-modify-write a 32-bit user futex word.
 *
 * Emits an LL/SC retry loop: "1: ll.w" loads the futex word at @uaddr into
 * @oldval (%1), @insn computes the new value into $t0 (from %1 and/or the
 * operand %z5), and "2: sc.w" attempts the conditional store, branching back
 * to 1: whenever the store-conditional fails.
 *
 * Both the load (1:) and the store (2:) can fault on the user address; the
 * two extable entries redirect such faults to label 3: and write an error
 * code (-EFAULT) into @ret (%0) — see asm/asm-extable.h. @ret is pre-set to
 * 0 on entry via the "0" (0) input constraint, so it stays 0 on success.
 *
 * $t0 is clobbered, so @insn must use $t0 as its destination register.
 * "ZC" is the LL/SC-compatible memory-operand constraint for *@uaddr.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)              \
{                                                                       \
        __asm__ __volatile__(                                           \
        "1:     ll.w    %1, %4 # __futex_atomic_op\n"           \
        "       " insn  "                               \n"     \
        "2:     sc.w    $t0, %2                         \n"     \
        "       beqz    $t0, 1b                         \n"     \
        "3:                                             \n"     \
        _ASM_EXTABLE_UACCESS_ERR(1b, 3b, %0)                    \
        _ASM_EXTABLE_UACCESS_ERR(2b, 3b, %0)                    \
        : "=r" (ret), "=&r" (oldval),                           \
          "=ZC" (*uaddr)                                        \
        : "0" (0), "ZC" (*uaddr), "Jr" (oparg)                  \
        : "memory", "t0");                                      \
}
29
/*
 * arch_futex_atomic_op_inuser() - perform a futex operation on a user word.
 * @op:    one of FUTEX_OP_{SET,ADD,OR,ANDN,XOR}.
 * @oparg: operand for the operation.
 * @oval:  output; receives the previous value of the word, written only
 *         when the operation succeeded (ret == 0).
 * @uaddr: user-space address of the 32-bit futex word.
 *
 * Runs with page faults disabled, so a fault inside the LL/SC sequence is
 * reported as -EFAULT by the extable fixup rather than being serviced;
 * presumably the generic futex code then faults the page in and retries —
 * standard futex contract, not visible from this file.
 *
 * Returns 0 on success, -EFAULT on a user-access fault, -ENOSYS for an
 * unknown @op.
 */
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
        int oldval = 0, ret = 0;

        pagefault_disable();

        switch (op) {
        case FUTEX_OP_SET:
                /* new = oparg */
                __futex_atomic_op("move $t0, %z5", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                /* new = old + oparg */
                __futex_atomic_op("add.w $t0, %1, %z5", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                /* new = old | oparg */
                __futex_atomic_op("or   $t0, %1, %z5", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                /* new = old & ~oparg: the inversion is applied to the operand
                 * here, so plain AND suffices in the asm. */
                __futex_atomic_op("and  $t0, %1, %z5", ret, oldval, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                /* new = old ^ oparg */
                __futex_atomic_op("xor  $t0, %1, %z5", ret, oldval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();

        if (!ret)
                *oval = oldval;

        return ret;
}
64
/*
 * futex_atomic_cmpxchg_inatomic() - compare-and-exchange on a user futex word.
 * @uval:   output; receives the value actually read from @uaddr.
 * @uaddr:  user-space address of the 32-bit futex word.
 * @oldval: expected current value.
 * @newval: value to store iff *@uaddr == @oldval.
 *
 * LL/SC loop: ll.w reads the word into %1; if it differs from @oldval the
 * code branches to 3: without storing (the caller detects the failed
 * comparison by checking *@uval against @oldval); otherwise sc.w attempts
 * the store of @newval and the loop retries from 1: on SC failure.
 *
 * __WEAK_LLSC_MB after the sequence provides the ordering required on
 * weakly-ordered LL/SC implementations — see asm/barrier.h for the exact
 * barrier emitted.
 *
 * A fault at the load (1:) or store (2:) is redirected to 3: by the
 * extable entries, which set @ret (%0, "+r", initialized to 0) to -EFAULT.
 * Note val is initialized to 0 so *@uval is defined even on the fault path.
 *
 * Returns 0 on success (including a failed comparison), -EFAULT on fault.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval, u32 newval)
{
        int ret = 0;
        u32 val = 0;

        /* Unlike the op_inuser path, the user range is checked here. */
        if (!access_ok(uaddr, sizeof(u32)))
                return -EFAULT;

        __asm__ __volatile__(
        "# futex_atomic_cmpxchg_inatomic                        \n"
        "1:     ll.w    %1, %3                                  \n"
        "       bne     %1, %z4, 3f                             \n"
        "       move    $t0, %z5                                \n"
        "2:     sc.w    $t0, %2                                 \n"
        "       beqz    $t0, 1b                                 \n"
        "3:                                                     \n"
        __WEAK_LLSC_MB
        _ASM_EXTABLE_UACCESS_ERR(1b, 3b, %0)
        _ASM_EXTABLE_UACCESS_ERR(2b, 3b, %0)
        : "+r" (ret), "=&r" (val), "=ZC" (*uaddr)
        : "ZC" (*uaddr), "Jr" (oldval), "Jr" (newval)
        : "memory", "t0");

        *uval = val;

        return ret;
}
93
94 #endif /* _ASM_FUTEX_H */