arch/xtensa/include/asm/futex.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Atomic futex routines
 *
 * Based on the PowerPC implementation
 *
 * Copyright (C) 2013 TangoTec Ltd.
 *
 * Baruch Siach <baruch@tkos.co.il>
 */

#ifndef _ASM_XTENSA_FUTEX_H
#define _ASM_XTENSA_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>

#define arch_futex_atomic_op_inuser arch_futex_atomic_op_inuser
#define futex_atomic_cmpxchg_inatomic futex_atomic_cmpxchg_inatomic
#include <asm-generic/futex.h>

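/*
 * Two hardware-assisted variants of __futex_atomic_op() follow, selected by
 * the core configuration.  Both retry the load/modify/store sequence until
 * it completes atomically, and both use a .fixup/__ex_table pair so that a
 * fault on the user access at label 1 or 2 redirects to the fixup code,
 * which sets the return value to -EFAULT and resumes after the asm at
 * label 3.
 */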
#if XCHAL_HAVE_EXCLUSIVE
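/*
 * Exclusive-access variant: l32ex loads the old value and sets up an
 * exclusive reservation on the address, s32ex attempts the conditional
 * store of the new value, and getex retrieves the store result (zero means
 * the reservation was lost, so the sequence restarts at label 1).
 */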
#define __futex_atomic_op(insn, ret, old, uaddr, arg)   \
        __asm__ __volatile(                             \
        "1:     l32ex   %[oldval], %[addr]\n"           \
                insn "\n"                               \
        "2:     s32ex   %[newval], %[addr]\n"           \
        "       getex   %[newval]\n"                    \
        "       beqz    %[newval], 1b\n"                \
        "       movi    %[newval], 0\n"                 \
        "3:\n"                                          \
        "       .section .fixup,\"ax\"\n"               \
        "       .align 4\n"                             \
        "       .literal_position\n"                    \
        "5:     movi    %[oldval], 3b\n"                \
        "       movi    %[newval], %[fault]\n"          \
        "       jx      %[oldval]\n"                    \
        "       .previous\n"                            \
        "       .section __ex_table,\"a\"\n"            \
        "       .long 1b, 5b, 2b, 5b\n"                 \
        "       .previous\n"                            \
        : [oldval] "=&r" (old), [newval] "=&r" (ret)    \
        : [addr] "r" (uaddr), [oparg] "r" (arg),        \
          [fault] "I" (-EFAULT)                         \
        : "memory")
#elif XCHAL_HAVE_S32C1I
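/*
 * S32C1I variant: the loaded old value is written to the SCOMPARE1 special
 * register, and s32c1i stores the new value only if memory still matches
 * SCOMPARE1, returning the previous memory contents in the register.  A
 * mismatch means another update raced in, so the sequence restarts at
 * label 1.
 */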
#define __futex_atomic_op(insn, ret, old, uaddr, arg)   \
        __asm__ __volatile(                             \
        "1:     l32i    %[oldval], %[mem]\n"            \
                insn "\n"                               \
        "       wsr     %[oldval], scompare1\n"         \
        "2:     s32c1i  %[newval], %[mem]\n"            \
        "       bne     %[newval], %[oldval], 1b\n"     \
        "       movi    %[newval], 0\n"                 \
        "3:\n"                                          \
        "       .section .fixup,\"ax\"\n"               \
        "       .align 4\n"                             \
        "       .literal_position\n"                    \
        "5:     movi    %[oldval], 3b\n"                \
        "       movi    %[newval], %[fault]\n"          \
        "       jx      %[oldval]\n"                    \
        "       .previous\n"                            \
        "       .section __ex_table,\"a\"\n"            \
        "       .long 1b, 5b, 2b, 5b\n"                 \
        "       .previous\n"                            \
        : [oldval] "=&r" (old), [newval] "=&r" (ret),   \
          [mem] "+m" (*(uaddr))                         \
        : [oparg] "r" (arg), [fault] "I" (-EFAULT)      \
        : "memory")
#endif

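/*
 * Atomically apply @op (SET/ADD/OR/ANDN/XOR) with operand @oparg to the
 * user-space word at @uaddr and return the previous value through @oval.
 * Returns 0 on success, -EFAULT if the user access faults, or -ENOSYS for
 * an unknown operation.  Cores with neither atomic option fall back to the
 * generic futex_atomic_op_inuser_local() from <asm-generic/futex.h>.
 */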
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
                u32 __user *uaddr)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
        int oldval = 0, ret;

        if (!access_ok(uaddr, sizeof(u32)))
                return -EFAULT;

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("mov %[newval], %[oparg]",
                                  ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("add %[newval], %[oldval], %[oparg]",
                                  ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("or %[newval], %[oldval], %[oparg]",
                                  ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("and %[newval], %[oldval], %[oparg]",
                                  ret, oldval, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("xor %[newval], %[oldval], %[oparg]",
                                  ret, oldval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        if (!ret)
                *oval = oldval;

        return ret;
#else
        return futex_atomic_op_inuser_local(op, oparg, oval, uaddr);
#endif
}

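/*
 * Atomically compare the user-space word at @uaddr with @oldval and, if it
 * matches, replace it with @newval.  The value observed at @uaddr is stored
 * through @uval.  Returns 0 on success or -EFAULT if the user access
 * faults.
 */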
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
        unsigned long tmp;
        int ret = 0;

        if (!access_ok(uaddr, sizeof(u32)))
                return -EFAULT;

        __asm__ __volatile__ (
        "       # futex_atomic_cmpxchg_inatomic\n"
#if XCHAL_HAVE_EXCLUSIVE
        "1:     l32ex   %[tmp], %[addr]\n"
        "       s32i    %[tmp], %[uval], 0\n"
        "       bne     %[tmp], %[oldval], 2f\n"
        "       mov     %[tmp], %[newval]\n"
        "3:     s32ex   %[tmp], %[addr]\n"
        "       getex   %[tmp]\n"
        "       beqz    %[tmp], 1b\n"
#elif XCHAL_HAVE_S32C1I
        "       wsr     %[oldval], scompare1\n"
        "1:     s32c1i  %[newval], %[addr], 0\n"
        "       s32i    %[newval], %[uval], 0\n"
#endif
        "2:\n"
        "       .section .fixup,\"ax\"\n"
        "       .align 4\n"
        "       .literal_position\n"
        "4:     movi    %[tmp], 2b\n"
        "       movi    %[ret], %[fault]\n"
        "       jx      %[tmp]\n"
        "       .previous\n"
        "       .section __ex_table,\"a\"\n"
        "       .long 1b, 4b\n"
#if XCHAL_HAVE_EXCLUSIVE
        "       .long 3b, 4b\n"
#endif
        "       .previous\n"
        : [ret] "+r" (ret), [newval] "+r" (newval), [tmp] "=&r" (tmp)
        : [addr] "r" (uaddr), [oldval] "r" (oldval), [uval] "r" (uval),
          [fault] "I" (-EFAULT)
        : "memory");

        return ret;
#else
        return futex_atomic_cmpxchg_inatomic_local(uval, uaddr, oldval, newval);
#endif
}

#endif /* _ASM_XTENSA_FUTEX_H */