1 /* SPDX-License-Identifier: GPL-2.0-only */
3 #ifndef _ASM_ARC_ATOMIC_SPLOCK_H
4 #define _ASM_ARC_ATOMIC_SPLOCK_H
/*
 * Non hardware assisted Atomic-R-M-W
 * Locking would change to irq-disabling only (UP) and spinlocks (SMP)
 */
11 static inline void arch_atomic_set(atomic_t *v, int i)
14 * Independent of hardware support, all of the atomic_xxx() APIs need
15 * to follow the same locking rules to make sure that a "hardware"
16 * atomic insn (e.g. LD) doesn't clobber an "emulated" atomic insn
19 * Thus atomic_set() despite being 1 insn (and seemingly atomic)
20 * requires the locking.
24 atomic_ops_lock(flags);
25 WRITE_ONCE(v->counter, i);
26 atomic_ops_unlock(flags);
29 #define arch_atomic_set_release(v, i) arch_atomic_set((v), (i))
/* Plain (void-returning) atomic op: lock, apply @c_op to v->counter, unlock */
#define ATOMIC_OP(op, c_op, asm_op)					\
static inline void arch_atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	atomic_ops_lock(flags);						\
	v->counter c_op i;						\
	atomic_ops_unlock(flags);					\
}
/* Atomic op returning the NEW value of the counter */
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static inline int arch_atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	unsigned int temp;						\
									\
	/*								\
	 * spin lock/unlock provides the needed smp_mb() before/after	\
	 */								\
	atomic_ops_lock(flags);						\
									\
	temp = v->counter;						\
	temp c_op i;							\
	v->counter = temp;						\
									\
	atomic_ops_unlock(flags);					\
									\
	return temp;							\
}
/* Atomic op returning the OLD (pre-op) value of the counter */
#define ATOMIC_FETCH_OP(op, c_op, asm_op)				\
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	unsigned int orig;						\
									\
	/*								\
	 * spin lock/unlock provides the needed smp_mb() before/after	\
	 */								\
	atomic_ops_lock(flags);						\
									\
	orig = v->counter;						\
	v->counter c_op i;						\
									\
	atomic_ops_unlock(flags);					\
									\
	return orig;							\
}
/* Arith ops get all three flavours: void, *_return and fetch_* */
#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_OP_RETURN(op, c_op, asm_op)				\
	ATOMIC_FETCH_OP(op, c_op, asm_op)
ATOMIC_OPS(add, +=, add)
ATOMIC_OPS(sub, -=, sub)

/* Tell the generic fallback layer these variants are provided by arch */
#define arch_atomic_fetch_add		arch_atomic_fetch_add
#define arch_atomic_fetch_sub		arch_atomic_fetch_sub
#define arch_atomic_add_return		arch_atomic_add_return
#define arch_atomic_sub_return		arch_atomic_sub_return
/*
 * Bitwise ops only need the void and fetch_* flavours, so ATOMIC_OPS is
 * redefined; the #undef is required before redefining it with a
 * different replacement list.
 */
#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_FETCH_OP(op, c_op, asm_op)
ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(andnot, &= ~, bic)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

/* andnot has no generic fallback; flag that arch supplies it */
#define arch_atomic_andnot		arch_atomic_andnot

/* Tell the generic fallback layer these variants are provided by arch */
#define arch_atomic_fetch_and		arch_atomic_fetch_and
#define arch_atomic_fetch_andnot	arch_atomic_fetch_andnot
#define arch_atomic_fetch_or		arch_atomic_fetch_or
#define arch_atomic_fetch_xor		arch_atomic_fetch_xor
/* Done generating the ops; drop the generator macros from the namespace */
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN