2 * Copyright IBM Corp. 1999, 2009
4 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
7 #ifndef __ASM_BARRIER_H
8 #define __ASM_BARRIER_H
11 * Force strict CPU ordering.
12 * And yes, this is required on UP too when we're talking
/*
 * __ASM_BARRIER is the serializing instruction used by mb(): a BCR
 * (branch on condition to register 0) with a nonzero mask acts as a
 * full serialization point on s390.
 */
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
/* Fast-BCR without checkpoint synchronization */
#define __ASM_BARRIER "bcr 14,0\n"
#else
#define __ASM_BARRIER "bcr 15,0\n"
#endif
/* Full memory barrier: execute the serializing BCR and clobber memory. */
#define mb() do { asm volatile(__ASM_BARRIER : : : "memory"); } while (0)

/*
 * rmb()/wmb() are plain compiler barriers here — the hardware already
 * keeps the required load/load and store/store order, so only compiler
 * reordering needs to be stopped (NOTE(review): per upstream s390
 * convention; the DMA variants still use the full barrier).
 */
#define rmb()				barrier()
#define wmb()				barrier()
#define dma_rmb()			mb()
#define dma_wmb()			mb()
#define __smp_mb()			mb()
#define __smp_rmb()			rmb()
#define __smp_wmb()			wmb()
/*
 * Store with release semantics: the compiler barrier prevents prior
 * accesses from being moved past the store; the type assert rejects
 * objects that cannot be stored atomically.
 */
#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)
/*
 * Load with acquire semantics: read once, then stop the compiler from
 * hoisting later accesses above the load; evaluates to the loaded value.
 */
#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})
/*
 * Only compiler barriers are needed around atomics here — the atomic
 * operations themselves provide the required ordering (NOTE(review):
 * per upstream s390 convention).
 */
#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()
52 * array_index_mask_nospec - generate a mask for array_idx() that is
53 * ~0UL when the bounds check succeeds and 0 otherwise
54 * @index: array element index
55 * @size: number of elements in array
57 #define array_index_mask_nospec array_index_mask_nospec
58 static inline unsigned long array_index_mask_nospec(unsigned long index,
63 if (__builtin_constant_p(size) && size > 0) {
66 :"=d" (mask) : "d" (size-1), "d" (index) :"cc");
71 :"=d" (mask) : "d" (size), "d" (index) :"cc");
75 #include <asm-generic/barrier.h>
77 #endif /* __ASM_BARRIER_H */