1 /* SPDX-License-Identifier: GPL-2.0 */
/*
 * This file gets included from lowlevel asm headers too, to provide
 * wrapped versions of the local_irq_*() APIs, based on the
 * raw_local_irq_*() functions from the lowlevel headers.
 */
9 #ifndef _ASM_IRQFLAGS_H
10 #define _ASM_IRQFLAGS_H
#ifndef __ASSEMBLY__

#include <asm/core_reg.h>
#include <asm/metag_regs.h>
17 #define INTS_OFF_MASK TXSTATI_BGNDHALT_BIT
#ifdef CONFIG_SMP
/* SMP: per-CPU trigger masks need an out-of-line accessor. */
extern unsigned int get_trigger_mask(void);
#else

extern unsigned int global_trigger_mask;

/*
 * UP: the trigger mask is a single global variable; read it directly.
 *
 * Without the CONFIG_SMP guard, the extern prototype above and this
 * static inline definition of the same identifier would conflict.
 */
static inline unsigned int get_trigger_mask(void)
{
	return global_trigger_mask;
}
#endif
31 static inline unsigned long arch_local_save_flags(void)
33 return __core_reg_get(TXMASKI);
36 static inline int arch_irqs_disabled_flags(unsigned long flags)
38 return (flags & ~INTS_OFF_MASK) == 0;
/* Report whether interrupts are currently disabled on this thread. */
static inline int arch_irqs_disabled(void)
{
	return arch_irqs_disabled_flags(arch_local_save_flags());
}
48 static inline unsigned long __irqs_disabled(void)
51 * We shouldn't enable exceptions if they are not already
52 * enabled. This is required for chancalls to work correctly.
54 return arch_local_save_flags() & INTS_OFF_MASK;
60 static inline unsigned long arch_local_irq_save(void)
62 unsigned long flags = __irqs_disabled();
64 asm volatile("SWAP %0,TXMASKI\n" : "=r" (flags) : "0" (flags)
70 static inline void arch_local_irq_restore(unsigned long flags)
72 asm volatile("MOV TXMASKI,%0\n" : : "r" (flags) : "memory");
75 static inline void arch_local_irq_disable(void)
77 unsigned long flags = __irqs_disabled();
79 asm volatile("MOV TXMASKI,%0\n" : : "r" (flags) : "memory");
#ifdef CONFIG_SMP
/* Avoid circular include dependencies through <linux/preempt.h> */
void arch_local_irq_enable(void);
#else

/*
 * Re-enable interrupts by restoring the full trigger mask.
 *
 * Without the CONFIG_SMP guard, the non-static prototype above and this
 * static inline definition of the same function would have conflicting
 * linkage; they are build-time alternatives.
 */
static inline void arch_local_irq_enable(void)
{
	arch_local_irq_restore(get_trigger_mask());
}
#endif
92 #endif /* (__ASSEMBLY__) */
94 #endif /* !(_ASM_IRQFLAGS_H) */