/*
 * This file gets included from lowlevel asm headers too, to provide
 * wrapped versions of the local_irq_*() APIs, based on the
 * raw_local_irq_*() functions from the lowlevel headers.
 */
8 #ifndef _ASM_IRQFLAGS_H
9 #define _ASM_IRQFLAGS_H
13 #include <asm/core_reg.h>
14 #include <asm/metag_regs.h>
16 #define INTS_OFF_MASK TXSTATI_BGNDHALT_BIT
#ifdef CONFIG_SMP
/* SMP: each hardware thread maintains its own trigger mask. */
extern unsigned int get_trigger_mask(void);
#else

extern unsigned int global_trigger_mask;

/*
 * Return the trigger mask to write to TXMASKI when enabling interrupts.
 * UP build: a single system-wide mask is used.
 */
static inline unsigned int get_trigger_mask(void)
{
	return global_trigger_mask;
}
#endif
30 static inline unsigned long arch_local_save_flags(void)
32 return __core_reg_get(TXMASKI);
35 static inline int arch_irqs_disabled_flags(unsigned long flags)
37 return (flags & ~INTS_OFF_MASK) == 0;
/* Test whether interrupts are currently disabled on this thread. */
static inline int arch_irqs_disabled(void)
{
	unsigned long flags = arch_local_save_flags();

	return arch_irqs_disabled_flags(flags);
}
47 static inline unsigned long __irqs_disabled(void)
50 * We shouldn't enable exceptions if they are not already
51 * enabled. This is required for chancalls to work correctly.
53 return arch_local_save_flags() & INTS_OFF_MASK;
59 static inline unsigned long arch_local_irq_save(void)
61 unsigned long flags = __irqs_disabled();
63 asm volatile("SWAP %0,TXMASKI\n" : "=r" (flags) : "0" (flags)
69 static inline void arch_local_irq_restore(unsigned long flags)
71 asm volatile("MOV TXMASKI,%0\n" : : "r" (flags) : "memory");
74 static inline void arch_local_irq_disable(void)
76 unsigned long flags = __irqs_disabled();
78 asm volatile("MOV TXMASKI,%0\n" : : "r" (flags) : "memory");
#ifdef CONFIG_SMP
/* Avoid circular include dependencies through <linux/preempt.h> */
void arch_local_irq_enable(void);
#else
/* UP: enabling interrupts is just restoring the global trigger mask. */
static inline void arch_local_irq_enable(void)
{
	arch_local_irq_restore(get_trigger_mask());
}
#endif
91 #endif /* (__ASSEMBLY__) */
93 #endif /* !(_ASM_IRQFLAGS_H) */