xref: /linux/arch/arm64/include/asm/irqflags.h (revision 44f57d78)
1 /* SPDX-License-Identifier: GPL-2.0-only */
2 /*
3  * Copyright (C) 2012 ARM Ltd.
4  */
5 #ifndef __ASM_IRQFLAGS_H
6 #define __ASM_IRQFLAGS_H
7 
8 #ifdef __KERNEL__
9 
10 #include <asm/alternative.h>
11 #include <asm/ptrace.h>
12 #include <asm/sysreg.h>
13 
14 /*
 * AArch64 has flags for masking: Debug, Asynchronous (serror), Interrupts and
16  * FIQ exceptions, in the 'daif' register. We mask and unmask them in 'dai'
17  * order:
 * Masking debug exceptions causes all other exceptions to be masked too.
19  * Masking SError masks irq, but not debug exceptions. Masking irqs has no
20  * side effects for other flags. Keeping to this order makes it easier for
21  * entry.S to know which exceptions should be unmasked.
22  *
23  * FIQ is never expected, but we mask it when we disable debug exceptions, and
24  * unmask it at all other times.
25  */
26 
27 /*
28  * CPU interrupt mask handling.
29  */
/*
 * Unmask IRQs.
 *
 * Without pseudo-NMI support, clear PSTATE.I via "msr daifclr, #2".
 * When ARM64_HAS_IRQ_PRIO_MASKING is patched in, interrupts are instead
 * controlled through the GIC priority mask: write GIC_PRIO_IRQON to
 * ICC_PMR_EL1. The following "dsb sy" ensures the PMR update is
 * synchronized (NOTE(review): presumably so the GIC observes the new
 * mask promptly — confirm against the GICv3 architecture requirements).
 */
static inline void arch_local_irq_enable(void)
{
	asm volatile(ALTERNATIVE(
		/* Default: clear the I bit in DAIF; nop pads to the alt size. */
		"msr	daifclr, #2		// arch_local_irq_enable\n"
		"nop",
		/* Alternative: raise the priority mask to GIC_PRIO_IRQON. */
		__msr_s(SYS_ICC_PMR_EL1, "%0")
		"dsb	sy",
		ARM64_HAS_IRQ_PRIO_MASKING)
		:
		: "r" ((unsigned long) GIC_PRIO_IRQON)
		: "memory");	/* barrier: no memory accesses cross this point */
}
42 
/*
 * Mask IRQs.
 *
 * Without pseudo-NMI support, set PSTATE.I via "msr daifset, #2".
 * When ARM64_HAS_IRQ_PRIO_MASKING is patched in, write GIC_PRIO_IRQOFF
 * to ICC_PMR_EL1 instead, so that only higher-priority (pseudo-NMI)
 * interrupts can still be delivered. No dsb is used here, unlike the
 * enable path (NOTE(review): masking does not need the GIC to react
 * immediately — confirm).
 */
static inline void arch_local_irq_disable(void)
{
	asm volatile(ALTERNATIVE(
		"msr	daifset, #2		// arch_local_irq_disable",
		__msr_s(SYS_ICC_PMR_EL1, "%0"),
		ARM64_HAS_IRQ_PRIO_MASKING)
		:
		: "r" ((unsigned long) GIC_PRIO_IRQOFF)
		: "memory");	/* barrier: no memory accesses cross this point */
}
53 
/*
 * Save the current interrupt enable state.
 *
 * Returns either the DAIF bits (default) or, with priority masking, a
 * PMR value: GIC_PRIO_IRQOFF if PSTATE.I is set (covers windows where
 * PSTATE.I masks IRQs regardless of the current PMR), otherwise the live
 * ICC_PMR_EL1 value. The result is only meaningful to
 * arch_local_irq_restore()/arch_irqs_disabled_flags().
 */
static inline unsigned long arch_local_save_flags(void)
{
	unsigned long daif_bits;
	unsigned long flags;

	daif_bits = read_sysreg(daif);

	/*
	 * The asm is logically equivalent to:
	 *
	 * if (system_uses_irq_prio_masking())
	 *	flags = (daif_bits & PSR_I_BIT) ?
	 *			GIC_PRIO_IRQOFF :
	 *			read_sysreg_s(SYS_ICC_PMR_EL1);
	 * else
	 *	flags = daif_bits;
	 */
	asm volatile(ALTERNATIVE(
			/* Default: flags are simply the DAIF bits. */
			"mov	%0, %1\n"
			"nop\n"
			"nop",
			/*
			 * Priority masking: read PMR, then pick
			 * GIC_PRIO_IRQOFF if PSTATE.I was set ("ands" sets
			 * Z when the I bit is clear; csel keeps the PMR
			 * value on eq, the IRQOFF constant otherwise).
			 */
			__mrs_s("%0", SYS_ICC_PMR_EL1)
			"ands	%1, %1, " __stringify(PSR_I_BIT) "\n"
			"csel	%0, %0, %2, eq",
			ARM64_HAS_IRQ_PRIO_MASKING)
		/* %1 is "+r": the ands clobbers daif_bits in the alt path. */
		: "=&r" (flags), "+r" (daif_bits)
		: "r" ((unsigned long) GIC_PRIO_IRQOFF)
		: "memory");

	return flags;
}
88 
/*
 * Save the current interrupt enable state and mask IRQs.
 *
 * Returns the pre-disable flags in the same format produced by
 * arch_local_save_flags(), suitable for arch_local_irq_restore().
 */
static inline unsigned long arch_local_irq_save(void)
{
	/* Snapshot the state first, then mask: the saved value must
	 * reflect the enable state callers will want restored. */
	unsigned long saved_flags = arch_local_save_flags();

	arch_local_irq_disable();
	return saved_flags;
}
99 
/*
 * restore saved IRQ state
 *
 * @flags must come from arch_local_save_flags()/arch_local_irq_save():
 * a DAIF value by default, or a PMR value when priority masking is in
 * use. The alternative writes it back to the matching register; the
 * "dsb sy" on the PMR path mirrors arch_local_irq_enable() (see the
 * hedged note there on why the barrier is needed).
 */
static inline void arch_local_irq_restore(unsigned long flags)
{
	asm volatile(ALTERNATIVE(
			"msr	daif, %0\n"
			"nop",
			__msr_s(SYS_ICC_PMR_EL1, "%0")
			"dsb	sy",
			ARM64_HAS_IRQ_PRIO_MASKING)
		: "+r" (flags)
		:
		: "memory");	/* barrier: no memory accesses cross this point */
}
115 
/*
 * Test whether @flags (as returned by arch_local_save_flags()) describe
 * a state in which IRQs are masked.
 *
 * Default: nonzero iff PSR_I_BIT is set in the saved DAIF bits.
 * Priority masking: 1 iff the saved PMR value is <= GIC_PRIO_IRQOFF
 * ("cset ..., ls" is unsigned lower-or-same), i.e. the mask admits at
 * most pseudo-NMI priorities.
 */
static inline int arch_irqs_disabled_flags(unsigned long flags)
{
	int res;

	asm volatile(ALTERNATIVE(
			/* Default: isolate the I bit; nop pads the alt. */
			"and	%w0, %w1, #" __stringify(PSR_I_BIT) "\n"
			"nop",
			/* Priority masking: res = (flags <= GIC_PRIO_IRQOFF). */
			"cmp	%w1, #" __stringify(GIC_PRIO_IRQOFF) "\n"
			"cset	%w0, ls",
			ARM64_HAS_IRQ_PRIO_MASKING)
		: "=&r" (res)
		: "r" ((int) flags)
		: "memory");

	return res;
}
132 #endif
133 #endif
134