@@ -1,24 +1,14 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * Copyright (C) 2012 ARM Ltd.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
  */
 #ifndef __ASM_IRQFLAGS_H
 #define __ASM_IRQFLAGS_H
 
-#ifdef __KERNEL__
-
+#include <asm/alternative.h>
+#include <asm/barrier.h>
 #include <asm/ptrace.h>
+#include <asm/sysreg.h>
 
 /*
  * Aarch64 has flags for masking: Debug, Asynchronous (serror), Interrupts and
@@ -36,33 +26,39 @@
 /*
  * CPU interrupt mask handling.
  */
-static inline unsigned long arch_local_irq_save(void)
-{
-	unsigned long flags;
-	asm volatile(
-		"mrs %0, daif // arch_local_irq_save\n"
-		"msr daifset, #2"
-		: "=r" (flags)
-		:
-		: "memory");
-	return flags;
-}
-
 static inline void arch_local_irq_enable(void)
 {
-	asm volatile(
-		"msr daifclr, #2 // arch_local_irq_enable"
+	if (system_has_prio_mask_debugging()) {
+		u32 pmr = read_sysreg_s(SYS_ICC_PMR_EL1);
+
+		WARN_ON_ONCE(pmr != GIC_PRIO_IRQON && pmr != GIC_PRIO_IRQOFF);
+	}
+
+	asm volatile(ALTERNATIVE(
+		"msr daifclr, #2 // arch_local_irq_enable",
+		__msr_s(SYS_ICC_PMR_EL1, "%0"),
+		ARM64_HAS_IRQ_PRIO_MASKING)
 		:
-		:
+		: "r" ((unsigned long) GIC_PRIO_IRQON)
 		: "memory");
+
+	pmr_sync();
 }
 
 static inline void arch_local_irq_disable(void)
 {
-	asm volatile(
-		"msr daifset, #2 // arch_local_irq_disable"
+	if (system_has_prio_mask_debugging()) {
+		u32 pmr = read_sysreg_s(SYS_ICC_PMR_EL1);
+
+		WARN_ON_ONCE(pmr != GIC_PRIO_IRQON && pmr != GIC_PRIO_IRQOFF);
+	}
+
+	asm volatile(ALTERNATIVE(
+		"msr daifset, #2 // arch_local_irq_disable",
+		__msr_s(SYS_ICC_PMR_EL1, "%0"),
+		ARM64_HAS_IRQ_PRIO_MASKING)
 		:
-		:
+		: "r" ((unsigned long) GIC_PRIO_IRQOFF)
 		: "memory");
 }
 
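The hunk above is the core of the change: ALTERNATIVE() lets the kernel patch, at boot, the plain DAIF accesses into writes of the GIC priority-mask register ICC_PMR_EL1 once the ARM64_HAS_IRQ_PRIO_MASKING capability is detected, and the WARN_ON_ONCE() sanity check only does anything when priority-mask debugging (system_has_prio_mask_debugging()) is enabled. As a rough sketch of the resulting behaviour, here is a minimal userspace model; struct cpu_model and the helper names are invented for illustration, and the GIC_PRIO_* values mirror the definitions in asm/ptrace.h at the time of this change.

/*
 * Minimal userspace model of the two masking schemes that ALTERNATIVE()
 * patches between. Nothing here is kernel API; it only mirrors the logic.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define GIC_PRIO_IRQON	0xe0u	/* mirrors asm/ptrace.h */
#define GIC_PRIO_IRQOFF	0x60u

struct cpu_model {
	bool has_irq_prio_masking;	/* ARM64_HAS_IRQ_PRIO_MASKING */
	bool daif_i;			/* PSTATE.I: IRQs masked when set */
	uint32_t pmr;			/* ICC_PMR_EL1 */
};

static void irq_enable(struct cpu_model *c)
{
	if (c->has_irq_prio_masking)
		c->pmr = GIC_PRIO_IRQON;	/* patched-in "msr ICC_PMR_EL1" */
	else
		c->daif_i = false;		/* default "msr daifclr, #2" */
}

static void irq_disable(struct cpu_model *c)
{
	if (c->has_irq_prio_masking)
		c->pmr = GIC_PRIO_IRQOFF;	/* mask IRQs by priority, not PSTATE.I */
	else
		c->daif_i = true;		/* default "msr daifset, #2" */
}

int main(void)
{
	struct cpu_model c = { .has_irq_prio_masking = true, .pmr = GIC_PRIO_IRQON };

	irq_disable(&c);
	printf("pmr after disable: %#x\n", c.pmr);	/* 0x60 */
	irq_enable(&c);
	printf("pmr after enable:  %#x\n", c.pmr);	/* 0xe0 */
	return 0;
}

Masking via a PMR threshold rather than PSTATE.I is what allows interrupts configured at NMI priority to keep being delivered while normal IRQs are masked, which is the point of the pseudo-NMI work this belongs to.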
@@ -72,11 +68,51 @@
 static inline unsigned long arch_local_save_flags(void)
 {
 	unsigned long flags;
-	asm volatile(
-		"mrs %0, daif // arch_local_save_flags"
-		: "=r" (flags)
+
+	asm volatile(ALTERNATIVE(
+		"mrs %0, daif",
+		__mrs_s("%0", SYS_ICC_PMR_EL1),
+		ARM64_HAS_IRQ_PRIO_MASKING)
+		: "=&r" (flags)
 		:
 		: "memory");
+
+	return flags;
+}
+
+static inline int arch_irqs_disabled_flags(unsigned long flags)
+{
+	int res;
+
+	asm volatile(ALTERNATIVE(
+		"and %w0, %w1, #" __stringify(PSR_I_BIT),
+		"eor %w0, %w1, #" __stringify(GIC_PRIO_IRQON),
+		ARM64_HAS_IRQ_PRIO_MASKING)
+		: "=&r" (res)
+		: "r" ((int) flags)
+		: "memory");
+
+	return res;
+}
+
+static inline int arch_irqs_disabled(void)
+{
+	return arch_irqs_disabled_flags(arch_local_save_flags());
+}
+
+static inline unsigned long arch_local_irq_save(void)
+{
+	unsigned long flags;
+
+	flags = arch_local_save_flags();
+
+	/*
+	 * There are too many states with IRQs disabled, just keep the current
+	 * state if interrupts are already disabled/masked.
+	 */
+	if (!arch_irqs_disabled_flags(flags))
+		arch_local_irq_disable();
+
 	return flags;
 }
 
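Note that the flags encoding differs between the two worlds: with DAIF, "interrupts disabled" means PSTATE.I is set, so the first alternative simply ANDs with PSR_I_BIT, while with priority masking GIC_PRIO_IRQON is the only "enabled" value and anything else counts as masked, which is exactly what the EOR computes. This is also why the rewritten arch_local_irq_save() only calls arch_local_irq_disable() when interrupts are still enabled: there is more than one masked PMR state, and blindly overwriting it would lose information. A runnable model of the two checks (the constants mirror asm/ptrace.h; the function names are invented):

/* Runnable model of the two arch_irqs_disabled_flags() alternatives. */
#include <assert.h>
#include <stdint.h>

#define PSR_I_BIT	0x00000080u	/* mirrors asm/ptrace.h */
#define GIC_PRIO_IRQON	0xe0u
#define GIC_PRIO_IRQOFF	0x60u

/* DAIF flavour: "and %w0, %w1, #PSR_I_BIT" — nonzero iff the I bit is set */
static int irqs_disabled_daif(uint32_t flags)
{
	return flags & PSR_I_BIT;
}

/* PMR flavour: "eor %w0, %w1, #GIC_PRIO_IRQON" — zero only when fully unmasked */
static int irqs_disabled_pmr(uint32_t flags)
{
	return flags ^ GIC_PRIO_IRQON;
}

int main(void)
{
	assert(!irqs_disabled_daif(0));
	assert(irqs_disabled_daif(PSR_I_BIT));
	assert(!irqs_disabled_pmr(GIC_PRIO_IRQON));
	assert(irqs_disabled_pmr(GIC_PRIO_IRQOFF));
	return 0;
}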
@@ -85,16 +121,15 @@
  */
 static inline void arch_local_irq_restore(unsigned long flags)
 {
-	asm volatile(
-		"msr daif, %0 // arch_local_irq_restore"
-		:
-		: "r" (flags)
-		: "memory");
+	asm volatile(ALTERNATIVE(
+		"msr daif, %0",
+		__msr_s(SYS_ICC_PMR_EL1, "%0"),
+		ARM64_HAS_IRQ_PRIO_MASKING)
+		:
+		: "r" (flags)
+		: "memory");
+
+	pmr_sync();
 }
 
-static inline int arch_irqs_disabled_flags(unsigned long flags)
-{
-	return flags & PSR_I_BIT;
-}
-#endif
-#endif
+#endif /* __ASM_IRQFLAGS_H */
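arch_local_irq_restore() follows the same pattern, with pmr_sync() afterwards so that a write to ICC_PMR_EL1 has taken effect before the caller proceeds; pmr_sync() compiles to nothing unless pseudo-NMI support is configured. Callers do not use these helpers directly but go through the generic wrappers in <linux/irqflags.h>, along these lines (update_stat() and critical_stat are invented for illustration, kernel context assumed):

/* Hypothetical caller showing the usual save/restore pairing. */
#include <linux/irqflags.h>

static unsigned long critical_stat;

static void update_stat(unsigned long delta)
{
	unsigned long flags;

	local_irq_save(flags);		/* ends up in arch_local_irq_save() */
	critical_stat += delta;		/* IRQs masked, via DAIF or PMR */
	local_irq_restore(flags);	/* arch_local_irq_restore() + pmr_sync() */
}

Because arch_local_irq_save() keeps the current state when interrupts are already masked, this pattern nests safely regardless of which masking scheme the boot-time patching selected.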
---|