2024-10-12 a5969cabbb4660eab42b6ef0412cbbd1200cf14d
kernel/arch/x86/include/asm/smap.h
@@ -1,37 +1,31 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * Supervisor Mode Access Prevention support
  *
  * Copyright (C) 2012 Intel Corporation
  * Author: H. Peter Anvin <hpa@linux.intel.com>
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; version 2
- * of the License.
  */
 
 #ifndef _ASM_X86_SMAP_H
 #define _ASM_X86_SMAP_H
 
-#include <linux/stringify.h>
 #include <asm/nops.h>
 #include <asm/cpufeatures.h>
+#include <asm/alternative.h>
 
 /* "Raw" instruction opcodes */
-#define __ASM_CLAC	.byte 0x0f,0x01,0xca
-#define __ASM_STAC	.byte 0x0f,0x01,0xcb
+#define __ASM_CLAC	".byte 0x0f,0x01,0xca"
+#define __ASM_STAC	".byte 0x0f,0x01,0xcb"
 
 #ifdef __ASSEMBLY__
-
-#include <asm/alternative-asm.h>
 
 #ifdef CONFIG_X86_SMAP
 
 #define ASM_CLAC \
-	ALTERNATIVE "", __stringify(__ASM_CLAC), X86_FEATURE_SMAP
+	ALTERNATIVE "", __ASM_CLAC, X86_FEATURE_SMAP
 
 #define ASM_STAC \
-	ALTERNATIVE "", __stringify(__ASM_STAC), X86_FEATURE_SMAP
+	ALTERNATIVE "", __ASM_STAC, X86_FEATURE_SMAP
 
 #else /* CONFIG_X86_SMAP */
 
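The first hunk turns __ASM_CLAC/__ASM_STAC into quoted strings, so the opcode bytes can be pasted into C inline-asm templates by plain string-literal concatenation instead of going through __stringify(). A minimal user-space sketch of that mechanism, not kernel code: the emit_nop3() helper and the 3-byte NOP standing in for the privileged CLAC/STAC instructions are illustrative assumptions only.

/* Hypothetical demo: a quoted-string opcode macro concatenates
 * directly into the asm template, with no __stringify() step.
 * 0x0f,0x1f,0x00 is the 3-byte NOP (nopl (%rax)); it stands in for
 * CLAC/STAC, which are privileged and would fault in user space. */
#include <stdio.h>

#define __ASM_NOP3	".byte 0x0f,0x1f,0x00"

static inline void emit_nop3(void)
{
	/* Adjacent string literals merge into a single asm template. */
	asm volatile ("# demo\n\t" __ASM_NOP3 : : : "memory");
}

int main(void)
{
	emit_nop3();
	puts("string-literal opcode macro assembled and executed");
	return 0;
}

Built as a normal x86-64 program (e.g. with gcc -O2), this emits the raw bytes into the instruction stream much as the reworked kernel macros now drop straight into alternative() and ALTERNATIVE().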
@@ -42,28 +36,28 @@
 
 #else /* __ASSEMBLY__ */
 
-#include <asm/alternative.h>
-
 #ifdef CONFIG_X86_SMAP
 
 static __always_inline void clac(void)
 {
 	/* Note: a barrier is implicit in alternative() */
-	alternative("", __stringify(__ASM_CLAC), X86_FEATURE_SMAP);
+	alternative("", __ASM_CLAC, X86_FEATURE_SMAP);
 }
 
 static __always_inline void stac(void)
 {
 	/* Note: a barrier is implicit in alternative() */
-	alternative("", __stringify(__ASM_STAC), X86_FEATURE_SMAP);
+	alternative("", __ASM_STAC, X86_FEATURE_SMAP);
 }
 
 static __always_inline unsigned long smap_save(void)
 {
 	unsigned long flags;
 
-	asm volatile (ALTERNATIVE("", "pushf; pop %0; " __stringify(__ASM_CLAC),
-				  X86_FEATURE_SMAP)
+	asm volatile ("# smap_save\n\t"
+		      ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
+		      "pushf; pop %0; " __ASM_CLAC "\n\t"
+		      "1:"
 		      : "=rm" (flags) : : "memory", "cc");
 
 	return flags;
@@ -71,15 +65,18 @@
 
 static __always_inline void smap_restore(unsigned long flags)
 {
-	asm volatile (ALTERNATIVE("", "push %0; popf", X86_FEATURE_SMAP)
+	asm volatile ("# smap_restore\n\t"
+		      ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
+		      "push %0; popf\n\t"
+		      "1:"
 		      : : "g" (flags) : "memory", "cc");
 }
 
 /* These macros can be used in asm() statements */
 #define ASM_CLAC \
-	ALTERNATIVE("", __stringify(__ASM_CLAC), X86_FEATURE_SMAP)
+	ALTERNATIVE("", __ASM_CLAC, X86_FEATURE_SMAP)
 #define ASM_STAC \
-	ALTERNATIVE("", __stringify(__ASM_STAC), X86_FEATURE_SMAP)
+	ALTERNATIVE("", __ASM_STAC, X86_FEATURE_SMAP)
 
 #else /* CONFIG_X86_SMAP */
 
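The smap_save()/smap_restore() hunks stop hiding the whole pushf/popf sequence inside the alternative and instead always assemble it, making the alternative's default instruction a jmp 1f over it; when X86_FEATURE_SMAP is present the jmp is patched away and execution falls through into the sequence. A minimal user-space sketch of that jump-over shape, not kernel code: the HAVE_FEATURE macro and flags_save() are illustrative stand-ins for the boot-time ALTERNATIVE() patching.

/* Hypothetical demo of the "jmp over the guarded block" pattern.
 * The kernel patches the jmp at boot via ALTERNATIVE(); here a
 * compile-time macro picks one of the two variants up front. */
#include <stdio.h>

#define HAVE_FEATURE 1

#if HAVE_FEATURE
# define MAYBE_SKIP ""			/* feature present: fall through */
#else
# define MAYBE_SKIP "jmp 1f\n\t"	/* feature absent: skip the block */
#endif

static inline unsigned long flags_save(void)
{
	unsigned long flags = 0;	/* defined even when the block is skipped */

	asm volatile (MAYBE_SKIP
		      "pushf\n\t"
		      "pop %0\n\t"
		      "1:"
		      : "+rm" (flags) : : "memory", "cc");
	return flags;
}

int main(void)
{
	printf("saved flags: %#lx\n", flags_save());
	return 0;
}

Flipping HAVE_FEATURE to 0 leaves the pushf/pop in the binary but jumps over it at run time, which is the shape the patched smap_save() takes on hardware without SMAP.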