2024-12-19 9370bb92b2d16684ee45cf24e879c93c509162da
kernel/arch/x86/lib/getuser.S
@@ -35,10 +35,21 @@
 #include <asm/smap.h>
 #include <asm/export.h>
 
+#define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC
+
+#ifdef CONFIG_X86_5LEVEL
+#define LOAD_TASK_SIZE_MINUS_N(n) \
+	ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rdx), \
+		    __stringify(mov $((1 << 56) - 4096 - (n)),%rdx), X86_FEATURE_LA57
+#else
+#define LOAD_TASK_SIZE_MINUS_N(n) \
+	mov $(TASK_SIZE_MAX - (n)),%_ASM_DX
+#endif
+
 .text
-ENTRY(__get_user_1)
-	mov PER_CPU_VAR(current_task), %_ASM_DX
-	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
+SYM_FUNC_START(__get_user_1)
+	LOAD_TASK_SIZE_MINUS_N(0)
+	cmp %_ASM_DX,%_ASM_AX
 	jae bad_get_user
 	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
 	and %_ASM_DX, %_ASM_AX
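
Note on the new range check: the old sequence loaded current->addr_limit via PER_CPU_VAR(current_task) and, for sizes above one byte, needed an add/jc pair to catch address wraparound. The replacement compares against a constant: LOAD_TASK_SIZE_MINUS_N(n) puts TASK_SIZE_MAX - n into %_ASM_DX (the ALTERNATIVE patches in the 56-bit constant at boot on X86_FEATURE_LA57 machines), so a single cmp against TASK_SIZE_MAX - (size - 1) also covers the access's last byte. A minimal C sketch of the equivalent predicate, assuming 4-level paging; user_range_ok() is illustrative, not a kernel function:

	#include <stdbool.h>
	#include <stdint.h>

	#define PAGE_SIZE	4096UL
	#define TASK_SIZE_MAX	((1UL << 47) - PAGE_SIZE)	/* 4-level paging */

	/* What "cmp %rdx,%rax; jae bad_get_user" tests, with
	 * %rdx = TASK_SIZE_MAX - (size - 1). */
	static bool user_range_ok(uintptr_t addr, unsigned long size)
	{
		return addr < TASK_SIZE_MAX - (size - 1);
	}
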
@@ -46,98 +57,150 @@
 1:	movzbl (%_ASM_AX),%edx
 	xor %eax,%eax
 	ASM_CLAC
-	ret
-ENDPROC(__get_user_1)
+	RET
+SYM_FUNC_END(__get_user_1)
 EXPORT_SYMBOL(__get_user_1)
 
-ENTRY(__get_user_2)
-	add $1,%_ASM_AX
-	jc bad_get_user
-	mov PER_CPU_VAR(current_task), %_ASM_DX
-	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
+SYM_FUNC_START(__get_user_2)
+	LOAD_TASK_SIZE_MINUS_N(1)
+	cmp %_ASM_DX,%_ASM_AX
 	jae bad_get_user
 	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
 	and %_ASM_DX, %_ASM_AX
 	ASM_STAC
-2:	movzwl -1(%_ASM_AX),%edx
+2:	movzwl (%_ASM_AX),%edx
 	xor %eax,%eax
 	ASM_CLAC
-	ret
-ENDPROC(__get_user_2)
+	RET
+SYM_FUNC_END(__get_user_2)
 EXPORT_SYMBOL(__get_user_2)
 
-ENTRY(__get_user_4)
-	add $3,%_ASM_AX
-	jc bad_get_user
-	mov PER_CPU_VAR(current_task), %_ASM_DX
-	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
+SYM_FUNC_START(__get_user_4)
+	LOAD_TASK_SIZE_MINUS_N(3)
+	cmp %_ASM_DX,%_ASM_AX
 	jae bad_get_user
 	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
 	and %_ASM_DX, %_ASM_AX
 	ASM_STAC
-3:	movl -3(%_ASM_AX),%edx
+3:	movl (%_ASM_AX),%edx
 	xor %eax,%eax
 	ASM_CLAC
-	ret
-ENDPROC(__get_user_4)
+	RET
+SYM_FUNC_END(__get_user_4)
 EXPORT_SYMBOL(__get_user_4)
 
-ENTRY(__get_user_8)
+SYM_FUNC_START(__get_user_8)
 #ifdef CONFIG_X86_64
-	add $7,%_ASM_AX
-	jc bad_get_user
-	mov PER_CPU_VAR(current_task), %_ASM_DX
-	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
+	LOAD_TASK_SIZE_MINUS_N(7)
+	cmp %_ASM_DX,%_ASM_AX
 	jae bad_get_user
 	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
 	and %_ASM_DX, %_ASM_AX
 	ASM_STAC
-4:	movq -7(%_ASM_AX),%rdx
+4:	movq (%_ASM_AX),%rdx
 	xor %eax,%eax
 	ASM_CLAC
-	ret
+	RET
 #else
-	add $7,%_ASM_AX
-	jc bad_get_user_8
-	mov PER_CPU_VAR(current_task), %_ASM_DX
-	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
+	LOAD_TASK_SIZE_MINUS_N(7)
+	cmp %_ASM_DX,%_ASM_AX
 	jae bad_get_user_8
 	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
 	and %_ASM_DX, %_ASM_AX
 	ASM_STAC
-4:	movl -7(%_ASM_AX),%edx
-5:	movl -3(%_ASM_AX),%ecx
+4:	movl (%_ASM_AX),%edx
+5:	movl 4(%_ASM_AX),%ecx
 	xor %eax,%eax
 	ASM_CLAC
-	ret
+	RET
 #endif
-ENDPROC(__get_user_8)
+SYM_FUNC_END(__get_user_8)
 EXPORT_SYMBOL(__get_user_8)
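
The sbb/and pair kept after each cmp is the branchless form of array_index_mask_nospec(): cmp sets CF exactly when the pointer is below the limit, sbb turns CF into an all-ones or all-zero mask, and the and forces an out-of-range pointer to zero, so a mispredicted jae cannot speculatively dereference a user-controlled address. The C helper of the same name in arch/x86/include/asm/barrier.h uses the same trick; a sketch of that helper:

	/* Returns ~0UL if index < size, 0 otherwise - branchless, so the
	 * result is correct even on a mispredicted speculative path. */
	static inline unsigned long array_index_mask_nospec(unsigned long index,
							    unsigned long size)
	{
		unsigned long mask;

		asm volatile ("cmp %1,%2; sbb %0,%0;"
				: "=r" (mask)
				: "g" (size), "r" (index)
				: "cc");
		return mask;
	}
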
 
+/* .. and the same for __get_user, just without the range checks */
+SYM_FUNC_START(__get_user_nocheck_1)
+	ASM_STAC
+	ASM_BARRIER_NOSPEC
+6:	movzbl (%_ASM_AX),%edx
+	xor %eax,%eax
+	ASM_CLAC
+	RET
+SYM_FUNC_END(__get_user_nocheck_1)
+EXPORT_SYMBOL(__get_user_nocheck_1)
 
+SYM_FUNC_START(__get_user_nocheck_2)
+	ASM_STAC
+	ASM_BARRIER_NOSPEC
+7:	movzwl (%_ASM_AX),%edx
+	xor %eax,%eax
+	ASM_CLAC
+	RET
+SYM_FUNC_END(__get_user_nocheck_2)
+EXPORT_SYMBOL(__get_user_nocheck_2)
+
+SYM_FUNC_START(__get_user_nocheck_4)
+	ASM_STAC
+	ASM_BARRIER_NOSPEC
+8:	movl (%_ASM_AX),%edx
+	xor %eax,%eax
+	ASM_CLAC
+	RET
+SYM_FUNC_END(__get_user_nocheck_4)
+EXPORT_SYMBOL(__get_user_nocheck_4)
+
+SYM_FUNC_START(__get_user_nocheck_8)
+	ASM_STAC
+	ASM_BARRIER_NOSPEC
+#ifdef CONFIG_X86_64
+9:	movq (%_ASM_AX),%rdx
+#else
+9:	movl (%_ASM_AX),%edx
+10:	movl 4(%_ASM_AX),%ecx
+#endif
+	xor %eax,%eax
+	ASM_CLAC
+	RET
+SYM_FUNC_END(__get_user_nocheck_8)
+EXPORT_SYMBOL(__get_user_nocheck_8)
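
The nocheck variants back __get_user(), whose caller is expected to have done access_ok() already. With no limit to compare against there is nothing to build a mask from, so ASM_BARRIER_NOSPEC (an LFENCE where the CPU needs one) fences off speculation instead. A hypothetical caller showing which side does the checking; read_word() and read_word_prechecked() are illustrative, not kernel functions:

	#include <linux/uaccess.h>

	static int read_word(const int __user *uptr, int *out)
	{
		if (get_user(*out, uptr))	/* checked: cmp/sbb/and above */
			return -EFAULT;
		return 0;
	}

	static int read_word_prechecked(const int __user *uptr, int *out)
	{
		if (!access_ok(uptr, sizeof(*uptr)))
			return -EFAULT;
		if (__get_user(*out, uptr))	/* nocheck: relies on the barrier */
			return -EFAULT;
		return 0;
	}
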
+
+
+SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
+	ASM_CLAC
 bad_get_user:
 	xor %edx,%edx
 	mov $(-EFAULT),%_ASM_AX
-	ASM_CLAC
-	ret
-END(bad_get_user)
+	RET
+SYM_CODE_END(.Lbad_get_user_clac)
 
 #ifdef CONFIG_X86_32
+SYM_CODE_START_LOCAL(.Lbad_get_user_8_clac)
+	ASM_CLAC
 bad_get_user_8:
 	xor %edx,%edx
 	xor %ecx,%ecx
 	mov $(-EFAULT),%_ASM_AX
-	ASM_CLAC
-	ret
-END(bad_get_user_8)
+	RET
+SYM_CODE_END(.Lbad_get_user_8_clac)
 #endif
 
-	_ASM_EXTABLE(1b,bad_get_user)
-	_ASM_EXTABLE(2b,bad_get_user)
-	_ASM_EXTABLE(3b,bad_get_user)
+/* get_user */
+	_ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
+	_ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
+	_ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
 #ifdef CONFIG_X86_64
-	_ASM_EXTABLE(4b,bad_get_user)
+	_ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
 #else
-	_ASM_EXTABLE(4b,bad_get_user_8)
-	_ASM_EXTABLE(5b,bad_get_user_8)
+	_ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
+	_ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
+#endif
+
+/* __get_user */
+	_ASM_EXTABLE_UA(6b, .Lbad_get_user_clac)
+	_ASM_EXTABLE_UA(7b, .Lbad_get_user_clac)
+	_ASM_EXTABLE_UA(8b, .Lbad_get_user_clac)
+#ifdef CONFIG_X86_64
+	_ASM_EXTABLE_UA(9b, .Lbad_get_user_clac)
+#else
+	_ASM_EXTABLE_UA(9b, .Lbad_get_user_8_clac)
+	_ASM_EXTABLE_UA(10b, .Lbad_get_user_8_clac)
 #endif
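
Finally, the fixup plumbing: every numbered label (1: through 10:) on a user-space load gets an _ASM_EXTABLE_UA() entry pairing it with the matching .Lbad_get_user*_clac stub, so a fault inside the load resumes at the stub, which executes CLAC (the fault happened with AC set) and returns -EFAULT with the output register(s) zeroed. Conceptually each entry is just an (insn, fixup) address pair the page-fault handler looks up; a sketch with illustrative field names (the real struct exception_table_entry stores relative offsets plus a handler type):

	struct extable_entry {
		unsigned long insn;	/* address of the load that may fault */
		unsigned long fixup;	/* where to resume: the *_clac stub */
	};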