2024-01-31 f9004dbfff8a3fbbd7e2a88c8a4327c7f2f8e5b2
kernel/arch/x86/lib/copy_user_64.S
@@ -1,7 +1,7 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
  * Copyright 2002 Andi Kleen, SuSE Labs.
- * Subject to the GNU Public License v2.
  *
  * Functions to copy from and to user space.
  */
@@ -11,10 +11,35 @@
 #include <asm/asm-offsets.h>
 #include <asm/thread_info.h>
 #include <asm/cpufeatures.h>
-#include <asm/alternative-asm.h>
+#include <asm/alternative.h>
 #include <asm/asm.h>
 #include <asm/smap.h>
 #include <asm/export.h>
+#include <asm/trapnr.h>
+
+.macro ALIGN_DESTINATION
+	/* check for bad alignment of destination */
+	movl %edi,%ecx
+	andl $7,%ecx
+	jz 102f				/* already aligned */
+	subl $8,%ecx
+	negl %ecx
+	subl %ecx,%edx
+100:	movb (%rsi),%al
+101:	movb %al,(%rdi)
+	incq %rsi
+	incq %rdi
+	decl %ecx
+	jnz 100b
+102:
+	.section .fixup,"ax"
+103:	addl %ecx,%edx			/* ecx is zerorest also */
+	jmp .Lcopy_user_handle_tail
+	.previous
+
+	_ASM_EXTABLE_CPY(100b, 103b)
+	_ASM_EXTABLE_CPY(101b, 103b)
+	.endm
 
 /*
  * copy_user_generic_unrolled - memory copy with exception handling.
@@ -29,7 +54,7 @@
  * Output:
  * eax uncopied bytes or 0 if successful.
  */
-ENTRY(copy_user_generic_unrolled)
+SYM_FUNC_START(copy_user_generic_unrolled)
 	ASM_STAC
 	cmpl $8,%edx
 	jb 20f		/* less then 8 bytes, go to byte copy loop */
@@ -80,7 +105,7 @@
 	jnz 21b
 23:	xor %eax,%eax
 	ASM_CLAC
-	ret
+	RET
 
 	.section .fixup,"ax"
 30:	shll $6,%ecx
@@ -89,30 +114,30 @@
 40:	leal (%rdx,%rcx,8),%edx
 	jmp 60f
 50:	movl %ecx,%edx
-60:	jmp copy_user_handle_tail /* ecx is zerorest also */
+60:	jmp .Lcopy_user_handle_tail /* ecx is zerorest also */
 	.previous
 
-	_ASM_EXTABLE(1b,30b)
-	_ASM_EXTABLE(2b,30b)
-	_ASM_EXTABLE(3b,30b)
-	_ASM_EXTABLE(4b,30b)
-	_ASM_EXTABLE(5b,30b)
-	_ASM_EXTABLE(6b,30b)
-	_ASM_EXTABLE(7b,30b)
-	_ASM_EXTABLE(8b,30b)
-	_ASM_EXTABLE(9b,30b)
-	_ASM_EXTABLE(10b,30b)
-	_ASM_EXTABLE(11b,30b)
-	_ASM_EXTABLE(12b,30b)
-	_ASM_EXTABLE(13b,30b)
-	_ASM_EXTABLE(14b,30b)
-	_ASM_EXTABLE(15b,30b)
-	_ASM_EXTABLE(16b,30b)
-	_ASM_EXTABLE(18b,40b)
-	_ASM_EXTABLE(19b,40b)
-	_ASM_EXTABLE(21b,50b)
-	_ASM_EXTABLE(22b,50b)
-ENDPROC(copy_user_generic_unrolled)
+	_ASM_EXTABLE_CPY(1b, 30b)
+	_ASM_EXTABLE_CPY(2b, 30b)
+	_ASM_EXTABLE_CPY(3b, 30b)
+	_ASM_EXTABLE_CPY(4b, 30b)
+	_ASM_EXTABLE_CPY(5b, 30b)
+	_ASM_EXTABLE_CPY(6b, 30b)
+	_ASM_EXTABLE_CPY(7b, 30b)
+	_ASM_EXTABLE_CPY(8b, 30b)
+	_ASM_EXTABLE_CPY(9b, 30b)
+	_ASM_EXTABLE_CPY(10b, 30b)
+	_ASM_EXTABLE_CPY(11b, 30b)
+	_ASM_EXTABLE_CPY(12b, 30b)
+	_ASM_EXTABLE_CPY(13b, 30b)
+	_ASM_EXTABLE_CPY(14b, 30b)
+	_ASM_EXTABLE_CPY(15b, 30b)
+	_ASM_EXTABLE_CPY(16b, 30b)
+	_ASM_EXTABLE_CPY(18b, 40b)
+	_ASM_EXTABLE_CPY(19b, 40b)
+	_ASM_EXTABLE_CPY(21b, 50b)
+	_ASM_EXTABLE_CPY(22b, 50b)
+SYM_FUNC_END(copy_user_generic_unrolled)
 EXPORT_SYMBOL(copy_user_generic_unrolled)
 
 /* Some CPUs run faster using the string copy instructions.
@@ -133,7 +158,7 @@
  * Output:
  * eax uncopied bytes or 0 if successful.
  */
-ENTRY(copy_user_generic_string)
+SYM_FUNC_START(copy_user_generic_string)
 	ASM_STAC
 	cmpl $8,%edx
 	jb 2f		/* less than 8 bytes, go to byte copy loop */
@@ -148,17 +173,17 @@
 	movsb
 	xorl %eax,%eax
 	ASM_CLAC
-	ret
+	RET
 
 	.section .fixup,"ax"
 11:	leal (%rdx,%rcx,8),%ecx
 12:	movl %ecx,%edx		/* ecx is zerorest also */
-	jmp copy_user_handle_tail
+	jmp .Lcopy_user_handle_tail
 	.previous
 
-	_ASM_EXTABLE(1b,11b)
-	_ASM_EXTABLE(3b,12b)
-ENDPROC(copy_user_generic_string)
+	_ASM_EXTABLE_CPY(1b, 11b)
+	_ASM_EXTABLE_CPY(3b, 12b)
+SYM_FUNC_END(copy_user_generic_string)
 EXPORT_SYMBOL(copy_user_generic_string)
 
 /*
@@ -173,7 +198,7 @@
  * Output:
  * eax uncopied bytes or 0 if successful.
  */
-ENTRY(copy_user_enhanced_fast_string)
+SYM_FUNC_START(copy_user_enhanced_fast_string)
 	ASM_STAC
 	cmpl $64,%edx
 	jb .L_copy_short_string	/* less then 64 bytes, avoid the costly 'rep' */
@@ -182,16 +207,53 @@
 	movsb
 	xorl %eax,%eax
 	ASM_CLAC
-	ret
+	RET
 
 	.section .fixup,"ax"
 12:	movl %ecx,%edx		/* ecx is zerorest also */
-	jmp copy_user_handle_tail
+	jmp .Lcopy_user_handle_tail
 	.previous
 
-	_ASM_EXTABLE(1b,12b)
-ENDPROC(copy_user_enhanced_fast_string)
+	_ASM_EXTABLE_CPY(1b, 12b)
+SYM_FUNC_END(copy_user_enhanced_fast_string)
 EXPORT_SYMBOL(copy_user_enhanced_fast_string)
+
+/*
+ * Try to copy last bytes and clear the rest if needed.
+ * Since protection fault in copy_from/to_user is not a normal situation,
+ * it is not necessary to optimize tail handling.
+ * Don't try to copy the tail if machine check happened
+ *
+ * Input:
+ * rdi destination
+ * rsi source
+ * rdx count
+ *
+ * Output:
+ * eax uncopied bytes or 0 if successful.
+ */
+SYM_CODE_START_LOCAL(.Lcopy_user_handle_tail)
+	movl %edx,%ecx
+	cmp $X86_TRAP_MC,%eax		/* check if X86_TRAP_MC */
+	je 3f
+1:	rep movsb
+2:	mov %ecx,%eax
+	ASM_CLAC
+	RET
+
+	/*
+	 * Return zero to pretend that this copy succeeded. This
+	 * is counter-intuitive, but needed to prevent the code
+	 * in lib/iov_iter.c from retrying and running back into
+	 * the poison cache line again. The machine check handler
+	 * will ensure that a SIGBUS is sent to the task.
+	 */
+3:	xorl %eax,%eax
+	ASM_CLAC
+	RET
+
+	_ASM_EXTABLE_CPY(1b, 2b)
+SYM_CODE_END(.Lcopy_user_handle_tail)
 
 /*
  * copy_user_nocache - Uncached memory copy with exception handling
@@ -202,7 +264,7 @@
  * - Require 8-byte alignment when size is 8 bytes or larger.
  * - Require 4-byte alignment when size is 4 bytes.
  */
-ENTRY(__copy_user_nocache)
+SYM_FUNC_START(__copy_user_nocache)
 	ASM_STAC
 
 	/* If size is less than 8 bytes, go to 4-byte copy */
@@ -299,7 +361,7 @@
 	xorl %eax,%eax
 	ASM_CLAC
 	sfence
-	ret
+	RET
 
 	.section .fixup,"ax"
 .L_fixup_4x8b_copy:
@@ -316,30 +378,30 @@
 	movl %ecx,%edx
 .L_fixup_handle_tail:
 	sfence
-	jmp copy_user_handle_tail
+	jmp .Lcopy_user_handle_tail
 	.previous
 
-	_ASM_EXTABLE(1b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(2b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(3b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(4b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(5b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(6b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(7b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(8b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(9b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(10b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(11b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(12b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(13b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(14b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(15b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(16b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(20b,.L_fixup_8b_copy)
-	_ASM_EXTABLE(21b,.L_fixup_8b_copy)
-	_ASM_EXTABLE(30b,.L_fixup_4b_copy)
-	_ASM_EXTABLE(31b,.L_fixup_4b_copy)
-	_ASM_EXTABLE(40b,.L_fixup_1b_copy)
-	_ASM_EXTABLE(41b,.L_fixup_1b_copy)
-ENDPROC(__copy_user_nocache)
+	_ASM_EXTABLE_CPY(1b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(2b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(3b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(4b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(5b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(6b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(7b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(8b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(9b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(10b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(11b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(12b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(13b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(14b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(15b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(16b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(20b, .L_fixup_8b_copy)
+	_ASM_EXTABLE_CPY(21b, .L_fixup_8b_copy)
+	_ASM_EXTABLE_CPY(30b, .L_fixup_4b_copy)
+	_ASM_EXTABLE_CPY(31b, .L_fixup_4b_copy)
+	_ASM_EXTABLE_CPY(40b, .L_fixup_1b_copy)
+	_ASM_EXTABLE_CPY(41b, .L_fixup_1b_copy)
+SYM_FUNC_END(__copy_user_nocache)
 EXPORT_SYMBOL(__copy_user_nocache)
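
Side note on the calling convention these routines document ("eax uncopied bytes or 0 if successful"): below is a minimal C sketch of how a caller of copy_from_user(), which is backed by these primitives, typically consumes that return value. The sketch is illustrative only; the function name, buffer, and sizes are assumptions for the example and are not part of this patch.

/*
 * Minimal sketch (illustrative, not part of this patch).
 * copy_from_user() returns the number of bytes that could NOT be copied,
 * so 0 means complete success; callers normally map any nonzero return
 * to -EFAULT rather than inspecting the partial count.
 */
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/uaccess.h>

static int example_read_from_user(const void __user *ubuf, size_t len)
{
	char kbuf[64];

	if (len > sizeof(kbuf))
		return -EINVAL;

	if (copy_from_user(kbuf, ubuf, len))
		return -EFAULT;		/* some bytes were left uncopied */

	/* ... use kbuf ... */
	return 0;
}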