```diff
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
  * Copyright 2002 Andi Kleen, SuSE Labs.
- * Subject to the GNU Public License v2.
  *
  * Functions to copy from and to user space.
  */
@@ ... @@
 #include <asm/asm-offsets.h>
 #include <asm/thread_info.h>
 #include <asm/cpufeatures.h>
-#include <asm/alternative-asm.h>
+#include <asm/alternative.h>
 #include <asm/asm.h>
 #include <asm/smap.h>
 #include <asm/export.h>
+#include <asm/trapnr.h>
+
+.macro ALIGN_DESTINATION
+	/* check for bad alignment of destination */
+	movl %edi,%ecx
+	andl $7,%ecx
+	jz 102f				/* already aligned */
+	subl $8,%ecx
+	negl %ecx
+	subl %ecx,%edx
+100:	movb (%rsi),%al
+101:	movb %al,(%rdi)
+	incq %rsi
+	incq %rdi
+	decl %ecx
+	jnz 100b
+102:
+	.section .fixup,"ax"
+103:	addl %ecx,%edx			/* ecx is zerorest also */
+	jmp .Lcopy_user_handle_tail
+	.previous
+
+	_ASM_EXTABLE_CPY(100b, 103b)
+	_ASM_EXTABLE_CPY(101b, 103b)
+	.endm
```
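The macro's arithmetic is terse but mechanical: `andl $7,%ecx` extracts the destination's misalignment, and the `subl $8,%ecx` / `negl %ecx` pair converts it into the number of bytes needed to reach the next 8-byte boundary, which the macro then copies one byte at a time while shrinking the remaining count in `%edx`. A minimal userspace sketch of the same computation (an illustration, not kernel code):

```c
#include <stdint.h>
#include <stdio.h>

/*
 * Userspace sketch of the ALIGN_DESTINATION arithmetic: how many
 * head bytes must be copied before dst is 8-byte aligned.
 */
unsigned int head_bytes_to_align(const void *dst)
{
	unsigned int misalign = (uintptr_t)dst & 7;	/* andl $7,%ecx */

	if (misalign == 0)				/* jz 102f */
		return 0;
	return 8 - misalign;				/* subl $8; negl */
}

int main(void)
{
	_Alignas(8) char buf[16];

	for (int off = 0; off < 8; off++)
		printf("dst = buf+%d -> %u head byte(s)\n",
		       off, head_bytes_to_align(buf + off));
	return 0;
}
```

If one of the byte moves faults, the fixup at `103:` adds the not-yet-copied head bytes back into `%edx` before jumping to the common tail handler, so the reported "uncopied" count stays accurate.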
```diff
 
 /*
  * copy_user_generic_unrolled - memory copy with exception handling.
@@ ... @@
  * Output:
  * eax uncopied bytes or 0 if successful.
  */
-ENTRY(copy_user_generic_unrolled)
+SYM_FUNC_START(copy_user_generic_unrolled)
 	ASM_STAC
 	cmpl $8,%edx
 	jb 20f		/* less then 8 bytes, go to byte copy loop */
@@ ... @@
 	jnz 21b
23:	xor %eax,%eax
 	ASM_CLAC
-	ret
+	RET
 
 	.section .fixup,"ax"
30:	shll $6,%ecx
@@ ... @@
40:	leal (%rdx,%rcx,8),%edx
 	jmp 60f
50:	movl %ecx,%edx
-60:	jmp copy_user_handle_tail /* ecx is zerorest also */
+60:	jmp .Lcopy_user_handle_tail /* ecx is zerorest also */
 	.previous
 
-	_ASM_EXTABLE(1b,30b)
-	_ASM_EXTABLE(2b,30b)
-	_ASM_EXTABLE(3b,30b)
-	_ASM_EXTABLE(4b,30b)
-	_ASM_EXTABLE(5b,30b)
-	_ASM_EXTABLE(6b,30b)
-	_ASM_EXTABLE(7b,30b)
-	_ASM_EXTABLE(8b,30b)
-	_ASM_EXTABLE(9b,30b)
-	_ASM_EXTABLE(10b,30b)
-	_ASM_EXTABLE(11b,30b)
-	_ASM_EXTABLE(12b,30b)
-	_ASM_EXTABLE(13b,30b)
-	_ASM_EXTABLE(14b,30b)
-	_ASM_EXTABLE(15b,30b)
-	_ASM_EXTABLE(16b,30b)
-	_ASM_EXTABLE(18b,40b)
-	_ASM_EXTABLE(19b,40b)
-	_ASM_EXTABLE(21b,50b)
-	_ASM_EXTABLE(22b,50b)
-ENDPROC(copy_user_generic_unrolled)
+	_ASM_EXTABLE_CPY(1b, 30b)
+	_ASM_EXTABLE_CPY(2b, 30b)
+	_ASM_EXTABLE_CPY(3b, 30b)
+	_ASM_EXTABLE_CPY(4b, 30b)
+	_ASM_EXTABLE_CPY(5b, 30b)
+	_ASM_EXTABLE_CPY(6b, 30b)
+	_ASM_EXTABLE_CPY(7b, 30b)
+	_ASM_EXTABLE_CPY(8b, 30b)
+	_ASM_EXTABLE_CPY(9b, 30b)
+	_ASM_EXTABLE_CPY(10b, 30b)
+	_ASM_EXTABLE_CPY(11b, 30b)
+	_ASM_EXTABLE_CPY(12b, 30b)
+	_ASM_EXTABLE_CPY(13b, 30b)
+	_ASM_EXTABLE_CPY(14b, 30b)
+	_ASM_EXTABLE_CPY(15b, 30b)
+	_ASM_EXTABLE_CPY(16b, 30b)
+	_ASM_EXTABLE_CPY(18b, 40b)
+	_ASM_EXTABLE_CPY(19b, 40b)
+	_ASM_EXTABLE_CPY(21b, 50b)
+	_ASM_EXTABLE_CPY(22b, 50b)
+SYM_FUNC_END(copy_user_generic_unrolled)
 EXPORT_SYMBOL(copy_user_generic_unrolled)
```
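Every variant in this file takes `rdi` (destination), `rsi` (source) and `rdx` (count), and returns the uncopied byte count in `eax`, zero on success. The unrolled variant above moves 64-byte blocks through the moves the exception table tags 1 through 16, then drains an 8-byte word loop (labels 18/19) and a byte loop (labels 21/22); each fixup rebuilds the remaining byte count before handing off to the tail handler. A C model of that three-stage structure, a sketch rather than the kernel routine (plain C has no faulting user access to recover from, so the model always finishes and returns 0):

```c
#include <stddef.h>
#include <stdio.h>
#include <string.h>

/*
 * C model of copy_user_generic_unrolled's structure: 64-byte blocks,
 * then 8-byte words, then single bytes. Returns the count of bytes
 * NOT copied, the same convention as the asm.
 */
size_t copy_unrolled_model(void *dst, const void *src, size_t len)
{
	unsigned char *d = dst;
	const unsigned char *s = src;

	while (len >= 64) {		/* unrolled 8x8-byte block */
		memcpy(d, s, 64);
		d += 64; s += 64; len -= 64;
	}
	while (len >= 8) {		/* qword tail loop */
		memcpy(d, s, 8);
		d += 8; s += 8; len -= 8;
	}
	while (len) {			/* byte tail loop */
		*d++ = *s++;
		len--;
	}
	return len;			/* 23: xor %eax,%eax */
}

int main(void)
{
	char src[100], dst[100];

	memset(src, 'x', sizeof(src));
	printf("uncopied: %zu\n", copy_unrolled_model(dst, src, sizeof(src)));
	return 0;
}
```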
```diff
 
 /* Some CPUs run faster using the string copy instructions.
@@ ... @@
  * Output:
  * eax uncopied bytes or 0 if successful.
  */
-ENTRY(copy_user_generic_string)
+SYM_FUNC_START(copy_user_generic_string)
 	ASM_STAC
 	cmpl $8,%edx
 	jb 2f		/* less than 8 bytes, go to byte copy loop */
@@ ... @@
 	movsb
 	xorl %eax,%eax
 	ASM_CLAC
-	ret
+	RET
 
 	.section .fixup,"ax"
11:	leal (%rdx,%rcx,8),%ecx
12:	movl %ecx,%edx		/* ecx is zerorest also */
-	jmp copy_user_handle_tail
+	jmp .Lcopy_user_handle_tail
 	.previous
 
-	_ASM_EXTABLE(1b,11b)
-	_ASM_EXTABLE(3b,12b)
-ENDPROC(copy_user_generic_string)
+	_ASM_EXTABLE_CPY(1b, 11b)
+	_ASM_EXTABLE_CPY(3b, 12b)
+SYM_FUNC_END(copy_user_generic_string)
 EXPORT_SYMBOL(copy_user_generic_string)
```
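The string variant leans on the hardware: the qword count goes to `rcx` for a single `rep movsq` and the remainder is finished with `rep movsb`. The split is visible in the fixup, where `leal (%rdx,%rcx,8),%ecx` rebuilds a byte count from the qwords still outstanding. A userspace sketch of the same split (x86-64 GCC inline asm, illustration only; the kernel adds STAC/CLAC and the fault fixups):

```c
#include <stddef.h>
#include <stdio.h>

/*
 * Sketch of the copy_user_generic_string split: count/8 qwords via
 * one "rep movsq", then count%8 trailing bytes via "rep movsb".
 */
void copy_string_model(void *dst, const void *src, size_t len)
{
	size_t qwords = len >> 3;	/* qword count -> rcx */
	size_t bytes = len & 7;		/* byte remainder */

	asm volatile("rep movsq"
		     : "+D"(dst), "+S"(src), "+c"(qwords) :: "memory");
	asm volatile("rep movsb"
		     : "+D"(dst), "+S"(src), "+c"(bytes) :: "memory");
}

int main(void)
{
	char src[21] = "twenty byte payload!", dst[21] = { 0 };

	copy_string_model(dst, src, sizeof(src));
	puts(dst);
	return 0;
}
```

Because the `+D`/`+S` operands are updated in place by `rep movsq`, the following `rep movsb` continues exactly where the qword copy stopped, just as the instruction sequence does in the asm.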
```diff
 
 /*
@@ ... @@
  * Output:
  * eax uncopied bytes or 0 if successful.
  */
-ENTRY(copy_user_enhanced_fast_string)
+SYM_FUNC_START(copy_user_enhanced_fast_string)
 	ASM_STAC
 	cmpl $64,%edx
 	jb .L_copy_short_string	/* less then 64 bytes, avoid the costly 'rep' */
@@ ... @@
 	movsb
 	xorl %eax,%eax
 	ASM_CLAC
-	ret
+	RET
 
 	.section .fixup,"ax"
12:	movl %ecx,%edx		/* ecx is zerorest also */
-	jmp copy_user_handle_tail
+	jmp .Lcopy_user_handle_tail
 	.previous
 
-	_ASM_EXTABLE(1b,12b)
-ENDPROC(copy_user_enhanced_fast_string)
+	_ASM_EXTABLE_CPY(1b, 12b)
+SYM_FUNC_END(copy_user_enhanced_fast_string)
 EXPORT_SYMBOL(copy_user_enhanced_fast_string)
```
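On CPUs with enhanced REP MOVSB, the whole copy collapses to a single `rep movsb`; the `cmpl $64,%edx` guard routes copies under 64 bytes to `.L_copy_short_string` instead, because the `rep` startup cost dominates small copies. The core of that fast path, sketched for userspace (illustrative, not the kernel routine):

```c
#include <stddef.h>

/*
 * One "rep movsb" for the whole buffer, as on ERMS machines.
 * x86-64 userspace illustration; no STAC/CLAC, no fixups.
 */
void *memcpy_erms_model(void *dst, const void *src, size_t len)
{
	void *ret = dst;

	asm volatile("rep movsb"
		     : "+D"(dst), "+S"(src), "+c"(len) :: "memory");
	return ret;
}
```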
```diff
+
+/*
+ * Try to copy last bytes and clear the rest if needed.
+ * Since protection fault in copy_from/to_user is not a normal situation,
+ * it is not necessary to optimize tail handling.
+ * Don't try to copy the tail if machine check happened
+ *
+ * Input:
+ * rdi destination
+ * rsi source
+ * rdx count
+ *
+ * Output:
+ * eax uncopied bytes or 0 if successful.
+ */
+SYM_CODE_START_LOCAL(.Lcopy_user_handle_tail)
+	movl %edx,%ecx
+	cmp $X86_TRAP_MC,%eax		/* check if X86_TRAP_MC */
+	je 3f
+1:	rep movsb
+2:	mov %ecx,%eax
+	ASM_CLAC
+	RET
+
+	/*
+	 * Return zero to pretend that this copy succeeded. This
+	 * is counter-intuitive, but needed to prevent the code
+	 * in lib/iov_iter.c from retrying and running back into
+	 * the poison cache line again. The machine check handler
+	 * will ensure that a SIGBUS is sent to the task.
+	 */
+3:	xorl %eax,%eax
+	ASM_CLAC
+	RET
+
+	_ASM_EXTABLE_CPY(1b, 2b)
+SYM_CODE_END(.Lcopy_user_handle_tail)
```
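The tail handler receives the trap number in `eax`, retries the remaining `rdx` bytes with a byte-granular `rep movsb`, and reports whatever still could not be copied. The one exception is a machine check, where retrying would walk straight back into the poisoned cache line, so it claims success and lets the #MC handler deliver SIGBUS. The same logic in C, a sketch under those assumptions (the fault-free model always completes the retry, so it reports 0):

```c
#include <stdbool.h>
#include <stddef.h>

/*
 * C model of .Lcopy_user_handle_tail: give up (reporting success) on
 * a machine check, otherwise retry byte by byte and report whatever
 * remains uncopied.
 */
size_t handle_tail_model(unsigned char *dst, const unsigned char *src,
			 size_t remaining, bool was_machine_check)
{
	if (was_machine_check)		/* cmp $X86_TRAP_MC,%eax; je 3f */
		return 0;		/* 3: xorl %eax,%eax */

	while (remaining) {		/* 1: rep movsb */
		*dst++ = *src++;	/* a fault here would land at 2: */
		remaining--;
	}
	return remaining;		/* 2: mov %ecx,%eax (uncopied) */
}
```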
```diff
 
 /*
  * copy_user_nocache - Uncached memory copy with exception handling
@@ ... @@
  * - Require 8-byte alignment when size is 8 bytes or larger.
  * - Require 4-byte alignment when size is 4 bytes.
  */
-ENTRY(__copy_user_nocache)
+SYM_FUNC_START(__copy_user_nocache)
 	ASM_STAC
 
 	/* If size is less than 8 bytes, go to 4-byte copy */
@@ ... @@
 	xorl %eax,%eax
 	ASM_CLAC
 	sfence
-	ret
+	RET
 
 	.section .fixup,"ax"
 .L_fixup_4x8b_copy:
@@ ... @@
 	movl %ecx,%edx
 .L_fixup_handle_tail:
 	sfence
-	jmp copy_user_handle_tail
+	jmp .Lcopy_user_handle_tail
 	.previous
 
-	_ASM_EXTABLE(1b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(2b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(3b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(4b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(5b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(6b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(7b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(8b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(9b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(10b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(11b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(12b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(13b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(14b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(15b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(16b,.L_fixup_4x8b_copy)
-	_ASM_EXTABLE(20b,.L_fixup_8b_copy)
-	_ASM_EXTABLE(21b,.L_fixup_8b_copy)
-	_ASM_EXTABLE(30b,.L_fixup_4b_copy)
-	_ASM_EXTABLE(31b,.L_fixup_4b_copy)
-	_ASM_EXTABLE(40b,.L_fixup_1b_copy)
-	_ASM_EXTABLE(41b,.L_fixup_1b_copy)
-ENDPROC(__copy_user_nocache)
+	_ASM_EXTABLE_CPY(1b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(2b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(3b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(4b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(5b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(6b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(7b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(8b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(9b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(10b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(11b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(12b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(13b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(14b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(15b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(16b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(20b, .L_fixup_8b_copy)
+	_ASM_EXTABLE_CPY(21b, .L_fixup_8b_copy)
+	_ASM_EXTABLE_CPY(30b, .L_fixup_4b_copy)
+	_ASM_EXTABLE_CPY(31b, .L_fixup_4b_copy)
+	_ASM_EXTABLE_CPY(40b, .L_fixup_1b_copy)
+	_ASM_EXTABLE_CPY(41b, .L_fixup_1b_copy)
+SYM_FUNC_END(__copy_user_nocache)
 EXPORT_SYMBOL(__copy_user_nocache)
```
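The alignment rules quoted in the comment are preconditions on the caller, not something the routine fixes up. Stated as a C predicate (an illustrative helper, not a kernel API; it is assumed here that the rules apply to the destination, where the non-temporal stores land):

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/*
 * The __copy_user_nocache alignment contract: 8-byte alignment when
 * size is 8 bytes or larger, 4-byte alignment when size is 4 bytes.
 */
bool nocache_dst_ok(const void *dst, size_t size)
{
	uintptr_t p = (uintptr_t)dst;

	if (size >= 8)
		return (p & 7) == 0;	/* 8-byte aligned */
	if (size == 4)
		return (p & 3) == 0;	/* 4-byte aligned */
	return true;			/* no stated requirement */
}
```

Note the `sfence` on both the success path and the `.L_fixup_handle_tail` path: it orders the weakly-ordered non-temporal stores before the routine returns or falls back to the ordinary tail copy.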