| .. | .. |
|---|
| 279 | 279 | #endif |
|---|
| 280 | 280 | |
|---|
| 281 | 281 | /* odd buffer alignment? */ |
|---|
| 282 | | -#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_LOONGSON3) |
|---|
| 282 | +#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR5) || \ |
|---|
| 283 | + defined(CONFIG_CPU_LOONGSON64) |
|---|
| 283 | 284 | .set push |
|---|
| 284 | 285 | .set arch=mips32r2 |
|---|
| 285 | 286 | wsbh v1, sum |
|---|
| .. | .. |
|---|
| 307 | 308 | /* |
|---|
| 308 | 309 | * checksum and copy routines based on memcpy.S |
|---|
| 309 | 310 | * |
|---|
| 310 | | - * csum_partial_copy_nocheck(src, dst, len, sum) |
|---|
| 311 | | - * __csum_partial_copy_kernel(src, dst, len, sum, errp) |
|---|
| 311 | + * __csum_partial_copy_nocheck(src, dst, len) |
|---|
| 312 | + * __csum_partial_copy_{to,from}_user(src, dst, len) |
|---|
| 312 | 313 | * |
|---|
| 313 | 314 | * See "Spec" in memcpy.S for details. Unlike __copy_user, all |
|---|
| 314 | 315 | * function in this file use the standard calling convention. |
|---|
| .. | .. |
|---|
| 317 | 318 | #define src a0 |
|---|
| 318 | 319 | #define dst a1 |
|---|
| 319 | 320 | #define len a2 |
|---|
| 320 | | -#define psum a3 |
|---|
| 321 | 321 | #define sum v0 |
|---|
| 322 | 322 | #define odd t8 |
|---|
| 323 | | -#define errptr t9 |
|---|
| 324 | 323 | |
|---|
| 325 | 324 | /* |
|---|
| 326 | | - * The exception handler for loads requires that: |
|---|
| 327 | | - * 1- AT contain the address of the byte just past the end of the source |
|---|
| 328 | | - * of the copy, |
|---|
| 329 | | - * 2- src_entry <= src < AT, and |
|---|
| 330 | | - * 3- (dst - src) == (dst_entry - src_entry), |
|---|
| 331 | | - * The _entry suffix denotes values when __copy_user was called. |
|---|
| 332 | | - * |
|---|
| 333 | | - * (1) is set up up by __csum_partial_copy_from_user and maintained by |
|---|
| 334 | | - * not writing AT in __csum_partial_copy |
|---|
| 335 | | - * (2) is met by incrementing src by the number of bytes copied |
|---|
| 336 | | - * (3) is met by not doing loads between a pair of increments of dst and src |
|---|
| 337 | | - * |
|---|
| 338 | | - * The exception handlers for stores stores -EFAULT to errptr and return. |
|---|
| 339 | | - * These handlers do not need to overwrite any data. |
|---|
| 325 | + * All exception handlers simply return 0. |
|---|
| 340 | 326 | */ |
|---|
| 341 | 327 | |
|---|
| 342 | 328 | /* Instruction type */ |
|---|
| .. | .. |
|---|
| 357 | 343 | * addr : Address |
|---|
| 358 | 344 | * handler : Exception handler |
|---|
| 359 | 345 | */ |
|---|
| 360 | | -#define EXC(insn, type, reg, addr, handler) \ |
|---|
| 346 | +#define EXC(insn, type, reg, addr) \ |
|---|
| 361 | 347 | .if \mode == LEGACY_MODE; \ |
|---|
| 362 | 348 | 9: insn reg, addr; \ |
|---|
| 363 | 349 | .section __ex_table,"a"; \ |
|---|
| 364 | | - PTR 9b, handler; \ |
|---|
| 350 | + PTR 9b, .L_exc; \ |
|---|
| 365 | 351 | .previous; \ |
|---|
| 366 | 352 | /* This is enabled in EVA mode */ \ |
|---|
| 367 | 353 | .else; \ |
|---|
| .. | .. |
|---|
| 370 | 356 | ((\to == USEROP) && (type == ST_INSN)); \ |
|---|
| 371 | 357 | 9: __BUILD_EVA_INSN(insn##e, reg, addr); \ |
|---|
| 372 | 358 | .section __ex_table,"a"; \ |
|---|
| 373 | | - PTR 9b, handler; \ |
|---|
| 359 | + PTR 9b, .L_exc; \ |
|---|
| 374 | 360 | .previous; \ |
|---|
| 375 | 361 | .else; \ |
|---|
| 376 | 362 | /* EVA without exception */ \ |
|---|
| .. | .. |
|---|
| 383 | 369 | #ifdef USE_DOUBLE |
|---|
| 384 | 370 | |
|---|
| 385 | 371 | #define LOADK ld /* No exception */ |
|---|
| 386 | | -#define LOAD(reg, addr, handler) EXC(ld, LD_INSN, reg, addr, handler) |
|---|
| 387 | | -#define LOADBU(reg, addr, handler) EXC(lbu, LD_INSN, reg, addr, handler) |
|---|
| 388 | | -#define LOADL(reg, addr, handler) EXC(ldl, LD_INSN, reg, addr, handler) |
|---|
| 389 | | -#define LOADR(reg, addr, handler) EXC(ldr, LD_INSN, reg, addr, handler) |
|---|
| 390 | | -#define STOREB(reg, addr, handler) EXC(sb, ST_INSN, reg, addr, handler) |
|---|
| 391 | | -#define STOREL(reg, addr, handler) EXC(sdl, ST_INSN, reg, addr, handler) |
|---|
| 392 | | -#define STORER(reg, addr, handler) EXC(sdr, ST_INSN, reg, addr, handler) |
|---|
| 393 | | -#define STORE(reg, addr, handler) EXC(sd, ST_INSN, reg, addr, handler) |
|---|
| 372 | +#define LOAD(reg, addr) EXC(ld, LD_INSN, reg, addr) |
|---|
| 373 | +#define LOADBU(reg, addr) EXC(lbu, LD_INSN, reg, addr) |
|---|
| 374 | +#define LOADL(reg, addr) EXC(ldl, LD_INSN, reg, addr) |
|---|
| 375 | +#define LOADR(reg, addr) EXC(ldr, LD_INSN, reg, addr) |
|---|
| 376 | +#define STOREB(reg, addr) EXC(sb, ST_INSN, reg, addr) |
|---|
| 377 | +#define STOREL(reg, addr) EXC(sdl, ST_INSN, reg, addr) |
|---|
| 378 | +#define STORER(reg, addr) EXC(sdr, ST_INSN, reg, addr) |
|---|
| 379 | +#define STORE(reg, addr) EXC(sd, ST_INSN, reg, addr) |
|---|
| 394 | 380 | #define ADD daddu |
|---|
| 395 | 381 | #define SUB dsubu |
|---|
| 396 | 382 | #define SRL dsrl |
|---|
| .. | .. |
|---|
| 403 | 389 | #else |
|---|
| 404 | 390 | |
|---|
| 405 | 391 | #define LOADK lw /* No exception */ |
|---|
| 406 | | -#define LOAD(reg, addr, handler) EXC(lw, LD_INSN, reg, addr, handler) |
|---|
| 407 | | -#define LOADBU(reg, addr, handler) EXC(lbu, LD_INSN, reg, addr, handler) |
|---|
| 408 | | -#define LOADL(reg, addr, handler) EXC(lwl, LD_INSN, reg, addr, handler) |
|---|
| 409 | | -#define LOADR(reg, addr, handler) EXC(lwr, LD_INSN, reg, addr, handler) |
|---|
| 410 | | -#define STOREB(reg, addr, handler) EXC(sb, ST_INSN, reg, addr, handler) |
|---|
| 411 | | -#define STOREL(reg, addr, handler) EXC(swl, ST_INSN, reg, addr, handler) |
|---|
| 412 | | -#define STORER(reg, addr, handler) EXC(swr, ST_INSN, reg, addr, handler) |
|---|
| 413 | | -#define STORE(reg, addr, handler) EXC(sw, ST_INSN, reg, addr, handler) |
|---|
| 392 | +#define LOAD(reg, addr) EXC(lw, LD_INSN, reg, addr) |
|---|
| 393 | +#define LOADBU(reg, addr) EXC(lbu, LD_INSN, reg, addr) |
|---|
| 394 | +#define LOADL(reg, addr) EXC(lwl, LD_INSN, reg, addr) |
|---|
| 395 | +#define LOADR(reg, addr) EXC(lwr, LD_INSN, reg, addr) |
|---|
| 396 | +#define STOREB(reg, addr) EXC(sb, ST_INSN, reg, addr) |
|---|
| 397 | +#define STOREL(reg, addr) EXC(swl, ST_INSN, reg, addr) |
|---|
| 398 | +#define STORER(reg, addr) EXC(swr, ST_INSN, reg, addr) |
|---|
| 399 | +#define STORE(reg, addr) EXC(sw, ST_INSN, reg, addr) |
|---|
| 414 | 400 | #define ADD addu |
|---|
| 415 | 401 | #define SUB subu |
|---|
| 416 | 402 | #define SRL srl |
|---|
| .. | .. |
|---|
| 449 | 435 | .set at=v1 |
|---|
| 450 | 436 | #endif |
|---|
| 451 | 437 | |
|---|
| 452 | | - .macro __BUILD_CSUM_PARTIAL_COPY_USER mode, from, to, __nocheck |
|---|
| 438 | + .macro __BUILD_CSUM_PARTIAL_COPY_USER mode, from, to |
|---|
| 453 | 439 | |
|---|
| 454 | | - PTR_ADDU AT, src, len /* See (1) above. */ |
|---|
| 455 | | - /* initialize __nocheck if this the first time we execute this |
|---|
| 456 | | - * macro |
|---|
| 457 | | - */ |
|---|
| 458 | | -#ifdef CONFIG_64BIT |
|---|
| 459 | | - move errptr, a4 |
|---|
| 460 | | -#else |
|---|
| 461 | | - lw errptr, 16(sp) |
|---|
| 462 | | -#endif |
|---|
| 463 | | - .if \__nocheck == 1 |
|---|
| 464 | | - FEXPORT(csum_partial_copy_nocheck) |
|---|
| 465 | | - EXPORT_SYMBOL(csum_partial_copy_nocheck) |
|---|
| 466 | | - .endif |
|---|
| 467 | | - move sum, zero |
|---|
| 440 | + li sum, -1 |
|---|
| 468 | 441 | move odd, zero |
|---|
| 469 | 442 | /* |
|---|
| 470 | 443 | * Note: dst & src may be unaligned, len may be 0 |
|---|
| .. | .. |
|---|
| 496 | 469 | SUB len, 8*NBYTES # subtract here for bgez loop |
|---|
| 497 | 470 | .align 4 |
|---|
| 498 | 471 | 1: |
|---|
| 499 | | - LOAD(t0, UNIT(0)(src), .Ll_exc\@) |
|---|
| 500 | | - LOAD(t1, UNIT(1)(src), .Ll_exc_copy\@) |
|---|
| 501 | | - LOAD(t2, UNIT(2)(src), .Ll_exc_copy\@) |
|---|
| 502 | | - LOAD(t3, UNIT(3)(src), .Ll_exc_copy\@) |
|---|
| 503 | | - LOAD(t4, UNIT(4)(src), .Ll_exc_copy\@) |
|---|
| 504 | | - LOAD(t5, UNIT(5)(src), .Ll_exc_copy\@) |
|---|
| 505 | | - LOAD(t6, UNIT(6)(src), .Ll_exc_copy\@) |
|---|
| 506 | | - LOAD(t7, UNIT(7)(src), .Ll_exc_copy\@) |
|---|
| 472 | + LOAD(t0, UNIT(0)(src)) |
|---|
| 473 | + LOAD(t1, UNIT(1)(src)) |
|---|
| 474 | + LOAD(t2, UNIT(2)(src)) |
|---|
| 475 | + LOAD(t3, UNIT(3)(src)) |
|---|
| 476 | + LOAD(t4, UNIT(4)(src)) |
|---|
| 477 | + LOAD(t5, UNIT(5)(src)) |
|---|
| 478 | + LOAD(t6, UNIT(6)(src)) |
|---|
| 479 | + LOAD(t7, UNIT(7)(src)) |
|---|
| 507 | 480 | SUB len, len, 8*NBYTES |
|---|
| 508 | 481 | ADD src, src, 8*NBYTES |
|---|
| 509 | | - STORE(t0, UNIT(0)(dst), .Ls_exc\@) |
|---|
| 482 | + STORE(t0, UNIT(0)(dst)) |
|---|
| 510 | 483 | ADDC(t0, t1) |
|---|
| 511 | | - STORE(t1, UNIT(1)(dst), .Ls_exc\@) |
|---|
| 484 | + STORE(t1, UNIT(1)(dst)) |
|---|
| 512 | 485 | ADDC(sum, t0) |
|---|
| 513 | | - STORE(t2, UNIT(2)(dst), .Ls_exc\@) |
|---|
| 486 | + STORE(t2, UNIT(2)(dst)) |
|---|
| 514 | 487 | ADDC(t2, t3) |
|---|
| 515 | | - STORE(t3, UNIT(3)(dst), .Ls_exc\@) |
|---|
| 488 | + STORE(t3, UNIT(3)(dst)) |
|---|
| 516 | 489 | ADDC(sum, t2) |
|---|
| 517 | | - STORE(t4, UNIT(4)(dst), .Ls_exc\@) |
|---|
| 490 | + STORE(t4, UNIT(4)(dst)) |
|---|
| 518 | 491 | ADDC(t4, t5) |
|---|
| 519 | | - STORE(t5, UNIT(5)(dst), .Ls_exc\@) |
|---|
| 492 | + STORE(t5, UNIT(5)(dst)) |
|---|
| 520 | 493 | ADDC(sum, t4) |
|---|
| 521 | | - STORE(t6, UNIT(6)(dst), .Ls_exc\@) |
|---|
| 494 | + STORE(t6, UNIT(6)(dst)) |
|---|
| 522 | 495 | ADDC(t6, t7) |
|---|
| 523 | | - STORE(t7, UNIT(7)(dst), .Ls_exc\@) |
|---|
| 496 | + STORE(t7, UNIT(7)(dst)) |
|---|
| 524 | 497 | ADDC(sum, t6) |
|---|
| 525 | 498 | .set reorder /* DADDI_WAR */ |
|---|
| 526 | 499 | ADD dst, dst, 8*NBYTES |
|---|
| .. | .. |
|---|
| 540 | 513 | /* |
|---|
| 541 | 514 | * len >= 4*NBYTES |
|---|
| 542 | 515 | */ |
|---|
| 543 | | - LOAD(t0, UNIT(0)(src), .Ll_exc\@) |
|---|
| 544 | | - LOAD(t1, UNIT(1)(src), .Ll_exc_copy\@) |
|---|
| 545 | | - LOAD(t2, UNIT(2)(src), .Ll_exc_copy\@) |
|---|
| 546 | | - LOAD(t3, UNIT(3)(src), .Ll_exc_copy\@) |
|---|
| 516 | + LOAD(t0, UNIT(0)(src)) |
|---|
| 517 | + LOAD(t1, UNIT(1)(src)) |
|---|
| 518 | + LOAD(t2, UNIT(2)(src)) |
|---|
| 519 | + LOAD(t3, UNIT(3)(src)) |
|---|
| 547 | 520 | SUB len, len, 4*NBYTES |
|---|
| 548 | 521 | ADD src, src, 4*NBYTES |
|---|
| 549 | | - STORE(t0, UNIT(0)(dst), .Ls_exc\@) |
|---|
| 522 | + STORE(t0, UNIT(0)(dst)) |
|---|
| 550 | 523 | ADDC(t0, t1) |
|---|
| 551 | | - STORE(t1, UNIT(1)(dst), .Ls_exc\@) |
|---|
| 524 | + STORE(t1, UNIT(1)(dst)) |
|---|
| 552 | 525 | ADDC(sum, t0) |
|---|
| 553 | | - STORE(t2, UNIT(2)(dst), .Ls_exc\@) |
|---|
| 526 | + STORE(t2, UNIT(2)(dst)) |
|---|
| 554 | 527 | ADDC(t2, t3) |
|---|
| 555 | | - STORE(t3, UNIT(3)(dst), .Ls_exc\@) |
|---|
| 528 | + STORE(t3, UNIT(3)(dst)) |
|---|
| 556 | 529 | ADDC(sum, t2) |
|---|
| 557 | 530 | .set reorder /* DADDI_WAR */ |
|---|
| 558 | 531 | ADD dst, dst, 4*NBYTES |
|---|
| .. | .. |
|---|
| 565 | 538 | beq rem, len, .Lcopy_bytes\@ |
|---|
| 566 | 539 | nop |
|---|
| 567 | 540 | 1: |
|---|
| 568 | | - LOAD(t0, 0(src), .Ll_exc\@) |
|---|
| 541 | + LOAD(t0, 0(src)) |
|---|
| 569 | 542 | ADD src, src, NBYTES |
|---|
| 570 | 543 | SUB len, len, NBYTES |
|---|
| 571 | | - STORE(t0, 0(dst), .Ls_exc\@) |
|---|
| 544 | + STORE(t0, 0(dst)) |
|---|
| 572 | 545 | ADDC(sum, t0) |
|---|
| 573 | 546 | .set reorder /* DADDI_WAR */ |
|---|
| 574 | 547 | ADD dst, dst, NBYTES |
|---|
| .. | .. |
|---|
| 591 | 564 | ADD t1, dst, len # t1 is just past last byte of dst |
|---|
| 592 | 565 | li bits, 8*NBYTES |
|---|
| 593 | 566 | SLL rem, len, 3 # rem = number of bits to keep |
|---|
| 594 | | - LOAD(t0, 0(src), .Ll_exc\@) |
|---|
| 567 | + LOAD(t0, 0(src)) |
|---|
| 595 | 568 | SUB bits, bits, rem # bits = number of bits to discard |
|---|
| 596 | 569 | SHIFT_DISCARD t0, t0, bits |
|---|
| 597 | | - STREST(t0, -1(t1), .Ls_exc\@) |
|---|
| 570 | + STREST(t0, -1(t1)) |
|---|
| 598 | 571 | SHIFT_DISCARD_REVERT t0, t0, bits |
|---|
| 599 | 572 | .set reorder |
|---|
| 600 | 573 | ADDC(sum, t0) |
|---|
| .. | .. |
|---|
| 611 | 584 | * Set match = (src and dst have same alignment) |
|---|
| 612 | 585 | */ |
|---|
| 613 | 586 | #define match rem |
|---|
| 614 | | - LDFIRST(t3, FIRST(0)(src), .Ll_exc\@) |
|---|
| 587 | + LDFIRST(t3, FIRST(0)(src)) |
|---|
| 615 | 588 | ADD t2, zero, NBYTES |
|---|
| 616 | | - LDREST(t3, REST(0)(src), .Ll_exc_copy\@) |
|---|
| 589 | + LDREST(t3, REST(0)(src)) |
|---|
| 617 | 590 | SUB t2, t2, t1 # t2 = number of bytes copied |
|---|
| 618 | 591 | xor match, t0, t1 |
|---|
| 619 | | - STFIRST(t3, FIRST(0)(dst), .Ls_exc\@) |
|---|
| 592 | + STFIRST(t3, FIRST(0)(dst)) |
|---|
| 620 | 593 | SLL t4, t1, 3 # t4 = number of bits to discard |
|---|
| 621 | 594 | SHIFT_DISCARD t3, t3, t4 |
|---|
| 622 | 595 | /* no SHIFT_DISCARD_REVERT to handle odd buffer properly */ |
|---|
| .. | .. |
|---|
| 638 | 611 | * It's OK to load FIRST(N+1) before REST(N) because the two addresses |
|---|
| 639 | 612 | * are to the same unit (unless src is aligned, but it's not). |
|---|
| 640 | 613 | */ |
|---|
| 641 | | - LDFIRST(t0, FIRST(0)(src), .Ll_exc\@) |
|---|
| 642 | | - LDFIRST(t1, FIRST(1)(src), .Ll_exc_copy\@) |
|---|
| 614 | + LDFIRST(t0, FIRST(0)(src)) |
|---|
| 615 | + LDFIRST(t1, FIRST(1)(src)) |
|---|
| 643 | 616 | SUB len, len, 4*NBYTES |
|---|
| 644 | | - LDREST(t0, REST(0)(src), .Ll_exc_copy\@) |
|---|
| 645 | | - LDREST(t1, REST(1)(src), .Ll_exc_copy\@) |
|---|
| 646 | | - LDFIRST(t2, FIRST(2)(src), .Ll_exc_copy\@) |
|---|
| 647 | | - LDFIRST(t3, FIRST(3)(src), .Ll_exc_copy\@) |
|---|
| 648 | | - LDREST(t2, REST(2)(src), .Ll_exc_copy\@) |
|---|
| 649 | | - LDREST(t3, REST(3)(src), .Ll_exc_copy\@) |
|---|
| 617 | + LDREST(t0, REST(0)(src)) |
|---|
| 618 | + LDREST(t1, REST(1)(src)) |
|---|
| 619 | + LDFIRST(t2, FIRST(2)(src)) |
|---|
| 620 | + LDFIRST(t3, FIRST(3)(src)) |
|---|
| 621 | + LDREST(t2, REST(2)(src)) |
|---|
| 622 | + LDREST(t3, REST(3)(src)) |
|---|
| 650 | 623 | ADD src, src, 4*NBYTES |
|---|
| 651 | 624 | #ifdef CONFIG_CPU_SB1 |
|---|
| 652 | 625 | nop # improves slotting |
|---|
| 653 | 626 | #endif |
|---|
| 654 | | - STORE(t0, UNIT(0)(dst), .Ls_exc\@) |
|---|
| 627 | + STORE(t0, UNIT(0)(dst)) |
|---|
| 655 | 628 | ADDC(t0, t1) |
|---|
| 656 | | - STORE(t1, UNIT(1)(dst), .Ls_exc\@) |
|---|
| 629 | + STORE(t1, UNIT(1)(dst)) |
|---|
| 657 | 630 | ADDC(sum, t0) |
|---|
| 658 | | - STORE(t2, UNIT(2)(dst), .Ls_exc\@) |
|---|
| 631 | + STORE(t2, UNIT(2)(dst)) |
|---|
| 659 | 632 | ADDC(t2, t3) |
|---|
| 660 | | - STORE(t3, UNIT(3)(dst), .Ls_exc\@) |
|---|
| 633 | + STORE(t3, UNIT(3)(dst)) |
|---|
| 661 | 634 | ADDC(sum, t2) |
|---|
| 662 | 635 | .set reorder /* DADDI_WAR */ |
|---|
| 663 | 636 | ADD dst, dst, 4*NBYTES |
|---|
| .. | .. |
|---|
| 670 | 643 | beq rem, len, .Lcopy_bytes\@ |
|---|
| 671 | 644 | nop |
|---|
| 672 | 645 | 1: |
|---|
| 673 | | - LDFIRST(t0, FIRST(0)(src), .Ll_exc\@) |
|---|
| 674 | | - LDREST(t0, REST(0)(src), .Ll_exc_copy\@) |
|---|
| 646 | + LDFIRST(t0, FIRST(0)(src)) |
|---|
| 647 | + LDREST(t0, REST(0)(src)) |
|---|
| 675 | 648 | ADD src, src, NBYTES |
|---|
| 676 | 649 | SUB len, len, NBYTES |
|---|
| 677 | | - STORE(t0, 0(dst), .Ls_exc\@) |
|---|
| 650 | + STORE(t0, 0(dst)) |
|---|
| 678 | 651 | ADDC(sum, t0) |
|---|
| 679 | 652 | .set reorder /* DADDI_WAR */ |
|---|
| 680 | 653 | ADD dst, dst, NBYTES |
|---|
| .. | .. |
|---|
| 695 | 668 | #endif |
|---|
| 696 | 669 | move t2, zero # partial word |
|---|
| 697 | 670 | li t3, SHIFT_START # shift |
|---|
| 698 | | -/* use .Ll_exc_copy here to return correct sum on fault */ |
|---|
| 699 | 671 | #define COPY_BYTE(N) \ |
|---|
| 700 | | - LOADBU(t0, N(src), .Ll_exc_copy\@); \ |
|---|
| 672 | + LOADBU(t0, N(src)); \ |
|---|
| 701 | 673 | SUB len, len, 1; \ |
|---|
| 702 | | - STOREB(t0, N(dst), .Ls_exc\@); \ |
|---|
| 674 | + STOREB(t0, N(dst)); \ |
|---|
| 703 | 675 | SLLV t0, t0, t3; \ |
|---|
| 704 | 676 | addu t3, SHIFT_INC; \ |
|---|
| 705 | 677 | beqz len, .Lcopy_bytes_done\@; \ |
|---|
| .. | .. |
|---|
| 713 | 685 | COPY_BYTE(4) |
|---|
| 714 | 686 | COPY_BYTE(5) |
|---|
| 715 | 687 | #endif |
|---|
| 716 | | - LOADBU(t0, NBYTES-2(src), .Ll_exc_copy\@) |
|---|
| 688 | + LOADBU(t0, NBYTES-2(src)) |
|---|
| 717 | 689 | SUB len, len, 1 |
|---|
| 718 | | - STOREB(t0, NBYTES-2(dst), .Ls_exc\@) |
|---|
| 690 | + STOREB(t0, NBYTES-2(dst)) |
|---|
| 719 | 691 | SLLV t0, t0, t3 |
|---|
| 720 | 692 | or t2, t0 |
|---|
| 721 | 693 | .Lcopy_bytes_done\@: |
|---|
| .. | .. |
|---|
| 732 | 704 | addu sum, v1 |
|---|
| 733 | 705 | #endif |
|---|
| 734 | 706 | |
|---|
| 735 | | -#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_LOONGSON3) |
|---|
| 707 | +#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR5) || \ |
|---|
| 708 | + defined(CONFIG_CPU_LOONGSON64) |
|---|
| 736 | 709 | .set push |
|---|
| 737 | 710 | .set arch=mips32r2 |
|---|
| 738 | 711 | wsbh v1, sum |
|---|
| .. | .. |
|---|
| 751 | 724 | #endif |
|---|
| 752 | 725 | .set pop |
|---|
| 753 | 726 | .set reorder |
|---|
| 754 | | - ADDC32(sum, psum) |
|---|
| 755 | 727 | jr ra |
|---|
| 756 | 728 | .set noreorder |
|---|
| 757 | | - |
|---|
| 758 | | -.Ll_exc_copy\@: |
|---|
| 759 | | - /* |
|---|
| 760 | | - * Copy bytes from src until faulting load address (or until a |
|---|
| 761 | | - * lb faults) |
|---|
| 762 | | - * |
|---|
| 763 | | - * When reached by a faulting LDFIRST/LDREST, THREAD_BUADDR($28) |
|---|
| 764 | | - * may be more than a byte beyond the last address. |
|---|
| 765 | | - * Hence, the lb below may get an exception. |
|---|
| 766 | | - * |
|---|
| 767 | | - * Assumes src < THREAD_BUADDR($28) |
|---|
| 768 | | - */ |
|---|
| 769 | | - LOADK t0, TI_TASK($28) |
|---|
| 770 | | - li t2, SHIFT_START |
|---|
| 771 | | - LOADK t0, THREAD_BUADDR(t0) |
|---|
| 772 | | -1: |
|---|
| 773 | | - LOADBU(t1, 0(src), .Ll_exc\@) |
|---|
| 774 | | - ADD src, src, 1 |
|---|
| 775 | | - sb t1, 0(dst) # can't fault -- we're copy_from_user |
|---|
| 776 | | - SLLV t1, t1, t2 |
|---|
| 777 | | - addu t2, SHIFT_INC |
|---|
| 778 | | - ADDC(sum, t1) |
|---|
| 779 | | - .set reorder /* DADDI_WAR */ |
|---|
| 780 | | - ADD dst, dst, 1 |
|---|
| 781 | | - bne src, t0, 1b |
|---|
| 782 | | - .set noreorder |
|---|
| 783 | | -.Ll_exc\@: |
|---|
| 784 | | - LOADK t0, TI_TASK($28) |
|---|
| 785 | | - nop |
|---|
| 786 | | - LOADK t0, THREAD_BUADDR(t0) # t0 is just past last good address |
|---|
| 787 | | - nop |
|---|
| 788 | | - SUB len, AT, t0 # len number of uncopied bytes |
|---|
| 789 | | - /* |
|---|
| 790 | | - * Here's where we rely on src and dst being incremented in tandem, |
|---|
| 791 | | - * See (3) above. |
|---|
| 792 | | - * dst += (fault addr - src) to put dst at first byte to clear |
|---|
| 793 | | - */ |
|---|
| 794 | | - ADD dst, t0 # compute start address in a1 |
|---|
| 795 | | - SUB dst, src |
|---|
| 796 | | - /* |
|---|
| 797 | | - * Clear len bytes starting at dst. Can't call __bzero because it |
|---|
| 798 | | - * might modify len. An inefficient loop for these rare times... |
|---|
| 799 | | - */ |
|---|
| 800 | | - .set reorder /* DADDI_WAR */ |
|---|
| 801 | | - SUB src, len, 1 |
|---|
| 802 | | - beqz len, .Ldone\@ |
|---|
| 803 | | - .set noreorder |
|---|
| 804 | | -1: sb zero, 0(dst) |
|---|
| 805 | | - ADD dst, dst, 1 |
|---|
| 806 | | - .set push |
|---|
| 807 | | - .set noat |
|---|
| 808 | | -#ifndef CONFIG_CPU_DADDI_WORKAROUNDS |
|---|
| 809 | | - bnez src, 1b |
|---|
| 810 | | - SUB src, src, 1 |
|---|
| 811 | | -#else |
|---|
| 812 | | - li v1, 1 |
|---|
| 813 | | - bnez src, 1b |
|---|
| 814 | | - SUB src, src, v1 |
|---|
| 815 | | -#endif |
|---|
| 816 | | - li v1, -EFAULT |
|---|
| 817 | | - b .Ldone\@ |
|---|
| 818 | | - sw v1, (errptr) |
|---|
| 819 | | - |
|---|
| 820 | | -.Ls_exc\@: |
|---|
| 821 | | - li v0, -1 /* invalid checksum */ |
|---|
| 822 | | - li v1, -EFAULT |
|---|
| 823 | | - jr ra |
|---|
| 824 | | - sw v1, (errptr) |
|---|
| 825 | | - .set pop |
|---|
| 826 | 729 | .endm |
|---|
| 827 | 730 | |
|---|
| 828 | | -LEAF(__csum_partial_copy_kernel) |
|---|
| 829 | | -EXPORT_SYMBOL(__csum_partial_copy_kernel) |
|---|
| 731 | + .set noreorder |
|---|
| 732 | +.L_exc: |
|---|
| 733 | + jr ra |
|---|
| 734 | + li v0, 0 |
|---|
| 735 | + |
|---|
| 736 | +FEXPORT(__csum_partial_copy_nocheck) |
|---|
| 737 | +EXPORT_SYMBOL(__csum_partial_copy_nocheck) |
|---|
| 830 | 738 | #ifndef CONFIG_EVA |
|---|
| 831 | 739 | FEXPORT(__csum_partial_copy_to_user) |
|---|
| 832 | 740 | EXPORT_SYMBOL(__csum_partial_copy_to_user) |
|---|
| 833 | 741 | FEXPORT(__csum_partial_copy_from_user) |
|---|
| 834 | 742 | EXPORT_SYMBOL(__csum_partial_copy_from_user) |
|---|
| 835 | 743 | #endif |
|---|
| 836 | | -__BUILD_CSUM_PARTIAL_COPY_USER LEGACY_MODE USEROP USEROP 1 |
|---|
| 837 | | -END(__csum_partial_copy_kernel) |
|---|
| 744 | +__BUILD_CSUM_PARTIAL_COPY_USER LEGACY_MODE USEROP USEROP |
|---|
| 838 | 745 | |
|---|
| 839 | 746 | #ifdef CONFIG_EVA |
|---|
| 840 | 747 | LEAF(__csum_partial_copy_to_user) |
|---|
| 841 | | -__BUILD_CSUM_PARTIAL_COPY_USER EVA_MODE KERNELOP USEROP 0 |
|---|
| 748 | +__BUILD_CSUM_PARTIAL_COPY_USER EVA_MODE KERNELOP USEROP |
|---|
| 842 | 749 | END(__csum_partial_copy_to_user) |
|---|
| 843 | 750 | |
|---|
| 844 | 751 | LEAF(__csum_partial_copy_from_user) |
|---|
| 845 | | -__BUILD_CSUM_PARTIAL_COPY_USER EVA_MODE USEROP KERNELOP 0 |
|---|
| 752 | +__BUILD_CSUM_PARTIAL_COPY_USER EVA_MODE USEROP KERNELOP |
|---|
| 846 | 753 | END(__csum_partial_copy_from_user) |
|---|
| 847 | 754 | #endif |
|---|