2024-05-11 04dd17822334871b23ea2862f7798fb0e0007777
kernel/arch/sparc/include/asm/checksum_32.h
@@ -42,7 +42,7 @@
 unsigned int __csum_partial_copy_sparc_generic (const unsigned char *, unsigned char *);
 
 static inline __wsum
-csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
+csum_partial_copy_nocheck(const void *src, void *dst, int len)
 {
 	register unsigned int ret asm("o0") = (unsigned int)src;
 	register char *d asm("o1") = dst;
@@ -50,9 +50,9 @@
 
 	__asm__ __volatile__ (
 		"call __csum_partial_copy_sparc_generic\n\t"
-		" mov %6, %%g7\n"
+		" mov -1, %%g7\n"
 	: "=&r" (ret), "=&r" (d), "=&r" (l)
-	: "0" (ret), "1" (d), "2" (l), "r" (sum)
+	: "0" (ret), "1" (d), "2" (l)
 	: "o2", "o3", "o4", "o5", "o7",
 	  "g2", "g3", "g4", "g5", "g7",
 	  "memory", "cc");
@@ -60,61 +60,20 @@
 }
 
 static inline __wsum
-csum_partial_copy_from_user(const void __user *src, void *dst, int len,
-			    __wsum sum, int *err)
-  {
-	register unsigned long ret asm("o0") = (unsigned long)src;
-	register char *d asm("o1") = dst;
-	register int l asm("g1") = len;
-	register __wsum s asm("g7") = sum;
-
-	__asm__ __volatile__ (
-	".section __ex_table,#alloc\n\t"
-	".align 4\n\t"
-	".word 1f,2\n\t"
-	".previous\n"
-	"1:\n\t"
-	"call __csum_partial_copy_sparc_generic\n\t"
-	" st %8, [%%sp + 64]\n"
-	: "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s)
-	: "0" (ret), "1" (d), "2" (l), "3" (s), "r" (err)
-	: "o2", "o3", "o4", "o5", "o7", "g2", "g3", "g4", "g5",
-	  "cc", "memory");
-	return (__force __wsum)ret;
+csum_and_copy_from_user(const void __user *src, void *dst, int len)
+{
+	if (unlikely(!access_ok(src, len)))
+		return 0;
+	return csum_partial_copy_nocheck((__force void *)src, dst, len);
 }
 
 static inline __wsum
-csum_partial_copy_to_user(const void *src, void __user *dst, int len,
-			  __wsum sum, int *err)
+csum_and_copy_to_user(const void *src, void __user *dst, int len)
 {
-	if (!access_ok (VERIFY_WRITE, dst, len)) {
-		*err = -EFAULT;
-		return sum;
-	} else {
-		register unsigned long ret asm("o0") = (unsigned long)src;
-		register char __user *d asm("o1") = dst;
-		register int l asm("g1") = len;
-		register __wsum s asm("g7") = sum;
-
-		__asm__ __volatile__ (
-		".section __ex_table,#alloc\n\t"
-		".align 4\n\t"
-		".word 1f,1\n\t"
-		".previous\n"
-		"1:\n\t"
-		"call __csum_partial_copy_sparc_generic\n\t"
-		" st %8, [%%sp + 64]\n"
-		: "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s)
-		: "0" (ret), "1" (d), "2" (l), "3" (s), "r" (err)
-		: "o2", "o3", "o4", "o5", "o7",
-		  "g2", "g3", "g4", "g5",
-		  "cc", "memory");
-		return (__force __wsum)ret;
-	}
+	if (!access_ok(dst, len))
+		return 0;
+	return csum_partial_copy_nocheck(src, (__force void *)dst, len);
 }
 
-
-#define HAVE_CSUM_COPY_USER
-#define csum_and_copy_to_user csum_partial_copy_to_user
 
 /* ihl is always 5 or greater, almost always is 5, and iph is word aligned
  * the majority of the time.