2024-10-12 a5969cabbb4660eab42b6ef0412cbbd1200cf14d
kernel/arch/x86/include/asm/uaccess_64.h
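Summary: drop copy_to_user_mcsafe() and the constant-size special cases in raw_copy_from_user()/raw_copy_to_user(), leaving both helpers to call copy_user_generic() unconditionally; the copy_user_handle_tail() and mcsafe_handle_tail() declarations go away with them.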
@@ -47,135 +47,15 @@
 }
 
 static __always_inline __must_check unsigned long
-copy_to_user_mcsafe(void *to, const void *from, unsigned len)
-{
-        unsigned long ret;
-
-        __uaccess_begin();
-        /*
-         * Note, __memcpy_mcsafe() is explicitly used since it can
-         * handle exceptions / faults. memcpy_mcsafe() may fall back to
-         * memcpy() which lacks this handling.
-         */
-        ret = __memcpy_mcsafe(to, from, len);
-        __uaccess_end();
-        return ret;
-}
-
-static __always_inline __must_check unsigned long
 raw_copy_from_user(void *dst, const void __user *src, unsigned long size)
 {
-        int ret = 0;
-
-        if (!__builtin_constant_p(size))
-                return copy_user_generic(dst, (__force void *)src, size);
-        switch (size) {
-        case 1:
-                __uaccess_begin_nospec();
-                __get_user_asm_nozero(*(u8 *)dst, (u8 __user *)src,
-                                      ret, "b", "b", "=q", 1);
-                __uaccess_end();
-                return ret;
-        case 2:
-                __uaccess_begin_nospec();
-                __get_user_asm_nozero(*(u16 *)dst, (u16 __user *)src,
-                                      ret, "w", "w", "=r", 2);
-                __uaccess_end();
-                return ret;
-        case 4:
-                __uaccess_begin_nospec();
-                __get_user_asm_nozero(*(u32 *)dst, (u32 __user *)src,
-                                      ret, "l", "k", "=r", 4);
-                __uaccess_end();
-                return ret;
-        case 8:
-                __uaccess_begin_nospec();
-                __get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
-                                      ret, "q", "", "=r", 8);
-                __uaccess_end();
-                return ret;
-        case 10:
-                __uaccess_begin_nospec();
-                __get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
-                                      ret, "q", "", "=r", 10);
-                if (likely(!ret))
-                        __get_user_asm_nozero(*(u16 *)(8 + (char *)dst),
-                                              (u16 __user *)(8 + (char __user *)src),
-                                              ret, "w", "w", "=r", 2);
-                __uaccess_end();
-                return ret;
-        case 16:
-                __uaccess_begin_nospec();
-                __get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
-                                      ret, "q", "", "=r", 16);
-                if (likely(!ret))
-                        __get_user_asm_nozero(*(u64 *)(8 + (char *)dst),
-                                              (u64 __user *)(8 + (char __user *)src),
-                                              ret, "q", "", "=r", 8);
-                __uaccess_end();
-                return ret;
-        default:
-                return copy_user_generic(dst, (__force void *)src, size);
-        }
+        return copy_user_generic(dst, (__force void *)src, size);
 }
 
 static __always_inline __must_check unsigned long
 raw_copy_to_user(void __user *dst, const void *src, unsigned long size)
 {
-        int ret = 0;
-
-        if (!__builtin_constant_p(size))
-                return copy_user_generic((__force void *)dst, src, size);
-        switch (size) {
-        case 1:
-                __uaccess_begin();
-                __put_user_asm(*(u8 *)src, (u8 __user *)dst,
-                               ret, "b", "b", "iq", 1);
-                __uaccess_end();
-                return ret;
-        case 2:
-                __uaccess_begin();
-                __put_user_asm(*(u16 *)src, (u16 __user *)dst,
-                               ret, "w", "w", "ir", 2);
-                __uaccess_end();
-                return ret;
-        case 4:
-                __uaccess_begin();
-                __put_user_asm(*(u32 *)src, (u32 __user *)dst,
-                               ret, "l", "k", "ir", 4);
-                __uaccess_end();
-                return ret;
-        case 8:
-                __uaccess_begin();
-                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
-                               ret, "q", "", "er", 8);
-                __uaccess_end();
-                return ret;
-        case 10:
-                __uaccess_begin();
-                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
-                               ret, "q", "", "er", 10);
-                if (likely(!ret)) {
-                        asm("":::"memory");
-                        __put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
-                                       ret, "w", "w", "ir", 2);
-                }
-                __uaccess_end();
-                return ret;
-        case 16:
-                __uaccess_begin();
-                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
-                               ret, "q", "", "er", 16);
-                if (likely(!ret)) {
-                        asm("":::"memory");
-                        __put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
-                                       ret, "q", "", "er", 8);
-                }
-                __uaccess_end();
-                return ret;
-        default:
-                return copy_user_generic((__force void *)dst, src, size);
-        }
+        return copy_user_generic((__force void *)dst, src, size);
 }
 
 static __always_inline __must_check
@@ -206,11 +86,4 @@
         kasan_check_write(dst, size);
         return __copy_user_flushcache(dst, src, size);
 }
-
-unsigned long
-copy_user_handle_tail(char *to, char *from, unsigned len);
-
-unsigned long
-mcsafe_handle_tail(char *to, char *from, unsigned len);
-
 #endif /* _ASM_X86_UACCESS_64_H */
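
For reference, here is how the two copy helpers read after this patch, reconstructed purely from the context and `+` lines above (the unchanged flushcache helper and the rest of the header are elided):

/* arch/x86/include/asm/uaccess_64.h, post-patch: both helpers defer
 * unconditionally to copy_user_generic(), which returns the number of
 * bytes left uncopied (0 on success).
 */
static __always_inline __must_check unsigned long
raw_copy_from_user(void *dst, const void __user *src, unsigned long size)
{
        return copy_user_generic(dst, (__force void *)src, size);
}

static __always_inline __must_check unsigned long
raw_copy_to_user(void __user *dst, const void *src, unsigned long size)
{
        return copy_user_generic((__force void *)dst, src, size);
}

The fixed-size fast paths, including the __uaccess_begin_nospec() entry and the per-size __get_user_asm_nozero()/__put_user_asm() sequences, are gone entirely: every copy, constant-sized or not, now takes the copy_user_generic() path that the removed `default:` cases already used, so callers see the same return-value contract as before.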