hc
2024-10-12 a5969cabbb4660eab42b6ef0412cbbd1200cf14d
kernel/arch/x86/include/asm/futex.h
....@@ -12,76 +12,103 @@
1212 #include <asm/processor.h>
1313 #include <asm/smap.h>
1414
/*
 * Execute a single atomic instruction @insn ("xchgl"/"xaddl" style) on the
 * user-space word at @uaddr, with @oparg as the operand.  On success the
 * previous value of *@uaddr is stored through @oval; on a faulting user
 * access the exception-table fixup forces ret to -EFAULT and control jumps
 * to @label.  Must run between user_access_begin()/user_access_end().
 */
#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret;					\
	asm volatile("1:\t" insn "\n"				\
		     "2:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
/*
 * Execute a read-modify-write futex op that has no single-instruction
 * form ("orl"/"andl"/"xorl"): load the user word, apply @insn to a
 * temporary, then retry with LOCK CMPXCHG until the word is updated
 * atomically.  On success the value observed before the update is stored
 * through @oval; a fault on either the load (1:) or the cmpxchg (3:)
 * takes the fixup, sets ret to -EFAULT and jumps to @label.  Must run
 * between user_access_begin()/user_access_end().
 */
#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret, tem;				\
	asm volatile("1:\tmovl %2, %0\n"			\
		     "2:\tmovl\t%0, %3\n"			\
		     "\t" insn "\n"				\
		     "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t2b\n"				\
		     "4:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "5:\tmov\t%5, %1\n"			\
		     "\tjmp\t4b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 5b)			\
		     _ASM_EXTABLE_UA(3b, 5b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
4455
45
-static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
56
+static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
4657 u32 __user *uaddr)
4758 {
48
- int oldval = 0, ret, tem;
49
-
50
- pagefault_disable();
59
+ if (!user_access_begin(uaddr, sizeof(u32)))
60
+ return -EFAULT;
5161
5262 switch (op) {
5363 case FUTEX_OP_SET:
54
- __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
64
+ unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
5565 break;
5666 case FUTEX_OP_ADD:
57
- __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
58
- uaddr, oparg);
67
+ unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
68
+ uaddr, oparg, Efault);
5969 break;
6070 case FUTEX_OP_OR:
61
- __futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
71
+ unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
6272 break;
6373 case FUTEX_OP_ANDN:
64
- __futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
74
+ unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
6575 break;
6676 case FUTEX_OP_XOR:
67
- __futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
77
+ unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
6878 break;
6979 default:
70
- ret = -ENOSYS;
80
+ user_access_end();
81
+ return -ENOSYS;
7182 }
72
-
73
- pagefault_enable();
74
-
75
- if (!ret)
76
- *oval = oldval;
77
-
78
- return ret;
83
+ user_access_end();
84
+ return 0;
85
+Efault:
86
+ user_access_end();
87
+ return -EFAULT;
7988 }
8089
8190 static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
8291 u32 oldval, u32 newval)
8392 {
84
- return user_atomic_cmpxchg_inatomic(uval, uaddr, oldval, newval);
93
+ int ret = 0;
94
+
95
+ if (!user_access_begin(uaddr, sizeof(u32)))
96
+ return -EFAULT;
97
+ asm volatile("\n"
98
+ "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
99
+ "2:\n"
100
+ "\t.section .fixup, \"ax\"\n"
101
+ "3:\tmov %3, %0\n"
102
+ "\tjmp 2b\n"
103
+ "\t.previous\n"
104
+ _ASM_EXTABLE_UA(1b, 3b)
105
+ : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
106
+ : "i" (-EFAULT), "r" (newval), "1" (oldval)
107
+ : "memory"
108
+ );
109
+ user_access_end();
110
+ *uval = oldval;
111
+ return ret;
85112 }
86113
87114 #endif