/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_FUTEX_H
#define _ASM_X86_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/processor.h>
#include <asm/smap.h>

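/*
 * Emit one atomic instruction (e.g. xchgl or xaddl) on the user word at
 * *uaddr, with oparg as input and the previous value returned in *oval.
 * If the access faults, the exception table entry redirects to the .fixup
 * stub, which sets ret to -EFAULT and resumes at label 2:, after which we
 * branch to the caller-supplied error label.  Callers must hold the
 * user_access_begin()/user_access_end() window open around this macro.
 */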
#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret;					\
	asm volatile("1:\t" insn "\n"				\
		     "2:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)

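/*
 * For ops with no single-instruction form (or, andn, xor): load the old
 * value, apply insn to a temporary, and LOCK cmpxchg the result back,
 * retrying from 2: if another task changed the word in the meantime.
 * Both the load at 1: and the cmpxchg at 3: carry exception table entries
 * that turn a fault into -EFAULT via the shared .fixup stub at 5:.
 */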
#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret, tem;				\
	asm volatile("1:\tmovl %2, %0\n"			\
		     "2:\tmovl\t%0, %3\n"			\
		     "\t" insn "\n"				\
		     "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t2b\n"				\
		     "4:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "5:\tmov\t%5, %1\n"			\
		     "\tjmp\t4b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 5b)			\
		     _ASM_EXTABLE_UA(3b, 5b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)

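/*
 * Perform futex operation op on the user word at uaddr, returning the
 * previous value through *oval.  Returns 0 on success, -EFAULT if the
 * user access faults, and -ENOSYS for an unknown op.  FUTEX_OP_SET and
 * FUTEX_OP_ADD map to single instructions (xchgl locks implicitly); the
 * remaining ops go through the cmpxchg retry loop.  Note that ANDN is
 * implemented as an AND of ~oparg.
 */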
static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
	if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ADD:
		unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
				  uaddr, oparg, Efault);
		break;
	case FUTEX_OP_OR:
		unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ANDN:
		unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
		break;
	case FUTEX_OP_XOR:
		unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
		break;
	default:
		user_access_end();
		return -ENOSYS;
	}
	user_access_end();
	return 0;
Efault:
	user_access_end();
	return -EFAULT;
}

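/*
 * Atomically compare the user word at uaddr with oldval and, if they
 * match, replace it with newval.  The value actually found at uaddr is
 * stored in *uval either way, so the caller can tell whether the exchange
 * happened.  Returns 0 on success (the compare itself may still have
 * failed) or -EFAULT if the access faulted.
 */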
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
						u32 oldval, u32 newval)
{
	int ret = 0;

	if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;
	asm volatile("\n"
		"1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
		"2:\n"
		"\t.section .fixup, \"ax\"\n"
		"3:\tmov %3, %0\n"
		"\tjmp 2b\n"
		"\t.previous\n"
		_ASM_EXTABLE_UA(1b, 3b)
		: "+r" (ret), "=a" (oldval), "+m" (*uaddr)
		: "i" (-EFAULT), "r" (newval), "1" (oldval)
		: "memory"
	);
	user_access_end();
	*uval = oldval;
	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_X86_FUTEX_H */
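
/*
 * Usage sketch (illustrative, not part of this header): the generic futex
 * code is the intended caller of the hooks above.  In kernels of this
 * vintage, futex_atomic_op_inuser() in kernel/futex.c decodes the encoded
 * op word and wraps the arch hook roughly like:
 *
 *	int oldval, ret;
 *
 *	pagefault_disable();
 *	ret = arch_futex_atomic_op_inuser(op, oparg, &oldval, uaddr);
 *	pagefault_enable();
 *	if (ret)
 *		return ret;
 *	// then compare oldval against cmparg per the FUTEX_OP_CMP_* test
 */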