/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_FUTEX_H
#define __ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/errno.h>

#define FUTEX_MAX_LOOPS	128 /* What's the largest number you can think of? */

/*
 * __futex_atomic_op() - bounded LL/SC read-modify-write on a user futex word.
 *
 * @insn:   asm instruction that computes the new value into %w3 (tmp) from
 *          the old value in %w1 (oldval) and the operand in %w5 (oparg)
 * @ret:    set to 0 on success, -EFAULT if a user access faults (via the
 *          .fixup/extable path at label 4), or -EAGAIN if the
 *          store-exclusive fails FUTEX_MAX_LOOPS times in a row
 * @oldval: receives the value loaded from *uaddr
 * @uaddr:  user address of the futex word (caller has already masked it)
 * @tmp:    scratch variable holding the computed new value
 * @oparg:  operand fed to @insn
 *
 * Operand map for the asm template:
 *   %0 = ret, %1 = oldval, %2 = *uaddr, %3 = tmp, %4 = loops,
 *   %5 = oparg, %6 = -EFAULT, %7 = -EAGAIN
 *
 * Loop mechanics: stlxr writes 0 to %w0 on success and non-zero on failure,
 * so "sub %w4, %w4, %w0" only decrements the loop counter on a failed
 * store-exclusive; when the counter hits zero, ret becomes -EAGAIN.  The
 * "dmb ish" after label 3 provides the full-barrier semantics expected of
 * futex atomic ops.  User access is only open across the asm itself
 * (uaccess_enable/disable_privileged).
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)	\
do {									\
	unsigned int loops = FUTEX_MAX_LOOPS;				\
									\
	uaccess_enable_privileged();					\
	asm volatile(							\
"	prfm	pstl1strm, %2\n"					\
"1:	ldxr	%w1, %2\n"						\
	insn "\n"							\
"2:	stlxr	%w0, %w3, %2\n"						\
"	cbz	%w0, 3f\n"						\
"	sub	%w4, %w4, %w0\n"					\
"	cbnz	%w4, 1b\n"						\
"	mov	%w0, %w7\n"						\
"3:\n"									\
"	dmb	ish\n"							\
"	.pushsection .fixup,\"ax\"\n"					\
"	.align	2\n"							\
"4:	mov	%w0, %w6\n"						\
"	b	3b\n"							\
"	.popsection\n"							\
	_ASM_EXTABLE(1b, 4b)						\
	_ASM_EXTABLE(2b, 4b)						\
	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),	\
	  "+r" (loops)							\
	: "r" (oparg), "Ir" (-EFAULT), "Ir" (-EAGAIN)			\
	: "memory");							\
	uaccess_disable_privileged();					\
} while (0)

/*
 * arch_futex_atomic_op_inuser() - perform a FUTEX_OP_* atomic op on a
 * user-space futex word.
 *
 * @op:     one of FUTEX_OP_{SET,ADD,OR,ANDN,XOR}
 * @oparg:  operand for the operation (negated here for ANDN, which is
 *          implemented as "and" with ~oparg)
 * @oval:   on success, receives the value the futex word held before the op
 * @_uaddr: raw user pointer to the futex word
 *
 * Returns 0 on success, -EFAULT if @_uaddr fails access_ok() or the access
 * faults, -EAGAIN if the exclusive store kept failing (see
 * __futex_atomic_op), or -ENOSYS for an unknown @op.
 */
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
{
	int oldval = 0, ret, tmp;
	/*
	 * Sanitise the user pointer before it is dereferenced under
	 * privileged uaccess; presumably this masks speculation-unsafe
	 * addresses -- see __uaccess_mask_ptr's definition to confirm.
	 */
	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	/*
	 * Each asm fragment must compute the new value into %w3 from the
	 * old value (%w1) and oparg (%w5), matching __futex_atomic_op's
	 * operand numbering.
	 */
	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%w3, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* ANDN == AND with the complemented operand. */
		__futex_atomic_op("and	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	/* Only report the old value if the operation actually happened. */
	if (!ret)
		*oval = oldval;

	return ret;
}

/*
 * futex_atomic_cmpxchg_inatomic() - compare-and-exchange on a user futex
 * word.
 *
 * Atomically: if *@_uaddr == @oldval, store @newval.  In all successful
 * cases *@uval receives the value that was read from the word, so the
 * caller can tell whether the exchange took place.
 *
 * Returns 0 on success, -EFAULT if @_uaddr fails access_ok() or the access
 * faults (fixup at label 5), or -EAGAIN if the store-exclusive failed
 * FUTEX_MAX_LOOPS times.
 *
 * Asm operand map:
 *   %0 = ret, %1 = val, %2 = *uaddr, %3 = tmp, %4 = loops,
 *   %5 = oldval, %6 = newval, %7 = -EFAULT, %8 = -EAGAIN
 *
 * Control flow: 1 loads the word exclusively; if it differs from oldval
 * the code branches straight to label 4, skipping the "dmb ish" (no store
 * was attempted).  Otherwise 2 tries the store-exclusive; on failure the
 * loop counter is decremented (stlxr writes non-zero status into %w3) and
 * the load is retried, until loops is exhausted and ret becomes -EAGAIN.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	unsigned int loops = FUTEX_MAX_LOOPS;
	u32 val, tmp;
	u32 __user *uaddr;

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	/* Sanitise the user pointer before opening privileged uaccess. */
	uaddr = __uaccess_mask_ptr(_uaddr);
	uaccess_enable_privileged();
	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
"	prfm	pstl1strm, %2\n"
"1:	ldxr	%w1, %2\n"
"	sub	%w3, %w1, %w5\n"
"	cbnz	%w3, 4f\n"
"2:	stlxr	%w3, %w6, %2\n"
"	cbz	%w3, 3f\n"
"	sub	%w4, %w4, %w3\n"
"	cbnz	%w4, 1b\n"
"	mov	%w0, %w8\n"
"3:\n"
"	dmb	ish\n"
"4:\n"
"	.pushsection .fixup,\"ax\"\n"
"5:	mov	%w0, %w7\n"
"	b	4b\n"
"	.popsection\n"
	_ASM_EXTABLE(1b, 5b)
	_ASM_EXTABLE(2b, 5b)
	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
	: "r" (oldval), "r" (newval), "Ir" (-EFAULT), "Ir" (-EAGAIN)
	: "memory");
	uaccess_disable_privileged();

	/* val holds whatever was read, even when the comparison failed. */
	if (!ret)
		*uval = val;

	return ret;
}

#endif /* __ASM_FUTEX_H */