```diff
@@ -12,33 +12,44 @@
 #include <asm/processor.h>
 #include <asm/smap.h>
 
-#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)	\
-	asm volatile("\t" ASM_STAC "\n"				\
-		     "1:\t" insn "\n"				\
-		     "2:\t" ASM_CLAC "\n"			\
+#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)	\
+do {								\
+	int oldval = 0, ret;					\
+	asm volatile("1:\t" insn "\n"				\
+		     "2:\n"					\
 		     "\t.section .fixup,\"ax\"\n"		\
 		     "3:\tmov\t%3, %1\n"			\
 		     "\tjmp\t2b\n"				\
 		     "\t.previous\n"				\
-		     _ASM_EXTABLE(1b, 3b)			\
+		     _ASM_EXTABLE_UA(1b, 3b)			\
 		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
-		     : "i" (-EFAULT), "0" (oparg), "1" (0))
+		     : "i" (-EFAULT), "0" (oparg), "1" (0));	\
+	if (ret)						\
+		goto label;					\
+	*oval = oldval;						\
+} while(0)
 
-#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
-	asm volatile("\t" ASM_STAC "\n"				\
-		     "1:\tmovl %2, %0\n"			\
-		     "\tmovl\t%0, %3\n"				\
+
+#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)	\
+do {								\
+	int oldval = 0, ret, tem;				\
+	asm volatile("1:\tmovl %2, %0\n"			\
+		     "2:\tmovl\t%0, %3\n"			\
 		     "\t" insn "\n"				\
-		     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
-		     "\tjnz\t1b\n"				\
-		     "3:\t" ASM_CLAC "\n"			\
+		     "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
+		     "\tjnz\t2b\n"				\
+		     "4:\n"					\
 		     "\t.section .fixup,\"ax\"\n"		\
-		     "4:\tmov\t%5, %1\n"			\
-		     "\tjmp\t3b\n"				\
+		     "5:\tmov\t%5, %1\n"			\
+		     "\tjmp\t4b\n"				\
 		     "\t.previous\n"				\
-		     _ASM_EXTABLE(1b, 4b)			\
-		     _ASM_EXTABLE(2b, 4b)			\
+		     _ASM_EXTABLE_UA(1b, 5b)			\
+		     _ASM_EXTABLE_UA(3b, 5b)			\
 		     : "=&a" (oldval), "=&r" (ret),		\
 		       "+m" (*uaddr), "=&r" (tem)		\
-		     : "r" (oparg), "i" (-EFAULT), "1" (0))
+		     : "r" (oparg), "i" (-EFAULT), "1" (0));	\
+	if (ret)						\
+		goto label;					\
+	*oval = oldval;						\
+} while(0)
 
```
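The reworked `unsafe_atomic_op2()` keeps the same load/compute/cmpxchg retry loop as before, just renumbered and wrapped so a fault exits through `label`. Below is a minimal userspace sketch of that loop using C11 atomics in place of the LOCK-prefixed inline asm; the function name and the FUTEX_OP_OR choice are illustrative, not kernel API:

```c
#include <stdatomic.h>
#include <stdio.h>

/* Retry loop equivalent to unsafe_atomic_op2()'s "movl; insn; cmpxchgl; jnz":
 * read the old value, compute the new one in a temporary, and publish it with
 * a compare-and-swap, retrying if another thread changed the word meanwhile. */
static int atomic_or_return_old(_Atomic int *uaddr, int oparg)
{
	int oldval = atomic_load(uaddr);
	int tem;

	do {
		tem = oldval | oparg;	/* the "insn" step, here an OR */
		/* On failure, oldval is refreshed automatically, like "jnz 2b". */
	} while (!atomic_compare_exchange_weak(uaddr, &oldval, tem));

	return oldval;
}

int main(void)
{
	_Atomic int word = 0x3;
	int old = atomic_or_return_old(&word, 0x4);

	printf("old=%#x new=%#x\n", old, atomic_load(&word));
	return 0;
}
```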
```diff
@@ -45,36 +56,34 @@
-static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
+static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
 		u32 __user *uaddr)
 {
-	int oldval = 0, ret, tem;
-
-	pagefault_disable();
+	if (!user_access_begin(uaddr, sizeof(u32)))
+		return -EFAULT;
 
 	switch (op) {
 	case FUTEX_OP_SET:
-		__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
+		unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
 		break;
 	case FUTEX_OP_ADD:
-		__futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
-				   uaddr, oparg);
+		unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
+				  uaddr, oparg, Efault);
 		break;
 	case FUTEX_OP_OR:
-		__futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
+		unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
 		break;
 	case FUTEX_OP_ANDN:
-		__futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
+		unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
 		break;
 	case FUTEX_OP_XOR:
-		__futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
+		unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
 		break;
 	default:
-		ret = -ENOSYS;
+		user_access_end();
+		return -ENOSYS;
 	}
-
-	pagefault_enable();
-
-	if (!ret)
-		*oval = oldval;
-
-	return ret;
+	user_access_end();
+	return 0;
+Efault:
+	user_access_end();
+	return -EFAULT;
 }
 
```
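The conversion of `arch_futex_atomic_op_inuser()` above replaces the `pagefault_disable()`/`pagefault_enable()` bracket with `user_access_begin()`/`user_access_end()` and routes every fault to a single `Efault:` label, so the access window is closed exactly once on each path. Here is a stripped-down userspace sketch of that control flow; `demo_access_begin()`/`demo_access_end()` and `unsafe_add()` are hypothetical stand-ins for the kernel primitives, not real API:

```c
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>

static bool demo_access_begin(void) { return true; } /* user_access_begin() stand-in */
static void demo_access_end(void)   { }              /* user_access_end() stand-in */

/* Mirrors unsafe_atomic_op1(): do the op, bail out to 'label' on failure. */
#define unsafe_add(oval, uaddr, oparg, label)		\
do {							\
	int oldval = *(uaddr);				\
	if ((oparg) < 0)	/* stand-in for a fault */	\
		goto label;				\
	*(uaddr) = oldval + (oparg);			\
	*(oval) = oldval;				\
} while (0)

static int atomic_op_inuser(int oparg, int *oval, int *uaddr)
{
	if (!demo_access_begin())
		return -EFAULT;
	unsafe_add(oval, uaddr, oparg, Efault);
	demo_access_end();
	return 0;
Efault:
	demo_access_end();	/* one cleanup point for every failure path */
	return -EFAULT;
}

int main(void)
{
	int word = 40, old;

	printf("ret=%d old=%d word=%d\n",
	       atomic_op_inuser(2, &old, &word), old, word);
	return 0;
}
```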
```diff
@@ -81,7 +90,25 @@
 static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 						u32 oldval, u32 newval)
 {
-	return user_atomic_cmpxchg_inatomic(uval, uaddr, oldval, newval);
+	int ret = 0;
+
+	if (!user_access_begin(uaddr, sizeof(u32)))
+		return -EFAULT;
+	asm volatile("\n"
+		     "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
+		     "2:\n"
+		     "\t.section .fixup, \"ax\"\n"
+		     "3:\tmov %3, %0\n"
+		     "\tjmp 2b\n"
+		     "\t.previous\n"
+		     _ASM_EXTABLE_UA(1b, 3b)
+		     : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
+		     : "i" (-EFAULT), "r" (newval), "1" (oldval)
+		     : "memory"
+	);
+	user_access_end();
+	*uval = oldval;
+	return ret;
 }
 
 #endif
```
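`futex_atomic_cmpxchg_inatomic()` now open-codes the LOCK `cmpxchgl` under the same begin/end bracket instead of calling `user_atomic_cmpxchg_inatomic()`. Functionally it is one compare-and-swap that also reports the value found at the address; a C11 sketch of that contract follows (the helper name is illustrative):

```c
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/* One-shot compare-and-swap: try to replace oldval with newval at *uaddr and
 * report the value actually found there, whether or not the swap happened.
 * On x86 this is a single LOCK cmpxchgl; EAX supplies oldval and receives
 * the observed value, which is why "=a"/"1" (oldval) appear in the asm. */
static int cmpxchg_inatomic(uint32_t *uval, _Atomic uint32_t *uaddr,
			    uint32_t oldval, uint32_t newval)
{
	uint32_t expected = oldval;

	atomic_compare_exchange_strong(uaddr, &expected, newval);
	*uval = expected;	/* value seen at *uaddr */
	return 0;		/* the kernel returns -EFAULT only on a fault */
}

int main(void)
{
	_Atomic uint32_t futex_word = 1;
	uint32_t seen;

	cmpxchg_inatomic(&seen, &futex_word, 1, 2);
	printf("seen=%u now=%u\n", seen, atomic_load(&futex_word));
	return 0;
}
```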