@@ -7,24 +7,6 @@
 
 /* Written 2002 by Andi Kleen */
 
-/* Only used for special circumstances. Stolen from i386/string.h */
-static __always_inline void *__inline_memcpy(void *to, const void *from, size_t n)
-{
-	unsigned long d0, d1, d2;
-	asm volatile("rep ; movsl\n\t"
-		     "testb $2,%b4\n\t"
-		     "je 1f\n\t"
-		     "movsw\n"
-		     "1:\ttestb $1,%b4\n\t"
-		     "je 2f\n\t"
-		     "movsb\n"
-		     "2:"
-		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
-		     : "0" (n / 4), "q" (n), "1" ((long)to), "2" ((long)from)
-		     : "memory");
-	return to;
-}
-
 /* Even with __builtin_ the compiler may decide to use the out of line
    function. */
 
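For context on the deletion above: the removed helper was the classic i386 three-step copy, n/4 dword moves via "rep movsl", then one "movsw" if bit 1 of n is set, then one "movsb" if bit 0 is set. A minimal C sketch of that decomposition, for illustration only (inline_memcpy_sketch is an invented name, not kernel code):

#include <stddef.h>
#include <string.h>

/*
 * Illustrative C equivalent of the removed helper: copy n/4 dwords
 * (the "rep movsl"), then one 16-bit word if bit 1 of n is set
 * ("testb $2" / "movsw"), then one byte if bit 0 is set
 * ("testb $1" / "movsb").
 */
static void *inline_memcpy_sketch(void *to, const void *from, size_t n)
{
	unsigned char *d = to;
	const unsigned char *s = from;
	size_t i;

	for (i = 0; i < n / 4; i++) {
		memcpy(d, s, 4);	/* rep movsl */
		d += 4;
		s += 4;
	}
	if (n & 2) {
		memcpy(d, s, 2);	/* movsw */
		d += 2;
		s += 2;
	}
	if (n & 1)
		*d = *s;		/* movsb */
	return to;
}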
@@ -100,35 +82,3 @@
 
 #endif
 
-#define __HAVE_ARCH_MEMCPY_MCSAFE 1
-__must_check unsigned long __memcpy_mcsafe(void *dst, const void *src,
-		size_t cnt);
-DECLARE_STATIC_KEY_FALSE(mcsafe_key);
-
-/**
- * memcpy_mcsafe - copy memory with indication if a machine check happened
- *
- * @dst:	destination address
- * @src:	source address
- * @cnt:	number of bytes to copy
- *
- * Low level memory copy function that catches machine checks
- * We only call into the "safe" function on systems that can
- * actually do machine check recovery. Everyone else can just
- * use memcpy().
- *
- * Return 0 for success, or number of bytes not copied if there was an
- * exception.
- */
-static __always_inline __must_check unsigned long
-memcpy_mcsafe(void *dst, const void *src, size_t cnt)
-{
-#ifdef CONFIG_X86_MCE
-	if (static_branch_unlikely(&mcsafe_key))
-		return __memcpy_mcsafe(dst, src, cnt);
-	else
-#endif
-		memcpy(dst, src, cnt);
-	return 0;
-}
-
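For context on the deletion above: memcpy_mcsafe() returned 0 on success, or the number of trailing bytes left uncopied when a machine check fired mid-copy; without CONFIG_X86_MCE, or with mcsafe_key disabled, it fell back to plain memcpy() and always reported success. Before this removal a caller typically looked like the following sketch (read_pmem_sketch is a hypothetical name, assuming kernel context for -EIO):

#include <linux/errno.h>

/*
 * Hypothetical caller sketch, not part of this patch: copy from
 * persistent memory that may contain poisoned cache lines, turning a
 * machine-check-truncated copy into -EIO instead of a fatal fault.
 */
static int read_pmem_sketch(void *dst, const void *src, size_t len)
{
	unsigned long rem = memcpy_mcsafe(dst, src, len);

	if (rem)	/* the trailing 'rem' bytes were not copied */
		return -EIO;
	return 0;
}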
@@ -135,6 +85,24 @@
 #ifdef CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE
 #define __HAVE_ARCH_MEMCPY_FLUSHCACHE 1
-void memcpy_flushcache(void *dst, const void *src, size_t cnt);
+void __memcpy_flushcache(void *dst, const void *src, size_t cnt);
+static __always_inline void memcpy_flushcache(void *dst, const void *src, size_t cnt)
+{
+	if (__builtin_constant_p(cnt)) {
+		switch (cnt) {
+			case 4:
+				asm ("movntil %1, %0" : "=m"(*(u32 *)dst) : "r"(*(u32 *)src));
+				return;
+			case 8:
+				asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
+				return;
+			case 16:
+				asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
+				asm ("movntiq %1, %0" : "=m"(*(u64 *)(dst + 8)) : "r"(*(u64 *)(src + 8)));
+				return;
+		}
+	}
+	__memcpy_flushcache(dst, src, cnt);
+}
 #endif
 
 #endif /* __KERNEL__ */
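A note on the addition above: movnti performs a non-temporal store that bypasses the CPU cache, which matches the copy-and-flush semantic memcpy_flushcache() promises, so constant 4/8/16-byte copies can skip the out-of-line __memcpy_flushcache() call entirely. The fast path only triggers when cnt is a compile-time constant, via __builtin_constant_p(), as in this hypothetical caller (invented name, assuming a pmem-backed u64 slot):

/*
 * Hypothetical caller sketch: sizeof(val) == 8 is a compile-time
 * constant, so this call compiles down to a single movntiq rather
 * than a call to __memcpy_flushcache().
 */
static void pmem_store_counter_sketch(u64 *pmem_slot, u64 val)
{
	memcpy_flushcache(pmem_slot, &val, sizeof(val));
}

A variable-length memcpy_flushcache(dst, src, len) still takes the out-of-line path.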