2024-02-19 1c055e55a242a33e574e48be530e06770a210dcd
--- a/kernel/include/asm-generic/barrier.h
+++ b/kernel/include/asm-generic/barrier.h
@@ -1,3 +1,4 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
 /*
  * Generic barrier definitions.
  *
@@ -6,11 +7,6 @@
  *
  * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
  * Written by David Howells (dhowells@redhat.com)
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public Licence
- * as published by the Free Software Foundation; either version
- * 2 of the Licence, or (at your option) any later version.
  */
 #ifndef __ASM_GENERIC_BARRIER_H
 #define __ASM_GENERIC_BARRIER_H
@@ -18,6 +14,7 @@
 #ifndef __ASSEMBLY__
 
 #include <linux/compiler.h>
+#include <asm/rwonce.h>
 
 #ifndef nop
 #define nop() asm volatile ("nop")
@@ -50,10 +47,6 @@
 #define dma_wmb() wmb()
 #endif
 
-#ifndef read_barrier_depends
-#define read_barrier_depends() do { } while (0)
-#endif
-
 #ifndef __smp_mb
 #define __smp_mb() mb()
 #endif
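
With read_barrier_depends() removed here (and in the hunks below), address-dependency ordering is provided by READ_ONCE() itself via the newly included <asm/rwonce.h>, so consumers of a published pointer no longer issue a separate barrier. A minimal publish/consume sketch under that reading; struct foo, gp, publish_foo() and consume_foo() are illustrative names, not part of this file:

/* Illustrative only: pointer publication without read_barrier_depends(). */
struct foo {
	int a;
};

static struct foo *gp;

static void publish_foo(struct foo *p)
{
	p->a = 42;
	smp_store_release(&gp, p);	/* order the init before the pointer store */
}

static int consume_foo(void)
{
	struct foo *p = READ_ONCE(gp);	/* dependency ordering comes from
					 * READ_ONCE() / <asm/rwonce.h> */
	return p ? p->a : -1;
}
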
@@ -64,10 +57,6 @@
 
 #ifndef __smp_wmb
 #define __smp_wmb() wmb()
-#endif
-
-#ifndef __smp_read_barrier_depends
-#define __smp_read_barrier_depends() read_barrier_depends()
 #endif
 
 #ifdef CONFIG_SMP
@@ -84,10 +73,6 @@
 #define smp_wmb() __smp_wmb()
 #endif
 
-#ifndef smp_read_barrier_depends
-#define smp_read_barrier_depends() __smp_read_barrier_depends()
-#endif
-
 #else /* !CONFIG_SMP */
 
 #ifndef smp_mb
@@ -100,10 +85,6 @@
 
 #ifndef smp_wmb
 #define smp_wmb() barrier()
-#endif
-
-#ifndef smp_read_barrier_depends
-#define smp_read_barrier_depends() do { } while (0)
 #endif
 
 #endif /* CONFIG_SMP */
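
The smp_*() variants left in place above degrade to barrier() on !CONFIG_SMP builds but still pair as ordering barriers on SMP. For context, a minimal writer/reader pairing they support; data, ready, writer() and reader() are hypothetical:

/* Illustrative only: smp_wmb()/smp_rmb() message passing. */
static int data;
static int ready;

static void writer(void)
{
	data = 1;
	smp_wmb();		/* order the data store before the flag store */
	WRITE_ONCE(ready, 1);
}

static int reader(void)
{
	if (READ_ONCE(ready)) {
		smp_rmb();	/* order the flag load before the data load */
		return data;
	}
	return -1;
}
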
@@ -132,10 +113,10 @@
 #ifndef __smp_load_acquire
 #define __smp_load_acquire(p)					\
 ({								\
-	typeof(*p) ___p1 = READ_ONCE(*p);			\
+	__unqual_scalar_typeof(*p) ___p1 = READ_ONCE(*p);	\
 	compiletime_assert_atomic_type(*p);			\
 	__smp_mb();						\
-	___p1;							\
+	(typeof(*p))___p1;					\
 })
 #endif
 
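
The switch to __unqual_scalar_typeof() makes the temporary ___p1 an unqualified scalar even when *p carries const/volatile qualifiers, so only the READ_ONCE() access itself is treated as volatile; the final (typeof(*p)) cast restores the declared type for the expression's value. A minimal usage sketch under that reading (status and peek_status() are hypothetical):

/* Illustrative only: acquire load from a volatile-qualified location.
 * The temporary inside the macro is a plain int here, not a volatile int. */
static volatile int status;

static int peek_status(void)
{
	return smp_load_acquire(&status);
}
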
@@ -187,10 +168,10 @@
 #ifndef smp_load_acquire
 #define smp_load_acquire(p)					\
 ({								\
-	typeof(*p) ___p1 = READ_ONCE(*p);			\
+	__unqual_scalar_typeof(*p) ___p1 = READ_ONCE(*p);	\
 	compiletime_assert_atomic_type(*p);			\
 	barrier();						\
-	___p1;							\
+	(typeof(*p))___p1;					\
 })
 #endif
 
@@ -200,7 +181,6 @@
 #define virt_mb() __smp_mb()
 #define virt_rmb() __smp_rmb()
 #define virt_wmb() __smp_wmb()
-#define virt_read_barrier_depends() __smp_read_barrier_depends()
 #define virt_store_mb(var, value) __smp_store_mb(var, value)
 #define virt_mb__before_atomic() __smp_mb__before_atomic()
 #define virt_mb__after_atomic() __smp_mb__after_atomic()
@@ -233,14 +213,14 @@
 #ifndef smp_cond_load_relaxed
 #define smp_cond_load_relaxed(ptr, cond_expr) ({	\
 	typeof(ptr) __PTR = (ptr);			\
-	typeof(*ptr) VAL;				\
+	__unqual_scalar_typeof(*ptr) VAL;		\
 	for (;;) {					\
 		VAL = READ_ONCE(*__PTR);		\
 		if (cond_expr)				\
 			break;				\
 		cpu_relax();				\
 	}						\
-	VAL;						\
+	(typeof(*ptr))VAL;				\
 })
 #endif
 
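
This macro (and the _acquire variant in the next hunk) spins with cpu_relax() until cond_expr, evaluated against the latest value in VAL, becomes true, and then returns the observed value. A minimal usage sketch; wait_for_owner_clear() and lockp are hypothetical:

/* Illustrative only: spin until *lockp reads zero, returning the value
 * observed; the _acquire variant also orders later accesses after it. */
static inline unsigned int wait_for_owner_clear(unsigned int *lockp)
{
	return smp_cond_load_acquire(lockp, VAL == 0);
}
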
@@ -254,12 +234,22 @@
  */
 #ifndef smp_cond_load_acquire
 #define smp_cond_load_acquire(ptr, cond_expr) ({	\
-	typeof(*ptr) _val;				\
+	__unqual_scalar_typeof(*ptr) _val;		\
 	_val = smp_cond_load_relaxed(ptr, cond_expr);	\
 	smp_acquire__after_ctrl_dep();			\
-	_val;						\
+	(typeof(*ptr))_val;				\
 })
 #endif
 
+/*
+ * pmem_wmb() ensures that all stores for which the modification
+ * are written to persistent storage by preceding instructions have
+ * updated persistent storage before any data access or data transfer
+ * caused by subsequent instructions is initiated.
+ */
+#ifndef pmem_wmb
+#define pmem_wmb() wmb()
+#endif
+
 #endif /* !__ASSEMBLY__ */
 #endif /* __ASM_GENERIC_BARRIER_H */
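
A sketch of how the new pmem_wmb() is meant to sit on a persistent-memory write path: flush the payload, order it, then flush the valid flag. struct record, commit_record() and the layout are hypothetical; memcpy_flushcache() is assumed to be the available flush-on-copy helper:

/* Illustrative only: make the payload persistent before the valid flag. */
struct record {
	u64 payload;
	u64 valid;
};

static void commit_record(struct record *rec, u64 val)
{
	u64 flag = 1;

	memcpy_flushcache(&rec->payload, &val, sizeof(val));
	pmem_wmb();	/* payload reaches the persistence domain first */
	memcpy_flushcache(&rec->valid, &flag, sizeof(flag));
}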