```diff
@@ -21,9 +21,26 @@
 #define rmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
 #define wmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
 #elif defined(__x86_64__)
-#define mb() asm volatile("mfence":::"memory")
-#define rmb() asm volatile("lfence":::"memory")
+#define mb() asm volatile("mfence" ::: "memory")
+#define rmb() asm volatile("lfence" ::: "memory")
 #define wmb() asm volatile("sfence" ::: "memory")
+#define smp_rmb() barrier()
+#define smp_wmb() barrier()
+#define smp_mb() asm volatile("lock; addl $0,-132(%%rsp)" ::: "memory", "cc")
 #endif
 
+#if defined(__x86_64__)
+#define smp_store_release(p, v) \
+do { \
+	barrier(); \
+	WRITE_ONCE(*p, v); \
+} while (0)
+
+#define smp_load_acquire(p) \
+({ \
+	typeof(*p) ___p1 = READ_ONCE(*p); \
+	barrier(); \
+	___p1; \
+})
+#endif /* defined(__x86_64__) */
 #endif /* _TOOLS_LINUX_ASM_X86_BARRIER_H */
```
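To see how the new helpers are meant to be paired, here is a minimal, self-contained sketch of a single-producer/single-consumer handoff. The `barrier()`, `READ_ONCE()`, and `WRITE_ONCE()` definitions below are illustrative stand-ins for the tools-header versions, and the `produce()`/`consume()` functions are hypothetical; they only demonstrate the release/acquire pairing. Note why compiler barriers are enough here: on x86-64's strongly ordered (TSO) memory model, ordinary stores already behave as releases and ordinary loads as acquires with respect to other CPUs, so `smp_store_release()`/`smp_load_acquire()` only need to stop the compiler from reordering, while `smp_mb()` still requires a real serializing instruction (the `lock`-prefixed add).

```c
#include <stdio.h>

/* Illustrative stand-ins for the tools-header macros used by the patch. */
#define barrier()		asm volatile("" ::: "memory")
#define WRITE_ONCE(x, v)	(*(volatile typeof(x) *)&(x) = (v))
#define READ_ONCE(x)		(*(volatile typeof(x) *)&(x))

#define smp_store_release(p, v) \
do { \
	barrier(); \
	WRITE_ONCE(*(p), (v)); \
} while (0)

#define smp_load_acquire(p) \
({ \
	typeof(*(p)) ___p1 = READ_ONCE(*(p)); \
	barrier(); \
	___p1; \
})

static int payload;	/* data the producer hands to the consumer */
static int ready;	/* publication flag */

/* Producer: fill in the payload, then publish it with release semantics. */
static void produce(void)
{
	payload = 42;
	smp_store_release(&ready, 1);
}

/* Consumer: acquire-load the flag; if it is set, the payload write that
 * preceded the release is guaranteed to be visible. */
static void consume(void)
{
	if (smp_load_acquire(&ready))
		printf("payload = %d\n", payload);
}

int main(void)
{
	produce();
	consume();
	return 0;
}
```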