.. | .. |
3 | 3 | #define _LINUX_LIST_BL_H
4 | 4 |
5 | 5 | #include <linux/list.h>
6 |  | -#include <linux/spinlock.h>
7 | 6 | #include <linux/bit_spinlock.h>
8 | 7 |
9 | 8 | /*
.. | .. |
34 | 33 |
35 | 34 | struct hlist_bl_head {
36 | 35 | 	struct hlist_bl_node *first;
37 |  | -#ifdef CONFIG_PREEMPT_RT_BASE
38 |  | -	raw_spinlock_t lock;
39 |  | -#endif
40 | 36 | };
41 | 37 |
42 | 38 | struct hlist_bl_node {
43 | 39 | 	struct hlist_bl_node *next, **pprev;
44 | 40 | };
45 |  | -
46 |  | -#ifdef CONFIG_PREEMPT_RT_BASE
47 |  | -#define INIT_HLIST_BL_HEAD(h) \
48 |  | -do { \
49 |  | -	(h)->first = NULL; \
50 |  | -	raw_spin_lock_init(&(h)->lock); \
51 |  | -} while (0)
52 |  | -#else
53 |  | -#define INIT_HLIST_BL_HEAD(h) (h)->first = NULL
54 |  | -#endif
 | 41 | +#define INIT_HLIST_BL_HEAD(ptr) \
 | 42 | +	((ptr)->first = NULL)
55 | 43 |
56 | 44 | static inline void INIT_HLIST_BL_NODE(struct hlist_bl_node *h)
57 | 45 | {
.. | .. |
98 | 86 | 	hlist_bl_set_first(h, n);
99 | 87 | }
100 | 88 |
 | 89 | +static inline void hlist_bl_add_before(struct hlist_bl_node *n,
 | 90 | +				       struct hlist_bl_node *next)
 | 91 | +{
 | 92 | +	struct hlist_bl_node **pprev = next->pprev;
 | 93 | +
 | 94 | +	n->pprev = pprev;
 | 95 | +	n->next = next;
 | 96 | +	next->pprev = &n->next;
 | 97 | +
 | 98 | +	/* pprev may be `first`, so be careful not to lose the lock bit */
 | 99 | +	WRITE_ONCE(*pprev,
 | 100 | +		   (struct hlist_bl_node *)
 | 101 | +			((uintptr_t)n | ((uintptr_t)*pprev & LIST_BL_LOCKMASK)));
 | 102 | +}
 | 103 | +
 | 104 | +static inline void hlist_bl_add_behind(struct hlist_bl_node *n,
 | 105 | +				       struct hlist_bl_node *prev)
 | 106 | +{
 | 107 | +	n->next = prev->next;
 | 108 | +	n->pprev = &prev->next;
 | 109 | +	prev->next = n;
 | 110 | +
 | 111 | +	if (n->next)
 | 112 | +		n->next->pprev = &n->next;
 | 113 | +}
 | 114 | +
101 | 115 | static inline void __hlist_bl_del(struct hlist_bl_node *n)
102 | 116 | {
103 | 117 | 	struct hlist_bl_node *next = n->next;
.. | .. |
131 | 145 |
132 | 146 | static inline void hlist_bl_lock(struct hlist_bl_head *b)
133 | 147 | {
134 |  | -#ifndef CONFIG_PREEMPT_RT_BASE
135 | 148 | 	bit_spin_lock(0, (unsigned long *)b);
136 |  | -#else
137 |  | -	raw_spin_lock(&b->lock);
138 |  | -#if defined(CONFIG_SMP) || defined(CONFIG_DEBUG_SPINLOCK)
139 |  | -	__set_bit(0, (unsigned long *)b);
140 |  | -#endif
141 |  | -#endif
142 | 149 | }
143 | 150 |
144 | 151 | static inline void hlist_bl_unlock(struct hlist_bl_head *b)
145 | 152 | {
146 |  | -#ifndef CONFIG_PREEMPT_RT_BASE
147 | 153 | 	__bit_spin_unlock(0, (unsigned long *)b);
148 |  | -#else
149 |  | -#if defined(CONFIG_SMP) || defined(CONFIG_DEBUG_SPINLOCK)
150 |  | -	__clear_bit(0, (unsigned long *)b);
151 |  | -#endif
152 |  | -	raw_spin_unlock(&b->lock);
153 |  | -#endif
154 | 154 | }
155 | 155 |
156 | 156 | static inline bool hlist_bl_is_locked(struct hlist_bl_head *b)
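For orientation, here is a minimal usage sketch of the helpers added above, run under the bit spinlock that `hlist_bl_lock()`/`hlist_bl_unlock()` now take unconditionally. The function name `example_insert_after` is hypothetical and not part of the patch; it only illustrates the calling convention of `hlist_bl_add_behind()`.

```c
#include <linux/list_bl.h>

/*
 * Illustrative sketch (not from the patch): insert @new_node right after a
 * known element @prev while holding the head's bit lock.
 */
static void example_insert_after(struct hlist_bl_head *head,
				 struct hlist_bl_node *prev,
				 struct hlist_bl_node *new_node)
{
	hlist_bl_lock(head);		/* bit_spin_lock on bit 0 of head->first */
	hlist_bl_add_behind(new_node, prev);
	hlist_bl_unlock(head);		/* releases the lock bit */
}
```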