.. | .. |
| 1 | +/* SPDX-License-Identifier: GPL-2.0-or-later */ |
1 | 2 | /* |
2 | 3 | * Copyright (C) 2001 Momchil Velikov |
3 | 4 | * Portions Copyright (C) 2001 Christoph Hellwig |
4 | 5 | * Copyright (C) 2006 Nick Piggin |
5 | 6 | * Copyright (C) 2012 Konstantin Khlebnikov |
6 | | - * |
7 | | - * This program is free software; you can redistribute it and/or |
8 | | - * modify it under the terms of the GNU General Public License as |
9 | | - * published by the Free Software Foundation; either version 2, or (at |
10 | | - * your option) any later version. |
11 | | - * |
12 | | - * This program is distributed in the hope that it will be useful, but |
13 | | - * WITHOUT ANY WARRANTY; without even the implied warranty of |
14 | | - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
15 | | - * General Public License for more details. |
16 | | - * |
17 | | - * You should have received a copy of the GNU General Public License |
18 | | - * along with this program; if not, write to the Free Software |
19 | | - * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. |
20 | 7 | */ |
21 | 8 | #ifndef _LINUX_RADIX_TREE_H |
22 | 9 | #define _LINUX_RADIX_TREE_H |
.. | .. |
24 | 11 | #include <linux/bitops.h> |
25 | 12 | #include <linux/kernel.h> |
26 | 13 | #include <linux/list.h> |
| 14 | +#include <linux/percpu.h> |
27 | 15 | #include <linux/preempt.h> |
28 | 16 | #include <linux/rcupdate.h> |
29 | 17 | #include <linux/spinlock.h> |
30 | 18 | #include <linux/types.h> |
| 19 | +#include <linux/xarray.h> |
| 20 | +#include <linux/local_lock.h> |
| 21 | + |
| 22 | +/* Keep unconverted code working */ |
| 23 | +#define radix_tree_root xarray |
| 24 | +#define radix_tree_node xa_node |
| 25 | + |
| 26 | +struct radix_tree_preload { |
| 27 | + local_lock_t lock; |
| 28 | + unsigned nr; |
| 29 | + /* nodes->parent points to next preallocated node */ |
| 30 | + struct radix_tree_node *nodes; |
| 31 | +}; |
| 32 | +DECLARE_PER_CPU(struct radix_tree_preload, radix_tree_preloads); |
31 | 33 | |
32 | 34 | /* |
33 | 35 | * The bottom two bits of the slot determine how the remaining bits in the |
34 | 36 | * slot are interpreted: |
35 | 37 | * |
36 | 38 | * 00 - data pointer |
37 | | - * 01 - internal entry |
38 | | - * 10 - exceptional entry |
39 | | - * 11 - this bit combination is currently unused/reserved |
| 39 | + * 10 - internal entry |
| 40 | + * x1 - value entry |
40 | 41 | * |
41 | 42 | * The internal entry may be a pointer to the next level in the tree, a |
42 | 43 | * sibling entry, or an indicator that the entry in this slot has been moved |
43 | 44 | * to another location in the tree and the lookup should be restarted. While |
44 | 45 | * NULL fits the 'data pointer' pattern, it means that there is no entry in |
45 | 46 | * the tree for this index (no matter what level of the tree it is found at). |
46 | | - * This means that you cannot store NULL in the tree as a value for the index. |
| 47 | + * This means that storing a NULL entry in the tree is the same as deleting |
| 48 | + * the entry from the tree. |
47 | 49 | */ |
48 | 50 | #define RADIX_TREE_ENTRY_MASK 3UL |
49 | | -#define RADIX_TREE_INTERNAL_NODE 1UL |
50 | | - |
51 | | -/* |
52 | | - * Most users of the radix tree store pointers but shmem/tmpfs stores swap |
53 | | - * entries in the same tree. They are marked as exceptional entries to |
54 | | - * distinguish them from pointers to struct page. |
55 | | - * EXCEPTIONAL_ENTRY tests the bit, EXCEPTIONAL_SHIFT shifts content past it. |
56 | | - */ |
57 | | -#define RADIX_TREE_EXCEPTIONAL_ENTRY 2 |
58 | | -#define RADIX_TREE_EXCEPTIONAL_SHIFT 2 |
| 51 | +#define RADIX_TREE_INTERNAL_NODE 2UL |
59 | 52 | |
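For orientation, a minimal stand-alone sketch of the two-bit entry encoding described in the comment above (the new `10` internal / `x1` value scheme). The helper names below are illustrative and not part of this header; in the kernel the equivalent tests are radix_tree_is_internal_node() and the xarray's xa_is_value()/xa_to_value().

```c
#include <stdbool.h>
#include <stdio.h>

#define ENTRY_MASK	3UL	/* RADIX_TREE_ENTRY_MASK */
#define INTERNAL_NODE	2UL	/* RADIX_TREE_INTERNAL_NODE: bits 10 */

/* Bits 00: an ordinary, word-aligned data pointer (NULL == no entry). */
static bool entry_is_pointer(const void *entry)
{
	return ((unsigned long)entry & ENTRY_MASK) == 0;
}

/* Bits 10: next-level node, sibling entry or "retry" indicator. */
static bool entry_is_internal(const void *entry)
{
	return ((unsigned long)entry & ENTRY_MASK) == INTERNAL_NODE;
}

/* Bits x1: a value entry - a small integer stored directly in the slot. */
static bool entry_is_value(const void *entry)
{
	return (unsigned long)entry & 1;
}

int main(void)
{
	/* Encode 42 the way the xarray's xa_mk_value() does: shift up, set bit 0. */
	void *entry = (void *)((42UL << 1) | 1);

	printf("pointer=%d internal=%d value=%d decoded=%lu\n",
	       entry_is_pointer(entry), entry_is_internal(entry),
	       entry_is_value(entry), (unsigned long)entry >> 1);
	return 0;
}
```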
60 | 53 | static inline bool radix_tree_is_internal_node(void *ptr) |
61 | 54 | { |
.. | .. |
65 | 58 | |
66 | 59 | /*** radix-tree API starts here ***/ |
67 | 60 | |
68 | | -#define RADIX_TREE_MAX_TAGS 3 |
69 | | - |
70 | | -#ifndef RADIX_TREE_MAP_SHIFT |
71 | | -#define RADIX_TREE_MAP_SHIFT (CONFIG_BASE_SMALL ? 4 : 6) |
72 | | -#endif |
73 | | - |
| 61 | +#define RADIX_TREE_MAP_SHIFT XA_CHUNK_SHIFT |
74 | 62 | #define RADIX_TREE_MAP_SIZE (1UL << RADIX_TREE_MAP_SHIFT) |
75 | 63 | #define RADIX_TREE_MAP_MASK (RADIX_TREE_MAP_SIZE-1) |
76 | 64 | |
77 | | -#define RADIX_TREE_TAG_LONGS \ |
78 | | - ((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG) |
| 65 | +#define RADIX_TREE_MAX_TAGS XA_MAX_MARKS |
| 66 | +#define RADIX_TREE_TAG_LONGS XA_MARK_LONGS |
79 | 67 | |
80 | 68 | #define RADIX_TREE_INDEX_BITS (8 /* CHAR_BIT */ * sizeof(unsigned long)) |
81 | 69 | #define RADIX_TREE_MAX_PATH (DIV_ROUND_UP(RADIX_TREE_INDEX_BITS, \ |
82 | 70 | RADIX_TREE_MAP_SHIFT)) |
83 | 71 | |
84 | | -/* |
85 | | - * @count is the count of every non-NULL element in the ->slots array |
86 | | - * whether that is an exceptional entry, a retry entry, a user pointer, |
87 | | - * a sibling entry or a pointer to the next level of the tree. |
88 | | - * @exceptional is the count of every element in ->slots which is |
89 | | - * either radix_tree_exceptional_entry() or is a sibling entry for an |
90 | | - * exceptional entry. |
91 | | - */ |
92 | | -struct radix_tree_node { |
93 | | - unsigned char shift; /* Bits remaining in each slot */ |
94 | | - unsigned char offset; /* Slot offset in parent */ |
95 | | - unsigned char count; /* Total entry count */ |
96 | | - unsigned char exceptional; /* Exceptional entry count */ |
97 | | - struct radix_tree_node *parent; /* Used when ascending tree */ |
98 | | - struct radix_tree_root *root; /* The tree we belong to */ |
99 | | - union { |
100 | | - struct list_head private_list; /* For tree user */ |
101 | | - struct rcu_head rcu_head; /* Used when freeing node */ |
102 | | - }; |
103 | | - void __rcu *slots[RADIX_TREE_MAP_SIZE]; |
104 | | - unsigned long tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS]; |
105 | | -}; |
106 | | - |
107 | | -/* The IDR tag is stored in the low bits of the GFP flags */ |
| 72 | +/* The IDR tag is stored in the low bits of xa_flags */ |
108 | 73 | #define ROOT_IS_IDR ((__force gfp_t)4) |
109 | | -/* The top bits of gfp_mask are used to store the root tags */ |
| 74 | +/* The top bits of xa_flags are used to store the root tags */ |
110 | 75 | #define ROOT_TAG_SHIFT (__GFP_BITS_SHIFT) |
111 | 76 | |
112 | | -struct radix_tree_root { |
113 | | - spinlock_t xa_lock; |
114 | | - gfp_t gfp_mask; |
115 | | - struct radix_tree_node __rcu *rnode; |
116 | | -}; |
117 | | - |
118 | | -#define RADIX_TREE_INIT(name, mask) { \ |
119 | | - .xa_lock = __SPIN_LOCK_UNLOCKED(name.xa_lock), \ |
120 | | - .gfp_mask = (mask), \ |
121 | | - .rnode = NULL, \ |
122 | | -} |
| 77 | +#define RADIX_TREE_INIT(name, mask) XARRAY_INIT(name, mask) |
123 | 78 | |
124 | 79 | #define RADIX_TREE(name, mask) \ |
125 | 80 | struct radix_tree_root name = RADIX_TREE_INIT(name, mask) |
126 | 81 | |
127 | | -#define INIT_RADIX_TREE(root, mask) \ |
128 | | -do { \ |
129 | | - spin_lock_init(&(root)->xa_lock); \ |
130 | | - (root)->gfp_mask = (mask); \ |
131 | | - (root)->rnode = NULL; \ |
132 | | -} while (0) |
| 82 | +#define INIT_RADIX_TREE(root, mask) xa_init_flags(root, mask) |
133 | 83 | |
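RADIX_TREE() and INIT_RADIX_TREE() keep their calling conventions even though they now expand to xarray initializers. A brief usage sketch; `my_cache` and its fields are illustrative, not part of this patch:

```c
#include <linux/radix_tree.h>
#include <linux/spinlock.h>

/* Static definition: usable without any runtime initialization. */
static RADIX_TREE(static_tree, GFP_KERNEL);

/* Embedded in a larger object and initialized at runtime. */
struct my_cache {
	spinlock_t lock;		/* serializes modifications */
	struct radix_tree_root entries;
};

static void my_cache_init(struct my_cache *cache)
{
	spin_lock_init(&cache->lock);
	INIT_RADIX_TREE(&cache->entries, GFP_ATOMIC);
}
```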
134 | 84 | static inline bool radix_tree_empty(const struct radix_tree_root *root) |
135 | 85 | { |
136 | | - return root->rnode == NULL; |
| 86 | + return root->xa_head == NULL; |
137 | 87 | } |
138 | 88 | |
139 | 89 | /** |
.. | .. |
143 | 93 | * @next_index: one beyond the last index for this chunk |
144 | 94 | * @tags: bit-mask for tag-iterating |
145 | 95 | * @node: node that contains current slot |
146 | | - * @shift: shift for the node that holds our slots |
147 | 96 | * |
148 | 97 | * This radix tree iterator works in terms of "chunks" of slots. A chunk is a |
149 | 98 | * subinterval of slots contained within one radix tree leaf node. It is |
.. | .. |
157 | 106 | unsigned long next_index; |
158 | 107 | unsigned long tags; |
159 | 108 | struct radix_tree_node *node; |
160 | | -#ifdef CONFIG_RADIX_TREE_MULTIORDER |
161 | | - unsigned int shift; |
162 | | -#endif |
163 | 109 | }; |
164 | | - |
165 | | -static inline unsigned int iter_shift(const struct radix_tree_iter *iter) |
166 | | -{ |
167 | | -#ifdef CONFIG_RADIX_TREE_MULTIORDER |
168 | | - return iter->shift; |
169 | | -#else |
170 | | - return 0; |
171 | | -#endif |
172 | | -} |
173 | 110 | |
174 | 111 | /** |
175 | 112 | * Radix-tree synchronization |
.. | .. |
194 | 131 | * radix_tree_lookup_slot |
195 | 132 | * radix_tree_tag_get |
196 | 133 | * radix_tree_gang_lookup |
197 | | - * radix_tree_gang_lookup_slot |
198 | 134 | * radix_tree_gang_lookup_tag |
199 | 135 | * radix_tree_gang_lookup_tag_slot |
200 | 136 | * radix_tree_tagged |
201 | 137 | * |
202 | | - * The first 8 functions are able to be called locklessly, using RCU. The |
| 138 | + * The first 7 functions are able to be called locklessly, using RCU. The |
203 | 139 | * caller must ensure calls to these functions are made within rcu_read_lock() |
204 | 140 | * regions. Other readers (lock-free or otherwise) and modifications may be |
205 | 141 | * running concurrently. |
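A hedged sketch of the locking convention spelled out above: lookups run under rcu_read_lock(), while updates are serialized by a lock of the caller's choosing. `my_tree`, `my_lock` and the wrapper functions are placeholders:

```c
#include <linux/radix_tree.h>
#include <linux/rcupdate.h>
#include <linux/spinlock.h>

static RADIX_TREE(my_tree, GFP_KERNEL);		/* placeholder tree */
static DEFINE_SPINLOCK(my_lock);		/* serializes modifications */

static void *my_lookup(unsigned long index)
{
	void *entry;

	rcu_read_lock();
	entry = radix_tree_lookup(&my_tree, index);
	/*
	 * The entry is only guaranteed to stay alive inside the RCU read
	 * section; take a reference here if it must be used afterwards.
	 */
	rcu_read_unlock();
	return entry;
}

static void *my_remove(unsigned long index)
{
	void *entry;

	spin_lock(&my_lock);
	entry = radix_tree_delete(&my_tree, index);
	spin_unlock(&my_lock);
	return entry;
}
```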
.. | .. |
269 | 205 | } |
270 | 206 | |
271 | 207 | /** |
272 | | - * radix_tree_exceptional_entry - radix_tree_deref_slot gave exceptional entry? |
273 | | - * @arg: value returned by radix_tree_deref_slot |
274 | | - * Returns: 0 if well-aligned pointer, non-0 if exceptional entry. |
275 | | - */ |
276 | | -static inline int radix_tree_exceptional_entry(void *arg) |
277 | | -{ |
278 | | - /* Not unlikely because radix_tree_exception often tested first */ |
279 | | - return (unsigned long)arg & RADIX_TREE_EXCEPTIONAL_ENTRY; |
280 | | -} |
281 | | - |
282 | | -/** |
283 | 208 | * radix_tree_exception - radix_tree_deref_slot returned either exception? |
284 | 209 | * @arg: value returned by radix_tree_deref_slot |
285 | 210 | * Returns: 0 if well-aligned pointer, non-0 if either kind of exception. |
.. | .. |
289 | 214 | return unlikely((unsigned long)arg & RADIX_TREE_ENTRY_MASK); |
290 | 215 | } |
291 | 216 | |
292 | | -int __radix_tree_create(struct radix_tree_root *, unsigned long index, |
293 | | - unsigned order, struct radix_tree_node **nodep, |
294 | | - void __rcu ***slotp); |
295 | | -int __radix_tree_insert(struct radix_tree_root *, unsigned long index, |
296 | | - unsigned order, void *); |
297 | | -static inline int radix_tree_insert(struct radix_tree_root *root, |
298 | | - unsigned long index, void *entry) |
299 | | -{ |
300 | | - return __radix_tree_insert(root, index, 0, entry); |
301 | | -} |
| 217 | +int radix_tree_insert(struct radix_tree_root *, unsigned long index, |
| 218 | + void *); |
302 | 219 | void *__radix_tree_lookup(const struct radix_tree_root *, unsigned long index, |
303 | 220 | struct radix_tree_node **nodep, void __rcu ***slotp); |
304 | 221 | void *radix_tree_lookup(const struct radix_tree_root *, unsigned long); |
305 | 222 | void __rcu **radix_tree_lookup_slot(const struct radix_tree_root *, |
306 | 223 | unsigned long index); |
307 | | -typedef void (*radix_tree_update_node_t)(struct radix_tree_node *); |
308 | 224 | void __radix_tree_replace(struct radix_tree_root *, struct radix_tree_node *, |
309 | | - void __rcu **slot, void *entry, |
310 | | - radix_tree_update_node_t update_node); |
| 225 | + void __rcu **slot, void *entry); |
311 | 226 | void radix_tree_iter_replace(struct radix_tree_root *, |
312 | 227 | const struct radix_tree_iter *, void __rcu **slot, void *entry); |
313 | 228 | void radix_tree_replace_slot(struct radix_tree_root *, |
314 | 229 | void __rcu **slot, void *entry); |
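The lookup_slot/replace_slot pair above is typically used to swap one non-NULL entry for another in place, under the same lock that serializes other updates. A sketch with placeholder names (removal should still go through radix_tree_delete() rather than storing NULL this way):

```c
#include <linux/errno.h>
#include <linux/radix_tree.h>
#include <linux/spinlock.h>

static int my_replace(struct radix_tree_root *root, spinlock_t *lock,
		      unsigned long index, void *new_entry)
{
	void __rcu **slot;
	int err = 0;

	spin_lock(lock);
	slot = radix_tree_lookup_slot(root, index);
	if (slot)
		radix_tree_replace_slot(root, slot, new_entry);
	else
		err = -ENOENT;	/* nothing stored at this index */
	spin_unlock(lock);
	return err;
}
```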
315 | | -void __radix_tree_delete_node(struct radix_tree_root *, |
316 | | - struct radix_tree_node *, |
317 | | - radix_tree_update_node_t update_node); |
318 | 230 | void radix_tree_iter_delete(struct radix_tree_root *, |
319 | 231 | struct radix_tree_iter *iter, void __rcu **slot); |
320 | 232 | void *radix_tree_delete_item(struct radix_tree_root *, unsigned long, void *); |
321 | 233 | void *radix_tree_delete(struct radix_tree_root *, unsigned long); |
322 | | -void radix_tree_clear_tags(struct radix_tree_root *, struct radix_tree_node *, |
323 | | - void __rcu **slot); |
324 | 234 | unsigned int radix_tree_gang_lookup(const struct radix_tree_root *, |
325 | 235 | void **results, unsigned long first_index, |
326 | 236 | unsigned int max_items); |
327 | | -unsigned int radix_tree_gang_lookup_slot(const struct radix_tree_root *, |
328 | | - void __rcu ***results, unsigned long *indices, |
329 | | - unsigned long first_index, unsigned int max_items); |
330 | 237 | int radix_tree_preload(gfp_t gfp_mask); |
331 | 238 | int radix_tree_maybe_preload(gfp_t gfp_mask); |
332 | | -int radix_tree_maybe_preload_order(gfp_t gfp_mask, int order); |
333 | | -void radix_tree_preload_end(void); |
334 | | - |
335 | 239 | void radix_tree_init(void); |
336 | 240 | void *radix_tree_tag_set(struct radix_tree_root *, |
337 | 241 | unsigned long index, unsigned int tag); |
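The tag API declared above operates on entries that are already present in the tree. A short illustrative sketch; the tag number and helper names are placeholders (tags range from 0 to RADIX_TREE_MAX_TAGS - 1):

```c
#include <linux/radix_tree.h>
#include <linux/spinlock.h>

#define MY_TAG_DIRTY	0	/* illustrative tag number */

static void my_mark_dirty(struct radix_tree_root *root, spinlock_t *lock,
			  unsigned long index)
{
	spin_lock(lock);
	/* Tagging a slot that holds no entry is a bug, so check first. */
	if (radix_tree_lookup(root, index))
		radix_tree_tag_set(root, index, MY_TAG_DIRTY);
	spin_unlock(lock);
}

static bool my_is_dirty(const struct radix_tree_root *root, unsigned long index)
{
	/* Safe under rcu_read_lock(); see the synchronization comment above. */
	return radix_tree_tag_get(root, index, MY_TAG_DIRTY);
}
```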
.. | .. |
339 | 243 | unsigned long index, unsigned int tag); |
340 | 244 | int radix_tree_tag_get(const struct radix_tree_root *, |
341 | 245 | unsigned long index, unsigned int tag); |
342 | | -void radix_tree_iter_tag_set(struct radix_tree_root *, |
343 | | - const struct radix_tree_iter *iter, unsigned int tag); |
344 | 246 | void radix_tree_iter_tag_clear(struct radix_tree_root *, |
345 | 247 | const struct radix_tree_iter *iter, unsigned int tag); |
346 | 248 | unsigned int radix_tree_gang_lookup_tag(const struct radix_tree_root *, |
.. | .. |
351 | 253 | unsigned int max_items, unsigned int tag); |
352 | 254 | int radix_tree_tagged(const struct radix_tree_root *, unsigned int tag); |
353 | 255 | |
354 | | -int radix_tree_split_preload(unsigned old_order, unsigned new_order, gfp_t); |
355 | | -int radix_tree_split(struct radix_tree_root *, unsigned long index, |
356 | | - unsigned new_order); |
357 | | -int radix_tree_join(struct radix_tree_root *, unsigned long index, |
358 | | - unsigned new_order, void *); |
| 256 | +static inline void radix_tree_preload_end(void) |
| 257 | +{ |
| 258 | + local_unlock(&radix_tree_preloads.lock); |
| 259 | +} |
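radix_tree_preload_end() above now drops the per-CPU local_lock taken by radix_tree_preload(); the calling convention itself is unchanged. A sketch of the usual preload-then-insert pattern (everything except the radix-tree calls is a placeholder):

```c
#include <linux/radix_tree.h>
#include <linux/spinlock.h>

static int my_store(struct radix_tree_root *root, spinlock_t *lock,
		    unsigned long index, void *item)
{
	int err;

	/*
	 * May sleep: preallocates enough nodes for one insertion and, on
	 * success, holds the per-CPU radix_tree_preloads.lock until
	 * radix_tree_preload_end() is called.
	 */
	err = radix_tree_preload(GFP_KERNEL);
	if (err)
		return err;

	spin_lock(lock);
	err = radix_tree_insert(root, index, item);
	spin_unlock(lock);

	radix_tree_preload_end();
	return err;
}
```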
359 | 260 | |
360 | 261 | void __rcu **idr_get_free(struct radix_tree_root *root, |
361 | 262 | struct radix_tree_iter *iter, gfp_t gfp, |
.. | .. |
425 | 326 | } |
426 | 327 | |
427 | 328 | /** |
428 | | - * radix_tree_iter_find - find a present entry |
429 | | - * @root: radix tree root |
430 | | - * @iter: iterator state |
431 | | - * @index: start location |
432 | | - * |
433 | | - * This function returns the slot containing the entry with the lowest index |
434 | | - * which is at least @index. If @index is larger than any present entry, this |
435 | | - * function returns NULL. The @iter is updated to describe the entry found. |
436 | | - */ |
437 | | -static inline void __rcu ** |
438 | | -radix_tree_iter_find(const struct radix_tree_root *root, |
439 | | - struct radix_tree_iter *iter, unsigned long index) |
440 | | -{ |
441 | | - radix_tree_iter_init(iter, index); |
442 | | - return radix_tree_next_chunk(root, iter, 0); |
443 | | -} |
444 | | - |
445 | | -/** |
446 | 329 | * radix_tree_iter_retry - retry this chunk of the iteration |
447 | 330 | * @iter: iterator state |
448 | 331 | * |
.. | .. |
462 | 345 | static inline unsigned long |
463 | 346 | __radix_tree_iter_add(struct radix_tree_iter *iter, unsigned long slots) |
464 | 347 | { |
465 | | - return iter->index + (slots << iter_shift(iter)); |
| 348 | + return iter->index + slots; |
466 | 349 | } |
467 | 350 | |
468 | 351 | /** |
.. | .. |
487 | 370 | static __always_inline long |
488 | 371 | radix_tree_chunk_size(struct radix_tree_iter *iter) |
489 | 372 | { |
490 | | - return (iter->next_index - iter->index) >> iter_shift(iter); |
| 373 | + return iter->next_index - iter->index; |
491 | 374 | } |
492 | | - |
493 | | -#ifdef CONFIG_RADIX_TREE_MULTIORDER |
494 | | -void __rcu **__radix_tree_next_slot(void __rcu **slot, |
495 | | - struct radix_tree_iter *iter, unsigned flags); |
496 | | -#else |
497 | | -/* Can't happen without sibling entries, but the compiler can't tell that */ |
498 | | -static inline void __rcu **__radix_tree_next_slot(void __rcu **slot, |
499 | | - struct radix_tree_iter *iter, unsigned flags) |
500 | | -{ |
501 | | - return slot; |
502 | | -} |
503 | | -#endif |
504 | 375 | |
505 | 376 | /** |
506 | 377 | * radix_tree_next_slot - find next slot in chunk |
507 | 378 | * |
508 | 379 | * @slot: pointer to current slot |
509 | | - * @iter: pointer to interator state |
| 380 | + * @iter: pointer to iterator state |
510 | 381 | * @flags: RADIX_TREE_ITER_*, should be constant |
511 | 382 | * Returns: pointer to next slot, or NULL if there no more left |
512 | 383 | * |
.. | .. |
560 | 431 | return NULL; |
561 | 432 | |
562 | 433 | found: |
563 | | - if (unlikely(radix_tree_is_internal_node(rcu_dereference_raw(*slot)))) |
564 | | - return __radix_tree_next_slot(slot, iter, flags); |
565 | 434 | return slot; |
566 | 435 | } |
567 | 436 | |
.. | .. |
579 | 448 | for (slot = radix_tree_iter_init(iter, start) ; \ |
580 | 449 | slot || (slot = radix_tree_next_chunk(root, iter, 0)) ; \ |
581 | 450 | slot = radix_tree_next_slot(slot, iter, 0)) |
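A sketch of how radix_tree_for_each_slot (whose body ends just above) is normally used together with radix_tree_deref_slot(), radix_tree_deref_retry() and radix_tree_iter_retry(), all declared earlier in this header; the counting loop body is illustrative:

```c
#include <linux/radix_tree.h>
#include <linux/rcupdate.h>

static unsigned long my_count_entries(struct radix_tree_root *root)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned long count = 0;

	rcu_read_lock();
	radix_tree_for_each_slot(slot, root, &iter, 0) {
		void *entry = radix_tree_deref_slot(slot);

		/* A concurrently moved entry shows up as a retry marker. */
		if (radix_tree_deref_retry(entry)) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		count++;
	}
	rcu_read_unlock();

	return count;
}
```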
582 | | - |
583 | | -/** |
584 | | - * radix_tree_for_each_contig - iterate over contiguous slots |
585 | | - * |
586 | | - * @slot: the void** variable for pointer to slot |
587 | | - * @root: the struct radix_tree_root pointer |
588 | | - * @iter: the struct radix_tree_iter pointer |
589 | | - * @start: iteration starting index |
590 | | - * |
591 | | - * @slot points to radix tree slot, @iter->index contains its index. |
592 | | - */ |
593 | | -#define radix_tree_for_each_contig(slot, root, iter, start) \ |
594 | | - for (slot = radix_tree_iter_init(iter, start) ; \ |
595 | | - slot || (slot = radix_tree_next_chunk(root, iter, \ |
596 | | - RADIX_TREE_ITER_CONTIG)) ; \ |
597 | | - slot = radix_tree_next_slot(slot, iter, \ |
598 | | - RADIX_TREE_ITER_CONTIG)) |
599 | 451 | |
600 | 452 | /** |
601 | 453 | * radix_tree_for_each_tagged - iterate over tagged slots |
---|