2024-10-22 8ac6c7a54ed1b98d142dce24b11c6de6a1e239a5
kernel/lib/radix-tree.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Copyright (C) 2001 Momchil Velikov
  * Portions Copyright (C) 2001 Christoph Hellwig
@@ -6,20 +7,6 @@
  * Copyright (C) 2012 Konstantin Khlebnikov
  * Copyright (C) 2016 Intel, Matthew Wilcox
  * Copyright (C) 2016 Intel, Ross Zwisler
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License as
- * published by the Free Software Foundation; either version 2, or (at
- * your option) any later version.
- *
- * This program is distributed in the hope that it will be useful, but
- * WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
  */
 
 #include <linux/bitmap.h>
@@ -38,15 +25,12 @@
 #include <linux/rcupdate.h>
 #include <linux/slab.h>
 #include <linux/string.h>
-#include <linux/locallock.h>
-
-/* Number of nodes in fully populated tree of given height */
-static unsigned long height_to_maxnodes[RADIX_TREE_MAX_PATH + 1] __read_mostly;
+#include <linux/xarray.h>
 
 /*
  * Radix tree node cache.
  */
-static struct kmem_cache *radix_tree_node_cachep;
+struct kmem_cache *radix_tree_node_cachep;
 
 /*
  * The radix tree is variable-height, so an insert operation not only has
@@ -71,23 +55,12 @@
 #define IDR_PRELOAD_SIZE        (IDR_MAX_PATH * 2 - 1)
 
 /*
- * The IDA is even shorter since it uses a bitmap at the last level.
- */
-#define IDA_INDEX_BITS          (8 * sizeof(int) - 1 - ilog2(IDA_BITMAP_BITS))
-#define IDA_MAX_PATH            (DIV_ROUND_UP(IDA_INDEX_BITS, \
-                                                RADIX_TREE_MAP_SHIFT))
-#define IDA_PRELOAD_SIZE        (IDA_MAX_PATH * 2 - 1)
-
-/*
  * Per-cpu pool of preloaded nodes
  */
-struct radix_tree_preload {
-        unsigned nr;
-        /* nodes->parent points to next preallocated node */
-        struct radix_tree_node *nodes;
+DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = {
+        .lock = INIT_LOCAL_LOCK(lock),
 };
-static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
-static DEFINE_LOCAL_IRQ_LOCK(radix_tree_preloads_lock);
+EXPORT_PER_CPU_SYMBOL_GPL(radix_tree_preloads);
 
 static inline struct radix_tree_node *entry_to_node(void *ptr)
 {
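
Note: the struct definition leaves this file, and the initializer above implies it now carries its own local lock. A minimal sketch of what the header-side definition presumably looks like, with the nr/nodes fields carried over from the removed local struct (hedged, not quoted from the patch):

struct radix_tree_preload {
        local_lock_t lock;              /* replaces radix_tree_preloads_lock */
        unsigned nr;                    /* number of preallocated nodes */
        /* nodes->parent points to next preallocated node */
        struct radix_tree_node *nodes;  /* singly linked free list */
};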
@@ -99,24 +72,7 @@
 	return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE);
 }
 
-#define RADIX_TREE_RETRY        node_to_entry(NULL)
-
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
-/* Sibling slots point directly to another slot in the same node */
-static inline
-bool is_sibling_entry(const struct radix_tree_node *parent, void *node)
-{
-        void __rcu **ptr = node;
-        return (parent->slots <= ptr) &&
-                        (ptr < parent->slots + RADIX_TREE_MAP_SIZE);
-}
-#else
-static inline
-bool is_sibling_entry(const struct radix_tree_node *parent, void *node)
-{
-        return false;
-}
-#endif
+#define RADIX_TREE_RETRY        XA_RETRY_ENTRY
 
 static inline unsigned long
 get_slot_offset(const struct radix_tree_node *parent, void __rcu **slot)
@@ -130,24 +86,13 @@
 	unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK;
 	void __rcu **entry = rcu_dereference_raw(parent->slots[offset]);
 
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
-        if (radix_tree_is_internal_node(entry)) {
-                if (is_sibling_entry(parent, entry)) {
-                        void __rcu **sibentry;
-                        sibentry = (void __rcu **) entry_to_node(entry);
-                        offset = get_slot_offset(parent, sibentry);
-                        entry = rcu_dereference_raw(*sibentry);
-                }
-        }
-#endif
-
 	*nodep = (void *)entry;
 	return offset;
 }
 
 static inline gfp_t root_gfp_mask(const struct radix_tree_root *root)
 {
-        return root->gfp_mask & (__GFP_BITS_MASK & ~GFP_ZONEMASK);
+        return root->xa_flags & (__GFP_BITS_MASK & ~GFP_ZONEMASK);
 }
 
 static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
@@ -170,32 +115,32 @@
 
 static inline void root_tag_set(struct radix_tree_root *root, unsigned tag)
 {
-        root->gfp_mask |= (__force gfp_t)(1 << (tag + ROOT_TAG_SHIFT));
+        root->xa_flags |= (__force gfp_t)(1 << (tag + ROOT_TAG_SHIFT));
 }
 
 static inline void root_tag_clear(struct radix_tree_root *root, unsigned tag)
 {
-        root->gfp_mask &= (__force gfp_t)~(1 << (tag + ROOT_TAG_SHIFT));
+        root->xa_flags &= (__force gfp_t)~(1 << (tag + ROOT_TAG_SHIFT));
 }
 
 static inline void root_tag_clear_all(struct radix_tree_root *root)
 {
-        root->gfp_mask &= (1 << ROOT_TAG_SHIFT) - 1;
+        root->xa_flags &= (__force gfp_t)((1 << ROOT_TAG_SHIFT) - 1);
 }
 
 static inline int root_tag_get(const struct radix_tree_root *root, unsigned tag)
 {
-        return (__force int)root->gfp_mask & (1 << (tag + ROOT_TAG_SHIFT));
+        return (__force int)root->xa_flags & (1 << (tag + ROOT_TAG_SHIFT));
 }
 
 static inline unsigned root_tags_get(const struct radix_tree_root *root)
 {
-        return (__force unsigned)root->gfp_mask >> ROOT_TAG_SHIFT;
+        return (__force unsigned)root->xa_flags >> ROOT_TAG_SHIFT;
 }
 
 static inline bool is_idr(const struct radix_tree_root *root)
 {
-        return !!(root->gfp_mask & ROOT_IS_IDR);
+        return !!(root->xa_flags & ROOT_IS_IDR);
 }
 
 /*
@@ -255,7 +200,7 @@
 
 static unsigned int iter_offset(const struct radix_tree_iter *iter)
 {
-        return (iter->index >> iter_shift(iter)) & RADIX_TREE_MAP_MASK;
+        return iter->index & RADIX_TREE_MAP_MASK;
 }
 
 /*
@@ -278,99 +223,6 @@
 	return (index & ~node_maxindex(node)) + (offset << node->shift);
 }
 
-#ifndef __KERNEL__
-static void dump_node(struct radix_tree_node *node, unsigned long index)
-{
-        unsigned long i;
-
-        pr_debug("radix node: %p offset %d indices %lu-%lu parent %p tags %lx %lx %lx shift %d count %d exceptional %d\n",
-                node, node->offset, index, index | node_maxindex(node),
-                node->parent,
-                node->tags[0][0], node->tags[1][0], node->tags[2][0],
-                node->shift, node->count, node->exceptional);
-
-        for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
-                unsigned long first = index | (i << node->shift);
-                unsigned long last = first | ((1UL << node->shift) - 1);
-                void *entry = node->slots[i];
-                if (!entry)
-                        continue;
-                if (entry == RADIX_TREE_RETRY) {
-                        pr_debug("radix retry offset %ld indices %lu-%lu parent %p\n",
-                                        i, first, last, node);
-                } else if (!radix_tree_is_internal_node(entry)) {
-                        pr_debug("radix entry %p offset %ld indices %lu-%lu parent %p\n",
-                                        entry, i, first, last, node);
-                } else if (is_sibling_entry(node, entry)) {
-                        pr_debug("radix sblng %p offset %ld indices %lu-%lu parent %p val %p\n",
-                                        entry, i, first, last, node,
-                                        *(void **)entry_to_node(entry));
-                } else {
-                        dump_node(entry_to_node(entry), first);
-                }
-        }
-}
-
-/* For debug */
-static void radix_tree_dump(struct radix_tree_root *root)
-{
-        pr_debug("radix root: %p rnode %p tags %x\n",
-                        root, root->rnode,
-                        root->gfp_mask >> ROOT_TAG_SHIFT);
-        if (!radix_tree_is_internal_node(root->rnode))
-                return;
-        dump_node(entry_to_node(root->rnode), 0);
-}
-
-static void dump_ida_node(void *entry, unsigned long index)
-{
-        unsigned long i;
-
-        if (!entry)
-                return;
-
-        if (radix_tree_is_internal_node(entry)) {
-                struct radix_tree_node *node = entry_to_node(entry);
-
-                pr_debug("ida node: %p offset %d indices %lu-%lu parent %p free %lx shift %d count %d\n",
-                        node, node->offset, index * IDA_BITMAP_BITS,
-                        ((index | node_maxindex(node)) + 1) *
-                                IDA_BITMAP_BITS - 1,
-                        node->parent, node->tags[0][0], node->shift,
-                        node->count);
-                for (i = 0; i < RADIX_TREE_MAP_SIZE; i++)
-                        dump_ida_node(node->slots[i],
-                                        index | (i << node->shift));
-        } else if (radix_tree_exceptional_entry(entry)) {
-                pr_debug("ida excp: %p offset %d indices %lu-%lu data %lx\n",
-                                entry, (int)(index & RADIX_TREE_MAP_MASK),
-                                index * IDA_BITMAP_BITS,
-                                index * IDA_BITMAP_BITS + BITS_PER_LONG -
-                                        RADIX_TREE_EXCEPTIONAL_SHIFT,
-                                (unsigned long)entry >>
-                                        RADIX_TREE_EXCEPTIONAL_SHIFT);
-        } else {
-                struct ida_bitmap *bitmap = entry;
-
-                pr_debug("ida btmp: %p offset %d indices %lu-%lu data", bitmap,
-                                (int)(index & RADIX_TREE_MAP_MASK),
-                                index * IDA_BITMAP_BITS,
-                                (index + 1) * IDA_BITMAP_BITS - 1);
-                for (i = 0; i < IDA_BITMAP_LONGS; i++)
-                        pr_cont(" %lx", bitmap->bitmap[i]);
-                pr_cont("\n");
-        }
-}
-
-static void ida_dump(struct ida *ida)
-{
-        struct radix_tree_root *root = &ida->ida_rt;
-        pr_debug("ida: %p node %p free %d\n", ida, root->rnode,
-                        root->gfp_mask >> ROOT_TAG_SHIFT);
-        dump_ida_node(root->rnode, 0);
-}
-#endif
-
 /*
  * This assumes that the caller has performed appropriate preallocation, and
  * that the caller has pinned this thread of control to the current CPU.
@@ -379,7 +231,7 @@
 radix_tree_node_alloc(gfp_t gfp_mask, struct radix_tree_node *parent,
 			struct radix_tree_root *root,
 			unsigned int shift, unsigned int offset,
-			unsigned int count, unsigned int exceptional)
+			unsigned int count, unsigned int nr_values)
 {
 	struct radix_tree_node *ret = NULL;
 
@@ -406,13 +258,12 @@
 		 * succeed in getting a node here (and never reach
 		 * kmem_cache_alloc)
 		 */
-		rtp = &get_locked_var(radix_tree_preloads_lock, radix_tree_preloads);
+		rtp = this_cpu_ptr(&radix_tree_preloads);
 		if (rtp->nr) {
 			ret = rtp->nodes;
 			rtp->nodes = ret->parent;
 			rtp->nr--;
 		}
-		put_locked_var(radix_tree_preloads_lock, radix_tree_preloads);
 		/*
 		 * Update the allocation stack trace as this is more useful
 		 * for debugging.
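
Note: the get_locked_var()/put_locked_var() pair can be dropped here because every caller of radix_tree_node_alloc() already runs with radix_tree_preloads.lock held (see the function's lead comment about being pinned to the current CPU), so plain this_cpu_ptr() access is race-free. A standalone sketch of the pop above, under that assumption (pop_preloaded_node is an illustrative name, not part of the patch):

static struct radix_tree_node *pop_preloaded_node(void)
{
        struct radix_tree_preload *rtp = this_cpu_ptr(&radix_tree_preloads);
        struct radix_tree_node *node = NULL;

        /*
         * Caller holds radix_tree_preloads.lock: no other task can touch
         * this CPU's pool, and migration off this CPU is excluded.
         */
        if (rtp->nr) {
                node = rtp->nodes;
                rtp->nodes = node->parent;      /* ->parent links the free list */
                rtp->nr--;
        }
        return node;
}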
@@ -427,14 +278,14 @@
 		ret->shift = shift;
 		ret->offset = offset;
 		ret->count = count;
-		ret->exceptional = exceptional;
+		ret->nr_values = nr_values;
 		ret->parent = parent;
-		ret->root = root;
+		ret->array = root;
 	}
 	return ret;
 }
 
-static void radix_tree_node_rcu_free(struct rcu_head *head)
+void radix_tree_node_rcu_free(struct rcu_head *head)
 {
 	struct radix_tree_node *node =
 			container_of(head, struct radix_tree_node, rcu_head);
@@ -473,19 +324,19 @@
 	int ret = -ENOMEM;
 
 	/*
-	 * Nodes preloaded by one cgroup can be be used by another cgroup, so
+	 * Nodes preloaded by one cgroup can be used by another cgroup, so
 	 * they should never be accounted to any particular memory cgroup.
 	 */
 	gfp_mask &= ~__GFP_ACCOUNT;
 
-	local_lock(radix_tree_preloads_lock);
+	local_lock(&radix_tree_preloads.lock);
 	rtp = this_cpu_ptr(&radix_tree_preloads);
 	while (rtp->nr < nr) {
-		local_unlock(radix_tree_preloads_lock);
+		local_unlock(&radix_tree_preloads.lock);
 		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
 		if (node == NULL)
 			goto out;
-		local_lock(radix_tree_preloads_lock);
+		local_lock(&radix_tree_preloads.lock);
 		rtp = this_cpu_ptr(&radix_tree_preloads);
 		if (rtp->nr < nr) {
 			node->parent = rtp->nodes;
@@ -527,88 +378,15 @@
 	if (gfpflags_allow_blocking(gfp_mask))
 		return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
 	/* Preloading doesn't help anything with this gfp mask, skip it */
-	local_lock(radix_tree_preloads_lock);
+	local_lock(&radix_tree_preloads.lock);
 	return 0;
 }
 EXPORT_SYMBOL(radix_tree_maybe_preload);
 
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
-/*
- * Preload with enough objects to ensure that we can split a single entry
- * of order @old_order into many entries of size @new_order
- */
-int radix_tree_split_preload(unsigned int old_order, unsigned int new_order,
-				gfp_t gfp_mask)
-{
-        unsigned top = 1 << (old_order % RADIX_TREE_MAP_SHIFT);
-        unsigned layers = (old_order / RADIX_TREE_MAP_SHIFT) -
-                                (new_order / RADIX_TREE_MAP_SHIFT);
-        unsigned nr = 0;
-
-        WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
-        BUG_ON(new_order >= old_order);
-
-        while (layers--)
-                nr = nr * RADIX_TREE_MAP_SIZE + 1;
-        return __radix_tree_preload(gfp_mask, top * nr);
-}
-#endif
-
-/*
- * The same as function above, but preload number of nodes required to insert
- * (1 << order) continuous naturally-aligned elements.
- */
-int radix_tree_maybe_preload_order(gfp_t gfp_mask, int order)
-{
-        unsigned long nr_subtrees;
-        int nr_nodes, subtree_height;
-
-        /* Preloading doesn't help anything with this gfp mask, skip it */
-        if (!gfpflags_allow_blocking(gfp_mask)) {
-                local_lock(radix_tree_preloads_lock);
-                return 0;
-        }
-
-        /*
-         * Calculate number and height of fully populated subtrees it takes to
-         * store (1 << order) elements.
-         */
-        nr_subtrees = 1 << order;
-        for (subtree_height = 0; nr_subtrees > RADIX_TREE_MAP_SIZE;
-                        subtree_height++)
-                nr_subtrees >>= RADIX_TREE_MAP_SHIFT;
-
-        /*
-         * The worst case is zero height tree with a single item at index 0 and
-         * then inserting items starting at ULONG_MAX - (1 << order).
-         *
-         * This requires RADIX_TREE_MAX_PATH nodes to build branch from root to
-         * 0-index item.
-         */
-        nr_nodes = RADIX_TREE_MAX_PATH;
-
-        /* Plus branch to fully populated subtrees. */
-        nr_nodes += RADIX_TREE_MAX_PATH - subtree_height;
-
-        /* Root node is shared. */
-        nr_nodes--;
-
-        /* Plus nodes required to build subtrees. */
-        nr_nodes += nr_subtrees * height_to_maxnodes[subtree_height];
-
-        return __radix_tree_preload(gfp_mask, nr_nodes);
-}
-
-void radix_tree_preload_end(void)
-{
-        local_unlock(radix_tree_preloads_lock);
-}
-EXPORT_SYMBOL(radix_tree_preload_end);
-
 static unsigned radix_tree_load_root(const struct radix_tree_root *root,
 		struct radix_tree_node **nodep, unsigned long *maxindex)
 {
-	struct radix_tree_node *node = rcu_dereference_raw(root->rnode);
+	struct radix_tree_node *node = rcu_dereference_raw(root->xa_head);
 
 	*nodep = node;
 
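
Note: radix_tree_preload_end() leaves this file but the preload bracket survives; presumably it becomes a header inline wrapping local_unlock(&radix_tree_preloads.lock), which is why radix_tree_preloads gains EXPORT_PER_CPU_SYMBOL_GPL above. A hedged usage sketch (example_store and my_lock are illustrative names):

static int example_store(struct radix_tree_root *root, spinlock_t *my_lock,
                         unsigned long index, void *item)
{
        int err = radix_tree_preload(GFP_KERNEL);

        if (err)
                return err;             /* on failure the local lock is not held */
        spin_lock(my_lock);
        err = radix_tree_insert(root, index, item);
        spin_unlock(my_lock);
        radix_tree_preload_end();       /* drops radix_tree_preloads.lock */
        return err;
}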
@@ -637,7 +415,7 @@
 	while (index > shift_maxindex(maxshift))
 		maxshift += RADIX_TREE_MAP_SHIFT;
 
-	entry = rcu_dereference_raw(root->rnode);
+	entry = rcu_dereference_raw(root->xa_head);
 	if (!entry && (!is_idr(root) || root_tag_get(root, IDR_FREE)))
 		goto out;
 
@@ -664,9 +442,9 @@
 		BUG_ON(shift > BITS_PER_LONG);
 		if (radix_tree_is_internal_node(entry)) {
 			entry_to_node(entry)->parent = node;
-		} else if (radix_tree_exceptional_entry(entry)) {
-			/* Moving an exceptional root->rnode to a node */
-			node->exceptional = 1;
+		} else if (xa_is_value(entry)) {
+			/* Moving a value entry root->xa_head to a node */
+			node->nr_values = 1;
 		}
 		/*
 		 * entry was already in the radix tree, so we do not need
@@ -674,7 +452,7 @@
 		 */
 		node->slots[0] = (void __rcu *)entry;
 		entry = node_to_entry(node);
-		rcu_assign_pointer(root->rnode, entry);
+		rcu_assign_pointer(root->xa_head, entry);
 		shift += RADIX_TREE_MAP_SHIFT;
 	} while (shift <= maxshift);
 out:
@@ -685,13 +463,12 @@
  * radix_tree_shrink - shrink radix tree to minimum height
  * @root radix tree root
  */
-static inline bool radix_tree_shrink(struct radix_tree_root *root,
-				     radix_tree_update_node_t update_node)
+static inline bool radix_tree_shrink(struct radix_tree_root *root)
 {
 	bool shrunk = false;
 
 	for (;;) {
-		struct radix_tree_node *node = rcu_dereference_raw(root->rnode);
+		struct radix_tree_node *node = rcu_dereference_raw(root->xa_head);
 		struct radix_tree_node *child;
 
 		if (!radix_tree_is_internal_node(node))
@@ -700,15 +477,20 @@
 
 		/*
 		 * The candidate node has more than one child, or its child
-		 * is not at the leftmost slot, or the child is a multiorder
-		 * entry, we cannot shrink.
+		 * is not at the leftmost slot, we cannot shrink.
 		 */
 		if (node->count != 1)
 			break;
 		child = rcu_dereference_raw(node->slots[0]);
 		if (!child)
 			break;
-		if (!radix_tree_is_internal_node(child) && node->shift)
+
+		/*
+		 * For an IDR, we must not shrink entry 0 into the root in
+		 * case somebody calls idr_replace() with a pointer that
+		 * appears to be an internal entry
+		 */
+		if (!node->shift && is_idr(root))
 			break;
 
 		if (radix_tree_is_internal_node(child))
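
Note on the new comment above: shrinking index 0 into root->xa_head would make a stored pointer whose low bits resemble RADIX_TREE_INTERNAL_NODE indistinguishable from a node pointer; kept one level down, entry 0 remains an ordinary slot. An illustrative sketch of the guard this protects, hedged (it mirrors the check idr_replace() is described as performing, not quoted from this patch):

        entry = __radix_tree_lookup(&idr->idr_rt, id, &node, &slot);
        if (!slot || radix_tree_is_internal_node(entry))
                return ERR_PTR(-ENOENT);        /* must not fire for a real entry 0 */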
@@ -719,9 +501,9 @@
 		 * moving the node from one part of the tree to another: if it
 		 * was safe to dereference the old pointer to it
 		 * (node->slots[0]), it will be safe to dereference the new
-		 * one (root->rnode) as far as dependent read barriers go.
+		 * one (root->xa_head) as far as dependent read barriers go.
 		 */
-		root->rnode = (void __rcu *)child;
+		root->xa_head = (void __rcu *)child;
 		if (is_idr(root) && !tag_get(node, IDR_FREE, 0))
 			root_tag_clear(root, IDR_FREE);
 
@@ -746,8 +528,6 @@
 		node->count = 0;
 		if (!radix_tree_is_internal_node(child)) {
 			node->slots[0] = (void __rcu *)RADIX_TREE_RETRY;
-			if (update_node)
-				update_node(node);
 		}
 
 		WARN_ON_ONCE(!list_empty(&node->private_list));
@@ -759,8 +539,7 @@
 }
 
 static bool delete_node(struct radix_tree_root *root,
-			struct radix_tree_node *node,
-			radix_tree_update_node_t update_node)
+			struct radix_tree_node *node)
 {
 	bool deleted = false;
 
@@ -769,9 +548,8 @@
 
 	if (node->count) {
 		if (node_to_entry(node) ==
-				rcu_dereference_raw(root->rnode))
-			deleted |= radix_tree_shrink(root,
-							update_node);
+				rcu_dereference_raw(root->xa_head))
+			deleted |= radix_tree_shrink(root);
 		return deleted;
 	}
 
@@ -786,7 +564,7 @@
 	 */
 	if (!is_idr(root))
 		root_tag_clear_all(root);
-	root->rnode = NULL;
+	root->xa_head = NULL;
 }
 
 WARN_ON_ONCE(!list_empty(&node->private_list));
@@ -803,7 +581,6 @@
  * __radix_tree_create - create a slot in a radix tree
  * @root:	radix tree root
 * @index:	index key
- * @order:	index occupies 2^order aligned slots
 * @nodep:	returns node
 * @slotp:	returns slot
 *
@@ -811,36 +588,34 @@
 * at position @index in the radix tree @root.
 *
 * Until there is more than one item in the tree, no nodes are
- * allocated and @root->rnode is used as a direct slot instead of
+ * allocated and @root->xa_head is used as a direct slot instead of
 * pointing to a node, in which case *@nodep will be NULL.
 *
 * Returns -ENOMEM, or 0 for success.
 */
-int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
-			unsigned order, struct radix_tree_node **nodep,
-			void __rcu ***slotp)
+static int __radix_tree_create(struct radix_tree_root *root,
+		unsigned long index, struct radix_tree_node **nodep,
+		void __rcu ***slotp)
 {
 	struct radix_tree_node *node = NULL, *child;
-	void __rcu **slot = (void __rcu **)&root->rnode;
+	void __rcu **slot = (void __rcu **)&root->xa_head;
 	unsigned long maxindex;
 	unsigned int shift, offset = 0;
-	unsigned long max = index | ((1UL << order) - 1);
+	unsigned long max = index;
 	gfp_t gfp = root_gfp_mask(root);
 
 	shift = radix_tree_load_root(root, &child, &maxindex);
 
 	/* Make sure the tree is high enough. */
-	if (order > 0 && max == ((1UL << order) - 1))
-		max++;
 	if (max > maxindex) {
 		int error = radix_tree_extend(root, gfp, max, shift);
 		if (error < 0)
 			return error;
 		shift = error;
-		child = rcu_dereference_raw(root->rnode);
+		child = rcu_dereference_raw(root->xa_head);
 	}
 
-	while (shift > order) {
+	while (shift > 0) {
 		shift -= RADIX_TREE_MAP_SHIFT;
 		if (child == NULL) {
 			/* Have to add a child node. */
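
Note: with multiorder support gone, the descent consumes the index RADIX_TREE_MAP_SHIFT bits at a time until shift reaches 0. Worked example, assuming the common RADIX_TREE_MAP_SHIFT of 6 (64-slot nodes; CONFIG_BASE_SMALL builds use 4):

        /* index 0x2a73 in a three-level tree: */
        (0x2a73 >> 12) & 0x3f == 0x02   /* slot 2 at shift 12 */
        (0x2a73 >>  6) & 0x3f == 0x29   /* slot 41 at shift 6 */
        (0x2a73 >>  0) & 0x3f == 0x33   /* slot 51 at shift 0 (leaf) */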
@@ -883,8 +658,7 @@
 
 	for (;;) {
 		void *entry = rcu_dereference_raw(child->slots[offset]);
-		if (radix_tree_is_internal_node(entry) &&
-					!is_sibling_entry(child, entry)) {
+		if (xa_is_node(entry) && child->shift) {
 			child = entry_to_node(entry);
 			offset = 0;
 			continue;
@@ -902,96 +676,30 @@
 	}
 }
 
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
 static inline int insert_entries(struct radix_tree_node *node,
-		void __rcu **slot, void *item, unsigned order, bool replace)
-{
-        struct radix_tree_node *child;
-        unsigned i, n, tag, offset, tags = 0;
-
-        if (node) {
-                if (order > node->shift)
-                        n = 1 << (order - node->shift);
-                else
-                        n = 1;
-                offset = get_slot_offset(node, slot);
-        } else {
-                n = 1;
-                offset = 0;
-        }
-
-        if (n > 1) {
-                offset = offset & ~(n - 1);
-                slot = &node->slots[offset];
-        }
-        child = node_to_entry(slot);
-
-        for (i = 0; i < n; i++) {
-                if (slot[i]) {
-                        if (replace) {
-                                node->count--;
-                                for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
-                                        if (tag_get(node, tag, offset + i))
-                                                tags |= 1 << tag;
-                        } else
-                                return -EEXIST;
-                }
-        }
-
-        for (i = 0; i < n; i++) {
-                struct radix_tree_node *old = rcu_dereference_raw(slot[i]);
-                if (i) {
-                        rcu_assign_pointer(slot[i], child);
-                        for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
-                                if (tags & (1 << tag))
-                                        tag_clear(node, tag, offset + i);
-                } else {
-                        rcu_assign_pointer(slot[i], item);
-                        for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
-                                if (tags & (1 << tag))
-                                        tag_set(node, tag, offset);
-                }
-                if (radix_tree_is_internal_node(old) &&
-                                !is_sibling_entry(node, old) &&
-                                (old != RADIX_TREE_RETRY))
-                        radix_tree_free_nodes(old);
-                if (radix_tree_exceptional_entry(old))
-                        node->exceptional--;
-        }
-        if (node) {
-                node->count += n;
-                if (radix_tree_exceptional_entry(item))
-                        node->exceptional += n;
-        }
-        return n;
-}
-#else
-static inline int insert_entries(struct radix_tree_node *node,
-		void __rcu **slot, void *item, unsigned order, bool replace)
+		void __rcu **slot, void *item, bool replace)
 {
 	if (*slot)
 		return -EEXIST;
 	rcu_assign_pointer(*slot, item);
 	if (node) {
 		node->count++;
-		if (radix_tree_exceptional_entry(item))
-			node->exceptional++;
+		if (xa_is_value(item))
+			node->nr_values++;
 	}
 	return 1;
 }
-#endif
 
 /**
  * __radix_tree_insert - insert into a radix tree
  * @root:	radix tree root
 * @index:	index key
- * @order:	key covers the 2^order indices around index
 * @item:	item to insert
 *
 * Insert an item into the radix tree at position @index.
 */
-int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
-			unsigned order, void *item)
+int radix_tree_insert(struct radix_tree_root *root, unsigned long index,
+			void *item)
 {
 	struct radix_tree_node *node;
 	void __rcu **slot;
@@ -999,11 +707,11 @@
 
 	BUG_ON(radix_tree_is_internal_node(item));
 
-	error = __radix_tree_create(root, index, order, &node, &slot);
+	error = __radix_tree_create(root, index, &node, &slot);
 	if (error)
 		return error;
 
-	error = insert_entries(node, slot, item, order, false);
+	error = insert_entries(node, slot, item, false);
 	if (error < 0)
 		return error;
 
@@ -1018,7 +726,7 @@
 
 	return 0;
 }
-EXPORT_SYMBOL(__radix_tree_insert);
+EXPORT_SYMBOL(radix_tree_insert);
 
 /**
  * __radix_tree_lookup - lookup an item in a radix tree
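
Note: the public entry point is now radix_tree_insert() with no order argument, and __radix_tree_create() became static above. Minimal usage sketch (mytree and the index/values are illustrative; real callers provide their own exclusion around updates):

static RADIX_TREE(mytree, GFP_KERNEL);  /* declares and initializes a tree */

static void example(void *item)
{
        if (radix_tree_insert(&mytree, 42, item))       /* 0 on success */
                return;
        WARN_ON(radix_tree_lookup(&mytree, 42) != item);
        WARN_ON(radix_tree_delete(&mytree, 42) != item);
}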
@@ -1031,7 +739,7 @@
 * tree @root.
 *
 * Until there is more than one item in the tree, no nodes are
- * allocated and @root->rnode is used as a direct slot instead of
+ * allocated and @root->xa_head is used as a direct slot instead of
 * pointing to a node, in which case *@nodep will be NULL.
 */
 void *__radix_tree_lookup(const struct radix_tree_root *root,
@@ -1044,7 +752,7 @@
 
 restart:
 	parent = NULL;
-	slot = (void __rcu **)&root->rnode;
+	slot = (void __rcu **)&root->xa_head;
 	radix_tree_load_root(root, &node, &maxindex);
 	if (index > maxindex)
 		return NULL;
@@ -1052,11 +760,13 @@
 	while (radix_tree_is_internal_node(node)) {
 		unsigned offset;
 
-		if (node == RADIX_TREE_RETRY)
-			goto restart;
 		parent = entry_to_node(node);
 		offset = radix_tree_descend(parent, &node, index);
 		slot = parent->slots + offset;
+		if (node == RADIX_TREE_RETRY)
+			goto restart;
+		if (parent->shift == 0)
+			break;
 	}
 
 	if (nodep)
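
Note: the RADIX_TREE_RETRY check moves after the descend, and shift-0 parents now terminate the walk. Lock-free readers still restart from the root on seeing a retry entry, since radix_tree_shrink() can move an entry from node->slots[0] to root->xa_head. Hedged sketch of the documented deref pattern (my_read is an illustrative name):

static void *my_read(struct radix_tree_root *root, unsigned long index)
{
        void __rcu **slot;
        void *entry;

        rcu_read_lock();
repeat:
        slot = radix_tree_lookup_slot(root, index);
        entry = slot ? radix_tree_deref_slot(slot) : NULL;
        if (radix_tree_deref_retry(entry))
                goto repeat;            /* raced with shrink/grow: restart */
        rcu_read_unlock();
        return entry;
}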
@@ -1108,36 +818,12 @@
 }
 EXPORT_SYMBOL(radix_tree_lookup);
 
-static inline void replace_sibling_entries(struct radix_tree_node *node,
-				void __rcu **slot, int count, int exceptional)
-{
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
-        void *ptr = node_to_entry(slot);
-        unsigned offset = get_slot_offset(node, slot) + 1;
-
-        while (offset < RADIX_TREE_MAP_SIZE) {
-                if (rcu_dereference_raw(node->slots[offset]) != ptr)
-                        break;
-                if (count < 0) {
-                        node->slots[offset] = NULL;
-                        node->count--;
-                }
-                node->exceptional += exceptional;
-                offset++;
-        }
-#endif
-}
-
 static void replace_slot(void __rcu **slot, void *item,
-		struct radix_tree_node *node, int count, int exceptional)
+		struct radix_tree_node *node, int count, int values)
 {
-	if (WARN_ON_ONCE(radix_tree_is_internal_node(item)))
-		return;
-
-	if (node && (count || exceptional)) {
+	if (node && (count || values)) {
 		node->count += count;
-		node->exceptional += exceptional;
-		replace_sibling_entries(node, slot, count, exceptional);
+		node->nr_values += values;
 	}
 
 	rcu_assign_pointer(*slot, item);
@@ -1180,37 +866,31 @@
 * @node:	pointer to tree node
 * @slot:	pointer to slot in @node
 * @item:	new item to store in the slot.
- * @update_node:	callback for changing leaf nodes
 *
 * For use with __radix_tree_lookup().  Caller must hold tree write locked
 * across slot lookup and replacement.
 */
 void __radix_tree_replace(struct radix_tree_root *root,
 			struct radix_tree_node *node,
-			void __rcu **slot, void *item,
-			radix_tree_update_node_t update_node)
+			void __rcu **slot, void *item)
 {
 	void *old = rcu_dereference_raw(*slot);
-	int exceptional = !!radix_tree_exceptional_entry(item) -
-				!!radix_tree_exceptional_entry(old);
+	int values = !!xa_is_value(item) - !!xa_is_value(old);
 	int count = calculate_count(root, node, slot, item, old);
 
 	/*
-	 * This function supports replacing exceptional entries and
+	 * This function supports replacing value entries and
 	 * deleting entries, but that needs accounting against the
-	 * node unless the slot is root->rnode.
+	 * node unless the slot is root->xa_head.
 	 */
-	WARN_ON_ONCE(!node && (slot != (void __rcu **)&root->rnode) &&
-			(count || exceptional));
-	replace_slot(slot, item, node, count, exceptional);
+	WARN_ON_ONCE(!node && (slot != (void __rcu **)&root->xa_head) &&
			(count || values));
+	replace_slot(slot, item, node, count, values);
 
 	if (!node)
 		return;
 
-	if (update_node)
-		update_node(node);
-
-	delete_node(root, node, update_node);
+	delete_node(root, node);
 }
 
 /**
@@ -1219,12 +899,12 @@
 * @slot:	pointer to slot
 * @item:	new item to store in the slot.
 *
- * For use with radix_tree_lookup_slot(), radix_tree_gang_lookup_slot(),
+ * For use with radix_tree_lookup_slot() and
 * radix_tree_gang_lookup_tag_slot().  Caller must hold tree write locked
 * across slot lookup and replacement.
 *
 * NOTE: This cannot be used to switch between non-entries (empty slots),
- * regular entries, and exceptional entries, as that requires accounting
+ * regular entries, and value entries, as that requires accounting
 * inside the radix tree node. When switching from one type of entry or
 * deleting, use __radix_tree_lookup() and __radix_tree_replace() or
 * radix_tree_iter_replace().
@@ -1232,7 +912,7 @@
 void radix_tree_replace_slot(struct radix_tree_root *root,
 			void __rcu **slot, void *item)
 {
-	__radix_tree_replace(root, NULL, slot, item, NULL);
+	__radix_tree_replace(root, NULL, slot, item);
 }
 EXPORT_SYMBOL(radix_tree_replace_slot);
 
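Note: "exceptional" entries are now xarray-style value entries, counted in node->nr_values. A hedged sketch of what xa_is_value() recognizes, plus the accounting-safe way to replace entries during iteration (replace_all is an illustrative name; caller holds the tree write locked):

static void replace_all(struct radix_tree_root *root)
{
        struct radix_tree_iter iter;
        void __rcu **slot;
        void *entry = xa_mk_value(42);          /* encode the integer 42 */

        WARN_ON(!xa_is_value(entry));           /* what ->nr_values counts */
        WARN_ON(xa_to_value(entry) != 42);      /* decode it again */

        radix_tree_for_each_slot(slot, root, &iter, 0)
                radix_tree_iter_replace(root, &iter, slot, entry);
}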
@@ -1242,161 +922,15 @@
 * @slot:	pointer to slot
 * @item:	new item to store in the slot.
 *
- * For use with radix_tree_split() and radix_tree_for_each_slot().
- * Caller must hold tree write locked across split and replacement.
+ * For use with radix_tree_for_each_slot().
+ * Caller must hold tree write locked.
 */
 void radix_tree_iter_replace(struct radix_tree_root *root,
				const struct radix_tree_iter *iter,
				void __rcu **slot, void *item)
 {
-	__radix_tree_replace(root, iter->node, slot, item, NULL);
+	__radix_tree_replace(root, iter->node, slot, item);
 }
-
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
-/**
- * radix_tree_join - replace multiple entries with one multiorder entry
- * @root: radix tree root
- * @index: an index inside the new entry
- * @order: order of the new entry
- * @item: new entry
- *
- * Call this function to replace several entries with one larger entry.
- * The existing entries are presumed to not need freeing as a result of
- * this call.
- *
- * The replacement entry will have all the tags set on it that were set
- * on any of the entries it is replacing.
- */
-int radix_tree_join(struct radix_tree_root *root, unsigned long index,
-			unsigned order, void *item)
-{
-        struct radix_tree_node *node;
-        void __rcu **slot;
-        int error;
-
-        BUG_ON(radix_tree_is_internal_node(item));
-
-        error = __radix_tree_create(root, index, order, &node, &slot);
-        if (!error)
-                error = insert_entries(node, slot, item, order, true);
-        if (error > 0)
-                error = 0;
-
-        return error;
-}
-
-/**
- * radix_tree_split - Split an entry into smaller entries
- * @root: radix tree root
- * @index: An index within the large entry
- * @order: Order of new entries
- *
- * Call this function as the first step in replacing a multiorder entry
- * with several entries of lower order.  After this function returns,
- * loop over the relevant portion of the tree using radix_tree_for_each_slot()
- * and call radix_tree_iter_replace() to set up each new entry.
- *
- * The tags from this entry are replicated to all the new entries.
- *
- * The radix tree should be locked against modification during the entire
- * replacement operation.  Lock-free lookups will see RADIX_TREE_RETRY which
- * should prompt RCU walkers to restart the lookup from the root.
- */
-int radix_tree_split(struct radix_tree_root *root, unsigned long index,
-				unsigned order)
-{
-        struct radix_tree_node *parent, *node, *child;
-        void __rcu **slot;
-        unsigned int offset, end;
-        unsigned n, tag, tags = 0;
-        gfp_t gfp = root_gfp_mask(root);
-
-        if (!__radix_tree_lookup(root, index, &parent, &slot))
-                return -ENOENT;
-        if (!parent)
-                return -ENOENT;
-
-        offset = get_slot_offset(parent, slot);
-
-        for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
-                if (tag_get(parent, tag, offset))
-                        tags |= 1 << tag;
-
-        for (end = offset + 1; end < RADIX_TREE_MAP_SIZE; end++) {
-                if (!is_sibling_entry(parent,
-                                rcu_dereference_raw(parent->slots[end])))
-                        break;
-                for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
-                        if (tags & (1 << tag))
-                                tag_set(parent, tag, end);
-                /* rcu_assign_pointer ensures tags are set before RETRY */
-                rcu_assign_pointer(parent->slots[end], RADIX_TREE_RETRY);
-        }
-        rcu_assign_pointer(parent->slots[offset], RADIX_TREE_RETRY);
-        parent->exceptional -= (end - offset);
-
-        if (order == parent->shift)
-                return 0;
-        if (order > parent->shift) {
-                while (offset < end)
-                        offset += insert_entries(parent, &parent->slots[offset],
-                                        RADIX_TREE_RETRY, order, true);
-                return 0;
-        }
-
-        node = parent;
-
-        for (;;) {
-                if (node->shift > order) {
-                        child = radix_tree_node_alloc(gfp, node, root,
-                                        node->shift - RADIX_TREE_MAP_SHIFT,
-                                        offset, 0, 0);
-                        if (!child)
-                                goto nomem;
-                        if (node != parent) {
-                                node->count++;
-                                rcu_assign_pointer(node->slots[offset],
-                                                node_to_entry(child));
-                                for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
-                                        if (tags & (1 << tag))
-                                                tag_set(node, tag, offset);
-                        }
-
-                        node = child;
-                        offset = 0;
-                        continue;
-                }
-
-                n = insert_entries(node, &node->slots[offset],
-                                RADIX_TREE_RETRY, order, false);
-                BUG_ON(n > RADIX_TREE_MAP_SIZE);
-
-                for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
-                        if (tags & (1 << tag))
-                                tag_set(node, tag, offset);
-                offset += n;
-
-                while (offset == RADIX_TREE_MAP_SIZE) {
-                        if (node == parent)
-                                break;
-                        offset = node->offset;
-                        child = node;
-                        node = node->parent;
-                        rcu_assign_pointer(node->slots[offset],
-                                        node_to_entry(child));
-                        offset++;
-                }
-                if ((node == parent) && (offset == end))
-                        return 0;
-        }
-
- nomem:
-        /* Shouldn't happen; did user forget to preload? */
-        /* TODO: free all the allocated nodes */
-        WARN_ON(1);
-        return -ENOMEM;
-}
-#endif
 
 static void node_tag_set(struct radix_tree_root *root,
@@ -1455,18 +989,6 @@
 }
 EXPORT_SYMBOL(radix_tree_tag_set);
 
-/**
- * radix_tree_iter_tag_set - set a tag on the current iterator entry
- * @root:	radix tree root
- * @iter:	iterator state
- * @tag:	tag to set
- */
-void radix_tree_iter_tag_set(struct radix_tree_root *root,
-			const struct radix_tree_iter *iter, unsigned int tag)
-{
-	node_tag_set(root, iter->node, tag, iter_offset(iter));
-}
-
 static void node_tag_clear(struct radix_tree_root *root,
			struct radix_tree_node *node,
			unsigned int tag, unsigned int offset)
@@ -1506,7 +1028,7 @@
 {
 	struct radix_tree_node *node, *parent;
 	unsigned long maxindex;
-	int uninitialized_var(offset);
+	int offset;
 
 	radix_tree_load_root(root, &node, &maxindex);
 	if (index > maxindex)
@@ -1582,14 +1104,6 @@
 }
 EXPORT_SYMBOL(radix_tree_tag_get);
 
-static inline void __set_iter_shift(struct radix_tree_iter *iter,
-					unsigned int shift)
-{
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
-	iter->shift = shift;
-#endif
-}
-
 /* Construct iter->tags bit-mask from node->tags[tag] array */
 static void set_iter_tags(struct radix_tree_iter *iter,
			struct radix_tree_node *node, unsigned offset,
@@ -1616,92 +1130,10 @@
 	}
 }
 
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
-static void __rcu **skip_siblings(struct radix_tree_node **nodep,
-			void __rcu **slot, struct radix_tree_iter *iter)
-{
-        while (iter->index < iter->next_index) {
-                *nodep = rcu_dereference_raw(*slot);
-                if (*nodep && !is_sibling_entry(iter->node, *nodep))
-                        return slot;
-                slot++;
-                iter->index = __radix_tree_iter_add(iter, 1);
-                iter->tags >>= 1;
-        }
-
-        *nodep = NULL;
-        return NULL;
-}
-
-void __rcu **__radix_tree_next_slot(void __rcu **slot,
-				struct radix_tree_iter *iter, unsigned flags)
-{
-        unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
-        struct radix_tree_node *node;
-
-        slot = skip_siblings(&node, slot, iter);
-
-        while (radix_tree_is_internal_node(node)) {
-                unsigned offset;
-                unsigned long next_index;
-
-                if (node == RADIX_TREE_RETRY)
-                        return slot;
-                node = entry_to_node(node);
-                iter->node = node;
-                iter->shift = node->shift;
-
-                if (flags & RADIX_TREE_ITER_TAGGED) {
-                        offset = radix_tree_find_next_bit(node, tag, 0);
-                        if (offset == RADIX_TREE_MAP_SIZE)
-                                return NULL;
-                        slot = &node->slots[offset];
-                        iter->index = __radix_tree_iter_add(iter, offset);
-                        set_iter_tags(iter, node, offset, tag);
-                        node = rcu_dereference_raw(*slot);
-                } else {
-                        offset = 0;
-                        slot = &node->slots[0];
-                        for (;;) {
-                                node = rcu_dereference_raw(*slot);
-                                if (node)
-                                        break;
-                                slot++;
-                                offset++;
-                                if (offset == RADIX_TREE_MAP_SIZE)
-                                        return NULL;
-                        }
-                        iter->index = __radix_tree_iter_add(iter, offset);
-                }
-                if ((flags & RADIX_TREE_ITER_CONTIG) && (offset > 0))
-                        goto none;
-                next_index = (iter->index | shift_maxindex(iter->shift)) + 1;
-                if (next_index < iter->next_index)
-                        iter->next_index = next_index;
-        }
-
-        return slot;
- none:
-        iter->next_index = 0;
-        return NULL;
-}
-EXPORT_SYMBOL(__radix_tree_next_slot);
-#else
-static void __rcu **skip_siblings(struct radix_tree_node **nodep,
-			void __rcu **slot, struct radix_tree_iter *iter)
-{
-        return slot;
-}
-#endif
-
 void __rcu **radix_tree_iter_resume(void __rcu **slot,
					struct radix_tree_iter *iter)
 {
-	struct radix_tree_node *node;
-
-	slot++;
 	iter->index = __radix_tree_iter_add(iter, 1);
-	skip_siblings(&node, slot, iter);
 	iter->next_index = iter->index;
 	iter->tags = 0;
 	return NULL;
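
Note: with sibling entries gone, radix_tree_iter_resume() reduces to bumping the index and invalidating the current chunk, so the next loop turn re-fetches a chunk via radix_tree_next_chunk(). Typical pattern for dropping a lock mid-iteration (scan_with_breaks is an illustrative name):

static void scan_with_breaks(struct radix_tree_root *root, spinlock_t *lock)
{
        struct radix_tree_iter iter;
        void __rcu **slot;

        spin_lock(lock);
        radix_tree_for_each_slot(slot, root, &iter, 0) {
                /* ... process *slot ... */
                if (need_resched()) {
                        slot = radix_tree_iter_resume(slot, &iter);
                        spin_unlock(lock);
                        cond_resched();
                        spin_lock(lock);
                }
        }
        spin_unlock(lock);
}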
@@ -1752,8 +1184,7 @@
 		iter->next_index = maxindex + 1;
 		iter->tags = 1;
 		iter->node = NULL;
-		__set_iter_shift(iter, 0);
-		return (void __rcu **)&root->rnode;
+		return (void __rcu **)&root->xa_head;
 	}
 
 	do {
@@ -1773,8 +1204,6 @@
 		while (++offset < RADIX_TREE_MAP_SIZE) {
 			void *slot = rcu_dereference_raw(
					node->slots[offset]);
-			if (is_sibling_entry(node, slot))
-				continue;
 			if (slot)
 				break;
 		}
@@ -1792,13 +1221,12 @@
 			goto restart;
 		if (child == RADIX_TREE_RETRY)
 			break;
-	} while (radix_tree_is_internal_node(child));
+	} while (node->shift && radix_tree_is_internal_node(child));
 
 	/* Update the iterator state */
-	iter->index = (index &~ node_maxindex(node)) | (offset << node->shift);
+	iter->index = (index &~ node_maxindex(node)) | offset;
 	iter->next_index = (index | node_maxindex(node)) + 1;
 	iter->node = node;
-	__set_iter_shift(iter, node->shift);
 
 	if (flags & RADIX_TREE_ITER_TAGGED)
 		set_iter_tags(iter, node, offset, tag);
@@ -1853,48 +1281,6 @@
 	return ret;
 }
 EXPORT_SYMBOL(radix_tree_gang_lookup);
-
-/**
- * radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
- * @root:	radix tree root
- * @results:	where the results of the lookup are placed
- * @indices:	where their indices should be placed (but usually NULL)
- * @first_index:	start the lookup from this key
- * @max_items:	place up to this many items at *results
- *
- * Performs an index-ascending scan of the tree for present items.  Places
- * their slots at *@results and returns the number of items which were
- * placed at *@results.
- *
- * The implementation is naive.
- *
- * Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
- * be dereferenced with radix_tree_deref_slot, and if using only RCU
- * protection, radix_tree_deref_slot may fail requiring a retry.
- */
-unsigned int
-radix_tree_gang_lookup_slot(const struct radix_tree_root *root,
-			void __rcu ***results, unsigned long *indices,
-			unsigned long first_index, unsigned int max_items)
-{
-        struct radix_tree_iter iter;
-        void __rcu **slot;
-        unsigned int ret = 0;
-
-        if (unlikely(!max_items))
-                return 0;
-
-        radix_tree_for_each_slot(slot, root, &iter, first_index) {
-                results[ret] = slot;
-                if (indices)
-                        indices[ret] = iter.index;
-                if (++ret == max_items)
-                        break;
-        }
-
-        return ret;
-}
-EXPORT_SYMBOL(radix_tree_gang_lookup_slot);
 
 /**
  * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
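
Note: radix_tree_gang_lookup_slot() is removed; former callers iterate directly, which is all its body did anyway. Hedged migration sketch (gather_slots is an illustrative name):

static unsigned int gather_slots(const struct radix_tree_root *root,
                void __rcu ***results, unsigned long first,
                unsigned int max)
{
        struct radix_tree_iter iter;
        void __rcu **slot;
        unsigned int ret = 0;

        if (!max)
                return 0;
        radix_tree_for_each_slot(slot, root, &iter, first) {
                results[ret] = slot;
                if (++ret == max)
                        break;
        }
        return ret;
}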
@@ -1972,28 +1358,11 @@
 }
 EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);
 
-/**
- * __radix_tree_delete_node - try to free node after clearing a slot
- * @root:	radix tree root
- * @node:	node containing @index
- * @update_node:	callback for changing leaf nodes
- *
- * After clearing the slot at @index in @node from radix tree
- * rooted at @root, call this function to attempt freeing the
- * node and shrinking the tree.
- */
-void __radix_tree_delete_node(struct radix_tree_root *root,
-			struct radix_tree_node *node,
-			radix_tree_update_node_t update_node)
-{
-	delete_node(root, node, update_node);
-}
-
 static bool __radix_tree_delete(struct radix_tree_root *root,
		struct radix_tree_node *node, void __rcu **slot)
 {
 	void *old = rcu_dereference_raw(*slot);
-	int exceptional = radix_tree_exceptional_entry(old) ? -1 : 0;
+	int values = xa_is_value(old) ? -1 : 0;
 	unsigned offset = get_slot_offset(node, slot);
 	int tag;
 
@@ -2003,8 +1372,8 @@
 	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
 		node_tag_clear(root, node, tag, offset);
 
-	replace_slot(slot, NULL, node, -1, exceptional);
-	return node && delete_node(root, node, NULL);
+	replace_slot(slot, NULL, node, -1, values);
+	return node && delete_node(root, node);
 }
 
 /**
@@ -2076,19 +1445,6 @@
 }
 EXPORT_SYMBOL(radix_tree_delete);
 
-void radix_tree_clear_tags(struct radix_tree_root *root,
-			struct radix_tree_node *node,
-			void __rcu **slot)
-{
-	if (node) {
-		unsigned int tag, offset = get_slot_offset(node, slot);
-		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
-			node_tag_clear(root, node, tag, offset);
-	} else {
-		root_tag_clear_all(root);
-	}
-}
-
 /**
  * radix_tree_tagged - test whether any items in the tree are tagged
  * @root:	radix tree root
@@ -2110,43 +1466,16 @@
 void idr_preload(gfp_t gfp_mask)
 {
 	if (__radix_tree_preload(gfp_mask, IDR_PRELOAD_SIZE))
-		local_lock(radix_tree_preloads_lock);
+		local_lock(&radix_tree_preloads.lock);
 }
 EXPORT_SYMBOL(idr_preload);
-
-void idr_preload_end(void)
-{
-	local_unlock(radix_tree_preloads_lock);
-}
-EXPORT_SYMBOL(idr_preload_end);
-
-int ida_pre_get(struct ida *ida, gfp_t gfp)
-{
-	/*
-	 * The IDA API has no preload_end() equivalent.  Instead,
-	 * ida_get_new() can return -EAGAIN, prompting the caller
-	 * to return to the ida_pre_get() step.
-	 */
-	if (!__radix_tree_preload(gfp, IDA_PRELOAD_SIZE))
-		local_unlock(radix_tree_preloads_lock);
-
-	if (!this_cpu_read(ida_bitmap)) {
-		struct ida_bitmap *bitmap = kzalloc(sizeof(*bitmap), gfp);
-		if (!bitmap)
-			return 0;
-		if (this_cpu_cmpxchg(ida_bitmap, NULL, bitmap))
-			kfree(bitmap);
-	}
-
-	return 1;
-}
 
 void __rcu **idr_get_free(struct radix_tree_root *root,
			struct radix_tree_iter *iter, gfp_t gfp,
			unsigned long max)
 {
 	struct radix_tree_node *node = NULL, *child;
-	void __rcu **slot = (void __rcu **)&root->rnode;
+	void __rcu **slot = (void __rcu **)&root->xa_head;
 	unsigned long maxindex, start = iter->next_index;
 	unsigned int shift, offset = 0;
 
@@ -2162,8 +1491,10 @@
 		if (error < 0)
 			return ERR_PTR(error);
 		shift = error;
-		child = rcu_dereference_raw(root->rnode);
+		child = rcu_dereference_raw(root->xa_head);
 	}
+	if (start == 0 && shift == 0)
+		shift = RADIX_TREE_MAP_SHIFT;
 
 	while (shift) {
 		shift -= RADIX_TREE_MAP_SHIFT;
@@ -2206,7 +1537,6 @@
 	else
 		iter->next_index = 1;
 	iter->node = node;
-	__set_iter_shift(iter, shift);
 	set_iter_tags(iter, node, offset, IDR_FREE);
 
 	return slot;
@@ -2225,10 +1555,10 @@
 */
 void idr_destroy(struct idr *idr)
 {
-	struct radix_tree_node *node = rcu_dereference_raw(idr->idr_rt.rnode);
+	struct radix_tree_node *node = rcu_dereference_raw(idr->idr_rt.xa_head);
 	if (radix_tree_is_internal_node(node))
 		radix_tree_free_nodes(node);
-	idr->idr_rt.rnode = NULL;
+	idr->idr_rt.xa_head = NULL;
 	root_tag_set(&idr->idr_rt, IDR_FREE);
 }
 EXPORT_SYMBOL(idr_destroy);
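
Note: idr_preload() now takes the local lock on success; idr_preload_end() leaves this file, presumably becoming a header inline that drops radix_tree_preloads.lock. The usual allocation bracket still looks like this (example_idr_alloc and my_lock are illustrative names):

static int example_idr_alloc(struct idr *idr, spinlock_t *my_lock, void *ptr)
{
        int id;

        idr_preload(GFP_KERNEL);        /* takes radix_tree_preloads.lock */
        spin_lock(my_lock);
        id = idr_alloc(idr, ptr, 0, 0, GFP_NOWAIT);     /* 0,0: any id >= 0 */
        spin_unlock(my_lock);
        idr_preload_end();              /* drops the local lock */
        return id;
}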
@@ -2240,31 +1570,6 @@
 
 	memset(node, 0, sizeof(*node));
 	INIT_LIST_HEAD(&node->private_list);
-}
-
-static __init unsigned long __maxindex(unsigned int height)
-{
-	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
-	int shift = RADIX_TREE_INDEX_BITS - width;
-
-	if (shift < 0)
-		return ~0UL;
-	if (shift >= BITS_PER_LONG)
-		return 0UL;
-	return ~0UL >> shift;
-}
-
-static __init void radix_tree_init_maxnodes(void)
-{
-	unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1];
-	unsigned int i, j;
-
-	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
-		height_to_maxindex[i] = __maxindex(i);
-	for (i = 0; i < ARRAY_SIZE(height_to_maxnodes); i++) {
-		for (j = i; j > 0; j--)
-			height_to_maxnodes[i] += height_to_maxindex[j - 1] + 1;
-	}
 }
 
 static int radix_tree_cpu_dead(unsigned int cpu)
@@ -2280,8 +1585,6 @@
 		kmem_cache_free(radix_tree_node_cachep, node);
 		rtp->nr--;
 	}
-	kfree(per_cpu(ida_bitmap, cpu));
-	per_cpu(ida_bitmap, cpu) = NULL;
 	return 0;
 }
 
@@ -2291,11 +1594,11 @@
 
 	BUILD_BUG_ON(RADIX_TREE_MAX_TAGS + __GFP_BITS_SHIFT > 32);
 	BUILD_BUG_ON(ROOT_IS_IDR & ~GFP_ZONEMASK);
+	BUILD_BUG_ON(XA_CHUNK_SIZE > 255);
 	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
-	radix_tree_init_maxnodes();
 	ret = cpuhp_setup_state_nocalls(CPUHP_RADIX_DEAD, "lib/radix:dead",
			NULL, radix_tree_cpu_dead);
 	WARN_ON(ret < 0);