2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/tools/testing/radix-tree/multiorder.c
@@ -1,17 +1,9 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * multiorder.c: Multi-order radix tree entry testing
  * Copyright (c) 2016 Intel Corporation
  * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
  * Author: Matthew Wilcox <matthew.r.wilcox@intel.com>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms and conditions of the GNU General Public License,
- * version 2, as published by the Free Software Foundation.
- *
- * This program is distributed in the hope it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
- * more details.
  */
 #include <linux/radix-tree.h>
 #include <linux/slab.h>
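
This commit converts the multiorder test suite from the old radix tree API to the XArray API: RADIX_TREE() roots become struct xarray, radix_tree_iter cursors become XA_STATE() operating states, and numbered tags become XA_MARK_0/XA_MARK_1/XA_MARK_2. As orientation, a minimal sketch of the normal XArray calls that correspond to the radix tree calls deleted below (the xa_* functions are the real API; some_item is an illustrative pointer):

	DEFINE_XARRAY(xa);
	unsigned long index;
	void *entry;

	xa_store(&xa, 5, some_item, GFP_KERNEL);	/* was radix_tree_insert() */
	xa_set_mark(&xa, 5, XA_MARK_1);			/* was radix_tree_tag_set(..., 1) */
	xa_for_each(&xa, index, entry)			/* was radix_tree_for_each_slot() */
		assert(entry == some_item);
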
@@ -20,230 +12,39 @@
 
 #include "test.h"
 
-#define for_each_index(i, base, order) \
-	for (i = base; i < base + (1 << order); i++)
-
-static void __multiorder_tag_test(int index, int order)
+static int item_insert_order(struct xarray *xa, unsigned long index,
+			unsigned order)
 {
-	RADIX_TREE(tree, GFP_KERNEL);
-	int base, err, i;
+	XA_STATE_ORDER(xas, xa, index, order);
+	struct item *item = item_create(index, order);
 
-	/* our canonical entry */
-	base = index & ~((1 << order) - 1);
+	do {
+		xas_lock(&xas);
+		xas_store(&xas, item);
+		xas_unlock(&xas);
+	} while (xas_nomem(&xas, GFP_KERNEL));
 
-	printv(2, "Multiorder tag test with index %d, canonical entry %d\n",
-			index, base);
+	if (!xas_error(&xas))
+		return 0;
 
-	err = item_insert_order(&tree, index, order);
-	assert(!err);
-
-	/*
-	 * Verify we get collisions for covered indices. We try and fail to
-	 * insert an exceptional entry so we don't leak memory via
-	 * item_insert_order().
-	 */
-	for_each_index(i, base, order) {
-		err = __radix_tree_insert(&tree, i, order,
-				(void *)(0xA0 | RADIX_TREE_EXCEPTIONAL_ENTRY));
-		assert(err == -EEXIST);
-	}
-
-	for_each_index(i, base, order) {
-		assert(!radix_tree_tag_get(&tree, i, 0));
-		assert(!radix_tree_tag_get(&tree, i, 1));
-	}
-
-	assert(radix_tree_tag_set(&tree, index, 0));
-
-	for_each_index(i, base, order) {
-		assert(radix_tree_tag_get(&tree, i, 0));
-		assert(!radix_tree_tag_get(&tree, i, 1));
-	}
-
-	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 1);
-	assert(radix_tree_tag_clear(&tree, index, 0));
-
-	for_each_index(i, base, order) {
-		assert(!radix_tree_tag_get(&tree, i, 0));
-		assert(radix_tree_tag_get(&tree, i, 1));
-	}
-
-	assert(radix_tree_tag_clear(&tree, index, 1));
-
-	assert(!radix_tree_tagged(&tree, 0));
-	assert(!radix_tree_tagged(&tree, 1));
-
-	item_kill_tree(&tree);
+	free(item);
+	return xas_error(&xas);
 }
 
-static void __multiorder_tag_test2(unsigned order, unsigned long index2)
+void multiorder_iteration(struct xarray *xa)
 {
-	RADIX_TREE(tree, GFP_KERNEL);
-	unsigned long index = (1 << order);
-	index2 += index;
-
-	assert(item_insert_order(&tree, 0, order) == 0);
-	assert(item_insert(&tree, index2) == 0);
-
-	assert(radix_tree_tag_set(&tree, 0, 0));
-	assert(radix_tree_tag_set(&tree, index2, 0));
-
-	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 2);
-
-	item_kill_tree(&tree);
-}
-
-static void multiorder_tag_tests(void)
-{
-	int i, j;
-
-	/* test multi-order entry for indices 0-7 with no sibling pointers */
-	__multiorder_tag_test(0, 3);
-	__multiorder_tag_test(5, 3);
-
-	/* test multi-order entry for indices 8-15 with no sibling pointers */
-	__multiorder_tag_test(8, 3);
-	__multiorder_tag_test(15, 3);
-
-	/*
-	 * Our order 5 entry covers indices 0-31 in a tree with height=2.
-	 * This is broken up as follows:
-	 * 0-7: canonical entry
-	 * 8-15: sibling 1
-	 * 16-23: sibling 2
-	 * 24-31: sibling 3
-	 */
-	__multiorder_tag_test(0, 5);
-	__multiorder_tag_test(29, 5);
-
-	/* same test, but with indices 32-63 */
-	__multiorder_tag_test(32, 5);
-	__multiorder_tag_test(44, 5);
-
-	/*
-	 * Our order 8 entry covers indices 0-255 in a tree with height=3.
-	 * This is broken up as follows:
-	 * 0-63: canonical entry
-	 * 64-127: sibling 1
-	 * 128-191: sibling 2
-	 * 192-255: sibling 3
-	 */
-	__multiorder_tag_test(0, 8);
-	__multiorder_tag_test(190, 8);
-
-	/* same test, but with indices 256-511 */
-	__multiorder_tag_test(256, 8);
-	__multiorder_tag_test(300, 8);
-
-	__multiorder_tag_test(0x12345678UL, 8);
-
-	for (i = 1; i < 10; i++)
-		for (j = 0; j < (10 << i); j++)
-			__multiorder_tag_test2(i, j);
-}
-
-static void multiorder_check(unsigned long index, int order)
-{
-	unsigned long i;
-	unsigned long min = index & ~((1UL << order) - 1);
-	unsigned long max = min + (1UL << order);
-	void **slot;
-	struct item *item2 = item_create(min, order);
-	RADIX_TREE(tree, GFP_KERNEL);
-
-	printv(2, "Multiorder index %ld, order %d\n", index, order);
-
-	assert(item_insert_order(&tree, index, order) == 0);
-
-	for (i = min; i < max; i++) {
-		struct item *item = item_lookup(&tree, i);
-		assert(item != 0);
-		assert(item->index == index);
-	}
-	for (i = 0; i < min; i++)
-		item_check_absent(&tree, i);
-	for (i = max; i < 2*max; i++)
-		item_check_absent(&tree, i);
-	for (i = min; i < max; i++)
-		assert(radix_tree_insert(&tree, i, item2) == -EEXIST);
-
-	slot = radix_tree_lookup_slot(&tree, index);
-	free(*slot);
-	radix_tree_replace_slot(&tree, slot, item2);
-	for (i = min; i < max; i++) {
-		struct item *item = item_lookup(&tree, i);
-		assert(item != 0);
-		assert(item->index == min);
-	}
-
-	assert(item_delete(&tree, min) != 0);
-
-	for (i = 0; i < 2*max; i++)
-		item_check_absent(&tree, i);
-}
-
-static void multiorder_shrink(unsigned long index, int order)
-{
-	unsigned long i;
-	unsigned long max = 1 << order;
-	RADIX_TREE(tree, GFP_KERNEL);
-	struct radix_tree_node *node;
-
-	printv(2, "Multiorder shrink index %ld, order %d\n", index, order);
-
-	assert(item_insert_order(&tree, 0, order) == 0);
-
-	node = tree.rnode;
-
-	assert(item_insert(&tree, index) == 0);
-	assert(node != tree.rnode);
-
-	assert(item_delete(&tree, index) != 0);
-	assert(node == tree.rnode);
-
-	for (i = 0; i < max; i++) {
-		struct item *item = item_lookup(&tree, i);
-		assert(item != 0);
-		assert(item->index == 0);
-	}
-	for (i = max; i < 2*max; i++)
-		item_check_absent(&tree, i);
-
-	if (!item_delete(&tree, 0)) {
-		printv(2, "failed to delete index %ld (order %d)\n", index, order);
-		abort();
-	}
-
-	for (i = 0; i < 2*max; i++)
-		item_check_absent(&tree, i);
-}
-
-static void multiorder_insert_bug(void)
-{
-	RADIX_TREE(tree, GFP_KERNEL);
-
-	item_insert(&tree, 0);
-	radix_tree_tag_set(&tree, 0, 0);
-	item_insert_order(&tree, 3 << 6, 6);
-
-	item_kill_tree(&tree);
-}
-
-void multiorder_iteration(void)
-{
-	RADIX_TREE(tree, GFP_KERNEL);
-	struct radix_tree_iter iter;
-	void **slot;
+	XA_STATE(xas, xa, 0);
+	struct item *item;
 	int i, j, err;
-
-	printv(1, "Multiorder iteration test\n");
 
 #define NUM_ENTRIES 11
 	int index[NUM_ENTRIES] = {0, 2, 4, 8, 16, 32, 34, 36, 64, 72, 128};
 	int order[NUM_ENTRIES] = {1, 1, 2, 3, 4, 1, 0, 1, 3, 0, 7};
 
+	printv(1, "Multiorder iteration test\n");
+
 	for (i = 0; i < NUM_ENTRIES; i++) {
-		err = item_insert_order(&tree, index[i], order[i]);
+		err = item_insert_order(xa, index[i], order[i]);
 		assert(!err);
 	}
 
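
A multi-order entry occupies one aligned power-of-two span of indices; the deleted for_each_index() helper made this explicit by walking from base = index & ~((1 << order) - 1). The assertions in the next hunk depend on the same arithmetic: every index covered by an entry compares equal to the stored index under the mask (1UL << order) - 1. A worked example of the covering arithmetic (plain C, values illustrative):

	unsigned long index = 5, order = 3;
	unsigned long mask = (1UL << order) - 1;	/* 7 */
	unsigned long base = index & ~mask;		/* 0 */

	/*
	 * An order-3 entry stored at index 5 covers indices 0-7,
	 * so (i | mask) == (index | mask) for every covered i.
	 */
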
@@ -252,14 +53,14 @@
 		if (j <= (index[i] | ((1 << order[i]) - 1)))
 			break;
 
-		radix_tree_for_each_slot(slot, &tree, &iter, j) {
-			int height = order[i] / RADIX_TREE_MAP_SHIFT;
-			int shift = height * RADIX_TREE_MAP_SHIFT;
+		xas_set(&xas, j);
+		xas_for_each(&xas, item, ULONG_MAX) {
+			int height = order[i] / XA_CHUNK_SHIFT;
+			int shift = height * XA_CHUNK_SHIFT;
 			unsigned long mask = (1UL << order[i]) - 1;
-			struct item *item = *slot;
 
-			assert((iter.index | mask) == (index[i] | mask));
-			assert(iter.shift == shift);
+			assert((xas.xa_index | mask) == (index[i] | mask));
+			assert(xas.xa_node->shift == shift);
 			assert(!radix_tree_is_internal_node(item));
 			assert((item->index | mask) == (index[i] | mask));
 			assert(item->order == order[i]);
@@ -267,17 +68,14 @@
 		}
 	}
 
-	item_kill_tree(&tree);
+	item_kill_tree(xa);
 }
 
-void multiorder_tagged_iteration(void)
+void multiorder_tagged_iteration(struct xarray *xa)
 {
-	RADIX_TREE(tree, GFP_KERNEL);
-	struct radix_tree_iter iter;
-	void **slot;
+	XA_STATE(xas, xa, 0);
+	struct item *item;
 	int i, j;
-
-	printv(1, "Multiorder tagged iteration test\n");
 
 #define MT_NUM_ENTRIES 9
 	int index[MT_NUM_ENTRIES] = {0, 2, 4, 16, 32, 40, 64, 72, 128};
@@ -286,13 +84,15 @@
 #define TAG_ENTRIES 7
 	int tag_index[TAG_ENTRIES] = {0, 4, 16, 40, 64, 72, 128};
 
-	for (i = 0; i < MT_NUM_ENTRIES; i++)
-		assert(!item_insert_order(&tree, index[i], order[i]));
+	printv(1, "Multiorder tagged iteration test\n");
 
-	assert(!radix_tree_tagged(&tree, 1));
+	for (i = 0; i < MT_NUM_ENTRIES; i++)
+		assert(!item_insert_order(xa, index[i], order[i]));
+
+	assert(!xa_marked(xa, XA_MARK_1));
 
 	for (i = 0; i < TAG_ENTRIES; i++)
-		assert(radix_tree_tag_set(&tree, tag_index[i], 1));
+		xa_set_mark(xa, tag_index[i], XA_MARK_1);
 
 	for (j = 0; j < 256; j++) {
 		int k;
@@ -304,23 +104,23 @@
 			break;
 		}
 
-		radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
+		xas_set(&xas, j);
+		xas_for_each_marked(&xas, item, ULONG_MAX, XA_MARK_1) {
 			unsigned long mask;
-			struct item *item = *slot;
 			for (k = i; index[k] < tag_index[i]; k++)
 				;
 			mask = (1UL << order[k]) - 1;
 
-			assert((iter.index | mask) == (tag_index[i] | mask));
-			assert(!radix_tree_is_internal_node(item));
+			assert((xas.xa_index | mask) == (tag_index[i] | mask));
+			assert(!xa_is_internal(item));
 			assert((item->index | mask) == (tag_index[i] | mask));
 			assert(item->order == order[k]);
 			i++;
 		}
 	}
 
-	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, TAG_ENTRIES, 1, 2) ==
-			TAG_ENTRIES);
+	assert(tag_tagged_items(xa, 0, ULONG_MAX, TAG_ENTRIES, XA_MARK_1,
+				XA_MARK_2) == TAG_ENTRIES);
 
 	for (j = 0; j < 256; j++) {
 		int mask, k;
@@ -332,297 +132,31 @@
 			break;
 		}
 
-		radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
-			struct item *item = *slot;
+		xas_set(&xas, j);
+		xas_for_each_marked(&xas, item, ULONG_MAX, XA_MARK_2) {
 			for (k = i; index[k] < tag_index[i]; k++)
 				;
 			mask = (1 << order[k]) - 1;
 
-			assert((iter.index | mask) == (tag_index[i] | mask));
-			assert(!radix_tree_is_internal_node(item));
+			assert((xas.xa_index | mask) == (tag_index[i] | mask));
+			assert(!xa_is_internal(item));
 			assert((item->index | mask) == (tag_index[i] | mask));
 			assert(item->order == order[k]);
 			i++;
 		}
 	}
 
-	assert(tag_tagged_items(&tree, NULL, 1, ~0UL, MT_NUM_ENTRIES * 2, 1, 0)
-			== TAG_ENTRIES);
+	assert(tag_tagged_items(xa, 1, ULONG_MAX, MT_NUM_ENTRIES * 2, XA_MARK_1,
+				XA_MARK_0) == TAG_ENTRIES);
 	i = 0;
-	radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
-		assert(iter.index == tag_index[i]);
+	xas_set(&xas, 0);
+	xas_for_each_marked(&xas, item, ULONG_MAX, XA_MARK_0) {
+		assert(xas.xa_index == tag_index[i]);
 		i++;
 	}
+	assert(i == TAG_ENTRIES);
 
-	item_kill_tree(&tree);
-}
-
-/*
- * Basic join checks: make sure we can't find an entry in the tree after
- * a larger entry has replaced it
- */
-static void multiorder_join1(unsigned long index,
-				unsigned order1, unsigned order2)
-{
-	unsigned long loc;
-	void *item, *item2 = item_create(index + 1, order1);
-	RADIX_TREE(tree, GFP_KERNEL);
-
-	item_insert_order(&tree, index, order2);
-	item = radix_tree_lookup(&tree, index);
-	radix_tree_join(&tree, index + 1, order1, item2);
-	loc = find_item(&tree, item);
-	if (loc == -1)
-		free(item);
-	item = radix_tree_lookup(&tree, index + 1);
-	assert(item == item2);
-	item_kill_tree(&tree);
-}
-
-/*
- * Check that the accounting of exceptional entries is handled correctly
- * by joining an exceptional entry to a normal pointer.
- */
-static void multiorder_join2(unsigned order1, unsigned order2)
-{
-	RADIX_TREE(tree, GFP_KERNEL);
-	struct radix_tree_node *node;
-	void *item1 = item_create(0, order1);
-	void *item2;
-
-	item_insert_order(&tree, 0, order2);
-	radix_tree_insert(&tree, 1 << order2, (void *)0x12UL);
-	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
-	assert(item2 == (void *)0x12UL);
-	assert(node->exceptional == 1);
-
-	item2 = radix_tree_lookup(&tree, 0);
-	free(item2);
-
-	radix_tree_join(&tree, 0, order1, item1);
-	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
-	assert(item2 == item1);
-	assert(node->exceptional == 0);
-	item_kill_tree(&tree);
-}
-
-/*
- * This test revealed an accounting bug for exceptional entries at one point.
- * Nodes were being freed back into the pool with an elevated exception count
- * by radix_tree_join() and then radix_tree_split() was failing to zero the
- * count of exceptional entries.
- */
-static void multiorder_join3(unsigned int order)
-{
-	RADIX_TREE(tree, GFP_KERNEL);
-	struct radix_tree_node *node;
-	void **slot;
-	struct radix_tree_iter iter;
-	unsigned long i;
-
-	for (i = 0; i < (1 << order); i++) {
-		radix_tree_insert(&tree, i, (void *)0x12UL);
-	}
-
-	radix_tree_join(&tree, 0, order, (void *)0x16UL);
-	rcu_barrier();
-
-	radix_tree_split(&tree, 0, 0);
-
-	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
-		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x12UL);
-	}
-
-	__radix_tree_lookup(&tree, 0, &node, NULL);
-	assert(node->exceptional == node->count);
-
-	item_kill_tree(&tree);
-}
-
-static void multiorder_join(void)
-{
-	int i, j, idx;
-
-	for (idx = 0; idx < 1024; idx = idx * 2 + 3) {
-		for (i = 1; i < 15; i++) {
-			for (j = 0; j < i; j++) {
-				multiorder_join1(idx, i, j);
-			}
-		}
-	}
-
-	for (i = 1; i < 15; i++) {
-		for (j = 0; j < i; j++) {
-			multiorder_join2(i, j);
-		}
-	}
-
-	for (i = 3; i < 10; i++) {
-		multiorder_join3(i);
-	}
-}
-
-static void check_mem(unsigned old_order, unsigned new_order, unsigned alloc)
-{
-	struct radix_tree_preload *rtp = &radix_tree_preloads;
-	if (rtp->nr != 0)
-		printv(2, "split(%u %u) remaining %u\n", old_order, new_order,
-				rtp->nr);
-	/*
-	 * Can't check for equality here as some nodes may have been
-	 * RCU-freed while we ran. But we should never finish with more
-	 * nodes allocated since they should have all been preloaded.
-	 */
-	if (nr_allocated > alloc)
-		printv(2, "split(%u %u) allocated %u %u\n", old_order, new_order,
-				alloc, nr_allocated);
-}
-
-static void __multiorder_split(int old_order, int new_order)
-{
-	RADIX_TREE(tree, GFP_ATOMIC);
-	void **slot;
-	struct radix_tree_iter iter;
-	unsigned alloc;
-	struct item *item;
-
-	radix_tree_preload(GFP_KERNEL);
-	assert(item_insert_order(&tree, 0, old_order) == 0);
-	radix_tree_preload_end();
-
-	/* Wipe out the preloaded cache or it'll confuse check_mem() */
-	radix_tree_cpu_dead(0);
-
-	item = radix_tree_tag_set(&tree, 0, 2);
-
-	radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
-	alloc = nr_allocated;
-	radix_tree_split(&tree, 0, new_order);
-	check_mem(old_order, new_order, alloc);
-	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
-		radix_tree_iter_replace(&tree, &iter, slot,
-				item_create(iter.index, new_order));
-	}
-	radix_tree_preload_end();
-
-	item_kill_tree(&tree);
-	free(item);
-}
-
-static void __multiorder_split2(int old_order, int new_order)
-{
-	RADIX_TREE(tree, GFP_KERNEL);
-	void **slot;
-	struct radix_tree_iter iter;
-	struct radix_tree_node *node;
-	void *item;
-
-	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);
-
-	item = __radix_tree_lookup(&tree, 0, &node, NULL);
-	assert(item == (void *)0x12);
-	assert(node->exceptional > 0);
-
-	radix_tree_split(&tree, 0, new_order);
-	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
-		radix_tree_iter_replace(&tree, &iter, slot,
-				item_create(iter.index, new_order));
-	}
-
-	item = __radix_tree_lookup(&tree, 0, &node, NULL);
-	assert(item != (void *)0x12);
-	assert(node->exceptional == 0);
-
-	item_kill_tree(&tree);
-}
-
-static void __multiorder_split3(int old_order, int new_order)
-{
-	RADIX_TREE(tree, GFP_KERNEL);
-	void **slot;
-	struct radix_tree_iter iter;
-	struct radix_tree_node *node;
-	void *item;
-
-	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);
-
-	item = __radix_tree_lookup(&tree, 0, &node, NULL);
-	assert(item == (void *)0x12);
-	assert(node->exceptional > 0);
-
-	radix_tree_split(&tree, 0, new_order);
-	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
-		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x16);
-	}
-
-	item = __radix_tree_lookup(&tree, 0, &node, NULL);
-	assert(item == (void *)0x16);
-	assert(node->exceptional > 0);
-
-	item_kill_tree(&tree);
-
-	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);
-
-	item = __radix_tree_lookup(&tree, 0, &node, NULL);
-	assert(item == (void *)0x12);
-	assert(node->exceptional > 0);
-
-	radix_tree_split(&tree, 0, new_order);
-	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
-		if (iter.index == (1 << new_order))
-			radix_tree_iter_replace(&tree, &iter, slot,
-					(void *)0x16);
-		else
-			radix_tree_iter_replace(&tree, &iter, slot, NULL);
-	}
-
-	item = __radix_tree_lookup(&tree, 1 << new_order, &node, NULL);
-	assert(item == (void *)0x16);
-	assert(node->count == node->exceptional);
-	do {
-		node = node->parent;
-		if (!node)
-			break;
-		assert(node->count == 1);
-		assert(node->exceptional == 0);
-	} while (1);
-
-	item_kill_tree(&tree);
-}
-
-static void multiorder_split(void)
-{
-	int i, j;
-
-	for (i = 3; i < 11; i++)
-		for (j = 0; j < i; j++) {
-			__multiorder_split(i, j);
-			__multiorder_split2(i, j);
-			__multiorder_split3(i, j);
-		}
-}
-
-static void multiorder_account(void)
-{
-	RADIX_TREE(tree, GFP_KERNEL);
-	struct radix_tree_node *node;
-	void **slot;
-
-	item_insert_order(&tree, 0, 5);
-
-	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
-	__radix_tree_lookup(&tree, 0, &node, NULL);
-	assert(node->count == node->exceptional * 2);
-	radix_tree_delete(&tree, 1 << 5);
-	assert(node->exceptional == 0);
-
-	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
-	__radix_tree_lookup(&tree, 1 << 5, &node, &slot);
-	assert(node->count == node->exceptional * 2);
-	__radix_tree_replace(&tree, node, slot, NULL, NULL);
-	assert(node->exceptional == 0);
-
-	item_kill_tree(&tree);
+	item_kill_tree(xa);
 }
 
 bool stop_iteration = false;
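
The next hunk replaces the radix_tree_deref_slot()/radix_tree_deref_retry() dance with xas_retry(). Under rcu_read_lock(), a lockless walk can observe a transient internal entry while a concurrent writer (the creator_func() thread in this test) reshapes the structure; xas_retry() recognises such an entry and resets the cursor so the walk can continue. A minimal sketch of the reader-side idiom, assuming a struct xarray *xa that another thread is modifying:

	XA_STATE(xas, xa, 0);
	struct item *item;

	rcu_read_lock();
	xas_for_each(&xas, item, ULONG_MAX) {
		if (xas_retry(&xas, item))	/* raced with a writer */
			continue;
		/* item remains valid until rcu_read_unlock() */
	}
	rcu_read_unlock();
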
@@ -645,75 +179,54 @@
 
 static void *iterator_func(void *ptr)
 {
-	struct radix_tree_root *tree = ptr;
-	struct radix_tree_iter iter;
+	XA_STATE(xas, ptr, 0);
 	struct item *item;
-	void **slot;
 
 	while (!stop_iteration) {
 		rcu_read_lock();
-		radix_tree_for_each_slot(slot, tree, &iter, 0) {
-			item = radix_tree_deref_slot(slot);
-
-			if (!item)
+		xas_for_each(&xas, item, ULONG_MAX) {
+			if (xas_retry(&xas, item))
 				continue;
-			if (radix_tree_deref_retry(item)) {
-				slot = radix_tree_iter_retry(&iter);
-				continue;
-			}
 
-			item_sanity(item, iter.index);
+			item_sanity(item, xas.xa_index);
 		}
 		rcu_read_unlock();
 	}
 	return NULL;
 }
 
-static void multiorder_iteration_race(void)
+static void multiorder_iteration_race(struct xarray *xa)
 {
 	const int num_threads = sysconf(_SC_NPROCESSORS_ONLN);
 	pthread_t worker_thread[num_threads];
-	RADIX_TREE(tree, GFP_KERNEL);
 	int i;
 
-	pthread_create(&worker_thread[0], NULL, &creator_func, &tree);
+	pthread_create(&worker_thread[0], NULL, &creator_func, xa);
 	for (i = 1; i < num_threads; i++)
-		pthread_create(&worker_thread[i], NULL, &iterator_func, &tree);
+		pthread_create(&worker_thread[i], NULL, &iterator_func, xa);
 
 	for (i = 0; i < num_threads; i++)
 		pthread_join(worker_thread[i], NULL);
 
-	item_kill_tree(&tree);
+	item_kill_tree(xa);
 }
+
+static DEFINE_XARRAY(array);
 
 void multiorder_checks(void)
 {
-	int i;
-
-	for (i = 0; i < 20; i++) {
-		multiorder_check(200, i);
-		multiorder_check(0, i);
-		multiorder_check((1UL << i) + 1, i);
-	}
-
-	for (i = 0; i < 15; i++)
-		multiorder_shrink((1UL << (i + RADIX_TREE_MAP_SHIFT)), i);
-
-	multiorder_insert_bug();
-	multiorder_tag_tests();
-	multiorder_iteration();
-	multiorder_tagged_iteration();
-	multiorder_join();
-	multiorder_split();
-	multiorder_account();
-	multiorder_iteration_race();
+	multiorder_iteration(&array);
+	multiorder_tagged_iteration(&array);
+	multiorder_iteration_race(&array);
 
 	radix_tree_cpu_dead(0);
 }
 
 int __weak main(void)
 {
+	rcu_register_thread();
 	radix_tree_init();
 	multiorder_checks();
+	rcu_unregister_thread();
 	return 0;
 }
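
main() now brackets the run with rcu_register_thread()/rcu_unregister_thread(): the userspace harness implements RCU with liburcu, which requires each thread to register itself before entering a read-side critical section such as the xas_for_each() walks above. A general sketch of that contract for any additional worker thread (illustrative; not part of this file):

	static void *reader_thread(void *arg)
	{
		rcu_register_thread();	/* required before rcu_read_lock() */
		rcu_read_lock();
		/* ... lockless xas_for_each() walk ... */
		rcu_read_unlock();
		rcu_unregister_thread();
		return NULL;
	}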