2023-11-30 6c9be420e167ee7ce45c0309586f09ddab28ac15
--- a/kernel/lib/radix-tree.c
+++ b/kernel/lib/radix-tree.c
@@ -38,7 +38,7 @@
 #include <linux/rcupdate.h>
 #include <linux/slab.h>
 #include <linux/string.h>
-
+#include <linux/locallock.h>
 
 /* Number of nodes in fully populated tree of given height */
 static unsigned long height_to_maxnodes[RADIX_TREE_MAX_PATH + 1] __read_mostly;
@@ -87,6 +87,7 @@
 	struct radix_tree_node *nodes;
 };
 static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
+static DEFINE_LOCAL_IRQ_LOCK(radix_tree_preloads_lock);
 
 static inline struct radix_tree_node *entry_to_node(void *ptr)
 {
@@ -405,12 +406,13 @@
 		 * succeed in getting a node here (and never reach
 		 * kmem_cache_alloc)
 		 */
-		rtp = this_cpu_ptr(&radix_tree_preloads);
+		rtp = &get_locked_var(radix_tree_preloads_lock, radix_tree_preloads);
 		if (rtp->nr) {
 			ret = rtp->nodes;
 			rtp->nodes = ret->parent;
 			rtp->nr--;
 		}
+		put_locked_var(radix_tree_preloads_lock, radix_tree_preloads);
 		/*
 		 * Update the allocation stack trace as this is more useful
 		 * for debugging.
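The hunks above introduce radix_tree_preloads_lock and make the node allocator reach the per-CPU radix_tree_preloads pool through the locked accessors instead of a bare this_cpu_ptr() that relied on preemption being disabled. A minimal sketch of that accessor pattern, assuming the RT locallock semantics implied here (get_locked_var() acquires the CPU-local lock and evaluates to this CPU's instance of the variable, put_locked_var() releases it); my_lock and my_counter are hypothetical names:

#include <linux/percpu.h>
#include <linux/locallock.h>

static DEFINE_PER_CPU(int, my_counter);
static DEFINE_LOCAL_IRQ_LOCK(my_lock);

static void my_counter_bump(void)
{
	int *p;

	/* Take the CPU-local lock and get this CPU's instance. */
	p = &get_locked_var(my_lock, my_counter);
	(*p)++;
	/* Release the lock; on non-RT builds this is expected to collapse
	 * back to the old preempt_disable()/preempt_enable() behaviour. */
	put_locked_var(my_lock, my_counter);
}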
@@ -476,14 +478,14 @@
 	 */
 	gfp_mask &= ~__GFP_ACCOUNT;
 
-	preempt_disable();
+	local_lock(radix_tree_preloads_lock);
 	rtp = this_cpu_ptr(&radix_tree_preloads);
 	while (rtp->nr < nr) {
-		preempt_enable();
+		local_unlock(radix_tree_preloads_lock);
 		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
 		if (node == NULL)
 			goto out;
-		preempt_disable();
+		local_lock(radix_tree_preloads_lock);
 		rtp = this_cpu_ptr(&radix_tree_preloads);
 		if (rtp->nr < nr) {
 			node->parent = rtp->nodes;
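The refill loop in __radix_tree_preload() keeps the shape it had with preempt_disable(): the lock is dropped around kmem_cache_alloc(), which may sleep, and after it is retaken both the per-CPU pointer and the fill level are re-read, since the task may have migrated to another CPU or the pool may have been refilled in the meantime. A sketch of that drop-allocate-recheck pattern on a hypothetical per-CPU free list (pcp_pool, pool_lock and pnode are made up for illustration):

#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/locallock.h>

struct pnode {
	struct pnode *next;
};

struct pcp_pool {
	unsigned int nr;
	struct pnode *head;
};

static DEFINE_PER_CPU(struct pcp_pool, pool);
static DEFINE_LOCAL_IRQ_LOCK(pool_lock);

/* Fill this CPU's pool to at least @want nodes; on success we return
 * with the local lock still held, mirroring __radix_tree_preload(). */
static int pool_fill(gfp_t gfp, unsigned int want)
{
	struct pcp_pool *p;
	struct pnode *n;

	local_lock(pool_lock);
	p = this_cpu_ptr(&pool);
	while (p->nr < want) {
		local_unlock(pool_lock);	/* allocation may sleep */
		n = kmalloc(sizeof(*n), gfp);
		if (!n)
			return -ENOMEM;		/* lock already dropped */
		local_lock(pool_lock);
		p = this_cpu_ptr(&pool);	/* we may have migrated */
		if (p->nr < want) {
			n->next = p->head;
			p->head = n;
			p->nr++;
		} else {
			kfree(n);		/* pool was refilled meanwhile */
		}
	}
	return 0;
}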
@@ -525,7 +527,7 @@
 	if (gfpflags_allow_blocking(gfp_mask))
 		return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
 	/* Preloading doesn't help anything with this gfp mask, skip it */
-	preempt_disable();
+	local_lock(radix_tree_preloads_lock);
 	return 0;
 }
 EXPORT_SYMBOL(radix_tree_maybe_preload);
@@ -563,7 +565,7 @@
 
 	/* Preloading doesn't help anything with this gfp mask, skip it */
 	if (!gfpflags_allow_blocking(gfp_mask)) {
-		preempt_disable();
+		local_lock(radix_tree_preloads_lock);
 		return 0;
 	}
 
@@ -596,6 +598,12 @@
 
 	return __radix_tree_preload(gfp_mask, nr_nodes);
 }
+
+void radix_tree_preload_end(void)
+{
+	local_unlock(radix_tree_preloads_lock);
+}
+EXPORT_SYMBOL(radix_tree_preload_end);
 
 static unsigned radix_tree_load_root(const struct radix_tree_root *root,
 		struct radix_tree_node **nodep, unsigned long *maxindex)
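radix_tree_preload_end() has presumably been a static inline in the header that simply called preempt_enable(); since radix_tree_preloads_lock is local to this file, ending the preload section becomes an out-of-line, exported function that drops the local lock (the matching header change is not part of this hunk). The caller-visible pattern is unchanged. A sketch of the usual preload/insert sequence, where my_lock and the surrounding function are illustrative only:

#include <linux/radix-tree.h>
#include <linux/spinlock.h>
#include <linux/gfp.h>

/* Insert @item at @index so that node allocation cannot fail inside
 * the spinlocked section. */
static int insert_item(struct radix_tree_root *root, spinlock_t *my_lock,
		       unsigned long index, void *item)
{
	int err;

	/* On success this returns with the preload section entered,
	 * i.e. with radix_tree_preloads_lock held after this patch;
	 * on failure the lock is not held and we just bail out. */
	err = radix_tree_preload(GFP_KERNEL);
	if (err)
		return err;

	spin_lock(my_lock);
	err = radix_tree_insert(root, index, item);
	spin_unlock(my_lock);

	radix_tree_preload_end();	/* drops the local lock */
	return err;
}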
@@ -2102,9 +2110,15 @@
 void idr_preload(gfp_t gfp_mask)
 {
 	if (__radix_tree_preload(gfp_mask, IDR_PRELOAD_SIZE))
-		preempt_disable();
+		local_lock(radix_tree_preloads_lock);
 }
 EXPORT_SYMBOL(idr_preload);
+
+void idr_preload_end(void)
+{
+	local_unlock(radix_tree_preloads_lock);
+}
+EXPORT_SYMBOL(idr_preload_end);
 
 int ida_pre_get(struct ida *ida, gfp_t gfp)
 {
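idr_preload_end() follows the same pattern: what was presumably an inline preempt_enable() in the IDR header becomes an exported function that releases the local lock, keeping idr_preload()/idr_preload_end() balanced. Note that idr_preload() ends up holding the lock on both paths: __radix_tree_preload() returns with it held on success, and the explicit local_lock() covers the failure case. A sketch of the usual caller pattern (my_idr and my_lock are hypothetical):

#include <linux/idr.h>
#include <linux/spinlock.h>
#include <linux/gfp.h>

/* Allocate an ID for @ptr under a caller-provided spinlock; the preload
 * lets the GFP_NOWAIT allocation draw from the per-CPU node pool. */
static int assign_id(struct idr *my_idr, spinlock_t *my_lock, void *ptr)
{
	int id;

	idr_preload(GFP_KERNEL);	/* returns with the local lock held */
	spin_lock(my_lock);
	id = idr_alloc(my_idr, ptr, 0, 0, GFP_NOWAIT);
	spin_unlock(my_lock);
	idr_preload_end();		/* releases the local lock */

	return id;
}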
@@ -2114,7 +2128,7 @@
 	 * to return to the ida_pre_get() step.
 	 */
 	if (!__radix_tree_preload(gfp, IDA_PRELOAD_SIZE))
-		preempt_enable();
+		local_unlock(radix_tree_preloads_lock);
 
 	if (!this_cpu_read(ida_bitmap)) {
 		struct ida_bitmap *bitmap = kzalloc(sizeof(*bitmap), gfp);