@@ -38,7 +38,7 @@
 #include <linux/rcupdate.h>
 #include <linux/slab.h>
 #include <linux/string.h>
-
+#include <linux/locallock.h>
 
 /* Number of nodes in fully populated tree of given height */
 static unsigned long height_to_maxnodes[RADIX_TREE_MAX_PATH + 1] __read_mostly;
@@ -87,6 +87,7 @@
 	struct radix_tree_node *nodes;
 };
 static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
+static DEFINE_LOCAL_IRQ_LOCK(radix_tree_preloads_lock);
 
 static inline struct radix_tree_node *entry_to_node(void *ptr)
 {
@@ -405,12 +406,13 @@
 		 * succeed in getting a node here (and never reach
 		 * kmem_cache_alloc)
 		 */
-		rtp = this_cpu_ptr(&radix_tree_preloads);
+		rtp = &get_locked_var(radix_tree_preloads_lock, radix_tree_preloads);
 		if (rtp->nr) {
 			ret = rtp->nodes;
 			rtp->nodes = ret->parent;
 			rtp->nr--;
 		}
+		put_locked_var(radix_tree_preloads_lock, radix_tree_preloads);
 		/*
 		 * Update the allocation stack trace as this is more useful
 		 * for debugging.
@@ -476,14 +478,14 @@
 	 */
 	gfp_mask &= ~__GFP_ACCOUNT;
 
-	preempt_disable();
+	local_lock(radix_tree_preloads_lock);
 	rtp = this_cpu_ptr(&radix_tree_preloads);
 	while (rtp->nr < nr) {
-		preempt_enable();
+		local_unlock(radix_tree_preloads_lock);
 		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
 		if (node == NULL)
 			goto out;
-		preempt_disable();
+		local_lock(radix_tree_preloads_lock);
 		rtp = this_cpu_ptr(&radix_tree_preloads);
 		if (rtp->nr < nr) {
 			node->parent = rtp->nodes;
@@ -525,7 +527,7 @@
 	if (gfpflags_allow_blocking(gfp_mask))
 		return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
 	/* Preloading doesn't help anything with this gfp mask, skip it */
-	preempt_disable();
+	local_lock(radix_tree_preloads_lock);
 	return 0;
 }
 EXPORT_SYMBOL(radix_tree_maybe_preload);
@@ -563,7 +565,7 @@
 
 	/* Preloading doesn't help anything with this gfp mask, skip it */
 	if (!gfpflags_allow_blocking(gfp_mask)) {
-		preempt_disable();
+		local_lock(radix_tree_preloads_lock);
 		return 0;
 	}
 
@@ -596,6 +598,12 @@
 
 	return __radix_tree_preload(gfp_mask, nr_nodes);
 }
+
+void radix_tree_preload_end(void)
+{
+	local_unlock(radix_tree_preloads_lock);
+}
+EXPORT_SYMBOL(radix_tree_preload_end);
 
 static unsigned radix_tree_load_root(const struct radix_tree_root *root,
 		struct radix_tree_node **nodep, unsigned long *maxindex)
@@ -2102,9 +2110,15 @@
 void idr_preload(gfp_t gfp_mask)
 {
 	if (__radix_tree_preload(gfp_mask, IDR_PRELOAD_SIZE))
-		preempt_disable();
+		local_lock(radix_tree_preloads_lock);
 }
 EXPORT_SYMBOL(idr_preload);
+
+void idr_preload_end(void)
+{
+	local_unlock(radix_tree_preloads_lock);
+}
+EXPORT_SYMBOL(idr_preload_end);
 
 int ida_pre_get(struct ida *ida, gfp_t gfp)
 {
@@ -2114,7 +2128,7 @@
 	 * to return to the ida_pre_get() step.
 	 */
 	if (!__radix_tree_preload(gfp, IDA_PRELOAD_SIZE))
-		preempt_unlock(radix_tree_preloads_lock);
 
 	if (!this_cpu_read(ida_bitmap)) {
 		struct ida_bitmap *bitmap = kzalloc(sizeof(*bitmap), gfp);