Diffstat (limited to 'lib/radix-tree.c')
 lib/radix-tree.c | 1927 ++++++++++++++++++++++++++++++---------------------
 1 file changed, 1040 insertions(+), 887 deletions(-)
diff --git a/lib/radix-tree.c b/lib/radix-tree.c index e7964296fd50..976b9bd02a1b 100644 --- a/lib/radix-tree.c +++ b/lib/radix-tree.c @@ -1,76 +1,38 @@ +// SPDX-License-Identifier: GPL-2.0-or-later /* * Copyright (C) 2001 Momchil Velikov * Portions Copyright (C) 2001 Christoph Hellwig * Copyright (C) 2005 SGI, Christoph Lameter * Copyright (C) 2006 Nick Piggin * Copyright (C) 2012 Konstantin Khlebnikov - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU General Public License as - * published by the Free Software Foundation; either version 2, or (at - * your option) any later version. - * - * This program is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. + * Copyright (C) 2016 Intel, Matthew Wilcox + * Copyright (C) 2016 Intel, Ross Zwisler */ +#include <linux/bitmap.h> +#include <linux/bitops.h> +#include <linux/bug.h> +#include <linux/cpu.h> #include <linux/errno.h> +#include <linux/export.h> +#include <linux/idr.h> #include <linux/init.h> #include <linux/kernel.h> -#include <linux/export.h> -#include <linux/radix-tree.h> +#include <linux/kmemleak.h> #include <linux/percpu.h> +#include <linux/preempt.h> /* in_interrupt() */ +#include <linux/radix-tree.h> +#include <linux/rcupdate.h> #include <linux/slab.h> -#include <linux/notifier.h> -#include <linux/cpu.h> #include <linux/string.h> -#include <linux/bitops.h> -#include <linux/rcupdate.h> - - -#ifdef __KERNEL__ -#define RADIX_TREE_MAP_SHIFT (CONFIG_BASE_SMALL ? 4 : 6) -#else -#define RADIX_TREE_MAP_SHIFT 3 /* For more stressful testing */ -#endif - -#define RADIX_TREE_MAP_SIZE (1UL << RADIX_TREE_MAP_SHIFT) -#define RADIX_TREE_MAP_MASK (RADIX_TREE_MAP_SIZE-1) - -#define RADIX_TREE_TAG_LONGS \ - ((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG) - -struct radix_tree_node { - unsigned int height; /* Height from the bottom */ - unsigned int count; - union { - struct radix_tree_node *parent; /* Used when ascending tree */ - struct rcu_head rcu_head; /* Used when freeing node */ - }; - void __rcu *slots[RADIX_TREE_MAP_SIZE]; - unsigned long tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS]; -}; +#include <linux/xarray.h> -#define RADIX_TREE_INDEX_BITS (8 /* CHAR_BIT */ * sizeof(unsigned long)) -#define RADIX_TREE_MAX_PATH (DIV_ROUND_UP(RADIX_TREE_INDEX_BITS, \ - RADIX_TREE_MAP_SHIFT)) - -/* - * The height_to_maxindex array needs to be one deeper than the maximum - * path as height 0 holds only 1 entry. - */ -static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly; +#include "radix-tree.h" /* * Radix tree node cache. */ -static struct kmem_cache *radix_tree_node_cachep; +struct kmem_cache *radix_tree_node_cachep; /* * The radix tree is variable-height, so an insert operation not only has @@ -86,27 +48,53 @@ static struct kmem_cache *radix_tree_node_cachep; #define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1) /* + * The IDR does not have to be as high as the radix tree since it uses + * signed integers, not unsigned longs. 
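The arithmetic behind these preload constants, as a worked sketch (assuming RADIX_TREE_MAP_SHIFT == 6 and BITS_PER_LONG == 64; CONFIG_BASE_SMALL and 32-bit configurations differ):

/*
 * RADIX_TREE_MAX_PATH     = DIV_ROUND_UP(64, 6) = 11 levels
 * RADIX_TREE_PRELOAD_SIZE = 11 * 2 - 1          = 21 nodes
 *
 * IDR_INDEX_BITS          = 8 * sizeof(int) - 1 = 31 (IDs are non-negative)
 * IDR_MAX_PATH            = DIV_ROUND_UP(31, 6) = 6 levels
 * IDR_PRELOAD_SIZE        = 6 * 2 - 1           = 11 nodes
 */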
+ */ +#define IDR_INDEX_BITS (8 /* CHAR_BIT */ * sizeof(int) - 1) +#define IDR_MAX_PATH (DIV_ROUND_UP(IDR_INDEX_BITS, \ + RADIX_TREE_MAP_SHIFT)) +#define IDR_PRELOAD_SIZE (IDR_MAX_PATH * 2 - 1) + +/* * Per-cpu pool of preloaded nodes */ -struct radix_tree_preload { - int nr; - struct radix_tree_node *nodes[RADIX_TREE_PRELOAD_SIZE]; +DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { + .lock = INIT_LOCAL_LOCK(lock), }; -static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, }; +EXPORT_PER_CPU_SYMBOL_GPL(radix_tree_preloads); -static inline void *ptr_to_indirect(void *ptr) +static inline struct radix_tree_node *entry_to_node(void *ptr) { - return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR); + return (void *)((unsigned long)ptr & ~RADIX_TREE_INTERNAL_NODE); } -static inline void *indirect_to_ptr(void *ptr) +static inline void *node_to_entry(void *ptr) { - return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR); + return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE); } -static inline gfp_t root_gfp_mask(struct radix_tree_root *root) +#define RADIX_TREE_RETRY XA_RETRY_ENTRY + +static inline unsigned long +get_slot_offset(const struct radix_tree_node *parent, void __rcu **slot) { - return root->gfp_mask & __GFP_BITS_MASK; + return parent ? slot - parent->slots : 0; +} + +static unsigned int radix_tree_descend(const struct radix_tree_node *parent, + struct radix_tree_node **nodep, unsigned long index) +{ + unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK; + void __rcu **entry = rcu_dereference_raw(parent->slots[offset]); + + *nodep = (void *)entry; + return offset; +} + +static inline gfp_t root_gfp_mask(const struct radix_tree_root *root) +{ + return root->xa_flags & (__GFP_BITS_MASK & ~GFP_ZONEMASK); } static inline void tag_set(struct radix_tree_node *node, unsigned int tag, @@ -121,39 +109,50 @@ static inline void tag_clear(struct radix_tree_node *node, unsigned int tag, __clear_bit(offset, node->tags[tag]); } -static inline int tag_get(struct radix_tree_node *node, unsigned int tag, +static inline int tag_get(const struct radix_tree_node *node, unsigned int tag, int offset) { return test_bit(offset, node->tags[tag]); } -static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag) +static inline void root_tag_set(struct radix_tree_root *root, unsigned tag) { - root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT)); + root->xa_flags |= (__force gfp_t)(1 << (tag + ROOT_TAG_SHIFT)); } -static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag) +static inline void root_tag_clear(struct radix_tree_root *root, unsigned tag) { - root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT)); + root->xa_flags &= (__force gfp_t)~(1 << (tag + ROOT_TAG_SHIFT)); } static inline void root_tag_clear_all(struct radix_tree_root *root) { - root->gfp_mask &= __GFP_BITS_MASK; + root->xa_flags &= (__force gfp_t)((1 << ROOT_TAG_SHIFT) - 1); +} + +static inline int root_tag_get(const struct radix_tree_root *root, unsigned tag) +{ + return (__force int)root->xa_flags & (1 << (tag + ROOT_TAG_SHIFT)); +} + +static inline unsigned root_tags_get(const struct radix_tree_root *root) +{ + return (__force unsigned)root->xa_flags >> ROOT_TAG_SHIFT; } -static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag) +static inline bool is_idr(const struct radix_tree_root *root) { - return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT)); + return 
!!(root->xa_flags & ROOT_IS_IDR); } /* * Returns 1 if any slot in the node has this tag set. * Otherwise returns 0. */ -static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag) +static inline int any_tag_set(const struct radix_tree_node *node, + unsigned int tag) { - int idx; + unsigned idx; for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) { if (node->tags[tag][idx]) return 1; @@ -161,25 +160,29 @@ static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag) return 0; } +static inline void all_tag_set(struct radix_tree_node *node, unsigned int tag) +{ + bitmap_fill(node->tags[tag], RADIX_TREE_MAP_SIZE); +} + /** * radix_tree_find_next_bit - find the next set bit in a memory region * - * @addr: The address to base the search on - * @size: The bitmap size in bits - * @offset: The bitnumber to start searching at + * @node: where to begin the search + * @tag: the tag index + * @offset: the bitnumber to start searching at * * Unrollable variant of find_next_bit() for constant size arrays. * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero. * Returns next bit offset, or size if nothing found. */ static __always_inline unsigned long -radix_tree_find_next_bit(const unsigned long *addr, - unsigned long size, unsigned long offset) +radix_tree_find_next_bit(struct radix_tree_node *node, unsigned int tag, + unsigned long offset) { - if (!__builtin_constant_p(size)) - return find_next_bit(addr, size, offset); + const unsigned long *addr = node->tags[tag]; - if (offset < size) { + if (offset < RADIX_TREE_MAP_SIZE) { unsigned long tmp; addr += offset / BITS_PER_LONG; @@ -187,14 +190,39 @@ radix_tree_find_next_bit(const unsigned long *addr, if (tmp) return __ffs(tmp) + offset; offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1); - while (offset < size) { + while (offset < RADIX_TREE_MAP_SIZE) { tmp = *++addr; if (tmp) return __ffs(tmp) + offset; offset += BITS_PER_LONG; } } - return size; + return RADIX_TREE_MAP_SIZE; +} + +static unsigned int iter_offset(const struct radix_tree_iter *iter) +{ + return iter->index & RADIX_TREE_MAP_MASK; +} + +/* + * The maximum index which can be stored in a radix tree + */ +static inline unsigned long shift_maxindex(unsigned int shift) +{ + return (RADIX_TREE_MAP_SIZE << shift) - 1; +} + +static inline unsigned long node_maxindex(const struct radix_tree_node *node) +{ + return shift_maxindex(node->shift); +} + +static unsigned long next_index(unsigned long index, + const struct radix_tree_node *node, + unsigned long offset) +{ + return (index & ~node_maxindex(node)) + (offset << node->shift); } /* @@ -202,49 +230,76 @@ radix_tree_find_next_bit(const unsigned long *addr, * that the caller has pinned this thread of control to the current CPU. */ static struct radix_tree_node * -radix_tree_node_alloc(struct radix_tree_root *root) +radix_tree_node_alloc(gfp_t gfp_mask, struct radix_tree_node *parent, + struct radix_tree_root *root, + unsigned int shift, unsigned int offset, + unsigned int count, unsigned int nr_values) { struct radix_tree_node *ret = NULL; - gfp_t gfp_mask = root_gfp_mask(root); - if (!(gfp_mask & __GFP_WAIT)) { + /* + * Preload code isn't irq safe and it doesn't make sense to use + * preloading during an interrupt anyway as all the allocations have + * to be atomic. So just do normal allocation when in interrupt. 
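The per-CPU preload pool consumed below is a singly linked stack of nodes threaded through their ->parent pointers. A stand-alone sketch of that pop operation, with simplified stand-in types (the real code operates on struct radix_tree_node and struct radix_tree_preload):

struct node { struct node *parent; };
struct pool { struct node *nodes; int nr; };

/* Pop one preallocated node, or return NULL if the pool is empty. */
static struct node *pool_pop(struct pool *rtp)
{
        struct node *ret = rtp->nodes;

        if (ret) {
                rtp->nodes = ret->parent;       /* unlink via the parent field */
                rtp->nr--;
        }
        return ret;
}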
+ */ + if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) { struct radix_tree_preload *rtp; /* + * Even if the caller has preloaded, try to allocate from the + * cache first for the new node to get accounted to the memory + * cgroup. + */ + ret = kmem_cache_alloc(radix_tree_node_cachep, + gfp_mask | __GFP_NOWARN); + if (ret) + goto out; + + /* * Provided the caller has preloaded here, we will always * succeed in getting a node here (and never reach * kmem_cache_alloc) */ - rtp = &__get_cpu_var(radix_tree_preloads); + rtp = this_cpu_ptr(&radix_tree_preloads); if (rtp->nr) { - ret = rtp->nodes[rtp->nr - 1]; - rtp->nodes[rtp->nr - 1] = NULL; + ret = rtp->nodes; + rtp->nodes = ret->parent; rtp->nr--; } + /* + * Update the allocation stack trace as this is more useful + * for debugging. + */ + kmemleak_update_trace(ret); + goto out; + } + ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask); +out: + BUG_ON(radix_tree_is_internal_node(ret)); + if (ret) { + ret->shift = shift; + ret->offset = offset; + ret->count = count; + ret->nr_values = nr_values; + ret->parent = parent; + ret->array = root; } - if (ret == NULL) - ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask); - - BUG_ON(radix_tree_is_indirect_ptr(ret)); return ret; } -static void radix_tree_node_rcu_free(struct rcu_head *head) +void radix_tree_node_rcu_free(struct rcu_head *head) { struct radix_tree_node *node = container_of(head, struct radix_tree_node, rcu_head); - int i; /* - * must only free zeroed nodes into the slab. radix_tree_shrink - * can leave us with a non-NULL entry in the first slot, so clear - * that here to make sure. + * Must only free zeroed nodes into the slab. We can be left with + * non-NULL entries by radix_tree_free_nodes, so clear the entries + * and tags here. */ - for (i = 0; i < RADIX_TREE_MAX_TAGS; i++) - tag_clear(node, i, 0); - - node->slots[0] = NULL; - node->count = 0; + memset(node->slots, 0, sizeof(node->slots)); + memset(node->tags, 0, sizeof(node->tags)); + INIT_LIST_HEAD(&node->private_list); kmem_cache_free(radix_tree_node_cachep, node); } @@ -262,203 +317,465 @@ radix_tree_node_free(struct radix_tree_node *node) * with preemption not disabled. * * To make use of this facility, the radix tree must be initialised without - * __GFP_WAIT being passed to INIT_RADIX_TREE(). + * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE(). */ -int radix_tree_preload(gfp_t gfp_mask) +static __must_check int __radix_tree_preload(gfp_t gfp_mask, unsigned nr) { struct radix_tree_preload *rtp; struct radix_tree_node *node; int ret = -ENOMEM; - preempt_disable(); - rtp = &__get_cpu_var(radix_tree_preloads); - while (rtp->nr < ARRAY_SIZE(rtp->nodes)) { - preempt_enable(); + /* + * Nodes preloaded by one cgroup can be used by another cgroup, so + * they should never be accounted to any particular memory cgroup. 
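For context, the calling convention this preloading supports looks roughly as follows; a sketch only, where my_tree and my_lock are hypothetical and the tree was initialised without __GFP_DIRECT_RECLAIM:

static int add_entry(struct radix_tree_root *my_tree, spinlock_t *my_lock,
                     unsigned long index, void *item)
{
        int err = radix_tree_preload(GFP_KERNEL);       /* may sleep */

        if (err)
                return err;

        spin_lock(my_lock);
        /* the preloaded nodes make -ENOMEM impossible here; -EEXIST still is */
        err = radix_tree_insert(my_tree, index, item);
        spin_unlock(my_lock);

        radix_tree_preload_end();       /* releases the preload lock */
        return err;
}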
+ */ + gfp_mask &= ~__GFP_ACCOUNT; + + local_lock(&radix_tree_preloads.lock); + rtp = this_cpu_ptr(&radix_tree_preloads); + while (rtp->nr < nr) { + local_unlock(&radix_tree_preloads.lock); node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask); if (node == NULL) goto out; - preempt_disable(); - rtp = &__get_cpu_var(radix_tree_preloads); - if (rtp->nr < ARRAY_SIZE(rtp->nodes)) - rtp->nodes[rtp->nr++] = node; - else + local_lock(&radix_tree_preloads.lock); + rtp = this_cpu_ptr(&radix_tree_preloads); + if (rtp->nr < nr) { + node->parent = rtp->nodes; + rtp->nodes = node; + rtp->nr++; + } else { kmem_cache_free(radix_tree_node_cachep, node); + } } ret = 0; out: return ret; } + +/* + * Load up this CPU's radix_tree_node buffer with sufficient objects to + * ensure that the addition of a single element in the tree cannot fail. On + * success, return zero, with preemption disabled. On error, return -ENOMEM + * with preemption not disabled. + * + * To make use of this facility, the radix tree must be initialised without + * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE(). + */ +int radix_tree_preload(gfp_t gfp_mask) +{ + /* Warn on non-sensical use... */ + WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask)); + return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE); +} EXPORT_SYMBOL(radix_tree_preload); /* - * Return the maximum key which can be store into a - * radix tree with height HEIGHT. + * The same as above function, except we don't guarantee preloading happens. + * We do it, if we decide it helps. On success, return zero with preemption + * disabled. On error, return -ENOMEM with preemption not disabled. */ -static inline unsigned long radix_tree_maxindex(unsigned int height) +int radix_tree_maybe_preload(gfp_t gfp_mask) { - return height_to_maxindex[height]; + if (gfpflags_allow_blocking(gfp_mask)) + return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE); + /* Preloading doesn't help anything with this gfp mask, skip it */ + local_lock(&radix_tree_preloads.lock); + return 0; +} +EXPORT_SYMBOL(radix_tree_maybe_preload); + +static unsigned radix_tree_load_root(const struct radix_tree_root *root, + struct radix_tree_node **nodep, unsigned long *maxindex) +{ + struct radix_tree_node *node = rcu_dereference_raw(root->xa_head); + + *nodep = node; + + if (likely(radix_tree_is_internal_node(node))) { + node = entry_to_node(node); + *maxindex = node_maxindex(node); + return node->shift + RADIX_TREE_MAP_SHIFT; + } + + *maxindex = 0; + return 0; } /* * Extend a radix tree so it can store key @index. */ -static int radix_tree_extend(struct radix_tree_root *root, unsigned long index) +static int radix_tree_extend(struct radix_tree_root *root, gfp_t gfp, + unsigned long index, unsigned int shift) { - struct radix_tree_node *node; - struct radix_tree_node *slot; - unsigned int height; + void *entry; + unsigned int maxshift; int tag; - /* Figure out what the height should be. */ - height = root->height + 1; - while (index > radix_tree_maxindex(height)) - height++; + /* Figure out what the shift should be. 
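A worked example of the loop that follows, assuming RADIX_TREE_MAP_SHIFT == 6 and an initially empty tree (shift starts at 0):

/*
 * shift_maxindex(0)  = (64 << 0)  - 1 = 63
 * shift_maxindex(6)  = (64 << 6)  - 1 = 4095
 * shift_maxindex(12) = (64 << 12) - 1 = 262143
 *
 * so extending for index 100000 steps maxshift 0 -> 6 -> 12, and the
 * do/while below builds a three-level tree.
 */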
*/ + maxshift = shift; + while (index > shift_maxindex(maxshift)) + maxshift += RADIX_TREE_MAP_SHIFT; - if (root->rnode == NULL) { - root->height = height; + entry = rcu_dereference_raw(root->xa_head); + if (!entry && (!is_idr(root) || root_tag_get(root, IDR_FREE))) goto out; - } do { - unsigned int newheight; - if (!(node = radix_tree_node_alloc(root))) + struct radix_tree_node *node = radix_tree_node_alloc(gfp, NULL, + root, shift, 0, 1, 0); + if (!node) return -ENOMEM; - /* Propagate the aggregated tag info into the new root */ - for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) { - if (root_tag_get(root, tag)) - tag_set(node, tag, 0); + if (is_idr(root)) { + all_tag_set(node, IDR_FREE); + if (!root_tag_get(root, IDR_FREE)) { + tag_clear(node, IDR_FREE, 0); + root_tag_set(root, IDR_FREE); + } + } else { + /* Propagate the aggregated tag info to the new child */ + for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) { + if (root_tag_get(root, tag)) + tag_set(node, tag, 0); + } } - /* Increase the height. */ - newheight = root->height+1; - node->height = newheight; - node->count = 1; - node->parent = NULL; - slot = root->rnode; - if (newheight > 1) { - slot = indirect_to_ptr(slot); - slot->parent = node; + BUG_ON(shift > BITS_PER_LONG); + if (radix_tree_is_internal_node(entry)) { + entry_to_node(entry)->parent = node; + } else if (xa_is_value(entry)) { + /* Moving a value entry root->xa_head to a node */ + node->nr_values = 1; } - node->slots[0] = slot; - node = ptr_to_indirect(node); - rcu_assign_pointer(root->rnode, node); - root->height = newheight; - } while (height > root->height); + /* + * entry was already in the radix tree, so we do not need + * rcu_assign_pointer here + */ + node->slots[0] = (void __rcu *)entry; + entry = node_to_entry(node); + rcu_assign_pointer(root->xa_head, entry); + shift += RADIX_TREE_MAP_SHIFT; + } while (shift <= maxshift); out: - return 0; + return maxshift + RADIX_TREE_MAP_SHIFT; } /** - * radix_tree_insert - insert into a radix tree + * radix_tree_shrink - shrink radix tree to minimum height + * @root: radix tree root + */ +static inline bool radix_tree_shrink(struct radix_tree_root *root) +{ + bool shrunk = false; + + for (;;) { + struct radix_tree_node *node = rcu_dereference_raw(root->xa_head); + struct radix_tree_node *child; + + if (!radix_tree_is_internal_node(node)) + break; + node = entry_to_node(node); + + /* + * The candidate node has more than one child, or its child + * is not at the leftmost slot, we cannot shrink. + */ + if (node->count != 1) + break; + child = rcu_dereference_raw(node->slots[0]); + if (!child) + break; + + /* + * For an IDR, we must not shrink entry 0 into the root in + * case somebody calls idr_replace() with a pointer that + * appears to be an internal entry + */ + if (!node->shift && is_idr(root)) + break; + + if (radix_tree_is_internal_node(child)) + entry_to_node(child)->parent = NULL; + + /* + * We don't need rcu_assign_pointer(), since we are simply + * moving the node from one part of the tree to another: if it + * was safe to dereference the old pointer to it + * (node->slots[0]), it will be safe to dereference the new + * one (root->xa_head) as far as dependent read barriers go. + */ + root->xa_head = (void __rcu *)child; + if (is_idr(root) && !tag_get(node, IDR_FREE, 0)) + root_tag_clear(root, IDR_FREE); + + /* + * We have a dilemma here. The node's slot[0] must not be + * NULLed in case there are concurrent lookups expecting to + * find the item. 
However if this was a bottom-level node, + * then it may be subject to the slot pointer being visible + * to callers dereferencing it. If item corresponding to + * slot[0] is subsequently deleted, these callers would expect + * their slot to become empty sooner or later. + * + * For example, lockless pagecache will look up a slot, deref + * the page pointer, and if the page has 0 refcount it means it + * was concurrently deleted from pagecache so try the deref + * again. Fortunately there is already a requirement for logic + * to retry the entire slot lookup -- the indirect pointer + * problem (replacing direct root node with an indirect pointer + * also results in a stale slot). So tag the slot as indirect + * to force callers to retry. + */ + node->count = 0; + if (!radix_tree_is_internal_node(child)) { + node->slots[0] = (void __rcu *)RADIX_TREE_RETRY; + } + + WARN_ON_ONCE(!list_empty(&node->private_list)); + radix_tree_node_free(node); + shrunk = true; + } + + return shrunk; +} + +static bool delete_node(struct radix_tree_root *root, + struct radix_tree_node *node) +{ + bool deleted = false; + + do { + struct radix_tree_node *parent; + + if (node->count) { + if (node_to_entry(node) == + rcu_dereference_raw(root->xa_head)) + deleted |= radix_tree_shrink(root); + return deleted; + } + + parent = node->parent; + if (parent) { + parent->slots[node->offset] = NULL; + parent->count--; + } else { + /* + * Shouldn't the tags already have all been cleared + * by the caller? + */ + if (!is_idr(root)) + root_tag_clear_all(root); + root->xa_head = NULL; + } + + WARN_ON_ONCE(!list_empty(&node->private_list)); + radix_tree_node_free(node); + deleted = true; + + node = parent; + } while (node); + + return deleted; +} + +/** + * __radix_tree_create - create a slot in a radix tree * @root: radix tree root * @index: index key - * @item: item to insert + * @nodep: returns node + * @slotp: returns slot * - * Insert an item into the radix tree at position @index. + * Create, if necessary, and return the node and slot for an item + * at position @index in the radix tree @root. + * + * Until there is more than one item in the tree, no nodes are + * allocated and @root->xa_head is used as a direct slot instead of + * pointing to a node, in which case *@nodep will be NULL. + * + * Returns -ENOMEM, or 0 for success. */ -int radix_tree_insert(struct radix_tree_root *root, - unsigned long index, void *item) +static int __radix_tree_create(struct radix_tree_root *root, + unsigned long index, struct radix_tree_node **nodep, + void __rcu ***slotp) { - struct radix_tree_node *node = NULL, *slot; - unsigned int height, shift; - int offset; - int error; + struct radix_tree_node *node = NULL, *child; + void __rcu **slot = (void __rcu **)&root->xa_head; + unsigned long maxindex; + unsigned int shift, offset = 0; + unsigned long max = index; + gfp_t gfp = root_gfp_mask(root); - BUG_ON(radix_tree_is_indirect_ptr(item)); + shift = radix_tree_load_root(root, &child, &maxindex); /* Make sure the tree is high enough. 
*/ - if (index > radix_tree_maxindex(root->height)) { - error = radix_tree_extend(root, index); - if (error) + if (max > maxindex) { + int error = radix_tree_extend(root, gfp, max, shift); + if (error < 0) return error; + shift = error; + child = rcu_dereference_raw(root->xa_head); } - slot = indirect_to_ptr(root->rnode); - - height = root->height; - shift = (height-1) * RADIX_TREE_MAP_SHIFT; - - offset = 0; /* uninitialised var warning */ - while (height > 0) { - if (slot == NULL) { + while (shift > 0) { + shift -= RADIX_TREE_MAP_SHIFT; + if (child == NULL) { /* Have to add a child node. */ - if (!(slot = radix_tree_node_alloc(root))) + child = radix_tree_node_alloc(gfp, node, root, shift, + offset, 0, 0); + if (!child) return -ENOMEM; - slot->height = height; - slot->parent = node; - if (node) { - rcu_assign_pointer(node->slots[offset], slot); + rcu_assign_pointer(*slot, node_to_entry(child)); + if (node) node->count++; - } else - rcu_assign_pointer(root->rnode, ptr_to_indirect(slot)); - } + } else if (!radix_tree_is_internal_node(child)) + break; /* Go a level down */ - offset = (index >> shift) & RADIX_TREE_MAP_MASK; - node = slot; - slot = node->slots[offset]; - shift -= RADIX_TREE_MAP_SHIFT; - height--; + node = entry_to_node(child); + offset = radix_tree_descend(node, &child, index); + slot = &node->slots[offset]; } - if (slot != NULL) - return -EEXIST; + if (nodep) + *nodep = node; + if (slotp) + *slotp = slot; + return 0; +} + +/* + * Free any nodes below this node. The tree is presumed to not need + * shrinking, and any user data in the tree is presumed to not need a + * destructor called on it. If we need to add a destructor, we can + * add that functionality later. Note that we may not clear tags or + * slots from the tree as an RCU walker may still have a pointer into + * this subtree. We could replace the entries with RADIX_TREE_RETRY, + * but we'll still have to clear those in rcu_free. + */ +static void radix_tree_free_nodes(struct radix_tree_node *node) +{ + unsigned offset = 0; + struct radix_tree_node *child = entry_to_node(node); + + for (;;) { + void *entry = rcu_dereference_raw(child->slots[offset]); + if (xa_is_node(entry) && child->shift) { + child = entry_to_node(entry); + offset = 0; + continue; + } + offset++; + while (offset == RADIX_TREE_MAP_SIZE) { + struct radix_tree_node *old = child; + offset = child->offset + 1; + child = child->parent; + WARN_ON_ONCE(!list_empty(&old->private_list)); + radix_tree_node_free(old); + if (old == entry_to_node(node)) + return; + } + } +} +static inline int insert_entries(struct radix_tree_node *node, + void __rcu **slot, void *item) +{ + if (*slot) + return -EEXIST; + rcu_assign_pointer(*slot, item); if (node) { node->count++; - rcu_assign_pointer(node->slots[offset], item); + if (xa_is_value(item)) + node->nr_values++; + } + return 1; +} + +/** + * radix_tree_insert - insert into a radix tree + * @root: radix tree root + * @index: index key + * @item: item to insert + * + * Insert an item into the radix tree at position @index. 
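A minimal round trip through insert, lookup, and delete, assuming process context; my_tree and the stored value are illustrative names, not part of this patch:

static RADIX_TREE(my_tree, GFP_KERNEL);

static void demo(void)
{
        static int value;

        if (radix_tree_insert(&my_tree, 5, &value))
                return;                         /* -EEXIST or -ENOMEM */
        WARN_ON(radix_tree_lookup(&my_tree, 5) != &value);
        WARN_ON(radix_tree_delete(&my_tree, 5) != &value);
}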
+ */ +int radix_tree_insert(struct radix_tree_root *root, unsigned long index, + void *item) +{ + struct radix_tree_node *node; + void __rcu **slot; + int error; + + BUG_ON(radix_tree_is_internal_node(item)); + + error = __radix_tree_create(root, index, &node, &slot); + if (error) + return error; + + error = insert_entries(node, slot, item); + if (error < 0) + return error; + + if (node) { + unsigned offset = get_slot_offset(node, slot); BUG_ON(tag_get(node, 0, offset)); BUG_ON(tag_get(node, 1, offset)); + BUG_ON(tag_get(node, 2, offset)); } else { - rcu_assign_pointer(root->rnode, item); - BUG_ON(root_tag_get(root, 0)); - BUG_ON(root_tag_get(root, 1)); + BUG_ON(root_tags_get(root)); } return 0; } EXPORT_SYMBOL(radix_tree_insert); -/* - * is_slot == 1 : search for the slot. - * is_slot == 0 : search for the node. +/** + * __radix_tree_lookup - lookup an item in a radix tree + * @root: radix tree root + * @index: index key + * @nodep: returns node + * @slotp: returns slot + * + * Lookup and return the item at position @index in the radix + * tree @root. + * + * Until there is more than one item in the tree, no nodes are + * allocated and @root->xa_head is used as a direct slot instead of + * pointing to a node, in which case *@nodep will be NULL. */ -static void *radix_tree_lookup_element(struct radix_tree_root *root, - unsigned long index, int is_slot) +void *__radix_tree_lookup(const struct radix_tree_root *root, + unsigned long index, struct radix_tree_node **nodep, + void __rcu ***slotp) { - unsigned int height, shift; - struct radix_tree_node *node, **slot; - - node = rcu_dereference_raw(root->rnode); - if (node == NULL) - return NULL; - - if (!radix_tree_is_indirect_ptr(node)) { - if (index > 0) - return NULL; - return is_slot ? (void *)&root->rnode : node; - } - node = indirect_to_ptr(node); - - height = node->height; - if (index > radix_tree_maxindex(height)) + struct radix_tree_node *node, *parent; + unsigned long maxindex; + void __rcu **slot; + + restart: + parent = NULL; + slot = (void __rcu **)&root->xa_head; + radix_tree_load_root(root, &node, &maxindex); + if (index > maxindex) return NULL; - shift = (height-1) * RADIX_TREE_MAP_SHIFT; + while (radix_tree_is_internal_node(node)) { + unsigned offset; - do { - slot = (struct radix_tree_node **) - (node->slots + ((index>>shift) & RADIX_TREE_MAP_MASK)); - node = rcu_dereference_raw(*slot); - if (node == NULL) - return NULL; - - shift -= RADIX_TREE_MAP_SHIFT; - height--; - } while (height > 0); + parent = entry_to_node(node); + offset = radix_tree_descend(parent, &node, index); + slot = parent->slots + offset; + if (node == RADIX_TREE_RETRY) + goto restart; + if (parent->shift == 0) + break; + } - return is_slot ? (void *)slot : indirect_to_ptr(node); + if (nodep) + *nodep = parent; + if (slotp) + *slotp = slot; + return node; } /** @@ -474,9 +791,14 @@ static void *radix_tree_lookup_element(struct radix_tree_root *root, * exclusive from other writers. Any dereference of the slot must be done * using radix_tree_deref_slot. */ -void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index) +void __rcu **radix_tree_lookup_slot(const struct radix_tree_root *root, + unsigned long index) { - return (void **)radix_tree_lookup_element(root, index, 1); + void __rcu **slot; + + if (!__radix_tree_lookup(root, index, NULL, &slot)) + return NULL; + return slot; } EXPORT_SYMBOL(radix_tree_lookup_slot); @@ -492,66 +814,213 @@ EXPORT_SYMBOL(radix_tree_lookup_slot); * them safely). 
No RCU barriers are required to access or modify the * returned item, however. */ -void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index) +void *radix_tree_lookup(const struct radix_tree_root *root, unsigned long index) { - return radix_tree_lookup_element(root, index, 0); + return __radix_tree_lookup(root, index, NULL, NULL); } EXPORT_SYMBOL(radix_tree_lookup); +static void replace_slot(void __rcu **slot, void *item, + struct radix_tree_node *node, int count, int values) +{ + if (node && (count || values)) { + node->count += count; + node->nr_values += values; + } + + rcu_assign_pointer(*slot, item); +} + +static bool node_tag_get(const struct radix_tree_root *root, + const struct radix_tree_node *node, + unsigned int tag, unsigned int offset) +{ + if (node) + return tag_get(node, tag, offset); + return root_tag_get(root, tag); +} + +/* + * IDR users want to be able to store NULL in the tree, so if the slot isn't + * free, don't adjust the count, even if it's transitioning between NULL and + * non-NULL. For the IDA, we mark slots as being IDR_FREE while they still + * have empty bits, but it only stores NULL in slots when they're being + * deleted. + */ +static int calculate_count(struct radix_tree_root *root, + struct radix_tree_node *node, void __rcu **slot, + void *item, void *old) +{ + if (is_idr(root)) { + unsigned offset = get_slot_offset(node, slot); + bool free = node_tag_get(root, node, IDR_FREE, offset); + if (!free) + return 0; + if (!old) + return 1; + } + return !!item - !!old; +} + +/** + * __radix_tree_replace - replace item in a slot + * @root: radix tree root + * @node: pointer to tree node + * @slot: pointer to slot in @node + * @item: new item to store in the slot. + * + * For use with __radix_tree_lookup(). Caller must hold tree write locked + * across slot lookup and replacement. + */ +void __radix_tree_replace(struct radix_tree_root *root, + struct radix_tree_node *node, + void __rcu **slot, void *item) +{ + void *old = rcu_dereference_raw(*slot); + int values = !!xa_is_value(item) - !!xa_is_value(old); + int count = calculate_count(root, node, slot, item, old); + + /* + * This function supports replacing value entries and + * deleting entries, but that needs accounting against the + * node unless the slot is root->xa_head. + */ + WARN_ON_ONCE(!node && (slot != (void __rcu **)&root->xa_head) && + (count || values)); + replace_slot(slot, item, node, count, values); + + if (!node) + return; + + delete_node(root, node); +} + +/** + * radix_tree_replace_slot - replace item in a slot + * @root: radix tree root + * @slot: pointer to slot + * @item: new item to store in the slot. + * + * For use with radix_tree_lookup_slot() and + * radix_tree_gang_lookup_tag_slot(). Caller must hold tree write locked + * across slot lookup and replacement. + * + * NOTE: This cannot be used to switch between non-entries (empty slots), + * regular entries, and value entries, as that requires accounting + * inside the radix tree node. When switching from one type of entry or + * deleting, use __radix_tree_lookup() and __radix_tree_replace() or + * radix_tree_iter_replace(). + */ +void radix_tree_replace_slot(struct radix_tree_root *root, + void __rcu **slot, void *item) +{ + __radix_tree_replace(root, NULL, slot, item); +} +EXPORT_SYMBOL(radix_tree_replace_slot); + +/** + * radix_tree_iter_replace - replace item in a slot + * @root: radix tree root + * @iter: iterator state + * @slot: pointer to slot + * @item: new item to store in the slot. 
+ * + * For use with radix_tree_for_each_slot(). + * Caller must hold tree write locked. + */ +void radix_tree_iter_replace(struct radix_tree_root *root, + const struct radix_tree_iter *iter, + void __rcu **slot, void *item) +{ + __radix_tree_replace(root, iter->node, slot, item); +} + +static void node_tag_set(struct radix_tree_root *root, + struct radix_tree_node *node, + unsigned int tag, unsigned int offset) +{ + while (node) { + if (tag_get(node, tag, offset)) + return; + tag_set(node, tag, offset); + offset = node->offset; + node = node->parent; + } + + if (!root_tag_get(root, tag)) + root_tag_set(root, tag); +} + /** * radix_tree_tag_set - set a tag on a radix tree node * @root: radix tree root * @index: index key - * @tag: tag index + * @tag: tag index * * Set the search tag (which must be < RADIX_TREE_MAX_TAGS) * corresponding to @index in the radix tree. From * the root all the way down to the leaf node. * - * Returns the address of the tagged item. Setting a tag on a not-present + * Returns the address of the tagged item. Setting a tag on a not-present * item is a bug. */ void *radix_tree_tag_set(struct radix_tree_root *root, unsigned long index, unsigned int tag) { - unsigned int height, shift; - struct radix_tree_node *slot; + struct radix_tree_node *node, *parent; + unsigned long maxindex; - height = root->height; - BUG_ON(index > radix_tree_maxindex(height)); + radix_tree_load_root(root, &node, &maxindex); + BUG_ON(index > maxindex); - slot = indirect_to_ptr(root->rnode); - shift = (height - 1) * RADIX_TREE_MAP_SHIFT; + while (radix_tree_is_internal_node(node)) { + unsigned offset; - while (height > 0) { - int offset; + parent = entry_to_node(node); + offset = radix_tree_descend(parent, &node, index); + BUG_ON(!node); - offset = (index >> shift) & RADIX_TREE_MAP_MASK; - if (!tag_get(slot, tag, offset)) - tag_set(slot, tag, offset); - slot = slot->slots[offset]; - BUG_ON(slot == NULL); - shift -= RADIX_TREE_MAP_SHIFT; - height--; + if (!tag_get(parent, tag, offset)) + tag_set(parent, tag, offset); } /* set the root's tag bit */ - if (slot && !root_tag_get(root, tag)) + if (!root_tag_get(root, tag)) root_tag_set(root, tag); - return slot; + return node; } EXPORT_SYMBOL(radix_tree_tag_set); +static void node_tag_clear(struct radix_tree_root *root, + struct radix_tree_node *node, + unsigned int tag, unsigned int offset) +{ + while (node) { + if (!tag_get(node, tag, offset)) + return; + tag_clear(node, tag, offset); + if (any_tag_set(node, tag)) + return; + + offset = node->offset; + node = node->parent; + } + + /* clear the root's tag bit */ + if (root_tag_get(root, tag)) + root_tag_clear(root, tag); +} + /** * radix_tree_tag_clear - clear a tag on a radix tree node * @root: radix tree root * @index: index key - * @tag: tag index + * @tag: tag index * * Clear the search tag (which must be < RADIX_TREE_MAX_TAGS) - * corresponding to @index in the radix tree. If - * this causes the leaf node to have no tags set then clear the tag in the + * corresponding to @index in the radix tree. If this causes + * the leaf node to have no tags set then clear the tag in the * next-to-leaf node, etc. * * Returns the address of the tagged item on success, else NULL. 
ie: @@ -560,57 +1029,45 @@ EXPORT_SYMBOL(radix_tree_tag_set); void *radix_tree_tag_clear(struct radix_tree_root *root, unsigned long index, unsigned int tag) { - struct radix_tree_node *node = NULL; - struct radix_tree_node *slot = NULL; - unsigned int height, shift; - int uninitialized_var(offset); - - height = root->height; - if (index > radix_tree_maxindex(height)) - goto out; - - shift = height * RADIX_TREE_MAP_SHIFT; - slot = indirect_to_ptr(root->rnode); - - while (shift) { - if (slot == NULL) - goto out; + struct radix_tree_node *node, *parent; + unsigned long maxindex; + int offset = 0; - shift -= RADIX_TREE_MAP_SHIFT; - offset = (index >> shift) & RADIX_TREE_MAP_MASK; - node = slot; - slot = slot->slots[offset]; - } - - if (slot == NULL) - goto out; + radix_tree_load_root(root, &node, &maxindex); + if (index > maxindex) + return NULL; - while (node) { - if (!tag_get(node, tag, offset)) - goto out; - tag_clear(node, tag, offset); - if (any_tag_set(node, tag)) - goto out; + parent = NULL; - index >>= RADIX_TREE_MAP_SHIFT; - offset = index & RADIX_TREE_MAP_MASK; - node = node->parent; + while (radix_tree_is_internal_node(node)) { + parent = entry_to_node(node); + offset = radix_tree_descend(parent, &node, index); } - /* clear the root's tag bit */ - if (root_tag_get(root, tag)) - root_tag_clear(root, tag); + if (node) + node_tag_clear(root, parent, tag, offset); -out: - return slot; + return node; } EXPORT_SYMBOL(radix_tree_tag_clear); /** + * radix_tree_iter_tag_clear - clear a tag on the current iterator entry + * @root: radix tree root + * @iter: iterator state + * @tag: tag to clear + */ +void radix_tree_iter_tag_clear(struct radix_tree_root *root, + const struct radix_tree_iter *iter, unsigned int tag) +{ + node_tag_clear(root, iter->node, tag, iter_offset(iter)); +} + +/** * radix_tree_tag_get - get a tag on a radix tree node * @root: radix tree root * @index: index key - * @tag: tag index (< RADIX_TREE_MAX_TAGS) + * @tag: tag index (< RADIX_TREE_MAX_TAGS) * * Return values: * @@ -621,47 +1078,70 @@ EXPORT_SYMBOL(radix_tree_tag_clear); * the RCU lock is held, unless tag modification and node deletion are excluded * from concurrency. 
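A sketch of how the tag operations compose, using tag 0 on a hypothetical my_tree whose index 5 is already populated (setting a tag on an absent entry is a bug, per the comment above):

static void tag_demo(void)
{
        radix_tree_tag_set(&my_tree, 5, 0);             /* tags leaf up to root */
        WARN_ON(!radix_tree_tag_get(&my_tree, 5, 0));
        WARN_ON(!radix_tree_tagged(&my_tree, 0));       /* root tag summarises */
        radix_tree_tag_clear(&my_tree, 5, 0);           /* untags back up the path */
}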
*/ -int radix_tree_tag_get(struct radix_tree_root *root, +int radix_tree_tag_get(const struct radix_tree_root *root, unsigned long index, unsigned int tag) { - unsigned int height, shift; - struct radix_tree_node *node; + struct radix_tree_node *node, *parent; + unsigned long maxindex; - /* check the root's tag bit */ if (!root_tag_get(root, tag)) return 0; - node = rcu_dereference_raw(root->rnode); - if (node == NULL) + radix_tree_load_root(root, &node, &maxindex); + if (index > maxindex) return 0; - if (!radix_tree_is_indirect_ptr(node)) - return (index == 0); - node = indirect_to_ptr(node); + while (radix_tree_is_internal_node(node)) { + unsigned offset; - height = node->height; - if (index > radix_tree_maxindex(height)) - return 0; + parent = entry_to_node(node); + offset = radix_tree_descend(parent, &node, index); + + if (!tag_get(parent, tag, offset)) + return 0; + if (node == RADIX_TREE_RETRY) + break; + } + + return 1; +} +EXPORT_SYMBOL(radix_tree_tag_get); - shift = (height - 1) * RADIX_TREE_MAP_SHIFT; +/* Construct iter->tags bit-mask from node->tags[tag] array */ +static void set_iter_tags(struct radix_tree_iter *iter, + struct radix_tree_node *node, unsigned offset, + unsigned tag) +{ + unsigned tag_long = offset / BITS_PER_LONG; + unsigned tag_bit = offset % BITS_PER_LONG; - for ( ; ; ) { - int offset; + if (!node) { + iter->tags = 1; + return; + } - if (node == NULL) - return 0; + iter->tags = node->tags[tag][tag_long] >> tag_bit; - offset = (index >> shift) & RADIX_TREE_MAP_MASK; - if (!tag_get(node, tag, offset)) - return 0; - if (height == 1) - return 1; - node = rcu_dereference_raw(node->slots[offset]); - shift -= RADIX_TREE_MAP_SHIFT; - height--; + /* This never happens if RADIX_TREE_TAG_LONGS == 1 */ + if (tag_long < RADIX_TREE_TAG_LONGS - 1) { + /* Pick tags from next element */ + if (tag_bit) + iter->tags |= node->tags[tag][tag_long + 1] << + (BITS_PER_LONG - tag_bit); + /* Clip chunk size, here only BITS_PER_LONG tags */ + iter->next_index = __radix_tree_iter_add(iter, BITS_PER_LONG); } } -EXPORT_SYMBOL(radix_tree_tag_get); + +void __rcu **radix_tree_iter_resume(void __rcu **slot, + struct radix_tree_iter *iter) +{ + iter->index = __radix_tree_iter_add(iter, 1); + iter->next_index = iter->index; + iter->tags = 0; + return NULL; +} +EXPORT_SYMBOL(radix_tree_iter_resume); /** * radix_tree_next_chunk - find next chunk of slots for iteration @@ -671,12 +1151,12 @@ EXPORT_SYMBOL(radix_tree_tag_get); * @flags: RADIX_TREE_ITER_* flags and tag index * Returns: pointer to chunk first slot, or NULL if iteration is over */ -void **radix_tree_next_chunk(struct radix_tree_root *root, +void __rcu **radix_tree_next_chunk(const struct radix_tree_root *root, struct radix_tree_iter *iter, unsigned flags) { - unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK; - struct radix_tree_node *rnode, *node; - unsigned long index, offset; + unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK; + struct radix_tree_node *node, *child; + unsigned long index, offset, maxindex; if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag)) return NULL; @@ -688,303 +1168,77 @@ void **radix_tree_next_chunk(struct radix_tree_root *root, * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG. * * This condition also used by radix_tree_next_slot() to stop - * contiguous iterating, and forbid swithing to the next chunk. + * contiguous iterating, and forbid switching to the next chunk. 
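radix_tree_next_chunk() is the engine behind the iteration macros; a typical RCU-side consumer looks roughly like this (my_tree is hypothetical):

static void iterate_demo(void)
{
        struct radix_tree_iter iter;
        void __rcu **slot;

        rcu_read_lock();
        radix_tree_for_each_slot(slot, &my_tree, &iter, 0) {
                void *entry = radix_tree_deref_slot(slot);

                if (radix_tree_deref_retry(entry)) {
                        slot = radix_tree_iter_retry(&iter);
                        continue;       /* raced with a shrink; retry the chunk */
                }
                pr_info("index %lu -> %p\n", iter.index, entry);
        }
        rcu_read_unlock();
}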
*/ index = iter->next_index; if (!index && iter->index) return NULL; - rnode = rcu_dereference_raw(root->rnode); - if (radix_tree_is_indirect_ptr(rnode)) { - rnode = indirect_to_ptr(rnode); - } else if (rnode && !index) { - /* Single-slot tree */ - iter->index = 0; - iter->next_index = 1; - iter->tags = 1; - return (void **)&root->rnode; - } else + restart: + radix_tree_load_root(root, &child, &maxindex); + if (index > maxindex) + return NULL; + if (!child) return NULL; -restart: - shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT; - offset = index >> shift; + if (!radix_tree_is_internal_node(child)) { + /* Single-slot tree */ + iter->index = index; + iter->next_index = maxindex + 1; + iter->tags = 1; + iter->node = NULL; + return (void __rcu **)&root->xa_head; + } - /* Index outside of the tree */ - if (offset >= RADIX_TREE_MAP_SIZE) - return NULL; + do { + node = entry_to_node(child); + offset = radix_tree_descend(node, &child, index); - node = rnode; - while (1) { if ((flags & RADIX_TREE_ITER_TAGGED) ? - !test_bit(offset, node->tags[tag]) : - !node->slots[offset]) { + !tag_get(node, tag, offset) : !child) { /* Hole detected */ if (flags & RADIX_TREE_ITER_CONTIG) return NULL; if (flags & RADIX_TREE_ITER_TAGGED) - offset = radix_tree_find_next_bit( - node->tags[tag], - RADIX_TREE_MAP_SIZE, + offset = radix_tree_find_next_bit(node, tag, offset + 1); else while (++offset < RADIX_TREE_MAP_SIZE) { - if (node->slots[offset]) + void *slot = rcu_dereference_raw( + node->slots[offset]); + if (slot) break; } - index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1); - index += offset << shift; + index &= ~node_maxindex(node); + index += offset << node->shift; /* Overflow after ~0UL */ if (!index) return NULL; if (offset == RADIX_TREE_MAP_SIZE) goto restart; + child = rcu_dereference_raw(node->slots[offset]); } - /* This is leaf-node */ - if (!shift) - break; - - node = rcu_dereference_raw(node->slots[offset]); - if (node == NULL) + if (!child) goto restart; - shift -= RADIX_TREE_MAP_SHIFT; - offset = (index >> shift) & RADIX_TREE_MAP_MASK; - } + if (child == RADIX_TREE_RETRY) + break; + } while (node->shift && radix_tree_is_internal_node(child)); /* Update the iterator state */ - iter->index = index; - iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1; - - /* Construct iter->tags bit-mask from node->tags[tag] array */ - if (flags & RADIX_TREE_ITER_TAGGED) { - unsigned tag_long, tag_bit; - - tag_long = offset / BITS_PER_LONG; - tag_bit = offset % BITS_PER_LONG; - iter->tags = node->tags[tag][tag_long] >> tag_bit; - /* This never happens if RADIX_TREE_TAG_LONGS == 1 */ - if (tag_long < RADIX_TREE_TAG_LONGS - 1) { - /* Pick tags from next element */ - if (tag_bit) - iter->tags |= node->tags[tag][tag_long + 1] << - (BITS_PER_LONG - tag_bit); - /* Clip chunk size, here only BITS_PER_LONG tags */ - iter->next_index = index + BITS_PER_LONG; - } - } + iter->index = (index &~ node_maxindex(node)) | offset; + iter->next_index = (index | node_maxindex(node)) + 1; + iter->node = node; + + if (flags & RADIX_TREE_ITER_TAGGED) + set_iter_tags(iter, node, offset, tag); return node->slots + offset; } EXPORT_SYMBOL(radix_tree_next_chunk); /** - * radix_tree_range_tag_if_tagged - for each item in given range set given - * tag if item has another tag set - * @root: radix tree root - * @first_indexp: pointer to a starting index of a range to scan - * @last_index: last index of a range to scan - * @nr_to_tag: maximum number items to tag - * @iftag: tag index to test - * @settag: tag index to set if tested tag is set - * - * 
This function scans range of radix tree from first_index to last_index - * (inclusive). For each item in the range if iftag is set, the function sets - * also settag. The function stops either after tagging nr_to_tag items or - * after reaching last_index. - * - * The tags must be set from the leaf level only and propagated back up the - * path to the root. We must do this so that we resolve the full path before - * setting any tags on intermediate nodes. If we set tags as we descend, then - * we can get to the leaf node and find that the index that has the iftag - * set is outside the range we are scanning. This reults in dangling tags and - * can lead to problems with later tag operations (e.g. livelocks on lookups). - * - * The function returns number of leaves where the tag was set and sets - * *first_indexp to the first unscanned index. - * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must - * be prepared to handle that. - */ -unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root, - unsigned long *first_indexp, unsigned long last_index, - unsigned long nr_to_tag, - unsigned int iftag, unsigned int settag) -{ - unsigned int height = root->height; - struct radix_tree_node *node = NULL; - struct radix_tree_node *slot; - unsigned int shift; - unsigned long tagged = 0; - unsigned long index = *first_indexp; - - last_index = min(last_index, radix_tree_maxindex(height)); - if (index > last_index) - return 0; - if (!nr_to_tag) - return 0; - if (!root_tag_get(root, iftag)) { - *first_indexp = last_index + 1; - return 0; - } - if (height == 0) { - *first_indexp = last_index + 1; - root_tag_set(root, settag); - return 1; - } - - shift = (height - 1) * RADIX_TREE_MAP_SHIFT; - slot = indirect_to_ptr(root->rnode); - - for (;;) { - unsigned long upindex; - int offset; - - offset = (index >> shift) & RADIX_TREE_MAP_MASK; - if (!slot->slots[offset]) - goto next; - if (!tag_get(slot, iftag, offset)) - goto next; - if (shift) { - /* Go down one level */ - shift -= RADIX_TREE_MAP_SHIFT; - node = slot; - slot = slot->slots[offset]; - continue; - } - - /* tag the leaf */ - tagged++; - tag_set(slot, settag, offset); - - /* walk back up the path tagging interior nodes */ - upindex = index; - while (node) { - upindex >>= RADIX_TREE_MAP_SHIFT; - offset = upindex & RADIX_TREE_MAP_MASK; - - /* stop if we find a node with the tag already set */ - if (tag_get(node, settag, offset)) - break; - tag_set(node, settag, offset); - node = node->parent; - } - - /* - * Small optimization: now clear that node pointer. - * Since all of this slot's ancestors now have the tag set - * from setting it above, we have no further need to walk - * back up the tree setting tags, until we update slot to - * point to another radix_tree_node. - */ - node = NULL; - -next: - /* Go to next item at level determined by 'shift' */ - index = ((index >> shift) + 1) << shift; - /* Overflow can happen when last_index is ~0UL... */ - if (index > last_index || !index) - break; - if (tagged >= nr_to_tag) - break; - while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) { - /* - * We've fully scanned this node. Go up. Because - * last_index is guaranteed to be in the tree, what - * we do below cannot wander astray. - */ - slot = slot->parent; - shift += RADIX_TREE_MAP_SHIFT; - } - } - /* - * We need not to tag the root tag if there is no tag which is set with - * settag within the range from *first_indexp to last_index. 
- */ - if (tagged > 0) - root_tag_set(root, settag); - *first_indexp = index; - - return tagged; -} -EXPORT_SYMBOL(radix_tree_range_tag_if_tagged); - - -/** - * radix_tree_next_hole - find the next hole (not-present entry) - * @root: tree root - * @index: index key - * @max_scan: maximum range to search - * - * Search the set [index, min(index+max_scan-1, MAX_INDEX)] for the lowest - * indexed hole. - * - * Returns: the index of the hole if found, otherwise returns an index - * outside of the set specified (in which case 'return - index >= max_scan' - * will be true). In rare cases of index wrap-around, 0 will be returned. - * - * radix_tree_next_hole may be called under rcu_read_lock. However, like - * radix_tree_gang_lookup, this will not atomically search a snapshot of - * the tree at a single point in time. For example, if a hole is created - * at index 5, then subsequently a hole is created at index 10, - * radix_tree_next_hole covering both indexes may return 10 if called - * under rcu_read_lock. - */ -unsigned long radix_tree_next_hole(struct radix_tree_root *root, - unsigned long index, unsigned long max_scan) -{ - unsigned long i; - - for (i = 0; i < max_scan; i++) { - if (!radix_tree_lookup(root, index)) - break; - index++; - if (index == 0) - break; - } - - return index; -} -EXPORT_SYMBOL(radix_tree_next_hole); - -/** - * radix_tree_prev_hole - find the prev hole (not-present entry) - * @root: tree root - * @index: index key - * @max_scan: maximum range to search - * - * Search backwards in the range [max(index-max_scan+1, 0), index] - * for the first hole. - * - * Returns: the index of the hole if found, otherwise returns an index - * outside of the set specified (in which case 'index - return >= max_scan' - * will be true). In rare cases of wrap-around, ULONG_MAX will be returned. - * - * radix_tree_next_hole may be called under rcu_read_lock. However, like - * radix_tree_gang_lookup, this will not atomically search a snapshot of - * the tree at a single point in time. For example, if a hole is created - * at index 10, then subsequently a hole is created at index 5, - * radix_tree_prev_hole covering both indexes may return 5 if called under - * rcu_read_lock. - */ -unsigned long radix_tree_prev_hole(struct radix_tree_root *root, - unsigned long index, unsigned long max_scan) -{ - unsigned long i; - - for (i = 0; i < max_scan; i++) { - if (!radix_tree_lookup(root, index)) - break; - index--; - if (index == ULONG_MAX) - break; - } - - return index; -} -EXPORT_SYMBOL(radix_tree_prev_hole); - -/** * radix_tree_gang_lookup - perform multiple lookup on a radix tree * @root: radix tree root * @results: where the results of the lookup are placed @@ -999,25 +1253,30 @@ EXPORT_SYMBOL(radix_tree_prev_hole); * * Like radix_tree_lookup, radix_tree_gang_lookup may be called under * rcu_read_lock. In this case, rather than the returned results being - * an atomic snapshot of the tree at a single point in time, the semantics - * of an RCU protected gang lookup are as though multiple radix_tree_lookups - * have been issued in individual locks, and results stored in 'results'. + * an atomic snapshot of the tree at a single point in time, the + * semantics of an RCU protected gang lookup are as though multiple + * radix_tree_lookups have been issued in individual locks, and results + * stored in 'results'. 
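A sketch of a gang-lookup caller under RCU, with the caveat above that the results are not an atomic snapshot (the results array is sized by the caller):

static unsigned int gang_demo(void **results, unsigned int max_items)
{
        unsigned int nr;

        rcu_read_lock();
        nr = radix_tree_gang_lookup(&my_tree, results, 0, max_items);
        rcu_read_unlock();
        return nr;
}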
*/ unsigned int -radix_tree_gang_lookup(struct radix_tree_root *root, void **results, +radix_tree_gang_lookup(const struct radix_tree_root *root, void **results, unsigned long first_index, unsigned int max_items) { struct radix_tree_iter iter; - void **slot; + void __rcu **slot; unsigned int ret = 0; if (unlikely(!max_items)) return 0; radix_tree_for_each_slot(slot, root, &iter, first_index) { - results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot)); + results[ret] = rcu_dereference_raw(*slot); if (!results[ret]) continue; + if (radix_tree_is_internal_node(results[ret])) { + slot = radix_tree_iter_retry(&iter); + continue; + } if (++ret == max_items) break; } @@ -1027,48 +1286,6 @@ radix_tree_gang_lookup(struct radix_tree_root *root, void **results, EXPORT_SYMBOL(radix_tree_gang_lookup); /** - * radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree - * @root: radix tree root - * @results: where the results of the lookup are placed - * @indices: where their indices should be placed (but usually NULL) - * @first_index: start the lookup from this key - * @max_items: place up to this many items at *results - * - * Performs an index-ascending scan of the tree for present items. Places - * their slots at *@results and returns the number of items which were - * placed at *@results. - * - * The implementation is naive. - * - * Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must - * be dereferenced with radix_tree_deref_slot, and if using only RCU - * protection, radix_tree_deref_slot may fail requiring a retry. - */ -unsigned int -radix_tree_gang_lookup_slot(struct radix_tree_root *root, - void ***results, unsigned long *indices, - unsigned long first_index, unsigned int max_items) -{ - struct radix_tree_iter iter; - void **slot; - unsigned int ret = 0; - - if (unlikely(!max_items)) - return 0; - - radix_tree_for_each_slot(slot, root, &iter, first_index) { - results[ret] = slot; - if (indices) - indices[ret] = iter.index; - if (++ret == max_items) - break; - } - - return ret; -} -EXPORT_SYMBOL(radix_tree_gang_lookup_slot); - -/** * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree * based on a tag * @root: radix tree root @@ -1082,21 +1299,25 @@ EXPORT_SYMBOL(radix_tree_gang_lookup_slot); * returns the number of items which were placed at *@results. */ unsigned int -radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results, +radix_tree_gang_lookup_tag(const struct radix_tree_root *root, void **results, unsigned long first_index, unsigned int max_items, unsigned int tag) { struct radix_tree_iter iter; - void **slot; + void __rcu **slot; unsigned int ret = 0; if (unlikely(!max_items)) return 0; radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) { - results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot)); + results[ret] = rcu_dereference_raw(*slot); if (!results[ret]) continue; + if (radix_tree_is_internal_node(results[ret])) { + slot = radix_tree_iter_retry(&iter); + continue; + } if (++ret == max_items) break; } @@ -1119,12 +1340,12 @@ EXPORT_SYMBOL(radix_tree_gang_lookup_tag); * returns the number of slots which were placed at *@results. 
*/ unsigned int -radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results, - unsigned long first_index, unsigned int max_items, - unsigned int tag) +radix_tree_gang_lookup_tag_slot(const struct radix_tree_root *root, + void __rcu ***results, unsigned long first_index, + unsigned int max_items, unsigned int tag) { struct radix_tree_iter iter; - void **slot; + void __rcu **slot; unsigned int ret = 0; if (unlikely(!max_items)) @@ -1140,250 +1361,90 @@ radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results, } EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot); -#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP) -#include <linux/sched.h> /* for cond_resched() */ +static bool __radix_tree_delete(struct radix_tree_root *root, + struct radix_tree_node *node, void __rcu **slot) +{ + void *old = rcu_dereference_raw(*slot); + int values = xa_is_value(old) ? -1 : 0; + unsigned offset = get_slot_offset(node, slot); + int tag; -/* - * This linear search is at present only useful to shmem_unuse_inode(). - */ -static unsigned long __locate(struct radix_tree_node *slot, void *item, - unsigned long index, unsigned long *found_index) -{ - unsigned int shift, height; - unsigned long i; - - height = slot->height; - shift = (height-1) * RADIX_TREE_MAP_SHIFT; - - for ( ; height > 1; height--) { - i = (index >> shift) & RADIX_TREE_MAP_MASK; - for (;;) { - if (slot->slots[i] != NULL) - break; - index &= ~((1UL << shift) - 1); - index += 1UL << shift; - if (index == 0) - goto out; /* 32-bit wraparound */ - i++; - if (i == RADIX_TREE_MAP_SIZE) - goto out; - } + if (is_idr(root)) + node_tag_set(root, node, IDR_FREE, offset); + else + for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) + node_tag_clear(root, node, tag, offset); - shift -= RADIX_TREE_MAP_SHIFT; - slot = rcu_dereference_raw(slot->slots[i]); - if (slot == NULL) - goto out; - } - - /* Bottom level: check items */ - for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) { - if (slot->slots[i] == item) { - *found_index = index + i; - index = 0; - goto out; - } - } - index += RADIX_TREE_MAP_SIZE; -out: - return index; + replace_slot(slot, NULL, node, -1, values); + return node && delete_node(root, node); } /** - * radix_tree_locate_item - search through radix tree for item - * @root: radix tree root - * @item: item to be found + * radix_tree_iter_delete - delete the entry at this iterator position + * @root: radix tree root + * @iter: iterator state + * @slot: pointer to slot * - * Returns index where item was found, or -1 if not found. - * Caller must hold no lock (since this time-consuming function needs - * to be preemptible), and must check afterwards if item is still there. + * Delete the entry at the position currently pointed to by the iterator. + * This may result in the current node being freed; if it is, the iterator + * is advanced so that it will not reference the freed memory. This + * function may be called without any locking if there are no other threads + * which can access this tree. 
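Deleting while iterating, as the comment permits when no other threads can access the tree; a sketch that empties a hypothetical tree:

static void prune_all(struct radix_tree_root *root)
{
        struct radix_tree_iter iter;
        void __rcu **slot;

        /* assumes exclusive access to @root */
        radix_tree_for_each_slot(slot, root, &iter, 0)
                radix_tree_iter_delete(root, &iter, slot);
}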
*/ -unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item) -{ - struct radix_tree_node *node; - unsigned long max_index; - unsigned long cur_index = 0; - unsigned long found_index = -1; - - do { - rcu_read_lock(); - node = rcu_dereference_raw(root->rnode); - if (!radix_tree_is_indirect_ptr(node)) { - rcu_read_unlock(); - if (node == item) - found_index = 0; - break; - } - - node = indirect_to_ptr(node); - max_index = radix_tree_maxindex(node->height); - if (cur_index > max_index) - break; - - cur_index = __locate(node, item, cur_index, &found_index); - rcu_read_unlock(); - cond_resched(); - } while (cur_index != 0 && cur_index <= max_index); - - return found_index; -} -#else -unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item) +void radix_tree_iter_delete(struct radix_tree_root *root, + struct radix_tree_iter *iter, void __rcu **slot) { - return -1; + if (__radix_tree_delete(root, iter->node, slot)) + iter->index = iter->next_index; } -#endif /* CONFIG_SHMEM && CONFIG_SWAP */ +EXPORT_SYMBOL(radix_tree_iter_delete); /** - * radix_tree_shrink - shrink height of a radix tree to minimal - * @root radix tree root + * radix_tree_delete_item - delete an item from a radix tree + * @root: radix tree root + * @index: index key + * @item: expected item + * + * Remove @item at @index from the radix tree rooted at @root. + * + * Return: the deleted entry, or %NULL if it was not present + * or the entry at the given @index was not @item. */ -static inline void radix_tree_shrink(struct radix_tree_root *root) +void *radix_tree_delete_item(struct radix_tree_root *root, + unsigned long index, void *item) { - /* try to shrink tree height */ - while (root->height > 0) { - struct radix_tree_node *to_free = root->rnode; - struct radix_tree_node *slot; + struct radix_tree_node *node = NULL; + void __rcu **slot = NULL; + void *entry; - BUG_ON(!radix_tree_is_indirect_ptr(to_free)); - to_free = indirect_to_ptr(to_free); + entry = __radix_tree_lookup(root, index, &node, &slot); + if (!slot) + return NULL; + if (!entry && (!is_idr(root) || node_tag_get(root, node, IDR_FREE, + get_slot_offset(node, slot)))) + return NULL; - /* - * The candidate node has more than one child, or its child - * is not at the leftmost slot, we cannot shrink. - */ - if (to_free->count != 1) - break; - if (!to_free->slots[0]) - break; + if (item && entry != item) + return NULL; - /* - * We don't need rcu_assign_pointer(), since we are simply - * moving the node from one part of the tree to another: if it - * was safe to dereference the old pointer to it - * (to_free->slots[0]), it will be safe to dereference the new - * one (root->rnode) as far as dependent read barriers go. - */ - slot = to_free->slots[0]; - if (root->height > 1) { - slot->parent = NULL; - slot = ptr_to_indirect(slot); - } - root->rnode = slot; - root->height--; + __radix_tree_delete(root, node, slot); - /* - * We have a dilemma here. The node's slot[0] must not be - * NULLed in case there are concurrent lookups expecting to - * find the item. However if this was a bottom-level node, - * then it may be subject to the slot pointer being visible - * to callers dereferencing it. If item corresponding to - * slot[0] is subsequently deleted, these callers would expect - * their slot to become empty sooner or later. - * - * For example, lockless pagecache will look up a slot, deref - * the page pointer, and if the page is 0 refcount it means it - * was concurrently deleted from pagecache so try the deref - * again. 
Fortunately there is already a requirement for logic - * to retry the entire slot lookup -- the indirect pointer - * problem (replacing direct root node with an indirect pointer - * also results in a stale slot). So tag the slot as indirect - * to force callers to retry. - */ - if (root->height == 0) - *((unsigned long *)&to_free->slots[0]) |= - RADIX_TREE_INDIRECT_PTR; - - radix_tree_node_free(to_free); - } + return entry; } +EXPORT_SYMBOL(radix_tree_delete_item); /** - * radix_tree_delete - delete an item from a radix tree - * @root: radix tree root - * @index: index key + * radix_tree_delete - delete an entry from a radix tree + * @root: radix tree root + * @index: index key * - * Remove the item at @index from the radix tree rooted at @root. + * Remove the entry at @index from the radix tree rooted at @root. * - * Returns the address of the deleted item, or NULL if it was not present. + * Return: The deleted entry, or %NULL if it was not present. */ void *radix_tree_delete(struct radix_tree_root *root, unsigned long index) { - struct radix_tree_node *node = NULL; - struct radix_tree_node *slot = NULL; - struct radix_tree_node *to_free; - unsigned int height, shift; - int tag; - int uninitialized_var(offset); - - height = root->height; - if (index > radix_tree_maxindex(height)) - goto out; - - slot = root->rnode; - if (height == 0) { - root_tag_clear_all(root); - root->rnode = NULL; - goto out; - } - slot = indirect_to_ptr(slot); - shift = height * RADIX_TREE_MAP_SHIFT; - - do { - if (slot == NULL) - goto out; - - shift -= RADIX_TREE_MAP_SHIFT; - offset = (index >> shift) & RADIX_TREE_MAP_MASK; - node = slot; - slot = slot->slots[offset]; - } while (shift); - - if (slot == NULL) - goto out; - - /* - * Clear all tags associated with the item to be deleted. - * This way of doing it would be inefficient, but seldom is any set. - */ - for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) { - if (tag_get(node, tag, offset)) - radix_tree_tag_clear(root, index, tag); - } - - to_free = NULL; - /* Now free the nodes we do not need anymore */ - while (node) { - node->slots[offset] = NULL; - node->count--; - /* - * Queue the node for deferred freeing after the - * last reference to it disappears (set NULL, above). - */ - if (to_free) - radix_tree_node_free(to_free); - - if (node->count) { - if (node == indirect_to_ptr(root->rnode)) - radix_tree_shrink(root); - goto out; - } - - /* Node with zero slots in use so free it */ - to_free = node; - - index >>= RADIX_TREE_MAP_SHIFT; - offset = index & RADIX_TREE_MAP_MASK; - node = node->parent; - } - - root_tag_clear_all(root); - root->height = 0; - root->rnode = NULL; - if (to_free) - radix_tree_node_free(to_free); - -out: - return slot; + return radix_tree_delete_item(root, index, NULL); } EXPORT_SYMBOL(radix_tree_delete); @@ -1392,64 +1453,156 @@ EXPORT_SYMBOL(radix_tree_delete); * @root: radix tree root * @tag: tag to test */ -int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag) +int radix_tree_tagged(const struct radix_tree_root *root, unsigned int tag) { return root_tag_get(root, tag); } EXPORT_SYMBOL(radix_tree_tagged); -static void -radix_tree_node_ctor(void *node) +/** + * idr_preload - preload for idr_alloc() + * @gfp_mask: allocation mask to use for preloading + * + * Preallocate memory to use for the next call to idr_alloc(). This function + * returns with preemption disabled. It will be enabled by idr_preload_end(). 
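+ *
+ * A sketch of the expected calling sequence (the lock, IDR and id
+ * names are illustrative):
+ *
+ *	idr_preload(GFP_KERNEL);
+ *	spin_lock(&my_lock);
+ *	id = idr_alloc(&my_idr, ptr, 0, 0, GFP_NOWAIT);
+ *	spin_unlock(&my_lock);
+ *	idr_preload_end();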
+ */ +void idr_preload(gfp_t gfp_mask) { - memset(node, 0, sizeof(struct radix_tree_node)); + if (__radix_tree_preload(gfp_mask, IDR_PRELOAD_SIZE)) + local_lock(&radix_tree_preloads.lock); } +EXPORT_SYMBOL(idr_preload); -static __init unsigned long __maxindex(unsigned int height) +void __rcu **idr_get_free(struct radix_tree_root *root, + struct radix_tree_iter *iter, gfp_t gfp, + unsigned long max) { - unsigned int width = height * RADIX_TREE_MAP_SHIFT; - int shift = RADIX_TREE_INDEX_BITS - width; + struct radix_tree_node *node = NULL, *child; + void __rcu **slot = (void __rcu **)&root->xa_head; + unsigned long maxindex, start = iter->next_index; + unsigned int shift, offset = 0; + + grow: + shift = radix_tree_load_root(root, &child, &maxindex); + if (!radix_tree_tagged(root, IDR_FREE)) + start = max(start, maxindex + 1); + if (start > max) + return ERR_PTR(-ENOSPC); + + if (start > maxindex) { + int error = radix_tree_extend(root, gfp, start, shift); + if (error < 0) + return ERR_PTR(error); + shift = error; + child = rcu_dereference_raw(root->xa_head); + } + if (start == 0 && shift == 0) + shift = RADIX_TREE_MAP_SHIFT; - if (shift < 0) - return ~0UL; - if (shift >= BITS_PER_LONG) - return 0UL; - return ~0UL >> shift; + while (shift) { + shift -= RADIX_TREE_MAP_SHIFT; + if (child == NULL) { + /* Have to add a child node. */ + child = radix_tree_node_alloc(gfp, node, root, shift, + offset, 0, 0); + if (!child) + return ERR_PTR(-ENOMEM); + all_tag_set(child, IDR_FREE); + rcu_assign_pointer(*slot, node_to_entry(child)); + if (node) + node->count++; + } else if (!radix_tree_is_internal_node(child)) + break; + + node = entry_to_node(child); + offset = radix_tree_descend(node, &child, start); + if (!tag_get(node, IDR_FREE, offset)) { + offset = radix_tree_find_next_bit(node, IDR_FREE, + offset + 1); + start = next_index(start, node, offset); + if (start > max || start == 0) + return ERR_PTR(-ENOSPC); + while (offset == RADIX_TREE_MAP_SIZE) { + offset = node->offset + 1; + node = node->parent; + if (!node) + goto grow; + shift = node->shift; + } + child = rcu_dereference_raw(node->slots[offset]); + } + slot = &node->slots[offset]; + } + + iter->index = start; + if (node) + iter->next_index = 1 + min(max, (start | node_maxindex(node))); + else + iter->next_index = 1; + iter->node = node; + set_iter_tags(iter, node, offset, IDR_FREE); + + return slot; +} + +/** + * idr_destroy - release all internal memory from an IDR + * @idr: idr handle + * + * After this function is called, the IDR is empty, and may be reused or + * the data structure containing it may be freed. + * + * A typical clean-up sequence for objects stored in an idr tree will use + * idr_for_each() to free all objects, if necessary, then idr_destroy() to + * free the memory used to keep track of those objects. 
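+ *
+ * For example (the callback and IDR names are illustrative):
+ *
+ *	idr_for_each(&my_idr, free_my_object, NULL);
+ *	idr_destroy(&my_idr);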
+ */
+void idr_destroy(struct idr *idr)
+{
+	struct radix_tree_node *node = rcu_dereference_raw(idr->idr_rt.xa_head);
+	if (radix_tree_is_internal_node(node))
+		radix_tree_free_nodes(node);
+	idr->idr_rt.xa_head = NULL;
+	root_tag_set(&idr->idr_rt, IDR_FREE);
 }
+EXPORT_SYMBOL(idr_destroy);
 
-static __init void radix_tree_init_maxindex(void)
+static void
+radix_tree_node_ctor(void *arg)
 {
-	unsigned int i;
+	struct radix_tree_node *node = arg;
 
-	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
-		height_to_maxindex[i] = __maxindex(i);
+	memset(node, 0, sizeof(*node));
+	INIT_LIST_HEAD(&node->private_list);
 }
 
-static int radix_tree_callback(struct notifier_block *nfb,
-			       unsigned long action,
-			       void *hcpu)
+static int radix_tree_cpu_dead(unsigned int cpu)
 {
-	int cpu = (long)hcpu;
-	struct radix_tree_preload *rtp;
+	struct radix_tree_preload *rtp;
+	struct radix_tree_node *node;
 
-	/* Free per-cpu pool of perloaded nodes */
-	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
-		rtp = &per_cpu(radix_tree_preloads, cpu);
-		while (rtp->nr) {
-			kmem_cache_free(radix_tree_node_cachep,
-					rtp->nodes[rtp->nr-1]);
-			rtp->nodes[rtp->nr-1] = NULL;
-			rtp->nr--;
-		}
-	}
-	return NOTIFY_OK;
+	/* Free per-cpu pool of preloaded nodes */
+	rtp = &per_cpu(radix_tree_preloads, cpu);
+	while (rtp->nr) {
+		node = rtp->nodes;
+		rtp->nodes = node->parent;
+		kmem_cache_free(radix_tree_node_cachep, node);
+		rtp->nr--;
+	}
+	return 0;
 }
 
 void __init radix_tree_init(void)
 {
+	int ret;
+
+	BUILD_BUG_ON(RADIX_TREE_MAX_TAGS + __GFP_BITS_SHIFT > 32);
+	BUILD_BUG_ON(ROOT_IS_IDR & ~GFP_ZONEMASK);
+	BUILD_BUG_ON(XA_CHUNK_SIZE > 255);
 	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
 			sizeof(struct radix_tree_node), 0,
 			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
 			radix_tree_node_ctor);
-	radix_tree_init_maxindex();
-	hotcpu_notifier(radix_tree_callback, 0);
+	ret = cpuhp_setup_state_nocalls(CPUHP_RADIX_DEAD, "lib/radix:dead",
+					NULL, radix_tree_cpu_dead);
+	WARN_ON(ret < 0);
 }
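
A usage note on radix_tree_delete_item() from the hunks above: passing an expected @item makes the deletion conditional, so a caller that loses a race does not free another thread's entry. A minimal sketch under the caller's own lock (the lock, tree, index and object names are illustrative, not from this patch):

	void *entry;

	spin_lock(&my_lock);
	/* Remove index 42 only if it still contains my_object. */
	entry = radix_tree_delete_item(&my_tree, 42, my_object);
	spin_unlock(&my_lock);

	if (entry == my_object)
		kfree(my_object);	/* we won the race; reclaim it */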
