/*
 * Copyright (C) 2013 Konstantin Khlebnikov
 * Copyright (c) 2013 Luis R. Rodriguez <mcgrof@do-not-panic.com>
 *
 * Backports radix_tree_next_chunk()
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>

#ifdef __KERNEL__
#define RADIX_TREE_MAP_SHIFT	(CONFIG_BASE_SMALL ? 4 : 6)
#else
#define RADIX_TREE_MAP_SHIFT	3	/* For more stressful testing */
#endif

#define RADIX_TREE_MAP_SIZE	(1UL << RADIX_TREE_MAP_SHIFT)
#define RADIX_TREE_MAP_MASK	(RADIX_TREE_MAP_SIZE-1)

#define RADIX_TREE_TAG_LONGS	\
	((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG)

struct radix_tree_node {
	unsigned int	height;		/* Height from the bottom */
	unsigned int	count;
	union {
		struct radix_tree_node *parent;	/* Used when ascending tree */
		struct rcu_head	rcu_head;	/* Used when freeing node */
	};
	void __rcu	*slots[RADIX_TREE_MAP_SIZE];
	unsigned long	tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS];
};

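/*
 * The root can point either directly at a single data item or at a
 * struct radix_tree_node.  The RADIX_TREE_INDIRECT_PTR bit stored in the
 * pointer value distinguishes the two cases; these helpers set and strip
 * that bit.
 */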
static inline void *ptr_to_indirect(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
}

static inline void *indirect_to_ptr(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
}

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

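/*
 * Per-root tags are packed into the bits of root->gfp_mask above
 * __GFP_BITS_SHIFT, so they need no extra storage in the root; the low
 * bits keep the allocation flags returned by root_gfp_mask() above.
 */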
static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	int idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}

/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bit number to start searching at
 *
 * Unrollable variant of find_next_bit() for constant-size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns the next bit offset, or size if nothing is found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}
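
/*
 * For reference: radix_tree_next_chunk() below calls this helper with the
 * compile-time constant RADIX_TREE_MAP_SIZE as @size, which is what lets
 * the compiler unroll the word loop above:
 *
 *	offset = radix_tree_find_next_bit(node->tags[tag],
 *					  RADIX_TREE_MAP_SIZE, offset + 1);
 */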

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root: radix tree root
 * @iter: iterator state
 * @flags: RADIX_TREE_ITER_* flags and tag index
 * Returns: pointer to the chunk's first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *rnode, *node;
	unsigned long index, offset;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * while iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating and to forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

	rnode = rcu_dereference_raw(root->rnode);
	if (radix_tree_is_indirect_ptr(rnode)) {
		rnode = indirect_to_ptr(rnode);
	} else if (rnode && !index) {
		/* Single-slot tree */
		iter->index = 0;
		iter->next_index = 1;
		iter->tags = 1;
		return (void **)&root->rnode;
	} else
		return NULL;

restart:
	shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT;
	offset = index >> shift;

	/* Index outside of the tree */
	if (offset >= RADIX_TREE_MAP_SIZE)
		return NULL;

	node = rnode;
	while (1) {
		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!test_bit(offset, node->tags[tag]) :
				!node->slots[offset]) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					if (node->slots[offset])
						break;
				}
			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
			index += offset << shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
		}

		/* This is a leaf node */
		if (!shift)
			break;

		node = rcu_dereference_raw(node->slots[offset]);
		if (node == NULL)
			goto restart;
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
	}

	/* Update the iterator state */
	iter->index = index;
	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL_GPL(radix_tree_next_chunk);
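
/*
 * Minimal usage sketch (not part of this backport): callers normally reach
 * radix_tree_next_chunk() through the iterator macros declared in
 * <linux/radix-tree.h> rather than calling it directly, for example:
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	radix_tree_for_each_slot(slot, root, &iter, start)
 *		process(radix_tree_deref_slot(slot));
 *
 * Here "process" and "start" stand in for the caller's per-item handler
 * and the first index of interest.
 */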