Diffstat (limited to 'lib/test_maple_tree.c')
 -rw-r--r--   lib/test_maple_tree.c   1739
 1 file changed, 1470 insertions(+), 269 deletions(-)
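
For readers skimming the patch: the first hunk below adds a fallback MT_BUG_ON() that counts every assertion and reports failures without halting the suite when CONFIG_DEBUG_MAPLE_TREE is off. A minimal userspace analogue of that counting pattern (an illustration only, not the kernel macro; the names and C11 atomics are stand-ins for the kernel's atomic_t):

#include <stdatomic.h>
#include <stdio.h>

static atomic_uint tests_run;
static atomic_uint tests_passed;

/* Count every check; report failures but keep the suite running. */
#define MT_BUG_ON(cond) do {						\
	atomic_fetch_add(&tests_run, 1);				\
	if (cond)							\
		fprintf(stderr, "BUG at %s:%d\n", __func__, __LINE__);	\
	else								\
		atomic_fetch_add(&tests_passed, 1);			\
} while (0)

int main(void)
{
	MT_BUG_ON(1 + 1 != 2);	/* passes */
	MT_BUG_ON(1 + 1 == 2);	/* reported, but execution continues */
	printf("Pass: %u Run: %u\n",
	       atomic_load(&tests_passed), atomic_load(&tests_run));
	return 0;
}

The kernel version in the hunk additionally prints the running pass/run totals when a check fails, as shown below.
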
diff --git a/lib/test_maple_tree.c b/lib/test_maple_tree.c index 497fc93ccf9e..a182e48b5f5e 100644 --- a/lib/test_maple_tree.c +++ b/lib/test_maple_tree.c @@ -9,20 +9,45 @@ #include <linux/maple_tree.h> #include <linux/module.h> +#include <linux/rwsem.h> #define MTREE_ALLOC_MAX 0x2000000000000Ul -#ifndef CONFIG_DEBUG_MAPLE_TREE -#define CONFIG_DEBUG_MAPLE_TREE -#endif #define CONFIG_MAPLE_SEARCH #define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31) +#ifndef CONFIG_DEBUG_MAPLE_TREE +#define mt_dump(mt, fmt) do {} while (0) +#define mt_validate(mt) do {} while (0) +#define mt_cache_shrink() do {} while (0) +#define mas_dump(mas) do {} while (0) +#define mas_wr_dump(mas) do {} while (0) +atomic_t maple_tree_tests_run; +atomic_t maple_tree_tests_passed; +#undef MT_BUG_ON + +#define MT_BUG_ON(__tree, __x) do { \ + atomic_inc(&maple_tree_tests_run); \ + if (__x) { \ + pr_info("BUG at %s:%d (%u)\n", \ + __func__, __LINE__, __x); \ + pr_info("Pass: %u Run:%u\n", \ + atomic_read(&maple_tree_tests_passed), \ + atomic_read(&maple_tree_tests_run)); \ + } else { \ + atomic_inc(&maple_tree_tests_passed); \ + } \ +} while (0) +#endif + /* #define BENCH_SLOT_STORE */ /* #define BENCH_NODE_STORE */ /* #define BENCH_AWALK */ /* #define BENCH_WALK */ +/* #define BENCH_LOAD */ /* #define BENCH_MT_FOR_EACH */ /* #define BENCH_FORK */ +/* #define BENCH_MAS_FOR_EACH */ +/* #define BENCH_MAS_PREV */ #ifdef __KERNEL__ #define mt_set_non_kernel(x) do {} while (0) @@ -30,54 +55,59 @@ #else #define cond_resched() do {} while (0) #endif -static -int mtree_insert_index(struct maple_tree *mt, unsigned long index, gfp_t gfp) + +#define mas_is_none(x) ((x)->status == ma_none) +#define mas_is_overflow(x) ((x)->status == ma_overflow) +#define mas_is_underflow(x) ((x)->status == ma_underflow) + +static int __init mtree_insert_index(struct maple_tree *mt, + unsigned long index, gfp_t gfp) { return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp); } -static void mtree_erase_index(struct maple_tree *mt, unsigned long index) +static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index) { MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX)); MT_BUG_ON(mt, mtree_load(mt, index) != NULL); } -static int mtree_test_insert(struct maple_tree *mt, unsigned long index, +static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index, void *ptr) { return mtree_insert(mt, index, ptr, GFP_KERNEL); } -static int mtree_test_store_range(struct maple_tree *mt, unsigned long start, - unsigned long end, void *ptr) +static int __init mtree_test_store_range(struct maple_tree *mt, + unsigned long start, unsigned long end, void *ptr) { return mtree_store_range(mt, start, end, ptr, GFP_KERNEL); } -static int mtree_test_store(struct maple_tree *mt, unsigned long start, +static int __init mtree_test_store(struct maple_tree *mt, unsigned long start, void *ptr) { return mtree_test_store_range(mt, start, start, ptr); } -static int mtree_test_insert_range(struct maple_tree *mt, unsigned long start, - unsigned long end, void *ptr) +static int __init mtree_test_insert_range(struct maple_tree *mt, + unsigned long start, unsigned long end, void *ptr) { return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL); } -static void *mtree_test_load(struct maple_tree *mt, unsigned long index) +static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index) { return mtree_load(mt, index); } -static void *mtree_test_erase(struct maple_tree *mt, unsigned long index) +static void __init 
*mtree_test_erase(struct maple_tree *mt, unsigned long index) { return mtree_erase(mt, index); } #if defined(CONFIG_64BIT) -static noinline void check_mtree_alloc_range(struct maple_tree *mt, +static noinline void __init check_mtree_alloc_range(struct maple_tree *mt, unsigned long start, unsigned long end, unsigned long size, unsigned long expected, int eret, void *ptr) { @@ -94,7 +124,7 @@ static noinline void check_mtree_alloc_range(struct maple_tree *mt, MT_BUG_ON(mt, result != expected); } -static noinline void check_mtree_alloc_rrange(struct maple_tree *mt, +static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt, unsigned long start, unsigned long end, unsigned long size, unsigned long expected, int eret, void *ptr) { @@ -102,7 +132,7 @@ static noinline void check_mtree_alloc_rrange(struct maple_tree *mt, unsigned long result = expected + 1; int ret; - ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end - 1, + ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end, GFP_KERNEL); MT_BUG_ON(mt, ret != eret); if (ret) @@ -112,8 +142,8 @@ static noinline void check_mtree_alloc_rrange(struct maple_tree *mt, } #endif -static noinline void check_load(struct maple_tree *mt, unsigned long index, - void *ptr) +static noinline void __init check_load(struct maple_tree *mt, + unsigned long index, void *ptr) { void *ret = mtree_test_load(mt, index); @@ -122,7 +152,7 @@ static noinline void check_load(struct maple_tree *mt, unsigned long index, MT_BUG_ON(mt, ret != ptr); } -static noinline void check_store_range(struct maple_tree *mt, +static noinline void __init check_store_range(struct maple_tree *mt, unsigned long start, unsigned long end, void *ptr, int expected) { int ret = -EINVAL; @@ -138,7 +168,7 @@ static noinline void check_store_range(struct maple_tree *mt, check_load(mt, i, ptr); } -static noinline void check_insert_range(struct maple_tree *mt, +static noinline void __init check_insert_range(struct maple_tree *mt, unsigned long start, unsigned long end, void *ptr, int expected) { int ret = -EINVAL; @@ -154,8 +184,8 @@ static noinline void check_insert_range(struct maple_tree *mt, check_load(mt, i, ptr); } -static noinline void check_insert(struct maple_tree *mt, unsigned long index, - void *ptr) +static noinline void __init check_insert(struct maple_tree *mt, + unsigned long index, void *ptr) { int ret = -EINVAL; @@ -163,7 +193,7 @@ static noinline void check_insert(struct maple_tree *mt, unsigned long index, MT_BUG_ON(mt, ret != 0); } -static noinline void check_dup_insert(struct maple_tree *mt, +static noinline void __init check_dup_insert(struct maple_tree *mt, unsigned long index, void *ptr) { int ret = -EINVAL; @@ -173,13 +203,13 @@ static noinline void check_dup_insert(struct maple_tree *mt, } -static noinline -void check_index_load(struct maple_tree *mt, unsigned long index) +static noinline void __init check_index_load(struct maple_tree *mt, + unsigned long index) { return check_load(mt, index, xa_mk_value(index & LONG_MAX)); } -static inline int not_empty(struct maple_node *node) +static inline __init int not_empty(struct maple_node *node) { int i; @@ -194,8 +224,8 @@ static inline int not_empty(struct maple_node *node) } -static noinline void check_rev_seq(struct maple_tree *mt, unsigned long max, - bool verbose) +static noinline void __init check_rev_seq(struct maple_tree *mt, + unsigned long max, bool verbose) { unsigned long i = max, j; @@ -219,7 +249,7 @@ static noinline void check_rev_seq(struct maple_tree *mt, unsigned long max, #ifndef 
__KERNEL__ if (verbose) { rcu_barrier(); - mt_dump(mt); + mt_dump(mt, mt_dump_dec); pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n", __func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(), mt_nr_tallocated()); @@ -227,7 +257,7 @@ static noinline void check_rev_seq(struct maple_tree *mt, unsigned long max, #endif } -static noinline void check_seq(struct maple_tree *mt, unsigned long max, +static noinline void __init check_seq(struct maple_tree *mt, unsigned long max, bool verbose) { unsigned long i, j; @@ -248,7 +278,7 @@ static noinline void check_seq(struct maple_tree *mt, unsigned long max, #ifndef __KERNEL__ if (verbose) { rcu_barrier(); - mt_dump(mt); + mt_dump(mt, mt_dump_dec); pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n", max, mt_get_alloc_size()/1024, mt_nr_allocated(), mt_nr_tallocated()); @@ -256,7 +286,7 @@ static noinline void check_seq(struct maple_tree *mt, unsigned long max, #endif } -static noinline void check_lb_not_empty(struct maple_tree *mt) +static noinline void __init check_lb_not_empty(struct maple_tree *mt) { unsigned long i, j; unsigned long huge = 4000UL * 1000 * 1000; @@ -275,13 +305,13 @@ static noinline void check_lb_not_empty(struct maple_tree *mt) mtree_destroy(mt); } -static noinline void check_lower_bound_split(struct maple_tree *mt) +static noinline void __init check_lower_bound_split(struct maple_tree *mt) { MT_BUG_ON(mt, !mtree_empty(mt)); check_lb_not_empty(mt); } -static noinline void check_upper_bound_split(struct maple_tree *mt) +static noinline void __init check_upper_bound_split(struct maple_tree *mt) { unsigned long i, j; unsigned long huge; @@ -306,7 +336,7 @@ static noinline void check_upper_bound_split(struct maple_tree *mt) mtree_destroy(mt); } -static noinline void check_mid_split(struct maple_tree *mt) +static noinline void __init check_mid_split(struct maple_tree *mt) { unsigned long huge = 8000UL * 1000 * 1000; @@ -315,7 +345,7 @@ static noinline void check_mid_split(struct maple_tree *mt) check_lb_not_empty(mt); } -static noinline void check_rev_find(struct maple_tree *mt) +static noinline void __init check_rev_find(struct maple_tree *mt) { int i, nr_entries = 200; void *val; @@ -354,7 +384,7 @@ static noinline void check_rev_find(struct maple_tree *mt) rcu_read_unlock(); } -static noinline void check_find(struct maple_tree *mt) +static noinline void __init check_find(struct maple_tree *mt) { unsigned long val = 0; unsigned long count; @@ -558,7 +588,7 @@ static noinline void check_find(struct maple_tree *mt) MT_BUG_ON(mt, last != mas.last); - mas.node = MAS_NONE; + mas.status = ma_none; mas.index = ULONG_MAX; mas.last = ULONG_MAX; entry2 = mas_prev(&mas, 0); @@ -571,7 +601,7 @@ static noinline void check_find(struct maple_tree *mt) mtree_destroy(mt); } -static noinline void check_find_2(struct maple_tree *mt) +static noinline void __init check_find_2(struct maple_tree *mt) { unsigned long i, j; void *entry; @@ -616,7 +646,7 @@ static noinline void check_find_2(struct maple_tree *mt) #if defined(CONFIG_64BIT) -static noinline void check_alloc_rev_range(struct maple_tree *mt) +static noinline void __init check_alloc_rev_range(struct maple_tree *mt) { /* * Generated by: @@ -624,7 +654,7 @@ static noinline void check_alloc_rev_range(struct maple_tree *mt) * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}' */ - unsigned long range[] = { + static const unsigned long range[] = { /* Inclusive , Exclusive. 
*/ 0x565234af2000, 0x565234af4000, 0x565234af4000, 0x565234af9000, @@ -652,7 +682,7 @@ static noinline void check_alloc_rev_range(struct maple_tree *mt) 0x7fff58791000, 0x7fff58793000, }; - unsigned long holes[] = { + static const unsigned long holes[] = { /* * Note: start of hole is INCLUSIVE * end of hole is EXCLUSIVE @@ -672,7 +702,7 @@ static noinline void check_alloc_rev_range(struct maple_tree *mt) * 4. number that should be returned. * 5. return value */ - unsigned long req_range[] = { + static const unsigned long req_range[] = { 0x565234af9000, /* Min */ 0x7fff58791000, /* Max */ 0x1000, /* Size */ @@ -680,7 +710,7 @@ static noinline void check_alloc_rev_range(struct maple_tree *mt) 0, /* Return value success. */ 0x0, /* Min */ - 0x565234AF1 << 12, /* Max */ + 0x565234AF0 << 12, /* Max */ 0x3000, /* Size */ 0x565234AEE << 12, /* max - 3. */ 0, /* Return value success. */ @@ -692,14 +722,14 @@ static noinline void check_alloc_rev_range(struct maple_tree *mt) 0, /* Return value success. */ 0x0, /* Min */ - 0x7F36D510A << 12, /* Max */ + 0x7F36D5109 << 12, /* Max */ 0x4000, /* Size */ 0x7F36D5106 << 12, /* First rev hole of size 0x4000 */ 0, /* Return value success. */ /* Ascend test. */ 0x0, - 34148798629 << 12, + 34148798628 << 12, 19 << 12, 34148797418 << 12, 0x0, @@ -711,6 +741,12 @@ static noinline void check_alloc_rev_range(struct maple_tree *mt) 0x0, -EBUSY, + /* Single space test. */ + 34148798725 << 12, + 34148798725 << 12, + 1 << 12, + 34148798725 << 12, + 0, }; int i, range_count = ARRAY_SIZE(range); @@ -759,9 +795,9 @@ static noinline void check_alloc_rev_range(struct maple_tree *mt) mas_unlock(&mas); for (i = 0; i < req_range_count; i += 5) { #if DEBUG_REV_RANGE - pr_debug("\tReverse request between %lu-%lu size %lu, should get %lu\n", - req_range[i] >> 12, - (req_range[i + 1] >> 12) - 1, + pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n", + i, req_range[i] >> 12, + (req_range[i + 1] >> 12), req_range[i+2] >> 12, req_range[i+3] >> 12); #endif @@ -777,13 +813,14 @@ static noinline void check_alloc_rev_range(struct maple_tree *mt) mt_set_non_kernel(1); mtree_erase(mt, 34148798727); /* create a deleted range. */ + mtree_erase(mt, 34148798725); check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414, 34148798725, 0, mt); mtree_destroy(mt); } -static noinline void check_alloc_range(struct maple_tree *mt) +static noinline void __init check_alloc_range(struct maple_tree *mt) { /* * Generated by: @@ -791,7 +828,7 @@ static noinline void check_alloc_range(struct maple_tree *mt) * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}' */ - unsigned long range[] = { + static const unsigned long range[] = { /* Inclusive , Exclusive. */ 0x565234af2000, 0x565234af4000, 0x565234af4000, 0x565234af9000, @@ -818,7 +855,7 @@ static noinline void check_alloc_range(struct maple_tree *mt) 0x7fff5878e000, 0x7fff58791000, 0x7fff58791000, 0x7fff58793000, }; - unsigned long holes[] = { + static const unsigned long holes[] = { /* Start of hole, end of hole, size of hole (+1) */ 0x565234afb000, 0x565234afc000, 0x1000, 0x565234afe000, 0x565235def000, 0x12F1000, @@ -833,7 +870,7 @@ static noinline void check_alloc_range(struct maple_tree *mt) * 4. number that should be returned. * 5. 
return value */ - unsigned long req_range[] = { + static const unsigned long req_range[] = { 0x565234af9000, /* Min */ 0x7fff58791000, /* Max */ 0x1000, /* Size */ @@ -880,6 +917,13 @@ static noinline void check_alloc_range(struct maple_tree *mt) 4503599618982063UL << 12, /* Size */ 34359052178 << 12, /* Expected location */ -EBUSY, /* Return failure. */ + + /* Test a single entry */ + 34148798648 << 12, /* Min */ + 34148798648 << 12, /* Max */ + 4096, /* Size of 1 */ + 34148798648 << 12, /* Location is the same as min/max */ + 0, /* Success */ }; int i, range_count = ARRAY_SIZE(range); int req_range_count = ARRAY_SIZE(req_range); @@ -893,7 +937,7 @@ static noinline void check_alloc_range(struct maple_tree *mt) #if DEBUG_ALLOC_RANGE pr_debug("\tInsert %lu-%lu\n", range[i] >> 12, (range[i + 1] >> 12) - 1); - mt_dump(mt); + mt_dump(mt, mt_dump_hex); #endif check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1, xa_mk_value(range[i] >> 12), 0); @@ -934,7 +978,7 @@ static noinline void check_alloc_range(struct maple_tree *mt) xa_mk_value(req_range[i] >> 12)); /* pointer */ mt_validate(mt); #if DEBUG_ALLOC_RANGE - mt_dump(mt); + mt_dump(mt, mt_dump_hex); #endif } @@ -942,10 +986,10 @@ static noinline void check_alloc_range(struct maple_tree *mt) } #endif -static noinline void check_ranges(struct maple_tree *mt) +static noinline void __init check_ranges(struct maple_tree *mt) { int i, val, val2; - unsigned long r[] = { + static const unsigned long r[] = { 10, 15, 20, 25, 17, 22, /* Overlaps previous range. */ @@ -1122,6 +1166,71 @@ static noinline void check_ranges(struct maple_tree *mt) MT_BUG_ON(mt, !mt_height(mt)); mtree_destroy(mt); + /* Check in-place modifications */ + mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); + /* Append to the start of last range */ + mt_set_non_kernel(50); + for (i = 0; i <= 500; i++) { + val = i * 5 + 1; + val2 = val + 4; + check_store_range(mt, val, val2, xa_mk_value(val), 0); + } + + /* Append to the last range without touching any boundaries */ + for (i = 0; i < 10; i++) { + val = val2 + 5; + val2 = val + 4; + check_store_range(mt, val, val2, xa_mk_value(val), 0); + } + + /* Append to the end of last range */ + val = val2; + for (i = 0; i < 10; i++) { + val += 5; + MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX, + xa_mk_value(val)) != 0); + } + + /* Overwriting the range and over a part of the next range */ + for (i = 10; i < 30; i += 2) { + val = i * 5 + 1; + val2 = val + 5; + check_store_range(mt, val, val2, xa_mk_value(val), 0); + } + + /* Overwriting a part of the range and over the next range */ + for (i = 50; i < 70; i += 2) { + val2 = i * 5; + val = val2 - 5; + check_store_range(mt, val, val2, xa_mk_value(val), 0); + } + + /* + * Expand the range, only partially overwriting the previous and + * next ranges + */ + for (i = 100; i < 130; i += 3) { + val = i * 5 - 5; + val2 = i * 5 + 1; + check_store_range(mt, val, val2, xa_mk_value(val), 0); + } + + /* + * Expand the range, only partially overwriting the previous and + * next ranges, in RCU mode + */ + mt_set_in_rcu(mt); + for (i = 150; i < 180; i += 3) { + val = i * 5 - 5; + val2 = i * 5 + 1; + check_store_range(mt, val, val2, xa_mk_value(val), 0); + } + + MT_BUG_ON(mt, !mt_height(mt)); + mt_validate(mt); + mt_set_non_kernel(0); + mtree_destroy(mt); + /* Test rebalance gaps */ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); mt_set_non_kernel(50); @@ -1210,7 +1319,7 @@ static noinline void check_ranges(struct maple_tree *mt) MT_BUG_ON(mt, mt_height(mt) != 4); } -static noinline void 
check_next_entry(struct maple_tree *mt) +static noinline void __init check_next_entry(struct maple_tree *mt) { void *entry = NULL; unsigned long limit = 30, i = 0; @@ -1234,7 +1343,7 @@ static noinline void check_next_entry(struct maple_tree *mt) mtree_destroy(mt); } -static noinline void check_prev_entry(struct maple_tree *mt) +static noinline void __init check_prev_entry(struct maple_tree *mt) { unsigned long index = 16; void *value; @@ -1278,7 +1387,93 @@ static noinline void check_prev_entry(struct maple_tree *mt) mas_unlock(&mas); } -static noinline void check_root_expand(struct maple_tree *mt) +static noinline void __init check_store_null(struct maple_tree *mt) +{ + MA_STATE(mas, mt, 0, ULONG_MAX); + + /* + * Store NULL at range [0, ULONG_MAX] to an empty tree should result + * in an empty tree + */ + mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); + mas_lock(&mas); + mas_store_gfp(&mas, NULL, GFP_KERNEL); + MT_BUG_ON(mt, !mtree_empty(mt)); + mas_unlock(&mas); + mtree_destroy(mt); + + /* + * Store NULL at any range to an empty tree should result in an empty + * tree + */ + mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); + mas_lock(&mas); + mas_set_range(&mas, 3, 10); + mas_store_gfp(&mas, NULL, GFP_KERNEL); + MT_BUG_ON(mt, !mtree_empty(mt)); + mas_unlock(&mas); + mtree_destroy(mt); + + /* + * Store NULL at range [0, ULONG_MAX] to a single entry tree should + * result in an empty tree + */ + mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); + mas_lock(&mas); + mas_set(&mas, 0); + mas_store_gfp(&mas, &mas, GFP_KERNEL); + mas_set_range(&mas, 0, ULONG_MAX); + mas_store_gfp(&mas, NULL, GFP_KERNEL); + MT_BUG_ON(mt, !mtree_empty(mt)); + mas_unlock(&mas); + mtree_destroy(mt); + + /* + * Store NULL at range [0, n] to a single entry tree should + * result in an empty tree + */ + mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); + mas_lock(&mas); + mas_set(&mas, 0); + mas_store_gfp(&mas, &mas, GFP_KERNEL); + mas_set_range(&mas, 0, 5); + mas_store_gfp(&mas, NULL, GFP_KERNEL); + MT_BUG_ON(mt, !mtree_empty(mt)); + mas_unlock(&mas); + mtree_destroy(mt); + + /* + * Store NULL at range [m, n] where m > 0 to a single entry tree + * should still be a single entry tree + */ + mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); + mas_lock(&mas); + mas_set(&mas, 0); + mas_store_gfp(&mas, &mas, GFP_KERNEL); + mas_set_range(&mas, 2, 5); + mas_store_gfp(&mas, NULL, GFP_KERNEL); + MT_BUG_ON(mt, mtree_empty(mt)); +// MT_BUG_ON(mt, xa_is_node(mas_root(&mas))); + mas_unlock(&mas); + mtree_destroy(mt); + + /* + * Store NULL at range [0, ULONG_MAX] to a tree with node should + * result in an empty tree + */ + mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); + mas_lock(&mas); + mas_set_range(&mas, 1, 3); + mas_store_gfp(&mas, &mas, GFP_KERNEL); +// MT_BUG_ON(mt, !xa_is_node(mas_root(&mas))); + mas_set_range(&mas, 0, ULONG_MAX); + mas_store_gfp(&mas, NULL, GFP_KERNEL); + MT_BUG_ON(mt, !mtree_empty(mt)); + mas_unlock(&mas); + mtree_destroy(mt); +} + +static noinline void __init check_root_expand(struct maple_tree *mt) { MA_STATE(mas, mt, 0, 0); void *ptr; @@ -1287,6 +1482,7 @@ static noinline void check_root_expand(struct maple_tree *mt) mas_lock(&mas); mas_set(&mas, 3); ptr = mas_walk(&mas); + MT_BUG_ON(mt, mas.index != 0); MT_BUG_ON(mt, ptr != NULL); MT_BUG_ON(mt, mas.index != 0); MT_BUG_ON(mt, mas.last != ULONG_MAX); @@ -1356,7 +1552,7 @@ static noinline void check_root_expand(struct maple_tree *mt) mas_store_gfp(&mas, ptr, GFP_KERNEL); ptr = mas_next(&mas, ULONG_MAX); MT_BUG_ON(mt, ptr != NULL); - MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX)); + 
MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX)); mas_set(&mas, 1); ptr = mas_prev(&mas, 0); @@ -1367,13 +1563,37 @@ static noinline void check_root_expand(struct maple_tree *mt) mas_unlock(&mas); } -static noinline void check_gap_combining(struct maple_tree *mt) +static noinline void __init check_deficient_node(struct maple_tree *mt) +{ + MA_STATE(mas, mt, 0, 0); + int count; + + mas_lock(&mas); + for (count = 0; count < 10; count++) { + mas_set(&mas, count); + mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL); + } + + for (count = 20; count < 39; count++) { + mas_set(&mas, count); + mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL); + } + + for (count = 10; count < 12; count++) { + mas_set(&mas, count); + mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL); + } + mas_unlock(&mas); + mt_validate(mt); +} + +static noinline void __init check_gap_combining(struct maple_tree *mt) { struct maple_enode *mn1, *mn2; void *entry; unsigned long singletons = 100; - unsigned long *seq100; - unsigned long seq100_64[] = { + static const unsigned long *seq100; + static const unsigned long seq100_64[] = { /* 0-5 */ 74, 75, 76, 50, 100, 2, @@ -1387,7 +1607,7 @@ static noinline void check_gap_combining(struct maple_tree *mt) 76, 2, 79, 85, 4, }; - unsigned long seq100_32[] = { + static const unsigned long seq100_32[] = { /* 0-5 */ 61, 62, 63, 50, 100, 2, @@ -1401,11 +1621,11 @@ static noinline void check_gap_combining(struct maple_tree *mt) 76, 2, 79, 85, 4, }; - unsigned long seq2000[] = { + static const unsigned long seq2000[] = { 1152, 1151, 1100, 1200, 2, }; - unsigned long seq400[] = { + static const unsigned long seq400[] = { 286, 318, 256, 260, 266, 270, 275, 280, 290, 398, 286, 310, @@ -1564,7 +1784,7 @@ static noinline void check_gap_combining(struct maple_tree *mt) mt_set_non_kernel(0); mtree_destroy(mt); } -static noinline void check_node_overwrite(struct maple_tree *mt) +static noinline void __init check_node_overwrite(struct maple_tree *mt) { int i, max = 4000; @@ -1572,12 +1792,12 @@ static noinline void check_node_overwrite(struct maple_tree *mt) mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100)); mtree_test_store_range(mt, 319951, 367950, NULL); - /*mt_dump(mt); */ + /*mt_dump(mt, mt_dump_dec); */ mt_validate(mt); } #if defined(BENCH_SLOT_STORE) -static noinline void bench_slot_store(struct maple_tree *mt) +static noinline void __init bench_slot_store(struct maple_tree *mt) { int i, brk = 105, max = 1040, brk_start = 100, count = 20000000; @@ -1593,7 +1813,7 @@ static noinline void bench_slot_store(struct maple_tree *mt) #endif #if defined(BENCH_NODE_STORE) -static noinline void bench_node_store(struct maple_tree *mt) +static noinline void __init bench_node_store(struct maple_tree *mt) { int i, overwrite = 76, max = 240, count = 20000000; @@ -1612,7 +1832,7 @@ static noinline void bench_node_store(struct maple_tree *mt) #endif #if defined(BENCH_AWALK) -static noinline void bench_awalk(struct maple_tree *mt) +static noinline void __init bench_awalk(struct maple_tree *mt) { int i, max = 2500, count = 50000000; MA_STATE(mas, mt, 1470, 1470); @@ -1629,7 +1849,7 @@ static noinline void bench_awalk(struct maple_tree *mt) } #endif #if defined(BENCH_WALK) -static noinline void bench_walk(struct maple_tree *mt) +static noinline void __init bench_walk(struct maple_tree *mt) { int i, max = 2500, count = 550000000; MA_STATE(mas, mt, 1470, 1470); @@ -1645,8 +1865,21 @@ static noinline void bench_walk(struct maple_tree *mt) } #endif +#if defined(BENCH_LOAD) +static 
noinline void __init bench_load(struct maple_tree *mt) +{ + int i, max = 2500, count = 550000000; + + for (i = 0; i < max; i += 10) + mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL); + + for (i = 0; i < count; i++) + mtree_load(mt, 1470); +} +#endif + #if defined(BENCH_MT_FOR_EACH) -static noinline void bench_mt_for_each(struct maple_tree *mt) +static noinline void __init bench_mt_for_each(struct maple_tree *mt) { int i, count = 1000000; unsigned long max = 2500, index = 0; @@ -1669,47 +1902,179 @@ static noinline void bench_mt_for_each(struct maple_tree *mt) } #endif +#if defined(BENCH_MAS_FOR_EACH) +static noinline void __init bench_mas_for_each(struct maple_tree *mt) +{ + int i, count = 1000000; + unsigned long max = 2500; + void *entry; + MA_STATE(mas, mt, 0, 0); + + for (i = 0; i < max; i += 5) { + int gap = 4; + + if (i % 30 == 0) + gap = 3; + mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL); + } + + rcu_read_lock(); + for (i = 0; i < count; i++) { + unsigned long j = 0; + + mas_for_each(&mas, entry, max) { + MT_BUG_ON(mt, entry != xa_mk_value(j)); + j += 5; + } + mas_set(&mas, 0); + } + rcu_read_unlock(); + +} +#endif +#if defined(BENCH_MAS_PREV) +static noinline void __init bench_mas_prev(struct maple_tree *mt) +{ + int i, count = 1000000; + unsigned long max = 2500; + void *entry; + MA_STATE(mas, mt, 0, 0); + + for (i = 0; i < max; i += 5) { + int gap = 4; + + if (i % 30 == 0) + gap = 3; + mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL); + } + + rcu_read_lock(); + for (i = 0; i < count; i++) { + unsigned long j = 2495; + + mas_set(&mas, ULONG_MAX); + while ((entry = mas_prev(&mas, 0)) != NULL) { + MT_BUG_ON(mt, entry != xa_mk_value(j)); + j -= 5; + } + } + rcu_read_unlock(); + +} +#endif /* check_forking - simulate the kernel forking sequence with the tree. 
*/ -static noinline void check_forking(struct maple_tree *mt) +static noinline void __init check_forking(void) { + struct maple_tree mt, newmt; + int i, nr_entries = 134, ret; + void *val; + MA_STATE(mas, &mt, 0, 0); + MA_STATE(newmas, &newmt, 0, 0); + struct rw_semaphore mt_lock, newmt_lock; - struct maple_tree newmt; - int i, nr_entries = 134; + init_rwsem(&mt_lock); + init_rwsem(&newmt_lock); + + mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN); + mt_set_external_lock(&mt, &mt_lock); + + mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN); + mt_set_external_lock(&newmt, &newmt_lock); + + down_write(&mt_lock); + for (i = 0; i <= nr_entries; i++) { + mas_set_range(&mas, i*10, i*10 + 5); + mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL); + } + + down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING); + ret = __mt_dup(&mt, &newmt, GFP_KERNEL); + if (ret) { + pr_err("OOM!"); + BUG_ON(1); + } + + mas_set(&newmas, 0); + mas_for_each(&newmas, val, ULONG_MAX) + mas_store(&newmas, val); + + mas_destroy(&newmas); + mas_destroy(&mas); + mt_validate(&newmt); + __mt_destroy(&newmt); + __mt_destroy(&mt); + up_write(&newmt_lock); + up_write(&mt_lock); +} + +static noinline void __init check_iteration(struct maple_tree *mt) +{ + int i, nr_entries = 125; void *val; MA_STATE(mas, mt, 0, 0); - MA_STATE(newmas, mt, 0, 0); for (i = 0; i <= nr_entries; i++) - mtree_store_range(mt, i*10, i*10 + 5, + mtree_store_range(mt, i * 10, i * 10 + 9, xa_mk_value(i), GFP_KERNEL); mt_set_non_kernel(99999); - mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE); - newmas.tree = &newmt; - mas_reset(&newmas); - mas_reset(&mas); - mas_lock(&newmas); - mas.index = 0; - mas.last = 0; - if (mas_expected_entries(&newmas, nr_entries)) { - pr_err("OOM!"); - BUG_ON(1); + + i = 0; + mas_lock(&mas); + mas_for_each(&mas, val, 925) { + MT_BUG_ON(mt, mas.index != i * 10); + MT_BUG_ON(mt, mas.last != i * 10 + 9); + /* Overwrite end of entry 92 */ + if (i == 92) { + mas.index = 925; + mas.last = 929; + mas_store(&mas, val); + } + i++; } - rcu_read_lock(); - mas_for_each(&mas, val, ULONG_MAX) { - newmas.index = mas.index; - newmas.last = mas.last; - mas_store(&newmas, val); + /* Ensure mas_find() gets the next value */ + val = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, val != xa_mk_value(i)); + + mas_set(&mas, 0); + i = 0; + mas_for_each(&mas, val, 785) { + MT_BUG_ON(mt, mas.index != i * 10); + MT_BUG_ON(mt, mas.last != i * 10 + 9); + /* Overwrite start of entry 78 */ + if (i == 78) { + mas.index = 780; + mas.last = 785; + mas_store(&mas, val); + } else { + i++; + } } - rcu_read_unlock(); - mas_destroy(&newmas); - mas_unlock(&newmas); - mt_validate(&newmt); + val = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, val != xa_mk_value(i)); + + mas_set(&mas, 0); + i = 0; + mas_for_each(&mas, val, 765) { + MT_BUG_ON(mt, mas.index != i * 10); + MT_BUG_ON(mt, mas.last != i * 10 + 9); + /* Overwrite end of entry 76 and advance to the end */ + if (i == 76) { + mas.index = 760; + mas.last = 765; + mas_store(&mas, val); + } + i++; + } + /* Make sure the next find returns the one after 765, 766-769 */ + val = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, val != xa_mk_value(76)); + mas_unlock(&mas); + mas_destroy(&mas); mt_set_non_kernel(0); - mtree_destroy(&newmt); } -static noinline void check_mas_store_gfp(struct maple_tree *mt) +static noinline void __init check_mas_store_gfp(struct maple_tree *mt) { struct maple_tree newmt; @@ -1742,64 +2107,75 @@ static noinline void check_mas_store_gfp(struct maple_tree *mt) } #if defined(BENCH_FORK) 
-static noinline void bench_forking(struct maple_tree *mt) +static noinline void __init bench_forking(void) { - - struct maple_tree newmt; - int i, nr_entries = 134, nr_fork = 80000; + struct maple_tree mt, newmt; + int i, nr_entries = 134, nr_fork = 80000, ret; void *val; - MA_STATE(mas, mt, 0, 0); - MA_STATE(newmas, mt, 0, 0); + MA_STATE(mas, &mt, 0, 0); + MA_STATE(newmas, &newmt, 0, 0); + struct rw_semaphore mt_lock, newmt_lock; - for (i = 0; i <= nr_entries; i++) - mtree_store_range(mt, i*10, i*10 + 5, - xa_mk_value(i), GFP_KERNEL); + init_rwsem(&mt_lock); + init_rwsem(&newmt_lock); + + mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN); + mt_set_external_lock(&mt, &mt_lock); + + down_write(&mt_lock); + for (i = 0; i <= nr_entries; i++) { + mas_set_range(&mas, i*10, i*10 + 5); + mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL); + } for (i = 0; i < nr_fork; i++) { - mt_set_non_kernel(99999); - mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE); - newmas.tree = &newmt; - mas_reset(&newmas); - mas_reset(&mas); - mas.index = 0; - mas.last = 0; - rcu_read_lock(); - mas_lock(&newmas); - if (mas_expected_entries(&newmas, nr_entries)) { - printk("OOM!"); + mt_init_flags(&newmt, + MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN); + mt_set_external_lock(&newmt, &newmt_lock); + + down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING); + ret = __mt_dup(&mt, &newmt, GFP_KERNEL); + if (ret) { + pr_err("OOM!"); BUG_ON(1); } - mas_for_each(&mas, val, ULONG_MAX) { - newmas.index = mas.index; - newmas.last = mas.last; + + mas_set(&newmas, 0); + mas_for_each(&newmas, val, ULONG_MAX) mas_store(&newmas, val); - } + mas_destroy(&newmas); - mas_unlock(&newmas); - rcu_read_unlock(); mt_validate(&newmt); - mt_set_non_kernel(0); - mtree_destroy(&newmt); + __mt_destroy(&newmt); + up_write(&newmt_lock); } + mas_destroy(&mas); + __mt_destroy(&mt); + up_write(&mt_lock); } #endif -static noinline void next_prev_test(struct maple_tree *mt) +static noinline void __init next_prev_test(struct maple_tree *mt) { int i, nr_entries; void *val; MA_STATE(mas, mt, 0, 0); struct maple_enode *mn; - unsigned long *level2; - unsigned long level2_64[] = {707, 1000, 710, 715, 720, 725}; - unsigned long level2_32[] = {1747, 2000, 1750, 1755, 1760, 1765}; + static const unsigned long *level2; + static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720, + 725}; + static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755, + 1760, 1765}; + unsigned long last_index; if (MAPLE_32BIT) { nr_entries = 500; level2 = level2_32; + last_index = 0x138e; } else { nr_entries = 200; level2 = level2_64; + last_index = 0x7d6; } for (i = 0; i <= nr_entries; i++) @@ -1906,7 +2282,7 @@ static noinline void next_prev_test(struct maple_tree *mt) val = mas_next(&mas, ULONG_MAX); MT_BUG_ON(mt, val != NULL); - MT_BUG_ON(mt, mas.index != ULONG_MAX); + MT_BUG_ON(mt, mas.index != last_index); MT_BUG_ON(mt, mas.last != ULONG_MAX); val = mas_prev(&mas, 0); @@ -1930,7 +2306,8 @@ static noinline void next_prev_test(struct maple_tree *mt) val = mas_prev(&mas, 0); MT_BUG_ON(mt, val != NULL); MT_BUG_ON(mt, mas.index != 0); - MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.last != 5); + MT_BUG_ON(mt, !mas_is_underflow(&mas)); mas.index = 0; mas.last = 5; @@ -1942,7 +2319,7 @@ static noinline void next_prev_test(struct maple_tree *mt) val = mas_prev(&mas, 0); MT_BUG_ON(mt, val != NULL); MT_BUG_ON(mt, mas.index != 0); - MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.last != 9); mas_unlock(&mas); mtree_destroy(mt); @@ -1960,7 +2337,7 @@ static noinline 
void next_prev_test(struct maple_tree *mt) /* Test spanning writes that require balancing right sibling or right cousin */ -static noinline void check_spanning_relatives(struct maple_tree *mt) +static noinline void __init check_spanning_relatives(struct maple_tree *mt) { unsigned long i, nr_entries = 1000; @@ -1973,7 +2350,7 @@ static noinline void check_spanning_relatives(struct maple_tree *mt) mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL); } -static noinline void check_fuzzer(struct maple_tree *mt) +static noinline void __init check_fuzzer(struct maple_tree *mt) { /* * 1. Causes a spanning rebalance of a single root node. @@ -2369,168 +2746,945 @@ static noinline void check_fuzzer(struct maple_tree *mt) mtree_test_erase(mt, ULONG_MAX - 10); } -/* duplicate the tree with a specific gap */ -static noinline void check_dup_gaps(struct maple_tree *mt, - unsigned long nr_entries, bool zero_start, - unsigned long gap) +static noinline void __init check_bnode_min_spanning(struct maple_tree *mt) { - unsigned long i = 0; - struct maple_tree newmt; - int ret; - void *tmp; + int i = 50; MA_STATE(mas, mt, 0, 0); - MA_STATE(newmas, &newmt, 0, 0); - if (!zero_start) - i = 1; + mt_set_non_kernel(9999); + mas_lock(&mas); + do { + mas_set_range(&mas, i*10, i*10+9); + mas_store(&mas, check_bnode_min_spanning); + } while (i--); - mt_zero_nr_tallocated(); - for (; i <= nr_entries; i++) - mtree_store_range(mt, i*10, (i+1)*10 - gap, + mas_set_range(&mas, 240, 509); + mas_store(&mas, NULL); + mas_unlock(&mas); + mas_destroy(&mas); + mt_set_non_kernel(0); +} + +static noinline void __init check_empty_area_window(struct maple_tree *mt) +{ + unsigned long i, nr_entries = 20; + MA_STATE(mas, mt, 0, 0); + + for (i = 1; i <= nr_entries; i++) + mtree_store_range(mt, i*10, i*10 + 9, xa_mk_value(i), GFP_KERNEL); - mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE); - mt_set_non_kernel(99999); - mas_lock(&newmas); - ret = mas_expected_entries(&newmas, nr_entries); - mt_set_non_kernel(0); - MT_BUG_ON(mt, ret != 0); + /* Create another hole besides the one at 0 */ + mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL); + /* Check lower bounds that don't fit */ rcu_read_lock(); - mas_for_each(&mas, tmp, ULONG_MAX) { - newmas.index = mas.index; - newmas.last = mas.last; - mas_store(&newmas, tmp); - } + MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY); + + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY); + + /* Check lower bound that does fit */ + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0); + MT_BUG_ON(mt, mas.index != 5); + MT_BUG_ON(mt, mas.last != 9); rcu_read_unlock(); - mas_destroy(&newmas); - mas_unlock(&newmas); - mtree_destroy(&newmt); + /* Check one gap that doesn't fit and one that does */ + rcu_read_lock(); + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0); + MT_BUG_ON(mt, mas.index != 161); + MT_BUG_ON(mt, mas.last != 169); + + /* Check one gap that does fit above the min */ + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0); + MT_BUG_ON(mt, mas.index != 216); + MT_BUG_ON(mt, mas.last != 218); + + /* Check size that doesn't fit any gap */ + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY); + + /* + * Check size that doesn't fit the lower end of the window but + * does fit the gap + */ + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY); + + /* + * Check size that doesn't fit the upper end of the window but + * does fit the gap 
+ */ + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY); + + /* Check mas_empty_area forward */ + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 8); + + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 3); + + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY); + + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY); + + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL); + + mas_reset(&mas); + mas_empty_area(&mas, 100, 165, 3); + + mas_reset(&mas); + MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY); + rcu_read_unlock(); } -/* Duplicate many sizes of trees. Mainly to test expected entry values */ -static noinline void check_dup(struct maple_tree *mt) +static noinline void __init check_empty_area_fill(struct maple_tree *mt) { - int i; - int big_start = 100010; + const unsigned long max = 0x25D78000; + unsigned long size; + int loop, shift; + MA_STATE(mas, mt, 0, 0); - /* Check with a value at zero */ - for (i = 10; i < 1000; i++) { - mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); - check_dup_gaps(mt, i, true, 5); - mtree_destroy(mt); - rcu_barrier(); + mt_set_non_kernel(99999); + for (shift = 12; shift <= 16; shift++) { + loop = 5000; + size = 1 << shift; + while (loop--) { + mas_set(&mas, 0); + mas_lock(&mas); + MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0); + MT_BUG_ON(mt, mas.last != mas.index + size - 1); + mas_store_gfp(&mas, (void *)size, GFP_KERNEL); + mas_unlock(&mas); + mas_reset(&mas); + } } - cond_resched(); - mt_cache_shrink(); - /* Check with a value at zero, no gap */ - for (i = 1000; i < 2000; i++) { - mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); - check_dup_gaps(mt, i, true, 0); - mtree_destroy(mt); - rcu_barrier(); - } + /* No space left. */ + size = 0x1000; + rcu_read_lock(); + MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY); + rcu_read_unlock(); - cond_resched(); - mt_cache_shrink(); - /* Check with a value at zero and unreasonably large */ - for (i = big_start; i < big_start + 10; i++) { - mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); - check_dup_gaps(mt, i, true, 5); - mtree_destroy(mt); - rcu_barrier(); - } + /* Fill a depth 3 node to the maximum */ + for (unsigned long i = 629440511; i <= 629440800; i += 6) + mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL); + /* Make space in the second-last depth 4 node */ + mtree_erase(mt, 631668735); + /* Make space in the last depth 4 node */ + mtree_erase(mt, 629506047); + mas_reset(&mas); + /* Search from just after the gap in the second-last depth 4 */ + rcu_read_lock(); + MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0); + rcu_read_unlock(); + mt_set_non_kernel(0); +} - cond_resched(); - mt_cache_shrink(); - /* Small to medium size not starting at zero*/ - for (i = 200; i < 1000; i++) { - mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); - check_dup_gaps(mt, i, false, 5); - mtree_destroy(mt); - rcu_barrier(); - } +/* + * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions. + * + * The table below shows the single entry tree (0-0 pointer) and normal tree + * with nodes. 
+ * + * Function ENTRY Start Result index & last + * ┬ ┬ ┬ ┬ ┬ + * │ │ │ │ └─ the final range + * │ │ │ └─ The node value after execution + * │ │ └─ The node value before execution + * │ └─ If the entry exists or does not exists (DNE) + * └─ The function name + * + * Function ENTRY Start Result index & last + * mas_next() + * - after last + * Single entry tree at 0-0 + * ------------------------ + * DNE MAS_START MAS_NONE 1 - oo + * DNE MAS_PAUSE MAS_NONE 1 - oo + * DNE MAS_ROOT MAS_NONE 1 - oo + * when index = 0 + * DNE MAS_NONE MAS_ROOT 0 + * when index > 0 + * DNE MAS_NONE MAS_NONE 1 - oo + * + * Normal tree + * ----------- + * exists MAS_START active range + * DNE MAS_START active set to last range + * exists MAS_PAUSE active range + * DNE MAS_PAUSE active set to last range + * exists MAS_NONE active range + * exists active active range + * DNE active active set to last range + * ERANGE active MAS_OVERFLOW last range + * + * Function ENTRY Start Result index & last + * mas_prev() + * - before index + * Single entry tree at 0-0 + * ------------------------ + * if index > 0 + * exists MAS_START MAS_ROOT 0 + * exists MAS_PAUSE MAS_ROOT 0 + * exists MAS_NONE MAS_ROOT 0 + * + * if index == 0 + * DNE MAS_START MAS_NONE 0 + * DNE MAS_PAUSE MAS_NONE 0 + * DNE MAS_NONE MAS_NONE 0 + * DNE MAS_ROOT MAS_NONE 0 + * + * Normal tree + * ----------- + * exists MAS_START active range + * DNE MAS_START active set to min + * exists MAS_PAUSE active range + * DNE MAS_PAUSE active set to min + * exists MAS_NONE active range + * DNE MAS_NONE MAS_NONE set to min + * any MAS_ROOT MAS_NONE 0 + * exists active active range + * DNE active active last range + * ERANGE active MAS_UNDERFLOW last range + * + * Function ENTRY Start Result index & last + * mas_find() + * - at index or next + * Single entry tree at 0-0 + * ------------------------ + * if index > 0 + * DNE MAS_START MAS_NONE 0 + * DNE MAS_PAUSE MAS_NONE 0 + * DNE MAS_ROOT MAS_NONE 0 + * DNE MAS_NONE MAS_NONE 1 + * if index == 0 + * exists MAS_START MAS_ROOT 0 + * exists MAS_PAUSE MAS_ROOT 0 + * exists MAS_NONE MAS_ROOT 0 + * + * Normal tree + * ----------- + * exists MAS_START active range + * DNE MAS_START active set to max + * exists MAS_PAUSE active range + * DNE MAS_PAUSE active set to max + * exists MAS_NONE active range (start at last) + * exists active active range + * DNE active active last range (max < last) + * + * Function ENTRY Start Result index & last + * mas_find_rev() + * - at index or before + * Single entry tree at 0-0 + * ------------------------ + * if index > 0 + * exists MAS_START MAS_ROOT 0 + * exists MAS_PAUSE MAS_ROOT 0 + * exists MAS_NONE MAS_ROOT 0 + * if index == 0 + * DNE MAS_START MAS_NONE 0 + * DNE MAS_PAUSE MAS_NONE 0 + * DNE MAS_NONE MAS_NONE 0 + * DNE MAS_ROOT MAS_NONE 0 + * + * Normal tree + * ----------- + * exists MAS_START active range + * DNE MAS_START active set to min + * exists MAS_PAUSE active range + * DNE MAS_PAUSE active set to min + * exists MAS_NONE active range (start at index) + * exists active active range + * DNE active active last range (min > index) + * + * Function ENTRY Start Result index & last + * mas_walk() + * - Look up index + * Single entry tree at 0-0 + * ------------------------ + * if index > 0 + * DNE MAS_START MAS_ROOT 1 - oo + * DNE MAS_PAUSE MAS_ROOT 1 - oo + * DNE MAS_NONE MAS_ROOT 1 - oo + * DNE MAS_ROOT MAS_ROOT 1 - oo + * if index == 0 + * exists MAS_START MAS_ROOT 0 + * exists MAS_PAUSE MAS_ROOT 0 + * exists MAS_NONE MAS_ROOT 0 + * exists MAS_ROOT MAS_ROOT 0 + * + * Normal tree + 
* ----------- + * exists MAS_START active range + * DNE MAS_START active range of NULL + * exists MAS_PAUSE active range + * DNE MAS_PAUSE active range of NULL + * exists MAS_NONE active range + * DNE MAS_NONE active range of NULL + * exists active active range + * DNE active active range of NULL + */ - cond_resched(); - mt_cache_shrink(); - /* Unreasonably large not starting at zero*/ - for (i = big_start; i < big_start + 10; i++) { - mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); - check_dup_gaps(mt, i, false, 5); - mtree_destroy(mt); - rcu_barrier(); - cond_resched(); - mt_cache_shrink(); - } +static noinline void __init check_state_handling(struct maple_tree *mt) +{ + MA_STATE(mas, mt, 0, 0); + void *entry, *ptr = (void *) 0x1234500; + void *ptr2 = &ptr; + void *ptr3 = &ptr2; + unsigned long index; - /* Check non-allocation tree not starting at zero */ - for (i = 1500; i < 3000; i++) { - mt_init_flags(mt, 0); - check_dup_gaps(mt, i, false, 5); - mtree_destroy(mt); - rcu_barrier(); - cond_resched(); - if (i % 2 == 0) - mt_cache_shrink(); - } + /* Check MAS_ROOT First */ + mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL); - mt_cache_shrink(); - /* Check non-allocation tree starting at zero */ - for (i = 200; i < 1000; i++) { - mt_init_flags(mt, 0); - check_dup_gaps(mt, i, true, 5); - mtree_destroy(mt); - rcu_barrier(); - cond_resched(); - } + mas_lock(&mas); + /* prev: Start -> underflow*/ + entry = mas_prev(&mas, 0); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.status != ma_underflow); - mt_cache_shrink(); - /* Unreasonably large */ - for (i = big_start + 5; i < big_start + 10; i++) { - mt_init_flags(mt, 0); - check_dup_gaps(mt, i, true, 5); - mtree_destroy(mt); - rcu_barrier(); - mt_cache_shrink(); - cond_resched(); + /* prev: Start -> root */ + mas_set(&mas, 10); + entry = mas_prev(&mas, 0); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + /* prev: pause -> root */ + mas_set(&mas, 10); + mas_pause(&mas); + entry = mas_prev(&mas, 0); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + /* next: start -> none */ + mas_set(&mas, 0); + entry = mas_next(&mas, ULONG_MAX); + MT_BUG_ON(mt, mas.index != 1); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.status != ma_none); + + /* next: start -> none*/ + mas_set(&mas, 10); + entry = mas_next(&mas, ULONG_MAX); + MT_BUG_ON(mt, mas.index != 1); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.status != ma_none); + + /* find: start -> root */ + mas_set(&mas, 0); + entry = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + /* find: root -> none */ + entry = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 1); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, mas.status != ma_none); + + /* find: none -> none */ + entry = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 1); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, mas.status != ma_none); + + /* find: start -> none */ + mas_set(&mas, 10); + entry = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 1); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, mas.status != ma_none); + + /* find_rev: 
none -> root */ + entry = mas_find_rev(&mas, 0); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + /* find_rev: start -> root */ + mas_set(&mas, 0); + entry = mas_find_rev(&mas, 0); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + /* find_rev: root -> none */ + entry = mas_find_rev(&mas, 0); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_none); + + /* find_rev: none -> none */ + entry = mas_find_rev(&mas, 0); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_none); + + /* find_rev: start -> root */ + mas_set(&mas, 10); + entry = mas_find_rev(&mas, 0); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + /* walk: start -> none */ + mas_set(&mas, 10); + entry = mas_walk(&mas); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 1); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, mas.status != ma_none); + + /* walk: pause -> none*/ + mas_set(&mas, 10); + mas_pause(&mas); + entry = mas_walk(&mas); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 1); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, mas.status != ma_none); + + /* walk: none -> none */ + mas.index = mas.last = 10; + entry = mas_walk(&mas); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 1); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, mas.status != ma_none); + + /* walk: none -> none */ + entry = mas_walk(&mas); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 1); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, mas.status != ma_none); + + /* walk: start -> root */ + mas_set(&mas, 0); + entry = mas_walk(&mas); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + /* walk: pause -> root */ + mas_set(&mas, 0); + mas_pause(&mas); + entry = mas_walk(&mas); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + /* walk: none -> root */ + mas.status = ma_none; + entry = mas_walk(&mas); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + /* walk: root -> root */ + entry = mas_walk(&mas); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + /* walk: root -> none */ + mas_set(&mas, 10); + entry = mas_walk(&mas); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 1); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, mas.status != ma_none); + + /* walk: none -> root */ + mas.index = mas.last = 0; + entry = mas_walk(&mas); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0); + MT_BUG_ON(mt, mas.status != ma_root); + + mas_unlock(&mas); + + /* Check when there is an actual node */ + mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL); + mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL); + mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL); + mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL); + + mas_lock(&mas); + + /* next: start 
->active */ + mas_set(&mas, 0); + entry = mas_next(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* next: pause ->active */ + mas_set(&mas, 0); + mas_pause(&mas); + entry = mas_next(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* next: none ->active */ + mas.index = mas.last = 0; + mas.offset = 0; + mas.status = ma_none; + entry = mas_next(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* next:active ->active (spanning limit) */ + entry = mas_next(&mas, 0x2100); + MT_BUG_ON(mt, entry != ptr2); + MT_BUG_ON(mt, mas.index != 0x2000); + MT_BUG_ON(mt, mas.last != 0x2500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* next:active -> overflow (limit reached) beyond data */ + entry = mas_next(&mas, 0x2999); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0x2501); + MT_BUG_ON(mt, mas.last != 0x2fff); + MT_BUG_ON(mt, !mas_is_overflow(&mas)); + + /* next:overflow -> active (limit changed) */ + entry = mas_next(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr3); + MT_BUG_ON(mt, mas.index != 0x3000); + MT_BUG_ON(mt, mas.last != 0x3500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* next:active -> overflow (limit reached) */ + entry = mas_next(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0x3501); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, !mas_is_overflow(&mas)); + + /* next:overflow -> overflow */ + entry = mas_next(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0x3501); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MT_BUG_ON(mt, !mas_is_overflow(&mas)); + + /* prev:overflow -> active */ + entry = mas_prev(&mas, 0); + MT_BUG_ON(mt, entry != ptr3); + MT_BUG_ON(mt, mas.index != 0x3000); + MT_BUG_ON(mt, mas.last != 0x3500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* next: none -> active, skip value at location */ + mas_set(&mas, 0); + entry = mas_next(&mas, ULONG_MAX); + mas.status = ma_none; + mas.offset = 0; + entry = mas_next(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr2); + MT_BUG_ON(mt, mas.index != 0x2000); + MT_BUG_ON(mt, mas.last != 0x2500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* prev:active ->active */ + entry = mas_prev(&mas, 0); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* prev:active -> underflow (span limit) */ + mas_next(&mas, ULONG_MAX); + entry = mas_prev(&mas, 0x1200); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); /* spanning limit */ + entry = mas_prev(&mas, 0x1200); /* underflow */ + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_underflow(&mas)); + + /* prev:underflow -> underflow (lower limit) spanning end range */ + entry = mas_prev(&mas, 0x0100); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0x0FFF); + MT_BUG_ON(mt, !mas_is_underflow(&mas)); + + /* prev:underflow -> underflow */ + entry = mas_prev(&mas, 0); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 
0x0FFF); + MT_BUG_ON(mt, !mas_is_underflow(&mas)); + + /* prev:underflow -> underflow */ + entry = mas_prev(&mas, 0); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0); + MT_BUG_ON(mt, mas.last != 0x0FFF); + MT_BUG_ON(mt, !mas_is_underflow(&mas)); + + /* next:underflow -> active */ + entry = mas_next(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* prev:first value -> underflow */ + entry = mas_prev(&mas, 0x1000); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_underflow(&mas)); + + /* find:underflow -> first value */ + entry = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* prev: pause ->active */ + mas_set(&mas, 0x3600); + entry = mas_prev(&mas, 0); + MT_BUG_ON(mt, entry != ptr3); + mas_pause(&mas); + entry = mas_prev(&mas, 0); + MT_BUG_ON(mt, entry != ptr2); + MT_BUG_ON(mt, mas.index != 0x2000); + MT_BUG_ON(mt, mas.last != 0x2500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* prev:active -> underflow spanning min */ + entry = mas_prev(&mas, 0x1600); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0x1501); + MT_BUG_ON(mt, mas.last != 0x1FFF); + MT_BUG_ON(mt, !mas_is_underflow(&mas)); + + /* prev: active ->active, continue */ + entry = mas_prev(&mas, 0); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* find: start ->active */ + mas_set(&mas, 0); + entry = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* find: pause ->active */ + mas_set(&mas, 0); + mas_pause(&mas); + entry = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* find: start ->active on value */ + mas_set(&mas, 1200); + entry = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr); + MT_BUG_ON(mt, mas.index != 0x1000); + MT_BUG_ON(mt, mas.last != 0x1500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* find:active ->active */ + entry = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != ptr2); + MT_BUG_ON(mt, mas.index != 0x2000); + MT_BUG_ON(mt, mas.last != 0x2500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + + /* find:active -> active (NULL)*/ + entry = mas_find(&mas, 0x2700); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0x2501); + MT_BUG_ON(mt, mas.last != 0x2FFF); + MAS_BUG_ON(&mas, !mas_is_active(&mas)); + + /* find: overflow ->active */ + entry = mas_find(&mas, 0x5000); + MT_BUG_ON(mt, entry != ptr3); + MT_BUG_ON(mt, mas.index != 0x3000); + MT_BUG_ON(mt, mas.last != 0x3500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* find:active -> active (NULL) end*/ + entry = mas_find(&mas, ULONG_MAX); + MT_BUG_ON(mt, entry != NULL); + MT_BUG_ON(mt, mas.index != 0x3501); + MT_BUG_ON(mt, mas.last != ULONG_MAX); + MAS_BUG_ON(&mas, !mas_is_active(&mas)); + + /* find_rev: active (END) ->active */ + entry = mas_find_rev(&mas, 0); + MT_BUG_ON(mt, entry != ptr3); + MT_BUG_ON(mt, mas.index != 0x3000); + MT_BUG_ON(mt, mas.last != 0x3500); + MT_BUG_ON(mt, !mas_is_active(&mas)); + + /* find_rev:active ->active */ + 
+	/* find_rev:active ->active */
+	entry = mas_find_rev(&mas, 0);
+	MT_BUG_ON(mt, entry != ptr2);
+	MT_BUG_ON(mt, mas.index != 0x2000);
+	MT_BUG_ON(mt, mas.last != 0x2500);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	/* find_rev: pause ->active */
+	mas_pause(&mas);
+	entry = mas_find_rev(&mas, 0);
+	MT_BUG_ON(mt, entry != ptr);
+	MT_BUG_ON(mt, mas.index != 0x1000);
+	MT_BUG_ON(mt, mas.last != 0x1500);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	/* find_rev:active -> underflow */
+	entry = mas_find_rev(&mas, 0);
+	MT_BUG_ON(mt, entry != NULL);
+	MT_BUG_ON(mt, mas.index != 0);
+	MT_BUG_ON(mt, mas.last != 0x0FFF);
+	MT_BUG_ON(mt, !mas_is_underflow(&mas));
+
+	/* find_rev: start ->active */
+	mas_set(&mas, 0x1200);
+	entry = mas_find_rev(&mas, 0);
+	MT_BUG_ON(mt, entry != ptr);
+	MT_BUG_ON(mt, mas.index != 0x1000);
+	MT_BUG_ON(mt, mas.last != 0x1500);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	/* mas_walk start ->active */
+	mas_set(&mas, 0x1200);
+	entry = mas_walk(&mas);
+	MT_BUG_ON(mt, entry != ptr);
+	MT_BUG_ON(mt, mas.index != 0x1000);
+	MT_BUG_ON(mt, mas.last != 0x1500);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	/* mas_walk start ->active */
+	mas_set(&mas, 0x1600);
+	entry = mas_walk(&mas);
+	MT_BUG_ON(mt, entry != NULL);
+	MT_BUG_ON(mt, mas.index != 0x1501);
+	MT_BUG_ON(mt, mas.last != 0x1fff);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	/* mas_walk pause ->active */
+	mas_set(&mas, 0x1200);
+	mas_pause(&mas);
+	entry = mas_walk(&mas);
+	MT_BUG_ON(mt, entry != ptr);
+	MT_BUG_ON(mt, mas.index != 0x1000);
+	MT_BUG_ON(mt, mas.last != 0x1500);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	/* mas_walk pause -> active */
+	mas_set(&mas, 0x1600);
+	mas_pause(&mas);
+	entry = mas_walk(&mas);
+	MT_BUG_ON(mt, entry != NULL);
+	MT_BUG_ON(mt, mas.index != 0x1501);
+	MT_BUG_ON(mt, mas.last != 0x1fff);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	/* mas_walk none -> active */
+	mas_set(&mas, 0x1200);
+	mas.status = ma_none;
+	entry = mas_walk(&mas);
+	MT_BUG_ON(mt, entry != ptr);
+	MT_BUG_ON(mt, mas.index != 0x1000);
+	MT_BUG_ON(mt, mas.last != 0x1500);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	/* mas_walk none -> active */
+	mas_set(&mas, 0x1600);
+	mas.status = ma_none;
+	entry = mas_walk(&mas);
+	MT_BUG_ON(mt, entry != NULL);
+	MT_BUG_ON(mt, mas.index != 0x1501);
+	MT_BUG_ON(mt, mas.last != 0x1fff);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	/* mas_walk active -> active */
+	mas.index = 0x1200;
+	mas.last = 0x1200;
+	mas.offset = 0;
+	entry = mas_walk(&mas);
+	MT_BUG_ON(mt, entry != ptr);
+	MT_BUG_ON(mt, mas.index != 0x1000);
+	MT_BUG_ON(mt, mas.last != 0x1500);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	/* mas_walk active -> active */
+	mas.index = 0x1600;
+	mas.last = 0x1600;
+	entry = mas_walk(&mas);
+	MT_BUG_ON(mt, entry != NULL);
+	MT_BUG_ON(mt, mas.index != 0x1501);
+	MT_BUG_ON(mt, mas.last != 0x1fff);
+	MT_BUG_ON(mt, !mas_is_active(&mas));
+
+	mas_unlock(&mas);
+	mtree_destroy(mt);
+
+	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
+	mas_lock(&mas);
+	for (int count = 0; count < 30; count++) {
+		mas_set(&mas, count);
+		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
+	}
+
+	/* Ensure mas_find works with MA_UNDERFLOW */
+	mas_set(&mas, 0);
+	entry = mas_walk(&mas);
+	mas_set(&mas, 0);
+	mas_prev(&mas, 0);
+	MT_BUG_ON(mt, mas.status != ma_underflow);
+	MT_BUG_ON(mt, mas_find(&mas, ULONG_MAX) != entry);
+
+	/* Restore active on mas_next */
+	entry = mas_next(&mas, ULONG_MAX);
+	index = mas.index;
+	mas_prev(&mas, index);
+	MT_BUG_ON(mt, mas.status != ma_underflow);
+	MT_BUG_ON(mt, mas_next(&mas, ULONG_MAX) != entry);
+
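[Aside, not part of the patch] The mas_walk() cases exercised above end in ma_active whether the index lands on a value or in a gap; on a gap the NULL return still comes back with [mas.index, mas.last] set to the bounds of the hole. That is what lets a caller store without a second descent, sketched here with new_entry standing in for whatever is being inserted:

	mas_set(&mas, 0x1600);
	entry = mas_walk(&mas);
	if (entry == NULL) {
		/* The walk already bracketed the gap, so this store
		 * fills [mas.index, mas.last] using the cached state. */
		mas_store_gfp(&mas, new_entry, GFP_KERNEL);
	}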
+	/* Ensure overflow -> active works */
+	mas_prev(&mas, 0);
+	mas_next(&mas, index - 1);
+	MT_BUG_ON(mt, mas.status != ma_overflow);
+	MT_BUG_ON(mt, mas_next(&mas, ULONG_MAX) != entry);
+
+	mas_unlock(&mas);
 }
 
-static noinline void check_bnode_min_spanning(struct maple_tree *mt)
+static noinline void __init alloc_cyclic_testing(struct maple_tree *mt)
 {
-	int i = 50;
+	unsigned long location;
+	unsigned long next;
+	int ret = 0;
 	MA_STATE(mas, mt, 0, 0);
 
-	mt_set_non_kernel(9999);
-	mas_lock(&mas);
-	do {
-		mas_set_range(&mas, i*10, i*10+9);
-		mas_store(&mas, check_bnode_min_spanning);
-	} while (i--);
+	next = 0;
+	mtree_lock(mt);
+	for (int i = 0; i < 100; i++) {
+		mas_alloc_cyclic(&mas, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
+		MAS_BUG_ON(&mas, i != location - 2);
+		MAS_BUG_ON(&mas, mas.index != location);
+		MAS_BUG_ON(&mas, mas.last != location);
+		MAS_BUG_ON(&mas, i != next - 3);
+	}
 
-	mas_set_range(&mas, 240, 509);
-	mas_store(&mas, NULL);
-	mas_unlock(&mas);
-	mas_destroy(&mas);
-	mt_set_non_kernel(0);
+	mtree_unlock(mt);
+	mtree_destroy(mt);
+	next = 0;
+	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
+	for (int i = 0; i < 100; i++) {
+		mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
+		MT_BUG_ON(mt, i != location - 2);
+		MT_BUG_ON(mt, i != next - 3);
+		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
+	}
+
+	mtree_destroy(mt);
+
+	/*
+	 * Issue with reverse search was discovered
+	 * https://lore.kernel.org/all/20241216060600.287B4C4CED0@smtp.kernel.org/
+	 * Exhausting the allocation area and forcing the search to wrap needs a
+	 * mas_reset() in mas_alloc_cyclic().
+	 */
+	next = 0;
+	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
+	for (int i = 0; i < 1023; i++) {
+		mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
+		MT_BUG_ON(mt, i != location - 2);
+		MT_BUG_ON(mt, i != next - 3);
+		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
+	}
+	mtree_erase(mt, 123);
+	MT_BUG_ON(mt, mtree_load(mt, 123) != NULL);
+	mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
+	MT_BUG_ON(mt, 123 != location);
+	MT_BUG_ON(mt, 124 != next);
+	MT_BUG_ON(mt, mtree_load(mt, location) != mt);
+	mtree_erase(mt, 100);
+	mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
+	MT_BUG_ON(mt, 100 != location);
+	MT_BUG_ON(mt, 101 != next);
+	MT_BUG_ON(mt, mtree_load(mt, location) != mt);
+	mtree_destroy(mt);
+
+	/* Overflow test */
+	next = ULONG_MAX - 1;
+	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
+	MT_BUG_ON(mt, ret != 0);
+	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
+	MT_BUG_ON(mt, ret != 0);
+	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
+	MT_BUG_ON(mt, ret != 1);
 }
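[Aside, not part of the patch] alloc_cyclic_testing() above pins down the mtree_alloc_cyclic() contract: allocate the lowest free index at or after *next within [range_lo, range_hi], store the entry there, advance *next, and wrap back to range_lo once the top is exhausted, returning 1 when a wrap occurred and a negative errno on failure. A hedged usage sketch for cyclic ID allocation; struct session, session_ids, and the 1..0xffff range are invented for the example:

	static struct maple_tree session_ids =
		MTREE_INIT(session_ids, MT_FLAGS_ALLOC_RANGE);
	static unsigned long session_next;

	struct session {
		unsigned long id;
		/* ... */
	};

	/* Returns 0 (or 1 after the ID space wrapped) and sets s->id on
	 * success, or a negative errno when the range is full. */
	static int session_alloc_id(struct session *s)
	{
		unsigned long id;
		int ret;

		ret = mtree_alloc_cyclic(&session_ids, &id, s, 1, 0xffff,
					 &session_next, GFP_KERNEL);
		if (ret < 0)
			return ret;
		s->id = id;
		return ret;
	}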
 
 static DEFINE_MTREE(tree);
-static int maple_tree_seed(void)
+static int __init maple_tree_seed(void)
 {
-	unsigned long set[] = {5015, 5014, 5017, 25, 1000,
-				1001, 1002, 1003, 1005, 0,
-				5003, 5002};
+	unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
+				1001, 1002, 1003, 1005, 0,
+				5003, 5002};
 	void *ptr = &set;
 
 	pr_info("\nTEST STARTING\n\n");
 
-	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
-	check_root_expand(&tree);
-	mtree_destroy(&tree);
-
 #if defined(BENCH_SLOT_STORE)
 #define BENCH
 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
@@ -2559,13 +3713,18 @@ static int maple_tree_seed(void)
 	mtree_destroy(&tree);
 	goto skip;
 #endif
-#if defined(BENCH_FORK)
+#if defined(BENCH_LOAD)
 #define BENCH
 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
-	bench_forking(&tree);
+	bench_load(&tree);
 	mtree_destroy(&tree);
 	goto skip;
 #endif
+#if defined(BENCH_FORK)
+#define BENCH
+	bench_forking();
+	goto skip;
+#endif
 #if defined(BENCH_MT_FOR_EACH)
 #define BENCH
 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
@@ -2573,11 +3732,39 @@ static int maple_tree_seed(void)
 	mtree_destroy(&tree);
 	goto skip;
 #endif
+#if defined(BENCH_MAS_FOR_EACH)
+#define BENCH
+	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+	bench_mas_for_each(&tree);
+	mtree_destroy(&tree);
+	goto skip;
+#endif
+#if defined(BENCH_MAS_PREV)
+#define BENCH
+	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+	bench_mas_prev(&tree);
+	mtree_destroy(&tree);
+	goto skip;
+#endif
+
+	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+	check_deficient_node(&tree);
+	mtree_destroy(&tree);
+
+	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+	check_store_null(&tree);
+	mtree_destroy(&tree);
+
+	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+	check_root_expand(&tree);
+	mtree_destroy(&tree);
 
 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
-	check_forking(&tree);
+	check_iteration(&tree);
 	mtree_destroy(&tree);
 
+	check_forking();
+
 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
 	check_mas_store_gfp(&tree);
 	mtree_destroy(&tree);
@@ -2758,13 +3945,26 @@ static int maple_tree_seed(void)
 	mtree_destroy(&tree);
 
 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
-	check_dup(&tree);
+	check_bnode_min_spanning(&tree);
 	mtree_destroy(&tree);
 
 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
-	check_bnode_min_spanning(&tree);
+	check_empty_area_window(&tree);
 	mtree_destroy(&tree);
 
+	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+	check_empty_area_fill(&tree);
+	mtree_destroy(&tree);
+
+	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+	check_state_handling(&tree);
+	mtree_destroy(&tree);
+
+	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+	alloc_cyclic_testing(&tree);
+	mtree_destroy(&tree);
+
 #if defined(BENCH)
 skip:
 #endif
@@ -2779,7 +3979,7 @@ skip:
 	return -EINVAL;
 }
 
-static void maple_tree_harvest(void)
+static void __exit maple_tree_harvest(void)
 {
 
 }
@@ -2787,4 +3987,5 @@ static void maple_tree_harvest(void)
 module_init(maple_tree_seed);
 module_exit(maple_tree_harvest);
 MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
+MODULE_DESCRIPTION("maple tree API test module");
 MODULE_LICENSE("GPL");
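[Aside, not part of the patch] The benchmark dispatch above works by having each enabled BENCH_* guard define BENCH, run its one benchmark, and jump straight to skip:, so a benchmark build bypasses every functional check. Selecting one is a single edit to the commented-out guards near the top of the file, e.g. (illustrative):

	/* Turn the corresponding commented-out guard into a real define: */
	#define BENCH_LOAD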
