Diffstat (limited to 'lib/test_maple_tree.c')
-rw-r--r--  lib/test_maple_tree.c  263
1 file changed, 237 insertions(+), 26 deletions(-)
diff --git a/lib/test_maple_tree.c b/lib/test_maple_tree.c
index 8d4c92cbdd0c..464eeb90d5ad 100644
--- a/lib/test_maple_tree.c
+++ b/lib/test_maple_tree.c
@@ -9,6 +9,7 @@
#include <linux/maple_tree.h>
#include <linux/module.h>
+#include <linux/rwsem.h>
#define MTREE_ALLOC_MAX 0x2000000000000Ul
#define CONFIG_MAPLE_SEARCH
@@ -44,6 +45,8 @@ atomic_t maple_tree_tests_passed;
/* #define BENCH_WALK */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
+/* #define BENCH_MAS_FOR_EACH */
+/* #define BENCH_MAS_PREV */
#ifdef __KERNEL__
#define mt_set_non_kernel(x) do {} while (0)
@@ -1157,6 +1160,71 @@ static noinline void __init check_ranges(struct maple_tree *mt)
MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
+ /* Check in-place modifications */
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
+ /* Append to the start of last range */
+ mt_set_non_kernel(50);
+ for (i = 0; i <= 500; i++) {
+ val = i * 5 + 1;
+ val2 = val + 4;
+ check_store_range(mt, val, val2, xa_mk_value(val), 0);
+ }
+
+ /* Append to the last range without touching any boundaries */
+ for (i = 0; i < 10; i++) {
+ val = val2 + 5;
+ val2 = val + 4;
+ check_store_range(mt, val, val2, xa_mk_value(val), 0);
+ }
+
+ /* Append to the end of last range */
+ val = val2;
+ for (i = 0; i < 10; i++) {
+ val += 5;
+ MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
+ xa_mk_value(val)) != 0);
+ }
+
+ /* Overwriting the range and over a part of the next range */
+ for (i = 10; i < 30; i += 2) {
+ val = i * 5 + 1;
+ val2 = val + 5;
+ check_store_range(mt, val, val2, xa_mk_value(val), 0);
+ }
+
+ /* Overwriting a part of the range and over the next range */
+ for (i = 50; i < 70; i += 2) {
+ val2 = i * 5;
+ val = val2 - 5;
+ check_store_range(mt, val, val2, xa_mk_value(val), 0);
+ }
+
+ /*
+ * Expand the range, only partially overwriting the previous and
+ * next ranges
+ */
+ for (i = 100; i < 130; i += 3) {
+ val = i * 5 - 5;
+ val2 = i * 5 + 1;
+ check_store_range(mt, val, val2, xa_mk_value(val), 0);
+ }
+
+ /*
+ * Expand the range, only partially overwriting the previous and
+ * next ranges, in RCU mode
+ */
+ mt_set_in_rcu(mt);
+ for (i = 150; i < 180; i += 3) {
+ val = i * 5 - 5;
+ val2 = i * 5 + 1;
+ check_store_range(mt, val, val2, xa_mk_value(val), 0);
+ }
+
+ MT_BUG_ON(mt, !mt_height(mt));
+ mt_validate(mt);
+ mt_set_non_kernel(0);
+ mtree_destroy(mt);
+
/* Test rebalance gaps */
mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
mt_set_non_kernel(50);
@@ -1705,6 +1773,66 @@ static noinline void __init bench_mt_for_each(struct maple_tree *mt)
}
#endif
+#if defined(BENCH_MAS_FOR_EACH)
+static noinline void __init bench_mas_for_each(struct maple_tree *mt)
+{
+ int i, count = 1000000;
+ unsigned long max = 2500;
+ void *entry;
+ MA_STATE(mas, mt, 0, 0);
+
+ for (i = 0; i < max; i += 5) {
+ int gap = 4;
+
+ if (i % 30 == 0)
+ gap = 3;
+ mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
+ }
+
+ rcu_read_lock();
+ for (i = 0; i < count; i++) {
+ unsigned long j = 0;
+
+ mas_for_each(&mas, entry, max) {
+ MT_BUG_ON(mt, entry != xa_mk_value(j));
+ j += 5;
+ }
+ mas_set(&mas, 0);
+ }
+ rcu_read_unlock();
+
+}
+#endif
+#if defined(BENCH_MAS_PREV)
+static noinline void __init bench_mas_prev(struct maple_tree *mt)
+{
+ int i, count = 1000000;
+ unsigned long max = 2500;
+ void *entry;
+ MA_STATE(mas, mt, 0, 0);
+
+ for (i = 0; i < max; i += 5) {
+ int gap = 4;
+
+ if (i % 30 == 0)
+ gap = 3;
+ mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
+ }
+
+ rcu_read_lock();
+ for (i = 0; i < count; i++) {
+ unsigned long j = 2495;
+
+ mas_set(&mas, ULONG_MAX);
+ while ((entry = mas_prev(&mas, 0)) != NULL) {
+ MT_BUG_ON(mt, entry != xa_mk_value(j));
+ j -= 5;
+ }
+ }
+ rcu_read_unlock();
+
+}
+#endif
/* check_forking - simulate the kernel forking sequence with the tree. */
static noinline void __init check_forking(struct maple_tree *mt)
{
@@ -1714,17 +1842,21 @@ static noinline void __init check_forking(struct maple_tree *mt)
void *val;
MA_STATE(mas, mt, 0, 0);
MA_STATE(newmas, mt, 0, 0);
+ struct rw_semaphore newmt_lock;
+
+ init_rwsem(&newmt_lock);
for (i = 0; i <= nr_entries; i++)
mtree_store_range(mt, i*10, i*10 + 5,
xa_mk_value(i), GFP_KERNEL);
mt_set_non_kernel(99999);
- mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
+ mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
+ mt_set_external_lock(&newmt, &newmt_lock);
newmas.tree = &newmt;
mas_reset(&newmas);
mas_reset(&mas);
- mas_lock(&newmas);
+ down_write(&newmt_lock);
mas.index = 0;
mas.last = 0;
if (mas_expected_entries(&newmas, nr_entries)) {
@@ -1739,10 +1871,10 @@ static noinline void __init check_forking(struct maple_tree *mt)
}
rcu_read_unlock();
mas_destroy(&newmas);
- mas_unlock(&newmas);
mt_validate(&newmt);
mt_set_non_kernel(0);
- mtree_destroy(&newmt);
+ __mt_destroy(&newmt);
+ up_write(&newmt_lock);
}
static noinline void __init check_iteration(struct maple_tree *mt)
@@ -1853,6 +1985,10 @@ static noinline void __init bench_forking(struct maple_tree *mt)
void *val;
MA_STATE(mas, mt, 0, 0);
MA_STATE(newmas, mt, 0, 0);
+ struct rw_semaphore newmt_lock;
+
+ init_rwsem(&newmt_lock);
+ mt_set_external_lock(&newmt, &newmt_lock);
for (i = 0; i <= nr_entries; i++)
mtree_store_range(mt, i*10, i*10 + 5,
@@ -1867,7 +2003,7 @@ static noinline void __init bench_forking(struct maple_tree *mt)
mas.index = 0;
mas.last = 0;
rcu_read_lock();
- mas_lock(&newmas);
+ down_write(&newmt_lock);
if (mas_expected_entries(&newmas, nr_entries)) {
printk("OOM!");
BUG_ON(1);
@@ -1878,11 +2014,11 @@ static noinline void __init bench_forking(struct maple_tree *mt)
mas_store(&newmas, val);
}
mas_destroy(&newmas);
- mas_unlock(&newmas);
rcu_read_unlock();
mt_validate(&newmt);
mt_set_non_kernel(0);
- mtree_destroy(&newmt);
+ __mt_destroy(&newmt);
+ up_write(&newmt_lock);
}
}
#endif
@@ -2039,7 +2175,7 @@ static noinline void __init next_prev_test(struct maple_tree *mt)
MT_BUG_ON(mt, val != NULL);
MT_BUG_ON(mt, mas.index != 0);
MT_BUG_ON(mt, mas.last != 5);
- MT_BUG_ON(mt, mas.node != MAS_NONE);
+ MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
mas.index = 0;
mas.last = 5;
@@ -2489,6 +2625,10 @@ static noinline void __init check_dup_gaps(struct maple_tree *mt,
void *tmp;
MA_STATE(mas, mt, 0, 0);
MA_STATE(newmas, &newmt, 0, 0);
+ struct rw_semaphore newmt_lock;
+
+ init_rwsem(&newmt_lock);
+ mt_set_external_lock(&newmt, &newmt_lock);
if (!zero_start)
i = 1;
@@ -2498,9 +2638,9 @@ static noinline void __init check_dup_gaps(struct maple_tree *mt,
mtree_store_range(mt, i*10, (i+1)*10 - gap,
xa_mk_value(i), GFP_KERNEL);
- mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
+ mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
mt_set_non_kernel(99999);
- mas_lock(&newmas);
+ down_write(&newmt_lock);
ret = mas_expected_entries(&newmas, nr_entries);
mt_set_non_kernel(0);
MT_BUG_ON(mt, ret != 0);
@@ -2513,9 +2653,9 @@ static noinline void __init check_dup_gaps(struct maple_tree *mt,
}
rcu_read_unlock();
mas_destroy(&newmas);
- mas_unlock(&newmas);
- mtree_destroy(&newmt);
+ __mt_destroy(&newmt);
+ up_write(&newmt_lock);
}
/* Duplicate many sizes of trees. Mainly to test expected entry values */
@@ -2790,6 +2930,7 @@ static noinline void __init check_empty_area_fill(struct maple_tree *mt)
* exists MAS_NONE active range
* exists active active range
* DNE active active set to last range
+ * ERANGE active MAS_OVERFLOW last range
*
* Function ENTRY Start Result index & last
* mas_prev()
@@ -2818,6 +2959,7 @@ static noinline void __init check_empty_area_fill(struct maple_tree *mt)
* any MAS_ROOT MAS_NONE 0
* exists active active range
* DNE active active last range
+ * ERANGE active MAS_UNDERFLOW last range
*
* Function ENTRY Start Result index & last
* mas_find()
@@ -2828,7 +2970,7 @@ static noinline void __init check_empty_area_fill(struct maple_tree *mt)
* DNE MAS_START MAS_NONE 0
* DNE MAS_PAUSE MAS_NONE 0
* DNE MAS_ROOT MAS_NONE 0
- * DNE MAS_NONE MAS_NONE 0
+ * DNE MAS_NONE MAS_NONE 1
* if index == 0
* exists MAS_START MAS_ROOT 0
* exists MAS_PAUSE MAS_ROOT 0
@@ -2840,7 +2982,7 @@ static noinline void __init check_empty_area_fill(struct maple_tree *mt)
* DNE MAS_START active set to max
* exists MAS_PAUSE active range
* DNE MAS_PAUSE active set to max
- * exists MAS_NONE active range
+ * exists MAS_NONE active range (start at last)
* exists active active range
* DNE active active last range (max < last)
*
@@ -2865,7 +3007,7 @@ static noinline void __init check_empty_area_fill(struct maple_tree *mt)
* DNE MAS_START active set to min
* exists MAS_PAUSE active range
* DNE MAS_PAUSE active set to min
- * exists MAS_NONE active range
+ * exists MAS_NONE active range (start at index)
* exists active active range
* DNE active active last range (min > index)
*
@@ -2912,10 +3054,10 @@ static noinline void __init check_state_handling(struct maple_tree *mt)
mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
mas_lock(&mas);
- /* prev: Start -> none */
+ /* prev: Start -> underflow */
entry = mas_prev(&mas, 0);
MT_BUG_ON(mt, entry != NULL);
- MT_BUG_ON(mt, mas.node != MAS_NONE);
+ MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
/* prev: Start -> root */
mas_set(&mas, 10);
@@ -2942,7 +3084,7 @@ static noinline void __init check_state_handling(struct maple_tree *mt)
MT_BUG_ON(mt, entry != NULL);
MT_BUG_ON(mt, mas.node != MAS_NONE);
- /* next: start -> none */
+ /* next: start -> none*/
mas_set(&mas, 10);
entry = mas_next(&mas, ULONG_MAX);
MT_BUG_ON(mt, mas.index != 1);
@@ -3141,27 +3283,48 @@ static noinline void __init check_state_handling(struct maple_tree *mt)
MT_BUG_ON(mt, mas.last != 0x2500);
MT_BUG_ON(mt, !mas_active(mas));
- /* next:active -> active out of range*/
+ /* next:active -> active beyond data */
entry = mas_next(&mas, 0x2999);
MT_BUG_ON(mt, entry != NULL);
MT_BUG_ON(mt, mas.index != 0x2501);
MT_BUG_ON(mt, mas.last != 0x2fff);
MT_BUG_ON(mt, !mas_active(mas));
- /* Continue after out of range*/
+ /* Continue after last range ends after max */
entry = mas_next(&mas, ULONG_MAX);
MT_BUG_ON(mt, entry != ptr3);
MT_BUG_ON(mt, mas.index != 0x3000);
MT_BUG_ON(mt, mas.last != 0x3500);
MT_BUG_ON(mt, !mas_active(mas));
- /* next:active -> active out of range*/
+ /* next:active -> active continued */
entry = mas_next(&mas, ULONG_MAX);
MT_BUG_ON(mt, entry != NULL);
MT_BUG_ON(mt, mas.index != 0x3501);
MT_BUG_ON(mt, mas.last != ULONG_MAX);
MT_BUG_ON(mt, !mas_active(mas));
+ /* next:active -> overflow */
+ entry = mas_next(&mas, ULONG_MAX);
+ MT_BUG_ON(mt, entry != NULL);
+ MT_BUG_ON(mt, mas.index != 0x3501);
+ MT_BUG_ON(mt, mas.last != ULONG_MAX);
+ MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
+
+ /* next:overflow -> overflow */
+ entry = mas_next(&mas, ULONG_MAX);
+ MT_BUG_ON(mt, entry != NULL);
+ MT_BUG_ON(mt, mas.index != 0x3501);
+ MT_BUG_ON(mt, mas.last != ULONG_MAX);
+ MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
+
+ /* prev:overflow -> active */
+ entry = mas_prev(&mas, 0);
+ MT_BUG_ON(mt, entry != ptr3);
+ MT_BUG_ON(mt, mas.index != 0x3000);
+ MT_BUG_ON(mt, mas.last != 0x3500);
+ MT_BUG_ON(mt, !mas_active(mas));
+
/* next: none -> active, skip value at location */
mas_set(&mas, 0);
entry = mas_next(&mas, ULONG_MAX);
@@ -3180,11 +3343,46 @@ static noinline void __init check_state_handling(struct maple_tree *mt)
MT_BUG_ON(mt, mas.last != 0x1500);
MT_BUG_ON(mt, !mas_active(mas));
- /* prev:active -> active out of range*/
+ /* prev:active -> active spanning end range */
+ entry = mas_prev(&mas, 0x0100);
+ MT_BUG_ON(mt, entry != NULL);
+ MT_BUG_ON(mt, mas.index != 0);
+ MT_BUG_ON(mt, mas.last != 0x0FFF);
+ MT_BUG_ON(mt, !mas_active(mas));
+
+ /* prev:active -> underflow */
+ entry = mas_prev(&mas, 0);
+ MT_BUG_ON(mt, entry != NULL);
+ MT_BUG_ON(mt, mas.index != 0);
+ MT_BUG_ON(mt, mas.last != 0x0FFF);
+ MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
+
+ /* prev:underflow -> underflow */
entry = mas_prev(&mas, 0);
MT_BUG_ON(mt, entry != NULL);
MT_BUG_ON(mt, mas.index != 0);
MT_BUG_ON(mt, mas.last != 0x0FFF);
+ MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
+
+ /* next:underflow -> active */
+ entry = mas_next(&mas, ULONG_MAX);
+ MT_BUG_ON(mt, entry != ptr);
+ MT_BUG_ON(mt, mas.index != 0x1000);
+ MT_BUG_ON(mt, mas.last != 0x1500);
+ MT_BUG_ON(mt, !mas_active(mas));
+
+ /* prev:first value -> underflow */
+ entry = mas_prev(&mas, 0x1000);
+ MT_BUG_ON(mt, entry != NULL);
+ MT_BUG_ON(mt, mas.index != 0x1000);
+ MT_BUG_ON(mt, mas.last != 0x1500);
+ MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
+
+ /* find:underflow -> first value */
+ entry = mas_find(&mas, ULONG_MAX);
+ MT_BUG_ON(mt, entry != ptr);
+ MT_BUG_ON(mt, mas.index != 0x1000);
+ MT_BUG_ON(mt, mas.last != 0x1500);
MT_BUG_ON(mt, !mas_active(mas));
/* prev: pause ->active */
@@ -3198,14 +3396,14 @@ static noinline void __init check_state_handling(struct maple_tree *mt)
MT_BUG_ON(mt, mas.last != 0x2500);
MT_BUG_ON(mt, !mas_active(mas));
- /* prev:active -> active out of range*/
+ /* prev:active -> active spanning min */
entry = mas_prev(&mas, 0x1600);
MT_BUG_ON(mt, entry != NULL);
MT_BUG_ON(mt, mas.index != 0x1501);
MT_BUG_ON(mt, mas.last != 0x1FFF);
MT_BUG_ON(mt, !mas_active(mas));
- /* prev: active ->active, continue*/
+ /* prev: active ->active, continue */
entry = mas_prev(&mas, 0);
MT_BUG_ON(mt, entry != ptr);
MT_BUG_ON(mt, mas.index != 0x1000);
@@ -3252,7 +3450,7 @@ static noinline void __init check_state_handling(struct maple_tree *mt)
MT_BUG_ON(mt, mas.last != 0x2FFF);
MT_BUG_ON(mt, !mas_active(mas));
- /* find: none ->active */
+ /* find: overflow ->active */
entry = mas_find(&mas, 0x5000);
MT_BUG_ON(mt, entry != ptr3);
MT_BUG_ON(mt, mas.index != 0x3000);
@@ -3433,6 +3631,20 @@ static int __init maple_tree_seed(void)
mtree_destroy(&tree);
goto skip;
#endif
+#if defined(BENCH_MAS_FOR_EACH)
+#define BENCH
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+ bench_mas_for_each(&tree);
+ mtree_destroy(&tree);
+ goto skip;
+#endif
+#if defined(BENCH_MAS_PREV)
+#define BENCH
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+ bench_mas_prev(&tree);
+ mtree_destroy(&tree);
+ goto skip;
+#endif
mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_iteration(&tree);
@@ -3637,7 +3849,6 @@ static int __init maple_tree_seed(void)
check_empty_area_fill(&tree);
mtree_destroy(&tree);
-
mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_state_handling(&tree);
mtree_destroy(&tree);