// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>
#include <linux/rwsem.h>

#define MTREE_ALLOC_MAX 0x2000000000000Ul
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

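/*
 * Without CONFIG_DEBUG_MAPLE_TREE the dump/validate helpers below compile
 * away to no-ops, and a local MT_BUG_ON() is provided that only counts the
 * tests run and passed instead of invoking the debug machinery.
 */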
#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump(mt, fmt)		do {} while (0)
#define mt_validate(mt)			do {} while (0)
#define mt_cache_shrink()		do {} while (0)
#define mas_dump(mas)			do {} while (0)
#define mas_wr_dump(mas)		do {} while (0)
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;
#undef MT_BUG_ON

#define MT_BUG_ON(__tree, __x) do {					\
	atomic_inc(&maple_tree_tests_run);				\
	if (__x) {							\
		pr_info("BUG at %s:%d (%u)\n",				\
		__func__, __LINE__, __x);				\
		pr_info("Pass: %u Run:%u\n",				\
			atomic_read(&maple_tree_tests_passed),		\
			atomic_read(&maple_tree_tests_run));		\
	} else {							\
		atomic_inc(&maple_tree_tests_passed);			\
	}								\
} while (0)
#endif

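/* Uncomment a define below to build the corresponding micro-benchmark. */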
/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_LOAD */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
/* #define BENCH_MAS_FOR_EACH */
/* #define BENCH_MAS_PREV */

#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif

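/* Local mirrors of the internal maple state status checks. */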
#define mas_is_none(x)		((x)->status == ma_none)
#define mas_is_overflow(x)	((x)->status == ma_overflow)
#define mas_is_underflow(x)	((x)->status == ma_underflow)

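/*
 * Thin wrappers around the mtree_* API.  Indexed entries are stored as
 * xa_mk_value(index & LONG_MAX) so that a later load can be verified
 * against the index alone.
 */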
static int __init mtree_insert_index(struct maple_tree *mt,
				     unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}

static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int __init mtree_test_store_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
				void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int __init mtree_test_insert_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}

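/*
 * The allocation checks seed *result with expected + 1 so a stale value
 * cannot be mistaken for a successful allocation.
 */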
#if defined(CONFIG_64BIT)
static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{

	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}

static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{

	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}
#endif

static noinline void __init check_load(struct maple_tree *mt,
				       unsigned long index, void *ptr)
{
	void *ret = mtree_test_load(mt, index);

	if (ret != ptr)
		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
	MT_BUG_ON(mt, ret != ptr);
}

static noinline void __init check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_store_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_insert_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert(struct maple_tree *mt,
					 unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != 0);
}

static noinline void __init check_dup_insert(struct maple_tree *mt,
				      unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != -EEXIST);
}


static noinline void __init check_index_load(struct maple_tree *mt,
					     unsigned long index)
{
	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
}

static inline __init int not_empty(struct maple_node *node)
{
	int i;

	if (node->parent)
		return 1;

	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
		if (node->slot[i])
			return 1;

	return 0;
}


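/*
 * check_rev_seq() inserts max down to 1 in descending order, re-checking
 * every index inserted so far after each step and making sure the height
 * survives toggling RCU mode.
 */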
static noinline void __init check_rev_seq(struct maple_tree *mt,
					  unsigned long max, bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

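/* The ascending counterpart: insert 0..max, verifying after every insert. */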
static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

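/*
 * The *_split tests below force node splits at the low, middle and high
 * ends of the key space by halving or doubling the inserted index.
 */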
static noinline void __init check_lb_not_empty(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge = 4000UL * 1000 * 1000;


	i = huge;
	while (i > 4096) {
		check_insert(mt, i, (void *) i);
		for (j = huge; j >= i; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i /= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}

static noinline void __init check_upper_bound_split(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge;

	MT_BUG_ON(mt, !mtree_empty(mt));

	if (MAPLE_32BIT)
		huge = 2147483647UL;
	else
		huge = 4000UL * 1000 * 1000;

	i = 4096;
	while (i < huge) {
		check_insert(mt, i, (void *) i);
		for (j = i; j >= huge; j *= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i *= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_mid_split(struct maple_tree *mt)
{
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);
}

static noinline void __init check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}

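/*
 * check_find() exercises mas_find(), mas_for_each(), mt_for_each() and
 * mas_prev() over power-of-four indices, with an XA_ZERO_ENTRY planted at
 * index 64 and an entry at ULONG_MAX to probe the upper boundary.
 */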
static noinline void __init check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT) {
		count = 15;
	} else {
		count = 20;
	}

	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);


	/* Range check. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find the last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete the end entry
	 * 3. find the last value by searching for ULONG_MAX and then using
	 *    prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);


	mas.status = ma_none;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}

static noinline void __init check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/* MT_BUG_ON(mt, !mtree_empty(mt)); */
}


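/*
 * The alloc-range tests replay a real process address-space layout
 * (captured from /proc/self/maps, scaled to page numbers with >> 12)
 * and then allocate from the resulting gaps.
 */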
#if defined(CONFIG_64BIT)
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE
		 *        end of hole is EXCLUSIVE
		 *        (opposite of the above table.)
		 * Start of hole, end of hole,  size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x7fff5878d << 12,  /* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x565234AF0 << 12,    /* Max */
		0x3000,         /* Size */
		0x565234AEE << 12,  /* max - 3. */
		0,              /* Return value success. */

		0x0,            /* Min */
		-1,             /* Max */
		0x1000,         /* Size */
		562949953421311 << 12,/* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7F36D5109 << 12,    /* Max */
		0x4000,         /* Size */
		0x7F36D5106 << 12,    /* First rev hole of size 0x4000 */
		0,              /* Return value success. */

		/* Ascend test. */
		0x0,
		34148798628 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

		/* Single space test. */
		34148798725 << 12,
		34148798725 << 12,
		1 << 12,
		34148798725 << 12,
		0,
	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}


	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1]>>12, holes[i+2]>>12,
				holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
				i, req_range[i] >> 12,
				(req_range[i + 1] >> 12),
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	mtree_erase(mt, 34148798725);
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}

static noinline void __init check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps|awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	static const unsigned long holes[] = {
		/* Start of hole, end of hole,  size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x565234afb000, /* First hole in our data of size 0x1000. */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7fff58791000, /* Max */
		0x1F00,         /* Size */
		0x0,            /* First hole in our data of size 0x2000. */
		0,              /* Return value success. */

		/* Test ascend. */
		34148797436 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x3000,         /* Size */
		34148798629 << 12,             /* Expected location */
		0,              /* Return value success. */

		/* Test failing. */
		34148798623 << 12,  /* Min */
		34148798683 << 12,  /* Max */
		0x15000,            /* Size */
		0,             /* Expected location */
		-EBUSY,              /* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12,  /* Min */
		0x7fff587AF000,    /* Max */
		0x10000,           /* Size */
		34148798632 << 12,             /* Expected location */
		0,              /* Return value success. */

		/* Test walking off the end of root. */
		0,                  /* Min */
		-1,                 /* Max */
		-1,                 /* Size */
		0,                  /* Expected location */
		-EBUSY,             /* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,                  /* Min */
		-1,                 /* Max */
		4503599618982063UL << 12,  /* Size */
		34359052178 << 12,  /* Expected location */
		-EBUSY,             /* Return failure. */

		/* Test a single entry */
		34148798648 << 12,		/* Min */
		34148798648 << 12,		/* Max */
		4096,			/* Size of 1 */
		34148798648 << 12,	/* Location is the same as min/max */
		0,			/* Success */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt, mt_dump_hex);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}


	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {

#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			holes[i+1] >> 12, holes[i+2] >> 12,
			min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i]   >> 12, req_range[i + 1]   >> 12,
			 req_range[i + 2]   >> 12, req_range[i + 3]   >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt, mt_dump_hex);
#endif
	}

	mtree_destroy(mt);
}
#endif

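/*
 * check_ranges() covers overlapping inserts and stores plus the overwrite
 * cases listed mid-function: splits, rebalancing and multi-level
 * overwrites, with and without RCU mode.
 */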
static noinline void __init check_ranges(struct maple_tree *mt)
{
	int i, val, val2;
	static const unsigned long r[] = {
		10, 15,
		20, 25,
		17, 22, /* Overlaps previous range. */
		9, 1000, /* Huge. */
		100, 200,
		45, 168,
		118, 128,
	};

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
	MT_BUG_ON(mt, !mt_height(mt));
	/* Store */
	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	MT_BUG_ON(mt, mt_height(mt));

	check_seq(mt, 50, false);
	mt_set_non_kernel(4);
	check_store_range(mt, 5, 47,  xa_mk_value(47), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-100 */
	check_seq(mt, 100, false);
	/* Store 45-168 */
	mt_set_non_kernel(10);
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-200 */
	check_seq(mt, 200, false);
	/* Store 45-168 */
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	check_seq(mt, 30, false);
	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite across multiple levels. */
	/* Create tree of 1-400 */
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	/* Store 118-128 */
	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
	mt_set_non_kernel(50);
	mtree_test_erase(mt, 140);
	mtree_test_erase(mt, 141);
	mtree_test_erase(mt, 142);
	mtree_test_erase(mt, 143);
	mtree_test_erase(mt, 130);
	mtree_test_erase(mt, 131);
	mtree_test_erase(mt, 132);
	mtree_test_erase(mt, 133);
	mtree_test_erase(mt, 134);
	mtree_test_erase(mt, 135);
	check_load(mt, r[12], xa_mk_value(r[12]));
	check_load(mt, r[13], xa_mk_value(r[12]));
	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
	check_load(mt, 135, NULL);
	check_load(mt, 140, NULL);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);


	/* Overwrite multiple levels at the end of the tree (slot 7) */
	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	check_store_range(mt, 347, 352, xa_mk_value(347), 0);

	check_load(mt, 346, xa_mk_value(346));
	for (i = 347; i <= 352; i++)
		check_load(mt, i, xa_mk_value(347));
	for (i = 353; i <= 361; i++)
		check_load(mt, i, xa_mk_value(353));
	check_load(mt, 362, xa_mk_value(362));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 363; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 364, NULL);
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(5);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 364; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);


	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	mt_set_non_kernel(0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	/*
	 * Interesting cases:
	 * 1. Overwrite the end of a node and end in the first entry of the
	 * next node.
	 * 2. Split a single range.
	 * 3. Overwrite the start of a range.
	 * 4. Overwrite the end of a range.
	 * 5. Overwrite the entire range.
	 * 6. Overwrite a range that causes multiple parent nodes to be
	 * combined.
	 * 7. Overwrite a range that causes multiple parent nodes and part of
	 * root to be combined.
	 * 8. Overwrite the whole tree.
	 * 9. Try to overwrite the zero entry of an alloc tree.
	 * 10. Write a range larger than a node's current pivot.
	 */

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
	mtree_destroy(mt);
	mt_set_non_kernel(0);

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
	check_store_range(mt, 2460, 2470, NULL, 0);
	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Check in-place modifications */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	/* Append to the start of last range */
	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i * 5 + 1;
		val2 = val + 4;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Append to the last range without touching any boundaries */
	for (i = 0; i < 10; i++) {
		val = val2 + 5;
		val2 = val + 4;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Append to the end of last range */
	val = val2;
	for (i = 0; i < 10; i++) {
		val += 5;
		MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
						     xa_mk_value(val)) != 0);
	}

	/* Overwriting the range and over a part of the next range */
	for (i = 10; i < 30; i += 2) {
		val = i * 5 + 1;
		val2 = val + 5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Overwriting a part of the range and over the next range */
	for (i = 50; i < 70; i += 2) {
		val2 = i * 5;
		val = val2 - 5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/*
	 * Expand the range, only partially overwriting the previous and
	 * next ranges
	 */
	for (i = 100; i < 130; i += 3) {
		val = i * 5 - 5;
		val2 = i * 5 + 1;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/*
	 * Expand the range, only partially overwriting the previous and
	 * next ranges, in RCU mode
	 */
	mt_set_in_rcu(mt);
	for (i = 150; i < 180; i += 3) {
		val = i * 5 - 5;
		val2 = i * 5 + 1;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	MT_BUG_ON(mt, !mt_height(mt));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	/* Test rebalance gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(50);
	for (i = 0; i <= 50; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
	check_store_range(mt, 240, 249, NULL, 0);
	mtree_erase(mt, 200);
	mtree_erase(mt, 210);
	mtree_erase(mt, 220);
	mtree_erase(mt, 230);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
	check_store_range(mt, 4842, 4849, NULL, 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1300; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Cause a 3 child split all the way up the tree. */
	for (i = 5; i < 215; i += 10)
		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
	for (i = 5; i < 65; i += 10)
		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);

	MT_BUG_ON(mt, mt_height(mt) >= 4);
	for (i = 5; i < 45; i += 10)
		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) < 4);
	mtree_destroy(mt);


	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1200; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Fill parents and leaves before split. */
	for (i = 5; i < 455; i += 10)
		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);

	for (i = 1; i < 16; i++)
		check_store_range(mt, 8185 + i, 8185 + i + 1,
				  xa_mk_value(8185+i), 0);
	MT_BUG_ON(mt, mt_height(mt) >= 4);
	/* Triple split across multiple levels. */
	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) != 4);
}

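/*
 * check_next_entry()/check_prev_entry() walk a small sequential tree
 * forwards and backwards, including limit handling at both ends.
 */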
static noinline void __init check_next_entry(struct maple_tree *mt)
{
	void *entry = NULL;
	unsigned long limit = 30, i = 0;
	MA_STATE(mas, mt, i, i);

	MT_BUG_ON(mt, !mtree_empty(mt));

	check_seq(mt, limit, false);
	rcu_read_lock();

	/* Check the first one and get ma_state in the correct state. */
	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
	for ( ; i <= limit + 1; i++) {
		entry = mas_next(&mas, limit);
		if (i > limit)
			MT_BUG_ON(mt, entry != NULL);
		else
			MT_BUG_ON(mt, xa_mk_value(i) != entry);
	}
	rcu_read_unlock();
	mtree_destroy(mt);
}

static noinline void __init check_prev_entry(struct maple_tree *mt)
{
	unsigned long index = 16;
	void *value;
	int i;

	MA_STATE(mas, mt, index, index);

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, 30, false);

	rcu_read_lock();
	value = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, value != xa_mk_value(index));
	value = mas_prev(&mas, 0);
	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
	rcu_read_unlock();
	mtree_destroy(mt);

	/* Check limits on prev */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	for (i = 0; i <= index; i++) {
		mas_set_range(&mas, i*10, i*10+5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	mas_set(&mas, 20);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(2));

	value = mas_prev(&mas, 19);
	MT_BUG_ON(mt, value != NULL);

	mas_set(&mas, 80);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(8));

	value = mas_prev(&mas, 76);
	MT_BUG_ON(mt, value != NULL);

	mas_unlock(&mas);
}

static noinline void __init check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;


	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);


	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);
	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));


	mas_unlock(&mas);
}

static noinline void __init check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	static const unsigned long *seq100;
	static const unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20 */
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20 */
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	static const unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
	 * seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					     seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6].  Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					     seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					     seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * gap.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					     seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					     seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}

static noinline void __init check_node_overwrite(struct maple_tree *mt)
{
	int i, max = 4000;

	for (i = 0; i < max; i++)
		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));

	mtree_test_store_range(mt, 319951, 367950, NULL);
	/* mt_dump(mt, mt_dump_dec); */
	mt_validate(mt);
}

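/*
 * Benchmarks: each section below loops a hot operation millions of times
 * over a small pre-populated tree; timing is presumed to be taken
 * externally (e.g. with perf), as the functions keep no clocks themselves.
 */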
1689 #if defined(BENCH_SLOT_STORE)
bench_slot_store(struct maple_tree * mt)1690 static noinline void __init bench_slot_store(struct maple_tree *mt)
1691 {
1692 	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
1693 
1694 	for (i = 0; i < max; i += 10)
1695 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1696 
1697 	for (i = 0; i < count; i++) {
1698 		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1699 		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1700 				  GFP_KERNEL);
1701 	}
1702 }
1703 #endif
1704 
1705 #if defined(BENCH_NODE_STORE)
bench_node_store(struct maple_tree * mt)1706 static noinline void __init bench_node_store(struct maple_tree *mt)
1707 {
1708 	int i, overwrite = 76, max = 240, count = 20000000;
1709 
1710 	for (i = 0; i < max; i += 10)
1711 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1712 
1713 	for (i = 0; i < count; i++) {
1714 		mtree_store_range(mt, overwrite,  overwrite + 15,
1715 				  xa_mk_value(overwrite), GFP_KERNEL);
1716 
1717 		overwrite += 5;
1718 		if (overwrite >= 135)
1719 			overwrite = 76;
1720 	}
1721 }
1722 #endif
1723 
1724 #if defined(BENCH_AWALK)
bench_awalk(struct maple_tree * mt)1725 static noinline void __init bench_awalk(struct maple_tree *mt)
1726 {
1727 	int i, max = 2500, count = 50000000;
1728 	MA_STATE(mas, mt, 1470, 1470);
1729 
1730 	for (i = 0; i < max; i += 10)
1731 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1732 
1733 	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
1734 
1735 	for (i = 0; i < count; i++) {
1736 		mas_empty_area_rev(&mas, 0, 2000, 10);
1737 		mas_reset(&mas);
1738 	}
1739 }
1740 #endif
1741 #if defined(BENCH_WALK)
bench_walk(struct maple_tree * mt)1742 static noinline void __init bench_walk(struct maple_tree *mt)
1743 {
1744 	int i, max = 2500, count = 550000000;
1745 	MA_STATE(mas, mt, 1470, 1470);
1746 
1747 	for (i = 0; i < max; i += 10)
1748 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1749 
1750 	for (i = 0; i < count; i++) {
1751 		mas_walk(&mas);
1752 		mas_reset(&mas);
1753 	}
1754 
1755 }
1756 #endif
1757 
1758 #if defined(BENCH_LOAD)
bench_load(struct maple_tree * mt)1759 static noinline void __init bench_load(struct maple_tree *mt)
1760 {
1761 	int i, max = 2500, count = 550000000;
1762 
1763 	for (i = 0; i < max; i += 10)
1764 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1765 
1766 	for (i = 0; i < count; i++)
1767 		mtree_load(mt, 1470);
1768 }
1769 #endif
1770 
1771 #if defined(BENCH_MT_FOR_EACH)
bench_mt_for_each(struct maple_tree * mt)1772 static noinline void __init bench_mt_for_each(struct maple_tree *mt)
1773 {
1774 	int i, count = 1000000;
1775 	unsigned long max = 2500, index = 0;
1776 	void *entry;
1777 
1778 	for (i = 0; i < max; i += 5)
1779 		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1780 
1781 	for (i = 0; i < count; i++) {
1782 		unsigned long j = 0;
1783 
1784 		mt_for_each(mt, entry, index, max) {
1785 			MT_BUG_ON(mt, entry != xa_mk_value(j));
1786 			j += 5;
1787 		}
1788 
1789 		index = 0;
1790 	}
1791 
1792 }
1793 #endif
1794 
1795 #if defined(BENCH_MAS_FOR_EACH)
bench_mas_for_each(struct maple_tree * mt)1796 static noinline void __init bench_mas_for_each(struct maple_tree *mt)
1797 {
1798 	int i, count = 1000000;
1799 	unsigned long max = 2500;
1800 	void *entry;
1801 	MA_STATE(mas, mt, 0, 0);
1802 
1803 	for (i = 0; i < max; i += 5) {
1804 		int gap = 4;
1805 
1806 		if (i % 30 == 0)
1807 			gap = 3;
1808 		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1809 	}
1810 
1811 	rcu_read_lock();
1812 	for (i = 0; i < count; i++) {
1813 		unsigned long j = 0;
1814 
1815 		mas_for_each(&mas, entry, max) {
1816 			MT_BUG_ON(mt, entry != xa_mk_value(j));
1817 			j += 5;
1818 		}
1819 		mas_set(&mas, 0);
1820 	}
1821 	rcu_read_unlock();
1822 
1823 }
1824 #endif
1825 #if defined(BENCH_MAS_PREV)
bench_mas_prev(struct maple_tree * mt)1826 static noinline void __init bench_mas_prev(struct maple_tree *mt)
1827 {
1828 	int i, count = 1000000;
1829 	unsigned long max = 2500;
1830 	void *entry;
1831 	MA_STATE(mas, mt, 0, 0);
1832 
1833 	for (i = 0; i < max; i += 5) {
1834 		int gap = 4;
1835 
1836 		if (i % 30 == 0)
1837 			gap = 3;
1838 		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1839 	}
1840 
1841 	rcu_read_lock();
1842 	for (i = 0; i < count; i++) {
1843 		unsigned long j = 2495;
1844 
1845 		mas_set(&mas, ULONG_MAX);
1846 		while ((entry = mas_prev(&mas, 0)) != NULL) {
1847 			MT_BUG_ON(mt, entry != xa_mk_value(j));
1848 			j -= 5;
1849 		}
1850 	}
1851 	rcu_read_unlock();
1852 
1853 }
1854 #endif
1855 /* check_forking - simulate the kernel forking sequence with the tree. */
1856 static noinline void __init check_forking(void)
1857 {
1858 	struct maple_tree mt, newmt;
1859 	int i, nr_entries = 134, ret;
1860 	void *val;
1861 	MA_STATE(mas, &mt, 0, 0);
1862 	MA_STATE(newmas, &newmt, 0, 0);
1863 	struct rw_semaphore mt_lock, newmt_lock;
1864 
1865 	init_rwsem(&mt_lock);
1866 	init_rwsem(&newmt_lock);
1867 
1868 	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1869 	mt_set_external_lock(&mt, &mt_lock);
1870 
1871 	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1872 	mt_set_external_lock(&newmt, &newmt_lock);
1873 
1874 	down_write(&mt_lock);
1875 	for (i = 0; i <= nr_entries; i++) {
1876 		mas_set_range(&mas, i*10, i*10 + 5);
1877 		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1878 	}
1879 
1880 	down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
1881 	ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
1882 	if (ret) {
1883 		pr_err("OOM!\n");
1884 		BUG_ON(1);
1885 	}
1886 
1887 	mas_set(&newmas, 0);
1888 	mas_for_each(&newmas, val, ULONG_MAX)
1889 		mas_store(&newmas, val);
1890 
1891 	mas_destroy(&newmas);
1892 	mas_destroy(&mas);
1893 	mt_validate(&newmt);
1894 	__mt_destroy(&newmt);
1895 	__mt_destroy(&mt);
1896 	up_write(&newmt_lock);
1897 	up_write(&mt_lock);
1898 }
1899 
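/*
 * Editor's sketch (example_dup_tree() is a hypothetical helper, not part of
 * the upstream suite): the fork pattern exercised by check_forking() above,
 * distilled.  __mt_dup() copies only the node structure; the new tree is
 * then walked and every entry is overwritten in place with mas_store() - a
 * real fork would store the duplicated VMAs here.  Both trees are assumed
 * to be locked by the caller, as in check_forking().
 */
static __maybe_unused int __init example_dup_tree(struct maple_tree *old,
						  struct maple_tree *new)
{
	void *val;
	int ret;
	MA_STATE(newmas, new, 0, 0);

	ret = __mt_dup(old, new, GFP_KERNEL);	/* copy nodes only */
	if (ret)
		return ret;

	mas_for_each(&newmas, val, ULONG_MAX)
		mas_store(&newmas, val);	/* replace each entry in place */

	mas_destroy(&newmas);
	return 0;
}
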
1900 static noinline void __init check_iteration(struct maple_tree *mt)
1901 {
1902 	int i, nr_entries = 125;
1903 	void *val;
1904 	MA_STATE(mas, mt, 0, 0);
1905 
1906 	for (i = 0; i <= nr_entries; i++)
1907 		mtree_store_range(mt, i * 10, i * 10 + 9,
1908 				  xa_mk_value(i), GFP_KERNEL);
1909 
1910 	mt_set_non_kernel(99999);
1911 
1912 	i = 0;
1913 	mas_lock(&mas);
1914 	mas_for_each(&mas, val, 925) {
1915 		MT_BUG_ON(mt, mas.index != i * 10);
1916 		MT_BUG_ON(mt, mas.last != i * 10 + 9);
1917 		/* Overwrite end of entry 92 */
1918 		if (i == 92) {
1919 			mas.index = 925;
1920 			mas.last = 929;
1921 			mas_store(&mas, val);
1922 		}
1923 		i++;
1924 	}
1925 	/* Ensure mas_find() gets the next value */
1926 	val = mas_find(&mas, ULONG_MAX);
1927 	MT_BUG_ON(mt, val != xa_mk_value(i));
1928 
1929 	mas_set(&mas, 0);
1930 	i = 0;
1931 	mas_for_each(&mas, val, 785) {
1932 		MT_BUG_ON(mt, mas.index != i * 10);
1933 		MT_BUG_ON(mt, mas.last != i * 10 + 9);
1934 		/* Overwrite start of entry 78 */
1935 		if (i == 78) {
1936 			mas.index = 780;
1937 			mas.last = 785;
1938 			mas_store(&mas, val);
1939 		} else {
1940 			i++;
1941 		}
1942 	}
1943 	val = mas_find(&mas, ULONG_MAX);
1944 	MT_BUG_ON(mt, val != xa_mk_value(i));
1945 
1946 	mas_set(&mas, 0);
1947 	i = 0;
1948 	mas_for_each(&mas, val, 765) {
1949 		MT_BUG_ON(mt, mas.index != i * 10);
1950 		MT_BUG_ON(mt, mas.last != i * 10 + 9);
1951 		/* Overwrite end of entry 76 and advance to the end */
1952 		if (i == 76) {
1953 			mas.index = 760;
1954 			mas.last = 765;
1955 			mas_store(&mas, val);
1956 		}
1957 		i++;
1958 	}
1959 	/* Make sure the next find returns the one after 765, 766-769 */
1960 	val = mas_find(&mas, ULONG_MAX);
1961 	MT_BUG_ON(mt, val != xa_mk_value(76));
1962 	mas_unlock(&mas);
1963 	mas_destroy(&mas);
1964 	mt_set_non_kernel(0);
1965 }
1966 
1967 static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
1968 {
1969 
1970 	struct maple_tree newmt;
1971 	int i, nr_entries = 135;
1972 	void *val;
1973 	MA_STATE(mas, mt, 0, 0);
1974 	MA_STATE(newmas, mt, 0, 0);
1975 
1976 	for (i = 0; i <= nr_entries; i++)
1977 		mtree_store_range(mt, i*10, i*10 + 5,
1978 				  xa_mk_value(i), GFP_KERNEL);
1979 
1980 	mt_set_non_kernel(99999);
1981 	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1982 	newmas.tree = &newmt;
1983 	rcu_read_lock();
1984 	mas_lock(&newmas);
1985 	mas_reset(&newmas);
1986 	mas_set(&mas, 0);
1987 	mas_for_each(&mas, val, ULONG_MAX) {
1988 		newmas.index = mas.index;
1989 		newmas.last = mas.last;
1990 		mas_store_gfp(&newmas, val, GFP_KERNEL);
1991 	}
1992 	mas_unlock(&newmas);
1993 	rcu_read_unlock();
1994 	mt_validate(&newmt);
1995 	mt_set_non_kernel(0);
1996 	mtree_destroy(&newmt);
1997 }
1998 
1999 #if defined(BENCH_FORK)
2000 static noinline void __init bench_forking(void)
2001 {
2002 	struct maple_tree mt, newmt;
2003 	int i, nr_entries = 134, nr_fork = 80000, ret;
2004 	void *val;
2005 	MA_STATE(mas, &mt, 0, 0);
2006 	MA_STATE(newmas, &newmt, 0, 0);
2007 	struct rw_semaphore mt_lock, newmt_lock;
2008 
2009 	init_rwsem(&mt_lock);
2010 	init_rwsem(&newmt_lock);
2011 
2012 	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2013 	mt_set_external_lock(&mt, &mt_lock);
2014 
2015 	down_write(&mt_lock);
2016 	for (i = 0; i <= nr_entries; i++) {
2017 		mas_set_range(&mas, i*10, i*10 + 5);
2018 		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
2019 	}
2020 
2021 	for (i = 0; i < nr_fork; i++) {
2022 		mt_init_flags(&newmt,
2023 			      MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2024 		mt_set_external_lock(&newmt, &newmt_lock);
2025 
2026 		down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
2027 		ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
2028 		if (ret) {
2029 			pr_err("OOM!\n");
2030 			BUG_ON(1);
2031 		}
2032 
2033 		mas_set(&newmas, 0);
2034 		mas_for_each(&newmas, val, ULONG_MAX)
2035 			mas_store(&newmas, val);
2036 
2037 		mas_destroy(&newmas);
2038 		mt_validate(&newmt);
2039 		__mt_destroy(&newmt);
2040 		up_write(&newmt_lock);
2041 	}
2042 	mas_destroy(&mas);
2043 	__mt_destroy(&mt);
2044 	up_write(&mt_lock);
2045 }
2046 #endif
2047 
2048 static noinline void __init next_prev_test(struct maple_tree *mt)
2049 {
2050 	int i, nr_entries;
2051 	void *val;
2052 	MA_STATE(mas, mt, 0, 0);
2053 	struct maple_enode *mn;
2054 	static const unsigned long *level2;
2055 	static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
2056 						   725};
2057 	static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
2058 						   1760, 1765};
2059 	unsigned long last_index;
2060 
2061 	if (MAPLE_32BIT) {
2062 		nr_entries = 500;
2063 		level2 = level2_32;
2064 		last_index = 0x138e;
2065 	} else {
2066 		nr_entries = 200;
2067 		level2 = level2_64;
2068 		last_index = 0x7d6;
2069 	}
2070 
2071 	for (i = 0; i <= nr_entries; i++)
2072 		mtree_store_range(mt, i*10, i*10 + 5,
2073 				  xa_mk_value(i), GFP_KERNEL);
2074 
2075 	mas_lock(&mas);
2076 	for (i = 0; i <= nr_entries / 2; i++) {
2077 		mas_next(&mas, 1000);
2078 		if (mas_is_none(&mas))
2079 			break;
2080 
2081 	}
2082 	mas_reset(&mas);
2083 	mas_set(&mas, 0);
2084 	i = 0;
2085 	mas_for_each(&mas, val, 1000) {
2086 		i++;
2087 	}
2088 
2089 	mas_reset(&mas);
2090 	mas_set(&mas, 0);
2091 	i = 0;
2092 	mas_for_each(&mas, val, 1000) {
2093 		mas_pause(&mas);
2094 		i++;
2095 	}
2096 
2097 	/*
2098 	 * 680 - 685 = xa_mk_value(68)
2099 	 * 686 - 689 = NULL
2100 	 * 690 - 695 = xa_mk_value(69)
2101 	 * Check simple next/prev
2102 	 */
2103 	mas_set(&mas, 686);
2104 	val = mas_walk(&mas);
2105 	MT_BUG_ON(mt, val != NULL);
2106 
2107 	val = mas_next(&mas, 1000);
2108 	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2109 	MT_BUG_ON(mt, mas.index != 690);
2110 	MT_BUG_ON(mt, mas.last != 695);
2111 
2112 	val = mas_prev(&mas, 0);
2113 	MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
2114 	MT_BUG_ON(mt, mas.index != 680);
2115 	MT_BUG_ON(mt, mas.last != 685);
2116 
2117 	val = mas_next(&mas, 1000);
2118 	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2119 	MT_BUG_ON(mt, mas.index != 690);
2120 	MT_BUG_ON(mt, mas.last != 695);
2121 
2122 	val = mas_next(&mas, 1000);
2123 	MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
2124 	MT_BUG_ON(mt, mas.index != 700);
2125 	MT_BUG_ON(mt, mas.last != 705);
2126 
2127 	/* Check across node boundaries of the tree */
2128 	mas_set(&mas, 70);
2129 	val = mas_walk(&mas);
2130 	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2131 	MT_BUG_ON(mt, mas.index != 70);
2132 	MT_BUG_ON(mt, mas.last != 75);
2133 
2134 	val = mas_next(&mas, 1000);
2135 	MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
2136 	MT_BUG_ON(mt, mas.index != 80);
2137 	MT_BUG_ON(mt, mas.last != 85);
2138 
2139 	val = mas_prev(&mas, 70);
2140 	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2141 	MT_BUG_ON(mt, mas.index != 70);
2142 	MT_BUG_ON(mt, mas.last != 75);
2143 
2144 	/* Check across two levels of the tree */
2145 	mas_reset(&mas);
2146 	mas_set(&mas, level2[0]);
2147 	val = mas_walk(&mas);
2148 	MT_BUG_ON(mt, val != NULL);
2149 	val = mas_next(&mas, level2[1]);
2150 	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2151 	MT_BUG_ON(mt, mas.index != level2[2]);
2152 	MT_BUG_ON(mt, mas.last != level2[3]);
2153 	mn = mas.node;
2154 
2155 	val = mas_next(&mas, level2[1]);
2156 	MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
2157 	MT_BUG_ON(mt, mas.index != level2[4]);
2158 	MT_BUG_ON(mt, mas.last != level2[5]);
2159 	MT_BUG_ON(mt, mn == mas.node);
2160 
2161 	val = mas_prev(&mas, 0);
2162 	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2163 	MT_BUG_ON(mt, mas.index != level2[2]);
2164 	MT_BUG_ON(mt, mas.last != level2[3]);
2165 
2166 	/* Check running off the end and back on */
2167 	mas_set(&mas, nr_entries * 10);
2168 	val = mas_walk(&mas);
2169 	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2170 	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2171 	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2172 
2173 	val = mas_next(&mas, ULONG_MAX);
2174 	MT_BUG_ON(mt, val != NULL);
2175 	MT_BUG_ON(mt, mas.index != last_index);
2176 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
2177 
2178 	val = mas_prev(&mas, 0);
2179 	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2180 	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2181 	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2182 
2183 	/* Check running off the start and back on */
2184 	mas_reset(&mas);
2185 	mas_set(&mas, 10);
2186 	val = mas_walk(&mas);
2187 	MT_BUG_ON(mt, val != xa_mk_value(1));
2188 	MT_BUG_ON(mt, mas.index != 10);
2189 	MT_BUG_ON(mt, mas.last != 15);
2190 
2191 	val = mas_prev(&mas, 0);
2192 	MT_BUG_ON(mt, val != xa_mk_value(0));
2193 	MT_BUG_ON(mt, mas.index != 0);
2194 	MT_BUG_ON(mt, mas.last != 5);
2195 
2196 	val = mas_prev(&mas, 0);
2197 	MT_BUG_ON(mt, val != NULL);
2198 	MT_BUG_ON(mt, mas.index != 0);
2199 	MT_BUG_ON(mt, mas.last != 5);
2200 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
2201 
2202 	mas.index = 0;
2203 	mas.last = 5;
2204 	mas_store(&mas, NULL);
2205 	mas_reset(&mas);
2206 	mas_set(&mas, 10);
2207 	mas_walk(&mas);
2208 
2209 	val = mas_prev(&mas, 0);
2210 	MT_BUG_ON(mt, val != NULL);
2211 	MT_BUG_ON(mt, mas.index != 0);
2212 	MT_BUG_ON(mt, mas.last != 9);
2213 	mas_unlock(&mas);
2214 
2215 	mtree_destroy(mt);
2216 
2217 	mt_init(mt);
2218 	mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2219 	mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
2220 	rcu_read_lock();
2221 	mas_set(&mas, 5);
2222 	val = mas_prev(&mas, 4);
2223 	MT_BUG_ON(mt, val != NULL);
2224 	rcu_read_unlock();
2225 }
2226 
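/*
 * Editor's sketch (example_next_prev() is a hypothetical helper, not part
 * of the upstream suite): the contract next_prev_test() exercises above.
 * mas_next()/mas_prev() return the neighbouring entry and set
 * mas.index/mas.last to its range, or return NULL and move the state to
 * overflow/underflow once the limit is passed.
 */
static __maybe_unused void __init example_next_prev(struct maple_tree *mt)
{
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	rcu_read_lock();
	entry = mas_next(&mas, ULONG_MAX);	/* first entry above index 0 */
	if (entry)
		pr_info("next: %lu-%lu\n", mas.index, mas.last);

	entry = mas_prev(&mas, 0);		/* step back towards 0 */
	if (!entry && mas_is_underflow(&mas))
		pr_info("ran off the start\n");
	rcu_read_unlock();
}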
2227 
2228 
2229 /* Test spanning writes that require balancing right sibling or right cousin */
2230 static noinline void __init check_spanning_relatives(struct maple_tree *mt)
2231 {
2232 
2233 	unsigned long i, nr_entries = 1000;
2234 
2235 	for (i = 0; i <= nr_entries; i++)
2236 		mtree_store_range(mt, i*10, i*10 + 5,
2237 				  xa_mk_value(i), GFP_KERNEL);
2238 
2239 
2240 	mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2241 }
2242 
2243 static noinline void __init check_fuzzer(struct maple_tree *mt)
2244 {
2245 	/*
2246 	 * 1. Causes a spanning rebalance of a single root node.
2247 	 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2248 	 * entire right side is consumed.
2249 	 */
2250 	mtree_test_insert(mt, 88, (void *)0xb1);
2251 	mtree_test_insert(mt, 84, (void *)0xa9);
2252 	mtree_test_insert(mt, 2,  (void *)0x5);
2253 	mtree_test_insert(mt, 4,  (void *)0x9);
2254 	mtree_test_insert(mt, 14, (void *)0x1d);
2255 	mtree_test_insert(mt, 7,  (void *)0xf);
2256 	mtree_test_insert(mt, 12, (void *)0x19);
2257 	mtree_test_insert(mt, 18, (void *)0x25);
2258 	mtree_test_store_range(mt, 8, 18, (void *)0x11);
2259 	mtree_destroy(mt);
2260 
2261 
2262 	/*
2263 	 * 2. Cause a spanning rebalance of two nodes in root.
2264 	 * Fixed by setting mast->r->max correctly.
2265 	 */
2266 	mt_init_flags(mt, 0);
2267 	mtree_test_store(mt, 87, (void *)0xaf);
2268 	mtree_test_store(mt, 0, (void *)0x1);
2269 	mtree_test_load(mt, 4);
2270 	mtree_test_insert(mt, 4, (void *)0x9);
2271 	mtree_test_store(mt, 8, (void *)0x11);
2272 	mtree_test_store(mt, 44, (void *)0x59);
2273 	mtree_test_store(mt, 68, (void *)0x89);
2274 	mtree_test_store(mt, 2, (void *)0x5);
2275 	mtree_test_insert(mt, 43, (void *)0x57);
2276 	mtree_test_insert(mt, 24, (void *)0x31);
2277 	mtree_test_insert(mt, 844, (void *)0x699);
2278 	mtree_test_store(mt, 84, (void *)0xa9);
2279 	mtree_test_store(mt, 4, (void *)0x9);
2280 	mtree_test_erase(mt, 4);
2281 	mtree_test_load(mt, 5);
2282 	mtree_test_erase(mt, 0);
2283 	mtree_destroy(mt);
2284 
2285 	/*
2286 	 * 3. Cause a node overflow on copy.
2287 	 * Fixed by using the correct check for node size in mas_wr_modify().
2288 	 * Also discovered an issue with metadata setting.
2289 	 */
2290 	mt_init_flags(mt, 0);
2291 	mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
2292 	mtree_test_store(mt, 4, (void *)0x9);
2293 	mtree_test_erase(mt, 5);
2294 	mtree_test_erase(mt, 0);
2295 	mtree_test_erase(mt, 4);
2296 	mtree_test_store(mt, 5, (void *)0xb);
2297 	mtree_test_erase(mt, 5);
2298 	mtree_test_store(mt, 5, (void *)0xb);
2299 	mtree_test_erase(mt, 5);
2300 	mtree_test_erase(mt, 4);
2301 	mtree_test_store(mt, 4, (void *)0x9);
2302 	mtree_test_store(mt, 444, (void *)0x379);
2303 	mtree_test_store(mt, 0, (void *)0x1);
2304 	mtree_test_load(mt, 0);
2305 	mtree_test_store(mt, 5, (void *)0xb);
2306 	mtree_test_erase(mt, 0);
2307 	mtree_destroy(mt);
2308 
2309 	/*
2310 	 * 4. Spanning store failure due to writing an incorrect pivot value at
2311 	 * the last slot.
2312 	 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
2313 	 *
2314 	 */
2315 	mt_init_flags(mt, 0);
2316 	mtree_test_insert(mt, 261, (void *)0x20b);
2317 	mtree_test_store(mt, 516, (void *)0x409);
2318 	mtree_test_store(mt, 6, (void *)0xd);
2319 	mtree_test_insert(mt, 5, (void *)0xb);
2320 	mtree_test_insert(mt, 1256, (void *)0x9d1);
2321 	mtree_test_store(mt, 4, (void *)0x9);
2322 	mtree_test_erase(mt, 1);
2323 	mtree_test_store(mt, 56, (void *)0x71);
2324 	mtree_test_insert(mt, 1, (void *)0x3);
2325 	mtree_test_store(mt, 24, (void *)0x31);
2326 	mtree_test_erase(mt, 1);
2327 	mtree_test_insert(mt, 2263, (void *)0x11af);
2328 	mtree_test_insert(mt, 446, (void *)0x37d);
2329 	mtree_test_store_range(mt, 6, 45, (void *)0xd);
2330 	mtree_test_store_range(mt, 3, 446, (void *)0x7);
2331 	mtree_destroy(mt);
2332 
2333 	/*
2334 	 * 5. mas_wr_extend_null() may overflow slots.
2335 	 * Fix by checking against wr_mas->node_end.
2336 	 */
2337 	mt_init_flags(mt, 0);
2338 	mtree_test_store(mt, 48, (void *)0x61);
2339 	mtree_test_store(mt, 3, (void *)0x7);
2340 	mtree_test_load(mt, 0);
2341 	mtree_test_store(mt, 88, (void *)0xb1);
2342 	mtree_test_store(mt, 81, (void *)0xa3);
2343 	mtree_test_insert(mt, 0, (void *)0x1);
2344 	mtree_test_insert(mt, 8, (void *)0x11);
2345 	mtree_test_insert(mt, 4, (void *)0x9);
2346 	mtree_test_insert(mt, 2480, (void *)0x1361);
2347 	mtree_test_insert(mt, ULONG_MAX,
2348 			  (void *)0xffffffffffffffff);
2349 	mtree_test_erase(mt, ULONG_MAX);
2350 	mtree_destroy(mt);
2351 
2352 	/*
2353 	 * 6. When reusing a node with an implied pivot while the node is
2354 	 * shrinking, old data would be left in the implied slot.
2355 	 * Fixed by checking the last pivot against mas->max and clearing
2356 	 * accordingly.  This only affected the left-most node, as that node
2357 	 * is the only one allowed to end in NULL.
2358 	 */
2359 	mt_init_flags(mt, 0);
2360 	mtree_test_erase(mt, 3);
2361 	mtree_test_insert(mt, 22, (void *)0x2d);
2362 	mtree_test_insert(mt, 15, (void *)0x1f);
2363 	mtree_test_load(mt, 2);
2364 	mtree_test_insert(mt, 1, (void *)0x3);
2365 	mtree_test_insert(mt, 1, (void *)0x3);
2366 	mtree_test_insert(mt, 5, (void *)0xb);
2367 	mtree_test_erase(mt, 1);
2368 	mtree_test_insert(mt, 1, (void *)0x3);
2369 	mtree_test_insert(mt, 4, (void *)0x9);
2370 	mtree_test_insert(mt, 1, (void *)0x3);
2371 	mtree_test_erase(mt, 1);
2372 	mtree_test_insert(mt, 2, (void *)0x5);
2373 	mtree_test_insert(mt, 1, (void *)0x3);
2374 	mtree_test_erase(mt, 3);
2375 	mtree_test_insert(mt, 22, (void *)0x2d);
2376 	mtree_test_insert(mt, 15, (void *)0x1f);
2377 	mtree_test_insert(mt, 2, (void *)0x5);
2378 	mtree_test_insert(mt, 1, (void *)0x3);
2379 	mtree_test_insert(mt, 8, (void *)0x11);
2380 	mtree_test_load(mt, 2);
2381 	mtree_test_insert(mt, 1, (void *)0x3);
2382 	mtree_test_insert(mt, 1, (void *)0x3);
2383 	mtree_test_store(mt, 1, (void *)0x3);
2384 	mtree_test_insert(mt, 5, (void *)0xb);
2385 	mtree_test_erase(mt, 1);
2386 	mtree_test_insert(mt, 1, (void *)0x3);
2387 	mtree_test_insert(mt, 4, (void *)0x9);
2388 	mtree_test_insert(mt, 1, (void *)0x3);
2389 	mtree_test_erase(mt, 1);
2390 	mtree_test_insert(mt, 2, (void *)0x5);
2391 	mtree_test_insert(mt, 1, (void *)0x3);
2392 	mtree_test_erase(mt, 3);
2393 	mtree_test_insert(mt, 22, (void *)0x2d);
2394 	mtree_test_insert(mt, 15, (void *)0x1f);
2395 	mtree_test_insert(mt, 2, (void *)0x5);
2396 	mtree_test_insert(mt, 1, (void *)0x3);
2397 	mtree_test_insert(mt, 8, (void *)0x11);
2398 	mtree_test_insert(mt, 12, (void *)0x19);
2399 	mtree_test_erase(mt, 1);
2400 	mtree_test_store_range(mt, 4, 62, (void *)0x9);
2401 	mtree_test_erase(mt, 62);
2402 	mtree_test_store_range(mt, 1, 0, (void *)0x3);
2403 	mtree_test_insert(mt, 11, (void *)0x17);
2404 	mtree_test_insert(mt, 3, (void *)0x7);
2405 	mtree_test_insert(mt, 3, (void *)0x7);
2406 	mtree_test_store(mt, 62, (void *)0x7d);
2407 	mtree_test_erase(mt, 62);
2408 	mtree_test_store_range(mt, 1, 15, (void *)0x3);
2409 	mtree_test_erase(mt, 1);
2410 	mtree_test_insert(mt, 22, (void *)0x2d);
2411 	mtree_test_insert(mt, 12, (void *)0x19);
2412 	mtree_test_erase(mt, 1);
2413 	mtree_test_insert(mt, 3, (void *)0x7);
2414 	mtree_test_store(mt, 62, (void *)0x7d);
2415 	mtree_test_erase(mt, 62);
2416 	mtree_test_insert(mt, 122, (void *)0xf5);
2417 	mtree_test_store(mt, 3, (void *)0x7);
2418 	mtree_test_insert(mt, 0, (void *)0x1);
2419 	mtree_test_store_range(mt, 0, 1, (void *)0x1);
2420 	mtree_test_insert(mt, 85, (void *)0xab);
2421 	mtree_test_insert(mt, 72, (void *)0x91);
2422 	mtree_test_insert(mt, 81, (void *)0xa3);
2423 	mtree_test_insert(mt, 726, (void *)0x5ad);
2424 	mtree_test_insert(mt, 0, (void *)0x1);
2425 	mtree_test_insert(mt, 1, (void *)0x3);
2426 	mtree_test_store(mt, 51, (void *)0x67);
2427 	mtree_test_insert(mt, 611, (void *)0x4c7);
2428 	mtree_test_insert(mt, 485, (void *)0x3cb);
2429 	mtree_test_insert(mt, 1, (void *)0x3);
2430 	mtree_test_erase(mt, 1);
2431 	mtree_test_insert(mt, 0, (void *)0x1);
2432 	mtree_test_insert(mt, 1, (void *)0x3);
2433 	mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2434 	mtree_test_load(mt, 1);
2435 	mtree_test_store_range(mt, 1, 22, (void *)0x3);
2436 	mtree_test_insert(mt, 1, (void *)0x3);
2437 	mtree_test_erase(mt, 1);
2438 	mtree_test_load(mt, 53);
2439 	mtree_test_load(mt, 1);
2440 	mtree_test_store_range(mt, 1, 1, (void *)0x3);
2441 	mtree_test_insert(mt, 222, (void *)0x1bd);
2442 	mtree_test_insert(mt, 485, (void *)0x3cb);
2443 	mtree_test_insert(mt, 1, (void *)0x3);
2444 	mtree_test_erase(mt, 1);
2445 	mtree_test_load(mt, 0);
2446 	mtree_test_insert(mt, 21, (void *)0x2b);
2447 	mtree_test_insert(mt, 3, (void *)0x7);
2448 	mtree_test_store(mt, 621, (void *)0x4db);
2449 	mtree_test_insert(mt, 0, (void *)0x1);
2450 	mtree_test_erase(mt, 5);
2451 	mtree_test_insert(mt, 1, (void *)0x3);
2452 	mtree_test_store(mt, 62, (void *)0x7d);
2453 	mtree_test_erase(mt, 62);
2454 	mtree_test_store_range(mt, 1, 0, (void *)0x3);
2455 	mtree_test_insert(mt, 22, (void *)0x2d);
2456 	mtree_test_insert(mt, 12, (void *)0x19);
2457 	mtree_test_erase(mt, 1);
2458 	mtree_test_insert(mt, 1, (void *)0x3);
2459 	mtree_test_store_range(mt, 4, 62, (void *)0x9);
2460 	mtree_test_erase(mt, 62);
2461 	mtree_test_erase(mt, 1);
2462 	mtree_test_load(mt, 1);
2463 	mtree_test_store_range(mt, 1, 22, (void *)0x3);
2464 	mtree_test_insert(mt, 1, (void *)0x3);
2465 	mtree_test_erase(mt, 1);
2466 	mtree_test_load(mt, 53);
2467 	mtree_test_load(mt, 1);
2468 	mtree_test_store_range(mt, 1, 1, (void *)0x3);
2469 	mtree_test_insert(mt, 222, (void *)0x1bd);
2470 	mtree_test_insert(mt, 485, (void *)0x3cb);
2471 	mtree_test_insert(mt, 1, (void *)0x3);
2472 	mtree_test_erase(mt, 1);
2473 	mtree_test_insert(mt, 1, (void *)0x3);
2474 	mtree_test_load(mt, 0);
2475 	mtree_test_load(mt, 0);
2476 	mtree_destroy(mt);
2477 
2478 	/*
2479 	 * 7. The previous fix was incomplete; fix mas_reuse_node() clearing of
2480 	 * old data by overwriting it first - that way metadata is of no concern.
2481 	 */
2482 	mt_init_flags(mt, 0);
2483 	mtree_test_load(mt, 1);
2484 	mtree_test_insert(mt, 102, (void *)0xcd);
2485 	mtree_test_erase(mt, 2);
2486 	mtree_test_erase(mt, 0);
2487 	mtree_test_load(mt, 0);
2488 	mtree_test_insert(mt, 4, (void *)0x9);
2489 	mtree_test_insert(mt, 2, (void *)0x5);
2490 	mtree_test_insert(mt, 110, (void *)0xdd);
2491 	mtree_test_insert(mt, 1, (void *)0x3);
2492 	mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2493 	mtree_test_erase(mt, 2);
2494 	mtree_test_store(mt, 0, (void *)0x1);
2495 	mtree_test_store(mt, 112, (void *)0xe1);
2496 	mtree_test_insert(mt, 21, (void *)0x2b);
2497 	mtree_test_store(mt, 1, (void *)0x3);
2498 	mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2499 	mtree_test_store(mt, 2, (void *)0x5);
2500 	mtree_test_load(mt, 22);
2501 	mtree_test_erase(mt, 2);
2502 	mtree_test_store(mt, 210, (void *)0x1a5);
2503 	mtree_test_store_range(mt, 0, 2, (void *)0x1);
2504 	mtree_test_store(mt, 2, (void *)0x5);
2505 	mtree_test_erase(mt, 2);
2506 	mtree_test_erase(mt, 22);
2507 	mtree_test_erase(mt, 1);
2508 	mtree_test_erase(mt, 2);
2509 	mtree_test_store(mt, 0, (void *)0x1);
2510 	mtree_test_load(mt, 112);
2511 	mtree_test_insert(mt, 2, (void *)0x5);
2512 	mtree_test_erase(mt, 2);
2513 	mtree_test_store(mt, 1, (void *)0x3);
2514 	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2515 	mtree_test_erase(mt, 0);
2516 	mtree_test_erase(mt, 2);
2517 	mtree_test_store(mt, 2, (void *)0x5);
2518 	mtree_test_erase(mt, 0);
2519 	mtree_test_erase(mt, 2);
2520 	mtree_test_store(mt, 0, (void *)0x1);
2521 	mtree_test_store(mt, 0, (void *)0x1);
2522 	mtree_test_erase(mt, 2);
2523 	mtree_test_store(mt, 2, (void *)0x5);
2524 	mtree_test_erase(mt, 2);
2525 	mtree_test_insert(mt, 2, (void *)0x5);
2526 	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2527 	mtree_test_erase(mt, 0);
2528 	mtree_test_erase(mt, 2);
2529 	mtree_test_store(mt, 0, (void *)0x1);
2530 	mtree_test_load(mt, 112);
2531 	mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2532 	mtree_test_store(mt, 2, (void *)0x5);
2533 	mtree_test_load(mt, 110);
2534 	mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2535 	mtree_test_load(mt, 2);
2536 	mtree_test_store(mt, 2, (void *)0x5);
2537 	mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2538 	mtree_test_erase(mt, 12);
2539 	mtree_test_store(mt, 2, (void *)0x5);
2540 	mtree_test_load(mt, 22);
2541 	mtree_destroy(mt);
2542 
2543 
2544 	/*
2545 	 * 8. When rebalancing or performing a spanning rebalance, the max of
2546 	 * the new node may be set incorrectly to the final pivot rather than
2547 	 * the correct max.
2548 	 * Fix by setting the left max to the original right max if the entire
2549 	 * node is consumed.
2549 	 */
2550 	mt_init_flags(mt, 0);
2551 	mtree_test_store(mt, 6, (void *)0xd);
2552 	mtree_test_store(mt, 67, (void *)0x87);
2553 	mtree_test_insert(mt, 15, (void *)0x1f);
2554 	mtree_test_insert(mt, 6716, (void *)0x3479);
2555 	mtree_test_store(mt, 61, (void *)0x7b);
2556 	mtree_test_insert(mt, 13, (void *)0x1b);
2557 	mtree_test_store(mt, 8, (void *)0x11);
2558 	mtree_test_insert(mt, 1, (void *)0x3);
2559 	mtree_test_load(mt, 0);
2560 	mtree_test_erase(mt, 67167);
2561 	mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2562 	mtree_test_insert(mt, 6, (void *)0xd);
2563 	mtree_test_erase(mt, 67);
2564 	mtree_test_insert(mt, 1, (void *)0x3);
2565 	mtree_test_erase(mt, 667167);
2566 	mtree_test_insert(mt, 6, (void *)0xd);
2567 	mtree_test_store(mt, 67, (void *)0x87);
2568 	mtree_test_insert(mt, 5, (void *)0xb);
2569 	mtree_test_erase(mt, 1);
2570 	mtree_test_insert(mt, 6, (void *)0xd);
2571 	mtree_test_erase(mt, 67);
2572 	mtree_test_insert(mt, 15, (void *)0x1f);
2573 	mtree_test_insert(mt, 67167, (void *)0x20cbf);
2574 	mtree_test_insert(mt, 1, (void *)0x3);
2575 	mtree_test_load(mt, 7);
2576 	mtree_test_insert(mt, 16, (void *)0x21);
2577 	mtree_test_insert(mt, 36, (void *)0x49);
2578 	mtree_test_store(mt, 67, (void *)0x87);
2579 	mtree_test_store(mt, 6, (void *)0xd);
2580 	mtree_test_insert(mt, 367, (void *)0x2df);
2581 	mtree_test_insert(mt, 115, (void *)0xe7);
2582 	mtree_test_store(mt, 0, (void *)0x1);
2583 	mtree_test_store_range(mt, 1, 3, (void *)0x3);
2584 	mtree_test_store(mt, 1, (void *)0x3);
2585 	mtree_test_erase(mt, 67167);
2586 	mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2587 	mtree_test_store(mt, 1, (void *)0x3);
2588 	mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2589 	mtree_test_load(mt, 67);
2590 	mtree_test_insert(mt, 1, (void *)0x3);
2591 	mtree_test_erase(mt, 67167);
2592 	mtree_destroy(mt);
2593 
2594 	/*
2595 	 * 9. A spanning store to the end of data caused an invalid metadata
2596 	 * length, which eventually resulted in a crash.
2597 	 * Fix by checking whether there is a value in the pivot before
2598 	 * incrementing the metadata end in mab_mas_cp().  To ensure this
2599 	 * doesn't happen again, abstract the two locations where this happens
2600 	 * into a function called mas_leaf_set_meta().
2601 	 */
2602 	mt_init_flags(mt, 0);
2603 	mtree_test_insert(mt, 21, (void *)0x2b);
2604 	mtree_test_insert(mt, 12, (void *)0x19);
2605 	mtree_test_insert(mt, 6, (void *)0xd);
2606 	mtree_test_insert(mt, 8, (void *)0x11);
2607 	mtree_test_insert(mt, 2, (void *)0x5);
2608 	mtree_test_insert(mt, 91, (void *)0xb7);
2609 	mtree_test_insert(mt, 18, (void *)0x25);
2610 	mtree_test_insert(mt, 81, (void *)0xa3);
2611 	mtree_test_store_range(mt, 0, 128, (void *)0x1);
2612 	mtree_test_store(mt, 1, (void *)0x3);
2613 	mtree_test_erase(mt, 8);
2614 	mtree_test_insert(mt, 11, (void *)0x17);
2615 	mtree_test_insert(mt, 8, (void *)0x11);
2616 	mtree_test_insert(mt, 21, (void *)0x2b);
2617 	mtree_test_insert(mt, 2, (void *)0x5);
2618 	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2619 	mtree_test_erase(mt, ULONG_MAX - 10);
2620 	mtree_test_store_range(mt, 0, 281, (void *)0x1);
2621 	mtree_test_erase(mt, 2);
2622 	mtree_test_insert(mt, 1211, (void *)0x977);
2623 	mtree_test_insert(mt, 111, (void *)0xdf);
2624 	mtree_test_insert(mt, 13, (void *)0x1b);
2625 	mtree_test_insert(mt, 211, (void *)0x1a7);
2626 	mtree_test_insert(mt, 11, (void *)0x17);
2627 	mtree_test_insert(mt, 5, (void *)0xb);
2628 	mtree_test_insert(mt, 1218, (void *)0x985);
2629 	mtree_test_insert(mt, 61, (void *)0x7b);
2630 	mtree_test_store(mt, 1, (void *)0x3);
2631 	mtree_test_insert(mt, 121, (void *)0xf3);
2632 	mtree_test_insert(mt, 8, (void *)0x11);
2633 	mtree_test_insert(mt, 21, (void *)0x2b);
2634 	mtree_test_insert(mt, 2, (void *)0x5);
2635 	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2636 	mtree_test_erase(mt, ULONG_MAX - 10);
2637 }
2638 
2639 /* Duplicate the tree with a specific gap */
2640 static noinline void __init check_dup_gaps(struct maple_tree *mt,
2641 				    unsigned long nr_entries, bool zero_start,
2642 				    unsigned long gap)
2643 {
2644 	unsigned long i = 0;
2645 	struct maple_tree newmt;
2646 	int ret;
2647 	void *tmp;
2648 	MA_STATE(mas, mt, 0, 0);
2649 	MA_STATE(newmas, &newmt, 0, 0);
2650 	struct rw_semaphore newmt_lock;
2651 
2652 	init_rwsem(&newmt_lock);
2653 	mt_set_external_lock(&newmt, &newmt_lock);
2654 
2655 	if (!zero_start)
2656 		i = 1;
2657 
2658 	mt_zero_nr_tallocated();
2659 	for (; i <= nr_entries; i++)
2660 		mtree_store_range(mt, i*10, (i+1)*10 - gap,
2661 				  xa_mk_value(i), GFP_KERNEL);
2662 
2663 	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2664 	mt_set_non_kernel(99999);
2665 	down_write(&newmt_lock);
2666 	ret = mas_expected_entries(&newmas, nr_entries);
2667 	mt_set_non_kernel(0);
2668 	MT_BUG_ON(mt, ret != 0);
2669 
2670 	rcu_read_lock();
2671 	mas_for_each(&mas, tmp, ULONG_MAX) {
2672 		newmas.index = mas.index;
2673 		newmas.last = mas.last;
2674 		mas_store(&newmas, tmp);
2675 	}
2676 	rcu_read_unlock();
2677 	mas_destroy(&newmas);
2678 
2679 	__mt_destroy(&newmt);
2680 	up_write(&newmt_lock);
2681 }
2682 
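/*
 * Editor's sketch (example_bulk_fill() is a hypothetical helper, not part
 * of the upstream suite): the bulk-store pattern check_dup_gaps() relies on
 * above.  mas_expected_entries() preallocates nodes for a known number of
 * stores so the loop cannot fail part-way; mas_destroy() releases whatever
 * preallocation is left.  External locking is assumed to be held.
 */
static __maybe_unused int __init example_bulk_fill(struct maple_tree *mt,
						   unsigned long nr)
{
	unsigned long i;
	int ret;
	MA_STATE(mas, mt, 0, 0);

	ret = mas_expected_entries(&mas, nr);	/* preallocate for nr stores */
	if (ret)
		return ret;

	for (i = 0; i < nr; i++) {
		mas_set_range(&mas, i * 10, i * 10 + 5);
		mas_store(&mas, xa_mk_value(i));
	}

	mas_destroy(&mas);	/* free unused preallocations */
	return 0;
}
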
2683 /* Duplicate many sizes of trees.  Mainly to test expected entry values */
2684 static noinline void __init check_dup(struct maple_tree *mt)
2685 {
2686 	int i;
2687 	int big_start = 100010;
2688 
2689 	/* Check with a value at zero */
2690 	for (i = 10; i < 1000; i++) {
2691 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2692 		check_dup_gaps(mt, i, true, 5);
2693 		mtree_destroy(mt);
2694 		rcu_barrier();
2695 	}
2696 
2697 	cond_resched();
2698 	mt_cache_shrink();
2699 	/* Check with a value at zero, no gap */
2700 	for (i = 1000; i < 2000; i++) {
2701 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2702 		check_dup_gaps(mt, i, true, 0);
2703 		mtree_destroy(mt);
2704 		rcu_barrier();
2705 	}
2706 
2707 	cond_resched();
2708 	mt_cache_shrink();
2709 	/* Check with a value at zero and unreasonably large */
2710 	for (i = big_start; i < big_start + 10; i++) {
2711 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2712 		check_dup_gaps(mt, i, true, 5);
2713 		mtree_destroy(mt);
2714 		rcu_barrier();
2715 	}
2716 
2717 	cond_resched();
2718 	mt_cache_shrink();
2719 	/* Small to medium size not starting at zero */
2720 	for (i = 200; i < 1000; i++) {
2721 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2722 		check_dup_gaps(mt, i, false, 5);
2723 		mtree_destroy(mt);
2724 		rcu_barrier();
2725 	}
2726 
2727 	cond_resched();
2728 	mt_cache_shrink();
2729 	/* Unreasonably large not starting at zero */
2730 	for (i = big_start; i < big_start + 10; i++) {
2731 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2732 		check_dup_gaps(mt, i, false, 5);
2733 		mtree_destroy(mt);
2734 		rcu_barrier();
2735 		cond_resched();
2736 		mt_cache_shrink();
2737 	}
2738 
2739 	/* Check non-allocation tree not starting at zero */
2740 	for (i = 1500; i < 3000; i++) {
2741 		mt_init_flags(mt, 0);
2742 		check_dup_gaps(mt, i, false, 5);
2743 		mtree_destroy(mt);
2744 		rcu_barrier();
2745 		cond_resched();
2746 		if (i % 2 == 0)
2747 			mt_cache_shrink();
2748 	}
2749 
2750 	mt_cache_shrink();
2751 	/* Check non-allocation tree starting at zero */
2752 	for (i = 200; i < 1000; i++) {
2753 		mt_init_flags(mt, 0);
2754 		check_dup_gaps(mt, i, true, 5);
2755 		mtree_destroy(mt);
2756 		rcu_barrier();
2757 		cond_resched();
2758 	}
2759 
2760 	mt_cache_shrink();
2761 	/* Unreasonably large */
2762 	for (i = big_start + 5; i < big_start + 10; i++) {
2763 		mt_init_flags(mt, 0);
2764 		check_dup_gaps(mt, i, true, 5);
2765 		mtree_destroy(mt);
2766 		rcu_barrier();
2767 		mt_cache_shrink();
2768 		cond_resched();
2769 	}
2770 }
2771 
2772 static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
2773 {
2774 	int i = 50;
2775 	MA_STATE(mas, mt, 0, 0);
2776 
2777 	mt_set_non_kernel(9999);
2778 	mas_lock(&mas);
2779 	do {
2780 		mas_set_range(&mas, i*10, i*10+9);
2781 		mas_store(&mas, check_bnode_min_spanning);
2782 	} while (i--);
2783 
2784 	mas_set_range(&mas, 240, 509);
2785 	mas_store(&mas, NULL);
2786 	mas_unlock(&mas);
2787 	mas_destroy(&mas);
2788 	mt_set_non_kernel(0);
2789 }
2790 
2791 static noinline void __init check_empty_area_window(struct maple_tree *mt)
2792 {
2793 	unsigned long i, nr_entries = 20;
2794 	MA_STATE(mas, mt, 0, 0);
2795 
2796 	for (i = 1; i <= nr_entries; i++)
2797 		mtree_store_range(mt, i*10, i*10 + 9,
2798 				  xa_mk_value(i), GFP_KERNEL);
2799 
2800 	/* Create another hole besides the one at 0 */
2801 	mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
2802 
2803 	/* Check lower bounds that don't fit */
2804 	rcu_read_lock();
2805 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2806 
2807 	mas_reset(&mas);
2808 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2809 
2810 	/* Check lower bound that does fit */
2811 	mas_reset(&mas);
2812 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2813 	MT_BUG_ON(mt, mas.index != 5);
2814 	MT_BUG_ON(mt, mas.last != 9);
2815 	rcu_read_unlock();
2816 
2817 	/* Check one gap that doesn't fit and one that does */
2818 	rcu_read_lock();
2819 	mas_reset(&mas);
2820 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2821 	MT_BUG_ON(mt, mas.index != 161);
2822 	MT_BUG_ON(mt, mas.last != 169);
2823 
2824 	/* Check one gap that does fit above the min */
2825 	mas_reset(&mas);
2826 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2827 	MT_BUG_ON(mt, mas.index != 216);
2828 	MT_BUG_ON(mt, mas.last != 218);
2829 
2830 	/* Check size that doesn't fit any gap */
2831 	mas_reset(&mas);
2832 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2833 
2834 	/*
2835 	 * Check size that doesn't fit the lower end of the window but
2836 	 * does fit the gap
2837 	 */
2838 	mas_reset(&mas);
2839 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2840 
2841 	/*
2842 	 * Check size that doesn't fit the upper end of the window but
2843 	 * does fit the gap
2844 	 */
2845 	mas_reset(&mas);
2846 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2847 
2848 	/* Check mas_empty_area forward */
2849 	mas_reset(&mas);
2850 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2851 	MT_BUG_ON(mt, mas.index != 0);
2852 	MT_BUG_ON(mt, mas.last != 8);
2853 
2854 	mas_reset(&mas);
2855 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2856 	MT_BUG_ON(mt, mas.index != 0);
2857 	MT_BUG_ON(mt, mas.last != 3);
2858 
2859 	mas_reset(&mas);
2860 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2861 
2862 	mas_reset(&mas);
2863 	MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
2864 
2865 	mas_reset(&mas);
2866 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);
2867 
2868 	mas_reset(&mas);
2869 	mas_empty_area(&mas, 100, 165, 3);
2870 
2871 	mas_reset(&mas);
2872 	MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
2873 	rcu_read_unlock();
2874 }
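
/*
 * Editor's sketch (example_claim_gap() is a hypothetical helper, not part
 * of the upstream suite): the allocation idiom behind the window checks
 * above and check_empty_area_fill() below.  On an MT_FLAGS_ALLOC_RANGE
 * tree, mas_empty_area() finds a gap of @size within [min, max] and sets
 * mas.index/mas.last to the satisfying range; storing then claims it.
 */
static __maybe_unused int __init example_claim_gap(struct maple_tree *mt,
						   unsigned long size)
{
	int ret;
	MA_STATE(mas, mt, 0, 0);

	mas_lock(&mas);
	ret = mas_empty_area(&mas, 0, ULONG_MAX, size);
	if (!ret)	/* mas.last == mas.index + size - 1 on success */
		ret = mas_store_gfp(&mas, xa_mk_value(0), GFP_KERNEL);
	mas_unlock(&mas);

	return ret;
}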
2875 
2876 static noinline void __init check_empty_area_fill(struct maple_tree *mt)
2877 {
2878 	const unsigned long max = 0x25D78000;
2879 	unsigned long size;
2880 	int loop, shift;
2881 	MA_STATE(mas, mt, 0, 0);
2882 
2883 	mt_set_non_kernel(99999);
2884 	for (shift = 12; shift <= 16; shift++) {
2885 		loop = 5000;
2886 		size = 1 << shift;
2887 		while (loop--) {
2888 			mas_set(&mas, 0);
2889 			mas_lock(&mas);
2890 			MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
2891 			MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2892 			mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2893 			mas_unlock(&mas);
2894 			mas_reset(&mas);
2895 		}
2896 	}
2897 
2898 	/* No space left. */
2899 	size = 0x1000;
2900 	rcu_read_lock();
2901 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2902 	rcu_read_unlock();
2903 
2904 	/* Fill a depth 3 node to the maximum */
2905 	for (unsigned long i = 629440511; i <= 629440800; i += 6)
2906 		mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2907 	/* Make space in the second-last depth 4 node */
2908 	mtree_erase(mt, 631668735);
2909 	/* Make space in the last depth 4 node */
2910 	mtree_erase(mt, 629506047);
2911 	mas_reset(&mas);
2912 	/* Search from just after the gap in the second-last depth 4 */
2913 	rcu_read_lock();
2914 	MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
2915 	rcu_read_unlock();
2916 	mt_set_non_kernel(0);
2917 }
2918 
2919 /*
2920  * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
2921  *
2922  * The table below shows the single entry tree (0-0 pointer) and normal tree
2923  * with nodes.
2924  *
2925  * Function	ENTRY	Start		Result		index & last
2926  *     ┬          ┬       ┬               ┬                ┬
2927  *     │          │       │               │                └─ the final range
2928  *     │          │       │               └─ The node value after execution
2929  *     │          │       └─ The node value before execution
2930  *     │          └─ If the entry exists or does not exist (DNE)
2931  *     └─ The function name
2932  *
2933  * Function	ENTRY	Start		Result		index & last
2934  * mas_next()
2935  *  - after last
2936  *			Single entry tree at 0-0
2937  *			------------------------
2938  *		DNE	MAS_START	MAS_NONE	1 - oo
2939  *		DNE	MAS_PAUSE	MAS_NONE	1 - oo
2940  *		DNE	MAS_ROOT	MAS_NONE	1 - oo
2941  *			when index = 0
2942  *		DNE	MAS_NONE	MAS_ROOT	0
2943  *			when index > 0
2944  *		DNE	MAS_NONE	MAS_NONE	1 - oo
2945  *
2946  *			Normal tree
2947  *			-----------
2948  *		exists	MAS_START	active		range
2949  *		DNE	MAS_START	active		set to last range
2950  *		exists	MAS_PAUSE	active		range
2951  *		DNE	MAS_PAUSE	active		set to last range
2952  *		exists	MAS_NONE	active		range
2953  *		exists	active		active		range
2954  *		DNE	active		active		set to last range
2955  *		ERANGE	active		MAS_OVERFLOW	last range
2956  *
2957  * Function	ENTRY	Start		Result		index & last
2958  * mas_prev()
2959  * - before index
2960  *			Single entry tree at 0-0
2961  *			------------------------
2962  *				if index > 0
2963  *		exists	MAS_START	MAS_ROOT	0
2964  *		exists	MAS_PAUSE	MAS_ROOT	0
2965  *		exists	MAS_NONE	MAS_ROOT	0
2966  *
2967  *				if index == 0
2968  *		DNE	MAS_START	MAS_NONE	0
2969  *		DNE	MAS_PAUSE	MAS_NONE	0
2970  *		DNE	MAS_NONE	MAS_NONE	0
2971  *		DNE	MAS_ROOT	MAS_NONE	0
2972  *
2973  *			Normal tree
2974  *			-----------
2975  *		exists	MAS_START	active		range
2976  *		DNE	MAS_START	active		set to min
2977  *		exists	MAS_PAUSE	active		range
2978  *		DNE	MAS_PAUSE	active		set to min
2979  *		exists	MAS_NONE	active		range
2980  *		DNE	MAS_NONE	MAS_NONE	set to min
2981  *		any	MAS_ROOT	MAS_NONE	0
2982  *		exists	active		active		range
2983  *		DNE	active		active		last range
2984  *		ERANGE	active		MAS_UNDERFLOW	last range
2985  *
2986  * Function	ENTRY	Start		Result		index & last
2987  * mas_find()
2988  *  - at index or next
2989  *			Single entry tree at 0-0
2990  *			------------------------
2991  *				if index >  0
2992  *		DNE	MAS_START	MAS_NONE	0
2993  *		DNE	MAS_PAUSE	MAS_NONE	0
2994  *		DNE	MAS_ROOT	MAS_NONE	0
2995  *		DNE	MAS_NONE	MAS_NONE	1
2996  *				if index ==  0
2997  *		exists	MAS_START	MAS_ROOT	0
2998  *		exists	MAS_PAUSE	MAS_ROOT	0
2999  *		exists	MAS_NONE	MAS_ROOT	0
3000  *
3001  *			Normal tree
3002  *			-----------
3003  *		exists	MAS_START	active		range
3004  *		DNE	MAS_START	active		set to max
3005  *		exists	MAS_PAUSE	active		range
3006  *		DNE	MAS_PAUSE	active		set to max
3007  *		exists	MAS_NONE	active		range (start at last)
3008  *		exists	active		active		range
3009  *		DNE	active		active		last range (max < last)
3010  *
3011  * Function	ENTRY	Start		Result		index & last
3012  * mas_find_rev()
3013  *  - at index or before
3014  *			Single entry tree at 0-0
3015  *			------------------------
3016  *				if index >  0
3017  *		exists	MAS_START	MAS_ROOT	0
3018  *		exists	MAS_PAUSE	MAS_ROOT	0
3019  *		exists	MAS_NONE	MAS_ROOT	0
3020  *				if index ==  0
3021  *		DNE	MAS_START	MAS_NONE	0
3022  *		DNE	MAS_PAUSE	MAS_NONE	0
3023  *		DNE	MAS_NONE	MAS_NONE	0
3024  *		DNE	MAS_ROOT	MAS_NONE	0
3025  *
3026  *			Normal tree
3027  *			-----------
3028  *		exists	MAS_START	active		range
3029  *		DNE	MAS_START	active		set to min
3030  *		exists	MAS_PAUSE	active		range
3031  *		DNE	MAS_PAUSE	active		set to min
3032  *		exists	MAS_NONE	active		range (start at index)
3033  *		exists	active		active		range
3034  *		DNE	active		active		last range (min > index)
3035  *
3036  * Function	ENTRY	Start		Result		index & last
3037  * mas_walk()
3038  * - Look up index
3039  *			Single entry tree at 0-0
3040  *			------------------------
3041  *				if index >  0
3042  *		DNE	MAS_START	MAS_ROOT	1 - oo
3043  *		DNE	MAS_PAUSE	MAS_ROOT	1 - oo
3044  *		DNE	MAS_NONE	MAS_ROOT	1 - oo
3045  *		DNE	MAS_ROOT	MAS_ROOT	1 - oo
3046  *				if index ==  0
3047  *		exists	MAS_START	MAS_ROOT	0
3048  *		exists	MAS_PAUSE	MAS_ROOT	0
3049  *		exists	MAS_NONE	MAS_ROOT	0
3050  *		exists	MAS_ROOT	MAS_ROOT	0
3051  *
3052  *			Normal tree
3053  *			-----------
3054  *		exists	MAS_START	active		range
3055  *		DNE	MAS_START	active		range of NULL
3056  *		exists	MAS_PAUSE	active		range
3057  *		DNE	MAS_PAUSE	active		range of NULL
3058  *		exists	MAS_NONE	active		range
3059  *		DNE	MAS_NONE	active		range of NULL
3060  *		exists	active		active		range
3061  *		DNE	active		active		range of NULL
3062  */
3063 
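/*
 * Editor's sketch (example_paused_iteration() is a hypothetical helper, not
 * part of the upstream suite): the common use of the pause state documented
 * above.  mas_pause() saves the position without holding node references,
 * so the RCU lock can be dropped; the next iteration re-walks from the
 * saved index and continues.
 */
static __maybe_unused void __init example_paused_iteration(struct maple_tree *mt)
{
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX) {
		pr_info("%lu-%lu -> %p\n", mas.index, mas.last, entry);
		mas_pause(&mas);	/* state -> ma_pause, position saved */
		rcu_read_unlock();
		cond_resched();		/* safe: no node references held */
		rcu_read_lock();	/* next mas_find() re-walks from here */
	}
	rcu_read_unlock();
}
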
3064 static noinline void __init check_state_handling(struct maple_tree *mt)
3065 {
3066 	MA_STATE(mas, mt, 0, 0);
3067 	void *entry, *ptr = (void *) 0x1234500;
3068 	void *ptr2 = &ptr;
3069 	void *ptr3 = &ptr2;
3070 
3071 	/* Check MAS_ROOT First */
3072 	mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3073 
3074 	mas_lock(&mas);
3075 	/* prev: Start -> underflow */
3076 	entry = mas_prev(&mas, 0);
3077 	MT_BUG_ON(mt, entry != NULL);
3078 	MT_BUG_ON(mt, mas.status != ma_underflow);
3079 
3080 	/* prev: Start -> root */
3081 	mas_set(&mas, 10);
3082 	entry = mas_prev(&mas, 0);
3083 	MT_BUG_ON(mt, entry != ptr);
3084 	MT_BUG_ON(mt, mas.index != 0);
3085 	MT_BUG_ON(mt, mas.last != 0);
3086 	MT_BUG_ON(mt, mas.status != ma_root);
3087 
3088 	/* prev: pause -> root */
3089 	mas_set(&mas, 10);
3090 	mas_pause(&mas);
3091 	entry = mas_prev(&mas, 0);
3092 	MT_BUG_ON(mt, entry != ptr);
3093 	MT_BUG_ON(mt, mas.index != 0);
3094 	MT_BUG_ON(mt, mas.last != 0);
3095 	MT_BUG_ON(mt, mas.status != ma_root);
3096 
3097 	/* next: start -> none */
3098 	mas_set(&mas, 0);
3099 	entry = mas_next(&mas, ULONG_MAX);
3100 	MT_BUG_ON(mt, mas.index != 1);
3101 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3102 	MT_BUG_ON(mt, entry != NULL);
3103 	MT_BUG_ON(mt, mas.status != ma_none);
3104 
3105 	/* next: start -> none */
3106 	mas_set(&mas, 10);
3107 	entry = mas_next(&mas, ULONG_MAX);
3108 	MT_BUG_ON(mt, mas.index != 1);
3109 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3110 	MT_BUG_ON(mt, entry != NULL);
3111 	MT_BUG_ON(mt, mas.status != ma_none);
3112 
3113 	/* find: start -> root */
3114 	mas_set(&mas, 0);
3115 	entry = mas_find(&mas, ULONG_MAX);
3116 	MT_BUG_ON(mt, entry != ptr);
3117 	MT_BUG_ON(mt, mas.index != 0);
3118 	MT_BUG_ON(mt, mas.last != 0);
3119 	MT_BUG_ON(mt, mas.status != ma_root);
3120 
3121 	/* find: root -> none */
3122 	entry = mas_find(&mas, ULONG_MAX);
3123 	MT_BUG_ON(mt, entry != NULL);
3124 	MT_BUG_ON(mt, mas.index != 1);
3125 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3126 	MT_BUG_ON(mt, mas.status != ma_none);
3127 
3128 	/* find: none -> none */
3129 	entry = mas_find(&mas, ULONG_MAX);
3130 	MT_BUG_ON(mt, entry != NULL);
3131 	MT_BUG_ON(mt, mas.index != 1);
3132 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3133 	MT_BUG_ON(mt, mas.status != ma_none);
3134 
3135 	/* find: start -> none */
3136 	mas_set(&mas, 10);
3137 	entry = mas_find(&mas, ULONG_MAX);
3138 	MT_BUG_ON(mt, entry != NULL);
3139 	MT_BUG_ON(mt, mas.index != 1);
3140 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3141 	MT_BUG_ON(mt, mas.status != ma_none);
3142 
3143 	/* find_rev: none -> root */
3144 	entry = mas_find_rev(&mas, 0);
3145 	MT_BUG_ON(mt, entry != ptr);
3146 	MT_BUG_ON(mt, mas.index != 0);
3147 	MT_BUG_ON(mt, mas.last != 0);
3148 	MT_BUG_ON(mt, mas.status != ma_root);
3149 
3150 	/* find_rev: start -> root */
3151 	mas_set(&mas, 0);
3152 	entry = mas_find_rev(&mas, 0);
3153 	MT_BUG_ON(mt, entry != ptr);
3154 	MT_BUG_ON(mt, mas.index != 0);
3155 	MT_BUG_ON(mt, mas.last != 0);
3156 	MT_BUG_ON(mt, mas.status != ma_root);
3157 
3158 	/* find_rev: root -> none */
3159 	entry = mas_find_rev(&mas, 0);
3160 	MT_BUG_ON(mt, entry != NULL);
3161 	MT_BUG_ON(mt, mas.index != 0);
3162 	MT_BUG_ON(mt, mas.last != 0);
3163 	MT_BUG_ON(mt, mas.status != ma_none);
3164 
3165 	/* find_rev: none -> none */
3166 	entry = mas_find_rev(&mas, 0);
3167 	MT_BUG_ON(mt, entry != NULL);
3168 	MT_BUG_ON(mt, mas.index != 0);
3169 	MT_BUG_ON(mt, mas.last != 0);
3170 	MT_BUG_ON(mt, mas.status != ma_none);
3171 
3172 	/* find_rev: start -> root */
3173 	mas_set(&mas, 10);
3174 	entry = mas_find_rev(&mas, 0);
3175 	MT_BUG_ON(mt, entry != ptr);
3176 	MT_BUG_ON(mt, mas.index != 0);
3177 	MT_BUG_ON(mt, mas.last != 0);
3178 	MT_BUG_ON(mt, mas.status != ma_root);
3179 
3180 	/* walk: start -> none */
3181 	mas_set(&mas, 10);
3182 	entry = mas_walk(&mas);
3183 	MT_BUG_ON(mt, entry != NULL);
3184 	MT_BUG_ON(mt, mas.index != 1);
3185 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3186 	MT_BUG_ON(mt, mas.status != ma_none);
3187 
3188 	/* walk: pause -> none */
3189 	mas_set(&mas, 10);
3190 	mas_pause(&mas);
3191 	entry = mas_walk(&mas);
3192 	MT_BUG_ON(mt, entry != NULL);
3193 	MT_BUG_ON(mt, mas.index != 1);
3194 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3195 	MT_BUG_ON(mt, mas.status != ma_none);
3196 
3197 	/* walk: none -> none */
3198 	mas.index = mas.last = 10;
3199 	entry = mas_walk(&mas);
3200 	MT_BUG_ON(mt, entry != NULL);
3201 	MT_BUG_ON(mt, mas.index != 1);
3202 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3203 	MT_BUG_ON(mt, mas.status != ma_none);
3204 
3205 	/* walk: none -> none */
3206 	entry = mas_walk(&mas);
3207 	MT_BUG_ON(mt, entry != NULL);
3208 	MT_BUG_ON(mt, mas.index != 1);
3209 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3210 	MT_BUG_ON(mt, mas.status != ma_none);
3211 
3212 	/* walk: start -> root */
3213 	mas_set(&mas, 0);
3214 	entry = mas_walk(&mas);
3215 	MT_BUG_ON(mt, entry != ptr);
3216 	MT_BUG_ON(mt, mas.index != 0);
3217 	MT_BUG_ON(mt, mas.last != 0);
3218 	MT_BUG_ON(mt, mas.status != ma_root);
3219 
3220 	/* walk: pause -> root */
3221 	mas_set(&mas, 0);
3222 	mas_pause(&mas);
3223 	entry = mas_walk(&mas);
3224 	MT_BUG_ON(mt, entry != ptr);
3225 	MT_BUG_ON(mt, mas.index != 0);
3226 	MT_BUG_ON(mt, mas.last != 0);
3227 	MT_BUG_ON(mt, mas.status != ma_root);
3228 
3229 	/* walk: none -> root */
3230 	mas.status = ma_none;
3231 	entry = mas_walk(&mas);
3232 	MT_BUG_ON(mt, entry != ptr);
3233 	MT_BUG_ON(mt, mas.index != 0);
3234 	MT_BUG_ON(mt, mas.last != 0);
3235 	MT_BUG_ON(mt, mas.status != ma_root);
3236 
3237 	/* walk: root -> root */
3238 	entry = mas_walk(&mas);
3239 	MT_BUG_ON(mt, entry != ptr);
3240 	MT_BUG_ON(mt, mas.index != 0);
3241 	MT_BUG_ON(mt, mas.last != 0);
3242 	MT_BUG_ON(mt, mas.status != ma_root);
3243 
3244 	/* walk: root -> none */
3245 	mas_set(&mas, 10);
3246 	entry = mas_walk(&mas);
3247 	MT_BUG_ON(mt, entry != NULL);
3248 	MT_BUG_ON(mt, mas.index != 1);
3249 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3250 	MT_BUG_ON(mt, mas.status != ma_none);
3251 
3252 	/* walk: none -> root */
3253 	mas.index = mas.last = 0;
3254 	entry = mas_walk(&mas);
3255 	MT_BUG_ON(mt, entry != ptr);
3256 	MT_BUG_ON(mt, mas.index != 0);
3257 	MT_BUG_ON(mt, mas.last != 0);
3258 	MT_BUG_ON(mt, mas.status != ma_root);
3259 
3260 	mas_unlock(&mas);
3261 
3262 	/* Check when there is an actual node */
3263 	mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3264 	mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3265 	mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3266 	mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3267 
3268 	mas_lock(&mas);
3269 
3270 	/* next: start ->active */
3271 	mas_set(&mas, 0);
3272 	entry = mas_next(&mas, ULONG_MAX);
3273 	MT_BUG_ON(mt, entry != ptr);
3274 	MT_BUG_ON(mt, mas.index != 0x1000);
3275 	MT_BUG_ON(mt, mas.last != 0x1500);
3276 	MT_BUG_ON(mt, !mas_is_active(&mas));
3277 
3278 	/* next: pause ->active */
3279 	mas_set(&mas, 0);
3280 	mas_pause(&mas);
3281 	entry = mas_next(&mas, ULONG_MAX);
3282 	MT_BUG_ON(mt, entry != ptr);
3283 	MT_BUG_ON(mt, mas.index != 0x1000);
3284 	MT_BUG_ON(mt, mas.last != 0x1500);
3285 	MT_BUG_ON(mt, !mas_is_active(&mas));
3286 
3287 	/* next: none ->active */
3288 	mas.index = mas.last = 0;
3289 	mas.offset = 0;
3290 	mas.status = ma_none;
3291 	entry = mas_next(&mas, ULONG_MAX);
3292 	MT_BUG_ON(mt, entry != ptr);
3293 	MT_BUG_ON(mt, mas.index != 0x1000);
3294 	MT_BUG_ON(mt, mas.last != 0x1500);
3295 	MT_BUG_ON(mt, !mas_is_active(&mas));
3296 
3297 	/* next:active ->active (spanning limit) */
3298 	entry = mas_next(&mas, 0x2100);
3299 	MT_BUG_ON(mt, entry != ptr2);
3300 	MT_BUG_ON(mt, mas.index != 0x2000);
3301 	MT_BUG_ON(mt, mas.last != 0x2500);
3302 	MT_BUG_ON(mt, !mas_is_active(&mas));
3303 
3304 	/* next:active -> overflow (limit reached) beyond data */
3305 	entry = mas_next(&mas, 0x2999);
3306 	MT_BUG_ON(mt, entry != NULL);
3307 	MT_BUG_ON(mt, mas.index != 0x2501);
3308 	MT_BUG_ON(mt, mas.last != 0x2fff);
3309 	MT_BUG_ON(mt, !mas_is_overflow(&mas));
3310 
3311 	/* next:overflow -> active (limit changed) */
3312 	entry = mas_next(&mas, ULONG_MAX);
3313 	MT_BUG_ON(mt, entry != ptr3);
3314 	MT_BUG_ON(mt, mas.index != 0x3000);
3315 	MT_BUG_ON(mt, mas.last != 0x3500);
3316 	MT_BUG_ON(mt, !mas_is_active(&mas));
3317 
3318 	/* next:active ->  overflow (limit reached) */
3319 	entry = mas_next(&mas, ULONG_MAX);
3320 	MT_BUG_ON(mt, entry != NULL);
3321 	MT_BUG_ON(mt, mas.index != 0x3501);
3322 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3323 	MT_BUG_ON(mt, !mas_is_overflow(&mas));
3324 
3325 	/* next:overflow -> overflow  */
3326 	entry = mas_next(&mas, ULONG_MAX);
3327 	MT_BUG_ON(mt, entry != NULL);
3328 	MT_BUG_ON(mt, mas.index != 0x3501);
3329 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3330 	MT_BUG_ON(mt, !mas_is_overflow(&mas));
3331 
3332 	/* prev:overflow -> active  */
3333 	entry = mas_prev(&mas, 0);
3334 	MT_BUG_ON(mt, entry != ptr3);
3335 	MT_BUG_ON(mt, mas.index != 0x3000);
3336 	MT_BUG_ON(mt, mas.last != 0x3500);
3337 	MT_BUG_ON(mt, !mas_is_active(&mas));
3338 
3339 	/* next: none -> active, skip value at location */
3340 	mas_set(&mas, 0);
3341 	entry = mas_next(&mas, ULONG_MAX);
3342 	mas.status = ma_none;
3343 	mas.offset = 0;
3344 	entry = mas_next(&mas, ULONG_MAX);
3345 	MT_BUG_ON(mt, entry != ptr2);
3346 	MT_BUG_ON(mt, mas.index != 0x2000);
3347 	MT_BUG_ON(mt, mas.last != 0x2500);
3348 	MT_BUG_ON(mt, !mas_is_active(&mas));
3349 
3350 	/* prev:active ->active */
3351 	entry = mas_prev(&mas, 0);
3352 	MT_BUG_ON(mt, entry != ptr);
3353 	MT_BUG_ON(mt, mas.index != 0x1000);
3354 	MT_BUG_ON(mt, mas.last != 0x1500);
3355 	MT_BUG_ON(mt, !mas_is_active(&mas));
3356 
3357 	/* prev:active -> underflow (span limit) */
3358 	mas_next(&mas, ULONG_MAX);
3359 	entry = mas_prev(&mas, 0x1200);
3360 	MT_BUG_ON(mt, entry != ptr);
3361 	MT_BUG_ON(mt, mas.index != 0x1000);
3362 	MT_BUG_ON(mt, mas.last != 0x1500);
3363 	MT_BUG_ON(mt, !mas_is_active(&mas)); /* spanning limit */
3364 	entry = mas_prev(&mas, 0x1200); /* underflow */
3365 	MT_BUG_ON(mt, entry != NULL);
3366 	MT_BUG_ON(mt, mas.index != 0x1000);
3367 	MT_BUG_ON(mt, mas.last != 0x1500);
3368 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3369 
3370 	/* prev:underflow -> underflow (lower limit) spanning end range */
3371 	entry = mas_prev(&mas, 0x0100);
3372 	MT_BUG_ON(mt, entry != NULL);
3373 	MT_BUG_ON(mt, mas.index != 0);
3374 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3375 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3376 
3377 	/* prev:underflow -> underflow */
3378 	entry = mas_prev(&mas, 0);
3379 	MT_BUG_ON(mt, entry != NULL);
3380 	MT_BUG_ON(mt, mas.index != 0);
3381 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3382 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3383 
3384 	/* prev:underflow -> underflow */
3385 	entry = mas_prev(&mas, 0);
3386 	MT_BUG_ON(mt, entry != NULL);
3387 	MT_BUG_ON(mt, mas.index != 0);
3388 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3389 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3390 
3391 	/* next:underflow -> active */
3392 	entry = mas_next(&mas, ULONG_MAX);
3393 	MT_BUG_ON(mt, entry != ptr);
3394 	MT_BUG_ON(mt, mas.index != 0x1000);
3395 	MT_BUG_ON(mt, mas.last != 0x1500);
3396 	MT_BUG_ON(mt, !mas_is_active(&mas));
3397 
3398 	/* prev:first value -> underflow */
3399 	entry = mas_prev(&mas, 0x1000);
3400 	MT_BUG_ON(mt, entry != NULL);
3401 	MT_BUG_ON(mt, mas.index != 0x1000);
3402 	MT_BUG_ON(mt, mas.last != 0x1500);
3403 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3404 
3405 	/* find:underflow -> first value */
3406 	entry = mas_find(&mas, ULONG_MAX);
3407 	MT_BUG_ON(mt, entry != ptr);
3408 	MT_BUG_ON(mt, mas.index != 0x1000);
3409 	MT_BUG_ON(mt, mas.last != 0x1500);
3410 	MT_BUG_ON(mt, !mas_is_active(&mas));
3411 
3412 	/* prev: pause ->active */
3413 	mas_set(&mas, 0x3600);
3414 	entry = mas_prev(&mas, 0);
3415 	MT_BUG_ON(mt, entry != ptr3);
3416 	mas_pause(&mas);
3417 	entry = mas_prev(&mas, 0);
3418 	MT_BUG_ON(mt, entry != ptr2);
3419 	MT_BUG_ON(mt, mas.index != 0x2000);
3420 	MT_BUG_ON(mt, mas.last != 0x2500);
3421 	MT_BUG_ON(mt, !mas_is_active(&mas));
3422 
3423 	/* prev:active -> underflow spanning min */
3424 	entry = mas_prev(&mas, 0x1600);
3425 	MT_BUG_ON(mt, entry != NULL);
3426 	MT_BUG_ON(mt, mas.index != 0x1501);
3427 	MT_BUG_ON(mt, mas.last != 0x1FFF);
3428 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3429 
3430 	/* prev: active ->active, continue */
3431 	entry = mas_prev(&mas, 0);
3432 	MT_BUG_ON(mt, entry != ptr);
3433 	MT_BUG_ON(mt, mas.index != 0x1000);
3434 	MT_BUG_ON(mt, mas.last != 0x1500);
3435 	MT_BUG_ON(mt, !mas_is_active(&mas));
3436 
3437 	/* find: start ->active */
3438 	mas_set(&mas, 0);
3439 	entry = mas_find(&mas, ULONG_MAX);
3440 	MT_BUG_ON(mt, entry != ptr);
3441 	MT_BUG_ON(mt, mas.index != 0x1000);
3442 	MT_BUG_ON(mt, mas.last != 0x1500);
3443 	MT_BUG_ON(mt, !mas_is_active(&mas));
3444 
3445 	/* find: pause ->active */
3446 	mas_set(&mas, 0);
3447 	mas_pause(&mas);
3448 	entry = mas_find(&mas, ULONG_MAX);
3449 	MT_BUG_ON(mt, entry != ptr);
3450 	MT_BUG_ON(mt, mas.index != 0x1000);
3451 	MT_BUG_ON(mt, mas.last != 0x1500);
3452 	MT_BUG_ON(mt, !mas_is_active(&mas));
3453 
3454 	/* find: start -> active on value */
3455 	mas_set(&mas, 0x1200);
3456 	entry = mas_find(&mas, ULONG_MAX);
3457 	MT_BUG_ON(mt, entry != ptr);
3458 	MT_BUG_ON(mt, mas.index != 0x1000);
3459 	MT_BUG_ON(mt, mas.last != 0x1500);
3460 	MT_BUG_ON(mt, !mas_is_active(&mas));
3461 
3462 	/* find:active ->active */
3463 	entry = mas_find(&mas, ULONG_MAX);
3464 	MT_BUG_ON(mt, entry != ptr2);
3465 	MT_BUG_ON(mt, mas.index != 0x2000);
3466 	MT_BUG_ON(mt, mas.last != 0x2500);
3467 	MT_BUG_ON(mt, !mas_is_active(&mas));
3468 
3469 
3470 	/* find: active -> active (NULL) */
3471 	entry = mas_find(&mas, 0x2700);
3472 	MT_BUG_ON(mt, entry != NULL);
3473 	MT_BUG_ON(mt, mas.index != 0x2501);
3474 	MT_BUG_ON(mt, mas.last != 0x2FFF);
3475 	MAS_BUG_ON(&mas, !mas_is_active(&mas));
3476 
3477 	/* find: overflow ->active */
3478 	entry = mas_find(&mas, 0x5000);
3479 	MT_BUG_ON(mt, entry != ptr3);
3480 	MT_BUG_ON(mt, mas.index != 0x3000);
3481 	MT_BUG_ON(mt, mas.last != 0x3500);
3482 	MT_BUG_ON(mt, !mas_is_active(&mas));
3483 
3484 	/* find: active -> active (NULL) end */
3485 	entry = mas_find(&mas, ULONG_MAX);
3486 	MT_BUG_ON(mt, entry != NULL);
3487 	MT_BUG_ON(mt, mas.index != 0x3501);
3488 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3489 	MAS_BUG_ON(&mas, !mas_is_active(&mas));
3490 
3491 	/* find_rev: active (END) ->active */
3492 	entry = mas_find_rev(&mas, 0);
3493 	MT_BUG_ON(mt, entry != ptr3);
3494 	MT_BUG_ON(mt, mas.index != 0x3000);
3495 	MT_BUG_ON(mt, mas.last != 0x3500);
3496 	MT_BUG_ON(mt, !mas_is_active(&mas));
3497 
3498 	/* find_rev:active ->active */
3499 	entry = mas_find_rev(&mas, 0);
3500 	MT_BUG_ON(mt, entry != ptr2);
3501 	MT_BUG_ON(mt, mas.index != 0x2000);
3502 	MT_BUG_ON(mt, mas.last != 0x2500);
3503 	MT_BUG_ON(mt, !mas_is_active(&mas));
3504 
3505 	/* find_rev: pause ->active */
3506 	mas_pause(&mas);
3507 	entry = mas_find_rev(&mas, 0);
3508 	MT_BUG_ON(mt, entry != ptr);
3509 	MT_BUG_ON(mt, mas.index != 0x1000);
3510 	MT_BUG_ON(mt, mas.last != 0x1500);
3511 	MT_BUG_ON(mt, !mas_is_active(&mas));
3512 
3513 	/* find_rev:active -> underflow */
3514 	entry = mas_find_rev(&mas, 0);
3515 	MT_BUG_ON(mt, entry != NULL);
3516 	MT_BUG_ON(mt, mas.index != 0);
3517 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3518 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3519 
3520 	/* find_rev: start ->active */
3521 	mas_set(&mas, 0x1200);
3522 	entry = mas_find_rev(&mas, 0);
3523 	MT_BUG_ON(mt, entry != ptr);
3524 	MT_BUG_ON(mt, mas.index != 0x1000);
3525 	MT_BUG_ON(mt, mas.last != 0x1500);
3526 	MT_BUG_ON(mt, !mas_is_active(&mas));
3527 
3528 	/* mas_walk start ->active */
3529 	mas_set(&mas, 0x1200);
3530 	entry = mas_walk(&mas);
3531 	MT_BUG_ON(mt, entry != ptr);
3532 	MT_BUG_ON(mt, mas.index != 0x1000);
3533 	MT_BUG_ON(mt, mas.last != 0x1500);
3534 	MT_BUG_ON(mt, !mas_is_active(&mas));
3535 
3536 	/* mas_walk start ->active (NULL) */
3537 	mas_set(&mas, 0x1600);
3538 	entry = mas_walk(&mas);
3539 	MT_BUG_ON(mt, entry != NULL);
3540 	MT_BUG_ON(mt, mas.index != 0x1501);
3541 	MT_BUG_ON(mt, mas.last != 0x1fff);
3542 	MT_BUG_ON(mt, !mas_is_active(&mas));
3543 
3544 	/* mas_walk pause ->active */
3545 	mas_set(&mas, 0x1200);
3546 	mas_pause(&mas);
3547 	entry = mas_walk(&mas);
3548 	MT_BUG_ON(mt, entry != ptr);
3549 	MT_BUG_ON(mt, mas.index != 0x1000);
3550 	MT_BUG_ON(mt, mas.last != 0x1500);
3551 	MT_BUG_ON(mt, !mas_is_active(&mas));
3552 
3553 	/* mas_walk pause -> active (NULL) */
3554 	mas_set(&mas, 0x1600);
3555 	mas_pause(&mas);
3556 	entry = mas_walk(&mas);
3557 	MT_BUG_ON(mt, entry != NULL);
3558 	MT_BUG_ON(mt, mas.index != 0x1501);
3559 	MT_BUG_ON(mt, mas.last != 0x1fff);
3560 	MT_BUG_ON(mt, !mas_is_active(&mas));
3561 
3562 	/* mas_walk none -> active */
3563 	mas_set(&mas, 0x1200);
3564 	mas.status = ma_none;
3565 	entry = mas_walk(&mas);
3566 	MT_BUG_ON(mt, entry != ptr);
3567 	MT_BUG_ON(mt, mas.index != 0x1000);
3568 	MT_BUG_ON(mt, mas.last != 0x1500);
3569 	MT_BUG_ON(mt, !mas_is_active(&mas));
3570 
3571 	/* mas_walk none -> active (NULL) */
3572 	mas_set(&mas, 0x1600);
3573 	mas.status = ma_none;
3574 	entry = mas_walk(&mas);
3575 	MT_BUG_ON(mt, entry != NULL);
3576 	MT_BUG_ON(mt, mas.index != 0x1501);
3577 	MT_BUG_ON(mt, mas.last != 0x1fff);
3578 	MT_BUG_ON(mt, !mas_is_active(&mas));
3579 
3580 	/* mas_walk active -> active */
3581 	mas.index = 0x1200;
3582 	mas.last = 0x1200;
3583 	mas.offset = 0;
3584 	entry = mas_walk(&mas);
3585 	MT_BUG_ON(mt, entry != ptr);
3586 	MT_BUG_ON(mt, mas.index != 0x1000);
3587 	MT_BUG_ON(mt, mas.last != 0x1500);
3588 	MT_BUG_ON(mt, !mas_is_active(&mas));
3589 
3590 	/* mas_walk active -> active (NULL) */
3591 	mas.index = 0x1600;
3592 	mas.last = 0x1600;
3593 	entry = mas_walk(&mas);
3594 	MT_BUG_ON(mt, entry != NULL);
3595 	MT_BUG_ON(mt, mas.index != 0x1501);
3596 	MT_BUG_ON(mt, mas.last != 0x1fff);
3597 	MT_BUG_ON(mt, !mas_is_active(&mas));
3598 
3599 	mas_unlock(&mas);
3600 }
3601 
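/*
 * The state-handling checks above pin down the ma_state status machine:
 * mas_next(), mas_prev(), mas_find(), mas_find_rev() and mas_walk() must
 * produce consistent results from any entry state (start, pause, none,
 * active), and walking off either end of the limits must leave a
 * recoverable underflow/overflow state rather than losing the position.
 *
 * A minimal sketch (not called by the tests) of the pause/resume pattern
 * this enables; the function name is illustrative only:
 */
static void __init __maybe_unused example_paused_iteration(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *entry;

	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		/* ... process entry ... */
		mas_pause(&mas);	/* drop the node reference */
		mas_unlock(&mas);
		cond_resched();		/* safe: state is paused, not active */
		mas_lock(&mas);
	}
	mas_unlock(&mas);
}
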
3602 static noinline void __init alloc_cyclic_testing(struct maple_tree *mt)
3603 {
3604 	unsigned long location;
3605 	unsigned long next;
3606 	int ret = 0;
3607 	MA_STATE(mas, mt, 0, 0);
3608 
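	/*
	 * Expected pattern (inferred from the assertions below): each
	 * cyclic allocation takes the lowest free index at or above
	 * @next within [2, ULONG_MAX] and advances @next past it, so
	 * pass i allocates location i + 2 and leaves next == i + 3.
	 */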
3609 	next = 0;
3610 	mtree_lock(mt);
3611 	for (int i = 0; i < 100; i++) {
3612 		mas_alloc_cyclic(&mas, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
3613 		MAS_BUG_ON(&mas, i != location - 2);
3614 		MAS_BUG_ON(&mas, mas.index != location);
3615 		MAS_BUG_ON(&mas, mas.last != location);
3616 		MAS_BUG_ON(&mas, i != next - 3);
3617 	}
3618 
3619 	mtree_unlock(mt);
3620 	mtree_destroy(mt);
3621 	next = 0;
3622 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
3623 	for (int i = 0; i < 100; i++) {
3624 		mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
3625 		MT_BUG_ON(mt, i != location - 2);
3626 		MT_BUG_ON(mt, i != next - 3);
3627 		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
3628 	}
3629 
3630 	mtree_destroy(mt);
3631 	/* Overflow test: the third allocation wraps around and returns 1 */
3632 	next = ULONG_MAX - 1;
3633 	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
3634 	MT_BUG_ON(mt, ret != 0);
3635 	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
3636 	MT_BUG_ON(mt, ret != 0);
3637 	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
3638 	MT_BUG_ON(mt, ret != 1);
3639 }
3640 
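/*
 * A minimal sketch of the consumer-side pattern for cyclic allocation,
 * mirroring the return-value convention checked above (0 on success, 1
 * when the allocation succeeded after wrapping).  Not called by the
 * tests; the function name and range are illustrative only.
 */
static int __init __maybe_unused example_alloc_id(struct maple_tree *mt,
		void *item, unsigned long *id, unsigned long *next)
{
	int ret;

	ret = mtree_alloc_cyclic(mt, id, item, 1, ULONG_MAX, next, GFP_KERNEL);
	if (ret < 0)
		return ret;	/* negative errno: nothing was stored */
	if (ret == 1)
		pr_info("ID space wrapped around\n");	/* still a success */
	return 0;
}
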
3641 static DEFINE_MTREE(tree);
3642 static int __init maple_tree_seed(void)
3643 {
3644 	unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
3645 				1001, 1002, 1003, 1005, 0,
3646 				5003, 5002};
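	/* ptr and &tree are used below only as distinguishable sentinel values */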
3647 	void *ptr = &set;
3648 
3649 	pr_info("\nTEST STARTING\n\n");
3650 
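	/*
	 * Benchmark configuration: enabling one of the BENCH_* defines
	 * above runs just that benchmark on a fresh tree and jumps past
	 * the functional tests via the skip label.
	 */
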
3651 #if defined(BENCH_SLOT_STORE)
3652 #define BENCH
3653 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3654 	bench_slot_store(&tree);
3655 	mtree_destroy(&tree);
3656 	goto skip;
3657 #endif
3658 #if defined(BENCH_NODE_STORE)
3659 #define BENCH
3660 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3661 	bench_node_store(&tree);
3662 	mtree_destroy(&tree);
3663 	goto skip;
3664 #endif
3665 #if defined(BENCH_AWALK)
3666 #define BENCH
3667 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3668 	bench_awalk(&tree);
3669 	mtree_destroy(&tree);
3670 	goto skip;
3671 #endif
3672 #if defined(BENCH_WALK)
3673 #define BENCH
3674 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3675 	bench_walk(&tree);
3676 	mtree_destroy(&tree);
3677 	goto skip;
3678 #endif
3679 #if defined(BENCH_LOAD)
3680 #define BENCH
3681 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3682 	bench_load(&tree);
3683 	mtree_destroy(&tree);
3684 	goto skip;
3685 #endif
3686 #if defined(BENCH_FORK)
3687 #define BENCH
3688 	bench_forking();
3689 	goto skip;
3690 #endif
3691 #if defined(BENCH_MT_FOR_EACH)
3692 #define BENCH
3693 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3694 	bench_mt_for_each(&tree);
3695 	mtree_destroy(&tree);
3696 	goto skip;
3697 #endif
3698 #if defined(BENCH_MAS_FOR_EACH)
3699 #define BENCH
3700 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3701 	bench_mas_for_each(&tree);
3702 	mtree_destroy(&tree);
3703 	goto skip;
3704 #endif
3705 #if defined(BENCH_MAS_PREV)
3706 #define BENCH
3707 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3708 	bench_mas_prev(&tree);
3709 	mtree_destroy(&tree);
3710 	goto skip;
3711 #endif
3712 
3713 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3714 	check_root_expand(&tree);
3715 	mtree_destroy(&tree);
3716 
3717 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3718 	check_iteration(&tree);
3719 	mtree_destroy(&tree);
3720 
3721 	check_forking();
3722 
3723 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3724 	check_mas_store_gfp(&tree);
3725 	mtree_destroy(&tree);
3726 
3727 	/* Test ranges (store and insert) */
3728 	mt_init_flags(&tree, 0);
3729 	check_ranges(&tree);
3730 	mtree_destroy(&tree);
3731 
3732 #if defined(CONFIG_64BIT)
3733 	/* These tests have ranges outside of 4GB */
3734 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3735 	check_alloc_range(&tree);
3736 	mtree_destroy(&tree);
3737 
3738 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3739 	check_alloc_rev_range(&tree);
3740 	mtree_destroy(&tree);
3741 #endif
3742 
3743 	mt_init_flags(&tree, 0);
3744 
3745 	check_load(&tree, set[0], NULL);       /* See if 5015 -> NULL */
3746 
3747 	check_insert(&tree, set[9], &tree);     /* Insert 0 */
3748 	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
3749 	check_load(&tree, set[0], NULL);       /* See if 5015 -> NULL */
3750 
3751 	check_insert(&tree, set[10], ptr);      /* Insert 5003 */
3752 	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
3753 	check_load(&tree, set[11], NULL);       /* See if 5002 -> NULL */
3754 	check_load(&tree, set[10], ptr);       /* See if 5003 -> ptr */
3755 
3756 	/* Clear out the tree */
3757 	mtree_destroy(&tree);
3758 
3759 	/* Try to insert, insert a dup, and load back what was inserted. */
3760 	mt_init_flags(&tree, 0);
3761 	check_insert(&tree, set[0], &tree);     /* Insert 5015 */
3762 	check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
3763 	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
3764 
3765 	/*
3766 	 * Second set of tests try to load a value that doesn't exist, inserts
3767 	 * a second value, then loads the value again
3768 	 */
3769 	check_load(&tree, set[1], NULL);        /* See if 5014 -> NULL */
3770 	check_insert(&tree, set[1], ptr);       /* insert 5014 -> ptr */
3771 	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
3772 	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
3773 	/*
3774 	 * Tree currently contains:
3775 	 * 5014 -> ptr, 5015 -> &tree, (nil) in all other ranges
3776 	 */
3777 	check_insert(&tree, set[6], ptr);       /* insert 1002 -> ptr */
3778 	check_insert(&tree, set[7], &tree);       /* insert 1003 -> &tree */
3779 
3780 	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
3781 	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
3782 	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
3783 	check_load(&tree, set[7], &tree);       /* See if 1003 -> &tree */
3784 
3785 	/* Clear out tree */
3786 	mtree_destroy(&tree);
3787 
3788 	mt_init_flags(&tree, 0);
3789 	/* Test inserting into a NULL hole. */
3790 	check_insert(&tree, set[5], ptr);       /* insert 1001 -> ptr */
3791 	check_insert(&tree, set[7], &tree);       /* insert 1003 -> &tree */
3792 	check_insert(&tree, set[6], ptr);       /* insert 1002 -> ptr */
3793 	check_load(&tree, set[5], ptr);         /* See if 1001 -> ptr */
3794 	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
3795 	check_load(&tree, set[7], &tree);       /* See if 1003 -> &tree */
3796 
3797 	/* Clear out the tree */
3798 	mtree_destroy(&tree);
3799 
3800 	mt_init_flags(&tree, 0);
3801 	/*
3802 	 *       set[] = {5015, 5014, 5017, 25, 1000,
3803 	 *                1001, 1002, 1003, 1005, 0,
3804 	 *                5003, 5002};
3805 	 */
3806 
3807 	check_insert(&tree, set[0], ptr); /* 5015 */
3808 	check_insert(&tree, set[1], &tree); /* 5014 */
3809 	check_insert(&tree, set[2], ptr); /* 5017 */
3810 	check_insert(&tree, set[3], &tree); /* 25 */
3811 	check_load(&tree, set[0], ptr);
3812 	check_load(&tree, set[1], &tree);
3813 	check_load(&tree, set[2], ptr);
3814 	check_load(&tree, set[3], &tree);
3815 	check_insert(&tree, set[4], ptr); /* 1000 < Should split. */
3816 	check_load(&tree, set[0], ptr);
3817 	check_load(&tree, set[1], &tree);
3818 	check_load(&tree, set[2], ptr);
3819 	check_load(&tree, set[3], &tree); /* 25 */
3820 	check_load(&tree, set[4], ptr);
3821 	check_insert(&tree, set[5], &tree); /* 1001 */
3822 	check_load(&tree, set[0], ptr);
3823 	check_load(&tree, set[1], &tree);
3824 	check_load(&tree, set[2], ptr);
3825 	check_load(&tree, set[3], &tree);
3826 	check_load(&tree, set[4], ptr);
3827 	check_load(&tree, set[5], &tree);
3828 	check_insert(&tree, set[6], ptr);
3829 	check_load(&tree, set[0], ptr);
3830 	check_load(&tree, set[1], &tree);
3831 	check_load(&tree, set[2], ptr);
3832 	check_load(&tree, set[3], &tree);
3833 	check_load(&tree, set[4], ptr);
3834 	check_load(&tree, set[5], &tree);
3835 	check_load(&tree, set[6], ptr);
3836 	check_insert(&tree, set[7], &tree);
3837 	check_load(&tree, set[0], ptr);
3838 	check_insert(&tree, set[8], ptr);
3839 
3840 	check_insert(&tree, set[9], &tree);
3841 
3842 	check_load(&tree, set[0], ptr);
3843 	check_load(&tree, set[1], &tree);
3844 	check_load(&tree, set[2], ptr);
3845 	check_load(&tree, set[3], &tree);
3846 	check_load(&tree, set[4], ptr);
3847 	check_load(&tree, set[5], &tree);
3848 	check_load(&tree, set[6], ptr);
3849 	check_load(&tree, set[9], &tree);
3850 	mtree_destroy(&tree);
3851 
3852 	mt_init_flags(&tree, 0);
3853 	check_seq(&tree, 16, false);
3854 	mtree_destroy(&tree);
3855 
3856 	mt_init_flags(&tree, 0);
3857 	check_seq(&tree, 1000, true);
3858 	mtree_destroy(&tree);
3859 
3860 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3861 	check_rev_seq(&tree, 1000, true);
3862 	mtree_destroy(&tree);
3863 
3864 	check_lower_bound_split(&tree);
3865 	check_upper_bound_split(&tree);
3866 	check_mid_split(&tree);
3867 
3868 	mt_init_flags(&tree, 0);
3869 	check_next_entry(&tree);
3870 	check_find(&tree);
3871 	check_find_2(&tree);
3872 	mtree_destroy(&tree);
3873 
3874 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3875 	check_prev_entry(&tree);
3876 	mtree_destroy(&tree);
3877 
3878 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3879 	check_gap_combining(&tree);
3880 	mtree_destroy(&tree);
3881 
3882 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3883 	check_node_overwrite(&tree);
3884 	mtree_destroy(&tree);
3885 
3886 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3887 	next_prev_test(&tree);
3888 	mtree_destroy(&tree);
3889 
3890 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3891 	check_spanning_relatives(&tree);
3892 	mtree_destroy(&tree);
3893 
3894 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3895 	check_rev_find(&tree);
3896 	mtree_destroy(&tree);
3897 
3898 	mt_init_flags(&tree, 0);
3899 	check_fuzzer(&tree);
3900 	mtree_destroy(&tree);
3901 
3902 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3903 	check_dup(&tree);
3904 	mtree_destroy(&tree);
3905 
3906 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3907 	check_bnode_min_spanning(&tree);
3908 	mtree_destroy(&tree);
3909 
3910 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3911 	check_empty_area_window(&tree);
3912 	mtree_destroy(&tree);
3913 
3914 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3915 	check_empty_area_fill(&tree);
3916 	mtree_destroy(&tree);
3917 
3918 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3919 	check_state_handling(&tree);
3920 	mtree_destroy(&tree);
3921 
3922 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3923 	alloc_cyclic_testing(&tree);
3924 	mtree_destroy(&tree);
3925 
3927 #if defined(BENCH)
3928 skip:
3929 #endif
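	/* Wait for outstanding RCU frees from the destroyed trees. */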
3930 	rcu_barrier();
3931 	pr_info("maple_tree: %u of %u tests passed\n",
3932 			atomic_read(&maple_tree_tests_passed),
3933 			atomic_read(&maple_tree_tests_run));
3934 	if (atomic_read(&maple_tree_tests_run) ==
3935 	    atomic_read(&maple_tree_tests_passed))
3936 		return 0;
3937 
3938 	return -EINVAL;
3939 }
3940 
3941 static void __exit maple_tree_harvest(void)
3942 {
3943 
3944 }
3945 
3946 module_init(maple_tree_seed);
3947 module_exit(maple_tree_harvest);
3948 MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
3949 MODULE_LICENSE("GPL");
3950