1// SPDX-License-Identifier: GPL-2.0+
2/*
3 * test_maple_tree.c: Test the maple tree API
4 * Copyright (c) 2018-2022 Oracle Corporation
5 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
6 *
7 * Any tests that only require the interface of the tree.
8 */
9
10#include <linux/maple_tree.h>
11#include <linux/module.h>
12
13#define MTREE_ALLOC_MAX 0x2000000000000Ul
14#define CONFIG_MAPLE_SEARCH
15#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)
16
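/*
 * Userspace test builds do not set CONFIG_DEBUG_MAPLE_TREE, so the dump and
 * validate helpers below become no-ops and MT_BUG_ON() is redefined to tally
 * the number of checks run and passed instead of triggering a BUG.
 */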
17#ifndef CONFIG_DEBUG_MAPLE_TREE
18#define mt_dump(mt, fmt) do {} while (0)
19#define mt_validate(mt) do {} while (0)
20#define mt_cache_shrink() do {} while (0)
21#define mas_dump(mas) do {} while (0)
22#define mas_wr_dump(mas) do {} while (0)
23atomic_t maple_tree_tests_run;
24atomic_t maple_tree_tests_passed;
25#undef MT_BUG_ON
26
27#define MT_BUG_ON(__tree, __x) do { \
28 atomic_inc(&maple_tree_tests_run); \
29 if (__x) { \
30 pr_info("BUG at %s:%d (%u)\n", \
31 __func__, __LINE__, __x); \
32 pr_info("Pass: %u Run:%u\n", \
33 atomic_read(&maple_tree_tests_passed), \
34 atomic_read(&maple_tree_tests_run)); \
35 } else { \
36 atomic_inc(&maple_tree_tests_passed); \
37 } \
38} while (0)
39#endif
40
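/*
 * Optional micro-benchmarks: uncomment one of the BENCH_* defines below to
 * build the matching bench_*() routine found further down in this file.
 */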
41/* #define BENCH_SLOT_STORE */
42/* #define BENCH_NODE_STORE */
43/* #define BENCH_AWALK */
44/* #define BENCH_WALK */
45/* #define BENCH_MT_FOR_EACH */
46/* #define BENCH_FORK */
47/* #define BENCH_MAS_FOR_EACH */
48/* #define BENCH_MAS_PREV */
49
50#ifdef __KERNEL__
51#define mt_set_non_kernel(x) do {} while (0)
52#define mt_zero_nr_tallocated(x) do {} while (0)
53#else
54#define cond_resched() do {} while (0)
55#endif
56static int __init mtree_insert_index(struct maple_tree *mt,
57 unsigned long index, gfp_t gfp)
58{
59 return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
60}
61
62static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
63{
64 MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
65 MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
66}
67
68static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
69 void *ptr)
70{
71 return mtree_insert(mt, index, ptr, GFP_KERNEL);
72}
73
74static int __init mtree_test_store_range(struct maple_tree *mt,
75 unsigned long start, unsigned long end, void *ptr)
76{
77 return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
78}
79
80static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
81 void *ptr)
82{
83 return mtree_test_store_range(mt, start, start, ptr);
84}
85
86static int __init mtree_test_insert_range(struct maple_tree *mt,
87 unsigned long start, unsigned long end, void *ptr)
88{
89 return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
90}
91
92static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
93{
94 return mtree_load(mt, index);
95}
96
97static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
98{
99 return mtree_erase(mt, index);
100}
101
102#if defined(CONFIG_64BIT)
103static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
104 unsigned long start, unsigned long end, unsigned long size,
105 unsigned long expected, int eret, void *ptr)
106{
107
108 unsigned long result = expected + 1;
109 int ret;
110
111 ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
112 GFP_KERNEL);
113 MT_BUG_ON(mt, ret != eret);
114 if (ret)
115 return;
116
117 MT_BUG_ON(mt, result != expected);
118}
119
120static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
121 unsigned long start, unsigned long end, unsigned long size,
122 unsigned long expected, int eret, void *ptr)
123{
124
125 unsigned long result = expected + 1;
126 int ret;
127
128 ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
129 GFP_KERNEL);
130 MT_BUG_ON(mt, ret != eret);
131 if (ret)
132 return;
133
134 MT_BUG_ON(mt, result != expected);
135}
136#endif
137
138static noinline void __init check_load(struct maple_tree *mt,
139 unsigned long index, void *ptr)
140{
141 void *ret = mtree_test_load(mt, index);
142
143 if (ret != ptr)
144 pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
145 MT_BUG_ON(mt, ret != ptr);
146}
147
148static noinline void __init check_store_range(struct maple_tree *mt,
149 unsigned long start, unsigned long end, void *ptr, int expected)
150{
151 int ret = -EINVAL;
152 unsigned long i;
153
154 ret = mtree_test_store_range(mt, start, end, ptr);
155 MT_BUG_ON(mt, ret != expected);
156
157 if (ret)
158 return;
159
160 for (i = start; i <= end; i++)
161 check_load(mt, i, ptr);
162}
163
164static noinline void __init check_insert_range(struct maple_tree *mt,
165 unsigned long start, unsigned long end, void *ptr, int expected)
166{
167 int ret = -EINVAL;
168 unsigned long i;
169
170 ret = mtree_test_insert_range(mt, start, end, ptr);
171 MT_BUG_ON(mt, ret != expected);
172
173 if (ret)
174 return;
175
176 for (i = start; i <= end; i++)
177 check_load(mt, i, ptr);
178}
179
180static noinline void __init check_insert(struct maple_tree *mt,
181 unsigned long index, void *ptr)
182{
183 int ret = -EINVAL;
184
185 ret = mtree_test_insert(mt, index, ptr);
186 MT_BUG_ON(mt, ret != 0);
187}
188
189static noinline void __init check_dup_insert(struct maple_tree *mt,
190 unsigned long index, void *ptr)
191{
192 int ret = -EINVAL;
193
194 ret = mtree_test_insert(mt, index, ptr);
195 MT_BUG_ON(mt, ret != -EEXIST);
196}
197
198
199static noinline void __init check_index_load(struct maple_tree *mt,
200 unsigned long index)
201{
202 return check_load(mt, index, xa_mk_value(index & LONG_MAX));
203}
204
205static inline __init int not_empty(struct maple_node *node)
206{
207 int i;
208
209 if (node->parent)
210 return 1;
211
212 for (i = 0; i < ARRAY_SIZE(node->slot); i++)
213 if (node->slot[i])
214 return 1;
215
216 return 0;
217}
218
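/*
 * check_rev_seq() - insert indices from max down to 1, verifying after each
 * insert that every stored index loads back, that the next lower index is
 * still empty, and that the height stays set while RCU mode is toggled.
 */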
219
220static noinline void __init check_rev_seq(struct maple_tree *mt,
221 unsigned long max, bool verbose)
222{
223 unsigned long i = max, j;
224
225 MT_BUG_ON(mt, !mtree_empty(mt));
226
227 mt_zero_nr_tallocated();
228 while (i) {
229 MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
230 for (j = i; j <= max; j++)
231 check_index_load(mt, j);
232
233 check_load(mt, i - 1, NULL);
234 mt_set_in_rcu(mt);
235 MT_BUG_ON(mt, !mt_height(mt));
236 mt_clear_in_rcu(mt);
237 MT_BUG_ON(mt, !mt_height(mt));
238 i--;
239 }
240 check_load(mt, max + 1, NULL);
241
242#ifndef __KERNEL__
243 if (verbose) {
244 rcu_barrier();
245 mt_dump(mt, mt_dump_dec);
246 pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
247 __func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
248 mt_nr_tallocated());
249 }
250#endif
251}
252
253static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
254 bool verbose)
255{
256 unsigned long i, j;
257
258 MT_BUG_ON(mt, !mtree_empty(mt));
259
260 mt_zero_nr_tallocated();
261 for (i = 0; i <= max; i++) {
262 MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
263 for (j = 0; j <= i; j++)
264 check_index_load(mt, j);
265
266 if (i)
267 MT_BUG_ON(mt, !mt_height(mt));
268 check_load(mt, i + 1, NULL);
269 }
270
271#ifndef __KERNEL__
272 if (verbose) {
273 rcu_barrier();
274 mt_dump(mt, mt_dump_dec);
275 pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
276 max, mt_get_alloc_size()/1024, mt_nr_allocated(),
277 mt_nr_tallocated());
278 }
279#endif
280}
281
282static noinline void __init check_lb_not_empty(struct maple_tree *mt)
283{
284 unsigned long i, j;
285 unsigned long huge = 4000UL * 1000 * 1000;
286
287
288 i = huge;
289 while (i > 4096) {
290 check_insert(mt, i, (void *) i);
291 for (j = huge; j >= i; j /= 2) {
292 check_load(mt, j-1, NULL);
293 check_load(mt, j, (void *) j);
294 check_load(mt, j+1, NULL);
295 }
296 i /= 2;
297 }
298 mtree_destroy(mt);
299}
300
301static noinline void __init check_lower_bound_split(struct maple_tree *mt)
302{
303 MT_BUG_ON(mt, !mtree_empty(mt));
304 check_lb_not_empty(mt);
305}
306
307static noinline void __init check_upper_bound_split(struct maple_tree *mt)
308{
309 unsigned long i, j;
310 unsigned long huge;
311
312 MT_BUG_ON(mt, !mtree_empty(mt));
313
314 if (MAPLE_32BIT)
315 huge = 2147483647UL;
316 else
317 huge = 4000UL * 1000 * 1000;
318
319 i = 4096;
320 while (i < huge) {
321 check_insert(mt, i, (void *) i);
322 for (j = i; j >= huge; j *= 2) {
323 check_load(mt, j-1, NULL);
324 check_load(mt, j, (void *) j);
325 check_load(mt, j+1, NULL);
326 }
327 i *= 2;
328 }
329 mtree_destroy(mt);
330}
331
332static noinline void __init check_mid_split(struct maple_tree *mt)
333{
334 unsigned long huge = 8000UL * 1000 * 1000;
335
336 check_insert(mt, huge, (void *) huge);
337 check_insert(mt, 0, xa_mk_value(0));
338 check_lb_not_empty(mt);
339}
340
341static noinline void __init check_rev_find(struct maple_tree *mt)
342{
343 int i, nr_entries = 200;
344 void *val;
345 MA_STATE(mas, mt, 0, 0);
346
347 for (i = 0; i <= nr_entries; i++)
348 mtree_store_range(mt, i*10, i*10 + 5,
349 xa_mk_value(i), GFP_KERNEL);
350
351 rcu_read_lock();
352 mas_set(&mas, 1000);
353 val = mas_find_rev(&mas, 1000);
354 MT_BUG_ON(mt, val != xa_mk_value(100));
355 val = mas_find_rev(&mas, 1000);
356 MT_BUG_ON(mt, val != NULL);
357
358 mas_set(&mas, 999);
359 val = mas_find_rev(&mas, 997);
360 MT_BUG_ON(mt, val != NULL);
361
362 mas_set(&mas, 1000);
363 val = mas_find_rev(&mas, 900);
364 MT_BUG_ON(mt, val != xa_mk_value(100));
365 val = mas_find_rev(&mas, 900);
366 MT_BUG_ON(mt, val != xa_mk_value(99));
367
368 mas_set(&mas, 20);
369 val = mas_find_rev(&mas, 0);
370 MT_BUG_ON(mt, val != xa_mk_value(2));
371 val = mas_find_rev(&mas, 0);
372 MT_BUG_ON(mt, val != xa_mk_value(1));
373 val = mas_find_rev(&mas, 0);
374 MT_BUG_ON(mt, val != xa_mk_value(0));
375 val = mas_find_rev(&mas, 0);
376 MT_BUG_ON(mt, val != NULL);
377 rcu_read_unlock();
378}
379
380static noinline void __init check_find(struct maple_tree *mt)
381{
382 unsigned long val = 0;
383 unsigned long count;
384 unsigned long max;
385 unsigned long top;
386 unsigned long last = 0, index = 0;
387 void *entry, *entry2;
388
389 MA_STATE(mas, mt, 0, 0);
390
391 /* Insert 0. */
392 MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));
393
394#if defined(CONFIG_64BIT)
395 top = 4398046511104UL;
396#else
397 top = ULONG_MAX;
398#endif
399
400 if (MAPLE_32BIT) {
401 count = 15;
402 } else {
403 count = 20;
404 }
405
406 for (int i = 0; i <= count; i++) {
407 if (val != 64)
408 MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
409 else
410 MT_BUG_ON(mt, mtree_insert(mt, val,
411 XA_ZERO_ENTRY, GFP_KERNEL));
412
413 val <<= 2;
414 }
415
416 val = 0;
417 mas_set(&mas, val);
418 mas_lock(&mas);
419 while ((entry = mas_find(&mas, 268435456)) != NULL) {
420 if (val != 64)
421 MT_BUG_ON(mt, xa_mk_value(val) != entry);
422 else
423 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
424
425 val <<= 2;
426 /* For zero check. */
427 if (!val)
428 val = 1;
429 }
430 mas_unlock(&mas);
431
432 val = 0;
433 mas_set(&mas, val);
434 mas_lock(&mas);
435 mas_for_each(&mas, entry, ULONG_MAX) {
436 if (val != 64)
437 MT_BUG_ON(mt, xa_mk_value(val) != entry);
438 else
439 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
440 val <<= 2;
441 /* For zero check. */
442 if (!val)
443 val = 1;
444 }
445 mas_unlock(&mas);
446
447 /* Test mas_pause */
448 val = 0;
449 mas_set(&mas, val);
450 mas_lock(&mas);
451 mas_for_each(&mas, entry, ULONG_MAX) {
452 if (val != 64)
453 MT_BUG_ON(mt, xa_mk_value(val) != entry);
454 else
455 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
456 val <<= 2;
457 /* For zero check. */
458 if (!val)
459 val = 1;
460
461 mas_pause(&mas);
462 mas_unlock(&mas);
463 mas_lock(&mas);
464 }
465 mas_unlock(&mas);
466
467 val = 0;
468 max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
469 mt_for_each(mt, entry, index, max) {
470 MT_BUG_ON(mt, xa_mk_value(val) != entry);
471 val <<= 2;
472 if (val == 64) /* Skip zero entry. */
473 val <<= 2;
474 /* For zero check. */
475 if (!val)
476 val = 1;
477 }
478
479 val = 0;
480 max = 0;
481 index = 0;
482 MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
483 mt_for_each(mt, entry, index, ULONG_MAX) {
484 if (val == top)
485 MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
486 else
487 MT_BUG_ON(mt, xa_mk_value(val) != entry);
488
489 /* Workaround for 32bit */
490 if ((val << 2) < val)
491 val = ULONG_MAX;
492 else
493 val <<= 2;
494
495 if (val == 64) /* Skip zero entry. */
496 val <<= 2;
497 /* For zero check. */
498 if (!val)
499 val = 1;
500 max++;
501 MT_BUG_ON(mt, max > 25);
502 }
503 mtree_erase_index(mt, ULONG_MAX);
504
505 mas_reset(&mas);
506 index = 17;
507 entry = mt_find(mt, &index, 512);
508 MT_BUG_ON(mt, xa_mk_value(256) != entry);
509
510 mas_reset(&mas);
511 index = 17;
512 entry = mt_find(mt, &index, 20);
513 MT_BUG_ON(mt, entry != NULL);
514
515
516 /* Range check. */
517 /* Insert ULONG_MAX */
518 MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
519
520 val = 0;
521 mas_set(&mas, 0);
522 mas_lock(&mas);
523 mas_for_each(&mas, entry, ULONG_MAX) {
524 if (val == 64)
525 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
526 else if (val == top)
527 MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
528 else
529 MT_BUG_ON(mt, xa_mk_value(val) != entry);
530
531 /* Workaround for 32bit */
532 if ((val << 2) < val)
533 val = ULONG_MAX;
534 else
535 val <<= 2;
536
537 /* For zero check. */
538 if (!val)
539 val = 1;
540 mas_pause(&mas);
541 mas_unlock(&mas);
542 mas_lock(&mas);
543 }
544 mas_unlock(&mas);
545
546 mas_set(&mas, 1048576);
547 mas_lock(&mas);
548 entry = mas_find(&mas, 1048576);
549 mas_unlock(&mas);
550 MT_BUG_ON(mas.tree, entry == NULL);
551
552 /*
553 * Find last value.
554 * 1. get the expected value, leveraging the existence of an end entry
555 * 2. delete end entry
556 * 3. find the last value by searching for ULONG_MAX and then using
557 * prev
558 */
559 /* First, get the expected result. */
560 mas_lock(&mas);
561 mas_reset(&mas);
562 mas.index = ULONG_MAX; /* start at max.. */
563 entry = mas_find(&mas, ULONG_MAX);
564 entry = mas_prev(&mas, 0);
565 index = mas.index;
566 last = mas.last;
567
568 /* Erase the last entry. */
569 mas_reset(&mas);
570 mas.index = ULONG_MAX;
571 mas.last = ULONG_MAX;
572 mas_erase(&mas);
573
574 /* Get the previous value from MAS_START */
575 mas_reset(&mas);
576 entry2 = mas_prev(&mas, 0);
577
578 /* Check results. */
579 MT_BUG_ON(mt, entry != entry2);
580 MT_BUG_ON(mt, index != mas.index);
581 MT_BUG_ON(mt, last != mas.last);
582
583
584 mas.node = MAS_NONE;
585 mas.index = ULONG_MAX;
586 mas.last = ULONG_MAX;
587 entry2 = mas_prev(&mas, 0);
588 MT_BUG_ON(mt, entry != entry2);
589
590 mas_set(&mas, 0);
591 MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);
592
593 mas_unlock(&mas);
594 mtree_destroy(mt);
595}
596
597static noinline void __init check_find_2(struct maple_tree *mt)
598{
599 unsigned long i, j;
600 void *entry;
601
602 MA_STATE(mas, mt, 0, 0);
603 rcu_read_lock();
604 mas_for_each(&mas, entry, ULONG_MAX)
605 MT_BUG_ON(mt, true);
606 rcu_read_unlock();
607
608 for (i = 0; i < 256; i++) {
609 mtree_insert_index(mt, i, GFP_KERNEL);
610 j = 0;
611 mas_set(&mas, 0);
612 rcu_read_lock();
613 mas_for_each(&mas, entry, ULONG_MAX) {
614 MT_BUG_ON(mt, entry != xa_mk_value(j));
615 j++;
616 }
617 rcu_read_unlock();
618 MT_BUG_ON(mt, j != i + 1);
619 }
620
621 for (i = 0; i < 256; i++) {
622 mtree_erase_index(mt, i);
623 j = i + 1;
624 mas_set(&mas, 0);
625 rcu_read_lock();
626 mas_for_each(&mas, entry, ULONG_MAX) {
627 if (xa_is_zero(entry))
628 continue;
629
630 MT_BUG_ON(mt, entry != xa_mk_value(j));
631 j++;
632 }
633 rcu_read_unlock();
634 MT_BUG_ON(mt, j != 256);
635 }
636
637 /*MT_BUG_ON(mt, !mtree_empty(mt)); */
638}
639
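/*
 * The two 64-bit tests below replay a recorded /proc/self/maps layout (the
 * range[] tables, stored by page frame number, i.e. shifted right by 12) and
 * check that the empty-area search and the alloc_range/alloc_rrange
 * interfaces find the expected holes.
 */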
640
641#if defined(CONFIG_64BIT)
642static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
643{
644 /*
645 * Generated by:
646 * cat /proc/self/maps | awk '{print $1}'|
647 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
648 */
649
650 static const unsigned long range[] = {
651 /* Inclusive , Exclusive. */
652 0x565234af2000, 0x565234af4000,
653 0x565234af4000, 0x565234af9000,
654 0x565234af9000, 0x565234afb000,
655 0x565234afc000, 0x565234afd000,
656 0x565234afd000, 0x565234afe000,
657 0x565235def000, 0x565235e10000,
658 0x7f36d4bfd000, 0x7f36d4ee2000,
659 0x7f36d4ee2000, 0x7f36d4f04000,
660 0x7f36d4f04000, 0x7f36d504c000,
661 0x7f36d504c000, 0x7f36d5098000,
662 0x7f36d5098000, 0x7f36d5099000,
663 0x7f36d5099000, 0x7f36d509d000,
664 0x7f36d509d000, 0x7f36d509f000,
665 0x7f36d509f000, 0x7f36d50a5000,
666 0x7f36d50b9000, 0x7f36d50db000,
667 0x7f36d50db000, 0x7f36d50dc000,
668 0x7f36d50dc000, 0x7f36d50fa000,
669 0x7f36d50fa000, 0x7f36d5102000,
670 0x7f36d5102000, 0x7f36d5103000,
671 0x7f36d5103000, 0x7f36d5104000,
672 0x7f36d5104000, 0x7f36d5105000,
673 0x7fff5876b000, 0x7fff5878d000,
674 0x7fff5878e000, 0x7fff58791000,
675 0x7fff58791000, 0x7fff58793000,
676 };
677
678 static const unsigned long holes[] = {
679 /*
680 * Note: start of hole is INCLUSIVE
681 * end of hole is EXCLUSIVE
682 * (opposite of the above table.)
683 * Start of hole, end of hole, size of hole (+1)
684 */
685 0x565234afb000, 0x565234afc000, 0x1000,
686 0x565234afe000, 0x565235def000, 0x12F1000,
687 0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
688 };
689
690 /*
691 * req_range consists of 5 values.
692 * 1. min index
693 * 2. max index
694 * 3. size
695 * 4. address that should be returned.
696 * 5. return value
697 */
698 static const unsigned long req_range[] = {
699 0x565234af9000, /* Min */
700 0x7fff58791000, /* Max */
701 0x1000, /* Size */
702 0x7fff5878d << 12, /* First rev hole of size 0x1000 */
703 0, /* Return value success. */
704
705 0x0, /* Min */
706 0x565234AF0 << 12, /* Max */
707 0x3000, /* Size */
708 0x565234AEE << 12, /* max - 3. */
709 0, /* Return value success. */
710
711 0x0, /* Min */
712 -1, /* Max */
713 0x1000, /* Size */
714 562949953421311 << 12,/* First rev hole of size 0x1000 */
715 0, /* Return value success. */
716
717 0x0, /* Min */
718 0x7F36D5109 << 12, /* Max */
719 0x4000, /* Size */
720 0x7F36D5106 << 12, /* First rev hole of size 0x4000 */
721 0, /* Return value success. */
722
723 /* Ascend test. */
724 0x0,
725 34148798628 << 12,
726 19 << 12,
727 34148797418 << 12,
728 0x0,
729
730 /* Too big test. */
731 0x0,
732 18446744073709551615UL,
733 562915594369134UL << 12,
734 0x0,
735 -EBUSY,
736
737 /* Single space test. */
738 34148798725 << 12,
739 34148798725 << 12,
740 1 << 12,
741 34148798725 << 12,
742 0,
743 };
744
745 int i, range_count = ARRAY_SIZE(range);
746 int req_range_count = ARRAY_SIZE(req_range);
747 unsigned long min = 0;
748
749 MA_STATE(mas, mt, 0, 0);
750
751 mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
752 GFP_KERNEL);
753#define DEBUG_REV_RANGE 0
754 for (i = 0; i < range_count; i += 2) {
755 /* Inclusive, Inclusive (with the -1) */
756
757#if DEBUG_REV_RANGE
758 pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
759 (range[i + 1] >> 12) - 1);
760#endif
761 check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
762 xa_mk_value(range[i] >> 12), 0);
763 mt_validate(mt);
764 }
765
766
767 mas_lock(&mas);
768 for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
769#if DEBUG_REV_RANGE
770 pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
771 min, holes[i+1]>>12, holes[i+2]>>12,
772 holes[i] >> 12);
773#endif
774 MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
775 holes[i+1] >> 12,
776 holes[i+2] >> 12));
777#if DEBUG_REV_RANGE
778 pr_debug("Found %lu %lu\n", mas.index, mas.last);
779 pr_debug("gap %lu %lu\n", (holes[i] >> 12),
780 (holes[i+1] >> 12));
781#endif
782 MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
783 MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
784 min = holes[i+1] >> 12;
785 mas_reset(&mas);
786 }
787
788 mas_unlock(&mas);
789 for (i = 0; i < req_range_count; i += 5) {
790#if DEBUG_REV_RANGE
791 pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
792 i, req_range[i] >> 12,
793 (req_range[i + 1] >> 12),
794 req_range[i+2] >> 12,
795 req_range[i+3] >> 12);
796#endif
797 check_mtree_alloc_rrange(mt,
798 req_range[i] >> 12, /* start */
799 req_range[i+1] >> 12, /* end */
800 req_range[i+2] >> 12, /* size */
801 req_range[i+3] >> 12, /* expected address */
802 req_range[i+4], /* expected return */
803 xa_mk_value(req_range[i] >> 12)); /* pointer */
804 mt_validate(mt);
805 }
806
807 mt_set_non_kernel(1);
808 mtree_erase(mt, 34148798727); /* create a deleted range. */
809 mtree_erase(mt, 34148798725);
810 check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
811 34148798725, 0, mt);
812
813 mtree_destroy(mt);
814}
815
816static noinline void __init check_alloc_range(struct maple_tree *mt)
817{
818 /*
819 * Generated by:
820 * cat /proc/self/maps|awk '{print $1}'|
821 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
822 */
823
824 static const unsigned long range[] = {
825 /* Inclusive , Exclusive. */
826 0x565234af2000, 0x565234af4000,
827 0x565234af4000, 0x565234af9000,
828 0x565234af9000, 0x565234afb000,
829 0x565234afc000, 0x565234afd000,
830 0x565234afd000, 0x565234afe000,
831 0x565235def000, 0x565235e10000,
832 0x7f36d4bfd000, 0x7f36d4ee2000,
833 0x7f36d4ee2000, 0x7f36d4f04000,
834 0x7f36d4f04000, 0x7f36d504c000,
835 0x7f36d504c000, 0x7f36d5098000,
836 0x7f36d5098000, 0x7f36d5099000,
837 0x7f36d5099000, 0x7f36d509d000,
838 0x7f36d509d000, 0x7f36d509f000,
839 0x7f36d509f000, 0x7f36d50a5000,
840 0x7f36d50b9000, 0x7f36d50db000,
841 0x7f36d50db000, 0x7f36d50dc000,
842 0x7f36d50dc000, 0x7f36d50fa000,
843 0x7f36d50fa000, 0x7f36d5102000,
844 0x7f36d5102000, 0x7f36d5103000,
845 0x7f36d5103000, 0x7f36d5104000,
846 0x7f36d5104000, 0x7f36d5105000,
847 0x7fff5876b000, 0x7fff5878d000,
848 0x7fff5878e000, 0x7fff58791000,
849 0x7fff58791000, 0x7fff58793000,
850 };
851 static const unsigned long holes[] = {
852 /* Start of hole, end of hole, size of hole (+1) */
853 0x565234afb000, 0x565234afc000, 0x1000,
854 0x565234afe000, 0x565235def000, 0x12F1000,
855 0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
856 };
857
858 /*
859 * req_range consists of 5 values.
860 * 1. min index
861 * 2. max index
862 * 3. size
863 * 4. address that should be returned.
864 * 5. return value
865 */
866 static const unsigned long req_range[] = {
867 0x565234af9000, /* Min */
868 0x7fff58791000, /* Max */
869 0x1000, /* Size */
870 0x565234afb000, /* First hole in our data of size 1000. */
871 0, /* Return value success. */
872
873 0x0, /* Min */
874 0x7fff58791000, /* Max */
875 0x1F00, /* Size */
876 0x0, /* First hole in our data of size 2000. */
877 0, /* Return value success. */
878
879 /* Test ascend. */
880 34148797436 << 12, /* Min */
881 0x7fff587AF000, /* Max */
882 0x3000, /* Size */
883 34148798629 << 12, /* Expected location */
884 0, /* Return value success. */
885
886 /* Test failing. */
887 34148798623 << 12, /* Min */
888 34148798683 << 12, /* Max */
889 0x15000, /* Size */
890 0, /* Expected location */
891 -EBUSY, /* Return value failed. */
892
893 /* Test filling entire gap. */
894 34148798623 << 12, /* Min */
895 0x7fff587AF000, /* Max */
896 0x10000, /* Size */
897 34148798632 << 12, /* Expected location */
898 0, /* Return value success. */
899
900 /* Test walking off the end of root. */
901 0, /* Min */
902 -1, /* Max */
903 -1, /* Size */
904 0, /* Expected location */
905 -EBUSY, /* Return value failure. */
906
907 /* Test looking for too large a hole across entire range. */
908 0, /* Min */
909 -1, /* Max */
910 4503599618982063UL << 12, /* Size */
911 34359052178 << 12, /* Expected location */
912 -EBUSY, /* Return failure. */
913
914 /* Test a single entry */
915 34148798648 << 12, /* Min */
916 34148798648 << 12, /* Max */
917 4096, /* Size of 1 */
918 34148798648 << 12, /* Location is the same as min/max */
919 0, /* Success */
920 };
921 int i, range_count = ARRAY_SIZE(range);
922 int req_range_count = ARRAY_SIZE(req_range);
923 unsigned long min = 0x565234af2000;
924 MA_STATE(mas, mt, 0, 0);
925
926 mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
927 GFP_KERNEL);
928 for (i = 0; i < range_count; i += 2) {
929#define DEBUG_ALLOC_RANGE 0
930#if DEBUG_ALLOC_RANGE
931 pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
932 (range[i + 1] >> 12) - 1);
933 mt_dump(mt, mt_dump_hex);
934#endif
935 check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
936 xa_mk_value(range[i] >> 12), 0);
937 mt_validate(mt);
938 }
939
940
941
942 mas_lock(&mas);
943 for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
944
945#if DEBUG_ALLOC_RANGE
946 pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
947 holes[i+1] >> 12, holes[i+2] >> 12,
948 min, holes[i+1]);
949#endif
950 MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
951 holes[i+1] >> 12,
952 holes[i+2] >> 12));
953 MT_BUG_ON(mt, mas.index != holes[i] >> 12);
954 min = holes[i+1];
955 mas_reset(&mas);
956 }
957 mas_unlock(&mas);
958 for (i = 0; i < req_range_count; i += 5) {
959#if DEBUG_ALLOC_RANGE
960 pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
961 i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
962 req_range[i + 2] >> 12, req_range[i + 3] >> 12,
963 req_range[i], req_range[i+1]);
964#endif
965 check_mtree_alloc_range(mt,
966 req_range[i] >> 12, /* start */
967 req_range[i+1] >> 12, /* end */
968 req_range[i+2] >> 12, /* size */
969 req_range[i+3] >> 12, /* expected address */
970 req_range[i+4], /* expected return */
971 xa_mk_value(req_range[i] >> 12)); /* pointer */
972 mt_validate(mt);
973#if DEBUG_ALLOC_RANGE
974 mt_dump(mt, mt_dump_hex);
975#endif
976 }
977
978 mtree_destroy(mt);
979}
980#endif
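/*
 * check_ranges() - exercise range stores and overwrites: overlapping inserts,
 * overwrites spanning node boundaries and multiple levels, in-place appends
 * and expansions (including in RCU mode), rebalancing and splits, driven by
 * the r[] pairs and the sequences built below.
 */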
981
982static noinline void __init check_ranges(struct maple_tree *mt)
983{
984 int i, val, val2;
985 static const unsigned long r[] = {
986 10, 15,
987 20, 25,
988 17, 22, /* Overlaps previous range. */
989 9, 1000, /* Huge. */
990 100, 200,
991 45, 168,
992 118, 128,
993 };
994
995 MT_BUG_ON(mt, !mtree_empty(mt));
996 check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
997 check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
998 check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
999 MT_BUG_ON(mt, !mt_height(mt));
1000 /* Store */
1001 check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
1002 check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
1003 check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
1004 MT_BUG_ON(mt, !mt_height(mt));
1005 mtree_destroy(mt);
1006 MT_BUG_ON(mt, mt_height(mt));
1007
1008 check_seq(mt, 50, false);
1009 mt_set_non_kernel(4);
1010 check_store_range(mt, 5, 47, xa_mk_value(47), 0);
1011 MT_BUG_ON(mt, !mt_height(mt));
1012 mtree_destroy(mt);
1013
1014 /* Create tree of 1-100 */
1015 check_seq(mt, 100, false);
1016 /* Store 45-168 */
1017 mt_set_non_kernel(10);
1018 check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1019 MT_BUG_ON(mt, !mt_height(mt));
1020 mtree_destroy(mt);
1021
1022 /* Create tree of 1-200 */
1023 check_seq(mt, 200, false);
1024 /* Store 45-168 */
1025 check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1026 MT_BUG_ON(mt, !mt_height(mt));
1027 mtree_destroy(mt);
1028
1029 check_seq(mt, 30, false);
1030 check_store_range(mt, 6, 18, xa_mk_value(6), 0);
1031 MT_BUG_ON(mt, !mt_height(mt));
1032 mtree_destroy(mt);
1033
1034 /* Overwrite across multiple levels. */
1035 /* Create tree of 1-400 */
1036 check_seq(mt, 400, false);
1037 mt_set_non_kernel(50);
1038 /* Store 118-128 */
1039 check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
1040 mt_set_non_kernel(50);
1041 mtree_test_erase(mt, 140);
1042 mtree_test_erase(mt, 141);
1043 mtree_test_erase(mt, 142);
1044 mtree_test_erase(mt, 143);
1045 mtree_test_erase(mt, 130);
1046 mtree_test_erase(mt, 131);
1047 mtree_test_erase(mt, 132);
1048 mtree_test_erase(mt, 133);
1049 mtree_test_erase(mt, 134);
1050 mtree_test_erase(mt, 135);
1051 check_load(mt, r[12], xa_mk_value(r[12]));
1052 check_load(mt, r[13], xa_mk_value(r[12]));
1053 check_load(mt, r[13] - 1, xa_mk_value(r[12]));
1054 check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
1055 check_load(mt, 135, NULL);
1056 check_load(mt, 140, NULL);
1057 mt_set_non_kernel(0);
1058 MT_BUG_ON(mt, !mt_height(mt));
1059 mtree_destroy(mt);
1060
1061
1062
1063 /* Overwrite multiple levels at the end of the tree (slot 7) */
1064 mt_set_non_kernel(50);
1065 check_seq(mt, 400, false);
1066 check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1067 check_store_range(mt, 347, 352, xa_mk_value(347), 0);
1068
1069 check_load(mt, 346, xa_mk_value(346));
1070 for (i = 347; i <= 352; i++)
1071 check_load(mt, i, xa_mk_value(347));
1072 for (i = 353; i <= 361; i++)
1073 check_load(mt, i, xa_mk_value(353));
1074 check_load(mt, 362, xa_mk_value(362));
1075 mt_set_non_kernel(0);
1076 MT_BUG_ON(mt, !mt_height(mt));
1077 mtree_destroy(mt);
1078
1079 mt_set_non_kernel(50);
1080 check_seq(mt, 400, false);
1081 check_store_range(mt, 352, 364, NULL, 0);
1082 check_store_range(mt, 351, 363, xa_mk_value(352), 0);
1083 check_load(mt, 350, xa_mk_value(350));
1084 check_load(mt, 351, xa_mk_value(352));
1085 for (i = 352; i <= 363; i++)
1086 check_load(mt, i, xa_mk_value(352));
1087 check_load(mt, 364, NULL);
1088 check_load(mt, 365, xa_mk_value(365));
1089 mt_set_non_kernel(0);
1090 MT_BUG_ON(mt, !mt_height(mt));
1091 mtree_destroy(mt);
1092
1093 mt_set_non_kernel(5);
1094 check_seq(mt, 400, false);
1095 check_store_range(mt, 352, 364, NULL, 0);
1096 check_store_range(mt, 351, 364, xa_mk_value(352), 0);
1097 check_load(mt, 350, xa_mk_value(350));
1098 check_load(mt, 351, xa_mk_value(352));
1099 for (i = 352; i <= 364; i++)
1100 check_load(mt, i, xa_mk_value(352));
1101 check_load(mt, 365, xa_mk_value(365));
1102 mt_set_non_kernel(0);
1103 MT_BUG_ON(mt, !mt_height(mt));
1104 mtree_destroy(mt);
1105
1106
1107 mt_set_non_kernel(50);
1108 check_seq(mt, 400, false);
1109 check_store_range(mt, 362, 367, xa_mk_value(362), 0);
1110 check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1111 mt_set_non_kernel(0);
1112 mt_validate(mt);
1113 MT_BUG_ON(mt, !mt_height(mt));
1114 mtree_destroy(mt);
1115 /*
1116 * Interesting cases:
1117 * 1. Overwrite the end of a node and end in the first entry of the next
1118 * node.
1119 * 2. Split a single range
1120 * 3. Overwrite the start of a range
1121 * 4. Overwrite the end of a range
1122 * 5. Overwrite the entire range
1123 * 6. Overwrite a range that causes multiple parent nodes to be
1124 * combined
1125 * 7. Overwrite a range that causes multiple parent nodes and part of
1126 * root to be combined
1127 * 8. Overwrite the whole tree
1128 * 9. Try to overwrite the zero entry of an alloc tree.
1129 * 10. Write a range larger than a node's current pivot
1130 */
1131
1132 mt_set_non_kernel(50);
1133 for (i = 0; i <= 500; i++) {
1134 val = i*5;
1135 val2 = (i+1)*5;
1136 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1137 }
1138 check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
1139 check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
1140 check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
1141 check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
1142 check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
1143 mtree_destroy(mt);
1144 mt_set_non_kernel(0);
1145
1146 mt_set_non_kernel(50);
1147 for (i = 0; i <= 500; i++) {
1148 val = i*5;
1149 val2 = (i+1)*5;
1150 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1151 }
1152 check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
1153 check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
1154 check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
1155 check_store_range(mt, 2460, 2470, NULL, 0);
1156 check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
1157 check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
1158 mt_set_non_kernel(0);
1159 MT_BUG_ON(mt, !mt_height(mt));
1160 mtree_destroy(mt);
1161
1162 /* Check in-place modifications */
1163 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1164 /* Append to the start of last range */
1165 mt_set_non_kernel(50);
1166 for (i = 0; i <= 500; i++) {
1167 val = i * 5 + 1;
1168 val2 = val + 4;
1169 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1170 }
1171
1172 /* Append to the last range without touching any boundaries */
1173 for (i = 0; i < 10; i++) {
1174 val = val2 + 5;
1175 val2 = val + 4;
1176 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1177 }
1178
1179 /* Append to the end of last range */
1180 val = val2;
1181 for (i = 0; i < 10; i++) {
1182 val += 5;
1183 MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
1184 xa_mk_value(val)) != 0);
1185 }
1186
1187 /* Overwriting the range and over a part of the next range */
1188 for (i = 10; i < 30; i += 2) {
1189 val = i * 5 + 1;
1190 val2 = val + 5;
1191 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1192 }
1193
1194 /* Overwriting a part of the range and over the next range */
1195 for (i = 50; i < 70; i += 2) {
1196 val2 = i * 5;
1197 val = val2 - 5;
1198 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1199 }
1200
1201 /*
1202 * Expand the range, only partially overwriting the previous and
1203 * next ranges
1204 */
1205 for (i = 100; i < 130; i += 3) {
1206 val = i * 5 - 5;
1207 val2 = i * 5 + 1;
1208 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1209 }
1210
1211 /*
1212 * Expand the range, only partially overwriting the previous and
1213 * next ranges, in RCU mode
1214 */
1215 mt_set_in_rcu(mt);
1216 for (i = 150; i < 180; i += 3) {
1217 val = i * 5 - 5;
1218 val2 = i * 5 + 1;
1219 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1220 }
1221
1222 MT_BUG_ON(mt, !mt_height(mt));
1223 mt_validate(mt);
1224 mt_set_non_kernel(0);
1225 mtree_destroy(mt);
1226
1227 /* Test rebalance gaps */
1228 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1229 mt_set_non_kernel(50);
1230 for (i = 0; i <= 50; i++) {
1231 val = i*10;
1232 val2 = (i+1)*10;
1233 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1234 }
1235 check_store_range(mt, 161, 161, xa_mk_value(161), 0);
1236 check_store_range(mt, 162, 162, xa_mk_value(162), 0);
1237 check_store_range(mt, 163, 163, xa_mk_value(163), 0);
1238 check_store_range(mt, 240, 249, NULL, 0);
1239 mtree_erase(mt, 200);
1240 mtree_erase(mt, 210);
1241 mtree_erase(mt, 220);
1242 mtree_erase(mt, 230);
1243 mt_set_non_kernel(0);
1244 MT_BUG_ON(mt, !mt_height(mt));
1245 mtree_destroy(mt);
1246
1247 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1248 for (i = 0; i <= 500; i++) {
1249 val = i*10;
1250 val2 = (i+1)*10;
1251 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1252 }
1253 check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
1254 mt_validate(mt);
1255 MT_BUG_ON(mt, !mt_height(mt));
1256 mtree_destroy(mt);
1257
1258 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1259 for (i = 0; i <= 500; i++) {
1260 val = i*10;
1261 val2 = (i+1)*10;
1262 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1263 }
1264 check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
1265 check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
1266 check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
1267 check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
1268 check_store_range(mt, 4842, 4849, NULL, 0);
1269 mt_validate(mt);
1270 MT_BUG_ON(mt, !mt_height(mt));
1271 mtree_destroy(mt);
1272
1273 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1274 for (i = 0; i <= 1300; i++) {
1275 val = i*10;
1276 val2 = (i+1)*10;
1277 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1278 MT_BUG_ON(mt, mt_height(mt) >= 4);
1279 }
1280 /* Cause a 3 child split all the way up the tree. */
1281 for (i = 5; i < 215; i += 10)
1282 check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
1283 for (i = 5; i < 65; i += 10)
1284 check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);
1285
1286 MT_BUG_ON(mt, mt_height(mt) >= 4);
1287 for (i = 5; i < 45; i += 10)
1288 check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
1289 if (!MAPLE_32BIT)
1290 MT_BUG_ON(mt, mt_height(mt) < 4);
1291 mtree_destroy(mt);
1292
1293
1294 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1295 for (i = 0; i <= 1200; i++) {
1296 val = i*10;
1297 val2 = (i+1)*10;
1298 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1299 MT_BUG_ON(mt, mt_height(mt) >= 4);
1300 }
1301 /* Fill parents and leaves before split. */
1302 for (i = 5; i < 455; i += 10)
1303 check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);
1304
1305 for (i = 1; i < 16; i++)
1306 check_store_range(mt, 8185 + i, 8185 + i + 1,
1307 xa_mk_value(8185+i), 0);
1308 MT_BUG_ON(mt, mt_height(mt) >= 4);
1309 /* triple split across multiple levels. */
1310 check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
1311 if (!MAPLE_32BIT)
1312 MT_BUG_ON(mt, mt_height(mt) != 4);
1313}
1314
1315static noinline void __init check_next_entry(struct maple_tree *mt)
1316{
1317 void *entry = NULL;
1318 unsigned long limit = 30, i = 0;
1319 MA_STATE(mas, mt, i, i);
1320
1321 MT_BUG_ON(mt, !mtree_empty(mt));
1322
1323 check_seq(mt, limit, false);
1324 rcu_read_lock();
1325
1326 /* Check the first one and get ma_state in the correct state. */
1327 MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
1328 for ( ; i <= limit + 1; i++) {
1329 entry = mas_next(&mas, limit);
1330 if (i > limit)
1331 MT_BUG_ON(mt, entry != NULL);
1332 else
1333 MT_BUG_ON(mt, xa_mk_value(i) != entry);
1334 }
1335 rcu_read_unlock();
1336 mtree_destroy(mt);
1337}
1338
1339static noinline void __init check_prev_entry(struct maple_tree *mt)
1340{
1341 unsigned long index = 16;
1342 void *value;
1343 int i;
1344
1345 MA_STATE(mas, mt, index, index);
1346
1347 MT_BUG_ON(mt, !mtree_empty(mt));
1348 check_seq(mt, 30, false);
1349
1350 rcu_read_lock();
1351 value = mas_find(&mas, ULONG_MAX);
1352 MT_BUG_ON(mt, value != xa_mk_value(index));
1353 value = mas_prev(&mas, 0);
1354 MT_BUG_ON(mt, value != xa_mk_value(index - 1));
1355 rcu_read_unlock();
1356 mtree_destroy(mt);
1357
1358 /* Check limits on prev */
1359 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1360 mas_lock(&mas);
1361 for (i = 0; i <= index; i++) {
1362 mas_set_range(&mas, i*10, i*10+5);
1363 mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1364 }
1365
1366 mas_set(&mas, 20);
1367 value = mas_walk(&mas);
1368 MT_BUG_ON(mt, value != xa_mk_value(2));
1369
1370 value = mas_prev(&mas, 19);
1371 MT_BUG_ON(mt, value != NULL);
1372
1373 mas_set(&mas, 80);
1374 value = mas_walk(&mas);
1375 MT_BUG_ON(mt, value != xa_mk_value(8));
1376
1377 value = mas_prev(&mas, 76);
1378 MT_BUG_ON(mt, value != NULL);
1379
1380 mas_unlock(&mas);
1381}
1382
1383static noinline void __init check_root_expand(struct maple_tree *mt)
1384{
1385 MA_STATE(mas, mt, 0, 0);
1386 void *ptr;
1387
1388
1389 mas_lock(&mas);
1390 mas_set(&mas, 3);
1391 ptr = mas_walk(&mas);
1392 MT_BUG_ON(mt, mas.index != 0);
1393 MT_BUG_ON(mt, ptr != NULL);
1394 MT_BUG_ON(mt, mas.index != 0);
1395 MT_BUG_ON(mt, mas.last != ULONG_MAX);
1396
1397 ptr = &check_prev_entry;
1398 mas_set(&mas, 1);
1399 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1400
1401 mas_set(&mas, 0);
1402 ptr = mas_walk(&mas);
1403 MT_BUG_ON(mt, ptr != NULL);
1404
1405 mas_set(&mas, 1);
1406 ptr = mas_walk(&mas);
1407 MT_BUG_ON(mt, ptr != &check_prev_entry);
1408
1409 mas_set(&mas, 2);
1410 ptr = mas_walk(&mas);
1411 MT_BUG_ON(mt, ptr != NULL);
1412 mas_unlock(&mas);
1413 mtree_destroy(mt);
1414
1415
1416 mt_init_flags(mt, 0);
1417 mas_lock(&mas);
1418
1419 mas_set(&mas, 0);
1420 ptr = &check_prev_entry;
1421 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1422
1423 mas_set(&mas, 5);
1424 ptr = mas_walk(&mas);
1425 MT_BUG_ON(mt, ptr != NULL);
1426 MT_BUG_ON(mt, mas.index != 1);
1427 MT_BUG_ON(mt, mas.last != ULONG_MAX);
1428
1429 mas_set_range(&mas, 0, 100);
1430 ptr = mas_walk(&mas);
1431 MT_BUG_ON(mt, ptr != &check_prev_entry);
1432 MT_BUG_ON(mt, mas.last != 0);
1433 mas_unlock(&mas);
1434 mtree_destroy(mt);
1435
1436 mt_init_flags(mt, 0);
1437 mas_lock(&mas);
1438
1439 mas_set(&mas, 0);
1440 ptr = (void *)((unsigned long) check_prev_entry | 1UL);
1441 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1442 ptr = mas_next(&mas, ULONG_MAX);
1443 MT_BUG_ON(mt, ptr != NULL);
1444 MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));
1445
1446 mas_set(&mas, 1);
1447 ptr = mas_prev(&mas, 0);
1448 MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
1449 MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));
1450
1451 mas_unlock(&mas);
1452
1453 mtree_destroy(mt);
1454
1455 mt_init_flags(mt, 0);
1456 mas_lock(&mas);
1457 mas_set(&mas, 0);
1458 ptr = (void *)((unsigned long) check_prev_entry | 2UL);
1459 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1460 ptr = mas_next(&mas, ULONG_MAX);
1461 MT_BUG_ON(mt, ptr != NULL);
1462 MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));
1463
1464 mas_set(&mas, 1);
1465 ptr = mas_prev(&mas, 0);
1466 MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
1467 MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));
1468
1469
1470 mas_unlock(&mas);
1471}
1472
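/*
 * check_gap_combining() - erase neighbouring entries to create gaps of known
 * size and position, then verify mas_empty_area_rev() finds each gap,
 * including gaps that cross node boundaries and multi-level trees.
 */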
1473static noinline void __init check_gap_combining(struct maple_tree *mt)
1474{
1475 struct maple_enode *mn1, *mn2;
1476 void *entry;
1477 unsigned long singletons = 100;
1478 static const unsigned long *seq100;
1479 static const unsigned long seq100_64[] = {
1480 /* 0-5 */
1481 74, 75, 76,
1482 50, 100, 2,
1483
1484 /* 6-12 */
1485 44, 45, 46, 43,
1486 20, 50, 3,
1487
1488 /* 13-20 */
1489 80, 81, 82,
1490 76, 2, 79, 85, 4,
1491 };
1492
1493 static const unsigned long seq100_32[] = {
1494 /* 0-5 */
1495 61, 62, 63,
1496 50, 100, 2,
1497
1498 /* 6-12 */
1499 31, 32, 33, 30,
1500 20, 50, 3,
1501
1502 /* 13-20 */
1503 80, 81, 82,
1504 76, 2, 79, 85, 4,
1505 };
1506
1507 static const unsigned long seq2000[] = {
1508 1152, 1151,
1509 1100, 1200, 2,
1510 };
1511 static const unsigned long seq400[] = {
1512 286, 318,
1513 256, 260, 266, 270, 275, 280, 290, 398,
1514 286, 310,
1515 };
1516
1517 unsigned long index;
1518
1519 MA_STATE(mas, mt, 0, 0);
1520
1521 if (MAPLE_32BIT)
1522 seq100 = seq100_32;
1523 else
1524 seq100 = seq100_64;
1525
1526 index = seq100[0];
1527 mas_set(&mas, index);
1528 MT_BUG_ON(mt, !mtree_empty(mt));
1529 check_seq(mt, singletons, false); /* create 100 singletons. */
1530
1531 mt_set_non_kernel(1);
1532 mtree_test_erase(mt, seq100[2]);
1533 check_load(mt, seq100[2], NULL);
1534 mtree_test_erase(mt, seq100[1]);
1535 check_load(mt, seq100[1], NULL);
1536
1537 rcu_read_lock();
1538 entry = mas_find(&mas, ULONG_MAX);
1539 MT_BUG_ON(mt, entry != xa_mk_value(index));
1540 mn1 = mas.node;
1541 mas_next(&mas, ULONG_MAX);
1542 entry = mas_next(&mas, ULONG_MAX);
1543 MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
1544 mn2 = mas.node;
1545 MT_BUG_ON(mt, mn1 == mn2); /* test the test. */
1546
1547 /*
1548 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
1549 * seq100[4]. Search for the gap.
1550 */
1551 mt_set_non_kernel(1);
1552 mas_reset(&mas);
1553 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
1554 seq100[5]));
1555 MT_BUG_ON(mt, mas.index != index + 1);
1556 rcu_read_unlock();
1557
1558 mtree_test_erase(mt, seq100[6]);
1559 check_load(mt, seq100[6], NULL);
1560 mtree_test_erase(mt, seq100[7]);
1561 check_load(mt, seq100[7], NULL);
1562 mtree_test_erase(mt, seq100[8]);
1563 index = seq100[9];
1564
1565 rcu_read_lock();
1566 mas.index = index;
1567 mas.last = index;
1568 mas_reset(&mas);
1569 entry = mas_find(&mas, ULONG_MAX);
1570 MT_BUG_ON(mt, entry != xa_mk_value(index));
1571 mn1 = mas.node;
1572 entry = mas_next(&mas, ULONG_MAX);
1573 MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
1574 mas_next(&mas, ULONG_MAX); /* go to the next entry. */
1575 mn2 = mas.node;
1576 MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */
1577
1578 /*
1579 * At this point, there is a gap of 3 at seq100[6]. Find it by
1580 * searching 20 - 50 for size 3.
1581 */
1582 mas_reset(&mas);
1583 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
1584 seq100[12]));
1585 MT_BUG_ON(mt, mas.index != seq100[6]);
1586 rcu_read_unlock();
1587
1588 mt_set_non_kernel(1);
1589 mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
1590 check_load(mt, seq100[13], NULL);
1591 check_load(mt, seq100[14], xa_mk_value(seq100[14]));
1592 mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
1593 check_load(mt, seq100[13], NULL);
1594 check_load(mt, seq100[14], NULL);
1595
1596 mas_reset(&mas);
1597 rcu_read_lock();
1598 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
1599 seq100[17]));
1600 MT_BUG_ON(mt, mas.index != seq100[13]);
1601 mt_validate(mt);
1602 rcu_read_unlock();
1603
1604 /*
1605 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
1606 * gap.
1607 */
1608 mt_set_non_kernel(2);
1609 mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
1610 mtree_test_erase(mt, seq100[15]);
1611 mas_reset(&mas);
1612 rcu_read_lock();
1613 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
1614 seq100[20]));
1615 rcu_read_unlock();
1616 MT_BUG_ON(mt, mas.index != seq100[18]);
1617 mt_validate(mt);
1618 mtree_destroy(mt);
1619
1620 /* seq 2000 tests are for multi-level tree gaps */
1621 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1622 check_seq(mt, 2000, false);
1623 mt_set_non_kernel(1);
1624 mtree_test_erase(mt, seq2000[0]);
1625 mtree_test_erase(mt, seq2000[1]);
1626
1627 mt_set_non_kernel(2);
1628 mas_reset(&mas);
1629 rcu_read_lock();
1630 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
1631 seq2000[4]));
1632 MT_BUG_ON(mt, mas.index != seq2000[1]);
1633 rcu_read_unlock();
1634 mt_validate(mt);
1635 mtree_destroy(mt);
1636
1637 /* seq 400 tests rebalancing over two levels. */
1638 mt_set_non_kernel(99);
1639 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1640 check_seq(mt, 400, false);
1641 mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
1642 mt_set_non_kernel(0);
1643 mtree_destroy(mt);
1644
1645 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1646 check_seq(mt, 400, false);
1647 mt_set_non_kernel(50);
1648 mtree_test_store_range(mt, seq400[2], seq400[9],
1649 xa_mk_value(seq400[2]));
1650 mtree_test_store_range(mt, seq400[3], seq400[9],
1651 xa_mk_value(seq400[3]));
1652 mtree_test_store_range(mt, seq400[4], seq400[9],
1653 xa_mk_value(seq400[4]));
1654 mtree_test_store_range(mt, seq400[5], seq400[9],
1655 xa_mk_value(seq400[5]));
1656 mtree_test_store_range(mt, seq400[0], seq400[9],
1657 xa_mk_value(seq400[0]));
1658 mtree_test_store_range(mt, seq400[6], seq400[9],
1659 xa_mk_value(seq400[6]));
1660 mtree_test_store_range(mt, seq400[7], seq400[9],
1661 xa_mk_value(seq400[7]));
1662 mtree_test_store_range(mt, seq400[8], seq400[9],
1663 xa_mk_value(seq400[8]));
1664 mtree_test_store_range(mt, seq400[10], seq400[11],
1665 xa_mk_value(seq400[10]));
1666 mt_validate(mt);
1667 mt_set_non_kernel(0);
1668 mtree_destroy(mt);
1669}
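/*
 * check_node_overwrite() - build a large tree of ranges, then store NULL over
 * a span wide enough to replace whole nodes and validate the result.
 */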
1670static noinline void __init check_node_overwrite(struct maple_tree *mt)
1671{
1672 int i, max = 4000;
1673
1674 for (i = 0; i < max; i++)
1675 mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));
1676
1677 mtree_test_store_range(mt, 319951, 367950, NULL);
1678 /*mt_dump(mt, mt_dump_dec); */
1679 mt_validate(mt);
1680}
1681
1682#if defined(BENCH_SLOT_STORE)
1683static noinline void __init bench_slot_store(struct maple_tree *mt)
1684{
1685 int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
1686
1687 for (i = 0; i < max; i += 10)
1688 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1689
1690 for (i = 0; i < count; i++) {
1691 mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1692 mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1693 GFP_KERNEL);
1694 }
1695}
1696#endif
1697
1698#if defined(BENCH_NODE_STORE)
1699static noinline void __init bench_node_store(struct maple_tree *mt)
1700{
1701 int i, overwrite = 76, max = 240, count = 20000000;
1702
1703 for (i = 0; i < max; i += 10)
1704 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1705
1706 for (i = 0; i < count; i++) {
1707 mtree_store_range(mt, overwrite, overwrite + 15,
1708 xa_mk_value(overwrite), GFP_KERNEL);
1709
1710 overwrite += 5;
1711 if (overwrite >= 135)
1712 overwrite = 76;
1713 }
1714}
1715#endif
1716
1717#if defined(BENCH_AWALK)
1718static noinline void __init bench_awalk(struct maple_tree *mt)
1719{
1720 int i, max = 2500, count = 50000000;
1721 MA_STATE(mas, mt, 1470, 1470);
1722
1723 for (i = 0; i < max; i += 10)
1724 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1725
1726 mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
1727
1728 for (i = 0; i < count; i++) {
1729 mas_empty_area_rev(&mas, 0, 2000, 10);
1730 mas_reset(&mas);
1731 }
1732}
1733#endif
1734#if defined(BENCH_WALK)
1735static noinline void __init bench_walk(struct maple_tree *mt)
1736{
1737 int i, max = 2500, count = 550000000;
1738 MA_STATE(mas, mt, 1470, 1470);
1739
1740 for (i = 0; i < max; i += 10)
1741 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1742
1743 for (i = 0; i < count; i++) {
1744 mas_walk(&mas);
1745 mas_reset(&mas);
1746 }
1747
1748}
1749#endif
1750
1751#if defined(BENCH_MT_FOR_EACH)
1752static noinline void __init bench_mt_for_each(struct maple_tree *mt)
1753{
1754 int i, count = 1000000;
1755 unsigned long max = 2500, index = 0;
1756 void *entry;
1757
1758 for (i = 0; i < max; i += 5)
1759 mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1760
1761 for (i = 0; i < count; i++) {
1762 unsigned long j = 0;
1763
1764 mt_for_each(mt, entry, index, max) {
1765 MT_BUG_ON(mt, entry != xa_mk_value(j));
1766 j += 5;
1767 }
1768
1769 index = 0;
1770 }
1771
1772}
1773#endif
1774
1775#if defined(BENCH_MAS_FOR_EACH)
1776static noinline void __init bench_mas_for_each(struct maple_tree *mt)
1777{
1778 int i, count = 1000000;
1779 unsigned long max = 2500;
1780 void *entry;
1781 MA_STATE(mas, mt, 0, 0);
1782
1783 for (i = 0; i < max; i += 5) {
1784 int gap = 4;
1785
1786 if (i % 30 == 0)
1787 gap = 3;
1788 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1789 }
1790
1791 rcu_read_lock();
1792 for (i = 0; i < count; i++) {
1793 unsigned long j = 0;
1794
1795 mas_for_each(&mas, entry, max) {
1796 MT_BUG_ON(mt, entry != xa_mk_value(j));
1797 j += 5;
1798 }
1799 mas_set(&mas, 0);
1800 }
1801 rcu_read_unlock();
1802
1803}
1804#endif
1805#if defined(BENCH_MAS_PREV)
1806static noinline void __init bench_mas_prev(struct maple_tree *mt)
1807{
1808 int i, count = 1000000;
1809 unsigned long max = 2500;
1810 void *entry;
1811 MA_STATE(mas, mt, 0, 0);
1812
1813 for (i = 0; i < max; i += 5) {
1814 int gap = 4;
1815
1816 if (i % 30 == 0)
1817 gap = 3;
1818 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1819 }
1820
1821 rcu_read_lock();
1822 for (i = 0; i < count; i++) {
1823 unsigned long j = 2495;
1824
1825 mas_set(&mas, ULONG_MAX);
1826 while ((entry = mas_prev(&mas, 0)) != NULL) {
1827 MT_BUG_ON(mt, entry != xa_mk_value(j));
1828 j -= 5;
1829 }
1830 }
1831 rcu_read_unlock();
1832
1833}
1834#endif
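/*
 * The fork simulation below copies an existing tree into a fresh one:
 * mas_expected_entries() preallocates nodes for the expected number of
 * stores, so each mas_store() in the copy loop should not need to allocate.
 */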
1835/* check_forking - simulate the kernel forking sequence with the tree. */
1836static noinline void __init check_forking(struct maple_tree *mt)
1837{
1838
1839 struct maple_tree newmt;
1840 int i, nr_entries = 134;
1841 void *val;
1842 MA_STATE(mas, mt, 0, 0);
1843 MA_STATE(newmas, mt, 0, 0);
1844
1845 for (i = 0; i <= nr_entries; i++)
1846 mtree_store_range(mt, i*10, i*10 + 5,
1847 xa_mk_value(i), GFP_KERNEL);
1848
1849 mt_set_non_kernel(99999);
1850 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1851 newmas.tree = &newmt;
1852 mas_reset(&newmas);
1853 mas_reset(&mas);
1854 mas_lock(&newmas);
1855 mas.index = 0;
1856 mas.last = 0;
1857 if (mas_expected_entries(&newmas, nr_entries)) {
1858 pr_err("OOM!");
1859 BUG_ON(1);
1860 }
1861 rcu_read_lock();
1862 mas_for_each(&mas, val, ULONG_MAX) {
1863 newmas.index = mas.index;
1864 newmas.last = mas.last;
1865 mas_store(&newmas, val);
e15e06a8 1866 }
120b1162
LH
1867 rcu_read_unlock();
1868 mas_destroy(&newmas);
1869 mas_unlock(&newmas);
1870 mt_validate(&newmt);
1871 mt_set_non_kernel(0);
1872 mtree_destroy(&newmt);
e15e06a8
LH
1873}
1874
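/*
 * Illustrative sketch only, not part of the test above: the duplication
 * pattern exercised by check_forking(), reduced to its essentials. The
 * function name, the "newmt" destination parameter and the -ENOMEM return
 * are assumptions of this sketch; the destination tree is expected to have
 * been initialised by the caller (e.g. with mt_init_flags()). The point of
 * the pattern is that mas_expected_entries() preallocates nodes up front so
 * mas_store() never has to allocate while the copy loop holds the lock.
 */
static int __maybe_unused forking_dup_sketch(struct maple_tree *mt,
		struct maple_tree *newmt, unsigned long nr_entries)
{
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, newmt, 0, 0);

	mas_lock(&newmas);
	/* Preallocate enough nodes so the copy loop cannot fail mid-way */
	if (mas_expected_entries(&newmas, nr_entries)) {
		mas_unlock(&newmas);
		return -ENOMEM;
	}

	rcu_read_lock();
	mas_for_each(&mas, val, ULONG_MAX) {
		/* Copy each occupied range verbatim into the new tree */
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, val);
	}
	rcu_read_unlock();

	mas_destroy(&newmas);	/* Return any unused preallocated nodes */
	mas_unlock(&newmas);
	return 0;
}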
eaf9790d 1875static noinline void __init check_iteration(struct maple_tree *mt)
5159d64b
LH
1876{
1877 int i, nr_entries = 125;
1878 void *val;
1879 MA_STATE(mas, mt, 0, 0);
1880
1881 for (i = 0; i <= nr_entries; i++)
1882 mtree_store_range(mt, i * 10, i * 10 + 9,
1883 xa_mk_value(i), GFP_KERNEL);
1884
1885 mt_set_non_kernel(99999);
1886
1887 i = 0;
1888 mas_lock(&mas);
1889 mas_for_each(&mas, val, 925) {
1890 MT_BUG_ON(mt, mas.index != i * 10);
1891 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1892 /* Overwrite end of entry 92 */
1893 if (i == 92) {
1894 mas.index = 925;
1895 mas.last = 929;
1896 mas_store(&mas, val);
1897 }
1898 i++;
1899 }
1900 /* Ensure mas_find() gets the next value */
1901 val = mas_find(&mas, ULONG_MAX);
1902 MT_BUG_ON(mt, val != xa_mk_value(i));
1903
1904 mas_set(&mas, 0);
1905 i = 0;
1906 mas_for_each(&mas, val, 785) {
1907 MT_BUG_ON(mt, mas.index != i * 10);
1908 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1909 /* Overwrite start of entry 78 */
1910 if (i == 78) {
1911 mas.index = 780;
1912 mas.last = 785;
1913 mas_store(&mas, val);
1914 } else {
1915 i++;
1916 }
1917 }
1918 val = mas_find(&mas, ULONG_MAX);
1919 MT_BUG_ON(mt, val != xa_mk_value(i));
1920
1921 mas_set(&mas, 0);
1922 i = 0;
1923 mas_for_each(&mas, val, 765) {
1924 MT_BUG_ON(mt, mas.index != i * 10);
1925 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1926 /* Overwrite end of entry 76 and advance to the end */
1927 if (i == 76) {
1928 mas.index = 760;
1929 mas.last = 765;
1930 mas_store(&mas, val);
5159d64b
LH
1931 }
1932 i++;
1933 }
1934 /* Make sure the next find returns the range after 765, i.e. 766-769 */
1935 val = mas_find(&mas, ULONG_MAX);
1936 MT_BUG_ON(mt, val != xa_mk_value(76));
1937 mas_unlock(&mas);
1938 mas_destroy(&mas);
1939 mt_set_non_kernel(0);
1940}
1941
eaf9790d 1942static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
e15e06a8 1943{
e15e06a8 1944
120b1162
LH
1945 struct maple_tree newmt;
1946 int i, nr_entries = 135;
1947 void *val;
1948 MA_STATE(mas, mt, 0, 0);
1949 MA_STATE(newmas, mt, 0, 0);
e15e06a8 1950
120b1162
LH
1951 for (i = 0; i <= nr_entries; i++)
1952 mtree_store_range(mt, i*10, i*10 + 5,
1953 xa_mk_value(i), GFP_KERNEL);
e15e06a8 1954
120b1162
LH
1955 mt_set_non_kernel(99999);
1956 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1957 newmas.tree = &newmt;
1958 rcu_read_lock();
1959 mas_lock(&newmas);
1960 mas_reset(&newmas);
1961 mas_set(&mas, 0);
1962 mas_for_each(&mas, val, ULONG_MAX) {
1963 newmas.index = mas.index;
1964 newmas.last = mas.last;
1965 mas_store_gfp(&newmas, val, GFP_KERNEL);
e15e06a8 1966 }
120b1162
LH
1967 mas_unlock(&newmas);
1968 rcu_read_unlock();
1969 mt_validate(&newmt);
1970 mt_set_non_kernel(0);
1971 mtree_destroy(&newmt);
e15e06a8
LH
1972}
1973
120b1162 1974#if defined(BENCH_FORK)
eaf9790d 1975static noinline void __init bench_forking(struct maple_tree *mt)
e15e06a8
LH
1976{
1977
120b1162
LH
1978 struct maple_tree newmt;
1979 int i, nr_entries = 134, nr_fork = 80000;
1980 void *val;
1981 MA_STATE(mas, mt, 0, 0);
1982 MA_STATE(newmas, mt, 0, 0);
e15e06a8 1983
120b1162
LH
1984 for (i = 0; i <= nr_entries; i++)
1985 mtree_store_range(mt, i*10, i*10 + 5,
1986 xa_mk_value(i), GFP_KERNEL);
e15e06a8 1987
120b1162
LH
1988 for (i = 0; i < nr_fork; i++) {
1989 mt_set_non_kernel(99999);
1990 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1991 newmas.tree = &newmt;
1992 mas_reset(&newmas);
1993 mas_reset(&mas);
1994 mas.index = 0;
1995 mas.last = 0;
1996 rcu_read_lock();
1997 mas_lock(&newmas);
1998 if (mas_expected_entries(&newmas, nr_entries)) {
1999 printk("OOM!");
2000 BUG_ON(1);
2001 }
2002 mas_for_each(&mas, val, ULONG_MAX) {
2003 newmas.index = mas.index;
2004 newmas.last = mas.last;
2005 mas_store(&newmas, val);
e15e06a8 2006 }
120b1162
LH
2007 mas_destroy(&newmas);
2008 mas_unlock(&newmas);
2009 rcu_read_unlock();
2010 mt_validate(&newmt);
2011 mt_set_non_kernel(0);
2012 mtree_destroy(&newmt);
e15e06a8 2013 }
e15e06a8 2014}
120b1162 2015#endif
e15e06a8 2016
eaf9790d 2017static noinline void __init next_prev_test(struct maple_tree *mt)
e15e06a8 2018{
120b1162
LH
2019 int i, nr_entries;
2020 void *val;
2021 MA_STATE(mas, mt, 0, 0);
2022 struct maple_enode *mn;
eaf9790d
LH
2023 static const unsigned long *level2;
2024 static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
2025 725};
2026 static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
2027 1760, 1765};
7a93c71a 2028 unsigned long last_index;
e15e06a8 2029
120b1162
LH
2030 if (MAPLE_32BIT) {
2031 nr_entries = 500;
2032 level2 = level2_32;
7a93c71a 2033 last_index = 0x138e;
120b1162
LH
2034 } else {
2035 nr_entries = 200;
2036 level2 = level2_64;
7a93c71a 2037 last_index = 0x7d6;
120b1162 2038 }
e15e06a8 2039
120b1162
LH
2040 for (i = 0; i <= nr_entries; i++)
2041 mtree_store_range(mt, i*10, i*10 + 5,
2042 xa_mk_value(i), GFP_KERNEL);
e15e06a8 2043
120b1162
LH
2044 mas_lock(&mas);
2045 for (i = 0; i <= nr_entries / 2; i++) {
2046 mas_next(&mas, 1000);
2047 if (mas_is_none(&mas))
2048 break;
e15e06a8 2049
e15e06a8 2050 }
120b1162
LH
2051 mas_reset(&mas);
2052 mas_set(&mas, 0);
2053 i = 0;
2054 mas_for_each(&mas, val, 1000) {
2055 i++;
e15e06a8
LH
2056 }
2057
120b1162
LH
2058 mas_reset(&mas);
2059 mas_set(&mas, 0);
2060 i = 0;
2061 mas_for_each(&mas, val, 1000) {
2062 mas_pause(&mas);
2063 i++;
2064 }
e15e06a8 2065
120b1162
LH
2066 /*
2067 * 680 - 685 = 0x61a00001930c
2068 * 686 - 689 = NULL;
2069 * 690 - 695 = 0x61a00001930c
2070 * Check simple next/prev
2071 */
2072 mas_set(&mas, 686);
2073 val = mas_walk(&mas);
2074 MT_BUG_ON(mt, val != NULL);
e15e06a8 2075
120b1162
LH
2076 val = mas_next(&mas, 1000);
2077 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2078 MT_BUG_ON(mt, mas.index != 690);
2079 MT_BUG_ON(mt, mas.last != 695);
e15e06a8 2080
120b1162
LH
2081 val = mas_prev(&mas, 0);
2082 MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
2083 MT_BUG_ON(mt, mas.index != 680);
2084 MT_BUG_ON(mt, mas.last != 685);
e15e06a8 2085
120b1162
LH
2086 val = mas_next(&mas, 1000);
2087 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2088 MT_BUG_ON(mt, mas.index != 690);
2089 MT_BUG_ON(mt, mas.last != 695);
e15e06a8 2090
120b1162
LH
2091 val = mas_next(&mas, 1000);
2092 MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
2093 MT_BUG_ON(mt, mas.index != 700);
2094 MT_BUG_ON(mt, mas.last != 705);
e15e06a8 2095
120b1162
LH
2096 /* Check across node boundaries of the tree */
2097 mas_set(&mas, 70);
2098 val = mas_walk(&mas);
2099 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2100 MT_BUG_ON(mt, mas.index != 70);
2101 MT_BUG_ON(mt, mas.last != 75);
e15e06a8 2102
120b1162
LH
2103 val = mas_next(&mas, 1000);
2104 MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
2105 MT_BUG_ON(mt, mas.index != 80);
2106 MT_BUG_ON(mt, mas.last != 85);
e15e06a8 2107
120b1162
LH
2108 val = mas_prev(&mas, 70);
2109 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2110 MT_BUG_ON(mt, mas.index != 70);
2111 MT_BUG_ON(mt, mas.last != 75);
e15e06a8 2112
120b1162
LH
2113 /* Check across two levels of the tree */
2114 mas_reset(&mas);
2115 mas_set(&mas, level2[0]);
2116 val = mas_walk(&mas);
2117 MT_BUG_ON(mt, val != NULL);
2118 val = mas_next(&mas, level2[1]);
2119 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2120 MT_BUG_ON(mt, mas.index != level2[2]);
2121 MT_BUG_ON(mt, mas.last != level2[3]);
2122 mn = mas.node;
e15e06a8 2123
120b1162
LH
2124 val = mas_next(&mas, level2[1]);
2125 MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
2126 MT_BUG_ON(mt, mas.index != level2[4]);
2127 MT_BUG_ON(mt, mas.last != level2[5]);
2128 MT_BUG_ON(mt, mn == mas.node);
e15e06a8 2129
120b1162
LH
2130 val = mas_prev(&mas, 0);
2131 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2132 MT_BUG_ON(mt, mas.index != level2[2]);
2133 MT_BUG_ON(mt, mas.last != level2[3]);
e15e06a8 2134
120b1162
LH
2135 /* Check running off the end and back on */
2136 mas_set(&mas, nr_entries * 10);
2137 val = mas_walk(&mas);
2138 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2139 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2140 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
e15e06a8 2141
120b1162
LH
2142 val = mas_next(&mas, ULONG_MAX);
2143 MT_BUG_ON(mt, val != NULL);
7a93c71a 2144 MT_BUG_ON(mt, mas.index != last_index);
120b1162 2145 MT_BUG_ON(mt, mas.last != ULONG_MAX);
e15e06a8 2146
120b1162
LH
2147 val = mas_prev(&mas, 0);
2148 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2149 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2150 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
e15e06a8 2151
120b1162
LH
2152 /* Check running off the start and back on */
2153 mas_reset(&mas);
2154 mas_set(&mas, 10);
2155 val = mas_walk(&mas);
2156 MT_BUG_ON(mt, val != xa_mk_value(1));
2157 MT_BUG_ON(mt, mas.index != 10);
2158 MT_BUG_ON(mt, mas.last != 15);
e15e06a8 2159
120b1162
LH
2160 val = mas_prev(&mas, 0);
2161 MT_BUG_ON(mt, val != xa_mk_value(0));
2162 MT_BUG_ON(mt, mas.index != 0);
2163 MT_BUG_ON(mt, mas.last != 5);
e15e06a8 2164
120b1162
LH
2165 val = mas_prev(&mas, 0);
2166 MT_BUG_ON(mt, val != NULL);
2167 MT_BUG_ON(mt, mas.index != 0);
eb2e817f 2168 MT_BUG_ON(mt, mas.last != 5);
a8091f03 2169 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
e15e06a8 2170
120b1162
LH
2171 mas.index = 0;
2172 mas.last = 5;
2173 mas_store(&mas, NULL);
2174 mas_reset(&mas);
2175 mas_set(&mas, 10);
2176 mas_walk(&mas);
e15e06a8 2177
120b1162
LH
2178 val = mas_prev(&mas, 0);
2179 MT_BUG_ON(mt, val != NULL);
2180 MT_BUG_ON(mt, mas.index != 0);
eb2e817f 2181 MT_BUG_ON(mt, mas.last != 9);
120b1162 2182 mas_unlock(&mas);
e15e06a8 2183
120b1162 2184 mtree_destroy(mt);
e15e06a8 2185
120b1162
LH
2186 mt_init(mt);
2187 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2188 mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
e15e06a8 2189 rcu_read_lock();
120b1162
LH
2190 mas_set(&mas, 5);
2191 val = mas_prev(&mas, 4);
2192 MT_BUG_ON(mt, val != NULL);
e15e06a8 2193 rcu_read_unlock();
e15e06a8
LH
2194}
2195
e15e06a8 2196
e15e06a8
LH
2197
2198/* Test spanning writes that require balancing right sibling or right cousin */
eaf9790d 2199static noinline void __init check_spanning_relatives(struct maple_tree *mt)
e15e06a8
LH
2200{
2201
2202 unsigned long i, nr_entries = 1000;
2203
2204 for (i = 0; i <= nr_entries; i++)
2205 mtree_store_range(mt, i*10, i*10 + 5,
2206 xa_mk_value(i), GFP_KERNEL);
2207
2208
2209 mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2210}
2211
eaf9790d 2212static noinline void __init check_fuzzer(struct maple_tree *mt)
e15e06a8
LH
2213{
2214 /*
2215 * 1. Causes a spanning rebalance of a single root node.
2216 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2217 * entire right side is consumed.
2218 */
2219 mtree_test_insert(mt, 88, (void *)0xb1);
2220 mtree_test_insert(mt, 84, (void *)0xa9);
2221 mtree_test_insert(mt, 2, (void *)0x5);
2222 mtree_test_insert(mt, 4, (void *)0x9);
2223 mtree_test_insert(mt, 14, (void *)0x1d);
2224 mtree_test_insert(mt, 7, (void *)0xf);
2225 mtree_test_insert(mt, 12, (void *)0x19);
2226 mtree_test_insert(mt, 18, (void *)0x25);
2227 mtree_test_store_range(mt, 8, 18, (void *)0x11);
2228 mtree_destroy(mt);
2229
2230
2231 /*
2232 * 2. Cause a spanning rebalance of two nodes in root.
2233 * Fixed by setting mast->r->max correctly.
2234 */
2235 mt_init_flags(mt, 0);
2236 mtree_test_store(mt, 87, (void *)0xaf);
2237 mtree_test_store(mt, 0, (void *)0x1);
2238 mtree_test_load(mt, 4);
2239 mtree_test_insert(mt, 4, (void *)0x9);
2240 mtree_test_store(mt, 8, (void *)0x11);
2241 mtree_test_store(mt, 44, (void *)0x59);
2242 mtree_test_store(mt, 68, (void *)0x89);
2243 mtree_test_store(mt, 2, (void *)0x5);
2244 mtree_test_insert(mt, 43, (void *)0x57);
2245 mtree_test_insert(mt, 24, (void *)0x31);
2246 mtree_test_insert(mt, 844, (void *)0x699);
2247 mtree_test_store(mt, 84, (void *)0xa9);
2248 mtree_test_store(mt, 4, (void *)0x9);
2249 mtree_test_erase(mt, 4);
2250 mtree_test_load(mt, 5);
2251 mtree_test_erase(mt, 0);
2252 mtree_destroy(mt);
2253
2254 /*
2255 * 3. Cause a node overflow on copy
2256 * Fixed by using the correct check for node size in mas_wr_modify()
2257 * Also discovered issue with metadata setting.
2258 */
2259 mt_init_flags(mt, 0);
120b1162 2260 mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
e15e06a8
LH
2261 mtree_test_store(mt, 4, (void *)0x9);
2262 mtree_test_erase(mt, 5);
2263 mtree_test_erase(mt, 0);
2264 mtree_test_erase(mt, 4);
2265 mtree_test_store(mt, 5, (void *)0xb);
2266 mtree_test_erase(mt, 5);
2267 mtree_test_store(mt, 5, (void *)0xb);
2268 mtree_test_erase(mt, 5);
2269 mtree_test_erase(mt, 4);
2270 mtree_test_store(mt, 4, (void *)0x9);
2271 mtree_test_store(mt, 444, (void *)0x379);
2272 mtree_test_store(mt, 0, (void *)0x1);
2273 mtree_test_load(mt, 0);
2274 mtree_test_store(mt, 5, (void *)0xb);
2275 mtree_test_erase(mt, 0);
2276 mtree_destroy(mt);
2277
2278 /*
2279 * 4. spanning store failure due to writing incorrect pivot value at
2280 * last slot.
2281 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
2282 *
2283 */
2284 mt_init_flags(mt, 0);
2285 mtree_test_insert(mt, 261, (void *)0x20b);
2286 mtree_test_store(mt, 516, (void *)0x409);
2287 mtree_test_store(mt, 6, (void *)0xd);
2288 mtree_test_insert(mt, 5, (void *)0xb);
2289 mtree_test_insert(mt, 1256, (void *)0x9d1);
2290 mtree_test_store(mt, 4, (void *)0x9);
2291 mtree_test_erase(mt, 1);
2292 mtree_test_store(mt, 56, (void *)0x71);
2293 mtree_test_insert(mt, 1, (void *)0x3);
2294 mtree_test_store(mt, 24, (void *)0x31);
2295 mtree_test_erase(mt, 1);
2296 mtree_test_insert(mt, 2263, (void *)0x11af);
2297 mtree_test_insert(mt, 446, (void *)0x37d);
2298 mtree_test_store_range(mt, 6, 45, (void *)0xd);
2299 mtree_test_store_range(mt, 3, 446, (void *)0x7);
2300 mtree_destroy(mt);
2301
2302 /*
2303 * 5. mas_wr_extend_null() may overflow slots.
2304 * Fix by checking against wr_mas->node_end.
2305 */
2306 mt_init_flags(mt, 0);
2307 mtree_test_store(mt, 48, (void *)0x61);
2308 mtree_test_store(mt, 3, (void *)0x7);
2309 mtree_test_load(mt, 0);
2310 mtree_test_store(mt, 88, (void *)0xb1);
2311 mtree_test_store(mt, 81, (void *)0xa3);
2312 mtree_test_insert(mt, 0, (void *)0x1);
2313 mtree_test_insert(mt, 8, (void *)0x11);
2314 mtree_test_insert(mt, 4, (void *)0x9);
2315 mtree_test_insert(mt, 2480, (void *)0x1361);
120b1162 2316 mtree_test_insert(mt, ULONG_MAX,
e15e06a8 2317 (void *)0xffffffffffffffff);
120b1162 2318 mtree_test_erase(mt, ULONG_MAX);
e15e06a8
LH
2319 mtree_destroy(mt);
2320
2321 /*
2322 * 6. When reusing a node with an implied pivot and the node is
2323 * shrinking, old data would be left in the implied slot
 2324 * Fixed by checking the last pivot against mas->max and clearing
2325 * accordingly. This only affected the left-most node as that node is
2326 * the only one allowed to end in NULL.
2327 */
2328 mt_init_flags(mt, 0);
2329 mtree_test_erase(mt, 3);
2330 mtree_test_insert(mt, 22, (void *)0x2d);
2331 mtree_test_insert(mt, 15, (void *)0x1f);
2332 mtree_test_load(mt, 2);
2333 mtree_test_insert(mt, 1, (void *)0x3);
2334 mtree_test_insert(mt, 1, (void *)0x3);
2335 mtree_test_insert(mt, 5, (void *)0xb);
2336 mtree_test_erase(mt, 1);
2337 mtree_test_insert(mt, 1, (void *)0x3);
2338 mtree_test_insert(mt, 4, (void *)0x9);
2339 mtree_test_insert(mt, 1, (void *)0x3);
2340 mtree_test_erase(mt, 1);
2341 mtree_test_insert(mt, 2, (void *)0x5);
2342 mtree_test_insert(mt, 1, (void *)0x3);
2343 mtree_test_erase(mt, 3);
2344 mtree_test_insert(mt, 22, (void *)0x2d);
2345 mtree_test_insert(mt, 15, (void *)0x1f);
2346 mtree_test_insert(mt, 2, (void *)0x5);
2347 mtree_test_insert(mt, 1, (void *)0x3);
2348 mtree_test_insert(mt, 8, (void *)0x11);
2349 mtree_test_load(mt, 2);
2350 mtree_test_insert(mt, 1, (void *)0x3);
2351 mtree_test_insert(mt, 1, (void *)0x3);
2352 mtree_test_store(mt, 1, (void *)0x3);
2353 mtree_test_insert(mt, 5, (void *)0xb);
2354 mtree_test_erase(mt, 1);
2355 mtree_test_insert(mt, 1, (void *)0x3);
2356 mtree_test_insert(mt, 4, (void *)0x9);
2357 mtree_test_insert(mt, 1, (void *)0x3);
2358 mtree_test_erase(mt, 1);
2359 mtree_test_insert(mt, 2, (void *)0x5);
2360 mtree_test_insert(mt, 1, (void *)0x3);
2361 mtree_test_erase(mt, 3);
2362 mtree_test_insert(mt, 22, (void *)0x2d);
2363 mtree_test_insert(mt, 15, (void *)0x1f);
2364 mtree_test_insert(mt, 2, (void *)0x5);
2365 mtree_test_insert(mt, 1, (void *)0x3);
2366 mtree_test_insert(mt, 8, (void *)0x11);
2367 mtree_test_insert(mt, 12, (void *)0x19);
2368 mtree_test_erase(mt, 1);
2369 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2370 mtree_test_erase(mt, 62);
2371 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2372 mtree_test_insert(mt, 11, (void *)0x17);
2373 mtree_test_insert(mt, 3, (void *)0x7);
2374 mtree_test_insert(mt, 3, (void *)0x7);
2375 mtree_test_store(mt, 62, (void *)0x7d);
2376 mtree_test_erase(mt, 62);
2377 mtree_test_store_range(mt, 1, 15, (void *)0x3);
2378 mtree_test_erase(mt, 1);
2379 mtree_test_insert(mt, 22, (void *)0x2d);
2380 mtree_test_insert(mt, 12, (void *)0x19);
2381 mtree_test_erase(mt, 1);
2382 mtree_test_insert(mt, 3, (void *)0x7);
2383 mtree_test_store(mt, 62, (void *)0x7d);
2384 mtree_test_erase(mt, 62);
2385 mtree_test_insert(mt, 122, (void *)0xf5);
2386 mtree_test_store(mt, 3, (void *)0x7);
2387 mtree_test_insert(mt, 0, (void *)0x1);
2388 mtree_test_store_range(mt, 0, 1, (void *)0x1);
2389 mtree_test_insert(mt, 85, (void *)0xab);
2390 mtree_test_insert(mt, 72, (void *)0x91);
2391 mtree_test_insert(mt, 81, (void *)0xa3);
2392 mtree_test_insert(mt, 726, (void *)0x5ad);
2393 mtree_test_insert(mt, 0, (void *)0x1);
2394 mtree_test_insert(mt, 1, (void *)0x3);
2395 mtree_test_store(mt, 51, (void *)0x67);
2396 mtree_test_insert(mt, 611, (void *)0x4c7);
2397 mtree_test_insert(mt, 485, (void *)0x3cb);
2398 mtree_test_insert(mt, 1, (void *)0x3);
2399 mtree_test_erase(mt, 1);
2400 mtree_test_insert(mt, 0, (void *)0x1);
2401 mtree_test_insert(mt, 1, (void *)0x3);
2402 mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2403 mtree_test_load(mt, 1);
2404 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2405 mtree_test_insert(mt, 1, (void *)0x3);
2406 mtree_test_erase(mt, 1);
2407 mtree_test_load(mt, 53);
2408 mtree_test_load(mt, 1);
2409 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2410 mtree_test_insert(mt, 222, (void *)0x1bd);
2411 mtree_test_insert(mt, 485, (void *)0x3cb);
2412 mtree_test_insert(mt, 1, (void *)0x3);
2413 mtree_test_erase(mt, 1);
2414 mtree_test_load(mt, 0);
2415 mtree_test_insert(mt, 21, (void *)0x2b);
2416 mtree_test_insert(mt, 3, (void *)0x7);
2417 mtree_test_store(mt, 621, (void *)0x4db);
2418 mtree_test_insert(mt, 0, (void *)0x1);
2419 mtree_test_erase(mt, 5);
2420 mtree_test_insert(mt, 1, (void *)0x3);
2421 mtree_test_store(mt, 62, (void *)0x7d);
2422 mtree_test_erase(mt, 62);
2423 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2424 mtree_test_insert(mt, 22, (void *)0x2d);
2425 mtree_test_insert(mt, 12, (void *)0x19);
2426 mtree_test_erase(mt, 1);
2427 mtree_test_insert(mt, 1, (void *)0x3);
2428 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2429 mtree_test_erase(mt, 62);
2430 mtree_test_erase(mt, 1);
2431 mtree_test_load(mt, 1);
2432 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2433 mtree_test_insert(mt, 1, (void *)0x3);
2434 mtree_test_erase(mt, 1);
2435 mtree_test_load(mt, 53);
2436 mtree_test_load(mt, 1);
2437 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2438 mtree_test_insert(mt, 222, (void *)0x1bd);
2439 mtree_test_insert(mt, 485, (void *)0x3cb);
2440 mtree_test_insert(mt, 1, (void *)0x3);
2441 mtree_test_erase(mt, 1);
2442 mtree_test_insert(mt, 1, (void *)0x3);
2443 mtree_test_load(mt, 0);
2444 mtree_test_load(mt, 0);
2445 mtree_destroy(mt);
2446
2447 /*
 2448 * 7. Previous fix was incomplete; fix mas_reuse_node() clearing of old
2449 * data by overwriting it first - that way metadata is of no concern.
2450 */
2451 mt_init_flags(mt, 0);
2452 mtree_test_load(mt, 1);
2453 mtree_test_insert(mt, 102, (void *)0xcd);
2454 mtree_test_erase(mt, 2);
2455 mtree_test_erase(mt, 0);
2456 mtree_test_load(mt, 0);
2457 mtree_test_insert(mt, 4, (void *)0x9);
2458 mtree_test_insert(mt, 2, (void *)0x5);
2459 mtree_test_insert(mt, 110, (void *)0xdd);
2460 mtree_test_insert(mt, 1, (void *)0x3);
2461 mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2462 mtree_test_erase(mt, 2);
2463 mtree_test_store(mt, 0, (void *)0x1);
2464 mtree_test_store(mt, 112, (void *)0xe1);
2465 mtree_test_insert(mt, 21, (void *)0x2b);
2466 mtree_test_store(mt, 1, (void *)0x3);
2467 mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2468 mtree_test_store(mt, 2, (void *)0x5);
2469 mtree_test_load(mt, 22);
2470 mtree_test_erase(mt, 2);
2471 mtree_test_store(mt, 210, (void *)0x1a5);
2472 mtree_test_store_range(mt, 0, 2, (void *)0x1);
2473 mtree_test_store(mt, 2, (void *)0x5);
2474 mtree_test_erase(mt, 2);
2475 mtree_test_erase(mt, 22);
2476 mtree_test_erase(mt, 1);
2477 mtree_test_erase(mt, 2);
2478 mtree_test_store(mt, 0, (void *)0x1);
2479 mtree_test_load(mt, 112);
2480 mtree_test_insert(mt, 2, (void *)0x5);
2481 mtree_test_erase(mt, 2);
2482 mtree_test_store(mt, 1, (void *)0x3);
2483 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2484 mtree_test_erase(mt, 0);
2485 mtree_test_erase(mt, 2);
2486 mtree_test_store(mt, 2, (void *)0x5);
2487 mtree_test_erase(mt, 0);
2488 mtree_test_erase(mt, 2);
2489 mtree_test_store(mt, 0, (void *)0x1);
2490 mtree_test_store(mt, 0, (void *)0x1);
2491 mtree_test_erase(mt, 2);
2492 mtree_test_store(mt, 2, (void *)0x5);
2493 mtree_test_erase(mt, 2);
2494 mtree_test_insert(mt, 2, (void *)0x5);
2495 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2496 mtree_test_erase(mt, 0);
2497 mtree_test_erase(mt, 2);
2498 mtree_test_store(mt, 0, (void *)0x1);
2499 mtree_test_load(mt, 112);
2500 mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2501 mtree_test_store(mt, 2, (void *)0x5);
2502 mtree_test_load(mt, 110);
2503 mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2504 mtree_test_load(mt, 2);
2505 mtree_test_store(mt, 2, (void *)0x5);
2506 mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2507 mtree_test_erase(mt, 12);
2508 mtree_test_store(mt, 2, (void *)0x5);
2509 mtree_test_load(mt, 22);
2510 mtree_destroy(mt);
2511
2512
2513 /*
2514 * 8. When rebalancing or spanning_rebalance(), the max of the new node
 2515 * may be set incorrectly to the final pivot and not the right node's max.
 2516 * Fix by setting the left max to the original right max if the entire
 2517 * node is consumed.
2518 */
2519 mt_init_flags(mt, 0);
2520 mtree_test_store(mt, 6, (void *)0xd);
2521 mtree_test_store(mt, 67, (void *)0x87);
2522 mtree_test_insert(mt, 15, (void *)0x1f);
2523 mtree_test_insert(mt, 6716, (void *)0x3479);
2524 mtree_test_store(mt, 61, (void *)0x7b);
2525 mtree_test_insert(mt, 13, (void *)0x1b);
2526 mtree_test_store(mt, 8, (void *)0x11);
2527 mtree_test_insert(mt, 1, (void *)0x3);
2528 mtree_test_load(mt, 0);
2529 mtree_test_erase(mt, 67167);
2530 mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2531 mtree_test_insert(mt, 6, (void *)0xd);
2532 mtree_test_erase(mt, 67);
2533 mtree_test_insert(mt, 1, (void *)0x3);
2534 mtree_test_erase(mt, 667167);
2535 mtree_test_insert(mt, 6, (void *)0xd);
2536 mtree_test_store(mt, 67, (void *)0x87);
2537 mtree_test_insert(mt, 5, (void *)0xb);
2538 mtree_test_erase(mt, 1);
2539 mtree_test_insert(mt, 6, (void *)0xd);
2540 mtree_test_erase(mt, 67);
2541 mtree_test_insert(mt, 15, (void *)0x1f);
2542 mtree_test_insert(mt, 67167, (void *)0x20cbf);
2543 mtree_test_insert(mt, 1, (void *)0x3);
2544 mtree_test_load(mt, 7);
2545 mtree_test_insert(mt, 16, (void *)0x21);
2546 mtree_test_insert(mt, 36, (void *)0x49);
2547 mtree_test_store(mt, 67, (void *)0x87);
2548 mtree_test_store(mt, 6, (void *)0xd);
2549 mtree_test_insert(mt, 367, (void *)0x2df);
2550 mtree_test_insert(mt, 115, (void *)0xe7);
2551 mtree_test_store(mt, 0, (void *)0x1);
2552 mtree_test_store_range(mt, 1, 3, (void *)0x3);
2553 mtree_test_store(mt, 1, (void *)0x3);
2554 mtree_test_erase(mt, 67167);
2555 mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2556 mtree_test_store(mt, 1, (void *)0x3);
2557 mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2558 mtree_test_load(mt, 67);
2559 mtree_test_insert(mt, 1, (void *)0x3);
2560 mtree_test_erase(mt, 67167);
2561 mtree_destroy(mt);
2562
2563 /*
2564 * 9. spanning store to the end of data caused an invalid metadata
 2565 * length which eventually resulted in a crash.
2566 * Fix by checking if there is a value in pivot before incrementing the
2567 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
 2568 * abstract the two locations where this happens into a function called
2569 * mas_leaf_set_meta().
2570 */
2571 mt_init_flags(mt, 0);
2572 mtree_test_insert(mt, 21, (void *)0x2b);
2573 mtree_test_insert(mt, 12, (void *)0x19);
2574 mtree_test_insert(mt, 6, (void *)0xd);
2575 mtree_test_insert(mt, 8, (void *)0x11);
2576 mtree_test_insert(mt, 2, (void *)0x5);
2577 mtree_test_insert(mt, 91, (void *)0xb7);
2578 mtree_test_insert(mt, 18, (void *)0x25);
2579 mtree_test_insert(mt, 81, (void *)0xa3);
2580 mtree_test_store_range(mt, 0, 128, (void *)0x1);
2581 mtree_test_store(mt, 1, (void *)0x3);
2582 mtree_test_erase(mt, 8);
2583 mtree_test_insert(mt, 11, (void *)0x17);
2584 mtree_test_insert(mt, 8, (void *)0x11);
2585 mtree_test_insert(mt, 21, (void *)0x2b);
2586 mtree_test_insert(mt, 2, (void *)0x5);
120b1162
LH
2587 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2588 mtree_test_erase(mt, ULONG_MAX - 10);
e15e06a8
LH
2589 mtree_test_store_range(mt, 0, 281, (void *)0x1);
2590 mtree_test_erase(mt, 2);
2591 mtree_test_insert(mt, 1211, (void *)0x977);
2592 mtree_test_insert(mt, 111, (void *)0xdf);
2593 mtree_test_insert(mt, 13, (void *)0x1b);
2594 mtree_test_insert(mt, 211, (void *)0x1a7);
2595 mtree_test_insert(mt, 11, (void *)0x17);
2596 mtree_test_insert(mt, 5, (void *)0xb);
2597 mtree_test_insert(mt, 1218, (void *)0x985);
2598 mtree_test_insert(mt, 61, (void *)0x7b);
2599 mtree_test_store(mt, 1, (void *)0x3);
2600 mtree_test_insert(mt, 121, (void *)0xf3);
2601 mtree_test_insert(mt, 8, (void *)0x11);
2602 mtree_test_insert(mt, 21, (void *)0x2b);
2603 mtree_test_insert(mt, 2, (void *)0x5);
120b1162
LH
2604 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2605 mtree_test_erase(mt, ULONG_MAX - 10);
e15e06a8 2606}
120b1162
LH
2607
2608/* duplicate the tree with a specific gap */
eaf9790d 2609static noinline void __init check_dup_gaps(struct maple_tree *mt,
e15e06a8
LH
2610 unsigned long nr_entries, bool zero_start,
2611 unsigned long gap)
2612{
2613 unsigned long i = 0;
2614 struct maple_tree newmt;
2615 int ret;
2616 void *tmp;
2617 MA_STATE(mas, mt, 0, 0);
2618 MA_STATE(newmas, &newmt, 0, 0);
2619
e15e06a8
LH
2620 if (!zero_start)
2621 i = 1;
2622
2623 mt_zero_nr_tallocated();
2624 for (; i <= nr_entries; i++)
2625 mtree_store_range(mt, i*10, (i+1)*10 - gap,
2626 xa_mk_value(i), GFP_KERNEL);
2627
2628 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
2629 mt_set_non_kernel(99999);
120b1162 2630 mas_lock(&newmas);
e15e06a8
LH
2631 ret = mas_expected_entries(&newmas, nr_entries);
2632 mt_set_non_kernel(0);
2633 MT_BUG_ON(mt, ret != 0);
2634
120b1162 2635 rcu_read_lock();
e15e06a8
LH
2636 mas_for_each(&mas, tmp, ULONG_MAX) {
2637 newmas.index = mas.index;
2638 newmas.last = mas.last;
2639 mas_store(&newmas, tmp);
2640 }
120b1162 2641 rcu_read_unlock();
e15e06a8 2642 mas_destroy(&newmas);
120b1162
LH
2643 mas_unlock(&newmas);
2644
e15e06a8
LH
2645 mtree_destroy(&newmt);
2646}
2647
120b1162 2648/* Duplicate many sizes of trees. Mainly to test expected entry values */
eaf9790d 2649static noinline void __init check_dup(struct maple_tree *mt)
e15e06a8
LH
2650{
2651 int i;
120b1162 2652 int big_start = 100010;
e15e06a8
LH
2653
2654 /* Check with a value at zero */
2655 for (i = 10; i < 1000; i++) {
2656 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2657 check_dup_gaps(mt, i, true, 5);
2658 mtree_destroy(mt);
120b1162 2659 rcu_barrier();
e15e06a8
LH
2660 }
2661
120b1162
LH
2662 cond_resched();
2663 mt_cache_shrink();
e15e06a8
LH
2664 /* Check with a value at zero, no gap */
2665 for (i = 1000; i < 2000; i++) {
2666 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2667 check_dup_gaps(mt, i, true, 0);
2668 mtree_destroy(mt);
120b1162 2669 rcu_barrier();
e15e06a8
LH
2670 }
2671
120b1162
LH
2672 cond_resched();
2673 mt_cache_shrink();
e15e06a8 2674 /* Check with a value at zero and unreasonably large */
120b1162 2675 for (i = big_start; i < big_start + 10; i++) {
e15e06a8
LH
2676 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2677 check_dup_gaps(mt, i, true, 5);
2678 mtree_destroy(mt);
120b1162 2679 rcu_barrier();
e15e06a8
LH
2680 }
2681
120b1162
LH
2682 cond_resched();
2683 mt_cache_shrink();
e15e06a8
LH
 2684 /* Small to medium size not starting at zero */
2685 for (i = 200; i < 1000; i++) {
2686 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2687 check_dup_gaps(mt, i, false, 5);
2688 mtree_destroy(mt);
120b1162 2689 rcu_barrier();
e15e06a8
LH
2690 }
2691
120b1162
LH
2692 cond_resched();
2693 mt_cache_shrink();
e15e06a8 2694 /* Unreasonably large not starting at zero*/
120b1162 2695 for (i = big_start; i < big_start + 10; i++) {
e15e06a8
LH
2696 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2697 check_dup_gaps(mt, i, false, 5);
2698 mtree_destroy(mt);
120b1162
LH
2699 rcu_barrier();
2700 cond_resched();
2701 mt_cache_shrink();
e15e06a8
LH
2702 }
2703
2704 /* Check non-allocation tree not starting at zero */
2705 for (i = 1500; i < 3000; i++) {
2706 mt_init_flags(mt, 0);
2707 check_dup_gaps(mt, i, false, 5);
2708 mtree_destroy(mt);
120b1162
LH
2709 rcu_barrier();
2710 cond_resched();
2711 if (i % 2 == 0)
2712 mt_cache_shrink();
e15e06a8
LH
2713 }
2714
120b1162 2715 mt_cache_shrink();
e15e06a8
LH
2716 /* Check non-allocation tree starting at zero */
2717 for (i = 200; i < 1000; i++) {
2718 mt_init_flags(mt, 0);
2719 check_dup_gaps(mt, i, true, 5);
2720 mtree_destroy(mt);
120b1162
LH
2721 rcu_barrier();
2722 cond_resched();
e15e06a8
LH
2723 }
2724
120b1162 2725 mt_cache_shrink();
e15e06a8 2726 /* Unreasonably large */
120b1162 2727 for (i = big_start + 5; i < big_start + 10; i++) {
e15e06a8
LH
2728 mt_init_flags(mt, 0);
2729 check_dup_gaps(mt, i, true, 5);
2730 mtree_destroy(mt);
120b1162
LH
2731 rcu_barrier();
2732 mt_cache_shrink();
2733 cond_resched();
e15e06a8 2734 }
e15e06a8
LH
2735}
2736
eaf9790d 2737static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
c5651b31
LH
2738{
2739 int i = 50;
2740 MA_STATE(mas, mt, 0, 0);
2741
2742 mt_set_non_kernel(9999);
2743 mas_lock(&mas);
2744 do {
2745 mas_set_range(&mas, i*10, i*10+9);
2746 mas_store(&mas, check_bnode_min_spanning);
2747 } while (i--);
2748
2749 mas_set_range(&mas, 240, 509);
2750 mas_store(&mas, NULL);
2751 mas_unlock(&mas);
2752 mas_destroy(&mas);
2753 mt_set_non_kernel(0);
2754}
2755
eaf9790d 2756static noinline void __init check_empty_area_window(struct maple_tree *mt)
7327e811
LH
2757{
2758 unsigned long i, nr_entries = 20;
2759 MA_STATE(mas, mt, 0, 0);
2760
2761 for (i = 1; i <= nr_entries; i++)
2762 mtree_store_range(mt, i*10, i*10 + 9,
2763 xa_mk_value(i), GFP_KERNEL);
2764
2765 /* Create another hole besides the one at 0 */
2766 mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
2767
2768 /* Check lower bounds that don't fit */
2769 rcu_read_lock();
2770 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2771
2772 mas_reset(&mas);
2773 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2774
2775 /* Check lower bound that does fit */
2776 mas_reset(&mas);
2777 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2778 MT_BUG_ON(mt, mas.index != 5);
2779 MT_BUG_ON(mt, mas.last != 9);
2780 rcu_read_unlock();
2781
2782 /* Check one gap that doesn't fit and one that does */
2783 rcu_read_lock();
2784 mas_reset(&mas);
2785 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2786 MT_BUG_ON(mt, mas.index != 161);
2787 MT_BUG_ON(mt, mas.last != 169);
2788
2789 /* Check one gap that does fit above the min */
2790 mas_reset(&mas);
2791 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2792 MT_BUG_ON(mt, mas.index != 216);
2793 MT_BUG_ON(mt, mas.last != 218);
2794
2795 /* Check size that doesn't fit any gap */
2796 mas_reset(&mas);
2797 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2798
2799 /*
2800 * Check size that doesn't fit the lower end of the window but
2801 * does fit the gap
2802 */
2803 mas_reset(&mas);
2804 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2805
2806 /*
2807 * Check size that doesn't fit the upper end of the window but
2808 * does fit the gap
2809 */
2810 mas_reset(&mas);
2811 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2812
2813 /* Check mas_empty_area forward */
2814 mas_reset(&mas);
2815 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2816 MT_BUG_ON(mt, mas.index != 0);
2817 MT_BUG_ON(mt, mas.last != 8);
2818
2819 mas_reset(&mas);
2820 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2821 MT_BUG_ON(mt, mas.index != 0);
2822 MT_BUG_ON(mt, mas.last != 3);
2823
2824 mas_reset(&mas);
2825 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2826
2827 mas_reset(&mas);
2828 MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
2829
2830 mas_reset(&mas);
17e7436b 2831 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);
7327e811
LH
2832
2833 mas_reset(&mas);
2834 mas_empty_area(&mas, 100, 165, 3);
2835
2836 mas_reset(&mas);
2837 MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
2838 rcu_read_unlock();
2839}
2840
eaf9790d 2841static noinline void __init check_empty_area_fill(struct maple_tree *mt)
4bd6dded
LH
2842{
2843 const unsigned long max = 0x25D78000;
2844 unsigned long size;
2845 int loop, shift;
2846 MA_STATE(mas, mt, 0, 0);
2847
2848 mt_set_non_kernel(99999);
2849 for (shift = 12; shift <= 16; shift++) {
2850 loop = 5000;
2851 size = 1 << shift;
2852 while (loop--) {
2853 mas_set(&mas, 0);
2854 mas_lock(&mas);
2855 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
2856 MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2857 mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2858 mas_unlock(&mas);
2859 mas_reset(&mas);
2860 }
2861 }
2862
2863 /* No space left. */
2864 size = 0x1000;
2865 rcu_read_lock();
2866 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2867 rcu_read_unlock();
2868
2869 /* Fill a depth 3 node to the maximum */
2870 for (unsigned long i = 629440511; i <= 629440800; i += 6)
2871 mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2872 /* Make space in the second-last depth 4 node */
2873 mtree_erase(mt, 631668735);
2874 /* Make space in the last depth 4 node */
2875 mtree_erase(mt, 629506047);
2876 mas_reset(&mas);
 2877 /* Search from just after the gap in the second-last depth 4 node */
2878 rcu_read_lock();
2879 MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
2880 rcu_read_unlock();
2881 mt_set_non_kernel(0);
2882}
2883
eb2e817f
LH
2884/*
2885 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
2886 *
2887 * The table below shows the single entry tree (0-0 pointer) and normal tree
2888 * with nodes.
2889 *
2890 * Function ENTRY Start Result index & last
2891 * ┬ ┬ ┬ ┬ ┬
2892 * │ │ │ │ └─ the final range
2893 * │ │ │ └─ The node value after execution
2894 * │ │ └─ The node value before execution
 2895 * │ │ └─ If the entry exists or does not exist (DNE)
2896 * └─ The function name
2897 *
2898 * Function ENTRY Start Result index & last
2899 * mas_next()
2900 * - after last
2901 * Single entry tree at 0-0
2902 * ------------------------
2903 * DNE MAS_START MAS_NONE 1 - oo
2904 * DNE MAS_PAUSE MAS_NONE 1 - oo
2905 * DNE MAS_ROOT MAS_NONE 1 - oo
2906 * when index = 0
2907 * DNE MAS_NONE MAS_ROOT 0
2908 * when index > 0
2909 * DNE MAS_NONE MAS_NONE 1 - oo
2910 *
2911 * Normal tree
2912 * -----------
2913 * exists MAS_START active range
2914 * DNE MAS_START active set to last range
2915 * exists MAS_PAUSE active range
2916 * DNE MAS_PAUSE active set to last range
2917 * exists MAS_NONE active range
2918 * exists active active range
2919 * DNE active active set to last range
a8091f03 2920 * ERANGE active MAS_OVERFLOW last range
eb2e817f
LH
2921 *
2922 * Function ENTRY Start Result index & last
2923 * mas_prev()
2924 * - before index
2925 * Single entry tree at 0-0
2926 * ------------------------
2927 * if index > 0
2928 * exists MAS_START MAS_ROOT 0
2929 * exists MAS_PAUSE MAS_ROOT 0
2930 * exists MAS_NONE MAS_ROOT 0
2931 *
2932 * if index == 0
2933 * DNE MAS_START MAS_NONE 0
2934 * DNE MAS_PAUSE MAS_NONE 0
2935 * DNE MAS_NONE MAS_NONE 0
2936 * DNE MAS_ROOT MAS_NONE 0
2937 *
2938 * Normal tree
2939 * -----------
2940 * exists MAS_START active range
2941 * DNE MAS_START active set to min
2942 * exists MAS_PAUSE active range
2943 * DNE MAS_PAUSE active set to min
2944 * exists MAS_NONE active range
2945 * DNE MAS_NONE MAS_NONE set to min
2946 * any MAS_ROOT MAS_NONE 0
2947 * exists active active range
2948 * DNE active active last range
a8091f03 2949 * ERANGE active MAS_UNDERFLOW last range
eb2e817f
LH
2950 *
2951 * Function ENTRY Start Result index & last
2952 * mas_find()
2953 * - at index or next
2954 * Single entry tree at 0-0
2955 * ------------------------
2956 * if index > 0
2957 * DNE MAS_START MAS_NONE 0
2958 * DNE MAS_PAUSE MAS_NONE 0
2959 * DNE MAS_ROOT MAS_NONE 0
a8091f03 2960 * DNE MAS_NONE MAS_NONE 1
eb2e817f
LH
2961 * if index == 0
2962 * exists MAS_START MAS_ROOT 0
2963 * exists MAS_PAUSE MAS_ROOT 0
2964 * exists MAS_NONE MAS_ROOT 0
2965 *
2966 * Normal tree
2967 * -----------
2968 * exists MAS_START active range
2969 * DNE MAS_START active set to max
2970 * exists MAS_PAUSE active range
2971 * DNE MAS_PAUSE active set to max
a8091f03 2972 * exists MAS_NONE active range (start at last)
eb2e817f
LH
2973 * exists active active range
2974 * DNE active active last range (max < last)
2975 *
2976 * Function ENTRY Start Result index & last
2977 * mas_find_rev()
2978 * - at index or before
2979 * Single entry tree at 0-0
2980 * ------------------------
2981 * if index > 0
2982 * exists MAS_START MAS_ROOT 0
2983 * exists MAS_PAUSE MAS_ROOT 0
2984 * exists MAS_NONE MAS_ROOT 0
2985 * if index == 0
2986 * DNE MAS_START MAS_NONE 0
2987 * DNE MAS_PAUSE MAS_NONE 0
2988 * DNE MAS_NONE MAS_NONE 0
2989 * DNE MAS_ROOT MAS_NONE 0
2990 *
2991 * Normal tree
2992 * -----------
2993 * exists MAS_START active range
2994 * DNE MAS_START active set to min
2995 * exists MAS_PAUSE active range
2996 * DNE MAS_PAUSE active set to min
a8091f03 2997 * exists MAS_NONE active range (start at index)
eb2e817f
LH
2998 * exists active active range
2999 * DNE active active last range (min > index)
3000 *
3001 * Function ENTRY Start Result index & last
3002 * mas_walk()
3003 * - Look up index
3004 * Single entry tree at 0-0
3005 * ------------------------
3006 * if index > 0
3007 * DNE MAS_START MAS_ROOT 1 - oo
3008 * DNE MAS_PAUSE MAS_ROOT 1 - oo
3009 * DNE MAS_NONE MAS_ROOT 1 - oo
3010 * DNE MAS_ROOT MAS_ROOT 1 - oo
3011 * if index == 0
3012 * exists MAS_START MAS_ROOT 0
3013 * exists MAS_PAUSE MAS_ROOT 0
3014 * exists MAS_NONE MAS_ROOT 0
3015 * exists MAS_ROOT MAS_ROOT 0
3016 *
3017 * Normal tree
3018 * -----------
3019 * exists MAS_START active range
3020 * DNE MAS_START active range of NULL
3021 * exists MAS_PAUSE active range
3022 * DNE MAS_PAUSE active range of NULL
3023 * exists MAS_NONE active range
3024 * DNE MAS_NONE active range of NULL
3025 * exists active active range
3026 * DNE active active range of NULL
3027 */
3028
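/*
 * Illustrative sketch only, not exercised by check_state_handling() below:
 * how a caller typically drives the state transitions documented in the
 * table above. The function name is an assumption of this sketch; the calls
 * are the same maple tree API used throughout this file. It assumes "mt" is
 * a populated tree; after mas_pause() the next mas_find() issued by
 * mas_for_each() resumes the walk from where it was paused.
 */
static void __maybe_unused state_handling_sketch(struct maple_tree *mt)
{
	void *entry;
	MA_STATE(mas, mt, 0, 0);	/* begins in MAS_START */

	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		/* Active state: mas.index/mas.last hold the current range */
		mas_pause(&mas);	/* mark a safe restart point */
		mas_unlock(&mas);
		cond_resched();		/* the lock may be dropped while paused */
		mas_lock(&mas);
	}
	/* The loop exits once mas_find() returns NULL past the last entry */
	mas_unlock(&mas);
}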
3029#define mas_active(x) (((x).node != MAS_ROOT) && \
3030 ((x).node != MAS_START) && \
3031 ((x).node != MAS_PAUSE) && \
3032 ((x).node != MAS_NONE))
3033static noinline void __init check_state_handling(struct maple_tree *mt)
3034{
3035 MA_STATE(mas, mt, 0, 0);
3036 void *entry, *ptr = (void *) 0x1234500;
3037 void *ptr2 = &ptr;
3038 void *ptr3 = &ptr2;
3039
3040 /* Check MAS_ROOT First */
3041 mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3042
3043 mas_lock(&mas);
a8091f03 3044 /* prev: Start -> underflow */
eb2e817f
LH
3045 entry = mas_prev(&mas, 0);
3046 MT_BUG_ON(mt, entry != NULL);
a8091f03 3047 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
eb2e817f
LH
3048
3049 /* prev: Start -> root */
3050 mas_set(&mas, 10);
3051 entry = mas_prev(&mas, 0);
3052 MT_BUG_ON(mt, entry != ptr);
3053 MT_BUG_ON(mt, mas.index != 0);
3054 MT_BUG_ON(mt, mas.last != 0);
3055 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3056
3057 /* prev: pause -> root */
3058 mas_set(&mas, 10);
3059 mas_pause(&mas);
3060 entry = mas_prev(&mas, 0);
3061 MT_BUG_ON(mt, entry != ptr);
3062 MT_BUG_ON(mt, mas.index != 0);
3063 MT_BUG_ON(mt, mas.last != 0);
3064 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3065
3066 /* next: start -> none */
3067 mas_set(&mas, 0);
3068 entry = mas_next(&mas, ULONG_MAX);
3069 MT_BUG_ON(mt, mas.index != 1);
3070 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3071 MT_BUG_ON(mt, entry != NULL);
3072 MT_BUG_ON(mt, mas.node != MAS_NONE);
3073
a8091f03 3074 /* next: start -> none */
eb2e817f
LH
3075 mas_set(&mas, 10);
3076 entry = mas_next(&mas, ULONG_MAX);
3077 MT_BUG_ON(mt, mas.index != 1);
3078 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3079 MT_BUG_ON(mt, entry != NULL);
3080 MT_BUG_ON(mt, mas.node != MAS_NONE);
3081
3082 /* find: start -> root */
3083 mas_set(&mas, 0);
3084 entry = mas_find(&mas, ULONG_MAX);
3085 MT_BUG_ON(mt, entry != ptr);
3086 MT_BUG_ON(mt, mas.index != 0);
3087 MT_BUG_ON(mt, mas.last != 0);
3088 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3089
3090 /* find: root -> none */
3091 entry = mas_find(&mas, ULONG_MAX);
3092 MT_BUG_ON(mt, entry != NULL);
3093 MT_BUG_ON(mt, mas.index != 1);
3094 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3095 MT_BUG_ON(mt, mas.node != MAS_NONE);
3096
3097 /* find: none -> none */
3098 entry = mas_find(&mas, ULONG_MAX);
3099 MT_BUG_ON(mt, entry != NULL);
3100 MT_BUG_ON(mt, mas.index != 1);
3101 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3102 MT_BUG_ON(mt, mas.node != MAS_NONE);
3103
3104 /* find: start -> none */
3105 mas_set(&mas, 10);
3106 entry = mas_find(&mas, ULONG_MAX);
3107 MT_BUG_ON(mt, entry != NULL);
3108 MT_BUG_ON(mt, mas.index != 1);
3109 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3110 MT_BUG_ON(mt, mas.node != MAS_NONE);
3111
3112 /* find_rev: none -> root */
3113 entry = mas_find_rev(&mas, 0);
3114 MT_BUG_ON(mt, entry != ptr);
3115 MT_BUG_ON(mt, mas.index != 0);
3116 MT_BUG_ON(mt, mas.last != 0);
3117 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3118
3119 /* find_rev: start -> root */
3120 mas_set(&mas, 0);
3121 entry = mas_find_rev(&mas, 0);
3122 MT_BUG_ON(mt, entry != ptr);
3123 MT_BUG_ON(mt, mas.index != 0);
3124 MT_BUG_ON(mt, mas.last != 0);
3125 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3126
3127 /* find_rev: root -> none */
3128 entry = mas_find_rev(&mas, 0);
3129 MT_BUG_ON(mt, entry != NULL);
3130 MT_BUG_ON(mt, mas.index != 0);
3131 MT_BUG_ON(mt, mas.last != 0);
3132 MT_BUG_ON(mt, mas.node != MAS_NONE);
3133
3134 /* find_rev: none -> none */
3135 entry = mas_find_rev(&mas, 0);
3136 MT_BUG_ON(mt, entry != NULL);
3137 MT_BUG_ON(mt, mas.index != 0);
3138 MT_BUG_ON(mt, mas.last != 0);
3139 MT_BUG_ON(mt, mas.node != MAS_NONE);
3140
3141 /* find_rev: start -> root */
3142 mas_set(&mas, 10);
3143 entry = mas_find_rev(&mas, 0);
3144 MT_BUG_ON(mt, entry != ptr);
3145 MT_BUG_ON(mt, mas.index != 0);
3146 MT_BUG_ON(mt, mas.last != 0);
3147 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3148
3149 /* walk: start -> none */
3150 mas_set(&mas, 10);
3151 entry = mas_walk(&mas);
3152 MT_BUG_ON(mt, entry != NULL);
3153 MT_BUG_ON(mt, mas.index != 1);
3154 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3155 MT_BUG_ON(mt, mas.node != MAS_NONE);
3156
 3157 /* walk: pause -> none */
3158 mas_set(&mas, 10);
3159 mas_pause(&mas);
3160 entry = mas_walk(&mas);
3161 MT_BUG_ON(mt, entry != NULL);
3162 MT_BUG_ON(mt, mas.index != 1);
3163 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3164 MT_BUG_ON(mt, mas.node != MAS_NONE);
3165
3166 /* walk: none -> none */
3167 mas.index = mas.last = 10;
3168 entry = mas_walk(&mas);
3169 MT_BUG_ON(mt, entry != NULL);
3170 MT_BUG_ON(mt, mas.index != 1);
3171 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3172 MT_BUG_ON(mt, mas.node != MAS_NONE);
3173
3174 /* walk: none -> none */
3175 entry = mas_walk(&mas);
3176 MT_BUG_ON(mt, entry != NULL);
3177 MT_BUG_ON(mt, mas.index != 1);
3178 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3179 MT_BUG_ON(mt, mas.node != MAS_NONE);
3180
3181 /* walk: start -> root */
3182 mas_set(&mas, 0);
3183 entry = mas_walk(&mas);
3184 MT_BUG_ON(mt, entry != ptr);
3185 MT_BUG_ON(mt, mas.index != 0);
3186 MT_BUG_ON(mt, mas.last != 0);
3187 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3188
3189 /* walk: pause -> root */
3190 mas_set(&mas, 0);
3191 mas_pause(&mas);
3192 entry = mas_walk(&mas);
3193 MT_BUG_ON(mt, entry != ptr);
3194 MT_BUG_ON(mt, mas.index != 0);
3195 MT_BUG_ON(mt, mas.last != 0);
3196 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3197
3198 /* walk: none -> root */
3199 mas.node = MAS_NONE;
3200 entry = mas_walk(&mas);
3201 MT_BUG_ON(mt, entry != ptr);
3202 MT_BUG_ON(mt, mas.index != 0);
3203 MT_BUG_ON(mt, mas.last != 0);
3204 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3205
3206 /* walk: root -> root */
3207 entry = mas_walk(&mas);
3208 MT_BUG_ON(mt, entry != ptr);
3209 MT_BUG_ON(mt, mas.index != 0);
3210 MT_BUG_ON(mt, mas.last != 0);
3211 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3212
3213 /* walk: root -> none */
3214 mas_set(&mas, 10);
3215 entry = mas_walk(&mas);
3216 MT_BUG_ON(mt, entry != NULL);
3217 MT_BUG_ON(mt, mas.index != 1);
3218 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3219 MT_BUG_ON(mt, mas.node != MAS_NONE);
3220
3221 /* walk: none -> root */
3222 mas.index = mas.last = 0;
3223 entry = mas_walk(&mas);
3224 MT_BUG_ON(mt, entry != ptr);
3225 MT_BUG_ON(mt, mas.index != 0);
3226 MT_BUG_ON(mt, mas.last != 0);
3227 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3228
3229 mas_unlock(&mas);
3230
3231 /* Check when there is an actual node */
3232 mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3233 mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3234 mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3235 mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3236
3237 mas_lock(&mas);
3238
3239 /* next: start ->active */
3240 mas_set(&mas, 0);
3241 entry = mas_next(&mas, ULONG_MAX);
3242 MT_BUG_ON(mt, entry != ptr);
3243 MT_BUG_ON(mt, mas.index != 0x1000);
3244 MT_BUG_ON(mt, mas.last != 0x1500);
3245 MT_BUG_ON(mt, !mas_active(mas));
3246
3247 /* next: pause ->active */
3248 mas_set(&mas, 0);
3249 mas_pause(&mas);
3250 entry = mas_next(&mas, ULONG_MAX);
3251 MT_BUG_ON(mt, entry != ptr);
3252 MT_BUG_ON(mt, mas.index != 0x1000);
3253 MT_BUG_ON(mt, mas.last != 0x1500);
3254 MT_BUG_ON(mt, !mas_active(mas));
3255
3256 /* next: none ->active */
3257 mas.index = mas.last = 0;
3258 mas.offset = 0;
3259 mas.node = MAS_NONE;
3260 entry = mas_next(&mas, ULONG_MAX);
3261 MT_BUG_ON(mt, entry != ptr);
3262 MT_BUG_ON(mt, mas.index != 0x1000);
3263 MT_BUG_ON(mt, mas.last != 0x1500);
3264 MT_BUG_ON(mt, !mas_active(mas));
3265
3266 /* next:active ->active */
3267 entry = mas_next(&mas, ULONG_MAX);
3268 MT_BUG_ON(mt, entry != ptr2);
3269 MT_BUG_ON(mt, mas.index != 0x2000);
3270 MT_BUG_ON(mt, mas.last != 0x2500);
3271 MT_BUG_ON(mt, !mas_active(mas));
3272
a8091f03 3273 /* next:active -> active beyond data */
eb2e817f
LH
3274 entry = mas_next(&mas, 0x2999);
3275 MT_BUG_ON(mt, entry != NULL);
3276 MT_BUG_ON(mt, mas.index != 0x2501);
3277 MT_BUG_ON(mt, mas.last != 0x2fff);
3278 MT_BUG_ON(mt, !mas_active(mas));
3279
a8091f03 3280 /* Continue after last range ends after max */
eb2e817f
LH
3281 entry = mas_next(&mas, ULONG_MAX);
3282 MT_BUG_ON(mt, entry != ptr3);
3283 MT_BUG_ON(mt, mas.index != 0x3000);
3284 MT_BUG_ON(mt, mas.last != 0x3500);
3285 MT_BUG_ON(mt, !mas_active(mas));
3286
a8091f03
LH
3287 /* next:active -> active continued */
3288 entry = mas_next(&mas, ULONG_MAX);
3289 MT_BUG_ON(mt, entry != NULL);
3290 MT_BUG_ON(mt, mas.index != 0x3501);
3291 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3292 MT_BUG_ON(mt, !mas_active(mas));
3293
3294 /* next:active -> overflow */
eb2e817f
LH
3295 entry = mas_next(&mas, ULONG_MAX);
3296 MT_BUG_ON(mt, entry != NULL);
3297 MT_BUG_ON(mt, mas.index != 0x3501);
3298 MT_BUG_ON(mt, mas.last != ULONG_MAX);
a8091f03
LH
3299 MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
3300
3301 /* next:overflow -> overflow */
3302 entry = mas_next(&mas, ULONG_MAX);
3303 MT_BUG_ON(mt, entry != NULL);
3304 MT_BUG_ON(mt, mas.index != 0x3501);
3305 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3306 MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
3307
3308 /* prev:overflow -> active */
3309 entry = mas_prev(&mas, 0);
3310 MT_BUG_ON(mt, entry != ptr3);
3311 MT_BUG_ON(mt, mas.index != 0x3000);
3312 MT_BUG_ON(mt, mas.last != 0x3500);
eb2e817f
LH
3313 MT_BUG_ON(mt, !mas_active(mas));
3314
3315 /* next: none -> active, skip value at location */
3316 mas_set(&mas, 0);
3317 entry = mas_next(&mas, ULONG_MAX);
3318 mas.node = MAS_NONE;
3319 mas.offset = 0;
3320 entry = mas_next(&mas, ULONG_MAX);
3321 MT_BUG_ON(mt, entry != ptr2);
3322 MT_BUG_ON(mt, mas.index != 0x2000);
3323 MT_BUG_ON(mt, mas.last != 0x2500);
3324 MT_BUG_ON(mt, !mas_active(mas));
3325
3326 /* prev:active ->active */
3327 entry = mas_prev(&mas, 0);
3328 MT_BUG_ON(mt, entry != ptr);
3329 MT_BUG_ON(mt, mas.index != 0x1000);
3330 MT_BUG_ON(mt, mas.last != 0x1500);
3331 MT_BUG_ON(mt, !mas_active(mas));
3332
a8091f03
LH
3333 /* prev:active -> active spanning end range */
3334 entry = mas_prev(&mas, 0x0100);
3335 MT_BUG_ON(mt, entry != NULL);
3336 MT_BUG_ON(mt, mas.index != 0);
3337 MT_BUG_ON(mt, mas.last != 0x0FFF);
3338 MT_BUG_ON(mt, !mas_active(mas));
3339
3340 /* prev:active -> underflow */
3341 entry = mas_prev(&mas, 0);
3342 MT_BUG_ON(mt, entry != NULL);
3343 MT_BUG_ON(mt, mas.index != 0);
3344 MT_BUG_ON(mt, mas.last != 0x0FFF);
3345 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3346
3347 /* prev:underflow -> underflow */
eb2e817f
LH
3348 entry = mas_prev(&mas, 0);
3349 MT_BUG_ON(mt, entry != NULL);
3350 MT_BUG_ON(mt, mas.index != 0);
3351 MT_BUG_ON(mt, mas.last != 0x0FFF);
a8091f03
LH
3352 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3353
3354 /* next:underflow -> active */
3355 entry = mas_next(&mas, ULONG_MAX);
3356 MT_BUG_ON(mt, entry != ptr);
3357 MT_BUG_ON(mt, mas.index != 0x1000);
3358 MT_BUG_ON(mt, mas.last != 0x1500);
3359 MT_BUG_ON(mt, !mas_active(mas));
3360
3361 /* prev:first value -> underflow */
3362 entry = mas_prev(&mas, 0x1000);
3363 MT_BUG_ON(mt, entry != NULL);
3364 MT_BUG_ON(mt, mas.index != 0x1000);
3365 MT_BUG_ON(mt, mas.last != 0x1500);
3366 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3367
3368 /* find:underflow -> first value */
3369 entry = mas_find(&mas, ULONG_MAX);
3370 MT_BUG_ON(mt, entry != ptr);
3371 MT_BUG_ON(mt, mas.index != 0x1000);
3372 MT_BUG_ON(mt, mas.last != 0x1500);
eb2e817f
LH
3373 MT_BUG_ON(mt, !mas_active(mas));
3374
3375 /* prev: pause ->active */
3376 mas_set(&mas, 0x3600);
3377 entry = mas_prev(&mas, 0);
3378 MT_BUG_ON(mt, entry != ptr3);
3379 mas_pause(&mas);
3380 entry = mas_prev(&mas, 0);
3381 MT_BUG_ON(mt, entry != ptr2);
3382 MT_BUG_ON(mt, mas.index != 0x2000);
3383 MT_BUG_ON(mt, mas.last != 0x2500);
3384 MT_BUG_ON(mt, !mas_active(mas));
3385
a8091f03 3386 /* prev:active -> active spanning min */
eb2e817f
LH
3387 entry = mas_prev(&mas, 0x1600);
3388 MT_BUG_ON(mt, entry != NULL);
3389 MT_BUG_ON(mt, mas.index != 0x1501);
3390 MT_BUG_ON(mt, mas.last != 0x1FFF);
3391 MT_BUG_ON(mt, !mas_active(mas));
3392
a8091f03 3393 /* prev: active ->active, continue */
eb2e817f
LH
3394 entry = mas_prev(&mas, 0);
3395 MT_BUG_ON(mt, entry != ptr);
3396 MT_BUG_ON(mt, mas.index != 0x1000);
3397 MT_BUG_ON(mt, mas.last != 0x1500);
3398 MT_BUG_ON(mt, !mas_active(mas));
3399
3400 /* find: start ->active */
3401 mas_set(&mas, 0);
3402 entry = mas_find(&mas, ULONG_MAX);
3403 MT_BUG_ON(mt, entry != ptr);
3404 MT_BUG_ON(mt, mas.index != 0x1000);
3405 MT_BUG_ON(mt, mas.last != 0x1500);
3406 MT_BUG_ON(mt, !mas_active(mas));
3407
3408 /* find: pause ->active */
3409 mas_set(&mas, 0);
3410 mas_pause(&mas);
3411 entry = mas_find(&mas, ULONG_MAX);
3412 MT_BUG_ON(mt, entry != ptr);
3413 MT_BUG_ON(mt, mas.index != 0x1000);
3414 MT_BUG_ON(mt, mas.last != 0x1500);
3415 MT_BUG_ON(mt, !mas_active(mas));
3416
 3417 /* find: start ->active on value */
3418 mas_set(&mas, 1200);
3419 entry = mas_find(&mas, ULONG_MAX);
3420 MT_BUG_ON(mt, entry != ptr);
3421 MT_BUG_ON(mt, mas.index != 0x1000);
3422 MT_BUG_ON(mt, mas.last != 0x1500);
3423 MT_BUG_ON(mt, !mas_active(mas));
3424
3425 /* find:active ->active */
3426 entry = mas_find(&mas, ULONG_MAX);
3427 MT_BUG_ON(mt, entry != ptr2);
3428 MT_BUG_ON(mt, mas.index != 0x2000);
3429 MT_BUG_ON(mt, mas.last != 0x2500);
3430 MT_BUG_ON(mt, !mas_active(mas));
3431
3432
 3433 /* find:active -> active (NULL) */
3434 entry = mas_find(&mas, 0x2700);
3435 MT_BUG_ON(mt, entry != NULL);
3436 MT_BUG_ON(mt, mas.index != 0x2501);
3437 MT_BUG_ON(mt, mas.last != 0x2FFF);
3438 MT_BUG_ON(mt, !mas_active(mas));
3439
a8091f03 3440 /* find: overflow ->active */
eb2e817f
LH
3441 entry = mas_find(&mas, 0x5000);
3442 MT_BUG_ON(mt, entry != ptr3);
3443 MT_BUG_ON(mt, mas.index != 0x3000);
3444 MT_BUG_ON(mt, mas.last != 0x3500);
3445 MT_BUG_ON(mt, !mas_active(mas));
3446
 3447 /* find:active -> active (NULL) end */
3448 entry = mas_find(&mas, ULONG_MAX);
3449 MT_BUG_ON(mt, entry != NULL);
3450 MT_BUG_ON(mt, mas.index != 0x3501);
3451 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3452 MT_BUG_ON(mt, !mas_active(mas));
3453
3454 /* find_rev: active (END) ->active */
3455 entry = mas_find_rev(&mas, 0);
3456 MT_BUG_ON(mt, entry != ptr3);
3457 MT_BUG_ON(mt, mas.index != 0x3000);
3458 MT_BUG_ON(mt, mas.last != 0x3500);
3459 MT_BUG_ON(mt, !mas_active(mas));
3460
3461 /* find_rev:active ->active */
3462 entry = mas_find_rev(&mas, 0);
3463 MT_BUG_ON(mt, entry != ptr2);
3464 MT_BUG_ON(mt, mas.index != 0x2000);
3465 MT_BUG_ON(mt, mas.last != 0x2500);
3466 MT_BUG_ON(mt, !mas_active(mas));
3467
3468 /* find_rev: pause ->active */
3469 mas_pause(&mas);
3470 entry = mas_find_rev(&mas, 0);
3471 MT_BUG_ON(mt, entry != ptr);
3472 MT_BUG_ON(mt, mas.index != 0x1000);
3473 MT_BUG_ON(mt, mas.last != 0x1500);
3474 MT_BUG_ON(mt, !mas_active(mas));
3475
3476 /* find_rev:active -> active */
3477 entry = mas_find_rev(&mas, 0);
3478 MT_BUG_ON(mt, entry != NULL);
3479 MT_BUG_ON(mt, mas.index != 0);
3480 MT_BUG_ON(mt, mas.last != 0x0FFF);
3481 MT_BUG_ON(mt, !mas_active(mas));
3482
3483 /* find_rev: start ->active */
3484 mas_set(&mas, 0x1200);
3485 entry = mas_find_rev(&mas, 0);
3486 MT_BUG_ON(mt, entry != ptr);
3487 MT_BUG_ON(mt, mas.index != 0x1000);
3488 MT_BUG_ON(mt, mas.last != 0x1500);
3489 MT_BUG_ON(mt, !mas_active(mas));
3490
3491 /* mas_walk start ->active */
3492 mas_set(&mas, 0x1200);
3493 entry = mas_walk(&mas);
3494 MT_BUG_ON(mt, entry != ptr);
3495 MT_BUG_ON(mt, mas.index != 0x1000);
3496 MT_BUG_ON(mt, mas.last != 0x1500);
3497 MT_BUG_ON(mt, !mas_active(mas));
3498
3499 /* mas_walk start ->active */
3500 mas_set(&mas, 0x1600);
3501 entry = mas_walk(&mas);
3502 MT_BUG_ON(mt, entry != NULL);
3503 MT_BUG_ON(mt, mas.index != 0x1501);
3504 MT_BUG_ON(mt, mas.last != 0x1fff);
3505 MT_BUG_ON(mt, !mas_active(mas));
3506
3507 /* mas_walk pause ->active */
3508 mas_set(&mas, 0x1200);
3509 mas_pause(&mas);
3510 entry = mas_walk(&mas);
3511 MT_BUG_ON(mt, entry != ptr);
3512 MT_BUG_ON(mt, mas.index != 0x1000);
3513 MT_BUG_ON(mt, mas.last != 0x1500);
3514 MT_BUG_ON(mt, !mas_active(mas));
3515
3516 /* mas_walk pause -> active */
3517 mas_set(&mas, 0x1600);
3518 mas_pause(&mas);
3519 entry = mas_walk(&mas);
3520 MT_BUG_ON(mt, entry != NULL);
3521 MT_BUG_ON(mt, mas.index != 0x1501);
3522 MT_BUG_ON(mt, mas.last != 0x1fff);
3523 MT_BUG_ON(mt, !mas_active(mas));
3524
3525 /* mas_walk none -> active */
3526 mas_set(&mas, 0x1200);
3527 mas.node = MAS_NONE;
3528 entry = mas_walk(&mas);
3529 MT_BUG_ON(mt, entry != ptr);
3530 MT_BUG_ON(mt, mas.index != 0x1000);
3531 MT_BUG_ON(mt, mas.last != 0x1500);
3532 MT_BUG_ON(mt, !mas_active(mas));
3533
3534 /* mas_walk none -> active */
3535 mas_set(&mas, 0x1600);
3536 mas.node = MAS_NONE;
3537 entry = mas_walk(&mas);
3538 MT_BUG_ON(mt, entry != NULL);
3539 MT_BUG_ON(mt, mas.index != 0x1501);
3540 MT_BUG_ON(mt, mas.last != 0x1fff);
3541 MT_BUG_ON(mt, !mas_active(mas));
3542
3543 /* mas_walk active -> active */
3544 mas.index = 0x1200;
3545 mas.last = 0x1200;
3546 mas.offset = 0;
3547 entry = mas_walk(&mas);
3548 MT_BUG_ON(mt, entry != ptr);
3549 MT_BUG_ON(mt, mas.index != 0x1000);
3550 MT_BUG_ON(mt, mas.last != 0x1500);
3551 MT_BUG_ON(mt, !mas_active(mas));
3552
3553 /* mas_walk active -> active */
3554 mas.index = 0x1600;
3555 mas.last = 0x1600;
3556 entry = mas_walk(&mas);
3557 MT_BUG_ON(mt, entry != NULL);
3558 MT_BUG_ON(mt, mas.index != 0x1501);
3559 MT_BUG_ON(mt, mas.last != 0x1fff);
3560 MT_BUG_ON(mt, !mas_active(mas));
3561
3562 mas_unlock(&mas);
3563}
3564
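/*
 * Editor's note: the sketch below is illustrative only and is not part of
 * the original test file; it is not called from maple_tree_seed().  It
 * shows the advanced mas_* iterator pattern that check_state_handling()
 * above exercises: walk forward under the lock, pause so the lock can be
 * dropped, then resume from the same position.  The function name, the
 * stored ranges and the values are hypothetical.
 */
static void __init __maybe_unused example_mas_pause_resume(struct maple_tree *mt)
{
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	mt_init_flags(mt, 0);
	mtree_store_range(mt, 0x1000, 0x1500, xa_mk_value(1), GFP_KERNEL);
	mtree_store_range(mt, 0x2000, 0x2500, xa_mk_value(2), GFP_KERNEL);

	mas_lock(&mas);
	/* start -> active: find the first entry at or after index 0. */
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(1));

	/* active -> paused -> active: drop and retake the lock mid-walk. */
	mas_pause(&mas);
	mas_unlock(&mas);
	mas_lock(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(2));
	mas_unlock(&mas);

	mtree_destroy(mt);
}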
e15e06a8 3565static DEFINE_MTREE(tree);
eaf9790d 3566static int __init maple_tree_seed(void)
e15e06a8 3567{
eaf9790d
LH
3568 unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
3569 1001, 1002, 1003, 1005, 0,
3570 5003, 5002};
e15e06a8
LH
3571 void *ptr = &set;
3572
3573 pr_info("\nTEST STARTING\n\n");
3574
3575 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3576 check_root_expand(&tree);
3577 mtree_destroy(&tree);
3578
3579#if defined(BENCH_SLOT_STORE)
3580#define BENCH
3581 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3582 bench_slot_store(&tree);
3583 mtree_destroy(&tree);
3584 goto skip;
3585#endif
3586#if defined(BENCH_NODE_STORE)
3587#define BENCH
3588 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3589 bench_node_store(&tree);
3590 mtree_destroy(&tree);
3591 goto skip;
3592#endif
3593#if defined(BENCH_AWALK)
3594#define BENCH
3595 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3596 bench_awalk(&tree);
3597 mtree_destroy(&tree);
3598 goto skip;
3599#endif
3600#if defined(BENCH_WALK)
3601#define BENCH
3602 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3603 bench_walk(&tree);
3604 mtree_destroy(&tree);
3605 goto skip;
3606#endif
3607#if defined(BENCH_FORK)
3608#define BENCH
3609 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3610 bench_forking(&tree);
3611 mtree_destroy(&tree);
3612 goto skip;
3613#endif
3614#if defined(BENCH_MT_FOR_EACH)
3615#define BENCH
3616 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3617 bench_mt_for_each(&tree);
3618 mtree_destroy(&tree);
3619 goto skip;
3620#endif
361c678b
LH
3621#if defined(BENCH_MAS_FOR_EACH)
3622#define BENCH
3623 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3624 bench_mas_for_each(&tree);
3625 mtree_destroy(&tree);
3626 goto skip;
3627#endif
8c314f3b
LH
3628#if defined(BENCH_MAS_PREV)
3629#define BENCH
3630 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3631 bench_mas_prev(&tree);
3632 mtree_destroy(&tree);
3633 goto skip;
3634#endif
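/*
 * Editor's note: each BENCH_* block above defines BENCH, runs a single
 * benchmark on a freshly initialised tree and then jumps to the skip:
 * label at the end of maple_tree_seed(), so selecting one of them runs
 * that benchmark instead of the functional tests below.
 */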
e15e06a8 3635
5159d64b
LH
3636 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3637 check_iteration(&tree);
3638 mtree_destroy(&tree);
3639
e15e06a8
LH
3640 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3641 check_forking(&tree);
3642 mtree_destroy(&tree);
3643
3644 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3645 check_mas_store_gfp(&tree);
3646 mtree_destroy(&tree);
3647
3648 /* Test ranges (store and insert) */
3649 mt_init_flags(&tree, 0);
3650 check_ranges(&tree);
3651 mtree_destroy(&tree);
3652
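/*
 * Editor's note: illustrative sketch, not part of the original file and
 * not called from maple_tree_seed().  It demonstrates the store vs.
 * insert range semantics that check_ranges() above relies on:
 * mtree_store_range() overwrites whatever overlaps the range, while
 * mtree_insert_range() refuses to modify an occupied range.  The indices
 * and values are hypothetical.
 */
static void __init __maybe_unused example_range_semantics(struct maple_tree *mt)
{
	mt_init_flags(mt, 0);

	/* Store [10, 20], then overwrite its tail by storing [15, 30]. */
	MT_BUG_ON(mt, mtree_store_range(mt, 10, 20, xa_mk_value(1), GFP_KERNEL));
	MT_BUG_ON(mt, mtree_store_range(mt, 15, 30, xa_mk_value(2), GFP_KERNEL));
	MT_BUG_ON(mt, mtree_load(mt, 12) != xa_mk_value(1));
	MT_BUG_ON(mt, mtree_load(mt, 25) != xa_mk_value(2));

	/* Insert fails with -EEXIST on any overlap with an existing entry. */
	MT_BUG_ON(mt, mtree_insert_range(mt, 28, 40, xa_mk_value(3),
					 GFP_KERNEL) != -EEXIST);

	mtree_destroy(mt);
}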
120b1162
LH
3653#if defined(CONFIG_64BIT)
3654 /* These tests have ranges outside of 4GB */
e15e06a8
LH
3655 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3656 check_alloc_range(&tree);
3657 mtree_destroy(&tree);
3658
3659 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3660 check_alloc_rev_range(&tree);
3661 mtree_destroy(&tree);
120b1162 3662#endif
e15e06a8
LH
3663
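/*
 * Editor's note: illustrative sketch, not part of the original file and
 * not called from maple_tree_seed().  The 64-bit-only tests above
 * exercise gap allocation; a minimal use of that interface, assuming the
 * mtree_alloc_range() prototype from <linux/maple_tree.h>, might look
 * like this.  The bounds, size and values are hypothetical.
 */
static void __init __maybe_unused example_alloc_range(struct maple_tree *mt)
{
	unsigned long start;

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);

	/* Occupy [0x1000, 0x1fff], then request a free range of 0x1000 slots. */
	MT_BUG_ON(mt, mtree_store_range(mt, 0x1000, 0x1fff, xa_mk_value(1),
					GFP_KERNEL));
	MT_BUG_ON(mt, mtree_alloc_range(mt, &start, xa_mk_value(2), 0x1000,
					0x1000, 0x10000, GFP_KERNEL));

	/* The allocated range must not land inside the occupied one. */
	MT_BUG_ON(mt, start >= 0x1000 && start <= 0x1fff);

	mtree_destroy(mt);
}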
3664 mt_init_flags(&tree, 0);
3665
3666 check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
3667
3668 check_insert(&tree, set[9], &tree); /* Insert 0 */
3669 check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
3670 check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
3671
3672 check_insert(&tree, set[10], ptr); /* Insert 5003 */
3673 check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
3674 check_load(&tree, set[11], NULL); /* See if 5002 -> NULL */
3675 check_load(&tree, set[10], ptr); /* See if 5003 -> ptr */
3676
3677 /* Clear out the tree */
3678 mtree_destroy(&tree);
3679
3680 /* Try to insert, insert a dup, and load back what was inserted. */
3681 mt_init_flags(&tree, 0);
3682 check_insert(&tree, set[0], &tree); /* Insert 5015 */
3683 check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
3684 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3685
3686 /*
3687 * The second set of tests tries to load a value that doesn't exist, inserts
3688 * a second value, then loads both values back.
3689 */
3690 check_load(&tree, set[1], NULL); /* See if 5014 -> NULL */
3691 check_insert(&tree, set[1], ptr); /* insert 5014 -> ptr */
3692 check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
3693 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3694 /*
3695 * Tree currently contains:
3696 * 5014 -> ptr, 5015 -> &tree, everything else -> (nil)
3697 */
3698 check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
3699 check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
3700
3701 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3702 check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
3703 check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
3704 check_load(&tree, set[7], &tree); /* 1003 = &tree ? */
3705
3706 /* Clear out tree */
3707 mtree_destroy(&tree);
3708
3709 mt_init_flags(&tree, 0);
3710 /* Test inserting into a NULL hole. */
3711 check_insert(&tree, set[5], ptr); /* insert 1001 -> ptr */
3712 check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
3713 check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
3714 check_load(&tree, set[5], ptr); /* See if 1001 -> ptr */
3715 check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
3716 check_load(&tree, set[7], &tree); /* See if 1003 -> &tree */
3717
3718 /* Clear out the tree */
3719 mtree_destroy(&tree);
3720
e15e06a8
LH
3721 mt_init_flags(&tree, 0);
3722 /*
3723 * set[] = {5015, 5014, 5017, 25, 1000,
3724 * 1001, 1002, 1003, 1005, 0,
3725 * 5003, 5002};
3726 */
3727
3728 check_insert(&tree, set[0], ptr); /* 5015 */
3729 check_insert(&tree, set[1], &tree); /* 5014 */
3730 check_insert(&tree, set[2], ptr); /* 5017 */
3731 check_insert(&tree, set[3], &tree); /* 25 */
3732 check_load(&tree, set[0], ptr);
3733 check_load(&tree, set[1], &tree);
3734 check_load(&tree, set[2], ptr);
3735 check_load(&tree, set[3], &tree);
3736 check_insert(&tree, set[4], ptr); /* 1000 - should cause a split */
3737 check_load(&tree, set[0], ptr);
3738 check_load(&tree, set[1], &tree);
3739 check_load(&tree, set[2], ptr);
3740 check_load(&tree, set[3], &tree); /* 25 */
3741 check_load(&tree, set[4], ptr);
3742 check_insert(&tree, set[5], &tree); /* 1001 */
3743 check_load(&tree, set[0], ptr);
3744 check_load(&tree, set[1], &tree);
3745 check_load(&tree, set[2], ptr);
3746 check_load(&tree, set[3], &tree);
3747 check_load(&tree, set[4], ptr);
3748 check_load(&tree, set[5], &tree);
3749 check_insert(&tree, set[6], ptr);
3750 check_load(&tree, set[0], ptr);
3751 check_load(&tree, set[1], &tree);
3752 check_load(&tree, set[2], ptr);
3753 check_load(&tree, set[3], &tree);
3754 check_load(&tree, set[4], ptr);
3755 check_load(&tree, set[5], &tree);
3756 check_load(&tree, set[6], ptr);
3757 check_insert(&tree, set[7], &tree);
3758 check_load(&tree, set[0], ptr);
3759 check_insert(&tree, set[8], ptr);
3760
3761 check_insert(&tree, set[9], &tree);
3762
3763 check_load(&tree, set[0], ptr);
3764 check_load(&tree, set[1], &tree);
3765 check_load(&tree, set[2], ptr);
3766 check_load(&tree, set[3], &tree);
3767 check_load(&tree, set[4], ptr);
3768 check_load(&tree, set[5], &tree);
3769 check_load(&tree, set[6], ptr);
3770 check_load(&tree, set[9], &tree);
3771 mtree_destroy(&tree);
3772
e15e06a8
LH
3773 mt_init_flags(&tree, 0);
3774 check_seq(&tree, 16, false);
3775 mtree_destroy(&tree);
3776
3777 mt_init_flags(&tree, 0);
3778 check_seq(&tree, 1000, true);
3779 mtree_destroy(&tree);
3780
3781 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3782 check_rev_seq(&tree, 1000, true);
3783 mtree_destroy(&tree);
3784
3785 check_lower_bound_split(&tree);
3786 check_upper_bound_split(&tree);
3787 check_mid_split(&tree);
3788
3789 mt_init_flags(&tree, 0);
3790 check_next_entry(&tree);
3791 check_find(&tree);
3792 check_find_2(&tree);
3793 mtree_destroy(&tree);
3794
3795 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3796 check_prev_entry(&tree);
3797 mtree_destroy(&tree);
3798
e15e06a8
LH
3799 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3800 check_gap_combining(&tree);
3801 mtree_destroy(&tree);
3802
3803 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3804 check_node_overwrite(&tree);
3805 mtree_destroy(&tree);
3806
3807 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3808 next_prev_test(&tree);
3809 mtree_destroy(&tree);
3810
e15e06a8
LH
3811 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3812 check_spanning_relatives(&tree);
3813 mtree_destroy(&tree);
3814
3815 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3816 check_rev_find(&tree);
3817 mtree_destroy(&tree);
3818
3819 mt_init_flags(&tree, 0);
3820 check_fuzzer(&tree);
3821 mtree_destroy(&tree);
3822
3823 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3824 check_dup(&tree);
3825 mtree_destroy(&tree);
3826
c5651b31
LH
3827 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3828 check_bnode_min_spanning(&tree);
3829 mtree_destroy(&tree);
3830
7327e811
LH
3831 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3832 check_empty_area_window(&tree);
3833 mtree_destroy(&tree);
3834
4bd6dded
LH
3835 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3836 check_empty_area_fill(&tree);
3837 mtree_destroy(&tree);
3838
eb2e817f
LH
3839 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3840 check_state_handling(&tree);
3841 mtree_destroy(&tree);
3842
e15e06a8
LH
3843#if defined(BENCH)
3844skip:
3845#endif
3846 rcu_barrier();
3847 pr_info("maple_tree: %u of %u tests passed\n",
3848 atomic_read(&maple_tree_tests_passed),
3849 atomic_read(&maple_tree_tests_run));
3850 if (atomic_read(&maple_tree_tests_run) ==
3851 atomic_read(&maple_tree_tests_passed))
3852 return 0;
3853
3854 return -EINVAL;
3855}
3856
eaf9790d 3857static void __exit maple_tree_harvest(void)
e15e06a8
LH
3858{
3859
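	/* Nothing to tear down: every test above destroys its own tree. */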
3860}
3861
3862module_init(maple_tree_seed);
3863module_exit(maple_tree_harvest);
3864MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
3865MODULE_LICENSE("GPL");
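/*
 * Editor's note (assumption, not from this file): this test is normally
 * built as a module via CONFIG_TEST_MAPLE_TREE and run by loading it; the
 * same code is also built as a userspace test program under
 * tools/testing/radix-tree.
 */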