1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * Copyright (c) 2000-2002,2005 Silicon Graphics, Inc.
4 * All Rights Reserved.
5 */
6 #include <stdint.h>
7 #include <stdio.h>
8 #include "platform_defs.h"
9 #include "avl64.h"
10
11 #define CERT ASSERT
12
13 #ifdef AVL_DEBUG
14
15 static void
16 avl64_checknode(
17 avl64tree_desc_t *tree,
18 avl64node_t *np)
19 {
20 avl64node_t *back = np->avl_back;
21 avl64node_t *forw = np->avl_forw;
22 avl64node_t *nextino = np->avl_nextino;
23 int bal = np->avl_balance;
24
25 ASSERT(bal != AVL_BALANCE || (!back && !forw) || (back && forw));
26 ASSERT(bal != AVL_FORW || forw);
27 ASSERT(bal != AVL_BACK || back);
28
29 if (forw) {
30 ASSERT(AVL_START(tree, np) < AVL_START(tree, forw));
31 ASSERT(np->avl_forw->avl_parent == np);
32 ASSERT(back || bal == AVL_FORW);
33 } else {
34 ASSERT(bal != AVL_FORW);
35 ASSERT(bal == AVL_BALANCE || back);
36 ASSERT(bal == AVL_BACK || !back);
37 }
38
39 if (back) {
40 ASSERT(AVL_START(tree, np) > AVL_START(tree, back));
41 ASSERT(np->avl_back->avl_parent == np);
42 ASSERT(forw || bal == AVL_BACK);
43 } else {
44 ASSERT(bal != AVL_BACK);
45 ASSERT(bal == AVL_BALANCE || forw);
46 ASSERT(bal == AVL_FORW || !forw);
47 }
48
49 if (nextino == NULL)
50 ASSERT(forw == NULL);
51 else
52 ASSERT(AVL_END(tree, np) <= AVL_START(tree, nextino));
53 }
54
55 static void
56 avl64_checktree(
57 avl64tree_desc_t *tree,
58 avl64node_t *root)
59 {
60 avl64node_t *nlast, *nnext, *np;
61 uint64_t offset = 0;
62 uint64_t end;
63
64 nlast = nnext = root;
65
66 ASSERT(!nnext || nnext->avl_parent == NULL);
67
68 while (nnext) {
69
70 avl64_checknode(tree, nnext);
71 end = AVL_END(tree, nnext);
72
73 if (end <= offset) {
74 if ((np = nnext->avl_forw) && np != nlast) {
75 nlast = nnext;
76 nnext = np;
77 } else {
78 nlast = nnext;
79 nnext = nnext->avl_parent;
80 }
81 continue;
82 }
83
84 nlast = nnext;
85 		if ((np = nnext->avl_back)) {
86 if (AVL_END(tree, np) > offset) {
87 nnext = np;
88 continue;
89 }
90 }
91
92 np = nnext;
93 nnext = nnext->avl_forw;
94 if (!nnext)
95 nnext = np->avl_parent;
96
97 offset = end;
98 }
99 }
100 #else /* ! AVL_DEBUG */
101 #define avl64_checktree(t,x)
102 #endif /* AVL_DEBUG */
103
104
105 /*
106 * Reset balance for np up through tree.
107 * ``direction'' is the way that np's balance
108 * is headed after the deletion of one of its children --
109  * e.g., deleting an avl_forw child sends avl_balance toward AVL_BACK.
110 * Called only when deleting a node from the tree.
111 */
112 static void
113 retreat(
114 avl64tree_desc_t *tree,
115 avl64node_t *np,
116 int direction)
117 {
118 avl64node_t **rootp = &tree->avl_root;
119 avl64node_t *parent;
120 avl64node_t *child;
121 avl64node_t *tmp;
122 int bal;
123
124 do {
125 ASSERT(direction == AVL_BACK || direction == AVL_FORW);
126
127 if (np->avl_balance == AVL_BALANCE) {
128 np->avl_balance = direction;
129 return;
130 }
131
132 parent = np->avl_parent;
133
134 /*
135 * If balance is being restored, no local node
136 		 * reorganization is necessary, but one may be needed at
137 * a higher node. Reset direction and continue.
138 */
139 if (direction != np->avl_balance) {
140 np->avl_balance = AVL_BALANCE;
141 if (parent) {
142 if (parent->avl_forw == np)
143 direction = AVL_BACK;
144 else
145 direction = AVL_FORW;
146
147 np = parent;
148 continue;
149 }
150 return;
151 }
152
153 /*
154 		 * Imbalance. If an avl_forw node was removed, direction
155 * (and, by reduction, np->avl_balance) is/was AVL_BACK.
156 */
157 if (np->avl_balance == AVL_BACK) {
158
159 ASSERT(direction == AVL_BACK);
160 child = np->avl_back;
161 bal = child->avl_balance;
162
163 if (bal != AVL_FORW) /* single LL */ {
164 /*
165 * np gets pushed down to lesser child's
166 * avl_forw branch.
167 *
168 * np-> -D +B
169 * / \ / \
170 * child-> B deleted A -D
171 * / \ /
172 * A C C
173 cmn_err(CE_CONT, "!LL delete b 0x%x c 0x%x\n",
174 np, child);
175 */
176
177 np->avl_back = child->avl_forw;
178 if (child->avl_forw)
179 child->avl_forw->avl_parent = np;
180 child->avl_forw = np;
181
182 if (parent) {
183 if (parent->avl_forw == np) {
184 parent->avl_forw = child;
185 direction = AVL_BACK;
186 } else {
187 ASSERT(parent->avl_back == np);
188 parent->avl_back = child;
189 direction = AVL_FORW;
190 }
191 } else {
192 ASSERT(*rootp == np);
193 *rootp = child;
194 }
195 np->avl_parent = child;
196 child->avl_parent = parent;
197
198 if (bal == AVL_BALANCE) {
199 np->avl_balance = AVL_BACK;
200 child->avl_balance = AVL_FORW;
201 return;
202 } else {
203 np->avl_balance = AVL_BALANCE;
204 child->avl_balance = AVL_BALANCE;
205 np = parent;
206 avl64_checktree(tree, *rootp);
207 continue;
208 }
209 }
210
211 /* child->avl_balance == AVL_FORW double LR rotation
212 *
213 * child's avl_forw node gets promoted up, along with
214 * its avl_forw subtree
215 *
216 * np-> -G C
217 * / \ / \
218 * child-> +B H -B G
219 * / \ \ / / \
220 * A +C deleted A D H
221 * \
222 * D
223 cmn_err(CE_CONT, "!LR delete b 0x%x c 0x%x t 0x%x\n",
224 np, child, child->avl_forw);
225 */
226
227 tmp = child->avl_forw;
228 bal = tmp->avl_balance;
229
230 child->avl_forw = tmp->avl_back;
231 if (tmp->avl_back)
232 tmp->avl_back->avl_parent = child;
233
234 tmp->avl_back = child;
235 child->avl_parent = tmp;
236
237 np->avl_back = tmp->avl_forw;
238 if (tmp->avl_forw)
239 tmp->avl_forw->avl_parent = np;
240 tmp->avl_forw = np;
241
242 if (bal == AVL_FORW)
243 child->avl_balance = AVL_BACK;
244 else
245 child->avl_balance = AVL_BALANCE;
246
247 if (bal == AVL_BACK)
248 np->avl_balance = AVL_FORW;
249 else
250 np->avl_balance = AVL_BALANCE;
251
252 goto next;
253 }
254
255 ASSERT(np->avl_balance == AVL_FORW && direction == AVL_FORW);
256
257 child = np->avl_forw;
258 bal = child->avl_balance;
259
260 if (bal != AVL_BACK) /* single RR */ {
261 /*
262 * np gets pushed down to greater child's
263 * avl_back branch.
264 *
265 * np-> +B -D
266 * / \ / \
267 * deleted D <-child +B E
268 * / \ \
269 * C E C
270 cmn_err(CE_CONT, "!RR delete b 0x%x c 0x%x\n",
271 np, child);
272 */
273
274 np->avl_forw = child->avl_back;
275 if (child->avl_back)
276 child->avl_back->avl_parent = np;
277 child->avl_back = np;
278
279 if (parent) {
280 if (parent->avl_forw == np) {
281 parent->avl_forw = child;
282 direction = AVL_BACK;
283 } else {
284 ASSERT(parent->avl_back == np);
285 parent->avl_back = child;
286 direction = AVL_FORW;
287 }
288 } else {
289 ASSERT(*rootp == np);
290 *rootp = child;
291 }
292 np->avl_parent = child;
293 child->avl_parent = parent;
294
295 if (bal == AVL_BALANCE) {
296 np->avl_balance = AVL_FORW;
297 child->avl_balance = AVL_BACK;
298 return;
299 } else {
300 np->avl_balance = AVL_BALANCE;
301 child->avl_balance = AVL_BALANCE;
302 np = parent;
303 avl64_checktree(tree, *rootp);
304 continue;
305 }
306 }
307
308 /* child->avl_balance == AVL_BACK double RL rotation
309 cmn_err(CE_CONT, "!RL delete b 0x%x c 0x%x t 0x%x\n",
310 np, child, child->avl_back);
311 */
312
313 tmp = child->avl_back;
314 bal = tmp->avl_balance;
315
316 child->avl_back = tmp->avl_forw;
317 if (tmp->avl_forw)
318 tmp->avl_forw->avl_parent = child;
319
320 tmp->avl_forw = child;
321 child->avl_parent = tmp;
322
323 np->avl_forw = tmp->avl_back;
324 if (tmp->avl_back)
325 tmp->avl_back->avl_parent = np;
326 tmp->avl_back = np;
327
328 if (bal == AVL_BACK)
329 child->avl_balance = AVL_FORW;
330 else
331 child->avl_balance = AVL_BALANCE;
332
333 if (bal == AVL_FORW)
334 np->avl_balance = AVL_BACK;
335 else
336 np->avl_balance = AVL_BALANCE;
337 next:
338 np->avl_parent = tmp;
339 tmp->avl_balance = AVL_BALANCE;
340 tmp->avl_parent = parent;
341
342 if (parent) {
343 if (parent->avl_forw == np) {
344 parent->avl_forw = tmp;
345 direction = AVL_BACK;
346 } else {
347 ASSERT(parent->avl_back == np);
348 parent->avl_back = tmp;
349 direction = AVL_FORW;
350 }
351 } else {
352 ASSERT(*rootp == np);
353 *rootp = tmp;
354 return;
355 }
356
357 np = parent;
358 avl64_checktree(tree, *rootp);
359 } while (np);
360 }
361
362 /*
363 * Remove node from tree.
364 * avl_delete does the local tree manipulations,
365 * calls retreat() to rebalance tree up to its root.
366 */
367 void
368 avl64_delete(
369 avl64tree_desc_t *tree,
370 avl64node_t *np)
371 {
372 avl64node_t *forw = np->avl_forw;
373 avl64node_t *back = np->avl_back;
374 avl64node_t *parent = np->avl_parent;
375 avl64node_t *nnext;
376
377
378 if (np->avl_back) {
379 /*
380 		 * a left child exists, so the greatest left descendant's nextino
381 		 * is pointing to np; make it point to np->avl_nextino.
382 */
383 nnext = np->avl_back;
384 while (nnext) {
385 if (!nnext->avl_forw)
386 break; /* can't find anything bigger */
387 nnext = nnext->avl_forw;
388 }
389 } else
390 if (np->avl_parent) {
391 /*
392 		 * find the nearest ancestor with a lesser value. That ancestor's
393 		 * nextino is pointing to np; make it point to np->avl_nextino.
394 */
395 nnext = np->avl_parent;
396 while (nnext) {
397 if (AVL_END(tree, nnext) <= AVL_END(tree, np))
398 break;
399 nnext = nnext->avl_parent;
400 }
401 } else
402 nnext = NULL;
403
404 if (nnext) {
405 ASSERT(nnext->avl_nextino == np);
406 nnext->avl_nextino = np->avl_nextino;
407 /*
408 		 * Something precedes np; np cannot be firstino.
409 */
410 ASSERT(tree->avl_firstino != np);
411 }
412 else {
413 /*
414 		 * Nothing precedes np; after deletion, np's nextino
415 * is firstino of tree.
416 */
417 ASSERT(tree->avl_firstino == np);
418 tree->avl_firstino = np->avl_nextino;
419 }
420
421
422 /*
423 * Degenerate cases...
424 */
425 if (forw == NULL) {
426 forw = back;
427 goto attach;
428 }
429
430 if (back == NULL) {
431 attach:
432 if (forw)
433 forw->avl_parent = parent;
434 if (parent) {
435 if (parent->avl_forw == np) {
436 parent->avl_forw = forw;
437 retreat(tree, parent, AVL_BACK);
438 } else {
439 ASSERT(parent->avl_back == np);
440 parent->avl_back = forw;
441 retreat(tree, parent, AVL_FORW);
442 }
443 } else {
444 ASSERT(tree->avl_root == np);
445 tree->avl_root = forw;
446 }
447 avl64_checktree(tree, tree->avl_root);
448 return;
449 }
450
451 /*
452 * Harder case: children on both sides.
453 * If back's avl_forw pointer is null, just have back
454 * inherit np's avl_forw tree, remove np from the tree
455 * and adjust balance counters starting at back.
456 *
457 	 * np->  xI       xH    (before retreat())
458 * / \ / \
459 * back-> H J G J
460 * / / \ / \
461 * G ? ? ? ?
462 * / \
463 * ? ?
464 */
465 if ((forw = back->avl_forw) == NULL) {
466 /*
467 * AVL_FORW retreat below will set back's
468 * balance to AVL_BACK.
469 */
470 back->avl_balance = np->avl_balance;
471 back->avl_forw = forw = np->avl_forw;
472 forw->avl_parent = back;
473 back->avl_parent = parent;
474
475 if (parent) {
476 if (parent->avl_forw == np)
477 parent->avl_forw = back;
478 else {
479 ASSERT(parent->avl_back == np);
480 parent->avl_back = back;
481 }
482 } else {
483 ASSERT(tree->avl_root == np);
484 tree->avl_root = back;
485 }
486
487 /*
488 * back is taking np's place in the tree, and
489 		 * has therefore lost an avl_back node (itself).
490 */
491 retreat(tree, back, AVL_FORW);
492 avl64_checktree(tree, tree->avl_root);
493 return;
494 }
495
496 /*
497 * Hardest case: children on both sides, and back's
498 	 * avl_forw pointer isn't null. Find np's immediate
499 	 * in-order predecessor by following back's avl_forw line
500 * to the end, then have it inherit np's avl_forw tree.
501 *
502 * np-> xI xH
503 * / \ / \
504 * G J back-> G J (before retreat())
505 * / \ / \
506 * F ?... F ?1
507 * / \
508 * ? H <-forw
509 * /
510 * ?1
511 */
512 while ((back = forw->avl_forw))
513 forw = back;
514
515 /*
516 * Will be adjusted by retreat() below.
517 */
518 forw->avl_balance = np->avl_balance;
519
520 /*
521 * forw inherits np's avl_forw...
522 */
523 forw->avl_forw = np->avl_forw;
524 np->avl_forw->avl_parent = forw;
525
526 /*
527 * ... forw's parent gets forw's avl_back...
528 */
529 back = forw->avl_parent;
530 back->avl_forw = forw->avl_back;
531 if (forw->avl_back)
532 forw->avl_back->avl_parent = back;
533
534 /*
535 * ... forw gets np's avl_back...
536 */
537 forw->avl_back = np->avl_back;
538 np->avl_back->avl_parent = forw;
539
540 /*
541 * ... and forw gets np's parent.
542 */
543 forw->avl_parent = parent;
544
545 if (parent) {
546 if (parent->avl_forw == np)
547 parent->avl_forw = forw;
548 else
549 parent->avl_back = forw;
550 } else {
551 ASSERT(tree->avl_root == np);
552 tree->avl_root = forw;
553 }
554
555 /*
556 * What used to be forw's parent is the starting
557 	 * point for rebalancing. It has lost an avl_forw node.
558 */
559 retreat(tree, back, AVL_BACK);
560 avl64_checktree(tree, tree->avl_root);
561 }
562
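/*
 * Illustrative (non-build) sketch of deleting by value: the AVL64_EXAMPLES
 * guard and the example_* names are hypothetical and exist only for this
 * sketch.  The caller looks the covering range up with avl64_findrange()
 * and then unlinks it; the library never allocates or frees nodes itself,
 * so ownership of the removed node stays with the caller.
 */
#ifdef AVL64_EXAMPLES
static int
example_remove_covering(
	avl64tree_desc_t	*tree,
	uint64_t		value)
{
	avl64node_t		*np;

	/* range with AVL_START(np) <= value < AVL_END(np), if any */
	np = avl64_findrange(tree, value);
	if (np == NULL)
		return 0;

	/* unlink np and rebalance the tree via retreat() */
	avl64_delete(tree, np);
	return 1;
}
#endif /* AVL64_EXAMPLES */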
563
564 /*
565 * avl_findanyrange:
566 *
567 * Given range r [start, end), find any range which is contained in r.
568  * If checklen is non-zero, then only ranges of non-zero length are
569 * considered in finding a match.
570 */
571 avl64node_t *
572 avl64_findanyrange(
573 avl64tree_desc_t *tree,
574 uint64_t start,
575 uint64_t end,
576 int checklen)
577 {
578 avl64node_t *np = tree->avl_root;
579
580 /* np = avl64_findadjacent(tree, start, AVL_SUCCEED); */
581 while (np) {
582 if (start < AVL_START(tree, np)) {
583 if (np->avl_back) {
584 np = np->avl_back;
585 continue;
586 }
587 /* if we were to add node with start, would
588 * have a growth of AVL_BACK
589 */
590 /* if succeeding node is needed, this is it.
591 */
592 break;
593 }
594 if (start >= AVL_END(tree, np)) {
595 if (np->avl_forw) {
596 np = np->avl_forw;
597 continue;
598 }
599 /* if we were to add node with start, would
600 * have a growth of AVL_FORW;
601 */
602 /* we are looking for a succeeding node;
603 * this is nextino.
604 */
605 np = np->avl_nextino;
606 break;
607 }
608 /* AVL_START(tree, np) <= start < AVL_END(tree, np) */
609 break;
610 }
611 if (np) {
612 if (checklen == AVL_INCLUDE_ZEROLEN) {
613 if (end <= AVL_START(tree, np)) {
614 			/* something follows start, but it
615 			 * is entirely after the range (end)
616 */
617 return(NULL);
618 }
619 			/* np may straddle [start, end) */
620 return(np);
621 }
622 /*
623 * find non-zero length region
624 */
625 while (np && (AVL_END(tree, np) - AVL_START(tree, np) == 0)
626 && (AVL_START(tree, np) < end))
627 np = np->avl_nextino;
628
629 if ((np == NULL) || (AVL_START(tree, np) >= end))
630 return NULL;
631 return(np);
632 }
633 /*
634 * nothing succeeds start, all existing ranges are before start.
635 */
636 return NULL;
637 }
638
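/*
 * Hedged usage sketch (guard macro and helper name are hypothetical):
 * probe whether anything in the tree overlaps [start, end).  Passing
 * AVL_INCLUDE_ZEROLEN accepts zero-length ranges as matches; any other
 * checklen value skips them.
 */
#ifdef AVL64_EXAMPLES
static int
example_range_is_busy(
	avl64tree_desc_t	*tree,
	uint64_t		start,
	uint64_t		end)
{
	return avl64_findanyrange(tree, start, end, AVL_INCLUDE_ZEROLEN) != NULL;
}
#endif /* AVL64_EXAMPLES */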
639
640 /*
641 * Returns a pointer to range which contains value.
642 */
643 avl64node_t *
644 avl64_findrange(
645 avl64tree_desc_t *tree,
646 uint64_t value)
647 {
648 avl64node_t *np = tree->avl_root;
649
650 while (np) {
651 if (value < AVL_START(tree, np)) {
652 np = np->avl_back;
653 continue;
654 }
655 if (value >= AVL_END(tree, np)) {
656 np = np->avl_forw;
657 continue;
658 }
659 ASSERT(AVL_START(tree, np) <= value &&
660 value < AVL_END(tree, np));
661 return np;
662 }
663 return NULL;
664 }
665
666
667 /*
668 * Returns a pointer to node which contains exact value.
669 */
670 avl64node_t *
671 avl64_find(
672 avl64tree_desc_t *tree,
673 uint64_t value)
674 {
675 avl64node_t *np = tree->avl_root;
676 uint64_t nvalue;
677
678 while (np) {
679 nvalue = AVL_START(tree, np);
680 if (value < nvalue) {
681 np = np->avl_back;
682 continue;
683 }
684 if (value == nvalue) {
685 return np;
686 }
687 np = np->avl_forw;
688 }
689 return NULL;
690 }
691
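/*
 * Sketch contrasting the two point lookups above (guard and helper name
 * hypothetical): avl64_find() only matches a node whose AVL_START equals
 * value exactly, while avl64_findrange() matches any node whose
 * [start, end) interval covers value.
 */
#ifdef AVL64_EXAMPLES
static void
example_point_lookups(
	avl64tree_desc_t	*tree,
	uint64_t		value)
{
	avl64node_t		*exact = avl64_find(tree, value);
	avl64node_t		*covering = avl64_findrange(tree, value);

	if (exact)
		printf("a range starts exactly at %llu\n",
				(unsigned long long)value);
	if (covering)
		printf("some range covers %llu\n",
				(unsigned long long)value);
}
#endif /* AVL64_EXAMPLES */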
692
693 /*
694  * Balance the AVL tree after attaching a new node.
695  * Called only by avl64_insert and avl64_insert_immediate.
696 */
697 static void
698 avl64_balance(
699 avl64node_t **rootp,
700 avl64node_t *np,
701 int growth)
702 {
703 /*
704 * At this point, np points to the node to which
705 * a new node has been attached. All that remains is to
706 * propagate avl_balance up the tree.
707 */
708 for ( ; ; ) {
709 avl64node_t *parent = np->avl_parent;
710 avl64node_t *child;
711
712 CERT(growth == AVL_BACK || growth == AVL_FORW);
713
714 /*
715 * If the buffer was already balanced, set avl_balance
716 * to the new direction. Continue if there is a
717 * parent after setting growth to reflect np's
718 * relation to its parent.
719 */
720 if (np->avl_balance == AVL_BALANCE) {
721 np->avl_balance = growth;
722 if (parent) {
723 if (parent->avl_forw == np)
724 growth = AVL_FORW;
725 else {
726 ASSERT(parent->avl_back == np);
727 growth = AVL_BACK;
728 }
729
730 np = parent;
731 continue;
732 }
733 break;
734 }
735
736 if (growth != np->avl_balance) {
737 /*
738 * Subtree is now balanced -- no net effect
739 * in the size of the subtree, so leave.
740 */
741 np->avl_balance = AVL_BALANCE;
742 break;
743 }
744
745 if (growth == AVL_BACK) {
746
747 child = np->avl_back;
748 CERT(np->avl_balance == AVL_BACK && child);
749
750 if (child->avl_balance == AVL_BACK) { /* single LL */
751 /*
752 * ``A'' just got inserted;
753 * np points to ``E'', child to ``C'',
754 * and it is already AVL_BACK --
755 * child will get promoted to top of subtree.
756
757 np-> -E C
758 / \ / \
759 child-> -C F -B E
760 / \ / / \
761 -B D A D F
762 /
763 A
764
765 Note that child->avl_parent and
766 avl_balance get set in common code.
767 */
768 np->avl_parent = child;
769 np->avl_balance = AVL_BALANCE;
770 np->avl_back = child->avl_forw;
771 if (child->avl_forw)
772 child->avl_forw->avl_parent = np;
773 child->avl_forw = np;
774 } else {
775 /*
776 * double LR
777 *
778 * child's avl_forw node gets promoted to
779 * the top of the subtree.
780
781 np-> -E C
782 / \ / \
783 child-> +B F -B E
784 / \ / / \
785 A +C A D F
786 \
787 D
788
789 */
790 avl64node_t *tmp = child->avl_forw;
791
792 CERT(child->avl_balance == AVL_FORW && tmp);
793
794 child->avl_forw = tmp->avl_back;
795 if (tmp->avl_back)
796 tmp->avl_back->avl_parent = child;
797
798 tmp->avl_back = child;
799 child->avl_parent = tmp;
800
801 np->avl_back = tmp->avl_forw;
802 if (tmp->avl_forw)
803 tmp->avl_forw->avl_parent = np;
804
805 tmp->avl_forw = np;
806 np->avl_parent = tmp;
807
808 if (tmp->avl_balance == AVL_BACK)
809 np->avl_balance = AVL_FORW;
810 else
811 np->avl_balance = AVL_BALANCE;
812
813 if (tmp->avl_balance == AVL_FORW)
814 child->avl_balance = AVL_BACK;
815 else
816 child->avl_balance = AVL_BALANCE;
817
818 /*
819 * Set child to point to tmp since it is
820 * now the top of the subtree, and will
821 * get attached to the subtree parent in
822 * the common code below.
823 */
824 child = tmp;
825 }
826
827 		} else /* growth == AVL_FORW */ {
828
829 			/*
830 			 * This code is the mirror image of the AVL_BACK case above.
831 */
832
833 child = np->avl_forw;
834 CERT(np->avl_balance == AVL_FORW && child);
835
836 if (child->avl_balance == AVL_FORW) { /* single RR */
837 np->avl_parent = child;
838 np->avl_balance = AVL_BALANCE;
839 np->avl_forw = child->avl_back;
840 if (child->avl_back)
841 child->avl_back->avl_parent = np;
842 child->avl_back = np;
843 } else {
844 /*
845 * double RL
846 */
847 avl64node_t *tmp = child->avl_back;
848
849 ASSERT(child->avl_balance == AVL_BACK && tmp);
850
851 child->avl_back = tmp->avl_forw;
852 if (tmp->avl_forw)
853 tmp->avl_forw->avl_parent = child;
854
855 tmp->avl_forw = child;
856 child->avl_parent = tmp;
857
858 np->avl_forw = tmp->avl_back;
859 if (tmp->avl_back)
860 tmp->avl_back->avl_parent = np;
861
862 tmp->avl_back = np;
863 np->avl_parent = tmp;
864
865 if (tmp->avl_balance == AVL_FORW)
866 np->avl_balance = AVL_BACK;
867 else
868 np->avl_balance = AVL_BALANCE;
869
870 if (tmp->avl_balance == AVL_BACK)
871 child->avl_balance = AVL_FORW;
872 else
873 child->avl_balance = AVL_BALANCE;
874
875 child = tmp;
876 }
877 }
878
879 child->avl_parent = parent;
880 child->avl_balance = AVL_BALANCE;
881
882 if (parent) {
883 if (parent->avl_back == np)
884 parent->avl_back = child;
885 else
886 parent->avl_forw = child;
887 } else {
888 ASSERT(*rootp == np);
889 *rootp = child;
890 }
891
892 break;
893 }
894 }
895
896 static
897 avl64node_t *
898 avl64_insert_find_growth(
899 avl64tree_desc_t *tree,
900 	uint64_t	start,		/* range starts at start, */
901 	uint64_t	end,		/* and ends before end (exclusive) */
902 int *growthp) /* OUT */
903 {
904 avl64node_t *root = tree->avl_root;
905 avl64node_t *np;
906
907 np = root;
908 	ASSERT(np); /* caller ensures that there is at least one node in tree */
909
910 for ( ; ; ) {
911 CERT(np->avl_parent || root == np);
912 CERT(!np->avl_parent || root != np);
913 CERT(!(np->avl_back) || np->avl_back->avl_parent == np);
914 CERT(!(np->avl_forw) || np->avl_forw->avl_parent == np);
915 CERT(np->avl_balance != AVL_FORW || np->avl_forw);
916 CERT(np->avl_balance != AVL_BACK || np->avl_back);
917 CERT(np->avl_balance != AVL_BALANCE ||
918 np->avl_back == NULL || np->avl_forw);
919 CERT(np->avl_balance != AVL_BALANCE ||
920 np->avl_forw == NULL || np->avl_back);
921
922 if (AVL_START(tree, np) >= end) {
923 if (np->avl_back) {
924 np = np->avl_back;
925 continue;
926 }
927 *growthp = AVL_BACK;
928 break;
929 }
930
931 if (AVL_END(tree, np) <= start) {
932 if (np->avl_forw) {
933 np = np->avl_forw;
934 continue;
935 }
936 *growthp = AVL_FORW;
937 break;
938 }
939 		/* found an overlapping range -- let caller decide if it is an error */
940 return(NULL);
941 }
942 return(np);
943 }
944
945
946 static void
947 avl64_insert_grow(
948 avl64tree_desc_t *tree,
949 avl64node_t *parent,
950 avl64node_t *newnode,
951 int growth)
952 {
953 avl64node_t *nnext;
954 uint64_t start = AVL_START(tree, newnode);
955
956 if (growth == AVL_BACK) {
957
958 parent->avl_back = newnode;
959 /*
960 	 * We are growing to the left; the previous in-order node for
961 	 * newnode is the closest ancestor with a lesser value. Before
962 	 * this insertion, that ancestor's avl_nextino points to
963 	 * newnode's parent. After insertion, the next in-order node for
964 	 * newnode is the parent.
965 */
966 newnode->avl_nextino = parent;
967 nnext = parent;
968 while (nnext) {
969 if (AVL_END(tree, nnext) <= start)
970 break;
971 nnext = nnext->avl_parent;
972 }
973 if (nnext) {
974 /*
975 * nnext will be null if newnode is
976 * the least element, and hence very first in the list.
977 */
978 ASSERT(nnext->avl_nextino == parent);
979 nnext->avl_nextino = newnode;
980 }
981 }
982 else {
983 parent->avl_forw = newnode;
984 newnode->avl_nextino = parent->avl_nextino;
985 parent->avl_nextino = newnode;
986 }
987 }
988
989
990 avl64node_t *
991 avl64_insert(
992 avl64tree_desc_t *tree,
993 avl64node_t *newnode)
994 {
995 avl64node_t *np;
996 uint64_t start = AVL_START(tree, newnode);
997 uint64_t end = AVL_END(tree, newnode);
998 int growth;
999
1000 ASSERT(newnode);
1001 /*
1002 * Clean all pointers for sanity; some will be reset as necessary.
1003 */
1004 newnode->avl_nextino = NULL;
1005 newnode->avl_parent = NULL;
1006 newnode->avl_forw = NULL;
1007 newnode->avl_back = NULL;
1008 newnode->avl_balance = AVL_BALANCE;
1009
1010 if ((np = tree->avl_root) == NULL) { /* degenerate case... */
1011 tree->avl_root = newnode;
1012 tree->avl_firstino = newnode;
1013 return newnode;
1014 }
1015
1016 if ((np = avl64_insert_find_growth(tree, start, end, &growth))
1017 == NULL) {
1018 if (start != end) { /* non-zero length range */
1019 fprintf(stderr,
1020 _("avl_insert: Warning! duplicate range [%llu,%llu]\n"),
1021 (unsigned long long)start,
1022 (unsigned long long)end);
1023 }
1024 return(NULL);
1025 }
1026
1027 avl64_insert_grow(tree, np, newnode, growth);
1028 if (growth == AVL_BACK) {
1029 /*
1030 		 * Growing to the left; if np was firstino, newnode is now firstino.
1031 */
1032 if (tree->avl_firstino == np)
1033 tree->avl_firstino = newnode;
1034 }
1035 #ifdef notneeded
1036 else
1037 if (growth == AVL_FORW)
1038 /*
1039 * Cannot possibly be firstino; there is somebody to our left.
1040 */
1041 ;
1042 #endif
1043
1044 newnode->avl_parent = np;
1045 CERT(np->avl_forw == newnode || np->avl_back == newnode);
1046
1047 avl64_balance(&tree->avl_root, np, growth);
1048
1049 avl64_checktree(tree, tree->avl_root);
1050
1051 return newnode;
1052 }
1053
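/*
 * End-to-end usage sketch, not part of the build: the AVL64_EXAMPLES guard
 * and all example_* names are hypothetical.  A caller embeds an avl64node_t
 * as the first member of its own structure, supplies start/end callbacks
 * through an avl64ops_t, initializes the tree, and inserts nodes keyed by
 * the half-open interval [start, end).
 */
#ifdef AVL64_EXAMPLES
struct example_extent {
	avl64node_t	ex_node;	/* first member, so node casts work */
	uint64_t	ex_start;
	uint64_t	ex_len;
};

static uint64_t
example_extent_start(avl64node_t *node)
{
	return ((struct example_extent *)node)->ex_start;
}

static uint64_t
example_extent_end(avl64node_t *node)
{
	struct example_extent	*ex = (struct example_extent *)node;

	return ex->ex_start + ex->ex_len;
}

static avl64ops_t example_extent_ops = {
	example_extent_start,
	example_extent_end,
};

static void
example_build_tree(
	avl64tree_desc_t	*tree,
	struct example_extent	*ext)
{
	avl64_init_tree(tree, &example_extent_ops);

	/* avl64_insert() returns NULL if ext overlaps a range already
	 * in the tree; the caller decides whether that is an error. */
	if (avl64_insert(tree, &ext->ex_node) == NULL)
		fprintf(stderr, "range [%llu,%llu) already present\n",
				(unsigned long long)ext->ex_start,
				(unsigned long long)(ext->ex_start + ext->ex_len));
}
#endif /* AVL64_EXAMPLES */
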
1054 /*
1055 *
1056 * avl64_insert_immediate(tree, afterp, newnode):
1057  * insert newnode into the tree immediately after afterp;
1058  * after insertion, newnode is the right child of afterp.
1059 */
1060 void
1061 avl64_insert_immediate(
1062 avl64tree_desc_t *tree,
1063 avl64node_t *afterp,
1064 avl64node_t *newnode)
1065 {
1066 /*
1067 * Clean all pointers for sanity; some will be reset as necessary.
1068 */
1069 newnode->avl_nextino = NULL;
1070 newnode->avl_parent = NULL;
1071 newnode->avl_forw = NULL;
1072 newnode->avl_back = NULL;
1073 newnode->avl_balance = AVL_BALANCE;
1074
1075 if (afterp == NULL) {
1076 tree->avl_root = newnode;
1077 tree->avl_firstino = newnode;
1078 return;
1079 }
1080
1081 ASSERT(afterp->avl_forw == NULL);
1082 avl64_insert_grow(tree, afterp, newnode, AVL_FORW); /* grow to right */
1083 CERT(afterp->avl_forw == newnode);
1084 avl64_balance(&tree->avl_root, afterp, AVL_FORW);
1085 avl64_checktree(tree, tree->avl_root);
1086 }
1087
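/*
 * Sketch of bulk-loading presorted nodes with avl64_insert_immediate()
 * (guard and helper name hypothetical).  The tree is assumed freshly
 * initialized and empty, and sorted[] must be in ascending key order, so
 * every new node sorts after the current in-order last node; that node is
 * passed as afterp (NULL for the very first insertion).
 */
#ifdef AVL64_EXAMPLES
static void
example_bulk_load(
	avl64tree_desc_t	*tree,
	avl64node_t		**sorted,
	int			count)
{
	avl64node_t		*prev = NULL;
	int			i;

	for (i = 0; i < count; i++) {
		avl64_insert_immediate(tree, prev, sorted[i]);
		prev = sorted[i];
	}
}
#endif /* AVL64_EXAMPLES */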
1088
1089 /*
1090 * Returns first in order node
1091 */
1092 avl64node_t *
1093 avl64_firstino(avl64node_t *root)
1094 {
1095 avl64node_t *np;
1096
1097 if ((np = root) == NULL)
1098 return NULL;
1099
1100 while (np->avl_back)
1101 np = np->avl_back;
1102 return np;
1103 }
1104
1105 /*
1106 * Returns last in order node
1107 */
1108 avl64node_t *
1109 avl64_lastino(avl64node_t *root)
1110 {
1111 avl64node_t *np;
1112
1113 if ((np = root) == NULL)
1114 return NULL;
1115
1116 while (np->avl_forw)
1117 np = np->avl_forw;
1118 return np;
1119 }
1120
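/*
 * The avl_nextino pointers maintained by insert and delete thread the nodes
 * in ascending order, so a full walk needs no recursion: start at
 * tree->avl_firstino (or avl64_firstino(tree->avl_root)) and follow
 * avl_nextino.  A minimal sketch (guard and name hypothetical; the tree is
 * assumed already populated):
 */
#ifdef AVL64_EXAMPLES
static void
example_walk_in_order(avl64tree_desc_t *tree)
{
	avl64node_t	*np;

	for (np = tree->avl_firstino; np != NULL; np = np->avl_nextino)
		printf("[%llu-%llu)\n",
				(unsigned long long)AVL_START(tree, np),
				(unsigned long long)AVL_END(tree, np));
}
#endif /* AVL64_EXAMPLES */
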
1121 void
1122 avl64_init_tree(avl64tree_desc_t *tree, avl64ops_t *ops)
1123 {
1124 tree->avl_root = NULL;
1125 tree->avl_firstino = NULL;
1126 tree->avl_ops = ops;
1127 }
1128
1129 #ifdef AVL_DEBUG
1130 static void
1131 avl64_printnode(avl64tree_desc_t *tree, avl64node_t *np, int nl)
1132 {
1133 printf("[%d-%d]%c", AVL_START(tree, np),
1134 (AVL_END(tree, np) - 1), nl ? '\n' : ' ');
1135 }
1136 #endif
1137 #ifdef STAND_ALONE_DEBUG
1138
1139 struct avl_debug_node {
1140 avl64node_t avl_node;
1141 xfs_off_t avl_start;
1142 unsigned int avl_size;
1143 };
1144
1145 avl64ops_t avl_debug_ops = {
1146 	avl64_debug_start,
1147 	avl64_debug_end,
1148 };
1149
1150 static uint64_t
1151 avl64_debug_start(avl64node_t *node)
1152 {
1153 	return (uint64_t)((struct avl_debug_node *)node)->avl_start;
1154 }
1155
1156 static uint64_t
1157 avl64_debug_end(avl64node_t *node)
1158 {
1159 	return (uint64_t)
1160 		(((struct avl_debug_node *)node)->avl_start +
1161 		 ((struct avl_debug_node *)node)->avl_size);
1162 }
1163
1164 struct avl_debug_node freenodes[100];
1165 struct avl_debug_node *freehead = &freenodes[0];
1166
1167 static avl64node_t *
1168 alloc_avl64_debug_node(void)
1169 {
1170 	freehead->avl_node.avl_balance = AVL_BALANCE;
1171 	freehead->avl_node.avl_parent = freehead->avl_node.avl_forw = freehead->avl_node.avl_back = NULL;
1172 	return &(freehead++)->avl_node;
1173 }
1174
1175 static void
1176 avl64_print(avl64tree_desc_t *tree, avl64node_t *root, int depth)
1177 {
1178 int i;
1179
1180 if (!root)
1181 return;
1182 if (root->avl_forw)
1183 avl64_print(tree, root->avl_forw, depth+5);
1184 for (i = 0; i < depth; i++)
1185 putchar((int) ' ');
1186 avl64_printnode(tree, root,1);
1187 if (root->avl_back)
1188 avl64_print(tree, root->avl_back, depth+5);
1189 }
1190
1191 int main(void)
1192 {
1193 int i, j;
1194 avl64node_t *np;
1195 avl64tree_desc_t tree;
1196 char linebuf[256], cmd[256];
1197
1198 avl64_init_tree(&tree, &avl_debug_ops);
1199
1200 for (i = 100; i > 0; i = i - 10)
1201 {
1202 		np = alloc_avl64_debug_node();
1203 		ASSERT(np);
1204 		((struct avl_debug_node *)np)->avl_start = i;
1205 		((struct avl_debug_node *)np)->avl_size = 10;
1206 avl64_insert(&tree, np);
1207 }
1208 avl64_print(&tree, tree.avl_root, 0);
1209
1210 for (np = tree.avl_firstino; np != NULL; np = np->avl_nextino)
1211 avl64_printnode(&tree, np, 0);
1212 printf("\n");
1213
1214 while (1) {
1215 printf(_("Command [fpdir] : "));
1216 fgets(linebuf, 256, stdin);
1217 if (feof(stdin)) break;
1218 		cmd[0] = '\0';
1219 if (sscanf(linebuf, "%[fpdir]%d", cmd, &i) != 2)
1220 continue;
1221 switch (cmd[0]) {
1222 case 'd':
1223 case 'f':
1224 printf(_("end of range ? "));
1225 fgets(linebuf, 256, stdin);
1226 j = atoi(linebuf);
1227
1228 if (i == j) j = i+1;
1229 			np = avl64_findanyrange(&tree, i, j, AVL_INCLUDE_ZEROLEN);
1230 if (np) {
1231 avl64_printnode(&tree, np, 1);
1232 if (cmd[0] == 'd')
1233 avl64_delete(&tree, np);
1234 } else
1235 printf(_("Cannot find %d\n"), i);
1236 break;
1237 case 'p':
1238 avl64_print(&tree, tree.avl_root, 0);
1239 for (np = tree.avl_firstino;
1240 np != NULL; np = np->avl_nextino)
1241 avl64_printnode(&tree, np, 0);
1242 printf("\n");
1243 break;
1244 case 'i':
1245 			np = alloc_avl64_debug_node();
1246 			ASSERT(np);
1247 			((struct avl_debug_node *)np)->avl_start = i;
1248 printf(_("size of range ? "));
1249 fgets(linebuf, 256, stdin);
1250 j = atoi(linebuf);
1251
1252 			((struct avl_debug_node *)np)->avl_size = j;
1253 avl64_insert(&tree, np);
1254 break;
1255 case 'r': {
1256 avl64node_t *b, *e, *t;
1257 int checklen;
1258
1259 printf(_("End of range ? "));
1260 fgets(linebuf, 256, stdin);
1261 j = atoi(linebuf);
1262
1263 printf(_("checklen 0/1 ? "));
1264 fgets(linebuf, 256, stdin);
1265 checklen = atoi(linebuf);
1266
1267
1268 b = avl64_findanyrange(&tree, i, j, checklen);
1269 if (b) {
1270 printf(_("Found something\n"));
1271 t = b;
1272 while (t) {
1273 if (t != b &&
1274 AVL_START(&tree, t) >= j)
1275 break;
1276 avl64_printnode(&tree, t, 0);
1277 t = t->avl_nextino;
1278 }
1279 printf("\n");
1280 }
1281 }
1282 }
1283 }
1284 }
1285 #endif
1286
1287 /*
1288  * Given a tree, find value; will return the range enclosing value,
1289  * or the range immediately succeeding value,
1290  * or the range immediately preceding value.
1291 */
1292 avl64node_t *
1293 avl64_findadjacent(
1294 avl64tree_desc_t *tree,
1295 uint64_t value,
1296 int dir)
1297 {
1298 avl64node_t *np = tree->avl_root;
1299
1300 while (np) {
1301 if (value < AVL_START(tree, np)) {
1302 if (np->avl_back) {
1303 np = np->avl_back;
1304 continue;
1305 }
1306 /* if we were to add node with value, would
1307 * have a growth of AVL_BACK
1308 */
1309 if (dir == AVL_SUCCEED) {
1310 /* if succeeding node is needed, this is it.
1311 */
1312 return(np);
1313 }
1314 if (dir == AVL_PRECEED) {
1315 /*
1316 * find nearest ancestor with lesser value.
1317 */
1318 np = np->avl_parent;
1319 while (np) {
1320 if (AVL_END(tree, np) <= value)
1321 break;
1322 np = np->avl_parent;
1323 }
1324 return(np);
1325 }
1326 ASSERT(dir == AVL_SUCCEED || dir == AVL_PRECEED);
1327 break;
1328 }
1329 if (value >= AVL_END(tree, np)) {
1330 if (np->avl_forw) {
1331 np = np->avl_forw;
1332 continue;
1333 }
1334 /* if we were to add node with value, would
1335 * have a growth of AVL_FORW;
1336 */
1337 if (dir == AVL_SUCCEED) {
1338 /* we are looking for a succeeding node;
1339 * this is nextino.
1340 */
1341 return(np->avl_nextino);
1342 }
1343 if (dir == AVL_PRECEED) {
1344 			/* looking for a preceding node; this is it. */
1345 return(np);
1346 }
1347 ASSERT(dir == AVL_SUCCEED || dir == AVL_PRECEED);
1348 }
1349 /* AVL_START(tree, np) <= value < AVL_END(tree, np) */
1350 return(np);
1351 }
1352 return NULL;
1353 }
1354
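/*
 * Sketch of locating the neighbours of an arbitrary value with
 * avl64_findadjacent() (guard and helper name hypothetical).  AVL_SUCCEED
 * yields the range containing value, or failing that the nearest range
 * after it; AVL_PRECEED yields the containing range, or failing that the
 * nearest range before it.  NULL means no such neighbour exists.
 */
#ifdef AVL64_EXAMPLES
static void
example_neighbours(
	avl64tree_desc_t	*tree,
	uint64_t		value,
	avl64node_t		**prevp,
	avl64node_t		**nextp)
{
	*prevp = avl64_findadjacent(tree, value, AVL_PRECEED);
	*nextp = avl64_findadjacent(tree, value, AVL_SUCCEED);
}
#endif /* AVL64_EXAMPLES */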
1355
1356 /*
1357 * avl_findranges:
1358 *
1359 * Given range r [start, end), find all ranges in tree which are contained
1360 * in r. At return, startp and endp point to first and last of
1361 * a chain of elements which describe the contained ranges. Elements
1362 * in startp ... endp are in sort order, and can be accessed by
1363 * using avl_nextino.
1364 */
1365
1366 void
1367 avl64_findranges(
1368 avl64tree_desc_t *tree,
1369 uint64_t start,
1370 uint64_t end,
1371 avl64node_t **startp,
1372 avl64node_t **endp)
1373 {
1374 avl64node_t *np;
1375
1376 np = avl64_findadjacent(tree, start, AVL_SUCCEED);
1377 	if (np == NULL /* nothing succeeding start */
1378 || (np && (end <= AVL_START(tree, np))))
1379 /* something follows start,
1380 but... is entirely after end */
1381 {
1382 *startp = NULL;
1383 *endp = NULL;
1384 return;
1385 }
1386
1387 *startp = np;
1388
1389 /* see if end is in this region itself */
1390 if (end <= AVL_END(tree, np) ||
1391 np->avl_nextino == NULL ||
1392 (np->avl_nextino &&
1393 (end <= AVL_START(tree, np->avl_nextino)))) {
1394 *endp = np;
1395 return;
1396 }
1397 /* have to munge for end */
1398 /*
1399 * note: have to look for (end - 1), since
1400 * findadjacent will look for exact value, and does not
1401 * care about the fact that end is actually one more
1402 * than the value actually being looked for; thus feed it one less.
1403 */
1404 *endp = avl64_findadjacent(tree, (end-1), AVL_PRECEED);
1405 ASSERT(*endp);
1406 }
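/*
 * Sketch of consuming the chain handed back by avl64_findranges() (guard
 * and helper name hypothetical).  The elements from *startp through *endp
 * are already in ascending order and are walked with avl_nextino, stopping
 * once *endp has been visited.
 */
#ifdef AVL64_EXAMPLES
static int
example_count_ranges(
	avl64tree_desc_t	*tree,
	uint64_t		start,
	uint64_t		end)
{
	avl64node_t		*first, *last, *np;
	int			count = 0;

	avl64_findranges(tree, start, end, &first, &last);
	if (first == NULL)
		return 0;

	for (np = first; np != NULL; np = np->avl_nextino) {
		count++;
		if (np == last)
			break;
	}
	return count;
}
#endif /* AVL64_EXAMPLES */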