/*
 * Copyright (c) 2000-2002,2005 Silicon Graphics, Inc.
 * All Rights Reserved.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it would be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <stdint.h>
#include <stdio.h>
#include "platform_defs.h"
#include "avl64.h"

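/*
 * This file implements a 64-bit AVL tree whose nodes describe
 * half-open ranges [AVL_START, AVL_END).  Besides the usual
 * avl_back/avl_forw/avl_parent links and the avl_balance field,
 * every node carries an avl_nextino pointer that threads the nodes
 * in ascending order; tree->avl_firstino anchors that thread.  The
 * key accessors come from the avl64ops_t passed to avl64_init_tree(),
 * and ranges are expected not to overlap: avl64_insert() refuses a
 * duplicate range with a warning.
 */
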
#define CERT	ASSERT

#ifdef AVL_DEBUG

static void
avl64_checknode(
	avl64tree_desc_t *tree,
	avl64node_t *np)
{
	avl64node_t *back = np->avl_back;
	avl64node_t *forw = np->avl_forw;
	avl64node_t *nextino = np->avl_nextino;
	int bal = np->avl_balance;

	ASSERT(bal != AVL_BALANCE || (!back && !forw) || (back && forw));
	ASSERT(bal != AVL_FORW || forw);
	ASSERT(bal != AVL_BACK || back);

	if (forw) {
		ASSERT(AVL_START(tree, np) < AVL_START(tree, forw));
		ASSERT(np->avl_forw->avl_parent == np);
		ASSERT(back || bal == AVL_FORW);
	} else {
		ASSERT(bal != AVL_FORW);
		ASSERT(bal == AVL_BALANCE || back);
		ASSERT(bal == AVL_BACK || !back);
	}

	if (back) {
		ASSERT(AVL_START(tree, np) > AVL_START(tree, back));
		ASSERT(np->avl_back->avl_parent == np);
		ASSERT(forw || bal == AVL_BACK);
	} else {
		ASSERT(bal != AVL_BACK);
		ASSERT(bal == AVL_BALANCE || forw);
		ASSERT(bal == AVL_FORW || !forw);
	}

	if (nextino == NULL)
		ASSERT(forw == NULL);
	else
		ASSERT(AVL_END(tree, np) <= AVL_START(tree, nextino));
}

static void
avl64_checktree(
	avl64tree_desc_t *tree,
	avl64node_t *root)
{
	avl64node_t *nlast, *nnext, *np;
	uint64_t offset = 0;
	uint64_t end;

	nlast = nnext = root;

	ASSERT(!nnext || nnext->avl_parent == NULL);

	while (nnext) {

		avl64_checknode(tree, nnext);
		end = AVL_END(tree, nnext);

		if (end <= offset) {
			if ((np = nnext->avl_forw) && np != nlast) {
				nlast = nnext;
				nnext = np;
			} else {
				nlast = nnext;
				nnext = nnext->avl_parent;
			}
			continue;
		}

		nlast = nnext;
		if ((np = nnext->avl_back)) {
			if (AVL_END(tree, np) > offset) {
				nnext = np;
				continue;
			}
		}

		np = nnext;
		nnext = nnext->avl_forw;
		if (!nnext)
			nnext = np->avl_parent;

		offset = end;
	}
}
#else	/* ! AVL_DEBUG */
#define avl64_checktree(t,x)
#endif	/* AVL_DEBUG */


/*
 * Reset balance for np up through the tree.
 * ``direction'' is the way that np's balance
 * is headed after the deletion of one of its children --
 * e.g., deleting an avl_forw child sends avl_balance toward AVL_BACK.
 * Called only when deleting a node from the tree.
 */
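/*
 * Each pass of the loop below either terminates (the local height
 * change has been absorbed) or performs at most one single or double
 * rotation and moves one level closer to the root, so deletion
 * rebalancing costs O(log n).
 */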
static void
retreat(
	avl64tree_desc_t *tree,
	avl64node_t *np,
	int direction)
{
	avl64node_t **rootp = &tree->avl_root;
	avl64node_t *parent;
	avl64node_t *child;
	avl64node_t *tmp;
	int bal;

	do {
		ASSERT(direction == AVL_BACK || direction == AVL_FORW);

		if (np->avl_balance == AVL_BALANCE) {
			np->avl_balance = direction;
			return;
		}

		parent = np->avl_parent;

		/*
		 * If balance is being restored, no local node
		 * reorganization is necessary, but it may be needed at
		 * a higher node.  Reset direction and continue.
		 */
		if (direction != np->avl_balance) {
			np->avl_balance = AVL_BALANCE;
			if (parent) {
				if (parent->avl_forw == np)
					direction = AVL_BACK;
				else
					direction = AVL_FORW;

				np = parent;
				continue;
			}
			return;
		}

		/*
		 * Imbalance.  If an avl_forw node was removed, direction
		 * (and, by extension, np->avl_balance) is/was AVL_BACK.
		 */
		if (np->avl_balance == AVL_BACK) {

			ASSERT(direction == AVL_BACK);
			child = np->avl_back;
			bal = child->avl_balance;

			if (bal != AVL_FORW) /* single LL */ {
				/*
				 * np gets pushed down to the lesser
				 * child's avl_forw branch.
				 *
				 *	np->	-D		+B
				 *		/ \		/ \
				 * child->     B   deleted     A   -D
				 *	      / \		   /
				 *	     A	 C		  C
				cmn_err(CE_CONT, "!LL delete b 0x%x c 0x%x\n",
					np, child);
				 */

				np->avl_back = child->avl_forw;
				if (child->avl_forw)
					child->avl_forw->avl_parent = np;
				child->avl_forw = np;

				if (parent) {
					if (parent->avl_forw == np) {
						parent->avl_forw = child;
						direction = AVL_BACK;
					} else {
						ASSERT(parent->avl_back == np);
						parent->avl_back = child;
						direction = AVL_FORW;
					}
				} else {
					ASSERT(*rootp == np);
					*rootp = child;
				}
				np->avl_parent = child;
				child->avl_parent = parent;

				if (bal == AVL_BALANCE) {
					np->avl_balance = AVL_BACK;
					child->avl_balance = AVL_FORW;
					return;
				} else {
					np->avl_balance = AVL_BALANCE;
					child->avl_balance = AVL_BALANCE;
					np = parent;
					avl64_checktree(tree, *rootp);
					continue;
				}
			}

			/* child->avl_balance == AVL_FORW  double LR rotation
			 *
			 * child's avl_forw node gets promoted up, along with
			 * its avl_forw subtree
			 *
			 *	np->	-G		 C
			 *		/ \		/ \
			 * child->    +B   H	      -B   G
			 *	      / \   \	      /	  / \
			 *	     A	+C   deleted A	 D   H
			 *		  \
			 *		   D
			cmn_err(CE_CONT, "!LR delete b 0x%x c 0x%x t 0x%x\n",
				np, child, child->avl_forw);
			 */

			tmp = child->avl_forw;
			bal = tmp->avl_balance;

			child->avl_forw = tmp->avl_back;
			if (tmp->avl_back)
				tmp->avl_back->avl_parent = child;

			tmp->avl_back = child;
			child->avl_parent = tmp;

			np->avl_back = tmp->avl_forw;
			if (tmp->avl_forw)
				tmp->avl_forw->avl_parent = np;
			tmp->avl_forw = np;

			if (bal == AVL_FORW)
				child->avl_balance = AVL_BACK;
			else
				child->avl_balance = AVL_BALANCE;

			if (bal == AVL_BACK)
				np->avl_balance = AVL_FORW;
			else
				np->avl_balance = AVL_BALANCE;

			goto next;
		}

		ASSERT(np->avl_balance == AVL_FORW && direction == AVL_FORW);

		child = np->avl_forw;
		bal = child->avl_balance;

		if (bal != AVL_BACK) /* single RR */ {
			/*
			 * np gets pushed down to the greater child's
			 * avl_back branch.
			 *
			 *	np->	+B		  -D
			 *		/ \		  / \
			 *	 deleted   D <-child	+B   E
			 *		  / \		  \
			 *		 C   E		   C
			cmn_err(CE_CONT, "!RR delete b 0x%x c 0x%x\n",
				np, child);
			 */

			np->avl_forw = child->avl_back;
			if (child->avl_back)
				child->avl_back->avl_parent = np;
			child->avl_back = np;

			if (parent) {
				if (parent->avl_forw == np) {
					parent->avl_forw = child;
					direction = AVL_BACK;
				} else {
					ASSERT(parent->avl_back == np);
					parent->avl_back = child;
					direction = AVL_FORW;
				}
			} else {
				ASSERT(*rootp == np);
				*rootp = child;
			}
			np->avl_parent = child;
			child->avl_parent = parent;

			if (bal == AVL_BALANCE) {
				np->avl_balance = AVL_FORW;
				child->avl_balance = AVL_BACK;
				return;
			} else {
				np->avl_balance = AVL_BALANCE;
				child->avl_balance = AVL_BALANCE;
				np = parent;
				avl64_checktree(tree, *rootp);
				continue;
			}
		}

		/* child->avl_balance == AVL_BACK  double RL rotation
		cmn_err(CE_CONT, "!RL delete b 0x%x c 0x%x t 0x%x\n",
			np, child, child->avl_back);
		 */

		tmp = child->avl_back;
		bal = tmp->avl_balance;

		child->avl_back = tmp->avl_forw;
		if (tmp->avl_forw)
			tmp->avl_forw->avl_parent = child;

		tmp->avl_forw = child;
		child->avl_parent = tmp;

		np->avl_forw = tmp->avl_back;
		if (tmp->avl_back)
			tmp->avl_back->avl_parent = np;
		tmp->avl_back = np;

		if (bal == AVL_BACK)
			child->avl_balance = AVL_FORW;
		else
			child->avl_balance = AVL_BALANCE;

		if (bal == AVL_FORW)
			np->avl_balance = AVL_BACK;
		else
			np->avl_balance = AVL_BALANCE;
next:
		np->avl_parent = tmp;
		tmp->avl_balance = AVL_BALANCE;
		tmp->avl_parent = parent;

		if (parent) {
			if (parent->avl_forw == np) {
				parent->avl_forw = tmp;
				direction = AVL_BACK;
			} else {
				ASSERT(parent->avl_back == np);
				parent->avl_back = tmp;
				direction = AVL_FORW;
			}
		} else {
			ASSERT(*rootp == np);
			*rootp = tmp;
			return;
		}

		np = parent;
		avl64_checktree(tree, *rootp);
	} while (np);
}

/*
 * Remove node from tree.
 * avl64_delete does the local tree manipulations and calls retreat()
 * to rebalance the tree up to its root.
 */
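/*
 * Deletion falls into three cases of increasing difficulty: np has at
 * most one child, so the child (if any) is spliced into np's place;
 * np has two children but its avl_back child has no avl_forw subtree,
 * so that child inherits np's position; otherwise np's in-order
 * predecessor -- the rightmost node of the avl_back subtree -- is
 * unlinked and takes np's place.
 */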
void
avl64_delete(
	avl64tree_desc_t *tree,
	avl64node_t *np)
{
	avl64node_t *forw = np->avl_forw;
	avl64node_t *back = np->avl_back;
	avl64node_t *parent = np->avl_parent;
	avl64node_t *nnext;


	if (np->avl_back) {
		/*
		 * A left child exists, so the greatest left descendant's
		 * nextino is pointing to np; make it point to
		 * np->avl_nextino.
		 */
		nnext = np->avl_back;
		while (nnext) {
			if (!nnext->avl_forw)
				break; /* can't find anything bigger */
			nnext = nnext->avl_forw;
		}
	} else
	if (np->avl_parent) {
		/*
		 * Find the nearest ancestor with a lesser value.  That
		 * ancestor's nextino is pointing to np; make it point
		 * to np->avl_nextino.
		 */
		nnext = np->avl_parent;
		while (nnext) {
			if (AVL_END(tree, nnext) <= AVL_END(tree, np))
				break;
			nnext = nnext->avl_parent;
		}
	} else
		nnext = NULL;

	if (nnext) {
		ASSERT(nnext->avl_nextino == np);
		nnext->avl_nextino = np->avl_nextino;
		/*
		 * Something precedes np; np cannot be firstino.
		 */
		ASSERT(tree->avl_firstino != np);
	}
	else {
		/*
		 * Nothing precedes np; after deletion, np's nextino
		 * is firstino of tree.
		 */
		ASSERT(tree->avl_firstino == np);
		tree->avl_firstino = np->avl_nextino;
	}


	/*
	 * Degenerate cases...
	 */
	if (forw == NULL) {
		forw = back;
		goto attach;
	}

	if (back == NULL) {
attach:
		if (forw)
			forw->avl_parent = parent;
		if (parent) {
			if (parent->avl_forw == np) {
				parent->avl_forw = forw;
				retreat(tree, parent, AVL_BACK);
			} else {
				ASSERT(parent->avl_back == np);
				parent->avl_back = forw;
				retreat(tree, parent, AVL_FORW);
			}
		} else {
			ASSERT(tree->avl_root == np);
			tree->avl_root = forw;
		}
		avl64_checktree(tree, tree->avl_root);
		return;
	}

	/*
	 * Harder case: children on both sides.
	 * If back's avl_forw pointer is null, just have back
	 * inherit np's avl_forw tree, remove np from the tree
	 * and adjust balance counters starting at back.
	 *
	 *	np->	xI		xH	(before retreat())
	 *		/ \		/ \
	 * back->      H   J	       G   J
	 *	      /		      / \
	 *	     G		     ?	 ?
	 *	    / \
	 *	   ?   ?
	 */
	if ((forw = back->avl_forw) == NULL) {
		/*
		 * The AVL_FORW retreat below will set back's
		 * balance to AVL_BACK.
		 */
		back->avl_balance = np->avl_balance;
		back->avl_forw = forw = np->avl_forw;
		forw->avl_parent = back;
		back->avl_parent = parent;

		if (parent) {
			if (parent->avl_forw == np)
				parent->avl_forw = back;
			else {
				ASSERT(parent->avl_back == np);
				parent->avl_back = back;
			}
		} else {
			ASSERT(tree->avl_root == np);
			tree->avl_root = back;
		}

		/*
		 * back is taking np's place in the tree, and
		 * has therefore lost an avl_back node (itself).
		 */
		retreat(tree, back, AVL_FORW);
		avl64_checktree(tree, tree->avl_root);
		return;
	}

	/*
	 * Hardest case: children on both sides, and back's
	 * avl_forw pointer isn't null.  Find the in-order
	 * predecessor by following back's avl_forw line
	 * to the end, then have it inherit np's avl_forw tree.
	 *
	 *	np->	xI		 xH
	 *		/ \		 / \
	 *	       G   J   back->	G   J	(before retreat())
	 *	      / \	       / \
	 *	     F	 ?...	      F	  ?1
	 *	    / \
	 *	   ?   H  <-forw
	 *	      /
	 *	     ?1
	 */
	while ((back = forw->avl_forw))
		forw = back;

	/*
	 * Will be adjusted by retreat() below.
	 */
	forw->avl_balance = np->avl_balance;

	/*
	 * forw inherits np's avl_forw...
	 */
	forw->avl_forw = np->avl_forw;
	np->avl_forw->avl_parent = forw;

	/*
	 * ... forw's parent gets forw's avl_back...
	 */
	back = forw->avl_parent;
	back->avl_forw = forw->avl_back;
	if (forw->avl_back)
		forw->avl_back->avl_parent = back;

	/*
	 * ... forw gets np's avl_back...
	 */
	forw->avl_back = np->avl_back;
	np->avl_back->avl_parent = forw;

	/*
	 * ... and forw gets np's parent.
	 */
	forw->avl_parent = parent;

	if (parent) {
		if (parent->avl_forw == np)
			parent->avl_forw = forw;
		else
			parent->avl_back = forw;
	} else {
		ASSERT(tree->avl_root == np);
		tree->avl_root = forw;
	}

	/*
	 * What used to be forw's parent is the starting
	 * point for rebalancing.  It has lost an avl_forw node.
	 */
	retreat(tree, back, AVL_BACK);
	avl64_checktree(tree, tree->avl_root);
}


/*
 * avl64_findanyrange:
 *
 * Given range r [start, end), find any range which is contained in r.
 * If checklen is non-zero, then only ranges of non-zero length are
 * considered in finding a match.
 */
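/*
 * Example (sketch): probe for any range overlapping [start, end),
 * including zero-length records:
 *
 *	avl64node_t *np;
 *
 *	np = avl64_findanyrange(tree, start, end, AVL_INCLUDE_ZEROLEN);
 *	if (np)
 *		the range [AVL_START(tree, np), AVL_END(tree, np))
 *		intersects [start, end);
 *	else
 *		nothing in the tree intersects [start, end);
 */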
avl64node_t *
avl64_findanyrange(
	avl64tree_desc_t *tree,
	uint64_t start,
	uint64_t end,
	int checklen)
{
	avl64node_t *np = tree->avl_root;

	/* np = avl64_findadjacent(tree, start, AVL_SUCCEED); */
	while (np) {
		if (start < AVL_START(tree, np)) {
			if (np->avl_back) {
				np = np->avl_back;
				continue;
			}
			/*
			 * If we were to add a node with start, it would
			 * have a growth of AVL_BACK; if a succeeding
			 * node is needed, this is it.
			 */
			break;
		}
		if (start >= AVL_END(tree, np)) {
			if (np->avl_forw) {
				np = np->avl_forw;
				continue;
			}
			/*
			 * If we were to add a node with start, it would
			 * have a growth of AVL_FORW; we are looking for
			 * a succeeding node, and that is nextino.
			 */
			np = np->avl_nextino;
			break;
		}
		/* AVL_START(tree, np) <= start < AVL_END(tree, np) */
		break;
	}
	if (np) {
		if (checklen == AVL_INCLUDE_ZEROLEN) {
			if (end <= AVL_START(tree, np)) {
				/*
				 * Something follows start, but it is
				 * entirely after the range (end).
				 */
				return NULL;
			}
			/* np may straddle [start, end) */
			return np;
		}
		/*
		 * Find a non-zero length region.
		 */
		while (np && (AVL_END(tree, np) - AVL_START(tree, np) == 0)
				&& (AVL_START(tree, np) < end))
			np = np->avl_nextino;

		if ((np == NULL) || (AVL_START(tree, np) >= end))
			return NULL;
		return np;
	}
	/*
	 * Nothing succeeds start; all existing ranges are before start.
	 */
	return NULL;
}


/*
 * Returns a pointer to the range which contains value.
 */
avl64node_t *
avl64_findrange(
	avl64tree_desc_t *tree,
	uint64_t value)
{
	avl64node_t *np = tree->avl_root;

	while (np) {
		if (value < AVL_START(tree, np)) {
			np = np->avl_back;
			continue;
		}
		if (value >= AVL_END(tree, np)) {
			np = np->avl_forw;
			continue;
		}
		ASSERT(AVL_START(tree, np) <= value &&
		       value < AVL_END(tree, np));
		return np;
	}
	return NULL;
}


/*
 * Returns a pointer to the node which starts at exactly value.
 */
avl64node_t *
avl64_find(
	avl64tree_desc_t *tree,
	uint64_t value)
{
	avl64node_t *np = tree->avl_root;
	uint64_t nvalue;

	while (np) {
		nvalue = AVL_START(tree, np);
		if (value < nvalue) {
			np = np->avl_back;
			continue;
		}
		if (value == nvalue) {
			return np;
		}
		np = np->avl_forw;
	}
	return NULL;
}


/*
 * Rebalance the AVL tree after attaching a new node below np.
 * Called only by avl64_insert and avl64_insert_immediate.
 */
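/*
 * Unlike deletion, a single insertion changes subtree heights only
 * until one rotation (single or double) restores balance, so the
 * loop below terminates as soon as it has rotated once.
 */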
static void
avl64_balance(
	avl64node_t **rootp,
	avl64node_t *np,
	int growth)
{
	/*
	 * At this point, np points to the node to which
	 * a new node has been attached.  All that remains is to
	 * propagate avl_balance up the tree.
	 */
	for ( ; ; ) {
		avl64node_t *parent = np->avl_parent;
		avl64node_t *child;

		CERT(growth == AVL_BACK || growth == AVL_FORW);

		/*
		 * If the node was already balanced, set avl_balance
		 * to the new direction.  Continue if there is a
		 * parent after setting growth to reflect np's
		 * relation to its parent.
		 */
		if (np->avl_balance == AVL_BALANCE) {
			np->avl_balance = growth;
			if (parent) {
				if (parent->avl_forw == np)
					growth = AVL_FORW;
				else {
					ASSERT(parent->avl_back == np);
					growth = AVL_BACK;
				}

				np = parent;
				continue;
			}
			break;
		}

		if (growth != np->avl_balance) {
			/*
			 * Subtree is now balanced -- no net change in
			 * the height of the subtree, so leave.
			 */
			np->avl_balance = AVL_BALANCE;
			break;
		}

		if (growth == AVL_BACK) {

			child = np->avl_back;
			CERT(np->avl_balance == AVL_BACK && child);

			if (child->avl_balance == AVL_BACK) { /* single LL */
				/*
				 * ``A'' just got inserted;
				 * np points to ``E'', child to ``C'',
				 * and it is already AVL_BACK --
				 * child will get promoted to top of subtree.

					np->	-E		C
						/ \	       / \
				    child->   -C   F	     -B	  E
					      / \	     /	 / \
					    -B	 D	    A	D   F
					    /
					   A

				   Note that child->avl_parent and
				   avl_balance get set in common code.
				 */
				np->avl_parent = child;
				np->avl_balance = AVL_BALANCE;
				np->avl_back = child->avl_forw;
				if (child->avl_forw)
					child->avl_forw->avl_parent = np;
				child->avl_forw = np;
			} else {
				/*
				 * double LR
				 *
				 * child's avl_forw node gets promoted to
				 * the top of the subtree.

					np->	-E		C
						/ \	       / \
				    child->   +B   F	     -B	  E
					      / \	     /	 / \
					     A	+C	    A	D   F
						  \
						   D

				 */
				avl64node_t *tmp = child->avl_forw;

				CERT(child->avl_balance == AVL_FORW && tmp);

				child->avl_forw = tmp->avl_back;
				if (tmp->avl_back)
					tmp->avl_back->avl_parent = child;

				tmp->avl_back = child;
				child->avl_parent = tmp;

				np->avl_back = tmp->avl_forw;
				if (tmp->avl_forw)
					tmp->avl_forw->avl_parent = np;

				tmp->avl_forw = np;
				np->avl_parent = tmp;

				if (tmp->avl_balance == AVL_BACK)
					np->avl_balance = AVL_FORW;
				else
					np->avl_balance = AVL_BALANCE;

				if (tmp->avl_balance == AVL_FORW)
					child->avl_balance = AVL_BACK;
				else
					child->avl_balance = AVL_BALANCE;

				/*
				 * Set child to point to tmp since it is
				 * now the top of the subtree, and will
				 * get attached to the subtree parent in
				 * the common code below.
				 */
				child = tmp;
			}

		} else /* growth == AVL_FORW */ {

			/*
			 * This code is the mirror image of the AVL_BACK
			 * case above.
			 */

			child = np->avl_forw;
			CERT(np->avl_balance == AVL_FORW && child);

			if (child->avl_balance == AVL_FORW) { /* single RR */
				np->avl_parent = child;
				np->avl_balance = AVL_BALANCE;
				np->avl_forw = child->avl_back;
				if (child->avl_back)
					child->avl_back->avl_parent = np;
				child->avl_back = np;
			} else {
				/*
				 * double RL
				 */
				avl64node_t *tmp = child->avl_back;

				ASSERT(child->avl_balance == AVL_BACK && tmp);

				child->avl_back = tmp->avl_forw;
				if (tmp->avl_forw)
					tmp->avl_forw->avl_parent = child;

				tmp->avl_forw = child;
				child->avl_parent = tmp;

				np->avl_forw = tmp->avl_back;
				if (tmp->avl_back)
					tmp->avl_back->avl_parent = np;

				tmp->avl_back = np;
				np->avl_parent = tmp;

				if (tmp->avl_balance == AVL_FORW)
					np->avl_balance = AVL_BACK;
				else
					np->avl_balance = AVL_BALANCE;

				if (tmp->avl_balance == AVL_BACK)
					child->avl_balance = AVL_FORW;
				else
					child->avl_balance = AVL_BALANCE;

				child = tmp;
			}
		}

		child->avl_parent = parent;
		child->avl_balance = AVL_BALANCE;

		if (parent) {
			if (parent->avl_back == np)
				parent->avl_back = child;
			else
				parent->avl_forw = child;
		} else {
			ASSERT(*rootp == np);
			*rootp = child;
		}

		break;
	}
}

static
avl64node_t *
avl64_insert_find_growth(
	avl64tree_desc_t *tree,
	uint64_t start,	/* range starts at start, */
	uint64_t end,	/* exclusive */
	int *growthp)	/* OUT */
{
	avl64node_t *root = tree->avl_root;
	avl64node_t *np;

	np = root;
	ASSERT(np); /* caller ensures that there is at least one node in tree */

	for ( ; ; ) {
		CERT(np->avl_parent || root == np);
		CERT(!np->avl_parent || root != np);
		CERT(!(np->avl_back) || np->avl_back->avl_parent == np);
		CERT(!(np->avl_forw) || np->avl_forw->avl_parent == np);
		CERT(np->avl_balance != AVL_FORW || np->avl_forw);
		CERT(np->avl_balance != AVL_BACK || np->avl_back);
		CERT(np->avl_balance != AVL_BALANCE ||
		     np->avl_back == NULL || np->avl_forw);
		CERT(np->avl_balance != AVL_BALANCE ||
		     np->avl_forw == NULL || np->avl_back);

		if (AVL_START(tree, np) >= end) {
			if (np->avl_back) {
				np = np->avl_back;
				continue;
			}
			*growthp = AVL_BACK;
			break;
		}

		if (AVL_END(tree, np) <= start) {
			if (np->avl_forw) {
				np = np->avl_forw;
				continue;
			}
			*growthp = AVL_FORW;
			break;
		}
		/* found exact match -- let caller decide if it is an error */
		return NULL;
	}
	return np;
}


static void
avl64_insert_grow(
	avl64tree_desc_t *tree,
	avl64node_t *parent,
	avl64node_t *newnode,
	int growth)
{
	avl64node_t *nnext;
	uint64_t start = AVL_START(tree, newnode);

	if (growth == AVL_BACK) {

		parent->avl_back = newnode;
		/*
		 * We are growing to the left; the node previous in order
		 * to newnode is the closest ancestor with a lesser value.
		 * Before this insertion, that ancestor is pointing to
		 * newnode's parent.  After insertion, the node next in
		 * order to newnode is the parent.
		 */
		newnode->avl_nextino = parent;
		nnext = parent;
		while (nnext) {
			if (AVL_END(tree, nnext) <= start)
				break;
			nnext = nnext->avl_parent;
		}
		if (nnext) {
			/*
			 * nnext will be null if newnode is
			 * the least element, and hence very first in the list.
			 */
			ASSERT(nnext->avl_nextino == parent);
			nnext->avl_nextino = newnode;
		}
	}
	else {
		parent->avl_forw = newnode;
		newnode->avl_nextino = parent->avl_nextino;
		parent->avl_nextino = newnode;
	}
}


avl64node_t *
avl64_insert(
	avl64tree_desc_t *tree,
	avl64node_t *newnode)
{
	avl64node_t *np;
	uint64_t start = AVL_START(tree, newnode);
	uint64_t end = AVL_END(tree, newnode);
	int growth;

	ASSERT(newnode);
	/*
	 * Clean all pointers for sanity; some will be reset as necessary.
	 */
	newnode->avl_nextino = NULL;
	newnode->avl_parent = NULL;
	newnode->avl_forw = NULL;
	newnode->avl_back = NULL;
	newnode->avl_balance = AVL_BALANCE;

	if ((np = tree->avl_root) == NULL) { /* degenerate case... */
		tree->avl_root = newnode;
		tree->avl_firstino = newnode;
		return newnode;
	}

	if ((np = avl64_insert_find_growth(tree, start, end, &growth))
			== NULL) {
		if (start != end) { /* non-zero length range */
			fprintf(stderr,
		_("avl_insert: Warning! duplicate range [%llu,%llu]\n"),
				(unsigned long long)start,
				(unsigned long long)end);
		}
		return NULL;
	}

	avl64_insert_grow(tree, np, newnode, growth);
	if (growth == AVL_BACK) {
		/*
		 * Growing to the left; if np was firstino, newnode
		 * becomes firstino.
		 */
		if (tree->avl_firstino == np)
			tree->avl_firstino = newnode;
	}
#ifdef notneeded
	else
	if (growth == AVL_FORW)
		/*
		 * Cannot possibly be firstino; there is somebody to our left.
		 */
		;
#endif

	newnode->avl_parent = np;
	CERT(np->avl_forw == newnode || np->avl_back == newnode);

	avl64_balance(&tree->avl_root, np, growth);

	avl64_checktree(tree, tree->avl_root);

	return newnode;
}

/*
 * avl64_insert_immediate(tree, afterp, newnode):
 *
 * Insert newnode into the tree immediately after afterp; after
 * insertion, newnode is the right child of afterp.
 */
void
avl64_insert_immediate(
	avl64tree_desc_t *tree,
	avl64node_t *afterp,
	avl64node_t *newnode)
{
	/*
	 * Clean all pointers for sanity; some will be reset as necessary.
	 */
	newnode->avl_nextino = NULL;
	newnode->avl_parent = NULL;
	newnode->avl_forw = NULL;
	newnode->avl_back = NULL;
	newnode->avl_balance = AVL_BALANCE;

	if (afterp == NULL) {
		tree->avl_root = newnode;
		tree->avl_firstino = newnode;
		return;
	}

	ASSERT(afterp->avl_forw == NULL);
	avl64_insert_grow(tree, afterp, newnode, AVL_FORW); /* grow to right */
	CERT(afterp->avl_forw == newnode);
	avl64_balance(&tree->avl_root, afterp, AVL_FORW);
	avl64_checktree(tree, tree->avl_root);
}


/*
 * Returns the first node in tree order.
 */
avl64node_t *
avl64_firstino(avl64node_t *root)
{
	avl64node_t *np;

	if ((np = root) == NULL)
		return NULL;

	while (np->avl_back)
		np = np->avl_back;
	return np;
}

/*
 * Returns the last node in tree order.
 */
avl64node_t *
avl64_lastino(avl64node_t *root)
{
	avl64node_t *np;

	if ((np = root) == NULL)
		return NULL;

	while (np->avl_forw)
		np = np->avl_forw;
	return np;
}

void
avl64_init_tree(avl64tree_desc_t *tree, avl64ops_t *ops)
{
	tree->avl_root = NULL;
	tree->avl_firstino = NULL;
	tree->avl_ops = ops;
}
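
/*
 * Example (sketch): a caller embeds avl64node_t at the start of its
 * own record type and supplies start/end accessors via avl64ops_t.
 * The names below (struct example_extent, example_*) are illustrative
 * only, not part of this API:
 *
 *	struct example_extent {
 *		avl64node_t	ex_node;	first member, so the
 *						casts below are valid
 *		uint64_t	ex_start;
 *		uint64_t	ex_len;
 *	};
 *
 *	static uint64_t example_start(avl64node_t *n)
 *	{
 *		return ((struct example_extent *)n)->ex_start;
 *	}
 *
 *	static uint64_t example_end(avl64node_t *n)
 *	{
 *		return ((struct example_extent *)n)->ex_start +
 *		       ((struct example_extent *)n)->ex_len;
 *	}
 *
 *	static avl64ops_t example_ops = {
 *		example_start,
 *		example_end,
 *	};
 *
 *	avl64_init_tree(&tree, &example_ops);
 *	avl64_insert(&tree, &extent->ex_node);
 */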

#ifdef AVL_DEBUG
static void
avl64_printnode(avl64tree_desc_t *tree, avl64node_t *np, int nl)
{
	printf("[%llu-%llu]%c",
		(unsigned long long)AVL_START(tree, np),
		(unsigned long long)(AVL_END(tree, np) - 1),
		nl ? '\n' : ' ');
}
#endif
#ifdef STAND_ALONE_DEBUG

struct avl_debug_node {
	avl64node_t		avl_node;
	xfs_off_t		avl_start;
	unsigned int		avl_size;
};

static uint64_t
avl64_debug_start(avl64node_t *node)
{
	return (uint64_t)((struct avl_debug_node *)node)->avl_start;
}

static uint64_t
avl64_debug_end(avl64node_t *node)
{
	return (uint64_t)(((struct avl_debug_node *)node)->avl_start +
			  ((struct avl_debug_node *)node)->avl_size);
}

avl64ops_t avl_debug_ops = {
	avl64_debug_start,
	avl64_debug_end,
};

struct avl_debug_node freenodes[100];
struct avl_debug_node *freehead = &freenodes[0];

static avl64node_t *
alloc_avl64_debug_node(void)
{
	avl64node_t *np = &freehead->avl_node;

	np->avl_balance = AVL_BALANCE;
	np->avl_parent = np->avl_forw = np->avl_back = NULL;
	freehead++;
	return np;
}

static void
avl64_print(avl64tree_desc_t *tree, avl64node_t *root, int depth)
{
	int i;

	if (!root)
		return;
	if (root->avl_forw)
		avl64_print(tree, root->avl_forw, depth + 5);
	for (i = 0; i < depth; i++)
		putchar((int)' ');
	avl64_printnode(tree, root, 1);
	if (root->avl_back)
		avl64_print(tree, root->avl_back, depth + 5);
}

int
main(void)
{
	int i, j;
	avl64node_t *np;
	avl64tree_desc_t tree;
	char linebuf[256], cmd[256];

	avl64_init_tree(&tree, &avl_debug_ops);

	for (i = 100; i > 0; i = i - 10) {
		np = alloc_avl64_debug_node();
		ASSERT(np);
		((struct avl_debug_node *)np)->avl_start = i;
		((struct avl_debug_node *)np)->avl_size = 10;
		avl64_insert(&tree, np);
	}
	avl64_print(&tree, tree.avl_root, 0);

	for (np = tree.avl_firstino; np != NULL; np = np->avl_nextino)
		avl64_printnode(&tree, np, 0);
	printf("\n");

	while (1) {
		printf(_("Command [fpdir] : "));
		fgets(linebuf, 256, stdin);
		if (feof(stdin))
			break;
		cmd[0] = '\0';
		if (sscanf(linebuf, "%[fpdir]%d", cmd, &i) != 2)
			continue;
		switch (cmd[0]) {
		case 'd':
		case 'f':
			printf(_("end of range ? "));
			fgets(linebuf, 256, stdin);
			j = atoi(linebuf);

			if (i == j)
				j = i + 1;
			/* look up any range overlapping [i, j) */
			np = avl64_findanyrange(&tree, i, j,
					AVL_INCLUDE_ZEROLEN);
			if (np) {
				avl64_printnode(&tree, np, 1);
				if (cmd[0] == 'd')
					avl64_delete(&tree, np);
			} else
				printf(_("Cannot find %d\n"), i);
			break;
		case 'p':
			avl64_print(&tree, tree.avl_root, 0);
			for (np = tree.avl_firstino;
			     np != NULL; np = np->avl_nextino)
				avl64_printnode(&tree, np, 0);
			printf("\n");
			break;
		case 'i':
			np = alloc_avl64_debug_node();
			ASSERT(np);
			((struct avl_debug_node *)np)->avl_start = i;
			printf(_("size of range ? "));
			fgets(linebuf, 256, stdin);
			j = atoi(linebuf);

			((struct avl_debug_node *)np)->avl_size = j;
			avl64_insert(&tree, np);
			break;
		case 'r': {
			avl64node_t *b, *t;
			int checklen;

			printf(_("End of range ? "));
			fgets(linebuf, 256, stdin);
			j = atoi(linebuf);

			printf(_("checklen 0/1 ? "));
			fgets(linebuf, 256, stdin);
			checklen = atoi(linebuf);


			b = avl64_findanyrange(&tree, i, j, checklen);
			if (b) {
				printf(_("Found something\n"));
				t = b;
				while (t) {
					if (t != b &&
					    AVL_START(&tree, t) >= j)
						break;
					avl64_printnode(&tree, t, 0);
					t = t->avl_nextino;
				}
				printf("\n");
			}
		}
		}
	}
	return 0;
}
#endif

/*
 * Given a tree, find value; this will return the range enclosing
 * value, or the range immediately succeeding value, or the range
 * immediately preceding value.
 */
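/*
 * Example (sketch): dir selects which neighbor is wanted when value
 * is not enclosed by any range:
 *
 *	np = avl64_findadjacent(tree, value, AVL_SUCCEED);
 *
 * returns the enclosing range if there is one, otherwise the nearest
 * range above value; AVL_PRECEED asks for the nearest range below.
 */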
avl64node_t *
avl64_findadjacent(
	avl64tree_desc_t *tree,
	uint64_t value,
	int dir)
{
	avl64node_t *np = tree->avl_root;

	while (np) {
		if (value < AVL_START(tree, np)) {
			if (np->avl_back) {
				np = np->avl_back;
				continue;
			}
			/*
			 * If we were to add a node with value, it would
			 * have a growth of AVL_BACK.
			 */
			if (dir == AVL_SUCCEED) {
				/* if a succeeding node is needed, this is it. */
				return np;
			}
			if (dir == AVL_PRECEED) {
				/*
				 * Find the nearest ancestor with a
				 * lesser value.
				 */
				np = np->avl_parent;
				while (np) {
					if (AVL_END(tree, np) <= value)
						break;
					np = np->avl_parent;
				}
				return np;
			}
			ASSERT(dir == AVL_SUCCEED || dir == AVL_PRECEED);
			break;
		}
		if (value >= AVL_END(tree, np)) {
			if (np->avl_forw) {
				np = np->avl_forw;
				continue;
			}
			/*
			 * If we were to add a node with value, it would
			 * have a growth of AVL_FORW.
			 */
			if (dir == AVL_SUCCEED) {
				/*
				 * We are looking for a succeeding node;
				 * this is nextino.
				 */
				return np->avl_nextino;
			}
			if (dir == AVL_PRECEED) {
				/* looking for a preceding node; this is it. */
				return np;
			}
			ASSERT(dir == AVL_SUCCEED || dir == AVL_PRECEED);
		}
		/* AVL_START(tree, np) <= value < AVL_END(tree, np) */
		return np;
	}
	return NULL;
}


/*
 * avl64_findranges:
 *
 * Given range r [start, end), find all ranges in the tree which are
 * contained in r.  At return, startp and endp point to the first and
 * last of a chain of elements which describe the contained ranges.
 * Elements in startp ... endp are in sort order, and can be accessed
 * by using avl_nextino.
 */

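/*
 * Example (sketch): visit every range contained in [start, end):
 *
 *	avl64node_t *first, *last, *np;
 *
 *	avl64_findranges(tree, start, end, &first, &last);
 *	for (np = first; np != NULL; np = np->avl_nextino) {
 *		process np;
 *		if (np == last)
 *			break;
 *	}
 */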
void
avl64_findranges(
	avl64tree_desc_t *tree,
	uint64_t start,
	uint64_t end,
	avl64node_t **startp,
	avl64node_t **endp)
{
	avl64node_t *np;

	np = avl64_findadjacent(tree, start, AVL_SUCCEED);
	if (np == NULL	/* nothing succeeding start */
	    || (np && (end <= AVL_START(tree, np)))) {
		/* something follows start, but it is entirely after end */
		*startp = NULL;
		*endp = NULL;
		return;
	}

	*startp = np;

	/* see if end is in this region itself */
	if (end <= AVL_END(tree, np) ||
	    np->avl_nextino == NULL ||
	    (np->avl_nextino &&
	     (end <= AVL_START(tree, np->avl_nextino)))) {
		*endp = np;
		return;
	}
	/* have to munge for end */
	/*
	 * Note: we have to look for (end - 1), since findadjacent
	 * looks for an exact value and does not care that end is
	 * actually one more than the value actually being looked
	 * for; thus we feed it one less.
	 */
	*endp = avl64_findadjacent(tree, (end - 1), AVL_PRECEED);
	ASSERT(*endp);
}