]> git.ipfire.org Git - thirdparty/xfsprogs-dev.git/blob - repair/avl64.c
Undoes mod: xfs-cmds:slinx:120772a
[thirdparty/xfsprogs-dev.git] / repair / avl64.c
1 /**************************************************************************
2 * *
3 * Copyright (c) 2000 Silicon Graphics, Inc. All Rights Reserved.
4 *
5 * This program is free software; you can redistribute it and/or modify it
6 * under the terms of version 2 of the GNU General Public License as
7 * published by the Free Software Foundation.
8 *
9 * This program is distributed in the hope that it would be useful, but
10 * WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
12 *
13 * Further, this software is distributed without any warranty that it is
14 * free of the rightful claim of any third person regarding infringement
15 * or the like. Any license provided herein, whether implied or
16 * otherwise, applies only to this software file. Patent licenses, if
17 * any, provided herein do not apply to combinations of this program with
18 * other software, or any other product whatsoever.
19 *
20 * You should have received a copy of the GNU General Public License along
21 * with this program; if not, write the Free Software Foundation, Inc., 59
22 * Temple Place - Suite 330, Boston MA 02111-1307, USA.
23 *
24 * Contact information: Silicon Graphics, Inc., 1600 Amphitheatre Pkwy,
25 * Mountain View, CA 94043, or:
26 *
27 * http://www.sgi.com
28 *
29 * For further information regarding this notice, see:
30 *
31 * http://oss.sgi.com/projects/GenInfo/SGIGPLNoticeExplan/
32 * *
33 **************************************************************************/
34
35 #include <stdio.h>
36 #include <libxfs.h>
37 #include "avl64.h"
38
39 #define CERT ASSERT
40
41 #ifdef AVL_DEBUG
42
/*
 * Debug-only consistency check for a single node: verifies that the
 * balance factor agrees with which children exist, that child
 * back-pointers reference np, and that the threaded in-order list
 * (avl_nextino) is sorted and ends at the rightmost node.
 */
static void
avl64_checknode(
	register avl64tree_desc_t *tree,
	register avl64node_t *np)
{
	register avl64node_t *back = np->avl_back;
	register avl64node_t *forw = np->avl_forw;
	register avl64node_t *nextino = np->avl_nextino;
	register int bal = np->avl_balance;

	/* a balanced node has either both children or neither */
	ASSERT(bal != AVL_BALANCE || (!back && !forw) || (back && forw));
	/* a lean toward one side requires a child on that side */
	ASSERT(bal != AVL_FORW || forw);
	ASSERT(bal != AVL_BACK || back);

	if (forw) {
		/* right subtree keys are strictly greater */
		ASSERT(AVL_START(tree, np) < AVL_START(tree, forw));
		ASSERT(np->avl_forw->avl_parent == np);
		/* a lone right child forces a forward lean */
		ASSERT(back || bal == AVL_FORW);
	} else {
		ASSERT(bal != AVL_FORW);
		ASSERT(bal == AVL_BALANCE || back);
		ASSERT(bal == AVL_BACK || !back);
	}

	if (back) {
		/* left subtree keys are strictly smaller */
		ASSERT(AVL_START(tree, np) > AVL_START(tree, back));
		ASSERT(np->avl_back->avl_parent == np);
		/* a lone left child forces a backward lean */
		ASSERT(forw || bal == AVL_BACK);
	} else {
		ASSERT(bal != AVL_BACK);
		ASSERT(bal == AVL_BALANCE || forw);
		ASSERT(bal == AVL_FORW || !forw);
	}

	/*
	 * Only the in-order last node may have a NULL nextino; otherwise
	 * the successor's range must not start before np's range ends.
	 */
	if (nextino == NULL)
		ASSERT(forw == NULL);
	else
		ASSERT(AVL_END(tree, np) <= AVL_START(tree, nextino));
}
82
83 static void
84 avl64_checktree(
85 register avl64tree_desc_t *tree,
86 register avl64node_t *root)
87 {
88 register avl64node_t *nlast, *nnext, *np;
89 __uint64_t offset = 0;
90 __uint64_t end;
91
92 nlast = nnext = root;
93
94 ASSERT(!nnext || nnext->avl_parent == NULL);
95
96 while (nnext) {
97
98 avl64_checknode(tree, nnext);
99 end = AVL_END(tree, nnext);
100
101 if (end <= offset) {
102 if ((np = nnext->avl_forw) && np != nlast) {
103 nlast = nnext;
104 nnext = np;
105 } else {
106 nlast = nnext;
107 nnext = nnext->avl_parent;
108 }
109 continue;
110 }
111
112 nlast = nnext;
113 if (np = nnext->avl_back) {
114 if (AVL_END(tree, np) > offset) {
115 nnext = np;
116 continue;
117 }
118 }
119
120 np = nnext;
121 nnext = nnext->avl_forw;
122 if (!nnext)
123 nnext = np->avl_parent;
124
125 offset = end;
126 }
127 }
128 #else /* ! AVL_DEBUG */
129 #define avl64_checktree(t,x)
130 #endif /* AVL_DEBUG */
131
132
133 /*
134 * Reset balance for np up through tree.
135 * ``direction'' is the way that np's balance
136 * is headed after the deletion of one of its children --
137 * e.g., deleting a avl_forw child sends avl_balance toward AVL_BACK.
138 * Called only when deleting a node from the tree.
139 */
static void
retreat(
	avl64tree_desc_t *tree,
	register avl64node_t *np,	/* deepest node whose subtree shrank */
	register int direction)		/* side of np that lost height */
{
	register avl64node_t **rootp = &tree->avl_root;
	register avl64node_t *parent;
	register avl64node_t *child;
	register avl64node_t *tmp;
	register int bal;

	do {
		ASSERT(direction == AVL_BACK || direction == AVL_FORW);

		/*
		 * A balanced node just leans toward the surviving side;
		 * the subtree height is unchanged, so we can stop.
		 */
		if (np->avl_balance == AVL_BALANCE) {
			np->avl_balance = direction;
			return;
		}

		parent = np->avl_parent;

		/*
		 * If balance is being restored, no local node
		 * reorganization is necessary, but may be at
		 * a higher node.  Reset direction and continue.
		 */
		if (direction != np->avl_balance) {
			np->avl_balance = AVL_BALANCE;
			if (parent) {
				if (parent->avl_forw == np)
					direction = AVL_BACK;
				else
					direction = AVL_FORW;

				np = parent;
				continue;
			}
			return;
		}

		/*
		 * Imbalance.  If a avl_forw node was removed, direction
		 * (and, by reduction, np->avl_balance) is/was AVL_BACK.
		 */
		if (np->avl_balance == AVL_BACK) {

			ASSERT(direction == AVL_BACK);
			child = np->avl_back;
			bal = child->avl_balance;

			if (bal != AVL_FORW) /* single LL */ {
				/*
				 * np gets pushed down to lesser child's
				 * avl_forw branch.
				 *
				 *	np->	 -D	    +B
				 *		 / \	   / \
				 * child->	B   deleted A  -D
				 *	       / \	       /
				 *	      A   C	      C
				cmn_err(CE_CONT, "!LL delete b 0x%x c 0x%x\n",
					np, child);
				 */

				np->avl_back = child->avl_forw;
				if (child->avl_forw)
					child->avl_forw->avl_parent = np;
				child->avl_forw = np;

				/* splice child into np's old position */
				if (parent) {
					if (parent->avl_forw == np) {
						parent->avl_forw = child;
						direction = AVL_BACK;
					} else {
						ASSERT(parent->avl_back == np);
						parent->avl_back = child;
						direction = AVL_FORW;
					}
				} else {
					ASSERT(*rootp == np);
					*rootp = child;
				}
				np->avl_parent = child;
				child->avl_parent = parent;

				if (bal == AVL_BALANCE) {
					/* subtree height unchanged: done */
					np->avl_balance = AVL_BACK;
					child->avl_balance = AVL_FORW;
					return;
				} else {
					/* subtree shrank: keep walking up */
					np->avl_balance = AVL_BALANCE;
					child->avl_balance = AVL_BALANCE;
					np = parent;
					avl64_checktree(tree, *rootp);
					continue;
				}
			}

			/* child->avl_balance == AVL_FORW  double LR rotation
			 *
			 * child's avl_forw node gets promoted up, along with
			 * its avl_forw subtree
			 *
			 *	np->	 -G		C
			 *		 / \	       / \
			 * child->     +B   H	     -B   G
			 *	       / \   \	     /	 / \
			 *	      A  +C   deleted A	D   H
			 *	          \
			 *	           D
			cmn_err(CE_CONT, "!LR delete b 0x%x c 0x%x t 0x%x\n",
				np, child, child->avl_forw);
			 */

			tmp = child->avl_forw;
			bal = tmp->avl_balance;

			child->avl_forw = tmp->avl_back;
			if (tmp->avl_back)
				tmp->avl_back->avl_parent = child;

			tmp->avl_back = child;
			child->avl_parent = tmp;

			np->avl_back = tmp->avl_forw;
			if (tmp->avl_forw)
				tmp->avl_forw->avl_parent = np;
			tmp->avl_forw = np;

			/* new balances depend on tmp's old lean */
			if (bal == AVL_FORW)
				child->avl_balance = AVL_BACK;
			else
				child->avl_balance = AVL_BALANCE;

			if (bal == AVL_BACK)
				np->avl_balance = AVL_FORW;
			else
				np->avl_balance = AVL_BALANCE;

			/* shared double-rotation tail: attach tmp */
			goto next;
		}

		ASSERT(np->avl_balance == AVL_FORW && direction == AVL_FORW);

		child = np->avl_forw;
		bal = child->avl_balance;

		if (bal != AVL_BACK) /* single RR */ {
			/*
			 * np gets pushed down to greater child's
			 * avl_back branch.
			 *
			 *	np->	 +B		 -D
			 *		 / \		 / \
			 *    deleted   D <-child     +B   E
			 *	       / \	        \
			 *	      C   E	         C
			cmn_err(CE_CONT, "!RR delete b 0x%x c 0x%x\n",
				np, child);
			 */

			np->avl_forw = child->avl_back;
			if (child->avl_back)
				child->avl_back->avl_parent = np;
			child->avl_back = np;

			/* splice child into np's old position */
			if (parent) {
				if (parent->avl_forw == np) {
					parent->avl_forw = child;
					direction = AVL_BACK;
				} else {
					ASSERT(parent->avl_back == np);
					parent->avl_back = child;
					direction = AVL_FORW;
				}
			} else {
				ASSERT(*rootp == np);
				*rootp = child;
			}
			np->avl_parent = child;
			child->avl_parent = parent;

			if (bal == AVL_BALANCE) {
				/* subtree height unchanged: done */
				np->avl_balance = AVL_FORW;
				child->avl_balance = AVL_BACK;
				return;
			} else {
				/* subtree shrank: keep walking up */
				np->avl_balance = AVL_BALANCE;
				child->avl_balance = AVL_BALANCE;
				np = parent;
				avl64_checktree(tree, *rootp);
				continue;
			}
		}

		/* child->avl_balance == AVL_BACK  double RL rotation
		cmn_err(CE_CONT, "!RL delete b 0x%x c 0x%x t 0x%x\n",
			np, child, child->avl_back);
		 */

		tmp = child->avl_back;
		bal = tmp->avl_balance;

		child->avl_back = tmp->avl_forw;
		if (tmp->avl_forw)
			tmp->avl_forw->avl_parent = child;

		tmp->avl_forw = child;
		child->avl_parent = tmp;

		np->avl_forw = tmp->avl_back;
		if (tmp->avl_back)
			tmp->avl_back->avl_parent = np;
		tmp->avl_back = np;

		/* new balances depend on tmp's old lean */
		if (bal == AVL_BACK)
			child->avl_balance = AVL_FORW;
		else
			child->avl_balance = AVL_BALANCE;

		if (bal == AVL_FORW)
			np->avl_balance = AVL_BACK;
		else
			np->avl_balance = AVL_BALANCE;
next:
		/*
		 * Shared tail for both double rotations: tmp is the new
		 * subtree root; hook it where np used to hang, then
		 * continue rebalancing at the parent.
		 */
		np->avl_parent = tmp;
		tmp->avl_balance = AVL_BALANCE;
		tmp->avl_parent = parent;

		if (parent) {
			if (parent->avl_forw == np) {
				parent->avl_forw = tmp;
				direction = AVL_BACK;
			} else {
				ASSERT(parent->avl_back == np);
				parent->avl_back = tmp;
				direction = AVL_FORW;
			}
		} else {
			ASSERT(*rootp == np);
			*rootp = tmp;
			return;
		}

		np = parent;
		avl64_checktree(tree, *rootp);
	} while (np);
}
389
390 /*
391 * Remove node from tree.
392 * avl_delete does the local tree manipulations,
393 * calls retreat() to rebalance tree up to its root.
394 */
void
avl64_delete(
	register avl64tree_desc_t *tree,
	register avl64node_t *np)
{
	register avl64node_t *forw = np->avl_forw;
	register avl64node_t *back = np->avl_back;
	register avl64node_t *parent = np->avl_parent;
	register avl64node_t *nnext;


	/*
	 * First repair the threaded in-order list: locate the node whose
	 * avl_nextino points at np and redirect it past np.
	 */
	if (np->avl_back) {
		/*
		 * a left child exists, then greatest left descendent's nextino
		 * is pointing to np; make it point to np->nextino.
		 */
		nnext = np->avl_back;
		while (nnext) {
			if (!nnext->avl_forw)
				break; /* can't find anything bigger */
			nnext = nnext->avl_forw;
		}
	} else
	if (np->avl_parent) {
		/*
		 * find nearest ancestor with lesser value. That ancestor's
		 * nextino is pointing to np; make it point to np->nextino
		 */
		nnext = np->avl_parent;
		while (nnext) {
			if (AVL_END(tree, nnext) <= AVL_END(tree, np))
				break;
			nnext = nnext->avl_parent;
		}
	} else
		nnext = NULL;

	if (nnext) {
		ASSERT(nnext->avl_nextino == np);
		nnext->avl_nextino = np->avl_nextino;
		/*
		 * Something precedes np; np cannot be firstino.
		 */
		ASSERT(tree->avl_firstino != np);
	}
	else {
		/*
		 * Nothing preceding np; after deletion, np's nextino
		 * is firstino of tree.
		 */
		ASSERT(tree->avl_firstino == np);
		tree->avl_firstino = np->avl_nextino;
	}


	/*
	 * Degenerate cases... at most one child: that child (possibly
	 * NULL) simply takes np's place.
	 */
	if (forw == NULL) {
		forw = back;
		goto attach;
	}

	if (back == NULL) {
attach:
		if (forw)
			forw->avl_parent = parent;
		if (parent) {
			/* the side that lost np tells retreat() which way */
			if (parent->avl_forw == np) {
				parent->avl_forw = forw;
				retreat(tree, parent, AVL_BACK);
			} else {
				ASSERT(parent->avl_back == np);
				parent->avl_back = forw;
				retreat(tree, parent, AVL_FORW);
			}
		} else {
			ASSERT(tree->avl_root == np);
			tree->avl_root = forw;
		}
		avl64_checktree(tree, tree->avl_root);
		return;
	}

	/*
	 * Harder case: children on both sides.
	 * If back's avl_forw pointer is null, just have back
	 * inherit np's avl_forw tree, remove np from the tree
	 * and adjust balance counters starting at back.
	 *
	 *	np->	 xI	  xH	(before retreat())
	 *		 / \	  / \
	 * back->	H   J	 G   J
	 *	       /	/ \
	 *	      G	       ?   ?
	 *	     / \
	 *	    ?   ?
	 */
	if ((forw = back->avl_forw) == NULL) {
		/*
		 * AVL_FORW retreat below will set back's
		 * balance to AVL_BACK.
		 */
		back->avl_balance = np->avl_balance;
		back->avl_forw = forw = np->avl_forw;
		forw->avl_parent = back;
		back->avl_parent = parent;

		if (parent) {
			if (parent->avl_forw == np)
				parent->avl_forw = back;
			else {
				ASSERT(parent->avl_back == np);
				parent->avl_back = back;
			}
		} else {
			ASSERT(tree->avl_root == np);
			tree->avl_root = back;
		}

		/*
		 * back is taking np's place in the tree, and
		 * has therefore lost a avl_back node (itself).
		 */
		retreat(tree, back, AVL_FORW);
		avl64_checktree(tree, tree->avl_root);
		return;
	}

	/*
	 * Hardest case: children on both sides, and back's
	 * avl_forw pointer isn't null.  Find the immediately
	 * inferior buffer by following back's avl_forw line
	 * to the end, then have it inherit np's avl_forw tree.
	 *
	 *	np->	 xI		  xH
	 *		 / \		  / \
	 *		G   J	 back->	 G   J	(before retreat())
	 *	       / \		/ \
	 *	      F   ?...	       F  ?1
	 *		 / \
	 *		?   H <-forw
	 *		   /
	 *		  ?1
	 */
	while ((back = forw->avl_forw))
		forw = back;

	/*
	 * Will be adjusted by retreat() below.
	 */
	forw->avl_balance = np->avl_balance;

	/*
	 * forw inherits np's avl_forw...
	 */
	forw->avl_forw = np->avl_forw;
	np->avl_forw->avl_parent = forw;

	/*
	 * ... forw's parent gets forw's avl_back...
	 */
	back = forw->avl_parent;
	back->avl_forw = forw->avl_back;
	if (forw->avl_back)
		forw->avl_back->avl_parent = back;

	/*
	 * ... forw gets np's avl_back...
	 */
	forw->avl_back = np->avl_back;
	np->avl_back->avl_parent = forw;

	/*
	 * ... and forw gets np's parent.
	 */
	forw->avl_parent = parent;

	if (parent) {
		if (parent->avl_forw == np)
			parent->avl_forw = forw;
		else
			parent->avl_back = forw;
	} else {
		ASSERT(tree->avl_root == np);
		tree->avl_root = forw;
	}

	/*
	 * What used to be forw's parent is the starting
	 * point for rebalancing. It has lost a avl_forw node.
	 */
	retreat(tree, back, AVL_BACK);
	avl64_checktree(tree, tree->avl_root);
}
590
591
592 /*
593 * avl_findanyrange:
594 *
595 * Given range r [start, end), find any range which is contained in r.
596 * if checklen is non-zero, then only ranges of non-zero length are
597 * considered in finding a match.
598 */
avl64node_t *
avl64_findanyrange(
	register avl64tree_desc_t *tree,
	register __uint64_t start,
	register __uint64_t end,
	int checklen)	/* AVL_INCLUDE_ZEROLEN to accept empty ranges */
{
	register avl64node_t *np = tree->avl_root;

	/*
	 * Inlined successor search: find the range containing start,
	 * or else the first range after it.
	 */
	/* np = avl64_findadjacent(tree, start, AVL_SUCCEED); */
	while (np) {
		if (start < AVL_START(tree, np)) {
			if (np->avl_back) {
				np = np->avl_back;
				continue;
			}
			/* if we were to add node with start, would
			 * have a growth of AVL_BACK
			 */
			/* if succeeding node is needed, this is it.
			 */
			break;
		}
		if (start >= AVL_END(tree, np)) {
			if (np->avl_forw) {
				np = np->avl_forw;
				continue;
			}
			/* if we were to add node with start, would
			 * have a growth of AVL_FORW;
			 */
			/* we are looking for a succeeding node;
			 * this is nextino.
			 */
			np = np->avl_nextino;
			break;
		}
		/* AVL_START(tree, np) <= start < AVL_END(tree, np) */
		break;
	}
	if (np) {
		if (checklen == AVL_INCLUDE_ZEROLEN) {
			if (end <= AVL_START(tree, np)) {
				/* something follows start, but is
				 * entirely after the range (end)
				 */
				return(NULL);
			}
			/* np may straddle [start, end) */
			return(np);
		}
		/*
		 * find non-zero length region: skip zero-length entries
		 * along the in-order thread while still inside [start, end)
		 */
		while (np && (AVL_END(tree, np) - AVL_START(tree, np) == 0)
			&& (AVL_START(tree, np) < end))
			np = np->avl_nextino;

		if ((np == NULL) || (AVL_START(tree, np) >= end))
			return NULL;
		return(np);
	}
	/*
	 * nothing succeeds start, all existing ranges are before start.
	 */
	return NULL;
}
666
667
668 /*
669 * Returns a pointer to range which contains value.
670 */
671 avl64node_t *
672 avl64_findrange(
673 register avl64tree_desc_t *tree,
674 register __uint64_t value)
675 {
676 register avl64node_t *np = tree->avl_root;
677
678 while (np) {
679 if (value < AVL_START(tree, np)) {
680 np = np->avl_back;
681 continue;
682 }
683 if (value >= AVL_END(tree, np)) {
684 np = np->avl_forw;
685 continue;
686 }
687 ASSERT(AVL_START(tree, np) <= value &&
688 value < AVL_END(tree, np));
689 return np;
690 }
691 return NULL;
692 }
693
694
695 /*
696 * Returns a pointer to node which contains exact value.
697 */
698 avl64node_t *
699 avl64_find(
700 register avl64tree_desc_t *tree,
701 register __uint64_t value)
702 {
703 register avl64node_t *np = tree->avl_root;
704 register __uint64_t nvalue;
705
706 while (np) {
707 nvalue = AVL_START(tree, np);
708 if (value < nvalue) {
709 np = np->avl_back;
710 continue;
711 }
712 if (value == nvalue) {
713 return np;
714 }
715 np = np->avl_forw;
716 }
717 return NULL;
718 }
719
720
721 /*
722 * Balance buffer AVL tree after attaching a new node to root.
723 * Called only by avl_insert.
724 */
static void
avl64_balance(
	register avl64node_t **rootp,	/* in/out: tree root pointer */
	register avl64node_t *np,	/* node the new child was hung from */
	register int growth)		/* AVL_BACK or AVL_FORW: side that grew */
{
	/*
	 * At this point, np points to the node to which
	 * a new node has been attached.  All that remains is to
	 * propagate avl_balance up the tree.
	 */
	for ( ; ; ) {
		register avl64node_t *parent = np->avl_parent;
		register avl64node_t *child;

		CERT(growth == AVL_BACK || growth == AVL_FORW);

		/*
		 * If the buffer was already balanced, set avl_balance
		 * to the new direction.  Continue if there is a
		 * parent after setting growth to reflect np's
		 * relation to its parent.
		 */
		if (np->avl_balance == AVL_BALANCE) {
			np->avl_balance = growth;
			if (parent) {
				if (parent->avl_forw == np)
					growth = AVL_FORW;
				else {
					ASSERT(parent->avl_back == np);
					growth = AVL_BACK;
				}

				np = parent;
				continue;
			}
			break;
		}

		if (growth != np->avl_balance) {
			/*
			 * Subtree is now balanced -- no net effect
			 * in the size of the subtree, so leave.
			 */
			np->avl_balance = AVL_BALANCE;
			break;
		}

		if (growth == AVL_BACK) {

			child = np->avl_back;
			CERT(np->avl_balance == AVL_BACK && child);

			if (child->avl_balance == AVL_BACK) { /* single LL */
				/*
				 * ``A'' just got inserted;
				 * np points to ``E'', child to ``C'',
				 * and it is already AVL_BACK --
				 * child will get promoted to top of subtree.

				np->	 -E		C
					 / \	       / \
				child->	-C   F	     -B	  E
					/ \	     /	 / \
				      -B   D	    A	D   F
				      /
				     A

				Note that child->avl_parent and
				avl_balance get set in common code.
				 */
				np->avl_parent = child;
				np->avl_balance = AVL_BALANCE;
				np->avl_back = child->avl_forw;
				if (child->avl_forw)
					child->avl_forw->avl_parent = np;
				child->avl_forw = np;
			} else {
				/*
				 * double LR
				 *
				 * child's avl_forw node gets promoted to
				 * the top of the subtree.

				np->	 -E		C
					 / \	       / \
				child->	+B   F	     -B	  E
					/ \	     /	 / \
				       A  +C	    A	D   F
					    \
					     D

				 */
				register avl64node_t *tmp = child->avl_forw;

				CERT(child->avl_balance == AVL_FORW && tmp);

				child->avl_forw = tmp->avl_back;
				if (tmp->avl_back)
					tmp->avl_back->avl_parent = child;

				tmp->avl_back = child;
				child->avl_parent = tmp;

				np->avl_back = tmp->avl_forw;
				if (tmp->avl_forw)
					tmp->avl_forw->avl_parent = np;

				tmp->avl_forw = np;
				np->avl_parent = tmp;

				/* new balances depend on tmp's old lean */
				if (tmp->avl_balance == AVL_BACK)
					np->avl_balance = AVL_FORW;
				else
					np->avl_balance = AVL_BALANCE;

				if (tmp->avl_balance == AVL_FORW)
					child->avl_balance = AVL_BACK;
				else
					child->avl_balance = AVL_BALANCE;

				/*
				 * Set child to point to tmp since it is
				 * now the top of the subtree, and will
				 * get attached to the subtree parent in
				 * the common code below.
				 */
				child = tmp;
			}

		} else /* growth == AVL_FORW */ {

			/*
			 * This code is the mirror image of the
			 * AVL_BACK case above.
			 */

			child = np->avl_forw;
			CERT(np->avl_balance == AVL_FORW && child);

			if (child->avl_balance == AVL_FORW) { /* single RR */
				np->avl_parent = child;
				np->avl_balance = AVL_BALANCE;
				np->avl_forw = child->avl_back;
				if (child->avl_back)
					child->avl_back->avl_parent = np;
				child->avl_back = np;
			} else {
				/*
				 * double RL
				 */
				register avl64node_t *tmp = child->avl_back;

				ASSERT(child->avl_balance == AVL_BACK && tmp);

				child->avl_back = tmp->avl_forw;
				if (tmp->avl_forw)
					tmp->avl_forw->avl_parent = child;

				tmp->avl_forw = child;
				child->avl_parent = tmp;

				np->avl_forw = tmp->avl_back;
				if (tmp->avl_back)
					tmp->avl_back->avl_parent = np;

				tmp->avl_back = np;
				np->avl_parent = tmp;

				/* new balances depend on tmp's old lean */
				if (tmp->avl_balance == AVL_FORW)
					np->avl_balance = AVL_BACK;
				else
					np->avl_balance = AVL_BALANCE;

				if (tmp->avl_balance == AVL_BACK)
					child->avl_balance = AVL_FORW;
				else
					child->avl_balance = AVL_BALANCE;

				child = tmp;
			}
		}

		/*
		 * Common tail: child is the new root of this subtree.
		 * Attach it where np used to hang; a single insertion
		 * needs at most one rotation, so we are done.
		 */
		child->avl_parent = parent;
		child->avl_balance = AVL_BALANCE;

		if (parent) {
			if (parent->avl_back == np)
				parent->avl_back = child;
			else
				parent->avl_forw = child;
		} else {
			ASSERT(*rootp == np);
			*rootp = child;
		}

		break;
	}
}
923
/*
 * Descend the tree to find the node that the non-overlapping range
 * [start, end) would hang from; *growthp is set to the side
 * (AVL_BACK/AVL_FORW) the new child would attach on.  Returns NULL if
 * the range overlaps an existing node -- the caller decides whether
 * that is an error.
 */
static
avl64node_t *
avl64_insert_find_growth(
	register avl64tree_desc_t *tree,
	register __uint64_t start,	/* range start at start, */
	register __uint64_t end,	/* exclusive */
	register int *growthp)	/* OUT */
{
	avl64node_t *root = tree->avl_root;
	register avl64node_t *np;

	np = root;
	ASSERT(np); /* caller ensures that there is atleast one node in tree */

	for ( ; ; ) {
		/* structural sanity checks on every node visited */
		CERT(np->avl_parent || root == np);
		CERT(!np->avl_parent || root != np);
		CERT(!(np->avl_back) || np->avl_back->avl_parent == np);
		CERT(!(np->avl_forw) || np->avl_forw->avl_parent == np);
		CERT(np->avl_balance != AVL_FORW || np->avl_forw);
		CERT(np->avl_balance != AVL_BACK || np->avl_back);
		CERT(np->avl_balance != AVL_BALANCE ||
		     np->avl_back == NULL || np->avl_forw);
		CERT(np->avl_balance != AVL_BALANCE ||
		     np->avl_forw == NULL || np->avl_back);

		if (AVL_START(tree, np) >= end) {
			/* new range lies entirely before np: go left */
			if (np->avl_back) {
				np = np->avl_back;
				continue;
			}
			*growthp = AVL_BACK;
			break;
		}

		if (AVL_END(tree, np) <= start) {
			/* new range lies entirely after np: go right */
			if (np->avl_forw) {
				np = np->avl_forw;
				continue;
			}
			*growthp = AVL_FORW;
			break;
		}
		/* found exact match -- let caller decide if it is an error */
		return(NULL);
	}
	return(np);
}
972
973
/*
 * Hang newnode off parent on the given side and splice it into the
 * threaded in-order list (avl_nextino).  Rebalancing is left to the
 * caller (avl64_balance).
 */
static void
avl64_insert_grow(
	register avl64tree_desc_t *tree,
	register avl64node_t *parent,
	register avl64node_t *newnode,
	register int growth)	/* AVL_BACK or AVL_FORW */
{
	register avl64node_t *nnext;
	register __uint64_t start = AVL_START(tree, newnode);

	if (growth == AVL_BACK) {

		parent->avl_back = newnode;
		/*
		 * we are growing to the left; previous in-order to newnode is
		 * closest ancestor with lesser value. Before this
		 * insertion, this ancestor will be pointing to
		 * newnode's parent. After insertion, next in-order to newnode
		 * is the parent.
		 */
		newnode->avl_nextino = parent;
		nnext = parent;
		while (nnext) {
			if (AVL_END(tree, nnext) <= start)
				break;
			nnext = nnext->avl_parent;
		}
		if (nnext) {
			/*
			 * nnext will be null if newnode is
			 * the least element, and hence very first in the list.
			 */
			ASSERT(nnext->avl_nextino == parent);
			nnext->avl_nextino = newnode;
		}
	}
	else {
		/* growing right: newnode goes between parent and its next */
		parent->avl_forw = newnode;
		newnode->avl_nextino = parent->avl_nextino;
		parent->avl_nextino = newnode;
	}
}
1016
1017
/*
 * Insert newnode into tree, keyed by the [start, end) range given by
 * the tree's ops.  Returns newnode on success, or NULL if the range
 * overlaps an existing entry (a warning is printed for non-zero-length
 * clashes).
 */
avl64node_t *
avl64_insert(
	register avl64tree_desc_t *tree,
	register avl64node_t *newnode)
{
	register avl64node_t *np;
	register __uint64_t start = AVL_START(tree, newnode);
	register __uint64_t end = AVL_END(tree, newnode);
	int growth;

	ASSERT(newnode);
	/*
	 * Clean all pointers for sanity; some will be reset as necessary.
	 */
	newnode->avl_nextino = NULL;
	newnode->avl_parent = NULL;
	newnode->avl_forw = NULL;
	newnode->avl_back = NULL;
	newnode->avl_balance = AVL_BALANCE;

	if ((np = tree->avl_root) == NULL) { /* degenerate case... */
		tree->avl_root = newnode;
		tree->avl_firstino = newnode;
		return newnode;
	}

	if ((np = avl64_insert_find_growth(tree, start, end, &growth))
			== NULL) {
		if (start != end) { /* non-zero length range */
			fprintf(stderr,
		"avl_insert: Warning! duplicate range [%llu,%llu]\n",
				(unsigned long long)start,
				(unsigned long long)end);
		}
		return(NULL);
	}

	/* hang newnode off np and thread it into the in-order list */
	avl64_insert_grow(tree, np, newnode, growth);
	if (growth == AVL_BACK) {
		/*
		 * Growing to left. if np was firstino, newnode will be firstino
		 */
		if (tree->avl_firstino == np)
			tree->avl_firstino = newnode;
	}
#ifdef notneeded
	else
	if (growth == AVL_FORW)
		/*
		 * Cannot possibly be firstino; there is somebody to our left.
		 */
		;
#endif

	newnode->avl_parent = np;
	CERT(np->avl_forw == newnode || np->avl_back == newnode);

	/* restore AVL balance above the attachment point */
	avl64_balance(&tree->avl_root, np, growth);

	avl64_checktree(tree, tree->avl_root);

	return newnode;
}
1081
1082 /*
1083 *
1084 * avl64_insert_immediate(tree, afterp, newnode):
1085 * insert newnode immediately into tree immediately after afterp.
1086 * after insertion, newnode is right child of afterp.
1087 */
void
avl64_insert_immediate(
	avl64tree_desc_t *tree,
	avl64node_t *afterp,	/* insertion point; NULL for empty tree */
	avl64node_t *newnode)
{
	/*
	 * Clean all pointers for sanity; some will be reset as necessary.
	 */
	newnode->avl_nextino = NULL;
	newnode->avl_parent = NULL;
	newnode->avl_forw = NULL;
	newnode->avl_back = NULL;
	newnode->avl_balance = AVL_BALANCE;

	/* empty tree: newnode becomes root and first in-order node */
	if (afterp == NULL) {
		tree->avl_root = newnode;
		tree->avl_firstino = newnode;
		return;
	}

	/* caller guarantees afterp has a free right (avl_forw) slot */
	ASSERT(afterp->avl_forw == NULL);
	avl64_insert_grow(tree, afterp, newnode, AVL_FORW); /* grow to right */
	CERT(afterp->avl_forw == newnode);
	avl64_balance(&tree->avl_root, afterp, AVL_FORW);
	avl64_checktree(tree, tree->avl_root);
}
1115
1116
1117 /*
1118 * Returns first in order node
1119 */
1120 avl64node_t *
1121 avl64_firstino(register avl64node_t *root)
1122 {
1123 register avl64node_t *np;
1124
1125 if ((np = root) == NULL)
1126 return NULL;
1127
1128 while (np->avl_back)
1129 np = np->avl_back;
1130 return np;
1131 }
1132
1133 /*
1134 * Returns last in order node
1135 */
1136 avl64node_t *
1137 avl64_lastino(register avl64node_t *root)
1138 {
1139 register avl64node_t *np;
1140
1141 if ((np = root) == NULL)
1142 return NULL;
1143
1144 while (np->avl_forw)
1145 np = np->avl_forw;
1146 return np;
1147 }
1148
1149 void
1150 avl64_init_tree(avl64tree_desc_t *tree, avl64ops_t *ops)
1151 {
1152 tree->avl_root = NULL;
1153 tree->avl_firstino = NULL;
1154 tree->avl_ops = ops;
1155 }
1156
1157 #ifdef AVL_DEBUG
1158 static void
1159 avl64_printnode(avl64tree_desc_t *tree, avl64node_t *np, int nl)
1160 {
1161 printf("[%d-%d]%c", AVL_START(tree, np),
1162 (AVL_END(tree, np) - 1), nl ? '\n' : ' ');
1163 }
1164 #endif
1165 #ifdef STAND_ALONE_DEBUG
1166
1167 struct avl_debug_node {
1168 avl64node_t avl_node;
1169 xfs_off_t avl_start;
1170 unsigned int avl_size;
1171 }
1172
1173 avl64ops_t avl_debug_ops = {
1174 avl_debug_start,
1175 avl_debug_end,
1176 }
1177
1178 static __uint64_t
1179 avl64_debug_start(avl64node_t *node)
1180 {
1181 return (__uint64_t)(struct avl_debug_node *)node->avl_start;
1182 }
1183
1184 static __uint64_t
1185 avl64_debug_end(avl64node_t *node)
1186 {
1187 return (__uint64_t)
1188 ((struct avl_debug_node *)node->avl_start +
1189 (struct avl_debug_node *)node->avl_size);
1190 }
1191
1192 avl_debug_node freenodes[100];
1193 avl_debug_node *freehead = &freenodes[0];
1194
1195 static avl64node_t *
1196 alloc_avl64_debug_node()
1197 {
1198 freehead->avl_balance = AVL_BALANCE;
1199 freehead->avl_parent = freehead->avl_forw = freehead->avl_back = NULL;
1200 return(freehead++);
1201 }
1202
/*
 * Debug: print the subtree sideways -- right subtree above, left
 * subtree below -- indenting five columns per tree level.
 */
static void
avl64_print(avl64tree_desc_t *tree, avl64node_t *root, int depth)
{
	int i;

	if (!root)
		return;
	if (root->avl_forw)
		avl64_print(tree, root->avl_forw, depth+5);
	for (i = 0; i < depth; i++)
		putchar((int) ' ');
	avl64_printnode(tree, root,1);
	if (root->avl_back)
		avl64_print(tree, root->avl_back, depth+5);
}
1218
1219 main()
1220 {
1221 int i, j;
1222 avl64node_t *np;
1223 avl64tree_desc_t tree;
1224 char linebuf[256], cmd[256];
1225
1226 avl64_init_tree(&tree, &avl_debug_ops);
1227
1228 for (i = 100; i > 0; i = i - 10)
1229 {
1230 np = alloc__debug_avlnode();
1231 ASSERT(np);
1232 np->avl_start = i;
1233 np->avl_size = 10;
1234 avl64_insert(&tree, np);
1235 }
1236 avl64_print(&tree, tree.avl_root, 0);
1237
1238 for (np = tree.avl_firstino; np != NULL; np = np->avl_nextino)
1239 avl64_printnode(&tree, np, 0);
1240 printf("\n");
1241
1242 while (1) {
1243 printf("Command [fpdir] : ");
1244 fgets(linebuf, 256, stdin);
1245 if (feof(stdin)) break;
1246 cmd[0] = NULL;
1247 if (sscanf(linebuf, "%[fpdir]%d", cmd, &i) != 2)
1248 continue;
1249 switch (cmd[0]) {
1250 case 'd':
1251 case 'f':
1252 printf("end of range ? ");
1253 fgets(linebuf, 256, stdin);
1254 j = atoi(linebuf);
1255
1256 if (i == j) j = i+1;
1257 np = avl64_findinrange(&tree,i,j);
1258 if (np) {
1259 avl64_printnode(&tree, np, 1);
1260 if (cmd[0] == 'd')
1261 avl64_delete(&tree, np);
1262 } else
1263 printf("Cannot find %d\n", i);
1264 break;
1265 case 'p':
1266 avl64_print(&tree, tree.avl_root, 0);
1267 for (np = tree.avl_firstino;
1268 np != NULL; np = np->avl_nextino)
1269 avl64_printnode(&tree, np, 0);
1270 printf("\n");
1271 break;
1272 case 'i':
1273 np = alloc_avlnode();
1274 ASSERT(np);
1275 np->avl_start = i;
1276 printf("size of range ? ");
1277 fgets(linebuf, 256, stdin);
1278 j = atoi(linebuf);
1279
1280 np->avl_size = j;
1281 avl64_insert(&tree, np);
1282 break;
1283 case 'r': {
1284 avl64node_t *b, *e, *t;
1285 int checklen;
1286
1287 printf("End of range ? ");
1288 fgets(linebuf, 256, stdin);
1289 j = atoi(linebuf);
1290
1291 printf("checklen 0/1 ? ");
1292 fgets(linebuf, 256, stdin);
1293 checklen = atoi(linebuf);
1294
1295
1296 b = avl64_findanyrange(&tree, i, j, checklen);
1297 if (b) {
1298 printf("Found something\n");
1299 t = b;
1300 while (t) {
1301 if (t != b &&
1302 AVL_START(&tree, t) >= j)
1303 break;
1304 avl64_printnode(&tree, t, 0);
1305 t = t->avl_nextino;
1306 }
1307 printf("\n");
1308 }
1309 }
1310 }
1311 }
1312 }
1313 #endif
1314
1315 /*
1316 * Given a tree, find value; will find return range enclosing value,
1317 * or range immediately succeeding value,
1318 * or range immediately preceeding value.
1319 */
avl64node_t *
avl64_findadjacent(
	register avl64tree_desc_t *tree,
	register __uint64_t value,
	register int dir)	/* AVL_SUCCEED or AVL_PRECEED */
{
	register avl64node_t *np = tree->avl_root;

	while (np) {
		if (value < AVL_START(tree, np)) {
			if (np->avl_back) {
				np = np->avl_back;
				continue;
			}
			/* if we were to add node with value, would
			 * have a growth of AVL_BACK
			 */
			if (dir == AVL_SUCCEED) {
				/* if succeeding node is needed, this is it.
				 */
				return(np);
			}
			if (dir == AVL_PRECEED) {
				/*
				 * find nearest ancestor with lesser value.
				 */
				np = np->avl_parent;
				while (np) {
					if (AVL_END(tree, np) <= value)
						break;
					np = np->avl_parent;
				}
				return(np);
			}
			/* unreachable for valid dir values */
			ASSERT(dir == AVL_SUCCEED || dir == AVL_PRECEED);
			break;
		}
		if (value >= AVL_END(tree, np)) {
			if (np->avl_forw) {
				np = np->avl_forw;
				continue;
			}
			/* if we were to add node with value, would
			 * have a growth of AVL_FORW;
			 */
			if (dir == AVL_SUCCEED) {
				/* we are looking for a succeeding node;
				 * this is nextino.
				 */
				return(np->avl_nextino);
			}
			if (dir == AVL_PRECEED) {
				/* looking for a preceding node; this is it. */
				return(np);
			}
			/* unreachable for valid dir values */
			ASSERT(dir == AVL_SUCCEED || dir == AVL_PRECEED);
		}
		/* AVL_START(tree, np) <= value < AVL_END(tree, np) */
		return(np);
	}
	return NULL;
}
1382
1383
1384 /*
1385 * avl_findranges:
1386 *
1387 * Given range r [start, end), find all ranges in tree which are contained
1388 * in r. At return, startp and endp point to first and last of
1389 * a chain of elements which describe the contained ranges. Elements
1390 * in startp ... endp are in sort order, and can be accessed by
1391 * using avl_nextino.
1392 */
1393
void
avl64_findranges(
	register avl64tree_desc_t *tree,
	register __uint64_t start,
	register __uint64_t end,
	avl64node_t **startp,	/* OUT: first contained range, or NULL */
	avl64node_t **endp)	/* OUT: last contained range, or NULL */
{
	register avl64node_t *np;

	/* first range containing start, or the first one after it */
	np = avl64_findadjacent(tree, start, AVL_SUCCEED);
	if (np == NULL		/* nothing succeeding start */
	    || (np && (end <= AVL_START(tree, np))))
			/* something follows start,
			   but... is entirely after end */
	{
		*startp = NULL;
		*endp = NULL;
		return;
	}

	*startp = np;

	/* see if end is in this region itself */
	if (end <= AVL_END(tree, np) ||
	    np->avl_nextino == NULL ||
	    (np->avl_nextino &&
	     (end <= AVL_START(tree, np->avl_nextino)))) {
		*endp = np;
		return;
	}
	/* have to munge for end */
	/*
	 * note: have to look for (end - 1), since
	 * findadjacent will look for exact value, and does not
	 * care about the fact that end is actually one more
	 * than the value actually being looked for; thus feed it one less.
	 */
	*endp = avl64_findadjacent(tree, (end-1), AVL_PRECEED);
	ASSERT(*endp);
}