/*
 * repair/avl64.c — 64-bit AVL tree implementation (xfsprogs).
 * (source: thirdparty/xfsprogs-dev.git)
 */
1 /**************************************************************************
2 * *
3 * Copyright (c) 2000 Silicon Graphics, Inc. All Rights Reserved.
4 *
5 * This program is free software; you can redistribute it and/or modify it
6 * under the terms of version 2 of the GNU General Public License as
7 * published by the Free Software Foundation.
8 *
9 * This program is distributed in the hope that it would be useful, but
10 * WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
12 *
13 * Further, this software is distributed without any warranty that it is
14 * free of the rightful claim of any third person regarding infringement
15 * or the like. Any license provided herein, whether implied or
16 * otherwise, applies only to this software file. Patent licenses, if
17 * any, provided herein do not apply to combinations of this program with
18 * other software, or any other product whatsoever.
19 *
20 * You should have received a copy of the GNU General Public License along
21 * with this program; if not, write the Free Software Foundation, Inc., 59
22 * Temple Place - Suite 330, Boston MA 02111-1307, USA.
23 *
24 * Contact information: Silicon Graphics, Inc., 1600 Amphitheatre Pkwy,
25 * Mountain View, CA 94043, or:
26 *
27 * http://www.sgi.com
28 *
29 * For further information regarding this notice, see:
30 *
31 * http://oss.sgi.com/projects/GenInfo/SGIGPLNoticeExplan/
32 * *
33 **************************************************************************/
34
35 /* to allow use by user-level utilities */
36
37 #ifdef STAND_ALONE_DEBUG
38 #define AVL_USER_MODE
39 #endif
40
41 #if defined(STAND_ALONE_DEBUG) || defined(AVL_USER_MODE_DEBUG)
42 #define AVL_DEBUG
43 #endif
44
45 #include <stdio.h>
46 #include <libxfs.h>
47 #include "avl64.h"
48
49 #define CERT ASSERT
50
51 #ifdef AVL_DEBUG
52
/*
 * Verify the AVL invariants of a single node: key ordering against both
 * children, child->parent back-pointers, consistency of the balance flag
 * with which children exist, and ordering along the avl_nextino
 * (in-order threading) chain.  Debug-only; compiled under AVL_DEBUG.
 */
static void
avl64_checknode(
	register avl64tree_desc_t *tree,
	register avl64node_t *np)
{
	register avl64node_t *back = np->avl_back;
	register avl64node_t *forw = np->avl_forw;
	register avl64node_t *nextino = np->avl_nextino;
	register int bal = np->avl_balance;

	/* a balanced node has either both children or neither */
	ASSERT(bal != AVL_BALANCE || (!back && !forw) || (back && forw));
	/* a lean in some direction requires a child on that side */
	ASSERT(bal != AVL_FORW || forw);
	ASSERT(bal != AVL_BACK || back);

	if (forw) {
		ASSERT(AVL_START(tree, np) < AVL_START(tree, forw));
		ASSERT(np->avl_forw->avl_parent == np);
		ASSERT(back || bal == AVL_FORW);
	} else {
		ASSERT(bal != AVL_FORW);
		ASSERT(bal == AVL_BALANCE || back);
		ASSERT(bal == AVL_BACK || !back);
	}

	if (back) {
		ASSERT(AVL_START(tree, np) > AVL_START(tree, back));
		ASSERT(np->avl_back->avl_parent == np);
		ASSERT(forw || bal == AVL_BACK);
	} else {
		ASSERT(bal != AVL_BACK);
		ASSERT(bal == AVL_BALANCE || forw);
		ASSERT(bal == AVL_FORW || !forw);
	}

	/* nextino chain must be in non-overlapping ascending order */
	if (nextino == NULL)
		ASSERT(forw == NULL);
	else
		ASSERT(AVL_END(tree, np) <= AVL_START(tree, nextino));
}
92
93 static void
94 avl64_checktree(
95 register avl64tree_desc_t *tree,
96 register avl64node_t *root)
97 {
98 register avl64node_t *nlast, *nnext, *np;
99 __uint64_t offset = 0;
100 __uint64_t end;
101
102 nlast = nnext = root;
103
104 ASSERT(!nnext || nnext->avl_parent == NULL);
105
106 while (nnext) {
107
108 avl64_checknode(tree, nnext);
109 end = AVL_END(tree, nnext);
110
111 if (end <= offset) {
112 if ((np = nnext->avl_forw) && np != nlast) {
113 nlast = nnext;
114 nnext = np;
115 } else {
116 nlast = nnext;
117 nnext = nnext->avl_parent;
118 }
119 continue;
120 }
121
122 nlast = nnext;
123 if (np = nnext->avl_back) {
124 if (AVL_END(tree, np) > offset) {
125 nnext = np;
126 continue;
127 }
128 }
129
130 np = nnext;
131 nnext = nnext->avl_forw;
132 if (!nnext)
133 nnext = np->avl_parent;
134
135 offset = end;
136 }
137 }
138 #else /* ! AVL_DEBUG */
139 #define avl64_checktree(t,x)
140 #endif /* AVL_DEBUG */
141
142
/*
 * Reset balance for np up through tree.
 * ``direction'' is the way that np's balance
 * is headed after the deletion of one of its children --
 * e.g., deleting a avl_forw child sends avl_balance toward AVL_BACK.
 * Called only when deleting a node from the tree.
 *
 * Walks from np toward the root, updating avl_balance at each level and
 * performing single (LL/RR) or double (LR/RL) rotations where the
 * deletion has left a subtree two levels out of balance.  Terminates
 * early as soon as a subtree's height is unchanged.
 */
static void
retreat(
	avl64tree_desc_t *tree,
	register avl64node_t *np,
	register int direction)
{
	register avl64node_t **rootp = &tree->avl_root;
	register avl64node_t *parent;
	register avl64node_t *child;
	register avl64node_t *tmp;
	register int bal;

	do {
		ASSERT(direction == AVL_BACK || direction == AVL_FORW);

		/* previously balanced node: now leans, height unchanged */
		if (np->avl_balance == AVL_BALANCE) {
			np->avl_balance = direction;
			return;
		}

		parent = np->avl_parent;

		/*
		 * If balance is being restored, no local node
		 * reorganization is necessary, but may be at
		 * a higher node.  Reset direction and continue.
		 */
		if (direction != np->avl_balance) {
			np->avl_balance = AVL_BALANCE;
			if (parent) {
				if (parent->avl_forw == np)
					direction = AVL_BACK;
				else
					direction = AVL_FORW;

				np = parent;
				continue;
			}
			return;
		}

		/*
		 * Imbalance.  If a avl_forw node was removed, direction
		 * (and, by reduction, np->avl_balance) is/was AVL_BACK.
		 */
		if (np->avl_balance == AVL_BACK) {

			ASSERT(direction == AVL_BACK);
			child = np->avl_back;
			bal = child->avl_balance;

			if (bal != AVL_FORW) /* single LL */ {
				/*
				 * np gets pushed down to lesser child's
				 * avl_forw branch.
				 *
				 *	np->	 -D	    +B
				 *		/  \	   /  \
				 * child->     B    deleted A  -D
				 *	      / \	       /
				 *	     A	 C	      C
				 cmn_err(CE_CONT, "!LL delete b 0x%x c 0x%x\n",
					 np, child);
				 */

				np->avl_back = child->avl_forw;
				if (child->avl_forw)
					child->avl_forw->avl_parent = np;
				child->avl_forw = np;

				if (parent) {
					if (parent->avl_forw == np) {
						parent->avl_forw = child;
						direction = AVL_BACK;
					} else {
						ASSERT(parent->avl_back == np);
						parent->avl_back = child;
						direction = AVL_FORW;
					}
				} else {
					ASSERT(*rootp == np);
					*rootp = child;
				}
				np->avl_parent = child;
				child->avl_parent = parent;

				if (bal == AVL_BALANCE) {
					/* subtree height unchanged: done */
					np->avl_balance = AVL_BACK;
					child->avl_balance = AVL_FORW;
					return;
				} else {
					/* subtree shrank: keep retreating */
					np->avl_balance = AVL_BALANCE;
					child->avl_balance = AVL_BALANCE;
					np = parent;
					avl64_checktree(tree, *rootp);
					continue;
				}
			}

			/* child->avl_balance == AVL_FORW	double LR rotation
			 *
			 * child's avl_forw node gets promoted up, along with
			 * its avl_forw subtree
			 *
			 *	np->	 -G		C
			 *		/  \	       / \
			 * child->    +B    H	     -B	  G
			 *	      / \    \	     /	 / \
			 *	     A	+C    deleted A	D   H
			 *		  \
			 *		   D
			 cmn_err(CE_CONT, "!LR delete b 0x%x c 0x%x t 0x%x\n",
				 np, child, child->avl_forw);
			 */

			tmp = child->avl_forw;
			bal = tmp->avl_balance;

			child->avl_forw = tmp->avl_back;
			if (tmp->avl_back)
				tmp->avl_back->avl_parent = child;

			tmp->avl_back = child;
			child->avl_parent = tmp;

			np->avl_back = tmp->avl_forw;
			if (tmp->avl_forw)
				tmp->avl_forw->avl_parent = np;
			tmp->avl_forw = np;

			/* new balances depend on which side tmp leaned */
			if (bal == AVL_FORW)
				child->avl_balance = AVL_BACK;
			else
				child->avl_balance = AVL_BALANCE;

			if (bal == AVL_BACK)
				np->avl_balance = AVL_FORW;
			else
				np->avl_balance = AVL_BALANCE;

			goto next;
		}

		ASSERT(np->avl_balance == AVL_FORW && direction == AVL_FORW);

		child = np->avl_forw;
		bal = child->avl_balance;

		if (bal != AVL_BACK) /* single RR */ {
			/*
			 * np gets pushed down to greater child's
			 * avl_back branch.
			 *
			 *	np->	 +B		-D
			 *		/  \	       /  \
			 *  deleted    D <-child     +B    E
			 *	      / \	       \
			 *	     C	 E		C
			 cmn_err(CE_CONT, "!RR delete b 0x%x c 0x%x\n",
				 np, child);
			 */

			np->avl_forw = child->avl_back;
			if (child->avl_back)
				child->avl_back->avl_parent = np;
			child->avl_back = np;

			if (parent) {
				if (parent->avl_forw == np) {
					parent->avl_forw = child;
					direction = AVL_BACK;
				} else {
					ASSERT(parent->avl_back == np);
					parent->avl_back = child;
					direction = AVL_FORW;
				}
			} else {
				ASSERT(*rootp == np);
				*rootp = child;
			}
			np->avl_parent = child;
			child->avl_parent = parent;

			if (bal == AVL_BALANCE) {
				/* subtree height unchanged: done */
				np->avl_balance = AVL_FORW;
				child->avl_balance = AVL_BACK;
				return;
			} else {
				/* subtree shrank: keep retreating */
				np->avl_balance = AVL_BALANCE;
				child->avl_balance = AVL_BALANCE;
				np = parent;
				avl64_checktree(tree, *rootp);
				continue;
			}
		}

		/* child->avl_balance == AVL_BACK	double RL rotation
		 * (mirror image of the LR case above)
		 cmn_err(CE_CONT, "!RL delete b 0x%x c 0x%x t 0x%x\n",
			 np, child, child->avl_back);
		 */

		tmp = child->avl_back;
		bal = tmp->avl_balance;

		child->avl_back = tmp->avl_forw;
		if (tmp->avl_forw)
			tmp->avl_forw->avl_parent = child;

		tmp->avl_forw = child;
		child->avl_parent = tmp;

		np->avl_forw = tmp->avl_back;
		if (tmp->avl_back)
			tmp->avl_back->avl_parent = np;
		tmp->avl_back = np;

		if (bal == AVL_BACK)
			child->avl_balance = AVL_FORW;
		else
			child->avl_balance = AVL_BALANCE;

		if (bal == AVL_FORW)
			np->avl_balance = AVL_BACK;
		else
			np->avl_balance = AVL_BALANCE;
next:
		/* common tail for both double rotations: tmp is subtree root */
		np->avl_parent = tmp;
		tmp->avl_balance = AVL_BALANCE;
		tmp->avl_parent = parent;

		if (parent) {
			if (parent->avl_forw == np) {
				parent->avl_forw = tmp;
				direction = AVL_BACK;
			} else {
				ASSERT(parent->avl_back == np);
				parent->avl_back = tmp;
				direction = AVL_FORW;
			}
		} else {
			ASSERT(*rootp == np);
			*rootp = tmp;
			return;
		}

		np = parent;
		avl64_checktree(tree, *rootp);
	} while (np);
}
399
/*
 * Remove node from tree.
 * avl_delete does the local tree manipulations,
 * calls retreat() to rebalance tree up to its root.
 *
 * Also unlinks np from the avl_nextino (in-order) chain and keeps
 * tree->avl_firstino up to date.
 */
void
avl64_delete(
	register avl64tree_desc_t *tree,
	register avl64node_t *np)
{
	register avl64node_t *forw = np->avl_forw;
	register avl64node_t *back = np->avl_back;
	register avl64node_t *parent = np->avl_parent;
	register avl64node_t *nnext;


	if (np->avl_back) {
		/*
		 * a left child exists, then greatest left descendent's nextino
		 * is pointing to np; make it point to np->nextino.
		 */
		nnext = np->avl_back;
		while (nnext) {
			if (!nnext->avl_forw)
				break; /* can't find anything bigger */
			nnext = nnext->avl_forw;
		}
	} else
	if (np->avl_parent) {
		/*
		 * find nearest ancestor with lesser value. That ancestor's
		 * nextino is pointing to np; make it point to np->nextino
		 */
		nnext = np->avl_parent;
		while (nnext) {
			if (AVL_END(tree, nnext) <= AVL_END(tree, np))
				break;
			nnext = nnext->avl_parent;
		}
	} else
		nnext = NULL;

	if (nnext) {
		ASSERT(nnext->avl_nextino == np);
		nnext->avl_nextino = np->avl_nextino;
		/*
		 * Something precedes np; np cannot be firstino.
		 */
		ASSERT(tree->avl_firstino != np);
	}
	else {
		/*
		 * Nothing preceding np; after deletion, np's nextino
		 * is firstino of tree.
		 */
		ASSERT(tree->avl_firstino == np);
		tree->avl_firstino = np->avl_nextino;
	}


	/*
	 * Degenerate cases: at most one child -- splice the child
	 * (possibly NULL) into np's place.
	 */
	if (forw == NULL) {
		forw = back;
		goto attach;
	}

	if (back == NULL) {
attach:
		if (forw)
			forw->avl_parent = parent;
		if (parent) {
			if (parent->avl_forw == np) {
				parent->avl_forw = forw;
				retreat(tree, parent, AVL_BACK);
			} else {
				ASSERT(parent->avl_back == np);
				parent->avl_back = forw;
				retreat(tree, parent, AVL_FORW);
			}
		} else {
			ASSERT(tree->avl_root == np);
			tree->avl_root = forw;
		}
		avl64_checktree(tree, tree->avl_root);
		return;
	}

	/*
	 * Harder case: children on both sides.
	 * If back's avl_forw pointer is null, just have back
	 * inherit np's avl_forw tree, remove np from the tree
	 * and adjust balance counters starting at back.
	 *
	 *	np->	 xI	  xH	(before retreat())
	 *		/  \	 /  \
	 * back->      H    J	G    J
	 *	      /	       / \
	 *	     G	      ?	  ?
	 *	    / \
	 *	   ?   ?
	 */
	if ((forw = back->avl_forw) == NULL) {
		/*
		 * AVL_FORW retreat below will set back's
		 * balance to AVL_BACK.
		 */
		back->avl_balance = np->avl_balance;
		back->avl_forw = forw = np->avl_forw;
		forw->avl_parent = back;
		back->avl_parent = parent;

		if (parent) {
			if (parent->avl_forw == np)
				parent->avl_forw = back;
			else {
				ASSERT(parent->avl_back == np);
				parent->avl_back = back;
			}
		} else {
			ASSERT(tree->avl_root == np);
			tree->avl_root = back;
		}

		/*
		 * back is taking np's place in the tree, and
		 * has therefore lost a avl_back node (itself).
		 */
		retreat(tree, back, AVL_FORW);
		avl64_checktree(tree, tree->avl_root);
		return;
	}

	/*
	 * Hardest case: children on both sides, and back's
	 * avl_forw pointer isn't null.  Find the immediately
	 * inferior buffer by following back's avl_forw line
	 * to the end, then have it inherit np's avl_forw tree.
	 *
	 *	np->	 xI		  xH
	 *		/  \		 /  \
	 *	       G    J	back->	G    J	(before retreat())
	 *	      / \	       / \
	 *	     F	 ?...	      F	  ?1
	 *	      \
	 *	   ?   H <-forw
	 *	      /
	 *	     ?1
	 */
	while (back = forw->avl_forw)	/* intentional assignment-as-test */
		forw = back;

	/*
	 * Will be adjusted by retreat() below.
	 */
	forw->avl_balance = np->avl_balance;

	/*
	 * forw inherits np's avl_forw...
	 */
	forw->avl_forw = np->avl_forw;
	np->avl_forw->avl_parent = forw;

	/*
	 * ... forw's parent gets forw's avl_back...
	 */
	back = forw->avl_parent;
	back->avl_forw = forw->avl_back;
	if (forw->avl_back)
		forw->avl_back->avl_parent = back;

	/*
	 * ... forw gets np's avl_back...
	 */
	forw->avl_back = np->avl_back;
	np->avl_back->avl_parent = forw;

	/*
	 * ... and forw gets np's parent.
	 */
	forw->avl_parent = parent;

	if (parent) {
		if (parent->avl_forw == np)
			parent->avl_forw = forw;
		else
			parent->avl_back = forw;
	} else {
		ASSERT(tree->avl_root == np);
		tree->avl_root = forw;
	}

	/*
	 * What used to be forw's parent is the starting
	 * point for rebalancing. It has lost a avl_forw node.
	 */
	retreat(tree, back, AVL_BACK);
	avl64_checktree(tree, tree->avl_root);
}
600
601
602 /*
603 * avl_findanyrange:
604 *
605 * Given range r [start, end), find any range which is contained in r.
606 * if checklen is non-zero, then only ranges of non-zero length are
607 * considered in finding a match.
608 */
609 avl64node_t *
610 avl64_findanyrange(
611 register avl64tree_desc_t *tree,
612 register __uint64_t start,
613 register __uint64_t end,
614 int checklen)
615 {
616 register avl64node_t *np = tree->avl_root;
617
618 /* np = avl64_findadjacent(tree, start, AVL_SUCCEED); */
619 while (np) {
620 if (start < AVL_START(tree, np)) {
621 if (np->avl_back) {
622 np = np->avl_back;
623 continue;
624 }
625 /* if we were to add node with start, would
626 * have a growth of AVL_BACK
627 */
628 /* if succeeding node is needed, this is it.
629 */
630 break;
631 }
632 if (start >= AVL_END(tree, np)) {
633 if (np->avl_forw) {
634 np = np->avl_forw;
635 continue;
636 }
637 /* if we were to add node with start, would
638 * have a growth of AVL_FORW;
639 */
640 /* we are looking for a succeeding node;
641 * this is nextino.
642 */
643 np = np->avl_nextino;
644 break;
645 }
646 /* AVL_START(tree, np) <= start < AVL_END(tree, np) */
647 break;
648 }
649 if (np) {
650 if (checklen == AVL_INCLUDE_ZEROLEN) {
651 if (end <= AVL_START(tree, np)) {
652 /* something follows start, but is
653 * is entierly after the range (end)
654 */
655 return(NULL);
656 }
657 /* np may stradle [start, end) */
658 return(np);
659 }
660 /*
661 * find non-zero length region
662 */
663 while (np && (AVL_END(tree, np) - AVL_START(tree, np) == 0)
664 && (AVL_START(tree, np) < end))
665 np = np->avl_nextino;
666
667 if ((np == NULL) || (AVL_START(tree, np) >= end))
668 return NULL;
669 return(np);
670 }
671 /*
672 * nothing succeeds start, all existing ranges are before start.
673 */
674 return NULL;
675 }
676
677
678 /*
679 * Returns a pointer to range which contains value.
680 */
681 avl64node_t *
682 avl64_findrange(
683 register avl64tree_desc_t *tree,
684 register __uint64_t value)
685 {
686 register avl64node_t *np = tree->avl_root;
687
688 while (np) {
689 if (value < AVL_START(tree, np)) {
690 np = np->avl_back;
691 continue;
692 }
693 if (value >= AVL_END(tree, np)) {
694 np = np->avl_forw;
695 continue;
696 }
697 ASSERT(AVL_START(tree, np) <= value &&
698 value < AVL_END(tree, np));
699 return np;
700 }
701 return NULL;
702 }
703
704
705 /*
706 * Returns a pointer to node which contains exact value.
707 */
708 avl64node_t *
709 avl64_find(
710 register avl64tree_desc_t *tree,
711 register __uint64_t value)
712 {
713 register avl64node_t *np = tree->avl_root;
714 register __uint64_t nvalue;
715
716 while (np) {
717 nvalue = AVL_START(tree, np);
718 if (value < nvalue) {
719 np = np->avl_back;
720 continue;
721 }
722 if (value == nvalue) {
723 return np;
724 }
725 np = np->avl_forw;
726 }
727 return NULL;
728 }
729
730
/*
 * Balance buffer AVL tree after attaching a new node to root.
 * Called only by avl_insert.
 *
 * Propagates the height growth from np toward the root, applying a
 * single (LL/RR) or double (LR/RL) rotation at the first node that
 * becomes two levels out of balance; at most one rotation is ever
 * needed per insertion.
 */
static void
avl64_balance(
	register avl64node_t **rootp,
	register avl64node_t *np,
	register int growth)
{
	/*
	 * At this point, np points to the node to which
	 * a new node has been attached. All that remains is to
	 * propagate avl_balance up the tree.
	 */
	for ( ; ; ) {
		register avl64node_t *parent = np->avl_parent;
		register avl64node_t *child;

		CERT(growth == AVL_BACK || growth == AVL_FORW);

		/*
		 * If the buffer was already balanced, set avl_balance
		 * to the new direction.  Continue if there is a
		 * parent after setting growth to reflect np's
		 * relation to its parent.
		 */
		if (np->avl_balance == AVL_BALANCE) {
			np->avl_balance = growth;
			if (parent) {
				if (parent->avl_forw == np)
					growth = AVL_FORW;
				else {
					ASSERT(parent->avl_back == np);
					growth = AVL_BACK;
				}

				np = parent;
				continue;
			}
			break;
		}

		if (growth != np->avl_balance) {
			/*
			 * Subtree is now balanced -- no net effect
			 * in the size of the subtree, so leave.
			 */
			np->avl_balance = AVL_BALANCE;
			break;
		}

		if (growth == AVL_BACK) {

			child = np->avl_back;
			CERT(np->avl_balance == AVL_BACK && child);

			if (child->avl_balance == AVL_BACK) { /* single LL */
				/*
				 * ``A'' just got inserted;
				 * np points to ``E'', child to ``C'',
				 * and it is already AVL_BACK --
				 * child will get promoted to top of subtree.

					np->	 -E		C
						/  \	       / \
				 child->      -C    F	     -B	  E
					      / \	     /	 / \
					    -B	 D	    A	D   F
					    /
					   A

				   Note that child->avl_parent and
				   avl_balance get set in common code.
				 */
				np->avl_parent = child;
				np->avl_balance = AVL_BALANCE;
				np->avl_back = child->avl_forw;
				if (child->avl_forw)
					child->avl_forw->avl_parent = np;
				child->avl_forw = np;
			} else {
				/*
				 * double LR
				 *
				 * child's avl_forw node gets promoted to
				 * the top of the subtree.

					np->	 -E	       C
						/  \	      / \
				 child->      +B    F	    -B	 E
					      / \	    /	/ \
					     A	+C	   A   D   F
						  \
						   D

				 */
				register avl64node_t *tmp = child->avl_forw;

				CERT(child->avl_balance == AVL_FORW && tmp);

				child->avl_forw = tmp->avl_back;
				if (tmp->avl_back)
					tmp->avl_back->avl_parent = child;

				tmp->avl_back = child;
				child->avl_parent = tmp;

				np->avl_back = tmp->avl_forw;
				if (tmp->avl_forw)
					tmp->avl_forw->avl_parent = np;

				tmp->avl_forw = np;
				np->avl_parent = tmp;

				/* new balances depend on tmp's old lean */
				if (tmp->avl_balance == AVL_BACK)
					np->avl_balance = AVL_FORW;
				else
					np->avl_balance = AVL_BALANCE;

				if (tmp->avl_balance == AVL_FORW)
					child->avl_balance = AVL_BACK;
				else
					child->avl_balance = AVL_BALANCE;

				/*
				 * Set child to point to tmp since it is
				 * now the top of the subtree, and will
				 * get attached to the subtree parent in
				 * the common code below.
				 */
				child = tmp;
			}

		} else /* growth == AVL_FORW */ {

			/*
			 * This code is the mirror image of the
			 * AVL_BACK case above.
			 */

			child = np->avl_forw;
			CERT(np->avl_balance == AVL_FORW && child);

			if (child->avl_balance == AVL_FORW) { /* single RR */
				np->avl_parent = child;
				np->avl_balance = AVL_BALANCE;
				np->avl_forw = child->avl_back;
				if (child->avl_back)
					child->avl_back->avl_parent = np;
				child->avl_back = np;
			} else {
				/*
				 * double RL
				 */
				register avl64node_t *tmp = child->avl_back;

				ASSERT(child->avl_balance == AVL_BACK && tmp);

				child->avl_back = tmp->avl_forw;
				if (tmp->avl_forw)
					tmp->avl_forw->avl_parent = child;

				tmp->avl_forw = child;
				child->avl_parent = tmp;

				np->avl_forw = tmp->avl_back;
				if (tmp->avl_back)
					tmp->avl_back->avl_parent = np;

				tmp->avl_back = np;
				np->avl_parent = tmp;

				if (tmp->avl_balance == AVL_FORW)
					np->avl_balance = AVL_BACK;
				else
					np->avl_balance = AVL_BALANCE;

				if (tmp->avl_balance == AVL_BACK)
					child->avl_balance = AVL_FORW;
				else
					child->avl_balance = AVL_BALANCE;

				child = tmp;
			}
		}

		/* attach the new subtree root in place of np */
		child->avl_parent = parent;
		child->avl_balance = AVL_BALANCE;

		if (parent) {
			if (parent->avl_back == np)
				parent->avl_back = child;
			else
				parent->avl_forw = child;
		} else {
			ASSERT(*rootp == np);
			*rootp = child;
		}

		break;
	}
}
933
934 static
935 avl64node_t *
936 avl64_insert_find_growth(
937 register avl64tree_desc_t *tree,
938 register __uint64_t start, /* range start at start, */
939 register __uint64_t end, /* exclusive */
940 register int *growthp) /* OUT */
941 {
942 avl64node_t *root = tree->avl_root;
943 register avl64node_t *np;
944
945 np = root;
946 ASSERT(np); /* caller ensures that there is atleast one node in tree */
947
948 for ( ; ; ) {
949 CERT(np->avl_parent || root == np);
950 CERT(!np->avl_parent || root != np);
951 CERT(!(np->avl_back) || np->avl_back->avl_parent == np);
952 CERT(!(np->avl_forw) || np->avl_forw->avl_parent == np);
953 CERT(np->avl_balance != AVL_FORW || np->avl_forw);
954 CERT(np->avl_balance != AVL_BACK || np->avl_back);
955 CERT(np->avl_balance != AVL_BALANCE ||
956 np->avl_back == NULL || np->avl_forw);
957 CERT(np->avl_balance != AVL_BALANCE ||
958 np->avl_forw == NULL || np->avl_back);
959
960 if (AVL_START(tree, np) >= end) {
961 if (np->avl_back) {
962 np = np->avl_back;
963 continue;
964 }
965 *growthp = AVL_BACK;
966 break;
967 }
968
969 if (AVL_END(tree, np) <= start) {
970 if (np->avl_forw) {
971 np = np->avl_forw;
972 continue;
973 }
974 *growthp = AVL_FORW;
975 break;
976 }
977 /* found exact match -- let caller decide if it is an error */
978 return(NULL);
979 }
980 return(np);
981 }
982
983
984 static void
985 avl64_insert_grow(
986 register avl64tree_desc_t *tree,
987 register avl64node_t *parent,
988 register avl64node_t *newnode,
989 register int growth)
990 {
991 register avl64node_t *nnext;
992 register __uint64_t start = AVL_START(tree, newnode);
993
994 if (growth == AVL_BACK) {
995
996 parent->avl_back = newnode;
997 /*
998 * we are growing to the left; previous in-order to newnode is
999 * closest ancestor with lesser value. Before this
1000 * insertion, this ancestor will be pointing to
1001 * newnode's parent. After insertion, next in-order to newnode
1002 * is the parent.
1003 */
1004 newnode->avl_nextino = parent;
1005 nnext = parent;
1006 while (nnext) {
1007 if (AVL_END(tree, nnext) <= start)
1008 break;
1009 nnext = nnext->avl_parent;
1010 }
1011 if (nnext) {
1012 /*
1013 * nnext will be null if newnode is
1014 * the least element, and hence very first in the list.
1015 */
1016 ASSERT(nnext->avl_nextino == parent);
1017 nnext->avl_nextino = newnode;
1018 }
1019 }
1020 else {
1021 parent->avl_forw = newnode;
1022 newnode->avl_nextino = parent->avl_nextino;
1023 parent->avl_nextino = newnode;
1024 }
1025 }
1026
1027
/*
 * Insert newnode into tree, keyed by its [AVL_START, AVL_END) range.
 * Returns newnode on success; returns NULL if the range overlaps an
 * existing node (warning printed unless the range is zero-length).
 * Keeps tree->avl_firstino and the avl_nextino chain up to date.
 */
avl64node_t *
avl64_insert(
	register avl64tree_desc_t *tree,
	register avl64node_t *newnode)
{
	register avl64node_t *np;
	register __uint64_t start = AVL_START(tree, newnode);
	register __uint64_t end = AVL_END(tree, newnode);
	int growth;

	ASSERT(newnode);
	/*
	 * Clean all pointers for sanity; some will be reset as necessary.
	 */
	newnode->avl_nextino = NULL;
	newnode->avl_parent = NULL;
	newnode->avl_forw = NULL;
	newnode->avl_back = NULL;
	newnode->avl_balance = AVL_BALANCE;

	if ((np = tree->avl_root) == NULL) { /* degenerate case... */
		tree->avl_root = newnode;
		tree->avl_firstino = newnode;
		return newnode;
	}

	if ((np = avl64_insert_find_growth(tree, start, end, &growth))
			== NULL) {
		if (start != end) { /* non-zero length range */
#ifdef AVL_USER_MODE
			printf("avl_insert: Warning! duplicate range [0x%llx,0x%llx)\n",
				(unsigned long long)start, (unsigned long long)end);
#else
			cmn_err(CE_CONT,
			"!avl_insert: Warning! duplicate range [0x%llx,0x%llx)\n",
				start, end);
#endif
		}
		return(NULL);
	}

	/* hang newnode off np and splice it into the nextino chain */
	avl64_insert_grow(tree, np, newnode, growth);
	if (growth == AVL_BACK) {
		/*
		 * Growing to left. if np was firstino, newnode will be firstino
		 */
		if (tree->avl_firstino == np)
			tree->avl_firstino = newnode;
	}
#ifdef notneeded
	else
	if (growth == AVL_FORW)
		/*
		 * Cannot possibly be firstino; there is somebody to our left.
		 */
		;
#endif

	newnode->avl_parent = np;
	CERT(np->avl_forw == newnode || np->avl_back == newnode);

	/* restore AVL balance from the attachment point upward */
	avl64_balance(&tree->avl_root, np, growth);

	avl64_checktree(tree, tree->avl_root);

	return newnode;
}
1095
1096 /*
1097 *
1098 * avl64_insert_immediate(tree, afterp, newnode):
1099 * insert newnode immediately into tree immediately after afterp.
1100 * after insertion, newnode is right child of afterp.
1101 */
1102 void
1103 avl64_insert_immediate(
1104 avl64tree_desc_t *tree,
1105 avl64node_t *afterp,
1106 avl64node_t *newnode)
1107 {
1108 /*
1109 * Clean all pointers for sanity; some will be reset as necessary.
1110 */
1111 newnode->avl_nextino = NULL;
1112 newnode->avl_parent = NULL;
1113 newnode->avl_forw = NULL;
1114 newnode->avl_back = NULL;
1115 newnode->avl_balance = AVL_BALANCE;
1116
1117 if (afterp == NULL) {
1118 tree->avl_root = newnode;
1119 tree->avl_firstino = newnode;
1120 return;
1121 }
1122
1123 ASSERT(afterp->avl_forw == NULL);
1124 avl64_insert_grow(tree, afterp, newnode, AVL_FORW); /* grow to right */
1125 CERT(afterp->avl_forw == newnode);
1126 avl64_balance(&tree->avl_root, afterp, AVL_FORW);
1127 avl64_checktree(tree, tree->avl_root);
1128 }
1129
1130
1131 /*
1132 * Returns first in order node
1133 */
1134 avl64node_t *
1135 avl64_firstino(register avl64node_t *root)
1136 {
1137 register avl64node_t *np;
1138
1139 if ((np = root) == NULL)
1140 return NULL;
1141
1142 while (np->avl_back)
1143 np = np->avl_back;
1144 return np;
1145 }
1146
1147 #ifdef AVL_USER_MODE
1148 /*
1149 * leave this as a user-mode only routine until someone actually
1150 * needs it in the kernel
1151 */
1152
1153 /*
1154 * Returns last in order node
1155 */
1156 avl64node_t *
1157 avl64_lastino(register avl64node_t *root)
1158 {
1159 register avl64node_t *np;
1160
1161 if ((np = root) == NULL)
1162 return NULL;
1163
1164 while (np->avl_forw)
1165 np = np->avl_forw;
1166 return np;
1167 }
1168 #endif
1169
1170 void
1171 avl64_init_tree(avl64tree_desc_t *tree, avl64ops_t *ops)
1172 {
1173 tree->avl_root = NULL;
1174 tree->avl_firstino = NULL;
1175 tree->avl_ops = ops;
1176 }
1177
#ifdef AVL_DEBUG
/*
 * Print one node's inclusive [start, end-1] range, for debugging.
 * AVL_START/AVL_END are 64-bit values, so the old "%d" specifiers were
 * undefined behavior; use %llu with explicit casts instead.
 */
static void
avl64_printnode(avl64tree_desc_t *tree, avl64node_t *np, int nl)
{
	printf("[%llu-%llu]%c",
		(unsigned long long)AVL_START(tree, np),
		(unsigned long long)(AVL_END(tree, np) - 1),
		nl ? '\n' : ' ');
}
#endif
1186 #ifdef STAND_ALONE_DEBUG
1187
1188 struct avl_debug_node {
1189 avl64node_t avl_node;
1190 xfs_off_t avl_start;
1191 unsigned int avl_size;
1192 }
1193
1194 avl64ops_t avl_debug_ops = {
1195 avl_debug_start,
1196 avl_debug_end,
1197 }
1198
1199 static __uint64_t
1200 avl64_debug_start(avl64node_t *node)
1201 {
1202 return (__uint64_t)(struct avl_debug_node *)node->avl_start;
1203 }
1204
1205 static __uint64_t
1206 avl64_debug_end(avl64node_t *node)
1207 {
1208 return (__uint64_t)
1209 ((struct avl_debug_node *)node->avl_start +
1210 (struct avl_debug_node *)node->avl_size);
1211 }
1212
1213 avl_debug_node freenodes[100];
1214 avl_debug_node *freehead = &freenodes[0];
1215
1216 static avl64node_t *
1217 alloc_avl64_debug_node()
1218 {
1219 freehead->avl_balance = AVL_BALANCE;
1220 freehead->avl_parent = freehead->avl_forw = freehead->avl_back = NULL;
1221 return(freehead++);
1222 }
1223
1224 static void
1225 avl64_print(avl64tree_desc_t *tree, avl64node_t *root, int depth)
1226 {
1227 int i;
1228
1229 if (!root)
1230 return;
1231 if (root->avl_forw)
1232 avl64_print(tree, root->avl_forw, depth+5);
1233 for (i = 0; i < depth; i++)
1234 putchar((int) ' ');
1235 avl64_printnode(tree, root,1);
1236 if (root->avl_back)
1237 avl64_print(tree, root->avl_back, depth+5);
1238 }
1239
1240 main()
1241 {
1242 int i, j;
1243 avl64node_t *np;
1244 avl64tree_desc_t tree;
1245 char linebuf[256], cmd[256];
1246
1247 avl64_init_tree(&tree, &avl_debug_ops);
1248
1249 for (i = 100; i > 0; i = i - 10)
1250 {
1251 np = alloc__debug_avlnode();
1252 ASSERT(np);
1253 np->avl_start = i;
1254 np->avl_size = 10;
1255 avl64_insert(&tree, np);
1256 }
1257 avl64_print(&tree, tree.avl_root, 0);
1258
1259 for (np = tree.avl_firstino; np != NULL; np = np->avl_nextino)
1260 avl64_printnode(&tree, np, 0);
1261 printf("\n");
1262
1263 while (1) {
1264 printf("Command [fpdir] : ");
1265 fgets(linebuf, 256, stdin);
1266 if (feof(stdin)) break;
1267 cmd[0] = NULL;
1268 if (sscanf(linebuf, "%[fpdir]%d", cmd, &i) != 2)
1269 continue;
1270 switch (cmd[0]) {
1271 case 'd':
1272 case 'f':
1273 printf("end of range ? ");
1274 fgets(linebuf, 256, stdin);
1275 j = atoi(linebuf);
1276
1277 if (i == j) j = i+1;
1278 np = avl64_findinrange(&tree,i,j);
1279 if (np) {
1280 avl64_printnode(&tree, np, 1);
1281 if (cmd[0] == 'd')
1282 avl64_delete(&tree, np);
1283 } else
1284 printf("Cannot find %d\n", i);
1285 break;
1286 case 'p':
1287 avl64_print(&tree, tree.avl_root, 0);
1288 for (np = tree.avl_firstino;
1289 np != NULL; np = np->avl_nextino)
1290 avl64_printnode(&tree, np, 0);
1291 printf("\n");
1292 break;
1293 case 'i':
1294 np = alloc_avlnode();
1295 ASSERT(np);
1296 np->avl_start = i;
1297 printf("size of range ? ");
1298 fgets(linebuf, 256, stdin);
1299 j = atoi(linebuf);
1300
1301 np->avl_size = j;
1302 avl64_insert(&tree, np);
1303 break;
1304 case 'r': {
1305 avl64node_t *b, *e, *t;
1306 int checklen;
1307
1308 printf("End of range ? ");
1309 fgets(linebuf, 256, stdin);
1310 j = atoi(linebuf);
1311
1312 printf("checklen 0/1 ? ");
1313 fgets(linebuf, 256, stdin);
1314 checklen = atoi(linebuf);
1315
1316
1317 b = avl64_findanyrange(&tree, i, j, checklen);
1318 if (b) {
1319 printf("Found something\n");
1320 t = b;
1321 while (t) {
1322 if (t != b &&
1323 AVL_START(&tree, t) >= j)
1324 break;
1325 avl64_printnode(&tree, t, 0);
1326 t = t->avl_nextino;
1327 }
1328 printf("\n");
1329 }
1330 }
1331 }
1332 }
1333 }
1334 #endif
1335
/*
 * Given a tree, find value; will return the range enclosing value,
 * or the range immediately succeeding value (dir == AVL_SUCCEED),
 * or the range immediately preceding value (dir == AVL_PRECEED).
 * Returns NULL if no such range exists.
 */
avl64node_t *
avl64_findadjacent(
	register avl64tree_desc_t *tree,
	register __uint64_t value,
	register int dir)
{
	register avl64node_t *np = tree->avl_root;

	while (np) {
		if (value < AVL_START(tree, np)) {
			if (np->avl_back) {
				np = np->avl_back;
				continue;
			}
			/* if we were to add node with value, would
			 * have a growth of AVL_BACK
			 */
			if (dir == AVL_SUCCEED) {
				/* if succeeding node is needed, this is it.
				 */
				return(np);
			}
			if (dir == AVL_PRECEED) {
				/*
				 * find nearest ancestor with lesser value.
				 */
				np = np->avl_parent;
				while (np) {
					if (AVL_END(tree, np) <= value)
						break;
					np = np->avl_parent;
				}
				return(np);
			}
			ASSERT(dir == AVL_SUCCEED || dir == AVL_PRECEED);
			break;
		}
		if (value >= AVL_END(tree, np)) {
			if (np->avl_forw) {
				np = np->avl_forw;
				continue;
			}
			/* if we were to add node with value, would
			 * have a growth of AVL_FORW;
			 */
			if (dir == AVL_SUCCEED) {
				/* we are looking for a succeeding node;
				 * this is nextino.
				 */
				return(np->avl_nextino);
			}
			if (dir == AVL_PRECEED) {
				/* looking for a preceding node; this is it. */
				return(np);
			}
			ASSERT(dir == AVL_SUCCEED || dir == AVL_PRECEED);
		}
		/* AVL_START(tree, np) <= value < AVL_END(tree, np) */
		return(np);
	}
	return NULL;
}
1403
1404
1405 #ifdef AVL_FUTURE_ENHANCEMENTS
1406 /*
1407 * avl_findranges:
1408 *
1409 * Given range r [start, end), find all ranges in tree which are contained
1410 * in r. At return, startp and endp point to first and last of
1411 * a chain of elements which describe the contained ranges. Elements
1412 * in startp ... endp are in sort order, and can be accessed by
1413 * using avl_nextino.
1414 */
1415
1416 void
1417 avl64_findranges(
1418 register avl64tree_desc_t *tree,
1419 register __uint64_t start,
1420 register __uint64_t end,
1421 avl64node_t **startp,
1422 avl64node_t **endp)
1423 {
1424 register avl64node_t *np;
1425
1426 np = avl64_findadjacent(tree, start, AVL_SUCCEED);
1427 if (np == NULL /* nothing succeding start */
1428 || (np && (end <= AVL_START(tree, np))))
1429 /* something follows start,
1430 but... is entirely after end */
1431 {
1432 *startp = NULL;
1433 *endp = NULL;
1434 return;
1435 }
1436
1437 *startp = np;
1438
1439 /* see if end is in this region itself */
1440 if (end <= AVL_END(tree, np) ||
1441 np->avl_nextino == NULL ||
1442 (np->avl_nextino &&
1443 (end <= AVL_START(tree, np->avl_nextino)))) {
1444 *endp = np;
1445 return;
1446 }
1447 /* have to munge for end */
1448 /*
1449 * note: have to look for (end - 1), since
1450 * findadjacent will look for exact value, and does not
1451 * care about the fact that end is actually one more
1452 * than the value actually being looked for; thus feed it one less.
1453 */
1454 *endp = avl64_findadjacent(tree, (end-1), AVL_PRECEED);
1455 ASSERT(*endp);
1456 }
1457
1458 #endif /* AVL_FUTURE_ENHANCEMENTS */