]> git.ipfire.org Git - thirdparty/xfsprogs-dev.git/blob - repair/incore_ext.c
Revert "Merge branch 'xfsprogs-dev'"
[thirdparty/xfsprogs-dev.git] / repair / incore_ext.c
1 /*
2 * Copyright (c) 2000-2001,2005 Silicon Graphics, Inc.
3 * All Rights Reserved.
4 *
5 * This program is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU General Public License as
7 * published by the Free Software Foundation.
8 *
9 * This program is distributed in the hope that it would be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
13 *
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, write the Free Software Foundation,
16 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18
19 #include <libxfs.h>
20 #include "avl.h"
21 #include "globals.h"
22 #include "incore.h"
23 #include "agheader.h"
24 #include "protos.h"
25 #include "err_protos.h"
26 #include "avl64.h"
27 #include "threads.h"
28 #define ALLOC_NUM_EXTS 100
29
/*
 * paranoia -- account for any weird padding, 64/32-bit alignment, etc.
 *
 * A slab of ALLOC_NUM_EXTS extent tree nodes obtained with one
 * malloc().  The embedded ba_rec_t links the slab onto ba_list via
 * record_allocation() so the whole chunk can be bulk-freed later by
 * free_allocations().
 */
typedef struct extent_alloc_rec  {
	ba_rec_t		alloc_rec;	/* linkage for the slab list */
	extent_tree_node_t	extents[ALLOC_NUM_EXTS];
} extent_alloc_rec_t;
37
/*
 * Same slab scheme as extent_alloc_rec_t, but for realtime extent
 * nodes (64-bit startblocks); slabs are tracked on rt_ba_list.
 */
typedef struct rt_extent_alloc_rec  {
	ba_rec_t		alloc_rec;	/* linkage for the slab list */
	rt_extent_tree_node_t	extents[ALLOC_NUM_EXTS];
} rt_extent_alloc_rec_t;
42
/*
 * note: there are 4 sets of incore things handled here:
 * block bitmaps, extent trees, uncertain inode list,
 * and inode tree.  The tree-based code uses the AVL
 * tree package used by the IRIX kernel VM code
 * (sys/avl.h).  The inode list code uses the same records
 * as the inode tree code for convenience.  The bitmaps
 * and bitmap operators are mostly macros defined in incore.h.
 * There are one of everything per AG except for extent
 * trees.  There's one duplicate extent tree, one bno and
 * one bcnt extent tree per AG.  Not all of the above exist
 * through all phases.  The duplicate extent tree gets trashed
 * at the end of phase 4.  The bno/bcnt trees don't appear until
 * phase 5.  The uncertain inode list goes away at the end of
 * phase 3.  The inode tree and bno/bnct trees go away after phase 5.
 */

/*
 * Free list of recycled extent tree nodes, threaded through
 * avl_node.avl_nextino; refilled a slab at a time by
 * mk_extent_tree_nodes().
 */
typedef struct ext_flist_s  {
	extent_tree_node_t	*list;	/* head of free list */
	int			cnt;	/* number of free nodes */
} ext_flist_t;

static ext_flist_t ext_flist;

/* same free-list scheme for realtime extent nodes */
typedef struct rt_ext_flist_s  {
	rt_extent_tree_node_t	*list;	/* head of free list */
	int			cnt;	/* number of free nodes */
} rt_ext_flist_t;

static rt_ext_flist_t rt_ext_flist;

static avl64tree_desc_t	*rt_ext_tree_ptr;	/* dup extent tree for rt */

avltree_desc_t	**extent_tree_ptrs;		/* array of extent tree ptrs */
						/* one per ag for dups */
static avltree_desc_t	**extent_bno_ptrs;	/*
						 * array of extent tree ptrs
						 * one per ag for free extents
						 * sorted by starting block
						 * number
						 */
static avltree_desc_t	**extent_bcnt_ptrs;	/*
						 * array of extent tree ptrs
						 * one per ag for free extents
						 * sorted by size
						 */

/*
 * list of allocated "blocks" (node slabs) for easy freeing later
 */
static ba_rec_t		*ba_list;
static ba_rec_t		*rt_ba_list;

/*
 * locks.  ext_flist_lock guards ext_flist; rt_ext_flist_lock guards
 * rt_ext_flist; rt_ext_tree_lock serializes access to the single
 * shared realtime dup tree.
 */
static pthread_mutex_t	ext_flist_lock;
static pthread_mutex_t	rt_ext_tree_lock;
static pthread_mutex_t	rt_ext_flist_lock;
101
102 /*
103 * extent tree stuff is avl trees of duplicate extents,
104 * sorted in order by block number. there is one tree per ag.
105 */
106
/*
 * Allocate and initialize one extent tree node.  Nodes come from the
 * shared free list (ext_flist, guarded by ext_flist_lock); when the
 * list is empty a fresh slab of ALLOC_NUM_EXTS nodes is malloc'd and
 * pushed onto it.  Never returns NULL -- allocation failure is fatal
 * via do_error().
 */
static extent_tree_node_t *
mk_extent_tree_nodes(xfs_agblock_t new_startblock,
	xfs_extlen_t new_blockcount, extent_state_t new_state)
{
	int i;
	extent_tree_node_t *new;
	extent_alloc_rec_t *rec;

	pthread_mutex_lock(&ext_flist_lock);
	if (ext_flist.cnt == 0)  {
		ASSERT(ext_flist.list == NULL);

		if ((rec = malloc(sizeof(extent_alloc_rec_t))) == NULL)
			do_error(
			_("couldn't allocate new extent descriptors.\n"));

		/* remember the slab so it can be bulk-freed later */
		record_allocation(&rec->alloc_rec, ba_list);

		new = &rec->extents[0];

		/* push every node in the new slab onto the free list */
		for (i = 0; i < ALLOC_NUM_EXTS; i++)  {
			new->avl_node.avl_nextino = (avlnode_t *)
						ext_flist.list;
			ext_flist.list = new;
			ext_flist.cnt++;
			new++;
		}
	}

	ASSERT(ext_flist.list != NULL);

	/* pop the head of the free list */
	new = ext_flist.list;
	ext_flist.list = (extent_tree_node_t *) new->avl_node.avl_nextino;
	ext_flist.cnt--;
	new->avl_node.avl_nextino = NULL;
	pthread_mutex_unlock(&ext_flist_lock);

	/* initialize node */

	new->ex_startblock = new_startblock;
	new->ex_blockcount = new_blockcount;
	new->ex_state = new_state;
	new->next = NULL;
	new->last = NULL;

	return(new);
}
154
155 void
156 release_extent_tree_node(extent_tree_node_t *node)
157 {
158 pthread_mutex_lock(&ext_flist_lock);
159 node->avl_node.avl_nextino = (avlnode_t *) ext_flist.list;
160 ext_flist.list = node;
161 ext_flist.cnt++;
162 pthread_mutex_unlock(&ext_flist_lock);
163
164 return;
165 }
166
167 /*
168 * routines to recycle all nodes in a tree. it walks the tree
169 * and puts all nodes back on the free list so the nodes can be
170 * reused. the duplicate and bno/bcnt extent trees for each AG
171 * are recycled after they're no longer needed to save memory
172 */
173 void
174 release_extent_tree(avltree_desc_t *tree)
175 {
176 extent_tree_node_t *ext;
177 extent_tree_node_t *tmp;
178 extent_tree_node_t *lext;
179 extent_tree_node_t *ltmp;
180
181 if (tree->avl_firstino == NULL)
182 return;
183
184 ext = (extent_tree_node_t *) tree->avl_firstino;
185
186 while (ext != NULL) {
187 tmp = (extent_tree_node_t *) ext->avl_node.avl_nextino;
188
189 /*
190 * ext->next is guaranteed to be set only in bcnt trees
191 */
192 if (ext->next != NULL) {
193 lext = ext->next;
194 while (lext != NULL) {
195 ltmp = lext->next;
196 release_extent_tree_node(lext);
197 lext = ltmp;
198 }
199 }
200
201 release_extent_tree_node(ext);
202 ext = tmp;
203 }
204
205 tree->avl_root = tree->avl_firstino = NULL;
206
207 return;
208 }
209
210 /*
211 * top-level (visible) routines
212 */
213 void
214 release_dup_extent_tree(xfs_agnumber_t agno)
215 {
216 release_extent_tree(extent_tree_ptrs[agno]);
217
218 return;
219 }
220
221 void
222 release_agbno_extent_tree(xfs_agnumber_t agno)
223 {
224 release_extent_tree(extent_bno_ptrs[agno]);
225
226 return;
227 }
228
229 void
230 release_agbcnt_extent_tree(xfs_agnumber_t agno)
231 {
232 release_extent_tree(extent_bcnt_ptrs[agno]);
233
234 return;
235 }
236
237 /*
238 * the next 4 routines manage the trees of free extents -- 2 trees
239 * per AG. The first tree is sorted by block number. The second
240 * tree is sorted by extent size. This is the bno tree.
241 */
242 void
243 add_bno_extent(xfs_agnumber_t agno, xfs_agblock_t startblock,
244 xfs_extlen_t blockcount)
245 {
246 extent_tree_node_t *ext;
247
248 ASSERT(extent_bno_ptrs != NULL);
249 ASSERT(extent_bno_ptrs[agno] != NULL);
250
251 ext = mk_extent_tree_nodes(startblock, blockcount, XR_E_FREE);
252
253 if (avl_insert(extent_bno_ptrs[agno], (avlnode_t *) ext) == NULL) {
254 do_error(_("duplicate bno extent range\n"));
255 }
256 }
257
258 extent_tree_node_t *
259 findfirst_bno_extent(xfs_agnumber_t agno)
260 {
261 ASSERT(extent_bno_ptrs != NULL);
262 ASSERT(extent_bno_ptrs[agno] != NULL);
263
264 return((extent_tree_node_t *) extent_bno_ptrs[agno]->avl_firstino);
265 }
266
267 extent_tree_node_t *
268 find_bno_extent(xfs_agnumber_t agno, xfs_agblock_t startblock)
269 {
270 ASSERT(extent_bno_ptrs != NULL);
271 ASSERT(extent_bno_ptrs[agno] != NULL);
272
273 return((extent_tree_node_t *) avl_find(extent_bno_ptrs[agno],
274 startblock));
275 }
276
277 /*
278 * delete a node that's in the tree (pointer obtained by a find routine)
279 */
280 void
281 get_bno_extent(xfs_agnumber_t agno, extent_tree_node_t *ext)
282 {
283 ASSERT(extent_bno_ptrs != NULL);
284 ASSERT(extent_bno_ptrs[agno] != NULL);
285
286 avl_delete(extent_bno_ptrs[agno], &ext->avl_node);
287
288 return;
289 }
290
/*
 * normalizing constant for bcnt size -> address conversion (see avl ops)
 * used by the AVL tree code to convert sizes and must be used when
 * doing an AVL search in the tree (e.g. avl_findrange(s))
 *
 * NOTE(review): the live avl_ext_bcnt_start/end callbacks below key
 * directly on ex_blockcount and do not use BCNT_ADDR() -- confirm
 * whether this macro is still referenced anywhere before relying on
 * the inverted-address scheme it describes.
 */
#define MAXBCNT		0xFFFFFFFF
#define BCNT_ADDR(cnt)	((unsigned int) MAXBCNT - (cnt))
298
/*
 * the next 4 routines manage the trees of free extents -- 2 trees
 * per AG.  The first tree is sorted by block number.  The second
 * tree is sorted by extent size.  This is the bcnt tree.
 *
 * Insert a free extent into agno's by-size tree.  The AVL code
 * cannot hold duplicate keys, so all extents of one size share a
 * single in-tree "anchor" node; the rest are chained off the
 * anchor's ->next pointer in increasing startblock order, and the
 * anchor's ->last field caches the tail for fast appends.
 */
void
add_bcnt_extent(xfs_agnumber_t agno, xfs_agblock_t startblock,
		xfs_extlen_t blockcount)
{
	extent_tree_node_t *ext, *prev, *current, *top;
	xfs_agblock_t tmp_startblock;
	xfs_extlen_t tmp_blockcount;
	extent_state_t tmp_state;

	ASSERT(extent_bcnt_ptrs != NULL);
	ASSERT(extent_bcnt_ptrs[agno] != NULL);

	ext = mk_extent_tree_nodes(startblock, blockcount, XR_E_FREE);

	ASSERT(ext->next == NULL);

#ifdef XR_BCNT_TRACE
	fprintf(stderr, "adding bcnt: agno = %d, start = %u, count = %u\n",
			agno, startblock, blockcount);
#endif
	if ((current = (extent_tree_node_t *) avl_find(extent_bcnt_ptrs[agno],
			blockcount)) != NULL)  {
		/*
		 * avl tree code doesn't handle dups so insert
		 * onto linked list in increasing startblock order
		 *
		 * when called from mk_incore_fstree,
		 * startblock is in increasing order.
		 * current is an "anchor" node.
		 * quick check if the new ext goes to the end.
		 * if so, append at the end, using the last field
		 * of the "anchor".
		 */
		ASSERT(current->last != NULL);
		if (startblock > current->last->ex_startblock) {
			current->last->next = ext;
			current->last = ext;
			return;
		}

		/*
		 * scan, to find the proper location for new entry.
		 * this scan is *very* expensive and gets worse with
		 * with increasing entries.
		 */
		top = prev = current;
		while (current != NULL &&
				startblock > current->ex_startblock) {
			prev = current;
			current = current->next;
		}

		if (top == current) {
			ASSERT(top == prev);
			/*
			 * new entry should be ahead of current.
			 * to keep the avl tree intact,
			 * swap the values of to-be-inserted element
			 * and the values of the head of the list.
			 * then insert as the 2nd element on the list.
			 *
			 * see the comment in get_bcnt_extent()
			 * as to why we have to do this.
			 */
			tmp_startblock = top->ex_startblock;
			tmp_blockcount = top->ex_blockcount;
			tmp_state = top->ex_state;

			top->ex_startblock = ext->ex_startblock;
			top->ex_blockcount = ext->ex_blockcount;
			top->ex_state = ext->ex_state;

			ext->ex_startblock = tmp_startblock;
			ext->ex_blockcount = tmp_blockcount;
			ext->ex_state = tmp_state;

			current = top->next;
			prev = top;
		}

		/* simple list insertion between prev and current */
		prev->next = ext;
		ext->next = current;

		return;
	}

	/*
	 * no extent of this size yet -- the new node becomes the
	 * in-tree anchor for this blockcount.
	 * NOTE(review): the message below says "bno" although this is
	 * the bcnt tree -- confirm before changing the translated
	 * string.
	 */
	if (avl_insert(extent_bcnt_ptrs[agno], (avlnode_t *) ext) == NULL)  {
		do_error(_(": duplicate bno extent range\n"));
	}

	ext->last = ext;	/* ext is an "anchor" node */

	return;
}
398
399 extent_tree_node_t *
400 findfirst_bcnt_extent(xfs_agnumber_t agno)
401 {
402 ASSERT(extent_bcnt_ptrs != NULL);
403 ASSERT(extent_bcnt_ptrs[agno] != NULL);
404
405 return((extent_tree_node_t *) extent_bcnt_ptrs[agno]->avl_firstino);
406 }
407
408 extent_tree_node_t *
409 findbiggest_bcnt_extent(xfs_agnumber_t agno)
410 {
411 extern avlnode_t *avl_lastino(avlnode_t *root);
412
413 ASSERT(extent_bcnt_ptrs != NULL);
414 ASSERT(extent_bcnt_ptrs[agno] != NULL);
415
416 return((extent_tree_node_t *) avl_lastino(extent_bcnt_ptrs[agno]->avl_root));
417 }
418
419 extent_tree_node_t *
420 findnext_bcnt_extent(xfs_agnumber_t agno, extent_tree_node_t *ext)
421 {
422 avlnode_t *nextino;
423
424 if (ext->next != NULL) {
425 ASSERT(ext->ex_blockcount == ext->next->ex_blockcount);
426 ASSERT(ext->ex_startblock < ext->next->ex_startblock);
427 return(ext->next);
428 } else {
429 /*
430 * have to look at the top of the list to get the
431 * correct avl_nextino pointer since that pointer
432 * is maintained and altered by the AVL code.
433 */
434 nextino = avl_find(extent_bcnt_ptrs[agno], ext->ex_blockcount);
435 ASSERT(nextino != NULL);
436 if (nextino->avl_nextino != NULL) {
437 ASSERT(ext->ex_blockcount < ((extent_tree_node_t *)
438 nextino->avl_nextino)->ex_blockcount);
439 }
440 return((extent_tree_node_t *) nextino->avl_nextino);
441 }
442 }
443
/*
 * this is meant to be called after you walk the bno tree to
 * determine exactly which extent you want (so you'll know the
 * desired value for startblock when you call this routine).
 *
 * Removes the extent (startblock, blockcount) from agno's by-size
 * tree and returns it.  Returns NULL only when no extent of the
 * requested blockcount exists at all; asserts if the size exists
 * but the requested startblock does not.
 */
extent_tree_node_t *
get_bcnt_extent(xfs_agnumber_t agno, xfs_agblock_t startblock,
		xfs_extlen_t blockcount)
{
	extent_tree_node_t *ext, *prev, *top;
	xfs_agblock_t tmp_startblock;
	xfs_extlen_t tmp_blockcount;
	extent_state_t tmp_state;

	prev = NULL;
	ASSERT(extent_bcnt_ptrs != NULL);
	ASSERT(extent_bcnt_ptrs[agno] != NULL);

	if ((ext = (extent_tree_node_t *) avl_find(extent_bcnt_ptrs[agno],
			blockcount)) == NULL)
		return(NULL);

	top = ext;

	if (ext->next != NULL)  {
		/*
		 * pull it off the list: scan the same-size chain for
		 * the node with the requested startblock
		 */
		while (ext != NULL && startblock != ext->ex_startblock)  {
			prev = ext;
			ext = ext->next;
		}
		ASSERT(ext != NULL);
		if (ext == top)  {
			/*
			 * this node is linked into the tree so we
			 * swap the core values so we can delete
			 * the next item on the list instead of
			 * the head of the list.  This is because
			 * the rest of the tree undoubtedly has
			 * pointers to the piece of memory that
			 * is the head of the list so pulling
			 * the item out of the list and hence
			 * the avl tree would be a bad idea.
			 *
			 * (cheaper than the alternative, a tree
			 * delete of this node followed by a tree
			 * insert of the next node on the list).
			 */
			tmp_startblock = ext->next->ex_startblock;
			tmp_blockcount = ext->next->ex_blockcount;
			tmp_state = ext->next->ex_state;

			ext->next->ex_startblock = ext->ex_startblock;
			ext->next->ex_blockcount = ext->ex_blockcount;
			ext->next->ex_state = ext->ex_state;

			ext->ex_startblock = tmp_startblock;
			ext->ex_blockcount = tmp_blockcount;
			ext->ex_state = tmp_state;

			ext = ext->next;
			prev = top;
		}
		/*
		 * now, a simple list deletion
		 */
		prev->next = ext->next;
		ext->next = NULL;
	} else  {
		/*
		 * no list, just one node.  simply delete
		 */
		avl_delete(extent_bcnt_ptrs[agno], &ext->avl_node);
	}

	ASSERT(ext->ex_startblock == startblock);
	ASSERT(ext->ex_blockcount == blockcount);
	return(ext);
}
524
/*
 * the next 2 routines manage the trees of duplicate extents -- 1 tree
 * per AG
 *
 * Record (startblock, blockcount) as a duplicate extent in agno's
 * dup tree, coalescing it with any existing dup extents it touches:
 * overlapping/adjacent nodes are deleted and replaced by a single
 * node covering the union of the ranges.
 */
void
add_dup_extent(xfs_agnumber_t agno, xfs_agblock_t startblock,
		xfs_extlen_t blockcount)
{
	extent_tree_node_t *first, *last, *ext, *next_ext;
	xfs_agblock_t new_startblock;
	xfs_extlen_t new_blockcount;

	ASSERT(agno < glob_agcount);

#ifdef XR_DUP_TRACE
	fprintf(stderr, "Adding dup extent - %d/%d %d\n", agno, startblock, blockcount);
#endif
	/*
	 * find all tree nodes within [startblock - 1, end + 1] --
	 * i.e. everything overlapping or adjacent to the new range
	 */
	avl_findranges(extent_tree_ptrs[agno], startblock - 1,
		startblock + blockcount + 1,
		(avlnode_t **) &first, (avlnode_t **) &last);
	/*
	 * find adjacent and overlapping extent blocks
	 */
	if (first == NULL && last == NULL)  {
		/* nothing, just make and insert new extent */

		ext = mk_extent_tree_nodes(startblock, blockcount, XR_E_MULT);

		if (avl_insert(extent_tree_ptrs[agno],
				(avlnode_t *) ext) == NULL)  {
			do_error(_("duplicate extent range\n"));
		}

		return;
	}

	ASSERT(first != NULL && last != NULL);

	/*
	 * find the new composite range, delete old extent nodes
	 * as we go
	 */
	new_startblock = startblock;
	new_blockcount = blockcount;

	for (ext = first;
		ext != (extent_tree_node_t *) last->avl_node.avl_nextino;
		ext = next_ext)  {
		/*
		 * preserve the next inorder node
		 */
		next_ext = (extent_tree_node_t *) ext->avl_node.avl_nextino;
		/*
		 * just bail if the new extent is contained within an old one
		 */
		if (ext->ex_startblock <= startblock &&
				ext->ex_blockcount >= blockcount)
			return;
		/*
		 * now check for overlaps and adjacent extents
		 * NOTE(review): with '||' this test holds for every
		 * node avl_findranges() returned -- confirm whether
		 * '&&' was intended before changing it.
		 */
		if (ext->ex_startblock + ext->ex_blockcount >= startblock
				|| ext->ex_startblock <= startblock + blockcount)  {

			/* grow the composite range to cover this node */
			if (ext->ex_startblock < new_startblock)
				new_startblock = ext->ex_startblock;

			if (ext->ex_startblock + ext->ex_blockcount >
					new_startblock + new_blockcount)
				new_blockcount = ext->ex_startblock +
							ext->ex_blockcount -
							new_startblock;

			avl_delete(extent_tree_ptrs[agno], (avlnode_t *) ext);
			continue;
		}
	}

	/* insert the single coalesced replacement node */
	ext = mk_extent_tree_nodes(new_startblock, new_blockcount, XR_E_MULT);

	if (avl_insert(extent_tree_ptrs[agno], (avlnode_t *) ext) == NULL)  {
		do_error(_("duplicate extent range\n"));
	}

	return;
}
611
612 static __psunsigned_t
613 avl_ext_start(avlnode_t *node)
614 {
615 return((__psunsigned_t)
616 ((extent_tree_node_t *) node)->ex_startblock);
617 }
618
619 static __psunsigned_t
620 avl_ext_end(avlnode_t *node)
621 {
622 return((__psunsigned_t) (
623 ((extent_tree_node_t *) node)->ex_startblock +
624 ((extent_tree_node_t *) node)->ex_blockcount));
625 }
626
627 /*
628 * convert size to an address for the AVL tree code -- the bigger the size,
629 * the lower the address so the biggest extent will be first in the tree
630 */
631 static __psunsigned_t
632 avl_ext_bcnt_start(avlnode_t *node)
633 {
634 /*
635 return((__psunsigned_t) (BCNT_ADDR(((extent_tree_node_t *)
636 node)->ex_blockcount)));
637 */
638 return((__psunsigned_t) ((extent_tree_node_t *)node)->ex_blockcount);
639 }
640
641 static __psunsigned_t
642 avl_ext_bcnt_end(avlnode_t *node)
643 {
644 /*
645 return((__psunsigned_t) (BCNT_ADDR(((extent_tree_node_t *)
646 node)->ex_blockcount)));
647 */
648 return((__psunsigned_t) ((extent_tree_node_t *)node)->ex_blockcount);
649 }
650
/* start/end callbacks keying the free-by-size (bcnt) trees */
avlops_t avl_extent_bcnt_tree_ops = {
	avl_ext_bcnt_start,
	avl_ext_bcnt_end
};

/* start/end callbacks keying the dup and free-by-bno trees */
avlops_t avl_extent_tree_ops = {
	avl_ext_start,
	avl_ext_end
};
660
/*
 * for real-time extents -- have to dup code since realtime extent
 * startblocks can be 64-bit values.
 *
 * Allocate and initialize one realtime extent tree node from the
 * rt free list (rt_ext_flist, guarded by rt_ext_flist_lock),
 * refilling the list a slab at a time.  Never returns NULL --
 * allocation failure is fatal via do_error().
 */
static rt_extent_tree_node_t *
mk_rt_extent_tree_nodes(xfs_drtbno_t new_startblock,
	xfs_extlen_t new_blockcount, extent_state_t new_state)
{
	int i;
	rt_extent_tree_node_t *new;
	rt_extent_alloc_rec_t *rec;

	pthread_mutex_lock(&rt_ext_flist_lock);
	if (rt_ext_flist.cnt == 0)  {
		ASSERT(rt_ext_flist.list == NULL);

		if ((rec = malloc(sizeof(rt_extent_alloc_rec_t))) == NULL)
			do_error(
			_("couldn't allocate new extent descriptors.\n"));

		/* remember the slab so it can be bulk-freed later */
		record_allocation(&rec->alloc_rec, rt_ba_list);

		new = &rec->extents[0];

		/* push every node in the new slab onto the free list */
		for (i = 0; i < ALLOC_NUM_EXTS; i++)  {
			new->avl_node.avl_nextino = (avlnode_t *)
						rt_ext_flist.list;
			rt_ext_flist.list = new;
			rt_ext_flist.cnt++;
			new++;
		}
	}

	ASSERT(rt_ext_flist.list != NULL);

	/* pop the head of the free list */
	new = rt_ext_flist.list;
	rt_ext_flist.list = (rt_extent_tree_node_t *) new->avl_node.avl_nextino;
	rt_ext_flist.cnt--;
	new->avl_node.avl_nextino = NULL;
	pthread_mutex_unlock(&rt_ext_flist_lock);

	/* initialize node */

	new->rt_startblock = new_startblock;
	new->rt_blockcount = new_blockcount;
	new->rt_state = new_state;

	return(new);
}
710
#if 0
/*
 * Compiled-out rt tree recycling routines.
 * NOTE(review): release_rt_extent_tree() declares its walk
 * variables as extent_tree_node_t * while passing them to a
 * function expecting rt_extent_tree_node_t *, and it touches
 * rt_ext_flist without taking rt_ext_flist_lock -- both would need
 * fixing before re-enabling this code.
 */
void
release_rt_extent_tree_node(rt_extent_tree_node_t *node)
{
	node->avl_node.avl_nextino = (avlnode_t *) rt_ext_flist.list;
	rt_ext_flist.list = node;
	rt_ext_flist.cnt++;

	return;
}

void
release_rt_extent_tree()
{
	extent_tree_node_t	*ext;
	extent_tree_node_t	*tmp;
	extent_tree_node_t	*lext;
	extent_tree_node_t	*ltmp;
	avl64tree_desc_t	*tree;

	tree = rt_extent_tree_ptr;

	if (tree->avl_firstino == NULL)
		return;

	ext = (extent_tree_node_t *) tree->avl_firstino;

	while (ext != NULL)  {
		tmp = (extent_tree_node_t *) ext->avl_node.avl_nextino;
		release_rt_extent_tree_node(ext);
		ext = tmp;
	}

	tree->avl_root = tree->avl_firstino = NULL;

	return;
}
#endif
749
750 /*
751 * don't need release functions for realtime tree teardown
752 * since we only have one tree, not one per AG
753 */
754 /* ARGSUSED */
755 void
756 free_rt_dup_extent_tree(xfs_mount_t *mp)
757 {
758 ASSERT(mp->m_sb.sb_rblocks != 0);
759
760 free_allocations(rt_ba_list);
761 free(rt_ext_tree_ptr);
762
763 rt_ba_list = NULL;
764 rt_ext_tree_ptr = NULL;
765
766 return;
767 }
768
/*
 * add a duplicate real-time extent
 *
 * 64-bit mirror of add_dup_extent(): records the range in the
 * single shared rt dup tree (serialized by rt_ext_tree_lock),
 * coalescing it with any overlapping or adjacent dup extents.
 */
void
add_rt_dup_extent(xfs_drtbno_t startblock, xfs_extlen_t blockcount)
{
	rt_extent_tree_node_t *first, *last, *ext, *next_ext;
	xfs_drtbno_t new_startblock;
	xfs_extlen_t new_blockcount;

	pthread_mutex_lock(&rt_ext_tree_lock);
	/*
	 * find all tree nodes within [startblock - 1, end + 1] --
	 * i.e. everything overlapping or adjacent to the new range
	 */
	avl64_findranges(rt_ext_tree_ptr, startblock - 1,
		startblock + blockcount + 1,
		(avl64node_t **) &first, (avl64node_t **) &last);
	/*
	 * find adjacent and overlapping extent blocks
	 */
	if (first == NULL && last == NULL)  {
		/* nothing, just make and insert new extent */

		ext = mk_rt_extent_tree_nodes(startblock,
				blockcount, XR_E_MULT);

		if (avl64_insert(rt_ext_tree_ptr,
				(avl64node_t *) ext) == NULL)  {
			do_error(_("duplicate extent range\n"));
		}

		pthread_mutex_unlock(&rt_ext_tree_lock);
		return;
	}

	ASSERT(first != NULL && last != NULL);

	/*
	 * find the new composite range, delete old extent nodes
	 * as we go
	 */
	new_startblock = startblock;
	new_blockcount = blockcount;

	for (ext = first;
		ext != (rt_extent_tree_node_t *) last->avl_node.avl_nextino;
		ext = next_ext)  {
		/*
		 * preserve the next inorder node
		 */
		next_ext = (rt_extent_tree_node_t *) ext->avl_node.avl_nextino;
		/*
		 * just bail if the new extent is contained within an old one
		 */
		if (ext->rt_startblock <= startblock &&
				ext->rt_blockcount >= blockcount)  {
			pthread_mutex_unlock(&rt_ext_tree_lock);
			return;
		}
		/*
		 * now check for overlaps and adjacent extents
		 * NOTE(review): with '||' this test holds for every
		 * node avl64_findranges() returned -- confirm whether
		 * '&&' was intended before changing it.
		 */
		if (ext->rt_startblock + ext->rt_blockcount >= startblock
				|| ext->rt_startblock <= startblock + blockcount)  {

			/* grow the composite range to cover this node */
			if (ext->rt_startblock < new_startblock)
				new_startblock = ext->rt_startblock;

			if (ext->rt_startblock + ext->rt_blockcount >
					new_startblock + new_blockcount)
				new_blockcount = ext->rt_startblock +
							ext->rt_blockcount -
							new_startblock;

			avl64_delete(rt_ext_tree_ptr, (avl64node_t *) ext);
			continue;
		}
	}

	/* insert the single coalesced replacement node */
	ext = mk_rt_extent_tree_nodes(new_startblock,
			new_blockcount, XR_E_MULT);

	if (avl64_insert(rt_ext_tree_ptr, (avl64node_t *) ext) == NULL)  {
		do_error(_("duplicate extent range\n"));
	}

	pthread_mutex_unlock(&rt_ext_tree_lock);
	return;
}
855
856 /*
857 * returns 1 if block is a dup, 0 if not
858 */
859 /* ARGSUSED */
860 int
861 search_rt_dup_extent(xfs_mount_t *mp, xfs_drtbno_t bno)
862 {
863 int ret;
864
865 pthread_mutex_lock(&rt_ext_tree_lock);
866 if (avl64_findrange(rt_ext_tree_ptr, bno) != NULL)
867 ret = 1;
868 else
869 ret = 0;
870 pthread_mutex_unlock(&rt_ext_tree_lock);
871 return(ret);
872 }
873
874 static __uint64_t
875 avl64_rt_ext_start(avl64node_t *node)
876 {
877 return(((rt_extent_tree_node_t *) node)->rt_startblock);
878 }
879
880 static __uint64_t
881 avl64_ext_end(avl64node_t *node)
882 {
883 return(((rt_extent_tree_node_t *) node)->rt_startblock +
884 ((rt_extent_tree_node_t *) node)->rt_blockcount);
885 }
886
/* start/end callbacks keying the realtime dup extent tree */
avl64ops_t avl64_extent_tree_ops = {
	avl64_rt_ext_start,
	avl64_ext_end
};
891
892 void
893 incore_ext_init(xfs_mount_t *mp)
894 {
895 int i;
896 xfs_agnumber_t agcount = mp->m_sb.sb_agcount;
897
898 ba_list = NULL;
899 rt_ba_list = NULL;
900 pthread_mutex_init(&ext_flist_lock, NULL);
901 pthread_mutex_init(&rt_ext_tree_lock, NULL);
902 pthread_mutex_init(&rt_ext_flist_lock, NULL);
903
904 if ((extent_tree_ptrs = malloc(agcount *
905 sizeof(avltree_desc_t *))) == NULL)
906 do_error(
907 _("couldn't malloc dup extent tree descriptor table\n"));
908
909 if ((extent_bno_ptrs = malloc(agcount *
910 sizeof(avltree_desc_t *))) == NULL)
911 do_error(
912 _("couldn't malloc free by-bno extent tree descriptor table\n"));
913
914 if ((extent_bcnt_ptrs = malloc(agcount *
915 sizeof(avltree_desc_t *))) == NULL)
916 do_error(
917 _("couldn't malloc free by-bcnt extent tree descriptor table\n"));
918
919 for (i = 0; i < agcount; i++) {
920 if ((extent_tree_ptrs[i] =
921 malloc(sizeof(avltree_desc_t))) == NULL)
922 do_error(
923 _("couldn't malloc dup extent tree descriptor\n"));
924 if ((extent_bno_ptrs[i] =
925 malloc(sizeof(avltree_desc_t))) == NULL)
926 do_error(
927 _("couldn't malloc bno extent tree descriptor\n"));
928 if ((extent_bcnt_ptrs[i] =
929 malloc(sizeof(avltree_desc_t))) == NULL)
930 do_error(
931 _("couldn't malloc bcnt extent tree descriptor\n"));
932 }
933
934 for (i = 0; i < agcount; i++) {
935 avl_init_tree(extent_tree_ptrs[i], &avl_extent_tree_ops);
936 avl_init_tree(extent_bno_ptrs[i], &avl_extent_tree_ops);
937 avl_init_tree(extent_bcnt_ptrs[i], &avl_extent_bcnt_tree_ops);
938 }
939
940 if ((rt_ext_tree_ptr = malloc(sizeof(avltree_desc_t))) == NULL)
941 do_error(_("couldn't malloc dup rt extent tree descriptor\n"));
942
943 avl64_init_tree(rt_ext_tree_ptr, &avl64_extent_tree_ops);
944
945 ext_flist.cnt = 0;
946 ext_flist.list = NULL;
947
948 return;
949 }
950
951 /*
952 * this routine actually frees all the memory used to track per-AG trees
953 */
954 void
955 incore_ext_teardown(xfs_mount_t *mp)
956 {
957 xfs_agnumber_t i;
958
959 free_allocations(ba_list);
960
961 for (i = 0; i < mp->m_sb.sb_agcount; i++) {
962 free(extent_tree_ptrs[i]);
963 free(extent_bno_ptrs[i]);
964 free(extent_bcnt_ptrs[i]);
965 }
966
967 free(extent_bcnt_ptrs);
968 free(extent_bno_ptrs);
969 free(extent_tree_ptrs);
970
971 extent_bcnt_ptrs = extent_bno_ptrs = extent_tree_ptrs = NULL;
972
973 return;
974 }
975
976 int
977 count_extents(xfs_agnumber_t agno, avltree_desc_t *tree, int whichtree)
978 {
979 extent_tree_node_t *node;
980 int i = 0;
981
982 node = (extent_tree_node_t *) tree->avl_firstino;
983
984 while (node != NULL) {
985 i++;
986 if (whichtree)
987 node = findnext_bcnt_extent(agno, node);
988 else
989 node = findnext_bno_extent(node);
990 }
991
992 return(i);
993 }
994
995 int
996 count_bno_extents_blocks(xfs_agnumber_t agno, uint *numblocks)
997 {
998 __uint64_t nblocks;
999 extent_tree_node_t *node;
1000 int i = 0;
1001
1002 ASSERT(agno < glob_agcount);
1003
1004 nblocks = 0;
1005
1006 node = (extent_tree_node_t *) extent_bno_ptrs[agno]->avl_firstino;
1007
1008 while (node != NULL) {
1009 nblocks += node->ex_blockcount;
1010 i++;
1011 node = findnext_bno_extent(node);
1012 }
1013
1014 *numblocks = nblocks;
1015 return(i);
1016 }
1017
1018 int
1019 count_bno_extents(xfs_agnumber_t agno)
1020 {
1021 ASSERT(agno < glob_agcount);
1022 return(count_extents(agno, extent_bno_ptrs[agno], 0));
1023 }
1024
1025 int
1026 count_bcnt_extents(xfs_agnumber_t agno)
1027 {
1028 ASSERT(agno < glob_agcount);
1029 return(count_extents(agno, extent_bcnt_ptrs[agno], 1));
1030 }