/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
  Red Black Trees
  (C) 1999  Andrea Arcangeli <andrea@suse.de>


  linux/include/linux/rbtree.h

  To use rbtrees you'll have to implement your own insert and search cores.
  This avoids the need for callbacks, which would dramatically hurt
  performance.  It's not the cleanest approach, but it is how you get both
  performance and genericity in C (as opposed to C++).

  See Documentation/core-api/rbtree.rst for documentation and samples.
*/

#ifndef _LINUX_RBTREE_H
#define _LINUX_RBTREE_H

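/*
 * Illustrative sketch (not part of this header's API): the "roll your own
 * search core" pattern described above, assuming a hypothetical struct
 * mytype that embeds an rb_node and is keyed by a string.  See
 * Documentation/core-api/rbtree.rst for the canonical samples.
 *
 *	struct mytype {
 *		struct rb_node node;
 *		char *keystring;
 *	};
 *
 *	struct mytype *my_search(struct rb_root *root, const char *string)
 *	{
 *		struct rb_node *node = root->rb_node;
 *
 *		while (node) {
 *			struct mytype *data = rb_entry(node, struct mytype, node);
 *			int result = strcmp(string, data->keystring);
 *
 *			if (result < 0)
 *				node = node->rb_left;
 *			else if (result > 0)
 *				node = node->rb_right;
 *			else
 *				return data;
 *		}
 *		return NULL;
 *	}
 */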
#include <linux/container_of.h>
#include <linux/rbtree_types.h>

#include <linux/stddef.h>
#include <linux/rcupdate.h>

#define rb_parent(r)   ((struct rb_node *)((r)->__rb_parent_color & ~3))

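/*
 * Note on rb_parent() above: the parent pointer and the node colour are
 * packed into __rb_parent_color, with the colour kept in the low bits that
 * the alignment of struct rb_node leaves zero, hence the "& ~3" mask.
 */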
#define rb_entry(ptr, type, member) container_of(ptr, type, member)

#define RB_EMPTY_ROOT(root)  (READ_ONCE((root)->rb_node) == NULL)

/* 'empty' nodes are nodes that are known not to be inserted in an rbtree */
#define RB_EMPTY_NODE(node)  \
	((node)->__rb_parent_color == (unsigned long)(node))
#define RB_CLEAR_NODE(node)  \
	((node)->__rb_parent_color = (unsigned long)(node))


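/*
 * Illustrative sketch: RB_CLEAR_NODE() marks a node as not being on any
 * tree by making its parent pointer refer to itself, and RB_EMPTY_NODE()
 * tests for that state.  A typical pattern for a hypothetical object that
 * may or may not currently be linked into a tree:
 *
 *	RB_CLEAR_NODE(&obj->node);			(at init time)
 *
 *	if (!RB_EMPTY_NODE(&obj->node)) {		(at teardown)
 *		rb_erase(&obj->node, &tree);
 *		RB_CLEAR_NODE(&obj->node);
 *	}
 */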
extern void rb_insert_color(struct rb_node *, struct rb_root *);
extern void rb_erase(struct rb_node *, struct rb_root *);


/* Find logical next and previous nodes in a tree */
extern struct rb_node *rb_next(const struct rb_node *);
extern struct rb_node *rb_prev(const struct rb_node *);
extern struct rb_node *rb_first(const struct rb_root *);
extern struct rb_node *rb_last(const struct rb_root *);

/* Postorder iteration - always visit the parent after its children */
extern struct rb_node *rb_first_postorder(const struct rb_root *);
extern struct rb_node *rb_next_postorder(const struct rb_node *);

/* Fast replacement of a single node without remove/rebalance/add/rebalance */
extern void rb_replace_node(struct rb_node *victim, struct rb_node *new,
			    struct rb_root *root);
extern void rb_replace_node_rcu(struct rb_node *victim, struct rb_node *new,
				struct rb_root *root);

static inline void rb_link_node(struct rb_node *node, struct rb_node *parent,
				struct rb_node **rb_link)
{
	node->__rb_parent_color = (unsigned long)parent;
	node->rb_left = node->rb_right = NULL;

	*rb_link = node;
}

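/*
 * Illustrative sketch (not part of this header's API): the matching
 * open-coded insert core for the hypothetical struct mytype above, using
 * rb_link_node() to splice the new node in at the found position and
 * rb_insert_color() to rebalance.
 *
 *	bool my_insert(struct rb_root *root, struct mytype *data)
 *	{
 *		struct rb_node **new = &root->rb_node, *parent = NULL;
 *
 *		while (*new) {
 *			struct mytype *this = rb_entry(*new, struct mytype, node);
 *			int result = strcmp(data->keystring, this->keystring);
 *
 *			parent = *new;
 *			if (result < 0)
 *				new = &(*new)->rb_left;
 *			else if (result > 0)
 *				new = &(*new)->rb_right;
 *			else
 *				return false;
 *		}
 *
 *		rb_link_node(&data->node, parent, new);
 *		rb_insert_color(&data->node, root);
 *		return true;
 *	}
 */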
static inline void rb_link_node_rcu(struct rb_node *node, struct rb_node *parent,
				    struct rb_node **rb_link)
{
	node->__rb_parent_color = (unsigned long)parent;
	node->rb_left = node->rb_right = NULL;

	rcu_assign_pointer(*rb_link, node);
}

#define rb_entry_safe(ptr, type, member) \
	({ typeof(ptr) ____ptr = (ptr); \
	   ____ptr ? rb_entry(____ptr, type, member) : NULL; \
	})

/**
 * rbtree_postorder_for_each_entry_safe - iterate in post-order over rb_root of
 * given type allowing the backing memory of @pos to be invalidated
 *
 * @pos:	the 'type *' to use as a loop cursor.
 * @n:		another 'type *' to use as temporary storage
 * @root:	'rb_root *' of the rbtree.
 * @field:	the name of the rb_node field within 'type'.
 *
 * rbtree_postorder_for_each_entry_safe() provides a similar guarantee to
 * list_for_each_entry_safe() and allows the iteration to continue
 * independently of changes to @pos made by the body of the loop.
 *
 * Note, however, that it cannot handle other modifications that re-order the
 * rbtree it is iterating over.  This includes calling rb_erase() on @pos, as
 * rb_erase() may rebalance the tree, causing us to miss some nodes.
 */
#define rbtree_postorder_for_each_entry_safe(pos, n, root, field) \
	for (pos = rb_entry_safe(rb_first_postorder(root), typeof(*pos), field); \
	     pos && ({ n = rb_entry_safe(rb_next_postorder(&pos->field), \
			typeof(*pos), field); 1; }); \
	     pos = n)

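/*
 * Illustrative sketch: freeing every element of a tree with the postorder
 * iterator, assuming the hypothetical struct mytype above.  The iterator
 * pre-fetches the next node, so the loop body may free @pos, but it must
 * not call rb_erase() or otherwise rebalance the tree.
 *
 *	struct mytype *pos, *n;
 *
 *	rbtree_postorder_for_each_entry_safe(pos, n, &root, node)
 *		kfree(pos);
 *	root = RB_ROOT;
 */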
/* Same as rb_first(), but O(1) */
#define rb_first_cached(root) (root)->rb_leftmost

static inline void rb_insert_color_cached(struct rb_node *node,
					  struct rb_root_cached *root,
					  bool leftmost)
{
	if (leftmost)
		root->rb_leftmost = node;
	rb_insert_color(node, &root->rb_root);
}


static inline struct rb_node *
rb_erase_cached(struct rb_node *node, struct rb_root_cached *root)
{
	struct rb_node *leftmost = NULL;

	if (root->rb_leftmost == node)
		leftmost = root->rb_leftmost = rb_next(node);

	rb_erase(node, &root->rb_root);

	return leftmost;
}

static inline void rb_replace_node_cached(struct rb_node *victim,
					  struct rb_node *new,
					  struct rb_root_cached *root)
{
	if (root->rb_leftmost == victim)
		root->rb_leftmost = new;
	rb_replace_node(victim, new, &root->rb_root);
}

/*
 * The below helper functions use 2 operators with 3 different
 * calling conventions. The operators are related like:
 *
 *	comp(a->key,b) < 0  := less(a,b)
 *	comp(a->key,b) > 0  := less(b,a)
 *	comp(a->key,b) == 0 := !less(a,b) && !less(b,a)
 *
 * If these operators define a partial order on the elements we make no
 * guarantee on which of the elements matching the key is found. See
 * rb_find().
 *
 * The reason for this is to allow the find() interface without requiring an
 * on-stack dummy object, which might not be feasible due to object size.
 */

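/*
 * Illustrative sketch: the two operator flavours for one hypothetical keyed
 * structure.  less() is what the insertion helpers take, cmp() is what the
 * key-based search helpers take; they must order the elements consistently.
 *
 *	struct thing {
 *		struct rb_node node;
 *		u64 key;
 *	};
 *
 *	static bool thing_less(struct rb_node *a, const struct rb_node *b)
 *	{
 *		return rb_entry(a, struct thing, node)->key <
 *		       rb_entry(b, struct thing, node)->key;
 *	}
 *
 *	static int thing_cmp(const void *key, const struct rb_node *n)
 *	{
 *		u64 k = *(const u64 *)key;
 *		u64 nk = rb_entry(n, struct thing, node)->key;
 *
 *		return k < nk ? -1 : k > nk ? 1 : 0;
 *	}
 */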
/**
 * rb_add_cached() - insert @node into the leftmost cached tree @tree
 * @node: node to insert
 * @tree: leftmost cached tree to insert @node into
 * @less: operator defining the (partial) node order
 *
 * Returns @node when it is the new leftmost, or NULL.
 */
static __always_inline struct rb_node *
rb_add_cached(struct rb_node *node, struct rb_root_cached *tree,
	      bool (*less)(struct rb_node *, const struct rb_node *))
{
	struct rb_node **link = &tree->rb_root.rb_node;
	struct rb_node *parent = NULL;
	bool leftmost = true;

	while (*link) {
		parent = *link;
		if (less(node, parent)) {
			link = &parent->rb_left;
		} else {
			link = &parent->rb_right;
			leftmost = false;
		}
	}

	rb_link_node(node, parent, link);
	rb_insert_color_cached(node, tree, leftmost);

	return leftmost ? node : NULL;
}

/**
 * rb_add() - insert @node into @tree
 * @node: node to insert
 * @tree: tree to insert @node into
 * @less: operator defining the (partial) node order
 */
static __always_inline void
rb_add(struct rb_node *node, struct rb_root *tree,
       bool (*less)(struct rb_node *, const struct rb_node *))
{
	struct rb_node **link = &tree->rb_node;
	struct rb_node *parent = NULL;

	while (*link) {
		parent = *link;
		if (less(node, parent))
			link = &parent->rb_left;
		else
			link = &parent->rb_right;
	}

	rb_link_node(node, parent, link);
	rb_insert_color(node, tree);
}

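/*
 * Illustrative sketch: inserting with rb_add(), using the hypothetical
 * thing_less() above.  rb_add_cached() is used the same way on a
 * struct rb_root_cached and additionally keeps the leftmost (smallest)
 * node cached so that rb_first_cached() is O(1).
 *
 *	rb_add(&new_thing->node, &my_root, thing_less);
 */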
/**
 * rb_find_add_cached() - find equivalent @node in @tree, or add @node
 * @node: node to look-for / insert
 * @tree: tree to search / modify
 * @cmp: operator defining the node order
 *
 * Returns the rb_node matching @node, or NULL when no match is found and @node
 * is inserted.
 */
static __always_inline struct rb_node *
rb_find_add_cached(struct rb_node *node, struct rb_root_cached *tree,
		   int (*cmp)(const struct rb_node *new, const struct rb_node *exist))
{
	bool leftmost = true;
	struct rb_node **link = &tree->rb_root.rb_node;
	struct rb_node *parent = NULL;
	int c;

	while (*link) {
		parent = *link;
		c = cmp(node, parent);

		if (c < 0) {
			link = &parent->rb_left;
		} else if (c > 0) {
			link = &parent->rb_right;
			leftmost = false;
		} else {
			return parent;
		}
	}

	rb_link_node(node, parent, link);
	rb_insert_color_cached(node, tree, leftmost);
	return NULL;
}

/**
 * rb_find_add() - find equivalent @node in @tree, or add @node
 * @node: node to look-for / insert
 * @tree: tree to search / modify
 * @cmp: operator defining the node order
 *
 * Returns the rb_node matching @node, or NULL when no match is found and @node
 * is inserted.
 */
static __always_inline struct rb_node *
rb_find_add(struct rb_node *node, struct rb_root *tree,
	    int (*cmp)(struct rb_node *, const struct rb_node *))
{
	struct rb_node **link = &tree->rb_node;
	struct rb_node *parent = NULL;
	int c;

	while (*link) {
		parent = *link;
		c = cmp(node, parent);

		if (c < 0)
			link = &parent->rb_left;
		else if (c > 0)
			link = &parent->rb_right;
		else
			return parent;
	}

	rb_link_node(node, parent, link);
	rb_insert_color(node, tree);
	return NULL;
}

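/*
 * Illustrative sketch: insert-if-absent with rb_find_add().  The comparator
 * here takes two nodes, so the node being inserted doubles as the search
 * key; thing_node_cmp() is a hypothetical node-vs-node comparator.
 *
 *	static int thing_node_cmp(struct rb_node *a, const struct rb_node *b)
 *	{
 *		u64 ka = rb_entry(a, struct thing, node)->key;
 *		u64 kb = rb_entry(b, struct thing, node)->key;
 *
 *		return ka < kb ? -1 : ka > kb ? 1 : 0;
 *	}
 *
 *	struct rb_node *dup = rb_find_add(&new_thing->node, &my_root,
 *					  thing_node_cmp);
 *	if (dup)
 *		... an equivalent node is already present, @new_thing was
 *		    not inserted ...
 */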
/**
 * rb_find_add_rcu() - find equivalent @node in @tree, or add @node
 * @node: node to look-for / insert
 * @tree: tree to search / modify
 * @cmp: operator defining the node order
 *
 * Publishes @node with a store-release (via rb_link_node_rcu()) so that
 * concurrent RCU readers observe a fully initialised node.
 *
 * Returns the rb_node matching @node, or NULL when no match is found and @node
 * is inserted.
 */
static __always_inline struct rb_node *
rb_find_add_rcu(struct rb_node *node, struct rb_root *tree,
		int (*cmp)(struct rb_node *, const struct rb_node *))
{
	struct rb_node **link = &tree->rb_node;
	struct rb_node *parent = NULL;
	int c;

	while (*link) {
		parent = *link;
		c = cmp(node, parent);

		if (c < 0)
			link = &parent->rb_left;
		else if (c > 0)
			link = &parent->rb_right;
		else
			return parent;
	}

	rb_link_node_rcu(node, parent, link);
	rb_insert_color(node, tree);
	return NULL;
}

/**
 * rb_find() - find @key in tree @tree
 * @key: key to match
 * @tree: tree to search
 * @cmp: operator defining the node order
 *
 * Returns the rb_node matching @key or NULL.
 */
static __always_inline struct rb_node *
rb_find(const void *key, const struct rb_root *tree,
	int (*cmp)(const void *key, const struct rb_node *))
{
	struct rb_node *node = tree->rb_node;

	while (node) {
		int c = cmp(key, node);

		if (c < 0)
			node = node->rb_left;
		else if (c > 0)
			node = node->rb_right;
		else
			return node;
	}

	return NULL;
}

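/*
 * Illustrative sketch: lookup by key with rb_find(), using the hypothetical
 * thing_cmp() above.
 *
 *	u64 key = 42;
 *	struct rb_node *n = rb_find(&key, &my_root, thing_cmp);
 *	struct thing *t = n ? rb_entry(n, struct thing, node) : NULL;
 */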
/**
 * rb_find_rcu() - find @key in tree @tree
 * @key: key to match
 * @tree: tree to search
 * @cmp: operator defining the node order
 *
 * Notably, tree descent racing with concurrent tree rotations is unsound
 * and can result in false negatives.
 *
 * Returns the rb_node matching @key or NULL.
 */
static __always_inline struct rb_node *
rb_find_rcu(const void *key, const struct rb_root *tree,
	    int (*cmp)(const void *key, const struct rb_node *))
{
	struct rb_node *node = tree->rb_node;

	while (node) {
		int c = cmp(key, node);

		if (c < 0)
			node = rcu_dereference_raw(node->rb_left);
		else if (c > 0)
			node = rcu_dereference_raw(node->rb_right);
		else
			return node;
	}

	return NULL;
}

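/*
 * Illustrative sketch: the usual pattern with the RCU variants is to do
 * lookups under rcu_read_lock() with rb_find_rcu() while all modifications
 * are serialised by a lock.  Because a reader racing with a rotation may
 * see a false negative, callers typically retry under the lock on a miss.
 *
 *	rcu_read_lock();
 *	node = rb_find_rcu(&key, &my_root, thing_cmp);
 *	rcu_read_unlock();
 */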
/**
 * rb_find_first() - find the first @key in @tree
 * @key: key to match
 * @tree: tree to search
 * @cmp: operator defining node order
 *
 * Returns the leftmost node matching @key, or NULL.
 */
static __always_inline struct rb_node *
rb_find_first(const void *key, const struct rb_root *tree,
	      int (*cmp)(const void *key, const struct rb_node *))
{
	struct rb_node *node = tree->rb_node;
	struct rb_node *match = NULL;

	while (node) {
		int c = cmp(key, node);

		if (c <= 0) {
			if (!c)
				match = node;
			node = node->rb_left;
		} else if (c > 0) {
			node = node->rb_right;
		}
	}

	return match;
}

/**
 * rb_next_match() - find the next node matching @key
 * @key: key to match
 * @node: node to continue the search from
 * @cmp: operator defining node order
 *
 * Returns the next node matching @key, or NULL.
 */
static __always_inline struct rb_node *
rb_next_match(const void *key, struct rb_node *node,
	      int (*cmp)(const void *key, const struct rb_node *))
{
	node = rb_next(node);
	if (node && cmp(key, node))
		node = NULL;
	return node;
}

/**
 * rb_for_each() - iterates a subtree matching @key
 * @node: iterator
 * @key: key to match
 * @tree: tree to search
 * @cmp: operator defining node order
 */
#define rb_for_each(node, key, tree, cmp) \
	for ((node) = rb_find_first((key), (tree), (cmp)); \
	     (node); (node) = rb_next_match((key), (node), (cmp)))

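/*
 * Illustrative sketch: visiting every node that compares equal to @key,
 * using the hypothetical thing_cmp() above.
 *
 *	struct rb_node *node;
 *
 *	rb_for_each(node, &key, &my_root, thing_cmp)
 *		process(rb_entry(node, struct thing, node));
 */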
#endif	/* _LINUX_RBTREE_H */