1 /* $NetBSD: tree.h,v 1.8 2004/03/28 19:38:30 provos Exp $ */
2 /* $OpenBSD: tree.h,v 1.7 2002/10/17 21:51:54 art Exp $ */
6 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
8 * Copyright 2002 Niels Provos <provos@citi.umich.edu>
11 * Redistribution and use in source and binary forms, with or without
12 * modification, are permitted provided that the following conditions
14 * 1. Redistributions of source code must retain the above copyright
15 * notice, this list of conditions and the following disclaimer.
16 * 2. Redistributions in binary form must reproduce the above copyright
17 * notice, this list of conditions and the following disclaimer in the
18 * documentation and/or other materials provided with the distribution.
20 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
21 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
22 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
23 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
24 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
25 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
29 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
35 #include <sys/cdefs.h>
38 * This file defines data structures for different types of trees:
39 * splay trees and rank-balanced trees.
41 * A splay tree is a self-organizing data structure. Every operation
42 * on the tree causes a splay to happen. The splay moves the requested
43 * node to the root of the tree and partly rebalances it.
45 * This has the benefit that request locality causes faster lookups as
46 * the requested nodes move to the top of the tree. On the other hand,
47 * every lookup causes memory writes.
49 * The Balance Theorem bounds the total access time for m operations
50 * and n inserts on an initially empty tree as O((m + n)lg n). The
51 * amortized cost for a sequence of m accesses to a splay tree is O(lg n);
53 * A rank-balanced tree is a binary search tree with an integer
54 * rank-difference as an attribute of each pointer from parent to child.
55 * The sum of the rank-differences on any path from a node down to null is
56 * the same, and defines the rank of that node. The rank of the null node
59 * Different additional conditions define different sorts of balanced trees,
60 * including "red-black" and "AVL" trees. The set of conditions applied here
* are the "weak-AVL" conditions of Haeupler, Sen and Tarjan presented in
62 * "Rank Balanced Trees", ACM Transactions on Algorithms Volume 11 Issue 4 June
* 2015, Article No. 30, pp. 1-26, https://doi.org/10.1145/2689412 (the HST paper):
64 * - every rank-difference is 1 or 2.
65 * - the rank of any leaf is 1.
67 * For historical reasons, rank differences that are even are associated
68 * with the color red (Rank-Even-Difference), and the child that a red edge
69 * points to is called a red child.
71 * Every operation on a rank-balanced tree is bounded as O(lg n).
72 * The maximum height of a rank-balanced tree is 2lg (n+1).
75 #define SPLAY_HEAD(name, type) \
77 struct type *sph_root; /* root of the tree */ \
80 #define SPLAY_INITIALIZER(root) \
83 #define SPLAY_INIT(root) do { \
84 (root)->sph_root = NULL; \
85 } while (/*CONSTCOND*/ 0)
87 #define SPLAY_ENTRY(type) \
89 struct type *spe_left; /* left element */ \
90 struct type *spe_right; /* right element */ \
93 #define SPLAY_LEFT(elm, field) (elm)->field.spe_left
94 #define SPLAY_RIGHT(elm, field) (elm)->field.spe_right
95 #define SPLAY_ROOT(head) (head)->sph_root
96 #define SPLAY_EMPTY(head) (SPLAY_ROOT(head) == NULL)
98 /* SPLAY_ROTATE_{LEFT,RIGHT} expect that tmp hold SPLAY_{RIGHT,LEFT} */
99 #define SPLAY_ROTATE_RIGHT(head, tmp, field) do { \
100 SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(tmp, field); \
101 SPLAY_RIGHT(tmp, field) = (head)->sph_root; \
102 (head)->sph_root = tmp; \
103 } while (/*CONSTCOND*/ 0)
105 #define SPLAY_ROTATE_LEFT(head, tmp, field) do { \
106 SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(tmp, field); \
107 SPLAY_LEFT(tmp, field) = (head)->sph_root; \
108 (head)->sph_root = tmp; \
109 } while (/*CONSTCOND*/ 0)
111 #define SPLAY_LINKLEFT(head, tmp, field) do { \
112 SPLAY_LEFT(tmp, field) = (head)->sph_root; \
113 tmp = (head)->sph_root; \
114 (head)->sph_root = SPLAY_LEFT((head)->sph_root, field); \
115 } while (/*CONSTCOND*/ 0)
117 #define SPLAY_LINKRIGHT(head, tmp, field) do { \
118 SPLAY_RIGHT(tmp, field) = (head)->sph_root; \
119 tmp = (head)->sph_root; \
120 (head)->sph_root = SPLAY_RIGHT((head)->sph_root, field); \
121 } while (/*CONSTCOND*/ 0)
123 #define SPLAY_ASSEMBLE(head, node, left, right, field) do { \
124 SPLAY_RIGHT(left, field) = SPLAY_LEFT((head)->sph_root, field); \
125 SPLAY_LEFT(right, field) = SPLAY_RIGHT((head)->sph_root, field);\
126 SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(node, field); \
127 SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(node, field); \
128 } while (/*CONSTCOND*/ 0)
130 /* Generates prototypes and inline functions */
132 #define SPLAY_PROTOTYPE(name, type, field, cmp) \
133 void name##_SPLAY(struct name *, struct type *); \
134 void name##_SPLAY_MINMAX(struct name *, int); \
135 struct type *name##_SPLAY_INSERT(struct name *, struct type *); \
136 struct type *name##_SPLAY_REMOVE(struct name *, struct type *); \
138 /* Finds the node with the same key as elm */ \
139 static __unused __inline struct type * \
140 name##_SPLAY_FIND(struct name *head, struct type *elm) \
142 if (SPLAY_EMPTY(head)) \
144 name##_SPLAY(head, elm); \
145 if ((cmp)(elm, (head)->sph_root) == 0) \
146 return (head->sph_root); \
150 static __unused __inline struct type * \
151 name##_SPLAY_NEXT(struct name *head, struct type *elm) \
153 name##_SPLAY(head, elm); \
154 if (SPLAY_RIGHT(elm, field) != NULL) { \
155 elm = SPLAY_RIGHT(elm, field); \
156 while (SPLAY_LEFT(elm, field) != NULL) { \
157 elm = SPLAY_LEFT(elm, field); \
164 static __unused __inline struct type * \
165 name##_SPLAY_MIN_MAX(struct name *head, int val) \
167 name##_SPLAY_MINMAX(head, val); \
168 return (SPLAY_ROOT(head)); \
171 /* Main splay operation.
172 * Moves node close to the key of elm to top
174 #define SPLAY_GENERATE(name, type, field, cmp) \
176 name##_SPLAY_INSERT(struct name *head, struct type *elm) \
178 if (SPLAY_EMPTY(head)) { \
179 SPLAY_LEFT(elm, field) = SPLAY_RIGHT(elm, field) = NULL; \
181 __typeof(cmp(NULL, NULL)) __comp; \
182 name##_SPLAY(head, elm); \
183 __comp = (cmp)(elm, (head)->sph_root); \
185 SPLAY_LEFT(elm, field) = SPLAY_LEFT((head)->sph_root, field);\
186 SPLAY_RIGHT(elm, field) = (head)->sph_root; \
187 SPLAY_LEFT((head)->sph_root, field) = NULL; \
188 } else if (__comp > 0) { \
189 SPLAY_RIGHT(elm, field) = SPLAY_RIGHT((head)->sph_root, field);\
190 SPLAY_LEFT(elm, field) = (head)->sph_root; \
191 SPLAY_RIGHT((head)->sph_root, field) = NULL; \
193 return ((head)->sph_root); \
195 (head)->sph_root = (elm); \
200 name##_SPLAY_REMOVE(struct name *head, struct type *elm) \
202 struct type *__tmp; \
203 if (SPLAY_EMPTY(head)) \
205 name##_SPLAY(head, elm); \
206 if ((cmp)(elm, (head)->sph_root) == 0) { \
207 if (SPLAY_LEFT((head)->sph_root, field) == NULL) { \
208 (head)->sph_root = SPLAY_RIGHT((head)->sph_root, field);\
210 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
211 (head)->sph_root = SPLAY_LEFT((head)->sph_root, field);\
212 name##_SPLAY(head, elm); \
213 SPLAY_RIGHT((head)->sph_root, field) = __tmp; \
221 name##_SPLAY(struct name *head, struct type *elm) \
223 struct type __node, *__left, *__right, *__tmp; \
224 __typeof(cmp(NULL, NULL)) __comp; \
226 SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
227 __left = __right = &__node; \
229 while ((__comp = (cmp)(elm, (head)->sph_root)) != 0) { \
231 __tmp = SPLAY_LEFT((head)->sph_root, field); \
234 if ((cmp)(elm, __tmp) < 0){ \
235 SPLAY_ROTATE_RIGHT(head, __tmp, field); \
236 if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
239 SPLAY_LINKLEFT(head, __right, field); \
240 } else if (__comp > 0) { \
241 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
244 if ((cmp)(elm, __tmp) > 0){ \
245 SPLAY_ROTATE_LEFT(head, __tmp, field); \
246 if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
249 SPLAY_LINKRIGHT(head, __left, field); \
252 SPLAY_ASSEMBLE(head, &__node, __left, __right, field); \
255 /* Splay with either the minimum or the maximum element \
256 * Used to find minimum or maximum element in tree. \
258 void name##_SPLAY_MINMAX(struct name *head, int __comp) \
260 struct type __node, *__left, *__right, *__tmp; \
262 SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
263 __left = __right = &__node; \
267 __tmp = SPLAY_LEFT((head)->sph_root, field); \
271 SPLAY_ROTATE_RIGHT(head, __tmp, field); \
272 if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
275 SPLAY_LINKLEFT(head, __right, field); \
276 } else if (__comp > 0) { \
277 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
281 SPLAY_ROTATE_LEFT(head, __tmp, field); \
282 if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
285 SPLAY_LINKRIGHT(head, __left, field); \
288 SPLAY_ASSEMBLE(head, &__node, __left, __right, field); \
291 #define SPLAY_NEGINF -1
294 #define SPLAY_INSERT(name, x, y) name##_SPLAY_INSERT(x, y)
295 #define SPLAY_REMOVE(name, x, y) name##_SPLAY_REMOVE(x, y)
296 #define SPLAY_FIND(name, x, y) name##_SPLAY_FIND(x, y)
297 #define SPLAY_NEXT(name, x, y) name##_SPLAY_NEXT(x, y)
298 #define SPLAY_MIN(name, x) (SPLAY_EMPTY(x) ? NULL \
299 : name##_SPLAY_MIN_MAX(x, SPLAY_NEGINF))
300 #define SPLAY_MAX(name, x) (SPLAY_EMPTY(x) ? NULL \
301 : name##_SPLAY_MIN_MAX(x, SPLAY_INF))
303 #define SPLAY_FOREACH(x, name, head) \
304 for ((x) = SPLAY_MIN(name, head); \
306 (x) = SPLAY_NEXT(name, head, x))
308 /* Macros that define a rank-balanced tree */
309 #define RB_HEAD(name, type) \
311 struct type *rbh_root; /* root of the tree */ \
314 #define RB_INITIALIZER(root) \
317 #define RB_INIT(root) do { \
318 (root)->rbh_root = NULL; \
319 } while (/*CONSTCOND*/ 0)
321 #define RB_ENTRY(type) \
323 struct type *rbe_link[3]; \
327 * With the expectation that any object of struct type has an
328 * address that is a multiple of 4, and that therefore the
329 * 2 least significant bits of a pointer to struct type are
330 * always zero, this implementation sets those bits to indicate
331 * that the left or right child of the tree node is "red".
333 #define _RB_LINK(elm, dir, field) (elm)->field.rbe_link[dir]
334 #define _RB_UP(elm, field) _RB_LINK(elm, 0, field)
335 #define _RB_L ((__uintptr_t)1)
336 #define _RB_R ((__uintptr_t)2)
337 #define _RB_LR ((__uintptr_t)3)
338 #define _RB_BITS(elm) (*(__uintptr_t *)&elm)
339 #define _RB_BITSUP(elm, field) _RB_BITS(_RB_UP(elm, field))
340 #define _RB_PTR(elm) (__typeof(elm)) \
341 ((__uintptr_t)elm & ~_RB_LR)
343 #define RB_PARENT(elm, field) _RB_PTR(_RB_UP(elm, field))
344 #define RB_LEFT(elm, field) _RB_LINK(elm, _RB_L, field)
345 #define RB_RIGHT(elm, field) _RB_LINK(elm, _RB_R, field)
346 #define RB_ROOT(head) (head)->rbh_root
347 #define RB_EMPTY(head) (RB_ROOT(head) == NULL)
349 #define RB_SET_PARENT(dst, src, field) do { \
350 _RB_BITSUP(dst, field) = (__uintptr_t)src | \
351 (_RB_BITSUP(dst, field) & _RB_LR); \
352 } while (/*CONSTCOND*/ 0)
354 #define RB_SET(elm, parent, field) do { \
355 _RB_UP(elm, field) = parent; \
356 RB_LEFT(elm, field) = RB_RIGHT(elm, field) = NULL; \
357 } while (/*CONSTCOND*/ 0)
360 * Either RB_AUGMENT or RB_AUGMENT_CHECK is invoked in a loop at the root of
361 * every modified subtree, from the bottom up to the root, to update augmented
362 * node data. RB_AUGMENT_CHECK returns true only when the update changes the
363 * node data, so that updating can be stopped short of the root when it returns
366 #ifndef RB_AUGMENT_CHECK
368 #define RB_AUGMENT_CHECK(x) 0
370 #define RB_AUGMENT_CHECK(x) (RB_AUGMENT(x), 1)
374 #define RB_UPDATE_AUGMENT(elm, field) do { \
375 __typeof(elm) rb_update_tmp = (elm); \
376 while (RB_AUGMENT_CHECK(rb_update_tmp) && \
377 (rb_update_tmp = RB_PARENT(rb_update_tmp, field)) != NULL) \
381 #define RB_SWAP_CHILD(head, par, out, in, field) do { \
383 RB_ROOT(head) = (in); \
384 else if ((out) == RB_LEFT(par, field)) \
385 RB_LEFT(par, field) = (in); \
387 RB_RIGHT(par, field) = (in); \
388 } while (/*CONSTCOND*/ 0)
391 * RB_ROTATE macro partially restructures the tree to improve balance. In the
392 * case when dir is _RB_L, tmp is a right child of elm. After rotation, elm
393 * is a left child of tmp, and the subtree that represented the items between
394 * them, which formerly hung to the left of tmp now hangs to the right of elm.
395 * The parent-child relationship between elm and its former parent is not
396 * changed; where this macro once updated those fields, that is now left to the
397 * caller of RB_ROTATE to clean up, so that a pair of rotations does not twice
398 * update the same pair of pointer fields with distinct values.
400 #define RB_ROTATE(elm, tmp, dir, field) do { \
401 if ((_RB_LINK(elm, dir ^ _RB_LR, field) = \
402 _RB_LINK(tmp, dir, field)) != NULL) \
403 RB_SET_PARENT(_RB_LINK(tmp, dir, field), elm, field); \
404 _RB_LINK(tmp, dir, field) = (elm); \
405 RB_SET_PARENT(elm, tmp, field); \
406 } while (/*CONSTCOND*/ 0)
408 /* Generates prototypes and inline functions */
409 #define RB_PROTOTYPE(name, type, field, cmp) \
410 RB_PROTOTYPE_INTERNAL(name, type, field, cmp,)
411 #define RB_PROTOTYPE_STATIC(name, type, field, cmp) \
412 RB_PROTOTYPE_INTERNAL(name, type, field, cmp, __unused static)
413 #define RB_PROTOTYPE_INTERNAL(name, type, field, cmp, attr) \
414 RB_PROTOTYPE_RANK(name, type, attr) \
415 RB_PROTOTYPE_INSERT_COLOR(name, type, attr); \
416 RB_PROTOTYPE_REMOVE_COLOR(name, type, attr); \
417 RB_PROTOTYPE_INSERT_FINISH(name, type, attr); \
418 RB_PROTOTYPE_INSERT(name, type, attr); \
419 RB_PROTOTYPE_REMOVE(name, type, attr); \
420 RB_PROTOTYPE_FIND(name, type, attr); \
421 RB_PROTOTYPE_NFIND(name, type, attr); \
422 RB_PROTOTYPE_NEXT(name, type, attr); \
423 RB_PROTOTYPE_INSERT_NEXT(name, type, attr); \
424 RB_PROTOTYPE_PREV(name, type, attr); \
425 RB_PROTOTYPE_INSERT_PREV(name, type, attr); \
426 RB_PROTOTYPE_MINMAX(name, type, attr); \
427 RB_PROTOTYPE_REINSERT(name, type, attr);
428 #ifdef _RB_DIAGNOSTIC
429 #define RB_PROTOTYPE_RANK(name, type, attr) \
430 attr int name##_RB_RANK(struct type *);
432 #define RB_PROTOTYPE_RANK(name, type, attr)
434 #define RB_PROTOTYPE_INSERT_COLOR(name, type, attr) \
435 attr struct type *name##_RB_INSERT_COLOR(struct name *, \
436 struct type *, struct type *)
437 #define RB_PROTOTYPE_REMOVE_COLOR(name, type, attr) \
438 attr struct type *name##_RB_REMOVE_COLOR(struct name *, \
439 struct type *, struct type *)
440 #define RB_PROTOTYPE_REMOVE(name, type, attr) \
441 attr struct type *name##_RB_REMOVE(struct name *, struct type *)
442 #define RB_PROTOTYPE_INSERT_FINISH(name, type, attr) \
443 attr struct type *name##_RB_INSERT_FINISH(struct name *, \
444 struct type *, struct type **, struct type *)
445 #define RB_PROTOTYPE_INSERT(name, type, attr) \
446 attr struct type *name##_RB_INSERT(struct name *, struct type *)
447 #define RB_PROTOTYPE_FIND(name, type, attr) \
448 attr struct type *name##_RB_FIND(struct name *, struct type *)
449 #define RB_PROTOTYPE_NFIND(name, type, attr) \
450 attr struct type *name##_RB_NFIND(struct name *, struct type *)
451 #define RB_PROTOTYPE_NEXT(name, type, attr) \
452 attr struct type *name##_RB_NEXT(struct type *)
453 #define RB_PROTOTYPE_INSERT_NEXT(name, type, attr) \
454 attr struct type *name##_RB_INSERT_NEXT(struct name *, \
455 struct type *, struct type *)
456 #define RB_PROTOTYPE_PREV(name, type, attr) \
457 attr struct type *name##_RB_PREV(struct type *)
458 #define RB_PROTOTYPE_INSERT_PREV(name, type, attr) \
459 attr struct type *name##_RB_INSERT_PREV(struct name *, \
460 struct type *, struct type *)
461 #define RB_PROTOTYPE_MINMAX(name, type, attr) \
462 attr struct type *name##_RB_MINMAX(struct name *, int)
463 #define RB_PROTOTYPE_REINSERT(name, type, attr) \
464 attr struct type *name##_RB_REINSERT(struct name *, struct type *)
466 /* Main rb operation.
467 * Moves node close to the key of elm to top
469 #define RB_GENERATE(name, type, field, cmp) \
470 RB_GENERATE_INTERNAL(name, type, field, cmp,)
471 #define RB_GENERATE_STATIC(name, type, field, cmp) \
472 RB_GENERATE_INTERNAL(name, type, field, cmp, __unused static)
473 #define RB_GENERATE_INTERNAL(name, type, field, cmp, attr) \
474 RB_GENERATE_RANK(name, type, field, attr) \
475 RB_GENERATE_INSERT_COLOR(name, type, field, attr) \
476 RB_GENERATE_REMOVE_COLOR(name, type, field, attr) \
477 RB_GENERATE_INSERT_FINISH(name, type, field, attr) \
478 RB_GENERATE_INSERT(name, type, field, cmp, attr) \
479 RB_GENERATE_REMOVE(name, type, field, attr) \
480 RB_GENERATE_FIND(name, type, field, cmp, attr) \
481 RB_GENERATE_NFIND(name, type, field, cmp, attr) \
482 RB_GENERATE_NEXT(name, type, field, attr) \
483 RB_GENERATE_INSERT_NEXT(name, type, field, cmp, attr) \
484 RB_GENERATE_PREV(name, type, field, attr) \
485 RB_GENERATE_INSERT_PREV(name, type, field, cmp, attr) \
486 RB_GENERATE_MINMAX(name, type, field, attr) \
487 RB_GENERATE_REINSERT(name, type, field, cmp, attr)
489 #ifdef _RB_DIAGNOSTIC
491 #define _RB_AUGMENT_VERIFY(x) RB_AUGMENT_CHECK(x)
493 #define _RB_AUGMENT_VERIFY(x) 0
495 #define RB_GENERATE_RANK(name, type, field, attr) \
497 * Return the rank of the subtree rooted at elm, or -1 if the subtree \
498 * is not rank-balanced, or has inconsistent augmentation data.
501 name##_RB_RANK(struct type *elm) \
503 struct type *left, *right, *up; \
504 int left_rank, right_rank; \
508 up = _RB_UP(elm, field); \
509 left = RB_LEFT(elm, field); \
510 left_rank = ((_RB_BITS(up) & _RB_L) ? 2 : 1) + \
511 name##_RB_RANK(left); \
512 right = RB_RIGHT(elm, field); \
513 right_rank = ((_RB_BITS(up) & _RB_R) ? 2 : 1) + \
514 name##_RB_RANK(right); \
515 if (left_rank != right_rank || \
516 (left_rank == 2 && left == NULL && right == NULL) || \
517 _RB_AUGMENT_VERIFY(elm)) \
519 return (left_rank); \
522 #define RB_GENERATE_RANK(name, type, field, attr)
525 #define RB_GENERATE_INSERT_COLOR(name, type, field, attr) \
527 name##_RB_INSERT_COLOR(struct name *head, \
528 struct type *parent, struct type *elm) \
531 * Initially, elm is a leaf. Either its parent was previously \
532 * a leaf, with two black null children, or an interior node \
533 * with a black non-null child and a red null child. The \
534 * balance criterion "the rank of any leaf is 1" precludes the \
535 * possibility of two red null children for the initial parent. \
536 * So the first loop iteration cannot lead to accessing an \
537 * uninitialized 'child', and a later iteration can only happen \
538 * when a value has been assigned to 'child' in the previous \
541 struct type *child, *child_up, *gpar; \
542 __uintptr_t elmdir, sibdir; \
545 /* the rank of the tree rooted at elm grew */ \
546 gpar = _RB_UP(parent, field); \
547 elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
548 if (_RB_BITS(gpar) & elmdir) { \
549 /* shorten the parent-elm edge to rebalance */ \
550 _RB_BITSUP(parent, field) ^= elmdir; \
553 sibdir = elmdir ^ _RB_LR; \
554 /* the other edge must change length */ \
555 _RB_BITSUP(parent, field) ^= sibdir; \
556 if ((_RB_BITS(gpar) & _RB_LR) == 0) { \
557 /* both edges now short, retry from parent */ \
562 _RB_UP(parent, field) = gpar = _RB_PTR(gpar); \
563 if (_RB_BITSUP(elm, field) & elmdir) { \
565 * Exactly one of the edges descending from elm \
566 * is long. The long one is in the same \
567 * direction as the edge from parent to elm, \
568 * so change that by rotation. The edge from \
569 * parent to z was shortened above. Shorten \
570 * the long edge down from elm, and adjust \
571 * other edge lengths based on the downward \
572 * edges from 'child'. \
584 RB_ROTATE(elm, child, elmdir, field); \
585 child_up = _RB_UP(child, field); \
586 if (_RB_BITS(child_up) & sibdir) \
587 _RB_BITSUP(parent, field) ^= elmdir; \
588 if (_RB_BITS(child_up) & elmdir) \
589 _RB_BITSUP(elm, field) ^= _RB_LR; \
591 _RB_BITSUP(elm, field) ^= elmdir; \
592 /* if child is a leaf, don't augment elm, \
593 * since it is restored to be a leaf again. */ \
594 if ((_RB_BITS(child_up) & _RB_LR) == 0) \
600 * The long edge descending from 'child' points back \
601 * in the direction of 'parent'. Rotate to make \
602 * 'parent' a child of 'child', then make both edges \
603 * of 'child' short to rebalance. \
613 RB_ROTATE(parent, child, sibdir, field); \
614 _RB_UP(child, field) = gpar; \
615 RB_SWAP_CHILD(head, gpar, parent, child, field); \
617 * Elements rotated down have new, smaller subtrees, \
618 * so update augmentation for them. \
621 (void)RB_AUGMENT_CHECK(elm); \
622 (void)RB_AUGMENT_CHECK(parent); \
624 } while ((parent = gpar) != NULL); \
628 #ifndef RB_STRICT_HST
630 * In REMOVE_COLOR, the HST paper, in figure 3, in the single-rotate case, has
631 * 'parent' with one higher rank, and then reduces its rank if 'parent' has
632 * become a leaf. This implementation always has the parent in its new position
633 * with lower rank, to avoid the leaf check. Define RB_STRICT_HST to 1 to get
634 * the behavior that HST describes.
636 #define RB_STRICT_HST 0
639 #define RB_GENERATE_REMOVE_COLOR(name, type, field, attr) \
641 name##_RB_REMOVE_COLOR(struct name *head, \
642 struct type *parent, struct type *elm) \
644 struct type *gpar, *sib, *up; \
645 __uintptr_t elmdir, sibdir; \
647 if (RB_RIGHT(parent, field) == elm && \
648 RB_LEFT(parent, field) == elm) { \
649 /* Deleting a leaf that is an only-child creates a \
650 * rank-2 leaf. Demote that leaf. */ \
651 _RB_UP(parent, field) = _RB_PTR(_RB_UP(parent, field)); \
653 if ((parent = _RB_UP(elm, field)) == NULL) \
657 /* the rank of the tree rooted at elm shrank */ \
658 gpar = _RB_UP(parent, field); \
659 elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
660 _RB_BITS(gpar) ^= elmdir; \
661 if (_RB_BITS(gpar) & elmdir) { \
662 /* lengthen the parent-elm edge to rebalance */ \
663 _RB_UP(parent, field) = gpar; \
666 if (_RB_BITS(gpar) & _RB_LR) { \
667 /* shorten other edge, retry from parent */ \
668 _RB_BITS(gpar) ^= _RB_LR; \
669 _RB_UP(parent, field) = gpar; \
670 gpar = _RB_PTR(gpar); \
673 sibdir = elmdir ^ _RB_LR; \
674 sib = _RB_LINK(parent, sibdir, field); \
675 up = _RB_UP(sib, field); \
676 _RB_BITS(up) ^= _RB_LR; \
677 if ((_RB_BITS(up) & _RB_LR) == 0) { \
678 /* shorten edges descending from sib, retry */ \
679 _RB_UP(sib, field) = up; \
682 if ((_RB_BITS(up) & sibdir) == 0) { \
684 * The edge descending from 'sib' away from \
685 * 'parent' is long. The short edge descending \
686 * from 'sib' toward 'parent' points to 'elm*' \
687 * Rotate to make 'sib' a child of 'elm*' \
688 * then adjust the lengths of the edges \
689 * descending from 'sib' and 'elm*'. \
703 elm = _RB_LINK(sib, elmdir, field); \
704 /* elm is a 1-child. First rotate at elm. */ \
705 RB_ROTATE(sib, elm, sibdir, field); \
706 up = _RB_UP(elm, field); \
707 _RB_BITSUP(parent, field) ^= \
708 (_RB_BITS(up) & elmdir) ? _RB_LR : elmdir; \
709 _RB_BITSUP(sib, field) ^= \
710 (_RB_BITS(up) & sibdir) ? _RB_LR : sibdir; \
711 _RB_BITSUP(elm, field) |= _RB_LR; \
713 if ((_RB_BITS(up) & elmdir) == 0 && \
714 RB_STRICT_HST && elm != NULL) { \
715 /* if parent does not become a leaf, \
716 do not demote parent yet. */ \
717 _RB_BITSUP(parent, field) ^= sibdir; \
718 _RB_BITSUP(sib, field) ^= _RB_LR; \
719 } else if ((_RB_BITS(up) & elmdir) == 0) { \
720 /* demote parent. */ \
721 _RB_BITSUP(parent, field) ^= elmdir; \
722 _RB_BITSUP(sib, field) ^= sibdir; \
724 _RB_BITSUP(sib, field) ^= sibdir; \
729 * The edge descending from 'elm' away from 'parent' \
730 * is short. Rotate to make 'parent' a child of 'elm', \
731 * then lengthen the short edges descending from \
732 * 'parent' and 'elm' to rebalance. \
743 RB_ROTATE(parent, elm, elmdir, field); \
744 RB_SET_PARENT(elm, gpar, field); \
745 RB_SWAP_CHILD(head, gpar, parent, elm, field); \
747 * An element rotated down, but not into the search \
748 * path has a new, smaller subtree, so update \
749 * augmentation for it. \
752 (void)RB_AUGMENT_CHECK(sib); \
754 } while (elm = parent, (parent = gpar) != NULL); \
758 #define _RB_AUGMENT_WALK(elm, match, field) \
762 } while (RB_AUGMENT_CHECK(elm) && \
763 (elm = RB_PARENT(elm, field)) != NULL)
765 #define RB_GENERATE_REMOVE(name, type, field, attr) \
767 name##_RB_REMOVE(struct name *head, struct type *out) \
769 struct type *child, *in, *opar, *parent; \
771 child = RB_LEFT(out, field); \
772 in = RB_RIGHT(out, field); \
773 opar = _RB_UP(out, field); \
774 if (in == NULL || child == NULL) { \
775 in = child = in == NULL ? child : in; \
776 parent = opar = _RB_PTR(opar); \
779 while (RB_LEFT(in, field)) \
780 in = RB_LEFT(in, field); \
781 RB_SET_PARENT(child, in, field); \
782 RB_LEFT(in, field) = child; \
783 child = RB_RIGHT(in, field); \
784 if (parent != in) { \
785 RB_SET_PARENT(parent, in, field); \
786 RB_RIGHT(in, field) = parent; \
787 parent = RB_PARENT(in, field); \
788 RB_LEFT(parent, field) = child; \
790 _RB_UP(in, field) = opar; \
791 opar = _RB_PTR(opar); \
793 RB_SWAP_CHILD(head, opar, out, in, field); \
795 _RB_UP(child, field) = parent; \
796 if (parent != NULL) { \
797 opar = name##_RB_REMOVE_COLOR(head, parent, child); \
798 /* if rotation has made 'parent' the root of the same \
799 * subtree as before, don't re-augment it. */ \
800 if (parent == in && RB_LEFT(parent, field) == NULL) { \
802 parent = RB_PARENT(parent, field); \
804 _RB_AUGMENT_WALK(parent, opar, field); \
805 if (opar != NULL) { \
807 * Elements rotated into the search path have \
808 * changed subtrees, so update augmentation for \
809 * them if AUGMENT_WALK didn't. \
811 (void)RB_AUGMENT_CHECK(opar); \
812 (void)RB_AUGMENT_CHECK(RB_PARENT(opar, field)); \
818 #define RB_GENERATE_INSERT_FINISH(name, type, field, attr) \
819 /* Inserts a node into the RB tree */ \
821 name##_RB_INSERT_FINISH(struct name *head, struct type *parent, \
822 struct type **pptr, struct type *elm) \
824 struct type *tmp = NULL; \
826 RB_SET(elm, parent, field); \
828 if (parent != NULL) \
829 tmp = name##_RB_INSERT_COLOR(head, parent, elm); \
830 _RB_AUGMENT_WALK(elm, tmp, field); \
833 * An element rotated into the search path has a \
834 * changed subtree, so update augmentation for it if \
835 * AUGMENT_WALK didn't. \
837 (void)RB_AUGMENT_CHECK(tmp); \
841 #define RB_GENERATE_INSERT(name, type, field, cmp, attr) \
842 /* Inserts a node into the RB tree */ \
844 name##_RB_INSERT(struct name *head, struct type *elm) \
847 struct type **tmpp = &RB_ROOT(head); \
848 struct type *parent = NULL; \
850 while ((tmp = *tmpp) != NULL) { \
852 __typeof(cmp(NULL, NULL)) comp = (cmp)(elm, parent); \
854 tmpp = &RB_LEFT(parent, field); \
856 tmpp = &RB_RIGHT(parent, field); \
860 return (name##_RB_INSERT_FINISH(head, parent, tmpp, elm)); \
863 #define RB_GENERATE_FIND(name, type, field, cmp, attr) \
864 /* Finds the node with the same key as elm */ \
866 name##_RB_FIND(struct name *head, struct type *elm) \
868 struct type *tmp = RB_ROOT(head); \
869 __typeof(cmp(NULL, NULL)) comp; \
871 comp = cmp(elm, tmp); \
873 tmp = RB_LEFT(tmp, field); \
875 tmp = RB_RIGHT(tmp, field); \
882 #define RB_GENERATE_NFIND(name, type, field, cmp, attr) \
883 /* Finds the first node greater than or equal to the search key */ \
885 name##_RB_NFIND(struct name *head, struct type *elm) \
887 struct type *tmp = RB_ROOT(head); \
888 struct type *res = NULL; \
889 __typeof(cmp(NULL, NULL)) comp; \
891 comp = cmp(elm, tmp); \
894 tmp = RB_LEFT(tmp, field); \
897 tmp = RB_RIGHT(tmp, field); \
904 #define RB_GENERATE_NEXT(name, type, field, attr) \
907 name##_RB_NEXT(struct type *elm) \
909 if (RB_RIGHT(elm, field)) { \
910 elm = RB_RIGHT(elm, field); \
911 while (RB_LEFT(elm, field)) \
912 elm = RB_LEFT(elm, field); \
914 while (RB_PARENT(elm, field) && \
915 (elm == RB_RIGHT(RB_PARENT(elm, field), field))) \
916 elm = RB_PARENT(elm, field); \
917 elm = RB_PARENT(elm, field); \
922 #if defined(_KERNEL) && defined(DIAGNOSTIC)
923 #define _RB_ORDER_CHECK(cmp, lo, hi) do { \
924 KASSERT((cmp)(lo, hi) < 0, ("out of order insertion")); \
927 #define _RB_ORDER_CHECK(cmp, lo, hi) do {} while (0)
930 #define RB_GENERATE_INSERT_NEXT(name, type, field, cmp, attr) \
931 /* Inserts a node into the next position in the RB tree */ \
933 name##_RB_INSERT_NEXT(struct name *head, \
934 struct type *elm, struct type *next) \
937 struct type **tmpp = &RB_RIGHT(elm, field); \
939 _RB_ORDER_CHECK(cmp, elm, next); \
940 if (name##_RB_NEXT(elm) != NULL) \
941 _RB_ORDER_CHECK(cmp, next, name##_RB_NEXT(elm)); \
942 while ((tmp = *tmpp) != NULL) { \
944 tmpp = &RB_LEFT(elm, field); \
946 return (name##_RB_INSERT_FINISH(head, elm, tmpp, next)); \
949 #define RB_GENERATE_PREV(name, type, field, attr) \
952 name##_RB_PREV(struct type *elm) \
954 if (RB_LEFT(elm, field)) { \
955 elm = RB_LEFT(elm, field); \
956 while (RB_RIGHT(elm, field)) \
957 elm = RB_RIGHT(elm, field); \
959 while (RB_PARENT(elm, field) && \
960 (elm == RB_LEFT(RB_PARENT(elm, field), field))) \
961 elm = RB_PARENT(elm, field); \
962 elm = RB_PARENT(elm, field); \
967 #define RB_GENERATE_INSERT_PREV(name, type, field, cmp, attr) \
968 /* Inserts a node into the prev position in the RB tree */ \
970 name##_RB_INSERT_PREV(struct name *head, \
971 struct type *elm, struct type *prev) \
974 struct type **tmpp = &RB_LEFT(elm, field); \
976 _RB_ORDER_CHECK(cmp, prev, elm); \
977 if (name##_RB_PREV(elm) != NULL) \
978 _RB_ORDER_CHECK(cmp, name##_RB_PREV(elm), prev); \
979 while ((tmp = *tmpp) != NULL) { \
981 tmpp = &RB_RIGHT(elm, field); \
983 return (name##_RB_INSERT_FINISH(head, elm, tmpp, prev)); \
986 #define RB_GENERATE_MINMAX(name, type, field, attr) \
988 name##_RB_MINMAX(struct name *head, int val) \
990 struct type *tmp = RB_ROOT(head); \
991 struct type *parent = NULL; \
995 tmp = RB_LEFT(tmp, field); \
997 tmp = RB_RIGHT(tmp, field); \
1002 #define RB_GENERATE_REINSERT(name, type, field, cmp, attr) \
1003 attr struct type * \
1004 name##_RB_REINSERT(struct name *head, struct type *elm) \
1006 struct type *cmpelm; \
1007 if (((cmpelm = RB_PREV(name, head, elm)) != NULL && \
1008 cmp(cmpelm, elm) >= 0) || \
1009 ((cmpelm = RB_NEXT(name, head, elm)) != NULL && \
1010 cmp(elm, cmpelm) >= 0)) { \
1011 /* XXXLAS: Remove/insert is heavy handed. */ \
1012 RB_REMOVE(name, head, elm); \
1013 return (RB_INSERT(name, head, elm)); \
1018 #define RB_NEGINF -1
1021 #define RB_INSERT(name, x, y) name##_RB_INSERT(x, y)
1022 #define RB_INSERT_NEXT(name, x, y, z) name##_RB_INSERT_NEXT(x, y, z)
1023 #define RB_INSERT_PREV(name, x, y, z) name##_RB_INSERT_PREV(x, y, z)
1024 #define RB_REMOVE(name, x, y) name##_RB_REMOVE(x, y)
1025 #define RB_FIND(name, x, y) name##_RB_FIND(x, y)
1026 #define RB_NFIND(name, x, y) name##_RB_NFIND(x, y)
1027 #define RB_NEXT(name, x, y) name##_RB_NEXT(y)
1028 #define RB_PREV(name, x, y) name##_RB_PREV(y)
1029 #define RB_MIN(name, x) name##_RB_MINMAX(x, RB_NEGINF)
1030 #define RB_MAX(name, x) name##_RB_MINMAX(x, RB_INF)
1031 #define RB_REINSERT(name, x, y) name##_RB_REINSERT(x, y)
1033 #define RB_FOREACH(x, name, head) \
1034 for ((x) = RB_MIN(name, head); \
1036 (x) = name##_RB_NEXT(x))
1038 #define RB_FOREACH_FROM(x, name, y) \
1040 ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL); \
1043 #define RB_FOREACH_SAFE(x, name, head, y) \
1044 for ((x) = RB_MIN(name, head); \
1045 ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL); \
1048 #define RB_FOREACH_REVERSE(x, name, head) \
1049 for ((x) = RB_MAX(name, head); \
1051 (x) = name##_RB_PREV(x))
1053 #define RB_FOREACH_REVERSE_FROM(x, name, y) \
1055 ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL); \
1058 #define RB_FOREACH_REVERSE_SAFE(x, name, head, y) \
1059 for ((x) = RB_MAX(name, head); \
1060 ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL); \
1063 #endif /* _SYS_TREE_H_ */