1 /* $NetBSD: tree.h,v 1.8 2004/03/28 19:38:30 provos Exp $ */
2 /* $OpenBSD: tree.h,v 1.7 2002/10/17 21:51:54 art Exp $ */
5 * SPDX-License-Identifier: BSD-2-Clause
7 * Copyright 2002 Niels Provos <provos@citi.umich.edu>
10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions
13 * 1. Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer.
15 * 2. Redistributions in binary form must reproduce the above copyright
16 * notice, this list of conditions and the following disclaimer in the
17 * documentation and/or other materials provided with the distribution.
19 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
20 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
21 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
22 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
23 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
24 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
34 #include <sys/cdefs.h>
37 * This file defines data structures for different types of trees:
38 * splay trees and rank-balanced trees.
40 * A splay tree is a self-organizing data structure. Every operation
41 * on the tree causes a splay to happen. The splay moves the requested
42 * node to the root of the tree and partly rebalances it.
44 * This has the benefit that request locality causes faster lookups as
45 * the requested nodes move to the top of the tree. On the other hand,
46 * every lookup causes memory writes.
48 * The Balance Theorem bounds the total access time for m operations
49 * and n inserts on an initially empty tree as O((m + n)lg n). The
50 * amortized cost for a sequence of m accesses to a splay tree is O(lg n);
52 * A rank-balanced tree is a binary search tree with an integer
53 * rank-difference as an attribute of each pointer from parent to child.
54 * The sum of the rank-differences on any path from a node down to null is
55 * the same, and defines the rank of that node. The rank of the null node
58 * Different additional conditions define different sorts of balanced trees,
59 * including "red-black" and "AVL" trees. The set of conditions applied here
60 * are the "weak-AVL" conditions of Haeupler, Sen and Tarjan presented in
61 * "Rank Balanced Trees", ACM Transactions on Algorithms Volume 11 Issue 4 June
62 * 2015, Article No. 30, pp. 1–26, https://doi.org/10.1145/2689412 (the HST paper):
63 * - every rank-difference is 1 or 2.
64 * - the rank of any leaf is 1.
66 * For historical reasons, rank differences that are even are associated
67 * with the color red (Rank-Even-Difference), and the child that a red edge
68 * points to is called a red child.
70 * Every operation on a rank-balanced tree is bounded as O(lg n).
71 * The maximum height of a rank-balanced tree is 2lg (n+1).
74 #define SPLAY_HEAD(name, type) \
76 struct type *sph_root; /* root of the tree */ \
79 #define SPLAY_INITIALIZER(root) \
82 #define SPLAY_INIT(root) do { \
83 (root)->sph_root = NULL; \
84 } while (/*CONSTCOND*/ 0)
86 #define SPLAY_ENTRY(type) \
88 struct type *spe_left; /* left element */ \
89 struct type *spe_right; /* right element */ \
92 #define SPLAY_LEFT(elm, field) (elm)->field.spe_left
93 #define SPLAY_RIGHT(elm, field) (elm)->field.spe_right
94 #define SPLAY_ROOT(head) (head)->sph_root
95 #define SPLAY_EMPTY(head) (SPLAY_ROOT(head) == NULL)
97 /* SPLAY_ROTATE_{LEFT,RIGHT} expect that tmp holds SPLAY_{RIGHT,LEFT} */
98 #define SPLAY_ROTATE_RIGHT(head, tmp, field) do { \
99 SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(tmp, field); \
100 SPLAY_RIGHT(tmp, field) = (head)->sph_root; \
101 (head)->sph_root = tmp; \
102 } while (/*CONSTCOND*/ 0)
104 #define SPLAY_ROTATE_LEFT(head, tmp, field) do { \
105 SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(tmp, field); \
106 SPLAY_LEFT(tmp, field) = (head)->sph_root; \
107 (head)->sph_root = tmp; \
108 } while (/*CONSTCOND*/ 0)
110 #define SPLAY_LINKLEFT(head, tmp, field) do { \
111 SPLAY_LEFT(tmp, field) = (head)->sph_root; \
112 tmp = (head)->sph_root; \
113 (head)->sph_root = SPLAY_LEFT((head)->sph_root, field); \
114 } while (/*CONSTCOND*/ 0)
116 #define SPLAY_LINKRIGHT(head, tmp, field) do { \
117 SPLAY_RIGHT(tmp, field) = (head)->sph_root; \
118 tmp = (head)->sph_root; \
119 (head)->sph_root = SPLAY_RIGHT((head)->sph_root, field); \
120 } while (/*CONSTCOND*/ 0)
122 #define SPLAY_ASSEMBLE(head, node, left, right, field) do { \
123 SPLAY_RIGHT(left, field) = SPLAY_LEFT((head)->sph_root, field); \
124 SPLAY_LEFT(right, field) = SPLAY_RIGHT((head)->sph_root, field);\
125 SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(node, field); \
126 SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(node, field); \
127 } while (/*CONSTCOND*/ 0)
129 /* Generates prototypes and inline functions */
131 #define SPLAY_PROTOTYPE(name, type, field, cmp) \
132 void name##_SPLAY(struct name *, struct type *); \
133 void name##_SPLAY_MINMAX(struct name *, int); \
134 struct type *name##_SPLAY_INSERT(struct name *, struct type *); \
135 struct type *name##_SPLAY_REMOVE(struct name *, struct type *); \
137 /* Finds the node with the same key as elm */ \
138 static __unused __inline struct type * \
139 name##_SPLAY_FIND(struct name *head, struct type *elm) \
141 if (SPLAY_EMPTY(head)) \
143 name##_SPLAY(head, elm); \
144 if ((cmp)(elm, (head)->sph_root) == 0) \
145 return (head->sph_root); \
149 static __unused __inline struct type * \
150 name##_SPLAY_NEXT(struct name *head, struct type *elm) \
152 name##_SPLAY(head, elm); \
153 if (SPLAY_RIGHT(elm, field) != NULL) { \
154 elm = SPLAY_RIGHT(elm, field); \
155 while (SPLAY_LEFT(elm, field) != NULL) { \
156 elm = SPLAY_LEFT(elm, field); \
163 static __unused __inline struct type * \
164 name##_SPLAY_MIN_MAX(struct name *head, int val) \
166 name##_SPLAY_MINMAX(head, val); \
167 return (SPLAY_ROOT(head)); \
170 /* Main splay operation.
171 * Moves node close to the key of elm to top
173 #define SPLAY_GENERATE(name, type, field, cmp) \
175 name##_SPLAY_INSERT(struct name *head, struct type *elm) \
177 if (SPLAY_EMPTY(head)) { \
178 SPLAY_LEFT(elm, field) = SPLAY_RIGHT(elm, field) = NULL; \
180 __typeof(cmp(NULL, NULL)) __comp; \
181 name##_SPLAY(head, elm); \
182 __comp = (cmp)(elm, (head)->sph_root); \
184 SPLAY_LEFT(elm, field) = SPLAY_LEFT((head)->sph_root, field);\
185 SPLAY_RIGHT(elm, field) = (head)->sph_root; \
186 SPLAY_LEFT((head)->sph_root, field) = NULL; \
187 } else if (__comp > 0) { \
188 SPLAY_RIGHT(elm, field) = SPLAY_RIGHT((head)->sph_root, field);\
189 SPLAY_LEFT(elm, field) = (head)->sph_root; \
190 SPLAY_RIGHT((head)->sph_root, field) = NULL; \
192 return ((head)->sph_root); \
194 (head)->sph_root = (elm); \
199 name##_SPLAY_REMOVE(struct name *head, struct type *elm) \
201 struct type *__tmp; \
202 if (SPLAY_EMPTY(head)) \
204 name##_SPLAY(head, elm); \
205 if ((cmp)(elm, (head)->sph_root) == 0) { \
206 if (SPLAY_LEFT((head)->sph_root, field) == NULL) { \
207 (head)->sph_root = SPLAY_RIGHT((head)->sph_root, field);\
209 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
210 (head)->sph_root = SPLAY_LEFT((head)->sph_root, field);\
211 name##_SPLAY(head, elm); \
212 SPLAY_RIGHT((head)->sph_root, field) = __tmp; \
220 name##_SPLAY(struct name *head, struct type *elm) \
222 struct type __node, *__left, *__right, *__tmp; \
223 __typeof(cmp(NULL, NULL)) __comp; \
225 SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
226 __left = __right = &__node; \
228 while ((__comp = (cmp)(elm, (head)->sph_root)) != 0) { \
230 __tmp = SPLAY_LEFT((head)->sph_root, field); \
233 if ((cmp)(elm, __tmp) < 0){ \
234 SPLAY_ROTATE_RIGHT(head, __tmp, field); \
235 if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
238 SPLAY_LINKLEFT(head, __right, field); \
239 } else if (__comp > 0) { \
240 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
243 if ((cmp)(elm, __tmp) > 0){ \
244 SPLAY_ROTATE_LEFT(head, __tmp, field); \
245 if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
248 SPLAY_LINKRIGHT(head, __left, field); \
251 SPLAY_ASSEMBLE(head, &__node, __left, __right, field); \
254 /* Splay with either the minimum or the maximum element \
255 * Used to find minimum or maximum element in tree. \
257 void name##_SPLAY_MINMAX(struct name *head, int __comp) \
259 struct type __node, *__left, *__right, *__tmp; \
261 SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
262 __left = __right = &__node; \
266 __tmp = SPLAY_LEFT((head)->sph_root, field); \
270 SPLAY_ROTATE_RIGHT(head, __tmp, field); \
271 if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
274 SPLAY_LINKLEFT(head, __right, field); \
275 } else if (__comp > 0) { \
276 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
280 SPLAY_ROTATE_LEFT(head, __tmp, field); \
281 if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
284 SPLAY_LINKRIGHT(head, __left, field); \
287 SPLAY_ASSEMBLE(head, &__node, __left, __right, field); \
290 #define SPLAY_NEGINF -1
293 #define SPLAY_INSERT(name, x, y) name##_SPLAY_INSERT(x, y)
294 #define SPLAY_REMOVE(name, x, y) name##_SPLAY_REMOVE(x, y)
295 #define SPLAY_FIND(name, x, y) name##_SPLAY_FIND(x, y)
296 #define SPLAY_NEXT(name, x, y) name##_SPLAY_NEXT(x, y)
297 #define SPLAY_MIN(name, x) (SPLAY_EMPTY(x) ? NULL \
298 : name##_SPLAY_MIN_MAX(x, SPLAY_NEGINF))
299 #define SPLAY_MAX(name, x) (SPLAY_EMPTY(x) ? NULL \
300 : name##_SPLAY_MIN_MAX(x, SPLAY_INF))
302 #define SPLAY_FOREACH(x, name, head) \
303 for ((x) = SPLAY_MIN(name, head); \
305 (x) = SPLAY_NEXT(name, head, x))
307 /* Macros that define a rank-balanced tree */
308 #define RB_HEAD(name, type) \
310 struct type *rbh_root; /* root of the tree */ \
313 #define RB_INITIALIZER(root) \
316 #define RB_INIT(root) do { \
317 (root)->rbh_root = NULL; \
318 } while (/*CONSTCOND*/ 0)
320 #define RB_ENTRY(type) \
322 struct type *rbe_link[3]; \
326 * With the expectation that any object of struct type has an
327 * address that is a multiple of 4, and that therefore the
328 * 2 least significant bits of a pointer to struct type are
329 * always zero, this implementation sets those bits to indicate
330 * that the left or right child of the tree node is "red".
332 #define _RB_LINK(elm, dir, field) (elm)->field.rbe_link[dir]
333 #define _RB_UP(elm, field) _RB_LINK(elm, 0, field)
334 #define _RB_L ((__uintptr_t)1)
335 #define _RB_R ((__uintptr_t)2)
336 #define _RB_LR ((__uintptr_t)3)
337 #define _RB_BITS(elm) (*(__uintptr_t *)&elm)
338 #define _RB_BITSUP(elm, field) _RB_BITS(_RB_UP(elm, field))
339 #define _RB_PTR(elm) (__typeof(elm)) \
340 ((__uintptr_t)elm & ~_RB_LR)
342 #define RB_PARENT(elm, field) _RB_PTR(_RB_UP(elm, field))
343 #define RB_LEFT(elm, field) _RB_LINK(elm, _RB_L, field)
344 #define RB_RIGHT(elm, field) _RB_LINK(elm, _RB_R, field)
345 #define RB_ROOT(head) (head)->rbh_root
346 #define RB_EMPTY(head) (RB_ROOT(head) == NULL)
348 #define RB_SET_PARENT(dst, src, field) do { \
349 _RB_BITSUP(dst, field) = (__uintptr_t)src | \
350 (_RB_BITSUP(dst, field) & _RB_LR); \
351 } while (/*CONSTCOND*/ 0)
353 #define RB_SET(elm, parent, field) do { \
354 _RB_UP(elm, field) = parent; \
355 RB_LEFT(elm, field) = RB_RIGHT(elm, field) = NULL; \
356 } while (/*CONSTCOND*/ 0)
359 * Either RB_AUGMENT or RB_AUGMENT_CHECK is invoked in a loop at the root of
360 * every modified subtree, from the bottom up to the root, to update augmented
361 * node data. RB_AUGMENT_CHECK returns true only when the update changes the
362 * node data, so that updating can be stopped short of the root when it returns
365 #ifndef RB_AUGMENT_CHECK
367 #define RB_AUGMENT_CHECK(x) 0
369 #define RB_AUGMENT_CHECK(x) (RB_AUGMENT(x), 1)
373 #define RB_UPDATE_AUGMENT(elm, field) do { \
374 __typeof(elm) rb_update_tmp = (elm); \
375 while (RB_AUGMENT_CHECK(rb_update_tmp) && \
376 (rb_update_tmp = RB_PARENT(rb_update_tmp, field)) != NULL) \
380 #define RB_SWAP_CHILD(head, par, out, in, field) do { \
382 RB_ROOT(head) = (in); \
383 else if ((out) == RB_LEFT(par, field)) \
384 RB_LEFT(par, field) = (in); \
386 RB_RIGHT(par, field) = (in); \
387 } while (/*CONSTCOND*/ 0)
390 * RB_ROTATE macro partially restructures the tree to improve balance. In the
391 * case when dir is _RB_L, tmp is a right child of elm. After rotation, elm
392 * is a left child of tmp, and the subtree that represented the items between
393 * them, which formerly hung to the left of tmp now hangs to the right of elm.
394 * The parent-child relationship between elm and its former parent is not
395 * changed; where this macro once updated those fields, that is now left to the
396 * caller of RB_ROTATE to clean up, so that a pair of rotations does not twice
397 * update the same pair of pointer fields with distinct values.
399 #define RB_ROTATE(elm, tmp, dir, field) do { \
400 if ((_RB_LINK(elm, dir ^ _RB_LR, field) = \
401 _RB_LINK(tmp, dir, field)) != NULL) \
402 RB_SET_PARENT(_RB_LINK(tmp, dir, field), elm, field); \
403 _RB_LINK(tmp, dir, field) = (elm); \
404 RB_SET_PARENT(elm, tmp, field); \
405 } while (/*CONSTCOND*/ 0)
407 /* Generates prototypes and inline functions */
408 #define RB_PROTOTYPE(name, type, field, cmp) \
409 RB_PROTOTYPE_INTERNAL(name, type, field, cmp,)
410 #define RB_PROTOTYPE_STATIC(name, type, field, cmp) \
411 RB_PROTOTYPE_INTERNAL(name, type, field, cmp, __unused static)
412 #define RB_PROTOTYPE_INTERNAL(name, type, field, cmp, attr) \
413 RB_PROTOTYPE_RANK(name, type, attr) \
414 RB_PROTOTYPE_INSERT_COLOR(name, type, attr); \
415 RB_PROTOTYPE_REMOVE_COLOR(name, type, attr); \
416 RB_PROTOTYPE_INSERT_FINISH(name, type, attr); \
417 RB_PROTOTYPE_INSERT(name, type, attr); \
418 RB_PROTOTYPE_REMOVE(name, type, attr); \
419 RB_PROTOTYPE_FIND(name, type, attr); \
420 RB_PROTOTYPE_NFIND(name, type, attr); \
421 RB_PROTOTYPE_NEXT(name, type, attr); \
422 RB_PROTOTYPE_INSERT_NEXT(name, type, attr); \
423 RB_PROTOTYPE_PREV(name, type, attr); \
424 RB_PROTOTYPE_INSERT_PREV(name, type, attr); \
425 RB_PROTOTYPE_MINMAX(name, type, attr); \
426 RB_PROTOTYPE_REINSERT(name, type, attr);
427 #ifdef _RB_DIAGNOSTIC
428 #define RB_PROTOTYPE_RANK(name, type, attr) \
429 attr int name##_RB_RANK(struct type *);
431 #define RB_PROTOTYPE_RANK(name, type, attr)
433 #define RB_PROTOTYPE_INSERT_COLOR(name, type, attr) \
434 attr struct type *name##_RB_INSERT_COLOR(struct name *, \
435 struct type *, struct type *)
436 #define RB_PROTOTYPE_REMOVE_COLOR(name, type, attr) \
437 attr struct type *name##_RB_REMOVE_COLOR(struct name *, \
438 struct type *, struct type *)
439 #define RB_PROTOTYPE_REMOVE(name, type, attr) \
440 attr struct type *name##_RB_REMOVE(struct name *, struct type *)
441 #define RB_PROTOTYPE_INSERT_FINISH(name, type, attr) \
442 attr struct type *name##_RB_INSERT_FINISH(struct name *, \
443 struct type *, struct type **, struct type *)
444 #define RB_PROTOTYPE_INSERT(name, type, attr) \
445 attr struct type *name##_RB_INSERT(struct name *, struct type *)
446 #define RB_PROTOTYPE_FIND(name, type, attr) \
447 attr struct type *name##_RB_FIND(struct name *, struct type *)
448 #define RB_PROTOTYPE_NFIND(name, type, attr) \
449 attr struct type *name##_RB_NFIND(struct name *, struct type *)
450 #define RB_PROTOTYPE_NEXT(name, type, attr) \
451 attr struct type *name##_RB_NEXT(struct type *)
452 #define RB_PROTOTYPE_INSERT_NEXT(name, type, attr) \
453 attr struct type *name##_RB_INSERT_NEXT(struct name *, \
454 struct type *, struct type *)
455 #define RB_PROTOTYPE_PREV(name, type, attr) \
456 attr struct type *name##_RB_PREV(struct type *)
457 #define RB_PROTOTYPE_INSERT_PREV(name, type, attr) \
458 attr struct type *name##_RB_INSERT_PREV(struct name *, \
459 struct type *, struct type *)
460 #define RB_PROTOTYPE_MINMAX(name, type, attr) \
461 attr struct type *name##_RB_MINMAX(struct name *, int)
462 #define RB_PROTOTYPE_REINSERT(name, type, attr) \
463 attr struct type *name##_RB_REINSERT(struct name *, struct type *)
465 /* Main rb operation.
466 * Moves node close to the key of elm to top
468 #define RB_GENERATE(name, type, field, cmp) \
469 RB_GENERATE_INTERNAL(name, type, field, cmp,)
470 #define RB_GENERATE_STATIC(name, type, field, cmp) \
471 RB_GENERATE_INTERNAL(name, type, field, cmp, __unused static)
472 #define RB_GENERATE_INTERNAL(name, type, field, cmp, attr) \
473 RB_GENERATE_RANK(name, type, field, attr) \
474 RB_GENERATE_INSERT_COLOR(name, type, field, attr) \
475 RB_GENERATE_REMOVE_COLOR(name, type, field, attr) \
476 RB_GENERATE_INSERT_FINISH(name, type, field, attr) \
477 RB_GENERATE_INSERT(name, type, field, cmp, attr) \
478 RB_GENERATE_REMOVE(name, type, field, attr) \
479 RB_GENERATE_FIND(name, type, field, cmp, attr) \
480 RB_GENERATE_NFIND(name, type, field, cmp, attr) \
481 RB_GENERATE_NEXT(name, type, field, attr) \
482 RB_GENERATE_INSERT_NEXT(name, type, field, cmp, attr) \
483 RB_GENERATE_PREV(name, type, field, attr) \
484 RB_GENERATE_INSERT_PREV(name, type, field, cmp, attr) \
485 RB_GENERATE_MINMAX(name, type, field, attr) \
486 RB_GENERATE_REINSERT(name, type, field, cmp, attr)
488 #ifdef _RB_DIAGNOSTIC
490 #define _RB_AUGMENT_VERIFY(x) RB_AUGMENT_CHECK(x)
492 #define _RB_AUGMENT_VERIFY(x) 0
494 #define RB_GENERATE_RANK(name, type, field, attr) \
496 * Return the rank of the subtree rooted at elm, or -1 if the subtree \
497 * is not rank-balanced, or has inconsistent augmentation data.
500 name##_RB_RANK(struct type *elm) \
502 struct type *left, *right, *up; \
503 int left_rank, right_rank; \
507 up = _RB_UP(elm, field); \
508 left = RB_LEFT(elm, field); \
509 left_rank = ((_RB_BITS(up) & _RB_L) ? 2 : 1) + \
510 name##_RB_RANK(left); \
511 right = RB_RIGHT(elm, field); \
512 right_rank = ((_RB_BITS(up) & _RB_R) ? 2 : 1) + \
513 name##_RB_RANK(right); \
514 if (left_rank != right_rank || \
515 (left_rank == 2 && left == NULL && right == NULL) || \
516 _RB_AUGMENT_VERIFY(elm)) \
518 return (left_rank); \
521 #define RB_GENERATE_RANK(name, type, field, attr)
524 #define RB_GENERATE_INSERT_COLOR(name, type, field, attr) \
526 name##_RB_INSERT_COLOR(struct name *head, \
527 struct type *parent, struct type *elm) \
530 * Initially, elm is a leaf. Either its parent was previously \
531 * a leaf, with two black null children, or an interior node \
532 * with a black non-null child and a red null child. The \
533 * balance criterion "the rank of any leaf is 1" precludes the \
534 * possibility of two red null children for the initial parent. \
535 * So the first loop iteration cannot lead to accessing an \
536 * uninitialized 'child', and a later iteration can only happen \
537 * when a value has been assigned to 'child' in the previous \
540 struct type *child, *child_up, *gpar; \
541 __uintptr_t elmdir, sibdir; \
544 /* the rank of the tree rooted at elm grew */ \
545 gpar = _RB_UP(parent, field); \
546 elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
547 if (_RB_BITS(gpar) & elmdir) { \
548 /* shorten the parent-elm edge to rebalance */ \
549 _RB_BITSUP(parent, field) ^= elmdir; \
552 sibdir = elmdir ^ _RB_LR; \
553 /* the other edge must change length */ \
554 _RB_BITSUP(parent, field) ^= sibdir; \
555 if ((_RB_BITS(gpar) & _RB_LR) == 0) { \
556 /* both edges now short, retry from parent */ \
561 _RB_UP(parent, field) = gpar = _RB_PTR(gpar); \
562 if (_RB_BITSUP(elm, field) & elmdir) { \
564 * Exactly one of the edges descending from elm \
565 * is long. The long one is in the same \
566 * direction as the edge from parent to elm, \
567 * so change that by rotation. The edge from \
568 * parent to z was shortened above. Shorten \
569 * the long edge down from elm, and adjust \
570 * other edge lengths based on the downward \
571 * edges from 'child'. \
583 RB_ROTATE(elm, child, elmdir, field); \
584 child_up = _RB_UP(child, field); \
585 if (_RB_BITS(child_up) & sibdir) \
586 _RB_BITSUP(parent, field) ^= elmdir; \
587 if (_RB_BITS(child_up) & elmdir) \
588 _RB_BITSUP(elm, field) ^= _RB_LR; \
590 _RB_BITSUP(elm, field) ^= elmdir; \
591 /* if child is a leaf, don't augment elm, \
592 * since it is restored to be a leaf again. */ \
593 if ((_RB_BITS(child_up) & _RB_LR) == 0) \
599 * The long edge descending from 'child' points back \
600 * in the direction of 'parent'. Rotate to make \
601 * 'parent' a child of 'child', then make both edges \
602 * of 'child' short to rebalance. \
612 RB_ROTATE(parent, child, sibdir, field); \
613 _RB_UP(child, field) = gpar; \
614 RB_SWAP_CHILD(head, gpar, parent, child, field); \
616 * Elements rotated down have new, smaller subtrees, \
617 * so update augmentation for them. \
620 (void)RB_AUGMENT_CHECK(elm); \
621 (void)RB_AUGMENT_CHECK(parent); \
623 } while ((parent = gpar) != NULL); \
627 #ifndef RB_STRICT_HST
629 * In REMOVE_COLOR, the HST paper, in figure 3, in the single-rotate case, has
630 * 'parent' with one higher rank, and then reduces its rank if 'parent' has
631 * become a leaf. This implementation always has the parent in its new position
632 * with lower rank, to avoid the leaf check. Define RB_STRICT_HST to 1 to get
633 * the behavior that HST describes.
635 #define RB_STRICT_HST 0
638 #define RB_GENERATE_REMOVE_COLOR(name, type, field, attr) \
640 name##_RB_REMOVE_COLOR(struct name *head, \
641 struct type *parent, struct type *elm) \
643 struct type *gpar, *sib, *up; \
644 __uintptr_t elmdir, sibdir; \
646 if (RB_RIGHT(parent, field) == elm && \
647 RB_LEFT(parent, field) == elm) { \
648 /* Deleting a leaf that is an only-child creates a \
649 * rank-2 leaf. Demote that leaf. */ \
650 _RB_UP(parent, field) = _RB_PTR(_RB_UP(parent, field)); \
652 if ((parent = _RB_UP(elm, field)) == NULL) \
656 /* the rank of the tree rooted at elm shrank */ \
657 gpar = _RB_UP(parent, field); \
658 elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
659 _RB_BITS(gpar) ^= elmdir; \
660 if (_RB_BITS(gpar) & elmdir) { \
661 /* lengthen the parent-elm edge to rebalance */ \
662 _RB_UP(parent, field) = gpar; \
665 if (_RB_BITS(gpar) & _RB_LR) { \
666 /* shorten other edge, retry from parent */ \
667 _RB_BITS(gpar) ^= _RB_LR; \
668 _RB_UP(parent, field) = gpar; \
669 gpar = _RB_PTR(gpar); \
672 sibdir = elmdir ^ _RB_LR; \
673 sib = _RB_LINK(parent, sibdir, field); \
674 up = _RB_UP(sib, field); \
675 _RB_BITS(up) ^= _RB_LR; \
676 if ((_RB_BITS(up) & _RB_LR) == 0) { \
677 /* shorten edges descending from sib, retry */ \
678 _RB_UP(sib, field) = up; \
681 if ((_RB_BITS(up) & sibdir) == 0) { \
683 * The edge descending from 'sib' away from \
684 * 'parent' is long. The short edge descending \
685 * from 'sib' toward 'parent' points to 'elm*' \
686 * Rotate to make 'sib' a child of 'elm*' \
687 * then adjust the lengths of the edges \
688 * descending from 'sib' and 'elm*'. \
702 elm = _RB_LINK(sib, elmdir, field); \
703 /* elm is a 1-child. First rotate at elm. */ \
704 RB_ROTATE(sib, elm, sibdir, field); \
705 up = _RB_UP(elm, field); \
706 _RB_BITSUP(parent, field) ^= \
707 (_RB_BITS(up) & elmdir) ? _RB_LR : elmdir; \
708 _RB_BITSUP(sib, field) ^= \
709 (_RB_BITS(up) & sibdir) ? _RB_LR : sibdir; \
710 _RB_BITSUP(elm, field) |= _RB_LR; \
712 if ((_RB_BITS(up) & elmdir) == 0 && \
713 RB_STRICT_HST && elm != NULL) { \
714 /* if parent does not become a leaf, \
715 do not demote parent yet. */ \
716 _RB_BITSUP(parent, field) ^= sibdir; \
717 _RB_BITSUP(sib, field) ^= _RB_LR; \
718 } else if ((_RB_BITS(up) & elmdir) == 0) { \
719 /* demote parent. */ \
720 _RB_BITSUP(parent, field) ^= elmdir; \
721 _RB_BITSUP(sib, field) ^= sibdir; \
723 _RB_BITSUP(sib, field) ^= sibdir; \
728 * The edge descending from 'elm' away from 'parent' \
729 * is short. Rotate to make 'parent' a child of 'elm', \
730 * then lengthen the short edges descending from \
731 * 'parent' and 'elm' to rebalance. \
742 RB_ROTATE(parent, elm, elmdir, field); \
743 RB_SET_PARENT(elm, gpar, field); \
744 RB_SWAP_CHILD(head, gpar, parent, elm, field); \
746 * An element rotated down, but not into the search \
747 * path has a new, smaller subtree, so update \
748 * augmentation for it. \
751 (void)RB_AUGMENT_CHECK(sib); \
753 } while (elm = parent, (parent = gpar) != NULL); \
757 #define _RB_AUGMENT_WALK(elm, match, field) \
761 } while (RB_AUGMENT_CHECK(elm) && \
762 (elm = RB_PARENT(elm, field)) != NULL)
764 #define RB_GENERATE_REMOVE(name, type, field, attr) \
766 name##_RB_REMOVE(struct name *head, struct type *out) \
768 struct type *child, *in, *opar, *parent; \
770 child = RB_LEFT(out, field); \
771 in = RB_RIGHT(out, field); \
772 opar = _RB_UP(out, field); \
773 if (in == NULL || child == NULL) { \
774 in = child = (in == NULL ? child : in); \
775 parent = opar = _RB_PTR(opar); \
778 while (RB_LEFT(in, field)) \
779 in = RB_LEFT(in, field); \
780 RB_SET_PARENT(child, in, field); \
781 RB_LEFT(in, field) = child; \
782 child = RB_RIGHT(in, field); \
783 if (parent != in) { \
784 RB_SET_PARENT(parent, in, field); \
785 RB_RIGHT(in, field) = parent; \
786 parent = RB_PARENT(in, field); \
787 RB_LEFT(parent, field) = child; \
789 _RB_UP(in, field) = opar; \
790 opar = _RB_PTR(opar); \
792 RB_SWAP_CHILD(head, opar, out, in, field); \
794 _RB_UP(child, field) = parent; \
795 if (parent != NULL) { \
796 opar = name##_RB_REMOVE_COLOR(head, parent, child); \
797 /* if rotation has made 'parent' the root of the same \
798 * subtree as before, don't re-augment it. */ \
799 if (parent == in && RB_LEFT(parent, field) == NULL) { \
801 parent = RB_PARENT(parent, field); \
803 _RB_AUGMENT_WALK(parent, opar, field); \
804 if (opar != NULL) { \
806 * Elements rotated into the search path have \
807 * changed subtrees, so update augmentation for \
808 * them if AUGMENT_WALK didn't. \
810 (void)RB_AUGMENT_CHECK(opar); \
811 (void)RB_AUGMENT_CHECK(RB_PARENT(opar, field)); \
817 #define RB_GENERATE_INSERT_FINISH(name, type, field, attr) \
818 /* Inserts a node into the RB tree */ \
820 name##_RB_INSERT_FINISH(struct name *head, struct type *parent, \
821 struct type **pptr, struct type *elm) \
823 struct type *tmp = NULL; \
825 RB_SET(elm, parent, field); \
827 if (parent != NULL) \
828 tmp = name##_RB_INSERT_COLOR(head, parent, elm); \
829 _RB_AUGMENT_WALK(elm, tmp, field); \
832 * An element rotated into the search path has a \
833 * changed subtree, so update augmentation for it if \
834 * AUGMENT_WALK didn't. \
836 (void)RB_AUGMENT_CHECK(tmp); \
840 #define RB_GENERATE_INSERT(name, type, field, cmp, attr) \
841 /* Inserts a node into the RB tree */ \
843 name##_RB_INSERT(struct name *head, struct type *elm) \
846 struct type **tmpp = &RB_ROOT(head); \
847 struct type *parent = NULL; \
849 while ((tmp = *tmpp) != NULL) { \
851 __typeof(cmp(NULL, NULL)) comp = (cmp)(elm, parent); \
853 tmpp = &RB_LEFT(parent, field); \
855 tmpp = &RB_RIGHT(parent, field); \
859 return (name##_RB_INSERT_FINISH(head, parent, tmpp, elm)); \
862 #define RB_GENERATE_FIND(name, type, field, cmp, attr) \
863 /* Finds the node with the same key as elm */ \
865 name##_RB_FIND(struct name *head, struct type *elm) \
867 struct type *tmp = RB_ROOT(head); \
868 __typeof(cmp(NULL, NULL)) comp; \
870 comp = cmp(elm, tmp); \
872 tmp = RB_LEFT(tmp, field); \
874 tmp = RB_RIGHT(tmp, field); \
881 #define RB_GENERATE_NFIND(name, type, field, cmp, attr) \
882 /* Finds the first node greater than or equal to the search key */ \
884 name##_RB_NFIND(struct name *head, struct type *elm) \
886 struct type *tmp = RB_ROOT(head); \
887 struct type *res = NULL; \
888 __typeof(cmp(NULL, NULL)) comp; \
890 comp = cmp(elm, tmp); \
893 tmp = RB_LEFT(tmp, field); \
896 tmp = RB_RIGHT(tmp, field); \
903 #define RB_GENERATE_NEXT(name, type, field, attr) \
906 name##_RB_NEXT(struct type *elm) \
908 if (RB_RIGHT(elm, field)) { \
909 elm = RB_RIGHT(elm, field); \
910 while (RB_LEFT(elm, field)) \
911 elm = RB_LEFT(elm, field); \
913 while (RB_PARENT(elm, field) && \
914 (elm == RB_RIGHT(RB_PARENT(elm, field), field))) \
915 elm = RB_PARENT(elm, field); \
916 elm = RB_PARENT(elm, field); \
921 #if defined(_KERNEL) && defined(DIAGNOSTIC)
922 #define _RB_ORDER_CHECK(cmp, lo, hi) do { \
923 KASSERT((cmp)(lo, hi) < 0, ("out of order insertion")); \
926 #define _RB_ORDER_CHECK(cmp, lo, hi) do {} while (0)
929 #define RB_GENERATE_INSERT_NEXT(name, type, field, cmp, attr) \
930 /* Inserts a node into the next position in the RB tree */ \
932 name##_RB_INSERT_NEXT(struct name *head, \
933 struct type *elm, struct type *next) \
936 struct type **tmpp = &RB_RIGHT(elm, field); \
938 _RB_ORDER_CHECK(cmp, elm, next); \
939 if (name##_RB_NEXT(elm) != NULL) \
940 _RB_ORDER_CHECK(cmp, next, name##_RB_NEXT(elm)); \
941 while ((tmp = *tmpp) != NULL) { \
943 tmpp = &RB_LEFT(elm, field); \
945 return (name##_RB_INSERT_FINISH(head, elm, tmpp, next)); \
948 #define RB_GENERATE_PREV(name, type, field, attr) \
951 name##_RB_PREV(struct type *elm) \
953 if (RB_LEFT(elm, field)) { \
954 elm = RB_LEFT(elm, field); \
955 while (RB_RIGHT(elm, field)) \
956 elm = RB_RIGHT(elm, field); \
958 while (RB_PARENT(elm, field) && \
959 (elm == RB_LEFT(RB_PARENT(elm, field), field))) \
960 elm = RB_PARENT(elm, field); \
961 elm = RB_PARENT(elm, field); \
966 #define RB_GENERATE_INSERT_PREV(name, type, field, cmp, attr) \
967 /* Inserts a node into the prev position in the RB tree */ \
969 name##_RB_INSERT_PREV(struct name *head, \
970 struct type *elm, struct type *prev) \
973 struct type **tmpp = &RB_LEFT(elm, field); \
975 _RB_ORDER_CHECK(cmp, prev, elm); \
976 if (name##_RB_PREV(elm) != NULL) \
977 _RB_ORDER_CHECK(cmp, name##_RB_PREV(elm), prev); \
978 while ((tmp = *tmpp) != NULL) { \
980 tmpp = &RB_RIGHT(elm, field); \
982 return (name##_RB_INSERT_FINISH(head, elm, tmpp, prev)); \
985 #define RB_GENERATE_MINMAX(name, type, field, attr) \
987 name##_RB_MINMAX(struct name *head, int val) \
989 struct type *tmp = RB_ROOT(head); \
990 struct type *parent = NULL; \
994 tmp = RB_LEFT(tmp, field); \
996 tmp = RB_RIGHT(tmp, field); \
1001 #define RB_GENERATE_REINSERT(name, type, field, cmp, attr) \
1002 attr struct type * \
1003 name##_RB_REINSERT(struct name *head, struct type *elm) \
1005 struct type *cmpelm; \
1006 if (((cmpelm = RB_PREV(name, head, elm)) != NULL && \
1007 cmp(cmpelm, elm) >= 0) || \
1008 ((cmpelm = RB_NEXT(name, head, elm)) != NULL && \
1009 cmp(elm, cmpelm) >= 0)) { \
1010 /* XXXLAS: Remove/insert is heavy handed. */ \
1011 RB_REMOVE(name, head, elm); \
1012 return (RB_INSERT(name, head, elm)); \
1017 #define RB_NEGINF -1
1020 #define RB_INSERT(name, x, y) name##_RB_INSERT(x, y)
1021 #define RB_INSERT_NEXT(name, x, y, z) name##_RB_INSERT_NEXT(x, y, z)
1022 #define RB_INSERT_PREV(name, x, y, z) name##_RB_INSERT_PREV(x, y, z)
1023 #define RB_REMOVE(name, x, y) name##_RB_REMOVE(x, y)
1024 #define RB_FIND(name, x, y) name##_RB_FIND(x, y)
1025 #define RB_NFIND(name, x, y) name##_RB_NFIND(x, y)
1026 #define RB_NEXT(name, x, y) name##_RB_NEXT(y)
1027 #define RB_PREV(name, x, y) name##_RB_PREV(y)
1028 #define RB_MIN(name, x) name##_RB_MINMAX(x, RB_NEGINF)
1029 #define RB_MAX(name, x) name##_RB_MINMAX(x, RB_INF)
1030 #define RB_REINSERT(name, x, y) name##_RB_REINSERT(x, y)
1032 #define RB_FOREACH(x, name, head) \
1033 for ((x) = RB_MIN(name, head); \
1035 (x) = name##_RB_NEXT(x))
1037 #define RB_FOREACH_FROM(x, name, y) \
1039 ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL); \
1042 #define RB_FOREACH_SAFE(x, name, head, y) \
1043 for ((x) = RB_MIN(name, head); \
1044 ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL); \
1047 #define RB_FOREACH_REVERSE(x, name, head) \
1048 for ((x) = RB_MAX(name, head); \
1050 (x) = name##_RB_PREV(x))
1052 #define RB_FOREACH_REVERSE_FROM(x, name, y) \
1054 ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL); \
1057 #define RB_FOREACH_REVERSE_SAFE(x, name, head, y) \
1058 for ((x) = RB_MAX(name, head); \
1059 ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL); \
1062 #endif /* _SYS_TREE_H_ */