1 /* $NetBSD: tree.h,v 1.8 2004/03/28 19:38:30 provos Exp $ */
2 /* $OpenBSD: tree.h,v 1.7 2002/10/17 21:51:54 art Exp $ */
6 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
8 * Copyright 2002 Niels Provos <provos@citi.umich.edu>
11 * Redistribution and use in source and binary forms, with or without
12 * modification, are permitted provided that the following conditions
14 * 1. Redistributions of source code must retain the above copyright
15 * notice, this list of conditions and the following disclaimer.
16 * 2. Redistributions in binary form must reproduce the above copyright
17 * notice, this list of conditions and the following disclaimer in the
18 * documentation and/or other materials provided with the distribution.
20 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
21 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
22 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
23 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
24 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
25 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
29 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
35 #include <sys/cdefs.h>
38 * This file defines data structures for different types of trees:
39 * splay trees and rank-balanced trees.
41 * A splay tree is a self-organizing data structure. Every operation
42 * on the tree causes a splay to happen. The splay moves the requested
43 * node to the root of the tree and partly rebalances it.
45 * This has the benefit that request locality causes faster lookups as
46 * the requested nodes move to the top of the tree. On the other hand,
47 * every lookup causes memory writes.
49 * The Balance Theorem bounds the total access time for m operations
50 * and n inserts on an initially empty tree as O((m + n)lg n). The
51 * amortized cost for a sequence of m accesses to a splay tree is O(lg n);
53 * A rank-balanced tree is a binary search tree with an integer
54 * rank-difference as an attribute of each pointer from parent to child.
55 * The sum of the rank-differences on any path from a node down to null is
56 * the same, and defines the rank of that node. The rank of the null node
59 * Different additional conditions define different sorts of balanced trees,
60 * including "red-black" and "AVL" trees. The set of conditions applied here
61 * are the "weak-AVL" conditions of Haeupler, Sen and Tarjan presented in
62 * "Rank Balanced Trees", ACM Transactions on Algorithms, Volume 11, Issue 4,
63 * June 2015, Article No. 30, pp. 1-26, https://doi.org/10.1145/2689412 (the HST paper):
64 * - every rank-difference is 1 or 2.
65 * - the rank of any leaf is 1.
67 * For historical reasons, rank differences that are even are associated
68 * with the color red (Rank-Even-Difference), and the child that a red edge
69 * points to is called a red child.
71 * Every operation on a rank-balanced tree is bounded as O(lg n).
72 * The maximum height of a rank-balanced tree is 2lg (n+1).
75 #define SPLAY_HEAD(name, type) \
77 struct type *sph_root; /* root of the tree */ \
80 #define SPLAY_INITIALIZER(root) \
83 #define SPLAY_INIT(root) do { \
84 (root)->sph_root = NULL; /* an empty splay tree has a NULL root */ \
85 } while (/*CONSTCOND*/ 0)
87 #define SPLAY_ENTRY(type) \
89 struct type *spe_left; /* left element */ \
90 struct type *spe_right; /* right element */ \
93 #define SPLAY_LEFT(elm, field) (elm)->field.spe_left /* left child */
94 #define SPLAY_RIGHT(elm, field) (elm)->field.spe_right /* right child */
95 #define SPLAY_ROOT(head) (head)->sph_root /* current tree root */
96 #define SPLAY_EMPTY(head) (SPLAY_ROOT(head) == NULL) /* true iff tree has no nodes */
98 /* SPLAY_ROTATE_{LEFT,RIGHT} expect that tmp holds SPLAY_{RIGHT,LEFT} */
99 #define SPLAY_ROTATE_RIGHT(head, tmp, field) do { /* tmp is root's left child */ \
100 SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(tmp, field); /* tmp's right subtree moves under old root */ \
101 SPLAY_RIGHT(tmp, field) = (head)->sph_root; /* old root becomes tmp's right child */ \
102 (head)->sph_root = tmp; /* tmp is the new root */ \
103 } while (/*CONSTCOND*/ 0)
105 #define SPLAY_ROTATE_LEFT(head, tmp, field) do { /* tmp is root's right child */ \
106 SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(tmp, field); /* tmp's left subtree moves under old root */ \
107 SPLAY_LEFT(tmp, field) = (head)->sph_root; /* old root becomes tmp's left child */ \
108 (head)->sph_root = tmp; /* tmp is the new root */ \
109 } while (/*CONSTCOND*/ 0)
111 #define SPLAY_LINKLEFT(head, tmp, field) do { /* top-down splay: link current root into the right accumulator tree */ \
112 SPLAY_LEFT(tmp, field) = (head)->sph_root; \
113 tmp = (head)->sph_root; \
114 (head)->sph_root = SPLAY_LEFT((head)->sph_root, field); /* descend left */ \
115 } while (/*CONSTCOND*/ 0)
117 #define SPLAY_LINKRIGHT(head, tmp, field) do { /* top-down splay: link current root into the left accumulator tree */ \
118 SPLAY_RIGHT(tmp, field) = (head)->sph_root; \
119 tmp = (head)->sph_root; \
120 (head)->sph_root = SPLAY_RIGHT((head)->sph_root, field); /* descend right */ \
121 } while (/*CONSTCOND*/ 0)
123 #define SPLAY_ASSEMBLE(head, node, left, right, field) do { /* reattach the left/right accumulator trees around the final root; 'node' is the temporary header */ \
124 SPLAY_RIGHT(left, field) = SPLAY_LEFT((head)->sph_root, field); \
125 SPLAY_LEFT(right, field) = SPLAY_RIGHT((head)->sph_root, field);\
126 SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(node, field); \
127 SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(node, field); \
128 } while (/*CONSTCOND*/ 0)
130 /* Generates prototypes and inline functions */
132 #define SPLAY_PROTOTYPE(name, type, field, cmp) \
133 void name##_SPLAY(struct name *, struct type *); \
134 void name##_SPLAY_MINMAX(struct name *, int); \
135 struct type *name##_SPLAY_INSERT(struct name *, struct type *); \
136 struct type *name##_SPLAY_REMOVE(struct name *, struct type *); \
138 /* Finds the node with the same key as elm */ \
139 static __unused __inline struct type * \
140 name##_SPLAY_FIND(struct name *head, struct type *elm) \
142 if (SPLAY_EMPTY(head)) \
144 name##_SPLAY(head, elm); \
145 if ((cmp)(elm, (head)->sph_root) == 0) \
146 return (head->sph_root); \
150 static __unused __inline struct type * \
151 name##_SPLAY_NEXT(struct name *head, struct type *elm) \
153 name##_SPLAY(head, elm); \
154 if (SPLAY_RIGHT(elm, field) != NULL) { \
155 elm = SPLAY_RIGHT(elm, field); \
156 while (SPLAY_LEFT(elm, field) != NULL) { \
157 elm = SPLAY_LEFT(elm, field); \
164 static __unused __inline struct type * \
165 name##_SPLAY_MIN_MAX(struct name *head, int val) \
167 name##_SPLAY_MINMAX(head, val); \
168 return (SPLAY_ROOT(head)); \
171 /* Main splay operation.
172 * Moves node close to the key of elm to top
174 #define SPLAY_GENERATE(name, type, field, cmp) \
176 name##_SPLAY_INSERT(struct name *head, struct type *elm) \
178 if (SPLAY_EMPTY(head)) { \
179 SPLAY_LEFT(elm, field) = SPLAY_RIGHT(elm, field) = NULL; \
181 __typeof(cmp(NULL, NULL)) __comp; \
182 name##_SPLAY(head, elm); \
183 __comp = (cmp)(elm, (head)->sph_root); \
185 SPLAY_LEFT(elm, field) = SPLAY_LEFT((head)->sph_root, field);\
186 SPLAY_RIGHT(elm, field) = (head)->sph_root; \
187 SPLAY_LEFT((head)->sph_root, field) = NULL; \
188 } else if (__comp > 0) { \
189 SPLAY_RIGHT(elm, field) = SPLAY_RIGHT((head)->sph_root, field);\
190 SPLAY_LEFT(elm, field) = (head)->sph_root; \
191 SPLAY_RIGHT((head)->sph_root, field) = NULL; \
193 return ((head)->sph_root); \
195 (head)->sph_root = (elm); \
200 name##_SPLAY_REMOVE(struct name *head, struct type *elm) \
202 struct type *__tmp; \
203 if (SPLAY_EMPTY(head)) \
205 name##_SPLAY(head, elm); \
206 if ((cmp)(elm, (head)->sph_root) == 0) { \
207 if (SPLAY_LEFT((head)->sph_root, field) == NULL) { \
208 (head)->sph_root = SPLAY_RIGHT((head)->sph_root, field);\
210 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
211 (head)->sph_root = SPLAY_LEFT((head)->sph_root, field);\
212 name##_SPLAY(head, elm); \
213 SPLAY_RIGHT((head)->sph_root, field) = __tmp; \
221 name##_SPLAY(struct name *head, struct type *elm) \
223 struct type __node, *__left, *__right, *__tmp; \
224 __typeof(cmp(NULL, NULL)) __comp; \
226 SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
227 __left = __right = &__node; \
229 while ((__comp = (cmp)(elm, (head)->sph_root)) != 0) { \
231 __tmp = SPLAY_LEFT((head)->sph_root, field); \
234 if ((cmp)(elm, __tmp) < 0){ \
235 SPLAY_ROTATE_RIGHT(head, __tmp, field); \
236 if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
239 SPLAY_LINKLEFT(head, __right, field); \
240 } else if (__comp > 0) { \
241 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
244 if ((cmp)(elm, __tmp) > 0){ \
245 SPLAY_ROTATE_LEFT(head, __tmp, field); \
246 if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
249 SPLAY_LINKRIGHT(head, __left, field); \
252 SPLAY_ASSEMBLE(head, &__node, __left, __right, field); \
255 /* Splay with either the minimum or the maximum element \
256 * Used to find minimum or maximum element in tree. \
258 void name##_SPLAY_MINMAX(struct name *head, int __comp) \
260 struct type __node, *__left, *__right, *__tmp; \
262 SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
263 __left = __right = &__node; \
267 __tmp = SPLAY_LEFT((head)->sph_root, field); \
271 SPLAY_ROTATE_RIGHT(head, __tmp, field); \
272 if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
275 SPLAY_LINKLEFT(head, __right, field); \
276 } else if (__comp > 0) { \
277 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
281 SPLAY_ROTATE_LEFT(head, __tmp, field); \
282 if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
285 SPLAY_LINKRIGHT(head, __left, field); \
288 SPLAY_ASSEMBLE(head, &__node, __left, __right, field); \
291 #define SPLAY_NEGINF -1 /* MINMAX direction: splay the minimum to the root */
/* User-facing wrappers that dispatch to the SPLAY_GENERATE'd functions. */
294 #define SPLAY_INSERT(name, x, y) name##_SPLAY_INSERT(x, y)
295 #define SPLAY_REMOVE(name, x, y) name##_SPLAY_REMOVE(x, y)
296 #define SPLAY_FIND(name, x, y) name##_SPLAY_FIND(x, y)
297 #define SPLAY_NEXT(name, x, y) name##_SPLAY_NEXT(x, y)
298 #define SPLAY_MIN(name, x) (SPLAY_EMPTY(x) ? NULL \
299 : name##_SPLAY_MIN_MAX(x, SPLAY_NEGINF))
300 #define SPLAY_MAX(name, x) (SPLAY_EMPTY(x) ? NULL \
301 : name##_SPLAY_MIN_MAX(x, SPLAY_INF))
303 #define SPLAY_FOREACH(x, name, head) \
304 for ((x) = SPLAY_MIN(name, head); \
306 (x) = SPLAY_NEXT(name, head, x))
308 /* Macros that define a rank-balanced tree */
309 #define RB_HEAD(name, type) \
311 struct type *rbh_root; /* root of the tree */ \
314 #define RB_INITIALIZER(root) \
317 #define RB_INIT(root) do { \
318 (root)->rbh_root = NULL; /* an empty rank-balanced tree has a NULL root */ \
319 } while (/*CONSTCOND*/ 0)
321 #define RB_ENTRY(type) \
323 struct type *rbe_link[3]; \
327 * With the expectation that any object of struct type has an
328 * address that is a multiple of 4, and that therefore the
329 * 2 least significant bits of a pointer to struct type are
330 * always zero, this implementation sets those bits to indicate
331 * that the left or right child of the tree node is "red".
333 #define _RB_LINK(elm, dir, field) (elm)->field.rbe_link[dir] /* raw link slot: 0 = parent, _RB_L, _RB_R */
334 #define _RB_UP(elm, field) _RB_LINK(elm, 0, field) /* parent pointer; low bits carry the color flags */
335 #define _RB_L ((__uintptr_t)1) /* left link index / "left child is red" bit */
336 #define _RB_R ((__uintptr_t)2) /* right link index / "right child is red" bit */
337 #define _RB_LR ((__uintptr_t)3) /* mask of both color bits */
338 #define _RB_BITS(elm) (*(__uintptr_t *)&elm) /* view a pointer variable as raw bits (lvalue) */
339 #define _RB_BITSUP(elm, field) _RB_BITS(_RB_UP(elm, field)) /* parent pointer bits, as an lvalue */
340 #define _RB_PTR(elm) (__typeof(elm)) \
341 ((__uintptr_t)elm & ~_RB_LR) /* strip color bits, yielding the real pointer */
343 #define RB_PARENT(elm, field) _RB_PTR(_RB_UP(elm, field)) /* parent with color bits stripped */
344 #define RB_LEFT(elm, field) _RB_LINK(elm, _RB_L, field)
345 #define RB_RIGHT(elm, field) _RB_LINK(elm, _RB_R, field)
346 #define RB_ROOT(head) (head)->rbh_root
347 #define RB_EMPTY(head) (RB_ROOT(head) == NULL)
349 #define RB_SET_PARENT(dst, src, field) do { \
350 _RB_BITSUP(dst, field) = (__uintptr_t)src | \
351 (_RB_BITSUP(dst, field) & _RB_LR); /* install new parent, preserving dst's color bits */ \
352 } while (/*CONSTCOND*/ 0)
354 #define RB_SET(elm, parent, field) do { \
355 _RB_UP(elm, field) = parent; /* link elm in as a leaf under 'parent' (both color bits clear) */ \
356 RB_LEFT(elm, field) = RB_RIGHT(elm, field) = NULL; \
357 } while (/*CONSTCOND*/ 0)
360 * Either RB_AUGMENT or RB_AUGMENT_CHECK is invoked in a loop at the root of
361 * every modified subtree, from the bottom up to the root, to update augmented
362 * node data. RB_AUGMENT_CHECK returns true only when the update changes the
363 * node data, so that updating can be stopped short of the root when it returns
366 #ifndef RB_AUGMENT_CHECK
368 #define RB_AUGMENT_CHECK(x) 0
370 #define RB_AUGMENT_CHECK(x) (RB_AUGMENT(x), 1)
374 #define RB_UPDATE_AUGMENT(elm, field) do { \
375 __typeof(elm) rb_update_tmp = (elm); \
376 while (RB_AUGMENT_CHECK(rb_update_tmp) && \
377 (rb_update_tmp = RB_PARENT(rb_update_tmp, field)) != NULL) \
381 #define RB_SWAP_CHILD(head, par, out, in, field) do { \
383 RB_ROOT(head) = (in); \
384 else if ((out) == RB_LEFT(par, field)) \
385 RB_LEFT(par, field) = (in); \
387 RB_RIGHT(par, field) = (in); \
388 } while (/*CONSTCOND*/ 0)
391 * RB_ROTATE macro partially restructures the tree to improve balance. In the
392 * case when dir is _RB_L, tmp is a right child of elm. After rotation, elm
393 * is a left child of tmp, and the subtree that represented the items between
394 * them, which formerly hung to the left of tmp now hangs to the right of elm.
395 * The parent-child relationship between elm and its former parent is not
396 * changed; where this macro once updated those fields, that is now left to the
397 * caller of RB_ROTATE to clean up, so that a pair of rotations does not twice
398 * update the same pair of pointer fields with distinct values.
400 #define RB_ROTATE(elm, tmp, dir, field) do { \
401 if ((_RB_LINK(elm, dir ^ _RB_LR, field) = \
402 _RB_LINK(tmp, dir, field)) != NULL) /* transfer tmp's inner subtree to elm */ \
403 RB_SET_PARENT(_RB_LINK(tmp, dir, field), elm, field); \
404 _RB_LINK(tmp, dir, field) = (elm); /* elm becomes tmp's 'dir' child */ \
405 RB_SET_PARENT(elm, tmp, field); /* caller must fix tmp's link to elm's old parent */ \
406 } while (/*CONSTCOND*/ 0)
408 /* Generates prototypes and inline functions */
409 #define RB_PROTOTYPE(name, type, field, cmp) \
410 RB_PROTOTYPE_INTERNAL(name, type, field, cmp,) /* extern linkage */
411 #define RB_PROTOTYPE_STATIC(name, type, field, cmp) \
412 RB_PROTOTYPE_INTERNAL(name, type, field, cmp, __unused static) /* file-local linkage */
413 #define RB_PROTOTYPE_INTERNAL(name, type, field, cmp, attr) \
414 RB_PROTOTYPE_RANK(name, type, attr) \
415 RB_PROTOTYPE_INSERT_COLOR(name, type, attr); \
416 RB_PROTOTYPE_REMOVE_COLOR(name, type, attr); \
417 RB_PROTOTYPE_INSERT(name, type, attr); \
418 RB_PROTOTYPE_REMOVE(name, type, attr); \
419 RB_PROTOTYPE_FIND(name, type, attr); \
420 RB_PROTOTYPE_NFIND(name, type, attr); \
421 RB_PROTOTYPE_NEXT(name, type, attr); \
422 RB_PROTOTYPE_PREV(name, type, attr); \
423 RB_PROTOTYPE_MINMAX(name, type, attr); \
424 RB_PROTOTYPE_REINSERT(name, type, attr);
425 #ifdef _RB_DIAGNOSTIC
426 #define RB_PROTOTYPE_RANK(name, type, attr) \
427 attr int name##_RB_RANK(struct type *);
429 #define RB_PROTOTYPE_RANK(name, type, attr)
/* One prototype per operation; names match the RB_GENERATE'd functions. */
431 #define RB_PROTOTYPE_INSERT_COLOR(name, type, attr) \
432 attr struct type *name##_RB_INSERT_COLOR(struct name *, \
433 struct type *, struct type *)
434 #define RB_PROTOTYPE_REMOVE_COLOR(name, type, attr) \
435 attr struct type *name##_RB_REMOVE_COLOR(struct name *, \
436 struct type *, struct type *)
437 #define RB_PROTOTYPE_REMOVE(name, type, attr) \
438 attr struct type *name##_RB_REMOVE(struct name *, struct type *)
439 #define RB_PROTOTYPE_INSERT(name, type, attr) \
440 attr struct type *name##_RB_INSERT(struct name *, struct type *)
441 #define RB_PROTOTYPE_FIND(name, type, attr) \
442 attr struct type *name##_RB_FIND(struct name *, struct type *)
443 #define RB_PROTOTYPE_NFIND(name, type, attr) \
444 attr struct type *name##_RB_NFIND(struct name *, struct type *)
445 #define RB_PROTOTYPE_NEXT(name, type, attr) \
446 attr struct type *name##_RB_NEXT(struct type *)
447 #define RB_PROTOTYPE_PREV(name, type, attr) \
448 attr struct type *name##_RB_PREV(struct type *)
449 #define RB_PROTOTYPE_MINMAX(name, type, attr) \
450 attr struct type *name##_RB_MINMAX(struct name *, int)
451 #define RB_PROTOTYPE_REINSERT(name, type, attr) \
452 attr struct type *name##_RB_REINSERT(struct name *, struct type *)
454 /* Main rb operation.
455 * Moves node close to the key of elm to top
457 #define RB_GENERATE(name, type, field, cmp) \
458 RB_GENERATE_INTERNAL(name, type, field, cmp,) /* emit extern-linkage definitions */
459 #define RB_GENERATE_STATIC(name, type, field, cmp) \
460 RB_GENERATE_INTERNAL(name, type, field, cmp, __unused static) /* emit file-local definitions */
461 #define RB_GENERATE_INTERNAL(name, type, field, cmp, attr) \
462 RB_GENERATE_RANK(name, type, field, attr) \
463 RB_GENERATE_INSERT_COLOR(name, type, field, attr) \
464 RB_GENERATE_REMOVE_COLOR(name, type, field, attr) \
465 RB_GENERATE_INSERT(name, type, field, cmp, attr) \
466 RB_GENERATE_REMOVE(name, type, field, attr) \
467 RB_GENERATE_FIND(name, type, field, cmp, attr) \
468 RB_GENERATE_NFIND(name, type, field, cmp, attr) \
469 RB_GENERATE_NEXT(name, type, field, attr) \
470 RB_GENERATE_PREV(name, type, field, attr) \
471 RB_GENERATE_MINMAX(name, type, field, attr) \
472 RB_GENERATE_REINSERT(name, type, field, cmp, attr)
474 #ifdef _RB_DIAGNOSTIC
476 #define _RB_AUGMENT_VERIFY(x) RB_AUGMENT_CHECK(x)
478 #define _RB_AUGMENT_VERIFY(x) 0
480 #define RB_GENERATE_RANK(name, type, field, attr) \
482 * Return the rank of the subtree rooted at elm, or -1 if the subtree \
483 * is not rank-balanced, or has inconsistent augmentation data.
486 name##_RB_RANK(struct type *elm) \
488 struct type *left, *right, *up; \
489 int left_rank, right_rank; \
493 up = _RB_UP(elm, field); \
494 left = RB_LEFT(elm, field); \
495 left_rank = ((_RB_BITS(up) & _RB_L) ? 2 : 1) + \
496 name##_RB_RANK(left); \
497 right = RB_RIGHT(elm, field); \
498 right_rank = ((_RB_BITS(up) & _RB_R) ? 2 : 1) + \
499 name##_RB_RANK(right); \
500 if (left_rank != right_rank || \
501 (left_rank == 2 && left == NULL && right == NULL) || \
502 _RB_AUGMENT_VERIFY(elm)) \
504 return (left_rank); \
507 #define RB_GENERATE_RANK(name, type, field, attr)
510 #define RB_GENERATE_INSERT_COLOR(name, type, field, attr) \
512 name##_RB_INSERT_COLOR(struct name *head, \
513 struct type *parent, struct type *elm) \
516 * Initially, elm is a leaf. Either its parent was previously \
517 * a leaf, with two black null children, or an interior node \
518 * with a black non-null child and a red null child. The \
519 * balance criterion "the rank of any leaf is 1" precludes the \
520 * possibility of two red null children for the initial parent. \
521 * So the first loop iteration cannot lead to accessing an \
522 * uninitialized 'child', and a later iteration can only happen \
523 * when a value has been assigned to 'child' in the previous \
526 struct type *child, *child_up, *gpar; \
527 __uintptr_t elmdir, sibdir; \
530 /* the rank of the tree rooted at elm grew */ \
531 gpar = _RB_UP(parent, field); \
532 elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
533 if (_RB_BITS(gpar) & elmdir) { \
534 /* shorten the parent-elm edge to rebalance */ \
535 _RB_BITSUP(parent, field) ^= elmdir; \
538 sibdir = elmdir ^ _RB_LR; \
539 /* the other edge must change length */ \
540 _RB_BITSUP(parent, field) ^= sibdir; \
541 if ((_RB_BITS(gpar) & _RB_LR) == 0) { \
542 /* both edges now short, retry from parent */ \
547 _RB_UP(parent, field) = gpar = _RB_PTR(gpar); \
548 if (_RB_BITSUP(elm, field) & elmdir) { \
550 * Exactly one of the edges descending from elm \
551 * is long. The long one is in the same \
552 * direction as the edge from parent to elm, \
553 * so change that by rotation. The edge from \
554 * parent to z was shortened above. Shorten \
555 * the long edge down from elm, and adjust \
556 * other edge lengths based on the downward \
557 * edges from 'child'. \
569 RB_ROTATE(elm, child, elmdir, field); \
570 child_up = _RB_UP(child, field); \
571 if (_RB_BITS(child_up) & sibdir) \
572 _RB_BITSUP(parent, field) ^= elmdir; \
573 if (_RB_BITS(child_up) & elmdir) \
574 _RB_BITSUP(elm, field) ^= _RB_LR; \
576 _RB_BITSUP(elm, field) ^= elmdir; \
577 /* if child is a leaf, don't augment elm, \
578 * since it is restored to be a leaf again. */ \
579 if ((_RB_BITS(child_up) & _RB_LR) == 0) \
585 * The long edge descending from 'child' points back \
586 * in the direction of 'parent'. Rotate to make \
587 * 'parent' a child of 'child', then make both edges \
588 * of 'child' short to rebalance. \
598 RB_ROTATE(parent, child, sibdir, field); \
599 _RB_UP(child, field) = gpar; \
600 RB_SWAP_CHILD(head, gpar, parent, child, field); \
602 * Elements rotated down have new, smaller subtrees, \
603 * so update augmentation for them. \
606 RB_AUGMENT_CHECK(elm); \
607 RB_AUGMENT_CHECK(parent); \
609 } while ((parent = gpar) != NULL); \
613 #ifndef RB_STRICT_HST
615 * In REMOVE_COLOR, the HST paper, in figure 3, in the single-rotate case, has
616 * 'parent' with one higher rank, and then reduces its rank if 'parent' has
617 * become a leaf. This implementation always has the parent in its new position
618 * with lower rank, to avoid the leaf check. Define RB_STRICT_HST to 1 to get
619 * the behavior that HST describes.
621 #define RB_STRICT_HST 0
624 #define RB_GENERATE_REMOVE_COLOR(name, type, field, attr) \
626 name##_RB_REMOVE_COLOR(struct name *head, \
627 struct type *parent, struct type *elm) \
629 struct type *gpar, *sib, *up; \
630 __uintptr_t elmdir, sibdir; \
632 if (RB_RIGHT(parent, field) == elm && \
633 RB_LEFT(parent, field) == elm) { \
634 /* Deleting a leaf that is an only-child creates a \
635 * rank-2 leaf. Demote that leaf. */ \
636 _RB_UP(parent, field) = _RB_PTR(_RB_UP(parent, field)); \
638 if ((parent = _RB_UP(elm, field)) == NULL) \
642 /* the rank of the tree rooted at elm shrank */ \
643 gpar = _RB_UP(parent, field); \
644 elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
645 _RB_BITS(gpar) ^= elmdir; \
646 if (_RB_BITS(gpar) & elmdir) { \
647 /* lengthen the parent-elm edge to rebalance */ \
648 _RB_UP(parent, field) = gpar; \
651 if (_RB_BITS(gpar) & _RB_LR) { \
652 /* shorten other edge, retry from parent */ \
653 _RB_BITS(gpar) ^= _RB_LR; \
654 _RB_UP(parent, field) = gpar; \
655 gpar = _RB_PTR(gpar); \
658 sibdir = elmdir ^ _RB_LR; \
659 sib = _RB_LINK(parent, sibdir, field); \
660 up = _RB_UP(sib, field); \
661 _RB_BITS(up) ^= _RB_LR; \
662 if ((_RB_BITS(up) & _RB_LR) == 0) { \
663 /* shorten edges descending from sib, retry */ \
664 _RB_UP(sib, field) = up; \
667 if ((_RB_BITS(up) & sibdir) == 0) { \
669 * The edge descending from 'sib' away from \
670 * 'parent' is long. The short edge descending \
671 * from 'sib' toward 'parent' points to 'elm*' \
672 * Rotate to make 'sib' a child of 'elm*' \
673 * then adjust the lengths of the edges \
674 * descending from 'sib' and 'elm*'. \
688 elm = _RB_LINK(sib, elmdir, field); \
689 /* elm is a 1-child. First rotate at elm. */ \
690 RB_ROTATE(sib, elm, sibdir, field); \
691 up = _RB_UP(elm, field); \
692 _RB_BITSUP(parent, field) ^= \
693 (_RB_BITS(up) & elmdir) ? _RB_LR : elmdir; \
694 _RB_BITSUP(sib, field) ^= \
695 (_RB_BITS(up) & sibdir) ? _RB_LR : sibdir; \
696 _RB_BITSUP(elm, field) |= _RB_LR; \
698 if ((_RB_BITS(up) & elmdir) == 0 && \
699 RB_STRICT_HST && elm != NULL) { \
700 /* if parent does not become a leaf, \
701 do not demote parent yet. */ \
702 _RB_BITSUP(parent, field) ^= sibdir; \
703 _RB_BITSUP(sib, field) ^= _RB_LR; \
704 } else if ((_RB_BITS(up) & elmdir) == 0) { \
705 /* demote parent. */ \
706 _RB_BITSUP(parent, field) ^= elmdir; \
707 _RB_BITSUP(sib, field) ^= sibdir; \
709 _RB_BITSUP(sib, field) ^= sibdir; \
714 * The edge descending from 'elm' away from 'parent' \
715 * is short. Rotate to make 'parent' a child of 'elm', \
716 * then lengthen the short edges descending from \
717 * 'parent' and 'elm' to rebalance. \
728 RB_ROTATE(parent, elm, elmdir, field); \
729 RB_SET_PARENT(elm, gpar, field); \
730 RB_SWAP_CHILD(head, gpar, parent, elm, field); \
732 * An element rotated down, but not into the search \
733 * path has a new, smaller subtree, so update \
734 * augmentation for it. \
737 RB_AUGMENT_CHECK(sib); \
739 } while (elm = parent, (parent = gpar) != NULL); \
743 #define _RB_AUGMENT_WALK(elm, match, field) \
747 } while (RB_AUGMENT_CHECK(elm) && \
748 (elm = RB_PARENT(elm, field)) != NULL)
750 #define RB_GENERATE_REMOVE(name, type, field, attr) \
752 name##_RB_REMOVE(struct name *head, struct type *out) \
754 struct type *child, *in, *opar, *parent; \
756 child = RB_LEFT(out, field); \
757 in = RB_RIGHT(out, field); \
758 opar = _RB_UP(out, field); \
759 if (in == NULL || child == NULL) { \
760 in = child = in == NULL ? child : in; \
761 parent = opar = _RB_PTR(opar); \
764 while (RB_LEFT(in, field)) \
765 in = RB_LEFT(in, field); \
766 RB_SET_PARENT(child, in, field); \
767 RB_LEFT(in, field) = child; \
768 child = RB_RIGHT(in, field); \
769 if (parent != in) { \
770 RB_SET_PARENT(parent, in, field); \
771 RB_RIGHT(in, field) = parent; \
772 parent = RB_PARENT(in, field); \
773 RB_LEFT(parent, field) = child; \
775 _RB_UP(in, field) = opar; \
776 opar = _RB_PTR(opar); \
778 RB_SWAP_CHILD(head, opar, out, in, field); \
780 _RB_UP(child, field) = parent; \
781 if (parent != NULL) { \
782 opar = name##_RB_REMOVE_COLOR(head, parent, child); \
783 /* if rotation has made 'parent' the root of the same \
784 * subtree as before, don't re-augment it. */ \
785 if (parent == in && RB_LEFT(parent, field) == NULL) { \
787 parent = RB_PARENT(parent, field); \
789 _RB_AUGMENT_WALK(parent, opar, field); \
790 if (opar != NULL) { \
792 * Elements rotated into the search path have \
793 * changed subtrees, so update augmentation for \
794 * them if AUGMENT_WALK didn't. \
796 RB_AUGMENT_CHECK(opar); \
797 RB_AUGMENT_CHECK(RB_PARENT(opar, field)); \
803 #define RB_GENERATE_INSERT(name, type, field, cmp, attr) \
804 /* Inserts a node into the RB tree */ \
806 name##_RB_INSERT(struct name *head, struct type *elm) \
809 struct type **tmpp = &RB_ROOT(head); \
810 struct type *parent = NULL; \
812 while ((tmp = *tmpp) != NULL) { \
814 __typeof(cmp(NULL, NULL)) comp = (cmp)(elm, parent); \
816 tmpp = &RB_LEFT(parent, field); \
818 tmpp = &RB_RIGHT(parent, field); \
822 RB_SET(elm, parent, field); \
824 if (parent != NULL) \
825 tmp = name##_RB_INSERT_COLOR(head, parent, elm); \
826 _RB_AUGMENT_WALK(elm, tmp, field); \
829 * An element rotated into the search path has a \
830 * changed subtree, so update augmentation for it if \
831 * AUGMENT_WALK didn't. \
833 RB_AUGMENT_CHECK(tmp); \
837 #define RB_GENERATE_FIND(name, type, field, cmp, attr) \
838 /* Finds the node with the same key as elm */ \
840 name##_RB_FIND(struct name *head, struct type *elm) \
842 struct type *tmp = RB_ROOT(head); \
843 __typeof(cmp(NULL, NULL)) comp; \
845 comp = cmp(elm, tmp); \
847 tmp = RB_LEFT(tmp, field); \
849 tmp = RB_RIGHT(tmp, field); \
856 #define RB_GENERATE_NFIND(name, type, field, cmp, attr) \
857 /* Finds the first node greater than or equal to the search key */ \
859 name##_RB_NFIND(struct name *head, struct type *elm) \
861 struct type *tmp = RB_ROOT(head); \
862 struct type *res = NULL; \
863 __typeof(cmp(NULL, NULL)) comp; \
865 comp = cmp(elm, tmp); \
868 tmp = RB_LEFT(tmp, field); \
871 tmp = RB_RIGHT(tmp, field); \
878 #define RB_GENERATE_NEXT(name, type, field, attr) \
881 name##_RB_NEXT(struct type *elm) \
883 if (RB_RIGHT(elm, field)) { \
884 elm = RB_RIGHT(elm, field); \
885 while (RB_LEFT(elm, field)) \
886 elm = RB_LEFT(elm, field); \
888 while (RB_PARENT(elm, field) && \
889 (elm == RB_RIGHT(RB_PARENT(elm, field), field))) \
890 elm = RB_PARENT(elm, field); \
891 elm = RB_PARENT(elm, field); \
896 #define RB_GENERATE_PREV(name, type, field, attr) \
899 name##_RB_PREV(struct type *elm) \
901 if (RB_LEFT(elm, field)) { \
902 elm = RB_LEFT(elm, field); \
903 while (RB_RIGHT(elm, field)) \
904 elm = RB_RIGHT(elm, field); \
906 while (RB_PARENT(elm, field) && \
907 (elm == RB_LEFT(RB_PARENT(elm, field), field))) \
908 elm = RB_PARENT(elm, field); \
909 elm = RB_PARENT(elm, field); \
914 #define RB_GENERATE_MINMAX(name, type, field, attr) \
916 name##_RB_MINMAX(struct name *head, int val) \
918 struct type *tmp = RB_ROOT(head); \
919 struct type *parent = NULL; \
923 tmp = RB_LEFT(tmp, field); \
925 tmp = RB_RIGHT(tmp, field); \
930 #define RB_GENERATE_REINSERT(name, type, field, cmp, attr) \
932 name##_RB_REINSERT(struct name *head, struct type *elm) \
934 struct type *cmpelm; \
935 if (((cmpelm = RB_PREV(name, head, elm)) != NULL && \
936 cmp(cmpelm, elm) >= 0) || \
937 ((cmpelm = RB_NEXT(name, head, elm)) != NULL && \
938 cmp(elm, cmpelm) >= 0)) { \
939 /* XXXLAS: Remove/insert is heavy handed. */ \
940 RB_REMOVE(name, head, elm); \
941 return (RB_INSERT(name, head, elm)); \
949 #define RB_INSERT(name, x, y) name##_RB_INSERT(x, y) /* convenience wrappers over the generated name##_RB_* functions */
950 #define RB_REMOVE(name, x, y) name##_RB_REMOVE(x, y)
951 #define RB_FIND(name, x, y) name##_RB_FIND(x, y)
952 #define RB_NFIND(name, x, y) name##_RB_NFIND(x, y)
953 #define RB_NEXT(name, x, y) name##_RB_NEXT(y) /* head argument x is unused */
954 #define RB_PREV(name, x, y) name##_RB_PREV(y) /* head argument x is unused */
955 #define RB_MIN(name, x) name##_RB_MINMAX(x, RB_NEGINF) /* smallest element, or NULL */
956 #define RB_MAX(name, x) name##_RB_MINMAX(x, RB_INF) /* largest element, or NULL */
957 #define RB_REINSERT(name, x, y) name##_RB_REINSERT(x, y) /* fix position after an in-place key change */
959 #define RB_FOREACH(x, name, head) \
960 for ((x) = RB_MIN(name, head); \
962 (x) = name##_RB_NEXT(x))
964 #define RB_FOREACH_FROM(x, name, y) \
966 ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL); \
969 #define RB_FOREACH_SAFE(x, name, head, y) \
970 for ((x) = RB_MIN(name, head); \
971 ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL); \
974 #define RB_FOREACH_REVERSE(x, name, head) \
975 for ((x) = RB_MAX(name, head); \
977 (x) = name##_RB_PREV(x))
979 #define RB_FOREACH_REVERSE_FROM(x, name, y) \
981 ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL); \
984 #define RB_FOREACH_REVERSE_SAFE(x, name, head, y) \
985 for ((x) = RB_MAX(name, head); \
986 ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL); \
989 #endif /* _SYS_TREE_H_ */