Mercurial > jemalloc
comparison rb.h @ 0:9a44d900ee55
initial import
author | Yoshiki Yazawa <yaz@honeyplanet.jp> |
---|---|
date | Mon, 05 Oct 2009 16:06:43 +0900 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:9a44d900ee55 |
---|---|
1 /****************************************************************************** | |
2 * | |
3 * Copyright (C) 2008 Jason Evans <jasone@FreeBSD.org>. | |
4 * All rights reserved. | |
5 * | |
6 * Redistribution and use in source and binary forms, with or without | |
7 * modification, are permitted provided that the following conditions | |
8 * are met: | |
9 * 1. Redistributions of source code must retain the above copyright | |
10 * notice(s), this list of conditions and the following disclaimer | |
11 * unmodified other than the allowable addition of one or more | |
12 * copyright notices. | |
13 * 2. Redistributions in binary form must reproduce the above copyright | |
14 * notice(s), this list of conditions and the following disclaimer in | |
15 * the documentation and/or other materials provided with the | |
16 * distribution. | |
17 * | |
18 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY | |
19 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | |
20 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
21 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) BE | |
22 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | |
23 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | |
24 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR | |
25 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | |
26 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE | |
27 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, | |
28 * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
29 * | |
30 ****************************************************************************** | |
31 * | |
32 * cpp macro implementation of left-leaning red-black trees. | |
33 * | |
34 * Usage: | |
35 * | |
36 * (Optional.) | |
37 * #define SIZEOF_PTR ... | |
38 * #define SIZEOF_PTR_2POW ... | |
39 * #define RB_NO_C99_VARARRAYS | |
40 * | |
41 * (Optional, see assert(3).) | |
42 * #define NDEBUG | |
43 * | |
44 * (Required.) | |
45 * #include <assert.h> | |
46 * #include <rb.h> | |
47 * ... | |
48 * | |
49 * All operations are done non-recursively. Parent pointers are not used, and | |
50 * color bits are stored in the least significant bit of right-child pointers, | |
51 * thus making node linkage as compact as is possible for red-black trees. | |
52 * | |
53 * Some macros use a comparison function pointer, which is expected to have the | |
54 * following prototype: | |
55 * | |
56 * int (a_cmp *)(a_type *a_node, a_type *a_other); | |
57 * ^^^^^^ | |
58 * or a_key | |
59 * | |
60 * Interpretation of comparison function return values: | |
61 * | |
62 * -1 : a_node < a_other | |
63 * 0 : a_node == a_other | |
64 * 1 : a_node > a_other | |
65 * | |
66 * In all cases, the a_node or a_key macro argument is the first argument to the | |
67 * comparison function, which makes it possible to write comparison functions | |
68 * that treat the first argument specially. | |
69 * | |
70 ******************************************************************************/ | |
71 | |
72 #ifndef RB_H_ | |
73 #define RB_H_ | |
74 | |
75 #if 0 | |
76 #include <sys/cdefs.h> | |
77 __FBSDID("$FreeBSD: head/lib/libc/stdlib/rb.h 178995 2008-05-14 18:33:13Z jasone $"); | |
78 #endif | |
79 | |
/*
 * Node structure.  Embed one of these as a field of a_type.  rbn_right_red
 * doubles as storage for the node's color: the low bit of the right-child
 * pointer is the red flag (see the color accessors below).
 */
#define rb_node(a_type) \
struct { \
    a_type *rbn_left; \
    a_type *rbn_right_red; \
}
86 | |
/*
 * Root structure.  rbt_nil is an embedded sentinel node used in place of
 * NULL child pointers; rb_new() initializes it black.
 */
#define rb_tree(a_type) \
struct { \
    a_type *rbt_root; \
    a_type rbt_nil; \
}
93 | |
/* Left accessors.  The left-child pointer is stored unmodified. */
#define rbp_left_get(a_type, a_field, a_node) \
    ((a_node)->a_field.rbn_left)
#define rbp_left_set(a_type, a_field, a_node, a_left) do { \
    (a_node)->a_field.rbn_left = a_left; \
} while (0)
100 | |
/*
 * Right accessors.  The right-child pointer shares its storage with the
 * color bit (bit 0), so the getter masks the bit off and the setter
 * preserves the current bit.  Masks are computed in uintptr_t rather than
 * the original signed (intptr_t)/(ssize_t)-2 arithmetic: bitwise operations
 * on signed values are representation-dependent, and ssize_t is not a
 * standard C type.  The resulting values are identical.
 */
#define rbp_right_get(a_type, a_field, a_node) \
    ((a_type *) (((uintptr_t) (a_node)->a_field.rbn_right_red) \
      & ~((uintptr_t)1)))
#define rbp_right_set(a_type, a_field, a_node, a_right) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) a_right) \
      | (((uintptr_t) (a_node)->a_field.rbn_right_red) & ((uintptr_t)1))); \
} while (0)
109 | |
/*
 * Color accessors.  A node is red iff bit 0 of rbn_right_red is set.
 * Masks are computed in uintptr_t rather than the original signed
 * (ssize_t)-2 arithmetic, which was representation-dependent and relied on
 * the non-standard ssize_t type; the resulting values are identical.
 */
#define rbp_red_get(a_type, a_field, a_node) \
    ((bool) (((uintptr_t) (a_node)->a_field.rbn_right_red) \
      & ((uintptr_t)1)))
/* Set the color from a bool (true == red). */
#define rbp_color_set(a_type, a_field, a_node, a_red) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) ((((uintptr_t) \
      (a_node)->a_field.rbn_right_red) & ~((uintptr_t)1)) \
      | ((uintptr_t)a_red)); \
} while (0)
#define rbp_red_set(a_type, a_field, a_node) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) \
      (a_node)->a_field.rbn_right_red) | ((uintptr_t)1)); \
} while (0)
#define rbp_black_set(a_type, a_field, a_node) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) \
      (a_node)->a_field.rbn_right_red) & ~((uintptr_t)1)); \
} while (0)
127 | |
/*
 * Node initializer: both children point at the tree's sentinel and the
 * node is colored red (new nodes are always inserted red).
 */
#define rbp_node_new(a_type, a_field, a_tree, a_node) do { \
    rbp_left_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil); \
    rbp_right_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil); \
    rbp_red_set(a_type, a_field, (a_node)); \
} while (0)
134 | |
/*
 * Tree initializer: an empty tree's root is the sentinel, and the sentinel
 * itself is self-linked and forced black.
 */
#define rb_new(a_type, a_field, a_tree) do { \
    (a_tree)->rbt_root = &(a_tree)->rbt_nil; \
    rbp_node_new(a_type, a_field, a_tree, &(a_tree)->rbt_nil); \
    rbp_black_set(a_type, a_field, &(a_tree)->rbt_nil); \
} while (0)
141 | |
/* Tree operations. */

/*
 * Store the tree's black height into r_height: the number of black nodes
 * on the left spine, excluding the sentinel.
 */
#define rbp_black_height(a_type, a_field, a_tree, r_height) do { \
    a_type *rbp_bh_t; \
    for (rbp_bh_t = (a_tree)->rbt_root, (r_height) = 0; \
      rbp_bh_t != &(a_tree)->rbt_nil; \
      rbp_bh_t = rbp_left_get(a_type, a_field, rbp_bh_t)) { \
        if (rbp_red_get(a_type, a_field, rbp_bh_t) == false) { \
            (r_height)++; \
        } \
    } \
} while (0)
153 | |
/*
 * Set r_node to the leftmost (minimum) node of the subtree rooted at
 * a_root; yields the sentinel if the subtree is empty.
 */
#define rbp_first(a_type, a_field, a_tree, a_root, r_node) do { \
    for ((r_node) = (a_root); \
      rbp_left_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil; \
      (r_node) = rbp_left_get(a_type, a_field, (r_node))) { \
    } \
} while (0)
160 | |
/*
 * Set r_node to the rightmost (maximum) node of the subtree rooted at
 * a_root; yields the sentinel if the subtree is empty.
 */
#define rbp_last(a_type, a_field, a_tree, a_root, r_node) do { \
    for ((r_node) = (a_root); \
      rbp_right_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil; \
      (r_node) = rbp_right_get(a_type, a_field, (r_node))) { \
    } \
} while (0)
167 | |
/*
 * Set r_node to a_node's in-order successor (the sentinel if a_node is the
 * maximum).  Since nodes carry no parent pointers, when a_node has no
 * right subtree the successor is found by re-searching from the root,
 * remembering the last node from which the search went left.
 */
#define rbp_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    if (rbp_right_get(a_type, a_field, (a_node)) \
      != &(a_tree)->rbt_nil) { \
        rbp_first(a_type, a_field, a_tree, rbp_right_get(a_type, \
          a_field, (a_node)), (r_node)); \
    } else { \
        a_type *rbp_n_t = (a_tree)->rbt_root; \
        assert(rbp_n_t != &(a_tree)->rbt_nil); \
        (r_node) = &(a_tree)->rbt_nil; \
        while (true) { \
            int rbp_n_cmp = (a_cmp)((a_node), rbp_n_t); \
            if (rbp_n_cmp < 0) { \
                (r_node) = rbp_n_t; \
                rbp_n_t = rbp_left_get(a_type, a_field, rbp_n_t); \
            } else if (rbp_n_cmp > 0) { \
                rbp_n_t = rbp_right_get(a_type, a_field, rbp_n_t); \
            } else { \
                break; \
            } \
            assert(rbp_n_t != &(a_tree)->rbt_nil); \
        } \
    } \
} while (0)
191 | |
/*
 * Set r_node to a_node's in-order predecessor (the sentinel if a_node is
 * the minimum).  Mirror image of rbp_next(): with no left subtree the
 * predecessor is found by re-searching from the root, remembering the last
 * node from which the search went right.
 */
#define rbp_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    if (rbp_left_get(a_type, a_field, (a_node)) != &(a_tree)->rbt_nil) {\
        rbp_last(a_type, a_field, a_tree, rbp_left_get(a_type, \
          a_field, (a_node)), (r_node)); \
    } else { \
        a_type *rbp_p_t = (a_tree)->rbt_root; \
        assert(rbp_p_t != &(a_tree)->rbt_nil); \
        (r_node) = &(a_tree)->rbt_nil; \
        while (true) { \
            int rbp_p_cmp = (a_cmp)((a_node), rbp_p_t); \
            if (rbp_p_cmp < 0) { \
                rbp_p_t = rbp_left_get(a_type, a_field, rbp_p_t); \
            } else if (rbp_p_cmp > 0) { \
                (r_node) = rbp_p_t; \
                rbp_p_t = rbp_right_get(a_type, a_field, rbp_p_t); \
            } else { \
                break; \
            } \
            assert(rbp_p_t != &(a_tree)->rbt_nil); \
        } \
    } \
} while (0)
214 | |
/* Public: set r_node to the tree's minimum node, or NULL if empty. */
#define rb_first(a_type, a_field, a_tree, r_node) do { \
    rbp_first(a_type, a_field, a_tree, (a_tree)->rbt_root, (r_node)); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)
221 | |
/*
 * Public: set r_node to the tree's maximum node, or NULL if empty.
 * r_node is parenthesized when forwarded to rbp_last(), matching
 * rb_first(); the original passed it bare, which could mis-expand if a
 * compound expression were supplied as the argument.
 */
#define rb_last(a_type, a_field, a_tree, r_node) do { \
    rbp_last(a_type, a_field, a_tree, (a_tree)->rbt_root, (r_node)); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)
228 | |
/* Public: set r_node to a_node's successor, or NULL if a_node is last. */
#define rb_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    rbp_next(a_type, a_field, a_cmp, a_tree, (a_node), (r_node)); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)
235 | |
/* Public: set r_node to a_node's predecessor, or NULL if a_node is first. */
#define rb_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    rbp_prev(a_type, a_field, a_cmp, a_tree, (a_node), (r_node)); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)
242 | |
/*
 * Public: exact-match search.  r_node gets the node comparing equal to
 * a_key, or NULL if there is none.
 */
#define rb_search(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
    int rbp_se_cmp; \
    (r_node) = (a_tree)->rbt_root; \
    while ((r_node) != &(a_tree)->rbt_nil \
      && (rbp_se_cmp = (a_cmp)((a_key), (r_node))) != 0) { \
        if (rbp_se_cmp < 0) { \
            (r_node) = rbp_left_get(a_type, a_field, (r_node)); \
        } else { \
            (r_node) = rbp_right_get(a_type, a_field, (r_node)); \
        } \
    } \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)
258 | |
/*
 * Find a match if it exists.  Otherwise, find the next greater node, if one
 * exists.  r_node gets the result, or NULL if every node is less than
 * a_key.
 */
#define rb_nsearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
    a_type *rbp_ns_t = (a_tree)->rbt_root; \
    (r_node) = NULL; \
    while (rbp_ns_t != &(a_tree)->rbt_nil) { \
        int rbp_ns_cmp = (a_cmp)((a_key), rbp_ns_t); \
        if (rbp_ns_cmp < 0) { \
            (r_node) = rbp_ns_t; \
            rbp_ns_t = rbp_left_get(a_type, a_field, rbp_ns_t); \
        } else if (rbp_ns_cmp > 0) { \
            rbp_ns_t = rbp_right_get(a_type, a_field, rbp_ns_t); \
        } else { \
            (r_node) = rbp_ns_t; \
            break; \
        } \
    } \
} while (0)
279 | |
/*
 * Find a match if it exists.  Otherwise, find the previous lesser node, if
 * one exists.  r_node gets the result, or NULL if every node is greater
 * than a_key.
 */
#define rb_psearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
    a_type *rbp_ps_t = (a_tree)->rbt_root; \
    (r_node) = NULL; \
    while (rbp_ps_t != &(a_tree)->rbt_nil) { \
        int rbp_ps_cmp = (a_cmp)((a_key), rbp_ps_t); \
        if (rbp_ps_cmp < 0) { \
            rbp_ps_t = rbp_left_get(a_type, a_field, rbp_ps_t); \
        } else if (rbp_ps_cmp > 0) { \
            (r_node) = rbp_ps_t; \
            rbp_ps_t = rbp_right_get(a_type, a_field, rbp_ps_t); \
        } else { \
            (r_node) = rbp_ps_t; \
            break; \
        } \
    } \
} while (0)
300 | |
/*
 * Left rotation about a_node; r_node becomes the new subtree root.
 * rbp_right_set() preserves a_node's color bit.
 */
#define rbp_rotate_left(a_type, a_field, a_node, r_node) do { \
    (r_node) = rbp_right_get(a_type, a_field, (a_node)); \
    rbp_right_set(a_type, a_field, (a_node), \
      rbp_left_get(a_type, a_field, (r_node))); \
    rbp_left_set(a_type, a_field, (r_node), (a_node)); \
} while (0)
307 | |
/*
 * Right rotation about a_node; r_node becomes the new subtree root.
 * rbp_right_set() preserves r_node's color bit.
 */
#define rbp_rotate_right(a_type, a_field, a_node, r_node) do { \
    (r_node) = rbp_left_get(a_type, a_field, (a_node)); \
    rbp_left_set(a_type, a_field, (a_node), \
      rbp_right_get(a_type, a_field, (r_node))); \
    rbp_right_set(a_type, a_field, (r_node), (a_node)); \
} while (0)
314 | |
/*
 * Left-lean: rotate left, give the new root a_node's former color, and
 * color a_node red.  Used to restore the left-leaning invariant.
 */
#define rbp_lean_left(a_type, a_field, a_node, r_node) do { \
    bool rbp_ll_red; \
    rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
    rbp_ll_red = rbp_red_get(a_type, a_field, (a_node)); \
    rbp_color_set(a_type, a_field, (r_node), rbp_ll_red); \
    rbp_red_set(a_type, a_field, (a_node)); \
} while (0)
322 | |
/*
 * Right-lean: rotate right, give the new root a_node's former color, and
 * color a_node red.  Mirror image of rbp_lean_left().
 */
#define rbp_lean_right(a_type, a_field, a_node, r_node) do { \
    bool rbp_lr_red; \
    rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
    rbp_lr_red = rbp_red_get(a_type, a_field, (a_node)); \
    rbp_color_set(a_type, a_field, (r_node), rbp_lr_red); \
    rbp_red_set(a_type, a_field, (a_node)); \
} while (0)
330 | |
/*
 * Deletion helper: ensure the left child along the search path is not a
 * 2-node before descending left.  r_node becomes the (possibly new)
 * subtree root.
 */
#define rbp_move_red_left(a_type, a_field, a_node, r_node) do { \
    a_type *rbp_mrl_t, *rbp_mrl_u; \
    rbp_mrl_t = rbp_left_get(a_type, a_field, (a_node)); \
    rbp_red_set(a_type, a_field, rbp_mrl_t); \
    rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node)); \
    rbp_mrl_u = rbp_left_get(a_type, a_field, rbp_mrl_t); \
    if (rbp_red_get(a_type, a_field, rbp_mrl_u)) { \
        /* Right sibling can lend a red link; rebalance toward the left. */\
        rbp_rotate_right(a_type, a_field, rbp_mrl_t, rbp_mrl_u); \
        rbp_right_set(a_type, a_field, (a_node), rbp_mrl_u); \
        rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
        rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node)); \
        if (rbp_red_get(a_type, a_field, rbp_mrl_t)) { \
            rbp_black_set(a_type, a_field, rbp_mrl_t); \
            rbp_red_set(a_type, a_field, (a_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrl_t); \
            rbp_left_set(a_type, a_field, (r_node), rbp_mrl_t); \
        } else { \
            rbp_black_set(a_type, a_field, (a_node)); \
        } \
    } else { \
        /* No red to borrow; flip colors and rotate. */\
        rbp_red_set(a_type, a_field, (a_node)); \
        rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
    } \
} while (0)
355 | |
/*
 * Deletion helper: ensure the right child along the search path is not a
 * 2-node before descending right.  Mirror-image counterpart of
 * rbp_move_red_left(); r_node becomes the (possibly new) subtree root.
 */
#define rbp_move_red_right(a_type, a_field, a_node, r_node) do { \
    a_type *rbp_mrr_t; \
    rbp_mrr_t = rbp_left_get(a_type, a_field, (a_node)); \
    if (rbp_red_get(a_type, a_field, rbp_mrr_t)) { \
        a_type *rbp_mrr_u, *rbp_mrr_v; \
        rbp_mrr_u = rbp_right_get(a_type, a_field, rbp_mrr_t); \
        rbp_mrr_v = rbp_left_get(a_type, a_field, rbp_mrr_u); \
        if (rbp_red_get(a_type, a_field, rbp_mrr_v)) { \
            rbp_color_set(a_type, a_field, rbp_mrr_u, \
              rbp_red_get(a_type, a_field, (a_node))); \
            rbp_black_set(a_type, a_field, rbp_mrr_v); \
            rbp_rotate_left(a_type, a_field, rbp_mrr_t, rbp_mrr_u); \
            rbp_left_set(a_type, a_field, (a_node), rbp_mrr_u); \
            rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
            rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
        } else { \
            rbp_color_set(a_type, a_field, rbp_mrr_t, \
              rbp_red_get(a_type, a_field, (a_node))); \
            rbp_red_set(a_type, a_field, rbp_mrr_u); \
            rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
            rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
        } \
        rbp_red_set(a_type, a_field, (a_node)); \
    } else { \
        rbp_red_set(a_type, a_field, rbp_mrr_t); \
        rbp_mrr_t = rbp_left_get(a_type, a_field, rbp_mrr_t); \
        if (rbp_red_get(a_type, a_field, rbp_mrr_t)) { \
            rbp_black_set(a_type, a_field, rbp_mrr_t); \
            rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
            rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
        } else { \
            rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
        } \
    } \
} while (0)
394 | |
/*
 * Public: insert a_node into the tree.  a_node must not already be in the
 * tree (the search asserts that no equal node exists).  A stack-allocated
 * pseudo-root (rbp_i_s) lets the real root be treated like any other left
 * child, avoiding special cases.  Single top-down pass: 4-nodes are split
 * on the way down so that the insertion at the bottom cannot propagate
 * back up.
 */
#define rb_insert(a_type, a_field, a_cmp, a_tree, a_node) do { \
    a_type rbp_i_s; \
    a_type *rbp_i_g, *rbp_i_p, *rbp_i_c, *rbp_i_t, *rbp_i_u; \
    int rbp_i_cmp = 0; \
    rbp_i_g = &(a_tree)->rbt_nil; \
    rbp_left_set(a_type, a_field, &rbp_i_s, (a_tree)->rbt_root); \
    rbp_right_set(a_type, a_field, &rbp_i_s, &(a_tree)->rbt_nil); \
    rbp_black_set(a_type, a_field, &rbp_i_s); \
    rbp_i_p = &rbp_i_s; \
    rbp_i_c = (a_tree)->rbt_root; \
    /* Iteratively search down the tree for the insertion point, */\
    /* splitting 4-nodes as they are encountered.  At the end of each */\
    /* iteration, rbp_i_g->rbp_i_p->rbp_i_c is a 3-level path down */\
    /* the tree, assuming a sufficiently deep tree. */\
    while (rbp_i_c != &(a_tree)->rbt_nil) { \
        rbp_i_t = rbp_left_get(a_type, a_field, rbp_i_c); \
        rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t); \
        if (rbp_red_get(a_type, a_field, rbp_i_t) \
          && rbp_red_get(a_type, a_field, rbp_i_u)) { \
            /* rbp_i_c is the top of a logical 4-node, so split it. */\
            /* This iteration does not move down the tree, due to the */\
            /* disruptiveness of node splitting. */\
            /* */\
            /* Rotate right. */\
            rbp_rotate_right(a_type, a_field, rbp_i_c, rbp_i_t); \
            /* Pass red links up one level. */\
            rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t); \
            rbp_black_set(a_type, a_field, rbp_i_u); \
            if (rbp_left_get(a_type, a_field, rbp_i_p) == rbp_i_c) { \
                rbp_left_set(a_type, a_field, rbp_i_p, rbp_i_t); \
                rbp_i_c = rbp_i_t; \
            } else { \
                /* rbp_i_c was the right child of rbp_i_p, so rotate */\
                /* left in order to maintain the left-leaning */\
                /* invariant. */\
                assert(rbp_right_get(a_type, a_field, rbp_i_p) \
                  == rbp_i_c); \
                rbp_right_set(a_type, a_field, rbp_i_p, rbp_i_t); \
                rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_u); \
                if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) {\
                    rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_u); \
                } else { \
                    assert(rbp_right_get(a_type, a_field, rbp_i_g) \
                      == rbp_i_p); \
                    rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_u); \
                } \
                rbp_i_p = rbp_i_u; \
                rbp_i_cmp = (a_cmp)((a_node), rbp_i_p); \
                if (rbp_i_cmp < 0) { \
                    rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_p); \
                } else { \
                    assert(rbp_i_cmp > 0); \
                    rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_p); \
                } \
                continue; \
            } \
        } \
        rbp_i_g = rbp_i_p; \
        rbp_i_p = rbp_i_c; \
        rbp_i_cmp = (a_cmp)((a_node), rbp_i_c); \
        if (rbp_i_cmp < 0) { \
            rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_c); \
        } else { \
            assert(rbp_i_cmp > 0); \
            rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_c); \
        } \
    } \
    /* rbp_i_p now refers to the node under which to insert. */\
    rbp_node_new(a_type, a_field, a_tree, (a_node)); \
    if (rbp_i_cmp > 0) { \
        rbp_right_set(a_type, a_field, rbp_i_p, (a_node)); \
        rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_t); \
        if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) { \
            rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_t); \
        } else if (rbp_right_get(a_type, a_field, rbp_i_g) == rbp_i_p) {\
            rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_t); \
        } \
    } else { \
        rbp_left_set(a_type, a_field, rbp_i_p, (a_node)); \
    } \
    /* Update the root and make sure that it is black. */\
    (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_i_s); \
    rbp_black_set(a_type, a_field, (a_tree)->rbt_root); \
} while (0)
479 | |
/*
 * Public: remove a_node from the tree.  a_node must actually be in the
 * tree (asserted when the search reaches it).  Single top-down pass using
 * the same pseudo-root trick as rb_insert(); if a_node is internal it is
 * replaced by its in-order successor, which is unlinked at the bottom.
 */
#define rb_remove(a_type, a_field, a_cmp, a_tree, a_node) do { \
    a_type rbp_r_s; \
    a_type *rbp_r_p, *rbp_r_c, *rbp_r_xp, *rbp_r_t, *rbp_r_u; \
    int rbp_r_cmp; \
    rbp_left_set(a_type, a_field, &rbp_r_s, (a_tree)->rbt_root); \
    rbp_right_set(a_type, a_field, &rbp_r_s, &(a_tree)->rbt_nil); \
    rbp_black_set(a_type, a_field, &rbp_r_s); \
    rbp_r_p = &rbp_r_s; \
    rbp_r_c = (a_tree)->rbt_root; \
    rbp_r_xp = &(a_tree)->rbt_nil; \
    /* Iterate down the tree, but always transform 2-nodes to 3- or */\
    /* 4-nodes in order to maintain the invariant that the current */\
    /* node is not a 2-node.  This allows simple deletion once a leaf */\
    /* is reached.  Handle the root specially though, since there may */\
    /* be no way to convert it from a 2-node to a 3-node. */\
    rbp_r_cmp = (a_cmp)((a_node), rbp_r_c); \
    if (rbp_r_cmp < 0) { \
        rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
        rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
        if (rbp_red_get(a_type, a_field, rbp_r_t) == false \
          && rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
            /* Apply standard transform to prepare for left move. */\
            rbp_move_red_left(a_type, a_field, rbp_r_c, rbp_r_t); \
            rbp_black_set(a_type, a_field, rbp_r_t); \
            rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
            rbp_r_c = rbp_r_t; \
        } else { \
            /* Move left. */\
            rbp_r_p = rbp_r_c; \
            rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c); \
        } \
    } else { \
        if (rbp_r_cmp == 0) { \
            assert((a_node) == rbp_r_c); \
            if (rbp_right_get(a_type, a_field, rbp_r_c) \
              == &(a_tree)->rbt_nil) { \
                /* Delete root node (which is also a leaf node). */\
                if (rbp_left_get(a_type, a_field, rbp_r_c) \
                  != &(a_tree)->rbt_nil) { \
                    rbp_lean_right(a_type, a_field, rbp_r_c, rbp_r_t); \
                    rbp_right_set(a_type, a_field, rbp_r_t, \
                      &(a_tree)->rbt_nil); \
                } else { \
                    rbp_r_t = &(a_tree)->rbt_nil; \
                } \
                rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
            } else { \
                /* This is the node we want to delete, but we will */\
                /* instead swap it with its successor and delete the */\
                /* successor.  Record enough information to do the */\
                /* swap later.  rbp_r_xp is the a_node's parent. */\
                rbp_r_xp = rbp_r_p; \
                rbp_r_cmp = 1; /* Note that deletion is incomplete. */\
            } \
        } \
        if (rbp_r_cmp == 1) { \
            if (rbp_red_get(a_type, a_field, rbp_left_get(a_type, \
              a_field, rbp_right_get(a_type, a_field, rbp_r_c))) \
              == false) { \
                rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
                if (rbp_red_get(a_type, a_field, rbp_r_t)) { \
                    /* Standard transform. */\
                    rbp_move_red_right(a_type, a_field, rbp_r_c, \
                      rbp_r_t); \
                } else { \
                    /* Root-specific transform. */\
                    rbp_red_set(a_type, a_field, rbp_r_c); \
                    rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
                    if (rbp_red_get(a_type, a_field, rbp_r_u)) { \
                        rbp_black_set(a_type, a_field, rbp_r_u); \
                        rbp_rotate_right(a_type, a_field, rbp_r_c, \
                          rbp_r_t); \
                        rbp_rotate_left(a_type, a_field, rbp_r_c, \
                          rbp_r_u); \
                        rbp_right_set(a_type, a_field, rbp_r_t, \
                          rbp_r_u); \
                    } else { \
                        rbp_red_set(a_type, a_field, rbp_r_t); \
                        rbp_rotate_left(a_type, a_field, rbp_r_c, \
                          rbp_r_t); \
                    } \
                } \
                rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
                rbp_r_c = rbp_r_t; \
            } else { \
                /* Move right. */\
                rbp_r_p = rbp_r_c; \
                rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c); \
            } \
        } \
    } \
    if (rbp_r_cmp != 0) { \
        while (true) { \
            assert(rbp_r_p != &(a_tree)->rbt_nil); \
            rbp_r_cmp = (a_cmp)((a_node), rbp_r_c); \
            if (rbp_r_cmp < 0) { \
                rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
                if (rbp_r_t == &(a_tree)->rbt_nil) { \
                    /* rbp_r_c now refers to the successor node to */\
                    /* relocate, and rbp_r_xp/a_node refer to the */\
                    /* context for the relocation. */\
                    if (rbp_left_get(a_type, a_field, rbp_r_xp) \
                      == (a_node)) { \
                        rbp_left_set(a_type, a_field, rbp_r_xp, \
                          rbp_r_c); \
                    } else { \
                        assert(rbp_right_get(a_type, a_field, \
                          rbp_r_xp) == (a_node)); \
                        rbp_right_set(a_type, a_field, rbp_r_xp, \
                          rbp_r_c); \
                    } \
                    rbp_left_set(a_type, a_field, rbp_r_c, \
                      rbp_left_get(a_type, a_field, (a_node))); \
                    rbp_right_set(a_type, a_field, rbp_r_c, \
                      rbp_right_get(a_type, a_field, (a_node))); \
                    rbp_color_set(a_type, a_field, rbp_r_c, \
                      rbp_red_get(a_type, a_field, (a_node))); \
                    if (rbp_left_get(a_type, a_field, rbp_r_p) \
                      == rbp_r_c) { \
                        rbp_left_set(a_type, a_field, rbp_r_p, \
                          &(a_tree)->rbt_nil); \
                    } else { \
                        assert(rbp_right_get(a_type, a_field, rbp_r_p) \
                          == rbp_r_c); \
                        rbp_right_set(a_type, a_field, rbp_r_p, \
                          &(a_tree)->rbt_nil); \
                    } \
                    break; \
                } \
                rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
                if (rbp_red_get(a_type, a_field, rbp_r_t) == false \
                  && rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
                    rbp_move_red_left(a_type, a_field, rbp_r_c, \
                      rbp_r_t); \
                    if (rbp_left_get(a_type, a_field, rbp_r_p) \
                      == rbp_r_c) { \
                        rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);\
                    } else { \
                        rbp_right_set(a_type, a_field, rbp_r_p, \
                          rbp_r_t); \
                    } \
                    rbp_r_c = rbp_r_t; \
                } else { \
                    rbp_r_p = rbp_r_c; \
                    rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c); \
                } \
            } else { \
                /* Check whether to delete this node (it has to be */\
                /* the correct node and a leaf node). */\
                if (rbp_r_cmp == 0) { \
                    assert((a_node) == rbp_r_c); \
                    if (rbp_right_get(a_type, a_field, rbp_r_c) \
                      == &(a_tree)->rbt_nil) { \
                        /* Delete leaf node. */\
                        if (rbp_left_get(a_type, a_field, rbp_r_c) \
                          != &(a_tree)->rbt_nil) { \
                            rbp_lean_right(a_type, a_field, rbp_r_c, \
                              rbp_r_t); \
                            rbp_right_set(a_type, a_field, rbp_r_t, \
                              &(a_tree)->rbt_nil); \
                        } else { \
                            rbp_r_t = &(a_tree)->rbt_nil; \
                        } \
                        if (rbp_left_get(a_type, a_field, rbp_r_p) \
                          == rbp_r_c) { \
                            rbp_left_set(a_type, a_field, rbp_r_p, \
                              rbp_r_t); \
                        } else { \
                            rbp_right_set(a_type, a_field, rbp_r_p, \
                              rbp_r_t); \
                        } \
                        break; \
                    } else { \
                        /* This is the node we want to delete, but we */\
                        /* will instead swap it with its successor */\
                        /* and delete the successor.  Record enough */\
                        /* information to do the swap later. */\
                        /* rbp_r_xp is a_node's parent. */\
                        rbp_r_xp = rbp_r_p; \
                    } \
                } \
                rbp_r_t = rbp_right_get(a_type, a_field, rbp_r_c); \
                rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
                if (rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
                    rbp_move_red_right(a_type, a_field, rbp_r_c, \
                      rbp_r_t); \
                    if (rbp_left_get(a_type, a_field, rbp_r_p) \
                      == rbp_r_c) { \
                        rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);\
                    } else { \
                        rbp_right_set(a_type, a_field, rbp_r_p, \
                          rbp_r_t); \
                    } \
                    rbp_r_c = rbp_r_t; \
                } else { \
                    rbp_r_p = rbp_r_c; \
                    rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c); \
                } \
            } \
        } \
    } \
    /* Update root. */\
    (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_r_s); \
} while (0)
684 | |
/*
 * The rb_wrap() macro provides a convenient way to wrap functions around the
 * cpp macros.  The main benefits of wrapping are that 1) repeated macro
 * expansion can cause code bloat, especially for rb_{insert,remove}(), and
 * 2) type, linkage, comparison functions, etc. need not be specified at every
 * call point.
 */
#define rb_wrap(a_attr, a_prefix, a_tree_type, a_type, a_field, a_cmp) \
a_attr void \
a_prefix##new(a_tree_type *tree) { \
    rb_new(a_type, a_field, tree); \
} \
a_attr a_type * \
a_prefix##first(a_tree_type *tree) { \
    a_type *ret; \
    rb_first(a_type, a_field, tree, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##last(a_tree_type *tree) { \
    a_type *ret; \
    rb_last(a_type, a_field, tree, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##next(a_tree_type *tree, a_type *node) { \
    a_type *ret; \
    rb_next(a_type, a_field, a_cmp, tree, node, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##prev(a_tree_type *tree, a_type *node) { \
    a_type *ret; \
    rb_prev(a_type, a_field, a_cmp, tree, node, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##search(a_tree_type *tree, a_type *key) { \
    a_type *ret; \
    rb_search(a_type, a_field, a_cmp, tree, key, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##nsearch(a_tree_type *tree, a_type *key) { \
    a_type *ret; \
    rb_nsearch(a_type, a_field, a_cmp, tree, key, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##psearch(a_tree_type *tree, a_type *key) { \
    a_type *ret; \
    rb_psearch(a_type, a_field, a_cmp, tree, key, ret); \
    return (ret); \
} \
a_attr void \
a_prefix##insert(a_tree_type *tree, a_type *node) { \
    rb_insert(a_type, a_field, a_cmp, tree, node); \
} \
a_attr void \
a_prefix##remove(a_tree_type *tree, a_type *node) { \
    rb_remove(a_type, a_field, a_cmp, tree, node); \
}
748 | |
749 /* | |
750 * The iterators simulate recursion via an array of pointers that store the | |
751 * current path. This is critical to performance, since a series of calls to | |
752 * rb_{next,prev}() would require time proportional to (n lg n), whereas this | |
753 * implementation only requires time proportional to (n). | |
754 * | |
755 * Since the iterators cache a path down the tree, any tree modification may | |
756 * cause the cached path to become invalid. In order to continue iteration, | |
757 * use something like the following sequence: | |
758 * | |
759 * { | |
760 * a_type *node, *tnode; | |
761 * | |
762 * rb_foreach_begin(a_type, a_field, a_tree, node) { | |
763 * ... | |
764 * rb_next(a_type, a_field, a_cmp, a_tree, node, tnode); | |
765 * rb_remove(a_type, a_field, a_cmp, a_tree, node); | |
766 * rb_foreach_next(a_type, a_field, a_cmp, a_tree, tnode); | |
767 * ... | |
768 * } rb_foreach_end(a_type, a_field, a_tree, node) | |
769 * } | |
770 * | |
771 * Note that this idiom is not advised if every iteration modifies the tree, | |
772 * since in that case there is no algorithmic complexity improvement over a | |
773 * series of rb_{next,prev}() calls, thus making the setup overhead wasted | |
774 * effort. | |
775 */ | |
776 | |
#ifdef RB_NO_C99_VARARRAYS
/*
 * Avoid using variable-length arrays, at the cost of using more stack space.
 * Size the path arrays such that they are always large enough, even if a
 * tree consumes all of memory.  Since each node must contain a minimum of
 * two pointers, there can never be more nodes than:
 *
 *   1 << ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1))
 *
 * Since the depth of a tree is limited to 3*lg(#nodes), the maximum depth
 * is:
 *
 *   (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
 *
 * This works out to a maximum depth of 87 and 180 for 32- and 64-bit
 * systems, respectively (approximately 348 and 1440 bytes, respectively).
 */
/* Fixed-size path: the height "computation" expands to nothing, and the */
/* height macros are compile-time worst-case constants.                  */
#  define rbp_compute_f_height(a_type, a_field, a_tree)
#  define rbp_f_height	(3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
#  define rbp_compute_fr_height(a_type, a_field, a_tree)
#  define rbp_fr_height	(3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
#else
/* C99 VLA path: rbp_f_height/rbp_fr_height become runtime locals sized */
/* from the tree's actual black height via rbp_black_height().          */
#  define rbp_compute_f_height(a_type, a_field, a_tree) \
    /* Compute the maximum possible tree depth (3X the black height). */\
    unsigned rbp_f_height; \
    rbp_black_height(a_type, a_field, a_tree, rbp_f_height); \
    rbp_f_height *= 3;
#  define rbp_compute_fr_height(a_type, a_field, a_tree) \
    /* Compute the maximum possible tree depth (3X the black height). */\
    unsigned rbp_fr_height; \
    rbp_black_height(a_type, a_field, a_tree, rbp_fr_height); \
    rbp_fr_height *= 3;
#endif
810 | |
/*
 * rb_foreach_begin(): Open an in-order (ascending) iteration over a_tree,
 * binding each visited node to a_var.  Must be closed by a matching
 * rb_foreach_end(); the pair expands to nested blocks around a while loop,
 * so the caller's code between the two macros is the loop body.  The
 * descent path is cached on the stack (rbp_f_path), which is why a full
 * traversal costs O(n) rather than the O(n lg n) of repeated next() calls
 * (see the iterator commentary above).
 */
#define rb_foreach_begin(a_type, a_field, a_tree, a_var) { \
    rbp_compute_f_height(a_type, a_field, a_tree) \
    { \
	/* Initialize the path to contain the left spine. */\
	a_type *rbp_f_path[rbp_f_height]; \
	a_type *rbp_f_node; \
	/* rbp_f_synced is set by rb_foreach_next() to tell */\
	/* rb_foreach_end() not to advance past the re-synced node. */\
	bool rbp_f_synced = false; \
	unsigned rbp_f_depth = 0; \
	if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
	    rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root; \
	    rbp_f_depth++; \
	    while ((rbp_f_node = rbp_left_get(a_type, a_field, \
	      rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
		rbp_f_path[rbp_f_depth] = rbp_f_node; \
		rbp_f_depth++; \
	    } \
	} \
	/* While the path is non-empty, iterate. */\
	while (rbp_f_depth > 0) { \
	    (a_var) = rbp_f_path[rbp_f_depth-1];
831 | |
/*
 * rb_foreach_next(): Only use if modifying the tree during iteration.
 * Re-synchronizes a forward iteration after a tree modification has
 * invalidated the cached path: rebuilds rbp_f_path from the root down to
 * a_node (which must be present in the tree — see the assert), then sets
 * rbp_f_synced so the enclosing rb_foreach_end() resumes at a_node rather
 * than advancing to its successor.  Passing a NULL a_node leaves the path
 * empty, which terminates the iteration at the top of the loop.
 */
#define rb_foreach_next(a_type, a_field, a_cmp, a_tree, a_node) \
    /* Re-initialize the path to contain the path to a_node. */\
    rbp_f_depth = 0; \
    if (a_node != NULL) { \
	if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
	    rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root; \
	    rbp_f_depth++; \
	    rbp_f_node = rbp_f_path[0]; \
	    while (true) { \
		int rbp_f_cmp = (a_cmp)((a_node), \
		  rbp_f_path[rbp_f_depth-1]); \
		if (rbp_f_cmp < 0) { \
		    rbp_f_node = rbp_left_get(a_type, a_field, \
		      rbp_f_path[rbp_f_depth-1]); \
		} else if (rbp_f_cmp > 0) { \
		    rbp_f_node = rbp_right_get(a_type, a_field, \
		      rbp_f_path[rbp_f_depth-1]); \
		} else { \
		    break; \
		} \
		/* Hitting nil means a_node is not in the tree. */\
		assert(rbp_f_node != &(a_tree)->rbt_nil); \
		rbp_f_path[rbp_f_depth] = rbp_f_node; \
		rbp_f_depth++; \
	    } \
	} \
    } \
    rbp_f_synced = true;
860 | |
/*
 * rb_foreach_end(): Close an iteration opened by rb_foreach_begin().
 * Advances the cached path to the in-order successor of the current node,
 * unless rb_foreach_next() just re-synced the path, in which case the
 * current path top is visited as-is on the next pass.  An empty path
 * (depth 0) ends the loop.
 */
#define rb_foreach_end(a_type, a_field, a_tree, a_var) \
	    if (rbp_f_synced) { \
		rbp_f_synced = false; \
		continue; \
	    } \
	    /* Find the successor. */\
	    if ((rbp_f_node = rbp_right_get(a_type, a_field, \
	      rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
	        /* The successor is the left-most node in the right */\
		/* subtree. */\
		rbp_f_path[rbp_f_depth] = rbp_f_node; \
		rbp_f_depth++; \
		while ((rbp_f_node = rbp_left_get(a_type, a_field, \
		  rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
		    rbp_f_path[rbp_f_depth] = rbp_f_node; \
		    rbp_f_depth++; \
		} \
	    } else { \
		/* The successor is above the current node.  Unwind */\
		/* until a left-leaning edge is removed from the */\
		/* path, or the path is empty. */\
		for (rbp_f_depth--; rbp_f_depth > 0; rbp_f_depth--) { \
		    if (rbp_left_get(a_type, a_field, \
		      rbp_f_path[rbp_f_depth-1]) \
		      == rbp_f_path[rbp_f_depth]) { \
			break; \
		    } \
		} \
	    } \
	} \
    } \
}
893 | |
/*
 * rb_foreach_reverse_begin(): Open a reverse (descending) iteration over
 * a_tree, binding each visited node to a_var.  Mirror image of
 * rb_foreach_begin(): the path is seeded with the right spine, and must
 * be closed by a matching rb_foreach_reverse_end().
 */
#define rb_foreach_reverse_begin(a_type, a_field, a_tree, a_var) { \
    rbp_compute_fr_height(a_type, a_field, a_tree) \
    { \
	/* Initialize the path to contain the right spine. */\
	a_type *rbp_fr_path[rbp_fr_height]; \
	a_type *rbp_fr_node; \
	/* rbp_fr_synced is set by rb_foreach_reverse_prev() to tell */\
	/* rb_foreach_reverse_end() not to advance past the re-synced */\
	/* node. */\
	bool rbp_fr_synced = false; \
	unsigned rbp_fr_depth = 0; \
	if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
	    rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \
	    rbp_fr_depth++; \
	    while ((rbp_fr_node = rbp_right_get(a_type, a_field, \
	      rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
		rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
		rbp_fr_depth++; \
	    } \
	} \
	/* While the path is non-empty, iterate. */\
	while (rbp_fr_depth > 0) { \
	    (a_var) = rbp_fr_path[rbp_fr_depth-1];
914 | |
/*
 * rb_foreach_reverse_prev(): Only use if modifying the tree during
 * iteration.  Reverse counterpart of rb_foreach_next(): rebuilds
 * rbp_fr_path from the root down to a_node (which must be present in the
 * tree — see the assert), then sets rbp_fr_synced so the enclosing
 * rb_foreach_reverse_end() resumes at a_node rather than stepping to its
 * predecessor.  Passing a NULL a_node leaves the path empty, which
 * terminates the iteration.
 */
#define rb_foreach_reverse_prev(a_type, a_field, a_cmp, a_tree, a_node) \
    /* Re-initialize the path to contain the path to a_node. */\
    rbp_fr_depth = 0; \
    if (a_node != NULL) { \
	if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
	    rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \
	    rbp_fr_depth++; \
	    rbp_fr_node = rbp_fr_path[0]; \
	    while (true) { \
		int rbp_fr_cmp = (a_cmp)((a_node), \
		  rbp_fr_path[rbp_fr_depth-1]); \
		if (rbp_fr_cmp < 0) { \
		    rbp_fr_node = rbp_left_get(a_type, a_field, \
		      rbp_fr_path[rbp_fr_depth-1]); \
		} else if (rbp_fr_cmp > 0) { \
		    rbp_fr_node = rbp_right_get(a_type, a_field,\
		      rbp_fr_path[rbp_fr_depth-1]); \
		} else { \
		    break; \
		} \
		/* Hitting nil means a_node is not in the tree. */\
		assert(rbp_fr_node != &(a_tree)->rbt_nil); \
		rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
		rbp_fr_depth++; \
	    } \
	} \
    } \
    rbp_fr_synced = true;
943 | |
/*
 * rb_foreach_reverse_end(): Close an iteration opened by
 * rb_foreach_reverse_begin().  Steps the cached path back to the in-order
 * predecessor of the current node, unless rb_foreach_reverse_prev() just
 * re-synced the path, in which case the current path top is visited as-is
 * on the next pass.
 */
#define rb_foreach_reverse_end(a_type, a_field, a_tree, a_var) \
	    if (rbp_fr_synced) { \
		rbp_fr_synced = false; \
		continue; \
	    } \
	    if (rbp_fr_depth == 0) { \
		/* rb_foreach_reverse_prev() was called with a NULL */\
		/* a_node. */\
		break; \
	    } \
	    /* Find the predecessor. */\
	    if ((rbp_fr_node = rbp_left_get(a_type, a_field, \
	      rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
		/* The predecessor is the right-most node in the left */\
		/* subtree. */\
		rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
		rbp_fr_depth++; \
		while ((rbp_fr_node = rbp_right_get(a_type, a_field, \
		  rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) {\
		    rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
		    rbp_fr_depth++; \
		} \
	    } else { \
		/* The predecessor is above the current node.  Unwind */\
		/* until a right-leaning edge is removed from the */\
		/* path, or the path is empty. */\
		for (rbp_fr_depth--; rbp_fr_depth > 0; rbp_fr_depth--) {\
		    if (rbp_right_get(a_type, a_field, \
		      rbp_fr_path[rbp_fr_depth-1]) \
		      == rbp_fr_path[rbp_fr_depth]) { \
			break; \
		    } \
		} \
	    } \
	} \
    } \
}
981 | |
982 #endif /* RB_H_ */ |