1 //! AVL trees with a private allocation pool.
2 //!
3 //! AVL tree internals are public, so that backtracking.rs can do custom
4 //! traversals of the tree as it wishes.
5
6 use smallvec::SmallVec;
7 use std::cmp::Ordering;
8
9 //=============================================================================
10 // Data structures for AVLTree
11
// Balance/allocation tag for a pool entry. This is a fieldless enum with
// total equality, so it additionally derives `Copy`, `Eq` (equality is total)
// and `Debug` (for diagnostics) — all backward-compatible with the original
// `Clone + PartialEq`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AVLTag {
    Free,  // This pool entry is not in use
    None,  // This pool entry is in use. Neither subtree is higher.
    Left,  // This pool entry is in use. The left subtree is higher.
    Right, // This pool entry is in use. The right subtree is higher.
}
19
// A single tree node. Nodes refer to each other by `u32` index into the
// owning `AVLTree`'s `pool`, not by pointer.
#[derive(Clone)]
pub struct AVLNode<T> {
    // Balance tag, or `AVLTag::Free` when the node is on the freelist.
    pub tag: AVLTag,
    // Pool index of the left child, or AVL_NULL. When the node is free, this
    // doubles as the freelist link (see `AVLTree::freelist`).
    pub left: u32,
    // Pool index of the right child, or AVL_NULL.
    pub right: u32,
    // The payload.
    pub item: T,
}
27 impl<T> AVLNode<T> {
new(tag: AVLTag, left: u32, right: u32, item: T) -> Self28 fn new(tag: AVLTag, left: u32, right: u32, item: T) -> Self {
29 Self {
30 tag,
31 left,
32 right,
33 item,
34 }
35 }
36 }
37
// Sentinel pool index meaning "no node" — the null pointer of this scheme.
pub const AVL_NULL: u32 = 0xFFFF_FFFF;
39
// An AVL tree whose nodes live in a private allocation pool and link to each
// other by `u32` index. Internals are public so that callers can do custom
// traversals.
pub struct AVLTree<T> {
    // The storage area. There can be at most 2^32-2 entries, since AVL_NULL
    // (== 2^32-1) is used to mean "the null pointer".
    pub pool: Vec<AVLNode<T>>,
    // A default value for the stored item. We don't care what this is;
    // unfortunately Rust forces us to have one so that additions to the free
    // list will be fully initialised.
    default: T,
    // The freelist head. This is a list of available entries. Each item on
    // the freelist must have its .tag be AVLTag::Free, and will use its .left
    // field as the link to the next freelist item. A freelist link value of
    // AVL_NULL denotes the end of the list. If `freelist` itself is AVL_NULL
    // then the list is empty.
    freelist: u32,
    // Last but not least, the root node.
    pub root: u32,
}
57
58 //=============================================================================
59 // Storage management functions for AVLTree
60
61 impl<T: Clone> AVLTree<T> {
62 // Create a new tree and its associated storage pool. This requires knowing
63 // the default item value.
new(default: T) -> Self64 pub fn new(default: T) -> Self {
65 // Pre-allocate a few entries so as to save a few reallocs later, on the
66 // assumption that most trees will get quite large.
67 let pool = Vec::with_capacity(16);
68 let freelist = AVL_NULL;
69 let root = AVL_NULL;
70 Self {
71 pool,
72 default,
73 freelist,
74 root,
75 }
76 }
77
78 // Private function: free a tree node and put it back on the storage pool's
79 // freelist.
free(&mut self, index: u32)80 fn free(&mut self, index: u32) {
81 assert!(index != AVL_NULL);
82 assert!(self.pool[index as usize].tag != AVLTag::Free);
83 self.pool[index as usize] =
84 AVLNode::new(AVLTag::Free, self.freelist, AVL_NULL, self.default.clone());
85 self.freelist = index;
86 }
87
88 // Private function: allocate a tree node from the storage pool, resizing
89 // the pool if necessary. This will decline to expand the tree past about
90 // 1.75 billion items.
alloc(&mut self) -> u3291 fn alloc(&mut self) -> u32 {
92 // Check to see if the freelist is empty, and if so allocate a bunch more
93 // slots.
94 if self.freelist == AVL_NULL {
95 let start = self.pool.len();
96 let fill_item = AVLNode::new(AVLTag::Free, AVL_NULL, AVL_NULL, self.default.clone());
97 // What happens if this OOMs? At least guard against u32 overflow by
98 // doing this:
99 if start >= 0x7000_0000 {
100 // 1.75 billion elements in the tree should be enough for any
101 // reasonable use of this register allocator.
102 panic!("AVLTree<T>::alloc: too many items");
103 }
104 self.pool.resize(2 * start + 2, fill_item);
105 let end_plus_1 = self.pool.len();
106 debug_assert!(end_plus_1 >= 2);
107 self.pool[end_plus_1 - 1].left = self.freelist;
108 let mut i = end_plus_1 - 2;
109 while i >= start {
110 // The entry is already marked as free, but we must set the link.
111 self.pool[i].left = i as u32 + 1;
112 if i == 0 {
113 break;
114 }
115 i -= 1;
116 }
117 self.freelist = start as u32;
118 debug_assert!(self.freelist != AVL_NULL);
119 }
120 // And now allocate.
121 let new = self.freelist;
122 assert!(self.pool[new as usize].tag == AVLTag::Free);
123 // The caller is responsible for filling in the entry. But at least set
124 // the tag to non-Free, for sanity.
125 self.pool[new as usize].tag = AVLTag::None;
126 self.freelist = self.pool[new as usize].left;
127 new
128 }
129 }
130
131 //=============================================================================
132 // Tree-wrangling machinery for AVLTree (private)
133
134 // For the public interface, see below.
135
136 // The functions 'insert' and 'delete', and all supporting functions reachable
137 // from them, are derived from a public domain implementation by Georg Kraml.
138 // Unfortunately the relevant web site is long gone, and can only be found on
139 // the Wayback Machine.
140 //
141 // https://web.archive.org/web/20010419134337/
142 // http://www.kraml.at/georg/avltree/index.html
143 //
144 // https://web.archive.org/web/20030926063347/
145 // http://www.kraml.at/georg/avltree/avlmonolithic.c
146 //
147 // https://web.archive.org/web/20030401124003/http://www.kraml.at/src/howto/
148 //
149 // For relicensing clearance, see Mozilla bug 1620332, at
150 // https://bugzilla.mozilla.org/show_bug.cgi?id=1620332.
151
// Did a given insertion/deletion succeed, and what do we do next?
#[derive(Clone, Copy, PartialEq)]
enum AVLRes {
    // The requested item was not found; nothing was changed.
    Error,
    // Operation done; the subtree's height is unchanged, so ancestors need
    // no rebalancing.
    OK,
    // Operation done, but the subtree's height changed; the caller may need
    // to keep rebalancing upwards.
    Balance,
}
159
160 impl<T: Clone + PartialOrd> AVLTree<T> {
161 // Private function: rotleft: perform counterclockwise rotation
162 // Takes the root of the tree to rotate, returns the new root
rotleft(&mut self, old_root: u32) -> u32163 fn rotleft(&mut self, old_root: u32) -> u32 {
164 let new_root = self.pool[old_root as usize].right;
165 self.pool[old_root as usize].right = self.pool[new_root as usize].left;
166 self.pool[new_root as usize].left = old_root;
167 new_root
168 }
169
170 // Private function: rotright: perform clockwise rotation
171 // Takes the root of the tree to rotate, returns the new root
rotright(&mut self, old_root: u32) -> u32172 fn rotright(&mut self, old_root: u32) -> u32 {
173 let new_root = self.pool[old_root as usize].left;
174 self.pool[old_root as usize].left = self.pool[new_root as usize].right;
175 self.pool[new_root as usize].right = old_root;
176 new_root
177 }
178
179 // Private function: leftgrown: helper function for `insert`
180 //
181 // Parameters:
182 //
183 // root Root of a tree. This node's left
184 // subtree has just grown due to item insertion; its
185 // "tag" flag needs adjustment, and the local tree
186 // (the subtree of which this node is the root node) may
187 // have become unbalanced.
188 //
189 // Return values:
190 //
191 // The new root of the subtree, plus either:
192 //
193 // OK The local tree could be rebalanced or was balanced
194 // from the start. The parent activations of the avlinsert
195 // activation that called this function may assume the
196 // entire tree is valid.
197 // or
198 // BALANCE The local tree was balanced, but has grown in height.
199 // Do not assume the entire tree is valid.
200 //
201 // This function has been split into two pieces: `leftgrown`, which is small and hot, and is
202 // marked always-inline, and `leftgrown_left`, which handles a more complex and less
203 // frequent case, and is marked never-inline. The intent is to have the common case always
204 // inlined without having to deal with the extra register pressure from inlining the less
205 // frequent code. The dual function `rightgrown` is split similarly.
206 #[inline(never)]
leftgrown_left(&mut self, mut root: u32) -> (u32, AVLRes)207 fn leftgrown_left(&mut self, mut root: u32) -> (u32, AVLRes) {
208 if self.pool[self.pool[root as usize].left as usize].tag == AVLTag::Left {
209 self.pool[root as usize].tag = AVLTag::None;
210 let t = self.pool[root as usize].left;
211 self.pool[t as usize].tag = AVLTag::None;
212 root = self.rotright(root);
213 } else {
214 match self.pool[self.pool[self.pool[root as usize].left as usize].right as usize].tag {
215 AVLTag::Left => {
216 self.pool[root as usize].tag = AVLTag::Right;
217 let t = self.pool[root as usize].left;
218 self.pool[t as usize].tag = AVLTag::None;
219 }
220 AVLTag::Right => {
221 self.pool[root as usize].tag = AVLTag::None;
222 let t = self.pool[root as usize].left;
223 self.pool[t as usize].tag = AVLTag::Left;
224 }
225 AVLTag::None => {
226 self.pool[root as usize].tag = AVLTag::None;
227 let t = self.pool[root as usize].left;
228 self.pool[t as usize].tag = AVLTag::None;
229 }
230 AVLTag::Free => panic!("AVLTree::leftgrown_left: unallocated node in tree"),
231 }
232 let t = self.pool[self.pool[root as usize].left as usize].right;
233 self.pool[t as usize].tag = AVLTag::None;
234 self.pool[root as usize].left = self.rotleft(self.pool[root as usize].left);
235 root = self.rotright(root);
236 }
237 return (root, AVLRes::OK);
238 }
239
240 #[inline(always)]
leftgrown(&mut self, root: u32) -> (u32, AVLRes)241 fn leftgrown(&mut self, root: u32) -> (u32, AVLRes) {
242 let root_node = &mut self.pool[root as usize];
243 match root_node.tag {
244 AVLTag::Left => self.leftgrown_left(root),
245 AVLTag::Right => {
246 root_node.tag = AVLTag::None;
247 return (root, AVLRes::OK);
248 }
249 AVLTag::None => {
250 root_node.tag = AVLTag::Left;
251 return (root, AVLRes::Balance);
252 }
253 AVLTag::Free => panic!("AVLTree::leftgrown: unallocated node in tree"),
254 }
255 }
256
257 // Private function: rightgrown: helper function for `insert`
258 //
259 // See leftgrown for details.
260 #[inline(never)]
rightgrown_right(&mut self, mut root: u32) -> (u32, AVLRes)261 fn rightgrown_right(&mut self, mut root: u32) -> (u32, AVLRes) {
262 if self.pool[self.pool[root as usize].right as usize].tag == AVLTag::Right {
263 self.pool[root as usize].tag = AVLTag::None;
264 let t = self.pool[root as usize].right as usize;
265 self.pool[t].tag = AVLTag::None;
266 root = self.rotleft(root);
267 } else {
268 match self.pool[self.pool[self.pool[root as usize].right as usize].left as usize].tag {
269 AVLTag::Right => {
270 self.pool[root as usize].tag = AVLTag::Left;
271 let t = self.pool[root as usize].right;
272 self.pool[t as usize].tag = AVLTag::None;
273 }
274 AVLTag::Left => {
275 self.pool[root as usize].tag = AVLTag::None;
276 let t = self.pool[root as usize].right;
277 self.pool[t as usize].tag = AVLTag::Right;
278 }
279 AVLTag::None => {
280 self.pool[root as usize].tag = AVLTag::None;
281 let t = self.pool[root as usize].right;
282 self.pool[t as usize].tag = AVLTag::None;
283 }
284 AVLTag::Free => panic!("AVLTree::rightgrown_right: unallocated node in tree"),
285 }
286 let t = self.pool[self.pool[root as usize].right as usize].left;
287 self.pool[t as usize].tag = AVLTag::None;
288 self.pool[root as usize].right = self.rotright(self.pool[root as usize].right);
289 root = self.rotleft(root);
290 }
291 return (root, AVLRes::OK);
292 }
293
294 #[inline(always)]
rightgrown(&mut self, root: u32) -> (u32, AVLRes)295 fn rightgrown(&mut self, root: u32) -> (u32, AVLRes) {
296 match self.pool[root as usize].tag {
297 AVLTag::Left => {
298 self.pool[root as usize].tag = AVLTag::None;
299 return (root, AVLRes::OK);
300 }
301 AVLTag::Right => self.rightgrown_right(root),
302 AVLTag::None => {
303 self.pool[root as usize].tag = AVLTag::Right;
304 return (root, AVLRes::Balance);
305 }
306 AVLTag::Free => panic!("AVLTree::rightgrown: unallocated node in tree"),
307 }
308 }
309
    // Private function: insert_wrk: insert a node into the AVL tree
    // (worker function)
    //
    // Parameters:
    //
    //   root     Root of the tree in which to insert `item`.
    //
    //   item     Item to be inserted.
    //
    // Returns AVL_NULL if the value is already in the tree. Otherwise returns the index of the
    // new root (which is obviously always non-AVL_NULL). This is infallible in the sense that,
    // if allocation of a new node fails, it won't return -- `self.alloc()` will panic.
    //
    // This function relies on the fact that any non-AVL_NULL value will have its top bit (bit
    // 31) clear, since that bit is used as a boolean in the `stack`. That property is
    // guaranteed us by `fn alloc`, which ensures that the max number of nodes in the tree is
    // 0x70000000.
    fn insert_wrk<F>(&mut self, mut root: u32, item: T, mb_cmp: Option<&F>) -> u32
    where
        F: Fn(T, T) -> Option<Ordering>,
    {
        // Stack entries are node indices with bit 31 recording "we descended
        // to the LEFT child of this node".
        #[inline(always)]
        fn stack_entry_set_is_left(node: u32) -> u32 {
            node | 0x8000_0000
        }
        #[inline(always)]
        fn stack_entry_get_is_left(ent: u32) -> u32 {
            ent & 0x8000_0000
        }
        #[inline(always)]
        fn stack_entry_get_node(ent: u32) -> u32 {
            ent & 0x7FFF_FFFF
        }

        // The stack will hold a root-leaf path. Give that the max number of elements allowed
        // in the tree is 0x70000000, which is (7/8) * 2^31, and that the max depth is 1.44 *
        // log2(elems), a 64 entry stack should always be sufficient. Hence there should never
        // be any dynamic allocation here. In fact a 48 entry stack would also suffice, but
        // SmallVec doesn't provide that size.
        let mut stack = SmallVec::<[u32; 64]>::new();

        // In the first phase, walk down the tree to find the place where the new node should be
        // inserted. This loop is cloned so as to allow the test on `mb_cmp` to be done just
        // once.
        match mb_cmp {
            None => {
                // Compare directly via PartialOrd.
                while root != AVL_NULL {
                    let cmp_loc_right = &self.pool[root as usize];
                    let cmp_arg_right: T = cmp_loc_right.item.clone();
                    let cmp_arg_left: T = item.clone();
                    debug_assert!(stack_entry_get_is_left(root) == 0);
                    match cmp_arg_left.partial_cmp(&cmp_arg_right) {
                        None => panic!("AVLTree::insert_wrk: unordered elements(1)"),
                        Some(Ordering::Less) => {
                            stack.push(stack_entry_set_is_left(root));
                            root = cmp_loc_right.left;
                        }
                        Some(Ordering::Greater) => {
                            stack.push(root);
                            root = cmp_loc_right.right;
                        }
                        Some(Ordering::Equal) => {
                            // Item is already in the tree.
                            return AVL_NULL;
                        }
                    }
                }
            }
            Some(cmp) => {
                // Compare via the caller-supplied function.
                while root != AVL_NULL {
                    let cmp_loc_right = &self.pool[root as usize];
                    let cmp_arg_right: T = cmp_loc_right.item.clone();
                    let cmp_arg_left: T = item.clone();
                    debug_assert!(stack_entry_get_is_left(root) == 0);
                    match cmp(cmp_arg_left, cmp_arg_right) {
                        None => panic!("AVLTree::insert_wrk: unordered elements(2)"),
                        Some(Ordering::Less) => {
                            stack.push(stack_entry_set_is_left(root));
                            root = cmp_loc_right.left;
                        }
                        Some(Ordering::Greater) => {
                            stack.push(root);
                            root = cmp_loc_right.right;
                        }
                        Some(Ordering::Equal) => {
                            // Item is already in the tree.
                            return AVL_NULL;
                        }
                    }
                }
            }
        }

        // Now allocate the new node.
        debug_assert!(root == AVL_NULL);
        let new_node = self.alloc();
        self.pool[new_node as usize] = AVLNode::new(AVLTag::None, AVL_NULL, AVL_NULL, item.clone());

        // And unwind the stack, back to the root, rebalancing as we go. Once get to a place
        // where the new subtree doesn't need to be rebalanced, we can stop this upward scan,
        // because no nodes above it will need to be rebalanced either.
        let mut curr_node = new_node;
        let mut curr_node_action = AVLRes::Balance;

        while let Some(parent_node_tagged) = stack.pop() {
            let parent_node = stack_entry_get_node(parent_node_tagged);
            if stack_entry_get_is_left(parent_node_tagged) != 0 {
                // We descended left at `parent_node`: reattach and rebalance.
                self.pool[parent_node as usize].left = curr_node;
                if curr_node_action == AVLRes::Balance {
                    let pair = self.leftgrown(parent_node);
                    curr_node = pair.0;
                    curr_node_action = pair.1;
                } else {
                    curr_node = parent_node;
                    break;
                }
            } else {
                // We descended right at `parent_node`: reattach and rebalance.
                self.pool[parent_node as usize].right = curr_node;
                if curr_node_action == AVLRes::Balance {
                    let pair = self.rightgrown(parent_node);
                    curr_node = pair.0;
                    curr_node_action = pair.1;
                } else {
                    curr_node = parent_node;
                    break;
                }
            }
        }

        // If we stopped the upward scan early, the overall root is unchanged
        // and still sits at the bottom of the stack.
        if !stack.is_empty() {
            curr_node = stack_entry_get_node(stack[0]);
        }

        debug_assert!(curr_node != AVL_NULL);
        return curr_node;
    }
446
    // Private function: leftshrunk: helper function for delete and
    // findlowest
    //
    // Parameters:
    //
    //   n        Root of a subtree whose left subtree has just shrunk due to
    //            item removal; its balance tag needs adjustment, and the
    //            local tree (the subtree of which this node is the root) may
    //            have become unbalanced.
    //
    // Return values: the new subtree root, plus:
    //
    //   OK       The parent activation of the delete activation
    //            that called this function may assume the entire
    //            tree is valid.
    //
    //   BALANCE  Do not assume the entire tree is valid.
    fn leftshrunk(&mut self, mut n: u32) -> (u32, AVLRes) {
        match self.pool[n as usize].tag {
            // Was left-heavy: the shrink evens it out, but the subtree as a
            // whole is now shorter, so the caller must keep rebalancing.
            AVLTag::Left => {
                self.pool[n as usize].tag = AVLTag::None;
                return (n, AVLRes::Balance);
            }
            // Was right-heavy: now doubly so; a rotation is required.
            AVLTag::Right => {
                if self.pool[self.pool[n as usize].right as usize].tag == AVLTag::Right {
                    // Right-right: single counterclockwise rotation; height
                    // shrinks, so keep rebalancing.
                    self.pool[n as usize].tag = AVLTag::None;
                    let t = self.pool[n as usize].right;
                    self.pool[t as usize].tag = AVLTag::None;
                    n = self.rotleft(n);
                    return (n, AVLRes::Balance);
                } else if self.pool[self.pool[n as usize].right as usize].tag == AVLTag::None {
                    // Right child balanced: rotate, but the overall height is
                    // unchanged, so the caller can stop (note the deliberate
                    // OK here and the Right/Left tags).
                    self.pool[n as usize].tag = AVLTag::Right;
                    let t = self.pool[n as usize].right;
                    self.pool[t as usize].tag = AVLTag::Left;
                    n = self.rotleft(n);
                    return (n, AVLRes::OK);
                } else {
                    // Right-left: double rotation; the final tags of `n` and
                    // its right child depend on the right child's left child.
                    match self.pool[self.pool[self.pool[n as usize].right as usize].left as usize]
                        .tag
                    {
                        AVLTag::Left => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].right;
                            self.pool[t as usize].tag = AVLTag::Right;
                        }
                        AVLTag::Right => {
                            self.pool[n as usize].tag = AVLTag::Left;
                            let t = self.pool[n as usize].right;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::None => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].right;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Free => {
                            panic!("AVLTree::leftshrunk(1): unallocated node in tree");
                        }
                    }
                    {
                        // The pivot (right child's left child) ends up balanced.
                        let t = self.pool[self.pool[n as usize].right as usize].left;
                        self.pool[t as usize].tag = AVLTag::None;
                    }
                    {
                        let t = self.rotright(self.pool[n as usize].right);
                        self.pool[n as usize].right = t;
                    }
                    n = self.rotleft(n);
                    return (n, AVLRes::Balance);
                }
            }
            // Was balanced: now right-heavy, but height is unchanged.
            AVLTag::None => {
                self.pool[n as usize].tag = AVLTag::Right;
                return (n, AVLRes::OK);
            }
            AVLTag::Free => {
                panic!("AVLTree::leftshrunk(2): unallocated node in tree");
            }
        }
    }
528
    // Private function: rightshrunk: helper function for delete and
    // findhighest
    //
    // Mirror image of `leftshrunk`; see there for details.
    fn rightshrunk(&mut self, mut n: u32) -> (u32, AVLRes) {
        match self.pool[n as usize].tag {
            // Was right-heavy: the shrink evens it out, but the subtree is
            // now shorter, so the caller must keep rebalancing.
            AVLTag::Right => {
                self.pool[n as usize].tag = AVLTag::None;
                return (n, AVLRes::Balance);
            }
            // Was left-heavy: now doubly so; a rotation is required.
            AVLTag::Left => {
                if self.pool[self.pool[n as usize].left as usize].tag == AVLTag::Left {
                    // Left-left: single clockwise rotation; height shrinks,
                    // so keep rebalancing.
                    self.pool[n as usize].tag = AVLTag::None;
                    let t = self.pool[n as usize].left;
                    self.pool[t as usize].tag = AVLTag::None;
                    n = self.rotright(n);
                    return (n, AVLRes::Balance);
                } else if self.pool[self.pool[n as usize].left as usize].tag == AVLTag::None {
                    // Left child balanced: rotate, but the overall height is
                    // unchanged, so the caller can stop (deliberate OK and
                    // Left/Right tags).
                    self.pool[n as usize].tag = AVLTag::Left;
                    let t = self.pool[n as usize].left;
                    self.pool[t as usize].tag = AVLTag::Right;
                    n = self.rotright(n);
                    return (n, AVLRes::OK);
                } else {
                    // Left-right: double rotation; the final tags of `n` and
                    // its left child depend on the left child's right child.
                    match self.pool[self.pool[self.pool[n as usize].left as usize].right as usize]
                        .tag
                    {
                        AVLTag::Left => {
                            self.pool[n as usize].tag = AVLTag::Right;
                            let t = self.pool[n as usize].left;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Right => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].left;
                            self.pool[t as usize].tag = AVLTag::Left;
                        }
                        AVLTag::None => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].left;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Free => {
                            panic!("AVLTree::rightshrunk(1): unallocated node in tree");
                        }
                    }
                    {
                        // The pivot (left child's right child) ends up balanced.
                        let t = self.pool[self.pool[n as usize].left as usize].right;
                        self.pool[t as usize].tag = AVLTag::None;
                    }
                    {
                        let t = self.rotleft(self.pool[n as usize].left);
                        self.pool[n as usize].left = t;
                    }
                    n = self.rotright(n);
                    return (n, AVLRes::Balance);
                }
            }
            // Was balanced: now left-heavy, but height is unchanged.
            AVLTag::None => {
                self.pool[n as usize].tag = AVLTag::Left;
                return (n, AVLRes::OK);
            }
            AVLTag::Free => {
                panic!("AVLTree::rightshrunk(2): unallocated node in tree");
            }
        }
    }
596
597 // Private function: findhighest: replace a node with a subtree's
598 // highest-ranking item.
599 //
600 // Parameters:
601 //
602 // target Pointer to node to be replaced.
603 //
604 // n Address of pointer to subtree.
605 //
606 // res Pointer to variable used to tell the caller whether
607 // further checks are necessary; analog to the return
608 // values of leftgrown and leftshrunk (see there).
609 //
610 // Return values:
611 //
612 // True A node was found; the target node has been replaced.
613 //
614 // False The target node could not be replaced because
615 // the subtree provided was empty.
616 //
findhighest(&mut self, target: u32, mut n: u32) -> Option<(u32, AVLRes)>617 fn findhighest(&mut self, target: u32, mut n: u32) -> Option<(u32, AVLRes)> {
618 if n == AVL_NULL {
619 return None;
620 }
621 let mut res = AVLRes::Balance;
622 if self.pool[n as usize].right != AVL_NULL {
623 let rec = self.findhighest(target, self.pool[n as usize].right);
624 if let Some((new_n_right, new_res)) = rec {
625 self.pool[n as usize].right = new_n_right;
626 res = new_res;
627 if res == AVLRes::Balance {
628 let (new_n, new_res) = self.rightshrunk(n);
629 n = new_n;
630 res = new_res;
631 }
632 return Some((n, res));
633 } else {
634 return None;
635 }
636 }
637 self.pool[target as usize].item = self.pool[n as usize].item.clone();
638 let tmp = n;
639 n = self.pool[n as usize].left;
640 self.free(tmp);
641 Some((n, res))
642 }
643
644 // Private function: findlowest: replace node with a subtree's
645 // lowest-ranking item.
646 //
647 // See findhighest for the details.
findlowest(&mut self, target: u32, mut n: u32) -> Option<(u32, AVLRes)>648 fn findlowest(&mut self, target: u32, mut n: u32) -> Option<(u32, AVLRes)> {
649 if n == AVL_NULL {
650 return None;
651 }
652 let mut res = AVLRes::Balance;
653 if self.pool[n as usize].left != AVL_NULL {
654 let rec = self.findlowest(target, self.pool[n as usize].left);
655 if let Some((new_n_left, new_res)) = rec {
656 self.pool[n as usize].left = new_n_left;
657 res = new_res;
658 if res == AVLRes::Balance {
659 let (new_n, new_res) = self.leftshrunk(n);
660 n = new_n;
661 res = new_res;
662 }
663 return Some((n, res));
664 } else {
665 return None;
666 }
667 }
668 self.pool[target as usize].item = self.pool[n as usize].item.clone();
669 let tmp = n;
670 n = self.pool[n as usize].right;
671 self.free(tmp);
672 Some((n, res))
673 }
674
    // Private function: delete_wrk: delete an item from the tree
    // (worker function)
    //
    // Parameters:
    //
    //   root     Root of the subtree to delete `item` from.
    //
    //   item     Item to be removed.
    //
    //   mb_cmp   Optional comparison function; None means compare via
    //            PartialOrd.
    //
    // Returns the new subtree root together with:
    //
    //   Error    The subtree does not contain `item`; nothing was changed.
    //
    //   OK       The item has been removed and no subtree of the returned
    //            root has shrunk; the tree is still a valid AVL tree.
    //
    //   Balance  The item has been removed, but the subtree has shrunk in
    //            height; the caller may need to rebalance further up.
    fn delete_wrk<F>(&mut self, mut root: u32, item: T, mb_cmp: Option<&F>) -> (u32, AVLRes)
    where
        F: Fn(T, T) -> Option<Ordering>,
    {
        let mut tmp = AVLRes::Balance;
        if root == AVL_NULL {
            return (root, AVLRes::Error);
        }

        let cmp_arg_left: T = item.clone();
        let cmp_arg_right: T = self.pool[root as usize].item.clone();
        let cmp_res = match mb_cmp {
            None => cmp_arg_left.partial_cmp(&cmp_arg_right),
            Some(cmp) => cmp(cmp_arg_left, cmp_arg_right),
        };
        match cmp_res {
            None => panic!("AVLTree::delete_wrk: unordered elements"),
            Some(Ordering::Less) => {
                // Item lies in the left subtree: recurse, then rebalance here
                // if that subtree shrank.
                let root_left = self.pool[root as usize].left;
                let (new_root_left, new_tmp) = self.delete_wrk(root_left, item, mb_cmp);
                self.pool[root as usize].left = new_root_left;
                tmp = new_tmp;
                if tmp == AVLRes::Balance {
                    let (new_root, new_res) = self.leftshrunk(root);
                    root = new_root;
                    tmp = new_res;
                }
                return (root, tmp);
            }
            Some(Ordering::Greater) => {
                // Item lies in the right subtree: recurse, then rebalance
                // here if that subtree shrank.
                let root_right = self.pool[root as usize].right;
                let (new_root_right, new_tmp) = self.delete_wrk(root_right, item, mb_cmp);
                self.pool[root as usize].right = new_root_right;
                tmp = new_tmp;
                if tmp == AVLRes::Balance {
                    let (new_root, new_res) = self.rightshrunk(root);
                    root = new_root;
                    tmp = new_res;
                }
                return (root, tmp);
            }
            Some(Ordering::Equal) => {
                // Found it. If there is a left subtree, overwrite this node
                // with its in-order predecessor (the highest item on the
                // left) and delete that node instead; symmetrically for the
                // right subtree. Only a leaf is freed directly.
                if self.pool[root as usize].left != AVL_NULL {
                    let root_left = self.pool[root as usize].left;
                    if let Some((new_root_left, new_tmp)) = self.findhighest(root, root_left) {
                        self.pool[root as usize].left = new_root_left;
                        tmp = new_tmp;
                        if new_tmp == AVLRes::Balance {
                            let (new_root, new_res) = self.leftshrunk(root);
                            root = new_root;
                            tmp = new_res;
                        }
                    }
                    return (root, tmp);
                }
                if self.pool[root as usize].right != AVL_NULL {
                    let root_right = self.pool[root as usize].right;
                    if let Some((new_root_right, new_tmp)) = self.findlowest(root, root_right) {
                        self.pool[root as usize].right = new_root_right;
                        tmp = new_tmp;
                        if new_tmp == AVLRes::Balance {
                            let (new_root, new_res) = self.rightshrunk(root);
                            root = new_root;
                            tmp = new_res;
                        }
                    }
                    return (root, tmp);
                }
                // Leaf node: free it and report the height change.
                self.free(root);
                root = AVL_NULL;
                return (root, AVLRes::Balance);
            }
        }
    }
775
776 // Private fn: count the number of items in the tree. Warning: costs O(N) !
777 #[cfg(test)]
count_wrk(&self, n: u32) -> usize778 fn count_wrk(&self, n: u32) -> usize {
779 if n == AVL_NULL {
780 return 0;
781 }
782 1 + self.count_wrk(self.pool[n as usize].left) + self.count_wrk(self.pool[n as usize].right)
783 }
784
785 // Private fn: find the max depth of the tree. Warning: costs O(N) !
786 #[cfg(test)]
depth_wrk(&self, n: u32) -> usize787 fn depth_wrk(&self, n: u32) -> usize {
788 if n == AVL_NULL {
789 return 0;
790 }
791 let d_left = self.depth_wrk(self.pool[n as usize].left);
792 let d_right = self.depth_wrk(self.pool[n as usize].right);
793 1 + if d_left > d_right { d_left } else { d_right }
794 }
795 }
796
// Machinery for iterating over the tree, enumerating nodes in ascending order.
// Unfortunately AVLTreeIter has to be public.
pub struct AVLTreeIter<'t, 's, T> {
    // The tree being iterated over (read-only).
    tree: &'t AVLTree<T>,
    // Caller-supplied work stack holding the indices of nodes not yet
    // yielded whose left spines have already been pushed.
    stack: &'s mut Vec<u32>,
}
803
804 impl<'t, 's, T> AVLTreeIter<'t, 's, T> {
805 #[allow(dead_code)]
new(tree: &'t AVLTree<T>, stack: &'s mut Vec<u32>) -> Self806 fn new(tree: &'t AVLTree<T>, stack: &'s mut Vec<u32>) -> Self {
807 let mut iter = AVLTreeIter { tree, stack };
808 if tree.root != AVL_NULL {
809 iter.stack.push(tree.root);
810 iter.visit_left_children(tree.root);
811 }
812 iter
813 }
814
visit_left_children(&mut self, root: u32)815 fn visit_left_children(&mut self, root: u32) {
816 let mut cur = root;
817 loop {
818 let left = self.tree.pool[cur as usize].left;
819 if left == AVL_NULL {
820 break;
821 }
822 self.stack.push(left);
823 cur = left;
824 }
825 }
826 }
827
828 impl<'s, 't, T: Copy> Iterator for AVLTreeIter<'s, 't, T> {
829 type Item = T;
next(&mut self) -> Option<Self::Item>830 fn next(&mut self) -> Option<Self::Item> {
831 let ret = match self.stack.pop() {
832 Some(ret) => ret,
833 None => return None,
834 };
835 let right = self.tree.pool[ret as usize].right;
836 if right != AVL_NULL {
837 self.stack.push(right);
838 self.visit_left_children(right);
839 }
840 Some(self.tree.pool[ret as usize].item)
841 }
842 }
843
844 //=============================================================================
845 // Public interface for AVLTree
846
847 impl<T: Clone + PartialOrd> AVLTree<T> {
848 // The core functions (insert, delete, contains) take a comparator argument
849 //
850 // mb_cmp: Option<&F>
851 // where
852 // F: Fn(T, T) -> Option<Ordering>
853 //
854 // which allows control over how node comparison is done. If this is None,
855 // then comparison is done directly using PartialOrd for the T values.
856 //
857 // If this is Some(cmp), then comparison is done by passing the two T values
858 // to `cmp`. In this case, the routines will complain (panic) if `cmp`
859 // indicates that its arguments are unordered.
860
861 // Insert a value in the tree. Returns true if an insert happened, false if
862 // the item was already present.
insert<F>(&mut self, item: T, mb_cmp: Option<&F>) -> bool where F: Fn(T, T) -> Option<Ordering>,863 pub fn insert<F>(&mut self, item: T, mb_cmp: Option<&F>) -> bool
864 where
865 F: Fn(T, T) -> Option<Ordering>,
866 {
867 let new_root = self.insert_wrk(self.root, item, mb_cmp);
868 if new_root == AVL_NULL {
869 return false; // already in tree
870 } else {
871 self.root = new_root;
872 return true;
873 }
874 }
875
876 // Remove an item from the tree. Returns a bool which indicates whether the
877 // value was in there in the first place. (meaning, true == a removal
878 // actually occurred).
delete<F>(&mut self, item: T, mb_cmp: Option<&F>) -> bool where F: Fn(T, T) -> Option<Ordering>,879 pub fn delete<F>(&mut self, item: T, mb_cmp: Option<&F>) -> bool
880 where
881 F: Fn(T, T) -> Option<Ordering>,
882 {
883 let (new_root, res) = self.delete_wrk(self.root, item, mb_cmp);
884 if res == AVLRes::Error {
885 return false;
886 } else {
887 self.root = new_root;
888 return true;
889 }
890 }
891
892 // Find `item` in the tree, and replace it with `replacement`. `item` and `replacement`
893 // must compare equal per the comparison function `cmp`. Returns a bool indicating whether
894 // `item` was found (and hence, replaced). There's no comparison fast-path here
895 // (meaning, `cmp` is `&F` and not `Option<&F>`) only because so far there is no use case
896 // for it.
find_and_replace<F>(&mut self, item: T, replacement: T, cmp: &F) -> bool where F: Fn(T, T) -> Option<Ordering>,897 pub fn find_and_replace<F>(&mut self, item: T, replacement: T, cmp: &F) -> bool
898 where
899 F: Fn(T, T) -> Option<Ordering>,
900 {
901 let mut n = self.root;
902 loop {
903 if n == AVL_NULL {
904 return false;
905 }
906 let cmp_arg_left: T = item.clone();
907 let cmp_arg_right: T = self.pool[n as usize].item.clone();
908 match cmp(cmp_arg_left, cmp_arg_right) {
909 Some(Ordering::Less) => {
910 n = self.pool[n as usize].left;
911 }
912 Some(Ordering::Greater) => {
913 n = self.pool[n as usize].right;
914 }
915 Some(Ordering::Equal) => {
916 // Do what we can to ensure the caller can't mess up the total ordering in
917 // the tree. This is more restrictive than it needs to be, but loosening
918 // it requires finding the largest item below `item` and the smallest one
919 // above it, which is expensive.
920 assert!(cmp(item, replacement.clone()) == Some(Ordering::Equal));
921 self.pool[n as usize].item = replacement.clone();
922 return true;
923 }
924 None => {
925 panic!("AVLTree::find_and_replace: unordered elements in search!");
926 }
927 }
928 }
929 }
930
931 // Determine whether an item is in the tree.
932 // sewardj 2020Mar31: this is not used; I assume all users of the trees
933 // do their own custom traversals. Remove #[cfg(test)] if any real uses
934 // appear.
935 #[cfg(test)]
contains<F>(&self, item: T, mb_cmp: Option<&F>) -> bool where F: Fn(T, T) -> Option<Ordering>,936 pub fn contains<F>(&self, item: T, mb_cmp: Option<&F>) -> bool
937 where
938 F: Fn(T, T) -> Option<Ordering>,
939 {
940 let mut n = self.root;
941 // Lookup needs to be really fast, so have two versions of the loop, one
942 // for direct comparison, one for indirect.
943 match mb_cmp {
944 None => {
945 // Do comparisons directly on the items.
946 loop {
947 if n == AVL_NULL {
948 return false;
949 }
950 let cmp_arg_left: T = item.clone();
951 let cmp_arg_right: T = self.pool[n as usize].item.clone();
952 match cmp_arg_left.partial_cmp(&cmp_arg_right) {
953 Some(Ordering::Less) => {
954 n = self.pool[n as usize].left;
955 }
956 Some(Ordering::Greater) => {
957 n = self.pool[n as usize].right;
958 }
959 Some(Ordering::Equal) => {
960 return true;
961 }
962 None => {
963 panic!("AVLTree::contains(1): unordered elements in search!");
964 }
965 }
966 }
967 }
968 Some(cmp) => {
969 // Do comparisons by handing off to the supplied function.
970 loop {
971 if n == AVL_NULL {
972 return false;
973 }
974 let cmp_arg_left: T = item.clone();
975 let cmp_arg_right: T = self.pool[n as usize].item.clone();
976 match cmp(cmp_arg_left, cmp_arg_right) {
977 Some(Ordering::Less) => {
978 n = self.pool[n as usize].left;
979 }
980 Some(Ordering::Greater) => {
981 n = self.pool[n as usize].right;
982 }
983 Some(Ordering::Equal) => {
984 return true;
985 }
986 None => {
987 panic!("AVLTree::contains(2): unordered elements in search!");
988 }
989 }
990 }
991 }
992 }
993 }
994
    // Count the number of items in the tree, by delegating to `count_wrk`
    // for a full traversal from the root. Warning: costs O(N) !
    #[cfg(test)]
    fn count(&self) -> usize {
        self.count_wrk(self.root)
    }
1000
    // Private fn: find the max depth of the tree, by delegating to
    // `depth_wrk` for a full traversal from the root. Warning: costs O(N) !
    #[cfg(test)]
    fn depth(&self) -> usize {
        self.depth_wrk(self.root)
    }
1006
to_vec(&self) -> Vec<T>1007 pub fn to_vec(&self) -> Vec<T> {
1008 // BEGIN helper fn
1009 fn walk<U: Clone>(res: &mut Vec<U>, root: u32, pool: &Vec<AVLNode<U>>) {
1010 let root_left = pool[root as usize].left;
1011 if root_left != AVL_NULL {
1012 walk(res, root_left, pool);
1013 }
1014 res.push(pool[root as usize].item.clone());
1015 let root_right = pool[root as usize].right;
1016 if root_right != AVL_NULL {
1017 walk(res, root_right, pool);
1018 }
1019 }
1020 // END helper fn
1021
1022 let mut res = Vec::<T>::new();
1023 if self.root != AVL_NULL {
1024 walk(&mut res, self.root, &self.pool);
1025 }
1026 res
1027 }
1028
    // Create an in-order iterator over the tree. `storage` is caller-provided
    // scratch space used by the iterator (presumably as its stack of pending
    // node indices — see AVLTreeIter); any previous contents are discarded
    // here before use.
    #[allow(dead_code)]
    pub fn iter<'t, 's>(&'t self, storage: &'s mut Vec<u32>) -> AVLTreeIter<'t, 's, T> {
        storage.clear();
        AVLTreeIter::new(self, storage)
    }
1034
1035 // Show the tree. (For debugging only.)
1036 //pub fn show(&self, depth: i32, node: u32) {
1037 // if node != AVL_NULL {
1038 // self.show(depth + 1, self.pool[node as usize].left);
1039 // for _ in 0..depth {
1040 // print!(" ");
1041 // }
1042 // println!("{}", ToFromU32::to_u32(self.pool[node as usize].item));
1043 // self.show(depth + 1, self.pool[node as usize].right);
1044 // }
1045 //}
1046 }
1047
1048 //=============================================================================
1049 // Testing machinery for AVLTree
1050
#[cfg(test)]
mod avl_tree_test_utils {
    use crate::data_structures::Set;
    use std::cmp::Ordering;

    // Perform various checks on the tree, and assert if it is not OK:
    // cardinality matches `should_be_in_tree`, depth is consistent with AVL
    // balance, and membership agrees with `should_be_in_tree` over the whole
    // universe [univ_min, univ_max), for all three lookup styles.
    pub fn check_tree(
        tree: &super::AVLTree<u32>,
        should_be_in_tree: &Set<u32>,
        univ_min: u32,
        univ_max: u32,
    ) {
        // Same cardinality
        let n_in_set = should_be_in_tree.card();
        let n_in_tree = tree.count();
        assert!(n_in_set == n_in_tree);

        // Tree is not wildly out of balance. Depth should not exceed 1.44 *
        // log2(size).
        let tree_depth = tree.depth();
        let mut log2_size = 0;
        {
            // Compute ceil-ish log2 by counting shifts until zero.
            let mut n: usize = n_in_tree;
            while n > 0 {
                n = n >> 1;
                log2_size += 1;
            }
        }
        // Actually a tighter limit than stated above. For these test cases, the
        // tree is either perfectly balanced or within one level of being so
        // (hence the +1).
        assert!(tree_depth <= log2_size + 1);

        // Check that everything that should be in the tree is in it, and vice
        // versa.
        for i in univ_min..univ_max {
            let should_be_in = should_be_in_tree.contains(i);

            // Look it up with a null comparator (so `contains` compares
            // directly)
            let is_in = tree.contains::<fn(u32, u32) -> Option<Ordering>>(i, None);
            assert!(is_in == should_be_in);

            // We should get the same result with a custom comparator that does the
            // same as the null comparator.
            let is_in_w_cmp = tree.contains(
                i,
                Some(&(|x_left: u32, x_right: u32| x_left.partial_cmp(&x_right))),
            );
            assert!(is_in_w_cmp == is_in);

            // And even when the comparator is actually a closure that captures a
            // value from its environment. Adding the same constant to both sides
            // preserves the ordering, so the answer must not change. (The exact
            // value is irrelevant; it was previously misleadingly named
            // `forty_two` while holding 52.)
            let captured_offset: u32 = 52;
            let is_in_w_cmp_closure = tree.contains(
                i,
                Some(
                    &(|x_left: u32, x_right: u32| {
                        (x_left + captured_offset).partial_cmp(&(x_right + captured_offset))
                    }),
                ),
            );
            assert!(is_in_w_cmp_closure == is_in);
        }

        // We could even test that the tree items are in-order, but it hardly
        // seems worth the hassle, since the previous test would surely have
        // failed if that wasn't the case.
    }
}
1120
#[test]
fn test_avl_tree1() {
    use crate::data_structures::Set;

    // Perform tests on an AVL tree. Use as values, every third number between
    // 5000 and 5999 inclusive. This is to ensure that there's no confusion
    // between element values and internal tree indices (although I think the
    // typechecker guarantees that anyway).
    //
    // Also carry along a Set<u32>, which keeps track of which values should be
    // in the tree at the current point.
    const UNIV_MIN: u32 = 5000;
    const UNIV_MAX: u32 = 5999;
    const UNIV_SIZE: u32 = UNIV_MAX - UNIV_MIN + 1;

    let mut tree = AVLTree::<u32>::new(0);
    let mut should_be_in_tree = Set::<u32>::empty();

    // Add numbers to the tree, checking as we go.
    // NOTE(review): the range is half-open, so UNIV_MAX itself is never
    // inserted, despite the "inclusive" wording above. Harmless for the test,
    // but worth confirming it's intentional.
    for i in UNIV_MIN..UNIV_MAX {
        // Idiotic but simple
        if i % 3 != 0 {
            continue;
        }
        let was_added = tree.insert::<fn(u32, u32) -> Option<Ordering>>(i, None);
        should_be_in_tree.insert(i);
        assert!(was_added == true);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Then remove the middle half of the tree, also checking.
    for i in UNIV_MIN + UNIV_SIZE / 4..UNIV_MIN + 3 * (UNIV_SIZE / 4) {
        // Note that here, we're asking to delete a bunch of numbers that aren't
        // in the tree. It should remain valid throughout.
        let was_removed = tree.delete::<fn(u32, u32) -> Option<Ordering>>(i, None);
        let should_have_been_removed = should_be_in_tree.contains(i);
        assert!(was_removed == should_have_been_removed);
        should_be_in_tree.delete(i);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Now add some numbers which are already in the tree. (Only the first
    // quarter of the universe is visited, and that quarter was untouched by
    // the deletion pass above, so every multiple of 3 here is still present
    // and each insert must report "not added".)
    for i in UNIV_MIN..UNIV_MIN + UNIV_SIZE / 4 {
        if i % 3 != 0 {
            continue;
        }
        let was_added = tree.insert::<fn(u32, u32) -> Option<Ordering>>(i, None);
        let should_have_been_added = !should_be_in_tree.contains(i);
        assert!(was_added == should_have_been_added);
        should_be_in_tree.insert(i);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Then remove all numbers from the tree, in reverse order.
    for ir in UNIV_MIN..UNIV_MAX {
        // Map the ascending loop counter onto a descending value.
        let i = UNIV_MIN + (UNIV_MAX - ir);
        let was_removed = tree.delete::<fn(u32, u32) -> Option<Ordering>>(i, None);
        let should_have_been_removed = should_be_in_tree.contains(i);
        assert!(was_removed == should_have_been_removed);
        should_be_in_tree.delete(i);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Now the tree should be empty.
    assert!(should_be_in_tree.is_empty());
    assert!(tree.count() == 0);

    // Now delete some more stuff. Tree should still be empty :-)
    for i in UNIV_MIN + 10..UNIV_MIN + 100 {
        assert!(should_be_in_tree.is_empty());
        assert!(tree.count() == 0);
        tree.delete::<fn(u32, u32) -> Option<Ordering>>(i, None);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // The tree root should be NULL.
    assert!(tree.root == AVL_NULL);
    assert!(tree.freelist != AVL_NULL);

    // Check the freelist: all entries are of the expected form. Per the
    // freelist invariant (see the `freelist` field's comment), a free entry
    // links via `.left`, leaves `.right` at AVL_NULL, and its `.item` holds
    // the default value this tree was created with (0).
    for e in &tree.pool {
        assert!(e.tag == AVLTag::Free);
        assert!(e.left == AVL_NULL || (e.left as usize) < tree.pool.len());
        assert!(e.right == AVL_NULL);
        assert!(e.item == 0);
    }

    // Check the freelist: it's non-circular, and contains the expected number
    // of elements.
    let mut n_in_freelist = 0;
    let mut cursor: u32 = tree.freelist;
    while cursor != AVL_NULL {
        assert!((cursor as usize) < tree.pool.len());
        n_in_freelist += 1;
        assert!(n_in_freelist < 100000 /*arbitrary*/); // else it has a cycle
        cursor = tree.pool[cursor as usize].left;
    }
    // If we get here, the freelist at least doesn't have a cycle.

    // All elements in the pool are on the freelist.
    assert!(n_in_freelist == tree.pool.len());
}
1223
#[test]
fn test_avl_tree2() {
    use std::cmp::Ordering;

    // Do some simple testing using a custom comparator which inverts the
    // order of items in the tree, so as to check that custom comparators
    // work right.
    fn reverse_cmp(x: u32, y: u32) -> Option<Ordering> {
        y.partial_cmp(&x)
    }

    let mut tree = AVLTree::<u32>::new(0);
    let nums = [31, 41, 59, 27, 14, 35, 62, 25, 18, 28, 45, 90, 61];

    // Insert: every value is fresh, so every insertion must take effect.
    for n in &nums {
        assert!(tree.insert(*n, Some(&reverse_cmp)));
    }

    // Membership over a superset of the universe must agree with `nums`.
    for n in 0..100 {
        assert!(tree.contains(n, Some(&reverse_cmp)) == nums.contains(&n));
    }

    // Deletion succeeds exactly for the values that were inserted.
    for n in 0..100 {
        assert!(tree.delete(n, Some(&reverse_cmp)) == nums.contains(&n));
    }

    // Final check: the tree must now be empty.
    assert!(tree.root == AVL_NULL);
    assert!(tree.count() == 0);
}
1262
#[test]
fn test_avl_tree_iter() {
    let mut scratch = Vec::new();

    // An empty tree must yield an empty iteration.
    let empty_tree = AVLTree::<u32>::new(0);
    assert!(empty_tree.iter(&mut scratch).next().is_none());

    const FROM: u32 = 0;
    const TO: u32 = 10000;

    // Fill a tree with FROM..TO, then check the iterator visits exactly the
    // same sequence as the recursive `to_vec` traversal.
    let mut tree = AVLTree::<u32>::new(0);
    for i in FROM..TO {
        tree.insert(i, Some(&|a: u32, b: u32| a.partial_cmp(&b)));
    }

    let expected = tree.to_vec();
    for (idx, val) in tree.iter(&mut scratch).enumerate() {
        assert_eq!(expected[idx], val, "not equal for i={}", idx);
    }
}
1282