1 //! AVL trees with a private allocation pool.
2 //!
3 //! AVL tree internals are public, so that backtracking.rs can do custom
4 //! traversals of the tree as it wishes.
5
6 use std::cmp::Ordering;
7
8 //=============================================================================
9 // Data structures for AVLTree
10
// Per-node state: whether the pool entry is free, and if in use, which
// subtree (if any) is the taller one ("skew"/balance tag).
//
// The enum is a trivial value type, so also derive `Copy` (avoids needless
// `.clone()` at use sites), `Eq` (it has full equality, not just partial)
// and `Debug` (for diagnostics and test assertions).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum AVLTag {
    Free,  // This pool entry is not in use
    None,  // This pool entry is in use. Neither subtree is higher.
    Left,  // This pool entry is in use. The left subtree is higher.
    Right, // This pool entry is in use. The right subtree is higher.
}
18
// A single tree node, stored by index in `AVLTree::pool`.
#[derive(Clone)]
pub struct AVLNode<T> {
    // Balance tag for this node, or `AVLTag::Free` when the entry is on the
    // freelist.
    pub tag: AVLTag,
    // Pool index of the left child, or AVL_NULL if none. For freelist
    // entries this field is reused as the link to the next free entry.
    pub left: u32,
    // Pool index of the right child, or AVL_NULL if none.
    pub right: u32,
    // The stored item itself.
    pub item: T,
}
26 impl<T> AVLNode<T> {
new(tag: AVLTag, left: u32, right: u32, item: T) -> Self27 fn new(tag: AVLTag, left: u32, right: u32, item: T) -> Self {
28 Self {
29 tag,
30 left,
31 right,
32 item,
33 }
34 }
35 }
36
// The "null pointer" value for pool indices. No real entry can have this
// index: `alloc` refuses to grow the pool anywhere near 2^32-1 entries.
pub const AVL_NULL: u32 = 0xFFFF_FFFF;
38
pub struct AVLTree<T> {
    // The storage area. There can be at most 2^32-2 entries, since AVL_NULL
    // (== 2^32-1) is used to mean "the null pointer".
    pub pool: Vec<AVLNode<T>>,
    // A default value for the stored item. We don't care what this is;
    // unfortunately Rust forces us to have one so that additions to the free
    // list will be fully initialised.
    default: T,
    // The freelist head. This is a list of available entries. Each item on
    // the freelist must have its .tag be AVLTag::Free, and will use its .left
    // field as the link to the next freelist item. A freelist link value of
    // AVL_NULL denotes the end of the list. If `freelist` itself is AVL_NULL
    // then the list is empty.
    freelist: u32,
    // Last but not least, the root node. AVL_NULL means the tree is empty.
    pub root: u32,
}
56
57 //=============================================================================
58 // Storage management functions for AVLTree
59
60 impl<T: Clone> AVLTree<T> {
61 // Create a new tree and its associated storage pool. This requires knowing
62 // the default item value.
new(default: T) -> Self63 pub fn new(default: T) -> Self {
64 let pool = vec![];
65 let freelist = AVL_NULL;
66 let root = AVL_NULL;
67 Self {
68 pool,
69 default,
70 freelist,
71 root,
72 }
73 }
74
75 // Private function: free a tree node and put it back on the storage pool's
76 // freelist.
free(&mut self, index: u32)77 fn free(&mut self, index: u32) {
78 assert!(index != AVL_NULL);
79 assert!(self.pool[index as usize].tag != AVLTag::Free);
80 self.pool[index as usize] =
81 AVLNode::new(AVLTag::Free, self.freelist, AVL_NULL, self.default.clone());
82 self.freelist = index;
83 }
84
85 // Private function: allocate a tree node from the storage pool, resizing
86 // the pool if necessary. This will decline to expand the tree past about
87 // 1.75 billion items.
alloc(&mut self) -> u3288 fn alloc(&mut self) -> u32 {
89 // Check to see if the freelist is empty, and if so allocate a bunch more
90 // slots.
91 if self.freelist == AVL_NULL {
92 let start = self.pool.len();
93 let fill_item = AVLNode::new(AVLTag::Free, AVL_NULL, AVL_NULL, self.default.clone());
94 // What happens if this OOMs? At least guard against u32 overflow by
95 // doing this:
96 if start >= 0x7000_0000 {
97 // 1.75 billion elements in the tree should be enough for any
98 // reasonable use of this register allocator.
99 panic!("AVLTree<T>::alloc: too many items");
100 }
101 self.pool.resize(2 * start + 2, fill_item);
102 let end_plus_1 = self.pool.len();
103 debug_assert!(end_plus_1 >= 2);
104 self.pool[end_plus_1 - 1].left = self.freelist;
105 let mut i = end_plus_1 - 2;
106 while i >= start {
107 // The entry is already marked as free, but we must set the link.
108 self.pool[i].left = i as u32 + 1;
109 if i == 0 {
110 break;
111 }
112 i -= 1;
113 }
114 self.freelist = start as u32;
115 debug_assert!(self.freelist != AVL_NULL);
116 }
117 // And now allocate.
118 let new = self.freelist;
119 assert!(self.pool[new as usize].tag == AVLTag::Free);
120 // The caller is responsible for filling in the entry. But at least set
121 // the tag to non-Free, for sanity.
122 self.pool[new as usize].tag = AVLTag::None;
123 self.freelist = self.pool[new as usize].left;
124 new
125 }
126 }
127
128 //=============================================================================
129 // Tree-wrangling machinery for AVLTree (private)
130
131 // For the public interface, see below.
132
133 // The functions 'insert' and 'delete', and all supporting functions reachable
134 // from them, are derived from a public domain implementation by Georg Kraml.
135 // Unfortunately the relevant web site is long gone, and can only be found on
136 // the Wayback Machine.
137 //
138 // https://web.archive.org/web/20010419134337/
139 // http://www.kraml.at/georg/avltree/index.html
140 //
141 // https://web.archive.org/web/20030926063347/
142 // http://www.kraml.at/georg/avltree/avlmonolithic.c
143 //
144 // https://web.archive.org/web/20030401124003/http://www.kraml.at/src/howto/
145 //
146 // For relicensing clearance, see Mozilla bug 1620332, at
147 // https://bugzilla.mozilla.org/show_bug.cgi?id=1620332.
148
149 // Did a given insertion/deletion succeed, and what do we do next?
// Did a given insertion/deletion succeed, and what do we do next?
// Derives `Eq` (equality is total) and `Debug` (diagnostics) alongside the
// original `Clone`/`Copy`/`PartialEq`.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum AVLRes {
    Error,   // Insert: item already present. Delete: item not found.
    OK,      // Subtree height unchanged; ancestors need no rebalancing.
    Balance, // Subtree height changed; ancestors may need rebalancing.
}
156
157 impl<T: Clone + PartialOrd> AVLTree<T> {
158 // Private function: rotleft: perform counterclockwise rotation
159 // Takes the root of the tree to rotate, returns the new root
rotleft(&mut self, old_root: u32) -> u32160 fn rotleft(&mut self, old_root: u32) -> u32 {
161 let new_root = self.pool[old_root as usize].right;
162 self.pool[old_root as usize].right = self.pool[new_root as usize].left;
163 self.pool[new_root as usize].left = old_root;
164 new_root
165 }
166
167 // Private function: rotright: perform clockwise rotation
168 // Takes the root of the tree to rotate, returns the new root
rotright(&mut self, old_root: u32) -> u32169 fn rotright(&mut self, old_root: u32) -> u32 {
170 let new_root = self.pool[old_root as usize].left;
171 self.pool[old_root as usize].left = self.pool[new_root as usize].right;
172 self.pool[new_root as usize].right = old_root;
173 new_root
174 }
175
    // Private function: leftgrown: helper function for `insert`
    //
    // Parameters:
    //
    //   root        Pool index of the root of a tree. This node's left
    //               subtree has just grown due to item insertion; its
    //               "tag" flag needs adjustment, and the local tree
    //               (the subtree of which this node is the root node) may
    //               have become unbalanced.
    //
    // Return values:
    //
    //   The new root of the subtree, plus either:
    //
    //   OK          The local tree could be rebalanced or was balanced
    //               from the start. The parent activations of the insert
    //               activation that called this function may assume the
    //               entire tree is valid.
    // or
    //   BALANCE     The local tree was balanced, but has grown in height.
    //               Do not assume the entire tree is valid.
    fn leftgrown(&mut self, mut root: u32) -> (u32, AVLRes) {
        match self.pool[root as usize].tag {
            AVLTag::Left => {
                // Already left-heavy; growth on the left makes the node
                // doubly left-heavy, so a rotation is required.
                if self.pool[self.pool[root as usize].left as usize].tag == AVLTag::Left {
                    // Left-left case: one clockwise rotation; both nodes
                    // involved end up balanced.
                    self.pool[root as usize].tag = AVLTag::None;
                    let t = self.pool[root as usize].left;
                    self.pool[t as usize].tag = AVLTag::None;
                    root = self.rotright(root);
                } else {
                    // Left-right case: fix up the tags according to the skew
                    // of the left child's right subtree, then double-rotate.
                    match self.pool
                        [self.pool[self.pool[root as usize].left as usize].right as usize]
                        .tag
                    {
                        AVLTag::Left => {
                            self.pool[root as usize].tag = AVLTag::Right;
                            let t = self.pool[root as usize].left;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Right => {
                            self.pool[root as usize].tag = AVLTag::None;
                            let t = self.pool[root as usize].left;
                            self.pool[t as usize].tag = AVLTag::Left;
                        }
                        AVLTag::None => {
                            self.pool[root as usize].tag = AVLTag::None;
                            let t = self.pool[root as usize].left;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Free => panic!("AVLTree::leftgrown(1): unallocated node in tree"),
                    }
                    {
                        // The grandchild rotated into the root position ends
                        // up balanced.
                        let t = self.pool[self.pool[root as usize].left as usize].right;
                        self.pool[t as usize].tag = AVLTag::None;
                    }
                    self.pool[root as usize].left = self.rotleft(self.pool[root as usize].left);
                    root = self.rotright(root);
                }
                return (root, AVLRes::OK);
            }
            AVLTag::Right => {
                // Was right-heavy; left growth evens it out. The subtree's
                // height is unchanged, so ancestors need no rebalancing.
                self.pool[root as usize].tag = AVLTag::None;
                return (root, AVLRes::OK);
            }
            AVLTag::None => {
                // Was balanced; now left-heavy and one level taller. The
                // caller must keep checking balance on the way up.
                self.pool[root as usize].tag = AVLTag::Left;
                return (root, AVLRes::Balance);
            }
            AVLTag::Free => panic!("AVLTree::leftgrown(2): unallocated node in tree"),
        }
    }
247
    // Private function: rightgrown: helper function for `insert`
    //
    // Mirror image of `leftgrown`; see there for details.
    fn rightgrown(&mut self, mut root: u32) -> (u32, AVLRes) {
        match self.pool[root as usize].tag {
            AVLTag::Left => {
                // Was left-heavy; right growth evens it out, height unchanged.
                self.pool[root as usize].tag = AVLTag::None;
                return (root, AVLRes::OK);
            }
            AVLTag::Right => {
                // Already right-heavy; now doubly so. Rebalance.
                if self.pool[self.pool[root as usize].right as usize].tag == AVLTag::Right {
                    // Right-right case: one counterclockwise rotation.
                    self.pool[root as usize].tag = AVLTag::None;
                    let t = self.pool[root as usize].right as usize;
                    self.pool[t].tag = AVLTag::None;
                    root = self.rotleft(root);
                } else {
                    // Right-left case: fix up the tags according to the skew
                    // of the right child's left subtree, then double-rotate.
                    match self.pool
                        [self.pool[self.pool[root as usize].right as usize].left as usize]
                        .tag
                    {
                        AVLTag::Right => {
                            self.pool[root as usize].tag = AVLTag::Left;
                            let t = self.pool[root as usize].right;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Left => {
                            self.pool[root as usize].tag = AVLTag::None;
                            let t = self.pool[root as usize].right;
                            self.pool[t as usize].tag = AVLTag::Right;
                        }
                        AVLTag::None => {
                            self.pool[root as usize].tag = AVLTag::None;
                            let t = self.pool[root as usize].right;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Free => panic!("AVLTree::rightgrown(1): unallocated node in tree"),
                    }
                    {
                        // The grandchild rotated into the root position ends
                        // up balanced.
                        let t = self.pool[self.pool[root as usize].right as usize].left;
                        self.pool[t as usize].tag = AVLTag::None;
                    }
                    self.pool[root as usize].right = self.rotright(self.pool[root as usize].right);
                    root = self.rotleft(root);
                }
                return (root, AVLRes::OK);
            }
            AVLTag::None => {
                // Was balanced; now right-heavy and one level taller. The
                // caller must keep checking balance on the way up.
                self.pool[root as usize].tag = AVLTag::Right;
                return (root, AVLRes::Balance);
            }
            AVLTag::Free => panic!("AVLTree::rightgrown(2): unallocated node in tree"),
        }
    }
301
302 // Private function: insert_wrk: insert a node into the AVL tree
303 // (worker function)
304 //
305 // Parameters:
306 //
307 // root Root of the tree in whch to insert `d`.
308 //
309 // item Item to be inserted.
310 //
311 // Return values:
312 //
313 // Root of the new tree, plus one of:
314 //
315 // nonzero The item has been inserted. The excact value of
316 // nonzero yields is of no concern to user code; when
317 // avlinsert recursively calls itself, the number
318 // returned tells the parent activation if the AVL tree
319 // may have become unbalanced; specifically:
320 //
321 // OK None of the subtrees of the node that n points to
322 // has grown, the AVL tree is valid.
323 //
324 // BALANCE One of the subtrees of the node that n points to
325 // has grown, the node's "skew" flag needs adjustment,
326 // and the AVL tree may have become unbalanced.
327 //
328 // zero The datum provided could not be inserted, either due
329 // to AVLKEY collision (the tree already contains another
330 // item with which the same AVLKEY is associated), or
331 // due to insufficient memory.
insert_wrk<F>(&mut self, mut root: u32, item: T, mb_cmp: Option<&F>) -> (u32, AVLRes) where F: Fn(T, T) -> Option<Ordering>,332 fn insert_wrk<F>(&mut self, mut root: u32, item: T, mb_cmp: Option<&F>) -> (u32, AVLRes)
333 where
334 F: Fn(T, T) -> Option<Ordering>,
335 {
336 if root == AVL_NULL {
337 root = self.alloc();
338 self.pool[root as usize] = AVLNode::new(AVLTag::None, AVL_NULL, AVL_NULL, item);
339 return (root, AVLRes::Balance);
340 }
341
342 let cmp_arg_left: T = item.clone();
343 let cmp_arg_right: T = self.pool[root as usize].item.clone();
344 let cmp_res = match mb_cmp {
345 None => cmp_arg_left.partial_cmp(&cmp_arg_right),
346 Some(cmp) => cmp(cmp_arg_left, cmp_arg_right),
347 };
348 match cmp_res {
349 None => panic!("AVLTree::insert_wrk: unordered elements"),
350 Some(Ordering::Less) => {
351 let (new_left, res) =
352 self.insert_wrk(self.pool[root as usize].left, item.clone(), mb_cmp);
353 self.pool[root as usize].left = new_left;
354 if res == AVLRes::Balance {
355 return self.leftgrown(root);
356 }
357 return (root, res);
358 }
359 Some(Ordering::Greater) => {
360 let (new_right, res) =
361 self.insert_wrk(self.pool[root as usize].right, item.clone(), mb_cmp);
362 self.pool[root as usize].right = new_right;
363 if res == AVLRes::Balance {
364 return self.rightgrown(root);
365 }
366 return (root, res);
367 }
368 Some(Ordering::Equal) => {
369 return (root, AVLRes::Error);
370 }
371 }
372 }
373
    // Private function: leftshrunk: helper function for delete and
    // findlowest
    //
    // Parameters:
    //
    //   n           Pool index of a node. The node's left
    //               subtree has just shrunk due to item removal; its
    //               "skew" flag needs adjustment, and the local tree
    //               (the subtree of which this node is the root node) may
    //               have become unbalanced.
    //
    // Return values:
    //
    //   The new root of the subtree, plus either:
    //
    //   OK          The parent activation of the delete activation
    //               that called this function may assume the entire
    //               tree is valid.
    //
    //   BALANCE     Do not assume the entire tree is valid.
    fn leftshrunk(&mut self, mut n: u32) -> (u32, AVLRes) {
        match self.pool[n as usize].tag {
            AVLTag::Left => {
                // Was left-heavy; the left shrink balances the node but the
                // whole subtree lost one level of height.
                self.pool[n as usize].tag = AVLTag::None;
                return (n, AVLRes::Balance);
            }
            AVLTag::Right => {
                // Was right-heavy; now doubly right-heavy. Rebalance; the
                // exact fixups depend on the right child's skew.
                if self.pool[self.pool[n as usize].right as usize].tag == AVLTag::Right {
                    self.pool[n as usize].tag = AVLTag::None;
                    let t = self.pool[n as usize].right;
                    self.pool[t as usize].tag = AVLTag::None;
                    n = self.rotleft(n);
                    return (n, AVLRes::Balance);
                } else if self.pool[self.pool[n as usize].right as usize].tag == AVLTag::None {
                    // Right child is balanced: a single rotation fixes the
                    // local tree without changing its overall height.
                    self.pool[n as usize].tag = AVLTag::Right;
                    let t = self.pool[n as usize].right;
                    self.pool[t as usize].tag = AVLTag::Left;
                    n = self.rotleft(n);
                    return (n, AVLRes::OK);
                } else {
                    // Right child is left-heavy: double rotation, with tag
                    // fixups driven by the grandchild's skew.
                    match self.pool[self.pool[self.pool[n as usize].right as usize].left as usize]
                        .tag
                    {
                        AVLTag::Left => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].right;
                            self.pool[t as usize].tag = AVLTag::Right;
                        }
                        AVLTag::Right => {
                            self.pool[n as usize].tag = AVLTag::Left;
                            let t = self.pool[n as usize].right;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::None => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].right;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Free => {
                            panic!("AVLTree::leftshrunk(1): unallocated node in tree");
                        }
                    }
                    {
                        // The grandchild rotated into the root position ends
                        // up balanced.
                        let t = self.pool[self.pool[n as usize].right as usize].left;
                        self.pool[t as usize].tag = AVLTag::None;
                    }
                    {
                        let t = self.rotright(self.pool[n as usize].right);
                        self.pool[n as usize].right = t;
                    }
                    n = self.rotleft(n);
                    return (n, AVLRes::Balance);
                }
            }
            AVLTag::None => {
                // Was balanced; now right-heavy, but the subtree's overall
                // height is unchanged.
                self.pool[n as usize].tag = AVLTag::Right;
                return (n, AVLRes::OK);
            }
            AVLTag::Free => {
                panic!("AVLTree::leftshrunk(2): unallocated node in tree");
            }
        }
    }
455
    // Private function: rightshrunk: helper function for delete and
    // findhighest
    //
    // Mirror image of `leftshrunk`; see there for details.
    fn rightshrunk(&mut self, mut n: u32) -> (u32, AVLRes) {
        match self.pool[n as usize].tag {
            AVLTag::Right => {
                // Was right-heavy; the right shrink balances the node but
                // the whole subtree lost one level of height.
                self.pool[n as usize].tag = AVLTag::None;
                return (n, AVLRes::Balance);
            }
            AVLTag::Left => {
                // Was left-heavy; now doubly left-heavy. Rebalance; the
                // exact fixups depend on the left child's skew.
                if self.pool[self.pool[n as usize].left as usize].tag == AVLTag::Left {
                    self.pool[n as usize].tag = AVLTag::None;
                    let t = self.pool[n as usize].left;
                    self.pool[t as usize].tag = AVLTag::None;
                    n = self.rotright(n);
                    return (n, AVLRes::Balance);
                } else if self.pool[self.pool[n as usize].left as usize].tag == AVLTag::None {
                    // Left child is balanced: a single rotation fixes the
                    // local tree without changing its overall height.
                    self.pool[n as usize].tag = AVLTag::Left;
                    let t = self.pool[n as usize].left;
                    self.pool[t as usize].tag = AVLTag::Right;
                    n = self.rotright(n);
                    return (n, AVLRes::OK);
                } else {
                    // Left child is right-heavy: double rotation, with tag
                    // fixups driven by the grandchild's skew.
                    match self.pool[self.pool[self.pool[n as usize].left as usize].right as usize]
                        .tag
                    {
                        AVLTag::Left => {
                            self.pool[n as usize].tag = AVLTag::Right;
                            let t = self.pool[n as usize].left;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Right => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].left;
                            self.pool[t as usize].tag = AVLTag::Left;
                        }
                        AVLTag::None => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].left;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Free => {
                            panic!("AVLTree::rightshrunk(1): unallocated node in tree");
                        }
                    }
                    {
                        // The grandchild rotated into the root position ends
                        // up balanced.
                        let t = self.pool[self.pool[n as usize].left as usize].right;
                        self.pool[t as usize].tag = AVLTag::None;
                    }
                    {
                        let t = self.rotleft(self.pool[n as usize].left);
                        self.pool[n as usize].left = t;
                    }
                    n = self.rotright(n);
                    return (n, AVLRes::Balance);
                }
            }
            AVLTag::None => {
                // Was balanced; now left-heavy, but the subtree's overall
                // height is unchanged.
                self.pool[n as usize].tag = AVLTag::Left;
                return (n, AVLRes::OK);
            }
            AVLTag::Free => {
                panic!("AVLTree::rightshrunk(2): unallocated node in tree");
            }
        }
    }
523
524 // Private function: findhighest: replace a node with a subtree's
525 // highest-ranking item.
526 //
527 // Parameters:
528 //
529 // target Pointer to node to be replaced.
530 //
531 // n Address of pointer to subtree.
532 //
533 // res Pointer to variable used to tell the caller whether
534 // further checks are necessary; analog to the return
535 // values of leftgrown and leftshrunk (see there).
536 //
537 // Return values:
538 //
539 // True A node was found; the target node has been replaced.
540 //
541 // False The target node could not be replaced because
542 // the subtree provided was empty.
543 //
findhighest(&mut self, target: u32, mut n: u32) -> Option<(u32, AVLRes)>544 fn findhighest(&mut self, target: u32, mut n: u32) -> Option<(u32, AVLRes)> {
545 if n == AVL_NULL {
546 return None;
547 }
548 let mut res = AVLRes::Balance;
549 if self.pool[n as usize].right != AVL_NULL {
550 let rec = self.findhighest(target, self.pool[n as usize].right);
551 if let Some((new_n_right, new_res)) = rec {
552 self.pool[n as usize].right = new_n_right;
553 res = new_res;
554 if res == AVLRes::Balance {
555 let (new_n, new_res) = self.rightshrunk(n);
556 n = new_n;
557 res = new_res;
558 }
559 return Some((n, res));
560 } else {
561 return None;
562 }
563 }
564 self.pool[target as usize].item = self.pool[n as usize].item.clone();
565 let tmp = n;
566 n = self.pool[n as usize].left;
567 self.free(tmp);
568 Some((n, res))
569 }
570
571 // Private function: findlowest: replace node with a subtree's
572 // lowest-ranking item.
573 //
574 // See findhighest for the details.
findlowest(&mut self, target: u32, mut n: u32) -> Option<(u32, AVLRes)>575 fn findlowest(&mut self, target: u32, mut n: u32) -> Option<(u32, AVLRes)> {
576 if n == AVL_NULL {
577 return None;
578 }
579 let mut res = AVLRes::Balance;
580 if self.pool[n as usize].left != AVL_NULL {
581 let rec = self.findlowest(target, self.pool[n as usize].left);
582 if let Some((new_n_left, new_res)) = rec {
583 self.pool[n as usize].left = new_n_left;
584 res = new_res;
585 if res == AVLRes::Balance {
586 let (new_n, new_res) = self.leftshrunk(n);
587 n = new_n;
588 res = new_res;
589 }
590 return Some((n, res));
591 } else {
592 return None;
593 }
594 }
595 self.pool[target as usize].item = self.pool[n as usize].item.clone();
596 let tmp = n;
597 n = self.pool[n as usize].right;
598 self.free(tmp);
599 Some((n, res))
600 }
601
    // Private function: delete_wrk: delete an item from the tree.
    // (worker function)
    //
    // Parameters:
    //
    //   root        Pool index of the root of the (sub)tree.
    //
    //   item        Item to be removed (located by comparison).
    //
    // Return values:
    //
    //   Root of the new (sub)tree, plus one of:
    //
    //   OK          Neither subtree of the returned root has shrunk; the
    //               AVL tree is valid.
    //
    //   BALANCE     One of the subtrees of the returned root has shrunk;
    //               its "skew" flag needs adjustment, and the AVL tree may
    //               have become unbalanced.
    //
    //   ERROR       The tree does not contain an item equal to the one
    //               provided by the caller; nothing was removed.
    fn delete_wrk<F>(&mut self, mut root: u32, item: T, mb_cmp: Option<&F>) -> (u32, AVLRes)
    where
        F: Fn(T, T) -> Option<Ordering>,
    {
        let mut tmp = AVLRes::Balance;
        if root == AVL_NULL {
            // Empty subtree: the item is not present.
            return (root, AVLRes::Error);
        }

        // Compare `item` against the root's item, either directly via
        // PartialOrd or through the caller-supplied comparator.
        let cmp_arg_left: T = item.clone();
        let cmp_arg_right: T = self.pool[root as usize].item.clone();
        let cmp_res = match mb_cmp {
            None => cmp_arg_left.partial_cmp(&cmp_arg_right),
            Some(cmp) => cmp(cmp_arg_left, cmp_arg_right),
        };
        match cmp_res {
            None => panic!("AVLTree::delete_wrk: unordered elements"),
            Some(Ordering::Less) => {
                // Target is in the left subtree; delete there, then
                // rebalance this node if that subtree shrank.
                let root_left = self.pool[root as usize].left;
                let (new_root_left, new_tmp) = self.delete_wrk(root_left, item, mb_cmp);
                self.pool[root as usize].left = new_root_left;
                tmp = new_tmp;
                if tmp == AVLRes::Balance {
                    let (new_root, new_res) = self.leftshrunk(root);
                    root = new_root;
                    tmp = new_res;
                }
                return (root, tmp);
            }
            Some(Ordering::Greater) => {
                // Target is in the right subtree; delete there, then
                // rebalance this node if that subtree shrank.
                let root_right = self.pool[root as usize].right;
                let (new_root_right, new_tmp) = self.delete_wrk(root_right, item, mb_cmp);
                self.pool[root as usize].right = new_root_right;
                tmp = new_tmp;
                if tmp == AVLRes::Balance {
                    let (new_root, new_res) = self.rightshrunk(root);
                    root = new_root;
                    tmp = new_res;
                }
                return (root, tmp);
            }
            Some(Ordering::Equal) => {
                // Found it. If there is a left subtree, replace this node's
                // item with the left subtree's highest item (and remove that
                // node instead).
                if self.pool[root as usize].left != AVL_NULL {
                    let root_left = self.pool[root as usize].left;
                    if let Some((new_root_left, new_tmp)) = self.findhighest(root, root_left) {
                        self.pool[root as usize].left = new_root_left;
                        tmp = new_tmp;
                        if new_tmp == AVLRes::Balance {
                            let (new_root, new_res) = self.leftshrunk(root);
                            root = new_root;
                            tmp = new_res;
                        }
                    }
                    return (root, tmp);
                }
                // Otherwise, if there is a right subtree, use its lowest
                // item in the same way.
                if self.pool[root as usize].right != AVL_NULL {
                    let root_right = self.pool[root as usize].right;
                    if let Some((new_root_right, new_tmp)) = self.findlowest(root, root_right) {
                        self.pool[root as usize].right = new_root_right;
                        tmp = new_tmp;
                        if new_tmp == AVLRes::Balance {
                            let (new_root, new_res) = self.rightshrunk(root);
                            root = new_root;
                            tmp = new_res;
                        }
                    }
                    return (root, tmp);
                }
                // Leaf node: just free it. The subtree's height shrank from
                // 1 to 0, so signal Balance to the caller.
                self.free(root);
                root = AVL_NULL;
                return (root, AVLRes::Balance);
            }
        }
    }
702
703 // Private fn: count the number of items in the tree. Warning: costs O(N) !
704 #[cfg(test)]
count_wrk(&self, n: u32) -> usize705 fn count_wrk(&self, n: u32) -> usize {
706 if n == AVL_NULL {
707 return 0;
708 }
709 1 + self.count_wrk(self.pool[n as usize].left) + self.count_wrk(self.pool[n as usize].right)
710 }
711
712 // Private fn: find the max depth of the tree. Warning: costs O(N) !
713 #[cfg(test)]
depth_wrk(&self, n: u32) -> usize714 fn depth_wrk(&self, n: u32) -> usize {
715 if n == AVL_NULL {
716 return 0;
717 }
718 let d_left = self.depth_wrk(self.pool[n as usize].left);
719 let d_right = self.depth_wrk(self.pool[n as usize].right);
720 1 + if d_left > d_right { d_left } else { d_right }
721 }
722 }
723
// Machinery for iterating over the tree, enumerating nodes in ascending order.
// Unfortunately AVLTreeIter has to be public.
pub struct AVLTreeIter<'t, 's, T> {
    // The tree being iterated over (read-only).
    tree: &'t AVLTree<T>,
    // Caller-provided work stack of pool indices; the top of the stack is
    // the next node to be yielded.
    stack: &'s mut Vec<u32>,
}
730
731 impl<'t, 's, T> AVLTreeIter<'t, 's, T> {
732 #[allow(dead_code)]
new(tree: &'t AVLTree<T>, stack: &'s mut Vec<u32>) -> Self733 fn new(tree: &'t AVLTree<T>, stack: &'s mut Vec<u32>) -> Self {
734 let mut iter = AVLTreeIter { tree, stack };
735 if tree.root != AVL_NULL {
736 iter.stack.push(tree.root);
737 iter.visit_left_children(tree.root);
738 }
739 iter
740 }
741
visit_left_children(&mut self, root: u32)742 fn visit_left_children(&mut self, root: u32) {
743 let mut cur = root;
744 loop {
745 let left = self.tree.pool[cur as usize].left;
746 if left == AVL_NULL {
747 break;
748 }
749 self.stack.push(left);
750 cur = left;
751 }
752 }
753 }
754
755 impl<'s, 't, T: Copy> Iterator for AVLTreeIter<'s, 't, T> {
756 type Item = T;
next(&mut self) -> Option<Self::Item>757 fn next(&mut self) -> Option<Self::Item> {
758 let ret = match self.stack.pop() {
759 Some(ret) => ret,
760 None => return None,
761 };
762 let right = self.tree.pool[ret as usize].right;
763 if right != AVL_NULL {
764 self.stack.push(right);
765 self.visit_left_children(right);
766 }
767 Some(self.tree.pool[ret as usize].item)
768 }
769 }
770
771 //=============================================================================
772 // Public interface for AVLTree
773
774 impl<T: Clone + PartialOrd> AVLTree<T> {
775 // The core functions (insert, delete, contains) take a comparator argument
776 //
777 // mb_cmp: Option<&F>
778 // where
779 // F: Fn(T, T) -> Option<Ordering>
780 //
781 // which allows control over how node comparison is done. If this is None,
782 // then comparison is done directly using PartialOrd for the T values.
783 //
784 // If this is Some(cmp), then comparison is done by passing the two T values
785 // to `cmp`. In this case, the routines will complain (panic) if `cmp`
786 // indicates that its arguments are unordered.
787
788 // Insert a value in the tree. Returns true if an insert happened, false if
789 // the item was already present.
insert<F>(&mut self, item: T, mb_cmp: Option<&F>) -> bool where F: Fn(T, T) -> Option<Ordering>,790 pub fn insert<F>(&mut self, item: T, mb_cmp: Option<&F>) -> bool
791 where
792 F: Fn(T, T) -> Option<Ordering>,
793 {
794 let (new_root, res) = self.insert_wrk(self.root, item, mb_cmp);
795 if res == AVLRes::Error {
796 return false; // already in tree
797 } else {
798 self.root = new_root;
799 return true;
800 }
801 }
802
803 // Remove an item from the tree. Returns a bool which indicates whether the
804 // value was in there in the first place. (meaning, true == a removal
805 // actually occurred).
delete<F>(&mut self, item: T, mb_cmp: Option<&F>) -> bool where F: Fn(T, T) -> Option<Ordering>,806 pub fn delete<F>(&mut self, item: T, mb_cmp: Option<&F>) -> bool
807 where
808 F: Fn(T, T) -> Option<Ordering>,
809 {
810 let (new_root, res) = self.delete_wrk(self.root, item, mb_cmp);
811 if res == AVLRes::Error {
812 return false;
813 } else {
814 self.root = new_root;
815 return true;
816 }
817 }
818
819 // Determine whether an item is in the tree.
820 // sewardj 2020Mar31: this is not used; I assume all users of the trees
821 // do their own custom traversals. Remove #[cfg(test)] if any real uses
822 // appear.
823 #[cfg(test)]
contains<F>(&self, item: T, mb_cmp: Option<&F>) -> bool where F: Fn(T, T) -> Option<Ordering>,824 pub fn contains<F>(&self, item: T, mb_cmp: Option<&F>) -> bool
825 where
826 F: Fn(T, T) -> Option<Ordering>,
827 {
828 let mut n = self.root;
829 // Lookup needs to be really fast, so have two versions of the loop, one
830 // for direct comparison, one for indirect.
831 match mb_cmp {
832 None => {
833 // Do comparisons directly on the items.
834 loop {
835 if n == AVL_NULL {
836 return false;
837 }
838 let cmp_arg_left: T = item.clone();
839 let cmp_arg_right: T = self.pool[n as usize].item.clone();
840 match cmp_arg_left.partial_cmp(&cmp_arg_right) {
841 Some(Ordering::Less) => {
842 n = self.pool[n as usize].left;
843 }
844 Some(Ordering::Greater) => {
845 n = self.pool[n as usize].right;
846 }
847 Some(Ordering::Equal) => {
848 return true;
849 }
850 None => {
851 panic!("AVLTree::contains(1): unordered elements in search!");
852 }
853 }
854 }
855 }
856 Some(cmp) => {
857 // Do comparisons by handing off to the supplied function.
858 loop {
859 if n == AVL_NULL {
860 return false;
861 }
862 let cmp_arg_left: T = item.clone();
863 let cmp_arg_right: T = self.pool[n as usize].item.clone();
864 match cmp(cmp_arg_left, cmp_arg_right) {
865 Some(Ordering::Less) => {
866 n = self.pool[n as usize].left;
867 }
868 Some(Ordering::Greater) => {
869 n = self.pool[n as usize].right;
870 }
871 Some(Ordering::Equal) => {
872 return true;
873 }
874 None => {
875 panic!("AVLTree::contains(2): unordered elements in search!");
876 }
877 }
878 }
879 }
880 }
881 }
882
    // Count the number of items in the tree. Warning: costs O(N) !
    // (Test-only convenience wrapper around `count_wrk`.)
    #[cfg(test)]
    fn count(&self) -> usize {
        self.count_wrk(self.root)
    }
888
    // Private fn: find the max depth of the tree. Warning: costs O(N) !
    // (Test-only convenience wrapper around `depth_wrk`.)
    #[cfg(test)]
    fn depth(&self) -> usize {
        self.depth_wrk(self.root)
    }
894
to_vec(&self) -> Vec<T>895 pub fn to_vec(&self) -> Vec<T> {
896 // BEGIN helper fn
897 fn walk<U: Clone>(res: &mut Vec<U>, root: u32, pool: &Vec<AVLNode<U>>) {
898 let root_left = pool[root as usize].left;
899 if root_left != AVL_NULL {
900 walk(res, root_left, pool);
901 }
902 res.push(pool[root as usize].item.clone());
903 let root_right = pool[root as usize].right;
904 if root_right != AVL_NULL {
905 walk(res, root_right, pool);
906 }
907 }
908 // END helper fn
909
910 let mut res = Vec::<T>::new();
911 if self.root != AVL_NULL {
912 walk(&mut res, self.root, &self.pool);
913 }
914 res
915 }
916
    // Return an in-order iterator over the tree, using `storage` as the
    // iterator's explicit traversal stack (it is cleared before use).
    #[allow(dead_code)]
    pub fn iter<'t, 's>(&'t self, storage: &'s mut Vec<u32>) -> AVLTreeIter<'t, 's, T> {
        storage.clear();
        AVLTreeIter::new(self, storage)
    }
922
923 // Show the tree. (For debugging only.)
924 //pub fn show(&self, depth: i32, node: u32) {
925 // if node != AVL_NULL {
926 // self.show(depth + 1, self.pool[node as usize].left);
927 // for _ in 0..depth {
928 // print!(" ");
929 // }
930 // println!("{}", ToFromU32::to_u32(self.pool[node as usize].item));
931 // self.show(depth + 1, self.pool[node as usize].right);
932 // }
933 //}
934 }
935
936 //=============================================================================
937 // Testing machinery for AVLTree
938
#[cfg(test)]
mod avl_tree_test_utils {
    use crate::data_structures::Set;
    use std::cmp::Ordering;

    // Perform various checks on the tree, and assert if it is not OK:
    // cardinality matches `should_be_in_tree`, depth is within AVL bounds,
    // and membership agrees for every value in [univ_min, univ_max).
    pub fn check_tree(
        tree: &super::AVLTree<u32>,
        should_be_in_tree: &Set<u32>,
        univ_min: u32,
        univ_max: u32,
    ) {
        // Same cardinality
        let n_in_set = should_be_in_tree.card();
        let n_in_tree = tree.count();
        assert!(n_in_set == n_in_tree);

        // Tree is not wildly out of balance. Depth should not exceed 1.44 *
        // log2(size).
        let tree_depth = tree.depth();
        let mut log2_size = 0;
        {
            let mut n: usize = n_in_tree;
            while n > 0 {
                n = n >> 1;
                log2_size += 1;
            }
        }
        // Actually a tighter limit than stated above. For these test cases, the
        // tree is either perfectly balanced or within one level of being so
        // (hence the +1).
        assert!(tree_depth <= log2_size + 1);

        // Check that everything that should be in the tree is in it, and vice
        // versa.
        for i in univ_min..univ_max {
            let should_be_in = should_be_in_tree.contains(i);

            // Look it up with a null comparator (so `contains` compares
            // directly)
            let is_in = tree.contains::<fn(u32, u32) -> Option<Ordering>>(i, None);
            assert!(is_in == should_be_in);

            // We should get the same result with a custom comparator that does the
            // same as the null comparator.
            let is_in_w_cmp = tree.contains(
                i,
                Some(&(|x_left: u32, x_right: u32| x_left.partial_cmp(&x_right))),
            );
            assert!(is_in_w_cmp == is_in);

            // And even when the comparator is a closure capturing a variable.
            // Adding the same offset to both sides leaves the ordering
            // unchanged. (Was previously named `forty_two`, misleadingly,
            // since it was bound to 52.)
            let offset: u32 = 52;
            let is_in_w_cmp_closure = tree.contains(
                i,
                Some(
                    &(|x_left: u32, x_right: u32| {
                        (x_left + offset).partial_cmp(&(x_right + offset))
                    }),
                ),
            );
            assert!(is_in_w_cmp_closure == is_in);
        }

        // We could even test that the tree items are in-order, but it hardly
        // seems worth the hassle, since the previous test would surely have
        // failed if that wasn't the case.
    }
}
1008
#[test]
fn test_avl_tree1() {
    use crate::data_structures::Set;

    // Perform tests on an AVL tree. Use as values, every third number between
    // 5000 and 5999 inclusive. This is to ensure that there's no confusion
    // between element values and internal tree indices (although I think the
    // typechecker guarantees that anyway).
    //
    // Also carry along a Set<u32>, which keeps track of which values should be
    // in the tree at the current point.
    const UNIV_MIN: u32 = 5000;
    const UNIV_MAX: u32 = 5999;
    const UNIV_SIZE: u32 = UNIV_MAX - UNIV_MIN + 1;

    let mut tree = AVLTree::<u32>::new(0);
    let mut should_be_in_tree = Set::<u32>::empty();

    // Add numbers to the tree, checking as we go. The range is inclusive so
    // the whole universe, UNIV_MAX included, is considered.
    for i in UNIV_MIN..=UNIV_MAX {
        // Idiotic but simple
        if i % 3 != 0 {
            continue;
        }
        let was_added = tree.insert::<fn(u32, u32) -> Option<Ordering>>(i, None);
        should_be_in_tree.insert(i);
        assert!(was_added);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Then remove the middle half of the tree, also checking.
    for i in UNIV_MIN + UNIV_SIZE / 4..UNIV_MIN + 3 * (UNIV_SIZE / 4) {
        // Note that here, we're asking to delete a bunch of numbers that aren't
        // in the tree. It should remain valid throughout.
        let was_removed = tree.delete::<fn(u32, u32) -> Option<Ordering>>(i, None);
        let should_have_been_removed = should_be_in_tree.contains(i);
        assert!(was_removed == should_have_been_removed);
        should_be_in_tree.delete(i);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Now add some numbers which are already in the tree.
    for i in UNIV_MIN..UNIV_MIN + UNIV_SIZE / 4 {
        if i % 3 != 0 {
            continue;
        }
        let was_added = tree.insert::<fn(u32, u32) -> Option<Ordering>>(i, None);
        let should_have_been_added = !should_be_in_tree.contains(i);
        assert!(was_added == should_have_been_added);
        should_be_in_tree.insert(i);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Then remove all numbers from the tree, in reverse order. Iterating the
    // inclusive range in reverse guarantees that both endpoints — UNIV_MIN in
    // particular — are exercised as deletion candidates.
    for i in (UNIV_MIN..=UNIV_MAX).rev() {
        let was_removed = tree.delete::<fn(u32, u32) -> Option<Ordering>>(i, None);
        let should_have_been_removed = should_be_in_tree.contains(i);
        assert!(was_removed == should_have_been_removed);
        should_be_in_tree.delete(i);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Now the tree should be empty.
    assert!(should_be_in_tree.is_empty());
    assert!(tree.count() == 0);

    // Now delete some more stuff. Tree should still be empty :-)
    for i in UNIV_MIN + 10..UNIV_MIN + 100 {
        assert!(should_be_in_tree.is_empty());
        assert!(tree.count() == 0);
        tree.delete::<fn(u32, u32) -> Option<Ordering>>(i, None);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // The tree root should be NULL, and everything should have been returned
    // to the freelist.
    assert!(tree.root == AVL_NULL);
    assert!(tree.freelist != AVL_NULL);

    // Check the freelist: all entries are of the expected form.
    for e in &tree.pool {
        assert!(e.tag == AVLTag::Free);
        assert!(e.left == AVL_NULL || (e.left as usize) < tree.pool.len());
        assert!(e.right == AVL_NULL);
        assert!(e.item == 0);
    }

    // Check the freelist: it's non-circular, and contains the expected number
    // of elements.
    let mut n_in_freelist = 0;
    let mut cursor: u32 = tree.freelist;
    while cursor != AVL_NULL {
        assert!((cursor as usize) < tree.pool.len());
        n_in_freelist += 1;
        assert!(n_in_freelist < 100000 /*arbitrary*/); // else it has a cycle
        cursor = tree.pool[cursor as usize].left;
    }
    // If we get here, the freelist at least doesn't have a cycle.

    // All elements in the pool are on the freelist.
    assert!(n_in_freelist == tree.pool.len());
}
1111
#[test]
fn test_avl_tree2() {
    use std::cmp::Ordering;

    // Do some simple testing using a custom comparator, which inverts the order
    // of items in the tree, so as to check custom comparators work right.
    let mut tree = AVLTree::<u32>::new(0);

    let nums = [31, 41, 59, 27, 14, 35, 62, 25, 18, 28, 45, 90, 61];

    // Compares its arguments back to front, giving a descending order.
    fn reverse_cmp(x: u32, y: u32) -> Option<Ordering> {
        y.partial_cmp(&x)
    }

    // Insert. All values in `nums` are distinct, so each insertion must
    // report success.
    for n in &nums {
        let insert_happened = tree.insert(*n, Some(&reverse_cmp));
        assert!(insert_happened);
    }

    // Check membership over a superset of the inserted values.
    for n in 0..100 {
        let is_in = tree.contains(n, Some(&reverse_cmp));
        let should_be_in = nums.contains(&n);
        assert!(is_in == should_be_in);
    }

    // Delete. Deletion must succeed exactly for the values that were
    // inserted, and report failure for everything else.
    for n in 0..100 {
        let remove_happened = tree.delete(n, Some(&reverse_cmp));
        let remove_should_have_happened = nums.contains(&n);
        assert!(remove_happened == remove_should_have_happened);
    }

    // Final check: the tree must be empty again.
    assert!(tree.root == AVL_NULL);
    assert!(tree.count() == 0);
}
1150
#[test]
fn test_avl_tree_iter() {
    let mut storage = Vec::new();

    // An empty tree must yield an empty iteration.
    let tree = AVLTree::<u32>::new(0);
    assert!(tree.iter(&mut storage).next().is_none());

    const FROM: u32 = 0;
    const TO: u32 = 10000;

    let mut tree = AVLTree::<u32>::new(0);
    for i in FROM..TO {
        // Every value is distinct, so each insertion must succeed.
        let inserted = tree.insert(i, Some(&|a: u32, b: u32| a.partial_cmp(&b)));
        assert!(inserted);
    }

    // The iterator must agree with `to_vec` item-for-item ...
    let as_vec = tree.to_vec();
    assert_eq!(as_vec.len(), (TO - FROM) as usize);
    let mut n_visited = 0;
    for (i, val) in tree.iter(&mut storage).enumerate() {
        assert_eq!(as_vec[i], val, "not equal for i={}", i);
        n_visited += 1;
    }
    // ... and it must not stop early: a truncated iteration would otherwise
    // pass the per-item checks above silently.
    assert_eq!(n_visited, as_vec.len());
}
1170