// This may look like C code, but it is really -*- C++ -*-
/*
Copyright (C) 1988 Free Software Foundation
    written by Doug Lea (dl@rocky.oswego.edu)

This file is part of GNU CC.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY.  No author or distributor
accepts responsibility to anyone for the consequences of using it
or for whether it serves any particular purpose or works at all,
unless he says so in writing.  Refer to the GNU CC General Public
License for full details.

Everyone is granted permission to copy, modify and redistribute
GNU CC, but only under the conditions described in the
GNU CC General Public License.  A copy of this license is
supposed to have been given to you along with GNU CC so you
can know your rights and responsibilities.  It should be in a
file named COPYING.  Among other things, the copyright notice
and this notice must be preserved on all copies.
*/

// Implementation of the genclass-generated ranked, threaded AVL map
// <T><C>RAVLMap.  NOTE(review): <T> and <C> are placeholder tokens that
// the libg++ `genclass' tool substitutes with the key and contents type
// names, so this file is a macro template rather than directly
// compilable C++.

#ifdef __GNUG__
#pragma implementation
#endif
#include <stream.h>
#include <assert.h>
#include "<T>.<C>.RAVLMap.h"


/*
  constants & inlines for maintaining balance & thread status in tree nodes

  Each node packs three pieces of bookkeeping into its `stat' field:
    - bits 0-1: AVL balance factor (balanced / left-heavy / right-heavy)
    - bit 2   : "left thread" flag  -- when set, lt is not a child but a
                link to the in-order predecessor (see pred() below)
    - bit 3   : "right thread" flag -- when set, rt is a link to the
                in-order successor (see succ() below)
  A set thread bit therefore also means "no real child on that side".
*/

#define AVLBALANCEMASK    3
#define AVLBALANCED       0
#define AVLLEFTHEAVY      1
#define AVLRIGHTHEAVY     2

#define LTHREADBIT        4
#define RTHREADBIT        8


// Extract the 2-bit balance factor of node t.
static inline int bf(<T><C>RAVLNode* t)
{
  return t->stat & AVLBALANCEMASK;
}

// Overwrite the balance factor of t with b, leaving the thread bits intact.
static inline void set_bf(<T><C>RAVLNode* t, int b)
{
  t->stat = (t->stat & ~AVLBALANCEMASK) | (b & AVLBALANCEMASK);
}


// Nonzero when t->rt is a successor thread rather than a real child.
static inline int rthread(<T><C>RAVLNode* t)
{
  return t->stat & RTHREADBIT;
}

// Set (b != 0) or clear (b == 0) the right-thread flag of t.
static inline void set_rthread(<T><C>RAVLNode* t, int b)
{
  if (b)
    t->stat |= RTHREADBIT;
  else
    t->stat &= ~RTHREADBIT;
}

// Nonzero when t->lt is a predecessor thread rather than a real child.
static inline int lthread(<T><C>RAVLNode* t)
{
  return t->stat & LTHREADBIT;
}

// Set (b != 0) or clear (b == 0) the left-thread flag of t.
static inline void set_lthread(<T><C>RAVLNode* t, int b)
{
  if (b)
    t->stat |= LTHREADBIT;
  else
    t->stat &= ~LTHREADBIT;
}

/*
  traversal primitives
*/


// Node holding the smallest key, or 0 for an empty map.
// NOTE(review): this walks raw lt pointers until 0 rather than testing
// lthread(); it presumably relies on the leftmost node's predecessor
// thread being the null pointer -- confirm against the node constructor
// in the header.
<T><C>RAVLNode* <T><C>RAVLMap::leftmost()
{
  <T><C>RAVLNode* t = root;
  if (t != 0) while (t->lt != 0) t = t->lt;
  return t;
}

// Node holding the largest key, or 0 for an empty map (mirror of leftmost).
<T><C>RAVLNode* <T><C>RAVLMap::rightmost()
{
  <T><C>RAVLNode* t = root;
  if (t != 0) while (t->rt != 0) t = t->rt;
  return t;
}

// In-order successor of t.  If t's right link is a thread it points
// directly at the successor; otherwise descend to the leftmost node of
// the right subtree (stopping at the first left-thread).
<T><C>RAVLNode* <T><C>RAVLMap::succ(<T><C>RAVLNode* t)
{
  <T><C>RAVLNode* r = t->rt;
  if (!rthread(t)) while (!lthread(r)) r = r->lt;
  return r;
}

// In-order predecessor of t (exact mirror of succ).
<T><C>RAVLNode* <T><C>RAVLMap::pred(<T><C>RAVLNode* t)
{
  <T><C>RAVLNode* l = t->lt;
  if (!lthread(t)) while (!rthread(l)) l = l->rt;
  return l;
}


// Standard binary search for key; returns the node as a Pix, or 0 if
// absent.  Reaching a thread bit on the side we would descend means the
// key is not in the tree.
Pix <T><C>RAVLMap::seek(<T&> key)
{
  <T><C>RAVLNode* t = root;
  if (t == 0)
    return 0;
  for (;;)
  {
    int cmp = <T>CMP(key, t->item);
    if (cmp == 0)
      return Pix(t);
    else if (cmp < 0)
    {
      if (lthread(t))
        return 0;
      else
        t = t->lt;
    }
    else if (rthread(t))
      return 0;
    else
      t = t->rt;
  }
}


// 1-based in-order position of key, or 0 if key is not present.
// Relies on t->rank being one plus the number of nodes in t's left
// subtree (an invariant that OK() cross-checks against succ() order):
// the accumulator r carries the ranks of the current node and of every
// ancestor exited to the right; descending left first backs out the
// current node's own contribution.
int <T><C>RAVLMap::rankof(<T&> key)
{
  int r;
  <T><C>RAVLNode* t = root;
  if (t == 0)
    return 0;
  for (r = t->rank; t != 0; r += t->rank)
  {
    int cmp = <T>CMP(key, t->item);
    if (cmp == 0)
      return r;
    else if (cmp < 0)
    {
      if (lthread(t))
        return 0;
      else
      {
        r -= t->rank;           // back this node out before going left
        t = t->lt;
      }
    }
    else if (rthread(t))
      return 0;
    else
    {
      t = t->rt;
    }
  }
  return 0;
}

// Inverse of rankof: Pix of the i-th smallest entry (1-based), or 0
// when i is out of range.  An empty map always fails the range test
// (count == 0), so root is never dereferenced when null.
Pix <T><C>RAVLMap::ranktoPix(int i)
{
  int r;
  <T><C>RAVLNode* t = root;

  if ((i < 1) || (i > count))
    return 0;
  for (r = t->rank; r != i; r += t->rank)
  {
    if (r > i)
    {
      r -= t->rank;             // overshot: retract and go left
      t = t->lt;
    }
    else
      t = t->rt;
  }
  return Pix(t);
}

/*
  The combination of threads and AVL bits make adding & deleting
  interesting, but very awkward.
  We use the following statics to avoid passing them around recursively
*/

// NOTE(review): file-scope statics shared by the recursive helpers make
// these maps non-reentrant; simultaneous operations on different
// instances of this class would race on them.
static int _need_rebalancing;      // to send back balance info from rec. calls
static <T>* _target_item;          // add/del_item target
static <T><C>RAVLNode* _found_node; // returned added/deleted node
static int _already_found;         // for deletion subcases
static int _rank_changed;          // for rank computation


// Recursive insert helper for operator[].  Searches for *_target_item
// below t; if absent, splices a new threaded leaf in and rebalances on
// the way back up, updating rank counters (_rank_changed) and balance
// factors (_need_rebalancing) as the recursion unwinds.  The reference
// parameter t lets rotations replace a child link in the parent.
void <T><C>RAVLMap:: _add(<T><C>RAVLNode*& t)
{
  int cmp = <T>CMP(*_target_item, t->item);
  if (cmp == 0)
  {
    // Key already present: just report the node; nothing changes.
    _found_node = t;
    return;
  }
  else if (cmp < 0)
  {
    if (lthread(t))
    {
      // No left child: hang the new node here.  Its lt inherits t's old
      // predecessor thread, its rt threads back to t.
      ++count;
      _found_node = new <T><C>RAVLNode(*_target_item, def);
      set_lthread(_found_node, 1);
      set_rthread(_found_node, 1);
      _found_node->lt = t->lt;
      _found_node->rt = t;
      t->lt = _found_node;
      set_lthread(t, 0);
      _need_rebalancing = 1;
      _rank_changed = 1;
    }
    else
      _add(t->lt);
    // A node was added somewhere in the left subtree: t's rank grows.
    if (_rank_changed) ++t->rank;
    if (_need_rebalancing)
    {
      switch(bf(t))
      {
      case AVLRIGHTHEAVY:
        // Left growth cancels the right tilt; height unchanged overall.
        set_bf(t, AVLBALANCED);
        _need_rebalancing = 0;
        return;
      case AVLBALANCED:
        // Now left-heavy; height grew, keep propagating upward.
        set_bf(t, AVLLEFTHEAVY);
        return;
      case AVLLEFTHEAVY:
        // Doubly left-heavy: rotate.  All rotations below also fix up
        // rank counters and re-route any thread links they disturb.
        <T><C>RAVLNode* l = t->lt;
        if (bf(l) == AVLLEFTHEAVY)
        {
          // Single right rotation: l becomes the subtree root.
          t->rank -= l->rank;
          if (rthread(l))
            t->lt = l;
          else
            t->lt = l->rt;
          set_lthread(t, rthread(l));
          l->rt = t;
          set_rthread(l, 0);
          set_bf(t, AVLBALANCED);
          set_bf(l, AVLBALANCED);
          t = l;
          _need_rebalancing = 0;
        }
        else
        {
          // Double (left-right) rotation: l's right child r comes up.
          <T><C>RAVLNode* r = l->rt;
          r->rank += l->rank;
          t->rank -= r->rank;
          set_rthread(l, lthread(r));
          if (lthread(r))
            l->rt = r;
          else
            l->rt = r->lt;
          r->lt = l;
          set_lthread(r, 0);
          set_lthread(t, rthread(r));
          if (rthread(r))
            t->lt = r;
          else
            t->lt = r->rt;
          r->rt = t;
          set_rthread(r, 0);
          // r's old tilt decides which side ends up one short.
          if (bf(r) == AVLLEFTHEAVY)
            set_bf(t, AVLRIGHTHEAVY);
          else
            set_bf(t, AVLBALANCED);
          if (bf(r) == AVLRIGHTHEAVY)
            set_bf(l, AVLLEFTHEAVY);
          else
            set_bf(l, AVLBALANCED);
          set_bf(r, AVLBALANCED);
          t = r;
          _need_rebalancing = 0;
          return;
        }
      }
    }
  }
  else
  {
    // Mirror image of the cmp < 0 branch for insertion to the right.
    // Note: no rank update is needed here -- rank only counts the left
    // subtree, which is untouched by right-side insertions below t.
    if (rthread(t))
    {
      ++count;
      _found_node = new <T><C>RAVLNode(*_target_item, def);
      set_rthread(t, 0);
      set_lthread(_found_node, 1);
      set_rthread(_found_node, 1);
      _found_node->lt = t;
      _found_node->rt = t->rt;
      t->rt = _found_node;
      _need_rebalancing = 1;
      _rank_changed = 1;
    }
    else
      _add(t->rt);
    if (_need_rebalancing)
    {
      switch(bf(t))
      {
      case AVLLEFTHEAVY:
        set_bf(t, AVLBALANCED);
        _need_rebalancing = 0;
        return;
      case AVLBALANCED:
        set_bf(t, AVLRIGHTHEAVY);
        return;
      case AVLRIGHTHEAVY:
        <T><C>RAVLNode* r = t->rt;
        if (bf(r) == AVLRIGHTHEAVY)
        {
          // Single left rotation: r becomes the subtree root.
          r->rank += t->rank;
          if (lthread(r))
            t->rt = r;
          else
            t->rt = r->lt;
          set_rthread(t, lthread(r));
          r->lt = t;
          set_lthread(r, 0);
          set_bf(t, AVLBALANCED);
          set_bf(r, AVLBALANCED);
          t = r;
          _need_rebalancing = 0;
        }
        else
        {
          // Double (right-left) rotation: r's left child l comes up.
          <T><C>RAVLNode* l = r->lt;
          r->rank -= l->rank;
          l->rank += t->rank;
          set_lthread(r, rthread(l));
          if (rthread(l))
            r->lt = l;
          else
            r->lt = l->rt;
          l->rt = r;
          set_rthread(l, 0);
          set_rthread(t, lthread(l));
          if (lthread(l))
            t->rt = l;
          else
            t->rt = l->lt;
          l->lt = t;
          set_lthread(l, 0);
          if (bf(l) == AVLRIGHTHEAVY)
            set_bf(t, AVLLEFTHEAVY);
          else
            set_bf(t, AVLBALANCED);
          if (bf(l) == AVLLEFTHEAVY)
            set_bf(r, AVLRIGHTHEAVY);
          else
            set_bf(r, AVLBALANCED);
          set_bf(l, AVLBALANCED);
          t = l;
          _need_rebalancing = 0;
          return;
        }
      }
    }
  }
}


// Map-style lookup-or-insert: returns a reference to the contents slot
// for item, creating a node initialized with `def' (the map's default
// contents) when the key is new.
<C>& <T><C>RAVLMap::operator [] (<T&> item)
{
  if (root == 0)
  {
    // First entry: a lone root whose lt/rt are both null threads.
    ++count;
    root = new <T><C>RAVLNode(item, def);
    set_rthread(root, 1);
    set_lthread(root, 1);
    return root->cont;
  }
  else
  {
    _target_item = &item;
    _need_rebalancing = 0;
    _rank_changed = 0;
    _add(root);
    return _found_node->cont;
  }
}


// Recursive delete helper.  par is t's parent (del() seeds the root as
// its own parent).  Four structural cases once the key is matched:
//   leaf (both threads)      -> unlink from par, re-threading par;
//   left thread only         -> splice in right child, fix successor's
//                               predecessor thread;
//   right thread only        -> mirror of the above;
//   two children             -> copy the predecessor's item/cont into t,
//                               set _already_found, and steer the search
//                               left (comp = -1) to delete the
//                               predecessor node instead.
// Once _already_found is set, comparisons become purely structural:
// keep going right until a right thread marks the predecessor.
// The unwind then rebalances exactly as in _add, but with the shrink-
// side rules (deletion can require rotations at every level).
void <T><C>RAVLMap::_del(<T><C>RAVLNode* par, <T><C>RAVLNode*& t)
{
  int comp;
  if (_already_found)
  {
    if (rthread(t))
      comp = 0;
    else
      comp = 1;
  }
  else
    comp = <T>CMP(*_target_item, t->item);
  if (comp == 0)
  {
    if (lthread(t) && rthread(t))
    {
      // Leaf: detach from whichever side of par we hang on, turning
      // that side back into a thread (inheriting t's own thread link).
      _found_node = t;
      if (t == par->lt)
      {
        set_lthread(par, 1);
        par->lt = t->lt;
      }
      else
      {
        set_rthread(par, 1);
        par->rt = t->rt;
      }
      _need_rebalancing = 1;
      _rank_changed = 1;
      return;
    }
    else if (lthread(t))
    {
      // Only a right child: it replaces t; the successor's left thread
      // (if any) must now bypass t.
      _found_node = t;
      <T><C>RAVLNode* s = succ(t);
      if (s != 0 && lthread(s))
        s->lt = t->lt;
      t = t->rt;
      _need_rebalancing = 1;
      _rank_changed = 1;
      return;
    }
    else if (rthread(t))
    {
      // Only a left child: mirror of the previous case.
      _found_node = t;
      <T><C>RAVLNode* p = pred(t);
      if (p != 0 && rthread(p))
        p->rt = t->rt;
      t = t->lt;
      _need_rebalancing = 1;
      _rank_changed = 1;
      return;
    }
    else                        // replace item & find someone deletable
    {
      <T><C>RAVLNode* p = pred(t);
      t->item = p->item;
      t->cont = p->cont;
      _already_found = 1;
      comp = -1;                // fall through below to left
    }
  }

  if (comp < 0)
  {
    if (lthread(t))
      return;                   // key not present below here
    _del(t, t->lt);
    // Left subtree shrank by one node, so t's rank drops.
    if (_rank_changed) --t->rank;
    if (!_need_rebalancing)
      return;
    switch (bf(t))
    {
    case AVLLEFTHEAVY:
      // Left shrink just levels t; subtree height fell, keep going up.
      set_bf(t, AVLBALANCED);
      return;
    case AVLBALANCED:
      // Tilts right but height is unchanged: propagation stops.
      set_bf(t, AVLRIGHTHEAVY);
      _need_rebalancing = 0;
      return;
    case AVLRIGHTHEAVY:
      // Doubly right-heavy: rotate, with rank/thread fixups as in _add.
      <T><C>RAVLNode* r = t->rt;
      switch (bf(r))
      {
      case AVLBALANCED:
        // Single left rotation; height unchanged, so stop propagating.
        r->rank += t->rank;
        if (lthread(r))
          t->rt = r;
        else
          t->rt = r->lt;
        set_rthread(t, lthread(r));
        r->lt = t;
        set_lthread(r, 0);
        set_bf(t, AVLRIGHTHEAVY);
        set_bf(r, AVLLEFTHEAVY);
        _need_rebalancing = 0;
        t = r;
        return;
      case AVLRIGHTHEAVY:
        // Single left rotation; subtree height fell, keep propagating.
        r->rank += t->rank;
        if (lthread(r))
          t->rt = r;
        else
          t->rt = r->lt;
        set_rthread(t, lthread(r));
        r->lt = t;
        set_lthread(r, 0);
        set_bf(t, AVLBALANCED);
        set_bf(r, AVLBALANCED);
        t = r;
        return;
      case AVLLEFTHEAVY:
        // Double (right-left) rotation around r's left child l.
        <T><C>RAVLNode* l = r->lt;
        r->rank -= l->rank;
        l->rank += t->rank;
        set_lthread(r, rthread(l));
        if (rthread(l))
          r->lt = l;
        else
          r->lt = l->rt;
        l->rt = r;
        set_rthread(l, 0);
        set_rthread(t, lthread(l));
        if (lthread(l))
          t->rt = l;
        else
          t->rt = l->lt;
        l->lt = t;
        set_lthread(l, 0);
        if (bf(l) == AVLRIGHTHEAVY)
          set_bf(t, AVLLEFTHEAVY);
        else
          set_bf(t, AVLBALANCED);
        if (bf(l) == AVLLEFTHEAVY)
          set_bf(r, AVLRIGHTHEAVY);
        else
          set_bf(r, AVLBALANCED);
        set_bf(l, AVLBALANCED);
        t = l;
        return;
      }
    }
  }
  else
  {
    // Mirror image: deletion in the right subtree.  t->rank is not
    // touched -- it only counts the (unchanged) left subtree.
    if (rthread(t))
      return;
    _del(t, t->rt);
    if (!_need_rebalancing)
      return;
    switch (bf(t))
    {
    case AVLRIGHTHEAVY:
      set_bf(t, AVLBALANCED);
      return;
    case AVLBALANCED:
      set_bf(t, AVLLEFTHEAVY);
      _need_rebalancing = 0;
      return;
    case AVLLEFTHEAVY:
      <T><C>RAVLNode* l = t->lt;
      switch (bf(l))
      {
      case AVLBALANCED:
        // Single right rotation; height unchanged, stop propagating.
        t->rank -= l->rank;
        if (rthread(l))
          t->lt = l;
        else
          t->lt = l->rt;
        set_lthread(t, rthread(l));
        l->rt = t;
        set_rthread(l, 0);
        set_bf(t, AVLLEFTHEAVY);
        set_bf(l, AVLRIGHTHEAVY);
        _need_rebalancing = 0;
        t = l;
        return;
      case AVLLEFTHEAVY:
        // Single right rotation; height fell, keep propagating.
        t->rank -= l->rank;
        if (rthread(l))
          t->lt = l;
        else
          t->lt = l->rt;
        set_lthread(t, rthread(l));
        l->rt = t;
        set_rthread(l, 0);
        set_bf(t, AVLBALANCED);
        set_bf(l, AVLBALANCED);
        t = l;
        return;
      case AVLRIGHTHEAVY:
        // Double (left-right) rotation around l's right child r.
        <T><C>RAVLNode* r = l->rt;
        r->rank += l->rank;
        t->rank -= r->rank;
        set_rthread(l, lthread(r));
        if (lthread(r))
          l->rt = r;
        else
          l->rt = r->lt;
        r->lt = l;
        set_lthread(r, 0);
        set_lthread(t, rthread(r));
        if (rthread(r))
          t->lt = r;
        else
          t->lt = r->rt;
        r->rt = t;
        set_rthread(r, 0);
        if (bf(r) == AVLLEFTHEAVY)
          set_bf(t, AVLRIGHTHEAVY);
        else
          set_bf(t, AVLBALANCED);
        if (bf(r) == AVLRIGHTHEAVY)
          set_bf(l, AVLLEFTHEAVY);
        else
          set_bf(l, AVLBALANCED);
        set_bf(r, AVLBALANCED);
        t = r;
        return;
      }
    }
  }
}


// Public delete: remove item if present.  Seeds the recursion with the
// root acting as its own parent, frees the detached node, and clears
// root when the map becomes empty (the recursion cannot null the root
// link itself in the lone-leaf case).
void <T><C>RAVLMap::del(<T&> item)
{
  if (root == 0) return;
  _need_rebalancing = 0;
  _already_found = 0;
  _found_node = 0;
  _rank_changed = 0;
  _target_item = &item;
  _del(root, root);
  if (_found_node)
  {
    delete(_found_node);
    if (--count == 0)
      root = 0;
  }
}

// Post-order destruction of a subtree; thread links are skipped so each
// node is deleted exactly once.
void <T><C>RAVLMap::_kill(<T><C>RAVLNode* t)
{
  if (t != 0)
  {
    if (!lthread(t)) _kill(t->lt);
    if (!rthread(t)) _kill(t->rt);
    delete t;
  }
}


// Copy constructor: builds the new map by inserting b's entries in
// Pix-iteration order via operator[].  (Non-const reference parameter
// is pre-const-correctness C++ style, kept for the generated header.)
<T><C>RAVLMap::<T><C>RAVLMap(<T><C>RAVLMap& b) :<T><C>Map(b.def)
{
  root = 0;
  count = 0;
  for (Pix i = b.first(); i != 0; b.next(i))
    (*this)[b.key(i)] = b.contents(i);
}


// Consistency check: walks the tree in thread order verifying strict
// key ordering, that rankof() agrees with each node's in-order
// position, and that the node count matches `count'.  Reports failures
// through the inherited error() hook and returns the verdict.
int <T><C>RAVLMap::OK()
{
  int v = 1;
  if (root == 0)
    v = count == 0;
  else
  {
    int n = 1;
    <T><C>RAVLNode* trail = leftmost();
    v &= rankof(trail->item) == n;
    <T><C>RAVLNode* t = succ(trail);
    while (t != 0)
    {
      ++n;
      v &= <T>CMP(trail->item, t->item) < 0;
      v &= rankof(t->item) == n;
      trail = t;
      t = succ(t);
    }
    v &= n == count;
  }
  if (!v) error("invariant failure");
  return v;
}