1 /* 2 * ALIST.C - Bitmap allocator/deallocator, using a radix tree with hinting. 3 * Unlimited-size allocations, power-of-2 only, power-of-2 4 * aligned results only. 5 * 6 * Copyright (c) 2007 The DragonFly Project. All rights reserved. 7 * 8 * This code is derived from software contributed to The DragonFly Project 9 * by Matthew Dillon <dillon@backplane.com> 10 * 11 * Redistribution and use in source and binary forms, with or without 12 * modification, are permitted provided that the following conditions 13 * are met: 14 * 15 * 1. Redistributions of source code must retain the above copyright 16 * notice, this list of conditions and the following disclaimer. 17 * 2. Redistributions in binary form must reproduce the above copyright 18 * notice, this list of conditions and the following disclaimer in 19 * the documentation and/or other materials provided with the 20 * distribution. 21 * 3. Neither the name of The DragonFly Project nor the names of its 22 * contributors may be used to endorse or promote products derived 23 * from this software without specific, prior written permission. 24 * 25 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 26 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 27 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 28 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 29 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 30 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING, 31 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 32 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED 33 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 34 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT 35 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 36 * SUCH DAMAGE. 
 *
 * $DragonFly: src/sys/kern/subr_alist.c,v 1.3 2007/09/27 18:20:20 dillon Exp $
 */
/*
 * This module has been adapted from the BLIST module, which was written
 * by Matthew Dillon many years ago.
 *
 * This module implements a general power-of-2 bitmap allocator/deallocator.
 * All allocations must be in powers of 2 and will return similarly aligned
 * results.  The module does not try to interpret the meaning of a 'block'
 * other than to return ALIST_BLOCK_NONE on an allocation failure.
 *
 * A maximum of 2 billion blocks is supported so, for example, if one block
 * represented 64 bytes a maximally sized ALIST would represent
 * 128 gigabytes.
 *
 * A radix tree is used to maintain the bitmap and is laid out in a manner
 * similar to the blist code.  Meta nodes use a radix of 16 and 2 bits per
 * block while leaf nodes use a radix of 32 and 1 bit per block (stored in
 * a 32 bit bitmap field).  Both meta and leaf nodes have a hint field.
 * This field gives us a hint as to the largest free contiguous range of
 * blocks under the node.  It may contain a value that is too high, but
 * will never contain a value that is too low.  When the radix tree is
 * searched, allocation failures in subtrees update the hint.
 *
 * The radix tree is laid out recursively using a linear array.  Each meta
 * node is immediately followed (laid out sequentially in memory) by
 * ALIST_META_RADIX lower-level nodes.  This is a recursive structure but
 * one that can be easily scanned through a very simple 'skip' calculation.
 * In order to support large radixes, portions of the tree may reside
 * outside our memory allocation.  We handle this with an early-terminate
 * optimization in the meta-node.  The memory allocation is only large
 * enough to cover the number of blocks requested at creation time even if
 * it must be encompassed in a larger root-node radix.
 *
 * This code can be compiled stand-alone for debugging.
73 */ 74 75 #ifdef _KERNEL 76 77 #include <sys/param.h> 78 #include <sys/systm.h> 79 #include <sys/lock.h> 80 #include <sys/kernel.h> 81 #include <sys/alist.h> 82 #include <sys/malloc.h> 83 #include <vm/vm.h> 84 #include <vm/vm_object.h> 85 #include <vm/vm_kern.h> 86 #include <vm/vm_extern.h> 87 #include <vm/vm_page.h> 88 89 #else 90 91 #ifndef ALIST_NO_DEBUG 92 #define ALIST_DEBUG 93 #endif 94 95 #include <sys/types.h> 96 #include <stdio.h> 97 #include <assert.h> 98 #include <string.h> 99 #include <stdlib.h> 100 #include <stdarg.h> 101 102 #define kmalloc(a,b,c) malloc(a) 103 #define kfree(a,b) free(a) 104 #define kprintf printf 105 #define KKASSERT(exp) assert(exp) 106 struct malloc_type; 107 108 typedef unsigned int u_daddr_t; 109 110 #include <sys/alist.h> 111 112 void panic(const char *ctl, ...); 113 114 #endif 115 116 /* 117 * static support functions 118 */ 119 120 static daddr_t alst_leaf_alloc(almeta_t *scan, daddr_t blk, int count); 121 static daddr_t alst_meta_alloc(almeta_t *scan, daddr_t blk, 122 daddr_t count, daddr_t radix, int skip); 123 static void alst_leaf_free(almeta_t *scan, daddr_t relblk, int count); 124 static void alst_meta_free(almeta_t *scan, daddr_t freeBlk, daddr_t count, 125 daddr_t radix, int skip, daddr_t blk); 126 static daddr_t alst_radix_init(almeta_t *scan, daddr_t radix, 127 int skip, daddr_t count); 128 #ifndef _KERNEL 129 static void alst_radix_print(almeta_t *scan, daddr_t blk, 130 daddr_t radix, int skip, int tab); 131 #endif 132 133 /* 134 * alist_create() - create a alist capable of handling up to the specified 135 * number of blocks 136 * 137 * blocks must be greater then 0 138 * 139 * The smallest alist consists of a single leaf node capable of 140 * managing ALIST_BMAP_RADIX blocks. 141 */ 142 143 alist_t 144 alist_create(daddr_t blocks, struct malloc_type *mtype) 145 { 146 alist_t bl; 147 int radix; 148 int skip = 0; 149 150 /* 151 * Calculate radix and skip field used for scanning. 
152 */ 153 radix = ALIST_BMAP_RADIX; 154 155 while (radix < blocks) { 156 radix *= ALIST_META_RADIX; 157 skip = (skip + 1) * ALIST_META_RADIX; 158 } 159 160 bl = kmalloc(sizeof(struct alist), mtype, M_WAITOK); 161 162 bzero(bl, sizeof(*bl)); 163 164 bl->bl_blocks = blocks; 165 bl->bl_radix = radix; 166 bl->bl_skip = skip; 167 bl->bl_rootblks = 1 + 168 alst_radix_init(NULL, bl->bl_radix, bl->bl_skip, blocks); 169 bl->bl_root = kmalloc(sizeof(almeta_t) * bl->bl_rootblks, mtype, M_WAITOK); 170 171 #if defined(ALIST_DEBUG) 172 kprintf( 173 "ALIST representing %d blocks (%d MB of swap)" 174 ", requiring %dK (%d bytes) of ram\n", 175 bl->bl_blocks, 176 bl->bl_blocks * 4 / 1024, 177 (bl->bl_rootblks * sizeof(almeta_t) + 1023) / 1024, 178 (bl->bl_rootblks * sizeof(almeta_t)) 179 ); 180 kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks); 181 #endif 182 alst_radix_init(bl->bl_root, bl->bl_radix, bl->bl_skip, blocks); 183 184 return(bl); 185 } 186 187 void 188 alist_destroy(alist_t bl, struct malloc_type *mtype) 189 { 190 kfree(bl->bl_root, mtype); 191 kfree(bl, mtype); 192 } 193 194 /* 195 * alist_alloc() - reserve space in the block bitmap. Return the base 196 * of a contiguous region or ALIST_BLOCK_NONE if space 197 * could not be allocated. 198 */ 199 200 daddr_t 201 alist_alloc(alist_t bl, daddr_t count) 202 { 203 daddr_t blk = ALIST_BLOCK_NONE; 204 205 KKASSERT((count | (count - 1)) == (count << 1) - 1); 206 207 if (bl && count < bl->bl_radix) { 208 if (bl->bl_radix == ALIST_BMAP_RADIX) 209 blk = alst_leaf_alloc(bl->bl_root, 0, count); 210 else 211 blk = alst_meta_alloc(bl->bl_root, 0, count, bl->bl_radix, bl->bl_skip); 212 if (blk != ALIST_BLOCK_NONE) 213 bl->bl_free -= count; 214 } 215 return(blk); 216 } 217 218 /* 219 * alist_free() - free up space in the block bitmap. Return the base 220 * of a contiguous region. Panic if an inconsistancy is 221 * found. 
222 */ 223 224 void 225 alist_free(alist_t bl, daddr_t blkno, daddr_t count) 226 { 227 if (bl) { 228 KKASSERT(blkno + count <= bl->bl_blocks); 229 if (bl->bl_radix == ALIST_BMAP_RADIX) 230 alst_leaf_free(bl->bl_root, blkno, count); 231 else 232 alst_meta_free(bl->bl_root, blkno, count, bl->bl_radix, bl->bl_skip, 0); 233 bl->bl_free += count; 234 } 235 } 236 237 #ifdef ALIST_DEBUG 238 239 /* 240 * alist_print() - dump radix tree 241 */ 242 243 void 244 alist_print(alist_t bl) 245 { 246 kprintf("ALIST {\n"); 247 alst_radix_print(bl->bl_root, 0, bl->bl_radix, bl->bl_skip, 4); 248 kprintf("}\n"); 249 } 250 251 #endif 252 253 /************************************************************************ 254 * ALLOCATION SUPPORT FUNCTIONS * 255 ************************************************************************ 256 * 257 * These support functions do all the actual work. They may seem 258 * rather longish, but that's because I've commented them up. The 259 * actual code is straight forward. 260 * 261 */ 262 263 /* 264 * alist_leaf_alloc() - allocate at a leaf in the radix tree (a bitmap). 265 * 266 * This is the core of the allocator and is optimized for the 1 block 267 * and the ALIST_BMAP_RADIX block allocation cases. Other cases are 268 * somewhat slower. The 1 block allocation case is log2 and extremely 269 * quick. 270 */ 271 272 static daddr_t 273 alst_leaf_alloc( 274 almeta_t *scan, 275 daddr_t blk, 276 int count 277 ) { 278 u_daddr_t orig = scan->bm_bitmap; 279 280 /* 281 * Optimize bitmap all-allocated case. Also, count = 1 282 * case assumes at least 1 bit is free in the bitmap, so 283 * we have to take care of this case here. 
284 */ 285 if (orig == 0) { 286 scan->bm_bighint = 0; 287 return(ALIST_BLOCK_NONE); 288 } 289 290 /* 291 * Optimized code to allocate one bit out of the bitmap 292 */ 293 if (count == 1) { 294 u_daddr_t mask; 295 int j = ALIST_BMAP_RADIX/2; 296 int r = 0; 297 298 mask = (u_daddr_t)-1 >> (ALIST_BMAP_RADIX/2); 299 300 while (j) { 301 if ((orig & mask) == 0) { 302 r += j; 303 orig >>= j; 304 } 305 j >>= 1; 306 mask >>= j; 307 } 308 scan->bm_bitmap &= ~(1 << r); 309 return(blk + r); 310 } 311 312 /* 313 * non-optimized code to allocate N bits out of the bitmap. 314 * The more bits, the faster the code runs. It will run 315 * the slowest allocating 2 bits, but since there aren't any 316 * memory ops in the core loop (or shouldn't be, anyway), 317 * you probably won't notice the difference. 318 * 319 * Similar to the blist case, the alist code also requires 320 * allocations to be power-of-2 sized and aligned to the 321 * size of the allocation, which simplifies the algorithm. 322 */ 323 { 324 int j; 325 int n = ALIST_BMAP_RADIX - count; 326 u_daddr_t mask; 327 328 mask = (u_daddr_t)-1 >> n; 329 330 for (j = 0; j <= n; j += count) { 331 if ((orig & mask) == mask) { 332 scan->bm_bitmap &= ~mask; 333 return(blk + j); 334 } 335 mask = mask << count; 336 } 337 } 338 339 /* 340 * We couldn't allocate count in this subtree, update bighint. 341 */ 342 scan->bm_bighint = count - 1; 343 return(ALIST_BLOCK_NONE); 344 } 345 346 /* 347 * alist_meta_alloc() - allocate at a meta in the radix tree. 348 * 349 * Attempt to allocate at a meta node. If we can't, we update 350 * bighint and return a failure. Updating bighint optimize future 351 * calls that hit this node. We have to check for our collapse cases 352 * and we have a few optimizations strewn in as well. 
353 */ 354 355 static daddr_t 356 alst_meta_alloc( 357 almeta_t *scan, 358 daddr_t blk, 359 daddr_t count, 360 daddr_t radix, 361 int skip 362 ) { 363 int i; 364 u_daddr_t mask; 365 u_daddr_t pmask; 366 int next_skip = ((u_int)skip / ALIST_META_RADIX); 367 368 /* 369 * ALL-ALLOCATED special case 370 */ 371 if (scan->bm_bitmap == 0) { 372 scan->bm_bighint = 0; 373 return(ALIST_BLOCK_NONE); 374 } 375 376 radix /= ALIST_META_RADIX; 377 378 /* 379 * Radix now represents each bitmap entry for this meta node. If 380 * the number of blocks being allocated can be fully represented, 381 * we allocate directly out of this meta node. 382 * 383 * Meta node bitmaps use 2 bits per block. 384 * 385 * 00 ALL-ALLOCATED 386 * 01 PARTIALLY-FREE/PARTIALLY-ALLOCATED 387 * 10 (RESERVED) 388 * 11 ALL-FREE 389 */ 390 if (count >= radix) { 391 int n = count / radix * 2; /* number of bits */ 392 int j; 393 394 mask = (u_daddr_t)-1 >> (ALIST_BMAP_RADIX - n); 395 for (j = 0; j < ALIST_META_RADIX; j += n / 2) { 396 if ((scan->bm_bitmap & mask) == mask) { 397 scan->bm_bitmap &= ~mask; 398 return(blk + j * radix); 399 } 400 mask <<= n; 401 } 402 if (scan->bm_bighint >= count) 403 scan->bm_bighint = count >> 1; 404 return(ALIST_BLOCK_NONE); 405 } 406 407 /* 408 * If not we have to recurse. 
409 */ 410 mask = 0x00000003; 411 pmask = 0x00000001; 412 for (i = 1; i <= skip; i += next_skip) { 413 if (scan[i].bm_bighint == (daddr_t)-1) { 414 /* 415 * Terminator 416 */ 417 break; 418 } 419 if ((scan->bm_bitmap & mask) == mask) { 420 scan[i].bm_bitmap = (u_daddr_t)-1; 421 scan[i].bm_bighint = radix; 422 } 423 424 if (count <= scan[i].bm_bighint) { 425 /* 426 * count fits in object 427 */ 428 daddr_t r; 429 if (next_skip == 1) { 430 r = alst_leaf_alloc(&scan[i], blk, count); 431 } else { 432 r = alst_meta_alloc(&scan[i], blk, count, radix, next_skip - 1); 433 } 434 if (r != ALIST_BLOCK_NONE) { 435 if (scan[i].bm_bitmap == 0) { 436 scan->bm_bitmap &= ~mask; 437 } else { 438 scan->bm_bitmap &= ~mask; 439 scan->bm_bitmap |= pmask; 440 } 441 return(r); 442 } 443 } else if (count > radix) { 444 /* 445 * count does not fit in object even if it were 446 * completely free. 447 */ 448 break; 449 } 450 blk += radix; 451 mask <<= 2; 452 pmask <<= 2; 453 } 454 455 /* 456 * We couldn't allocate count in this subtree, update bighint. 457 */ 458 if (scan->bm_bighint >= count) 459 scan->bm_bighint = count >> 1; 460 return(ALIST_BLOCK_NONE); 461 } 462 463 /* 464 * BLST_LEAF_FREE() - free allocated block from leaf bitmap 465 * 466 */ 467 static void 468 alst_leaf_free( 469 almeta_t *scan, 470 daddr_t blk, 471 int count 472 ) { 473 /* 474 * free some data in this bitmap 475 * 476 * e.g. 477 * 0000111111111110000 478 * \_________/\__/ 479 * v n 480 */ 481 int n = blk & (ALIST_BMAP_RADIX - 1); 482 u_daddr_t mask; 483 484 mask = ((u_daddr_t)-1 << n) & 485 ((u_daddr_t)-1 >> (ALIST_BMAP_RADIX - count - n)); 486 487 if (scan->bm_bitmap & mask) 488 panic("alst_radix_free: freeing free block"); 489 scan->bm_bitmap |= mask; 490 491 /* 492 * We could probably do a better job here. We are required to make 493 * bighint at least as large as the biggest contiguous block of 494 * data. 
If we just shoehorn it, a little extra overhead will 495 * be incured on the next allocation (but only that one typically). 496 */ 497 scan->bm_bighint = ALIST_BMAP_RADIX; 498 } 499 500 /* 501 * BLST_META_FREE() - free allocated blocks from radix tree meta info 502 * 503 * This support routine frees a range of blocks from the bitmap. 504 * The range must be entirely enclosed by this radix node. If a 505 * meta node, we break the range down recursively to free blocks 506 * in subnodes (which means that this code can free an arbitrary 507 * range whereas the allocation code cannot allocate an arbitrary 508 * range). 509 */ 510 511 static void 512 alst_meta_free( 513 almeta_t *scan, 514 daddr_t freeBlk, 515 daddr_t count, 516 daddr_t radix, 517 int skip, 518 daddr_t blk 519 ) { 520 int next_skip = ((u_int)skip / ALIST_META_RADIX); 521 u_daddr_t mask; 522 u_daddr_t pmask; 523 int i; 524 525 /* 526 * Break the free down into its components. Because it is so easy 527 * to implement, frees are not limited to power-of-2 sizes. 528 * 529 * Each block in a meta-node bitmap takes two bits. 
530 */ 531 radix /= ALIST_META_RADIX; 532 533 i = (freeBlk - blk) / radix; 534 blk += i * radix; 535 mask = 0x00000003 << (i * 2); 536 pmask = 0x00000001 << (i * 2); 537 538 i = i * next_skip + 1; 539 540 while (i <= skip && blk < freeBlk + count) { 541 daddr_t v; 542 543 v = blk + radix - freeBlk; 544 if (v > count) 545 v = count; 546 547 if (scan->bm_bighint == (daddr_t)-1) 548 panic("alst_meta_free: freeing unexpected range"); 549 550 if (freeBlk == blk && count >= radix) { 551 /* 552 * All-free case, no need to update sub-tree 553 */ 554 scan->bm_bitmap |= mask; 555 scan->bm_bighint = radix * ALIST_META_RADIX;/*XXX*/ 556 } else { 557 /* 558 * Recursion case 559 */ 560 if (next_skip == 1) 561 alst_leaf_free(&scan[i], freeBlk, v); 562 else 563 alst_meta_free(&scan[i], freeBlk, v, radix, next_skip - 1, blk); 564 if (scan[i].bm_bitmap == (u_daddr_t)-1) 565 scan->bm_bitmap |= mask; 566 else 567 scan->bm_bitmap |= pmask; 568 if (scan->bm_bighint < scan[i].bm_bighint) 569 scan->bm_bighint = scan[i].bm_bighint; 570 } 571 mask <<= 2; 572 pmask <<= 2; 573 count -= v; 574 freeBlk += v; 575 blk += radix; 576 i += next_skip; 577 } 578 } 579 580 /* 581 * BLST_RADIX_INIT() - initialize radix tree 582 * 583 * Initialize our meta structures and bitmaps and calculate the exact 584 * amount of space required to manage 'count' blocks - this space may 585 * be considerably less then the calculated radix due to the large 586 * RADIX values we use. 587 */ 588 589 static daddr_t 590 alst_radix_init(almeta_t *scan, daddr_t radix, int skip, daddr_t count) 591 { 592 int i; 593 int next_skip; 594 daddr_t memindex = 0; 595 u_daddr_t mask; 596 u_daddr_t pmask; 597 598 /* 599 * Leaf node 600 */ 601 if (radix == ALIST_BMAP_RADIX) { 602 if (scan) { 603 scan->bm_bighint = 0; 604 scan->bm_bitmap = 0; 605 } 606 return(memindex); 607 } 608 609 /* 610 * Meta node. If allocating the entire object we can special 611 * case it. 
However, we need to figure out how much memory 612 * is required to manage 'count' blocks, so we continue on anyway. 613 */ 614 615 if (scan) { 616 scan->bm_bighint = 0; 617 scan->bm_bitmap = 0; 618 } 619 620 radix /= ALIST_META_RADIX; 621 next_skip = ((u_int)skip / ALIST_META_RADIX); 622 mask = 0x00000003; 623 pmask = 0x00000001; 624 625 for (i = 1; i <= skip; i += next_skip) { 626 if (count >= radix) { 627 /* 628 * Allocate the entire object 629 */ 630 memindex = i + alst_radix_init( 631 ((scan) ? &scan[i] : NULL), 632 radix, 633 next_skip - 1, 634 radix 635 ); 636 count -= radix; 637 /* already marked as wholely allocated */ 638 } else if (count > 0) { 639 /* 640 * Allocate a partial object 641 */ 642 memindex = i + alst_radix_init( 643 ((scan) ? &scan[i] : NULL), 644 radix, 645 next_skip - 1, 646 count 647 ); 648 count = 0; 649 650 /* 651 * Mark as partially allocated 652 */ 653 if (scan) 654 scan->bm_bitmap |= pmask; 655 } else { 656 /* 657 * Add terminator and break out 658 */ 659 if (scan) 660 scan[i].bm_bighint = (daddr_t)-1; 661 /* already marked as wholely allocated */ 662 break; 663 } 664 mask <<= 2; 665 pmask <<= 2; 666 } 667 if (memindex < i) 668 memindex = i; 669 return(memindex); 670 } 671 672 #ifdef ALIST_DEBUG 673 674 static void 675 alst_radix_print(almeta_t *scan, daddr_t blk, daddr_t radix, int skip, int tab) 676 { 677 int i; 678 int next_skip; 679 int lastState = 0; 680 u_daddr_t mask; 681 682 if (radix == ALIST_BMAP_RADIX) { 683 kprintf( 684 "%*.*s(%04x,%d): bitmap %08x big=%d\n", 685 tab, tab, "", 686 blk, radix, 687 scan->bm_bitmap, 688 scan->bm_bighint 689 ); 690 return; 691 } 692 693 if (scan->bm_bitmap == 0) { 694 kprintf( 695 "%*.*s(%04x,%d) ALL ALLOCATED\n", 696 tab, tab, "", 697 blk, 698 radix 699 ); 700 return; 701 } 702 if (scan->bm_bitmap == (u_daddr_t)-1) { 703 kprintf( 704 "%*.*s(%04x,%d) ALL FREE\n", 705 tab, tab, "", 706 blk, 707 radix 708 ); 709 return; 710 } 711 712 kprintf( 713 "%*.*s(%04x,%d): subtree (%d) bitmap=%08x big=%d 
{\n", 714 tab, tab, "", 715 blk, radix, 716 radix, 717 scan->bm_bitmap, 718 scan->bm_bighint 719 ); 720 721 radix /= ALIST_META_RADIX; 722 next_skip = ((u_int)skip / ALIST_META_RADIX); 723 tab += 4; 724 mask = 0x00000003; 725 726 for (i = 1; i <= skip; i += next_skip) { 727 if (scan[i].bm_bighint == (daddr_t)-1) { 728 kprintf( 729 "%*.*s(%04x,%d): Terminator\n", 730 tab, tab, "", 731 blk, radix 732 ); 733 lastState = 0; 734 break; 735 } 736 if ((scan->bm_bitmap & mask) == mask) { 737 kprintf( 738 "%*.*s(%04x,%d): ALL FREE\n", 739 tab, tab, "", 740 blk, radix 741 ); 742 } else if ((scan->bm_bitmap & mask) == 0) { 743 kprintf( 744 "%*.*s(%04x,%d): ALL ALLOCATED\n", 745 tab, tab, "", 746 blk, radix 747 ); 748 } else { 749 alst_radix_print( 750 &scan[i], 751 blk, 752 radix, 753 next_skip - 1, 754 tab 755 ); 756 } 757 blk += radix; 758 mask <<= 2; 759 } 760 tab -= 4; 761 762 kprintf( 763 "%*.*s}\n", 764 tab, tab, "" 765 ); 766 } 767 768 #endif 769 770 #ifdef ALIST_DEBUG 771 772 int 773 main(int ac, char **av) 774 { 775 int size = 1024; 776 int i; 777 alist_t bl; 778 779 for (i = 1; i < ac; ++i) { 780 const char *ptr = av[i]; 781 if (*ptr != '-') { 782 size = strtol(ptr, NULL, 0); 783 continue; 784 } 785 ptr += 2; 786 fprintf(stderr, "Bad option: %s\n", ptr - 2); 787 exit(1); 788 } 789 bl = alist_create(size, NULL); 790 alist_free(bl, 0, size); 791 792 for (;;) { 793 char buf[1024]; 794 daddr_t da = 0; 795 daddr_t count = 0; 796 797 798 kprintf("%d/%d/%d> ", bl->bl_free, size, bl->bl_radix); 799 fflush(stdout); 800 if (fgets(buf, sizeof(buf), stdin) == NULL) 801 break; 802 switch(buf[0]) { 803 case 'p': 804 alist_print(bl); 805 break; 806 case 'a': 807 if (sscanf(buf + 1, "%d", &count) == 1) { 808 daddr_t blk = alist_alloc(bl, count); 809 kprintf(" R=%04x\n", blk); 810 } else { 811 kprintf("?\n"); 812 } 813 break; 814 case 'f': 815 if (sscanf(buf + 1, "%x %d", &da, &count) == 2) { 816 alist_free(bl, da, count); 817 } else { 818 kprintf("?\n"); 819 } 820 break; 821 case 
'?': 822 case 'h': 823 puts( 824 "p -print\n" 825 "a %d -allocate\n" 826 "f %x %d -free\n" 827 "h/? -help" 828 ); 829 break; 830 default: 831 kprintf("?\n"); 832 break; 833 } 834 } 835 return(0); 836 } 837 838 void 839 panic(const char *ctl, ...) 840 { 841 __va_list va; 842 843 __va_start(va, ctl); 844 vfprintf(stderr, ctl, va); 845 fprintf(stderr, "\n"); 846 __va_end(va); 847 exit(1); 848 } 849 850 #endif 851 852