1 /* $NetBSD: zlib.c,v 1.18 2002/05/07 09:14:20 tron Exp $ */ 2 /* 3 * This file is derived from various .h and .c files from the zlib-1.0.4 4 * distribution by Jean-loup Gailly and Mark Adler, with some additions 5 * by Paul Mackerras to aid in implementing Deflate compression and 6 * decompression for PPP packets. See zlib.h for conditions of 7 * distribution and use. 8 * 9 * Changes that have been made include: 10 * - added Z_PACKET_FLUSH (see zlib.h for details) 11 * - added inflateIncomp and deflateOutputPending 12 * - allow strm->next_out to be NULL, meaning discard the output 13 * 14 * $Id: zlib.c,v 1.18 2002/05/07 09:14:20 tron Exp $ 15 */ 16 17 /* 18 * ==FILEVERSION 020312== 19 * 20 * This marker is used by the Linux installation script to determine 21 * whether an up-to-date version of this file is already installed. 22 */ 23 24 #include <sys/cdefs.h> 25 __KERNEL_RCSID(0, "$NetBSD: zlib.c,v 1.18 2002/05/07 09:14:20 tron Exp $"); 26 27 #define NO_DUMMY_DECL 28 #define NO_ZCFUNCS 29 #define MY_ZCALLOC 30 31 #if defined(__FreeBSD__) && (defined(KERNEL) || defined(_KERNEL)) 32 #define inflate inflate_ppp /* FreeBSD already has an inflate :-( */ 33 #endif 34 35 36 /* +++ zutil.h */ 37 38 /* zutil.h -- internal interface and configuration of the compression library 39 * Copyright (C) 1995-2002 Jean-loup Gailly. 40 * For conditions of distribution and use, see copyright notice in zlib.h 41 */ 42 43 /* WARNING: this file should *not* be used by applications. It is 44 part of the implementation of the compression library and is 45 subject to change. Applications should only use zlib.h. 
46 */ 47 48 /* @(#) $Id: zlib.c,v 1.18 2002/05/07 09:14:20 tron Exp $ */ 49 50 #ifndef _Z_UTIL_H 51 #define _Z_UTIL_H 52 53 #include "zlib.h" 54 55 #if defined(KERNEL) || defined(_KERNEL) 56 /* Assume this is a *BSD or SVR4 kernel */ 57 #include <sys/param.h> 58 #include <sys/time.h> 59 #include <sys/systm.h> 60 # define HAVE_MEMCPY 61 #else 62 #if defined(__KERNEL__) 63 /* Assume this is a Linux kernel */ 64 #include <linux/string.h> 65 #define HAVE_MEMCPY 66 67 #else /* not kernel */ 68 69 #if defined(__NetBSD__) && (defined(_KERNEL) || defined(_STANDALONE)) 70 71 /* XXX doesn't seem to need anything at all, but this is for consistency. */ 72 # include <lib/libkern/libkern.h> 73 74 #else 75 #ifdef STDC 76 # include <stddef.h> 77 # include <string.h> 78 # include <stdlib.h> 79 #endif 80 #ifdef NO_ERRNO_H 81 extern int errno; 82 #else 83 # include <errno.h> 84 #endif 85 #endif /* __NetBSD__ && _STANDALONE */ 86 #endif /* __KERNEL__ */ 87 #endif /* _KERNEL || KERNEL */ 88 89 90 #ifndef local 91 # define local static 92 #endif 93 /* compile with -Dlocal if your debugger can't find static symbols */ 94 95 typedef unsigned char uch; 96 typedef uch FAR uchf; 97 typedef unsigned short ush; 98 typedef ush FAR ushf; 99 typedef unsigned long ulg; 100 101 extern const char *z_errmsg[10]; /* indexed by 2-zlib_error */ 102 /* (size given to avoid silly warnings with Visual C++) */ 103 104 #define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)] 105 106 #define ERR_RETURN(strm,err) \ 107 return (strm->msg = (char*)ERR_MSG(err), (err)) 108 /* To be used only when the state is known to be valid */ 109 110 /* common constants */ 111 112 #ifndef DEF_WBITS 113 # define DEF_WBITS MAX_WBITS 114 #endif 115 /* default windowBits for decompression. 
MAX_WBITS is for compression only */ 116 117 #if MAX_MEM_LEVEL >= 8 118 # define DEF_MEM_LEVEL 8 119 #else 120 # define DEF_MEM_LEVEL MAX_MEM_LEVEL 121 #endif 122 /* default memLevel */ 123 124 #define STORED_BLOCK 0 125 #define STATIC_TREES 1 126 #define DYN_TREES 2 127 /* The three kinds of block type */ 128 129 #define MIN_MATCH 3 130 #define MAX_MATCH 258 131 /* The minimum and maximum match lengths */ 132 133 #define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */ 134 135 /* target dependencies */ 136 137 #ifdef MSDOS 138 # define OS_CODE 0x00 139 # if defined(__TURBOC__) || defined(__BORLANDC__) 140 # if(__STDC__ == 1) && (defined(__LARGE__) || defined(__COMPACT__)) 141 /* Allow compilation with ANSI keywords only enabled */ 142 void _Cdecl farfree( void *block ); 143 void *_Cdecl farmalloc( unsigned long nbytes ); 144 # else 145 # include <alloc.h> 146 # endif 147 # else /* MSC or DJGPP */ 148 # include <malloc.h> 149 # endif 150 #endif 151 152 #ifdef OS2 153 # define OS_CODE 0x06 154 #endif 155 156 #ifdef WIN32 /* Window 95 & Windows NT */ 157 # define OS_CODE 0x0b 158 #endif 159 160 #if defined(VAXC) || defined(VMS) 161 # define OS_CODE 0x02 162 # define F_OPEN(name, mode) \ 163 fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512") 164 #endif 165 166 #ifdef AMIGA 167 # define OS_CODE 0x01 168 #endif 169 170 #if defined(ATARI) || defined(atarist) 171 # define OS_CODE 0x05 172 #endif 173 174 #if defined(MACOS) || defined(TARGET_OS_MAC) 175 # define OS_CODE 0x07 176 # if defined(__MWERKS__) && __dest_os != __be_os && __dest_os != __win32_os 177 # include <unix.h> /* for fdopen */ 178 # else 179 # ifndef fdopen 180 # define fdopen(fd,mode) NULL /* No fdopen() */ 181 # endif 182 # endif 183 #endif 184 185 #ifdef __50SERIES /* Prime/PRIMOS */ 186 # define OS_CODE 0x0F 187 #endif 188 189 #ifdef TOPS20 190 # define OS_CODE 0x0a 191 #endif 192 193 #if defined(_BEOS_) || defined(RISCOS) 194 # define fdopen(fd,mode) NULL /* No fdopen() */ 195 
#endif 196 197 #if (defined(_MSC_VER) && (_MSC_VER > 600)) 198 # define fdopen(fd,type) _fdopen(fd,type) 199 #endif 200 201 202 /* Common defaults */ 203 204 #ifndef OS_CODE 205 # define OS_CODE 0x03 /* assume Unix */ 206 #endif 207 208 #ifndef F_OPEN 209 # define F_OPEN(name, mode) fopen((name), (mode)) 210 #endif 211 212 /* functions */ 213 214 #ifdef HAVE_STRERROR 215 extern char *strerror __P((int)); 216 # define zstrerror(errnum) strerror(errnum) 217 #else 218 # define zstrerror(errnum) "" 219 #endif 220 221 #if defined(pyr) 222 # define NO_MEMCPY 223 #endif 224 #if defined(SMALL_MEDIUM) && !defined(_MSC_VER) && !defined(__SC__) 225 /* Use our own functions for small and medium model with MSC <= 5.0. 226 * You may have to use the same strategy for Borland C (untested). 227 * The __SC__ check is for Symantec. 228 */ 229 # define NO_MEMCPY 230 #endif 231 #if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY) 232 # define HAVE_MEMCPY 233 #endif 234 #ifdef HAVE_MEMCPY 235 # ifdef SMALL_MEDIUM /* MSDOS small or medium model */ 236 # define zmemcpy _fmemcpy 237 # define zmemcmp _fmemcmp 238 # define zmemzero(dest, len) _fmemset(dest, 0, len) 239 # else 240 # define zmemcpy memcpy 241 # define zmemcmp memcmp 242 # define zmemzero(dest, len) memset(dest, 0, len) 243 # endif 244 #else 245 extern void zmemcpy __P((Bytef* dest, const Bytef* source, uInt len)); 246 extern int zmemcmp __P((const Bytef* s1, const Bytef* s2, uInt len)); 247 extern void zmemzero __P((Bytef* dest, uInt len)); 248 #endif 249 250 /* Diagnostic functions */ 251 #if defined(DEBUG_ZLIB) && !defined(_KERNEL) && !defined(_STANDALONE) 252 # include <stdio.h> 253 extern int z_verbose; 254 extern void z_error __P((char *m)); 255 # define Assert(cond,msg) {if(!(cond)) z_error(msg);} 256 # define Trace(x) {if (z_verbose>=0) fprintf x ;} 257 # define Tracev(x) {if (z_verbose>0) fprintf x ;} 258 # define Tracevv(x) {if (z_verbose>1) fprintf x ;} 259 # define Tracec(c,x) {if (z_verbose>0 && (c)) 
fprintf x ;} 260 # define Tracecv(c,x) {if (z_verbose>1 && (c)) fprintf x ;} 261 #else 262 # define Assert(cond,msg) 263 # define Trace(x) 264 # define Tracev(x) 265 # define Tracevv(x) 266 # define Tracec(c,x) 267 # define Tracecv(c,x) 268 #endif 269 270 271 typedef uLong (ZEXPORT *check_func) __P((uLong check, const Bytef *buf, 272 uInt len)); 273 voidpf zcalloc __P((voidpf opaque, unsigned items, unsigned size)); 274 void zcfree __P((voidpf opaque, voidpf ptr)); 275 276 #define ZALLOC(strm, items, size) \ 277 (*((strm)->zalloc))((strm)->opaque, (items), (size)) 278 #define ZFREE(strm, addr) (*((strm)->zfree))((strm)->opaque, (voidpf)(addr)) 279 #define TRY_FREE(s, p) {if (p) ZFREE(s, p);} 280 281 #endif /* _Z_UTIL_H */ 282 /* --- zutil.h */ 283 284 /* +++ deflate.h */ 285 286 /* deflate.h -- internal compression state 287 * Copyright (C) 1995-2002 Jean-loup Gailly 288 * For conditions of distribution and use, see copyright notice in zlib.h 289 */ 290 291 /* WARNING: this file should *not* be used by applications. It is 292 part of the implementation of the compression library and is 293 subject to change. Applications should only use zlib.h. 294 */ 295 296 /* @(#) $Id: zlib.c,v 1.18 2002/05/07 09:14:20 tron Exp $ */ 297 298 #ifndef _DEFLATE_H 299 #define _DEFLATE_H 300 301 /* #include "zutil.h" */ 302 303 /* =========================================================================== 304 * Internal compression state. 
 */

#define LENGTH_CODES 29
/* number of length codes, not counting the special END_BLOCK code */

#define LITERALS  256
/* number of literal bytes 0..255 */

#define L_CODES (LITERALS+1+LENGTH_CODES)
/* number of Literal or Length codes, including the END_BLOCK code */

#define D_CODES   30
/* number of distance codes */

#define BL_CODES  19
/* number of codes used to transfer the bit lengths */

#define HEAP_SIZE (2*L_CODES+1)
/* maximum heap size */

#define MAX_BITS 15
/* All codes must not exceed MAX_BITS bits */

#define INIT_STATE    42
#define BUSY_STATE   113
#define FINISH_STATE 666
/* Stream status */


/* Data structure describing a single value and its code string. */
typedef struct ct_data_s {
    union {
        ush  freq;       /* frequency count */
        ush  code;       /* bit string */
    } fc;
    union {
        ush  dad;        /* father node in Huffman tree */
        ush  len;        /* length of bit string */
    } dl;
} FAR ct_data;

/* Shorthand accessors for the two anonymous unions of ct_data. */
#define Freq fc.freq
#define Code fc.code
#define Dad  dl.dad
#define Len  dl.len

typedef struct static_tree_desc_s  static_tree_desc;

typedef struct tree_desc_s {
    ct_data *dyn_tree;           /* the dynamic tree */
    int     max_code;            /* largest code with non zero frequency */
    static_tree_desc *stat_desc; /* the corresponding static tree */
} FAR tree_desc;

typedef ush Pos;
typedef Pos FAR Posf;
typedef unsigned IPos;

/* A Pos is an index in the character window. We use short instead of int to
 * save space in the various tables. IPos is used only for parameter passing.
 */

/* Per-stream compression state, allocated by deflateInit2_() and hung off
 * z_stream.state.  All fields below are internal to deflate.c/trees.c.
 */
typedef struct deflate_state {
    z_streamp strm;      /* pointer back to this zlib stream */
    int   status;        /* as the name implies */
    Bytef *pending_buf;  /* output still pending */
    ulg   pending_buf_size; /* size of pending_buf */
    Bytef *pending_out;  /* next pending byte to output to the stream */
    int   pending;       /* nb of bytes in the pending buffer */
    int   noheader;      /* suppress zlib header and adler32 */
    Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
    Byte  method;        /* STORED (for zip only) or DEFLATED */
    int   last_flush;    /* value of flush param for previous deflate call */

                /* used by deflate.c: */

    uInt  w_size;        /* LZ77 window size (32K by default) */
    uInt  w_bits;        /* log2(w_size)  (8..16) */
    uInt  w_mask;        /* w_size - 1 */

    Bytef *window;
    /* Sliding window. Input bytes are read into the second half of the window,
     * and move to the first half later to keep a dictionary of at least wSize
     * bytes. With this organization, matches are limited to a distance of
     * wSize-MAX_MATCH bytes, but this ensures that IO is always
     * performed with a length multiple of the block size. Also, it limits
     * the window size to 64K, which is quite useful on MSDOS.
     * To do: use the user input buffer as sliding window.
     */

    ulg window_size;
    /* Actual size of window: 2*wSize, except when the user input buffer
     * is directly used as sliding window.
     */

    Posf *prev;
    /* Link to older string with same hash index. To limit the size of this
     * array to 64K, this link is maintained only for the last 32K strings.
     * An index in this array is thus a window index modulo 32K.
     */

    Posf *head; /* Heads of the hash chains or NIL. */

    uInt  ins_h;          /* hash index of string to be inserted */
    uInt  hash_size;      /* number of elements in hash table */
    uInt  hash_bits;      /* log2(hash_size) */
    uInt  hash_mask;      /* hash_size-1 */

    uInt  hash_shift;
    /* Number of bits by which ins_h must be shifted at each input
     * step. It must be such that after MIN_MATCH steps, the oldest
     * byte no longer takes part in the hash key, that is:
     *   hash_shift * MIN_MATCH >= hash_bits
     */

    long block_start;
    /* Window position at the beginning of the current output block. Gets
     * negative when the window is moved backwards.
     */

    uInt match_length;           /* length of best match */
    IPos prev_match;             /* previous match */
    int match_available;         /* set if previous match exists */
    uInt strstart;               /* start of string to insert */
    uInt match_start;            /* start of matching string */
    uInt lookahead;              /* number of valid bytes ahead in window */

    uInt prev_length;
    /* Length of the best match at previous step. Matches not greater than this
     * are discarded. This is used in the lazy match evaluation.
     */

    uInt max_chain_length;
    /* To speed up deflation, hash chains are never searched beyond this
     * length.  A higher limit improves compression ratio but degrades the
     * speed.
     */

    uInt max_lazy_match;
    /* Attempt to find a better match only when the current match is strictly
     * smaller than this value. This mechanism is used only for compression
     * levels >= 4.
     */
#   define max_insert_length  max_lazy_match
    /* Insert new strings in the hash table only if the match length is not
     * greater than this length. This saves time but degrades compression.
     * max_insert_length is used only for compression levels <= 3.
     */

    int level;    /* compression level (1..9) */
    int strategy; /* favor or force Huffman coding*/

    uInt good_match;
    /* Use a faster search when the previous match is longer than this */

    int nice_match; /* Stop searching when current match exceeds this */

                /* used by trees.c: */
    /* Didn't use ct_data typedef below to suppress compiler warning */
    struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
    struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
    struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */

    struct tree_desc_s l_desc;               /* desc. for literal tree */
    struct tree_desc_s d_desc;               /* desc. for distance tree */
    struct tree_desc_s bl_desc;              /* desc. for bit length tree */

    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
    int heap_len;               /* number of elements in the heap */
    int heap_max;               /* element of largest frequency */
    /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
     * The same heap array is used to build all trees.
     */

    uch depth[2*L_CODES+1];
    /* Depth of each subtree used as tie breaker for trees of equal frequency
     */

    uchf *l_buf;          /* buffer for literals or lengths */

    uInt  lit_bufsize;
    /* Size of match buffer for literals/lengths.  There are 4 reasons for
     * limiting lit_bufsize to 64K:
     *   - frequencies can be kept in 16 bit counters
     *   - if compression is not successful for the first block, all input
     *     data is still in the window so we can still emit a stored block even
     *     when input comes from standard input.  (This can also be done for
     *     all blocks if lit_bufsize is not greater than 32K.)
     *   - if compression is not successful for a file smaller than 64K, we can
     *     even emit a stored file instead of a stored block (saving 5 bytes).
     *     This is applicable only for zip (not gzip or zlib).
     *   - creating new Huffman trees less frequently may not provide fast
     *     adaptation to changes in the input data statistics. (Take for
     *     example a binary file with poorly compressible code followed by
     *     a highly compressible string table.) Smaller buffer sizes give
     *     fast adaptation but have of course the overhead of transmitting
     *     trees more frequently.
     *   - I can't count above 4
     */

    uInt last_lit;      /* running index in l_buf */

    ushf *d_buf;
    /* Buffer for distances. To simplify the code, d_buf and l_buf have
     * the same number of elements. To use different lengths, an extra flag
     * array would be necessary.
     */

    ulg opt_len;        /* bit length of current block with optimal trees */
    ulg static_len;     /* bit length of current block with static trees */
    uInt matches;       /* number of string matches in current block */
    int last_eob_len;   /* bit length of EOB code for last block */

#ifdef DEBUG_ZLIB
    ulg compressed_len; /* total bit length of compressed file mod 2^32 */
    ulg bits_sent;      /* bit length of compressed data sent mod 2^32 */
#endif

    ush bi_buf;
    /* Output buffer. bits are inserted starting at the bottom (least
     * significant bits).
     */
    int bi_valid;
    /* Number of valid bits in bi_buf.  All bits above the last valid bit
     * are always zero.
     */

} FAR deflate_state;

/* Output a byte on the stream.
 * IN assertion: there is enough room in pending_buf.
 */
#define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}


#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

#define MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
/* In order to simplify the code, particularly on 16 bit machines, match
 * distances are limited to MAX_DIST instead of WSIZE.
 */

        /* in trees.c */
void _tr_init         __P((deflate_state *s));
int  _tr_tally        __P((deflate_state *s, unsigned dist, unsigned lc));
void _tr_flush_block  __P((deflate_state *s, charf *buf, ulg stored_len,
			   int eof));
void _tr_align        __P((deflate_state *s));
void _tr_stored_block __P((deflate_state *s, charf *buf, ulg stored_len,
			   int eof));
void _tr_stored_type_only __P((deflate_state *));

#define d_code(dist) \
   ((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. _dist_code[256] and _dist_code[257] are never
 * used.
 */

#ifndef DEBUG_ZLIB
/* Inline versions of _tr_tally for speed: */

#if defined(GEN_TREES_H) || !defined(STDC)
  extern uch _length_code[];
  extern uch _dist_code[];
#else
  extern const uch _length_code[];
  extern const uch _dist_code[];
#endif

/* Record a literal byte; sets flush when the literal buffer is full. */
# define _tr_tally_lit(s, c, flush) \
  { uch cc = (c); \
    s->d_buf[s->last_lit] = 0; \
    s->l_buf[s->last_lit++] = cc; \
    s->dyn_ltree[cc].Freq++; \
    flush = (s->last_lit == s->lit_bufsize-1); \
  }
/* Record a (distance, length) match pair and update both frequency trees. */
# define _tr_tally_dist(s, distance, length, flush) \
  { uch len = (length); \
    ush dist = (distance); \
    s->d_buf[s->last_lit] = dist; \
    s->l_buf[s->last_lit++] = len; \
    dist--; \
    s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
    s->dyn_dtree[d_code(dist)].Freq++; \
    flush = (s->last_lit == s->lit_bufsize-1); \
  }
#else
# define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
# define _tr_tally_dist(s, distance, length, flush) \
              flush = _tr_tally(s, distance, length)
#endif

#endif
/* --- deflate.h */

/* +++ deflate.c */

/* deflate.c -- compress data using the deflation algorithm
 * Copyright (C) 1995-2002 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 *  ALGORITHM
 *
 *      The "deflation" process depends on being able to identify portions
 *      of the input text which are identical to earlier input (within a
 *      sliding window trailing behind the input currently being processed).
 *
 *      The most straightforward technique turns out to be the fastest for
 *      most input files: try all possible matches and select the longest.
 *      The key feature of this algorithm is that insertions into the string
 *      dictionary are very simple and thus fast, and deletions are avoided
 *      completely. Insertions are performed at each input character, whereas
 *      string matches are performed only when the previous match ends. So it
 *      is preferable to spend more time in matches to allow very fast string
 *      insertions and avoid deletions. The matching algorithm for small
 *      strings is inspired from that of Rabin & Karp. A brute force approach
 *      is used to find longer strings when a small match has been found.
 *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
 *      (by Leonid Broukhis).
 *         A previous version of this file used a more sophisticated algorithm
 *      (by Fiala and Greene) which is guaranteed to run in linear amortized
 *      time, but has a larger average cost, uses more memory and is patented.
 *      However the F&G algorithm may be faster for some highly redundant
 *      files if the parameter max_chain_length (described below) is too large.
 *
 *  ACKNOWLEDGEMENTS
 *
 *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
 *      I found it in 'freeze' written by Leonid Broukhis.
 *      Thanks to many people for bug reports and testing.
 *
 *  REFERENCES
 *
 *      Deutsch, L.P.,"DEFLATE Compressed Data Format Specification".
 *      Available in ftp://ds.internic.net/rfc/rfc1951.txt
 *
 *      A description of the Rabin and Karp algorithm is given in the book
 *         "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
 *
 *      Fiala,E.R., and Greene,D.H.
 *         Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-595
 *
 */

/* @(#) $Id: zlib.c,v 1.18 2002/05/07 09:14:20 tron Exp $ */

/* #include "deflate.h" */

const char deflate_copyright[] =
   " deflate 1.1.4 Copyright 1995-2002 Jean-loup Gailly ";
/*
  If you use the zlib library in a product, an acknowledgment is welcome
  in the documentation of your product. If for some reason you cannot
  include such an acknowledgment, I would appreciate that you keep this
  copyright string in the executable of your product.
 */

/* ===========================================================================
 *  Function prototypes.
 */
typedef enum {
    need_more,      /* block not completed, need more input or more output */
    block_done,     /* block flush performed */
    finish_started, /* finish started, need only more output at next deflate */
    finish_done     /* finish done, accept no more input or output */
} block_state;

typedef block_state (*compress_func) __P((deflate_state *s, int flush));
/* Compression function. Returns the block state after the call. */

local void fill_window    __P((deflate_state *s));
local block_state deflate_stored __P((deflate_state *s, int flush));
local block_state deflate_fast   __P((deflate_state *s, int flush));
local block_state deflate_slow   __P((deflate_state *s, int flush));
local void lm_init        __P((deflate_state *s));
local void putShortMSB    __P((deflate_state *s, uInt b));
local void flush_pending  __P((z_streamp strm));
local int read_buf        __P((z_streamp strm, Bytef *buf, unsigned size));
#ifdef ASMV
      void match_init __P((void)); /* asm code initialization */
      uInt longest_match  __P((deflate_state *s, IPos cur_match));
#else
local uInt longest_match  __P((deflate_state *s, IPos cur_match));
#endif

#ifdef DEBUG_ZLIB
local  void check_match __P((deflate_state *s, IPos start, IPos match,
                             int length));
#endif

/* ===========================================================================
 * Local data
 */

#define NIL 0
/* Tail of hash chains */

#ifndef TOO_FAR
#  define TOO_FAR 4096
#endif
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */

#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

/* Values for max_lazy_match, good_match and max_chain_length, depending on
 * the desired pack level (0..9). The values given below have been tuned to
 * exclude worst case performance for pathological files. Better values may be
 * found for specific files.
 */
typedef struct config_s {
   ush good_length; /* reduce lazy search above this match length */
   ush max_lazy;    /* do not perform lazy search above this match length */
   ush nice_length; /* quit search above this match length */
   ush max_chain;
   compress_func func;
} config;

/* One entry per compression level 0..9; indexed by s->level. */
local const config configuration_table[10] = {
/*      good lazy nice chain */
/* 0 */ {0,    0,  0,    0, deflate_stored},  /* store only */
/* 1 */ {4,    4,  8,    4, deflate_fast}, /* maximum speed, no lazy matches */
/* 2 */ {4,    5, 16,    8, deflate_fast},
/* 3 */ {4,    6, 32,   32, deflate_fast},

/* 4 */ {4,    4, 16,   16, deflate_slow},  /* lazy matches */
/* 5 */ {8,   16, 32,   32, deflate_slow},
/* 6 */ {8,   16, 128, 128, deflate_slow},
/* 7 */ {8,   32, 128, 256, deflate_slow},
/* 8 */ {32, 128, 258, 1024, deflate_slow},
/* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* maximum compression */

/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
 * meaning.
 */

#define EQUAL 0
/* result of memcmp for equal strings */

#ifndef NO_DUMMY_DECL
struct static_tree_desc_s {int dummy;}; /* for buggy compilers */
#endif

/* ===========================================================================
 * Update a hash value with the given input byte
 * IN  assertion: all calls to UPDATE_HASH are made with consecutive
 *    input characters, so that a running hash key can be computed from the
 *    previous key instead of complete recalculation each time.
 */
#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)


/* ===========================================================================
 * Insert string str in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * If this file is compiled with -DFASTEST, the compression level is forced
 * to 1, and no hash chains are maintained.
 * IN  assertion: all calls to INSERT_STRING are made with consecutive
 *    input characters and the first MIN_MATCH bytes of str are valid
 *    (except for the last MIN_MATCH-1 bytes of the input file).
 */
#ifdef FASTEST
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    match_head = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))
#else
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))
#endif

/* ===========================================================================
 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
 * prev[] will be initialized on the fly.
794 */ 795 #define CLEAR_HASH(s) \ 796 s->head[s->hash_size-1] = NIL; \ 797 zmemzero((Bytef *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head)); 798 799 /* ========================================================================= */ 800 int ZEXPORT deflateInit_(strm, level, version, stream_size) 801 z_streamp strm; 802 int level; 803 const char *version; 804 int stream_size; 805 { 806 return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, 807 Z_DEFAULT_STRATEGY, version, stream_size); 808 /* To do: ignore strm->next_in if we use it as window */ 809 } 810 811 /* ========================================================================= */ 812 int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy, 813 version, stream_size) 814 z_streamp strm; 815 int level; 816 int method; 817 int windowBits; 818 int memLevel; 819 int strategy; 820 const char *version; 821 int stream_size; 822 { 823 deflate_state *s; 824 int noheader = 0; 825 static const char* my_version = ZLIB_VERSION; 826 827 ushf *overlay; 828 /* We overlay pending_buf and d_buf+l_buf. This works since the average 829 * output size for (length,distance) codes is <= 24 bits. 
830 */ 831 832 if (version == Z_NULL || version[0] != my_version[0] || 833 stream_size != sizeof(z_stream)) { 834 return Z_VERSION_ERROR; 835 } 836 if (strm == Z_NULL) return Z_STREAM_ERROR; 837 838 strm->msg = Z_NULL; 839 #ifndef NO_ZCFUNCS 840 if (strm->zalloc == Z_NULL) { 841 strm->zalloc = zcalloc; 842 strm->opaque = (voidpf)0; 843 } 844 if (strm->zfree == Z_NULL) strm->zfree = zcfree; 845 #endif 846 847 if (level == Z_DEFAULT_COMPRESSION) level = 6; 848 #ifdef FASTEST 849 level = 1; 850 #endif 851 852 if (windowBits < 0) { /* undocumented feature: suppress zlib header */ 853 noheader = 1; 854 windowBits = -windowBits; 855 } 856 if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED || 857 windowBits < 9 || windowBits > 15 || level < 0 || level > 9 || 858 strategy < 0 || strategy > Z_HUFFMAN_ONLY) { 859 return Z_STREAM_ERROR; 860 } 861 s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state)); 862 if (s == Z_NULL) return Z_MEM_ERROR; 863 strm->state = (struct internal_state FAR *)s; 864 s->strm = strm; 865 866 s->noheader = noheader; 867 s->w_bits = windowBits; 868 s->w_size = 1 << s->w_bits; 869 s->w_mask = s->w_size - 1; 870 871 s->hash_bits = memLevel + 7; 872 s->hash_size = 1 << s->hash_bits; 873 s->hash_mask = s->hash_size - 1; 874 s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH); 875 876 s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte)); 877 s->prev = (Posf *) ZALLOC(strm, s->w_size, sizeof(Pos)); 878 s->head = (Posf *) ZALLOC(strm, s->hash_size, sizeof(Pos)); 879 880 s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */ 881 882 overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2); 883 s->pending_buf = (uchf *) overlay; 884 s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L); 885 886 if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL || 887 s->pending_buf == Z_NULL) { 888 strm->msg = (char*)ERR_MSG(Z_MEM_ERROR); 889 s->status = INIT_STATE; 890 deflateEnd (strm); 891 return 
            Z_MEM_ERROR;
    }
    /* The overlay trick: pending_buf doubles as the match buffers.
     * d_buf holds the distances (ush) and l_buf the literal/length bytes.
     */
    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;

    s->level = level;
    s->strategy = strategy;
    s->method = (Byte)method;

    return deflateReset(strm);
}

/* ========================================================================= */
/* Preload the compression dictionary.  Only valid directly after
 * deflateInit*() (status must still be INIT_STATE).  At most MAX_DIST(s)
 * bytes are kept -- the tail of the dictionary unless USE_DICT_HEAD is
 * defined.  Note that strm->adler is updated over the whole dictLength,
 * even when only the tail is actually used, so that the decompressor can
 * verify the dictionary id.
 */
int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength)
    z_streamp strm;
    const Bytef *dictionary;
    uInt  dictLength;
{
    deflate_state *s;
    uInt length = dictLength;
    uInt n;
    IPos hash_head = 0;

    if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL)
        return Z_STREAM_ERROR;

    s = (deflate_state *)strm->state;
    if (s->status != INIT_STATE) return Z_STREAM_ERROR;

    strm->adler = adler32(strm->adler, dictionary, dictLength);

    if (length < MIN_MATCH) return Z_OK;
    if (length > MAX_DIST(s)) {
        length = MAX_DIST(s);
#ifndef USE_DICT_HEAD
        dictionary += dictLength - length; /* use the tail of the dictionary */
#endif
    }
    zmemcpy(s->window, dictionary, length);
    s->strstart = length;
    s->block_start = (long)length;

    /* Insert all strings in the hash table (except for the last two bytes).
     * s->lookahead stays null, so s->ins_h will be recomputed at the next
     * call of fill_window.
     */
    s->ins_h = s->window[0];
    UPDATE_HASH(s, s->ins_h, s->window[1]);
    for (n = 0; n <= length - MIN_MATCH; n++) {
        INSERT_STRING(s, n, hash_head);
    }
    if (hash_head) hash_head = 0; /* to make compiler happy */
    return Z_OK;
}

/* ========================================================================= */
/* Reset the stream so a new run of input can be compressed with the same
 * allocated state: zero the totals, reset the pending buffer, restore the
 * header status (noheader == -1 marks "trailer already written" and is
 * cleared here), and reinitialize the tree and longest-match machinery.
 */
int ZEXPORT deflateReset (strm)
    z_streamp strm;
{
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
        strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;

    strm->total_in = strm->total_out = 0;
    strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
    strm->data_type = Z_UNKNOWN;

    s = (deflate_state *)strm->state;
    s->pending = 0;
    s->pending_out = s->pending_buf;

    if (s->noheader < 0) {
        s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
    }
    s->status = s->noheader ? BUSY_STATE : INIT_STATE;
    strm->adler = 1;
    s->last_flush = Z_NO_FLUSH;

    _tr_init(s);
    lm_init(s);

    return Z_OK;
}

/* ========================================================================= */
/* Change the compression level and strategy on the fly.  If the change
 * would switch compression functions mid-stream, the input consumed so far
 * is first flushed with Z_PARTIAL_FLUSH (whose return value is propagated
 * as this function's result).
 */
int ZEXPORT deflateParams(strm, level, strategy)
    z_streamp strm;
    int level;
    int strategy;
{
    deflate_state *s;
    compress_func func;
    int err = Z_OK;

    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
    s = (deflate_state *)strm->state;

    if (level == Z_DEFAULT_COMPRESSION) {
        level = 6;
    }
    if (level < 0 || level > 9 || strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    func = configuration_table[s->level].func;

    if (func != configuration_table[level].func && strm->total_in != 0) {
        /* Flush the last buffer: */
        err = deflate(strm, Z_PARTIAL_FLUSH);
    }
    if (s->level != level) {
        s->level = level;
        s->max_lazy_match   = configuration_table[level].max_lazy;
        s->good_match       = configuration_table[level].good_length;
        s->nice_match       = configuration_table[level].nice_length;
        s->max_chain_length = configuration_table[level].max_chain;
    }
    s->strategy = strategy;
    return err;
}

/* =========================================================================
 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
 * IN assertion: the stream state is correct and there is enough room in
 * pending_buf.
 */
local void putShortMSB (s, b)
    deflate_state *s;
    uInt b;
{
    put_byte(s, (Byte)(b >> 8));
    put_byte(s, (Byte)(b & 0xff));
}

/* =========================================================================
 * Flush as much pending output as possible. All deflate() output goes
 * through this function so some applications may wish to modify it
 * to avoid allocating a large strm->next_out buffer and copying into it.
 * (See also read_buf()).
 * Local change: strm->next_out may be Z_NULL, in which case the output is
 * counted and discarded rather than copied.
 */
local void flush_pending(strm)
    z_streamp strm;
{
    deflate_state *s = (deflate_state *) strm->state;
    unsigned len = s->pending;

    if (len > strm->avail_out) len = strm->avail_out;
    if (len == 0) return;

    if (strm->next_out != Z_NULL) {
        zmemcpy(strm->next_out, s->pending_out, len);
        strm->next_out += len;
    }
    s->pending_out += len;
    strm->total_out += len;
    strm->avail_out -= len;
    s->pending -= len;
    if (s->pending == 0) {
        s->pending_out = s->pending_buf;
    }
}

/* ========================================================================= */
/* Main compression entry point: emit the zlib header on the first call,
 * drain pending output, run the level-selected compression function, and
 * handle the various flush modes (including the local Z_PACKET_FLUSH
 * addition).  On Z_FINISH, the adler32 trailer is written exactly once
 * (noheader is set to -1 to remember that).
 */
int ZEXPORT deflate (strm, flush)
    z_streamp strm;
    int flush;
{
    int old_flush; /* value of flush param for previous deflate call */
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
        flush > Z_FINISH || flush < 0) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state *)strm->state;

    if ((strm->next_in == Z_NULL && strm->avail_in != 0) ||
        (s->status == FINISH_STATE && flush != Z_FINISH)) {
        ERR_RETURN(strm, Z_STREAM_ERROR);
    }
    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);

    s->strm = strm; /* just in case */
    old_flush = s->last_flush;
    s->last_flush = flush;

    /* Write the zlib header */
    if (s->status == INIT_STATE) {

        uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
        uInt level_flags = (s->level-1) >> 1;

        if (level_flags > 3) level_flags = 3;
        header |= (level_flags << 6);
        if (s->strstart != 0) header |= PRESET_DICT;
        /* Make the header a multiple of 31 (the FCHECK field): */
        header += 31 - (header % 31);

        s->status = BUSY_STATE;
        putShortMSB(s, header);

        /* Save the adler32 of the preset dictionary: */
        if (s->strstart != 0) {
            putShortMSB(s, (uInt)(strm->adler >> 16));
            putShortMSB(s, (uInt)(strm->adler & 0xffff));
        }
        strm->adler = 1L;
    }

    /* Flush as much pending output as possible */
    if (s->pending != 0) {
        flush_pending(strm);
        if (strm->avail_out == 0) {
            /* Since avail_out is 0, deflate will be called again with
             * more output space, but possibly with both pending and
             * avail_in equal to zero. There won't be anything to do,
             * but this is not an error situation so make sure we
             * return OK instead of BUF_ERROR at next call of deflate:
             */
            s->last_flush = -1;
            return Z_OK;
        }

    /* Make sure there is something to do and avoid duplicate consecutive
     * flushes. For repeated and useless calls with Z_FINISH, we keep
     * returning Z_STREAM_END instead of Z_BUF_ERROR.
     */
    } else if (strm->avail_in == 0 && flush <= old_flush &&
               flush != Z_FINISH) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* User must not provide more input after the first FINISH: */
    if (s->status == FINISH_STATE && strm->avail_in != 0) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* Start a new block or continue the current one.
     */
    if (strm->avail_in != 0 || s->lookahead != 0 ||
        (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
        block_state bstate;

        bstate = (*(configuration_table[s->level].func))(s, flush);

        if (bstate == finish_started || bstate == finish_done) {
            s->status = FINISH_STATE;
        }
        if (bstate == need_more || bstate == finish_started) {
            if (strm->avail_out == 0) {
                s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
            }
            return Z_OK;
            /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
             * of deflate should use the same flush parameter to make sure
             * that the flush is complete. So we don't have to output an
             * empty block here, this will be done at next call. This also
             * ensures that for a very small output buffer, we emit at most
             * one empty block.
             */
        }
        if (bstate == block_done) {
            if (flush == Z_PARTIAL_FLUSH) {
                _tr_align(s);
            } else if (flush == Z_PACKET_FLUSH) {
                /* Output just the 3-bit `stored' block type value,
                   but not a zero length. */
                _tr_stored_type_only(s);
            } else { /* FULL_FLUSH or SYNC_FLUSH */
                _tr_stored_block(s, (char*)0, 0L, 0);
                /* For a full flush, this empty block will be recognized
                 * as a special marker by inflate_sync().
                 */
                if (flush == Z_FULL_FLUSH) {
                    CLEAR_HASH(s);             /* forget history */
                }
            }
            flush_pending(strm);
            if (strm->avail_out == 0) {
                s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
                return Z_OK;
            }
        }
    }
    Assert(strm->avail_out > 0, "bug2");

    if (flush != Z_FINISH) return Z_OK;
    if (s->noheader) return Z_STREAM_END;

    /* Write the zlib trailer (adler32) */
    putShortMSB(s, (uInt)(strm->adler >> 16));
    putShortMSB(s, (uInt)(strm->adler & 0xffff));
    flush_pending(strm);
    /* If avail_out is zero, the application will call deflate again
     * to flush the rest.
     */
    s->noheader = -1; /* write the trailer only once! */
    return s->pending != 0 ? Z_OK : Z_STREAM_END;
}

/* ========================================================================= */
/* Free all state allocated for the stream.  Returns Z_DATA_ERROR if the
 * stream was freed while still in BUSY_STATE (i.e. mid-compression),
 * Z_OK otherwise.
 */
int ZEXPORT deflateEnd (strm)
    z_streamp strm;
{
    int status;
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
    s = (deflate_state *) strm->state;

    status = s->status;
    if (status != INIT_STATE && status != BUSY_STATE &&
        status != FINISH_STATE) {
        return Z_STREAM_ERROR;
    }

    /* Deallocate in reverse order of allocations: */
    TRY_FREE(strm, s->pending_buf);
    TRY_FREE(strm, s->head);
    TRY_FREE(strm, s->prev);
    TRY_FREE(strm, s->window);

    ZFREE(strm, s);
    strm->state = Z_NULL;

    return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
}

/* =========================================================================
 * Copy the source state to the destination state.
 * To simplify the source, this is not supported for 16-bit MSDOS (which
 * doesn't have enough memory anyway to duplicate compression states).
 */
/* Duplicate a compression state: shallow-copy the stream and state, then
 * allocate fresh window/prev/head/pending buffers for the destination and
 * copy their contents.  On allocation failure the partially-built
 * destination is torn down with deflateEnd() and Z_MEM_ERROR is returned.
 */
int ZEXPORT deflateCopy (dest, source)
    z_streamp dest;
    z_streamp source;
{
#ifdef MAXSEG_64K
    /* Not supported under the 64K segment limit (see comment above). */
    return Z_STREAM_ERROR;
#else
    deflate_state *ds;
    deflate_state *ss;
    ushf *overlay;


    if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL) {
        return Z_STREAM_ERROR;
    }

    ss = (deflate_state *)source->state;

    *dest = *source;

    ds = (deflate_state *) ZALLOC(dest, 1, sizeof(deflate_state));
    if (ds == Z_NULL) return Z_MEM_ERROR;
    dest->state = (void *) ds;
    *ds = *ss;
    ds->strm = dest;

    ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
    ds->prev   = (Posf *)  ZALLOC(dest, ds->w_size, sizeof(Pos));
    ds->head   = (Posf *)  ZALLOC(dest, ds->hash_size, sizeof(Pos));
    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
    ds->pending_buf = (uchf *) overlay;

    if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
        ds->pending_buf == Z_NULL) {
        ds->status = INIT_STATE;
        deflateEnd (dest);
        return Z_MEM_ERROR;
    }
    /* following zmemcpy do not work for 16-bit MSDOS */
    zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
    zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof(Pos));
    zmemcpy(ds->head, ss->head, ds->hash_size * sizeof(Pos));
    zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

    /* Re-point the internal pointers into the freshly copied buffers: */
    ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;

    ds->l_desc.dyn_tree = ds->dyn_ltree;
    ds->d_desc.dyn_tree = ds->dyn_dtree;
    ds->bl_desc.dyn_tree = ds->bl_tree;

    return Z_OK;
#endif
}

/* ===========================================================================
 * Return the number of bytes of output which are immediately
available
 * for output from the compressor (i.e. the pending count).
 */
int deflateOutputPending (strm)
    z_streamp strm;
{
    if (strm == Z_NULL || strm->state == Z_NULL) return 0;

    return ((deflate_state *)(strm->state))->pending;
}

/* ===========================================================================
 * Read a new buffer from the current input stream, update the adler32
 * and total number of bytes read.  All deflate() input goes through
 * this function so some applications may wish to modify it to avoid
 * allocating a large strm->next_in buffer and copying from it.
 * (See also flush_pending()).
 * The adler32 is only maintained when a zlib header/trailer is being
 * produced (noheader == 0).
 */
local int read_buf(strm, buf, size)
    z_streamp strm;
    Bytef *buf;
    unsigned size;
{
    unsigned len = strm->avail_in;

    if (len > size) len = size;
    if (len == 0) return 0;

    strm->avail_in -= len;

    if (!((deflate_state *)(strm->state))->noheader) {
        strm->adler = adler32(strm->adler, strm->next_in, len);
    }
    zmemcpy(buf, strm->next_in, len);
    strm->next_in += len;
    strm->total_in += len;

    return (int)len;
}

/* ===========================================================================
 * Initialize the "longest match" routines for a new zlib stream:
 * clear the hash table, load the per-level configuration parameters,
 * and reset the window/match bookkeeping.
 */
local void lm_init (s)
    deflate_state *s;
{
    s->window_size = (ulg)2L*s->w_size;

    CLEAR_HASH(s);

    /* Set the default configuration parameters:
     */
    s->max_lazy_match   = configuration_table[s->level].max_lazy;
    s->good_match       = configuration_table[s->level].good_length;
    s->nice_match       = configuration_table[s->level].nice_length;
    s->max_chain_length = configuration_table[s->level].max_chain;

    s->strstart = 0;
    s->block_start = 0L;
    s->lookahead = 0;
    s->match_length = s->prev_length = MIN_MATCH-1;
    s->match_available = 0;
    s->ins_h = 0;
#ifdef ASMV
    match_init(); /* initialize the asm code */
#endif
}

/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 * OUT assertion: the match length is not greater than s->lookahead.
 */
#ifndef ASMV
/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
 * match.S. The code will be functionally equivalent.
 */
#ifndef FASTEST
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    unsigned chain_length = s->max_chain_length;/* max hash chain length */
    Bytef *scan = s->window + s->strstart;      /* current string */
    Bytef *match;                               /* matched string */
    int len;                                    /* length of current match */
    int best_len = s->prev_length;              /* best match length so far */
    int nice_match = s->nice_match;             /* stop if match long enough */
    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
        s->strstart - (IPos)MAX_DIST(s) : NIL;
    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    Posf *prev = s->prev;
    uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
    /* Compare two bytes at a time. Note: this is not always beneficial.
     * Try with and without -DUNALIGNED_OK to check.
     */
    Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    ush scan_start = *(ushf*)scan;
    ush scan_end   = *(ushf*)(scan+best_len-1);
#else
    Bytef *strend = s->window + s->strstart + MAX_MATCH;
    Byte scan_end1  = scan[best_len-1];
    Byte scan_end   = scan[best_len];
#endif

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    /* Do not waste too much time if we already have a good match: */
    if (s->prev_length >= s->good_match) {
        chain_length >>= 2;
    }
    /* Do not look for matches beyond the end of the input. This is necessary
     * to make deflate deterministic.
     */
    if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    do {
        Assert(cur_match < s->strstart, "no future");
        match = s->window + cur_match;

        /* Skip to next match if the match length cannot increase
         * or if the match length is less than 2:
         */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
        /* This code assumes sizeof(unsigned short) == 2. Do not use
         * UNALIGNED_OK if your compiler uses a different size.
         */
        if (*(ushf*)(match+best_len-1) != scan_end ||
            *(ushf*)match != scan_start) continue;

        /* It is not necessary to compare scan[2] and match[2] since they are
         * always equal when the other bytes match, given that the hash keys
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
         * strstart+3, +5, ... up to strstart+257. We check for insufficient
         * lookahead only every 4th comparison; the 128th check will be made
         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
         * necessary to put more guard bytes at the end of the window, or
         * to check more often for insufficient lookahead.
         */
        Assert(scan[2] == match[2], "scan[2]?");
        scan++, match++;
        do {
        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 scan < strend);
        /* The funny "do {}" generates better code on most compilers */

        /* Here, scan <= window+strstart+257 */
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
        if (*scan == *match) scan++;

        len = (MAX_MATCH - 1) - (int)(strend-scan);
        scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */

        if (match[best_len]   != scan_end  ||
            match[best_len-1] != scan_end1 ||
            *match            != *scan     ||
            *++match          != scan[1])      continue;

        /* The check at best_len-1 can be removed because it will be made
         * again later. (This heuristic is not always a win.)
         * It is not necessary to compare scan[2] and match[2] since they
         * are always equal when the other bytes match, given that
         * the hash keys are equal and that HASH_BITS >= 8.
         */
        scan += 2, match++;
        Assert(*scan == *match, "match[2]?");

        /* We check for insufficient lookahead only every 8th comparison;
         * the 256th check will be made at strstart+258.
         */
        do {
        } while (*++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 scan < strend);

        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

        len = MAX_MATCH - (int)(strend - scan);
        scan = strend - MAX_MATCH;

#endif /* UNALIGNED_OK */

        if (len > best_len) {
            s->match_start = cur_match;
            best_len = len;
            if (len >= nice_match) break;
#ifdef UNALIGNED_OK
            scan_end = *(ushf*)(scan+best_len-1);
#else
            scan_end1  = scan[best_len-1];
            scan_end   = scan[best_len];
#endif
        }
    } while ((cur_match = prev[cur_match & wmask]) > limit
             && --chain_length != 0);

    if ((uInt)best_len <= s->lookahead) return (uInt)best_len;
    return s->lookahead;
}

#else /* FASTEST */
/* ---------------------------------------------------------------------------
 * Optimized version for level == 1 only: accept the first match on the
 * chain head, or fail with MIN_MATCH-1.
 */
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    register Bytef *scan = s->window + s->strstart; /* current string */
    register Bytef *match;                       /* matched string */
    register int len;                            /* length of current match */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    Assert(cur_match < s->strstart, "no future");

    match = s->window + cur_match;

    /* Return failure if the match length is less than 2:
     */
    if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1;

    /* The check at best_len-1 can be removed because it will be made
     * again later. (This heuristic is not always a win.)
     * It is not necessary to compare scan[2] and match[2] since they
     * are always equal when the other bytes match, given that
     * the hash keys are equal and that HASH_BITS >= 8.
     */
    scan += 2, match += 2;
    Assert(*scan == *match, "match[2]?");

    /* We check for insufficient lookahead only every 8th comparison;
     * the 256th check will be made at strstart+258.
     */
    do {
    } while (*++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             scan < strend);

    Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

    len = MAX_MATCH - (int)(strend - scan);

    if (len < MIN_MATCH) return MIN_MATCH - 1;

    s->match_start = cur_match;
    return len <= s->lookahead ? len : s->lookahead;
}
#endif /* FASTEST */
#endif /* ASMV */

#ifdef DEBUG_ZLIB
/* ===========================================================================
 * Check that the match at match_start is indeed a match.
 */
/* Debug-only sanity check: verifies that the proposed (start, match,
 * length) pair really matches byte-for-byte and aborts via z_error()
 * otherwise.  Compiled to a no-op macro when DEBUG_ZLIB is not defined.
 */
local void check_match(s, start, match, length)
    deflate_state *s;
    IPos start, match;
    int length;
{
    /* check that the match is indeed a match */
    if (zmemcmp(s->window + match,
                s->window + start, length) != EQUAL) {
        fprintf(stderr, " start %u, match %u, length %d\n",
                start, match, length);
        do {
            fprintf(stderr, "%c%c", s->window[match++], s->window[start++]);
        } while (--length != 0);
        z_error("invalid match");
    }
    if (z_verbose > 1) {
        fprintf(stderr,"\\[%d,%d]", start-match, length);
        do { putc(s->window[start++], stderr); } while (--length != 0);
    }
}
#else
#  define check_match(s, start, match, length)
#endif

/* ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead.
 *
 * IN assertion: lookahead < MIN_LOOKAHEAD
 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
 *    At least one byte has been read, or avail_in == 0; reads are
 *    performed for at least two bytes (required for the zip translate_eol
 *    option -- not supported here).
 */
local void fill_window(s)
    deflate_state *s;
{
    unsigned n, m;
    Posf *p;
    unsigned more;    /* Amount of free space at the end of the window. */
    uInt wsize = s->w_size;

    do {
        more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);

        /* Deal with !@#$% 64K limit: */
        if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
            more = wsize;

        } else if (more == (unsigned)(-1)) {
            /* Very unlikely, but possible on 16 bit machine if strstart == 0
             * and lookahead == 1 (input done one byte at time)
             */
            more--;

        /* If the window is almost full and there is insufficient lookahead,
         * move the upper half to the lower one to make room in the upper half.
         */
        } else if (s->strstart >= wsize+MAX_DIST(s)) {

            zmemcpy(s->window, s->window+wsize, (unsigned)wsize);
            s->match_start -= wsize;
            s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
            s->block_start -= (long) wsize;

            /* Slide the hash table (could be avoided with 32 bit values
               at the expense of memory usage). We slide even when level == 0
               to keep the hash table consistent if we switch back to level > 0
               later. (Using level 0 permanently is not an optimal usage of
               zlib, so we don't care about this pathological case.)
             */
            n = s->hash_size;
            p = &s->head[n];
            do {
                m = *--p;
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
            } while (--n);

            n = wsize;
#ifndef FASTEST
            p = &s->prev[n];
            do {
                m = *--p;
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
                /* If n is not on any hash chain, prev[n] is garbage but
                 * its value will never be used.
                 */
            } while (--n);
#endif
            more += wsize;
        }
        if (s->strm->avail_in == 0) return;

        /* If there was no sliding:
         *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
         *    more == window_size - lookahead - strstart
         * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
         * => more >= window_size - 2*WSIZE + 2
         * In the BIG_MEM or MMAP case (not yet supported),
         *   window_size == input_size + MIN_LOOKAHEAD  &&
         *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
         * Otherwise, window_size == 2*WSIZE so more >= 2.
         * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
         */
        Assert(more >= 2, "more < 2");

        n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more);
        s->lookahead += n;

        /* Initialize the hash value now that we have some input: */
        if (s->lookahead >= MIN_MATCH) {
            s->ins_h = s->window[s->strstart];
            UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
            Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
        }
        /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
         * but this is not important since only literal bytes will be emitted.
         */

    } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
}

/* ===========================================================================
 * Flush the current block, with given end-of-file flag.
 * IN assertion: strstart is set to the end of the current match.
 */
#define FLUSH_BLOCK_ONLY(s, eof) { \
   _tr_flush_block(s, (s->block_start >= 0L ? \
                   (charf *)&s->window[(unsigned)s->block_start] : \
                   (charf *)Z_NULL), \
                (ulg)((long)s->strstart - s->block_start), \
                (eof)); \
   s->block_start = s->strstart; \
   flush_pending(s->strm); \
   Tracev((stderr,"[FLUSH]")); \
}

/* Same but force premature exit if necessary. */
#define FLUSH_BLOCK(s, eof) { \
   FLUSH_BLOCK_ONLY(s, eof); \
   if (s->strm->avail_out == 0) return (eof) ? finish_started : need_more; \
}

/* ===========================================================================
 * Copy without compression as much as possible from the input stream, return
 * the current block state.
 * This function does not insert new strings in the dictionary since
 * uncompressible data is probably not useful. This function is used
 * only for the level=0 compression option.
 * NOTE: this function should be optimized to avoid extra copying from
 * window to pending_buf.
 */
local block_state deflate_stored(s, flush)
    deflate_state *s;
    int flush;
{
    /* Stored blocks are limited to 0xffff bytes, pending_buf is limited
     * to pending_buf_size, and each stored block has a 5 byte header:
     */
    ulg max_block_size = 0xffff;
    ulg max_start;

    if (max_block_size > s->pending_buf_size - 5) {
        max_block_size = s->pending_buf_size - 5;
    }

    /* Copy as much as possible from input to output: */
    for (;;) {
        /* Fill the window as much as possible: */
        if (s->lookahead <= 1) {

            Assert(s->strstart < s->w_size+MAX_DIST(s) ||
                   s->block_start >= (long)s->w_size, "slide too late");

            fill_window(s);
            if (s->lookahead == 0 && flush == Z_NO_FLUSH) return need_more;

            if (s->lookahead == 0) break; /* flush the current block */
        }
        Assert(s->block_start >= 0L, "block gone");

        s->strstart += s->lookahead;
        s->lookahead = 0;

        /* Emit a stored block if pending_buf will be full: */
        max_start = s->block_start + max_block_size;
        if (s->strstart == 0 || (ulg)s->strstart >= max_start) {
            /* strstart == 0 is possible when wraparound on 16-bit machine */
            s->lookahead = (uInt)(s->strstart - max_start);
            s->strstart = (uInt)max_start;
            FLUSH_BLOCK(s, 0);
        }
        /* Flush if we may have to slide, otherwise block_start may become
         * negative and the data will be gone:
         */
        if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) {
            FLUSH_BLOCK(s, 0);
        }
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}

/* ===========================================================================
 * Compress as much as possible from the input stream, return the current
 * block state.
 * This function does not perform lazy evaluation of matches and inserts
 * new strings in the dictionary only for unmatched strings or for short
 * matches. It is used only for the fast compression options.
 */
local block_state deflate_fast(s, flush)
    deflate_state *s;
    int flush;
{
    IPos hash_head = NIL; /* head of the hash chain */
    int bflush;           /* set if current block must be flushed */

    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the next match, plus MIN_MATCH bytes to insert the
         * string following the next match.
         */
        if (s->lookahead < MIN_LOOKAHEAD) {
            fill_window(s);
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
                return need_more;
            }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        if (s->lookahead >= MIN_MATCH) {
            INSERT_STRING(s, s->strstart, hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         * At this point we have always match_length < MIN_MATCH
         */
        if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            if (s->strategy != Z_HUFFMAN_ONLY) {
                s->match_length = longest_match (s, hash_head);
            }
            /* longest_match() sets match_start */
        }
        if (s->match_length >= MIN_MATCH) {
            check_match(s, s->strstart, s->match_start, s->match_length);

            _tr_tally_dist(s, s->strstart - s->match_start,
                           s->match_length - MIN_MATCH, bflush);

            s->lookahead -= s->match_length;

            /* Insert new strings in the hash table only if the match length
             * is not too large. This saves time but degrades compression.
             */
#ifndef FASTEST
            if (s->match_length <= s->max_insert_length &&
                s->lookahead >= MIN_MATCH) {
                s->match_length--; /* string at strstart already in hash table */
                do {
                    s->strstart++;
                    INSERT_STRING(s, s->strstart, hash_head);
                    /* strstart never exceeds WSIZE-MAX_MATCH, so there are
                     * always MIN_MATCH bytes ahead.
                     */
                } while (--s->match_length != 0);
                s->strstart++;
            } else
#endif
            {
                s->strstart += s->match_length;
                s->match_length = 0;
                s->ins_h = s->window[s->strstart];
                UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
                Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
                /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
                 * matter since it will be recomputed at next deflate call.
                 */
            }
        } else {
            /* No match, output a literal byte */
            Tracevv((stderr,"%c", s->window[s->strstart]));
            _tr_tally_lit (s, s->window[s->strstart], bflush);
            s->lookahead--;
            s->strstart++;
        }
        if (bflush) FLUSH_BLOCK(s, 0);
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}

/* ===========================================================================
 * Same as above, but achieves better compression.
We use a lazy
 * evaluation for matches: a match is finally adopted only if there is
 * no better match at the next window position.
 * Returns need_more when more input (or output space) is required,
 * otherwise block_done or finish_done depending on flush == Z_FINISH.
 */
local block_state deflate_slow(s, flush)
    deflate_state *s;
    int flush;
{
    IPos hash_head = NIL;    /* head of hash chain */
    int bflush;              /* set (by the _tr_tally_* macros) if current block must be flushed */

    /* Process the input block. */
    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the next match, plus MIN_MATCH bytes to insert the
         * string following the next match.
         */
        if (s->lookahead < MIN_LOOKAHEAD) {
            fill_window(s);
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
                return need_more;
            }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        if (s->lookahead >= MIN_MATCH) {
            INSERT_STRING(s, s->strstart, hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         */
        s->prev_length = s->match_length, s->prev_match = s->match_start;
        s->match_length = MIN_MATCH-1;

        if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
            s->strstart - hash_head <= MAX_DIST(s)) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            if (s->strategy != Z_HUFFMAN_ONLY) {
                s->match_length = longest_match (s, hash_head);
            }
            /* longest_match() sets match_start */

            /* Discard too-short or too-distant matches (TOO_FAR heuristic). */
            if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
                 (s->match_length == MIN_MATCH &&
                  s->strstart - s->match_start > TOO_FAR))) {

                /* If prev_match is also MIN_MATCH, match_start is garbage
                 * but we will ignore the current match anyway.
                 */
                s->match_length = MIN_MATCH-1;
            }
        }
        /* If there was a match at the previous step and the current
         * match is not better, output the previous match:
         */
        if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
            uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
            /* Do not insert strings in hash table beyond this. */

            check_match(s, s->strstart-1, s->prev_match, s->prev_length);

            _tr_tally_dist(s, s->strstart -1 - s->prev_match,
                           s->prev_length - MIN_MATCH, bflush);

            /* Insert in hash table all strings up to the end of the match.
             * strstart-1 and strstart are already inserted. If there is not
             * enough lookahead, the last two strings are not inserted in
             * the hash table.
             */
            s->lookahead -= s->prev_length-1;
            s->prev_length -= 2;
            do {
                if (++s->strstart <= max_insert) {
                    INSERT_STRING(s, s->strstart, hash_head);
                }
            } while (--s->prev_length != 0);
            s->match_available = 0;
            s->match_length = MIN_MATCH-1;
            s->strstart++;

            if (bflush) FLUSH_BLOCK(s, 0);

        } else if (s->match_available) {
            /* If there was no match at the previous position, output a
             * single literal. If there was a match but the current match
             * is longer, truncate the previous match to a single literal.
             */
            Tracevv((stderr,"%c", s->window[s->strstart-1]));
            _tr_tally_lit(s, s->window[s->strstart-1], bflush);
            if (bflush) {
                FLUSH_BLOCK_ONLY(s, 0);
            }
            s->strstart++;
            s->lookahead--;
            if (s->strm->avail_out == 0) return need_more;
        } else {
            /* There is no previous match to compare with, wait for
             * the next step to decide.
             */
            s->match_available = 1;
            s->strstart++;
            s->lookahead--;
        }
    }
    Assert (flush != Z_NO_FLUSH, "no flush?");
    /* Flush any pending deferred literal before closing the block. */
    if (s->match_available) {
        Tracevv((stderr,"%c", s->window[s->strstart-1]));
        _tr_tally_lit(s, s->window[s->strstart-1], bflush);
        s->match_available = 0;
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}
/* --- deflate.c */

/* +++ trees.c */

/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1995-2002 Jean-loup Gailly
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 *  ALGORITHM
 *
 *      The "deflation" process uses several Huffman trees. The more
 *      common source values are represented by shorter bit sequences.
 *
 *      Each code tree is stored in a compressed form which is itself
 *      a Huffman encoding of the lengths of all the code strings (in
 *      ascending order by source values). The actual code strings are
 *      reconstructed from the lengths in the inflate process, as described
 *      in the deflate specification.
 *
 *  REFERENCES
 *
 *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
 *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
 *
 *      Storer, James A.
 *          Data Compression: Methods and Theory, pp. 49-50.
 *          Computer Science Press, 1988. ISBN 0-7167-8156-5.
 *
 *      Sedgewick, R.
 *          Algorithms, p290.
 *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
 */

/* @(#) $Id: zlib.c,v 1.18 2002/05/07 09:14:20 tron Exp $ */

/* #define GEN_TREES_H */

/* #include "deflate.h" */

#ifdef DEBUG_ZLIB
# include <ctype.h>
#endif

/* ===========================================================================
 * Constants
 */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6      16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10    17
/* repeat a zero length 3-10 times (3 bits of repeat count) */

#define REPZ_11_138  18
/* repeat a zero length 11-138 times (7 bits of repeat count) */

local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

local const int extra_dbits[D_CODES] /* extra bits for each distance code */
   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

local const uch bl_order[BL_CODES]
   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */

#define Buf_size (8 * 2*sizeof(char))
/* Number of bits used within bi_buf. (bi_buf might be implemented on
 * more than 16 bits on some systems.)
 */

/* ===========================================================================
 * Local data. These are initialized only once.
 */

#define DIST_CODE_LEN  512 /* see definition of array dist_code below */

#if defined(GEN_TREES_H) || !defined(STDC)
/* non ANSI compilers may not accept trees.h; the tables below are then
 * built at run time by tr_static_init() instead of being compiled in.
 */

local ct_data static_ltree[L_CODES+2];
/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However
 * The codes 286 and 287 are needed to build a canonical tree (see _tr_init
 * below).
 */

local ct_data static_dtree[D_CODES];
/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

uch _dist_code[DIST_CODE_LEN];
/* Distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

uch _length_code[MAX_MATCH-MIN_MATCH+1];
/* length code for each normalized match length (0 == MIN_MATCH) */

local int base_length[LENGTH_CODES];
/* First normalized length for each code (0 = MIN_MATCH) */

local int base_dist[D_CODES];
/* First normalized distance for each code (0 = distance of 1) */

#else
/* +++ trees.h */

/* header created automatically with -DGEN_TREES_H */

local const ct_data static_ltree[L_CODES+2] = {
{{ 12},{ 8}}, {{140},{ 8}}, {{ 76},{ 8}}, {{204},{ 8}}, {{ 44},{ 8}},
{{172},{ 8}}, {{108},{ 8}}, {{236},{ 8}}, {{ 28},{ 8}}, {{156},{ 8}},
{{ 92},{ 8}}, {{220},{ 8}}, {{ 60},{ 8}}, {{188},{ 8}}, {{124},{ 8}},
{{252},{ 8}}, {{  2},{ 8}}, {{130},{ 8}}, {{ 66},{ 8}}, {{194},{ 8}},
{{ 34},{ 8}}, {{162},{ 8}}, {{ 98},{ 8}}, {{226},{ 8}}, {{ 18},{ 8}},
{{146},{ 8}}, {{ 82},{ 8}}, {{210},{ 8}}, {{ 50},{ 8}}, {{178},{ 8}},
{{114},{ 8}}, {{242},{ 8}}, {{ 10},{ 8}}, {{138},{ 8}}, {{ 74},{ 8}},
{{202},{ 8}}, {{ 42},{ 8}}, {{170},{ 8}}, {{106},{ 8}}, {{234},{ 8}},
{{ 26},{ 8}}, {{154},{ 8}}, {{ 90},{ 8}}, {{218},{ 8}}, {{ 58},{ 8}},
{{186},{ 8}}, {{122},{ 8}}, {{250},{ 8}}, {{  6},{ 8}}, {{134},{ 8}},
{{ 70},{ 8}}, {{198},{ 8}}, {{ 38},{ 8}}, {{166},{ 8}}, {{102},{ 8}},
{{230},{ 8}}, {{ 22},{ 8}}, {{150},{ 8}}, {{ 86},{ 8}}, {{214},{ 8}},
{{ 54},{ 8}}, {{182},{ 8}}, {{118},{ 8}}, {{246},{ 8}}, {{ 14},{ 8}},
{{142},{ 8}}, {{ 78},{ 8}}, {{206},{ 8}}, {{ 46},{ 8}}, {{174},{ 8}},
{{110},{ 8}}, {{238},{ 8}}, {{ 30},{ 8}}, {{158},{ 8}}, {{ 94},{ 8}},
{{222},{ 8}}, {{ 62},{ 8}}, {{190},{ 8}}, {{126},{ 8}}, {{254},{ 8}},
{{  1},{ 8}}, {{129},{ 8}}, {{ 65},{ 8}}, {{193},{ 8}}, {{ 33},{ 8}},
{{161},{ 8}}, {{ 97},{ 8}}, {{225},{ 8}}, {{ 17},{ 8}}, {{145},{ 8}},
{{ 81},{ 8}}, {{209},{ 8}}, {{ 49},{ 8}}, {{177},{ 8}}, {{113},{ 8}},
{{241},{ 8}}, {{  9},{ 8}}, {{137},{ 8}}, {{ 73},{ 8}}, {{201},{ 8}},
{{ 41},{ 8}}, {{169},{ 8}}, {{105},{ 8}}, {{233},{ 8}}, {{ 25},{ 8}},
{{153},{ 8}}, {{ 89},{ 8}}, {{217},{ 8}}, {{ 57},{ 8}}, {{185},{ 8}},
{{121},{ 8}}, {{249},{ 8}}, {{  5},{ 8}}, {{133},{ 8}}, {{ 69},{ 8}},
{{197},{ 8}}, {{ 37},{ 8}}, {{165},{ 8}}, {{101},{ 8}}, {{229},{ 8}},
{{ 21},{ 8}}, {{149},{ 8}}, {{ 85},{ 8}}, {{213},{ 8}}, {{ 53},{ 8}},
{{181},{ 8}}, {{117},{ 8}}, {{245},{ 8}}, {{ 13},{ 8}}, {{141},{ 8}},
{{ 77},{ 8}}, {{205},{ 8}}, {{ 45},{ 8}}, {{173},{ 8}}, {{109},{ 8}},
{{237},{ 8}}, {{ 29},{ 8}}, {{157},{ 8}}, {{ 93},{ 8}}, {{221},{ 8}},
{{ 61},{ 8}}, {{189},{ 8}}, {{125},{ 8}}, {{253},{ 8}}, {{ 19},{ 9}},
{{275},{ 9}}, {{147},{ 9}}, {{403},{ 9}}, {{ 83},{ 9}}, {{339},{ 9}},
{{211},{ 9}}, {{467},{ 9}}, {{ 51},{ 9}}, {{307},{ 9}}, {{179},{ 9}},
{{435},{ 9}}, {{115},{ 9}}, {{371},{ 9}}, {{243},{ 9}}, {{499},{ 9}},
{{ 11},{ 9}}, {{267},{ 9}}, {{139},{ 9}}, {{395},{ 9}}, {{ 75},{ 9}},
{{331},{ 9}}, {{203},{ 9}}, {{459},{ 9}}, {{ 43},{ 9}}, {{299},{ 9}},
{{171},{ 9}}, {{427},{ 9}}, {{107},{ 9}}, {{363},{ 9}}, {{235},{ 9}},
{{491},{ 9}}, {{ 27},{ 9}}, {{283},{ 9}}, {{155},{ 9}}, {{411},{ 9}},
{{ 91},{ 9}}, {{347},{ 9}}, {{219},{ 9}}, {{475},{ 9}}, {{ 59},{ 9}},
{{315},{ 9}}, {{187},{ 9}}, {{443},{ 9}}, {{123},{ 9}}, {{379},{ 9}},
{{251},{ 9}}, {{507},{ 9}}, {{  7},{ 9}}, {{263},{ 9}}, {{135},{ 9}},
{{391},{ 9}}, {{ 71},{ 9}}, {{327},{ 9}}, {{199},{ 9}}, {{455},{ 9}},
{{ 39},{ 9}}, {{295},{ 9}}, {{167},{ 9}}, {{423},{ 9}}, {{103},{ 9}},
{{359},{ 9}}, {{231},{ 9}}, {{487},{ 9}}, {{ 23},{ 9}}, {{279},{ 9}},
{{151},{ 9}}, {{407},{ 9}}, {{ 87},{ 9}}, {{343},{ 9}}, {{215},{ 9}},
{{471},{ 9}}, {{ 55},{ 9}}, {{311},{ 9}}, {{183},{ 9}}, {{439},{ 9}},
{{119},{ 9}}, {{375},{ 9}}, {{247},{ 9}}, {{503},{ 9}}, {{ 15},{ 9}},
{{271},{ 9}}, {{143},{ 9}}, {{399},{ 9}}, {{ 79},{ 9}}, {{335},{ 9}},
{{207},{ 9}}, {{463},{ 9}}, {{ 47},{ 9}}, {{303},{ 9}}, {{175},{ 9}},
{{431},{ 9}}, {{111},{ 9}}, {{367},{ 9}}, {{239},{ 9}}, {{495},{ 9}},
{{ 31},{ 9}}, {{287},{ 9}}, {{159},{ 9}}, {{415},{ 9}}, {{ 95},{ 9}},
{{351},{ 9}}, {{223},{ 9}}, {{479},{ 9}}, {{ 63},{ 9}}, {{319},{ 9}},
{{191},{ 9}}, {{447},{ 9}}, {{127},{ 9}}, {{383},{ 9}}, {{255},{ 9}},
{{511},{ 9}}, {{  0},{ 7}}, {{ 64},{ 7}}, {{ 32},{ 7}}, {{ 96},{ 7}},
{{ 16},{ 7}}, {{ 80},{ 7}}, {{ 48},{ 7}}, {{112},{ 7}}, {{  8},{ 7}},
{{ 72},{ 7}}, {{ 40},{ 7}}, {{104},{ 7}}, {{ 24},{ 7}}, {{ 88},{ 7}},
{{ 56},{ 7}}, {{120},{ 7}}, {{  4},{ 7}}, {{ 68},{ 7}}, {{ 36},{ 7}},
{{100},{ 7}}, {{ 20},{ 7}}, {{ 84},{ 7}}, {{ 52},{ 7}}, {{116},{ 7}},
{{  3},{ 8}}, {{131},{ 8}}, {{ 67},{ 8}}, {{195},{ 8}}, {{ 35},{ 8}},
{{163},{ 8}}, {{ 99},{ 8}}, {{227},{ 8}}
};

local const ct_data static_dtree[D_CODES] = {
{{ 0},{ 5}}, {{16},{ 5}}, {{ 8},{ 5}}, {{24},{ 5}}, {{ 4},{ 5}},
{{20},{ 5}}, {{12},{ 5}}, {{28},{ 5}}, {{ 2},{ 5}}, {{18},{ 5}},
{{10},{ 5}}, {{26},{ 5}}, {{ 6},{ 5}}, {{22},{ 5}}, {{14},{ 5}},
{{30},{ 5}}, {{ 1},{ 5}}, {{17},{ 5}}, {{ 9},{ 5}}, {{25},{ 5}},
{{ 5},{ 5}}, {{21},{ 5}}, {{13},{ 5}}, {{29},{ 5}}, {{ 3},{ 5}},
{{19},{ 5}}, {{11},{ 5}}, {{27},{ 5}}, {{ 7},{ 5}}, {{23},{ 5}}
};

const uch _dist_code[DIST_CODE_LEN] = {
 0,  1,  2,  3,  4,  4,  5,  5,  6,  6,  6,  6,  7,  7,  7,  7,  8,  8,  8,  8,
 8,  8,  8,  8,  9,  9,  9,  9,  9,  9,  9,  9, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,  0,  0, 16, 17,
18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22,
23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
};

const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {
 0,  1,  2,  3,  4,  5,  6,  7,  8,  8,  9,  9, 10, 10, 11, 11, 12, 12, 12, 12,
13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16,
17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19,
19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22,
22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28
};

local const int base_length[LENGTH_CODES] = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
64, 80, 96, 112, 128, 160, 192, 224, 0
};

local const int base_dist[D_CODES] = {
    0,     1,     2,     3,     4,     6,     8,    12,    16,    24,
   32,    48,    64,    96,   128,   192,   256,   384,   512,   768,
 1024,  1536,  2048,  3072,  4096,  6144,  8192, 12288, 16384, 24576
};
/* --- trees.h */

#endif /* GEN_TREES_H */

/* Descriptor of a static (fixed) Huffman tree: the imposed tree plus the
 * extra-bits tables used when building the corresponding dynamic tree.
 */
struct static_tree_desc_s {
    const ct_data *static_tree;  /* static tree or NULL */
    const intf *extra_bits;      /* extra bits for each code or NULL */
    int     extra_base;          /* base index for extra_bits */
    int     elems;               /* max number of elements in the tree */
    int     max_length;          /* max bit length for the codes */
};

local static_tree_desc  static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};

local static_tree_desc  static_d_desc =
{static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};

local static_tree_desc  static_bl_desc =
{(const ct_data *)0, extra_blbits, 0,   BL_CODES, MAX_BL_BITS};

/* ===========================================================================
 * Local (static) routines in this file.
 */

local void tr_static_init __P((void));
local void init_block     __P((deflate_state *s));
local void pqdownheap     __P((deflate_state *s, ct_data *tree, int k));
local void gen_bitlen     __P((deflate_state *s, tree_desc *desc));
local void gen_codes      __P((ct_data *tree, int max_code, ushf *bl_count));
local void build_tree     __P((deflate_state *s, tree_desc *desc));
local void scan_tree      __P((deflate_state *s, ct_data *tree, int max_code));
local void send_tree      __P((deflate_state *s, ct_data *tree, int max_code));
local int  build_bl_tree  __P((deflate_state *s));
local void send_all_trees __P((deflate_state *s, int lcodes, int dcodes,
                              int blcodes));
local void compress_block __P((deflate_state *s, ct_data *ltree,
                              ct_data *dtree));
local void set_data_type  __P((deflate_state *s));
local unsigned bi_reverse __P((unsigned value, int length));
local void bi_windup      __P((deflate_state *s));
local void bi_flush       __P((deflate_state *s));
local void copy_block     __P((deflate_state *s, charf *buf, unsigned len,
                              int header));

#ifdef GEN_TREES_H
local void gen_trees_header __P((void));
#endif

#ifndef DEBUG_ZLIB
# define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
  /* Send a code of the given tree. c and tree must not have side effects */

#else /* DEBUG_ZLIB */
# define send_code(s, c, tree) \
    { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
      send_bits(s, tree[c].Code, tree[c].Len); }
#endif

/* ===========================================================================
 * Output a short LSB first on the stream.
 * IN assertion: there is enough room in pendingBuf.
 */
#define put_short(s, w) { \
    put_byte(s, (uch)((w) & 0xff)); \
    put_byte(s, (uch)((ush)(w) >> 8)); \
}

/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 */
#ifdef DEBUG_ZLIB
local void send_bits      __P((deflate_state *s, int value, int length));

local void send_bits(s, value, length)
    deflate_state *s;
    int value;  /* value to send */
    int length; /* number of bits */
{
    Tracevv((stderr," l %2d v %4x ", length, value));
    Assert(length > 0 && length <= 15, "invalid length");
    s->bits_sent += (ulg)length;

    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
     * unused bits in value.
     */
    if (s->bi_valid > (int)Buf_size - length) {
        s->bi_buf |= (value << s->bi_valid);
        put_short(s, s->bi_buf);
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
        s->bi_valid += length - Buf_size;
    } else {
        s->bi_buf |= value << s->bi_valid;
        s->bi_valid += length;
    }
}
#else /* !DEBUG_ZLIB */

/* Non-debug variant of the above: same bit-buffer logic, as a macro.
 * NOTE: `value` may be evaluated twice on the overflow path; callers must
 * not pass expressions with side effects.
 */
#define send_bits(s, value, length) \
{ int len = length;\
  if (s->bi_valid > (int)Buf_size - len) {\
    int val = value;\
    s->bi_buf |= (val << s->bi_valid);\
    put_short(s, s->bi_buf);\
    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
    s->bi_valid += len - Buf_size;\
  } else {\
    s->bi_buf |= (value) << s->bi_valid;\
    s->bi_valid += len;\
  }\
}
#endif /* DEBUG_ZLIB */


/* ===========================================================================
 * Initialize the various 'constant' tables. Only does real work when the
 * tables are not compiled in (GEN_TREES_H or non-ANSI compiler); runs once.
 */
local void tr_static_init()
{
#if defined(GEN_TREES_H) || !defined(STDC)
    static int static_init_done = 0;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return;

    /* For some embedded targets, global variables are not initialized: */
    static_l_desc.static_tree = static_ltree;
    static_l_desc.extra_bits = extra_lbits;
    static_d_desc.static_tree = static_dtree;
    static_d_desc.extra_bits = extra_dbits;
    static_bl_desc.extra_bits = extra_blbits;

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            _length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    _length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            _dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            _dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
    }
    static_init_done = 1;

# ifdef GEN_TREES_H
    gen_trees_header();
# endif
#endif /* defined(GEN_TREES_H) || !defined(STDC) */
}

/* ===========================================================================
 *
Generate the file trees.h describing the static trees.
 */
#ifdef GEN_TREES_H
# ifndef DEBUG_ZLIB
#  include <stdio.h>
# endif

# define SEPARATOR(i, last, width) \
      ((i) == (last)? "\n};\n\n" : \
       ((i) % (width) == (width)-1 ? ",\n" : ", "))

void gen_trees_header()
{
    FILE *header = fopen("trees.h", "w");
    int i;

    Assert (header != NULL, "Can't open trees.h");
    fprintf(header,
            "/* header created automatically with -DGEN_TREES_H */\n\n");

    fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n");
    for (i = 0; i < L_CODES+2; i++) {
        fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code,
                static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5));
    }

    fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n");
    for (i = 0; i < D_CODES; i++) {
        fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code,
                static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5));
    }

    fprintf(header, "const uch _dist_code[DIST_CODE_LEN] = {\n");
    for (i = 0; i < DIST_CODE_LEN; i++) {
        fprintf(header, "%2u%s", _dist_code[i],
                SEPARATOR(i, DIST_CODE_LEN-1, 20));
    }

    fprintf(header, "const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {\n");
    for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) {
        fprintf(header, "%2u%s", _length_code[i],
                SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20));
    }

    fprintf(header, "local const int base_length[LENGTH_CODES] = {\n");
    for (i = 0; i < LENGTH_CODES; i++) {
        fprintf(header, "%1u%s", base_length[i],
                SEPARATOR(i, LENGTH_CODES-1, 20));
    }

    fprintf(header, "local const int base_dist[D_CODES] = {\n");
    for (i = 0; i < D_CODES; i++) {
        fprintf(header, "%5u%s", base_dist[i],
                SEPARATOR(i, D_CODES-1, 10));
    }

    fclose(header);
}
#endif /* GEN_TREES_H */

/* ===========================================================================
 * Initialize the tree data structures for a new zlib stream.
 */
void _tr_init(s)
    deflate_state *s;
{
    tr_static_init();

    s->l_desc.dyn_tree = s->dyn_ltree;
    s->l_desc.stat_desc = &static_l_desc;

    s->d_desc.dyn_tree = s->dyn_dtree;
    s->d_desc.stat_desc = &static_d_desc;

    s->bl_desc.dyn_tree = s->bl_tree;
    s->bl_desc.stat_desc = &static_bl_desc;

    s->bi_buf = 0;
    s->bi_valid = 0;
    s->last_eob_len = 8; /* enough lookahead for inflate */
#ifdef DEBUG_ZLIB
    s->compressed_len = 0L;
    s->bits_sent = 0L;
#endif

    /* Initialize the first block of the first file: */
    init_block(s);
}

/* ===========================================================================
 * Initialize a new block: clear all frequency counters and tally counts.
 */
local void init_block(s)
    deflate_state *s;
{
    int n; /* iterates over tree elements */

    /* Initialize the trees. */
    for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
    for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
    for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;

    /* Every block ends with an end-of-block code, so pre-count it once. */
    s->dyn_ltree[END_BLOCK].Freq = 1;
    s->opt_len = s->static_len = 0L;
    s->last_lit = s->matches = 0;
}

#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */


/* ===========================================================================
 * Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len.
 */
#define pqremove(s, tree, top) \
{\
    top = s->heap[SMALLEST]; \
    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
    pqdownheap(s, tree, SMALLEST); \
}

/* ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length.
 */
#define smaller(tree, n, m, depth) \
   (tree[n].Freq < tree[m].Freq || \
   (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))

/* ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 */
local void pqdownheap(s, tree, k)
    deflate_state *s;
    ct_data *tree;  /* the tree to restore */
    int k;          /* node to move down */
{
    int v = s->heap[k];
    int j = k << 1;  /* left son of k */
    while (j <= s->heap_len) {
        /* Set j to the smallest of the two sons: */
        if (j < s->heap_len &&
            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
            j++;
        }
        /* Exit if v is smaller than both sons */
        if (smaller(tree, v, s->heap[j], s->depth)) break;

        /* Exchange v with the smallest son */
        s->heap[k] = s->heap[j];  k = j;

        /* And continue down the tree, setting j to the left son of k */
        j <<= 1;
    }
    s->heap[k] = v;
}

/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 *    above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 *     array bl_count contains the frequencies for each bit length.
 *     The length opt_len is updated; static_len is also updated if stree is
 *     not null.
 */
local void gen_bitlen(s, desc)
    deflate_state *s;
    tree_desc *desc;    /* the tree descriptor */
{
    ct_data *tree        = desc->dyn_tree;
    int max_code         = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const intf *extra    = desc->stat_desc->extra_bits;
    int base             = desc->stat_desc->extra_base;
    int max_length       = desc->stat_desc->max_length;
    int h;              /* heap index */
    int n, m;           /* iterate over the tree elements */
    int bits;           /* bit length */
    int xbits;          /* extra bits */
    ush f;              /* frequency */
    int overflow = 0;   /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (bits + xbits);
        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Trace((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--;      /* move one leaf down the tree */
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits];
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue;
            if (tree[m].Len != (unsigned) bits) {
                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                s->opt_len += ((long)bits - (long)tree[m].Len)
                              *(long)tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}

/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 *     zero code length.
 */
local void gen_codes (tree, max_code, bl_count)
    ct_data *tree;             /* the tree to decorate */
    int max_code;              /* largest code with non zero frequency */
    ushf *bl_count;            /* number of codes at each bit length */
{
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
    ush code = 0;              /* running code value */
    int bits;                  /* bit index */
    int n;                     /* code index */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        next_code[bits] = code = (code + bl_count[bits-1]) << 1;
    }
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0;  n <= max_code; n++) {
        int len = tree[n].Len;
        if (len == 0) continue;
        /* Now reverse the bits */
        tree[n].Code = bi_reverse(next_code[len]++, len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
             n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
    }
}

/* ===========================================================================
 * Construct one Huffman tree and assigns the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 *     and corresponding code. The length opt_len is updated; static_len is
 *     also updated if stree is not null. The field max_code is set.
 */
local void build_tree(s, desc)
    deflate_state *s;
    tree_desc *desc; /* the tree descriptor */
{
    ct_data *tree         = desc->dyn_tree;
    const ct_data *stree  = desc->stat_desc->static_tree;
    int elems             = desc->stat_desc->elems;
    int n, m;          /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node;          /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
2751 */ 2752 s->heap_len = 0, s->heap_max = HEAP_SIZE; 2753 2754 for (n = 0; n < elems; n++) { 2755 if (tree[n].Freq != 0) { 2756 s->heap[++(s->heap_len)] = max_code = n; 2757 s->depth[n] = 0; 2758 } else { 2759 tree[n].Len = 0; 2760 } 2761 } 2762 2763 /* The pkzip format requires that at least one distance code exists, 2764 * and that at least one bit should be sent even if there is only one 2765 * possible code. So to avoid special checks later on we force at least 2766 * two codes of non zero frequency. 2767 */ 2768 while (s->heap_len < 2) { 2769 node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0); 2770 tree[node].Freq = 1; 2771 s->depth[node] = 0; 2772 s->opt_len--; if (stree) s->static_len -= stree[node].Len; 2773 /* node is 0 or 1 so it does not have extra bits */ 2774 } 2775 desc->max_code = max_code; 2776 2777 /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, 2778 * establish sub-heaps of increasing lengths: 2779 */ 2780 for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); 2781 2782 /* Construct the Huffman tree by repeatedly combining the least two 2783 * frequent nodes. 
2784 */ 2785 node = elems; /* next internal node of the tree */ 2786 do { 2787 pqremove(s, tree, n); /* n = node of least frequency */ 2788 m = s->heap[SMALLEST]; /* m = node of next least frequency */ 2789 2790 s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */ 2791 s->heap[--(s->heap_max)] = m; 2792 2793 /* Create a new node father of n and m */ 2794 tree[node].Freq = tree[n].Freq + tree[m].Freq; 2795 s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1); 2796 tree[n].Dad = tree[m].Dad = (ush)node; 2797 #ifdef DUMP_BL_TREE 2798 if (tree == s->bl_tree) { 2799 fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)", 2800 node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq); 2801 } 2802 #endif 2803 /* and insert the new node in the heap */ 2804 s->heap[SMALLEST] = node++; 2805 pqdownheap(s, tree, SMALLEST); 2806 2807 } while (s->heap_len >= 2); 2808 2809 s->heap[--(s->heap_max)] = s->heap[SMALLEST]; 2810 2811 /* At this point, the fields freq and dad are set. We can now 2812 * generate the bit lengths. 2813 */ 2814 gen_bitlen(s, (tree_desc *)desc); 2815 2816 /* The field len is now set, we can generate the bit codes */ 2817 gen_codes ((ct_data *)tree, max_code, s->bl_count); 2818 } 2819 2820 /* =========================================================================== 2821 * Scan a literal or distance tree to determine the frequencies of the codes 2822 * in the bit length tree. 
2823 */ 2824 local void scan_tree (s, tree, max_code) 2825 deflate_state *s; 2826 ct_data *tree; /* the tree to be scanned */ 2827 int max_code; /* and its largest code of non zero frequency */ 2828 { 2829 int n; /* iterates over all tree elements */ 2830 int prevlen = -1; /* last emitted length */ 2831 int curlen; /* length of current code */ 2832 int nextlen = tree[0].Len; /* length of next code */ 2833 int count = 0; /* repeat count of the current code */ 2834 int max_count = 7; /* max repeat count */ 2835 int min_count = 4; /* min repeat count */ 2836 2837 if (nextlen == 0) max_count = 138, min_count = 3; 2838 tree[max_code+1].Len = (ush)0xffff; /* guard */ 2839 2840 for (n = 0; n <= max_code; n++) { 2841 curlen = nextlen; nextlen = tree[n+1].Len; 2842 if (++count < max_count && curlen == nextlen) { 2843 continue; 2844 } else if (count < min_count) { 2845 s->bl_tree[curlen].Freq += count; 2846 } else if (curlen != 0) { 2847 if (curlen != prevlen) s->bl_tree[curlen].Freq++; 2848 s->bl_tree[REP_3_6].Freq++; 2849 } else if (count <= 10) { 2850 s->bl_tree[REPZ_3_10].Freq++; 2851 } else { 2852 s->bl_tree[REPZ_11_138].Freq++; 2853 } 2854 count = 0; prevlen = curlen; 2855 if (nextlen == 0) { 2856 max_count = 138, min_count = 3; 2857 } else if (curlen == nextlen) { 2858 max_count = 6, min_count = 3; 2859 } else { 2860 max_count = 7, min_count = 4; 2861 } 2862 } 2863 } 2864 2865 /* =========================================================================== 2866 * Send a literal or distance tree in compressed form, using the codes in 2867 * bl_tree. 
2868 */ 2869 local void send_tree (s, tree, max_code) 2870 deflate_state *s; 2871 ct_data *tree; /* the tree to be scanned */ 2872 int max_code; /* and its largest code of non zero frequency */ 2873 { 2874 int n; /* iterates over all tree elements */ 2875 int prevlen = -1; /* last emitted length */ 2876 int curlen; /* length of current code */ 2877 int nextlen = tree[0].Len; /* length of next code */ 2878 int count = 0; /* repeat count of the current code */ 2879 int max_count = 7; /* max repeat count */ 2880 int min_count = 4; /* min repeat count */ 2881 2882 /* tree[max_code+1].Len = -1; */ /* guard already set */ 2883 if (nextlen == 0) max_count = 138, min_count = 3; 2884 2885 for (n = 0; n <= max_code; n++) { 2886 curlen = nextlen; nextlen = tree[n+1].Len; 2887 if (++count < max_count && curlen == nextlen) { 2888 continue; 2889 } else if (count < min_count) { 2890 do { send_code(s, curlen, s->bl_tree); } while (--count != 0); 2891 2892 } else if (curlen != 0) { 2893 if (curlen != prevlen) { 2894 send_code(s, curlen, s->bl_tree); count--; 2895 } 2896 Assert(count >= 3 && count <= 6, " 3_6?"); 2897 send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2); 2898 2899 } else if (count <= 10) { 2900 send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3); 2901 2902 } else { 2903 send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7); 2904 } 2905 count = 0; prevlen = curlen; 2906 if (nextlen == 0) { 2907 max_count = 138, min_count = 3; 2908 } else if (curlen == nextlen) { 2909 max_count = 6, min_count = 3; 2910 } else { 2911 max_count = 7, min_count = 4; 2912 } 2913 } 2914 } 2915 2916 /* =========================================================================== 2917 * Construct the Huffman tree for the bit lengths and return the index in 2918 * bl_order of the last bit length code to send. 
2919 */ 2920 local int build_bl_tree(s) 2921 deflate_state *s; 2922 { 2923 int max_blindex; /* index of last bit length code of non zero freq */ 2924 2925 /* Determine the bit length frequencies for literal and distance trees */ 2926 scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code); 2927 scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code); 2928 2929 /* Build the bit length tree: */ 2930 build_tree(s, (tree_desc *)(&(s->bl_desc))); 2931 /* opt_len now includes the length of the tree representations, except 2932 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. 2933 */ 2934 2935 /* Determine the number of bit length codes to send. The pkzip format 2936 * requires that at least 4 bit length codes be sent. (appnote.txt says 2937 * 3 but the actual value used is 4.) 2938 */ 2939 for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) { 2940 if (s->bl_tree[bl_order[max_blindex]].Len != 0) break; 2941 } 2942 /* Update opt_len to include the bit length tree and counts */ 2943 s->opt_len += 3*(max_blindex+1) + 5+5+4; 2944 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", 2945 s->opt_len, s->static_len)); 2946 2947 return max_blindex; 2948 } 2949 2950 /* =========================================================================== 2951 * Send the header for a block using dynamic Huffman trees: the counts, the 2952 * lengths of the bit length codes, the literal tree and the distance tree. 2953 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. 
2954 */ 2955 local void send_all_trees(s, lcodes, dcodes, blcodes) 2956 deflate_state *s; 2957 int lcodes, dcodes, blcodes; /* number of codes for each tree */ 2958 { 2959 int rank; /* index in bl_order */ 2960 2961 Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); 2962 Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, 2963 "too many codes"); 2964 Tracev((stderr, "\nbl counts: ")); 2965 send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */ 2966 send_bits(s, dcodes-1, 5); 2967 send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */ 2968 for (rank = 0; rank < blcodes; rank++) { 2969 Tracev((stderr, "\nbl code %2d ", bl_order[rank])); 2970 send_bits(s, s->bl_tree[bl_order[rank]].Len, 3); 2971 } 2972 Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); 2973 2974 send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */ 2975 Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); 2976 2977 send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */ 2978 Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); 2979 } 2980 2981 /* =========================================================================== 2982 * Send a stored block 2983 */ 2984 void _tr_stored_block(s, buf, stored_len, eof) 2985 deflate_state *s; 2986 charf *buf; /* input block */ 2987 ulg stored_len; /* length of input block */ 2988 int eof; /* true if this is the last block for a file */ 2989 { 2990 send_bits(s, (STORED_BLOCK<<1)+eof, 3); /* send block type */ 2991 #ifdef DEBUG_ZLIB 2992 s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; 2993 s->compressed_len += (stored_len + 4) << 3; 2994 #endif 2995 copy_block(s, buf, (unsigned)stored_len, 1); /* with header */ 2996 } 2997 2998 /* Send just the `stored block' type code without any length bytes or data. 
2999 */ 3000 void _tr_stored_type_only(s) 3001 deflate_state *s; 3002 { 3003 send_bits(s, (STORED_BLOCK << 1), 3); 3004 bi_windup(s); 3005 #ifdef DEBUG_ZLIB 3006 s->compressed_len = (s->compressed_len + 3) & ~7L; 3007 #endif 3008 } 3009 3010 /* =========================================================================== 3011 * Send one empty static block to give enough lookahead for inflate. 3012 * This takes 10 bits, of which 7 may remain in the bit buffer. 3013 * The current inflate code requires 9 bits of lookahead. If the 3014 * last two codes for the previous block (real code plus EOB) were coded 3015 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode 3016 * the last real code. In this case we send two empty static blocks instead 3017 * of one. (There are no problems if the previous block is stored or fixed.) 3018 * To simplify the code, we assume the worst case of last real code encoded 3019 * on one bit only. 3020 */ 3021 void _tr_align(s) 3022 deflate_state *s; 3023 { 3024 send_bits(s, STATIC_TREES<<1, 3); 3025 send_code(s, END_BLOCK, static_ltree); 3026 #ifdef DEBUG_ZLIB 3027 s->compressed_len += 10L; /* 3 for block type, 7 for EOB */ 3028 #endif 3029 bi_flush(s); 3030 /* Of the 10 bits for the empty block, we have already sent 3031 * (10 - bi_valid) bits. The lookahead for the last real code (before 3032 * the EOB of the previous block) was thus at least one plus the length 3033 * of the EOB plus what we have just sent of the empty static block. 
3034 */ 3035 if (1 + s->last_eob_len + 10 - s->bi_valid < 9) { 3036 send_bits(s, STATIC_TREES<<1, 3); 3037 send_code(s, END_BLOCK, static_ltree); 3038 #ifdef DEBUG_ZLIB 3039 s->compressed_len += 10L; 3040 #endif 3041 bi_flush(s); 3042 } 3043 s->last_eob_len = 7; 3044 } 3045 3046 /* =========================================================================== 3047 * Determine the best encoding for the current block: dynamic trees, static 3048 * trees or store, and output the encoded block to the zip file. 3049 */ 3050 void _tr_flush_block(s, buf, stored_len, eof) 3051 deflate_state *s; 3052 charf *buf; /* input block, or NULL if too old */ 3053 ulg stored_len; /* length of input block */ 3054 int eof; /* true if this is the last block for a file */ 3055 { 3056 ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */ 3057 int max_blindex = 0; /* index of last bit length code of non zero freq */ 3058 3059 /* Build the Huffman trees unless a stored block is forced */ 3060 if (s->level > 0) { 3061 3062 /* Check if the file is ascii or binary */ 3063 if (s->data_type == Z_UNKNOWN) set_data_type(s); 3064 3065 /* Construct the literal and distance trees */ 3066 build_tree(s, (tree_desc *)(&(s->l_desc))); 3067 Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, 3068 s->static_len)); 3069 3070 build_tree(s, (tree_desc *)(&(s->d_desc))); 3071 Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, 3072 s->static_len)); 3073 /* At this point, opt_len and static_len are the total bit lengths of 3074 * the compressed block data, excluding the tree representations. 3075 */ 3076 3077 /* Build the bit length tree for the above two trees, and get the index 3078 * in bl_order of the last bit length code to send. 3079 */ 3080 max_blindex = build_bl_tree(s); 3081 3082 /* Determine the best encoding. 
Compute first the block length in bytes*/ 3083 opt_lenb = (s->opt_len+3+7)>>3; 3084 static_lenb = (s->static_len+3+7)>>3; 3085 3086 Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", 3087 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, 3088 s->last_lit)); 3089 3090 if (static_lenb <= opt_lenb) opt_lenb = static_lenb; 3091 3092 } else { 3093 Assert(buf != (char*)0, "lost buf"); 3094 opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ 3095 } 3096 3097 #ifdef FORCE_STORED 3098 if (buf != (char*)0) { /* force stored block */ 3099 #else 3100 if (stored_len+4 <= opt_lenb && buf != (char*)0) { 3101 /* 4: two words for the lengths */ 3102 #endif 3103 /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. 3104 * Otherwise we can't have processed more than WSIZE input bytes since 3105 * the last block flush, because compression would have been 3106 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to 3107 * transform a block into a stored block. 3108 */ 3109 _tr_stored_block(s, buf, stored_len, eof); 3110 3111 #ifdef FORCE_STATIC 3112 } else if (static_lenb >= 0) { /* force static trees */ 3113 #else 3114 } else if (static_lenb == opt_lenb) { 3115 #endif 3116 send_bits(s, (STATIC_TREES<<1)+eof, 3); 3117 compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree); 3118 #ifdef DEBUG_ZLIB 3119 s->compressed_len += 3 + s->static_len; 3120 #endif 3121 } else { 3122 send_bits(s, (DYN_TREES<<1)+eof, 3); 3123 send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1, 3124 max_blindex+1); 3125 compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree); 3126 #ifdef DEBUG_ZLIB 3127 s->compressed_len += 3 + s->opt_len; 3128 #endif 3129 } 3130 Assert (s->compressed_len == s->bits_sent, "bad compressed size"); 3131 /* The above check is made mod 2^32, for files larger than 512 MB 3132 * and uLong implemented on 32 bits. 
3133 */ 3134 init_block(s); 3135 3136 if (eof) { 3137 bi_windup(s); 3138 #ifdef DEBUG_ZLIB 3139 s->compressed_len += 7; /* align on byte boundary */ 3140 #endif 3141 } 3142 Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, 3143 s->compressed_len-7*eof)); 3144 } 3145 3146 /* =========================================================================== 3147 * Save the match info and tally the frequency counts. Return true if 3148 * the current block must be flushed. 3149 */ 3150 int _tr_tally (s, dist, lc) 3151 deflate_state *s; 3152 unsigned dist; /* distance of matched string */ 3153 unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ 3154 { 3155 s->d_buf[s->last_lit] = (ush)dist; 3156 s->l_buf[s->last_lit++] = (uch)lc; 3157 if (dist == 0) { 3158 /* lc is the unmatched char */ 3159 s->dyn_ltree[lc].Freq++; 3160 } else { 3161 s->matches++; 3162 /* Here, lc is the match length - MIN_MATCH */ 3163 dist--; /* dist = match distance - 1 */ 3164 Assert((ush)dist < (ush)MAX_DIST(s) && 3165 (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && 3166 (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); 3167 3168 s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++; 3169 s->dyn_dtree[d_code(dist)].Freq++; 3170 } 3171 3172 #ifdef TRUNCATE_BLOCK 3173 /* Try to guess if it is profitable to stop the current block here */ 3174 if ((s->last_lit & 0x1fff) == 0 && s->level > 2) { 3175 /* Compute an upper bound for the compressed length */ 3176 ulg out_length = (ulg)s->last_lit*8L; 3177 ulg in_length = (ulg)((long)s->strstart - s->block_start); 3178 int dcode; 3179 for (dcode = 0; dcode < D_CODES; dcode++) { 3180 out_length += (ulg)s->dyn_dtree[dcode].Freq * 3181 (5L+extra_dbits[dcode]); 3182 } 3183 out_length >>= 3; 3184 Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", 3185 s->last_lit, in_length, out_length, 3186 100L - out_length*100L/in_length)); 3187 if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1; 3188 } 3189 #endif 3190 return 
(s->last_lit == s->lit_bufsize-1); 3191 /* We avoid equality with lit_bufsize because of wraparound at 64K 3192 * on 16 bit machines and because stored blocks are restricted to 3193 * 64K-1 bytes. 3194 */ 3195 } 3196 3197 /* =========================================================================== 3198 * Send the block data compressed using the given Huffman trees 3199 */ 3200 local void compress_block(s, ltree, dtree) 3201 deflate_state *s; 3202 ct_data *ltree; /* literal tree */ 3203 ct_data *dtree; /* distance tree */ 3204 { 3205 unsigned dist; /* distance of matched string */ 3206 int lc; /* match length or unmatched char (if dist == 0) */ 3207 unsigned lx = 0; /* running index in l_buf */ 3208 unsigned code; /* the code to send */ 3209 int extra; /* number of extra bits to send */ 3210 3211 if (s->last_lit != 0) do { 3212 dist = s->d_buf[lx]; 3213 lc = s->l_buf[lx++]; 3214 if (dist == 0) { 3215 send_code(s, lc, ltree); /* send a literal byte */ 3216 Tracecv(isgraph(lc), (stderr," '%c' ", lc)); 3217 } else { 3218 /* Here, lc is the match length - MIN_MATCH */ 3219 code = _length_code[lc]; 3220 send_code(s, code+LITERALS+1, ltree); /* send the length code */ 3221 extra = extra_lbits[code]; 3222 if (extra != 0) { 3223 lc -= base_length[code]; 3224 send_bits(s, lc, extra); /* send the extra length bits */ 3225 } 3226 dist--; /* dist is now the match distance - 1 */ 3227 code = d_code(dist); 3228 Assert (code < D_CODES, "bad d_code"); 3229 3230 send_code(s, code, dtree); /* send the distance code */ 3231 extra = extra_dbits[code]; 3232 if (extra != 0) { 3233 dist -= base_dist[code]; 3234 send_bits(s, dist, extra); /* send the extra distance bits */ 3235 } 3236 } /* literal or match pair ? 
*/ 3237 3238 /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ 3239 Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow"); 3240 3241 } while (lx < s->last_lit); 3242 3243 send_code(s, END_BLOCK, ltree); 3244 s->last_eob_len = ltree[END_BLOCK].Len; 3245 } 3246 3247 /* =========================================================================== 3248 * Set the data type to ASCII or BINARY, using a crude approximation: 3249 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise. 3250 * IN assertion: the fields freq of dyn_ltree are set and the total of all 3251 * frequencies does not exceed 64K (to fit in an int on 16 bit machines). 3252 */ 3253 local void set_data_type(s) 3254 deflate_state *s; 3255 { 3256 int n = 0; 3257 unsigned ascii_freq = 0; 3258 unsigned bin_freq = 0; 3259 while (n < 7) bin_freq += s->dyn_ltree[n++].Freq; 3260 while (n < 128) ascii_freq += s->dyn_ltree[n++].Freq; 3261 while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq; 3262 s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII); 3263 } 3264 3265 /* =========================================================================== 3266 * Reverse the first len bits of a code, using straightforward code (a faster 3267 * method would use a table) 3268 * IN assertion: 1 <= len <= 15 3269 */ 3270 local unsigned bi_reverse(code, len) 3271 unsigned code; /* the value to invert */ 3272 int len; /* its bit length */ 3273 { 3274 unsigned res = 0; 3275 do { 3276 res |= code & 1; 3277 code >>= 1, res <<= 1; 3278 } while (--len > 0); 3279 return res >> 1; 3280 } 3281 3282 /* =========================================================================== 3283 * Flush the bit buffer, keeping at most 7 bits in it. 
3284 */ 3285 local void bi_flush(s) 3286 deflate_state *s; 3287 { 3288 if (s->bi_valid == 16) { 3289 put_short(s, s->bi_buf); 3290 s->bi_buf = 0; 3291 s->bi_valid = 0; 3292 } else if (s->bi_valid >= 8) { 3293 put_byte(s, (Byte)s->bi_buf); 3294 s->bi_buf >>= 8; 3295 s->bi_valid -= 8; 3296 } 3297 } 3298 3299 /* =========================================================================== 3300 * Flush the bit buffer and align the output on a byte boundary 3301 */ 3302 local void bi_windup(s) 3303 deflate_state *s; 3304 { 3305 if (s->bi_valid > 8) { 3306 put_short(s, s->bi_buf); 3307 } else if (s->bi_valid > 0) { 3308 put_byte(s, (Byte)s->bi_buf); 3309 } 3310 s->bi_buf = 0; 3311 s->bi_valid = 0; 3312 #ifdef DEBUG_ZLIB 3313 s->bits_sent = (s->bits_sent+7) & ~7; 3314 #endif 3315 } 3316 3317 /* =========================================================================== 3318 * Copy a stored block, storing first the length and its 3319 * one's complement if requested. 3320 */ 3321 local void copy_block(s, buf, len, header) 3322 deflate_state *s; 3323 charf *buf; /* the input data */ 3324 unsigned len; /* its length */ 3325 int header; /* true if block header must be written */ 3326 { 3327 bi_windup(s); /* align on byte boundary */ 3328 s->last_eob_len = 8; /* enough lookahead for inflate */ 3329 3330 if (header) { 3331 put_short(s, (ush)len); 3332 put_short(s, (ush)~len); 3333 #ifdef DEBUG_ZLIB 3334 s->bits_sent += 2*16; 3335 #endif 3336 } 3337 #ifdef DEBUG_ZLIB 3338 s->bits_sent += (ulg)len<<3; 3339 #endif 3340 /* bundle up the put_byte(s, *buf++) calls */ 3341 zmemcpy(&s->pending_buf[s->pending], buf, len); 3342 s->pending += len; 3343 } 3344 /* --- trees.c */ 3345 3346 /* +++ inflate.c */ 3347 3348 /* inflate.c -- zlib interface to inflate modules 3349 * Copyright (C) 1995-2002 Mark Adler 3350 * For conditions of distribution and use, see copyright notice in zlib.h 3351 */ 3352 3353 /* #include "zutil.h" */ 3354 3355 /* +++ infblock.h */ 3356 3357 /* infblock.h -- header 
to use infblock.c 3358 * Copyright (C) 1995-2002 Mark Adler 3359 * For conditions of distribution and use, see copyright notice in zlib.h 3360 */ 3361 3362 /* WARNING: this file should *not* be used by applications. It is 3363 part of the implementation of the compression library and is 3364 subject to change. Applications should only use zlib.h. 3365 */ 3366 3367 struct inflate_blocks_state; 3368 typedef struct inflate_blocks_state FAR inflate_blocks_statef; 3369 3370 extern inflate_blocks_statef * inflate_blocks_new __P(( 3371 z_streamp z, 3372 check_func c, /* check function */ 3373 uInt w)); /* window size */ 3374 3375 extern int inflate_blocks __P(( 3376 inflate_blocks_statef *, 3377 z_streamp , 3378 int)); /* initial return code */ 3379 3380 extern void inflate_blocks_reset __P(( 3381 inflate_blocks_statef *, 3382 z_streamp , 3383 uLongf *)); /* check value on output */ 3384 3385 extern int inflate_blocks_free __P(( 3386 inflate_blocks_statef *, 3387 z_streamp)); 3388 3389 extern void inflate_set_dictionary __P(( 3390 inflate_blocks_statef *s, 3391 const Bytef *d, /* dictionary */ 3392 uInt n)); /* dictionary length */ 3393 3394 extern int inflate_blocks_sync_point __P(( 3395 inflate_blocks_statef *s)); 3396 extern int inflate_addhistory __P(( 3397 inflate_blocks_statef *, 3398 z_streamp)); 3399 3400 extern int inflate_packet_flush __P(( 3401 inflate_blocks_statef *)); 3402 3403 /* --- infblock.h */ 3404 3405 #ifndef NO_DUMMY_DECL 3406 struct inflate_blocks_state {int dummy;}; /* for buggy compilers */ 3407 #endif 3408 3409 typedef enum { 3410 METHOD, /* waiting for method byte */ 3411 FLAG, /* waiting for flag byte */ 3412 DICT4, /* four dictionary check bytes to go */ 3413 DICT3, /* three dictionary check bytes to go */ 3414 DICT2, /* two dictionary check bytes to go */ 3415 DICT1, /* one dictionary check byte to go */ 3416 DICT0, /* waiting for inflateSetDictionary */ 3417 BLOCKS, /* decompressing blocks */ 3418 CHECK4, /* four check bytes to go */ 3419 
CHECK3, /* three check bytes to go */ 3420 CHECK2, /* two check bytes to go */ 3421 CHECK1, /* one check byte to go */ 3422 DONE, /* finished check, done */ 3423 BAD} /* got an error--stay here */ 3424 inflate_mode; 3425 3426 /* inflate private state */ 3427 struct internal_state { 3428 3429 /* mode */ 3430 inflate_mode mode; /* current inflate mode */ 3431 3432 /* mode dependent information */ 3433 union { 3434 uInt method; /* if FLAGS, method byte */ 3435 struct { 3436 uLong was; /* computed check value */ 3437 uLong need; /* stream check value */ 3438 } check; /* if CHECK, check values to compare */ 3439 uInt marker; /* if BAD, inflateSync's marker bytes count */ 3440 } sub; /* submode */ 3441 3442 /* mode independent information */ 3443 int nowrap; /* flag for no wrapper */ 3444 uInt wbits; /* log2(window size) (8..15, defaults to 15) */ 3445 inflate_blocks_statef 3446 *blocks; /* current inflate_blocks state */ 3447 3448 }; 3449 3450 3451 int ZEXPORT inflateReset(z) 3452 z_streamp z; 3453 { 3454 if (z == Z_NULL || z->state == Z_NULL) 3455 return Z_STREAM_ERROR; 3456 z->total_in = z->total_out = 0; 3457 z->msg = Z_NULL; 3458 z->state->mode = z->state->nowrap ? 
BLOCKS : METHOD; 3459 inflate_blocks_reset(z->state->blocks, z, Z_NULL); 3460 Tracev((stderr, "inflate: reset\n")); 3461 return Z_OK; 3462 } 3463 3464 3465 int ZEXPORT inflateEnd(z) 3466 z_streamp z; 3467 { 3468 if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL) 3469 return Z_STREAM_ERROR; 3470 if (z->state->blocks != Z_NULL) 3471 inflate_blocks_free(z->state->blocks, z); 3472 ZFREE(z, z->state); 3473 z->state = Z_NULL; 3474 Tracev((stderr, "inflate: end\n")); 3475 return Z_OK; 3476 } 3477 3478 3479 int ZEXPORT inflateInit2_(z, w, version, stream_size) 3480 z_streamp z; 3481 int w; 3482 const char *version; 3483 int stream_size; 3484 { 3485 if (version == Z_NULL || version[0] != ZLIB_VERSION[0] || 3486 stream_size != sizeof(z_stream)) 3487 return Z_VERSION_ERROR; 3488 3489 /* initialize state */ 3490 if (z == Z_NULL) 3491 return Z_STREAM_ERROR; 3492 z->msg = Z_NULL; 3493 #ifndef NO_ZCFUNCS 3494 if (z->zalloc == Z_NULL) 3495 { 3496 z->zalloc = zcalloc; 3497 z->opaque = (voidpf)0; 3498 } 3499 if (z->zfree == Z_NULL) z->zfree = zcfree; 3500 #endif 3501 if ((z->state = (struct internal_state FAR *) 3502 ZALLOC(z,1,sizeof(struct internal_state))) == Z_NULL) 3503 return Z_MEM_ERROR; 3504 z->state->blocks = Z_NULL; 3505 3506 /* handle undocumented nowrap option (no zlib header or check) */ 3507 z->state->nowrap = 0; 3508 if (w < 0) 3509 { 3510 w = - w; 3511 z->state->nowrap = 1; 3512 } 3513 3514 /* set window size */ 3515 if (w < 8 || w > 15) 3516 { 3517 inflateEnd(z); 3518 return Z_STREAM_ERROR; 3519 } 3520 z->state->wbits = (uInt)w; 3521 3522 /* create inflate_blocks state */ 3523 if ((z->state->blocks = 3524 inflate_blocks_new(z, z->state->nowrap ? 
Z_NULL : adler32, (uInt)1 << w))
      == Z_NULL)
  {
    inflateEnd(z);
    return Z_MEM_ERROR;
  }
  Tracev((stderr, "inflate: allocated\n"));

  /* reset state */
  inflateReset(z);
  return Z_OK;
}


/* Convenience initializer: same as inflateInit2_() but with the default
   window size (DEF_WBITS).  version/stream_size are passed through for the
   ABI compatibility check performed by inflateInit2_(). */
int ZEXPORT inflateInit_(z, version, stream_size)
z_streamp z;
const char *version;
int stream_size;
{
  return inflateInit2_(z, DEF_WBITS, version, stream_size);
}


/* Byte-fetch helpers for the header/trailer state machine below:
   NEEDBYTE bails out to the `empty' label when no input remains;
   NEXTBYTE consumes one input byte and maintains the total_in count. */
#define NEEDBYTE {if(z->avail_in==0)goto empty;r=Z_OK;}
#define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)

/* Main decompression entry point.  Implements the zlib wrapper as a
   fall-through state machine (METHOD -> FLAG -> [DICT4..DICT0] ->
   BLOCKS -> [CHECK4..CHECK1] -> DONE); the compressed data itself is
   handled by inflate_blocks() in the BLOCKS state.  `f' is the flush
   mode; Z_PACKET_FLUSH is the PPP extension handled at `empty'. */
int ZEXPORT inflate(z, f)
z_streamp z;
int f;
{
  int r, r2;
  uInt b;

  if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL)
    return Z_STREAM_ERROR;
  /* r2 is the value reported when no forward progress is an error:
     only Z_FINISH turns "no progress" into Z_BUF_ERROR */
  r2 = f == Z_FINISH ? Z_BUF_ERROR : Z_OK;
  r = Z_BUF_ERROR;
  while (1) switch (z->state->mode)
  {
    case METHOD:
      NEEDBYTE
      if (((z->state->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED)
      {
        z->state->mode = BAD;
        z->msg = (char*)"unknown compression method";
        z->state->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      /* high nibble of the CMF byte is log2(window size) - 8 */
      if ((z->state->sub.method >> 4) + 8 > z->state->wbits)
      {
        z->state->mode = BAD;
        z->msg = (char*)"invalid window size";
        z->state->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      z->state->mode = FLAG;
    case FLAG:
      NEEDBYTE
      b = NEXTBYTE;
      /* CMF*256 + FLG must be a multiple of 31 (RFC 1950 header check) */
      if (((z->state->sub.method << 8) + b) % 31)
      {
        z->state->mode = BAD;
        z->msg = (char*)"incorrect header check";
        z->state->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      Tracev((stderr, "inflate: zlib header ok\n"));
      if (!(b & PRESET_DICT))
      {
        z->state->mode = BLOCKS;
        break;
      }
      z->state->mode = DICT4;
    /* DICT4..DICT1 accumulate the big-endian Adler-32 of the required
       preset dictionary, then hand it to the caller via z->adler */
    case DICT4:
      NEEDBYTE
      z->state->sub.check.need = (uLong)NEXTBYTE << 24;
      z->state->mode = DICT3;
    case DICT3:
      NEEDBYTE
      z->state->sub.check.need += (uLong)NEXTBYTE << 16;
      z->state->mode = DICT2;
    case DICT2:
      NEEDBYTE
      z->state->sub.check.need += (uLong)NEXTBYTE << 8;
      z->state->mode = DICT1;
    case DICT1:
      NEEDBYTE
      z->state->sub.check.need += (uLong)NEXTBYTE;
      z->adler = z->state->sub.check.need;
      z->state->mode = DICT0;
      return Z_NEED_DICT;
    case DICT0:
      /* re-entered without inflateSetDictionary() having been called */
      z->state->mode = BAD;
      z->msg = (char*)"need dictionary";
      z->state->sub.marker = 0;       /* can try inflateSync */
      return Z_STREAM_ERROR;
    case BLOCKS:
      r = inflate_blocks(z->state->blocks, z, r);
      /* PPP extension: at end of a packet, accept a missing stored-block
         length field (see inflate_packet_flush) */
      if (f == Z_PACKET_FLUSH && z->avail_in == 0 && z->avail_out != 0)
          r = inflate_packet_flush(z->state->blocks);
      if (r == Z_DATA_ERROR)
      {
        z->state->mode = BAD;
        z->state->sub.marker = 0;       /* can try inflateSync */
        break;
      }
      if (r == Z_OK)
        r = r2;
      if (r != Z_STREAM_END)
        return r;
      r = r2;
      inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was);
      if (z->state->nowrap)
      {
        z->state->mode = DONE;
        break;
      }
      z->state->mode = CHECK4;
    /* CHECK4..CHECK1 read the big-endian Adler-32 trailer and compare it
       with the running checksum of the decompressed output */
    case CHECK4:
      NEEDBYTE
      z->state->sub.check.need = (uLong)NEXTBYTE << 24;
      z->state->mode = CHECK3;
    case CHECK3:
      NEEDBYTE
      z->state->sub.check.need += (uLong)NEXTBYTE << 16;
      z->state->mode = CHECK2;
    case CHECK2:
      NEEDBYTE
      z->state->sub.check.need += (uLong)NEXTBYTE << 8;
      z->state->mode = CHECK1;
    case CHECK1:
      NEEDBYTE
      z->state->sub.check.need += (uLong)NEXTBYTE;

      if (z->state->sub.check.was != z->state->sub.check.need)
      {
        z->state->mode = BAD;
        z->msg = (char*)"incorrect data check";
        z->state->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      Tracev((stderr, "inflate: zlib check ok\n"));
      z->state->mode = DONE;
    case DONE:
      return Z_STREAM_END;
    case BAD:
      return Z_DATA_ERROR;
    default:
      return Z_STREAM_ERROR;
  }
 empty:
  /* ran out of input mid-header/trailer */
  if (f != Z_PACKET_FLUSH)
    return r;
  z->state->mode = BAD;
  z->msg = (char *)"need more for packet flush";
  z->state->sub.marker = 0;
  return Z_DATA_ERROR;
}


/* Supply the preset dictionary announced by the stream header.  Only legal
   in state DICT0 (i.e. right after inflate() returned Z_NEED_DICT); the
   dictionary's Adler-32 must match the value read from the stream.  If the
   dictionary is larger than the window, only its last (1<<wbits)-1 bytes
   are used. */
int ZEXPORT inflateSetDictionary(z, dictionary, dictLength)
z_streamp z;
const Bytef *dictionary;
uInt dictLength;
{
  uInt length = dictLength;

  if (z == Z_NULL || z->state == Z_NULL || z->state->mode != DICT0)
    return Z_STREAM_ERROR;

  if (adler32(1L, dictionary, dictLength) != z->adler) return Z_DATA_ERROR;
  z->adler = 1L;

  if (length >= ((uInt)1<<z->state->wbits))
  {
    length = (1<<z->state->wbits)-1;
    dictionary += dictLength - length;
  }
  inflate_set_dictionary(z->state->blocks, dictionary, length);
  z->state->mode = BLOCKS;
  return Z_OK;
}

/*
 * This subroutine adds the data at next_in/avail_in to the output history
 * without performing any output.  The output buffer must be "caught up";
 * i.e. no pending output (hence s->read equals s->write), and the state must
 * be BLOCKS (i.e. we should be willing to see the start of a series of
 * BLOCKS).  On exit, the output will also be caught up, and the checksum
 * will have been updated if need be.
*/
int inflateIncomp(z)
z_stream *z;
{
    /* NOTE(review): unlike the ZEXPORT entry points above, this PPP helper
       does not NULL-check z / z->state; callers are presumed to pass a
       valid, initialized stream — confirm at call sites. */
    if (z->state->mode != BLOCKS)
        return Z_DATA_ERROR;
    return inflate_addhistory(z->state->blocks, z);
}

/* Skip corrupted input until a full flush point (the 00 00 FF FF byte
   sequence emitted by an empty stored block) is found, then reset the
   stream so decompression can resume at the next block.  total_in/total_out
   are preserved across the internal inflateReset(). */
int ZEXPORT inflateSync(z)
z_streamp z;
{
  uInt n;       /* number of bytes to look at */
  Bytef *p;     /* pointer to bytes */
  uInt m;       /* number of marker bytes found in a row */
  uLong r, w;   /* temporaries to save total_in and total_out */

  /* set up */
  if (z == Z_NULL || z->state == Z_NULL)
    return Z_STREAM_ERROR;
  if (z->state->mode != BAD)
  {
    z->state->mode = BAD;
    z->state->sub.marker = 0;
  }
  if ((n = z->avail_in) == 0)
    return Z_BUF_ERROR;
  p = z->next_in;
  m = z->state->sub.marker;     /* resume a partial match from a prior call */

  /* search for 00 00 FF FF */
  while (n && m < 4)
  {
    static const Byte mark[4] = {0, 0, 0xff, 0xff};
    if (*p == mark[m])
      m++;
    else if (*p)
      m = 0;
    else
      m = 4 - m;        /* an extra 0 byte: still two zeros matched */
    p++, n--;
  }

  /* restore input bookkeeping */
  z->total_in += p - z->next_in;
  z->next_in = p;
  z->avail_in = n;
  z->state->sub.marker = m;

  /* return no joy or set up to restart on a new block */
  if (m != 4)
    return Z_DATA_ERROR;
  r = z->total_in;  w = z->total_out;
  inflateReset(z);
  z->total_in = r;  z->total_out = w;
  z->state->mode = BLOCKS;
  return Z_OK;
}


/* Returns true if inflate is currently at the end of a block generated
 * by Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by one PPP
 * implementation to provide an additional safety check. PPP uses Z_SYNC_FLUSH
 * but removes the length bytes of the resulting empty stored block. When
 * decompressing, PPP checks that at the end of input packet, inflate is
 * waiting for these length bytes.
*/
int ZEXPORT inflateSyncPoint(z)
z_streamp z;
{
  if (z == Z_NULL || z->state == Z_NULL || z->state->blocks == Z_NULL)
    return Z_STREAM_ERROR;
  return inflate_blocks_sync_point(z->state->blocks);
}
#undef NEEDBYTE
#undef NEXTBYTE
/* --- inflate.c */

/* +++ infblock.c */

/* infblock.c -- interpret and process block types to last block
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */
/* #include "infblock.h" */

/* +++ inftrees.h */

/* inftrees.h -- header to use inftrees.c
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

/* Huffman code lookup table entry--this entry is four bytes for machines
   that have 16-bit pointers (e.g. PC's in the small or medium model). */

typedef struct inflate_huft_s FAR inflate_huft;

struct inflate_huft_s {
  union {
    struct {
      Byte Exop;        /* number of extra bits or operation */
      Byte Bits;        /* number of bits in this code or subcode */
    } what;
    uInt pad;           /* pad structure to a power of 2 (4 bytes for */
  } word;               /* 16-bit, 8 bytes for 32-bit int's) */
  uInt base;            /* literal, length base, distance base,
                           or table offset */
};

/* Maximum size of dynamic tree.  The maximum found in a long but non-
   exhaustive search was 1004 huft structures (850 for length/literals
   and 154 for distances, the latter actually the result of an
   exhaustive search).  The actual maximum is not known, but the
   value below is more than safe.
 */
#define MANY 1440

/* Build the decoding tree for the 19 bit-length codes of a dynamic block. */
extern int inflate_trees_bits __P((
    uIntf *,                    /* 19 code lengths */
    uIntf *,                    /* bits tree desired/actual depth */
    inflate_huft * FAR *,       /* bits tree result */
    inflate_huft *,             /* space for trees */
    z_streamp));                /* for messages */

/* Build literal/length and distance decoding trees for a dynamic block. */
extern int inflate_trees_dynamic __P((
    uInt,                       /* number of literal/length codes */
    uInt,                       /* number of distance codes */
    uIntf *,                    /* that many (total) code lengths */
    uIntf *,                    /* literal desired/actual bit depth */
    uIntf *,                    /* distance desired/actual bit depth */
    inflate_huft * FAR *,       /* literal/length tree result */
    inflate_huft * FAR *,       /* distance tree result */
    inflate_huft *,             /* space for trees */
    z_streamp));                /* for messages */

/* Return the (shared) decoding trees for fixed-Huffman blocks. */
extern int inflate_trees_fixed __P((
    uIntf *,                    /* literal desired/actual bit depth */
    uIntf *,                    /* distance desired/actual bit depth */
    inflate_huft * FAR *,       /* literal/length tree result */
    inflate_huft * FAR *,       /* distance tree result */
    z_streamp));                /* for memory allocation */
/* --- inftrees.h */

/* +++ infcodes.h */

/* infcodes.h -- header to use infcodes.c
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

struct inflate_codes_state;
typedef struct inflate_codes_state FAR inflate_codes_statef;

extern inflate_codes_statef *inflate_codes_new __P((
    uInt, uInt,
    inflate_huft *, inflate_huft *,
    z_streamp ));

extern int inflate_codes __P((
    inflate_blocks_statef *,
    z_streamp ,
    int));

extern void inflate_codes_free __P((
    inflate_codes_statef *,
    z_streamp ));

/* --- infcodes.h */

/* +++ infutil.h */

/* infutil.h -- types and macros common to blocks and codes
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

#ifndef _INFUTIL_H
#define _INFUTIL_H

/* States of the per-block state machine in inflate_blocks(). */
typedef enum {
      TYPE,     /* get type bits (3, including end bit) */
      LENS,     /* get lengths for stored */
      STORED,   /* processing stored block */
      TABLE,    /* get table lengths */
      BTREE,    /* get bit lengths tree for a dynamic block */
      DTREE,    /* get length, distance trees for a dynamic block */
      CODES,    /* processing fixed or dynamic block */
      DRY,      /* output remaining window bytes */
      DONEB,    /* finished last block, done */
      BADB}     /* got a data error--stuck here */
inflate_block_mode;

/* inflate blocks semi-private state */
struct inflate_blocks_state {

  /* mode */
  inflate_block_mode  mode;     /* current inflate_block mode */

  /* mode dependent information */
  union {
    uInt left;          /* if STORED, bytes left to copy */
    struct {
      uInt table;               /* table lengths (14 bits) */
      uInt index;               /* index into blens (or border) */
      uIntf *blens;             /* bit lengths of codes */
      uInt bb;                  /* bit length tree depth */
      inflate_huft *tb;         /* bit length decoding tree */
    } trees;            /* if DTREE, decoding info for trees */
    struct {
      inflate_codes_statef
         *codes;
    } decode;           /* if CODES, current state */
  } sub;                /* submode */
  uInt last;            /* true if this block is the last block */

  /* mode independent information */
  uInt bitk;            /* bits in bit buffer */
  uLong bitb;           /* bit buffer */
  inflate_huft *hufts;  /* single malloc for tree space */
  Bytef *window;        /* sliding window */
  Bytef *end;           /* one byte after sliding window */
  Bytef *read;          /* window read pointer */
  Bytef *write;         /* window write pointer */
  check_func checkfn;   /* check function */
  uLong check;          /* check on output */

};


/* defines for inflate input/output */
/* update pointers and return */
#define UPDBITS {s->bitb=b;s->bitk=k;}
#define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}
#define UPDOUT {s->write=q;}
#define UPDATE {UPDBITS UPDIN UPDOUT}
#define LEAVE {UPDATE return inflate_flush(s,z,r);}
/* get bytes and bits */
#define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}
#define NEEDBYTE {if(n)r=Z_OK;else LEAVE}
#define NEXTBYTE (n--,*p++)
#define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}}
#define DUMPBITS(j) {b>>=(j);k-=(j);}
/* output bytes */
#define WAVAIL (uInt)(q<s->read?s->read-q-1:s->end-q)
#define LOADOUT {q=s->write;m=(uInt)WAVAIL;}
#define WRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=(uInt)WAVAIL;}}
#define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
#define NEEDOUT {if(m==0){WRAP if(m==0){FLUSH WRAP if(m==0) LEAVE}}r=Z_OK;}
#define OUTBYTE(a) {*q++=(Byte)(a);m--;}
/* load local pointers */
#define LOAD {LOADIN LOADOUT}

/* masks for lower bits (size given to avoid silly warnings with Visual C++) */
extern uInt inflate_mask[17];

/* copy as much as possible from the sliding window to the output area */
extern int inflate_flush __P((
    inflate_blocks_statef *,
    z_streamp ,
    int));

#ifndef NO_DUMMY_DECL
struct internal_state      {int dummy;}; /* for buggy compilers */
#endif

#endif
/* --- infutil.h */

#ifndef NO_DUMMY_DECL
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
#endif

/* simplify the use of the inflate_huft type with some defines */
#define exop word.what.Exop
#define bits word.what.Bits

/* Table for deflate from PKZIP's appnote.txt. */
local const uInt border[] = { /* Order of the bit length code lengths */
        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};

/*
   Notes beyond the 1.93a appnote.txt:

   1. Distance pointers never point before the beginning of the output
      stream.
   2. Distance pointers can point back across blocks, up to 32k away.
   3. There is an implied maximum of 7 bits for the bit length table and
      15 bits for the actual data.
   4. If only one code exists, then it is encoded using one bit.  (Zero
      would be more efficient, but perhaps a little confusing.)  If two
      codes exist, they are coded using one bit each (0 and 1).
   5. There is no way of sending zero distance codes--a dummy must be
      sent if there are none.  (History: a pre 2.0 version of PKZIP would
      store blocks with no distance codes, but this was discovered to be
      too harsh a criterion.)  Valid only for 1.93a.  2.04c does allow
      zero distance codes, which is sent as one code of zero bits in
      length.
   6. There are up to 286 literal/length codes.  Code 256 represents the
      end-of-block.  Note however that the static length tree defines
      288 codes just to fill out the Huffman codes.  Codes 286 and 287
      cannot be used though, since there is no length base or extra bits
      defined for them.  Similarily, there are up to 30 distance codes.
      However, static trees define 32 codes (all 5 bits) to fill out the
      Huffman codes, but the last two had better not show up in the data.
   7. Unzip can check dynamic Huffman blocks for complete code sets.
      The exception is that a single code would not be complete (see #4).
   8. The five bits following the block type is really the number of
      literal codes sent minus 257.
   9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
      (1+6+6).  Therefore, to output three times the length, you output
      three codes (1+1+1), whereas to output four times the same length,
      you only need two codes (1+3).  Hmm.
  10. In the tree reconstruction algorithm, Code = Code + Increment
      only if BitLength(i) is not zero.  (Pretty obvious.)
  11. Correction: 4 Bits: # of Bit Length codes - 4     (4 - 19)
  12. Note: length code 284 can represent 227-258, but length code 285
      really is 258.  The last length deserves its own, short code
      since it gets used a lot in very redundant files.  The length
      258 is special since 258 - 3 (the min match length) is 255.
  13. The literal/length and distance code bit lengths are read as a
      single stream of lengths.  It is possible (and advantageous) for
      a repeat code (16, 17, or 18) to go across the boundary between
      the two sets of lengths.
*/


/* Reset a blocks state to start a fresh series of blocks.  If c is not
   NULL, the current output checksum is stored through it first (used by
   inflate() to capture the value for the trailer comparison).  Frees any
   mode-dependent allocations held by the aborted state. */
void inflate_blocks_reset(s, z, c)
inflate_blocks_statef *s;
z_streamp z;
uLongf *c;
{
  if (c != Z_NULL)
    *c = s->check;
  if (s->mode == BTREE || s->mode == DTREE)
    ZFREE(z, s->sub.trees.blens);
  if (s->mode == CODES)
    inflate_codes_free(s->sub.decode.codes, z);
  s->mode = TYPE;
  s->bitk = 0;
  s->bitb = 0;
  s->read = s->write = s->window;
  if (s->checkfn != Z_NULL)
    z->adler = s->check = (*s->checkfn)(0L, (const Bytef *)Z_NULL, 0);
  Tracev((stderr, "inflate: blocks reset\n"));
}


/* Allocate a blocks state with a w-byte sliding window and a single
   MANY-entry huft arena.  c is the output checksum function (or Z_NULL
   for raw/nowrap streams).  Returns Z_NULL on allocation failure, with
   any partial allocations released. */
inflate_blocks_statef *inflate_blocks_new(z, c, w)
z_streamp z;
check_func c;
uInt w;
{
  inflate_blocks_statef *s;

  if ((s = (inflate_blocks_statef *)ZALLOC
       (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)
    return s;
  if ((s->hufts =
       (inflate_huft *)ZALLOC(z, sizeof(inflate_huft), MANY)) == Z_NULL)
  {
    ZFREE(z, s);
    return Z_NULL;
  }
  if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL)
  {
    ZFREE(z, s->hufts);
    ZFREE(z, s);
    return Z_NULL;
  }
  s->end = s->window + w;
  s->checkfn = c;
  s->mode = TYPE;
  Tracev((stderr, "inflate: blocks allocated\n"));
  inflate_blocks_reset(s, z, Z_NULL);
  return s;
}


/* Decode deflate blocks.  Runs the TYPE/LENS/STORED/TABLE/BTREE/DTREE/
   CODES/DRY state machine over the available input, writing decompressed
   bytes into the sliding window; inflate_flush() (via LEAVE/FLUSH) moves
   window contents to the user's output buffer.  `r' carries the result
   code in and out; local copies of the stream are kept in registers via
   the LOAD/UPDATE macros. */
int inflate_blocks(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
  uInt t;               /* temporary storage */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */

  /* copy input/output information to locals (UPDATE macro restores) */
  LOAD

  /* process input based on current state */
  while (1) switch (s->mode)
  {
    case TYPE:
      NEEDBITS(3)
      t = (uInt)b & 7;
      s->last = t & 1;          /* low bit: final-block flag */
      switch (t >> 1)
      {
        case 0:                         /* stored */
          Tracev((stderr, "inflate: stored block%s\n",
                 s->last ? " (last)" : ""));
          DUMPBITS(3)
          t = k & 7;                    /* go to byte boundary */
          DUMPBITS(t)
          s->mode = LENS;               /* get length of stored block */
          break;
        case 1:                         /* fixed */
          Tracev((stderr, "inflate: fixed codes block%s\n",
                 s->last ? " (last)" : ""));
          {
            uInt bl, bd;
            inflate_huft *tl, *td;

            inflate_trees_fixed(&bl, &bd, &tl, &td, z);
            s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);
            if (s->sub.decode.codes == Z_NULL)
            {
              r = Z_MEM_ERROR;
              LEAVE
            }
          }
          DUMPBITS(3)
          s->mode = CODES;
          break;
        case 2:                         /* dynamic */
          Tracev((stderr, "inflate: dynamic codes block%s\n",
                 s->last ? " (last)" : ""));
          DUMPBITS(3)
          s->mode = TABLE;
          break;
        case 3:                         /* illegal */
          DUMPBITS(3)
          s->mode = BADB;
          z->msg = (char*)"invalid block type";
          r = Z_DATA_ERROR;
          LEAVE
      }
      break;
    case LENS:
      /* stored block: 16-bit LEN followed by its one's complement NLEN */
      NEEDBITS(32)
      if ((((~b) >> 16) & 0xffff) != (b & 0xffff))
      {
        s->mode = BADB;
        z->msg = (char*)"invalid stored block lengths";
        r = Z_DATA_ERROR;
        LEAVE
      }
      s->sub.left = (uInt)b & 0xffff;
      b = k = 0;                      /* dump bits */
      Tracev((stderr, "inflate: stored length %u\n", s->sub.left));
      s->mode = s->sub.left ? STORED : (s->last ? DRY : TYPE);
      break;
    case STORED:
      if (n == 0)
        LEAVE
      NEEDOUT
      /* copy min(bytes left, input available, window room) at once */
      t = s->sub.left;
      if (t > n) t = n;
      if (t > m) t = m;
      zmemcpy(q, p, t);
      p += t;  n -= t;
      q += t;  m -= t;
      if ((s->sub.left -= t) != 0)
        break;
      Tracev((stderr, "inflate: stored end, %lu total out\n",
              z->total_out + (q >= s->read ? q - s->read :
              (s->end - s->read) + (q - s->window))));
      s->mode = s->last ? DRY : TYPE;
      break;
    case TABLE:
      /* 5 bits hlit, 5 bits hdist, 4 bits hclen */
      NEEDBITS(14)
      s->sub.trees.table = t = (uInt)b & 0x3fff;
#ifndef PKZIP_BUG_WORKAROUND
      if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
      {
        s->mode = BADB;
        z->msg = (char*)"too many length or distance symbols";
        r = Z_DATA_ERROR;
        LEAVE
      }
#endif
      t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
      if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)
      {
        r = Z_MEM_ERROR;
        LEAVE
      }
      DUMPBITS(14)
      s->sub.trees.index = 0;
      Tracev((stderr, "inflate: table sizes ok\n"));
      s->mode = BTREE;
    case BTREE:
      /* read the 3-bit code lengths for the bit-length alphabet, in the
         permuted `border' order; missing entries default to zero */
      while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))
      {
        NEEDBITS(3)
        s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;
        DUMPBITS(3)
      }
      while (s->sub.trees.index < 19)
        s->sub.trees.blens[border[s->sub.trees.index++]] = 0;
      s->sub.trees.bb = 7;
      t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,
                             &s->sub.trees.tb, s->hufts, z);
      if (t != Z_OK)
      {
        r = t;
        if (r == Z_DATA_ERROR)
        {
          ZFREE(z, s->sub.trees.blens);
          s->mode = BADB;
        }
        LEAVE
      }
      s->sub.trees.index = 0;
      Tracev((stderr, "inflate: bits tree ok\n"));
      s->mode = DTREE;
    case DTREE:
      /* decode the literal/length + distance code lengths using the
         bit-length tree, expanding repeat codes 16/17/18 */
      while (t = s->sub.trees.table,
             s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))
      {
        inflate_huft *h;
        uInt i, j, c;

        t = s->sub.trees.bb;
        NEEDBITS(t)
        h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);
        t = h->bits;
        c = h->base;
        if (c < 16)
        {
          DUMPBITS(t)
          s->sub.trees.blens[s->sub.trees.index++] = c;
        }
        else /* c == 16..18 */
        {
          i = c == 18 ? 7 : c - 14;     /* extra bits to read */
          j = c == 18 ? 11 : 3;         /* base repeat count */
          NEEDBITS(t + i)
          DUMPBITS(t)
          j += (uInt)b & inflate_mask[i];
          DUMPBITS(i)
          i = s->sub.trees.index;
          t = s->sub.trees.table;
          if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
              (c == 16 && i < 1))       /* 16 repeats previous: need one */
          {
            ZFREE(z, s->sub.trees.blens);
            s->mode = BADB;
            z->msg = (char*)"invalid bit length repeat";
            r = Z_DATA_ERROR;
            LEAVE
          }
          c = c == 16 ? s->sub.trees.blens[i - 1] : 0;
          do {
            s->sub.trees.blens[i++] = c;
          } while (--j);
          s->sub.trees.index = i;
        }
      }
      s->sub.trees.tb = Z_NULL;
      {
        uInt bl, bd;
        inflate_huft *tl, *td;
        inflate_codes_statef *c;

        bl = 9;         /* must be <= 9 for lookahead assumptions */
        bd = 6;         /* must be <= 9 for lookahead assumptions */
        t = s->sub.trees.table;
        t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),
                                  s->sub.trees.blens, &bl, &bd, &tl, &td,
                                  s->hufts, z);
        if (t != Z_OK)
        {
          if (t == (uInt)Z_DATA_ERROR)
          {
            ZFREE(z, s->sub.trees.blens);
            s->mode = BADB;
          }
          r = t;
          LEAVE
        }
        Tracev((stderr, "inflate: trees ok\n"));
        if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)
        {
          r = Z_MEM_ERROR;
          LEAVE
        }
        s->sub.decode.codes = c;
      }
      ZFREE(z, s->sub.trees.blens);
      s->mode = CODES;
    case CODES:
      UPDATE
      if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
        return inflate_flush(s, z, r);
      r = Z_OK;
      inflate_codes_free(s->sub.decode.codes, z);
      LOAD
      Tracev((stderr, "inflate: codes end, %lu total out\n",
              z->total_out + (q >= s->read ? q - s->read :
              (s->end - s->read) + (q - s->window))));
      if (!s->last)
      {
        s->mode = TYPE;
        break;
      }
      s->mode = DRY;
    case DRY:
      /* flush any remaining window contents before declaring the end */
      FLUSH
      if (s->read != s->write)
        LEAVE
      s->mode = DONEB;
    case DONEB:
      r = Z_STREAM_END;
      LEAVE
    case BADB:
      r = Z_DATA_ERROR;
      LEAVE
    default:
      r = Z_STREAM_ERROR;
      LEAVE
  }
}


/* Release a blocks state and everything it owns (window, huft arena,
   and any mode-dependent allocations via inflate_blocks_reset). */
int inflate_blocks_free(s, z)
inflate_blocks_statef *s;
z_streamp z;
{
  inflate_blocks_reset(s, z, Z_NULL);
  ZFREE(z, s->window);
  ZFREE(z, s->hufts);
  ZFREE(z, s);
  Tracev((stderr, "inflate: blocks freed\n"));
  return Z_OK;
}


/* Preload the sliding window with n bytes of dictionary d; the caller
   (inflateSetDictionary) guarantees n fits in the window. */
void inflate_set_dictionary(s, d, n)
inflate_blocks_statef *s;
const Bytef *d;
uInt n;
{
  zmemcpy(s->window, d, n);
  s->read = s->write = s->window + n;
}

/*
 * This subroutine adds the data at next_in/avail_in to the output history
 * without performing any output.  The output buffer must be "caught up";
 * i.e. no pending output (hence s->read equals s->write), and the state must
 * be BLOCKS (i.e. we should be willing to see the start of a series of
 * BLOCKS).  On exit, the output will also be caught up, and the checksum
 * will have been updated if need be.
*/
int inflate_addhistory(s, z)
inflate_blocks_statef *s;
z_stream *z;
{
    uLong b;              /* bit buffer */  /* NOT USED HERE */
    uInt k;               /* bits in bit buffer */ /* NOT USED HERE */
    uInt t;               /* temporary storage */
    Bytef *p;             /* input data pointer */
    uInt n;               /* bytes available there */
    Bytef *q;             /* output window write pointer */
    uInt m;               /* bytes to end of window or read pointer */

    if (s->read != s->write)
        return Z_STREAM_ERROR;
    if (s->mode != TYPE)
        return Z_DATA_ERROR;

    /* we're ready to rock */
    LOAD
    /* while there is input ready, copy to output buffer, moving
     * pointers as needed.
     */
    while (n) {
        t = n;  /* how many to do */
        /* is there room until end of buffer? */
        if (t > m) t = m;
        /* update check information */
        if (s->checkfn != Z_NULL)
            s->check = (*s->checkfn)(s->check, q, t);
        zmemcpy(q, p, t);
        q += t;
        p += t;
        n -= t;
        z->total_out += t;
        s->read = q;    /* drag read pointer forward */
/*      WWRAP  */       /* expand WWRAP macro by hand to handle s->read */
        if (q == s->end) {
            s->read = q = s->window;
            m = WAVAIL;
        }
    }
    UPDATE
    return Z_OK;
}


/*
 * At the end of a Deflate-compressed PPP packet, we expect to have seen
 * a `stored' block type value but not the (zero) length bytes.
 */
int inflate_packet_flush(s)
    inflate_blocks_statef *s;
{
    /* LENS means a stored-block header was read but its length field was
       not; treating that as a completed empty stored block lets PPP strip
       those four bytes from each packet */
    if (s->mode != LENS)
        return Z_DATA_ERROR;
    s->mode = TYPE;
    return Z_OK;
}

/* Returns true if inflate is currently at the end of a block generated
 * by Z_SYNC_FLUSH or Z_FULL_FLUSH.
 * IN assertion: s != Z_NULL
 */
int inflate_blocks_sync_point(s)
inflate_blocks_statef *s;
{
  return s->mode == LENS;
}
/* --- infblock.c */


/* +++ inftrees.c */

/* inftrees.c -- generate Huffman trees for efficient decoding
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */
/* #include "inftrees.h" */

#if !defined(BUILDFIXED) && !defined(STDC)
#  define BUILDFIXED   /* non ANSI compilers may not accept inffixed.h */
#endif

const char inflate_copyright[] =
   " inflate 1.1.4 Copyright 1995-2002 Mark Adler ";
/*
  If you use the zlib library in a product, an acknowledgment is welcome
  in the documentation of your product. If for some reason you cannot
  include such an acknowledgment, I would appreciate that you keep this
  copyright string in the executable of your product.
 */

#ifndef NO_DUMMY_DECL
struct internal_state  {int dummy;}; /* for buggy compilers */
#endif

/* simplify the use of the inflate_huft type with some defines */
#define exop word.what.Exop
#define bits word.what.Bits


local int huft_build __P((
    uIntf *,            /* code lengths in bits */
    uInt,               /* number of codes */
    uInt,               /* number of "simple" codes */
    const uIntf *,      /* list of base values for non-simple codes */
    const uIntf *,      /* list of extra bits for non-simple codes */
    inflate_huft * FAR*,/* result: starting table */
    uIntf *,            /* maximum lookup bits (returns actual) */
    inflate_huft *,     /* space for trees */
    uInt *,             /* hufts used in space */
    uIntf * ));         /* space for values */

/* Tables for deflate from PKZIP's appnote.txt.
*/
local const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */
        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
        /* see note #13 above about 258 */
local const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112}; /* 112==invalid */
local const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */
        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
        8193, 12289, 16385, 24577};
local const uInt cpdext[30] = { /* Extra bits for distance codes */
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
        12, 12, 13, 13};

/*
   Huffman code decoding is performed using a multi-level table lookup.
   The fastest way to decode is to simply build a lookup table whose
   size is determined by the longest code.  However, the time it takes
   to build this table can also be a factor if the data being decoded
   is not very long.  The most common codes are necessarily the
   shortest codes, so those codes dominate the decoding time, and hence
   the speed.  The idea is you can have a shorter table that decodes the
   shorter, more probable codes, and then point to subsidiary tables for
   the longer codes.  The time it costs to decode the longer codes is
   then traded against the time it takes to make longer tables.

   This results of this trade are in the variables lbits and dbits
   below.  lbits is the number of bits the first level table for literal/
   length codes can decode in one step, and dbits is the same thing for
   the distance codes.  Subsequent tables are also less than or equal to
   those sizes.
These values may be adjusted either when all of the 4541 codes are shorter than that, in which case the longest code length in 4542 bits is used, or when the shortest code is *longer* than the requested 4543 table size, in which case the length of the shortest code in bits is 4544 used. 4545 4546 There are two different values for the two tables, since they code a 4547 different number of possibilities each. The literal/length table 4548 codes 286 possible values, or in a flat code, a little over eight 4549 bits. The distance table codes 30 possible values, or a little less 4550 than five bits, flat. The optimum values for speed end up being 4551 about one bit more than those, so lbits is 8+1 and dbits is 5+1. 4552 The optimum values may differ though from machine to machine, and 4553 possibly even between compilers. Your mileage may vary. 4554 */ 4555 4556 4557 /* If BMAX needs to be larger than 16, then h and x[] should be uLong. */ 4558 #define BMAX 15 /* maximum bit length of any code */ 4559 4560 local int huft_build(b, n, s, d, e, t, m, hp, hn, v) 4561 uIntf *b; /* code lengths in bits (all assumed <= BMAX) */ 4562 uInt n; /* number of codes (assumed <= 288) */ 4563 uInt s; /* number of simple-valued codes (0..s-1) */ 4564 const uIntf *d; /* list of base values for non-simple codes */ 4565 const uIntf *e; /* list of extra bits for non-simple codes */ 4566 inflate_huft * FAR *t; /* result: starting table */ 4567 uIntf *m; /* maximum lookup bits, returns actual */ 4568 inflate_huft *hp; /* space for trees */ 4569 uInt *hn; /* hufts used in space */ 4570 uIntf *v; /* working area: values in order of bit length */ 4571 /* Given a list of code lengths and a maximum table size, make a set of 4572 tables to decode that set of codes. Return Z_OK on success, Z_BUF_ERROR 4573 if the given code set is incomplete (the tables are still built in this 4574 case), or Z_DATA_ERROR if the input is invalid. 
*/ 4575 { 4576 4577 uInt a; /* counter for codes of length k */ 4578 uInt c[BMAX+1]; /* bit length count table */ 4579 uInt f; /* i repeats in table every f entries */ 4580 int g; /* maximum code length */ 4581 int h; /* table level */ 4582 uInt i; /* counter, current code */ 4583 uInt j; /* counter */ 4584 int k; /* number of bits in current code */ 4585 int l; /* bits per table (returned in m) */ 4586 uInt mask; /* (1 << w) - 1, to avoid cc -O bug on HP */ 4587 uIntf *p; /* pointer into c[], b[], or v[] */ 4588 inflate_huft *q; /* points to current table */ 4589 struct inflate_huft_s r; /* table entry for structure assignment */ 4590 inflate_huft *u[BMAX]; /* table stack */ 4591 int w; /* bits before this table == (l * h) */ 4592 uInt x[BMAX+1]; /* bit offsets, then code stack */ 4593 uIntf *xp; /* pointer into x */ 4594 int y; /* number of dummy codes added */ 4595 uInt z; /* number of entries in current table */ 4596 4597 4598 /* Generate counts for each bit length */ 4599 p = c; 4600 #define C0 *p++ = 0; 4601 #define C2 C0 C0 C0 C0 4602 #define C4 C2 C2 C2 C2 4603 C4 /* clear c[]--assume BMAX+1 is 16 */ 4604 p = b; i = n; 4605 do { 4606 c[*p++]++; /* assume all entries <= BMAX */ 4607 } while (--i); 4608 if (c[0] == n) /* null input--all zero length codes */ 4609 { 4610 *t = (inflate_huft *)Z_NULL; 4611 *m = 0; 4612 return Z_OK; 4613 } 4614 4615 4616 /* Find minimum and maximum length, bound *m by those */ 4617 l = *m; 4618 for (j = 1; j <= BMAX; j++) 4619 if (c[j]) 4620 break; 4621 k = j; /* minimum code length */ 4622 if ((uInt)l < j) 4623 l = j; 4624 for (i = BMAX; i; i--) 4625 if (c[i]) 4626 break; 4627 g = i; /* maximum code length */ 4628 if ((uInt)l > i) 4629 l = i; 4630 *m = l; 4631 4632 4633 /* Adjust last length count to fill out codes, if needed */ 4634 for (y = 1 << j; j < i; j++, y <<= 1) 4635 if ((y -= c[j]) < 0) 4636 return Z_DATA_ERROR; 4637 if ((y -= c[i]) < 0) 4638 return Z_DATA_ERROR; 4639 c[i] += y; 4640 4641 4642 /* Generate starting 
offsets into the value table for each length */ 4643 x[1] = j = 0; 4644 p = c + 1; xp = x + 2; 4645 while (--i) { /* note that i == g from above */ 4646 *xp++ = (j += *p++); 4647 } 4648 4649 4650 /* Make a table of values in order of bit lengths */ 4651 p = b; i = 0; 4652 do { 4653 if ((j = *p++) != 0) 4654 v[x[j]++] = i; 4655 } while (++i < n); 4656 n = x[g]; /* set n to length of v */ 4657 4658 4659 /* Generate the Huffman codes and for each, make the table entries */ 4660 x[0] = i = 0; /* first Huffman code is zero */ 4661 p = v; /* grab values in bit order */ 4662 h = -1; /* no tables yet--level -1 */ 4663 w = -l; /* bits decoded == (l * h) */ 4664 u[0] = (inflate_huft *)Z_NULL; /* just to keep compilers happy */ 4665 q = (inflate_huft *)Z_NULL; /* ditto */ 4666 z = 0; /* ditto */ 4667 4668 /* go through the bit lengths (k already is bits in shortest code) */ 4669 for (; k <= g; k++) 4670 { 4671 a = c[k]; 4672 while (a--) 4673 { 4674 /* here i is the Huffman code of length k bits for value *p */ 4675 /* make tables up to required level */ 4676 while (k > w + l) 4677 { 4678 h++; 4679 w += l; /* previous table always l bits */ 4680 4681 /* compute minimum size table less than or equal to l bits */ 4682 z = g - w; 4683 z = z > (uInt)l ? 
l : z; /* table size upper limit */ 4684 if ((f = 1 << (j = k - w)) > a + 1) /* try a k-w bit table */ 4685 { /* too few codes for k-w bit table */ 4686 f -= a + 1; /* deduct codes from patterns left */ 4687 xp = c + k; 4688 if (j < z) 4689 while (++j < z) /* try smaller tables up to z bits */ 4690 { 4691 if ((f <<= 1) <= *++xp) 4692 break; /* enough codes to use up j bits */ 4693 f -= *xp; /* else deduct codes from patterns */ 4694 } 4695 } 4696 z = 1 << j; /* table entries for j-bit table */ 4697 4698 /* allocate new table */ 4699 if (*hn + z > MANY) /* (note: doesn't matter for fixed) */ 4700 return Z_DATA_ERROR; /* overflow of MANY */ 4701 u[h] = q = hp + *hn; 4702 *hn += z; 4703 4704 /* connect to last table, if there is one */ 4705 if (h) 4706 { 4707 x[h] = i; /* save pattern for backing up */ 4708 r.bits = (Byte)l; /* bits to dump before this table */ 4709 r.exop = (Byte)j; /* bits in this table */ 4710 j = i >> (w - l); 4711 r.base = (uInt)(q - u[h-1] - j); /* offset to this table */ 4712 u[h-1][j] = r; /* connect to last table */ 4713 } 4714 else 4715 *t = q; /* first table is returned result */ 4716 } 4717 4718 /* set up table entry in r */ 4719 r.bits = (Byte)(k - w); 4720 if (p >= v + n) 4721 r.exop = 128 + 64; /* out of values--invalid code */ 4722 else if (*p < s) 4723 { 4724 r.exop = (Byte)(*p < 256 ? 
0 : 32 + 64); /* 256 is end-of-block */ 4725 r.base = *p++; /* simple code is just the value */ 4726 } 4727 else 4728 { 4729 r.exop = (Byte)(e[*p - s] + 16 + 64);/* non-simple--look up in lists */ 4730 r.base = d[*p++ - s]; 4731 } 4732 4733 /* fill code-like entries with r */ 4734 f = 1 << (k - w); 4735 for (j = i >> w; j < z; j += f) 4736 q[j] = r; 4737 4738 /* backwards increment the k-bit code i */ 4739 for (j = 1 << (k - 1); i & j; j >>= 1) 4740 i ^= j; 4741 i ^= j; 4742 4743 /* backup over finished tables */ 4744 mask = (1 << w) - 1; /* needed on HP, cc -O bug */ 4745 while ((i & mask) != x[h]) 4746 { 4747 h--; /* don't need to update q */ 4748 w -= l; 4749 mask = (1 << w) - 1; 4750 } 4751 } 4752 } 4753 4754 4755 /* Return Z_BUF_ERROR if we were given an incomplete table */ 4756 return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK; 4757 } 4758 4759 4760 int inflate_trees_bits(c, bb, tb, hp, z) 4761 uIntf *c; /* 19 code lengths */ 4762 uIntf *bb; /* bits tree desired/actual depth */ 4763 inflate_huft * FAR *tb; /* bits tree result */ 4764 inflate_huft *hp; /* space for trees */ 4765 z_streamp z; /* for messages */ 4766 { 4767 int r; 4768 uInt hn = 0; /* hufts used in space */ 4769 uIntf *v; /* work area for huft_build */ 4770 4771 if ((v = (uIntf*)ZALLOC(z, 19, sizeof(uInt))) == Z_NULL) 4772 return Z_MEM_ERROR; 4773 r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, 4774 tb, bb, hp, &hn, v); 4775 if (r == Z_DATA_ERROR) 4776 z->msg = (char*)"oversubscribed dynamic bit lengths tree"; 4777 else if (r == Z_BUF_ERROR || *bb == 0) 4778 { 4779 z->msg = (char*)"incomplete dynamic bit lengths tree"; 4780 r = Z_DATA_ERROR; 4781 } 4782 ZFREE(z, v); 4783 return r; 4784 } 4785 4786 4787 int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, hp, z) 4788 uInt nl; /* number of literal/length codes */ 4789 uInt nd; /* number of distance codes */ 4790 uIntf *c; /* that many (total) code lengths */ 4791 uIntf *bl; /* literal desired/actual bit depth */ 4792 uIntf *bd; /* distance 
desired/actual bit depth */ 4793 inflate_huft * FAR *tl; /* literal/length tree result */ 4794 inflate_huft * FAR *td; /* distance tree result */ 4795 inflate_huft *hp; /* space for trees */ 4796 z_streamp z; /* for messages */ 4797 { 4798 int r; 4799 uInt hn = 0; /* hufts used in space */ 4800 uIntf *v; /* work area for huft_build */ 4801 4802 /* allocate work area */ 4803 if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL) 4804 return Z_MEM_ERROR; 4805 4806 /* build literal/length tree */ 4807 r = huft_build(c, nl, 257, cplens, cplext, tl, bl, hp, &hn, v); 4808 if (r != Z_OK || *bl == 0) 4809 { 4810 if (r == Z_DATA_ERROR) 4811 z->msg = (char*)"oversubscribed literal/length tree"; 4812 else if (r != Z_MEM_ERROR) 4813 { 4814 z->msg = (char*)"incomplete literal/length tree"; 4815 r = Z_DATA_ERROR; 4816 } 4817 ZFREE(z, v); 4818 return r; 4819 } 4820 4821 /* build distance tree */ 4822 r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, hp, &hn, v); 4823 if (r != Z_OK || (*bd == 0 && nl > 257)) 4824 { 4825 if (r == Z_DATA_ERROR) 4826 z->msg = (char*)"oversubscribed distance tree"; 4827 else if (r == Z_BUF_ERROR) { 4828 #ifdef PKZIP_BUG_WORKAROUND 4829 r = Z_OK; 4830 } 4831 #else 4832 z->msg = (char*)"incomplete distance tree"; 4833 r = Z_DATA_ERROR; 4834 } 4835 else if (r != Z_MEM_ERROR) 4836 { 4837 z->msg = (char*)"empty distance tree with lengths"; 4838 r = Z_DATA_ERROR; 4839 } 4840 ZFREE(z, v); 4841 return r; 4842 #endif 4843 } 4844 4845 /* done */ 4846 ZFREE(z, v); 4847 return Z_OK; 4848 } 4849 4850 4851 /* build fixed tables only once--keep them here */ 4852 #ifdef BUILDFIXED 4853 local int fixed_built = 0; 4854 #define FIXEDH 544 /* number of hufts used by fixed tables */ 4855 local inflate_huft fixed_mem[FIXEDH]; 4856 local uInt fixed_bl; 4857 local uInt fixed_bd; 4858 local inflate_huft *fixed_tl; 4859 local inflate_huft *fixed_td; 4860 #else 4861 4862 /* +++ inffixed.h */ 4863 /* inffixed.h -- table for decoding fixed codes 4864 * Generated 
 automatically by the maketree.c program
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

/* Precomputed decoding tables for the fixed literal/length and distance
   codes of RFC 1951 section 3.2.6 (machine-generated -- do not edit). */
local uInt fixed_bl = 9;
local uInt fixed_bd = 5;
local inflate_huft fixed_tl[] = {
    {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115},
    {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},192},
    {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},160},
    {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},224},
    {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},144},
    {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},208},
    {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},176},
    {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},240},
    {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227},
    {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},200},
    {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},168},
    {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},232},
    {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},152},
    {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},216},
    {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},184},
    {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},248},
    {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163},
    {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},196},
    {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},164},
    {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},228},
    {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},148},
    {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},212},
    {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},180},
    {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},244},
    {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},204},
    {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},172},
    {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},236},
    {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},156},
    {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},220},
    {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},188},
    {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},252},
    {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131},
    {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},194},
    {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},162},
    {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},226},
    {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},146},
    {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},210},
    {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},178},
    {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},242},
    {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258},
    {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},202},
    {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},170},
    {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},234},
    {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},154},
    {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},218},
    {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},186},
    {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},250},
    {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195},
    {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},198},
    {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},166},
    {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},230},
    {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},150},
    {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},214},
    {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},182},
    {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},246},
    {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},206},
    {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},174},
    {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},238},
    {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},158},
    {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},222},
    {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},190},
    {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},254},
    {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115},
    {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},193},
    {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},161},
    {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},225},
    {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},145},
    {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},209},
    {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},177},
    {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},241},
    {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227},
    {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},201},
    {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},169},
    {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},233},
    {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},153},
    {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},217},
    {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},185},
    {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},249},
    {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163},
    {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},197},
    {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},165},
    {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},229},
    {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},149},
    {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},213},
    {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},181},
    {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},245},
    {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},205},
    {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},173},
    {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},237},
    {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},157},
    {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},221},
    {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},189},
    {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},253},
    {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131},
    {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},195},
    {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},163},
    {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},227},
    {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},147},
    {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},211},
    {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},179},
    {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},243},
    {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258},
    {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},203},
    {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},171},
    {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},235},
    {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},155},
    {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},219},
    {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},187},
    {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},251},
    {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195},
    {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},199},
    {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},167},
    {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},231},
    {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},151},
    {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},215},
    {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},183},
    {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},247},
    {{{80,7}},5},
    {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},207},
    {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},175},
    {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},239},
    {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},159},
    {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},223},
    {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},191},
    {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},255}
  };
/* Fixed distance-code table (machine-generated -- do not edit). */
local inflate_huft fixed_td[] = {
    {{{80,5}},1}, {{{87,5}},257}, {{{83,5}},17}, {{{91,5}},4097},
    {{{81,5}},5}, {{{89,5}},1025}, {{{85,5}},65}, {{{93,5}},16385},
    {{{80,5}},3}, {{{88,5}},513}, {{{84,5}},33}, {{{92,5}},8193},
    {{{82,5}},9}, {{{90,5}},2049}, {{{86,5}},129}, {{{192,5}},24577},
    {{{80,5}},2}, {{{87,5}},385}, {{{83,5}},25}, {{{91,5}},6145},
    {{{81,5}},7}, {{{89,5}},1537}, {{{85,5}},97}, {{{93,5}},24577},
    {{{80,5}},4}, {{{88,5}},769}, {{{84,5}},49}, {{{92,5}},12289},
    {{{82,5}},13}, {{{90,5}},3073}, {{{86,5}},193}, {{{192,5}},24577}
  };
/* --- inffixed.h */

#endif


/* Return the decoding tables for the fixed literal/length and distance
   codes.  With BUILDFIXED the tables are built on first use (not
   thread-safe on first call); otherwise the precomputed inffixed.h
   tables above are returned.  Always returns Z_OK unless BUILDFIXED
   allocation fails. */
int inflate_trees_fixed(bl, bd, tl, td, z)
uIntf *bl;               /* literal desired/actual bit depth */
uIntf *bd;               /* distance desired/actual bit depth */
inflate_huft * FAR *tl;  /* literal/length tree result */
inflate_huft * FAR *td;  /* distance tree result */
z_streamp z;             /* for memory allocation */
{
#ifdef BUILDFIXED
  /* build fixed tables if not already */
  if (!fixed_built)
  {
    int k;              /* temporary variable */
    uInt f = 0;         /* number of hufts used in fixed_mem */
    uIntf *c;           /* length list for huft_build */
    uIntf *v;           /* work area for huft_build */

    /* allocate memory */
    if ((c = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
      return Z_MEM_ERROR;
    if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
    {
      ZFREE(z, c);
      return Z_MEM_ERROR;
    }

    /* literal table: code lengths per RFC 1951 section 3.2.6 */
    for (k = 0; k < 144; k++)
      c[k] = 8;
    for (; k < 256; k++)
      c[k] = 9;
    for (; k < 280; k++)
      c[k] = 7;
    for (; k < 288; k++)
      c[k] = 8;
    fixed_bl = 9;
    huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl,
               fixed_mem, &f, v);

    /* distance table: all 30 codes are 5 bits */
    for (k = 0; k < 30; k++)
      c[k] = 5;
    fixed_bd = 5;
    huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd,
               fixed_mem, &f, v);

    /* done */
    ZFREE(z, v);
    ZFREE(z, c);
    fixed_built = 1;
  }
#endif
  *bl = fixed_bl;
  *bd = fixed_bd;
  *tl = fixed_tl;
  *td = fixed_td;
  return Z_OK;
}
/* --- inftrees.c */

/* +++ infcodes.c */

/* infcodes.c -- process literals and length/distance pairs
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */
/* #include "inftrees.h" */
/* #include "infblock.h" */
/* #include "infcodes.h" */
/* #include "infutil.h" */

/* +++ inffast.h */

/* inffast.h -- header to use inffast.c
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

extern int inflate_fast __P((
    uInt,
    uInt,
    inflate_huft *,
    inflate_huft *,
    inflate_blocks_statef *,
    z_streamp ));
/* --- inffast.h */

/* simplify the use of the inflate_huft type with some defines */
#define exop word.what.Exop
#define bits word.what.Bits

typedef enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
      START,    /* x: set up for LEN */
      LEN,      /* i: get length/literal/eob next */
      LENEXT,   /* i: getting length extra (have base) */
      DIST,     /* i: get distance next */
      DISTEXT,  /* i: getting distance extra */
      COPY,     /* o: copying bytes in window, waiting for space */
      LIT,      /* o: got literal, waiting for output space */
      WASH,     /* o: got eob, possibly still output waiting */
      END,      /* x: got eob and all data flushed */
      BADCODE}  /* x: got error */
inflate_codes_mode;

/* inflate codes private state */
struct inflate_codes_state {

  /* mode */
  inflate_codes_mode mode;      /* current inflate_codes mode */

  /* mode dependent information */
  uInt len;                     /* length of match being copied */
  union {
    struct {
      inflate_huft *tree;       /* pointer into tree */
      uInt need;                /* bits needed */
    } code;             /* if LEN or DIST, where in tree */
    uInt lit;           /* if LIT, literal */
    struct {
      uInt get;                 /* bits to get for extra */
      uInt dist;                /* distance back to copy from */
    } copy;             /* if EXT or COPY, where and how much */
  } sub;                /* submode */

  /* mode independent information */
  Byte lbits;           /* ltree bits decoded per branch */
  Byte dbits;           /* dtree bits decoder per branch */
  inflate_huft *ltree;          /* literal/length/eob tree */
  inflate_huft *dtree;          /* distance tree */

};


/* Allocate and initialize a codes state for the given literal/length and
   distance trees.  Returns Z_NULL on allocation failure. */
inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
uInt bl, bd;
inflate_huft *tl;
inflate_huft *td; /* need separate declaration for Borland C++ */
z_streamp z;
{
  inflate_codes_statef *c;

  if ((c = (inflate_codes_statef *)
       ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
  {
    c->mode = START;
    c->lbits = (Byte)bl;
    c->dbits = (Byte)bd;
    c->ltree = tl;
    c->dtree = td;
    Tracev((stderr, "inflate: codes new\n"));
  }
  return c;
}


/* Decode literals and length/distance pairs until the input or output is
   exhausted, the end-of-block code is reached, or an error occurs.
   Implemented as a resumable state machine: the current mode and any
   partially-decoded data are kept in the codes state so decoding can be
   suspended and resumed at any point.  Relies on the LOAD/UPDATE/
   NEEDBITS/DUMPBITS/NEEDOUT/OUTBYTE/FLUSH/LEAVE macros from infutil.h.
   Note the deliberate case fall-throughs: each state flows into the next
   when it completes without suspending. */
int inflate_codes(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
  uInt j;               /* temporary storage */
  inflate_huft *t;      /* temporary pointer */
  uInt e;               /* extra bits or operation */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */
  Bytef *f;             /* pointer to copy strings from */
  inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */

  /* copy input/output information to locals (UPDATE macro restores) */
  LOAD

  /* process input and output based on current state */
  while (1) switch (c->mode)
  {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
    case START:         /* x: set up for LEN */
#ifndef SLOW
      /* use the unrolled fast path when there is plenty of room */
      if (m >= 258 && n >= 10)
      {
        UPDATE
        r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
        LOAD
        if (r != Z_OK)
        {
          c->mode = r == Z_STREAM_END ? WASH : BADCODE;
          break;
        }
      }
#endif /* !SLOW */
      c->sub.code.need = c->lbits;
      c->sub.code.tree = c->ltree;
      c->mode = LEN;
      /* fall through */
    case LEN:           /* i: get length/literal/eob next */
      j = c->sub.code.need;
      NEEDBITS(j)
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
      DUMPBITS(t->bits)
      e = (uInt)(t->exop);
      if (e == 0)               /* literal */
      {
        c->sub.lit = t->base;
        Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                 "inflate: literal '%c'\n" :
                 "inflate: literal 0x%02x\n", t->base));
        c->mode = LIT;
        break;
      }
      if (e & 16)               /* length */
      {
        c->sub.copy.get = e & 15;
        c->len = t->base;
        c->mode = LENEXT;
        break;
      }
      if ((e & 64) == 0)        /* next table */
      {
        c->sub.code.need = e;
        c->sub.code.tree = t + t->base;
        break;
      }
      if (e & 32)               /* end of block */
      {
        Tracevv((stderr, "inflate: end of block\n"));
        c->mode = WASH;
        break;
      }
      c->mode = BADCODE;        /* invalid code */
      z->msg = (char*)"invalid literal/length code";
      r = Z_DATA_ERROR;
      LEAVE
    case LENEXT:        /* i: getting length extra (have base) */
      j = c->sub.copy.get;
      NEEDBITS(j)
      c->len += (uInt)b & inflate_mask[j];
      DUMPBITS(j)
      c->sub.code.need = c->dbits;
      c->sub.code.tree = c->dtree;
      Tracevv((stderr, "inflate: length %u\n", c->len));
      c->mode = DIST;
      /* fall through */
    case DIST:          /* i: get distance next */
      j = c->sub.code.need;
      NEEDBITS(j)
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
      DUMPBITS(t->bits)
      e = (uInt)(t->exop);
      if (e & 16)               /* distance */
      {
        c->sub.copy.get = e & 15;
        c->sub.copy.dist = t->base;
        c->mode = DISTEXT;
        break;
      }
      if ((e & 64) == 0)        /* next table */
      {
        c->sub.code.need = e;
        c->sub.code.tree = t + t->base;
        break;
      }
      c->mode = BADCODE;        /* invalid code */
      z->msg = (char*)"invalid distance code";
      r = Z_DATA_ERROR;
      LEAVE
    case DISTEXT:       /* i: getting distance extra */
      j = c->sub.copy.get;
      NEEDBITS(j)
      c->sub.copy.dist += (uInt)b & inflate_mask[j];
      DUMPBITS(j)
      Tracevv((stderr, "inflate: distance %u\n", c->sub.copy.dist));
      c->mode = COPY;
      /* fall through */
    case COPY:          /* o: copying bytes in window, waiting for space */
      f = q - c->sub.copy.dist;
      while (f < s->window)             /* modulo window size-"while" instead */
        f += s->end - s->window;        /* of "if" handles invalid distances */
      while (c->len)
      {
        NEEDOUT
        OUTBYTE(*f++)
        if (f == s->end)
          f = s->window;
        c->len--;
      }
      c->mode = START;
      break;
    case LIT:           /* o: got literal, waiting for output space */
      NEEDOUT
      OUTBYTE(c->sub.lit)
      c->mode = START;
      break;
    case WASH:          /* o: got eob, possibly more output */
      if (k > 7)        /* return unused byte, if any */
      {
        Assert(k < 16, "inflate_codes grabbed too many bytes")
        k -= 8;
        n++;
        p--;            /* can always return one */
      }
      FLUSH
      if (s->read != s->write)
        LEAVE
      c->mode = END;
      /* fall through */
    case END:
      r = Z_STREAM_END;
      LEAVE
    case BADCODE:       /* x: got error */
      r = Z_DATA_ERROR;
      LEAVE
    default:
      r = Z_STREAM_ERROR;
      LEAVE
  }
#ifdef NEED_DUMMY_RETURN
  return Z_STREAM_ERROR;  /* Some dumb compilers complain without this */
#endif
}


/* Release a codes state allocated by inflate_codes_new(). */
void inflate_codes_free(c, z)
inflate_codes_statef *c;
z_streamp z;
{
  ZFREE(z, c);
  Tracev((stderr, "inflate: codes free\n"));
}
/* --- infcodes.c */

/* +++ infutil.c */

/* inflate_util.c -- data and routines common to blocks and codes
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */
/* #include "infblock.h" */
/* #include "inftrees.h" */
/* #include "infcodes.h" */
/* #include "infutil.h" */

#ifndef NO_DUMMY_DECL
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
#endif

/* And'ing with mask[n] masks the lower n bits */
uInt inflate_mask[17] = {
    0x0000,
    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};


/* copy as much as possible from the sliding window to the output area */
int inflate_flush(s, z, r) 5380 inflate_blocks_statef *s; 5381 z_streamp z; 5382 int r; 5383 { 5384 uInt n; 5385 Bytef *p; 5386 Bytef *q; 5387 5388 /* local copies of source and destination pointers */ 5389 p = z->next_out; 5390 q = s->read; 5391 5392 /* compute number of bytes to copy as far as end of window */ 5393 n = (uInt)((q <= s->write ? s->write : s->end) - q); 5394 if (n > z->avail_out) n = z->avail_out; 5395 if (n && r == Z_BUF_ERROR) r = Z_OK; 5396 5397 /* update counters */ 5398 z->avail_out -= n; 5399 z->total_out += n; 5400 5401 /* update check information */ 5402 if (s->checkfn != Z_NULL) 5403 z->adler = s->check = (*s->checkfn)(s->check, q, n); 5404 5405 /* copy as far as end of window */ 5406 if (p != Z_NULL) { 5407 zmemcpy(p, q, n); 5408 p += n; 5409 } 5410 q += n; 5411 5412 /* see if more to copy at beginning of window */ 5413 if (q == s->end) 5414 { 5415 /* wrap pointers */ 5416 q = s->window; 5417 if (s->write == s->end) 5418 s->write = s->window; 5419 5420 /* compute bytes to copy */ 5421 n = (uInt)(s->write - q); 5422 if (n > z->avail_out) n = z->avail_out; 5423 if (n && r == Z_BUF_ERROR) r = Z_OK; 5424 5425 /* update counters */ 5426 z->avail_out -= n; 5427 z->total_out += n; 5428 5429 /* update check information */ 5430 if (s->checkfn != Z_NULL) 5431 z->adler = s->check = (*s->checkfn)(s->check, q, n); 5432 5433 /* copy */ 5434 if (p != NULL) { 5435 zmemcpy(p, q, n); 5436 p += n; 5437 } 5438 q += n; 5439 } 5440 5441 /* update pointers */ 5442 z->next_out = p; 5443 s->read = q; 5444 5445 /* done */ 5446 return r; 5447 } 5448 /* --- infutil.c */ 5449 5450 /* +++ inffast.c */ 5451 5452 /* inffast.c -- process literals and length/distance pairs fast 5453 * Copyright (C) 1995-2002 Mark Adler 5454 * For conditions of distribution and use, see copyright notice in zlib.h 5455 */ 5456 5457 /* #include "zutil.h" */ 5458 /* #include "inftrees.h" */ 5459 /* #include "infblock.h" */ 5460 /* #include "infcodes.h" */ 5461 /* #include "infutil.h" */ 5462 
/* #include "inffast.h" */

#ifndef NO_DUMMY_DECL
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
#endif

/* simplify the use of the inflate_huft type with some defines */
#define exop word.what.Exop
#define bits word.what.Bits

/* macros for bit input with no checking and for returning unused bytes */
#define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
#define UNGRAB {c=z->avail_in-n;c=(k>>3)<c?k>>3:c;n+=c;p-=c;k-=c<<3;}

/* Called with number of bytes left to write in window at least 258
   (the maximum string length) and number of input bytes available
   at least ten.  The ten bytes are six bytes for the longest length/
   distance pair plus four bytes for overloading the bit buffer. */

int inflate_fast(bl, bd, tl, td, s, z)
uInt bl, bd;
inflate_huft *tl;
inflate_huft *td; /* need separate declaration for Borland C++ */
inflate_blocks_statef *s;
z_streamp z;
{
  inflate_huft *t;      /* temporary pointer */
  uInt e;               /* extra bits or operation */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */
  uInt ml;              /* mask for literal/length tree */
  uInt md;              /* mask for distance tree */
  uInt c;               /* bytes to copy */
  uInt d;               /* distance back to copy from */
  Bytef *r;             /* copy source pointer */

  /* load input, output, bit values */
  LOAD

  /* initialize masks */
  ml = inflate_mask[bl];
  md = inflate_mask[bd];

  /* do until not enough input or output space for fast loop */
  do {                          /* assume called with m >= 258 && n >= 10 */
    /* get literal/length code */
    GRABBITS(20)                /* max bits for literal/length code */
    if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
    {
      /* exop == 0: direct literal, emit and continue the fast loop */
      DUMPBITS(t->bits)
      Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                "inflate: * literal '%c'\n" :
                "inflate: * literal 0x%02x\n", t->base));
      *q++ = (Byte)t->base;
      m--;
      continue;
    }
    do {
      DUMPBITS(t->bits)
      if (e & 16)
      {
        /* get extra bits for length */
        e &= 15;
        c = t->base + ((uInt)b & inflate_mask[e]);
        DUMPBITS(e)
        Tracevv((stderr, "inflate: * length %u\n", c));

        /* decode distance base of block to copy */
        GRABBITS(15);           /* max bits for distance code */
        e = (t = td + ((uInt)b & md))->exop;
        do {
          DUMPBITS(t->bits)
          if (e & 16)
          {
            /* get extra bits to add to distance base */
            e &= 15;
            GRABBITS(e)         /* get extra bits (up to 13) */
            d = t->base + ((uInt)b & inflate_mask[e]);
            DUMPBITS(e)
            Tracevv((stderr, "inflate: * distance %u\n", d));

            /* do the copy */
            m -= c;
            r = q - d;
            if (r < s->window)                  /* wrap if needed */
            {
              do {
                r += s->end - s->window;        /* force pointer in window */
              } while (r < s->window);          /* covers invalid distances */
              e = s->end - r;
              if (c > e)
              {
                c -= e;                         /* wrapped copy */
                do {
                    *q++ = *r++;
                } while (--e);
                r = s->window;
                do {
                    *q++ = *r++;
                } while (--c);
              }
              else                              /* normal copy */
              {
                *q++ = *r++;  c--;
                *q++ = *r++;  c--;
                do {
                    *q++ = *r++;
                } while (--c);
              }
            }
            else                                /* normal copy */
            {
              *q++ = *r++;  c--;
              *q++ = *r++;  c--;
              do {
                  *q++ = *r++;
              } while (--c);
            }
            break;
          }
          else if ((e & 64) == 0)
          {
            /* next-level distance table lookup */
            t += t->base;
            e = (t += ((uInt)b & inflate_mask[e]))->exop;
          }
          else
          {
            z->msg = (char*)"invalid distance code";
            UNGRAB
            UPDATE
            return Z_DATA_ERROR;
          }
        } while (1);
        break;
      }
      if ((e & 64) == 0)
      {
        /* next-level literal/length table lookup */
        t += t->base;
        if ((e = (t += ((uInt)b & inflate_mask[e]))->exop) == 0)
        {
          DUMPBITS(t->bits)
          Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                    "inflate: * literal '%c'\n" :
                    "inflate: * literal 0x%02x\n", t->base));
          *q++ = (Byte)t->base;
          m--;
          break;
        }
      }
      else if (e & 32)
      {
        /* end-of-block code: give back unused input and report */
        Tracevv((stderr, "inflate: * end of block\n"));
        UNGRAB
        UPDATE
        return Z_STREAM_END;
      }
      else
      {
        z->msg = (char*)"invalid literal/length code";
        UNGRAB
        UPDATE
        return Z_DATA_ERROR;
      }
    } while (1);
  } while (m >= 258 && n >= 10);

  /* not enough input or output--restore pointers and return */
  UNGRAB
  UPDATE
  return Z_OK;
}
/* --- inffast.c */

/* +++ zutil.c */

/* zutil.c -- target dependent utility functions for the compression library
 * Copyright (C) 1995-2002 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* @(#) Id */

#ifdef DEBUG_ZLIB
#include <stdio.h>
#endif

/* #include "zutil.h" */

#ifndef NO_DUMMY_DECL
struct internal_state {int dummy;}; /* for buggy compilers */
#endif

#ifndef STDC
extern void exit __P((int));
#endif

/* error message table, indexed by Z_NEED_DICT - err (see ERR_MSG macro) */
const char *z_errmsg[10] = {
"need dictionary",     /* Z_NEED_DICT       2  */
"stream end",          /* Z_STREAM_END      1  */
"",                    /* Z_OK              0  */
"file error",          /* Z_ERRNO         (-1) */
"stream error",        /* Z_STREAM_ERROR  (-2) */
"data error",          /* Z_DATA_ERROR    (-3) */
"insufficient memory", /* Z_MEM_ERROR     (-4) */
"buffer error",        /* Z_BUF_ERROR     (-5) */
"incompatible version",/* Z_VERSION_ERROR (-6) */
""};


/* return the zlib version string this library was compiled with */
const char * ZEXPORT zlibVersion()
{
    return ZLIB_VERSION;
}

#ifdef DEBUG_ZLIB

#  ifndef verbose
#    define verbose 0
#  endif
int z_verbose = verbose;

/* print a debug message and exit (DEBUG_ZLIB builds only) */
void z_error (m)
    char *m;
{
    fprintf(stderr,
"%s\n", m);
    exit(1);
}
#endif

/* exported to allow conversion of error code to string for compress() and
 * uncompress()
 */
const char * ZEXPORT zError(err)
    int err;
{
    return ERR_MSG(err);
}


#ifndef HAVE_MEMCPY
/* fallback byte-wise memory routines for systems without memcpy() */

/* copy len bytes from source to dest (regions must not overlap) */
void zmemcpy(dest, source, len)
    Bytef* dest;
    const Bytef* source;
    uInt len;
{
    if (len == 0) return;
    do {
        *dest++ = *source++; /* ??? to be unrolled */
    } while (--len != 0);
}

/* compare len bytes; returns -1, 0 or 1 like memcmp()'s sign */
int zmemcmp(s1, s2, len)
    const Bytef* s1;
    const Bytef* s2;
    uInt len;
{
    uInt j;

    for (j = 0; j < len; j++) {
        if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;
    }
    return 0;
}

/* zero len bytes starting at dest */
void zmemzero(dest, len)
    Bytef* dest;
    uInt len;
{
    if (len == 0) return;
    do {
        *dest++ = 0; /* ??? to be unrolled */
    } while (--len != 0);
}
#endif

#ifdef __TURBOC__
#if (defined( __BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__)
/* Small and medium model in Turbo C are for now limited to near allocation
 * with reduced MAX_WBITS and MAX_MEM_LEVEL
 */
#  define MY_ZCALLOC

/* Turbo C malloc() does not allow dynamic allocation of 64K bytes
 * and farmalloc(64K) returns a pointer with an offset of 8, so we
 * must fix the pointer. Warning: the pointer must be put back to its
 * original form in order to free it, use zcfree().
 */

#define MAX_PTR 10
/* 10*64K = 640K */

local int next_ptr = 0;

/* maps a normalized (seg:0) pointer back to the pointer farmalloc gave us */
typedef struct ptr_table_s {
    voidpf org_ptr;
    voidpf new_ptr;
} ptr_table;

local ptr_table table[MAX_PTR];
/* This table is used to remember the original form of pointers
 * to large buffers (64K). Such pointers are normalized with a zero offset.
 * Since MSDOS is not a preemptive multitasking OS, this table is not
 * protected from concurrent access. This hack doesn't work anyway on
 * a protected system like OS/2. Use Microsoft C instead.
 */

/* Turbo C allocator: allocate items*size bytes; >=64K objects are
 * normalized to seg:0 form and remembered in table[] for zcfree() */
voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
{
    voidpf buf = opaque; /* just to make some compilers happy */
    ulg bsize = (ulg)items*size;

    /* If we allocate less than 65520 bytes, we assume that farmalloc
     * will return a usable pointer which doesn't have to be normalized.
     */
    if (bsize < 65520L) {
        buf = farmalloc(bsize);
        if (*(ush*)&buf != 0) return buf;
    } else {
        buf = farmalloc(bsize + 16L);
    }
    if (buf == NULL || next_ptr >= MAX_PTR) return NULL;
    table[next_ptr].org_ptr = buf;

    /* Normalize the pointer to seg:0 */
    *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4;
    *(ush*)&buf = 0;
    table[next_ptr++].new_ptr = buf;
    return buf;
}

/* Turbo C deallocator: denormalize via table[] if the pointer was a
 * normalized 64K object, then farfree() the original pointer */
void zcfree (voidpf opaque, voidpf ptr)
{
    int n;
    if (*(ush*)&ptr != 0) { /* object < 64K */
        farfree(ptr);
        return;
    }
    /* Find the original pointer */
    for (n = 0; n < next_ptr; n++) {
        if (ptr != table[n].new_ptr) continue;

        farfree(table[n].org_ptr);
        while (++n < next_ptr) {
            table[n-1] = table[n];
        }
        next_ptr--;
        return;
    }
    ptr = opaque; /* just to make some compilers happy */
    Assert(0, "zcfree: ptr not found");
}
#endif
#endif /* __TURBOC__ */


#if defined(M_I86) && !defined(__32BIT__)
/* Microsoft C in 16-bit mode */

#  define MY_ZCALLOC

#if (!defined(_MSC_VER) || (_MSC_VER <= 600))
#  define _halloc  halloc
#  define _hfree   hfree
#endif

/* MSC 16-bit allocator: huge-pointer allocation via halloc() */
voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
{
    if (opaque) opaque = 0; /* to make compiler happy */
    return _halloc((long)items, size);
}

/* MSC 16-bit deallocator, paired with the halloc-based zcalloc above */
void
zcfree (voidpf opaque, voidpf ptr)
{
    if (opaque) opaque = 0; /* to make compiler happy */
    _hfree(ptr);
}

#endif /* MSC */


#ifndef MY_ZCALLOC /* Any system without a special alloc function */

#ifndef STDC
extern voidp  calloc __P((uInt items, uInt size));
extern void   free   __P((voidpf ptr));
#endif

/* default allocator: zero-initialized items*size bytes via calloc() */
voidpf zcalloc (opaque, items, size)
    voidpf opaque;
    unsigned items;
    unsigned size;
{
    if (opaque) items += size - size; /* make compiler happy */
    return (voidpf)calloc(items, size);
}

/* default deallocator, paired with the calloc-based zcalloc above */
void  zcfree (opaque, ptr)
    voidpf opaque;
    voidpf ptr;
{
    free(ptr);
    if (opaque) return; /* make compiler happy */
}

#endif /* MY_ZCALLOC */
/* --- zutil.c */

/* +++ adler32.c */
/* adler32.c -- compute the Adler-32 checksum of a data stream
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* @(#) $Id: zlib.c,v 1.18 2002/05/07 09:14:20 tron Exp $ */

/* #include "zlib.h" */

#define BASE 65521L /* largest prime smaller than 65536 */
#define NMAX 5552
/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */

/* unrolled inner-loop steps: each DO1 folds one byte into s1/s2 */
#define DO1(buf,i)  {s1 += buf[i]; s2 += s1;}
#define DO2(buf,i)  DO1(buf,i); DO1(buf,i+1);
#define DO4(buf,i)  DO2(buf,i); DO2(buf,i+2);
#define DO8(buf,i)  DO4(buf,i); DO4(buf,i+4);
#define DO16(buf)   DO8(buf,0); DO8(buf,8);

/* ========================================================================= */
/* update the Adler-32 checksum adler with len bytes from buf; returns the
 * initial checksum value 1L when buf is Z_NULL */
uLong ZEXPORT adler32(adler, buf, len)
    uLong adler;
    const Bytef *buf;
    uInt len;
{
    unsigned long s1 = adler & 0xffff;
    unsigned long s2 = (adler >> 16) & 0xffff;
    int k;

    if (buf == Z_NULL) return 1L;

    while (len > 0) {
        k = len < NMAX ?
len : NMAX;
        len -= k;
        /* NMAX bytes can be summed before a modulo is required */
        while (k >= 16) {
            DO16(buf);
            buf += 16;
            k -= 16;
        }
        if (k != 0) do {
            s1 += *buf++;
            s2 += s1;
        } while (--k);
        s1 %= BASE;
        s2 %= BASE;
    }
    /* checksum is s2 in the high 16 bits, s1 in the low 16 bits */
    return (s2 << 16) | s1;
}
/* --- adler32.c */
