//
// Copyright (c) 2016-2019 Vinnie Falco (vinnie dot falco at gmail dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
// Official repository: https://github.com/boostorg/beast
//
// This is a derivative work based on Zlib, copyright below:
/*
    Copyright (C) 1995-2013 Jean-loup Gailly and Mark Adler

    This software is provided 'as-is', without any express or implied
    warranty.  In no event will the authors be held liable for any damages
    arising from the use of this software.

    Permission is granted to anyone to use this software for any purpose,
    including commercial applications, and to alter it and redistribute it
    freely, subject to the following restrictions:

    1. The origin of this software must not be misrepresented; you must not
       claim that you wrote the original software. If you use this software
       in a product, an acknowledgment in the product documentation would be
       appreciated but is not required.
    2. Altered source versions must be plainly marked as such, and must not be
       misrepresented as being the original software.
    3. This notice may not be removed or altered from any source distribution.

    Jean-loup Gailly        Mark Adler
    jloup@gzip.org          madler@alumni.caltech.edu

    The data format used by the zlib library is described by RFCs (Request for
    Comments) 1950 to 1952 in the files http://tools.ietf.org/html/rfc1950
    (zlib format), rfc1951 (deflate format) and rfc1952 (gzip format).
*/

#ifndef BOOST_BEAST_ZLIB_DETAIL_DEFLATE_STREAM_IPP
#define BOOST_BEAST_ZLIB_DETAIL_DEFLATE_STREAM_IPP

#include <boost/beast/zlib/detail/deflate_stream.hpp>
#include <boost/beast/zlib/detail/ranges.hpp>
#include <boost/assert.hpp>
#include <boost/config.hpp>
#include <boost/make_unique.hpp>
#include <boost/optional.hpp>
#include <boost/throw_exception.hpp>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <memory>
#include <stdexcept>
#include <type_traits>

namespace boost {
namespace beast {
namespace zlib {
namespace detail {
/*
 *  ALGORITHM
 *
 *      The "deflation" process depends on being able to identify portions
 *      of the input text which are identical to earlier input (within a
 *      sliding window trailing behind the input currently being processed).
 *
 *      Each code tree is stored in a compressed form which is itself
 *      a Huffman encoding of the lengths of all the code strings (in
 *      ascending order by source values).  The actual code strings are
 *      reconstructed from the lengths in the inflate process, as described
 *      in the deflate specification.
 *
 *      The most straightforward technique turns out to be the fastest for
 *      most input files: try all possible matches and select the longest.
 *      The key feature of this algorithm is that insertions into the string
 *      dictionary are very simple and thus fast, and deletions are avoided
 *      completely. Insertions are performed at each input character, whereas
 *      string matches are performed only when the previous match ends. So it
 *      is preferable to spend more time in matches to allow very fast string
 *      insertions and avoid deletions. The matching algorithm for small
 *      strings is inspired by that of Rabin & Karp. A brute force approach
 *      is used to find longer strings when a small match has been found.
 *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
 *      (by Leonid Broukhis).
 *         A previous version of this file used a more sophisticated algorithm
 *      (by Fiala and Greene) which is guaranteed to run in linear amortized
 *      time, but has a larger average cost, uses more memory and is patented.
 *      However the F&G algorithm may be faster for some highly redundant
 *      files if the parameter max_chain_length (described below) is too large.
 *
 *  ACKNOWLEDGEMENTS
 *
 *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
 *      I found it in 'freeze' written by Leonid Broukhis.
 *      Thanks to many people for bug reports and testing.
 *
 *  REFERENCES
 *
 *      Deutsch, L.P., "DEFLATE Compressed Data Format Specification".
 *      Available at http://tools.ietf.org/html/rfc1951
 *
 *      A description of the Rabin and Karp algorithm is given in the book
 *         "Algorithms" by R. Sedgewick, Addison-Wesley, p. 252.
 *
 *      Fiala, E.R., and Greene, D.H.
 *         Data Compression with Finite Windows, Comm. ACM, 32, 4 (1989) 490-505
 *
 */
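
/*  Illustrative sketch (not part of the library): the dictionary insertion
    described above hashes the next minMatch bytes to the most recent
    position with that hash, chaining earlier positions behind it. Using
    the members defined later in this file, inserting position str is just:

        update_hash(ins_h_, window_[str + minMatch-1]); // roll the hash
        prev_[str & w_mask_] = head_[ins_h_];           // link previous head
        head_[ins_h_] = (std::uint16_t)str;             // become the new head

    There is no deletion; stale positions simply fall out of range and are
    rejected by the distance check in longest_match().
*/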

/*  Generate the codes for a given tree and bit counts (which need not be optimal).
    IN assertion: the array bl_count contains the bit length statistics for
       the given tree and the field len is set for all tree elements.
    OUT assertion: the field code is set for all tree elements of non
        zero code length.
*/
void
deflate_stream::
gen_codes(ct_data *tree, int max_code, std::uint16_t *bl_count)
{
    std::uint16_t next_code[maxBits+1]; /* next code value for each bit length */
    std::uint16_t code = 0;             /* running code value */
    int bits;                           /* bit index */
    int n;                              /* code index */

    // The distribution counts are first used to
    // generate the code values without bit reversal.
    for(bits = 1; bits <= maxBits; bits++)
    {
        code = (code + bl_count[bits-1]) << 1;
        next_code[bits] = code;
    }
    // Check that the bit counts in bl_count are consistent.
    // The last code must be all ones.
    BOOST_ASSERT(code + bl_count[maxBits]-1 == (1<<maxBits)-1);
    for(n = 0; n <= max_code; n++)
    {
        int len = tree[n].dl;
        if(len == 0)
            continue;
        tree[n].fc = bi_reverse(next_code[len]++, len);
    }
}
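
/*  Worked example (illustrative): suppose, for brevity, maxBits == 4 and
    bl_count == { 0, 0, 1, 5, 2 }, i.e. one 2-bit code, five 3-bit codes
    and two 4-bit codes. The first loop above computes

        next_code[1] = (0 + 0) << 1 = 0
        next_code[2] = (0 + 0) << 1 = 0
        next_code[3] = (0 + 1) << 1 = 2
        next_code[4] = (2 + 5) << 1 = 14

    and the consistency check holds: 14 + 2 - 1 == (1<<4)-1. The second
    loop then hands out codes in increasing symbol order -- the 2-bit code
    is 00, the 3-bit codes are 010..110, the 4-bit codes 1110 and 1111 --
    bit-reversed so they can be emitted LSB-first. This is the canonical
    code construction of RFC 1951 section 3.2.2.
*/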

auto
deflate_stream::get_lut() ->
    lut_type const&
{
    struct init
    {
        lut_type tables;

        init()
        {
            // number of codes at each bit length for an optimal tree
            //std::uint16_t bl_count[maxBits+1];

            // Initialize the mapping length (0..255) -> length code (0..28)
            std::uint8_t length = 0;
            for(std::uint8_t code = 0; code < lengthCodes-1; ++code)
            {
                tables.base_length[code] = length;
                auto const run = 1U << tables.extra_lbits[code];
                for(unsigned n = 0; n < run; ++n)
                    tables.length_code[length++] = code;
            }
            // length is a std::uint8_t which has wrapped around from
            // 255 to 0, proving that exactly 256 entries were filled.
            BOOST_ASSERT(length == 0);
            // Note that the length 255 (match length 258) can be represented
            // in two different ways: code 284 + 5 bits or code 285, so we
            // overwrite length_code[255] to use the best encoding:
            tables.length_code[255] = lengthCodes-1;

            // Initialize the mapping dist (0..32K) -> dist code (0..29)
            {
                std::uint8_t code;
                std::uint16_t dist = 0;
                for(code = 0; code < 16; code++)
                {
                    tables.base_dist[code] = dist;
                    auto const run = 1U << tables.extra_dbits[code];
                    for(unsigned n = 0; n < run; ++n)
                        tables.dist_code[dist++] = code;
                }
                BOOST_ASSERT(dist == 256);
                // from now on, all distances are divided by 128
                dist >>= 7;
                for(; code < dCodes; ++code)
                {
                    tables.base_dist[code] = dist << 7;
                    auto const run = 1U << (tables.extra_dbits[code]-7);
                    for(std::size_t n = 0; n < run; ++n)
                        tables.dist_code[256 + dist++] = code;
                }
                BOOST_ASSERT(dist == 256);
            }

            // Construct the codes of the static literal tree
            std::uint16_t bl_count[maxBits+1];
            std::memset(bl_count, 0, sizeof(bl_count));
            unsigned n = 0;
            while (n <= 143)
                tables.ltree[n++].dl = 8;
            bl_count[8] += 144;
            while (n <= 255)
                tables.ltree[n++].dl = 9;
            bl_count[9] += 112;
            while (n <= 279)
                tables.ltree[n++].dl = 7;
            bl_count[7] += 24;
            while (n <= 287)
                tables.ltree[n++].dl = 8;
            bl_count[8] += 8;
            // Codes 286 and 287 do not exist, but we must include them in the tree
            // construction to get a canonical Huffman tree (longest code all ones)
            gen_codes(tables.ltree, lCodes+1, bl_count);

            for(n = 0; n < dCodes; ++n)
            {
                tables.dtree[n].dl = 5;
                tables.dtree[n].fc =
                    static_cast<std::uint16_t>(bi_reverse(n, 5));
            }
        }
    };
    static init const data;
    return data.tables;
}
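
/*  Example (illustrative) of the tables built above: the longest match
    length 258 is stored as 258 - minMatch == 255, and thanks to the
    overwrite above length_code[255] == 28, which compress_block() emits
    as symbol 285 with no extra bits. For distances, d_code(0) == 0
    (distance 1), while d_code(32767) looks up dist_code[256 + (32767>>7)]
    == 29, matching RFC 1951 where distances 24577..32768 use code 29.
*/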

void
deflate_stream::
doReset(
    int level,
    int windowBits,
    int memLevel,
    Strategy strategy)
{
    if(level == default_size)
        level = 6;

    // VFALCO What do we do about this?
    // until 256-byte window bug fixed
    if(windowBits == 8)
        windowBits = 9;

    if(level < 0 || level > 9)
        BOOST_THROW_EXCEPTION(std::invalid_argument{
            "invalid level"});

    if(windowBits < 8 || windowBits > 15)
        BOOST_THROW_EXCEPTION(std::invalid_argument{
            "invalid windowBits"});

    if(memLevel < 1 || memLevel > max_mem_level)
        BOOST_THROW_EXCEPTION(std::invalid_argument{
            "invalid memLevel"});

    w_bits_ = windowBits;

    hash_bits_ = memLevel + 7;

    // 16K elements by default
    lit_bufsize_ = 1 << (memLevel + 6);

    level_ = level;
    strategy_ = strategy;
    inited_ = false;
}

void
deflate_stream::
doReset()
{
    inited_ = false;
}

void
deflate_stream::
doClear()
{
    inited_ = false;
    buf_.reset();
}

std::size_t
deflate_stream::
doUpperBound(std::size_t sourceLen) const
{
    std::size_t complen;
    std::size_t wraplen;

    /* conservative upper bound for compressed data */
    complen = sourceLen +
              ((sourceLen + 7) >> 3) + ((sourceLen + 63) >> 6) + 5;

    /* compute wrapper length */
    wraplen = 0;

    /* if not default parameters, return conservative bound */
    if(w_bits_ != 15 || hash_bits_ != 8 + 7)
        return complen + wraplen;

    /* default settings: return tight bound for that case */
    return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
           (sourceLen >> 25) + 13 - 6 + wraplen;
}
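
/*  Worked example (illustrative): for sourceLen == 1048576 (1 MiB) with
    the default parameters (windowBits 15, memLevel 8, so w_bits_ == 15
    and hash_bits_ == 15) the tight bound above is

        1048576 + (1048576>>12) + (1048576>>14) + (1048576>>25) + 13 - 6
      = 1048576 + 256 + 64 + 0 + 7 = 1048903 bytes,

    while the conservative bound would be 1048576 + 131072 + 16384 + 5 ==
    1196037 bytes. Raw deflate has no zlib/gzip wrapper, hence wraplen == 0.
*/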

void
deflate_stream::
doTune(
    int good_length,
    int max_lazy,
    int nice_length,
    int max_chain)
{
    good_match_ = good_length;
    nice_match_ = nice_length;
    max_lazy_match_ = max_lazy;
    max_chain_length_ = max_chain;
}

void
deflate_stream::
doParams(z_params& zs, int level, Strategy strategy, error_code& ec)
{
    compress_func func;

    if(level == default_size)
        level = 6;
    if(level < 0 || level > 9)
    {
        ec = error::stream_error;
        return;
    }
    func = get_config(level_).func;

    if((strategy != strategy_ || func != get_config(level).func) &&
        zs.total_in != 0)
    {
        // Flush the last buffer:
        doWrite(zs, Flush::block, ec);
        if(ec == error::need_buffers && pending_ == 0)
            ec = {};
    }
    if(level_ != level)
    {
        level_ = level;
        max_lazy_match_   = get_config(level).max_lazy;
        good_match_       = get_config(level).good_length;
        nice_match_       = get_config(level).nice_length;
        max_chain_length_ = get_config(level).max_chain;
    }
    strategy_ = strategy;
}

// VFALCO boost::optional param is a workaround for
//        gcc "maybe uninitialized" warning
//        https://github.com/boostorg/beast/issues/532
//
void
deflate_stream::
doWrite(z_params& zs, boost::optional<Flush> flush, error_code& ec)
{
    maybe_init();

    if(zs.next_in == nullptr && zs.avail_in != 0)
        BOOST_THROW_EXCEPTION(std::invalid_argument{"invalid input"});

    if(zs.next_out == nullptr ||
        (status_ == FINISH_STATE && flush != Flush::finish))
    {
        ec = error::stream_error;
        return;
    }
    if(zs.avail_out == 0)
    {
        ec = error::need_buffers;
        return;
    }

    // value of flush param for previous deflate call
    auto old_flush = boost::make_optional<Flush>(
        last_flush_.is_initialized(),
        last_flush_ ? *last_flush_ : Flush::none);

    last_flush_ = flush;

    // Flush as much pending output as possible
    if(pending_ != 0)
    {
        flush_pending(zs);
        if(zs.avail_out == 0)
        {
            /* Since avail_out is 0, deflate will be called again with
             * more output space, but possibly with both pending and
             * avail_in equal to zero. There won't be anything to do,
             * but this is not an error situation so make sure we
             * return OK instead of BUF_ERROR at next call of deflate:
             */
            last_flush_ = boost::none;
            return;
        }
    }
    else if(zs.avail_in == 0 && (
            old_flush && flush <= *old_flush // Caution: depends on enum order
        ) && flush != Flush::finish)
    {
        /* Make sure there is something to do and avoid duplicate consecutive
         * flushes. For repeated and useless calls with Flush::finish, we keep
         * returning Z_STREAM_END instead of Z_BUF_ERROR.
         */
        ec = error::need_buffers;
        return;
    }

    // User must not provide more input after the first FINISH:
    if(status_ == FINISH_STATE && zs.avail_in != 0)
    {
        ec = error::need_buffers;
        return;
    }

    /* Start a new block or continue the current one.
     */
    if(zs.avail_in != 0 || lookahead_ != 0 ||
        (flush != Flush::none && status_ != FINISH_STATE))
    {
        block_state bstate;

        switch(strategy_)
        {
        case Strategy::huffman:
            bstate = deflate_huff(zs, flush.get());
            break;
        case Strategy::rle:
            bstate = deflate_rle(zs, flush.get());
            break;
        default:
        {
            bstate = (this->*(get_config(level_).func))(zs, flush.get());
            break;
        }
        }

        if(bstate == finish_started || bstate == finish_done)
        {
            status_ = FINISH_STATE;
        }
        if(bstate == need_more || bstate == finish_started)
        {
            if(zs.avail_out == 0)
            {
                last_flush_ = boost::none; /* avoid BUF_ERROR next call, see above */
            }
            return;
            /*  If flush != Flush::none && avail_out == 0, the next call
                of deflate should use the same flush parameter to make sure
                that the flush is complete. So we don't have to output an
                empty block here, this will be done at next call. This also
                ensures that for a very small output buffer, we emit at most
                one empty block.
            */
        }
        if(bstate == block_done)
        {
            if(flush == Flush::partial)
            {
                tr_align();
            }
            else if(flush != Flush::block)
            {
                /* FULL_FLUSH or SYNC_FLUSH */
                tr_stored_block(nullptr, 0L, 0);
                /* For a full flush, this empty block will be recognized
                 * as a special marker by inflate_sync().
                 */
                if(flush == Flush::full)
                {
                    clear_hash(); // forget history
                    if(lookahead_ == 0)
                    {
                        strstart_ = 0;
                        block_start_ = 0L;
                        insert_ = 0;
                    }
                }
            }
            flush_pending(zs);
            if(zs.avail_out == 0)
            {
                last_flush_ = boost::none; /* avoid BUF_ERROR at next call, see above */
                return;
            }
        }
    }

    if(flush == Flush::finish)
    {
        ec = error::end_of_stream;
        return;
    }
}
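
/*  Typical driver loop (an illustrative sketch, not part of the library;
    it assumes the public zlib::deflate_stream::write wrapper over doWrite):

        zlib::deflate_stream ds;
        z_params zs;
        error_code ec;
        for(;;)
        {
            // point zs.next_in/avail_in at unconsumed input and
            // zs.next_out/avail_out at free output space, then:
            ds.write(zs, have_more_input ? Flush::none : Flush::finish, ec);
            if(ec == error::end_of_stream)
                break;                      // stream complete
            if(ec && ec != error::need_buffers)
                return;                     // hard error
            // consume the produced output and loop
        }
*/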

// VFALCO Warning: untested
void
deflate_stream::
doDictionary(Byte const* dict, uInt dictLength, error_code& ec)
{
    if(lookahead_)
    {
        ec = error::stream_error;
        return;
    }

    maybe_init();

    /* if dict would fill window, just replace the history */
    if(dictLength >= w_size_)
    {
        clear_hash();
        strstart_ = 0;
        block_start_ = 0L;
        insert_ = 0;
        dict += dictLength - w_size_;  /* use the tail */
        dictLength = w_size_;
    }

    /* insert dict into window and hash */
    z_params zs;
    zs.avail_in = dictLength;
    zs.next_in = dict;
    zs.avail_out = 0;
    zs.next_out = nullptr;
    fill_window(zs);
    while(lookahead_ >= minMatch)
    {
        uInt str = strstart_;
        uInt n = lookahead_ - (minMatch-1);
        do
        {
            update_hash(ins_h_, window_[str + minMatch-1]);
            prev_[str & w_mask_] = head_[ins_h_];
            head_[ins_h_] = (std::uint16_t)str;
            str++;
        }
        while(--n);
        strstart_ = str;
        lookahead_ = minMatch-1;
        fill_window(zs);
    }
    strstart_ += lookahead_;
    block_start_ = (long)strstart_;
    insert_ = lookahead_;
    lookahead_ = 0;
    match_length_ = prev_length_ = minMatch-1;
    match_available_ = 0;
}

void
deflate_stream::
doPrime(int bits, int value, error_code& ec)
{
    maybe_init();

    if((Byte *)(d_buf_) < pending_out_ + ((Buf_size + 7) >> 3))
    {
        ec = error::need_buffers;
        return;
    }

    do
    {
        int put = Buf_size - bi_valid_;
        if(put > bits)
            put = bits;
        bi_buf_ |= (std::uint16_t)((value & ((1 << put) - 1)) << bi_valid_);
        bi_valid_ += put;
        tr_flush_bits();
        value >>= put;
        bits -= put;
    }
    while(bits);
}

void
deflate_stream::
doPending(unsigned* value, int* bits)
{
    if(value != nullptr)
        *value = pending_;
    if(bits != nullptr)
        *bits = bi_valid_;
}

//--------------------------------------------------------------------------

// Do lazy initialization
void
deflate_stream::
init()
{
    //  Caller must set these:
    //      w_bits_
    //      hash_bits_
    //      lit_bufsize_
    //      level_
    //      strategy_

    w_size_ = 1 << w_bits_;
    w_mask_ = w_size_ - 1;

    hash_size_ = 1 << hash_bits_;
    hash_mask_ = hash_size_ - 1;
    hash_shift_ = ((hash_bits_+minMatch-1)/minMatch);

    auto const nwindow  = w_size_ * 2*sizeof(Byte);
    auto const nprev    = w_size_ * sizeof(std::uint16_t);
    auto const nhead    = hash_size_ * sizeof(std::uint16_t);
    auto const noverlay = lit_bufsize_ * (sizeof(std::uint16_t)+2);
    auto const needed   = nwindow + nprev + nhead + noverlay;

    if(! buf_ || buf_size_ != needed)
    {
        buf_ = boost::make_unique_noinit<
            std::uint8_t[]>(needed);
        buf_size_ = needed;
    }

    window_ = reinterpret_cast<Byte*>(buf_.get());
    prev_   = reinterpret_cast<std::uint16_t*>(buf_.get() + nwindow);
    std::memset(prev_, 0, nprev);
    head_   = reinterpret_cast<std::uint16_t*>(buf_.get() + nwindow + nprev);

    /*  We overlay pending_buf_ and d_buf_ + l_buf_. This works
        since the average output size for (length, distance)
        codes is <= 24 bits.
    */
    auto overlay = reinterpret_cast<std::uint16_t*>(
        buf_.get() + nwindow + nprev + nhead);

    // nothing written to window_ yet
    high_water_ = 0;

    pending_buf_ =
        reinterpret_cast<std::uint8_t*>(overlay);
    pending_buf_size_ =
        static_cast<std::uint32_t>(lit_bufsize_) *
            (sizeof(std::uint16_t) + 2L);

    d_buf_ = overlay + lit_bufsize_ / sizeof(std::uint16_t);
    l_buf_ = pending_buf_ + (1 + sizeof(std::uint16_t)) * lit_bufsize_;

    pending_ = 0;
    pending_out_ = pending_buf_;

    status_ = BUSY_STATE;
    last_flush_ = Flush::none;

    tr_init();
    lm_init();

    inited_ = true;
}
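
/*  Worked example (illustrative) of the layout above with the defaults
    (windowBits 15, memLevel 8): w_size_ == 32768, hash_size_ == 32768 and
    lit_bufsize_ == 16384, so

        nwindow  == 65536       window_      at offset      0
        nprev    == 65536       prev_        at offset  65536
        nhead    == 65536       head_        at offset 131072
        noverlay == 65536       pending_buf_ at offset 196608

    for a single 262144-byte allocation. Within the overlay, d_buf_ begins
    8192 std::uint16_t in (byte offset 16384) and l_buf_ at byte offset
    49152, which is why the assertion in compress_block() can bound
    pending_ by lit_bufsize_ + 2*lx.
*/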

/*  Initialize the "longest match" routines for a new zlib stream
*/
void
deflate_stream::
lm_init()
{
    window_size_ = (std::uint32_t)2L*w_size_;

    clear_hash();

    /* Set the default configuration parameters:
     */
    // VFALCO TODO just copy the config struct
    max_lazy_match_   = get_config(level_).max_lazy;
    good_match_       = get_config(level_).good_length;
    nice_match_       = get_config(level_).nice_length;
    max_chain_length_ = get_config(level_).max_chain;

    strstart_ = 0;
    block_start_ = 0L;
    lookahead_ = 0;
    insert_ = 0;
    match_length_ = prev_length_ = minMatch-1;
    match_available_ = 0;
    ins_h_ = 0;
}

// Initialize a new block.
//
void
deflate_stream::
init_block()
{
    for(int n = 0; n < lCodes;  n++)
        dyn_ltree_[n].fc = 0;
    for(int n = 0; n < dCodes;  n++)
        dyn_dtree_[n].fc = 0;
    for(int n = 0; n < blCodes; n++)
        bl_tree_[n].fc = 0;
    dyn_ltree_[END_BLOCK].fc = 1;
    opt_len_ = 0L;
    static_len_ = 0L;
    last_lit_ = 0;
    matches_ = 0;
}

/*  Restore the heap property by moving down the tree starting at node k,
    exchanging a node with the smallest of its two sons if necessary,
    stopping when the heap property is re-established (each father smaller
    than its two sons).
*/
void
deflate_stream::
pqdownheap(
    ct_data const* tree,    // the tree to restore
    int k)                  // node to move down
{
    int v = heap_[k];
    int j = k << 1;  // left son of k
    while(j <= heap_len_)
    {
        // Set j to the smallest of the two sons:
        if(j < heap_len_ &&
                smaller(tree, heap_[j+1], heap_[j]))
            j++;
        // Exit if v is smaller than both sons
        if(smaller(tree, v, heap_[j]))
            break;

        // Exchange v with the smallest son
        heap_[k] = heap_[j];
        k = j;

        // And continue down the tree,
        // setting j to the left son of k
        j <<= 1;
    }
    heap_[k] = v;
}

/*  Remove the smallest element from the heap and recreate the heap
    with one less element. Updates heap and heap_len.
*/
void
deflate_stream::
pqremove(ct_data const* tree, int& top)
{
    top = heap_[kSmallest];
    heap_[kSmallest] = heap_[heap_len_--];
    pqdownheap(tree, kSmallest);
}
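
/*  Illustrative example (writing frequencies in place of node indices):
    with heap_len_ == 4 and heap == { -, 1, 4, 2, 7 } (heap_[0] unused),
    pqremove() returns the root 1, moves the last element 7 to the root
    and sifts it down: 7 swaps with its smaller son 2, giving
    { -, 2, 4, 7 }, which again satisfies the property that every father
    is smaller than its two sons.
*/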

/*  Compute the optimal bit lengths for a tree and update the total bit length
    for the current block.
    IN assertion: the fields freq and dad are set, heap[heap_max] and
       above are the tree nodes sorted by increasing frequency.
    OUT assertions: the field len is set to the optimal bit length, the
        array bl_count contains the frequencies for each bit length.
        The length opt_len is updated; static_len is also updated if stree is
        not null.
*/
void
deflate_stream::
gen_bitlen(tree_desc *desc)
{
    ct_data *tree             = desc->dyn_tree;
    int max_code              = desc->max_code;
    ct_data const* stree      = desc->stat_desc->static_tree;
    std::uint8_t const *extra = desc->stat_desc->extra_bits;
    int base                  = desc->stat_desc->extra_base;
    int max_length            = desc->stat_desc->max_length;
    int h;                          // heap index
    int n, m;                       // iterate over the tree elements
    int bits;                       // bit length
    int xbits;                      // extra bits
    std::uint16_t f;                // frequency
    int overflow = 0;               // number of elements with bit length too large

    std::fill(&bl_count_[0], &bl_count_[maxBits+1], std::uint16_t{0});

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[heap_[heap_max_]].dl = 0; // root of the heap

    for(h = heap_max_+1; h < HEAP_SIZE; h++)
    {
        n = heap_[h];
        bits = tree[tree[n].dl].dl + 1;
        if(bits > max_length)
        {
            bits = max_length;
            overflow++;
        }
        // We overwrite tree[n].dl which is no longer needed
        tree[n].dl = (std::uint16_t)bits;

        if(n > max_code)
            continue; // not a leaf node

        bl_count_[bits]++;
        xbits = 0;
        if(n >= base)
            xbits = extra[n-base];
        f = tree[n].fc;
        opt_len_ += (std::uint32_t)f * (bits + xbits);
        if(stree)
            static_len_ += (std::uint32_t)f * (stree[n].dl + xbits);
    }
    if(overflow == 0)
        return;

    // Find the first bit length which could increase:
    do
    {
        bits = max_length-1;
        while(bl_count_[bits] == 0)
            bits--;
        bl_count_[bits]--;      // move one leaf down the tree
        bl_count_[bits+1] += 2; // move one overflow item as its brother
        bl_count_[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    }
    while(overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for(bits = max_length; bits != 0; bits--)
    {
        n = bl_count_[bits];
        while(n != 0)
        {
            m = heap_[--h];
            if(m > max_code)
                continue;
            if((unsigned) tree[m].dl != (unsigned) bits)
            {
                opt_len_ += ((long)bits - (long)tree[m].dl) * (long)tree[m].fc;
                tree[m].dl = (std::uint16_t)bits;
            }
            n--;
        }
    }
}
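
/*  Why the adjustment loop above terminates (sketch): a prefix code must
    satisfy the Kraft inequality

        sum over leaves of 2^(-len) <= 1.

    Truncating a code from depth d > max_length to max_length increases the
    sum by less than 2^(-max_length). Each loop iteration moves one leaf
    from some depth `bits` down to bits+1 and pairs it with one overflowed
    leaf (bl_count_[bits]--, bl_count_[bits+1] += 2, bl_count_[max_length]--),
    a net change of -2^(-max_length), so after ceil(overflow/2) iterations
    the counts again describe a valid prefix code.
*/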

/*  Construct one Huffman tree and assign the code bit strings and lengths.
    Update the total bit length for the current block.
    IN assertion: the field freq is set for all tree elements.
    OUT assertions: the fields len and code are set to the optimal bit length
        and corresponding code. The length opt_len is updated; static_len is
        also updated if stree is not null. The field max_code is set.
*/
void
deflate_stream::
build_tree(tree_desc *desc)
{
    ct_data *tree        = desc->dyn_tree;
    ct_data const* stree = desc->stat_desc->static_tree;
    int elems            = desc->stat_desc->elems;
    int n, m;          // iterate over heap elements
    int max_code = -1; // largest code with non zero frequency
    int node;          // new node being created

    /* Construct the initial heap, with least frequent element in
     * heap[kSmallest]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    heap_len_ = 0;
    heap_max_ = HEAP_SIZE;

    for(n = 0; n < elems; n++)
    {
        if(tree[n].fc != 0)
        {
            heap_[++(heap_len_)] = max_code = n;
            depth_[n] = 0;
        }
        else
        {
            tree[n].dl = 0;
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while(heap_len_ < 2)
    {
        node = heap_[++(heap_len_)] = (max_code < 2 ? ++max_code : 0);
        tree[node].fc = 1;
        depth_[node] = 0;
        opt_len_--;
        if(stree)
            static_len_ -= stree[node].dl;
        // node is 0 or 1 so it does not have extra bits
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for(n = heap_len_/2; n >= 1; n--)
        pqdownheap(tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems;              /* next internal node of the tree */
    do
    {
        pqremove(tree, n);    /* n = node of least frequency */
        m = heap_[kSmallest]; /* m = node of next least frequency */

        heap_[--(heap_max_)] = n; /* keep the nodes sorted by frequency */
        heap_[--(heap_max_)] = m;

        /* Create a new node father of n and m */
        tree[node].fc = tree[n].fc + tree[m].fc;
        depth_[node] = (std::uint8_t)((depth_[n] >= depth_[m] ?
                                depth_[n] : depth_[m]) + 1);
        tree[n].dl = tree[m].dl = (std::uint16_t)node;
        /* and insert the new node in the heap */
        heap_[kSmallest] = node++;
        pqdownheap(tree, kSmallest);
    }
    while(heap_len_ >= 2);

    heap_[--(heap_max_)] = heap_[kSmallest];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes(tree, max_code, bl_count_);
}

/*  Scan a literal or distance tree to determine the frequencies
    of the codes in the bit length tree.
*/
void
deflate_stream::
scan_tree(
    ct_data *tree,      // the tree to be scanned
    int max_code)       // and its largest code of non zero frequency
{
    int n;                      // iterates over all tree elements
    int prevlen = -1;           // last emitted length
    int curlen;                 // length of current code
    int nextlen = tree[0].dl;   // length of next code
    std::uint16_t count = 0;    // repeat count of the current code
    int max_count = 7;          // max repeat count
    int min_count = 4;          // min repeat count

    if(nextlen == 0)
    {
        max_count = 138;
        min_count = 3;
    }
    tree[max_code+1].dl = (std::uint16_t)0xffff; // guard

    for(n = 0; n <= max_code; n++)
    {
        curlen = nextlen;
        nextlen = tree[n+1].dl;
        if(++count < max_count && curlen == nextlen)
        {
            continue;
        }
        else if(count < min_count)
        {
            bl_tree_[curlen].fc += count;
        }
        else if(curlen != 0)
        {
            if(curlen != prevlen)
                bl_tree_[curlen].fc++;
            bl_tree_[REP_3_6].fc++;
        }
        else if(count <= 10)
        {
            bl_tree_[REPZ_3_10].fc++;
        }
        else
        {
            bl_tree_[REPZ_11_138].fc++;
        }
        count = 0;
        prevlen = curlen;
        if(nextlen == 0)
        {
            max_count = 138;
            min_count = 3;
        }
        else if(curlen == nextlen)
        {
            max_count = 6;
            min_count = 3;
        }
        else
        {
            max_count = 7;
            min_count = 4;
        }
    }
}
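
/*  Example (illustrative): a run of seven lengths of 8 is counted as one
    literal occurrence of 8 plus one REP_3_6 ("repeat previous 3..6 times"),
    and a run of fifteen zero lengths as one REPZ_11_138. send_tree() emits
    the matching extra bits, e.g. REPZ_11_138 with extra value 15-11 == 4.
    These are the bit length alphabet symbols 16, 17 and 18 of RFC 1951
    (REP_3_6, REPZ_3_10, REPZ_11_138).
*/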

/*  Send a literal or distance tree in compressed form,
    using the codes in bl_tree.
*/
void
deflate_stream::
send_tree(
    ct_data *tree,      // the tree to be scanned
    int max_code)       // and its largest code of non zero frequency
{
    int n;                      // iterates over all tree elements
    int prevlen = -1;           // last emitted length
    int curlen;                 // length of current code
    int nextlen = tree[0].dl;   // length of next code
    int count = 0;              // repeat count of the current code
    int max_count = 7;          // max repeat count
    int min_count = 4;          // min repeat count

    // tree[max_code+1].dl = -1; // guard already set
    if(nextlen == 0)
    {
        max_count = 138;
        min_count = 3;
    }

    for(n = 0; n <= max_code; n++)
    {
        curlen = nextlen;
        nextlen = tree[n+1].dl;
        if(++count < max_count && curlen == nextlen)
        {
            continue;
        }
        else if(count < min_count)
        {
            do
            {
                send_code(curlen, bl_tree_);
            }
            while (--count != 0);
        }
        else if(curlen != 0)
        {
            if(curlen != prevlen)
            {
                send_code(curlen, bl_tree_);
                count--;
            }
            BOOST_ASSERT(count >= 3 && count <= 6);
            send_code(REP_3_6, bl_tree_);
            send_bits(count-3, 2);
        }
        else if(count <= 10)
        {
            send_code(REPZ_3_10, bl_tree_);
            send_bits(count-3, 3);
        }
        else
        {
            send_code(REPZ_11_138, bl_tree_);
            send_bits(count-11, 7);
        }
        count = 0;
        prevlen = curlen;
        if(nextlen == 0)
        {
            max_count = 138;
            min_count = 3;
        }
        else if(curlen == nextlen)
        {
            max_count = 6;
            min_count = 3;
        }
        else
        {
            max_count = 7;
            min_count = 4;
        }
    }
}

/*  Construct the Huffman tree for the bit lengths and return
    the index in bl_order of the last bit length code to send.
*/
int
deflate_stream::
build_bl_tree()
{
    int max_blindex;  // index of last bit length code of non zero freq

    // Determine the bit length frequencies for literal and distance trees
    scan_tree(dyn_ltree_, l_desc_.max_code);
    scan_tree(dyn_dtree_, d_desc_.max_code);

    // Build the bit length tree:
    build_tree(&bl_desc_);
    /* opt_len now includes the length of the tree representations, except
     * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
     * requires that at least 4 bit length codes be sent. (appnote.txt says
     * 3 but the actual value used is 4.)
     */
    for(max_blindex = blCodes-1; max_blindex >= 3; max_blindex--)
    {
        if(bl_tree_[lut_.bl_order[max_blindex]].dl != 0)
            break;
    }
    // Update opt_len to include the bit length tree and counts
    opt_len_ += 3*(max_blindex+1) + 5+5+4;
    return max_blindex;
}

/*  Send the header for a block using dynamic Huffman trees: the counts,
    the lengths of the bit length codes, the literal tree and the distance
    tree.
    IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
*/
void
deflate_stream::
send_all_trees(
    int lcodes,
    int dcodes,
    int blcodes)    // number of codes for each tree
{
    int rank;       // index in bl_order

    BOOST_ASSERT(lcodes >= 257 && dcodes >= 1 && blcodes >= 4);
    BOOST_ASSERT(lcodes <= lCodes && dcodes <= dCodes && blcodes <= blCodes);
    send_bits(lcodes-257, 5); // not +255 as stated in appnote.txt
    send_bits(dcodes-1,   5);
    send_bits(blcodes-4,  4); // not -3 as stated in appnote.txt
    for(rank = 0; rank < blcodes; rank++)
        send_bits(bl_tree_[lut_.bl_order[rank]].dl, 3);
    send_tree(dyn_ltree_, lcodes-1); // literal tree
    send_tree(dyn_dtree_, dcodes-1); // distance tree
}

/*  Send the block data compressed using the given Huffman trees
*/
void
deflate_stream::
compress_block(
    ct_data const* ltree, // literal tree
    ct_data const* dtree) // distance tree
{
    unsigned dist;      /* distance of matched string */
    int lc;             /* match length or unmatched char (if dist == 0) */
    unsigned lx = 0;    /* running index in l_buf */
    unsigned code;      /* the code to send */
    int extra;          /* number of extra bits to send */

    if(last_lit_ != 0)
    {
        do
        {
            dist = d_buf_[lx];
            lc = l_buf_[lx++];
            if(dist == 0)
            {
                send_code(lc, ltree); /* send a literal byte */
            }
            else
            {
                /* Here, lc is the match length - minMatch */
                code = lut_.length_code[lc];
                send_code(code+literals+1, ltree); /* send the length code */
                extra = lut_.extra_lbits[code];
                if(extra != 0)
                {
                    lc -= lut_.base_length[code];
                    send_bits(lc, extra);       /* send the extra length bits */
                }
                dist--; /* dist is now the match distance - 1 */
                code = d_code(dist);
                BOOST_ASSERT(code < dCodes);

                send_code(code, dtree);       /* send the distance code */
                extra = lut_.extra_dbits[code];
                if(extra != 0)
                {
                    dist -= lut_.base_dist[code];
                    send_bits(dist, extra);   /* send the extra distance bits */
                }
            } /* literal or match pair ? */

            /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
            BOOST_ASSERT((uInt)(pending_) < lit_bufsize_ + 2*lx);
        }
        while(lx < last_lit_);
    }

    send_code(END_BLOCK, ltree);
}
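
/*  Worked example (illustrative): a match of length 10 at distance 5 was
    stored by tr_tally_dist() as dist == 5, lc == 10 - minMatch == 7. The
    loop above then emits

        length:   length_code[7] == 7, sent as ltree symbol 7+literals+1
                  == 264 with extra_lbits[7] == 0 extra bits;
        distance: dist-1 == 4, d_code(4) == 4, sent from dtree with
                  extra_dbits[4] == 1 extra bit of value 4 - base_dist[4] == 0,

    which is RFC 1951's code 264 (length 10) and distance code 4 with extra
    bit 0 (distances 5..6).
*/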

/*  Check if the data type is TEXT or BINARY, using the following algorithm:
    - TEXT if the two conditions below are satisfied:
        a) There are no non-portable control characters belonging to the
            "black list" (0..6, 14..25, 28..31).
        b) There is at least one printable character belonging to the
            "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
    - BINARY otherwise.
    - The following partially-portable control characters form a
        "gray list" that is ignored in this detection algorithm:
        (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
    IN assertion: the fields fc of dyn_ltree are set.
*/
int
deflate_stream::
detect_data_type()
{
    /* black_mask is the bit mask of black-listed bytes
     * set bits 0..6, 14..25, and 28..31
     * 0xf3ffc07f = binary 11110011111111111100000001111111
     */
    unsigned long black_mask = 0xf3ffc07fUL;
    int n;

    // Check for non-textual ("black-listed") bytes.
    for(n = 0; n <= 31; n++, black_mask >>= 1)
        if((black_mask & 1) && (dyn_ltree_[n].fc != 0))
            return binary;

    // Check for textual ("white-listed") bytes.
    if(dyn_ltree_[9].fc != 0 || dyn_ltree_[10].fc != 0
            || dyn_ltree_[13].fc != 0)
        return text;
    for(n = 32; n < literals; n++)
        if(dyn_ltree_[n].fc != 0)
            return text;

    /* There are no "black-listed" or "white-listed" bytes:
     * this stream either is empty or has tolerated ("gray-listed") bytes only.
     */
    return binary;
}

/*  Flush the bit buffer and align the output on a byte boundary
*/
void
deflate_stream::
bi_windup()
{
    if(bi_valid_ > 8)
        put_short(bi_buf_);
    else if(bi_valid_ > 0)
        put_byte((Byte)bi_buf_);
    bi_buf_ = 0;
    bi_valid_ = 0;
}

/*  Flush the bit buffer, keeping at most 7 bits in it.
*/
void
deflate_stream::
bi_flush()
{
    if(bi_valid_ == 16)
    {
        put_short(bi_buf_);
        bi_buf_ = 0;
        bi_valid_ = 0;
    }
    else if(bi_valid_ >= 8)
    {
        put_byte((Byte)bi_buf_);
        bi_buf_ >>= 8;
        bi_valid_ -= 8;
    }
}
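
/*  Example (illustrative): with 13 valid bits buffered, bi_flush() above
    writes one byte and keeps 5 bits (bi_buf_ >>= 8, bi_valid_ == 5),
    while bi_windup() would write both bytes (the unused top bits are
    zero) and empty the buffer to realign output on a byte boundary.
*/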

/*  Copy a stored block, storing first the length and its
    one's complement if requested.
*/
void
deflate_stream::
copy_block(
    char    *buf,       // the input data
    unsigned len,       // its length
    int      header)    // true if block header must be written
{
    bi_windup();        // align on byte boundary

    if(header)
    {
        put_short((std::uint16_t)len);
        put_short((std::uint16_t)~len);
    }
    if(buf)
        std::memcpy(&pending_buf_[pending_], buf, len);
    pending_ += len;
}

//------------------------------------------------------------------------------

/* Initialize the tree data structures for a new zlib stream.
*/
void
deflate_stream::
tr_init()
{
    l_desc_.dyn_tree = dyn_ltree_;
    l_desc_.stat_desc = &lut_.l_desc;

    d_desc_.dyn_tree = dyn_dtree_;
    d_desc_.stat_desc = &lut_.d_desc;

    bl_desc_.dyn_tree = bl_tree_;
    bl_desc_.stat_desc = &lut_.bl_desc;

    bi_buf_ = 0;
    bi_valid_ = 0;

    // Initialize the first block of the first file:
    init_block();
}

/*  Send one empty static block to give enough lookahead for inflate.
    This takes 10 bits, of which 7 may remain in the bit buffer.
*/
void
deflate_stream::
tr_align()
{
    send_bits(STATIC_TREES<<1, 3);
    send_code(END_BLOCK, lut_.ltree);
    bi_flush();
}

/* Flush the bits in the bit buffer to pending output (leaves at most 7 bits)
*/
void
deflate_stream::
tr_flush_bits()
{
    bi_flush();
}

/* Send a stored block
*/
void
deflate_stream::
tr_stored_block(
    char *buf,                  // input block
    std::uint32_t stored_len,   // length of input block
    int last)                   // one if this is the last block for a file
{
    send_bits((STORED_BLOCK<<1)+last, 3);       // send block type
    copy_block(buf, (unsigned)stored_len, 1);   // with header
}

void
deflate_stream::
tr_tally_dist(std::uint16_t dist, std::uint8_t len, bool& flush)
{
    d_buf_[last_lit_] = dist;
    l_buf_[last_lit_++] = len;
    dist--;
    dyn_ltree_[lut_.length_code[len]+literals+1].fc++;
    dyn_dtree_[d_code(dist)].fc++;
    flush = (last_lit_ == lit_bufsize_-1);
}

void
deflate_stream::
tr_tally_lit(std::uint8_t c, bool& flush)
{
    d_buf_[last_lit_] = 0;
    l_buf_[last_lit_++] = c;
    dyn_ltree_[c].fc++;
    flush = (last_lit_ == lit_bufsize_-1);
}

//------------------------------------------------------------------------------

/*  Determine the best encoding for the current block: dynamic trees,
    static trees or store, and output the encoded block to the zip file.
*/
void
deflate_stream::
tr_flush_block(
    z_params& zs,
    char *buf,                  // input block, or NULL if too old
    std::uint32_t stored_len,   // length of input block
    int last)                   // one if this is the last block for a file
{
    std::uint32_t opt_lenb;
    std::uint32_t static_lenb;  // opt_len and static_len in bytes
    int max_blindex = 0;        // index of last bit length code of non zero freq

    // Build the Huffman trees unless a stored block is forced
    if(level_ > 0)
    {
        // Check if the file is binary or text
        if(zs.data_type == unknown)
            zs.data_type = detect_data_type();

        // Construct the literal and distance trees
        build_tree(&l_desc_);

        build_tree(&d_desc_);
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the index
         * in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree();

        /* Determine the best encoding. Compute the block lengths in bytes. */
        opt_lenb = (opt_len_+3+7)>>3;
        static_lenb = (static_len_+3+7)>>3;

        if(static_lenb <= opt_lenb)
            opt_lenb = static_lenb;
    }
    else
    {
        // VFALCO This assertion fails even in the original ZLib,
        //        happens with strategy == Z_HUFFMAN_ONLY, see:
        //        https://github.com/madler/zlib/issues/172

    #if 0
        BOOST_ASSERT(buf);
    #endif
        opt_lenb = static_lenb = stored_len + 5; // force a stored block
    }

#ifdef FORCE_STORED
    if(buf != nullptr)
    {
        // force stored block
#else
    if(stored_len+4 <= opt_lenb && buf != nullptr)
    {
        // 4: two words for the lengths
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        tr_stored_block(buf, stored_len, last);

#ifdef FORCE_STATIC
    }
    else if(static_lenb >= 0)
    {
        // force static trees
#else
    }
    else if(strategy_ == Strategy::fixed || static_lenb == opt_lenb)
    {
#endif
        send_bits((STATIC_TREES<<1)+last, 3);
        compress_block(lut_.ltree, lut_.dtree);
    }
    else
    {
        send_bits((DYN_TREES<<1)+last, 3);
        send_all_trees(l_desc_.max_code+1, d_desc_.max_code+1,
                       max_blindex+1);
        compress_block(dyn_ltree_, dyn_dtree_);
    }
    /* The above check is made mod 2^32, for files larger than 512 MB
     * and std::size_t implemented on 32 bits.
     */
    init_block();

    if(last)
        bi_windup();
}

void
deflate_stream::
fill_window(z_params& zs)
{
    unsigned n, m;
    unsigned more;    // Amount of free space at the end of the window.
    std::uint16_t *p;
    uInt wsize = w_size_;

    do
    {
        more = (unsigned)(window_size_ -
            (std::uint32_t)lookahead_ - (std::uint32_t)strstart_);

        // VFALCO We don't support systems below 32-bit
    #if 0
        // Deal with !@#$% 64K limit:
        if(sizeof(int) <= 2)
        {
            if(more == 0 && strstart_ == 0 && lookahead_ == 0)
            {
                more = wsize;
            }
            else if(more == (unsigned)(-1))
            {
                /* Very unlikely, but possible on 16 bit machine if
                 * strstart == 0 && lookahead == 1 (input done a byte at time)
                 */
                more--;
            }
        }
    #endif

        /*  If the window is almost full and there is insufficient lookahead,
            move the upper half to the lower one to make room in the upper half.
        */
        if(strstart_ >= wsize+max_dist())
        {
            std::memcpy(window_, window_+wsize, (unsigned)wsize);
            match_start_ -= wsize;
            strstart_    -= wsize; // we now have strstart >= max_dist
            block_start_ -= (long) wsize;

            /* Slide the hash table (could be avoided with 32 bit values
               at the expense of memory usage). We slide even when level == 0
               to keep the hash table consistent if we switch back to level > 0
               later. (Using level 0 permanently is not an optimal usage of
               zlib, so we don't care about this pathological case.)
            */
            n = hash_size_;
            p = &head_[n];
            do
            {
                m = *--p;
                *p = (std::uint16_t)(m >= wsize ? m-wsize : 0);
            }
            while(--n);

            n = wsize;
            p = &prev_[n];
            do
            {
                m = *--p;
                *p = (std::uint16_t)(m >= wsize ? m-wsize : 0);
                /*  If n is not on any hash chain, prev[n] is garbage but
                    its value will never be used.
                */
            }
            while(--n);
            more += wsize;
        }
        if(zs.avail_in == 0)
            break;

        /*  If there was no sliding:
               strstart <= WSIZE+max_dist-1 && lookahead <= kMinLookahead - 1 &&
               more == window_size - lookahead - strstart
            => more >= window_size - (kMinLookahead-1 + WSIZE + max_dist-1)
            => more >= window_size - 2*WSIZE + 2
            In the BIG_MEM or MMAP case (not yet supported),
              window_size == input_size + kMinLookahead  &&
              strstart + lookahead_ <= input_size => more >= kMinLookahead.
            Otherwise, window_size == 2*WSIZE so more >= 2.
            If there was sliding, more >= WSIZE. So in all cases, more >= 2.
        */
        n = read_buf(zs, window_ + strstart_ + lookahead_, more);
        lookahead_ += n;

        // Initialize the hash value now that we have some input:
        if(lookahead_ + insert_ >= minMatch)
        {
            uInt str = strstart_ - insert_;
            ins_h_ = window_[str];
            update_hash(ins_h_, window_[str + 1]);
            while(insert_)
            {
                update_hash(ins_h_, window_[str + minMatch-1]);
                prev_[str & w_mask_] = head_[ins_h_];
                head_[ins_h_] = (std::uint16_t)str;
                str++;
                insert_--;
                if(lookahead_ + insert_ < minMatch)
                    break;
            }
        }
        /*  If the whole input has less than minMatch bytes, ins_h is garbage,
            but this is not important since only literal bytes will be emitted.
        */
    }
    while(lookahead_ < kMinLookahead && zs.avail_in != 0);

    /*  If the kWinInit bytes after the end of the current data have never been
        written, then zero those bytes in order to avoid memory check reports of
        the use of uninitialized (or uninitialised as Julian writes) bytes by
        the longest match routines.  Update the high water mark for the next
        time through here.  kWinInit is set to maxMatch since the longest match
        routines allow scanning to strstart + maxMatch, ignoring lookahead.
    */
    if(high_water_ < window_size_)
    {
        std::uint32_t curr = strstart_ + (std::uint32_t)(lookahead_);
        std::uint32_t winit;

        if(high_water_ < curr)
        {
            /*  Previous high water mark below current data -- zero kWinInit
                bytes or up to end of window, whichever is less.
            */
            winit = window_size_ - curr;
            if(winit > kWinInit)
                winit = kWinInit;
            std::memset(window_ + curr, 0, (unsigned)winit);
            high_water_ = curr + winit;
        }
        else if(high_water_ < (std::uint32_t)curr + kWinInit)
        {
            /*  High water mark at or above current data, but below current data
                plus kWinInit -- zero out to current data plus kWinInit, or up
                to end of window, whichever is less.
            */
            winit = (std::uint32_t)curr + kWinInit - high_water_;
            if(winit > window_size_ - high_water_)
                winit = window_size_ - high_water_;
            std::memset(window_ + high_water_, 0, (unsigned)winit);
            high_water_ += winit;
        }
    }
}
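
/*  Illustrative numbers: with the default w_size_ == 32768, the slide at
    the top of the loop triggers once strstart_ reaches wsize + max_dist().
    The upper 32 KiB of the window is copied down; strstart_, match_start_
    and block_start_ drop by 32768, and every head_/prev_ entry is either
    rebased by -32768 or zeroed when it referred to data that slid out.
    Positions therefore always fit in the std::uint16_t hash entries.
*/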

/*  Flush as much pending output as possible. All write() output goes
    through this function so some applications may wish to modify it
    to avoid allocating a large strm->next_out buffer and copying into it.
    (See also read_buf()).
*/
void
deflate_stream::
flush_pending(z_params& zs)
{
    tr_flush_bits();
    auto len = clamp(pending_, zs.avail_out);
    if(len == 0)
        return;

    std::memcpy(zs.next_out, pending_out_, len);
    zs.next_out =
        static_cast<std::uint8_t*>(zs.next_out) + len;
    pending_out_ += len;
    zs.total_out += len;
    zs.avail_out -= len;
    pending_ -= len;
    if(pending_ == 0)
        pending_out_ = pending_buf_;
}

/*  Flush the current block, with given end-of-file flag.
    IN assertion: strstart is set to the end of the current match.
*/
void
deflate_stream::
flush_block(z_params& zs, bool last)
{
    tr_flush_block(zs,
        (block_start_ >= 0L ?
            (char *)&window_[(unsigned)block_start_] :
            (char *)0),
        (std::uint32_t)((long)strstart_ - block_start_),
        last);
    block_start_ = strstart_;
    flush_pending(zs);
}

/*  Read a new buffer from the current input stream and update the total
    number of bytes read (this port performs raw deflate, so no adler32
    checksum is maintained here).  All write() input goes through this
    function, so an application may wish to modify it to avoid allocating
    a large zs.next_in buffer and copying from it.
    (See also flush_pending().)
*/
int
deflate_stream::
read_buf(z_params& zs, Byte *buf, unsigned size)
{
    auto len = clamp(zs.avail_in, size);
    if(len == 0)
        return 0;

    zs.avail_in -= len;
    std::memcpy(buf, zs.next_in, len);
    zs.next_in = static_cast<
        std::uint8_t const*>(zs.next_in) + len;
    zs.total_in += len;
    return (int)len;
}

/*  Set match_start to the longest match starting at the given string and
    return its length. Matches shorter than or equal to prev_length are
    discarded, in which case the result is equal to prev_length and
    match_start is garbage.
    IN assertions: cur_match is the head of the hash chain for the current
        string (strstart) and its distance is <= max_dist, and prev_length >= 1
    OUT assertion: the match length is not greater than lookahead_.

    (Historical note: zlib shipped optimized assembly variants of this
    routine as match.asm and match.S; this port uses only the portable
    C++ version.)
*/
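/*  The fast-reject test in the chain loop below can be read as follows:
    a candidate can only improve on best_len if it matches in at least
    best_len+1 bytes, so comparing match[best_len] and match[best_len-1]
    against the cached scan_end/scan_end1 discards most chain entries
    with two byte loads before any full comparison is attempted. For
    example, with scan == "abcde" and best_len == 3, a useful candidate
    must have 'd' at offset 3 and 'c' at offset 2.
*/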
uInt
deflate_stream::
longest_match(IPos cur_match)
{
    unsigned chain_length = max_chain_length_; /* max hash chain length */
    Byte *scan = window_ + strstart_;          /* current string */
    Byte *match;                               /* matched string */
    int len;                                   /* length of current match */
    int best_len = prev_length_;               /* best match length so far */
    int nice_match = nice_match_;              /* stop if match long enough */
    IPos limit = strstart_ > (IPos)max_dist() ?
        strstart_ - (IPos)max_dist() : 0;
    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    std::uint16_t *prev = prev_;
    uInt wmask = w_mask_;

    Byte *strend = window_ + strstart_ + maxMatch;
    Byte scan_end1 = scan[best_len-1];
    Byte scan_end  = scan[best_len];

    /* The code is optimized for HASH_BITS >= 8 and maxMatch-2 a multiple
     * of 16. It is easy to get rid of this optimization if necessary.
     */
    BOOST_ASSERT(hash_bits_ >= 8 && maxMatch == 258);

    /* Do not waste too much time if we already have a good match: */
    if(prev_length_ >= good_match_) {
        chain_length >>= 2;
    }
    /* Do not look for matches beyond the end of the input. This is necessary
     * to make deflate deterministic.
     */
    if((uInt)nice_match > lookahead_)
        nice_match = lookahead_;

    BOOST_ASSERT((std::uint32_t)strstart_ <= window_size_-kMinLookahead);

    do {
        BOOST_ASSERT(cur_match < strstart_);
        match = window_ + cur_match;

        /* Skip to next match if the match length cannot increase
         * or if the match length is less than 2.  Note that the checks below
         * for insufficient lookahead only occur occasionally for performance
         * reasons.  Therefore uninitialized memory will be accessed, and
         * conditional jumps will be made that depend on those values.
         * However the length of the match is limited to the lookahead, so
         * the output of deflate is not affected by the uninitialized values.
         */
        if(     match[best_len]   != scan_end  ||
                match[best_len-1] != scan_end1 ||
                *match            != *scan     ||
                *++match          != scan[1])
            continue;

        /* The check at best_len-1 can be removed because it will be made
         * again later. (This heuristic is not always a win.)
         * It is not necessary to compare scan[2] and match[2] since they
         * are always equal when the other bytes match, given that
         * the hash keys are equal and that HASH_BITS >= 8.
         */
        scan += 2, match++;
        BOOST_ASSERT(*scan == *match);

        /* We check for insufficient lookahead only every 8th comparison;
         * the 256th check will be made at strstart+258.
         */
        do
        {
        }
        while(  *++scan == *++match && *++scan == *++match &&
                *++scan == *++match && *++scan == *++match &&
                *++scan == *++match && *++scan == *++match &&
                *++scan == *++match && *++scan == *++match &&
                scan < strend);

        BOOST_ASSERT(scan <= window_+(unsigned)(window_size_-1));

        len = maxMatch - (int)(strend - scan);
        scan = strend - maxMatch;

        if(len > best_len) {
            match_start_ = cur_match;
            best_len = len;
            if(len >= nice_match) break;
            scan_end1  = scan[best_len-1];
            scan_end   = scan[best_len];
        }
    }
    while((cur_match = prev[cur_match & wmask]) > limit
        && --chain_length != 0);

    if((uInt)best_len <= lookahead_)
        return (uInt)best_len;
    return lookahead_;
}

//------------------------------------------------------------------------------

/*  Copy without compression as much as possible from the input stream and
    return the current block state.
    This function does not insert new strings in the dictionary since
    incompressible data is probably not useful. This function is used
    only for the level=0 compression option.
    NOTE: this function should be optimized to avoid extra copying from
    window to pending_buf.
*/
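/*  The 5-byte figure below comes from the RFC 1951 stored-block layout:
    a 3-bit BFINAL/BTYPE header (BTYPE == 00) padded to a byte boundary,
    followed by the 16-bit length LEN and its one's complement NLEN, each
    stored as two little-endian bytes:

        +--------+--------+--------+--------+--------+
        | header |   LEN (lo, hi)  |  NLEN (lo, hi)  |
        +--------+--------+--------+--------+--------+
*/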
auto
deflate_stream::
f_stored(z_params& zs, Flush flush) ->
    block_state
{
    /* Stored blocks are limited to 0xffff bytes, pending_buf is limited
     * to pending_buf_size, and each stored block has a 5-byte header:
     */
    std::uint32_t max_block_size = 0xffff;
    std::uint32_t max_start;

    if(max_block_size > pending_buf_size_ - 5) {
        max_block_size = pending_buf_size_ - 5;
    }

    /* Copy as much as possible from input to output: */
    for(;;) {
        /* Fill the window as much as possible: */
        if(lookahead_ <= 1) {
            BOOST_ASSERT(strstart_ < w_size_+max_dist() ||
                   block_start_ >= (long)w_size_);

            fill_window(zs);
            if(lookahead_ == 0 && flush == Flush::none)
                return need_more;

            if(lookahead_ == 0) break; /* flush the current block */
        }
        BOOST_ASSERT(block_start_ >= 0L);

        strstart_ += lookahead_;
        lookahead_ = 0;

        /* Emit a stored block if pending_buf will be full: */
        max_start = block_start_ + max_block_size;
        if(strstart_ == 0 || (std::uint32_t)strstart_ >= max_start) {
            /* strstart == 0 is possible if the counter wraps around
             * on a 16-bit machine.
             */
            lookahead_ = (uInt)(strstart_ - max_start);
            strstart_ = (uInt)max_start;
            flush_block(zs, false);
            if(zs.avail_out == 0)
                return need_more;
        }
        /* Flush if we may have to slide, otherwise block_start may become
         * negative and the data will be gone:
         */
        if(strstart_ - (uInt)block_start_ >= max_dist()) {
            flush_block(zs, false);
            if(zs.avail_out == 0)
                return need_more;
        }
    }
    insert_ = 0;
    if(flush == Flush::finish)
    {
        flush_block(zs, true);
        if(zs.avail_out == 0)
            return finish_started;
        return finish_done;
    }
    if((long)strstart_ > block_start_)
    {
        flush_block(zs, false);
        if(zs.avail_out == 0)
            return need_more;
    }
    return block_done;
}

/*  Compress as much as possible from the input stream and return the
    current block state.
    This function does not perform lazy evaluation of matches and inserts
    new strings in the dictionary only for unmatched strings or for short
    matches. It is used only for the fast compression options.
*/
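/*  insert_string(hash_head), used below, mirrors the insertion loop in
    fill_window; its presumed expansion is roughly:

        update_hash(ins_h_, window_[strstart_ + (minMatch-1)]);
        hash_head = head_[ins_h_];
        prev_[strstart_ & w_mask_] = hash_head;
        head_[ins_h_] = (std::uint16_t)strstart_;

    That is, strstart_ becomes the new head of its hash chain, and
    hash_head receives the previous head: the most recent earlier
    position with the same 3-byte prefix, or 0 if there is none.
*/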
auto
deflate_stream::
f_fast(z_params& zs, Flush flush) ->
    block_state
{
    IPos hash_head;       /* head of the hash chain */
    bool bflush;          /* set if current block must be flushed */

    for(;;)
    {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need maxMatch bytes
         * for the next match, plus minMatch bytes to insert the
         * string following the next match.
         */
        if(lookahead_ < kMinLookahead)
        {
            fill_window(zs);
            if(lookahead_ < kMinLookahead && flush == Flush::none)
                return need_more;
            if(lookahead_ == 0)
                break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        hash_head = 0;
        if(lookahead_ >= minMatch) {
            insert_string(hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         * At this point we always have match_length < minMatch.
         */
        if(hash_head != 0 && strstart_ - hash_head <= max_dist()) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            match_length_ = longest_match(hash_head);
            /* longest_match() sets match_start */
        }
        if(match_length_ >= minMatch)
        {
            tr_tally_dist(static_cast<std::uint16_t>(strstart_ - match_start_),
                static_cast<std::uint8_t>(match_length_ - minMatch), bflush);

            lookahead_ -= match_length_;

            /* Insert new strings in the hash table only if the match length
             * is not too large. This saves time but degrades compression.
             */
            if(match_length_ <= max_lazy_match_ &&
                lookahead_ >= minMatch) {
                match_length_--; /* string at strstart already in table */
                do
                {
                    strstart_++;
                    insert_string(hash_head);
                    /* strstart never exceeds WSIZE-maxMatch, so there are
                     * always minMatch bytes ahead.
                     */
                }
                while(--match_length_ != 0);
                strstart_++;
            }
            else
            {
                strstart_ += match_length_;
                match_length_ = 0;
                ins_h_ = window_[strstart_];
                update_hash(ins_h_, window_[strstart_+1]);
                /* If lookahead < minMatch, ins_h is garbage, but it does not
                 * matter since it will be recomputed at the next deflate call.
                 */
            }
        }
        else
        {
            /* No match, output a literal byte */
            tr_tally_lit(window_[strstart_], bflush);
            lookahead_--;
            strstart_++;
        }
        if(bflush)
        {
            flush_block(zs, false);
            if(zs.avail_out == 0)
                return need_more;
        }
    }
    insert_ = strstart_ < minMatch-1 ? strstart_ : minMatch-1;
    if(flush == Flush::finish)
    {
        flush_block(zs, true);
        if(zs.avail_out == 0)
            return finish_started;
        return finish_done;
    }
    if(last_lit_)
    {
        flush_block(zs, false);
        if(zs.avail_out == 0)
            return need_more;
    }
    return block_done;
}

/*  Same as above, but achieves better compression. We use lazy
    evaluation for matches: a match is finally adopted only if there is
    no better match at the next window position.
*/
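/*  Lazy evaluation by example (illustrative): suppose the match found at
    the current position has length 4, but a match of length 6 starts one
    byte later. The greedy strategy (f_fast) commits to the length-4
    match immediately; the lazy strategy instead emits the current byte
    as a literal and then takes the length-6 match, which usually codes
    smaller overall.
*/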
auto
deflate_stream::
f_slow(z_params& zs, Flush flush) ->
    block_state
{
    IPos hash_head;       /* head of hash chain */
    bool bflush;          /* set if current block must be flushed */

    /* Process the input block. */
    for(;;)
    {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need maxMatch bytes
         * for the next match, plus minMatch bytes to insert the
         * string following the next match.
         */
        if(lookahead_ < kMinLookahead)
        {
            fill_window(zs);
            if(lookahead_ < kMinLookahead && flush == Flush::none)
                return need_more;
            if(lookahead_ == 0)
                break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        hash_head = 0;
        if(lookahead_ >= minMatch)
            insert_string(hash_head);

        /* Find the longest match, discarding those <= prev_length.
         */
        prev_length_ = match_length_, prev_match_ = match_start_;
        match_length_ = minMatch-1;

        if(hash_head != 0 && prev_length_ < max_lazy_match_ &&
            strstart_ - hash_head <= max_dist())
        {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            match_length_ = longest_match(hash_head);
            /* longest_match() sets match_start */

            if(match_length_ <= 5 && (strategy_ == Strategy::filtered
                || (match_length_ == minMatch &&
                    strstart_ - match_start_ > kTooFar)
                ))
            {
                /* If prev_match is also minMatch, match_start is garbage
                 * but we will ignore the current match anyway.
                 */
                match_length_ = minMatch-1;
            }
        }
        /* If there was a match at the previous step and the current
         * match is not better, output the previous match:
         */
        if(prev_length_ >= minMatch && match_length_ <= prev_length_)
        {
            /* Do not insert strings in hash table beyond this. */
            uInt max_insert = strstart_ + lookahead_ - minMatch;

            tr_tally_dist(
                static_cast<std::uint16_t>(strstart_ - 1 - prev_match_),
                static_cast<std::uint8_t>(prev_length_ - minMatch), bflush);

            /* Insert in hash table all strings up to the end of the match.
             * strstart-1 and strstart are already inserted. If there is not
             * enough lookahead, the last two strings are not inserted in
             * the hash table.
             */
            lookahead_ -= prev_length_-1;
            prev_length_ -= 2;
            do {
                if(++strstart_ <= max_insert)
                    insert_string(hash_head);
            }
            while(--prev_length_ != 0);
            match_available_ = 0;
            match_length_ = minMatch-1;
            strstart_++;

            if(bflush)
            {
                flush_block(zs, false);
                if(zs.avail_out == 0)
                    return need_more;
            }
        }
        else if(match_available_)
        {
            /* If there was no match at the previous position, output a
             * single literal. If there was a match but the current match
             * is longer, truncate the previous match to a single literal.
             */
            tr_tally_lit(window_[strstart_-1], bflush);
            if(bflush)
                flush_block(zs, false);
            strstart_++;
            lookahead_--;
            if(zs.avail_out == 0)
                return need_more;
        }
        else
        {
            /* There is no previous match to compare with, wait for
             * the next step to decide.
             */
            match_available_ = 1;
            strstart_++;
            lookahead_--;
        }
    }
    BOOST_ASSERT(flush != Flush::none);
    if(match_available_)
    {
        tr_tally_lit(window_[strstart_-1], bflush);
        match_available_ = 0;
    }
    insert_ = strstart_ < minMatch-1 ? strstart_ : minMatch-1;
    if(flush == Flush::finish)
    {
        flush_block(zs, true);
        if(zs.avail_out == 0)
            return finish_started;
        return finish_done;
    }
    if(last_lit_)
    {
        flush_block(zs, false);
        if(zs.avail_out == 0)
            return need_more;
    }
    return block_done;
}

/*  For Strategy::rle, simply look for runs of bytes, generating matches
    only of distance one. Do not maintain a hash table. (It will be
    regenerated if this run of deflate switches away from Strategy::rle.)
*/
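/*  Example (illustrative): for the input "aaaaaaa" (seven bytes), f_rle
    emits the literal 'a' followed by a <length 6, distance 1> match. On
    inflation the distance-one copy overlaps itself, each copied byte
    duplicating the byte written just before it, so the entire run is
    reconstructed from two symbols.
*/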
auto
deflate_stream::
f_rle(z_params& zs, Flush flush) ->
    block_state
{
    bool bflush;            // set if current block must be flushed
    uInt prev;              // byte at distance one to match
    Byte *scan, *strend;    // scan goes up to strend for length of run

    for(;;)
    {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need maxMatch bytes
         * for the longest run, plus one for the unrolled loop.
         */
        if(lookahead_ <= maxMatch) {
            fill_window(zs);
            if(lookahead_ <= maxMatch && flush == Flush::none) {
                return need_more;
            }
            if(lookahead_ == 0) break; /* flush the current block */
        }

        /* See how many times the previous byte repeats */
        match_length_ = 0;
        if(lookahead_ >= minMatch && strstart_ > 0) {
            scan = window_ + strstart_ - 1;
            prev = *scan;
            if(prev == *++scan && prev == *++scan && prev == *++scan) {
                strend = window_ + strstart_ + maxMatch;
                do {
                } while(prev == *++scan && prev == *++scan &&
                        prev == *++scan && prev == *++scan &&
                        prev == *++scan && prev == *++scan &&
                        prev == *++scan && prev == *++scan &&
                        scan < strend);
                match_length_ = maxMatch - (int)(strend - scan);
                if(match_length_ > lookahead_)
                    match_length_ = lookahead_;
            }
            BOOST_ASSERT(scan <= window_+(uInt)(window_size_-1));
        }

        /* Emit a match if we have a run of minMatch or longer, else emit
         * a literal:
         */
        if(match_length_ >= minMatch) {
            tr_tally_dist(std::uint16_t{1},
                static_cast<std::uint8_t>(match_length_ - minMatch),
                bflush);

            lookahead_ -= match_length_;
            strstart_ += match_length_;
            match_length_ = 0;
        } else {
            /* No match, output a literal byte */
            tr_tally_lit(window_[strstart_], bflush);
            lookahead_--;
            strstart_++;
        }
        if(bflush)
        {
            flush_block(zs, false);
            if(zs.avail_out == 0)
                return need_more;
        }
    }
    insert_ = 0;
    if(flush == Flush::finish)
    {
        flush_block(zs, true);
        if(zs.avail_out == 0)
            return finish_started;
        return finish_done;
    }
    if(last_lit_)
    {
        flush_block(zs, false);
        if(zs.avail_out == 0)
            return need_more;
    }
    return block_done;
}

/*  For Strategy::huffman, do not look for matches. Do not maintain a hash
    table. (It will be regenerated if this run of deflate switches away
    from Huffman.)
*/
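/*  Under this strategy every byte is tallied as a literal, so the output
    reduces to a Huffman coding of byte-value frequencies (the behavior
    zlib exposes as Z_HUFFMAN_ONLY). It is typically worthwhile only when
    the caller has already removed positional redundancy, for example
    image rows passed through a predictive filter.
*/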
auto
deflate_stream::
f_huff(z_params& zs, Flush flush) ->
    block_state
{
    bool bflush;        // set if current block must be flushed

    for(;;)
    {
        // Make sure that we have a literal to write.
        if(lookahead_ == 0)
        {
            fill_window(zs);
            if(lookahead_ == 0)
            {
                if(flush == Flush::none)
                    return need_more;
                break;      // flush the current block
            }
        }

        // Output a literal byte
        match_length_ = 0;
        tr_tally_lit(window_[strstart_], bflush);
        lookahead_--;
        strstart_++;
        if(bflush)
        {
            flush_block(zs, false);
            if(zs.avail_out == 0)
                return need_more;
        }
    }
    insert_ = 0;
    if(flush == Flush::finish)
    {
        flush_block(zs, true);
        if(zs.avail_out == 0)
            return finish_started;
        return finish_done;
    }
    if(last_lit_)
    {
        flush_block(zs, false);
        if(zs.avail_out == 0)
            return need_more;
    }
    return block_done;
}

} // detail
} // zlib
} // beast
} // boost

#endif