1 /* -*- mode: C; c-basic-offset: 4; indent-tabs-mode: nil -*- */
2 // vim: expandtab:ts=8:sw=4:softtabstop=4:
3 ///////////////////////////////////////////////////////////////////////////////
4 //
5 /// \file lzma_encoder.c
6 /// \brief LZMA encoder
7 ///
8 // Authors: Igor Pavlov
9 // Lasse Collin
10 //
11 // This file has been put into the public domain.
12 // You can do whatever you want with this file.
13 //
14 ///////////////////////////////////////////////////////////////////////////////
15
16 #include "lzma2_encoder.h"
17 #include "lzma_encoder_private.h"
18 #include "fastpos.h"
19
20
21 /////////////
22 // Literal //
23 /////////////
24
25 static inline void
literal_matched(lzma_range_encoder * rc,probability * subcoder,uint32_t match_byte,uint32_t symbol)26 literal_matched(lzma_range_encoder *rc, probability *subcoder,
27 uint32_t match_byte, uint32_t symbol)
28 {
29 uint32_t offset = 0x100;
30 symbol += UINT32_C(1) << 8;
31
32 do {
33 match_byte <<= 1;
34 const uint32_t match_bit = match_byte & offset;
35 const uint32_t subcoder_index
36 = offset + match_bit + (symbol >> 8);
37 const uint32_t bit = (symbol >> 7) & 1;
38 rc_bit(rc, &subcoder[subcoder_index], bit);
39
40 symbol <<= 1;
41 offset &= ~(match_byte ^ symbol);
42
43 } while (symbol < (UINT32_C(1) << 16));
44 }
45
46
/// Encode one literal byte. The byte to encode is the one at the current
/// encoding position (read_pos minus read_ahead); the literal subcoder is
/// selected from the position and the previous byte.
static inline void
literal(lzma_coder *coder, lzma_mf *mf, uint32_t position)
{
    // Locate the literal byte to be encoded and the subcoder.
    const uint8_t cur_byte = mf->buffer[
            mf->read_pos - mf->read_ahead];
    probability *subcoder = literal_subcoder(coder->literal,
            coder->literal_context_bits, coder->literal_pos_mask,
            position, mf->buffer[mf->read_pos - mf->read_ahead - 1]);

    if (is_literal_state(coder->state)) {
        // Previous LZMA-symbol was a literal. Encode a normal
        // literal without a match byte.
        rc_bittree(&coder->rc, subcoder, 8, cur_byte);
    } else {
        // Previous LZMA-symbol was a match. Use the last byte of
        // the match as a "match byte". That is, compare the bits
        // of the current literal and the match byte.
        const uint8_t match_byte = mf->buffer[
                mf->read_pos - coder->reps[0] - 1
                - mf->read_ahead];
        literal_matched(&coder->rc, subcoder, match_byte, cur_byte);
    }

    // The state machine moves toward the "previous symbol was a
    // literal" states.
    update_literal(coder->state);
}
73
74
75 //////////////////
76 // Match length //
77 //////////////////
78
79 static void
length_update_prices(lzma_length_encoder * lc,const uint32_t pos_state)80 length_update_prices(lzma_length_encoder *lc, const uint32_t pos_state)
81 {
82 const uint32_t table_size = lc->table_size;
83 lc->counters[pos_state] = table_size;
84
85 const uint32_t a0 = rc_bit_0_price(lc->choice);
86 const uint32_t a1 = rc_bit_1_price(lc->choice);
87 const uint32_t b0 = a1 + rc_bit_0_price(lc->choice2);
88 const uint32_t b1 = a1 + rc_bit_1_price(lc->choice2);
89 uint32_t *const prices = lc->prices[pos_state];
90
91 uint32_t i;
92 for (i = 0; i < table_size && i < LEN_LOW_SYMBOLS; ++i)
93 prices[i] = a0 + rc_bittree_price(lc->low[pos_state],
94 LEN_LOW_BITS, i);
95
96 for (; i < table_size && i < LEN_LOW_SYMBOLS + LEN_MID_SYMBOLS; ++i)
97 prices[i] = b0 + rc_bittree_price(lc->mid[pos_state],
98 LEN_MID_BITS, i - LEN_LOW_SYMBOLS);
99
100 for (; i < table_size; ++i)
101 prices[i] = b1 + rc_bittree_price(lc->high, LEN_HIGH_BITS,
102 i - LEN_LOW_SYMBOLS - LEN_MID_SYMBOLS);
103
104 return;
105 }
106
107
108 static inline void
length(lzma_range_encoder * rc,lzma_length_encoder * lc,const uint32_t pos_state,uint32_t len,const bool fast_mode)109 length(lzma_range_encoder *rc, lzma_length_encoder *lc,
110 const uint32_t pos_state, uint32_t len, const bool fast_mode)
111 {
112 assert(len <= MATCH_LEN_MAX);
113 len -= MATCH_LEN_MIN;
114
115 if (len < LEN_LOW_SYMBOLS) {
116 rc_bit(rc, &lc->choice, 0);
117 rc_bittree(rc, lc->low[pos_state], LEN_LOW_BITS, len);
118 } else {
119 rc_bit(rc, &lc->choice, 1);
120 len -= LEN_LOW_SYMBOLS;
121
122 if (len < LEN_MID_SYMBOLS) {
123 rc_bit(rc, &lc->choice2, 0);
124 rc_bittree(rc, lc->mid[pos_state], LEN_MID_BITS, len);
125 } else {
126 rc_bit(rc, &lc->choice2, 1);
127 len -= LEN_MID_SYMBOLS;
128 rc_bittree(rc, lc->high, LEN_HIGH_BITS, len);
129 }
130 }
131
132 // Only getoptimum uses the prices so don't update the table when
133 // in fast mode.
134 if (!fast_mode)
135 if (--lc->counters[pos_state] == 0)
136 length_update_prices(lc, pos_state);
137 }
138
139
140 ///////////
141 // Match //
142 ///////////
143
/// Encode a normal (non-repeated) match: first the length, then the
/// distance slot and the slot-dependent extra distance bits. Finally the
/// rep-distance history is shifted so the new distance becomes reps[0].
static inline void
match(lzma_coder *coder, const uint32_t pos_state,
        const uint32_t distance, const uint32_t len)
{
    update_match(coder->state);

    length(&coder->rc, &coder->match_len_encoder, pos_state, len,
            coder->fast_mode);

    // The distance is encoded as a pos slot (bit tree selected by a
    // length-dependent context) followed by extra bits whose count
    // depends on the slot.
    const uint32_t pos_slot = get_pos_slot(distance);
    const uint32_t len_to_pos_state = get_len_to_pos_state(len);
    rc_bittree(&coder->rc, coder->pos_slot[len_to_pos_state],
            POS_SLOT_BITS, pos_slot);

    if (pos_slot >= START_POS_MODEL_INDEX) {
        const uint32_t footer_bits = (pos_slot >> 1) - 1;
        const uint32_t base = (2 | (pos_slot & 1)) << footer_bits;
        const uint32_t pos_reduced = distance - base;

        if (pos_slot < END_POS_MODEL_INDEX) {
            // Careful here: base - pos_slot - 1 can be -1, but
            // rc_bittree_reverse starts at probs[1], not probs[0].
            rc_bittree_reverse(&coder->rc,
                    coder->pos_special + base - pos_slot - 1,
                    footer_bits, pos_reduced);
        } else {
            // Large distance: the middle bits go out without a
            // probability model; only the lowest ALIGN_BITS use
            // the adaptive pos_align model.
            rc_direct(&coder->rc, pos_reduced >> ALIGN_BITS,
                    footer_bits - ALIGN_BITS);
            rc_bittree_reverse(
                    &coder->rc, coder->pos_align,
                    ALIGN_BITS, pos_reduced & ALIGN_MASK);
            // Count toward the next align price-table update.
            ++coder->align_price_count;
        }
    }

    // Shift the distance history; the newest distance is reps[0].
    coder->reps[3] = coder->reps[2];
    coder->reps[2] = coder->reps[1];
    coder->reps[1] = coder->reps[0];
    coder->reps[0] = distance;
    ++coder->match_price_count;
}
185
186
187 ////////////////////
188 // Repeated match //
189 ////////////////////
190
191 static inline void
rep_match(lzma_coder * coder,const uint32_t pos_state,const uint32_t rep,const uint32_t len)192 rep_match(lzma_coder *coder, const uint32_t pos_state,
193 const uint32_t rep, const uint32_t len)
194 {
195 if (rep == 0) {
196 rc_bit(&coder->rc, &coder->is_rep0[coder->state], 0);
197 rc_bit(&coder->rc,
198 &coder->is_rep0_long[coder->state][pos_state],
199 len != 1);
200 } else {
201 const uint32_t distance = coder->reps[rep];
202 rc_bit(&coder->rc, &coder->is_rep0[coder->state], 1);
203
204 if (rep == 1) {
205 rc_bit(&coder->rc, &coder->is_rep1[coder->state], 0);
206 } else {
207 rc_bit(&coder->rc, &coder->is_rep1[coder->state], 1);
208 rc_bit(&coder->rc, &coder->is_rep2[coder->state],
209 rep - 2);
210
211 if (rep == 3)
212 coder->reps[3] = coder->reps[2];
213
214 coder->reps[2] = coder->reps[1];
215 }
216
217 coder->reps[1] = coder->reps[0];
218 coder->reps[0] = distance;
219 }
220
221 if (len == 1) {
222 update_short_rep(coder->state);
223 } else {
224 length(&coder->rc, &coder->rep_len_encoder, pos_state, len,
225 coder->fast_mode);
226 update_long_rep(coder->state);
227 }
228 }
229
230
231 //////////
232 // Main //
233 //////////
234
235 static void
encode_symbol(lzma_coder * coder,lzma_mf * mf,uint32_t back,uint32_t len,uint32_t position)236 encode_symbol(lzma_coder *coder, lzma_mf *mf,
237 uint32_t back, uint32_t len, uint32_t position)
238 {
239 const uint32_t pos_state = position & coder->pos_mask;
240
241 if (back == UINT32_MAX) {
242 // Literal i.e. eight-bit byte
243 assert(len == 1);
244 rc_bit(&coder->rc,
245 &coder->is_match[coder->state][pos_state], 0);
246 literal(coder, mf, position);
247 } else {
248 // Some type of match
249 rc_bit(&coder->rc,
250 &coder->is_match[coder->state][pos_state], 1);
251
252 if (back < REP_DISTANCES) {
253 // It's a repeated match i.e. the same distance
254 // has been used earlier.
255 rc_bit(&coder->rc, &coder->is_rep[coder->state], 1);
256 rep_match(coder, pos_state, back, len);
257 } else {
258 // Normal match
259 rc_bit(&coder->rc, &coder->is_rep[coder->state], 0);
260 match(coder, pos_state, back - REP_DISTANCES, len);
261 }
262 }
263
264 assert(mf->read_ahead >= len);
265 mf->read_ahead -= len;
266 }
267
268
/// Encode the very first LZMA symbol of the stream. The first symbol must
/// always be a literal, and since no previous byte exists, it is encoded
/// with the plain (non-matched) literal coder using subcoder index 0.
///
/// \return false if there is no input yet and we cannot proceed
///         (LZMA_RUN with an empty buffer); true otherwise.
static bool
encode_init(lzma_coder *coder, lzma_mf *mf)
{
    assert(mf_position(mf) == 0);

    if (mf->read_pos == mf->read_limit) {
        if (mf->action == LZMA_RUN)
            return false; // We cannot do anything.

        // We are finishing (we cannot get here when flushing).
        assert(mf->write_pos == mf->read_pos);
        assert(mf->action == LZMA_FINISH);
    } else {
        // Do the actual initialization. The first LZMA symbol must
        // always be a literal.
        mf_skip(mf, 1);
        mf->read_ahead = 0;
        rc_bit(&coder->rc, &coder->is_match[0][0], 0);
        rc_bittree(&coder->rc, coder->literal[0], 8, mf->buffer[0]);
    }

    // Initialization is done (except if empty file).
    coder->is_initialized = true;

    return true;
}
295
296
/// Encode the end-of-payload marker, which is a normal match whose
/// distance field is UINT32_MAX (a value no real match can produce)
/// and whose length is the minimum match length.
static void
encode_eopm(lzma_coder *coder, uint32_t position)
{
    const uint32_t pos_state = position & coder->pos_mask;
    rc_bit(&coder->rc, &coder->is_match[coder->state][pos_state], 1);
    rc_bit(&coder->rc, &coder->is_rep[coder->state], 0);
    match(coder, pos_state, UINT32_MAX, MATCH_LEN_MIN);
}
305
306
307 /// Number of bytes that a single encoding loop in lzma_lzma_encode() can
308 /// consume from the dictionary. This limit comes from lzma_lzma_optimum()
309 /// and may need to be updated if that function is significantly modified.
310 #define LOOP_INPUT_MAX (OPTS + 1)
311
312
/// \brief Main LZMA encoding loop
///
/// Encodes symbols from the match finder into `out` until the output
/// buffer fills up, input runs out, or (with LZMA2) the chunk limit is
/// reached. At the end of the stream the range encoder is flushed and,
/// for plain LZMA, the end-of-payload marker is written first.
///
/// \param limit UINT32_MAX for plain LZMA; for LZMA2 an upper bound
///        on how much input this chunk may consume.
///
/// \return LZMA_OK when more input or output space is needed;
///         LZMA_STREAM_END when everything has been encoded and flushed.
extern lzma_ret
lzma_lzma_encode(lzma_coder *restrict coder, lzma_mf *restrict mf,
        uint8_t *restrict out, size_t *restrict out_pos,
        size_t out_size, uint32_t limit)
{
    // Initialize the stream if no data has been encoded yet.
    if (!coder->is_initialized && !encode_init(coder, mf))
        return LZMA_OK;

    // Get the lowest bits of the uncompressed offset from the LZ layer.
    uint32_t position = mf_position(mf);

    while (true) {
        // Encode pending bits, if any. Calling this before encoding
        // the next symbol is needed only with plain LZMA, since
        // LZMA2 always provides big enough buffer to flush
        // everything out from the range encoder. For the same reason,
        // rc_encode() never returns true when this function is used
        // as part of LZMA2 encoder.
        if (rc_encode(&coder->rc, out, out_pos, out_size)) {
            assert(limit == UINT32_MAX);
            return LZMA_OK;
        }

        // With LZMA2 we need to take care that compressed size of
        // a chunk doesn't get too big.
        // TODO
        if (limit != UINT32_MAX
                && (mf->read_pos - mf->read_ahead >= limit
                    || *out_pos + rc_pending(&coder->rc)
                        >= LZMA2_CHUNK_MAX
                            - LOOP_INPUT_MAX))
            break;

        // Check that there is some input to process.
        if (mf->read_pos >= mf->read_limit) {
            if (mf->action == LZMA_RUN)
                return LZMA_OK;

            // read_ahead != 0 means there are buffered symbols
            // still to be encoded before finishing.
            if (mf->read_ahead == 0)
                break;
        }

        // Get optimal match (repeat position and length).
        // Value ranges for pos:
        //   - [0, REP_DISTANCES): repeated match
        //   - [REP_DISTANCES, UINT32_MAX):
        //     match at (pos - REP_DISTANCES)
        //   - UINT32_MAX: not a match but a literal
        // Value ranges for len:
        //   - [MATCH_LEN_MIN, MATCH_LEN_MAX]
        uint32_t len;
        uint32_t back;

        if (coder->fast_mode)
            lzma_lzma_optimum_fast(coder, mf, &back, &len);
        else
            lzma_lzma_optimum_normal(
                    coder, mf, &back, &len, position);

        encode_symbol(coder, mf, back, len, position);

        position += len;
    }

    if (!coder->is_flushed) {
        coder->is_flushed = true;

        // We don't support encoding plain LZMA streams without EOPM,
        // and LZMA2 doesn't use EOPM at LZMA level.
        if (limit == UINT32_MAX)
            encode_eopm(coder, position);

        // Flush the remaining bytes from the range encoder.
        rc_flush(&coder->rc);

        // Copy the remaining bytes to the output buffer. If there
        // isn't enough output space, we will copy out the remaining
        // bytes on the next call to this function by using
        // the rc_encode() call in the encoding loop above.
        if (rc_encode(&coder->rc, out, out_pos, out_size)) {
            assert(limit == UINT32_MAX);
            return LZMA_OK;
        }
    }

    // Make it ready for the next LZMA2 chunk.
    coder->is_flushed = false;

    return LZMA_STREAM_END;
}
404
405
406 static lzma_ret
lzma_encode(lzma_coder * restrict coder,lzma_mf * restrict mf,uint8_t * restrict out,size_t * restrict out_pos,size_t out_size)407 lzma_encode(lzma_coder *restrict coder, lzma_mf *restrict mf,
408 uint8_t *restrict out, size_t *restrict out_pos,
409 size_t out_size)
410 {
411 // Plain LZMA has no support for sync-flushing.
412 if (unlikely(mf->action == LZMA_SYNC_FLUSH))
413 return LZMA_OPTIONS_ERROR;
414
415 return lzma_lzma_encode(coder, mf, out, out_pos, out_size, UINT32_MAX);
416 }
417
418
419 ////////////////////
420 // Initialization //
421 ////////////////////
422
423 static bool
is_options_valid(const lzma_options_lzma * options)424 is_options_valid(const lzma_options_lzma *options)
425 {
426 // Validate some of the options. LZ encoder validates nice_len too
427 // but we need a valid value here earlier.
428 return is_lclppb_valid(options)
429 && options->nice_len >= MATCH_LEN_MIN
430 && options->nice_len <= MATCH_LEN_MAX
431 && (options->mode == LZMA_MODE_FAST
432 || options->mode == LZMA_MODE_NORMAL);
433 }
434
435
436 static void
set_lz_options(lzma_lz_options * lz_options,const lzma_options_lzma * options)437 set_lz_options(lzma_lz_options *lz_options, const lzma_options_lzma *options)
438 {
439 // LZ encoder initialization does the validation for these so we
440 // don't need to validate here.
441 lz_options->before_size = OPTS;
442 lz_options->dict_size = options->dict_size;
443 lz_options->after_size = LOOP_INPUT_MAX;
444 lz_options->match_len_max = MATCH_LEN_MAX;
445 lz_options->nice_len = options->nice_len;
446 lz_options->match_finder = options->mf;
447 lz_options->depth = options->depth;
448 lz_options->preset_dict = options->preset_dict;
449 lz_options->preset_dict_size = options->preset_dict_size;
450 return;
451 }
452
453
454 static void
length_encoder_reset(lzma_length_encoder * lencoder,const uint32_t num_pos_states,const bool fast_mode)455 length_encoder_reset(lzma_length_encoder *lencoder,
456 const uint32_t num_pos_states, const bool fast_mode)
457 {
458 bit_reset(lencoder->choice);
459 bit_reset(lencoder->choice2);
460
461 for (size_t pos_state = 0; pos_state < num_pos_states; ++pos_state) {
462 bittree_reset(lencoder->low[pos_state], LEN_LOW_BITS);
463 bittree_reset(lencoder->mid[pos_state], LEN_MID_BITS);
464 }
465
466 bittree_reset(lencoder->high, LEN_HIGH_BITS);
467
468 if (!fast_mode)
469 for (size_t pos_state = 0; pos_state < num_pos_states;
470 ++pos_state)
471 length_update_prices(lencoder, pos_state);
472
473 return;
474 }
475
476
/// \brief Reset the LZMA encoder state
///
/// Validates the options and resets every probability model, the range
/// encoder, the state machine, and the rep-distance history to their
/// initial values. Used both at encoder creation and at LZMA2 state
/// resets.
extern lzma_ret
lzma_lzma_encoder_reset(lzma_coder *coder, const lzma_options_lzma *options)
{
    if (!is_options_valid(options))
        return LZMA_OPTIONS_ERROR;

    // Derive the bit masks used to pick pos_state and the literal
    // subcoder from lc/lp/pb.
    coder->pos_mask = (1U << options->pb) - 1;
    coder->literal_context_bits = options->lc;
    coder->literal_pos_mask = (1U << options->lp) - 1;

    // Range coder
    rc_reset(&coder->rc);

    // State
    coder->state = 0;
    for (size_t i = 0; i < REP_DISTANCES; ++i)
        coder->reps[i] = 0;

    literal_init(coder->literal, options->lc, options->lp);

    // Bit encoders
    for (size_t i = 0; i < STATES; ++i) {
        for (size_t j = 0; j <= coder->pos_mask; ++j) {
            bit_reset(coder->is_match[i][j]);
            bit_reset(coder->is_rep0_long[i][j]);
        }

        bit_reset(coder->is_rep[i]);
        bit_reset(coder->is_rep0[i]);
        bit_reset(coder->is_rep1[i]);
        bit_reset(coder->is_rep2[i]);
    }

    for (size_t i = 0; i < FULL_DISTANCES - END_POS_MODEL_INDEX; ++i)
        bit_reset(coder->pos_special[i]);

    // Bit tree encoders
    for (size_t i = 0; i < LEN_TO_POS_STATES; ++i)
        bittree_reset(coder->pos_slot[i], POS_SLOT_BITS);

    bittree_reset(coder->pos_align, ALIGN_BITS);

    // Length encoders
    length_encoder_reset(&coder->match_len_encoder,
            1U << options->pb, coder->fast_mode);

    length_encoder_reset(&coder->rep_len_encoder,
            1U << options->pb, coder->fast_mode);

    // Price counts are incremented every time appropriate probabilities
    // are changed. price counts are set to zero when the price tables
    // are updated, which is done when the appropriate price counts have
    // big enough value, and lzma_mf.read_ahead == 0 which happens at
    // least every OPTS (a few thousand) possible price count increments.
    //
    // By resetting price counts to UINT32_MAX / 2, we make sure that the
    // price tables will be initialized before they will be used (since
    // the value is definitely big enough), and that it is OK to increment
    // price counts without risk of integer overflow (since UINT32_MAX / 2
    // is small enough). The current code doesn't increment price counts
    // before initializing price tables, but it maybe done in future if
    // we add support for saving the state between LZMA2 chunks.
    coder->match_price_count = UINT32_MAX / 2;
    coder->align_price_count = UINT32_MAX / 2;

    coder->opts_end_index = 0;
    coder->opts_current_index = 0;

    return LZMA_OK;
}
547
548
/// \brief Allocate and initialize an LZMA encoder
///
/// Allocates the lzma_coder if needed, sets the mode-dependent fields
/// (fast mode flag, distance table size, length price table sizes),
/// fills in the LZ-layer options, and finally resets the encoder state.
extern lzma_ret
lzma_lzma_encoder_create(lzma_coder **coder_ptr, lzma_allocator *allocator,
        const lzma_options_lzma *options, lzma_lz_options *lz_options)
{
    // Allocate lzma_coder if it wasn't already allocated.
    if (*coder_ptr == NULL) {
        *coder_ptr = lzma_alloc(sizeof(lzma_coder), allocator);
        if (*coder_ptr == NULL)
            return LZMA_MEM_ERROR;
    }

    lzma_coder *coder = *coder_ptr;

    // Set compression mode. We haven't validated the options yet,
    // but it's OK here, since nothing bad happens with invalid
    // options in the code below, and they will get rejected by
    // lzma_lzma_encoder_reset() call at the end of this function.
    switch (options->mode) {
        case LZMA_MODE_FAST:
            coder->fast_mode = true;
            break;

        case LZMA_MODE_NORMAL: {
            coder->fast_mode = false;

            // Set dist_table_size.
            // Round the dictionary size up to next 2^n.
            uint32_t log_size = 0;
            while ((UINT32_C(1) << log_size) < options->dict_size)
                ++log_size;

            coder->dist_table_size = log_size * 2;

            // Length encoders' price table size
            coder->match_len_encoder.table_size
                    = options->nice_len + 1 - MATCH_LEN_MIN;
            coder->rep_len_encoder.table_size
                    = options->nice_len + 1 - MATCH_LEN_MIN;
            break;
        }

        default:
            return LZMA_OPTIONS_ERROR;
    }

    // We don't need to write the first byte as literal if there is
    // a non-empty preset dictionary. encode_init() wouldn't even work
    // if there is a non-empty preset dictionary, because encode_init()
    // assumes that position is zero and previous byte is also zero.
    coder->is_initialized = options->preset_dict != NULL
            && options->preset_dict_size > 0;
    coder->is_flushed = false;

    set_lz_options(lz_options, options);

    return lzma_lzma_encoder_reset(coder, options);
}
606
607
/// Hook the plain LZMA encode function into the generic LZ encoder and
/// create/reset the LZMA-specific coder state.
static lzma_ret
lzma_encoder_init(lzma_lz_encoder *lz, lzma_allocator *allocator,
        const void *options, lzma_lz_options *lz_options)
{
    lz->code = &lzma_encode;
    return lzma_lzma_encoder_create(
            &lz->coder, allocator, options, lz_options);
}
616
617
/// Filter-chain initializer for the LZMA1 encoder: delegates to the
/// generic LZ encoder initialization with lzma_encoder_init() as the
/// LZMA-specific init callback.
extern lzma_ret
lzma_lzma_encoder_init(lzma_next_coder *next, lzma_allocator *allocator,
        const lzma_filter_info *filters)
{
    return lzma_lz_encoder_init(
            next, allocator, filters, &lzma_encoder_init);
}
625
626
627 extern uint64_t
lzma_lzma_encoder_memusage(const void * options)628 lzma_lzma_encoder_memusage(const void *options)
629 {
630 if (!is_options_valid(options))
631 return UINT64_MAX;
632
633 lzma_lz_options lz_options;
634 set_lz_options(&lz_options, options);
635
636 const uint64_t lz_memusage = lzma_lz_encoder_memusage(&lz_options);
637 if (lz_memusage == UINT64_MAX)
638 return UINT64_MAX;
639
640 return (uint64_t)(sizeof(lzma_coder)) + lz_memusage;
641 }
642
643
644 extern bool
lzma_lzma_lclppb_encode(const lzma_options_lzma * options,uint8_t * byte)645 lzma_lzma_lclppb_encode(const lzma_options_lzma *options, uint8_t *byte)
646 {
647 if (!is_lclppb_valid(options))
648 return true;
649
650 *byte = (options->pb * 5 + options->lp) * 9 + options->lc;
651 assert(*byte <= (4 * 5 + 4) * 9 + 8);
652
653 return false;
654 }
655
656
657 #ifdef HAVE_ENCODER_LZMA1
/// Encode the 5-byte LZMA1 properties field: one lc/lp/pb byte followed
/// by the dictionary size as a 32-bit little-endian integer.
extern lzma_ret
lzma_lzma_props_encode(const void *options, uint8_t *out)
{
    const lzma_options_lzma *const opt = options;

    // Invalid lc/lp/pb is a programming error at this point.
    if (lzma_lzma_lclppb_encode(opt, out))
        return LZMA_PROG_ERROR;

    integer_write_32(out + 1, opt->dict_size);

    return LZMA_OK;
}
670 #endif
671
672
673 extern LZMA_API(lzma_bool)
lzma_mode_is_supported(lzma_mode mode)674 lzma_mode_is_supported(lzma_mode mode)
675 {
676 return mode == LZMA_MODE_FAST || mode == LZMA_MODE_NORMAL;
677 }
678