1 #![allow(dead_code)]
2 use super::backward_references::kHashMul32;
3 //use super::super::alloc::{SliceWrapper, SliceWrapperMut};
4
5 use super::brotli_bit_stream::{BrotliBuildAndStoreHuffmanTreeFast, BrotliStoreHuffmanTree};
6 //caution: lots of the functions look structurally the same as two_pass,
7 // but have subtle index differences
8 // examples: IsMatch checks p1[4] and p1[5]
9 // the hoops that BuildAndStoreCommandPrefixCode goes through are subtly different in order
10 // (eg memcpy x+24, y instead of +24, y+40
11 // pretty much assume compress_fragment_two_pass is a trap! except for BrotliStoreMetaBlockHeader
12 use super::compress_fragment_two_pass::{BrotliStoreMetaBlockHeader, BrotliWriteBits, memcpy};
13 use super::entropy_encode::{BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, HuffmanTree,
14 NewHuffmanTree};
15 use super::static_dict::{BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64, FindMatchLengthWithLimit};
16 use super::super::alloc;
17 use super::util::{brotli_min_size_t, brotli_min_uint32_t, Log2FloorNonZero, FastLog2};
18
19 //static kHashMul32: u32 = 0x1e35a7bdu32;
20
// Seed histogram for the 128-symbol command/distance alphabet, cloned into
// `cmd_histo` at the start of each metablock so every usable symbol starts
// with a nonzero count.
// NOTE(review): the zero entries appear to mark symbols this fast encoder
// never emits — confirm against the upstream C `kCmdHistoSeed` before editing.
static kCmdHistoSeed: [u32; 128] =
  [0u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32,
   0u32, 0u32, 0u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32,
   1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 0u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32,
   1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32,
   1u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32,
   1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32,
   1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32,
   1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 1u32, 0u32, 0u32, 0u32, 0u32];
30
Hash(p: &[u8], shift: usize) -> u3231 fn Hash(p: &[u8], shift: usize) -> u32 {
32 let h: u64 = (BROTLI_UNALIGNED_LOAD64(p) << 24i32).wrapping_mul(kHashMul32 as (u64));
33 (h >> shift) as (u32)
34 }
IsMatch(p1: &[u8], p2: &[u8]) -> i3235 fn IsMatch(p1: &[u8], p2: &[u8]) -> i32 {
36 if !!(BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2) &&
37 (p1[(4usize)] as (i32) == p2[(4usize)] as (i32))) {
38 1i32
39 } else {
40 0i32
41 }
42 }
43
BuildAndStoreLiteralPrefixCode<AllocHT:alloc::Allocator<HuffmanTree>>(mht: &mut AllocHT, input: &[u8], input_size: usize, depths: &mut [u8], bits: &mut [u16], storage_ix: &mut usize, storage: &mut [u8]) -> usize44 fn BuildAndStoreLiteralPrefixCode<AllocHT:alloc::Allocator<HuffmanTree>>(mht: &mut AllocHT,
45 input: &[u8],
46 input_size: usize,
47 depths: &mut [u8],
48 bits: &mut [u16],
49 storage_ix: &mut usize,
50 storage: &mut [u8])
51 -> usize{
52 let mut histogram: [u32; 256] = [0; 256];
53 let mut histogram_total: usize;
54 let mut i: usize;
55 if input_size < (1i32 << 15i32) as (usize) {
56 i = 0usize;
57 while i < input_size {
58 {
59 let _rhs = 1;
60 let _lhs = &mut histogram[input[(i as (usize))] as (usize)];
61 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
62 }
63 i = i.wrapping_add(1 as (usize));
64 }
65 histogram_total = input_size;
66 i = 0usize;
67 while i < 256usize {
68 {
69 let adjust: u32 = (2u32).wrapping_mul(brotli_min_uint32_t(histogram[i], 11u32));
70 {
71 let _rhs = adjust;
72 let _lhs = &mut histogram[i];
73 *_lhs = (*_lhs).wrapping_add(_rhs);
74 }
75 histogram_total = histogram_total.wrapping_add(adjust as (usize));
76 }
77 i = i.wrapping_add(1 as (usize));
78 }
79 } else {
80 static kSampleRate: usize = 29usize;
81 i = 0usize;
82 while i < input_size {
83 {
84 let _rhs = 1;
85 let _lhs = &mut histogram[input[(i as (usize))] as (usize)];
86 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
87 }
88 i = i.wrapping_add(kSampleRate);
89 }
90 histogram_total =
91 input_size.wrapping_add(kSampleRate).wrapping_sub(1usize).wrapping_div(kSampleRate);
92 i = 0usize;
93 while i < 256usize {
94 {
95 let adjust: u32 =
96 (1u32).wrapping_add((2u32).wrapping_mul(brotli_min_uint32_t(histogram[i], 11u32)));
97 {
98 let _rhs = adjust;
99 let _lhs = &mut histogram[i];
100 *_lhs = (*_lhs).wrapping_add(_rhs);
101 }
102 histogram_total = histogram_total.wrapping_add(adjust as (usize));
103 }
104 i = i.wrapping_add(1 as (usize));
105 }
106 }
107 BrotliBuildAndStoreHuffmanTreeFast(mht,
108 &mut histogram[..],
109 histogram_total,
110 8usize,
111 depths,
112 bits,
113 storage_ix,
114 storage);
115 {
116 let mut literal_ratio: usize = 0usize;
117 i = 0usize;
118 while i < 256usize {
119 {
120 if histogram[i] != 0 {
121 literal_ratio =
122 literal_ratio.wrapping_add(histogram[i].wrapping_mul(depths[(i as (usize))] as (u32)) as
123 (usize));
124 }
125 }
126 i = i.wrapping_add(1 as (usize));
127 }
128 literal_ratio.wrapping_mul(125usize).wrapping_div(histogram_total)
129 }
130 }
/// Control-flow state for the metablock emission loop in
/// `BrotliCompressFragmentFastImpl`.
#[derive(PartialEq, Eq, Copy, Clone)]
pub enum CodeBlockState {
  /// Flush the trailing literals of the current block.
  EMIT_REMAINDER,
  /// Main match-finding and command-emission phase.
  EMIT_COMMANDS,
  /// Start a fresh metablock (or stop when no input remains).
  NEXT_BLOCK,
}
137
138
139
140
EmitInsertLen(insertlen: usize, depth: &[u8], bits: &[u16], histo: &mut [u32], storage_ix: &mut usize, storage: &mut [u8])141 fn EmitInsertLen(insertlen: usize,
142 depth: &[u8],
143 bits: &[u16],
144 histo: &mut [u32],
145 storage_ix: &mut usize,
146 storage: &mut [u8]) {
147 if insertlen < 6usize {
148 let code: usize = insertlen.wrapping_add(40usize);
149 BrotliWriteBits(depth[(code as (usize))] as (usize),
150 bits[(code as (usize))] as (u64),
151 storage_ix,
152 storage);
153 {
154 let _rhs = 1;
155 let _lhs = &mut histo[(code as (usize))];
156 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
157 }
158 } else if insertlen < 130usize {
159 let tail: usize = insertlen.wrapping_sub(2usize);
160 let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1u32);
161 let prefix: usize = tail >> nbits;
162 let inscode: usize = ((nbits << 1i32) as (usize)).wrapping_add(prefix).wrapping_add(42usize);
163 BrotliWriteBits(depth[(inscode as (usize))] as (usize),
164 bits[(inscode as (usize))] as (u64),
165 storage_ix,
166 storage);
167 BrotliWriteBits(nbits as (usize),
168 (tail as u64).wrapping_sub((prefix as u64) << nbits),
169 storage_ix,
170 storage);
171 {
172 let _rhs = 1;
173 let _lhs = &mut histo[(inscode as (usize))];
174 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
175 }
176 } else if insertlen < 2114usize {
177 let tail: usize = insertlen.wrapping_sub(66usize);
178 let nbits: u32 = Log2FloorNonZero(tail as u64);
179 let code: usize = nbits.wrapping_add(50u32) as (usize);
180 BrotliWriteBits(depth[(code as (usize))] as (usize),
181 bits[(code as (usize))] as (u64),
182 storage_ix,
183 storage);
184 BrotliWriteBits(nbits as (usize),
185 (tail as u64).wrapping_sub(1 << nbits),
186 storage_ix,
187 storage);
188 {
189 let _rhs = 1;
190 let _lhs = &mut histo[(code as (usize))];
191 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
192 }
193 } else {
194 BrotliWriteBits(depth[(61usize)] as (usize),
195 bits[(61usize)] as (u64),
196 storage_ix,
197 storage);
198 BrotliWriteBits(12usize,
199 (insertlen as u64).wrapping_sub(2114u64),
200 storage_ix,
201 storage);
202 {
203 let _rhs = 1;
204 let _lhs = &mut histo[(61usize)];
205 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
206 }
207 }
208 }
209
/// Decides whether the pending literal run should be stored as an
/// uncompressed metablock: returns 1 when the run (`insertlen`) is at least
/// 50x the bytes already emitted since the metablock start (`delta`) AND
/// the literal ratio indicates literals compress poorly; else 0.
fn ShouldUseUncompressedMode(delta: isize, insertlen: usize, literal_ratio: usize) -> i32 {
  let compressed = delta as usize;
  let run_dominates = compressed.wrapping_mul(50) <= insertlen;
  (run_dominates && literal_ratio > 980) as i32
}
/// Truncates the bit stream back to bit position `new_storage_ix`,
/// clearing any bits already written beyond it in the current partial byte
/// so subsequent OR-style bit writes land on zeroed space.
fn RewindBitPosition(new_storage_ix: usize, storage_ix: &mut usize, storage: &mut [u8]) {
  let keep_bits = new_storage_ix & 7;
  let keep_mask = (1u32 << keep_bits).wrapping_sub(1) as u8;
  storage[new_storage_ix >> 3] &= keep_mask;
  *storage_ix = new_storage_ix;
}
230
EmitUncompressedMetaBlock(begin: &[u8], len: usize, storage_ix_start: usize, storage_ix: &mut usize, storage: &mut [u8])231 fn EmitUncompressedMetaBlock(begin: &[u8],
232 len: usize,
233 storage_ix_start: usize,
234 storage_ix: &mut usize,
235 storage: &mut [u8]) {
236 RewindBitPosition(storage_ix_start, storage_ix, storage);
237 BrotliStoreMetaBlockHeader(len, 1i32, storage_ix, storage);
238 *storage_ix = (*storage_ix).wrapping_add(7u32 as (usize)) & !7u32 as (usize);
239 memcpy(storage, ((*storage_ix >> 3i32) as (usize)), begin, 0, len);
240 *storage_ix = (*storage_ix).wrapping_add(len << 3i32);
241 storage[((*storage_ix >> 3i32) as (usize))] = 0i32 as (u8);
242 }
243
EmitLongInsertLen(insertlen: usize, depth: &[u8], bits: &[u16], histo: &mut [u32], storage_ix: &mut usize, storage: &mut [u8])244 fn EmitLongInsertLen(insertlen: usize,
245 depth: &[u8],
246 bits: &[u16],
247 histo: &mut [u32],
248 storage_ix: &mut usize,
249 storage: &mut [u8]) {
250 if insertlen < 22594usize {
251 BrotliWriteBits(depth[(62usize)] as (usize),
252 bits[(62usize)] as (u64),
253 storage_ix,
254 storage);
255 BrotliWriteBits(14usize,
256 (insertlen as u64).wrapping_sub(6210),
257 storage_ix,
258 storage);
259 {
260 let _rhs = 1;
261 let _lhs = &mut histo[(62usize)];
262 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
263 }
264 } else {
265 BrotliWriteBits(depth[(63usize)] as (usize),
266 bits[(63usize)] as (u64),
267 storage_ix,
268 storage);
269 BrotliWriteBits(24usize,
270 (insertlen as u64).wrapping_sub(22594),
271 storage_ix,
272 storage);
273 {
274 let _rhs = 1;
275 let _lhs = &mut histo[(63usize)];
276 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
277 }
278 }
279 }
280
EmitLiterals(input: &[u8], len: usize, depth: &[u8], bits: &[u16], storage_ix: &mut usize, storage: &mut [u8])281 fn EmitLiterals(input: &[u8],
282 len: usize,
283 depth: &[u8],
284 bits: &[u16],
285 storage_ix: &mut usize,
286 storage: &mut [u8]) {
287 let mut j: usize;
288 j = 0usize;
289 while j < len {
290 {
291 let lit: u8 = input[(j as (usize))];
292 BrotliWriteBits(depth[(lit as (usize))] as (usize),
293 bits[(lit as (usize))] as (u64),
294 storage_ix,
295 storage);
296 }
297 j = j.wrapping_add(1 as (usize));
298 }
299 }
300
EmitDistance(distance: usize, depth: &[u8], bits: &[u16], histo: &mut [u32], storage_ix: &mut usize, storage: &mut [u8])301 fn EmitDistance(distance: usize,
302 depth: &[u8],
303 bits: &[u16],
304 histo: &mut [u32],
305 storage_ix: &mut usize,
306 storage: &mut [u8]) {
307 let d: u64 = distance.wrapping_add(3usize) as u64;
308 let nbits: u32 = Log2FloorNonZero(d as u64).wrapping_sub(1u32);
309 let prefix: u64 = d >> nbits & 1;
310 let offset: u64 = (2u64).wrapping_add(prefix) << nbits;
311 let distcode: u64 = ((2u32).wrapping_mul(nbits.wrapping_sub(1u32)) as (u64))
312 .wrapping_add(prefix)
313 .wrapping_add(80u64);
314 BrotliWriteBits(depth[(distcode as (usize))] as (usize),
315 bits[(distcode as (usize))] as (u64),
316 storage_ix,
317 storage);
318 BrotliWriteBits(nbits as (usize),
319 d.wrapping_sub(offset),
320 storage_ix,
321 storage);
322 {
323 let _rhs = 1;
324 let _lhs = &mut histo[(distcode as (usize))];
325 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
326 }
327 }
328
329
EmitCopyLenLastDistance(copylen: usize, depth: &[u8], bits: &[u16], histo: &mut [u32], storage_ix: &mut usize, storage: &mut [u8])330 fn EmitCopyLenLastDistance(copylen: usize,
331 depth: &[u8],
332 bits: &[u16],
333 histo: &mut [u32],
334 storage_ix: &mut usize,
335 storage: &mut [u8]) {
336 if copylen < 12usize {
337 BrotliWriteBits(depth[(copylen.wrapping_sub(4usize) as (usize))] as (usize),
338 bits[(copylen.wrapping_sub(4usize) as (usize))] as (u64),
339 storage_ix,
340 storage);
341 {
342 let _rhs = 1;
343 let _lhs = &mut histo[(copylen.wrapping_sub(4usize) as (usize))];
344 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
345 }
346 } else if copylen < 72usize {
347 let tail: usize = copylen.wrapping_sub(8usize);
348 let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1u32);
349 let prefix: usize = tail >> nbits;
350 let code: usize = ((nbits << 1i32) as (usize)).wrapping_add(prefix).wrapping_add(4usize);
351 BrotliWriteBits(depth[(code as (usize))] as (usize),
352 bits[(code as (usize))] as (u64),
353 storage_ix,
354 storage);
355 BrotliWriteBits(nbits as (usize),
356 tail.wrapping_sub(prefix << nbits) as u64,
357 storage_ix,
358 storage);
359 {
360 let _rhs = 1;
361 let _lhs = &mut histo[(code as (usize))];
362 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
363 }
364 } else if copylen < 136usize {
365 let tail: usize = copylen.wrapping_sub(8usize);
366 let code: usize = (tail >> 5i32).wrapping_add(30usize);
367 BrotliWriteBits(depth[(code as (usize))] as (usize),
368 bits[(code as (usize))] as (u64),
369 storage_ix,
370 storage);
371 BrotliWriteBits(5usize, tail as u64 & 31, storage_ix, storage);
372 BrotliWriteBits(depth[(64usize)] as (usize),
373 bits[(64usize)] as (u64),
374 storage_ix,
375 storage);
376 {
377 let _rhs = 1;
378 let _lhs = &mut histo[(code as (usize))];
379 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
380 }
381 {
382 let _rhs = 1;
383 let _lhs = &mut histo[(64usize)];
384 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
385 }
386 } else if copylen < 2120usize {
387 let tail: usize = copylen.wrapping_sub(72usize);
388 let nbits: u32 = Log2FloorNonZero(tail as u64);
389 let code: usize = nbits.wrapping_add(28u32) as (usize);
390 BrotliWriteBits(depth[(code as (usize))] as (usize),
391 bits[(code as (usize))] as (u64),
392 storage_ix,
393 storage);
394 BrotliWriteBits(nbits as (usize),
395 (tail as u64).wrapping_sub(1u64 << nbits),
396 storage_ix,
397 storage);
398 BrotliWriteBits(depth[(64usize)] as (usize),
399 bits[(64usize)] as (u64),
400 storage_ix,
401 storage);
402 {
403 let _rhs = 1;
404 let _lhs = &mut histo[(code as (usize))];
405 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
406 }
407 {
408 let _rhs = 1;
409 let _lhs = &mut histo[(64usize)];
410 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
411 }
412 } else {
413 BrotliWriteBits(depth[(39usize)] as (usize),
414 bits[(39usize)] as (u64),
415 storage_ix,
416 storage);
417 BrotliWriteBits(24usize,
418 copylen.wrapping_sub(2120usize) as u64,
419 storage_ix,
420 storage);
421 BrotliWriteBits(depth[(64usize)] as (usize),
422 bits[(64usize)] as (u64),
423 storage_ix,
424 storage);
425 {
426 let _rhs = 1;
427 let _lhs = &mut histo[(39usize)];
428 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
429 }
430 {
431 let _rhs = 1;
432 let _lhs = &mut histo[(64usize)];
433 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
434 }
435 }
436 }
437
HashBytesAtOffset(v: u64, offset: i32, shift: usize) -> u32438 fn HashBytesAtOffset(v: u64, offset: i32, shift: usize) -> u32 {
439 {
440 let h: u64 = (v >> 8i32 * offset << 24i32).wrapping_mul(kHashMul32 as (u64));
441 (h >> shift) as (u32)
442 }
443 }
444
EmitCopyLen(copylen: usize, depth: &[u8], bits: &[u16], histo: &mut [u32], storage_ix: &mut usize, storage: &mut [u8])445 fn EmitCopyLen(copylen: usize,
446 depth: &[u8],
447 bits: &[u16],
448 histo: &mut [u32],
449 storage_ix: &mut usize,
450 storage: &mut [u8]) {
451 if copylen < 10usize {
452 BrotliWriteBits(depth[(copylen.wrapping_add(14usize) as (usize))] as (usize),
453 bits[(copylen.wrapping_add(14usize) as (usize))] as (u64),
454 storage_ix,
455 storage);
456 {
457 let _rhs = 1;
458 let _lhs = &mut histo[(copylen.wrapping_add(14usize) as (usize))];
459 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
460 }
461 } else if copylen < 134usize {
462 let tail: usize = copylen.wrapping_sub(6usize);
463 let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1u32);
464 let prefix: usize = tail >> nbits;
465 let code: usize = ((nbits << 1i32) as (usize)).wrapping_add(prefix).wrapping_add(20usize);
466 BrotliWriteBits(depth[(code as (usize))] as (usize),
467 bits[(code as (usize))] as (u64),
468 storage_ix,
469 storage);
470 BrotliWriteBits(nbits as (usize),
471 (tail as u64).wrapping_sub((prefix as u64) << nbits),
472 storage_ix,
473 storage);
474 {
475 let _rhs = 1;
476 let _lhs = &mut histo[(code as (usize))];
477 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
478 }
479 } else if copylen < 2118usize {
480 let tail: usize = copylen.wrapping_sub(70usize);
481 let nbits: u32 = Log2FloorNonZero(tail as u64);
482 let code: usize = nbits.wrapping_add(28u32) as (usize);
483 BrotliWriteBits(depth[(code as (usize))] as (usize),
484 bits[(code as (usize))] as (u64),
485 storage_ix,
486 storage);
487 BrotliWriteBits(nbits as (usize),
488 (tail as u64).wrapping_sub(1 << nbits),
489 storage_ix,
490 storage);
491 {
492 let _rhs = 1;
493 let _lhs = &mut histo[(code as (usize))];
494 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
495 }
496 } else {
497 BrotliWriteBits(depth[(39usize)] as (usize),
498 bits[(39usize)] as (u64),
499 storage_ix,
500 storage);
501 BrotliWriteBits(24usize,
502 (copylen as u64).wrapping_sub(2118),
503 storage_ix,
504 storage);
505 {
506 let _rhs = 1;
507 let _lhs = &mut histo[(39usize)];
508 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
509 }
510 }
511 }
512
ShouldMergeBlock(data: &[u8], len: usize, depths: &[u8]) -> i32513 fn ShouldMergeBlock(data: &[u8], len: usize, depths: &[u8]) -> i32 {
514 let mut histo: [usize; 256] = [0; 256];
515 static kSampleRate: usize = 43usize;
516 let mut i: usize;
517 i = 0usize;
518 while i < len {
519 {
520 let _rhs = 1;
521 let _lhs = &mut histo[data[(i as (usize))] as (usize)];
522 *_lhs = (*_lhs).wrapping_add(_rhs as (usize));
523 }
524 i = i.wrapping_add(kSampleRate);
525 }
526 {
527 let total: usize = len.wrapping_add(kSampleRate).wrapping_sub(1usize).wrapping_div(kSampleRate);
528 let mut r: super::util::floatX = (FastLog2(total as u64) + 0.5 as super::util::floatX) * total as (super::util::floatX) + 200i32 as (super::util::floatX);
529 i = 0usize;
530 while i < 256usize {
531 {
532 r = r - histo[i] as (super::util::floatX) * (depths[(i as (usize))] as (super::util::floatX) + FastLog2(histo[i] as u64));
533 }
534 i = i.wrapping_add(1 as (usize));
535 }
536 if !!(r >= 0.0 as super::util::floatX) { 1i32 } else { 0i32 }
537 }
538 }
539
/// Overwrites `n_bits` bits of `array` starting at bit position `pos` with
/// the low bits of `bits` (LSB-first within each byte), leaving all
/// surrounding bits untouched. Used to patch the metablock length field
/// after the block has already been written.
///
/// Idiom fix: the hand-rolled `brotli_min_size_t` helper is replaced with
/// std `Ord::min`; behavior is unchanged.
fn UpdateBits(mut n_bits: usize, mut bits: u32, mut pos: usize, array: &mut [u8]) {
  while n_bits > 0 {
    let byte_pos = pos >> 3;
    let n_unchanged_bits = pos & 7;
    // Patch at most up to the end of the current byte.
    let n_changed_bits = n_bits.min(8 - n_unchanged_bits);
    let total_bits = n_unchanged_bits + n_changed_bits;
    // Keep the bits above the patched span and below the start position.
    let mask = !(1u32 << total_bits).wrapping_sub(1) |
               (1u32 << n_unchanged_bits).wrapping_sub(1);
    let unchanged = array[byte_pos] as u32 & mask;
    let changed = bits & (1u32 << n_changed_bits).wrapping_sub(1);
    array[byte_pos] = (changed << n_unchanged_bits | unchanged) as u8;
    n_bits -= n_changed_bits;
    bits >>= n_changed_bits;
    pos = pos.wrapping_add(n_changed_bits);
  }
}
556
557
/// Builds the two 64-symbol Huffman codes for the combined command alphabet
/// (command symbols in histogram[0..64], distance symbols in
/// histogram[64..128]), stores both code tables to the bit stream, and
/// fills `depth`/`bits` with the resulting lengths and codewords.
///
/// CAUTION (see file header): the memcpy offsets below differ subtly from
/// the two-pass variant — do not "harmonize" them.
fn BuildAndStoreCommandPrefixCode(histogram: &[u32],
                                  depth: &mut [u8],
                                  bits: &mut [u16],
                                  storage_ix: &mut usize,
                                  storage: &mut [u8]) {
  // Scratch space for BrotliCreateHuffmanTree (2 * 64 + 1 nodes).
  let mut tree: [HuffmanTree; 129] = [NewHuffmanTree(0, 0, 0); 129];
  let mut cmd_depth: [u8; 704] = [0i32 as (u8); 704];

  let mut cmd_bits: [u16; 64] = [0; 64];
  // Command code lengths (depth limit 15) and distance lengths (limit 14).
  BrotliCreateHuffmanTree(&histogram[..], 64usize, 15i32, &mut tree[..], depth);
  BrotliCreateHuffmanTree(&histogram[(64usize)..],
                          64usize,
                          14i32,
                          &mut tree[..],
                          &mut depth[(64usize)..]);
  /* We have to jump through a few hoops here in order to compute
     the command bits because the symbols are in a different order than in
     the full alphabet. This looks complicated, but having the symbols
     in this order in the command bits saves a few branches in the Emit*
     functions. */
  // Permutation into canonical order: cmd_depth[24..32] <- depth[40..48],
  // [32..40] <- depth[24..32], [40..48] <- depth[48..56],
  // [48..56] <- depth[32..40]; ranges [0..24] and [56..64] stay in place.
  memcpy(&mut cmd_depth[..], 0, depth, 0, 24usize);
  memcpy(&mut cmd_depth[..],
         24i32 as (usize),
         depth,
         (40usize),
         8usize);
  memcpy(&mut cmd_depth[..],
         32i32 as (usize),
         depth,
         (24usize),
         8usize);
  memcpy(&mut cmd_depth[..],
         40i32 as (usize),
         depth,
         (48usize),
         8usize);
  memcpy(&mut cmd_depth[..],
         48i32 as (usize),
         depth,
         (32usize),
         8usize);
  memcpy(&mut cmd_depth[..],
         56i32 as (usize),
         depth,
         (56usize),
         8usize);
  BrotliConvertBitDepthsToSymbols(&mut cmd_depth[..], 64usize, &mut cmd_bits[..]);
  // Copy codewords back with the inverse permutation.
  memcpy(bits, 0, &cmd_bits[..], 0, 24usize);
  memcpy(bits, (24usize), &cmd_bits[..], 32i32 as (usize), 8usize);
  memcpy(bits, (32usize), &cmd_bits[..], 48i32 as (usize), 8usize);
  memcpy(bits, (40usize), &cmd_bits[..], 24i32 as (usize), 8usize);
  memcpy(bits, (48usize), &cmd_bits[..], 40i32 as (usize), 8usize);
  memcpy(bits, (56usize), &cmd_bits[..], 56i32 as (usize), 8usize);
  BrotliConvertBitDepthsToSymbols(&mut depth[(64usize)..], 64usize, &mut bits[(64usize)..]);
  {
    let mut i: usize;
    // Expand the 64 command depths into a sparse 704-entry table in the
    // symbol order BrotliStoreHuffmanTree expects.
    // NOTE(review): the 64/128/192/384 offsets and stride-8 placement mirror
    // the upstream C encoder — confirm against compress_fragment.c before
    // changing any of them.
    for item in cmd_depth[..64].iter_mut() {
      *item = 0;
    }
    memcpy(&mut cmd_depth[..], 0, depth, 0, 8usize);
    memcpy(&mut cmd_depth[..],
           64i32 as (usize),
           depth,
           (8usize),
           8usize);
    memcpy(&mut cmd_depth[..],
           128i32 as (usize),
           depth,
           (16usize),
           8usize);
    memcpy(&mut cmd_depth[..],
           192i32 as (usize),
           depth,
           (24usize),
           8usize);
    memcpy(&mut cmd_depth[..],
           384i32 as (usize),
           depth,
           (32usize),
           8usize);
    i = 0usize;
    while i < 8usize {
      {
        // Symbols 40..63 are spread out with stride 8.
        cmd_depth[(128usize).wrapping_add((8usize).wrapping_mul(i))] = depth[i.wrapping_add(40)];
        cmd_depth[(256usize).wrapping_add((8usize).wrapping_mul(i))] = depth[i.wrapping_add(48)];
        cmd_depth[(448usize).wrapping_add((8usize).wrapping_mul(i))] = depth[i.wrapping_add(56)];
      }
      i = i.wrapping_add(1 as (usize));
    }
    BrotliStoreHuffmanTree(&mut cmd_depth[..],
                           704usize,
                           &mut tree[..],
                           storage_ix,
                           storage);
  }
  // Store the distance code table separately.
  BrotliStoreHuffmanTree(&mut depth[(64usize)..],
                         64usize,
                         &mut tree[..],
                         storage_ix,
                         storage);
}
659
660 #[allow(unused_assignments)]
BrotliCompressFragmentFastImpl<AllocHT:alloc::Allocator<HuffmanTree>>(m: &mut AllocHT, input_ptr: &[u8], mut input_size: usize, is_last: i32, table: &mut [i32], table_bits: usize, cmd_depth: &mut [u8], cmd_bits: &mut [u16], cmd_code_numbits: &mut usize, cmd_code: &mut [u8], storage_ix: &mut usize, storage: &mut [u8])661 fn BrotliCompressFragmentFastImpl<AllocHT:alloc::Allocator<HuffmanTree>>(m: &mut AllocHT,
662 input_ptr: &[u8],
663 mut input_size: usize,
664 is_last: i32,
665 table: &mut [i32],
666 table_bits: usize,
667 cmd_depth: &mut [u8],
668 cmd_bits: &mut [u16],
669 cmd_code_numbits: &mut usize,
670 cmd_code: &mut [u8],
671 storage_ix: &mut usize,
672 storage: &mut [u8]){
673 let mut cmd_histo = [0u32; 128];
674 let mut ip_end = 0usize;
675 let mut next_emit = 0usize;
676 let base_ip = 0usize;
677 static kFirstBlockSize: usize = (3i32 << 15i32) as (usize);
678 static kMergeBlockSize: usize = (1i32 << 16i32) as (usize);
679 let kInputMarginBytes = 16usize;
680 let kMinMatchLen = 5usize;
681 let mut metablock_start = 0usize;
682 let mut block_size = brotli_min_size_t(input_size, kFirstBlockSize);
683 let mut total_block_size = block_size;
684 let mut mlen_storage_ix = (*storage_ix).wrapping_add(3usize);
685 let mut lit_depth = [0u8; 256];
686 let mut lit_bits = [0u16; 256];
687 let mut literal_ratio: usize;
688 let mut input_index = 0usize;
689 let mut last_distance: i32;
690 let shift: usize = (64u32 as usize).wrapping_sub(table_bits);
691 BrotliStoreMetaBlockHeader(block_size, 0i32, storage_ix, storage);
692 BrotliWriteBits(13usize, 0, storage_ix, storage);
693 literal_ratio = BuildAndStoreLiteralPrefixCode(m,
694 &input_ptr[input_index..],
695 block_size,
696 &mut lit_depth[..],
697 &mut lit_bits[..],
698 storage_ix,
699 storage);
700 {
701 let mut i = 0usize;
702 while i.wrapping_add(7usize) < *cmd_code_numbits {
703 BrotliWriteBits(8usize,
704 cmd_code[i >> 3] as u64,
705 storage_ix,
706 storage);
707 i = i.wrapping_add(8usize);
708 }
709 }
710 BrotliWriteBits(*cmd_code_numbits & 7usize,
711 cmd_code[*cmd_code_numbits >> 3i32] as u64,
712 storage_ix,
713 storage);
714 let mut code_block_selection: CodeBlockState = CodeBlockState::EMIT_COMMANDS;
715 loop {
716 let mut ip_index: usize;
717 if code_block_selection == CodeBlockState::EMIT_COMMANDS {
718 cmd_histo[..128].clone_from_slice(&kCmdHistoSeed[..]);
719 ip_index = input_index;
720 last_distance = -1i32;
721 ip_end = input_index.wrapping_add(block_size);
722 if block_size >= kInputMarginBytes {
723 let len_limit: usize = brotli_min_size_t(block_size.wrapping_sub(kMinMatchLen),
724 input_size.wrapping_sub(kInputMarginBytes));
725 let ip_limit: usize = input_index.wrapping_add(len_limit);
726 let mut next_hash = Hash(&input_ptr[{
727 ip_index = ip_index.wrapping_add(1usize);
728 ip_index
729 }..],
730 shift);
731 loop {
732 let mut skip = 32u32;
733 let mut next_ip = ip_index;
734 let mut candidate = 0usize;
735 loop {
736 {
737 'break15: loop {
738 {
739 let hash = next_hash;
740 let bytes_between_hash_lookups: u32 = {
741 let _old = skip;
742 skip = skip.wrapping_add(1u32);
743 _old
744 } >>
745 5i32;
746 ip_index = next_ip;
747 next_ip = ip_index.wrapping_add(bytes_between_hash_lookups as usize);
748 if next_ip > ip_limit {
749 code_block_selection = CodeBlockState::EMIT_REMAINDER;
750 break 'break15;
751 }
752 next_hash = Hash(&input_ptr[next_ip..], shift);
753 candidate = ip_index.wrapping_sub(last_distance as usize);
754 if IsMatch(&input_ptr[ip_index..],
755 &input_ptr[candidate..]) != 0 {
756 if candidate < ip_index {
757 table[hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
758 break 'break15;
759 }
760 }
761 candidate = base_ip.wrapping_add(table[hash as usize] as usize);
762 table[hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
763 }
764 if IsMatch(&input_ptr[ip_index..],
765 &input_ptr[candidate..]) != 0 {
766 break;
767 }
768 }
769 }
770 if !(ip_index.wrapping_sub(candidate) >
771 (1usize << 18i32).wrapping_sub(16usize) as isize as usize &&
772 (code_block_selection as i32 == CodeBlockState::EMIT_COMMANDS as i32)) {
773 break;
774 }
775 }
776 if code_block_selection as i32 != CodeBlockState::EMIT_COMMANDS as i32 {
777 break;
778 }
779 {
780 let base: usize = ip_index;
781 let matched = (5usize)
782 .wrapping_add(FindMatchLengthWithLimit(&input_ptr[candidate + 5..],
783 &input_ptr[ip_index + 5..],
784 ip_end.wrapping_sub(ip_index)
785 .wrapping_sub(5usize)));
786 let distance = base.wrapping_sub(candidate) as i32;
787 let insert = base.wrapping_sub(next_emit);
788 ip_index = ip_index.wrapping_add(matched);
789 if insert < 6210 {
790 EmitInsertLen(insert,
791 cmd_depth,
792 cmd_bits,
793 &mut cmd_histo[..],
794 storage_ix,
795 storage);
796 } else if ShouldUseUncompressedMode((next_emit as isize) -
797 (metablock_start as isize),
798 insert,
799 literal_ratio) != 0 {
800 EmitUncompressedMetaBlock(&input_ptr[metablock_start..],
801 base.wrapping_sub(metablock_start),
802 mlen_storage_ix.wrapping_sub(3usize),
803 storage_ix,
804 storage);
805 input_size = input_size.wrapping_sub(base.wrapping_sub(input_index));
806 input_index = base;
807 next_emit = input_index;
808 code_block_selection = CodeBlockState::NEXT_BLOCK;
809 break;
810 } else {
811 EmitLongInsertLen(insert,
812 cmd_depth,
813 cmd_bits,
814 &mut cmd_histo[..],
815 storage_ix,
816 storage);
817 }
818 EmitLiterals(&input_ptr[(next_emit as (usize))..],
819 insert,
820 &mut lit_depth[..],
821 &mut lit_bits[..],
822 storage_ix,
823 storage);
824 if distance == last_distance {
825 BrotliWriteBits(cmd_depth[64] as usize,
826 cmd_bits[64] as u64,
827 storage_ix,
828 storage);
829 {
830 let _rhs = 1u32;
831 let _lhs = &mut cmd_histo[64];
832 *_lhs = (*_lhs).wrapping_add(_rhs);
833 }
834 } else {
835 EmitDistance(distance as usize,
836 cmd_depth,
837 cmd_bits,
838 &mut cmd_histo[..],
839 storage_ix,
840 storage);
841 last_distance = distance;
842 }
843 EmitCopyLenLastDistance(matched,
844 cmd_depth,
845 cmd_bits,
846 &mut cmd_histo[..],
847 storage_ix,
848 storage);
849 next_emit = ip_index;
850 if ip_index >= ip_limit {
851 code_block_selection = CodeBlockState::EMIT_REMAINDER;
852 break;
853 }
854 {
855 assert!(ip_index >= 3);
856 let input_bytes: u64 = BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
857 let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0i32, shift);
858 let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3i32, shift);
859 table[prev_hash as usize] = ip_index.wrapping_sub(base_ip)
860 .wrapping_sub(3usize) as i32;
861 prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift);
862 table[prev_hash as usize] = ip_index.wrapping_sub(base_ip)
863 .wrapping_sub(2usize) as i32;
864 prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift);
865 table[prev_hash as usize] = ip_index.wrapping_sub(base_ip)
866 .wrapping_sub(1usize) as i32;
867 candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
868 table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
869 }
870 }
871 while IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..]) != 0 {
872 let base: usize = ip_index;
873 let matched: usize = (5usize)
874 .wrapping_add(FindMatchLengthWithLimit(&input_ptr[candidate + 5..],
875 &input_ptr[ip_index + 5..],
876 ip_end.wrapping_sub(ip_index)
877 .wrapping_sub(5usize)));
878 if ip_index.wrapping_sub(candidate) >
879 (1usize << 18i32).wrapping_sub(16usize) {
880 break;
881 }
882 ip_index = ip_index.wrapping_add(matched);
883 last_distance = base.wrapping_sub(candidate) as i32;
884 EmitCopyLen(matched,
885 cmd_depth,
886 cmd_bits,
887 &mut cmd_histo[..],
888 storage_ix,
889 storage);
890 EmitDistance(last_distance as usize,
891 cmd_depth,
892 cmd_bits,
893 &mut cmd_histo[..],
894 storage_ix,
895 storage);
896 next_emit = ip_index;
897 if ip_index >= ip_limit {
898 code_block_selection = CodeBlockState::EMIT_REMAINDER;
899 break;
900 }
901 {
902 assert!(ip_index >= 3);
903 let input_bytes: u64 = BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index as usize -
904 3..]);
905 let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0i32, shift);
906 let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3i32, shift);
907 table[prev_hash as usize] = ip_index.wrapping_sub(base_ip)
908 .wrapping_sub(3usize) as i32;
909 prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift);
910 table[prev_hash as usize] = ip_index.wrapping_sub(base_ip)
911 .wrapping_sub(2usize) as i32;
912 prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift);
913 table[prev_hash as usize] = ip_index.wrapping_sub(base_ip)
914 .wrapping_sub(1usize) as i32;
915 candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
916 table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
917 }
918 }
919 if code_block_selection as i32 == CodeBlockState::EMIT_REMAINDER as i32 {
920 break;
921 }
922 if code_block_selection as i32 == CodeBlockState::EMIT_COMMANDS as i32 {
923 next_hash = Hash(&input_ptr[{
924 ip_index = ip_index.wrapping_add(1usize);
925 ip_index
926 }..],
927 shift);
928 }
929 }
930 }
931 code_block_selection = CodeBlockState::EMIT_REMAINDER;
932 continue;
933 } else if code_block_selection as i32 == CodeBlockState::EMIT_REMAINDER as i32 {
934 input_index = input_index.wrapping_add(block_size);
935 input_size = input_size.wrapping_sub(block_size);
936 block_size = brotli_min_size_t(input_size, kMergeBlockSize);
937 if input_size > 0 &&
938 (total_block_size.wrapping_add(block_size) <= (1i32 << 20i32) as usize) &&
939 (ShouldMergeBlock(&input_ptr[input_index..],
940 block_size,
941 &mut lit_depth[..]) != 0) {
942 total_block_size = total_block_size.wrapping_add(block_size);
943 UpdateBits(20usize,
944 total_block_size.wrapping_sub(1usize) as (u32),
945 mlen_storage_ix,
946 storage);
947 code_block_selection = CodeBlockState::EMIT_COMMANDS;
948 continue;
949 }
950 if next_emit < ip_end {
951 let insert: usize = ip_end.wrapping_sub(next_emit);
952 if insert < 6210 {
953 EmitInsertLen(insert,
954 cmd_depth,
955 cmd_bits,
956 &mut cmd_histo[..],
957 storage_ix,
958 storage);
959 EmitLiterals(&input_ptr[next_emit..],
960 insert,
961 &mut lit_depth[..],
962 &mut lit_bits[..],
963 storage_ix,
964 storage);
965 } else if ShouldUseUncompressedMode(next_emit as isize - metablock_start as isize,
966 insert,
967 literal_ratio) != 0 {
968 EmitUncompressedMetaBlock(&input_ptr[metablock_start..],
969 ip_end.wrapping_sub(metablock_start),
970 mlen_storage_ix.wrapping_sub(3usize),
971 storage_ix,
972 storage);
973 } else {
974 EmitLongInsertLen(insert,
975 cmd_depth,
976 cmd_bits,
977 &mut cmd_histo[..],
978 storage_ix,
979 storage);
980 EmitLiterals(&input_ptr[next_emit..],
981 insert,
982 &mut lit_depth[..],
983 &mut lit_bits[..],
984 storage_ix,
985 storage);
986 }
987 }
988 next_emit = ip_end;
989 code_block_selection = CodeBlockState::NEXT_BLOCK;
990 } else if code_block_selection as i32 == CodeBlockState::NEXT_BLOCK as i32 {
991 if input_size > 0 {
992 metablock_start = input_index;
993 block_size = brotli_min_size_t(input_size, kFirstBlockSize);
994 total_block_size = block_size;
995 mlen_storage_ix = (*storage_ix).wrapping_add(3usize);
996 BrotliStoreMetaBlockHeader(block_size, 0i32, storage_ix, storage);
997 BrotliWriteBits(13usize, 0, storage_ix, storage);
998 literal_ratio = BuildAndStoreLiteralPrefixCode(m,
999 &input_ptr[(input_index as (usize))..],
1000 block_size,
1001 &mut lit_depth[..],
1002 &mut lit_bits[..],
1003 storage_ix,
1004 storage);
1005 BuildAndStoreCommandPrefixCode(&mut cmd_histo[..],
1006 cmd_depth,
1007 cmd_bits,
1008 storage_ix,
1009 storage);
1010 code_block_selection = CodeBlockState::EMIT_COMMANDS;
1011 continue;
1012 }
1013 break;
1014 }
1015 }
1016 if is_last == 0 {
1017 cmd_code[0] = 0;
1018 *cmd_code_numbits = 0;
1019 BuildAndStoreCommandPrefixCode(&mut cmd_histo[..],
1020 cmd_depth,
1021 cmd_bits,
1022 cmd_code_numbits,
1023 cmd_code);
1024 }
1025 }
1026
// Generates a monomorphic wrapper around BrotliCompressFragmentFastImpl with
// the hash-table bit width baked in as a literal, mirroring the specialized
// entry points of the C reference implementation.
macro_rules! compress_specialization {
    ($bits:expr, $name:ident) => {
        fn $name<AllocHT: alloc::Allocator<HuffmanTree>>(
            mht: &mut AllocHT,
            input: &[u8],
            input_size: usize,
            is_last: i32,
            table: &mut [i32],
            cmd_depth: &mut [u8],
            cmd_bits: &mut [u16],
            cmd_code_numbits: &mut usize,
            cmd_code: &mut [u8],
            storage_ix: &mut usize,
            storage: &mut [u8],
        ) {
            // Forward every argument unchanged, inserting the fixed bit count.
            BrotliCompressFragmentFastImpl(
                mht,
                input,
                input_size,
                is_last,
                table,
                $bits,
                cmd_depth,
                cmd_bits,
                cmd_code_numbits,
                cmd_code,
                storage_ix,
                storage,
            );
        }
    };
}
1055
// Concrete specializations; the first argument is the hash-table bit width,
// so the generated functions expect tables of 2^9, 2^11, 2^13 and 2^15 slots.
compress_specialization!(9, BrotliCompressFragmentFastImpl9);
compress_specialization!(11, BrotliCompressFragmentFastImpl11);
compress_specialization!(13, BrotliCompressFragmentFastImpl13);
compress_specialization!(15, BrotliCompressFragmentFastImpl15);
1060
1061
BrotliCompressFragmentFast<AllocHT:alloc::Allocator<HuffmanTree>>(m: &mut AllocHT, input: &[u8], input_size: usize, is_last: i32, table: &mut [i32], table_size: usize, cmd_depth: &mut [u8], cmd_bits: &mut [u16], cmd_code_numbits: &mut usize, cmd_code: &mut [u8], storage_ix: &mut usize, storage: &mut [u8])1062 pub fn BrotliCompressFragmentFast<AllocHT:alloc::Allocator<HuffmanTree>>(m: &mut AllocHT,
1063 input: &[u8],
1064 input_size: usize,
1065 is_last: i32,
1066 table: &mut [i32],
1067 table_size: usize,
1068 cmd_depth: &mut [u8],
1069 cmd_bits: &mut [u16],
1070 cmd_code_numbits: &mut usize,
1071 cmd_code: &mut [u8],
1072 storage_ix: &mut usize,
1073 storage: &mut [u8]){
1074 let initial_storage_ix: usize = *storage_ix;
1075 let table_bits: usize = Log2FloorNonZero(table_size as u64) as (usize);
1076 if input_size == 0usize {
1077 0i32;
1078 BrotliWriteBits(1usize, 1, storage_ix, storage);
1079 BrotliWriteBits(1usize, 1, storage_ix, storage);
1080 *storage_ix = (*storage_ix).wrapping_add(7u32 as (usize)) & !7u32 as (usize);
1081 return;
1082 }
1083 if table_bits == 9usize {
1084 BrotliCompressFragmentFastImpl9(m,
1085 input,
1086 input_size,
1087 is_last,
1088 table,
1089 cmd_depth,
1090 cmd_bits,
1091 cmd_code_numbits,
1092 cmd_code,
1093 storage_ix,
1094 storage);
1095 }
1096 if table_bits == 11usize {
1097 BrotliCompressFragmentFastImpl11(m,
1098 input,
1099 input_size,
1100 is_last,
1101 table,
1102 cmd_depth,
1103 cmd_bits,
1104 cmd_code_numbits,
1105 cmd_code,
1106 storage_ix,
1107 storage);
1108 }
1109 if table_bits == 13usize {
1110 BrotliCompressFragmentFastImpl13(m,
1111 input,
1112 input_size,
1113 is_last,
1114 table,
1115 cmd_depth,
1116 cmd_bits,
1117 cmd_code_numbits,
1118 cmd_code,
1119 storage_ix,
1120 storage);
1121 }
1122 if table_bits == 15usize {
1123 BrotliCompressFragmentFastImpl15(m,
1124 input,
1125 input_size,
1126 is_last,
1127 table,
1128 cmd_depth,
1129 cmd_bits,
1130 cmd_code_numbits,
1131 cmd_code,
1132 storage_ix,
1133 storage);
1134 }
1135 if (*storage_ix).wrapping_sub(initial_storage_ix) > (31usize).wrapping_add(input_size << 3i32) {
1136 EmitUncompressedMetaBlock(input, input_size, initial_storage_ix, storage_ix, storage);
1137 }
1138 if is_last != 0 {
1139 BrotliWriteBits(1usize, 1, storage_ix, storage);
1140 BrotliWriteBits(1usize, 1, storage_ix, storage);
1141 *storage_ix = (*storage_ix).wrapping_add(7u32 as (usize)) & !7u32 as (usize);
1142 }
1143 }
1144