// FFI declarations for routines implemented in other translation units of the
// Brotli C sources (allocator, literal bit-cost estimation, static-dictionary
// matching) plus the libc/libm helpers this file relies on.
extern {
  fn BrotliAllocate(
    m : *mut MemoryManager, n : usize
  ) -> *mut std::os::raw::c_void;
  fn BrotliEstimateBitCostsForLiterals(
    pos : usize,
    len : usize,
    mask : usize,
    data : *const u8,
    cost : *mut f32
  );
  fn BrotliFindAllStaticDictionaryMatches(
    dictionary : *const BrotliEncoderDictionary,
    data : *const u8,
    min_length : usize,
    max_length : usize,
    matches : *mut u32
  ) -> i32;
  fn BrotliFree(
    m : *mut MemoryManager, p : *mut std::os::raw::c_void
  );
  fn log2(__x : f64) -> f64;
  fn memcpy(
    __dest : *mut std::os::raw::c_void,
    __src : *const std::os::raw::c_void,
    __n : usize
  ) -> *mut std::os::raw::c_void;
  fn memset(
    __s : *mut std::os::raw::c_void, __c : i32, __n : usize
  ) -> *mut std::os::raw::c_void;
}
32
// NOTE(review): placeholder translation — upstream C embeds a real lookup
// table here; a numeric literal cast to a pointer cannot be dereferenced
// safely (see FastLog2).  Confirm against the original brotli sources.
static mut kLog2Table
    : *const f32
    = 0.0000000000000000f32 as (*const f32);

// log2 of the static-dictionary hash-table size.
static kDictNumBits : i32 = 15i32;

// Multiplicative hash constant for static-dictionary lookups.
static kDictHashMul32 : u32 = 0x1e35a7bdi32 as (u32);

// NOTE(review): placeholder pointer — upstream this is a real bucket table.
static mut kStaticDictionaryBuckets
    : *const u16
    = 1i32 as (*const u16);
44
/// One static-dictionary match candidate: word length, transform id and the
/// word's index within its length bucket.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct DictWord {
  pub len : u8,
  pub transform : u8,
  pub idx : u16,
}
52
// NOTE(review): placeholder pointers below — in the upstream C sources these
// are real statically-initialized tables (dictionary words and the
// insert/copy length-code base/extra-bit tables).  Confirm before use.
static mut kStaticDictionaryWords
    : *const DictWord
    = 0i32 as (*const DictWord);

static mut kInsBase : *mut u32 = 0i32 as (*mut u32);

static mut kInsExtra : *mut u32 = 0i32 as (*mut u32);

static mut kCopyBase : *mut u32 = 2i32 as (*mut u32);

static mut kCopyExtra : *mut u32 = 0i32 as (*mut u32);
64
/// Counts the trailing zero bits of `x` (software stand-in for the C
/// `__builtin_ctzll`); returns the bit width (e.g. 64) when `x == 0`.
///
/// Fix: the previous translation tested `x & 0 != 0`, which is always false,
/// so the loop never executed and the function returned 0 for every input
/// (the intended test was `x & 1 == 0`).  Delegate to the intrinsic-backed
/// `trailing_zeros` instead.
#[no_mangle]
pub unsafe extern fn ctzll(x : usize) -> usize {
  x.trailing_zeros() as (usize)
}
74
// Sentinel meaning "no static-dictionary match of this length".
static kInvalidMatch : u32 = 0xfffffffi32 as (u32);

static kCutoffTransformsCount : u32 = 10i32 as (u32);

// Packed cutoff-transform constant (two u32 halves fused into one usize).
static kCutoffTransforms
    : usize
    = 0x71b520ai32 as (usize) << 32i32 | 0xda2d3200u32 as (usize);

// Multiplicative hash constants used by the rolling hashers.
static kHashMul32 : u32 = 0x1e35a7bdi32 as (u32);

static kHashMul64
    : usize
    = 0x1e35a7bdi32 as (usize) << 32i32 | 0x1e35a7bdi32 as (usize);

static kHashMul64Long
    : usize
    = 0x1fe35a7bu32 as (usize) << 32i32 | 0xd3579bd3u32 as (usize);

// Cost assigned to unreachable states in the Zopfli shortest-path search.
static kInfinity : f32 = 1.7e38f32;

// NOTE(review): placeholder pointers — upstream these are real tables used
// for distance-cache shortcut computation.  Confirm before dereferencing.
static mut kDistanceCacheIndex : *const u32 = 0i32 as (*const u32);

static mut kDistanceCacheOffset
    : *const i32
    = 0i32 as (*const i32);
100
/// Auxiliary per-node data for the Zopfli shortest-path search.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct Struct1 {
  pub cost : f32,      // smallest cost (in bits) found to reach this position
  pub next : u32,      // offset to the next node on the chosen path (!0 = end)
  pub shortcut : u32,  // shortcut position (see ComputeDistanceShortcut)
}

/// One position in the Zopfli graph.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct ZopfliNode {
  pub length : u32,               // low 25 bits: copy length; high bits: length-code modifier
  pub distance : u32,             // backward distance of the copy
  pub dcode_insert_length : u32,  // low 27 bits: insert length; high 5 bits: short distance code
  pub u : Struct1,
}
117
118 #[no_mangle]
BrotliInitZopfliNodes( mut array : *mut ZopfliNode, mut length : usize )119 pub unsafe extern fn BrotliInitZopfliNodes(
120 mut array : *mut ZopfliNode, mut length : usize
121 ) {
122 let mut stub : ZopfliNode;
123 let mut i : usize;
124 stub.length = 1i32 as (u32);
125 stub.distance = 0i32 as (u32);
126 stub.dcode_insert_length = 0i32 as (u32);
127 stub.u.cost = kInfinity;
128 i = 0i32 as (usize);
129 while i < length {
130 *array.offset(i as (isize)) = stub;
131 i = i.wrapping_add(1 as (usize));
132 }
133 }
134
/// Input type hint supplied by the API user (BROTLI_MODE_* in the C API).
#[derive(Clone, Copy)]
#[repr(i32)]
pub enum BrotliEncoderMode {
  BROTLI_MODE_GENERIC = 0i32,
  BROTLI_MODE_TEXT = 1i32,
  BROTLI_MODE_FONT = 2i32,
}
142
/// Per-hasher tuning knobs chosen from the quality level.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct BrotliHasherParams {
  pub type_ : i32,
  pub bucket_bits : i32,
  pub block_bits : i32,
  pub hash_len : i32,
  pub num_last_distances_to_check : i32,
}

/// Distance-coding parameters (postfix/direct-code scheme).
#[derive(Clone, Copy)]
#[repr(C)]
pub struct BrotliDistanceParams {
  pub distance_postfix_bits : u32,
  pub num_direct_distance_codes : u32,
  pub alphabet_size : u32,
  pub max_distance : usize,
}

/// Raw static dictionary: word data plus per-length offsets/sizes.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct BrotliDictionary {
  pub size_bits_by_length : *mut u8,
  pub offsets_by_length : *mut u32,
  pub data_size : usize,
  pub data : *const u8,
}

/// Encoder-side view of the static dictionary with its lookup structures.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct BrotliEncoderDictionary {
  pub words : *const BrotliDictionary,
  pub cutoffTransformsCount : u32,
  pub cutoffTransforms : usize,
  pub hash_table : *const u16,
  pub buckets : *const u16,
  pub dict_words : *const DictWord,
}

/// Complete encoder configuration.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct BrotliEncoderParams {
  pub mode : BrotliEncoderMode,
  pub quality : i32,
  pub lgwin : i32,
  pub lgblock : i32,
  pub size_hint : usize,
  pub disable_literal_context_modeling : i32,
  pub large_window : i32,
  pub hasher : BrotliHasherParams,
  pub dist : BrotliDistanceParams,
  pub dictionary : BrotliEncoderDictionary,
}

/// One insert-and-copy command with its precomputed prefix codes.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct Command {
  pub insert_len_ : u32,
  pub copy_len_ : u32,    // low 25 bits: copy length; high bits: length-code delta
  pub dist_extra_ : u32,
  pub cmd_prefix_ : u16,
  pub dist_prefix_ : u16, // low 10 bits: code; high bits: extra-bit count
}
206
ZopfliNodeCopyLength( mut self : *const ZopfliNode ) -> u32207 unsafe extern fn ZopfliNodeCopyLength(
208 mut self : *const ZopfliNode
209 ) -> u32 {
210 (*self).length & 0x1ffffffi32 as (u32)
211 }
212
ZopfliNodeCopyDistance( mut self : *const ZopfliNode ) -> u32213 unsafe extern fn ZopfliNodeCopyDistance(
214 mut self : *const ZopfliNode
215 ) -> u32 {
216 (*self).distance
217 }
218
ZopfliNodeLengthCode( mut self : *const ZopfliNode ) -> u32219 unsafe extern fn ZopfliNodeLengthCode(
220 mut self : *const ZopfliNode
221 ) -> u32 {
222 let modifier : u32 = (*self).length >> 25i32;
223 ZopfliNodeCopyLength(self).wrapping_add(9u32).wrapping_sub(
224 modifier
225 )
226 }
227
/// Smaller of two sizes (translation of the C BROTLI_MIN(size_t, ...) macro).
unsafe extern fn brotli_min_size_t(
  mut a : usize, mut b : usize
) -> usize {
  if b < a { b } else { a }
}
233
ZopfliNodeDistanceCode( mut self : *const ZopfliNode ) -> u32234 unsafe extern fn ZopfliNodeDistanceCode(
235 mut self : *const ZopfliNode
236 ) -> u32 {
237 let short_code : u32 = (*self).dcode_insert_length >> 27i32;
238 if short_code == 0i32 as (u32) {
239 ZopfliNodeCopyDistance(self).wrapping_add(
240 16i32 as (u32)
241 ).wrapping_sub(
242 1i32 as (u32)
243 )
244 } else {
245 short_code.wrapping_sub(1i32 as (u32))
246 }
247 }
248
/// Index of the highest set bit of `n`, i.e. floor(log2(n)); returns 0 when
/// `n` is 0 or 1 (same as the original shift-and-count loop).
unsafe extern fn Log2FloorNonZero(mut n : usize) -> u32 {
  let mut result : u32 = 0;
  loop {
    n = n >> 1i32;
    if n == 0 {
      break;
    }
    result = result.wrapping_add(1 as (u32));
  }
  result
}
259
/// Encodes a distance code into a (prefix code, extra bits) pair —
/// PrefixEncodeCopyDistance() from the C encoder.
///
/// * `distance_code` — combined short/direct/long distance code
/// * `num_direct_codes` — number of direct distance codes in the alphabet
/// * `postfix_bits` — number of postfix bits
/// * `code` — out: prefix code; for long codes the extra-bit count is packed
///   into bits 10+ of the value (callers mask with 0x3ff, see InitCommand)
/// * `extra_bits` — out: the extra-bit payload
unsafe extern fn PrefixEncodeCopyDistance(
  mut distance_code : usize,
  mut num_direct_codes : usize,
  mut postfix_bits : usize,
  mut code : *mut u16,
  mut extra_bits : *mut u32
) { if distance_code < (16i32 as (usize)).wrapping_add(
      num_direct_codes
    ) {
    // Short codes (0..15) and direct codes are emitted verbatim.
    *code = distance_code as (u16);
    *extra_bits = 0i32 as (u32);
  } else {
    // Long codes: bias the distance so the bucket arithmetic below works,
    // then split it into bucket (magnitude), one prefix bit and the postfix.
    let mut dist
        : usize
        = (1i32 as (usize) << postfix_bits.wrapping_add(
            2u32 as (usize)
          )).wrapping_add(
            distance_code.wrapping_sub(16i32 as (usize)).wrapping_sub(
              num_direct_codes
            )
          );
    let mut bucket
        : usize
        = Log2FloorNonZero(dist).wrapping_sub(1i32 as (u32)) as (usize);
    let mut postfix_mask
        : usize
        = (1u32 << postfix_bits).wrapping_sub(1i32 as (u32)) as (usize);
    let mut postfix : usize = dist & postfix_mask;
    let mut prefix : usize = dist >> bucket & 1i32 as (usize);
    // First distance covered by this (bucket, prefix) pair.
    let mut offset
        : usize
        = (2i32 as (usize)).wrapping_add(prefix) << bucket;
    let mut nbits : usize = bucket.wrapping_sub(postfix_bits);
    // Pack the extra-bit count into the high bits of the emitted code.
    *code = (nbits << 10i32 | (16i32 as (usize)).wrapping_add(
      num_direct_codes
    ).wrapping_add(
      (2i32 as (usize)).wrapping_mul(
        nbits.wrapping_sub(1i32 as (usize))
      ).wrapping_add(
        prefix
      ) << postfix_bits
    ).wrapping_add(
      postfix
    )) as (u16);
    *extra_bits = (dist.wrapping_sub(offset) >> postfix_bits) as (u32);
  }
}
307
/// Maps an insert length to its prefix code (0..23) per the Brotli format's
/// insert-length code table.
unsafe extern fn GetInsertLengthCode(
  mut insertlen : usize
) -> u16 {
  if insertlen < 6i32 as (usize) {
    // Codes 0..5 encode the length directly.
    insertlen as (u16)
  } else if insertlen < 130i32 as (usize) {
    // Codes 6..13: two codes per power-of-two bucket of (insertlen - 2).
    let mut nbits
        : u32
        = Log2FloorNonZero(
            insertlen.wrapping_sub(2i32 as (usize))
          ).wrapping_sub(
            1u32
          );
    ((nbits << 1i32) as (usize)).wrapping_add(
      insertlen.wrapping_sub(2i32 as (usize)) >> nbits
    ).wrapping_add(
      2i32 as (usize)
    ) as (u16)
  } else if insertlen < 2114i32 as (usize) {
    // Codes 14..20: one code per power-of-two bucket of (insertlen - 66).
    Log2FloorNonZero(
      insertlen.wrapping_sub(66i32 as (usize))
    ).wrapping_add(
      10i32 as (u32)
    ) as (u16)
  } else if insertlen < 6210i32 as (usize) {
    21u32 as (u16)
  } else if insertlen < 22594i32 as (usize) {
    22u32 as (u16)
  } else {
    23u32 as (u16)
  }
}
340
/// Maps a copy length to its prefix code (0..23) per the Brotli format's
/// copy-length code table (copy lengths start at 2).
unsafe extern fn GetCopyLengthCode(mut copylen : usize) -> u16 {
  if copylen < 10i32 as (usize) {
    // Codes 0..7 encode lengths 2..9 directly.
    copylen.wrapping_sub(2i32 as (usize)) as (u16)
  } else if copylen < 134i32 as (usize) {
    // Codes 8..15: two codes per power-of-two bucket of (copylen - 6).
    let mut nbits
        : u32
        = Log2FloorNonZero(
            copylen.wrapping_sub(6i32 as (usize))
          ).wrapping_sub(
            1u32
          );
    ((nbits << 1i32) as (usize)).wrapping_add(
      copylen.wrapping_sub(6i32 as (usize)) >> nbits
    ).wrapping_add(
      4i32 as (usize)
    ) as (u16)
  } else if copylen < 2118i32 as (usize) {
    // Codes 16..22: one code per power-of-two bucket of (copylen - 70).
    Log2FloorNonZero(
      copylen.wrapping_sub(70i32 as (usize))
    ).wrapping_add(
      12i32 as (u32)
    ) as (u16)
  } else {
    23u32 as (u16)
  }
}
367
/// Combines insert and copy length codes into the command prefix symbol —
/// CombineLengthCodes() from the C encoder.
unsafe extern fn CombineLengthCodes(
  mut inscode : u16, mut copycode : u16, mut use_last_distance : i32
) -> u16 {
  // Low 6 bits: 3 low bits of the copy code | 3 low bits of the insert code.
  let mut bits64
      : u16
      = (copycode as (u32) & 0x7u32 | (inscode as (u32) & 0x7u32) << 3i32) as (u16);
  if use_last_distance != 0 && (inscode as (i32) < 8i32) && (copycode as (i32) < 16i32) {
    // Symbols 0..127 are reserved for commands reusing the last distance.
    if copycode as (i32) < 8i32 {
      bits64 as (i32)
    } else {
      bits64 as (i32) | 64i32
    } as (u16)
  } else {
    // Select the symbol block for the (inscode/8, copycode/8) pair; the
    // magic constant packs the per-block base offsets, 2 bits each.
    let mut offset
        : i32
        = 2i32 * ((copycode as (i32) >> 3i32) + 3i32 * (inscode as (i32) >> 3i32));
    offset = (offset << 5i32) + 0x40i32 + (0x520d40i32 >> offset & 0xc0i32);
    (offset as (u16) as (i32) | bits64 as (i32)) as (u16)
  }
}
388
GetLengthCode( mut insertlen : usize, mut copylen : usize, mut use_last_distance : i32, mut code : *mut u16 )389 unsafe extern fn GetLengthCode(
390 mut insertlen : usize,
391 mut copylen : usize,
392 mut use_last_distance : i32,
393 mut code : *mut u16
394 ) {
395 let mut inscode : u16 = GetInsertLengthCode(insertlen);
396 let mut copycode : u16 = GetCopyLengthCode(copylen);
397 *code = CombineLengthCodes(inscode,copycode,use_last_distance);
398 }
399
InitCommand( mut self : *mut Command, mut dist : *const BrotliDistanceParams, mut insertlen : usize, mut copylen : usize, mut copylen_code_delta : i32, mut distance_code : usize )400 unsafe extern fn InitCommand(
401 mut self : *mut Command,
402 mut dist : *const BrotliDistanceParams,
403 mut insertlen : usize,
404 mut copylen : usize,
405 mut copylen_code_delta : i32,
406 mut distance_code : usize
407 ) {
408 let mut delta : u32 = copylen_code_delta as (i8) as (u8) as (u32);
409 (*self).insert_len_ = insertlen as (u32);
410 (*self).copy_len_ = (copylen | (delta << 25i32) as (usize)) as (u32);
411 PrefixEncodeCopyDistance(
412 distance_code,
413 (*dist).num_direct_distance_codes as (usize),
414 (*dist).distance_postfix_bits as (usize),
415 &mut (*self).dist_prefix_ as (*mut u16),
416 &mut (*self).dist_extra_ as (*mut u32)
417 );
418 GetLengthCode(
419 insertlen,
420 (copylen as (i32) + copylen_code_delta) as (usize),
421 if !!((*self).dist_prefix_ as (i32) & 0x3ffi32 == 0i32) {
422 1i32
423 } else {
424 0i32
425 },
426 &mut (*self).cmd_prefix_ as (*mut u16)
427 );
428 }
429
#[no_mangle]
/// Walks the shortest path recorded in `nodes` (via the `u.next` offsets)
/// and materializes it as a sequence of Commands, updating the distance
/// cache, the pending `last_insert_len` and the literal counter as it goes.
pub unsafe extern fn BrotliZopfliCreateCommands(
  num_bytes : usize,
  block_start : usize,
  max_backward_limit : usize,
  mut nodes : *const ZopfliNode,
  mut dist_cache : *mut i32,
  mut last_insert_len : *mut usize,
  mut params : *const BrotliEncoderParams,
  mut commands : *mut Command,
  mut num_literals : *mut usize
) {
  let mut pos : usize = 0i32 as (usize);
  // u.next holds the offset from the current position to the next node on
  // the path; !0 terminates the chain.
  let mut offset : u32 = (*nodes.offset(0i32 as (isize))).u.next;
  let mut i : usize;
  let mut gap : usize = 0i32 as (usize);
  i = 0i32 as (usize);
  while offset != !(0i32 as (u32)) {
    {
      let mut next
          : *const ZopfliNode
          = &*nodes.offset(
              pos.wrapping_add(offset as (usize)) as (isize)
            ) as (*const ZopfliNode);
      let mut copy_length
          : usize
          = ZopfliNodeCopyLength(next) as (usize);
      // Low 27 bits carry the insert length (top bits: short distance code).
      let mut insert_length
          : usize
          = ((*next).dcode_insert_length & 0x7ffffffi32 as (u32)) as (usize);
      pos = pos.wrapping_add(insert_length);
      offset = (*next).u.next;
      if i == 0i32 as (usize) {
        // Fold the insert length carried over from the previous block into
        // the first command.
        insert_length = insert_length.wrapping_add(*last_insert_len);
        *last_insert_len = 0i32 as (usize);
      }
      {
        let mut distance : usize = ZopfliNodeCopyDistance(next) as (usize);
        let mut len_code : usize = ZopfliNodeLengthCode(next) as (usize);
        let mut max_distance
            : usize
            = brotli_min_size_t(
                block_start.wrapping_add(pos),
                max_backward_limit
              );
        // Distances beyond the window (plus gap) reference the dictionary.
        let mut is_dictionary
            : i32
            = if !!(distance > max_distance.wrapping_add(gap)) {
                1i32
              } else {
                0i32
              };
        let mut dist_code
            : usize
            = ZopfliNodeDistanceCode(next) as (usize);
        InitCommand(
          &mut *commands.offset(i as (isize)) as (*mut Command),
          &(*params).dist as (*const BrotliDistanceParams),
          insert_length,
          copy_length,
          len_code as (i32) - copy_length as (i32),
          dist_code
        );
        // Rotate the distance LRU cache (skip dictionary refs and code 0).
        if is_dictionary == 0 && (dist_code > 0i32 as (usize)) {
          *dist_cache.offset(3i32 as (isize)) = *dist_cache.offset(
            2i32 as (isize)
          );
          *dist_cache.offset(2i32 as (isize)) = *dist_cache.offset(
            1i32 as (isize)
          );
          *dist_cache.offset(1i32 as (isize)) = *dist_cache.offset(
            0i32 as (isize)
          );
          *dist_cache.offset(0i32 as (isize)) = distance as (i32);
        }
      }
      *num_literals = (*num_literals).wrapping_add(insert_length);
      pos = pos.wrapping_add(copy_length);
    }
    i = i.wrapping_add(1 as (usize));
  }
  // Whatever trails the last copy becomes pending insert length for the
  // next block.
  *last_insert_len = (*last_insert_len).wrapping_add(
    num_bytes.wrapping_sub(pos)
  );
}
515
/// Allocator callbacks threaded through the encoder (see BrotliAllocate /
/// BrotliFree in the extern block).
#[derive(Clone, Copy)]
#[repr(C)]
pub struct MemoryManager {
  pub alloc_func : unsafe extern fn(*mut std::os::raw::c_void, usize) -> *mut std::os::raw::c_void,
  pub free_func : unsafe extern fn(*mut std::os::raw::c_void, *mut std::os::raw::c_void),
  pub opaque : *mut std::os::raw::c_void,
}
523
MaxZopfliLen( mut params : *const BrotliEncoderParams ) -> usize524 unsafe extern fn MaxZopfliLen(
525 mut params : *const BrotliEncoderParams
526 ) -> usize {
527 (if (*params).quality <= 10i32 {
528 150i32
529 } else {
530 325i32
531 }) as (usize)
532 }
533
/// Cost model driving the Zopfli shortest-path search.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct ZopfliCostModel {
  pub cost_cmd_ : *mut f32,       // per-command-symbol bit costs (704 entries)
  pub cost_dist_ : *mut f32,      // per-distance-symbol bit costs
  pub distance_histogram_size : u32,
  pub literal_costs_ : *mut f32,  // prefix sums of per-literal bit costs
  pub min_cost_cmd_ : f32,        // lower bound over all command costs
  pub num_bytes_ : usize,
}

/// Candidate command starting position with its distance-cache state.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct PosData {
  pub pos : usize,
  pub distance_cache : *mut i32,
  pub costdiff : f32,
  pub cost : f32,
}

/// Small ring buffer of PosData candidates (idx_ counts insertions).
#[derive(Clone, Copy)]
#[repr(C)]
pub struct StartPosQueue {
  pub q_ : *mut PosData,
  pub idx_ : usize,
}

/// A backward match: distance plus a (length << 5 | length_code) packing
/// (code part is 0 when length == length code; see InitBackwardMatch).
#[derive(Clone, Copy)]
#[repr(C)]
pub struct BackwardMatch {
  pub distance : u32,
  pub length_and_code : u32,
}
567
/// Number of lookahead bytes the H10 hasher needs past a stored position.
unsafe extern fn StoreLookaheadH10() -> usize {
  let lookahead : usize = 128;
  lookahead
}
569
InitZopfliCostModel( mut m : *mut MemoryManager, mut self : *mut ZopfliCostModel, mut dist : *const BrotliDistanceParams, mut num_bytes : usize )570 unsafe extern fn InitZopfliCostModel(
571 mut m : *mut MemoryManager,
572 mut self : *mut ZopfliCostModel,
573 mut dist : *const BrotliDistanceParams,
574 mut num_bytes : usize
575 ) {
576 let mut distance_histogram_size : u32 = (*dist).alphabet_size;
577 if distance_histogram_size > 544i32 as (u32) {
578 distance_histogram_size = 544i32 as (u32);
579 }
580 (*self).num_bytes_ = num_bytes;
581 (*self).literal_costs_ = if num_bytes.wrapping_add(
582 2i32 as (usize)
583 ) > 0i32 as (usize) {
584 BrotliAllocate(
585 m,
586 num_bytes.wrapping_add(2i32 as (usize)).wrapping_mul(
587 std::mem::size_of::<f32>()
588 )
589 ) as (*mut f32)
590 } else {
591 0i32 as (*mut std::os::raw::c_void) as (*mut f32)
592 };
593 (*self).cost_dist_ = if (*dist).alphabet_size > 0i32 as (u32) {
594 BrotliAllocate(
595 m,
596 ((*dist).alphabet_size as (usize)).wrapping_mul(
597 std::mem::size_of::<f32>()
598 )
599 ) as (*mut f32)
600 } else {
601 0i32 as (*mut std::os::raw::c_void) as (*mut f32)
602 };
603 (*self).distance_histogram_size = distance_histogram_size;
604 if !(0i32 == 0) { }
605 }
606
/// log2 with a small-value lookup-table fast path (translation of the C
/// FastLog2 helper).
///
/// NOTE(review): the table bound evaluates to
/// size_of::<*const f32>() / size_of::<f32>() (2 on 64-bit targets) and
/// kLog2Table is a placeholder pointer, not a real table — upstream this is
/// sizeof(kLog2Table)/sizeof(kLog2Table[0]) over a static array.  Confirm
/// against the original sources before relying on the fast path.
unsafe extern fn FastLog2(mut v : usize) -> f64 {
  if v < std::mem::size_of::<*const f32>().wrapping_div(
    std::mem::size_of::<f32>()
  ) {
    return *kLog2Table.offset(v as (isize)) as (f64);
  }
  log2(v as (f64))
}
615
ZopfliCostModelSetFromLiteralCosts( mut self : *mut ZopfliCostModel, mut position : usize, mut ringbuffer : *const u8, mut ringbuffer_mask : usize )616 unsafe extern fn ZopfliCostModelSetFromLiteralCosts(
617 mut self : *mut ZopfliCostModel,
618 mut position : usize,
619 mut ringbuffer : *const u8,
620 mut ringbuffer_mask : usize
621 ) {
622 let mut literal_costs : *mut f32 = (*self).literal_costs_;
623 let mut literal_carry : f32 = 0.0f64 as (f32);
624 let mut cost_dist : *mut f32 = (*self).cost_dist_;
625 let mut cost_cmd : *mut f32 = (*self).cost_cmd_;
626 let mut num_bytes : usize = (*self).num_bytes_;
627 let mut i : usize;
628 BrotliEstimateBitCostsForLiterals(
629 position,
630 num_bytes,
631 ringbuffer_mask,
632 ringbuffer,
633 &mut *literal_costs.offset(1i32 as (isize)) as (*mut f32)
634 );
635 *literal_costs.offset(0i32 as (isize)) = 0.0f64 as (f32);
636 i = 0i32 as (usize);
637 while i < num_bytes {
638 {
639 literal_carry = literal_carry + *literal_costs.offset(
640 i.wrapping_add(1i32 as (usize)) as (isize)
641 );
642 *literal_costs.offset(
643 i.wrapping_add(1i32 as (usize)) as (isize)
644 ) = *literal_costs.offset(i as (isize)) + literal_carry;
645 literal_carry = literal_carry - (*literal_costs.offset(
646 i.wrapping_add(1i32 as (usize)) as (isize)
647 ) - *literal_costs.offset(i as (isize)));
648 }
649 i = i.wrapping_add(1 as (usize));
650 }
651 i = 0i32 as (usize);
652 while i < 704i32 as (usize) {
653 {
654 *cost_cmd.offset(i as (isize)) = FastLog2(
655 (11i32 as (u32)).wrapping_add(
656 i as (u32)
657 ) as (usize)
658 ) as (f32);
659 }
660 i = i.wrapping_add(1 as (usize));
661 }
662 i = 0i32 as (usize);
663 while i < (*self).distance_histogram_size as (usize) {
664 {
665 *cost_dist.offset(i as (isize)) = FastLog2(
666 (20i32 as (u32)).wrapping_add(
667 i as (u32)
668 ) as (usize)
669 ) as (f32);
670 }
671 i = i.wrapping_add(1 as (usize));
672 }
673 (*self).min_cost_cmd_ = FastLog2(11i32 as (usize)) as (f32);
674 }
675
InitStartPosQueue(mut self : *mut StartPosQueue)676 unsafe extern fn InitStartPosQueue(mut self : *mut StartPosQueue) {
677 (*self).idx_ = 0i32 as (usize);
678 }
679
/// Reads a machine word from `p` with no alignment requirement (callers pass
/// arbitrary byte offsets into the ring buffer).
///
/// Fix: the previous translation dereferenced `p as *const usize` directly,
/// which is undefined behavior for unaligned pointers; use `read_unaligned`,
/// which compiles to the same single load on x86 but is defined everywhere.
unsafe extern fn BrotliUnalignedRead64(
  mut p : *const std::os::raw::c_void
) -> usize {
  (p as (*const usize)).read_unaligned()
}
685
/// Length of the common prefix of `s1` and `s2`, capped at `limit` bytes.
/// Compares eight bytes at a time via unaligned word loads, then finishes
/// with a byte loop for the tail.
unsafe extern fn FindMatchLengthWithLimit(
  mut s1 : *const u8, mut s2 : *const u8, mut limit : usize
) -> usize {
  let mut matched : usize = 0i32 as (usize);
  // Word-compare count, pre-incremented for the decrement-then-test loop.
  let mut limit2
      : usize
      = (limit >> 3i32).wrapping_add(1i32 as (usize));
  while {
    limit2 = limit2.wrapping_sub(1 as (usize));
    limit2
  } != 0 {
    if BrotliUnalignedRead64(
      s2 as (*const std::os::raw::c_void)
    ) == BrotliUnalignedRead64(
      s1.offset(matched as (isize)) as (*const std::os::raw::c_void)
    ) {
      s2 = s2.offset(8i32 as (isize));
      matched = matched.wrapping_add(8i32 as (usize));
    } else {
      // XOR isolates the differing bits; trailing-zero count / 8 gives the
      // number of equal leading bytes (assumes little-endian byte order —
      // TODO(review): confirm target endianness).
      let mut x
          : usize
          = BrotliUnalignedRead64(
              s2 as (*const std::os::raw::c_void)
            ) ^ BrotliUnalignedRead64(
              s1.offset(matched as (isize)) as (*const std::os::raw::c_void)
            );
      let mut matching_bits : usize = ctzll(x) as (usize);
      matched = matched.wrapping_add(matching_bits >> 3i32);
      return matched;
    }
  }
  // Tail: compare the remaining limit % 8 bytes one at a time.
  limit = (limit & 7i32 as (usize)).wrapping_add(1i32 as (usize));
  while {
    limit = limit.wrapping_sub(1 as (usize));
    limit
  } != 0 {
    if *s1.offset(matched as (isize)) as (i32) == *s2 as (i32) {
      s2 = s2.offset(1 as (isize));
      matched = matched.wrapping_add(1 as (usize));
    } else {
      return matched;
    }
  }
  matched
}
731
InitBackwardMatch( mut self : *mut BackwardMatch, mut dist : usize, mut len : usize )732 unsafe extern fn InitBackwardMatch(
733 mut self : *mut BackwardMatch, mut dist : usize, mut len : usize
734 ) {
735 (*self).distance = dist as (u32);
736 (*self).length_and_code = (len << 5i32) as (u32);
737 }
738
/// Binary-tree hasher state header; the child-pointer "forest" array is laid
/// out directly after this struct in memory (see ForestH10).
#[derive(Clone, Copy)]
#[repr(C)]
pub struct H10 {
  pub window_mask_ : usize,
  pub buckets_ : *mut u32,   // hash bucket -> most recent position (tree root)
  pub invalid_pos_ : u32,    // sentinel stored for detached subtree slots
}
746
/// Reads a 32-bit value from `p` with no alignment requirement.
///
/// Fix: the previous translation dereferenced `p as *const u32` directly,
/// which is undefined behavior for unaligned pointers; use `read_unaligned`.
unsafe extern fn BrotliUnalignedRead32(
  mut p : *const std::os::raw::c_void
) -> u32 {
  (p as (*const u32)).read_unaligned()
}
752
/// Multiplicative hash of the 4 bytes at `data`, keeping the top 17 bits
/// as the H10 bucket index.
unsafe extern fn HashBytesH10(mut data : *const u8) -> u32 {
  let mut h
      : u32
      = BrotliUnalignedRead32(
          data as (*const std::os::raw::c_void)
        ).wrapping_mul(
          kHashMul32
        );
  h >> 32i32 - 17i32
}
763
ForestH10(mut self : *mut H10) -> *mut u32764 unsafe extern fn ForestH10(mut self : *mut H10) -> *mut u32 {
765 &mut *self.offset(1i32 as (isize)) as (*mut H10) as (*mut u32)
766 }
767
LeftChildIndexH10( mut self : *mut H10, pos : usize ) -> usize768 unsafe extern fn LeftChildIndexH10(
769 mut self : *mut H10, pos : usize
770 ) -> usize {
771 (2i32 as (usize)).wrapping_mul(pos & (*self).window_mask_)
772 }
773
RightChildIndexH10( mut self : *mut H10, pos : usize ) -> usize774 unsafe extern fn RightChildIndexH10(
775 mut self : *mut H10, pos : usize
776 ) -> usize {
777 (2i32 as (usize)).wrapping_mul(
778 pos & (*self).window_mask_
779 ).wrapping_add(
780 1i32 as (usize)
781 )
782 }
783
StoreAndFindMatchesH10( mut self : *mut H10, data : *const u8, cur_ix : usize, ring_buffer_mask : usize, max_length : usize, max_backward : usize, best_len : *mut usize, mut matches : *mut BackwardMatch ) -> *mut BackwardMatch784 unsafe extern fn StoreAndFindMatchesH10(
785 mut self : *mut H10,
786 data : *const u8,
787 cur_ix : usize,
788 ring_buffer_mask : usize,
789 max_length : usize,
790 max_backward : usize,
791 best_len : *mut usize,
792 mut matches : *mut BackwardMatch
793 ) -> *mut BackwardMatch {
794 let cur_ix_masked : usize = cur_ix & ring_buffer_mask;
795 let max_comp_len
796 : usize
797 = brotli_min_size_t(max_length,128i32 as (usize));
798 let should_reroot_tree
799 : i32
800 = if !!(max_length >= 128i32 as (usize)) { 1i32 } else { 0i32 };
801 let key
802 : u32
803 = HashBytesH10(
804 &*data.offset(cur_ix_masked as (isize)) as (*const u8)
805 );
806 let mut forest : *mut u32 = ForestH10(self);
807 let mut prev_ix
808 : usize
809 = *(*self).buckets_.offset(key as (isize)) as (usize);
810 let mut node_left : usize = LeftChildIndexH10(self,cur_ix);
811 let mut node_right : usize = RightChildIndexH10(self,cur_ix);
812 let mut best_len_left : usize = 0i32 as (usize);
813 let mut best_len_right : usize = 0i32 as (usize);
814 let mut depth_remaining : usize;
815 if should_reroot_tree != 0 {
816 *(*self).buckets_.offset(key as (isize)) = cur_ix as (u32);
817 }
818 depth_remaining = 64i32 as (usize);
819 'break16: loop {
820 {
821 let backward : usize = cur_ix.wrapping_sub(prev_ix);
822 let prev_ix_masked : usize = prev_ix & ring_buffer_mask;
823 if backward == 0i32 as (usize) || backward > max_backward || depth_remaining == 0i32 as (usize) {
824 if should_reroot_tree != 0 {
825 *forest.offset(node_left as (isize)) = (*self).invalid_pos_;
826 *forest.offset(node_right as (isize)) = (*self).invalid_pos_;
827 }
828 break 'break16;
829 }
830 {
831 let cur_len
832 : usize
833 = brotli_min_size_t(best_len_left,best_len_right);
834 let mut len : usize;
835 len = cur_len.wrapping_add(
836 FindMatchLengthWithLimit(
837 &*data.offset(
838 cur_ix_masked.wrapping_add(cur_len) as (isize)
839 ) as (*const u8),
840 &*data.offset(
841 prev_ix_masked.wrapping_add(cur_len) as (isize)
842 ) as (*const u8),
843 max_length.wrapping_sub(cur_len)
844 )
845 );
846 if !matches.is_null() && (len > *best_len) {
847 *best_len = len;
848 InitBackwardMatch(
849 {
850 let _old = matches;
851 matches = matches.offset(1 as (isize));
852 _old
853 },
854 backward,
855 len
856 );
857 }
858 if len >= max_comp_len {
859 if should_reroot_tree != 0 {
860 *forest.offset(node_left as (isize)) = *forest.offset(
861 LeftChildIndexH10(
862 self,
863 prev_ix
864 ) as (isize)
865 );
866 *forest.offset(node_right as (isize)) = *forest.offset(
867 RightChildIndexH10(
868 self,
869 prev_ix
870 ) as (isize)
871 );
872 }
873 break 'break16;
874 }
875 if *data.offset(
876 cur_ix_masked.wrapping_add(len) as (isize)
877 ) as (i32) > *data.offset(
878 prev_ix_masked.wrapping_add(len) as (isize)
879 ) as (i32) {
880 best_len_left = len;
881 if should_reroot_tree != 0 {
882 *forest.offset(node_left as (isize)) = prev_ix as (u32);
883 }
884 node_left = RightChildIndexH10(self,prev_ix);
885 prev_ix = *forest.offset(node_left as (isize)) as (usize);
886 } else {
887 best_len_right = len;
888 if should_reroot_tree != 0 {
889 *forest.offset(node_right as (isize)) = prev_ix as (u32);
890 }
891 node_right = LeftChildIndexH10(self,prev_ix);
892 prev_ix = *forest.offset(node_right as (isize)) as (usize);
893 }
894 }
895 }
896 depth_remaining = depth_remaining.wrapping_sub(1 as (usize));
897 }
898 matches
899 }
900
/// Common header shared by all hasher handles (HasherCommon in the C code);
/// the type-specific state (e.g. H10) follows it in memory — see SelfH10.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct Struct18 {
  pub params : BrotliHasherParams,
  pub is_prepared_ : i32,
  pub dict_num_lookups : usize,
  pub dict_num_matches : usize,
}
909
GetHasherCommon( mut handle : *mut u8 ) -> *mut Struct18910 unsafe extern fn GetHasherCommon(
911 mut handle : *mut u8
912 ) -> *mut Struct18 {
913 handle as (*mut Struct18)
914 }
915
/// The H10 state lives immediately after the common Struct18 header inside
/// the opaque hasher handle.
unsafe extern fn SelfH10(mut handle : *mut u8) -> *mut H10 {
  &mut *GetHasherCommon(handle).offset(
    1i32 as (isize)
  ) as (*mut Struct18) as (*mut H10)
}
921
/// Larger of two sizes (translation of the C BROTLI_MAX(size_t, ...) macro).
unsafe extern fn brotli_max_size_t(
  mut a : usize, mut b : usize
) -> usize {
  if b > a { b } else { a }
}
927
InitDictionaryBackwardMatch( mut self : *mut BackwardMatch, mut dist : usize, mut len : usize, mut len_code : usize )928 unsafe extern fn InitDictionaryBackwardMatch(
929 mut self : *mut BackwardMatch,
930 mut dist : usize,
931 mut len : usize,
932 mut len_code : usize
933 ) {
934 (*self).distance = dist as (u32);
935 (*self).length_and_code = (len << 5i32 | if len == len_code {
936 0i32 as (usize)
937 } else {
938 len_code
939 }) as (u32);
940 }
941
FindAllMatchesH10( mut handle : *mut u8, mut dictionary : *const BrotliEncoderDictionary, mut data : *const u8, ring_buffer_mask : usize, cur_ix : usize, max_length : usize, max_backward : usize, gap : usize, mut params : *const BrotliEncoderParams, mut matches : *mut BackwardMatch ) -> usize942 unsafe extern fn FindAllMatchesH10(
943 mut handle : *mut u8,
944 mut dictionary : *const BrotliEncoderDictionary,
945 mut data : *const u8,
946 ring_buffer_mask : usize,
947 cur_ix : usize,
948 max_length : usize,
949 max_backward : usize,
950 gap : usize,
951 mut params : *const BrotliEncoderParams,
952 mut matches : *mut BackwardMatch
953 ) -> usize {
954 let orig_matches : *mut BackwardMatch = matches;
955 let cur_ix_masked : usize = cur_ix & ring_buffer_mask;
956 let mut best_len : usize = 1i32 as (usize);
957 let short_match_max_backward
958 : usize
959 = (if (*params).quality != 11i32 {
960 16i32
961 } else {
962 64i32
963 }) as (usize);
964 let mut stop
965 : usize
966 = cur_ix.wrapping_sub(short_match_max_backward);
967 let mut dict_matches : *mut u32;
968 let mut i : usize;
969 if cur_ix < short_match_max_backward {
970 stop = 0i32 as (usize);
971 }
972 i = cur_ix.wrapping_sub(1i32 as (usize));
973 'break14: while i > stop && (best_len <= 2i32 as (usize)) {
974 'continue15: loop {
975 {
976 let mut prev_ix : usize = i;
977 let backward : usize = cur_ix.wrapping_sub(prev_ix);
978 if backward > max_backward {
979 break 'break14;
980 }
981 prev_ix = prev_ix & ring_buffer_mask;
982 if *data.offset(cur_ix_masked as (isize)) as (i32) != *data.offset(
983 prev_ix as (isize)
984 ) as (i32) || *data.offset(
985 cur_ix_masked.wrapping_add(
986 1i32 as (usize)
987 ) as (isize)
988 ) as (i32) != *data.offset(
989 prev_ix.wrapping_add(
990 1i32 as (usize)
991 ) as (isize)
992 ) as (i32) {
993 break 'continue15;
994 }
995 {
996 let len
997 : usize
998 = FindMatchLengthWithLimit(
999 &*data.offset(prev_ix as (isize)) as (*const u8),
1000 &*data.offset(cur_ix_masked as (isize)) as (*const u8),
1001 max_length
1002 );
1003 if len > best_len {
1004 best_len = len;
1005 InitBackwardMatch(
1006 {
1007 let _old = matches;
1008 matches = matches.offset(1 as (isize));
1009 _old
1010 },
1011 backward,
1012 len
1013 );
1014 }
1015 }
1016 }
1017 break;
1018 }
1019 i = i.wrapping_sub(1 as (usize));
1020 }
1021 if best_len < max_length {
1022 matches = StoreAndFindMatchesH10(
1023 SelfH10(handle),
1024 data,
1025 cur_ix,
1026 ring_buffer_mask,
1027 max_length,
1028 max_backward,
1029 &mut best_len as (*mut usize),
1030 matches
1031 );
1032 }
1033 i = 0i32 as (usize);
1034 while i <= 37i32 as (usize) {
1035 {
1036 *dict_matches.offset(i as (isize)) = kInvalidMatch;
1037 }
1038 i = i.wrapping_add(1 as (usize));
1039 }
1040 {
1041 let mut minlen
1042 : usize
1043 = brotli_max_size_t(
1044 4i32 as (usize),
1045 best_len.wrapping_add(1i32 as (usize))
1046 );
1047 if BrotliFindAllStaticDictionaryMatches(
1048 dictionary,
1049 &*data.offset(cur_ix_masked as (isize)) as (*const u8),
1050 minlen,
1051 max_length,
1052 &mut *dict_matches.offset(0i32 as (isize)) as (*mut u32)
1053 ) != 0 {
1054 let mut maxlen
1055 : usize
1056 = brotli_min_size_t(37i32 as (usize),max_length);
1057 let mut l : usize;
1058 l = minlen;
1059 while l <= maxlen {
1060 {
1061 let mut dict_id : u32 = *dict_matches.offset(l as (isize));
1062 if dict_id < kInvalidMatch {
1063 let mut distance
1064 : usize
1065 = max_backward.wrapping_add(gap).wrapping_add(
1066 (dict_id >> 5i32) as (usize)
1067 ).wrapping_add(
1068 1i32 as (usize)
1069 );
1070 if distance <= (*params).dist.max_distance {
1071 InitDictionaryBackwardMatch(
1072 {
1073 let _old = matches;
1074 matches = matches.offset(1 as (isize));
1075 _old
1076 },
1077 distance,
1078 l,
1079 (dict_id & 31i32 as (u32)) as (usize)
1080 );
1081 }
1082 }
1083 }
1084 l = l.wrapping_add(1 as (usize));
1085 }
1086 }
1087 }
1088 ((matches as (isize)).wrapping_sub(
1089 orig_matches as (isize)
1090 ) / std::mem::size_of::<*mut BackwardMatch>(
1091 ) as (isize)) as (usize)
1092 }
1093
BackwardMatchLength( mut self : *const BackwardMatch ) -> usize1094 unsafe extern fn BackwardMatchLength(
1095 mut self : *const BackwardMatch
1096 ) -> usize {
1097 ((*self).length_and_code >> 5i32) as (usize)
1098 }
1099
MaxZopfliCandidates( mut params : *const BrotliEncoderParams ) -> usize1100 unsafe extern fn MaxZopfliCandidates(
1101 mut params : *const BrotliEncoderParams
1102 ) -> usize {
1103 (if (*params).quality <= 10i32 { 1i32 } else { 5i32 }) as (usize)
1104 }
1105
ComputeDistanceShortcut( block_start : usize, pos : usize, max_backward : usize, gap : usize, mut nodes : *const ZopfliNode ) -> u321106 unsafe extern fn ComputeDistanceShortcut(
1107 block_start : usize,
1108 pos : usize,
1109 max_backward : usize,
1110 gap : usize,
1111 mut nodes : *const ZopfliNode
1112 ) -> u32 {
1113 let clen
1114 : usize
1115 = ZopfliNodeCopyLength(
1116 &*nodes.offset(pos as (isize)) as (*const ZopfliNode)
1117 ) as (usize);
1118 let ilen
1119 : usize
1120 = ((*nodes.offset(
1121 pos as (isize)
1122 )).dcode_insert_length & 0x7ffffffi32 as (u32)) as (usize);
1123 let dist
1124 : usize
1125 = ZopfliNodeCopyDistance(
1126 &*nodes.offset(pos as (isize)) as (*const ZopfliNode)
1127 ) as (usize);
1128 if pos == 0i32 as (usize) {
1129 0i32 as (u32)
1130 } else if dist.wrapping_add(clen) <= block_start.wrapping_add(
1131 pos
1132 ).wrapping_add(
1133 gap
1134 ) && (dist <= max_backward.wrapping_add(
1135 gap
1136 )) && (ZopfliNodeDistanceCode(
1137 &*nodes.offset(
1138 pos as (isize)
1139 ) as (*const ZopfliNode)
1140 ) > 0i32 as (u32)) {
1141 pos as (u32)
1142 } else {
1143 (*nodes.offset(
1144 pos.wrapping_sub(clen).wrapping_sub(ilen) as (isize)
1145 )).u.shortcut
1146 }
1147 }
1148
ZopfliCostModelGetLiteralCosts( mut self : *const ZopfliCostModel, mut from : usize, mut to : usize ) -> f321149 unsafe extern fn ZopfliCostModelGetLiteralCosts(
1150 mut self : *const ZopfliCostModel, mut from : usize, mut to : usize
1151 ) -> f32 {
1152 *(*self).literal_costs_.offset(
1153 to as (isize)
1154 ) - *(*self).literal_costs_.offset(from as (isize))
1155 }
1156
ComputeDistanceCache( pos : usize, mut starting_dist_cache : *const i32, mut nodes : *const ZopfliNode, mut dist_cache : *mut i32 )1157 unsafe extern fn ComputeDistanceCache(
1158 pos : usize,
1159 mut starting_dist_cache : *const i32,
1160 mut nodes : *const ZopfliNode,
1161 mut dist_cache : *mut i32
1162 ) {
1163 let mut idx : i32 = 0i32;
1164 let mut p
1165 : usize
1166 = (*nodes.offset(pos as (isize))).u.shortcut as (usize);
1167 while idx < 4i32 && (p > 0i32 as (usize)) {
1168 let ilen
1169 : usize
1170 = ((*nodes.offset(
1171 p as (isize)
1172 )).dcode_insert_length & 0x7ffffffi32 as (u32)) as (usize);
1173 let clen
1174 : usize
1175 = ZopfliNodeCopyLength(
1176 &*nodes.offset(p as (isize)) as (*const ZopfliNode)
1177 ) as (usize);
1178 let dist
1179 : usize
1180 = ZopfliNodeCopyDistance(
1181 &*nodes.offset(p as (isize)) as (*const ZopfliNode)
1182 ) as (usize);
1183 *dist_cache.offset(
1184 {
1185 let _old = idx;
1186 idx = idx + 1;
1187 _old
1188 } as (isize)
1189 ) = dist as (i32);
1190 p = (*nodes.offset(
1191 p.wrapping_sub(clen).wrapping_sub(ilen) as (isize)
1192 )).u.shortcut as (usize);
1193 }
1194 while idx < 4i32 {
1195 {
1196 *dist_cache.offset(idx as (isize)) = *{
1197 let _old = starting_dist_cache;
1198 starting_dist_cache = starting_dist_cache.offset(
1199 1 as (isize)
1200 );
1201 _old
1202 };
1203 }
1204 idx = idx + 1;
1205 }
1206 }
1207
StartPosQueueSize( mut self : *const StartPosQueue ) -> usize1208 unsafe extern fn StartPosQueueSize(
1209 mut self : *const StartPosQueue
1210 ) -> usize {
1211 brotli_min_size_t((*self).idx_,8i32 as (usize))
1212 }
1213
StartPosQueuePush( mut self : *mut StartPosQueue, mut posdata : *const PosData )1214 unsafe extern fn StartPosQueuePush(
1215 mut self : *mut StartPosQueue, mut posdata : *const PosData
1216 ) {
1217 let mut offset
1218 : usize
1219 = !{
1220 let _old = (*self).idx_;
1221 (*self).idx_ = (*self).idx_.wrapping_add(1 as (usize));
1222 _old
1223 } & 7i32 as (usize);
1224 let mut len
1225 : usize
1226 = StartPosQueueSize(self as (*const StartPosQueue));
1227 let mut i : usize;
1228 let mut q : *mut PosData = (*self).q_;
1229 *q.offset(offset as (isize)) = *posdata;
1230 i = 1i32 as (usize);
1231 while i < len {
1232 {
1233 if (*q.offset(
1234 (offset & 7i32 as (usize)) as (isize)
1235 )).costdiff > (*q.offset(
1236 (offset.wrapping_add(
1237 1i32 as (usize)
1238 ) & 7i32 as (usize)) as (isize)
1239 )).costdiff {
1240 let mut __brotli_swap_tmp
1241 : PosData
1242 = *q.offset((offset & 7i32 as (usize)) as (isize));
1243 *q.offset((offset & 7i32 as (usize)) as (isize)) = *q.offset(
1244 (offset.wrapping_add(
1245 1i32 as (usize)
1246 ) & 7i32 as (usize)) as (isize)
1247 );
1248 *q.offset(
1249 (offset.wrapping_add(1i32 as (usize)) & 7i32 as (usize)) as (isize)
1250 ) = __brotli_swap_tmp;
1251 }
1252 offset = offset.wrapping_add(1 as (usize));
1253 }
1254 i = i.wrapping_add(1 as (usize));
1255 }
1256 }
1257
EvaluateNode( block_start : usize, pos : usize, max_backward_limit : usize, gap : usize, mut starting_dist_cache : *const i32, mut model : *const ZopfliCostModel, mut queue : *mut StartPosQueue, mut nodes : *mut ZopfliNode )1258 unsafe extern fn EvaluateNode(
1259 block_start : usize,
1260 pos : usize,
1261 max_backward_limit : usize,
1262 gap : usize,
1263 mut starting_dist_cache : *const i32,
1264 mut model : *const ZopfliCostModel,
1265 mut queue : *mut StartPosQueue,
1266 mut nodes : *mut ZopfliNode
1267 ) {
1268 let mut node_cost : f32 = (*nodes.offset(pos as (isize))).u.cost;
1269 (*nodes.offset(
1270 pos as (isize)
1271 )).u.shortcut = ComputeDistanceShortcut(
1272 block_start,
1273 pos,
1274 max_backward_limit,
1275 gap,
1276 nodes as (*const ZopfliNode)
1277 );
1278 if node_cost <= ZopfliCostModelGetLiteralCosts(
1279 model,
1280 0i32 as (usize),
1281 pos
1282 ) {
1283 let mut posdata : PosData;
1284 posdata.pos = pos;
1285 posdata.cost = node_cost;
1286 posdata.costdiff = node_cost - ZopfliCostModelGetLiteralCosts(
1287 model,
1288 0i32 as (usize),
1289 pos
1290 );
1291 ComputeDistanceCache(
1292 pos,
1293 starting_dist_cache,
1294 nodes as (*const ZopfliNode),
1295 posdata.distance_cache
1296 );
1297 StartPosQueuePush(
1298 queue,
1299 &mut posdata as (*mut PosData) as (*const PosData)
1300 );
1301 }
1302 }
1303
StartPosQueueAt( mut self : *const StartPosQueue, mut k : usize ) -> *const PosData1304 unsafe extern fn StartPosQueueAt(
1305 mut self : *const StartPosQueue, mut k : usize
1306 ) -> *const PosData {
1307 &mut *(*self).q_.offset(
1308 (k.wrapping_sub((*self).idx_) & 7i32 as (usize)) as (isize)
1309 ) as (*mut PosData) as (*const PosData)
1310 }
1311
ZopfliCostModelGetMinCostCmd( mut self : *const ZopfliCostModel ) -> f321312 unsafe extern fn ZopfliCostModelGetMinCostCmd(
1313 mut self : *const ZopfliCostModel
1314 ) -> f32 {
1315 (*self).min_cost_cmd_
1316 }
1317
ComputeMinimumCopyLength( start_cost : f32, mut nodes : *const ZopfliNode, num_bytes : usize, pos : usize ) -> usize1318 unsafe extern fn ComputeMinimumCopyLength(
1319 start_cost : f32,
1320 mut nodes : *const ZopfliNode,
1321 num_bytes : usize,
1322 pos : usize
1323 ) -> usize {
1324 let mut min_cost : f32 = start_cost;
1325 let mut len : usize = 2i32 as (usize);
1326 let mut next_len_bucket : usize = 4i32 as (usize);
1327 let mut next_len_offset : usize = 10i32 as (usize);
1328 while pos.wrapping_add(len) <= num_bytes && ((*nodes.offset(
1329 pos.wrapping_add(len) as (isize)
1330 )).u.cost <= min_cost) {
1331 len = len.wrapping_add(1 as (usize));
1332 if len == next_len_offset {
1333 min_cost = min_cost + 1.0f32;
1334 next_len_offset = next_len_offset.wrapping_add(next_len_bucket);
1335 next_len_bucket = next_len_bucket.wrapping_mul(2i32 as (usize));
1336 }
1337 }
1338 len
1339 }
1340
GetInsertExtra(mut inscode : u16) -> u321341 unsafe extern fn GetInsertExtra(mut inscode : u16) -> u32 {
1342 *kInsExtra.offset(inscode as (isize))
1343 }
1344
ZopfliCostModelGetDistanceCost( mut self : *const ZopfliCostModel, mut distcode : usize ) -> f321345 unsafe extern fn ZopfliCostModelGetDistanceCost(
1346 mut self : *const ZopfliCostModel, mut distcode : usize
1347 ) -> f32 {
1348 *(*self).cost_dist_.offset(distcode as (isize))
1349 }
1350
GetCopyExtra(mut copycode : u16) -> u321351 unsafe extern fn GetCopyExtra(mut copycode : u16) -> u32 {
1352 *kCopyExtra.offset(copycode as (isize))
1353 }
1354
ZopfliCostModelGetCommandCost( mut self : *const ZopfliCostModel, mut cmdcode : u16 ) -> f321355 unsafe extern fn ZopfliCostModelGetCommandCost(
1356 mut self : *const ZopfliCostModel, mut cmdcode : u16
1357 ) -> f32 {
1358 *(*self).cost_cmd_.offset(cmdcode as (isize))
1359 }
1360
UpdateZopfliNode( mut nodes : *mut ZopfliNode, mut pos : usize, mut start_pos : usize, mut len : usize, mut len_code : usize, mut dist : usize, mut short_code : usize, mut cost : f32 )1361 unsafe extern fn UpdateZopfliNode(
1362 mut nodes : *mut ZopfliNode,
1363 mut pos : usize,
1364 mut start_pos : usize,
1365 mut len : usize,
1366 mut len_code : usize,
1367 mut dist : usize,
1368 mut short_code : usize,
1369 mut cost : f32
1370 ) {
1371 let mut next
1372 : *mut ZopfliNode
1373 = &mut *nodes.offset(
1374 pos.wrapping_add(len) as (isize)
1375 ) as (*mut ZopfliNode);
1376 (*next).length = (len | len.wrapping_add(
1377 9u32 as (usize)
1378 ).wrapping_sub(
1379 len_code
1380 ) << 25i32) as (u32);
1381 (*next).distance = dist as (u32);
1382 (*next).dcode_insert_length = (short_code << 27i32 | pos.wrapping_sub(
1383 start_pos
1384 )) as (u32);
1385 (*next).u.cost = cost;
1386 }
1387
BackwardMatchLengthCode( mut self : *const BackwardMatch ) -> usize1388 unsafe extern fn BackwardMatchLengthCode(
1389 mut self : *const BackwardMatch
1390 ) -> usize {
1391 let mut code
1392 : usize
1393 = ((*self).length_and_code & 31i32 as (u32)) as (usize);
1394 if code != 0 { code } else { BackwardMatchLength(self) }
1395 }
1396
UpdateNodes( num_bytes : usize, block_start : usize, pos : usize, mut ringbuffer : *const u8, ringbuffer_mask : usize, mut params : *const BrotliEncoderParams, max_backward_limit : usize, mut starting_dist_cache : *const i32, num_matches : usize, mut matches : *const BackwardMatch, mut model : *const ZopfliCostModel, mut queue : *mut StartPosQueue, mut nodes : *mut ZopfliNode ) -> usize1397 unsafe extern fn UpdateNodes(
1398 num_bytes : usize,
1399 block_start : usize,
1400 pos : usize,
1401 mut ringbuffer : *const u8,
1402 ringbuffer_mask : usize,
1403 mut params : *const BrotliEncoderParams,
1404 max_backward_limit : usize,
1405 mut starting_dist_cache : *const i32,
1406 num_matches : usize,
1407 mut matches : *const BackwardMatch,
1408 mut model : *const ZopfliCostModel,
1409 mut queue : *mut StartPosQueue,
1410 mut nodes : *mut ZopfliNode
1411 ) -> usize {
1412 let cur_ix : usize = block_start.wrapping_add(pos);
1413 let cur_ix_masked : usize = cur_ix & ringbuffer_mask;
1414 let max_distance
1415 : usize
1416 = brotli_min_size_t(cur_ix,max_backward_limit);
1417 let max_len : usize = num_bytes.wrapping_sub(pos);
1418 let max_zopfli_len : usize = MaxZopfliLen(params);
1419 let max_iters : usize = MaxZopfliCandidates(params);
1420 let mut min_len : usize;
1421 let mut result : usize = 0i32 as (usize);
1422 let mut k : usize;
1423 let mut gap : usize = 0i32 as (usize);
1424 EvaluateNode(
1425 block_start,
1426 pos,
1427 max_backward_limit,
1428 gap,
1429 starting_dist_cache,
1430 model,
1431 queue,
1432 nodes
1433 );
1434 {
1435 let mut posdata
1436 : *const PosData
1437 = StartPosQueueAt(queue as (*const StartPosQueue),0i32 as (usize));
1438 let mut min_cost
1439 : f32
1440 = (*posdata).cost + ZopfliCostModelGetMinCostCmd(
1441 model
1442 ) + ZopfliCostModelGetLiteralCosts(model,(*posdata).pos,pos);
1443 min_len = ComputeMinimumCopyLength(
1444 min_cost,
1445 nodes as (*const ZopfliNode),
1446 num_bytes,
1447 pos
1448 );
1449 }
1450 k = 0i32 as (usize);
1451 while k < max_iters && (k < StartPosQueueSize(
1452 queue as (*const StartPosQueue)
1453 )) {
1454 'continue28: loop {
1455 {
1456 let mut posdata
1457 : *const PosData
1458 = StartPosQueueAt(queue as (*const StartPosQueue),k);
1459 let start : usize = (*posdata).pos;
1460 let inscode : u16 = GetInsertLengthCode(pos.wrapping_sub(start));
1461 let start_costdiff : f32 = (*posdata).costdiff;
1462 let base_cost
1463 : f32
1464 = start_costdiff + GetInsertExtra(
1465 inscode
1466 ) as (f32) + ZopfliCostModelGetLiteralCosts(
1467 model,
1468 0i32 as (usize),
1469 pos
1470 );
1471 let mut best_len : usize = min_len.wrapping_sub(1i32 as (usize));
1472 let mut j : usize = 0i32 as (usize);
1473 'break29: while j < 16i32 as (usize) && (best_len < max_len) {
1474 'continue30: loop {
1475 {
1476 let idx
1477 : usize
1478 = *kDistanceCacheIndex.offset(j as (isize)) as (usize);
1479 let backward
1480 : usize
1481 = (*(*posdata).distance_cache.offset(
1482 idx as (isize)
1483 ) + *kDistanceCacheOffset.offset(j as (isize))) as (usize);
1484 let mut prev_ix : usize = cur_ix.wrapping_sub(backward);
1485 let mut len : usize = 0i32 as (usize);
1486 let mut continuation
1487 : u8
1488 = *ringbuffer.offset(
1489 cur_ix_masked.wrapping_add(best_len) as (isize)
1490 );
1491 if cur_ix_masked.wrapping_add(best_len) > ringbuffer_mask {
1492 break 'break29;
1493 }
1494 if backward > max_distance.wrapping_add(gap) {
1495 break 'continue30;
1496 }
1497 if backward <= max_distance {
1498 if prev_ix >= cur_ix {
1499 break 'continue30;
1500 }
1501 prev_ix = prev_ix & ringbuffer_mask;
1502 if prev_ix.wrapping_add(
1503 best_len
1504 ) > ringbuffer_mask || continuation as (i32) != *ringbuffer.offset(
1505 prev_ix.wrapping_add(
1506 best_len
1507 ) as (isize)
1508 ) as (i32) {
1509 break 'continue30;
1510 }
1511 len = FindMatchLengthWithLimit(
1512 &*ringbuffer.offset(prev_ix as (isize)) as (*const u8),
1513 &*ringbuffer.offset(
1514 cur_ix_masked as (isize)
1515 ) as (*const u8),
1516 max_len
1517 );
1518 } else {
1519 break 'continue30;
1520 }
1521 {
1522 let dist_cost
1523 : f32
1524 = base_cost + ZopfliCostModelGetDistanceCost(model,j);
1525 let mut l : usize;
1526 l = best_len.wrapping_add(1i32 as (usize));
1527 while l <= len {
1528 {
1529 let copycode : u16 = GetCopyLengthCode(l);
1530 let cmdcode
1531 : u16
1532 = CombineLengthCodes(
1533 inscode,
1534 copycode,
1535 (j == 0i32 as (usize)) as (i32)
1536 );
1537 let cost
1538 : f32
1539 = (if cmdcode as (i32) < 128i32 {
1540 base_cost
1541 } else {
1542 dist_cost
1543 }) + GetCopyExtra(
1544 copycode
1545 ) as (f32) + ZopfliCostModelGetCommandCost(
1546 model,
1547 cmdcode
1548 );
1549 if cost < (*nodes.offset(
1550 pos.wrapping_add(l) as (isize)
1551 )).u.cost {
1552 UpdateZopfliNode(
1553 nodes,
1554 pos,
1555 start,
1556 l,
1557 l,
1558 backward,
1559 j.wrapping_add(1i32 as (usize)),
1560 cost
1561 );
1562 result = brotli_max_size_t(result,l);
1563 }
1564 best_len = l;
1565 }
1566 l = l.wrapping_add(1 as (usize));
1567 }
1568 }
1569 }
1570 break;
1571 }
1572 j = j.wrapping_add(1 as (usize));
1573 }
1574 if k >= 2i32 as (usize) {
1575 break 'continue28;
1576 }
1577 {
1578 let mut len : usize = min_len;
1579 j = 0i32 as (usize);
1580 while j < num_matches {
1581 {
1582 let mut match_ : BackwardMatch = *matches.offset(j as (isize));
1583 let mut dist : usize = match_.distance as (usize);
1584 let mut is_dictionary_match
1585 : i32
1586 = if !!(dist > max_distance.wrapping_add(gap)) {
1587 1i32
1588 } else {
1589 0i32
1590 };
1591 let mut dist_code
1592 : usize
1593 = dist.wrapping_add(16i32 as (usize)).wrapping_sub(
1594 1i32 as (usize)
1595 );
1596 let mut dist_symbol : u16;
1597 let mut distextra : u32;
1598 let mut distnumextra : u32;
1599 let mut dist_cost : f32;
1600 let mut max_match_len : usize;
1601 PrefixEncodeCopyDistance(
1602 dist_code,
1603 (*params).dist.num_direct_distance_codes as (usize),
1604 (*params).dist.distance_postfix_bits as (usize),
1605 &mut dist_symbol as (*mut u16),
1606 &mut distextra as (*mut u32)
1607 );
1608 distnumextra = (dist_symbol as (i32) >> 10i32) as (u32);
1609 dist_cost = base_cost + distnumextra as (f32) + ZopfliCostModelGetDistanceCost(
1610 model,
1611 (dist_symbol as (i32) & 0x3ffi32) as (usize)
1612 );
1613 max_match_len = BackwardMatchLength(
1614 &mut match_ as (*mut BackwardMatch) as (*const BackwardMatch)
1615 );
1616 if len < max_match_len && (is_dictionary_match != 0 || max_match_len > max_zopfli_len) {
1617 len = max_match_len;
1618 }
1619 while len <= max_match_len {
1620 {
1621 let len_code
1622 : usize
1623 = if is_dictionary_match != 0 {
1624 BackwardMatchLengthCode(
1625 &mut match_ as (*mut BackwardMatch) as (*const BackwardMatch)
1626 )
1627 } else {
1628 len
1629 };
1630 let copycode : u16 = GetCopyLengthCode(len_code);
1631 let cmdcode : u16 = CombineLengthCodes(inscode,copycode,0i32);
1632 let cost
1633 : f32
1634 = dist_cost + GetCopyExtra(
1635 copycode
1636 ) as (f32) + ZopfliCostModelGetCommandCost(
1637 model,
1638 cmdcode
1639 );
1640 if cost < (*nodes.offset(
1641 pos.wrapping_add(len) as (isize)
1642 )).u.cost {
1643 UpdateZopfliNode(
1644 nodes,
1645 pos,
1646 start,
1647 len,
1648 len_code,
1649 dist,
1650 0i32 as (usize),
1651 cost
1652 );
1653 result = brotli_max_size_t(result,len);
1654 }
1655 }
1656 len = len.wrapping_add(1 as (usize));
1657 }
1658 }
1659 j = j.wrapping_add(1 as (usize));
1660 }
1661 }
1662 }
1663 break;
1664 }
1665 k = k.wrapping_add(1 as (usize));
1666 }
1667 result
1668 }
1669
StoreH10( mut handle : *mut u8, mut data : *const u8, mask : usize, ix : usize )1670 unsafe extern fn StoreH10(
1671 mut handle : *mut u8,
1672 mut data : *const u8,
1673 mask : usize,
1674 ix : usize
1675 ) {
1676 let mut self : *mut H10 = SelfH10(handle);
1677 let max_backward
1678 : usize
1679 = (*self).window_mask_.wrapping_sub(16i32 as (usize)).wrapping_add(
1680 1i32 as (usize)
1681 );
1682 StoreAndFindMatchesH10(
1683 self,
1684 data,
1685 ix,
1686 mask,
1687 128i32 as (usize),
1688 max_backward,
1689 0i32 as (*mut std::os::raw::c_void) as (*mut usize),
1690 0i32 as (*mut std::os::raw::c_void) as (*mut BackwardMatch)
1691 );
1692 }
1693
StoreRangeH10( mut handle : *mut u8, mut data : *const u8, mask : usize, ix_start : usize, ix_end : usize )1694 unsafe extern fn StoreRangeH10(
1695 mut handle : *mut u8,
1696 mut data : *const u8,
1697 mask : usize,
1698 ix_start : usize,
1699 ix_end : usize
1700 ) {
1701 let mut i : usize = ix_start;
1702 let mut j : usize = ix_start;
1703 if ix_start.wrapping_add(63i32 as (usize)) <= ix_end {
1704 i = ix_end.wrapping_sub(63i32 as (usize));
1705 }
1706 if ix_start.wrapping_add(512i32 as (usize)) <= i {
1707 while j < i {
1708 {
1709 StoreH10(handle,data,mask,j);
1710 }
1711 j = j.wrapping_add(8i32 as (usize));
1712 }
1713 }
1714 while i < ix_end {
1715 {
1716 StoreH10(handle,data,mask,i);
1717 }
1718 i = i.wrapping_add(1 as (usize));
1719 }
1720 }
1721
HashTypeLengthH10() -> usize1722 unsafe extern fn HashTypeLengthH10() -> usize { 4i32 as (usize) }
1723
CleanupZopfliCostModel( mut m : *mut MemoryManager, mut self : *mut ZopfliCostModel )1724 unsafe extern fn CleanupZopfliCostModel(
1725 mut m : *mut MemoryManager, mut self : *mut ZopfliCostModel
1726 ) {
1727 {
1728 BrotliFree(
1729 m,
1730 (*self).literal_costs_ as (*mut std::os::raw::c_void)
1731 );
1732 (*self).literal_costs_ = 0i32 as (*mut std::os::raw::c_void) as (*mut f32);
1733 }
1734 {
1735 BrotliFree(m,(*self).cost_dist_ as (*mut std::os::raw::c_void));
1736 (*self).cost_dist_ = 0i32 as (*mut std::os::raw::c_void) as (*mut f32);
1737 }
1738 }
1739
ZopfliNodeCommandLength( mut self : *const ZopfliNode ) -> u321740 unsafe extern fn ZopfliNodeCommandLength(
1741 mut self : *const ZopfliNode
1742 ) -> u32 {
1743 ZopfliNodeCopyLength(self).wrapping_add(
1744 (*self).dcode_insert_length & 0x7ffffffi32 as (u32)
1745 )
1746 }
1747
ComputeShortestPathFromNodes( mut num_bytes : usize, mut nodes : *mut ZopfliNode ) -> usize1748 unsafe extern fn ComputeShortestPathFromNodes(
1749 mut num_bytes : usize, mut nodes : *mut ZopfliNode
1750 ) -> usize {
1751 let mut index : usize = num_bytes;
1752 let mut num_commands : usize = 0i32 as (usize);
1753 while (*nodes.offset(
1754 index as (isize)
1755 )).dcode_insert_length & 0x7ffffffi32 as (u32) == 0i32 as (u32) && ((*nodes.offset(
1756 index as (isize)
1757 )).length == 1i32 as (u32)) {
1758 index = index.wrapping_sub(1 as (usize));
1759 }
1760 (*nodes.offset(index as (isize))).u.next = !(0i32 as (u32));
1761 while index != 0i32 as (usize) {
1762 let mut len
1763 : usize
1764 = ZopfliNodeCommandLength(
1765 &mut *nodes.offset(
1766 index as (isize)
1767 ) as (*mut ZopfliNode) as (*const ZopfliNode)
1768 ) as (usize);
1769 index = index.wrapping_sub(len);
1770 (*nodes.offset(index as (isize))).u.next = len as (u32);
1771 num_commands = num_commands.wrapping_add(1 as (usize));
1772 }
1773 num_commands
1774 }
1775
/* Runs the Zopfli dynamic-programming pass over `num_bytes` bytes starting
   at `position` in the ring buffer, filling `nodes` with the cheapest path,
   and returns the number of commands on that path. */
#[no_mangle]
pub unsafe extern fn BrotliZopfliComputeShortestPath(
    mut m : *mut MemoryManager,
    mut num_bytes : usize,
    mut position : usize,
    mut ringbuffer : *const u8,
    mut ringbuffer_mask : usize,
    mut params : *const BrotliEncoderParams,
    max_backward_limit : usize,
    mut dist_cache : *const i32,
    mut hasher : *mut u8,
    mut nodes : *mut ZopfliNode
) -> usize {
    let max_zopfli_len : usize = MaxZopfliLen(params);
    let mut model : ZopfliCostModel;
    let mut queue : StartPosQueue;
    // NOTE(review): `matches` is read and written below without ever being
    // allocated or initialized here; in the upstream C code this is a stack
    // array — confirm where the backing storage comes from in this port.
    let mut matches : *mut BackwardMatch;
    // Last position that may safely be inserted into the hasher (needs
    // StoreLookaheadH10() bytes of lookahead).
    let store_end
        : usize
        = if num_bytes >= StoreLookaheadH10() {
            position.wrapping_add(num_bytes).wrapping_sub(
                StoreLookaheadH10()
            ).wrapping_add(
                1i32 as (usize)
            )
        } else {
            position
        };
    let mut i : usize;
    let mut gap : usize = 0i32 as (usize);
    let mut lz_matches_offset : usize = 0i32 as (usize);
    (*nodes.offset(0i32 as (isize))).length = 0i32 as (u32);
    (*nodes.offset(0i32 as (isize))).u.cost = 0i32 as (f32);
    InitZopfliCostModel(
        m,
        &mut model as (*mut ZopfliCostModel),
        &(*params).dist as (*const BrotliDistanceParams),
        num_bytes
    );
    // Placeholder for the C BROTLI_IS_OOM(m) check; always false here.
    if !(0i32 == 0) {
        return 0i32 as (usize);
    }
    ZopfliCostModelSetFromLiteralCosts(
        &mut model as (*mut ZopfliCostModel),
        position,
        ringbuffer,
        ringbuffer_mask
    );
    InitStartPosQueue(&mut queue as (*mut StartPosQueue));
    i = 0i32 as (usize);
    // Main DP loop: one iteration per input position, minus hash lookahead.
    while i.wrapping_add(HashTypeLengthH10()).wrapping_sub(
        1i32 as (usize)
    ) < num_bytes {
        {
            let pos : usize = position.wrapping_add(i);
            let max_distance
                : usize
                = brotli_min_size_t(pos,max_backward_limit);
            let mut skip : usize;
            let mut num_matches
                : usize
                = FindAllMatchesH10(
                    hasher,
                    &(*params).dictionary as (*const BrotliEncoderDictionary),
                    ringbuffer,
                    ringbuffer_mask,
                    pos,
                    num_bytes.wrapping_sub(i),
                    max_distance,
                    gap,
                    params,
                    &mut *matches.offset(
                        lz_matches_offset as (isize)
                    ) as (*mut BackwardMatch)
                );
            // If the longest match exceeds the Zopfli limit, keep only it.
            if num_matches > 0i32 as (usize) && (BackwardMatchLength(
                &mut *matches.offset(
                    num_matches.wrapping_sub(
                        1i32 as (usize)
                    ) as (isize)
                ) as (*mut BackwardMatch) as (*const BackwardMatch)
            ) > max_zopfli_len) {
                *matches.offset(0i32 as (isize)) = *matches.offset(
                    num_matches.wrapping_sub(
                        1i32 as (usize)
                    ) as (isize)
                );
                num_matches = 1i32 as (usize);
            }
            skip = UpdateNodes(
                num_bytes,
                position,
                i,
                ringbuffer,
                ringbuffer_mask,
                params,
                max_backward_limit,
                dist_cache,
                num_matches,
                matches as (*const BackwardMatch),
                &mut model as (*mut ZopfliCostModel) as (*const ZopfliCostModel),
                &mut queue as (*mut StartPosQueue),
                nodes
            );
            // Only skip ahead for very long applied matches.
            if skip < 16384i32 as (usize) {
                skip = 0i32 as (usize);
            }
            if num_matches == 1i32 as (usize) && (BackwardMatchLength(
                &mut *matches.offset(
                    0i32 as (isize)
                ) as (*mut BackwardMatch) as (*const BackwardMatch)
            ) > max_zopfli_len) {
                skip = brotli_max_size_t(
                    BackwardMatchLength(
                        &mut *matches.offset(
                            0i32 as (isize)
                        ) as (*mut BackwardMatch) as (*const BackwardMatch)
                    ),
                    skip
                );
            }
            if skip > 1i32 as (usize) {
                // Keep the hasher current over the skipped region...
                StoreRangeH10(
                    hasher,
                    ringbuffer,
                    ringbuffer_mask,
                    pos.wrapping_add(1i32 as (usize)),
                    brotli_min_size_t(pos.wrapping_add(skip),store_end)
                );
                skip = skip.wrapping_sub(1 as (usize));
                // ...and still evaluate each skipped node as a command start.
                while skip != 0 {
                    i = i.wrapping_add(1 as (usize));
                    if i.wrapping_add(HashTypeLengthH10()).wrapping_sub(
                        1i32 as (usize)
                    ) >= num_bytes {
                        break;
                    }
                    EvaluateNode(
                        position,
                        i,
                        max_backward_limit,
                        gap,
                        dist_cache,
                        &mut model as (*mut ZopfliCostModel) as (*const ZopfliCostModel),
                        &mut queue as (*mut StartPosQueue),
                        nodes
                    );
                    skip = skip.wrapping_sub(1 as (usize));
                }
            }
        }
        i = i.wrapping_add(1 as (usize));
    }
    CleanupZopfliCostModel(m,&mut model as (*mut ZopfliCostModel));
    ComputeShortestPathFromNodes(num_bytes,nodes)
}
1932
/* Quality-10 backward-reference entry point: allocates one ZopfliNode per
   input byte (plus a terminator), runs the shortest-path search, converts
   the resulting path into commands, and frees the node array. */
#[no_mangle]
pub unsafe extern fn BrotliCreateZopfliBackwardReferences(
    mut m : *mut MemoryManager,
    mut num_bytes : usize,
    mut position : usize,
    mut ringbuffer : *const u8,
    mut ringbuffer_mask : usize,
    mut params : *const BrotliEncoderParams,
    mut hasher : *mut u8,
    mut dist_cache : *mut i32,
    mut last_insert_len : *mut usize,
    mut commands : *mut Command,
    mut num_commands : *mut usize,
    mut num_literals : *mut usize
) {
    // Window size minus the 16-byte slack region.
    let max_backward_limit
        : usize
        = (1i32 as (usize) << (*params).lgwin).wrapping_sub(
            16i32 as (usize)
        );
    let mut nodes : *mut ZopfliNode;
    // num_bytes + 1 nodes: one per byte plus a terminator node.
    nodes = if num_bytes.wrapping_add(
        1i32 as (usize)
    ) > 0i32 as (usize) {
        BrotliAllocate(
            m,
            num_bytes.wrapping_add(1i32 as (usize)).wrapping_mul(
                std::mem::size_of::<ZopfliNode>()
            )
        ) as (*mut ZopfliNode)
    } else {
        0i32 as (*mut std::os::raw::c_void) as (*mut ZopfliNode)
    };
    // Placeholder for the C BROTLI_IS_OOM(m) check; always false here.
    if !(0i32 == 0) {
        return;
    }
    BrotliInitZopfliNodes(
        nodes,
        num_bytes.wrapping_add(1i32 as (usize))
    );
    *num_commands = (*num_commands).wrapping_add(
        BrotliZopfliComputeShortestPath(
            m,
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            max_backward_limit,
            dist_cache as (*const i32),
            hasher,
            nodes
        )
    );
    // Second dead OOM-check placeholder.
    if !(0i32 == 0) {
        return;
    }
    BrotliZopfliCreateCommands(
        num_bytes,
        position,
        max_backward_limit,
        nodes as (*const ZopfliNode),
        dist_cache,
        last_insert_len,
        params,
        commands,
        num_literals
    );
    {
        BrotliFree(m,nodes as (*mut std::os::raw::c_void));
        // Null the freed pointer (defensive; `nodes` is local).
        nodes = 0i32 as (*mut std::os::raw::c_void) as (*mut ZopfliNode);
    }
}
2006
CommandCopyLen(mut self : *const Command) -> u322007 unsafe extern fn CommandCopyLen(mut self : *const Command) -> u32 {
2008 (*self).copy_len_ & 0x1ffffffi32 as (u32)
2009 }
2010
SetCost( mut histogram : *const u32, mut histogram_size : usize, mut literal_histogram : i32, mut cost : *mut f32 )2011 unsafe extern fn SetCost(
2012 mut histogram : *const u32,
2013 mut histogram_size : usize,
2014 mut literal_histogram : i32,
2015 mut cost : *mut f32
2016 ) {
2017 let mut sum : usize = 0i32 as (usize);
2018 let mut missing_symbol_sum : usize;
2019 let mut log2sum : f32;
2020 let mut missing_symbol_cost : f32;
2021 let mut i : usize;
2022 i = 0i32 as (usize);
2023 while i < histogram_size {
2024 {
2025 sum = sum.wrapping_add(*histogram.offset(i as (isize)) as (usize));
2026 }
2027 i = i.wrapping_add(1 as (usize));
2028 }
2029 log2sum = FastLog2(sum) as (f32);
2030 missing_symbol_sum = sum;
2031 if literal_histogram == 0 {
2032 i = 0i32 as (usize);
2033 while i < histogram_size {
2034 {
2035 if *histogram.offset(i as (isize)) == 0i32 as (u32) {
2036 missing_symbol_sum = missing_symbol_sum.wrapping_add(1 as (usize));
2037 }
2038 }
2039 i = i.wrapping_add(1 as (usize));
2040 }
2041 }
2042 missing_symbol_cost = FastLog2(
2043 missing_symbol_sum
2044 ) as (f32) + 2i32 as (f32);
2045 i = 0i32 as (usize);
2046 while i < histogram_size {
2047 'continue56: loop {
2048 {
2049 if *histogram.offset(i as (isize)) == 0i32 as (u32) {
2050 *cost.offset(i as (isize)) = missing_symbol_cost;
2051 break 'continue56;
2052 }
2053 *cost.offset(i as (isize)) = log2sum - FastLog2(
2054 *histogram.offset(
2055 i as (isize)
2056 ) as (usize)
2057 ) as (f32);
2058 if *cost.offset(i as (isize)) < 1i32 as (f32) {
2059 *cost.offset(i as (isize)) = 1i32 as (f32);
2060 }
2061 }
2062 break;
2063 }
2064 i = i.wrapping_add(1 as (usize));
2065 }
2066 }
2067
brotli_min_float( mut a : f32, mut b : f32 ) -> f322068 unsafe extern fn brotli_min_float(
2069 mut a : f32, mut b : f32
2070 ) -> f32 {
2071 if a < b { a } else { b }
2072 }
2073
ZopfliCostModelSetFromCommands( mut self : *mut ZopfliCostModel, mut position : usize, mut ringbuffer : *const u8, mut ringbuffer_mask : usize, mut commands : *const Command, mut num_commands : usize, mut last_insert_len : usize )2074 unsafe extern fn ZopfliCostModelSetFromCommands(
2075 mut self : *mut ZopfliCostModel,
2076 mut position : usize,
2077 mut ringbuffer : *const u8,
2078 mut ringbuffer_mask : usize,
2079 mut commands : *const Command,
2080 mut num_commands : usize,
2081 mut last_insert_len : usize
2082 ) {
2083 let mut histogram_literal : *mut u32;
2084 let mut histogram_cmd : *mut u32;
2085 let mut histogram_dist : *mut u32;
2086 let mut cost_literal : *mut f32;
2087 let mut pos : usize = position.wrapping_sub(last_insert_len);
2088 let mut min_cost_cmd : f32 = kInfinity;
2089 let mut i : usize;
2090 let mut cost_cmd : *mut f32 = (*self).cost_cmd_;
2091 memset(
2092 histogram_literal as (*mut std::os::raw::c_void),
2093 0i32,
2094 std::mem::size_of::<*mut u32>()
2095 );
2096 memset(
2097 histogram_cmd as (*mut std::os::raw::c_void),
2098 0i32,
2099 std::mem::size_of::<*mut u32>()
2100 );
2101 memset(
2102 histogram_dist as (*mut std::os::raw::c_void),
2103 0i32,
2104 std::mem::size_of::<*mut u32>()
2105 );
2106 i = 0i32 as (usize);
2107 while i < num_commands {
2108 {
2109 let mut inslength
2110 : usize
2111 = (*commands.offset(i as (isize))).insert_len_ as (usize);
2112 let mut copylength
2113 : usize
2114 = CommandCopyLen(
2115 &*commands.offset(i as (isize)) as (*const Command)
2116 ) as (usize);
2117 let mut distcode
2118 : usize
2119 = ((*commands.offset(
2120 i as (isize)
2121 )).dist_prefix_ as (i32) & 0x3ffi32) as (usize);
2122 let mut cmdcode
2123 : usize
2124 = (*commands.offset(i as (isize))).cmd_prefix_ as (usize);
2125 let mut j : usize;
2126 {
2127 let _rhs = 1;
2128 let _lhs = &mut *histogram_cmd.offset(cmdcode as (isize));
2129 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
2130 }
2131 if cmdcode >= 128i32 as (usize) {
2132 let _rhs = 1;
2133 let _lhs = &mut *histogram_dist.offset(distcode as (isize));
2134 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
2135 }
2136 j = 0i32 as (usize);
2137 while j < inslength {
2138 {
2139 let _rhs = 1;
2140 let _lhs
2141 = &mut *histogram_literal.offset(
2142 *ringbuffer.offset(
2143 (pos.wrapping_add(j) & ringbuffer_mask) as (isize)
2144 ) as (isize)
2145 );
2146 *_lhs = (*_lhs).wrapping_add(_rhs as (u32));
2147 }
2148 j = j.wrapping_add(1 as (usize));
2149 }
2150 pos = pos.wrapping_add(inslength.wrapping_add(copylength));
2151 }
2152 i = i.wrapping_add(1 as (usize));
2153 }
2154 SetCost(
2155 histogram_literal as (*const u32),
2156 256i32 as (usize),
2157 1i32,
2158 cost_literal
2159 );
2160 SetCost(
2161 histogram_cmd as (*const u32),
2162 704i32 as (usize),
2163 0i32,
2164 cost_cmd
2165 );
2166 SetCost(
2167 histogram_dist as (*const u32),
2168 (*self).distance_histogram_size as (usize),
2169 0i32,
2170 (*self).cost_dist_
2171 );
2172 i = 0i32 as (usize);
2173 while i < 704i32 as (usize) {
2174 {
2175 min_cost_cmd = brotli_min_float(
2176 min_cost_cmd,
2177 *cost_cmd.offset(i as (isize))
2178 );
2179 }
2180 i = i.wrapping_add(1 as (usize));
2181 }
2182 (*self).min_cost_cmd_ = min_cost_cmd;
2183 {
2184 let mut literal_costs : *mut f32 = (*self).literal_costs_;
2185 let mut literal_carry : f32 = 0.0f64 as (f32);
2186 let mut num_bytes : usize = (*self).num_bytes_;
2187 *literal_costs.offset(0i32 as (isize)) = 0.0f64 as (f32);
2188 i = 0i32 as (usize);
2189 while i < num_bytes {
2190 {
2191 literal_carry = literal_carry + *cost_literal.offset(
2192 *ringbuffer.offset(
2193 (position.wrapping_add(
2194 i
2195 ) & ringbuffer_mask) as (isize)
2196 ) as (isize)
2197 );
2198 *literal_costs.offset(
2199 i.wrapping_add(1i32 as (usize)) as (isize)
2200 ) = *literal_costs.offset(i as (isize)) + literal_carry;
2201 literal_carry = literal_carry - (*literal_costs.offset(
2202 i.wrapping_add(1i32 as (usize)) as (isize)
2203 ) - *literal_costs.offset(i as (isize)));
2204 }
2205 i = i.wrapping_add(1 as (usize));
2206 }
2207 }
2208 }
2209
// Zopfli shortest-path pass: walks the input, scoring the precomputed
// candidate matches (`matches`, with per-position counts in `num_matches`)
// against the cost model, recording the cheapest arrival at each byte in
// `nodes`, and returns the command count of the shortest path via
// ComputeShortestPathFromNodes.
unsafe extern fn ZopfliIterate(
  mut num_bytes : usize,
  mut position : usize,
  mut ringbuffer : *const u8,
  mut ringbuffer_mask : usize,
  mut params : *const BrotliEncoderParams,
  max_backward_limit : usize,
  gap : usize,
  mut dist_cache : *const i32,
  mut model : *const ZopfliCostModel,
  mut num_matches : *const u32,
  mut matches : *const BackwardMatch,
  mut nodes : *mut ZopfliNode
) -> usize {
  let max_zopfli_len : usize = MaxZopfliLen(params);
  // NOTE(review): `queue` is passed to InitStartPosQueue while formally
  // uninitialized — the translation relies on that call to fill it in.
  let mut queue : StartPosQueue;
  // Read cursor into the flat `matches` array; advanced by each
  // position's num_matches entry.
  let mut cur_match_pos : usize = 0i32 as (usize);
  let mut i : usize;
  // Node 0 is the path origin: zero length, zero cost.
  (*nodes.offset(0i32 as (isize))).length = 0i32 as (u32);
  (*nodes.offset(0i32 as (isize))).u.cost = 0i32 as (f32);
  InitStartPosQueue(&mut queue as (*mut StartPosQueue));
  i = 0i32 as (usize);
  // Stop 3 bytes short of the end; shorter tails can't start a match.
  while i.wrapping_add(3i32 as (usize)) < num_bytes {
    {
      // UpdateNodes relaxes edges out of position i and may request that
      // the following `skip` positions be fast-forwarded.
      let mut skip
          : usize
          = UpdateNodes(
              num_bytes,
              position,
              i,
              ringbuffer,
              ringbuffer_mask,
              params,
              max_backward_limit,
              dist_cache,
              *num_matches.offset(i as (isize)) as (usize),
              &*matches.offset(
                 cur_match_pos as (isize)
               ) as (*const BackwardMatch),
              model,
              &mut queue as (*mut StartPosQueue),
              nodes
            );
      // Only very long skips (>= 16384) are honored; small ones are noise.
      if skip < 16384i32 as (usize) {
        skip = 0i32 as (usize);
      }
      cur_match_pos = cur_match_pos.wrapping_add(
        *num_matches.offset(i as (isize)) as (usize)
      );
      // A single over-long match forces skipping the bytes it covers.
      if *num_matches.offset(
           i as (isize)
         ) == 1i32 as (u32) && (BackwardMatchLength(
                                  &*matches.offset(
                                     cur_match_pos.wrapping_sub(
                                       1i32 as (usize)
                                     ) as (isize)
                                   ) as (*const BackwardMatch)
                                ) > max_zopfli_len) {
        skip = brotli_max_size_t(
          BackwardMatchLength(
            &*matches.offset(
               cur_match_pos.wrapping_sub(1i32 as (usize)) as (isize)
             ) as (*const BackwardMatch)
          ),
          skip
        );
      }
      if skip > 1i32 as (usize) {
        // Fast-forward: still evaluate each skipped node (so costs stay
        // consistent) and keep the match cursor in sync, but do no new
        // match search.
        skip = skip.wrapping_sub(1 as (usize));
        while skip != 0 {
          i = i.wrapping_add(1 as (usize));
          if i.wrapping_add(3i32 as (usize)) >= num_bytes {
            break;
          }
          EvaluateNode(
            position,
            i,
            max_backward_limit,
            gap,
            dist_cache,
            model,
            &mut queue as (*mut StartPosQueue),
            nodes
          );
          cur_match_pos = cur_match_pos.wrapping_add(
            *num_matches.offset(i as (isize)) as (usize)
          );
          skip = skip.wrapping_sub(1 as (usize));
        }
      }
    }
    i = i.wrapping_add(1 as (usize));
  }
  ComputeShortestPathFromNodes(num_bytes,nodes)
}
2305
2306 #[no_mangle]
BrotliCreateHqZopfliBackwardReferences( mut m : *mut MemoryManager, mut num_bytes : usize, mut position : usize, mut ringbuffer : *const u8, mut ringbuffer_mask : usize, mut params : *const BrotliEncoderParams, mut hasher : *mut u8, mut dist_cache : *mut i32, mut last_insert_len : *mut usize, mut commands : *mut Command, mut num_commands : *mut usize, mut num_literals : *mut usize )2307 pub unsafe extern fn BrotliCreateHqZopfliBackwardReferences(
2308 mut m : *mut MemoryManager,
2309 mut num_bytes : usize,
2310 mut position : usize,
2311 mut ringbuffer : *const u8,
2312 mut ringbuffer_mask : usize,
2313 mut params : *const BrotliEncoderParams,
2314 mut hasher : *mut u8,
2315 mut dist_cache : *mut i32,
2316 mut last_insert_len : *mut usize,
2317 mut commands : *mut Command,
2318 mut num_commands : *mut usize,
2319 mut num_literals : *mut usize
2320 ) {
2321 let max_backward_limit
2322 : usize
2323 = (1i32 as (usize) << (*params).lgwin).wrapping_sub(
2324 16i32 as (usize)
2325 );
2326 let mut num_matches
2327 : *mut u32
2328 = if num_bytes > 0i32 as (usize) {
2329 BrotliAllocate(
2330 m,
2331 num_bytes.wrapping_mul(std::mem::size_of::<u32>())
2332 ) as (*mut u32)
2333 } else {
2334 0i32 as (*mut std::os::raw::c_void) as (*mut u32)
2335 };
2336 let mut matches_size
2337 : usize
2338 = (4i32 as (usize)).wrapping_mul(num_bytes);
2339 let store_end
2340 : usize
2341 = if num_bytes >= StoreLookaheadH10() {
2342 position.wrapping_add(num_bytes).wrapping_sub(
2343 StoreLookaheadH10()
2344 ).wrapping_add(
2345 1i32 as (usize)
2346 )
2347 } else {
2348 position
2349 };
2350 let mut cur_match_pos : usize = 0i32 as (usize);
2351 let mut i : usize;
2352 let mut orig_num_literals : usize;
2353 let mut orig_last_insert_len : usize;
2354 let mut orig_dist_cache : *mut i32;
2355 let mut orig_num_commands : usize;
2356 let mut model : ZopfliCostModel;
2357 let mut nodes : *mut ZopfliNode;
2358 let mut matches
2359 : *mut BackwardMatch
2360 = if matches_size > 0i32 as (usize) {
2361 BrotliAllocate(
2362 m,
2363 matches_size.wrapping_mul(std::mem::size_of::<BackwardMatch>())
2364 ) as (*mut BackwardMatch)
2365 } else {
2366 0i32 as (*mut std::os::raw::c_void) as (*mut BackwardMatch)
2367 };
2368 let mut gap : usize = 0i32 as (usize);
2369 let mut shadow_matches : usize = 0i32 as (usize);
2370 if !(0i32 == 0) {
2371 return;
2372 }
2373 i = 0i32 as (usize);
2374 while i.wrapping_add(HashTypeLengthH10()).wrapping_sub(
2375 1i32 as (usize)
2376 ) < num_bytes {
2377 {
2378 let pos : usize = position.wrapping_add(i);
2379 let mut max_distance
2380 : usize
2381 = brotli_min_size_t(pos,max_backward_limit);
2382 let mut max_length : usize = num_bytes.wrapping_sub(i);
2383 let mut num_found_matches : usize;
2384 let mut cur_match_end : usize;
2385 let mut j : usize;
2386 {
2387 if matches_size < cur_match_pos.wrapping_add(
2388 128i32 as (usize)
2389 ).wrapping_add(
2390 shadow_matches
2391 ) {
2392 let mut _new_size
2393 : usize
2394 = if matches_size == 0i32 as (usize) {
2395 cur_match_pos.wrapping_add(128i32 as (usize)).wrapping_add(
2396 shadow_matches
2397 )
2398 } else {
2399 matches_size
2400 };
2401 let mut new_array : *mut BackwardMatch;
2402 while _new_size < cur_match_pos.wrapping_add(
2403 128i32 as (usize)
2404 ).wrapping_add(
2405 shadow_matches
2406 ) {
2407 _new_size = _new_size.wrapping_mul(2i32 as (usize));
2408 }
2409 new_array = if _new_size > 0i32 as (usize) {
2410 BrotliAllocate(
2411 m,
2412 _new_size.wrapping_mul(std::mem::size_of::<BackwardMatch>())
2413 ) as (*mut BackwardMatch)
2414 } else {
2415 0i32 as (*mut std::os::raw::c_void) as (*mut BackwardMatch)
2416 };
2417 if !!(0i32 == 0) && (matches_size != 0i32 as (usize)) {
2418 memcpy(
2419 new_array as (*mut std::os::raw::c_void),
2420 matches as (*const std::os::raw::c_void),
2421 matches_size.wrapping_mul(std::mem::size_of::<BackwardMatch>())
2422 );
2423 }
2424 {
2425 BrotliFree(m,matches as (*mut std::os::raw::c_void));
2426 matches = 0i32 as (*mut std::os::raw::c_void) as (*mut BackwardMatch);
2427 }
2428 matches = new_array;
2429 matches_size = _new_size;
2430 }
2431 }
2432 if !(0i32 == 0) {
2433 return;
2434 }
2435 num_found_matches = FindAllMatchesH10(
2436 hasher,
2437 &(*params).dictionary as (*const BrotliEncoderDictionary),
2438 ringbuffer,
2439 ringbuffer_mask,
2440 pos,
2441 max_length,
2442 max_distance,
2443 gap,
2444 params,
2445 &mut *matches.offset(
2446 cur_match_pos.wrapping_add(shadow_matches) as (isize)
2447 ) as (*mut BackwardMatch)
2448 );
2449 cur_match_end = cur_match_pos.wrapping_add(num_found_matches);
2450 j = cur_match_pos;
2451 while j.wrapping_add(1i32 as (usize)) < cur_match_end {
2452 { }
2453 j = j.wrapping_add(1 as (usize));
2454 }
2455 *num_matches.offset(i as (isize)) = num_found_matches as (u32);
2456 if num_found_matches > 0i32 as (usize) {
2457 let match_len
2458 : usize
2459 = BackwardMatchLength(
2460 &mut *matches.offset(
2461 cur_match_end.wrapping_sub(1i32 as (usize)) as (isize)
2462 ) as (*mut BackwardMatch) as (*const BackwardMatch)
2463 );
2464 if match_len > 325i32 as (usize) {
2465 let skip : usize = match_len.wrapping_sub(1i32 as (usize));
2466 *matches.offset(
2467 {
2468 let _old = cur_match_pos;
2469 cur_match_pos = cur_match_pos.wrapping_add(1 as (usize));
2470 _old
2471 } as (isize)
2472 ) = *matches.offset(
2473 cur_match_end.wrapping_sub(1i32 as (usize)) as (isize)
2474 );
2475 *num_matches.offset(i as (isize)) = 1i32 as (u32);
2476 StoreRangeH10(
2477 hasher,
2478 ringbuffer,
2479 ringbuffer_mask,
2480 pos.wrapping_add(1i32 as (usize)),
2481 brotli_min_size_t(pos.wrapping_add(match_len),store_end)
2482 );
2483 memset(
2484 &mut *num_matches.offset(
2485 i.wrapping_add(1i32 as (usize)) as (isize)
2486 ) as (*mut u32) as (*mut std::os::raw::c_void),
2487 0i32,
2488 skip.wrapping_mul(std::mem::size_of::<u32>())
2489 );
2490 i = i.wrapping_add(skip);
2491 } else {
2492 cur_match_pos = cur_match_end;
2493 }
2494 }
2495 }
2496 i = i.wrapping_add(1 as (usize));
2497 }
2498 orig_num_literals = *num_literals;
2499 orig_last_insert_len = *last_insert_len;
2500 memcpy(
2501 orig_dist_cache as (*mut std::os::raw::c_void),
2502 dist_cache as (*const std::os::raw::c_void),
2503 (4i32 as (usize)).wrapping_mul(std::mem::size_of::<i32>())
2504 );
2505 orig_num_commands = *num_commands;
2506 nodes = if num_bytes.wrapping_add(
2507 1i32 as (usize)
2508 ) > 0i32 as (usize) {
2509 BrotliAllocate(
2510 m,
2511 num_bytes.wrapping_add(1i32 as (usize)).wrapping_mul(
2512 std::mem::size_of::<ZopfliNode>()
2513 )
2514 ) as (*mut ZopfliNode)
2515 } else {
2516 0i32 as (*mut std::os::raw::c_void) as (*mut ZopfliNode)
2517 };
2518 if !(0i32 == 0) {
2519 return;
2520 }
2521 InitZopfliCostModel(
2522 m,
2523 &mut model as (*mut ZopfliCostModel),
2524 &(*params).dist as (*const BrotliDistanceParams),
2525 num_bytes
2526 );
2527 if !(0i32 == 0) {
2528 return;
2529 }
2530 i = 0i32 as (usize);
2531 while i < 2i32 as (usize) {
2532 {
2533 BrotliInitZopfliNodes(
2534 nodes,
2535 num_bytes.wrapping_add(1i32 as (usize))
2536 );
2537 if i == 0i32 as (usize) {
2538 ZopfliCostModelSetFromLiteralCosts(
2539 &mut model as (*mut ZopfliCostModel),
2540 position,
2541 ringbuffer,
2542 ringbuffer_mask
2543 );
2544 } else {
2545 ZopfliCostModelSetFromCommands(
2546 &mut model as (*mut ZopfliCostModel),
2547 position,
2548 ringbuffer,
2549 ringbuffer_mask,
2550 commands as (*const Command),
2551 (*num_commands).wrapping_sub(orig_num_commands),
2552 orig_last_insert_len
2553 );
2554 }
2555 *num_commands = orig_num_commands;
2556 *num_literals = orig_num_literals;
2557 *last_insert_len = orig_last_insert_len;
2558 memcpy(
2559 dist_cache as (*mut std::os::raw::c_void),
2560 orig_dist_cache as (*const std::os::raw::c_void),
2561 (4i32 as (usize)).wrapping_mul(std::mem::size_of::<i32>())
2562 );
2563 *num_commands = (*num_commands).wrapping_add(
2564 ZopfliIterate(
2565 num_bytes,
2566 position,
2567 ringbuffer,
2568 ringbuffer_mask,
2569 params,
2570 max_backward_limit,
2571 gap,
2572 dist_cache as (*const i32),
2573 &mut model as (*mut ZopfliCostModel) as (*const ZopfliCostModel),
2574 num_matches as (*const u32),
2575 matches as (*const BackwardMatch),
2576 nodes
2577 )
2578 );
2579 BrotliZopfliCreateCommands(
2580 num_bytes,
2581 position,
2582 max_backward_limit,
2583 nodes as (*const ZopfliNode),
2584 dist_cache,
2585 last_insert_len,
2586 params,
2587 commands,
2588 num_literals
2589 );
2590 }
2591 i = i.wrapping_add(1 as (usize));
2592 }
2593 CleanupZopfliCostModel(m,&mut model as (*mut ZopfliCostModel));
2594 {
2595 BrotliFree(m,nodes as (*mut std::os::raw::c_void));
2596 nodes = 0i32 as (*mut std::os::raw::c_void) as (*mut ZopfliNode);
2597 }
2598 {
2599 BrotliFree(m,matches as (*mut std::os::raw::c_void));
2600 matches = 0i32 as (*mut std::os::raw::c_void) as (*mut BackwardMatch);
2601 }
2602 {
2603 BrotliFree(m,num_matches as (*mut std::os::raw::c_void));
2604 num_matches = 0i32 as (*mut std::os::raw::c_void) as (*mut u32);
2605 }
2606 }
2607