/*
 * Copyright (c) 2016-2020, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#ifndef ZSTD_CCOMMON_H_MODULE
#define ZSTD_CCOMMON_H_MODULE

/* This module contains definitions which must be identical
 * across compression, decompression and dictBuilder.
 * It also contains a few functions useful to at least two of them,
 * which benefit from being inlined. */

/*-*************************************
*  Dependencies
***************************************/
#if !defined(ZSTD_NO_INTRINSICS) && defined(__ARM_NEON)
#include <arm_neon.h>
#endif
#include "compiler.h"
#include "mem.h"
#include "debug.h"                 /* assert, DEBUGLOG, RAWLOG, g_debuglevel */
#include "error_private.h"
#define ZSTD_STATIC_LINKING_ONLY
#include "../zstd.h"
#define FSE_STATIC_LINKING_ONLY
#include "fse.h"
#define HUF_STATIC_LINKING_ONLY
#include "huf.h"
#ifndef XXH_STATIC_LINKING_ONLY
#  define XXH_STATIC_LINKING_ONLY  /* XXH64_state_t */
#endif
#include "xxhash.h"                /* XXH_reset, update, digest */

#if defined (__cplusplus)
extern "C" {
#endif

/* ---- static assert (debug) --- */
#define ZSTD_STATIC_ASSERT(c) DEBUG_STATIC_ASSERT(c)
#define FSE_isError  ERR_isError
#define HUF_isError  ERR_isError


/*-*************************************
*  shared macros
***************************************/
#undef MIN
#undef MAX
#define MIN(a,b) ((a)<(b) ? (a) : (b))
#define MAX(a,b) ((a)>(b) ? (a) : (b))

/**
 * Ignore: this is an internal helper.
 *
 * This is a helper function to help force C99-correctness during compilation.
 * Under strict compilation modes, variadic macro arguments can't be empty.
 * However, variadic function arguments can be. Using a function therefore lets
 * us statically check that at least one (string) argument was passed,
 * independent of the compilation flags.
 */
static INLINE_KEYWORD UNUSED_ATTR
void _force_has_format_string(const char *format, ...) {
  (void)format;
}

/**
 * Ignore: this is an internal helper.
 *
 * We want to force this function invocation to be syntactically correct, but
 * we don't want to force runtime evaluation of its arguments.
 */
#define _FORCE_HAS_FORMAT_STRING(...) \
  if (0) { \
    _force_has_format_string(__VA_ARGS__); \
  }
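
/* Illustrative note (not from the original source): because the call is
 * compiled but guarded by `if (0)`, passing no arguments fails to compile
 * (the function requires at least a format string), while valid arguments
 * are never evaluated at run time:
 *
 *     _FORCE_HAS_FORMAT_STRING("value=%u", val);   // accepted, never executed
 *     _FORCE_HAS_FORMAT_STRING();                  // rejected at compile time
 */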

/**
 * Return the specified error if the condition evaluates to true.
 *
 * In debug modes, prints additional information.
 * In order to do that (particularly, printing the conditional that failed),
 * this can't just wrap RETURN_ERROR().
 */
#define RETURN_ERROR_IF(cond, err, ...) \
  if (cond) { \
    RAWLOG(3, "%s:%d: ERROR!: check %s failed, returning %s", \
           __FILE__, __LINE__, ZSTD_QUOTE(cond), ZSTD_QUOTE(ERROR(err))); \
    _FORCE_HAS_FORMAT_STRING(__VA_ARGS__); \
    RAWLOG(3, ": " __VA_ARGS__); \
    RAWLOG(3, "\n"); \
    return ERROR(err); \
  }
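
/* Usage sketch (hypothetical helper, not part of this header): inside a
 * function returning size_t, `err` is a bare error name as accepted by
 * ERROR(), and the trailing arguments form a printf-style message that only
 * appears in debug builds:
 *
 *     size_t exampleCheckSrcSize(size_t srcSize)
 *     {
 *         RETURN_ERROR_IF(srcSize < ZSTD_BLOCKHEADERSIZE, srcSize_wrong,
 *                         "srcSize=%u too small", (unsigned)srcSize);
 *         return srcSize;
 *     }
 */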

/**
 * Unconditionally return the specified error.
 *
 * In debug modes, prints additional information.
 */
#define RETURN_ERROR(err, ...) \
  do { \
    RAWLOG(3, "%s:%d: ERROR!: unconditional check failed, returning %s", \
           __FILE__, __LINE__, ZSTD_QUOTE(ERROR(err))); \
    _FORCE_HAS_FORMAT_STRING(__VA_ARGS__); \
    RAWLOG(3, ": " __VA_ARGS__); \
    RAWLOG(3, "\n"); \
    return ERROR(err); \
  } while(0);

/**
 * If the provided expression evaluates to an error code, returns that error code.
 *
 * In debug modes, prints additional information.
 */
#define FORWARD_IF_ERROR(err, ...) \
  do { \
    size_t const err_code = (err); \
    if (ERR_isError(err_code)) { \
      RAWLOG(3, "%s:%d: ERROR!: forwarding error in %s: %s", \
             __FILE__, __LINE__, ZSTD_QUOTE(err), ERR_getErrorName(err_code)); \
      _FORCE_HAS_FORMAT_STRING(__VA_ARGS__); \
      RAWLOG(3, ": " __VA_ARGS__); \
      RAWLOG(3, "\n"); \
      return err_code; \
    } \
  } while(0);
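
/* Usage sketch (hypothetical helpers, not from the original source): a caller
 * chaining several steps that return size_t error codes can propagate the
 * first failure without repeating boilerplate. Note the expression is
 * evaluated exactly once (captured into err_code):
 *
 *     size_t exampleTwoSteps(void* dst, size_t dstCapacity)
 *     {
 *         FORWARD_IF_ERROR(exampleStepOne(dst, dstCapacity), "step one failed");
 *         FORWARD_IF_ERROR(exampleStepTwo(dst, dstCapacity), "step two failed");
 *         return 0;
 *     }
 */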


/*-*************************************
*  Common constants
***************************************/
#define ZSTD_OPT_NUM    (1<<12)

#define ZSTD_REP_NUM      3                 /* number of repcodes */
#define ZSTD_REP_MOVE     (ZSTD_REP_NUM-1)
static const U32 repStartValue[ZSTD_REP_NUM] = { 1, 4, 8 };

#define KB *(1 <<10)
#define MB *(1 <<20)
#define GB *(1U<<30)
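
/* These are postfix multipliers: `128 KB` expands to `128 *(1 <<10)`, so byte
 * sizes can be written in a unit-like style, e.g.
 *
 *     size_t const exampleWindowSize = 8 MB;    // 8 * (1 << 20) bytes
 *     size_t const exampleChunkSize  = 512 KB;  // 512 * (1 << 10) bytes
 */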

#define BIT7 128
#define BIT6  64
#define BIT5  32
#define BIT4  16
#define BIT1   2
#define BIT0   1

#define ZSTD_WINDOWLOG_ABSOLUTEMIN 10
static const size_t ZSTD_fcs_fieldSize[4] = { 0, 2, 4, 8 };
static const size_t ZSTD_did_fieldSize[4] = { 0, 1, 2, 4 };

#define ZSTD_FRAMEIDSIZE 4   /* magic number size */

#define ZSTD_BLOCKHEADERSIZE 3   /* C standard doesn't allow a `static const` variable to be initialized from another `static const` variable */
static const size_t ZSTD_blockHeaderSize = ZSTD_BLOCKHEADERSIZE;
typedef enum { bt_raw, bt_rle, bt_compressed, bt_reserved } blockType_e;

#define ZSTD_FRAMECHECKSUMSIZE 4

#define MIN_SEQUENCES_SIZE 1 /* nbSeq==0 */
#define MIN_CBLOCK_SIZE (1 /*litCSize*/ + 1 /* RLE or RAW */ + MIN_SEQUENCES_SIZE /* nbSeq==0 */)   /* for a non-null block */

#define HufLog 12
typedef enum { set_basic, set_rle, set_compressed, set_repeat } symbolEncodingType_e;

#define LONGNBSEQ 0x7F00

#define MINMATCH 3

#define Litbits  8
#define MaxLit ((1<<Litbits) - 1)
#define MaxML   52
#define MaxLL   35
#define DefaultMaxOff 28
#define MaxOff  31
#define MaxSeq MAX(MaxLL, MaxML)   /* Assumption : MaxOff < MaxLL,MaxML */
#define MLFSELog    9
#define LLFSELog    9
#define OffFSELog   8
#define MaxFSELog  MAX(MAX(MLFSELog, LLFSELog), OffFSELog)

static const U32 LL_bits[MaxLL+1] = { 0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      1, 1, 1, 1, 2, 2, 3, 3,
                                      4, 6, 7, 8, 9,10,11,12,
                                     13,14,15,16 };
static const S16 LL_defaultNorm[MaxLL+1] = { 4, 3, 2, 2, 2, 2, 2, 2,
                                             2, 2, 2, 2, 2, 1, 1, 1,
                                             2, 2, 2, 2, 2, 2, 2, 2,
                                             2, 3, 2, 1, 1, 1, 1, 1,
                                            -1,-1,-1,-1 };
#define LL_DEFAULTNORMLOG 6  /* for static allocation */
static const U32 LL_defaultNormLog = LL_DEFAULTNORMLOG;

static const U32 ML_bits[MaxML+1] = { 0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      1, 1, 1, 1, 2, 2, 3, 3,
                                      4, 4, 5, 7, 8, 9,10,11,
                                     12,13,14,15,16 };
static const S16 ML_defaultNorm[MaxML+1] = { 1, 4, 3, 2, 2, 2, 2, 2,
                                             2, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1,-1,-1,
                                            -1,-1,-1,-1,-1 };
#define ML_DEFAULTNORMLOG 6  /* for static allocation */
static const U32 ML_defaultNormLog = ML_DEFAULTNORMLOG;

static const S16 OF_defaultNorm[DefaultMaxOff+1] = { 1, 1, 1, 1, 1, 1, 2, 2,
                                                     2, 1, 1, 1, 1, 1, 1, 1,
                                                     1, 1, 1, 1, 1, 1, 1, 1,
                                                    -1,-1,-1,-1,-1 };
#define OF_DEFAULTNORMLOG 5  /* for static allocation */
static const U32 OF_defaultNormLog = OF_DEFAULTNORMLOG;
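
/* Sanity-check sketch (illustrative, not part of the original header): an FSE
 * normalized distribution sums to 1 << tableLog, where a -1 entry marks a
 * low-probability symbol that still occupies exactly one slot. A hypothetical
 * self-test could verify the default tables like this:
 *
 *     static int exampleNormIsValid(const S16* norm, size_t count, U32 tableLog)
 *     {
 *         U32 sum = 0;
 *         size_t s;
 *         for (s = 0; s < count; s++)
 *             sum += (norm[s] == -1) ? 1 : (U32)norm[s];
 *         return sum == (1U << tableLog);
 *     }
 *
 *     exampleNormIsValid(LL_defaultNorm, MaxLL+1, LL_DEFAULTNORMLOG);           // -> 1
 *     exampleNormIsValid(ML_defaultNorm, MaxML+1, ML_DEFAULTNORMLOG);           // -> 1
 *     exampleNormIsValid(OF_defaultNorm, DefaultMaxOff+1, OF_DEFAULTNORMLOG);   // -> 1
 */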


/*-*******************************************
*  Shared functions to include for inlining
*********************************************/
static void ZSTD_copy8(void* dst, const void* src) {
#if !defined(ZSTD_NO_INTRINSICS) && defined(__ARM_NEON)
    vst1_u8((uint8_t*)dst, vld1_u8((const uint8_t*)src));
#else
    memcpy(dst, src, 8);
#endif
}

#define COPY8(d,s) { ZSTD_copy8(d,s); d+=8; s+=8; }
static void ZSTD_copy16(void* dst, const void* src) {
#if !defined(ZSTD_NO_INTRINSICS) && defined(__ARM_NEON)
    vst1q_u8((uint8_t*)dst, vld1q_u8((const uint8_t*)src));
#else
    memcpy(dst, src, 16);
#endif
}
#define COPY16(d,s) { ZSTD_copy16(d,s); d+=16; s+=16; }

#define WILDCOPY_OVERLENGTH 32
#define WILDCOPY_VECLEN 16

typedef enum {
    ZSTD_no_overlap,
    ZSTD_overlap_src_before_dst
    /*  ZSTD_overlap_dst_before_src, */
} ZSTD_overlap_e;

/*! ZSTD_wildcopy() :
 *  Custom version of memcpy(), can over-read/write up to WILDCOPY_OVERLENGTH bytes (if length==0)
 *  @param ovtype controls the overlap detection
 *         - ZSTD_no_overlap: The source and destination are guaranteed to be at least WILDCOPY_VECLEN bytes apart.
 *         - ZSTD_overlap_src_before_dst: The src and dst may overlap, but they MUST be at least 8 bytes apart.
 *           The src buffer must be before the dst buffer.
 */
MEM_STATIC FORCE_INLINE_ATTR
void ZSTD_wildcopy(void* dst, const void* src, ptrdiff_t length, ZSTD_overlap_e const ovtype)
{
    ptrdiff_t diff = (BYTE*)dst - (const BYTE*)src;
    const BYTE* ip = (const BYTE*)src;
    BYTE* op = (BYTE*)dst;
    BYTE* const oend = op + length;

    assert(diff >= 8 || (ovtype == ZSTD_no_overlap && diff <= -WILDCOPY_VECLEN));

    if (ovtype == ZSTD_overlap_src_before_dst && diff < WILDCOPY_VECLEN) {
        /* Handle short offset copies. */
        do {
            COPY8(op, ip)
        } while (op < oend);
    } else {
        assert(diff >= WILDCOPY_VECLEN || diff <= -WILDCOPY_VECLEN);
        /* Separate out the first COPY16() call because the copy length is
         * almost certain to be short, so the branches have different
         * probabilities. Since it is almost certain to be short, only do
         * one COPY16() in the first call. Then, do two calls per loop since
         * at that point it is more likely to have a high trip count.
         */
#ifndef __aarch64__
        do {
            COPY16(op, ip);
        }
        while (op < oend);
#else
        COPY16(op, ip);
        if (op >= oend) return;
        do {
            COPY16(op, ip);
            COPY16(op, ip);
        }
        while (op < oend);
#endif
    }
}
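
/* Usage sketch (hypothetical, not from the original source): ZSTD_wildcopy()
 * may read and write up to WILDCOPY_OVERLENGTH bytes beyond `length`, so both
 * buffers need that much slack; a caller might fall back to an exact copy
 * when the slack is not available:
 *
 *     if ((size_t)(oend - op) >= litLength + WILDCOPY_OVERLENGTH) {
 *         ZSTD_wildcopy(op, literals, (ptrdiff_t)litLength, ZSTD_no_overlap);
 *     } else {
 *         memcpy(op, literals, litLength);   // no slack: copy exactly
 *     }
 *     op += litLength;
 */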

MEM_STATIC size_t ZSTD_limitCopy(void* dst, size_t dstCapacity, const void* src, size_t srcSize)
{
    size_t const length = MIN(dstCapacity, srcSize);
    if (length > 0) {
        memcpy(dst, src, length);
    }
    return length;
}

/* define "workspace is too large" as this number of times larger than needed */
#define ZSTD_WORKSPACETOOLARGE_FACTOR 3

/* When the workspace is repeatedly too large for at least this many
 * consecutive invocations, the context's memory usage is considered wasteful,
 * because it is sized for a worst-case scenario that rarely happens.
 * In that case, resize it down to free some memory */
#define ZSTD_WORKSPACETOOLARGE_MAXDURATION 128


/*-*******************************************
*  Private declarations
*********************************************/
typedef struct seqDef_s {
    U32 offset;
    U16 litLength;
    U16 matchLength;
} seqDef;

typedef struct {
    seqDef* sequencesStart;
    seqDef* sequences;
    BYTE* litStart;
    BYTE* lit;
    BYTE* llCode;
    BYTE* mlCode;
    BYTE* ofCode;
    size_t maxNbSeq;
    size_t maxNbLit;
    U32   longLengthID;   /* 0 == no longLength; 1 == Lit.longLength; 2 == Match.longLength; */
    U32   longLengthPos;
} seqStore_t;

typedef struct {
    U32 litLength;
    U32 matchLength;
} ZSTD_sequenceLength;

/**
 * Returns the ZSTD_sequenceLength for the given sequence. It handles the decoding of long sequences
 * indicated by longLengthPos and longLengthID, and adds MINMATCH back to matchLength.
 */
MEM_STATIC ZSTD_sequenceLength ZSTD_getSequenceLength(seqStore_t const* seqStore, seqDef const* seq)
{
    ZSTD_sequenceLength seqLen;
    seqLen.litLength = seq->litLength;
    seqLen.matchLength = seq->matchLength + MINMATCH;
    if (seqStore->longLengthPos == (U32)(seq - seqStore->sequencesStart)) {
        if (seqStore->longLengthID == 1) {
            seqLen.litLength += 0xFFFF;
        }
        if (seqStore->longLengthID == 2) {
            seqLen.matchLength += 0xFFFF;
        }
    }
    return seqLen;
}
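
/* Usage sketch (hypothetical helper, not from the original source): the stored
 * litLength/matchLength fields are truncated to 16 bits, so callers should go
 * through ZSTD_getSequenceLength() to recover the true lengths, e.g.:
 *
 *     size_t exampleTotalMatchBytes(const seqStore_t* seqStore)
 *     {
 *         size_t total = 0;
 *         const seqDef* seq;
 *         for (seq = seqStore->sequencesStart; seq < seqStore->sequences; seq++) {
 *             ZSTD_sequenceLength const seqLen = ZSTD_getSequenceLength(seqStore, seq);
 *             total += seqLen.matchLength;
 *         }
 *         return total;
 *     }
 */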

/**
 * Contains the compressed frame size and an upper-bound for the decompressed frame size.
 * Note: before using `compressedSize`, check for errors using ZSTD_isError().
 *       similarly, before using `decompressedBound`, check for errors using:
 *          `decompressedBound != ZSTD_CONTENTSIZE_ERROR`
 */
typedef struct {
    size_t compressedSize;
    unsigned long long decompressedBound;
} ZSTD_frameSizeInfo;   /* decompress & legacy */

const seqStore_t* ZSTD_getSeqStore(const ZSTD_CCtx* ctx);   /* compress & dictBuilder */
void ZSTD_seqToCodes(const seqStore_t* seqStorePtr);   /* compress, dictBuilder, decodeCorpus (shouldn't get its definition from here) */

/* custom memory allocation functions */
void* ZSTD_malloc(size_t size, ZSTD_customMem customMem);
void* ZSTD_calloc(size_t size, ZSTD_customMem customMem);
void ZSTD_free(void* ptr, ZSTD_customMem customMem);


MEM_STATIC U32 ZSTD_highbit32(U32 val)   /* compress, dictBuilder, decodeCorpus */
{
    assert(val != 0);
    {
#   if defined(_MSC_VER)   /* Visual */
        unsigned long r=0;
        return _BitScanReverse(&r, val) ? (unsigned)r : 0;
#   elif defined(__GNUC__) && (__GNUC__ >= 3)   /* GCC Intrinsic */
        return __builtin_clz (val) ^ 31;
#   elif defined(__ICCARM__)    /* IAR Intrinsic */
        return 31 - __CLZ(val);
#   else   /* Software version */
        static const U32 DeBruijnClz[32] = { 0, 9, 1, 10, 13, 21, 2, 29, 11, 14, 16, 18, 22, 25, 3, 30, 8, 12, 20, 28, 15, 17, 24, 7, 19, 27, 23, 6, 26, 5, 4, 31 };
        U32 v = val;
        v |= v >> 1;
        v |= v >> 2;
        v |= v >> 4;
        v |= v >> 8;
        v |= v >> 16;
        return DeBruijnClz[(v * 0x07C4ACDDU) >> 27];
#   endif
    }
}
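
/* Worked examples (illustrative): ZSTD_highbit32() returns the position of
 * the highest set bit, i.e. floor(log2(val)) for val > 0:
 *
 *     ZSTD_highbit32(1)          == 0
 *     ZSTD_highbit32(2)          == 1
 *     ZSTD_highbit32(3)          == 1
 *     ZSTD_highbit32(1U << 20)   == 20
 *     ZSTD_highbit32(0xFFFFFFFF) == 31
 *
 * val must be non-zero (enforced by the assert).
 */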


/* ZSTD_invalidateRepCodes() :
 * ensures next compression will not use repcodes from previous block.
 * Note : only works with regular variant;
 *        do not use with extDict variant ! */
void ZSTD_invalidateRepCodes(ZSTD_CCtx* cctx);   /* zstdmt, adaptive_compression (shouldn't get this definition from here) */


typedef struct {
    blockType_e blockType;
    U32 lastBlock;
    U32 origSize;
} blockProperties_t;   /* declared here for decompress and fullbench */

/*! ZSTD_getcBlockSize() :
 *  Provides the size of compressed block from block header `src` */
/* Used by: decompress, fullbench (does not get its definition from here) */
size_t ZSTD_getcBlockSize(const void* src, size_t srcSize,
                          blockProperties_t* bpPtr);

/*! ZSTD_decodeSeqHeaders() :
 *  decode sequence header from src */
/* Used by: decompress, fullbench (does not get its definition from here) */
size_t ZSTD_decodeSeqHeaders(ZSTD_DCtx* dctx, int* nbSeqPtr,
                       const void* src, size_t srcSize);


#if defined (__cplusplus)
}
#endif

#endif   /* ZSTD_CCOMMON_H_MODULE */