/*
 * Copyright (c) 2016-present, Przemyslaw Skibinski, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#include "zstd_compress_internal.h"
#include "hist.h"
#include "zstd_opt.h"


#define ZSTD_LITFREQ_ADD    2   /* scaling factor for litFreq, so that frequencies adapt faster to new stats */
#define ZSTD_FREQ_DIV       4   /* log factor when using previous stats to init next stats */
#define ZSTD_MAX_PRICE      (1<<30)


/*-*************************************
*  Price functions for optimal parser
***************************************/

#if 0    /* approximation at bit level */
# define BITCOST_ACCURACY 0
# define BITCOST_MULTIPLIER (1 << BITCOST_ACCURACY)
# define WEIGHT(stat,opt) ((void)opt, ZSTD_bitWeight(stat))
#elif 0  /* fractional bit accuracy */
# define BITCOST_ACCURACY 8
# define BITCOST_MULTIPLIER (1 << BITCOST_ACCURACY)
# define WEIGHT(stat,opt) ((void)opt, ZSTD_fracWeight(stat))
#else    /* opt==approx, ultra==accurate */
# define BITCOST_ACCURACY 8
# define BITCOST_MULTIPLIER (1 << BITCOST_ACCURACY)
# define WEIGHT(stat,opt) (opt ? ZSTD_fracWeight(stat) : ZSTD_bitWeight(stat))
#endif

MEM_STATIC U32 ZSTD_bitWeight(U32 stat)
{
    return (ZSTD_highbit32(stat+1) * BITCOST_MULTIPLIER);
}

MEM_STATIC U32 ZSTD_fracWeight(U32 rawStat)
{
    U32 const stat = rawStat + 1;
    U32 const hb = ZSTD_highbit32(stat);
    U32 const BWeight = hb * BITCOST_MULTIPLIER;
    U32 const FWeight = (stat << BITCOST_ACCURACY) >> hb;
    U32 const weight = BWeight + FWeight;
    assert(hb + BITCOST_ACCURACY < 31);
    return weight;
}

/* debugging function, @return price in bytes */
MEM_STATIC double ZSTD_fCost(U32 price)
{
    return (double)price / (BITCOST_MULTIPLIER*8);
}

static void ZSTD_setBasePrices(optState_t* optPtr, int optLevel)
{
    optPtr->litSumBasePrice = WEIGHT(optPtr->litSum, optLevel);
    optPtr->litLengthSumBasePrice = WEIGHT(optPtr->litLengthSum, optLevel);
    optPtr->matchLengthSumBasePrice = WEIGHT(optPtr->matchLengthSum, optLevel);
    optPtr->offCodeSumBasePrice = WEIGHT(optPtr->offCodeSum, optLevel);
}


static U32 ZSTD_downscaleStat(U32* table, U32 lastEltIndex, int malus)
{
    U32 s, sum=0;
    assert(ZSTD_FREQ_DIV+malus > 0 && ZSTD_FREQ_DIV+malus < 31);
    for (s=0; s<=lastEltIndex; s++) {
        table[s] = 1 + (table[s] >> (ZSTD_FREQ_DIV+malus));
        sum += table[s];
    }
    return sum;
}

static void ZSTD_rescaleFreqs(optState_t* const optPtr,
                              const BYTE* const src, size_t const srcSize,
                              int optLevel)
{
    optPtr->priceType = zop_dynamic;

    if (optPtr->litLengthSum == 0) {  /* first block : init */
        if (srcSize <= 1024)   /* heuristic */
            optPtr->priceType = zop_predef;

        assert(optPtr->symbolCosts != NULL);
        if (optPtr->symbolCosts->huf.repeatMode == HUF_repeat_valid) {  /* huffman table presumed generated by dictionary */
            optPtr->priceType = zop_dynamic;

            assert(optPtr->litFreq != NULL);
            optPtr->litSum = 0;
            {   unsigned lit;
                for (lit=0; lit<=MaxLit; lit++) {
                    U32 const scaleLog = 11;   /* scale to 2K */
                    U32 const bitCost = HUF_getNbBits(optPtr->symbolCosts->huf.CTable, lit);
                    assert(bitCost <= scaleLog);
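                    /* note : this inverts the Huffman code length into a pseudo-frequency :
                     * a symbol coded on b bits receives a frequency of 2^(scaleLog-b),
                     * so that the price functions below re-derive a cost of roughly b bits for it */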
                    optPtr->litFreq[lit] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
                    optPtr->litSum += optPtr->litFreq[lit];
            }   }

            {   unsigned ll;
                FSE_CState_t llstate;
                FSE_initCState(&llstate, optPtr->symbolCosts->fse.litlengthCTable);
                optPtr->litLengthSum = 0;
                for (ll=0; ll<=MaxLL; ll++) {
                    U32 const scaleLog = 10;   /* scale to 1K */
                    U32 const bitCost = FSE_getMaxNbBits(llstate.symbolTT, ll);
                    assert(bitCost < scaleLog);
                    optPtr->litLengthFreq[ll] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
                    optPtr->litLengthSum += optPtr->litLengthFreq[ll];
            }   }

            {   unsigned ml;
                FSE_CState_t mlstate;
                FSE_initCState(&mlstate, optPtr->symbolCosts->fse.matchlengthCTable);
                optPtr->matchLengthSum = 0;
                for (ml=0; ml<=MaxML; ml++) {
                    U32 const scaleLog = 10;
                    U32 const bitCost = FSE_getMaxNbBits(mlstate.symbolTT, ml);
                    assert(bitCost < scaleLog);
                    optPtr->matchLengthFreq[ml] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
                    optPtr->matchLengthSum += optPtr->matchLengthFreq[ml];
            }   }

            {   unsigned of;
                FSE_CState_t ofstate;
                FSE_initCState(&ofstate, optPtr->symbolCosts->fse.offcodeCTable);
                optPtr->offCodeSum = 0;
                for (of=0; of<=MaxOff; of++) {
                    U32 const scaleLog = 10;
                    U32 const bitCost = FSE_getMaxNbBits(ofstate.symbolTT, of);
                    assert(bitCost < scaleLog);
                    optPtr->offCodeFreq[of] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
                    optPtr->offCodeSum += optPtr->offCodeFreq[of];
            }   }

        } else {  /* not a dictionary */

            assert(optPtr->litFreq != NULL);
            {   unsigned lit = MaxLit;
                HIST_count_simple(optPtr->litFreq, &lit, src, srcSize);   /* use raw first block to init statistics */
            }
            optPtr->litSum = ZSTD_downscaleStat(optPtr->litFreq, MaxLit, 1);

            {   unsigned ll;
                for (ll=0; ll<=MaxLL; ll++)
                    optPtr->litLengthFreq[ll] = 1;
            }
            optPtr->litLengthSum = MaxLL+1;

            {   unsigned ml;
                for (ml=0; ml<=MaxML; ml++)
                    optPtr->matchLengthFreq[ml] = 1;
            }
            optPtr->matchLengthSum = MaxML+1;

            {   unsigned of;
                for (of=0; of<=MaxOff; of++)
                    optPtr->offCodeFreq[of] = 1;
            }
            optPtr->offCodeSum = MaxOff+1;

        }

    } else {   /* new block : re-use previous statistics, scaled down */

        optPtr->litSum = ZSTD_downscaleStat(optPtr->litFreq, MaxLit, 1);
        optPtr->litLengthSum = ZSTD_downscaleStat(optPtr->litLengthFreq, MaxLL, 0);
        optPtr->matchLengthSum = ZSTD_downscaleStat(optPtr->matchLengthFreq, MaxML, 0);
        optPtr->offCodeSum = ZSTD_downscaleStat(optPtr->offCodeFreq, MaxOff, 0);
    }

    ZSTD_setBasePrices(optPtr, optLevel);
}

/* ZSTD_rawLiteralsCost() :
 * price of literals (only) in specified segment (whose length can be 0).
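 * note : with dynamic statistics, each literal is charged approximately log2(litSum/litFreq[lit]) bits, expressed in BITCOST_MULTIPLIER units.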
 * does not include price of literalLength symbol */
static U32 ZSTD_rawLiteralsCost(const BYTE* const literals, U32 const litLength,
                                const optState_t* const optPtr,
                                int optLevel)
{
    if (litLength == 0) return 0;
    if (optPtr->priceType == zop_predef)
        return (litLength*6) * BITCOST_MULTIPLIER;  /* 6 bits per literal - no statistics used */

    /* dynamic statistics */
    {   U32 price = litLength * optPtr->litSumBasePrice;
        U32 u;
        for (u=0; u < litLength; u++) {
            assert(WEIGHT(optPtr->litFreq[literals[u]], optLevel) <= optPtr->litSumBasePrice);   /* literal cost should never be negative */
            price -= WEIGHT(optPtr->litFreq[literals[u]], optLevel);
        }
        return price;
    }
}

/* ZSTD_litLengthPrice() :
 * cost of literalLength symbol */
static U32 ZSTD_litLengthPrice(U32 const litLength, const optState_t* const optPtr, int optLevel)
{
    if (optPtr->priceType == zop_predef) return WEIGHT(litLength, optLevel);

    /* dynamic statistics */
    {   U32 const llCode = ZSTD_LLcode(litLength);
        return (LL_bits[llCode] * BITCOST_MULTIPLIER) + (optPtr->litLengthSumBasePrice - WEIGHT(optPtr->litLengthFreq[llCode], optLevel));
    }
}

/* ZSTD_litLengthContribution() :
 * @return ( cost(litlength) - cost(0) )
 * this value can then be added to rawLiteralsCost()
 * to provide a cost which is directly comparable to a match ending at the same position */
static int ZSTD_litLengthContribution(U32 const litLength, const optState_t* const optPtr, int optLevel)
{
    if (optPtr->priceType >= zop_predef) return WEIGHT(litLength, optLevel);

    /* dynamic statistics */
    {   U32 const llCode = ZSTD_LLcode(litLength);
        int const contribution = (LL_bits[llCode] * BITCOST_MULTIPLIER)
                               + WEIGHT(optPtr->litLengthFreq[0], optLevel)   /* note: log2litLengthSum cancels out */
                               - WEIGHT(optPtr->litLengthFreq[llCode], optLevel);
#if 1
        return contribution;
#else
        return MAX(0, contribution); /* sometimes better, sometimes not ... */
#endif
    }
}

/* ZSTD_literalsContribution() :
 * creates a fake cost for the literals part of a sequence
 * which can be compared to the ending cost of a match
 * should a new match start at this position */
static int ZSTD_literalsContribution(const BYTE* const literals, U32 const litLength,
                                     const optState_t* const optPtr,
                                     int optLevel)
{
    int const contribution = ZSTD_rawLiteralsCost(literals, litLength, optPtr, optLevel)
                           + ZSTD_litLengthContribution(litLength, optPtr, optLevel);
    return contribution;
}

/* ZSTD_getMatchPrice() :
 * Provides the cost of the match part (offset + matchLength) of a sequence
 * Must be combined with ZSTD_fullLiteralsCost() to get the full cost of a sequence.
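 * note : the offset is priced through its offCode = ZSTD_highbit32(offset+1), which matches the offset code the entropy stage will emit.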
 * optLevel: when <2, favors small offset for decompression speed (improved cache efficiency) */
FORCE_INLINE_TEMPLATE U32
ZSTD_getMatchPrice(U32 const offset,
                   U32 const matchLength,
                   const optState_t* const optPtr,
                   int const optLevel)
{
    U32 price;
    U32 const offCode = ZSTD_highbit32(offset+1);
    U32 const mlBase = matchLength - MINMATCH;
    assert(matchLength >= MINMATCH);

    if (optPtr->priceType == zop_predef)  /* fixed scheme, do not use statistics */
        return WEIGHT(mlBase, optLevel) + ((16 + offCode) * BITCOST_MULTIPLIER);

    /* dynamic statistics */
    price = (offCode * BITCOST_MULTIPLIER) + (optPtr->offCodeSumBasePrice - WEIGHT(optPtr->offCodeFreq[offCode], optLevel));
    if ((optLevel<2) /*static*/ && offCode >= 20)
        price += (offCode-19)*2 * BITCOST_MULTIPLIER;   /* handicap for long distance offsets, favor decompression speed */

    /* match Length */
    {   U32 const mlCode = ZSTD_MLcode(mlBase);
        price += (ML_bits[mlCode] * BITCOST_MULTIPLIER) + (optPtr->matchLengthSumBasePrice - WEIGHT(optPtr->matchLengthFreq[mlCode], optLevel));
    }

    price += BITCOST_MULTIPLIER / 5;   /* heuristic : make matches a bit more costly, to favor fewer sequences -> faster decompression speed */

    DEBUGLOG(8, "ZSTD_getMatchPrice(ml:%u) = %u", matchLength, price);
    return price;
}

/* ZSTD_updateStats() :
 * assumption : literals + litLength <= iend */
static void ZSTD_updateStats(optState_t* const optPtr,
                             U32 litLength, const BYTE* literals,
                             U32 offsetCode, U32 matchLength)
{
    /* literals */
    {   U32 u;
        for (u=0; u < litLength; u++)
            optPtr->litFreq[literals[u]] += ZSTD_LITFREQ_ADD;
        optPtr->litSum += litLength*ZSTD_LITFREQ_ADD;
    }

    /* literal Length */
    {   U32 const llCode = ZSTD_LLcode(litLength);
        optPtr->litLengthFreq[llCode]++;
        optPtr->litLengthSum++;
    }

    /* match offset code (0-2=>repCode; 3+=>offset+2) */
    {   U32 const offCode = ZSTD_highbit32(offsetCode+1);
        assert(offCode <= MaxOff);
        optPtr->offCodeFreq[offCode]++;
        optPtr->offCodeSum++;
    }

    /* match Length */
    {   U32 const mlBase = matchLength - MINMATCH;
        U32 const mlCode = ZSTD_MLcode(mlBase);
        optPtr->matchLengthFreq[mlCode]++;
        optPtr->matchLengthSum++;
    }
}


/* ZSTD_readMINMATCH() :
 * function safe only for comparisons
 * assumption : memPtr must be at least 4 bytes before end of buffer */
MEM_STATIC U32 ZSTD_readMINMATCH(const void* memPtr, U32 length)
{
    switch (length)
    {
    default :
    case 4 : return MEM_read32(memPtr);
    case 3 : if (MEM_isLittleEndian())
                return MEM_read32(memPtr)<<8;
             else
                return MEM_read32(memPtr)>>8;
    }
}


/* Update hashTable3 up to ip (excluded)
   Assumption : always within prefix (i.e. not within extDict) */
static U32 ZSTD_insertAndFindFirstIndexHash3 (ZSTD_matchState_t* ms, const BYTE* const ip)
{
    U32* const hashTable3 = ms->hashTable3;
    U32 const hashLog3 = ms->hashLog3;
    const BYTE* const base = ms->window.base;
    U32 idx = ms->nextToUpdate3;
    U32 const target = ms->nextToUpdate3 = (U32)(ip - base);
    size_t const hash3 = ZSTD_hash3Ptr(ip, hashLog3);
    assert(hashLog3 > 0);

    while(idx < target) {
        hashTable3[ZSTD_hash3Ptr(base+idx, hashLog3)] = idx;
        idx++;
    }

    return hashTable3[hash3];
}


/*-*************************************
*  Binary Tree search
***************************************/
/** ZSTD_insertBt1() : add one or multiple positions to tree.
 *  ip : assumed <= iend-8 .
 * @return : nb of positions added */
static U32 ZSTD_insertBt1(
                ZSTD_matchState_t* ms,
                const BYTE* const ip, const BYTE* const iend,
                U32 const mls, const int extDict)
{
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32*   const hashTable = ms->hashTable;
    U32    const hashLog = cParams->hashLog;
    size_t const h  = ZSTD_hashPtr(ip, hashLog, mls);
    U32*   const bt = ms->chainTable;
    U32    const btLog  = cParams->chainLog - 1;
    U32    const btMask = (1 << btLog) - 1;
    U32 matchIndex = hashTable[h];
    size_t commonLengthSmaller=0, commonLengthLarger=0;
    const BYTE* const base = ms->window.base;
    const BYTE* const dictBase = ms->window.dictBase;
    const U32 dictLimit = ms->window.dictLimit;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    const BYTE* match;
    const U32 current = (U32)(ip-base);
    const U32 btLow = btMask >= current ? 0 : current - btMask;
    U32* smallerPtr = bt + 2*(current&btMask);
    U32* largerPtr  = smallerPtr + 1;
    U32 dummy32;   /* to be nullified at the end */
    U32 const windowLow = ms->window.lowLimit;
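    /* note : positions below matchLow are never searched ; in particular, index 0 is excluded,
     * since 0 is also the hash table's "empty slot" value */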
    U32 const matchLow = windowLow ? windowLow : 1;
    U32 matchEndIdx = current+8+1;
    size_t bestLength = 8;
    U32 nbCompares = 1U << cParams->searchLog;
#ifdef ZSTD_C_PREDICT
    U32 predictedSmall = *(bt + 2*((current-1)&btMask) + 0);
    U32 predictedLarge = *(bt + 2*((current-1)&btMask) + 1);
    predictedSmall += (predictedSmall>0);
    predictedLarge += (predictedLarge>0);
#endif /* ZSTD_C_PREDICT */

    DEBUGLOG(8, "ZSTD_insertBt1 (%u)", current);

    assert(ip <= iend-8);   /* required for h calculation */
    hashTable[h] = current;   /* Update Hash Table */

    while (nbCompares-- && (matchIndex >= matchLow)) {
        U32* const nextPtr = bt + 2*(matchIndex & btMask);
        size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger);   /* guaranteed minimum nb of common bytes */
        assert(matchIndex < current);

#ifdef ZSTD_C_PREDICT   /* note : can create issues when hlog small <= 11 */
        const U32* predictPtr = bt + 2*((matchIndex-1) & btMask);   /* written this way, as bt is a roll buffer */
        if (matchIndex == predictedSmall) {
            /* no need to check length, result known */
            *smallerPtr = matchIndex;
            if (matchIndex <= btLow) { smallerPtr=&dummy32; break; }   /* beyond tree size, stop the search */
            smallerPtr = nextPtr+1;               /* new "smaller" => larger of match */
            matchIndex = nextPtr[1];              /* new matchIndex larger than previous (closer to current) */
            predictedSmall = predictPtr[1] + (predictPtr[1]>0);
            continue;
        }
        if (matchIndex == predictedLarge) {
            *largerPtr = matchIndex;
            if (matchIndex <= btLow) { largerPtr=&dummy32; break; }   /* beyond tree size, stop the search */
            largerPtr = nextPtr;
            matchIndex = nextPtr[0];
            predictedLarge = predictPtr[0] + (predictPtr[0]>0);
            continue;
        }
#endif

        if (!extDict || (matchIndex+matchLength >= dictLimit)) {
            assert(matchIndex+matchLength >= dictLimit);   /* might be wrong if actually extDict */
            match = base + matchIndex;
            matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend);
        } else {
            match = dictBase + matchIndex;
            matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iend, dictEnd, prefixStart);
            if (matchIndex+matchLength >= dictLimit)
                match = base + matchIndex;   /* to prepare for next usage of match[matchLength] */
        }

        if (matchLength > bestLength) {
            bestLength = matchLength;
            if (matchLength > matchEndIdx - matchIndex)
                matchEndIdx = matchIndex + (U32)matchLength;
        }

        if (ip+matchLength == iend) {   /* equal : no way to know if inf or sup */
            break;   /* drop, to guarantee consistency; miss a bit of compression, but other solutions can corrupt tree */
        }

        if (match[matchLength] < ip[matchLength]) {  /* necessarily within buffer */
            /* match is smaller than current */
            *smallerPtr = matchIndex;             /* update smaller idx */
            commonLengthSmaller = matchLength;    /* all smaller will now have at least this guaranteed common length */
            if (matchIndex <= btLow) { smallerPtr=&dummy32; break; }   /* beyond tree size, stop searching */
            smallerPtr = nextPtr+1;               /* new "candidate" => larger than match, which was smaller than target */
            matchIndex = nextPtr[1];              /* new matchIndex, larger than previous and closer to current */
        } else {
            /* match is larger than current */
            *largerPtr = matchIndex;
            commonLengthLarger = matchLength;
            if (matchIndex <= btLow) { largerPtr=&dummy32; break; }   /* beyond tree size, stop searching */
            largerPtr = nextPtr;
            matchIndex = nextPtr[0];
    }   }

    *smallerPtr = *largerPtr = 0;
    if (bestLength > 384) return MIN(192, (U32)(bestLength - 384));   /* speed optimization */
    assert(matchEndIdx > current + 8);
    return matchEndIdx - (current + 8);
}

FORCE_INLINE_TEMPLATE
void ZSTD_updateTree_internal(
                ZSTD_matchState_t* ms,
                const BYTE* const ip, const BYTE* const iend,
                const U32 mls, const ZSTD_dictMode_e dictMode)
{
    const BYTE* const base = ms->window.base;
    U32 const target = (U32)(ip - base);
    U32 idx = ms->nextToUpdate;
    DEBUGLOG(5, "ZSTD_updateTree_internal, from %u to %u (dictMode:%u)",
                idx, target, dictMode);

    while(idx < target)
        idx += ZSTD_insertBt1(ms, base+idx, iend, mls, dictMode == ZSTD_extDict);
    ms->nextToUpdate = target;
}

void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend) {
    ZSTD_updateTree_internal(ms, ip, iend, ms->cParams.searchLength, ZSTD_noDict);
}

FORCE_INLINE_TEMPLATE
U32 ZSTD_insertBtAndGetAllMatches (
                    ZSTD_matchState_t* ms,
                    const BYTE* const ip, const BYTE* const iLimit, const ZSTD_dictMode_e dictMode,
                    U32 rep[ZSTD_REP_NUM], U32 const ll0,
                    ZSTD_match_t* matches, const U32 lengthToBeat, U32 const mls /* template */)
{
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32 const sufficient_len = MIN(cParams->targetLength, ZSTD_OPT_NUM -1);
    const BYTE* const base = ms->window.base;
    U32 const current = (U32)(ip-base);
    U32 const hashLog = cParams->hashLog;
    U32 const minMatch = (mls==3) ? 3 : 4;
    U32* const hashTable = ms->hashTable;
    size_t const h  = ZSTD_hashPtr(ip, hashLog, mls);
    U32 matchIndex  = hashTable[h];
    U32* const bt   = ms->chainTable;
    U32 const btLog = cParams->chainLog - 1;
    U32 const btMask= (1U << btLog) - 1;
    size_t commonLengthSmaller=0, commonLengthLarger=0;
    const BYTE* const dictBase = ms->window.dictBase;
    U32 const dictLimit = ms->window.dictLimit;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    U32 const btLow = btMask >= current ? 0 : current - btMask;
    U32 const windowLow = ms->window.lowLimit;
    U32 const matchLow = windowLow ? windowLow : 1;
    U32* smallerPtr = bt + 2*(current&btMask);
    U32* largerPtr  = bt + 2*(current&btMask) + 1;
    U32 matchEndIdx = current+8+1;   /* farthest referenced position of any match => detects repetitive patterns */
    U32 dummy32;   /* to be nullified at the end */
    U32 mnum = 0;
    U32 nbCompares = 1U << cParams->searchLog;

    const ZSTD_matchState_t* dms = dictMode == ZSTD_dictMatchState ? ms->dictMatchState : NULL;
    const ZSTD_compressionParameters* const dmsCParams =
                                      dictMode == ZSTD_dictMatchState ? &dms->cParams : NULL;
    const BYTE* const dmsBase = dictMode == ZSTD_dictMatchState ? dms->window.base : NULL;
    const BYTE* const dmsEnd  = dictMode == ZSTD_dictMatchState ? dms->window.nextSrc : NULL;
    U32         const dmsHighLimit = dictMode == ZSTD_dictMatchState ? (U32)(dmsEnd - dmsBase) : 0;
    U32         const dmsLowLimit  = dictMode == ZSTD_dictMatchState ? dms->window.lowLimit : 0;
    U32         const dmsIndexDelta = dictMode == ZSTD_dictMatchState ? windowLow - dmsHighLimit : 0;
    U32         const dmsHashLog = dictMode == ZSTD_dictMatchState ? dmsCParams->hashLog : hashLog;
    U32         const dmsBtLog   = dictMode == ZSTD_dictMatchState ? dmsCParams->chainLog - 1 : btLog;
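    /* note : when searching a dictMatchState, the dictionary's own table geometry (hashLog / chainLog)
     * is used, as it may differ from the current context's parameters */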
    U32         const dmsBtMask  = dictMode == ZSTD_dictMatchState ? (1U << dmsBtLog) - 1 : 0;
    U32         const dmsBtLow   = dictMode == ZSTD_dictMatchState && dmsBtMask < dmsHighLimit - dmsLowLimit ? dmsHighLimit - dmsBtMask : dmsLowLimit;

    size_t bestLength = lengthToBeat-1;
    DEBUGLOG(8, "ZSTD_insertBtAndGetAllMatches: current=%u", current);

    /* check repCode */
    {   U32 const lastR = ZSTD_REP_NUM + ll0;
        U32 repCode;
        for (repCode = ll0; repCode < lastR; repCode++) {
            U32 const repOffset = (repCode==ZSTD_REP_NUM) ? (rep[0] - 1) : rep[repCode];
            U32 const repIndex = current - repOffset;
            U32 repLen = 0;
            assert(current >= dictLimit);
            if (repOffset-1 /* intentional overflow, discards 0 and -1 */ < current-dictLimit) {  /* equivalent to `current > repIndex >= dictLimit` */
                if (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(ip - repOffset, minMatch)) {
                    repLen = (U32)ZSTD_count(ip+minMatch, ip+minMatch-repOffset, iLimit) + minMatch;
                }
            } else {  /* repIndex < dictLimit || repIndex >= current */
                const BYTE* const repMatch = dictMode == ZSTD_dictMatchState ?
                                             dmsBase + repIndex - dmsIndexDelta :
                                             dictBase + repIndex;
                assert(current >= windowLow);
                if ( dictMode == ZSTD_extDict
                  && ( ((repOffset-1) /*intentional overflow*/ < current - windowLow)  /* equivalent to `current > repIndex >= windowLow` */
                     & (((U32)((dictLimit-1) - repIndex) >= 3) )  /* intentional overflow : do not test positions overlapping 2 memory segments */)
                  && (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) {
                    repLen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iLimit, dictEnd, prefixStart) + minMatch;
                }
                if (dictMode == ZSTD_dictMatchState
                  && ( ((repOffset-1) /*intentional overflow*/ < current - (dmsLowLimit + dmsIndexDelta))  /* equivalent to `current > repIndex >= dmsLowLimit` */
                     & ((U32)((dictLimit-1) - repIndex) >= 3) )  /* intentional overflow : do not test positions overlapping 2 memory segments */
                  && (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) {
                    repLen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iLimit, dmsEnd, prefixStart) + minMatch;
            }   }
            /* save longer solution */
            if (repLen > bestLength) {
                DEBUGLOG(8, "found repCode %u (ll0:%u, offset:%u) of length %u",
                            repCode, ll0, repOffset, repLen);
                bestLength = repLen;
                matches[mnum].off = repCode - ll0;
                matches[mnum].len = (U32)repLen;
                mnum++;
                if ( (repLen > sufficient_len)
                   | (ip+repLen == iLimit) ) {  /* best possible */
                    return mnum;
    }   }   }   }

    /* HC3 match finder */
    if ((mls == 3) /*static*/ && (bestLength < mls)) {
        U32 const matchIndex3 = ZSTD_insertAndFindFirstIndexHash3(ms, ip);
        if ((matchIndex3 >= matchLow)
          & (current - matchIndex3 < (1<<18)) /*heuristic : longer distance likely too expensive*/ ) {
            size_t mlen;
            if ((dictMode == ZSTD_noDict) /*static*/ || (dictMode == ZSTD_dictMatchState) /*static*/ || (matchIndex3 >= dictLimit)) {
                const BYTE* const match = base + matchIndex3;
                mlen = ZSTD_count(ip, match, iLimit);
            } else {
                const BYTE* const match = dictBase + matchIndex3;
                mlen = ZSTD_count_2segments(ip, match, iLimit, dictEnd, prefixStart);
            }

            /* save best solution */
            if (mlen >= mls /* == 3 > bestLength */) {
                DEBUGLOG(8, "found small match with hlog3, of length %u",
                            (U32)mlen);
                bestLength = mlen;
                assert(current > matchIndex3);
                assert(mnum==0);   /* no prior solution */
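                /* note : real matches are stored with their distance shifted by ZSTD_REP_MOVE,
                 * keeping the smallest `off` values reserved for repcodes */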
                matches[0].off = (current - matchIndex3) + ZSTD_REP_MOVE;
                matches[0].len = (U32)mlen;
                mnum = 1;
                if ( (mlen > sufficient_len) |
                     (ip+mlen == iLimit) ) {  /* best possible length */
                    ms->nextToUpdate = current+1;   /* skip insertion */
                    return 1;
                }
            }
        }
        /* no dictMatchState lookup: dicts don't have a populated HC3 table */
    }

    hashTable[h] = current;   /* Update Hash Table */

    while (nbCompares-- && (matchIndex >= matchLow)) {
        U32* const nextPtr = bt + 2*(matchIndex & btMask);
        size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger);   /* guaranteed minimum nb of common bytes */
        const BYTE* match;
        assert(current > matchIndex);

        if ((dictMode == ZSTD_noDict) || (dictMode == ZSTD_dictMatchState) || (matchIndex+matchLength >= dictLimit)) {
            assert(matchIndex+matchLength >= dictLimit);  /* ensure the condition is correct when !extDict */
            match = base + matchIndex;
            matchLength += ZSTD_count(ip+matchLength, match+matchLength, iLimit);
        } else {
            match = dictBase + matchIndex;
            matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iLimit, dictEnd, prefixStart);
            if (matchIndex+matchLength >= dictLimit)
                match = base + matchIndex;   /* prepare for match[matchLength] */
        }

        if (matchLength > bestLength) {
            DEBUGLOG(8, "found match of length %u at distance %u (offCode=%u)",
                    (U32)matchLength, current - matchIndex, current - matchIndex + ZSTD_REP_MOVE);
            assert(matchEndIdx > matchIndex);
            if (matchLength > matchEndIdx - matchIndex)
                matchEndIdx = matchIndex + (U32)matchLength;
            bestLength = matchLength;
            matches[mnum].off = (current - matchIndex) + ZSTD_REP_MOVE;
            matches[mnum].len = (U32)matchLength;
            mnum++;
            if ( (matchLength > ZSTD_OPT_NUM)
               | (ip+matchLength == iLimit) /* equal : no way to know if inf or sup */) {
                if (dictMode == ZSTD_dictMatchState) nbCompares = 0;   /* break should also skip searching dms */
                break;   /* drop, to preserve bt consistency (miss a little bit of compression) */
            }
        }

        if (match[matchLength] < ip[matchLength]) {
            /* match smaller than current */
            *smallerPtr = matchIndex;             /* update smaller idx */
            commonLengthSmaller = matchLength;    /* all smaller will now have at least this guaranteed common length */
            if (matchIndex <= btLow) { smallerPtr=&dummy32; break; }   /* beyond tree size, stop the search */
            smallerPtr = nextPtr+1;               /* new candidate => larger than match, which was smaller than current */
            matchIndex = nextPtr[1];              /* new matchIndex, larger than previous, closer to current */
        } else {
            *largerPtr = matchIndex;
            commonLengthLarger = matchLength;
            if (matchIndex <= btLow) { largerPtr=&dummy32; break; }   /* beyond tree size, stop the search */
            largerPtr = nextPtr;
            matchIndex = nextPtr[0];
    }   }

    *smallerPtr = *largerPtr = 0;

    if (dictMode == ZSTD_dictMatchState && nbCompares) {
        size_t const dmsH = ZSTD_hashPtr(ip, dmsHashLog, mls);
        U32 dictMatchIndex = dms->hashTable[dmsH];
        const U32* const dmsBt = dms->chainTable;
        commonLengthSmaller = commonLengthLarger = 0;
        while (nbCompares-- && (dictMatchIndex > dmsLowLimit)) {
            const U32* const nextPtr = dmsBt + 2*(dictMatchIndex & dmsBtMask);
            size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger);   /* guaranteed minimum nb of common bytes */
            const BYTE* match = dmsBase + dictMatchIndex;
            matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iLimit, dmsEnd, prefixStart);
            if (dictMatchIndex+matchLength >= dmsHighLimit)
                match = base + dictMatchIndex + dmsIndexDelta;   /* to prepare for next usage of match[matchLength] */

            if (matchLength > bestLength) {
                matchIndex = dictMatchIndex + dmsIndexDelta;
                DEBUGLOG(8, "found dms match of length %u at distance %u (offCode=%u)",
                        (U32)matchLength, current - matchIndex, current - matchIndex + ZSTD_REP_MOVE);
                if (matchLength > matchEndIdx - matchIndex)
                    matchEndIdx = matchIndex + (U32)matchLength;
                bestLength = matchLength;
                matches[mnum].off = (current - matchIndex) + ZSTD_REP_MOVE;
                matches[mnum].len = (U32)matchLength;
                mnum++;
                if ( (matchLength > ZSTD_OPT_NUM)
                   | (ip+matchLength == iLimit) /* equal : no way to know if inf or sup */) {
                    break;   /* drop, to guarantee consistency (miss a little bit of compression) */
                }
            }

            if (dictMatchIndex <= dmsBtLow) { break; }   /* beyond tree size, stop the search */
            if (match[matchLength] < ip[matchLength]) {
                commonLengthSmaller = matchLength;   /* all smaller will now have at least this guaranteed common length */
                dictMatchIndex = nextPtr[1];         /* new matchIndex larger than previous (closer to current) */
            } else {
                /* match is larger than current */
                commonLengthLarger = matchLength;
                dictMatchIndex = nextPtr[0];
            }
        }
    }

    assert(matchEndIdx > current+8);
    ms->nextToUpdate = matchEndIdx - 8;   /* skip repetitive patterns */
    return mnum;
}


FORCE_INLINE_TEMPLATE U32 ZSTD_BtGetAllMatches (
                        ZSTD_matchState_t* ms,
                        const BYTE* ip, const BYTE* const iHighLimit, const ZSTD_dictMode_e dictMode,
                        U32 rep[ZSTD_REP_NUM], U32 const ll0,
                        ZSTD_match_t* matches, U32 const lengthToBeat)
{
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32 const matchLengthSearch = cParams->searchLength;
    DEBUGLOG(8, "ZSTD_BtGetAllMatches");
    if (ip < ms->window.base + ms->nextToUpdate) return 0;   /* skipped area */
    ZSTD_updateTree_internal(ms, ip, iHighLimit, matchLengthSearch, dictMode);
    switch(matchLengthSearch)
    {
    case 3 : return ZSTD_insertBtAndGetAllMatches(ms, ip, iHighLimit, dictMode, rep, ll0, matches, lengthToBeat, 3);
    default :
    case 4 : return ZSTD_insertBtAndGetAllMatches(ms, ip, iHighLimit, dictMode, rep, ll0, matches, lengthToBeat, 4);
    case 5 : return ZSTD_insertBtAndGetAllMatches(ms, ip, iHighLimit, dictMode, rep, ll0, matches, lengthToBeat, 5);
    case 7 :
    case 6 : return ZSTD_insertBtAndGetAllMatches(ms, ip, iHighLimit, dictMode, rep, ll0, matches, lengthToBeat, 6);
    }
}


/*-*******************************
*  Optimal parser
*********************************/
typedef struct repcodes_s {
    U32 rep[3];
} repcodes_t;

static repcodes_t ZSTD_updateRep(U32 const rep[3], U32 const offset, U32 const ll0)
{
    repcodes_t newReps;
    if (offset >= ZSTD_REP_NUM) {  /* full offset */
        newReps.rep[2] = rep[1];
        newReps.rep[1] = rep[0];
        newReps.rep[0] = offset - ZSTD_REP_MOVE;
    } else {   /* repcode */
        U32 const repCode = offset + ll0;
        if (repCode > 0) {  /* note : if repCode==0, no change */
            U32 const currentOffset = (repCode==ZSTD_REP_NUM) ? (rep[0] - 1) : rep[repCode];
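            /* rotate the repcode history : the selected offset moves to the front ;
             * repCode==ZSTD_REP_NUM (only reachable when ll0==1) means "rep[0] - 1" */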
            newReps.rep[2] = (repCode >= 2) ? rep[1] : rep[2];
            newReps.rep[1] = rep[0];
            newReps.rep[0] = currentOffset;
        } else {   /* repCode == 0 */
            memcpy(&newReps, rep, sizeof(newReps));
        }
    }
    return newReps;
}


static U32 ZSTD_totalLen(ZSTD_optimal_t sol)
{
    return sol.litlen + sol.mlen;
}

FORCE_INLINE_TEMPLATE size_t
ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
                               seqStore_t* seqStore,
                               U32 rep[ZSTD_REP_NUM],
                               const void* src, size_t srcSize,
                               const int optLevel, const ZSTD_dictMode_e dictMode)
{
    optState_t* const optStatePtr = &ms->opt;
    const BYTE* const istart = (const BYTE*)src;
    const BYTE* ip = istart;
    const BYTE* anchor = istart;
    const BYTE* const iend = istart + srcSize;
    const BYTE* const ilimit = iend - 8;
    const BYTE* const base = ms->window.base;
    const BYTE* const prefixStart = base + ms->window.dictLimit;
    const ZSTD_compressionParameters* const cParams = &ms->cParams;

    U32 const sufficient_len = MIN(cParams->targetLength, ZSTD_OPT_NUM -1);
    U32 const minMatch = (cParams->searchLength == 3) ? 3 : 4;

    ZSTD_optimal_t* const opt = optStatePtr->priceTable;
    ZSTD_match_t* const matches = optStatePtr->matchTable;
    ZSTD_optimal_t lastSequence;

    /* init */
    DEBUGLOG(5, "ZSTD_compressBlock_opt_generic");
    assert(optLevel <= 2);
    ms->nextToUpdate3 = ms->nextToUpdate;
    ZSTD_rescaleFreqs(optStatePtr, (const BYTE*)src, srcSize, optLevel);
    ip += (ip==prefixStart);

    /* Match Loop */
    while (ip < ilimit) {
        U32 cur, last_pos = 0;

        /* find first match */
        {   U32 const litlen = (U32)(ip - anchor);
            U32 const ll0 = !litlen;
            U32 const nbMatches = ZSTD_BtGetAllMatches(ms, ip, iend, dictMode, rep, ll0, matches, minMatch);
            if (!nbMatches) { ip++; continue; }

            /* initialize opt[0] */
            { U32 i ; for (i=0; i<ZSTD_REP_NUM; i++) opt[0].rep[i] = rep[i]; }
            opt[0].mlen = 0;  /* means is_a_literal */
            opt[0].litlen = litlen;
            opt[0].price = ZSTD_literalsContribution(anchor, litlen, optStatePtr, optLevel);

            /* large match -> immediate encoding */
            {   U32 const maxML = matches[nbMatches-1].len;
                U32 const maxOffset = matches[nbMatches-1].off;
                DEBUGLOG(6, "found %u matches of maxLength=%u and maxOffCode=%u at cPos=%u => start new series",
                            nbMatches, maxML, maxOffset, (U32)(ip-prefixStart));

                if (maxML > sufficient_len) {
                    lastSequence.litlen = litlen;
                    lastSequence.mlen = maxML;
                    lastSequence.off = maxOffset;
                    DEBUGLOG(6, "large match (%u>%u), immediate encoding",
                                maxML, sufficient_len);
                    cur = 0;
                    last_pos = ZSTD_totalLen(lastSequence);
                    goto _shortestPath;
            }   }

            /* set prices for first matches, starting at position == 0 */
            {   U32 const literalsPrice = opt[0].price + ZSTD_litLengthPrice(0, optStatePtr, optLevel);
                U32 pos;
                U32 matchNb;
                for (pos = 1; pos < minMatch; pos++) {
                    opt[pos].price = ZSTD_MAX_PRICE;   /* mlen, litlen and price will be fixed during forward scanning */
                }
                for (matchNb = 0; matchNb < nbMatches; matchNb++) {
                    U32 const offset = matches[matchNb].off;
                    U32 const end = matches[matchNb].len;
                    repcodes_t const repHistory = ZSTD_updateRep(rep, offset, ll0);
                    for ( ; pos <= end ; pos++ ) {
                        U32 const matchPrice = ZSTD_getMatchPrice(offset, pos, optStatePtr, optLevel);
                        U32 const sequencePrice = literalsPrice + matchPrice;
                        DEBUGLOG(7, "rPos:%u => set initial price : %.2f",
                                    pos, ZSTD_fCost(sequencePrice));
                        opt[pos].mlen = pos;
                        opt[pos].off = offset;
                        opt[pos].litlen = litlen;
                        opt[pos].price = sequencePrice;
                        ZSTD_STATIC_ASSERT(sizeof(opt[pos].rep) == sizeof(repHistory));
                        memcpy(opt[pos].rep, &repHistory, sizeof(repHistory));
                }   }
                last_pos = pos-1;
            }
        }

        /* check further positions */
        for (cur = 1; cur <= last_pos; cur++) {
            const BYTE* const inr = ip + cur;
            assert(cur < ZSTD_OPT_NUM);
            DEBUGLOG(7, "cPos:%zi==rPos:%u", inr-istart, cur)

            /* Fix current position with one literal if cheaper */
            {   U32 const litlen = (opt[cur-1].mlen == 0) ? opt[cur-1].litlen + 1 : 1;
                int const price = opt[cur-1].price
                                + ZSTD_rawLiteralsCost(ip+cur-1, 1, optStatePtr, optLevel)
                                + ZSTD_litLengthPrice(litlen, optStatePtr, optLevel)
                                - ZSTD_litLengthPrice(litlen-1, optStatePtr, optLevel);
                assert(price < 1000000000);   /* overflow check */
                if (price <= opt[cur].price) {
                    DEBUGLOG(7, "cPos:%zi==rPos:%u : better price (%.2f<=%.2f) using literal (ll==%u) (hist:%u,%u,%u)",
                                inr-istart, cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price), litlen,
                                opt[cur-1].rep[0], opt[cur-1].rep[1], opt[cur-1].rep[2]);
                    opt[cur].mlen = 0;
                    opt[cur].off = 0;
                    opt[cur].litlen = litlen;
                    opt[cur].price = price;
                    memcpy(opt[cur].rep, opt[cur-1].rep, sizeof(opt[cur].rep));
                } else {
                    DEBUGLOG(7, "cPos:%zi==rPos:%u : literal would cost more (%.2f>%.2f) (hist:%u,%u,%u)",
                                inr-istart, cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price),
                                opt[cur].rep[0], opt[cur].rep[1], opt[cur].rep[2]);
                }
            }

            /* last match must start at a minimum distance of 8 from oend */
            if (inr > ilimit) continue;

            if (cur == last_pos) break;

            if ( (optLevel==0) /*static_test*/
              && (opt[cur+1].price <= opt[cur].price + (BITCOST_MULTIPLIER/2)) ) {
                DEBUGLOG(7, "move to next rPos:%u : price is <=", cur+1);
                continue;   /* skip unpromising positions; about ~+6% speed, -0.01 ratio */
            }

            {   U32 const ll0 = (opt[cur].mlen != 0);
                U32 const litlen = (opt[cur].mlen == 0) ? opt[cur].litlen : 0;
                U32 const previousPrice = opt[cur].price;
                U32 const basePrice = previousPrice + ZSTD_litLengthPrice(0, optStatePtr, optLevel);
                U32 const nbMatches = ZSTD_BtGetAllMatches(ms, inr, iend, dictMode, opt[cur].rep, ll0, matches, minMatch);
                U32 matchNb;
                if (!nbMatches) {
                    DEBUGLOG(7, "rPos:%u : no match found", cur);
                    continue;
                }

                {   U32 const maxML = matches[nbMatches-1].len;
                    DEBUGLOG(7, "cPos:%zi==rPos:%u, found %u matches, of maxLength=%u",
                                inr-istart, cur, nbMatches, maxML);

                    if ( (maxML > sufficient_len)
                      || (cur + maxML >= ZSTD_OPT_NUM) ) {
                        lastSequence.mlen = maxML;
                        lastSequence.off = matches[nbMatches-1].off;
                        lastSequence.litlen = litlen;
                        cur -= (opt[cur].mlen==0) ? opt[cur].litlen : 0;  /* last sequence is actually only literals, fix cur to last match - note : may underflow, in which case, it's first sequence, and it's okay */
                        last_pos = cur + ZSTD_totalLen(lastSequence);
                        if (cur > ZSTD_OPT_NUM) cur = 0;   /* underflow => first match */
                        goto _shortestPath;
                }   }

                /* set prices using matches found at position == cur */
                for (matchNb = 0; matchNb < nbMatches; matchNb++) {
                    U32 const offset = matches[matchNb].off;
                    repcodes_t const repHistory = ZSTD_updateRep(opt[cur].rep, offset, ll0);
                    U32 const lastML = matches[matchNb].len;
                    U32 const startML = (matchNb>0) ? matches[matchNb-1].len+1 : minMatch;
                    U32 mlen;

                    DEBUGLOG(7, "testing match %u => offCode=%4u, mlen=%2u, llen=%2u",
                                matchNb, matches[matchNb].off, lastML, litlen);

                    for (mlen = lastML; mlen >= startML; mlen--) {  /* scan downward */
                        U32 const pos = cur + mlen;
                        int const price = basePrice + ZSTD_getMatchPrice(offset, mlen, optStatePtr, optLevel);

                        if ((pos > last_pos) || (price < opt[pos].price)) {
                            DEBUGLOG(7, "rPos:%u (ml=%2u) => new better price (%.2f<%.2f)",
                                        pos, mlen, ZSTD_fCost(price), ZSTD_fCost(opt[pos].price));
                            while (last_pos < pos) { opt[last_pos+1].price = ZSTD_MAX_PRICE; last_pos++; }   /* fill empty positions */
                            opt[pos].mlen = mlen;
                            opt[pos].off = offset;
                            opt[pos].litlen = litlen;
                            opt[pos].price = price;
                            ZSTD_STATIC_ASSERT(sizeof(opt[pos].rep) == sizeof(repHistory));
                            memcpy(opt[pos].rep, &repHistory, sizeof(repHistory));
                        } else {
                            DEBUGLOG(7, "rPos:%u (ml=%2u) => new price is worse (%.2f>=%.2f)",
                                        pos, mlen, ZSTD_fCost(price), ZSTD_fCost(opt[pos].price));
                            if (optLevel==0) break;   /* early update abort; gets ~+10% speed for about -0.01 ratio loss */
                        }
            }   }   }
        }   /* for (cur = 1; cur <= last_pos; cur++) */

        lastSequence = opt[last_pos];
        cur = last_pos > ZSTD_totalLen(lastSequence) ? last_pos - ZSTD_totalLen(lastSequence) : 0;  /* single sequence, and it starts before `ip` */
        assert(cur < ZSTD_OPT_NUM);   /* control overflow */

_shortestPath:   /* cur, last_pos, best_mlen, best_off have to be set */
        assert(opt[0].mlen == 0);

        {   U32 const storeEnd = cur + 1;
            U32 storeStart = storeEnd;
            U32 seqPos = cur;

            DEBUGLOG(6, "start reverse traversal (last_pos:%u, cur:%u)",
                        last_pos, cur); (void)last_pos;
            assert(storeEnd < ZSTD_OPT_NUM);
            DEBUGLOG(6, "last sequence copied into pos=%u (llen=%u,mlen=%u,ofc=%u)",
                        storeEnd, lastSequence.litlen, lastSequence.mlen, lastSequence.off);
            opt[storeEnd] = lastSequence;
            while (seqPos > 0) {
                U32 const backDist = ZSTD_totalLen(opt[seqPos]);
                storeStart--;
                DEBUGLOG(6, "sequence from rPos=%u copied into pos=%u (llen=%u,mlen=%u,ofc=%u)",
                            seqPos, storeStart, opt[seqPos].litlen, opt[seqPos].mlen, opt[seqPos].off);
                opt[storeStart] = opt[seqPos];
                seqPos = (seqPos > backDist) ? seqPos - backDist : 0;
            }

            /* save sequences */
            DEBUGLOG(6, "sending selected sequences into seqStore")
            {   U32 storePos;
                for (storePos=storeStart; storePos <= storeEnd; storePos++) {
                    U32 const llen = opt[storePos].litlen;
                    U32 const mlen = opt[storePos].mlen;
                    U32 const offCode = opt[storePos].off;
                    U32 const advance = llen + mlen;
                    DEBUGLOG(6, "considering seq starting at %zi, llen=%u, mlen=%u",
                                anchor - istart, llen, mlen);

                    if (mlen==0) {  /* only literals => must be last "sequence", actually starting a new stream of sequences */
                        assert(storePos == storeEnd);   /* must be last sequence */
                        ip = anchor + llen;     /* last "sequence" is a bunch of literals => don't progress anchor */
                        continue;   /* will finish */
                    }

                    /* repcodes update : like ZSTD_updateRep(), but update in place */
                    if (offCode >= ZSTD_REP_NUM) {  /* full offset */
                        rep[2] = rep[1];
                        rep[1] = rep[0];
                        rep[0] = offCode - ZSTD_REP_MOVE;
                    } else {   /* repcode */
                        U32 const repCode = offCode + (llen==0);
                        if (repCode) {  /* note : if repCode==0, no change */
                            U32 const currentOffset = (repCode==ZSTD_REP_NUM) ? (rep[0] - 1) : rep[repCode];
                            if (repCode >= 2) rep[2] = rep[1];
                            rep[1] = rep[0];
                            rep[0] = currentOffset;
                    }   }

                    assert(anchor + llen <= iend);
                    ZSTD_updateStats(optStatePtr, llen, anchor, offCode, mlen);
                    ZSTD_storeSeq(seqStore, llen, anchor, offCode, mlen-MINMATCH);
                    anchor += advance;
                    ip = anchor;
            }   }
            ZSTD_setBasePrices(optStatePtr, optLevel);
        }

    }   /* while (ip < ilimit) */

    /* Return the last literals size */
    return iend - anchor;
}


size_t ZSTD_compressBlock_btopt(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        const void* src, size_t srcSize)
{
    DEBUGLOG(5, "ZSTD_compressBlock_btopt");
    return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 0 /*optLevel*/, ZSTD_noDict);
}


/* used in 2-pass strategy */
static U32 ZSTD_upscaleStat(U32* table, U32 lastEltIndex, int bonus)
{
    U32 s, sum=0;
    assert(ZSTD_FREQ_DIV+bonus > 0);
    for (s=0; s<=lastEltIndex; s++) {
        table[s] <<= ZSTD_FREQ_DIV+bonus;
        table[s]--;
        sum += table[s];
    }
    return sum;
}

/* used in 2-pass strategy */
MEM_STATIC void ZSTD_upscaleStats(optState_t* optPtr)
{
    optPtr->litSum = ZSTD_upscaleStat(optPtr->litFreq, MaxLit, 0);
    optPtr->litLengthSum = ZSTD_upscaleStat(optPtr->litLengthFreq, MaxLL, 1);
    optPtr->matchLengthSum = ZSTD_upscaleStat(optPtr->matchLengthFreq, MaxML, 1);
    optPtr->offCodeSum = ZSTD_upscaleStat(optPtr->offCodeFreq, MaxOff, 1);
}

size_t ZSTD_compressBlock_btultra(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        const void* src, size_t srcSize)
{
    DEBUGLOG(5, "ZSTD_compressBlock_btultra (srcSize=%zu)", srcSize);
#if 0
    /* 2-pass strategy (disabled)
     * this strategy makes a first pass over first block to collect statistics
     * and seed next round's statistics with it.
     * The compression ratio gain is generally small (~0.5% on first block),
     * the cost is 2x cpu time on first block. */
    assert(srcSize <= ZSTD_BLOCKSIZE_MAX);
    if ( (ms->opt.litLengthSum==0)   /* first block */
      && (seqStore->sequences == seqStore->sequencesStart)   /* no ldm */
      && (ms->window.dictLimit == ms->window.lowLimit) ) {   /* no dictionary */
        U32 tmpRep[ZSTD_REP_NUM];
        DEBUGLOG(5, "ZSTD_compressBlock_btultra: first block: collecting statistics");
        assert(ms->nextToUpdate >= ms->window.dictLimit
            && ms->nextToUpdate <= ms->window.dictLimit + 1);
        memcpy(tmpRep, rep, sizeof(tmpRep));
        ZSTD_compressBlock_opt_generic(ms, seqStore, tmpRep, src, srcSize, 2 /*optLevel*/, ZSTD_noDict);   /* generate stats into ms->opt */
        ZSTD_resetSeqStore(seqStore);
        /* invalidate first scan from history */
        ms->window.base -= srcSize;
        ms->window.dictLimit += (U32)srcSize;
        ms->window.lowLimit = ms->window.dictLimit;
        ms->nextToUpdate = ms->window.dictLimit;
        ms->nextToUpdate3 = ms->window.dictLimit;
        /* reinforce weight of collected statistics */
        ZSTD_upscaleStats(&ms->opt);
    }
#endif
    return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /*optLevel*/, ZSTD_noDict);
}

size_t ZSTD_compressBlock_btopt_dictMatchState(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        const void* src, size_t srcSize)
{
    return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 0 /*optLevel*/, ZSTD_dictMatchState);
}

size_t ZSTD_compressBlock_btultra_dictMatchState(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        const void* src, size_t srcSize)
{
    return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /*optLevel*/, ZSTD_dictMatchState);
}

size_t ZSTD_compressBlock_btopt_extDict(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        const void* src, size_t srcSize)
{
    return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 0 /*optLevel*/, ZSTD_extDict);
}

size_t ZSTD_compressBlock_btultra_extDict(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        const void* src, size_t srcSize)
{
    return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /*optLevel*/, ZSTD_extDict);
}