/*-*************************************
*  Dependencies
***************************************/
#include <stdio.h>  /* fprintf */
#include <stdlib.h> /* malloc, free, qsort */
#include <string.h> /* memset */
#include <time.h>   /* clock */

#include "mem.h" /* read */
#include "pool.h"
#include "threading.h"
#include "cover.h"
#include "zstd_internal.h" /* includes zstd.h */
#ifndef ZDICT_STATIC_LINKING_ONLY
#define ZDICT_STATIC_LINKING_ONLY
#endif
#include "zdict.h"


/*-*************************************
*  Constants
***************************************/
#define FASTCOVER_MAX_SAMPLES_SIZE (sizeof(size_t) == 8 ? ((unsigned)-1) : ((unsigned)1 GB))
#define FASTCOVER_MAX_F 31
#define FASTCOVER_MAX_ACCEL 10
#define DEFAULT_SPLITPOINT 0.75
#define DEFAULT_F 20
#define DEFAULT_ACCEL 1


/*-*************************************
*  Console display
***************************************/
static int g_displayLevel = 2;
#define DISPLAY(...)                                                           \
  {                                                                            \
    fprintf(stderr, __VA_ARGS__);                                              \
    fflush(stderr);                                                            \
  }
#define LOCALDISPLAYLEVEL(displayLevel, l, ...)                                \
  if (displayLevel >= l) {                                                     \
    DISPLAY(__VA_ARGS__);                                                      \
  } /* 0 : no display;   1: errors;   2: default;  3: details;  4: debug */
#define DISPLAYLEVEL(l, ...) LOCALDISPLAYLEVEL(g_displayLevel, l, __VA_ARGS__)

#define LOCALDISPLAYUPDATE(displayLevel, l, ...)                               \
  if (displayLevel >= l) {                                                     \
    if ((clock() - g_time > refreshRate) || (displayLevel >= 4)) {             \
      g_time = clock();                                                        \
      DISPLAY(__VA_ARGS__);                                                    \
    }                                                                          \
  }
#define DISPLAYUPDATE(l, ...) LOCALDISPLAYUPDATE(g_displayLevel, l, __VA_ARGS__)
static const clock_t refreshRate = CLOCKS_PER_SEC * 15 / 100;
static clock_t g_time = 0;


/*-*************************************
* Hash Functions
***************************************/
static const U64 prime6bytes = 227718039650203ULL;
static size_t ZSTD_hash6(U64 u, U32 h) { return (size_t)(((u  << (64-48)) * prime6bytes) >> (64-h)) ; }
static size_t ZSTD_hash6Ptr(const void* p, U32 h) { return ZSTD_hash6(MEM_readLE64(p), h); }

static const U64 prime8bytes = 0xCF1BBCDCB7A56463ULL;
static size_t ZSTD_hash8(U64 u, U32 h) { return (size_t)(((u) * prime8bytes) >> (64-h)) ; }
static size_t ZSTD_hash8Ptr(const void* p, U32 h) { return ZSTD_hash8(MEM_readLE64(p), h); }

/**
 * Hash the d-byte value pointed to by p and reduce it mod 2^f
 */
static size_t FASTCOVER_hashPtrToIndex(const void* p, U32 h, unsigned d) {
  if (d == 6) {
    return ZSTD_hash6Ptr(p, h) & (((size_t)1 << h) - 1);
  }
  return ZSTD_hash8Ptr(p, h) & (((size_t)1 << h) - 1);
}
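
/* Worked example: with the default f = 20, the mask ((size_t)1 << 20) - 1 keeps
 * the low 20 bits of the hash, so the returned index always falls in
 * [0, 2^20 - 1] and can be used directly as a slot in the 2^20-entry frequency
 * table allocated by FASTCOVER_ctx_init(). */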


/*-*************************************
* Acceleration
***************************************/
typedef struct {
  unsigned finalize;    /* Percentage of training samples used for ZDICT_finalizeDictionary */
  unsigned skip;        /* Number of dmers skipped between each dmer counted in computeFrequency */
} FASTCOVER_accel_t;


static const FASTCOVER_accel_t FASTCOVER_defaultAccelParameters[FASTCOVER_MAX_ACCEL+1] = {
  { 100, 0 },   /* accel = 0, should not happen because accel = 0 defaults to accel = 1 */
  { 100, 0 },   /* accel = 1 */
  { 50, 1 },   /* accel = 2 */
  { 34, 2 },   /* accel = 3 */
  { 25, 3 },   /* accel = 4 */
  { 20, 4 },   /* accel = 5 */
  { 17, 5 },   /* accel = 6 */
  { 14, 6 },   /* accel = 7 */
  { 13, 7 },   /* accel = 8 */
  { 11, 8 },   /* accel = 9 */
  { 10, 9 },   /* accel = 10 */
};
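
/* Example: accel = 2 selects { 50, 1 }, i.e. ZDICT_finalizeDictionary() is fed
 * 50% of the training samples and FASTCOVER_computeFrequency() counts every
 * second dmer (skip = 1), roughly halving both phases relative to accel = 1. */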


/*-*************************************
* Context
***************************************/
typedef struct {
  const BYTE *samples;
  size_t *offsets;
  const size_t *samplesSizes;
  size_t nbSamples;
  size_t nbTrainSamples;
  size_t nbTestSamples;
  size_t nbDmers;
  U32 *freqs;
  unsigned d;
  unsigned f;
  FASTCOVER_accel_t accelParams;
} FASTCOVER_ctx_t;


/*-*************************************
*  Helper functions
***************************************/
/**
 * Selects the best segment in an epoch.
 * Segments are scored according to the function:
 *
 * Let F(d) be the frequency of all dmers with hash value d.
 * Let S_i be the hash value of the dmer at position i of segment S, which has length k.
 *
 *     Score(S) = F(S_1) + F(S_2) + ... + F(S_{k-d+1})
 *
 * Once a dmer with hash value d is added to the dictionary we set F(d) = 0.
 */
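/*
 * Worked example (assuming k = 16 and d = 8): each candidate segment spans
 * k - d + 1 = 9 dmer positions. While the window slides, freqs[idx] is added
 * to the score only when segmentFreqs[idx] goes from 0 to 1, and subtracted
 * only when it drops back to 0, so every distinct hash value is counted once
 * per segment regardless of how often it repeats inside the window.
 */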
static COVER_segment_t FASTCOVER_selectSegment(const FASTCOVER_ctx_t *ctx,
                                              U32 *freqs, U32 begin, U32 end,
                                              ZDICT_cover_params_t parameters,
                                              U16* segmentFreqs) {
  /* Constants */
  const U32 k = parameters.k;
  const U32 d = parameters.d;
  const U32 f = ctx->f;
  const U32 dmersInK = k - d + 1;

  /* Try each segment (activeSegment) and save the best (bestSegment) */
  COVER_segment_t bestSegment = {0, 0, 0};
  COVER_segment_t activeSegment;

  /* Reset the active segment: it starts empty at the beginning of the epoch. */
  activeSegment.begin = begin;
  activeSegment.end = begin;
  activeSegment.score = 0;

  /* Slide the activeSegment through the whole epoch.
   * Save the best segment in bestSegment.
   */
  while (activeSegment.end < end) {
    /* Get hash value of current dmer */
    const size_t idx = FASTCOVER_hashPtrToIndex(ctx->samples + activeSegment.end, f, d);

    /* Add frequency of this index to score if this is the first occurrence of index in active segment */
    if (segmentFreqs[idx] == 0) {
      activeSegment.score += freqs[idx];
    }
    /* Increment end of segment and segmentFreqs */
    activeSegment.end += 1;
    segmentFreqs[idx] += 1;
    /* If the window is now too large, drop the first position */
    if (activeSegment.end - activeSegment.begin == dmersInK + 1) {
      /* Get hash value of the dmer to be eliminated from active segment */
      const size_t delIndex = FASTCOVER_hashPtrToIndex(ctx->samples + activeSegment.begin, f, d);
      segmentFreqs[delIndex] -= 1;
      /* Subtract frequency of this index from score if this is the last occurrence of this index in active segment */
      if (segmentFreqs[delIndex] == 0) {
        activeSegment.score -= freqs[delIndex];
      }
      /* Increment start of segment */
      activeSegment.begin += 1;
    }

    /* If this segment is the best so far save it */
    if (activeSegment.score > bestSegment.score) {
      bestSegment = activeSegment;
    }
  }

  /* Zero out the rest of the segmentFreqs array */
  while (activeSegment.begin < end) {
    const size_t delIndex = FASTCOVER_hashPtrToIndex(ctx->samples + activeSegment.begin, f, d);
    segmentFreqs[delIndex] -= 1;
    activeSegment.begin += 1;
  }

  {
    /* Zero the frequency of the hash value of each dmer covered by the chosen segment. */
    U32 pos;
    for (pos = bestSegment.begin; pos != bestSegment.end; ++pos) {
      const size_t i = FASTCOVER_hashPtrToIndex(ctx->samples + pos, f, d);
      freqs[i] = 0;
    }
  }

  return bestSegment;
}


static int FASTCOVER_checkParameters(ZDICT_cover_params_t parameters,
                                     size_t maxDictSize, unsigned f,
                                     unsigned accel) {
  /* k, d, and f are required parameters */
  if (parameters.d == 0 || parameters.k == 0) {
    return 0;
  }
  /* d has to be 6 or 8 */
  if (parameters.d != 6 && parameters.d != 8) {
    return 0;
  }
  /* k <= maxDictSize */
  if (parameters.k > maxDictSize) {
    return 0;
  }
  /* d <= k */
  if (parameters.d > parameters.k) {
    return 0;
  }
  /* 0 < f <= FASTCOVER_MAX_F */
  if (f > FASTCOVER_MAX_F || f == 0) {
    return 0;
  }
  /* 0 < splitPoint <= 1 */
  if (parameters.splitPoint <= 0 || parameters.splitPoint > 1) {
    return 0;
  }
  /* 0 < accel <= 10 */
  if (accel > 10 || accel == 0) {
    return 0;
  }
  return 1;
}


/**
 * Clean up a context initialized with `FASTCOVER_ctx_init()`.
 */
static void
FASTCOVER_ctx_destroy(FASTCOVER_ctx_t* ctx)
{
    if (!ctx) return;

    free(ctx->freqs);
    ctx->freqs = NULL;

    free(ctx->offsets);
    ctx->offsets = NULL;
}


/**
 * Calculate the frequency of the hash value of each dmer in ctx->samples
 */
static void
FASTCOVER_computeFrequency(U32* freqs, const FASTCOVER_ctx_t* ctx)
{
    const unsigned f = ctx->f;
    const unsigned d = ctx->d;
    const unsigned skip = ctx->accelParams.skip;
    const unsigned readLength = MAX(d, 8);
    size_t i;
    assert(ctx->nbTrainSamples >= 5);
    assert(ctx->nbTrainSamples <= ctx->nbSamples);
    for (i = 0; i < ctx->nbTrainSamples; i++) {
        size_t start = ctx->offsets[i];  /* start of current dmer */
        size_t const currSampleEnd = ctx->offsets[i+1];
        while (start + readLength <= currSampleEnd) {
            const size_t dmerIndex = FASTCOVER_hashPtrToIndex(ctx->samples + start, f, d);
            freqs[dmerIndex]++;
            start = start + skip + 1;
        }
    }
}


/**
 * Prepare a context for dictionary building.
 * The context is only dependent on the parameter `d` and can be used multiple
 * times.
 * Returns 1 on success or zero on error.
 * The context must be destroyed with `FASTCOVER_ctx_destroy()`.
 */
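/*
 * Example of the train/test split computed below: with nbSamples = 100 and
 * splitPoint = 0.75, the first 75 samples become the training set used to
 * build frequencies and the remaining 25 become the testing set; with
 * splitPoint = 1.0 all 100 samples are used for both roles.
 */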
static int
FASTCOVER_ctx_init(FASTCOVER_ctx_t* ctx,
                   const void* samplesBuffer,
                   const size_t* samplesSizes, unsigned nbSamples,
                   unsigned d, double splitPoint, unsigned f,
                   FASTCOVER_accel_t accelParams)
{
    const BYTE* const samples = (const BYTE*)samplesBuffer;
    const size_t totalSamplesSize = COVER_sum(samplesSizes, nbSamples);
    /* Split samples into testing and training sets */
    const unsigned nbTrainSamples = splitPoint < 1.0 ? (unsigned)((double)nbSamples * splitPoint) : nbSamples;
    const unsigned nbTestSamples = splitPoint < 1.0 ? nbSamples - nbTrainSamples : nbSamples;
    const size_t trainingSamplesSize = splitPoint < 1.0 ? COVER_sum(samplesSizes, nbTrainSamples) : totalSamplesSize;
    const size_t testSamplesSize = splitPoint < 1.0 ? COVER_sum(samplesSizes + nbTrainSamples, nbTestSamples) : totalSamplesSize;

    /* Checks */
    if (totalSamplesSize < MAX(d, sizeof(U64)) ||
        totalSamplesSize >= (size_t)FASTCOVER_MAX_SAMPLES_SIZE) {
        DISPLAYLEVEL(1, "Total samples size is too large (%u MB), maximum size is %u MB\n",
                    (unsigned)(totalSamplesSize >> 20), (FASTCOVER_MAX_SAMPLES_SIZE >> 20));
        return 0;
    }

    /* Check if there are at least 5 training samples */
    if (nbTrainSamples < 5) {
        DISPLAYLEVEL(1, "Total number of training samples is %u and is invalid\n", nbTrainSamples);
        return 0;
    }

    /* Check if there is at least one testing sample */
    if (nbTestSamples < 1) {
        DISPLAYLEVEL(1, "Total number of testing samples is %u and is invalid.\n", nbTestSamples);
        return 0;
    }

    /* Zero the context */
    memset(ctx, 0, sizeof(*ctx));
    DISPLAYLEVEL(2, "Training on %u samples of total size %u\n", nbTrainSamples,
                    (unsigned)trainingSamplesSize);
    DISPLAYLEVEL(2, "Testing on %u samples of total size %u\n", nbTestSamples,
                    (unsigned)testSamplesSize);

    ctx->samples = samples;
    ctx->samplesSizes = samplesSizes;
    ctx->nbSamples = nbSamples;
    ctx->nbTrainSamples = nbTrainSamples;
    ctx->nbTestSamples = nbTestSamples;
    ctx->nbDmers = trainingSamplesSize - MAX(d, sizeof(U64)) + 1;
    ctx->d = d;
    ctx->f = f;
    ctx->accelParams = accelParams;

    /* The offsets of each file */
    ctx->offsets = (size_t*)calloc((nbSamples + 1), sizeof(size_t));
    if (ctx->offsets == NULL) {
        DISPLAYLEVEL(1, "Failed to allocate scratch buffers \n");
        FASTCOVER_ctx_destroy(ctx);
        return 0;
    }

    /* Fill offsets from the samplesSizes */
    {   U32 i;
        ctx->offsets[0] = 0;
        assert(nbSamples >= 5);
        for (i = 1; i <= nbSamples; ++i) {
            ctx->offsets[i] = ctx->offsets[i - 1] + samplesSizes[i - 1];
        }
    }

    /* Initialize frequency array of size 2^f */
    ctx->freqs = (U32*)calloc(((U64)1 << f), sizeof(U32));
    if (ctx->freqs == NULL) {
        DISPLAYLEVEL(1, "Failed to allocate frequency table \n");
        FASTCOVER_ctx_destroy(ctx);
        return 0;
    }
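
    /* Example memory footprint of the table above: (2^f) * sizeof(U32) bytes,
     * i.e. 4 MiB for the default f = 20 and 32 MiB for f = 23; larger f values
     * trade memory for fewer hash collisions between distinct dmers. */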

    DISPLAYLEVEL(2, "Computing frequencies\n");
    FASTCOVER_computeFrequency(ctx->freqs, ctx);

    return 1;
}


/**
 * Given the prepared context build the dictionary.
 */
static size_t
FASTCOVER_buildDictionary(const FASTCOVER_ctx_t* ctx,
                          U32* freqs,
                          void* dictBuffer, size_t dictBufferCapacity,
                          ZDICT_cover_params_t parameters,
                          U16* segmentFreqs)
{
  BYTE *const dict = (BYTE *)dictBuffer;
  size_t tail = dictBufferCapacity;
  /* Divide the data into epochs. We will select one segment from each epoch. */
  const COVER_epoch_info_t epochs = COVER_computeEpochs(
      (U32)dictBufferCapacity, (U32)ctx->nbDmers, parameters.k, 1);
  const size_t maxZeroScoreRun = 10;
  size_t zeroScoreRun = 0;
  size_t epoch;
  DISPLAYLEVEL(2, "Breaking content into %u epochs of size %u\n",
                (U32)epochs.num, (U32)epochs.size);
  /* Loop through the epochs until there are no more segments or the dictionary
   * is full.
   */
  for (epoch = 0; tail > 0; epoch = (epoch + 1) % epochs.num) {
    const U32 epochBegin = (U32)(epoch * epochs.size);
    const U32 epochEnd = epochBegin + epochs.size;
    size_t segmentSize;
    /* Select a segment */
    COVER_segment_t segment = FASTCOVER_selectSegment(
        ctx, freqs, epochBegin, epochEnd, parameters, segmentFreqs);

    /* If the segment covers no dmers, then we are out of content.
     * There may be new content in other epochs, so we continue for some time.
     */
    if (segment.score == 0) {
      if (++zeroScoreRun >= maxZeroScoreRun) {
          break;
      }
      continue;
    }
    zeroScoreRun = 0;

    /* Trim the segment if necessary, and if it is too small then we are done */
    segmentSize = MIN(segment.end - segment.begin + parameters.d - 1, tail);
    if (segmentSize < parameters.d) {
      break;
    }

    /* We fill the dictionary from the back to allow the best segments to be
     * referenced with the smallest offsets.
     */
    tail -= segmentSize;
    memcpy(dict + tail, ctx->samples + segment.begin, segmentSize);
    DISPLAYUPDATE(
        2, "\r%u%%       ",
        (unsigned)(((dictBufferCapacity - tail) * 100) / dictBufferCapacity));
  }
  DISPLAYLEVEL(2, "\r%79s\r", "");
  return tail;
}


/**
 * Parameters for FASTCOVER_tryParameters().
 */
typedef struct FASTCOVER_tryParameters_data_s {
    const FASTCOVER_ctx_t* ctx;
    COVER_best_t* best;
    size_t dictBufferCapacity;
    ZDICT_cover_params_t parameters;
} FASTCOVER_tryParameters_data_t;


/**
 * Tries a set of parameters and updates the COVER_best_t with the results.
 * This function is thread safe if zstd is compiled with multithreaded support.
 * It takes its parameters as an *OWNING* opaque pointer to support threading.
 */
static void FASTCOVER_tryParameters(void *opaque)
{
  /* Save parameters as local variables */
  FASTCOVER_tryParameters_data_t *const data = (FASTCOVER_tryParameters_data_t *)opaque;
  const FASTCOVER_ctx_t *const ctx = data->ctx;
  const ZDICT_cover_params_t parameters = data->parameters;
  size_t dictBufferCapacity = data->dictBufferCapacity;
  size_t totalCompressedSize = ERROR(GENERIC);
  /* Initialize array to keep track of frequency of dmer within activeSegment */
  U16* segmentFreqs = (U16 *)calloc(((U64)1 << ctx->f), sizeof(U16));
  /* Allocate space for hash table, dict, and freqs */
  BYTE *const dict = (BYTE * const)malloc(dictBufferCapacity);
  U32 *freqs = (U32*) malloc(((U64)1 << ctx->f) * sizeof(U32));
  if (!segmentFreqs || !dict || !freqs) {
    DISPLAYLEVEL(1, "Failed to allocate buffers: out of memory\n");
    goto _cleanup;
  }
  /* Copy the frequencies because we need to modify them */
  memcpy(freqs, ctx->freqs, ((U64)1 << ctx->f) * sizeof(U32));
  /* Build the dictionary */
  { const size_t tail = FASTCOVER_buildDictionary(ctx, freqs, dict, dictBufferCapacity,
                                                  parameters, segmentFreqs);
    const unsigned nbFinalizeSamples = (unsigned)(ctx->nbTrainSamples * ctx->accelParams.finalize / 100);
    dictBufferCapacity = ZDICT_finalizeDictionary(
        dict, dictBufferCapacity, dict + tail, dictBufferCapacity - tail,
        ctx->samples, ctx->samplesSizes, nbFinalizeSamples, parameters.zParams);
    if (ZDICT_isError(dictBufferCapacity)) {
      DISPLAYLEVEL(1, "Failed to finalize dictionary\n");
      goto _cleanup;
    }
  }
  /* Check total compressed size */
  totalCompressedSize = COVER_checkTotalCompressedSize(parameters, ctx->samplesSizes,
                                                       ctx->samples, ctx->offsets,
                                                       ctx->nbTrainSamples, ctx->nbSamples,
                                                       dict, dictBufferCapacity);
_cleanup:
  COVER_best_finish(data->best, totalCompressedSize, parameters, dict,
                    dictBufferCapacity);
  free(data);
  free(segmentFreqs);
  free(dict);
  free(freqs);
}


static void
FASTCOVER_convertToCoverParams(ZDICT_fastCover_params_t fastCoverParams,
                               ZDICT_cover_params_t* coverParams)
{
    coverParams->k = fastCoverParams.k;
    coverParams->d = fastCoverParams.d;
    coverParams->steps = fastCoverParams.steps;
    coverParams->nbThreads = fastCoverParams.nbThreads;
    coverParams->splitPoint = fastCoverParams.splitPoint;
    coverParams->zParams = fastCoverParams.zParams;
}


static void
FASTCOVER_convertToFastCoverParams(ZDICT_cover_params_t coverParams,
                                   ZDICT_fastCover_params_t* fastCoverParams,
                                   unsigned f, unsigned accel)
{
    fastCoverParams->k = coverParams.k;
    fastCoverParams->d = coverParams.d;
    fastCoverParams->steps = coverParams.steps;
    fastCoverParams->nbThreads = coverParams.nbThreads;
    fastCoverParams->splitPoint = coverParams.splitPoint;
    fastCoverParams->f = f;
    fastCoverParams->accel = accel;
    fastCoverParams->zParams = coverParams.zParams;
}


ZDICTLIB_API size_t
ZDICT_trainFromBuffer_fastCover(void* dictBuffer, size_t dictBufferCapacity,
                                const void* samplesBuffer,
                                const size_t* samplesSizes, unsigned nbSamples,
                                ZDICT_fastCover_params_t parameters)
{
    BYTE* const dict = (BYTE*)dictBuffer;
    FASTCOVER_ctx_t ctx;
    ZDICT_cover_params_t coverParams;
    FASTCOVER_accel_t accelParams;
    /* Initialize global data */
    g_displayLevel = parameters.zParams.notificationLevel;
    /* Use all samples for both training and testing; assign f and accel if not provided */
    parameters.splitPoint = 1.0;
    parameters.f = parameters.f == 0 ? DEFAULT_F : parameters.f;
    parameters.accel = parameters.accel == 0 ? DEFAULT_ACCEL : parameters.accel;
    /* Convert to cover parameters */
    memset(&coverParams, 0, sizeof(coverParams));
    FASTCOVER_convertToCoverParams(parameters, &coverParams);
    /* Checks */
    if (!FASTCOVER_checkParameters(coverParams, dictBufferCapacity, parameters.f,
                                   parameters.accel)) {
      DISPLAYLEVEL(1, "FASTCOVER parameters incorrect\n");
      return ERROR(GENERIC);
    }
    if (nbSamples == 0) {
      DISPLAYLEVEL(1, "FASTCOVER must have at least one input file\n");
      return ERROR(GENERIC);
    }
    if (dictBufferCapacity < ZDICT_DICTSIZE_MIN) {
      DISPLAYLEVEL(1, "dictBufferCapacity must be at least %u\n",
                   ZDICT_DICTSIZE_MIN);
      return ERROR(dstSize_tooSmall);
    }
    /* Assign corresponding FASTCOVER_accel_t to accelParams */
    accelParams = FASTCOVER_defaultAccelParameters[parameters.accel];
    /* Initialize context */
    if (!FASTCOVER_ctx_init(&ctx, samplesBuffer, samplesSizes, nbSamples,
                            coverParams.d, parameters.splitPoint, parameters.f,
                            accelParams)) {
      DISPLAYLEVEL(1, "Failed to initialize context\n");
      return ERROR(GENERIC);
    }
    COVER_warnOnSmallCorpus(dictBufferCapacity, ctx.nbDmers, g_displayLevel);
    /* Build the dictionary */
    DISPLAYLEVEL(2, "Building dictionary\n");
    {
      /* Initialize array to keep track of frequency of dmer within activeSegment */
      U16* segmentFreqs = (U16 *)calloc(((U64)1 << parameters.f), sizeof(U16));
      const size_t tail = FASTCOVER_buildDictionary(&ctx, ctx.freqs, dictBuffer,
                                                dictBufferCapacity, coverParams, segmentFreqs);
      const unsigned nbFinalizeSamples = (unsigned)(ctx.nbTrainSamples * ctx.accelParams.finalize / 100);
      const size_t dictionarySize = ZDICT_finalizeDictionary(
          dict, dictBufferCapacity, dict + tail, dictBufferCapacity - tail,
          samplesBuffer, samplesSizes, nbFinalizeSamples, coverParams.zParams);
      if (!ZSTD_isError(dictionarySize)) {
          DISPLAYLEVEL(2, "Constructed dictionary of size %u\n",
                      (unsigned)dictionarySize);
      }
      FASTCOVER_ctx_destroy(&ctx);
      free(segmentFreqs);
      return dictionarySize;
    }
}
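
/*
 * Usage sketch (illustrative only; `samples`, `sizes`, and `nbFiles` are
 * assumed to have been filled by the caller, e.g. by loading training files
 * into one contiguous buffer):
 *
 *     ZDICT_fastCover_params_t params;
 *     unsigned char dictBuffer[112640];           // ~110 KB target dictionary
 *     memset(&params, 0, sizeof(params));         // zeroed fields pick defaults
 *     params.d = 8;                               // dmer length: 6 or 8
 *     params.k = 200;                             // segment size in bytes
 *     params.zParams.compressionLevel = 3;
 *     {   size_t const dictSize = ZDICT_trainFromBuffer_fastCover(
 *             dictBuffer, sizeof(dictBuffer),
 *             samples, sizes, nbFiles, params);
 *         if (ZDICT_isError(dictSize)) { ... handle error ... }
 *     }
 *
 * Requires ZDICT_STATIC_LINKING_ONLY, as this entry point is part of the
 * experimental zdict API.
 */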


ZDICTLIB_API size_t
ZDICT_optimizeTrainFromBuffer_fastCover(
                    void* dictBuffer, size_t dictBufferCapacity,
                    const void* samplesBuffer,
                    const size_t* samplesSizes, unsigned nbSamples,
                    ZDICT_fastCover_params_t* parameters)
{
    ZDICT_cover_params_t coverParams;
    FASTCOVER_accel_t accelParams;
    /* constants */
    const unsigned nbThreads = parameters->nbThreads;
    const double splitPoint =
        parameters->splitPoint <= 0.0 ? DEFAULT_SPLITPOINT : parameters->splitPoint;
    const unsigned kMinD = parameters->d == 0 ? 6 : parameters->d;
    const unsigned kMaxD = parameters->d == 0 ? 8 : parameters->d;
    const unsigned kMinK = parameters->k == 0 ? 50 : parameters->k;
    const unsigned kMaxK = parameters->k == 0 ? 2000 : parameters->k;
    const unsigned kSteps = parameters->steps == 0 ? 40 : parameters->steps;
    const unsigned kStepSize = MAX((kMaxK - kMinK) / kSteps, 1);
    const unsigned kIterations =
        (1 + (kMaxD - kMinD) / 2) * (1 + (kMaxK - kMinK) / kStepSize);
    const unsigned f = parameters->f == 0 ? DEFAULT_F : parameters->f;
    const unsigned accel = parameters->accel == 0 ? DEFAULT_ACCEL : parameters->accel;
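    /* Example with all-default parameters: d ranges over {6, 8}, k ranges from
     * 50 to 2000 with kStepSize = MAX(1950 / 40, 1) = 48, giving
     * kIterations = 2 * (1 + 1950 / 48) = 82 candidate parameter sets. */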
    /* Local variables */
    const int displayLevel = parameters->zParams.notificationLevel;
    unsigned iteration = 1;
    unsigned d;
    unsigned k;
    COVER_best_t best;
    POOL_ctx *pool = NULL;
    int warned = 0;
    /* Checks */
    if (splitPoint <= 0 || splitPoint > 1) {
      LOCALDISPLAYLEVEL(displayLevel, 1, "Incorrect splitPoint\n");
      return ERROR(GENERIC);
    }
    if (accel == 0 || accel > FASTCOVER_MAX_ACCEL) {
      LOCALDISPLAYLEVEL(displayLevel, 1, "Incorrect accel\n");
      return ERROR(GENERIC);
    }
    if (kMinK < kMaxD || kMaxK < kMinK) {
      LOCALDISPLAYLEVEL(displayLevel, 1, "Incorrect k\n");
      return ERROR(GENERIC);
    }
    if (nbSamples == 0) {
      LOCALDISPLAYLEVEL(displayLevel, 1, "FASTCOVER must have at least one input file\n");
      return ERROR(GENERIC);
    }
    if (dictBufferCapacity < ZDICT_DICTSIZE_MIN) {
      LOCALDISPLAYLEVEL(displayLevel, 1, "dictBufferCapacity must be at least %u\n",
                   ZDICT_DICTSIZE_MIN);
      return ERROR(dstSize_tooSmall);
    }
    if (nbThreads > 1) {
      pool = POOL_create(nbThreads, 1);
      if (!pool) {
        return ERROR(memory_allocation);
      }
    }
    /* Initialization */
    COVER_best_init(&best);
    memset(&coverParams, 0, sizeof(coverParams));
    FASTCOVER_convertToCoverParams(*parameters, &coverParams);
    accelParams = FASTCOVER_defaultAccelParameters[accel];
    /* Turn down global display level to clean up display at level 2 and below */
    g_displayLevel = displayLevel == 0 ? 0 : displayLevel - 1;
    /* Loop through d first because each new value needs a new context */
    LOCALDISPLAYLEVEL(displayLevel, 2, "Trying %u different sets of parameters\n",
                      kIterations);
    for (d = kMinD; d <= kMaxD; d += 2) {
      /* Initialize the context for this value of d */
      FASTCOVER_ctx_t ctx;
      LOCALDISPLAYLEVEL(displayLevel, 3, "d=%u\n", d);
      if (!FASTCOVER_ctx_init(&ctx, samplesBuffer, samplesSizes, nbSamples, d, splitPoint, f, accelParams)) {
        LOCALDISPLAYLEVEL(displayLevel, 1, "Failed to initialize context\n");
        COVER_best_destroy(&best);
        POOL_free(pool);
        return ERROR(GENERIC);
      }
      if (!warned) {
        COVER_warnOnSmallCorpus(dictBufferCapacity, ctx.nbDmers, displayLevel);
        warned = 1;
      }
      /* Loop through k reusing the same context */
      for (k = kMinK; k <= kMaxK; k += kStepSize) {
        /* Prepare the arguments */
        FASTCOVER_tryParameters_data_t *data = (FASTCOVER_tryParameters_data_t *)malloc(
            sizeof(FASTCOVER_tryParameters_data_t));
        LOCALDISPLAYLEVEL(displayLevel, 3, "k=%u\n", k);
        if (!data) {
          LOCALDISPLAYLEVEL(displayLevel, 1, "Failed to allocate parameters\n");
          COVER_best_destroy(&best);
          FASTCOVER_ctx_destroy(&ctx);
          POOL_free(pool);
          return ERROR(GENERIC);
        }
        data->ctx = &ctx;
        data->best = &best;
        data->dictBufferCapacity = dictBufferCapacity;
        data->parameters = coverParams;
        data->parameters.k = k;
        data->parameters.d = d;
        data->parameters.splitPoint = splitPoint;
        data->parameters.steps = kSteps;
        data->parameters.zParams.notificationLevel = g_displayLevel;
        /* Check the parameters */
        if (!FASTCOVER_checkParameters(data->parameters, dictBufferCapacity,
                                       data->ctx->f, accel)) {
          DISPLAYLEVEL(1, "FASTCOVER parameters incorrect\n");
          free(data);
          continue;
        }
        /* Call the function and pass ownership of data to it */
        COVER_best_start(&best);
        if (pool) {
          POOL_add(pool, &FASTCOVER_tryParameters, data);
        } else {
          FASTCOVER_tryParameters(data);
        }
        /* Print status */
        LOCALDISPLAYUPDATE(displayLevel, 2, "\r%u%%       ",
                           (unsigned)((iteration * 100) / kIterations));
        ++iteration;
      }
      COVER_best_wait(&best);
      FASTCOVER_ctx_destroy(&ctx);
    }
    LOCALDISPLAYLEVEL(displayLevel, 2, "\r%79s\r", "");
    /* Fill the output buffer and parameters with output of the best parameters */
    {
      const size_t dictSize = best.dictSize;
      if (ZSTD_isError(best.compressedSize)) {
        const size_t compressedSize = best.compressedSize;
        COVER_best_destroy(&best);
        POOL_free(pool);
        return compressedSize;
      }
      FASTCOVER_convertToFastCoverParams(best.parameters, parameters, f, accel);
      memcpy(dictBuffer, best.dict, dictSize);
      COVER_best_destroy(&best);
      POOL_free(pool);
      return dictSize;
    }
}