/*
   american fuzzy lop++ - fuzz_one routines in different flavours
   ---------------------------------------------------------------

   Originally written by Michal Zalewski

   Now maintained by Marc Heuse <mh@mh-sec.de>,
                        Heiko Eißfeldt <heiko.eissfeldt@hexco.de> and
                        Andrea Fioraldi <andreafioraldi@gmail.com>

   Copyright 2016, 2017 Google Inc. All rights reserved.
   Copyright 2019-2020 AFLplusplus Project. All rights reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at:

     http://www.apache.org/licenses/LICENSE-2.0

   This is the real deal: the program takes an instrumented binary and
   attempts a variety of basic fuzzing tricks, paying close attention to
   how they affect the execution path.

 */

#include "afl-fuzz.h"
#include <string.h>
#include <limits.h>
#include "cmplog.h"

/* MOpt */

static int select_algorithm(afl_state_t *afl, u32 max_algorithm) {

  int i_puppet, j_puppet = 0, operator_number = max_algorithm;

  double range_sele =
      (double)afl->probability_now[afl->swarm_now][operator_number - 1];
  double sele = ((double)(rand_below(afl, 10000) * 0.0001 * range_sele));

  for (i_puppet = 0; i_puppet < operator_num; ++i_puppet) {

    if (unlikely(i_puppet == 0)) {

      if (sele < afl->probability_now[afl->swarm_now][i_puppet]) { break; }

    } else {

      if (sele < afl->probability_now[afl->swarm_now][i_puppet]) {

        j_puppet = 1;
        break;

      }

    }

  }

  if ((j_puppet == 1 &&
       sele < afl->probability_now[afl->swarm_now][i_puppet - 1]) ||
      (i_puppet + 1 < operator_num &&
       sele > afl->probability_now[afl->swarm_now][i_puppet + 1])) {

    FATAL("error select_algorithm");

  }

  return i_puppet;

}
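
/* Illustrative note: afl->probability_now[swarm][] holds the cumulative
   selection probabilities of the MOpt mutation operators, so drawing sele
   uniformly from [0, range_sele) and returning the first index whose
   cumulative value exceeds it amounts to a roulette-wheel selection. */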

/* Helper to choose random block len for block operations in fuzz_one().
   Doesn't return zero, provided that limit is > 0. */

static inline u32 choose_block_len(afl_state_t *afl, u32 limit) {

  u32 min_value, max_value;
  u32 rlim = MIN(afl->queue_cycle, (u32)3);

  if (unlikely(!afl->run_over10m)) { rlim = 1; }

  switch (rand_below(afl, rlim)) {

    case 0:
      min_value = 1;
      max_value = HAVOC_BLK_SMALL;
      break;

    case 1:
      min_value = HAVOC_BLK_SMALL;
      max_value = HAVOC_BLK_MEDIUM;
      break;

    default:

      if (likely(rand_below(afl, 10))) {

        min_value = HAVOC_BLK_MEDIUM;
        max_value = HAVOC_BLK_LARGE;

      } else {

        min_value = HAVOC_BLK_LARGE;
        max_value = HAVOC_BLK_XL;

      }

  }

  if (min_value >= limit) { min_value = 1; }

  return min_value + rand_below(afl, MIN(max_value, limit) - min_value + 1);

}
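
/* A worked example (illustrative only, assuming the stock config.h defaults
   HAVOC_BLK_SMALL = 32 and HAVOC_BLK_MEDIUM = 128): during the first ten
   minutes of a run, rlim is forced to 1, so case 0 is always taken. With
   limit = 20, min_value stays 1 and the call returns
   1 + rand_below(afl, MIN(32, 20) - 1 + 1), i.e., a value in [1, 20]. */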

/* Helper function to see if a particular change (xor_val = old ^ new) could
   be a product of deterministic bit flips with the lengths and stepovers
   attempted by afl-fuzz. This is used to avoid dupes in some of the
   deterministic fuzzing operations that follow bit flips. We also
   return 1 if xor_val is zero, which implies that the old and attempted new
   values are identical and the exec would be a waste of time. */

static u8 could_be_bitflip(u32 xor_val) {

  u32 sh = 0;

  if (!xor_val) { return 1; }

  /* Shift right until the lowest set bit lands at position 0. */

  while (!(xor_val & 1)) {

    ++sh;
    xor_val >>= 1;

  }

  /* 1-, 2-, and 4-bit patterns are OK anywhere. */

  if (xor_val == 1 || xor_val == 3 || xor_val == 15) { return 1; }

  /* 8-, 16-, and 32-bit patterns are OK only if shift factor is
     divisible by 8, since that's the stepover for these ops. */

  if (sh & 7) { return 0; }

  if (xor_val == 0xff || xor_val == 0xffff || xor_val == 0xffffffff) {

    return 1;

  }

  return 0;

}
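
/* Example (illustrative only): for old = 0x00 and new = 0x18, xor_val is
   0x18; shifting out the trailing zeros gives sh = 3 and a residue of 3,
   which matches the 2-bit pattern, so the change is already covered by the
   "bitflip 2/1" stage and the function returns 1. For xor_val = 0xff00,
   sh = 8 is divisible by 8 and the residue is 0xff, so it returns 1 too. */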

/* Helper function to see if a particular value is reachable through
   arithmetic operations. Used for similar purposes. */

static u8 could_be_arith(u32 old_val, u32 new_val, u8 blen) {

  u32 i, ov = 0, nv = 0, diffs = 0;

  if (old_val == new_val) { return 1; }

  /* See if one-byte adjustments to any byte could produce this result. */

  for (i = 0; (u8)i < blen; ++i) {

    u8 a = old_val >> (8 * i), b = new_val >> (8 * i);

    if (a != b) {

      ++diffs;
      ov = a;
      nv = b;

    }

  }

  /* If only one byte differs and the values are within range, return 1. */

  if (diffs == 1) {

    if ((u8)(ov - nv) <= ARITH_MAX || (u8)(nv - ov) <= ARITH_MAX) { return 1; }

  }

  if (blen == 1) { return 0; }

  /* See if two-byte adjustments to any aligned word would produce this
     result. */

  diffs = 0;

  for (i = 0; (u8)i < blen / 2; ++i) {

    u16 a = old_val >> (16 * i), b = new_val >> (16 * i);

    if (a != b) {

      ++diffs;
      ov = a;
      nv = b;

    }

  }

  /* If only one word differs and the values are within range, return 1. */

  if (diffs == 1) {

    if ((u16)(ov - nv) <= ARITH_MAX || (u16)(nv - ov) <= ARITH_MAX) {

      return 1;

    }

    ov = SWAP16(ov);
    nv = SWAP16(nv);

    if ((u16)(ov - nv) <= ARITH_MAX || (u16)(nv - ov) <= ARITH_MAX) {

      return 1;

    }

  }

  /* Finally, let's do the same thing for dwords. */

  if (blen == 4) {

    if ((u32)(old_val - new_val) <= ARITH_MAX ||
        (u32)(new_val - old_val) <= ARITH_MAX) {

      return 1;

    }

    new_val = SWAP32(new_val);
    old_val = SWAP32(old_val);

    if ((u32)(old_val - new_val) <= ARITH_MAX ||
        (u32)(new_val - old_val) <= ARITH_MAX) {

      return 1;

    }

  }

  return 0;

}
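
/* Example (illustrative only, assuming the default ARITH_MAX of 35):
   old_val = 0x1005, new_val = 0x1000, blen = 2. Only the low byte differs
   (0x05 vs 0x00) and (u8)(ov - nv) = 5 <= 35, so the same value is already
   reached by the 8-bit "arith" stage and the function returns 1. */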

/* Last but not least, a similar helper to see if insertion of an
   interesting integer is redundant given the insertions done for
   shorter blen. The last param (check_le) is set if the caller
   already executed LE insertion for current blen and wants to see
   if BE variant passed in new_val is unique. */

static u8 could_be_interest(u32 old_val, u32 new_val, u8 blen, u8 check_le) {

  u32 i, j;

  if (old_val == new_val) { return 1; }

  /* See if one-byte insertions from interesting_8 over old_val could
     produce new_val. */

  for (i = 0; i < blen; ++i) {

    for (j = 0; j < sizeof(interesting_8); ++j) {

      u32 tval =
          (old_val & ~(0xff << (i * 8))) | (((u8)interesting_8[j]) << (i * 8));

      if (new_val == tval) { return 1; }

    }

  }

  /* Bail out unless we're also asked to examine two-byte LE insertions
     as a preparation for BE attempts. */

  if (blen == 2 && !check_le) { return 0; }

  /* See if two-byte insertions over old_val could give us new_val. */

  for (i = 0; (u8)i < blen - 1; ++i) {

    for (j = 0; j < sizeof(interesting_16) / 2; ++j) {

      u32 tval = (old_val & ~(0xffff << (i * 8))) |
                 (((u16)interesting_16[j]) << (i * 8));

      if (new_val == tval) { return 1; }

      /* Continue here only if blen > 2. */

      if (blen > 2) {

        tval = (old_val & ~(0xffff << (i * 8))) |
               (SWAP16(interesting_16[j]) << (i * 8));

        if (new_val == tval) { return 1; }

      }

    }

  }

  if (blen == 4 && check_le) {

    /* See if four-byte insertions could produce the same result
       (LE only). */

    for (j = 0; j < sizeof(interesting_32) / 4; ++j) {

      if (new_val == (u32)interesting_32[j]) { return 1; }

    }

  }

  return 0;

}
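
/* Example (illustrative only): old_val = 0x00000000, new_val = 0x00ff0000,
   blen = 4. With i = 2 and the interesting_8 entry -1, the one-byte
   insertion yields tval = 0x00ff0000 == new_val, so the function returns 1
   and the 32-bit "interest" stage skips this redundant write. */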

#ifndef IGNORE_FINDS

/* Helper function to compare buffers; returns first and last differing offset.
   We use this to find reasonable locations for splicing two files. */

static void locate_diffs(u8 *ptr1, u8 *ptr2, u32 len, s32 *first, s32 *last) {

  s32 f_loc = -1;
  s32 l_loc = -1;
  u32 pos;

  for (pos = 0; pos < len; ++pos) {

    if (*(ptr1++) != *(ptr2++)) {

      if (f_loc == -1) { f_loc = pos; }
      l_loc = pos;

    }

  }

  *first = f_loc;
  *last = l_loc;

  return;

}
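
/* Example (illustrative only): for ptr1 = "ABCD" and ptr2 = "AXCY" with
   len = 4, positions 1 and 3 differ, so *first is set to 1 and *last to 3;
   the splice stage then picks its crossover point between the two. */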

#endif                                                     /* !IGNORE_FINDS */

/* Take the current entry from the queue, fuzz it for a while. This
   function is a tad too long... returns 0 if fuzzed successfully, 1 if
   skipped or bailed out. */

u8 fuzz_one_original(afl_state_t *afl) {

  u32 len, temp_len;
  u32 j;
  u32 i;
  u8 *in_buf, *out_buf, *orig_in, *ex_tmp, *eff_map = 0;
  u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt = 0, prev_cksum;
  u32 splice_cycle = 0, perf_score = 100, orig_perf, eff_cnt = 1;

  u8 ret_val = 1, doing_det = 0;

  u8  a_collect[MAX_AUTO_EXTRA];
  u32 a_len = 0;

#ifdef IGNORE_FINDS

  /* In IGNORE_FINDS mode, skip any entries that weren't in the
     initial data set. */

  if (afl->queue_cur->depth > 1) return 1;

#else

  if (unlikely(afl->custom_mutators_count)) {

    /* The custom mutator will decide to skip this test case or not. */

    LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {

      if (el->afl_custom_queue_get &&
          !el->afl_custom_queue_get(el->data, afl->queue_cur->fname)) {

        return 1;

      }

    });

  }

  if (likely(afl->pending_favored)) {

    /* If we have any favored, non-fuzzed new arrivals in the queue,
       possibly skip to them at the expense of already-fuzzed or non-favored
       cases. */

    if (((afl->queue_cur->was_fuzzed > 0 || afl->queue_cur->fuzz_level > 0) ||
         !afl->queue_cur->favored) &&
        likely(rand_below(afl, 100) < SKIP_TO_NEW_PROB)) {

      return 1;

    }

  } else if (!afl->non_instrumented_mode && !afl->queue_cur->favored &&

             afl->queued_paths > 10) {

    /* Otherwise, still possibly skip non-favored cases, albeit less often.
       The odds of skipping stuff are higher for already-fuzzed inputs and
       lower for never-fuzzed entries. */

    if (afl->queue_cycle > 1 &&
        (afl->queue_cur->fuzz_level == 0 || afl->queue_cur->was_fuzzed)) {

      if (likely(rand_below(afl, 100) < SKIP_NFAV_NEW_PROB)) { return 1; }

    } else {

      if (likely(rand_below(afl, 100) < SKIP_NFAV_OLD_PROB)) { return 1; }

    }

  }

#endif                                                     /* ^IGNORE_FINDS */

  if (unlikely(afl->not_on_tty)) {

    ACTF(
        "Fuzzing test case #%u (%u total, %llu uniq crashes found, "
        "perf_score=%0.0f, exec_us=%llu, hits=%u, map=%u)...",
        afl->current_entry, afl->queued_paths, afl->unique_crashes,
        afl->queue_cur->perf_score, afl->queue_cur->exec_us,
        likely(afl->n_fuzz) ? afl->n_fuzz[afl->queue_cur->n_fuzz_entry] : 0,
        afl->queue_cur->bitmap_size);
    fflush(stdout);

  }

  orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);
  len = afl->queue_cur->len;

  out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
  if (unlikely(!out_buf)) { PFATAL("alloc"); }

  afl->subseq_tmouts = 0;

  afl->cur_depth = afl->queue_cur->depth;

  /*******************************************
   * CALIBRATION (only if failed earlier on) *
   *******************************************/

  if (unlikely(afl->queue_cur->cal_failed)) {

    u8 res = FSRV_RUN_TMOUT;

    if (afl->queue_cur->cal_failed < CAL_CHANCES) {

      afl->queue_cur->exec_cksum = 0;

      res =
          calibrate_case(afl, afl->queue_cur, in_buf, afl->queue_cycle - 1, 0);

      if (unlikely(res == FSRV_RUN_ERROR)) {

        FATAL("Unable to execute target application");

      }

    }

    if (unlikely(afl->stop_soon) || res != afl->crash_mode) {

      ++afl->cur_skipped_paths;
      goto abandon_entry;

    }

  }

  /************
   * TRIMMING *
   ************/

  if (unlikely(!afl->non_instrumented_mode && !afl->queue_cur->trim_done &&
               !afl->disable_trim)) {

    u32 old_len = afl->queue_cur->len;

    u8 res = trim_case(afl, afl->queue_cur, in_buf);
    orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);

    if (unlikely(res == FSRV_RUN_ERROR)) {

      FATAL("Unable to execute target application");

    }

    if (unlikely(afl->stop_soon)) {

      ++afl->cur_skipped_paths;
      goto abandon_entry;

    }

    /* Don't retry trimming, even if it failed. */

    afl->queue_cur->trim_done = 1;

    len = afl->queue_cur->len;

    /* The current entry may no longer be eligible for splicing. */
    if (unlikely(len <= 4 && old_len > 4)) --afl->ready_for_splicing_count;

  }

  memcpy(out_buf, in_buf, len);

  /*********************
   * PERFORMANCE SCORE *
   *********************/

  if (likely(!afl->old_seed_selection))
    orig_perf = perf_score = afl->queue_cur->perf_score;
  else
    afl->queue_cur->perf_score = orig_perf = perf_score =
        calculate_score(afl, afl->queue_cur);

  if (unlikely(perf_score <= 0)) { goto abandon_entry; }

  if (unlikely(afl->shm.cmplog_mode &&
               afl->queue_cur->colorized < afl->cmplog_lvl &&
               (u32)len <= afl->cmplog_max_filesize)) {

    if (unlikely(len < 4)) {

      afl->queue_cur->colorized = CMPLOG_LVL_MAX;

    } else {

      if (afl->cmplog_lvl == 3 ||
          (afl->cmplog_lvl == 2 && afl->queue_cur->tc_ref) ||
          afl->queue_cur->favored ||
          !(afl->fsrv.total_execs % afl->queued_paths) ||
          get_cur_time() - afl->last_path_time > 300000) {  // 300 seconds

        if (input_to_state_stage(afl, in_buf, out_buf, len)) {

          goto abandon_entry;

        }

      }

    }

  }

  /* Skip right away if -d is given, if it has not been chosen sufficiently
     often to warrant the expensive deterministic stage (fuzz_level), or
     if it has gone through deterministic testing in earlier, resumed runs
     (passed_det). */

  if (likely(afl->queue_cur->passed_det) || likely(afl->skip_deterministic) ||
      likely(perf_score <
             (afl->queue_cur->depth * 30 <= afl->havoc_max_mult * 100
                  ? afl->queue_cur->depth * 30
                  : afl->havoc_max_mult * 100))) {

    goto custom_mutator_stage;

  }

  /* Skip deterministic fuzzing if exec path checksum puts this out of scope
     for this main instance. */

  if (unlikely(afl->main_node_max &&
               (afl->queue_cur->exec_cksum % afl->main_node_max) !=
                   afl->main_node_id - 1)) {

    goto custom_mutator_stage;

  }

  doing_det = 1;

  /*********************************************
   * SIMPLE BITFLIP (+dictionary construction) *
   *********************************************/

#define FLIP_BIT(_ar, _b)                   \
  do {                                      \
                                            \
    u8 *_arf = (u8 *)(_ar);                 \
    u32 _bf = (_b);                         \
    _arf[(_bf) >> 3] ^= (128 >> ((_bf)&7)); \
                                            \
  } while (0)
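
  /* Example (illustrative only): FLIP_BIT(out_buf, 10) toggles
     out_buf[1] ^= 128 >> 2, i.e., mask 0x20, the third most significant
     bit of the second byte; bits are numbered MSB-first within a byte. */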

  /* Single walking bit. */

  afl->stage_short = "flip1";
  afl->stage_max = len << 3;
  afl->stage_name = "bitflip 1/1";

  afl->stage_val_type = STAGE_VAL_NONE;

  orig_hit_cnt = afl->queued_paths + afl->unique_crashes;

  prev_cksum = afl->queue_cur->exec_cksum;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    FLIP_BIT(out_buf, afl->stage_cur);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT1-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);

    /* While flipping the least significant bit in every byte, pull off an
       extra trick to detect possible syntax tokens. In essence, the idea is
       that if you have a binary blob like this:

       xxxxxxxxIHDRxxxxxxxx

       ...and changing the leading and trailing bytes causes variable or no
       changes in program flow, but touching any character in the "IHDR" string
       always produces the same, distinctive path, it's highly likely that
       "IHDR" is an atomically-checked magic value of special significance to
       the fuzzed format.

       We do this here, rather than as a separate stage, because it's a nice
       way to keep the operation approximately "free" (i.e., no extra execs).

       Empirically, performing the check when flipping the least significant
       bit is advantageous, compared to doing it at the time of more disruptive
       changes, where the program flow may be affected in more violent ways.

       The caveat is that we won't generate dictionaries in the -d mode or -S
       mode - but that's probably a fair trade-off.

       This won't work particularly well with paths that exhibit variable
       behavior, but fails gracefully, so we'll carry out the checks anyway.

      */

    if (!afl->non_instrumented_mode && (afl->stage_cur & 7) == 7) {

      u64 cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);

      if (afl->stage_cur == afl->stage_max - 1 && cksum == prev_cksum) {

        /* If at end of file and we are still collecting a string, grab the
           final character and force output. */

        if (a_len < MAX_AUTO_EXTRA) {

          a_collect[a_len] = out_buf[afl->stage_cur >> 3];

        }

        ++a_len;

        if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

          maybe_add_auto(afl, a_collect, a_len);

        }

      } else if (cksum != prev_cksum) {

        /* Otherwise, if the checksum has changed, see if we have something
           worthwhile queued up, and collect that if the answer is yes. */

        if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

          maybe_add_auto(afl, a_collect, a_len);

        }

        a_len = 0;
        prev_cksum = cksum;

      }

      /* Continue collecting string, but only if the bit flip actually made
         any difference - we don't want no-op tokens. */

      if (cksum != afl->queue_cur->exec_cksum) {

        if (a_len < MAX_AUTO_EXTRA) {

          a_collect[a_len] = out_buf[afl->stage_cur >> 3];

        }

        ++a_len;

      }

    }

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_FLIP1] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP1] += afl->stage_max;

  /* Two walking bits. */

  afl->stage_name = "bitflip 2/1";
  afl->stage_short = "flip2";
  afl->stage_max = (len << 3) - 1;

  orig_hit_cnt = new_hit_cnt;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT2-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_FLIP2] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP2] += afl->stage_max;

  /* Four walking bits. */

  afl->stage_name = "bitflip 4/1";
  afl->stage_short = "flip4";
  afl->stage_max = (len << 3) - 3;

  orig_hit_cnt = new_hit_cnt;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);
    FLIP_BIT(out_buf, afl->stage_cur + 2);
    FLIP_BIT(out_buf, afl->stage_cur + 3);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT4-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);
    FLIP_BIT(out_buf, afl->stage_cur + 2);
    FLIP_BIT(out_buf, afl->stage_cur + 3);

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_FLIP4] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP4] += afl->stage_max;

  /* Effector map setup. These macros calculate:

     EFF_APOS      - position of a particular file offset in the map.
     EFF_ALEN      - length of a map with a particular number of bytes.
     EFF_SPAN_ALEN - map span for a sequence of bytes.

   */

#define EFF_APOS(_p) ((_p) >> EFF_MAP_SCALE2)
#define EFF_REM(_x) ((_x) & ((1 << EFF_MAP_SCALE2) - 1))
#define EFF_ALEN(_l) (EFF_APOS(_l) + !!EFF_REM(_l))
#define EFF_SPAN_ALEN(_p, _l) (EFF_APOS((_p) + (_l)-1) - EFF_APOS(_p) + 1)
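
  /* Example (illustrative only, assuming the default EFF_MAP_SCALE2 of 3,
     i.e., one effector byte per 8 input bytes): for a 17-byte input,
     EFF_ALEN(17) = EFF_APOS(17) + !!EFF_REM(17) = 2 + 1 = 3 map entries,
     and EFF_SPAN_ALEN(6, 4) = EFF_APOS(9) - EFF_APOS(6) + 1 = 2, since a
     4-byte token at offset 6 straddles two map entries. */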

  /* Initialize effector map for the next step (see comments below). Always
     flag first and last byte as doing something. */

  eff_map = afl_realloc(AFL_BUF_PARAM(eff), EFF_ALEN(len));
  if (unlikely(!eff_map)) { PFATAL("alloc"); }
  eff_map[0] = 1;

  if (EFF_APOS(len - 1) != 0) {

    eff_map[EFF_APOS(len - 1)] = 1;
    ++eff_cnt;

  }

  /* Walking byte. */

  afl->stage_name = "bitflip 8/8";
  afl->stage_short = "flip8";
  afl->stage_max = len;

  orig_hit_cnt = new_hit_cnt;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur;

    out_buf[afl->stage_cur] ^= 0xFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT8-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    /* We also use this stage to pull off a simple trick: we identify
       bytes that seem to have no effect on the current execution path
       even when fully flipped - and we skip them during more expensive
       deterministic stages, such as arithmetics or known ints. */

    if (!eff_map[EFF_APOS(afl->stage_cur)]) {

      u64 cksum;

      /* If in non-instrumented mode or if the file is very short, just flag
         everything without wasting time on checksums. */

      if (!afl->non_instrumented_mode && len >= EFF_MIN_LEN) {

        cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);

      } else {

        cksum = ~afl->queue_cur->exec_cksum;

      }

      if (cksum != afl->queue_cur->exec_cksum) {

        eff_map[EFF_APOS(afl->stage_cur)] = 1;
        ++eff_cnt;

      }

    }

    out_buf[afl->stage_cur] ^= 0xFF;

  }

  /* If the effector map is more than EFF_MAX_PERC dense, just flag the
     whole thing as worth fuzzing, since we wouldn't be saving much time
     anyway. */

  if (eff_cnt != (u32)EFF_ALEN(len) &&
      eff_cnt * 100 / EFF_ALEN(len) > EFF_MAX_PERC) {

    memset(eff_map, 1, EFF_ALEN(len));

    afl->blocks_eff_select += EFF_ALEN(len);

  } else {

    afl->blocks_eff_select += eff_cnt;

  }
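
  /* Illustrative note (assuming the default EFF_MAX_PERC of 90): if, say,
     19 of the 20 entries of an effector map are already flagged (95%
     density), the remaining entry is flagged as well, since skipping it
     would save almost nothing in the later deterministic stages. */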

  afl->blocks_eff_total += EFF_ALEN(len);

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_FLIP8] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP8] += afl->stage_max;

  /* Two walking bytes. */

  if (len < 2) { goto skip_bitflip; }

  afl->stage_name = "bitflip 16/8";
  afl->stage_short = "flip16";
  afl->stage_cur = 0;
  afl->stage_max = len - 1;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 1; ++i) {

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {

      --afl->stage_max;
      continue;

    }

    afl->stage_cur_byte = i;

    *(u16 *)(out_buf + i) ^= 0xFFFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT16-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
    ++afl->stage_cur;

    *(u16 *)(out_buf + i) ^= 0xFFFF;

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_FLIP16] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP16] += afl->stage_max;

  if (len < 4) { goto skip_bitflip; }

  /* Four walking bytes. */

  afl->stage_name = "bitflip 32/8";
  afl->stage_short = "flip32";
  afl->stage_cur = 0;
  afl->stage_max = len - 3;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 3; ++i) {

    /* Let's consult the effector map... */
    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
        !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {

      --afl->stage_max;
      continue;

    }

    afl->stage_cur_byte = i;

    *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT32-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
    ++afl->stage_cur;

    *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_FLIP32] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP32] += afl->stage_max;
skip_bitflip:

  if (afl->no_arith) { goto skip_arith; }

  /**********************
   * ARITHMETIC INC/DEC *
   **********************/

  /* 8-bit arithmetics. */

  afl->stage_name = "arith 8/8";
  afl->stage_short = "arith8";
  afl->stage_cur = 0;
  afl->stage_max = 2 * len * ARITH_MAX;

  afl->stage_val_type = STAGE_VAL_LE;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len; ++i) {

    u8 orig = out_buf[i];

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)]) {

      afl->stage_max -= 2 * ARITH_MAX;
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 1; j <= ARITH_MAX; ++j) {

      u8 r = orig ^ (orig + j);

      /* Do arithmetic operations only if the result couldn't be a product
         of a bitflip. */

      if (!could_be_bitflip(r)) {

        afl->stage_cur_val = j;
        out_buf[i] = orig + j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH8+-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      r = orig ^ (orig - j);

      if (!could_be_bitflip(r)) {

        afl->stage_cur_val = -j;
        out_buf[i] = orig - j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH8--%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      out_buf[i] = orig;

    }

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_ARITH8] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_ARITH8] += afl->stage_max;

  /* 16-bit arithmetics, both endians. */

  if (len < 2) { goto skip_arith; }

  afl->stage_name = "arith 16/8";
  afl->stage_short = "arith16";
  afl->stage_cur = 0;
  afl->stage_max = 4 * (len - 1) * ARITH_MAX;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len - 1; ++i) {

    u16 orig = *(u16 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {

      afl->stage_max -= 4 * ARITH_MAX;
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 1; j <= ARITH_MAX; ++j) {

      u16 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
          r3 = orig ^ SWAP16(SWAP16(orig) + j),
          r4 = orig ^ SWAP16(SWAP16(orig) - j);

      /* Try little endian addition and subtraction first. Do it only
         if the operation would affect more than one byte (hence the
         & 0xff overflow checks) and if it couldn't be a product of
         a bitflip. */

      afl->stage_val_type = STAGE_VAL_LE;

      if ((orig & 0xff) + j > 0xff && !could_be_bitflip(r1)) {

        afl->stage_cur_val = j;
        *(u16 *)(out_buf + i) = orig + j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16+-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((orig & 0xff) < j && !could_be_bitflip(r2)) {

        afl->stage_cur_val = -j;
        *(u16 *)(out_buf + i) = orig - j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16--%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      /* Big endian comes next. Same deal. */

      afl->stage_val_type = STAGE_VAL_BE;

      if ((orig >> 8) + j > 0xff && !could_be_bitflip(r3)) {

        afl->stage_cur_val = j;
        *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) + j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16+BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((orig >> 8) < j && !could_be_bitflip(r4)) {

        afl->stage_cur_val = -j;
        *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) - j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16_BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      *(u16 *)(out_buf + i) = orig;

    }

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_ARITH16] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_ARITH16] += afl->stage_max;

  /* 32-bit arithmetics, both endians. */

  if (len < 4) { goto skip_arith; }

  afl->stage_name = "arith 32/8";
  afl->stage_short = "arith32";
  afl->stage_cur = 0;
  afl->stage_max = 4 * (len - 3) * ARITH_MAX;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len - 3; ++i) {

    u32 orig = *(u32 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
        !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {

      afl->stage_max -= 4 * ARITH_MAX;
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 1; j <= ARITH_MAX; ++j) {

      u32 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
          r3 = orig ^ SWAP32(SWAP32(orig) + j),
          r4 = orig ^ SWAP32(SWAP32(orig) - j);

      /* Little endian first. Same deal as with 16-bit: we only want to
         try if the operation would have effect on more than two bytes. */

      afl->stage_val_type = STAGE_VAL_LE;

      if ((orig & 0xffff) + j > 0xffff && !could_be_bitflip(r1)) {

        afl->stage_cur_val = j;
        *(u32 *)(out_buf + i) = orig + j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32+-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((orig & 0xffff) < (u32)j && !could_be_bitflip(r2)) {

        afl->stage_cur_val = -j;
        *(u32 *)(out_buf + i) = orig - j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32_-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      /* Big endian next. */

      afl->stage_val_type = STAGE_VAL_BE;

      if ((SWAP32(orig) & 0xffff) + j > 0xffff && !could_be_bitflip(r3)) {

        afl->stage_cur_val = j;
        *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) + j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32+BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((SWAP32(orig) & 0xffff) < (u32)j && !could_be_bitflip(r4)) {

        afl->stage_cur_val = -j;
        *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) - j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32_BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      *(u32 *)(out_buf + i) = orig;

    }

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_ARITH32] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_ARITH32] += afl->stage_max;

skip_arith:

  /**********************
   * INTERESTING VALUES *
   **********************/

  afl->stage_name = "interest 8/8";
  afl->stage_short = "int8";
  afl->stage_cur = 0;
  afl->stage_max = len * sizeof(interesting_8);

  afl->stage_val_type = STAGE_VAL_LE;

  orig_hit_cnt = new_hit_cnt;

  /* Setting 8-bit integers. */

  for (i = 0; i < (u32)len; ++i) {

    u8 orig = out_buf[i];

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)]) {

      afl->stage_max -= sizeof(interesting_8);
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 0; j < (u32)sizeof(interesting_8); ++j) {

      /* Skip if the value could be a product of bitflips or arithmetics. */

      if (could_be_bitflip(orig ^ (u8)interesting_8[j]) ||
          could_be_arith(orig, (u8)interesting_8[j], 1)) {

        --afl->stage_max;
        continue;

      }

      afl->stage_cur_val = interesting_8[j];
      out_buf[i] = interesting_8[j];

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation), "%s INTERESTING8_%u_%u",
               afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

      out_buf[i] = orig;
      ++afl->stage_cur;

    }

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_INTEREST8] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_INTEREST8] += afl->stage_max;

  /* Setting 16-bit integers, both endians. */

  if (afl->no_arith || len < 2) { goto skip_interest; }

  afl->stage_name = "interest 16/8";
  afl->stage_short = "int16";
  afl->stage_cur = 0;
  afl->stage_max = 2 * (len - 1) * (sizeof(interesting_16) >> 1);

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 1; ++i) {

    u16 orig = *(u16 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {

      afl->stage_max -= sizeof(interesting_16);
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 0; j < sizeof(interesting_16) / 2; ++j) {

      afl->stage_cur_val = interesting_16[j];

      /* Skip if this could be a product of a bitflip, arithmetics,
         or single-byte interesting value insertion. */

      if (!could_be_bitflip(orig ^ (u16)interesting_16[j]) &&
          !could_be_arith(orig, (u16)interesting_16[j], 2) &&
          !could_be_interest(orig, (u16)interesting_16[j], 2, 0)) {

        afl->stage_val_type = STAGE_VAL_LE;

        *(u16 *)(out_buf + i) = interesting_16[j];

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s INTERESTING16_%u_%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((u16)interesting_16[j] != SWAP16(interesting_16[j]) &&
          !could_be_bitflip(orig ^ SWAP16(interesting_16[j])) &&
          !could_be_arith(orig, SWAP16(interesting_16[j]), 2) &&
          !could_be_interest(orig, SWAP16(interesting_16[j]), 2, 1)) {

        afl->stage_val_type = STAGE_VAL_BE;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation),
                 "%s INTERESTING16BE_%u_%u", afl->queue_cur->fname, i, j);
#endif

        *(u16 *)(out_buf + i) = SWAP16(interesting_16[j]);
        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

    }

    *(u16 *)(out_buf + i) = orig;

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_INTEREST16] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_INTEREST16] += afl->stage_max;

  if (len < 4) { goto skip_interest; }

  /* Setting 32-bit integers, both endians. */

  afl->stage_name = "interest 32/8";
  afl->stage_short = "int32";
  afl->stage_cur = 0;
  afl->stage_max = 2 * (len - 3) * (sizeof(interesting_32) >> 2);

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 3; i++) {

    u32 orig = *(u32 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
        !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {

      afl->stage_max -= sizeof(interesting_32) >> 1;
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 0; j < sizeof(interesting_32) / 4; ++j) {

      afl->stage_cur_val = interesting_32[j];

      /* Skip if this could be a product of a bitflip, arithmetics,
         or word interesting value insertion. */

      if (!could_be_bitflip(orig ^ (u32)interesting_32[j]) &&
          !could_be_arith(orig, interesting_32[j], 4) &&
          !could_be_interest(orig, interesting_32[j], 4, 0)) {

        afl->stage_val_type = STAGE_VAL_LE;

        *(u32 *)(out_buf + i) = interesting_32[j];

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s INTERESTING32_%u_%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((u32)interesting_32[j] != SWAP32(interesting_32[j]) &&
          !could_be_bitflip(orig ^ SWAP32(interesting_32[j])) &&
          !could_be_arith(orig, SWAP32(interesting_32[j]), 4) &&
          !could_be_interest(orig, SWAP32(interesting_32[j]), 4, 1)) {

        afl->stage_val_type = STAGE_VAL_BE;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation),
                 "%s INTERESTING32BE_%u_%u", afl->queue_cur->fname, i, j);
#endif

        *(u32 *)(out_buf + i) = SWAP32(interesting_32[j]);
        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

    }

    *(u32 *)(out_buf + i) = orig;

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_INTEREST32] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_INTEREST32] += afl->stage_max;
skip_interest:

  /********************
   * DICTIONARY STUFF *
   ********************/

  if (!afl->extras_cnt) { goto skip_user_extras; }

  /* Overwrite with user-supplied extras. */

  afl->stage_name = "user extras (over)";
  afl->stage_short = "ext_UO";
  afl->stage_cur = 0;
  afl->stage_max = afl->extras_cnt * len;

  afl->stage_val_type = STAGE_VAL_NONE;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len; ++i) {

    u32 last_len = 0;

    afl->stage_cur_byte = i;

    /* Extras are sorted by size, from smallest to largest. This means
       that we don't have to worry about restoring the buffer in
       between writes at a particular offset determined by the outer
       loop. */

    for (j = 0; j < afl->extras_cnt; ++j) {

      /* Skip extras probabilistically if afl->extras_cnt > AFL_MAX_DET_EXTRAS.
         Also skip them if there's no room to insert the payload, if the token
         is redundant, or if its entire span has no bytes set in the effector
         map. */

      if ((afl->extras_cnt > afl->max_det_extras &&
           rand_below(afl, afl->extras_cnt) >= afl->max_det_extras) ||
          afl->extras[j].len > len - i ||
          !memcmp(afl->extras[j].data, out_buf + i, afl->extras[j].len) ||
          !memchr(eff_map + EFF_APOS(i), 1,
                  EFF_SPAN_ALEN(i, afl->extras[j].len))) {

        --afl->stage_max;
        continue;

      }

      last_len = afl->extras[j].len;
      memcpy(out_buf + i, afl->extras[j].data, last_len);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation),
               "%s EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

      ++afl->stage_cur;

    }

    /* Restore all the clobbered memory. */
    memcpy(out_buf + i, in_buf + i, last_len);

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_EXTRAS_UO] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_UO] += afl->stage_max;

  /* Insertion of user-supplied extras. */

  afl->stage_name = "user extras (insert)";
  afl->stage_short = "ext_UI";
  afl->stage_cur = 0;
  afl->stage_max = afl->extras_cnt * (len + 1);

  orig_hit_cnt = new_hit_cnt;

  ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
  if (unlikely(!ex_tmp)) { PFATAL("alloc"); }

  for (i = 0; i <= (u32)len; ++i) {

    afl->stage_cur_byte = i;

    for (j = 0; j < afl->extras_cnt; ++j) {

      if (len + afl->extras[j].len > MAX_FILE) {

        --afl->stage_max;
        continue;

      }

      /* Insert token */
      memcpy(ex_tmp + i, afl->extras[j].data, afl->extras[j].len);

      /* Copy tail */
      memcpy(ex_tmp + i + afl->extras[j].len, out_buf + i, len - i);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation), "%s EXTRAS_insert-%u-%u",
               afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, ex_tmp, len + afl->extras[j].len)) {

        goto abandon_entry;

      }

      ++afl->stage_cur;

    }

    /* Copy head */
    ex_tmp[i] = out_buf[i];

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_EXTRAS_UI] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_UI] += afl->stage_max;

skip_user_extras:

  if (!afl->a_extras_cnt) { goto skip_extras; }

  afl->stage_name = "auto extras (over)";
  afl->stage_short = "ext_AO";
  afl->stage_cur = 0;
  afl->stage_max = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS) * len;

  afl->stage_val_type = STAGE_VAL_NONE;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len; ++i) {

    u32 last_len = 0;

    afl->stage_cur_byte = i;

    u32 min_extra_len = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS);
    for (j = 0; j < min_extra_len; ++j) {

      /* See the comment in the earlier code; extras are sorted by size. */

      if (afl->a_extras[j].len > len - i ||
          !memcmp(afl->a_extras[j].data, out_buf + i, afl->a_extras[j].len) ||
          !memchr(eff_map + EFF_APOS(i), 1,
                  EFF_SPAN_ALEN(i, afl->a_extras[j].len))) {

        --afl->stage_max;
        continue;

      }

      last_len = afl->a_extras[j].len;
      memcpy(out_buf + i, afl->a_extras[j].data, last_len);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation),
               "%s AUTO_EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

      ++afl->stage_cur;

    }

    /* Restore all the clobbered memory. */
    memcpy(out_buf + i, in_buf + i, last_len);

  }

  new_hit_cnt = afl->queued_paths + afl->unique_crashes;

  afl->stage_finds[STAGE_EXTRAS_AO] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_AO] += afl->stage_max;

skip_extras:

  /* If we made it here without jumping to havoc_stage or abandon_entry,
     we're properly done with deterministic steps and can mark it as such
     in the .state/ directory. */

  if (!afl->queue_cur->passed_det) { mark_as_det_done(afl, afl->queue_cur); }
1787 
1788 custom_mutator_stage:
1789   /*******************
1790    * CUSTOM MUTATORS *
1791    *******************/
1792 
1793   if (likely(!afl->custom_mutators_count)) { goto havoc_stage; }
1794 
1795   afl->stage_name = "custom mutator";
1796   afl->stage_short = "custom";
1797   afl->stage_max = HAVOC_CYCLES * perf_score / afl->havoc_div / 100;
1798   afl->stage_val_type = STAGE_VAL_NONE;
1799   bool has_custom_fuzz = false;
1800 
1801   if (afl->stage_max < HAVOC_MIN) { afl->stage_max = HAVOC_MIN; }
1802 
1803   const u32 max_seed_size = MAX_FILE, saved_max = afl->stage_max;
1804 
1805   orig_hit_cnt = afl->queued_paths + afl->unique_crashes;
1806 
1807 #ifdef INTROSPECTION
1808   afl->mutation[0] = 0;
1809 #endif
1810 
1811   LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {
1812 
1813     if (el->afl_custom_fuzz) {
1814 
1815       afl->current_custom_fuzz = el;
1816 
1817       if (el->afl_custom_fuzz_count) {
1818 
1819         afl->stage_max = el->afl_custom_fuzz_count(el->data, out_buf, len);
1820 
1821       } else {
1822 
1823         afl->stage_max = saved_max;
1824 
1825       }
1826 
1827       has_custom_fuzz = true;
1828 
1829       afl->stage_short = el->name_short;
1830 
1831       if (afl->stage_max) {
1832 
1833         for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max;
1834              ++afl->stage_cur) {
1835 
1836           struct queue_entry *target = NULL;
1837           u32                 tid;
1838           u8 *                new_buf = NULL;
1839           u32                 target_len = 0;
1840 
1841           /* check if splicing makes sense yet (enough entries) */
1842           if (likely(afl->ready_for_splicing_count > 1)) {
1843 
1844             /* Pick a random other queue entry for passing to external API
1845                that has the necessary length */
1846 
1847             do {
1848 
1849               tid = rand_below(afl, afl->queued_paths);
1850 
1851             } while (unlikely(tid == afl->current_entry ||
1852 
1853                               afl->queue_buf[tid]->len < 4));
1854 
1855             target = afl->queue_buf[tid];
1856             afl->splicing_with = tid;
1857 
1858             /* Read the additional testcase into a new buffer. */
1859             new_buf = queue_testcase_get(afl, target);
1860             target_len = target->len;
1861 
1862           }
1863 
1864           u8 *mutated_buf = NULL;
1865 
1866           size_t mutated_size =
1867               el->afl_custom_fuzz(el->data, out_buf, len, &mutated_buf, new_buf,
1868                                   target_len, max_seed_size);
1869 
1870           if (unlikely(!mutated_buf)) {
1871 
1872             FATAL("Error in custom_fuzz. Size returned: %zu", mutated_size);
1873 
1874           }
1875 
1876           if (mutated_size > 0) {
1877 
1878             if (common_fuzz_stuff(afl, mutated_buf, (u32)mutated_size)) {
1879 
1880               goto abandon_entry;
1881 
1882             }
1883 
1884             if (!el->afl_custom_fuzz_count) {
1885 
1886               /* If we're finding new stuff, let's run for a bit longer, limits
1887                 permitting. */
1888 
1889               if (afl->queued_paths != havoc_queued) {
1890 
1891                 if (perf_score <= afl->havoc_max_mult * 100) {
1892 
1893                   afl->stage_max *= 2;
1894                   perf_score *= 2;
1895 
1896                 }
1897 
1898                 havoc_queued = afl->queued_paths;
1899 
1900               }
1901 
1902             }
1903 
1904           }
1905 
1906           /* `(afl->)out_buf` may have been changed by the call to custom_fuzz
1907            */
1908           /* TODO: Only do this when `mutated_buf` == `out_buf`? Branch vs
1909            * Memcpy.
1910            */
1911           memcpy(out_buf, in_buf, len);
1912 
1913         }
1914 
1915       }
1916 
1917     }
1918 
1919   });
1920 
1921   afl->current_custom_fuzz = NULL;
1922 
1923   if (!has_custom_fuzz) goto havoc_stage;
1924 
1925   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
1926 
1927   afl->stage_finds[STAGE_CUSTOM_MUTATOR] += new_hit_cnt - orig_hit_cnt;
1928   afl->stage_cycles[STAGE_CUSTOM_MUTATOR] += afl->stage_max;
1929 
1930   if (likely(afl->custom_only)) {
1931 
1932     /* Skip other stages */
1933     ret_val = 0;
1934     goto abandon_entry;
1935 
1936   }
1937 
1938   /****************
1939    * RANDOM HAVOC *
1940    ****************/
1941 
1942 havoc_stage:
1943 
1944   afl->stage_cur_byte = -1;
1945 
1946   /* The havoc stage mutation code is also invoked when splicing files; if the
1947      splice_cycle variable is set, generate different descriptions and such. */
1948 
1949   if (!splice_cycle) {
1950 
1951     afl->stage_name = "havoc";
1952     afl->stage_short = "havoc";
1953     afl->stage_max = (doing_det ? HAVOC_CYCLES_INIT : HAVOC_CYCLES) *
1954                      perf_score / afl->havoc_div / 100;
1955 
1956   } else {
1957 
1958     perf_score = orig_perf;
1959 
1960     snprintf(afl->stage_name_buf, STAGE_BUF_SIZE, "splice %u", splice_cycle);
1961     afl->stage_name = afl->stage_name_buf;
1962     afl->stage_short = "splice";
1963     afl->stage_max = SPLICE_HAVOC * perf_score / afl->havoc_div / 100;
1964 
1965   }
1966 
1967   if (afl->stage_max < HAVOC_MIN) { afl->stage_max = HAVOC_MIN; }
1968 
1969   temp_len = len;
1970 
1971   orig_hit_cnt = afl->queued_paths + afl->unique_crashes;
1972 
1973   havoc_queued = afl->queued_paths;
1974 
1975   if (afl->custom_mutators_count) {
1976 
1977     LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {
1978 
1979       if (el->stacked_custom && el->afl_custom_havoc_mutation_probability) {
1980 
1981         el->stacked_custom_prob =
1982             el->afl_custom_havoc_mutation_probability(el->data);
1983         if (el->stacked_custom_prob > 100) {
1984 
1985           FATAL(
1986               "The probability returned by "
              "afl_custom_havoc_mutation_probability "
1988               "has to be in the range 0-100.");
1989 
1990         }
1991 
1992       }
1993 
1994     });
1995 
1996   }
1997 
1998   /* We essentially just do several thousand runs (depending on perf_score)
1999      where we take the input file and make random stacked tweaks. */
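
  /* For scale: stage_max was computed above as (HAVOC_CYCLES or
     HAVOC_CYCLES_INIT) * perf_score / havoc_div / 100, so assuming the
     config.h defaults (256 and 1024) and a perf_score of 100 this means a
     few hundred to a thousand runs, scaled up further for high-scoring
     seeds. */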
2000 
2001 #define MAX_HAVOC_ENTRY 59                                      /* 55 to 60 */
2002 
2003   u32 r_max, r;
2004 
2005   r_max = (MAX_HAVOC_ENTRY + 1) + (afl->extras_cnt ? 4 : 0) +
2006           (afl->a_extras_cnt ? 4 : 0);
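
  /* Layout of r: values 0..MAX_HAVOC_ENTRY select the fixed mutations in
     the switch below; each available dictionary (user extras, auto extras)
     claims four more slots, and each of the two conditions that follow adds
     four slots for the expensive splice-style mutations in the default
     branch. */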
2007 
2008   if (unlikely(afl->expand_havoc && afl->ready_for_splicing_count > 1)) {
2009 
    /* Add expensive havoc cases here; they get activated after a full
       queue cycle has passed without any new finds. */
2012 
2013     r_max += 4;
2014 
2015   }
2016 
2017   if (unlikely(get_cur_time() - afl->last_path_time > 5000 /* 5 seconds */ &&
2018                afl->ready_for_splicing_count > 1)) {
2019 
    /* Add expensive havoc cases here if there have been no findings in the
       last 5 seconds. */
2021 
2022     r_max += 4;
2023 
2024   }
2025 
2026   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
2027 
2028     u32 use_stacking = 1 << (1 + rand_below(afl, afl->havoc_stack_pow2));
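
    /* use_stacking is a power of two in [2, 1 << afl->havoc_stack_pow2];
       assuming the default HAVOC_STACK_POW2 of 7 that is one of 2, 4, 8,
       16, 32, 64 or 128 stacked mutations per generated input. */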
2029 
2030     afl->stage_cur_val = use_stacking;
2031 
2032 #ifdef INTROSPECTION
2033     snprintf(afl->mutation, sizeof(afl->mutation), "%s HAVOC-%u",
2034              afl->queue_cur->fname, use_stacking);
2035 #endif
2036 
2037     for (i = 0; i < use_stacking; ++i) {
2038 
2039       if (afl->custom_mutators_count) {
2040 
2041         LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {
2042 
2043           if (el->stacked_custom &&
2044               rand_below(afl, 100) < el->stacked_custom_prob) {
2045 
2046             u8 *   custom_havoc_buf = NULL;
2047             size_t new_len = el->afl_custom_havoc_mutation(
2048                 el->data, out_buf, temp_len, &custom_havoc_buf, MAX_FILE);
2049             if (unlikely(!custom_havoc_buf)) {
2050 
2051               FATAL("Error in custom_havoc (return %zu)", new_len);
2052 
2053             }
2054 
2055             if (likely(new_len > 0 && custom_havoc_buf)) {
2056 
2057               temp_len = new_len;
2058               if (out_buf != custom_havoc_buf) {
2059 
2060                 out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len);
2061                 if (unlikely(!afl->out_buf)) { PFATAL("alloc"); }
2062                 memcpy(out_buf, custom_havoc_buf, temp_len);
2063 
2064               }
2065 
2066             }
2067 
2068           }
2069 
2070         });
2071 
2072       }
2073 
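      /* Draw one mutation: values up to MAX_HAVOC_ENTRY hit the fixed cases
         below, anything above falls into the default branch (dictionary
         overwrites / inserts, then splicing). */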
2074       switch ((r = rand_below(afl, r_max))) {
2075 
2076         case 0 ... 3: {
2077 
2078           /* Flip a single bit somewhere. Spooky! */
2079 
2080 #ifdef INTROSPECTION
2081           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT1");
2082           strcat(afl->mutation, afl->m_tmp);
2083 #endif
2084           FLIP_BIT(out_buf, rand_below(afl, temp_len << 3));
2085           break;
2086 
2087         }
2088 
2089         case 4 ... 7: {
2090 
2091           /* Set byte to interesting value. */
2092 
2093 #ifdef INTROSPECTION
2094           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING8");
2095           strcat(afl->mutation, afl->m_tmp);
2096 #endif
2097           out_buf[rand_below(afl, temp_len)] =
2098               interesting_8[rand_below(afl, sizeof(interesting_8))];
2099           break;
2100 
2101         }
2102 
2103         case 8 ... 9: {
2104 
2105           /* Set word to interesting value, little endian. */
2106 
2107           if (temp_len < 2) { break; }
2108 
2109 #ifdef INTROSPECTION
2110           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16");
2111           strcat(afl->mutation, afl->m_tmp);
2112 #endif
2113           *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
2114               interesting_16[rand_below(afl, sizeof(interesting_16) >> 1)];
2115 
2116           break;
2117 
2118         }
2119 
2120         case 10 ... 11: {
2121 
2122           /* Set word to interesting value, big endian. */
2123 
2124           if (temp_len < 2) { break; }
2125 
2126 #ifdef INTROSPECTION
2127           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16BE");
2128           strcat(afl->mutation, afl->m_tmp);
2129 #endif
2130           *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) = SWAP16(
2131               interesting_16[rand_below(afl, sizeof(interesting_16) >> 1)]);
2132 
2133           break;
2134 
2135         }
2136 
2137         case 12 ... 13: {
2138 
2139           /* Set dword to interesting value, little endian. */
2140 
2141           if (temp_len < 4) { break; }
2142 
2143 #ifdef INTROSPECTION
2144           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32");
2145           strcat(afl->mutation, afl->m_tmp);
2146 #endif
2147           *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
2148               interesting_32[rand_below(afl, sizeof(interesting_32) >> 2)];
2149 
2150           break;
2151 
2152         }
2153 
2154         case 14 ... 15: {
2155 
2156           /* Set dword to interesting value, big endian. */
2157 
2158           if (temp_len < 4) { break; }
2159 
2160 #ifdef INTROSPECTION
2161           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32BE");
2162           strcat(afl->mutation, afl->m_tmp);
2163 #endif
2164           *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) = SWAP32(
2165               interesting_32[rand_below(afl, sizeof(interesting_32) >> 2)]);
2166 
2167           break;
2168 
2169         }
2170 
2171         case 16 ... 19: {
2172 
2173           /* Randomly subtract from byte. */
2174 
2175 #ifdef INTROSPECTION
2176           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8_");
2177           strcat(afl->mutation, afl->m_tmp);
2178 #endif
2179           out_buf[rand_below(afl, temp_len)] -= 1 + rand_below(afl, ARITH_MAX);
2180           break;
2181 
2182         }
2183 
2184         case 20 ... 23: {
2185 
2186           /* Randomly add to byte. */
2187 
2188 #ifdef INTROSPECTION
2189           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8+");
2190           strcat(afl->mutation, afl->m_tmp);
2191 #endif
2192           out_buf[rand_below(afl, temp_len)] += 1 + rand_below(afl, ARITH_MAX);
2193           break;
2194 
2195         }
2196 
2197         case 24 ... 25: {
2198 
2199           /* Randomly subtract from word, little endian. */
2200 
2201           if (temp_len < 2) { break; }
2202 
2203           u32 pos = rand_below(afl, temp_len - 1);
2204 
2205 #ifdef INTROSPECTION
2206           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16_-%u", pos);
2207           strcat(afl->mutation, afl->m_tmp);
2208 #endif
2209           *(u16 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);
2210 
2211           break;
2212 
2213         }
2214 
2215         case 26 ... 27: {
2216 
2217           /* Randomly subtract from word, big endian. */
2218 
2219           if (temp_len < 2) { break; }
2220 
2221           u32 pos = rand_below(afl, temp_len - 1);
2222           u16 num = 1 + rand_below(afl, ARITH_MAX);
2223 
2224 #ifdef INTROSPECTION
2225           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16_BE-%u_%u", pos,
2226                    num);
2227           strcat(afl->mutation, afl->m_tmp);
2228 #endif
2229           *(u16 *)(out_buf + pos) =
2230               SWAP16(SWAP16(*(u16 *)(out_buf + pos)) - num);
2231 
2232           break;
2233 
2234         }
2235 
2236         case 28 ... 29: {
2237 
2238           /* Randomly add to word, little endian. */
2239 
2240           if (temp_len < 2) { break; }
2241 
2242           u32 pos = rand_below(afl, temp_len - 1);
2243 
2244 #ifdef INTROSPECTION
2245           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+-%u", pos);
2246           strcat(afl->mutation, afl->m_tmp);
2247 #endif
2248           *(u16 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);
2249 
2250           break;
2251 
2252         }
2253 
2254         case 30 ... 31: {
2255 
2256           /* Randomly add to word, big endian. */
2257 
2258           if (temp_len < 2) { break; }
2259 
2260           u32 pos = rand_below(afl, temp_len - 1);
2261           u16 num = 1 + rand_below(afl, ARITH_MAX);
2262 
2263 #ifdef INTROSPECTION
2264           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+BE-%u_%u", pos,
2265                    num);
2266           strcat(afl->mutation, afl->m_tmp);
2267 #endif
2268           *(u16 *)(out_buf + pos) =
2269               SWAP16(SWAP16(*(u16 *)(out_buf + pos)) + num);
2270 
2271           break;
2272 
2273         }
2274 
2275         case 32 ... 33: {
2276 
2277           /* Randomly subtract from dword, little endian. */
2278 
2279           if (temp_len < 4) { break; }
2280 
2281           u32 pos = rand_below(afl, temp_len - 3);
2282 
2283 #ifdef INTROSPECTION
2284           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32_-%u", pos);
2285           strcat(afl->mutation, afl->m_tmp);
2286 #endif
2287           *(u32 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);
2288 
2289           break;
2290 
2291         }
2292 
2293         case 34 ... 35: {
2294 
2295           /* Randomly subtract from dword, big endian. */
2296 
2297           if (temp_len < 4) { break; }
2298 
2299           u32 pos = rand_below(afl, temp_len - 3);
2300           u32 num = 1 + rand_below(afl, ARITH_MAX);
2301 
2302 #ifdef INTROSPECTION
2303           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32_BE-%u-%u", pos,
2304                    num);
2305           strcat(afl->mutation, afl->m_tmp);
2306 #endif
2307           *(u32 *)(out_buf + pos) =
2308               SWAP32(SWAP32(*(u32 *)(out_buf + pos)) - num);
2309 
2310           break;
2311 
2312         }
2313 
2314         case 36 ... 37: {
2315 
2316           /* Randomly add to dword, little endian. */
2317 
2318           if (temp_len < 4) { break; }
2319 
2320           u32 pos = rand_below(afl, temp_len - 3);
2321 
2322 #ifdef INTROSPECTION
2323           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+-%u", pos);
2324           strcat(afl->mutation, afl->m_tmp);
2325 #endif
2326           *(u32 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);
2327 
2328           break;
2329 
2330         }
2331 
2332         case 38 ... 39: {
2333 
2334           /* Randomly add to dword, big endian. */
2335 
2336           if (temp_len < 4) { break; }
2337 
2338           u32 pos = rand_below(afl, temp_len - 3);
2339           u32 num = 1 + rand_below(afl, ARITH_MAX);
2340 
2341 #ifdef INTROSPECTION
2342           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+BE-%u-%u", pos,
2343                    num);
2344           strcat(afl->mutation, afl->m_tmp);
2345 #endif
2346           *(u32 *)(out_buf + pos) =
2347               SWAP32(SWAP32(*(u32 *)(out_buf + pos)) + num);
2348 
2349           break;
2350 
2351         }
2352 
2353         case 40 ... 43: {
2354 
2355           /* Just set a random byte to a random value. Because,
2356              why not. We use XOR with 1-255 to eliminate the
2357              possibility of a no-op. */
2358 
2359 #ifdef INTROSPECTION
2360           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " RAND8");
2361           strcat(afl->mutation, afl->m_tmp);
2362 #endif
2363           out_buf[rand_below(afl, temp_len)] ^= 1 + rand_below(afl, 255);
2364           break;
2365 
2366         }
2367 
2368         case 44 ... 46: {
2369 
2370           if (temp_len + HAVOC_BLK_XL < MAX_FILE) {
2371 
2372             /* Clone bytes. */
2373 
2374             u32 clone_len = choose_block_len(afl, temp_len);
2375             u32 clone_from = rand_below(afl, temp_len - clone_len + 1);
2376             u32 clone_to = rand_below(afl, temp_len);
2377 
2378 #ifdef INTROSPECTION
2379             snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s-%u-%u-%u",
2380                      "clone", clone_from, clone_to, clone_len);
2381             strcat(afl->mutation, afl->m_tmp);
2382 #endif
2383             u8 *new_buf =
2384                 afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len);
2385             if (unlikely(!new_buf)) { PFATAL("alloc"); }
2386 
2387             /* Head */
2388 
2389             memcpy(new_buf, out_buf, clone_to);
2390 
2391             /* Inserted part */
2392 
2393             memcpy(new_buf + clone_to, out_buf + clone_from, clone_len);
2394 
2395             /* Tail */
2396             memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
2397                    temp_len - clone_to);
2398 
2399             out_buf = new_buf;
2400             afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
2401             temp_len += clone_len;
2402 
2403           }
2404 
2405           break;
2406 
2407         }
2408 
2409         case 47: {
2410 
2411           if (temp_len + HAVOC_BLK_XL < MAX_FILE) {
2412 
2413             /* Insert a block of constant bytes (25%). */
2414 
2415             u32 clone_len = choose_block_len(afl, HAVOC_BLK_XL);
2416             u32 clone_to = rand_below(afl, temp_len);
2417 
2418 #ifdef INTROSPECTION
2419             snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s-%u-%u",
2420                      "insert", clone_to, clone_len);
2421             strcat(afl->mutation, afl->m_tmp);
2422 #endif
2423             u8 *new_buf =
2424                 afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len);
2425             if (unlikely(!new_buf)) { PFATAL("alloc"); }
2426 
2427             /* Head */
2428 
2429             memcpy(new_buf, out_buf, clone_to);
2430 
2431             /* Inserted part */
2432 
2433             memset(new_buf + clone_to,
2434                    rand_below(afl, 2) ? rand_below(afl, 256)
2435                                       : out_buf[rand_below(afl, temp_len)],
2436                    clone_len);
2437 
2438             /* Tail */
2439             memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
2440                    temp_len - clone_to);
2441 
2442             out_buf = new_buf;
2443             afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
2444             temp_len += clone_len;
2445 
2446           }
2447 
2448           break;
2449 
2450         }
2451 
2452         case 48 ... 50: {
2453 
          /* Overwrite bytes with a randomly selected chunk of bytes. */
2455 
2456           if (temp_len < 2) { break; }
2457 
2458           u32 copy_len = choose_block_len(afl, temp_len - 1);
2459           u32 copy_from = rand_below(afl, temp_len - copy_len + 1);
2460           u32 copy_to = rand_below(afl, temp_len - copy_len + 1);
2461 
2462           if (likely(copy_from != copy_to)) {
2463 
2464 #ifdef INTROSPECTION
2465             snprintf(afl->m_tmp, sizeof(afl->m_tmp), " OVERWRITE_COPY-%u-%u-%u",
2466                      copy_from, copy_to, copy_len);
2467             strcat(afl->mutation, afl->m_tmp);
2468 #endif
2469             memmove(out_buf + copy_to, out_buf + copy_from, copy_len);
2470 
2471           }
2472 
2473           break;
2474 
2475         }
2476 
2477         case 51: {
2478 
2479           /* Overwrite bytes with fixed bytes. */
2480 
2481           if (temp_len < 2) { break; }
2482 
2483           u32 copy_len = choose_block_len(afl, temp_len - 1);
2484           u32 copy_to = rand_below(afl, temp_len - copy_len + 1);
2485 
2486 #ifdef INTROSPECTION
2487           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " OVERWRITE_FIXED-%u-%u",
2488                    copy_to, copy_len);
2489           strcat(afl->mutation, afl->m_tmp);
2490 #endif
2491           memset(out_buf + copy_to,
2492                  rand_below(afl, 2) ? rand_below(afl, 256)
2493                                     : out_buf[rand_below(afl, temp_len)],
2494                  copy_len);
2495 
2496           break;
2497 
2498         }
2499 
2500         // increase from 4 up to 8?
2501         case 52 ... MAX_HAVOC_ENTRY: {
2502 
          /* Delete bytes. We're making this a bit more likely than the
             insertion options above, in hopes of keeping files reasonably
             small. */
2506 
2507           if (temp_len < 2) { break; }
2508 
2509           /* Don't delete too much. */
2510 
2511           u32 del_len = choose_block_len(afl, temp_len - 1);
2512           u32 del_from = rand_below(afl, temp_len - del_len + 1);
2513 
2514 #ifdef INTROSPECTION
2515           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " DEL-%u-%u", del_from,
2516                    del_len);
2517           strcat(afl->mutation, afl->m_tmp);
2518 #endif
2519           memmove(out_buf + del_from, out_buf + del_from + del_len,
2520                   temp_len - del_from - del_len);
2521 
2522           temp_len -= del_len;
2523 
2524           break;
2525 
2526         }
2527 
2528         default:
2529 
2530           r -= (MAX_HAVOC_ENTRY + 1);
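
          /* r is now rebased onto the optional slots: each dictionary block
             below either consumes four of them (two overwrite picks, two
             insert picks) or subtracts four from r and passes it on. */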
2531 
2532           if (afl->extras_cnt) {
2533 
2534             if (r < 2) {
2535 
2536               /* Use the dictionary. */
2537 
2538               u32 use_extra = rand_below(afl, afl->extras_cnt);
2539               u32 extra_len = afl->extras[use_extra].len;
2540 
2541               if (extra_len > temp_len) { break; }
2542 
2543               u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
2544 #ifdef INTROSPECTION
2545               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA_OVERWRITE-%u-%u",
2546                        insert_at, extra_len);
2547               strcat(afl->mutation, afl->m_tmp);
2548 #endif
2549               memcpy(out_buf + insert_at, afl->extras[use_extra].data,
2550                      extra_len);
2551 
2552               break;
2553 
2554             } else if (r < 4) {
2555 
2556               u32 use_extra = rand_below(afl, afl->extras_cnt);
2557               u32 extra_len = afl->extras[use_extra].len;
2558               if (temp_len + extra_len >= MAX_FILE) { break; }
2559 
2560               u8 *ptr = afl->extras[use_extra].data;
2561               u32 insert_at = rand_below(afl, temp_len + 1);
2562 #ifdef INTROSPECTION
2563               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA_INSERT-%u-%u",
2564                        insert_at, extra_len);
2565               strcat(afl->mutation, afl->m_tmp);
2566 #endif
2567 
2568               out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
2569               if (unlikely(!out_buf)) { PFATAL("alloc"); }
2570 
2571               /* Tail */
2572               memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
2573                       temp_len - insert_at);
2574 
2575               /* Inserted part */
2576               memcpy(out_buf + insert_at, ptr, extra_len);
2577               temp_len += extra_len;
2578 
2579               break;
2580 
2581             } else {
2582 
2583               r -= 4;
2584 
2585             }
2586 
2587           }
2588 
2589           if (afl->a_extras_cnt) {
2590 
2591             if (r < 2) {
2592 
2593               /* Use the dictionary. */
2594 
2595               u32 use_extra = rand_below(afl, afl->a_extras_cnt);
2596               u32 extra_len = afl->a_extras[use_extra].len;
2597 
2598               if (extra_len > temp_len) { break; }
2599 
2600               u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
2601 #ifdef INTROSPECTION
2602               snprintf(afl->m_tmp, sizeof(afl->m_tmp),
2603                        " AUTO_EXTRA_OVERWRITE-%u-%u", insert_at, extra_len);
2604               strcat(afl->mutation, afl->m_tmp);
2605 #endif
2606               memcpy(out_buf + insert_at, afl->a_extras[use_extra].data,
2607                      extra_len);
2608 
2609               break;
2610 
2611             } else if (r < 4) {
2612 
2613               u32 use_extra = rand_below(afl, afl->a_extras_cnt);
2614               u32 extra_len = afl->a_extras[use_extra].len;
2615               if (temp_len + extra_len >= MAX_FILE) { break; }
2616 
2617               u8 *ptr = afl->a_extras[use_extra].data;
2618               u32 insert_at = rand_below(afl, temp_len + 1);
2619 #ifdef INTROSPECTION
2620               snprintf(afl->m_tmp, sizeof(afl->m_tmp),
2621                        " AUTO_EXTRA_INSERT-%u-%u", insert_at, extra_len);
2622               strcat(afl->mutation, afl->m_tmp);
2623 #endif
2624 
2625               out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
2626               if (unlikely(!out_buf)) { PFATAL("alloc"); }
2627 
2628               /* Tail */
2629               memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
2630                       temp_len - insert_at);
2631 
2632               /* Inserted part */
2633               memcpy(out_buf + insert_at, ptr, extra_len);
2634               temp_len += extra_len;
2635 
2636               break;
2637 
2638             } else {
2639 
2640               r -= 4;
2641 
2642             }
2643 
2644           }
2645 
          /* Otherwise, if we are still here, splice: overwrite bytes with a
             randomly selected chunk from another testcase, or insert that
             chunk. */
2649 
2650           /* Pick a random queue entry and seek to it. */
2651 
2652           u32 tid;
2653           do {
2654 
2655             tid = rand_below(afl, afl->queued_paths);
2656 
2657           } while (tid == afl->current_entry || afl->queue_buf[tid]->len < 4);
2658 
2659           /* Get the testcase for splicing. */
2660           struct queue_entry *target = afl->queue_buf[tid];
2661           u32                 new_len = target->len;
2662           u8 *                new_buf = queue_testcase_get(afl, target);
2663 
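          /* r % 2 picks overwrite mode when we have at least two bytes to
             work with; overwrite is also forced when inserting up to
             HAVOC_BLK_XL more bytes could exceed MAX_FILE. */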
2664           if ((temp_len >= 2 && r % 2) || temp_len + HAVOC_BLK_XL >= MAX_FILE) {
2665 
2666             /* overwrite mode */
2667 
2668             u32 copy_from, copy_to, copy_len;
2669 
2670             copy_len = choose_block_len(afl, new_len - 1);
2671             if (copy_len > temp_len) copy_len = temp_len;
2672 
2673             copy_from = rand_below(afl, new_len - copy_len + 1);
2674             copy_to = rand_below(afl, temp_len - copy_len + 1);
2675 
2676 #ifdef INTROSPECTION
2677             snprintf(afl->m_tmp, sizeof(afl->m_tmp),
2678                      " SPLICE_OVERWRITE-%u-%u-%u-%s", copy_from, copy_to,
2679                      copy_len, target->fname);
2680             strcat(afl->mutation, afl->m_tmp);
2681 #endif
2682             memmove(out_buf + copy_to, new_buf + copy_from, copy_len);
2683 
2684           } else {
2685 
2686             /* insert mode */
2687 
2688             u32 clone_from, clone_to, clone_len;
2689 
2690             clone_len = choose_block_len(afl, new_len);
2691             clone_from = rand_below(afl, new_len - clone_len + 1);
2692             clone_to = rand_below(afl, temp_len + 1);
2693 
2694             u8 *temp_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
2695                                        temp_len + clone_len + 1);
2696             if (unlikely(!temp_buf)) { PFATAL("alloc"); }
2697 
2698 #ifdef INTROSPECTION
2699             snprintf(afl->m_tmp, sizeof(afl->m_tmp),
2700                      " SPLICE_INSERT-%u-%u-%u-%s", clone_from, clone_to,
2701                      clone_len, target->fname);
2702             strcat(afl->mutation, afl->m_tmp);
2703 #endif
2704             /* Head */
2705 
2706             memcpy(temp_buf, out_buf, clone_to);
2707 
2708             /* Inserted part */
2709 
2710             memcpy(temp_buf + clone_to, new_buf + clone_from, clone_len);
2711 
2712             /* Tail */
2713             memcpy(temp_buf + clone_to + clone_len, out_buf + clone_to,
2714                    temp_len - clone_to);
2715 
2716             out_buf = temp_buf;
2717             afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
2718             temp_len += clone_len;
2719 
2720           }
2721 
2722           break;
2723 
2724           // end of default
2725 
2726       }
2727 
2728     }
2729 
2730     if (common_fuzz_stuff(afl, out_buf, temp_len)) { goto abandon_entry; }
2731 
2732     /* out_buf might have been mangled a bit, so let's restore it to its
2733        original size and shape. */
2734 
2735     out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
2736     if (unlikely(!out_buf)) { PFATAL("alloc"); }
2737     temp_len = len;
2738     memcpy(out_buf, in_buf, len);
2739 
2740     /* If we're finding new stuff, let's run for a bit longer, limits
2741        permitting. */
2742 
2743     if (afl->queued_paths != havoc_queued) {
2744 
2745       if (perf_score <= afl->havoc_max_mult * 100) {
2746 
2747         afl->stage_max *= 2;
2748         perf_score *= 2;
2749 
2750       }
2751 
2752       havoc_queued = afl->queued_paths;
2753 
2754     }
2755 
2756   }
2757 
2758   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
2759 
2760   if (!splice_cycle) {
2761 
2762     afl->stage_finds[STAGE_HAVOC] += new_hit_cnt - orig_hit_cnt;
2763     afl->stage_cycles[STAGE_HAVOC] += afl->stage_max;
2764 
2765   } else {
2766 
2767     afl->stage_finds[STAGE_SPLICE] += new_hit_cnt - orig_hit_cnt;
2768     afl->stage_cycles[STAGE_SPLICE] += afl->stage_max;
2769 
2770   }
2771 
2772 #ifndef IGNORE_FINDS
2773 
2774   /************
2775    * SPLICING *
2776    ************/
2777 
2778   /* This is a last-resort strategy triggered by a full round with no findings.
2779      It takes the current input file, randomly selects another input, and
2780      splices them together at some offset, then relies on the havoc
2781      code to mutate that blob. */
2782 
2783 retry_splicing:
2784 
2785   if (afl->use_splicing && splice_cycle++ < SPLICE_CYCLES &&
2786       afl->ready_for_splicing_count > 1 && afl->queue_cur->len >= 4) {
2787 
2788     struct queue_entry *target;
2789     u32                 tid, split_at;
2790     u8 *                new_buf;
2791     s32                 f_diff, l_diff;
2792 
2793     /* First of all, if we've modified in_buf for havoc, let's clean that
2794        up... */
2795 
2796     if (in_buf != orig_in) {
2797 
2798       in_buf = orig_in;
2799       len = afl->queue_cur->len;
2800 
2801     }
2802 
2803     /* Pick a random queue entry and seek to it. Don't splice with yourself. */
2804 
2805     do {
2806 
2807       tid = rand_below(afl, afl->queued_paths);
2808 
2809     } while (tid == afl->current_entry || afl->queue_buf[tid]->len < 4);
2810 
2811     /* Get the testcase */
2812     afl->splicing_with = tid;
2813     target = afl->queue_buf[tid];
2814     new_buf = queue_testcase_get(afl, target);
2815 
2816     /* Find a suitable splicing location, somewhere between the first and
2817        the last differing byte. Bail out if the difference is just a single
2818        byte or so. */
2819 
2820     locate_diffs(in_buf, new_buf, MIN(len, (s64)target->len), &f_diff, &l_diff);
2821 
2822     if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) { goto retry_splicing; }
2823 
2824     /* Split somewhere between the first and last differing byte. */
2825 
2826     split_at = f_diff + rand_below(afl, l_diff - f_diff);
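
    /* split_at is uniform in [f_diff, l_diff): e.g. f_diff == 10 and
       l_diff == 50 yield a split point between bytes 10 and 49. */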
2827 
2828     /* Do the thing. */
2829 
2830     len = target->len;
    afl->in_scratch_buf = afl_realloc(AFL_BUF_PARAM(in_scratch), len);
    if (unlikely(!afl->in_scratch_buf)) { PFATAL("alloc"); }
2832     memcpy(afl->in_scratch_buf, in_buf, split_at);
2833     memcpy(afl->in_scratch_buf + split_at, new_buf, len - split_at);
2834     in_buf = afl->in_scratch_buf;
2835     afl_swap_bufs(AFL_BUF_PARAM(in), AFL_BUF_PARAM(in_scratch));
2836 
2837     out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
2838     if (unlikely(!out_buf)) { PFATAL("alloc"); }
2839     memcpy(out_buf, in_buf, len);
2840 
2841     goto custom_mutator_stage;
2842 
2843   }
2844 
2845 #endif                                                     /* !IGNORE_FINDS */
2846 
2847   ret_val = 0;
2848 
2849 /* we are through with this queue entry - for this iteration */
2850 abandon_entry:
2851 
2852   afl->splicing_with = -1;
2853 
2854   /* Update afl->pending_not_fuzzed count if we made it through the calibration
2855      cycle and have not seen this entry before. */
2856 
2857   if (!afl->stop_soon && !afl->queue_cur->cal_failed &&
2858       (afl->queue_cur->was_fuzzed == 0 || afl->queue_cur->fuzz_level == 0) &&
2859       !afl->queue_cur->disabled) {
2860 
2861     if (!afl->queue_cur->was_fuzzed) {
2862 
2863       --afl->pending_not_fuzzed;
2864       afl->queue_cur->was_fuzzed = 1;
2865       afl->reinit_table = 1;
2866       if (afl->queue_cur->favored) { --afl->pending_favored; }
2867 
2868     }
2869 
2870   }
2871 
2872   ++afl->queue_cur->fuzz_level;
2873   orig_in = NULL;
2874   return ret_val;
2875 
2876 #undef FLIP_BIT
2877 
2878 }
2879 
2880 /* MOpt mode */
static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
2882 
2883   if (!MOpt_globals.is_pilot_mode) {
2884 
2885     if (swarm_num == 1) {
2886 
2887       afl->key_module = 2;
2888       return 0;
2889 
2890     }
2891 
2892   }
2893 
2894   u32 len, temp_len;
2895   u32 i;
2896   u32 j;
2897   u8 *in_buf, *out_buf, *orig_in, *ex_tmp, *eff_map = 0;
2898   u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt = 0, cur_ms_lv, prev_cksum;
2899   u32 splice_cycle = 0, perf_score = 100, orig_perf, eff_cnt = 1;
2900 
2901   u8 ret_val = 1, doing_det = 0;
2902 
2903   u8  a_collect[MAX_AUTO_EXTRA];
2904   u32 a_len = 0;
2905 
2906 #ifdef IGNORE_FINDS
2907 
2908   /* In IGNORE_FINDS mode, skip any entries that weren't in the
2909      initial data set. */
2910 
2911   if (afl->queue_cur->depth > 1) return 1;
2912 
2913 #else
2914 
2915   if (likely(afl->pending_favored)) {
2916 
2917     /* If we have any favored, non-fuzzed new arrivals in the queue,
2918        possibly skip to them at the expense of already-fuzzed or non-favored
2919        cases. */
2920 
2921     if (((afl->queue_cur->was_fuzzed > 0 || afl->queue_cur->fuzz_level > 0) ||
2922          !afl->queue_cur->favored) &&
2923         rand_below(afl, 100) < SKIP_TO_NEW_PROB) {
2924 
2925       return 1;
2926 
2927     }
2928 
2929   } else if (!afl->non_instrumented_mode && !afl->queue_cur->favored &&
2930 
2931              afl->queued_paths > 10) {
2932 
2933     /* Otherwise, still possibly skip non-favored cases, albeit less often.
2934        The odds of skipping stuff are higher for already-fuzzed inputs and
2935        lower for never-fuzzed entries. */
2936 
2937     if (afl->queue_cycle > 1 &&
2938         (afl->queue_cur->fuzz_level == 0 || afl->queue_cur->was_fuzzed)) {
2939 
2940       if (likely(rand_below(afl, 100) < SKIP_NFAV_NEW_PROB)) { return 1; }
2941 
2942     } else {
2943 
2944       if (likely(rand_below(afl, 100) < SKIP_NFAV_OLD_PROB)) { return 1; }
2945 
2946     }
2947 
2948   }
2949 
2950 #endif                                                     /* ^IGNORE_FINDS */
2951 
2952   if (afl->not_on_tty) {
2953 
2954     ACTF("Fuzzing test case #%u (%u total, %llu uniq crashes found)...",
2955          afl->current_entry, afl->queued_paths, afl->unique_crashes);
2956     fflush(stdout);
2957 
2958   }
2959 
2960   /* Map the test case into memory. */
2961   orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);
2962   len = afl->queue_cur->len;
2963 
2964   out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
2965   if (unlikely(!out_buf)) { PFATAL("alloc"); }
2966 
2967   afl->subseq_tmouts = 0;
2968 
2969   afl->cur_depth = afl->queue_cur->depth;
2970 
2971   /*******************************************
2972    * CALIBRATION (only if failed earlier on) *
2973    *******************************************/
2974 
2975   if (unlikely(afl->queue_cur->cal_failed)) {
2976 
2977     u8 res = FSRV_RUN_TMOUT;
2978 
2979     if (afl->queue_cur->cal_failed < CAL_CHANCES) {
2980 
2981       afl->queue_cur->exec_cksum = 0;
2982 
2983       res =
2984           calibrate_case(afl, afl->queue_cur, in_buf, afl->queue_cycle - 1, 0);
2985 
2986       if (res == FSRV_RUN_ERROR) {
2987 
2988         FATAL("Unable to execute target application");
2989 
2990       }
2991 
2992     }
2993 
2994     if (afl->stop_soon || res != afl->crash_mode) {
2995 
2996       ++afl->cur_skipped_paths;
2997       goto abandon_entry;
2998 
2999     }
3000 
3001   }
3002 
3003   /************
3004    * TRIMMING *
3005    ************/
3006 
3007   if (unlikely(!afl->non_instrumented_mode && !afl->queue_cur->trim_done &&
3008                !afl->disable_trim)) {
3009 
3010     u32 old_len = afl->queue_cur->len;
3011 
3012     u8 res = trim_case(afl, afl->queue_cur, in_buf);
3013     orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);
3014 
3015     if (unlikely(res == FSRV_RUN_ERROR)) {
3016 
3017       FATAL("Unable to execute target application");
3018 
3019     }
3020 
3021     if (unlikely(afl->stop_soon)) {
3022 
3023       ++afl->cur_skipped_paths;
3024       goto abandon_entry;
3025 
3026     }
3027 
3028     /* Don't retry trimming, even if it failed. */
3029 
3030     afl->queue_cur->trim_done = 1;
3031 
3032     len = afl->queue_cur->len;
3033 
    /* Maybe the current entry is no longer eligible for splicing. */
3035     if (unlikely(len <= 4 && old_len > 4)) --afl->ready_for_splicing_count;
3036 
3037   }
3038 
3039   memcpy(out_buf, in_buf, len);
3040 
3041   /*********************
3042    * PERFORMANCE SCORE *
3043    *********************/
3044 
3045   if (likely(!afl->old_seed_selection))
3046     orig_perf = perf_score = afl->queue_cur->perf_score;
3047   else
3048     orig_perf = perf_score = calculate_score(afl, afl->queue_cur);
3049 
3050   if (unlikely(perf_score <= 0)) { goto abandon_entry; }
3051 
3052   if (unlikely(afl->shm.cmplog_mode &&
3053                afl->queue_cur->colorized < afl->cmplog_lvl &&
3054                (u32)len <= afl->cmplog_max_filesize)) {
3055 
3056     if (unlikely(len < 4)) {
3057 
3058       afl->queue_cur->colorized = CMPLOG_LVL_MAX;
3059 
3060     } else {
3061 
3062       if (afl->cmplog_lvl == 3 ||
3063           (afl->cmplog_lvl == 2 && afl->queue_cur->tc_ref) ||
3064           !(afl->fsrv.total_execs % afl->queued_paths) ||
3065           get_cur_time() - afl->last_path_time > 300000) {  // 300 seconds
3066 
3067         if (input_to_state_stage(afl, in_buf, out_buf, len)) {
3068 
3069           goto abandon_entry;
3070 
3071         }
3072 
3073       }
3074 
3075     }
3076 
3077   }
3078 
  /* Go to pacemaker fuzzing if regular MOpt fuzzing has stopped producing
     new finds within the time limit. */
3080 
3081   cur_ms_lv = get_cur_time();
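
  /* Unfolding the negated condition: we enter pacemaker mode if key_puppet
     is already set, or if neither a new path nor a crash has been seen
     within the last limit_time_puppet milliseconds (and at least one path
     has been found, i.e. last_path_time != 0). */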
3082   if (!(afl->key_puppet == 0 &&
3083         ((cur_ms_lv - afl->last_path_time < (u32)afl->limit_time_puppet) ||
3084          (afl->last_crash_time != 0 &&
3085           cur_ms_lv - afl->last_crash_time < (u32)afl->limit_time_puppet) ||
3086          afl->last_path_time == 0))) {
3087 
3088     afl->key_puppet = 1;
3089     goto pacemaker_fuzzing;
3090 
3091   }
3092 
3093   /* Skip right away if -d is given, if we have done deterministic fuzzing on
3094      this entry ourselves (was_fuzzed), or if it has gone through deterministic
3095      testing in earlier, resumed runs (passed_det). */
3096 
3097   if (likely(afl->skip_deterministic || afl->queue_cur->was_fuzzed ||
3098              afl->queue_cur->passed_det)) {
3099 
3100     goto havoc_stage;
3101 
3102   }
3103 
3104   /* Skip deterministic fuzzing if exec path checksum puts this out of scope
3105      for this main instance. */
3106 
3107   if (unlikely(afl->main_node_max &&
3108                (afl->queue_cur->exec_cksum % afl->main_node_max) !=
3109                    afl->main_node_id - 1)) {
3110 
3111     goto havoc_stage;
3112 
3113   }
3114 
3115   doing_det = 1;
3116 
3117   /*********************************************
3118    * SIMPLE BITFLIP (+dictionary construction) *
3119    *********************************************/
3120 
3121 #define FLIP_BIT(_ar, _b)                   \
3122   do {                                      \
3123                                             \
3124     u8 *_arf = (u8 *)(_ar);                 \
3125     u32 _bf = (_b);                         \
3126     _arf[(_bf) >> 3] ^= (128 >> ((_bf)&7)); \
3127                                             \
3128   } while (0)
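
  /* Example: FLIP_BIT(buf, 10) targets buf[10 >> 3] == buf[1] and XORs it
     with 128 >> (10 & 7) == 0x20, flipping the third-most-significant bit
     of the second byte. */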
3129 
3130   /* Single walking bit. */
3131 
3132   afl->stage_short = "flip1";
3133   afl->stage_max = len << 3;
3134   afl->stage_name = "bitflip 1/1";
3135 
3136   afl->stage_val_type = STAGE_VAL_NONE;
3137 
3138   orig_hit_cnt = afl->queued_paths + afl->unique_crashes;
3139 
3140   prev_cksum = afl->queue_cur->exec_cksum;
3141 
3142   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
3143 
3144     afl->stage_cur_byte = afl->stage_cur >> 3;
3145 
3146     FLIP_BIT(out_buf, afl->stage_cur);
3147 
3148 #ifdef INTROSPECTION
3149     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT1-%u",
3150              afl->queue_cur->fname, afl->stage_cur);
3151 #endif
3152     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3153 
3154     FLIP_BIT(out_buf, afl->stage_cur);
3155 
    /* While flipping the least significant bit in every byte, pull off an extra
3157        trick to detect possible syntax tokens. In essence, the idea is that if
3158        you have a binary blob like this:
3159 
3160        xxxxxxxxIHDRxxxxxxxx
3161 
3162        ...and changing the leading and trailing bytes causes variable or no
3163        changes in program flow, but touching any character in the "IHDR" string
3164        always produces the same, distinctive path, it's highly likely that
3165        "IHDR" is an atomically-checked magic value of special significance to
3166        the fuzzed format.
3167 
3168        We do this here, rather than as a separate stage, because it's a nice
3169        way to keep the operation approximately "free" (i.e., no extra execs).
3170 
3171        Empirically, performing the check when flipping the least significant bit
3172        is advantageous, compared to doing it at the time of more disruptive
3173        changes, where the program flow may be affected in more violent ways.
3174 
3175        The caveat is that we won't generate dictionaries in the -d mode or -S
3176        mode - but that's probably a fair trade-off.
3177 
3178        This won't work particularly well with paths that exhibit variable
3179        behavior, but fails gracefully, so we'll carry out the checks anyway.
3180 
3181       */
3182 
3183     if (!afl->non_instrumented_mode && (afl->stage_cur & 7) == 7) {
3184 
3185       u64 cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
3186 
3187       if (afl->stage_cur == afl->stage_max - 1 && cksum == prev_cksum) {
3188 
3189         /* If at end of file and we are still collecting a string, grab the
3190            final character and force output. */
3191 
3192         if (a_len < MAX_AUTO_EXTRA) {
3193 
3194           a_collect[a_len] = out_buf[afl->stage_cur >> 3];
3195 
3196         }
3197 
3198         ++a_len;
3199 
3200         if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {
3201 
3202           maybe_add_auto(afl, a_collect, a_len);
3203 
3204         }
3205 
3206       } else if (cksum != prev_cksum) {
3207 
3208         /* Otherwise, if the checksum has changed, see if we have something
3209            worthwhile queued up, and collect that if the answer is yes. */
3210 
3211         if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {
3212 
3213           maybe_add_auto(afl, a_collect, a_len);
3214 
3215         }
3216 
3217         a_len = 0;
3218         prev_cksum = cksum;
3219 
3220       }
3221 
3222       /* Continue collecting string, but only if the bit flip actually made
3223          any difference - we don't want no-op tokens. */
3224 
3225       if (cksum != afl->queue_cur->exec_cksum) {
3226 
3227         if (a_len < MAX_AUTO_EXTRA) {
3228 
3229           a_collect[a_len] = out_buf[afl->stage_cur >> 3];
3230 
3231         }
3232 
3233         ++a_len;
3234 
3235       }
3236 
3237     }                                       /* if (afl->stage_cur & 7) == 7 */
3238 
3239   }                                                   /* for afl->stage_cur */
3240 
3241   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3242 
3243   afl->stage_finds[STAGE_FLIP1] += new_hit_cnt - orig_hit_cnt;
3244   afl->stage_cycles[STAGE_FLIP1] += afl->stage_max;
3245 
3246   /* Two walking bits. */
3247 
3248   afl->stage_name = "bitflip 2/1";
3249   afl->stage_short = "flip2";
3250   afl->stage_max = (len << 3) - 1;
3251 
3252   orig_hit_cnt = new_hit_cnt;
3253 
3254   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
3255 
3256     afl->stage_cur_byte = afl->stage_cur >> 3;
3257 
3258     FLIP_BIT(out_buf, afl->stage_cur);
3259     FLIP_BIT(out_buf, afl->stage_cur + 1);
3260 
3261 #ifdef INTROSPECTION
3262     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT2-%u",
3263              afl->queue_cur->fname, afl->stage_cur);
3264 #endif
3265     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3266 
3267     FLIP_BIT(out_buf, afl->stage_cur);
3268     FLIP_BIT(out_buf, afl->stage_cur + 1);
3269 
3270   }                                                   /* for afl->stage_cur */
3271 
3272   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3273 
3274   afl->stage_finds[STAGE_FLIP2] += new_hit_cnt - orig_hit_cnt;
3275   afl->stage_cycles[STAGE_FLIP2] += afl->stage_max;
3276 
3277   /* Four walking bits. */
3278 
3279   afl->stage_name = "bitflip 4/1";
3280   afl->stage_short = "flip4";
3281   afl->stage_max = (len << 3) - 3;
3282 
3283   orig_hit_cnt = new_hit_cnt;
3284 
3285   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
3286 
3287     afl->stage_cur_byte = afl->stage_cur >> 3;
3288 
3289     FLIP_BIT(out_buf, afl->stage_cur);
3290     FLIP_BIT(out_buf, afl->stage_cur + 1);
3291     FLIP_BIT(out_buf, afl->stage_cur + 2);
3292     FLIP_BIT(out_buf, afl->stage_cur + 3);
3293 
3294 #ifdef INTROSPECTION
3295     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT4-%u",
3296              afl->queue_cur->fname, afl->stage_cur);
3297 #endif
3298     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3299 
3300     FLIP_BIT(out_buf, afl->stage_cur);
3301     FLIP_BIT(out_buf, afl->stage_cur + 1);
3302     FLIP_BIT(out_buf, afl->stage_cur + 2);
3303     FLIP_BIT(out_buf, afl->stage_cur + 3);
3304 
3305   }                                                   /* for afl->stage_cur */
3306 
3307   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3308 
3309   afl->stage_finds[STAGE_FLIP4] += new_hit_cnt - orig_hit_cnt;
3310   afl->stage_cycles[STAGE_FLIP4] += afl->stage_max;
3311 
3312   /* Effector map setup. These macros calculate:
3313 
3314      EFF_APOS      - position of a particular file offset in the map.
3315      EFF_ALEN      - length of a map with a particular number of bytes.
3316      EFF_SPAN_ALEN - map span for a sequence of bytes.
3317 
3318    */
3319 
3320 #define EFF_APOS(_p) ((_p) >> EFF_MAP_SCALE2)
3321 #define EFF_REM(_x) ((_x) & ((1 << EFF_MAP_SCALE2) - 1))
3322 #define EFF_ALEN(_l) (EFF_APOS(_l) + !!EFF_REM(_l))
3323 #define EFF_SPAN_ALEN(_p, _l) (EFF_APOS((_p) + (_l)-1) - EFF_APOS(_p) + 1)
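
  /* Example: with the default EFF_MAP_SCALE2 of 3, each map entry covers an
     8-byte block of the file, so EFF_APOS(17) == 2 and EFF_ALEN(17) == 3. */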
3324 
  /* Initialize effector map for the next step (see comments below). Always
     flag first and last byte as doing something. */
3327 
3328   eff_map = afl_realloc(AFL_BUF_PARAM(eff), EFF_ALEN(len));
3329   if (unlikely(!eff_map)) { PFATAL("alloc"); }
3330   eff_map[0] = 1;
3331 
3332   if (EFF_APOS(len - 1) != 0) {
3333 
3334     eff_map[EFF_APOS(len - 1)] = 1;
3335     ++eff_cnt;
3336 
3337   }
3338 
3339   /* Walking byte. */
3340 
3341   afl->stage_name = "bitflip 8/8";
3342   afl->stage_short = "flip8";
3343   afl->stage_max = len;
3344 
3345   orig_hit_cnt = new_hit_cnt;
3346 
3347   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
3348 
3349     afl->stage_cur_byte = afl->stage_cur;
3350 
3351     out_buf[afl->stage_cur] ^= 0xFF;
3352 
3353 #ifdef INTROSPECTION
3354     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT8-%u",
3355              afl->queue_cur->fname, afl->stage_cur);
3356 #endif
3357     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3358 
3359     /* We also use this stage to pull off a simple trick: we identify
3360        bytes that seem to have no effect on the current execution path
3361        even when fully flipped - and we skip them during more expensive
3362        deterministic stages, such as arithmetics or known ints. */
3363 
3364     if (!eff_map[EFF_APOS(afl->stage_cur)]) {
3365 
3366       u64 cksum;
3367 
3368       /* If in non-instrumented mode or if the file is very short, just flag
3369          everything without wasting time on checksums. */
3370 
3371       if (!afl->non_instrumented_mode && len >= EFF_MIN_LEN) {
3372 
3373         cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
3374 
3375       } else {
3376 
3377         cksum = ~afl->queue_cur->exec_cksum;
3378 
3379       }
3380 
3381       if (cksum != afl->queue_cur->exec_cksum) {
3382 
3383         eff_map[EFF_APOS(afl->stage_cur)] = 1;
3384         ++eff_cnt;
3385 
3386       }
3387 
3388     }
3389 
3390     out_buf[afl->stage_cur] ^= 0xFF;
3391 
3392   }                                                   /* for afl->stage_cur */
3393 
3394   /* If the effector map is more than EFF_MAX_PERC dense, just flag the
3395      whole thing as worth fuzzing, since we wouldn't be saving much time
3396      anyway. */
3397 
3398   if (eff_cnt != (u32)EFF_ALEN(len) &&
3399       eff_cnt * 100 / EFF_ALEN(len) > EFF_MAX_PERC) {
3400 
3401     memset(eff_map, 1, EFF_ALEN(len));
3402 
3403     afl->blocks_eff_select += EFF_ALEN(len);
3404 
3405   } else {
3406 
3407     afl->blocks_eff_select += eff_cnt;
3408 
3409   }
3410 
3411   afl->blocks_eff_total += EFF_ALEN(len);
3412 
3413   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3414 
3415   afl->stage_finds[STAGE_FLIP8] += new_hit_cnt - orig_hit_cnt;
3416   afl->stage_cycles[STAGE_FLIP8] += afl->stage_max;
3417 
3418   /* Two walking bytes. */
3419 
3420   if (len < 2) { goto skip_bitflip; }
3421 
3422   afl->stage_name = "bitflip 16/8";
3423   afl->stage_short = "flip16";
3424   afl->stage_cur = 0;
3425   afl->stage_max = len - 1;
3426 
3427   orig_hit_cnt = new_hit_cnt;
3428 
3429   for (i = 0; i < len - 1; ++i) {
3430 
3431     /* Let's consult the effector map... */
3432 
3433     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {
3434 
3435       --afl->stage_max;
3436       continue;
3437 
3438     }
3439 
3440     afl->stage_cur_byte = i;
3441 
3442     *(u16 *)(out_buf + i) ^= 0xFFFF;
3443 
3444 #ifdef INTROSPECTION
3445     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT16-%u",
3446              afl->queue_cur->fname, afl->stage_cur);
3447 #endif
3448     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3449     ++afl->stage_cur;
3450 
3451     *(u16 *)(out_buf + i) ^= 0xFFFF;
3452 
3453   }                                                   /* for i = 0; i < len */
3454 
3455   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3456 
3457   afl->stage_finds[STAGE_FLIP16] += new_hit_cnt - orig_hit_cnt;
3458   afl->stage_cycles[STAGE_FLIP16] += afl->stage_max;
3459 
3460   if (len < 4) { goto skip_bitflip; }
3461 
3462   /* Four walking bytes. */
3463 
3464   afl->stage_name = "bitflip 32/8";
3465   afl->stage_short = "flip32";
3466   afl->stage_cur = 0;
3467   afl->stage_max = len - 3;
3468 
3469   orig_hit_cnt = new_hit_cnt;
3470 
3471   for (i = 0; i < len - 3; ++i) {
3472 
3473     /* Let's consult the effector map... */
3474     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
3475         !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {
3476 
3477       --afl->stage_max;
3478       continue;
3479 
3480     }
3481 
3482     afl->stage_cur_byte = i;
3483 
3484     *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;
3485 
3486 #ifdef INTROSPECTION
3487     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT32-%u",
3488              afl->queue_cur->fname, afl->stage_cur);
3489 #endif
3490     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3491     ++afl->stage_cur;
3492 
3493     *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;
3494 
3495   }                                               /* for i = 0; i < len - 3 */
3496 
3497   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3498 
3499   afl->stage_finds[STAGE_FLIP32] += new_hit_cnt - orig_hit_cnt;
3500   afl->stage_cycles[STAGE_FLIP32] += afl->stage_max;
3501 
3502 skip_bitflip:
3503 
3504   if (afl->no_arith) { goto skip_arith; }
3505 
3506   /**********************
3507    * ARITHMETIC INC/DEC *
3508    **********************/
3509 
3510   /* 8-bit arithmetics. */
3511 
3512   afl->stage_name = "arith 8/8";
3513   afl->stage_short = "arith8";
3514   afl->stage_cur = 0;
3515   afl->stage_max = 2 * len * ARITH_MAX;
3516 
3517   afl->stage_val_type = STAGE_VAL_LE;
3518 
3519   orig_hit_cnt = new_hit_cnt;
3520 
3521   for (i = 0; i < (u32)len; ++i) {
3522 
3523     u8 orig = out_buf[i];
3524 
3525     /* Let's consult the effector map... */
3526 
3527     if (!eff_map[EFF_APOS(i)]) {
3528 
3529       afl->stage_max -= 2 * ARITH_MAX;
3530       continue;
3531 
3532     }
3533 
3534     afl->stage_cur_byte = i;
3535 
3536     for (j = 1; j <= ARITH_MAX; ++j) {
3537 
3538       u8 r = orig ^ (orig + j);
3539 
3540       /* Do arithmetic operations only if the result couldn't be a product
3541          of a bitflip. */
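      /* E.g. 0x05 + 0x03 gives 0x08 and 0x05 ^ 0x08 == 0x0d, which no 1-,
         2- or 4-bit walking flip can produce, so it runs; 0x05 + 0x01 only
         toggles two adjacent bits and is skipped. */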
3542 
3543       if (!could_be_bitflip(r)) {
3544 
3545         afl->stage_cur_val = j;
3546         out_buf[i] = orig + j;
3547 
3548 #ifdef INTROSPECTION
3549         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH8+-%u-%u",
3550                  afl->queue_cur->fname, i, j);
3551 #endif
3552         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3553         ++afl->stage_cur;
3554 
3555       } else {
3556 
3557         --afl->stage_max;
3558 
3559       }
3560 
3561       r = orig ^ (orig - j);
3562 
3563       if (!could_be_bitflip(r)) {
3564 
3565         afl->stage_cur_val = -j;
3566         out_buf[i] = orig - j;
3567 
3568 #ifdef INTROSPECTION
3569         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH8_-%u-%u",
3570                  afl->queue_cur->fname, i, j);
3571 #endif
3572         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3573         ++afl->stage_cur;
3574 
3575       } else {
3576 
3577         --afl->stage_max;
3578 
3579       }
3580 
3581       out_buf[i] = orig;
3582 
3583     }
3584 
3585   }                                                   /* for i = 0; i < len */
3586 
3587   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3588 
3589   afl->stage_finds[STAGE_ARITH8] += new_hit_cnt - orig_hit_cnt;
3590   afl->stage_cycles[STAGE_ARITH8] += afl->stage_max;
3591 
3592   /* 16-bit arithmetics, both endians. */
3593 
3594   if (len < 2) { goto skip_arith; }
3595 
3596   afl->stage_name = "arith 16/8";
3597   afl->stage_short = "arith16";
3598   afl->stage_cur = 0;
3599   afl->stage_max = 4 * (len - 1) * ARITH_MAX;
3600 
3601   orig_hit_cnt = new_hit_cnt;
3602 
3603   for (i = 0; i < len - 1; ++i) {
3604 
3605     u16 orig = *(u16 *)(out_buf + i);
3606 
3607     /* Let's consult the effector map... */
3608 
3609     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {
3610 
3611       afl->stage_max -= 4 * ARITH_MAX;
3612       continue;
3613 
3614     }
3615 
3616     afl->stage_cur_byte = i;
3617 
3618     for (j = 1; j <= ARITH_MAX; ++j) {
3619 
3620       u16 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
3621           r3 = orig ^ SWAP16(SWAP16(orig) + j),
3622           r4 = orig ^ SWAP16(SWAP16(orig) - j);
3623 
3624       /* Try little endian addition and subtraction first. Do it only
3625          if the operation would affect more than one byte (hence the
3626          & 0xff overflow checks) and if it couldn't be a product of
3627          a bitflip. */
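      /* E.g. with orig == 0x00ff and j == 1, (orig & 0xff) + j == 0x100
         exceeds 0xff, so the addition carries into the second byte and
         cannot be reproduced by the 8-bit arith stage. */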
3628 
3629       afl->stage_val_type = STAGE_VAL_LE;
3630 
3631       if ((orig & 0xff) + j > 0xff && !could_be_bitflip(r1)) {
3632 
3633         afl->stage_cur_val = j;
3634         *(u16 *)(out_buf + i) = orig + j;
3635 
3636 #ifdef INTROSPECTION
3637         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH16+-%u-%u",
3638                  afl->queue_cur->fname, i, j);
3639 #endif
3640         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3641         ++afl->stage_cur;
3642 
3643       } else {
3644 
3645         --afl->stage_max;
3646 
3647       }
3648 
3649       if ((orig & 0xff) < j && !could_be_bitflip(r2)) {
3650 
3651         afl->stage_cur_val = -j;
3652         *(u16 *)(out_buf + i) = orig - j;
3653 
3654 #ifdef INTROSPECTION
3655         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH16_-%u-%u",
3656                  afl->queue_cur->fname, i, j);
3657 #endif
3658         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3659         ++afl->stage_cur;
3660 
3661       } else {
3662 
3663         --afl->stage_max;
3664 
3665       }
3666 
3667       /* Big endian comes next. Same deal. */
3668 
3669       afl->stage_val_type = STAGE_VAL_BE;
3670 
3671       if ((orig >> 8) + j > 0xff && !could_be_bitflip(r3)) {
3672 
3673         afl->stage_cur_val = j;
3674         *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) + j);
3675 
3676 #ifdef INTROSPECTION
3677         snprintf(afl->mutation, sizeof(afl->mutation),
3678                  "%s MOPT_ARITH16+BE-%u-%u", afl->queue_cur->fname, i, j);
3679 #endif
3680         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3681         ++afl->stage_cur;
3682 
3683       } else {
3684 
3685         --afl->stage_max;
3686 
3687       }
3688 
3689       if ((orig >> 8) < j && !could_be_bitflip(r4)) {
3690 
3691         afl->stage_cur_val = -j;
3692         *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) - j);
3693 
3694 #ifdef INTROSPECTION
3695         snprintf(afl->mutation, sizeof(afl->mutation),
                 "%s MOPT_ARITH16_BE-%u-%u", afl->queue_cur->fname, i, j);
3697 #endif
3698         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3699         ++afl->stage_cur;
3700 
3701       } else {
3702 
3703         --afl->stage_max;
3704 
3705       }
3706 
3707       *(u16 *)(out_buf + i) = orig;
3708 
3709     }
3710 
3711   }                                               /* for i = 0; i < len - 1 */
3712 
3713   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3714 
3715   afl->stage_finds[STAGE_ARITH16] += new_hit_cnt - orig_hit_cnt;
3716   afl->stage_cycles[STAGE_ARITH16] += afl->stage_max;
3717 
3718   /* 32-bit arithmetics, both endians. */
3719 
3720   if (len < 4) { goto skip_arith; }
3721 
3722   afl->stage_name = "arith 32/8";
3723   afl->stage_short = "arith32";
3724   afl->stage_cur = 0;
3725   afl->stage_max = 4 * (len - 3) * ARITH_MAX;
3726 
3727   orig_hit_cnt = new_hit_cnt;
3728 
3729   for (i = 0; i < len - 3; ++i) {
3730 
3731     u32 orig = *(u32 *)(out_buf + i);
3732 
3733     /* Let's consult the effector map... */
3734 
3735     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
3736         !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {
3737 
3738       afl->stage_max -= 4 * ARITH_MAX;
3739       continue;
3740 
3741     }
3742 
3743     afl->stage_cur_byte = i;
3744 
3745     for (j = 1; j <= ARITH_MAX; ++j) {
3746 
3747       u32 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
3748           r3 = orig ^ SWAP32(SWAP32(orig) + j),
3749           r4 = orig ^ SWAP32(SWAP32(orig) - j);
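      /* r1..r4 are the XOR deltas of the four candidate results against
         the original dword; they are tested below so that values the
         deterministic bitflip stage already produced are not re-run. */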
3750 
3751       /* Little endian first. Same deal as with 16-bit: we only want to
3752          try if the operation would have effect on more than two bytes. */
3753 
3754       afl->stage_val_type = STAGE_VAL_LE;
3755 
3756       if ((orig & 0xffff) + j > 0xffff && !could_be_bitflip(r1)) {
3757 
3758         afl->stage_cur_val = j;
3759         *(u32 *)(out_buf + i) = orig + j;
3760 
3761 #ifdef INTROSPECTION
3762         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH32+-%u-%u",
3763                  afl->queue_cur->fname, i, j);
3764 #endif
3765         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3766         ++afl->stage_cur;
3767 
3768       } else {
3769 
3770         --afl->stage_max;
3771 
3772       }
3773 
3774       if ((orig & 0xffff) < j && !could_be_bitflip(r2)) {
3775 
3776         afl->stage_cur_val = -j;
3777         *(u32 *)(out_buf + i) = orig - j;
3778 
3779 #ifdef INTROSPECTION
3780         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH32_-%u-%u",
3781                  afl->queue_cur->fname, i, j);
3782 #endif
3783         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3784         ++afl->stage_cur;
3785 
3786       } else {
3787 
3788         --afl->stage_max;
3789 
3790       }
3791 
3792       /* Big endian next. */
3793 
3794       afl->stage_val_type = STAGE_VAL_BE;
3795 
3796       if ((SWAP32(orig) & 0xffff) + j > 0xffff && !could_be_bitflip(r3)) {
3797 
3798         afl->stage_cur_val = j;
3799         *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) + j);
3800 
3801 #ifdef INTROSPECTION
3802         snprintf(afl->mutation, sizeof(afl->mutation),
3803                  "%s MOPT_ARITH32+BE-%u-%u", afl->queue_cur->fname, i, j);
3804 #endif
3805         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3806         ++afl->stage_cur;
3807 
3808       } else {
3809 
3810         --afl->stage_max;
3811 
3812       }
3813 
3814       if ((SWAP32(orig) & 0xffff) < j && !could_be_bitflip(r4)) {
3815 
3816         afl->stage_cur_val = -j;
3817         *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) - j);
3818 
3819 #ifdef INTROSPECTION
3820         snprintf(afl->mutation, sizeof(afl->mutation),
3821                  "%s MOPT_ARITH32_BE-%u-%u", afl->queue_cur->fname, i, j);
3822 #endif
3823         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3824         ++afl->stage_cur;
3825 
3826       } else {
3827 
3828         --afl->stage_max;
3829 
3830       }
3831 
3832       *(u32 *)(out_buf + i) = orig;
3833 
3834     }
3835 
3836   }                                               /* for i = 0; i < len - 3 */
3837 
3838   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3839 
3840   afl->stage_finds[STAGE_ARITH32] += new_hit_cnt - orig_hit_cnt;
3841   afl->stage_cycles[STAGE_ARITH32] += afl->stage_max;
3842 
3843 skip_arith:
3844 
3845   /**********************
3846    * INTERESTING VALUES *
3847    **********************/
3848 
3849   afl->stage_name = "interest 8/8";
3850   afl->stage_short = "int8";
3851   afl->stage_cur = 0;
3852   afl->stage_max = len * sizeof(interesting_8);
3853 
3854   afl->stage_val_type = STAGE_VAL_LE;
3855 
3856   orig_hit_cnt = new_hit_cnt;
3857 
3858   /* Setting 8-bit integers. */
3859 
3860   for (i = 0; i < (u32)len; ++i) {
3861 
3862     u8 orig = out_buf[i];
3863 
3864     /* Let's consult the effector map... */
3865 
3866     if (!eff_map[EFF_APOS(i)]) {
3867 
3868       afl->stage_max -= sizeof(interesting_8);
3869       continue;
3870 
3871     }
3872 
3873     afl->stage_cur_byte = i;
3874 
3875     for (j = 0; j < sizeof(interesting_8); ++j) {
3876 
3877       /* Skip if the value could be a product of bitflips or arithmetics. */
3878 
3879       if (could_be_bitflip(orig ^ (u8)interesting_8[j]) ||
3880           could_be_arith(orig, (u8)interesting_8[j], 1)) {
3881 
3882         --afl->stage_max;
3883         continue;
3884 
3885       }
3886 
3887       afl->stage_cur_val = interesting_8[j];
3888       out_buf[i] = interesting_8[j];
3889 
3890 #ifdef INTROSPECTION
3891       snprintf(afl->mutation, sizeof(afl->mutation),
3892                "%s MOPT_INTERESTING8-%u-%u", afl->queue_cur->fname, i, j);
3893 #endif
3894       if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3895 
3896       out_buf[i] = orig;
3897       ++afl->stage_cur;
3898 
3899     }
3900 
3901   }                                                   /* for i = 0; i < len */
3902 
3903   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3904 
3905   afl->stage_finds[STAGE_INTEREST8] += new_hit_cnt - orig_hit_cnt;
3906   afl->stage_cycles[STAGE_INTEREST8] += afl->stage_max;
3907 
3908   /* Setting 16-bit integers, both endians. */
3909 
3910   if (afl->no_arith || len < 2) { goto skip_interest; }
3911 
3912   afl->stage_name = "interest 16/8";
3913   afl->stage_short = "int16";
3914   afl->stage_cur = 0;
3915   afl->stage_max = 2 * (len - 1) * (sizeof(interesting_16) >> 1);
3916 
3917   orig_hit_cnt = new_hit_cnt;
3918 
3919   for (i = 0; i < len - 1; ++i) {
3920 
3921     u16 orig = *(u16 *)(out_buf + i);
3922 
3923     /* Let's consult the effector map... */
3924 
3925     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {
3926 
3927       afl->stage_max -= sizeof(interesting_16);
3928       continue;
3929 
3930     }
3931 
3932     afl->stage_cur_byte = i;
3933 
3934     for (j = 0; j < sizeof(interesting_16) / 2; ++j) {
3935 
3936       afl->stage_cur_val = interesting_16[j];
3937 
3938       /* Skip if this could be a product of a bitflip, arithmetics,
3939          or single-byte interesting value insertion. */
3940 
3941       if (!could_be_bitflip(orig ^ (u16)interesting_16[j]) &&
3942           !could_be_arith(orig, (u16)interesting_16[j], 2) &&
3943           !could_be_interest(orig, (u16)interesting_16[j], 2, 0)) {
3944 
3945         afl->stage_val_type = STAGE_VAL_LE;
3946 
3947         *(u16 *)(out_buf + i) = interesting_16[j];
3948 
3949 #ifdef INTROSPECTION
3950         snprintf(afl->mutation, sizeof(afl->mutation),
3951                  "%s MOPT_INTERESTING16-%u-%u", afl->queue_cur->fname, i, j);
3952 #endif
3953         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3954         ++afl->stage_cur;
3955 
3956       } else {
3957 
3958         --afl->stage_max;
3959 
3960       }
3961 
3962       if ((u16)interesting_16[j] != SWAP16(interesting_16[j]) &&
3963           !could_be_bitflip(orig ^ SWAP16(interesting_16[j])) &&
3964           !could_be_arith(orig, SWAP16(interesting_16[j]), 2) &&
3965           !could_be_interest(orig, SWAP16(interesting_16[j]), 2, 1)) {
3966 
3967         afl->stage_val_type = STAGE_VAL_BE;
3968 
3969 #ifdef INTROSPECTION
3970         snprintf(afl->mutation, sizeof(afl->mutation),
3971                  "%s MOPT_INTERESTING16BE-%u-%u", afl->queue_cur->fname, i, j);
3972 #endif
3973         *(u16 *)(out_buf + i) = SWAP16(interesting_16[j]);
3974         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3975         ++afl->stage_cur;
3976 
3977       } else {
3978 
3979         --afl->stage_max;
3980 
3981       }
3982 
3983     }
3984 
3985     *(u16 *)(out_buf + i) = orig;
3986 
3987   }                                               /* for i = 0; i < len - 1 */
3988 
3989   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
3990 
3991   afl->stage_finds[STAGE_INTEREST16] += new_hit_cnt - orig_hit_cnt;
3992   afl->stage_cycles[STAGE_INTEREST16] += afl->stage_max;
3993 
3994   if (len < 4) { goto skip_interest; }
3995 
3996   /* Setting 32-bit integers, both endians. */
3997 
3998   afl->stage_name = "interest 32/8";
3999   afl->stage_short = "int32";
4000   afl->stage_cur = 0;
4001   afl->stage_max = 2 * (len - 3) * (sizeof(interesting_32) >> 2);
4002 
4003   orig_hit_cnt = new_hit_cnt;
4004 
4005   for (i = 0; i < len - 3; ++i) {
4006 
4007     u32 orig = *(u32 *)(out_buf + i);
4008 
4009     /* Let's consult the effector map... */
4010 
4011     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
4012         !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {
4013 
4014       afl->stage_max -= sizeof(interesting_32) >> 1;
4015       continue;
4016 
4017     }
4018 
4019     afl->stage_cur_byte = i;
4020 
4021     for (j = 0; j < sizeof(interesting_32) / 4; ++j) {
4022 
4023       afl->stage_cur_val = interesting_32[j];
4024 
4025       /* Skip if this could be a product of a bitflip, arithmetics,
4026          or word interesting value insertion. */
4027 
4028       if (!could_be_bitflip(orig ^ (u32)interesting_32[j]) &&
4029           !could_be_arith(orig, interesting_32[j], 4) &&
4030           !could_be_interest(orig, interesting_32[j], 4, 0)) {
4031 
4032         afl->stage_val_type = STAGE_VAL_LE;
4033 
4034         *(u32 *)(out_buf + i) = interesting_32[j];
4035 
4036 #ifdef INTROSPECTION
4037         snprintf(afl->mutation, sizeof(afl->mutation),
4038                  "%s MOPT_INTERESTING32-%u-%u", afl->queue_cur->fname, i, j);
4039 #endif
4040         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4041         ++afl->stage_cur;
4042 
4043       } else {
4044 
4045         --afl->stage_max;
4046 
4047       }
4048 
4049       if ((u32)interesting_32[j] != SWAP32(interesting_32[j]) &&
4050           !could_be_bitflip(orig ^ SWAP32(interesting_32[j])) &&
4051           !could_be_arith(orig, SWAP32(interesting_32[j]), 4) &&
4052           !could_be_interest(orig, SWAP32(interesting_32[j]), 4, 1)) {
4053 
4054         afl->stage_val_type = STAGE_VAL_BE;
4055 
4056 #ifdef INTROSPECTION
4057         snprintf(afl->mutation, sizeof(afl->mutation),
4058                  "%s MOPT_INTERESTING32BE-%u-%u", afl->queue_cur->fname, i, j);
4059 #endif
4060         *(u32 *)(out_buf + i) = SWAP32(interesting_32[j]);
4061         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4062         ++afl->stage_cur;
4063 
4064       } else {
4065 
4066         --afl->stage_max;
4067 
4068       }
4069 
4070     }
4071 
4072     *(u32 *)(out_buf + i) = orig;
4073 
4074   }                                               /* for i = 0; i < len - 3 */
4075 
4076   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
4077 
4078   afl->stage_finds[STAGE_INTEREST32] += new_hit_cnt - orig_hit_cnt;
4079   afl->stage_cycles[STAGE_INTEREST32] += afl->stage_max;
4080 
4081 skip_interest:
4082 
4083   /********************
4084    * DICTIONARY STUFF *
4085    ********************/
4086 
4087   if (!afl->extras_cnt) { goto skip_user_extras; }
4088 
4089   /* Overwrite with user-supplied extras. */
4090 
4091   afl->stage_name = "user extras (over)";
4092   afl->stage_short = "ext_UO";
4093   afl->stage_cur = 0;
4094   afl->stage_max = afl->extras_cnt * len;
4095 
4096   afl->stage_val_type = STAGE_VAL_NONE;
4097 
4098   orig_hit_cnt = new_hit_cnt;
4099 
4100   for (i = 0; i < (u32)len; ++i) {
4101 
4102     u32 last_len = 0;
4103 
4104     afl->stage_cur_byte = i;
4105 
4106     /* Extras are sorted by size, from smallest to largest. This means
4107        that we don't have to worry about restoring the buffer in
4108        between writes at a particular offset determined by the outer
4109        loop. */
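    /* (Example: if a 4-byte token was just written at offset i, the next
       token is at least as long and overwrites it completely, so a single
       restore of last_len bytes after the loop undoes all clobbering.) */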
4110 
4111     for (j = 0; j < afl->extras_cnt; ++j) {
4112 
4113       /* Skip extras probabilistically if afl->extras_cnt > AFL_MAX_DET_EXTRAS.
4114          Also skip them if there's no room to insert the payload, if the token
4115          is redundant, or if its entire span has no bytes set in the effector
4116          map. */
4117 
4118       if ((afl->extras_cnt > afl->max_det_extras &&
4119            rand_below(afl, afl->extras_cnt) >= afl->max_det_extras) ||
4120           afl->extras[j].len > len - i ||
4121           !memcmp(afl->extras[j].data, out_buf + i, afl->extras[j].len) ||
4122           !memchr(eff_map + EFF_APOS(i), 1,
4123                   EFF_SPAN_ALEN(i, afl->extras[j].len))) {
4124 
4125         --afl->stage_max;
4126         continue;
4127 
4128       }
4129 
4130       last_len = afl->extras[j].len;
4131       memcpy(out_buf + i, afl->extras[j].data, last_len);
4132 
4133 #ifdef INTROSPECTION
4134       snprintf(afl->mutation, sizeof(afl->mutation),
4135                "%s MOPT_EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i, j);
4136 #endif
4137 
4138       if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4139 
4140       ++afl->stage_cur;
4141 
4142     }
4143 
4144     /* Restore all the clobbered memory. */
4145     memcpy(out_buf + i, in_buf + i, last_len);
4146 
4147   }                                                   /* for i = 0; i < len */
4148 
4149   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
4150 
4151   afl->stage_finds[STAGE_EXTRAS_UO] += new_hit_cnt - orig_hit_cnt;
4152   afl->stage_cycles[STAGE_EXTRAS_UO] += afl->stage_max;
4153 
4154   /* Insertion of user-supplied extras. */
4155 
4156   afl->stage_name = "user extras (insert)";
4157   afl->stage_short = "ext_UI";
4158   afl->stage_cur = 0;
4159   afl->stage_max = afl->extras_cnt * (len + 1);
4160 
4161   orig_hit_cnt = new_hit_cnt;
4162 
4163   ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
4164   if (unlikely(!ex_tmp)) { PFATAL("alloc"); }
4165 
4166   for (i = 0; i <= (u32)len; ++i) {
4167 
4168     afl->stage_cur_byte = i;
4169 
4170     for (j = 0; j < afl->extras_cnt; ++j) {
4171 
4172       if (len + afl->extras[j].len > MAX_FILE) {
4173 
4174         --afl->stage_max;
4175         continue;
4176 
4177       }
4178 
4179       /* Insert token */
4180       memcpy(ex_tmp + i, afl->extras[j].data, afl->extras[j].len);
4181 
4182       /* Copy tail */
4183       memcpy(ex_tmp + i + afl->extras[j].len, out_buf + i, len - i);
4184 
4185 #ifdef INTROSPECTION
4186       snprintf(afl->mutation, sizeof(afl->mutation),
4187                "%s MOPT_EXTRAS_insert-%u-%u", afl->queue_cur->fname, i, j);
4188 #endif
4189 
4190       if (common_fuzz_stuff(afl, ex_tmp, len + afl->extras[j].len)) {
4191 
4192         goto abandon_entry;
4193 
4194       }
4195 
4196       ++afl->stage_cur;
4197 
4198     }
4199 
4200     /* Copy head */
4201     ex_tmp[i] = out_buf[i];
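    /* The head is extended one byte per outer-loop iteration, keeping
       ex_tmp[0..i] in sync with out_buf without a full memcpy. */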
4202 
4203   }                                                  /* for i = 0; i <= len */
4204 
4205   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
4206 
4207   afl->stage_finds[STAGE_EXTRAS_UI] += new_hit_cnt - orig_hit_cnt;
4208   afl->stage_cycles[STAGE_EXTRAS_UI] += afl->stage_max;
4209 
4210 skip_user_extras:
4211 
4212   if (!afl->a_extras_cnt) { goto skip_extras; }
4213 
4214   afl->stage_name = "auto extras (over)";
4215   afl->stage_short = "ext_AO";
4216   afl->stage_cur = 0;
4217   afl->stage_max = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS) * len;
4218 
4219   afl->stage_val_type = STAGE_VAL_NONE;
4220 
4221   orig_hit_cnt = new_hit_cnt;
4222 
4223   for (i = 0; i < (u32)len; ++i) {
4224 
4225     u32 last_len = 0;
4226 
4227     afl->stage_cur_byte = i;
4228 
4229     u32 min_extra_len = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS);
4230     for (j = 0; j < min_extra_len; ++j) {
4231 
4232       /* See the comment in the earlier code; extras are sorted by size. */
4233 
4234       if ((afl->a_extras[j].len) > (len - i) ||
4235           !memcmp(afl->a_extras[j].data, out_buf + i, afl->a_extras[j].len) ||
4236           !memchr(eff_map + EFF_APOS(i), 1,
4237                   EFF_SPAN_ALEN(i, afl->a_extras[j].len))) {
4238 
4239         --afl->stage_max;
4240         continue;
4241 
4242       }
4243 
4244       last_len = afl->a_extras[j].len;
4245       memcpy(out_buf + i, afl->a_extras[j].data, last_len);
4246 
4247 #ifdef INTROSPECTION
4248       snprintf(afl->mutation, sizeof(afl->mutation),
4249                "%s MOPT_AUTO_EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i,
4250                j);
4251 #endif
4252 
4253       if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4254 
4255       ++afl->stage_cur;
4256 
4257     }
4258 
4259     /* Restore all the clobbered memory. */
4260     memcpy(out_buf + i, in_buf + i, last_len);
4261 
4262   }                                                   /* for i = 0; i < len */
4263 
4264   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
4265 
4266   afl->stage_finds[STAGE_EXTRAS_AO] += new_hit_cnt - orig_hit_cnt;
4267   afl->stage_cycles[STAGE_EXTRAS_AO] += afl->stage_max;
4268 
4269 skip_extras:
4270 
  /* If we made it here without jumping to havoc_stage or abandon_entry,
     we're properly done with the deterministic steps and can mark the
     entry as such in the .state/ directory. */
4274 
4275   if (!afl->queue_cur->passed_det) { mark_as_det_done(afl, afl->queue_cur); }
4276 
4277   /****************
4278    * RANDOM HAVOC *
4279    ****************/
4280 
4281 havoc_stage:
4282 pacemaker_fuzzing:
4283 
4284   afl->stage_cur_byte = -1;
4285 
4286   /* The havoc stage mutation code is also invoked when splicing files; if the
4287      splice_cycle variable is set, generate different descriptions and such. */
4288 
4289   if (!splice_cycle) {
4290 
4291     afl->stage_name = MOpt_globals.havoc_stagename;
4292     afl->stage_short = MOpt_globals.havoc_stagenameshort;
4293     afl->stage_max = (doing_det ? HAVOC_CYCLES_INIT : HAVOC_CYCLES) *
4294                      perf_score / afl->havoc_div / 100;
4295 
4296   } else {
4297 
4298     perf_score = orig_perf;
4299 
4300     snprintf(afl->stage_name_buf, STAGE_BUF_SIZE,
4301              MOpt_globals.splice_stageformat, splice_cycle);
4302     afl->stage_name = afl->stage_name_buf;
4303     afl->stage_short = MOpt_globals.splice_stagenameshort;
4304     afl->stage_max = SPLICE_HAVOC * perf_score / afl->havoc_div / 100;
4305 
4306   }
4307 
4308   s32 temp_len_puppet;
4309 
4310   // for (; afl->swarm_now < swarm_num; ++afl->swarm_now)
4311   {
4312 
4313     if (afl->key_puppet == 1) {
4314 
4315       if (unlikely(afl->orig_hit_cnt_puppet == 0)) {
4316 
4317         afl->orig_hit_cnt_puppet = afl->queued_paths + afl->unique_crashes;
4318         afl->last_limit_time_start = get_cur_time();
4319         afl->SPLICE_CYCLES_puppet =
4320             (rand_below(
4321                  afl, SPLICE_CYCLES_puppet_up - SPLICE_CYCLES_puppet_low + 1) +
4322              SPLICE_CYCLES_puppet_low);
4323 
4324       }
4325 
4326     }                                            /* if afl->key_puppet == 1 */
4327 
4328     {
4329 
4330 #ifndef IGNORE_FINDS
4331     havoc_stage_puppet:
4332 #endif
4333 
4334       afl->stage_cur_byte = -1;
4335 
4336       /* The havoc stage mutation code is also invoked when splicing files; if
4337          the splice_cycle variable is set, generate different descriptions and
4338          such. */
4339 
4340       if (!splice_cycle) {
4341 
4342         afl->stage_name = MOpt_globals.havoc_stagename;
4343         afl->stage_short = MOpt_globals.havoc_stagenameshort;
4344         afl->stage_max = (doing_det ? HAVOC_CYCLES_INIT : HAVOC_CYCLES) *
4345                          perf_score / afl->havoc_div / 100;
4346 
4347       } else {
4348 
4349         perf_score = orig_perf;
4350         snprintf(afl->stage_name_buf, STAGE_BUF_SIZE,
4351                  MOpt_globals.splice_stageformat, splice_cycle);
4352         afl->stage_name = afl->stage_name_buf;
4353         afl->stage_short = MOpt_globals.splice_stagenameshort;
4354         afl->stage_max = SPLICE_HAVOC * perf_score / afl->havoc_div / 100;
4355 
4356       }
4357 
4358       if (afl->stage_max < HAVOC_MIN) { afl->stage_max = HAVOC_MIN; }
4359 
4360       temp_len = len;
4361 
4362       orig_hit_cnt = afl->queued_paths + afl->unique_crashes;
4363 
4364       havoc_queued = afl->queued_paths;
4365 
4366       u32 r_max;
4367 
4368       r_max = 15 + ((afl->extras_cnt + afl->a_extras_cnt) ? 2 : 0);
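      /* r_max bounds the operator id fed to select_algorithm(): the two
         dictionary cases below are only in range when extras are loaded,
         and the slot added under expand_havoc enables the splice operator
         handled in the default branch. */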
4369 
4370       if (unlikely(afl->expand_havoc && afl->ready_for_splicing_count > 1)) {
4371 
        /* Add expensive havoc cases here; they are activated after a full
           queue cycle has passed without any new finds. */
4374 
4375         ++r_max;
4376 
4377       }
4378 
4379       for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max;
4380            ++afl->stage_cur) {
4381 
4382         u32 use_stacking = 1 << (1 + rand_below(afl, afl->havoc_stack_pow2));
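        /* use_stacking is a power of two in [2, 2^havoc_stack_pow2]; that
           many mutations are stacked on the buffer before one execution. */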
4383 
4384         afl->stage_cur_val = use_stacking;
4385 
4386         for (i = 0; i < operator_num; ++i) {
4387 
4388           MOpt_globals.cycles_v3[i] = MOpt_globals.cycles_v2[i];
4389 
4390         }
4391 
4392 #ifdef INTROSPECTION
4393         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_HAVOC-%u",
4394                  afl->queue_cur->fname, use_stacking);
4395 #endif
4396 
4397         for (i = 0; i < use_stacking; ++i) {
4398 
4399           switch (select_algorithm(afl, r_max)) {
4400 
4401             case 0:
4402               /* Flip a single bit somewhere. Spooky! */
4403               FLIP_BIT(out_buf, rand_below(afl, temp_len << 3));
4404               MOpt_globals.cycles_v2[STAGE_FLIP1]++;
4405 #ifdef INTROSPECTION
4406               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT1");
4407               strcat(afl->mutation, afl->m_tmp);
4408 #endif
4409               break;
4410 
4411             case 1:
4412               if (temp_len < 2) { break; }
4413               temp_len_puppet = rand_below(afl, (temp_len << 3) - 1);
4414               FLIP_BIT(out_buf, temp_len_puppet);
4415               FLIP_BIT(out_buf, temp_len_puppet + 1);
4416               MOpt_globals.cycles_v2[STAGE_FLIP2]++;
4417 #ifdef INTROSPECTION
4418               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT2");
4419               strcat(afl->mutation, afl->m_tmp);
4420 #endif
4421               break;
4422 
4423             case 2:
4424               if (temp_len < 2) { break; }
4425               temp_len_puppet = rand_below(afl, (temp_len << 3) - 3);
4426               FLIP_BIT(out_buf, temp_len_puppet);
4427               FLIP_BIT(out_buf, temp_len_puppet + 1);
4428               FLIP_BIT(out_buf, temp_len_puppet + 2);
4429               FLIP_BIT(out_buf, temp_len_puppet + 3);
4430               MOpt_globals.cycles_v2[STAGE_FLIP4]++;
4431 #ifdef INTROSPECTION
4432               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT4");
4433               strcat(afl->mutation, afl->m_tmp);
4434 #endif
4435               break;
4436 
4437             case 3:
4438               if (temp_len < 4) { break; }
4439               out_buf[rand_below(afl, temp_len)] ^= 0xFF;
4440               MOpt_globals.cycles_v2[STAGE_FLIP8]++;
4441 #ifdef INTROSPECTION
4442               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT8");
4443               strcat(afl->mutation, afl->m_tmp);
4444 #endif
4445               break;
4446 
4447             case 4:
4448               if (temp_len < 8) { break; }
4449               *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) ^= 0xFFFF;
4450               MOpt_globals.cycles_v2[STAGE_FLIP16]++;
4451 #ifdef INTROSPECTION
4452               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT16");
4453               strcat(afl->mutation, afl->m_tmp);
4454 #endif
4455               break;
4456 
4457             case 5:
4458               if (temp_len < 8) { break; }
4459               *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) ^= 0xFFFFFFFF;
4460               MOpt_globals.cycles_v2[STAGE_FLIP32]++;
4461 #ifdef INTROSPECTION
4462               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT32");
4463               strcat(afl->mutation, afl->m_tmp);
4464 #endif
4465               break;
4466 
4467             case 6:
4468               out_buf[rand_below(afl, temp_len)] -=
4469                   1 + rand_below(afl, ARITH_MAX);
4470               out_buf[rand_below(afl, temp_len)] +=
4471                   1 + rand_below(afl, ARITH_MAX);
4472               MOpt_globals.cycles_v2[STAGE_ARITH8]++;
4473 #ifdef INTROSPECTION
4474               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8");
4475               strcat(afl->mutation, afl->m_tmp);
4476 #endif
4477               break;
4478 
4479             case 7:
4480               /* Randomly subtract from word, random endian. */
4481               if (temp_len < 8) { break; }
4482               if (rand_below(afl, 2)) {
4483 
4484                 u32 pos = rand_below(afl, temp_len - 1);
4485                 *(u16 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);
4486 #ifdef INTROSPECTION
4487                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16-%u", pos);
4488                 strcat(afl->mutation, afl->m_tmp);
4489 #endif
4490 
4491               } else {
4492 
4493                 u32 pos = rand_below(afl, temp_len - 1);
4494                 u16 num = 1 + rand_below(afl, ARITH_MAX);
4495 #ifdef INTROSPECTION
4496                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16BE-%u-%u",
4497                          pos, num);
4498                 strcat(afl->mutation, afl->m_tmp);
4499 #endif
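                /* The SWAP16 / subtract / SWAP16 sequence applies the
                   arithmetic to the big-endian view of the word. */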
4500                 *(u16 *)(out_buf + pos) =
4501                     SWAP16(SWAP16(*(u16 *)(out_buf + pos)) - num);
4502 
4503               }
4504 
4505               /* Randomly add to word, random endian. */
4506               if (rand_below(afl, 2)) {
4507 
4508                 u32 pos = rand_below(afl, temp_len - 1);
4509 #ifdef INTROSPECTION
4510                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+-%u", pos);
4511                 strcat(afl->mutation, afl->m_tmp);
4512 #endif
4513                 *(u16 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);
4514 
4515               } else {
4516 
4517                 u32 pos = rand_below(afl, temp_len - 1);
4518                 u16 num = 1 + rand_below(afl, ARITH_MAX);
4519 #ifdef INTROSPECTION
4520                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16BE+-%u-%u",
4521                          pos, num);
4522                 strcat(afl->mutation, afl->m_tmp);
4523 #endif
4524                 *(u16 *)(out_buf + pos) =
4525                     SWAP16(SWAP16(*(u16 *)(out_buf + pos)) + num);
4526 
4527               }
4528 
4529               MOpt_globals.cycles_v2[STAGE_ARITH16]++;
4530               break;
4531 
4532             case 8:
4533               /* Randomly subtract from dword, random endian. */
4534               if (temp_len < 8) { break; }
4535               if (rand_below(afl, 2)) {
4536 
4537                 u32 pos = rand_below(afl, temp_len - 3);
4538 #ifdef INTROSPECTION
4539                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32_-%u", pos);
4540                 strcat(afl->mutation, afl->m_tmp);
4541 #endif
4542                 *(u32 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);
4543 
4544               } else {
4545 
4546                 u32 pos = rand_below(afl, temp_len - 3);
4547                 u32 num = 1 + rand_below(afl, ARITH_MAX);
4548 #ifdef INTROSPECTION
4549                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32BE_-%u-%u",
4550                          pos, num);
4551                 strcat(afl->mutation, afl->m_tmp);
4552 #endif
4553                 *(u32 *)(out_buf + pos) =
4554                     SWAP32(SWAP32(*(u32 *)(out_buf + pos)) - num);
4555 
4556               }
4557 
4558               /* Randomly add to dword, random endian. */
4559               // if (temp_len < 4) break;
4560               if (rand_below(afl, 2)) {
4561 
4562                 u32 pos = rand_below(afl, temp_len - 3);
4563 #ifdef INTROSPECTION
4564                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+-%u", pos);
4565                 strcat(afl->mutation, afl->m_tmp);
4566 #endif
4567                 *(u32 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);
4568 
4569               } else {
4570 
4571                 u32 pos = rand_below(afl, temp_len - 3);
4572                 u32 num = 1 + rand_below(afl, ARITH_MAX);
4573 #ifdef INTROSPECTION
4574                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32BE+-%u-%u",
4575                          pos, num);
4576                 strcat(afl->mutation, afl->m_tmp);
4577 #endif
4578                 *(u32 *)(out_buf + pos) =
4579                     SWAP32(SWAP32(*(u32 *)(out_buf + pos)) + num);
4580 
4581               }
4582 
4583               MOpt_globals.cycles_v2[STAGE_ARITH32]++;
4584               break;
4585 
4586             case 9:
4587               /* Set byte to interesting value. */
4588               if (temp_len < 4) { break; }
4589               out_buf[rand_below(afl, temp_len)] =
4590                   interesting_8[rand_below(afl, sizeof(interesting_8))];
4591               MOpt_globals.cycles_v2[STAGE_INTEREST8]++;
4592 #ifdef INTROSPECTION
4593               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING8");
4594               strcat(afl->mutation, afl->m_tmp);
4595 #endif
4596               break;
4597 
4598             case 10:
4599               /* Set word to interesting value, randomly choosing endian. */
4600               if (temp_len < 8) { break; }
4601               if (rand_below(afl, 2)) {
4602 
4603 #ifdef INTROSPECTION
4604                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16");
4605                 strcat(afl->mutation, afl->m_tmp);
4606 #endif
4607                 *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
4608                     interesting_16[rand_below(afl,
4609                                               sizeof(interesting_16) >> 1)];
4610 
4611               } else {
4612 
4613 #ifdef INTROSPECTION
4614                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16BE");
4615                 strcat(afl->mutation, afl->m_tmp);
4616 #endif
4617                 *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
4618                     SWAP16(interesting_16[rand_below(
4619                         afl, sizeof(interesting_16) >> 1)]);
4620 
4621               }
4622 
4623               MOpt_globals.cycles_v2[STAGE_INTEREST16]++;
4624               break;
4625 
4626             case 11:
4627               /* Set dword to interesting value, randomly choosing endian. */
4628 
4629               if (temp_len < 8) { break; }
4630 
4631               if (rand_below(afl, 2)) {
4632 
4633 #ifdef INTROSPECTION
4634                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32");
4635                 strcat(afl->mutation, afl->m_tmp);
4636 #endif
4637                 *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
4638                     interesting_32[rand_below(afl,
4639                                               sizeof(interesting_32) >> 2)];
4640 
4641               } else {
4642 
4643 #ifdef INTROSPECTION
4644                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32BE");
4645                 strcat(afl->mutation, afl->m_tmp);
4646 #endif
4647                 *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
4648                     SWAP32(interesting_32[rand_below(
4649                         afl, sizeof(interesting_32) >> 2)]);
4650 
4651               }
4652 
4653               MOpt_globals.cycles_v2[STAGE_INTEREST32]++;
4654               break;
4655 
4656             case 12:
4657 
4658               /* Just set a random byte to a random value. Because,
4659                  why not. We use XOR with 1-255 to eliminate the
4660                  possibility of a no-op. */
4661 
4662               out_buf[rand_below(afl, temp_len)] ^= 1 + rand_below(afl, 255);
4663               MOpt_globals.cycles_v2[STAGE_RANDOMBYTE]++;
4664 #ifdef INTROSPECTION
4665               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " RAND8");
4666               strcat(afl->mutation, afl->m_tmp);
4667 #endif
4668               break;
4669 
4670             case 13: {
4671 
4672               /* Delete bytes. We're making this a bit more likely
4673                  than insertion (the next option) in hopes of keeping
4674                  files reasonably small. */
4675 
4676               u32 del_from, del_len;
4677 
4678               if (temp_len < 2) { break; }
4679 
4680               /* Don't delete too much. */
4681 
4682               del_len = choose_block_len(afl, temp_len - 1);
4683 
4684               del_from = rand_below(afl, temp_len - del_len + 1);
4685 
4686 #ifdef INTROSPECTION
              snprintf(afl->m_tmp, sizeof(afl->m_tmp), " DEL-%u-%u", del_from,
                       del_len);
4689               strcat(afl->mutation, afl->m_tmp);
4690 #endif
4691               memmove(out_buf + del_from, out_buf + del_from + del_len,
4692                       temp_len - del_from - del_len);
4693 
4694               temp_len -= del_len;
4695               MOpt_globals.cycles_v2[STAGE_DELETEBYTE]++;
4696               break;
4697 
4698             }
4699 
4700             case 14:
4701 
4702               if (temp_len + HAVOC_BLK_XL < MAX_FILE) {
4703 
                /* Clone bytes (75%) or insert a block of constant
                   bytes (25%). */
4706 
4707                 u8  actually_clone = rand_below(afl, 4);
4708                 u32 clone_from, clone_to, clone_len;
4709                 u8 *new_buf;
4710 
4711                 if (likely(actually_clone)) {
4712 
4713                   clone_len = choose_block_len(afl, temp_len);
4714                   clone_from = rand_below(afl, temp_len - clone_len + 1);
4715 
4716                 } else {
4717 
4718                   clone_len = choose_block_len(afl, HAVOC_BLK_XL);
4719                   clone_from = 0;
4720 
4721                 }
4722 
4723                 clone_to = rand_below(afl, temp_len);
4724 
4725 #ifdef INTROSPECTION
4726                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE_%s-%u-%u-%u",
4727                          actually_clone ? "clone" : "insert", clone_from,
4728                          clone_to, clone_len);
4729                 strcat(afl->mutation, afl->m_tmp);
4730 #endif
4731                 new_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
4732                                       temp_len + clone_len);
4733                 if (unlikely(!new_buf)) { PFATAL("alloc"); }
4734 
4735                 /* Head */
4736 
4737                 memcpy(new_buf, out_buf, clone_to);
4738 
4739                 /* Inserted part */
4740 
4741                 if (actually_clone) {
4742 
4743                   memcpy(new_buf + clone_to, out_buf + clone_from, clone_len);
4744 
4745                 } else {
4746 
4747                   memset(new_buf + clone_to,
4748                          rand_below(afl, 2)
4749                              ? rand_below(afl, 256)
4750                              : out_buf[rand_below(afl, temp_len)],
4751                          clone_len);
4752 
4753                 }
4754 
4755                 /* Tail */
4756                 memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
4757                        temp_len - clone_to);
4758 
4759                 out_buf = new_buf;
4760                 afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
4761                 temp_len += clone_len;
4762                 MOpt_globals.cycles_v2[STAGE_Clone75]++;
4763 
4764               }
4765 
4766               break;
4767 
4768             case 15: {
4769 
4770               /* Overwrite bytes with a randomly selected chunk (75%) or fixed
4771                  bytes (25%). */
4772 
4773               u32 copy_from, copy_to, copy_len;
4774 
4775               if (temp_len < 2) { break; }
4776 
4777               copy_len = choose_block_len(afl, temp_len - 1);
4778 
4779               copy_from = rand_below(afl, temp_len - copy_len + 1);
4780               copy_to = rand_below(afl, temp_len - copy_len + 1);
4781 
4782               if (likely(rand_below(afl, 4))) {
4783 
4784                 if (likely(copy_from != copy_to)) {
4785 
4786 #ifdef INTROSPECTION
4787                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
4788                            " OVERWRITE_COPY-%u-%u-%u", copy_from, copy_to,
4789                            copy_len);
4790                   strcat(afl->mutation, afl->m_tmp);
4791 #endif
4792                   memmove(out_buf + copy_to, out_buf + copy_from, copy_len);
4793 
4794                 }
4795 
4796               } else {
4797 
4798 #ifdef INTROSPECTION
4799                 snprintf(afl->m_tmp, sizeof(afl->m_tmp),
4800                          " OVERWRITE_FIXED-%u-%u-%u", copy_from, copy_to,
4801                          copy_len);
4802                 strcat(afl->mutation, afl->m_tmp);
4803 #endif
4804                 memset(out_buf + copy_to,
4805                        rand_below(afl, 2) ? rand_below(afl, 256)
4806                                           : out_buf[rand_below(afl, temp_len)],
4807                        copy_len);
4808 
4809               }
4810 
4811               MOpt_globals.cycles_v2[STAGE_OverWrite75]++;
4812               break;
4813 
4814             }                                                    /* case 15 */
4815 
4816               /* Values 16 and 17 can be selected only if there are any extras
4817                  present in the dictionaries. */
4818 
4819             case 16: {
4820 
4821               /* Overwrite bytes with an extra. */
4822 
4823               if (!afl->extras_cnt ||
4824                   (afl->a_extras_cnt && rand_below(afl, 2))) {
4825 
4826                 /* No user-specified extras or odds in our favor. Let's use an
4827                   auto-detected one. */
4828 
4829                 u32 use_extra = rand_below(afl, afl->a_extras_cnt);
4830                 u32 extra_len = afl->a_extras[use_extra].len;
4831 
4832                 if (extra_len > (u32)temp_len) break;
4833 
4834                 u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
4835 #ifdef INTROSPECTION
4836                 snprintf(afl->m_tmp, sizeof(afl->m_tmp),
4837                          " AUTO_EXTRA_OVERWRITE-%u-%u", insert_at, extra_len);
4838                 strcat(afl->mutation, afl->m_tmp);
4839 #endif
4840                 memcpy(out_buf + insert_at, afl->a_extras[use_extra].data,
4841                        extra_len);
4842 
4843               } else {
4844 
4845                 /* No auto extras or odds in our favor. Use the dictionary. */
4846 
4847                 u32 use_extra = rand_below(afl, afl->extras_cnt);
4848                 u32 extra_len = afl->extras[use_extra].len;
4849 
4850                 if (extra_len > (u32)temp_len) break;
4851 
4852                 u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
4853 #ifdef INTROSPECTION
4854                 snprintf(afl->m_tmp, sizeof(afl->m_tmp),
4855                          " EXTRA_OVERWRITE-%u-%u", insert_at, extra_len);
4856                 strcat(afl->mutation, afl->m_tmp);
4857 #endif
4858                 memcpy(out_buf + insert_at, afl->extras[use_extra].data,
4859                        extra_len);
4860 
4861               }
4862 
4863               MOpt_globals.cycles_v2[STAGE_OverWriteExtra]++;
4864 
4865               break;
4866 
4867             }
4868 
4869               /* Insert an extra. */
4870 
4871             case 17: {
4872 
4873               u32 use_extra, extra_len,
4874                   insert_at = rand_below(afl, temp_len + 1);
4875               u8 *ptr;
4876 
4877               /* Insert an extra. Do the same dice-rolling stuff as for the
4878                 previous case. */
4879 
4880               if (!afl->extras_cnt ||
4881                   (afl->a_extras_cnt && rand_below(afl, 2))) {
4882 
4883                 use_extra = rand_below(afl, afl->a_extras_cnt);
4884                 extra_len = afl->a_extras[use_extra].len;
4885                 ptr = afl->a_extras[use_extra].data;
4886 #ifdef INTROSPECTION
4887                 snprintf(afl->m_tmp, sizeof(afl->m_tmp),
4888                          " AUTO_EXTRA_INSERT-%u-%u", insert_at, extra_len);
4889                 strcat(afl->mutation, afl->m_tmp);
4890 #endif
4891 
4892               } else {
4893 
4894                 use_extra = rand_below(afl, afl->extras_cnt);
4895                 extra_len = afl->extras[use_extra].len;
4896                 ptr = afl->extras[use_extra].data;
4897 #ifdef INTROSPECTION
4898                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA_INSERT-%u-%u",
4899                          insert_at, extra_len);
4900                 strcat(afl->mutation, afl->m_tmp);
4901 #endif
4902 
4903               }
4904 
4905               if (temp_len + extra_len >= MAX_FILE) break;
4906 
4907               out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
4908               if (unlikely(!out_buf)) { PFATAL("alloc"); }
4909 
4910               /* Tail */
4911               memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
4912                       temp_len - insert_at);
4913 
4914               /* Inserted part */
4915               memcpy(out_buf + insert_at, ptr, extra_len);
4916 
4917               temp_len += extra_len;
4918               MOpt_globals.cycles_v2[STAGE_InsertExtra]++;
4919               break;
4920 
4921             }
4922 
4923             default: {
4924 
4925               if (unlikely(afl->ready_for_splicing_count < 2)) break;
4926 
4927               u32 tid;
4928               do {
4929 
4930                 tid = rand_below(afl, afl->queued_paths);
4931 
4932               } while (tid == afl->current_entry ||
4933 
4934                        afl->queue_buf[tid]->len < 4);
4935 
4936               /* Get the testcase for splicing. */
4937               struct queue_entry *target = afl->queue_buf[tid];
4938               u32                 new_len = target->len;
4939               u8 *                new_buf = queue_testcase_get(afl, target);
4940 
4941               if ((temp_len >= 2 && rand_below(afl, 2)) ||
4942                   temp_len + HAVOC_BLK_XL >= MAX_FILE) {
4943 
4944                 /* overwrite mode */
4945 
4946                 u32 copy_from, copy_to, copy_len;
4947 
4948                 copy_len = choose_block_len(afl, new_len - 1);
4949                 if (copy_len > temp_len) copy_len = temp_len;
4950 
4951                 copy_from = rand_below(afl, new_len - copy_len + 1);
4952                 copy_to = rand_below(afl, temp_len - copy_len + 1);
4953 
4954 #ifdef INTROSPECTION
4955                 snprintf(afl->m_tmp, sizeof(afl->m_tmp),
4956                          " SPLICE_OVERWRITE-%u-%u-%u-%s", copy_from, copy_to,
4957                          copy_len, target->fname);
4958                 strcat(afl->mutation, afl->m_tmp);
4959 #endif
4960                 memmove(out_buf + copy_to, new_buf + copy_from, copy_len);
4961 
4962               } else {
4963 
4964                 /* insert mode */
4965 
4966                 u32 clone_from, clone_to, clone_len;
4967 
4968                 clone_len = choose_block_len(afl, new_len);
4969                 clone_from = rand_below(afl, new_len - clone_len + 1);
4970                 clone_to = rand_below(afl, temp_len + 1);
4971 
4972                 u8 *temp_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
4973                                            temp_len + clone_len + 1);
4974                 if (unlikely(!temp_buf)) { PFATAL("alloc"); }
4975 
4976 #ifdef INTROSPECTION
4977                 snprintf(afl->m_tmp, sizeof(afl->m_tmp),
4978                          " SPLICE_INSERT-%u-%u-%u-%s", clone_from, clone_to,
4979                          clone_len, target->fname);
4980                 strcat(afl->mutation, afl->m_tmp);
4981 #endif
4982                 /* Head */
4983 
4984                 memcpy(temp_buf, out_buf, clone_to);
4985 
4986                 /* Inserted part */
4987 
4988                 memcpy(temp_buf + clone_to, new_buf + clone_from, clone_len);
4989 
4990                 /* Tail */
4991                 memcpy(temp_buf + clone_to + clone_len, out_buf + clone_to,
4992                        temp_len - clone_to);
4993 
4994                 out_buf = temp_buf;
4995                 afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
4996                 temp_len += clone_len;
4997 
4998               }
4999 
5000               MOpt_globals.cycles_v2[STAGE_Splice]++;
5001               break;
5002 
5003             }  // end of default:
5004 
5005           }                                    /* switch select_algorithm() */
5006 
5007         }                                      /* for i=0; i < use_stacking */
5008 
5009         ++*MOpt_globals.pTime;
5010 
5011         u64 temp_total_found = afl->queued_paths + afl->unique_crashes;
5012 
5013         if (common_fuzz_stuff(afl, out_buf, temp_len)) {
5014 
5015           goto abandon_entry_puppet;
5016 
5017         }
5018 
5019         /* out_buf might have been mangled a bit, so let's restore it to its
5020            original size and shape. */
5021 
5022         out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
5023         if (unlikely(!out_buf)) { PFATAL("alloc"); }
5024         temp_len = len;
5025         memcpy(out_buf, in_buf, len);
5026 
5027         /* If we're finding new stuff, let's run for a bit longer, limits
5028            permitting. */
5029 
5030         if (afl->queued_paths != havoc_queued) {
5031 
5032           if (perf_score <= afl->havoc_max_mult * 100) {
5033 
5034             afl->stage_max *= 2;
5035             perf_score *= 2;
5036 
5037           }
5038 
5039           havoc_queued = afl->queued_paths;
5040 
5041         }
5042 
5043         if (unlikely(afl->queued_paths + afl->unique_crashes >
5044                      temp_total_found)) {
5045 
5046           u64 temp_temp_puppet =
5047               afl->queued_paths + afl->unique_crashes - temp_total_found;
5048           afl->total_puppet_find = afl->total_puppet_find + temp_temp_puppet;
5049 
5050           if (MOpt_globals.is_pilot_mode) {
5051 
5052             for (i = 0; i < operator_num; ++i) {
5053 
5054               if (MOpt_globals.cycles_v2[i] > MOpt_globals.cycles_v3[i]) {
5055 
5056                 MOpt_globals.finds_v2[i] += temp_temp_puppet;
5057 
5058               }
5059 
5060             }
5061 
5062           } else {
5063 
5064             for (i = 0; i < operator_num; i++) {
5065 
5066               if (afl->core_operator_cycles_puppet_v2[i] >
5067                   afl->core_operator_cycles_puppet_v3[i])
5068 
5069                 afl->core_operator_finds_puppet_v2[i] += temp_temp_puppet;
5070 
5071             }
5072 
5073           }
5074 
5075         }                                                             /* if */
5076 
      }  /* for afl->stage_cur = 0; afl->stage_cur < afl->stage_max */
5080 
5081       new_hit_cnt = afl->queued_paths + afl->unique_crashes;
5082 
5083       if (MOpt_globals.is_pilot_mode) {
5084 
5085         if (!splice_cycle) {
5086 
5087           afl->stage_finds[STAGE_HAVOC] += new_hit_cnt - orig_hit_cnt;
5088           afl->stage_cycles[STAGE_HAVOC] += afl->stage_max;
5089 
5090         } else {
5091 
5092           afl->stage_finds[STAGE_SPLICE] += new_hit_cnt - orig_hit_cnt;
5093           afl->stage_cycles[STAGE_SPLICE] += afl->stage_max;
5094 
5095         }
5096 
5097       }
5098 
5099 #ifndef IGNORE_FINDS
5100 
5101       /************
5102        * SPLICING *
5103        ************/
5104 
5105     retry_splicing_puppet:
5106 
5107       if (afl->use_splicing &&
5108           splice_cycle++ < (u32)afl->SPLICE_CYCLES_puppet &&
5109           afl->ready_for_splicing_count > 1 && afl->queue_cur->len >= 4) {
5110 
5111         struct queue_entry *target;
5112         u32                 tid, split_at;
5113         u8 *                new_buf;
5114         s32                 f_diff, l_diff;
5115 
5116         /* First of all, if we've modified in_buf for havoc, let's clean that
5117            up... */
5118 
5119         if (in_buf != orig_in) {
5120 
5121           in_buf = orig_in;
5122           len = afl->queue_cur->len;
5123 
5124         }
5125 
        /* Pick a random queue entry and seek to it. Don't splice with
           yourself. */
5128 
5129         do {
5130 
5131           tid = rand_below(afl, afl->queued_paths);
5132 
5133         } while (tid == afl->current_entry || afl->queue_buf[tid]->len < 4);
5134 
5135         afl->splicing_with = tid;
5136         target = afl->queue_buf[tid];
5137 
5138         /* Read the testcase into a new buffer. */
5139         new_buf = queue_testcase_get(afl, target);
5140 
        /* Find a suitable splicing location, somewhere between the first and
5142            the last differing byte. Bail out if the difference is just a single
5143            byte or so. */
5144 
5145         locate_diffs(in_buf, new_buf, MIN(len, target->len), &f_diff, &l_diff);
5146 
5147         if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) {
5148 
5149           goto retry_splicing_puppet;
5150 
5151         }
5152 
5153         /* Split somewhere between the first and last differing byte. */
5154 
5155         split_at = f_diff + rand_below(afl, l_diff - f_diff);
5156 
5157         /* Do the thing. */
5158 
5159         len = target->len;
        afl->in_scratch_buf = afl_realloc(AFL_BUF_PARAM(in_scratch), len);
        if (unlikely(!afl->in_scratch_buf)) { PFATAL("alloc"); }
        memcpy(afl->in_scratch_buf, in_buf, split_at);
5162         memcpy(afl->in_scratch_buf + split_at, new_buf, len - split_at);
5163         in_buf = afl->in_scratch_buf;
5164         afl_swap_bufs(AFL_BUF_PARAM(in), AFL_BUF_PARAM(in_scratch));
5165 
5166         out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
5167         if (unlikely(!out_buf)) { PFATAL("alloc"); }
5168         memcpy(out_buf, in_buf, len);
5169 
5170         goto havoc_stage_puppet;
5171 
5172       }                                                  /* if splice_cycle */
5173 
5174 #endif                                                     /* !IGNORE_FINDS */
5175 
5176       ret_val = 0;
5177 
5178     abandon_entry:
5179     abandon_entry_puppet:
5180 
5181       if ((s64)splice_cycle >= afl->SPLICE_CYCLES_puppet) {
5182 
5183         afl->SPLICE_CYCLES_puppet =
5184             (rand_below(
5185                  afl, SPLICE_CYCLES_puppet_up - SPLICE_CYCLES_puppet_low + 1) +
5186              SPLICE_CYCLES_puppet_low);
5187 
5188       }
5189 
5190       afl->splicing_with = -1;
5191 
5192       /* Update afl->pending_not_fuzzed count if we made it through the
5193          calibration cycle and have not seen this entry before. */
5194       /*
5195         // TODO FIXME: I think we need this plus need an -L -1 check
5196         if (!afl->stop_soon && !afl->queue_cur->cal_failed &&
5197             (afl->queue_cur->was_fuzzed == 0 || afl->queue_cur->fuzz_level == 0)
5198         && !afl->queue_cur->disabled) {
5199 
5200           if (!afl->queue_cur->was_fuzzed) {
5201 
5202             --afl->pending_not_fuzzed;
5203             afl->queue_cur->was_fuzzed = 1;
5204             if (afl->queue_cur->favored) { --afl->pending_favored; }
5205 
5206           }
5207 
5208         }
5209 
5210       */
5211 
5212       orig_in = NULL;
5213 
5214       if (afl->key_puppet == 1) {
5215 
5216         if (unlikely(
5217                 afl->queued_paths + afl->unique_crashes >
5218                 ((afl->queued_paths + afl->unique_crashes) * limit_time_bound +
5219                  afl->orig_hit_cnt_puppet))) {
5220 
5221           afl->key_puppet = 0;
5222           afl->orig_hit_cnt_puppet = 0;
5223           afl->last_limit_time_start = 0;
5224 
5225         }
5226 
5227       }
5228 
5229       if (unlikely(*MOpt_globals.pTime > MOpt_globals.period)) {
5230 
5231         afl->total_pacemaker_time += *MOpt_globals.pTime;
5232         *MOpt_globals.pTime = 0;
5233         new_hit_cnt = afl->queued_paths + afl->unique_crashes;
5234 
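        /* Swarm fitness: new finds per unit of pilot time. Once every
           swarm has had its pilot turn, the fittest one is selected
           further below. */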
5235         if (MOpt_globals.is_pilot_mode) {
5236 
5237           afl->swarm_fitness[afl->swarm_now] =
5238               (double)(afl->total_puppet_find - afl->temp_puppet_find) /
5239               ((double)(afl->tmp_pilot_time) / afl->period_pilot_tmp);
5240 
5241         }
5242 
5243         afl->temp_puppet_find = afl->total_puppet_find;
5244         u64 temp_stage_finds_puppet = 0;
5245         for (i = 0; i < operator_num; ++i) {
5246 
5247           if (MOpt_globals.is_pilot_mode) {
5248 
5249             double temp_eff = 0.0;
5250 
5251             if (MOpt_globals.cycles_v2[i] > MOpt_globals.cycles[i]) {
5252 
5253               temp_eff =
5254                   (double)(MOpt_globals.finds_v2[i] - MOpt_globals.finds[i]) /
5255                   (double)(MOpt_globals.cycles_v2[i] - MOpt_globals.cycles[i]);
5256 
5257             }
5258 
5259             if (afl->eff_best[afl->swarm_now][i] < temp_eff) {
5260 
5261               afl->eff_best[afl->swarm_now][i] = temp_eff;
5262               afl->L_best[afl->swarm_now][i] = afl->x_now[afl->swarm_now][i];
5263 
5264             }
5265 
5266           }
5267 
5268           MOpt_globals.finds[i] = MOpt_globals.finds_v2[i];
5269           MOpt_globals.cycles[i] = MOpt_globals.cycles_v2[i];
5270           temp_stage_finds_puppet += MOpt_globals.finds[i];
5271 
5272         }                                    /* for i = 0; i < operator_num */
5273 
5274         if (MOpt_globals.is_pilot_mode) {
5275 
5276           afl->swarm_now = afl->swarm_now + 1;
5277           if (afl->swarm_now == swarm_num) {
5278 
5279             afl->key_module = 1;
5280             for (i = 0; i < operator_num; ++i) {
5281 
5282               afl->core_operator_cycles_puppet_v2[i] =
5283                   afl->core_operator_cycles_puppet[i];
5284               afl->core_operator_cycles_puppet_v3[i] =
5285                   afl->core_operator_cycles_puppet[i];
5286               afl->core_operator_finds_puppet_v2[i] =
5287                   afl->core_operator_finds_puppet[i];
5288 
5289             }
5290 
5291             double swarm_eff = 0.0;
5292             afl->swarm_now = 0;
5293             for (i = 0; i < swarm_num; ++i) {
5294 
5295               if (afl->swarm_fitness[i] > swarm_eff) {
5296 
5297                 swarm_eff = afl->swarm_fitness[i];
5298                 afl->swarm_now = i;
5299 
5300               }
5301 
5302             }
5303 
5304             if (afl->swarm_now < 0 || afl->swarm_now > swarm_num - 1) {
5305 
              PFATAL("swarm_now error number %d", afl->swarm_now);
5307 
5308             }
5309 
5310           }                               /* if afl->swarm_now == swarm_num */
5311 
5312           /* adjust pointers dependent on 'afl->swarm_now' */
5313           afl->mopt_globals_pilot.finds =
5314               afl->stage_finds_puppet[afl->swarm_now];
5315           afl->mopt_globals_pilot.finds_v2 =
5316               afl->stage_finds_puppet_v2[afl->swarm_now];
5317           afl->mopt_globals_pilot.cycles =
5318               afl->stage_cycles_puppet[afl->swarm_now];
5319           afl->mopt_globals_pilot.cycles_v2 =
5320               afl->stage_cycles_puppet_v2[afl->swarm_now];
5321           afl->mopt_globals_pilot.cycles_v3 =
5322               afl->stage_cycles_puppet_v3[afl->swarm_now];
5323 
5324         } else {
5325 
5326           for (i = 0; i < operator_num; i++) {
5327 
5328             afl->core_operator_finds_puppet[i] =
5329                 afl->core_operator_finds_puppet_v2[i];
5330             afl->core_operator_cycles_puppet[i] =
5331                 afl->core_operator_cycles_puppet_v2[i];
5332             temp_stage_finds_puppet += afl->core_operator_finds_puppet[i];
5333 
5334           }
5335 
5336           afl->key_module = 2;
5337 
5338           afl->old_hit_count = new_hit_cnt;
5339 
5340         }                                                  /* if pilot_mode */
5341 
5342       }         /* if (unlikely(*MOpt_globals.pTime > MOpt_globals.period)) */
5343 
5344     }                                                              /* block */
5345 
5346   }                                                                /* block */
5347 
5348   return ret_val;
5349 
5350 }
5351 
5352 #undef FLIP_BIT
5353 
u8 core_fuzzing(afl_state_t *afl) {
5355 
5356   return mopt_common_fuzzing(afl, afl->mopt_globals_core);
5357 
5358 }
5359 
u8 pilot_fuzzing(afl_state_t *afl) {
5361 
5362   return mopt_common_fuzzing(afl, afl->mopt_globals_pilot);
5363 
5364 }
5365 
void pso_updating(afl_state_t *afl) {
5367 
5368   afl->g_now++;
5369   if (afl->g_now > afl->g_max) { afl->g_now = 0; }
5370   afl->w_now =
5371       (afl->w_init - afl->w_end) * (afl->g_max - afl->g_now) / (afl->g_max) +
5372       afl->w_end;
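  /* Standard PSO inertia-weight schedule: w_now decays linearly from
     w_init down to w_end as g_now runs from 0 to g_max. */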
  int tmp_swarm, i, j;
  u64 temp_operator_finds_puppet = 0;
  for (i = 0; i < operator_num; ++i) {

    afl->operator_finds_puppet[i] = afl->core_operator_finds_puppet[i];

    for (j = 0; j < swarm_num; ++j) {

      afl->operator_finds_puppet[i] =
          afl->operator_finds_puppet[i] + afl->stage_finds_puppet[j][i];

    }

    temp_operator_finds_puppet =
        temp_operator_finds_puppet + afl->operator_finds_puppet[i];

  }

  for (i = 0; i < operator_num; ++i) {

    if (afl->operator_finds_puppet[i]) {

      afl->G_best[i] = (double)((double)(afl->operator_finds_puppet[i]) /
                                (double)(temp_operator_finds_puppet));

    }

  }
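
  /* The loop above computes the global best position: each operator's
     share of all finds across core mode and every swarm,

       G_best[i] = operator_finds[i] / sum_j operator_finds[j]

     with zero-find entries keeping their previous value. */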

  for (tmp_swarm = 0; tmp_swarm < swarm_num; ++tmp_swarm) {

    double x_temp = 0.0;
    for (i = 0; i < operator_num; ++i) {

      afl->probability_now[tmp_swarm][i] = 0.0;
      afl->v_now[tmp_swarm][i] =
          afl->w_now * afl->v_now[tmp_swarm][i] +
          RAND_C * (afl->L_best[tmp_swarm][i] - afl->x_now[tmp_swarm][i]) +
          RAND_C * (afl->G_best[i] - afl->x_now[tmp_swarm][i]);
      afl->x_now[tmp_swarm][i] += afl->v_now[tmp_swarm][i];
      if (afl->x_now[tmp_swarm][i] > v_max) {

        afl->x_now[tmp_swarm][i] = v_max;

      } else if (afl->x_now[tmp_swarm][i] < v_min) {

        afl->x_now[tmp_swarm][i] = v_min;

      }

      x_temp += afl->x_now[tmp_swarm][i];

    }

    for (i = 0; i < operator_num; ++i) {

      afl->x_now[tmp_swarm][i] = afl->x_now[tmp_swarm][i] / x_temp;
      if (likely(i != 0)) {

        afl->probability_now[tmp_swarm][i] =
            afl->probability_now[tmp_swarm][i - 1] + afl->x_now[tmp_swarm][i];

      } else {

        afl->probability_now[tmp_swarm][i] = afl->x_now[tmp_swarm][i];

      }

    }
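
    /* probability_now[tmp_swarm] now holds the cumulative distribution
       (prefix sums of the normalized x_now), which is what
       select_algorithm() walks when mapping a uniform draw to an
       operator; its last entry should therefore be ~1.0. */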

    if (afl->probability_now[tmp_swarm][operator_num - 1] < 0.99 ||
        afl->probability_now[tmp_swarm][operator_num - 1] > 1.01) {

      FATAL("ERROR probability");

    }

  }

  afl->swarm_now = 0;
  afl->key_module = 0;

}

/* Larger change for the MOpt implementation: the original fuzz_one was
   renamed to fuzz_one_original. All documentation references to fuzz_one
   therefore mean fuzz_one_original. */
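
/* Module dispatch sketch (reading aid): MOpt cycles through three
   modules via afl->key_module,

     0: pilot_fuzzing()  - each swarm fuzzes for one period in turn
     1: core_fuzzing()   - entered once every swarm finished its pilot
                           pass (swarm_now reached swarm_num above)
     2: pso_updating()   - refreshes the probabilities and resets
                           key_module to 0 for the next pilot round. */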

u8 fuzz_one(afl_state_t *afl) {

  int key_val_lv_1 = 0, key_val_lv_2 = 0;

#ifdef _AFL_DOCUMENT_MUTATIONS

  u8 path_buf[PATH_MAX];
  if (afl->do_document == 0) {

    snprintf(path_buf, PATH_MAX, "%s/mutations", afl->out_dir);
    afl->do_document = mkdir(path_buf, 0700);  // if it exists we do not care
    afl->do_document = 1;

  } else {

    afl->do_document = 2;
    afl->stop_soon = 2;

  }

#endif
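
  /* Under _AFL_DOCUMENT_MUTATIONS, do_document acts as a small state
     machine: 0 creates <out_dir>/mutations and starts documenting, and
     the next call marks documentation as done (2) and requests
     shutdown via stop_soon = 2, so only a single documented pass is
     performed. */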

  /* limit_time_sig < 0 runs fuzz_one_original and the MOpt modules
     after each other; 0 runs only fuzz_one_original; > 0 runs only the
     MOpt modules. */

  if (afl->limit_time_sig <= 0) { key_val_lv_1 = fuzz_one_original(afl); }

  if (afl->limit_time_sig != 0) {

    if (afl->key_module == 0) {

      key_val_lv_2 = pilot_fuzzing(afl);

    } else if (afl->key_module == 1) {

      key_val_lv_2 = core_fuzzing(afl);

    } else if (afl->key_module == 2) {

      pso_updating(afl);

    }

  }

  return (key_val_lv_1 | key_val_lv_2);

}
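
/* Minimal sketch of how the main loop is expected to drive this
   dispatcher (simplified; scheduling, stats and error handling are
   elided):

     while (likely(!afl->stop_soon)) {

       cull_queue(afl);
       ...
       skipped_fuzz = fuzz_one(afl);
       ...

     }

   A nonzero return value means the queue entry was skipped or bailed
   out of rather than fully fuzzed. */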