/*
   american fuzzy lop++ - fuzz_one routines in different flavours
   ---------------------------------------------------------------

   Originally written by Michal Zalewski

   Now maintained by Marc Heuse <[email protected]>,
                        Heiko Eißfeldt <[email protected]> and
                        Andrea Fioraldi <[email protected]>

   Copyright 2016, 2017 Google Inc. All rights reserved.
   Copyright 2019-2024 AFLplusplus Project. All rights reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at:

     https://www.apache.org/licenses/LICENSE-2.0

   This is the real deal: the program takes an instrumented binary and
   attempts a variety of basic fuzzing tricks, paying close attention to
   how they affect the execution path.

 */

#include "afl-fuzz.h"
#include <string.h>
#include <limits.h>
#include "cmplog.h"
#include "afl-mutations.h"

/* MOpt */

static int select_algorithm(afl_state_t *afl, u32 max_algorithm) {

  int i_puppet, j_puppet = 0, operator_number = max_algorithm;

  double range_sele =
      (double)afl->probability_now[afl->swarm_now][operator_number - 1];
  double sele = ((double)(rand_below(afl, 10000) * 0.0001 * range_sele));

  for (i_puppet = 0; i_puppet < operator_num; ++i_puppet) {

    if (unlikely(i_puppet == 0)) {

      if (sele < afl->probability_now[afl->swarm_now][i_puppet]) { break; }

    } else {

      if (sele < afl->probability_now[afl->swarm_now][i_puppet]) {

        j_puppet = 1;
        break;

      }

    }

  }

  if ((j_puppet == 1 &&
       sele < afl->probability_now[afl->swarm_now][i_puppet - 1]) ||
      (i_puppet + 1 < operator_num &&
       sele > afl->probability_now[afl->swarm_now][i_puppet + 1])) {

    FATAL("error select_algorithm");

  }

  return i_puppet;

}
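
/* A roulette-wheel pick: probability_now[] holds cumulative operator
   probabilities, so e.g. with slots {0.25, 0.60, 1.00} and sele = 0.40 the
   walk above stops at index 1, the first slot whose cumulative value
   exceeds the roulette value. */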

/* Helper function to see if a particular change (xor_val = old ^ new) could
   be a product of deterministic bit flips with the lengths and stepovers
   attempted by afl-fuzz. This is used to avoid dupes in some of the
   deterministic fuzzing operations that follow bit flips. We also
   return 1 if xor_val is zero, which implies that the old and attempted new
   values are identical and the exec would be a waste of time. */

static u8 could_be_bitflip(u32 xor_val) {

  u32 sh = 0;

  if (!xor_val) { return 1; }

  /* Shift right until the first set bit. */

  while (!(xor_val & 1)) {

    ++sh;
    xor_val >>= 1;

  }

  /* 1-, 2-, and 4-bit patterns are OK anywhere. */

  if (xor_val == 1 || xor_val == 3 || xor_val == 15) { return 1; }

  /* 8-, 16-, and 32-bit patterns are OK only if shift factor is
     divisible by 8, since that's the stepover for these ops. */

  if (sh & 7) { return 0; }

  if (xor_val == 0xff || xor_val == 0xffff || xor_val == 0xffffffff) {

    return 1;

  }

  return 0;

}
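
/* Example: 0x41 ^ 0x61 = 0x20 is a single bit, so flip1 already covers it
   (return 1); 0x00000ff0 is a byte-wide flip at a 4-bit offset, which the
   8-bit stepover never attempts (return 0). */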

/* Helper function to see if a particular value is reachable through
   arithmetic operations. Used for similar purposes. */

static u8 could_be_arith(u32 old_val, u32 new_val, u8 blen) {

  u32 i, ov = 0, nv = 0, diffs = 0;

  if (old_val == new_val) { return 1; }

  /* See if one-byte adjustments to any byte could produce this result. */

  for (i = 0; (u8)i < blen; ++i) {

    u8 a = old_val >> (8 * i), b = new_val >> (8 * i);

    if (a != b) {

      ++diffs;
      ov = a;
      nv = b;

    }

  }

  /* If only one byte differs and the values are within range, return 1. */

  if (diffs == 1) {

    if ((u8)(ov - nv) <= ARITH_MAX || (u8)(nv - ov) <= ARITH_MAX) { return 1; }

  }

  if (blen == 1) { return 0; }

  /* See if two-byte adjustments to any word would produce this result. */

  diffs = 0;

  for (i = 0; (u8)i < blen / 2; ++i) {

    u16 a = old_val >> (16 * i), b = new_val >> (16 * i);

    if (a != b) {

      ++diffs;
      ov = a;
      nv = b;

    }

  }

  /* If only one word differs and the values are within range, return 1. */

  if (diffs == 1) {

    if ((u16)(ov - nv) <= ARITH_MAX || (u16)(nv - ov) <= ARITH_MAX) {

      return 1;

    }

    ov = SWAP16(ov);
    nv = SWAP16(nv);

    if ((u16)(ov - nv) <= ARITH_MAX || (u16)(nv - ov) <= ARITH_MAX) {

      return 1;

    }

  }

  /* Finally, let's do the same thing for dwords. */

  if (blen == 4) {

    if ((u32)(old_val - new_val) <= ARITH_MAX ||
        (u32)(new_val - old_val) <= ARITH_MAX) {

      return 1;

    }

    new_val = SWAP32(new_val);
    old_val = SWAP32(old_val);

    if ((u32)(old_val - new_val) <= ARITH_MAX ||
        (u32)(new_val - old_val) <= ARITH_MAX) {

      return 1;

    }

  }

  return 0;

}
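
/* Example: old 0x00ff -> new 0x0100 differs in both bytes, so no single
   8-bit op explains it, but as one little-endian word it is just +1, well
   within ARITH_MAX, so we return 1 and leave it to the arith16 stage. */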

/* Last but not least, a similar helper to see if insertion of an
   interesting integer is redundant given the insertions done for
   shorter blen. The last param (check_le) is set if the caller
   already executed LE insertion for current blen and wants to see
   if BE variant passed in new_val is unique. */

static u8 could_be_interest(u32 old_val, u32 new_val, u8 blen, u8 check_le) {

  u32 i, j;

  if (old_val == new_val) { return 1; }

  /* See if one-byte insertions from interesting_8 over old_val could
     produce new_val. */

  for (i = 0; i < blen; ++i) {

    for (j = 0; j < sizeof(interesting_8); ++j) {

      u32 tval =
          (old_val & ~(0xff << (i * 8))) | (((u8)interesting_8[j]) << (i * 8));

      if (new_val == tval) { return 1; }

    }

  }

  /* Bail out unless we're also asked to examine two-byte LE insertions
     as a preparation for BE attempts. */

  if (blen == 2 && !check_le) { return 0; }

  /* See if two-byte insertions over old_val could give us new_val. */

  for (i = 0; (u8)i < blen - 1; ++i) {

    for (j = 0; j < sizeof(interesting_16) / 2; ++j) {

      u32 tval = (old_val & ~(0xffff << (i * 8))) |
                 (((u16)interesting_16[j]) << (i * 8));

      if (new_val == tval) { return 1; }

      /* Continue here only if blen > 2. */

      if (blen > 2) {

        tval = (old_val & ~(0xffff << (i * 8))) |
               (SWAP16(interesting_16[j]) << (i * 8));

        if (new_val == tval) { return 1; }

      }

    }

  }

  if (blen == 4 && check_le) {

    /* See if four-byte insertions could produce the same result
       (LE only). */

    for (j = 0; j < sizeof(interesting_32) / 4; ++j) {

      if (new_val == (u32)interesting_32[j]) { return 1; }

    }

  }

  return 0;

}
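
/* Example: old 0x12345678 -> new 0x1234567f is just interesting_8's 127
   written into the lowest byte, so the wider interest stages skip it. */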

#ifndef IGNORE_FINDS

/* Helper function to compare buffers; returns first and last differing offset.
   We use this to find reasonable locations for splicing two files. */

static void locate_diffs(u8 *ptr1, u8 *ptr2, u32 len, s32 *first, s32 *last) {

  s32 f_loc = -1;
  s32 l_loc = -1;
  u32 pos;

  for (pos = 0; pos < len; ++pos) {

    if (*(ptr1++) != *(ptr2++)) {

      if (f_loc == -1) { f_loc = pos; }
      l_loc = pos;

    }

  }

  *first = f_loc;
  *last = l_loc;

  return;

}
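
/* Example: locate_diffs("abcdef", "abXdYf", 6, &f, &l) yields f = 2 and
   l = 4; the splice point is later chosen between those two offsets. */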

#endif                                                     /* !IGNORE_FINDS */

/* Take the current entry from the queue, fuzz it for a while. This
   function is a tad too long... returns 0 if fuzzed successfully, 1 if
   skipped or bailed out. */

u8 fuzz_one_original(afl_state_t *afl) {

  u32 len, temp_len;
  u32 j;
  u32 i;
  u8 *in_buf, *out_buf, *orig_in, *ex_tmp;
  u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt = 0, prev_cksum, _prev_cksum;
  u32 splice_cycle = 0, perf_score = 100, orig_perf;

  u8 ret_val = 1, doing_det = 0;

  u8  a_collect[MAX_AUTO_EXTRA];
  u32 a_len = 0;

#ifdef IGNORE_FINDS

  /* In IGNORE_FINDS mode, skip any entries that weren't in the
     initial data set. */

  if (afl->queue_cur->depth > 1) return 1;

#else

  if (unlikely(afl->custom_mutators_count)) {

    /* The custom mutator will decide to skip this test case or not. */

    LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {

      if (el->afl_custom_queue_get &&
          !el->afl_custom_queue_get(el->data, afl->queue_cur->fname)) {

        return 1;

      }

    });

  }

  if (likely(afl->pending_favored)) {

    /* If we have any favored, non-fuzzed new arrivals in the queue,
       possibly skip to them at the expense of already-fuzzed or non-favored
       cases. */

    if ((afl->queue_cur->fuzz_level || !afl->queue_cur->favored) &&
        likely(rand_below(afl, 100) < SKIP_TO_NEW_PROB)) {

      return 1;

    }

  } else if (!afl->non_instrumented_mode && !afl->queue_cur->favored &&

             afl->queued_items > 10) {

    /* Otherwise, still possibly skip non-favored cases, albeit less often.
       The odds of skipping stuff are higher for already-fuzzed inputs and
       lower for never-fuzzed entries. */

    if (afl->queue_cycle > 1 && !afl->queue_cur->fuzz_level) {

      if (likely(rand_below(afl, 100) < SKIP_NFAV_NEW_PROB)) { return 1; }

    } else {

      if (likely(rand_below(afl, 100) < SKIP_NFAV_OLD_PROB)) { return 1; }

    }

  }
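
  /* With the config.h defaults this skips a non-favored entry with 99%
     probability while favored new arrivals are pending, 75% for a
     never-fuzzed non-favored entry after the first queue cycle, and 95%
     otherwise. */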

#endif                                                     /* ^IGNORE_FINDS */

  if (likely(afl->not_on_tty)) {

    u8 time_tmp[64];

    u_simplestring_time_diff(time_tmp, afl->prev_run_time + get_cur_time(),
                             afl->start_time);
    ACTF(
        "Fuzzing test case #%u (%u total, %llu crashes saved, state: %s, "
        "mode=%s, "
        "perf_score=%0.0f, weight=%0.0f, favorite=%u, was_fuzzed=%u, "
        "exec_us=%llu, hits=%u, map=%u, ascii=%u, run_time=%s)...",
        afl->current_entry, afl->queued_items, afl->saved_crashes,
        get_fuzzing_state(afl), afl->fuzz_mode ? "exploit" : "explore",
        afl->queue_cur->perf_score, afl->queue_cur->weight,
        afl->queue_cur->favored, afl->queue_cur->was_fuzzed,
        afl->queue_cur->exec_us,
        likely(afl->n_fuzz) ? afl->n_fuzz[afl->queue_cur->n_fuzz_entry] : 0,
        afl->queue_cur->bitmap_size, afl->queue_cur->is_ascii, time_tmp);
    fflush(stdout);

  }

  orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);
  len = afl->queue_cur->len;

  out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
  if (unlikely(!out_buf)) { PFATAL("alloc"); }

  afl->subseq_tmouts = 0;

  afl->cur_depth = afl->queue_cur->depth;

  /*******************************************
   * CALIBRATION (only if failed earlier on) *
   *******************************************/

  if (unlikely(afl->queue_cur->cal_failed)) {

    u8 res = FSRV_RUN_TMOUT;

    if (afl->queue_cur->cal_failed < CAL_CHANCES) {

      afl->queue_cur->exec_cksum = 0;

      res =
          calibrate_case(afl, afl->queue_cur, in_buf, afl->queue_cycle - 1, 0);

      if (unlikely(res == FSRV_RUN_ERROR)) {

        FATAL("Unable to execute target application");

      }

    }

    if (unlikely(afl->stop_soon) || res != afl->crash_mode) {

      ++afl->cur_skipped_items;
      goto abandon_entry;

    }

  }

  /************
   * TRIMMING *
   ************/

  if (unlikely(!afl->non_instrumented_mode && !afl->queue_cur->trim_done &&
               !afl->disable_trim)) {

    u32 old_len = afl->queue_cur->len;

    u8 res = trim_case(afl, afl->queue_cur, in_buf);
    orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);

    if (unlikely(res == FSRV_RUN_ERROR)) {

      FATAL("Unable to execute target application");

    }

    if (unlikely(afl->stop_soon)) {

      ++afl->cur_skipped_items;
      goto abandon_entry;

    }

    /* Don't retry trimming, even if it failed. */

    afl->queue_cur->trim_done = 1;

    len = afl->queue_cur->len;

    /* maybe current entry is not ready for splicing anymore */
    if (unlikely(len <= 4 && old_len > 4)) --afl->ready_for_splicing_count;
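    /* (Splice donors must be longer than 4 bytes, so shrinking to 4 or
       fewer bytes removes this entry from the splice pool.) */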

  }

  memcpy(out_buf, in_buf, len);

  /*********************
   * PERFORMANCE SCORE *
   *********************/

  if (likely(!afl->old_seed_selection))
    orig_perf = perf_score = afl->queue_cur->perf_score;
  else
    afl->queue_cur->perf_score = orig_perf = perf_score =
        calculate_score(afl, afl->queue_cur);

  if (unlikely(perf_score <= 0 && afl->active_items > 1)) {

    goto abandon_entry;

  }

  if (unlikely(afl->shm.cmplog_mode &&
               afl->queue_cur->colorized < afl->cmplog_lvl &&
               (u32)len <= afl->cmplog_max_filesize)) {

    if (unlikely(len < 4)) {

      afl->queue_cur->colorized = CMPLOG_LVL_MAX;

    } else {

      if (afl->queue_cur->favored || afl->cmplog_lvl == 3 ||
          (afl->cmplog_lvl == 2 &&
           (afl->queue_cur->tc_ref ||
            afl->fsrv.total_execs % afl->queued_items <= 10)) ||
          get_cur_time() - afl->last_find_time > 250000) {  // 250 seconds

        if (input_to_state_stage(afl, in_buf, out_buf, len)) {

          goto abandon_entry;

        }

      }

    }

  }

  u64 before_det_time = get_cur_time();
#ifdef INTROSPECTION

  u64 before_havoc_time;
  u32 before_det_findings = afl->queued_items,
      before_det_edges = count_non_255_bytes(afl, afl->virgin_bits),
      before_havoc_findings, before_havoc_edges;
  u8 is_logged = 0;

#endif
  if (!afl->skip_deterministic) {

    if (!skip_deterministic_stage(afl, in_buf, out_buf, len, before_det_time)) {

      goto abandon_entry;

    }

  }

  u8 *skip_eff_map = afl->queue_cur->skipdet_e->skip_eff_map;

  /* Skip right away if -d is given, if it has not been chosen sufficiently
     often to warrant the expensive deterministic stage (fuzz_level), or
     if it has gone through deterministic testing in earlier, resumed runs
     (passed_det). */
  /* If skipdet decides to skip the seed, or no interesting bytes were found,
     we skip the whole deterministic stage as well. */

  if (likely(afl->skip_deterministic) || likely(afl->queue_cur->passed_det) ||
      likely(!afl->queue_cur->skipdet_e->quick_eff_bytes) ||
      likely(perf_score <
             (afl->queue_cur->depth * 30 <= afl->havoc_max_mult * 100
                  ? afl->queue_cur->depth * 30
                  : afl->havoc_max_mult * 100))) {

    goto custom_mutator_stage;

  }

  /* Skip deterministic fuzzing if exec path checksum puts this out of scope
     for this main instance. */

  if (unlikely(afl->main_node_max &&
               (afl->queue_cur->exec_cksum % afl->main_node_max) !=
                   afl->main_node_id - 1)) {

    goto custom_mutator_stage;

  }

  doing_det = 1;

  /*********************************************
   * SIMPLE BITFLIP (+dictionary construction) *
   *********************************************/

#define FLIP_BIT(_ar, _b)                     \
  do {                                        \
                                              \
    u8 *_arf = (u8 *)(_ar);                   \
    u32 _bf = (_b);                           \
    _arf[(_bf) >> 3] ^= (128 >> ((_bf) & 7)); \
                                              \
  } while (0)
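
  /* Example: FLIP_BIT(buf, 10) toggles buf[1] with mask 128 >> 2 = 0x20,
     i.e. the third-most-significant bit of the second byte. */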

  /* Single walking bit. */

  afl->stage_short = "flip1";
  afl->stage_max = len << 3;
  afl->stage_name = "bitflip 1/1";

  afl->stage_val_type = STAGE_VAL_NONE;

  orig_hit_cnt = afl->queued_items + afl->saved_crashes;

  /* Get a clean cksum. */

  if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

  prev_cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
  _prev_cksum = prev_cksum;
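
  /* _prev_cksum keeps the baseline path of the unmutated input, while
     prev_cksum tracks the most recently seen path so that token boundaries
     can be detected in the loop below. */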

  /* Now flip bits. */

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    if (!skip_eff_map[afl->stage_cur_byte]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    FLIP_BIT(out_buf, afl->stage_cur);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT1-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);

    /* While flipping the least significant bit in every byte, pull off an
       extra trick to detect possible syntax tokens. In essence, the idea is
       that if you have a binary blob like this:

       xxxxxxxxIHDRxxxxxxxx

       ...and changing the leading and trailing bytes causes variable or no
       changes in program flow, but touching any character in the "IHDR" string
       always produces the same, distinctive path, it's highly likely that
       "IHDR" is an atomically-checked magic value of special significance to
       the fuzzed format.

       We do this here, rather than as a separate stage, because it's a nice
       way to keep the operation approximately "free" (i.e., no extra execs).

       Empirically, performing the check when flipping the least significant
       bit is advantageous, compared to doing it at the time of more disruptive
       changes, where the program flow may be affected in more violent ways.

       The caveat is that we won't generate dictionaries in the -d mode or -S
       mode - but that's probably a fair trade-off.

       This won't work particularly well with paths that exhibit variable
       behavior, but fails gracefully, so we'll carry out the checks anyway.

      */

    if (!afl->non_instrumented_mode && (afl->stage_cur & 7) == 7) {

      u64 cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);

      if (afl->stage_cur == afl->stage_max - 1 && cksum == prev_cksum) {

        /* If at end of file and we are still collecting a string, grab the
           final character and force output. */

        if (a_len < MAX_AUTO_EXTRA) {

          a_collect[a_len] = out_buf[afl->stage_cur >> 3];

        }

        ++a_len;

        if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

          maybe_add_auto(afl, a_collect, a_len);

        }

      } else if (cksum != prev_cksum) {

        /* Otherwise, if the checksum has changed, see if we have something
           worthwhile queued up, and collect that if the answer is yes. */

        if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

          maybe_add_auto(afl, a_collect, a_len);

        }

        a_len = 0;
        prev_cksum = cksum;

      }

      /* Continue collecting string, but only if the bit flip actually made
         any difference - we don't want no-op tokens. */

      if (cksum != _prev_cksum) {

        if (a_len < MAX_AUTO_EXTRA) {

          a_collect[a_len] = out_buf[afl->stage_cur >> 3];

        }

        ++a_len;

      }

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP1] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP1] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  /* Two walking bits. */

  afl->stage_name = "bitflip 2/1";
  afl->stage_short = "flip2";
  afl->stage_max = (len << 3) - 1;

  orig_hit_cnt = new_hit_cnt;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    if (!skip_eff_map[afl->stage_cur_byte]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT2-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP2] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP2] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  /* Four walking bits. */

  afl->stage_name = "bitflip 4/1";
  afl->stage_short = "flip4";
  afl->stage_max = (len << 3) - 3;

  orig_hit_cnt = new_hit_cnt;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    if (!skip_eff_map[afl->stage_cur_byte]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);
    FLIP_BIT(out_buf, afl->stage_cur + 2);
    FLIP_BIT(out_buf, afl->stage_cur + 3);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT4-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);
    FLIP_BIT(out_buf, afl->stage_cur + 2);
    FLIP_BIT(out_buf, afl->stage_cur + 3);

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP4] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP4] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  /* Walking byte. */

  afl->stage_name = "bitflip 8/8";
  afl->stage_short = "flip8";
  afl->stage_max = len;

  orig_hit_cnt = new_hit_cnt;
  prev_cksum = _prev_cksum;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur;

    if (!skip_eff_map[afl->stage_cur_byte]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    out_buf[afl->stage_cur] ^= 0xFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT8-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    out_buf[afl->stage_cur] ^= 0xFF;

  }

  /* New effective bytes calculation. */

  for (i = 0; i < len; i++) {

    if (skip_eff_map[i]) afl->blocks_eff_select += 1;

  }

  afl->blocks_eff_total += len;

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP8] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP8] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  /* Two walking bytes. */

  if (len < 2) { goto skip_bitflip; }

  afl->stage_name = "bitflip 16/8";
  afl->stage_short = "flip16";
  afl->stage_cur = 0;
  afl->stage_max = len - 1;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 1; ++i) {

    /* Let's consult the effector map... */

    if (!skip_eff_map[i]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    *(u16 *)(out_buf + i) ^= 0xFFFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT16-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
    ++afl->stage_cur;

    *(u16 *)(out_buf + i) ^= 0xFFFF;

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP16] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP16] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  if (len < 4) { goto skip_bitflip; }

  /* Four walking bytes. */

  afl->stage_name = "bitflip 32/8";
  afl->stage_short = "flip32";
  afl->stage_cur = 0;
  afl->stage_max = len - 3;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 3; ++i) {

    /* Let's consult the effector map... */

    if (!skip_eff_map[i]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT32-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
    ++afl->stage_cur;

    *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP32] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP32] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

skip_bitflip:

  if (afl->no_arith) { goto skip_arith; }

  /**********************
   * ARITHMETIC INC/DEC *
   **********************/

  /* 8-bit arithmetics. */

  afl->stage_name = "arith 8/8";
  afl->stage_short = "arith8";
  afl->stage_cur = 0;
  afl->stage_max = 2 * len * ARITH_MAX;

  afl->stage_val_type = STAGE_VAL_LE;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len; ++i) {

    u8 orig = out_buf[i];

    /* Let's consult the effector map... */

    if (!skip_eff_map[i]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    for (j = 1; j <= ARITH_MAX; ++j) {

      u8 r = orig ^ (orig + j);

      /* Do arithmetic operations only if the result couldn't be a product
         of a bitflip. */

      if (!could_be_bitflip(r)) {

        afl->stage_cur_val = j;
        out_buf[i] = orig + j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH8+-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      r = orig ^ (orig - j);

      if (!could_be_bitflip(r)) {

        afl->stage_cur_val = -j;
        out_buf[i] = orig - j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH8--%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      out_buf[i] = orig;

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_ARITH8] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_ARITH8] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  /* 16-bit arithmetics, both endians. */

  if (len < 2) { goto skip_arith; }

  afl->stage_name = "arith 16/8";
  afl->stage_short = "arith16";
  afl->stage_cur = 0;
  afl->stage_max = 4 * (len - 1) * ARITH_MAX;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len - 1; ++i) {

    u16 orig = *(u16 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!skip_eff_map[i]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    for (j = 1; j <= ARITH_MAX; ++j) {

      u16 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
          r3 = orig ^ SWAP16(SWAP16(orig) + j),
          r4 = orig ^ SWAP16(SWAP16(orig) - j);

      /* Try little endian addition and subtraction first. Do it only
         if the operation would affect more than one byte (hence the
         & 0xff overflow checks) and if it couldn't be a product of
         a bitflip. */
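
      /* Example: if the low byte is 0xff, adding 1 carries into the high
         byte - something the 8-bit arith pass above could never do, so the
         16-bit variant is worth an exec. */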

      afl->stage_val_type = STAGE_VAL_LE;

      if ((orig & 0xff) + j > 0xff && !could_be_bitflip(r1)) {

        afl->stage_cur_val = j;
        *(u16 *)(out_buf + i) = orig + j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16+-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((orig & 0xff) < j && !could_be_bitflip(r2)) {

        afl->stage_cur_val = -j;
        *(u16 *)(out_buf + i) = orig - j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16--%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      /* Big endian comes next. Same deal. */

      afl->stage_val_type = STAGE_VAL_BE;

      if ((orig >> 8) + j > 0xff && !could_be_bitflip(r3)) {

        afl->stage_cur_val = j;
        *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) + j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16+BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((orig >> 8) < j && !could_be_bitflip(r4)) {

        afl->stage_cur_val = -j;
        *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) - j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16_BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      *(u16 *)(out_buf + i) = orig;

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_ARITH16] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_ARITH16] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  /* 32-bit arithmetics, both endians. */

  if (len < 4) { goto skip_arith; }

  afl->stage_name = "arith 32/8";
  afl->stage_short = "arith32";
  afl->stage_cur = 0;
  afl->stage_max = 4 * (len - 3) * ARITH_MAX;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len - 3; ++i) {

    u32 orig = *(u32 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!skip_eff_map[i]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    for (j = 1; j <= ARITH_MAX; ++j) {

      u32 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
          r3 = orig ^ SWAP32(SWAP32(orig) + j),
          r4 = orig ^ SWAP32(SWAP32(orig) - j);

      /* Little endian first. Same deal as with 16-bit: we only want to
         try if the operation would have effect on more than two bytes. */
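
      /* Example: 0x0000ffff + 1 carries into the upper half, beyond what
         the 16-bit pass could reach with values up to ARITH_MAX. */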

      afl->stage_val_type = STAGE_VAL_LE;

      if ((orig & 0xffff) + j > 0xffff && !could_be_bitflip(r1)) {

        afl->stage_cur_val = j;
        *(u32 *)(out_buf + i) = orig + j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32+-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((orig & 0xffff) < (u32)j && !could_be_bitflip(r2)) {

        afl->stage_cur_val = -j;
        *(u32 *)(out_buf + i) = orig - j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32_-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      /* Big endian next. */

      afl->stage_val_type = STAGE_VAL_BE;

      if ((SWAP32(orig) & 0xffff) + j > 0xffff && !could_be_bitflip(r3)) {

        afl->stage_cur_val = j;
        *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) + j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32+BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((SWAP32(orig) & 0xffff) < (u32)j && !could_be_bitflip(r4)) {

        afl->stage_cur_val = -j;
        *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) - j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32_BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      *(u32 *)(out_buf + i) = orig;

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_ARITH32] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_ARITH32] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

skip_arith:

  /**********************
   * INTERESTING VALUES *
   **********************/

  afl->stage_name = "interest 8/8";
  afl->stage_short = "int8";
  afl->stage_cur = 0;
  afl->stage_max = len * sizeof(interesting_8);

  afl->stage_val_type = STAGE_VAL_LE;

  orig_hit_cnt = new_hit_cnt;

  /* Setting 8-bit integers. */

  for (i = 0; i < (u32)len; ++i) {

    u8 orig = out_buf[i];

    /* Let's consult the effector map... */

    if (!skip_eff_map[i]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    for (j = 0; j < (u32)sizeof(interesting_8); ++j) {

      /* Skip if the value could be a product of bitflips or arithmetics. */

      if (could_be_bitflip(orig ^ (u8)interesting_8[j]) ||
          could_be_arith(orig, (u8)interesting_8[j], 1)) {

        --afl->stage_max;
        continue;

      }

      afl->stage_cur_val = interesting_8[j];
      out_buf[i] = interesting_8[j];

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation), "%s INTERESTING8_%u_%u",
               afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

      out_buf[i] = orig;
      ++afl->stage_cur;

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_INTEREST8] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_INTEREST8] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  /* Setting 16-bit integers, both endians. */

  if (afl->no_arith || len < 2) { goto skip_interest; }

  afl->stage_name = "interest 16/8";
  afl->stage_short = "int16";
  afl->stage_cur = 0;
  afl->stage_max = 2 * (len - 1) * (sizeof(interesting_16) >> 1);

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 1; ++i) {

    u16 orig = *(u16 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!skip_eff_map[i]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    for (j = 0; j < sizeof(interesting_16) / 2; ++j) {

      afl->stage_cur_val = interesting_16[j];

      /* Skip if this could be a product of a bitflip, arithmetics,
         or single-byte interesting value insertion. */

      if (!could_be_bitflip(orig ^ (u16)interesting_16[j]) &&
          !could_be_arith(orig, (u16)interesting_16[j], 2) &&
          !could_be_interest(orig, (u16)interesting_16[j], 2, 0)) {

        afl->stage_val_type = STAGE_VAL_LE;

        *(u16 *)(out_buf + i) = interesting_16[j];

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s INTERESTING16_%u_%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((u16)interesting_16[j] != SWAP16(interesting_16[j]) &&
          !could_be_bitflip(orig ^ SWAP16(interesting_16[j])) &&
          !could_be_arith(orig, SWAP16(interesting_16[j]), 2) &&
          !could_be_interest(orig, SWAP16(interesting_16[j]), 2, 1)) {

        afl->stage_val_type = STAGE_VAL_BE;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation),
                 "%s INTERESTING16BE_%u_%u", afl->queue_cur->fname, i, j);
#endif

        *(u16 *)(out_buf + i) = SWAP16(interesting_16[j]);
        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

    }

    *(u16 *)(out_buf + i) = orig;

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_INTEREST16] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_INTEREST16] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  if (len < 4) { goto skip_interest; }

  /* Setting 32-bit integers, both endians. */

  afl->stage_name = "interest 32/8";
  afl->stage_short = "int32";
  afl->stage_cur = 0;
  afl->stage_max = 2 * (len - 3) * (sizeof(interesting_32) >> 2);

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 3; i++) {

    u32 orig = *(u32 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!skip_eff_map[i]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    for (j = 0; j < sizeof(interesting_32) / 4; ++j) {

      afl->stage_cur_val = interesting_32[j];

      /* Skip if this could be a product of a bitflip, arithmetics,
         or word interesting value insertion. */

      if (!could_be_bitflip(orig ^ (u32)interesting_32[j]) &&
          !could_be_arith(orig, interesting_32[j], 4) &&
          !could_be_interest(orig, interesting_32[j], 4, 0)) {

        afl->stage_val_type = STAGE_VAL_LE;

        *(u32 *)(out_buf + i) = interesting_32[j];

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s INTERESTING32_%u_%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((u32)interesting_32[j] != SWAP32(interesting_32[j]) &&
          !could_be_bitflip(orig ^ SWAP32(interesting_32[j])) &&
          !could_be_arith(orig, SWAP32(interesting_32[j]), 4) &&
          !could_be_interest(orig, SWAP32(interesting_32[j]), 4, 1)) {

        afl->stage_val_type = STAGE_VAL_BE;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation),
                 "%s INTERESTING32BE_%u_%u", afl->queue_cur->fname, i, j);
#endif

        *(u32 *)(out_buf + i) = SWAP32(interesting_32[j]);
        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

    }

    *(u32 *)(out_buf + i) = orig;

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_INTEREST32] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_INTEREST32] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

skip_interest:

  /********************
   * DICTIONARY STUFF *
   ********************/

  if (!afl->extras_cnt) { goto skip_user_extras; }

  /* Overwrite with user-supplied extras. */

  afl->stage_name = "user extras (over)";
  afl->stage_short = "ext_UO";
  afl->stage_cur = 0;
  afl->stage_max = afl->extras_cnt * len;

  afl->stage_val_type = STAGE_VAL_NONE;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len; ++i) {

    u32 last_len = 0;

    if (!skip_eff_map[i]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    /* Extras are sorted by size, from smallest to largest. This means
       that we don't have to worry about restoring the buffer in
       between writes at a particular offset determined by the outer
       loop. */
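
    /* Each longer extra written at offset i fully covers the shorter one
       written before it, so one restore of last_len bytes after the inner
       loop is enough. */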

    for (j = 0; j < afl->extras_cnt; ++j) {

      /* Skip extras probabilistically if afl->extras_cnt > AFL_MAX_DET_EXTRAS.
         Also skip them if there's no room to insert the payload, if the token
         is redundant, or if its entire span has no bytes set in the effector
         map. */

      if ((afl->extras_cnt > afl->max_det_extras &&
           rand_below(afl, afl->extras_cnt) >= afl->max_det_extras) ||
          afl->extras[j].len > len - i ||
          !memcmp(afl->extras[j].data, out_buf + i, afl->extras[j].len)) {

        --afl->stage_max;
        continue;

      }

      last_len = afl->extras[j].len;
      memcpy(out_buf + i, afl->extras[j].data, last_len);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation),
               "%s EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

      ++afl->stage_cur;

    }

    /* Restore all the clobbered memory. */
    memcpy(out_buf + i, in_buf + i, last_len);

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_EXTRAS_UO] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_UO] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  /* Insertion of user-supplied extras. */

  afl->stage_name = "user extras (insert)";
  afl->stage_short = "ext_UI";
  afl->stage_cur = 0;
  afl->stage_max = afl->extras_cnt * (len + 1);

  orig_hit_cnt = new_hit_cnt;

  ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
  if (unlikely(!ex_tmp)) { PFATAL("alloc"); }

  for (i = 0; i <= (u32)len; ++i) {

    if (!skip_eff_map[i % len]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    for (j = 0; j < afl->extras_cnt; ++j) {

      if (len + afl->extras[j].len > MAX_FILE) {

        --afl->stage_max;
        continue;

      }

      /* Insert token */
      memcpy(ex_tmp + i, afl->extras[j].data, afl->extras[j].len);

      /* Copy tail */
      memcpy(ex_tmp + i + afl->extras[j].len, out_buf + i, len - i);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation), "%s EXTRAS_insert-%u-%u",
               afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, ex_tmp, len + afl->extras[j].len)) {

        goto abandon_entry;

      }

      ++afl->stage_cur;

    }

    /* Copy head */
    ex_tmp[i] = out_buf[i];

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_EXTRAS_UI] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_UI] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

skip_user_extras:

  if (!afl->a_extras_cnt) { goto skip_extras; }

  afl->stage_name = "auto extras (over)";
  afl->stage_short = "ext_AO";
  afl->stage_cur = 0;
  afl->stage_max = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS) * len;

  afl->stage_val_type = STAGE_VAL_NONE;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len; ++i) {

    u32 last_len = 0;

    if (!skip_eff_map[i]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    u32 min_extra_len = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS);
    for (j = 0; j < min_extra_len; ++j) {

      /* See the comment in the earlier code; extras are sorted by size. */

      if (afl->a_extras[j].len > len - i ||
          !memcmp(afl->a_extras[j].data, out_buf + i, afl->a_extras[j].len)) {

        --afl->stage_max;
        continue;

      }

      last_len = afl->a_extras[j].len;
      memcpy(out_buf + i, afl->a_extras[j].data, last_len);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation),
               "%s AUTO_EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

      ++afl->stage_cur;

    }

    /* Restore all the clobbered memory. */
    memcpy(out_buf + i, in_buf + i, last_len);

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_EXTRAS_AO] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_AO] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  /* Insertion of auto extras. */

  afl->stage_name = "auto extras (insert)";
  afl->stage_short = "ext_AI";
  afl->stage_cur = 0;
  afl->stage_max = afl->a_extras_cnt * (len + 1);

  orig_hit_cnt = new_hit_cnt;

  ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
  if (unlikely(!ex_tmp)) { PFATAL("alloc"); }

  for (i = 0; i <= (u32)len; ++i) {

    if (!skip_eff_map[i % len]) continue;

    if (is_det_timeout(before_det_time, 0)) { goto custom_mutator_stage; }

    afl->stage_cur_byte = i;

    for (j = 0; j < afl->a_extras_cnt; ++j) {

      if (len + afl->a_extras[j].len > MAX_FILE) {

        --afl->stage_max;
        continue;

      }

      /* Insert token */
      memcpy(ex_tmp + i, afl->a_extras[j].data, afl->a_extras[j].len);

      /* Copy tail */
      memcpy(ex_tmp + i + afl->a_extras[j].len, out_buf + i, len - i);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation),
               "%s AUTO_EXTRAS_insert-%u-%u", afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, ex_tmp, len + afl->a_extras[j].len)) {

        goto abandon_entry;

      }

      ++afl->stage_cur;

    }

    /* Copy head */
    ex_tmp[i] = out_buf[i];

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_EXTRAS_AI] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_AI] += afl->stage_max;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

skip_extras:

  /* If we made it this far without jumping to havoc_stage or abandon_entry,
     we're properly done with deterministic steps and can mark it as such
     in the .state/ directory. */

  if (!afl->queue_cur->passed_det) { mark_as_det_done(afl, afl->queue_cur); }

custom_mutator_stage:
  /*******************
   * CUSTOM MUTATORS *
   *******************/

  if (likely(!afl->custom_mutators_count)) { goto havoc_stage; }

  afl->stage_name = "custom mutator";
  afl->stage_short = "custom";
  afl->stage_cur = 0;
  afl->stage_val_type = STAGE_VAL_NONE;
  bool has_custom_fuzz = false;
  u32  shift = unlikely(afl->custom_only) ? 7 : 8;
  afl->stage_max = (HAVOC_CYCLES * perf_score / afl->havoc_div) >> shift;

  if (afl->stage_max < HAVOC_MIN) { afl->stage_max = HAVOC_MIN; }
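
  /* With custom_only set, the custom stage is the only mutation stage, so
     the smaller shift (7 instead of 8) doubles its iteration budget. */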

  const u32 max_seed_size = MAX_FILE, saved_max = afl->stage_max;

  orig_hit_cnt = afl->queued_items + afl->saved_crashes;

#ifdef INTROSPECTION
  afl->mutation[0] = 0;
#endif

  LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {

    if (el->afl_custom_fuzz) {

      havoc_queued = afl->queued_items;

      afl->current_custom_fuzz = el;
      afl->stage_name = el->name_short;

      if (el->afl_custom_fuzz_count) {

        afl->stage_max = el->afl_custom_fuzz_count(el->data, out_buf, len);

      } else {

        afl->stage_max = saved_max;

      }

      has_custom_fuzz = true;

      afl->stage_short = el->name_short;

      if (afl->stage_max) {

        for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max;
             ++afl->stage_cur) {

          struct queue_entry *target = NULL;
          u32                 tid;
          u8                 *new_buf = NULL;
          u32                 target_len = 0;

          /* check if splicing makes sense yet (enough entries) */
          if (likely(!afl->custom_splice_optout &&
                     afl->ready_for_splicing_count > 1)) {

            /* Pick a random other queue entry for passing to external API
               that has the necessary length */

            do {

              tid = rand_below(afl, afl->queued_items);

            } while (unlikely(tid == afl->current_entry ||

                              afl->queue_buf[tid]->len < 4));

            target = afl->queue_buf[tid];
            afl->splicing_with = tid;

            /* Read the additional testcase into a new buffer. */
            new_buf = queue_testcase_get(afl, target);
            target_len = target->len;

          }

          u8 *mutated_buf = NULL;

          size_t mutated_size =
              el->afl_custom_fuzz(el->data, out_buf, len, &mutated_buf, new_buf,
                                  target_len, max_seed_size);

          if (unlikely(!mutated_buf)) {

            // FATAL("Error in custom_fuzz. Size returned: %zu", mutated_size);
            break;

          }

          if (mutated_size > 0) {

            if (common_fuzz_stuff(afl, mutated_buf, (u32)mutated_size)) {

              goto abandon_entry;

            }

            if (!el->afl_custom_fuzz_count) {

              /* If we're finding new stuff, let's run for a bit longer, limits
                 permitting. */

              if (afl->queued_items != havoc_queued) {

                if (perf_score <= afl->havoc_max_mult * 100) {

                  afl->stage_max *= 2;
                  perf_score *= 2;

                }

                havoc_queued = afl->queued_items;

              }

            }

          }

          /* out_buf may have been changed by the call to custom_fuzz */
          memcpy(out_buf, in_buf, len);

        }

      }

    }

  });

  afl->current_custom_fuzz = NULL;

  if (!has_custom_fuzz) goto havoc_stage;

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_CUSTOM_MUTATOR] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_CUSTOM_MUTATOR] += afl->stage_cur;
#ifdef INTROSPECTION
  afl->queue_cur->stats_mutated += afl->stage_max;
#endif

  /****************
   * RANDOM HAVOC *
   ****************/

havoc_stage:

#ifdef INTROSPECTION

  if (!is_logged) {

    is_logged = 1;
    before_havoc_findings = afl->queued_items;
    before_havoc_edges = count_non_255_bytes(afl, afl->virgin_bits);
    before_havoc_time = get_cur_time();

  }

#endif

  if (unlikely(afl->custom_only)) {

    /* Force UI update */
    show_stats(afl);
    /* Skip other stages */
    ret_val = 0;
    goto abandon_entry;

  }

  afl->stage_cur_byte = -1;

  /* The havoc stage mutation code is also invoked when splicing files; if the
     splice_cycle variable is set, generate different descriptions and such. */

  if (!splice_cycle) {

    afl->stage_name = "havoc";
    afl->stage_short = "havoc";
    afl->stage_max = ((doing_det ? HAVOC_CYCLES_INIT : HAVOC_CYCLES) *
                      perf_score / afl->havoc_div) >>
                     8;

  } else {

    perf_score = orig_perf;

    snprintf(afl->stage_name_buf, STAGE_BUF_SIZE, "splice %u", splice_cycle);
    afl->stage_name = afl->stage_name_buf;
    afl->stage_short = "splice";
    afl->stage_max = (SPLICE_HAVOC * perf_score / afl->havoc_div) >> 8;

  }

  if (unlikely(afl->stage_max < HAVOC_MIN)) { afl->stage_max = HAVOC_MIN; }
2030 
2031   temp_len = len;
2032 
2033   orig_hit_cnt = afl->queued_items + afl->saved_crashes;
2034 
2035   havoc_queued = afl->queued_items;
2036 
2037   if (afl->custom_mutators_count) {
2038 
2039     LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {
2040 
2041       if (el->stacked_custom && el->afl_custom_havoc_mutation_probability) {
2042 
2043         el->stacked_custom_prob =
2044             el->afl_custom_havoc_mutation_probability(el->data);
2045         if (el->stacked_custom_prob > 100) {
2046 
2047           FATAL(
2048               "The probability returned by "
2049               "afl_custom_havoc_mutation_propability "
2050               "has to be in the range 0-100.");
2051 
2052         }
2053 
2054       }
2055 
2056     });
2057 
2058   }
2059 
2060   /* We essentially just do several thousand runs (depending on perf_score)
2061      where we take the input file and make random stacked tweaks. */
2062 
2063   u32 *mutation_array;
2064   u32  stack_max, rand_max;  // stack_max_pow = afl->havoc_stack_pow2;
2065 
2066   switch (afl->input_mode) {
2067 
2068     case 1: {  // TEXT
2069 
2070       if (likely(afl->fuzz_mode == 0)) {  // is exploration?
2071         mutation_array = (unsigned int *)&binary_array;
2072         rand_max = MUT_BIN_ARRAY_SIZE;
2073 
2074       } else {  // exploitation mode
2075 
2076         mutation_array = (unsigned int *)&text_array;
2077         rand_max = MUT_TXT_ARRAY_SIZE;
2078 
2079       }
2080 
2081       break;
2082 
2083     }
2084 
2085     case 2: {  // BINARY
2086 
2087       if (likely(afl->fuzz_mode == 0)) {  // is exploration?
2088         mutation_array = (unsigned int *)&mutation_strategy_exploration_binary;
2089         rand_max = MUT_STRATEGY_ARRAY_SIZE;
2090 
2091       } else {  // exploitation mode
2092 
2093         mutation_array = (unsigned int *)&mutation_strategy_exploitation_binary;
2094         rand_max = MUT_STRATEGY_ARRAY_SIZE;
2095         // or this one? we do not have enough binary bug benchmarks :-(
2096         // mutation_array = (unsigned int *)&binary_array;
2097         // rand_max = MUT_BIN_ARRAY_SIZE;
2098 
2099       }
2100 
2101       break;
2102 
2103     }
2104 
2105     default: {  // DEFAULT/GENERIC
2106 
2107       if (likely(afl->fuzz_mode == 0)) {  // is exploration?
2108         mutation_array = (unsigned int *)&binary_array;
2109         rand_max = MUT_BIN_ARRAY_SIZE;
2110 
2111       } else {  // exploitation mode
2112 
2113         mutation_array = (unsigned int *)&text_array;
2114         rand_max = MUT_TXT_ARRAY_SIZE;
2115 
2116       }
2117 
2118       break;
2119 
2120     }
2121 
2122   }
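
       /* Net effect of the selection above: explicit binary input mode gets
          the dedicated binary strategy tables, text mode only switches to
          the text-oriented table when exploiting, and the generic default
          mirrors the text case; rand_max always tracks the size of the
          chosen array. */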
2123 
2124   /*
2125   if (temp_len < 64) {
2126 
2127     --stack_max_pow;
2128 
2129   } else if (temp_len <= 8096) {
2130 
2131     ++stack_max_pow;
2132 
2133   } else {
2134 
2135     ++stack_max_pow;
2136 
2137   }
2138 
2139   */
2140 
2141   stack_max = 1 << (1 + rand_below(afl, afl->havoc_stack_pow2));
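
       /* With the default havoc_stack_pow2 (HAVOC_STACK_POW2 == 7 in
          config.h) stack_max is drawn from {2, 4, 8, 16, 32, 64, 128};
          use_stacking below is then uniform in [1, stack_max], so most
          iterations stack only a handful of mutations from mutation_array. */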
2142 
2143   // + (afl->extras_cnt ? 2 : 0) + (afl->a_extras_cnt ? 2 : 0);
2144 
2145   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
2146 
2147     u32 use_stacking = 1 + rand_below(afl, stack_max);
2148 
2149     afl->stage_cur_val = use_stacking;
2150 
2151 #ifdef INTROSPECTION
2152     snprintf(afl->mutation, sizeof(afl->mutation), "%s HAVOC-%u-%u",
2153              afl->queue_cur->fname, afl->queue_cur->is_ascii, use_stacking);
2154 #endif
2155 
2156     for (i = 0; i < use_stacking; ++i) {
2157 
2158       if (afl->custom_mutators_count) {
2159 
2160         LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {
2161 
2162           if (unlikely(el->stacked_custom &&
2163                        rand_below(afl, 100) < el->stacked_custom_prob)) {
2164 
2165             u8    *custom_havoc_buf = NULL;
2166             size_t new_len = el->afl_custom_havoc_mutation(
2167                 el->data, out_buf, temp_len, &custom_havoc_buf, MAX_FILE);
2168             if (unlikely(!custom_havoc_buf)) {
2169 
2170               FATAL("Error in custom_havoc (return %zu)", new_len);
2171 
2172             }
2173 
2174             if (likely(new_len > 0 && custom_havoc_buf)) {
2175 
2176               temp_len = new_len;
2177               if (out_buf != custom_havoc_buf) {
2178 
2179                 out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len);
2180                 if (unlikely(!afl->out_buf)) { PFATAL("alloc"); }
2181                 memcpy(out_buf, custom_havoc_buf, temp_len);
2182 
2183               }
2184 
2185             }
2186 
2187           }
2188 
2189         });
2190 
2191       }
2192 
2193     retry_havoc_step: {
2194 
2195       u32 r = rand_below(afl, rand_max), item;
2196 
2197       switch (mutation_array[r]) {
2198 
2199         case MUT_FLIPBIT: {
2200 
2201           /* Flip a single bit somewhere. Spooky! */
2202           u8  bit = rand_below(afl, 8);
2203           u32 off = rand_below(afl, temp_len);
2204           out_buf[off] ^= 1 << bit;
2205 
2206 #ifdef INTROSPECTION
2207           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP-BIT_%u", bit);
2208           strcat(afl->mutation, afl->m_tmp);
2209 #endif
2210           break;
2211 
2212         }
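
             /* E.g. with out_buf[off] == 'A' (0x41) and bit == 1, the XOR
                with (1 << 1) yields 0x43 ('C') - a guaranteed one-bit
                change. */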
2213 
2214         case MUT_INTERESTING8: {
2215 
2216           /* Set byte to interesting value. */
2217 
2218           item = rand_below(afl, sizeof(interesting_8));
2219 #ifdef INTROSPECTION
2220           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING8_%u", item);
2221           strcat(afl->mutation, afl->m_tmp);
2222 #endif
2223           out_buf[rand_below(afl, temp_len)] = interesting_8[item];
2224           break;
2225 
2226         }
2227 
2228         case MUT_INTERESTING16: {
2229 
2230           /* Set word to interesting value, little endian. */
2231 
2232           if (unlikely(temp_len < 2)) { break; }  // no retry
2233 
2234           item = rand_below(afl, sizeof(interesting_16) >> 1);
2235 #ifdef INTROSPECTION
2236           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16_%u", item);
2237           strcat(afl->mutation, afl->m_tmp);
2238 #endif
2239 
2240           *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
2241               interesting_16[item];
2242 
2243           break;
2244 
2245         }
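
             /* Note the offset bound: rand_below(afl, temp_len - 1) yields
                0 .. temp_len - 2, so both bytes of the (unaligned) u16 store
                stay inside out_buf; the 32-bit cases use temp_len - 3 for
                the same reason. */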
2246 
2247         case MUT_INTERESTING16BE: {
2248 
2249           /* Set word to interesting value, big endian. */
2250 
2251           if (unlikely(temp_len < 2)) { break; }  // no retry
2252 
2253           item = rand_below(afl, sizeof(interesting_16) >> 1);
2254 #ifdef INTROSPECTION
2255           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16BE_%u", item);
2256           strcat(afl->mutation, afl->m_tmp);
2257 #endif
2258           *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
2259               SWAP16(interesting_16[item]);
2260 
2261           break;
2262 
2263         }
2264 
2265         case MUT_INTERESTING32: {
2266 
2267           /* Set dword to interesting value, little endian. */
2268 
2269           if (unlikely(temp_len < 4)) { break; }  // no retry
2270 
2271           item = rand_below(afl, sizeof(interesting_32) >> 2);
2272 #ifdef INTROSPECTION
2273           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32_%u", item);
2274           strcat(afl->mutation, afl->m_tmp);
2275 #endif
2276 
2277           *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
2278               interesting_32[item];
2279 
2280           break;
2281 
2282         }
2283 
2284         case MUT_INTERESTING32BE: {
2285 
2286           /* Set dword to interesting value, big endian. */
2287 
2288           if (unlikely(temp_len < 4)) { break; }  // no retry
2289 
2290           item = rand_below(afl, sizeof(interesting_32) >> 2);
2291 #ifdef INTROSPECTION
2292           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32BE_%u", item);
2293           strcat(afl->mutation, afl->m_tmp);
2294 #endif
2295           *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
2296               SWAP32(interesting_32[item]);
2297 
2298           break;
2299 
2300         }
2301 
2302         case MUT_ARITH8_: {
2303 
2304           /* Randomly subtract from byte. */
2305 
2306           item = 1 + rand_below(afl, ARITH_MAX);
2307 #ifdef INTROSPECTION
2308           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8-_%u", item);
2309           strcat(afl->mutation, afl->m_tmp);
2310 #endif
2311           out_buf[rand_below(afl, temp_len)] -= item;
2312           break;
2313 
2314         }
2315 
2316         case MUT_ARITH8: {
2317 
2318           /* Randomly add to byte. */
2319 
2320           item = 1 + rand_below(afl, ARITH_MAX);
2321 #ifdef INTROSPECTION
2322           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8+_%u", item);
2323           strcat(afl->mutation, afl->m_tmp);
2324 #endif
2325           out_buf[rand_below(afl, temp_len)] += item;
2326           break;
2327 
2328         }
2329 
2330         case MUT_ARITH16_: {
2331 
2332           /* Randomly subtract from word, little endian. */
2333 
2334           if (unlikely(temp_len < 2)) { break; }  // no retry
2335 
2336           u32 pos = rand_below(afl, temp_len - 1);
2337           item = 1 + rand_below(afl, ARITH_MAX);
2338 
2339 #ifdef INTROSPECTION
2340           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16-_%u", item);
2341           strcat(afl->mutation, afl->m_tmp);
2342 #endif
2343           *(u16 *)(out_buf + pos) -= item;
2344 
2345           break;
2346 
2347         }
2348 
2349         case MUT_ARITH16BE_: {
2350 
2351           /* Randomly subtract from word, big endian. */
2352 
2353           if (unlikely(temp_len < 2)) { break; }  // no retry
2354 
2355           u32 pos = rand_below(afl, temp_len - 1);
2356           u16 num = 1 + rand_below(afl, ARITH_MAX);
2357 
2358 #ifdef INTROSPECTION
2359           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16BE-_%u", num);
2360           strcat(afl->mutation, afl->m_tmp);
2361 #endif
2362           *(u16 *)(out_buf + pos) =
2363               SWAP16(SWAP16(*(u16 *)(out_buf + pos)) - num);
2364 
2365           break;
2366 
2367         }
2368 
2369         case MUT_ARITH16: {
2370 
2371           /* Randomly add to word, little endian. */
2372 
2373           if (unlikely(temp_len < 2)) { break; }  // no retry
2374 
2375           u32 pos = rand_below(afl, temp_len - 1);
2376           item = 1 + rand_below(afl, ARITH_MAX);
2377 
2378 #ifdef INTROSPECTION
2379           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+_%u", item);
2380           strcat(afl->mutation, afl->m_tmp);
2381 #endif
2382           *(u16 *)(out_buf + pos) += item;
2383 
2384           break;
2385 
2386         }
2387 
2388         case MUT_ARITH16BE: {
2389 
2390           /* Randomly add to word, big endian. */
2391 
2392           if (unlikely(temp_len < 2)) { break; }  // no retry
2393 
2394           u32 pos = rand_below(afl, temp_len - 1);
2395           u16 num = 1 + rand_below(afl, ARITH_MAX);
2396 
2397 #ifdef INTROSPECTION
2398           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16BE+__%u", num);
2399           strcat(afl->mutation, afl->m_tmp);
2400 #endif
2401           *(u16 *)(out_buf + pos) =
2402               SWAP16(SWAP16(*(u16 *)(out_buf + pos)) + num);
2403 
2404           break;
2405 
2406         }
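
             /* The SWAP16(SWAP16(x) + num) pattern reads the word as big
                endian, does the arithmetic, and stores it back big endian.
                On a little-endian host, bytes {0x00, 0xff} are the BE value
                255; adding 1 yields 256, stored back as {0x01, 0x00}. */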
2407 
2408         case MUT_ARITH32_: {
2409 
2410           /* Randomly subtract from dword, little endian. */
2411 
2412           if (unlikely(temp_len < 4)) { break; }  // no retry
2413 
2414           u32 pos = rand_below(afl, temp_len - 3);
2415           item = 1 + rand_below(afl, ARITH_MAX);
2416 
2417 #ifdef INTROSPECTION
2418           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32-_%u", item);
2419           strcat(afl->mutation, afl->m_tmp);
2420 #endif
2421           *(u32 *)(out_buf + pos) -= item;
2422 
2423           break;
2424 
2425         }
2426 
2427         case MUT_ARITH32BE_: {
2428 
2429           /* Randomly subtract from dword, big endian. */
2430 
2431           if (unlikely(temp_len < 4)) { break; }  // no retry
2432 
2433           u32 pos = rand_below(afl, temp_len - 3);
2434           u32 num = 1 + rand_below(afl, ARITH_MAX);
2435 
2436 #ifdef INTROSPECTION
2437           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32BE-_%u", num);
2438           strcat(afl->mutation, afl->m_tmp);
2439 #endif
2440           *(u32 *)(out_buf + pos) =
2441               SWAP32(SWAP32(*(u32 *)(out_buf + pos)) - num);
2442 
2443           break;
2444 
2445         }
2446 
2447         case MUT_ARITH32: {
2448 
2449           /* Randomly add to dword, little endian. */
2450 
2451           if (unlikely(temp_len < 4)) { break; }  // no retry
2452 
2453           u32 pos = rand_below(afl, temp_len - 3);
2454           item = 1 + rand_below(afl, ARITH_MAX);
2455 
2456 #ifdef INTROSPECTION
2457           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+_%u", item);
2458           strcat(afl->mutation, afl->m_tmp);
2459 #endif
2460           *(u32 *)(out_buf + pos) += item;
2461 
2462           break;
2463 
2464         }
2465 
2466         case MUT_ARITH32BE: {
2467 
2468           /* Randomly add to dword, big endian. */
2469 
2470           if (unlikely(temp_len < 4)) { break; }  // no retry
2471 
2472           u32 pos = rand_below(afl, temp_len - 3);
2473           u32 num = 1 + rand_below(afl, ARITH_MAX);
2474 
2475 #ifdef INTROSPECTION
2476           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32BE+_%u", num);
2477           strcat(afl->mutation, afl->m_tmp);
2478 #endif
2479           *(u32 *)(out_buf + pos) =
2480               SWAP32(SWAP32(*(u32 *)(out_buf + pos)) + num);
2481 
2482           break;
2483 
2484         }
2485 
2486         case MUT_RAND8: {
2487 
2488           /* Just set a random byte to a random value. Because,
2489              why not. We use XOR with 1-255 to eliminate the
2490              possibility of a no-op. */
2491 
2492           u32 pos = rand_below(afl, temp_len);
2493           item = 1 + rand_below(afl, 255);
2494 #ifdef INTROSPECTION
2495           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " RAND8_%u",
2496                    out_buf[pos] ^ item);
2497           strcat(afl->mutation, afl->m_tmp);
2498 #endif
2499           out_buf[pos] ^= item;
2500           break;
2501 
2502         }
2503 
2504         case MUT_CLONE_COPY: {
2505 
2506           if (likely(temp_len + HAVOC_BLK_XL < MAX_FILE)) {
2507 
2508             /* Clone bytes. */
2509 
2510             u32 clone_len = choose_block_len(afl, temp_len);
2511             u32 clone_from = rand_below(afl, temp_len - clone_len + 1);
2512             u32 clone_to = rand_below(afl, temp_len);
2513 
2514 #ifdef INTROSPECTION
2515             snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s_%u_%u_%u",
2516                      "COPY", clone_from, clone_to, clone_len);
2517             strcat(afl->mutation, afl->m_tmp);
2518 #endif
2519             u8 *new_buf =
2520                 afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len);
2521             if (unlikely(!new_buf)) { PFATAL("alloc"); }
2522 
2523             /* Head */
2524 
2525             memcpy(new_buf, out_buf, clone_to);
2526 
2527             /* Inserted part */
2528 
2529             memcpy(new_buf + clone_to, out_buf + clone_from, clone_len);
2530 
2531             /* Tail */
2532             memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
2533                    temp_len - clone_to);
2534 
2535             out_buf = new_buf;
2536             afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
2537             temp_len += clone_len;
2538 
2539           } else if (unlikely(temp_len < 8)) {
2540 
2541             break;
2542 
2543           } else {
2544 
2545             goto retry_havoc_step;
2546 
2547           }
2548 
2549           break;
2550 
2551         }
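
             /* The clone is assembled copy-wise in a scratch buffer,

                  new_buf: [ head: 0 .. clone_to ) [ clone_len bytes ] [ tail ]

                which is then swapped in as out_buf, so no overlapping
                memmove is needed; MUT_CLONE_FIXED below uses the same
                head/insert/tail layout with a constant filler byte. */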
2552 
2553         case MUT_CLONE_FIXED: {
2554 
2555           if (likely(temp_len + HAVOC_BLK_XL < MAX_FILE)) {
2556 
2557             /* Insert a block of constant bytes (25%). */
2558 
2559             u32 clone_len = choose_block_len(afl, HAVOC_BLK_XL);
2560             u32 clone_to = rand_below(afl, temp_len);
2561             u32 strat = rand_below(afl, 2);
2562             u32 clone_from = clone_to ? clone_to - 1 : 0;
2563             item = strat ? rand_below(afl, 256) : out_buf[clone_from];
2564 
2565 #ifdef INTROSPECTION
2566             snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s_%u_%u_%u",
2567                      "FIXED", strat, clone_to, clone_len);
2568             strcat(afl->mutation, afl->m_tmp);
2569 #endif
2570             u8 *new_buf =
2571                 afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len);
2572             if (unlikely(!new_buf)) { PFATAL("alloc"); }
2573 
2574             /* Head */
2575 
2576             memcpy(new_buf, out_buf, clone_to);
2577 
2578             /* Inserted part */
2579 
2580             memset(new_buf + clone_to, item, clone_len);
2581 
2582             /* Tail */
2583             memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
2584                    temp_len - clone_to);
2585 
2586             out_buf = new_buf;
2587             afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
2588             temp_len += clone_len;
2589 
2590           } else if (unlikely(temp_len < 8)) {
2591 
2592             break;
2593 
2594           } else {
2595 
2596             goto retry_havoc_step;
2597 
2598           }
2599 
2600           break;
2601 
2602         }
2603 
2604         case MUT_OVERWRITE_COPY: {
2605 
2606           /* Overwrite bytes with a randomly selected chunk of bytes. */
2607 
2608           if (unlikely(temp_len < 2)) { break; }  // no retry
2609 
2610           u32 copy_from, copy_to,
2611               copy_len = choose_block_len(afl, temp_len - 1);
2612 
2613           do {
2614 
2615             copy_from = rand_below(afl, temp_len - copy_len + 1);
2616             copy_to = rand_below(afl, temp_len - copy_len + 1);
2617 
2618           } while (unlikely(copy_from == copy_to));
2619 
2620 #ifdef INTROSPECTION
2621           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " OVERWRITE-COPY_%u_%u_%u",
2622                    copy_from, copy_to, copy_len);
2623           strcat(afl->mutation, afl->m_tmp);
2624 #endif
2625           memmove(out_buf + copy_to, out_buf + copy_from, copy_len);
2626 
2627           break;
2628 
2629         }
2630 
2631         case MUT_OVERWRITE_FIXED: {
2632 
2633           /* Overwrite bytes with fixed bytes. */
2634 
2635           if (unlikely(temp_len < 2)) { break; }  // no retry
2636 
2637           u32 copy_len = choose_block_len(afl, temp_len - 1);
2638           u32 copy_to = rand_below(afl, temp_len - copy_len + 1);
2639           u32 strat = rand_below(afl, 2);
2640           u32 copy_from = copy_to ? copy_to - 1 : 0;
2641           item = strat ? rand_below(afl, 256) : out_buf[copy_from];
2642 
2643 #ifdef INTROSPECTION
2644           snprintf(afl->m_tmp, sizeof(afl->m_tmp),
2645                    " OVERWRITE-FIXED_%u_%u_%u-%u", strat, item, copy_to,
2646                    copy_len);
2647           strcat(afl->mutation, afl->m_tmp);
2648 #endif
2649           memset(out_buf + copy_to, item, copy_len);
2650 
2651           break;
2652 
2653         }
2654 
2655         case MUT_BYTEADD: {
2656 
2657           /* Increase byte by 1. */
2658 
2659 #ifdef INTROSPECTION
2660           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " BYTEADD_");
2661           strcat(afl->mutation, afl->m_tmp);
2662 #endif
2663           out_buf[rand_below(afl, temp_len)]++;
2664           break;
2665 
2666         }
2667 
2668         case MUT_BYTESUB: {
2669 
2670           /* Decrease byte by 1. */
2671 
2672 #ifdef INTROSPECTION
2673           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " BYTESUB_");
2674           strcat(afl->mutation, afl->m_tmp);
2675 #endif
2676           out_buf[rand_below(afl, temp_len)]--;
2677           break;
2678 
2679         }
2680 
2681         case MUT_FLIP8: {
2682 
2683           /* Flip byte with a XOR 0xff. This is the same as NEG. */
2684 
2685 #ifdef INTROSPECTION
2686           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP8_");
2687           strcat(afl->mutation, afl->m_tmp);
2688 #endif
2689           out_buf[rand_below(afl, temp_len)] ^= 0xff;
2690           break;
2691 
2692         }
2693 
2694         case MUT_SWITCH: {
2695 
2696           if (unlikely(temp_len < 4)) { break; }  // no retry
2697 
2698           /* Switch bytes. */
2699 
2700           u32 to_end, switch_to, switch_len, switch_from;
2701           switch_from = rand_below(afl, temp_len);
2702           do {
2703 
2704             switch_to = rand_below(afl, temp_len);
2705 
2706           } while (unlikely(switch_from == switch_to));
2707 
2708           if (switch_from < switch_to) {
2709 
2710             switch_len = switch_to - switch_from;
2711             to_end = temp_len - switch_to;
2712 
2713           } else {
2714 
2715             switch_len = switch_from - switch_to;
2716             to_end = temp_len - switch_from;
2717 
2718           }
2719 
2720           switch_len = choose_block_len(afl, MIN(switch_len, to_end));
2721 
2722 #ifdef INTROSPECTION
2723           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " SWITCH-%s_%u_%u_%u",
2724                    "switch", switch_from, switch_to, switch_len);
2725           strcat(afl->mutation, afl->m_tmp);
2726 #endif
2727           u8 *new_buf = afl_realloc(AFL_BUF_PARAM(out_scratch), switch_len);
2728           if (unlikely(!new_buf)) { PFATAL("alloc"); }
2729 
2730           /* Backup */
2731 
2732           memcpy(new_buf, out_buf + switch_from, switch_len);
2733 
2734           /* Switch 1 */
2735 
2736           memcpy(out_buf + switch_from, out_buf + switch_to, switch_len);
2737 
2738           /* Switch 2 */
2739 
2740           memcpy(out_buf + switch_to, new_buf, switch_len);
2741 
2742           break;
2743 
2744         }
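
             /* Because switch_len is clamped through MIN(switch_len, to_end)
                before choose_block_len(), the chosen length never exceeds
                the distance between switch_from and switch_to, so the two
                copied regions are disjoint and neither memcpy runs past the
                end of out_buf. */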
2745 
2746         case MUT_DEL: {
2747 
2748           /* Delete bytes. */
2749 
2750           if (unlikely(temp_len < 2)) { break; }  // no retry
2751 
2752           /* Don't delete too much. */
2753 
2754           u32 del_len = choose_block_len(afl, temp_len - 1);
2755           u32 del_from = rand_below(afl, temp_len - del_len + 1);
2756 
2757 #ifdef INTROSPECTION
2758           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " DEL_%u_%u", del_from,
2759                    del_len);
2760           strcat(afl->mutation, afl->m_tmp);
2761 #endif
2762           memmove(out_buf + del_from, out_buf + del_from + del_len,
2763                   temp_len - del_from - del_len);
2764 
2765           temp_len -= del_len;
2766 
2767           break;
2768 
2769         }
2770 
2771         case MUT_SHUFFLE: {
2772 
2773           /* Shuffle bytes. */
2774 
2775           if (unlikely(temp_len < 4)) { break; }  // no retry
2776 
2777           u32 len = choose_block_len(afl, temp_len - 1);
2778           u32 off = rand_below(afl, temp_len - len + 1);
2779 
2780 #ifdef INTROSPECTION
2781           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " SHUFFLE_%u", len);
2782           strcat(afl->mutation, afl->m_tmp);
2783 #endif
2784 
2785           for (u32 i = len - 1; i > 0; i--) {
2786 
2787             u32 j;
2788             do {
2789 
2790               j = rand_below(afl, i + 1);
2791 
2792             } while (unlikely(i == j));
2793 
2794             unsigned char temp = out_buf[off + i];
2795             out_buf[off + i] = out_buf[off + j];
2796             out_buf[off + j] = temp;
2797 
2798           }
2799 
2800           break;
2801 
2802         }
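
             /* A Fisher-Yates style shuffle over out_buf[off .. off + len),
                with the twist that j == i is re-rolled so every iteration
                performs a real swap, at the cost of a slightly biased
                permutation. */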
2803 
2804         case MUT_DELONE: {
2805 
2806           /* Delete bytes. */
2807 
2808           if (unlikely(temp_len < 2)) { break; }  // no retry
2809 
2810           /* Don't delete too much. */
2811 
2812           u32 del_len = 1;
2813           u32 del_from = rand_below(afl, temp_len - del_len + 1);
2814 
2815 #ifdef INTROSPECTION
2816           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " DELONE_%u", del_from);
2817           strcat(afl->mutation, afl->m_tmp);
2818 #endif
2819           memmove(out_buf + del_from, out_buf + del_from + del_len,
2820                   temp_len - del_from - del_len);
2821 
2822           temp_len -= del_len;
2823 
2824           break;
2825 
2826         }
2827 
2828         case MUT_INSERTONE: {
2829 
2830           if (unlikely(temp_len < 2)) { break; }  // no retry
2831 
2832           u32 clone_len = 1;
2833           u32 clone_to = rand_below(afl, temp_len);
2834           u32 strat = rand_below(afl, 2);
2835           u32 clone_from = clone_to ? clone_to - 1 : 0;
2836           item = strat ? rand_below(afl, 256) : out_buf[clone_from];
2837 
2838 #ifdef INTROSPECTION
2839           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INSERTONE_%u_%u", strat,
2840                    clone_to);
2841           strcat(afl->mutation, afl->m_tmp);
2842 #endif
2843           u8 *new_buf =
2844               afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len);
2845           if (unlikely(!new_buf)) { PFATAL("alloc"); }
2846 
2847           /* Head */
2848 
2849           memcpy(new_buf, out_buf, clone_to);
2850 
2851           /* Inserted part */
2852 
2853           memset(new_buf + clone_to, item, clone_len);
2854 
2855           /* Tail */
2856           memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
2857                  temp_len - clone_to);
2858 
2859           out_buf = new_buf;
2860           afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
2861           temp_len += clone_len;
2862 
2863           break;
2864 
2865         }
2866 
2867         case MUT_ASCIINUM: {
2868 
2869           if (unlikely(temp_len < 4)) { break; }  // no retry
2870 
2871           u32 off = rand_below(afl, temp_len), off2 = off, cnt = 0;
2872 
2873           while (off2 + cnt < temp_len && !isdigit(out_buf[off2 + cnt])) {
2874 
2875             ++cnt;
2876 
2877           }
2878 
2879           // none found, wrap
2880           if (off2 + cnt == temp_len) {
2881 
2882             off2 = 0;
2883             cnt = 0;
2884 
2885             while (cnt < off && !isdigit(out_buf[off2 + cnt])) {
2886 
2887               ++cnt;
2888 
2889             }
2890 
2891             if (cnt == off) {
2892 
2893               if (temp_len < 8) {
2894 
2895                 break;
2896 
2897               } else {
2898 
2899                 goto retry_havoc_step;
2900 
2901               }
2902 
2903             }
2904 
2905           }
2906 
2907           off = off2 + cnt;
2908           off2 = off + 1;
2909 
2910           while (off2 < temp_len && isdigit(out_buf[off2])) {
2911 
2912             ++off2;
2913 
2914           }
2915 
2916           s64 val = out_buf[off] - '0';
2917           for (u32 i = off + 1; i < off2; ++i) {
2918 
2919             val = (val * 10) + out_buf[i] - '0';
2920 
2921           }
2922 
2923           if (off && out_buf[off - 1] == '-') { val = -val; }
2924 
2925           u32 strat = rand_below(afl, 8);
2926           switch (strat) {
2927 
2928             case 0:
2929               val++;
2930               break;
2931             case 1:
2932               val--;
2933               break;
2934             case 2:
2935               val *= 2;
2936               break;
2937             case 3:
2938               val /= 2;
2939               break;
2940             case 4:
2941               if (likely(val && (u64)val < 0x19999999)) {
2942 
2943                 val = (u64)rand_next(afl) % (u64)((u64)val * 10);
2944 
2945               } else {
2946 
2947                 val = rand_below(afl, 256);
2948 
2949               }
2950 
2951               break;
2952             case 5:
2953               val += rand_below(afl, 256);
2954               break;
2955             case 6:
2956               val -= rand_below(afl, 256);
2957               break;
2958             case 7:
2959               val = ~(val);
2960               break;
2961 
2962           }
2963 
2964 #ifdef INTROSPECTION
2965           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ASCIINUM_%u_%u_%u",
2966                    afl->queue_cur->is_ascii, strat, off);
2967           strcat(afl->mutation, afl->m_tmp);
2968 #endif
2969           // fprintf(stderr, "val: %u-%u = %ld\n", off, off2, val);
2970 
2971           char buf[20];
2972           snprintf(buf, sizeof(buf), "%" PRId64, val);
2973 
2974           // fprintf(stderr, "BEFORE: %s\n", out_buf);
2975 
2976           u32 old_len = off2 - off;
2977           u32 new_len = strlen(buf);
2978 
2979           if (old_len == new_len) {
2980 
2981             memcpy(out_buf + off, buf, new_len);
2982 
2983           } else {
2984 
2985             u8 *new_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
2986                                       temp_len + new_len - old_len);
2987             if (unlikely(!new_buf)) { PFATAL("alloc"); }
2988 
2989             /* Head */
2990 
2991             memcpy(new_buf, out_buf, off);
2992 
2993             /* Inserted part */
2994 
2995             memcpy(new_buf + off, buf, new_len);
2996 
2997             /* Tail */
2998             memcpy(new_buf + off + new_len, out_buf + off2, temp_len - off2);
2999 
3000             out_buf = new_buf;
3001             afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
3002             temp_len += (new_len - old_len);
3003 
3004           }
3005 
3006           // fprintf(stderr, "AFTER : %s\n", out_buf);
3007           break;
3008 
3009         }
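
             /* In short: locate a run of ASCII digits, parse it (honoring a
                leading '-'), mutate the value with one of the eight
                strategies, and splice the re-serialized decimal back in,
                growing or shrinking the buffer when the digit count changes.
                E.g. "len=12345" under strat 2 (val *= 2) becomes
                "len=24690". */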
3010 
3011         case MUT_INSERTASCIINUM: {
3012 
3013           u32 len = 1 + rand_below(afl, 8);
3014           u32 pos = rand_below(afl, temp_len);
3015           /* Overwrite bytes with an ascii number. */
3016           if (unlikely(temp_len < pos + len)) {
3017 
3018             if (unlikely(temp_len < 8)) {
3019 
3020               break;
3021 
3022             } else {
3023 
3024               goto retry_havoc_step;
3025 
3026             }
3027 
3028           }
3029 
3030 #ifdef INTROSPECTION
3031           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INSERTASCIINUM_");
3032           strcat(afl->mutation, afl->m_tmp);
3033 #endif
3034           u64  val = rand_next(afl);
3035           char buf[20];
3036           snprintf(buf, sizeof(buf), "%llu", val);
3037           memcpy(out_buf + pos, buf, len);
3038 
3039           break;
3040 
3041         }
3042 
3043         case MUT_EXTRA_OVERWRITE: {
3044 
3045           if (unlikely(!afl->extras_cnt)) { goto retry_havoc_step; }
3046 
3047           /* Use the dictionary. */
3048 
3049           u32 use_extra = rand_below(afl, afl->extras_cnt);
3050           u32 extra_len = afl->extras[use_extra].len;
3051 
3052           if (unlikely(extra_len > temp_len)) { goto retry_havoc_step; }
3053 
3054           u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
3055 #ifdef INTROSPECTION
3056           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA-OVERWRITE_%u_%u",
3057                    insert_at, extra_len);
3058           strcat(afl->mutation, afl->m_tmp);
3059 #endif
3060           memcpy(out_buf + insert_at, afl->extras[use_extra].data, extra_len);
3061 
3062           break;
3063 
3064         }
3065 
3066         case MUT_EXTRA_INSERT: {
3067 
3068           if (unlikely(!afl->extras_cnt)) { goto retry_havoc_step; }
3069 
3070           u32 use_extra = rand_below(afl, afl->extras_cnt);
3071           u32 extra_len = afl->extras[use_extra].len;
3072           if (unlikely(temp_len + extra_len >= MAX_FILE)) {
3073 
3074             goto retry_havoc_step;
3075 
3076           }
3077 
3078           u8 *ptr = afl->extras[use_extra].data;
3079           u32 insert_at = rand_below(afl, temp_len + 1);
3080 #ifdef INTROSPECTION
3081           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA-INSERT_%u_%u",
3082                    insert_at, extra_len);
3083           strcat(afl->mutation, afl->m_tmp);
3084 #endif
3085 
3086           out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
3087           if (unlikely(!out_buf)) { PFATAL("alloc"); }
3088 
3089           /* Tail */
3090           memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
3091                   temp_len - insert_at);
3092 
3093           /* Inserted part */
3094           memcpy(out_buf + insert_at, ptr, extra_len);
3095           temp_len += extra_len;
3096 
3097           break;
3098 
3099         }
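
             /* Ordering matters for the in-place insert: the tail is
                memmove'd out of the way first (source and destination
                overlap after the realloc), and only then is the token copied
                into the gap; MUT_AUTO_EXTRA_INSERT below is identical but
                draws from the auto-detected tokens. */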
3100 
3101         case MUT_AUTO_EXTRA_OVERWRITE: {
3102 
3103           if (unlikely(!afl->a_extras_cnt)) { goto retry_havoc_step; }
3104 
3105           /* Use the dictionary. */
3106 
3107           u32 use_extra = rand_below(afl, afl->a_extras_cnt);
3108           u32 extra_len = afl->a_extras[use_extra].len;
3109 
3110           if (unlikely(extra_len > temp_len)) { goto retry_havoc_step; }
3111 
3112           u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
3113 #ifdef INTROSPECTION
3114           snprintf(afl->m_tmp, sizeof(afl->m_tmp),
3115                    " AUTO-EXTRA-OVERWRITE_%u_%u", insert_at, extra_len);
3116           strcat(afl->mutation, afl->m_tmp);
3117 #endif
3118           memcpy(out_buf + insert_at, afl->a_extras[use_extra].data, extra_len);
3119 
3120           break;
3121 
3122         }
3123 
3124         case MUT_AUTO_EXTRA_INSERT: {
3125 
3126           if (unlikely(!afl->a_extras_cnt)) { goto retry_havoc_step; }
3127 
3128           u32 use_extra = rand_below(afl, afl->a_extras_cnt);
3129           u32 extra_len = afl->a_extras[use_extra].len;
3130           if (unlikely(temp_len + extra_len >= MAX_FILE)) {
3131 
3132             goto retry_havoc_step;
3133 
3134           }
3135 
3136           u8 *ptr = afl->a_extras[use_extra].data;
3137           u32 insert_at = rand_below(afl, temp_len + 1);
3138 #ifdef INTROSPECTION
3139           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " AUTO-EXTRA-INSERT_%u_%u",
3140                    insert_at, extra_len);
3141           strcat(afl->mutation, afl->m_tmp);
3142 #endif
3143 
3144           out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
3145           if (unlikely(!out_buf)) { PFATAL("alloc"); }
3146 
3147           /* Tail */
3148           memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
3149                   temp_len - insert_at);
3150 
3151           /* Inserted part */
3152           memcpy(out_buf + insert_at, ptr, extra_len);
3153           temp_len += extra_len;
3154 
3155           break;
3156 
3157         }
3158 
3159         case MUT_SPLICE_OVERWRITE: {
3160 
3161           if (unlikely(afl->ready_for_splicing_count <= 1)) {
3162 
3163             goto retry_havoc_step;
3164 
3165           }
3166 
3167           /* Pick a random queue entry and seek to it. */
3168 
3169           u32 tid;
3170           do {
3171 
3172             tid = rand_below(afl, afl->queued_items);
3173 
3174           } while (unlikely(tid == afl->current_entry ||
3175 
3176                             afl->queue_buf[tid]->len < 4));
3177 
3178           /* Get the testcase for splicing. */
3179           struct queue_entry *target = afl->queue_buf[tid];
3180           u32                 new_len = target->len;
3181           u8                 *new_buf = queue_testcase_get(afl, target);
3182 
3183           /* overwrite mode */
3184 
3185           u32 copy_from, copy_to, copy_len;
3186 
3187           copy_len = choose_block_len(afl, new_len - 1);
3188           if (copy_len > temp_len) copy_len = temp_len;
3189 
3190           copy_from = rand_below(afl, new_len - copy_len + 1);
3191           copy_to = rand_below(afl, temp_len - copy_len + 1);
3192 
3193 #ifdef INTROSPECTION
3194           snprintf(afl->m_tmp, sizeof(afl->m_tmp),
3195                    " SPLICE-OVERWRITE_%u_%u_%u_%s", copy_from, copy_to,
3196                    copy_len, target->fname);
3197           strcat(afl->mutation, afl->m_tmp);
3198 #endif
3199           memmove(out_buf + copy_to, new_buf + copy_from, copy_len);
3200 
3201           break;
3202 
3203         }
3204 
3205         case MUT_SPLICE_INSERT: {
3206 
3207           if (unlikely(afl->ready_for_splicing_count <= 1)) {
3208 
3209             goto retry_havoc_step;
3210 
3211           }
3212 
3213           if (unlikely(temp_len + HAVOC_BLK_XL >= MAX_FILE)) {
3214 
3215             goto retry_havoc_step;
3216 
3217           }
3218 
3219           /* Pick a random queue entry and seek to it. */
3220 
3221           u32 tid;
3222           do {
3223 
3224             tid = rand_below(afl, afl->queued_items);
3225 
3226           } while (unlikely(tid == afl->current_entry ||
3227 
3228                             afl->queue_buf[tid]->len < 4));
3229 
3230           /* Get the testcase for splicing. */
3231           struct queue_entry *target = afl->queue_buf[tid];
3232           u32                 new_len = target->len;
3233           u8                 *new_buf = queue_testcase_get(afl, target);
3234 
3235           /* insert mode */
3236 
3237           u32 clone_from, clone_to, clone_len;
3238 
3239           clone_len = choose_block_len(afl, new_len);
3240           clone_from = rand_below(afl, new_len - clone_len + 1);
3241           clone_to = rand_below(afl, temp_len + 1);
3242 
3243           u8 *temp_buf =
3244               afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len + 1);
3245           if (unlikely(!temp_buf)) { PFATAL("alloc"); }
3246 
3247 #ifdef INTROSPECTION
3248           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " SPLICE-INSERT_%u_%u_%u_%s",
3249                    clone_from, clone_to, clone_len, target->fname);
3250           strcat(afl->mutation, afl->m_tmp);
3251 #endif
3252           /* Head */
3253 
3254           memcpy(temp_buf, out_buf, clone_to);
3255 
3256           /* Inserted part */
3257 
3258           memcpy(temp_buf + clone_to, new_buf + clone_from, clone_len);
3259 
3260           /* Tail */
3261           memcpy(temp_buf + clone_to + clone_len, out_buf + clone_to,
3262                  temp_len - clone_to);
3263 
3264           out_buf = temp_buf;
3265           afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
3266           temp_len += clone_len;
3267 
3268           break;
3269 
3270         }
3271 
3272       }
3273 
3274     }
3275 
3276     }
3277 
3278     if (common_fuzz_stuff(afl, out_buf, temp_len)) { goto abandon_entry; }
3279 
3280     /* out_buf might have been mangled a bit, so let's restore it to its
3281        original size and shape. */
3282 
3283     out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
3284     if (unlikely(!out_buf)) { PFATAL("alloc"); }
3285     temp_len = len;
3286     memcpy(out_buf, in_buf, len);
3287 
3288     /* If we're finding new stuff, let's run for a bit longer, limits
3289        permitting. */
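
         /* Whenever new finds arrived since the last check, both stage_max
            and perf_score are doubled, so the havoc budget can grow
            geometrically - but only while perf_score stays at or below
            havoc_max_mult * 100, which bounds the total extension. */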
3290 
3291     if (afl->queued_items != havoc_queued) {
3292 
3293       if (perf_score <= afl->havoc_max_mult * 100) {
3294 
3295         afl->stage_max *= 2;
3296         perf_score *= 2;
3297 
3298       }
3299 
3300       havoc_queued = afl->queued_items;
3301 
3302     }
3303 
3304   }
3305 
3306   new_hit_cnt = afl->queued_items + afl->saved_crashes;
3307 
3308   if (!splice_cycle) {
3309 
3310     afl->stage_finds[STAGE_HAVOC] += new_hit_cnt - orig_hit_cnt;
3311     afl->stage_cycles[STAGE_HAVOC] += afl->stage_max;
3312 #ifdef INTROSPECTION
3313     afl->queue_cur->stats_mutated += afl->stage_max;
3314 #endif
3315 
3316   } else {
3317 
3318     afl->stage_finds[STAGE_SPLICE] += new_hit_cnt - orig_hit_cnt;
3319     afl->stage_cycles[STAGE_SPLICE] += afl->stage_max;
3320 #ifdef INTROSPECTION
3321     afl->queue_cur->stats_mutated += afl->stage_max;
3322 #endif
3323 
3324   }
3325 
3326 #ifndef IGNORE_FINDS
3327 
3328   /************
3329    * SPLICING *
3330    ************/
3331 
3332   /* This is a last-resort strategy triggered by a full round with no findings.
3333      It takes the current input file, randomly selects another input, and
3334      splices them together at some offset, then relies on the havoc
3335      code to mutate that blob. */
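
       /* Sketch of one splice: locate_diffs() reports the first (f_diff) and
          last (l_diff) byte positions where the two inputs disagree, a split
          point is drawn from [f_diff, l_diff), and the result is the current
          input's head up to split_at followed by the donor's tail from
          split_at onward. */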
3336 
3337 retry_splicing:
3338 
3339   if (afl->use_splicing && splice_cycle++ < SPLICE_CYCLES &&
3340       afl->ready_for_splicing_count > 1 && afl->queue_cur->len >= 4) {
3341 
3342     struct queue_entry *target;
3343     u32                 tid, split_at;
3344     u8                 *new_buf;
3345     s32                 f_diff, l_diff;
3346 
3347     /* First of all, if we've modified in_buf for havoc, let's clean that
3348        up... */
3349 
3350     if (in_buf != orig_in) {
3351 
3352       in_buf = orig_in;
3353       len = afl->queue_cur->len;
3354 
3355     }
3356 
3357     /* Pick a random queue entry and seek to it. Don't splice with yourself. */
3358 
3359     do {
3360 
3361       tid = rand_below(afl, afl->queued_items);
3362 
3363     } while (
3364 
3365         unlikely(tid == afl->current_entry || afl->queue_buf[tid]->len < 4));
3366 
3367     /* Get the testcase */
3368     afl->splicing_with = tid;
3369     target = afl->queue_buf[tid];
3370     new_buf = queue_testcase_get(afl, target);
3371 
3372     /* Find a suitable splicing location, somewhere between the first and
3373        the last differing byte. Bail out if the difference is just a single
3374        byte or so. */
3375 
3376     locate_diffs(in_buf, new_buf, MIN(len, (s64)target->len), &f_diff, &l_diff);
3377 
3378     if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) { goto retry_splicing; }
3379 
3380     /* Split somewhere between the first and last differing byte. */
3381 
3382     split_at = f_diff + rand_below(afl, l_diff - f_diff);
3383 
3384     /* Do the thing. */
3385 
3386     len = target->len;
3387     afl->in_scratch_buf = afl_realloc(AFL_BUF_PARAM(in_scratch), len);
3388     memcpy(afl->in_scratch_buf, in_buf, split_at);
3389     memcpy(afl->in_scratch_buf + split_at, new_buf + split_at, len - split_at);
3390     in_buf = afl->in_scratch_buf;
3391     afl_swap_bufs(AFL_BUF_PARAM(in), AFL_BUF_PARAM(in_scratch));
3392 
3393     out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
3394     if (unlikely(!out_buf)) { PFATAL("alloc"); }
3395     memcpy(out_buf, in_buf, len);
3396 
3397     goto custom_mutator_stage;
3398 
3399   }
3400 
3401 #endif                                                     /* !IGNORE_FINDS */
3402 
3403   ret_val = 0;
3404 
3405 #ifdef INTROSPECTION
3406 
3407   afl->havoc_prof->queued_det_stage =
3408       before_havoc_findings - before_det_findings;
3409   afl->havoc_prof->queued_havoc_stage =
3410       afl->queued_items - before_havoc_findings;
3411   afl->havoc_prof->total_queued_det += afl->havoc_prof->queued_det_stage;
3412   afl->havoc_prof->edge_det_stage = before_havoc_edges - before_det_edges;
3413   afl->havoc_prof->edge_havoc_stage =
3414       count_non_255_bytes(afl, afl->virgin_bits) - before_havoc_edges;
3415   afl->havoc_prof->total_det_edge += afl->havoc_prof->edge_det_stage;
3416   afl->havoc_prof->det_stage_time = before_havoc_time - before_det_time;
3417   afl->havoc_prof->havoc_stage_time = get_cur_time() - before_havoc_time;
3418   afl->havoc_prof->total_det_time += afl->havoc_prof->det_stage_time;
3419 
3420   plot_profile_data(afl, afl->queue_cur);
3421 
3422 #endif
3423 
3424 /* we are through with this queue entry - for this iteration */
3425 abandon_entry:
3426 
3427   afl->splicing_with = -1;
3428 
3429   /* Update afl->pending_not_fuzzed count if we made it through the calibration
3430      cycle and have not seen this entry before. */
3431 
3432   if (!afl->stop_soon && !afl->queue_cur->cal_failed &&
3433       !afl->queue_cur->was_fuzzed && !afl->queue_cur->disabled) {
3434 
3435     --afl->pending_not_fuzzed;
3436     afl->queue_cur->was_fuzzed = 1;
3437     afl->reinit_table = 1;
3438     if (afl->queue_cur->favored) {
3439 
3440       --afl->pending_favored;
3441       afl->smallest_favored = -1;
3442 
3443     }
3444 
3445   }
3446 
3447   ++afl->queue_cur->fuzz_level;
3448   orig_in = NULL;
3449   return ret_val;
3450 
3451 #undef FLIP_BIT
3452 
3453 }
3454 
3455 /* MOpt mode */
3456 static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
3457 
3458   if (!MOpt_globals.is_pilot_mode) {
3459 
3460     if (swarm_num == 1) {
3461 
3462       afl->key_module = 2;
3463       return 0;
3464 
3465     }
3466 
3467   }
3468 
3469   u32 len, temp_len;
3470   u32 i;
3471   u32 j;
3472   u8 *in_buf, *out_buf, *orig_in, *ex_tmp, *eff_map = 0;
3473   u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt = 0, cur_ms_lv, prev_cksum,
3474       _prev_cksum;
3475   u32 splice_cycle = 0, perf_score = 100, orig_perf, eff_cnt = 1;
3476 
3477   u8 ret_val = 1, doing_det = 0;
3478 
3479   u8  a_collect[MAX_AUTO_EXTRA];
3480   u32 a_len = 0;
3481 
3482 #ifdef IGNORE_FINDS
3483 
3484   /* In IGNORE_FINDS mode, skip any entries that weren't in the
3485      initial data set. */
3486 
3487   if (afl->queue_cur->depth > 1) return 1;
3488 
3489 #else
3490 
3491   if (likely(afl->pending_favored)) {
3492 
3493     /* If we have any favored, non-fuzzed new arrivals in the queue,
3494        possibly skip to them at the expense of already-fuzzed or non-favored
3495        cases. */
3496 
3497     if ((afl->queue_cur->fuzz_level || !afl->queue_cur->favored) &&
3498         rand_below(afl, 100) < SKIP_TO_NEW_PROB) {
3499 
3500       return 1;
3501 
3502     }
3503 
3504   } else if (!afl->non_instrumented_mode && !afl->queue_cur->favored &&
3505 
3506              afl->queued_items > 10) {
3507 
3508     /* Otherwise, still possibly skip non-favored cases, albeit less often.
3509        The odds of skipping stuff are higher for already-fuzzed inputs and
3510        lower for never-fuzzed entries. */
3511 
3512     if (afl->queue_cycle > 1 && !afl->queue_cur->fuzz_level) {
3513 
3514       if (likely(rand_below(afl, 100) < SKIP_NFAV_NEW_PROB)) { return 1; }
3515 
3516     } else {
3517 
3518       if (likely(rand_below(afl, 100) < SKIP_NFAV_OLD_PROB)) { return 1; }
3519 
3520     }
3521 
3522   }
3523 
3524 #endif                                                     /* ^IGNORE_FINDS */
3525 
3526   if (afl->not_on_tty) {
3527 
3528     ACTF("Fuzzing test case #%u (%u total, %llu crashes saved)...",
3529          afl->current_entry, afl->queued_items, afl->saved_crashes);
3530     fflush(stdout);
3531 
3532   }
3533 
3534   /* Map the test case into memory. */
3535   orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);
3536   len = afl->queue_cur->len;
3537 
3538   out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
3539   if (unlikely(!out_buf)) { PFATAL("alloc"); }
3540 
3541   afl->subseq_tmouts = 0;
3542 
3543   afl->cur_depth = afl->queue_cur->depth;
3544 
3545   /*******************************************
3546    * CALIBRATION (only if failed earlier on) *
3547    *******************************************/
3548 
3549   if (unlikely(afl->queue_cur->cal_failed)) {
3550 
3551     u8 res = FSRV_RUN_TMOUT;
3552 
3553     if (afl->queue_cur->cal_failed < CAL_CHANCES) {
3554 
3555       afl->queue_cur->exec_cksum = 0;
3556 
3557       res =
3558           calibrate_case(afl, afl->queue_cur, in_buf, afl->queue_cycle - 1, 0);
3559 
3560       if (res == FSRV_RUN_ERROR) {
3561 
3562         FATAL("Unable to execute target application");
3563 
3564       }
3565 
3566     }
3567 
3568     if (afl->stop_soon || res != afl->crash_mode) {
3569 
3570       ++afl->cur_skipped_items;
3571       goto abandon_entry;
3572 
3573     }
3574 
3575   }
3576 
3577   /************
3578    * TRIMMING *
3579    ************/
3580 
3581   if (unlikely(!afl->non_instrumented_mode && !afl->queue_cur->trim_done &&
3582                !afl->disable_trim)) {
3583 
3584     u32 old_len = afl->queue_cur->len;
3585 
3586     u8 res = trim_case(afl, afl->queue_cur, in_buf);
3587     orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);
3588 
3589     if (unlikely(res == FSRV_RUN_ERROR)) {
3590 
3591       FATAL("Unable to execute target application");
3592 
3593     }
3594 
3595     if (unlikely(afl->stop_soon)) {
3596 
3597       ++afl->cur_skipped_items;
3598       goto abandon_entry;
3599 
3600     }
3601 
3602     /* Don't retry trimming, even if it failed. */
3603 
3604     afl->queue_cur->trim_done = 1;
3605 
3606     len = afl->queue_cur->len;
3607 
3608     /* maybe current entry is not ready for splicing anymore */
3609     if (unlikely(len <= 4 && old_len > 4)) --afl->ready_for_splicing_count;
3610 
3611   }
3612 
3613   memcpy(out_buf, in_buf, len);
3614 
3615   /*********************
3616    * PERFORMANCE SCORE *
3617    *********************/
3618 
3619   if (likely(!afl->old_seed_selection))
3620     orig_perf = perf_score = afl->queue_cur->perf_score;
3621   else
3622     orig_perf = perf_score = calculate_score(afl, afl->queue_cur);
3623 
3624   if (unlikely(perf_score <= 0 && afl->active_items > 1)) {
3625 
3626     goto abandon_entry;
3627 
3628   }
3629 
3630   if (unlikely(afl->shm.cmplog_mode &&
3631                afl->queue_cur->colorized < afl->cmplog_lvl &&
3632                (u32)len <= afl->cmplog_max_filesize)) {
3633 
3634     if (unlikely(len < 4)) {
3635 
3636       afl->queue_cur->colorized = CMPLOG_LVL_MAX;
3637 
3638     } else {
3639 
3640       if (afl->cmplog_lvl == 3 ||
3641           (afl->cmplog_lvl == 2 && afl->queue_cur->tc_ref) ||
3642           !(afl->fsrv.total_execs % afl->queued_items) ||
3643           get_cur_time() - afl->last_find_time > 300000) {  // 300 seconds
3644 
3645         if (input_to_state_stage(afl, in_buf, out_buf, len)) {
3646 
3647           goto abandon_entry;
3648 
3649         }
3650 
3651       }
3652 
3653     }
3654 
3655   }
3656 
3657   /* Go to pacemaker fuzzing if MOpt is doing well */
3658 
3659   cur_ms_lv = get_cur_time();
3660   if (!(afl->key_puppet == 0 &&
3661         ((cur_ms_lv - afl->last_find_time < (u32)afl->limit_time_puppet) ||
3662          (afl->last_crash_time != 0 &&
3663           cur_ms_lv - afl->last_crash_time < (u32)afl->limit_time_puppet) ||
3664          afl->last_find_time == 0))) {
3665 
3666     afl->key_puppet = 1;
3667     goto pacemaker_fuzzing;
3668 
3669   }
3670 
3671   /* Skip right away if -d is given, if we have done deterministic fuzzing on
3672      this entry ourselves (was_fuzzed), or if it has gone through deterministic
3673      testing in earlier, resumed runs (passed_det). */
3674 
3675   if (likely(afl->skip_deterministic || afl->queue_cur->was_fuzzed ||
3676              afl->queue_cur->passed_det)) {
3677 
3678     goto havoc_stage;
3679 
3680   }
3681 
3682   /* Skip deterministic fuzzing if exec path checksum puts this out of scope
3683      for this main instance. */
3684 
3685   if (unlikely(afl->main_node_max &&
3686                (afl->queue_cur->exec_cksum % afl->main_node_max) !=
3687                    afl->main_node_id - 1)) {
3688 
3689     goto havoc_stage;
3690 
3691   }
3692 
3693   doing_det = 1;
3694 
3695   /*********************************************
3696    * SIMPLE BITFLIP (+dictionary construction) *
3697    *********************************************/
3698 
3699 #define FLIP_BIT(_ar, _b)                     \
3700   do {                                        \
3701                                               \
3702     u8 *_arf = (u8 *)(_ar);                   \
3703     u32 _bf = (_b);                           \
3704     _arf[(_bf) >> 3] ^= (128 >> ((_bf) & 7)); \
3705                                               \
3706   } while (0)
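
       /* Example: FLIP_BIT(out_buf, 10) XORs out_buf[1] (10 >> 3) with
          128 >> (10 & 7) == 0x20, i.e. it toggles the third-most-significant
          bit of the second byte. */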
3707 
3708   /* Single walking bit. */
3709 
3710   afl->stage_short = "flip1";
3711   afl->stage_max = len << 3;
3712   afl->stage_name = "bitflip 1/1";
3713 
3714   afl->stage_val_type = STAGE_VAL_NONE;
3715 
3716   orig_hit_cnt = afl->queued_items + afl->saved_crashes;
3717 
3718   /* Get a clean cksum. */
3719 
3720   if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3721 
3722   prev_cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
3723   _prev_cksum = prev_cksum;
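
       /* _prev_cksum keeps the baseline path hash of the unmodified input,
          while prev_cksum advances as the token scan below sees the path
          change; together they distinguish "this flip changed nothing" from
          "new distinct path". */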
3724 
3725   /* Now flip bits. */
3726 
3727   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
3728 
3729     afl->stage_cur_byte = afl->stage_cur >> 3;
3730 
3731     FLIP_BIT(out_buf, afl->stage_cur);
3732 
3733 #ifdef INTROSPECTION
3734     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT1-%u",
3735              afl->queue_cur->fname, afl->stage_cur);
3736 #endif
3737     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3738 
3739     FLIP_BIT(out_buf, afl->stage_cur);
3740 
3741     /* While flipping the least significant bit in every byte, pull off an extra
3742        trick to detect possible syntax tokens. In essence, the idea is that if
3743        you have a binary blob like this:
3744 
3745        xxxxxxxxIHDRxxxxxxxx
3746 
3747        ...and changing the leading and trailing bytes causes variable or no
3748        changes in program flow, but touching any character in the "IHDR" string
3749        always produces the same, distinctive path, it's highly likely that
3750        "IHDR" is an atomically-checked magic value of special significance to
3751        the fuzzed format.
3752 
3753        We do this here, rather than as a separate stage, because it's a nice
3754        way to keep the operation approximately "free" (i.e., no extra execs).
3755 
3756        Empirically, performing the check when flipping the least significant bit
3757        is advantageous, compared to doing it at the time of more disruptive
3758        changes, where the program flow may be affected in more violent ways.
3759 
3760        The caveat is that we won't generate dictionaries in the -d mode or -S
3761        mode - but that's probably a fair trade-off.
3762 
3763        This won't work particularly well with paths that exhibit variable
3764        behavior, but fails gracefully, so we'll carry out the checks anyway.
3765 
3766       */
3767 
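    /* Illustrative walk-through (a sketch, not from the original source):
       take "xxxxxxxxIHDRxxxxxxxx". Flipping the low bit of an 'x' either
       leaves the trace at the baseline (_prev_cksum, so the byte is not
       collected) or perturbs it in a new way (cksum != prev_cksum, so the
       token buffer flushes and resets). Flipping any of 'I', 'H', 'D', 'R'
       yields one shared, distinctive cksum, so those four bytes accumulate
       in a_collect[]; at the next cksum change, a_len == 4 falls inside
       [MIN_AUTO_EXTRA, MAX_AUTO_EXTRA] with the stock bounds and
       maybe_add_auto() records "IHDR" as an auto dictionary token. */
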
3768     if (!afl->non_instrumented_mode && (afl->stage_cur & 7) == 7) {
3769 
3770       u64 cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
3771 
3772       if (afl->stage_cur == afl->stage_max - 1 && cksum == prev_cksum) {
3773 
3774         /* If at end of file and we are still collecting a string, grab the
3775            final character and force output. */
3776 
3777         if (a_len < MAX_AUTO_EXTRA) {
3778 
3779           a_collect[a_len] = out_buf[afl->stage_cur >> 3];
3780 
3781         }
3782 
3783         ++a_len;
3784 
3785         if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {
3786 
3787           maybe_add_auto(afl, a_collect, a_len);
3788 
3789         }
3790 
3791       } else if (cksum != prev_cksum) {
3792 
3793         /* Otherwise, if the checksum has changed, see if we have something
3794            worthwhile queued up, and collect that if the answer is yes. */
3795 
3796         if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {
3797 
3798           maybe_add_auto(afl, a_collect, a_len);
3799 
3800         }
3801 
3802         a_len = 0;
3803         prev_cksum = cksum;
3804 
3805       }
3806 
3807       /* Continue collecting string, but only if the bit flip actually made
3808          any difference - we don't want no-op tokens. */
3809 
3810       if (cksum != _prev_cksum) {
3811 
3812         if (a_len < MAX_AUTO_EXTRA) {
3813 
3814           a_collect[a_len] = out_buf[afl->stage_cur >> 3];
3815 
3816         }
3817 
3818         ++a_len;
3819 
3820       }
3821 
3822     }                                       /* if (afl->stage_cur & 7) == 7 */
3823 
3824   }                                                   /* for afl->stage_cur */
3825 
3826   new_hit_cnt = afl->queued_items + afl->saved_crashes;
3827 
3828   afl->stage_finds[STAGE_FLIP1] += new_hit_cnt - orig_hit_cnt;
3829   afl->stage_cycles[STAGE_FLIP1] += afl->stage_max;
3830 #ifdef INTROSPECTION
3831   afl->queue_cur->stats_mutated += afl->stage_max;
3832 #endif
3833 
3834   /* Two walking bits. */
3835 
3836   afl->stage_name = "bitflip 2/1";
3837   afl->stage_short = "flip2";
3838   afl->stage_max = (len << 3) - 1;
3839 
3840   orig_hit_cnt = new_hit_cnt;
3841 
3842   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
3843 
3844     afl->stage_cur_byte = afl->stage_cur >> 3;
3845 
3846     FLIP_BIT(out_buf, afl->stage_cur);
3847     FLIP_BIT(out_buf, afl->stage_cur + 1);
3848 
3849 #ifdef INTROSPECTION
3850     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT2-%u",
3851              afl->queue_cur->fname, afl->stage_cur);
3852 #endif
3853     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3854 
3855     FLIP_BIT(out_buf, afl->stage_cur);
3856     FLIP_BIT(out_buf, afl->stage_cur + 1);
3857 
3858   }                                                   /* for afl->stage_cur */
3859 
3860   new_hit_cnt = afl->queued_items + afl->saved_crashes;
3861 
3862   afl->stage_finds[STAGE_FLIP2] += new_hit_cnt - orig_hit_cnt;
3863   afl->stage_cycles[STAGE_FLIP2] += afl->stage_max;
3864 #ifdef INTROSPECTION
3865   afl->queue_cur->stats_mutated += afl->stage_max;
3866 #endif
3867 
3868   /* Four walking bits. */
3869 
3870   afl->stage_name = "bitflip 4/1";
3871   afl->stage_short = "flip4";
3872   afl->stage_max = (len << 3) - 3;
3873 
3874   orig_hit_cnt = new_hit_cnt;
3875 
3876   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
3877 
3878     afl->stage_cur_byte = afl->stage_cur >> 3;
3879 
3880     FLIP_BIT(out_buf, afl->stage_cur);
3881     FLIP_BIT(out_buf, afl->stage_cur + 1);
3882     FLIP_BIT(out_buf, afl->stage_cur + 2);
3883     FLIP_BIT(out_buf, afl->stage_cur + 3);
3884 
3885 #ifdef INTROSPECTION
3886     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT4-%u",
3887              afl->queue_cur->fname, afl->stage_cur);
3888 #endif
3889     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3890 
3891     FLIP_BIT(out_buf, afl->stage_cur);
3892     FLIP_BIT(out_buf, afl->stage_cur + 1);
3893     FLIP_BIT(out_buf, afl->stage_cur + 2);
3894     FLIP_BIT(out_buf, afl->stage_cur + 3);
3895 
3896   }                                                   /* for afl->stage_cur */
3897 
3898   new_hit_cnt = afl->queued_items + afl->saved_crashes;
3899 
3900   afl->stage_finds[STAGE_FLIP4] += new_hit_cnt - orig_hit_cnt;
3901   afl->stage_cycles[STAGE_FLIP4] += afl->stage_max;
3902 #ifdef INTROSPECTION
3903   afl->queue_cur->stats_mutated += afl->stage_max;
3904 #endif
3905 
3906   /* Effector map setup. These macros calculate:
3907 
3908      EFF_APOS      - position of a particular file offset in the map.
3909      EFF_ALEN      - length of a map with a particular number of bytes.
3910      EFF_SPAN_ALEN - map span for a sequence of bytes.
3911 
3912    */
3913 
3914 #define EFF_APOS(_p) ((_p) >> EFF_MAP_SCALE2)
3915 #define EFF_REM(_x) ((_x) & ((1 << EFF_MAP_SCALE2) - 1))
3916 #define EFF_ALEN(_l) (EFF_APOS(_l) + !!EFF_REM(_l))
3917 #define EFF_SPAN_ALEN(_p, _l) (EFF_APOS((_p) + (_l)-1) - EFF_APOS(_p) + 1)
3918 
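  /* Worked example (illustrative, assuming the stock EFF_MAP_SCALE2 of 3
     from config.h, i.e. one map byte per 8 input bytes):

       EFF_APOS(7)         == 0,  EFF_APOS(8) == 1
       EFF_ALEN(17)        == 2 + 1 == 3  (two full blocks plus a remainder)
       EFF_SPAN_ALEN(6, 4) == EFF_APOS(9) - EFF_APOS(6) + 1 == 2

     so a 4-byte token written at offset 6 straddles map entries 0 and 1. */
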
3919   /* Initialize effector map for the next step (see comments below). Always
3920      flag first and last byte as doing something. */
3921 
3922   eff_map = afl_realloc(AFL_BUF_PARAM(eff), EFF_ALEN(len));
3923   if (unlikely(!eff_map)) { PFATAL("alloc"); }
3924   memset(eff_map, 0, EFF_ALEN(len));
3925   eff_map[0] = 1;
3926 
3927   if (EFF_APOS(len - 1) != 0) {
3928 
3929     eff_map[EFF_APOS(len - 1)] = 1;
3930     ++eff_cnt;
3931 
3932   }
3933 
3934   /* Walking byte. */
3935 
3936   afl->stage_name = "bitflip 8/8";
3937   afl->stage_short = "flip8";
3938   afl->stage_max = len;
3939 
3940   orig_hit_cnt = new_hit_cnt;
3941   prev_cksum = _prev_cksum;
3942 
3943   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
3944 
3945     afl->stage_cur_byte = afl->stage_cur;
3946 
3947     out_buf[afl->stage_cur] ^= 0xFF;
3948 
3949 #ifdef INTROSPECTION
3950     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT8-%u",
3951              afl->queue_cur->fname, afl->stage_cur);
3952 #endif
3953     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3954 
3955     /* We also use this stage to pull off a simple trick: we identify
3956        bytes that seem to have no effect on the current execution path
3957        even when fully flipped - and we skip them during more expensive
3958        deterministic stages, such as arithmetics or known ints. */
3959 
3960     if (!eff_map[EFF_APOS(afl->stage_cur)]) {
3961 
3962       u64 cksum;
3963 
3964       /* If in non-instrumented mode or if the file is very short, just flag
3965          everything without wasting time on checksums. */
3966 
3967       if (!afl->non_instrumented_mode && len >= EFF_MIN_LEN) {
3968 
3969         cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
3970 
3971       } else {
3972 
3973         cksum = ~prev_cksum;
3974 
3975       }
3976 
3977       if (cksum != prev_cksum) {
3978 
3979         eff_map[EFF_APOS(afl->stage_cur)] = 1;
3980         ++eff_cnt;
3981 
3982       }
3983 
3984     }
3985 
3986     out_buf[afl->stage_cur] ^= 0xFF;
3987 
3988   }                                                   /* for afl->stage_cur */
3989 
3990   /* If the effector map is more than EFF_MAX_PERC dense, just flag the
3991      whole thing as worth fuzzing, since we wouldn't be saving much time
3992      anyway. */
3993 
3994   if (eff_cnt != (u32)EFF_ALEN(len) &&
3995       eff_cnt * 100 / EFF_ALEN(len) > EFF_MAX_PERC) {
3996 
3997     memset(eff_map, 1, EFF_ALEN(len));
3998 
3999     afl->blocks_eff_select += EFF_ALEN(len);
4000 
4001   } else {
4002 
4003     afl->blocks_eff_select += eff_cnt;
4004 
4005   }
4006 
4007   afl->blocks_eff_total += EFF_ALEN(len);
4008 
4009   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4010 
4011   afl->stage_finds[STAGE_FLIP8] += new_hit_cnt - orig_hit_cnt;
4012   afl->stage_cycles[STAGE_FLIP8] += afl->stage_max;
4013 #ifdef INTROSPECTION
4014   afl->queue_cur->stats_mutated += afl->stage_max;
4015 #endif
4016 
4017   /* Two walking bytes. */
4018 
4019   if (len < 2) { goto skip_bitflip; }
4020 
4021   afl->stage_name = "bitflip 16/8";
4022   afl->stage_short = "flip16";
4023   afl->stage_cur = 0;
4024   afl->stage_max = len - 1;
4025 
4026   orig_hit_cnt = new_hit_cnt;
4027 
4028   for (i = 0; i < len - 1; ++i) {
4029 
4030     /* Let's consult the effector map... */
4031 
4032     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {
4033 
4034       --afl->stage_max;
4035       continue;
4036 
4037     }
4038 
4039     afl->stage_cur_byte = i;
4040 
4041     *(u16 *)(out_buf + i) ^= 0xFFFF;
4042 
4043 #ifdef INTROSPECTION
4044     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT16-%u",
4045              afl->queue_cur->fname, afl->stage_cur);
4046 #endif
4047     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4048     ++afl->stage_cur;
4049 
4050     *(u16 *)(out_buf + i) ^= 0xFFFF;
4051 
4052   }                                                   /* for i = 0; i < len */
4053 
4054   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4055 
4056   afl->stage_finds[STAGE_FLIP16] += new_hit_cnt - orig_hit_cnt;
4057   afl->stage_cycles[STAGE_FLIP16] += afl->stage_max;
4058 #ifdef INTROSPECTION
4059   afl->queue_cur->stats_mutated += afl->stage_max;
4060 #endif
4061 
4062   if (len < 4) { goto skip_bitflip; }
4063 
4064   /* Four walking bytes. */
4065 
4066   afl->stage_name = "bitflip 32/8";
4067   afl->stage_short = "flip32";
4068   afl->stage_cur = 0;
4069   afl->stage_max = len - 3;
4070 
4071   orig_hit_cnt = new_hit_cnt;
4072 
4073   for (i = 0; i < len - 3; ++i) {
4074 
4075     /* Let's consult the effector map... */
4076     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
4077         !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {
4078 
4079       --afl->stage_max;
4080       continue;
4081 
4082     }
4083 
4084     afl->stage_cur_byte = i;
4085 
4086     *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;
4087 
4088 #ifdef INTROSPECTION
4089     snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT32-%u",
4090              afl->queue_cur->fname, afl->stage_cur);
4091 #endif
4092     if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4093     ++afl->stage_cur;
4094 
4095     *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;
4096 
4097   }                                               /* for i = 0; i < len - 3 */
4098 
4099   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4100 
4101   afl->stage_finds[STAGE_FLIP32] += new_hit_cnt - orig_hit_cnt;
4102   afl->stage_cycles[STAGE_FLIP32] += afl->stage_max;
4103 #ifdef INTROSPECTION
4104   afl->queue_cur->stats_mutated += afl->stage_max;
4105 #endif
4106 
4107 skip_bitflip:
4108 
4109   if (afl->no_arith) { goto skip_arith; }
4110 
4111   /**********************
4112    * ARITHMETIC INC/DEC *
4113    **********************/
4114 
4115   /* 8-bit arithmetics. */
4116 
4117   afl->stage_name = "arith 8/8";
4118   afl->stage_short = "arith8";
4119   afl->stage_cur = 0;
4120   afl->stage_max = 2 * len * ARITH_MAX;
4121 
4122   afl->stage_val_type = STAGE_VAL_LE;
4123 
4124   orig_hit_cnt = new_hit_cnt;
4125 
4126   for (i = 0; i < (u32)len; ++i) {
4127 
4128     u8 orig = out_buf[i];
4129 
4130     /* Let's consult the effector map... */
4131 
4132     if (!eff_map[EFF_APOS(i)]) {
4133 
4134       afl->stage_max -= 2 * ARITH_MAX;
4135       continue;
4136 
4137     }
4138 
4139     afl->stage_cur_byte = i;
4140 
4141     for (j = 1; j <= ARITH_MAX; ++j) {
4142 
4143       u8 r = orig ^ (orig + j);
4144 
4145       /* Do arithmetic operations only if the result couldn't be a product
4146          of a bitflip. */
4147 
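      /* Worked example (illustrative): orig == 0x7f, j == 1 gives
         r == 0x7f ^ 0x80 == 0xff, which could_be_bitflip() flags as a
         byte-wide flip already covered by "bitflip 8/8", so it is skipped;
         orig == 0x00, j == 5 gives r == 0x05, which no bitflip stage can
         produce, so the exec is performed. */
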
4148       if (!could_be_bitflip(r)) {
4149 
4150         afl->stage_cur_val = j;
4151         out_buf[i] = orig + j;
4152 
4153 #ifdef INTROSPECTION
4154         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH8+-%u-%u",
4155                  afl->queue_cur->fname, i, j);
4156 #endif
4157         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4158         ++afl->stage_cur;
4159 
4160       } else {
4161 
4162         --afl->stage_max;
4163 
4164       }
4165 
4166       r = orig ^ (orig - j);
4167 
4168       if (!could_be_bitflip(r)) {
4169 
4170         afl->stage_cur_val = -j;
4171         out_buf[i] = orig - j;
4172 
4173 #ifdef INTROSPECTION
4174         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH8_-%u-%u",
4175                  afl->queue_cur->fname, i, j);
4176 #endif
4177         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4178         ++afl->stage_cur;
4179 
4180       } else {
4181 
4182         --afl->stage_max;
4183 
4184       }
4185 
4186       out_buf[i] = orig;
4187 
4188     }
4189 
4190   }                                                   /* for i = 0; i < len */
4191 
4192   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4193 
4194   afl->stage_finds[STAGE_ARITH8] += new_hit_cnt - orig_hit_cnt;
4195   afl->stage_cycles[STAGE_ARITH8] += afl->stage_max;
4196 #ifdef INTROSPECTION
4197   afl->queue_cur->stats_mutated += afl->stage_max;
4198 #endif
4199 
4200   /* 16-bit arithmetics, both endians. */
4201 
4202   if (len < 2) { goto skip_arith; }
4203 
4204   afl->stage_name = "arith 16/8";
4205   afl->stage_short = "arith16";
4206   afl->stage_cur = 0;
4207   afl->stage_max = 4 * (len - 1) * ARITH_MAX;
4208 
4209   orig_hit_cnt = new_hit_cnt;
4210 
4211   for (i = 0; i < len - 1; ++i) {
4212 
4213     u16 orig = *(u16 *)(out_buf + i);
4214 
4215     /* Let's consult the effector map... */
4216 
4217     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {
4218 
4219       afl->stage_max -= 4 * ARITH_MAX;
4220       continue;
4221 
4222     }
4223 
4224     afl->stage_cur_byte = i;
4225 
4226     for (j = 1; j <= ARITH_MAX; ++j) {
4227 
4228       u16 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
4229           r3 = orig ^ SWAP16(SWAP16(orig) + j),
4230           r4 = orig ^ SWAP16(SWAP16(orig) - j);
4231 
4232       /* Try little endian addition and subtraction first. Do it only
4233          if the operation would affect more than one byte (hence the
4234          & 0xff overflow checks) and if it couldn't be a product of
4235          a bitflip. */
4236 
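      /* Worked example (illustrative): with orig == 0x00ff and j == 1, the
         low byte overflows ((orig & 0xff) + j > 0xff) and the carry reaches
         the second byte - something "arith 8/8" can never produce, so the
         exec is worthwhile. With orig == 0x0010 and j == 1, only the low
         byte changes and the try is skipped as an 8-bit dupe. */
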
4237       afl->stage_val_type = STAGE_VAL_LE;
4238 
4239       if ((orig & 0xff) + j > 0xff && !could_be_bitflip(r1)) {
4240 
4241         afl->stage_cur_val = j;
4242         *(u16 *)(out_buf + i) = orig + j;
4243 
4244 #ifdef INTROSPECTION
4245         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH16+-%u-%u",
4246                  afl->queue_cur->fname, i, j);
4247 #endif
4248         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4249         ++afl->stage_cur;
4250 
4251       } else {
4252 
4253         --afl->stage_max;
4254 
4255       }
4256 
4257       if ((orig & 0xff) < j && !could_be_bitflip(r2)) {
4258 
4259         afl->stage_cur_val = -j;
4260         *(u16 *)(out_buf + i) = orig - j;
4261 
4262 #ifdef INTROSPECTION
4263         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH16_-%u-%u",
4264                  afl->queue_cur->fname, i, j);
4265 #endif
4266         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4267         ++afl->stage_cur;
4268 
4269       } else {
4270 
4271         --afl->stage_max;
4272 
4273       }
4274 
4275       /* Big endian comes next. Same deal. */
4276 
4277       afl->stage_val_type = STAGE_VAL_BE;
4278 
4279       if ((orig >> 8) + j > 0xff && !could_be_bitflip(r3)) {
4280 
4281         afl->stage_cur_val = j;
4282         *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) + j);
4283 
4284 #ifdef INTROSPECTION
4285         snprintf(afl->mutation, sizeof(afl->mutation),
4286                  "%s MOPT_ARITH16+BE-%u-%u", afl->queue_cur->fname, i, j);
4287 #endif
4288         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4289         ++afl->stage_cur;
4290 
4291       } else {
4292 
4293         --afl->stage_max;
4294 
4295       }
4296 
4297       if ((orig >> 8) < j && !could_be_bitflip(r4)) {
4298 
4299         afl->stage_cur_val = -j;
4300         *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) - j);
4301 
4302 #ifdef INTROSPECTION
4303         snprintf(afl->mutation, sizeof(afl->mutation),
4304                  "%s MOPT_ARITH16_BE-%u-%u", afl->queue_cur->fname, i, j);
4305 #endif
4306         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4307         ++afl->stage_cur;
4308 
4309       } else {
4310 
4311         --afl->stage_max;
4312 
4313       }
4314 
4315       *(u16 *)(out_buf + i) = orig;
4316 
4317     }
4318 
4319   }                                               /* for i = 0; i < len - 1 */
4320 
4321   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4322 
4323   afl->stage_finds[STAGE_ARITH16] += new_hit_cnt - orig_hit_cnt;
4324   afl->stage_cycles[STAGE_ARITH16] += afl->stage_max;
4325 #ifdef INTROSPECTION
4326   afl->queue_cur->stats_mutated += afl->stage_max;
4327 #endif
4328 
4329   /* 32-bit arithmetics, both endians. */
4330 
4331   if (len < 4) { goto skip_arith; }
4332 
4333   afl->stage_name = "arith 32/8";
4334   afl->stage_short = "arith32";
4335   afl->stage_cur = 0;
4336   afl->stage_max = 4 * (len - 3) * ARITH_MAX;
4337 
4338   orig_hit_cnt = new_hit_cnt;
4339 
4340   for (i = 0; i < len - 3; ++i) {
4341 
4342     u32 orig = *(u32 *)(out_buf + i);
4343 
4344     /* Let's consult the effector map... */
4345 
4346     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
4347         !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {
4348 
4349       afl->stage_max -= 4 * ARITH_MAX;
4350       continue;
4351 
4352     }
4353 
4354     afl->stage_cur_byte = i;
4355 
4356     for (j = 1; j <= ARITH_MAX; ++j) {
4357 
4358       u32 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
4359           r3 = orig ^ SWAP32(SWAP32(orig) + j),
4360           r4 = orig ^ SWAP32(SWAP32(orig) - j);
4361 
4362       /* Little endian first. Same deal as with 16-bit: we only want to
4363          try if the operation would have an effect on more than two bytes. */
4364 
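      /* Same idea as in the 16-bit stage, one level up: the carry must
         cross the low 16 bits ((orig & 0xffff) + j > 0xffff), otherwise
         "arith 16/8" would already have tried the value. */
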
4365       afl->stage_val_type = STAGE_VAL_LE;
4366 
4367       if ((orig & 0xffff) + j > 0xffff && !could_be_bitflip(r1)) {
4368 
4369         afl->stage_cur_val = j;
4370         *(u32 *)(out_buf + i) = orig + j;
4371 
4372 #ifdef INTROSPECTION
4373         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH32+-%u-%u",
4374                  afl->queue_cur->fname, i, j);
4375 #endif
4376         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4377         ++afl->stage_cur;
4378 
4379       } else {
4380 
4381         --afl->stage_max;
4382 
4383       }
4384 
4385       if ((orig & 0xffff) < j && !could_be_bitflip(r2)) {
4386 
4387         afl->stage_cur_val = -j;
4388         *(u32 *)(out_buf + i) = orig - j;
4389 
4390 #ifdef INTROSPECTION
4391         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH32_-%u-%u",
4392                  afl->queue_cur->fname, i, j);
4393 #endif
4394         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4395         ++afl->stage_cur;
4396 
4397       } else {
4398 
4399         --afl->stage_max;
4400 
4401       }
4402 
4403       /* Big endian next. */
4404 
4405       afl->stage_val_type = STAGE_VAL_BE;
4406 
4407       if ((SWAP32(orig) & 0xffff) + j > 0xffff && !could_be_bitflip(r3)) {
4408 
4409         afl->stage_cur_val = j;
4410         *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) + j);
4411 
4412 #ifdef INTROSPECTION
4413         snprintf(afl->mutation, sizeof(afl->mutation),
4414                  "%s MOPT_ARITH32+BE-%u-%u", afl->queue_cur->fname, i, j);
4415 #endif
4416         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4417         ++afl->stage_cur;
4418 
4419       } else {
4420 
4421         --afl->stage_max;
4422 
4423       }
4424 
4425       if ((SWAP32(orig) & 0xffff) < j && !could_be_bitflip(r4)) {
4426 
4427         afl->stage_cur_val = -j;
4428         *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) - j);
4429 
4430 #ifdef INTROSPECTION
4431         snprintf(afl->mutation, sizeof(afl->mutation),
4432                  "%s MOPT_ARITH32_BE-%u-%u", afl->queue_cur->fname, i, j);
4433 #endif
4434         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4435         ++afl->stage_cur;
4436 
4437       } else {
4438 
4439         --afl->stage_max;
4440 
4441       }
4442 
4443       *(u32 *)(out_buf + i) = orig;
4444 
4445     }
4446 
4447   }                                               /* for i = 0; i < len - 3 */
4448 
4449   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4450 
4451   afl->stage_finds[STAGE_ARITH32] += new_hit_cnt - orig_hit_cnt;
4452   afl->stage_cycles[STAGE_ARITH32] += afl->stage_max;
4453 #ifdef INTROSPECTION
4454   afl->queue_cur->stats_mutated += afl->stage_max;
4455 #endif
4456 
4457 skip_arith:
4458 
4459   /**********************
4460    * INTERESTING VALUES *
4461    **********************/
4462 
4463   afl->stage_name = "interest 8/8";
4464   afl->stage_short = "int8";
4465   afl->stage_cur = 0;
4466   afl->stage_max = len * sizeof(interesting_8);
4467 
4468   afl->stage_val_type = STAGE_VAL_LE;
4469 
4470   orig_hit_cnt = new_hit_cnt;
4471 
4472   /* Setting 8-bit integers. */
4473 
4474   for (i = 0; i < (u32)len; ++i) {
4475 
4476     u8 orig = out_buf[i];
4477 
4478     /* Let's consult the effector map... */
4479 
4480     if (!eff_map[EFF_APOS(i)]) {
4481 
4482       afl->stage_max -= sizeof(interesting_8);
4483       continue;
4484 
4485     }
4486 
4487     afl->stage_cur_byte = i;
4488 
4489     for (j = 0; j < sizeof(interesting_8); ++j) {
4490 
4491       /* Skip if the value could be a product of bitflips or arithmetics. */
4492 
4493       if (could_be_bitflip(orig ^ (u8)interesting_8[j]) ||
4494           could_be_arith(orig, (u8)interesting_8[j], 1)) {
4495 
4496         --afl->stage_max;
4497         continue;
4498 
4499       }
4500 
4501       afl->stage_cur_val = interesting_8[j];
4502       out_buf[i] = interesting_8[j];
4503 
4504 #ifdef INTROSPECTION
4505       snprintf(afl->mutation, sizeof(afl->mutation),
4506                "%s MOPT_INTERESTING8-%u-%u", afl->queue_cur->fname, i, j);
4507 #endif
4508       if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4509 
4510       out_buf[i] = orig;
4511       ++afl->stage_cur;
4512 
4513     }
4514 
4515   }                                                   /* for i = 0; i < len */
4516 
4517   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4518 
4519   afl->stage_finds[STAGE_INTEREST8] += new_hit_cnt - orig_hit_cnt;
4520   afl->stage_cycles[STAGE_INTEREST8] += afl->stage_max;
4521 #ifdef INTROSPECTION
4522   afl->queue_cur->stats_mutated += afl->stage_max;
4523 #endif
4524 
4525   /* Setting 16-bit integers, both endians. */
4526 
4527   if (afl->no_arith || len < 2) { goto skip_interest; }
4528 
4529   afl->stage_name = "interest 16/8";
4530   afl->stage_short = "int16";
4531   afl->stage_cur = 0;
4532   afl->stage_max = 2 * (len - 1) * (sizeof(interesting_16) >> 1);
4533 
4534   orig_hit_cnt = new_hit_cnt;
4535 
4536   for (i = 0; i < len - 1; ++i) {
4537 
4538     u16 orig = *(u16 *)(out_buf + i);
4539 
4540     /* Let's consult the effector map... */
4541 
4542     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {
4543 
4544       afl->stage_max -= sizeof(interesting_16);
4545       continue;
4546 
4547     }
4548 
4549     afl->stage_cur_byte = i;
4550 
4551     for (j = 0; j < sizeof(interesting_16) / 2; ++j) {
4552 
4553       afl->stage_cur_val = interesting_16[j];
4554 
4555       /* Skip if this could be a product of a bitflip, arithmetics,
4556          or single-byte interesting value insertion. */
4557 
4558       if (!could_be_bitflip(orig ^ (u16)interesting_16[j]) &&
4559           !could_be_arith(orig, (u16)interesting_16[j], 2) &&
4560           !could_be_interest(orig, (u16)interesting_16[j], 2, 0)) {
4561 
4562         afl->stage_val_type = STAGE_VAL_LE;
4563 
4564         *(u16 *)(out_buf + i) = interesting_16[j];
4565 
4566 #ifdef INTROSPECTION
4567         snprintf(afl->mutation, sizeof(afl->mutation),
4568                  "%s MOPT_INTERESTING16-%u-%u", afl->queue_cur->fname, i, j);
4569 #endif
4570         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4571         ++afl->stage_cur;
4572 
4573       } else {
4574 
4575         --afl->stage_max;
4576 
4577       }
4578 
4579       if ((u16)interesting_16[j] != SWAP16(interesting_16[j]) &&
4580           !could_be_bitflip(orig ^ SWAP16(interesting_16[j])) &&
4581           !could_be_arith(orig, SWAP16(interesting_16[j]), 2) &&
4582           !could_be_interest(orig, SWAP16(interesting_16[j]), 2, 1)) {
4583 
4584         afl->stage_val_type = STAGE_VAL_BE;
4585 
4586 #ifdef INTROSPECTION
4587         snprintf(afl->mutation, sizeof(afl->mutation),
4588                  "%s MOPT_INTERESTING16BE-%u-%u", afl->queue_cur->fname, i, j);
4589 #endif
4590         *(u16 *)(out_buf + i) = SWAP16(interesting_16[j]);
4591         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4592         ++afl->stage_cur;
4593 
4594       } else {
4595 
4596         --afl->stage_max;
4597 
4598       }
4599 
4600     }
4601 
4602     *(u16 *)(out_buf + i) = orig;
4603 
4604   }                                               /* for i = 0; i < len - 1 */
4605 
4606   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4607 
4608   afl->stage_finds[STAGE_INTEREST16] += new_hit_cnt - orig_hit_cnt;
4609   afl->stage_cycles[STAGE_INTEREST16] += afl->stage_max;
4610 #ifdef INTROSPECTION
4611   afl->queue_cur->stats_mutated += afl->stage_max;
4612 #endif
4613 
4614   if (len < 4) { goto skip_interest; }
4615 
4616   /* Setting 32-bit integers, both endians. */
4617 
4618   afl->stage_name = "interest 32/8";
4619   afl->stage_short = "int32";
4620   afl->stage_cur = 0;
4621   afl->stage_max = 2 * (len - 3) * (sizeof(interesting_32) >> 2);
4622 
4623   orig_hit_cnt = new_hit_cnt;
4624 
4625   for (i = 0; i < len - 3; ++i) {
4626 
4627     u32 orig = *(u32 *)(out_buf + i);
4628 
4629     /* Let's consult the effector map... */
4630 
4631     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
4632         !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {
4633 
4634       afl->stage_max -= sizeof(interesting_32) >> 1;
4635       continue;
4636 
4637     }
4638 
4639     afl->stage_cur_byte = i;
4640 
4641     for (j = 0; j < sizeof(interesting_32) / 4; ++j) {
4642 
4643       afl->stage_cur_val = interesting_32[j];
4644 
4645       /* Skip if this could be a product of a bitflip, arithmetics,
4646          or word interesting value insertion. */
4647 
4648       if (!could_be_bitflip(orig ^ (u32)interesting_32[j]) &&
4649           !could_be_arith(orig, interesting_32[j], 4) &&
4650           !could_be_interest(orig, interesting_32[j], 4, 0)) {
4651 
4652         afl->stage_val_type = STAGE_VAL_LE;
4653 
4654         *(u32 *)(out_buf + i) = interesting_32[j];
4655 
4656 #ifdef INTROSPECTION
4657         snprintf(afl->mutation, sizeof(afl->mutation),
4658                  "%s MOPT_INTERESTING32-%u-%u", afl->queue_cur->fname, i, j);
4659 #endif
4660         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4661         ++afl->stage_cur;
4662 
4663       } else {
4664 
4665         --afl->stage_max;
4666 
4667       }
4668 
4669       if ((u32)interesting_32[j] != SWAP32(interesting_32[j]) &&
4670           !could_be_bitflip(orig ^ SWAP32(interesting_32[j])) &&
4671           !could_be_arith(orig, SWAP32(interesting_32[j]), 4) &&
4672           !could_be_interest(orig, SWAP32(interesting_32[j]), 4, 1)) {
4673 
4674         afl->stage_val_type = STAGE_VAL_BE;
4675 
4676 #ifdef INTROSPECTION
4677         snprintf(afl->mutation, sizeof(afl->mutation),
4678                  "%s MOPT_INTERESTING32BE-%u-%u", afl->queue_cur->fname, i, j);
4679 #endif
4680         *(u32 *)(out_buf + i) = SWAP32(interesting_32[j]);
4681         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4682         ++afl->stage_cur;
4683 
4684       } else {
4685 
4686         --afl->stage_max;
4687 
4688       }
4689 
4690     }
4691 
4692     *(u32 *)(out_buf + i) = orig;
4693 
4694   }                                               /* for i = 0; i < len - 3 */
4695 
4696   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4697 
4698   afl->stage_finds[STAGE_INTEREST32] += new_hit_cnt - orig_hit_cnt;
4699   afl->stage_cycles[STAGE_INTEREST32] += afl->stage_max;
4700 #ifdef INTROSPECTION
4701   afl->queue_cur->stats_mutated += afl->stage_max;
4702 #endif
4703 
4704 skip_interest:
4705 
4706   /********************
4707    * DICTIONARY STUFF *
4708    ********************/
4709 
4710   if (!afl->extras_cnt) { goto skip_user_extras; }
4711 
4712   /* Overwrite with user-supplied extras. */
4713 
4714   afl->stage_name = "user extras (over)";
4715   afl->stage_short = "ext_UO";
4716   afl->stage_cur = 0;
4717   afl->stage_max = afl->extras_cnt * len;
4718 
4719   afl->stage_val_type = STAGE_VAL_NONE;
4720 
4721   orig_hit_cnt = new_hit_cnt;
4722 
4723   for (i = 0; i < (u32)len; ++i) {
4724 
4725     u32 last_len = 0;
4726 
4727     afl->stage_cur_byte = i;
4728 
4729     /* Extras are sorted by size, from smallest to largest. This means
4730        that we don't have to worry about restoring the buffer in
4731        between writes at a particular offset determined by the outer
4732        loop. */
4733 
4734     for (j = 0; j < afl->extras_cnt; ++j) {
4735 
4736       /* Skip extras probabilistically if afl->extras_cnt > afl->max_det_extras
4737          (the AFL_MAX_DET_EXTRAS limit). Also skip them if there's no room to
4738          insert the payload, if the token is redundant, or if its entire span
4739          has no bytes set in the effector map. */
4740 
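      /* Worked example (illustrative, e.g. max_det_extras == 256): with
         extras_cnt == 1024, rand_below(afl, 1024) >= 256 skips the token
         three times out of four, so each (i, j) pair is attempted with
         probability max_det_extras / extras_cnt == 1/4 on average. */
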
4741       if ((afl->extras_cnt > afl->max_det_extras &&
4742            rand_below(afl, afl->extras_cnt) >= afl->max_det_extras) ||
4743           afl->extras[j].len > len - i ||
4744           !memcmp(afl->extras[j].data, out_buf + i, afl->extras[j].len) ||
4745           !memchr(eff_map + EFF_APOS(i), 1,
4746                   EFF_SPAN_ALEN(i, afl->extras[j].len))) {
4747 
4748         --afl->stage_max;
4749         continue;
4750 
4751       }
4752 
4753       last_len = afl->extras[j].len;
4754       memcpy(out_buf + i, afl->extras[j].data, last_len);
4755 
4756 #ifdef INTROSPECTION
4757       snprintf(afl->mutation, sizeof(afl->mutation),
4758                "%s MOPT_EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i, j);
4759 #endif
4760 
4761       if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4762 
4763       ++afl->stage_cur;
4764 
4765     }
4766 
4767     /* Restore all the clobbered memory. */
4768     memcpy(out_buf + i, in_buf + i, last_len);
4769 
4770   }                                                   /* for i = 0; i < len */
4771 
4772   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4773 
4774   afl->stage_finds[STAGE_EXTRAS_UO] += new_hit_cnt - orig_hit_cnt;
4775   afl->stage_cycles[STAGE_EXTRAS_UO] += afl->stage_max;
4776 #ifdef INTROSPECTION
4777   afl->queue_cur->stats_mutated += afl->stage_max;
4778 #endif
4779 
4780   /* Insertion of user-supplied extras. */
4781 
4782   afl->stage_name = "user extras (insert)";
4783   afl->stage_short = "ext_UI";
4784   afl->stage_cur = 0;
4785   afl->stage_max = afl->extras_cnt * (len + 1);
4786 
4787   orig_hit_cnt = new_hit_cnt;
4788 
4789   ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
4790   if (unlikely(!ex_tmp)) { PFATAL("alloc"); }
4791 
4792   for (i = 0; i <= (u32)len; ++i) {
4793 
4794     afl->stage_cur_byte = i;
4795 
4796     for (j = 0; j < afl->extras_cnt; ++j) {
4797 
4798       if (len + afl->extras[j].len > MAX_FILE) {
4799 
4800         --afl->stage_max;
4801         continue;
4802 
4803       }
4804 
4805       /* Insert token */
4806       memcpy(ex_tmp + i, afl->extras[j].data, afl->extras[j].len);
4807 
4808       /* Copy tail */
4809       memcpy(ex_tmp + i + afl->extras[j].len, out_buf + i, len - i);
4810 
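      /* ex_tmp layout at this point (illustrative; t_len is shorthand for
         afl->extras[j].len):

           [0 .. i)                    head, filled one byte per outer
                                       iteration by "Copy head" below
           [i .. i + t_len)            the inserted token
           [i + t_len .. len + t_len)  the original tail out_buf[i .. len)

         Because the head persists across inner iterations, only the token
         and the tail need rewriting for each extra. */
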
4811 #ifdef INTROSPECTION
4812       snprintf(afl->mutation, sizeof(afl->mutation),
4813                "%s MOPT_EXTRAS_insert-%u-%u", afl->queue_cur->fname, i, j);
4814 #endif
4815 
4816       if (common_fuzz_stuff(afl, ex_tmp, len + afl->extras[j].len)) {
4817 
4818         goto abandon_entry;
4819 
4820       }
4821 
4822       ++afl->stage_cur;
4823 
4824     }
4825 
4826     /* Copy head */
4827     ex_tmp[i] = out_buf[i];
4828 
4829   }                                                  /* for i = 0; i <= len */
4830 
4831   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4832 
4833   afl->stage_finds[STAGE_EXTRAS_UI] += new_hit_cnt - orig_hit_cnt;
4834   afl->stage_cycles[STAGE_EXTRAS_UI] += afl->stage_max;
4835 #ifdef INTROSPECTION
4836   afl->queue_cur->stats_mutated += afl->stage_max;
4837 #endif
4838 
4839 skip_user_extras:
4840 
4841   if (!afl->a_extras_cnt) { goto skip_extras; }
4842 
4843   afl->stage_name = "auto extras (over)";
4844   afl->stage_short = "ext_AO";
4845   afl->stage_cur = 0;
4846   afl->stage_max = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS) * len;
4847 
4848   afl->stage_val_type = STAGE_VAL_NONE;
4849 
4850   orig_hit_cnt = new_hit_cnt;
4851 
4852   for (i = 0; i < (u32)len; ++i) {
4853 
4854     u32 last_len = 0;
4855 
4856     afl->stage_cur_byte = i;
4857 
4858     u32 auto_extra_cnt = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS);
4859     for (j = 0; j < auto_extra_cnt; ++j) {
4860 
4861       /* See the comment in the earlier code; extras are sorted by size. */
4862 
4863       if ((afl->a_extras[j].len) > (len - i) ||
4864           !memcmp(afl->a_extras[j].data, out_buf + i, afl->a_extras[j].len) ||
4865           !memchr(eff_map + EFF_APOS(i), 1,
4866                   EFF_SPAN_ALEN(i, afl->a_extras[j].len))) {
4867 
4868         --afl->stage_max;
4869         continue;
4870 
4871       }
4872 
4873       last_len = afl->a_extras[j].len;
4874       memcpy(out_buf + i, afl->a_extras[j].data, last_len);
4875 
4876 #ifdef INTROSPECTION
4877       snprintf(afl->mutation, sizeof(afl->mutation),
4878                "%s MOPT_AUTO_EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i,
4879                j);
4880 #endif
4881 
4882       if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4883 
4884       ++afl->stage_cur;
4885 
4886     }
4887 
4888     /* Restore all the clobbered memory. */
4889     memcpy(out_buf + i, in_buf + i, last_len);
4890 
4891   }                                                   /* for i = 0; i < len */
4892 
4893   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4894 
4895   afl->stage_finds[STAGE_EXTRAS_AO] += new_hit_cnt - orig_hit_cnt;
4896   afl->stage_cycles[STAGE_EXTRAS_AO] += afl->stage_max;
4897 #ifdef INTROSPECTION
4898   afl->queue_cur->stats_mutated += afl->stage_max;
4899 #endif
4900 
4901   /* Insertion of auto extras. */
4902 
4903   afl->stage_name = "auto extras (insert)";
4904   afl->stage_short = "ext_AI";
4905   afl->stage_cur = 0;
4906   afl->stage_max = afl->a_extras_cnt * (len + 1);
4907 
4908   orig_hit_cnt = new_hit_cnt;
4909 
4910   ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
4911   if (unlikely(!ex_tmp)) { PFATAL("alloc"); }
4912 
4913   for (i = 0; i <= (u32)len; ++i) {
4914 
4915     afl->stage_cur_byte = i;
4916 
4917     for (j = 0; j < afl->a_extras_cnt; ++j) {
4918 
4919       if (len + afl->a_extras[j].len > MAX_FILE) {
4920 
4921         --afl->stage_max;
4922         continue;
4923 
4924       }
4925 
4926       /* Insert token */
4927       memcpy(ex_tmp + i, afl->a_extras[j].data, afl->a_extras[j].len);
4928 
4929       /* Copy tail */
4930       memcpy(ex_tmp + i + afl->a_extras[j].len, out_buf + i, len - i);
4931 
4932 #ifdef INTROSPECTION
4933       snprintf(afl->mutation, sizeof(afl->mutation),
4934                "%s MOPT_AUTO_EXTRAS_insert-%u-%u", afl->queue_cur->fname, i, j);
4935 #endif
4936 
4937       if (common_fuzz_stuff(afl, ex_tmp, len + afl->a_extras[j].len)) {
4938 
4939         goto abandon_entry;
4940 
4941       }
4942 
4943       ++afl->stage_cur;
4944 
4945     }
4946 
4947     /* Copy head */
4948     ex_tmp[i] = out_buf[i];
4949 
4950   }                                                  /* for i = 0; i <= len */
4951 
4952   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4953 
4954   afl->stage_finds[STAGE_EXTRAS_AI] += new_hit_cnt - orig_hit_cnt;
4955   afl->stage_cycles[STAGE_EXTRAS_AI] += afl->stage_max;
4956 #ifdef INTROSPECTION
4957   afl->queue_cur->stats_mutated += afl->stage_max;
4958 #endif
4959 
4960 skip_extras:
4961 
4962   /* If we made it here without jumping to havoc_stage or abandon_entry,
4963      we're properly done with deterministic steps and can mark it as such
4964      in the .state/ directory. */
4965 
4966   if (!afl->queue_cur->passed_det) { mark_as_det_done(afl, afl->queue_cur); }
4967 
4968   /****************
4969    * RANDOM HAVOC *
4970    ****************/
4971 
4972 havoc_stage:
4973 pacemaker_fuzzing:
4974 
4975   afl->stage_cur_byte = -1;
4976 
4977   /* The havoc stage mutation code is also invoked when splicing files; if the
4978      splice_cycle variable is set, generate different descriptions and such. */
4979 
4980   if (!splice_cycle) {
4981 
4982     afl->stage_name = MOpt_globals.havoc_stagename;
4983     afl->stage_short = MOpt_globals.havoc_stagenameshort;
4984     afl->stage_max = ((doing_det ? HAVOC_CYCLES_INIT : HAVOC_CYCLES) *
4985                       perf_score / afl->havoc_div) >>
4986                      7;
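
    /* Worked example (illustrative, assuming the config.h default
       HAVOC_CYCLES_INIT == 1024): with perf_score == 100 and
       havoc_div == 1, stage_max == (1024 * 100 / 1) >> 7 == 800 havoc
       execs for this entry. */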
4987 
4988   } else {
4989 
4990     perf_score = orig_perf;
4991 
4992     snprintf(afl->stage_name_buf, STAGE_BUF_SIZE,
4993              MOpt_globals.splice_stageformat, splice_cycle);
4994     afl->stage_name = afl->stage_name_buf;
4995     afl->stage_short = MOpt_globals.splice_stagenameshort;
4996     afl->stage_max = (SPLICE_HAVOC * perf_score / afl->havoc_div) >> 8;
4997 
4998   }
4999 
5000   s32 temp_len_puppet;
5001 
5002   // for (; afl->swarm_now < swarm_num; ++afl->swarm_now)
5003   {
5004 
5005     if (afl->key_puppet == 1) {
5006 
5007       if (unlikely(afl->orig_hit_cnt_puppet == 0)) {
5008 
5009         afl->orig_hit_cnt_puppet = afl->queued_items + afl->saved_crashes;
5010         afl->last_limit_time_start = get_cur_time();
5011         afl->SPLICE_CYCLES_puppet =
5012             (rand_below(
5013                  afl, SPLICE_CYCLES_puppet_up - SPLICE_CYCLES_puppet_low + 1) +
5014              SPLICE_CYCLES_puppet_low);
5015 
5016       }
5017 
5018     }                                            /* if afl->key_puppet == 1 */
5019 
5020     {
5021 
5022 #ifndef IGNORE_FINDS
5023     havoc_stage_puppet:
5024 #endif
5025 
5026       afl->stage_cur_byte = -1;
5027 
5028       /* The havoc stage mutation code is also invoked when splicing files; if
5029          the splice_cycle variable is set, generate different descriptions and
5030          such. */
5031 
5032       if (!splice_cycle) {
5033 
5034         afl->stage_name = MOpt_globals.havoc_stagename;
5035         afl->stage_short = MOpt_globals.havoc_stagenameshort;
5036         afl->stage_max = (doing_det ? HAVOC_CYCLES_INIT : HAVOC_CYCLES) *
5037                          perf_score / afl->havoc_div / 100;
5038 
5039       } else {
5040 
5041         perf_score = orig_perf;
5042         snprintf(afl->stage_name_buf, STAGE_BUF_SIZE,
5043                  MOpt_globals.splice_stageformat, splice_cycle);
5044         afl->stage_name = afl->stage_name_buf;
5045         afl->stage_short = MOpt_globals.splice_stagenameshort;
5046         afl->stage_max = SPLICE_HAVOC * perf_score / afl->havoc_div / 100;
5047 
5048       }
5049 
5050       if (afl->stage_max < HAVOC_MIN) { afl->stage_max = HAVOC_MIN; }
5051 
5052       temp_len = len;
5053 
5054       orig_hit_cnt = afl->queued_items + afl->saved_crashes;
5055 
5056       havoc_queued = afl->queued_items;
5057 
5058       u32 r_max, r;
5059 
5060       r_max = 16 + ((afl->extras_cnt + afl->a_extras_cnt) ? 2 : 0);
5061 
5062       if (unlikely(afl->expand_havoc && afl->ready_for_splicing_count > 1)) {
5063 
5064         /* Add expensive havoc cases here; they are activated after a full
5065            cycle without any new finds. */
5066 
5067         ++r_max;
5068 
5069       }
5070 
5071       for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max;
5072            ++afl->stage_cur) {
5073 
5074         u32 use_stacking = 1 << (1 + rand_below(afl, afl->havoc_stack_pow2));
5075 
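        /* Illustrative: with the stock HAVOC_STACK_POW2 of 7, use_stacking
           is a power of two in {2, 4, ..., 128}, i.e. between 2 and 128
           mutations get stacked onto this single test case. */
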
5076         afl->stage_cur_val = use_stacking;
5077 
5078         for (i = 0; i < operator_num; ++i) {
5079 
5080           MOpt_globals.cycles_v3[i] = MOpt_globals.cycles_v2[i];
5081 
5082         }
5083 
5084 #ifdef INTROSPECTION
5085         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_HAVOC-%u",
5086                  afl->queue_cur->fname, use_stacking);
5087 #endif
5088 
5089         for (i = 0; i < use_stacking; ++i) {
5090 
5091           switch (r = (select_algorithm(afl, r_max))) {
5092 
5093             case 0:
5094               /* Flip a single bit somewhere. Spooky! */
5095               FLIP_BIT(out_buf, rand_below(afl, temp_len << 3));
5096               MOpt_globals.cycles_v2[STAGE_FLIP1]++;
5097 #ifdef INTROSPECTION
5098               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT1");
5099               strcat(afl->mutation, afl->m_tmp);
5100 #endif
5101               break;
5102 
5103             case 1:
5104               if (temp_len < 2) { break; }
5105               temp_len_puppet = rand_below(afl, (temp_len << 3) - 1);
5106               FLIP_BIT(out_buf, temp_len_puppet);
5107               FLIP_BIT(out_buf, temp_len_puppet + 1);
5108               MOpt_globals.cycles_v2[STAGE_FLIP2]++;
5109 #ifdef INTROSPECTION
5110               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT2");
5111               strcat(afl->mutation, afl->m_tmp);
5112 #endif
5113               break;
5114 
5115             case 2:
5116               if (temp_len < 2) { break; }
5117               temp_len_puppet = rand_below(afl, (temp_len << 3) - 3);
5118               FLIP_BIT(out_buf, temp_len_puppet);
5119               FLIP_BIT(out_buf, temp_len_puppet + 1);
5120               FLIP_BIT(out_buf, temp_len_puppet + 2);
5121               FLIP_BIT(out_buf, temp_len_puppet + 3);
5122               MOpt_globals.cycles_v2[STAGE_FLIP4]++;
5123 #ifdef INTROSPECTION
5124               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT4");
5125               strcat(afl->mutation, afl->m_tmp);
5126 #endif
5127               break;
5128 
5129             case 3:
5130               if (temp_len < 4) { break; }
5131               out_buf[rand_below(afl, temp_len)] ^= 0xFF;
5132               MOpt_globals.cycles_v2[STAGE_FLIP8]++;
5133 #ifdef INTROSPECTION
5134               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT8");
5135               strcat(afl->mutation, afl->m_tmp);
5136 #endif
5137               break;
5138 
5139             case 4:
5140               if (temp_len < 8) { break; }
5141               *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) ^= 0xFFFF;
5142               MOpt_globals.cycles_v2[STAGE_FLIP16]++;
5143 #ifdef INTROSPECTION
5144               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT16");
5145               strcat(afl->mutation, afl->m_tmp);
5146 #endif
5147               break;
5148 
5149             case 5:
5150               if (temp_len < 8) { break; }
5151               *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) ^= 0xFFFFFFFF;
5152               MOpt_globals.cycles_v2[STAGE_FLIP32]++;
5153 #ifdef INTROSPECTION
5154               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT32");
5155               strcat(afl->mutation, afl->m_tmp);
5156 #endif
5157               break;
5158 
5159             case 6:
5160               out_buf[rand_below(afl, temp_len)] -=
5161                   1 + rand_below(afl, ARITH_MAX);
5162               out_buf[rand_below(afl, temp_len)] +=
5163                   1 + rand_below(afl, ARITH_MAX);
5164               MOpt_globals.cycles_v2[STAGE_ARITH8]++;
5165 #ifdef INTROSPECTION
5166               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8");
5167               strcat(afl->mutation, afl->m_tmp);
5168 #endif
5169               break;
5170 
5171             case 7:
5172               /* Randomly subtract from word, random endian. */
5173               if (temp_len < 8) { break; }
5174               if (rand_below(afl, 2)) {
5175 
5176                 u32 pos = rand_below(afl, temp_len - 1);
5177                 *(u16 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);
5178 #ifdef INTROSPECTION
5179                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16-%u", pos);
5180                 strcat(afl->mutation, afl->m_tmp);
5181 #endif
5182 
5183               } else {
5184 
5185                 u32 pos = rand_below(afl, temp_len - 1);
5186                 u16 num = 1 + rand_below(afl, ARITH_MAX);
5187 #ifdef INTROSPECTION
5188                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16BE-%u-%u",
5189                          pos, num);
5190                 strcat(afl->mutation, afl->m_tmp);
5191 #endif
5192                 *(u16 *)(out_buf + pos) =
5193                     SWAP16(SWAP16(*(u16 *)(out_buf + pos)) - num);
5194 
5195               }
5196 
5197               /* Randomly add to word, random endian. */
5198               if (rand_below(afl, 2)) {
5199 
5200                 u32 pos = rand_below(afl, temp_len - 1);
5201 #ifdef INTROSPECTION
5202                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+-%u", pos);
5203                 strcat(afl->mutation, afl->m_tmp);
5204 #endif
5205                 *(u16 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);
5206 
5207               } else {
5208 
5209                 u32 pos = rand_below(afl, temp_len - 1);
5210                 u16 num = 1 + rand_below(afl, ARITH_MAX);
5211 #ifdef INTROSPECTION
5212                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16BE+-%u-%u",
5213                          pos, num);
5214                 strcat(afl->mutation, afl->m_tmp);
5215 #endif
5216                 *(u16 *)(out_buf + pos) =
5217                     SWAP16(SWAP16(*(u16 *)(out_buf + pos)) + num);
5218 
5219               }
5220 
5221               MOpt_globals.cycles_v2[STAGE_ARITH16]++;
5222               break;
5223 
5224             case 8:
5225               /* Randomly subtract from dword, random endian. */
5226               if (temp_len < 8) { break; }
5227               if (rand_below(afl, 2)) {
5228 
5229                 u32 pos = rand_below(afl, temp_len - 3);
5230 #ifdef INTROSPECTION
5231                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32_-%u", pos);
5232                 strcat(afl->mutation, afl->m_tmp);
5233 #endif
5234                 *(u32 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);
5235 
5236               } else {
5237 
5238                 u32 pos = rand_below(afl, temp_len - 3);
5239                 u32 num = 1 + rand_below(afl, ARITH_MAX);
5240 #ifdef INTROSPECTION
5241                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32BE_-%u-%u",
5242                          pos, num);
5243                 strcat(afl->mutation, afl->m_tmp);
5244 #endif
5245                 *(u32 *)(out_buf + pos) =
5246                     SWAP32(SWAP32(*(u32 *)(out_buf + pos)) - num);
5247 
5248               }
5249 
5250               /* Randomly add to dword, random endian. */
5251               // if (temp_len < 4) break;
5252               if (rand_below(afl, 2)) {
5253 
5254                 u32 pos = rand_below(afl, temp_len - 3);
5255 #ifdef INTROSPECTION
5256                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+-%u", pos);
5257                 strcat(afl->mutation, afl->m_tmp);
5258 #endif
5259                 *(u32 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);
5260 
5261               } else {
5262 
5263                 u32 pos = rand_below(afl, temp_len - 3);
5264                 u32 num = 1 + rand_below(afl, ARITH_MAX);
5265 #ifdef INTROSPECTION
5266                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32BE+-%u-%u",
5267                          pos, num);
5268                 strcat(afl->mutation, afl->m_tmp);
5269 #endif
5270                 *(u32 *)(out_buf + pos) =
5271                     SWAP32(SWAP32(*(u32 *)(out_buf + pos)) + num);
5272 
5273               }
5274 
5275               MOpt_globals.cycles_v2[STAGE_ARITH32]++;
5276               break;
5277 
5278             case 9:
5279               /* Set byte to interesting value. */
5280               if (temp_len < 4) { break; }
5281               out_buf[rand_below(afl, temp_len)] =
5282                   interesting_8[rand_below(afl, sizeof(interesting_8))];
5283               MOpt_globals.cycles_v2[STAGE_INTEREST8]++;
5284 #ifdef INTROSPECTION
5285               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING8");
5286               strcat(afl->mutation, afl->m_tmp);
5287 #endif
5288               break;
5289 
5290             case 10:
5291               /* Set word to interesting value, randomly choosing endian. */
5292               if (temp_len < 8) { break; }
5293               if (rand_below(afl, 2)) {
5294 
5295 #ifdef INTROSPECTION
5296                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16");
5297                 strcat(afl->mutation, afl->m_tmp);
5298 #endif
5299                 *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
5300                     interesting_16[rand_below(afl,
5301                                               sizeof(interesting_16) >> 1)];
5302 
5303               } else {
5304 
5305 #ifdef INTROSPECTION
5306                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16BE");
5307                 strcat(afl->mutation, afl->m_tmp);
5308 #endif
5309                 *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
5310                     SWAP16(interesting_16[rand_below(
5311                         afl, sizeof(interesting_16) >> 1)]);
5312 
5313               }
5314 
5315               MOpt_globals.cycles_v2[STAGE_INTEREST16]++;
5316               break;
5317 
5318             case 11:
5319               /* Set dword to interesting value, randomly choosing endian. */
5320 
5321               if (temp_len < 8) { break; }
5322 
5323               if (rand_below(afl, 2)) {
5324 
5325 #ifdef INTROSPECTION
5326                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32");
5327                 strcat(afl->mutation, afl->m_tmp);
5328 #endif
5329                 *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
5330                     interesting_32[rand_below(afl,
5331                                               sizeof(interesting_32) >> 2)];
5332 
5333               } else {
5334 
5335 #ifdef INTROSPECTION
5336                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32BE");
5337                 strcat(afl->mutation, afl->m_tmp);
5338 #endif
5339                 *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
5340                     SWAP32(interesting_32[rand_below(
5341                         afl, sizeof(interesting_32) >> 2)]);
5342 
5343               }
5344 
5345               MOpt_globals.cycles_v2[STAGE_INTEREST32]++;
5346               break;
5347 
5348             case 12:
5349 
5350               /* Just set a random byte to a random value. Because,
5351                  why not. We use XOR with 1-255 to eliminate the
5352                  possibility of a no-op. */
5353 
5354               out_buf[rand_below(afl, temp_len)] ^= 1 + rand_below(afl, 255);
5355               MOpt_globals.cycles_v2[STAGE_RANDOMBYTE]++;
5356 #ifdef INTROSPECTION
5357               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " RAND8");
5358               strcat(afl->mutation, afl->m_tmp);
5359 #endif
5360               break;
5361 
5362             case 13: {
5363 
5364               /* Delete bytes. We're making this a bit more likely
5365                  than insertion (the next option) in hopes of keeping
5366                  files reasonably small. */
5367 
5368               u32 del_from, del_len;
5369 
5370               if (temp_len < 2) { break; }
5371 
5372               /* Don't delete too much. */
5373 
5374               del_len = choose_block_len(afl, temp_len - 1);
5375 
5376               del_from = rand_below(afl, temp_len - del_len + 1);
5377 
5378 #ifdef INTROSPECTION
5379               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " DEL-%u-%u", del_from,
5380                        del_len);
5381               strcat(afl->mutation, afl->m_tmp);
5382 #endif
5383               memmove(out_buf + del_from, out_buf + del_from + del_len,
5384                       temp_len - del_from - del_len);
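
              /* Illustrative: temp_len == 10, del_from == 3, del_len == 4
                 moves bytes 7..9 down to offsets 3..5; temp_len drops to 6
                 just below. */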
5385 
5386               temp_len -= del_len;
5387               MOpt_globals.cycles_v2[STAGE_DELETEBYTE]++;
5388               break;
5389 
5390             }
5391 
5392             case 14:
5393 
5394               if (temp_len + HAVOC_BLK_XL < MAX_FILE) {
5395 
5396                 /* Clone bytes (75%) or insert a block of constant bytes (25%).
5397                  */
5398 
5399                 u8  actually_clone = rand_below(afl, 4);
5400                 u32 clone_from, clone_to, clone_len;
5401                 u8 *new_buf;
5402 
5403                 if (likely(actually_clone)) {
5404 
5405                   clone_len = choose_block_len(afl, temp_len);
5406                   clone_from = rand_below(afl, temp_len - clone_len + 1);
5407 
5408                 } else {
5409 
5410                   clone_len = choose_block_len(afl, HAVOC_BLK_XL);
5411                   clone_from = 0;
5412 
5413                 }
5414 
5415                 clone_to = rand_below(afl, temp_len);
5416 
5417 #ifdef INTROSPECTION
5418                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE_%s-%u-%u-%u",
5419                          actually_clone ? "clone" : "insert", clone_from,
5420                          clone_to, clone_len);
5421                 strcat(afl->mutation, afl->m_tmp);
5422 #endif
5423                 new_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
5424                                       temp_len + clone_len);
5425                 if (unlikely(!new_buf)) { PFATAL("alloc"); }
5426 
5427                 /* Head */
5428 
5429                 memcpy(new_buf, out_buf, clone_to);
5430 
5431                 /* Inserted part */
5432 
5433                 if (actually_clone) {
5434 
5435                   memcpy(new_buf + clone_to, out_buf + clone_from, clone_len);
5436 
5437                 } else {
5438 
5439                   memset(new_buf + clone_to,
5440                          rand_below(afl, 2)
5441                              ? rand_below(afl, 256)
5442                              : out_buf[rand_below(afl, temp_len)],
5443                          clone_len);
5444 
5445                 }
5446 
5447                 /* Tail */
5448                 memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
5449                        temp_len - clone_to);
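
                /* new_buf layout (illustrative): head out_buf[0 .. clone_to),
                   then clone_len inserted bytes (cloned or constant), then
                   tail out_buf[clone_to .. temp_len) - temp_len + clone_len
                   bytes in total. */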
5450 
5451                 out_buf = new_buf;
5452                 afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
5453                 temp_len += clone_len;
5454                 MOpt_globals.cycles_v2[STAGE_Clone75]++;
5455 
5456               }
5457 
5458               break;
5459 
5460             case 15: {
5461 
5462               /* Overwrite bytes with a randomly selected chunk (75%) or fixed
5463                  bytes (25%). */
5464 
5465               u32 copy_from, copy_to, copy_len;
5466 
5467               if (temp_len < 2) { break; }
5468 
5469               copy_len = choose_block_len(afl, temp_len - 1);
5470 
5471               copy_from = rand_below(afl, temp_len - copy_len + 1);
5472               copy_to = rand_below(afl, temp_len - copy_len + 1);
5473 
5474               if (likely(rand_below(afl, 4))) {
5475 
5476                 if (likely(copy_from != copy_to)) {
5477 
5478 #ifdef INTROSPECTION
5479                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5480                            " OVERWRITE_COPY-%u-%u-%u", copy_from, copy_to,
5481                            copy_len);
5482                   strcat(afl->mutation, afl->m_tmp);
5483 #endif
5484                   memmove(out_buf + copy_to, out_buf + copy_from, copy_len);
5485 
5486                 }
5487 
5488               } else {
5489 
5490 #ifdef INTROSPECTION
5491                 snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5492                          " OVERWRITE_FIXED-%u-%u-%u", copy_from, copy_to,
5493                          copy_len);
5494                 strcat(afl->mutation, afl->m_tmp);
5495 #endif
5496                 memset(out_buf + copy_to,
5497                        rand_below(afl, 2) ? rand_below(afl, 256)
5498                                           : out_buf[rand_below(afl, temp_len)],
5499                        copy_len);
5500 
5501               }
5502 
5503               MOpt_globals.cycles_v2[STAGE_OverWrite75]++;
5504               break;
5505 
5506             }                                                    /* case 15 */
5507 
5508             default: {
5509 
5510               /* Values 16 and 17 can be selected only if there are any extras
5511                  present in the dictionaries. */
5512 
5513               r -= 16;
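                   /* After the shift: r == 0 -> overwrite with an extra,
                      r == 1 -> insert an extra, anything else -> splice with
                      another queue entry. */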
5514 
5515               if (r == 0 && (afl->extras_cnt || afl->a_extras_cnt)) {
5516 
5517                 /* Overwrite bytes with an extra. */
5518 
5519                 if (!afl->extras_cnt ||
5520                     (afl->a_extras_cnt && rand_below(afl, 2))) {
5521 
5522                   /* No user-specified extras or odds in our favor. Let's use an
5523                     auto-detected one. */
5524 
5525                   u32 use_extra = rand_below(afl, afl->a_extras_cnt);
5526                   u32 extra_len = afl->a_extras[use_extra].len;
5527 
5528                   if (extra_len > (u32)temp_len) break;
5529 
5530                   u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
5531 #ifdef INTROSPECTION
5532                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5533                            " AUTO_EXTRA_OVERWRITE-%u-%u", insert_at, extra_len);
5534                   strcat(afl->mutation, afl->m_tmp);
5535 #endif
5536                   memcpy(out_buf + insert_at, afl->a_extras[use_extra].data,
5537                          extra_len);
5538 
5539                 } else {
5540 
5541                   /* No auto extras or odds in our favor. Use the dictionary. */
5542 
5543                   u32 use_extra = rand_below(afl, afl->extras_cnt);
5544                   u32 extra_len = afl->extras[use_extra].len;
5545 
5546                   if (extra_len > (u32)temp_len) break;
5547 
5548                   u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
5549 #ifdef INTROSPECTION
5550                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5551                            " EXTRA_OVERWRITE-%u-%u", insert_at, extra_len);
5552                   strcat(afl->mutation, afl->m_tmp);
5553 #endif
5554                   memcpy(out_buf + insert_at, afl->extras[use_extra].data,
5555                          extra_len);
5556 
5557                 }
5558 
5559                 MOpt_globals.cycles_v2[STAGE_OverWriteExtra]++;
5560 
5561                 break;
5562 
5563               }
5564 
5565               /* Insert an extra. */
5566 
5567               else if (r == 1 && (afl->extras_cnt || afl->a_extras_cnt)) {
5568 
5569                 u32 use_extra, extra_len,
5570                     insert_at = rand_below(afl, temp_len + 1);
5571                 u8 *ptr;
5572 
5573                 /* Insert an extra. Do the same dice-rolling stuff as for the
5574                   previous case. */
5575 
5576                 if (!afl->extras_cnt ||
5577                     (afl->a_extras_cnt && rand_below(afl, 2))) {
5578 
5579                   use_extra = rand_below(afl, afl->a_extras_cnt);
5580                   extra_len = afl->a_extras[use_extra].len;
5581                   ptr = afl->a_extras[use_extra].data;
5582 #ifdef INTROSPECTION
5583                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5584                            " AUTO_EXTRA_INSERT-%u-%u", insert_at, extra_len);
5585                   strcat(afl->mutation, afl->m_tmp);
5586 #endif
5587 
5588                 } else {
5589 
5590                   use_extra = rand_below(afl, afl->extras_cnt);
5591                   extra_len = afl->extras[use_extra].len;
5592                   ptr = afl->extras[use_extra].data;
5593 #ifdef INTROSPECTION
5594                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5595                            " EXTRA_INSERT-%u-%u", insert_at, extra_len);
5596                   strcat(afl->mutation, afl->m_tmp);
5597 #endif
5598 
5599                 }
5600 
5601                 if (temp_len + extra_len >= MAX_FILE) break;
5602 
5603                 out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
5604                 if (unlikely(!out_buf)) { PFATAL("alloc"); }
5605 
5606                 /* Tail */
5607                 memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
5608                         temp_len - insert_at);
5609 
5610                 /* Inserted part */
5611                 memcpy(out_buf + insert_at, ptr, extra_len);
5612 
5613                 temp_len += extra_len;
5614                 MOpt_globals.cycles_v2[STAGE_InsertExtra]++;
5615                 break;
5616 
5617               } else {
5618 
5619                 if (unlikely(afl->ready_for_splicing_count < 2)) break;
5620 
5621                 u32 tid;
5622                 do {
5623 
5624                   tid = rand_below(afl, afl->queued_items);
5625 
5626                 } while (tid == afl->current_entry ||
5627 
5628                          afl->queue_buf[tid]->len < 4);
5629 
5630                 /* Get the testcase for splicing. */
5631                 struct queue_entry *target = afl->queue_buf[tid];
5632                 u32                 new_len = target->len;
5633                 u8                 *new_buf = queue_testcase_get(afl, target);
5634 
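                     /* Overwrite about half the time (buffer permitting), and
                        always when insertion could push the result past
                        MAX_FILE; otherwise insert the spliced block. */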
5635                 if ((temp_len >= 2 && rand_below(afl, 2)) ||
5636                     temp_len + HAVOC_BLK_XL >= MAX_FILE) {
5637 
5638                   /* overwrite mode */
5639 
5640                   u32 copy_from, copy_to, copy_len;
5641 
5642                   copy_len = choose_block_len(afl, new_len - 1);
5643                   if (copy_len > temp_len) copy_len = temp_len;
5644 
5645                   copy_from = rand_below(afl, new_len - copy_len + 1);
5646                   copy_to = rand_below(afl, temp_len - copy_len + 1);
5647 
5648 #ifdef INTROSPECTION
5649                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5650                            " SPLICE_OVERWRITE-%u-%u-%u-%s", copy_from, copy_to,
5651                            copy_len, target->fname);
5652                   strcat(afl->mutation, afl->m_tmp);
5653 #endif
5654                   memmove(out_buf + copy_to, new_buf + copy_from, copy_len);
5655 
5656                 } else {
5657 
5658                   /* insert mode */
5659 
5660                   u32 clone_from, clone_to, clone_len;
5661 
5662                   clone_len = choose_block_len(afl, new_len);
5663                   clone_from = rand_below(afl, new_len - clone_len + 1);
5664                   clone_to = rand_below(afl, temp_len + 1);
5665 
5666                   u8 *temp_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
5667                                              temp_len + clone_len + 1);
5668                   if (unlikely(!temp_buf)) { PFATAL("alloc"); }
5669 
5670 #ifdef INTROSPECTION
5671                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5672                            " SPLICE_INSERT-%u-%u-%u-%s", clone_from, clone_to,
5673                            clone_len, target->fname);
5674                   strcat(afl->mutation, afl->m_tmp);
5675 #endif
5676                   /* Head */
5677 
5678                   memcpy(temp_buf, out_buf, clone_to);
5679 
5680                   /* Inserted part */
5681 
5682                   memcpy(temp_buf + clone_to, new_buf + clone_from, clone_len);
5683 
5684                   /* Tail */
5685                   memcpy(temp_buf + clone_to + clone_len, out_buf + clone_to,
5686                          temp_len - clone_to);
5687 
5688                   out_buf = temp_buf;
5689                   afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
5690                   temp_len += clone_len;
5691 
5692                 }
5693 
5694                 MOpt_globals.cycles_v2[STAGE_Splice]++;
5695                 break;
5696 
5697               }
5698 
5699             }  // end of default:
5700 
5701           }                                    /* switch select_algorithm() */
5702 
5703         }                                      /* for i=0; i < use_stacking */
5704 
5705         ++*MOpt_globals.pTime;
5706 
5707         u64 temp_total_found = afl->queued_items + afl->saved_crashes;
5708 
5709         if (common_fuzz_stuff(afl, out_buf, temp_len)) {
5710 
5711           goto abandon_entry_puppet;
5712 
5713         }
5714 
5715         /* out_buf might have been mangled a bit, so let's restore it to its
5716            original size and shape. */
5717 
5718         out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
5719         if (unlikely(!out_buf)) { PFATAL("alloc"); }
5720         temp_len = len;
5721         memcpy(out_buf, in_buf, len);
5722 
5723         /* If we're finding new stuff, let's run for a bit longer, limits
5724            permitting. */
5725 
5726         if (afl->queued_items != havoc_queued) {
5727 
5728           if (perf_score <= afl->havoc_max_mult * 100) {
5729 
5730             afl->stage_max *= 2;
5731             perf_score *= 2;
5732 
5733           }
5734 
5735           havoc_queued = afl->queued_items;
5736 
5737         }
5738 
5739         if (unlikely(afl->queued_items + afl->saved_crashes >
5740                      temp_total_found)) {
5741 
5742           u64 temp_temp_puppet =
5743               afl->queued_items + afl->saved_crashes - temp_total_found;
5744           afl->total_puppet_find = afl->total_puppet_find + temp_temp_puppet;
5745 
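               /* Credit this round's new finds to every operator whose cycle
                  counter advanced, so MOpt can later weight operators by
                  their efficiency. */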
5746           if (MOpt_globals.is_pilot_mode) {
5747 
5748             for (i = 0; i < operator_num; ++i) {
5749 
5750               if (MOpt_globals.cycles_v2[i] > MOpt_globals.cycles_v3[i]) {
5751 
5752                 MOpt_globals.finds_v2[i] += temp_temp_puppet;
5753 
5754               }
5755 
5756             }
5757 
5758           } else {
5759 
5760             for (i = 0; i < operator_num; i++) {
5761 
5762               if (afl->core_operator_cycles_puppet_v2[i] >
5763                   afl->core_operator_cycles_puppet_v3[i])
5764 
5765                 afl->core_operator_finds_puppet_v2[i] += temp_temp_puppet;
5766 
5767             }
5768 
5769           }
5770 
5771         }                                                             /* if */
5772 
5773       } /* for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max;
5774 
5775            ++afl->stage_cur) { */
5776 
5777       new_hit_cnt = afl->queued_items + afl->saved_crashes;
5778 
5779       if (MOpt_globals.is_pilot_mode) {
5780 
5781         if (!splice_cycle) {
5782 
5783           afl->stage_finds[STAGE_HAVOC] += new_hit_cnt - orig_hit_cnt;
5784           afl->stage_cycles[STAGE_HAVOC] += afl->stage_max;
5785 #ifdef INTROSPECTION
5786           afl->queue_cur->stats_mutated += afl->stage_max;
5787 #endif
5788 
5789         } else {
5790 
5791           afl->stage_finds[STAGE_SPLICE] += new_hit_cnt - orig_hit_cnt;
5792           afl->stage_cycles[STAGE_SPLICE] += afl->stage_max;
5793 #ifdef INTROSPECTION
5794           afl->queue_cur->stats_mutated += afl->stage_max;
5795 #endif
5796 
5797         }
5798 
5799       }
5800 
5801 #ifndef IGNORE_FINDS
5802 
5803       /************
5804        * SPLICING *
5805        ************/
5806 
5807     retry_splicing_puppet:
5808 
5809       if (afl->use_splicing &&
5810           splice_cycle++ < (u32)afl->SPLICE_CYCLES_puppet &&
5811           afl->ready_for_splicing_count > 1 && afl->queue_cur->len >= 4) {
5812 
5813         struct queue_entry *target;
5814         u32                 tid, split_at;
5815         u8                 *new_buf;
5816         s32                 f_diff, l_diff;
5817 
5818         /* First of all, if we've modified in_buf for havoc, let's clean that
5819            up... */
5820 
5821         if (in_buf != orig_in) {
5822 
5823           in_buf = orig_in;
5824           len = afl->queue_cur->len;
5825 
5826         }
5827 
5828         /* Pick a random queue entry and seek to it. Don't splice with yourself.
5829          */
5830 
5831         do {
5832 
5833           tid = rand_below(afl, afl->queued_items);
5834 
5835         } while (tid == afl->current_entry || afl->queue_buf[tid]->len < 4);
5836 
5837         afl->splicing_with = tid;
5838         target = afl->queue_buf[tid];
5839 
5840         /* Read the testcase into a new buffer. */
5841         new_buf = queue_testcase_get(afl, target);
5842 
5843         /* Find a suitable splicing location, somewhere between the first and
5844            the last differing byte. Bail out if the difference is just a single
5845            byte or so. */
5846 
5847         locate_diffs(in_buf, new_buf, MIN(len, target->len), &f_diff, &l_diff);
5848 
5849         if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) {
5850 
5851           goto retry_splicing_puppet;
5852 
5853         }
5854 
5855         /* Split somewhere between the first and last differing byte. */
5856 
5857         split_at = f_diff + rand_below(afl, l_diff - f_diff);
5858 
5859         /* Do the thing. */
5860 
5861         len = target->len;
5862         afl->in_scratch_buf = afl_realloc(AFL_BUF_PARAM(in_scratch), len);
             if (unlikely(!afl->in_scratch_buf)) { PFATAL("alloc"); }
5863         memcpy(afl->in_scratch_buf, in_buf, split_at);
5864         memcpy(afl->in_scratch_buf + split_at, new_buf, len - split_at);
5865         in_buf = afl->in_scratch_buf;
5866         afl_swap_bufs(AFL_BUF_PARAM(in), AFL_BUF_PARAM(in_scratch));
5867 
5868         out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
5869         if (unlikely(!out_buf)) { PFATAL("alloc"); }
5870         memcpy(out_buf, in_buf, len);
5871 
5872         goto havoc_stage_puppet;
5873 
5874       }                                                  /* if splice_cycle */
5875 
5876 #endif                                                     /* !IGNORE_FINDS */
5877 
5878       ret_val = 0;
5879 
5880     abandon_entry:
5881     abandon_entry_puppet:
5882 
5883       if ((s64)splice_cycle >= afl->SPLICE_CYCLES_puppet) {
5884 
5885         afl->SPLICE_CYCLES_puppet =
5886             (rand_below(
5887                  afl, SPLICE_CYCLES_puppet_up - SPLICE_CYCLES_puppet_low + 1) +
5888              SPLICE_CYCLES_puppet_low);
5889 
5890       }
5891 
5892       afl->splicing_with = -1;
5893 
5894       /* Update afl->pending_not_fuzzed count if we made it through the
5895          calibration cycle and have not seen this entry before. */
5896       /*
5897         // TODO FIXME: I think we need this plus need an -L -1 check
5898         if (!afl->stop_soon && !afl->queue_cur->cal_failed &&
5899             (afl->queue_cur->was_fuzzed == 0 || afl->queue_cur->fuzz_level == 0)
5900         && !afl->queue_cur->disabled) {
5901 
5902           if (!afl->queue_cur->was_fuzzed) {
5903 
5904             --afl->pending_not_fuzzed;
5905             afl->queue_cur->was_fuzzed = 1;
5906             afl->reinit_table = 1;
5907             if (afl->queue_cur->favored) {
5908 
5909               --afl->pending_favored;
5910               afl->smallest_favored = -1;
5911 
5912             }
5913 
5914           }
5915 
5916         }
5917 
5918       */
5919 
5920       orig_in = NULL;
5921 
5922       if (afl->key_puppet == 1) {
5923 
5924         if (unlikely(
5925                 afl->queued_items + afl->saved_crashes >
5926                 ((afl->queued_items + afl->saved_crashes) * limit_time_bound +
5927                  afl->orig_hit_cnt_puppet))) {
5928 
5929           afl->key_puppet = 0;
5930           afl->orig_hit_cnt_puppet = 0;
5931           afl->last_limit_time_start = 0;
5932 
5933         }
5934 
5935       }
5936 
5937       if (unlikely(*MOpt_globals.pTime > MOpt_globals.period)) {
5938 
5939         afl->total_pacemaker_time += *MOpt_globals.pTime;
5940         *MOpt_globals.pTime = 0;
5941         new_hit_cnt = afl->queued_items + afl->saved_crashes;
5942 
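             /* End of a pacemaker period: score this swarm's fitness as new
                finds per unit of pilot time, and record each operator's best
                observed efficiency (eff_best) together with the position that
                achieved it (L_best). */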
5943         if (MOpt_globals.is_pilot_mode) {
5944 
5945           afl->swarm_fitness[afl->swarm_now] =
5946               (double)(afl->total_puppet_find - afl->temp_puppet_find) /
5947               ((double)(afl->tmp_pilot_time) / afl->period_pilot_tmp);
5948 
5949         }
5950 
5951         afl->temp_puppet_find = afl->total_puppet_find;
5952         for (i = 0; i < operator_num; ++i) {
5953 
5954           if (MOpt_globals.is_pilot_mode) {
5955 
5956             double temp_eff = 0.0;
5957 
5958             if (MOpt_globals.cycles_v2[i] > MOpt_globals.cycles[i]) {
5959 
5960               temp_eff =
5961                   (double)(MOpt_globals.finds_v2[i] - MOpt_globals.finds[i]) /
5962                   (double)(MOpt_globals.cycles_v2[i] - MOpt_globals.cycles[i]);
5963 
5964             }
5965 
5966             if (afl->eff_best[afl->swarm_now][i] < temp_eff) {
5967 
5968               afl->eff_best[afl->swarm_now][i] = temp_eff;
5969               afl->L_best[afl->swarm_now][i] = afl->x_now[afl->swarm_now][i];
5970 
5971             }
5972 
5973           }
5974 
5975           MOpt_globals.finds[i] = MOpt_globals.finds_v2[i];
5976           MOpt_globals.cycles[i] = MOpt_globals.cycles_v2[i];
5977 
5978         }                                    /* for i = 0; i < operator_num */
5979 
5980         if (MOpt_globals.is_pilot_mode) {
5981 
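               /* Move on to the next swarm; once every swarm has finished its
                  pilot period, select the fittest swarm and hand control to
                  the core fuzzing module (key_module = 1). */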
5982           afl->swarm_now = afl->swarm_now + 1;
5983           if (afl->swarm_now == swarm_num) {
5984 
5985             afl->key_module = 1;
5986             for (i = 0; i < operator_num; ++i) {
5987 
5988               afl->core_operator_cycles_puppet_v2[i] =
5989                   afl->core_operator_cycles_puppet[i];
5990               afl->core_operator_cycles_puppet_v3[i] =
5991                   afl->core_operator_cycles_puppet[i];
5992               afl->core_operator_finds_puppet_v2[i] =
5993                   afl->core_operator_finds_puppet[i];
5994 
5995             }
5996 
5997             double swarm_eff = 0.0;
5998             afl->swarm_now = 0;
5999             for (i = 0; i < swarm_num; ++i) {
6000 
6001               if (afl->swarm_fitness[i] > swarm_eff) {
6002 
6003                 swarm_eff = afl->swarm_fitness[i];
6004                 afl->swarm_now = i;
6005 
6006               }
6007 
6008             }
6009 
6010             if (afl->swarm_now < 0 || afl->swarm_now > swarm_num - 1) {
6011 
6012               FATAL("swarm_now out of range: %d", afl->swarm_now);
6013 
6014             }
6015 
6016           }                               /* if afl->swarm_now == swarm_num */
6017 
6018           /* Adjust the pilot-mode pointers that depend on 'afl->swarm_now'. */
6019           afl->mopt_globals_pilot.finds =
6020               afl->stage_finds_puppet[afl->swarm_now];
6021           afl->mopt_globals_pilot.finds_v2 =
6022               afl->stage_finds_puppet_v2[afl->swarm_now];
6023           afl->mopt_globals_pilot.cycles =
6024               afl->stage_cycles_puppet[afl->swarm_now];
6025           afl->mopt_globals_pilot.cycles_v2 =
6026               afl->stage_cycles_puppet_v2[afl->swarm_now];
6027           afl->mopt_globals_pilot.cycles_v3 =
6028               afl->stage_cycles_puppet_v3[afl->swarm_now];
6029 
6030         } else {
6031 
6032           for (i = 0; i < operator_num; i++) {
6033 
6034             afl->core_operator_finds_puppet[i] =
6035                 afl->core_operator_finds_puppet_v2[i];
6036             afl->core_operator_cycles_puppet[i] =
6037                 afl->core_operator_cycles_puppet_v2[i];
6038 
6039           }
6040 
6041           afl->key_module = 2;
6042 
6043           afl->old_hit_count = new_hit_cnt;
6044 
6045         }                                                  /* if pilot_mode */
6046 
6047       }         /* if (unlikely(*MOpt_globals.pTime > MOpt_globals.period)) */
6048 
6049     }                                                              /* block */
6050 
6051   }                                                                /* block */
6052 
6053   ++afl->queue_cur->fuzz_level;
6054   return ret_val;
6055 
6056 }
6057 
6058 #undef FLIP_BIT
6059 
6060 u8 core_fuzzing(afl_state_t *afl) {
6061 
6062   return mopt_common_fuzzing(afl, afl->mopt_globals_core);
6063 
6064 }
6065 
6066 u8 pilot_fuzzing(afl_state_t *afl) {
6067 
6068   return mopt_common_fuzzing(afl, afl->mopt_globals_pilot);
6069 
6070 }
6071 
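/* Run one particle swarm optimization step: refresh the inertia weight,
   recompute the global best operator distribution from all recorded finds,
   then update every swarm's velocity, position and cumulative selection
   probabilities before handing control back to the pilot module
   (key_module = 0). */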
6072 void pso_updating(afl_state_t *afl) {
6073 
6074   afl->g_now++;
6075   if (afl->g_now > afl->g_max) { afl->g_now = 0; }
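       /* The inertia weight w_now decays linearly from w_init down to w_end
          as the generation counter g_now runs from 0 to g_max. */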
6076   afl->w_now =
6077       (afl->w_init - afl->w_end) * (afl->g_max - afl->g_now) / (afl->g_max) +
6078       afl->w_end;
6079   int tmp_swarm, i, j;
6080   u64 temp_operator_finds_puppet = 0;
6081   for (i = 0; i < operator_num; ++i) {
6082 
6083     afl->operator_finds_puppet[i] = afl->core_operator_finds_puppet[i];
6084 
6085     for (j = 0; j < swarm_num; ++j) {
6086 
6087       afl->operator_finds_puppet[i] =
6088           afl->operator_finds_puppet[i] + afl->stage_finds_puppet[j][i];
6089 
6090     }
6091 
6092     temp_operator_finds_puppet =
6093         temp_operator_finds_puppet + afl->operator_finds_puppet[i];
6094 
6095   }
6096 
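       /* Convert each operator's share of all recorded finds into G_best,
          the global best position that attracts every swarm in the velocity
          update below. */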
6097   for (i = 0; i < operator_num; ++i) {
6098 
6099     if (afl->operator_finds_puppet[i]) {
6100 
6101       afl->G_best[i] = (double)((double)(afl->operator_finds_puppet[i]) /
6102                                 (double)(temp_operator_finds_puppet));
6103 
6104     }
6105 
6106   }
6107 
6108   for (tmp_swarm = 0; tmp_swarm < swarm_num; ++tmp_swarm) {
6109 
6110     double x_temp = 0.0;
6111     for (i = 0; i < operator_num; ++i) {
6112 
6113       afl->probability_now[tmp_swarm][i] = 0.0;
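           /* Classic PSO velocity update: inertia, attraction toward the
              swarm's local best (L_best), and attraction toward the global
              best (G_best), each scaled by the learning factor RAND_C. */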
6114       afl->v_now[tmp_swarm][i] =
6115           afl->w_now * afl->v_now[tmp_swarm][i] +
6116           RAND_C * (afl->L_best[tmp_swarm][i] - afl->x_now[tmp_swarm][i]) +
6117           RAND_C * (afl->G_best[i] - afl->x_now[tmp_swarm][i]);
6118       afl->x_now[tmp_swarm][i] += afl->v_now[tmp_swarm][i];
6119       if (afl->x_now[tmp_swarm][i] > v_max) {
6120 
6121         afl->x_now[tmp_swarm][i] = v_max;
6122 
6123       } else if (afl->x_now[tmp_swarm][i] < v_min) {
6124 
6125         afl->x_now[tmp_swarm][i] = v_min;
6126 
6127       }
6128 
6129       x_temp += afl->x_now[tmp_swarm][i];
6130 
6131     }
6132 
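         /* Renormalize the positions so they sum to one, and accumulate them
            into probability_now, the cumulative distribution that
            select_algorithm() samples operators from. */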
6133     for (i = 0; i < operator_num; ++i) {
6134 
6135       afl->x_now[tmp_swarm][i] = afl->x_now[tmp_swarm][i] / x_temp;
6136       if (likely(i != 0)) {
6137 
6138         afl->probability_now[tmp_swarm][i] =
6139             afl->probability_now[tmp_swarm][i - 1] + afl->x_now[tmp_swarm][i];
6140 
6141       } else {
6142 
6143         afl->probability_now[tmp_swarm][i] = afl->x_now[tmp_swarm][i];
6144 
6145       }
6146 
6147     }
6148 
6149     if (afl->probability_now[tmp_swarm][operator_num - 1] < 0.99 ||
6150         afl->probability_now[tmp_swarm][operator_num - 1] > 1.01) {
6151 
6152       FATAL("ERROR: cumulative operator probability out of range");
6153 
6154     }
6155 
6156   }
6157 
6158   afl->swarm_now = 0;
6159   afl->key_module = 0;
6160 
6161 }
6162 
6163 /* The entry point for mutating a queue entry: runs the default mutator,
6164    MOpt, or both, depending on the configuration. */
6165 u8 fuzz_one(afl_state_t *afl) {
6166 
6167   int key_val_lv_1 = -1, key_val_lv_2 = -1;
6168 
6169 #ifdef _AFL_DOCUMENT_MUTATIONS
6170 
6171   u8 path_buf[PATH_MAX];
6172   if (afl->do_document == 0) {
6173 
6174     snprintf(path_buf, PATH_MAX, "%s/mutations", afl->out_dir);
6175     afl->do_document = mkdir(path_buf, 0700);  // if it exists we do not care
6176     afl->do_document = 1;
6177 
6178   } else {
6179 
6180     afl->do_document = 2;
6181     afl->stop_soon = 2;
6182 
6183   }
6184 
6185 #endif
6186 
6187   /*
6188      The -L command line parameter sets limit_time_sig:
6189        limit_time_sig == 0: run only the default mutator
6190        limit_time_sig  > 0: run only MOpt
6191        limit_time_sig  < 0: run both
6192   */
6193 
6194   if (afl->limit_time_sig <= 0) { key_val_lv_1 = fuzz_one_original(afl); }
6195 
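       /* key_module selects the current MOpt phase: 0 = pilot fuzzing,
          1 = core fuzzing, 2 = PSO parameter update. */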
6196   if (afl->limit_time_sig != 0) {
6197 
6198     if (afl->key_module == 0) {
6199 
6200       key_val_lv_2 = pilot_fuzzing(afl);
6201 
6202     } else if (afl->key_module == 1) {
6203 
6204       key_val_lv_2 = core_fuzzing(afl);
6205 
6206     } else if (afl->key_module == 2) {
6207 
6208       pso_updating(afl);
6209 
6210     }
6211 
6212   }
6213 
6214   if (unlikely(key_val_lv_1 == -1)) { key_val_lv_1 = 0; }
6215   if (likely(key_val_lv_2 == -1)) { key_val_lv_2 = 0; }
6216 
6217   return (key_val_lv_1 | key_val_lv_2);
6218 
6219 }
6220 
6221