xref: /aosp_15_r20/external/AFLplusplus/src/afl-fuzz-skipdet.c (revision 08b48e0b10e97b33e7b60c5b6e2243bd915777f2)


#include "afl-fuzz.h"
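
/* Support for skipping the deterministic stages: decide whether a queue
   entry is worth deterministic fuzzing at all, and infer which byte ranges
   of its input can be skipped when it is. */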

/* XOR-flip size bytes of input, starting at pos. */
void flip_range(u8 *input, u32 pos, u32 size) {

  for (u32 i = 0; i < size; i++)
    input[pos + i] ^= 0xFF;

  return;

}

#define MAX_EFF_TIMEOUT (10 * 60 * 1000)
#define MAX_DET_TIMEOUT (15 * 60 * 1000)
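
/* Check whether the stage has exceeded its time budget: 10 minutes for the
   effective-map (flip) pass, 15 minutes for the deterministic stage overall.
   cur_ms is the stage's start time in milliseconds. */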
u8 is_det_timeout(u64 cur_ms, u8 is_flip) {

  if (is_flip) {

    if (unlikely(get_cur_time() - cur_ms > MAX_EFF_TIMEOUT)) return 1;

  } else {

    if (unlikely(get_cur_time() - cur_ms > MAX_DET_TIMEOUT)) return 1;

  }

  return 0;

}

/* decide if the seed should be deterministically fuzzed */

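/* A seed only qualifies while it is favored and has not passed deterministic
   fuzzing yet; beyond that, its minimized trace must cover enough edges that
   no previously det-fuzzed seed has covered (the undet-bits threshold). */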
u8 should_det_fuzz(afl_state_t *afl, struct queue_entry *q) {

  if (!afl->skipdet_g->virgin_det_bits) {

    afl->skipdet_g->virgin_det_bits =
        (u8 *)ck_alloc(sizeof(u8) * afl->fsrv.map_size);

  }

  if (!q->favored || q->passed_det) return 0;
  if (!q->trace_mini) return 0;

  if (!afl->skipdet_g->last_cov_undet)
    afl->skipdet_g->last_cov_undet = get_cur_time();

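  /* If no seed has cleared the threshold for THRESHOLD_DEC_TIME, lower it by
     25% so that deterministic fuzzing is not starved entirely. */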
  if (get_cur_time() - afl->skipdet_g->last_cov_undet >= THRESHOLD_DEC_TIME) {

    if (afl->skipdet_g->undet_bits_threshold >= 2) {

      afl->skipdet_g->undet_bits_threshold *= 0.75;
      afl->skipdet_g->last_cov_undet = get_cur_time();

    }

  }

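  /* Count the edges in this seed's minimized trace that no deterministically
     fuzzed seed has covered before. */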
  u32 new_det_bits = 0;

  for (u32 i = 0; i < afl->fsrv.map_size; i++) {

    if (unlikely(q->trace_mini[i >> 3] & (1 << (i & 7)))) {

      if (!afl->skipdet_g->virgin_det_bits[i]) { new_det_bits++; }

    }

  }

  if (!afl->skipdet_g->undet_bits_threshold)
    afl->skipdet_g->undet_bits_threshold = new_det_bits * 0.05;

  if (new_det_bits >= afl->skipdet_g->undet_bits_threshold) {

    afl->skipdet_g->last_cov_undet = get_cur_time();
    q->skipdet_e->undet_bits = new_det_bits;

    for (u32 i = 0; i < afl->fsrv.map_size; i++) {

      if (unlikely(q->trace_mini[i >> 3] & (1 << (i & 7)))) {

        if (!afl->skipdet_g->virgin_det_bits[i])
          afl->skipdet_g->virgin_det_bits[i] = 1;

      }

    }

    return 1;

  }

  return 0;

}

/*
  Runs the two skipdet stages (skip inference, then the quick effective-map
  calculation) on the current queue entry.
  Returns 0 if an exec failed, 1 otherwise.
*/

u8 skip_deterministic_stage(afl_state_t *afl, u8 *orig_buf, u8 *out_buf,
                            u32 len, u64 before_det_time) {

  u64 orig_hit_cnt, new_hit_cnt;

  if (afl->queue_cur->skipdet_e->done_eff) return 1;

  if (!should_det_fuzz(afl, afl->queue_cur)) return 1;

  /* should_det_fuzz() above already makes sure that seeds without enough
     undetermined bits are ignored */

  /******************
   * SKIP INFERENCE *
   ******************/

  afl->stage_short = "inf";
  afl->stage_name = "inference";
  afl->stage_cur = 0;
  orig_hit_cnt = afl->queued_items + afl->saved_crashes;

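  /* inf_eff_map[i] == 1 means byte i is (still) considered effective and will
     be fuzzed; the inference below clears the entries it deems skippable. */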
  u8 *inf_eff_map = (u8 *)ck_alloc(sizeof(u8) * len);
  memset(inf_eff_map, 1, sizeof(u8) * len);

  if (common_fuzz_stuff(afl, orig_buf, len)) { return 0; }

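  /* Baseline coverage checksum of the unmodified input; _prev_cksum keeps a
     stable copy because prev_cksum is recomputed later for FUZZ_NEARBY. */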
  u64 prev_cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
  u64 _prev_cksum = prev_cksum;

  if (MINIMAL_BLOCK_SIZE * 8 < len) {

    // u64 size_skiped = 0, quick_skip_exec = total_execs, quick_skip_time =
    // get_cur_time();
    u64 pre_inf_exec = afl->fsrv.total_execs, pre_inf_time = get_cur_time();

    /* if the deterministic stage time / input size is too small, just go
       ahead */

    u32 pos = 0, cur_block_size = MINIMAL_BLOCK_SIZE, max_block_size = len / 8;

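    /* Greedy block inference: flip a block in place, run the target, restore
       the block, and compare the coverage checksum against the baseline.
       While the checksum is unchanged, double the block size; once it changes
       (or the end of the buffer is reached), mark the first half of the last
       block size as skippable and move on. */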
    while (pos < len - 1) {

      cur_block_size = MINIMAL_BLOCK_SIZE;

      while (cur_block_size < max_block_size) {

        u32 flip_block_size =
            (cur_block_size + pos < len) ? cur_block_size : len - 1 - pos;

        afl->stage_cur += 1;

        flip_range(out_buf, pos, flip_block_size);

        if (common_fuzz_stuff(afl, out_buf, len)) return 0;

        flip_range(out_buf, pos, flip_block_size);

        u64 cksum =
            hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);

        // printf("Now trying range %d with %d, %s.\n", pos, cur_block_size,
        //     (cksum == prev_cksum) ? (u8*)"Yes" : (u8*) "Not");

        /* continue until we fail or exceed length */
        if (cksum == _prev_cksum) {

          cur_block_size *= 2;

          if (cur_block_size >= len - 1 - pos) break;

        } else {

          break;

        }

      }

      if (cur_block_size == MINIMAL_BLOCK_SIZE) {

        /* we failed early on */

        pos += cur_block_size;

      } else {

        u32 cur_skip_len = (cur_block_size / 2 + pos < len)
                               ? (cur_block_size / 2)
                               : (len - pos - 1);

        memset(inf_eff_map + pos, 0, cur_skip_len);

        afl->skipdet_g->inf_prof->inf_skipped_bytes += cur_skip_len;

        pos += cur_skip_len;

      }

    }

    afl->skipdet_g->inf_prof->inf_execs_cost +=
        (afl->fsrv.total_execs - pre_inf_exec);
    afl->skipdet_g->inf_prof->inf_time_cost += (get_cur_time() - pre_inf_time);
    // PFATAL("Done, now have %d bytes skipped, with exec %lld, time %lld.\n",
    // afl->inf_skipped_bytes, afl->inf_execs_cost, afl->inf_time_cost);

  } else

    memset(inf_eff_map, 1, len);

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_INF] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_INF] += afl->stage_cur;

  /****************************
   * Quick Skip Effective Map *
   ****************************/

  /* Quick Effective Map Calculation */

  afl->stage_short = "quick";
  afl->stage_name = "quick eff";
  afl->stage_cur = 0;
  afl->stage_max = 32 * 1024;

  orig_hit_cnt = afl->queued_items + afl->saved_crashes;

  u32 before_skip_inf = afl->queued_items;

  /* Reset the effective-byte map: bytes marked in a previous round have
     already been fuzzed. */
  u8 *skip_eff_map = afl->queue_cur->skipdet_e->skip_eff_map,
     *done_inf_map = afl->queue_cur->skipdet_e->done_inf_map;

  if (!skip_eff_map) {

    skip_eff_map = (u8 *)ck_alloc(sizeof(u8) * len);
    afl->queue_cur->skipdet_e->skip_eff_map = skip_eff_map;

  } else {

    memset(skip_eff_map, 0, sizeof(u8) * len);

  }

  /* restore the starting point: resume at the first byte that has not been
     processed yet */
  if (!done_inf_map) {

    done_inf_map = (u8 *)ck_alloc(sizeof(u8) * len);
    afl->queue_cur->skipdet_e->done_inf_map = done_inf_map;

  } else {

    for (afl->stage_cur = 0; afl->stage_cur < len; afl->stage_cur++) {

      if (done_inf_map[afl->stage_cur] == 0) break;

    }

  }

  /* depending on the seed's performance, we could search eff bytes
     for multiple rounds */

  u8 eff_round_continue = 1, eff_round_done = 0, done_eff = 0, repeat_eff = 0,
     fuzz_nearby = 0, *non_eff_bytes = 0;

  u64 before_eff_execs = afl->fsrv.total_execs;

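  /* Both extensions are opt-in via environment variables: REPEAT_EFF re-runs
     the effective-map search for seeds that keep producing results, and
     FUZZ_NEARBY also treats the bytes around each effective byte as
     effective (see cleanup_skipdet below). */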
  if (getenv("REPEAT_EFF")) repeat_eff = 1;
  if (getenv("FUZZ_NEARBY")) fuzz_nearby = 1;

  if (fuzz_nearby) {

    non_eff_bytes = (u8 *)ck_alloc(sizeof(u8) * len);

    // clean exec cksum
    if (common_fuzz_stuff(afl, out_buf, len)) { return 0; }
    prev_cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);

  }

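  /* Quick effective-map pass: overwrite each remaining candidate byte with a
     random different value and run once; the byte counts as effective if that
     single run adds a new queue entry. stage_max doubles on every hit (up to
     MAXIMUM_QUICK_EFF_EXECS), so productive seeds get a larger budget. */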
  do {

    eff_round_continue = 0;
    afl->stage_max = 32 * 1024;

    for (; afl->stage_cur < afl->stage_max && afl->stage_cur < len;
         ++afl->stage_cur) {

      afl->stage_cur_byte = afl->stage_cur;

      if (!inf_eff_map[afl->stage_cur_byte] ||
          skip_eff_map[afl->stage_cur_byte])
        continue;

      if (is_det_timeout(before_det_time, 1)) { goto cleanup_skipdet; }

      u8 orig = out_buf[afl->stage_cur_byte], replace = rand_below(afl, 256);

      while (replace == orig) {

        replace = rand_below(afl, 256);

      }

      out_buf[afl->stage_cur_byte] = replace;

      before_skip_inf = afl->queued_items;

      if (common_fuzz_stuff(afl, out_buf, len)) { return 0; }

      out_buf[afl->stage_cur_byte] = orig;

      if (fuzz_nearby) {

        if (prev_cksum ==
            hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST)) {

          non_eff_bytes[afl->stage_cur_byte] = 1;

        }

      }

      if (afl->queued_items != before_skip_inf) {

        skip_eff_map[afl->stage_cur_byte] = 1;
        afl->queue_cur->skipdet_e->quick_eff_bytes += 1;

        if (afl->stage_max < MAXIMUM_QUICK_EFF_EXECS) { afl->stage_max *= 2; }

        if (afl->stage_max == MAXIMUM_QUICK_EFF_EXECS && repeat_eff)
          eff_round_continue = 1;

      }

      done_inf_map[afl->stage_cur_byte] = 1;

    }

    afl->stage_cur = 0;
    done_eff = 1;

    if (++eff_round_done >= 8) break;

  } while (eff_round_continue);

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_QUICK] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_QUICK] += (afl->fsrv.total_execs - before_eff_execs);

cleanup_skipdet:

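  /* With FUZZ_NEARBY, widen every effective byte into a 7-byte window
     (assuming DWORD-sized fields), except for bytes whose single-byte
     mutation demonstrably left the coverage checksum unchanged. */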
  if (fuzz_nearby) {

    u8 *nearby_bytes = (u8 *)ck_alloc(sizeof(u8) * len);

    u32 i = 3;
    while (i < len) {

      // assume DWORD size, from i - 3 -> i + 3
      if (skip_eff_map[i]) {

        u32 fill_length = (i + 3 < len) ? 7 : len - i + 2;
        memset(nearby_bytes + i - 3, 1, fill_length);
        i += 3;

      } else

        i += 1;

    }

    for (i = 0; i < len; i++) {

      if (nearby_bytes[i] && !non_eff_bytes[i]) skip_eff_map[i] = 1;

    }

    ck_free(nearby_bytes);
    ck_free(non_eff_bytes);

  }

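  /* Only a completed effective-map pass marks the entry as done; after a
     timeout the inference is resumed on a later cycle via done_inf_map. */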
  if (done_eff) {

    afl->queue_cur->skipdet_e->continue_inf = 0;
    afl->queue_cur->skipdet_e->done_eff = 1;

  } else {

    afl->queue_cur->skipdet_e->continue_inf = 1;

  }

  return 1;

}
