//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//

#include "FuzzerTracePC.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <set>

// The coverage counters and PCs.
// These are declared as global variables named "__sancov_*" to simplify
// experiments with inlined instrumentation.
alignas(64) ATTRIBUTE_INTERFACE
uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs];

ATTRIBUTE_INTERFACE
uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs];

// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTES_INTERFACE_TLS_INITIAL_EXEC uintptr_t __sancov_lowest_stack;

namespace fuzzer {

TracePC TPC;

uint8_t *TracePC::Counters() const {
  return __sancov_trace_pc_guard_8bit_counters;
}

uintptr_t *TracePC::PCs() const {
  return __sancov_trace_pc_pcs;
}

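// Returns the number of covered PCs: the size of ObservedPCs when it is
// populated, otherwise the number of non-zero entries in the PCs() array.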
size_t TracePC::GetTotalPCCoverage() {
  if (ObservedPCs.size())
    return ObservedPCs.size();
  size_t Res = 0;
  for (size_t i = 1, N = GetNumPCs(); i < N; i++)
    if (PCs()[i])
      Res++;
  return Res;
}

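// Invokes CB(ModuleIdx, CounterIdxInModule, FlatCounterIdx) for every inline
// 8-bit counter, but only when the number of inline 8-bit counters matches
// the number of PC-table entries.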
template<class CallBack>
void TracePC::IterateInline8bitCounters(CallBack CB) const {
  if (NumInline8bitCounters && NumInline8bitCounters == NumPCsInPCTables) {
    size_t CounterIdx = 0;
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
      uint8_t *Beg = ModuleCounters[i].Start;
      size_t Size = ModuleCounters[i].Stop - Beg;
      assert(Size == (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
      for (size_t j = 0; j < Size; j++, CounterIdx++)
        CB(i, j, CounterIdx);
    }
  }
}

// Initializes unstable counters by copying Inline8bitCounters to unstable
// counters.
void TracePC::InitializeUnstableCounters() {
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    UnstableCounters[UnstableIdx].Counter = ModuleCounters[i].Start[j];
  });
}

// Compares the current counters with counters from previous runs
// and records differences as unstable edges.
bool TracePC::UpdateUnstableCounters(int UnstableMode) {
  bool Updated = false;
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    if (ModuleCounters[i].Start[j] != UnstableCounters[UnstableIdx].Counter) {
      Updated = true;
      UnstableCounters[UnstableIdx].IsUnstable = true;
      if (UnstableMode == ZeroUnstable)
        UnstableCounters[UnstableIdx].Counter = 0;
      else if (UnstableMode == MinUnstable)
        UnstableCounters[UnstableIdx].Counter = std::min(
            ModuleCounters[i].Start[j], UnstableCounters[UnstableIdx].Counter);
    }
  });
  return Updated;
}

// Updates and applies unstable counters to ModuleCounters in a single
// iteration.
void TracePC::UpdateAndApplyUnstableCounters(int UnstableMode) {
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    if (ModuleCounters[i].Start[j] != UnstableCounters[UnstableIdx].Counter) {
      UnstableCounters[UnstableIdx].IsUnstable = true;
      if (UnstableMode == ZeroUnstable)
        ModuleCounters[i].Start[j] = 0;
      else if (UnstableMode == MinUnstable)
        ModuleCounters[i].Start[j] = std::min(
            ModuleCounters[i].Start[j], UnstableCounters[UnstableIdx].Counter);
    }
  });
}

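// Registers a module's inline 8-bit counter region, as reported by
// __sanitizer_cov_8bit_counters_init. Empty or already-registered regions
// are ignored.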
void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
  if (Start == Stop) return;
  if (NumModulesWithInline8bitCounters &&
      ModuleCounters[NumModulesWithInline8bitCounters-1].Start == Start) return;
  assert(NumModulesWithInline8bitCounters <
         sizeof(ModuleCounters) / sizeof(ModuleCounters[0]));
  ModuleCounters[NumModulesWithInline8bitCounters++] = {Start, Stop};
  NumInline8bitCounters += Stop - Start;
}

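// Registers a module's PC table (entries of PC plus flags), as reported by
// __sanitizer_cov_pcs_init. Already-registered tables are ignored.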
void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
  const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
  const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}

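// Initializes trace-pc-guard guards for a module: every guard receives a
// distinct index (modulo kNumPCs) into the global counter/PC arrays. Modules
// whose first guard is already non-zero were initialized before and are
// skipped.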
void TracePC::HandleInit(uint32_t *Start, uint32_t *Stop) {
  if (Start == Stop || *Start) return;
  assert(NumModules < sizeof(Modules) / sizeof(Modules[0]));
  for (uint32_t *P = Start; P < Stop; P++) {
    NumGuards++;
    if (NumGuards == kNumPCs) {
      RawPrint(
          "WARNING: The binary has too many instrumented PCs.\n"
          "         You may want to reduce the size of the binary\n"
          "         for more efficient fuzzing and precise coverage data\n");
    }
    *P = NumGuards % kNumPCs;
  }
  Modules[NumModules].Start = Start;
  Modules[NumModules].Stop = Stop;
  NumModules++;
}

void TracePC::PrintModuleInfo() {
  if (NumGuards) {
    Printf("INFO: Loaded %zd modules   (%zd guards): ", NumModules, NumGuards);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Stop - Modules[i].Start,
             Modules[i].Start, Modules[i].Stop);
    Printf("\n");
  }
  if (NumModulesWithInline8bitCounters) {
    Printf("INFO: Loaded %zd modules   (%zd inline 8-bit counters): ",
           NumModulesWithInline8bitCounters, NumInline8bitCounters);
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++)
      Printf("%zd [%p, %p), ", ModuleCounters[i].Stop - ModuleCounters[i].Start,
             ModuleCounters[i].Start, ModuleCounters[i].Stop);
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if ((NumGuards && NumGuards != NumPCsInPCTables) ||
        (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables)) {
      Printf("ERROR: The size of coverage PC tables does not match the\n"
             "number of instrumented PCs. This might be a compiler bug,\n"
             "please contact the libFuzzer developers.\n"
             "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
             "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
      _Exit(1);
    }
  }
  if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
    Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
}

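// Records an indirect call edge: the low 12 bits of the caller and callee PCs
// are combined into a 24-bit index and added to the value-profile map.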
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
  ValueProfileMap.AddValueModPrime(Idx);
}

/// \return the address of the previous instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.h`
inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
#if defined(__arm__)
  // T32 (Thumb) branch instructions might be 16 or 32 bit long,
  // so we return (pc-2) in that case in order to be safe.
  // For A32 mode we return (pc-4) because all instructions are 32 bit long.
  return (PC - 3) & (~1);
#elif defined(__powerpc__) || defined(__powerpc64__) || defined(__aarch64__)
  // PCs are always 4 byte aligned.
  return PC - 4;
#elif defined(__sparc__) || defined(__mips__)
  return PC - 8;
#else
  return PC - 1;
#endif
}

/// \return the address of the next instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.cc`
inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
#if defined(__mips__)
  return PC + 8;
#elif defined(__powerpc__) || defined(__sparc__) || defined(__arm__) || \
    defined(__aarch64__)
  return PC + 4;
#else
  return PC + 1;
#endif
}

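// Walks the coverage counters (inline 8-bit counters or trace-pc-guard
// counters, whichever matches the PC tables) and records PCs and functions
// that have become covered, printing NEW_PC/NEW_FUNC lines when enabled.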
void TracePC::UpdateObservedPCs() {
  Vector<uintptr_t> CoveredFuncs;
  auto ObservePC = [&](uintptr_t PC) {
    if (ObservedPCs.insert(PC).second && DoPrintNewPCs) {
      PrintPC("\tNEW_PC: %p %F %L", "\tNEW_PC: %p", GetNextInstructionPc(PC));
      Printf("\n");
    }
  };

  auto Observe = [&](const PCTableEntry &TE) {
    if (TE.PCFlags & 1)
      if (++ObservedFuncs[TE.PC] == 1 && NumPrintNewFuncs)
        CoveredFuncs.push_back(TE.PC);
    ObservePC(TE.PC);
  };

  if (NumPCsInPCTables) {
    if (NumInline8bitCounters == NumPCsInPCTables) {
      IterateInline8bitCounters([&](int i, int j, int CounterIdx) {
        if (ModuleCounters[i].Start[j])
          Observe(ModulePCTable[i].Start[j]);
      });
    } else if (NumGuards == NumPCsInPCTables) {
      size_t GuardIdx = 1;
      for (size_t i = 0; i < NumModules; i++) {
        uint32_t *Beg = Modules[i].Start;
        size_t Size = Modules[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++, GuardIdx++)
          if (Counters()[GuardIdx])
            Observe(ModulePCTable[i].Start[j]);
      }
    }
  }

  for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N;
       i++) {
    Printf("\tNEW_FUNC[%zd/%zd]: ", i + 1, CoveredFuncs.size());
    PrintPC("%p %F %L", "%p", GetNextInstructionPc(CoveredFuncs[i]));
    Printf("\n");
  }
}

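// Returns the path of the module containing PC, or an empty string if the
// sanitizer runtime cannot resolve it.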
static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = "";  // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
      reinterpret_cast<void *>(PC), ModulePathRaw,
      sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}

template<class CallBack>
void TracePC::IterateCoveredFunctions(CallBack CB) {
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    auto ModuleName = GetModuleName(M.Start->PC);
    for (auto NextFE = M.Start; NextFE < M.Stop; ) {
      auto FE = NextFE;
      assert((FE->PCFlags & 1) && "Not a function entry point");
      do {
        NextFE++;
      } while (NextFE < M.Stop && !(NextFE->PCFlags & 1));
      if (ObservedFuncs.count(FE->PC))
        CB(FE, NextFE, ObservedFuncs[FE->PC]);
    }
  }
}

void TracePC::SetFocusFunction(const std::string &FuncName) {
  // This function should be called once.
  assert(FocusFunction.first > NumModulesWithInline8bitCounters);
  if (FuncName.empty())
    return;
  for (size_t M = 0; M < NumModulesWithInline8bitCounters; M++) {
    auto &PCTE = ModulePCTable[M];
    size_t N = PCTE.Stop - PCTE.Start;
    for (size_t I = 0; I < N; I++) {
      if (!(PCTE.Start[I].PCFlags & 1)) continue;  // not a function entry.
      auto Name = DescribePC("%F", GetNextInstructionPc(PCTE.Start[I].PC));
      if (Name[0] == 'i' && Name[1] == 'n' && Name[2] == ' ')
        Name = Name.substr(3, std::string::npos);
      if (FuncName != Name) continue;
      Printf("INFO: Focus function is set to '%s'\n", Name.c_str());
      FocusFunction = {M, I};
      return;
    }
  }
}

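// Returns true if the inline 8-bit counter at the focus function's entry
// index is non-zero, i.e. the focus function was executed.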
bool TracePC::ObservedFocusFunction() {
  size_t I = FocusFunction.first;
  size_t J = FocusFunction.second;
  if (I >= NumModulesWithInline8bitCounters)
    return false;
  auto &MC = ModuleCounters[I];
  size_t Size = MC.Stop - MC.Start;
  if (J >= Size)
    return false;
  return MC.Start[J] != 0;
}

void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  auto CoveredFunctionCallback = [&](const PCTableEntry *First,
                                     const PCTableEntry *Last,
                                     uintptr_t Counter) {
    assert(First < Last);
    auto VisualizePC = GetNextInstructionPc(First->PC);
    std::string FileStr = DescribePC("%s", VisualizePC);
    if (!IsInterestingCoverageFile(FileStr))
      return;
    std::string FunctionStr = DescribePC("%F", VisualizePC);
    if (FunctionStr.find("in ") == 0)
      FunctionStr = FunctionStr.substr(3);
    std::string LineStr = DescribePC("%l", VisualizePC);
    size_t Line = std::stoul(LineStr);
    size_t NumEdges = Last - First;
    Vector<uintptr_t> UncoveredPCs;
    for (auto TE = First; TE < Last; TE++)
      if (!ObservedPCs.count(TE->PC))
        UncoveredPCs.push_back(TE->PC);
    Printf("COVERED_FUNC: hits: %zd", Counter);
    Printf(" edges: %zd/%zd", NumEdges - UncoveredPCs.size(), NumEdges);
    Printf(" %s %s:%zd\n", FunctionStr.c_str(), FileStr.c_str(), Line);
    for (auto PC: UncoveredPCs)
      Printf("  UNCOVERED_PC: %s\n",
             DescribePC("%s:%l", GetNextInstructionPc(PC)).c_str());
  };

  IterateCoveredFunctions(CoveredFunctionCallback);
}

void TracePC::DumpCoverage() {
  if (EF->__sanitizer_dump_coverage) {
    Vector<uintptr_t> PCsCopy(GetNumPCs());
    for (size_t i = 0; i < GetNumPCs(); i++)
      PCsCopy[i] = PCs()[i] ? GetPreviousInstructionPc(PCs()[i]) : 0;
    EF->__sanitizer_dump_coverage(PCsCopy.data(), PCsCopy.size());
  }
}

void TracePC::PrintUnstableStats() {
  size_t count = 0;
  Printf("UNSTABLE_FUNCTIONS:\n");
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    const PCTableEntry &TE = ModulePCTable[i].Start[j];
    if (UnstableCounters[UnstableIdx].IsUnstable) {
      count++;
      if (ObservedFuncs.count(TE.PC)) {
        auto VisualizePC = GetNextInstructionPc(TE.PC);
        std::string FunctionStr = DescribePC("%F", VisualizePC);
        if (FunctionStr.find("in ") == 0)
          FunctionStr = FunctionStr.substr(3);
        Printf("%s\n", FunctionStr.c_str());
      }
    }
  });

  Printf("stat::stability_rate: %.2f\n",
         100 - static_cast<float>(count * 100) / NumInline8bitCounters);
}

// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as a new coverage.
//
// For memcmp/strcmp/etc the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is a XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0;  // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}

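// Records a comparison: the operands are inserted into the table of recent
// compares (TORC4 for 32-bit, TORC8 for 64-bit), and two value-profile
// features are added, one derived from the Hamming distance of the operands
// and one from the leading-zero count of their difference.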
template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  if (sizeof(T) == 4)
      TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
      TORC8.Insert(ArgXor, Arg1, Arg2);
  uint64_t HammingDistance = __builtin_popcountll(ArgXor); // [0,64]
  uint64_t AbsoluteDistance =
      (Arg1 == Arg2 ? 0 : __builtin_clzll(Arg1 - Arg2) + 1);
  ValueProfileMap.AddValue(PC * 128 + HammingDistance);
  ValueProfileMap.AddValue(PC * 128 + 64 + AbsoluteDistance);
}

static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}

// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may not be zero-terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++)  {}
  return Len;
}

void TracePC::ClearInlineCounters() {
  for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
    uint8_t *Beg = ModuleCounters[i].Start;
    size_t Size = ModuleCounters[i].Stop - Beg;
    memset(Beg, 0, Size);
  }
}

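// Records the current stack pointer as the baseline for
// -fsanitize-coverage=stack-depth tracking; GetMaxStackOffset() reports how
// far below this baseline __sancov_lowest_stack has reached.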
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::RecordInitialStack() {
  int stack;
  __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
}

uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack;  // Stack grows down
}

} // namespace fuzzer

extern "C" {
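// Called on every instrumented edge under -fsanitize-coverage=trace-pc-guard:
// records the caller PC and increments the 8-bit counter for this guard.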
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uint32_t Idx = *Guard;
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

// Best-effort support for -fsanitize-coverage=trace-pc, which is available
// in both Clang and GCC.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uintptr_t Idx = PC & (((uintptr_t)1 << fuzzer::TracePC::kTracePcBits) - 1);
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::TPC.HandleInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
                              const uintptr_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// Now the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

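// Cases[0] holds the number of case values, Cases[1] the bit width of the
// switch operand, and the case values themselves follow in Vals; the
// comparison loop and the early-exit check assume the values are in
// ascending order, so Vals[N - 1] is the largest. Each switch is reported
// to HandleCmp as Val XOR the first case value not below it.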
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case.
  if (Vals[N - 1] < 256 && Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  size_t i;
  uint64_t Token = 0;
  for (i = 0; i < N; i++) {
    Token = Val ^ Vals[i];
    if (Val < Vals[i])
      break;
  }

  if (ValSizeInBits == 16)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint16_t>(Token), (uint16_t)(0));
  else if (ValSizeInBits == 32)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint32_t>(Token), (uint32_t)(0));
  else
    fuzzer::TPC.HandleCmp(PC + i, Token, (uint64_t)(0));
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}

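// Weak hooks invoked by the sanitizer interceptors for memcmp/str* functions.
// They feed the compared data into the value profile, the table of recent
// compares and the memmem table, but only while the user callback is running.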
ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
}  // extern "C"