//===- FuzzerTracePC.h - Internal header for the Fuzzer ---------*- C++ -* ===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// fuzzer::TracePC
//===----------------------------------------------------------------------===//

#ifndef LLVM_FUZZER_TRACE_PC
#define LLVM_FUZZER_TRACE_PC

#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerValueBitMap.h"

#include <set>
#include <unordered_map>

namespace fuzzer {

// TableOfRecentCompares (TORC) remembers the most recently performed
// comparisons of type T.
// We record the arguments of CMP instructions in this table unconditionally
// because it seems cheaper this way than to compute some expensive
// conditions inside __sanitizer_cov_trace_cmp*.
// After the unit has been executed we may decide to use the contents of
// this table to populate a Dictionary.
template<class T, size_t kSizeT>
struct TableOfRecentCompares {
  static const size_t kSize = kSizeT;
  struct Pair {
    T A, B;
  };
  ATTRIBUTE_NO_SANITIZE_ALL
  void Insert(size_t Idx, const T &Arg1, const T &Arg2) {
    Idx = Idx % kSize;
    Table[Idx].A = Arg1;
    Table[Idx].B = Arg2;
  }

  Pair Get(size_t I) { return Table[I % kSize]; }

  Pair Table[kSize];
};
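
// Illustrative sketch (not part of the fuzzer itself; the index and values
// below are hypothetical): a CMP hook records both operands, and a later
// consumer reads a slot back, e.g. to turn one side of a failed comparison
// into a dictionary word.
//
//   TableOfRecentCompares<uint32_t, 32> TORC;
//   TORC.Insert(/*Idx=*/CallerPC, /*Arg1=*/0xDEADBEEF, /*Arg2=*/InputValue);
//   ...
//   auto P = TORC.Get(ProbeIdx);  // P.A == 0xDEADBEEF, P.B == InputValue
//   // P.A is a candidate constant to copy into future inputs.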

template <size_t kSizeT>
struct MemMemTable {
  static const size_t kSize = kSizeT;
  Word MemMemWords[kSize];
  Word EmptyWord;

  void Add(const uint8_t *Data, size_t Size) {
    if (Size <= 2) return;
    Size = std::min(Size, Word::GetMaxSize());
    size_t Idx = SimpleFastHash(Data, Size) % kSize;
    MemMemWords[Idx].Set(Data, Size);
  }
  const Word &Get(size_t Idx) {
    for (size_t i = 0; i < kSize; i++) {
      const Word &W = MemMemWords[(Idx + i) % kSize];
      if (W.size()) return W;
    }
    EmptyWord.Set(nullptr, 0);
    return EmptyWord;
  }
};
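
// Illustrative sketch (hypothetical caller): a memmem/strstr interceptor can
// stash the needle here, and a mutator can later pull a stored word out by
// probing from an arbitrary index; Get() walks forward to the first
// non-empty slot.
//
//   MemMemTable<1024> MMT;
//   MMT.Add(reinterpret_cast<const uint8_t *>("needle"), 6);
//   const Word &W = MMT.Get(ProbeIdx);  // first non-empty slot at/after ProbeIdx % 1024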

class TracePC {
public:
  static const size_t kNumPCs = 1 << 21;
  // How many bits of PC are used from __sanitizer_cov_trace_pc.
  static const size_t kTracePcBits = 18;

  enum HandleUnstableOptions {
    MinUnstable = 1,
    ZeroUnstable = 2,
  };

  void HandleInit(uint32_t *Start, uint32_t *Stop);
  void HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop);
  void HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop);
  void HandleCallerCallee(uintptr_t Caller, uintptr_t Callee);
  template <class T> void HandleCmp(uintptr_t PC, T Arg1, T Arg2);
  size_t GetTotalPCCoverage();
  void SetUseCounters(bool UC) { UseCounters = UC; }
  void SetUseValueProfileMask(uint32_t VPMask) { UseValueProfileMask = VPMask; }
  void SetPrintNewPCs(bool P) { DoPrintNewPCs = P; }
  void SetPrintNewFuncs(size_t P) { NumPrintNewFuncs = P; }
  void UpdateObservedPCs();
  template <class Callback> void CollectFeatures(Callback CB) const;

  void ResetMaps() {
    ValueProfileMap.Reset();
    if (NumModules)
      memset(Counters(), 0, GetNumPCs());
    ClearExtraCounters();
    ClearInlineCounters();
  }

  void ClearInlineCounters();

  void UpdateFeatureSet(size_t CurrentElementIdx, size_t CurrentElementSize);
  void PrintFeatureSet();

  void PrintModuleInfo();

  void PrintCoverage();
  void DumpCoverage();
  void PrintUnstableStats();

  template<class CallBack>
  void IterateCoveredFunctions(CallBack CB);

  void AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                         size_t n, bool StopAtZero);

  TableOfRecentCompares<uint32_t, 32> TORC4;
  TableOfRecentCompares<uint64_t, 32> TORC8;
  TableOfRecentCompares<Word, 32> TORCW;
  MemMemTable<1024> MMT;

  size_t GetNumPCs() const {
    return NumGuards == 0 ? (1 << kTracePcBits) : Min(kNumPCs, NumGuards + 1);
  }
  uintptr_t GetPC(size_t Idx) {
    assert(Idx < GetNumPCs());
    return PCs()[Idx];
  }

  void RecordInitialStack();
  uintptr_t GetMaxStackOffset() const;

  template<class CallBack>
  void ForEachObservedPC(CallBack CB) {
    for (auto PC : ObservedPCs)
      CB(PC);
  }

  void SetFocusFunction(const std::string &FuncName);
  bool ObservedFocusFunction();

  void InitializeUnstableCounters();
  bool UpdateUnstableCounters(int UnstableMode);
  void UpdateAndApplyUnstableCounters(int UnstableMode);

private:
  struct UnstableEdge {
    uint8_t Counter;
    bool IsUnstable;
  };

  UnstableEdge UnstableCounters[kNumPCs];

  bool UseCounters = false;
  uint32_t UseValueProfileMask = false;
  bool DoPrintNewPCs = false;
  size_t NumPrintNewFuncs = 0;

  struct Module {
    uint32_t *Start, *Stop;
  };

  Module Modules[4096];
  size_t NumModules;  // linker-initialized.
  size_t NumGuards;   // linker-initialized.

  struct { uint8_t *Start, *Stop; } ModuleCounters[4096];
  size_t NumModulesWithInline8bitCounters;  // linker-initialized.
  size_t NumInline8bitCounters;

  struct PCTableEntry {
    uintptr_t PC, PCFlags;
  };

  struct { const PCTableEntry *Start, *Stop; } ModulePCTable[4096];
  size_t NumPCTables;
  size_t NumPCsInPCTables;

  uint8_t *Counters() const;
  uintptr_t *PCs() const;

  Set<uintptr_t> ObservedPCs;
  std::unordered_map<uintptr_t, uintptr_t> ObservedFuncs;  // PC => Counter.

  template <class Callback>
  void IterateInline8bitCounters(Callback CB) const;

  std::pair<size_t, size_t> FocusFunction = {-1, -1};  // Module and PC IDs.

  ValueBitMap ValueProfileMap;
  uintptr_t InitialStack;
};

template <class Callback>
// void Callback(size_t FirstFeature, size_t Idx, uint8_t Value);
ATTRIBUTE_NO_SANITIZE_ALL
void ForEachNonZeroByte(const uint8_t *Begin, const uint8_t *End,
                        size_t FirstFeature, Callback Handle8bitCounter) {
  typedef uintptr_t LargeType;
  const size_t Step = sizeof(LargeType) / sizeof(uint8_t);
  const size_t StepMask = Step - 1;
  auto P = Begin;
  // Iterate by 1 byte until either the alignment boundary or the end.
  for (; reinterpret_cast<uintptr_t>(P) & StepMask && P < End; P++)
    if (uint8_t V = *P)
      Handle8bitCounter(FirstFeature, P - Begin, V);

  // Iterate by Step bytes at a time.
  for (; P < End; P += Step)
    if (LargeType Bundle = *reinterpret_cast<const LargeType *>(P))
      for (size_t I = 0; I < Step; I++, Bundle >>= 8)
        if (uint8_t V = Bundle & 0xff)
          Handle8bitCounter(FirstFeature, P - Begin + I, V);

  // Iterate by 1 byte until the end.
  for (; P < End; P++)
    if (uint8_t V = *P)
      Handle8bitCounter(FirstFeature, P - Begin, V);
}
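
// Illustrative sketch (standalone, hypothetical buffer): ForEachNonZeroByte
// only invokes the callback for non-zero counters, passing the byte's offset
// from Begin and its value.
//
//   uint8_t Counters[16] = {};
//   Counters[3] = 1;
//   Counters[9] = 200;
//   size_t NumNonZero = 0;
//   ForEachNonZeroByte(Counters, Counters + 16, /*FirstFeature=*/0,
//                      [&](size_t FirstFeature, size_t Idx, uint8_t V) {
//                        (void)FirstFeature; (void)Idx; (void)V;
//                        NumNonZero++;
//                      });
//   // NumNonZero == 2; the callback saw (0, 3, 1) and (0, 9, 200).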

// Given a non-zero Counter returns a number in the range [0,7].
template<class T>
unsigned CounterToFeature(T Counter) {
  // Returns a feature number by placing Counters into buckets as illustrated
  // below.
  //
  // Counter bucket: [1] [2] [3] [4-7] [8-15] [16-31] [32-127] [128+]
  // Feature number:  0   1   2    3     4      5        6        7
  //
  // This is a heuristic taken from AFL (see
  // http://lcamtuf.coredump.cx/afl/technical_details.txt).
  //
  // This implementation may change in the future so clients should
  // not rely on it.
  assert(Counter);
  unsigned Bit = 0;
  /**/ if (Counter >= 128) Bit = 7;
  else if (Counter >= 32) Bit = 6;
  else if (Counter >= 16) Bit = 5;
  else if (Counter >= 8) Bit = 4;
  else if (Counter >= 4) Bit = 3;
  else if (Counter >= 3) Bit = 2;
  else if (Counter >= 2) Bit = 1;
  return Bit;
}
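
// Worked examples of the bucketing above, derived directly from the
// thresholds in the chain of comparisons:
//
//   CounterToFeature(1u)   == 0
//   CounterToFeature(2u)   == 1
//   CounterToFeature(3u)   == 2
//   CounterToFeature(7u)   == 3
//   CounterToFeature(20u)  == 5
//   CounterToFeature(200u) == 7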

template <class Callback>  // void Callback(size_t Feature)
ATTRIBUTE_NO_SANITIZE_ADDRESS
__attribute__((noinline))
void TracePC::CollectFeatures(Callback HandleFeature) const {
  uint8_t *Counters = this->Counters();
  size_t N = GetNumPCs();
  auto Handle8bitCounter = [&](size_t FirstFeature,
                               size_t Idx, uint8_t Counter) {
    if (UseCounters)
      HandleFeature(FirstFeature + Idx * 8 + CounterToFeature(Counter));
    else
      HandleFeature(FirstFeature + Idx);
  };

  size_t FirstFeature = 0;

  if (!NumInline8bitCounters) {
    ForEachNonZeroByte(Counters, Counters + N, FirstFeature, Handle8bitCounter);
    FirstFeature += N * 8;
  }

  if (NumInline8bitCounters) {
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
      ForEachNonZeroByte(ModuleCounters[i].Start, ModuleCounters[i].Stop,
                         FirstFeature, Handle8bitCounter);
      FirstFeature += 8 * (ModuleCounters[i].Stop - ModuleCounters[i].Start);
    }
  }

  ForEachNonZeroByte(ExtraCountersBegin(), ExtraCountersEnd(), FirstFeature,
                     Handle8bitCounter);
  FirstFeature += (ExtraCountersEnd() - ExtraCountersBegin()) * 8;

  if (UseValueProfileMask) {
    ValueProfileMap.ForEach([&](size_t Idx) {
      HandleFeature(FirstFeature + Idx);
    });
    FirstFeature += ValueProfileMap.SizeInBits();
  }

  // Step function, grows similar to 8 * Log_2(A).
  auto StackDepthStepFunction = [](uint32_t A) -> uint32_t {
    if (!A) return A;
    uint32_t Log2 = Log(A);
    if (Log2 < 3) return A;
    Log2 -= 3;
    return (Log2 + 1) * 8 + ((A >> Log2) & 7);
  };
  assert(StackDepthStepFunction(1024) == 64);
  assert(StackDepthStepFunction(1024 * 4) == 80);
  assert(StackDepthStepFunction(1024 * 1024) == 144);

  if (auto MaxStackOffset = GetMaxStackOffset())
    HandleFeature(FirstFeature + StackDepthStepFunction(MaxStackOffset / 8));
}
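
// Illustrative sketch (hypothetical caller, mirroring how a driver might
// consume features): collect every feature observed during the last unit
// into a set.
//
//   Set<size_t> Features;
//   TPC.CollectFeatures([&](size_t Feature) { Features.insert(Feature); });
//   // Features now holds the counter-derived, extra-counter, value-profile
//   // and stack-depth features for the most recent execution.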

extern TracePC TPC;

}  // namespace fuzzer

#endif  // LLVM_FUZZER_TRACE_PC