//===-- ProfiledBinary.h - Binary decoder -----------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_TOOLS_LLVM_PROFGEN_PROFILEDBINARY_H
#define LLVM_TOOLS_LLVM_PROFGEN_PROFILEDBINARY_H

#include "CallContext.h"
#include "ErrorHandling.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/StringSet.h"
#include "llvm/DebugInfo/DWARF/DWARFContext.h"
#include "llvm/DebugInfo/Symbolize/Symbolize.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCDisassembler/MCDisassembler.h"
#include "llvm/MC/MCInst.h"
#include "llvm/MC/MCInstPrinter.h"
#include "llvm/MC/MCInstrAnalysis.h"
#include "llvm/MC/MCInstrInfo.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCPseudoProbe.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCSubtargetInfo.h"
#include "llvm/MC/MCTargetOptions.h"
#include "llvm/Object/ELFObjectFile.h"
#include "llvm/ProfileData/SampleProf.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Path.h"
#include "llvm/Transforms/IPO/SampleContextTracker.h"
#include <list>
#include <map>
#include <set>
#include <sstream>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

extern cl::opt<bool> EnableCSPreInliner;
extern cl::opt<bool> UseContextCostForPreInliner;

using namespace llvm;
using namespace sampleprof;
using namespace llvm::object;

namespace llvm {
namespace sampleprof {

class ProfiledBinary;
class MissingFrameInferrer;

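// A cursor over the sorted code address array of a binary. It can advance to
// the next instruction, move back to the previous one, or be repositioned to
// an arbitrary address.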
struct InstructionPointer {
  const ProfiledBinary *Binary;
  // Address of the executable segment of the binary.
  uint64_t Address;
  // Index to the sorted code address array of the binary.
  uint64_t Index = 0;
  InstructionPointer(const ProfiledBinary *Binary, uint64_t Address,
                     bool RoundToNext = false);
  bool advance();
  bool backward();
  void update(uint64_t Addr);
};

// The special frame addresses.
enum SpecialFrameAddr {
  // Dummy root of frame trie.
  DummyRoot = 0,
  // Represents all the addresses outside of the current binary.
  // This is also used to indicate that the call stack should be truncated,
  // since this isn't a real call context the compiler will see.
  ExternalAddr = 1,
};

using RangesTy = std::vector<std::pair<uint64_t, uint64_t>>;

struct BinaryFunction {
  StringRef FuncName;
  // End of range is an exclusive bound.
  RangesTy Ranges;

  uint64_t getFuncSize() {
    uint64_t Sum = 0;
    for (auto &R : Ranges) {
      Sum += R.second - R.first;
    }
    return Sum;
  }
};

// Info about a function range. A function can be split into multiple
// non-contiguous ranges; each range corresponds to one FuncRange.
struct FuncRange {
  uint64_t StartAddress;
  // EndAddress is an exclusive bound.
  uint64_t EndAddress;
  // Function the range belongs to.
  BinaryFunction *Func;
  // Whether the start address is the real entry of the function.
  bool IsFuncEntry = false;

  StringRef getFuncName() { return Func->FuncName; }
};

// Prolog and epilog address tracker, used to filter out broken stack samples.
// Currently we use a heuristic size (two) to infer prolog and epilog
// based on the start address and return address. In the future,
// we will switch to a Dwarf CFI based tracker.
struct PrologEpilogTracker {
  // A set of prolog and epilog addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> PrologEpilogSet;
  ProfiledBinary *Binary;
  PrologEpilogTracker(ProfiledBinary *Bin) : Binary(Bin) {}

  // Take the first two addresses of each function as its prolog.
  void
  inferPrologAddresses(std::map<uint64_t, FuncRange> &FuncStartAddressMap) {
    for (auto I : FuncStartAddressMap) {
      PrologEpilogSet.insert(I.first);
      InstructionPointer IP(Binary, I.first);
      if (!IP.advance())
        break;
      PrologEpilogSet.insert(IP.Address);
    }
  }

  // Take each return address and the address preceding it as epilog.
  void inferEpilogAddresses(std::unordered_set<uint64_t> &RetAddrs) {
    for (auto Addr : RetAddrs) {
      PrologEpilogSet.insert(Addr);
      InstructionPointer IP(Binary, Addr);
      if (!IP.backward())
        break;
      PrologEpilogSet.insert(IP.Address);
    }
  }
};

// Track function byte size under different contexts (the outlined version as
// well as various inlined versions). It also provides query support to get
// function size with the best matching context, which is used to help the
// pre-inliner use accurate post-optimization size to make decisions.
// TODO: If an inlinee is completely optimized away, ideally we should have
// zero for its context size; currently we would miss such a context since it
// doesn't have instructions. To fix this, we need to mark all inlinees with an
// entry probe but without instructions as having zero size.
class BinarySizeContextTracker {
public:
  // Add instruction with given size to a context
  void addInstructionForContext(const SampleContextFrameVector &Context,
                                uint32_t InstrSize);

  // Get function size with a specific context. When there's no exact match
  // for the given context, try to retrieve the size of that function from
  // the closest matching context.
  uint32_t getFuncSizeForContext(const ContextTrieNode *Context);

  // For inlinees that are fully optimized away, we can establish zero size
  // using their remaining probes.
  void trackInlineesOptimizedAway(MCPseudoProbeDecoder &ProbeDecoder);

  using ProbeFrameStack = SmallVector<std::pair<StringRef, uint32_t>>;
  void trackInlineesOptimizedAway(MCPseudoProbeDecoder &ProbeDecoder,
                                  MCDecodedPseudoProbeInlineTree &ProbeNode,
                                  ProbeFrameStack &Context);

  void dump() { RootContext.dumpTree(); }

private:
  // Root node for the context trie tree; note that this is a reverse context
  // trie with the callee as parent and the caller as child. This way we can
  // traverse from the root to find the best/longest matching context if an
  // exact match does not exist. It gives us the best possible estimate for a
  // function's post-inline, post-optimization byte size.
  ContextTrieNode RootContext;
};

using AddressRange = std::pair<uint64_t, uint64_t>;

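// A profiled binary that has been loaded and disassembled. It owns the MC
// components, the symbolizer and the pseudo probe decoder, and answers
// address-based queries (code/call/return checks, function ranges, inline
// contexts) used during profile generation.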
class ProfiledBinary {
  // Absolute path of the executable binary.
  std::string Path;
  // Path of the debug info binary.
  std::string DebugBinaryPath;
  // Path given to the symbolizer, which should point to the binary with debug
  // info.
  StringRef SymbolizerPath;
  // The target triple.
  Triple TheTriple;
  // The runtime base address that the first executable segment is loaded at.
  uint64_t BaseAddress = 0;
  // The runtime base address that the first loadable segment is loaded at.
  uint64_t FirstLoadableAddress = 0;
  // The preferred load address of each executable segment.
  std::vector<uint64_t> PreferredTextSegmentAddresses;
  // The file offset of each executable segment.
  std::vector<uint64_t> TextSegmentOffsets;

  // Various MC component info.
  std::unique_ptr<const MCRegisterInfo> MRI;
  std::unique_ptr<const MCAsmInfo> AsmInfo;
  std::unique_ptr<const MCSubtargetInfo> STI;
  std::unique_ptr<const MCInstrInfo> MII;
  std::unique_ptr<MCDisassembler> DisAsm;
  std::unique_ptr<const MCInstrAnalysis> MIA;
  std::unique_ptr<MCInstPrinter> IPrinter;
  // A list of text sections sorted by start RVA and size. Used to check
  // if a given RVA is a valid code address.
  std::set<std::pair<uint64_t, uint64_t>> TextSections;

  // A map from function name to BinaryFunction info.
  std::unordered_map<std::string, BinaryFunction> BinaryFunctions;

  // A list of binary functions that have samples.
  std::unordered_set<const BinaryFunction *> ProfiledFunctions;

  // GUID to ELF symbol start address map.
  DenseMap<uint64_t, uint64_t> SymbolStartAddrs;

  // Start address to ELF symbol GUID map.
  std::unordered_multimap<uint64_t, uint64_t> StartAddrToSymMap;

  // An ordered map from a function's start address to its function range
  // info. Currently, to determine whether an ELF offset is the start of a
  // real function, we leverage the function range info from DWARF.
  std::map<uint64_t, FuncRange> StartAddrToFuncRangeMap;

  // Address to context location map. Used to expand the context.
  std::unordered_map<uint64_t, SampleContextFrameVector> AddressToLocStackMap;

  // Address to instruction size map. Also used for quick address lookup.
  std::unordered_map<uint64_t, uint64_t> AddressToInstSizeMap;

  // An array of addresses of all instructions sorted in increasing order. The
  // sorting is needed to quickly advance to the next or previous instruction.
  std::vector<uint64_t> CodeAddressVec;
  // A set of call instruction addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> CallAddressSet;
  // A set of return instruction addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> RetAddressSet;
  // An ordered set of unconditional branch instruction addresses.
  std::set<uint64_t> UncondBranchAddrSet;
  // A set of branch instruction addresses.
  std::unordered_set<uint64_t> BranchAddressSet;

  // Estimate and track function prolog and epilog ranges.
  PrologEpilogTracker ProEpilogTracker;

  // Infer missing frames due to compiler optimizations such as tail call
  // elimination.
  std::unique_ptr<MissingFrameInferrer> MissingContextInferrer;

  // Track function sizes under different contexts.
  BinarySizeContextTracker FuncSizeTracker;

  // The symbolizer used to get inline context for an instruction.
  std::unique_ptr<symbolize::LLVMSymbolizer> Symbolizer;

  // String table owning function name strings created from the symbolizer.
  std::unordered_set<std::string> NameStrings;

  // A collection of functions to print disassembly for.
  StringSet<> DisassembleFunctionSet;

  // Pseudo probe decoder.
  MCPseudoProbeDecoder ProbeDecoder;

  // Function name to probe frame map for top-level outlined functions.
  StringMap<MCDecodedPseudoProbeInlineTree *> TopLevelProbeFrameMap;

  bool UsePseudoProbes = false;

  bool UseFSDiscriminator = false;

  // Whether we need to symbolize all instructions to get function context
  // size.
  bool TrackFuncContextSize = false;

  // Indicates whether the base loading address is parsed from the mmap event
  // or uses the preferred address.
  bool IsLoadedByMMap = false;
  // Used to avoid redundant warnings.
  bool MissingMMapWarned = false;

  void setPreferredTextSegmentAddresses(const ELFObjectFileBase *O);

  template <class ELFT>
  void setPreferredTextSegmentAddresses(const ELFFile<ELFT> &Obj,
                                        StringRef FileName);

  void checkPseudoProbe(const ELFObjectFileBase *Obj);

  void decodePseudoProbe(const ELFObjectFileBase *Obj);

  void
  checkUseFSDiscriminator(const ELFObjectFileBase *Obj,
                          std::map<SectionRef, SectionSymbolsTy> &AllSymbols);

  // Set up disassembler and related components.
  void setUpDisassembler(const ELFObjectFileBase *Obj);
  void setupSymbolizer();

  // Load debug info of subprograms from DWARF section.
  void loadSymbolsFromDWARF(ObjectFile &Obj);

  // Load debug info from DWARF unit.
  void loadSymbolsFromDWARFUnit(DWARFUnit &CompilationUnit);

  // Create a mapping from each ELF symbol to its start address.
  void populateElfSymbolAddressList(const ELFObjectFileBase *O);

  // A function may be split into multiple non-contiguous address ranges. We
  // use this to set whether the start of a function range is the real entry of
  // the function, and to mark ranges starting at a non-function label as not
  // being entries.
  void setIsFuncEntry(FuncRange *FRange, StringRef RangeSymName);

  // Warn if no entry range exists in the function.
  void warnNoFuncEntry();

  /// Disassemble the text section and build various address maps.
  void disassemble(const ELFObjectFileBase *O);

  /// Helper function to disassemble the symbol and extract info for unwinding.
  bool dissassembleSymbol(std::size_t SI, ArrayRef<uint8_t> Bytes,
                          SectionSymbolsTy &Symbols, const SectionRef &Section);
  /// Symbolize a given instruction pointer and return a full call context.
  SampleContextFrameVector symbolize(const InstructionPointer &IP,
                                     bool UseCanonicalFnName = false,
                                     bool UseProbeDiscriminator = false);
  /// Decode the interesting parts of the binary and build internal data
  /// structures. At a high level, the parts of interest are:
  ///   1. Text sections, including the main code section and the PLT
  ///   entries that will be used to handle cross-module call transitions.
  ///   2. The .debug_line section, used by Dwarf-based profile generation.
  ///   3. Pseudo probe related sections, used by probe-based profile
  ///   generation.
  void load();

public:
  ProfiledBinary(const StringRef ExeBinPath, const StringRef DebugBinPath);
  ~ProfiledBinary();

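  // Decode the pseudo probe sections of the binary and populate the probe
  // decoder.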
  void decodePseudoProbe();

  StringRef getPath() const { return Path; }
  StringRef getName() const { return llvm::sys::path::filename(Path); }
  uint64_t getBaseAddress() const { return BaseAddress; }
  void setBaseAddress(uint64_t Address) { BaseAddress = Address; }

  // Canonicalize to use preferred load address as base address.
  uint64_t canonicalizeVirtualAddress(uint64_t Address) {
    return Address - BaseAddress + getPreferredBaseAddress();
  }
  // Return the preferred load address for the first executable segment.
  uint64_t getPreferredBaseAddress() const {
    return PreferredTextSegmentAddresses[0];
  }
  // Return the preferred load address for the first loadable segment.
  uint64_t getFirstLoadableAddress() const { return FirstLoadableAddress; }
  // Return the file offset for the first executable segment.
  uint64_t getTextSegmentOffset() const { return TextSegmentOffsets[0]; }
  const std::vector<uint64_t> &getPreferredTextSegmentAddresses() const {
    return PreferredTextSegmentAddresses;
  }
  const std::vector<uint64_t> &getTextSegmentOffsets() const {
    return TextSegmentOffsets;
  }

  uint64_t getInstSize(uint64_t Address) const {
    auto I = AddressToInstSizeMap.find(Address);
    if (I == AddressToInstSizeMap.end())
      return 0;
    return I->second;
  }

  bool addressIsCode(uint64_t Address) const {
    return AddressToInstSizeMap.find(Address) != AddressToInstSizeMap.end();
  }

  bool addressIsCall(uint64_t Address) const {
    return CallAddressSet.count(Address);
  }
  bool addressIsReturn(uint64_t Address) const {
    return RetAddressSet.count(Address);
  }
  bool addressInPrologEpilog(uint64_t Address) const {
    return ProEpilogTracker.PrologEpilogSet.count(Address);
  }

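  // Check whether the address is a control-transfer instruction, i.e. a
  // branch, call or return.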
  bool addressIsTransfer(uint64_t Address) {
    return BranchAddressSet.count(Address) || RetAddressSet.count(Address) ||
           CallAddressSet.count(Address);
  }

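  // Check whether the address range [Start, End) contains any unconditional
  // branch instruction.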
  bool rangeCrossUncondBranch(uint64_t Start, uint64_t End) {
    if (Start >= End)
      return false;
    auto R = UncondBranchAddrSet.lower_bound(Start);
    return R != UncondBranchAddrSet.end() && *R < End;
  }

  uint64_t getAddressforIndex(uint64_t Index) const {
    return CodeAddressVec[Index];
  }

  size_t getCodeAddrVecSize() const { return CodeAddressVec.size(); }

  bool usePseudoProbes() const { return UsePseudoProbes; }
  bool useFSDiscriminator() const { return UseFSDiscriminator; }
  // Get the index in CodeAddressVec for the given address. Since the address
  // may not be a valid code address, it is rounded up to the next valid code
  // address via a lower-bound lookup.
  uint32_t getIndexForAddr(uint64_t Address) const {
    auto Low = llvm::lower_bound(CodeAddressVec, Address);
    return Low - CodeAddressVec.begin();
  }

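  // Given a frame address (the return address of a frame in the call stack),
  // return the address of the call instruction right before it, or 0 if the
  // preceding instruction is not a call.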
  uint64_t getCallAddrFromFrameAddr(uint64_t FrameAddr) const {
    if (FrameAddr == ExternalAddr)
      return ExternalAddr;
    auto I = getIndexForAddr(FrameAddr);
    FrameAddr = I ? getAddressforIndex(I - 1) : 0;
    if (FrameAddr && addressIsCall(FrameAddr))
      return FrameAddr;
    return 0;
  }

  FuncRange *findFuncRangeForStartAddr(uint64_t Address) {
    auto I = StartAddrToFuncRangeMap.find(Address);
    if (I == StartAddrToFuncRangeMap.end())
      return nullptr;
    return &I->second;
  }

  // Binary search the function range which includes the input address.
  FuncRange *findFuncRange(uint64_t Address) {
    auto I = StartAddrToFuncRangeMap.upper_bound(Address);
    if (I == StartAddrToFuncRangeMap.begin())
      return nullptr;
    I--;

    if (Address >= I->second.EndAddress)
      return nullptr;

    return &I->second;
  }

  // Get all ranges of one function.
  RangesTy getRanges(uint64_t Address) {
    auto *FRange = findFuncRange(Address);
    // Ignore ranges that fall into the PLT section or a system library.
    if (!FRange)
      return RangesTy();

    return FRange->Func->Ranges;
  }

  const std::unordered_map<std::string, BinaryFunction> &
  getAllBinaryFunctions() {
    return BinaryFunctions;
  }

  std::unordered_set<const BinaryFunction *> &getProfiledFunctions() {
    return ProfiledFunctions;
  }

  void setProfiledFunctions(std::unordered_set<const BinaryFunction *> &Funcs) {
    ProfiledFunctions = Funcs;
  }

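  // Look up the BinaryFunction info for the given function name; return
  // nullptr if the function is not found in the binary.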
  BinaryFunction *getBinaryFunction(StringRef FName) {
    auto I = BinaryFunctions.find(FName.str());
    if (I == BinaryFunctions.end())
      return nullptr;
    return &I->second;
  }

  uint32_t getFuncSizeForContext(const ContextTrieNode *ContextNode) {
    return FuncSizeTracker.getFuncSizeForContext(ContextNode);
  }

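  // Use the MissingFrameInferrer to fill in frames removed by compiler
  // optimizations such as tail call elimination, producing NewContext from
  // Context.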
  void inferMissingFrames(const SmallVectorImpl<uint64_t> &Context,
                          SmallVectorImpl<uint64_t> &NewContext);

  // Load the symbols from the debug table and populate them into the symbol
  // list.
  void populateSymbolListFromDWARF(ProfileSymbolList &SymbolList);

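  // Symbolize the instruction at the given address and return its inline
  // frame stack, with the outermost caller first and the leaf (innermost)
  // frame last.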
  SampleContextFrameVector
  getFrameLocationStack(uint64_t Address, bool UseProbeDiscriminator = false) {
    InstructionPointer IP(this, Address);
    return symbolize(IP, true, UseProbeDiscriminator);
  }

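  // Like getFrameLocationStack, but caches the result in AddressToLocStackMap
  // so repeated queries for the same address avoid re-symbolization.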
  const SampleContextFrameVector &
  getCachedFrameLocationStack(uint64_t Address,
                              bool UseProbeDiscriminator = false) {
    auto I = AddressToLocStackMap.emplace(Address, SampleContextFrameVector());
    if (I.second) {
      I.first->second = getFrameLocationStack(Address, UseProbeDiscriminator);
    }
    return I.first->second;
  }

  std::optional<SampleContextFrame> getInlineLeafFrameLoc(uint64_t Address) {
    const auto &Stack = getCachedFrameLocationStack(Address);
    if (Stack.empty())
      return {};
    return Stack.back();
  }

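  // Release the symbolizer to free its memory once symbolization is no longer
  // needed.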
  void flushSymbolizer() { Symbolizer.reset(); }

  MissingFrameInferrer *getMissingContextInferrer() {
    return MissingContextInferrer.get();
  }

  // Compare the inline contexts of two addresses.
  bool inlineContextEqual(uint64_t Add1, uint64_t Add2);

  // Get the full context of the current stack with inline context filled in.
  // It will search the disassembling info stored in AddressToLocStackMap.
  // This is used as the key of the function sample map.
  SampleContextFrameVector
  getExpandedContext(const SmallVectorImpl<uint64_t> &Stack,
                     bool &WasLeafInlined);
  // Go through the instructions in the given range and record their sizes for
  // the inline context.
  void computeInlinedContextSizeForRange(uint64_t StartAddress,
                                         uint64_t EndAddress);

  void computeInlinedContextSizeForFunc(const BinaryFunction *Func);

  const MCDecodedPseudoProbe *getCallProbeForAddr(uint64_t Address) const {
    return ProbeDecoder.getCallProbeForAddr(Address);
  }

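  // Build the inline context for a probe as a stack of SampleContextFrames.
  // If a zero (unknown) call-site probe shows up in the middle of the stack,
  // the context collected so far is discarded.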
  void getInlineContextForProbe(const MCDecodedPseudoProbe *Probe,
                                SampleContextFrameVector &InlineContextStack,
                                bool IncludeLeaf = false) const {
    SmallVector<MCPseduoProbeFrameLocation, 16> ProbeInlineContext;
    ProbeDecoder.getInlineContextForProbe(Probe, ProbeInlineContext,
                                          IncludeLeaf);
    for (uint32_t I = 0; I < ProbeInlineContext.size(); I++) {
      auto &Callsite = ProbeInlineContext[I];
      // Clear the current context for an unknown probe.
      if (Callsite.second == 0 && I != ProbeInlineContext.size() - 1) {
        InlineContextStack.clear();
        continue;
      }
      InlineContextStack.emplace_back(Callsite.first,
                                      LineLocation(Callsite.second, 0));
    }
  }
  const AddressProbesMap &getAddress2ProbesMap() const {
    return ProbeDecoder.getAddress2ProbesMap();
  }
  const MCPseudoProbeFuncDesc *getFuncDescForGUID(uint64_t GUID) {
    return ProbeDecoder.getFuncDescForGUID(GUID);
  }

  const MCPseudoProbeFuncDesc *
  getInlinerDescForProbe(const MCDecodedPseudoProbe *Probe) {
    return ProbeDecoder.getInlinerDescForProbe(Probe);
  }

  bool getTrackFuncContextSize() { return TrackFuncContextSize; }

  bool getIsLoadedByMMap() { return IsLoadedByMMap; }

  void setIsLoadedByMMap(bool Value) { IsLoadedByMMap = Value; }

  bool getMissingMMapWarned() { return MissingMMapWarned; }

  void setMissingMMapWarned(bool Value) { MissingMMapWarned = Value; }
};

} // end namespace sampleprof
} // end namespace llvm

#endif