1 //===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This pass lowers instrprof_* intrinsics emitted by a frontend for profiling.
10 // It also builds the data structures and initialization code needed for
11 // updating execution counts and emitting the profile at runtime.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h"
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/SmallVector.h"
18 #include "llvm/ADT/StringRef.h"
19 #include "llvm/ADT/Triple.h"
20 #include "llvm/ADT/Twine.h"
21 #include "llvm/Analysis/BlockFrequencyInfo.h"
22 #include "llvm/Analysis/BranchProbabilityInfo.h"
23 #include "llvm/Analysis/LoopInfo.h"
24 #include "llvm/Analysis/TargetLibraryInfo.h"
25 #include "llvm/IR/Attributes.h"
26 #include "llvm/IR/BasicBlock.h"
27 #include "llvm/IR/Constant.h"
28 #include "llvm/IR/Constants.h"
29 #include "llvm/IR/DerivedTypes.h"
30 #include "llvm/IR/Dominators.h"
31 #include "llvm/IR/Function.h"
32 #include "llvm/IR/GlobalValue.h"
33 #include "llvm/IR/GlobalVariable.h"
34 #include "llvm/IR/IRBuilder.h"
35 #include "llvm/IR/Instruction.h"
36 #include "llvm/IR/Instructions.h"
37 #include "llvm/IR/IntrinsicInst.h"
38 #include "llvm/IR/Module.h"
39 #include "llvm/IR/Type.h"
40 #include "llvm/InitializePasses.h"
41 #include "llvm/Pass.h"
42 #include "llvm/ProfileData/InstrProf.h"
43 #include "llvm/Support/Casting.h"
44 #include "llvm/Support/CommandLine.h"
45 #include "llvm/Support/Error.h"
46 #include "llvm/Support/ErrorHandling.h"
47 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
48 #include "llvm/Transforms/Utils/ModuleUtils.h"
49 #include "llvm/Transforms/Utils/SSAUpdater.h"
50 #include <algorithm>
51 #include <cassert>
52 #include <cstddef>
53 #include <cstdint>
54 #include <string>
55 
56 using namespace llvm;
57 
58 #define DEBUG_TYPE "instrprof"
59 
60 // The start and end values of the precise value profile range for memory
61 // intrinsic sizes.
62 cl::opt<std::string> MemOPSizeRange(
63     "memop-size-range",
64     cl::desc("Set the range of size in memory intrinsic calls to be profiled "
65              "precisely, in a format of <start_val>:<end_val>"),
66     cl::init(""));
67 
68 // The value that is considered to be a large value in a memory intrinsic.
69 cl::opt<unsigned> MemOPSizeLarge(
70     "memop-size-large",
71     cl::desc("Set large value threshold in memory intrinsic size profiling. "
72              "Value of 0 disables the large value profiling."),
73     cl::init(8192));
74 
75 namespace {
76 
77 cl::opt<bool> DoNameCompression("enable-name-compression",
78                                 cl::desc("Enable name string compression"),
79                                 cl::init(true));
80 
81 cl::opt<bool> DoHashBasedCounterSplit(
82     "hash-based-counter-split",
83     cl::desc("Rename counter variable of a comdat function based on cfg hash"),
84     cl::init(true));
85 
86 cl::opt<bool> ValueProfileStaticAlloc(
87     "vp-static-alloc",
88     cl::desc("Do static counter allocation for value profiler"),
89     cl::init(true));
90 
91 cl::opt<double> NumCountersPerValueSite(
92     "vp-counters-per-site",
93     cl::desc("The average number of profile counters allocated "
94              "per value profiling site."),
95     // This is set to a very small value because in real programs, only
96     // a very small percentage of value sites have non-zero targets, e.g., 1/30.
97     // For those sites with non-zero profile, the average number of targets
98     // is usually smaller than 2.
99     cl::init(1.0));
100 
101 cl::opt<bool> AtomicCounterUpdateAll(
102     "instrprof-atomic-counter-update-all", cl::ZeroOrMore,
103     cl::desc("Make all profile counter updates atomic (for testing only)"),
104     cl::init(false));
105 
106 cl::opt<bool> AtomicCounterUpdatePromoted(
107     "atomic-counter-update-promoted", cl::ZeroOrMore,
108     cl::desc("Do counter update using atomic fetch add "
109              "for promoted counters only"),
110     cl::init(false));
111 
112 // If the option is not specified, the default behavior about whether
113 // counter promotion is done depends on how the instrumentation lowering
114 // pipeline is set up, i.e., the default value of this option does not
115 // mean the promotion will be done by default. Explicitly setting this
116 // option can override the default behavior.
117 cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore,
118                                  cl::desc("Do counter register promotion"),
119                                  cl::init(false));
120 cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
121     cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20),
122     cl::desc("Max number of counter promotions per loop to avoid"
123              " increasing register pressure too much"));
124 
125 // A debug option
126 cl::opt<int>
127     MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1),
128                        cl::desc("Max number of allowed counter promotions"));
129 
130 cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
131     cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3),
132     cl::desc("The max number of exiting blocks of a loop to allow "
133              "speculative counter promotion"));
134 
135 cl::opt<bool> SpeculativeCounterPromotionToLoop(
136     cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false),
137     cl::desc("When the option is false, if the target block is in a loop, "
138              "the promotion will be disallowed unless the promoted counter "
139              "update can be further/iteratively promoted into an acyclic "
140              "region."));
141 
142 cl::opt<bool> IterativeCounterPromotion(
143     cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true),
144     cl::desc("Allow counter promotion across the whole loop nest."));
145 
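// Legacy pass manager wrapper: runs the InstrProfiling lowering on a module
// and forwards TargetLibraryInfo queries to the analysis wrapper pass.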
146 class InstrProfilingLegacyPass : public ModulePass {
147   InstrProfiling InstrProf;
148 
149 public:
150   static char ID;
151 
152   InstrProfilingLegacyPass() : ModulePass(ID) {}
153   InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false)
154       : ModulePass(ID), InstrProf(Options, IsCS) {}
155 
156   StringRef getPassName() const override {
157     return "Frontend instrumentation-based coverage lowering";
158   }
159 
160   bool runOnModule(Module &M) override {
161     auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
162       return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
163     };
164     return InstrProf.run(M, GetTLI);
165   }
166 
167   void getAnalysisUsage(AnalysisUsage &AU) const override {
168     AU.setPreservesCFG();
169     AU.addRequired<TargetLibraryInfoWrapperPass>();
170   }
171 };
172 
173 ///
174 /// A helper class to promote one counter RMW operation in the loop
175 /// into a register update.
176 ///
177 /// The RMW update for the counter will be sunk out of the loop after
178 /// the transformation.
179 ///
180 class PGOCounterPromoterHelper : public LoadAndStorePromoter {
181 public:
182   PGOCounterPromoterHelper(
183       Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
184       BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
185       ArrayRef<Instruction *> InsertPts,
186       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
187       LoopInfo &LI)
188       : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
189         InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
190     assert(isa<LoadInst>(L));
191     assert(isa<StoreInst>(S));
192     SSA.AddAvailableValue(PH, Init);
193   }
194 
195   void doExtraRewritesBeforeFinalDeletion() override {
196     for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
197       BasicBlock *ExitBlock = ExitBlocks[i];
198       Instruction *InsertPos = InsertPts[i];
199       // Get LiveIn value into the ExitBlock. If there are multiple
200       // predecessors, the value is defined by a PHI node in this
201       // block.
202       Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
203       Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
204       Type *Ty = LiveInValue->getType();
205       IRBuilder<> Builder(InsertPos);
206       if (AtomicCounterUpdatePromoted)
207         // Atomic updates currently can only be promoted across the current
208         // loop, not the whole loop nest.
209         Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
210                                 AtomicOrdering::SequentiallyConsistent);
211       else {
212         LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
213         auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
214         auto *NewStore = Builder.CreateStore(NewVal, Addr);
215 
216         // Now update the parent loop's candidate list:
217         if (IterativeCounterPromotion) {
218           auto *TargetLoop = LI.getLoopFor(ExitBlock);
219           if (TargetLoop)
220             LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
221         }
222       }
223     }
224   }
225 
226 private:
227   Instruction *Store;
228   ArrayRef<BasicBlock *> ExitBlocks;
229   ArrayRef<Instruction *> InsertPts;
230   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
231   LoopInfo &LI;
232 };
233 
234 /// A helper class to do register promotion for all profile counter
235 /// updates in a loop.
236 ///
237 class PGOCounterPromoter {
238 public:
239   PGOCounterPromoter(
240       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
241       Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
242       : LoopToCandidates(LoopToCands), ExitBlocks(), InsertPts(), L(CurLoop),
243         LI(LI), BFI(BFI) {
244 
245     SmallVector<BasicBlock *, 8> LoopExitBlocks;
246     SmallPtrSet<BasicBlock *, 8> BlockSet;
247     L.getExitBlocks(LoopExitBlocks);
248 
249     for (BasicBlock *ExitBlock : LoopExitBlocks) {
250       if (BlockSet.insert(ExitBlock).second) {
251         ExitBlocks.push_back(ExitBlock);
252         InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
253       }
254     }
255   }
256 
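  // Try to promote the candidate counter updates collected for this loop.
  // Returns true if at least one counter was promoted; *NumPromoted is the
  // running total across loops and is checked against the global limit.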
257   bool run(int64_t *NumPromoted) {
258     // Skip 'infinite' loops:
259     if (ExitBlocks.size() == 0)
260       return false;
261     unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
262     if (MaxProm == 0)
263       return false;
264 
265     unsigned Promoted = 0;
266     for (auto &Cand : LoopToCandidates[&L]) {
267 
268       SmallVector<PHINode *, 4> NewPHIs;
269       SSAUpdater SSA(&NewPHIs);
270       Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);
271 
272       // If BFI is set, we will use it to guide the promotions.
273       if (BFI) {
274         auto *BB = Cand.first->getParent();
275         auto InstrCount = BFI->getBlockProfileCount(BB);
276         if (!InstrCount)
277           continue;
278         auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
279         // If the average loop trip count is not greater than 1.5, we skip
280         // promotion.
281         if (PreheaderCount &&
282             (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
283           continue;
284       }
285 
286       PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
287                                         L.getLoopPreheader(), ExitBlocks,
288                                         InsertPts, LoopToCandidates, LI);
289       Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
290       Promoted++;
291       if (Promoted >= MaxProm)
292         break;
293 
294       (*NumPromoted)++;
295       if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
296         break;
297     }
298 
299     LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
300                       << L.getLoopDepth() << ")\n");
301     return Promoted != 0;
302   }
303 
304 private:
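  // A promotion is considered speculative when the loop has more than one
  // exiting block; allow it only up to the configured exiting-block limit.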
305   bool allowSpeculativeCounterPromotion(Loop *LP) {
306     SmallVector<BasicBlock *, 8> ExitingBlocks;
307     L.getExitingBlocks(ExitingBlocks);
308     // Not considered speculative.
309     if (ExitingBlocks.size() == 1)
310       return true;
311     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
312       return false;
313     return true;
314   }
315 
316   // Returns the max number of Counter Promotions for LP.
317   unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
318     // We can't insert into a catchswitch.
319     SmallVector<BasicBlock *, 8> LoopExitBlocks;
320     LP->getExitBlocks(LoopExitBlocks);
321     if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
322           return isa<CatchSwitchInst>(Exit->getTerminator());
323         }))
324       return 0;
325 
326     if (!LP->hasDedicatedExits())
327       return 0;
328 
329     BasicBlock *PH = LP->getLoopPreheader();
330     if (!PH)
331       return 0;
332 
333     SmallVector<BasicBlock *, 8> ExitingBlocks;
334     LP->getExitingBlocks(ExitingBlocks);
335 
336     // If BFI is set, we do more aggressive promotions based on BFI.
337     if (BFI)
338       return (unsigned)-1;
339 
340     // Not considered speculative.
341     if (ExitingBlocks.size() == 1)
342       return MaxNumOfPromotionsPerLoop;
343 
344     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
345       return 0;
346 
347     // Whether the target block is in a loop does not matter:
348     if (SpeculativeCounterPromotionToLoop)
349       return MaxNumOfPromotionsPerLoop;
350 
351     // Now check the target block:
352     unsigned MaxProm = MaxNumOfPromotionsPerLoop;
353     for (auto *TargetBlock : LoopExitBlocks) {
354       auto *TargetLoop = LI.getLoopFor(TargetBlock);
355       if (!TargetLoop)
356         continue;
357       unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
358       unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
359       MaxProm =
360           std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
361                                 PendingCandsInTarget);
362     }
363     return MaxProm;
364   }
365 
366   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
367   SmallVector<BasicBlock *, 8> ExitBlocks;
368   SmallVector<Instruction *, 8> InsertPts;
369   Loop &L;
370   LoopInfo &LI;
371   BlockFrequencyInfo *BFI;
372 };
373 
374 } // end anonymous namespace
375 
376 PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
377   FunctionAnalysisManager &FAM =
378       AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
379   auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
380     return FAM.getResult<TargetLibraryAnalysis>(F);
381   };
382   if (!run(M, GetTLI))
383     return PreservedAnalyses::all();
384 
385   return PreservedAnalyses::none();
386 }
387 
388 char InstrProfilingLegacyPass::ID = 0;
389 INITIALIZE_PASS_BEGIN(
390     InstrProfilingLegacyPass, "instrprof",
391     "Frontend instrumentation-based coverage lowering.", false, false)
392 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
393 INITIALIZE_PASS_END(
394     InstrProfilingLegacyPass, "instrprof",
395     "Frontend instrumentation-based coverage lowering.", false, false)
396 
397 ModulePass *
398 llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
399                                      bool IsCS) {
400   return new InstrProfilingLegacyPass(Options, IsCS);
401 }
402 
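// Cast Instr to an increment intrinsic if it is one, checking the
// explicit-step variant first and falling back to the plain form.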
403 static InstrProfIncrementInst *castToIncrementInst(Instruction *Instr) {
404   InstrProfIncrementInst *Inc = dyn_cast<InstrProfIncrementInstStep>(Instr);
405   if (Inc)
406     return Inc;
407   return dyn_cast<InstrProfIncrementInst>(Instr);
408 }
409 
410 bool InstrProfiling::lowerIntrinsics(Function *F) {
411   bool MadeChange = false;
412   PromotionCandidates.clear();
413   for (BasicBlock &BB : *F) {
414     for (auto I = BB.begin(), E = BB.end(); I != E;) {
415       auto Instr = I++;
416       InstrProfIncrementInst *Inc = castToIncrementInst(&*Instr);
417       if (Inc) {
418         lowerIncrement(Inc);
419         MadeChange = true;
420       } else if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(Instr)) {
421         lowerValueProfileInst(Ind);
422         MadeChange = true;
423       }
424     }
425   }
426 
427   if (!MadeChange)
428     return false;
429 
430   promoteCounterLoadStores(F);
431   return true;
432 }
433 
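// An explicit command-line setting of -do-counter-promotion takes precedence;
// otherwise the pass options decide whether counter promotion is enabled.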
434 bool InstrProfiling::isCounterPromotionEnabled() const {
435   if (DoCounterPromotion.getNumOccurrences() > 0)
436     return DoCounterPromotion;
437 
438   return Options.DoCounterPromotion;
439 }
440 
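// Promote the collected counter load/store pairs out of their enclosing
// loops, processing the loop nest bottom-up so promoted updates can be
// hoisted further by the enclosing loops.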
441 void InstrProfiling::promoteCounterLoadStores(Function *F) {
442   if (!isCounterPromotionEnabled())
443     return;
444 
445   DominatorTree DT(*F);
446   LoopInfo LI(DT);
447   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;
448 
449   std::unique_ptr<BlockFrequencyInfo> BFI;
450   if (Options.UseBFIInPromotion) {
451     std::unique_ptr<BranchProbabilityInfo> BPI;
452     BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
453     BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
454   }
455 
456   for (const auto &LoadStore : PromotionCandidates) {
457     auto *CounterLoad = LoadStore.first;
458     auto *CounterStore = LoadStore.second;
459     BasicBlock *BB = CounterLoad->getParent();
460     Loop *ParentLoop = LI.getLoopFor(BB);
461     if (!ParentLoop)
462       continue;
463     LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
464   }
465 
466   SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();
467 
468   // Do a post-order traversal of the loops so that counter updates can be
469   // iteratively hoisted outside the loop nest.
470   for (auto *Loop : llvm::reverse(Loops)) {
471     PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
472     Promoter.run(&TotalCountersPromoted);
473   }
474 }
475 
476 /// Check if the module contains uses of any profiling intrinsics.
477 static bool containsProfilingIntrinsics(Module &M) {
478   if (auto *F = M.getFunction(
479           Intrinsic::getName(llvm::Intrinsic::instrprof_increment)))
480     if (!F->use_empty())
481       return true;
482   if (auto *F = M.getFunction(
483           Intrinsic::getName(llvm::Intrinsic::instrprof_increment_step)))
484     if (!F->use_empty())
485       return true;
486   if (auto *F = M.getFunction(
487           Intrinsic::getName(llvm::Intrinsic::instrprof_value_profile)))
488     if (!F->use_empty())
489       return true;
490   return false;
491 }
492 
493 bool InstrProfiling::run(
494     Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
495   this->M = &M;
496   this->GetTLI = std::move(GetTLI);
497   NamesVar = nullptr;
498   NamesSize = 0;
499   ProfileDataMap.clear();
500   UsedVars.clear();
501   getMemOPSizeRangeFromOption(MemOPSizeRange, MemOPSizeRangeStart,
502                               MemOPSizeRangeLast);
503   TT = Triple(M.getTargetTriple());
504 
505   // Emit the runtime hook even if no counters are present.
506   bool MadeChange = emitRuntimeHook();
507 
508   // Improve compile time by avoiding linear scans when there is no work.
509   GlobalVariable *CoverageNamesVar =
510       M.getNamedGlobal(getCoverageUnusedNamesVarName());
511   if (!containsProfilingIntrinsics(M) && !CoverageNamesVar)
512     return MadeChange;
513 
514   // We did not know how many value sites there were inside the instrumented
515   // function when it was instrumented. Count the instrumented target value
516   // sites now so the count can be recorded in the profile data variable.
517   for (Function &F : M) {
518     InstrProfIncrementInst *FirstProfIncInst = nullptr;
519     for (BasicBlock &BB : F)
520       for (auto I = BB.begin(), E = BB.end(); I != E; I++)
521         if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
522           computeNumValueSiteCounts(Ind);
523         else if (FirstProfIncInst == nullptr)
524           FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);
525 
526     // Value profiling intrinsic lowering requires the per-function profile
527     // data variable to be created first.
528     if (FirstProfIncInst != nullptr)
529       static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
530   }
531 
532   for (Function &F : M)
533     MadeChange |= lowerIntrinsics(&F);
534 
535   if (CoverageNamesVar) {
536     lowerCoverageData(CoverageNamesVar);
537     MadeChange = true;
538   }
539 
540   if (!MadeChange)
541     return false;
542 
543   emitVNodes();
544   emitNameData();
545   emitRegistration();
546   emitUses();
547   emitInitialization();
548   return true;
549 }
550 
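// Declare (or reuse) the runtime callback used to record value-profile data.
// The range variant is used for memory-intrinsic size profiling.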
551 static FunctionCallee
552 getOrInsertValueProfilingCall(Module &M, const TargetLibraryInfo &TLI,
553                               bool IsRange = false) {
554   LLVMContext &Ctx = M.getContext();
555   auto *ReturnTy = Type::getVoidTy(M.getContext());
556 
557   AttributeList AL;
558   if (auto AK = TLI.getExtAttrForI32Param(false))
559     AL = AL.addParamAttribute(M.getContext(), 2, AK);
560 
561   if (!IsRange) {
562     Type *ParamTypes[] = {
563 #define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
564 #include "llvm/ProfileData/InstrProfData.inc"
565     };
566     auto *ValueProfilingCallTy =
567         FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
568     return M.getOrInsertFunction(getInstrProfValueProfFuncName(),
569                                  ValueProfilingCallTy, AL);
570   } else {
571     Type *RangeParamTypes[] = {
572 #define VALUE_RANGE_PROF 1
573 #define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
574 #include "llvm/ProfileData/InstrProfData.inc"
575 #undef VALUE_RANGE_PROF
576     };
577     auto *ValueRangeProfilingCallTy =
578         FunctionType::get(ReturnTy, makeArrayRef(RangeParamTypes), false);
579     return M.getOrInsertFunction(getInstrProfValueRangeProfFuncName(),
580                                  ValueRangeProfilingCallTy, AL);
581   }
582 }
583 
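// Record, per value kind, the number of value sites in the function, derived
// from the highest site index seen among the value-profile intrinsics.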
584 void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
585   GlobalVariable *Name = Ind->getName();
586   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
587   uint64_t Index = Ind->getIndex()->getZExtValue();
588   auto It = ProfileDataMap.find(Name);
589   if (It == ProfileDataMap.end()) {
590     PerFunctionProfileData PD;
591     PD.NumValueSites[ValueKind] = Index + 1;
592     ProfileDataMap[Name] = PD;
593   } else if (It->second.NumValueSites[ValueKind] <= Index)
594     It->second.NumValueSites[ValueKind] = Index + 1;
595 }
596 
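// Replace a value-profile intrinsic with a call into the profiling runtime,
// passing the target value, the per-function data variable, and the
// flattened site index.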
597 void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
598   GlobalVariable *Name = Ind->getName();
599   auto It = ProfileDataMap.find(Name);
600   assert(It != ProfileDataMap.end() && It->second.DataVar &&
601          "value profiling detected in function with no counter increment");
602 
603   GlobalVariable *DataVar = It->second.DataVar;
604   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
605   uint64_t Index = Ind->getIndex()->getZExtValue();
606   for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
607     Index += It->second.NumValueSites[Kind];
608 
609   IRBuilder<> Builder(Ind);
610   bool IsRange = (Ind->getValueKind()->getZExtValue() ==
611                   llvm::InstrProfValueKind::IPVK_MemOPSize);
612   CallInst *Call = nullptr;
613   auto *TLI = &GetTLI(*Ind->getFunction());
614   if (!IsRange) {
615     Value *Args[3] = {Ind->getTargetValue(),
616                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
617                       Builder.getInt32(Index)};
618     Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args);
619   } else {
620     Value *Args[6] = {
621         Ind->getTargetValue(),
622         Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
623         Builder.getInt32(Index),
624         Builder.getInt64(MemOPSizeRangeStart),
625         Builder.getInt64(MemOPSizeRangeLast),
626         Builder.getInt64(MemOPSizeLarge == 0 ? INT64_MIN : MemOPSizeLarge)};
627     Call =
628         Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI, true), Args);
629   }
630   if (auto AK = TLI->getExtAttrForI32Param(false))
631     Call->addParamAttr(2, AK);
632   Ind->replaceAllUsesWith(Call);
633   Ind->eraseFromParent();
634 }
635 
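// Replace an increment intrinsic with an update of the corresponding slot in
// the function's counter array, either atomically or as a plain
// load-add-store that may later be promoted out of loops.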
636 void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
637   GlobalVariable *Counters = getOrCreateRegionCounters(Inc);
638 
639   IRBuilder<> Builder(Inc);
640   uint64_t Index = Inc->getIndex()->getZExtValue();
641   Value *Addr = Builder.CreateConstInBoundsGEP2_64(Counters->getValueType(),
642                                                    Counters, 0, Index);
643 
644   if (Options.Atomic || AtomicCounterUpdateAll) {
645     Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
646                             AtomicOrdering::Monotonic);
647   } else {
648     Value *IncStep = Inc->getStep();
649     Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
650     auto *Count = Builder.CreateAdd(Load, Inc->getStep());
651     auto *Store = Builder.CreateStore(Count, Addr);
652     if (isCounterPromotionEnabled())
653       PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
654   }
655   Inc->eraseFromParent();
656 }
657 
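// Lower the coverage-names holder: mark each referenced name variable as
// private, record it for emitNameData, and erase the holder itself.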
658 void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
659   ConstantArray *Names =
660       cast<ConstantArray>(CoverageNamesVar->getInitializer());
661   for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
662     Constant *NC = Names->getOperand(I);
663     Value *V = NC->stripPointerCasts();
664     assert(isa<GlobalVariable>(V) && "Missing reference to function name");
665     GlobalVariable *Name = cast<GlobalVariable>(V);
666 
667     Name->setLinkage(GlobalValue::PrivateLinkage);
668     ReferencedNames.push_back(Name);
669     NC->dropAllReferences();
670   }
671   CoverageNamesVar->eraseFromParent();
672 }
673 
674 /// Get the name of a profiling variable for a particular function.
675 static std::string getVarName(InstrProfIncrementInst *Inc, StringRef Prefix) {
676   StringRef NamePrefix = getInstrProfNameVarPrefix();
677   StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
678   Function *F = Inc->getParent()->getParent();
679   Module *M = F->getParent();
680   if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
681       !canRenameComdatFunc(*F))
682     return (Prefix + Name).str();
683   uint64_t FuncHash = Inc->getHash()->getZExtValue();
684   SmallVector<char, 24> HashPostfix;
685   if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
686     return (Prefix + Name).str();
687   return (Prefix + Name + "." + Twine(FuncHash)).str();
688 }
689 
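// Decide whether the function's address should be recorded in the profile
// data; the recorded address is presumably used by the runtime to map
// indirect-call target values back to functions.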
690 static inline bool shouldRecordFunctionAddr(Function *F) {
691   // Check the linkage
692   bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
693   if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
694       !HasAvailableExternallyLinkage)
695     return true;
696 
697   // A function marked 'alwaysinline' with available_externally linkage can't
698   // have its address taken. Doing so would create an undefined external ref to
699   // the function, which would fail to link.
700   if (HasAvailableExternallyLinkage &&
701       F->hasFnAttribute(Attribute::AlwaysInline))
702     return false;
703 
704   // Prohibit function address recording if the function is both internal and
705   // COMDAT. This avoids the profile data variable referencing internal symbols
706   // in COMDAT.
707   if (F->hasLocalLinkage() && F->hasComdat())
708     return false;
709 
710   // Check uses of this function for other than direct calls or invokes to it.
711   // Inline virtual functions have linkonce_odr linkage. When a key method
712   // exists, the vtable will only be emitted in the TU where the key method
713   // is defined. In a TU where the vtable is not available, the function won't
714   // be 'addresstaken'. If its address is not recorded here, the profile data
715   // with the missing address may be picked by the linker, leading to missing
716   // indirect call target info.
717   return F->hasAddressTaken() || F->hasLinkOnceLinkage();
718 }
719 
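// Return true if the target cannot rely on linker/linker-script-provided
// section start/end symbols and therefore needs runtime registration of the
// profile sections.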
720 static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
721   // Don't do this for Darwin.  compiler-rt uses linker magic.
722   if (TT.isOSDarwin())
723     return false;
724   // Use linker script magic to get data/cnts/name start/end.
725   if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
726       TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4CPU() ||
727       TT.isOSWindows())
728     return false;
729 
730   return true;
731 }
732 
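// Create (or return the cached) counter array for the function identified by
// the increment's name variable, along with its profile data variable and,
// when statically allocated, the value-profile pointer array.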
733 GlobalVariable *
734 InstrProfiling::getOrCreateRegionCounters(InstrProfIncrementInst *Inc) {
735   GlobalVariable *NamePtr = Inc->getName();
736   auto It = ProfileDataMap.find(NamePtr);
737   PerFunctionProfileData PD;
738   if (It != ProfileDataMap.end()) {
739     if (It->second.RegionCounters)
740       return It->second.RegionCounters;
741     PD = It->second;
742   }
743 
744   // Match the linkage and visibility of the name global. COFF supports using
745   // comdats with internal symbols, so do that if we can.
746   Function *Fn = Inc->getParent()->getParent();
747   GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
748   GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();
749   if (TT.isOSBinFormatCOFF()) {
750     Linkage = GlobalValue::InternalLinkage;
751     Visibility = GlobalValue::DefaultVisibility;
752   }
753 
754   // Move the name variable to the right section. Place them in a COMDAT group
755   // if the associated function is a COMDAT. This will make sure that only one
756   // copy of counters of the COMDAT function will be emitted after linking. Keep
757   // in mind that this pass may run before the inliner, so we need to create a
758   // new comdat group for the counters and profiling data. If we use the comdat
759   // of the parent function, that will result in relocations against discarded
760   // sections.
761   bool NeedComdat = needsComdatForCounter(*Fn, *M);
762   if (NeedComdat) {
763     if (TT.isOSBinFormatCOFF()) {
764       // For COFF, put the counters, data, and values each into their own
765       // comdats. We can't use a group because the Visual C++ linker will
766       // report duplicate symbol errors if there are multiple external symbols
767       // with the same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
768       Linkage = GlobalValue::LinkOnceODRLinkage;
769       Visibility = GlobalValue::HiddenVisibility;
770     }
771   }
772   auto MaybeSetComdat = [=](GlobalVariable *GV) {
773     if (NeedComdat)
774       GV->setComdat(M->getOrInsertComdat(GV->getName()));
775   };
776 
777   uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
778   LLVMContext &Ctx = M->getContext();
779   ArrayType *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);
780 
781   // Create the counters variable.
782   auto *CounterPtr =
783       new GlobalVariable(*M, CounterTy, false, Linkage,
784                          Constant::getNullValue(CounterTy),
785                          getVarName(Inc, getInstrProfCountersVarPrefix()));
786   CounterPtr->setVisibility(Visibility);
787   CounterPtr->setSection(
788       getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
789   CounterPtr->setAlignment(Align(8));
790   MaybeSetComdat(CounterPtr);
791   CounterPtr->setLinkage(Linkage);
792 
793   auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
794   // Allocate statically the array of pointers to value profile nodes for
795   // the current function.
796   Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
797   if (ValueProfileStaticAlloc && !needsRuntimeRegistrationOfSectionRange(TT)) {
798     uint64_t NS = 0;
799     for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
800       NS += PD.NumValueSites[Kind];
801     if (NS) {
802       ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);
803 
804       auto *ValuesVar =
805           new GlobalVariable(*M, ValuesTy, false, Linkage,
806                              Constant::getNullValue(ValuesTy),
807                              getVarName(Inc, getInstrProfValuesVarPrefix()));
808       ValuesVar->setVisibility(Visibility);
809       ValuesVar->setSection(
810           getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
811       ValuesVar->setAlignment(Align(8));
812       MaybeSetComdat(ValuesVar);
813       ValuesPtrExpr =
814           ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
815     }
816   }
817 
818   // Create data variable.
819   auto *Int16Ty = Type::getInt16Ty(Ctx);
820   auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
821   Type *DataTypes[] = {
822 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
823 #include "llvm/ProfileData/InstrProfData.inc"
824   };
825   auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));
826 
827   Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
828                                ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
829                                : ConstantPointerNull::get(Int8PtrTy);
830 
831   Constant *Int16ArrayVals[IPVK_Last + 1];
832   for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
833     Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);
834 
835   Constant *DataVals[] = {
836 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
837 #include "llvm/ProfileData/InstrProfData.inc"
838   };
839   auto *Data = new GlobalVariable(*M, DataTy, false, Linkage,
840                                   ConstantStruct::get(DataTy, DataVals),
841                                   getVarName(Inc, getInstrProfDataVarPrefix()));
842   Data->setVisibility(Visibility);
843   Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
844   Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
845   MaybeSetComdat(Data);
846   Data->setLinkage(Linkage);
847 
848   PD.RegionCounters = CounterPtr;
849   PD.DataVar = Data;
850   ProfileDataMap[NamePtr] = PD;
851 
852   // Mark the data variable as used so that it isn't stripped out.
853   UsedVars.push_back(Data);
854   // Now that the linkage set by the FE has been passed to the data and counter
855   // variables, reset the Name variable's linkage and visibility to private
856   // so that it can be removed later by the compiler.
857   NamePtr->setLinkage(GlobalValue::PrivateLinkage);
858   // Collect the referenced names to be used by emitNameData.
859   ReferencedNames.push_back(NamePtr);
860 
861   return CounterPtr;
862 }
863 
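// Statically allocate the buffer of value-profile nodes shared by all value
// sites in the module, sized by the per-site counter heuristic below.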
864 void InstrProfiling::emitVNodes() {
865   if (!ValueProfileStaticAlloc)
866     return;
867 
868   // For now only support this on platforms that do
869   // not require runtime registration to discover
870   // named section start/end.
871   if (needsRuntimeRegistrationOfSectionRange(TT))
872     return;
873 
874   size_t TotalNS = 0;
875   for (auto &PD : ProfileDataMap) {
876     for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
877       TotalNS += PD.second.NumValueSites[Kind];
878   }
879 
880   if (!TotalNS)
881     return;
882 
883   uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
884 // Heuristic for small programs with very few total value sites.
885 // The default value of vp-counters-per-site is chosen based on
886 // the observation that large apps usually have a low percentage
887 // of value sites that actually have any profile data, and thus
888 // the average number of counters per site is low. For small
889 // apps with very few sites, this may not be true. Bump up the
890 // number of counters in this case.
891 #define INSTR_PROF_MIN_VAL_COUNTS 10
892   if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
893     NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);
894 
895   auto &Ctx = M->getContext();
896   Type *VNodeTypes[] = {
897 #define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
898 #include "llvm/ProfileData/InstrProfData.inc"
899   };
900   auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));
901 
902   ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
903   auto *VNodesVar = new GlobalVariable(
904       *M, VNodesTy, false, GlobalValue::PrivateLinkage,
905       Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
906   VNodesVar->setSection(
907       getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
908   UsedVars.push_back(VNodesVar);
909 }
910 
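// Concatenate (and optionally compress) all referenced function name strings
// into a single private variable placed in the names section.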
911 void InstrProfiling::emitNameData() {
912   std::string UncompressedData;
913 
914   if (ReferencedNames.empty())
915     return;
916 
917   std::string CompressedNameStr;
918   if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
919                                           DoNameCompression)) {
920     report_fatal_error(toString(std::move(E)), false);
921   }
922 
923   auto &Ctx = M->getContext();
924   auto *NamesVal = ConstantDataArray::getString(
925       Ctx, StringRef(CompressedNameStr), false);
926   NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
927                                 GlobalValue::PrivateLinkage, NamesVal,
928                                 getInstrProfNamesVarName());
929   NamesSize = CompressedNameStr.size();
930   NamesVar->setSection(
931       getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
932   // On COFF, it's important to reduce the alignment down to 1 to prevent the
933   // linker from inserting padding before the start of the names section or
934   // between names entries.
935   NamesVar->setAlignment(Align::None());
936   UsedVars.push_back(NamesVar);
937 
938   for (auto *NamePtr : ReferencedNames)
939     NamePtr->eraseFromParent();
940 }
941 
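// Emit a registration function that hands each profile variable (and the
// names blob with its size) to the runtime on targets that lack section-range
// support from the linker.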
942 void InstrProfiling::emitRegistration() {
943   if (!needsRuntimeRegistrationOfSectionRange(TT))
944     return;
945 
946   // Construct the function.
947   auto *VoidTy = Type::getVoidTy(M->getContext());
948   auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
949   auto *Int64Ty = Type::getInt64Ty(M->getContext());
950   auto *RegisterFTy = FunctionType::get(VoidTy, false);
951   auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
952                                      getInstrProfRegFuncsName(), M);
953   RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
954   if (Options.NoRedZone)
955     RegisterF->addFnAttr(Attribute::NoRedZone);
956 
957   auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
958   auto *RuntimeRegisterF =
959       Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
960                        getInstrProfRegFuncName(), M);
961 
962   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
963   for (Value *Data : UsedVars)
964     if (Data != NamesVar && !isa<Function>(Data))
965       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
966 
967   if (NamesVar) {
968     Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
969     auto *NamesRegisterTy =
970         FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
971     auto *NamesRegisterF =
972         Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
973                          getInstrProfNamesRegFuncName(), M);
974     IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
975                                     IRB.getInt64(NamesSize)});
976   }
977 
978   IRB.CreateRetVoid();
979 }
980 
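// Reference the runtime hook variable so that the profile runtime is pulled
// in at link time on targets where the linker is not passed -u<hook_var>.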
981 bool InstrProfiling::emitRuntimeHook() {
982   // We expect the linker to be invoked with the -u<hook_var> flag on Linux,
983   // in which case there is no need to emit the user function.
984   if (TT.isOSLinux())
985     return false;
986 
987   // If the module's provided its own runtime, we don't need to do anything.
988   if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
989     return false;
990 
991   // Declare an external variable that will pull in the runtime initialization.
992   auto *Int32Ty = Type::getInt32Ty(M->getContext());
993   auto *Var =
994       new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
995                          nullptr, getInstrProfRuntimeHookVarName());
996 
997   // Make a function that uses it.
998   auto *User = Function::Create(FunctionType::get(Int32Ty, false),
999                                 GlobalValue::LinkOnceODRLinkage,
1000                                 getInstrProfRuntimeHookVarUseFuncName(), M);
1001   User->addFnAttr(Attribute::NoInline);
1002   if (Options.NoRedZone)
1003     User->addFnAttr(Attribute::NoRedZone);
1004   User->setVisibility(GlobalValue::HiddenVisibility);
1005   if (TT.supportsCOMDAT())
1006     User->setComdat(M->getOrInsertComdat(User->getName()));
1007 
1008   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
1009   auto *Load = IRB.CreateLoad(Int32Ty, Var);
1010   IRB.CreateRet(Load);
1011 
1012   // Mark the user variable as used so that it isn't stripped out.
1013   UsedVars.push_back(User);
1014   return true;
1015 }
1016 
1017 void InstrProfiling::emitUses() {
1018   if (!UsedVars.empty())
1019     appendToUsed(*M, UsedVars);
1020 }
1021 
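// Emit the profile file name variable (except for context-sensitive lowering)
// and, if a registration function was created, a constructor that calls it.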
1022 void InstrProfiling::emitInitialization() {
1023   // Create the ProfileFileName variable. Don't do this for the
1024   // context-sensitive instrumentation lowering: this lowering runs after
1025   // LTO/ThinLTO linking. The PGOInstrumentationGenCreateVar pass should
1026   // have already created the variable before LTO/ThinLTO linking.
1027   if (!IsCS)
1028     createProfileFileNameVar(*M, Options.InstrProfileOutput);
1029   Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
1030   if (!RegisterF)
1031     return;
1032 
1033   // Create the initialization function.
1034   auto *VoidTy = Type::getVoidTy(M->getContext());
1035   auto *F = Function::Create(FunctionType::get(VoidTy, false),
1036                              GlobalValue::InternalLinkage,
1037                              getInstrProfInitFuncName(), M);
1038   F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1039   F->addFnAttr(Attribute::NoInline);
1040   if (Options.NoRedZone)
1041     F->addFnAttr(Attribute::NoRedZone);
1042 
1043   // Add the basic block and the necessary calls.
1044   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
1045   IRB.CreateCall(RegisterF, {});
1046   IRB.CreateRetVoid();
1047 
1048   appendToGlobalCtors(*M, F, 0);
1049 }
1050