1 //===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This pass lowers instrprof_* intrinsics emitted by a frontend for profiling.
10 // It also builds the data structures and initialization code needed for
11 // updating execution counts and emitting the profile at runtime.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h"
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/SmallVector.h"
18 #include "llvm/ADT/StringRef.h"
19 #include "llvm/ADT/Twine.h"
20 #include "llvm/Analysis/BlockFrequencyInfo.h"
21 #include "llvm/Analysis/BranchProbabilityInfo.h"
22 #include "llvm/Analysis/LoopInfo.h"
23 #include "llvm/Analysis/TargetLibraryInfo.h"
24 #include "llvm/IR/Attributes.h"
25 #include "llvm/IR/BasicBlock.h"
26 #include "llvm/IR/Constant.h"
27 #include "llvm/IR/Constants.h"
28 #include "llvm/IR/DIBuilder.h"
29 #include "llvm/IR/DerivedTypes.h"
30 #include "llvm/IR/DiagnosticInfo.h"
31 #include "llvm/IR/Dominators.h"
32 #include "llvm/IR/Function.h"
33 #include "llvm/IR/GlobalValue.h"
34 #include "llvm/IR/GlobalVariable.h"
35 #include "llvm/IR/IRBuilder.h"
36 #include "llvm/IR/Instruction.h"
37 #include "llvm/IR/Instructions.h"
38 #include "llvm/IR/IntrinsicInst.h"
39 #include "llvm/IR/Module.h"
40 #include "llvm/IR/Type.h"
41 #include "llvm/InitializePasses.h"
42 #include "llvm/Pass.h"
43 #include "llvm/ProfileData/InstrProf.h"
44 #include "llvm/ProfileData/InstrProfCorrelator.h"
45 #include "llvm/Support/Casting.h"
46 #include "llvm/Support/CommandLine.h"
47 #include "llvm/Support/Error.h"
48 #include "llvm/Support/ErrorHandling.h"
49 #include "llvm/TargetParser/Triple.h"
50 #include "llvm/Transforms/Utils/ModuleUtils.h"
51 #include "llvm/Transforms/Utils/SSAUpdater.h"
52 #include <algorithm>
53 #include <cassert>
54 #include <cstdint>
55 #include <string>
56 
57 using namespace llvm;
58 
59 #define DEBUG_TYPE "instrprof"
60 
61 namespace llvm {
62 cl::opt<bool>
63     DebugInfoCorrelate("debug-info-correlate",
64                        cl::desc("Use debug info to correlate profiles."),
65                        cl::init(false));
66 } // namespace llvm
67 
68 namespace {
69 
70 cl::opt<bool> DoHashBasedCounterSplit(
71     "hash-based-counter-split",
72     cl::desc("Rename counter variable of a comdat function based on cfg hash"),
73     cl::init(true));
74 
75 cl::opt<bool>
76     RuntimeCounterRelocation("runtime-counter-relocation",
77                              cl::desc("Enable relocating counters at runtime."),
78                              cl::init(false));
79 
80 cl::opt<bool> ValueProfileStaticAlloc(
81     "vp-static-alloc",
82     cl::desc("Do static counter allocation for value profiler"),
83     cl::init(true));
84 
85 cl::opt<double> NumCountersPerValueSite(
86     "vp-counters-per-site",
87     cl::desc("The average number of profile counters allocated "
88              "per value profiling site."),
89     // This is set to a very small value because in real programs, only
90     // a very small percentage of value sites have non-zero targets, e.g., 1/30.
91     // For those sites with non-zero profile, the average number of targets
92     // is usually smaller than 2.
93     cl::init(1.0));
94 
95 cl::opt<bool> AtomicCounterUpdateAll(
96     "instrprof-atomic-counter-update-all",
97     cl::desc("Make all profile counter updates atomic (for testing only)"),
98     cl::init(false));
99 
100 cl::opt<bool> AtomicCounterUpdatePromoted(
101     "atomic-counter-update-promoted",
102     cl::desc("Do counter update using atomic fetch add "
103              "for promoted counters only"),
104     cl::init(false));
105 
106 cl::opt<bool> AtomicFirstCounter(
107     "atomic-first-counter",
108     cl::desc("Use atomic fetch add for first counter in a function (usually "
109              "the entry counter)"),
110     cl::init(false));
111 
112 // If the option is not specified, the default behavior about whether
113 // counter promotion is done depends on how the instrumentation lowering
114 // pipeline is set up, i.e., the default value of true for this option
115 // does not mean the promotion will be done by default. Explicitly
116 // setting this option can override the default behavior.
117 cl::opt<bool> DoCounterPromotion("do-counter-promotion",
118                                  cl::desc("Do counter register promotion"),
119                                  cl::init(false));
120 cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
121     "max-counter-promotions-per-loop", cl::init(20),
122     cl::desc("Max number of counter promotions per loop to avoid"
123              " increasing register pressure too much"));
124 
125 // A debug option
126 cl::opt<int>
127     MaxNumOfPromotions("max-counter-promotions", cl::init(-1),
128                        cl::desc("Max number of allowed counter promotions"));
129 
130 cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
131     "speculative-counter-promotion-max-exiting", cl::init(3),
132     cl::desc("The max number of exiting blocks of a loop to allow "
133              "speculative counter promotion"));
134 
135 cl::opt<bool> SpeculativeCounterPromotionToLoop(
136     "speculative-counter-promotion-to-loop",
137     cl::desc("When the option is false, if the target block is in a loop, "
138              "the promotion will be disallowed unless the promoted counter "
139              "update can be further/iteratively promoted into an acyclic "
140              "region."));
141 
142 cl::opt<bool> IterativeCounterPromotion(
143     "iterative-counter-promotion", cl::init(true),
144     cl::desc("Allow counter promotion across the whole loop nest."));
145 
146 cl::opt<bool> SkipRetExitBlock(
147     "skip-ret-exit-block", cl::init(true),
148     cl::desc("Suppress counter promotion if exit blocks contain ret."));
149 
150 ///
151 /// A helper class to promote one counter RMW operation in the loop
152 /// into a register update.
153 ///
154 /// The RMW update for the counter will be sunk out of the loop after
155 /// the transformation.
156 ///
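/// For example (an illustrative sketch only, using a hypothetical counter
/// @__profc_foo), a counter update inside the loop such as
///   %pgocount = load i64, i64* @__profc_foo
///   %inc = add i64 %pgocount, 1
///   store i64 %inc, i64* @__profc_foo
/// is kept in an SSA value while inside the loop, and the accumulated count
/// is added back into @__profc_foo once in each exit block.
///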
157 class PGOCounterPromoterHelper : public LoadAndStorePromoter {
158 public:
159   PGOCounterPromoterHelper(
160       Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
161       BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
162       ArrayRef<Instruction *> InsertPts,
163       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
164       LoopInfo &LI)
165       : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
166         InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
167     assert(isa<LoadInst>(L));
168     assert(isa<StoreInst>(S));
169     SSA.AddAvailableValue(PH, Init);
170   }
171 
172   void doExtraRewritesBeforeFinalDeletion() override {
173     for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
174       BasicBlock *ExitBlock = ExitBlocks[i];
175       Instruction *InsertPos = InsertPts[i];
176       // Get LiveIn value into the ExitBlock. If there are multiple
177       // predecessors, the value is defined by a PHI node in this
178       // block.
179       Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
180       Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
181       Type *Ty = LiveInValue->getType();
182       IRBuilder<> Builder(InsertPos);
183       if (auto *AddrInst = dyn_cast_or_null<IntToPtrInst>(Addr)) {
184         // If isRuntimeCounterRelocationEnabled() is true then the address of
185         // the store instruction is computed with two instructions in
186         // InstrProfiling::getCounterAddress(). We need to copy those
187         // instructions to this block to compute Addr correctly.
188         // %BiasAdd = add i64 ptrtoint <__profc_>, <__llvm_profile_counter_bias>
189         // %Addr = inttoptr i64 %BiasAdd to i64*
190         auto *OrigBiasInst = dyn_cast<BinaryOperator>(AddrInst->getOperand(0));
191         assert(OrigBiasInst->getOpcode() == Instruction::BinaryOps::Add);
192         Value *BiasInst = Builder.Insert(OrigBiasInst->clone());
193         Addr = Builder.CreateIntToPtr(BiasInst, Ty->getPointerTo());
194       }
195       if (AtomicCounterUpdatePromoted)
196         // Atomic update currently can only be promoted across the current
197         // loop, not the whole loop nest.
198         Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
199                                 MaybeAlign(),
200                                 AtomicOrdering::SequentiallyConsistent);
201       else {
202         LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
203         auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
204         auto *NewStore = Builder.CreateStore(NewVal, Addr);
205 
206         // Now update the parent loop's candidate list:
207         if (IterativeCounterPromotion) {
208           auto *TargetLoop = LI.getLoopFor(ExitBlock);
209           if (TargetLoop)
210             LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
211         }
212       }
213     }
214   }
215 
216 private:
217   Instruction *Store;
218   ArrayRef<BasicBlock *> ExitBlocks;
219   ArrayRef<Instruction *> InsertPts;
220   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
221   LoopInfo &LI;
222 };
223 
224 /// A helper class to do register promotion for all profile counter
225 /// updates in a loop.
226 ///
227 class PGOCounterPromoter {
228 public:
229   PGOCounterPromoter(
230       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
231       Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
232       : LoopToCandidates(LoopToCands), L(CurLoop), LI(LI), BFI(BFI) {
233 
234     // Skip collection of ExitBlocks and InsertPts for loops that will not be
235     // able to have counters promoted.
236     SmallVector<BasicBlock *, 8> LoopExitBlocks;
237     SmallPtrSet<BasicBlock *, 8> BlockSet;
238 
239     L.getExitBlocks(LoopExitBlocks);
240     if (!isPromotionPossible(&L, LoopExitBlocks))
241       return;
242 
243     for (BasicBlock *ExitBlock : LoopExitBlocks) {
244       if (BlockSet.insert(ExitBlock).second) {
245         ExitBlocks.push_back(ExitBlock);
246         InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
247       }
248     }
249   }
250 
251   bool run(int64_t *NumPromoted) {
252     // Skip 'infinite' loops:
253     if (ExitBlocks.size() == 0)
254       return false;
255 
256     // Skip if any of the ExitBlocks contains a ret instruction.
257     // This is to prevent dumping of incomplete profile -- if the
258     // loop is a long-running loop and dump is called in the middle
259     // of the loop, the resulting profile is incomplete.
260     // FIXME: add other heuristics to detect long running loops.
261     if (SkipRetExitBlock) {
262       for (auto *BB : ExitBlocks)
263         if (isa<ReturnInst>(BB->getTerminator()))
264           return false;
265     }
266 
267     unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
268     if (MaxProm == 0)
269       return false;
270 
271     unsigned Promoted = 0;
272     for (auto &Cand : LoopToCandidates[&L]) {
273 
274       SmallVector<PHINode *, 4> NewPHIs;
275       SSAUpdater SSA(&NewPHIs);
276       Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);
277 
278       // If BFI is set, we will use it to guide the promotions.
279       if (BFI) {
280         auto *BB = Cand.first->getParent();
281         auto InstrCount = BFI->getBlockProfileCount(BB);
282         if (!InstrCount)
283           continue;
284         auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
285         // If the average loop trip count is not greater than 1.5, we skip
286         // promotion.
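        // (PreheaderCount * 3 >= InstrCount * 2 is the same test as
        // InstrCount / PreheaderCount <= 1.5, written without division.)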
287         if (PreheaderCount && (*PreheaderCount * 3) >= (*InstrCount * 2))
288           continue;
289       }
290 
291       PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
292                                         L.getLoopPreheader(), ExitBlocks,
293                                         InsertPts, LoopToCandidates, LI);
294       Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
295       Promoted++;
296       if (Promoted >= MaxProm)
297         break;
298 
299       (*NumPromoted)++;
300       if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
301         break;
302     }
303 
304     LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
305                       << L.getLoopDepth() << ")\n");
306     return Promoted != 0;
307   }
308 
309 private:
310   bool allowSpeculativeCounterPromotion(Loop *LP) {
311     SmallVector<BasicBlock *, 8> ExitingBlocks;
312     L.getExitingBlocks(ExitingBlocks);
313     // Not considered speculative.
314     if (ExitingBlocks.size() == 1)
315       return true;
316     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
317       return false;
318     return true;
319   }
320 
321   // Check whether the loop satisfies the basic conditions needed to perform
322   // counter promotion.
323   bool
324   isPromotionPossible(Loop *LP,
325                       const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) {
326     // We can't insert into a catchswitch.
327     if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
328           return isa<CatchSwitchInst>(Exit->getTerminator());
329         }))
330       return false;
331 
332     if (!LP->hasDedicatedExits())
333       return false;
334 
335     BasicBlock *PH = LP->getLoopPreheader();
336     if (!PH)
337       return false;
338 
339     return true;
340   }
341 
342   // Returns the max number of counter promotions for LP.
343   unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
344     SmallVector<BasicBlock *, 8> LoopExitBlocks;
345     LP->getExitBlocks(LoopExitBlocks);
346     if (!isPromotionPossible(LP, LoopExitBlocks))
347       return 0;
348 
349     SmallVector<BasicBlock *, 8> ExitingBlocks;
350     LP->getExitingBlocks(ExitingBlocks);
351 
352     // If BFI is set, we do more aggressive promotions based on BFI.
353     if (BFI)
354       return (unsigned)-1;
355 
356     // Not considered speculative.
357     if (ExitingBlocks.size() == 1)
358       return MaxNumOfPromotionsPerLoop;
359 
360     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
361       return 0;
362 
363     // Whether the target block is in a loop does not matter:
364     if (SpeculativeCounterPromotionToLoop)
365       return MaxNumOfPromotionsPerLoop;
366 
367     // Now check the target block:
368     unsigned MaxProm = MaxNumOfPromotionsPerLoop;
369     for (auto *TargetBlock : LoopExitBlocks) {
370       auto *TargetLoop = LI.getLoopFor(TargetBlock);
371       if (!TargetLoop)
372         continue;
373       unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
374       unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
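      // Remaining promotion budget in the target loop: its own limit minus the
      // candidates already pending there (the max() clamps the subtraction at
      // zero to avoid unsigned wrap-around).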
375       MaxProm =
376           std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
377                                 PendingCandsInTarget);
378     }
379     return MaxProm;
380   }
381 
382   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
383   SmallVector<BasicBlock *, 8> ExitBlocks;
384   SmallVector<Instruction *, 8> InsertPts;
385   Loop &L;
386   LoopInfo &LI;
387   BlockFrequencyInfo *BFI;
388 };
389 
390 enum class ValueProfilingCallType {
391   // Individual values are tracked. Currently used for indirect call target
392   // profiling.
393   Default,
394 
395   // MemOp: value profiling of memop sizes.
396   MemOp
397 };
398 
399 } // end anonymous namespace
400 
401 PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
402   FunctionAnalysisManager &FAM =
403       AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
404   auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
405     return FAM.getResult<TargetLibraryAnalysis>(F);
406   };
407   if (!run(M, GetTLI))
408     return PreservedAnalyses::all();
409 
410   return PreservedAnalyses::none();
411 }
412 
413 bool InstrProfiling::lowerIntrinsics(Function *F) {
414   bool MadeChange = false;
415   PromotionCandidates.clear();
416   for (BasicBlock &BB : *F) {
417     for (Instruction &Instr : llvm::make_early_inc_range(BB)) {
418       if (auto *IPIS = dyn_cast<InstrProfIncrementInstStep>(&Instr)) {
419         lowerIncrement(IPIS);
420         MadeChange = true;
421       } else if (auto *IPI = dyn_cast<InstrProfIncrementInst>(&Instr)) {
422         lowerIncrement(IPI);
423         MadeChange = true;
424       } else if (auto *IPC = dyn_cast<InstrProfTimestampInst>(&Instr)) {
425         lowerTimestamp(IPC);
426         MadeChange = true;
427       } else if (auto *IPC = dyn_cast<InstrProfCoverInst>(&Instr)) {
428         lowerCover(IPC);
429         MadeChange = true;
430       } else if (auto *IPVP = dyn_cast<InstrProfValueProfileInst>(&Instr)) {
431         lowerValueProfileInst(IPVP);
432         MadeChange = true;
433       }
434     }
435   }
436 
437   if (!MadeChange)
438     return false;
439 
440   promoteCounterLoadStores(F);
441   return true;
442 }
443 
444 bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
445   // Mach-O doesn't support weak external references.
446   if (TT.isOSBinFormatMachO())
447     return false;
448 
449   if (RuntimeCounterRelocation.getNumOccurrences() > 0)
450     return RuntimeCounterRelocation;
451 
452   // Fuchsia uses runtime counter relocation by default.
453   return TT.isOSFuchsia();
454 }
455 
456 bool InstrProfiling::isCounterPromotionEnabled() const {
457   if (DoCounterPromotion.getNumOccurrences() > 0)
458     return DoCounterPromotion;
459 
460   return Options.DoCounterPromotion;
461 }
462 
463 void InstrProfiling::promoteCounterLoadStores(Function *F) {
464   if (!isCounterPromotionEnabled())
465     return;
466 
467   DominatorTree DT(*F);
468   LoopInfo LI(DT);
469   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;
470 
471   std::unique_ptr<BlockFrequencyInfo> BFI;
472   if (Options.UseBFIInPromotion) {
473     std::unique_ptr<BranchProbabilityInfo> BPI;
474     BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
475     BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
476   }
477 
478   for (const auto &LoadStore : PromotionCandidates) {
479     auto *CounterLoad = LoadStore.first;
480     auto *CounterStore = LoadStore.second;
481     BasicBlock *BB = CounterLoad->getParent();
482     Loop *ParentLoop = LI.getLoopFor(BB);
483     if (!ParentLoop)
484       continue;
485     LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
486   }
487 
488   SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();
489 
490   // Do a post-order traversal of the loops so that counter updates can be
491   // iteratively hoisted outside the loop nest.
492   for (auto *Loop : llvm::reverse(Loops)) {
493     PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
494     Promoter.run(&TotalCountersPromoted);
495   }
496 }
497 
498 static bool needsRuntimeHookUnconditionally(const Triple &TT) {
499   // On Fuchsia, we only need the runtime hook if any counters are present.
500   if (TT.isOSFuchsia())
501     return false;
502 
503   return true;
504 }
505 
506 /// Check if the module contains uses of any profiling intrinsics.
507 static bool containsProfilingIntrinsics(Module &M) {
508   auto containsIntrinsic = [&](int ID) {
509     if (auto *F = M.getFunction(Intrinsic::getName(ID)))
510       return !F->use_empty();
511     return false;
512   };
513   return containsIntrinsic(llvm::Intrinsic::instrprof_cover) ||
514          containsIntrinsic(llvm::Intrinsic::instrprof_increment) ||
515          containsIntrinsic(llvm::Intrinsic::instrprof_increment_step) ||
516          containsIntrinsic(llvm::Intrinsic::instrprof_timestamp) ||
517          containsIntrinsic(llvm::Intrinsic::instrprof_value_profile);
518 }
519 
520 bool InstrProfiling::run(
521     Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
522   this->M = &M;
523   this->GetTLI = std::move(GetTLI);
524   NamesVar = nullptr;
525   NamesSize = 0;
526   ProfileDataMap.clear();
527   CompilerUsedVars.clear();
528   UsedVars.clear();
529   TT = Triple(M.getTargetTriple());
530 
531   bool MadeChange = false;
532   bool NeedsRuntimeHook = needsRuntimeHookUnconditionally(TT);
533   if (NeedsRuntimeHook)
534     MadeChange = emitRuntimeHook();
535 
536   bool ContainsProfiling = containsProfilingIntrinsics(M);
537   GlobalVariable *CoverageNamesVar =
538       M.getNamedGlobal(getCoverageUnusedNamesVarName());
539   // Improve compile time by avoiding linear scans when there is no work.
540   if (!ContainsProfiling && !CoverageNamesVar)
541     return MadeChange;
542 
543   // We did not know how many value sites there would be inside the
544   // instrumented function, so count the instrumented target value sites
545   // here to record the number as a field in the profile data variable.
546   for (Function &F : M) {
547     InstrProfInstBase *FirstProfInst = nullptr;
548     for (BasicBlock &BB : F)
549       for (auto I = BB.begin(), E = BB.end(); I != E; I++)
550         if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
551           computeNumValueSiteCounts(Ind);
552         else if (FirstProfInst == nullptr &&
553                  (isa<InstrProfIncrementInst>(I) || isa<InstrProfCoverInst>(I)))
554           FirstProfInst = dyn_cast<InstrProfInstBase>(I);
555 
556     // Value profiling intrinsic lowering requires the per-function profile
557     // data variable to be created first.
558     if (FirstProfInst != nullptr)
559       static_cast<void>(getOrCreateRegionCounters(FirstProfInst));
560   }
561 
562   for (Function &F : M)
563     MadeChange |= lowerIntrinsics(&F);
564 
565   if (CoverageNamesVar) {
566     lowerCoverageData(CoverageNamesVar);
567     MadeChange = true;
568   }
569 
570   if (!MadeChange)
571     return false;
572 
573   emitVNodes();
574   emitNameData();
575 
576   // Emit the runtime hook for the cases where the target does not
577   // unconditionally require pulling in the profile runtime, and coverage is
578   // enabled on code that is not eliminated by the front-end, e.g. unused
579   // functions with internal linkage.
580   if (!NeedsRuntimeHook && ContainsProfiling)
581     emitRuntimeHook();
582 
583   emitRegistration();
584   emitUses();
585   emitInitialization();
586   return true;
587 }
588 
589 static FunctionCallee getOrInsertValueProfilingCall(
590     Module &M, const TargetLibraryInfo &TLI,
591     ValueProfilingCallType CallType = ValueProfilingCallType::Default) {
592   LLVMContext &Ctx = M.getContext();
593   auto *ReturnTy = Type::getVoidTy(M.getContext());
594 
595   AttributeList AL;
596   if (auto AK = TLI.getExtAttrForI32Param(false))
597     AL = AL.addParamAttribute(M.getContext(), 2, AK);
598 
599   assert((CallType == ValueProfilingCallType::Default ||
600           CallType == ValueProfilingCallType::MemOp) &&
601          "Must be Default or MemOp");
602   Type *ParamTypes[] = {
603 #define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
604 #include "llvm/ProfileData/InstrProfData.inc"
605   };
606   auto *ValueProfilingCallTy =
607       FunctionType::get(ReturnTy, ArrayRef(ParamTypes), false);
608   StringRef FuncName = CallType == ValueProfilingCallType::Default
609                            ? getInstrProfValueProfFuncName()
610                            : getInstrProfValueProfMemOpFuncName();
611   return M.getOrInsertFunction(FuncName, ValueProfilingCallTy, AL);
612 }
613 
614 void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
615   GlobalVariable *Name = Ind->getName();
616   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
617   uint64_t Index = Ind->getIndex()->getZExtValue();
618   auto &PD = ProfileDataMap[Name];
619   PD.NumValueSites[ValueKind] =
620       std::max(PD.NumValueSites[ValueKind], (uint32_t)(Index + 1));
621 }
622 
623 void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
624   // TODO: Value profiling heavily depends on the data section which is omitted
625   // in lightweight mode. We need to move the value profile pointer to the
626   // Counter struct to get this working.
627   assert(
628       !DebugInfoCorrelate &&
629       "Value profiling is not yet supported with lightweight instrumentation");
630   GlobalVariable *Name = Ind->getName();
631   auto It = ProfileDataMap.find(Name);
632   assert(It != ProfileDataMap.end() && It->second.DataVar &&
633          "value profiling detected in function with no counter increment");
634 
635   GlobalVariable *DataVar = It->second.DataVar;
636   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
637   uint64_t Index = Ind->getIndex()->getZExtValue();
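  // Flatten (ValueKind, Index) into a single index into the function's value
  // site array: sites of all earlier value kinds come first.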
638   for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
639     Index += It->second.NumValueSites[Kind];
640 
641   IRBuilder<> Builder(Ind);
642   bool IsMemOpSize = (Ind->getValueKind()->getZExtValue() ==
643                       llvm::InstrProfValueKind::IPVK_MemOPSize);
644   CallInst *Call = nullptr;
645   auto *TLI = &GetTLI(*Ind->getFunction());
646 
647   // To support value profiling calls within Windows exception handlers, funclet
648   // information contained within operand bundles needs to be copied over to
649   // the library call. This is required for the IR to be processed by the
650   // WinEHPrepare pass.
651   SmallVector<OperandBundleDef, 1> OpBundles;
652   Ind->getOperandBundlesAsDefs(OpBundles);
653   if (!IsMemOpSize) {
654     Value *Args[3] = {Ind->getTargetValue(),
655                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
656                       Builder.getInt32(Index)};
657     Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args,
658                               OpBundles);
659   } else {
660     Value *Args[3] = {Ind->getTargetValue(),
661                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
662                       Builder.getInt32(Index)};
663     Call = Builder.CreateCall(
664         getOrInsertValueProfilingCall(*M, *TLI, ValueProfilingCallType::MemOp),
665         Args, OpBundles);
666   }
667   if (auto AK = TLI->getExtAttrForI32Param(false))
668     Call->addParamAttr(2, AK);
669   Ind->replaceAllUsesWith(Call);
670   Ind->eraseFromParent();
671 }
672 
673 Value *InstrProfiling::getCounterAddress(InstrProfInstBase *I) {
674   auto *Counters = getOrCreateRegionCounters(I);
675   IRBuilder<> Builder(I);
676 
677   if (isa<InstrProfTimestampInst>(I))
678     Counters->setAlignment(Align(8));
679 
680   auto *Addr = Builder.CreateConstInBoundsGEP2_32(
681       Counters->getValueType(), Counters, 0, I->getIndex()->getZExtValue());
682 
683   if (!isRuntimeCounterRelocationEnabled())
684     return Addr;
685 
686   Type *Int64Ty = Type::getInt64Ty(M->getContext());
687   Function *Fn = I->getParent()->getParent();
688   LoadInst *&BiasLI = FunctionToProfileBiasMap[Fn];
689   if (!BiasLI) {
690     IRBuilder<> EntryBuilder(&Fn->getEntryBlock().front());
691     auto *Bias = M->getGlobalVariable(getInstrProfCounterBiasVarName());
692     if (!Bias) {
693       // The compiler must define this variable when runtime counter
694       // relocation is used. The runtime has a weak external reference to it
695       // that is used to check whether relocation is enabled.
696       Bias = new GlobalVariable(
697           *M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage,
698           Constant::getNullValue(Int64Ty), getInstrProfCounterBiasVarName());
699       Bias->setVisibility(GlobalVariable::HiddenVisibility);
700       // A definition that's weak (linkonce_odr) without being in a COMDAT
701       // section wouldn't lead to link errors, but it would lead to a dead
702       // data word from every TU but one. Putting it in COMDAT ensures there
703       // will be exactly one data slot in the link.
704       if (TT.supportsCOMDAT())
705         Bias->setComdat(M->getOrInsertComdat(Bias->getName()));
706     }
707     BiasLI = EntryBuilder.CreateLoad(Int64Ty, Bias);
708   }
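  // Add the bias to the raw counter address: at run time
  // __llvm_profile_counter_bias holds the offset from the counters as linked
  // to their relocated location, so the sum is the address actually updated.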
709   auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), BiasLI);
710   return Builder.CreateIntToPtr(Add, Addr->getType());
711 }
712 
713 void InstrProfiling::lowerCover(InstrProfCoverInst *CoverInstruction) {
714   auto *Addr = getCounterAddress(CoverInstruction);
715   IRBuilder<> Builder(CoverInstruction);
716   // We store zero to represent that this block is covered.
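  // createRegionCounters() initializes coverage bytes to all ones, so a zero
  // byte distinguishes executed blocks from unexecuted ones.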
717   Builder.CreateStore(Builder.getInt8(0), Addr);
718   CoverInstruction->eraseFromParent();
719 }
720 
721 void InstrProfiling::lowerTimestamp(
722     InstrProfTimestampInst *TimestampInstruction) {
723   assert(TimestampInstruction->getIndex()->isZeroValue() &&
724          "timestamp probes are always the first probe for a function");
725   auto &Ctx = M->getContext();
726   auto *TimestampAddr = getCounterAddress(TimestampInstruction);
727   IRBuilder<> Builder(TimestampInstruction);
728   auto *CalleeTy =
729       FunctionType::get(Type::getVoidTy(Ctx), TimestampAddr->getType(), false);
730   auto Callee = M->getOrInsertFunction(
731       INSTR_PROF_QUOTE(INSTR_PROF_PROFILE_SET_TIMESTAMP), CalleeTy);
732   Builder.CreateCall(Callee, {TimestampAddr});
733   TimestampInstruction->eraseFromParent();
734 }
735 
736 void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
737   auto *Addr = getCounterAddress(Inc);
738 
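  // In the common non-atomic case the increment lowers to a plain
  // load/add/store sequence (sketch):
  //   %pgocount = load i64, i64* <counter address>
  //   %1 = add i64 %pgocount, <step>
  //   store i64 %1, i64* <counter address>
  // which promoteCounterLoadStores() may later hoist out of loops.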
739   IRBuilder<> Builder(Inc);
740   if (Options.Atomic || AtomicCounterUpdateAll ||
741       (Inc->getIndex()->isZeroValue() && AtomicFirstCounter)) {
742     Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
743                             MaybeAlign(), AtomicOrdering::Monotonic);
744   } else {
745     Value *IncStep = Inc->getStep();
746     Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
747     auto *Count = Builder.CreateAdd(Load, Inc->getStep());
748     auto *Store = Builder.CreateStore(Count, Addr);
749     if (isCounterPromotionEnabled())
750       PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
751   }
752   Inc->eraseFromParent();
753 }
754 
755 void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
756   ConstantArray *Names =
757       cast<ConstantArray>(CoverageNamesVar->getInitializer());
758   for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
759     Constant *NC = Names->getOperand(I);
760     Value *V = NC->stripPointerCasts();
761     assert(isa<GlobalVariable>(V) && "Missing reference to function name");
762     GlobalVariable *Name = cast<GlobalVariable>(V);
763 
764     Name->setLinkage(GlobalValue::PrivateLinkage);
765     ReferencedNames.push_back(Name);
766     if (isa<ConstantExpr>(NC))
767       NC->dropAllReferences();
768   }
769   CoverageNamesVar->eraseFromParent();
770 }
771 
772 /// Get the name of a profiling variable for a particular function.
773 static std::string getVarName(InstrProfInstBase *Inc, StringRef Prefix,
774                               bool &Renamed) {
775   StringRef NamePrefix = getInstrProfNameVarPrefix();
776   StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
777   Function *F = Inc->getParent()->getParent();
778   Module *M = F->getParent();
779   if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
780       !canRenameComdatFunc(*F)) {
781     Renamed = false;
782     return (Prefix + Name).str();
783   }
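  // Append the CFG hash to the variable name so that comdat copies of the
  // function with different CFGs get distinct counter/data variables, e.g. a
  // hypothetical __profc_foo becomes __profc_foo.<hash>.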
784   Renamed = true;
785   uint64_t FuncHash = Inc->getHash()->getZExtValue();
786   SmallVector<char, 24> HashPostfix;
787   if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
788     return (Prefix + Name).str();
789   return (Prefix + Name + "." + Twine(FuncHash)).str();
790 }
791 
792 static uint64_t getIntModuleFlagOrZero(const Module &M, StringRef Flag) {
793   auto *MD = dyn_cast_or_null<ConstantAsMetadata>(M.getModuleFlag(Flag));
794   if (!MD)
795     return 0;
796 
797   // If the flag is a ConstantAsMetadata, it should be an integer representable
798   // in 64-bits.
799   return cast<ConstantInt>(MD->getValue())->getZExtValue();
800 }
801 
802 static bool enablesValueProfiling(const Module &M) {
803   return isIRPGOFlagSet(&M) ||
804          getIntModuleFlagOrZero(M, "EnableValueProfiling") != 0;
805 }
806 
807 // Conservatively returns true if data variables may be referenced by code.
808 static bool profDataReferencedByCode(const Module &M) {
809   return enablesValueProfiling(M);
810 }
811 
812 static inline bool shouldRecordFunctionAddr(Function *F) {
813   // Only record function addresses if IR PGO is enabled or if clang value
814   // profiling is enabled. Recording function addresses greatly increases object
815   // file size, because it prevents the inliner from deleting functions that
816   // have been inlined everywhere.
817   if (!profDataReferencedByCode(*F->getParent()))
818     return false;
819 
820   // Check the linkage
821   bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
822   if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
823       !HasAvailableExternallyLinkage)
824     return true;
825 
826   // A function marked 'alwaysinline' with available_externally linkage can't
827   // have its address taken. Doing so would create an undefined external ref to
828   // the function, which would fail to link.
829   if (HasAvailableExternallyLinkage &&
830       F->hasFnAttribute(Attribute::AlwaysInline))
831     return false;
832 
833   // Prohibit function address recording if the function is both internal and
834   // COMDAT. This avoids the profile data variable referencing internal symbols
835   // in COMDAT.
836   if (F->hasLocalLinkage() && F->hasComdat())
837     return false;
838 
839   // Check uses of this function for other than direct calls or invokes to it.
840   // Inline virtual functions have linkonce_odr linkage. When a key method
841   // exists, the vtable will only be emitted in the TU where the key method
842   // is defined. In a TU where the vtable is not available, the function won't
843   // be address-taken. If its address is not recorded here, the profile data
844   // with a missing address may be picked by the linker, leading to missing
845   // indirect call target info.
846   return F->hasAddressTaken() || F->hasLinkOnceLinkage();
847 }
848 
849 static inline bool shouldUsePublicSymbol(Function *Fn) {
850   // It isn't legal to make an alias of this function at all
851   if (Fn->isDeclarationForLinker())
852     return true;
853 
854   // Symbols with local linkage can just use the symbol directly without
855   // introducing relocations
856   if (Fn->hasLocalLinkage())
857     return true;
858 
859   // PGO + ThinLTO + CFI can introduce duplicate symbols due to an
860   // unfavorable interaction between the new alias and the alias renaming
861   // done in LowerTypeTests under ThinLTO. This affects comdat functions that
862   // would normally be deduplicated: the renaming scheme creates a unique name
863   // for each alias, preventing that deduplication and resulting in duplicated
864   // symbols. In the future, we should update the CFI-related passes to
865   // migrate these aliases into the module that defines their jump-table.
866   if (Fn->hasMetadata(LLVMContext::MD_type))
867     return true;
868 
869   // For comdat functions, an alias would need the same linkage as the
870   // original function and hidden visibility. Such an alias would not avoid
871   // the symbolic relocations, so there is no point in adding one.
872   if (Fn->hasComdat() &&
873       (Fn->getVisibility() == GlobalValue::VisibilityTypes::HiddenVisibility))
874     return true;
875 
876   // It's OK to use an alias.
877   return false;
878 }
879 
880 static inline Constant *getFuncAddrForProfData(Function *Fn) {
881   auto *Int8PtrTy = Type::getInt8PtrTy(Fn->getContext());
882   // Store a nullptr in __llvm_profd if we shouldn't use a real address.
883   if (!shouldRecordFunctionAddr(Fn))
884     return ConstantPointerNull::get(Int8PtrTy);
885 
886   // If we can't use an alias, we must use the public symbol, even though this
887   // may require a symbolic relocation.
888   if (shouldUsePublicSymbol(Fn))
889     return ConstantExpr::getBitCast(Fn, Int8PtrTy);
890 
891   // When possible use a private alias to avoid symbolic relocations.
892   auto *GA = GlobalAlias::create(GlobalValue::LinkageTypes::PrivateLinkage,
893                                  Fn->getName() + ".local", Fn);
894 
895   // When the instrumented function is a COMDAT function, we cannot use a
896   // private alias. If we did, we would create reference to a local label in
897   // this function's section. If this version of the function isn't selected by
898   // the linker, then the metadata would introduce a reference to a discarded
899   // section. So, for COMDAT functions, we need to adjust the linkage of the
900   // alias. Using hidden visibility avoids a dynamic relocation and an entry in
901   // the dynamic symbol table.
902   //
903   // Note that this handles COMDAT functions with visibility other than Hidden,
904   // since that case is covered in shouldUsePublicSymbol().
905   if (Fn->hasComdat()) {
906     GA->setLinkage(Fn->getLinkage());
907     GA->setVisibility(GlobalValue::VisibilityTypes::HiddenVisibility);
908   }
909 
910   // appendToCompilerUsed(*Fn->getParent(), {GA});
911 
912   return ConstantExpr::getBitCast(GA, Int8PtrTy);
913 }
914 
915 static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
916   // Don't do this for Darwin; compiler-rt uses linker magic.
917   if (TT.isOSDarwin())
918     return false;
919   // Use linker script magic to get data/cnts/name start/end.
920   if (TT.isOSAIX() || TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
921       TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS() || TT.isOSWindows())
922     return false;
923 
924   return true;
925 }
926 
927 GlobalVariable *
928 InstrProfiling::createRegionCounters(InstrProfInstBase *Inc, StringRef Name,
929                                      GlobalValue::LinkageTypes Linkage) {
930   uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
931   auto &Ctx = M->getContext();
932   GlobalVariable *GV;
933   if (isa<InstrProfCoverInst>(Inc)) {
934     auto *CounterTy = Type::getInt8Ty(Ctx);
935     auto *CounterArrTy = ArrayType::get(CounterTy, NumCounters);
936     // TODO: `Constant::getAllOnesValue()` does not yet accept an array type.
937     std::vector<Constant *> InitialValues(NumCounters,
938                                           Constant::getAllOnesValue(CounterTy));
939     GV = new GlobalVariable(*M, CounterArrTy, false, Linkage,
940                             ConstantArray::get(CounterArrTy, InitialValues),
941                             Name);
942     GV->setAlignment(Align(1));
943   } else {
944     auto *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);
945     GV = new GlobalVariable(*M, CounterTy, false, Linkage,
946                             Constant::getNullValue(CounterTy), Name);
947     GV->setAlignment(Align(8));
948   }
949   return GV;
950 }
951 
952 GlobalVariable *
953 InstrProfiling::getOrCreateRegionCounters(InstrProfInstBase *Inc) {
954   GlobalVariable *NamePtr = Inc->getName();
955   auto &PD = ProfileDataMap[NamePtr];
956   if (PD.RegionCounters)
957     return PD.RegionCounters;
958 
959   // Match the linkage and visibility of the name global.
960   Function *Fn = Inc->getParent()->getParent();
961   GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
962   GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();
963 
964   // Use internal rather than private linkage so the counter variable shows up
965   // in the symbol table when using debug info for correlation.
966   if (DebugInfoCorrelate && TT.isOSBinFormatMachO() &&
967       Linkage == GlobalValue::PrivateLinkage)
968     Linkage = GlobalValue::InternalLinkage;
969 
970   // Due to a limitation of the AIX binder as of 2021/09/28, duplicate weak
971   // symbols in the same csect won't be discarded. When there are duplicate
972   // weak symbols, we cannot guarantee that relocations get resolved to the
973   // intended weak symbol, and therefore cannot ensure the correctness of the
974   // relative CounterPtr, so we use private linkage for counter and data symbols.
975   if (TT.isOSBinFormatXCOFF()) {
976     Linkage = GlobalValue::PrivateLinkage;
977     Visibility = GlobalValue::DefaultVisibility;
978   }
979   // Move the name variable to the right section. Place them in a COMDAT group
980   // if the associated function is a COMDAT. This will make sure that only one
981   // copy of counters of the COMDAT function will be emitted after linking. Keep
982   // in mind that this pass may run before the inliner, so we need to create a
983   // new comdat group for the counters and profiling data. If we use the comdat
984   // of the parent function, that will result in relocations against discarded
985   // sections.
986   //
987   // If the data variable is referenced by code, counters and data have to be
988   // in different comdats for COFF because the Visual C++ linker will report
989   // duplicate symbol errors if there are multiple external symbols with the
990   // same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
991   //
992   // For ELF, when not using COMDAT, put counters, data and values into a
993   // nodeduplicate COMDAT which is lowered to a zero-flag section group. This
994   // allows -z start-stop-gc to discard the entire group when the function is
995   // discarded.
996   bool DataReferencedByCode = profDataReferencedByCode(*M);
997   bool NeedComdat = needsComdatForCounter(*Fn, *M);
998   bool Renamed;
999   std::string CntsVarName =
1000       getVarName(Inc, getInstrProfCountersVarPrefix(), Renamed);
1001   std::string DataVarName =
1002       getVarName(Inc, getInstrProfDataVarPrefix(), Renamed);
1003   auto MaybeSetComdat = [&](GlobalVariable *GV) {
1004     bool UseComdat = (NeedComdat || TT.isOSBinFormatELF());
1005     if (UseComdat) {
1006       StringRef GroupName = TT.isOSBinFormatCOFF() && DataReferencedByCode
1007                                 ? GV->getName()
1008                                 : CntsVarName;
1009       Comdat *C = M->getOrInsertComdat(GroupName);
1010       if (!NeedComdat)
1011         C->setSelectionKind(Comdat::NoDeduplicate);
1012       GV->setComdat(C);
1013       // COFF doesn't allow the comdat group leader to have private linkage, so
1014       // upgrade private linkage to internal linkage to produce a symbol table
1015       // entry.
1016       if (TT.isOSBinFormatCOFF() && GV->hasPrivateLinkage())
1017         GV->setLinkage(GlobalValue::InternalLinkage);
1018     }
1019   };
1020 
1021   uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
1022   LLVMContext &Ctx = M->getContext();
1023 
1024   auto *CounterPtr = createRegionCounters(Inc, CntsVarName, Linkage);
1025   CounterPtr->setVisibility(Visibility);
1026   CounterPtr->setSection(
1027       getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
1028   CounterPtr->setLinkage(Linkage);
1029   MaybeSetComdat(CounterPtr);
1030   PD.RegionCounters = CounterPtr;
1031   if (DebugInfoCorrelate) {
1032     if (auto *SP = Fn->getSubprogram()) {
1033       DIBuilder DB(*M, true, SP->getUnit());
1034       Metadata *FunctionNameAnnotation[] = {
1035           MDString::get(Ctx, InstrProfCorrelator::FunctionNameAttributeName),
1036           MDString::get(Ctx, getPGOFuncNameVarInitializer(NamePtr)),
1037       };
1038       Metadata *CFGHashAnnotation[] = {
1039           MDString::get(Ctx, InstrProfCorrelator::CFGHashAttributeName),
1040           ConstantAsMetadata::get(Inc->getHash()),
1041       };
1042       Metadata *NumCountersAnnotation[] = {
1043           MDString::get(Ctx, InstrProfCorrelator::NumCountersAttributeName),
1044           ConstantAsMetadata::get(Inc->getNumCounters()),
1045       };
1046       auto Annotations = DB.getOrCreateArray({
1047           MDNode::get(Ctx, FunctionNameAnnotation),
1048           MDNode::get(Ctx, CFGHashAnnotation),
1049           MDNode::get(Ctx, NumCountersAnnotation),
1050       });
1051       auto *DICounter = DB.createGlobalVariableExpression(
1052           SP, CounterPtr->getName(), /*LinkageName=*/StringRef(), SP->getFile(),
1053           /*LineNo=*/0, DB.createUnspecifiedType("Profile Data Type"),
1054           CounterPtr->hasLocalLinkage(), /*IsDefined=*/true, /*Expr=*/nullptr,
1055           /*Decl=*/nullptr, /*TemplateParams=*/nullptr, /*AlignInBits=*/0,
1056           Annotations);
1057       CounterPtr->addDebugInfo(DICounter);
1058       DB.finalize();
1059     } else {
1060       std::string Msg = ("Missing debug info for function " + Fn->getName() +
1061                          "; required for profile correlation.")
1062                             .str();
1063       Ctx.diagnose(
1064           DiagnosticInfoPGOProfile(M->getName().data(), Msg, DS_Warning));
1065     }
1066   }
1067 
1068   auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
1069   // Statically allocate the array of pointers to value profile nodes for
1070   // the current function.
1071   Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
1072   uint64_t NS = 0;
1073   for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1074     NS += PD.NumValueSites[Kind];
1075   if (NS > 0 && ValueProfileStaticAlloc &&
1076       !needsRuntimeRegistrationOfSectionRange(TT)) {
1077     ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);
1078     auto *ValuesVar = new GlobalVariable(
1079         *M, ValuesTy, false, Linkage, Constant::getNullValue(ValuesTy),
1080         getVarName(Inc, getInstrProfValuesVarPrefix(), Renamed));
1081     ValuesVar->setVisibility(Visibility);
1082     ValuesVar->setSection(
1083         getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
1084     ValuesVar->setAlignment(Align(8));
1085     MaybeSetComdat(ValuesVar);
1086     ValuesPtrExpr =
1087         ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
1088   }
1089 
1090   if (DebugInfoCorrelate) {
1091     // Mark the counter variable as used so that it isn't optimized out.
1092     CompilerUsedVars.push_back(PD.RegionCounters);
1093     return PD.RegionCounters;
1094   }
1095 
1096   // Create data variable.
1097   auto *IntPtrTy = M->getDataLayout().getIntPtrType(M->getContext());
1098   auto *Int16Ty = Type::getInt16Ty(Ctx);
1099   auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
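  // Int16ArrayTy holds one entry per value kind, recording how many value
  // sites of that kind were instrumented in this function (see Int16ArrayVals
  // below).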
1100   Type *DataTypes[] = {
1101 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
1102 #include "llvm/ProfileData/InstrProfData.inc"
1103   };
1104   auto *DataTy = StructType::get(Ctx, ArrayRef(DataTypes));
1105 
1106   Constant *FunctionAddr = getFuncAddrForProfData(Fn);
1107 
1108   Constant *Int16ArrayVals[IPVK_Last + 1];
1109   for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1110     Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);
1111 
1112   // If the data variable is not referenced by code (if we don't emit
1113   // @llvm.instrprof.value.profile, NS will be 0), and the counter keeps the
1114   // data variable live under linker GC, the data variable can be private. This
1115   // optimization applies to ELF.
1116   //
1117   // On COFF, a comdat leader cannot be local so we require DataReferencedByCode
1118   // to be false.
1119   //
1120   // If profd is in a deduplicate comdat, NS==0 with a hash suffix guarantees
1121   // that other copies must have the same CFG and cannot have value profiling.
1122   // If no hash suffix, other profd copies may be referenced by code.
1123   if (NS == 0 && !(DataReferencedByCode && NeedComdat && !Renamed) &&
1124       (TT.isOSBinFormatELF() ||
1125        (!DataReferencedByCode && TT.isOSBinFormatCOFF()))) {
1126     Linkage = GlobalValue::PrivateLinkage;
1127     Visibility = GlobalValue::DefaultVisibility;
1128   }
1129   auto *Data =
1130       new GlobalVariable(*M, DataTy, false, Linkage, nullptr, DataVarName);
1131   // Reference the counter variable with a label difference (link-time
1132   // constant).
1133   auto *RelativeCounterPtr =
1134       ConstantExpr::getSub(ConstantExpr::getPtrToInt(CounterPtr, IntPtrTy),
1135                            ConstantExpr::getPtrToInt(Data, IntPtrTy));
1136 
1137   Constant *DataVals[] = {
1138 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
1139 #include "llvm/ProfileData/InstrProfData.inc"
1140   };
1141   Data->setInitializer(ConstantStruct::get(DataTy, DataVals));
1142 
1143   Data->setVisibility(Visibility);
1144   Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
1145   Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
1146   MaybeSetComdat(Data);
1147 
1148   PD.DataVar = Data;
1149 
1150   // Mark the data variable as used so that it isn't stripped out.
1151   CompilerUsedVars.push_back(Data);
1152   // Now that the linkage set by the FE has been passed to the data and counter
1153   // variables, reset the Name variable's linkage and visibility to private so
1154   // that it can be removed later by the compiler.
1155   NamePtr->setLinkage(GlobalValue::PrivateLinkage);
1156   // Collect the referenced names to be used by emitNameData.
1157   ReferencedNames.push_back(NamePtr);
1158 
1159   return PD.RegionCounters;
1160 }
1161 
1162 void InstrProfiling::emitVNodes() {
1163   if (!ValueProfileStaticAlloc)
1164     return;
1165 
1166   // For now only support this on platforms that do
1167   // not require runtime registration to discover
1168   // named section start/end.
1169   if (needsRuntimeRegistrationOfSectionRange(TT))
1170     return;
1171 
1172   size_t TotalNS = 0;
1173   for (auto &PD : ProfileDataMap) {
1174     for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1175       TotalNS += PD.second.NumValueSites[Kind];
1176   }
1177 
1178   if (!TotalNS)
1179     return;
1180 
1181   uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
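  // Illustrative numbers only: 8 value sites at the default of 1.0 counters
  // per site gives 8, which the heuristic below bumps to max(10, 16) = 16.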
1182 // Heuristic for small programs with very few total value sites.
1183 // The default value of vp-counters-per-site is chosen based on
1184 // the observation that large apps usually have a low percentage
1185 // of value sites that actually have any profile data, and thus
1186 // the average number of counters per site is low. For small
1187 // apps with very few sites, this may not be true. Bump up the
1188 // number of counters in this case.
1189 #define INSTR_PROF_MIN_VAL_COUNTS 10
1190   if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
1191     NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);
1192 
1193   auto &Ctx = M->getContext();
1194   Type *VNodeTypes[] = {
1195 #define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
1196 #include "llvm/ProfileData/InstrProfData.inc"
1197   };
1198   auto *VNodeTy = StructType::get(Ctx, ArrayRef(VNodeTypes));
1199 
1200   ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
1201   auto *VNodesVar = new GlobalVariable(
1202       *M, VNodesTy, false, GlobalValue::PrivateLinkage,
1203       Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
1204   VNodesVar->setSection(
1205       getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
1206   VNodesVar->setAlignment(M->getDataLayout().getABITypeAlign(VNodesTy));
1207   // VNodesVar is used by the runtime but not referenced via relocation by
1208   // other sections. Conservatively make it linker retained.
1209   UsedVars.push_back(VNodesVar);
1210 }
1211 
1212 void InstrProfiling::emitNameData() {
1213   std::string UncompressedData;
1214 
1215   if (ReferencedNames.empty())
1216     return;
1217 
1218   std::string CompressedNameStr;
1219   if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
1220                                           DoInstrProfNameCompression)) {
1221     report_fatal_error(Twine(toString(std::move(E))), false);
1222   }
1223 
1224   auto &Ctx = M->getContext();
1225   auto *NamesVal =
1226       ConstantDataArray::getString(Ctx, StringRef(CompressedNameStr), false);
1227   NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
1228                                 GlobalValue::PrivateLinkage, NamesVal,
1229                                 getInstrProfNamesVarName());
1230   NamesSize = CompressedNameStr.size();
1231   NamesVar->setSection(
1232       getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
1233   // On COFF, it's important to reduce the alignment down to 1 to prevent the
1234   // linker from inserting padding before the start of the names section or
1235   // between names entries.
1236   NamesVar->setAlignment(Align(1));
1237   // NamesVar is used by the runtime but not referenced via relocation by
1238   // other sections. Conservatively make it linker retained.
1239   UsedVars.push_back(NamesVar);
1240 
1241   for (auto *NamePtr : ReferencedNames)
1242     NamePtr->eraseFromParent();
1243 }
1244 
1245 void InstrProfiling::emitRegistration() {
1246   if (!needsRuntimeRegistrationOfSectionRange(TT))
1247     return;
1248 
1249   // Construct the function.
1250   auto *VoidTy = Type::getVoidTy(M->getContext());
1251   auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
1252   auto *Int64Ty = Type::getInt64Ty(M->getContext());
1253   auto *RegisterFTy = FunctionType::get(VoidTy, false);
1254   auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
1255                                      getInstrProfRegFuncsName(), M);
1256   RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1257   if (Options.NoRedZone)
1258     RegisterF->addFnAttr(Attribute::NoRedZone);
1259 
1260   auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
1261   auto *RuntimeRegisterF =
1262       Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
1263                        getInstrProfRegFuncName(), M);
1264 
1265   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
1266   for (Value *Data : CompilerUsedVars)
1267     if (!isa<Function>(Data))
1268       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1269   for (Value *Data : UsedVars)
1270     if (Data != NamesVar && !isa<Function>(Data))
1271       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1272 
1273   if (NamesVar) {
1274     Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
1275     auto *NamesRegisterTy =
1276         FunctionType::get(VoidTy, ArrayRef(ParamTypes), false);
1277     auto *NamesRegisterF =
1278         Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
1279                          getInstrProfNamesRegFuncName(), M);
1280     IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
1281                                     IRB.getInt64(NamesSize)});
1282   }
1283 
1284   IRB.CreateRetVoid();
1285 }
1286 
1287 bool InstrProfiling::emitRuntimeHook() {
1288   // We expect the linker to be invoked with the -u<hook_var> flag on Linux
1289   // and AIX, in which case there is no need to emit the external variable.
1290   if (TT.isOSLinux() || TT.isOSAIX())
1291     return false;
1292 
1293   // If the module has provided its own runtime hook variable, do nothing.
1294   if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
1295     return false;
1296 
1297   // Declare an external variable that will pull in the runtime initialization.
1298   auto *Int32Ty = Type::getInt32Ty(M->getContext());
1299   auto *Var =
1300       new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
1301                          nullptr, getInstrProfRuntimeHookVarName());
1302   Var->setVisibility(GlobalValue::HiddenVisibility);
1303 
1304   if (TT.isOSBinFormatELF() && !TT.isPS()) {
1305     // Mark the user variable as used so that it isn't stripped out.
1306     CompilerUsedVars.push_back(Var);
1307   } else {
1308     // Make a function that uses it.
1309     auto *User = Function::Create(FunctionType::get(Int32Ty, false),
1310                                   GlobalValue::LinkOnceODRLinkage,
1311                                   getInstrProfRuntimeHookVarUseFuncName(), M);
1312     User->addFnAttr(Attribute::NoInline);
1313     if (Options.NoRedZone)
1314       User->addFnAttr(Attribute::NoRedZone);
1315     User->setVisibility(GlobalValue::HiddenVisibility);
1316     if (TT.supportsCOMDAT())
1317       User->setComdat(M->getOrInsertComdat(User->getName()));
1318 
1319     IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
1320     auto *Load = IRB.CreateLoad(Int32Ty, Var);
1321     IRB.CreateRet(Load);
1322 
1323     // Mark the function as used so that it isn't stripped out.
1324     CompilerUsedVars.push_back(User);
1325   }
1326   return true;
1327 }
1328 
1329 void InstrProfiling::emitUses() {
1330   // The metadata sections are parallel arrays. Optimizers (e.g.
1331   // GlobalOpt/ConstantMerge) may not discard associated sections as a unit, so
1332   // we conservatively retain all unconditionally in the compiler.
1333   //
1334   // On ELF and Mach-O, the linker can guarantee the associated sections will be
1335   // retained or discarded as a unit, so llvm.compiler.used is sufficient.
1336   // Similarly on COFF, if prof data is not referenced by code we use one comdat
1337   // and ensure this GC property as well. Otherwise, we have to conservatively
1338   // make all of the sections retained by the linker.
1339   if (TT.isOSBinFormatELF() || TT.isOSBinFormatMachO() ||
1340       (TT.isOSBinFormatCOFF() && !profDataReferencedByCode(*M)))
1341     appendToCompilerUsed(*M, CompilerUsedVars);
1342   else
1343     appendToUsed(*M, CompilerUsedVars);
1344 
1345   // We do not add proper references from used metadata sections to NamesVar and
1346   // VNodesVar, so we have to be conservative and place them in llvm.used
1347   // regardless of the target.
1348   appendToUsed(*M, UsedVars);
1349 }
1350 
1351 void InstrProfiling::emitInitialization() {
1352   // Create the ProfileFileName variable. Don't do this for the
1353   // context-sensitive instrumentation lowering: that lowering happens after
1354   // LTO/ThinLTO linking, and the PGOInstrumentationGenCreateVar pass should
1355   // have already created the variable before LTO/ThinLTO linking.
1356   if (!IsCS)
1357     createProfileFileNameVar(*M, Options.InstrProfileOutput);
1358   Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
1359   if (!RegisterF)
1360     return;
1361 
1362   // Create the initialization function.
1363   auto *VoidTy = Type::getVoidTy(M->getContext());
1364   auto *F = Function::Create(FunctionType::get(VoidTy, false),
1365                              GlobalValue::InternalLinkage,
1366                              getInstrProfInitFuncName(), M);
1367   F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1368   F->addFnAttr(Attribute::NoInline);
1369   if (Options.NoRedZone)
1370     F->addFnAttr(Attribute::NoRedZone);
1371 
1372   // Add the basic block and the necessary calls.
1373   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
1374   IRB.CreateCall(RegisterF, {});
1375   IRB.CreateRetVoid();
1376 
1377   appendToGlobalCtors(*M, F, 0);
1378 }
1379