//===- GlobalsModRef.cpp - Simple Mod/Ref Analysis for Globals ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This simple pass provides alias and mod/ref information for global values
// that do not have their address taken, and keeps track of whether functions
// read or write memory (are "pure").  For this simple (but very common) case,
// we can provide pretty accurate and useful information.
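//
// For example (illustrative IR only, not taken from any test):
//
//   @g = internal global i32 0          ; address never taken
//   define internal void @f() {         ; writes @g directly
//     store i32 1, ptr @g
//     ret void
//   }
//
// Because @g is never address-taken, the pass can record exactly which
// functions read or write @g and answer mod/ref queries about it precisely.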
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/PassManager.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"

using namespace llvm;

#define DEBUG_TYPE "globalsmodref-aa"

STATISTIC(NumNonAddrTakenGlobalVars,
          "Number of global vars without address taken");
STATISTIC(NumNonAddrTakenFunctions,"Number of functions without address taken");
STATISTIC(NumNoMemFunctions, "Number of functions that do not access memory");
STATISTIC(NumReadMemFunctions, "Number of functions that only read memory");
STATISTIC(NumIndirectGlobalVars, "Number of indirect global objects");

// An option to enable unsafe alias results from the GlobalsModRef analysis.
// When enabled, GlobalsModRef will provide no-alias results which in extremely
// rare cases may not be conservatively correct. In particular, in the face of
// transforms which cause asymmetry between how effective getUnderlyingObject
// is for two pointers, it may produce incorrect results.
//
// These unsafe results have been returned by GMR for many years without
// causing significant issues in the wild and so we provide a mechanism to
// re-enable them for users of LLVM that have a particular performance
// sensitivity and no known issues. The option also makes it easy to evaluate
// the performance impact of these results.
static cl::opt<bool> EnableUnsafeGlobalsModRefAliasResults(
    "enable-unsafe-globalsmodref-alias-results", cl::init(false), cl::Hidden);

/// The mod/ref information collected for a particular function.
///
/// We collect information about mod/ref behavior of a function here, both in
/// general and as pertains to specific globals. We only have this detailed
/// information when we know *something* useful about the behavior. If we
/// saturate to fully general mod/ref, we remove the info for the function.
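///
/// A minimal usage sketch (illustrative only; G stands for some tracked
/// GlobalValue reference, not a name used elsewhere in this file):
///   FunctionInfo FI;
///   FI.addModRefInfoForGlobal(G, ModRefInfo::Ref);
///   if (isRefSet(FI.getModRefInfoForGlobal(G)))
///     ; // The function may read G.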
class GlobalsAAResult::FunctionInfo {
  typedef SmallDenseMap<const GlobalValue *, ModRefInfo, 16> GlobalInfoMapType;

  /// Build a wrapper struct that has 8-byte alignment. All heap allocations
  /// should provide this much alignment at least, but this makes it clear we
  /// specifically rely on this amount of alignment.
  struct alignas(8) AlignedMap {
    AlignedMap() = default;
    AlignedMap(const AlignedMap &Arg) = default;
    GlobalInfoMapType Map;
  };

  /// Pointer traits for our aligned map.
  struct AlignedMapPointerTraits {
    static inline void *getAsVoidPointer(AlignedMap *P) { return P; }
    static inline AlignedMap *getFromVoidPointer(void *P) {
      return (AlignedMap *)P;
    }
    static constexpr int NumLowBitsAvailable = 3;
    static_assert(alignof(AlignedMap) >= (1 << NumLowBitsAvailable),
                  "AlignedMap insufficiently aligned to have enough low bits.");
  };

  /// The bit that flags that this function may read any global. This is
  /// chosen to mix together with ModRefInfo bits.
  /// FIXME: This assumes ModRefInfo lattice will remain 4 bits!
  /// It overlaps with ModRefInfo::Must bit!
  /// FunctionInfo.getModRefInfo() masks out everything except ModRef so
  /// this remains correct, but the Must info is lost.
  enum { MayReadAnyGlobal = 4 };

  /// Checks to document the invariants of the bit packing here.
  static_assert((MayReadAnyGlobal & static_cast<int>(ModRefInfo::MustModRef)) ==
                    0,
                "ModRef and the MayReadAnyGlobal flag bits overlap.");
  static_assert(((MayReadAnyGlobal |
                  static_cast<int>(ModRefInfo::MustModRef)) >>
                 AlignedMapPointerTraits::NumLowBitsAvailable) == 0,
                "Insufficient low bits to store our flag and ModRef info.");

public:
  FunctionInfo() = default;
  ~FunctionInfo() {
    delete Info.getPointer();
  }
  // Spell out the copy and move constructors and assignment operators to get
  // deep copy semantics and correct move semantics in the face of the
  // pointer-int pair.
  FunctionInfo(const FunctionInfo &Arg)
      : Info(nullptr, Arg.Info.getInt()) {
    if (const auto *ArgPtr = Arg.Info.getPointer())
      Info.setPointer(new AlignedMap(*ArgPtr));
  }
  FunctionInfo(FunctionInfo &&Arg)
      : Info(Arg.Info.getPointer(), Arg.Info.getInt()) {
    Arg.Info.setPointerAndInt(nullptr, 0);
  }
  FunctionInfo &operator=(const FunctionInfo &RHS) {
    delete Info.getPointer();
    Info.setPointerAndInt(nullptr, RHS.Info.getInt());
    if (const auto *RHSPtr = RHS.Info.getPointer())
      Info.setPointer(new AlignedMap(*RHSPtr));
    return *this;
  }
  FunctionInfo &operator=(FunctionInfo &&RHS) {
    delete Info.getPointer();
    Info.setPointerAndInt(RHS.Info.getPointer(), RHS.Info.getInt());
    RHS.Info.setPointerAndInt(nullptr, 0);
    return *this;
  }

  /// This method clears the MayReadAnyGlobal bit added by GlobalsAAResult to
  /// return the corresponding ModRefInfo. It must align in functionality with
  /// clearMust().
  ModRefInfo globalClearMayReadAnyGlobal(int I) const {
    return ModRefInfo((I & static_cast<int>(ModRefInfo::ModRef)) |
                      static_cast<int>(ModRefInfo::NoModRef));
  }

  /// Returns the \c ModRefInfo info for this function.
  ModRefInfo getModRefInfo() const {
    return globalClearMayReadAnyGlobal(Info.getInt());
  }

  /// Adds new \c ModRefInfo for this function to its state.
  void addModRefInfo(ModRefInfo NewMRI) {
    Info.setInt(Info.getInt() | static_cast<int>(setMust(NewMRI)));
  }

  /// Returns whether this function may read any global variable, and we don't
  /// know which global.
  bool mayReadAnyGlobal() const { return Info.getInt() & MayReadAnyGlobal; }

  /// Sets this function as potentially reading from any global.
  void setMayReadAnyGlobal() { Info.setInt(Info.getInt() | MayReadAnyGlobal); }

  /// Returns the \c ModRefInfo info for this function w.r.t. a particular
  /// global, which may be more precise than the general information above.
  ModRefInfo getModRefInfoForGlobal(const GlobalValue &GV) const {
    ModRefInfo GlobalMRI =
        mayReadAnyGlobal() ? ModRefInfo::Ref : ModRefInfo::NoModRef;
    if (AlignedMap *P = Info.getPointer()) {
      auto I = P->Map.find(&GV);
      if (I != P->Map.end())
        GlobalMRI = unionModRef(GlobalMRI, I->second);
    }
    return GlobalMRI;
  }

  /// Add mod/ref info from another function into ours, saturating towards
  /// ModRef.
  void addFunctionInfo(const FunctionInfo &FI) {
    addModRefInfo(FI.getModRefInfo());

    if (FI.mayReadAnyGlobal())
      setMayReadAnyGlobal();

    if (AlignedMap *P = FI.Info.getPointer())
      for (const auto &G : P->Map)
        addModRefInfoForGlobal(*G.first, G.second);
  }

  void addModRefInfoForGlobal(const GlobalValue &GV, ModRefInfo NewMRI) {
    AlignedMap *P = Info.getPointer();
    if (!P) {
      P = new AlignedMap();
      Info.setPointer(P);
    }
    auto &GlobalMRI = P->Map[&GV];
    GlobalMRI = unionModRef(GlobalMRI, NewMRI);
  }

  /// Clear a global's ModRef info. Should be used when a global is being
  /// deleted.
  void eraseModRefInfoForGlobal(const GlobalValue &GV) {
    if (AlignedMap *P = Info.getPointer())
      P->Map.erase(&GV);
  }

private:
  /// All of the information is encoded into a single pointer, with a three bit
  /// integer in the low three bits. The high bit provides a flag for when this
  /// function may read any global. The low two bits are the ModRefInfo. And
  /// the pointer, when non-null, points to a map from GlobalValue to
  /// ModRefInfo specific to that GlobalValue.
  PointerIntPair<AlignedMap *, 3, unsigned, AlignedMapPointerTraits> Info;
};

void GlobalsAAResult::DeletionCallbackHandle::deleted() {
  Value *V = getValPtr();
  if (auto *F = dyn_cast<Function>(V))
    GAR->FunctionInfos.erase(F);

  if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) {
    if (GAR->NonAddressTakenGlobals.erase(GV)) {
      // This global might be an indirect global.  If so, remove it and
      // remove any AllocRelatedValues for it.
      if (GAR->IndirectGlobals.erase(GV)) {
        // Remove any entries in AllocsForIndirectGlobals for this global.
        for (auto I = GAR->AllocsForIndirectGlobals.begin(),
                  E = GAR->AllocsForIndirectGlobals.end();
             I != E; ++I)
          if (I->second == GV)
            GAR->AllocsForIndirectGlobals.erase(I);
      }

      // Scan the function info we have collected and remove this global
      // from all of them.
      for (auto &FIPair : GAR->FunctionInfos)
        FIPair.second.eraseModRefInfoForGlobal(*GV);
    }
  }

  // If this is an allocation related to an indirect global, remove it.
  GAR->AllocsForIndirectGlobals.erase(V);

  // And clear out the handle.
  setValPtr(nullptr);
  GAR->Handles.erase(I);
  // This object is now destroyed!
}

FunctionModRefBehavior GlobalsAAResult::getModRefBehavior(const Function *F) {
  FunctionModRefBehavior Min = FMRB_UnknownModRefBehavior;

  if (FunctionInfo *FI = getFunctionInfo(F)) {
    if (!isModOrRefSet(FI->getModRefInfo()))
      Min = FMRB_DoesNotAccessMemory;
    else if (!isModSet(FI->getModRefInfo()))
      Min = FMRB_OnlyReadsMemory;
  }

  return FunctionModRefBehavior(AAResultBase::getModRefBehavior(F) & Min);
}

/// Returns the function info for the function, or null if we don't have
/// anything useful to say about it.
GlobalsAAResult::FunctionInfo *
GlobalsAAResult::getFunctionInfo(const Function *F) {
  auto I = FunctionInfos.find(F);
  if (I != FunctionInfos.end())
    return &I->second;
  return nullptr;
}

/// AnalyzeGlobals - Scan through the users of all of the internal
/// GlobalValues in the program.  If none of them have their "address taken"
/// (really, their address passed to something nontrivial), record this fact,
/// and record the functions in which they are used directly.
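/// Illustrative example (hypothetical IR): given
///   @g = internal global i32 0
///   define internal void @f() { %v = load i32, ptr @g ... }
/// @g is recorded as non-address-taken with @f as a reader, whereas a use
/// such as "store ptr @g, ptr @p" would count as taking @g's address and
/// stop us from tracking it.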
void GlobalsAAResult::AnalyzeGlobals(Module &M) {
  SmallPtrSet<Function *, 32> TrackedFunctions;
  for (Function &F : M)
    if (F.hasLocalLinkage()) {
      if (!AnalyzeUsesOfPointer(&F)) {
        // Remember that we are tracking this global.
        NonAddressTakenGlobals.insert(&F);
        TrackedFunctions.insert(&F);
        Handles.emplace_front(*this, &F);
        Handles.front().I = Handles.begin();
        ++NumNonAddrTakenFunctions;
      } else
        UnknownFunctionsWithLocalLinkage = true;
    }

  SmallPtrSet<Function *, 16> Readers, Writers;
  for (GlobalVariable &GV : M.globals())
    if (GV.hasLocalLinkage()) {
      if (!AnalyzeUsesOfPointer(&GV, &Readers,
                                GV.isConstant() ? nullptr : &Writers)) {
        // Remember that we are tracking this global, and the mod/ref fns
        NonAddressTakenGlobals.insert(&GV);
        Handles.emplace_front(*this, &GV);
        Handles.front().I = Handles.begin();

        for (Function *Reader : Readers) {
          if (TrackedFunctions.insert(Reader).second) {
            Handles.emplace_front(*this, Reader);
            Handles.front().I = Handles.begin();
          }
          FunctionInfos[Reader].addModRefInfoForGlobal(GV, ModRefInfo::Ref);
        }

        if (!GV.isConstant()) // No need to keep track of writers to constants
          for (Function *Writer : Writers) {
            if (TrackedFunctions.insert(Writer).second) {
              Handles.emplace_front(*this, Writer);
              Handles.front().I = Handles.begin();
            }
            FunctionInfos[Writer].addModRefInfoForGlobal(GV, ModRefInfo::Mod);
          }
        ++NumNonAddrTakenGlobalVars;

        // If this global holds a pointer type, see if it is an indirect global.
        if (GV.getValueType()->isPointerTy() &&
            AnalyzeIndirectGlobalMemory(&GV))
          ++NumIndirectGlobalVars;
      }
      Readers.clear();
      Writers.clear();
    }
}

/// AnalyzeUsesOfPointer - Look at all of the users of the specified pointer.
/// If this is used by anything complex (i.e., the address escapes), return
/// true.  Also, while we are at it, keep track of those functions that read and
/// write to the value.
///
/// If OkayStoreDest is non-null, stores into this global are allowed.
bool GlobalsAAResult::AnalyzeUsesOfPointer(Value *V,
                                           SmallPtrSetImpl<Function *> *Readers,
                                           SmallPtrSetImpl<Function *> *Writers,
                                           GlobalValue *OkayStoreDest) {
  if (!V->getType()->isPointerTy())
    return true;

  for (Use &U : V->uses()) {
    User *I = U.getUser();
    if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
      if (Readers)
        Readers->insert(LI->getParent()->getParent());
    } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
      if (V == SI->getOperand(1)) {
        if (Writers)
          Writers->insert(SI->getParent()->getParent());
      } else if (SI->getOperand(1) != OkayStoreDest) {
        return true; // Storing the pointer
      }
    } else if (Operator::getOpcode(I) == Instruction::GetElementPtr) {
      if (AnalyzeUsesOfPointer(I, Readers, Writers))
        return true;
    } else if (Operator::getOpcode(I) == Instruction::BitCast ||
               Operator::getOpcode(I) == Instruction::AddrSpaceCast) {
      if (AnalyzeUsesOfPointer(I, Readers, Writers, OkayStoreDest))
        return true;
    } else if (auto *Call = dyn_cast<CallBase>(I)) {
      // Make sure that this is just the function being called, not that it is
      // being passed into the function.
      if (Call->isDataOperand(&U)) {
        // Detect calls to free.
        if (Call->isArgOperand(&U) &&
            getFreedOperand(Call, &GetTLI(*Call->getFunction())) == U) {
          if (Writers)
            Writers->insert(Call->getParent()->getParent());
        } else {
          return true; // Argument of an unknown call.
        }
      }
    } else if (ICmpInst *ICI = dyn_cast<ICmpInst>(I)) {
      if (!isa<ConstantPointerNull>(ICI->getOperand(1)))
        return true; // Allow comparison against null.
    } else if (Constant *C = dyn_cast<Constant>(I)) {
      // Ignore constants which don't have any live uses.
      if (isa<GlobalValue>(C) || C->isConstantUsed())
        return true;
    } else {
      return true;
    }
  }

  return false;
}

/// AnalyzeIndirectGlobalMemory - We found a non-address-taken global variable
/// which holds a pointer type.  See if the global always points to non-aliased
/// heap memory: that is, all initializers of the global store a value known
/// to be obtained via a noalias return function call which has no other use.
/// Further, all loads out of GV must directly use the memory, not store the
/// pointer somewhere.  If this is true, we consider the memory pointed to by
/// GV to be owned by GV and can disambiguate other pointers from it.
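///
/// Illustrative example (hypothetical IR): a global such as
///   @buf = internal global ptr null
/// whose only non-null stores look like
///   %p = call noalias ptr @malloc(i64 64)
///   store ptr %p, ptr @buf
/// and whose loaded pointer is only accessed directly qualifies as an
/// "indirect global" here.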
bool GlobalsAAResult::AnalyzeIndirectGlobalMemory(GlobalVariable *GV) {
  // Keep track of values related to the allocation of the memory, e.g. the
  // value produced by the noalias call and any casts.
  std::vector<Value *> AllocRelatedValues;

  // If the initializer is a valid pointer, bail.
  if (Constant *C = GV->getInitializer())
    if (!C->isNullValue())
      return false;

  // Walk the user list of the global.  If we find anything other than a direct
  // load or store, bail out.
  for (User *U : GV->users()) {
    if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
      // The pointer loaded from the global can only be used in simple ways:
      // we allow addressing of it and loading from and storing to it.  We do
      // *not* allow storing the loaded pointer somewhere else or passing it to
      // a function.
      if (AnalyzeUsesOfPointer(LI))
        return false; // Loaded pointer escapes.
      // TODO: Could try some IP mod/ref of the loaded pointer.
    } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      // Storing the global itself.
      if (SI->getOperand(0) == GV)
        return false;

      // If storing the null pointer, ignore it.
      if (isa<ConstantPointerNull>(SI->getOperand(0)))
        continue;

      // Check the value being stored.
      Value *Ptr = getUnderlyingObject(SI->getOperand(0));

      if (!isNoAliasCall(Ptr))
        return false; // Too hard to analyze.

      // Analyze all uses of the allocation.  If any of them are used in a
      // non-simple way (e.g. stored to another global) bail out.
      if (AnalyzeUsesOfPointer(Ptr, /*Readers*/ nullptr, /*Writers*/ nullptr,
                               GV))
        return false; // Loaded pointer escapes.

      // Remember that this allocation is related to the indirect global.
      AllocRelatedValues.push_back(Ptr);
    } else {
      // Something complex, bail out.
      return false;
    }
  }

  // Okay, this is an indirect global.  Remember all of the allocations for
  // this global in AllocsForIndirectGlobals.
  while (!AllocRelatedValues.empty()) {
    AllocsForIndirectGlobals[AllocRelatedValues.back()] = GV;
    Handles.emplace_front(*this, AllocRelatedValues.back());
    Handles.front().I = Handles.begin();
    AllocRelatedValues.pop_back();
  }
  IndirectGlobals.insert(GV);
  Handles.emplace_front(*this, GV);
  Handles.front().I = Handles.begin();
  return true;
}

void GlobalsAAResult::CollectSCCMembership(CallGraph &CG) {
  // We do a bottom-up SCC traversal of the call graph.  In other words, we
  // visit all callees before callers (leaf-first).
  unsigned SCCID = 0;
  for (scc_iterator<CallGraph *> I = scc_begin(&CG); !I.isAtEnd(); ++I) {
    const std::vector<CallGraphNode *> &SCC = *I;
    assert(!SCC.empty() && "SCC with no functions?");

    for (auto *CGN : SCC)
      if (Function *F = CGN->getFunction())
        FunctionToSCCMap[F] = SCCID;
    ++SCCID;
  }
}

/// AnalyzeCallGraph - At this point, we know the functions where globals are
/// immediately stored to and read from.  Propagate this information up the call
/// graph to all callers and compute the mod/ref info for all memory for each
/// function.
void GlobalsAAResult::AnalyzeCallGraph(CallGraph &CG, Module &M) {
  // We do a bottom-up SCC traversal of the call graph.  In other words, we
  // visit all callees before callers (leaf-first).
  for (scc_iterator<CallGraph *> I = scc_begin(&CG); !I.isAtEnd(); ++I) {
    const std::vector<CallGraphNode *> &SCC = *I;
    assert(!SCC.empty() && "SCC with no functions?");

    Function *F = SCC[0]->getFunction();

    if (!F || !F->isDefinitionExact()) {
      // Calls externally or not exact - can't say anything useful. Remove any
      // existing function records (may have been created when scanning
      // globals).
      for (auto *Node : SCC)
        FunctionInfos.erase(Node->getFunction());
      continue;
    }

    FunctionInfo &FI = FunctionInfos[F];
    Handles.emplace_front(*this, F);
    Handles.front().I = Handles.begin();
    bool KnowNothing = false;

    // Intrinsics, like any other synchronizing function, can make the effects
    // of other threads visible. Without nosync we really know nothing.
    // Similarly, if `nocallback` is missing, the function (or intrinsic) can
    // call into the module arbitrarily. If both attributes are set, the
    // function has an effect but will not interact with accesses of internal
    // globals inside the module. We are conservative here for optnone
    // functions; this might not be necessary.
    auto MaySyncOrCallIntoModule = [](const Function &F) {
      return !F.isDeclaration() || !F.hasNoSync() ||
             !F.hasFnAttribute(Attribute::NoCallback);
    };

    // Collect the mod/ref properties due to called functions.  We only compute
    // one mod-ref set.
    for (unsigned i = 0, e = SCC.size(); i != e && !KnowNothing; ++i) {
      if (!F) {
        KnowNothing = true;
        break;
      }

      if (F->isDeclaration() || F->hasOptNone()) {
        // Try to get mod/ref behaviour from function attributes.
        if (F->doesNotAccessMemory()) {
          // Can't do better than that!
        } else if (F->onlyReadsMemory()) {
          FI.addModRefInfo(ModRefInfo::Ref);
          if (!F->onlyAccessesArgMemory() && MaySyncOrCallIntoModule(*F))
            // This function might call back into the module and read a global -
            // consider every global as possibly being read by this function.
            FI.setMayReadAnyGlobal();
        } else {
          FI.addModRefInfo(ModRefInfo::ModRef);
          if (!F->onlyAccessesArgMemory())
            FI.setMayReadAnyGlobal();
          if (MaySyncOrCallIntoModule(*F)) {
            KnowNothing = true;
            break;
          }
        }
        continue;
      }

      for (CallGraphNode::iterator CI = SCC[i]->begin(), E = SCC[i]->end();
           CI != E && !KnowNothing; ++CI)
        if (Function *Callee = CI->second->getFunction()) {
          if (FunctionInfo *CalleeFI = getFunctionInfo(Callee)) {
            // Propagate function effect up.
            FI.addFunctionInfo(*CalleeFI);
          } else {
            // Can't say anything about it.  However, if it is inside our SCC,
            // then nothing needs to be done.
            CallGraphNode *CalleeNode = CG[Callee];
            if (!is_contained(SCC, CalleeNode))
              KnowNothing = true;
          }
        } else {
          KnowNothing = true;
        }
    }

    // If we can't say anything useful about this SCC, remove all SCC functions
    // from the FunctionInfos map.
    if (KnowNothing) {
      for (auto *Node : SCC)
        FunctionInfos.erase(Node->getFunction());
      continue;
    }

    // Scan the function bodies for explicit loads or stores.
    for (auto *Node : SCC) {
      if (isModAndRefSet(FI.getModRefInfo()))
        break; // The mod/ref lattice saturates here.

      // Don't prove any properties based on the implementation of an optnone
      // function. Function attributes were already used as a best approximation
      // above.
      if (Node->getFunction()->hasOptNone())
        continue;

      for (Instruction &I : instructions(Node->getFunction())) {
        if (isModAndRefSet(FI.getModRefInfo()))
          break; // The mod/ref lattice saturates here.

        // We handle calls specially because the graph-relevant aspects are
        // handled above.
        if (auto *Call = dyn_cast<CallBase>(&I)) {
          if (Function *Callee = Call->getCalledFunction()) {
            // The callgraph doesn't include intrinsic calls.
            if (Callee->isIntrinsic()) {
              if (isa<DbgInfoIntrinsic>(Call))
                // Don't let dbg intrinsics affect alias info.
                continue;

              FunctionModRefBehavior Behaviour =
                  AAResultBase::getModRefBehavior(Callee);
              FI.addModRefInfo(createModRefInfo(Behaviour));
            }
          }
          continue;
        }

        // For all non-call instructions, we use the primary predicates to
        // determine whether they read or write memory.
        if (I.mayReadFromMemory())
          FI.addModRefInfo(ModRefInfo::Ref);
        if (I.mayWriteToMemory())
          FI.addModRefInfo(ModRefInfo::Mod);
      }
    }

    if (!isModSet(FI.getModRefInfo()))
      ++NumReadMemFunctions;
    if (!isModOrRefSet(FI.getModRefInfo()))
      ++NumNoMemFunctions;

    // Finally, now that we know the full effect on this SCC, clone the
    // information to each function in the SCC.
    // FI is a reference into FunctionInfos, so copy it now so that it doesn't
    // get invalidated if DenseMap decides to re-hash.
    FunctionInfo CachedFI = FI;
    for (unsigned i = 1, e = SCC.size(); i != e; ++i)
      FunctionInfos[SCC[i]->getFunction()] = CachedFI;
  }
}

// GV is a non-escaping global. V is a pointer address that has been loaded from.
// If we can prove that V must escape, we can conclude that a load from V cannot
// alias GV.
static bool isNonEscapingGlobalNoAliasWithLoad(const GlobalValue *GV,
                                               const Value *V,
                                               int &Depth,
                                               const DataLayout &DL) {
  SmallPtrSet<const Value *, 8> Visited;
  SmallVector<const Value *, 8> Inputs;
  Visited.insert(V);
  Inputs.push_back(V);
  do {
    const Value *Input = Inputs.pop_back_val();

    if (isa<GlobalValue>(Input) || isa<Argument>(Input) || isa<CallInst>(Input) ||
        isa<InvokeInst>(Input))
      // Arguments to functions or returns from functions are inherently
      // escaping, so we can immediately classify those as not aliasing any
      // non-addr-taken globals.
      //
      // (Transitive) loads from a global are also safe - if this aliased
      // another global, its address would escape, so no alias.
      continue;

    // Recurse through a limited number of selects, loads and PHIs. We use an
    // arbitrary depth of 4; lower numbers could be used to fix compile time
    // issues if needed, but this is generally expected to only be important
    // for small depths.
    if (++Depth > 4)
      return false;

    if (auto *LI = dyn_cast<LoadInst>(Input)) {
      Inputs.push_back(getUnderlyingObject(LI->getPointerOperand()));
      continue;
    }
    if (auto *SI = dyn_cast<SelectInst>(Input)) {
      const Value *LHS = getUnderlyingObject(SI->getTrueValue());
      const Value *RHS = getUnderlyingObject(SI->getFalseValue());
      if (Visited.insert(LHS).second)
        Inputs.push_back(LHS);
      if (Visited.insert(RHS).second)
        Inputs.push_back(RHS);
      continue;
    }
    if (auto *PN = dyn_cast<PHINode>(Input)) {
      for (const Value *Op : PN->incoming_values()) {
        Op = getUnderlyingObject(Op);
        if (Visited.insert(Op).second)
          Inputs.push_back(Op);
      }
      continue;
    }

    return false;
  } while (!Inputs.empty());

  // All inputs were known to be no-alias.
  return true;
}

// There are particular cases where we can conclude no-alias between
// a non-addr-taken global and some other underlying object. Specifically,
// a non-addr-taken global is known to not be escaped from any function. It is
// also incorrect for a transformation to introduce an escape of a global in
// a way that is observable when it was not there previously. One function
// being transformed to introduce an escape which could possibly be observed
// (via loading from a global or the return value for example) within another
// function is never safe. If the observation is made through non-atomic
// operations on different threads, it is a data-race and UB. If the
// observation is well defined, by being observed the transformation would have
// changed program behavior by introducing the observed escape, making it an
// invalid transform.
//
// This property does require that transformations which *temporarily* escape
// a global that was not previously escaped, prior to restoring it, cannot rely
// on the results of GMR::alias. This seems a reasonable restriction, although
// currently there is no way to enforce it. There is also no realistic
// optimization pass that would make this mistake. The closest example is
// a transformation pass which does reg2mem of SSA values but stores them into
// global variables temporarily before restoring the global variable's value.
// This could be useful to expose "benign" races for example. However, it seems
// reasonable to require that a pass which introduces escapes of global
// variables in this way either not trust AA results while the escape is
// active, or be forced to operate as a module pass that cannot co-exist
// with an alias analysis such as GMR.
bool GlobalsAAResult::isNonEscapingGlobalNoAlias(const GlobalValue *GV,
                                                 const Value *V) {
  // In order to know that the underlying object cannot alias the
  // non-addr-taken global, we must know that it would have to be an escape.
  // Thus if the underlying object is a function argument, a load from
  // a global, or the return of a function, it cannot alias. We can also
  // recurse through PHI nodes and select nodes provided all of their inputs
  // resolve to one of these known-escaping roots.
  SmallPtrSet<const Value *, 8> Visited;
  SmallVector<const Value *, 8> Inputs;
  Visited.insert(V);
  Inputs.push_back(V);
  int Depth = 0;
  do {
    const Value *Input = Inputs.pop_back_val();

    if (auto *InputGV = dyn_cast<GlobalValue>(Input)) {
      // If one input is the very global we're querying against, then we can't
      // conclude no-alias.
      if (InputGV == GV)
        return false;

      // Distinct GlobalVariables never alias, unless overridden or zero-sized.
      // FIXME: The condition can be refined, but be conservative for now.
      auto *GVar = dyn_cast<GlobalVariable>(GV);
      auto *InputGVar = dyn_cast<GlobalVariable>(InputGV);
      if (GVar && InputGVar &&
          !GVar->isDeclaration() && !InputGVar->isDeclaration() &&
          !GVar->isInterposable() && !InputGVar->isInterposable()) {
        Type *GVType = GVar->getInitializer()->getType();
        Type *InputGVType = InputGVar->getInitializer()->getType();
        if (GVType->isSized() && InputGVType->isSized() &&
            (DL.getTypeAllocSize(GVType) > 0) &&
            (DL.getTypeAllocSize(InputGVType) > 0))
          continue;
      }

      // Conservatively return false, even though we could be smarter
      // (e.g. look through GlobalAliases).
      return false;
    }

    if (isa<Argument>(Input) || isa<CallInst>(Input) ||
        isa<InvokeInst>(Input)) {
      // Arguments to functions or returns from functions are inherently
      // escaping, so we can immediately classify those as not aliasing any
      // non-addr-taken globals.
      continue;
    }

    // Recurse through a limited number of selects, loads and PHIs. We use an
    // arbitrary depth of 4; lower numbers could be used to fix compile time
    // issues if needed, but this is generally expected to only be important
    // for small depths.
    if (++Depth > 4)
      return false;

    if (auto *LI = dyn_cast<LoadInst>(Input)) {
      // A pointer loaded from a global would have been captured, and we know
      // that the global is non-escaping, so no alias.
      const Value *Ptr = getUnderlyingObject(LI->getPointerOperand());
      if (isNonEscapingGlobalNoAliasWithLoad(GV, Ptr, Depth, DL))
        // The load does not alias with GV.
        continue;
      // Otherwise, a load could come from anywhere, so bail.
      return false;
    }
    if (auto *SI = dyn_cast<SelectInst>(Input)) {
      const Value *LHS = getUnderlyingObject(SI->getTrueValue());
      const Value *RHS = getUnderlyingObject(SI->getFalseValue());
      if (Visited.insert(LHS).second)
        Inputs.push_back(LHS);
      if (Visited.insert(RHS).second)
        Inputs.push_back(RHS);
      continue;
    }
    if (auto *PN = dyn_cast<PHINode>(Input)) {
      for (const Value *Op : PN->incoming_values()) {
        Op = getUnderlyingObject(Op);
        if (Visited.insert(Op).second)
          Inputs.push_back(Op);
      }
      continue;
    }

    // FIXME: It would be good to handle other obvious no-alias cases here, but
    // it isn't clear how to do so reasonably without building a small version
    // of BasicAA into this code. We could recurse into AAResultBase::alias
    // here but that seems likely to go poorly as we're inside the
    // implementation of such a query. Until then, just conservatively return
    // false.
    return false;
  } while (!Inputs.empty());

  // If all the inputs to V were definitively no-alias, then V is no-alias.
  return true;
}

bool GlobalsAAResult::invalidate(Module &, const PreservedAnalyses &PA,
                                 ModuleAnalysisManager::Invalidator &) {
  // Check whether the analysis has been explicitly invalidated. Otherwise, it's
  // stateless and remains preserved.
  auto PAC = PA.getChecker<GlobalsAA>();
  return !PAC.preservedWhenStateless();
}

/// alias - If one of the pointers is to a global that we are tracking, and the
/// other is some random pointer, we know there cannot be an alias, because the
/// address of the global isn't taken.
AliasResult GlobalsAAResult::alias(const MemoryLocation &LocA,
                                   const MemoryLocation &LocB,
                                   AAQueryInfo &AAQI) {
  // Get the base object these pointers point to.
  const Value *UV1 =
      getUnderlyingObject(LocA.Ptr->stripPointerCastsForAliasAnalysis());
  const Value *UV2 =
      getUnderlyingObject(LocB.Ptr->stripPointerCastsForAliasAnalysis());

  // If either of the underlying values is a global, they may be non-addr-taken
  // globals, which we can answer queries about.
  const GlobalValue *GV1 = dyn_cast<GlobalValue>(UV1);
  const GlobalValue *GV2 = dyn_cast<GlobalValue>(UV2);
  if (GV1 || GV2) {
    // If the global's address is taken, pretend we don't know it's a pointer to
    // the global.
    if (GV1 && !NonAddressTakenGlobals.count(GV1))
      GV1 = nullptr;
    if (GV2 && !NonAddressTakenGlobals.count(GV2))
      GV2 = nullptr;

    // If the two pointers are derived from two different non-addr-taken
    // globals we know these can't alias.
    if (GV1 && GV2 && GV1 != GV2)
      return AliasResult::NoAlias;

    // If one is and the other isn't, it isn't strictly safe but we can fake
    // this result if necessary for performance. This does not appear to be
    // a common problem in practice.
    if (EnableUnsafeGlobalsModRefAliasResults)
      if ((GV1 || GV2) && GV1 != GV2)
        return AliasResult::NoAlias;

    // Check for a special case where a non-escaping global can be used to
    // conclude no-alias.
    if ((GV1 || GV2) && GV1 != GV2) {
      const GlobalValue *GV = GV1 ? GV1 : GV2;
      const Value *UV = GV1 ? UV2 : UV1;
      if (isNonEscapingGlobalNoAlias(GV, UV))
        return AliasResult::NoAlias;
    }

    // Otherwise if they are both derived from the same addr-taken global, we
    // can't know the two accesses don't overlap.
  }

  // These pointers may be based on the memory owned by an indirect global.  If
  // so, we may be able to handle this.  First check to see if the base pointer
  // is a direct load from an indirect global.
  GV1 = GV2 = nullptr;
  if (const LoadInst *LI = dyn_cast<LoadInst>(UV1))
    if (GlobalVariable *GV = dyn_cast<GlobalVariable>(LI->getOperand(0)))
      if (IndirectGlobals.count(GV))
        GV1 = GV;
  if (const LoadInst *LI = dyn_cast<LoadInst>(UV2))
    if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(LI->getOperand(0)))
      if (IndirectGlobals.count(GV))
        GV2 = GV;

  // These pointers may also be from an allocation for the indirect global.  If
  // so, also handle them.
  if (!GV1)
    GV1 = AllocsForIndirectGlobals.lookup(UV1);
  if (!GV2)
    GV2 = AllocsForIndirectGlobals.lookup(UV2);

  // Now that we know whether the two pointers are related to indirect globals,
  // use this to disambiguate the pointers. If the pointers are based on
  // different indirect globals they cannot alias.
  if (GV1 && GV2 && GV1 != GV2)
    return AliasResult::NoAlias;

  // If one is based on an indirect global and the other isn't, it isn't
  // strictly safe but we can fake this result if necessary for performance.
  // This does not appear to be a common problem in practice.
  if (EnableUnsafeGlobalsModRefAliasResults)
    if ((GV1 || GV2) && GV1 != GV2)
      return AliasResult::NoAlias;

  return AAResultBase::alias(LocA, LocB, AAQI);
}

ModRefInfo GlobalsAAResult::getModRefInfoForArgument(const CallBase *Call,
                                                     const GlobalValue *GV,
                                                     AAQueryInfo &AAQI) {
  if (Call->doesNotAccessMemory())
    return ModRefInfo::NoModRef;
  ModRefInfo ConservativeResult =
      Call->onlyReadsMemory() ? ModRefInfo::Ref : ModRefInfo::ModRef;

  // Iterate through all the arguments to the called function. If any argument
  // is based on GV, return the conservative result.
  for (const auto &A : Call->args()) {
    SmallVector<const Value*, 4> Objects;
    getUnderlyingObjects(A, Objects);

    // All objects must be identified.
    if (!all_of(Objects, isIdentifiedObject) &&
        // Try ::alias to see if all objects are known not to alias GV.
        !all_of(Objects, [&](const Value *V) {
          return this->alias(MemoryLocation::getBeforeOrAfter(V),
                             MemoryLocation::getBeforeOrAfter(GV),
                             AAQI) == AliasResult::NoAlias;
        }))
      return ConservativeResult;

    if (is_contained(Objects, GV))
      return ConservativeResult;
  }

  // We identified all objects in the argument list, and none of them were GV.
  return ModRefInfo::NoModRef;
}

ModRefInfo GlobalsAAResult::getModRefInfo(const CallBase *Call,
                                          const MemoryLocation &Loc,
                                          AAQueryInfo &AAQI) {
  ModRefInfo Known = ModRefInfo::ModRef;

  // If we are asking for mod/ref info of a direct call with a pointer to a
  // global we are tracking, return information if we have it.
  if (const GlobalValue *GV =
          dyn_cast<GlobalValue>(getUnderlyingObject(Loc.Ptr)))
    // If GV is internal to this IR and there is no function with local linkage
    // that has had its address taken, keep looking for a tighter ModRefInfo.
    if (GV->hasLocalLinkage() && !UnknownFunctionsWithLocalLinkage)
      if (const Function *F = Call->getCalledFunction())
        if (NonAddressTakenGlobals.count(GV))
          if (const FunctionInfo *FI = getFunctionInfo(F))
            Known = unionModRef(FI->getModRefInfoForGlobal(*GV),
                                getModRefInfoForArgument(Call, GV, AAQI));

  if (!isModOrRefSet(Known))
    return ModRefInfo::NoModRef; // No need to query other mod/ref analyses
  return intersectModRef(Known, AAResultBase::getModRefInfo(Call, Loc, AAQI));
}

GlobalsAAResult::GlobalsAAResult(
    const DataLayout &DL,
    std::function<const TargetLibraryInfo &(Function &F)> GetTLI)
    : DL(DL), GetTLI(std::move(GetTLI)) {}

GlobalsAAResult::GlobalsAAResult(GlobalsAAResult &&Arg)
    : AAResultBase(std::move(Arg)), DL(Arg.DL), GetTLI(std::move(Arg.GetTLI)),
      NonAddressTakenGlobals(std::move(Arg.NonAddressTakenGlobals)),
      IndirectGlobals(std::move(Arg.IndirectGlobals)),
      AllocsForIndirectGlobals(std::move(Arg.AllocsForIndirectGlobals)),
      FunctionInfos(std::move(Arg.FunctionInfos)),
      Handles(std::move(Arg.Handles)) {
  // Update the parent for each DeletionCallbackHandle.
  for (auto &H : Handles) {
    assert(H.GAR == &Arg);
    H.GAR = this;
  }
}

GlobalsAAResult::~GlobalsAAResult() = default;

/*static*/ GlobalsAAResult GlobalsAAResult::analyzeModule(
    Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI,
    CallGraph &CG) {
  GlobalsAAResult Result(M.getDataLayout(), GetTLI);

  // Discover which functions aren't recursive, to feed into AnalyzeGlobals.
  Result.CollectSCCMembership(CG);

  // Find non-addr taken globals.
  Result.AnalyzeGlobals(M);

  // Propagate on CG.
  Result.AnalyzeCallGraph(CG, M);

  return Result;
}

AnalysisKey GlobalsAA::Key;

GlobalsAAResult GlobalsAA::run(Module &M, ModuleAnalysisManager &AM) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };
  return GlobalsAAResult::analyzeModule(M, GetTLI,
                                        AM.getResult<CallGraphAnalysis>(M));
}

PreservedAnalyses RecomputeGlobalsAAPass::run(Module &M,
                                              ModuleAnalysisManager &AM) {
  if (auto *G = AM.getCachedResult<GlobalsAA>(M)) {
    auto &CG = AM.getResult<CallGraphAnalysis>(M);
    G->NonAddressTakenGlobals.clear();
    G->UnknownFunctionsWithLocalLinkage = false;
    G->IndirectGlobals.clear();
    G->AllocsForIndirectGlobals.clear();
    G->FunctionInfos.clear();
    G->FunctionToSCCMap.clear();
    G->Handles.clear();
    G->CollectSCCMembership(CG);
    G->AnalyzeGlobals(M);
    G->AnalyzeCallGraph(CG, M);
  }
  return PreservedAnalyses::all();
}

char GlobalsAAWrapperPass::ID = 0;
INITIALIZE_PASS_BEGIN(GlobalsAAWrapperPass, "globals-aa",
                      "Globals Alias Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(CallGraphWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(GlobalsAAWrapperPass, "globals-aa",
                    "Globals Alias Analysis", false, true)

ModulePass *llvm::createGlobalsAAWrapperPass() {
  return new GlobalsAAWrapperPass();
}

GlobalsAAWrapperPass::GlobalsAAWrapperPass() : ModulePass(ID) {
  initializeGlobalsAAWrapperPassPass(*PassRegistry::getPassRegistry());
}

bool GlobalsAAWrapperPass::runOnModule(Module &M) {
  auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
    return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
  };
  Result.reset(new GlobalsAAResult(GlobalsAAResult::analyzeModule(
      M, GetTLI, getAnalysis<CallGraphWrapperPass>().getCallGraph())));
  return false;
}

bool GlobalsAAWrapperPass::doFinalization(Module &M) {
  Result.reset();
  return false;
}

void GlobalsAAWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<CallGraphWrapperPass>();
  AU.addRequired<TargetLibraryInfoWrapperPass>();
}