//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call.  Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global.  Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
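//
// As an informal illustration (a sketch, not normative wording), consider the
// following C++-style pseudocode:
//
//   static int *G;
//   void f(int *P) { G = P; }      // captures P: a copy of P outlives the call
//   int  g(int *P) { return *P; }  // does not capture P: P is only loaded from
//
// Whether `int *h(int *P) { return P; }` counts as capturing P depends on the
// ReturnCaptures flag the client passes.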
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/CommandLine.h"

using namespace llvm;

#define DEBUG_TYPE "capture-tracking"

STATISTIC(NumCaptured,          "Number of pointers maybe captured");
STATISTIC(NumNotCaptured,       "Number of pointers not captured");
STATISTIC(NumCapturedBefore,    "Number of pointers maybe captured before");
STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before");

/// The default value for the MaxUsesToExplore argument. It's relatively small
/// to keep the cost of analysis reasonable for clients like BasicAliasAnalysis,
/// where the results can't be cached.
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version could use a much higher limit,
/// or drop the cap entirely.
static cl::opt<unsigned>
    DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
                            cl::desc("Maximal number of uses to explore."),
                            cl::init(100));
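// For experiments, the cap can be raised on the command line; for example
// (illustrative invocation, adjust the tool and value as needed):
//
//   opt -capture-tracking-max-uses-to-explore=1000 -passes=... input.ll
//
// The option is hidden, so it does not appear in the default -help listing.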

unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
  return DefaultMaxUsesToExplore;
}

CaptureTracker::~CaptureTracker() = default;

bool CaptureTracker::shouldExplore(const Use *U) { return true; }

bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
  // An inbounds GEP can either be a valid pointer (pointing into
  // or to the end of an allocation), or be null in the default
  // address space. So for an inbounds GEP there is no way to let
  // the pointer escape using clever GEP hacking because doing so
  // would make the pointer point outside of the allocated object
  // and thus make the GEP result a poison value. Similarly, other
  // dereferenceable pointers cannot be manipulated without producing
  // poison.
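  // For example (an illustrative IR sketch), given
  //   %q = getelementptr inbounds i32, ptr %p, i64 %i
  // %q is either an in-bounds (or one-past-the-end) pointer into %p's
  // allocation, null, or poison, so arithmetic on %q cannot be used to smuggle
  // %p's address out through an otherwise non-capturing use such as an icmp.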
  if (auto *GEP = dyn_cast<GetElementPtrInst>(O))
    if (GEP->isInBounds())
      return true;
  bool CanBeNull, CanBeFreed;
  return O->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
}

namespace {
  struct SimpleCaptureTracker : public CaptureTracker {
    explicit SimpleCaptureTracker(
        const SmallPtrSetImpl<const Value *> &EphValues, bool ReturnCaptures)
        : EphValues(EphValues), ReturnCaptures(ReturnCaptures) {}

    void tooManyUses() override { Captured = true; }

    bool captured(const Use *U) override {
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      if (EphValues.contains(U->getUser()))
        return false;

      Captured = true;
      return true;
    }

    const SmallPtrSetImpl<const Value *> &EphValues;

    bool ReturnCaptures;

    bool Captured = false;
  };

  /// Only find pointer captures which happen before the given instruction. Uses
  /// the dominator tree to determine whether one instruction is before another.
  /// Only supports the case where the Value is defined in the same basic block
  /// as the given instruction and the use.
  struct CapturesBefore : public CaptureTracker {

    CapturesBefore(bool ReturnCaptures, const Instruction *I,
                   const DominatorTree *DT, bool IncludeI, const LoopInfo *LI)
        : BeforeHere(I), DT(DT), ReturnCaptures(ReturnCaptures),
          IncludeI(IncludeI), LI(LI) {}

    void tooManyUses() override { Captured = true; }

    bool isSafeToPrune(Instruction *I) {
      if (BeforeHere == I)
        return !IncludeI;

      // We explore this usage only if the usage can reach "BeforeHere".
      // If the use is not reachable from the entry block, there is no need to
      // explore it.
      if (!DT->isReachableFromEntry(I->getParent()))
        return true;

      // Check whether there is a path from I to BeforeHere.
      return !isPotentiallyReachable(I, BeforeHere, nullptr, DT, LI);
    }

    bool captured(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());
      if (isa<ReturnInst>(I) && !ReturnCaptures)
        return false;

      // Check isSafeToPrune() here rather than in shouldExplore() to avoid
      // an expensive reachability query for every instruction we look at.
      // Instead we only do one for actual capturing candidates.
      if (isSafeToPrune(I))
        return false;

      Captured = true;
      return true;
    }

    const Instruction *BeforeHere;
    const DominatorTree *DT;

    bool ReturnCaptures;
    bool IncludeI;

    bool Captured = false;

    const LoopInfo *LI;
  };

  /// Find the 'earliest' instruction before which the pointer is known not to
  /// be captured. Here an instruction A is considered earlier than an
  /// instruction B if A dominates B. If two escapes do not dominate each
  /// other, the terminator of the common dominator is chosen. If not all uses
  /// can be analyzed, the earliest escape is set to the first instruction in
  /// the function entry block.
  // NOTE: Users have to make sure instructions compared against the earliest
  // escape are not in a cycle.
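  //
  // For instance (an illustrative sketch with invented block names): if the
  // pointer escapes in two sibling blocks `if.then` and `if.else`, neither
  // escape dominates the other, so the reported earliest capture is the
  // terminator of their nearest common dominator, i.e. the block containing
  // the branch that selects between `if.then` and `if.else`.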
  struct EarliestCaptures : public CaptureTracker {

    EarliestCaptures(bool ReturnCaptures, Function &F, const DominatorTree &DT,
                     const SmallPtrSetImpl<const Value *> &EphValues)
        : EphValues(EphValues), DT(DT), ReturnCaptures(ReturnCaptures), F(F) {}

    void tooManyUses() override {
      Captured = true;
      EarliestCapture = &*F.getEntryBlock().begin();
    }

    bool captured(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());
      if (isa<ReturnInst>(I) && !ReturnCaptures)
        return false;

      if (EphValues.contains(I))
        return false;

      if (!EarliestCapture) {
        EarliestCapture = I;
      } else if (EarliestCapture->getParent() == I->getParent()) {
        if (I->comesBefore(EarliestCapture))
          EarliestCapture = I;
      } else {
        BasicBlock *CurrentBB = I->getParent();
        BasicBlock *EarliestBB = EarliestCapture->getParent();
        if (DT.dominates(EarliestBB, CurrentBB)) {
          // EarliestCapture already comes before the current use.
        } else if (DT.dominates(CurrentBB, EarliestBB)) {
          EarliestCapture = I;
        } else {
          // Otherwise find the nearest common dominator and use its terminator.
          auto *NearestCommonDom =
              DT.findNearestCommonDominator(CurrentBB, EarliestBB);
          EarliestCapture = NearestCommonDom->getTerminator();
        }
      }
      Captured = true;

      // Return false to continue analysis; we need to see all potential
      // captures.
      return false;
    }

    const SmallPtrSetImpl<const Value *> &EphValues;

    Instruction *EarliestCapture = nullptr;

    const DominatorTree &DT;

    bool ReturnCaptures;

    bool Captured = false;

    Function &F;
  };
} // namespace

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist).  This routine can
/// be expensive, so consider caching the results.  The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not.  The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
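///
/// For illustration (a hypothetical client, not code from this file), a pass
/// that wants to know whether a local allocation may escape could ask:
///
///   bool MayEscape = PointerMayBeCaptured(AI, /*ReturnCaptures=*/true,
///                                         /*StoreCaptures=*/true);
///
/// where `AI` is, say, an AllocaInst in the current function and the remaining
/// parameters are left at their defaults.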
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures, unsigned MaxUsesToExplore) {
  SmallPtrSet<const Value *, 1> Empty;
  return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures, Empty,
                              MaxUsesToExplore);
}

/// Variant of the above function which accepts a set of Values that are
/// ephemeral and cannot cause pointers to escape.
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures,
                                const SmallPtrSetImpl<const Value *> &EphValues,
                                unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do fancier analysis
  // to determine whether a given store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  SimpleCaptureTracker SCT(EphValues, ReturnCaptures);
  PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
  if (SCT.Captured)
    ++NumCaptured;
  else
    ++NumNotCaptured;
  return SCT.Captured;
}

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results.  The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not.  The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not.
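///
/// For illustration (a hypothetical caller, not code from this file), a
/// memory-optimization pass might check whether a pointer can escape before a
/// particular call site `CS`:
///
///   bool EscapesBeforeCall = PointerMayBeCapturedBefore(
///       Ptr, /*ReturnCaptures=*/true, /*StoreCaptures=*/true, CS, &DT,
///       /*IncludeI=*/false);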
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      unsigned MaxUsesToExplore,
                                      const LoopInfo *LI) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, LI);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.Captured;
}

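// Illustrative use of FindEarliestCapture (a hypothetical caller; the names
// below are invented for the example). A non-null result is the earliest point
// at which Obj may be captured, per the rules documented on EarliestCaptures
// above; Obj is known not to be captured before that point.
//
//   if (Instruction *EC = FindEarliestCapture(Obj, F, /*ReturnCaptures=*/false,
//                                             /*StoreCaptures=*/true, DT,
//                                             EphValues)) {
//     // Obj may be captured at or after EC.
//   }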
Instruction *
llvm::FindEarliestCapture(const Value *V, Function &F, bool ReturnCaptures,
                          bool StoreCaptures, const DominatorTree &DT,
                          const SmallPtrSetImpl<const Value *> &EphValues,
                          unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  EarliestCaptures CB(ReturnCaptures, F, DT, EphValues);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.EarliestCapture;
}

UseCaptureKind llvm::DetermineUseCaptureKind(
    const Use &U,
    function_ref<bool(Value *, const DataLayout &)> IsDereferenceableOrNull) {
  Instruction *I = cast<Instruction>(U.getUser());

  switch (I->getOpcode()) {
  case Instruction::Call:
  case Instruction::Invoke: {
    auto *Call = cast<CallBase>(I);
    // Not captured if the callee is readonly, doesn't return a copy through
    // its return value and doesn't unwind (a readonly function can leak bits
    // by throwing an exception or not depending on the input value).
    if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
        Call->getType()->isVoidTy())
      return UseCaptureKind::NO_CAPTURE;

    // The pointer is not captured if the returned pointer is not captured.
    // NOTE: CaptureTracking users should not assume that only functions
    // marked with nocapture do not capture. This means that places like
    // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
    // in BasicAA also need to know about this property.
    if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call, true))
      return UseCaptureKind::PASSTHROUGH;

    // Volatile operations effectively capture the memory location that they
    // load and store to.
    if (auto *MI = dyn_cast<MemIntrinsic>(Call))
      if (MI->isVolatile())
        return UseCaptureKind::MAY_CAPTURE;

    // Calling a function pointer does not in itself cause the pointer to
    // be captured.  This is a subtle point considering that (for example)
    // the callee might return its own address.  It is analogous to saying
    // that loading a value from a pointer does not cause the pointer to be
    // captured, even though the loaded value might be the pointer itself
    // (think of self-referential objects).
    if (Call->isCallee(&U))
      return UseCaptureKind::NO_CAPTURE;

    // Not captured if only passed via 'nocapture' arguments.
    if (Call->isDataOperand(&U) &&
        !Call->doesNotCapture(Call->getDataOperandNo(&U))) {
      // The parameter is not marked 'nocapture' - captured.
      return UseCaptureKind::MAY_CAPTURE;
    }
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::Load:
    // Volatile loads make the address observable.
    if (cast<LoadInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::VAArg:
    // "va-arg" from a pointer does not cause it to be captured.
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::Store:
    // Stored the pointer - conservatively assume it may be captured.
    // Volatile stores make the address observable.
    if (U.getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::AtomicRMW: {
    // atomicrmw conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ARMWI = cast<AtomicRMWInst>(I);
    if (U.getOperandNo() == 1 || ARMWI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::AtomicCmpXchg: {
    // cmpxchg conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ACXI = cast<AtomicCmpXchgInst>(I);
    if (U.getOperandNo() == 1 || U.getOperandNo() == 2 || ACXI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::BitCast:
  case Instruction::GetElementPtr:
  case Instruction::PHI:
  case Instruction::Select:
  case Instruction::AddrSpaceCast:
    // The original value is not captured via this if the new value isn't.
    return UseCaptureKind::PASSTHROUGH;
  case Instruction::ICmp: {
    unsigned Idx = U.getOperandNo();
    unsigned OtherIdx = 1 - Idx;
    if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
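      //
      // For example (an illustrative IR sketch):
      //   %p = call noalias ptr @malloc(i64 16)
      //   %isnull = icmp eq ptr %p, null
      // does not capture %p: the comparison only reveals whether the
      // allocation failed, not the pointer's value.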
      if (CPN->getType()->getAddressSpace() == 0)
        if (isNoAliasCall(U.get()->stripPointerCasts()))
          return UseCaptureKind::NO_CAPTURE;
      if (!I->getFunction()->nullPointerIsDefined()) {
        auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
        // Comparing a dereferenceable_or_null pointer against null cannot
        // lead to pointer escapes, because if it is not null it must be a
        // valid (in-bounds) pointer.
        const DataLayout &DL = I->getModule()->getDataLayout();
        if (IsDereferenceableOrNull && IsDereferenceableOrNull(O, DL))
          return UseCaptureKind::NO_CAPTURE;
      }
    }
    // Comparison against a value loaded from a global variable. Assuming the
    // pointer does not otherwise escape, its value cannot have been guessed
    // and stored separately in a global variable.
    auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
    if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
      return UseCaptureKind::NO_CAPTURE;
    // Otherwise, be conservative. There are crazy ways to capture pointers
    // using comparisons.
    return UseCaptureKind::MAY_CAPTURE;
  }
  default:
    // Something else - be conservative and say it is captured.
    return UseCaptureKind::MAY_CAPTURE;
  }
}

void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  if (MaxUsesToExplore == 0)
    MaxUsesToExplore = DefaultMaxUsesToExplore;

  SmallVector<const Use *, 20> Worklist;
  Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
  SmallSet<const Use *, 20> Visited;

  auto AddUses = [&](const Value *V) {
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Visited.size() >= MaxUsesToExplore) {
        Tracker->tooManyUses();
        return false;
      }
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
    return true;
  };
  if (!AddUses(V))
    return;

  auto IsDereferenceableOrNull = [Tracker](Value *V, const DataLayout &DL) {
    return Tracker->isDereferenceableOrNull(V, DL);
  };
  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    switch (DetermineUseCaptureKind(*U, IsDereferenceableOrNull)) {
    case UseCaptureKind::NO_CAPTURE:
      continue;
    case UseCaptureKind::MAY_CAPTURE:
      if (Tracker->captured(U))
        return;
      continue;
    case UseCaptureKind::PASSTHROUGH:
      if (!AddUses(U->getUser()))
        return;
      continue;
    }
  }

  // All uses examined.
}

bool llvm::isNonEscapingLocalObject(
    const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
  SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
  if (IsCapturedCache) {
    bool Inserted;
    std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
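    // Note: `false` is only a placeholder. If V turns out to be an identified
    // function-local object, the entry is overwritten below with the computed
    // answer; otherwise `false` (escaping / unknown) is the conservative
    // result we want cached anyway.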
    if (!Inserted)
      // Found cached result, return it!
      return CacheIt->second;
  }

  // If this is an identified function-local object, check to see if it escapes.
  if (isIdentifiedFunctionLocal(V)) {
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    auto Ret = !PointerMayBeCaptured(V, /*ReturnCaptures=*/false,
                                     /*StoreCaptures=*/true);
    if (IsCapturedCache)
      CacheIt->second = Ret;
    return Ret;
  }

  return false;
}