//===- UninitializedValues.cpp - Find Uninitialized Values ----------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclBase.h"
#include "clang/AST/Expr.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/AST/Type.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "clang/Basic/LLVM.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <cassert>

using namespace clang;

#define DEBUG_LOGGING 0

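/// A variable is "tracked" if it is a local, non-static, non-exception,
/// non-capture, non-implicit declaration of scalar, vector, or record type
/// declared directly in \p dc. For example, in 'void f() { int a; static
/// int b; }', 'a' is tracked but 'b' is not, since 'b' has global storage.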
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() && !vd->isInitCapture() &&
      !vd->isImplicit() && vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType() || ty->isRecordType();
  }
  return false;
}

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {

class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;

public:
  DeclToIndex() = default;

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  Optional<unsigned> getValueIndex(const VarDecl *d) const;
};

} // namespace

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return None;
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
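// For example, merging a path on which a variable is Initialized (01) with
// one on which it is Uninitialized (10) yields MayUninitialized (11), and
// merging any value with Unknown (00) leaves that value unchanged.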

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}

static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {

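// Two bits per tracked variable, matching the Value encoding above.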
using ValueVector = llvm::PackedVector<Value, 2, llvm::SmallBitVector>;

class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;

public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);

  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};

} // namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (auto &val : vals)
    val.resize(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (const auto &i : bv)
    llvm::errs() << ' ' << i;
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
//====------------------------------------------------------------------------//

namespace {

class DataflowWorklist {
  PostOrderCFGView::iterator PO_I, PO_E;
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;

public:
  DataflowWorklist(const CFG &cfg, PostOrderCFGView &view)
      : PO_I(view.begin()), PO_E(view.end()),
        enqueuedBlocks(cfg.getNumBlockIDs(), true) {
    // Treat the first block as already analyzed.
    if (PO_I != PO_E) {
      assert(*PO_I == &cfg.getEntry());
      enqueuedBlocks[(*PO_I)->getBlockID()] = false;
      ++PO_I;
    }
  }

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};

} // namespace

void DataflowWorklist::enqueueSuccessors(const CFGBlock *block) {
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
}

const CFGBlock *DataflowWorklist::dequeue() {
  const CFGBlock *B = nullptr;

  // First dequeue from the worklist. This can represent
  // updates along backedges that we want propagated as quickly as possible.
  if (!worklist.empty())
    B = worklist.pop_back_val();

  // Next dequeue from the initial reverse post order. This is the
  // theoretical ideal in the presence of no back edges.
  else if (PO_I != PO_E) {
    B = *PO_I;
    ++PO_I;
  }
  else
    return nullptr;

  assert(enqueuedBlocks[B->getBlockID()] == true);
  enqueuedBlocks[B->getBlockID()] = false;
  return B;
}

//------------------------------------------------------------------------====//
// Classification of DeclRefExprs as use or initialization.
//====------------------------------------------------------------------------//

namespace {

class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;

public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

} // namespace

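/// Strip parentheses, no-op casts, and lvalue bitcasts from \p Ex, so that,
/// for example, '((x))' is reduced to the underlying DeclRefExpr for 'x'.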
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const auto *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

/// If E is an expression comprising a reference to a single variable, find that
/// variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const auto *DRE =
          dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(nullptr, nullptr);
}

namespace {

/// Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
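/// For example, a reference read through an lvalue-to-rvalue cast is a Use,
/// the LHS of a plain assignment is Ignore (TransferFunctions handles the
/// initialization when it visits the assignment itself), and the right-hand
/// 'x' in 'int x = x;' is SelfInit.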
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    Ignore
  };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr *, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);

  void operator()(Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const auto *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};

} // namespace

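/// If the initializer of \p VD is a direct reference to the variable itself,
/// as in 'int x = x;', return that DeclRefExpr; otherwise return null.
/// Record types are excluded.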
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (VD->getType()->isRecordType())
    return nullptr;
  if (Expr *Init = VD->getInit()) {
    const auto *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return nullptr;
}

void ClassifyRefs::classify(const Expr *E, Class C) {
  // The result of a ?: could also be an lvalue.
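  // For example, in C++ '(b ? x : y) = 1' assigns through whichever lvalue
  // is selected, so both arms receive the same classification.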
  E = E->IgnoreParens();
  if (const auto *CO = dyn_cast<ConditionalOperator>(E)) {
    classify(CO->getTrueExpr(), C);
    classify(CO->getFalseExpr(), C);
    return;
  }

  if (const auto *BCO = dyn_cast<BinaryConditionalOperator>(E)) {
    classify(BCO->getFalseExpr(), C);
    return;
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E)) {
    classify(OVE->getSourceExpr(), C);
    return;
  }

  if (const auto *ME = dyn_cast<MemberExpr>(E)) {
    if (const auto *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
      if (!VD->isStaticDataMember())
        classify(ME->getBase(), C);
    }
    return;
  }

  if (const auto *BO = dyn_cast<BinaryOperator>(E)) {
    switch (BO->getOpcode()) {
    case BO_PtrMemD:
    case BO_PtrMemI:
      classify(BO->getLHS(), C);
      return;
    case BO_Comma:
      classify(BO->getRHS(), C);
      return;
    default:
      return;
    }
  }

  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}

void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    auto *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not initialize
  // the variable for our purposes, but it does read it, so we classify its LHS
  // as a use.
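  // For example, in 'int x; x = 1;' the 'x' in 'x = 1' is classified Ignore
  // (the assignment initializes 'x'), while in 'int x; x += 1;' the 'x' is a
  // Use of a still-uninitialized variable.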
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma)
    classify(BO->getLHS(), Ignore);
}

void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

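/// Returns true for pointers to const, e.g. 'const int *', but not for
/// 'int *const', whose pointee is mutable.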
static bool isPointerToConst(const QualType &QT) {
  return QT->isAnyPointerType() && QT->getPointeeType().isConstQualified();
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // Classify arguments to std::move as used.
  if (CE->isCallToStdMove()) {
    // RecordTypes are handled in SemaDeclCXX.cpp.
    if (!CE->getArg(0)->getType()->isRecordType())
      classify(CE->getArg(0), Use);
    return;
  }

  // If a value is passed by const pointer or by const reference to a function,
  // we should not assume that it is initialized by the call, and we
  // conservatively do not assume that it is used.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I) {
    if ((*I)->isGLValue()) {
      if ((*I)->getType().isConstQualified())
        classify((*I), Ignore);
    } else if (isPointerToConst((*I)->getType())) {
      const Expr *Ex = stripCasts(DC->getParentASTContext(), *I);
      const auto *UO = dyn_cast<UnaryOperator>(Ex);
      if (UO && UO->getOpcode() == UO_AddrOf)
        Ex = UO->getSubExpr();
      classify(Ex, Ignore);
    }
  }
}

void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (const auto *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}

//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//

namespace {

class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler &handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
      : vals(vals), cfg(cfg), block(block), ac(ac),
        classification(classification), objCNoRet(ac.getASTContext()),
        handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    // void f(bool a, bool b) {
    // block1: int n;
    //         if (a) {
    // block2:   if (b)
    // block3:     n = 1;
    // block4: } else if (b) {
    // block5:   while (!a) {
    // block6:     do_work(&a);
    //             n = 2;
    //           }
    //         }
    // block7: if (a)
    // block8:   g();
    // block9: return n;
    // }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    SmallVector<const CFGBlock*, 32> Queue;
    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.pop_back_val();

      // If the use is always reached from the entry block, make a note of that.
      if (B == &cfg.getEntry())
        Use.setUninitAfterCall();

      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (!Pred)
          continue;

        Value AtPredExit = vals.getValue(Pred, B, vd);
        if (AtPredExit == Initialized)
          // This block initializes the variable.
          continue;
        if (AtPredExit == MayUninitialized &&
            vals.getValue(B, nullptr, vd) == Uninitialized) {
          // This block declares the variable (uninitialized), and is reachable
          // from a block that initializes the variable. We can't guarantee to
          // give an earlier location for the diagnostic (and it appears that
          // this code is intended to be reachable) so give a diagnostic here
          // and go no further down this path.
          Use.setUninitAfterDecl();
          continue;
        }

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
               SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (const auto *Block : cfg) {
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};

} // namespace

void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isUninitialized(v))
    handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (const auto *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const auto *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (const auto &I : bd->captures()) {
    const VarDecl *vd = I.getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (I.isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}

void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables. FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    }
    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning. This is a
      // useful annotation borrowed from the static analyzer that is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    auto *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//

static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler &handler) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (!pred)
      continue;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (const auto &I : *block) {
    if (Optional<CFGStmt> cs = I.getAs<CFGStmt>())
      tf.Visit(const_cast<Stmt *>(cs->getStmt()));
  }
  return vals.updateValueVectorWithScratch(block);
}

namespace {

/// PruneBlocksHandler is a special UninitVariablesHandler that is used
/// to detect when a CFGBlock has any *potential* use of an uninitialized
/// variable. It is mainly used to prune out work during the final
/// reporting pass.
struct PruneBlocksHandler : public UninitVariablesHandler {
  /// Records if a CFGBlock had a potential use of an uninitialized variable.
  llvm::BitVector hadUse;

  /// Records if any CFGBlock had a potential use of an uninitialized variable.
  bool hadAnyUse = false;

  /// The current block to scribble use information.
  unsigned currentBlock = 0;

  PruneBlocksHandler(unsigned numBlocks) : hadUse(numBlocks, false) {}

  ~PruneBlocksHandler() override = default;

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  /// Called when the uninitialized variable analysis detects the
  /// idiom 'int x = x'. All other uses of 'x' within the initializer
  /// are handled by handleUseOfUninitVariable.
  void handleSelfInit(const VarDecl *vd) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }
};

} // namespace

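// Example (a minimal sketch, not part of this file): a client implements
// UninitVariablesHandler and invokes the analysis roughly as follows,
// assuming it already has an AnalysisDeclContext 'AC' with a built CFG:
//
//   struct WarnHandler : UninitVariablesHandler {
//     void handleUseOfUninitVariable(const VarDecl *vd,
//                                    const UninitUse &use) override {
//       llvm::errs() << "uninitialized use of '" << vd->getName() << "'\n";
//     }
//     void handleSelfInit(const VarDecl *vd) override {}
//   };
//
//   UninitVariablesAnalysisStats stats = {};
//   WarnHandler H;
//   runUninitializedVariablesAnalysis(*cast<DeclContext>(AC.getDecl()),
//                                     *AC.getCFG(), AC, H, stats);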
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg, *ac.getAnalysis<PostOrderCFGView>());
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  PruneBlocksHandler PBH(cfg.getNumBlockIDs());

  while (const CFGBlock *block = worklist.dequeue()) {
    PBH.currentBlock = block->getBlockID();

    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  if (!PBH.hadAnyUse)
    return;

  // Run through the blocks one more time, and report uninitialized variables.
  for (const auto *block : cfg)
    if (PBH.hadUse[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
      ++stats.NumBlockVisits;
    }
}

UninitVariablesHandler::~UninitVariablesHandler() = default;