1 //=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file defines analysis_warnings::[Policy,Executor].
10 // Together they are used by Sema to issue warnings based on inexpensive
11 // static analysis algorithms in libAnalysis.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "clang/Sema/AnalysisBasedWarnings.h"
16 #include "clang/AST/Decl.h"
17 #include "clang/AST/DeclCXX.h"
18 #include "clang/AST/DeclObjC.h"
19 #include "clang/AST/EvaluatedExprVisitor.h"
20 #include "clang/AST/Expr.h"
21 #include "clang/AST/ExprCXX.h"
22 #include "clang/AST/ExprObjC.h"
23 #include "clang/AST/OperationKinds.h"
24 #include "clang/AST/ParentMap.h"
25 #include "clang/AST/RecursiveASTVisitor.h"
26 #include "clang/AST/StmtCXX.h"
27 #include "clang/AST/StmtObjC.h"
28 #include "clang/AST/StmtVisitor.h"
29 #include "clang/AST/RecursiveASTVisitor.h"
30 #include "clang/AST/Type.h"
31 #include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
32 #include "clang/Analysis/Analyses/CalledOnceCheck.h"
33 #include "clang/Analysis/Analyses/Consumed.h"
34 #include "clang/Analysis/Analyses/ReachableCode.h"
35 #include "clang/Analysis/Analyses/ThreadSafety.h"
36 #include "clang/Analysis/Analyses/UninitializedValues.h"
37 #include "clang/Analysis/Analyses/UnsafeBufferUsage.h"
38 #include "clang/Analysis/AnalysisDeclContext.h"
39 #include "clang/Analysis/CFG.h"
40 #include "clang/Analysis/CFGStmtMap.h"
41 #include "clang/Basic/Diagnostic.h"
42 #include "clang/Basic/SourceLocation.h"
43 #include "clang/Basic/SourceManager.h"
44 #include "clang/Lex/Preprocessor.h"
45 #include "clang/Sema/ScopeInfo.h"
46 #include "clang/Sema/SemaInternal.h"
47 #include "llvm/ADT/ArrayRef.h"
48 #include "llvm/ADT/BitVector.h"
49 #include "llvm/ADT/MapVector.h"
50 #include "llvm/ADT/STLFunctionalExtras.h"
51 #include "llvm/ADT/SmallString.h"
52 #include "llvm/ADT/SmallVector.h"
53 #include "llvm/ADT/StringRef.h"
54 #include "llvm/Support/Casting.h"
55 #include <algorithm>
56 #include <deque>
57 #include <iterator>
58 #include <optional>
59 
60 using namespace clang;
61 
62 //===----------------------------------------------------------------------===//
63 // Unreachable code analysis.
64 //===----------------------------------------------------------------------===//
65 
66 namespace {
67   class UnreachableCodeHandler : public reachable_code::Callback {
68     Sema &S;
69     SourceRange PreviousSilenceableCondVal;
70 
71   public:
72     UnreachableCodeHandler(Sema &s) : S(s) {}
73 
74     void HandleUnreachable(reachable_code::UnreachableKind UK, SourceLocation L,
75                            SourceRange SilenceableCondVal, SourceRange R1,
76                            SourceRange R2, bool HasFallThroughAttr) override {
77       // If the diagnosed code is `[[fallthrough]];` and
78       // `-Wunreachable-code-fallthrough` is  enabled, suppress `code will never
79       // be executed` warning to avoid generating diagnostic twice
80       if (HasFallThroughAttr &&
81           !S.getDiagnostics().isIgnored(diag::warn_unreachable_fallthrough_attr,
82                                         SourceLocation()))
83         return;
84 
85       // Avoid reporting multiple unreachable code diagnostics that are
86       // triggered by the same conditional value.
87       if (PreviousSilenceableCondVal.isValid() &&
88           SilenceableCondVal.isValid() &&
89           PreviousSilenceableCondVal == SilenceableCondVal)
90         return;
91       PreviousSilenceableCondVal = SilenceableCondVal;
92 
93       unsigned diag = diag::warn_unreachable;
94       switch (UK) {
95         case reachable_code::UK_Break:
96           diag = diag::warn_unreachable_break;
97           break;
98         case reachable_code::UK_Return:
99           diag = diag::warn_unreachable_return;
100           break;
101         case reachable_code::UK_Loop_Increment:
102           diag = diag::warn_unreachable_loop_increment;
103           break;
104         case reachable_code::UK_Other:
105           break;
106       }
107 
108       S.Diag(L, diag) << R1 << R2;
109 
110       SourceLocation Open = SilenceableCondVal.getBegin();
111       if (Open.isValid()) {
112         SourceLocation Close = SilenceableCondVal.getEnd();
113         Close = S.getLocForEndOfToken(Close);
114         if (Close.isValid()) {
115           S.Diag(Open, diag::note_unreachable_silence)
116             << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
117             << FixItHint::CreateInsertion(Close, ")");
118         }
119       }
120     }
121   };
122 } // anonymous namespace
123 
124 /// CheckUnreachable - Check for unreachable code.
125 static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
126   // As a heuristic prune all diagnostics not in the main file.  Currently
127   // the majority of warnings in headers are false positives.  These
128   // are largely caused by configuration state, e.g. preprocessor
129   // defined code, etc.
130   //
131   // Note that this is also a performance optimization.  Analyzing
132   // headers many times can be expensive.
133   if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
134     return;
135 
136   UnreachableCodeHandler UC(S);
137   reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
138 }
139 
140 namespace {
141 /// Warn on logical operator errors in CFGBuilder
142 class LogicalErrorHandler : public CFGCallback {
143   Sema &S;
144 
145 public:
146   LogicalErrorHandler(Sema &S) : S(S) {}
147 
148   static bool HasMacroID(const Expr *E) {
149     if (E->getExprLoc().isMacroID())
150       return true;
151 
152     // Recurse to children.
153     for (const Stmt *SubStmt : E->children())
154       if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
155         if (HasMacroID(SubExpr))
156           return true;
157 
158     return false;
159   }
160 
161   void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
162     if (HasMacroID(B))
163       return;
164 
165     SourceRange DiagRange = B->getSourceRange();
166     S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
167         << DiagRange << isAlwaysTrue;
168   }
169 
170   void compareBitwiseEquality(const BinaryOperator *B,
171                               bool isAlwaysTrue) override {
172     if (HasMacroID(B))
173       return;
174 
175     SourceRange DiagRange = B->getSourceRange();
176     S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
177         << DiagRange << isAlwaysTrue;
178   }
179 
180   void compareBitwiseOr(const BinaryOperator *B) override {
181     if (HasMacroID(B))
182       return;
183 
184     SourceRange DiagRange = B->getSourceRange();
185     S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
186   }
187 
188   static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
189                                    SourceLocation Loc) {
190     return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
191            !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc);
192   }
193 };
194 } // anonymous namespace
195 
196 //===----------------------------------------------------------------------===//
197 // Check for infinite self-recursion in functions
198 //===----------------------------------------------------------------------===//
199 
200 // Returns true if the function is called anywhere within the CFGBlock.
201 // For member functions, the additional condition of being call from the
202 // this pointer is required.
203 static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
204   // Process all the Stmt's in this block to find any calls to FD.
205   for (const auto &B : Block) {
206     if (B.getKind() != CFGElement::Statement)
207       continue;
208 
209     const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
210     if (!CE || !CE->getCalleeDecl() ||
211         CE->getCalleeDecl()->getCanonicalDecl() != FD)
212       continue;
213 
214     // Skip function calls which are qualified with a templated class.
215     if (const DeclRefExpr *DRE =
216             dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
217       if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
218         if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
219             isa<TemplateSpecializationType>(NNS->getAsType())) {
220           continue;
221         }
222       }
223     }
224 
225     const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
226     if (!MCE || isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
227         !MCE->getMethodDecl()->isVirtual())
228       return true;
229   }
230   return false;
231 }
232 
233 // Returns true if every path from the entry block passes through a call to FD.
234 static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
235   llvm::SmallPtrSet<CFGBlock *, 16> Visited;
236   llvm::SmallVector<CFGBlock *, 16> WorkList;
237   // Keep track of whether we found at least one recursive path.
238   bool foundRecursion = false;
239 
240   const unsigned ExitID = cfg->getExit().getBlockID();
241 
242   // Seed the work list with the entry block.
243   WorkList.push_back(&cfg->getEntry());
244 
245   while (!WorkList.empty()) {
246     CFGBlock *Block = WorkList.pop_back_val();
247 
248     for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
249       if (CFGBlock *SuccBlock = *I) {
250         if (!Visited.insert(SuccBlock).second)
251           continue;
252 
253         // Found a path to the exit node without a recursive call.
254         if (ExitID == SuccBlock->getBlockID())
255           return false;
256 
257         // If the successor block contains a recursive call, end analysis there.
258         if (hasRecursiveCallInPath(FD, *SuccBlock)) {
259           foundRecursion = true;
260           continue;
261         }
262 
263         WorkList.push_back(SuccBlock);
264       }
265     }
266   }
267   return foundRecursion;
268 }
269 
270 static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
271                                    const Stmt *Body, AnalysisDeclContext &AC) {
272   FD = FD->getCanonicalDecl();
273 
274   // Only run on non-templated functions and non-templated members of
275   // templated classes.
276   if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
277       FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
278     return;
279 
280   CFG *cfg = AC.getCFG();
281   if (!cfg) return;
282 
283   // If the exit block is unreachable, skip processing the function.
284   if (cfg->getExit().pred_empty())
285     return;
286 
287   // Emit diagnostic if a recursive function call is detected for all paths.
288   if (checkForRecursiveFunctionCall(FD, cfg))
289     S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
290 }
291 
292 //===----------------------------------------------------------------------===//
293 // Check for throw in a non-throwing function.
294 //===----------------------------------------------------------------------===//
295 
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
/// can reach the exit block of the CFG \p Body (i.e. escape the function).
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
                         CFG *Body) {
  // Depth-first search over the unwind paths, starting at the block that
  // contains the throw.  Queued doubles as the visited set.
  SmallVector<CFGBlock *, 16> Stack;
  llvm::BitVector Queued(Body->getNumBlockIDs());

  Stack.push_back(&ThrowBlock);
  Queued[ThrowBlock.getBlockID()] = true;

  while (!Stack.empty()) {
    CFGBlock &UnwindBlock = *Stack.back();
    Stack.pop_back();

    for (auto &Succ : UnwindBlock.succs()) {
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
        continue;

      // Reaching the function's exit block means the exception escapes.
      if (Succ->getBlockID() == Body->getExit().getBlockID())
        return true;

      if (auto *Catch =
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
        QualType Caught = Catch->getCaughtType();
        if (Caught.isNull() || // catch (...) catches everything
            !E->getSubExpr() || // throw; is considered caught by any handler
            S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
          // Exception doesn't escape via this path; stop scanning the
          // remaining successors of this block.
          break;
      } else {
        // Not a catch handler: keep unwinding through this successor.
        Stack.push_back(Succ);
        Queued[Succ->getBlockID()] = true;
      }
    }
  }

  return false;
}
334 
335 static void visitReachableThrows(
336     CFG *BodyCFG,
337     llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
338   llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
339   clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
340   for (CFGBlock *B : *BodyCFG) {
341     if (!Reachable[B->getBlockID()])
342       continue;
343     for (CFGElement &E : *B) {
344       std::optional<CFGStmt> S = E.getAs<CFGStmt>();
345       if (!S)
346         continue;
347       if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
348         Visit(Throw, *B);
349     }
350   }
351 }
352 
353 static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
354                                                  const FunctionDecl *FD) {
355   if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
356       FD->getTypeSourceInfo()) {
357     S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
358     if (S.getLangOpts().CPlusPlus11 &&
359         (isa<CXXDestructorDecl>(FD) ||
360          FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
361          FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
362       if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
363                                          getAs<FunctionProtoType>())
364         S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
365             << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
366             << FD->getExceptionSpecSourceRange();
367     } else
368       S.Diag(FD->getLocation(), diag::note_throw_in_function)
369           << FD->getExceptionSpecSourceRange();
370   }
371 }
372 
373 static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
374                                         AnalysisDeclContext &AC) {
375   CFG *BodyCFG = AC.getCFG();
376   if (!BodyCFG)
377     return;
378   if (BodyCFG->getExit().pred_empty())
379     return;
380   visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
381     if (throwEscapes(S, Throw, Block, BodyCFG))
382       EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
383   });
384 }
385 
386 static bool isNoexcept(const FunctionDecl *FD) {
387   const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
388   if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
389     return true;
390   return false;
391 }
392 
393 //===----------------------------------------------------------------------===//
394 // Check for missing return value.
395 //===----------------------------------------------------------------------===//
396 
/// Result of the fall-through analysis performed by CheckFallThrough.
enum ControlFlowKind {
  UnknownFallThrough,      ///< No CFG was available; nothing can be said.
  NeverFallThrough,        ///< Never falls off the end, but may return.
  MaybeFallThrough,        ///< Might or might not fall off the end.
  AlwaysFallThrough,       ///< Always falls off the end of the statement.
  NeverFallThroughOrReturn ///< Neither falls off the end nor returns.
};
404 
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (const auto *B : *cfg) {
      if (!live[B->getBlockID()]) {
        if (B->pred_begin() == B->pred_end()) {
          const Stmt *Term = B->getTerminatorStmt();
          if (Term && isa<CXXTryStmt>(Term))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(B, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit
  // block and look for fall through paths, being careful to ignore normal
  // returns, and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  // Classify each live predecessor of the exit block.
  for (CFGBlock::filtered_pred_iterator I =
           cfg->getExit().filtered_pred_start_end(FO);
       I.hasMore(); ++I) {
    const CFGBlock &B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      const Stmt *Term = B.getTerminatorStmt();
      if (Term && (isa<CXXTryStmt>(Term) || isa<ObjCAtTryStmt>(Term))) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the last statement-like element on the edge into the exit.
    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S) || isa<CoreturnStmt>(S)) {
      // An explicit return (or co_return): not a fall-through.
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      // Throwing terminates the path; it is not a genuine fall-through edge.
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (!llvm::is_contained(B.succs(), &cfg->getExit())) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }
  // Reduce the collected edge kinds to a single verdict.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
534 
535 namespace {
536 
537 struct CheckFallThroughDiagnostics {
538   unsigned diag_MaybeFallThrough_HasNoReturn;
539   unsigned diag_MaybeFallThrough_ReturnsNonVoid;
540   unsigned diag_AlwaysFallThrough_HasNoReturn;
541   unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
542   unsigned diag_NeverFallThroughOrReturn;
543   enum { Function, Block, Lambda, Coroutine } funMode;
544   SourceLocation FuncLoc;
545 
546   static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
547     CheckFallThroughDiagnostics D;
548     D.FuncLoc = Func->getLocation();
549     D.diag_MaybeFallThrough_HasNoReturn =
550       diag::warn_falloff_noreturn_function;
551     D.diag_MaybeFallThrough_ReturnsNonVoid =
552       diag::warn_maybe_falloff_nonvoid_function;
553     D.diag_AlwaysFallThrough_HasNoReturn =
554       diag::warn_falloff_noreturn_function;
555     D.diag_AlwaysFallThrough_ReturnsNonVoid =
556       diag::warn_falloff_nonvoid_function;
557 
558     // Don't suggest that virtual functions be marked "noreturn", since they
559     // might be overridden by non-noreturn functions.
560     bool isVirtualMethod = false;
561     if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
562       isVirtualMethod = Method->isVirtual();
563 
564     // Don't suggest that template instantiations be marked "noreturn"
565     bool isTemplateInstantiation = false;
566     if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
567       isTemplateInstantiation = Function->isTemplateInstantiation();
568 
569     if (!isVirtualMethod && !isTemplateInstantiation)
570       D.diag_NeverFallThroughOrReturn =
571         diag::warn_suggest_noreturn_function;
572     else
573       D.diag_NeverFallThroughOrReturn = 0;
574 
575     D.funMode = Function;
576     return D;
577   }
578 
579   static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
580     CheckFallThroughDiagnostics D;
581     D.FuncLoc = Func->getLocation();
582     D.diag_MaybeFallThrough_HasNoReturn = 0;
583     D.diag_MaybeFallThrough_ReturnsNonVoid =
584         diag::warn_maybe_falloff_nonvoid_coroutine;
585     D.diag_AlwaysFallThrough_HasNoReturn = 0;
586     D.diag_AlwaysFallThrough_ReturnsNonVoid =
587         diag::warn_falloff_nonvoid_coroutine;
588     D.diag_NeverFallThroughOrReturn = 0;
589     D.funMode = Coroutine;
590     return D;
591   }
592 
593   static CheckFallThroughDiagnostics MakeForBlock() {
594     CheckFallThroughDiagnostics D;
595     D.diag_MaybeFallThrough_HasNoReturn =
596       diag::err_noreturn_block_has_return_expr;
597     D.diag_MaybeFallThrough_ReturnsNonVoid =
598       diag::err_maybe_falloff_nonvoid_block;
599     D.diag_AlwaysFallThrough_HasNoReturn =
600       diag::err_noreturn_block_has_return_expr;
601     D.diag_AlwaysFallThrough_ReturnsNonVoid =
602       diag::err_falloff_nonvoid_block;
603     D.diag_NeverFallThroughOrReturn = 0;
604     D.funMode = Block;
605     return D;
606   }
607 
608   static CheckFallThroughDiagnostics MakeForLambda() {
609     CheckFallThroughDiagnostics D;
610     D.diag_MaybeFallThrough_HasNoReturn =
611       diag::err_noreturn_lambda_has_return_expr;
612     D.diag_MaybeFallThrough_ReturnsNonVoid =
613       diag::warn_maybe_falloff_nonvoid_lambda;
614     D.diag_AlwaysFallThrough_HasNoReturn =
615       diag::err_noreturn_lambda_has_return_expr;
616     D.diag_AlwaysFallThrough_ReturnsNonVoid =
617       diag::warn_falloff_nonvoid_lambda;
618     D.diag_NeverFallThroughOrReturn = 0;
619     D.funMode = Lambda;
620     return D;
621   }
622 
623   bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
624                         bool HasNoReturn) const {
625     if (funMode == Function) {
626       return (ReturnsVoid ||
627               D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
628                           FuncLoc)) &&
629              (!HasNoReturn ||
630               D.isIgnored(diag::warn_noreturn_function_has_return_expr,
631                           FuncLoc)) &&
632              (!ReturnsVoid ||
633               D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
634     }
635     if (funMode == Coroutine) {
636       return (ReturnsVoid ||
637               D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc) ||
638               D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
639                           FuncLoc)) &&
640              (!HasNoReturn);
641     }
642     // For blocks / lambdas.
643     return ReturnsVoid && !HasNoReturn;
644   }
645 };
646 
647 } // anonymous namespace
648 
/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    QualType BlockType,
                                    const CheckFallThroughDiagnostics &CD,
                                    AnalysisDeclContext &AC,
                                    sema::FunctionScopeInfo *FSI) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;
  bool IsCoroutine = FSI->isCoroutine();

  // Derive ReturnsVoid / HasNoReturn from the kind of declaration being
  // analyzed: a function, an Objective-C method, or a block.
  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
    // A coroutine body is treated as void-returning when it has a
    // fall-through handler.
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
    else
      ReturnsVoid = FD->getReturnType()->isVoidType();
    HasNoReturn = FD->isNoReturn();
  }
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getReturnType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // For blocks the function type is carried by BlockType, not by D.
    if (const FunctionType *FT =
          BlockType->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getReturnType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed: skip the CFG walk when all of the
  // diagnostics this could produce are disabled.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
      return;
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
  // Coroutine diagnostics additionally mention the promise type.
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
    if (IsCoroutine)
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
    else
      S.Diag(Loc, DiagID);
  };

  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
    return;

  // Either in a function body compound statement, or a function-try-block.
  switch (CheckFallThrough(AC)) {
    case UnknownFallThrough:
      break;

    case MaybeFallThrough:
      if (HasNoReturn)
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
      else if (!ReturnsVoid)
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
      break;
    case AlwaysFallThrough:
      if (HasNoReturn)
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
      else if (!ReturnsVoid)
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
      break;
    case NeverFallThroughOrReturn:
      // The "never falls through or returns" diagnostic (suggest noreturn)
      // is phrased differently for functions vs. Objective-C methods.
      if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
        } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
        } else {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
        }
      }
      break;
    case NeverFallThrough:
      break;
  }
}
733 
734 //===----------------------------------------------------------------------===//
735 // -Wuninitialized
736 //===----------------------------------------------------------------------===//
737 
738 namespace {
739 /// ContainsReference - A visitor class to search for references to
740 /// a particular declaration (the needle) within any evaluated component of an
741 /// expression (recursively).
742 class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
743   bool FoundReference;
744   const DeclRefExpr *Needle;
745 
746 public:
747   typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
748 
749   ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
750     : Inherited(Context), FoundReference(false), Needle(Needle) {}
751 
752   void VisitExpr(const Expr *E) {
753     // Stop evaluating if we already have a reference.
754     if (FoundReference)
755       return;
756 
757     Inherited::VisitExpr(E);
758   }
759 
760   void VisitDeclRefExpr(const DeclRefExpr *E) {
761     if (E == Needle)
762       FoundReference = true;
763     else
764       Inherited::VisitDeclRefExpr(E);
765   }
766 
767   bool doesContainReference() const { return FoundReference; }
768 };
769 } // anonymous namespace
770 
771 static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
772   QualType VariableTy = VD->getType().getCanonicalType();
773   if (VariableTy->isBlockPointerType() &&
774       !VD->hasAttr<BlocksAttr>()) {
775     S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
776         << VD->getDeclName()
777         << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
778     return true;
779   }
780 
781   // Don't issue a fixit if there is already an initializer.
782   if (VD->getInit())
783     return false;
784 
785   // Don't suggest a fixit inside macros.
786   if (VD->getEndLoc().isMacroID())
787     return false;
788 
789   SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());
790 
791   // Suggest possible initialization (if any).
792   std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
793   if (Init.empty())
794     return false;
795 
796   S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
797     << FixItHint::CreateInsertion(Loc, Init);
798   return true;
799 }
800 
801 /// Create a fixit to remove an if-like statement, on the assumption that its
802 /// condition is CondVal.
803 static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
804                           const Stmt *Else, bool CondVal,
805                           FixItHint &Fixit1, FixItHint &Fixit2) {
806   if (CondVal) {
807     // If condition is always true, remove all but the 'then'.
808     Fixit1 = FixItHint::CreateRemoval(
809         CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
810     if (Else) {
811       SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
812       Fixit2 =
813           FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
814     }
815   } else {
816     // If condition is always false, remove all but the 'else'.
817     if (Else)
818       Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
819           If->getBeginLoc(), Else->getBeginLoc()));
820     else
821       Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
822   }
823 }
824 
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
///
/// Emits one of three forms: a definite "used uninitialized" warning, a
/// branch-specific "sometimes uninitialized" warning with fixits that remove
/// the dead condition, or a weaker "may be used uninitialized" warning when
/// no specific branch can be blamed.
///
/// \param VD the variable being used while uninitialized.
/// \param Use the particular use, including its kind and the branches that
///        lead to it.
/// \param IsCapturedByBlock selects alternate diagnostic wording for uses
///        that happen via a block capture.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  // Set once a branch-specific warning has been emitted; otherwise we fall
  // back to the generic 'may be uninitialized' diagnostic at the end.
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    // Uninitialized on every path: report definitively at the use.
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    // Uninitialized when control returns to the declaration (e.g. via a
    // jump) or after a call; the 4/5 selector picks the matching wording in
    // the diagnostic text.
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock
      << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
      << const_cast<DeclContext*>(VD->getLexicalDeclContext())
      << VD->getSourceRange();
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      // Only short-circuiting && / || create branches we can report.
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      if (I->Output)
        // A for-statement with no condition always enters the loop, so
        // removing the condition is the "always true" fixit.
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    // Emit the branch-specific warning at the condition, a note at the use
    // site, and (when available) the fixit removing the dead condition.
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
      << Str << I->Output << Range;
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch could be blamed; emit the weaker diagnostic at the use.
  if (!Diagnosed)
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
993 
994 /// Diagnose uninitialized const reference usages.
995 static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
996                                              const UninitUse &Use) {
997   S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_const_reference)
998       << VD->getDeclName() << Use.getUser()->getSourceRange();
999   return true;
1000 }
1001 
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
///
/// \param alwaysReportSelfInit when true, even the idiomatic 'int x = x;'
///        self-init is reported rather than treated as intentional.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization. We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized. Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      // Check whether the use appears somewhere nested inside the
      // initializer (e.g. 'int x = x + 1;'); if so, emit the dedicated
      // self-reference warning instead of the generic one.
      ContainsReference CR(S.Context, DRE);
      CR.Visit(Initializer);
      if (CR.doesContainReference()) {
        S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return true;
      }
    }

    DiagUninitUse(S, VD, Use, false);
  } else {
    // The use is a block capture rather than a direct reference.
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      // A block-pointer variable captured without __block: the capture
      // itself copies the uninitialized pointer, so use dedicated wording.
      S.Diag(BE->getBeginLoc(),
             diag::warn_uninit_byref_blockvar_captured_by_block)
          << VD->getDeclName()
          << VD->getType().getQualifiers().hasObjCLifetime();
    else
      DiagUninitUse(S, VD, Use, true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
        << VD->getDeclName();

  return true;
}
1056 
namespace {
  /// AST visitor that collects [[fallthrough]]-annotated statements and
  /// switch statements in a function body, and answers CFG queries about
  /// whether fall-through into a case-labelled block is annotated.
  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
  public:
    FallthroughMapper(Sema &S)
      : FoundSwitchStatements(false),
        S(S) {
    }

    /// True if the traversed body contained at least one switch statement.
    bool foundSwitchStatements() const { return FoundSwitchStatements; }

    /// Record that a fallthrough annotation was matched to an actual
    /// fall-through edge; annotations still unvisited at the end of the
    /// analysis are reported as misplaced by the caller.
    void markFallthroughVisited(const AttributedStmt *Stmt) {
      bool Found = FallthroughStmts.erase(Stmt);
      assert(Found);
      (void)Found;
    }

    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;

    /// Fallthrough annotations seen during traversal that have not (yet)
    /// been matched to a fall-through edge.
    const AttrStmts &getFallthroughStmts() const {
      return FallthroughStmts;
    }

    /// Compute the set of CFG blocks reachable from the entry block (plus
    /// all case-labelled blocks), used later to recognize annotations in
    /// unreachable code.
    void fillReachableBlocks(CFG *Cfg) {
      assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
      std::deque<const CFGBlock *> BlockQueue;

      ReachableBlocks.insert(&Cfg->getEntry());
      BlockQueue.push_back(&Cfg->getEntry());
      // Mark all case blocks reachable to avoid problems with switching on
      // constants, covered enums, etc.
      // These blocks can contain fall-through annotations, and we don't want to
      // issue a warn_fallthrough_attr_unreachable for them.
      for (const auto *B : *Cfg) {
        const Stmt *L = B->getLabel();
        if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B).second)
          BlockQueue.push_back(B);
      }

      // Standard breadth-first traversal over successor edges.
      while (!BlockQueue.empty()) {
        const CFGBlock *P = BlockQueue.front();
        BlockQueue.pop_front();
        for (const CFGBlock *B : P->succs()) {
          if (B && ReachableBlocks.insert(B).second)
            BlockQueue.push_back(B);
        }
      }
    }

    /// Examine the predecessors of case-labelled block \p B and classify
    /// each incoming fall-through edge as annotated or not. Returns true if
    /// at least one unannotated fall-through into \p B was found;
    /// \p AnnotatedCnt receives the number of annotated ones.
    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                   bool IsTemplateInstantiation) {
      assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

      int UnannotatedCnt = 0;
      AnnotatedCnt = 0;

      std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
      while (!BlockQueue.empty()) {
        const CFGBlock *P = BlockQueue.front();
        BlockQueue.pop_front();
        if (!P) continue;

        const Stmt *Term = P->getTerminatorStmt();
        if (Term && isa<SwitchStmt>(Term))
          continue; // Switch statement, good.

        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
        if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
          continue; // Previous case label has no statements, good.

        const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
        if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
          continue; // Case label is preceded with a normal label, good.

        if (!ReachableBlocks.count(P)) {
          // The predecessor is dead code; an annotation found in it can never
          // match a real fall-through, so diagnose it and consume it here.
          for (const CFGElement &Elem : llvm::reverse(*P)) {
            if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>()) {
            if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
              // Don't issue a warning for an unreachable fallthrough
              // attribute in template instantiations as it may not be
              // unreachable in all instantiations of the template.
              if (!IsTemplateInstantiation)
                S.Diag(AS->getBeginLoc(),
                       diag::warn_unreachable_fallthrough_attr);
              markFallthroughVisited(AS);
              ++AnnotatedCnt;
              break;
            }
            // Don't care about other unreachable statements.
            }
          }
          // If there are no unreachable statements, this may be a special
          // case in CFG:
          // case X: {
          //    A a;  // A has a destructor.
          //    break;
          // }
          // // <<<< This place is represented by a 'hanging' CFG block.
          // case Y:
          continue;
        }

        const Stmt *LastStmt = getLastStmt(*P);
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
          markFallthroughVisited(AS);
          ++AnnotatedCnt;
          continue; // Fallthrough annotation, good.
        }

        if (!LastStmt) { // This block contains no executable statements.
          // Traverse its predecessors.
          std::copy(P->pred_begin(), P->pred_end(),
                    std::back_inserter(BlockQueue));
          continue;
        }

        ++UnannotatedCnt;
      }
      return !!UnannotatedCnt;
    }

    // RecursiveASTVisitor setup.
    bool shouldWalkTypesOfTypeLocs() const { return false; }

    /// Collect fallthrough-annotated statements as they are traversed.
    bool VisitAttributedStmt(AttributedStmt *S) {
      if (asFallThroughAttr(S))
        FallthroughStmts.insert(S);
      return true;
    }

    bool VisitSwitchStmt(SwitchStmt *S) {
      FoundSwitchStatements = true;
      return true;
    }

    // We don't want to traverse local type declarations. We analyze their
    // methods separately.
    bool TraverseDecl(Decl *D) { return true; }

    // We analyze lambda bodies separately. Skip them here.
    bool TraverseLambdaExpr(LambdaExpr *LE) {
      // Traverse the captures, but not the body.
      for (const auto C : zip(LE->captures(), LE->capture_inits()))
        TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
      return true;
    }

  private:

    /// Return \p S as an AttributedStmt carrying a fallthrough attribute,
    /// or null if it is not one.
    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
          return AS;
      }
      return nullptr;
    }

    /// Return the statement that ends CFG block \p B: its terminator if
    /// present, otherwise the last statement element, with a workaround for
    /// case labels whose sub-statement the CFG builder dropped.
    static const Stmt *getLastStmt(const CFGBlock &B) {
      if (const Stmt *Term = B.getTerminatorStmt())
        return Term;
      for (const CFGElement &Elem : llvm::reverse(B))
        if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>())
          return CS->getStmt();
      // Workaround to detect a statement thrown out by CFGBuilder:
      //   case X: {} case Y:
      //   case X: ; case Y:
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
        if (!isa<SwitchCase>(SW->getSubStmt()))
          return SW->getSubStmt();

      return nullptr;
    }

    bool FoundSwitchStatements;   // Any switch statement seen in the body?
    AttrStmts FallthroughStmts;   // Annotations not yet matched to an edge.
    Sema &S;
    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
  };
} // anonymous namespace
1235 
1236 static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1237                                             SourceLocation Loc) {
1238   TokenValue FallthroughTokens[] = {
1239     tok::l_square, tok::l_square,
1240     PP.getIdentifierInfo("fallthrough"),
1241     tok::r_square, tok::r_square
1242   };
1243 
1244   TokenValue ClangFallthroughTokens[] = {
1245     tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
1246     tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
1247     tok::r_square, tok::r_square
1248   };
1249 
1250   bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C2x;
1251 
1252   StringRef MacroName;
1253   if (PreferClangAttr)
1254     MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1255   if (MacroName.empty())
1256     MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
1257   if (MacroName.empty() && !PreferClangAttr)
1258     MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1259   if (MacroName.empty()) {
1260     if (!PreferClangAttr)
1261       MacroName = "[[fallthrough]]";
1262     else if (PP.getLangOpts().CPlusPlus)
1263       MacroName = "[[clang::fallthrough]]";
1264     else
1265       MacroName = "__attribute__((fallthrough))";
1266   }
1267   return MacroName;
1268 }
1269 
/// Diagnose unannotated fall-through between switch labels in the body of
/// \p AC, suggesting fallthrough-annotation and break fixits, and flag
/// fallthrough annotations that never match a real fall-through edge.
///
/// \param PerFunction if true, only warn in functions that already use a
///        fallthrough annotation somewhere (the 'per-function' variant of
///        the warning).
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  // No switch statements: nothing to check.
  if (!FM.foundSwitchStatements())
    return;

  // In per-function mode, only functions that opted in by using at least one
  // annotation are checked.
  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
    const Stmt *Label = B->getLabel();

    // Only case/default-labelled blocks can be fallen into.
    if (!isa_and_nonnull<SwitchCase>(Label))
      continue;

    int AnnotatedCnt;

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    // Skip labels with no unannotated incoming fall-through edge.
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Label->getBeginLoc(),
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    // Suggest fixits only when no incoming edge was annotated at all, and
    // only outside macro expansions (where an insertion would be unsafe).
    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      if (L.isMacroID())
        continue;

      const Stmt *Term = B->getTerminatorStmt();
      // Skip empty cases.
      while (B->empty() && !Term && B->succ_size() == 1) {
        B = *B->succ_begin();
        Term = B->getTerminatorStmt();
      }
      // Offer the annotation fixit unless the block is just a 'break;'.
      if (!(B->empty() && Term && isa<BreakStmt>(Term))) {
        Preprocessor &PP = S.getPreprocessor();
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
        SmallString<64> TextToInsert(AnnotationSpelling);
        TextToInsert += "; ";
        S.Diag(L, diag::note_insert_fallthrough_fixit)
            << AnnotationSpelling
            << FixItHint::CreateInsertion(L, TextToInsert);
      }
      S.Diag(L, diag::note_insert_break_fixit)
          << FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Any annotation never matched to a fall-through edge is misplaced.
  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
}
1335 
1336 static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1337                      const Stmt *S) {
1338   assert(S);
1339 
1340   do {
1341     switch (S->getStmtClass()) {
1342     case Stmt::ForStmtClass:
1343     case Stmt::WhileStmtClass:
1344     case Stmt::CXXForRangeStmtClass:
1345     case Stmt::ObjCForCollectionStmtClass:
1346       return true;
1347     case Stmt::DoStmtClass: {
1348       Expr::EvalResult Result;
1349       if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
1350         return true;
1351       return Result.Val.getInt().getBoolValue();
1352     }
1353     default:
1354       break;
1355     }
1356   } while ((S = PM.getParent(S)));
1357 
1358   return false;
1359 }
1360 
/// Diagnose repeated reads of ObjC weak objects within a single function
/// body, which may observe different values if the object is deallocated
/// between reads.
///
/// Filters out harmless patterns (write-only uses, a single read outside a
/// loop), then emits one warning at the first unsafe read of each weak
/// object with notes at every other access.
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
  StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      // Scan forward for any second unsafe read.
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        // Exactly one read: only risky if it repeats via a loop.
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        // Don't warn about a single looped read of a local (non-parameter)
        // variable; locals are commonly reassigned inside loops.
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
            continue;
      }
    }

    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  llvm::sort(UsesByStmt,
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
                                                   RHS.first->getBeginLoc());
             });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *KeyProp = Key.getProperty();
    if (isa<VarDecl>(KeyProp))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(KeyProp))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(KeyProp))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(KeyProp))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Do not warn about IBOutlet weak property receivers being set to null
    // since they are typically only used from the main thread.
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
      if (Prop->hasAttr<IBOutletAttr>())
        continue;

    // Show the first time the object was read.
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
        << int(ObjectKind) << KeyProp << int(FunctionKind)
        << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (const auto &Use : Uses) {
      if (Use.getUseExpr() == FirstRead)
        continue;
      S.Diag(Use.getUseExpr()->getBeginLoc(),
             diag::note_arc_weak_also_accessed_here)
          << Use.getUseExpr()->getSourceRange();
    }
  }
}
1516 
1517 namespace clang {
1518 namespace {
1519 typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1520 typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1521 typedef std::list<DelayedDiag> DiagList;
1522 
1523 struct SortDiagBySourceLocation {
1524   SourceManager &SM;
1525   SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1526 
1527   bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1528     // Although this call will be slow, this is only called when outputting
1529     // multiple warnings.
1530     return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1531   }
1532 };
1533 } // anonymous namespace
1534 } // namespace clang
1535 
1536 namespace {
/// Collects uninitialized-variable uses reported by the dataflow analysis
/// and emits the corresponding diagnostics (in deterministic order) when
/// flushed or destroyed.
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  // Pointer to the uses vector plus a flag recording whether the variable
  // had an idiomatic self-init ('int x = x;').
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
  // Prefer using MapVector to DenseMap, so that iteration order will be
  // the same as insertion order. This is needed to obtain a deterministic
  // order of diagnostics when calling flushDiagnostics().
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
  UsesMap uses;
  UsesMap constRefUses;

public:
  UninitValsDiagReporter(Sema &S) : S(S) {}
  // Ensure any gathered uses are reported even if flushDiagnostics() was
  // never called explicitly.
  ~UninitValsDiagReporter() override { flushDiagnostics(); }

  /// Get (lazily creating) the entry for \p vd in map \p um. The UsesVec is
  /// heap-allocated and owned by the map entry; flushDiagnostics() deletes it.
  MappedType &getUses(UsesMap &um, const VarDecl *vd) {
    MappedType &V = um[vd];
    if (!V.getPointer())
      V.setPointer(new UsesVec());
    return V;
  }

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    getUses(uses, vd).getPointer()->push_back(use);
  }

  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) override {
    getUses(constRefUses, vd).getPointer()->push_back(use);
  }

  /// Record that \p vd used the 'int x = x;' self-init idiom; this downgrades
  /// or redirects later diagnostics for the variable.
  void handleSelfInit(const VarDecl *vd) override {
    getUses(uses, vd).setInt(true);
    getUses(constRefUses, vd).setInt(true);
  }

  /// Emit all gathered diagnostics and release the per-variable use vectors.
  void flushDiagnostics() {
    for (const auto &P : uses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init.  We want
      // to report the diagnostic at the self-init since that is the root cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations.  While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        llvm::sort(*vec, [](const UninitUse &a, const UninitUse &b) {
          // Prefer a more confident report over a less confident one.
          if (a.getKind() != b.getKind())
            return a.getKind() > b.getKind();
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
        });

        for (const auto &U : *vec) {
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
          UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;

          if (DiagnoseUninitializedUse(S, vd, Use))
            // Skip further diagnostics for this variable. We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    uses.clear();

    // Flush all const reference uses diags.
    for (const auto &P : constRefUses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Same self-init special case as above: report at the root cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        // Warn only on the first const-ref use of each variable.
        for (const auto &U : *vec) {
          if (DiagnoseUninitializedConstRefUse(S, vd, U))
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    constRefUses.clear();
  }

private:
  /// True if any recorded use is definitely uninitialized (Always, or
  /// uninitialized after its declaration / after a call).
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
    return llvm::any_of(*vec, [](const UninitUse &U) {
      return U.getKind() == UninitUse::Always ||
             U.getKind() == UninitUse::AfterCall ||
             U.getKind() == UninitUse::AfterDecl;
    });
  }
};
1654 
1655 /// Inter-procedural data for the called-once checker.
1656 class CalledOnceInterProceduralData {
1657 public:
1658   // Add the delayed warning for the given block.
1659   void addDelayedWarning(const BlockDecl *Block,
1660                          PartialDiagnosticAt &&Warning) {
1661     DelayedBlockWarnings[Block].emplace_back(std::move(Warning));
1662   }
1663   // Report all of the warnings we've gathered for the given block.
1664   void flushWarnings(const BlockDecl *Block, Sema &S) {
1665     for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
1666       S.Diag(Delayed.first, Delayed.second);
1667 
1668     discardWarnings(Block);
1669   }
1670   // Discard all of the warnings we've gathered for the given block.
1671   void discardWarnings(const BlockDecl *Block) {
1672     DelayedBlockWarnings.erase(Block);
1673   }
1674 
1675 private:
1676   using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
1677   llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
1678 };
1679 
1680 class CalledOnceCheckReporter : public CalledOnceCheckHandler {
1681 public:
1682   CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
1683       : S(S), Data(Data) {}
1684   void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
1685                         const Expr *PrevCall, bool IsCompletionHandler,
1686                         bool Poised) override {
1687     auto DiagToReport = IsCompletionHandler
1688                             ? diag::warn_completion_handler_called_twice
1689                             : diag::warn_called_once_gets_called_twice;
1690     S.Diag(Call->getBeginLoc(), DiagToReport) << Parameter;
1691     S.Diag(PrevCall->getBeginLoc(), diag::note_called_once_gets_called_twice)
1692         << Poised;
1693   }
1694 
1695   void handleNeverCalled(const ParmVarDecl *Parameter,
1696                          bool IsCompletionHandler) override {
1697     auto DiagToReport = IsCompletionHandler
1698                             ? diag::warn_completion_handler_never_called
1699                             : diag::warn_called_once_never_called;
1700     S.Diag(Parameter->getBeginLoc(), DiagToReport)
1701         << Parameter << /* Captured */ false;
1702   }
1703 
1704   void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
1705                          const Stmt *Where, NeverCalledReason Reason,
1706                          bool IsCalledDirectly,
1707                          bool IsCompletionHandler) override {
1708     auto DiagToReport = IsCompletionHandler
1709                             ? diag::warn_completion_handler_never_called_when
1710                             : diag::warn_called_once_never_called_when;
1711     PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagToReport)
1712                                                           << Parameter
1713                                                           << IsCalledDirectly
1714                                                           << (unsigned)Reason);
1715 
1716     if (const auto *Block = dyn_cast<BlockDecl>(Function)) {
1717       // We shouldn't report these warnings on blocks immediately
1718       Data.addDelayedWarning(Block, std::move(Warning));
1719     } else {
1720       S.Diag(Warning.first, Warning.second);
1721     }
1722   }
1723 
1724   void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
1725                                  const Decl *Where,
1726                                  bool IsCompletionHandler) override {
1727     auto DiagToReport = IsCompletionHandler
1728                             ? diag::warn_completion_handler_never_called
1729                             : diag::warn_called_once_never_called;
1730     S.Diag(Where->getBeginLoc(), DiagToReport)
1731         << Parameter << /* Captured */ true;
1732   }
1733 
1734   void
1735   handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
1736     Data.flushWarnings(Block, S);
1737   }
1738 
1739   void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
1740     Data.discardWarnings(Block);
1741   }
1742 
1743 private:
1744   Sema &S;
1745   CalledOnceInterProceduralData &Data;
1746 };
1747 
// Diagnostic IDs that the called-once analysis can emit for explicitly
// annotated ('called_once') parameters.
constexpr unsigned CalledOnceWarnings[] = {
    diag::warn_called_once_never_called,
    diag::warn_called_once_never_called_when,
    diag::warn_called_once_gets_called_twice};

// Diagnostic IDs emitted for parameters that are inferred to be completion
// handlers by naming convention rather than by explicit annotation.
constexpr unsigned CompletionHandlerWarnings[]{
    diag::warn_completion_handler_never_called,
    diag::warn_completion_handler_never_called_when,
    diag::warn_completion_handler_called_twice};
1757 
1758 bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
1759                                  const DiagnosticsEngine &Diags,
1760                                  SourceLocation At) {
1761   return llvm::any_of(DiagIDs, [&Diags, At](unsigned DiagID) {
1762     return !Diags.isIgnored(DiagID, At);
1763   });
1764 }
1765 
// Should we check convention-based ("completion handler") parameters, i.e.
// is any of the completion-handler diagnostics enabled at \p At?
bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
                                        SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(CompletionHandlerWarnings, Diags, At);
}
1770 
// Should the called-once analysis run at all, i.e. is any diagnostic for
// either explicitly annotated or convention-based parameters enabled?
bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
                                       SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(CalledOnceWarnings, Diags, At) ||
         shouldAnalyzeCalledOnceConventions(Diags, At);
}
1776 } // anonymous namespace
1777 
1778 //===----------------------------------------------------------------------===//
1779 // -Wthread-safety
1780 //===----------------------------------------------------------------------===//
1781 namespace clang {
1782 namespace threadSafety {
1783 namespace {
/// Collects the diagnostics produced by the -Wthread-safety analysis and
/// emits them through Sema in deterministic (source-location) order.
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
  Sema &S;
  // Diagnostics are buffered here during the analysis and flushed by
  // emitDiagnostics() so the output order does not depend on lockset
  // iteration order.
  DiagList Warnings;
  // Fallback locations used when a handler receives an invalid location.
  SourceLocation FunLocation, FunEndLocation;

  // The function currently being analyzed; set/cleared by
  // enterFunction()/leaveFunction().
  const FunctionDecl *CurrentFunction;
  // When true, every warning gets an extra "within function" note.
  bool Verbose;

  // Notes for a warning with no dedicated note: just the verbose
  // "in function" note, if enabled.
  OptionalNotes getNotes() const {
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      return OptionalNotes(1, FNote);
    }
    return OptionalNotes();
  }

  // Notes consisting of \p Note plus the optional verbose note.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
    OptionalNotes ONS(1, Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  // Notes consisting of \p Note1 and \p Note2 plus the optional verbose note.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Note1);
    ONS.push_back(Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  // "mutex acquired here" note, if the acquisition location is known.
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
    return LocLocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
               : getNotes();
  }

  // "mutex released here" note, if the release location is known.
  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
                                     StringRef Kind) {
    return LocUnlocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
               : getNotes();
  }

 public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL),
      CurrentFunction(nullptr), Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }

  /// Emit all buffered diagnostics in order of source location.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }

  // The analysis could not resolve a lock expression to a capability.
  void handleInvalidLockExp(SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
                                         << Loc);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // An unlock of a capability that is not currently held.
  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
                             SourceLocation LocPreviousUnlock) override {
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
                                         << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeUnlockedHereNote(LocPreviousUnlock, Kind));
  }

  // E.g. a shared unlock of an exclusively-held capability.
  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation LocLocked,
                                 SourceLocation LocUnlock) override {
    if (LocUnlock.isInvalid())
      LocUnlock = FunLocation;
    PartialDiagnosticAt Warning(
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
                       << Kind << LockName << Received << Expected);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // A capability is acquired while it is already held.
  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
                        SourceLocation LocDoubleLock) override {
    if (LocDoubleLock.isInvalid())
      LocDoubleLock = FunLocation;
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
                                                   << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // A capability is (or is not) held at the end of a scope when the opposite
  // was expected; the exact diagnostic depends on the error kind.
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK) override {
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
                                                               << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // The same capability is held exclusively on one path and shared on another.
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
                                       << Kind << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
  }

  // A guarded variable is accessed while no capability at all is held.
  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) override {
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
           "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess?
                        diag::warn_variable_requires_any_lock:
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
      << D << getLockKindFromAccessKind(AK));
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // The required capability is not held. If the analysis found a capability
  // with a similar name (\p PossibleMatch), use the "precise" diagnostics and
  // attach a "found near match" note.
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
                          ProtectedOperationKind POK, Name LockName,
                          LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) override {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock_precise;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock_precise;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock_precise;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                                        << *PossibleMatch);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt VNote(D->getLocation(),
                                  S.PDiag(diag::note_guarded_by_declared_here)
                                      << D->getDeclName());
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
    } else {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt Note(D->getLocation(),
                                 S.PDiag(diag::note_guarded_by_declared_here));
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes());
    }
  }

  // Acquiring a capability that requires a negative capability to be held.
  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
        S.PDiag(diag::warn_acquire_requires_negative_cap)
        << Kind << LockName << Neg);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Calling a function that requires a negative capability to be held.
  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Calling a function while holding a capability it excludes.
  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
                                         << Kind << FunName << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Lock-ordering (acquired_before/acquired_after) violation.
  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
                                SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Cycle in the acquired_before/acquired_after graph.
  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void enterFunction(const FunctionDecl* FD) override {
    CurrentFunction = FD;
  }

  void leaveFunction(const FunctionDecl* FD) override {
    CurrentFunction = nullptr;
  }
};
2060 } // anonymous namespace
2061 } // namespace threadSafety
2062 } // namespace clang
2063 
2064 //===----------------------------------------------------------------------===//
2065 // -Wconsumed
2066 //===----------------------------------------------------------------------===//
2067 
2068 namespace clang {
2069 namespace consumed {
2070 namespace {
2071 class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
2072 
2073   Sema &S;
2074   DiagList Warnings;
2075 
2076 public:
2077 
2078   ConsumedWarningsHandler(Sema &S) : S(S) {}
2079 
2080   void emitDiagnostics() override {
2081     Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
2082     for (const auto &Diag : Warnings) {
2083       S.Diag(Diag.first.first, Diag.first.second);
2084       for (const auto &Note : Diag.second)
2085         S.Diag(Note.first, Note.second);
2086     }
2087   }
2088 
2089   void warnLoopStateMismatch(SourceLocation Loc,
2090                              StringRef VariableName) override {
2091     PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
2092       VariableName);
2093 
2094     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2095   }
2096 
2097   void warnParamReturnTypestateMismatch(SourceLocation Loc,
2098                                         StringRef VariableName,
2099                                         StringRef ExpectedState,
2100                                         StringRef ObservedState) override {
2101 
2102     PartialDiagnosticAt Warning(Loc, S.PDiag(
2103       diag::warn_param_return_typestate_mismatch) << VariableName <<
2104         ExpectedState << ObservedState);
2105 
2106     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2107   }
2108 
2109   void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2110                                   StringRef ObservedState) override {
2111 
2112     PartialDiagnosticAt Warning(Loc, S.PDiag(
2113       diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
2114 
2115     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2116   }
2117 
2118   void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
2119                                               StringRef TypeName) override {
2120     PartialDiagnosticAt Warning(Loc, S.PDiag(
2121       diag::warn_return_typestate_for_unconsumable_type) << TypeName);
2122 
2123     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2124   }
2125 
2126   void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2127                                    StringRef ObservedState) override {
2128 
2129     PartialDiagnosticAt Warning(Loc, S.PDiag(
2130       diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
2131 
2132     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2133   }
2134 
2135   void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
2136                                    SourceLocation Loc) override {
2137 
2138     PartialDiagnosticAt Warning(Loc, S.PDiag(
2139       diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
2140 
2141     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2142   }
2143 
2144   void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
2145                              StringRef State, SourceLocation Loc) override {
2146 
2147     PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
2148                                 MethodName << VariableName << State);
2149 
2150     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2151   }
2152 };
2153 } // anonymous namespace
2154 } // namespace consumed
2155 } // namespace clang
2156 
2157 //===----------------------------------------------------------------------===//
2158 // Unsafe buffer usage analysis.
2159 //===----------------------------------------------------------------------===//
2160 
2161 namespace {
2162 class UnsafeBufferUsageReporter : public UnsafeBufferUsageHandler {
2163   Sema &S;
2164   bool SuggestSuggestions;  // Recommend -fsafe-buffer-usage-suggestions?
2165 
2166 public:
2167   UnsafeBufferUsageReporter(Sema &S, bool SuggestSuggestions)
2168     : S(S), SuggestSuggestions(SuggestSuggestions) {}
2169 
2170   void handleUnsafeOperation(const Stmt *Operation,
2171                              bool IsRelatedToDecl) override {
2172     SourceLocation Loc;
2173     SourceRange Range;
2174     unsigned MsgParam = 0;
2175     if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Operation)) {
2176       Loc = ASE->getBase()->getExprLoc();
2177       Range = ASE->getBase()->getSourceRange();
2178       MsgParam = 2;
2179     } else if (const auto *BO = dyn_cast<BinaryOperator>(Operation)) {
2180       BinaryOperator::Opcode Op = BO->getOpcode();
2181       if (Op == BO_Add || Op == BO_AddAssign || Op == BO_Sub ||
2182           Op == BO_SubAssign) {
2183         if (BO->getRHS()->getType()->isIntegerType()) {
2184           Loc = BO->getLHS()->getExprLoc();
2185           Range = BO->getLHS()->getSourceRange();
2186         } else {
2187           Loc = BO->getRHS()->getExprLoc();
2188           Range = BO->getRHS()->getSourceRange();
2189         }
2190         MsgParam = 1;
2191       }
2192     } else if (const auto *UO = dyn_cast<UnaryOperator>(Operation)) {
2193       UnaryOperator::Opcode Op = UO->getOpcode();
2194       if (Op == UO_PreInc || Op == UO_PreDec || Op == UO_PostInc ||
2195           Op == UO_PostDec) {
2196         Loc = UO->getSubExpr()->getExprLoc();
2197         Range = UO->getSubExpr()->getSourceRange();
2198         MsgParam = 1;
2199       }
2200     } else {
2201       if (isa<CallExpr>(Operation)) {
2202         // note_unsafe_buffer_operation doesn't have this mode yet.
2203         assert(!IsRelatedToDecl && "Not implemented yet!");
2204         MsgParam = 3;
2205       }
2206       Loc = Operation->getBeginLoc();
2207       Range = Operation->getSourceRange();
2208     }
2209     if (IsRelatedToDecl) {
2210       assert(!SuggestSuggestions &&
2211              "Variables blamed for unsafe buffer usage without suggestions!");
2212       S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
2213     } else {
2214       S.Diag(Loc, diag::warn_unsafe_buffer_operation) << MsgParam << Range;
2215       if (SuggestSuggestions) {
2216         S.Diag(Loc, diag::note_safe_buffer_usage_suggestions_disabled);
2217       }
2218     }
2219   }
2220 
2221   void handleUnsafeVariableGroup(const VarDecl *Variable,
2222                                  const DefMapTy &VarGrpMap,
2223                              FixItList &&Fixes) override {
2224     assert(!SuggestSuggestions &&
2225            "Unsafe buffer usage fixits displayed without suggestions!");
2226     S.Diag(Variable->getLocation(), diag::warn_unsafe_buffer_variable)
2227         << Variable << (Variable->getType()->isPointerType() ? 0 : 1)
2228         << Variable->getSourceRange();
2229     if (!Fixes.empty()) {
2230       const auto VarGroupForVD = VarGrpMap.find(Variable)->second;
2231       unsigned FixItStrategy = 0; // For now we only have 'std::span' strategy
2232       const auto &FD = S.Diag(Variable->getLocation(),
2233                               diag::note_unsafe_buffer_variable_fixit_group);
2234 
2235       FD << Variable << FixItStrategy;
2236       std::string AllVars = "";
2237       if (VarGroupForVD.size() > 1) {
2238         if (VarGroupForVD.size() == 2) {
2239           if (VarGroupForVD[0] == Variable) {
2240             AllVars.append("'" + VarGroupForVD[1]->getName().str() + "'");
2241           } else {
2242             AllVars.append("'" + VarGroupForVD[0]->getName().str() + "'");
2243           }
2244         } else {
2245           bool first = false;
2246           if (VarGroupForVD.size() == 3) {
2247             for (const VarDecl * V : VarGroupForVD) {
2248               if (V == Variable) {
2249                 continue;
2250               }
2251               if (!first) {
2252                 first = true;
2253                 AllVars.append("'" + V->getName().str() + "'" + " and ");
2254               } else {
2255                 AllVars.append("'" + V->getName().str() + "'");
2256               }
2257             }
2258           } else {
2259             for (const VarDecl * V : VarGroupForVD) {
2260               if (V == Variable) {
2261                 continue;
2262               }
2263               if (VarGroupForVD.back() != V) {
2264                 AllVars.append("'" + V->getName().str() + "'" + ", ");
2265               } else {
2266                 AllVars.append("and '" + V->getName().str() + "'");
2267               }
2268             }
2269           }
2270         }
2271         FD << AllVars << 1;
2272       } else {
2273         FD << "" << 0;
2274       }
2275 
2276       for (const auto &F : Fixes)
2277         FD << F;
2278     }
2279   }
2280 
2281   bool isSafeBufferOptOut(const SourceLocation &Loc) const override {
2282     return S.PP.isSafeBufferOptOut(S.getSourceManager(), Loc);
2283   }
2284 
2285   // Returns the text representation of clang::unsafe_buffer_usage attribute.
2286   // `WSSuffix` holds customized "white-space"s, e.g., newline or whilespace
2287   // characters.
2288   std::string
2289   getUnsafeBufferUsageAttributeTextAt(SourceLocation Loc,
2290                                       StringRef WSSuffix = "") const override {
2291     Preprocessor &PP = S.getPreprocessor();
2292     TokenValue ClangUnsafeBufferUsageTokens[] = {
2293         tok::l_square,
2294         tok::l_square,
2295         PP.getIdentifierInfo("clang"),
2296         tok::coloncolon,
2297         PP.getIdentifierInfo("unsafe_buffer_usage"),
2298         tok::r_square,
2299         tok::r_square};
2300 
2301     StringRef MacroName;
2302 
2303     // The returned macro (it returns) is guaranteed not to be function-like:
2304     MacroName = PP.getLastMacroWithSpelling(Loc, ClangUnsafeBufferUsageTokens);
2305     if (MacroName.empty())
2306       MacroName = "[[clang::unsafe_buffer_usage]]";
2307     return MacroName.str() + WSSuffix.str();
2308   }
2309 };
2310 } // namespace
2311 
2312 //===----------------------------------------------------------------------===//
2313 // AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2314 //  warnings on a function, method, or block.
2315 //===----------------------------------------------------------------------===//
2316 
sema::AnalysisBasedWarnings::Policy::Policy() {
  // Fall-through checking is on by default; the remaining analyses are
  // switched on per translation unit by the AnalysisBasedWarnings
  // constructor, based on which warning flags are enabled.
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
  enableThreadSafetyAnalysis = 0;
  enableConsumedAnalysis = 0;
}
2323 
2324 /// InterProceduralData aims to be a storage of whatever data should be passed
2325 /// between analyses of different functions.
2326 ///
2327 /// At the moment, its primary goal is to make the information gathered during
2328 /// the analysis of the blocks available during the analysis of the enclosing
2329 /// function.  This is important due to the fact that blocks are analyzed before
2330 /// the enclosed function is even parsed fully, so it is not viable to access
2331 /// anything in the outer scope while analyzing the block.  On the other hand,
2332 /// re-building CFG for blocks and re-analyzing them when we do have all the
2333 /// information (i.e. during the analysis of the enclosing function) seems to be
2334 /// ill-designed.
class sema::AnalysisBasedWarnings::InterProceduralData {
public:
  // It is important to analyze blocks within functions because it's a very
  // common pattern to capture completion handler parameters by blocks.
  // Delayed per-block warnings gathered by the called-once checker live here
  // so they survive until the enclosing function is analyzed.
  CalledOnceInterProceduralData CalledOnceData;
};
2341 
2342 static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
2343   return (unsigned)!D.isIgnored(diag, SourceLocation());
2344 }
2345 
sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
    : S(s), IPData(std::make_unique<InterProceduralData>()),
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
      NumUninitAnalysisBlockVisits(0),
      MaxUninitAnalysisBlockVisitsPerFunction(0) {

  using namespace diag;
  DiagnosticsEngine &D = S.getDiagnostics();

  // Run the unreachable-code analysis only if at least one of the
  // -Wunreachable-code* diagnostics is requested for this compilation.
  DefaultPolicy.enableCheckUnreachable =
      isEnabled(D, warn_unreachable) || isEnabled(D, warn_unreachable_break) ||
      isEnabled(D, warn_unreachable_return) ||
      isEnabled(D, warn_unreachable_loop_increment);

  // warn_double_lock serves as a representative -Wthread-safety diagnostic:
  // if it is enabled, the whole thread-safety analysis runs.
  DefaultPolicy.enableThreadSafetyAnalysis = isEnabled(D, warn_double_lock);

  // Likewise, warn_use_in_invalid_state stands in for -Wconsumed.
  DefaultPolicy.enableConsumedAnalysis =
      isEnabled(D, warn_use_in_invalid_state);
}
2367 
// We need this here for unique_ptr with forward declared class.
// (~unique_ptr<InterProceduralData> must be instantiated where the class is
// complete, so the destructor cannot be implicitly defined in the header.)
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
2370 
2371 static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2372   for (const auto &D : fscope->PossiblyUnreachableDiags)
2373     S.Diag(D.Loc, D.PD);
2374 }
2375 
2376 // An AST Visitor that calls a callback function on each callable DEFINITION
2377 // that is NOT in a dependent context:
2378 class CallableVisitor : public RecursiveASTVisitor<CallableVisitor> {
2379 private:
2380   llvm::function_ref<void(const Decl *)> Callback;
2381 
2382 public:
2383   CallableVisitor(llvm::function_ref<void(const Decl *)> Callback)
2384       : Callback(Callback) {}
2385 
2386   bool VisitFunctionDecl(FunctionDecl *Node) {
2387     if (cast<DeclContext>(Node)->isDependentContext())
2388       return true; // Not to analyze dependent decl
2389     // `FunctionDecl->hasBody()` returns true if the function has a body
2390     // somewhere defined.  But we want to know if this `Node` has a body
2391     // child.  So we use `doesThisDeclarationHaveABody`:
2392     if (Node->doesThisDeclarationHaveABody())
2393       Callback(Node);
2394     return true;
2395   }
2396 
2397   bool VisitBlockDecl(BlockDecl *Node) {
2398     if (cast<DeclContext>(Node)->isDependentContext())
2399       return true; // Not to analyze dependent decl
2400     Callback(Node);
2401     return true;
2402   }
2403 
2404   bool VisitObjCMethodDecl(ObjCMethodDecl *Node) {
2405     if (cast<DeclContext>(Node)->isDependentContext())
2406       return true; // Not to analyze dependent decl
2407     if (Node->hasBody())
2408       Callback(Node);
2409     return true;
2410   }
2411 
2412   bool VisitLambdaExpr(LambdaExpr *Node) {
2413     return VisitFunctionDecl(Node->getCallOperator());
2414   }
2415 
2416   bool shouldVisitTemplateInstantiations() const { return true; }
2417   bool shouldVisitImplicitCode() const { return false; }
2418 };
2419 
/// Issue whole-translation-unit analysis-based warnings.  Currently this
/// drives the unsafe-buffer-usage analysis over every non-dependent callable
/// definition in the TU.  Does nothing when warnings are globally suppressed
/// or uncompilable errors occurred.
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
     TranslationUnitDecl *TU) {
  if (!TU)
    return; // This is unexpected, give up quietly.

  DiagnosticsEngine &Diags = S.getDiagnostics();

  if (S.hasUncompilableErrorOccurred() || Diags.getIgnoreAllWarnings())
    // exit if having uncompilable errors or ignoring all warnings:
    return;

  DiagnosticOptions &DiagOpts = Diags.getDiagnosticOptions();

  // UnsafeBufferUsage analysis settings.
  // "Can" is gated on C++20 (fix-its rely on C++20 features); "Should emit"
  // and "Should suggest" are mutually exclusive refinements of "Can",
  // selected by -fsafe-buffer-usage-suggestions:
  //   - emit:    actually attach fix-it suggestions to the warnings;
  //   - suggest: only note that suggestions exist (flag not passed).
  bool UnsafeBufferUsageCanEmitSuggestions = S.getLangOpts().CPlusPlus20;
  bool UnsafeBufferUsageShouldEmitSuggestions =  // Should != Can.
      UnsafeBufferUsageCanEmitSuggestions &&
      DiagOpts.ShowSafeBufferUsageSuggestions;
  bool UnsafeBufferUsageShouldSuggestSuggestions =
      UnsafeBufferUsageCanEmitSuggestions &&
      !DiagOpts.ShowSafeBufferUsageSuggestions;
  UnsafeBufferUsageReporter R(S, UnsafeBufferUsageShouldSuggestSuggestions);

  // The Callback function that performs analyses:
  auto CallAnalyzers = [&](const Decl *Node) -> void {
    // Perform unsafe buffer usage analysis:
    // (checked at the decl's location so #pragma / per-file suppression is
    // honored per callable)
    if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation,
                         Node->getBeginLoc()) ||
        !Diags.isIgnored(diag::warn_unsafe_buffer_variable,
                         Node->getBeginLoc())) {
      clang::checkUnsafeBufferUsage(Node, R,
                                    UnsafeBufferUsageShouldEmitSuggestions);
    }

    // More analysis ...
  };
  // Emit per-function analysis-based warnings that require the whole-TU
  // reasoning. Check if any of them is enabled at all before scanning the AST:
  if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation, SourceLocation()) ||
      !Diags.isIgnored(diag::warn_unsafe_buffer_variable, SourceLocation())) {
    CallableVisitor(CallAnalyzers).TraverseTranslationUnitDecl(TU);
  }
}
2463 
/// Run all per-function analysis-based warnings on a fully-parsed function,
/// Objective-C method, block, or lambda body.
///
/// \param P         policy selecting which optional analyses to run.
/// \param fscope    scope info gathered during parsing, including delayed
///                  "possibly unreachable" diagnostics.
/// \param D         the declaration being analyzed; must have a body.
/// \param BlockType for blocks, the type of the block literal (used by the
///                  fall-through check); otherwise ignored.
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
    const Decl *D, QualType BlockType) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis if we are going to just ignore them.
  if (Diags.getIgnoreAllWarnings() ||
      (Diags.getSuppressSystemWarnings() &&
       S.SourceMgr.isInSystemHeader(D->getLocation())))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (S.hasUncompilableErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass);
  }

  // Install the logical handler.
  // The handler observes CFG construction, so it must be registered before
  // the first AC.getCFG() call; it stays alive for the whole analysis.
  std::optional<LogicalErrorHandler> LEH;
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    LEH.emplace(S);
    AC.getCFGBuildOptions().Observer = &*LEH;
  }

  // Emit delayed diagnostics.
  // Each delayed diagnostic is emitted only if every statement it depends on
  // is reachable from the function entry; if the CFG cannot be built, all of
  // them are emitted unconditionally via flushDiagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
      for (const Stmt *S : D.Stmts)
        AC.registerForcedBlockExpression(S);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
        bool AllReachable = true;
        for (const Stmt *S : D.Stmts) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
              AllReachable = false;
              break;
            }
          }
          // If we cannot map to a basic block, assume the statement is
          // reachable.
        }

        if (AllReachable)
          S.Diag(D.Loc, D.PD);
      }
    }

    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Pick the diagnostics flavor matching the kind of callable: block,
    // lambda call operator, coroutine, or plain function.
    const CheckFallThroughDiagnostics &CD =
        (isa<BlockDecl>(D)
             ? CheckFallThroughDiagnostics::MakeForBlock()
             : (isa<CXXMethodDecl>(D) &&
                cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
                cast<CXXMethodDecl>(D)->getParent()->isLambda())
                   ? CheckFallThroughDiagnostics::MakeForLambda()
                   : (fscope->isCoroutine()
                          ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
                          : CheckFallThroughDiagnostics::MakeForFunction(D)));
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getEndLoc();
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
    // Beta/verbose modes are driven by whether their diagnostics are enabled
    // at this declaration's location.
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
      Reporter.setIssueBetaWarnings(true);
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
      Reporter.setVerbose(true);

    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
                                          &S.ThreadSafetyDeclCache);
    Reporter.emitDiagnostics();
  }

  // Check for violations of consumed properties.
  if (P.enableConsumedAnalysis) {
    consumed::ConsumedWarningsHandler WarningHandler(S);
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
    Analyzer.run(AC);
  }

  // Uninitialized-variable analysis: run only if at least one of its
  // diagnostics is enabled here, and only if a CFG could be built.
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      // Aggregate statistics for -print-stats (see PrintStats below).
      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // Check for violations of "called once" parameter properties.
  if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
      shouldAnalyzeCalledOnceParameters(Diags, D->getBeginLoc())) {
    if (AC.getCFG()) {
      // IPData->CalledOnceData carries facts collected from blocks that were
      // analyzed before this enclosing function finished parsing.
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
      checkCalledOnceParameters(
          AC, Reporter,
          shouldAnalyzeCalledOnceConventions(Diags, D->getBeginLoc()));
    }
  }

  // Switch-label fall-through check: also runs when the body contains a
  // [[fallthrough]] statement, so unneeded annotations get diagnosed.
  bool FallThroughDiagFull =
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
      fscope->HasFallthroughStmt) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

  if (S.getLangOpts().ObjCWeak &&
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());


  // Check for infinite self-recursion in functions
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
                       D->getBeginLoc())) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      checkRecursiveFunction(S, FD, Body, AC);
    }
  }

  // Check for throw out of non-throwing function.
  // (Skipped for coroutines: their exceptional path goes through the promise
  // type's unhandled_exception instead.)
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
      if (S.getLangOpts().CPlusPlus && !fscope->isCoroutine() && isNoexcept(FD))
        checkThrowInNonThrowingFunc(S, FD, AC);

  // If none of the previous checks caused a CFG build, trigger one here
  // for the logical error handler.
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    AC.getCFG();
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
2712 
2713 void clang::sema::AnalysisBasedWarnings::PrintStats() const {
2714   llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
2715 
2716   unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
2717   unsigned AvgCFGBlocksPerFunction =
2718       !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
2719   llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
2720                << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
2721                << "  " << NumCFGBlocks << " CFG blocks built.\n"
2722                << "  " << AvgCFGBlocksPerFunction
2723                << " average CFG blocks per function.\n"
2724                << "  " << MaxCFGBlocksPerFunction
2725                << " max CFG blocks per function.\n";
2726 
2727   unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
2728       : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
2729   unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
2730       : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
2731   llvm::errs() << NumUninitAnalysisFunctions
2732                << " functions analyzed for uninitialiazed variables\n"
2733                << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
2734                << "  " << AvgUninitVariablesPerFunction
2735                << " average variables per function.\n"
2736                << "  " << MaxUninitAnalysisVariablesPerFunction
2737                << " max variables per function.\n"
2738                << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
2739                << "  " << AvgUninitBlockVisitsPerFunction
2740                << " average block visits per function.\n"
2741                << "  " << MaxUninitAnalysisBlockVisitsPerFunction
2742                << " max block visits per function.\n";
2743 }
2744