//===--- JumpDiagnostics.cpp - Protected scope jump analysis ------*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the JumpScopeChecker class, which is used to diagnose
// jumps that enter a protected scope in an invalid way.
//
//===----------------------------------------------------------------------===//

#include "clang/Sema/SemaInternal.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtOpenMP.h"
#include "llvm/ADT/BitVector.h"
using namespace clang;

namespace {

/// JumpScopeChecker - This object is used by Sema to diagnose invalid jumps
/// into VLA and other protected scopes. For example, this rejects:
///    goto L;
///    int a[n];
///   L:
///
class JumpScopeChecker {
  Sema &S;

  /// Permissive - True when recovering from errors, in which case precautions
  /// are taken to handle incomplete scope information.
  const bool Permissive;

  /// GotoScope - This is a record that we use to keep track of all of the
  /// scopes that are introduced by VLAs and other constructs that restrict
  /// jumps such as gotos. This scope tree has nothing to do with the source
  /// scope tree, because you can have multiple VLA scopes per compound
  /// statement, and most compound statements don't introduce any scopes.
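  /// For example (an illustrative sketch), a single compound statement that
  /// declares two VLAs produces two GotoScope records chained through
  /// ParentScope, even though the source has only one brace scope:
  ///   { int a[n]; int b[m]; }   // two GotoScopes, one source scope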
  struct GotoScope {
    /// ParentScope - The index in Scopes of the parent scope. This is 0 if
    /// the parent scope is the function body.
    unsigned ParentScope;

    /// InDiag - The note to emit if there is a jump into this scope.
    unsigned InDiag;

    /// OutDiag - The note to emit if there is an indirect jump out
    /// of this scope. Direct jumps always clean up their current scope
    /// in an orderly way.
    unsigned OutDiag;

    /// Loc - Location to emit the diagnostic.
    SourceLocation Loc;

    GotoScope(unsigned parentScope, unsigned InDiag, unsigned OutDiag,
              SourceLocation L)
        : ParentScope(parentScope), InDiag(InDiag), OutDiag(OutDiag), Loc(L) {}
  };

  SmallVector<GotoScope, 48> Scopes;
  llvm::DenseMap<Stmt*, unsigned> LabelAndGotoScopes;
  SmallVector<Stmt*, 16> Jumps;

  SmallVector<Stmt*, 4> IndirectJumps;
  SmallVector<Stmt*, 4> AsmJumps;
  SmallVector<LabelDecl*, 4> IndirectJumpTargets;
  SmallVector<LabelDecl*, 4> AsmJumpTargets;
public:
  JumpScopeChecker(Stmt *Body, Sema &S);
private:
  void BuildScopeInformation(Decl *D, unsigned &ParentScope);
  void BuildScopeInformation(VarDecl *D, const BlockDecl *BDecl,
                             unsigned &ParentScope);
  void BuildScopeInformation(CompoundLiteralExpr *CLE, unsigned &ParentScope);
  void BuildScopeInformation(Stmt *S, unsigned &origParentScope);

  void VerifyJumps();
  void VerifyIndirectOrAsmJumps(bool IsAsmGoto);
  void NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes);
  void DiagnoseIndirectOrAsmJump(Stmt *IG, unsigned IGScope, LabelDecl *Target,
                                 unsigned TargetScope);
  void CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                 unsigned JumpDiag, unsigned JumpDiagWarning,
                 unsigned JumpDiagCXX98Compat);
  void CheckGotoStmt(GotoStmt *GS);

  unsigned GetDeepestCommonScope(unsigned A, unsigned B);
};
} // end anonymous namespace

#define CHECK_PERMISSIVE(x) (assert(Permissive || !(x)), (Permissive && (x)))
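// CHECK_PERMISSIVE(x) asserts that 'x' does not hold unless we are in
// permissive (error-recovery) mode; in permissive mode it evaluates to 'x'
// so callers can bail out gracefully on incomplete scope information, e.g.:
//   if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(S)))
//     return;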

JumpScopeChecker::JumpScopeChecker(Stmt *Body, Sema &s)
    : S(s), Permissive(s.hasAnyUnrecoverableErrorsInThisFunction()) {
  // Add a scope entry for function scope.
  Scopes.push_back(GotoScope(~0U, ~0U, ~0U, SourceLocation()));

  // Build information for the top level compound statement, so that we have a
  // defined scope record for every "goto" and label.
  unsigned BodyParentScope = 0;
  BuildScopeInformation(Body, BodyParentScope);

  // Check that all jumps we saw are kosher.
  VerifyJumps();
  VerifyIndirectOrAsmJumps(false);
  VerifyIndirectOrAsmJumps(true);
}

/// GetDeepestCommonScope - Finds the innermost scope enclosing the
/// two scopes.
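/// For example (illustrative): if scope 0 is the function scope, scope 1 is
/// a VLA scope whose parent is 0, and scope 2 is a try scope whose parent is
/// 1, then GetDeepestCommonScope(1, 2) walks scope 2 up its parent chain and
/// returns 1.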
unsigned JumpScopeChecker::GetDeepestCommonScope(unsigned A, unsigned B) {
  while (A != B) {
    // Inner scopes are created after outer scopes and therefore have
    // higher indices.
    if (A < B) {
      assert(Scopes[B].ParentScope < B);
      B = Scopes[B].ParentScope;
    } else {
      assert(Scopes[A].ParentScope < A);
      A = Scopes[A].ParentScope;
    }
  }
  return A;
}

typedef std::pair<unsigned,unsigned> ScopePair;

/// GetDiagForGotoScopeDecl - If this decl induces a new goto scope, return a
/// diagnostic that should be emitted if control goes over it. If not, return 0.
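/// For example (illustrative), in C++ a local declaration like
///   std::string s = "x";
/// induces a scope with both an InDiag (a jump over the initialization is
/// diagnosed) and an OutDiag (the destructor must run when the scope is
/// exited by an indirect jump).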
static ScopePair GetDiagForGotoScopeDecl(Sema &S, const Decl *D) {
  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    unsigned InDiag = 0;
    unsigned OutDiag = 0;

    if (VD->getType()->isVariablyModifiedType())
      InDiag = diag::note_protected_by_vla;

    if (VD->hasAttr<BlocksAttr>())
      return ScopePair(diag::note_protected_by___block,
                       diag::note_exits___block);

    if (VD->hasAttr<CleanupAttr>())
      return ScopePair(diag::note_protected_by_cleanup,
                       diag::note_exits_cleanup);

    if (VD->hasLocalStorage()) {
      switch (VD->getType().isDestructedType()) {
      case QualType::DK_objc_strong_lifetime:
        return ScopePair(diag::note_protected_by_objc_strong_init,
                         diag::note_exits_objc_strong);

      case QualType::DK_objc_weak_lifetime:
        return ScopePair(diag::note_protected_by_objc_weak_init,
                         diag::note_exits_objc_weak);

      case QualType::DK_nontrivial_c_struct:
        return ScopePair(diag::note_protected_by_non_trivial_c_struct_init,
                         diag::note_exits_dtor);

      case QualType::DK_cxx_destructor:
        OutDiag = diag::note_exits_dtor;
        break;

      case QualType::DK_none:
        break;
      }
    }

    const Expr *Init = VD->getInit();
    if (S.Context.getLangOpts().CPlusPlus && VD->hasLocalStorage() && Init) {
      // C++11 [stmt.dcl]p3:
      //   A program that jumps from a point where a variable with automatic
      //   storage duration is not in scope to a point where it is in scope
      //   is ill-formed unless the variable has scalar type, class type with
      //   a trivial default constructor and a trivial destructor, a
      //   cv-qualified version of one of these types, or an array of one of
      //   the preceding types and is declared without an initializer.

      // C++03 [stmt.dcl]p3:
      //   A program that jumps from a point where a local variable
      //   with automatic storage duration is not in scope to a point
      //   where it is in scope is ill-formed unless the variable has
      //   POD type and is declared without an initializer.

      InDiag = diag::note_protected_by_variable_init;

      // For a variable of (array of) class type declared without an
      // initializer, we will have call-style initialization and the initializer
      // will be the CXXConstructExpr with no intervening nodes.
      if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
        const CXXConstructorDecl *Ctor = CCE->getConstructor();
        if (Ctor->isTrivial() && Ctor->isDefaultConstructor() &&
            VD->getInitStyle() == VarDecl::CallInit) {
          if (OutDiag)
            InDiag = diag::note_protected_by_variable_nontriv_destructor;
          else if (!Ctor->getParent()->isPOD())
            InDiag = diag::note_protected_by_variable_non_pod;
          else
            InDiag = 0;
        }
      }
    }

    return ScopePair(InDiag, OutDiag);
  }

  if (const TypedefNameDecl *TD = dyn_cast<TypedefNameDecl>(D)) {
    if (TD->getUnderlyingType()->isVariablyModifiedType())
      return ScopePair(isa<TypedefDecl>(TD)
                           ? diag::note_protected_by_vla_typedef
                           : diag::note_protected_by_vla_type_alias,
                       0);
  }

  return ScopePair(0U, 0U);
}

/// Build scope information for a declaration that is part of a DeclStmt.
void JumpScopeChecker::BuildScopeInformation(Decl *D, unsigned &ParentScope) {
  // If this decl causes a new scope, push and switch to it.
  std::pair<unsigned,unsigned> Diags = GetDiagForGotoScopeDecl(S, D);
  if (Diags.first || Diags.second) {
    Scopes.push_back(GotoScope(ParentScope, Diags.first, Diags.second,
                               D->getLocation()));
    ParentScope = Scopes.size()-1;
  }

  // If the decl has an initializer, walk it with the potentially new
  // scope we just installed.
  if (VarDecl *VD = dyn_cast<VarDecl>(D))
    if (Expr *Init = VD->getInit())
      BuildScopeInformation(Init, ParentScope);
}

/// Build scope information for variables captured by a block literal.
void JumpScopeChecker::BuildScopeInformation(VarDecl *D,
                                             const BlockDecl *BDecl,
                                             unsigned &ParentScope) {
  // Exclude captured __block variables; there's no destructor
  // associated with the block literal for them.
  if (D->hasAttr<BlocksAttr>())
    return;
  QualType T = D->getType();
  QualType::DestructionKind destructKind = T.isDestructedType();
  if (destructKind != QualType::DK_none) {
    std::pair<unsigned,unsigned> Diags;
    switch (destructKind) {
    case QualType::DK_cxx_destructor:
      Diags = ScopePair(diag::note_enters_block_captures_cxx_obj,
                        diag::note_exits_block_captures_cxx_obj);
      break;
    case QualType::DK_objc_strong_lifetime:
      Diags = ScopePair(diag::note_enters_block_captures_strong,
                        diag::note_exits_block_captures_strong);
      break;
    case QualType::DK_objc_weak_lifetime:
      Diags = ScopePair(diag::note_enters_block_captures_weak,
                        diag::note_exits_block_captures_weak);
      break;
    case QualType::DK_nontrivial_c_struct:
      Diags = ScopePair(diag::note_enters_block_captures_non_trivial_c_struct,
                        diag::note_exits_block_captures_non_trivial_c_struct);
      break;
    case QualType::DK_none:
      llvm_unreachable("non-lifetime captured variable");
    }
    SourceLocation Loc = D->getLocation();
    if (Loc.isInvalid())
      Loc = BDecl->getLocation();
    Scopes.push_back(GotoScope(ParentScope,
                               Diags.first, Diags.second, Loc));
    ParentScope = Scopes.size()-1;
  }
}

/// Build scope information for compound literals of C struct types that are
/// non-trivial to destruct.
void JumpScopeChecker::BuildScopeInformation(CompoundLiteralExpr *CLE,
                                             unsigned &ParentScope) {
  unsigned InDiag = diag::note_enters_compound_literal_scope;
  unsigned OutDiag = diag::note_exits_compound_literal_scope;
  Scopes.push_back(GotoScope(ParentScope, InDiag, OutDiag, CLE->getExprLoc()));
  ParentScope = Scopes.size() - 1;
}

/// BuildScopeInformation - Walk the given statement, which is known to form
/// a coherent scope with the specified parent, adding any labels or gotos to
/// LabelAndGotoScopes and recursively walking the AST as needed.
void JumpScopeChecker::BuildScopeInformation(Stmt *S,
                                             unsigned &origParentScope) {
  // If this is a statement, rather than an expression, scopes within it don't
  // propagate out into the enclosing scope. Otherwise we have to worry
  // about block literals, which have the lifetime of their enclosing statement.
  unsigned independentParentScope = origParentScope;
  unsigned &ParentScope = ((isa<Expr>(S) && !isa<StmtExpr>(S))
                            ? origParentScope : independentParentScope);

  unsigned StmtsToSkip = 0u;

  // If we found a label, remember that it is in ParentScope scope.
  switch (S->getStmtClass()) {
  case Stmt::AddrLabelExprClass:
    IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
    break;

  case Stmt::ObjCForCollectionStmtClass: {
    auto *CS = cast<ObjCForCollectionStmt>(S);
    unsigned Diag = diag::note_protected_by_objc_fast_enumeration;
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, S->getBeginLoc()));
    BuildScopeInformation(CS->getBody(), NewParentScope);
    return;
  }

  case Stmt::IndirectGotoStmtClass:
    // "goto *&&lbl;" is a special case which we treat as equivalent
    // to a normal goto. In addition, we don't calculate scope in the
    // operand (to avoid recording the address-of-label use), which
    // works only because of the restricted set of expressions which
    // we detect as constant targets.
    if (cast<IndirectGotoStmt>(S)->getConstantTarget()) {
      LabelAndGotoScopes[S] = ParentScope;
      Jumps.push_back(S);
      return;
    }

    LabelAndGotoScopes[S] = ParentScope;
    IndirectJumps.push_back(S);
    break;

  case Stmt::SwitchStmtClass:
    // Evaluate the C++17 init stmt and condition variable
    // before entering the scope of the switch statement.
    if (Stmt *Init = cast<SwitchStmt>(S)->getInit()) {
      BuildScopeInformation(Init, ParentScope);
      ++StmtsToSkip;
    }
    if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
      BuildScopeInformation(Var, ParentScope);
      ++StmtsToSkip;
    }
    LLVM_FALLTHROUGH;

  case Stmt::GotoStmtClass:
    // Remember both what scope a goto is in as well as the fact that we have
    // it. This makes the second scan not have to walk the AST again.
    LabelAndGotoScopes[S] = ParentScope;
    Jumps.push_back(S);
    break;

  case Stmt::GCCAsmStmtClass:
    if (auto *GS = dyn_cast<GCCAsmStmt>(S))
      if (GS->isAsmGoto()) {
        // Remember both what scope a goto is in as well as the fact that we
        // have it. This makes the second scan not have to walk the AST again.
        LabelAndGotoScopes[S] = ParentScope;
        AsmJumps.push_back(GS);
        for (auto *E : GS->labels())
          AsmJumpTargets.push_back(E->getLabel());
      }
    break;

  case Stmt::IfStmtClass: {
    IfStmt *IS = cast<IfStmt>(S);
    if (!(IS->isConstexpr() || IS->isObjCAvailabilityCheck()))
      break;

    unsigned Diag = IS->isConstexpr() ? diag::note_protected_by_constexpr_if
                                      : diag::note_protected_by_if_available;

    if (VarDecl *Var = IS->getConditionVariable())
      BuildScopeInformation(Var, ParentScope);

    // Cannot jump into the middle of the condition.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    BuildScopeInformation(IS->getCond(), NewParentScope);

    // Jumps into either arm of an 'if constexpr' are not allowed.
    NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    BuildScopeInformation(IS->getThen(), NewParentScope);
    if (Stmt *Else = IS->getElse()) {
      NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
      BuildScopeInformation(Else, NewParentScope);
    }
    return;
  }

  case Stmt::CXXTryStmtClass: {
    CXXTryStmt *TS = cast<CXXTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_try,
                                 diag::note_exits_cxx_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jump from the catch into the try is not allowed either.
    for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
      CXXCatchStmt *CS = TS->getHandler(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_catch,
                                 diag::note_exits_cxx_catch,
                                 CS->getSourceRange().getBegin()));
      BuildScopeInformation(CS->getHandlerBlock(), NewParentScope);
    }
    return;
  }

  case Stmt::SEHTryStmtClass: {
    SEHTryStmt *TS = cast<SEHTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_try,
                                 diag::note_exits_seh_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumps from __except or __finally into the __try are not allowed either.
    if (SEHExceptStmt *Except = TS->getExceptHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_except,
                                 diag::note_exits_seh_except,
                                 Except->getSourceRange().getBegin()));
      BuildScopeInformation(Except->getBlock(), NewParentScope);
    } else if (SEHFinallyStmt *Finally = TS->getFinallyHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_finally,
                                 diag::note_exits_seh_finally,
                                 Finally->getSourceRange().getBegin()));
      BuildScopeInformation(Finally->getBlock(), NewParentScope);
    }

    return;
  }

  case Stmt::DeclStmtClass: {
    // If this is a declstmt with a VLA definition, it defines a scope from here
    // to the end of the containing context.
    DeclStmt *DS = cast<DeclStmt>(S);
    // The decl statement creates a scope if any of the decls in it are VLAs
    // or have the cleanup attribute.
    for (auto *I : DS->decls())
      BuildScopeInformation(I, origParentScope);
    return;
  }

  case Stmt::ObjCAtTryStmtClass: {
    // Disallow jumps into any part of an @try statement by pushing a scope and
    // walking all sub-stmts in that scope.
    ObjCAtTryStmt *AT = cast<ObjCAtTryStmt>(S);
    // Recursively walk the AST for the @try part.
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_try,
                                 diag::note_exits_objc_try,
                                 AT->getAtTryLoc()));
      if (Stmt *TryPart = AT->getTryBody())
        BuildScopeInformation(TryPart, NewParentScope);
    }

    // Jump from the catch to the finally or try is not valid.
    for (unsigned I = 0, N = AT->getNumCatchStmts(); I != N; ++I) {
      ObjCAtCatchStmt *AC = AT->getCatchStmt(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_catch,
                                 diag::note_exits_objc_catch,
                                 AC->getAtCatchLoc()));
      // Each @catch scope is parented to the enclosing scope rather than
      // nested inside the @try scope.
      BuildScopeInformation(AC->getCatchBody(), NewParentScope);
    }

    // Jump from the finally to the try or catch is not valid.
    if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_finally,
                                 diag::note_exits_objc_finally,
                                 AF->getAtFinallyLoc()));
      BuildScopeInformation(AF, NewParentScope);
    }

    return;
  }

  case Stmt::ObjCAtSynchronizedStmtClass: {
    // Disallow jumps into the protected statement of an @synchronized, but
    // allow jumps into the object expression it protects.
    ObjCAtSynchronizedStmt *AS = cast<ObjCAtSynchronizedStmt>(S);
    // Recursively walk the AST for the @synchronized object expr, it is
    // evaluated in the normal scope.
    BuildScopeInformation(AS->getSynchExpr(), ParentScope);

    // Recursively walk the AST for the @synchronized part, protected by a new
    // scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_synchronized,
                               diag::note_exits_objc_synchronized,
                               AS->getAtSynchronizedLoc()));
    BuildScopeInformation(AS->getSynchBody(), NewParentScope);
    return;
  }

  case Stmt::ObjCAutoreleasePoolStmtClass: {
    // Disallow jumps into the protected statement of an @autoreleasepool.
    ObjCAutoreleasePoolStmt *AS = cast<ObjCAutoreleasePoolStmt>(S);
    // Recursively walk the AST for the @autoreleasepool part, protected by a
    // new scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_autoreleasepool,
                               diag::note_exits_objc_autoreleasepool,
                               AS->getAtLoc()));
    BuildScopeInformation(AS->getSubStmt(), NewParentScope);
    return;
  }

  case Stmt::ExprWithCleanupsClass: {
    // Disallow jumps past full-expressions that use blocks with
    // non-trivial cleanups of their captures. This is theoretically
    // implementable but a lot of work which we haven't felt up to doing.
    ExprWithCleanups *EWC = cast<ExprWithCleanups>(S);
    for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
      if (auto *BDecl = EWC->getObject(i).dyn_cast<BlockDecl *>())
        for (const auto &CI : BDecl->captures()) {
          VarDecl *variable = CI.getVariable();
          BuildScopeInformation(variable, BDecl, origParentScope);
        }
      else if (auto *CLE = EWC->getObject(i).dyn_cast<CompoundLiteralExpr *>())
        BuildScopeInformation(CLE, origParentScope);
      else
        llvm_unreachable("unexpected cleanup object type");
    }
    break;
  }

  case Stmt::MaterializeTemporaryExprClass: {
    // Disallow jumps out of scopes containing temporaries lifetime-extended to
    // automatic storage duration.
    MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
    if (MTE->getStorageDuration() == SD_Automatic) {
      SmallVector<const Expr *, 4> CommaLHS;
      SmallVector<SubobjectAdjustment, 4> Adjustments;
      const Expr *ExtendedObject =
          MTE->getSubExpr()->skipRValueSubobjectAdjustments(CommaLHS,
                                                            Adjustments);
      if (ExtendedObject->getType().isDestructedType()) {
        Scopes.push_back(GotoScope(ParentScope, 0,
                                   diag::note_exits_temporary_dtor,
                                   ExtendedObject->getExprLoc()));
        origParentScope = Scopes.size()-1;
      }
    }
    break;
  }

  case Stmt::CaseStmtClass:
  case Stmt::DefaultStmtClass:
  case Stmt::LabelStmtClass:
    LabelAndGotoScopes[S] = ParentScope;
    break;

  default:
    if (auto *ED = dyn_cast<OMPExecutableDirective>(S)) {
      if (!ED->isStandaloneDirective()) {
        unsigned NewParentScope = Scopes.size();
        Scopes.emplace_back(ParentScope,
                            diag::note_omp_protected_structured_block,
                            diag::note_omp_exits_structured_block,
                            ED->getStructuredBlock()->getBeginLoc());
        BuildScopeInformation(ED->getStructuredBlock(), NewParentScope);
        return;
      }
    }
    break;
  }

  for (Stmt *SubStmt : S->children()) {
    if (!SubStmt)
      continue;
    if (StmtsToSkip) {
      --StmtsToSkip;
      continue;
    }

    // Cases, labels, and defaults aren't "scope parents". It's also
    // important to handle these iteratively instead of recursively in
    // order to avoid blowing out the stack.
    while (true) {
      Stmt *Next;
      if (SwitchCase *SC = dyn_cast<SwitchCase>(SubStmt))
        Next = SC->getSubStmt();
      else if (LabelStmt *LS = dyn_cast<LabelStmt>(SubStmt))
        Next = LS->getSubStmt();
      else
        break;

      LabelAndGotoScopes[SubStmt] = ParentScope;
      SubStmt = Next;
    }

    // Recursively walk the AST.
    BuildScopeInformation(SubStmt, ParentScope);
  }
}

/// VerifyJumps - Verify each element of the Jumps array to see whether it is
/// valid, emitting diagnostics if not.
void JumpScopeChecker::VerifyJumps() {
  while (!Jumps.empty()) {
    Stmt *Jump = Jumps.pop_back_val();

    // Direct gotos.
    if (GotoStmt *GS = dyn_cast<GotoStmt>(Jump)) {
      // The label may not have a statement if it's coming from inline MS ASM.
      if (GS->getLabel()->getStmt()) {
        CheckJump(GS, GS->getLabel()->getStmt(), GS->getGotoLoc(),
                  diag::err_goto_into_protected_scope,
                  diag::ext_goto_into_protected_scope,
                  diag::warn_cxx98_compat_goto_into_protected_scope);
      }
      CheckGotoStmt(GS);
      continue;
    }

    // We only get indirect gotos here when they have a constant target.
    if (IndirectGotoStmt *IGS = dyn_cast<IndirectGotoStmt>(Jump)) {
      LabelDecl *Target = IGS->getConstantTarget();
      CheckJump(IGS, Target->getStmt(), IGS->getGotoLoc(),
                diag::err_goto_into_protected_scope,
                diag::ext_goto_into_protected_scope,
                diag::warn_cxx98_compat_goto_into_protected_scope);
      continue;
    }

    SwitchStmt *SS = cast<SwitchStmt>(Jump);
    for (SwitchCase *SC = SS->getSwitchCaseList(); SC;
         SC = SC->getNextSwitchCase()) {
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(SC)))
        continue;
      SourceLocation Loc;
      if (CaseStmt *CS = dyn_cast<CaseStmt>(SC))
        Loc = CS->getBeginLoc();
      else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(SC))
        Loc = DS->getBeginLoc();
      else
        Loc = SC->getBeginLoc();
      CheckJump(SS, SC, Loc, diag::err_switch_into_protected_scope, 0,
                diag::warn_cxx98_compat_switch_into_protected_scope);
    }
  }
}

/// VerifyIndirectOrAsmJumps - Verify whether any possible indirect goto or
/// asm goto jump might cross a protection boundary. Unlike direct jumps,
/// indirect or asm goto jumps count cleanups as protection boundaries:
/// since there's no way to know where the jump is going, we can't implicitly
/// run the right cleanups the way we can with direct jumps.
/// Thus, an indirect/asm jump is "trivial" if it bypasses no
/// initializations and no teardowns. More formally, an indirect/asm jump
/// from A to B is trivial if the path out from A to DCA(A,B) is
/// trivial and the path in from DCA(A,B) to B is trivial, where
/// DCA(A,B) is the deepest common ancestor of A and B.
/// Jump-triviality is transitive but asymmetric.
///
/// A path in is trivial if none of the entered scopes have an InDiag.
/// A path out is trivial if none of the exited scopes have an OutDiag.
///
/// Under these definitions, this function checks that the indirect
/// jump between A and B is trivial for every indirect goto statement A
/// and every label B whose address was taken in the function.
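/// For example (illustrative), an indirect goto that would enter a VLA's
/// scope is non-trivial because that scope has an InDiag, and one that would
/// exit the scope of a __block variable is non-trivial because that scope
/// has an OutDiag; both cases are diagnosed here.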
void JumpScopeChecker::VerifyIndirectOrAsmJumps(bool IsAsmGoto) {
  SmallVector<Stmt*, 4> GotoJumps = IsAsmGoto ? AsmJumps : IndirectJumps;
  if (GotoJumps.empty())
    return;
  SmallVector<LabelDecl *, 4> JumpTargets =
      IsAsmGoto ? AsmJumpTargets : IndirectJumpTargets;
  // If there aren't any address-of-label expressions in this function,
  // complain about the first indirect goto.
  if (JumpTargets.empty()) {
    assert(!IsAsmGoto && "only indirect goto can get here");
    S.Diag(GotoJumps[0]->getBeginLoc(),
           diag::err_indirect_goto_without_addrlabel);
    return;
  }
  // Collect a single representative of every scope containing an
  // indirect or asm goto. For most code bases, this substantially cuts
  // down on the number of jump sites we'll have to consider later.
  typedef std::pair<unsigned, Stmt*> JumpScope;
  SmallVector<JumpScope, 32> JumpScopes;
  {
    llvm::DenseMap<unsigned, Stmt*> JumpScopesMap;
    for (SmallVectorImpl<Stmt *>::iterator I = GotoJumps.begin(),
                                           E = GotoJumps.end();
         I != E; ++I) {
      Stmt *IG = *I;
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(IG)))
        continue;
      unsigned IGScope = LabelAndGotoScopes[IG];
      Stmt *&Entry = JumpScopesMap[IGScope];
      if (!Entry) Entry = IG;
    }
    JumpScopes.reserve(JumpScopesMap.size());
    for (llvm::DenseMap<unsigned, Stmt *>::iterator I = JumpScopesMap.begin(),
                                                    E = JumpScopesMap.end();
         I != E; ++I)
      JumpScopes.push_back(*I);
  }

  // Collect a single representative of every scope containing a
  // label whose address was taken somewhere in the function.
  // For most code bases, there will be only one such scope.
  llvm::DenseMap<unsigned, LabelDecl*> TargetScopes;
  for (SmallVectorImpl<LabelDecl *>::iterator I = JumpTargets.begin(),
                                              E = JumpTargets.end();
       I != E; ++I) {
    LabelDecl *TheLabel = *I;
    if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(TheLabel->getStmt())))
      continue;
    unsigned LabelScope = LabelAndGotoScopes[TheLabel->getStmt()];
    LabelDecl *&Target = TargetScopes[LabelScope];
    if (!Target) Target = TheLabel;
  }

  // For each target scope, make sure it's trivially reachable from
  // every scope containing a jump site.
  //
  // A path between scopes always consists of exiting zero or more
  // scopes, then entering zero or more scopes. We build a set of
  // scopes S from which the target scope can be trivially
  // entered, then verify that every jump scope can be trivially
  // exited to reach a scope in S.
  llvm::BitVector Reachable(Scopes.size(), false);
  for (llvm::DenseMap<unsigned,LabelDecl*>::iterator
         TI = TargetScopes.begin(), TE = TargetScopes.end(); TI != TE; ++TI) {
    unsigned TargetScope = TI->first;
    LabelDecl *TargetLabel = TI->second;

    Reachable.reset();

    // Mark all the enclosing scopes from which you can safely jump
    // into the target scope. 'Min' will end up being the index of
    // the shallowest such scope.
    unsigned Min = TargetScope;
    while (true) {
      Reachable.set(Min);

      // Don't go beyond the outermost scope.
      if (Min == 0) break;

      // Stop if we can't trivially enter the current scope.
      if (Scopes[Min].InDiag) break;

      Min = Scopes[Min].ParentScope;
    }

    // Walk through all the jump sites, checking that they can trivially
    // reach this label scope.
    for (SmallVectorImpl<JumpScope>::iterator
           I = JumpScopes.begin(), E = JumpScopes.end(); I != E; ++I) {
      unsigned Scope = I->first;

      // Walk out the "scope chain" for this scope, looking for a scope
      // we've marked reachable. For well-formed code this amortizes
      // to O(JumpScopes.size() / Scopes.size()): we only iterate
      // when we see something unmarked, and in well-formed code we
      // mark everything we iterate past.
      bool IsReachable = false;
      while (true) {
        if (Reachable.test(Scope)) {
          // If we find something reachable, mark all the scopes we just
          // walked through as reachable.
          for (unsigned S = I->first; S != Scope; S = Scopes[S].ParentScope)
            Reachable.set(S);
          IsReachable = true;
          break;
        }

        // Don't walk out if we've reached the top-level scope or we've
        // gotten shallower than the shallowest reachable scope.
        if (Scope == 0 || Scope < Min) break;

        // Don't walk out through an out-diagnostic.
        if (Scopes[Scope].OutDiag) break;

        Scope = Scopes[Scope].ParentScope;
      }

      // Only diagnose if we didn't find something.
      if (IsReachable) continue;

      DiagnoseIndirectOrAsmJump(I->second, I->first, TargetLabel, TargetScope);
    }
  }
}

/// Return true if a particular error+note combination must be downgraded to a
/// warning in Microsoft mode.
static bool IsMicrosoftJumpWarning(unsigned JumpDiag, unsigned InDiagNote) {
  return (JumpDiag == diag::err_goto_into_protected_scope &&
          (InDiagNote == diag::note_protected_by_variable_init ||
           InDiagNote == diag::note_protected_by_variable_nontriv_destructor));
}
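// For example (illustrative), MSVC accepts a goto that jumps into the scope
// of an initialized local variable, so in MSVC compatibility mode CheckJump
// downgrades that error to the ext_goto_into_protected_scope warning.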

/// Return true if a particular note should be downgraded to a compatibility
/// warning in C++11 mode.
static bool IsCXX98CompatWarning(Sema &S, unsigned InDiagNote) {
  return S.getLangOpts().CPlusPlus11 &&
         InDiagNote == diag::note_protected_by_variable_non_pod;
}
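// For example (illustrative), C++11 permits a jump over a non-POD variable
// whose default constructor and destructor are both trivial; such a jump was
// ill-formed in C++98, so it is reported under -Wc++98-compat rather than as
// an error.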

/// Produce primary diagnostic for an indirect jump statement.
static void DiagnoseIndirectOrAsmJumpStmt(Sema &S, Stmt *Jump,
                                          LabelDecl *Target, bool &Diagnosed) {
  if (Diagnosed)
    return;
  bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
  S.Diag(Jump->getBeginLoc(), diag::err_indirect_goto_in_protected_scope)
      << IsAsmGoto;
  S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
      << IsAsmGoto;
  Diagnosed = true;
}

/// Produce note diagnostics for a jump into a protected scope.
void JumpScopeChecker::NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes) {
  if (CHECK_PERMISSIVE(ToScopes.empty()))
    return;
  for (unsigned I = 0, E = ToScopes.size(); I != E; ++I)
    if (Scopes[ToScopes[I]].InDiag)
      S.Diag(Scopes[ToScopes[I]].Loc, Scopes[ToScopes[I]].InDiag);
}

/// Diagnose an indirect jump which is known to cross scopes.
void JumpScopeChecker::DiagnoseIndirectOrAsmJump(Stmt *Jump, unsigned JumpScope,
                                                 LabelDecl *Target,
                                                 unsigned TargetScope) {
  if (CHECK_PERMISSIVE(JumpScope == TargetScope))
    return;

  unsigned Common = GetDeepestCommonScope(JumpScope, TargetScope);
  bool Diagnosed = false;

  // Walk out the scope chain until we reach the common ancestor.
  for (unsigned I = JumpScope; I != Common; I = Scopes[I].ParentScope)
    if (Scopes[I].OutDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
    }

  SmallVector<unsigned, 10> ToScopesCXX98Compat;

  // Now walk into the scopes containing the label whose address was taken.
  for (unsigned I = TargetScope; I != Common; I = Scopes[I].ParentScope)
    if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].InDiag);
    }

  // Diagnose this jump if it would be ill-formed in C++98.
  if (!Diagnosed && !ToScopesCXX98Compat.empty()) {
    bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
    S.Diag(Jump->getBeginLoc(),
           diag::warn_cxx98_compat_indirect_goto_in_protected_scope)
        << IsAsmGoto;
    S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
        << IsAsmGoto;
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

/// CheckJump - Validate that the specified jump statement is valid: that it is
/// jumping within or out of its current scope, not into a deeper one.
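/// For example (illustrative):
///   goto in;                      // error: jumps into the VLA's scope
///   { int vla[n]; in:; }
///   { int vla2[n]; goto out; }    // OK: jumping out of a scope is fine
///  out:;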
void JumpScopeChecker::CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                                 unsigned JumpDiagError, unsigned JumpDiagWarning,
                                 unsigned JumpDiagCXX98Compat) {
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(From)))
    return;
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(To)))
    return;

  unsigned FromScope = LabelAndGotoScopes[From];
  unsigned ToScope = LabelAndGotoScopes[To];

  // Common case: exactly the same scope, which is fine.
  if (FromScope == ToScope) return;

  // Warn on gotos out of __finally blocks.
  if (isa<GotoStmt>(From) || isa<IndirectGotoStmt>(From)) {
    // If FromScope > ToScope, FromScope is more nested and the jump goes to a
    // less nested scope. Check if it crosses a __finally along the way.
    for (unsigned I = FromScope; I > ToScope; I = Scopes[I].ParentScope) {
      if (Scopes[I].InDiag == diag::note_protected_by_seh_finally) {
        S.Diag(From->getBeginLoc(), diag::warn_jump_out_of_seh_finally);
        break;
      }
      if (Scopes[I].InDiag == diag::note_omp_protected_structured_block) {
        S.Diag(From->getBeginLoc(), diag::err_goto_into_protected_scope);
        S.Diag(To->getBeginLoc(), diag::note_omp_exits_structured_block);
        break;
      }
    }
  }

  unsigned CommonScope = GetDeepestCommonScope(FromScope, ToScope);

  // It's okay to jump out from a nested scope.
  if (CommonScope == ToScope) return;

  // Pull out (and reverse) any scopes we might need to diagnose skipping.
  SmallVector<unsigned, 10> ToScopesCXX98Compat;
  SmallVector<unsigned, 10> ToScopesError;
  SmallVector<unsigned, 10> ToScopesWarning;
  for (unsigned I = ToScope; I != CommonScope; I = Scopes[I].ParentScope) {
    if (S.getLangOpts().MSVCCompat && JumpDiagWarning != 0 &&
        IsMicrosoftJumpWarning(JumpDiagError, Scopes[I].InDiag))
      ToScopesWarning.push_back(I);
    else if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag)
      ToScopesError.push_back(I);
  }

  // Handle warnings.
  if (!ToScopesWarning.empty()) {
    S.Diag(DiagLoc, JumpDiagWarning);
    NoteJumpIntoScopes(ToScopesWarning);
  }

  // Handle errors.
  if (!ToScopesError.empty()) {
    S.Diag(DiagLoc, JumpDiagError);
    NoteJumpIntoScopes(ToScopesError);
  }

  // Handle -Wc++98-compat warnings if the jump is well-formed.
  if (ToScopesError.empty() && !ToScopesCXX98Compat.empty()) {
    S.Diag(DiagLoc, JumpDiagCXX98Compat);
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

void JumpScopeChecker::CheckGotoStmt(GotoStmt *GS) {
  if (GS->getLabel()->isMSAsmLabel()) {
    S.Diag(GS->getGotoLoc(), diag::err_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
    S.Diag(GS->getLabel()->getLocation(), diag::note_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
  }
}

void Sema::DiagnoseInvalidJumps(Stmt *Body) {
  (void)JumpScopeChecker(Body, *this);
}