//===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the C++ expression evaluation engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/ParentMap.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"

using namespace clang;
using namespace ento;

/// Evaluate a MaterializeTemporaryExpr: lazily create a temporary region for
/// the materialized sub-expression (if one is needed) and transition to a new
/// node carrying the updated state.
void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  // Strip parentheses so the temporary region is keyed on the actual
  // initializing expression.
  const Expr *tempExpr = ME->getSubExpr()->IgnoreParens();
  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
  Bldr.generateNode(ME, Pred, state);
}

// FIXME: This is the sort of code that should eventually live in a Core
// checker rather than as a special case in ExprEngine.
/// Model a trivial copy/move constructor or trivial copy/move assignment
/// operator as a simple load of the source object followed by a bind into
/// the destination ('this') region, instead of inlining the callee.
/// Assignment operators additionally bind the call expression to the
/// destination lvalue (they return *this by reference).
void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
                                    const CallEvent &Call) {
  SVal ThisVal;
  bool AlwaysReturnsLValue;
  const CXXRecordDecl *ThisRD = nullptr;
  if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
    assert(Ctor->getDecl()->isTrivial());
    assert(Ctor->getDecl()->isCopyOrMoveConstructor());
    ThisVal = Ctor->getCXXThisVal();
    ThisRD = Ctor->getDecl()->getParent();
    // A constructor call is a prvalue, not an lvalue.
    AlwaysReturnsLValue = false;
  } else {
    // Otherwise this must be a trivial operator= (copy or move assignment).
    assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
    assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
           OO_Equal);
    ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
    ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
    AlwaysReturnsLValue = true;
  }

  assert(ThisRD);
  if (ThisRD->isEmpty()) {
    // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal
    // and bind it and RegionStore would think that the actual value
    // in this region at this offset is unknown.
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();

  ExplodedNodeSet Dst;
  Bldr.takeNodes(Pred);

  // The source object is the first (and only) argument of the copy/move.
  SVal V = Call.getArgSVal(0);

  // If the value being copied is not unknown, load from its location to get
  // an aggregate rvalue.
  if (Optional<Loc> L = V.getAs<Loc>())
    V = Pred->getState()->getSVal(*L);
  else
    assert(V.isUnknownOrUndef());

  const Expr *CallExpr = Call.getOriginExpr();
  evalBind(Dst, CallExpr, Pred, ThisVal, V, true);

  PostStmt PS(CallExpr, LCtx);
  for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end();
       I != E; ++I) {
    ProgramStateRef State = (*I)->getState();
    if (AlwaysReturnsLValue)
      // operator= returns *this, i.e. the destination lvalue.
      State = State->BindExpr(CallExpr, LCtx, ThisVal);
    else
      State = bindReturnValue(Call, LCtx, State);
    Bldr.generateNode(PS, State, *I);
  }
}

/// If \p Ty is an array type, drill down to the innermost element type
/// (updating \p Ty in place), set \p IsArray, and return the lvalue of the
/// element at index \p Idx within \p LValue. Otherwise return \p LValue
/// unchanged.
SVal ExprEngine::makeElementRegion(ProgramStateRef State, SVal LValue,
                                   QualType &Ty, bool &IsArray, unsigned Idx) {
  SValBuilder &SVB = State->getStateManager().getSValBuilder();
  ASTContext &Ctx = SVB.getContext();

  if (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
    // Multi-dimensional arrays: peel off every array level so that Ty ends
    // up as the ultimate element type.
    while (AT) {
      Ty = AT->getElementType();
      AT = dyn_cast<ArrayType>(AT->getElementType());
    }
    LValue = State->getLValue(Ty, SVB.makeArrayIndex(Idx), LValue);
    IsArray = true;
  }

  return LValue;
}

/// Compute the lvalue (or, at the top frame, a conjured symbol) denoting the
/// object that construct-expression \p E initializes, by interpreting the
/// CFG-provided construction context \p CC. Flags describing any modeling
/// limitations are reported back through \p CallOpts.
SVal ExprEngine::computeObjectUnderConstruction(
    const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, EvalCallOptions &CallOpts, unsigned Idx) {
  SValBuilder &SVB = getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &ACtx = SVB.getContext();

  // Compute the target region by exploring the construction context.
  if (CC) {
    switch (CC->getKind()) {
    case ConstructionContext::CXX17ElidedCopyVariableKind:
    case ConstructionContext::SimpleVariableKind: {
      // Constructing directly into a local variable.
      const auto *DSCC = cast<VariableConstructionContext>(CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(DS->getSingleDecl());
      QualType Ty = Var->getType();
      return makeElementRegion(State, State->getLValue(Var, LCtx), Ty,
                               CallOpts.IsArrayCtorOrDtor, Idx);
    }
    case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
    case ConstructionContext::SimpleConstructorInitializerKind: {
      // Constructing a base, a delegated-to object, or a member within a
      // constructor's initializer list.
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
      Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(ThisPtr);
      if (Init->isBaseInitializer()) {
        const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion());
        const CXXRecordDecl *BaseClass =
            Init->getBaseClass()->getAsCXXRecordDecl();
        const auto *BaseReg =
            MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg,
                                         Init->isBaseVirtual());
        return SVB.makeLoc(BaseReg);
      }
      if (Init->isDelegatingInitializer())
        // Delegating constructors initialize the complete object directly.
        return ThisVal;

      const ValueDecl *Field;
      SVal FieldVal;
      if (Init->isIndirectMemberInitializer()) {
        Field = Init->getIndirectMember();
        FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
      } else {
        Field = Init->getMember();
        FieldVal = State->getLValue(Init->getMember(), ThisVal);
      }

      QualType Ty = Field->getType();
      return makeElementRegion(State, FieldVal, Ty, CallOpts.IsArrayCtorOrDtor,
                               Idx);
    }
    case ConstructionContext::NewAllocatedObjectKind: {
      // Constructing into storage returned by operator new.
      if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
        const auto *NE = NECC->getCXXNewExpr();
        // The allocator's return value was stashed by
        // VisitCXXNewAllocatorCall.
        SVal V = *getObjectUnderConstruction(State, NE, LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
          if (NE->isArray()) {
            // TODO: In fact, we need to call the constructor for every
            // allocated element, not just the first one!
            CallOpts.IsArrayCtorOrDtor = true;

            auto R = MRMgr.getElementRegion(NE->getType()->getPointeeType(),
                                            svalBuilder.makeArrayIndex(Idx), MR,
                                            SVB.getContext());

            return loc::MemRegionVal(R);
          }
          return V;
        }
        // TODO: Detect when the allocator returns a null pointer.
        // Constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find construction context at all.
          break;
        }
        if (isa<BlockInvocationContext>(CallerLCtx)) {
          // Unwrap block invocation contexts. They're mostly part of
          // the current stack frame.
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }
        // Recurse into the caller's construction context for the call site.
        return computeObjectUnderConstruction(
            cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis. We do not know where the
        // object is returned to. Conjure a symbolic region for the return
        // value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it dead
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        return SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC, RegionTy,
                                    currBldrCtx->blockCount());
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;

      SVal V = computeObjectUnderConstruction(
          TCC->getConstructorAfterElision(), State, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; this is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return V;

      // Copy elision failed. Revert the changes and proceed as if we have
      // a simple temporary.
      CallOpts = PreElideCallOpts;
      CallOpts.IsElidableCtorThatHasNotBeenElided = true;
      LLVM_FALLTHROUGH;
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();

      CallOpts.IsTemporaryCtorOrDtor = true;
      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          assert(MTE->getStorageDuration() != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
            // Automatic destructors aren't quite working in this case
            // on the CFG side. We should warn the caller about that.
            // FIXME: Is there a better way to retrieve this information from
            // the MaterializeTemporaryExpr?
            CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
          }
        }

        if (MTE->getStorageDuration() == SD_Static ||
            MTE->getStorageDuration() == SD_Thread)
          return loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E));
      }

      return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
    }
    case ConstructionContext::ArgumentKind: {
      // Arguments are technically temporaries.
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *ACC = cast<ArgumentConstructionContext>(CC);
      const Expr *E = ACC->getCallLikeExpr();
      unsigned Idx = ACC->getIndex();

      CallEventManager &CEMgr = getStateManager().getCallEventManager();
      // Foresee the callee's stack frame and return the parameter region the
      // argument will be constructed into, or None if that can't be done.
      auto getArgLoc = [&](CallEventRef<> Caller) -> Optional<SVal> {
        const LocationContext *FutureSFC =
            Caller->getCalleeStackFrame(currBldrCtx->blockCount());
        // Return early if we are unable to reliably foresee
        // the future stack frame.
        if (!FutureSFC)
          return None;

        // This should be equivalent to Caller->getDecl() for now, but
        // FutureSFC->getDecl() is likely to support better stuff (like
        // virtual functions) earlier.
        const Decl *CalleeD = FutureSFC->getDecl();

        // FIXME: Support for variadic arguments is not implemented here yet.
        if (CallEvent::isVariadic(CalleeD))
          return None;

        // Operator arguments do not correspond to operator parameters
        // because this-argument is implemented as a normal argument in
        // operator call expressions but not in operator declarations.
        const TypedValueRegion *TVR = Caller->getParameterLocation(
            *Caller->getAdjustedParameterIndex(Idx), currBldrCtx->blockCount());
        if (!TVR)
          return None;

        return loc::MemRegionVal(TVR);
      };

      if (const auto *CE = dyn_cast<CallExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx);
        if (Optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
        // Don't bother figuring out the target region for the future
        // constructor because we won't need it.
        CallEventRef<> Caller =
            CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx);
        if (Optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx);
        if (Optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      }
    }
    } // switch (CC->getKind())
  }

  // If we couldn't find an existing region to construct into, assume we're
  // constructing a temporary. Notify the caller of our failure.
  CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
}

/// Record \p V (the object under construction for \p E) in the program state,
/// in whichever objects-under-construction map(s) the construction context
/// \p CC calls for. Mirrors the case structure of
/// computeObjectUnderConstruction().
ProgramStateRef ExprEngine::updateObjectsUnderConstruction(
    SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, const EvalCallOptions &CallOpts) {
  if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
    // Sounds like we failed to find the target region and therefore
    // copy elision failed. There's nothing we can do about it here.
    return State;
  }

  // See if we're constructing an existing region by looking at the
  // current construction context.
  assert(CC && "Computed target region without construction context?");
  switch (CC->getKind()) {
  case ConstructionContext::CXX17ElidedCopyVariableKind:
  case ConstructionContext::SimpleVariableKind: {
    const auto *DSCC = cast<VariableConstructionContext>(CC);
    return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V);
  }
  case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
  case ConstructionContext::SimpleConstructorInitializerKind: {
    const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
    const auto *Init = ICC->getCXXCtorInitializer();
    // Base and delegating initializers handled above
    assert(Init->isAnyMemberInitializer() &&
           "Base and delegating initializers should have been handled by"
           "computeObjectUnderConstruction()");
    return addObjectUnderConstruction(State, Init, LCtx, V);
  }
  case ConstructionContext::NewAllocatedObjectKind: {
    // Nothing to record: the allocator's return value is already tracked.
    return State;
  }
  case ConstructionContext::SimpleReturnedValueKind:
  case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
    const StackFrameContext *SFC = LCtx->getStackFrame();
    const LocationContext *CallerLCtx = SFC->getParent();
    if (!CallerLCtx) {
      // No extra work is necessary in top frame.
      return State;
    }

    auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                   .getAs<CFGCXXRecordTypedCall>();
    assert(RTC && "Could not have had a target region without it");
    if (isa<BlockInvocationContext>(CallerLCtx)) {
      // Unwrap block invocation contexts. They're mostly part of
      // the current stack frame.
      CallerLCtx = CallerLCtx->getParent();
      assert(!isa<BlockInvocationContext>(CallerLCtx));
    }

    // Record the object in the caller's frame, against the call site.
    return updateObjectsUnderConstruction(V,
        cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
        RTC->getConstructionContext(), CallOpts);
  }
  case ConstructionContext::ElidedTemporaryObjectKind: {
    assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
    if (!CallOpts.IsElidableCtorThatHasNotBeenElided) {
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
      State = updateObjectsUnderConstruction(
          V, TCC->getConstructorAfterElision(), State, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // Remember that we've elided the constructor.
      State = addObjectUnderConstruction(
          State, TCC->getConstructorAfterElision(), LCtx, V);

      // Remember that we've elided the destructor.
      if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
        State = elideDestructor(State, BTE, LCtx);

      // Instead of materialization, shamelessly return
      // the final object destination.
      if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
        State = addObjectUnderConstruction(State, MTE, LCtx, V);

      return State;
    }
    // If we decided not to elide the constructor, proceed as if
    // it's a simple temporary.
    LLVM_FALLTHROUGH;
  }
  case ConstructionContext::SimpleTemporaryObjectKind: {
    const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
    if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
      State = addObjectUnderConstruction(State, MTE, LCtx, V);

    return State;
  }
  case ConstructionContext::ArgumentKind: {
    const auto *ACC = cast<ArgumentConstructionContext>(CC);
    if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    // Key argument objects by (call expression, argument index).
    return addObjectUnderConstruction(
        State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V);
  }
  }
  llvm_unreachable("Unhandled construction context!");
}

/// Common implementation for evaluating CXXConstructExpr and
/// CXXInheritedCtorInitExpr: find the target region from the construction
/// context, run pre/post checker callbacks, and evaluate (or trivially
/// model) the constructor call.
void ExprEngine::handleConstructor(const Expr *E,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &destNodes) {
  // Exactly one of CE/CIE is non-null.
  const auto *CE = dyn_cast<CXXConstructExpr>(E);
  const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E);
  assert(CE || CIE);

  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  SVal Target = UnknownVal();

  if (CE) {
    if (Optional<SVal> ElidedTarget =
            getObjectUnderConstruction(State, CE, LCtx)) {
      // We've previously modeled an elidable constructor by pretending that it
      // in fact constructs into the correct target. This constructor can
      // therefore be skipped.
      Target = *ElidedTarget;
      StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
      State = finishObjectConstruction(State, CE, LCtx);
      if (auto L = Target.getAs<Loc>())
        State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
      Bldr.generateNode(CE, Pred, State);
      return;
    }
  }

  EvalCallOptions CallOpts;
  auto C = getCurrentCFGElement().getAs<CFGConstructor>();
  assert(C || getCurrentCFGElement().getAs<CFGStmt>());
  const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;

  const CXXConstructExpr::ConstructionKind CK =
      CE ? CE->getConstructionKind() : CIE->getConstructionKind();
  switch (CK) {
  case CXXConstructExpr::CK_Complete: {
    // Inherited constructors are always base class constructors.
    assert(CE && !CIE && "A complete constructor is inherited?!");

    // For array constructors, keep track of which element we're at.
    unsigned Idx = 0;
    if (CE->getType()->isArrayType()) {
      Idx = getIndexOfElementToConstruct(State, CE, LCtx).value_or(0u);
      State = setIndexOfElementToConstruct(State, CE, LCtx, Idx + 1);
    }

    // The target region is found from construction context.
    std::tie(State, Target) =
        handleConstructionContext(CE, State, LCtx, CC, CallOpts, Idx);
    break;
  }
  case CXXConstructExpr::CK_VirtualBase: {
    // Make sure we are not calling virtual base class initializers twice.
    // Only the most-derived object should initialize virtual base classes.
    const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
        LCtx->getStackFrame()->getCallSite());
    assert(
        (!OuterCtor ||
         OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete ||
         OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) &&
        ("This virtual base should have already been initialized by "
         "the most derived class!"));
    (void)OuterCtor;
    LLVM_FALLTHROUGH;
  }
  case CXXConstructExpr::CK_NonVirtualBase:
    // In C++17, classes with non-virtual bases may be aggregates, so they would
    // be initialized as aggregates without a constructor call, so we may have
    // a base class constructed directly into an initializer list without
    // having the derived-class constructor call on the previous stack frame.
    // Initializer lists may be nested into more initializer lists that
    // correspond to surrounding aggregate initializations.
    // FIXME: For now this code essentially bails out. We need to find the
    // correct target region and set it.
    // FIXME: Instead of relying on the ParentMap, we should have the
    // trigger-statement (InitListExpr in this case) passed down from CFG or
    // otherwise always available during construction.
    if (isa_and_nonnull<InitListExpr>(LCtx->getParentMap().getParent(E))) {
      MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
      Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
      CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
      break;
    }
    LLVM_FALLTHROUGH;
  case CXXConstructExpr::CK_Delegating: {
    const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
    Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
                                              LCtx->getStackFrame());
    SVal ThisVal = State->getSVal(ThisPtr);

    if (CK == CXXConstructExpr::CK_Delegating) {
      Target = ThisVal;
    } else {
      // Cast to the base type.
      bool IsVirtual = (CK == CXXConstructExpr::CK_VirtualBase);
      SVal BaseVal =
          getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual);
      Target = BaseVal;
    }
    break;
  }
  }

  if (State != Pred->getState()) {
    static SimpleProgramPointTag T("ExprEngine",
                                   "Prepare for object construction");
    ExplodedNodeSet DstPrepare;
    StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
    BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind);
    assert(DstPrepare.size() <= 1);
    if (DstPrepare.size() == 0)
      return;
    Pred = *BldrPrepare.begin();
  }

  const MemRegion *TargetRegion = Target.getAsRegion();
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call =
      CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall(
                CIE, TargetRegion, State, LCtx)
          : (CallEventRef<>)CEMgr.getCXXConstructorCall(
                CE, TargetRegion, State, LCtx);

  ExplodedNodeSet DstPreVisit;
  getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this);

  ExplodedNodeSet PreInitialized;
  if (CE) {
    // FIXME: Is it possible and/or useful to do this before PreStmt?
    StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
    for (ExplodedNodeSet::iterator I = DstPreVisit.begin(),
                                   E = DstPreVisit.end();
         I != E; ++I) {
      ProgramStateRef State = (*I)->getState();
      if (CE->requiresZeroInitialization()) {
        // FIXME: Once we properly handle constructors in new-expressions, we'll
        // need to invalidate the region before setting a default value, to make
        // sure there aren't any lingering bindings around. This probably needs
        // to happen regardless of whether or not the object is zero-initialized
        // to handle random fields of a placement-initialized object picking up
        // old bindings. We might only want to do it when we need to, though.
        // FIXME: This isn't actually correct for arrays -- we need to zero-
        // initialize the entire array, not just the first element -- but our
        // handling of arrays everywhere else is weak as well, so this shouldn't
        // actually make things worse. Placement new makes this tricky as well,
        // since it's then possible to be initializing one part of a multi-
        // dimensional array.
        State = State->bindDefaultZero(Target, LCtx);
      }

      Bldr.generateNode(CE, *I, State, /*tag=*/nullptr,
                        ProgramPoint::PreStmtKind);
    }
  } else {
    PreInitialized = DstPreVisit;
  }

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
                                            *Call, *this);

  ExplodedNodeSet DstEvaluated;

  if (CE && CE->getConstructor()->isTrivial() &&
      CE->getConstructor()->isCopyOrMoveConstructor() &&
      !CallOpts.IsArrayCtorOrDtor) {
    StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);
    // FIXME: Handle other kinds of trivial constructors as well.
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      performTrivialCopy(Bldr, *I, *Call);

  } else {
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      getCheckerManager().runCheckersForEvalCall(DstEvaluated, *I, *Call, *this,
                                                 CallOpts);
  }

  // If the CFG was constructed without elements for temporary destructors
  // and the just-called constructor created a temporary object then
  // stop exploration if the temporary object has a noreturn constructor.
  // This can lose coverage because the destructor, if it were present
  // in the CFG, would be called at the end of the full expression or
  // later (for life-time extended temporaries) -- but avoids infeasible
  // paths when no-return temporary destructors are used for assertions.
  ExplodedNodeSet DstEvaluatedPostProcessed;
  StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx);
  const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
    if (llvm::isa_and_nonnull<CXXTempObjectRegion>(TargetRegion) &&
        cast<CXXConstructorDecl>(Call->getDecl())
            ->getParent()
            ->isAnyDestructorNoReturn()) {

      // If we've inlined the constructor, then DstEvaluated would be empty.
      // In this case we still want a sink, which could be implemented
      // in processCallExit. But we don't have that implemented at the moment,
      // so if you hit this assertion, see if you can avoid inlining
      // the respective constructor when analyzer-config cfg-temporary-dtors
      // is set to false.
      // Otherwise there's nothing wrong with inlining such constructor.
      assert(!DstEvaluated.empty() &&
             "We should not have inlined this constructor!");

      for (ExplodedNode *N : DstEvaluated) {
        Bldr.generateSink(E, N, N->getState());
      }

      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
      return;
    }
  }

  // If there were other constructors called for object-type arguments
  // of this constructor, clean them up.
  ExplodedNodeSet DstPostArgumentCleanup;
  for (ExplodedNode *I : DstEvaluatedPostProcessed)
    finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);

  ExplodedNodeSet DstPostCall;
  getCheckerManager().runCheckersForPostCall(DstPostCall,
                                             DstPostArgumentCleanup,
                                             *Call, *this);
  getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this);
}

/// Evaluate a CXXConstructExpr; thin wrapper over handleConstructor().
void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

/// Evaluate a CXXInheritedCtorInitExpr; thin wrapper over handleConstructor().
void ExprEngine::VisitCXXInheritedCtorInitExpr(
    const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred,
    ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

/// Evaluate a destructor call on the object of type \p ObjectType living in
/// region \p Dest, triggered by statement \p S. Attempts to recover when the
/// destructor declaration or the target region is missing.
void ExprEngine::VisitCXXDestructor(QualType ObjectType,
                                    const MemRegion *Dest,
                                    const Stmt *S,
                                    bool IsBaseDtor,
                                    ExplodedNode *Pred,
                                    ExplodedNodeSet &Dst,
                                    EvalCallOptions &CallOpts) {
  assert(S && "A destructor without a trigger!");
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  // FIXME: There should always be a Decl, otherwise the destructor call
  // shouldn't have been added to the CFG in the first place.
  if (!DtorDecl) {
    // Skip the invalid destructor. We cannot simply return because
    // it would interrupt the analysis instead.
    static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
    // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
    PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  if (!Dest) {
    // We're trying to destroy something that is not a region. This may happen
    // for a variety of reasons (unknown target region, concrete integer instead
    // of target region, etc.). The current code makes an attempt to recover.
    // FIXME: We probably don't really need to recover when we're dealing
    // with concrete integers specifically.
    CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
    if (const Expr *E = dyn_cast_or_null<Expr>(S)) {
      Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext());
    } else {
      static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
      NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
      Bldr.generateSink(Pred->getLocation().withTag(&T),
                        Pred->getState(), Pred);
      return;
    }
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDestructorCall> Call =
      CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx);

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Call->getSourceRange().getBegin(),
                                "Error evaluating destructor");

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstInvalidated;
  StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
       I != E; ++I)
    defaultEvalCall(Bldr, *I, *Call, CallOpts);

  getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
                                             *Call, *this);
}

/// Evaluate the allocator (operator new) call of a new-expression, stashing
/// its return value as an object-under-construction until the CXXNewExpr
/// itself is processed.
void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                CNE->getBeginLoc(),
                                "Error evaluating New Allocator Call");
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstPostCall;
  StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
  for (ExplodedNode *I : DstPreCall) {
    // FIXME: Provide evalCall for checkers?
    defaultEvalCall(CallBldr, I, *Call);
  }
  // If the call is inlined, DstPostCall will be empty and we bail out now.

  // Store return value of operator new() for future use, until the actual
  // CXXNewExpr gets processed.
  ExplodedNodeSet DstPostValue;
  StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
  for (ExplodedNode *I : DstPostCall) {
    // FIXME: Because CNE serves as the "call site" for the allocator (due to
    // lack of a better expression in the AST), the conjured return value symbol
    // is going to be of the same type (C++ object pointer type). Technically
    // this is not correct because the operator new's prototype always says that
    // it returns a 'void *'. So we should change the type of the symbol,
    // and then evaluate the cast over the symbolic pointer from 'void *' to
    // the object pointer type. But without changing the symbol's type it
    // is breaking too much to evaluate the no-op symbolic cast over it, so we
    // skip it for now.
    ProgramStateRef State = I->getState();
    SVal RetVal = State->getSVal(CNE, LCtx);

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
    // where new can return NULL. If we end up supporting that option, we can
    // consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const FunctionDecl *FD = CNE->getOperatorNew()) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
    }

    ValueBldr.generateNode(
        CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
  }

  ExplodedNodeSet DstPostPostCallCallback;
  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                             DstPostValue, *Call, *this);
  for (ExplodedNode *I : DstPostPostCallCallback) {
    getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this);
  }
}

void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
  // Also, we need to decide how allocators actually work -- they're not
  // really part of the CXXNewExpr because they happen BEFORE the
  // CXXConstructExpr subexpression. See PR12014 for some discussion.

  unsigned blockCount = currBldrCtx->blockCount();
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal symVal = UnknownVal();
  FunctionDecl *FD = CNE->getOperatorNew();

  bool IsStandardGlobalOpNewFunction =
      FD->isReplaceableGlobalAllocationFunction();

  ProgramStateRef State = Pred->getState();

  // Retrieve the stored operator new() return value.
  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    symVal = *getObjectUnderConstruction(State, CNE, LCtx);
    State = finishObjectConstruction(State, CNE, LCtx);
  }

  // We assume all standard global 'operator new' functions allocate memory in
  // heap. We realize this is an approximation that might not correctly model
  // a custom global allocator.
866 if (symVal.isUnknown()) { 867 if (IsStandardGlobalOpNewFunction) 868 symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount); 869 else 870 symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(), 871 blockCount); 872 } 873 874 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 875 CallEventRef<CXXAllocatorCall> Call = 876 CEMgr.getCXXAllocatorCall(CNE, State, LCtx); 877 878 if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { 879 // Invalidate placement args. 880 // FIXME: Once we figure out how we want allocators to work, 881 // we should be using the usual pre-/(default-)eval-/post-call checkers 882 // here. 883 State = Call->invalidateRegions(blockCount); 884 if (!State) 885 return; 886 887 // If this allocation function is not declared as non-throwing, failures 888 // /must/ be signalled by exceptions, and thus the return value will never 889 // be NULL. -fno-exceptions does not influence this semantics. 890 // FIXME: GCC has a -fcheck-new option, which forces it to consider the case 891 // where new can return NULL. If we end up supporting that option, we can 892 // consider adding a check for it here. 893 // C++11 [basic.stc.dynamic.allocation]p3. 894 if (const auto *ProtoType = FD->getType()->getAs<FunctionProtoType>()) 895 if (!ProtoType->isNothrow()) 896 if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>()) 897 State = State->assume(*dSymVal, true); 898 } 899 900 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 901 902 SVal Result = symVal; 903 904 if (CNE->isArray()) { 905 906 if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) { 907 // If each element is initialized by their default constructor, the field 908 // values are properly placed inside the required region, however if an 909 // initializer list is used, this doesn't happen automatically. 
910 auto *Init = CNE->getInitializer(); 911 bool isInitList = dyn_cast_or_null<InitListExpr>(Init); 912 913 QualType ObjTy = 914 isInitList ? Init->getType() : CNE->getType()->getPointeeType(); 915 const ElementRegion *EleReg = 916 MRMgr.getElementRegion(ObjTy, svalBuilder.makeArrayIndex(0), NewReg, 917 svalBuilder.getContext()); 918 Result = loc::MemRegionVal(EleReg); 919 920 // If the array is list initialized, we bind the initializer list to the 921 // memory region here, otherwise we would lose it. 922 if (isInitList) { 923 Bldr.takeNodes(Pred); 924 Pred = Bldr.generateNode(CNE, Pred, State); 925 926 SVal V = State->getSVal(Init, LCtx); 927 ExplodedNodeSet evaluated; 928 evalBind(evaluated, CNE, Pred, Result, V, true); 929 930 Bldr.takeNodes(Pred); 931 Bldr.addNodes(evaluated); 932 933 Pred = *evaluated.begin(); 934 State = Pred->getState(); 935 } 936 } 937 938 State = State->BindExpr(CNE, Pred->getLocationContext(), Result); 939 Bldr.generateNode(CNE, Pred, State); 940 return; 941 } 942 943 // FIXME: Once we have proper support for CXXConstructExprs inside 944 // CXXNewExpr, we need to make sure that the constructed object is not 945 // immediately invalidated here. (The placement call should happen before 946 // the constructor call anyway.) 947 if (FD->isReservedGlobalPlacementOperator()) { 948 // Non-array placement new should always return the placement location. 949 SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx); 950 Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(), 951 CNE->getPlacementArg(0)->getType()); 952 } 953 954 // Bind the address of the object, then check to see if we cached out. 955 State = State->BindExpr(CNE, LCtx, Result); 956 ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State); 957 if (!NewN) 958 return; 959 960 // If the type is not a record, we won't have a CXXConstructExpr as an 961 // initializer. Copy the value over. 
962 if (const Expr *Init = CNE->getInitializer()) { 963 if (!isa<CXXConstructExpr>(Init)) { 964 assert(Bldr.getResults().size() == 1); 965 Bldr.takeNodes(NewN); 966 evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx), 967 /*FirstInit=*/IsStandardGlobalOpNewFunction); 968 } 969 } 970 } 971 972 void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE, 973 ExplodedNode *Pred, ExplodedNodeSet &Dst) { 974 975 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 976 CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall( 977 CDE, Pred->getState(), Pred->getLocationContext()); 978 979 ExplodedNodeSet DstPreCall; 980 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this); 981 ExplodedNodeSet DstPostCall; 982 983 if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { 984 StmtNodeBuilder Bldr(DstPreCall, DstPostCall, *currBldrCtx); 985 for (ExplodedNode *I : DstPreCall) { 986 defaultEvalCall(Bldr, I, *Call); 987 } 988 } else { 989 DstPostCall = DstPreCall; 990 } 991 getCheckerManager().runCheckersForPostCall(Dst, DstPostCall, *Call, *this); 992 } 993 994 void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred, 995 ExplodedNodeSet &Dst) { 996 const VarDecl *VD = CS->getExceptionDecl(); 997 if (!VD) { 998 Dst.Add(Pred); 999 return; 1000 } 1001 1002 const LocationContext *LCtx = Pred->getLocationContext(); 1003 SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(), 1004 currBldrCtx->blockCount()); 1005 ProgramStateRef state = Pred->getState(); 1006 state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx); 1007 1008 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1009 Bldr.generateNode(CS, Pred, state); 1010 } 1011 1012 void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred, 1013 ExplodedNodeSet &Dst) { 1014 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1015 1016 // Get the this object region from StoreManager. 
1017 const LocationContext *LCtx = Pred->getLocationContext(); 1018 const MemRegion *R = 1019 svalBuilder.getRegionManager().getCXXThisRegion( 1020 getContext().getCanonicalType(TE->getType()), 1021 LCtx); 1022 1023 ProgramStateRef state = Pred->getState(); 1024 SVal V = state->getSVal(loc::MemRegionVal(R)); 1025 Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V)); 1026 } 1027 1028 void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred, 1029 ExplodedNodeSet &Dst) { 1030 const LocationContext *LocCtxt = Pred->getLocationContext(); 1031 1032 // Get the region of the lambda itself. 1033 const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion( 1034 LE, LocCtxt); 1035 SVal V = loc::MemRegionVal(R); 1036 1037 ProgramStateRef State = Pred->getState(); 1038 1039 // If we created a new MemRegion for the lambda, we should explicitly bind 1040 // the captures. 1041 CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin(); 1042 for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(), 1043 e = LE->capture_init_end(); 1044 i != e; ++i, ++CurField) { 1045 FieldDecl *FieldForCapture = *CurField; 1046 SVal FieldLoc = State->getLValue(FieldForCapture, V); 1047 1048 SVal InitVal; 1049 if (!FieldForCapture->hasCapturedVLAType()) { 1050 Expr *InitExpr = *i; 1051 assert(InitExpr && "Capture missing initialization expression"); 1052 InitVal = State->getSVal(InitExpr, LocCtxt); 1053 } else { 1054 // The field stores the length of a captured variable-length array. 1055 // These captures don't have initialization expressions; instead we 1056 // get the length from the VLAType size expression. 
1057 Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr(); 1058 InitVal = State->getSVal(SizeExpr, LocCtxt); 1059 } 1060 1061 State = State->bindLoc(FieldLoc, InitVal, LocCtxt); 1062 } 1063 1064 // Decay the Loc into an RValue, because there might be a 1065 // MaterializeTemporaryExpr node above this one which expects the bound value 1066 // to be an RValue. 1067 SVal LambdaRVal = State->getSVal(R); 1068 1069 ExplodedNodeSet Tmp; 1070 StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx); 1071 // FIXME: is this the right program point kind? 1072 Bldr.generateNode(LE, Pred, 1073 State->BindExpr(LE, LocCtxt, LambdaRVal), 1074 nullptr, ProgramPoint::PostLValueKind); 1075 1076 // FIXME: Move all post/pre visits to ::Visit(). 1077 getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this); 1078 } 1079