//===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the C++ expression evaluation engine.
//
//===----------------------------------------------------------------------===//
  12. #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
  13. #include "clang/Analysis/ConstructionContext.h"
  14. #include "clang/AST/DeclCXX.h"
  15. #include "clang/AST/StmtCXX.h"
  16. #include "clang/AST/ParentMap.h"
  17. #include "clang/Basic/PrettyStackTrace.h"
  18. #include "clang/StaticAnalyzer/Core/CheckerManager.h"
  19. #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
  20. #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
  21. using namespace clang;
  22. using namespace ento;
  23. void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
  24. ExplodedNode *Pred,
  25. ExplodedNodeSet &Dst) {
  26. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  27. const Expr *tempExpr = ME->getSubExpr()->IgnoreParens();
  28. ProgramStateRef state = Pred->getState();
  29. const LocationContext *LCtx = Pred->getLocationContext();
  30. state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
  31. Bldr.generateNode(ME, Pred, state);
  32. }
  33. // FIXME: This is the sort of code that should eventually live in a Core
  34. // checker rather than as a special case in ExprEngine.
  35. void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
  36. const CallEvent &Call) {
  37. SVal ThisVal;
  38. bool AlwaysReturnsLValue;
  39. const CXXRecordDecl *ThisRD = nullptr;
  40. if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
  41. assert(Ctor->getDecl()->isTrivial());
  42. assert(Ctor->getDecl()->isCopyOrMoveConstructor());
  43. ThisVal = Ctor->getCXXThisVal();
  44. ThisRD = Ctor->getDecl()->getParent();
  45. AlwaysReturnsLValue = false;
  46. } else {
  47. assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
  48. assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
  49. OO_Equal);
  50. ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
  51. ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
  52. AlwaysReturnsLValue = true;
  53. }
  54. assert(ThisRD);
  55. if (ThisRD->isEmpty()) {
  56. // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal
  57. // and bind it and RegionStore would think that the actual value
  58. // in this region at this offset is unknown.
  59. return;
  60. }
  61. const LocationContext *LCtx = Pred->getLocationContext();
  62. ExplodedNodeSet Dst;
  63. Bldr.takeNodes(Pred);
  64. SVal V = Call.getArgSVal(0);
  65. // If the value being copied is not unknown, load from its location to get
  66. // an aggregate rvalue.
  67. if (Optional<Loc> L = V.getAs<Loc>())
  68. V = Pred->getState()->getSVal(*L);
  69. else
  70. assert(V.isUnknownOrUndef());
  71. const Expr *CallExpr = Call.getOriginExpr();
  72. evalBind(Dst, CallExpr, Pred, ThisVal, V, true);
  73. PostStmt PS(CallExpr, LCtx);
  74. for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end();
  75. I != E; ++I) {
  76. ProgramStateRef State = (*I)->getState();
  77. if (AlwaysReturnsLValue)
  78. State = State->BindExpr(CallExpr, LCtx, ThisVal);
  79. else
  80. State = bindReturnValue(Call, LCtx, State);
  81. Bldr.generateNode(PS, State, *I);
  82. }
  83. }
  84. SVal ExprEngine::makeZeroElementRegion(ProgramStateRef State, SVal LValue,
  85. QualType &Ty, bool &IsArray) {
  86. SValBuilder &SVB = State->getStateManager().getSValBuilder();
  87. ASTContext &Ctx = SVB.getContext();
  88. while (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
  89. Ty = AT->getElementType();
  90. LValue = State->getLValue(Ty, SVB.makeZeroArrayIndex(), LValue);
  91. IsArray = true;
  92. }
  93. return LValue;
  94. }
// Compute the value (usually the address of a region) into which the object
// denoted by construct-expression E will be constructed, by exploring the
// construction context CC. May set flags in CallOpts to tell the caller
// about imprecisely modeled situations (arrays, temporaries, or a target
// region we failed to identify). Recurses across stack frames for returned
// values and through elided-copy contexts.
SVal ExprEngine::computeObjectUnderConstruction(
    const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, EvalCallOptions &CallOpts) {
  SValBuilder &SVB = getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &ACtx = SVB.getContext();

  // Compute the target region by exploring the construction context.
  if (CC) {
    switch (CC->getKind()) {
    case ConstructionContext::CXX17ElidedCopyVariableKind:
    case ConstructionContext::SimpleVariableKind: {
      // Constructing directly into a local variable: the target is the
      // variable's own region (element zero if the variable is an array).
      const auto *DSCC = cast<VariableConstructionContext>(CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(DS->getSingleDecl());
      QualType Ty = Var->getType();
      return makeZeroElementRegion(State, State->getLValue(Var, LCtx), Ty,
                                   CallOpts.IsArrayCtorOrDtor);
    }
    case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
    case ConstructionContext::SimpleConstructorInitializerKind: {
      // Constructing a base subobject or a member as part of a constructor's
      // initializer list: derive the target from the 'this' pointer of the
      // currently analyzed constructor.
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
      Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(ThisPtr);
      if (Init->isBaseInitializer()) {
        // Base-class subobject region within *this.
        const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion());
        const CXXRecordDecl *BaseClass =
          Init->getBaseClass()->getAsCXXRecordDecl();
        const auto *BaseReg =
          MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg,
                                       Init->isBaseVirtual());
        return SVB.makeLoc(BaseReg);
      }
      if (Init->isDelegatingInitializer())
        // A delegating constructor initializes the whole object itself.
        return ThisVal;

      const ValueDecl *Field;
      SVal FieldVal;
      if (Init->isIndirectMemberInitializer()) {
        // Member of an anonymous struct/union.
        Field = Init->getIndirectMember();
        FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
      } else {
        Field = Init->getMember();
        FieldVal = State->getLValue(Init->getMember(), ThisVal);
      }

      QualType Ty = Field->getType();
      return makeZeroElementRegion(State, FieldVal, Ty,
                                   CallOpts.IsArrayCtorOrDtor);
    }
    case ConstructionContext::NewAllocatedObjectKind: {
      // Constructing into memory returned by operator new; the allocator's
      // return value was stashed as an object under construction.
      if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
        const auto *NE = NECC->getCXXNewExpr();
        SVal V = *getObjectUnderConstruction(State, NE, LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
          if (NE->isArray()) {
            // TODO: In fact, we need to call the constructor for every
            // allocated element, not just the first one!
            CallOpts.IsArrayCtorOrDtor = true;
            return loc::MemRegionVal(getStoreManager().GetElementZeroRegion(
                MR, NE->getType()->getPointeeType()));
          }
          return V;
        }
        // TODO: Detect when the allocator returns a null pointer.
        // Constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find construction context at all.
          break;
        }
        if (isa<BlockInvocationContext>(CallerLCtx)) {
          // Unwrap block invocation contexts. They're mostly part of
          // the current stack frame.
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }
        // Recurse into the caller's frame with the caller's construction
        // context for the call expression.
        return computeObjectUnderConstruction(
            cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis. We do not know where is the
        // object returned to. Conjure a symbolic region for the return value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it dead
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        return SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC, RegionTy,
                                    currBldrCtx->blockCount());
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;

      SVal V = computeObjectUnderConstruction(
          TCC->getConstructorAfterElision(), State, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; this is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return V;

      // Copy elision failed. Revert the changes and proceed as if we have
      // a simple temporary.
      CallOpts = PreElideCallOpts;
      CallOpts.IsElidableCtorThatHasNotBeenElided = true;
      LLVM_FALLTHROUGH;
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      // Plain temporary object: construct into a fresh temp-object region.
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();

      CallOpts.IsTemporaryCtorOrDtor = true;
      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          assert(MTE->getStorageDuration() != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
            // Automatic destructors aren't quite working in this case
            // on the CFG side. We should warn the caller about that.
            // FIXME: Is there a better way to retrieve this information from
            // the MaterializeTemporaryExpr?
            CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
          }
        }

        // Temporaries with static/thread storage duration get a dedicated
        // static temp-object region rather than a stack one.
        if (MTE->getStorageDuration() == SD_Static ||
            MTE->getStorageDuration() == SD_Thread)
          return loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E));
      }

      return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
    }
    case ConstructionContext::ArgumentKind: {
      // Arguments are technically temporaries.
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *ACC = cast<ArgumentConstructionContext>(CC);
      const Expr *E = ACC->getCallLikeExpr();
      unsigned Idx = ACC->getIndex();

      CallEventManager &CEMgr = getStateManager().getCallEventManager();
      // Try to predict the parameter region of the not-yet-entered callee
      // stack frame so we can construct the argument in place.
      auto getArgLoc = [&](CallEventRef<> Caller) -> Optional<SVal> {
        const LocationContext *FutureSFC =
            Caller->getCalleeStackFrame(currBldrCtx->blockCount());
        // Return early if we are unable to reliably foresee
        // the future stack frame.
        if (!FutureSFC)
          return None;
        // This should be equivalent to Caller->getDecl() for now, but
        // FutureSFC->getDecl() is likely to support better stuff (like
        // virtual functions) earlier.
        const Decl *CalleeD = FutureSFC->getDecl();
        // FIXME: Support for variadic arguments is not implemented here yet.
        if (CallEvent::isVariadic(CalleeD))
          return None;
        // Operator arguments do not correspond to operator parameters
        // because this-argument is implemented as a normal argument in
        // operator call expressions but not in operator declarations.
        const TypedValueRegion *TVR = Caller->getParameterLocation(
            *Caller->getAdjustedParameterIndex(Idx), currBldrCtx->blockCount());
        if (!TVR)
          return None;

        return loc::MemRegionVal(TVR);
      };

      if (const auto *CE = dyn_cast<CallExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx);
        if (Optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
        // Don't bother figuring out the target region for the future
        // constructor because we won't need it.
        CallEventRef<> Caller =
            CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx);
        if (Optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx);
        if (Optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      }
    }
    } // switch (CC->getKind())
  }

  // If we couldn't find an existing region to construct into, assume we're
  // constructing a temporary. Notify the caller of our failure.
  CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
}
// Record in the program state that the object denoted by construct-expression
// E is being constructed into value V, keyed by whatever AST node the
// construction context designates (DeclStmt, CXXCtorInitializer,
// CXXBindTemporaryExpr, MaterializeTemporaryExpr, or a call-argument pair).
// Mirrors the case structure of computeObjectUnderConstruction(), including
// the cross-frame recursion for returned values and elided-copy fallthrough.
ProgramStateRef ExprEngine::updateObjectsUnderConstruction(
    SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, const EvalCallOptions &CallOpts) {
  if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
    // Sounds like we failed to find the target region and therefore
    // copy elision failed. There's nothing we can do about it here.
    return State;
  }

  // See if we're constructing an existing region by looking at the
  // current construction context.
  assert(CC && "Computed target region without construction context?");
  switch (CC->getKind()) {
  case ConstructionContext::CXX17ElidedCopyVariableKind:
  case ConstructionContext::SimpleVariableKind: {
    // Track the construction under the variable's DeclStmt.
    const auto *DSCC = cast<VariableConstructionContext>(CC);
    return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V);
  }
  case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
  case ConstructionContext::SimpleConstructorInitializerKind: {
    const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
    const auto *Init = ICC->getCXXCtorInitializer();
    // Base and delegating initializers handled above
    assert(Init->isAnyMemberInitializer() &&
           "Base and delegating initializers should have been handled by"
           "computeObjectUnderConstruction()");
    return addObjectUnderConstruction(State, Init, LCtx, V);
  }
  case ConstructionContext::NewAllocatedObjectKind: {
    // Nothing to record: the new-expression itself already tracks the
    // allocated object.
    return State;
  }
  case ConstructionContext::SimpleReturnedValueKind:
  case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
    const StackFrameContext *SFC = LCtx->getStackFrame();
    const LocationContext *CallerLCtx = SFC->getParent();
    if (!CallerLCtx) {
      // No extra work is necessary in top frame.
      return State;
    }

    auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                   .getAs<CFGCXXRecordTypedCall>();
    assert(RTC && "Could not have had a target region without it");
    if (isa<BlockInvocationContext>(CallerLCtx)) {
      // Unwrap block invocation contexts. They're mostly part of
      // the current stack frame.
      CallerLCtx = CallerLCtx->getParent();
      assert(!isa<BlockInvocationContext>(CallerLCtx));
    }

    // Record the construction in the caller's frame, under the caller's
    // construction context for the call site.
    return updateObjectsUnderConstruction(V,
        cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
        RTC->getConstructionContext(), CallOpts);
  }
  case ConstructionContext::ElidedTemporaryObjectKind: {
    assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
    if (!CallOpts.IsElidableCtorThatHasNotBeenElided) {
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
      State = updateObjectsUnderConstruction(
          V, TCC->getConstructorAfterElision(), State, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // Remember that we've elided the constructor.
      State = addObjectUnderConstruction(
          State, TCC->getConstructorAfterElision(), LCtx, V);

      // Remember that we've elided the destructor.
      if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
        State = elideDestructor(State, BTE, LCtx);

      // Instead of materialization, shamelessly return
      // the final object destination.
      if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
        State = addObjectUnderConstruction(State, MTE, LCtx, V);

      return State;
    }
    // If we decided not to elide the constructor, proceed as if
    // it's a simple temporary.
    LLVM_FALLTHROUGH;
  }
  case ConstructionContext::SimpleTemporaryObjectKind: {
    const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
    if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
      State = addObjectUnderConstruction(State, MTE, LCtx, V);

    return State;
  }
  case ConstructionContext::ArgumentKind: {
    // Track both the temporary binding (if any) and the argument slot
    // (call expression + argument index) itself.
    const auto *ACC = cast<ArgumentConstructionContext>(CC);
    if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    return addObjectUnderConstruction(
        State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V);
  }
  }
  llvm_unreachable("Unhandled construction context!");
}
  412. void ExprEngine::handleConstructor(const Expr *E,
  413. ExplodedNode *Pred,
  414. ExplodedNodeSet &destNodes) {
  415. const auto *CE = dyn_cast<CXXConstructExpr>(E);
  416. const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E);
  417. assert(CE || CIE);
  418. const LocationContext *LCtx = Pred->getLocationContext();
  419. ProgramStateRef State = Pred->getState();
  420. SVal Target = UnknownVal();
  421. if (CE) {
  422. if (Optional<SVal> ElidedTarget =
  423. getObjectUnderConstruction(State, CE, LCtx)) {
  424. // We've previously modeled an elidable constructor by pretending that it
  425. // in fact constructs into the correct target. This constructor can
  426. // therefore be skipped.
  427. Target = *ElidedTarget;
  428. StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
  429. State = finishObjectConstruction(State, CE, LCtx);
  430. if (auto L = Target.getAs<Loc>())
  431. State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
  432. Bldr.generateNode(CE, Pred, State);
  433. return;
  434. }
  435. }
  436. // FIXME: Handle arrays, which run the same constructor for every element.
  437. // For now, we just run the first constructor (which should still invalidate
  438. // the entire array).
  439. EvalCallOptions CallOpts;
  440. auto C = getCurrentCFGElement().getAs<CFGConstructor>();
  441. assert(C || getCurrentCFGElement().getAs<CFGStmt>());
  442. const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;
  443. const CXXConstructExpr::ConstructionKind CK =
  444. CE ? CE->getConstructionKind() : CIE->getConstructionKind();
  445. switch (CK) {
  446. case CXXConstructExpr::CK_Complete: {
  447. // Inherited constructors are always base class constructors.
  448. assert(CE && !CIE && "A complete constructor is inherited?!");
  449. // The target region is found from construction context.
  450. std::tie(State, Target) =
  451. handleConstructionContext(CE, State, LCtx, CC, CallOpts);
  452. break;
  453. }
  454. case CXXConstructExpr::CK_VirtualBase: {
  455. // Make sure we are not calling virtual base class initializers twice.
  456. // Only the most-derived object should initialize virtual base classes.
  457. const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
  458. LCtx->getStackFrame()->getCallSite());
  459. assert(
  460. (!OuterCtor ||
  461. OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete ||
  462. OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) &&
  463. ("This virtual base should have already been initialized by "
  464. "the most derived class!"));
  465. (void)OuterCtor;
  466. LLVM_FALLTHROUGH;
  467. }
  468. case CXXConstructExpr::CK_NonVirtualBase:
  469. // In C++17, classes with non-virtual bases may be aggregates, so they would
  470. // be initialized as aggregates without a constructor call, so we may have
  471. // a base class constructed directly into an initializer list without
  472. // having the derived-class constructor call on the previous stack frame.
  473. // Initializer lists may be nested into more initializer lists that
  474. // correspond to surrounding aggregate initializations.
  475. // FIXME: For now this code essentially bails out. We need to find the
  476. // correct target region and set it.
  477. // FIXME: Instead of relying on the ParentMap, we should have the
  478. // trigger-statement (InitListExpr in this case) passed down from CFG or
  479. // otherwise always available during construction.
  480. if (isa_and_nonnull<InitListExpr>(LCtx->getParentMap().getParent(E))) {
  481. MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
  482. Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
  483. CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  484. break;
  485. }
  486. LLVM_FALLTHROUGH;
  487. case CXXConstructExpr::CK_Delegating: {
  488. const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
  489. Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
  490. LCtx->getStackFrame());
  491. SVal ThisVal = State->getSVal(ThisPtr);
  492. if (CK == CXXConstructExpr::CK_Delegating) {
  493. Target = ThisVal;
  494. } else {
  495. // Cast to the base type.
  496. bool IsVirtual = (CK == CXXConstructExpr::CK_VirtualBase);
  497. SVal BaseVal =
  498. getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual);
  499. Target = BaseVal;
  500. }
  501. break;
  502. }
  503. }
  504. if (State != Pred->getState()) {
  505. static SimpleProgramPointTag T("ExprEngine",
  506. "Prepare for object construction");
  507. ExplodedNodeSet DstPrepare;
  508. StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
  509. BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind);
  510. assert(DstPrepare.size() <= 1);
  511. if (DstPrepare.size() == 0)
  512. return;
  513. Pred = *BldrPrepare.begin();
  514. }
  515. const MemRegion *TargetRegion = Target.getAsRegion();
  516. CallEventManager &CEMgr = getStateManager().getCallEventManager();
  517. CallEventRef<> Call =
  518. CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall(
  519. CIE, TargetRegion, State, LCtx)
  520. : (CallEventRef<>)CEMgr.getCXXConstructorCall(
  521. CE, TargetRegion, State, LCtx);
  522. ExplodedNodeSet DstPreVisit;
  523. getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this);
  524. ExplodedNodeSet PreInitialized;
  525. if (CE) {
  526. // FIXME: Is it possible and/or useful to do this before PreStmt?
  527. StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
  528. for (ExplodedNodeSet::iterator I = DstPreVisit.begin(),
  529. E = DstPreVisit.end();
  530. I != E; ++I) {
  531. ProgramStateRef State = (*I)->getState();
  532. if (CE->requiresZeroInitialization()) {
  533. // FIXME: Once we properly handle constructors in new-expressions, we'll
  534. // need to invalidate the region before setting a default value, to make
  535. // sure there aren't any lingering bindings around. This probably needs
  536. // to happen regardless of whether or not the object is zero-initialized
  537. // to handle random fields of a placement-initialized object picking up
  538. // old bindings. We might only want to do it when we need to, though.
  539. // FIXME: This isn't actually correct for arrays -- we need to zero-
  540. // initialize the entire array, not just the first element -- but our
  541. // handling of arrays everywhere else is weak as well, so this shouldn't
  542. // actually make things worse. Placement new makes this tricky as well,
  543. // since it's then possible to be initializing one part of a multi-
  544. // dimensional array.
  545. State = State->bindDefaultZero(Target, LCtx);
  546. }
  547. Bldr.generateNode(CE, *I, State, /*tag=*/nullptr,
  548. ProgramPoint::PreStmtKind);
  549. }
  550. } else {
  551. PreInitialized = DstPreVisit;
  552. }
  553. ExplodedNodeSet DstPreCall;
  554. getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
  555. *Call, *this);
  556. ExplodedNodeSet DstEvaluated;
  557. if (CE && CE->getConstructor()->isTrivial() &&
  558. CE->getConstructor()->isCopyOrMoveConstructor() &&
  559. !CallOpts.IsArrayCtorOrDtor) {
  560. StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);
  561. // FIXME: Handle other kinds of trivial constructors as well.
  562. for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
  563. I != E; ++I)
  564. performTrivialCopy(Bldr, *I, *Call);
  565. } else {
  566. for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
  567. I != E; ++I)
  568. getCheckerManager().runCheckersForEvalCall(DstEvaluated, *I, *Call, *this,
  569. CallOpts);
  570. }
  571. // If the CFG was constructed without elements for temporary destructors
  572. // and the just-called constructor created a temporary object then
  573. // stop exploration if the temporary object has a noreturn constructor.
  574. // This can lose coverage because the destructor, if it were present
  575. // in the CFG, would be called at the end of the full expression or
  576. // later (for life-time extended temporaries) -- but avoids infeasible
  577. // paths when no-return temporary destructors are used for assertions.
  578. ExplodedNodeSet DstEvaluatedPostProcessed;
  579. StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx);
  580. const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  581. if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
  582. if (llvm::isa_and_nonnull<CXXTempObjectRegion>(TargetRegion) &&
  583. cast<CXXConstructorDecl>(Call->getDecl())
  584. ->getParent()
  585. ->isAnyDestructorNoReturn()) {
  586. // If we've inlined the constructor, then DstEvaluated would be empty.
  587. // In this case we still want a sink, which could be implemented
  588. // in processCallExit. But we don't have that implemented at the moment,
  589. // so if you hit this assertion, see if you can avoid inlining
  590. // the respective constructor when analyzer-config cfg-temporary-dtors
  591. // is set to false.
  592. // Otherwise there's nothing wrong with inlining such constructor.
  593. assert(!DstEvaluated.empty() &&
  594. "We should not have inlined this constructor!");
  595. for (ExplodedNode *N : DstEvaluated) {
  596. Bldr.generateSink(E, N, N->getState());
  597. }
  598. // There is no need to run the PostCall and PostStmt checker
// callbacks because we just generated sinks on all nodes in the
// frontier.
  601. return;
  602. }
  603. }
  604. ExplodedNodeSet DstPostArgumentCleanup;
  605. for (ExplodedNode *I : DstEvaluatedPostProcessed)
  606. finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);
  607. // If there were other constructors called for object-type arguments
  608. // of this constructor, clean them up.
  609. ExplodedNodeSet DstPostCall;
  610. getCheckerManager().runCheckersForPostCall(DstPostCall,
  611. DstPostArgumentCleanup,
  612. *Call, *this);
  613. getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this);
  614. }
/// Visit a CXXConstructExpr: forwards to the shared constructor-modeling
/// logic in handleConstructor(), which handles both regular and inherited
/// constructor calls.
void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}
/// Visit a CXXInheritedCtorInitExpr (a constructor inherited via
/// 'using Base::Base'): forwards to the same shared constructor-modeling
/// logic as VisitCXXConstructExpr.
void ExprEngine::VisitCXXInheritedCtorInitExpr(
    const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred,
    ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}
  625. void ExprEngine::VisitCXXDestructor(QualType ObjectType,
  626. const MemRegion *Dest,
  627. const Stmt *S,
  628. bool IsBaseDtor,
  629. ExplodedNode *Pred,
  630. ExplodedNodeSet &Dst,
  631. EvalCallOptions &CallOpts) {
  632. assert(S && "A destructor without a trigger!");
  633. const LocationContext *LCtx = Pred->getLocationContext();
  634. ProgramStateRef State = Pred->getState();
  635. const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  636. assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  637. const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  638. // FIXME: There should always be a Decl, otherwise the destructor call
  639. // shouldn't have been added to the CFG in the first place.
  640. if (!DtorDecl) {
  641. // Skip the invalid destructor. We cannot simply return because
  642. // it would interrupt the analysis instead.
  643. static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
  644. // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
  645. PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T);
  646. NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  647. Bldr.generateNode(PP, Pred->getState(), Pred);
  648. return;
  649. }
  650. if (!Dest) {
  651. // We're trying to destroy something that is not a region. This may happen
  652. // for a variety of reasons (unknown target region, concrete integer instead
  653. // of target region, etc.). The current code makes an attempt to recover.
  654. // FIXME: We probably don't really need to recover when we're dealing
  655. // with concrete integers specifically.
  656. CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  657. if (const Expr *E = dyn_cast_or_null<Expr>(S)) {
  658. Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext());
  659. } else {
  660. static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
  661. NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  662. Bldr.generateSink(Pred->getLocation().withTag(&T),
  663. Pred->getState(), Pred);
  664. return;
  665. }
  666. }
  667. CallEventManager &CEMgr = getStateManager().getCallEventManager();
  668. CallEventRef<CXXDestructorCall> Call =
  669. CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx);
  670. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  671. Call->getSourceRange().getBegin(),
  672. "Error evaluating destructor");
  673. ExplodedNodeSet DstPreCall;
  674. getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
  675. *Call, *this);
  676. ExplodedNodeSet DstInvalidated;
  677. StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
  678. for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
  679. I != E; ++I)
  680. defaultEvalCall(Bldr, *I, *Call, CallOpts);
  681. getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
  682. *Call, *this);
  683. }
/// Model the allocator call (operator new) that happens before the
/// constructor of a CXXNewExpr: run pre-call checkers, evaluate the
/// allocator, stash its return value as an object-under-construction for
/// the upcoming CXXNewExpr, then run post-call and new-allocator checker
/// callbacks into \p Dst.
void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                CNE->getBeginLoc(),
                                "Error evaluating New Allocator Call");
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  // Pre-call checker callbacks.
  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  // Evaluate (or inline) the allocator on every surviving node.
  ExplodedNodeSet DstPostCall;
  StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
  for (ExplodedNode *I : DstPreCall) {
    // FIXME: Provide evalCall for checkers?
    defaultEvalCall(CallBldr, I, *Call);
  }
  // If the call is inlined, DstPostCall will be empty and we bail out now.

  // Store return value of operator new() for future use, until the actual
  // CXXNewExpr gets processed.
  ExplodedNodeSet DstPostValue;
  StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
  for (ExplodedNode *I : DstPostCall) {
    // FIXME: Because CNE serves as the "call site" for the allocator (due to
    // lack of a better expression in the AST), the conjured return value symbol
    // is going to be of the same type (C++ object pointer type). Technically
    // this is not correct because the operator new's prototype always says that
    // it returns a 'void *'. So we should change the type of the symbol,
    // and then evaluate the cast over the symbolic pointer from 'void *' to
    // the object pointer type. But without changing the symbol's type it
    // is breaking too much to evaluate the no-op symbolic cast over it, so we
    // skip it for now.
    // NOTE: this 'State' deliberately shadows the outer one — each
    // post-call node carries its own state.
    ProgramStateRef State = I->getState();
    SVal RetVal = State->getSVal(CNE, LCtx);

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
    // where new can return NULL. If we end up supporting that option, we can
    // consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const FunctionDecl *FD = CNE->getOperatorNew()) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
    }

    // Remember the allocator's return value so VisitCXXNewExpr can pick it
    // up later as the object under construction.
    ValueBldr.generateNode(
        CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
  }

  // Post-call checker callbacks, then the dedicated new-allocator callback.
  ExplodedNodeSet DstPostPostCallCallback;
  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                             DstPostValue, *Call, *this);
  for (ExplodedNode *I : DstPostPostCallCallback) {
    getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this);
  }
}
/// Model a CXXNewExpr: recover (or conjure) the pointer returned by
/// operator new, constrain it non-null for throwing allocators, and bind it
/// as the value of the expression. Array new and non-array placement new get
/// special-cased handling below.
void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
  // Also, we need to decide how allocators actually work -- they're not
  // really part of the CXXNewExpr because they happen BEFORE the
  // CXXConstructExpr subexpression. See PR12014 for some discussion.

  unsigned blockCount = currBldrCtx->blockCount();
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal symVal = UnknownVal();
  FunctionDecl *FD = CNE->getOperatorNew();

  bool IsStandardGlobalOpNewFunction =
      FD->isReplaceableGlobalAllocationFunction();

  ProgramStateRef State = Pred->getState();

  // Retrieve the stored operator new() return value.
  // The unchecked dereference presumably relies on VisitCXXNewAllocatorCall
  // having stashed the value when this option is on — TODO confirm.
  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    symVal = *getObjectUnderConstruction(State, CNE, LCtx);
    State = finishObjectConstruction(State, CNE, LCtx);
  }

  // We assume all standard global 'operator new' functions allocate memory in
  // heap. We realize this is an approximation that might not correctly model
  // a custom global allocator.
  if (symVal.isUnknown()) {
    if (IsStandardGlobalOpNewFunction)
      symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount);
    else
      symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(),
                                            blockCount);
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    // Invalidate placement args.
    // FIXME: Once we figure out how we want allocators to work,
    // we should be using the usual pre-/(default-)eval-/post-call checkers
    // here.
    State = Call->invalidateRegions(blockCount);
    if (!State)
      return;

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
    // where new can return NULL. If we end up supporting that option, we can
    // consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (FD) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
            State = State->assume(*dSymVal, true);
    }
  }

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  SVal Result = symVal;

  if (CNE->isArray()) {
    // FIXME: allocating an array requires simulating the constructors.
    // For now, just return a symbolicated region.
    if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) {
      QualType ObjTy = CNE->getType()->getPointeeType();
      const ElementRegion *EleReg =
          getStoreManager().GetElementZeroRegion(NewReg, ObjTy);
      Result = loc::MemRegionVal(EleReg);
    }
    State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
    Bldr.generateNode(CNE, Pred, State);
    return;
  }

  // FIXME: Once we have proper support for CXXConstructExprs inside
  // CXXNewExpr, we need to make sure that the constructed object is not
  // immediately invalidated here. (The placement call should happen before
  // the constructor call anyway.)
  if (FD && FD->isReservedGlobalPlacementOperator()) {
    // Non-array placement new should always return the placement location.
    SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
    Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
                                  CNE->getPlacementArg(0)->getType());
  }

  // Bind the address of the object, then check to see if we cached out.
  State = State->BindExpr(CNE, LCtx, Result);
  ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
  if (!NewN)
    return;

  // If the type is not a record, we won't have a CXXConstructExpr as an
  // initializer. Copy the value over.
  if (const Expr *Init = CNE->getInitializer()) {
    if (!isa<CXXConstructExpr>(Init)) {
      assert(Bldr.getResults().size() == 1);
      Bldr.takeNodes(NewN);
      evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx),
               /*FirstInit=*/IsStandardGlobalOpNewFunction);
    }
  }
}
  839. void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
  840. ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  841. CallEventManager &CEMgr = getStateManager().getCallEventManager();
  842. CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall(
  843. CDE, Pred->getState(), Pred->getLocationContext());
  844. ExplodedNodeSet DstPreCall;
  845. getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this);
  846. getCheckerManager().runCheckersForPostCall(Dst, DstPreCall, *Call, *this);
  847. }
  848. void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred,
  849. ExplodedNodeSet &Dst) {
  850. const VarDecl *VD = CS->getExceptionDecl();
  851. if (!VD) {
  852. Dst.Add(Pred);
  853. return;
  854. }
  855. const LocationContext *LCtx = Pred->getLocationContext();
  856. SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(),
  857. currBldrCtx->blockCount());
  858. ProgramStateRef state = Pred->getState();
  859. state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);
  860. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  861. Bldr.generateNode(CS, Pred, state);
  862. }
  863. void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
  864. ExplodedNodeSet &Dst) {
  865. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  866. // Get the this object region from StoreManager.
  867. const LocationContext *LCtx = Pred->getLocationContext();
  868. const MemRegion *R =
  869. svalBuilder.getRegionManager().getCXXThisRegion(
  870. getContext().getCanonicalType(TE->getType()),
  871. LCtx);
  872. ProgramStateRef state = Pred->getState();
  873. SVal V = state->getSVal(loc::MemRegionVal(R));
  874. Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V));
  875. }
  876. void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
  877. ExplodedNodeSet &Dst) {
  878. const LocationContext *LocCtxt = Pred->getLocationContext();
  879. // Get the region of the lambda itself.
  880. const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
  881. LE, LocCtxt);
  882. SVal V = loc::MemRegionVal(R);
  883. ProgramStateRef State = Pred->getState();
  884. // If we created a new MemRegion for the lambda, we should explicitly bind
  885. // the captures.
  886. CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin();
  887. for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(),
  888. e = LE->capture_init_end();
  889. i != e; ++i, ++CurField) {
  890. FieldDecl *FieldForCapture = *CurField;
  891. SVal FieldLoc = State->getLValue(FieldForCapture, V);
  892. SVal InitVal;
  893. if (!FieldForCapture->hasCapturedVLAType()) {
  894. Expr *InitExpr = *i;
  895. assert(InitExpr && "Capture missing initialization expression");
  896. InitVal = State->getSVal(InitExpr, LocCtxt);
  897. } else {
  898. // The field stores the length of a captured variable-length array.
  899. // These captures don't have initialization expressions; instead we
  900. // get the length from the VLAType size expression.
  901. Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
  902. InitVal = State->getSVal(SizeExpr, LocCtxt);
  903. }
  904. State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  905. }
  906. // Decay the Loc into an RValue, because there might be a
  907. // MaterializeTemporaryExpr node above this one which expects the bound value
  908. // to be an RValue.
  909. SVal LambdaRVal = State->getSVal(R);
  910. ExplodedNodeSet Tmp;
  911. StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  912. // FIXME: is this the right program point kind?
  913. Bldr.generateNode(LE, Pred,
  914. State->BindExpr(LE, LocCtxt, LambdaRVal),
  915. nullptr, ProgramPoint::PostLValueKind);
  916. // FIXME: Move all post/pre visits to ::Visit().
  917. getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
  918. }