ExprEngineCXX.cpp
  1. //===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file defines the C++ expression evaluation engine.
  10. //
  11. //===----------------------------------------------------------------------===//
  12. #include "clang/AST/DeclCXX.h"
  13. #include "clang/AST/ParentMap.h"
  14. #include "clang/AST/StmtCXX.h"
  15. #include "clang/Analysis/ConstructionContext.h"
  16. #include "clang/Basic/PrettyStackTrace.h"
  17. #include "clang/StaticAnalyzer/Core/CheckerManager.h"
  18. #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
  19. #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
  20. #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
  21. #include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h"
  22. #include <optional>
  23. using namespace clang;
  24. using namespace ento;
  25. void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
  26. ExplodedNode *Pred,
  27. ExplodedNodeSet &Dst) {
  28. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  29. const Expr *tempExpr = ME->getSubExpr()->IgnoreParens();
  30. ProgramStateRef state = Pred->getState();
  31. const LocationContext *LCtx = Pred->getLocationContext();
  32. state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
  33. Bldr.generateNode(ME, Pred, state);
  34. }
  35. // FIXME: This is the sort of code that should eventually live in a Core
  36. // checker rather than as a special case in ExprEngine.
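// For example, a trivial copy that gets modeled here as a plain load/bind
// instead of inlining a constructor body (illustrative):
//   struct Point { int x, y; };
//   Point a{1, 2};
//   Point b = a;  // trivial copy constructor: load the value of 'a', bind to 'b'
//   b = a;        // trivial copy-assignment: same load/bind, result is an lvalue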
  37. void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
  38. const CallEvent &Call) {
  39. SVal ThisVal;
  40. bool AlwaysReturnsLValue;
  41. const CXXRecordDecl *ThisRD = nullptr;
  42. if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
  43. assert(Ctor->getDecl()->isTrivial());
  44. assert(Ctor->getDecl()->isCopyOrMoveConstructor());
  45. ThisVal = Ctor->getCXXThisVal();
  46. ThisRD = Ctor->getDecl()->getParent();
  47. AlwaysReturnsLValue = false;
  48. } else {
  49. assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
  50. assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
  51. OO_Equal);
  52. ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
  53. ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
  54. AlwaysReturnsLValue = true;
  55. }
  56. assert(ThisRD);
  57. if (ThisRD->isEmpty()) {
  58. // Do nothing for empty classes. Otherwise we'd retrieve an UnknownVal
  59. // and bind it, and RegionStore would think that the actual value
  60. // in this region at this offset is unknown.
  61. return;
  62. }
  63. const LocationContext *LCtx = Pred->getLocationContext();
  64. ExplodedNodeSet Dst;
  65. Bldr.takeNodes(Pred);
  66. SVal V = Call.getArgSVal(0);
  67. // If the value being copied is not unknown, load from its location to get
  68. // an aggregate rvalue.
  69. if (std::optional<Loc> L = V.getAs<Loc>())
  70. V = Pred->getState()->getSVal(*L);
  71. else
  72. assert(V.isUnknownOrUndef());
  73. const Expr *CallExpr = Call.getOriginExpr();
  74. evalBind(Dst, CallExpr, Pred, ThisVal, V, true);
  75. PostStmt PS(CallExpr, LCtx);
  76. for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end();
  77. I != E; ++I) {
  78. ProgramStateRef State = (*I)->getState();
  79. if (AlwaysReturnsLValue)
  80. State = State->BindExpr(CallExpr, LCtx, ThisVal);
  81. else
  82. State = bindReturnValue(Call, LCtx, State);
  83. Bldr.generateNode(PS, State, *I);
  84. }
  85. }
  86. SVal ExprEngine::makeElementRegion(ProgramStateRef State, SVal LValue,
  87. QualType &Ty, bool &IsArray, unsigned Idx) {
  88. SValBuilder &SVB = State->getStateManager().getSValBuilder();
  89. ASTContext &Ctx = SVB.getContext();
  90. if (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
  91. while (AT) {
  92. Ty = AT->getElementType();
  93. AT = dyn_cast<ArrayType>(AT->getElementType());
  94. }
  95. LValue = State->getLValue(Ty, SVB.makeArrayIndex(Idx), LValue);
  96. IsArray = true;
  97. }
  98. return LValue;
  99. }
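// For example (illustrative): for a field declared as 'S buf[2][3]', the array
// types are peeled down to 'S', the returned lvalue designates element 'Idx' of
// the field's region, and IsArray tells the caller that a per-element
// constructor or destructor is being modeled.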
  100. // In the case where the prvalue is returned from the function (the kind is
  101. // one of SimpleReturnedValueKind, CXX17ElidedCopyReturnedValueKind), its
  102. // materialization happens in the context of the caller.
  103. // We pass BldrCtx explicitly, as currBldrCtx always refers to callee's context.
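// For example (illustrative):
//   struct S { S(); };
//   S get() { return S(); }         // returned prvalue, ReturnedValueKind
//   void caller() { S s = get(); }  // but the object is materialized in caller()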
  104. SVal ExprEngine::computeObjectUnderConstruction(
  105. const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx,
  106. const LocationContext *LCtx, const ConstructionContext *CC,
  107. EvalCallOptions &CallOpts, unsigned Idx) {
  108. SValBuilder &SVB = getSValBuilder();
  109. MemRegionManager &MRMgr = SVB.getRegionManager();
  110. ASTContext &ACtx = SVB.getContext();
  111. // Compute the target region by exploring the construction context.
  112. if (CC) {
  113. switch (CC->getKind()) {
  114. case ConstructionContext::CXX17ElidedCopyVariableKind:
  115. case ConstructionContext::SimpleVariableKind: {
  116. const auto *DSCC = cast<VariableConstructionContext>(CC);
  117. const auto *DS = DSCC->getDeclStmt();
  118. const auto *Var = cast<VarDecl>(DS->getSingleDecl());
  119. QualType Ty = Var->getType();
  120. return makeElementRegion(State, State->getLValue(Var, LCtx), Ty,
  121. CallOpts.IsArrayCtorOrDtor, Idx);
  122. }
  123. case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
  124. case ConstructionContext::SimpleConstructorInitializerKind: {
  125. const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
  126. const auto *Init = ICC->getCXXCtorInitializer();
  127. const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
  128. Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
  129. SVal ThisVal = State->getSVal(ThisPtr);
  130. if (Init->isBaseInitializer()) {
  131. const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion());
  132. const CXXRecordDecl *BaseClass =
  133. Init->getBaseClass()->getAsCXXRecordDecl();
  134. const auto *BaseReg =
  135. MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg,
  136. Init->isBaseVirtual());
  137. return SVB.makeLoc(BaseReg);
  138. }
  139. if (Init->isDelegatingInitializer())
  140. return ThisVal;
  141. const ValueDecl *Field;
  142. SVal FieldVal;
  143. if (Init->isIndirectMemberInitializer()) {
  144. Field = Init->getIndirectMember();
  145. FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
  146. } else {
  147. Field = Init->getMember();
  148. FieldVal = State->getLValue(Init->getMember(), ThisVal);
  149. }
  150. QualType Ty = Field->getType();
  151. return makeElementRegion(State, FieldVal, Ty, CallOpts.IsArrayCtorOrDtor,
  152. Idx);
  153. }
  154. case ConstructionContext::NewAllocatedObjectKind: {
  155. if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
  156. const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
  157. const auto *NE = NECC->getCXXNewExpr();
  158. SVal V = *getObjectUnderConstruction(State, NE, LCtx);
  159. if (const SubRegion *MR =
  160. dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
  161. if (NE->isArray()) {
  162. CallOpts.IsArrayCtorOrDtor = true;
  163. auto Ty = NE->getType()->getPointeeType();
  164. while (const auto *AT = getContext().getAsArrayType(Ty))
  165. Ty = AT->getElementType();
  166. auto R = MRMgr.getElementRegion(Ty, svalBuilder.makeArrayIndex(Idx),
  167. MR, SVB.getContext());
  168. return loc::MemRegionVal(R);
  169. }
  170. return V;
  171. }
  172. // TODO: Detect when the allocator returns a null pointer.
  173. // The constructor shall not be called in this case.
  174. }
  175. break;
  176. }
  177. case ConstructionContext::SimpleReturnedValueKind:
  178. case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
  179. // The temporary is to be managed by the parent stack frame.
  180. // So build it in the parent stack frame if we're not in the
  181. // top frame of the analysis.
  182. const StackFrameContext *SFC = LCtx->getStackFrame();
  183. if (const LocationContext *CallerLCtx = SFC->getParent()) {
  184. auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
  185. .getAs<CFGCXXRecordTypedCall>();
  186. if (!RTC) {
  187. // We were unable to find the correct construction context for the
  188. // call in the parent stack frame. This is equivalent to not being
  189. // able to find construction context at all.
  190. break;
  191. }
  192. if (isa<BlockInvocationContext>(CallerLCtx)) {
  193. // Unwrap block invocation contexts. They're mostly part of
  194. // the current stack frame.
  195. CallerLCtx = CallerLCtx->getParent();
  196. assert(!isa<BlockInvocationContext>(CallerLCtx));
  197. }
  198. NodeBuilderContext CallerBldrCtx(getCoreEngine(),
  199. SFC->getCallSiteBlock(), CallerLCtx);
  200. return computeObjectUnderConstruction(
  201. cast<Expr>(SFC->getCallSite()), State, &CallerBldrCtx, CallerLCtx,
  202. RTC->getConstructionContext(), CallOpts);
  203. } else {
  204. // We are on the top frame of the analysis. We do not know where the
  205. // object is returned to. Conjure a symbolic region for the return value.
  206. // TODO: We probably need a new MemRegion kind to represent the storage
  207. // of that SymbolicRegion, so that we could produce a fancy symbol
  208. // instead of an anonymous conjured symbol.
  209. // TODO: Do we need to track the region to avoid having it dead
  210. // too early? It does die too early, at least in C++17, but because
  211. // putting anything into a SymbolicRegion causes an immediate escape,
  212. // it doesn't cause any leak false positives.
  213. const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
  214. // Make sure that this doesn't coincide with any other symbol
  215. // conjured for the returned expression.
  216. static const int TopLevelSymRegionTag = 0;
  217. const Expr *RetE = RCC->getReturnStmt()->getRetValue();
  218. assert(RetE && "Void returns should not have a construction context");
  219. QualType ReturnTy = RetE->getType();
  220. QualType RegionTy = ACtx.getPointerType(ReturnTy);
  221. return SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC, RegionTy,
  222. currBldrCtx->blockCount());
  223. }
  224. llvm_unreachable("Unhandled return value construction context!");
  225. }
  226. case ConstructionContext::ElidedTemporaryObjectKind: {
  227. assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
  228. const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
  229. // Support pre-C++17 copy elision. We'll have the elidable copy
  230. // constructor in the AST and in the CFG, but we'll skip it
  231. // and construct directly into the final object. This call
  232. // also sets the CallOpts flags for us.
  233. // If the elided copy/move constructor is not supported, there's still
  234. // benefit in trying to model the non-elided constructor.
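// For example (illustrative, pre-C++17):
//   struct S { S(); S(const S &); };
//   S s = S(S());  // the elidable copies remain in the AST and CFG, but we skip
//                  // them and construct directly into 's'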
  235. // Stash our state before trying to elide, as it'll get overwritten.
  236. ProgramStateRef PreElideState = State;
  237. EvalCallOptions PreElideCallOpts = CallOpts;
  238. SVal V = computeObjectUnderConstruction(
  239. TCC->getConstructorAfterElision(), State, BldrCtx, LCtx,
  240. TCC->getConstructionContextAfterElision(), CallOpts);
  241. // FIXME: This definition of "copy elision has not failed" is unreliable.
  242. // It doesn't indicate that the constructor will actually be inlined
  243. // later; this is still up to evalCall() to decide.
  244. if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
  245. return V;
  246. // Copy elision failed. Revert the changes and proceed as if we have
  247. // a simple temporary.
  248. CallOpts = PreElideCallOpts;
  249. CallOpts.IsElidableCtorThatHasNotBeenElided = true;
  250. [[fallthrough]];
  251. }
  252. case ConstructionContext::SimpleTemporaryObjectKind: {
  253. const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
  254. const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();
  255. CallOpts.IsTemporaryCtorOrDtor = true;
  256. if (MTE) {
  257. if (const ValueDecl *VD = MTE->getExtendingDecl()) {
  258. assert(MTE->getStorageDuration() != SD_FullExpression);
  259. if (!VD->getType()->isReferenceType()) {
  260. // We're lifetime-extended by a surrounding aggregate.
  261. // Automatic destructors aren't quite working in this case
  262. // on the CFG side. We should warn the caller about that.
  263. // FIXME: Is there a better way to retrieve this information from
  264. // the MaterializeTemporaryExpr?
  265. CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
  266. }
  267. }
  268. if (MTE->getStorageDuration() == SD_Static ||
  269. MTE->getStorageDuration() == SD_Thread)
  270. return loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E));
  271. }
  272. return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
  273. }
  274. case ConstructionContext::LambdaCaptureKind: {
  275. CallOpts.IsTemporaryCtorOrDtor = true;
  276. const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);
  277. SVal Base = loc::MemRegionVal(
  278. MRMgr.getCXXTempObjectRegion(LCC->getInitializer(), LCtx));
  279. const auto *CE = dyn_cast_or_null<CXXConstructExpr>(E);
  280. if (getIndexOfElementToConstruct(State, CE, LCtx)) {
  281. CallOpts.IsArrayCtorOrDtor = true;
  282. Base = State->getLValue(E->getType(), svalBuilder.makeArrayIndex(Idx),
  283. Base);
  284. }
  285. return Base;
  286. }
  287. case ConstructionContext::ArgumentKind: {
  288. // Arguments are technically temporaries.
  289. CallOpts.IsTemporaryCtorOrDtor = true;
  290. const auto *ACC = cast<ArgumentConstructionContext>(CC);
  291. const Expr *E = ACC->getCallLikeExpr();
  292. unsigned Idx = ACC->getIndex();
  293. CallEventManager &CEMgr = getStateManager().getCallEventManager();
  294. auto getArgLoc = [&](CallEventRef<> Caller) -> std::optional<SVal> {
  295. const LocationContext *FutureSFC =
  296. Caller->getCalleeStackFrame(BldrCtx->blockCount());
  297. // Return early if we are unable to reliably foresee
  298. // the future stack frame.
  299. if (!FutureSFC)
  300. return std::nullopt;
  301. // This should be equivalent to Caller->getDecl() for now, but
  302. // FutureSFC->getDecl() is likely to support better stuff (like
  303. // virtual functions) earlier.
  304. const Decl *CalleeD = FutureSFC->getDecl();
  305. // FIXME: Support for variadic arguments is not implemented here yet.
  306. if (CallEvent::isVariadic(CalleeD))
  307. return std::nullopt;
  308. // Operator arguments do not correspond to operator parameters
  309. // because the this-argument is implemented as a normal argument in
  310. // operator call expressions but not in operator declarations.
  311. const TypedValueRegion *TVR = Caller->getParameterLocation(
  312. *Caller->getAdjustedParameterIndex(Idx), BldrCtx->blockCount());
  313. if (!TVR)
  314. return std::nullopt;
  315. return loc::MemRegionVal(TVR);
  316. };
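// For example (illustrative):
//   void callee(S by_value);
//   void caller() { callee(S()); }
// Here S() is constructed directly into callee's parameter slot on the foreseen
// callee stack frame, when that frame can be predicted.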
  317. if (const auto *CE = dyn_cast<CallExpr>(E)) {
  318. CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx);
  319. if (std::optional<SVal> V = getArgLoc(Caller))
  320. return *V;
  321. else
  322. break;
  323. } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
  324. // Don't bother figuring out the target region for the future
  325. // constructor because we won't need it.
  326. CallEventRef<> Caller =
  327. CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx);
  328. if (std::optional<SVal> V = getArgLoc(Caller))
  329. return *V;
  330. else
  331. break;
  332. } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
  333. CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx);
  334. if (std::optional<SVal> V = getArgLoc(Caller))
  335. return *V;
  336. else
  337. break;
  338. }
  339. }
  340. } // switch (CC->getKind())
  341. }
  342. // If we couldn't find an existing region to construct into, assume we're
  343. // constructing a temporary. Notify the caller of our failure.
  344. CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  345. return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
  346. }
  347. ProgramStateRef ExprEngine::updateObjectsUnderConstruction(
  348. SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
  349. const ConstructionContext *CC, const EvalCallOptions &CallOpts) {
  350. if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
  351. // Sounds like we failed to find the target region and therefore
  352. // copy elision failed. There's nothing we can do about it here.
  353. return State;
  354. }
  355. // See if we're constructing an existing region by looking at the
  356. // current construction context.
  357. assert(CC && "Computed target region without construction context?");
  358. switch (CC->getKind()) {
  359. case ConstructionContext::CXX17ElidedCopyVariableKind:
  360. case ConstructionContext::SimpleVariableKind: {
  361. const auto *DSCC = cast<VariableConstructionContext>(CC);
  362. return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V);
  363. }
  364. case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
  365. case ConstructionContext::SimpleConstructorInitializerKind: {
  366. const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
  367. const auto *Init = ICC->getCXXCtorInitializer();
  368. // Base and delegating initializers are handled in computeObjectUnderConstruction().
  369. assert(Init->isAnyMemberInitializer() &&
  370. "Base and delegating initializers should have been handled by "
  371. "computeObjectUnderConstruction()");
  372. return addObjectUnderConstruction(State, Init, LCtx, V);
  373. }
  374. case ConstructionContext::NewAllocatedObjectKind: {
  375. return State;
  376. }
  377. case ConstructionContext::SimpleReturnedValueKind:
  378. case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
  379. const StackFrameContext *SFC = LCtx->getStackFrame();
  380. const LocationContext *CallerLCtx = SFC->getParent();
  381. if (!CallerLCtx) {
  382. // No extra work is necessary in top frame.
  383. return State;
  384. }
  385. auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
  386. .getAs<CFGCXXRecordTypedCall>();
  387. assert(RTC && "Could not have had a target region without it");
  388. if (isa<BlockInvocationContext>(CallerLCtx)) {
  389. // Unwrap block invocation contexts. They're mostly part of
  390. // the current stack frame.
  391. CallerLCtx = CallerLCtx->getParent();
  392. assert(!isa<BlockInvocationContext>(CallerLCtx));
  393. }
  394. return updateObjectsUnderConstruction(V,
  395. cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
  396. RTC->getConstructionContext(), CallOpts);
  397. }
  398. case ConstructionContext::ElidedTemporaryObjectKind: {
  399. assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
  400. if (!CallOpts.IsElidableCtorThatHasNotBeenElided) {
  401. const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
  402. State = updateObjectsUnderConstruction(
  403. V, TCC->getConstructorAfterElision(), State, LCtx,
  404. TCC->getConstructionContextAfterElision(), CallOpts);
  405. // Remember that we've elided the constructor.
  406. State = addObjectUnderConstruction(
  407. State, TCC->getConstructorAfterElision(), LCtx, V);
  408. // Remember that we've elided the destructor.
  409. if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
  410. State = elideDestructor(State, BTE, LCtx);
  411. // Instead of materialization, shamelessly return
  412. // the final object destination.
  413. if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
  414. State = addObjectUnderConstruction(State, MTE, LCtx, V);
  415. return State;
  416. }
  417. // If we decided not to elide the constructor, proceed as if
  418. // it's a simple temporary.
  419. [[fallthrough]];
  420. }
  421. case ConstructionContext::SimpleTemporaryObjectKind: {
  422. const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
  423. if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
  424. State = addObjectUnderConstruction(State, BTE, LCtx, V);
  425. if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
  426. State = addObjectUnderConstruction(State, MTE, LCtx, V);
  427. return State;
  428. }
  429. case ConstructionContext::LambdaCaptureKind: {
  430. const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);
  431. // If we capture an array, we want to store the super region, not a
  432. // sub-region.
  433. if (const auto *EL = dyn_cast_or_null<ElementRegion>(V.getAsRegion()))
  434. V = loc::MemRegionVal(EL->getSuperRegion());
  435. return addObjectUnderConstruction(
  436. State, {LCC->getLambdaExpr(), LCC->getIndex()}, LCtx, V);
  437. }
  438. case ConstructionContext::ArgumentKind: {
  439. const auto *ACC = cast<ArgumentConstructionContext>(CC);
  440. if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
  441. State = addObjectUnderConstruction(State, BTE, LCtx, V);
  442. return addObjectUnderConstruction(
  443. State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V);
  444. }
  445. }
  446. llvm_unreachable("Unhandled construction context!");
  447. }
  448. static ProgramStateRef
  449. bindRequiredArrayElementToEnvironment(ProgramStateRef State,
  450. const ArrayInitLoopExpr *AILE,
  451. const LocationContext *LCtx, SVal Idx) {
  452. // The ctor in this case is guaranteed to be a copy ctor; otherwise we would
  453. // hit a compile-time error.
  454. //
  455. // -ArrayInitLoopExpr <-- we're here
  456. // |-OpaqueValueExpr
  457. // | `-DeclRefExpr <-- match this
  458. // `-CXXConstructExpr
  459. // `-ImplicitCastExpr
  460. // `-ArraySubscriptExpr
  461. // |-ImplicitCastExpr
  462. // | `-OpaqueValueExpr
  463. // | `-DeclRefExpr
  464. // `-ArrayInitIndexExpr
  465. //
  466. // The resulting expression might look like the one below in an implicit
  467. // copy/move ctor.
  468. //
  469. // ArrayInitLoopExpr <-- we're here
  470. // |-OpaqueValueExpr
  471. // | `-MemberExpr <-- match this
  472. // | (`-CXXStaticCastExpr) <-- move ctor only
  473. // | `-DeclRefExpr
  474. // `-CXXConstructExpr
  475. // `-ArraySubscriptExpr
  476. // |-ImplicitCastExpr
  477. // | `-OpaqueValueExpr
  478. // | `-MemberExpr
  479. // | `-DeclRefExpr
  480. // `-ArrayInitIndexExpr
  481. //
  482. // The resulting expression for a multidimensional array.
  483. // ArrayInitLoopExpr <-- we're here
  484. // |-OpaqueValueExpr
  485. // | `-DeclRefExpr <-- match this
  486. // `-ArrayInitLoopExpr
  487. // |-OpaqueValueExpr
  488. // | `-ArraySubscriptExpr
  489. // | |-ImplicitCastExpr
  490. // | | `-OpaqueValueExpr
  491. // | | `-DeclRefExpr
  492. // | `-ArrayInitIndexExpr
  493. // `-CXXConstructExpr <-- extract this
  494. // ` ...
  495. const auto *OVESrc = AILE->getCommonExpr()->getSourceExpr();
  496. // HACK: There is no way we can put the index of the array element into the
  497. // CFG unless we unroll the loop, so we manually select and bind the required
  498. // parameter to the environment.
  499. const auto *CE =
  500. cast<CXXConstructExpr>(extractElementInitializerFromNestedAILE(AILE));
  501. SVal Base = UnknownVal();
  502. if (const auto *ME = dyn_cast<MemberExpr>(OVESrc))
  503. Base = State->getSVal(ME, LCtx);
  504. else if (const auto *DRE = dyn_cast<DeclRefExpr>(OVESrc))
  505. Base = State->getLValue(cast<VarDecl>(DRE->getDecl()), LCtx);
  506. else
  507. llvm_unreachable("ArrayInitLoopExpr contains unexpected source expression");
  508. SVal NthElem = State->getLValue(CE->getType(), Idx, Base);
  509. return State->BindExpr(CE->getArg(0), LCtx, NthElem);
  510. }
  511. void ExprEngine::handleConstructor(const Expr *E,
  512. ExplodedNode *Pred,
  513. ExplodedNodeSet &destNodes) {
  514. const auto *CE = dyn_cast<CXXConstructExpr>(E);
  515. const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E);
  516. assert(CE || CIE);
  517. const LocationContext *LCtx = Pred->getLocationContext();
  518. ProgramStateRef State = Pred->getState();
  519. SVal Target = UnknownVal();
  520. if (CE) {
  521. if (std::optional<SVal> ElidedTarget =
  522. getObjectUnderConstruction(State, CE, LCtx)) {
  523. // We've previously modeled an elidable constructor by pretending that
  524. // it in fact constructs into the correct target. This constructor can
  525. // therefore be skipped.
  526. Target = *ElidedTarget;
  527. StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
  528. State = finishObjectConstruction(State, CE, LCtx);
  529. if (auto L = Target.getAs<Loc>())
  530. State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
  531. Bldr.generateNode(CE, Pred, State);
  532. return;
  533. }
  534. }
  535. EvalCallOptions CallOpts;
  536. auto C = getCurrentCFGElement().getAs<CFGConstructor>();
  537. assert(C || getCurrentCFGElement().getAs<CFGStmt>());
  538. const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;
  539. const CXXConstructExpr::ConstructionKind CK =
  540. CE ? CE->getConstructionKind() : CIE->getConstructionKind();
  541. switch (CK) {
  542. case CXXConstructExpr::CK_Complete: {
  543. // Inherited constructors are always base class constructors.
  544. assert(CE && !CIE && "A complete constructor is inherited?!");
  545. // If the ctor is part of an ArrayInitLoopExpr, we want to handle it
  546. // differently.
  547. auto *AILE = CC ? CC->getArrayInitLoop() : nullptr;
  548. unsigned Idx = 0;
  549. if (CE->getType()->isArrayType() || AILE) {
  550. auto isZeroSizeArray = [&] {
  551. uint64_t Size = 1;
  552. if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType()))
  553. Size = getContext().getConstantArrayElementCount(CAT);
  554. else if (AILE)
  555. Size = getContext().getArrayInitLoopExprElementCount(AILE);
  556. return Size == 0;
  557. };
  558. // No element construction will happen in a zero-size array.
  559. if (isZeroSizeArray()) {
  560. StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
  561. static SimpleProgramPointTag T{"ExprEngine",
  562. "Skipping 0 size array construction"};
  563. Bldr.generateNode(CE, Pred, State, &T);
  564. return;
  565. }
  566. Idx = getIndexOfElementToConstruct(State, CE, LCtx).value_or(0u);
  567. State = setIndexOfElementToConstruct(State, CE, LCtx, Idx + 1);
  568. }
  569. if (AILE) {
  570. // Only set this once even though we loop through it multiple times.
  571. if (!getPendingInitLoop(State, CE, LCtx))
  572. State = setPendingInitLoop(
  573. State, CE, LCtx,
  574. getContext().getArrayInitLoopExprElementCount(AILE));
  575. State = bindRequiredArrayElementToEnvironment(
  576. State, AILE, LCtx, svalBuilder.makeArrayIndex(Idx));
  577. }
  578. // The target region is found from the construction context.
  579. std::tie(State, Target) = handleConstructionContext(
  580. CE, State, currBldrCtx, LCtx, CC, CallOpts, Idx);
  581. break;
  582. }
  583. case CXXConstructExpr::CK_VirtualBase: {
  584. // Make sure we are not calling virtual base class initializers twice.
  585. // Only the most-derived object should initialize virtual base classes.
  586. const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
  587. LCtx->getStackFrame()->getCallSite());
  588. assert(
  589. (!OuterCtor ||
  590. OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete ||
  591. OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) &&
  592. ("This virtual base should have already been initialized by "
  593. "the most derived class!"));
  594. (void)OuterCtor;
  595. [[fallthrough]];
  596. }
  597. case CXXConstructExpr::CK_NonVirtualBase:
  598. // In C++17, classes with non-virtual bases may be aggregates, so they would
  599. // be initialized as aggregates without a constructor call; hence we may have
  600. // a base class constructed directly into an initializer list without
  601. // having the derived-class constructor call on the previous stack frame.
  602. // Initializer lists may be nested into more initializer lists that
  603. // correspond to surrounding aggregate initializations.
  604. // FIXME: For now this code essentially bails out. We need to find the
  605. // correct target region and set it.
  606. // FIXME: Instead of relying on the ParentMap, we should have the
  607. // trigger-statement (InitListExpr in this case) passed down from CFG or
  608. // otherwise always available during construction.
  609. if (isa_and_nonnull<InitListExpr>(LCtx->getParentMap().getParent(E))) {
  610. MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
  611. Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
  612. CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  613. break;
  614. }
  615. [[fallthrough]];
  616. case CXXConstructExpr::CK_Delegating: {
  617. const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
  618. Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
  619. LCtx->getStackFrame());
  620. SVal ThisVal = State->getSVal(ThisPtr);
  621. if (CK == CXXConstructExpr::CK_Delegating) {
  622. Target = ThisVal;
  623. } else {
  624. // Cast to the base type.
  625. bool IsVirtual = (CK == CXXConstructExpr::CK_VirtualBase);
  626. SVal BaseVal =
  627. getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual);
  628. Target = BaseVal;
  629. }
  630. break;
  631. }
  632. }
  633. if (State != Pred->getState()) {
  634. static SimpleProgramPointTag T("ExprEngine",
  635. "Prepare for object construction");
  636. ExplodedNodeSet DstPrepare;
  637. StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
  638. BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind);
  639. assert(DstPrepare.size() <= 1);
  640. if (DstPrepare.size() == 0)
  641. return;
  642. Pred = *BldrPrepare.begin();
  643. }
  644. const MemRegion *TargetRegion = Target.getAsRegion();
  645. CallEventManager &CEMgr = getStateManager().getCallEventManager();
  646. CallEventRef<> Call =
  647. CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall(
  648. CIE, TargetRegion, State, LCtx)
  649. : (CallEventRef<>)CEMgr.getCXXConstructorCall(
  650. CE, TargetRegion, State, LCtx);
  651. ExplodedNodeSet DstPreVisit;
  652. getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this);
  653. ExplodedNodeSet PreInitialized;
  654. if (CE) {
  655. // FIXME: Is it possible and/or useful to do this before PreStmt?
  656. StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
  657. for (ExplodedNodeSet::iterator I = DstPreVisit.begin(),
  658. E = DstPreVisit.end();
  659. I != E; ++I) {
  660. ProgramStateRef State = (*I)->getState();
  661. if (CE->requiresZeroInitialization()) {
  662. // FIXME: Once we properly handle constructors in new-expressions, we'll
  663. // need to invalidate the region before setting a default value, to make
  664. // sure there aren't any lingering bindings around. This probably needs
  665. // to happen regardless of whether or not the object is zero-initialized
  666. // to handle random fields of a placement-initialized object picking up
  667. // old bindings. We might only want to do it when we need to, though.
  668. // FIXME: This isn't actually correct for arrays -- we need to zero-
  669. // initialize the entire array, not just the first element -- but our
  670. // handling of arrays everywhere else is weak as well, so this shouldn't
  671. // actually make things worse. Placement new makes this tricky as well,
  672. // since it's then possible to be initializing one part of a multi-
  673. // dimensional array.
  674. State = State->bindDefaultZero(Target, LCtx);
  675. }
  676. Bldr.generateNode(CE, *I, State, /*tag=*/nullptr,
  677. ProgramPoint::PreStmtKind);
  678. }
  679. } else {
  680. PreInitialized = DstPreVisit;
  681. }
  682. ExplodedNodeSet DstPreCall;
  683. getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
  684. *Call, *this);
  685. ExplodedNodeSet DstEvaluated;
  686. if (CE && CE->getConstructor()->isTrivial() &&
  687. CE->getConstructor()->isCopyOrMoveConstructor() &&
  688. !CallOpts.IsArrayCtorOrDtor) {
  689. StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);
  690. // FIXME: Handle other kinds of trivial constructors as well.
  691. for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
  692. I != E; ++I)
  693. performTrivialCopy(Bldr, *I, *Call);
  694. } else {
  695. for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
  696. I != E; ++I)
  697. getCheckerManager().runCheckersForEvalCall(DstEvaluated, *I, *Call, *this,
  698. CallOpts);
  699. }
  700. // If the CFG was constructed without elements for temporary destructors
  701. // and the just-called constructor created a temporary object, then
  702. // stop exploration if the temporary object has a no-return destructor.
  703. // This can lose coverage because the destructor, if it were present
  704. // in the CFG, would be called at the end of the full expression or
  705. // later (for life-time extended temporaries) -- but avoids infeasible
  706. // paths when no-return temporary destructors are used for assertions.
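// A typical source of such temporaries is an assertion or logging helper, e.g.
// (illustrative):
//   struct Fatal { [[noreturn]] ~Fatal(); /* ... */ };
//   Fatal() << "unreachable";  // without temporary dtors in the CFG, sink here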
  707. ExplodedNodeSet DstEvaluatedPostProcessed;
  708. StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx);
  709. const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  710. if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
  711. if (llvm::isa_and_nonnull<CXXTempObjectRegion>(TargetRegion) &&
  712. cast<CXXConstructorDecl>(Call->getDecl())
  713. ->getParent()
  714. ->isAnyDestructorNoReturn()) {
  715. // If we've inlined the constructor, then DstEvaluated would be empty.
  716. // In this case we still want a sink, which could be implemented
  717. // in processCallExit. But we don't have that implemented at the moment,
  718. // so if you hit this assertion, see if you can avoid inlining
  719. // the respective constructor when analyzer-config cfg-temporary-dtors
  720. // is set to false.
  721. // Otherwise there's nothing wrong with inlining such a constructor.
  722. assert(!DstEvaluated.empty() &&
  723. "We should not have inlined this constructor!");
  724. for (ExplodedNode *N : DstEvaluated) {
  725. Bldr.generateSink(E, N, N->getState());
  726. }
  727. // There is no need to run the PostCall and PostStmt checker
  728. // callbacks because we just generated sinks on all nodes in the
  729. // frontier.
  730. return;
  731. }
  732. }
  733. ExplodedNodeSet DstPostArgumentCleanup;
  734. for (ExplodedNode *I : DstEvaluatedPostProcessed)
  735. finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);
  736. // If there were other constructors called for object-type arguments
  737. // of this constructor, clean them up.
  738. ExplodedNodeSet DstPostCall;
  739. getCheckerManager().runCheckersForPostCall(DstPostCall,
  740. DstPostArgumentCleanup,
  741. *Call, *this);
  742. getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this);
  743. }
  744. void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
  745. ExplodedNode *Pred,
  746. ExplodedNodeSet &Dst) {
  747. handleConstructor(CE, Pred, Dst);
  748. }
  749. void ExprEngine::VisitCXXInheritedCtorInitExpr(
  750. const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred,
  751. ExplodedNodeSet &Dst) {
  752. handleConstructor(CE, Pred, Dst);
  753. }
  754. void ExprEngine::VisitCXXDestructor(QualType ObjectType,
  755. const MemRegion *Dest,
  756. const Stmt *S,
  757. bool IsBaseDtor,
  758. ExplodedNode *Pred,
  759. ExplodedNodeSet &Dst,
  760. EvalCallOptions &CallOpts) {
  761. assert(S && "A destructor without a trigger!");
  762. const LocationContext *LCtx = Pred->getLocationContext();
  763. ProgramStateRef State = Pred->getState();
  764. const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  765. assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  766. const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  767. // FIXME: There should always be a Decl, otherwise the destructor call
  768. // shouldn't have been added to the CFG in the first place.
  769. if (!DtorDecl) {
  770. // Skip the invalid destructor. We cannot simply return because
  771. // it would interrupt the analysis instead.
  772. static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
  773. // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
  774. PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T);
  775. NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  776. Bldr.generateNode(PP, Pred->getState(), Pred);
  777. return;
  778. }
  779. if (!Dest) {
  780. // We're trying to destroy something that is not a region. This may happen
  781. // for a variety of reasons (unknown target region, concrete integer instead
  782. // of target region, etc.). The current code makes an attempt to recover.
  783. // FIXME: We probably don't really need to recover when we're dealing
  784. // with concrete integers specifically.
  785. CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  786. if (const Expr *E = dyn_cast_or_null<Expr>(S)) {
  787. Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext());
  788. } else {
  789. static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
  790. NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  791. Bldr.generateSink(Pred->getLocation().withTag(&T),
  792. Pred->getState(), Pred);
  793. return;
  794. }
  795. }
  796. CallEventManager &CEMgr = getStateManager().getCallEventManager();
  797. CallEventRef<CXXDestructorCall> Call =
  798. CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx);
  799. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  800. Call->getSourceRange().getBegin(),
  801. "Error evaluating destructor");
  802. ExplodedNodeSet DstPreCall;
  803. getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
  804. *Call, *this);
  805. ExplodedNodeSet DstInvalidated;
  806. StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
  807. for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
  808. I != E; ++I)
  809. defaultEvalCall(Bldr, *I, *Call, CallOpts);
  810. getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
  811. *Call, *this);
  812. }
  813. void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
  814. ExplodedNode *Pred,
  815. ExplodedNodeSet &Dst) {
  816. ProgramStateRef State = Pred->getState();
  817. const LocationContext *LCtx = Pred->getLocationContext();
  818. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  819. CNE->getBeginLoc(),
  820. "Error evaluating New Allocator Call");
  821. CallEventManager &CEMgr = getStateManager().getCallEventManager();
  822. CallEventRef<CXXAllocatorCall> Call =
  823. CEMgr.getCXXAllocatorCall(CNE, State, LCtx);
  824. ExplodedNodeSet DstPreCall;
  825. getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
  826. *Call, *this);
  827. ExplodedNodeSet DstPostCall;
  828. StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
  829. for (ExplodedNode *I : DstPreCall) {
  830. // FIXME: Provide evalCall for checkers?
  831. defaultEvalCall(CallBldr, I, *Call);
  832. }
  833. // If the call is inlined, DstPostCall will be empty and we bail out now.
  834. // Store return value of operator new() for future use, until the actual
  835. // CXXNewExpr gets processed.
  836. ExplodedNodeSet DstPostValue;
  837. StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
  838. for (ExplodedNode *I : DstPostCall) {
  839. // FIXME: Because CNE serves as the "call site" for the allocator (due to
  840. // lack of a better expression in the AST), the conjured return value symbol
  841. // is going to be of the same type (C++ object pointer type). Technically
  842. // this is not correct because the operator new's prototype always says that
  843. // it returns a 'void *'. So we should change the type of the symbol,
  844. // and then evaluate the cast over the symbolic pointer from 'void *' to
  845. // the object pointer type. But without changing the symbol's type, evaluating
  846. // the no-op symbolic cast over it breaks too much, so we
  847. // skip it for now.
  848. ProgramStateRef State = I->getState();
  849. SVal RetVal = State->getSVal(CNE, LCtx);
  850. // [basic.stc.dynamic.allocation] (on the return value of an allocation
  851. // function):
  852. // "The order, contiguity, and initial value of storage allocated by
  853. // successive calls to an allocation function are unspecified."
  854. State = State->bindDefaultInitial(RetVal, UndefinedVal{}, LCtx);
  855. // If this allocation function is not declared as non-throwing, failures
  856. // /must/ be signalled by exceptions, and thus the return value will never
  857. // be NULL. -fno-exceptions does not influence these semantics.
  858. // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
  859. // where new can return NULL. If we end up supporting that option, we can
  860. // consider adding a check for it here.
  861. // C++11 [basic.stc.dynamic.allocation]p3.
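// For example (illustrative):
//   S *p = new S;                 // assumed non-null below
//   S *q = new (std::nothrow) S;  // nothrow prototype, so no such assumption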
  862. if (const FunctionDecl *FD = CNE->getOperatorNew()) {
  863. QualType Ty = FD->getType();
  864. if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
  865. if (!ProtoType->isNothrow())
  866. State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
  867. }
  868. ValueBldr.generateNode(
  869. CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
  870. }
  871. ExplodedNodeSet DstPostPostCallCallback;
  872. getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
  873. DstPostValue, *Call, *this);
  874. for (ExplodedNode *I : DstPostPostCallCallback) {
  875. getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this);
  876. }
  877. }
  878. void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
  879. ExplodedNodeSet &Dst) {
  880. // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
  881. // Also, we need to decide how allocators actually work -- they're not
  882. // really part of the CXXNewExpr because they happen BEFORE the
  883. // CXXConstructExpr subexpression. See PR12014 for some discussion.
  884. unsigned blockCount = currBldrCtx->blockCount();
  885. const LocationContext *LCtx = Pred->getLocationContext();
  886. SVal symVal = UnknownVal();
  887. FunctionDecl *FD = CNE->getOperatorNew();
  888. bool IsStandardGlobalOpNewFunction =
  889. FD->isReplaceableGlobalAllocationFunction();
  890. ProgramStateRef State = Pred->getState();
  891. // Retrieve the stored operator new() return value.
  892. if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
  893. symVal = *getObjectUnderConstruction(State, CNE, LCtx);
  894. State = finishObjectConstruction(State, CNE, LCtx);
  895. }
  896. // We assume all standard global 'operator new' functions allocate memory on
  897. // the heap. We realize this is an approximation that might not correctly model
  898. // a custom global allocator.
  899. if (symVal.isUnknown()) {
  900. if (IsStandardGlobalOpNewFunction)
  901. symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount);
  902. else
  903. symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(),
  904. blockCount);
  905. }
  906. CallEventManager &CEMgr = getStateManager().getCallEventManager();
  907. CallEventRef<CXXAllocatorCall> Call =
  908. CEMgr.getCXXAllocatorCall(CNE, State, LCtx);
  909. if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
  910. // Invalidate placement args.
  911. // FIXME: Once we figure out how we want allocators to work,
  912. // we should be using the usual pre-/(default-)eval-/post-call checkers
  913. // here.
  914. State = Call->invalidateRegions(blockCount);
  915. if (!State)
  916. return;
  917. // If this allocation function is not declared as non-throwing, failures
  918. // /must/ be signalled by exceptions, and thus the return value will never
  919. // be NULL. -fno-exceptions does not influence these semantics.
  920. // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
  921. // where new can return NULL. If we end up supporting that option, we can
  922. // consider adding a check for it here.
  923. // C++11 [basic.stc.dynamic.allocation]p3.
  924. if (const auto *ProtoType = FD->getType()->getAs<FunctionProtoType>())
  925. if (!ProtoType->isNothrow())
  926. if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
  927. State = State->assume(*dSymVal, true);
  928. }
  929. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  930. SVal Result = symVal;
  931. if (CNE->isArray()) {
  932. if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) {
  933. // If each element is initialized by its default constructor, the field
  934. // values are properly placed inside the required region; however, if an
  935. // initializer list is used, this doesn't happen automatically.
  936. auto *Init = CNE->getInitializer();
  937. bool isInitList = isa_and_nonnull<InitListExpr>(Init);
  938. QualType ObjTy =
  939. isInitList ? Init->getType() : CNE->getType()->getPointeeType();
  940. const ElementRegion *EleReg =
  941. MRMgr.getElementRegion(ObjTy, svalBuilder.makeArrayIndex(0), NewReg,
  942. svalBuilder.getContext());
  943. Result = loc::MemRegionVal(EleReg);
  944. // If the array is list-initialized, we bind the initializer list to the
  945. // memory region here; otherwise we would lose it.
  946. if (isInitList) {
  947. Bldr.takeNodes(Pred);
  948. Pred = Bldr.generateNode(CNE, Pred, State);
  949. SVal V = State->getSVal(Init, LCtx);
  950. ExplodedNodeSet evaluated;
  951. evalBind(evaluated, CNE, Pred, Result, V, true);
  952. Bldr.takeNodes(Pred);
  953. Bldr.addNodes(evaluated);
  954. Pred = *evaluated.begin();
  955. State = Pred->getState();
  956. }
  957. }
  958. State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
  959. Bldr.generateNode(CNE, Pred, State);
  960. return;
  961. }
  962. // FIXME: Once we have proper support for CXXConstructExprs inside
  963. // CXXNewExpr, we need to make sure that the constructed object is not
  964. // immediately invalidated here. (The placement call should happen before
  965. // the constructor call anyway.)
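// For example (illustrative), non-array placement new evaluates to its placement
// argument:
//   alignas(S) char buf[sizeof(S)];
//   S *p = new (buf) S();  // 'p' is 'buf' converted to 'S *'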
  966. if (FD->isReservedGlobalPlacementOperator()) {
  967. // Non-array placement new should always return the placement location.
  968. SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
  969. Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
  970. CNE->getPlacementArg(0)->getType());
  971. }
  972. // Bind the address of the object, then check to see if we cached out.
  973. State = State->BindExpr(CNE, LCtx, Result);
  974. ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
  975. if (!NewN)
  976. return;
  977. // If the type is not a record, we won't have a CXXConstructExpr as an
  978. // initializer. Copy the value over.
  979. if (const Expr *Init = CNE->getInitializer()) {
  980. if (!isa<CXXConstructExpr>(Init)) {
  981. assert(Bldr.getResults().size() == 1);
  982. Bldr.takeNodes(NewN);
  983. evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx),
  984. /*FirstInit=*/IsStandardGlobalOpNewFunction);
  985. }
  986. }
  987. }
  988. void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
  989. ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  990. CallEventManager &CEMgr = getStateManager().getCallEventManager();
  991. CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall(
  992. CDE, Pred->getState(), Pred->getLocationContext());
  993. ExplodedNodeSet DstPreCall;
  994. getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this);
  995. ExplodedNodeSet DstPostCall;
  996. if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
  997. StmtNodeBuilder Bldr(DstPreCall, DstPostCall, *currBldrCtx);
  998. for (ExplodedNode *I : DstPreCall) {
  999. defaultEvalCall(Bldr, I, *Call);
  1000. }
  1001. } else {
  1002. DstPostCall = DstPreCall;
  1003. }
  1004. getCheckerManager().runCheckersForPostCall(Dst, DstPostCall, *Call, *this);
  1005. }
  1006. void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred,
  1007. ExplodedNodeSet &Dst) {
  1008. const VarDecl *VD = CS->getExceptionDecl();
  1009. if (!VD) {
  1010. Dst.Add(Pred);
  1011. return;
  1012. }
  1013. const LocationContext *LCtx = Pred->getLocationContext();
  1014. SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(),
  1015. currBldrCtx->blockCount());
  1016. ProgramStateRef state = Pred->getState();
  1017. state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);
  1018. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  1019. Bldr.generateNode(CS, Pred, state);
  1020. }
  1021. void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
  1022. ExplodedNodeSet &Dst) {
  1023. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  1024. // Get the this object region from StoreManager.
  1025. const LocationContext *LCtx = Pred->getLocationContext();
  1026. const MemRegion *R =
  1027. svalBuilder.getRegionManager().getCXXThisRegion(
  1028. getContext().getCanonicalType(TE->getType()),
  1029. LCtx);
  1030. ProgramStateRef state = Pred->getState();
  1031. SVal V = state->getSVal(loc::MemRegionVal(R));
  1032. Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V));
  1033. }
  1034. void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
  1035. ExplodedNodeSet &Dst) {
  1036. const LocationContext *LocCtxt = Pred->getLocationContext();
  1037. // Get the region of the lambda itself.
  1038. const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
  1039. LE, LocCtxt);
  1040. SVal V = loc::MemRegionVal(R);
  1041. ProgramStateRef State = Pred->getState();
  1042. // If we created a new MemRegion for the lambda, we should explicitly bind
  1043. // the captures.
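// For example (illustrative), in
//   int n = 1;
//   auto l = [n] { return n; };
// the value of 'n' is bound to the corresponding field of the closure object's
// region.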
  1044. unsigned Idx = 0;
  1045. CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin();
  1046. for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(),
  1047. e = LE->capture_init_end();
  1048. i != e; ++i, ++CurField, ++Idx) {
  1049. FieldDecl *FieldForCapture = *CurField;
  1050. SVal FieldLoc = State->getLValue(FieldForCapture, V);
  1051. SVal InitVal;
  1052. if (!FieldForCapture->hasCapturedVLAType()) {
  1053. const Expr *InitExpr = *i;
  1054. assert(InitExpr && "Capture missing initialization expression");
  1055. // Capturing a zero-length array is a no-op, so we ignore it to get a more
  1056. // accurate analysis. If it's not ignored, it would set the default
  1057. // binding of the lambda to 'Unknown', which can lead to falsely detecting
  1058. // 'Uninitialized' values as 'Unknown' and not reporting a warning.
  1059. const auto FTy = FieldForCapture->getType();
  1060. if (FTy->isConstantArrayType() &&
  1061. getContext().getConstantArrayElementCount(
  1062. getContext().getAsConstantArrayType(FTy)) == 0)
  1063. continue;
  1064. // With C++17 copy elision the InitExpr can be anything, so instead of
  1065. // pattern-matching all cases, we simply check whether the current field is
  1066. // under construction or not, regardless of what its InitExpr is.
  1067. if (const auto OUC =
  1068. getObjectUnderConstruction(State, {LE, Idx}, LocCtxt)) {
  1069. InitVal = State->getSVal(OUC->getAsRegion());
  1070. State = finishObjectConstruction(State, {LE, Idx}, LocCtxt);
  1071. } else
  1072. InitVal = State->getSVal(InitExpr, LocCtxt);
  1073. } else {
  1074. assert(!getObjectUnderConstruction(State, {LE, Idx}, LocCtxt) &&
  1075. "VLA capture by value is a compile time error!");
  1076. // The field stores the length of a captured variable-length array.
  1077. // These captures don't have initialization expressions; instead we
  1078. // get the length from the VLAType size expression.
  1079. Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
  1080. InitVal = State->getSVal(SizeExpr, LocCtxt);
  1081. }
  1082. State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  1083. }
  1084. // Decay the Loc into an RValue, because there might be a
  1085. // MaterializeTemporaryExpr node above this one which expects the bound value
  1086. // to be an RValue.
  1087. SVal LambdaRVal = State->getSVal(R);
  1088. ExplodedNodeSet Tmp;
  1089. StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  1090. // FIXME: is this the right program point kind?
  1091. Bldr.generateNode(LE, Pred,
  1092. State->BindExpr(LE, LocCtxt, LambdaRVal),
  1093. nullptr, ProgramPoint::PostLValueKind);
  1094. // FIXME: Move all post/pre visits to ::Visit().
  1095. getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
  1096. }