// clang/lib/Analysis/UninitializedValues.cpp
  1. //===- UninitializedValues.cpp - Find Uninitialized Values ----------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file implements uninitialized values analysis for source-level CFGs.
  10. //
  11. //===----------------------------------------------------------------------===//
  12. #include "clang/Analysis/Analyses/UninitializedValues.h"
  13. #include "clang/AST/Attr.h"
  14. #include "clang/AST/Decl.h"
  15. #include "clang/AST/DeclBase.h"
  16. #include "clang/AST/Expr.h"
  17. #include "clang/AST/OperationKinds.h"
  18. #include "clang/AST/Stmt.h"
  19. #include "clang/AST/StmtObjC.h"
  20. #include "clang/AST/StmtVisitor.h"
  21. #include "clang/AST/Type.h"
  22. #include "clang/Analysis/Analyses/PostOrderCFGView.h"
  23. #include "clang/Analysis/AnalysisDeclContext.h"
  24. #include "clang/Analysis/CFG.h"
  25. #include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
  26. #include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
  27. #include "clang/Basic/LLVM.h"
  28. #include "llvm/ADT/BitVector.h"
  29. #include "llvm/ADT/DenseMap.h"
  30. #include "llvm/ADT/None.h"
  31. #include "llvm/ADT/Optional.h"
  32. #include "llvm/ADT/PackedVector.h"
  33. #include "llvm/ADT/SmallBitVector.h"
  34. #include "llvm/ADT/SmallVector.h"
  35. #include "llvm/Support/Casting.h"
  36. #include <algorithm>
  37. #include <cassert>
  38. using namespace clang;
  39. #define DEBUG_LOGGING 0
  40. static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  41. if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
  42. !vd->isExceptionVariable() && !vd->isInitCapture() &&
  43. !vd->isImplicit() && vd->getDeclContext() == dc) {
  44. QualType ty = vd->getType();
  45. return ty->isScalarType() || ty->isVectorType() || ty->isRecordType();
  46. }
  47. return false;
  48. }
  49. //------------------------------------------------------------------------====//
  50. // DeclToIndex: a mapping from Decls we track to value indices.
  51. //====------------------------------------------------------------------------//
namespace {

/// Maps each tracked VarDecl of a DeclContext to a dense index, used as the
/// variable's position in the per-block value vectors.
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;

public:
  DeclToIndex() = default;

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  Optional<unsigned> getValueIndex(const VarDecl *d) const;
};

} // namespace
  65. void DeclToIndex::computeMap(const DeclContext &dc) {
  66. unsigned count = 0;
  67. DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
  68. E(dc.decls_end());
  69. for ( ; I != E; ++I) {
  70. const VarDecl *vd = *I;
  71. if (isTrackedVar(vd, &dc))
  72. map[vd] = count++;
  73. }
  74. }
  75. Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  76. llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  77. if (I == map.end())
  78. return None;
  79. return I->second;
  80. }
  81. //------------------------------------------------------------------------====//
  82. // CFGBlockValues: dataflow values for CFG blocks.
  83. //====------------------------------------------------------------------------//
  84. // These values are defined in such a way that a merge can be done using
  85. // a bitwise OR.
/// Per-variable dataflow lattice. The bit encoding is chosen so that the
/// join (merge) of two values is simply their bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
  90. static bool isUninitialized(const Value v) {
  91. return v >= Uninitialized;
  92. }
/// True only for the definitely-uninitialized state; MayUninitialized does
/// not qualify.
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}
namespace {

/// Packed vector holding one 2-bit Value per tracked variable.
using ValueVector = llvm::PackedVector<Value, 2, llvm::SmallBitVector>;

/// Stores, for every CFG block, the variables' Values at the block's exit,
/// plus a scratch vector used while the transfer function runs over a block.
class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;  // indexed by CFGBlock ID
  ValueVector scratch;               // working vector for the current block
  DeclToIndex declToIndex;

public:
  CFGBlockValues(const CFG &cfg);

  /// Number of variables being tracked.
  unsigned getNumEntries() const { return declToIndex.size(); }

  /// Size the vectors for the tracked declarations of \p dc.
  void computeSetOfDeclarations(const DeclContext &dc);

  /// The stored value vector for \p block.
  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  /// Overwrite every scratch entry with \p V.
  void setAllScratchValues(Value V);

  /// Merge \p source into scratch (assignment for the first predecessor,
  /// bitwise-OR join for the rest).
  void mergeIntoScratch(ValueVector const &source, bool isFirst);

  /// Store scratch into \p block's vector; returns true if it changed.
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  /// Scratch entry for a tracked variable \p vd (asserts if untracked).
  ValueVector::reference operator[](const VarDecl *vd);

  /// Value of \p vd at the exit of \p block. (\p dstBlock is not consulted
  /// by this implementation.)
  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};

} // namespace
// The per-block vectors are sized later, in computeSetOfDeclarations().
CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}
  127. void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  128. declToIndex.computeMap(dc);
  129. unsigned decls = declToIndex.size();
  130. scratch.resize(decls);
  131. unsigned n = cfg.getNumBlockIDs();
  132. if (!n)
  133. return;
  134. vals.resize(n);
  135. for (auto &val : vals)
  136. val.resize(decls);
  137. }
#if DEBUG_LOGGING
/// Debug helper: dump a block's value vector (plus a tag number) to stderr.
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (const auto &i : bv)
    llvm::errs() << ' ' << i;
  llvm::errs() << " : " << num << '\n';
}
#endif
  147. void CFGBlockValues::setAllScratchValues(Value V) {
  148. for (unsigned I = 0, E = scratch.size(); I != E; ++I)
  149. scratch[I] = V;
  150. }
// The first merged predecessor seeds scratch by assignment; every later one
// is combined with bitwise OR, which is the join of the Value encoding.
void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}
// Copy scratch into the block's stored vector, returning whether anything
// changed; callers can use the result to decide whether to keep iterating.
bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}
// Clear scratch: every entry goes back to 0, i.e. Unknown.
void CFGBlockValues::resetScratch() {
  scratch.reset();
}
  171. ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  172. const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  173. assert(idx.hasValue());
  174. return scratch[idx.getValue()];
  175. }
  176. //------------------------------------------------------------------------====//
  177. // Classification of DeclRefExprs as use or initialization.
  178. //====------------------------------------------------------------------------//
  179. namespace {
  180. class FindVarResult {
  181. const VarDecl *vd;
  182. const DeclRefExpr *dr;
  183. public:
  184. FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}
  185. const DeclRefExpr *getDeclRefExpr() const { return dr; }
  186. const VarDecl *getDecl() const { return vd; }
  187. };
  188. } // namespace
  189. static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  190. while (Ex) {
  191. Ex = Ex->IgnoreParenNoopCasts(C);
  192. if (const auto *CE = dyn_cast<CastExpr>(Ex)) {
  193. if (CE->getCastKind() == CK_LValueBitCast) {
  194. Ex = CE->getSubExpr();
  195. continue;
  196. }
  197. }
  198. break;
  199. }
  200. return Ex;
  201. }
  202. /// If E is an expression comprising a reference to a single variable, find that
  203. /// variable.
  204. static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  205. if (const auto *DRE =
  206. dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
  207. if (const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()))
  208. if (isTrackedVar(VD, DC))
  209. return FindVarResult(VD, DRE);
  210. return FindVarResult(nullptr, nullptr);
  211. }
namespace {

/// Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    ConstRefUse,
    Ignore
  };

private:
  const DeclContext *DC;
  // Explicit per-reference classifications; references without an entry get
  // the default computed in get() below.
  llvm::DenseMap<const DeclRefExpr *, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);
  void VisitOMPExecutableDirective(OMPExecutableDirective *ED);

  void operator()(Stmt *S) { Visit(S); }

  /// Classification for \p DRE: the recorded entry if one exists, otherwise
  /// Init for tracked variables (assumed to have escaped the analysis) and
  /// Ignore for everything else.
  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const auto *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};

} // namespace
  253. static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  254. if (VD->getType()->isRecordType())
  255. return nullptr;
  256. if (Expr *Init = VD->getInit()) {
  257. const auto *DRE =
  258. dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
  259. if (DRE && DRE->getDecl() == VD)
  260. return DRE;
  261. }
  262. return nullptr;
  263. }
// Recursively attach classification \p C to the variable reference(s) the
// expression \p E ultimately denotes, looking through conditional operators,
// opaque values, member accesses, and pointer-to-member / comma operators.
void ClassifyRefs::classify(const Expr *E, Class C) {
  // The result of a ?: could also be an lvalue.
  E = E->IgnoreParens();
  if (const auto *CO = dyn_cast<ConditionalOperator>(E)) {
    // Either arm may be the object that is read or written.
    classify(CO->getTrueExpr(), C);
    classify(CO->getFalseExpr(), C);
    return;
  }

  if (const auto *BCO = dyn_cast<BinaryConditionalOperator>(E)) {
    // Only the false arm is a distinct expression here; the true arm reuses
    // the condition through an OpaqueValueExpr.
    classify(BCO->getFalseExpr(), C);
    return;
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E)) {
    classify(OVE->getSourceExpr(), C);
    return;
  }

  if (const auto *ME = dyn_cast<MemberExpr>(E)) {
    // A member access classifies the base object; static data members are
    // not locals and are left alone.
    if (const auto *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
      if (!VD->isStaticDataMember())
        classify(ME->getBase(), C);
    }
    return;
  }

  if (const auto *BO = dyn_cast<BinaryOperator>(E)) {
    switch (BO->getOpcode()) {
    case BO_PtrMemD:
    case BO_PtrMemI:
      // Pointer-to-member access affects the object on the left.
      classify(BO->getLHS(), C);
      return;
    case BO_Comma:
      // A comma expression yields its right-hand side.
      classify(BO->getRHS(), C);
      return;
    default:
      return;
    }
  }

  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    // Keep the greater classification (std::max over the Class enum order)
    // if the reference was already classified.
    Classification[DRE] = std::max(Classification[DRE], C);
}
  304. void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  305. for (auto *DI : DS->decls()) {
  306. auto *VD = dyn_cast<VarDecl>(DI);
  307. if (VD && isTrackedVar(VD))
  308. if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
  309. Classification[DRE] = SelfInit;
  310. }
  311. }
  312. void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  313. // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  314. // is not a compound-assignment, we will treat it as initializing the variable
  315. // when TransferFunctions visits it. A compound-assignment does not affect
  316. // whether a variable is uninitialized, and there's no point counting it as a
  317. // use.
  318. if (BO->isCompoundAssignmentOp())
  319. classify(BO->getLHS(), Use);
  320. else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma)
  321. classify(BO->getLHS(), Ignore);
  322. }
  323. void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  324. // Increment and decrement are uses despite there being no lvalue-to-rvalue
  325. // conversion.
  326. if (UO->isIncrementDecrementOp())
  327. classify(UO->getSubExpr(), Use);
  328. }
// Every expression appearing in the directive's clauses is classified as a
// use.
void ClassifyRefs::VisitOMPExecutableDirective(OMPExecutableDirective *ED) {
  for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses()))
    classify(cast<Expr>(S), Use);
}
  333. static bool isPointerToConst(const QualType &QT) {
  334. return QT->isAnyPointerType() && QT->getPointeeType().isConstQualified();
  335. }
  336. static bool hasTrivialBody(CallExpr *CE) {
  337. if (FunctionDecl *FD = CE->getDirectCallee()) {
  338. if (FunctionTemplateDecl *FTD = FD->getPrimaryTemplate())
  339. return FTD->getTemplatedDecl()->hasTrivialBody();
  340. return FD->hasTrivialBody();
  341. }
  342. return false;
  343. }
  344. void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  345. // Classify arguments to std::move as used.
  346. if (CE->isCallToStdMove()) {
  347. // RecordTypes are handled in SemaDeclCXX.cpp.
  348. if (!CE->getArg(0)->getType()->isRecordType())
  349. classify(CE->getArg(0), Use);
  350. return;
  351. }
  352. bool isTrivialBody = hasTrivialBody(CE);
  353. // If a value is passed by const pointer to a function,
  354. // we should not assume that it is initialized by the call, and we
  355. // conservatively do not assume that it is used.
  356. // If a value is passed by const reference to a function,
  357. // it should already be initialized.
  358. for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
  359. I != E; ++I) {
  360. if ((*I)->isGLValue()) {
  361. if ((*I)->getType().isConstQualified())
  362. classify((*I), isTrivialBody ? Ignore : ConstRefUse);
  363. } else if (isPointerToConst((*I)->getType())) {
  364. const Expr *Ex = stripCasts(DC->getParentASTContext(), *I);
  365. const auto *UO = dyn_cast<UnaryOperator>(Ex);
  366. if (UO && UO->getOpcode() == UO_AddrOf)
  367. Ex = UO->getSubExpr();
  368. classify(Ex, Ignore);
  369. }
  370. }
  371. }
  372. void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  373. if (CE->getCastKind() == CK_LValueToRValue)
  374. classify(CE->getSubExpr(), Use);
  375. else if (const auto *CSE = dyn_cast<CStyleCastExpr>(CE)) {
  376. if (CSE->getType()->isVoidType()) {
  377. // Squelch any detected load of an uninitialized value if
  378. // we cast it to void.
  379. // e.g. (void) x;
  380. classify(CSE->getSubExpr(), Ignore);
  381. }
  382. }
  383. }
  384. //------------------------------------------------------------------------====//
  385. // Transfer function for uninitialized values analysis.
  386. //====------------------------------------------------------------------------//
namespace {

/// The per-statement transfer function of the analysis: visits the
/// statements of a single CFG block, updating the scratch vector in 'vals'
/// and reporting uses of uninitialized variables through 'handler'.
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler &handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
      : vals(vals), cfg(cfg), block(block), ac(ac),
        classification(classification), objCNoRet(ac.getASTContext()),
        handler(handler) {}

  /// Report \p ex as a (possibly-)uninitialized use of \p vd.
  void reportUse(const Expr *ex, const VarDecl *vd);
  /// Report \p ex as a const-reference use of a definitely-uninitialized
  /// \p vd.
  void reportConstRefUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitGCCAsmStmt(GCCAsmStmt *as);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);
  void VisitOMPExecutableDirective(OMPExecutableDirective *ED);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  /// Build the UninitUse describing a use of \p vd (whose current value is
  /// the uninitialized \p v) at expression \p ex, computing the branches
  /// that inevitably lead to the use without initializing the variable.
  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    SmallVector<const CFGBlock*, 32> Queue;
    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.pop_back_val();

      // If the use is always reached from the entry block, make a note of that.
      if (B == &cfg.getEntry())
        Use.setUninitAfterCall();

      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (!Pred)
          continue;

        Value AtPredExit = vals.getValue(Pred, B, vd);
        if (AtPredExit == Initialized)
          // This block initializes the variable.
          continue;
        if (AtPredExit == MayUninitialized &&
            vals.getValue(B, nullptr, vd) == Uninitialized) {
          // This block declares the variable (uninitialized), and is reachable
          // from a block that initializes the variable. We can't guarantee to
          // give an earlier location for the diagnostic (and it appears that
          // this code is intended to be reachable) so give a diagnostic here
          // and go no further down this path.
          Use.setUninitAfterDecl();
          continue;
        }

        if (AtPredExit == MayUninitialized) {
          // If the predecessor's terminator is an "asm goto" that initializes
          // the variable, then don't count it as "initialized" on the indirect
          // paths.
          CFGTerminator term = Pred->getTerminator();
          if (const auto *as = dyn_cast_or_null<GCCAsmStmt>(term.getStmt())) {
            const CFGBlock *fallthrough = *Pred->succ_begin();
            if (as->isAsmGoto() &&
                llvm::any_of(as->outputs(), [&](const Expr *output) {
                  return vd == findVar(output).getDecl() &&
                         llvm::any_of(as->labels(),
                                      [&](const AddrLabelExpr *label) {
                           return label->getLabel()->getStmt() == B->Label &&
                                  B != fallthrough;
                         });
                })) {
              Use.setUninitAfterDecl();
              continue;
            }
          }
        }

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (const auto *Block : cfg) {
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminatorStmt();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;

              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};

} // namespace
  582. void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  583. Value v = vals[vd];
  584. if (isUninitialized(v))
  585. handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
  586. }
  587. void TransferFunctions::reportConstRefUse(const Expr *ex, const VarDecl *vd) {
  588. Value v = vals[vd];
  589. if (isAlwaysUninit(v))
  590. handler.handleConstRefUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
  591. }
  592. void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  593. // This represents an initialization of the 'element' value.
  594. if (const auto *DS = dyn_cast<DeclStmt>(FS->getElement())) {
  595. const auto *VD = cast<VarDecl>(DS->getSingleDecl());
  596. if (isTrackedVar(VD))
  597. vals[VD] = Initialized;
  598. }
  599. }
// Visit the expressions used in the directive's clauses, then the structured
// block itself (standalone directives have no structured block).
void TransferFunctions::VisitOMPExecutableDirective(
    OMPExecutableDirective *ED) {
  for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses())) {
    assert(S && "Expected non-null used-in-clause child.");
    Visit(S);
  }
  if (!ED->isStandaloneDirective())
    Visit(ED->getStructuredBlock());
}
  609. void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  610. const BlockDecl *bd = be->getBlockDecl();
  611. for (const auto &I : bd->captures()) {
  612. const VarDecl *vd = I.getVariable();
  613. if (!isTrackedVar(vd))
  614. continue;
  615. if (I.isByRef()) {
  616. vals[vd] = Initialized;
  617. continue;
  618. }
  619. reportUse(be, vd);
  620. }
  621. }
  622. void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  623. if (Decl *Callee = ce->getCalleeDecl()) {
  624. if (Callee->hasAttr<ReturnsTwiceAttr>()) {
  625. // After a call to a function like setjmp or vfork, any variable which is
  626. // initialized anywhere within this function may now be initialized. For
  627. // now, just assume such a call initializes all variables. FIXME: Only
  628. // mark variables as initialized if they have an initializer which is
  629. // reachable from here.
  630. vals.setAllScratchValues(Initialized);
  631. }
  632. else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
  633. // Functions labeled like "analyzer_noreturn" are often used to denote
  634. // "panic" functions that in special debug situations can still return,
  635. // but for the most part should not be treated as returning. This is a
  636. // useful annotation borrowed from the static analyzer that is useful for
  637. // suppressing branch-specific false positives when we call one of these
  638. // functions but keep pretending the path continues (when in reality the
  639. // user doesn't care).
  640. vals.setAllScratchValues(Unknown);
  641. }
  642. }
  643. }
  644. void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  645. switch (classification.get(dr)) {
  646. case ClassifyRefs::Ignore:
  647. break;
  648. case ClassifyRefs::Use:
  649. reportUse(dr, cast<VarDecl>(dr->getDecl()));
  650. break;
  651. case ClassifyRefs::Init:
  652. vals[cast<VarDecl>(dr->getDecl())] = Initialized;
  653. break;
  654. case ClassifyRefs::SelfInit:
  655. handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
  656. break;
  657. case ClassifyRefs::ConstRefUse:
  658. reportConstRefUse(dr, cast<VarDecl>(dr->getDecl()));
  659. break;
  660. }
  661. }
  662. void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  663. if (BO->getOpcode() == BO_Assign) {
  664. FindVarResult Var = findVar(BO->getLHS());
  665. if (const VarDecl *VD = Var.getDecl())
  666. vals[VD] = Initialized;
  667. }
  668. }
  669. void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  670. for (auto *DI : DS->decls()) {
  671. auto *VD = dyn_cast<VarDecl>(DI);
  672. if (VD && isTrackedVar(VD)) {
  673. if (getSelfInitExpr(VD)) {
  674. // If the initializer consists solely of a reference to itself, we
  675. // explicitly mark the variable as uninitialized. This allows code
  676. // like the following:
  677. //
  678. // int x = x;
  679. //
  680. // to deliberately leave a variable uninitialized. Different analysis
  681. // clients can detect this pattern and adjust their reporting
  682. // appropriately, but we need to continue to analyze subsequent uses
  683. // of the variable.
  684. vals[VD] = Uninitialized;
  685. } else if (VD->getInit()) {
  686. // Treat the new variable as initialized.
  687. vals[VD] = Initialized;
  688. } else {
  689. // No initializer: the variable is now uninitialized. This matters
  690. // for cases like:
  691. // while (...) {
  692. // int n;
  693. // use(n);
  694. // n = 0;
  695. // }
  696. // FIXME: Mark the variable as uninitialized whenever its scope is
  697. // left, since its scope could be re-entered by a jump over the
  698. // declaration.
  699. vals[VD] = Uninitialized;
  700. }
  701. }
  702. }
  703. }
void TransferFunctions::VisitGCCAsmStmt(GCCAsmStmt *as) {
  // An "asm goto" statement is a terminator that may initialize some variables.
  if (!as->isAsmGoto())
    return;

  ASTContext &C = ac.getASTContext();
  for (const Expr *O : as->outputs()) {
    const Expr *Ex = stripCasts(C, O);

    // Strip away any unary operators. Invalid l-values are reported by other
    // semantic analysis passes.
    while (const auto *UO = dyn_cast<UnaryOperator>(Ex))
      Ex = stripCasts(C, UO->getSubExpr());

    // Mark the variable as potentially uninitialized for those cases where
    // it's used on an indirect path, where it's not guaranteed to be
    // defined.
    if (const VarDecl *VD = findVar(Ex).getDecl())
      vals[VD] = MayUninitialized;
  }
}
  722. void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  723. // If the Objective-C message expression is an implicit no-return that
  724. // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  725. if (objCNoRet.isImplicitNoReturn(ME)) {
  726. vals.setAllScratchValues(Unknown);
  727. }
  728. }
  729. //------------------------------------------------------------------------====//
  730. // High-level "driver" logic for uninitialized values analysis.
  731. //====------------------------------------------------------------------------//
  732. static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
  733. AnalysisDeclContext &ac, CFGBlockValues &vals,
  734. const ClassifyRefs &classification,
  735. llvm::BitVector &wasAnalyzed,
  736. UninitVariablesHandler &handler) {
  737. wasAnalyzed[block->getBlockID()] = true;
  738. vals.resetScratch();
  739. // Merge in values of predecessor blocks.
  740. bool isFirst = true;
  741. for (CFGBlock::const_pred_iterator I = block->pred_begin(),
  742. E = block->pred_end(); I != E; ++I) {
  743. const CFGBlock *pred = *I;
  744. if (!pred)
  745. continue;
  746. if (wasAnalyzed[pred->getBlockID()]) {
  747. vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
  748. isFirst = false;
  749. }
  750. }
  751. // Apply the transfer function.
  752. TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  753. for (const auto &I : *block) {
  754. if (Optional<CFGStmt> cs = I.getAs<CFGStmt>())
  755. tf.Visit(const_cast<Stmt *>(cs->getStmt()));
  756. }
  757. CFGTerminator terminator = block->getTerminator();
  758. if (auto *as = dyn_cast_or_null<GCCAsmStmt>(terminator.getStmt()))
  759. if (as->isAsmGoto())
  760. tf.Visit(as);
  761. return vals.updateValueVectorWithScratch(block);
  762. }
  763. namespace {
  764. /// PruneBlocksHandler is a special UninitVariablesHandler that is used
  765. /// to detect when a CFGBlock has any *potential* use of an uninitialized
  766. /// variable. It is mainly used to prune out work during the final
  767. /// reporting pass.
  768. struct PruneBlocksHandler : public UninitVariablesHandler {
  769. /// Records if a CFGBlock had a potential use of an uninitialized variable.
  770. llvm::BitVector hadUse;
  771. /// Records if any CFGBlock had a potential use of an uninitialized variable.
  772. bool hadAnyUse = false;
  773. /// The current block to scribble use information.
  774. unsigned currentBlock = 0;
  775. PruneBlocksHandler(unsigned numBlocks) : hadUse(numBlocks, false) {}
  776. ~PruneBlocksHandler() override = default;
  777. void handleUseOfUninitVariable(const VarDecl *vd,
  778. const UninitUse &use) override {
  779. hadUse[currentBlock] = true;
  780. hadAnyUse = true;
  781. }
  782. void handleConstRefUseOfUninitVariable(const VarDecl *vd,
  783. const UninitUse &use) override {
  784. hadUse[currentBlock] = true;
  785. hadAnyUse = true;
  786. }
  787. /// Called when the uninitialized variable analysis detects the
  788. /// idiom 'int x = x'. All other uses of 'x' within the initializer
  789. /// are handled by handleUseOfUninitVariable.
  790. void handleSelfInit(const VarDecl *vd) override {
  791. hadUse[currentBlock] = true;
  792. hadAnyUse = true;
  793. }
  794. };
  795. } // namespace
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  // Determine the set of tracked variable declarations; bail out early if
  // there is nothing to analyze.
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }

  // Proceed with the worklist: iterate to a fixed point, re-enqueuing a
  // block's successors whenever its value vector changes or when the block
  // is visited for the first time. PruneBlocksHandler records, per block,
  // whether any potential uninitialized use was seen, so the reporting
  // pass below can skip blocks with nothing to report.
  ForwardDataflowWorklist worklist(cfg, ac);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  PruneBlocksHandler PBH(cfg.getNumBlockIDs());
  while (const CFGBlock *block = worklist.dequeue()) {
    PBH.currentBlock = block->getBlockID();
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Nothing was flagged anywhere; skip the reporting pass entirely.
  if (!PBH.hadAnyUse)
    return;

  // Run through the blocks one more time, and report uninitialized variables.
  for (const auto *block : cfg)
    if (PBH.hadUse[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
      ++stats.NumBlockVisits;
    }
}
  843. UninitVariablesHandler::~UninitVariablesHandler() = default;