CalledOnceCheck.cpp
  1. //===- CalledOnceCheck.cpp - Check 'called once' parameters ---------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. #include "clang/Analysis/Analyses/CalledOnceCheck.h"
  9. #include "clang/AST/ASTContext.h"
  10. #include "clang/AST/Attr.h"
  11. #include "clang/AST/Decl.h"
  12. #include "clang/AST/DeclBase.h"
  13. #include "clang/AST/Expr.h"
  14. #include "clang/AST/ExprObjC.h"
  15. #include "clang/AST/OperationKinds.h"
  16. #include "clang/AST/ParentMap.h"
  17. #include "clang/AST/RecursiveASTVisitor.h"
  18. #include "clang/AST/Stmt.h"
  19. #include "clang/AST/StmtObjC.h"
  20. #include "clang/AST/StmtVisitor.h"
  21. #include "clang/AST/Type.h"
  22. #include "clang/Analysis/AnalysisDeclContext.h"
  23. #include "clang/Analysis/CFG.h"
  24. #include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
  25. #include "clang/Basic/Builtins.h"
  26. #include "clang/Basic/IdentifierTable.h"
  27. #include "clang/Basic/LLVM.h"
  28. #include "llvm/ADT/BitVector.h"
  29. #include "llvm/ADT/BitmaskEnum.h"
  30. #include "llvm/ADT/Optional.h"
  31. #include "llvm/ADT/PointerIntPair.h"
  32. #include "llvm/ADT/STLExtras.h"
  33. #include "llvm/ADT/Sequence.h"
  34. #include "llvm/ADT/SmallVector.h"
  35. #include "llvm/ADT/StringRef.h"
  36. #include "llvm/Support/Casting.h"
  37. #include "llvm/Support/Compiler.h"
  38. #include "llvm/Support/ErrorHandling.h"
  39. #include <memory>
  40. using namespace clang;
  41. namespace {
  42. static constexpr unsigned EXPECTED_MAX_NUMBER_OF_PARAMS = 2;
  43. template <class T>
  44. using ParamSizedVector = llvm::SmallVector<T, EXPECTED_MAX_NUMBER_OF_PARAMS>;
  45. static constexpr unsigned EXPECTED_NUMBER_OF_BASIC_BLOCKS = 8;
  46. template <class T>
  47. using CFGSizedVector = llvm::SmallVector<T, EXPECTED_NUMBER_OF_BASIC_BLOCKS>;
  48. constexpr llvm::StringLiteral CONVENTIONAL_NAMES[] = {
  49. "completionHandler", "completion", "withCompletionHandler",
  50. "withCompletion", "completionBlock", "withCompletionBlock",
  51. "replyTo", "reply", "withReplyTo"};
  52. constexpr llvm::StringLiteral CONVENTIONAL_SUFFIXES[] = {
  53. "WithCompletionHandler", "WithCompletion", "WithCompletionBlock",
  54. "WithReplyTo", "WithReply"};
  55. constexpr llvm::StringLiteral CONVENTIONAL_CONDITIONS[] = {
  56. "error", "cancel", "shouldCall", "done", "OK", "success"};
  57. struct KnownCalledOnceParameter {
  58. llvm::StringLiteral FunctionName;
  59. unsigned ParamIndex;
  60. };
  61. constexpr KnownCalledOnceParameter KNOWN_CALLED_ONCE_PARAMETERS[] = {
  62. {llvm::StringLiteral{"dispatch_async"}, 1},
  63. {llvm::StringLiteral{"dispatch_async_and_wait"}, 1},
  64. {llvm::StringLiteral{"dispatch_after"}, 2},
  65. {llvm::StringLiteral{"dispatch_sync"}, 1},
  66. {llvm::StringLiteral{"dispatch_once"}, 1},
  67. {llvm::StringLiteral{"dispatch_barrier_async"}, 1},
  68. {llvm::StringLiteral{"dispatch_barrier_async_and_wait"}, 1},
  69. {llvm::StringLiteral{"dispatch_barrier_sync"}, 1}};
  70. class ParameterStatus {
  71. public:
  72. // Status kind is basically the main part of parameter's status.
  73. // The kind represents our knowledge (so far) about a tracked parameter
  74. // in the context of this analysis.
  75. //
  76. // Since we want to report on missing and extraneous calls, we need to
  77. // track whether the parameter was called or not. This automatically
  78. // decides two kinds: `NotCalled` and `Called`.
  79. //
  80. // One of the erroneous situations is the case when parameter is called only
  81. // on some of the paths. We could've considered it `NotCalled`, but we want
  82. // to report double call warnings even if these two calls are not guaranteed
  83. // to happen in every execution. We also don't want to have it as `Called`
  84. // because not calling tracked parameter on all of the paths is an error
  85. // on its own. For these reasons, we need to have a separate kind,
  86. // `MaybeCalled`, and change `Called` to `DefinitelyCalled` to avoid
  87. // confusion.
  88. //
  89. // Two violations of calling parameter more than once and not calling it on
  90. // every path are not, however, mutually exclusive. In situations where both
  91. // violations take place, we prefer to report ONLY double call. It's always
  92. // harder to pinpoint a bug that has arisen when a user neglects to take the
  93. // right action (and therefore, no action is taken), than when a user takes
  94. // the wrong action. And, in order to remember that we already reported
  95. // a double call, we need another kind: `Reported`.
  96. //
  97. // Our analysis is intra-procedural and, while in the perfect world,
  98. // developers only use tracked parameters to call them, in the real world,
  99. // the picture might be different. Parameters can be stored in global
  100. // variables or leaked into other functions that we know nothing about.
  101. // We try to be lenient and trust users. Another kind `Escaped` reflects
  102. // such situations. We don't know if it gets called there or not, but we
  103. // should always think of `Escaped` as the best possible option.
  104. //
  105. // Some of the paths in the analyzed functions might end with a call
  106. // to noreturn functions. Such paths are not required to have parameter
  107. // calls and we want to track that. For the purposes of better diagnostics,
  108. // we don't want to reuse `Escaped` and, thus, have another kind `NoReturn`.
  109. //
  110. // Additionally, we have `NotVisited` kind that tells us nothing about
  111. // a tracked parameter, but is used for tracking analyzed (aka visited)
  112. // basic blocks.
  113. //
  114. // If we consider `|` to be a JOIN operation of two kinds coming from
  115. // two different paths, the following properties must hold:
  116. //
  117. // 1. for any Kind K: K | K == K
  118. // Joining two identical kinds should result in the same kind.
  119. //
  120. // 2. for any Kind K: Reported | K == Reported
  121. // Doesn't matter on which path it was reported, it still is.
  122. //
  123. // 3. for any Kind K: NoReturn | K == K
  124. // We can totally ignore noreturn paths during merges.
  125. //
  126. // 4. DefinitelyCalled | NotCalled == MaybeCalled
  127. // Called on one path, not called on another - that's simply
  128. // a definition for MaybeCalled.
  129. //
  130. // 5. for any Kind K in [DefinitelyCalled, NotCalled, MaybeCalled]:
  131. // Escaped | K == K
  132. // Escaped mirrors other statuses after joins.
  133. // Every situation, when we join any of the listed kinds K,
  134. // is a violation. For this reason, in order to assume the
  135. // best outcome for this escape, we consider it to be the
  136. // same as the other path.
  137. //
  138. // 6. for any Kind K in [DefinitelyCalled, NotCalled]:
  139. // MaybeCalled | K == MaybeCalled
  140. // MaybeCalled should basically stay after almost every join.
  141. enum Kind {
  142. // No-return paths should be absolutely transparent for the analysis.
  143. // 0x0 is the identity element for selected join operation (binary or).
  144. NoReturn = 0x0, /* 0000 */
  145. // Escaped marks situations when marked parameter escaped into
  146. // another function (so we can assume that it was possibly called there).
  147. Escaped = 0x1, /* 0001 */
  148. // Parameter was definitely called once at this point.
  149. DefinitelyCalled = 0x3, /* 0011 */
  150. // Kinds less or equal to NON_ERROR_STATUS are not considered errors.
  151. NON_ERROR_STATUS = DefinitelyCalled,
  152. // Parameter was not yet called.
  153. NotCalled = 0x5, /* 0101 */
  154. // Parameter was not called at least on one path leading to this point,
  155. // while there is also at least one path that it gets called.
  156. MaybeCalled = 0x7, /* 0111 */
  157. // Parameter was not yet analyzed.
  158. NotVisited = 0x8, /* 1000 */
  159. // We already reported a violation and stopped tracking calls for this
  160. // parameter.
  161. Reported = 0xF, /* 1111 */
  162. LLVM_MARK_AS_BITMASK_ENUM(/* LargestValue = */ Reported)
  163. };
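// A few joins worked out with the bit patterns above, to make the encoding
// concrete (the numbers refer to the properties listed before the enum):
//
//   DefinitelyCalled | NotCalled        == 0011 | 0101 == 0111 == MaybeCalled  (#4)
//   Escaped          | NotCalled        == 0001 | 0101 == 0101 == NotCalled    (#5)
//   MaybeCalled      | DefinitelyCalled == 0111 | 0011 == 0111 == MaybeCalled  (#6)
//   NoReturn         | K                == 0000 | K    == K                    (#3)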
  164. constexpr ParameterStatus() = default;
  165. /* implicit */ ParameterStatus(Kind K) : StatusKind(K) {
  166. assert(!seenAnyCalls(K) && "Can't initialize status without a call");
  167. }
  168. ParameterStatus(Kind K, const Expr *Call) : StatusKind(K), Call(Call) {
  169. assert(seenAnyCalls(K) && "This kind is not supposed to have a call");
  170. }
  171. const Expr &getCall() const {
  172. assert(seenAnyCalls(getKind()) && "ParameterStatus doesn't have a call");
  173. return *Call;
  174. }
  175. static bool seenAnyCalls(Kind K) {
  176. return (K & DefinitelyCalled) == DefinitelyCalled && K != Reported;
  177. }
  178. bool seenAnyCalls() const { return seenAnyCalls(getKind()); }
  179. static bool isErrorStatus(Kind K) { return K > NON_ERROR_STATUS; }
  180. bool isErrorStatus() const { return isErrorStatus(getKind()); }
  181. Kind getKind() const { return StatusKind; }
  182. void join(const ParameterStatus &Other) {
  183. // If we have a pointer already, let's keep it.
  184. // For the purposes of the analysis, it doesn't really matter
  185. // which call we report.
  186. //
  187. // If we don't have a pointer, let's take whatever gets joined.
  188. if (!Call) {
  189. Call = Other.Call;
  190. }
  191. // Join kinds.
  192. StatusKind |= Other.getKind();
  193. }
  194. bool operator==(const ParameterStatus &Other) const {
  195. // We compare only kinds; the pointers on their own are only additional
  196. // information.
  197. return getKind() == Other.getKind();
  198. }
  199. private:
  200. // It would've been a perfect place to use llvm::PointerIntPair, but
  201. // unfortunately NumLowBitsAvailable for clang::Expr had been reduced to 2.
  202. Kind StatusKind = NotVisited;
  203. const Expr *Call = nullptr;
  204. };
  205. /// State aggregates statuses of all tracked parameters.
  206. class State {
  207. public:
  208. State(unsigned Size, ParameterStatus::Kind K = ParameterStatus::NotVisited)
  209. : ParamData(Size, K) {}
  210. /// Return status of a parameter with the given index.
  211. /// \{
  212. ParameterStatus &getStatusFor(unsigned Index) { return ParamData[Index]; }
  213. const ParameterStatus &getStatusFor(unsigned Index) const {
  214. return ParamData[Index];
  215. }
  216. /// \}
  217. /// Return true if parameter with the given index can be called.
  218. bool seenAnyCalls(unsigned Index) const {
  219. return getStatusFor(Index).seenAnyCalls();
  220. }
  221. /// Return a reference that we consider a call.
  222. ///
  223. /// Should only be used for parameters that can be called.
  224. const Expr &getCallFor(unsigned Index) const {
  225. return getStatusFor(Index).getCall();
  226. }
  227. /// Return status kind of parameter with the given index.
  228. ParameterStatus::Kind getKindFor(unsigned Index) const {
  229. return getStatusFor(Index).getKind();
  230. }
  231. bool isVisited() const {
  232. return llvm::all_of(ParamData, [](const ParameterStatus &S) {
  233. return S.getKind() != ParameterStatus::NotVisited;
  234. });
  235. }
  236. // Join other state into the current state.
  237. void join(const State &Other) {
  238. assert(ParamData.size() == Other.ParamData.size() &&
  239. "Couldn't join statuses with different sizes");
  240. for (auto Pair : llvm::zip(ParamData, Other.ParamData)) {
  241. std::get<0>(Pair).join(std::get<1>(Pair));
  242. }
  243. }
  244. using iterator = ParamSizedVector<ParameterStatus>::iterator;
  245. using const_iterator = ParamSizedVector<ParameterStatus>::const_iterator;
  246. iterator begin() { return ParamData.begin(); }
  247. iterator end() { return ParamData.end(); }
  248. const_iterator begin() const { return ParamData.begin(); }
  249. const_iterator end() const { return ParamData.end(); }
  250. bool operator==(const State &Other) const {
  251. return ParamData == Other.ParamData;
  252. }
  253. private:
  254. ParamSizedVector<ParameterStatus> ParamData;
  255. };
  256. /// A simple class that finds DeclRefExpr in the given expression.
  257. ///
  258. /// However, we don't want to find ANY nested DeclRefExpr skipping whatever
  259. /// expressions on our way. Only certain expressions considered "no-op"
  260. /// for our task are indeed skipped.
  261. class DeclRefFinder
  262. : public ConstStmtVisitor<DeclRefFinder, const DeclRefExpr *> {
  263. public:
  264. /// Find a DeclRefExpr in the given expression.
  265. ///
  266. /// In its most basic form (ShouldRetrieveFromComparisons == false),
  267. /// this function can be simply reduced to the following question:
  268. ///
  269. /// - If expression E is used as a function argument, could we say
  270. /// that DeclRefExpr nested in E is used as an argument?
  271. ///
  272. /// According to this rule, we can say that parens, casts and dereferencing
  273. /// (dereferencing only applied to function pointers, but this is our case)
  274. /// can be skipped.
  275. ///
  276. /// When we should look into comparisons the question changes to:
  277. ///
  278. /// - If expression E is used as a condition, could we say that
  279. /// DeclRefExpr is being checked?
  280. ///
  281. /// And even though these are two different questions, they have quite a lot
  282. /// in common. Actually, we can say that whatever expression answers
  283. /// positively the first question also fits the second question as well.
  284. ///
  285. /// In addition, we skip binary operators == and !=, and unary operator !.
  286. static const DeclRefExpr *find(const Expr *E,
  287. bool ShouldRetrieveFromComparisons = false) {
  288. return DeclRefFinder(ShouldRetrieveFromComparisons).Visit(E);
  289. }
  290. const DeclRefExpr *VisitDeclRefExpr(const DeclRefExpr *DR) { return DR; }
  291. const DeclRefExpr *VisitUnaryOperator(const UnaryOperator *UO) {
  292. switch (UO->getOpcode()) {
  293. case UO_LNot:
  294. // We care about logical not only if we care about comparisons.
  295. if (!ShouldRetrieveFromComparisons)
  296. return nullptr;
  297. LLVM_FALLTHROUGH;
  298. // Function pointer/references can be dereferenced before a call.
  299. // That doesn't make it, however, any different from a regular call.
  300. // For this reason, dereference operation is a "no-op".
  301. case UO_Deref:
  302. return Visit(UO->getSubExpr());
  303. default:
  304. return nullptr;
  305. }
  306. }
  307. const DeclRefExpr *VisitBinaryOperator(const BinaryOperator *BO) {
  308. if (!ShouldRetrieveFromComparisons)
  309. return nullptr;
  310. switch (BO->getOpcode()) {
  311. case BO_EQ:
  312. case BO_NE: {
  313. const DeclRefExpr *LHS = Visit(BO->getLHS());
  314. return LHS ? LHS : Visit(BO->getRHS());
  315. }
  316. default:
  317. return nullptr;
  318. }
  319. }
  320. const DeclRefExpr *VisitOpaqueValueExpr(const OpaqueValueExpr *OVE) {
  321. return Visit(OVE->getSourceExpr());
  322. }
  323. const DeclRefExpr *VisitCallExpr(const CallExpr *CE) {
  324. if (!ShouldRetrieveFromComparisons)
  325. return nullptr;
  326. // We want to see through some of the boolean builtin functions
  327. // that we are likely to see in conditions.
  328. switch (CE->getBuiltinCallee()) {
  329. case Builtin::BI__builtin_expect:
  330. case Builtin::BI__builtin_expect_with_probability: {
  331. assert(CE->getNumArgs() >= 2);
  332. const DeclRefExpr *Candidate = Visit(CE->getArg(0));
  333. return Candidate != nullptr ? Candidate : Visit(CE->getArg(1));
  334. }
  335. case Builtin::BI__builtin_unpredictable:
  336. return Visit(CE->getArg(0));
  337. default:
  338. return nullptr;
  339. }
  340. }
  341. const DeclRefExpr *VisitExpr(const Expr *E) {
  342. // It is a fallback method that gets called whenever the actual type
  343. // of the given expression is not covered.
  344. //
  345. // We first check if we have anything to skip. And then repeat the whole
  346. // procedure for a nested expression instead.
  347. const Expr *DeclutteredExpr = E->IgnoreParenCasts();
  348. return E != DeclutteredExpr ? Visit(DeclutteredExpr) : nullptr;
  349. }
  350. private:
  351. DeclRefFinder(bool ShouldRetrieveFromComparisons)
  352. : ShouldRetrieveFromComparisons(ShouldRetrieveFromComparisons) {}
  353. bool ShouldRetrieveFromComparisons;
  354. };
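// To illustrate (with a made-up tracked parameter named "handler"), all of
// the following resolve to the same DeclRefExpr through DeclRefFinder:
//
//   (*handler)(10);                            // dereference is a no-op
//   if (!handler) { ... }                      // only with comparisons
//   if (handler == nil) { ... }                // only with comparisons
//   if (__builtin_expect(handler != nil, 1))   // sees through the builtin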
  355. const DeclRefExpr *findDeclRefExpr(const Expr *In,
  356. bool ShouldRetrieveFromComparisons = false) {
  357. return DeclRefFinder::find(In, ShouldRetrieveFromComparisons);
  358. }
  359. const ParmVarDecl *
  360. findReferencedParmVarDecl(const Expr *In,
  361. bool ShouldRetrieveFromComparisons = false) {
  362. if (const DeclRefExpr *DR =
  363. findDeclRefExpr(In, ShouldRetrieveFromComparisons)) {
  364. return dyn_cast<ParmVarDecl>(DR->getDecl());
  365. }
  366. return nullptr;
  367. }
  368. /// Return the condition expression of a statement if it has one.
  369. const Expr *getCondition(const Stmt *S) {
  370. if (!S) {
  371. return nullptr;
  372. }
  373. if (const auto *If = dyn_cast<IfStmt>(S)) {
  374. return If->getCond();
  375. }
  376. if (const auto *Ternary = dyn_cast<AbstractConditionalOperator>(S)) {
  377. return Ternary->getCond();
  378. }
  379. return nullptr;
  380. }
  381. /// A small helper class that collects all named identifiers in the given
  382. /// expression. It traverses it recursively, so names from deeper levels
  383. /// of the AST will end up in the results.
  384. /// Results might have duplicate names; if this is a problem, convert to
  385. /// string sets afterwards.
  386. class NamesCollector : public RecursiveASTVisitor<NamesCollector> {
  387. public:
  388. static constexpr unsigned EXPECTED_NUMBER_OF_NAMES = 5;
  389. using NameCollection =
  390. llvm::SmallVector<llvm::StringRef, EXPECTED_NUMBER_OF_NAMES>;
  391. static NameCollection collect(const Expr *From) {
  392. NamesCollector Impl;
  393. Impl.TraverseStmt(const_cast<Expr *>(From));
  394. return Impl.Result;
  395. }
  396. bool VisitDeclRefExpr(const DeclRefExpr *E) {
  397. Result.push_back(E->getDecl()->getName());
  398. return true;
  399. }
  400. bool VisitObjCPropertyRefExpr(const ObjCPropertyRefExpr *E) {
  401. llvm::StringRef Name;
  402. if (E->isImplicitProperty()) {
  403. ObjCMethodDecl *PropertyMethodDecl = nullptr;
  404. if (E->isMessagingGetter()) {
  405. PropertyMethodDecl = E->getImplicitPropertyGetter();
  406. } else {
  407. PropertyMethodDecl = E->getImplicitPropertySetter();
  408. }
  409. assert(PropertyMethodDecl &&
  410. "Implicit property must have associated declaration");
  411. Name = PropertyMethodDecl->getSelector().getNameForSlot(0);
  412. } else {
  413. assert(E->isExplicitProperty());
  414. Name = E->getExplicitProperty()->getName();
  415. }
  416. Result.push_back(Name);
  417. return true;
  418. }
  419. private:
  420. NamesCollector() = default;
  421. NameCollection Result;
  422. };
  423. /// Check whether the given expression mentions any of conventional names.
  424. bool mentionsAnyOfConventionalNames(const Expr *E) {
  425. NamesCollector::NameCollection MentionedNames = NamesCollector::collect(E);
  426. return llvm::any_of(MentionedNames, [](llvm::StringRef ConditionName) {
  427. return llvm::any_of(
  428. CONVENTIONAL_CONDITIONS,
  429. [ConditionName](const llvm::StringLiteral &Conventional) {
  430. return ConditionName.contains_insensitive(Conventional);
  431. });
  432. });
  433. }
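// For example, a made-up condition such as
//
//   if (error != nil || userDidCancel) { ... }
//
// mentions "error" and a name containing "cancel" (the match is
// case-insensitive), so mentionsAnyOfConventionalNames recognizes the branch
// as a conventional error/cancellation check.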
  434. /// Clarification is a simple pair of a reason why parameter is not called
  435. /// on every path and a statement to blame.
  436. struct Clarification {
  437. NeverCalledReason Reason;
  438. const Stmt *Location;
  439. };
  440. /// A helper class that can produce a clarification based on the given pair
  441. /// of basic blocks.
  442. class NotCalledClarifier
  443. : public ConstStmtVisitor<NotCalledClarifier,
  444. llvm::Optional<Clarification>> {
  445. public:
  446. /// The main entrypoint for the class, the function that tries to find the
  447. /// clarification of how to explain which sub-path starts with a CFG edge
  448. /// from Conditional to SuccWithoutCall.
  449. ///
  450. /// This means that this function has one precondition:
  451. /// SuccWithoutCall should be a successor block for Conditional.
  452. ///
  453. /// Because clarification is only needed for non-trivial pairs of blocks
  454. /// (i.e. when SuccWithoutCall is not the only successor), it returns meaningful
  455. /// results only for such cases. For this very reason, the parent basic
  456. /// block, Conditional, is named that way, so it is clear what kind of
  457. /// block is expected.
  458. static llvm::Optional<Clarification>
  459. clarify(const CFGBlock *Conditional, const CFGBlock *SuccWithoutCall) {
  460. if (const Stmt *Terminator = Conditional->getTerminatorStmt()) {
  461. return NotCalledClarifier{Conditional, SuccWithoutCall}.Visit(Terminator);
  462. }
  463. return llvm::None;
  464. }
  465. llvm::Optional<Clarification> VisitIfStmt(const IfStmt *If) {
  466. return VisitBranchingBlock(If, NeverCalledReason::IfThen);
  467. }
  468. llvm::Optional<Clarification>
  469. VisitAbstractConditionalOperator(const AbstractConditionalOperator *Ternary) {
  470. return VisitBranchingBlock(Ternary, NeverCalledReason::IfThen);
  471. }
  472. llvm::Optional<Clarification> VisitSwitchStmt(const SwitchStmt *Switch) {
  473. const Stmt *CaseToBlame = SuccInQuestion->getLabel();
  474. if (!CaseToBlame) {
  475. // If interesting basic block is not labeled, it means that this
  476. // basic block does not represent any of the cases.
  477. return Clarification{NeverCalledReason::SwitchSkipped, Switch};
  478. }
  479. for (const SwitchCase *Case = Switch->getSwitchCaseList(); Case;
  480. Case = Case->getNextSwitchCase()) {
  481. if (Case == CaseToBlame) {
  482. return Clarification{NeverCalledReason::Switch, Case};
  483. }
  484. }
  485. llvm_unreachable("Found unexpected switch structure");
  486. }
  487. llvm::Optional<Clarification> VisitForStmt(const ForStmt *For) {
  488. return VisitBranchingBlock(For, NeverCalledReason::LoopEntered);
  489. }
  490. llvm::Optional<Clarification> VisitWhileStmt(const WhileStmt *While) {
  491. return VisitBranchingBlock(While, NeverCalledReason::LoopEntered);
  492. }
  493. llvm::Optional<Clarification>
  494. VisitBranchingBlock(const Stmt *Terminator, NeverCalledReason DefaultReason) {
  495. assert(Parent->succ_size() == 2 &&
  496. "Branching block should have exactly two successors");
  497. unsigned SuccessorIndex = getSuccessorIndex(Parent, SuccInQuestion);
  498. NeverCalledReason ActualReason =
  499. updateForSuccessor(DefaultReason, SuccessorIndex);
  500. return Clarification{ActualReason, Terminator};
  501. }
  502. llvm::Optional<Clarification> VisitBinaryOperator(const BinaryOperator *) {
  503. // We don't want to report on short-circuit logical operations.
  504. return llvm::None;
  505. }
  506. llvm::Optional<Clarification> VisitStmt(const Stmt *Terminator) {
  507. // If we got here, we didn't have a visit function for more derived
  508. // classes of statement that this terminator actually belongs to.
  509. //
  510. // This is not a good scenario and should not happen in practice, but
  511. // at least we'll warn the user.
  512. return Clarification{NeverCalledReason::FallbackReason, Terminator};
  513. }
  514. static unsigned getSuccessorIndex(const CFGBlock *Parent,
  515. const CFGBlock *Child) {
  516. CFGBlock::const_succ_iterator It = llvm::find(Parent->succs(), Child);
  517. assert(It != Parent->succ_end() &&
  518. "Given blocks should be in parent-child relationship");
  519. return It - Parent->succ_begin();
  520. }
  521. static NeverCalledReason
  522. updateForSuccessor(NeverCalledReason ReasonForTrueBranch,
  523. unsigned SuccessorIndex) {
  524. assert(SuccessorIndex <= 1);
  525. unsigned RawReason =
  526. static_cast<unsigned>(ReasonForTrueBranch) + SuccessorIndex;
  527. assert(RawReason <=
  528. static_cast<unsigned>(NeverCalledReason::LARGEST_VALUE));
  529. return static_cast<NeverCalledReason>(RawReason);
  530. }
  531. private:
  532. NotCalledClarifier(const CFGBlock *Parent, const CFGBlock *SuccInQuestion)
  533. : Parent(Parent), SuccInQuestion(SuccInQuestion) {}
  534. const CFGBlock *Parent, *SuccInQuestion;
  535. };
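// A small sketch (made-up code) of what gets clarified:
//
//   if (error) {
//     return;          // no call to the tracked parameter on this path
//   }
//   completion();
//
// Given the 'if' block and its call-free "then" successor,
// NotCalledClarifier::clarify returns NeverCalledReason::IfThen with the
// IfStmt as the statement to blame.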
  536. class CalledOnceChecker : public ConstStmtVisitor<CalledOnceChecker> {
  537. public:
  538. static void check(AnalysisDeclContext &AC, CalledOnceCheckHandler &Handler,
  539. bool CheckConventionalParameters) {
  540. CalledOnceChecker(AC, Handler, CheckConventionalParameters).check();
  541. }
  542. private:
  543. CalledOnceChecker(AnalysisDeclContext &AC, CalledOnceCheckHandler &Handler,
  544. bool CheckConventionalParameters)
  545. : FunctionCFG(*AC.getCFG()), AC(AC), Handler(Handler),
  546. CheckConventionalParameters(CheckConventionalParameters),
  547. CurrentState(0) {
  548. initDataStructures();
  549. assert((size() == 0 || !States.empty()) &&
  550. "Data structures are inconsistent");
  551. }
  552. //===----------------------------------------------------------------------===//
  553. // Initializing functions
  554. //===----------------------------------------------------------------------===//
  555. void initDataStructures() {
  556. const Decl *AnalyzedDecl = AC.getDecl();
  557. if (const auto *Function = dyn_cast<FunctionDecl>(AnalyzedDecl)) {
  558. findParamsToTrack(Function);
  559. } else if (const auto *Method = dyn_cast<ObjCMethodDecl>(AnalyzedDecl)) {
  560. findParamsToTrack(Method);
  561. } else if (const auto *Block = dyn_cast<BlockDecl>(AnalyzedDecl)) {
  562. findCapturesToTrack(Block);
  563. findParamsToTrack(Block);
  564. }
  565. // Have something to track, let's init states for every block from the CFG.
  566. if (size() != 0) {
  567. States =
  568. CFGSizedVector<State>(FunctionCFG.getNumBlockIDs(), State(size()));
  569. }
  570. }
  571. void findCapturesToTrack(const BlockDecl *Block) {
  572. for (const auto &Capture : Block->captures()) {
  573. if (const auto *P = dyn_cast<ParmVarDecl>(Capture.getVariable())) {
  574. // Parameter DeclContext is its owning function or method.
  575. const DeclContext *ParamContext = P->getDeclContext();
  576. if (shouldBeCalledOnce(ParamContext, P)) {
  577. TrackedParams.push_back(P);
  578. }
  579. }
  580. }
  581. }
  582. template <class FunctionLikeDecl>
  583. void findParamsToTrack(const FunctionLikeDecl *Function) {
  584. for (unsigned Index : llvm::seq<unsigned>(0u, Function->param_size())) {
  585. if (shouldBeCalledOnce(Function, Index)) {
  586. TrackedParams.push_back(Function->getParamDecl(Index));
  587. }
  588. }
  589. }
  590. //===----------------------------------------------------------------------===//
  591. // Main logic 'check' functions
  592. //===----------------------------------------------------------------------===//
  593. void check() {
  594. // Nothing to check here: we don't have marked parameters.
  595. if (size() == 0 || isPossiblyEmptyImpl())
  596. return;
  597. assert(
  598. llvm::none_of(States, [](const State &S) { return S.isVisited(); }) &&
  599. "None of the blocks should be 'visited' before the analysis");
  600. // For our task, both backward and forward approaches suit us well.
  601. // However, in order to report better diagnostics, we decided to go with
  602. // backward analysis.
  603. //
  604. // Let's consider the following CFG and how forward and backward analyses
  605. // will work for it.
  606. //
  607. //             FORWARD:             |             BACKWARD:
  608. //                #1                |                 #1
  609. //           +---------+            |           +-----------+
  610. //           |   if    |            |           |MaybeCalled|
  611. //           +---------+            |           +-----------+
  612. //           |NotCalled|            |           |    if     |
  613. //           +---------+            |           +-----------+
  614. //             /     \              |             /       \
  615. //   #2      /         \      #3    |   #2     /           \      #3
  616. // +----------------+  +---------+  | +----------------+  +---------+
  617. // |     foo()      |  |   ...   |  | |DefinitelyCalled|  |NotCalled|
  618. // +----------------+  +---------+  | +----------------+  +---------+
  619. // |DefinitelyCalled|  |NotCalled|  | |     foo()      |  |   ...   |
  620. // +----------------+  +---------+  | +----------------+  +---------+
  621. //           \         /            |           \         /
  622. //            \  #4   /             |            \  #4   /
  623. //          +-----------+           |           +---------+
  624. //          |    ...    |           |           |NotCalled|
  625. //          +-----------+           |           +---------+
  626. //          |MaybeCalled|           |           |   ...   |
  627. //          +-----------+           |           +---------+
  628. //
  629. // The most natural way to report lacking call in the block #3 would be to
  630. // message that the false branch of the if statement in the block #1 doesn't
  631. // have a call. And while with the forward approach we'll need to find a
  632. // least common ancestor or something like that to find the 'if' to blame,
  633. // backward analysis gives it to us out of the box.
  634. BackwardDataflowWorklist Worklist(FunctionCFG, AC);
  635. // Let's visit EXIT.
  636. const CFGBlock *Exit = &FunctionCFG.getExit();
  637. assignState(Exit, State(size(), ParameterStatus::NotCalled));
  638. Worklist.enqueuePredecessors(Exit);
  639. while (const CFGBlock *BB = Worklist.dequeue()) {
  640. assert(BB && "Worklist should filter out null blocks");
  641. check(BB);
  642. assert(CurrentState.isVisited() &&
  643. "After the check, basic block should be visited");
  644. // Traverse predecessor basic blocks if the status of this block
  645. // has changed.
  646. if (assignState(BB, CurrentState)) {
  647. Worklist.enqueuePredecessors(BB);
  648. }
  649. }
  650. // Check that we have all tracked parameters at the last block.
  651. // As we are performing a backward version of the analysis,
  652. // it should be the ENTRY block.
  653. checkEntry(&FunctionCFG.getEntry());
  654. }
  655. void check(const CFGBlock *BB) {
  656. // We start with a state 'inherited' from all the successors.
  657. CurrentState = joinSuccessors(BB);
  658. assert(CurrentState.isVisited() &&
  659. "Shouldn't start with a 'not visited' state");
  660. // This is the 'exit' situation, broken promises are probably OK
  661. // in such scenarios.
  662. if (BB->hasNoReturnElement()) {
  663. markNoReturn();
  664. // This block still can have calls (even multiple calls) and
  665. // for this reason there is no early return here.
  666. }
  667. // We use a backward dataflow propagation and for this reason we
  668. // should traverse basic blocks bottom-up.
  669. for (const CFGElement &Element : llvm::reverse(*BB)) {
  670. if (Optional<CFGStmt> S = Element.getAs<CFGStmt>()) {
  671. check(S->getStmt());
  672. }
  673. }
  674. }
  675. void check(const Stmt *S) { Visit(S); }
  676. void checkEntry(const CFGBlock *Entry) {
  677. // We finalize this algorithm with the ENTRY block because
  678. // we use a backward version of the analysis. This is where
  679. // we can judge that some of the tracked parameters are not called on
  680. // every path from ENTRY to EXIT.
  681. const State &EntryStatus = getState(Entry);
  682. llvm::BitVector NotCalledOnEveryPath(size(), false);
  683. llvm::BitVector NotUsedOnEveryPath(size(), false);
  684. // Check if there are no calls of the marked parameter at all
  685. for (const auto &IndexedStatus : llvm::enumerate(EntryStatus)) {
  686. const ParmVarDecl *Parameter = getParameter(IndexedStatus.index());
  687. switch (IndexedStatus.value().getKind()) {
  688. case ParameterStatus::NotCalled:
  689. // If there were places where this parameter escapes (aka being used),
  690. // we can provide a more useful diagnostic by pointing at the exact
  691. // branches where it is not even mentioned.
  692. if (!hasEverEscaped(IndexedStatus.index())) {
  693. // This parameter was not used at all, so we should report the
  694. // most generic version of the warning.
  695. if (isCaptured(Parameter)) {
  696. // We want to specify that it was captured by the block.
  697. Handler.handleCapturedNeverCalled(Parameter, AC.getDecl(),
  698. !isExplicitlyMarked(Parameter));
  699. } else {
  700. Handler.handleNeverCalled(Parameter,
  701. !isExplicitlyMarked(Parameter));
  702. }
  703. } else {
  704. // Mark it as 'interesting' to figure out which paths don't even
  705. // have escapes.
  706. NotUsedOnEveryPath[IndexedStatus.index()] = true;
  707. }
  708. break;
  709. case ParameterStatus::MaybeCalled:
  710. // If we have 'maybe called' at this point, we have an error
  711. // that there is at least one path where this parameter
  712. // is not called.
  713. //
  714. // However, reporting the warning with only that information can be
  715. // too vague for the users. For this reason, we mark such parameters
  716. // as "interesting" for further analysis.
  717. NotCalledOnEveryPath[IndexedStatus.index()] = true;
  718. break;
  719. default:
  720. break;
  721. }
  722. }
  723. // Early exit if we don't have parameters for extra analysis...
  724. if (NotCalledOnEveryPath.none() && NotUsedOnEveryPath.none() &&
  725. // ... or if we've seen variables with cleanup functions.
  726. // We can't reason that we've seen every path in this case,
  727. // and thus abandon reporting any warnings that imply that.
  728. !FunctionHasCleanupVars)
  729. return;
  730. // We are looking for a pair of blocks A, B so that the following is true:
  731. // * A is a predecessor of B
  732. // * B is marked as NotCalled
  733. // * A has at least one successor marked as either
  734. // Escaped or DefinitelyCalled
  735. //
  736. // In that situation, it is guaranteed that B is the first block of the path
  737. // where the user doesn't call or use parameter in question.
  738. //
  739. // For this reason, branch A -> B can be used for reporting.
  740. //
  741. // This part of the algorithm is guarded by a condition that the function
  742. // does indeed have a violation of contract. For this reason, we can
  743. // spend more time to find a good spot to place the warning.
  744. //
  745. // The following algorithm has the worst case complexity of O(V + E),
  746. // where V is the number of basic blocks in FunctionCFG,
  747. // E is the number of edges between blocks in FunctionCFG.
  748. for (const CFGBlock *BB : FunctionCFG) {
  749. if (!BB)
  750. continue;
  751. const State &BlockState = getState(BB);
  752. for (unsigned Index : llvm::seq(0u, size())) {
  753. // We don't want to use 'isLosingCall' here because we want to report
  754. // the following situation as well:
  755. //
  756. // MaybeCalled
  757. // | ... |
  758. // MaybeCalled NotCalled
  759. //
  760. // Even though successor is not 'DefinitelyCalled', it is still useful
  761. // to report it, it is still a path without a call.
  762. if (NotCalledOnEveryPath[Index] &&
  763. BlockState.getKindFor(Index) == ParameterStatus::MaybeCalled) {
  764. findAndReportNotCalledBranches(BB, Index);
  765. } else if (NotUsedOnEveryPath[Index] &&
  766. isLosingEscape(BlockState, BB, Index)) {
  767. findAndReportNotCalledBranches(BB, Index, /* IsEscape = */ true);
  768. }
  769. }
  770. }
  771. }
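// Putting checkEntry together on a made-up example (assuming conventional
// parameters are checked):
//
//   void load(void (^completion)(void)) {
//     if (somethingFailed())
//       return;               // a path that never calls 'completion'
//     completion();
//   }
//
// The joined status at ENTRY is MaybeCalled, so the parameter is marked as
// "interesting", and the second pass in checkEntry looks for the edge from
// the 'if' block to its call-free successor to attach the warning to.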
  772. /// Check potential call of a tracked parameter.
  773. void checkDirectCall(const CallExpr *Call) {
  774. if (auto Index = getIndexOfCallee(Call)) {
  775. processCallFor(*Index, Call);
  776. }
  777. }
  778. /// Check the call expression for being an indirect call of one of the tracked
  779. /// parameters. It is indirect in the sense that this particular call is not
  780. /// calling the parameter itself, but rather uses it as the argument.
  781. template <class CallLikeExpr>
  782. void checkIndirectCall(const CallLikeExpr *CallOrMessage) {
  783. // CallExpr::arguments does not interact nicely with llvm::enumerate.
  784. llvm::ArrayRef<const Expr *> Arguments = llvm::makeArrayRef(
  785. CallOrMessage->getArgs(), CallOrMessage->getNumArgs());
  786. // Let's check if any of the call arguments is a point of interest.
  787. for (const auto &Argument : llvm::enumerate(Arguments)) {
  788. if (auto Index = getIndexOfExpression(Argument.value())) {
  789. if (shouldBeCalledOnce(CallOrMessage, Argument.index())) {
  790. // If the corresponding parameter is marked as 'called_once' we should
  791. // consider it as a call.
  792. processCallFor(*Index, CallOrMessage);
  793. } else {
  794. // Otherwise, we mark this parameter as escaped, which can be
  795. // interpreted both as called or not called depending on the context.
  796. processEscapeFor(*Index);
  797. }
  798. // Otherwise, let's keep the state as it is.
  799. }
  800. }
  801. }
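// In other words (made-up snippets): "completionHandler(value)" is a direct
// call handled by checkDirectCall, while passing the parameter on, as in
// "forwardWithCompletionHandler(completionHandler)" or "use(completionHandler)",
// goes through checkIndirectCall: it counts as a call only if the callee's
// corresponding parameter is itself 'called_once' (or conventional);
// otherwise the tracked parameter merely escapes.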
  802. /// Process call of the parameter with the given index
  803. void processCallFor(unsigned Index, const Expr *Call) {
  804. ParameterStatus &CurrentParamStatus = CurrentState.getStatusFor(Index);
  805. if (CurrentParamStatus.seenAnyCalls()) {
  806. // At this point, this parameter was called, so this is a second call.
  807. const ParmVarDecl *Parameter = getParameter(Index);
  808. Handler.handleDoubleCall(
  809. Parameter, &CurrentState.getCallFor(Index), Call,
  810. !isExplicitlyMarked(Parameter),
  811. // We are sure that the second call is definitely
  812. // going to happen if the status is 'DefinitelyCalled'.
  813. CurrentParamStatus.getKind() == ParameterStatus::DefinitelyCalled);
  814. // Mark this parameter as already reported on, so we don't repeat
  815. // warnings.
  816. CurrentParamStatus = ParameterStatus::Reported;
  817. } else if (CurrentParamStatus.getKind() != ParameterStatus::Reported) {
  818. // If we didn't report anything yet, let's mark this parameter
  819. // as called.
  820. ParameterStatus Called(ParameterStatus::DefinitelyCalled, Call);
  821. CurrentParamStatus = Called;
  822. }
  823. }
  824. /// Process escape of the parameter with the given index
  825. void processEscapeFor(unsigned Index) {
  826. ParameterStatus &CurrentParamStatus = CurrentState.getStatusFor(Index);
  827. // Escape overrides whatever error we think happened.
  828. if (CurrentParamStatus.isErrorStatus()) {
  829. CurrentParamStatus = ParameterStatus::Escaped;
  830. }
  831. }
  832. void findAndReportNotCalledBranches(const CFGBlock *Parent, unsigned Index,
  833. bool IsEscape = false) {
  834. for (const CFGBlock *Succ : Parent->succs()) {
  835. if (!Succ)
  836. continue;
  837. if (getState(Succ).getKindFor(Index) == ParameterStatus::NotCalled) {
  838. assert(Parent->succ_size() >= 2 &&
  839. "Block should have at least two successors at this point");
  840. if (auto Clarification = NotCalledClarifier::clarify(Parent, Succ)) {
  841. const ParmVarDecl *Parameter = getParameter(Index);
  842. Handler.handleNeverCalled(
  843. Parameter, AC.getDecl(), Clarification->Location,
  844. Clarification->Reason, !IsEscape, !isExplicitlyMarked(Parameter));
  845. }
  846. }
  847. }
  848. }
  849. //===----------------------------------------------------------------------===//
  850. // Predicate functions to check parameters
  851. //===----------------------------------------------------------------------===//
  852. /// Return true if parameter is explicitly marked as 'called_once'.
  853. static bool isExplicitlyMarked(const ParmVarDecl *Parameter) {
  854. return Parameter->hasAttr<CalledOnceAttr>();
  855. }
  856. /// Return true if the given name matches conventional patterns.
  857. static bool isConventional(llvm::StringRef Name) {
  858. return llvm::count(CONVENTIONAL_NAMES, Name) != 0;
  859. }
  860. /// Return true if the given name has conventional suffixes.
  861. static bool hasConventionalSuffix(llvm::StringRef Name) {
  862. return llvm::any_of(CONVENTIONAL_SUFFIXES, [Name](llvm::StringRef Suffix) {
  863. return Name.endswith(Suffix);
  864. });
  865. }
  866. /// Return true if the given type can be used for conventional parameters.
  867. static bool isConventional(QualType Ty) {
  868. if (!Ty->isBlockPointerType()) {
  869. return false;
  870. }
  871. QualType BlockType = Ty->castAs<BlockPointerType>()->getPointeeType();
  872. // Completion handlers should have a block type with void return type.
  873. return BlockType->castAs<FunctionType>()->getReturnType()->isVoidType();
  874. }
  875. /// Return true if the only parameter of the function is conventional.
  876. static bool isOnlyParameterConventional(const FunctionDecl *Function) {
  877. IdentifierInfo *II = Function->getIdentifier();
  878. return Function->getNumParams() == 1 && II &&
  879. hasConventionalSuffix(II->getName());
  880. }
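// Taken together, these predicates accept something like the made-up
//
//   void fetchUser(NSString *name, void (^completion)(User *, NSError *));
//
// where the second parameter has a conventional name and a block type with a
// void return. A single-parameter function such as
// "runWithCompletionBlock(...)" qualifies through its conventional suffix.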
  881. /// Return whether the 'swift_async' attribute states that the given
  882. /// parameter is conventionally called once.
  883. /// Return llvm::None if the given declaration doesn't have 'swift_async'
  884. /// attribute.
  885. static llvm::Optional<bool> isConventionalSwiftAsync(const Decl *D,
  886. unsigned ParamIndex) {
  887. if (const SwiftAsyncAttr *A = D->getAttr<SwiftAsyncAttr>()) {
  888. if (A->getKind() == SwiftAsyncAttr::None) {
  889. return false;
  890. }
  891. return A->getCompletionHandlerIndex().getASTIndex() == ParamIndex;
  892. }
  893. return llvm::None;
  894. }
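// For example, a declaration annotated roughly like (the spelling below is
// only a sketch):
//
//   void runTask(Task *task, void (^completion)(void))
//       __attribute__((swift_async(not_swift_private, 2)));
//
// names its second parameter (AST index 1) as the completion handler, while
// swift_async(none) opts the declaration out of the convention entirely.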
  895. /// Return true if the specified selector represents an init method.
  896. static bool isInitMethod(Selector MethodSelector) {
  897. return MethodSelector.getMethodFamily() == OMF_init;
  898. }
  899. /// Return true if the specified selector piece matches conventions.
  900. static bool isConventionalSelectorPiece(Selector MethodSelector,
  901. unsigned PieceIndex,
  902. QualType PieceType) {
  903. if (!isConventional(PieceType) || isInitMethod(MethodSelector)) {
  904. return false;
  905. }
  906. if (MethodSelector.getNumArgs() == 1) {
  907. assert(PieceIndex == 0);
  908. return hasConventionalSuffix(MethodSelector.getNameForSlot(0));
  909. }
  910. llvm::StringRef PieceName = MethodSelector.getNameForSlot(PieceIndex);
  911. return isConventional(PieceName) || hasConventionalSuffix(PieceName);
  912. }
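// For instance (made-up selectors), isConventionalSelectorPiece accepts the
// second piece of "getUserWithID:completionHandler:" by its conventional
// name, and a one-piece selector such as "loadDataWithCompletionBlock:" by
// its suffix; in both cases the parameter type must also be a void-returning
// block, and init-family methods are never considered.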
  913. bool shouldBeCalledOnce(const ParmVarDecl *Parameter) const {
  914. return isExplicitlyMarked(Parameter) ||
  915. (CheckConventionalParameters &&
  916. (isConventional(Parameter->getName()) ||
  917. hasConventionalSuffix(Parameter->getName())) &&
  918. isConventional(Parameter->getType()));
  919. }
  920. bool shouldBeCalledOnce(const DeclContext *ParamContext,
  921. const ParmVarDecl *Param) {
  922. unsigned ParamIndex = Param->getFunctionScopeIndex();
  923. if (const auto *Function = dyn_cast<FunctionDecl>(ParamContext)) {
  924. return shouldBeCalledOnce(Function, ParamIndex);
  925. }
  926. if (const auto *Method = dyn_cast<ObjCMethodDecl>(ParamContext)) {
  927. return shouldBeCalledOnce(Method, ParamIndex);
  928. }
  929. return shouldBeCalledOnce(Param);
  930. }
  931. bool shouldBeCalledOnce(const BlockDecl *Block, unsigned ParamIndex) const {
  932. return shouldBeCalledOnce(Block->getParamDecl(ParamIndex));
  933. }
  934. bool shouldBeCalledOnce(const FunctionDecl *Function,
  935. unsigned ParamIndex) const {
  936. if (ParamIndex >= Function->getNumParams()) {
  937. return false;
  938. }
  939. // 'swift_async' goes first and overrides anything else.
  940. if (auto ConventionalAsync =
  941. isConventionalSwiftAsync(Function, ParamIndex)) {
  942. return ConventionalAsync.getValue();
  943. }
  944. return shouldBeCalledOnce(Function->getParamDecl(ParamIndex)) ||
  945. (CheckConventionalParameters &&
  946. isOnlyParameterConventional(Function));
  947. }
  948. bool shouldBeCalledOnce(const ObjCMethodDecl *Method,
  949. unsigned ParamIndex) const {
  950. Selector MethodSelector = Method->getSelector();
  951. if (ParamIndex >= MethodSelector.getNumArgs()) {
  952. return false;
  953. }
  954. // 'swift_async' goes first and overrides anything else.
  955. if (auto ConventionalAsync = isConventionalSwiftAsync(Method, ParamIndex)) {
  956. return ConventionalAsync.getValue();
  957. }
  958. const ParmVarDecl *Parameter = Method->getParamDecl(ParamIndex);
  959. return shouldBeCalledOnce(Parameter) ||
  960. (CheckConventionalParameters &&
  961. isConventionalSelectorPiece(MethodSelector, ParamIndex,
  962. Parameter->getType()));
  963. }
  964. bool shouldBeCalledOnce(const CallExpr *Call, unsigned ParamIndex) const {
  965. const FunctionDecl *Function = Call->getDirectCallee();
  966. return Function && shouldBeCalledOnce(Function, ParamIndex);
  967. }
  968. bool shouldBeCalledOnce(const ObjCMessageExpr *Message,
  969. unsigned ParamIndex) const {
  970. const ObjCMethodDecl *Method = Message->getMethodDecl();
  971. return Method && ParamIndex < Method->param_size() &&
  972. shouldBeCalledOnce(Method, ParamIndex);
  973. }
  974. //===----------------------------------------------------------------------===//
  975. // Utility methods
  976. //===----------------------------------------------------------------------===//
  977. bool isCaptured(const ParmVarDecl *Parameter) const {
  978. if (const BlockDecl *Block = dyn_cast<BlockDecl>(AC.getDecl())) {
  979. return Block->capturesVariable(Parameter);
  980. }
  981. return false;
  982. }
  983. // Return a call site where the block is called exactly once or null otherwise
  984. const Expr *getBlockGuaraneedCallSite(const BlockExpr *Block) const {
  985. ParentMap &PM = AC.getParentMap();
  986. // We don't want to track the block through assignments and so on; instead
  987. // we simply look at how the block is used, and if it is used directly in a
  988. // call, we decide based on what that call is.
  989. //
  990. // In order to do this, we go up the parents of the block looking for
  991. // a call or a message expression. These might not be immediate parents
  992. // of the actual block expression due to casts and parens, so we skip them.
  993. for (const Stmt *Prev = Block, *Current = PM.getParent(Block);
  994. Current != nullptr; Prev = Current, Current = PM.getParent(Current)) {
  995. // Skip no-op (for our case) operations.
  996. if (isa<CastExpr>(Current) || isa<ParenExpr>(Current))
  997. continue;
  998. // At this point, Prev represents our block as an immediate child of the
  999. // call.
  1000. if (const auto *Call = dyn_cast<CallExpr>(Current)) {
  1001. // It might be the call of the Block itself...
  1002. if (Call->getCallee() == Prev)
  1003. return Call;
  1004. // ...or it can be an indirect call of the block.
  1005. return shouldBlockArgumentBeCalledOnce(Call, Prev) ? Call : nullptr;
  1006. }
  1007. if (const auto *Message = dyn_cast<ObjCMessageExpr>(Current)) {
  1008. return shouldBlockArgumentBeCalledOnce(Message, Prev) ? Message
  1009. : nullptr;
  1010. }
  1011. break;
  1012. }
  1013. return nullptr;
  1014. }
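// For example (made-up code): for the block in "dispatch_async(queue, ^{...})"
// the parent walk stops at the dispatch_async call, whose second parameter is
// on the known called-once list, so that call is returned. For "^{...}()" the
// direct call of the block is returned. If the block is first stored in a
// variable, nullptr is returned instead.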
  1015. template <class CallLikeExpr>
  1016. bool shouldBlockArgumentBeCalledOnce(const CallLikeExpr *CallOrMessage,
  1017. const Stmt *BlockArgument) const {
  1018. // CallExpr::arguments does not interact nicely with llvm::enumerate.
  1019. llvm::ArrayRef<const Expr *> Arguments = llvm::makeArrayRef(
  1020. CallOrMessage->getArgs(), CallOrMessage->getNumArgs());
  1021. for (const auto &Argument : llvm::enumerate(Arguments)) {
  1022. if (Argument.value() == BlockArgument) {
  1023. return shouldBlockArgumentBeCalledOnce(CallOrMessage, Argument.index());
  1024. }
  1025. }
  1026. return false;
  1027. }
  1028. bool shouldBlockArgumentBeCalledOnce(const CallExpr *Call,
  1029. unsigned ParamIndex) const {
  1030. const FunctionDecl *Function = Call->getDirectCallee();
  1031. return shouldBlockArgumentBeCalledOnce(Function, ParamIndex) ||
  1032. shouldBeCalledOnce(Call, ParamIndex);
  1033. }
  1034. bool shouldBlockArgumentBeCalledOnce(const ObjCMessageExpr *Message,
  1035. unsigned ParamIndex) const {
  1036. // At the moment, we don't have any Obj-C methods we want to specifically
  1037. // check in here.
  1038. return shouldBeCalledOnce(Message, ParamIndex);
  1039. }
  1040. static bool shouldBlockArgumentBeCalledOnce(const FunctionDecl *Function,
  1041. unsigned ParamIndex) {
  1042. // There is a list of important API functions that, while neither following
  1043. // conventions nor being directly annotated, still guarantee that the
  1044. // callback parameter will be called exactly once.
  1045. //
  1046. // Here we check if this is the case.
  1047. return Function &&
  1048. llvm::any_of(KNOWN_CALLED_ONCE_PARAMETERS,
  1049. [Function, ParamIndex](
  1050. const KnownCalledOnceParameter &Reference) {
  1051. return Reference.FunctionName ==
  1052. Function->getName() &&
  1053. Reference.ParamIndex == ParamIndex;
  1054. });
  1055. }
  1056. /// Return true if the analyzed function is actually a default implementation
  1057. /// of the method that has to be overridden.
  1058. ///
  1059. /// These functions can have tracked parameters, but wouldn't call them
  1060. /// because they are not designed to perform any meaningful actions.
  1061. ///
  1062. /// There are a couple of flavors of such default implementations:
  1063. /// 1. Empty methods or methods with a single return statement
  1064. /// 2. Methods that have one block with a call to no return function
  1065. /// 3. Methods with only assertion-like operations
  1066. bool isPossiblyEmptyImpl() const {
  1067. if (!isa<ObjCMethodDecl>(AC.getDecl())) {
  1068. // We care only about functions that are not supposed to be called.
  1069. // Only methods can be overridden.
  1070. return false;
  1071. }
  1072. // Case #1 (without return statements)
  1073. if (FunctionCFG.size() == 2) {
  1074. // Method has only two blocks: ENTRY and EXIT.
  1075. // This is equivalent to empty function.
  1076. return true;
  1077. }
  1078. // Case #2
  1079. if (FunctionCFG.size() == 3) {
  1080. const CFGBlock &Entry = FunctionCFG.getEntry();
  1081. if (Entry.succ_empty()) {
  1082. return false;
  1083. }
  1084. const CFGBlock *OnlyBlock = *Entry.succ_begin();
  1085. // Method has only one block, let's see if it has a no-return
  1086. // element.
  1087. if (OnlyBlock && OnlyBlock->hasNoReturnElement()) {
  1088. return true;
  1089. }
  1090. // Fallthrough, CFGs with only one block can fall into #1 and #3 as well.
  1091. }
  1092. // Cases #1 (return statements) and #3.
  1093. //
  1094. // It is hard to detect that something is an assertion or came
  1095. // from an assertion. Here we use a simple heuristic:
  1096. //
  1097. // - If it came from a macro, it can be an assertion.
  1098. //
  1099. // Additionally, we can't assume a number of basic blocks or the CFG's
  1100. // structure because assertions might include loops and conditions.
  1101. return llvm::all_of(FunctionCFG, [](const CFGBlock *BB) {
  1102. if (!BB) {
  1103. // Unreachable blocks are totally fine.
  1104. return true;
  1105. }
  1106. // Return statements can have sub-expressions that are represented as
  1107. // separate statements of a basic block. We should allow this.
  1108. // This parent map will be initialized with a parent tree for all
  1109. // subexpressions of the block's return statement (if it has one).
  1110. std::unique_ptr<ParentMap> ReturnChildren;
  1111. return llvm::all_of(
  1112. llvm::reverse(*BB), // we should start with return statements, if we
  1113. // have any, i.e. from the bottom of the block
  1114. [&ReturnChildren](const CFGElement &Element) {
  1115. if (Optional<CFGStmt> S = Element.getAs<CFGStmt>()) {
  1116. const Stmt *SuspiciousStmt = S->getStmt();
  1117. if (isa<ReturnStmt>(SuspiciousStmt)) {
  1118. // Let's initialize this structure to test whether
  1119. // some further statement is a part of this return.
  1120. ReturnChildren = std::make_unique<ParentMap>(
  1121. const_cast<Stmt *>(SuspiciousStmt));
  1122. // Return statements are allowed as part of #1.
  1123. return true;
  1124. }
  1125. return SuspiciousStmt->getBeginLoc().isMacroID() ||
  1126. (ReturnChildren &&
  1127. ReturnChildren->hasParent(SuspiciousStmt));
  1128. }
  1129. return true;
  1130. });
  1131. });
  1132. }
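
  // An illustrative sketch of the flavors handled by isPossiblyEmptyImpl
  // above (method names are hypothetical):
  //
  //   - (void)fetchWithCompletion:(void (^)(void))handler {}     // #1
  //
  //   - (void)fetchWithCompletion:(void (^)(void))handler {
  //     abort();                                                  // #2
  //   }
  //
  //   - (void)fetchWithCompletion:(void (^)(void))handler {
  //     NSAssert(NO, @"Subclasses must override this method");    // #3
  //   }
  //
  // None of these is expected to actually call 'handler'.
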
  /// Check if parameter with the given index has ever escaped.
  bool hasEverEscaped(unsigned Index) const {
    return llvm::any_of(States, [Index](const State &StateForOneBB) {
      return StateForOneBB.getKindFor(Index) == ParameterStatus::Escaped;
    });
  }

  /// Return status stored for the given basic block.
  /// \{
  State &getState(const CFGBlock *BB) {
    assert(BB);
    return States[BB->getBlockID()];
  }
  const State &getState(const CFGBlock *BB) const {
    assert(BB);
    return States[BB->getBlockID()];
  }
  /// \}

  /// Assign status to the given basic block.
  ///
  /// Returns true when the stored status changed.
  bool assignState(const CFGBlock *BB, const State &ToAssign) {
    State &Current = getState(BB);
    if (Current == ToAssign) {
      return false;
    }

    Current = ToAssign;
    return true;
  }

  /// Join all incoming statuses for the given basic block.
  State joinSuccessors(const CFGBlock *BB) const {
    auto Succs =
        llvm::make_filter_range(BB->succs(), [this](const CFGBlock *Succ) {
          return Succ && this->getState(Succ).isVisited();
        });
    // We came to this block from somewhere after all.
    assert(!Succs.empty() &&
           "Basic block should have at least one visited successor");

    State Result = getState(*Succs.begin());

    for (const CFGBlock *Succ : llvm::drop_begin(Succs, 1)) {
      Result.join(getState(Succ));
    }

    if (const Expr *Condition = getCondition(BB->getTerminatorStmt())) {
      handleConditional(BB, Condition, Result);
    }

    return Result;
  }

  void handleConditional(const CFGBlock *BB, const Expr *Condition,
                         State &ToAlter) const {
    handleParameterCheck(BB, Condition, ToAlter);
    if (SuppressOnConventionalErrorPaths) {
      handleConventionalCheck(BB, Condition, ToAlter);
    }
  }

  void handleParameterCheck(const CFGBlock *BB, const Expr *Condition,
                            State &ToAlter) const {
    // In this function, we try to deal with the following pattern:
    //
    //   if (parameter)
    //     parameter(...);
    //
    // It's not good to show a warning here because clearly 'parameter'
    // couldn't and shouldn't be called on the 'else' path.
    //
    // Let's check if this if statement has a check involving one of
    // the tracked parameters.
    if (const ParmVarDecl *Parameter = findReferencedParmVarDecl(
            Condition,
            /* ShouldRetrieveFromComparisons = */ true)) {
      if (const auto Index = getIndex(*Parameter)) {
        ParameterStatus &CurrentStatus = ToAlter.getStatusFor(*Index);

        // We don't want to dive deep into the semantics of the check and
        // figure out whether it was a check for null or for something else.
        // We simply trust the user that they know what they are doing.
        //
        // For this reason, in the following loop we look for the
        // best-looking option.
        for (const CFGBlock *Succ : BB->succs()) {
          if (!Succ)
            continue;

          const ParameterStatus &StatusInSucc =
              getState(Succ).getStatusFor(*Index);

          if (StatusInSucc.isErrorStatus()) {
            continue;
          }

          // Let's use this status instead.
          CurrentStatus = StatusInSucc;

          if (StatusInSucc.getKind() == ParameterStatus::DefinitelyCalled) {
            // This is the best option to have and we already found it.
            break;
          }

          // If we found 'Escaped' first, we still might find
          // 'DefinitelyCalled' on the other branch. And we prefer the latter.
        }
      }
    }
  }

  void handleConventionalCheck(const CFGBlock *BB, const Expr *Condition,
                               State &ToAlter) const {
    // Even when the analysis is technically correct, it is a widespread
    // pattern not to call completion handlers in some scenarios. These
    // conditions usually have typical names, such as 'error' or 'cancel'.
    if (!mentionsAnyOfConventionalNames(Condition)) {
      return;
    }

    for (const auto &IndexedStatus : llvm::enumerate(ToAlter)) {
      const ParmVarDecl *Parameter = getParameter(IndexedStatus.index());
      // Conventions do not apply to explicitly marked parameters.
      if (isExplicitlyMarked(Parameter)) {
        continue;
      }

      ParameterStatus &CurrentStatus = IndexedStatus.value();

      // If we find that the user calls the callback on one of the branches
      // and doesn't on the other, we believe that they know what they are
      // doing and trust them.
      //
      // There are two possible scenarios for that:
      //   1. Current status is 'MaybeCalled' and one of the branches is
      //      'DefinitelyCalled'
      //   2. Current status is 'NotCalled' and one of the branches is
      //      'Escaped'
      if (isLosingCall(ToAlter, BB, IndexedStatus.index()) ||
          isLosingEscape(ToAlter, BB, IndexedStatus.index())) {
        CurrentStatus = ParameterStatus::Escaped;
      }
    }
  }
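
  // An illustrative sketch of the convention handleConventionalCheck targets
  // (names are illustrative):
  //
  //   if (error) {
  //     return;  // by convention, the completion handler is often
  //              // intentionally not called on error paths like this
  //   }
  //   completionHandler(result);
  //
  // When this suppression is enabled, the joined error status of such a
  // parameter is replaced with 'Escaped', so the branch that skips the call
  // is not reported.
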
  bool isLosingCall(const State &StateAfterJoin, const CFGBlock *JoinBlock,
                    unsigned ParameterIndex) const {
    // Let's check if the block represents DefinitelyCalled -> MaybeCalled
    // transition.
    return isLosingJoin(StateAfterJoin, JoinBlock, ParameterIndex,
                        ParameterStatus::MaybeCalled,
                        ParameterStatus::DefinitelyCalled);
  }

  bool isLosingEscape(const State &StateAfterJoin, const CFGBlock *JoinBlock,
                      unsigned ParameterIndex) const {
    // Let's check if the block represents Escaped -> NotCalled transition.
    return isLosingJoin(StateAfterJoin, JoinBlock, ParameterIndex,
                        ParameterStatus::NotCalled, ParameterStatus::Escaped);
  }

  bool isLosingJoin(const State &StateAfterJoin, const CFGBlock *JoinBlock,
                    unsigned ParameterIndex, ParameterStatus::Kind AfterJoin,
                    ParameterStatus::Kind BeforeJoin) const {
    assert(!ParameterStatus::isErrorStatus(BeforeJoin) &&
           ParameterStatus::isErrorStatus(AfterJoin) &&
           "It's not a losing join if statuses do not represent "
           "correct-to-error transition");

    const ParameterStatus &CurrentStatus =
        StateAfterJoin.getStatusFor(ParameterIndex);

    return CurrentStatus.getKind() == AfterJoin &&
           anySuccessorHasStatus(JoinBlock, ParameterIndex, BeforeJoin);
  }

  /// Return true if any of the successors of the given basic block has
  /// a specified status for the given parameter.
  bool anySuccessorHasStatus(const CFGBlock *Parent, unsigned ParameterIndex,
                             ParameterStatus::Kind ToFind) const {
    return llvm::any_of(
        Parent->succs(), [this, ParameterIndex, ToFind](const CFGBlock *Succ) {
          return Succ && getState(Succ).getKindFor(ParameterIndex) == ToFind;
        });
  }
  /// Check given expression that was discovered to escape.
  void checkEscapee(const Expr *E) {
    if (const ParmVarDecl *Parameter = findReferencedParmVarDecl(E)) {
      checkEscapee(*Parameter);
    }
  }

  /// Check given parameter that was discovered to escape.
  void checkEscapee(const ParmVarDecl &Parameter) {
    if (auto Index = getIndex(Parameter)) {
      processEscapeFor(*Index);
    }
  }

  /// Mark all parameters in the current state as 'no-return'.
  void markNoReturn() {
    for (ParameterStatus &PS : CurrentState) {
      PS = ParameterStatus::NoReturn;
    }
  }

  /// Check if the given assignment represents suppression and act on it.
  void checkSuppression(const BinaryOperator *Assignment) {
    // Suppression has the following form:
    //
    //   parameter = 0;
    //
    // 0 can be of any form (NULL, nil, etc.)
    if (auto Index = getIndexOfExpression(Assignment->getLHS())) {
      // We don't care what is written in the RHS, it could be whatever
      // we can interpret as 0.
      if (auto Constant =
              Assignment->getRHS()->IgnoreParenCasts()->getIntegerConstantExpr(
                  AC.getASTContext())) {
        ParameterStatus &CurrentParamStatus = CurrentState.getStatusFor(*Index);

        if (0 == *Constant && CurrentParamStatus.seenAnyCalls()) {
          // Even though this suppression mechanism is introduced to tackle
          // false positives for multiple calls, the fact that the user has
          // to use suppression can also tell us that we couldn't figure out
          // how different paths cancel each other out. And if that is true,
          // we will most certainly have false positives about parameters not
          // being called on certain paths.
          //
          // For this reason, we abandon tracking this parameter altogether.
          CurrentParamStatus = ParameterStatus::Reported;
        }
      }
    }
  }
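
  // For illustration, all of the following assignments match the syntactic
  // form recognized by checkSuppression ('completionHandler' stands for a
  // tracked parameter):
  //
  //   completionHandler = 0;
  //   completionHandler = NULL;
  //   completionHandler = nil;
  //
  // Whether such an assignment actually acts as a suppression additionally
  // depends on the 'seenAnyCalls()' condition checked above.
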
public:
  //===----------------------------------------------------------------------===//
  //                          Tree traversal methods
  //===----------------------------------------------------------------------===//

  void VisitCallExpr(const CallExpr *Call) {
    // This call might be a direct call, i.e. a call of the parameter itself...
    checkDirectCall(Call);

    // ...or an indirect call, i.e. when the parameter is an argument.
    checkIndirectCall(Call);
  }

  void VisitObjCMessageExpr(const ObjCMessageExpr *Message) {
    // The most common situation that we are defending against here is
    // copying a tracked parameter.
    if (const Expr *Receiver = Message->getInstanceReceiver()) {
      checkEscapee(Receiver);
    }

    // Message expressions, unlike calls, cannot be direct.
    checkIndirectCall(Message);
  }

  void VisitBlockExpr(const BlockExpr *Block) {
    // Block expressions are tricky. It is a very common practice to capture
    // completion handlers by blocks and use them there.
    // For this reason, it is important to analyze blocks and report warnings
    // for completion handler misuse in blocks.
    //
    // However, it can be quite difficult to track how the block itself is
    // being used. A fully precise analysis of that would be similar to alias
    // analysis for completion handlers and can be too heavyweight for a
    // compile-time diagnostic. Instead, we judge only by the immediate use of
    // the block.
    //
    // Here, we try to find a call expression where we know, due to
    // conventions, annotations, or other reasons, that the block is called
    // once and only once.
    const Expr *CalledOnceCallSite = getBlockGuaraneedCallSite(Block);

    // We need to report this information to the handler because in the
    // situation when we know that the block is called exactly once, we can be
    // stricter in terms of reported diagnostics.
    if (CalledOnceCallSite) {
      Handler.handleBlockThatIsGuaranteedToBeCalledOnce(Block->getBlockDecl());
    } else {
      Handler.handleBlockWithNoGuarantees(Block->getBlockDecl());
    }

    for (const auto &Capture : Block->getBlockDecl()->captures()) {
      if (const auto *Param = dyn_cast<ParmVarDecl>(Capture.getVariable())) {
        if (auto Index = getIndex(*Param)) {
          if (CalledOnceCallSite) {
            // The call site of a block can be considered a call site of the
            // captured parameter we track.
            processCallFor(*Index, CalledOnceCallSite);
          } else {
            // We should still consider this block an escape for the parameter
            // if we don't know about its call site or the number of times it
            // can be invoked.
            processEscapeFor(*Index);
          }
        }
      }
    }
  }
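
  // An illustrative sketch of the two outcomes above (names are illustrative;
  // 'knownCalledOnceAPI' stands for any call site we recognize as invoking
  // its block argument exactly once):
  //
  //   knownCalledOnceAPI(^{ completionHandler(result); });
  //     // counts as a call of the captured 'completionHandler'
  //
  //   self.retryBlock = ^{ completionHandler(result); };
  //     // no guarantee about how often the block runs -> the parameter
  //     // escapes
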
  void VisitBinaryOperator(const BinaryOperator *Op) {
    if (Op->getOpcode() == clang::BO_Assign) {
      // Let's check if one of the tracked parameters is assigned into
      // something; if it is, we don't want to track extra variables, so we
      // consider it an escapee.
      checkEscapee(Op->getRHS());

      // Let's check whether this assignment is a suppression.
      checkSuppression(Op);
    }
  }
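
  // For illustration (names are illustrative): assigning a tracked parameter
  // to another variable,
  //
  //   void (^localCopy)(void);
  //   localCopy = completionHandler;
  //
  // makes the parameter escape, because we do not follow calls that happen
  // through 'localCopy'.
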
  void VisitDeclStmt(const DeclStmt *DS) {
    // Variable initialization is not an assignment and should be handled
    // separately.
    //
    // Multiple declarations can be a part of one declaration statement.
    for (const auto *Declaration : DS->getDeclGroup()) {
      if (const auto *Var = dyn_cast<VarDecl>(Declaration)) {
        if (Var->getInit()) {
          checkEscapee(Var->getInit());
        }

        if (Var->hasAttr<CleanupAttr>()) {
          FunctionHasCleanupVars = true;
        }
      }
    }
  }
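
  // An illustrative sketch of the two cases above (names are illustrative):
  //
  //   // The initializer references the tracked parameter -> it escapes.
  //   void (^localCopy)(void) = completionHandler;
  //
  //   // A cleanup variable -> remembered in FunctionHasCleanupVars.
  //   __attribute__((cleanup(runHandler))) int guard = 0;
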
  void VisitCStyleCastExpr(const CStyleCastExpr *Cast) {
    // We consider '(void)parameter' as a manual no-op escape.
    // It should be used to explicitly tell the analysis that this parameter
    // is intentionally not called on this path.
    if (Cast->getType().getCanonicalType()->isVoidType()) {
      checkEscapee(Cast->getSubExpr());
    }
  }
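
  // For illustration (names are illustrative):
  //
  //   if (alreadyHandledElsewhere) {
  //     (void)completionHandler;  // deliberately not called on this path
  //     return;
  //   }
  //   completionHandler(result);
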
  void VisitObjCAtThrowStmt(const ObjCAtThrowStmt *) {
    // It is OK not to call marked parameters on exceptional paths.
    markNoReturn();
  }

private:
  unsigned size() const { return TrackedParams.size(); }

  llvm::Optional<unsigned> getIndexOfCallee(const CallExpr *Call) const {
    return getIndexOfExpression(Call->getCallee());
  }

  llvm::Optional<unsigned> getIndexOfExpression(const Expr *E) const {
    if (const ParmVarDecl *Parameter = findReferencedParmVarDecl(E)) {
      return getIndex(*Parameter);
    }

    return llvm::None;
  }

  llvm::Optional<unsigned> getIndex(const ParmVarDecl &Parameter) const {
    // The expected number of parameters that we actually track is 1.
    //
    // Also, the number of declared parameters is never anywhere near hundreds
    // of thousands.
    //
    // In this setting, linear search seems reasonable and even performs
    // better than bisection.
    ParamSizedVector<const ParmVarDecl *>::const_iterator It =
        llvm::find(TrackedParams, &Parameter);

    if (It != TrackedParams.end()) {
      return It - TrackedParams.begin();
    }

    return llvm::None;
  }

  const ParmVarDecl *getParameter(unsigned Index) const {
    assert(Index < TrackedParams.size());
    return TrackedParams[Index];
  }

  const CFG &FunctionCFG;
  AnalysisDeclContext &AC;
  CalledOnceCheckHandler &Handler;
  bool CheckConventionalParameters;
  // As of now, we turn this behavior off. So, we are still going to report
  // missing calls on paths that look intentional.
  // Technically such reports are true positives, but they can make some users
  // grumpy because of the sheer number of warnings.
  // It can be turned back on if we decide that we want it the other way
  // around.
  bool SuppressOnConventionalErrorPaths = false;

  // The user can annotate variable declarations with cleanup functions, which
  // essentially impose custom destructor logic on those variables.
  // It is possible to use them, however, to call tracked parameters on all
  // exits from the function. For this reason, we track whether the function
  // actually has such variables.
  bool FunctionHasCleanupVars = false;

  State CurrentState;
  ParamSizedVector<const ParmVarDecl *> TrackedParams;
  CFGSizedVector<State> States;
};

} // end anonymous namespace

namespace clang {
void checkCalledOnceParameters(AnalysisDeclContext &AC,
                               CalledOnceCheckHandler &Handler,
                               bool CheckConventionalParameters) {
  CalledOnceChecker::check(AC, Handler, CheckConventionalParameters);
}
} // end namespace clang